Compare commits

main...v0.13.0

No commits in common. "main" and "v0.13.0" have entirely different histories.

464 changed files with 6970 additions and 343327 deletions

.envrc

@@ -1 +0,0 @@
use flake


@@ -1,17 +0,0 @@
name: Advisories
on:
schedule:
- cron: '0 18 * * *'
jobs:
cargo-deny:
runs-on: ubuntu-latest
strategy:
matrix:
checks:
- advisories
- bans licenses sources
steps:
- uses: actions/checkout@v2
- uses: EmbarkStudios/cargo-deny-action@v1
with:
command: check ${{ matrix.checks }}

.github/workflows/ci.yaml

@@ -1,177 +0,0 @@
name: CI
on:
push:
branches:
- main
pull_request:
branches:
- main
jobs:
fmt:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: 1.67.0
default: true
components: rustfmt
- uses: Swatinem/rust-cache@v1
- run: ./scripts/ci/fmt
shell: bash
lint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: 1.67.0
default: true
components: clippy
- uses: Swatinem/rust-cache@v1
- run: ./scripts/ci/lint
shell: bash
docs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: 1.67.0
default: true
- uses: Swatinem/rust-cache@v1
- name: Build rust docs
run: ./scripts/ci/rust-docs
shell: bash
- name: Install doxygen
run: sudo apt-get install -y doxygen
shell: bash
cargo-deny:
runs-on: ubuntu-latest
strategy:
matrix:
checks:
- advisories
- bans licenses sources
continue-on-error: ${{ matrix.checks == 'advisories' }}
steps:
- uses: actions/checkout@v2
- uses: EmbarkStudios/cargo-deny-action@v1
with:
arguments: '--manifest-path ./rust/Cargo.toml'
command: check ${{ matrix.checks }}
wasm_tests:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Install wasm-bindgen-cli
run: cargo install wasm-bindgen-cli wasm-opt
- name: Install wasm32 target
run: rustup target add wasm32-unknown-unknown
- name: run tests
run: ./scripts/ci/wasm_tests
deno_tests:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: denoland/setup-deno@v1
with:
deno-version: v1.x
- name: Install wasm-bindgen-cli
run: cargo install wasm-bindgen-cli wasm-opt
- name: Install wasm32 target
run: rustup target add wasm32-unknown-unknown
- name: run tests
run: ./scripts/ci/deno_tests
js_fmt:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: install
run: yarn global add prettier
- name: format
run: prettier -c javascript/.prettierrc javascript
js_tests:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Install wasm-bindgen-cli
run: cargo install wasm-bindgen-cli wasm-opt
- name: Install wasm32 target
run: rustup target add wasm32-unknown-unknown
- name: run tests
run: ./scripts/ci/js_tests
cmake_build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: nightly-2023-01-26
default: true
- uses: Swatinem/rust-cache@v1
- name: Install CMocka
run: sudo apt-get install -y libcmocka-dev
- name: Install/update CMake
uses: jwlawson/actions-setup-cmake@v1.12
with:
cmake-version: latest
- name: Install rust-src
run: rustup component add rust-src
- name: Build and test C bindings
run: ./scripts/ci/cmake-build Release Static
shell: bash
linux:
runs-on: ubuntu-latest
strategy:
matrix:
toolchain:
- 1.67.0
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: ${{ matrix.toolchain }}
default: true
- uses: Swatinem/rust-cache@v1
- run: ./scripts/ci/build-test
shell: bash
macos:
runs-on: macos-latest
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: 1.67.0
default: true
- uses: Swatinem/rust-cache@v1
- run: ./scripts/ci/build-test
shell: bash
windows:
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: 1.67.0
default: true
- uses: Swatinem/rust-cache@v1
- run: ./scripts/ci/build-test
shell: bash

.github/workflows/docs.yaml

@@ -1,52 +0,0 @@
on:
push:
branches:
- main
name: Documentation
jobs:
deploy-docs:
concurrency: deploy-docs
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- name: Cache
uses: Swatinem/rust-cache@v1
- name: Clean docs dir
run: rm -rf docs
shell: bash
- name: Clean Rust docs dir
uses: actions-rs/cargo@v1
with:
command: clean
args: --manifest-path ./rust/Cargo.toml --doc
- name: Build Rust docs
uses: actions-rs/cargo@v1
with:
command: doc
args: --manifest-path ./rust/Cargo.toml --workspace --all-features --no-deps
- name: Move Rust docs
run: mkdir -p docs && mv rust/target/doc/* docs/.
shell: bash
- name: Configure root page
run: echo '<meta http-equiv="refresh" content="0; url=automerge">' > docs/index.html
- name: Deploy docs
uses: peaceiris/actions-gh-pages@v3
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: ./docs


@@ -1,214 +0,0 @@
name: Release
on:
push:
branches:
- main
jobs:
check_if_wasm_version_upgraded:
name: Check if WASM version has been upgraded
runs-on: ubuntu-latest
outputs:
wasm_version: ${{ steps.version-updated.outputs.current-package-version }}
wasm_has_updated: ${{ steps.version-updated.outputs.has-updated }}
steps:
- uses: JiPaix/package-json-updated-action@v1.0.5
id: version-updated
with:
path: rust/automerge-wasm/package.json
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
publish-wasm:
name: Publish WASM package
runs-on: ubuntu-latest
needs:
- check_if_wasm_version_upgraded
# We create the release only if the version in the package.json has been upgraded
if: needs.check_if_wasm_version_upgraded.outputs.wasm_has_updated == 'true'
steps:
- uses: actions/setup-node@v3
with:
node-version: '16.x'
registry-url: 'https://registry.npmjs.org'
- uses: denoland/setup-deno@v1
- uses: actions/checkout@v3
with:
fetch-depth: 0
ref: ${{ github.ref }}
- name: Get rid of local github workflows
run: rm -r .github/workflows
- name: Remove tmp_branch if it exists
run: git push origin :tmp_branch || true
- run: git checkout -b tmp_branch
- name: Install wasm-bindgen-cli
run: cargo install wasm-bindgen-cli wasm-opt
- name: Install wasm32 target
run: rustup target add wasm32-unknown-unknown
- name: run wasm js tests
id: wasm_js_tests
run: ./scripts/ci/wasm_tests
- name: run wasm deno tests
id: wasm_deno_tests
run: ./scripts/ci/deno_tests
- name: build release
id: build_release
run: |
npm --prefix $GITHUB_WORKSPACE/rust/automerge-wasm run release
- name: Collate deno release files
if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success'
run: |
mkdir $GITHUB_WORKSPACE/deno_wasm_dist
cp $GITHUB_WORKSPACE/rust/automerge-wasm/deno/* $GITHUB_WORKSPACE/deno_wasm_dist
cp $GITHUB_WORKSPACE/rust/automerge-wasm/index.d.ts $GITHUB_WORKSPACE/deno_wasm_dist
cp $GITHUB_WORKSPACE/rust/automerge-wasm/README.md $GITHUB_WORKSPACE/deno_wasm_dist
cp $GITHUB_WORKSPACE/rust/automerge-wasm/LICENSE $GITHUB_WORKSPACE/deno_wasm_dist
sed -i '1i /// <reference types="./index.d.ts" />' $GITHUB_WORKSPACE/deno_wasm_dist/automerge_wasm.js
- name: Create npm release
if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success'
run: |
if [ "$(npm --prefix $GITHUB_WORKSPACE/rust/automerge-wasm show . version)" = "$VERSION" ]; then
echo "This version is already published"
exit 0
fi
EXTRA_ARGS="--access public"
if [[ $VERSION == *"alpha."* ]] || [[ $VERSION == *"beta."* ]] || [[ $VERSION == *"rc."* ]]; then
echo "Is pre-release version"
EXTRA_ARGS="$EXTRA_ARGS --tag next"
fi
if [ "$NODE_AUTH_TOKEN" = "" ]; then
echo "Can't publish on NPM, You need a NPM_TOKEN secret."
false
fi
npm publish $GITHUB_WORKSPACE/rust/automerge-wasm $EXTRA_ARGS
env:
NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
VERSION: ${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
- name: Commit wasm deno release files
run: |
git config --global user.name "actions"
git config --global user.email actions@github.com
git add $GITHUB_WORKSPACE/deno_wasm_dist
git commit -am "Add deno release files"
git push origin tmp_branch
- name: Tag wasm release
if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success'
uses: softprops/action-gh-release@v1
with:
name: Automerge Wasm v${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
tag_name: js/automerge-wasm-${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
target_commitish: tmp_branch
generate_release_notes: false
draft: false
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Remove tmp_branch
run: git push origin :tmp_branch
check_if_js_version_upgraded:
name: Check if JS version has been upgraded
runs-on: ubuntu-latest
outputs:
js_version: ${{ steps.version-updated.outputs.current-package-version }}
js_has_updated: ${{ steps.version-updated.outputs.has-updated }}
steps:
- uses: JiPaix/package-json-updated-action@v1.0.5
id: version-updated
with:
path: javascript/package.json
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
publish-js:
name: Publish JS package
runs-on: ubuntu-latest
needs:
- check_if_js_version_upgraded
- check_if_wasm_version_upgraded
- publish-wasm
# We create the release only if the version in the package.json has been upgraded and after the WASM release
if: |
(always() && ! cancelled()) &&
(needs.publish-wasm.result == 'success' || needs.publish-wasm.result == 'skipped') &&
needs.check_if_js_version_upgraded.outputs.js_has_updated == 'true'
steps:
- uses: actions/setup-node@v3
with:
node-version: '16.x'
registry-url: 'https://registry.npmjs.org'
- uses: denoland/setup-deno@v1
- uses: actions/checkout@v3
with:
fetch-depth: 0
ref: ${{ github.ref }}
- name: Get rid of local github workflows
run: rm -r .github/workflows
- name: Remove js_tmp_branch if it exists
run: git push origin :js_tmp_branch || true
- run: git checkout -b js_tmp_branch
- name: check js formatting
run: |
yarn global add prettier
prettier -c javascript/.prettierrc javascript
- name: run js tests
id: js_tests
run: |
cargo install wasm-bindgen-cli wasm-opt
rustup target add wasm32-unknown-unknown
./scripts/ci/js_tests
- name: build js release
id: build_release
run: |
npm --prefix $GITHUB_WORKSPACE/javascript run build
- name: build js deno release
id: build_deno_release
run: |
VERSION=$WASM_VERSION npm --prefix $GITHUB_WORKSPACE/javascript run deno:build
env:
WASM_VERSION: ${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
- name: run deno tests
id: deno_tests
run: |
npm --prefix $GITHUB_WORKSPACE/javascript run deno:test
- name: Collate deno release files
if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success'
run: |
mkdir $GITHUB_WORKSPACE/deno_js_dist
cp $GITHUB_WORKSPACE/javascript/deno_dist/* $GITHUB_WORKSPACE/deno_js_dist
- name: Create npm release
if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success'
run: |
if [ "$(npm --prefix $GITHUB_WORKSPACE/javascript show . version)" = "$VERSION" ]; then
echo "This version is already published"
exit 0
fi
EXTRA_ARGS="--access public"
if [[ $VERSION == *"alpha."* ]] || [[ $VERSION == *"beta."* ]] || [[ $VERSION == *"rc."* ]]; then
echo "Is pre-release version"
EXTRA_ARGS="$EXTRA_ARGS --tag next"
fi
if [ "$NODE_AUTH_TOKEN" = "" ]; then
echo "Can't publish on NPM, You need a NPM_TOKEN secret."
false
fi
npm publish $GITHUB_WORKSPACE/javascript $EXTRA_ARGS
env:
NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
VERSION: ${{ needs.check_if_js_version_upgraded.outputs.js_version }}
- name: Commit js deno release files
run: |
git config --global user.name "actions"
git config --global user.email actions@github.com
git add $GITHUB_WORKSPACE/deno_js_dist
git commit -am "Add deno js release files"
git push origin js_tmp_branch
- name: Tag JS release
if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success'
uses: softprops/action-gh-release@v1
with:
name: Automerge v${{ needs.check_if_js_version_upgraded.outputs.js_version }}
tag_name: js/automerge-${{ needs.check_if_js_version_upgraded.outputs.js_version }}
target_commitish: js_tmp_branch
generate_release_notes: false
draft: false
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Remove js_tmp_branch
run: git push origin :js_tmp_branch

.gitignore

@@ -1,6 +1,4 @@
-/.direnv
-perf.*
-/Cargo.lock
-build/
-.vim/*
 /target
+**/*.rs.bk
+Cargo.lock
+libtest.rmeta

.travis.yml

@@ -0,0 +1,25 @@
language: rust
rust:
- stable
- beta
cache: cargo
before_script:
- rustup self update
- rustup component add clippy
- rustup component add rustfmt
script:
- cargo fmt --all -- --check
- cargo clippy --all-targets --all-features -- -D warnings
- cargo build --verbose --all
- cargo test --verbose --all
jobs:
allow_failures:
- rust: nightly
fast_finish: true
deploy:
provider: cargo
on:
tags: true
condition: "$TRAVIS_RUST_VERSION = stable"
token:
secure: FWmUT2NJTcy3ccw8B1RYgvlg5SxnkEAeBU2hxXeKLmEBAjzhVPVHjwaQ5RktMRHsyKYJEfDpLD0EHUZknhyDxzCuUKzKYlGgRmtlnsCKS+gDM4j88e/OEnDvxZ2d8ag3Jp8+3GCvv2yjUHFs2JpclqR4ib8LmL6d6x+1+1uxaMOgaDhxQCDLV0eZwX5mTdGAWJl/CpxziFXHYN8/j+e58dJgWN6TUO6BBZeZmkp4xQ6iggEUgIKLLYynG5cM2XtS/j/qbL2ObloamIv9p0SNtj8wTQupJZW3JPBc77gimfeXVQd2+4B/31lJ3GW1310gVBZ9EA7BTbC3M3AkHJFPUIgfEn803zrZhm4WxGg2B+2kENWPpSRUMjhxaPuxAVStHOBl2WSsQTmTRrSUf1nvZUdixTARr6BkKakiNPqts7X/HbxE0cxkk5gtobTyNb4HFbaM/8449U8+KbX7mDXv50FGmRrKxkepOzfRdoEz4h9LnCFWweyle2bpFCQlnro+1SnBRSVmH+c1YUZbIl+He53GUEAwObcHGk+TlhVCGMtmGj/g1THOf4VcWh8C3XoO2yWIu9FoJKvJbd7qm0+dOv+QY8fxgrs4JRSSnt8rXBXhxLKe/ZXl5fHOmLca8T6i/PRfbQ9AzFSCPcz8o4hNO/lVQPSrNrkvxSF39buuYGU=

Cargo.toml

@@ -0,0 +1,10 @@
[workspace]
members = [
"automerge",
"automerge-backend",
"automerge-backend-wasm",
]
[profile.release]
lto = true

LICENSE

@@ -1,19 +1,7 @@
-Copyright (c) 2019-2021 the Automerge contributors
+Copyright 2019 Alex Good

-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

README.md

@@ -1,147 +1,75 @@
 # Automerge
-<img src='./img/sign.svg' width='500' alt='Automerge logo' />
-[![homepage](https://img.shields.io/badge/homepage-published-informational)](https://automerge.org/)
-[![main docs](https://img.shields.io/badge/docs-main-informational)](https://automerge.org/automerge-rs/automerge/)
-[![ci](https://github.com/automerge/automerge-rs/actions/workflows/ci.yaml/badge.svg)](https://github.com/automerge/automerge-rs/actions/workflows/ci.yaml)
-[![docs](https://github.com/automerge/automerge-rs/actions/workflows/docs.yaml/badge.svg)](https://github.com/automerge/automerge-rs/actions/workflows/docs.yaml)
-Automerge is a library which provides fast implementations of several different
-CRDTs, a compact compression format for these CRDTs, and a sync protocol for
-efficiently transmitting those changes over the network. The objective of the
-project is to support [local-first](https://www.inkandswitch.com/local-first/) applications in the same way that relational
-databases support server applications - by providing mechanisms for persistence
-which allow application developers to avoid thinking about hard distributed
-computing problems. Automerge aims to be PostgreSQL for your local-first app.
-If you're looking for documentation on the JavaScript implementation take a look
-at https://automerge.org/docs/hello/. There are other implementations in both
-Rust and C, but they are earlier and don't have documentation yet. You can find
-them in `rust/automerge` and `rust/automerge-c` if you are comfortable
-reading the code and tests to figure out how to use them.
-If you're familiar with CRDTs and interested in the design of Automerge in
-particular take a look at https://automerge.org/docs/how-it-works/backend/
-Finally, if you want to talk to us about this project please [join the
-Slack](https://join.slack.com/t/automerge/shared_invite/zt-e4p3760n-kKh7r3KRH1YwwNfiZM8ktw)
-## Status
-This project is formed of a core Rust implementation which is exposed via FFI in
-javascript+WASM, C, and soon other languages. Alex
-([@alexjg](https://github.com/alexjg/)) is working full time on maintaining
-automerge, other members of Ink and Switch are also contributing time and there
-are several other maintainers. The focus is currently on shipping the new JS
-package. We expect to be iterating the API and adding new features over the next
-six months so there will likely be several major version bumps in all packages
-in that time.
-In general we try and respect semver.
-### JavaScript
-A stable release of the javascript package is currently available as
-`@automerge/automerge@2.0.0`, where pre-release versions of `2.0.1` are
-available as `2.0.1-alpha.n`. `2.0.1*` packages are also available for Deno at
-https://deno.land/x/automerge
-### Rust
-The rust codebase is currently oriented around producing a performant backend
-for the Javascript wrapper and as such the API for Rust code is low level and
-not well documented. We will be returning to this over the next few months but
-for now you will need to be comfortable reading the tests and asking questions
-to figure out how to use it. If you are looking to build rust applications which
-use automerge you may want to look into
-[autosurgeon](https://github.com/alexjg/autosurgeon)
-## Repository Organisation
-- `./rust` - the rust implementation and also the Rust components of
-  platform specific wrappers (e.g. `automerge-wasm` for the WASM API or
-  `automerge-c` for the C FFI bindings)
-- `./javascript` - The javascript library which uses `automerge-wasm`
-  internally but presents a more idiomatic javascript interface
-- `./scripts` - scripts which are useful to maintenance of the repository.
-  This includes the scripts which are run in CI.
-- `./img` - static assets for use in `.md` files
-## Building
-To build this codebase you will need:
-- `rust`
-- `node`
-- `yarn`
-- `cmake`
-- `cmocka`
-You will also need to install the following with `cargo install`:
-- `wasm-bindgen-cli`
-- `wasm-opt`
-- `cargo-deny`
-And ensure you have added the `wasm32-unknown-unknown` target for rust cross-compilation.
-The various subprojects (the rust code, the wrapper projects) have their own
-build instructions, but to run the tests that will be run in CI you can run
-`./scripts/ci/run`.
-### For macOS
-These instructions worked to build locally on macOS 13.1 (arm64) as of
-Nov 29th 2022.
-```bash
-# clone the repo
-git clone https://github.com/automerge/automerge-rs
-cd automerge-rs
-# install rustup
-curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
-# install homebrew
-/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
-# install cmake, node, cmocka
-brew install cmake node cmocka
-# install yarn
-npm install --global yarn
-# install javascript dependencies
-yarn --cwd ./javascript
-# install rust dependencies
-cargo install wasm-bindgen-cli wasm-opt cargo-deny
-# get nightly rust to produce optimized automerge-c builds
-rustup toolchain install nightly
-rustup component add rust-src --toolchain nightly
-# add wasm target in addition to current architecture
-rustup target add wasm32-unknown-unknown
-# Run ci script
-./scripts/ci/run
-```
-If your build fails to find `cmocka.h` you may need to teach it about homebrew's
-installation location:
-```
-export CPATH=/opt/homebrew/include
-export LIBRARY_PATH=/opt/homebrew/lib
-./scripts/ci/run
-```
-## Contributing
-Please try and split your changes up into relatively independent commits which
-change one subsystem at a time and add good commit messages which describe what
-the change is and why you're making it (err on the side of longer commit
-messages). `git blame` should give future maintainers a good idea of why
-something is the way it is.
+[![docs](https://docs.rs/automerge/badge.svg)](https://docs.rs/automerge)
+[![crates](https://crates.io/crates/automerge)](https://crates.io/crates/automerge)
+[![Build Status](https://travis-ci.org/alexjg/automerge-rs.svg?branch=master)](https://travis-ci.org/alexjg/automerge-rs)
+This is a very early, very much work in progress implementation of [automerge](https://github.com/automerge/automerge) in rust. At the moment it implements a simple interface for reading the state of an OpSet, and a really horrendous interface for generating new changes to the Opset.
+## Plans
+We're tentatively working on a plan to write a backend for the current javascript implementation of Automerge in Rust. The javascript Automerge library is split into two parts, a "frontend" and a "backend". The "backend" contains a lot of the more complex logic of the CRDT and also has a fairly small API. Given these facts we think we might be able to write a rust implementation of the backend, which compiles to WASM and can be used as a drop in replacement for the current backend. This same rust implementation could also be used via FFI on a lot of other platforms, which would make language interop much easier. This is all early days but it's very exciting.
+For now though, it's a mostly broken pure rust implementation.
+## How to use
+Add this to your dependencies:
+```
+automerge = "0.0.2"
+```
+You'll need to export changes from automerge as JSON rather than using the encoding that `Automerge.save` uses. So first do this (in javascript):
+```javascript
+const doc = <your automerge document>
+const changes = Automerge.getHistory(doc).map(h => h.change)
+console.log(JSON.stringify(changes, null, 4))
+```
+Now you can load these changes into automerge like so:
+```rust,no_run
+extern crate automerge;
+extern crate serde_json;
+fn main() {
+    let changes: Vec<automerge::Change> = serde_json::from_str("<paste the changes JSON here>").unwrap();
+    let document = automerge::Document::load(changes).unwrap();
+    let state: serde_json::Value = document.state().unwrap();
+    println!("{:?}", state);
+}
+```
+You can create new changes to the document by doing things like this:
+```rust,no_run
+extern crate automerge;
+use automerge::{ChangeRequest, Document, Path, Value};
+fn main() {
+    let mut doc = Document::init();
+    let json_value: serde_json::Value = serde_json::from_str(
+        r#"
+        {
+            "cards_by_id": {},
+            "size_of_cards": 12.0,
+            "numRounds": 11.0,
+            "cards": [1.0, false]
+        }
+        "#,
+    )
+    .unwrap();
+    doc.create_and_apply_change(
+        Some("Some change".to_string()),
+        vec![ChangeRequest::Set {
+            path: Path::root().key("the-state".to_string()),
+            value: Value::from_json(&json_value),
+        }],
+    )
+    .unwrap();
+}
+```
+Check the docs on `ChangeRequest` for more information on what you can do.

automerge-backend-wasm/.gitignore

@@ -0,0 +1,5 @@
node_modules
/dist
/target
/pkg
/wasm-pack.log

automerge-backend-wasm/Cargo.toml

@@ -0,0 +1,53 @@
# You must change these to your own details.
[package]
name = "automerge-backend-wasm"
description = ""
version = "0.1.0"
authors = ["Alex Good <alex@memoryandthought.me>","Orion Henry <orion@inkandswitch.com>"]
categories = ["wasm"]
readme = "README.md"
edition = "2018"
[lib]
crate-type = ["cdylib","rlib"]
[features]
# If you uncomment this line, it will enable `wee_alloc`:
#default = ["wee_alloc"]
[dependencies]
# The `wasm-bindgen` crate provides the bare minimum functionality needed
# to interact with JavaScript.
automerge-backend = { path = "../automerge-backend" }
js-sys = "^0.3"
serde = "^1.0"
serde_json = "^1.0"
# `wee_alloc` is a tiny allocator for wasm that is only ~1K in code size
# compared to the default allocator's ~10K. However, it is slower than the default
# allocator, so it's not enabled by default.
wee_alloc = { version = "0.4.2", optional = true }
[dependencies.wasm-bindgen]
version = "^0.2"
features = ["serde-serialize"]
# The `web-sys` crate allows you to interact with the various browser APIs,
# like the DOM.
[dependencies.web-sys]
version = "0.3.22"
features = ["console"]
# The `console_error_panic_hook` crate provides better debugging of panics by
# logging them with `console.error`. This is great for development, but requires
# all the `std::fmt` and `std::panicking` infrastructure, so it's only enabled
# in debug mode.
[target."cfg(debug_assertions)".dependencies]
console_error_panic_hook = "0.1.5"
# These crates are used for running unit tests.
[dev-dependencies]
futures = "^0.1"
wasm-bindgen-futures = "^0.3"
wasm-bindgen-test = "^0.3"

automerge-backend-wasm/README.md

@@ -0,0 +1,48 @@
## How to install
```sh
npm install
```
## How to run in debug mode
```sh
# Builds the project and opens it in a new browser tab. Auto-reloads when the project changes.
npm start
```
## How to build in release mode
```sh
# Builds the project and places it into the `dist` folder.
npm run build
```
## How to run unit tests
```sh
# Runs tests in Firefox
npm test -- --firefox
# Runs tests in Chrome
npm test -- --chrome
# Runs tests in Safari
npm test -- --safari
```
## What does each file do?
* `Cargo.toml` contains the standard Rust metadata. You put your Rust dependencies in here. You must change this file with your details (name, description, version, authors, categories)
* `package.json` contains the standard npm metadata. You put your JavaScript dependencies in here. You must change this file with your details (author, name, version)
* `webpack.config.js` contains the Webpack configuration. You shouldn't need to change this, unless you have very special needs.
* The `js` folder contains your JavaScript code (`index.js` is used to hook everything into Webpack, you don't need to change it).
* The `src` folder contains your Rust code.
* The `static` folder contains any files that you want copied as-is into the final build. It contains an `index.html` file which loads the `index.js` file.
* The `tests` folder contains your Rust unit tests.

automerge-backend-wasm/index.js

@@ -0,0 +1,79 @@
let Backend = require("./pkg")
let { fromJS, List } = require('immutable')
function toJS(obj) {
if (List.isList(obj)) {
return obj.toJS()
}
return obj
}
let init = () => {
return { state: Backend.State.new(), clock: {}, frozen: false };
}
// A backend object is marked `frozen` once `mutate` has handed its wasm
// state to a newer backend. `clean` restores a usable state for such a
// backend by forking the shared state at the clock this backend last saw.
let clean = (backend) => {
if (backend.frozen) {
let state = backend.state.forkAt(backend.clock)
backend.state = state
backend.clock = state.getClock()
backend.frozen = false
}
return backend.state
}
// Apply `fn` to the wasm state, then freeze the old backend and wrap the
// mutated state in a fresh backend, so any lingering reference to the old
// backend forks at its old clock instead of observing the mutation.
let mutate = (oldBackend,fn) => {
let state = clean(oldBackend)
let result = fn(state)
oldBackend.frozen = true
let newBackend = { state, clock: state.getClock(), frozen: false };
return [ newBackend, result ]
}
let applyChanges = (backend,changes) => {
return mutate(backend, (b) => b.applyChanges(toJS(changes)));
}
let applyLocalChange = (backend,change) => {
return mutate(backend, (b) => b.applyLocalChange(toJS(change)));
}
let merge = (backend1,backend2) => {
// let changes = backend2.getMissingChanges(backend1.clock)
// backend1.applyChanges(changes)
// let missing_changes = remote.get_missing_changes(self.op_set.clock.clone());
// self.apply_changes(missing_changes)
return mutate(backend1, (b) => b.merge(clean(backend2)));
}
let getClock = (backend) => {
return fromJS(backend.clock);
}
let getHistory = (backend) => {
// TODO: I can't fromJS here b/c transit screws it up
let history = clean(backend).getHistory();
return history
}
let getUndoStack = (backend) => {
let stack = clean(backend).getUndoStack();
return fromJS(stack)
}
let getRedoStack = (backend) => {
let stack = clean(backend).getRedoStack();
return fromJS(stack)
}
let getPatch = (backend) => clean(backend).getPatch()
let getChanges = (backend,other) => clean(backend).getChanges(clean(other))
let getChangesForActor = (backend,actor) => clean(backend).getChangesForActor(actor)
let getMissingChanges = (backend,clock) => clean(backend).getMissingChanges(clock)
let getMissingDeps = (backend) => clean(backend).getMissingDeps()
module.exports = {
init, applyChanges, applyLocalChange, getPatch,
getChanges, getChangesForActor, getMissingChanges, getMissingDeps, merge, getClock,
getHistory, getUndoStack, getRedoStack
}
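
A minimal usage sketch of this wrapper (an assumption-laden illustration: it presumes the wasm `pkg` has been built as in the package.json scripts below, and borrows the change shape from the backend test suite):

```javascript
// Sketch only: assumes `require('automerge-backend-wasm')` resolves to the
// wrapper module above and that ./pkg has been built with wasm-pack.
const Backend = require('automerge-backend-wasm')

const ROOT_ID = '00000000-0000-0000-0000-000000000000'

// init() returns a plain JS wrapper around a wasm State.
const s0 = Backend.init()

// applyLocalChange returns a fresh backend plus a patch describing diffs;
// s0 is frozen afterwards and will fork at its old clock if reused.
const [s1, patch] = Backend.applyLocalChange(s0, {
  requestType: 'change', actor: 'actor1', seq: 1, deps: {},
  ops: [{action: 'set', obj: ROOT_ID, key: 'bird', value: 'magpie'}]
})
console.log(patch.diffs)
// e.g. [{action: 'set', obj: ROOT_ID, type: 'map', key: 'bird', value: 'magpie'}]
```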

automerge-backend-wasm/package.json

@@ -0,0 +1,22 @@
{
"author": "Orion Henry <orion@inkandswitch.com>",
"name": "automerge-backend-wasm",
"description": "wasm-bindgen bindings to the automerge-backend rust implementation",
"version": "0.1.0",
"license": "MIT",
"main": "./index.js",
"scripts": {
"build": "rimraf pkg && wasm-pack build --target nodejs --out-name index",
"release": "rimraf pkg && wasm-pack build --target nodejs --out-name index --release",
"mocha": "yarn build && mocha --bail --full-trace",
"test": "cargo test && wasm-pack test --node"
},
"dependencies": {
"immutable": "^3.8.2"
},
"devDependencies": {
"mocha": "^6.2.0",
"automerge":"^0.12.1",
"rimraf": "^2.6.3"
}
}

automerge-backend-wasm/src/lib.rs

@@ -0,0 +1,177 @@
use automerge_backend::{ActorID, AutomergeError, Backend, Change, ChangeRequest, Clock};
use serde::de::DeserializeOwned;
use serde::Serialize;
use wasm_bindgen::prelude::*;
extern crate web_sys;
#[allow(unused_macros)]
macro_rules! log {
( $( $t:tt )* ) => {
// web_sys::console::log_1(&format!( $( $t )* ).into());
};
}
#[cfg(feature = "wee_alloc")]
#[global_allocator]
static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;
fn js_to_rust<T: DeserializeOwned>(value: JsValue) -> Result<T, JsValue> {
value.into_serde().map_err(json_error_to_js)
}
fn rust_to_js<T: Serialize>(value: T) -> Result<JsValue, JsValue> {
JsValue::from_serde(&value).map_err(json_error_to_js)
}
#[wasm_bindgen]
#[derive(PartialEq, Debug, Clone)]
pub struct State {
backend: Backend,
}
#[allow(clippy::new_without_default)]
#[wasm_bindgen]
impl State {
#[wasm_bindgen(js_name = applyChanges)]
pub fn apply_changes(&mut self, changes: JsValue) -> Result<JsValue, JsValue> {
log!("apply_changes {:?}", changes);
let c: Vec<Change> = js_to_rust(changes)?;
let patch = self
.backend
.apply_changes(c)
.map_err(automerge_error_to_js)?;
rust_to_js(&patch)
}
#[wasm_bindgen(js_name = applyLocalChange)]
pub fn apply_local_change(&mut self, change: JsValue) -> Result<JsValue, JsValue> {
log!("apply_local_changes {:?}", change);
let c: ChangeRequest = js_to_rust(change)?;
let patch = self
.backend
.apply_local_change(c)
.map_err(automerge_error_to_js)?;
rust_to_js(&patch)
}
#[wasm_bindgen(js_name = getPatch)]
pub fn get_patch(&self) -> Result<JsValue, JsValue> {
log!("get_patch");
let patch = self.backend.get_patch();
rust_to_js(&patch)
}
#[wasm_bindgen(js_name = getChanges)]
pub fn get_changes(&self, state: &State) -> Result<JsValue, JsValue> {
log!("get_changes");
let changes = self
.backend
.get_changes(&state.backend)
.map_err(automerge_error_to_js)?;
rust_to_js(&changes)
}
#[wasm_bindgen(js_name = getChangesForActor)]
pub fn get_changes_for_actorid(&self, actorid: JsValue) -> Result<JsValue, JsValue> {
log!("get_changes_for_actorid");
let a: ActorID = js_to_rust(actorid)?;
let changes = self.backend.get_changes_for_actor_id(&a);
rust_to_js(&changes)
}
#[wasm_bindgen(js_name = getMissingChanges)]
pub fn get_missing_changes(&self, clock: JsValue) -> Result<JsValue, JsValue> {
log!("get_missing_changes");
let c: Clock = js_to_rust(clock)?;
let changes = self.backend.get_missing_changes(c);
rust_to_js(&changes)
}
#[wasm_bindgen(js_name = getMissingDeps)]
pub fn get_missing_deps(&self) -> Result<JsValue, JsValue> {
log!("get_missing_deps");
let clock = self.backend.get_missing_deps();
rust_to_js(&clock)
}
#[wasm_bindgen(js_name = getClock)]
pub fn get_clock(&self) -> Result<JsValue, JsValue> {
log!("get_clock");
let clock = self.backend.clock();
rust_to_js(&clock)
}
#[wasm_bindgen(js_name = getHistory)]
pub fn get_history(&self) -> Result<JsValue, JsValue> {
log!("get_history");
let history = self.backend.history();
rust_to_js(&history)
}
#[wasm_bindgen(js_name = getUndoStack)]
pub fn get_undo_stack(&self) -> Result<JsValue, JsValue> {
log!("get_undo_stack");
let stack = self.backend.undo_stack();
rust_to_js(&stack)
}
#[wasm_bindgen(js_name = getRedoStack)]
pub fn get_redo_stack(&self) -> Result<JsValue, JsValue> {
log!("get_redo_stack");
let stack = self.backend.redo_stack();
rust_to_js(&stack)
}
#[wasm_bindgen]
pub fn merge(&mut self, remote: &State) -> Result<JsValue, JsValue> {
log!("merge");
let patch = self
.backend
.merge(&remote.backend)
.map_err(automerge_error_to_js)?;
rust_to_js(&patch)
}
#[wasm_bindgen]
pub fn fork(&self) -> State {
log!("fork");
self.clone()
}
#[wasm_bindgen]
#[wasm_bindgen(js_name = forkAt)]
pub fn fork_at(&self, _clock: JsValue) -> Result<State, JsValue> {
log!("fork_at");
let clock: Clock = js_to_rust(_clock)?;
let changes = self
.backend
.history()
.iter()
.filter(|change| clock.get(&change.actor_id) >= change.seq)
.map(|&c| c.clone())
.collect();
let mut fork = State {
backend: Backend::init(),
};
let _patch = fork
.backend
.apply_changes(changes)
.map_err(automerge_error_to_js)?;
Ok(fork)
}
#[wasm_bindgen]
pub fn new() -> State {
State {
backend: Backend::init(),
}
}
}
fn automerge_error_to_js(err: AutomergeError) -> JsValue {
JsValue::from(std::format!("Automerge error: {}", err))
}
fn json_error_to_js(err: serde_json::Error) -> JsValue {
JsValue::from(std::format!("serde_json error: {}", err))
}
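
A sketch of how these bindings surface in Node without the index.js wrapper; this assumes a `wasm-pack build --target nodejs --out-name index` has produced `./pkg`, as in the package.json scripts above:

```javascript
// Sketch only: the method names come from the js_name attributes above;
// values cross the wasm boundary via serde (into_serde/from_serde).
const { State } = require('./pkg')

const state = State.new()
const patch = state.applyChanges([{
  actor: 'actor1', seq: 1, deps: {},
  ops: [{action: 'set', obj: '00000000-0000-0000-0000-000000000000',
         key: 'bird', value: 'magpie'}]
}])
console.log(patch.clock) // e.g. {actor1: 1}

// forkAt builds an independent State containing only the changes
// covered by the supplied clock.
const fork = state.forkAt({actor1: 1})
```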


@@ -0,0 +1,446 @@
const assert = require('assert')
const { List } = require('immutable')
const Backend = require('..')
const uuid = require('automerge/src/uuid')
const ROOT_ID = '00000000-0000-0000-0000-000000000000'
describe('Automerge.Backend', () => {
describe('incremental diffs', () => {
it('should assign to a key in a map', () => {
const actor = uuid()
const change1 = {actor, seq: 1, deps: {}, ops: [
{action: 'set', obj: ROOT_ID, key: 'bird', value: 'magpie'}
]}
const s0 = Backend.init()
const [s1, patch1] = Backend.applyChanges(s0, [change1])
assert.deepEqual(patch1, {
canUndo: false, canRedo: false, clock: {[actor]: 1}, deps: {[actor]: 1},
diffs: [{action: 'set', obj: ROOT_ID, type: 'map', key: 'bird', value: 'magpie'}]
})
})
it('should increment a key in a map', () => {
const actor = uuid()
const change1 = {actor, seq: 1, deps: {}, ops: [
{action: 'set', obj: ROOT_ID, key: 'counter', value: 1, datatype: 'counter'}
]}
const change2 = {actor, seq: 2, deps: {}, ops: [
{action: 'inc', obj: ROOT_ID, key: 'counter', value: 2}
]}
const s0 = Backend.init()
const [s1, patch1] = Backend.applyChanges(s0, [change1])
const [s2, patch2] = Backend.applyChanges(s1, [change2])
assert.deepEqual(patch2, {
canUndo: false, canRedo: false, clock: {[actor]: 2}, deps: {[actor]: 2},
diffs: [{action: 'set', obj: ROOT_ID, type: 'map', key: 'counter', value: 3, datatype: 'counter'}]
})
})
it('should make a conflict on assignment to the same key', () => {
const change1 = {actor: 'actor1', seq: 1, deps: {}, ops: [
{action: 'set', obj: ROOT_ID, key: 'bird', value: 'magpie'}
]}
const change2 = {actor: 'actor2', seq: 1, deps: {}, ops: [
{action: 'set', obj: ROOT_ID, key: 'bird', value: 'blackbird'}
]}
const s0 = Backend.init()
const [s1, patch1] = Backend.applyChanges(s0, [change1])
const [s2, patch2] = Backend.applyChanges(s1, [change2])
assert.deepEqual(patch2, {
canUndo: false, canRedo: false, clock: {actor1: 1, actor2: 1}, deps: {actor1: 1, actor2: 1},
diffs: [{action: 'set', obj: ROOT_ID, type: 'map', key: 'bird', value: 'blackbird',
conflicts: [{actor: 'actor1', value: 'magpie'}]}
]})
})
it('should delete a key from a map', () => {
const actor = uuid()
const change1 = {actor, seq: 1, deps: {}, ops: [
{action: 'set', obj: ROOT_ID, key: 'bird', value: 'magpie'}
]}
const change2 = {actor, seq: 2, deps: {}, ops: [
{action: 'del', obj: ROOT_ID, key: 'bird'}
]}
const s0 = Backend.init()
const [s1, patch1] = Backend.applyChanges(s0, [change1])
const [s2, patch2] = Backend.applyChanges(s1, [change2])
assert.deepEqual(patch2, {
canUndo: false, canRedo: false, clock: {[actor]: 2}, deps: {[actor]: 2},
diffs: [{action: 'remove', obj: ROOT_ID, type: 'map', key: 'bird'}]
})
})
it('should create nested maps', () => {
const birds = uuid(), actor = uuid()
const change1 = {actor, seq: 1, deps: {}, ops: [
{action: 'makeMap', obj: birds},
{action: 'set', obj: birds, key: 'wrens', value: 3},
{action: 'link', obj: ROOT_ID, key: 'birds', value: birds}
]}
const s0 = Backend.init()
const [s1, patch1] = Backend.applyChanges(s0, [change1])
assert.deepEqual(patch1, {
canUndo: false, canRedo: false, clock: {[actor]: 1}, deps: {[actor]: 1},
diffs: [
{action: 'create', obj: birds, type: 'map'},
{action: 'set', obj: birds, type: 'map', key: 'wrens', value: 3},
{action: 'set', obj: ROOT_ID, type: 'map', key: 'birds', value: birds, link: true}
]
})
})
it('should assign to keys in nested maps', () => {
const birds = uuid(), actor = uuid()
const change1 = {actor, seq: 1, deps: {}, ops: [
{action: 'makeMap', obj: birds},
{action: 'set', obj: birds, key: 'wrens', value: 3},
{action: 'link', obj: ROOT_ID, key: 'birds', value: birds}
]}
const change2 = {actor, seq: 2, deps: {}, ops: [
{action: 'set', obj: birds, key: 'sparrows', value: 15}
]}
const s0 = Backend.init()
const [s1, patch1] = Backend.applyChanges(s0, [change1])
const [s2, patch2] = Backend.applyChanges(s1, [change2])
assert.deepEqual(patch2, {
canUndo: false, canRedo: false, clock: {[actor]: 2}, deps: {[actor]: 2},
diffs: [{action: 'set', obj: birds, type: 'map', key: 'sparrows', value: 15}]
})
})
it('should create lists', () => {
const birds = uuid(), actor = uuid()
const change1 = {actor, seq: 1, deps: {}, ops: [
{action: 'makeList', obj: birds},
{action: 'ins', obj: birds, key: '_head', elem: 1},
{action: 'set', obj: birds, key: `${actor}:1`, value: 'chaffinch'},
{action: 'link', obj: ROOT_ID, key: 'birds', value: birds}
]}
const s0 = Backend.init()
const [s1, patch1] = Backend.applyChanges(s0, [change1])
assert.deepEqual(patch1, {
canUndo: false, canRedo: false, clock: {[actor]: 1}, deps: {[actor]: 1},
diffs: [
{action: 'create', obj: birds, type: 'list'},
{action: 'insert', obj: birds, type: 'list', index: 0, value: 'chaffinch', elemId: `${actor}:1`},
{action: 'set', obj: ROOT_ID, type: 'map', key: 'birds', value: birds, link: true}
]
})
})
it('should apply updates inside lists', () => {
const birds = uuid(), actor = uuid()
const change1 = {actor, seq: 1, deps: {}, ops: [
{action: 'makeList', obj: birds},
{action: 'ins', obj: birds, key: '_head', elem: 1},
{action: 'set', obj: birds, key: `${actor}:1`, value: 'chaffinch'},
{action: 'link', obj: ROOT_ID, key: 'birds', value: birds}
]}
const change2 = {actor, seq: 2, deps: {}, ops: [
{action: 'set', obj: birds, key: `${actor}:1`, value: 'greenfinch'}
]}
const s0 = Backend.init()
const [s1, patch1] = Backend.applyChanges(s0, [change1])
const [s2, patch2] = Backend.applyChanges(s1, [change2])
assert.deepEqual(patch2, {
canUndo: false, canRedo: false, clock: {[actor]: 2}, deps: {[actor]: 2},
diffs: [{action: 'set', obj: birds, type: 'list', index: 0, value: 'greenfinch'}]
})
})
it('should delete list elements', () => {
const birds = uuid(), actor = uuid()
const change1 = {actor, seq: 1, deps: {}, ops: [
{action: 'makeList', obj: birds},
{action: 'ins', obj: birds, key: '_head', elem: 1},
{action: 'set', obj: birds, key: `${actor}:1`, value: 'chaffinch'},
{action: 'link', obj: ROOT_ID, key: 'birds', value: birds}
]}
const change2 = {actor, seq: 2, deps: {}, ops: [
{action: 'del', obj: birds, key: `${actor}:1`}
]}
const s0 = Backend.init()
const [s1, patch1] = Backend.applyChanges(s0, [change1])
const [s2, patch2] = Backend.applyChanges(s1, [change2])
assert.deepEqual(patch2, {
canUndo: false, canRedo: false, clock: {[actor]: 2}, deps: {[actor]: 2},
diffs: [{action: 'remove', obj: birds, type: 'list', index: 0}]
})
})
it('should handle list element insertion and deletion in the same change', () => {
const birds = uuid(), actor = uuid()
const change1 = {actor, seq: 1, deps: {}, ops: [
{action: 'makeList', obj: birds},
{action: 'link', obj: ROOT_ID, key: 'birds', value: birds}
]}
const change2 = {actor, seq: 2, deps: {}, ops: [
{action: 'ins', obj: birds, key: '_head', elem: 1},
{action: 'del', obj: birds, key: `${actor}:1`}
]}
const s0 = Backend.init()
const [s1, patch1] = Backend.applyChanges(s0, [change1])
const [s2, patch2] = Backend.applyChanges(s1, [change2])
assert.deepEqual(patch2, {
canUndo: false, canRedo: false, clock: {[actor]: 2}, deps: {[actor]: 2},
diffs: [{action: 'maxElem', obj: birds, value: 1, type: 'list'}]
})
})
it('should support Date objects at the root', () => {
const now = new Date()
const actor = uuid(), change = {actor, seq: 1, deps: {}, ops: [
{action: 'set', obj: ROOT_ID, key: 'now', value: now.getTime(), datatype: 'timestamp'}
]}
const s0 = Backend.init()
const [s1, patch] = Backend.applyChanges(s0, [change])
assert.deepEqual(patch, {
canUndo: false, canRedo: false, clock: {[actor]: 1}, deps: {[actor]: 1},
diffs: [{action: 'set', obj: ROOT_ID, type: 'map', key: 'now', value: now.getTime(), datatype: 'timestamp'}]
})
})
it('should support Date objects in a list', () => {
const now = new Date(), list = uuid(), actor = uuid()
const change = {actor, seq: 1, deps: {}, ops: [
{action: 'makeList', obj: list},
{action: 'ins', obj: list, key: '_head', elem: 1},
{action: 'set', obj: list, key: `${actor}:1`, value: now.getTime(), datatype: 'timestamp'},
{action: 'link', obj: ROOT_ID, key: 'list', value: list}
]}
const s0 = Backend.init()
const [s1, patch] = Backend.applyChanges(s0, [change])
assert.deepEqual(patch, {
canUndo: false, canRedo: false, clock: {[actor]: 1}, deps: {[actor]: 1},
diffs: [
{action: 'create', obj: list, type: 'list'},
{action: 'insert', obj: list, type: 'list', index: 0,
value: now.getTime(), elemId: `${actor}:1`, datatype: 'timestamp'},
{action: 'set', obj: ROOT_ID, type: 'map', key: 'list', value: list, link: true}
]
})
})
})
describe('applyLocalChange()', () => {
it('should apply change requests', () => {
const actor = uuid()
const change1 = {requestType: 'change', actor, seq: 1, deps: {}, ops: [
{action: 'set', obj: ROOT_ID, key: 'bird', value: 'magpie'}
]}
const s0 = Backend.init()
const [s1, patch1] = Backend.applyLocalChange(s0, change1)
assert.deepEqual(patch1, {
actor, seq: 1, canUndo: true, canRedo: false, clock: {[actor]: 1}, deps: {[actor]: 1},
diffs: [{action: 'set', obj: ROOT_ID, type: 'map', key: 'bird', value: 'magpie'}]
})
})
it('should throw an exception on duplicate requests', () => {
const actor = uuid()
const change1 = {requestType: 'change', actor, seq: 1, deps: {}, ops: [
{action: 'set', obj: ROOT_ID, key: 'bird', value: 'magpie'}
]}
const change2 = {requestType: 'change', actor, seq: 2, deps: {}, ops: [
{action: 'set', obj: ROOT_ID, key: 'bird', value: 'jay'}
]}
const s0 = Backend.init()
const [s1, patch1] = Backend.applyLocalChange(s0, change1)
const [s2, patch2] = Backend.applyLocalChange(s1, change2)
// assert.throws(() => Backend.applyLocalChange(s2, change1), /Change request has already been applied/)
assert.throws(() => Backend.applyLocalChange(s2, change2), /Change request has already been applied/)
})
})
describe('getPatch()', () => {
it('should include the most recent value for a key', () => {
const actor = uuid()
const change1 = {actor, seq: 1, deps: {}, ops: [
{action: 'set', obj: ROOT_ID, key: 'bird', value: 'magpie'}
]}
const change2 = {actor, seq: 2, deps: {}, ops: [
{action: 'set', obj: ROOT_ID, key: 'bird', value: 'blackbird'}
]}
const s0 = Backend.init()
const [s1, patch] = Backend.applyChanges(s0, [change1, change2])
assert.deepEqual(Backend.getPatch(s1), {
canUndo: false, canRedo: false, clock: {[actor]: 2}, deps: {[actor]: 2},
diffs: [{action: 'set', obj: ROOT_ID, type: 'map', key: 'bird', value: 'blackbird'}]
})
})
it('should include conflicting values for a key', () => {
const change1 = {actor: 'actor1', seq: 1, deps: {}, ops: [
{action: 'set', obj: ROOT_ID, key: 'bird', value: 'magpie'}
]}
const change2 = {actor: 'actor2', seq: 1, deps: {}, ops: [
{action: 'set', obj: ROOT_ID, key: 'bird', value: 'blackbird'}
]}
const s0 = Backend.init()
const [s1, patch] = Backend.applyChanges(s0, [change1, change2])
assert.deepEqual(Backend.getPatch(s1), {
canUndo: false, canRedo: false, clock: {actor1: 1, actor2: 1}, deps: {actor1: 1, actor2: 1},
diffs: [{action: 'set', obj: ROOT_ID, type: 'map', key: 'bird', value: 'blackbird',
conflicts: [{actor: 'actor1', value: 'magpie'}]}
]})
})
it('should handle increments for a key in a map', () => {
const actor = uuid()
const change1 = {actor, seq: 1, deps: {}, ops: [
{action: 'set', obj: ROOT_ID, key: 'counter', value: 1, datatype: 'counter'}
]}
const change2 = {actor, seq: 2, deps: {}, ops: [
{action: 'inc', obj: ROOT_ID, key: 'counter', value: 2}
]}
const s0 = Backend.init()
const [s1, patch] = Backend.applyChanges(s0, [change1, change2])
assert.deepEqual(Backend.getPatch(s1), {
canUndo: false, canRedo: false, clock: {[actor]: 2}, deps: {[actor]: 2},
diffs: [{action: 'set', obj: ROOT_ID, type: 'map', key: 'counter', value: 3, datatype: 'counter'}]
})
})
it('should create nested maps', () => {
const birds = uuid(), actor = uuid()
const change1 = {actor, seq: 1, deps: {}, ops: [
{action: 'makeMap', obj: birds},
{action: 'set', obj: birds, key: 'wrens', value: 3},
{action: 'link', obj: ROOT_ID, key: 'birds', value: birds}
]}
const change2 = {actor, seq: 2, deps: {}, ops: [
{action: 'del', obj: birds, key: 'wrens'},
{action: 'set', obj: birds, key: 'sparrows', value: 15}
]}
const s0 = Backend.init()
const [s1, patch] = Backend.applyChanges(s0, [change1, change2])
assert.deepEqual(Backend.getPatch(s1), {
canUndo: false, canRedo: false, clock: {[actor]: 2}, deps: {[actor]: 2},
diffs: [
{action: 'create', obj: birds, type: 'map'},
{action: 'set', obj: birds, type: 'map', key: 'sparrows', value: 15},
{action: 'set', obj: ROOT_ID, type: 'map', key: 'birds', value: birds, link: true}
]
})
})
it('should create lists', () => {
const birds = uuid(), actor = uuid()
const change1 = {actor, seq: 1, deps: {}, ops: [
{action: 'makeList', obj: birds},
{action: 'ins', obj: birds, key: '_head', elem: 1},
{action: 'set', obj: birds, key: `${actor}:1`, value: 'chaffinch'},
{action: 'link', obj: ROOT_ID, key: 'birds', value: birds}
]}
const s0 = Backend.init()
const [s1, patch] = Backend.applyChanges(s0, [change1])
assert.deepEqual(Backend.getPatch(s1), {
canUndo: false, canRedo: false, clock: {[actor]: 1}, deps: {[actor]: 1},
diffs: [
{action: 'create', obj: birds, type: 'list'},
{action: 'insert', obj: birds, type: 'list', index: 0, value: 'chaffinch', elemId: `${actor}:1`},
{action: 'maxElem', obj: birds, type: 'list', value: 1},
{action: 'set', obj: ROOT_ID, type: 'map', key: 'birds', value: birds, link: true}
]
})
})
it('should include the latest state of a list', () => {
const birds = uuid(), actor = uuid()
const change1 = {actor, seq: 1, deps: {}, ops: [
{action: 'makeList', obj: birds},
{action: 'ins', obj: birds, key: '_head', elem: 1},
{action: 'set', obj: birds, key: `${actor}:1`, value: 'chaffinch'},
{action: 'ins', obj: birds, key: `${actor}:1`, elem: 2},
{action: 'set', obj: birds, key: `${actor}:2`, value: 'goldfinch'},
{action: 'link', obj: ROOT_ID, key: 'birds', value: birds}
]}
const change2 = {actor, seq: 2, deps: {}, ops: [
{action: 'del', obj: birds, key: `${actor}:1`},
{action: 'ins', obj: birds, key: `${actor}:1`, elem: 3},
{action: 'set', obj: birds, key: `${actor}:3`, value: 'greenfinch'},
{action: 'set', obj: birds, key: `${actor}:2`, value: 'goldfinches!!'}
]}
const s0 = Backend.init()
const [s1, patch] = Backend.applyChanges(s0, [change1, change2])
assert.deepEqual(Backend.getPatch(s1), {
canUndo: false, canRedo: false, clock: {[actor]: 2}, deps: {[actor]: 2},
diffs: [
{action: 'create', obj: birds, type: 'list'},
{action: 'insert', obj: birds, type: 'list', index: 0, value: 'greenfinch', elemId: `${actor}:3`},
{action: 'insert', obj: birds, type: 'list', index: 1, value: 'goldfinches!!', elemId: `${actor}:2`},
{action: 'maxElem', obj: birds, type: 'list', value: 3},
{action: 'set', obj: ROOT_ID, type: 'map', key: 'birds', value: birds, link: true}
]
})
})
it('should handle nested maps in lists', () => {
const todos = uuid(), item = uuid(), actor = uuid()
const change = {actor, seq: 1, deps: {}, ops: [
{action: 'makeList', obj: todos},
{action: 'ins', obj: todos, key: '_head', elem: 1},
{action: 'makeMap', obj: item},
{action: 'set', obj: item, key: 'done', value: false},
{action: 'set', obj: item, key: 'title', value: 'water plants'},
{action: 'link', obj: todos, key:`${actor}:1`, value: item},
{action: 'link', obj: ROOT_ID, key: 'todos', value: todos}
]}
const s0 = Backend.init()
const [s1, patch] = Backend.applyChanges(s0, [change])
assert.deepEqual(Backend.getPatch(s1), {
canUndo: false, canRedo: false, clock: {[actor]: 1}, deps: {[actor]: 1},
diffs: [
{action: 'create', obj: item, type: 'map'},
{action: 'set', obj: item, type: 'map', key: 'done', value: false},
{action: 'set', obj: item, type: 'map', key: 'title', value: 'water plants'},
{action: 'create', obj: todos, type: 'list'},
{action: 'insert', obj: todos, type: 'list', index: 0, value: item, link: true, elemId: `${actor}:1`},
{action: 'maxElem', obj: todos, type: 'list', value: 1},
{action: 'set', obj: ROOT_ID, type: 'map', key: 'todos', value: todos, link: true}
]
})
})
it('should include Date objects at the root', () => {
const now = new Date()
const actor = uuid(), change = {actor, seq: 1, deps: {}, ops: [
{action: 'set', obj: ROOT_ID, key: 'now', value: now.getTime(), datatype: 'timestamp'}
]}
const s0 = Backend.init()
const [s1, patch] = Backend.applyChanges(s0, [change])
assert.deepEqual(Backend.getPatch(s1), {
canUndo: false, canRedo: false, clock: {[actor]: 1}, deps: {[actor]: 1},
diffs: [{action: 'set', obj: ROOT_ID, type: 'map', key: 'now', value: now.getTime(), datatype: 'timestamp'}]
})
})
it('should include Date objects in a list', () => {
const now = new Date(), list = uuid(), actor = uuid()
const change = {actor, seq: 1, deps: {}, ops: [
{action: 'makeList', obj: list},
{action: 'ins', obj: list, key: '_head', elem: 1},
{action: 'set', obj: list, key: `${actor}:1`, value: now.getTime(), datatype: 'timestamp'},
{action: 'link', obj: ROOT_ID, key: 'list', value: list}
]}
const s0 = Backend.init()
const [s1, patch] = Backend.applyChanges(s0, [change])
assert.deepEqual(Backend.getPatch(s1), {
canUndo: false, canRedo: false, clock: {[actor]: 1}, deps: {[actor]: 1},
diffs: [
{action: 'create', obj: list, type: 'list'},
{action: 'insert', obj: list, type: 'list', index: 0, value: now.getTime(), elemId: `${actor}:1`, datatype: 'timestamp'},
{action: 'maxElem', obj: list, type: 'list', value: 1},
{action: 'set', obj: ROOT_ID, type: 'map', key: 'list', value: list, link: true}
]
})
})
})
describe('getHistory()', () => {
it('should start with no history ', () => {
const s0 = Backend.init()
const history = Backend.getHistory(s0)
assert.deepEqual(history,[])
})
})
})
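
The wrapper also exports `getUndoStack` and `getRedoStack`, which the suite above never exercises. A hedged sketch of calling them, reusing the same module layout and change shape as the tests (the exact stack contents are an assumption, not something these tests pin down):

```javascript
// Sketch only: assumes require('..') resolves as in the tests above and
// that one applied local change yields one undo entry.
const Backend = require('..')
const ROOT_ID = '00000000-0000-0000-0000-000000000000'

const [s1, patch] = Backend.applyLocalChange(Backend.init(), {
  requestType: 'change', actor: 'actor1', seq: 1, deps: {}, ops: [
    {action: 'set', obj: ROOT_ID, key: 'bird', value: 'magpie'}
  ]
})
console.log(patch.canUndo)                 // true, per the tests above
console.log(Backend.getUndoStack(s1).size) // immutable List; expected 1
console.log(Backend.getRedoStack(s1).size) // expected 0 until an undo
```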


@@ -0,0 +1,3 @@
--use_strict
--watch-extensions js
test/*test*.js


@@ -0,0 +1,22 @@
#![cfg(target_arch = "wasm32")]
extern crate automerge_backend_wasm;
use automerge_backend::{Key, ObjectID, Operation, PrimitiveValue};
use wasm_bindgen::JsValue;
use wasm_bindgen_test::wasm_bindgen_test;
#[wasm_bindgen_test]
fn test_wasm() {
let op1: Operation = Operation::Set {
object_id: ObjectID::ID("2ed3ffe8-0ff3-4671-9777-aa16c3e09945".to_string()),
key: Key("somekeyid".to_string()),
value: PrimitiveValue::Boolean(true),
datatype: None,
};
let js_value = JsValue::from_serde(&op1).unwrap();
let op2: Operation = js_value.into_serde().unwrap();
assert_eq!(op1, op2);
}

yarn.lock

@@ -0,0 +1,644 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1
ansi-colors@3.2.3:
version "3.2.3"
resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-3.2.3.tgz#57d35b8686e851e2cc04c403f1c00203976a1813"
integrity sha512-LEHHyuhlPY3TmuUYMh2oz89lTShfvgbmzaBcxve9t/9Wuy7Dwf4yoAKcND7KFT1HAQfqZ12qtc+DUrBMeKF9nw==
ansi-regex@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998"
integrity sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=
ansi-regex@^4.1.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.0.tgz#8b9f8f08cf1acb843756a839ca8c7e3168c51997"
integrity sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==
ansi-styles@^3.2.0, ansi-styles@^3.2.1:
version "3.2.1"
resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d"
integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==
dependencies:
color-convert "^1.9.0"
argparse@^1.0.7:
version "1.0.10"
resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911"
integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==
dependencies:
sprintf-js "~1.0.2"
automerge@^0.12.1:
version "0.12.1"
resolved "https://registry.yarnpkg.com/automerge/-/automerge-0.12.1.tgz#8e8ca23affa888c6376ee19068eab573cfa8ba09"
integrity sha512-7JOiRk4b6EP/Uj0AjmZTeYICXJmBRHFkL0U3mlTNXuDlUr3c4v/Wb8v0RXiX4UuVgGjkovcjOdiBMkVmzdu2KQ==
dependencies:
immutable "^3.8.2"
transit-immutable-js "^0.7.0"
transit-js "^0.8.861"
uuid "3.3.2"
balanced-match@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767"
integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c=
brace-expansion@^1.1.7:
version "1.1.11"
resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd"
integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==
dependencies:
balanced-match "^1.0.0"
concat-map "0.0.1"
browser-stdout@1.3.1:
version "1.3.1"
resolved "https://registry.yarnpkg.com/browser-stdout/-/browser-stdout-1.3.1.tgz#baa559ee14ced73452229bad7326467c61fabd60"
integrity sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==
camelcase@^5.0.0:
version "5.3.1"
resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320"
integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==
chalk@^2.0.1:
version "2.4.2"
resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424"
integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==
dependencies:
ansi-styles "^3.2.1"
escape-string-regexp "^1.0.5"
supports-color "^5.3.0"
cliui@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/cliui/-/cliui-5.0.0.tgz#deefcfdb2e800784aa34f46fa08e06851c7bbbc5"
integrity sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==
dependencies:
string-width "^3.1.0"
strip-ansi "^5.2.0"
wrap-ansi "^5.1.0"
color-convert@^1.9.0:
version "1.9.3"
resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8"
integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==
dependencies:
color-name "1.1.3"
color-name@1.1.3:
version "1.1.3"
resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25"
integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=
concat-map@0.0.1:
version "0.0.1"
resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b"
integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=
debug@3.2.6:
version "3.2.6"
resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b"
integrity sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==
dependencies:
ms "^2.1.1"
decamelize@^1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290"
integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=
define-properties@^1.1.2, define-properties@^1.1.3:
version "1.1.3"
resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1"
integrity sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==
dependencies:
object-keys "^1.0.12"
diff@3.5.0:
version "3.5.0"
resolved "https://registry.yarnpkg.com/diff/-/diff-3.5.0.tgz#800c0dd1e0a8bfbc95835c202ad220fe317e5a12"
integrity sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==
emoji-regex@^7.0.1:
version "7.0.3"
resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156"
integrity sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==
es-abstract@^1.17.0-next.1:
version "1.17.4"
resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.17.4.tgz#e3aedf19706b20e7c2594c35fc0d57605a79e184"
integrity sha512-Ae3um/gb8F0mui/jPL+QiqmglkUsaQf7FwBEHYIFkztkneosu9imhqHpBzQ3h1vit8t5iQ74t6PEVvphBZiuiQ==
dependencies:
es-to-primitive "^1.2.1"
function-bind "^1.1.1"
has "^1.0.3"
has-symbols "^1.0.1"
is-callable "^1.1.5"
is-regex "^1.0.5"
object-inspect "^1.7.0"
object-keys "^1.1.1"
object.assign "^4.1.0"
string.prototype.trimleft "^2.1.1"
string.prototype.trimright "^2.1.1"
es-to-primitive@^1.2.1:
version "1.2.1"
resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a"
integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==
dependencies:
is-callable "^1.1.4"
is-date-object "^1.0.1"
is-symbol "^1.0.2"
escape-string-regexp@1.0.5, escape-string-regexp@^1.0.5:
version "1.0.5"
resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4"
integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=
esprima@^4.0.0:
version "4.0.1"
resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71"
integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==
find-up@3.0.0, find-up@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73"
integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==
dependencies:
locate-path "^3.0.0"
flat@^4.1.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/flat/-/flat-4.1.0.tgz#090bec8b05e39cba309747f1d588f04dbaf98db2"
integrity sha512-Px/TiLIznH7gEDlPXcUD4KnBusa6kR6ayRUVcnEAbreRIuhkqow/mun59BuRXwoYk7ZQOLW1ZM05ilIvK38hFw==
dependencies:
is-buffer "~2.0.3"
fs.realpath@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f"
integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8=
function-bind@^1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d"
integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==
get-caller-file@^2.0.1:
version "2.0.5"
resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e"
integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==
glob@7.1.3:
version "7.1.3"
resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.3.tgz#3960832d3f1574108342dafd3a67b332c0969df1"
integrity sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==
dependencies:
fs.realpath "^1.0.0"
inflight "^1.0.4"
inherits "2"
minimatch "^3.0.4"
once "^1.3.0"
path-is-absolute "^1.0.0"
glob@^7.1.3:
version "7.1.4"
resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.4.tgz#aa608a2f6c577ad357e1ae5a5c26d9a8d1969255"
integrity sha512-hkLPepehmnKk41pUGm3sYxoFs/umurYfYJCerbXEyFIWcAzvpipAgVkBqqT9RBKMGjnq6kMuyYwha6csxbiM1A==
dependencies:
fs.realpath "^1.0.0"
inflight "^1.0.4"
inherits "2"
minimatch "^3.0.4"
once "^1.3.0"
path-is-absolute "^1.0.0"
growl@1.10.5:
version "1.10.5"
resolved "https://registry.yarnpkg.com/growl/-/growl-1.10.5.tgz#f2735dc2283674fa67478b10181059355c369e5e"
integrity sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA==
has-flag@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd"
integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0=
has-symbols@^1.0.0, has-symbols@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.1.tgz#9f5214758a44196c406d9bd76cebf81ec2dd31e8"
integrity sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==
has@^1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796"
integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==
dependencies:
function-bind "^1.1.1"
he@1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f"
integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==
immutable@^3.8.2:
version "3.8.2"
resolved "https://registry.yarnpkg.com/immutable/-/immutable-3.8.2.tgz#c2439951455bb39913daf281376f1530e104adf3"
integrity sha1-wkOZUUVbs5kT2vKBN28VMOEErfM=
inflight@^1.0.4:
version "1.0.6"
resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9"
integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=
dependencies:
once "^1.3.0"
wrappy "1"
inherits@2:
version "2.0.4"
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
is-buffer@~2.0.3:
version "2.0.4"
resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-2.0.4.tgz#3e572f23c8411a5cfd9557c849e3665e0b290623"
integrity sha512-Kq1rokWXOPXWuaMAqZiJW4XxsmD9zGx9q4aePabbn3qCRGedtH7Cm+zV8WETitMfu1wdh+Rvd6w5egwSngUX2A==
is-callable@^1.1.4, is-callable@^1.1.5:
version "1.1.5"
resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.1.5.tgz#f7e46b596890456db74e7f6e976cb3273d06faab"
integrity sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q==
is-date-object@^1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.2.tgz#bda736f2cd8fd06d32844e7743bfa7494c3bfd7e"
integrity sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==
is-fullwidth-code-point@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f"
integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=
is-regex@^1.0.5:
version "1.0.5"
resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.0.5.tgz#39d589a358bf18967f726967120b8fc1aed74eae"
integrity sha512-vlKW17SNq44owv5AQR3Cq0bQPEb8+kF3UKZ2fiZNOWtztYE5i0CzCZxFDwO58qAOWtxdBRVO/V5Qin1wjCqFYQ==
dependencies:
has "^1.0.3"
is-symbol@^1.0.2:
version "1.0.3"
resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.3.tgz#38e1014b9e6329be0de9d24a414fd7441ec61937"
integrity sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ==
dependencies:
has-symbols "^1.0.1"
isexe@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=
js-yaml@3.13.1:
version "3.13.1"
resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.13.1.tgz#aff151b30bfdfa8e49e05da22e7415e9dfa37847"
integrity sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==
dependencies:
argparse "^1.0.7"
esprima "^4.0.0"
locate-path@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e"
integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==
dependencies:
p-locate "^3.0.0"
path-exists "^3.0.0"
lodash@^4.17.15:
version "4.17.15"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.15.tgz#b447f6670a0455bbfeedd11392eff330ea097548"
integrity sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==
log-symbols@2.2.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-2.2.0.tgz#5740e1c5d6f0dfda4ad9323b5332107ef6b4c40a"
integrity sha512-VeIAFslyIerEJLXHziedo2basKbMKtTw3vfn5IzG0XTjhAVEJyNHnL2p7vc+wBDSdQuUpNw3M2u6xb9QsAY5Eg==
dependencies:
chalk "^2.0.1"
minimatch@3.0.4, minimatch@^3.0.4:
version "3.0.4"
resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083"
integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==
dependencies:
brace-expansion "^1.1.7"
minimist@0.0.8:
version "0.0.8"
resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d"
integrity sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=
mkdirp@0.5.1:
version "0.5.1"
resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903"
integrity sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=
dependencies:
minimist "0.0.8"
mocha@^6.2.0:
version "6.2.2"
resolved "https://registry.yarnpkg.com/mocha/-/mocha-6.2.2.tgz#5d8987e28940caf8957a7d7664b910dc5b2fea20"
integrity sha512-FgDS9Re79yU1xz5d+C4rv1G7QagNGHZ+iXF81hO8zY35YZZcLEsJVfFolfsqKFWunATEvNzMK0r/CwWd/szO9A==
dependencies:
ansi-colors "3.2.3"
browser-stdout "1.3.1"
debug "3.2.6"
diff "3.5.0"
escape-string-regexp "1.0.5"
find-up "3.0.0"
glob "7.1.3"
growl "1.10.5"
he "1.2.0"
js-yaml "3.13.1"
log-symbols "2.2.0"
minimatch "3.0.4"
mkdirp "0.5.1"
ms "2.1.1"
node-environment-flags "1.0.5"
object.assign "4.1.0"
strip-json-comments "2.0.1"
supports-color "6.0.0"
which "1.3.1"
wide-align "1.1.3"
yargs "13.3.0"
yargs-parser "13.1.1"
yargs-unparser "1.6.0"
ms@2.1.1:
version "2.1.1"
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.1.tgz#30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a"
integrity sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==
ms@^2.1.1:
version "2.1.2"
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==
node-environment-flags@1.0.5:
version "1.0.5"
resolved "https://registry.yarnpkg.com/node-environment-flags/-/node-environment-flags-1.0.5.tgz#fa930275f5bf5dae188d6192b24b4c8bbac3d76a"
integrity sha512-VNYPRfGfmZLx0Ye20jWzHUjyTW/c+6Wq+iLhDzUI4XmhrDd9l/FozXV3F2xOaXjvp0co0+v1YSR3CMP6g+VvLQ==
dependencies:
object.getownpropertydescriptors "^2.0.3"
semver "^5.7.0"
object-inspect@^1.7.0:
version "1.7.0"
resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.7.0.tgz#f4f6bd181ad77f006b5ece60bd0b6f398ff74a67"
integrity sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw==
object-keys@^1.0.11, object-keys@^1.0.12, object-keys@^1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e"
integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==
object.assign@4.1.0, object.assign@^4.1.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.0.tgz#968bf1100d7956bb3ca086f006f846b3bc4008da"
integrity sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w==
dependencies:
define-properties "^1.1.2"
function-bind "^1.1.1"
has-symbols "^1.0.0"
object-keys "^1.0.11"
object.getownpropertydescriptors@^2.0.3:
version "2.1.0"
resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.0.tgz#369bf1f9592d8ab89d712dced5cb81c7c5352649"
integrity sha512-Z53Oah9A3TdLoblT7VKJaTDdXdT+lQO+cNpKVnya5JDe9uLvzu1YyY1yFDFrcxrlRgWrEFH0jJtD/IbuwjcEVg==
dependencies:
define-properties "^1.1.3"
es-abstract "^1.17.0-next.1"
once@^1.3.0:
version "1.4.0"
resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E=
dependencies:
wrappy "1"
p-limit@^2.0.0:
version "2.2.2"
resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.2.2.tgz#61279b67721f5287aa1c13a9a7fbbc48c9291b1e"
integrity sha512-WGR+xHecKTr7EbUEhyLSh5Dube9JtdiG78ufaeLxTgpudf/20KqyMioIUZJAezlTIi6evxuoUs9YXc11cU+yzQ==
dependencies:
p-try "^2.0.0"
p-locate@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4"
integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==
dependencies:
p-limit "^2.0.0"
p-try@^2.0.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6"
integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==
path-exists@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515"
integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=
path-is-absolute@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f"
integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18=
require-directory@^2.1.1:
version "2.1.1"
resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42"
integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I=
require-main-filename@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b"
integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==
rimraf@^2.6.3:
version "2.7.1"
resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec"
integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==
dependencies:
glob "^7.1.3"
semver@^5.7.0:
version "5.7.1"
resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7"
integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==
set-blocking@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7"
integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc=
sprintf-js@~1.0.2:
version "1.0.3"
resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c"
integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=
"string-width@^1.0.2 || 2":
version "2.1.1"
resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e"
integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==
dependencies:
is-fullwidth-code-point "^2.0.0"
strip-ansi "^4.0.0"
string-width@^3.0.0, string-width@^3.1.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961"
integrity sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==
dependencies:
emoji-regex "^7.0.1"
is-fullwidth-code-point "^2.0.0"
strip-ansi "^5.1.0"
string.prototype.trimleft@^2.1.1:
version "2.1.1"
resolved "https://registry.yarnpkg.com/string.prototype.trimleft/-/string.prototype.trimleft-2.1.1.tgz#9bdb8ac6abd6d602b17a4ed321870d2f8dcefc74"
integrity sha512-iu2AGd3PuP5Rp7x2kEZCrB2Nf41ehzh+goo8TV7z8/XDBbsvc6HQIlUl9RjkZ4oyrW1XM5UwlGl1oVEaDjg6Ag==
dependencies:
define-properties "^1.1.3"
function-bind "^1.1.1"
string.prototype.trimright@^2.1.1:
version "2.1.1"
resolved "https://registry.yarnpkg.com/string.prototype.trimright/-/string.prototype.trimright-2.1.1.tgz#440314b15996c866ce8a0341894d45186200c5d9"
integrity sha512-qFvWL3/+QIgZXVmJBfpHmxLB7xsUXz6HsUmP8+5dRaC3Q7oKUv9Vo6aMCRZC1smrtyECFsIT30PqBJ1gTjAs+g==
dependencies:
define-properties "^1.1.3"
function-bind "^1.1.1"
strip-ansi@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f"
integrity sha1-qEeQIusaw2iocTibY1JixQXuNo8=
dependencies:
ansi-regex "^3.0.0"
strip-ansi@^5.0.0, strip-ansi@^5.1.0, strip-ansi@^5.2.0:
version "5.2.0"
resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae"
integrity sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==
dependencies:
ansi-regex "^4.1.0"
strip-json-comments@2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a"
integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo=
supports-color@6.0.0:
version "6.0.0"
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-6.0.0.tgz#76cfe742cf1f41bb9b1c29ad03068c05b4c0e40a"
integrity sha512-on9Kwidc1IUQo+bQdhi8+Tijpo0e1SS6RoGo2guUwn5vdaxw8RXOF9Vb2ws+ihWOmh4JnCJOvaziZWP1VABaLg==
dependencies:
has-flag "^3.0.0"
supports-color@^5.3.0:
version "5.5.0"
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f"
integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==
dependencies:
has-flag "^3.0.0"
transit-immutable-js@^0.7.0:
version "0.7.0"
resolved "https://registry.yarnpkg.com/transit-immutable-js/-/transit-immutable-js-0.7.0.tgz#993e25089b6311ff402140f556276d6d253005d9"
integrity sha1-mT4lCJtjEf9AIUD1VidtbSUwBdk=
transit-js@^0.8.861:
version "0.8.861"
resolved "https://registry.yarnpkg.com/transit-js/-/transit-js-0.8.861.tgz#829e516b80349a41fff5d59f5e6993b5473f72c9"
integrity sha512-4O9OrYPZw6C0M5gMTvaeOp+xYz6EF79JsyxIvqXHlt+pisSrioJWFOE80N8aCPoJLcNaXF442RZrVtdmd4wkDQ==
uuid@3.3.2:
version "3.3.2"
resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.3.2.tgz#1b4af4955eb3077c501c23872fc6513811587131"
integrity sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==
which-module@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a"
integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=
which@1.3.1:
version "1.3.1"
resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a"
integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==
dependencies:
isexe "^2.0.0"
wide-align@1.1.3:
version "1.1.3"
resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.3.tgz#ae074e6bdc0c14a431e804e624549c633b000457"
integrity sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==
dependencies:
string-width "^1.0.2 || 2"
wrap-ansi@^5.1.0:
version "5.1.0"
resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-5.1.0.tgz#1fd1f67235d5b6d0fee781056001bfb694c03b09"
integrity sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==
dependencies:
ansi-styles "^3.2.0"
string-width "^3.0.0"
strip-ansi "^5.0.0"
wrappy@1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=
y18n@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.0.tgz#95ef94f85ecc81d007c264e190a120f0a3c8566b"
integrity sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==
yargs-parser@13.1.1, yargs-parser@^13.1.1:
version "13.1.1"
resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.1.1.tgz#d26058532aa06d365fe091f6a1fc06b2f7e5eca0"
integrity sha512-oVAVsHz6uFrg3XQheFII8ESO2ssAf9luWuAd6Wexsu4F3OtIW0o8IribPXYrD4WC24LWtPrJlGy87y5udK+dxQ==
dependencies:
camelcase "^5.0.0"
decamelize "^1.2.0"
yargs-unparser@1.6.0:
version "1.6.0"
resolved "https://registry.yarnpkg.com/yargs-unparser/-/yargs-unparser-1.6.0.tgz#ef25c2c769ff6bd09e4b0f9d7c605fb27846ea9f"
integrity sha512-W9tKgmSn0DpSatfri0nx52Joq5hVXgeLiqR/5G0sZNDoLZFOr/xjBUDcShCOGNsBnEMNo1KAMBkTej1Hm62HTw==
dependencies:
flat "^4.1.0"
lodash "^4.17.15"
yargs "^13.3.0"
yargs@13.3.0, yargs@^13.3.0:
version "13.3.0"
resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.3.0.tgz#4c657a55e07e5f2cf947f8a366567c04a0dedc83"
integrity sha512-2eehun/8ALW8TLoIl7MVaRUrg+yCnenu8B4kBlRxj3GJGDKU1Og7sMXPNm1BYyM1DOJmTZ4YeN/Nwxv+8XJsUA==
dependencies:
cliui "^5.0.0"
find-up "^3.0.0"
get-caller-file "^2.0.1"
require-directory "^2.1.1"
require-main-filename "^2.0.0"
set-blocking "^2.0.0"
string-width "^3.0.0"
which-module "^2.0.0"
y18n "^4.0.0"
yargs-parser "^13.1.1"

@@ -0,0 +1,18 @@
[package]
name = "automerge-backend"
version = "0.0.1"
authors = ["Alex Good <alex@memoryandthought.me>"]
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
serde = { version = "^1.0", features=["derive"] }
serde_json = "^1.0"
uuid = { version = "^0.5.1", features=["v4"] }
[dependencies.web-sys]
version = "0.3"
features = [
"console",
]

@@ -0,0 +1,117 @@
use crate::error::AutomergeError;
use crate::operation_with_metadata::OperationWithMetadata;
use crate::protocol::{ActorID, Change, Clock};
use std::collections::HashMap;
use std::rc::Rc;
// ActorStates manages:
// `change_by_actor` - a seq-ordered vec of changes per actor
// `deps_by_actor` - a seq-ordered vec of transitive deps per actor
// `history` - a list of all changes received, in order
// This struct is used to tell whether two ops are concurrent and to look
// up historic changes.
#[derive(Debug, PartialEq, Clone)]
pub struct ActorStates {
pub history: Vec<Rc<Change>>,
change_by_actor: HashMap<ActorID, Vec<Rc<Change>>>,
deps_by_actor: HashMap<ActorID, Vec<Clock>>,
// this lets me return a reference to an empty clock when needed
// without having to do any extra allocations or copies
// in the default path
empty_clock: Clock,
}
impl ActorStates {
pub(crate) fn new() -> ActorStates {
ActorStates {
change_by_actor: HashMap::new(),
deps_by_actor: HashMap::new(),
empty_clock: Clock::empty(),
history: Vec::new(),
}
}
pub fn is_concurrent(&self, op1: &OperationWithMetadata, op2: &OperationWithMetadata) -> bool {
let clock1 = self.get_deps(&op1.actor_id, op1.sequence);
let clock2 = self.get_deps(&op2.actor_id, op2.sequence);
clock1.get(&op2.actor_id) < op2.sequence && clock2.get(&op1.actor_id) < op1.sequence
}
pub fn get(&self, actor_id: &ActorID) -> Vec<&Change> {
self.change_by_actor
.get(actor_id)
.map(|vec| vec.iter().map(|c| c.as_ref()).collect())
.unwrap_or_default()
}
fn get_change(&self, actor_id: &ActorID, seq: u32) -> Option<&Rc<Change>> {
self.change_by_actor
.get(actor_id)
.and_then(|v| v.get((seq as usize) - 1))
}
fn get_deps(&self, actor_id: &ActorID, seq: u32) -> &Clock {
self.get_deps_option(actor_id, seq)
.unwrap_or(&self.empty_clock)
}
fn get_deps_option(&self, actor_id: &ActorID, seq: u32) -> Option<&Clock> {
self.deps_by_actor
.get(actor_id)
.and_then(|v| v.get((seq as usize) - 1))
}
fn transitive_deps(&self, clock: &Clock) -> Clock {
let mut all_deps = clock.clone();
clock
.into_iter()
.filter_map(|(actor_id, seq)| self.get_deps_option(actor_id, *seq))
.for_each(|deps| all_deps.merge(deps));
all_deps
}
// If the change is new, return Ok(true).
// If the change is a duplicate, don't insert it and return Ok(false).
// If the change reuses an existing actor:seq pair with different
// content, return an error.
pub(crate) fn add_change(&mut self, change: Change) -> Result<bool, AutomergeError> {
if let Some(c) = self.get_change(&change.actor_id, change.seq) {
if &change == c.as_ref() {
return Ok(false);
} else {
return Err(AutomergeError::InvalidChange(
"Invalid reuse of sequence number for actor".to_string(),
));
}
}
let deps = change.dependencies.with(&change.actor_id, change.seq - 1);
let all_deps = self.transitive_deps(&deps);
let actor_id = change.actor_id.clone();
let rc = Rc::new(change);
self.history.push(rc.clone());
let actor_changes = self
.change_by_actor
.entry(actor_id.clone())
.or_insert_with(Vec::new);
if (rc.seq as usize) - 1 != actor_changes.len() {
panic!(
"cant push c={:?}:{:?} at ${:?}",
rc.actor_id,
rc.seq,
actor_changes.len()
);
}
actor_changes.push(rc);
let actor_deps = self.deps_by_actor.entry(actor_id).or_insert_with(Vec::new);
actor_deps.push(all_deps);
Ok(true)
}
}
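
The is_concurrent check above is a standard vector-clock test: two ops are concurrent exactly when neither op's dependency clock has seen the other op's (actor, seq). A minimal runnable sketch of that rule, using a plain HashMap in place of this crate's Clock type (the names here are illustrative, not part of the diff):

use std::collections::HashMap;

type Clock = HashMap<String, u32>; // actor id -> highest seq seen

fn seen(clock: &Clock, actor: &str) -> u32 {
    *clock.get(actor).unwrap_or(&0)
}

// Mirrors is_concurrent above: neither op's dependency clock has
// seen the other op's (actor, seq).
fn is_concurrent(
    deps1: &Clock, actor1: &str, seq1: u32,
    deps2: &Clock, actor2: &str, seq2: u32,
) -> bool {
    seen(deps1, actor2) < seq2 && seen(deps2, actor1) < seq1
}

fn main() {
    let empty = Clock::new();

    // a@1 and b@1 were made in ignorance of each other: concurrent.
    assert!(is_concurrent(&empty, "a", 1, &empty, "b", 1));

    // If b had merged a@1 before producing b@1, b@1 causally follows
    // a@1 and the two are no longer concurrent.
    let mut b_deps = Clock::new();
    b_deps.insert("a".to_string(), 1);
    assert!(!is_concurrent(&empty, "a", 1, &b_deps, "b", 1));
}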

File diff suppressed because it is too large

@@ -0,0 +1,132 @@
use crate::actor_states::ActorStates;
use crate::error::AutomergeError;
use crate::operation_with_metadata::OperationWithMetadata;
use crate::patch::{Conflict, ElementValue};
use crate::{DataType, Operation, PrimitiveValue};
use std::cmp::PartialOrd;
/// Represents a set of operations which are relevant to either an element ID
/// or object ID and which occurred without knowledge of each other
#[derive(Debug, Clone, PartialEq)]
pub struct ConcurrentOperations {
operations: Vec<OperationWithMetadata>,
}
impl ConcurrentOperations {
pub(crate) fn new() -> ConcurrentOperations {
ConcurrentOperations {
operations: Vec::new(),
}
}
pub fn active_op(&self) -> Option<&OperationWithMetadata> {
// operations are sorted in incorporate_new_op, so the first op is the
// active one
self.operations.first()
}
pub fn conflicts(&self) -> Vec<Conflict> {
self.operations
.split_first()
.map(|(_, tail)| {
tail.iter()
.map(|op| match &op.operation {
Operation::Set {
value, datatype, ..
} => Conflict {
actor: op.actor_id.clone(),
value: ElementValue::Primitive(value.clone()),
datatype: datatype.clone(),
},
Operation::Link { value, .. } => Conflict {
actor: op.actor_id.clone(),
value: ElementValue::Link(value.clone()),
datatype: None,
},
_ => panic!("Invalid operation in concurrent ops"),
})
.collect()
})
.unwrap_or_default()
}
/// Updates this set of operations based on a new operation.
///
/// Returns the previous operations (multiple if concurrent) that this op
/// replaces
pub(crate) fn incorporate_new_op(
&mut self,
new_op: OperationWithMetadata,
actor_states: &ActorStates,
) -> Result<Vec<Operation>, AutomergeError> {
let previous = self
.operations
.clone()
.into_iter()
.map(|o| o.operation)
.collect();
let mut concurrent: Vec<OperationWithMetadata> = match new_op.operation {
// If the operation is an increment op, then we are going to modify
// any Set operations to reflect the increment ops in the next
// part of this function
Operation::Increment { .. } => self.operations.clone(),
// Otherwise we filter out any operations that are not concurrent
// with the new one (i.e. ones which causally precede the new one)
_ => self
.operations
.iter()
.filter(|op| actor_states.is_concurrent(&op, &new_op))
.cloned()
.collect(),
};
let this_op = new_op.clone();
match &new_op.operation {
// For Set or Link ops, we add them to the concurrent ops list, to
// be interpreted later as part of the document::walk
// implementation
Operation::Set { .. } | Operation::Link { .. } => {
concurrent.push(this_op);
}
// Increment ops are not stored in the op set, instead we update
// any Set operations which are a counter containing a number to
// reflect the increment operation
Operation::Increment {
value: inc_value, ..
} => concurrent.iter_mut().for_each(|op| {
let op_clone = op.clone();
if let Operation::Set {
value: PrimitiveValue::Number(ref mut n),
datatype: Some(DataType::Counter),
..
} = op.operation
{
if !(actor_states.is_concurrent(&new_op, &op_clone)) {
*n += inc_value
}
}
}),
// All other operations are not relevant (e.g. a concurrent
// operation set containing just a delete operation is actually an
// empty set; in document::walk we interpret this as a
// nonexistent part of the state)
_ => {}
}
// the partial_cmp implementation for `OperationWithMetadata` ensures
// that the operations are in the deterministic order required by
// automerge.
//
// Note we can unwrap because the partial_cmp definition never returns
// None
concurrent.sort_by(|a, b| a.partial_cmp(b).unwrap());
concurrent.reverse();
self.operations = concurrent;
Ok(previous)
}
pub fn pure_operations(&self) -> Vec<Operation> {
self.operations
.iter()
.map(|o| o.operation.clone())
.collect()
}
}
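
The sort-then-reverse at the end of incorporate_new_op is what makes conflict resolution deterministic: concurrent ops are ordered by actor ID (and by sequence within an actor), the head becomes the active op, and the tail surfaces via conflicts(). A stripped-down sketch of that tie-break; the Op struct is a stand-in for OperationWithMetadata, not a type from the diff:

// Stand-in for OperationWithMetadata; the derived Ord compares
// actor_id first, then sequence, matching the partial_cmp used above.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
struct Op {
    actor_id: String,
    sequence: u32,
    value: &'static str,
}

fn main() {
    let mut concurrent = vec![
        Op { actor_id: "aaaa".to_string(), sequence: 3, value: "from aaaa" },
        Op { actor_id: "bbbb".to_string(), sequence: 1, value: "from bbbb" },
    ];

    // Sort ascending then reverse, exactly as incorporate_new_op does.
    concurrent.sort();
    concurrent.reverse();

    // Every replica that has seen both ops picks the same active op...
    assert_eq!(concurrent[0].value, "from bbbb");
    // ...and the rest survive as conflicts rather than being dropped.
    assert_eq!(concurrent.len() - 1, 1);
}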

@@ -0,0 +1,45 @@
use crate::protocol::ObjectID;
use std::error::Error;
use std::fmt;
#[derive(Debug)]
pub enum AutomergeError {
DuplicateObjectError,
MissingObjectError(ObjectID),
InvalidObjectType(String),
InvalidLinkTarget,
DuplicateChange(String),
NotImplemented(String),
InvalidChange(String),
DivergedState(String),
}
impl fmt::Display for AutomergeError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:?}", self)
}
}
impl Error for AutomergeError {}
#[derive(Debug)]
pub struct InvalidElementID(pub String);
impl fmt::Display for InvalidElementID {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:?}", self)
}
}
impl Error for InvalidElementID {}
#[derive(Debug)]
pub struct InvalidChangeRequest(pub String);
impl Error for InvalidChangeRequest {}
impl fmt::Display for InvalidChangeRequest {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:?}", self)
}
}

@@ -0,0 +1,35 @@
extern crate web_sys;
#[allow(unused_macros)]
macro_rules! log {
( $( $t:tt )* ) => {
web_sys::console::log_1(&format!( $( $t )* ).into());
}
}
mod actor_states;
mod backend;
mod concurrent_operations;
mod error;
mod object_store;
mod op_set;
mod operation_with_metadata;
mod patch;
mod patch_serialization;
mod protocol;
mod protocol_serialization;
mod value;
pub use crate::protocol::{
ActorID, Change, ChangeRequest, ChangeRequestType, Clock, DataType, ElementID, Key, ObjectID,
Operation, PrimitiveValue,
};
pub use actor_states::ActorStates;
pub use backend::Backend;
pub use concurrent_operations::ConcurrentOperations;
pub use error::AutomergeError;
pub use object_store::{ListState, MapState, ObjectState, ObjectStore};
pub use op_set::{list_ops_in_order, OpSet};
pub use operation_with_metadata::OperationWithMetadata;
pub use patch::{Conflict, Diff, DiffAction, ElementValue, MapType, Patch, SequenceType};
pub use value::Value;
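
The log! macro defined at the top of this file forwards an arbitrary token stream to format! and passes the resulting string to web_sys::console::log_1, giving the WebAssembly build a printf-style console logger. A runnable sketch of the same macro shape, with println! standing in for the console call so it runs off-target (the call site is illustrative, not taken from the diff):

// Same pattern as the crate's log! macro: forward all tokens to format!.
macro_rules! log {
    ( $( $t:tt )* ) => {
        // In the crate this is web_sys::console::log_1(&format!(...).into()),
        // which only compiles for wasm32 with web-sys's "console" feature.
        println!("{}", format!( $( $t )* ));
    }
}

fn main() {
    let n = 3;
    log!("applied {} ops from actor {}", n, "aaaa"); // applied 3 ops from actor aaaa
}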

@@ -0,0 +1,622 @@
use crate::actor_states::ActorStates;
use crate::concurrent_operations::ConcurrentOperations;
use crate::error::AutomergeError;
use crate::operation_with_metadata::OperationWithMetadata;
use crate::protocol::ActorID;
use crate::{
list_ops_in_order, DataType, Diff, DiffAction, ElementID, ElementValue, Key, MapType, ObjectID,
Operation, SequenceType,
};
use std::collections::{HashMap, HashSet};
/// ObjectState is what the OpSet uses to store operations for a particular
/// object. Its variants represent the two possible container types in
/// automerge: a map or a sequence (tables and text are effectively maps and
/// sequences respectively).
#[derive(Debug, Clone, PartialEq)]
pub enum ObjectState {
Map(MapState),
List(ListState),
}
impl ObjectState {
fn new_map(map_type: MapType, object_id: ObjectID) -> ObjectState {
ObjectState::Map(MapState::new(map_type, object_id))
}
fn new_sequence(sequence_type: SequenceType, object_id: ObjectID) -> ObjectState {
ObjectState::List(ListState::new(sequence_type, object_id))
}
// this feels like we should have a trait or something
fn generate_diffs(&self) -> Vec<Diff> {
match self {
ObjectState::Map(map_state) => map_state.generate_diffs(),
ObjectState::List(list_state) => list_state.generate_diffs(),
}
}
fn handle_assign_op(
&mut self,
op_with_metadata: OperationWithMetadata,
actor_states: &ActorStates,
key: &Key,
) -> Result<(Option<Diff>, Vec<Operation>), AutomergeError> {
let (diff, mut undo_ops) = match self {
ObjectState::Map(mapstate) => {
mapstate.handle_assign_op(op_with_metadata.clone(), actor_states, key)
}
ObjectState::List(liststate) => {
liststate.handle_assign_op(op_with_metadata.clone(), actor_states, key)
}
}?;
if let Operation::Increment {
object_id,
key,
value,
} = &op_with_metadata.operation
{
undo_ops = vec![Operation::Increment {
object_id: object_id.clone(),
key: key.clone(),
value: -value,
}]
};
if undo_ops.is_empty() {
undo_ops.push(Operation::Delete {
object_id: op_with_metadata.operation.object_id().clone(),
key: key.clone(),
})
}
Ok((diff, undo_ops))
}
}
/// Stores operations on list objects
#[derive(Debug, Clone, PartialEq)]
pub struct ListState {
pub operations_by_elemid: HashMap<ElementID, ConcurrentOperations>,
pub insertions: HashMap<ElementID, ElementID>,
pub following: HashMap<ElementID, Vec<ElementID>>,
pub max_elem: u32,
pub sequence_type: SequenceType,
pub object_id: ObjectID,
}
impl ListState {
fn new(sequence_type: SequenceType, object_id: ObjectID) -> ListState {
ListState {
operations_by_elemid: HashMap::new(),
following: HashMap::new(),
insertions: HashMap::new(),
max_elem: 0,
sequence_type,
object_id,
}
}
fn generate_diffs(&self) -> Vec<Diff> {
let mut diffs = Vec::new();
let head = Diff {
action: DiffAction::CreateList(self.object_id.clone(), self.sequence_type.clone()),
conflicts: Vec::new(),
};
let ops_in_order = list_ops_in_order(&self.operations_by_elemid, &self.following)
.ok()
.unwrap_or_default();
let inserts = ops_in_order
.iter()
.filter_map(|(_, ops)| {
ops.active_op()
.map(|active_op| (active_op, ops.conflicts()))
})
.enumerate()
.map(|(after, (active_op, conflicts))| Diff {
action: list_op_to_assign_diff(
&active_op.operation,
&self.sequence_type,
after as u32,
)
.unwrap(),
conflicts,
});
let tail = Diff {
action: DiffAction::MaxElem(
self.object_id.clone(),
self.max_elem,
self.sequence_type.clone(),
),
conflicts: Vec::new(),
};
diffs.push(head);
diffs.extend(inserts);
diffs.push(tail);
diffs
}
fn handle_assign_op(
&mut self,
op: OperationWithMetadata,
actor_states: &ActorStates,
key: &Key,
) -> Result<(Option<Diff>, Vec<Operation>), AutomergeError> {
let elem_id = key.as_element_id().map_err(|_| AutomergeError::InvalidChange(format!("Attempted to link, set, delete, or increment an object in a list with invalid element ID {:?}", key.0)))?;
// We have to clone this here in order to avoid holding a reference to
// self which makes the borrow checker choke when adding an op to the
// operations_by_elemid map later
let ops_clone = self.operations_by_elemid.clone();
let ops_in_order_before_this_op = list_ops_in_order(&ops_clone, &self.following)?;
// This is a hack to avoid holding on to a mutable reference to self
// when adding a new operation
let (undo_ops, ops) = {
let mutable_ops = self
.operations_by_elemid
.entry(elem_id.clone())
.or_insert_with(ConcurrentOperations::new);
let undo_ops = mutable_ops.incorporate_new_op(op, actor_states)?;
(undo_ops, mutable_ops.clone())
};
let ops_in_order_after_this_op =
list_ops_in_order(&self.operations_by_elemid, &self.following)?;
let index_before_op = ops_in_order_before_this_op
.iter()
.filter_map(|(elem_id, ops)| ops.active_op().map(|_| elem_id))
.enumerate()
.find(|(_, op_elem_id)| &&elem_id == op_elem_id)
.map(|(index, _)| index as u32);
let index_and_value_after_op: Option<(u32, ElementValue, Option<DataType>)> =
ops_in_order_after_this_op
.iter()
.filter_map(|(elem_id, ops)| ops.active_op().map(|op| (op, elem_id)))
.enumerate()
.find(|(_, (_, op_elem_id))| &&elem_id == op_elem_id)
.map(|(index, (op, _))| {
let (value, datatype) = match &op.operation {
Operation::Set {
ref value,
ref datatype,
..
} => (ElementValue::Primitive(value.clone()), datatype),
Operation::Link { value, .. } => (ElementValue::Link(value.clone()), &None),
_ => panic!("Should not happen"),
};
(index as u32, value, datatype.clone())
});
let action: Option<DiffAction> = match (index_before_op, index_and_value_after_op) {
(Some(_), Some((after, value, datatype))) => Some(DiffAction::SetSequenceElement(
self.object_id.clone(),
self.sequence_type.clone(),
after,
value,
datatype,
)),
(Some(before), None) => Some(DiffAction::RemoveSequenceElement(
self.object_id.clone(),
self.sequence_type.clone(),
before,
)),
(None, Some((after, value, datatype))) => Some(DiffAction::InsertSequenceElement(
self.object_id.clone(),
self.sequence_type.clone(),
after,
value,
datatype,
elem_id,
)),
(None, None) => None,
};
Ok((
action.map(|action| Diff {
action,
conflicts: ops.conflicts(),
}),
undo_ops,
))
}
fn add_insertion(
&mut self,
actor_id: &ActorID,
elem_id: &ElementID,
elem: u32,
) -> Result<Diff, AutomergeError> {
let inserted_elemid = ElementID::SpecificElementID(actor_id.clone(), elem);
if self.insertions.contains_key(&inserted_elemid) {
return Err(AutomergeError::InvalidChange(format!(
"Received an insertion for already present key: {:?}",
inserted_elemid
)));
}
self.insertions
.insert(inserted_elemid.clone(), inserted_elemid.clone());
let following_ops = self
.following
.entry(elem_id.clone())
.or_insert_with(Vec::new);
following_ops.push(inserted_elemid.clone());
let ops = self
.operations_by_elemid
.entry(inserted_elemid)
.or_insert_with(ConcurrentOperations::new);
self.max_elem = std::cmp::max(self.max_elem, elem);
Ok(Diff {
action: DiffAction::MaxElem(
self.object_id.clone(),
self.max_elem,
self.sequence_type.clone(),
),
conflicts: ops.conflicts(),
})
}
}
/// Stores operations on map objects
#[derive(Debug, Clone, PartialEq)]
pub struct MapState {
pub operations_by_key: HashMap<Key, ConcurrentOperations>,
pub map_type: MapType,
pub object_id: ObjectID,
}
impl MapState {
fn new(map_type: MapType, object_id: ObjectID) -> MapState {
MapState {
operations_by_key: HashMap::new(),
map_type,
object_id,
}
}
fn generate_diffs(&self) -> Vec<Diff> {
let mut diffs = Vec::new();
if self.object_id != ObjectID::Root {
diffs.push(Diff {
action: DiffAction::CreateMap(self.object_id.clone(), self.map_type.clone()),
conflicts: Vec::new(),
})
}
diffs.extend(self.operations_by_key.iter().filter_map(|(_, ops)| {
ops.active_op()
.and_then(|op| map_op_to_assign_diff(&op.operation, &self.map_type))
.map(|action| Diff {
action,
conflicts: ops.conflicts(),
})
}));
diffs
}
fn handle_assign_op(
&mut self,
op_with_metadata: OperationWithMetadata,
actor_states: &ActorStates,
key: &Key,
) -> Result<(Option<Diff>, Vec<Operation>), AutomergeError> {
//log!("NEW OP {:?}",op_with_metadata);
let (undo_ops, ops) = {
let mutable_ops = self
.operations_by_key
.entry(key.clone())
.or_insert_with(ConcurrentOperations::new);
let undo_ops = mutable_ops.incorporate_new_op(op_with_metadata, actor_states)?;
(undo_ops, mutable_ops.clone())
};
//log!("OPS {:?}",ops);
Ok((
Some(
ops.active_op()
.map(|op| {
let action = match &op.operation {
Operation::Set {
object_id,
key,
value,
datatype,
} => DiffAction::SetMapKey(
object_id.clone(),
self.map_type.clone(),
key.clone(),
ElementValue::Primitive(value.clone()),
datatype.clone(),
),
Operation::Link {
object_id,
key,
value,
} => DiffAction::SetMapKey(
object_id.clone(),
self.map_type.clone(),
key.clone(),
ElementValue::Link(value.clone()),
None,
),
_ => panic!("Should not happen for objects"),
};
Diff {
action,
conflicts: ops.conflicts(),
}
})
.unwrap_or_else(|| Diff {
action: DiffAction::RemoveMapKey(
self.object_id.clone(),
self.map_type.clone(),
key.clone(),
),
conflicts: ops.conflicts(),
}),
),
undo_ops,
))
}
}
/// The ObjectStore is responsible for storing the concurrent operations seen
/// for each object ID and for the logic of incorporating a new operation.
#[derive(Debug, Clone, PartialEq)]
pub struct ObjectStore {
operations_by_object_id: HashMap<ObjectID, ObjectState>,
}
impl ObjectStore {
pub(crate) fn new() -> ObjectStore {
let root = ObjectState::new_map(MapType::Map, ObjectID::Root);
let mut ops_by_id = HashMap::new();
ops_by_id.insert(ObjectID::Root, root);
ObjectStore {
operations_by_object_id: ops_by_id,
}
}
pub fn state_for_object_id(&self, object_id: &ObjectID) -> Option<&ObjectState> {
self.operations_by_object_id.get(object_id)
}
pub fn generate_diffs(&self) -> Vec<Diff> {
let mut diffs = Vec::new();
let mut seen = HashSet::new();
let mut next = vec![ObjectID::Root];
while !next.is_empty() {
let oid = next.pop().unwrap();
if let Some(object_state) = self.operations_by_object_id.get(&oid) {
let new_diffs = object_state.generate_diffs();
for diff in new_diffs.iter() {
for link in diff.links() {
if !seen.contains(&link) {
next.push(link)
}
}
}
diffs.push(new_diffs);
seen.insert(oid);
}
}
diffs.iter().rev().flatten().cloned().collect()
}
/// Get the ConcurrentOperations instance corresponding to a key in an
/// object. If the object is a list this function will attempt to convert
/// the key into an element ID
pub fn concurrent_operations_for_field(
&self,
object_id: &ObjectID,
key: &Key,
) -> Option<ConcurrentOperations> {
self.operations_by_object_id
.get(object_id)
.and_then(|state| match state {
ObjectState::Map(mapstate) => mapstate.operations_by_key.get(&key),
ObjectState::List(liststate) => key
.as_element_id()
.ok()
.and_then(|elem_id| liststate.operations_by_elemid.get(&elem_id)),
})
.cloned()
}
/// Incorporates a new operation into the object store. The caller is
/// responsible for ensuring that all causal dependencies of the new
/// operation have already been applied.
///
/// The return value is a tuple of a diff to send to the frontend, and
/// a (possibly empty) vector of operations which will undo the operation
/// later.
pub fn apply_operation(
&mut self,
actor_states: &ActorStates,
op_with_metadata: OperationWithMetadata,
) -> Result<(Option<Diff>, Vec<Operation>), AutomergeError> {
let (diff, undo_ops) = match op_with_metadata.operation {
Operation::MakeMap { object_id } => {
let object = ObjectState::new_map(MapType::Map, object_id.clone());
self.operations_by_object_id
.insert(object_id.clone(), object);
(
Some(Diff {
action: DiffAction::CreateMap(object_id, MapType::Map),
conflicts: Vec::new(),
}),
Vec::new(),
)
}
Operation::MakeTable { object_id } => {
let object = ObjectState::new_map(MapType::Table, object_id.clone());
self.operations_by_object_id
.insert(object_id.clone(), object);
(
Some(Diff {
action: DiffAction::CreateMap(object_id, MapType::Table),
conflicts: Vec::new(),
}),
Vec::new(),
)
}
Operation::MakeList { object_id } => {
let object = ObjectState::new_sequence(SequenceType::List, object_id.clone());
self.operations_by_object_id
.insert(object_id.clone(), object);
(
Some(Diff {
action: DiffAction::CreateList(object_id, SequenceType::List),
conflicts: Vec::new(),
}),
Vec::new(),
)
}
Operation::MakeText { object_id } => {
let object = ObjectState::new_sequence(SequenceType::Text, object_id.clone());
self.operations_by_object_id
.insert(object_id.clone(), object);
(
Some(Diff {
action: DiffAction::CreateList(object_id, SequenceType::Text),
conflicts: Vec::new(),
}),
Vec::new(),
)
}
Operation::Link {
ref object_id,
ref key,
..
}
| Operation::Set {
ref object_id,
ref key,
..
}
| Operation::Delete {
ref object_id,
ref key,
}
| Operation::Increment {
ref object_id,
ref key,
..
} => {
let object = self
.operations_by_object_id
.get_mut(&object_id)
.ok_or_else(|| AutomergeError::MissingObjectError(object_id.clone()))?;
object.handle_assign_op(op_with_metadata.clone(), actor_states, key)?
}
Operation::Insert {
ref list_id,
ref key,
ref elem,
} => {
let list = self
.operations_by_object_id
.get_mut(&list_id)
.ok_or_else(|| AutomergeError::MissingObjectError(list_id.clone()))?;
match list {
ObjectState::Map { .. } => {
return Err(AutomergeError::InvalidChange(format!(
"Insert operation received for object key (object ID: {:?}, key: {:?}",
list_id, key
)))
}
ObjectState::List(liststate) => (
Some(liststate.add_insertion(&op_with_metadata.actor_id, key, *elem)?),
Vec::new(),
),
}
}
};
Ok((diff, undo_ops))
}
}
fn map_op_to_assign_diff(op: &Operation, map_type: &MapType) -> Option<DiffAction> {
match op {
Operation::Set {
object_id,
key,
value,
datatype,
} => Some(DiffAction::SetMapKey(
object_id.clone(),
map_type.clone(),
key.clone(),
ElementValue::Primitive(value.clone()),
datatype.clone(),
)),
Operation::Link {
object_id,
key,
value,
} => Some(DiffAction::SetMapKey(
object_id.clone(),
map_type.clone(),
key.clone(),
ElementValue::Link(value.clone()),
None,
)),
_ => None,
}
}
fn list_op_to_assign_diff(
op: &Operation,
sequence_type: &SequenceType,
after: u32,
) -> Option<DiffAction> {
match op {
Operation::Set {
ref object_id,
ref key,
ref value,
ref datatype,
..
} => key
.as_element_id()
.map(|eid| {
DiffAction::InsertSequenceElement(
object_id.clone(),
sequence_type.clone(),
after,
ElementValue::Primitive(value.clone()),
datatype.clone(),
eid,
)
})
.ok(),
Operation::Link {
value,
object_id,
key,
..
} => key
.as_element_id()
.map(|eid| {
DiffAction::InsertSequenceElement(
object_id.clone(),
sequence_type.clone(),
after,
ElementValue::Link(value.clone()),
None,
eid,
)
})
.ok(),
_ => None,
}
}
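
For list objects, handle_assign_op derives the outgoing diff purely from the element's visible index before and after the op is incorporated: visible-to-visible yields SetSequenceElement, visible-to-gone yields RemoveSequenceElement, gone-to-visible yields InsertSequenceElement, and gone-to-gone yields no diff. A compact sketch of that four-way decision; ListDiff is a simplified stand-in for DiffAction:

// Simplified stand-in for the relevant DiffAction variants.
#[derive(Debug, PartialEq)]
enum ListDiff {
    Set { index: u32 },
    Remove { index: u32 },
    Insert { index: u32 },
}

fn diff_for(index_before: Option<u32>, index_after: Option<u32>) -> Option<ListDiff> {
    match (index_before, index_after) {
        // element was visible and still is: an in-place update
        (Some(_), Some(after)) => Some(ListDiff::Set { index: after }),
        // element was visible and no longer is: a deletion
        (Some(before), None) => Some(ListDiff::Remove { index: before }),
        // element was not visible and now is: an insertion
        (None, Some(after)) => Some(ListDiff::Insert { index: after }),
        // e.g. a delete of an already-deleted element needs no diff
        (None, None) => None,
    }
}

fn main() {
    assert_eq!(diff_for(Some(1), Some(1)), Some(ListDiff::Set { index: 1 }));
    assert_eq!(diff_for(Some(2), None), Some(ListDiff::Remove { index: 2 }));
    assert_eq!(diff_for(None, Some(0)), Some(ListDiff::Insert { index: 0 }));
    assert_eq!(diff_for(None, None), None);
}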

@@ -0,0 +1,340 @@
//! The OpSet is where most of the interesting work is done in this library.
//! It maintains a mapping from each object ID to a set of concurrent
//! operations which have been seen for that object ID.
//!
//! When the client requests the value of the CRDT (via
//! document::state) the implementation fetches the root object ID's history
//! and then recursively walks through the tree of histories constructing the
//! state. Obviously this is not very efficient.
use crate::actor_states::ActorStates;
use crate::concurrent_operations::ConcurrentOperations;
use crate::error::AutomergeError;
use crate::object_store::ObjectStore;
use crate::operation_with_metadata::OperationWithMetadata;
use crate::protocol::{Change, Clock, ElementID, ObjectID, Operation};
use crate::{ActorID, Diff, DiffAction};
use std::collections::HashMap;
use std::collections::HashSet;
use std::hash::BuildHasher;
/// The OpSet manages an ObjectStore, and a queue of incoming changes in order
/// to ensure that operations are delivered to the object store in causal order
///
/// Whenever a new change is received we iterate through any causally ready
/// changes in the queue and apply them to the object store, then repeat until
/// there are no causally ready changes left. The end result of this is that
/// the object store will contain sets of concurrent operations for each object
/// ID or element ID.
///
/// When we want to get the state of the CRDT we walk through the
/// object store, starting with the root object ID and constructing the value
/// at each node by examining the concurrent operations which are active for
/// that node.
#[derive(Debug, PartialEq, Clone)]
pub struct OpSet {
pub object_store: ObjectStore,
queue: Vec<Change>,
pub clock: Clock,
undo_pos: usize,
pub undo_stack: Vec<Vec<Operation>>,
pub redo_stack: Vec<Vec<Operation>>,
pub states: ActorStates,
}
impl OpSet {
pub fn init() -> OpSet {
OpSet {
object_store: ObjectStore::new(),
queue: Vec::new(),
clock: Clock::empty(),
undo_pos: 0,
undo_stack: Vec::new(),
redo_stack: Vec::new(),
states: ActorStates::new(),
}
}
pub fn do_redo(
&mut self,
actor_id: ActorID,
seq: u32,
message: Option<String>,
dependencies: Clock,
) -> Result<Vec<Diff>, AutomergeError> {
if let Some(redo_ops) = self.redo_stack.pop() {
let change = Change {
actor_id,
seq,
message,
dependencies,
operations: redo_ops,
};
self.undo_pos += 1;
self.apply_change(change, false)
} else {
Err(AutomergeError::InvalidChange("no redo ops".to_string()))
}
}
pub fn do_undo(
&mut self,
actor_id: ActorID,
seq: u32,
message: Option<String>,
dependencies: Clock,
) -> Result<Vec<Diff>, AutomergeError> {
if let Some(undo_ops) = self.undo_pos.checked_sub(1).and_then(|i| self.undo_stack.get(i)) {
let redo_ops = undo_ops
.iter()
.filter_map(|op| match &op {
Operation::Increment {
object_id: oid,
key,
value,
} => Some(vec![Operation::Increment {
object_id: oid.clone(),
key: key.clone(),
value: -value,
}]),
Operation::Set { object_id, key, .. }
| Operation::Link { object_id, key, .. }
| Operation::Delete { object_id, key } => self
.object_store
.concurrent_operations_for_field(object_id, key)
.map(|cops| {
if cops.active_op().is_some() {
cops.pure_operations()
} else {
vec![Operation::Delete {
object_id: object_id.clone(),
key: key.clone(),
}]
}
}),
_ => None,
})
.flatten()
.collect();
self.redo_stack.push(redo_ops);
let change = Change {
actor_id,
seq,
message,
dependencies,
operations: undo_ops.clone(),
};
self.undo_pos -= 1;
self.apply_change(change, false)
} else {
Err(AutomergeError::InvalidChange(
"No undo ops to execute".to_string(),
))
}
}
/// Adds a change to the internal queue of operations, then iteratively
/// applies all causally ready changes until there are none remaining
///
/// If `make_undoable` is true, the op set will store a set of operations
/// which can be used to undo this change.
pub fn apply_change(
&mut self,
change: Change,
make_undoable: bool,
) -> Result<Vec<Diff>, AutomergeError> {
self.queue.push(change);
let diffs = self.apply_causally_ready_changes(make_undoable)?;
Ok(diffs)
}
fn apply_causally_ready_changes(
&mut self,
make_undoable: bool,
) -> Result<Vec<Diff>, AutomergeError> {
let mut diffs = Vec::new();
while let Some(next_change) = self.pop_next_causally_ready_change() {
let change_diffs = self.apply_causally_ready_change(next_change, make_undoable)?;
diffs.extend(change_diffs);
}
Ok(diffs)
}
fn pop_next_causally_ready_change(&mut self) -> Option<Change> {
let mut index = 0;
while index < self.queue.len() {
let change = self.queue.get(index).unwrap();
let deps = change.dependencies.with(&change.actor_id, change.seq - 1);
if deps <= self.clock {
return Some(self.queue.remove(index));
}
index += 1
}
None
}
fn apply_causally_ready_change(
&mut self,
change: Change,
make_undoable: bool,
) -> Result<Vec<Diff>, AutomergeError> {
// This method is a little more complicated than it intuitively should
// be due to the bookkeeping required for undo. If we're asked to make
// this operation undoable we have to store the undo operations for
// each operation and then add them to the undo stack at the end of the
// method. However, it's unnecessary to store undo operations for
// objects which are created by this change (e.g. if there's an insert
// operation for a list which was created in this operation we only
// need the undo operation for the creation of the list to achieve
// the undo), so we track newly created objects and only store undo
// operations which don't operate on them.
let actor_id = change.actor_id.clone();
let seq = change.seq;
let operations = change.operations.clone();
if !self.states.add_change(change)? {
return Ok(Vec::new()); // it's a duplicate - ignore
}
let mut diffs = Vec::new();
let mut undo_operations = Vec::new();
let mut new_object_ids: HashSet<ObjectID> = HashSet::new();
for operation in operations {
// Store newly created object IDs so we can decide whether we need
// undo ops later
match &operation {
Operation::MakeMap { object_id }
| Operation::MakeList { object_id }
| Operation::MakeText { object_id }
| Operation::MakeTable { object_id } => {
new_object_ids.insert(object_id.clone());
}
_ => {}
}
let op_with_metadata = OperationWithMetadata {
sequence: seq,
actor_id: actor_id.clone(),
operation: operation.clone(),
};
let (diff, undo_ops_for_this_op) = self
.object_store
.apply_operation(&self.states, op_with_metadata)?;
// If this object is not created in this change then we need to
// store the undo ops for it (if we're storing undo ops at all)
if make_undoable && !(new_object_ids.contains(operation.object_id())) {
undo_operations.extend(undo_ops_for_this_op);
}
if let Some(d) = diff {
diffs.push(d)
}
}
self.clock = self.clock.with(&actor_id, seq);
if make_undoable {
let (new_undo_stack_slice, _) = self.undo_stack.split_at(self.undo_pos);
let mut new_undo_stack: Vec<Vec<Operation>> = new_undo_stack_slice.to_vec();
new_undo_stack.push(undo_operations);
self.undo_stack = new_undo_stack;
self.undo_pos += 1;
};
Ok(Self::simplify_diffs(diffs))
}
/// Remove any redundant diffs
fn simplify_diffs(diffs: Vec<Diff>) -> Vec<Diff> {
let mut result = Vec::new();
let mut known_maxelems: HashMap<ObjectID, u32> = HashMap::new();
for diff in diffs.into_iter().rev() {
if let DiffAction::MaxElem(ref oid, max_elem, _) = diff.action {
let current_max = known_maxelems.get(oid).unwrap_or(&0);
if *current_max < max_elem {
known_maxelems.insert(oid.clone(), max_elem);
result.push(diff);
}
} else if let DiffAction::InsertSequenceElement(
ref oid,
_,
_,
_,
_,
ElementID::SpecificElementID(_, max_elem),
) = diff.action
{
let current_max = known_maxelems.get(oid).unwrap_or(&0);
if *current_max < max_elem {
known_maxelems.insert(oid.clone(), max_elem);
}
result.push(diff);
} else {
result.push(diff);
}
}
result.reverse();
result
}
pub fn can_undo(&self) -> bool {
self.undo_pos > 0
}
pub fn can_redo(&self) -> bool {
!self.redo_stack.is_empty()
}
/// Get all the changes we have that are not in `since`
pub fn get_missing_changes(&self, since: &Clock) -> Vec<&Change> {
self.states
.history
.iter()
.map(|rc| rc.as_ref())
.filter(|change| change.seq > since.get(&change.actor_id))
.collect()
}
pub fn get_missing_deps(&self) -> Clock {
// TODO: there's a lot of internal copying going on in here for something kinda simple
self.queue.iter().fold(Clock::empty(), |clock, change| {
clock
.union(&change.dependencies)
.with(&change.actor_id, change.seq - 1)
})
}
}
pub fn list_ops_in_order<'a, S: BuildHasher>(
operations_by_elemid: &'a HashMap<ElementID, ConcurrentOperations, S>,
following: &HashMap<ElementID, Vec<ElementID>, S>,
) -> Result<Vec<(ElementID, &'a ConcurrentOperations)>, AutomergeError> {
// First we construct a vector of operations to process in order based
// on the insertion orders of the operations we've received
let mut ops_in_order: Vec<(ElementID, &ConcurrentOperations)> = Vec::new();
// start with everything that was inserted after _head
let mut to_process: Vec<ElementID> = following
.get(&ElementID::Head)
.map(|heads| {
let mut sorted = heads.to_vec();
sorted.sort();
sorted
})
.unwrap_or_else(Vec::new);
// for each element ID, add the operation to the ops_in_order list,
// then find all the following element IDs, sort them and add them to
// the list of element IDs still to process.
while let Some(next_element_id) = to_process.pop() {
let ops = operations_by_elemid.get(&next_element_id).ok_or_else(|| {
AutomergeError::InvalidChange(format!(
"Missing element ID {:?} when interpreting list ops",
next_element_id
))
})?;
ops_in_order.push((next_element_id.clone(), ops));
if let Some(followers) = following.get(&next_element_id) {
let mut sorted = followers.to_vec();
sorted.sort();
to_process.extend(sorted);
}
}
Ok(ops_in_order)
}
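
pop_next_causally_ready_change admits a queued change only once its dependency clock, extended with (actor, seq - 1) for the change's own actor, is covered by the clock of already-applied changes. A self-contained sketch of that readiness test, again with a plain HashMap standing in for the crate's Clock (names are illustrative):

use std::collections::HashMap;

type Clock = HashMap<String, u32>;

// `deps <= self.clock` in the crate: every dependency has been applied.
fn covered(deps: &Clock, applied: &Clock) -> bool {
    deps.iter()
        .all(|(actor, seq)| applied.get(actor).copied().unwrap_or(0) >= *seq)
}

// A change also implicitly depends on its own predecessor, seq - 1
// (seq numbering starts at 1, so the subtraction never underflows).
fn causally_ready(actor: &str, seq: u32, mut deps: Clock, applied: &Clock) -> bool {
    deps.insert(actor.to_string(), seq - 1);
    covered(&deps, applied)
}

fn main() {
    let mut applied = Clock::new();
    applied.insert("a".to_string(), 1); // a@1 has been applied

    // b@1 depending on a@1 is ready: a@1 is applied, b has no predecessor.
    let mut deps = Clock::new();
    deps.insert("a".to_string(), 1);
    assert!(causally_ready("b", 1, deps, &applied));

    // b@2 with no explicit deps is not ready: b@1 is still missing.
    assert!(!causally_ready("b", 2, Clock::new(), &applied));
}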

@@ -0,0 +1,25 @@
use crate::protocol::{ActorID, Operation};
use std::cmp::{Ordering, PartialOrd};
/// We deserialize individual operations as part of the `Change` structure, but
/// we need access to the actor ID and sequence when applying each individual
/// operation, so we copy the operation, actor ID, and sequence into this
/// struct.
#[derive(PartialEq, Debug, Clone)]
pub struct OperationWithMetadata {
pub sequence: u32,
pub actor_id: ActorID,
pub operation: Operation,
}
/// Note: we can't implement Ord because the Operation contains floating
/// point elements
impl PartialOrd for OperationWithMetadata {
fn partial_cmp(&self, other: &OperationWithMetadata) -> Option<Ordering> {
if self.actor_id == other.actor_id {
Some(self.sequence.cmp(&other.sequence))
} else {
Some(self.actor_id.cmp(&other.actor_id))
}
}
}
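
The note above is about totality: Operation can contain an f64, and f64 is only PartialOrd because NaN is unordered, so a derived Ord is unavailable. The ordering actually used ignores the float and is total over (actor_id, sequence), which is why callers can sort with partial_cmp(..).unwrap(). A small illustration:

fn main() {
    // f64 is PartialOrd but not Ord: NaN is unordered with everything,
    // which is what blocks a derived Ord on any type containing one.
    assert_eq!(f64::NAN.partial_cmp(&1.0), None);

    // An ordering keyed only on (actor_id, sequence) is total anyway,
    // so sorting with partial_cmp(..).unwrap() never panics:
    let mut ops = [("bbbb", 1u32), ("aaaa", 2u32)];
    ops.sort_by(|a, b| a.partial_cmp(b).unwrap());
    assert_eq!(ops[0], ("aaaa", 2));
}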

@@ -0,0 +1,119 @@
use crate::{ActorID, Clock, DataType, ElementID, Key, ObjectID, PrimitiveValue};
use serde::Serialize;
#[derive(Debug, PartialEq, Clone)]
pub enum ElementValue {
Primitive(PrimitiveValue),
Link(ObjectID),
}
impl ElementValue {
pub fn object_id(&self) -> Option<ObjectID> {
match self {
ElementValue::Link(object_id) => Some(object_id.clone()),
_ => None,
}
}
}
#[derive(Debug, PartialEq, Clone, Serialize)]
pub enum SequenceType {
#[serde(rename = "list")]
List,
#[serde(rename = "text")]
Text,
}
#[derive(Debug, PartialEq, Clone, Serialize)]
pub enum MapType {
#[serde(rename = "map")]
Map,
#[serde(rename = "table")]
Table,
}
#[derive(Debug, Clone, PartialEq)]
pub enum DiffAction {
CreateMap(ObjectID, MapType),
CreateList(ObjectID, SequenceType),
MaxElem(ObjectID, u32, SequenceType),
RemoveMapKey(ObjectID, MapType, Key),
SetMapKey(ObjectID, MapType, Key, ElementValue, Option<DataType>),
RemoveSequenceElement(ObjectID, SequenceType, u32),
InsertSequenceElement(
ObjectID,
SequenceType,
u32,
ElementValue,
Option<DataType>,
ElementID,
),
SetSequenceElement(ObjectID, SequenceType, u32, ElementValue, Option<DataType>),
}
impl DiffAction {
fn value(&self) -> Option<ElementValue> {
match self {
DiffAction::SetMapKey(_, _, _, value, _)
| DiffAction::InsertSequenceElement(_, _, _, value, _, _)
| DiffAction::SetSequenceElement(_, _, _, value, _) => Some(value.clone()),
_ => None,
}
}
}
#[derive(Debug, PartialEq, Clone)]
pub struct Conflict {
pub actor: ActorID,
pub value: ElementValue,
pub datatype: Option<DataType>,
}
#[derive(Debug, PartialEq, Clone)]
pub struct Diff {
pub action: DiffAction,
pub conflicts: Vec<Conflict>,
}
impl Diff {
pub fn links(&self) -> Vec<ObjectID> {
let mut oids = Vec::new();
if let Some(oid) = self.action.value().and_then(|v| v.object_id()) {
oids.push(oid)
}
for c in self.conflicts.iter() {
if let Some(oid) = c.value.object_id() {
oids.push(oid)
}
}
oids
}
}
#[derive(Serialize, Debug, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct Patch {
#[serde(skip_serializing_if = "Option::is_none", default)]
pub actor: Option<ActorID>,
pub can_undo: bool,
pub can_redo: bool,
pub clock: Clock,
pub deps: Clock,
pub diffs: Vec<Diff>,
#[serde(skip_serializing_if = "Option::is_none", default)]
pub seq: Option<u32>,
}
impl Patch {
pub fn empty() -> Patch {
Patch {
actor: None,
can_undo: false,
can_redo: false,
clock: Clock::empty(),
deps: Clock::empty(),
diffs: Vec::new(),
seq: None,
}
}
}
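
`Diff::links` is how a consumer of a patch discovers which child objects it touches: it collects the object ID of the winning value (if it is a link), followed by the IDs of any conflicting link values. A sketch, assuming these diff types are re-exported at the crate root like the other `automerge_backend` types used in this diff:

```rust
use automerge_backend::{
    ActorID, Conflict, Diff, DiffAction, ElementValue, Key, MapType, ObjectID,
};

fn main() {
    let diff = Diff {
        action: DiffAction::SetMapKey(
            ObjectID::Root,
            MapType::Map,
            Key("doc".to_string()),
            ElementValue::Link(ObjectID::ID("child".to_string())),
            None,
        ),
        conflicts: vec![Conflict {
            actor: ActorID("other".to_string()),
            value: ElementValue::Link(ObjectID::ID("rival".to_string())),
            datatype: None,
        }],
    };
    // The winning value comes first, then any conflicting links.
    assert_eq!(
        diff.links(),
        vec![
            ObjectID::ID("child".to_string()),
            ObjectID::ID("rival".to_string())
        ]
    );
}
```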

View file

@ -0,0 +1,933 @@
use crate::{
ActorID, Conflict, DataType, Diff, DiffAction, ElementID, ElementValue, Key, MapType, ObjectID,
PrimitiveValue, SequenceType,
};
use serde::de::{Error, MapAccess, Unexpected, Visitor};
use serde::ser::SerializeMap;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::fmt;
impl Serialize for Conflict {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut map_serializer = serializer.serialize_map(None)?;
map_serializer.serialize_entry("actor", &self.actor)?;
match &self.datatype {
Some(d) => map_serializer.serialize_entry("datatype", &d)?,
None => {}
};
match &self.value {
ElementValue::Primitive(v) => map_serializer.serialize_entry("value", &v)?,
ElementValue::Link(oid) => {
map_serializer.serialize_entry("value", &oid)?;
map_serializer.serialize_entry("link", &true)?;
}
};
map_serializer.end()
}
}
impl<'de> Deserialize<'de> for Conflict {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
const FIELDS: &[&str] = &["actor", "value", "datatype", "link"];
struct ConflictVisitor;
impl<'de> Visitor<'de> for ConflictVisitor {
type Value = Conflict;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("A conflict object")
}
fn visit_map<V>(self, mut map: V) -> Result<Conflict, V::Error>
where
V: MapAccess<'de>,
{
let mut actor: Option<ActorID> = None;
let mut value_raw: Option<PrimitiveValue> = None;
let mut datatype: Option<DataType> = None;
let mut link: Option<bool> = None;
while let Some(key) = map.next_key::<String>()? {
match key.as_ref() {
"actor" => {
if actor.is_some() {
return Err(Error::duplicate_field("actor"));
}
actor = Some(map.next_value()?);
}
"datatype" => {
if datatype.is_some() {
return Err(Error::duplicate_field("datatype"));
}
datatype = Some(map.next_value()?);
}
"value" => {
if value_raw.is_some() {
return Err(Error::duplicate_field("value"));
}
value_raw = Some(map.next_value()?);
}
"link" => {
if link.is_some() {
return Err(Error::duplicate_field("link"));
}
link = Some(map.next_value()?);
}
_ => return Err(Error::unknown_field(&key, FIELDS)),
}
}
let actor = actor.ok_or_else(|| Error::missing_field("actor"))?;
let value_raw = value_raw.ok_or_else(|| Error::missing_field("value"))?;
let is_link = link.unwrap_or(false);
let value = match (is_link, value_raw) {
(true, PrimitiveValue::Str(s)) => {
let oid = match s.as_ref() {
"00000000-0000-0000-0000-000000000000" => ObjectID::Root,
id => ObjectID::ID(id.to_string()),
};
ElementValue::Link(oid)
}
(false, v) => ElementValue::Primitive(v),
_ => return Err(Error::custom(
"Received a conflict with `link` set to true but no string in 'value' key",
)),
};
Ok(Conflict {
actor,
value,
datatype,
})
}
}
deserializer.deserialize_struct("Conflict", FIELDS, ConflictVisitor)
}
}
impl Serialize for Diff {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut map_serializer = serializer.serialize_map(None)?;
if !self.conflicts.is_empty() {
map_serializer.serialize_entry("conflicts", &self.conflicts)?;
}
match &self.action {
DiffAction::CreateMap(oid, map_type) => {
map_serializer.serialize_entry("action", "create")?;
map_serializer.serialize_entry("obj", &oid)?;
map_serializer.serialize_entry("type", &map_type)?;
}
DiffAction::CreateList(oid, seq_type) => {
map_serializer.serialize_entry("action", "create")?;
map_serializer.serialize_entry("obj", &oid)?;
map_serializer.serialize_entry("type", &seq_type)?;
}
DiffAction::MaxElem(oid, max, seq_type) => {
map_serializer.serialize_entry("action", "maxElem")?;
map_serializer.serialize_entry("obj", &oid)?;
map_serializer.serialize_entry("value", &max)?;
map_serializer.serialize_entry("type", &seq_type)?;
}
DiffAction::RemoveMapKey(oid, map_type, key) => {
map_serializer.serialize_entry("action", "remove")?;
map_serializer.serialize_entry("type", &map_type)?;
map_serializer.serialize_entry("obj", &oid)?;
map_serializer.serialize_entry("key", &key)?;
}
DiffAction::SetMapKey(oid, map_type, key, value, datatype) => {
map_serializer.serialize_entry("action", "set")?;
map_serializer.serialize_entry("type", &map_type)?;
map_serializer.serialize_entry("obj", &oid)?;
map_serializer.serialize_entry("key", &key)?;
match datatype {
Some(dtype) => map_serializer.serialize_entry("datatype", &dtype)?,
None => {}
};
match value {
ElementValue::Primitive(v) => map_serializer.serialize_entry("value", &v)?,
ElementValue::Link(linked_oid) => {
map_serializer.serialize_entry("link", &true)?;
map_serializer.serialize_entry("value", &linked_oid)?;
}
};
}
DiffAction::RemoveSequenceElement(oid, seq_type, index) => {
map_serializer.serialize_entry("action", "remove")?;
map_serializer.serialize_entry("type", &seq_type)?;
map_serializer.serialize_entry("obj", &oid)?;
map_serializer.serialize_entry("index", &index)?;
}
DiffAction::InsertSequenceElement(
oid,
seq_type,
index,
value,
datatype,
element_id,
) => {
map_serializer.serialize_entry("action", "insert")?;
map_serializer.serialize_entry("type", &seq_type)?;
map_serializer.serialize_entry("obj", &oid)?;
map_serializer.serialize_entry("index", &index)?;
map_serializer.serialize_entry("elemId", &element_id)?;
match value {
ElementValue::Primitive(v) => map_serializer.serialize_entry("value", &v)?,
ElementValue::Link(linked_oid) => {
map_serializer.serialize_entry("link", &true)?;
map_serializer.serialize_entry("value", &linked_oid)?;
}
};
match datatype {
Some(d) => map_serializer.serialize_entry("datatype", &d)?,
None => {}
};
}
DiffAction::SetSequenceElement(oid, seq_type, index, value, datatype) => {
map_serializer.serialize_entry("action", "set")?;
map_serializer.serialize_entry("type", &seq_type)?;
map_serializer.serialize_entry("obj", &oid)?;
map_serializer.serialize_entry("index", &index)?;
match value {
ElementValue::Primitive(v) => map_serializer.serialize_entry("value", &v)?,
ElementValue::Link(linked_oid) => {
map_serializer.serialize_entry("link", &true)?;
map_serializer.serialize_entry("value", &linked_oid)?;
}
};
match datatype {
Some(d) => map_serializer.serialize_entry("datatype", &d)?,
None => {}
};
}
}
map_serializer.end()
}
}
impl<'de> Deserialize<'de> for Diff {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
const FIELDS: &[&str] = &["actor", "value", "datatype", "link"];
struct DiffVisitor;
impl<'de> Visitor<'de> for DiffVisitor {
type Value = Diff;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("A diff object")
}
fn visit_map<V>(self, mut map: V) -> Result<Diff, V::Error>
where
V: MapAccess<'de>,
{
let mut object_id: Option<ObjectID> = None;
let mut type_str: Option<String> = None;
let mut seq: Option<u32> = None;
let mut action: Option<String> = None;
let mut key: Option<Key> = None;
let mut value: Option<PrimitiveValue> = None;
let mut datatype: Option<DataType> = None;
let mut conflicts: Option<Vec<Conflict>> = None;
let mut index: Option<u32> = None;
let mut is_link: Option<bool> = None;
let mut elem_id: Option<ElementID> = None;
while let Some(map_key) = map.next_key::<String>()? {
match map_key.as_ref() {
"obj" => {
if object_id.is_some() {
return Err(Error::duplicate_field("obj"));
}
object_id = Some(map.next_value()?);
}
"type" => {
if type_str.is_some() {
return Err(Error::duplicate_field("type"));
}
type_str = Some(map.next_value()?);
}
"seq" => {
if seq.is_some() {
return Err(Error::duplicate_field("seq"));
}
seq = Some(map.next_value()?);
}
"action" => {
if action.is_some() {
return Err(Error::duplicate_field("action"));
}
action = Some(map.next_value()?);
}
"key" => {
if key.is_some() {
return Err(Error::duplicate_field("key"));
}
key = Some(map.next_value()?);
}
"value" => {
if value.is_some() {
return Err(Error::duplicate_field("value"));
}
value = Some(map.next_value()?);
}
"datatype" => {
if datatype.is_some() {
return Err(Error::duplicate_field("datatype"));
}
datatype = Some(map.next_value()?);
}
"conflicts" => {
if conflicts.is_some() {
return Err(Error::duplicate_field("conflicts"));
}
conflicts = Some(map.next_value()?);
}
"index" => {
if index.is_some() {
return Err(Error::duplicate_field("index"));
}
index = Some(map.next_value()?);
}
"link" => {
if is_link.is_some() {
return Err(Error::duplicate_field("link"));
}
is_link = Some(map.next_value()?);
}
"elemId" => {
if elem_id.is_some() {
return Err(Error::duplicate_field("elemId"));
}
elem_id = Some(map.next_value()?);
}
_ => return Err(Error::unknown_field(&map_key, FIELDS)),
}
}
let is_link = is_link.unwrap_or(false);
let value =
match (is_link, value) {
(true, Some(PrimitiveValue::Str(s))) => {
let oid = match s.as_ref() {
"00000000-0000-0000-0000-000000000000" => ObjectID::Root,
id => ObjectID::ID(id.to_string()),
};
Some(ElementValue::Link(oid))
}
(false, Some(v)) => Some(ElementValue::Primitive(v)),
(_, None) => None,
_ => return Err(Error::custom(
"Received a diff with `link` set to true but no string in 'value' key",
)),
};
let diff_action = match action {
Some(action_str) => match action_str.as_ref() {
"create" => {
let obj_id = object_id.ok_or_else(|| Error::missing_field("obj"))?;
let create_type =
type_str.ok_or_else(|| Error::missing_field("type"))?;
match create_type.as_ref() {
"map" => DiffAction::CreateMap(obj_id, MapType::Map),
"table" => DiffAction::CreateMap(obj_id, MapType::Table),
"list" => DiffAction::CreateList(obj_id, SequenceType::List),
"text" => DiffAction::CreateList(obj_id, SequenceType::Text),
_ => {
return Err(Error::invalid_value(
Unexpected::Str(&create_type),
&"A valid object type",
))
}
}
}
"maxElem" => {
let obj_id = object_id.ok_or_else(|| Error::missing_field("obj"))?;
let value = value.ok_or_else(|| Error::missing_field("value"))?;
let seq_type_str =
type_str.ok_or_else(|| Error::missing_field("type"))?;
let seq_type = match seq_type_str.as_ref() {
"list" => SequenceType::List,
"text" => SequenceType::Text,
_ => {
return Err(Error::invalid_value(
Unexpected::Str(&seq_type_str),
&"A valid sequence type",
))
}
};
let seq = match value {
ElementValue::Primitive(PrimitiveValue::Number(n)) => n as u32,
_ => return Err(Error::custom("Invalid value for maxElem.value")),
};
DiffAction::MaxElem(obj_id, seq, seq_type)
}
"remove" => {
let type_str = type_str.ok_or_else(|| Error::missing_field("type"))?;
let obj_id = object_id.ok_or_else(|| Error::missing_field("obj"))?;
match key {
Some(k) => {
let map_type = match type_str.as_ref() {
"map" => MapType::Map,
"table" => MapType::Table,
_ => {
return Err(Error::invalid_value(
Unexpected::Str(&type_str),
&"A valid map type",
))
}
};
DiffAction::RemoveMapKey(obj_id, map_type, k)
}
None => {
let seq_type = match type_str.as_ref() {
"list" => SequenceType::List,
"text" => SequenceType::Text,
_ => {
return Err(Error::invalid_value(
Unexpected::Str(&type_str),
&"A valid sequence type",
))
}
};
let index =
index.ok_or_else(|| Error::missing_field("index"))?;
DiffAction::RemoveSequenceElement(obj_id, seq_type, index)
}
}
}
"set" => {
let type_str = type_str.ok_or_else(|| Error::missing_field("type"))?;
let obj_id = object_id.ok_or_else(|| Error::missing_field("obj"))?;
let value = value.ok_or_else(|| Error::missing_field("value"))?;
match key {
Some(k) => {
let map_type = match type_str.as_ref() {
"map" => MapType::Map,
"table" => MapType::Table,
_ => {
return Err(Error::invalid_value(
Unexpected::Str(&type_str),
&"A valid map type",
))
}
};
DiffAction::SetMapKey(obj_id, map_type, k, value, datatype)
}
None => {
let seq_type = match type_str.as_ref() {
"list" => SequenceType::List,
"text" => SequenceType::Text,
_ => {
return Err(Error::invalid_value(
Unexpected::Str(&type_str),
&"A valid sequence type",
))
}
};
let index =
index.ok_or_else(|| Error::missing_field("index"))?;
DiffAction::SetSequenceElement(
obj_id, seq_type, index, value, datatype,
)
}
}
}
"insert" => {
let obj_id = object_id.ok_or_else(|| Error::missing_field("obj"))?;
let type_str = type_str.ok_or_else(|| Error::missing_field("type"))?;
let value = value.ok_or_else(|| Error::missing_field("value"))?;
let elem_id = elem_id.ok_or_else(|| Error::missing_field("elemId"))?;
let seq_type = match type_str.as_ref() {
"list" => SequenceType::List,
"text" => SequenceType::Text,
_ => {
return Err(Error::invalid_value(
Unexpected::Str(&type_str),
&"A valid sequence type",
))
}
};
let index = index.ok_or_else(|| Error::missing_field("index"))?;
DiffAction::InsertSequenceElement(
obj_id, seq_type, index, value, datatype, elem_id,
)
}
_ => {
return Err(Error::invalid_value(
Unexpected::Str(&action_str),
&"A valid action string",
))
}
},
None => return Err(Error::missing_field("action")),
};
let conflicts = conflicts.unwrap_or_default();
Ok(Diff {
action: diff_action,
conflicts,
})
}
}
deserializer.deserialize_struct("Conflict", FIELDS, DiffVisitor)
}
}
#[cfg(test)]
mod tests {
//use super::*;
use crate::{
ActorID, Conflict, DataType, Diff, DiffAction, ElementID, ElementValue, Key, MapType,
ObjectID, PrimitiveValue, SequenceType,
};
use serde_json;
struct TestCase {
name: &'static str,
diff: Diff,
json: serde_json::Value,
}
#[test]
fn do_tests() {
let testcases = vec![
TestCase {
name: "CreateMap",
diff: Diff {
action: DiffAction::CreateMap(ObjectID::ID("1234".to_string()), MapType::Map),
conflicts: Vec::new(),
},
json: serde_json::from_str(
r#"
{
"action": "create",
"obj": "1234",
"type": "map"
}
"#,
)
.unwrap(),
},
TestCase {
name: "CreateMap (table)",
diff: Diff {
action: DiffAction::CreateMap(ObjectID::ID("1234".to_string()), MapType::Table),
conflicts: Vec::new(),
},
json: serde_json::from_str(
r#"
{
"action": "create",
"obj": "1234",
"type": "table"
}
"#,
)
.unwrap(),
},
TestCase {
name: "CreateList",
diff: Diff {
action: DiffAction::CreateList(
ObjectID::ID("1234".to_string()),
SequenceType::List,
),
conflicts: Vec::new(),
},
json: serde_json::from_str(
r#"
{
"action": "create",
"obj": "1234",
"type": "list"
}
"#,
)
.unwrap(),
},
TestCase {
name: "CreateText",
diff: Diff {
action: DiffAction::CreateList(
ObjectID::ID("1234".to_string()),
SequenceType::Text,
),
conflicts: Vec::new(),
},
json: serde_json::from_str(
r#"
{
"action": "create",
"obj": "1234",
"type": "text"
}
"#,
)
.unwrap(),
},
TestCase {
name: "MaxElem(list)",
diff: Diff {
action: DiffAction::MaxElem(
ObjectID::ID("1234".to_string()),
4,
SequenceType::List,
),
conflicts: Vec::new(),
},
json: serde_json::from_str(
r#"
{
"action": "maxElem",
"obj": "1234",
"type": "list",
"value": 4
}
"#,
)
.unwrap(),
},
TestCase {
name: "MaxElem(text)",
diff: Diff {
action: DiffAction::MaxElem(
ObjectID::ID("1234".to_string()),
4,
SequenceType::Text,
),
conflicts: Vec::new(),
},
json: serde_json::from_str(
r#"
{
"action": "maxElem",
"obj": "1234",
"type": "text",
"value": 4
}
"#,
)
.unwrap(),
},
TestCase {
name: "RemoveMapKey(map)",
diff: Diff {
action: DiffAction::RemoveMapKey(
ObjectID::ID("1234".to_string()),
MapType::Map,
Key("key".to_string()),
),
conflicts: Vec::new(),
},
json: serde_json::from_str(
r#"
{
"action": "remove",
"obj": "1234",
"type": "map",
"key": "key"
}
"#,
)
.unwrap(),
},
TestCase {
name: "RemoveMapKey(table)",
diff: Diff {
action: DiffAction::RemoveMapKey(
ObjectID::ID("1234".to_string()),
MapType::Table,
Key("key".to_string()),
),
conflicts: Vec::new(),
},
json: serde_json::from_str(
r#"
{
"action": "remove",
"obj": "1234",
"type": "table",
"key": "key"
}
"#,
)
.unwrap(),
},
TestCase {
name: "SetMapKey(map)",
diff: Diff {
action: DiffAction::SetMapKey(
ObjectID::ID("1234".to_string()),
MapType::Map,
Key("key".to_string()),
ElementValue::Link(ObjectID::ID("5678".to_string())),
None,
),
conflicts: Vec::new(),
},
json: serde_json::from_str(
r#"
{
"action": "set",
"obj": "1234",
"type": "map",
"key": "key",
"value": "5678",
"link": true
}
"#,
)
.unwrap(),
},
TestCase {
name: "SetMapKey(table) with link",
diff: Diff {
action: DiffAction::SetMapKey(
ObjectID::ID("1234".to_string()),
MapType::Table,
Key("key".to_string()),
ElementValue::Link(ObjectID::ID("5678".to_string())),
Some(DataType::Counter),
),
conflicts: Vec::new(),
},
json: serde_json::from_str(
r#"
{
"action": "set",
"obj": "1234",
"type": "table",
"key": "key",
"value": "5678",
"link": true,
"datatype": "counter"
}
"#,
)
.unwrap(),
},
TestCase {
name: "RemoveSequenceElement",
diff: Diff {
action: DiffAction::RemoveSequenceElement(
ObjectID::ID("1234".to_string()),
SequenceType::List,
5,
),
conflicts: Vec::new(),
},
json: serde_json::from_str(
r#"
{
"action": "remove",
"obj": "1234",
"type": "list",
"index": 5
}
"#,
)
.unwrap(),
},
TestCase {
name: "RemoveSequenceElement(text)",
diff: Diff {
action: DiffAction::RemoveSequenceElement(
ObjectID::ID("1234".to_string()),
SequenceType::Text,
5,
),
conflicts: Vec::new(),
},
json: serde_json::from_str(
r#"
{
"action": "remove",
"obj": "1234",
"type": "text",
"index": 5
}
"#,
)
.unwrap(),
},
TestCase {
name: "InsertSequenceElement",
diff: Diff {
action: DiffAction::InsertSequenceElement(
ObjectID::ID("1234".to_string()),
SequenceType::List,
5,
ElementValue::Primitive(PrimitiveValue::Str("hi".to_string())),
None,
ElementID::from_actor_and_elem(ActorID("someactor".to_string()), 1),
),
conflicts: Vec::new(),
},
json: serde_json::from_str(
r#"
{
"action": "insert",
"obj": "1234",
"type": "list",
"index": 5,
"value": "hi",
"elemId": "someactor:1"
}
"#,
)
.unwrap(),
},
TestCase {
name: "InsertSequenceElement(text with link and datatype)",
diff: Diff {
action: DiffAction::InsertSequenceElement(
ObjectID::ID("1234".to_string()),
SequenceType::Text,
5,
ElementValue::Link(ObjectID::ID("5678".to_string())),
Some(DataType::Timestamp),
ElementID::from_actor_and_elem(ActorID("someactor".to_string()), 1),
),
conflicts: Vec::new(),
},
json: serde_json::from_str(
r#"
{
"action": "insert",
"obj": "1234",
"type": "text",
"index": 5,
"value": "5678",
"link": true,
"datatype": "timestamp",
"elemId": "someactor:1"
}
"#,
)
.unwrap(),
},
TestCase {
name: "SetSequenceElement",
diff: Diff {
action: DiffAction::SetSequenceElement(
ObjectID::ID("1234".to_string()),
SequenceType::Text,
5,
ElementValue::Link(ObjectID::ID("5678".to_string())),
None,
),
conflicts: Vec::new(),
},
json: serde_json::from_str(
r#"
{
"action": "set",
"obj": "1234",
"type": "text",
"index": 5,
"value": "5678",
"link": true
}
"#,
)
.unwrap(),
},
TestCase {
name: "SetSequenceElement(list with primitive and datatype)",
diff: Diff {
action: DiffAction::SetSequenceElement(
ObjectID::ID("1234".to_string()),
SequenceType::List,
5,
ElementValue::Primitive(PrimitiveValue::Str("hi".to_string())),
Some(DataType::Counter),
),
conflicts: Vec::new(),
},
json: serde_json::from_str(
r#"
{
"action": "set",
"obj": "1234",
"type": "list",
"index": 5,
"value": "hi",
"datatype": "counter"
}
"#,
)
.unwrap(),
},
];
for testcase in testcases {
let serialized = serde_json::to_value(testcase.diff.clone()).unwrap_or_else(|_| {
panic!(std::format!("Failed to deserialize {}", testcase.name));
});
assert_eq!(
testcase.json, serialized,
"TestCase {} did not match",
testcase.name
);
let deserialized: Diff = serde_json::from_value(serialized).unwrap_or_else(|_| {
panic!(std::format!("Failed to deserialize for {}", testcase.name));
});
assert_eq!(
testcase.diff, deserialized,
"TestCase {} failed the round trip",
testcase.name
);
}
}
#[test]
fn test_deserialize_conflict_link() {
let json = serde_json::from_str(
r#"
{
"actor": "1234",
"value": "someid",
"link": true
}
"#,
)
.unwrap();
let expected = Conflict {
actor: ActorID("1234".to_string()),
value: ElementValue::Link(ObjectID::ID("someid".to_string())),
datatype: None,
};
let actual: Conflict = serde_json::from_value(json).unwrap();
assert_eq!(expected, actual);
}
#[test]
fn test_deserialize_conflict_nolink() {
let json = serde_json::from_str(
r#"
{
"actor": "1234",
"value": 5,
"datatype": "counter"
}
"#,
)
.unwrap();
let expected = Conflict {
actor: ActorID("1234".to_string()),
value: ElementValue::Primitive(PrimitiveValue::Number(5.0)),
datatype: Some(DataType::Counter),
};
let actual: Conflict = serde_json::from_value(json).unwrap();
assert_eq!(expected, actual);
}
}
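
Both custom deserializers in this file rely on the same convention for encoding the root object: it travels on the wire as the all-zero UUID rather than as a distinct JSON shape. A small round-trip sketch (assuming `ObjectID` is exported, as it is used elsewhere in this diff):

```rust
use automerge_backend::ObjectID;

fn main() {
    let zero = "\"00000000-0000-0000-0000-000000000000\"";
    // Deserializing the zero UUID yields the Root variant...
    let root: ObjectID = serde_json::from_str(zero).unwrap();
    assert_eq!(root, ObjectID::Root);
    // ...and Root serializes back to the same sentinel string.
    assert_eq!(serde_json::to_string(&ObjectID::Root).unwrap(), zero);
}
```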

View file

@ -0,0 +1,549 @@
//! This module contains types which are deserialized from the changes which
//! are produced by the automerge JS library. Given the following code
//!
//! ```javascript
//! doc = ... // create and edit an automerge document
//! let changes = Automerge.getHistory(doc).map(h => h.change)
//! console.log(JSON.stringify(changes, null, 4))
//! ```
//!
//! The output of this can then be deserialized like so
//!
//! ```rust,no_run
//! # use automerge_backend::Change;
//! let changes_str = "<paste the contents of the output here>";
//! let changes: Vec<Change> = serde_json::from_str(changes_str).unwrap();
//! ```
use core::cmp::max;
use serde::de;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::cmp::{Ordering, PartialOrd};
use std::collections::HashMap;
use std::str::FromStr;
use crate::error;
#[derive(Eq, PartialEq, Debug, Hash, Clone)]
pub enum ObjectID {
ID(String),
Root,
}
impl ObjectID {
fn parse(s: &str) -> ObjectID {
match s {
"00000000-0000-0000-0000-000000000000" => ObjectID::Root,
_ => ObjectID::ID(s.into()),
}
}
}
impl<'de> Deserialize<'de> for ObjectID {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
Ok(ObjectID::parse(&s))
}
}
impl Serialize for ObjectID {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let id_str = match self {
ObjectID::Root => "00000000-0000-0000-0000-000000000000",
ObjectID::ID(id) => id,
};
serializer.serialize_str(id_str)
}
}
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Hash, Clone)]
pub struct Key(pub String);
impl Key {
pub fn as_element_id(&self) -> Result<ElementID, error::InvalidElementID> {
ElementID::from_str(&self.0)
}
}
#[derive(Deserialize, Serialize, Eq, PartialEq, Hash, Debug, Clone, PartialOrd, Ord)]
pub struct ActorID(pub String);
impl ActorID {
pub fn random() -> ActorID {
ActorID(uuid::Uuid::new_v4().to_string())
}
pub fn from_string(raw: String) -> ActorID {
ActorID(raw)
}
}
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
pub struct Clock(pub HashMap<ActorID, u32>);
impl Clock {
pub fn empty() -> Clock {
Clock(HashMap::new())
}
pub fn with(&self, actor_id: &ActorID, seq: u32) -> Clock {
let mut result = self.clone();
result.set(actor_id, max(seq, self.get(actor_id)));
result
}
pub fn merge(&mut self, other: &Clock) {
other.into_iter().for_each(|(actor_id, seq)| {
self.set(actor_id, max(*seq, self.get(actor_id)));
});
}
pub fn union(&self, other: &Clock) -> Clock {
let mut result = self.clone();
result.merge(other);
result
}
pub fn set(&mut self, actor_id: &ActorID, seq: u32) {
if seq == 0 {
self.0.remove(actor_id);
} else {
self.0.insert(actor_id.clone(), seq);
}
}
pub fn get(&self, actor_id: &ActorID) -> u32 {
*self.0.get(actor_id).unwrap_or(&0)
}
pub fn divergent(&self, other: &Clock) -> bool {
!self.less_or_equal(other)
}
fn less_or_equal(&self, other: &Clock) -> bool {
self.into_iter()
.all(|(actor_id, _)| self.get(actor_id) <= other.get(actor_id))
}
}
impl PartialOrd for Clock {
fn partial_cmp(&self, other: &Clock) -> Option<Ordering> {
let le1 = self.less_or_equal(other);
let le2 = other.less_or_equal(self);
match (le1, le2) {
(true, true) => Some(Ordering::Equal),
(true, false) => Some(Ordering::Less),
(false, true) => Some(Ordering::Greater),
(false, false) => None,
}
}
}
impl<'a> IntoIterator for &'a Clock {
type Item = (&'a ActorID, &'a u32);
type IntoIter = ::std::collections::hash_map::Iter<'a, ActorID, u32>;
fn into_iter(self) -> Self::IntoIter {
self.0.iter()
}
}
#[derive(Deserialize, Serialize, PartialEq, Debug, Clone)]
#[serde(untagged)]
pub enum PrimitiveValue {
Str(String),
Number(f64),
Boolean(bool),
Null,
}
#[derive(PartialEq, Eq, Debug, Hash, Clone)]
pub enum ElementID {
Head,
SpecificElementID(ActorID, u32),
}
impl ElementID {
pub fn as_key(&self) -> Key {
match self {
ElementID::Head => Key("_head".to_string()),
ElementID::SpecificElementID(actor_id, elem) => Key(format!("{}:{}", actor_id.0, elem)),
}
}
pub fn from_actor_and_elem(actor: ActorID, elem: u32) -> ElementID {
ElementID::SpecificElementID(actor, elem)
}
}
impl<'de> Deserialize<'de> for ElementID {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
ElementID::from_str(&s).map_err(|_| de::Error::custom("invalid element ID"))
}
}
impl Serialize for ElementID {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
ElementID::Head => serializer.serialize_str("_head"),
ElementID::SpecificElementID(actor_id, elem) => {
serializer.serialize_str(&format!("{}:{}", actor_id.0, elem))
}
}
}
}
impl FromStr for ElementID {
type Err = error::InvalidElementID;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"_head" => Ok(ElementID::Head),
id => {
let components: Vec<&str> = id.split(':').collect();
match components.as_slice() {
[actor_id, elem_str] => {
let elem = u32::from_str(elem_str)
.map_err(|_| error::InvalidElementID(id.to_string()))?;
Ok(ElementID::SpecificElementID(
ActorID((*actor_id).to_string()),
elem,
))
}
_ => Err(error::InvalidElementID(id.to_string())),
}
}
}
}
}
impl PartialOrd for ElementID {
fn partial_cmp(&self, other: &ElementID) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl Ord for ElementID {
fn cmp(&self, other: &ElementID) -> Ordering {
match (self, other) {
(ElementID::Head, ElementID::Head) => Ordering::Equal,
(ElementID::Head, _) => Ordering::Less,
(_, ElementID::Head) => Ordering::Greater,
(
ElementID::SpecificElementID(self_actor, self_elem),
ElementID::SpecificElementID(other_actor, other_elem),
) => {
if self_elem == other_elem {
self_actor.cmp(other_actor)
} else {
self_elem.cmp(other_elem)
}
}
}
}
}
#[derive(Deserialize, Serialize, PartialEq, Debug, Clone)]
pub enum DataType {
#[serde(rename = "counter")]
Counter,
#[serde(rename = "timestamp")]
Timestamp,
}
#[derive(Deserialize, Serialize, PartialEq, Debug, Clone)]
#[serde(tag = "action")]
pub enum Operation {
#[serde(rename = "makeMap")]
MakeMap {
#[serde(rename = "obj")]
object_id: ObjectID,
},
#[serde(rename = "makeList")]
MakeList {
#[serde(rename = "obj")]
object_id: ObjectID,
},
#[serde(rename = "makeText")]
MakeText {
#[serde(rename = "obj")]
object_id: ObjectID,
},
#[serde(rename = "makeTable")]
MakeTable {
#[serde(rename = "obj")]
object_id: ObjectID,
},
#[serde(rename = "ins")]
Insert {
#[serde(rename = "obj")]
list_id: ObjectID,
key: ElementID,
elem: u32,
},
#[serde(rename = "set")]
Set {
#[serde(rename = "obj")]
object_id: ObjectID,
key: Key,
value: PrimitiveValue,
#[serde(skip_serializing_if = "Option::is_none", default)]
datatype: Option<DataType>,
},
#[serde(rename = "link")]
Link {
#[serde(rename = "obj")]
object_id: ObjectID,
key: Key,
value: ObjectID,
},
#[serde(rename = "del")]
Delete {
#[serde(rename = "obj")]
object_id: ObjectID,
key: Key,
},
#[serde(rename = "inc")]
Increment {
#[serde(rename = "obj")]
object_id: ObjectID,
key: Key,
value: f64,
},
}
impl Operation {
pub fn object_id(&self) -> &ObjectID {
match self {
Operation::MakeMap { object_id }
| Operation::MakeTable { object_id }
| Operation::MakeList { object_id }
| Operation::MakeText { object_id }
| Operation::Insert {
list_id: object_id, ..
}
| Operation::Set { object_id, .. }
| Operation::Link { object_id, .. }
| Operation::Delete { object_id, .. }
| Operation::Increment { object_id, .. } => object_id,
}
}
}
#[derive(Deserialize, Serialize, PartialEq, Debug, Clone)]
pub struct Change {
#[serde(rename = "ops")]
pub operations: Vec<Operation>,
#[serde(rename = "actor")]
pub actor_id: ActorID,
pub seq: u32,
#[serde(skip_serializing_if = "Option::is_none")]
pub message: Option<String>,
#[serde(rename = "deps")]
pub dependencies: Clock,
}
#[derive(PartialEq, Debug, Clone)]
pub struct ChangeRequest {
pub actor_id: ActorID,
pub seq: u32,
pub message: Option<String>,
pub dependencies: Clock,
pub undoable: Option<bool>,
pub request_type: ChangeRequestType,
}
#[derive(PartialEq, Debug, Clone)]
pub enum ChangeRequestType {
Change(Vec<Operation>),
Undo,
Redo,
}
#[cfg(test)]
mod tests {
use super::*;
use serde_json;
use std::iter::FromIterator;
#[test]
fn test_deserializing_operations() {
let json_str = r#"{
"ops": [
{
"action": "makeMap",
"obj": "2ed3ffe8-0ff3-4671-9777-aa16c3e09945"
},
{
"action": "makeList",
"obj": "2ed3ffe8-0ff3-4671-9777-aa16c3e09945"
},
{
"action": "makeText",
"obj": "2ed3ffe8-0ff3-4671-9777-aa16c3e09945"
},
{
"action": "makeTable",
"obj": "2ed3ffe8-0ff3-4671-9777-aa16c3e09945"
},
{
"action": "ins",
"obj": "2ed3ffe8-0ff3-4671-9777-aa16c3e09945",
"key": "someactorid:6",
"elem": 5
},
{
"action": "ins",
"obj": "2ed3ffe8-0ff3-4671-9777-aa16c3e09945",
"key": "_head",
"elem": 6
},
{
"action": "set",
"obj": "2ed3ffe8-0ff3-4671-9777-aa16c3e09945",
"key": "sometimestamp",
"value": 123456,
"datatype": "timestamp"
},
{
"action": "set",
"obj": "2ed3ffe8-0ff3-4671-9777-aa16c3e09945",
"key": "somekeyid",
"value": true
},
{
"action": "set",
"obj": "2ed3ffe8-0ff3-4671-9777-aa16c3e09945",
"key": "somekeyid",
"value": 123
},
{
"action": "set",
"obj": "2ed3ffe8-0ff3-4671-9777-aa16c3e09945",
"key": "somekeyid",
"value": null
},
{
"action": "link",
"obj": "00000000-0000-0000-0000-000000000000",
"key": "cards",
"value": "2ed3ffe8-0ff3-4671-9777-aa16c3e09945"
},
{
"action": "del",
"obj": "2ed3ffe8-0ff3-4671-9777-aa16c3e09945",
"key": "somekey"
},
{
"action": "inc",
"obj": "2ed3ffe8-0ff3-4671-9777-aa16c3e09945",
"key": "somekey",
"value": 123
}
],
"actor": "741e7221-11cc-4ef8-86ee-4279011569fd",
"seq": 1,
"deps": {
"someid": 0
},
"message": "Initialization"
}"#;
        let change: Change = serde_json::from_str(json_str).unwrap();
assert_eq!(
change,
Change {
actor_id: ActorID("741e7221-11cc-4ef8-86ee-4279011569fd".to_string()),
operations: vec![
Operation::MakeMap {
object_id: ObjectID::ID("2ed3ffe8-0ff3-4671-9777-aa16c3e09945".to_string())
},
Operation::MakeList {
object_id: ObjectID::ID("2ed3ffe8-0ff3-4671-9777-aa16c3e09945".to_string())
},
Operation::MakeText {
object_id: ObjectID::ID("2ed3ffe8-0ff3-4671-9777-aa16c3e09945".to_string())
},
Operation::MakeTable {
object_id: ObjectID::ID("2ed3ffe8-0ff3-4671-9777-aa16c3e09945".to_string())
},
Operation::Insert {
list_id: ObjectID::ID("2ed3ffe8-0ff3-4671-9777-aa16c3e09945".to_string()),
key: ElementID::SpecificElementID(ActorID("someactorid".to_string()), 6),
elem: 5,
},
Operation::Insert {
list_id: ObjectID::ID("2ed3ffe8-0ff3-4671-9777-aa16c3e09945".to_string()),
key: ElementID::Head,
elem: 6,
},
Operation::Set {
object_id: ObjectID::ID("2ed3ffe8-0ff3-4671-9777-aa16c3e09945".to_string()),
key: Key("sometimestamp".to_string()),
value: PrimitiveValue::Number(123_456.0),
datatype: Some(DataType::Timestamp)
},
Operation::Set {
object_id: ObjectID::ID("2ed3ffe8-0ff3-4671-9777-aa16c3e09945".to_string()),
key: Key("somekeyid".to_string()),
value: PrimitiveValue::Boolean(true),
datatype: None
},
Operation::Set {
object_id: ObjectID::ID("2ed3ffe8-0ff3-4671-9777-aa16c3e09945".to_string()),
key: Key("somekeyid".to_string()),
value: PrimitiveValue::Number(123.0),
datatype: None,
},
Operation::Set {
object_id: ObjectID::ID("2ed3ffe8-0ff3-4671-9777-aa16c3e09945".to_string()),
key: Key("somekeyid".to_string()),
value: PrimitiveValue::Null,
datatype: None,
},
Operation::Link {
object_id: ObjectID::Root,
key: Key("cards".to_string()),
value: ObjectID::ID("2ed3ffe8-0ff3-4671-9777-aa16c3e09945".to_string())
},
Operation::Delete {
object_id: ObjectID::ID("2ed3ffe8-0ff3-4671-9777-aa16c3e09945".to_string()),
key: Key("somekey".to_string())
},
Operation::Increment {
object_id: ObjectID::ID("2ed3ffe8-0ff3-4671-9777-aa16c3e09945".to_string()),
key: Key("somekey".to_string()),
value: 123.0,
}
],
seq: 1,
message: Some("Initialization".to_string()),
dependencies: Clock(HashMap::from_iter(vec![(ActorID("someid".to_string()), 0)]))
}
);
}
#[test]
fn test_deserialize_elementid() {
let json_str = "\"_head\"";
let elem: ElementID = serde_json::from_str(json_str).unwrap();
assert_eq!(elem, ElementID::Head);
}
#[test]
fn test_serialize_elementid() {
let result = serde_json::to_value(ElementID::Head).unwrap();
assert_eq!(result, serde_json::Value::String("_head".to_string()));
}
}
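
The `Clock` ordering defined above is the usual vector-clock partial order: one clock precedes another only if every per-actor entry is less than or equal, and clocks that each lead on a different actor are incomparable (`divergent` in both directions). A short sketch:

```rust
use automerge_backend::{ActorID, Clock};
use std::cmp::Ordering;

fn main() {
    let a = ActorID("a".to_string());
    let b = ActorID("b".to_string());
    let older = Clock::empty().with(&a, 2).with(&b, 1);
    let newer = Clock::empty().with(&a, 2).with(&b, 3);
    // Every entry of `older` is <= the corresponding entry of `newer`.
    assert_eq!(older.partial_cmp(&newer), Some(Ordering::Less));

    // Two clocks that each lead on a different actor have no ordering.
    let concurrent = Clock::empty().with(&a, 3).with(&b, 1);
    assert_eq!(newer.partial_cmp(&concurrent), None);
    assert!(newer.divergent(&concurrent) && concurrent.divergent(&newer));
}
```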

View file

@ -0,0 +1,256 @@
use crate::{ActorID, ChangeRequest, ChangeRequestType, Clock, Operation};
use serde::de::{Error, MapAccess, Unexpected, Visitor};
use serde::ser::SerializeMap;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::fmt;
impl Serialize for ChangeRequest {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut map_serializer = serializer.serialize_map(None)?;
map_serializer.serialize_entry("actor", &self.actor_id)?;
map_serializer.serialize_entry("deps", &self.dependencies)?;
map_serializer.serialize_entry("message", &self.message)?;
map_serializer.serialize_entry("seq", &self.seq)?;
match &self.request_type {
ChangeRequestType::Change(ops) => {
map_serializer.serialize_entry("requestType", "change")?;
map_serializer.serialize_entry("ops", &ops)?;
}
ChangeRequestType::Undo => map_serializer.serialize_entry("requestType", "undo")?,
ChangeRequestType::Redo => map_serializer.serialize_entry("requestType", "redo")?,
};
if let Some(undoable) = &self.undoable {
map_serializer.serialize_entry("undoable", undoable)?;
}
map_serializer.end()
}
}
impl<'de> Deserialize<'de> for ChangeRequest {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
const FIELDS: &[&str] = &["ops", "deps", "message", "seq", "actor", "requestType"];
struct ChangeRequestVisitor;
impl<'de> Visitor<'de> for ChangeRequestVisitor {
type Value = ChangeRequest;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("A change request object")
}
fn visit_map<V>(self, mut map: V) -> Result<ChangeRequest, V::Error>
where
V: MapAccess<'de>,
{
let mut actor: Option<ActorID> = None;
let mut deps: Option<Clock> = None;
let mut message: Option<Option<String>> = None;
let mut seq: Option<u32> = None;
let mut ops: Option<Vec<Operation>> = None;
let mut undoable: Option<bool> = None;
let mut request_type_str: Option<String> = None;
while let Some(key) = map.next_key::<String>()? {
match key.as_ref() {
"actor" => {
if actor.is_some() {
return Err(Error::duplicate_field("actor"));
}
actor = Some(map.next_value()?);
}
"deps" => {
if deps.is_some() {
return Err(Error::duplicate_field("deps"));
}
deps = Some(map.next_value()?);
}
"message" => {
if message.is_some() {
return Err(Error::duplicate_field("message"));
}
                            message = Some(map.next_value()?);
}
"seq" => {
if seq.is_some() {
return Err(Error::duplicate_field("seq"));
}
seq = Some(map.next_value()?);
}
"undoable" => {
if undoable.is_some() {
return Err(Error::duplicate_field("seq"));
}
undoable = Some(map.next_value()?);
}
"ops" => {
if ops.is_some() {
return Err(Error::duplicate_field("ops"));
}
ops = Some(map.next_value()?);
}
"requestType" => {
if request_type_str.is_some() {
return Err(Error::duplicate_field("requestType"));
}
request_type_str = Some(map.next_value()?);
}
_ => return Err(Error::unknown_field(&key, FIELDS)),
}
}
let actor = actor.ok_or_else(|| Error::missing_field("actor"))?;
let deps = deps.ok_or_else(|| Error::missing_field("deps"))?;
let seq = seq.ok_or_else(|| Error::missing_field("seq"))?;
let request_type_str =
request_type_str.ok_or_else(|| Error::missing_field("requestType"))?;
let request_type = match request_type_str.as_ref() {
"change" => {
let ops = ops.ok_or_else(|| Error::missing_field("ops"))?;
ChangeRequestType::Change(ops)
}
"undo" => ChangeRequestType::Undo,
"redo" => ChangeRequestType::Redo,
_ => {
return Err(Error::invalid_value(
Unexpected::Str(&request_type_str),
&"A valid change request type",
))
}
};
Ok(ChangeRequest {
actor_id: actor,
dependencies: deps,
undoable,
seq,
request_type,
message: message.unwrap_or(None),
})
}
}
deserializer.deserialize_struct("ChangeReqest", &FIELDS, ChangeRequestVisitor)
}
}
#[cfg(test)]
mod tests {
//use super::*;
use crate::{ActorID, ChangeRequest, ChangeRequestType, Clock, ObjectID, Operation};
use serde_json;
struct TestCase {
name: &'static str,
change_request: ChangeRequest,
json: serde_json::Value,
}
#[test]
fn do_tests() {
let actor = ActorID("actor1".to_string());
let birds = ObjectID::ID("birds".to_string());
let testcases: Vec<TestCase> = vec![
TestCase {
name: "change",
change_request: ChangeRequest {
actor_id: actor.clone(),
seq: 1,
message: None,
undoable: Some(false),
dependencies: Clock::empty().with(&actor, 1),
request_type: ChangeRequestType::Change(vec![Operation::MakeMap {
object_id: birds,
}]),
},
json: serde_json::from_str(
r#"
{
"actor": "actor1",
"seq": 1,
"message": null,
"undoable": false,
"deps": {"actor1": 1},
"requestType": "change",
"ops": [{
"action": "makeMap",
"obj": "birds"
}]
}
"#,
)
.unwrap(),
},
TestCase {
name: "undo",
change_request: ChangeRequest {
actor_id: actor.clone(),
seq: 1,
message: None,
undoable: None,
dependencies: Clock::empty().with(&actor, 1),
request_type: ChangeRequestType::Undo,
},
json: serde_json::from_str(
r#"
{
"actor": "actor1",
"seq": 1,
"message": null,
"deps": {"actor1": 1},
"requestType": "undo"
}
"#,
)
.unwrap(),
},
TestCase {
name: "redo",
change_request: ChangeRequest {
actor_id: actor.clone(),
seq: 1,
message: None,
undoable: None,
dependencies: Clock::empty().with(&actor, 1),
request_type: ChangeRequestType::Redo,
},
json: serde_json::from_str(
r#"
{
"actor": "actor1",
"seq": 1,
"message": null,
"deps": {"actor1": 1},
"requestType": "redo"
}
"#,
)
.unwrap(),
},
];
for testcase in testcases {
let serialized =
serde_json::to_value(testcase.change_request.clone()).unwrap_or_else(|_| {
panic!(std::format!("Failed to deserialize {}", testcase.name));
});
assert_eq!(
testcase.json, serialized,
"TestCase {} did not match",
testcase.name
);
let deserialized: ChangeRequest =
serde_json::from_value(serialized).unwrap_or_else(|_| {
panic!(std::format!("Failed to deserialize for {}", testcase.name));
});
assert_eq!(
testcase.change_request, deserialized,
"TestCase {} failed the round trip",
testcase.name
);
}
}
}

View file

@ -0,0 +1,54 @@
use serde::Serialize;
use std::collections::HashMap;
/// Possible values of an element of the state. Using this rather than
/// serde_json::Value because we'll probably want to make the core logic
/// independent of serde in order to be `no_std` compatible.
#[derive(Serialize, Clone, Debug, PartialEq)]
#[serde(untagged)]
pub enum Value {
Map(HashMap<String, Value>),
List(Vec<Value>),
Str(String),
Number(f64),
Boolean(bool),
Null,
}
impl Value {
pub fn from_json(json: &serde_json::Value) -> Value {
match json {
serde_json::Value::Object(kvs) => {
let result: HashMap<String, Value> = kvs
.iter()
.map(|(k, v)| (k.clone(), Value::from_json(v)))
.collect();
Value::Map(result)
}
serde_json::Value::Array(vs) => Value::List(vs.iter().map(Value::from_json).collect()),
serde_json::Value::String(s) => Value::Str(s.to_string()),
serde_json::Value::Number(n) => Value::Number(n.as_f64().unwrap_or(0.0)),
serde_json::Value::Bool(b) => Value::Boolean(*b),
serde_json::Value::Null => Value::Null,
}
}
pub fn to_json(&self) -> serde_json::Value {
match self {
Value::Map(map) => {
let result: serde_json::map::Map<String, serde_json::Value> =
map.iter().map(|(k, v)| (k.clone(), v.to_json())).collect();
serde_json::Value::Object(result)
}
Value::List(elements) => {
serde_json::Value::Array(elements.iter().map(|v| v.to_json()).collect())
}
Value::Str(s) => serde_json::Value::String(s.to_string()),
Value::Number(n) => serde_json::Value::Number(
serde_json::Number::from_f64(*n).unwrap_or_else(|| serde_json::Number::from(0)),
),
Value::Boolean(b) => serde_json::Value::Bool(*b),
Value::Null => serde_json::Value::Null,
}
}
}
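
A round-trip sketch for the conversions above. Note that `from_json` collapses every JSON number to `f64`, so integers come back as floats; the example sticks to values that survive the trip exactly:

```rust
use automerge_backend::Value;
use serde_json::json;

fn main() {
    let json = json!({
        "name": "automerge",
        "tags": ["crdt", "json"],
        "ratio": 0.5,
        "done": true,
        "parent": null
    });
    let value = Value::from_json(&json);
    assert_eq!(value.to_json(), json);
}
```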

1
automerge/.gitignore vendored Normal file
View file

@ -0,0 +1 @@
target/

20
automerge/Cargo.toml Normal file
View file

@ -0,0 +1,20 @@
[package]
name = "automerge"
version = "0.0.2"
authors = ["Alex Good <alex@memoryandthought.me>"]
edition = "2018"
license = "MIT"
homepage = "https://github.com/alexjg/automerge-rs"
repository = "https://github.com/alexjg/automerge-rs"
categories = ["data-structures"]
description = "Rust implementation of the Automerge replicated JSON datatype"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[lib]
name = "automerge"
[dependencies]
serde = { version = "^1.0", features=["derive"] }
serde_json = "^1.0"
uuid = { version = "^0.5.1", features=["v4"] }
automerge-backend = { path = "../automerge-backend" }

View file

@ -0,0 +1,640 @@
//! This module handles creating changes. Most of the machinery here is related
//! to resolving paths from ChangeRequests, and generating operations to create
//! and modify data in the op set.
use crate::change_request::{ChangeRequest, ListIndex, Path, PathElement};
use crate::error::InvalidChangeRequest;
use automerge_backend::list_ops_in_order;
use automerge_backend::ActorStates;
use automerge_backend::ListState;
use automerge_backend::MapState;
use automerge_backend::ObjectState;
use automerge_backend::ObjectStore;
use automerge_backend::OperationWithMetadata;
use automerge_backend::Value;
use automerge_backend::{
ActorID, Change, Clock, ElementID, Key, ObjectID, Operation, PrimitiveValue,
};
use std::convert::TryInto;
#[derive(Clone, Debug)]
enum ResolvedPathElement {
Map(ObjectID),
List(ObjectID, u32),
Key(Key),
Index(ElementID),
Value(PrimitiveValue),
MissingKey(Key),
}
/// Represents a resolved path
#[derive(Debug, Clone)]
struct ResolvedPath(Vec<ResolvedPathElement>);
impl ResolvedPath {
fn new(elements: Vec<ResolvedPathElement>) -> ResolvedPath {
ResolvedPath(elements)
}
fn as_set_target(&self) -> Option<SetTarget> {
self.last_n(3).and_then(|last_three| {
match &last_three[..] {
[ResolvedPathElement::Map(o), ResolvedPathElement::Key(k), ResolvedPathElement::Value(_)] => Some(SetTarget{
containing_object_id: o.clone(),
key: k.clone(),
}),
[ResolvedPathElement::Map(o), ResolvedPathElement::Key(k), ResolvedPathElement::MissingKey(_)] => Some(SetTarget{
containing_object_id: o.clone(),
key: k.clone(),
}),
[ResolvedPathElement::List(l, _), ResolvedPathElement::Index(elem_id), ResolvedPathElement::Value(_)] => Some(SetTarget{
containing_object_id: l.clone(),
key: elem_id.as_key(),
}),
_ => None
}
})
}
fn as_move_source(&self) -> Option<MoveSource> {
self.last_n(3).and_then(|last_three| {
match &last_three[..] {
[ResolvedPathElement::Map(o), ResolvedPathElement::Key(k), ResolvedPathElement::Map(c)] => Some(MoveSource::Reference{
containing_object_id: o.clone(),
key: k.clone(),
contained_object_id: c.clone()
}),
[ResolvedPathElement::Map(o), ResolvedPathElement::Key(k), ResolvedPathElement::List(l, _)] => Some(MoveSource::Reference{
containing_object_id: o.clone(),
key: k.clone(),
contained_object_id: l.clone()
}),
[ResolvedPathElement::Map(o), ResolvedPathElement::Key(k), ResolvedPathElement::Value(v)] => Some(MoveSource::Value{
containing_object_id: o.clone(),
value: v.clone(),
key: k.clone(),
}),
[ResolvedPathElement::List(l, _), ResolvedPathElement::Index(elem_id), ResolvedPathElement::Map(m)] => Some(MoveSource::Reference{
containing_object_id: l.clone(),
key: elem_id.as_key(),
contained_object_id: m.clone(),
}),
[ResolvedPathElement::List(l, _), ResolvedPathElement::Index(elem_id), ResolvedPathElement::List(l2, _)] => Some(MoveSource::Reference{
containing_object_id: l.clone(),
key: elem_id.as_key(),
contained_object_id: l2.clone(),
}),
[ResolvedPathElement::List(l, _), ResolvedPathElement::Index(i), ResolvedPathElement::Value(v)] => Some(MoveSource::Value{
containing_object_id: l.clone(),
value: v.clone(),
key: i.as_key(),
}),
_ => None
}
})
}
fn as_insert_after_target(&self) -> Option<InsertAfterTarget> {
self.last_n(3).and_then(|last_three| {
match &last_three[..] {
[ResolvedPathElement::List(l, m), ResolvedPathElement::Index(e), ResolvedPathElement::Value(_)] => Some(InsertAfterTarget{
list_id: l.clone(),
element_id: e.clone(),
max_elem: *m,
}),
[_, ResolvedPathElement::List(l, m), ResolvedPathElement::Index(e)] => Some(InsertAfterTarget{
list_id: l.clone(),
element_id: e.clone(),
max_elem: *m,
}),
_ => None,
}
})
}
fn last_n(&self, n: usize) -> Option<Box<[ResolvedPathElement]>> {
if self.0.len() < n {
None
} else {
Some(
self.0
.iter()
.skip(self.0.len() - n)
.cloned()
.collect::<Vec<ResolvedPathElement>>()
.into_boxed_slice(),
)
}
}
}
/// Represents the target of a "set" change request.
#[derive(Debug, Clone)]
struct SetTarget {
containing_object_id: ObjectID,
key: Key,
}
/// Represents a path which can be moved.
enum MoveSource {
Reference {
containing_object_id: ObjectID,
key: Key,
contained_object_id: ObjectID,
},
Value {
containing_object_id: ObjectID,
key: Key,
value: PrimitiveValue,
},
}
impl MoveSource {
fn delete_op(&self) -> Operation {
match self {
MoveSource::Reference {
containing_object_id,
key,
..
}
| MoveSource::Value {
containing_object_id,
key,
..
} => Operation::Delete {
object_id: containing_object_id.clone(),
key: key.clone(),
},
}
}
}
#[derive(Debug)]
struct InsertAfterTarget {
list_id: ObjectID,
element_id: ElementID,
max_elem: u32,
}
/// The ChangeContext is responsible for taking the current state of the op set
/// (an ObjectStore plus a Clock) and an actor ID, and generating a new change
/// for a given set of ChangeRequests. The ObjectStore which the ChangeContext
/// manages is a copy of the OpSet's ObjectStore; this is because the
/// ChangeContext needs to update the ObjectStore in order to process
/// ChangeRequests.
///
/// For example, if we have several ChangeRequests which are inserting elements
/// into a list, one after another, then we need to know the element IDs of the
/// newly inserted elements to generate the correct operations.
pub struct ChangeContext<'a> {
object_store: ObjectStore,
actor_id: ActorID,
states: &'a ActorStates,
clock: Clock,
}
impl<'a> ChangeContext<'a> {
pub fn new(
object_store: &ObjectStore,
actor_id: ActorID,
states: &'a ActorStates,
clock: Clock,
) -> ChangeContext<'a> {
ChangeContext {
object_store: object_store.clone(),
states,
actor_id,
clock,
}
}
fn get_operations_for_object_id(&self, object_id: &ObjectID) -> Option<&ObjectState> {
self.object_store.state_for_object_id(object_id)
}
pub(crate) fn create_change<I>(
&mut self,
requests: I,
message: Option<String>,
) -> Result<Change, InvalidChangeRequest>
where
I: IntoIterator<Item = ChangeRequest>,
{
let ops_with_errors: Vec<Result<Vec<Operation>, InvalidChangeRequest>> = requests
.into_iter()
.map(|request| {
let ops = match request {
ChangeRequest::Set {
ref path,
ref value,
} => self.create_set_operations(&self.actor_id, path, value),
ChangeRequest::Delete { ref path } => {
self.create_delete_operation(path).map(|o| vec![o])
}
ChangeRequest::Increment {
ref path,
ref value,
} => self
.create_increment_operation(path, value.clone())
.map(|o| vec![o]),
ChangeRequest::Move { ref from, ref to } => {
self.create_move_operations(from, to)
}
ChangeRequest::InsertAfter {
ref path,
ref value,
} => self.create_insert_operation(&self.actor_id, path, value),
};
// We have to apply each operation to the object store so that
// operations which reference earlier operations within this
// change set have the correct data to refer to.
ops.iter().for_each(|inner_ops| {
inner_ops.iter().for_each(|op| {
let op_with_meta = OperationWithMetadata {
sequence: self.clock.get(&self.actor_id) + 1,
actor_id: self.actor_id.clone(),
operation: op.clone(),
};
self.object_store
.apply_operation(self.states, op_with_meta)
.unwrap();
});
});
ops
})
.collect();
let nested_ops = ops_with_errors
.into_iter()
.collect::<Result<Vec<Vec<Operation>>, InvalidChangeRequest>>()?;
let ops = nested_ops.into_iter().flatten().collect::<Vec<Operation>>();
let dependencies = self.clock.clone();
let seq = self.clock.get(&self.actor_id) + 1;
let change = Change {
actor_id: self.actor_id.clone(),
operations: ops,
seq,
message,
dependencies,
};
Ok(change)
}
pub(crate) fn create_set_operations(
&self,
actor_id: &ActorID,
path: &Path,
value: &Value,
) -> Result<Vec<Operation>, InvalidChangeRequest> {
// If we're setting a map as the root object we actually want to set
// each key of the map to the corresponding key in the root object
if let Value::Map(kvs) = value.clone() {
if path.is_root() {
let mut ops = Vec::new();
for (key, value) in kvs.into_iter() {
let key_path = path.key(key);
let mut this_key_ops =
self.create_set_operations(actor_id, &key_path, &value)?;
ops.append(&mut this_key_ops)
}
return Ok(ops);
}
};
self.resolve_path(path)
.and_then(|r| r.as_set_target())
.map(|path_resolution| match value {
Value::Map { .. } | Value::List { .. } => {
let (new_object_id, mut create_ops) = value_to_ops(actor_id, &value);
let link_op = Operation::Link {
object_id: path_resolution.containing_object_id.clone(),
key: path_resolution.key.clone(),
value: new_object_id,
};
create_ops.push(link_op);
create_ops
}
Value::Str { .. } | Value::Number { .. } | Value::Boolean { .. } | Value::Null => {
vec![create_prim(
path_resolution.containing_object_id.clone(),
path_resolution.key,
&value,
)]
}
})
.ok_or_else(|| InvalidChangeRequest(format!("Missing path: {:?}", path)))
}
pub(crate) fn create_move_operations(
&self,
from: &Path,
to: &Path,
) -> Result<Vec<Operation>, InvalidChangeRequest> {
let resolved_from = self
.resolve_path(from)
.ok_or_else(|| InvalidChangeRequest(format!("Missing from path: {:?}", from)))?;
let resolved_to = self
.resolve_path(to)
.ok_or_else(|| InvalidChangeRequest(format!("Missing to path: {:?}", to)))?;
let move_source = resolved_from
.as_move_source()
.ok_or_else(|| InvalidChangeRequest(format!("Invalid move source path: {:?}", from)))?;
let target = resolved_to
.as_set_target()
.ok_or_else(|| InvalidChangeRequest(format!("Invalid to path: {:?}", to)))?;
let delete_op = move_source.delete_op();
let insert_op = match (move_source, target) {
(
MoveSource::Value { value: v, .. },
SetTarget {
containing_object_id,
key,
},
) => Operation::Set {
object_id: containing_object_id,
key,
value: v,
datatype: None,
},
(
MoveSource::Reference {
contained_object_id,
..
},
SetTarget {
containing_object_id: target_container_id,
key: target_key,
},
) => Operation::Link {
object_id: target_container_id,
key: target_key,
value: contained_object_id,
},
};
Ok(vec![delete_op, insert_op])
}
pub(crate) fn create_delete_operation(
&self,
path: &Path,
) -> Result<Operation, InvalidChangeRequest> {
self.resolve_path(path)
.and_then(|r| r.as_move_source())
.map(|source| source.delete_op())
.ok_or_else(|| InvalidChangeRequest(format!("Invalid delete path: {:?}", path)))
}
pub(crate) fn create_increment_operation(
&self,
_path: &Path,
_value: f64,
) -> Result<Operation, InvalidChangeRequest> {
Err(InvalidChangeRequest(
"create_increment_operation not implemented".to_string(),
))
}
pub(crate) fn create_insert_operation(
&self,
actor_id: &ActorID,
after: &Path,
value: &Value,
) -> Result<Vec<Operation>, InvalidChangeRequest> {
let after_target = self
.resolve_path(after)
.and_then(|p| p.as_insert_after_target())
.ok_or_else(|| {
InvalidChangeRequest(format!("Invalid insert after path: {:?}", after))
})?;
let next_elem_id =
ElementID::SpecificElementID(actor_id.clone(), after_target.max_elem + 1);
let insert_op = Operation::Insert {
list_id: after_target.list_id.clone(),
key: after_target.element_id,
elem: after_target.max_elem + 1,
};
let mut ops = vec![insert_op];
match value {
Value::Map { .. } | Value::List { .. } => {
let (new_object_id, create_ops) = value_to_ops(actor_id, &value);
ops.extend(create_ops);
let link_op = Operation::Link {
object_id: after_target.list_id.clone(),
key: next_elem_id.as_key(),
value: new_object_id,
};
ops.push(link_op);
}
Value::Str { .. } | Value::Number { .. } | Value::Boolean { .. } | Value::Null => {
ops.push(create_prim(
after_target.list_id.clone(),
next_elem_id.as_key(),
&value,
));
}
};
Ok(ops)
}
fn resolve_path(&self, path: &Path) -> Option<ResolvedPath> {
let mut resolved_elements: Vec<ResolvedPathElement> = Vec::new();
let mut containing_object_id = ObjectID::Root;
for next_elem in path {
match resolved_elements.last() {
Some(ResolvedPathElement::MissingKey(_)) => return None,
Some(ResolvedPathElement::Index(ElementID::Head)) => return None,
_ => {}
}
match next_elem {
PathElement::Root => {
resolved_elements.push(ResolvedPathElement::Map(ObjectID::Root))
}
PathElement::Key(key) => {
resolved_elements.push(ResolvedPathElement::Key(Key(key.to_string())));
let op = self
.get_operations_for_object_id(&containing_object_id)
.and_then(|history| match history {
ObjectState::Map(MapState {
operations_by_key, ..
}) => Some(operations_by_key),
ObjectState::List { .. } => None,
})
.and_then(|kvs| kvs.get(&Key(key.to_string())))
.and_then(|cops| cops.active_op())
.map(|o| o.operation.clone());
match op {
Some(Operation::Set { value, .. }) => {
resolved_elements.push(ResolvedPathElement::Value(value))
}
Some(Operation::Link { value, .. }) => {
match self.get_operations_for_object_id(&value) {
None => return None,
Some(ObjectState::Map { .. }) => {
resolved_elements.push(ResolvedPathElement::Map(value.clone()));
containing_object_id = value.clone()
}
Some(ObjectState::List(ListState { max_elem, .. })) => {
resolved_elements
.push(ResolvedPathElement::List(value.clone(), *max_elem));
containing_object_id = value.clone()
}
}
}
None => resolved_elements
.push(ResolvedPathElement::MissingKey(Key(key.to_string()))),
_ => return None,
}
}
PathElement::Index(index) => match index {
ListIndex::Head => {
match self.get_operations_for_object_id(&containing_object_id) {
Some(ObjectState::List { .. }) => {
resolved_elements.push(ResolvedPathElement::Index(ElementID::Head))
}
_ => return None,
};
}
ListIndex::Index(i) => {
let op = self
.get_operations_for_object_id(&containing_object_id)
.and_then(|history| match history {
ObjectState::List(ListState {
operations_by_elemid,
following,
..
}) => list_ops_in_order(operations_by_elemid, following).ok(),
ObjectState::Map { .. } => None,
})
.and_then(|ops| ops.get(*i).cloned())
.and_then(|(element_id, cops)| {
cops.active_op().map(|o| (element_id, o.operation.clone()))
});
match op {
Some((elem_id, Operation::Set { value, .. })) => {
resolved_elements.push(ResolvedPathElement::Index(elem_id));
resolved_elements.push(ResolvedPathElement::Value(value));
}
Some((_, Operation::Link { value, .. })) => {
match self.get_operations_for_object_id(&value) {
None => return None,
Some(ObjectState::Map { .. }) => {
resolved_elements
.push(ResolvedPathElement::Map(value.clone()));
containing_object_id = value
}
Some(ObjectState::List(ListState { max_elem, .. })) => {
resolved_elements.push(ResolvedPathElement::List(
value.clone(),
*max_elem,
));
containing_object_id = value
}
}
}
_ => return None,
}
}
},
}
}
Some(ResolvedPath::new(resolved_elements))
}
}
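// Recursively flattens a JSON-like `Value` into operations: each map or list
// gets a fresh `MakeMap`/`MakeList` under a new UUID object ID, primitives
// become `Set` ops, and nested containers are attached to their parent with a
// `Link` op. List elements additionally get an `Insert` op, with element IDs
// numbered 1..=n against this actor.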
fn value_to_ops(actor_id: &ActorID, v: &Value) -> (ObjectID, Vec<Operation>) {
match v {
Value::List(vs) => {
let list_id = ObjectID::ID(uuid::Uuid::new_v4().to_string());
let mut ops = vec![Operation::MakeList {
object_id: list_id.clone(),
}];
let mut elem_ops: Vec<Operation> = vs
.iter()
.enumerate()
.map(|(index, elem_value)| {
let elem: u32 = (index + 1).try_into().unwrap();
let previous_elemid = match index {
0 => ElementID::Head,
_ => ElementID::SpecificElementID(actor_id.clone(), elem - 1),
};
let insert_op = Operation::Insert {
list_id: list_id.clone(),
elem,
key: previous_elemid,
};
let elem_id = ElementID::SpecificElementID(actor_id.clone(), elem);
let mut elem_value_ops: Vec<Operation> = match elem_value {
Value::Boolean { .. }
| Value::Str { .. }
| Value::Number { .. }
| Value::Null { .. } => {
vec![create_prim(list_id.clone(), elem_id.as_key(), elem_value)]
}
Value::Map { .. } | Value::List { .. } => {
let (linked_object_id, mut value_ops) =
value_to_ops(actor_id, elem_value);
value_ops.push(Operation::Link {
object_id: list_id.clone(),
key: elem_id.as_key(),
value: linked_object_id,
});
value_ops
}
};
let mut result = Vec::new();
result.push(insert_op);
result.append(&mut elem_value_ops);
result
})
.flatten()
.collect();
ops.append(&mut elem_ops);
(list_id, ops)
}
Value::Map(kvs) => {
let object_id = ObjectID::ID(uuid::Uuid::new_v4().to_string());
let mut ops = vec![Operation::MakeMap {
object_id: object_id.clone(),
}];
let mut key_ops: Vec<Operation> = kvs
.iter()
.map(|(k, v)| match v {
Value::Boolean { .. }
| Value::Str { .. }
| Value::Number { .. }
| Value::Null { .. } => vec![create_prim(object_id.clone(), Key(k.clone()), v)],
Value::Map { .. } | Value::List { .. } => {
let (linked_object_id, mut value_ops) = value_to_ops(actor_id, v);
value_ops.push(Operation::Link {
object_id: object_id.clone(),
key: Key(k.clone()),
value: linked_object_id,
});
value_ops
}
})
.flatten()
.collect();
ops.append(&mut key_ops);
(object_id, ops)
}
_ => panic!("Only a map or list can be the top level object in value_to_ops"),
}
}
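// Builds the `Set` operation for a primitive value; panics on maps and lists,
// which callers have already routed through `value_to_ops`.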
fn create_prim(object_id: ObjectID, key: Key, value: &Value) -> Operation {
let prim_value = match value {
Value::Number(n) => PrimitiveValue::Number(*n),
Value::Boolean(b) => PrimitiveValue::Boolean(*b),
Value::Str(s) => PrimitiveValue::Str(s.to_string()),
Value::Null => PrimitiveValue::Null,
_ => panic!("Non primitive value passed to create_prim"),
};
Operation::Set {
object_id,
key,
value: prim_value,
datatype: None,
}
}

View file

@ -0,0 +1,80 @@
use automerge_backend::Value;
/// Represents the various changes that you can make to a document. All of
/// these use a "path" to refer to parts of the document. You can generate
/// paths using a builder syntax. E.g. this would refer to the second element
/// of an array under the "people" key in the root object:
///
/// ```rust,no_run
/// # use automerge::{Path, ListIndex};
/// Path::root().key("people".to_string()).index(ListIndex::Index(1));
/// ```
///
/// Note that there is a special `ListIndex` for the head of a list, in case
/// you want to insert something at the beginning
#[derive(Debug)]
pub enum ChangeRequest {
Set { path: Path, value: Value },
Move { from: Path, to: Path },
Delete { path: Path },
Increment { path: Path, value: f64 },
InsertAfter { path: Path, value: Value },
}
#[derive(Clone, Debug, PartialEq)]
pub enum PathElement {
Root,
Key(String),
Index(ListIndex),
}
#[derive(Clone, Debug, PartialEq)]
pub enum ListIndex {
Head,
Index(usize),
}
/// Represents a location within a document
#[derive(Debug)]
pub struct Path(Vec<PathElement>);
impl Path {
/// A path at the root of the document
pub fn root() -> Path {
Path(vec![PathElement::Root])
}
/// Returns a new path which points to the list element at index of the
/// current path
pub fn index(&self, index: ListIndex) -> Path {
let mut elems = self.0.clone();
elems.push(PathElement::Index(index));
Path(elems)
}
/// Returns a new path which points to the element under this key in the
/// current path
pub fn key(&self, key: String) -> Path {
let mut elems = self.0.clone();
elems.push(PathElement::Key(key));
Path(elems)
}
/// Returns the parent of this path
pub fn parent(&self) -> Path {
let mut elems = self.0.clone();
elems.pop();
Path(elems)
}
pub fn is_root(&self) -> bool {
self.0.len() == 1 && self.0[0] == PathElement::Root
}
}
impl<'a> IntoIterator for &'a Path {
type Item = &'a PathElement;
type IntoIter = std::slice::Iter<'a, PathElement>;
fn into_iter(self) -> Self::IntoIter {
self.0.iter()
}
}

132
automerge/src/document.rs Normal file
View file

@ -0,0 +1,132 @@
use super::{AutomergeError, ChangeRequest};
use crate::change_context::ChangeContext;
use crate::error::InvalidChangeRequest;
use automerge_backend::OpSet;
use automerge_backend::Value;
use automerge_backend::{ActorID, Change};
use uuid;
pub struct Document {
op_set: OpSet,
actor_id: ActorID,
}
impl Document {
/// Create a new, empty document
pub fn init() -> Document {
Document {
op_set: OpSet::init(),
actor_id: ActorID(uuid::Uuid::new_v4().to_string()),
}
}
/// Create a new document from a set of changes
pub fn load(changes: Vec<Change>) -> Result<Document, AutomergeError> {
let mut doc = Document::init();
for change in changes {
doc.apply_change(change)?
}
Ok(doc)
}
/// Get the current state of the document as an automerge `Value`
pub fn state(&self) -> &Value {
panic!("not implemented");
}
/// Add a single change to the document
pub fn apply_change(&mut self, change: Change) -> Result<(), AutomergeError> {
self.op_set.apply_change(change, false).map(|_| ())
}
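/// Create a change from the given change requests, apply it to this
/// document, and return it, e.g. so it can be sent to other peers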
pub fn create_and_apply_change(
&mut self,
message: Option<String>,
requests: Vec<ChangeRequest>,
) -> Result<Change, InvalidChangeRequest> {
let mut change_ctx = ChangeContext::new(
&self.op_set.object_store,
self.actor_id.clone(),
&self.op_set.states,
self.op_set.clock.clone(),
);
let change = change_ctx.create_change(requests, message)?;
self.apply_change(change.clone())
.map_err(|e| InvalidChangeRequest(format!("Error applying change: {:?}", e)))?;
Ok(change)
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::change_request::{ListIndex, Path};
use automerge_backend::Value;
use serde_json;
#[test]
#[ignore] // This is broken for some reason
fn test_insert_ops() {
let json_value: serde_json::Value = serde_json::from_str(
r#"
{
"values": [1.0, false]
}
"#,
)
.unwrap();
let mut doc = Document::init();
doc.create_and_apply_change(
Some("Initial".to_string()),
vec![ChangeRequest::Set {
path: Path::root(),
value: Value::from_json(&json_value),
}],
)
.unwrap();
let person_json: serde_json::Value = serde_json::from_str(
r#"
{
"name": "fred",
"surname": "johnson"
}
"#,
)
.unwrap();
doc.create_and_apply_change(
Some("list additions".to_string()),
vec![
ChangeRequest::InsertAfter {
path: Path::root()
.key("values".to_string())
.index(ListIndex::Head),
value: Value::from_json(&person_json),
},
ChangeRequest::InsertAfter {
path: Path::root()
.key("values".to_string())
.index(ListIndex::Index(1)),
value: Value::from_json(&serde_json::Value::String("final".to_string())),
},
],
)
.unwrap();
let expected: serde_json::Value = serde_json::from_str(
r#"
{
"values": [
{
"name": "fred",
"surname": "johnson"
},
1.0,
false,
"final"
]
}
"#,
)
.unwrap();
assert_eq!(expected, doc.state().to_json());
}
}

13
automerge/src/error.rs Normal file
View file

@ -0,0 +1,13 @@
use std::error::Error;
use std::fmt;
#[derive(Debug)]
pub struct InvalidChangeRequest(pub String);
impl Error for InvalidChangeRequest {}
impl fmt::Display for InvalidChangeRequest {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.0)
}
}

67
automerge/src/lib.rs Normal file
View file

@ -0,0 +1,67 @@
//! Get your changes from the javascript library like so:
//!
//! ```javascript
//! doc = ... // create and edit an automerge document
//! let changes = Automerge.getHistory(doc).map(h => h.change)
//! console.log(JSON.stringify(changes, null, 4))
//! ```
//!
//! Then load the changes in rust:
//!
//! ```rust,no_run
//! # use automerge::Change;
//! let changes_str = "<paste the contents of the output here>";
//! let changes: Vec<Change> = serde_json::from_str(changes_str).unwrap();
//! let doc = automerge::Document::load(changes).unwrap();
//! println!("{:?}", doc.state().to_json());
//! ```
//!
//! Generate changes like so:
//!
//! ```rust,no_run
//! # use automerge::{Document, Change, ChangeRequest, Path, Value};
//! let mut doc = Document::init();
//! let json_value: serde_json::Value = serde_json::from_str(
//! r#"
//! {
//! "cards_by_id": {},
//! "size_of_cards": 12.0,
//! "numRounds": 11.0,
//! "cards": [1.0, false]
//! }
//! "#,
//! )
//! .unwrap();
//! doc.create_and_apply_change(
//! Some("Some change".to_string()),
//! vec![ChangeRequest::Set {
//! path: Path::root().key("the-state".to_string()),
//! value: Value::from_json(&json_value),
//! }],
//! )
//! .unwrap();
//! let expected: serde_json::Value = serde_json::from_str(
//! r#"
//! {
//! "the-state": {
//! "cards_by_id": {},
//! "size_of_cards": 12.0,
//! "numRounds": 11.0,
//! "cards": [1.0, false]
//! }
//! }
//! "#,
//! )
//! .unwrap();
//! assert_eq!(expected, doc.state().to_json());
//! ```
mod change_context;
mod change_request;
mod document;
mod error;
pub use automerge_backend::AutomergeError;
pub use automerge_backend::Change;
pub use automerge_backend::Value;
pub use change_request::{ChangeRequest, ListIndex, Path};
pub use document::Document;

View file

@ -0,0 +1,2 @@
extern crate automerge;

View file

@ -0,0 +1,2 @@
extern crate automerge;

94
flake.lock generated
View file

@ -1,94 +0,0 @@
{
"nodes": {
"flake-utils": {
"locked": {
"lastModified": 1667395993,
"narHash": "sha256-nuEHfE/LcWyuSWnS8t12N1wc105Qtau+/OdUAjtQ0rA=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "5aed5285a952e0b949eb3ba02c12fa4fcfef535f",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"flake-utils_2": {
"locked": {
"lastModified": 1659877975,
"narHash": "sha256-zllb8aq3YO3h8B/U0/J1WBgAL8EX5yWf5pMj3G0NAmc=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "c0e246b9b83f637f4681389ecabcb2681b4f3af0",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1669542132,
"narHash": "sha256-DRlg++NJAwPh8io3ExBJdNW7Djs3plVI5jgYQ+iXAZQ=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "a115bb9bd56831941be3776c8a94005867f316a7",
"type": "github"
},
"original": {
"owner": "nixos",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs_2": {
"locked": {
"lastModified": 1665296151,
"narHash": "sha256-uOB0oxqxN9K7XGF1hcnY+PQnlQJ+3bP2vCn/+Ru/bbc=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "14ccaaedd95a488dd7ae142757884d8e125b3363",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixpkgs-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs",
"rust-overlay": "rust-overlay"
}
},
"rust-overlay": {
"inputs": {
"flake-utils": "flake-utils_2",
"nixpkgs": "nixpkgs_2"
},
"locked": {
"lastModified": 1669775522,
"narHash": "sha256-6xxGArBqssX38DdHpDoPcPvB/e79uXyQBwpBcaO/BwY=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "3158e47f6b85a288d12948aeb9a048e0ed4434d6",
"type": "github"
},
"original": {
"owner": "oxalica",
"repo": "rust-overlay",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

View file

@ -1,69 +0,0 @@
{
description = "automerge-rs";
inputs = {
nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
flake-utils.url = "github:numtide/flake-utils";
rust-overlay.url = "github:oxalica/rust-overlay";
};
outputs = {
self,
nixpkgs,
flake-utils,
rust-overlay,
}:
flake-utils.lib.eachDefaultSystem
(system: let
pkgs = import nixpkgs {
overlays = [rust-overlay.overlays.default];
inherit system;
};
rust = pkgs.rust-bin.stable.latest.default;
in {
formatter = pkgs.alejandra;
packages = {
deadnix = pkgs.runCommand "deadnix" {} ''
${pkgs.deadnix}/bin/deadnix --fail ${./.}
mkdir $out
'';
};
checks = {
inherit (self.packages.${system}) deadnix;
};
devShells.default = pkgs.mkShell {
buildInputs = with pkgs; [
(rust.override {
extensions = ["rust-src"];
targets = ["wasm32-unknown-unknown"];
})
cargo-edit
cargo-watch
cargo-criterion
cargo-fuzz
cargo-flamegraph
cargo-deny
crate2nix
wasm-pack
pkgconfig
openssl
gnuplot
nodejs
yarn
deno
# c deps
cmake
cmocka
doxygen
rnix-lsp
nixpkgs-fmt
];
};
});
}

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.4 KiB

View file

@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 80.46 80.46"><defs><style>.cls-1{fill:#fc3;}.cls-1,.cls-2{fill-rule:evenodd;}.cls-2{fill:#2a1e20;}</style></defs><g id="Layer_2" data-name="Layer 2"><g id="Layer_1-2" data-name="Layer 1"><path class="cls-1" d="M79.59,38.12a3,3,0,0,1,0,4.21L42.34,79.58a3,3,0,0,1-4.22,0L.88,42.33a3,3,0,0,1,0-4.2L38.12.87a3,3,0,0,1,4.22,0"/><path class="cls-2" d="M76.87,38.76,41.71,3.59a2.09,2.09,0,0,0-2.93,0L3.62,38.76a2.07,2.07,0,0,0,0,2.93L38.78,76.85a2.07,2.07,0,0,0,2.93,0L76.87,41.69a2.07,2.07,0,0,0,0-2.93m-2,.79a.93.93,0,0,1,0,1.34l-33.94,34a1,1,0,0,1-1.33,0l-34-33.95a.94.94,0,0,1,0-1.32l34-34a1,1,0,0,1,1.33,0Z"/><path class="cls-2" d="M36.25,32.85v1.71c0,6.35-5.05,11.38-9.51,16.45l4.08,4.07c2.48-2.6,4.72-5.24,5.43-6.19V60.14h7.94V32.88l4.25,1.3a1.68,1.68,0,0,0,2.25-2.24L40.27,16.7,29.75,31.94A1.68,1.68,0,0,0,32,34.18"/></g></g></svg>

Before

Width:  |  Height:  |  Size: 885 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 254 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 5.7 KiB

View file

@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 400.72 80.46"><defs><style>.cls-1{fill:#fc3;}.cls-1,.cls-2{fill-rule:evenodd;}.cls-2{fill:#2a1e20;}</style></defs><g id="Layer_2" data-name="Layer 2"><g id="Layer_1-2" data-name="Layer 1"><path class="cls-1" d="M79.59,38.12a3,3,0,0,1,0,4.21L42.34,79.58a3,3,0,0,1-4.22,0L.88,42.33a3,3,0,0,1,0-4.2L38.12.87a3,3,0,0,1,4.22,0"/><path class="cls-2" d="M76.87,38.76,41.71,3.59a2.09,2.09,0,0,0-2.93,0L3.62,38.76a2.07,2.07,0,0,0,0,2.93L38.78,76.85a2.07,2.07,0,0,0,2.93,0L76.87,41.69a2.07,2.07,0,0,0,0-2.93m-2,.79a.93.93,0,0,1,0,1.34l-33.94,34a1,1,0,0,1-1.33,0l-34-33.95a.94.94,0,0,1,0-1.32l34-34a1,1,0,0,1,1.33,0Z"/><path class="cls-2" d="M36.25,32.85v1.71c0,6.35-5.05,11.38-9.51,16.45l4.08,4.07c2.48-2.6,4.72-5.24,5.43-6.19V60.14h7.94V32.88l4.25,1.3a1.68,1.68,0,0,0,2.25-2.24L40.27,16.7,29.75,31.94A1.68,1.68,0,0,0,32,34.18"/><path d="M124.14,60.08,120.55,50h-17L100,60.08H93.34l15.34-42.61h6.75L131,60.08Zm-9-25.63c-1-3-2.74-8-3.22-9.8-.49,1.83-2,6.7-3.11,9.86l-3.41,9.74H118.6Z"/><path d="M156.7,60.08V57c-1.58,2.32-4.74,3.72-8,3.72-7.43,0-11.38-4.87-11.38-14.31V28.12h6.27V46.2c0,6.45,2.43,8.76,6.57,8.76s6.57-3,6.57-8.15V28.12H163v32Z"/><path d="M187.5,59.29a12.74,12.74,0,0,1-6.15,1.46c-4.44,0-7.18-2.74-7.18-8.46V33.84h-4.56V28.12h4.56V19l6.15-3.29V28.12h7.91v5.72h-7.91V51.19c0,3,1,3.83,3.29,3.83a10,10,0,0,0,4.62-1.27Z"/><path d="M208.08,60.75c-8,0-14.06-6.64-14.06-16.62,0-10.47,6.2-16.68,14.24-16.68S222.5,34,222.5,44C222.5,54.54,216.29,60.75,208.08,60.75ZM208,33.42c-4.75,0-7.67,4.2-7.67,10.53,0,7,3.22,10.83,8,10.83s7.85-4.81,7.85-10.65C216.17,37.62,213.07,33.42,208,33.42Z"/><path d="M267.36,60.08V42c0-6.45-2-8.77-6.15-8.77s-6.14,3-6.14,8.16V60.08H248.8V42c0-6.45-2-8.77-6.15-8.77s-6.15,3-6.15,8.16V60.08h-6.27v-32h6.27v3a9,9,0,0,1,7.61-3.71c4.32,0,7.06,1.65,8.76,4.69,2.32-2.86,4.81-4.69,9.8-4.69,7.43,0,11,4.87,11,14.31V60.08Z"/><path d="M308.39,46.32H287.27c.66,6.15,4.13,8.77,8,8.77a11.22,11.22,0,0,0,6.94-2.56l3.71,4a14.9,14.9,0,0,1-11,4.2c-7.48,0-13.81-6-13.81-16.62,0-10.84,5.72-16.68,14-16.68,9.07,0,13.45,7.37,13.45,16C308.57,44.62,308.45,45.65,308.39,46.32Zm-13.7-13.21c-4.2,0-6.76,2.92-7.3,8h14.85C301.93,36.76,299.86,33.11,294.69,33.11Z"/><path d="M333.71,34.76a9.37,9.37,0,0,0-4.81-1.16c-4,0-6.27,2.8-6.27,8.22V60.08h-6.27v-32h6.27v3a8.86,8.86,0,0,1,7.3-3.71,9.22,9.22,0,0,1,5.42,1.34Z"/><path d="M350.45,71.82l-2.14-4.74c9-.43,11-2.86,11-9.5V57c-2.31,2.13-4.93,3.72-8.28,3.72-6.81,0-12.29-5-12.29-17.17,0-10.95,6-16.13,12.6-16.13a11.11,11.11,0,0,1,8,3.65v-3h6.27V57C365.54,66.77,362,71.46,350.45,71.82Zm8.94-34.39c-1.4-1.88-4.32-4.2-7.48-4.2-4.51,0-6.94,3.41-6.94,10.17,0,8,2.55,11.56,7.18,11.56,3,0,5.6-2,7.24-4.07Z"/><path d="M400.54,46.32H379.42c.67,6.15,4.14,8.77,8,8.77a11.22,11.22,0,0,0,6.94-2.56l3.71,4a14.87,14.87,0,0,1-11,4.2c-7.49,0-13.82-6-13.82-16.62,0-10.84,5.72-16.68,14-16.68,9.07,0,13.45,7.37,13.45,16C400.72,44.62,400.6,45.65,400.54,46.32Zm-13.7-13.21c-4.2,0-6.75,2.92-7.3,8h14.85C394.09,36.76,392,33.11,386.84,33.11Z"/></g></g></svg>

Before

Width:  |  Height:  |  Size: 3 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 7.7 KiB

View file

@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 485 108"><defs><style>.cls-1{fill:#fff;}.cls-2{fill:#fc3;}.cls-3{fill:#2a1e20;fill-rule:evenodd;}</style></defs><g id="Layer_2" data-name="Layer 2"><g id="Layer_1-2" data-name="Layer 1"><path class="cls-1" d="M465,5a15,15,0,0,1,15,15V88a15,15,0,0,1-15,15H20A15,15,0,0,1,5,88V20A15,15,0,0,1,20,5H465m0-5H20A20,20,0,0,0,0,20V88a20,20,0,0,0,20,20H465a20,20,0,0,0,20-20V20A20,20,0,0,0,465,0Z"/><rect class="cls-2" x="3.7" y="3.7" width="477.6" height="100.6" rx="16.3"/><path class="cls-2" d="M465,5a15,15,0,0,1,15,15V88a15,15,0,0,1-15,15H20A15,15,0,0,1,5,88V20A15,15,0,0,1,20,5H465m0-2.6H20A17.63,17.63,0,0,0,2.4,20V88A17.63,17.63,0,0,0,20,105.6H465A17.63,17.63,0,0,0,482.6,88V20A17.63,17.63,0,0,0,465,2.4Z"/><path d="M465,7.6A12.41,12.41,0,0,1,477.4,20V88A12.41,12.41,0,0,1,465,100.4H20A12.41,12.41,0,0,1,7.6,88V20A12.41,12.41,0,0,1,20,7.6H465M465,5H20A15,15,0,0,0,5,20V88a15,15,0,0,0,15,15H465a15,15,0,0,0,15-15V20A15,15,0,0,0,465,5Z"/><path class="cls-3" d="M106.1,51.48l-34-34a2,2,0,0,0-2.83,0l-34,34a2,2,0,0,0,0,2.82l34,34a2,2,0,0,0,2.83,0l34-34a2,2,0,0,0,0-2.82m-.76.74a.93.93,0,0,1,0,1.34L71.4,87.5a1,1,0,0,1-1.33,0l-34-33.94a.94.94,0,0,1,0-1.32l34-34a1,1,0,0,1,1.33,0Z"/><path class="cls-3" d="M67,45.62V47c0,6.2-5.1,11.11-9.59,16.06l4.11,4C64,64.52,66.28,61.94,67,61V72h8V45.37l4.29,1.27a1.67,1.67,0,0,0,2.27-2.19L71,29.56,60.45,44.45a1.67,1.67,0,0,0,2.27,2.19"/><path d="M162.62,72.74,159,62.64H142l-3.53,10.1h-6.63l15.34-42.61h6.75l15.53,42.61Zm-9-25.62c-1-3-2.74-8-3.22-9.8-.49,1.82-2,6.69-3.11,9.86l-3.41,9.73h13.15Z"/><path d="M195.18,72.74v-3c-1.58,2.31-4.74,3.71-8,3.71-7.43,0-11.38-4.87-11.38-14.3V40.78H182V58.86c0,6.45,2.43,8.77,6.57,8.77s6.57-3,6.57-8.16V40.78h6.27v32Z"/><path d="M226,72a12.74,12.74,0,0,1-6.15,1.46c-4.44,0-7.18-2.74-7.18-8.46V46.51h-4.56V40.78h4.56V31.65l6.15-3.28V40.78h7.91v5.73H218.8V63.85c0,3,1,3.84,3.29,3.84a10,10,0,0,0,4.62-1.28Z"/><path d="M246.56,73.41c-8,0-14.06-6.63-14.06-16.62,0-10.47,6.2-16.67,14.24-16.67S261,46.63,261,56.61C261,67.2,254.77,73.41,246.56,73.41Zm-.07-27.33c-4.74,0-7.66,4.2-7.66,10.53,0,7,3.22,10.83,8,10.83s7.85-4.8,7.85-10.65C254.65,50.28,251.55,46.08,246.49,46.08Z"/><path d="M305.84,72.74V54.66c0-6.45-2-8.76-6.15-8.76s-6.14,3-6.14,8.15V72.74h-6.27V54.66c0-6.45-2-8.76-6.15-8.76s-6.15,3-6.15,8.15V72.74h-6.27v-32H275v3a9,9,0,0,1,7.61-3.71c4.32,0,7.06,1.64,8.76,4.68,2.32-2.86,4.81-4.68,9.8-4.68,7.43,0,11,4.86,11,14.3V72.74Z"/><path d="M346.87,59H325.74c.67,6.15,4.14,8.77,8,8.77a11.16,11.16,0,0,0,6.94-2.56l3.71,4a14.86,14.86,0,0,1-11,4.2c-7.48,0-13.81-6-13.81-16.62,0-10.83,5.72-16.67,14-16.67,9.07,0,13.45,7.36,13.45,16C347.05,57.28,346.93,58.31,346.87,59Zm-13.7-13.2c-4.2,0-6.76,2.92-7.3,8h14.85C340.41,49.43,338.34,45.78,333.17,45.78Z"/><path d="M372.19,47.42a9.37,9.37,0,0,0-4.81-1.16c-4,0-6.27,2.8-6.27,8.22V72.74h-6.27v-32h6.27v3a8.86,8.86,0,0,1,7.3-3.71,9.22,9.22,0,0,1,5.42,1.33Z"/><path d="M388.92,84.49l-2.13-4.75c9-.43,11-2.86,11-9.5V69.7c-2.31,2.13-4.93,3.71-8.28,3.71-6.81,0-12.29-5-12.29-17.16,0-11,6-16.13,12.6-16.13a11.07,11.07,0,0,1,8,3.65v-3H404V69.7C404,79.44,400.49,84.12,388.92,84.49Zm8.95-34.39c-1.4-1.89-4.32-4.2-7.48-4.2-4.51,0-6.94,3.41-6.94,10.16,0,8,2.55,11.57,7.18,11.57,3,0,5.6-2,7.24-4.08Z"/><path 
d="M439,59H417.9c.67,6.15,4.14,8.77,8,8.77a11.16,11.16,0,0,0,6.94-2.56l3.71,4a14.84,14.84,0,0,1-11,4.2c-7.49,0-13.82-6-13.82-16.62,0-10.83,5.72-16.67,14-16.67,9.07,0,13.45,7.36,13.45,16C439.2,57.28,439.08,58.31,439,59Zm-13.7-13.2c-4.2,0-6.75,2.92-7.3,8h14.85C432.57,49.43,430.5,45.78,425.32,45.78Z"/></g></g></svg>

Before

Width:  |  Height:  |  Size: 3.5 KiB

View file

@ -1,3 +0,0 @@
{
"replacer": "scripts/denoify-replacer.mjs"
}

View file

@ -1,2 +0,0 @@
dist
examples

View file

@ -1,15 +0,0 @@
module.exports = {
root: true,
parser: "@typescript-eslint/parser",
plugins: ["@typescript-eslint"],
extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended"],
rules: {
"@typescript-eslint/no-unused-vars": [
"error",
{
argsIgnorePattern: "^_",
varsIgnorePattern: "^_",
},
],
},
}

View file

@ -1,6 +0,0 @@
/node_modules
/yarn.lock
dist
docs/
.vim
deno_dist/

View file

@ -1,4 +0,0 @@
e2e/verdacciodb
dist
docs
deno_dist

View file

@ -1,4 +0,0 @@
{
"semi": false,
"arrowParens": "avoid"
}

View file

@ -1,39 +0,0 @@
## Architecture
The `@automerge/automerge` package is a set of
[`Proxy`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Proxy)
objects which provide an idiomatic javascript interface built on top of the
lower level `@automerge/automerge-wasm` package (which is in turn built from the
Rust codebase and can be found in `~/automerge-wasm`). I.e. the responsibility
of this codebase is
- To map from the javascript data model to the underlying `set`, `make`,
`insert`, and `delete` operations of Automerge (see the sketch after this
list).
- To expose a more convenient interface to functions in `automerge-wasm` which
generate messages to send over the network or compressed file formats to
store on disk.
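As a rough sketch of that mapping (the exact operations `automerge-wasm` emits
are an implementation detail, so treat the comments below as illustrative), a
single high-level mutation decomposes into a handful of those low-level
operations:

```javascript
import * as automerge from "@automerge/automerge"

let doc = automerge.init()
doc = automerge.change(doc, d => {
  d.tasks = [] // roughly: make a new list object, set it under the "tasks" key
  d.tasks.push({ done: false }) // roughly: insert an element, make a map, set "done"
})
```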
## Building and testing
Much of the functionality of this package depends on the
`@automerge/automerge-wasm` package and frequently you will be working on both
of them at the same time. It would be frustrating to have to push
`automerge-wasm` to NPM every time you want to test a change but I (Alex) also
don't trust `yarn link` to do the right thing here. Therefore, the `./e2e`
folder contains a little yarn package which spins up a local NPM registry. See
`./e2e/README` for details. In brief though:
To build `automerge-wasm` and install it in the local `node_modules`
```bash
cd e2e && yarn install && yarn run e2e buildjs
```
Now that you've done this you can run the tests
```bash
yarn test
```
If you make changes to the `automerge-wasm` package you will need to re-run
`yarn e2e buildjs`

View file

@ -1,10 +0,0 @@
MIT License
Copyright 2022, Ink & Switch LLC
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

View file

@ -1,109 +0,0 @@
## Automerge
Automerge is a library of data structures for building collaborative
applications; this package is the javascript implementation.
Detailed documentation is available at [automerge.org](http://automerge.org/)
but see the following for a short getting started guide.
## Quickstart
First, install the library.
```
yarn add @automerge/automerge
```
If you're writing a `node` application, you can skip straight to [Make some
data](#make-some-data). If you're in a browser you need a bundler
### Bundler setup
`@automerge/automerge` is a wrapper around a core library which is written in
rust, compiled to WebAssembly and distributed as a separate package called
`@automerge/automerge-wasm`. Browsers don't currently support WebAssembly
modules taking part in ESM module imports, so you must use a bundler to import
`@automerge/automerge` in the browser. There are a lot of bundlers out there, we
have examples for common bundlers in the `examples` folder. Here is a short
example using Webpack 5.
Assuming a standard setup of a new webpack project, you'll need to enable the
`asyncWebAssembly` experiment. In a typical webpack project that means adding
something like this to `webpack.config.js`
```javascript
module.exports = {
...
experiments: { asyncWebAssembly: true },
performance: { // we don't want the wasm blob to generate warnings
hints: false,
maxEntrypointSize: 512000,
maxAssetSize: 512000
}
};
```
### Make some data
Automerge allows separate threads of execution to make changes to some data
and always be able to merge their changes later.
```javascript
import * as automerge from "@automerge/automerge"
import * as assert from "assert"
let doc1 = automerge.from({
tasks: [
{ description: "feed fish", done: false },
{ description: "water plants", done: false },
],
})
// Create a new thread of execution
let doc2 = automerge.clone(doc1)
// Now we concurrently make changes to doc1 and doc2
// Complete a task in doc2
doc2 = automerge.change(doc2, d => {
d.tasks[0].done = true
})
// Add a task in doc1
doc1 = automerge.change(doc1, d => {
d.tasks.push({
description: "water fish",
done: false,
})
})
// Merge changes from both docs
doc1 = automerge.merge(doc1, doc2)
doc2 = automerge.merge(doc2, doc1)
// Both docs are merged and identical
assert.deepEqual(doc1, {
tasks: [
{ description: "feed fish", done: true },
{ description: "water plants", done: false },
{ description: "water fish", done: false },
],
})
assert.deepEqual(doc2, {
tasks: [
{ description: "feed fish", done: true },
{ description: "water plants", done: false },
{ description: "water fish", done: false },
],
})
```
## Development
See [HACKING.md](./HACKING.md)
## Meta
Copyright 2017-present, the Automerge contributors. Released under the terms of the
MIT license (see `LICENSE`).

View file

@ -1,12 +0,0 @@
{
"extends": "../tsconfig.json",
"exclude": [
"../dist/**/*",
"../node_modules",
"../test/**/*",
"../src/**/*.deno.ts"
],
"compilerOptions": {
"outDir": "../dist/cjs"
}
}

View file

@ -1,13 +0,0 @@
{
"extends": "../tsconfig.json",
"exclude": [
"../dist/**/*",
"../node_modules",
"../test/**/*",
"../src/**/*.deno.ts"
],
"emitDeclarationOnly": true,
"compilerOptions": {
"outDir": "../dist"
}
}

View file

@ -1,14 +0,0 @@
{
"extends": "../tsconfig.json",
"exclude": [
"../dist/**/*",
"../node_modules",
"../test/**/*",
"../src/**/*.deno.ts"
],
"compilerOptions": {
"target": "es6",
"module": "es6",
"outDir": "../dist/mjs"
}
}

View file

@ -1,10 +0,0 @@
import * as Automerge from "../deno_dist/index.ts"
Deno.test("It should create, clone and free", () => {
let doc1 = Automerge.init()
let doc2 = Automerge.clone(doc1)
// this is only needed if weakrefs are not supported
Automerge.free(doc1)
Automerge.free(doc2)
})

View file

@ -1,3 +0,0 @@
node_modules/
verdacciodb/
htpasswd

View file

@ -1,70 +0,0 @@
# End to end testing for javascript packaging
The network of packages and bundlers we rely on to get the `automerge` package
working is a little complex. We have the `automerge-wasm` package, which the
`automerge` package depends upon, which means that anyone who depends on
`automerge` needs to either a) be using node or b) use a bundler in order to
load the underlying WASM module which is packaged in `automerge-wasm`.
The various bundlers involved are complicated and capricious and so we need an
easy way of testing that everything is in fact working as expected. To do this
we run a custom NPM registry (namely [Verdaccio](https://verdaccio.org/)) and
build the `automerge-wasm` and `automerge` packages and publish them to this
registry. Once we have this registry running we are able to build the example
projects which depend on these packages and check that everything works as
expected.
## Usage
First, install everything:
```
yarn install
```
### Build `automerge-js`
This builds the `automerge-wasm` package and then runs `yarn build` in the
`automerge-js` project with the `--registry` set to the verdaccio registry. The
end result is that you can run `yarn test` in the resulting `automerge-js`
directory in order to run tests against the current `automerge-wasm`.
```
yarn e2e buildjs
```
### Build examples
This builds either all of the examples in `automerge-js/examples` or just a
subset of them. Once this is complete you can run the relevant scripts (e.g. `vite dev`
for the Vite example) to check everything works.
```
yarn e2e buildexamples
```
Or, to just build the webpack example
```
yarn e2e buildexamples -e webpack
```
### Run Registry
If you're experimenting with a project which is not in the `examples` folder
you'll need a running registry. `run-registry` builds and publishes
`automerge-js` and `automerge-wasm` and then runs the registry at
`localhost:4873`.
```
yarn e2e run-registry
```
You can now run `yarn install --registry http://localhost:4873` to experiment
with the built packages.
## Using the `dev` build of `automerge-wasm`
All the commands above take a `-p` flag which can be either `release` or
`dev` (e.g. `yarn e2e buildjs -p dev`). The `dev` profile builds with
additional debug symbols, which makes errors less cryptic.

View file

@ -1,534 +0,0 @@
import { once } from "events"
import { setTimeout } from "timers/promises"
import { spawn, ChildProcess } from "child_process"
import * as child_process from "child_process"
import {
command,
subcommands,
run,
array,
multioption,
option,
Type,
} from "cmd-ts"
import * as path from "path"
import * as fsPromises from "fs/promises"
import fetch from "node-fetch"
const VERDACCIO_DB_PATH = path.normalize(`${__dirname}/verdacciodb`)
const VERDACCIO_CONFIG_PATH = path.normalize(`${__dirname}/verdaccio.yaml`)
const AUTOMERGE_WASM_PATH = path.normalize(
`${__dirname}/../../rust/automerge-wasm`
)
const AUTOMERGE_JS_PATH = path.normalize(`${__dirname}/..`)
const EXAMPLES_DIR = path.normalize(path.join(__dirname, "../", "examples"))
// The different example projects in "../examples"
type Example = "webpack" | "vite" | "create-react-app"
// Type to parse strings to `Example` so the types line up for the `buildExamples` command
const ReadExample: Type<string, Example> = {
async from(str) {
if (str === "webpack") {
return "webpack"
} else if (str === "vite") {
return "vite"
} else if (str === "create-react-app") {
return "create-react-app"
} else {
throw new Error(`Unknown example type ${str}`)
}
},
}
type Profile = "dev" | "release"
const ReadProfile: Type<string, Profile> = {
async from(str) {
if (str === "dev") {
return "dev"
} else if (str === "release") {
return "release"
} else {
throw new Error(`Unknown profile ${str}`)
}
},
}
const buildjs = command({
name: "buildjs",
args: {
profile: option({
type: ReadProfile,
long: "profile",
short: "p",
defaultValue: () => "dev" as Profile,
}),
},
handler: ({ profile }) => {
console.log("building js")
withPublishedWasm(profile, async (registryUrl: string) => {
await buildAndPublishAutomergeJs(registryUrl)
})
},
})
const buildWasm = command({
name: "buildwasm",
args: {
profile: option({
type: ReadProfile,
long: "profile",
short: "p",
defaultValue: () => "dev" as Profile,
}),
},
handler: ({ profile }) => {
console.log("building automerge-wasm")
withRegistry(buildAutomergeWasm(profile))
},
})
const buildexamples = command({
name: "buildexamples",
args: {
examples: multioption({
long: "example",
short: "e",
type: array(ReadExample),
}),
profile: option({
type: ReadProfile,
long: "profile",
short: "p",
defaultValue: () => "dev" as Profile,
}),
},
handler: ({ examples, profile }) => {
if (examples.length === 0) {
examples = ["webpack", "vite", "create-react-app"]
}
buildExamples(examples, profile)
},
})
const runRegistry = command({
name: "run-registry",
args: {
profile: option({
type: ReadProfile,
long: "profile",
short: "p",
defaultValue: () => "dev" as Profile,
}),
},
handler: ({ profile }) => {
withPublishedWasm(profile, async (registryUrl: string) => {
await buildAndPublishAutomergeJs(registryUrl)
console.log("\n************************")
console.log(` Verdaccio NPM registry is running at ${registryUrl}`)
console.log(" press CTRL-C to exit ")
console.log("************************")
await once(process, "SIGINT")
}).catch(e => {
console.error(`Failed: ${e}`)
})
},
})
const app = subcommands({
name: "e2e",
cmds: {
buildjs,
buildexamples,
buildwasm: buildWasm,
"run-registry": runRegistry,
},
})
run(app, process.argv.slice(2))
async function buildExamples(examples: Array<Example>, profile: Profile) {
await withPublishedWasm(profile, async registryUrl => {
printHeader("building and publishing automerge")
await buildAndPublishAutomergeJs(registryUrl)
// Every example is built the same way: clear out any stale automerge
// packages, reinstall against the local verdaccio registry, then build.
for (const example of examples) {
printHeader(`building ${example} example`)
const projectPath = path.join(EXAMPLES_DIR, example)
await removeExistingAutomerge(projectPath)
await fsPromises.rm(path.join(projectPath, "yarn.lock"), {
force: true,
})
await spawnAndWait(
"yarn",
[
"--cwd",
projectPath,
"install",
"--registry",
registryUrl,
"--check-files",
],
{ stdio: "inherit" }
)
await spawnAndWait("yarn", ["--cwd", projectPath, "build"], {
stdio: "inherit",
})
}
})
}
type WithRegistryAction = (registryUrl: string) => Promise<void>
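// Start verdaccio once, run each action against it in turn, and race every
// action against the registry process exiting, so a crashed registry fails
// the run immediately instead of hanging it.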
async function withRegistry(
action: WithRegistryAction,
...actions: Array<WithRegistryAction>
) {
// First, start verdaccio
printHeader("Starting verdaccio NPM server")
const verd = await VerdaccioProcess.start()
actions.unshift(action)
for (const action of actions) {
try {
type Step = "verd-died" | "action-completed"
const verdDied: () => Promise<Step> = async () => {
await verd.died()
return "verd-died"
}
const actionComplete: () => Promise<Step> = async () => {
await action("http://localhost:4873")
return "action-completed"
}
const result = await Promise.race([verdDied(), actionComplete()])
if (result === "verd-died") {
throw new Error("verdaccio unexpectedly exited")
}
} catch (e) {
await verd.kill()
throw e
}
}
await verd.kill()
}
async function withPublishedWasm(profile: Profile, action: WithRegistryAction) {
await withRegistry(buildAutomergeWasm(profile), publishAutomergeWasm, action)
}
function buildAutomergeWasm(profile: Profile): WithRegistryAction {
return async (registryUrl: string) => {
printHeader("building automerge-wasm")
await spawnAndWait(
"yarn",
["--cwd", AUTOMERGE_WASM_PATH, "--registry", registryUrl, "install"],
{ stdio: "inherit" }
)
const cmd = profile === "release" ? "release" : "debug"
await spawnAndWait("yarn", ["--cwd", AUTOMERGE_WASM_PATH, cmd], {
stdio: "inherit",
})
}
}
async function publishAutomergeWasm(registryUrl: string) {
printHeader("Publishing automerge-wasm to verdaccio")
await fsPromises.rm(
path.join(VERDACCIO_DB_PATH, "@automerge/automerge-wasm"),
{ recursive: true, force: true }
)
await yarnPublish(registryUrl, AUTOMERGE_WASM_PATH)
}
async function buildAndPublishAutomergeJs(registryUrl: string) {
// Build the js package
printHeader("Building automerge")
await removeExistingAutomerge(AUTOMERGE_JS_PATH)
await removeFromVerdaccio("@automerge/automerge")
await fsPromises.rm(path.join(AUTOMERGE_JS_PATH, "yarn.lock"), {
force: true,
})
await spawnAndWait(
"yarn",
[
"--cwd",
AUTOMERGE_JS_PATH,
"install",
"--registry",
registryUrl,
"--check-files",
],
{ stdio: "inherit" }
)
await spawnAndWait("yarn", ["--cwd", AUTOMERGE_JS_PATH, "build"], {
stdio: "inherit",
})
await yarnPublish(registryUrl, AUTOMERGE_JS_PATH)
}
/**
* A running verdaccio process
*
*/
class VerdaccioProcess {
child: ChildProcess
stdout: Array<Buffer>
stderr: Array<Buffer>
constructor(child: ChildProcess) {
this.child = child
// Collect stdout/stderr otherwise the subprocess gets blocked writing
this.stdout = []
this.stderr = []
this.child.stdout &&
this.child.stdout.on("data", data => this.stdout.push(data))
this.child.stderr &&
this.child.stderr.on("data", data => this.stderr.push(data))
const errCallback = (e: any) => {
console.error("!!!!!!!!!ERROR IN VERDACCIO PROCESS!!!!!!!!!")
console.error(" ", e)
if (this.stdout.length > 0) {
console.log("\n**Verdaccio stdout**")
const stdout = Buffer.concat(this.stdout)
process.stdout.write(stdout)
}
if (this.stderr.length > 0) {
console.log("\n**Verdaccio stderr**")
const stdout = Buffer.concat(this.stderr)
process.stdout.write(stdout)
}
process.exit(-1)
}
this.child.on("error", errCallback)
}
/**
* Spawn a verdaccio process and wait for it to respond successfully to http requests
*
* The returned `VerdaccioProcess` can be used to control the subprocess
*/
static async start() {
const child = spawn(
"yarn",
["verdaccio", "--config", VERDACCIO_CONFIG_PATH],
{ env: { ...process.env, FORCE_COLOR: "true" } }
)
// Forward stdout and stderr whilst waiting for startup to complete
const stdoutCallback = (data: Buffer) => process.stdout.write(data)
const stderrCallback = (data: Buffer) => process.stderr.write(data)
child.stdout && child.stdout.on("data", stdoutCallback)
child.stderr && child.stderr.on("data", stderrCallback)
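// Poll until the registry responds with a 200 so we only hand back a
// handle once verdaccio is actually serving requests (bounded below by
// `withTimeout`).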
const healthCheck = async () => {
while (true) {
try {
const resp = await fetch("http://localhost:4873")
if (resp.status === 200) {
return
} else {
console.log(`Healthcheck failed: bad status ${resp.status}`)
}
} catch (e) {
console.error(`Healthcheck failed: ${e}`)
}
await setTimeout(500)
}
}
await withTimeout(healthCheck(), 10000)
// Stop forwarding stdout/stderr
child.stdout && child.stdout.off("data", stdoutCallback)
child.stderr && child.stderr.off("data", stderrCallback)
return new VerdaccioProcess(child)
}
/**
* Send a SIGKILL to the process and wait for it to stop
*/
async kill() {
this.child.stdout && this.child.stdout.destroy()
this.child.stderr && this.child.stderr.destroy()
this.child.kill()
try {
await withTimeout(once(this.child, "close"), 500)
} catch (e) {
console.error("unable to kill verdaccio subprocess, trying -9")
this.child.kill(9)
await withTimeout(once(this.child, "close"), 500)
}
}
/**
* A promise which resolves if the subprocess exits for some reason
*/
async died(): Promise<number | null> {
const [exit, _signal] = await once(this.child, "exit")
return exit
}
}
function printHeader(header: string) {
console.log("\n===============================")
console.log(` ${header}`)
console.log("===============================")
}
/**
* Removes the automerge, @automerge/automerge-wasm, and @automerge/automerge packages from
* `$packageDir/node_modules`
*
* This is useful to force refreshing a package by use in combination with
* `yarn install --check-files`, which checks if a package is present in
* `node_modules` and if it is not forces a reinstall.
*
* @param packageDir - The directory containing the package.json of the target project
*/
async function removeExistingAutomerge(packageDir: string) {
await fsPromises.rm(path.join(packageDir, "node_modules", "@automerge"), {
recursive: true,
force: true,
})
await fsPromises.rm(path.join(packageDir, "node_modules", "automerge"), {
recursive: true,
force: true,
})
}
type SpawnResult = {
stdout?: Buffer
stderr?: Buffer
}
async function spawnAndWait(
cmd: string,
args: Array<string>,
options: child_process.SpawnOptions
): Promise<SpawnResult> {
const child = spawn(cmd, args, options)
let stdout: Array<Buffer> | null = null
let stderr: Array<Buffer> | null = null
if (child.stdout) {
const outBufs: Array<Buffer> = (stdout = [])
child.stdout.on("data", data => outBufs.push(data))
}
if (child.stderr) {
const errBufs: Array<Buffer> = (stderr = [])
child.stderr.on("data", data => errBufs.push(data))
}
const [exit, _signal] = await once(child, "exit")
if (exit && exit !== 0) {
throw new Error("nonzero exit code")
}
return {
stderr: stderr ? Buffer.concat(stderr) : undefined,
stdout: stdout ? Buffer.concat(stdout) : undefined,
}
}
/**
* Remove a package from the verdaccio registry. This is necessary because we
* often want to _replace_ a version rather than update the version number.
* Obviously this is very bad and verboten in normal circumstances, but the
* whole point here is to be able to test the entire packaging story so it's
* okay I Promise.
*/
async function removeFromVerdaccio(packageName: string) {
await fsPromises.rm(path.join(VERDACCIO_DB_PATH, packageName), {
force: true,
recursive: true,
})
}
async function yarnPublish(registryUrl: string, cwd: string) {
await spawnAndWait(
"yarn",
["--registry", registryUrl, "--cwd", cwd, "publish", "--non-interactive"],
{
stdio: "inherit",
env: {
...process.env,
FORCE_COLOR: "true",
// This is a fake token, it just has to be the right format
npm_config__auth:
"//localhost:4873/:_authToken=Gp2Mgxm4faa/7wp0dMSuRA==",
},
}
)
}
/**
* Wait for a given delay to resolve a promise, throwing an error if the
* promise doesn't resolve with the timeout
*
* @param promise - the promise to wait for @param timeout - the delay in
* milliseconds to wait before throwing
*/
async function withTimeout<T>(
promise: Promise<T>,
timeout: number
): Promise<T> {
type Step = "timed-out" | { result: T }
const timedOut: () => Promise<Step> = async () => {
await setTimeout(timeout)
return "timed-out"
}
const succeeded: () => Promise<Step> = async () => {
const result = await promise
return { result }
}
const result = await Promise.race([timedOut(), succeeded()])
if (result === "timed-out") {
throw new Error("timed out")
} else {
return result.result
}
}

View file

@ -1,23 +0,0 @@
{
"name": "e2e",
"version": "0.0.1",
"description": "",
"main": "index.js",
"scripts": {
"e2e": "ts-node index.ts"
},
"author": "",
"license": "ISC",
"dependencies": {
"@types/node": "^18.7.18",
"cmd-ts": "^0.11.0",
"node-fetch": "^2",
"ts-node": "^10.9.1",
"typed-emitter": "^2.1.0",
"typescript": "^4.8.3",
"verdaccio": "5"
},
"devDependencies": {
"@types/node-fetch": "2.x"
}
}

View file

@ -1,6 +0,0 @@
{
"compilerOptions": {
"types": ["node"]
},
"module": "nodenext"
}

View file

@ -1,25 +0,0 @@
storage: "./verdacciodb"
auth:
htpasswd:
file: ./htpasswd
publish:
allow_offline: true
logs: { type: stdout, format: pretty, level: info }
packages:
"@automerge/automerge-wasm":
access: "$all"
publish: "$all"
"@automerge/automerge":
access: "$all"
publish: "$all"
"*":
access: "$all"
publish: "$all"
proxy: npmjs
"@*/*":
access: "$all"
publish: "$all"
proxy: npmjs
uplinks:
npmjs:
url: https://registry.npmjs.org/

File diff suppressed because it is too large

View file

@ -1 +0,0 @@
node_modules/

View file

@ -1,59 +0,0 @@
# Automerge + `create-react-app`
This is a little fiddly to get working. The problem is that `create-react-app`
hard codes a webpack configuration which does not support WASM modules, which we
require in order to bundle the WASM implementation of automerge. To get around
this we use [`craco`](https://github.com/dilanx/craco) which does some monkey
patching to allow us to modify the webpack config that `create-react-app`
bundles. Then we use a craco plugin called
[`craco-wasm`](https://www.npmjs.com/package/craco-wasm) to perform the
necessary modifications to the webpack config. It should be noted that this is
all quite fragile and ideally you probably don't want to use `create-react-app`
to do this in production.
## Setup
Assuming you have already run `create-react-app` and your working directory is
the project.
### Install craco and craco-wasm
```bash
yarn add craco craco-wasm
```
### Modify `package.json` to use `craco` for scripts
In `package.json` the `scripts` section will look like this:
```json
"scripts": {
"start": "craco start",
"build": "craco build",
"test": "craco test",
"eject": "craco eject"
},
```
Replace that section with:
```json
"scripts": {
"start": "craco start",
"build": "craco build",
"test": "craco test",
"eject": "craco eject"
},
```
### Create `craco.config.js`
In the root of the project add the following contents to `craco.config.js`
```javascript
const cracoWasm = require("craco-wasm")
module.exports = {
plugins: [cracoWasm()],
}
```

View file

@ -1,5 +0,0 @@
const cracoWasm = require("craco-wasm")
module.exports = {
plugins: [cracoWasm()],
}

View file

@ -1,41 +0,0 @@
{
"name": "automerge-create-react-app",
"version": "0.1.0",
"private": true,
"dependencies": {
"@craco/craco": "^7.0.0-alpha.8",
"craco-wasm": "0.0.1",
"@testing-library/jest-dom": "^5.16.5",
"@testing-library/react": "^13.4.0",
"@testing-library/user-event": "^13.5.0",
"@automerge/automerge": "2.0.0-alpha.7",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"react-scripts": "5.0.1",
"web-vitals": "^2.1.4"
},
"scripts": {
"start": "craco start",
"build": "craco build",
"test": "craco test",
"eject": "craco eject"
},
"eslintConfig": {
"extends": [
"react-app",
"react-app/jest"
]
},
"browserslist": {
"production": [
">0.2%",
"not dead",
"not op_mini all"
],
"development": [
"last 1 chrome version",
"last 1 firefox version",
"last 1 safari version"
]
}
}

Binary file not shown.

Before

Width:  |  Height:  |  Size: 3.8 KiB

View file

@ -1,43 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<link rel="icon" href="%PUBLIC_URL%/favicon.ico" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<meta name="theme-color" content="#000000" />
<meta
name="description"
content="Web site created using create-react-app"
/>
<link rel="apple-touch-icon" href="%PUBLIC_URL%/logo192.png" />
<!--
manifest.json provides metadata used when your web app is installed on a
user's mobile device or desktop. See https://developers.google.com/web/fundamentals/web-app-manifest/
-->
<link rel="manifest" href="%PUBLIC_URL%/manifest.json" />
<!--
Notice the use of %PUBLIC_URL% in the tags above.
It will be replaced with the URL of the `public` folder during the build.
Only files inside the `public` folder can be referenced from the HTML.
Unlike "/favicon.ico" or "favicon.ico", "%PUBLIC_URL%/favicon.ico" will
work correctly both with client-side routing and a non-root public URL.
Learn how to configure a non-root public URL by running `npm run build`.
-->
<title>React App</title>
</head>
<body>
<noscript>You need to enable JavaScript to run this app.</noscript>
<div id="root"></div>
<!--
This HTML file is a template.
If you open it directly in the browser, you will see an empty page.
You can add webfonts, meta tags, or analytics to this file.
The build step will place the bundled scripts into the <body> tag.
To begin the development, run `npm start` or `yarn start`.
To create a production bundle, use `npm run build` or `yarn build`.
-->
</body>
</html>

Binary file not shown.

Before

Width:  |  Height:  |  Size: 5.2 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 9.4 KiB

View file

@ -1,25 +0,0 @@
{
"short_name": "React App",
"name": "Create React App Sample",
"icons": [
{
"src": "favicon.ico",
"sizes": "64x64 32x32 24x24 16x16",
"type": "image/x-icon"
},
{
"src": "logo192.png",
"type": "image/png",
"sizes": "192x192"
},
{
"src": "logo512.png",
"type": "image/png",
"sizes": "512x512"
}
],
"start_url": ".",
"display": "standalone",
"theme_color": "#000000",
"background_color": "#ffffff"
}

View file

@ -1,3 +0,0 @@
# https://www.robotstxt.org/robotstxt.html
User-agent: *
Disallow:

View file

@ -1,38 +0,0 @@
.App {
text-align: center;
}
.App-logo {
height: 40vmin;
pointer-events: none;
}
@media (prefers-reduced-motion: no-preference) {
.App-logo {
animation: App-logo-spin infinite 20s linear;
}
}
.App-header {
background-color: #282c34;
min-height: 100vh;
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
font-size: calc(10px + 2vmin);
color: white;
}
.App-link {
color: #61dafb;
}
@keyframes App-logo-spin {
from {
transform: rotate(0deg);
}
to {
transform: rotate(360deg);
}
}

View file

@ -1,20 +0,0 @@
import * as Automerge from "@automerge/automerge"
import logo from "./logo.svg"
import "./App.css"
let doc = Automerge.init()
doc = Automerge.change(doc, d => (d.hello = "from automerge"))
const result = JSON.stringify(doc)
function App() {
return (
<div className="App">
<header className="App-header">
<img src={logo} className="App-logo" alt="logo" />
<p>{result}</p>
</header>
</div>
)
}
export default App

View file

@ -1,8 +0,0 @@
import { render, screen } from "@testing-library/react"
import App from "./App"
test("renders learn react link", () => {
render(<App />)
const linkElement = screen.getByText(/learn react/i)
expect(linkElement).toBeInTheDocument()
})

View file

@ -1,13 +0,0 @@
body {
margin: 0;
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Roboto", "Oxygen",
"Ubuntu", "Cantarell", "Fira Sans", "Droid Sans", "Helvetica Neue",
sans-serif;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
}
code {
font-family: source-code-pro, Menlo, Monaco, Consolas, "Courier New",
monospace;
}

View file

@ -1,17 +0,0 @@
import React from "react"
import ReactDOM from "react-dom/client"
import "./index.css"
import App from "./App"
import reportWebVitals from "./reportWebVitals"
const root = ReactDOM.createRoot(document.getElementById("root"))
root.render(
<React.StrictMode>
<App />
</React.StrictMode>
)
// If you want to start measuring performance in your app, pass a function
// to log results (for example: reportWebVitals(console.log))
// or send to an analytics endpoint. Learn more: https://bit.ly/CRA-vitals
reportWebVitals()

View file

@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 841.9 595.3"><g fill="#61DAFB"><path d="M666.3 296.5c0-32.5-40.7-63.3-103.1-82.4 14.4-63.6 8-114.2-20.2-130.4-6.5-3.8-14.1-5.6-22.4-5.6v22.3c4.6 0 8.3.9 11.4 2.6 13.6 7.8 19.5 37.5 14.9 75.7-1.1 9.4-2.9 19.3-5.1 29.4-19.6-4.8-41-8.5-63.5-10.9-13.5-18.5-27.5-35.3-41.6-50 32.6-30.3 63.2-46.9 84-46.9V78c-27.5 0-63.5 19.6-99.9 53.6-36.4-33.8-72.4-53.2-99.9-53.2v22.3c20.7 0 51.4 16.5 84 46.6-14 14.7-28 31.4-41.3 49.9-22.6 2.4-44 6.1-63.6 11-2.3-10-4-19.7-5.2-29-4.7-38.2 1.1-67.9 14.6-75.8 3-1.8 6.9-2.6 11.5-2.6V78.5c-8.4 0-16 1.8-22.6 5.6-28.1 16.2-34.4 66.7-19.9 130.1-62.2 19.2-102.7 49.9-102.7 82.3 0 32.5 40.7 63.3 103.1 82.4-14.4 63.6-8 114.2 20.2 130.4 6.5 3.8 14.1 5.6 22.5 5.6 27.5 0 63.5-19.6 99.9-53.6 36.4 33.8 72.4 53.2 99.9 53.2 8.4 0 16-1.8 22.6-5.6 28.1-16.2 34.4-66.7 19.9-130.1 62-19.1 102.5-49.9 102.5-82.3zm-130.2-66.7c-3.7 12.9-8.3 26.2-13.5 39.5-4.1-8-8.4-16-13.1-24-4.6-8-9.5-15.8-14.4-23.4 14.2 2.1 27.9 4.7 41 7.9zm-45.8 106.5c-7.8 13.5-15.8 26.3-24.1 38.2-14.9 1.3-30 2-45.2 2-15.1 0-30.2-.7-45-1.9-8.3-11.9-16.4-24.6-24.2-38-7.6-13.1-14.5-26.4-20.8-39.8 6.2-13.4 13.2-26.8 20.7-39.9 7.8-13.5 15.8-26.3 24.1-38.2 14.9-1.3 30-2 45.2-2 15.1 0 30.2.7 45 1.9 8.3 11.9 16.4 24.6 24.2 38 7.6 13.1 14.5 26.4 20.8 39.8-6.3 13.4-13.2 26.8-20.7 39.9zm32.3-13c5.4 13.4 10 26.8 13.8 39.8-13.1 3.2-26.9 5.9-41.2 8 4.9-7.7 9.8-15.6 14.4-23.7 4.6-8 8.9-16.1 13-24.1zM421.2 430c-9.3-9.6-18.6-20.3-27.8-32 9 .4 18.2.7 27.5.7 9.4 0 18.7-.2 27.8-.7-9 11.7-18.3 22.4-27.5 32zm-74.4-58.9c-14.2-2.1-27.9-4.7-41-7.9 3.7-12.9 8.3-26.2 13.5-39.5 4.1 8 8.4 16 13.1 24 4.7 8 9.5 15.8 14.4 23.4zM420.7 163c9.3 9.6 18.6 20.3 27.8 32-9-.4-18.2-.7-27.5-.7-9.4 0-18.7.2-27.8.7 9-11.7 18.3-22.4 27.5-32zm-74 58.9c-4.9 7.7-9.8 15.6-14.4 23.7-4.6 8-8.9 16-13 24-5.4-13.4-10-26.8-13.8-39.8 13.1-3.1 26.9-5.8 41.2-7.9zm-90.5 125.2c-35.4-15.1-58.3-34.9-58.3-50.6 0-15.7 22.9-35.6 58.3-50.6 8.6-3.7 18-7 27.7-10.1 5.7 19.6 13.2 40 22.5 60.9-9.2 20.8-16.6 41.1-22.2 60.6-9.9-3.1-19.3-6.5-28-10.2zM310 490c-13.6-7.8-19.5-37.5-14.9-75.7 1.1-9.4 2.9-19.3 5.1-29.4 19.6 4.8 41 8.5 63.5 10.9 13.5 18.5 27.5 35.3 41.6 50-32.6 30.3-63.2 46.9-84 46.9-4.5-.1-8.3-1-11.3-2.7zm237.2-76.2c4.7 38.2-1.1 67.9-14.6 75.8-3 1.8-6.9 2.6-11.5 2.6-20.7 0-51.4-16.5-84-46.6 14-14.7 28-31.4 41.3-49.9 22.6-2.4 44-6.1 63.6-11 2.3 10.1 4.1 19.8 5.2 29.1zm38.5-66.7c-8.6 3.7-18 7-27.7 10.1-5.7-19.6-13.2-40-22.5-60.9 9.2-20.8 16.6-41.1 22.2-60.6 9.9 3.1 19.3 6.5 28.1 10.2 35.4 15.1 58.3 34.9 58.3 50.6-.1 15.7-23 35.6-58.4 50.6zM320.8 78.4z"/><circle cx="420.9" cy="296.5" r="45.7"/><path d="M520.5 78.1z"/></g></svg>

Before

Width:  |  Height:  |  Size: 2.6 KiB

View file

@ -1,13 +0,0 @@
const reportWebVitals = onPerfEntry => {
if (onPerfEntry && onPerfEntry instanceof Function) {
import("web-vitals").then(({ getCLS, getFID, getFCP, getLCP, getTTFB }) => {
getCLS(onPerfEntry)
getFID(onPerfEntry)
getFCP(onPerfEntry)
getLCP(onPerfEntry)
getTTFB(onPerfEntry)
})
}
}
export default reportWebVitals

View file

@ -1,5 +0,0 @@
// jest-dom adds custom jest matchers for asserting on DOM nodes.
// allows you to do things like:
// expect(element).toHaveTextContent(/react/i)
// learn more: https://github.com/testing-library/jest-dom
import "@testing-library/jest-dom"

File diff suppressed because it is too large

View file

@ -1,2 +0,0 @@
node_modules/
yarn.lock

View file

@ -1,54 +0,0 @@
# Vite + Automerge
There are three things you need to do to get WASM packaging working with vite:
1. Install the top level await plugin
2. Install the `vite-plugin-wasm` plugin
3. Exclude `automerge-wasm` from the optimizer
First, install the packages we need:
```bash
yarn add vite-plugin-top-level-await
yarn add vite-plugin-wasm
```
In `vite.config.js`
```javascript
import { defineConfig } from "vite"
import wasm from "vite-plugin-wasm"
import topLevelAwait from "vite-plugin-top-level-await"
export default defineConfig({
plugins: [topLevelAwait(), wasm()],
// This is only necessary if you are using `SharedWorker` or `WebWorker`, as
// documented in https://vitejs.dev/guide/features.html#import-with-constructors
worker: {
format: "es",
plugins: [topLevelAwait(), wasm()],
},
optimizeDeps: {
// This is necessary because otherwise `vite dev` includes two separate
// versions of the JS wrapper. This causes problems because the JS
// wrapper has a module level variable to track JS side heap
// allocations, initializing this twice causes horrible breakage
exclude: ["@automerge/automerge-wasm"],
},
})
```
Now start the dev server:
```bash
yarn vite
```
## Running the example
```bash
yarn install
yarn dev
```

View file

@ -1,13 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Vite + TS</title>
</head>
<body>
<div id="app"></div>
<script type="module" src="/src/main.ts"></script>
</body>
</html>

View file

@ -1,15 +0,0 @@
import * as Automerge from "/node_modules/.vite/deps/automerge-js.js?v=6e973f28"
console.log(Automerge)
let doc = Automerge.init()
doc = Automerge.change(doc, d => (d.hello = "from automerge-js"))
console.log(doc)
const result = JSON.stringify(doc)
if (typeof document !== "undefined") {
const element = document.createElement("div")
element.innerHTML = JSON.stringify(result)
document.body.appendChild(element)
} else {
console.log("node:", result)
}
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbIi9ob21lL2FsZXgvUHJvamVjdHMvYXV0b21lcmdlL2F1dG9tZXJnZS1ycy9hdXRvbWVyZ2UtanMvZXhhbXBsZXMvdml0ZS9zcmMvbWFpbi50cyJdLCJzb3VyY2VzQ29udGVudCI6WyJpbXBvcnQgKiBhcyBBdXRvbWVyZ2UgZnJvbSBcImF1dG9tZXJnZS1qc1wiXG5cbi8vIGhlbGxvIHdvcmxkIGNvZGUgdGhhdCB3aWxsIHJ1biBjb3JyZWN0bHkgb24gd2ViIG9yIG5vZGVcblxuY29uc29sZS5sb2coQXV0b21lcmdlKVxubGV0IGRvYyA9IEF1dG9tZXJnZS5pbml0KClcbmRvYyA9IEF1dG9tZXJnZS5jaGFuZ2UoZG9jLCAoZDogYW55KSA9PiBkLmhlbGxvID0gXCJmcm9tIGF1dG9tZXJnZS1qc1wiKVxuY29uc29sZS5sb2coZG9jKVxuY29uc3QgcmVzdWx0ID0gSlNPTi5zdHJpbmdpZnkoZG9jKVxuXG5pZiAodHlwZW9mIGRvY3VtZW50ICE9PSAndW5kZWZpbmVkJykge1xuICAgIC8vIGJyb3dzZXJcbiAgICBjb25zdCBlbGVtZW50ID0gZG9jdW1lbnQuY3JlYXRlRWxlbWVudCgnZGl2Jyk7XG4gICAgZWxlbWVudC5pbm5lckhUTUwgPSBKU09OLnN0cmluZ2lmeShyZXN1bHQpXG4gICAgZG9jdW1lbnQuYm9keS5hcHBlbmRDaGlsZChlbGVtZW50KTtcbn0gZWxzZSB7XG4gICAgLy8gc2VydmVyXG4gICAgY29uc29sZS5sb2coXCJub2RlOlwiLCByZXN1bHQpXG59XG5cbiJdLCJtYXBwaW5ncyI6IkFBQUEsWUFBWSxlQUFlO0FBSTNCLFFBQVEsSUFBSSxTQUFTO0FBQ3JCLElBQUksTUFBTSxVQUFVLEtBQUs7QUFDekIsTUFBTSxVQUFVLE9BQU8sS0FBSyxDQUFDLE1BQVcsRUFBRSxRQUFRLG1CQUFtQjtBQUNyRSxRQUFRLElBQUksR0FBRztBQUNmLE1BQU0sU0FBUyxLQUFLLFVBQVUsR0FBRztBQUVqQyxJQUFJLE9BQU8sYUFBYSxhQUFhO0FBRWpDLFFBQU0sVUFBVSxTQUFTLGNBQWMsS0FBSztBQUM1QyxVQUFRLFlBQVksS0FBSyxVQUFVLE1BQU07QUFDekMsV0FBUyxLQUFLLFlBQVksT0FBTztBQUNyQyxPQUFPO0FBRUgsVUFBUSxJQUFJLFNBQVMsTUFBTTtBQUMvQjsiLCJuYW1lcyI6W119

View file

@ -1,20 +0,0 @@
{
"name": "autovite",
"private": true,
"version": "0.0.0",
"type": "module",
"scripts": {
"dev": "vite",
"build": "tsc && vite build",
"preview": "vite preview"
},
"dependencies": {
"@automerge/automerge": "2.0.0-alpha.7"
},
"devDependencies": {
"typescript": "^4.6.4",
"vite": "^3.1.0",
"vite-plugin-top-level-await": "^1.1.1",
"vite-plugin-wasm": "^2.1.0"
}
}

View file

@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="31.88" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 257"><defs><linearGradient id="IconifyId1813088fe1fbc01fb466" x1="-.828%" x2="57.636%" y1="7.652%" y2="78.411%"><stop offset="0%" stop-color="#41D1FF"></stop><stop offset="100%" stop-color="#BD34FE"></stop></linearGradient><linearGradient id="IconifyId1813088fe1fbc01fb467" x1="43.376%" x2="50.316%" y1="2.242%" y2="89.03%"><stop offset="0%" stop-color="#FFEA83"></stop><stop offset="8.333%" stop-color="#FFDD35"></stop><stop offset="100%" stop-color="#FFA800"></stop></linearGradient></defs><path fill="url(#IconifyId1813088fe1fbc01fb466)" d="M255.153 37.938L134.897 252.976c-2.483 4.44-8.862 4.466-11.382.048L.875 37.958c-2.746-4.814 1.371-10.646 6.827-9.67l120.385 21.517a6.537 6.537 0 0 0 2.322-.004l117.867-21.483c5.438-.991 9.574 4.796 6.877 9.62Z"></path><path fill="url(#IconifyId1813088fe1fbc01fb467)" d="M185.432.063L96.44 17.501a3.268 3.268 0 0 0-2.634 3.014l-5.474 92.456a3.268 3.268 0 0 0 3.997 3.378l24.777-5.718c2.318-.535 4.413 1.507 3.936 3.838l-7.361 36.047c-.495 2.426 1.782 4.5 4.151 3.78l15.304-4.649c2.372-.72 4.652 1.36 4.15 3.788l-11.698 56.621c-.732 3.542 3.979 5.473 5.943 2.437l1.313-2.028l72.516-144.72c1.215-2.423-.88-5.186-3.54-4.672l-25.505 4.922c-2.396.462-4.435-1.77-3.759-4.114l16.646-57.705c.677-2.35-1.37-4.583-3.769-4.113Z"></path></svg>


View file

@ -1,9 +0,0 @@
export function setupCounter(element: HTMLButtonElement) {
let counter = 0
const setCounter = (count: number) => {
counter = count
element.innerHTML = `count is ${counter}`
}
element.addEventListener("click", () => setCounter(++counter))
setCounter(0)
}
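For context, a hedged usage sketch of `setupCounter` (the import path and the `#counter` element id are assumptions following the stock Vite template, not anything shown above -- the example's `index.html` only contains an empty `#app` div):

```typescript
import { setupCounter } from "./counter"

// Wire the counter to a button; assumes the page contains
// <button id="counter"></button>.
const button = document.querySelector<HTMLButtonElement>("#counter")
if (button) {
  setupCounter(button)
}
```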

View file

@ -1,17 +0,0 @@
import * as Automerge from "@automerge/automerge"
// hello world code that will run correctly on web or node
let doc = Automerge.init()
doc = Automerge.change(doc, (d: any) => (d.hello = "from automerge"))
const result = JSON.stringify(doc)
if (typeof document !== "undefined") {
// browser
const element = document.createElement("div")
element.innerHTML = JSON.stringify(result)
document.body.appendChild(element)
} else {
// server
console.log("node:", result)
}
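One possible refinement, not part of the example: `Automerge.init` and `Automerge.change` accept a type parameter, so the `(d: any)` cast can be avoided by declaring the document's shape up front (a sketch; the `DocShape` name is invented):

```typescript
import * as Automerge from "@automerge/automerge"

// Declaring the document shape lets TypeScript check the mutations
// made inside `change`, instead of falling back to `any`.
type DocShape = { hello?: string }

let typedDoc = Automerge.init<DocShape>()
typedDoc = Automerge.change(typedDoc, d => {
  d.hello = "from automerge"
})
console.log(JSON.stringify(typedDoc))
```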

View file

@ -1,97 +0,0 @@
:root {
font-family: Inter, Avenir, Helvetica, Arial, sans-serif;
font-size: 16px;
line-height: 24px;
font-weight: 400;
color-scheme: light dark;
color: rgba(255, 255, 255, 0.87);
background-color: #242424;
font-synthesis: none;
text-rendering: optimizeLegibility;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
-webkit-text-size-adjust: 100%;
}
a {
font-weight: 500;
color: #646cff;
text-decoration: inherit;
}
a:hover {
color: #535bf2;
}
body {
margin: 0;
display: flex;
place-items: center;
min-width: 320px;
min-height: 100vh;
}
h1 {
font-size: 3.2em;
line-height: 1.1;
}
#app {
max-width: 1280px;
margin: 0 auto;
padding: 2rem;
text-align: center;
}
.logo {
height: 6em;
padding: 1.5em;
will-change: filter;
}
.logo:hover {
filter: drop-shadow(0 0 2em #646cffaa);
}
.logo.vanilla:hover {
filter: drop-shadow(0 0 2em #3178c6aa);
}
.card {
padding: 2em;
}
.read-the-docs {
color: #888;
}
button {
border-radius: 8px;
border: 1px solid transparent;
padding: 0.6em 1.2em;
font-size: 1em;
font-weight: 500;
font-family: inherit;
background-color: #1a1a1a;
cursor: pointer;
transition: border-color 0.25s;
}
button:hover {
border-color: #646cff;
}
button:focus,
button:focus-visible {
outline: 4px auto -webkit-focus-ring-color;
}
@media (prefers-color-scheme: light) {
:root {
color: #213547;
background-color: #ffffff;
}
a:hover {
color: #747bff;
}
button {
background-color: #f9f9f9;
}
}

Some files were not shown because too many files have changed in this diff.