Compare commits


1 commit

Author SHA1 Message Date
actions
2dbe14346c Add deno release files 2023-02-03 16:52:17 +00:00
9 changed files with 2380 additions and 460 deletions


@@ -1,17 +0,0 @@
name: Advisories
on:
  schedule:
    - cron: '0 18 * * *'
jobs:
  cargo-deny:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        checks:
          - advisories
          - bans licenses sources
    steps:
      - uses: actions/checkout@v2
      - uses: EmbarkStudios/cargo-deny-action@v1
        with:
          command: check ${{ matrix.checks }}


@@ -1,177 +0,0 @@
name: CI
on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main
jobs:
  fmt:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: 1.66.0
          default: true
          components: rustfmt
      - uses: Swatinem/rust-cache@v1
      - run: ./scripts/ci/fmt
        shell: bash
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: 1.66.0
          default: true
          components: clippy
      - uses: Swatinem/rust-cache@v1
      - run: ./scripts/ci/lint
        shell: bash
  docs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: 1.66.0
          default: true
      - uses: Swatinem/rust-cache@v1
      - name: Build rust docs
        run: ./scripts/ci/rust-docs
        shell: bash
      - name: Install doxygen
        run: sudo apt-get install -y doxygen
        shell: bash
  cargo-deny:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        checks:
          - advisories
          - bans licenses sources
    continue-on-error: ${{ matrix.checks == 'advisories' }}
    steps:
      - uses: actions/checkout@v2
      - uses: EmbarkStudios/cargo-deny-action@v1
        with:
          arguments: '--manifest-path ./rust/Cargo.toml'
          command: check ${{ matrix.checks }}
  wasm_tests:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Install wasm-bindgen-cli
        run: cargo install wasm-bindgen-cli wasm-opt
      - name: Install wasm32 target
        run: rustup target add wasm32-unknown-unknown
      - name: run tests
        run: ./scripts/ci/wasm_tests
  deno_tests:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: denoland/setup-deno@v1
        with:
          deno-version: v1.x
      - name: Install wasm-bindgen-cli
        run: cargo install wasm-bindgen-cli wasm-opt
      - name: Install wasm32 target
        run: rustup target add wasm32-unknown-unknown
      - name: run tests
        run: ./scripts/ci/deno_tests
  js_fmt:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: install
        run: yarn global add prettier
      - name: format
        run: prettier -c javascript/.prettierrc javascript
  js_tests:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Install wasm-bindgen-cli
        run: cargo install wasm-bindgen-cli wasm-opt
      - name: Install wasm32 target
        run: rustup target add wasm32-unknown-unknown
      - name: run tests
        run: ./scripts/ci/js_tests
  cmake_build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: 1.66.0
          default: true
      - uses: Swatinem/rust-cache@v1
      - name: Install CMocka
        run: sudo apt-get install -y libcmocka-dev
      - name: Install/update CMake
        uses: jwlawson/actions-setup-cmake@v1.12
        with:
          cmake-version: latest
      - name: Build and test C bindings
        run: ./scripts/ci/cmake-build Release Static
        shell: bash
  linux:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        toolchain:
          - 1.66.0
          - nightly
    continue-on-error: ${{ matrix.toolchain == 'nightly' }}
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: ${{ matrix.toolchain }}
          default: true
      - uses: Swatinem/rust-cache@v1
      - run: ./scripts/ci/build-test
        shell: bash
  macos:
    runs-on: macos-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: 1.66.0
          default: true
      - uses: Swatinem/rust-cache@v1
      - run: ./scripts/ci/build-test
        shell: bash
  windows:
    runs-on: windows-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: 1.66.0
          default: true
      - uses: Swatinem/rust-cache@v1
      - run: ./scripts/ci/build-test
        shell: bash


@@ -1,52 +0,0 @@
on:
  push:
    branches:
      - main
name: Documentation
jobs:
  deploy-docs:
    concurrency: deploy-docs
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Toolchain
        uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: stable
          override: true
      - name: Cache
        uses: Swatinem/rust-cache@v1
      - name: Clean docs dir
        run: rm -rf docs
        shell: bash
      - name: Clean Rust docs dir
        uses: actions-rs/cargo@v1
        with:
          command: clean
          args: --manifest-path ./rust/Cargo.toml --doc
      - name: Build Rust docs
        uses: actions-rs/cargo@v1
        with:
          command: doc
          args: --manifest-path ./rust/Cargo.toml --workspace --all-features --no-deps
      - name: Move Rust docs
        run: mkdir -p docs && mv rust/target/doc/* docs/.
        shell: bash
      - name: Configure root page
        run: echo '<meta http-equiv="refresh" content="0; url=automerge">' > docs/index.html
      - name: Deploy docs
        uses: peaceiris/actions-gh-pages@v3
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: ./docs


@@ -1,214 +0,0 @@
name: Release
on:
  push:
    branches:
      - main
jobs:
  check_if_wasm_version_upgraded:
    name: Check if WASM version has been upgraded
    runs-on: ubuntu-latest
    outputs:
      wasm_version: ${{ steps.version-updated.outputs.current-package-version }}
      wasm_has_updated: ${{ steps.version-updated.outputs.has-updated }}
    steps:
      - uses: JiPaix/package-json-updated-action@v1.0.5
        id: version-updated
        with:
          path: rust/automerge-wasm/package.json
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  publish-wasm:
    name: Publish WASM package
    runs-on: ubuntu-latest
    needs:
      - check_if_wasm_version_upgraded
    # We create release only if the version in the package.json has been upgraded
    if: needs.check_if_wasm_version_upgraded.outputs.wasm_has_updated == 'true'
    steps:
      - uses: actions/setup-node@v3
        with:
          node-version: '16.x'
          registry-url: 'https://registry.npmjs.org'
      - uses: denoland/setup-deno@v1
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
          ref: ${{ github.ref }}
      - name: Get rid of local github workflows
        run: rm -r .github/workflows
      - name: Remove tmp_branch if it exists
        run: git push origin :tmp_branch || true
      - run: git checkout -b tmp_branch
      - name: Install wasm-bindgen-cli
        run: cargo install wasm-bindgen-cli wasm-opt
      - name: Install wasm32 target
        run: rustup target add wasm32-unknown-unknown
      - name: run wasm js tests
        id: wasm_js_tests
        run: ./scripts/ci/wasm_tests
      - name: run wasm deno tests
        id: wasm_deno_tests
        run: ./scripts/ci/deno_tests
      - name: build release
        id: build_release
        run: |
          npm --prefix $GITHUB_WORKSPACE/rust/automerge-wasm run release
      - name: Collate deno release files
        if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success'
        run: |
          mkdir $GITHUB_WORKSPACE/deno_wasm_dist
          cp $GITHUB_WORKSPACE/rust/automerge-wasm/deno/* $GITHUB_WORKSPACE/deno_wasm_dist
          cp $GITHUB_WORKSPACE/rust/automerge-wasm/index.d.ts $GITHUB_WORKSPACE/deno_wasm_dist
          cp $GITHUB_WORKSPACE/rust/automerge-wasm/README.md $GITHUB_WORKSPACE/deno_wasm_dist
          cp $GITHUB_WORKSPACE/rust/automerge-wasm/LICENSE $GITHUB_WORKSPACE/deno_wasm_dist
          sed -i '1i /// <reference types="./index.d.ts" />' $GITHUB_WORKSPACE/deno_wasm_dist/automerge_wasm.js
      - name: Create npm release
        if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success'
        run: |
          if [ "$(npm --prefix $GITHUB_WORKSPACE/rust/automerge-wasm show . version)" = "$VERSION" ]; then
            echo "This version is already published"
            exit 0
          fi
          EXTRA_ARGS="--access public"
          if [[ $VERSION == *"alpha."* ]] || [[ $VERSION == *"beta."* ]] || [[ $VERSION == *"rc."* ]]; then
            echo "Is pre-release version"
            EXTRA_ARGS="$EXTRA_ARGS --tag next"
          fi
          if [ "$NODE_AUTH_TOKEN" = "" ]; then
            echo "Can't publish on NPM, You need a NPM_TOKEN secret."
            false
          fi
          npm publish $GITHUB_WORKSPACE/rust/automerge-wasm $EXTRA_ARGS
        env:
          NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
          VERSION: ${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
      - name: Commit wasm deno release files
        run: |
          git config --global user.name "actions"
          git config --global user.email actions@github.com
          git add $GITHUB_WORKSPACE/deno_wasm_dist
          git commit -am "Add deno release files"
          git push origin tmp_branch
      - name: Tag wasm release
        if: steps.wasm_js_tests.outcome == 'success' && steps.wasm_deno_tests.outcome == 'success'
        uses: softprops/action-gh-release@v1
        with:
          name: Automerge Wasm v${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
          tag_name: js/automerge-wasm-${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
          target_commitish: tmp_branch
          generate_release_notes: false
          draft: false
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Remove tmp_branch
        run: git push origin :tmp_branch
  check_if_js_version_upgraded:
    name: Check if JS version has been upgraded
    runs-on: ubuntu-latest
    outputs:
      js_version: ${{ steps.version-updated.outputs.current-package-version }}
      js_has_updated: ${{ steps.version-updated.outputs.has-updated }}
    steps:
      - uses: JiPaix/package-json-updated-action@v1.0.5
        id: version-updated
        with:
          path: javascript/package.json
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  publish-js:
    name: Publish JS package
    runs-on: ubuntu-latest
    needs:
      - check_if_js_version_upgraded
      - check_if_wasm_version_upgraded
      - publish-wasm
    # We create release only if the version in the package.json has been upgraded and after the WASM release
    if: |
      (always() && ! cancelled()) &&
      (needs.publish-wasm.result == 'success' || needs.publish-wasm.result == 'skipped') &&
      needs.check_if_js_version_upgraded.outputs.js_has_updated == 'true'
    steps:
      - uses: actions/setup-node@v3
        with:
          node-version: '16.x'
          registry-url: 'https://registry.npmjs.org'
      - uses: denoland/setup-deno@v1
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
          ref: ${{ github.ref }}
      - name: Get rid of local github workflows
        run: rm -r .github/workflows
      - name: Remove js_tmp_branch if it exists
        run: git push origin :js_tmp_branch || true
      - run: git checkout -b js_tmp_branch
      - name: check js formatting
        run: |
          yarn global add prettier
          prettier -c javascript/.prettierrc javascript
      - name: run js tests
        id: js_tests
        run: |
          cargo install wasm-bindgen-cli wasm-opt
          rustup target add wasm32-unknown-unknown
          ./scripts/ci/js_tests
      - name: build js release
        id: build_release
        run: |
          npm --prefix $GITHUB_WORKSPACE/javascript run build
      - name: build js deno release
        id: build_deno_release
        run: |
          VERSION=$WASM_VERSION npm --prefix $GITHUB_WORKSPACE/javascript run deno:build
        env:
          WASM_VERSION: ${{ needs.check_if_wasm_version_upgraded.outputs.wasm_version }}
      - name: run deno tests
        id: deno_tests
        run: |
          npm --prefix $GITHUB_WORKSPACE/javascript run deno:test
      - name: Collate deno release files
        if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success'
        run: |
          mkdir $GITHUB_WORKSPACE/deno_js_dist
          cp $GITHUB_WORKSPACE/javascript/deno_dist/* $GITHUB_WORKSPACE/deno_js_dist
      - name: Create npm release
        if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success'
        run: |
          if [ "$(npm --prefix $GITHUB_WORKSPACE/javascript show . version)" = "$VERSION" ]; then
            echo "This version is already published"
            exit 0
          fi
          EXTRA_ARGS="--access public"
          if [[ $VERSION == *"alpha."* ]] || [[ $VERSION == *"beta."* ]] || [[ $VERSION == *"rc."* ]]; then
            echo "Is pre-release version"
            EXTRA_ARGS="$EXTRA_ARGS --tag next"
          fi
          if [ "$NODE_AUTH_TOKEN" = "" ]; then
            echo "Can't publish on NPM, You need a NPM_TOKEN secret."
            false
          fi
          npm publish $GITHUB_WORKSPACE/javascript $EXTRA_ARGS
        env:
          NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
          VERSION: ${{ needs.check_if_js_version_upgraded.outputs.js_version }}
      - name: Commit js deno release files
        run: |
          git config --global user.name "actions"
          git config --global user.email actions@github.com
          git add $GITHUB_WORKSPACE/deno_js_dist
          git commit -am "Add deno js release files"
          git push origin js_tmp_branch
      - name: Tag JS release
        if: steps.js_tests.outcome == 'success' && steps.deno_tests.outcome == 'success'
        uses: softprops/action-gh-release@v1
        with:
          name: Automerge v${{ needs.check_if_js_version_upgraded.outputs.js_version }}
          tag_name: js/automerge-${{ needs.check_if_js_version_upgraded.outputs.js_version }}
          target_commitish: js_tmp_branch
          generate_release_notes: false
          draft: false
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Remove js_tmp_branch
        run: git push origin :js_tmp_branch

deno_wasm_dist/LICENSE (new file, 10 lines)

@@ -0,0 +1,10 @@
MIT License
Copyright 2022, Ink & Switch LLC
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

deno_wasm_dist/README.md (new file, 469 lines)

@@ -0,0 +1,469 @@
## Automerge WASM Low Level Interface
This package is a low level interface to the [automerge rust](https://github.com/automerge/automerge-rs/tree/experiment) CRDT. The API is intended to be as "close to the metal" as possible, with only a few ease-of-use accommodations. This library is used as the underpinnings for the [Automerge JS wrapper](https://github.com/automerge/automerge-rs/tree/experiment/automerge-js) and can be used as-is or as a basis for another higher level expression of a CRDT.
All example code can be found in `test/readme.ts`
### Why CRDT?
CRDT stands for Conflict-free Replicated Data Type. It is a data structure that offers eventual consistency: multiple actors can write to the document independently, and these edits are then automatically merged into a coherent document that, as much as possible, preserves the intent of the different writers. This allows for novel masterless application design where different components need not have a central coordinating server when altering application state.
### Terminology
The terms Actor, Object Id and Heads are used throughout this documentation. Detailed explanations are in the glossary at the end of this readme, but the most basic definitions are as follows.
An Actor is a unique id that distinguishes a single writer to a document. It can be any hex string.
An Object id uniquely identifies a Map, List or Text object within a document. It can be treated as an opaque string and can be used across documents. This id comes as a string in the form of `{number}@{actor}` - so `"10@aabbcc"` for example. The string `"_root"` or `"/"` can also be used to refer to the document root. These strings are durable and can be used on any descendant or copy of the document that generated them.
Heads refers to a set of hashes that uniquely identifies a point in time in a document's history. Heads are useful for comparing document states or retrieving past states from the document.
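As a minimal sketch of how these three terms surface in the API (the actor id `"aabbcc"` is just an illustrative value):
```javascript
import { create } from "@automerge/automerge-wasm"

// Actor: a hex string identifying this writer
let doc = create("aabbcc")
// Object Id: "{number}@{actor}" - the first object created here would be "1@aabbcc"
let notes = doc.putObject("_root", "notes", "Hello world")
// Heads: the set of change hashes identifying this point in the document's history
let heads = doc.getHeads()
```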
### Automerge Scalar Types
Automerge has many scalar types. Methods like `put()` and `insert()` take an optional data type parameter. Normally the type can be inferred but in some cases, such as telling the difference between int, uint and a counter, it cannot.
These are puts without a data type
```javascript
import { create } from "@automerge/automerge-wasm"
let doc = create()
doc.put("/", "prop1", 100) // int
doc.put("/", "prop2", 3.14) // f64
doc.put("/", "prop3", "hello world")
doc.put("/", "prop4", new Date())
doc.put("/", "prop5", new Uint8Array([1,2,3]))
doc.put("/", "prop6", true)
doc.put("/", "prop7", null)
```
Puts with a data type, and examples of all the supported data types.
While int vs uint vs f64 matters little in javascript, Automerge is a cross platform library where these distinctions matter.
```javascript
import { create } from "@automerge/automerge-wasm"
let doc = create()
doc.put("/", "prop1", 100, "int")
doc.put("/", "prop2", 100, "uint")
doc.put("/", "prop3", 100.5, "f64")
doc.put("/", "prop4", 100, "counter")
doc.put("/", "prop5", 1647531707301, "timestamp")
doc.put("/", "prop6", new Date(), "timestamp")
doc.put("/", "prop7", "hello world", "str")
doc.put("/", "prop8", new Uint8Array([1,2,3]), "bytes")
doc.put("/", "prop9", true, "boolean")
doc.put("/", "prop10", null, "null")
```
### Automerge Object Types
Automerge WASM supports three object types: maps, lists, and text. Maps are key/value stores where the values can be any scalar type or any object type. Lists are numerically indexed sequences of data that can hold any scalar or any object type.
```javascript
import { create } from "@automerge/automerge-wasm"
let doc = create()
// you can create an object by passing in the initial state - if blank pass in `{}`
// the return value is the Object Id
// these functions all return an object id
let config = doc.putObject("/", "config", { align: "left", archived: false, cycles: [10, 19, 21] })
let token = doc.putObject("/", "tokens", {})
// lists can be made with javascript arrays
let birds = doc.putObject("/", "birds", ["bluejay", "penguin", "puffin"])
let bots = doc.putObject("/", "bots", [])
// text is initialized with a string
let notes = doc.putObject("/", "notes", "Hello world!")
```
You can access objects by passing the object id as the first parameter for a call.
```javascript
import { create } from "@automerge/automerge-wasm"
let doc = create()
let config = doc.putObject("/", "config", { align: "left", archived: false, cycles: [10, 19, 21] })
doc.put(config, "align", "right")
// Anywhere Object Ids are being used a path can also be used.
// The following two statements are equivalent:
// get the id then use it
// get returns a single simple javascript value or undefined
// getWithType returns an Array of the datatype plus basic type or null
let id = doc.getWithType("/", "config")
if (id && id[0] === 'map') {
  doc.put(id[1], "align", "right")
}
// use a path instead
doc.put("/config", "align", "right")
```
Using the id directly is always faster (it avoids the internal path-to-id conversion), so it is preferred for performance critical code.
### Maps
Maps are key/value stores. The root object is always a map. The keys are always strings. The values can be any scalar type or any object.
```javascript
let doc = create()
let mymap = doc.putObject("_root", "mymap", { foo: "bar"})
// make a new map with the foo key
doc.put(mymap, "bytes", new Uint8Array([1,2,3]))
// assign a byte array to key `bytes` of the mymap object
let submap = doc.putObject(mymap, "sub", {})
// make a new empty object and assign it to the key `sub` of mymap
doc.keys(mymap) // returns ["bytes","foo","sub"]
doc.materialize("_root") // returns { mymap: { bytes: new Uint8Array([1,2,3]), foo: "bar", sub: {}}}
```
### Lists
Lists are index addressable sets of values. These values can be any scalar or object type. You can manipulate lists with `insert()`, `put()`, `insertObject()`, `putObject()`, `push()`, `pushObject()`, `splice()`, and `delete()`.
```javascript
let doc = create()
let items = doc.putObject("_root", "items", [10,"box"])
// init a new list with two elements
doc.push(items, true) // push `true` to the end of the list
doc.putObject(items, 0, { hello: "world" }) // overwrite the value 10 with an object with a key and value
doc.delete(items, 1) // delete "box"
doc.splice(items, 2, 0, ["bag", "brick"]) // splice in "bag" and "brick" at position 2
doc.insert(items, 0, "bat") // insert "bat" to the beginning of the list
doc.insertObject(items, 1, [1,2]) // insert a list with 2 values at pos 1
doc.materialize(items) // returns [ "bat", [1,2], { hello : "world" }, true, "bag", "brick"]
doc.length(items) // returns 6
```
### Text
Text is a specialized list type intended for modifying a text document. The primary way to interact with a text document is via the `splice()` method. Spliced strings will be indexable by character (important to note for platforms that index by grapheme cluster).
```javascript
let doc = create("aaaaaa")
let notes = doc.putObject("_root", "notes", "Hello world")
doc.splice(notes, 6, 5, "everyone")
doc.text(notes) // returns "Hello everyone"
```
### Tables
Automerge's Table type is currently not implemented.
### Querying Data
When querying maps use the `get()` method with the object in question and the property to query. This method returns the value, or undefined; use `getWithType()` if you also need the data type. The `keys()` method will return all the keys on the object. If you are interested in conflicted values from a merge use `getAll()` instead, which returns an array of all the values rather than just the winner.
```javascript
let doc1 = create("aabbcc")
doc1.put("_root", "key1", "val1")
let key2 = doc1.putObject("_root", "key2", [])
doc1.get("_root", "key1") // returns "val1"
doc1.getWithType("_root", "key2") // returns ["list", "2@aabbcc"]
doc1.keys("_root") // returns ["key1", "key2"]
let doc2 = doc1.fork("ffaaff")
// put a value concurrently
doc1.put("_root","key3","doc1val")
doc2.put("_root","key3","doc2val")
doc1.merge(doc2)
doc1.get("_root","key3") // returns "doc2val"
doc1.getAll("_root","key3") // returns [[ "str", "doc1val"], ["str", "doc2val"]]
```
### Counters
Counters are 64-bit integers that support the increment operation. Frequently different actors will want to increment or decrement a number and have all these changes coalesce into a merged value.
```javascript
let doc1 = create("aaaaaa")
doc1.put("_root", "number", 0)
doc1.put("_root", "total", 0, "counter")
let doc2 = doc1.fork("bbbbbb")
doc2.put("_root", "number", 10)
doc2.increment("_root", "total", 11)
doc1.put("_root", "number", 20)
doc1.increment("_root", "total", 22)
doc1.merge(doc2)
doc1.materialize("_root") // returns { number: 10, total: 33 }
```
### Transactions
Generally speaking you don't need to think about transactions when using Automerge. Normal edits queue up into an in-progress transaction. You can query the number of ops in the current transaction with `pendingOps()`. The transaction will commit automatically on certain calls such as `save()`, `saveIncremental()`, `fork()`, `merge()`, `getHeads()`, `applyChanges()`, `generateSyncMessage()`, and `receiveSyncMessage()`. When the transaction commits the heads of the document change. If you want to roll back all the in-progress ops you can call `doc.rollback()`. If you want to manually commit a transaction in progress you can call `doc.commit()` with an optional commit message and timestamp.
```javascript
let doc = create()
doc.put("_root", "key", "val1")
doc.get("_root", "key") // returns "val1"
doc.pendingOps() // returns 1
doc.rollback()
doc.get("_root", "key") // returns null
doc.pendingOps() // returns 0
doc.put("_root", "key", "val2")
doc.pendingOps() // returns 1
doc.commit("test commit 1")
doc.get("_root", "key") // returns "val2"
doc.pendingOps() // returns 0
```
### Viewing Old Versions of the Document
All query functions can take an optional argument of `heads`, which allows you to query a prior document state. Heads are a set of change hashes that uniquely identify a point in the document history. The `getHeads()` method can retrieve these at any point.
```javascript
let doc = create()
doc.put("_root", "key", "val1")
let heads1 = doc.getHeads()
doc.put("_root", "key", "val2")
let heads2 = doc.getHeads()
doc.put("_root", "key", "val3")
doc.get("_root","key") // returns "val3"
doc.get("_root","key",heads2) // returns "val2"
doc.get("_root","key",heads1) // returns "val1"
doc.get("_root","key",[]) // returns undefined
```
This works for `get()`, `getAll()`, `keys()`, `length()`, `text()`, and `materialize()`.
Queries of old document states are not indexed internally and will be slower than normal access. If you need a fast indexed version of a document at a previous point in time, you can create one with `doc.forkAt(heads, actor?)`.
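For instance, a small sketch of that pattern, assuming the `forkAt(heads, actor?)` signature described above:
```javascript
let doc = create("aaaaaa")
doc.put("_root", "key", "val1")
let heads1 = doc.getHeads()
doc.put("_root", "key", "val2")
// forkAt() builds a fully indexed document as of heads1, so repeated reads at
// that point in history are fast; the fork is assigned a new random actor id
let old = doc.forkAt(heads1)
old.get("_root", "key") // returns "val1"
```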
### Forking and Merging
You can `fork()` a document, which makes an exact copy of it. This assigns a new actor so changes made to the fork can be merged back in with the original. The `forkAt()` method takes a set of heads, allowing you to fork off a document from a previous point in its history. These documents allocate new memory in WASM and need to be freed.
The `merge()` command applies all changes in the argument doc into the calling doc. Therefore if doc a has 1000 changes that doc b lacks and doc b has only 10 changes that doc a lacks, `a.merge(b)` will be much faster than `b.merge(a)`.
```javascript
let doc1 = create()
doc1.put("_root", "key1", "val1")
let doc2 = doc1.fork()
doc1.put("_root", "key2", "val2")
doc2.put("_root", "key3", "val3")
doc1.merge(doc2)
doc1.materialize("_root") // returns { key1: "val1", key2: "val2", key3: "val3" }
doc2.materialize("_root") // returns { key1: "val1", key3: "val3" }
```
Note that calling `a.merge(a)` will produce an unrecoverable error from the wasm-bindgen layer which (as of this writing) there is no workaround for.
### Saving and Loading
Calling `save()` converts the document to a compressed `Uint8Array()` that can be saved to durable storage. This format uses a columnar storage format that compresses away most of the Automerge metadata needed to manage the CRDT state, but does include all of the change history.
If you wish to incrementally update a saved Automerge doc you can call `saveIncremental()` to get a `Uint8Array()` of bytes that can be appended to the file with all the new changes. Note that the `saveIncremental()` bytes are not as compressed as the whole document save, as each chunk has metadata information needed to parse it. It may make sense to periodically perform a new `save()` to get the smallest possible file footprint.
The `load()` function takes a `Uint8Array()` of bytes produced in this way and constructs a new document. The `loadIncremental()` method is available if you wish to consume the result of a `saveIncremental()` with an already instantiated document.
```javascript
import { create, load } from "@automerge/automerge-wasm"
let doc1 = create()
doc1.put("_root", "key1", "value1")
let save1 = doc1.save()
let doc2 = load(save1)
doc2.materialize("_root") // returns { key1: "value1" }
doc1.put("_root", "key2", "value2")
let saveIncremental = doc1.saveIncremental()
let save2 = doc1.save()
let save3 = new Uint8Array([... save1, ... saveIncremental])
// save2 has fewer bytes than save3 but contains the same ops
doc2.loadIncremental(saveIncremental)
let doc3 = load(save2)
let doc4 = load(save3)
doc1.materialize("_root") // returns { key1: "value1", key2: "value2" }
doc2.materialize("_root") // returns { key1: "value1", key2: "value2" }
doc3.materialize("_root") // returns { key1: "value1", key2: "value2" }
doc4.materialize("_root") // returns { key1: "value1", key2: "value2" }
```
One interesting feature of automerge binary saves is that they can be concatenated together in any order and can still be loaded into a coherent merged document.
```javascript
import { load } from "@automerge/automerge-wasm"
import * as fs from "fs"
let file1 = fs.readFileSync("automerge_save_1");
let file2 = fs.readFileSync("automerge_save_2");
let docA = load(file1).merge(load(file2))
let docB = load(Buffer.concat([ file1, file2 ]))
assert.deepEqual(docA.materialize("/"), docB.materialize("/"))
assert.equal(docA.save(), docB.save())
```
### Syncing
When syncing a document the `generateSyncMessage()` and `receiveSyncMessage()` methods will produce and consume sync messages. A sync state object (created by `initSyncState()`) needs to be managed for the duration of the connection; it can be serialized to a `Uint8Array()` with `encodeSyncState()` and restored with `decodeSyncState()` to preserve sync state between connections.
A very simple sync implementation might look like this.
```javascript
import { encodeSyncState, decodeSyncState, initSyncState } from "@automerge/automerge-wasm"
let states = {}
function receiveMessageFromPeer(doc, peer_id, message) {
  let syncState = states[peer_id]
  doc.receiveSyncMessage(syncState, message)
  let reply = doc.generateSyncMessage(syncState)
  if (reply) {
    sendMessage(peer_id, reply)
  }
}
function notifyPeerAboutUpdates(doc, peer_id) {
  let syncState = states[peer_id]
  let message = doc.generateSyncMessage(syncState)
  if (message) {
    sendMessage(peer_id, message)
  }
}
function onDisconnect(peer_id) {
  let state = states[peer_id]
  if (state) {
    saveSyncToStorage(peer_id, encodeSyncState(state))
  }
  delete states[peer_id]
}
function onConnect(peer_id) {
  let state = loadSyncFromStorage(peer_id)
  if (state) {
    states[peer_id] = decodeSyncState(state)
  } else {
    states[peer_id] = initSyncState()
  }
}
```
### Glossary: Actors
Some basic concepts you will need to know to better understand the API are Actors and Object Ids.
Actors are ids that need to be unique to each process writing to a document. This is normally one actor per device, or for a web app, one actor per tab per browser. It can be a uuid, a public key, or a certificate, as your application demands. All that matters is that its bytes are unique. Actors are always expressed in this API as a hex string.
Methods that create new documents will generate random actors automatically - if you wish to supply your own it is always taken as an optional argument. This is true for the following functions.
```javascript
import { create, load } from "@automerge/automerge-wasm"
let doc1 = create() // random actorid
let doc2 = create("aabbccdd")
let doc3 = doc1.fork() // random actorid
let doc4 = doc2.fork("ccdd0011")
let doc5 = load(doc3.save()) // random actorid
let doc6 = load(doc4.save(), "00aabb11")
let actor = doc1.getActor()
```
### Glossary: Object Ids
Object Ids uniquely identify an object within a document. They are represented as strings in the format of `{counter}@{actor}`. The root object is a special case and can be referred to as `_root`. The counter is an ever increasing integer, starting at 1, that is always one higher than the highest counter seen in the document thus far. Object Ids do not change when the object is modified, but they do if it is overwritten with a new object.
```javascript
let doc = create("aabbcc")
let o1 = doc.putObject("_root", "o1", {})
let o2 = doc.putObject("_root", "o2", {})
doc.put(o1, "hello", "world")
assert.deepEqual(doc.materialize("_root"), { "o1": { hello: "world" }, "o2": {} })
assert.equal(o1, "1@aabbcc")
assert.equal(o2, "2@aabbcc")
let o1v2 = doc.putObject("_root", "o1", {})
doc.put(o1, "a", "b") // modifying an overwritten object - does nothing
doc.put(o1v2, "x", "y") // modifying the new "o1" object
assert.deepEqual(doc.materialize("_root"), { "o1": { x: "y" }, "o2": {} })
```
### Appendix: Building
The following steps should allow you to build the package
```
$ rustup target add wasm32-unknown-unknown
$ cargo install wasm-bindgen-cli
$ cargo install wasm-opt
$ yarn
$ yarn release
$ yarn pack
```
### Appendix: WASM and Memory Allocation
Memory allocated in Rust will be freed automatically on platforms that support `FinalizationRegistry`.
This is currently supported in [all major browsers and nodejs](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/FinalizationRegistry).
On unsupported platforms you can free memory explicitly.
```javascript
import { create, initSyncState } from "@automerge/automerge-wasm"
let doc = create()
let sync = initSyncState()
doc.free()
sync.free()
```

File diff suppressed because it is too large

Binary file not shown.

deno_wasm_dist/index.d.ts (vendored, new file, 238 lines)

@@ -0,0 +1,238 @@
export type Actor = string;
export type ObjID = string;
export type Change = Uint8Array;
export type SyncMessage = Uint8Array;
export type Prop = string | number;
export type Hash = string;
export type Heads = Hash[];
export type Value = string | number | boolean | null | Date | Uint8Array
export type MaterializeValue = { [key:string]: MaterializeValue } | Array<MaterializeValue> | Value
export type ObjType = string | Array<ObjType | Value> | { [key: string]: ObjType | Value }
export type FullValue =
  ["str", string] |
  ["int", number] |
  ["uint", number] |
  ["f64", number] |
  ["boolean", boolean] |
  ["timestamp", Date] |
  ["counter", number] |
  ["bytes", Uint8Array] |
  ["null", null] |
  ["map", ObjID] |
  ["list", ObjID] |
  ["text", ObjID] |
  ["table", ObjID]
export type FullValueWithId =
  ["str", string, ObjID] |
  ["int", number, ObjID] |
  ["uint", number, ObjID] |
  ["f64", number, ObjID] |
  ["boolean", boolean, ObjID] |
  ["timestamp", Date, ObjID] |
  ["counter", number, ObjID] |
  ["bytes", Uint8Array, ObjID] |
  ["null", null, ObjID] |
  ["map", ObjID] |
  ["list", ObjID] |
  ["text", ObjID] |
  ["table", ObjID]
export enum ObjTypeName {
  list = "list",
  map = "map",
  table = "table",
  text = "text",
}
export type Datatype =
  "boolean" |
  "str" |
  "int" |
  "uint" |
  "f64" |
  "null" |
  "timestamp" |
  "counter" |
  "bytes" |
  "map" |
  "text" |
  "list";
export type SyncHave = {
  lastSync: Heads,
  bloom: Uint8Array,
}
export type DecodedSyncMessage = {
  heads: Heads,
  need: Heads,
  have: SyncHave[]
  changes: Change[]
}
export type DecodedChange = {
  actor: Actor,
  seq: number
  startOp: number,
  time: number,
  message: string | null,
  deps: Heads,
  hash: Hash,
  ops: Op[]
}
type PartialBy<T, K extends keyof T> = Omit<T, K> & Partial<Pick<T, K>>
export type ChangeToEncode = PartialBy<DecodedChange, 'hash'>
export type Op = {
  action: string,
  obj: ObjID,
  key: string,
  value?: string | number | boolean,
  datatype?: string,
  pred: string[],
}
export type Patch = PutPatch | DelPatch | SpliceTextPatch | IncPatch | InsertPatch;
export type PutPatch = {
  action: 'put'
  path: Prop[],
  value: Value
  conflict: boolean
}
export type IncPatch = {
  action: 'inc'
  path: Prop[],
  value: number
}
export type DelPatch = {
  action: 'del'
  path: Prop[],
  length?: number,
}
export type SpliceTextPatch = {
  action: 'splice'
  path: Prop[],
  value: string,
}
export type InsertPatch = {
  action: 'insert'
  path: Prop[],
  values: Value[],
}
export function encodeChange(change: ChangeToEncode): Change;
export function create(text_v2: boolean, actor?: Actor): Automerge;
export function load(data: Uint8Array, text_v2: boolean, actor?: Actor): Automerge;
export function decodeChange(change: Change): DecodedChange;
export function initSyncState(): SyncState;
export function encodeSyncMessage(message: DecodedSyncMessage): SyncMessage;
export function decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage;
export function encodeSyncState(state: SyncState): Uint8Array;
export function decodeSyncState(data: Uint8Array): SyncState;
export function exportSyncState(state: SyncState): JsSyncState;
export function importSyncState(state: JsSyncState): SyncState;
export interface API {
  create(text_v2: boolean, actor?: Actor): Automerge;
  load(data: Uint8Array, text_v2: boolean, actor?: Actor): Automerge;
  encodeChange(change: ChangeToEncode): Change;
  decodeChange(change: Change): DecodedChange;
  initSyncState(): SyncState;
  encodeSyncMessage(message: DecodedSyncMessage): SyncMessage;
  decodeSyncMessage(msg: SyncMessage): DecodedSyncMessage;
  encodeSyncState(state: SyncState): Uint8Array;
  decodeSyncState(data: Uint8Array): SyncState;
  exportSyncState(state: SyncState): JsSyncState;
  importSyncState(state: JsSyncState): SyncState;
}
export class Automerge {
  // change state
  put(obj: ObjID, prop: Prop, value: Value, datatype?: Datatype): void;
  putObject(obj: ObjID, prop: Prop, value: ObjType): ObjID;
  insert(obj: ObjID, index: number, value: Value, datatype?: Datatype): void;
  insertObject(obj: ObjID, index: number, value: ObjType): ObjID;
  push(obj: ObjID, value: Value, datatype?: Datatype): void;
  pushObject(obj: ObjID, value: ObjType): ObjID;
  splice(obj: ObjID, start: number, delete_count: number, text?: string | Array<Value>): ObjID[] | undefined;
  increment(obj: ObjID, prop: Prop, value: number): void;
  delete(obj: ObjID, prop: Prop): void;
  // returns a single value - if there is a conflict return the winner
  get(obj: ObjID, prop: Prop, heads?: Heads): Value | undefined;
  getWithType(obj: ObjID, prop: Prop, heads?: Heads): FullValue | null;
  // return all values in case of a conflict
  getAll(obj: ObjID, arg: Prop, heads?: Heads): FullValueWithId[];
  keys(obj: ObjID, heads?: Heads): string[];
  text(obj: ObjID, heads?: Heads): string;
  length(obj: ObjID, heads?: Heads): number;
  materialize(obj?: ObjID, heads?: Heads, metadata?: unknown): MaterializeValue;
  toJS(): MaterializeValue;
  // transactions
  commit(message?: string, time?: number): Hash | null;
  emptyChange(message?: string, time?: number): Hash;
  merge(other: Automerge): Heads;
  getActorId(): Actor;
  pendingOps(): number;
  rollback(): number;
  // patches
  enablePatches(enable: boolean): boolean;
  enableFreeze(enable: boolean): boolean;
  registerDatatype(datatype: string, callback: Function): void;
  popPatches(): Patch[];
  // save and load to local store
  save(): Uint8Array;
  saveIncremental(): Uint8Array;
  loadIncremental(data: Uint8Array): number;
  // sync over network
  receiveSyncMessage(state: SyncState, message: SyncMessage): void;
  generateSyncMessage(state: SyncState): SyncMessage | null;
  // low level change functions
  applyChanges(changes: Change[]): void;
  getChanges(have_deps: Heads): Change[];
  getChangeByHash(hash: Hash): Change | null;
  getChangesAdded(other: Automerge): Change[];
  getHeads(): Heads;
  getLastLocalChange(): Change | null;
  getMissingDeps(heads?: Heads): Heads;
  // memory management
  free(): void; // only needed if weak-refs are unsupported
  clone(actor?: string): Automerge; // TODO - remove, this is dangerous
  fork(actor?: string, heads?: Heads): Automerge;
  // dump internal state to console.log - for debugging
  dump(): void;
  // experimental api can go here
  applyPatches<Doc>(obj: Doc, meta?: unknown, callback?: (patch: Array<Patch>, before: Doc, after: Doc) => void): Doc;
}
export interface JsSyncState {
  sharedHeads: Heads;
  lastSentHeads: Heads;
  theirHeads: Heads | undefined;
  theirHeed: Heads | undefined;
  theirHave: SyncHave[] | undefined;
  sentHashes: Heads;
}
export class SyncState {
  free(): void;
  clone(): SyncState;
  lastSentHeads: Heads;
  sentHashes: Heads;
  readonly sharedHeads: Heads;
}
}