Allow for empty head indices when decoding doc
The compressed document format includes, at the end of the document chunk, the indices of the heads of the document. Older versions of the JavaScript implementation do not include these indices, so we allow them to be omitted when decoding. Whilst we're here, add some tracing::trace logs to make it easier to understand where parsing is failing.
This commit is contained in:
parent
dd69f6f7b4
commit
eba7038bd2
4 changed files with 26 additions and 11 deletions
automerge/src
|
@ -591,13 +591,16 @@ impl Automerge {
|
|||
}
|
||||
|
||||
/// Load a document.
|
||||
#[tracing::instrument(skip(data, options), err)]
|
||||
pub fn load_with<Obs: OpObserver>(
|
||||
data: &[u8],
|
||||
mut options: ApplyOptions<'_, Obs>,
|
||||
) -> Result<Self, AutomergeError> {
|
||||
if data.is_empty() {
|
||||
tracing::trace!("no data, initializing empty document");
|
||||
return Ok(Self::new());
|
||||
}
|
||||
tracing::trace!("loading first chunk");
|
||||
let (remaining, first_chunk) = storage::Chunk::parse(storage::parse::Input::new(data))
|
||||
.map_err(|e| load::Error::Parse(Box::new(e)))?;
|
||||
if !first_chunk.checksum_valid() {
|
||||
|
@ -607,6 +610,7 @@ impl Automerge {
|
|||
|
||||
let mut am = match first_chunk {
|
||||
storage::Chunk::Document(d) => {
|
||||
tracing::trace!("first chunk is document chunk, inflating");
|
||||
let storage::load::Reconstructed {
|
||||
max_op,
|
||||
result: op_set,
|
||||
|
@ -643,6 +647,7 @@ impl Automerge {
|
|||
}
|
||||
}
|
||||
storage::Chunk::Change(stored_change) => {
|
||||
tracing::trace!("first chunk is change chunk, applying");
|
||||
let change = Change::new_from_unverified(stored_change.into_owned(), None)
|
||||
.map_err(|e| load::Error::InvalidChangeColumns(Box::new(e)))?;
|
||||
let mut am = Self::new();
|
||||
|
@ -650,6 +655,7 @@ impl Automerge {
|
|||
am
|
||||
}
|
||||
storage::Chunk::CompressedChange(stored_change, compressed) => {
|
||||
tracing::trace!("first chunk is compressed change, decompressing and applying");
|
||||
let change = Change::new_from_unverified(
|
||||
stored_change.into_owned(),
|
||||
Some(compressed.into_owned()),
|
||||
|
@ -660,6 +666,7 @@ impl Automerge {
|
|||
am
|
||||
}
|
||||
};
|
||||
tracing::trace!("first chunk loaded, loading remaining chunks");
|
||||
match load::load_changes(remaining.reset()) {
|
||||
load::LoadedChanges::Complete(c) => {
|
||||
for change in c {
|
||||
|
|
|
@ -56,6 +56,7 @@ impl<'a> Chunk<'a> {
|
|||
first: chunk_input,
|
||||
remaining,
|
||||
} = i.split(header.data_bytes().len());
|
||||
tracing::trace!(?header, "parsed chunk header");
|
||||
let chunk = match header.chunk_type {
|
||||
ChunkType::Change => {
|
||||
let (remaining, change) =
|
||||
|
|
|
@ -135,17 +135,23 @@ impl<'a> Document<'a> {
|
|||
let (i, parse::RangeOf { range: ops, .. }) =
|
||||
parse::range_of(|i| parse::take_n(ops_meta.total_column_len(), i), i)?;
|
||||
|
||||
// parse the suffix
|
||||
let (
|
||||
i,
|
||||
parse::RangeOf {
|
||||
range: suffix,
|
||||
value: head_indices,
|
||||
},
|
||||
) = parse::range_of(
|
||||
|i| parse::apply_n(heads.len(), parse::leb128_u64::<ParseError>)(i),
|
||||
i,
|
||||
)?;
|
||||
// parse the suffix, which may be empty if this document was produced by an older version
|
||||
// of the JS automerge implementation
|
||||
let (i, suffix, head_indices) = if i.is_empty() {
|
||||
(i, 0..0, Vec::new())
|
||||
} else {
|
||||
let (
|
||||
i,
|
||||
parse::RangeOf {
|
||||
range: suffix,
|
||||
value: head_indices,
|
||||
},
|
||||
) = parse::range_of(
|
||||
|i| parse::apply_n(heads.len(), parse::leb128_u64::<ParseError>)(i),
|
||||
i,
|
||||
)?;
|
||||
(i, suffix, head_indices)
|
||||
};
|
||||
|
||||
let compression::Decompressed {
|
||||
change_bytes,
|
||||
|
|
|
@ -80,6 +80,7 @@ fn load_next_change<'a>(
|
|||
}
|
||||
match chunk {
|
||||
storage::Chunk::Document(d) => {
|
||||
tracing::trace!("loading document chunk");
|
||||
let Reconstructed {
|
||||
changes: new_changes,
|
||||
..
|
||||
|
|
Loading…
Add table
Reference in a new issue