Merge pull request from automerge/backend-tidy

Tidying up some backend code.
commit d3ef07d79f
Andrew Jeffery, 2021-05-07 11:58:56 +01:00, committed by GitHub
13 changed files with 153 additions and 129 deletions


@@ -15,7 +15,7 @@ impl ActorMap {
     pub fn import_key(&mut self, key: &amp::Key) -> Key {
         match key {
             amp::Key::Map(string) => Key::Map(string.to_string()),
-            amp::Key::Seq(eid) => Key::Seq(self.import_element_id(&eid)),
+            amp::Key::Seq(eid) => Key::Seq(self.import_element_id(eid)),
         }
     }
 
@@ -48,7 +48,7 @@ impl ActorMap {
     pub fn import_op(&mut self, op: amp::Op) -> InternalOp {
         InternalOp {
-            action: self.import_optype(&op.action),
+            action: Self::import_optype(&op.action),
             obj: self.import_obj(&op.obj),
             key: self.import_key(&op.key),
             pred: op
@@ -60,7 +60,7 @@ impl ActorMap {
         }
     }
 
-    pub fn import_optype(&mut self, optype: &amp::OpType) -> InternalOpType {
+    pub fn import_optype(optype: &amp::OpType) -> InternalOpType {
         match optype {
             amp::OpType::Make(val) => InternalOpType::Make(*val),
             amp::OpType::Del => InternalOpType::Del,
@@ -126,13 +126,13 @@ impl ActorMap {
     }
 
     fn cmp_opid(&self, op1: &OpId, op2: &OpId) -> Ordering {
-        if op1.0 != op2.0 {
-            op1.0.cmp(&op2.0)
-        } else {
+        if op1.0 == op2.0 {
             let actor1 = &self.0[(op1.1).0];
             let actor2 = &self.0[(op2.1).0];
-            actor1.cmp(&actor2)
+            actor1.cmp(actor2)
             //op1.1.cmp(&op2.1)
+        } else {
+            op1.0.cmp(&op2.0)
         }
     }
 }
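
Aside: the `cmp_opid` hunk above only swaps the branches; the order itself is unchanged. A minimal standalone sketch of that order, using an illustrative (counter, actor-index) pair rather than the crate's `OpId`:

    use std::cmp::Ordering;

    // Compare by counter first; fall back to the actor id only when two
    // ops carry the same counter (i.e. they were concurrent).
    fn cmp_opid(op1: (u64, usize), op2: (u64, usize), actors: &[&str]) -> Ordering {
        if op1.0 == op2.0 {
            actors[op1.1].cmp(actors[op2.1]) // tie-break on actor id
        } else {
            op1.0.cmp(&op2.0) // common case: counters differ
        }
    }

    fn main() {
        let actors = ["actor-a", "actor-b"];
        assert_eq!(cmp_opid((3, 0), (3, 1), &actors), Ordering::Less);
        assert_eq!(cmp_opid((2, 1), (3, 0), &actors), Ordering::Less);
    }

This is the usual Lamport-timestamp-style trick for getting a total order across actors.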


@@ -13,7 +13,7 @@ use crate::{
 pub struct Backend {
     queue: Vec<Change>,
     op_set: OpSet,
-    states: HashMap<amp::ActorId, Vec<Change>>,
+    states: HashMap<amp::ActorId, Vec<usize>>,
     actors: ActorMap,
     history: Vec<Change>,
     history_index: HashMap<amp::ChangeHash, usize>,
@@ -42,8 +42,8 @@ impl Backend {
             self.op_set
                 .deps
                 .iter()
-                .filter(|&dep| dep != &last_hash)
                 .cloned()
+                .filter(|dep| dep != &last_hash)
                 .collect()
         } else {
             self.op_set.deps.iter().cloned().collect()
@@ -85,7 +85,7 @@
     ) -> Result<amp::Patch, AutomergeError> {
         let mut pending_diffs = HashMap::new();
 
-        for change in changes.into_iter() {
+        for change in changes {
             self.add_change(change, actor.is_some(), &mut pending_diffs)?;
         }
 
@@ -98,6 +98,7 @@
         self.states
             .get(actor)
             .and_then(|v| v.get(seq as usize - 1))
+            .and_then(|&i| self.history.get(i))
             .map(|c| c.hash)
             .ok_or(AutomergeError::InvalidSeq(seq))
     }
@@ -127,8 +128,7 @@
         if self
             .states
             .get(&change.actor_id)
-            .map(|v| v.len() as u64)
-            .unwrap_or(0)
+            .map_or(0, |v| v.len() as u64)
             >= change.seq
         {
             return Err(AutomergeError::DuplicateChange(format!(
@@ -173,13 +173,17 @@
             return Ok(());
         }
 
-        self.update_history(&change);
+        let change_index = self.update_history(change);
+
+        // SAFETY: change_index is the index for the change we've just added so this can't (and
+        // shouldn't) panic. This is to get around the borrow checker.
+        let change = &self.history[change_index];
 
         let op_set = &mut self.op_set;
 
         let start_op = change.start_op;
 
-        op_set.update_deps(&change);
+        op_set.update_deps(change);
 
         let ops = OpHandle::extract(change, &mut self.actors);
@@ -190,14 +194,18 @@
         Ok(())
     }
 
-    fn update_history(&mut self, change: &Change) {
+    fn update_history(&mut self, change: Change) -> usize {
+        let history_index = self.history.len();
+
         self.states
             .entry(change.actor_id().clone())
             .or_default()
-            .push(change.clone());
+            .push(history_index);
 
-        self.history_index.insert(change.hash, self.history.len());
-        self.history.push(change.clone());
+        self.history_index.insert(change.hash, history_index);
+        self.history.push(change);
+
+        history_index
     }
 
     fn pop_next_causally_ready_change(&mut self) -> Option<Change> {
@@ -230,7 +238,7 @@
         Ok(self
             .states
             .get(actor_id)
-            .map(|vec| vec.iter().collect())
+            .map(|vec| vec.iter().filter_map(|&i| self.history.get(i)).collect())
             .unwrap_or_default())
     }
 
@@ -276,7 +284,7 @@
         }
         if let Some(change) = self
            .history_index
-            .get(&hash)
+            .get(hash)
            .and_then(|i| self.history.get(*i))
        {
            stack.extend(change.deps.iter());
@@ -299,9 +307,11 @@
 
     pub fn save(&self) -> Result<Vec<u8>, AutomergeError> {
         let changes: Vec<amp::UncompressedChange> = self.history.iter().map(|r| r.into()).collect();
-        encode_document(changes)
+        encode_document(&changes)
     }
 
+    // allow this for API reasons
+    #[allow(clippy::needless_pass_by_value)]
     pub fn load(data: Vec<u8>) -> Result<Self, AutomergeError> {
         let changes = Change::load_document(&data)?;
         let mut backend = Self::init();
@@ -313,21 +323,22 @@
         let in_queue: HashSet<_> = self.queue.iter().map(|change| change.hash).collect();
         let mut missing = HashSet::new();
 
-        for head in self.queue.iter().flat_map(|change| change.deps.clone()) {
-            if !self.history_index.contains_key(&head) {
+        for head in self.queue.iter().flat_map(|change| &change.deps) {
+            if !self.history_index.contains_key(head) {
                 missing.insert(head);
             }
         }
 
         for head in heads {
-            if !self.history_index.contains_key(&head) {
-                missing.insert(*head);
+            if !self.history_index.contains_key(head) {
+                missing.insert(head);
             }
         }
 
         let mut missing = missing
             .into_iter()
             .filter(|hash| !in_queue.contains(hash))
+            .cloned()
             .collect::<Vec<_>>();
         missing.sort();
         missing
@@ -359,14 +370,14 @@
             }
             seen.insert(hash);
 
-            let removed = changes.remove(&hash);
+            let removed = changes.remove(hash);
             if changes.is_empty() {
                 break;
             }
 
             for dep in self
                 .history_index
-                .get(&hash)
+                .get(hash)
                 .and_then(|i| self.history.get(*i))
                 .map(|c| c.deps.as_slice())
                 .unwrap_or_default()
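
Aside: the recurring theme in this file is that `states` now stores indices into `history` (`Vec<usize>`) instead of cloned changes, so each change is owned exactly once. A minimal sketch of the same indirection with stand-in types (not the crate's own):

    use std::collections::HashMap;

    #[derive(Default)]
    struct Store {
        history: Vec<String>,                // stand-in for Vec<Change>
        states: HashMap<String, Vec<usize>>, // actor id -> indices into history
    }

    impl Store {
        fn update_history(&mut self, actor: &str, change: String) -> usize {
            let history_index = self.history.len();
            self.states
                .entry(actor.to_owned())
                .or_default()
                .push(history_index);
            self.history.push(change); // stored once, no clone
            history_index
        }

        fn changes_for(&self, actor: &str) -> Vec<&String> {
            self.states
                .get(actor)
                .map(|v| v.iter().filter_map(|&i| self.history.get(i)).collect())
                .unwrap_or_default()
        }
    }

    fn main() {
        let mut store = Store::default();
        let i = store.update_history("actor-a", "change 1".to_owned());
        assert_eq!(i, 0);
        assert_eq!(store.changes_for("actor-a").len(), 1);
    }

Lookups pay one extra indexing step, which is exactly what the new `.and_then(|&i| self.history.get(i))` and `filter_map` calls in the hunks above do.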


@@ -244,7 +244,7 @@ impl Change {
         if m.is_empty() {
             None
         } else {
-            str::from_utf8(&m).map(|s| s.to_string()).ok()
+            str::from_utf8(m).map(ToString::to_string).ok()
         }
     }
 
@@ -263,7 +263,7 @@ impl Change {
     }
 
     pub fn iter_ops(&self) -> OperationIterator {
-        OperationIterator::new(&self.bytes.uncompressed(), &self.actors, &self.ops)
+        OperationIterator::new(self.bytes.uncompressed(), &self.actors, &self.ops)
     }
 
     pub fn extra_bytes(&self) -> &[u8] {
@@ -438,10 +438,10 @@ fn decode_column_info(
 
 fn decode_columns(
     cursor: &mut Range<usize>,
-    columns: Vec<(u32, usize)>,
+    columns: &[(u32, usize)],
 ) -> HashMap<u32, Range<usize>> {
     let mut ops = HashMap::new();
-    for (id, length) in columns.iter() {
+    for (id, length) in columns {
         let start = cursor.start;
         let end = start + length;
         *cursor = end..cursor.end;
@@ -472,7 +472,7 @@ fn decode_change(bytes: Vec<u8>) -> Result<Change, AutomergeError> {
         ChangeBytes::Uncompressed(bytes)
     };
 
-    let (chunktype, hash, body) = decode_header(&bytes.uncompressed())?;
+    let (chunktype, hash, body) = decode_header(bytes.uncompressed())?;
 
     if chunktype != BLOCK_TYPE_CHANGE {
         return Err(AutomergeError::EncodingError);
@@ -480,19 +480,19 @@
     let mut cursor = body;
 
-    let deps = decode_hashes(&bytes.uncompressed(), &mut cursor)?;
+    let deps = decode_hashes(bytes.uncompressed(), &mut cursor)?;
 
     let actor =
-        amp::ActorId::from(&bytes.uncompressed()[slice_bytes(&bytes.uncompressed(), &mut cursor)?]);
-    let seq = read_slice(&bytes.uncompressed(), &mut cursor)?;
-    let start_op = read_slice(&bytes.uncompressed(), &mut cursor)?;
-    let time = read_slice(&bytes.uncompressed(), &mut cursor)?;
-    let message = slice_bytes(&bytes.uncompressed(), &mut cursor)?;
+        amp::ActorId::from(&bytes.uncompressed()[slice_bytes(bytes.uncompressed(), &mut cursor)?]);
+    let seq = read_slice(bytes.uncompressed(), &mut cursor)?;
+    let start_op = read_slice(bytes.uncompressed(), &mut cursor)?;
+    let time = read_slice(bytes.uncompressed(), &mut cursor)?;
+    let message = slice_bytes(bytes.uncompressed(), &mut cursor)?;
 
-    let actors = decode_actors(&bytes.uncompressed(), &mut cursor, Some(actor))?;
+    let actors = decode_actors(bytes.uncompressed(), &mut cursor, Some(actor))?;
 
-    let ops_info = decode_column_info(&bytes.uncompressed(), &mut cursor, false)?;
-    let ops = decode_columns(&mut cursor, ops_info);
+    let ops_info = decode_column_info(bytes.uncompressed(), &mut cursor, false)?;
+    let ops = decode_columns(&mut cursor, &ops_info);
 
     Ok(Change {
         bytes,
@@ -559,8 +559,8 @@ fn group_doc_change_and_doc_ops(
         let op = ops[i].clone(); // this is safe - avoid borrow checker issues
         //let id = (op.ctr, op.actor);
         //op_by_id.insert(id, i);
-        for succ in op.succ.iter() {
-            if let Some(index) = op_by_id.get(&succ) {
+        for succ in &op.succ {
+            if let Some(index) = op_by_id.get(succ) {
                 ops[*index].pred.push((op.ctr, op.actor))
             } else {
                 let key = if op.insert {
@@ -650,7 +650,7 @@ fn doc_changes_to_uncompressed_changes(
 
 fn load_blocks(bytes: &[u8]) -> Result<Vec<Change>, AutomergeError> {
     let mut changes = Vec::new();
-    for slice in split_blocks(bytes).into_iter() {
+    for slice in split_blocks(bytes) {
         decode_block(slice, &mut changes)?;
     }
     Ok(changes)
@@ -685,25 +685,25 @@ fn pop_block(bytes: &[u8]) -> Option<Range<usize>> {
 }
 
 fn decode_document(bytes: &[u8]) -> Result<Vec<Change>, AutomergeError> {
-    let (chunktype, _hash, mut cursor) = decode_header(&bytes)?;
+    let (chunktype, _hash, mut cursor) = decode_header(bytes)?;
 
     if chunktype > 0 {
         return Err(AutomergeError::EncodingError);
     }
 
-    let actors = decode_actors(&bytes, &mut cursor, None)?;
+    let actors = decode_actors(bytes, &mut cursor, None)?;
     // FIXME
     // I should calculate the deads generated on decode and confirm they match these
-    let _heads = decode_hashes(&bytes, &mut cursor)?;
+    let _heads = decode_hashes(bytes, &mut cursor)?;
 
-    let changes_info = decode_column_info(&bytes, &mut cursor, true)?;
-    let ops_info = decode_column_info(&bytes, &mut cursor, true)?;
+    let changes_info = decode_column_info(bytes, &mut cursor, true)?;
+    let ops_info = decode_column_info(bytes, &mut cursor, true)?;
 
-    let changes_data = decode_columns(&mut cursor, changes_info);
-    let mut doc_changes: Vec<_> = ChangeIterator::new(&bytes, &changes_data).collect();
+    let changes_data = decode_columns(&mut cursor, &changes_info);
+    let mut doc_changes: Vec<_> = ChangeIterator::new(bytes, &changes_data).collect();
 
-    let ops_data = decode_columns(&mut cursor, ops_info);
-    let mut doc_ops: Vec<_> = DocOpIterator::new(&bytes, &actors, &ops_data).collect();
+    let ops_data = decode_columns(&mut cursor, &ops_info);
+    let mut doc_ops: Vec<_> = DocOpIterator::new(bytes, &actors, &ops_data).collect();
 
     group_doc_change_and_doc_ops(&mut doc_changes, &mut doc_ops, &actors)?;
 
@@ -723,7 +723,7 @@ fn compress_doc_changes(
 
     for i in 0..doc_changes.len() {
         let deps = &mut uncompressed_changes.get_mut(i)?.deps;
-        for idx in doc_changes.get(i)?.deps.iter() {
+        for idx in &doc_changes.get(i)?.deps {
            deps.push(changes.get(*idx)?.hash)
        }
        changes.push(uncompressed_changes.get(i)?.into());
@@ -746,9 +746,7 @@ fn group_doc_ops(changes: &[amp::UncompressedChange], actors: &[amp::ActorId]) -
            is_seq.insert(opid.clone().into());
        }
 
-        let key = if !op.insert {
-            op.key.clone()
-        } else {
+        let key = if op.insert {
            by_ref
                .entry(objid.clone())
                .or_default()
@@ -756,6 +754,8 @@ fn group_doc_ops(changes: &[amp::UncompressedChange], actors: &[amp::ActorId]) -
                .or_default()
                .push(opid.clone());
            opid.clone().into()
+        } else {
+            op.key.clone()
        };
 
        by_obj_id
@@ -837,20 +837,20 @@ fn get_heads(changes: &[amp::UncompressedChange]) -> HashSet<amp::ChangeHash> {
        if let Some(hash) = c.hash {
            acc.insert(hash);
        }
-        for dep in c.deps.iter() {
-            acc.remove(&dep);
+        for dep in &c.deps {
+            acc.remove(dep);
        }
        acc
    })
}
 
pub(crate) fn encode_document(
-    changes: Vec<amp::UncompressedChange>,
+    changes: &[amp::UncompressedChange],
) -> Result<Vec<u8>, AutomergeError> {
    let mut bytes: Vec<u8> = Vec::new();
    let mut hasher = Sha256::new();
 
-    let heads = get_heads(&changes);
+    let heads = get_heads(changes);
 
    // this assumes that all actor_ids referenced are seen in changes.actor_id which is true
    // so long as we have a full history
@@ -862,9 +862,9 @@
        .cloned()
        .collect();
 
-    let (change_bytes, change_info) = ChangeEncoder::encode_changes(&changes, &actors);
+    let (change_bytes, change_info) = ChangeEncoder::encode_changes(changes, &actors);
 
-    let doc_ops = group_doc_ops(&changes, &actors);
+    let doc_ops = group_doc_ops(changes, &actors);
 
    let (ops_bytes, ops_info) = DocOpEncoder::encode_doc_ops(&doc_ops, &mut actors);
 
@@ -876,7 +876,7 @@
 
    actors.len().encode(&mut chunk)?;
 
-    for a in actors.iter() {
+    for a in &actors {
        a.to_bytes().encode(&mut chunk)?;
    }
 
@@ -1128,7 +1128,7 @@ mod tests {
            .map(|c| c.decode())
            .collect();
        let decoded_preload: Vec<amp::UncompressedChange> =
-            changes.clone().into_iter().map(|c| c.decode()).collect();
+            changes.clone().into_iter().map(Change::decode).collect();
        assert_eq!(decoded_loaded, decoded_preload);
        assert_eq!(
            loaded_changes,
@@ -1180,7 +1180,7 @@
            .map(|c| c.decode())
            .collect();
        let decoded_preload: Vec<amp::UncompressedChange> =
-            changes.clone().into_iter().map(|c| c.decode()).collect();
+            changes.clone().into_iter().map(Change::decode).collect();
        assert_eq!(decoded_loaded[0].operations.len(), 257);
        assert_eq!(decoded_loaded, decoded_preload);
        assert_eq!(
@@ -1202,7 +1202,7 @@
            deps: Vec::new(),
            extra_bytes: Vec::new(),
        };
-        let mut doc = encode_document(vec![change]).unwrap();
+        let mut doc = encode_document(&[change]).unwrap();
        let hash = doc[4..8].try_into().unwrap();
        doc[4] = 0;
        doc[5] = 0;
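
Aside: most hunks in this file change owned parameters (`Vec<T>`) or double borrows (`&bytes` where `bytes: &[u8]`) into plain slice borrows. A small sketch of why `&[T]` is the friendlier signature (illustrative function, not from the diff):

    // Borrowing a slice leaves the caller in possession of the Vec.
    fn total_length(columns: &[(u32, usize)]) -> usize {
        let mut sum = 0;
        for (_id, length) in columns {
            sum += *length;
        }
        sum
    }

    fn main() {
        let cols = vec![(0_u32, 3_usize), (1, 5)];
        assert_eq!(total_length(&cols), 8);
        assert_eq!(cols.len(), 2); // cols is still usable afterwards
    }

This is why `encode_document` and `decode_columns` can now be called with `&changes` and `&ops_info` while the originals stay alive.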


@@ -424,7 +424,7 @@ impl<'a> Iterator for ValueIterator<'a> {
             v if v % 16 == VALUE_TYPE_UTF8 => {
                 let len = v >> 4;
                 let data = self.val_raw.read_bytes(len).ok()?;
-                let s = str::from_utf8(&data).ok()?;
+                let s = str::from_utf8(data).ok()?;
                 Some(amp::ScalarValue::Str(s.to_string()))
             }
             v if v % 16 == VALUE_TYPE_BYTES => {
@@ -490,7 +490,7 @@ impl<'a> Iterator for ObjIterator<'a> {
     fn next(&mut self) -> Option<amp::ObjectId> {
         if let (Some(actor), Some(ctr)) = (self.actor.next()?, self.ctr.next()?) {
             let actor_id = self.actors.get(actor)?;
-            Some(amp::ObjectId::Id(amp::OpId::new(ctr, &actor_id)))
+            Some(amp::ObjectId::Id(amp::OpId::new(ctr, actor_id)))
         } else {
             Some(amp::ObjectId::Root)
         }
@@ -562,12 +562,9 @@ impl ValEncoder {
         // It may seem weird to have two consecutive matches on the same value. The reason is so
         // that we don't have to repeat the `append_null` calls on ref_actor and ref_counter in
         // every arm of the next match
-        match val {
-            amp::ScalarValue::Cursor(_) => {}
-            _ => {
-                self.ref_actor.append_null();
-                self.ref_counter.append_null();
-            }
+        if !matches!(val, amp::ScalarValue::Cursor(_)) {
+            self.ref_actor.append_null();
+            self.ref_counter.append_null();
         }
         match val {
             amp::ScalarValue::Null => self.len.append_value(VALUE_TYPE_NULL),
@@ -657,7 +654,7 @@ impl KeyEncoder {
                 self.str.append_null();
             }
             amp::Key::Seq(amp::ElementId::Id(amp::OpId(ctr, actor))) => {
-                self.actor.append_value(map_actor(&actor, actors));
+                self.actor.append_value(map_actor(actor, actors));
                 self.ctr.append_value(*ctr);
                 self.str.append_null();
             }
@@ -757,7 +754,7 @@ impl ObjEncoder {
                 self.ctr.append_null();
             }
             amp::ObjectId::Id(amp::OpId(ctr, actor)) => {
-                self.actor.append_value(map_actor(&actor, actors));
+                self.actor.append_value(map_actor(actor, actors));
                 self.ctr.append_value(*ctr);
             }
         }
@@ -824,7 +821,7 @@ impl ChangeEncoder {
         self.time.append_value(change.time as u64);
         self.message.append_value(change.message.clone());
         self.deps_num.append_value(change.deps.len());
-        for dep in change.deps.iter() {
+        for dep in &change.deps {
             if let Some(dep_index) = index_by_hash.get(dep) {
                 self.deps_index.append_value(*dep_index as u64)
             } else {
@@ -863,11 +860,11 @@
             .count()
             .encode(&mut info)
             .ok();
-        for d in coldata.iter_mut() {
+        for d in &mut coldata {
             d.deflate();
             d.encode_col_len(&mut info).ok();
         }
-        for d in coldata.iter() {
+        for d in &coldata {
             data.write_all(d.data.as_slice()).ok();
         }
         (data, info)
@@ -973,11 +970,11 @@ impl DocOpEncoder {
             .count()
             .encode(&mut info)
             .ok();
-        for d in coldata.iter_mut() {
+        for d in &mut coldata {
             d.deflate();
             d.encode_col_len(&mut info).ok();
         }
-        for d in coldata.iter() {
+        for d in &coldata {
             data.write_all(d.data.as_slice()).ok();
         }
         (data, info)
@@ -1078,10 +1075,10 @@ impl ColumnEncoder {
             .count()
             .encode(&mut data)
             .ok();
-        for d in coldata.iter_mut() {
+        for d in &mut coldata {
             d.encode_col_len(&mut data).ok();
         }
-        for d in coldata.iter() {
+        for d in &coldata {
             let begin = data.len();
             data.write_all(d.data.as_slice()).ok();
             if !d.data.is_empty() {
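
Aside: the `ValEncoder` hunk above replaces a two-arm match whose only question is "is this the `Cursor` variant?" with a `matches!` test. A tiny sketch of the equivalence, with an illustrative enum:

    enum Scalar {
        Cursor(u64),
        Other,
    }

    // Same meaning as: match val { Scalar::Cursor(_) => false, _ => true }
    fn needs_ref_nulls(val: &Scalar) -> bool {
        !matches!(val, Scalar::Cursor(_))
    }

    fn main() {
        assert!(needs_ref_nulls(&Scalar::Other));
        assert!(!needs_ref_nulls(&Scalar::Cursor(7)));
    }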


@@ -226,7 +226,7 @@ where
                 }
             }
             RleState::LoneVal(value) => self.flush_lit_run(vec![value]),
-            RleState::Run(value, len) => self.flush_run(value, len),
+            RleState::Run(value, len) => self.flush_run(&value, len),
             RleState::LiteralRun(last, mut run) => {
                 run.push(last);
                 self.flush_lit_run(run);
@@ -236,20 +236,20 @@ where
         ColData::new(col, self.buf)
     }
 
-    fn flush_run(&mut self, val: T, len: usize) {
-        self.encode(len as i64);
+    fn flush_run(&mut self, val: &T, len: usize) {
+        self.encode(&(len as i64));
         self.encode(val);
     }
 
     fn flush_null_run(&mut self, len: usize) {
-        self.encode::<i64>(0);
-        self.encode(len);
+        self.encode::<i64>(&0);
+        self.encode(&len);
     }
 
     fn flush_lit_run(&mut self, run: Vec<T>) {
-        self.encode(-(run.len() as i64));
+        self.encode(&-(run.len() as i64));
         for val in run {
-            self.encode(val);
+            self.encode(&val);
         }
     }
 
@@ -268,7 +268,7 @@ where
                 RleState::NullRun(1)
             }
             RleState::Run(other, len) => {
-                self.flush_run(other, len);
+                self.flush_run(&other, len);
                 RleState::NullRun(1)
             }
             RleState::LiteralRun(last, mut run) => {
@@ -293,7 +293,7 @@ where
                 if other == value {
                     RleState::Run(other, len + 1)
                 } else {
-                    self.flush_run(other, len);
+                    self.flush_run(&other, len);
                     RleState::LoneVal(value)
                 }
             }
@@ -313,7 +313,7 @@
         }
     }
 
-    fn encode<V>(&mut self, val: V)
+    fn encode<V>(&mut self, val: &V)
     where
         V: Encodable,
     {
@@ -639,13 +639,13 @@ impl Encodable for usize {
 
 impl Encodable for u32 {
     fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
-        (*self as u64).encode(buf)
+        u64::from(*self).encode(buf)
     }
 }
 
 impl Encodable for i32 {
     fn encode<R: Write>(&self, buf: &mut R) -> io::Result<usize> {
-        (*self as i64).encode(buf)
+        i64::from(*self).encode(buf)
     }
 }
 
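
Aside: the `flush_*` methods above show the run-length framing this encoder writes: a positive count introduces a repeated value, a negative count a literal run, and a zero count is followed by a null-run length. A hand-rolled sketch of interpreting that header (illustrative, not the crate's decoder):

    // Interpret one run header produced by flush_run / flush_lit_run /
    // flush_null_run above.
    fn describe_run(count: i64) -> String {
        match count {
            0 => "null run: the next integer is the number of nulls".to_owned(),
            n if n > 0 => format!("run: the next value repeats {} times", n),
            n => format!("literal run: the next {} values appear verbatim", -n),
        }
    }

    fn main() {
        assert!(describe_run(-3).starts_with("literal run"));
    }

The switch from `encode(val: V)` to `encode(val: &V)` lets these methods encode borrowed values without having to take ownership first.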


@@ -1,3 +1,20 @@
+#![warn(clippy::pedantic)]
+#![warn(clippy::nursery)]
+#![allow(clippy::missing_errors_doc)]
+#![allow(clippy::must_use_candidate)]
+#![allow(clippy::option_if_let_else)]
+#![allow(clippy::cast_sign_loss)]
+#![allow(clippy::cast_possible_truncation)]
+#![allow(clippy::cast_possible_wrap)]
+#![allow(clippy::doc_markdown)]
+#![allow(clippy::similar_names)]
+#![allow(clippy::shadow_unrelated)]
+#![allow(clippy::module_name_repetitions)]
+#![allow(clippy::redundant_pub_crate)]
+#![allow(clippy::missing_const_for_fn)]
+#![allow(clippy::use_self)]
+#![allow(clippy::too_many_lines)]
+
 extern crate fxhash;
 extern crate hex;
 extern crate itertools;


@@ -46,7 +46,7 @@ impl ObjState {
     }
 
     fn get_parent(&self, id: &ElementId) -> Option<ElementId> {
-        self.insertions.get(&id).and_then(|i| i.key.as_element_id())
+        self.insertions.get(id).and_then(|i| i.key.as_element_id())
     }
 
     fn insertions_after(&self, parent: &ElementId) -> Vec<ElementId> {


@@ -20,7 +20,7 @@ pub(crate) struct OpHandle {
 }
 
 impl OpHandle {
-    pub fn extract(change: Change, actors: &mut ActorMap) -> Vec<OpHandle> {
+    pub fn extract(change: &Change, actors: &mut ActorMap) -> Vec<OpHandle> {
         change
             .iter_ops()
             .enumerate()


@@ -99,7 +99,7 @@ impl OpSet {
             tracing::debug!(referred_opid=?oid, "Adding cursor");
             let internal_opid = actors.import_opid(oid);
             let mut target_found = false;
-            for (obj_id, obj) in self.objs.iter() {
+            for (obj_id, obj) in &self.objs {
                 if obj.insertions.contains_key(&internal_opid.into()) {
                     target_found = true;
                     self.cursors.entry(*obj_id).or_default().push(CursorState {
@@ -120,7 +120,7 @@
         }
 
         let object_id = &op.obj;
-        let object = self.get_obj_mut(&object_id)?;
+        let object = self.get_obj_mut(object_id)?;
 
         let (diff, overwritten) = if object.is_seq() {
             if op.insert {
@@ -199,7 +199,7 @@
 
         for old in overwritten.iter() {
             if let Some(child) = old.child() {
-                self.get_obj_mut(&child)?.inbound.remove(&old);
+                self.get_obj_mut(&child)?.inbound.remove(old);
             }
         }
         Ok(())
@@ -207,13 +207,13 @@
 
     pub fn get_obj(&self, object_id: &ObjectId) -> Result<&ObjState, AutomergeError> {
         self.objs
-            .get(&object_id)
+            .get(object_id)
             .ok_or(AutomergeError::MissingObjectError)
     }
 
     fn get_obj_mut(&mut self, object_id: &ObjectId) -> Result<&mut ObjState, AutomergeError> {
         self.objs
-            .get_mut(&object_id)
+            .get_mut(object_id)
             .ok_or(AutomergeError::MissingObjectError)
     }
 
@@ -226,7 +226,7 @@
     ) -> Result<amp::Diff, AutomergeError> {
         let mut props = HashMap::new();
 
-        for (key, ops) in object.props.iter() {
+        for (key, ops) in &object.props {
             if !ops.is_empty() {
                 let mut opid_to_value = HashMap::new();
                 for op in ops.iter() {
@@ -261,7 +261,7 @@
         let mut index = 0;
         let mut max_counter = 0;
 
-        for opid in object.seq.into_iter() {
+        for opid in &object.seq {
             max_counter = max(max_counter, opid.0);
             let key = (*opid).into(); // FIXME - something is wrong here
             let elem_id = actors.export_opid(opid).into();
@@ -298,7 +298,7 @@
         object_id: &ObjectId,
         actors: &ActorMap,
     ) -> Result<amp::Diff, AutomergeError> {
-        let object = self.get_obj(&object_id)?;
+        let object = self.get_obj(object_id)?;
         match object.obj_type {
             amp::ObjType::Map(map_type) => self.construct_map(object_id, object, actors, map_type),
             amp::ObjType::Sequence(seq_type) => {
@@ -322,7 +322,7 @@
         // diff for the cursor
         let mut cursor_changes: HashMap<ObjectId, Vec<PendingDiff>> = HashMap::new();
         for obj_id in pending.keys() {
-            if let Some(cursors) = self.cursors.get_mut(&obj_id) {
+            if let Some(cursors) = self.cursors.get_mut(obj_id) {
                 for cursor in cursors.iter_mut() {
                     if let Some(obj) = self.objs.get(&cursor.internal_referred_object_id) {
                         cursor.index = obj.index_of(cursor.internal_element_opid).unwrap_or(0);
@@ -370,10 +370,10 @@
         let mut props = HashMap::new();
         let edits = pending.iter().filter_map(|p| p.edit(actors)).collect();
         // i may have duplicate keys - this makes sure I hit each one only once
-        let keys: HashSet<_> = pending.iter().map(|p| p.operation_key()).collect();
-        for key in keys.iter() {
+        let keys: HashSet<_> = pending.iter().map(PendingDiff::operation_key).collect();
+        for key in &keys {
             let mut opid_to_value = HashMap::new();
-            for op in obj.props.get(&key).iter().flat_map(|i| i.iter()) {
+            for op in obj.props.get(key).iter().flat_map(|i| i.iter()) {
                 let link = match op.action {
                     InternalOpType::Set(ref value) => self.gen_value_diff(op, value),
                     InternalOpType::Make(_) => {
@@ -410,11 +410,11 @@
     ) -> Result<amp::Diff, AutomergeError> {
         let mut props = HashMap::new();
         // I may have duplicate keys - I do this to make sure I visit each one only once
-        let keys: HashSet<_> = pending.iter().map(|p| p.operation_key()).collect();
-        for key in keys.iter() {
+        let keys: HashSet<_> = pending.iter().map(PendingDiff::operation_key).collect();
+        for key in &keys {
             let key_string = actors.key_to_string(key);
             let mut opid_to_value = HashMap::new();
-            for op in obj.props.get(&key).iter().flat_map(|i| i.iter()) {
+            for op in obj.props.get(key).iter().flat_map(|i| i.iter()) {
                 let link = match op.action {
                     InternalOpType::Set(ref value) => self.gen_value_diff(op, value),
                     InternalOpType::Make(_) => {
@@ -438,7 +438,7 @@
     pub fn update_deps(&mut self, change: &Change) {
         //self.max_op = max(self.max_op, change.max_op());
 
-        for d in change.deps.iter() {
+        for d in &change.deps {
             self.deps.remove(d);
         }
         self.deps.insert(change.hash);
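
Aside: several hunks here replace `map(|p| p.operation_key())` with the method path `map(PendingDiff::operation_key)`. The two are equivalent when the closure only forwards to a `&self` method; an illustrative sketch:

    struct Pending(u32);

    impl Pending {
        fn operation_key(&self) -> u32 {
            self.0
        }
    }

    fn main() {
        let pending = vec![Pending(1), Pending(2)];
        let by_closure: Vec<u32> = pending.iter().map(|p| p.operation_key()).collect();
        let by_path: Vec<u32> = pending.iter().map(Pending::operation_key).collect();
        assert_eq!(by_closure, by_path);
    }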


@@ -235,8 +235,7 @@ where
         } else {
             self.key_of(index - 1)
                 .cloned()
-                .map(|suc| self.insert_after(&suc, key))
-                .unwrap_or(false)
+                .map_or(false, |suc| self.insert_after(&suc, key))
         }
     }
 }
@@ -383,7 +382,7 @@
 
         let mut pre_level = 0;
         let mut suc_level = 0;
-        for level in 1..(max_level + 1) {
+        for level in 1..=max_level {
             let update_level = min(level, removed.level);
             if level == max_level
                 || pre.get(level).map(|l| &l.key) != pre.get(pre_level).map(|l| &l.key)
@@ -404,7 +403,7 @@
     }
 
     fn get_node(&self, key: Option<&K>) -> &Node<K> {
-        if let Some(ref k) = key {
+        if let Some(k) = key {
             self.nodes
                 .get(k)
                 .unwrap_or_else(|| panic!("get_node - missing key {:?}", key))
@@ -414,7 +413,7 @@
     }
 
     fn get_node_mut(&mut self, key: Option<&K>) -> &mut Node<K> {
-        if let Some(ref k) = key {
+        if let Some(k) = key {
             self.nodes
                 .get_mut(k)
                 .unwrap_or_else(|| panic!("get_node - missing key {:?}", key))
@@ -499,7 +498,7 @@
 
         let mut pre_level = 0;
         let mut suc_level = 0;
-        for level in 1..(max_level + 1) {
+        for level in 1..=max_level {
             let update_level = min(level, new_level);
             if level == max_level
                 || pre.get(level).map(|l| &l.key) != pre.get(pre_level).map(|l| &l.key)
@@ -575,7 +574,7 @@
     fn next(&mut self) -> Option<&'a K> {
         let mut successor = match self.id {
             None => None,
-            Some(ref key) => self.nodes.get(key).and_then(|n| n.successor()),
+            Some(key) => self.nodes.get(key).and_then(Node::successor),
         };
         mem::swap(&mut successor, &mut self.id);
         successor
@@ -835,7 +834,7 @@ mod tests {
             .collect();
 
         let mut s = SkipList::<&str>::new();
-        for elem in elems.iter() {
+        for elem in &elems {
             s.insert_head(elem);
         }
 
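
Aside: the two `1..(max_level + 1)` loops in this file become `1..=max_level`; the iteration is identical, the inclusive form just states the upper bound directly:

    fn main() {
        let max_level = 4;
        let a: Vec<u32> = (1..(max_level + 1)).collect();
        let b: Vec<u32> = (1..=max_level).collect();
        assert_eq!(a, b); // both are [1, 2, 3, 4]
    }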


@@ -17,9 +17,9 @@ pub(crate) enum PendingDiff {
 impl PendingDiff {
     pub fn operation_key(&self) -> Key {
         match self {
-            Self::SeqInsert(op, _, _) => op.operation_key(),
-            Self::SeqRemove(op, _) => op.operation_key(),
-            Self::Set(op) => op.operation_key(),
+            Self::SeqInsert(op, _, _) | Self::SeqRemove(op, _) | Self::Set(op) => {
+                op.operation_key()
+            }
             Self::CursorChange(k) => k.clone(),
         }
     }
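
Aside: the or-pattern consolidation above works because every merged arm binds the same name (`op`) the same way. A minimal sketch with an illustrative enum:

    enum Diff {
        Insert(u32),
        Remove(u32),
        Set(u32),
        Cursor,
    }

    fn key(d: &Diff) -> u32 {
        match d {
            // One arm, one body: all three variants bind `op` identically.
            Diff::Insert(op) | Diff::Remove(op) | Diff::Set(op) => *op,
            Diff::Cursor => 0,
        }
    }

    fn main() {
        assert_eq!(key(&Diff::Remove(9)), 9);
        assert_eq!(key(&Diff::Cursor), 0);
    }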


@@ -19,6 +19,7 @@ mod state;
 pub use bloom::BloomFilter;
 pub use state::{SyncHave, SyncState};
 
+const HASH_SIZE: usize = 32; // 256 bits = 32 bytes
 const MESSAGE_TYPE_SYNC: u8 = 0x42; // first byte of a sync message, for identification
 
 impl Backend {
@@ -222,8 +223,8 @@
         let mut changes_to_send = Vec::new();
         for hash in need {
             hashes_to_send.insert(*hash);
-            if !change_hashes.contains(&hash) {
-                let change = self.get_change_by_hash(&hash);
+            if !change_hashes.contains(hash) {
+                let change = self.get_change_by_hash(hash);
                 if let Some(change) = change {
                     changes_to_send.push(change)
                 }
@@ -328,7 +329,6 @@ fn decode_hashes(decoder: &mut Decoder) -> Result<Vec<ChangeHash>, AutomergeErro
     let length = decoder.read::<u32>()?;
     let mut hashes = Vec::with_capacity(length as usize);
 
-    const HASH_SIZE: usize = 32; // 256 bits = 32 bytes
     for _ in 0..length {
         let hash_bytes = decoder.read_bytes(HASH_SIZE)?;
         let hash = ChangeHash::try_from(hash_bytes)


@@ -92,7 +92,7 @@ impl BloomFilter {
     }
 
     fn bits_capacity(num_entries: u32, num_bits_per_entry: u32) -> usize {
-        let f = ((num_entries as f64 * num_bits_per_entry as f64) / 8f64).ceil();
+        let f = ((f64::from(num_entries) * f64::from(num_bits_per_entry)) / 8_f64).ceil();
         f as usize
     }
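
Aside: a worked check of the capacity arithmetic above. `f64::from` is a lossless widening for any `u32`, so the only narrowing left is the final `as usize` after `ceil`:

    fn bits_capacity(num_entries: u32, num_bits_per_entry: u32) -> usize {
        let f = ((f64::from(num_entries) * f64::from(num_bits_per_entry)) / 8_f64).ceil();
        f as usize
    }

    fn main() {
        // 100 entries at 10 bits each = 1000 bits -> ceil(1000 / 8) = 125 bytes.
        assert_eq!(bits_capacity(100, 10), 125);
    }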