changeset 49930:e98fd81bb151

rust-clippy: fix most warnings in `hg-core`

All of these are simple changes; most are clear improvements and the rest are at worst equivalent. The remaining warnings need either a bigger refactor, as for the nested "revlog" module, or changes in the `bytes-cast` dependency, which we own. This will be done sometime in the future.
author Raphaël Gomès <rgomes@octobus.net>
date Mon, 09 Jan 2023 19:18:43 +0100
parents 5f1cd6839c69
children fba29deebfe7
files rust/hg-core/src/ancestors.rs rust/hg-core/src/config/config.rs rust/hg-core/src/config/layer.rs rust/hg-core/src/config/values.rs rust/hg-core/src/copy_tracing.rs rust/hg-core/src/dagops.rs rust/hg-core/src/dirstate.rs rust/hg-core/src/dirstate/dirs_multiset.rs rust/hg-core/src/dirstate/entry.rs rust/hg-core/src/dirstate_tree/dirstate_map.rs rust/hg-core/src/dirstate_tree/on_disk.rs rust/hg-core/src/dirstate_tree/owning.rs rust/hg-core/src/dirstate_tree/status.rs rust/hg-core/src/discovery.rs rust/hg-core/src/filepatterns.rs rust/hg-core/src/lock.rs rust/hg-core/src/matchers.rs rust/hg-core/src/narrow.rs rust/hg-core/src/operations/cat.rs rust/hg-core/src/repo.rs rust/hg-core/src/revlog/changelog.rs rust/hg-core/src/revlog/filelog.rs rust/hg-core/src/revlog/index.rs rust/hg-core/src/revlog/nodemap.rs rust/hg-core/src/revlog/path_encode.rs rust/hg-core/src/revlog/revlog.rs rust/hg-core/src/revset.rs rust/hg-core/src/sparse.rs rust/hg-core/src/utils.rs rust/hg-core/src/utils/files.rs rust/hg-core/src/utils/hg_path.rs rust/hg-core/tests/test_missing_ancestors.rs
diffstat 32 files changed, 258 insertions(+), 300 deletions(-)
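
Most hunks below are instances of a handful of mechanical clippy fixes, among them `needless_return`, `needless_borrow`, `match_like_matches_macro` and `stable_sort_primitive`. A minimal self-contained sketch of what those lints ask for, using made-up names rather than actual `hg-core` code:

#[derive(Debug)]
enum ConfigOrigin {
    CommandLine,
    File,
}

// `match_like_matches_macro`: an `if let ... { true } else { false }`
// collapses into the `matches!` macro.
fn is_from_command_line(origin: &ConfigOrigin) -> bool {
    matches!(origin, ConfigOrigin::CommandLine)
}

// `needless_return`: the final expression is returned without `return`.
fn is_section_to_delete(section: &[u8]) -> bool {
    let sections_to_delete: &[&[u8]] = &[b"defaults", b"commands"];
    sections_to_delete.contains(&section)
}

fn main() {
    // `stable_sort_primitive`: primitives have no meaningful stable order,
    // so `sort_unstable` is preferred and avoids the merge-sort allocation.
    let mut revs = vec![5, 3, 1, 3];
    revs.sort_unstable();
    assert_eq!(revs, [1, 3, 3, 5]);

    assert!(is_from_command_line(&ConfigOrigin::CommandLine));
    assert!(is_section_to_delete(b"defaults"));
    println!("{:?} {:?}", ConfigOrigin::File, revs);
}
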
line diff
--- a/rust/hg-core/src/ancestors.rs	Mon Jan 09 19:14:14 2023 +0100
+++ b/rust/hg-core/src/ancestors.rs	Mon Jan 09 19:18:43 2023 +0100
@@ -175,7 +175,7 @@
     ///
     /// This is useful in unit tests, but also setdiscovery.py does
     /// read the bases attribute of a ancestor.missingancestors instance.
-    pub fn get_bases<'a>(&'a self) -> &'a HashSet<Revision> {
+    pub fn get_bases(&self) -> &HashSet<Revision> {
         &self.bases
     }
 
@@ -288,7 +288,7 @@
             .collect();
         let revs_visit = &mut revs;
         let mut both_visit: HashSet<Revision> =
-            revs_visit.intersection(&bases_visit).cloned().collect();
+            revs_visit.intersection(bases_visit).cloned().collect();
         if revs_visit.is_empty() {
             return Ok(Vec::new());
         }
@@ -503,18 +503,18 @@
             MissingAncestors::new(SampleGraph, [5, 3, 1, 3].iter().cloned());
         let mut as_vec: Vec<Revision> =
             missing_ancestors.get_bases().iter().cloned().collect();
-        as_vec.sort();
+        as_vec.sort_unstable();
         assert_eq!(as_vec, [1, 3, 5]);
         assert_eq!(missing_ancestors.max_base, 5);
 
         missing_ancestors.add_bases([3, 7, 8].iter().cloned());
         as_vec = missing_ancestors.get_bases().iter().cloned().collect();
-        as_vec.sort();
+        as_vec.sort_unstable();
         assert_eq!(as_vec, [1, 3, 5, 7, 8]);
         assert_eq!(missing_ancestors.max_base, 8);
 
         as_vec = missing_ancestors.bases_heads()?.iter().cloned().collect();
-        as_vec.sort();
+        as_vec.sort_unstable();
         assert_eq!(as_vec, [3, 5, 7, 8]);
         Ok(())
     }
@@ -531,7 +531,7 @@
             .remove_ancestors_from(&mut revset)
             .unwrap();
         let mut as_vec: Vec<Revision> = revset.into_iter().collect();
-        as_vec.sort();
+        as_vec.sort_unstable();
         assert_eq!(as_vec.as_slice(), expected);
     }
 
@@ -572,6 +572,7 @@
     /// the one in test-ancestor.py. An early version of Rust MissingAncestors
     /// failed this, yet none of the integration tests of the whole suite
     /// catched it.
+    #[allow(clippy::unnecessary_cast)]
     #[test]
     fn test_remove_ancestors_from_case1() {
         let graph: VecGraph = vec![
--- a/rust/hg-core/src/config/config.rs	Mon Jan 09 19:14:14 2023 +0100
+++ b/rust/hg-core/src/config/config.rs	Mon Jan 09 19:18:43 2023 +0100
@@ -117,7 +117,7 @@
     }
     let sections_to_delete: &[&[u8]] =
         &[b"defaults", b"commands", b"command-templates"];
-    return sections_to_delete.contains(&section);
+    sections_to_delete.contains(&section)
 }
 
 impl Config {
@@ -207,7 +207,7 @@
             file_paths.sort();
             for file_path in &file_paths {
                 if file_path.extension() == Some(std::ffi::OsStr::new("rc")) {
-                    self.add_trusted_file(&file_path)?
+                    self.add_trusted_file(file_path)?
                 }
             }
         }
@@ -259,7 +259,7 @@
         // `mercurial/helptext/config.txt` suggests it should be reversed
         if let Some(installation_prefix) = hg.parent().and_then(Path::parent) {
             if installation_prefix != root {
-                add_for_prefix(&installation_prefix)?
+                add_for_prefix(installation_prefix)?
             }
         }
         add_for_prefix(root)?;
@@ -348,7 +348,7 @@
         expected_type: &'static str,
         parse: impl Fn(&'config [u8]) -> Option<T>,
     ) -> Result<Option<T>, ConfigValueParseError> {
-        match self.get_inner(&section, &item) {
+        match self.get_inner(section, item) {
             Some((layer, v)) => match parse(&v.bytes) {
                 Some(b) => Ok(Some(b)),
                 None => Err(ConfigValueParseError {
@@ -463,7 +463,7 @@
     ) -> Option<(&ConfigLayer, &ConfigValue)> {
         // Filter out the config items that are hidden by [PLAIN].
         // This differs from python hg where we delete them from the config.
-        let should_ignore = should_ignore(&self.plain, &section, &item);
+        let should_ignore = should_ignore(&self.plain, section, item);
         for layer in self.layers.iter().rev() {
             if !layer.trusted {
                 continue;
@@ -480,8 +480,8 @@
             {
                 continue;
             }
-            if let Some(v) = layer.get(&section, &item) {
-                return Some((&layer, v));
+            if let Some(v) = layer.get(section, item) {
+                return Some((layer, v));
             }
         }
         None
@@ -561,7 +561,7 @@
     fn get_all(&self, section: &[u8], item: &[u8]) -> Vec<&[u8]> {
         let mut res = vec![];
         for layer in self.layers.iter().rev() {
-            if let Some(v) = layer.get(&section, &item) {
+            if let Some(v) = layer.get(section, item) {
                 res.push(v.bytes.as_ref());
             }
         }
@@ -592,11 +592,11 @@
         add(b"git", b"git", b"1");
         add(b"git", b"showfunc", b"1");
         add(b"git", b"word-diff", b"1");
-        return layer;
+        layer
     }
 
     // introduce the tweaked defaults as implied by ui.tweakdefaults
-    pub fn tweakdefaults<'a>(&mut self) -> () {
+    pub fn tweakdefaults(&mut self) {
         self.layers.insert(0, Config::tweakdefaults_layer());
     }
 }
--- a/rust/hg-core/src/config/layer.rs	Mon Jan 09 19:14:14 2023 +0100
+++ b/rust/hg-core/src/config/layer.rs	Mon Jan 09 19:18:43 2023 +0100
@@ -94,11 +94,7 @@
 
     /// Returns whether this layer comes from `--config` CLI arguments
     pub(crate) fn is_from_command_line(&self) -> bool {
-        if let ConfigOrigin::CommandLine = self.origin {
-            true
-        } else {
-            false
-        }
+        matches!(self.origin, ConfigOrigin::CommandLine)
     }
 
     /// Add an entry to the config, overwriting the old one if already present.
@@ -111,13 +107,13 @@
     ) {
         self.sections
             .entry(section)
-            .or_insert_with(|| HashMap::new())
+            .or_insert_with(HashMap::new)
             .insert(item, ConfigValue { bytes: value, line });
     }
 
     /// Returns the config value in `<section>.<item>` if it exists
     pub fn get(&self, section: &[u8], item: &[u8]) -> Option<&ConfigValue> {
-        Some(self.sections.get(section)?.get(item)?)
+        self.sections.get(section)?.get(item)
     }
 
     /// Returns the keys defined in the given section
@@ -171,7 +167,7 @@
 
         while let Some((index, bytes)) = lines_iter.next() {
             let line = Some(index + 1);
-            if let Some(m) = INCLUDE_RE.captures(&bytes) {
+            if let Some(m) = INCLUDE_RE.captures(bytes) {
                 let filename_bytes = &m[1];
                 let filename_bytes = crate::utils::expand_vars(filename_bytes);
                 // `Path::parent` only fails for the root directory,
@@ -205,18 +201,18 @@
                         }
                     }
                 }
-            } else if let Some(_) = EMPTY_RE.captures(&bytes) {
-            } else if let Some(m) = SECTION_RE.captures(&bytes) {
+            } else if EMPTY_RE.captures(bytes).is_some() {
+            } else if let Some(m) = SECTION_RE.captures(bytes) {
                 section = m[1].to_vec();
-            } else if let Some(m) = ITEM_RE.captures(&bytes) {
+            } else if let Some(m) = ITEM_RE.captures(bytes) {
                 let item = m[1].to_vec();
                 let mut value = m[2].to_vec();
                 loop {
                     match lines_iter.peek() {
                         None => break,
                         Some((_, v)) => {
-                            if let Some(_) = COMMENT_RE.captures(&v) {
-                            } else if let Some(_) = CONT_RE.captures(&v) {
+                            if COMMENT_RE.captures(v).is_some() {
+                            } else if CONT_RE.captures(v).is_some() {
                                 value.extend(b"\n");
                                 value.extend(&m[1]);
                             } else {
@@ -227,7 +223,7 @@
                     lines_iter.next();
                 }
                 current_layer.add(section.clone(), item, value, line);
-            } else if let Some(m) = UNSET_RE.captures(&bytes) {
+            } else if let Some(m) = UNSET_RE.captures(bytes) {
                 if let Some(map) = current_layer.sections.get_mut(&section) {
                     map.remove(&m[1]);
                 }
@@ -261,7 +257,7 @@
         sections.sort_by(|e0, e1| e0.0.cmp(e1.0));
 
         for (section, items) in sections.into_iter() {
-            let mut items: Vec<_> = items.into_iter().collect();
+            let mut items: Vec<_> = items.iter().collect();
             items.sort_by(|e0, e1| e0.0.cmp(e1.0));
 
             for (item, config_entry) in items {
--- a/rust/hg-core/src/config/values.rs	Mon Jan 09 19:14:14 2023 +0100
+++ b/rust/hg-core/src/config/values.rs	Mon Jan 09 19:18:43 2023 +0100
@@ -200,11 +200,7 @@
 
     // https://docs.python.org/3/library/stdtypes.html?#bytes.isspace
     fn is_space(byte: u8) -> bool {
-        if let b' ' | b'\t' | b'\n' | b'\r' | b'\x0b' | b'\x0c' = byte {
-            true
-        } else {
-            false
-        }
+        matches!(byte, b' ' | b'\t' | b'\n' | b'\r' | b'\x0b' | b'\x0c')
     }
 }
 
--- a/rust/hg-core/src/copy_tracing.rs	Mon Jan 09 19:14:14 2023 +0100
+++ b/rust/hg-core/src/copy_tracing.rs	Mon Jan 09 19:18:43 2023 +0100
@@ -59,7 +59,7 @@
         Self {
             rev,
             path: winner.path,
-            overwritten: overwritten,
+            overwritten,
         }
     }
 
@@ -489,7 +489,7 @@
                         if cs1 == cs2 {
                             cs1.mark_delete(current_rev);
                         } else {
-                            cs1.mark_delete_with_pair(current_rev, &cs2);
+                            cs1.mark_delete_with_pair(current_rev, cs2);
                         }
                         e2.insert(cs1.clone());
                     }
@@ -513,15 +513,14 @@
 ) {
     let dest = path_map.tokenize(path_dest);
     let source = path_map.tokenize(path_source);
-    let entry;
-    if let Some(v) = base_copies.get(&source) {
-        entry = match &v.path {
+    let entry = if let Some(v) = base_copies.get(&source) {
+        match &v.path {
             Some(path) => Some((*(path)).to_owned()),
             None => Some(source.to_owned()),
         }
     } else {
-        entry = Some(source.to_owned());
-    }
+        Some(source.to_owned())
+    };
     // Each new entry is introduced by the children, we
     // record this information as we will need it to take
     // the right decision when merging conflicting copy
@@ -563,17 +562,15 @@
                 MergePick::Major | MergePick::Any => (src_major, src_minor),
                 MergePick::Minor => (src_minor, src_major),
             };
-            MergeResult::UseNewValue(CopySource::new_from_merge(
+            MergeResult::NewValue(CopySource::new_from_merge(
                 current_merge,
                 winner,
                 loser,
             ))
         } else {
             match pick {
-                MergePick::Any | MergePick::Major => {
-                    MergeResult::UseRightValue
-                }
-                MergePick::Minor => MergeResult::UseLeftValue,
+                MergePick::Any | MergePick::Major => MergeResult::RightValue,
+                MergePick::Minor => MergeResult::LeftValue,
             }
         }
     })
--- a/rust/hg-core/src/dagops.rs	Mon Jan 09 19:14:14 2023 +0100
+++ b/rust/hg-core/src/dagops.rs	Mon Jan 09 19:18:43 2023 +0100
@@ -181,7 +181,7 @@
         let mut revs: HashSet<Revision> = revs.iter().cloned().collect();
         retain_heads(graph, &mut revs)?;
         let mut as_vec: Vec<Revision> = revs.iter().cloned().collect();
-        as_vec.sort();
+        as_vec.sort_unstable();
         Ok(as_vec)
     }
 
@@ -206,7 +206,7 @@
     ) -> Result<Vec<Revision>, GraphError> {
         let heads = heads(graph, revs.iter())?;
         let mut as_vec: Vec<Revision> = heads.iter().cloned().collect();
-        as_vec.sort();
+        as_vec.sort_unstable();
         Ok(as_vec)
     }
 
@@ -231,7 +231,7 @@
     ) -> Result<Vec<Revision>, GraphError> {
         let set: HashSet<_> = revs.iter().cloned().collect();
         let mut as_vec = roots(graph, &set)?;
-        as_vec.sort();
+        as_vec.sort_unstable();
         Ok(as_vec)
     }
 
--- a/rust/hg-core/src/dirstate.rs	Mon Jan 09 19:14:14 2023 +0100
+++ b/rust/hg-core/src/dirstate.rs	Mon Jan 09 19:18:43 2023 +0100
@@ -32,7 +32,7 @@
     };
 
     pub fn is_merge(&self) -> bool {
-        return !(self.p2 == NULL_NODE);
+        !(self.p2 == NULL_NODE)
     }
 }
 
--- a/rust/hg-core/src/dirstate/dirs_multiset.rs	Mon Jan 09 19:14:14 2023 +0100
+++ b/rust/hg-core/src/dirstate/dirs_multiset.rs	Mon Jan 09 19:18:43 2023 +0100
@@ -232,7 +232,7 @@
     #[test]
     fn test_delete_path_empty_path() {
         let mut map =
-            DirsMultiset::from_manifest(&vec![HgPathBuf::new()]).unwrap();
+            DirsMultiset::from_manifest(&[HgPathBuf::new()]).unwrap();
         let path = HgPath::new(b"");
         assert_eq!(Ok(()), map.delete_path(path));
         assert_eq!(
--- a/rust/hg-core/src/dirstate/entry.rs	Mon Jan 09 19:14:14 2023 +0100
+++ b/rust/hg-core/src/dirstate/entry.rs	Mon Jan 09 19:18:43 2023 +0100
@@ -180,11 +180,7 @@
         if self.truncated_seconds != other.truncated_seconds {
             false
         } else if self.nanoseconds == 0 || other.nanoseconds == 0 {
-            if self.second_ambiguous {
-                false
-            } else {
-                true
-            }
+            !self.second_ambiguous
         } else {
             self.nanoseconds == other.nanoseconds
         }
@@ -706,9 +702,9 @@
     }
 }
 
-impl Into<u8> for EntryState {
-    fn into(self) -> u8 {
-        match self {
+impl From<EntryState> for u8 {
+    fn from(val: EntryState) -> Self {
+        match val {
             EntryState::Normal => b'n',
             EntryState::Added => b'a',
             EntryState::Removed => b'r',
--- a/rust/hg-core/src/dirstate_tree/dirstate_map.rs	Mon Jan 09 19:14:14 2023 +0100
+++ b/rust/hg-core/src/dirstate_tree/dirstate_map.rs	Mon Jan 09 19:18:43 2023 +0100
@@ -320,9 +320,7 @@
         on_disk: &'on_disk [u8],
     ) -> Result<Option<&'tree HgPath>, DirstateV2ParseError> {
         match self {
-            NodeRef::InMemory(_path, node) => {
-                Ok(node.copy_source.as_ref().map(|s| &**s))
-            }
+            NodeRef::InMemory(_path, node) => Ok(node.copy_source.as_deref()),
             NodeRef::OnDisk(node) => node.copy_source(on_disk),
         }
     }
@@ -340,9 +338,9 @@
                     Cow::Owned(in_memory) => BorrowedPath::InMemory(in_memory),
                 })
             }
-            NodeRef::OnDisk(node) => node
-                .copy_source(on_disk)?
-                .map(|source| BorrowedPath::OnDisk(source)),
+            NodeRef::OnDisk(node) => {
+                node.copy_source(on_disk)?.map(BorrowedPath::OnDisk)
+            }
         })
     }
 
@@ -418,10 +416,7 @@
 
 impl NodeData {
     fn has_entry(&self) -> bool {
-        match self {
-            NodeData::Entry(_) => true,
-            _ => false,
-        }
+        matches!(self, NodeData::Entry(_))
     }
 
     fn as_entry(&self) -> Option<&DirstateEntry> {
@@ -509,7 +504,7 @@
                 Ok(())
             },
         )?;
-        let parents = Some(parents.clone());
+        let parents = Some(*parents);
 
         Ok((map, parents))
     }
@@ -681,10 +676,8 @@
                         .checked_sub(1)
                         .expect("tracked count to be >= 0");
                 }
-            } else {
-                if wc_tracked {
-                    ancestor.tracked_descendants_count += 1;
-                }
+            } else if wc_tracked {
+                ancestor.tracked_descendants_count += 1;
             }
         })?;
 
@@ -734,7 +727,7 @@
             ancestor.tracked_descendants_count += tracked_count_increment;
         })?;
         if let Some(old_entry) = old_entry_opt {
-            let mut e = old_entry.clone();
+            let mut e = old_entry;
             if e.tracked() {
                 // XXX
                 // This is probably overkill for more case, but we need this to
@@ -775,7 +768,7 @@
                     .expect("tracked_descendants_count should be >= 0");
             })?
             .expect("node should exist");
-        let mut new_entry = old_entry.clone();
+        let mut new_entry = old_entry;
         new_entry.set_untracked();
         node.data = NodeData::Entry(new_entry);
         Ok(())
@@ -803,7 +796,7 @@
                 }
             })?
             .expect("node should exist");
-        let mut new_entry = old_entry.clone();
+        let mut new_entry = old_entry;
         new_entry.set_clean(mode, size, mtime);
         node.data = NodeData::Entry(new_entry);
         Ok(())
@@ -1364,7 +1357,7 @@
         value: &HgPath,
     ) -> Result<Option<HgPathBuf>, DirstateV2ParseError> {
         self.with_dmap_mut(|map| {
-            let node = map.get_or_insert_node(&key, |_ancestor| {})?;
+            let node = map.get_or_insert_node(key, |_ancestor| {})?;
             let had_copy_source = node.copy_source.is_none();
             let old = node
                 .copy_source
@@ -1864,11 +1857,8 @@
         map.set_untracked(p(b"some/nested/removed"))?;
         assert_eq!(map.get_map().unreachable_bytes, 0);
 
-        match map.get_map().root {
-            ChildNodes::InMemory(_) => {
-                panic!("root should not have been mutated")
-            }
-            _ => (),
+        if let ChildNodes::InMemory(_) = map.get_map().root {
+            panic!("root should not have been mutated")
         }
         // We haven't mutated enough (nothing, actually), we should still be in
         // the append strategy
@@ -1879,9 +1869,8 @@
         let unreachable_bytes = map.get_map().unreachable_bytes;
         assert!(unreachable_bytes > 0);
 
-        match map.get_map().root {
-            ChildNodes::OnDisk(_) => panic!("root should have been mutated"),
-            _ => (),
+        if let ChildNodes::OnDisk(_) = map.get_map().root {
+            panic!("root should have been mutated")
         }
 
         // This should not mutate the structure either, since `root` has
@@ -1889,22 +1878,20 @@
         map.set_untracked(p(b"merged"))?;
         assert_eq!(map.get_map().unreachable_bytes, unreachable_bytes);
 
-        match map.get_map().get_node(p(b"other/added_with_p2"))?.unwrap() {
-            NodeRef::InMemory(_, _) => {
-                panic!("'other/added_with_p2' should not have been mutated")
-            }
-            _ => (),
+        if let NodeRef::InMemory(_, _) =
+            map.get_map().get_node(p(b"other/added_with_p2"))?.unwrap()
+        {
+            panic!("'other/added_with_p2' should not have been mutated")
         }
         // But this should, since it's in a different path
         // than `<root>some/nested/add`
         map.set_untracked(p(b"other/added_with_p2"))?;
         assert!(map.get_map().unreachable_bytes > unreachable_bytes);
 
-        match map.get_map().get_node(p(b"other/added_with_p2"))?.unwrap() {
-            NodeRef::OnDisk(_) => {
-                panic!("'other/added_with_p2' should have been mutated")
-            }
-            _ => (),
+        if let NodeRef::OnDisk(_) =
+            map.get_map().get_node(p(b"other/added_with_p2"))?.unwrap()
+        {
+            panic!("'other/added_with_p2' should have been mutated")
         }
 
         // We have rewritten most of the tree, we should create a new file
--- a/rust/hg-core/src/dirstate_tree/on_disk.rs	Mon Jan 09 19:14:14 2023 +0100
+++ b/rust/hg-core/src/dirstate_tree/on_disk.rs	Mon Jan 09 19:18:43 2023 +0100
@@ -246,11 +246,9 @@
     pub fn parents(&self) -> DirstateParents {
         use crate::Node;
         let p1 = Node::try_from(&self.header.parent_1[..USED_NODE_ID_BYTES])
-            .unwrap()
-            .clone();
+            .unwrap();
         let p2 = Node::try_from(&self.header.parent_2[..USED_NODE_ID_BYTES])
-            .unwrap()
-            .clone();
+            .unwrap();
         DirstateParents { p1, p2 }
     }
 
@@ -322,7 +320,7 @@
         read_hg_path(on_disk, self.full_path)
     }
 
-    pub(super) fn base_name_start<'on_disk>(
+    pub(super) fn base_name_start(
         &self,
     ) -> Result<usize, DirstateV2ParseError> {
         let start = self.base_name_start.get();
@@ -355,7 +353,7 @@
         ))
     }
 
-    pub(super) fn has_copy_source<'on_disk>(&self) -> bool {
+    pub(super) fn has_copy_source(&self) -> bool {
         self.copy_source.start.get() != 0
     }
 
@@ -414,12 +412,12 @@
         } else {
             libc::S_IFREG
         };
-        let permisions = if self.flags().contains(Flags::MODE_EXEC_PERM) {
+        let permissions = if self.flags().contains(Flags::MODE_EXEC_PERM) {
             0o755
         } else {
             0o644
         };
-        (file_type | permisions).into()
+        file_type | permissions
     }
 
     fn mtime(&self) -> Result<TruncatedTimestamp, DirstateV2ParseError> {
--- a/rust/hg-core/src/dirstate_tree/owning.rs	Mon Jan 09 19:14:14 2023 +0100
+++ b/rust/hg-core/src/dirstate_tree/owning.rs	Mon Jan 09 19:18:43 2023 +0100
@@ -24,7 +24,7 @@
 
         OwningDirstateMapBuilder {
             on_disk,
-            map_builder: |bytes| DirstateMap::empty(&bytes),
+            map_builder: |bytes| DirstateMap::empty(bytes),
         }
         .build()
     }
@@ -42,7 +42,7 @@
             OwningDirstateMapTryBuilder {
                 on_disk,
                 map_builder: |bytes| {
-                    DirstateMap::new_v1(&bytes).map(|(dmap, p)| {
+                    DirstateMap::new_v1(bytes).map(|(dmap, p)| {
                         parents = p.unwrap_or(DirstateParents::NULL);
                         dmap
                     })
@@ -66,7 +66,7 @@
         OwningDirstateMapTryBuilder {
             on_disk,
             map_builder: |bytes| {
-                DirstateMap::new_v2(&bytes, data_size, metadata)
+                DirstateMap::new_v2(bytes, data_size, metadata)
             },
         }
         .try_build()
--- a/rust/hg-core/src/dirstate_tree/status.rs	Mon Jan 09 19:14:14 2023 +0100
+++ b/rust/hg-core/src/dirstate_tree/status.rs	Mon Jan 09 19:18:43 2023 +0100
@@ -154,7 +154,7 @@
         hg_path,
         &DirEntry {
             hg_path: Cow::Borrowed(HgPath::new(b"")),
-            fs_path: Cow::Borrowed(&root_dir),
+            fs_path: Cow::Borrowed(root_dir),
             symlink_metadata: None,
             file_type: FakeFileType::Directory,
         },
@@ -245,7 +245,7 @@
             None => false,
             Some(parent) => {
                 *(parent.cache.get_or_init(|| {
-                    parent.force(ignore_fn) || ignore_fn(&self.path)
+                    parent.force(ignore_fn) || ignore_fn(self.path)
                 }))
             }
         }
@@ -402,7 +402,7 @@
                             let entry = DirEntry {
                                 hg_path: Cow::Borrowed(
                                     dirstate_node
-                                        .full_path(&self.dmap.on_disk)?,
+                                        .full_path(self.dmap.on_disk)?,
                                 ),
                                 fs_path: Cow::Borrowed(&fs_path),
                                 symlink_metadata: Some(fs_metadata),
@@ -420,7 +420,8 @@
                         Err(error) => {
                             let hg_path =
                                 dirstate_node.full_path(self.dmap.on_disk)?;
-                            Ok(self.io_error(error, hg_path))
+                            self.io_error(error, hg_path);
+                            Ok(())
                         }
                     }
                 })
@@ -472,28 +473,25 @@
         .par_bridge()
         .map(|pair| {
             use itertools::EitherOrBoth::*;
-            let has_dirstate_node_or_is_ignored;
-            match pair {
+            let has_dirstate_node_or_is_ignored = match pair {
                 Both(dirstate_node, fs_entry) => {
                     self.traverse_fs_and_dirstate(
-                        &fs_entry,
+                        fs_entry,
                         dirstate_node,
                         has_ignored_ancestor,
                     )?;
-                    has_dirstate_node_or_is_ignored = true
+                    true
                 }
                 Left(dirstate_node) => {
                     self.traverse_dirstate_only(dirstate_node)?;
-                    has_dirstate_node_or_is_ignored = true;
+                    true
                 }
-                Right(fs_entry) => {
-                    has_dirstate_node_or_is_ignored = self.traverse_fs_only(
-                        has_ignored_ancestor.force(&self.ignore_fn),
-                        directory_hg_path,
-                        fs_entry,
-                    )
-                }
-            }
+                Right(fs_entry) => self.traverse_fs_only(
+                    has_ignored_ancestor.force(&self.ignore_fn),
+                    directory_hg_path,
+                    fs_entry,
+                ),
+            };
             Ok(has_dirstate_node_or_is_ignored)
         })
         .try_reduce(|| true, |a, b| Ok(a && b))
@@ -524,7 +522,7 @@
                     .push(hg_path.detach_from_tree())
             }
             let is_ignored = HasIgnoredAncestor::create(
-                Some(&has_ignored_ancestor),
+                Some(has_ignored_ancestor),
                 hg_path,
             );
             let is_at_repo_root = false;
@@ -544,14 +542,14 @@
                 outdated_dircache,
             )?
         } else {
-            if file_or_symlink && self.matcher.matches(&hg_path) {
+            if file_or_symlink && self.matcher.matches(hg_path) {
                 if let Some(entry) = dirstate_node.entry()? {
                     if !entry.any_tracked() {
                         // Forward-compat if we start tracking unknown/ignored
                         // files for caching reasons
                         self.mark_unknown_or_ignored(
                             has_ignored_ancestor.force(&self.ignore_fn),
-                            &hg_path,
+                            hg_path,
                         );
                     }
                     if entry.added() {
@@ -620,12 +618,13 @@
             Ok(meta) => meta,
             Err(_) => return Ok(()),
         };
-        let directory_mtime = if let Ok(option) =
-            TruncatedTimestamp::for_reliable_mtime_of(&metadata, status_start)
-        {
-            if let Some(directory_mtime) = option {
-                directory_mtime
-            } else {
+
+        let directory_mtime = match TruncatedTimestamp::for_reliable_mtime_of(
+            &metadata,
+            status_start,
+        ) {
+            Ok(Some(directory_mtime)) => directory_mtime,
+            Ok(None) => {
                 // The directory was modified too recently,
                 // don’t cache its `read_dir` results.
                 //
@@ -643,9 +642,10 @@
                 // by the same script.
                 return Ok(());
             }
-        } else {
-            // OS/libc does not support mtime?
-            return Ok(());
+            Err(_) => {
+                // OS/libc does not support mtime?
+                return Ok(());
+            }
         };
         // We’ve observed (through `status_start`) that time has
         // “progressed” since `directory_mtime`, so any further
@@ -713,8 +713,9 @@
         {
             self.push_outcome(Outcome::Modified, dirstate_node)?
         } else {
-            let mtime_looks_clean;
-            if let Some(dirstate_mtime) = entry.truncated_mtime() {
+            let mtime_looks_clean = if let Some(dirstate_mtime) =
+                entry.truncated_mtime()
+            {
                 let fs_mtime = TruncatedTimestamp::for_mtime_of(&fs_metadata)
                     .expect("OS/libc does not support mtime?");
                 // There might be a change in the future if for example the
@@ -722,10 +723,10 @@
                 // case where the issues the user would face
                 // would be a lot worse and there is nothing we
                 // can really do.
-                mtime_looks_clean = fs_mtime.likely_equal(dirstate_mtime)
+                fs_mtime.likely_equal(dirstate_mtime)
             } else {
                 // No mtime in the dirstate entry
-                mtime_looks_clean = false
+                false
             };
             if !mtime_looks_clean {
                 self.push_outcome(Outcome::Unsure, dirstate_node)?
@@ -769,7 +770,7 @@
                 if entry.removed() {
                     self.push_outcome(Outcome::Removed, dirstate_node)?
                 } else {
-                    self.push_outcome(Outcome::Deleted, &dirstate_node)?
+                    self.push_outcome(Outcome::Deleted, dirstate_node)?
                 }
             }
         }
@@ -816,26 +817,24 @@
                 }
             }
             is_ignored
+        } else if file_or_symlink {
+            if self.matcher.matches(&hg_path) {
+                self.mark_unknown_or_ignored(
+                    has_ignored_ancestor,
+                    &BorrowedPath::InMemory(&hg_path),
+                )
+            } else {
+                // We haven’t computed whether this path is ignored. It
+                // might not be, and a future run of status might have a
+                // different matcher that matches it. So treat it as not
+                // ignored. That is, inhibit readdir caching of the parent
+                // directory.
+                false
+            }
         } else {
-            if file_or_symlink {
-                if self.matcher.matches(&hg_path) {
-                    self.mark_unknown_or_ignored(
-                        has_ignored_ancestor,
-                        &BorrowedPath::InMemory(&hg_path),
-                    )
-                } else {
-                    // We haven’t computed whether this path is ignored. It
-                    // might not be, and a future run of status might have a
-                    // different matcher that matches it. So treat it as not
-                    // ignored. That is, inhibit readdir caching of the parent
-                    // directory.
-                    false
-                }
-            } else {
-                // This is neither a directory, a plain file, or a symlink.
-                // Treat it like an ignored file.
-                true
-            }
+            // This is neither a directory, a plain file, or a symlink.
+            // Treat it like an ignored file.
+            true
         }
     }
 
@@ -845,7 +844,7 @@
         has_ignored_ancestor: bool,
         hg_path: &BorrowedPath<'_, 'on_disk>,
     ) -> bool {
-        let is_ignored = has_ignored_ancestor || (self.ignore_fn)(&hg_path);
+        let is_ignored = has_ignored_ancestor || (self.ignore_fn)(hg_path);
         if is_ignored {
             if self.options.list_ignored {
                 self.push_outcome_without_copy_source(
@@ -853,13 +852,8 @@
                     hg_path,
                 )
             }
-        } else {
-            if self.options.list_unknown {
-                self.push_outcome_without_copy_source(
-                    Outcome::Unknown,
-                    hg_path,
-                )
-            }
+        } else if self.options.list_unknown {
+            self.push_outcome_without_copy_source(Outcome::Unknown, hg_path)
         }
         is_ignored
     }
--- a/rust/hg-core/src/discovery.rs	Mon Jan 09 19:14:14 2023 +0100
+++ b/rust/hg-core/src/discovery.rs	Mon Jan 09 19:18:43 2023 +0100
@@ -194,7 +194,7 @@
         size: usize,
     ) -> Vec<Revision> {
         if !self.randomize {
-            sample.sort();
+            sample.sort_unstable();
             sample.truncate(size);
             return sample;
         }
@@ -513,14 +513,14 @@
     ) -> Vec<Revision> {
         let mut as_vec: Vec<Revision> =
             disco.undecided.as_ref().unwrap().iter().cloned().collect();
-        as_vec.sort();
+        as_vec.sort_unstable();
         as_vec
     }
 
     fn sorted_missing(disco: &PartialDiscovery<SampleGraph>) -> Vec<Revision> {
         let mut as_vec: Vec<Revision> =
             disco.missing.iter().cloned().collect();
-        as_vec.sort();
+        as_vec.sort_unstable();
         as_vec
     }
 
@@ -529,7 +529,7 @@
     ) -> Result<Vec<Revision>, GraphError> {
         let mut as_vec: Vec<Revision> =
             disco.common_heads()?.iter().cloned().collect();
-        as_vec.sort();
+        as_vec.sort_unstable();
         Ok(as_vec)
     }
 
@@ -621,7 +621,7 @@
         disco.undecided = Some((1..=13).collect());
 
         let mut sample_vec = disco.take_quick_sample(vec![], 4)?;
-        sample_vec.sort();
+        sample_vec.sort_unstable();
         assert_eq!(sample_vec, vec![10, 11, 12, 13]);
         Ok(())
     }
@@ -632,7 +632,7 @@
         disco.ensure_undecided()?;
 
         let mut sample_vec = disco.take_quick_sample(vec![12], 4)?;
-        sample_vec.sort();
+        sample_vec.sort_unstable();
         // r12's only parent is r9, whose unique grand-parent through the
         // diamond shape is r4. This ends there because the distance from r4
         // to the root is only 3.
@@ -650,11 +650,11 @@
         assert_eq!(cache.get(&10).cloned(), None);
 
         let mut children_4 = cache.get(&4).cloned().unwrap();
-        children_4.sort();
+        children_4.sort_unstable();
         assert_eq!(children_4, vec![5, 6, 7]);
 
         let mut children_7 = cache.get(&7).cloned().unwrap();
-        children_7.sort();
+        children_7.sort_unstable();
         assert_eq!(children_7, vec![9, 11]);
 
         Ok(())
@@ -684,7 +684,7 @@
         let (sample_set, size) = disco.bidirectional_sample(7)?;
         assert_eq!(size, 7);
         let mut sample: Vec<Revision> = sample_set.into_iter().collect();
-        sample.sort();
+        sample.sort_unstable();
         // our DAG is a bit too small for the results to be really interesting
         // at least it shows that
         // - we went both ways
--- a/rust/hg-core/src/filepatterns.rs	Mon Jan 09 19:14:14 2023 +0100
+++ b/rust/hg-core/src/filepatterns.rs	Mon Jan 09 19:18:43 2023 +0100
@@ -313,7 +313,7 @@
         PatternSyntax::RootGlob
         | PatternSyntax::Path
         | PatternSyntax::RelGlob
-        | PatternSyntax::RootFiles => normalize_path_bytes(&pattern),
+        | PatternSyntax::RootFiles => normalize_path_bytes(pattern),
         PatternSyntax::Include | PatternSyntax::SubInclude => {
             return Err(PatternError::NonRegexPattern(entry.clone()))
         }
@@ -368,7 +368,7 @@
     let mut warnings: Vec<PatternFileWarning> = vec![];
 
     let mut current_syntax =
-        default_syntax_override.unwrap_or(b"relre:".as_ref());
+        default_syntax_override.unwrap_or_else(|| b"relre:".as_ref());
 
     for (line_number, mut line) in lines.split(|c| *c == b'\n').enumerate() {
         let line_number = line_number + 1;
@@ -402,7 +402,7 @@
             continue;
         }
 
-        let mut line_syntax: &[u8] = &current_syntax;
+        let mut line_syntax: &[u8] = current_syntax;
 
         for (s, rels) in SYNTAXES.iter() {
             if let Some(rest) = line.drop_prefix(rels) {
@@ -418,7 +418,7 @@
         }
 
         inputs.push(IgnorePattern::new(
-            parse_pattern_syntax(&line_syntax).map_err(|e| match e {
+            parse_pattern_syntax(line_syntax).map_err(|e| match e {
                 PatternError::UnsupportedSyntax(syntax) => {
                     PatternError::UnsupportedSyntaxInFile(
                         syntax,
@@ -428,7 +428,7 @@
                 }
                 _ => e,
             })?,
-            &line,
+            line,
             file_path,
         ));
     }
@@ -502,7 +502,7 @@
                 }
                 PatternSyntax::SubInclude => {
                     let mut sub_include = SubInclude::new(
-                        &root_dir,
+                        root_dir,
                         &entry.pattern,
                         &entry.source,
                     )?;
@@ -564,11 +564,11 @@
         let prefix = canonical_path(root_dir, root_dir, new_root)?;
 
         Ok(Self {
-            prefix: path_to_hg_path_buf(prefix).and_then(|mut p| {
+            prefix: path_to_hg_path_buf(prefix).map(|mut p| {
                 if !p.is_empty() {
                     p.push_byte(b'/');
                 }
-                Ok(p)
+                p
             })?,
             path: path.to_owned(),
             root: new_root.to_owned(),
--- a/rust/hg-core/src/lock.rs	Mon Jan 09 19:14:14 2023 +0100
+++ b/rust/hg-core/src/lock.rs	Mon Jan 09 19:18:43 2023 +0100
@@ -107,7 +107,7 @@
 fn lock_should_be_broken(data: &Option<String>) -> bool {
     (|| -> Option<bool> {
         let (prefix, pid) = data.as_ref()?.split_once(':')?;
-        if prefix != &*LOCK_PREFIX {
+        if prefix != *LOCK_PREFIX {
             return Some(false);
         }
         let process_is_running;
@@ -144,6 +144,8 @@
 
         /// Same as https://github.com/python/cpython/blob/v3.10.0/Modules/socketmodule.c#L5414
         const BUFFER_SIZE: usize = 1024;
+        // This cast is *needed* for platforms with signed chars
+        #[allow(clippy::unnecessary_cast)]
         let mut buffer = [0 as libc::c_char; BUFFER_SIZE];
         let hostname_bytes = unsafe {
             let result = libc::gethostname(buffer.as_mut_ptr(), BUFFER_SIZE);
--- a/rust/hg-core/src/matchers.rs	Mon Jan 09 19:14:14 2023 +0100
+++ b/rust/hg-core/src/matchers.rs	Mon Jan 09 19:18:43 2023 +0100
@@ -302,11 +302,11 @@
     }
 
     fn matches(&self, filename: &HgPath) -> bool {
-        (self.match_fn)(filename.as_ref())
+        (self.match_fn)(filename)
     }
 
     fn visit_children_set(&self, directory: &HgPath) -> VisitChildrenSet {
-        let dir = directory.as_ref();
+        let dir = directory;
         if self.prefix && self.roots.contains(dir) {
             return VisitChildrenSet::Recursive;
         }
@@ -318,11 +318,11 @@
             return VisitChildrenSet::This;
         }
 
-        if self.parents.contains(directory.as_ref()) {
+        if self.parents.contains(dir.as_ref()) {
             let multiset = self.get_all_parents_children();
             if let Some(children) = multiset.get(dir) {
                 return VisitChildrenSet::Set(
-                    children.into_iter().map(HgPathBuf::from).collect(),
+                    children.iter().map(HgPathBuf::from).collect(),
                 );
             }
         }
@@ -446,7 +446,7 @@
                 VisitChildrenSet::This
             }
             (VisitChildrenSet::Set(m1), VisitChildrenSet::Set(m2)) => {
-                let set: HashSet<_> = m1.intersection(&m2).cloned().collect();
+                let set: HashSet<_> = m1.intersection(m2).cloned().collect();
                 if set.is_empty() {
                     VisitChildrenSet::Empty
                 } else {
@@ -699,10 +699,9 @@
             PatternSyntax::RootGlob | PatternSyntax::Glob => {
                 let mut root = HgPathBuf::new();
                 for p in pattern.split(|c| *c == b'/') {
-                    if p.iter().any(|c| match *c {
-                        b'[' | b'{' | b'*' | b'?' => true,
-                        _ => false,
-                    }) {
+                    if p.iter()
+                        .any(|c| matches!(*c, b'[' | b'{' | b'*' | b'?'))
+                    {
                         break;
                     }
                     root.push(HgPathBuf::from_bytes(p).as_ref());
@@ -780,10 +779,10 @@
 
 /// Returns a function that checks whether a given file (in the general sense)
 /// should be matched.
-fn build_match<'a, 'b>(
+fn build_match<'a>(
     ignore_patterns: Vec<IgnorePattern>,
-) -> PatternResult<(Vec<u8>, IgnoreFnType<'b>)> {
-    let mut match_funcs: Vec<IgnoreFnType<'b>> = vec![];
+) -> PatternResult<(Vec<u8>, IgnoreFnType<'a>)> {
+    let mut match_funcs: Vec<IgnoreFnType<'a>> = vec![];
     // For debugging and printing
     let mut patterns = vec![];
 
@@ -921,9 +920,8 @@
             dirs,
             parents,
         } = roots_dirs_and_parents(&ignore_patterns)?;
-        let prefix = ignore_patterns.iter().all(|k| match k.syntax {
-            PatternSyntax::Path | PatternSyntax::RelPath => true,
-            _ => false,
+        let prefix = ignore_patterns.iter().all(|k| {
+            matches!(k.syntax, PatternSyntax::Path | PatternSyntax::RelPath)
         });
         let (patterns, match_fn) = build_match(ignore_patterns)?;
 
--- a/rust/hg-core/src/narrow.rs	Mon Jan 09 19:14:14 2023 +0100
+++ b/rust/hg-core/src/narrow.rs	Mon Jan 09 19:18:43 2023 +0100
@@ -37,9 +37,11 @@
     }
     // Treat "narrowspec does not exist" the same as "narrowspec file exists
     // and is empty".
-    let store_spec = repo.store_vfs().try_read(FILENAME)?.unwrap_or(vec![]);
-    let working_copy_spec =
-        repo.hg_vfs().try_read(DIRSTATE_FILENAME)?.unwrap_or(vec![]);
+    let store_spec = repo.store_vfs().try_read(FILENAME)?.unwrap_or_default();
+    let working_copy_spec = repo
+        .hg_vfs()
+        .try_read(DIRSTATE_FILENAME)?
+        .unwrap_or_default();
     if store_spec != working_copy_spec {
         return Err(HgError::abort(
             "working copy's narrowspec is stale",
--- a/rust/hg-core/src/operations/cat.rs	Mon Jan 09 19:14:14 2023 +0100
+++ b/rust/hg-core/src/operations/cat.rs	Mon Jan 09 19:18:43 2023 +0100
@@ -70,7 +70,7 @@
             Some(item) => res.push((file, item)),
         }
     }
-    return Ok((res, missing));
+    Ok((res, missing))
 }
 
 /// Output the given revision of files
@@ -94,10 +94,8 @@
 
     files.sort_unstable();
 
-    let (found, missing) = find_files_in_manifest(
-        &manifest,
-        files.into_iter().map(|f| f.as_ref()),
-    )?;
+    let (found, missing) =
+        find_files_in_manifest(&manifest, files.into_iter())?;
 
     for (file_path, file_node) in found {
         found_any = true;
--- a/rust/hg-core/src/repo.rs	Mon Jan 09 19:14:14 2023 +0100
+++ b/rust/hg-core/src/repo.rs	Mon Jan 09 19:18:43 2023 +0100
@@ -68,9 +68,9 @@
                 return Ok(ancestor.to_path_buf());
             }
         }
-        return Err(RepoError::NotFound {
+        Err(RepoError::NotFound {
             at: current_directory,
-        });
+        })
     }
 
     /// Find a repository, either at the given path (which must contain a `.hg`
@@ -87,13 +87,11 @@
     ) -> Result<Self, RepoError> {
         if let Some(root) = explicit_path {
             if is_dir(root.join(".hg"))? {
-                Self::new_at_path(root.to_owned(), config)
+                Self::new_at_path(root, config)
             } else if is_file(&root)? {
                 Err(HgError::unsupported("bundle repository").into())
             } else {
-                Err(RepoError::NotFound {
-                    at: root.to_owned(),
-                })
+                Err(RepoError::NotFound { at: root })
             }
         } else {
             let root = Self::find_repo_root()?;
@@ -108,9 +106,8 @@
     ) -> Result<Self, RepoError> {
         let dot_hg = working_directory.join(".hg");
 
-        let mut repo_config_files = Vec::new();
-        repo_config_files.push(dot_hg.join("hgrc"));
-        repo_config_files.push(dot_hg.join("hgrc-not-shared"));
+        let mut repo_config_files =
+            vec![dot_hg.join("hgrc"), dot_hg.join("hgrc-not-shared")];
 
         let hg_vfs = Vfs { base: &dot_hg };
         let mut reqs = requirements::load_if_exists(hg_vfs)?;
@@ -254,7 +251,7 @@
             .hg_vfs()
             .read("dirstate")
             .io_not_found_as_none()?
-            .unwrap_or(Vec::new()))
+            .unwrap_or_default())
     }
 
     pub fn dirstate_parents(&self) -> Result<DirstateParents, HgError> {
@@ -277,8 +274,7 @@
                 .set(Some(docket.uuid.to_owned()));
             docket.parents()
         } else {
-            crate::dirstate::parsers::parse_dirstate_parents(&dirstate)?
-                .clone()
+            *crate::dirstate::parsers::parse_dirstate_parents(&dirstate)?
         };
         self.dirstate_parents.set(parents);
         Ok(parents)
--- a/rust/hg-core/src/revlog/changelog.rs	Mon Jan 09 19:14:14 2023 +0100
+++ b/rust/hg-core/src/revlog/changelog.rs	Mon Jan 09 19:18:43 2023 +0100
@@ -165,7 +165,7 @@
     pub fn files(&self) -> impl Iterator<Item = &HgPath> {
         self.bytes[self.timestamp_end + 1..self.files_end]
             .split(|b| b == &b'\n')
-            .map(|path| HgPath::new(path))
+            .map(HgPath::new)
     }
 
     /// The change description.
--- a/rust/hg-core/src/revlog/filelog.rs	Mon Jan 09 19:14:14 2023 +0100
+++ b/rust/hg-core/src/revlog/filelog.rs	Mon Jan 09 19:18:43 2023 +0100
@@ -49,7 +49,7 @@
         file_rev: Revision,
     ) -> Result<FilelogRevisionData, RevlogError> {
         let data: Vec<u8> = self.revlog.get_rev_data(file_rev)?.into_owned();
-        Ok(FilelogRevisionData(data.into()))
+        Ok(FilelogRevisionData(data))
     }
 
     /// The given node ID is that of the file as found in a filelog, not of a
@@ -161,7 +161,7 @@
         // this `FilelogEntry` does not have such metadata:
         let file_data_len = uncompressed_len;
 
-        return file_data_len != other_len;
+        file_data_len != other_len
     }
 
     pub fn data(&self) -> Result<FilelogRevisionData, HgError> {
--- a/rust/hg-core/src/revlog/index.rs	Mon Jan 09 19:14:14 2023 +0100
+++ b/rust/hg-core/src/revlog/index.rs	Mon Jan 09 19:18:43 2023 +0100
@@ -21,11 +21,11 @@
 impl IndexHeaderFlags {
     /// Corresponds to FLAG_INLINE_DATA in python
     pub fn is_inline(self) -> bool {
-        return self.flags & 1 != 0;
+        self.flags & 1 != 0
     }
     /// Corresponds to FLAG_GENERALDELTA in python
     pub fn uses_generaldelta(self) -> bool {
-        return self.flags & 2 != 0;
+        self.flags & 2 != 0
     }
 }
 
@@ -35,9 +35,9 @@
     fn format_flags(&self) -> IndexHeaderFlags {
         // No "unknown flags" check here, unlike in python. Maybe there should
         // be.
-        return IndexHeaderFlags {
+        IndexHeaderFlags {
             flags: BigEndian::read_u16(&self.header_bytes[0..2]),
-        };
+        }
     }
 
     /// The only revlog version currently supported by rhg.
@@ -45,7 +45,7 @@
 
     /// Corresponds to `_format_version` in Python.
     fn format_version(&self) -> u16 {
-        return BigEndian::read_u16(&self.header_bytes[2..4]);
+        BigEndian::read_u16(&self.header_bytes[2..4])
     }
 
     const EMPTY_INDEX_HEADER: IndexHeader = IndexHeader {
@@ -59,7 +59,7 @@
     };
 
     fn parse(index_bytes: &[u8]) -> Result<IndexHeader, HgError> {
-        if index_bytes.len() == 0 {
+        if index_bytes.is_empty() {
             return Ok(IndexHeader::EMPTY_INDEX_HEADER);
         }
         if index_bytes.len() < 4 {
@@ -67,13 +67,13 @@
                 "corrupted revlog: can't read the index format header",
             ));
         }
-        return Ok(IndexHeader {
+        Ok(IndexHeader {
             header_bytes: {
                 let bytes: [u8; 4] =
                     index_bytes[0..4].try_into().expect("impossible");
                 bytes
             },
-        });
+        })
     }
 }
 
@@ -127,8 +127,7 @@
                     uses_generaldelta,
                 })
             } else {
-                Err(HgError::corrupted("unexpected inline revlog length")
-                    .into())
+                Err(HgError::corrupted("unexpected inline revlog length"))
             }
         } else {
             Ok(Self {
@@ -466,8 +465,8 @@
             .with_inline(false)
             .build();
 
-        assert_eq!(is_inline(&bytes), false);
-        assert_eq!(uses_generaldelta(&bytes), false);
+        assert!(!is_inline(&bytes));
+        assert!(!uses_generaldelta(&bytes));
     }
 
     #[test]
@@ -478,8 +477,8 @@
             .with_inline(true)
             .build();
 
-        assert_eq!(is_inline(&bytes), true);
-        assert_eq!(uses_generaldelta(&bytes), false);
+        assert!(is_inline(&bytes));
+        assert!(!uses_generaldelta(&bytes));
     }
 
     #[test]
@@ -490,8 +489,8 @@
             .with_inline(true)
             .build();
 
-        assert_eq!(is_inline(&bytes), true);
-        assert_eq!(uses_generaldelta(&bytes), true);
+        assert!(is_inline(&bytes));
+        assert!(uses_generaldelta(&bytes));
     }
 
     #[test]
--- a/rust/hg-core/src/revlog/nodemap.rs	Mon Jan 09 19:14:14 2023 +0100
+++ b/rust/hg-core/src/revlog/nodemap.rs	Mon Jan 09 19:18:43 2023 +0100
@@ -71,7 +71,7 @@
     ///
     /// If several Revisions match the given prefix, a [`MultipleResults`]
     /// error is returned.
-    fn find_bin<'a>(
+    fn find_bin(
         &self,
         idx: &impl RevlogIndex,
         prefix: NodePrefix,
@@ -88,7 +88,7 @@
     ///
     /// If several Revisions match the given prefix, a [`MultipleResults`]
     /// error is returned.
-    fn unique_prefix_len_bin<'a>(
+    fn unique_prefix_len_bin(
         &self,
         idx: &impl RevlogIndex,
         node_prefix: NodePrefix,
@@ -249,7 +249,7 @@
     rev: Revision,
 ) -> Result<Option<Revision>, NodeMapError> {
     idx.node(rev)
-        .ok_or_else(|| NodeMapError::RevisionNotInIndex(rev))
+        .ok_or(NodeMapError::RevisionNotInIndex(rev))
         .map(|node| {
             if prefix.is_prefix_of(node) {
                 Some(rev)
@@ -468,7 +468,7 @@
         if let Element::Rev(old_rev) = deepest.element {
             let old_node = index
                 .node(old_rev)
-                .ok_or_else(|| NodeMapError::RevisionNotInIndex(old_rev))?;
+                .ok_or(NodeMapError::RevisionNotInIndex(old_rev))?;
             if old_node == node {
                 return Ok(()); // avoid creating lots of useless blocks
             }
@@ -865,7 +865,7 @@
             hex: &str,
         ) -> Result<(), NodeMapError> {
             let node = pad_node(hex);
-            self.index.insert(rev, node.clone());
+            self.index.insert(rev, node);
             self.nt.insert(&self.index, &node, rev)?;
             Ok(())
         }
@@ -887,13 +887,13 @@
         /// Drain `added` and restart a new one
         fn commit(self) -> Self {
             let mut as_vec: Vec<Block> =
-                self.nt.readonly.iter().map(|block| block.clone()).collect();
+                self.nt.readonly.iter().copied().collect();
             as_vec.extend(self.nt.growable);
             as_vec.push(self.nt.root);
 
             Self {
                 index: self.index,
-                nt: NodeTree::from(as_vec).into(),
+                nt: NodeTree::from(as_vec),
             }
         }
     }
@@ -967,15 +967,15 @@
         let idx = &mut nt_idx.index;
 
         let node0_hex = hex_pad_right("444444");
-        let mut node1_hex = hex_pad_right("444444").clone();
+        let mut node1_hex = hex_pad_right("444444");
         node1_hex.pop();
         node1_hex.push('5');
         let node0 = Node::from_hex(&node0_hex).unwrap();
         let node1 = Node::from_hex(&node1_hex).unwrap();
 
-        idx.insert(0, node0.clone());
+        idx.insert(0, node0);
         nt.insert(idx, &node0, 0)?;
-        idx.insert(1, node1.clone());
+        idx.insert(1, node1);
         nt.insert(idx, &node1, 1)?;
 
         assert_eq!(nt.find_bin(idx, (&node0).into())?, Some(0));
--- a/rust/hg-core/src/revlog/path_encode.rs	Mon Jan 09 19:14:14 2023 +0100
+++ b/rust/hg-core/src/revlog/path_encode.rs	Mon Jan 09 19:18:43 2023 +0100
@@ -2,6 +2,7 @@
 
 #[derive(PartialEq, Debug)]
 #[allow(non_camel_case_types)]
+#[allow(clippy::upper_case_acronyms)]
 enum path_state {
     START, /* first byte of a path component */
     A,     /* "AUX" */
@@ -27,6 +28,7 @@
 
 /* state machine for dir-encoding */
 #[allow(non_camel_case_types)]
+#[allow(clippy::upper_case_acronyms)]
 enum dir_state {
     DDOT,
     DH,
@@ -61,7 +63,7 @@
     }
 }
 
-fn hexencode<'a>(mut dest: Option<&'a mut [u8]>, destlen: &mut usize, c: u8) {
+fn hexencode(mut dest: Option<&mut [u8]>, destlen: &mut usize, c: u8) {
     let hexdigit = b"0123456789abcdef";
     charcopy(
         rewrap_option(&mut dest),
@@ -534,10 +536,7 @@
     let last_slash = src.iter().rposition(|b| *b == b'/');
     let last_dot: Option<usize> = {
         let s = last_slash.unwrap_or(0);
-        src[s..]
-            .iter()
-            .rposition(|b| *b == b'.')
-            .and_then(|i| Some(i + s))
+        src[s..].iter().rposition(|b| *b == b'.').map(|i| i + s)
     };
 
     let mut dest = vec![0; MAXSTOREPATHLEN];
@@ -545,8 +544,8 @@
 
     {
         let mut first = true;
-        for slice in src[..last_slash.unwrap_or_else(|| src.len())]
-            .split(|b| *b == b'/')
+        for slice in
+            src[..last_slash.unwrap_or(src.len())].split(|b| *b == b'/')
         {
             let slice = &slice[..std::cmp::min(slice.len(), dirprefixlen)];
             if destlen + (slice.len() + if first { 0 } else { 1 })
@@ -641,6 +640,6 @@
             res
         }
     } else {
-        hash_encode(&path)
+        hash_encode(path)
     }
 }
--- a/rust/hg-core/src/revlog/revlog.rs	Mon Jan 09 19:14:14 2023 +0100
+++ b/rust/hg-core/src/revlog/revlog.rs	Mon Jan 09 19:18:43 2023 +0100
@@ -174,10 +174,11 @@
         // optimize these cases.
         let mut found_by_prefix = None;
         for rev in (0..self.len() as Revision).rev() {
-            let index_entry =
-                self.index.get_entry(rev).ok_or(HgError::corrupted(
+            let index_entry = self.index.get_entry(rev).ok_or_else(|| {
+                HgError::corrupted(
                     "revlog references a revision not in the index",
-                ))?;
+                )
+            })?;
             if node == *index_entry.hash() {
                 return Ok(rev);
             }
@@ -230,7 +231,7 @@
             None => &NULL_NODE,
         };
 
-        &hash(data, h1.as_bytes(), h2.as_bytes()) == expected
+        hash(data, h1.as_bytes(), h2.as_bytes()) == expected
     }
 
     /// Build the full data of a revision out its snapshot
@@ -253,8 +254,8 @@
 
     /// Return the revlog data.
     fn data(&self) -> &[u8] {
-        match self.data_bytes {
-            Some(ref data_bytes) => &data_bytes,
+        match &self.data_bytes {
+            Some(data_bytes) => data_bytes,
             None => panic!(
                 "forgot to load the data or trying to access inline data"
             ),
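
Aside: `ok_or(HgError::corrupted(..))` builds the error value even when the entry exists; `ok_or_else` defers that work to the `None` path, which is what clippy's `or_fun_call` lint asks for. A runnable sketch with a hypothetical `Corrupted` error type standing in for `HgError`:

    // Hypothetical error type standing in for `HgError::corrupted(..)`.
    #[derive(Debug)]
    struct Corrupted(String);

    fn corrupted() -> Corrupted {
        // Imagine this allocates and formats a message.
        Corrupted("revlog references a revision not in the index".to_string())
    }

    fn get_entry(rev: u32) -> Option<u32> {
        // Toy lookup: pretend only revisions below 5 exist in the index.
        if rev < 5 { Some(rev) } else { None }
    }

    fn checked_entry(rev: u32) -> Result<u32, Corrupted> {
        // `ok_or(corrupted())` would build the error string on every call,
        // even when the entry exists; `ok_or_else` only runs it on `None`.
        get_entry(rev).ok_or_else(corrupted)
    }

    fn main() {
        assert_eq!(checked_entry(3).unwrap(), 3);
        let err = checked_entry(9).unwrap_err();
        println!("lookup failed: {}", err.0);
    }
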
--- a/rust/hg-core/src/revset.rs	Mon Jan 09 19:14:14 2023 +0100
+++ b/rust/hg-core/src/revset.rs	Mon Jan 09 19:18:43 2023 +0100
@@ -21,7 +21,7 @@
     match input {
         "." => {
             let p1 = repo.dirstate_parents()?.p1;
-            return Ok(changelog.revlog.rev_from_node(p1.into())?);
+            return changelog.revlog.rev_from_node(p1.into());
         }
         "null" => return Ok(NULL_REVISION),
         _ => {}
@@ -33,7 +33,7 @@
             let msg = format!("cannot parse revset '{}'", input);
             Err(HgError::unsupported(msg).into())
         }
-        result => return result,
+        result => result,
     }
 }
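
Aside: the two revset changes drop a redundant `Ok(..?)` wrapper around a call that already returns the right `Result` type (clippy's `needless_question_mark`) and a redundant `return` in a tail match arm (`needless_return`). A small sketch of the same shapes, using a hypothetical parser in place of the changelog lookup:

    #[derive(Debug)]
    struct ParseError;

    fn parse_inner(input: &str) -> Result<u32, ParseError> {
        input.parse().map_err(|_| ParseError)
    }

    // `Ok(parse_inner(input)?)` would just unwrap and rewrap the same Result
    // type, which `needless_question_mark` flags; returning the call directly
    // is equivalent. Likewise, `return expr` in the final arm of a tail match
    // is what `needless_return` flags.
    fn parse(input: &str) -> Result<u32, ParseError> {
        match input {
            "null" => Ok(0),
            _ => parse_inner(input),
        }
    }

    fn main() {
        assert_eq!(parse("null").unwrap(), 0);
        assert_eq!(parse("42").unwrap(), 42);
        assert!(parse("bogus").is_err());
    }
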
 
--- a/rust/hg-core/src/sparse.rs	Mon Jan 09 19:14:14 2023 +0100
+++ b/rust/hg-core/src/sparse.rs	Mon Jan 09 19:18:43 2023 +0100
@@ -164,7 +164,7 @@
 fn read_temporary_includes(
     repo: &Repo,
 ) -> Result<Vec<Vec<u8>>, SparseConfigError> {
-    let raw = repo.hg_vfs().try_read("tempsparse")?.unwrap_or(vec![]);
+    let raw = repo.hg_vfs().try_read("tempsparse")?.unwrap_or_default();
     if raw.is_empty() {
         return Ok(vec![]);
     }
@@ -179,7 +179,7 @@
     if !repo.has_sparse() {
         return Ok(None);
     }
-    let raw = repo.hg_vfs().try_read("sparse")?.unwrap_or(vec![]);
+    let raw = repo.hg_vfs().try_read("sparse")?.unwrap_or_default();
 
     if raw.is_empty() {
         return Ok(None);
@@ -200,9 +200,10 @@
             let output =
                 cat(repo, &rev.to_string(), vec![HgPath::new(&profile)])
                     .map_err(|_| {
-                        HgError::corrupted(format!(
+                        HgError::corrupted(
                             "dirstate points to non-existent parent node"
-                        ))
+                                .to_string(),
+                        )
                     })?;
             if output.results.is_empty() {
                 config.warnings.push(SparseWarning::ProfileNotFound {
@@ -252,9 +253,9 @@
         repo.changelog()?
             .rev_from_node(parents.p1.into())
             .map_err(|_| {
-                HgError::corrupted(format!(
-                    "dirstate points to non-existent parent node"
-                ))
+                HgError::corrupted(
+                    "dirstate points to non-existent parent node".to_string(),
+                )
             })?;
     if p1_rev != NULL_REVISION {
         revs.push(p1_rev)
@@ -263,9 +264,9 @@
         repo.changelog()?
             .rev_from_node(parents.p2.into())
             .map_err(|_| {
-                HgError::corrupted(format!(
-                    "dirstate points to non-existent parent node"
-                ))
+                HgError::corrupted(
+                    "dirstate points to non-existent parent node".to_string(),
+                )
             })?;
     if p2_rev != NULL_REVISION {
         revs.push(p2_rev)
@@ -325,7 +326,7 @@
     }
     let forced_include_matcher = IncludeMatcher::new(
         temp_includes
-            .into_iter()
+            .iter()
             .map(|include| {
                 IgnorePattern::new(PatternSyntax::Path, include, Path::new(""))
             })
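
Aside: `unwrap_or(vec![])` spells out a default that `unwrap_or_default()` already expresses (presumably clippy's `or_fun_call` suggestion), and `format!` with a plain literal and no interpolation is `useless_format`. A sketch of both, with a hypothetical `read_config` in place of the vfs reads:

    fn read_config(raw: Option<Vec<u8>>) -> Vec<u8> {
        // `unwrap_or(vec![])` names the fallback explicitly;
        // `unwrap_or_default()` gets the same empty Vec from `Default`.
        raw.unwrap_or_default()
    }

    fn error_message() -> String {
        // `format!("...")` with no interpolation is `useless_format`;
        // a plain `.to_string()` is equivalent.
        "dirstate points to non-existent parent node".to_string()
    }

    fn main() {
        assert!(read_config(None).is_empty());
        assert_eq!(read_config(Some(b"x".to_vec())), b"x".to_vec());
        assert!(error_message().starts_with("dirstate"));
    }
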
--- a/rust/hg-core/src/utils.rs	Mon Jan 09 19:14:14 2023 +0100
+++ b/rust/hg-core/src/utils.rs	Mon Jan 09 19:18:43 2023 +0100
@@ -137,11 +137,8 @@
     }
 
     fn split_2_by_slice(&self, separator: &[u8]) -> Option<(&[u8], &[u8])> {
-        if let Some(pos) = find_slice_in_slice(self, separator) {
-            Some((&self[..pos], &self[pos + separator.len()..]))
-        } else {
-            None
-        }
+        find_slice_in_slice(self, separator)
+            .map(|pos| (&self[..pos], &self[pos + separator.len()..]))
     }
 }
 
@@ -369,7 +366,7 @@
                 MergeResult::RightValue => {
                     left.insert(key, right_value);
                 }
-                MergeResult::UseNewValue(new_value) => {
+                MergeResult::NewValue(new_value) => {
                     left.insert(key, new_value);
                 }
             },
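
Aside: the `if let Some(pos) = .. { Some(..) } else { None }` pattern replaced above is exactly what clippy's `manual_map` rewrites into a single `map` call. A self-contained sketch; `find_slice_in_slice` below is a hypothetical stand-in for the hg-core helper of the same name:

    // Hypothetical stand-in; assumes a non-empty `needle`.
    fn find_slice_in_slice(haystack: &[u8], needle: &[u8]) -> Option<usize> {
        haystack
            .windows(needle.len())
            .position(|window| window == needle)
    }

    fn split_2_by_slice<'a>(
        s: &'a [u8],
        separator: &[u8],
    ) -> Option<(&'a [u8], &'a [u8])> {
        // The `if let Some(pos) .. else None` version is what `manual_map`
        // collapses into this single `map` call.
        find_slice_in_slice(s, separator)
            .map(|pos| (&s[..pos], &s[pos + separator.len()..]))
    }

    fn main() {
        assert_eq!(
            split_2_by_slice(b"key::value", b"::"),
            Some((&b"key"[..], &b"value"[..]))
        );
        assert_eq!(split_2_by_slice(b"no-separator", b"::"), None);
    }
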
--- a/rust/hg-core/src/utils/files.rs	Mon Jan 09 19:14:14 2023 +0100
+++ b/rust/hg-core/src/utils/files.rs	Mon Jan 09 19:18:43 2023 +0100
@@ -230,7 +230,7 @@
         // TODO hint to the user about using --cwd
         // Bubble up the responsibility to Python for now
         Err(HgPathError::NotUnderRoot {
-            path: original_name.to_owned(),
+            path: original_name,
             root: root.to_owned(),
         })
     }
@@ -424,7 +424,7 @@
         assert_eq!(
             canonical_path(&root, Path::new(""), &beneath_repo),
             Err(HgPathError::NotUnderRoot {
-                path: beneath_repo.to_owned(),
+                path: beneath_repo,
                 root: root.to_owned()
             })
         );
--- a/rust/hg-core/src/utils/hg_path.rs	Mon Jan 09 19:14:14 2023 +0100
+++ b/rust/hg-core/src/utils/hg_path.rs	Mon Jan 09 19:18:43 2023 +0100
@@ -205,7 +205,7 @@
     /// ```
     pub fn split_filename(&self) -> (&Self, &Self) {
         match &self.inner.iter().rposition(|c| *c == b'/') {
-            None => (HgPath::new(""), &self),
+            None => (HgPath::new(""), self),
             Some(size) => (
                 HgPath::new(&self.inner[..*size]),
                 HgPath::new(&self.inner[*size + 1..]),
@@ -326,7 +326,7 @@
     #[cfg(unix)]
     /// Split a pathname into drive and path. On Posix, drive is always empty.
     pub fn split_drive(&self) -> (&HgPath, &HgPath) {
-        (HgPath::new(b""), &self)
+        (HgPath::new(b""), self)
     }
 
     /// Checks for errors in the path, short-circuiting at the first one.
@@ -396,7 +396,7 @@
         Default::default()
     }
 
-    pub fn push<T: ?Sized + AsRef<HgPath>>(&mut self, other: &T) -> () {
+    pub fn push<T: ?Sized + AsRef<HgPath>>(&mut self, other: &T) {
         if !self.inner.is_empty() && self.inner.last() != Some(&b'/') {
             self.inner.push(b'/');
         }
@@ -431,7 +431,7 @@
 
     #[inline]
     fn deref(&self) -> &HgPath {
-        &HgPath::new(&self.inner)
+        HgPath::new(&self.inner)
     }
 }
 
@@ -441,15 +441,15 @@
     }
 }
 
-impl Into<Vec<u8>> for HgPathBuf {
-    fn into(self) -> Vec<u8> {
-        self.inner
+impl From<HgPathBuf> for Vec<u8> {
+    fn from(val: HgPathBuf) -> Self {
+        val.inner
     }
 }
 
 impl Borrow<HgPath> for HgPathBuf {
     fn borrow(&self) -> &HgPath {
-        &HgPath::new(self.as_bytes())
+        HgPath::new(self.as_bytes())
     }
 }
 
@@ -491,7 +491,7 @@
     #[cfg(unix)]
     {
         use std::os::unix::ffi::OsStrExt;
-        os_str = std::ffi::OsStr::from_bytes(&hg_path.as_ref().as_bytes());
+        os_str = std::ffi::OsStr::from_bytes(hg_path.as_ref().as_bytes());
     }
     // TODO Handle other platforms
     // TODO: convert from WTF8 to Windows MBCS (ANSI encoding).
@@ -511,7 +511,7 @@
     #[cfg(unix)]
     {
         use std::os::unix::ffi::OsStrExt;
-        buf = HgPathBuf::from_bytes(&os_string.as_ref().as_bytes());
+        buf = HgPathBuf::from_bytes(os_string.as_ref().as_bytes());
     }
     // TODO Handle other platforms
     // TODO: convert from WTF8 to Windows MBCS (ANSI encoding).
@@ -528,7 +528,7 @@
     #[cfg(unix)]
     {
         use std::os::unix::ffi::OsStrExt;
-        buf = HgPathBuf::from_bytes(&os_str.as_bytes());
+        buf = HgPathBuf::from_bytes(os_str.as_bytes());
     }
     // TODO Handle other platforms
     // TODO: convert from WTF8 to Windows MBCS (ANSI encoding).
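
Aside: replacing `impl Into<Vec<u8>> for HgPathBuf` with `impl From<HgPathBuf> for Vec<u8>` follows clippy's `from_over_into`: implementing `From` on the target type still provides `Into` for free through the standard library's blanket impl, and additionally allows `Vec::<u8>::from(..)`. A sketch with a hypothetical `Wrapper` newtype in place of `HgPathBuf`:

    // Hypothetical newtype, just to show the pattern.
    struct Wrapper {
        inner: Vec<u8>,
    }

    // `from_over_into` prefers implementing `From` on the target type: the
    // blanket `impl<T, U: From<T>> Into<U> for T` then supplies `Into<Vec<u8>>`
    // automatically.
    impl From<Wrapper> for Vec<u8> {
        fn from(val: Wrapper) -> Self {
            val.inner
        }
    }

    fn main() {
        let w = Wrapper { inner: b"data/file.i".to_vec() };
        let bytes: Vec<u8> = w.into(); // provided by the blanket impl
        assert_eq!(bytes, b"data/file.i".to_vec());
    }
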
--- a/rust/hg-core/tests/test_missing_ancestors.rs	Mon Jan 09 19:14:14 2023 +0100
+++ b/rust/hg-core/tests/test_missing_ancestors.rs	Mon Jan 09 19:18:43 2023 +0100
@@ -53,7 +53,7 @@
 /// Compute the ancestors set of all revisions of a VecGraph
 fn ancestors_sets(vg: &VecGraph) -> Vec<HashSet<Revision>> {
     let mut ancs: Vec<HashSet<Revision>> = Vec::new();
-    for i in 0..vg.len() {
+    (0..vg.len()).for_each(|i| {
         let mut ancs_i = HashSet::new();
         ancs_i.insert(i as Revision);
         for p in vg[i].iter().cloned() {
@@ -62,7 +62,7 @@
             }
         }
         ancs.push(ancs_i);
-    }
+    });
     ancs
 }
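
Aside: `for i in 0..vg.len()` with `vg[i]` in the body is the classic trigger for clippy's `needless_range_loop`; the changeset sidesteps it by iterating the range with `for_each`, and an `enumerate`-based loop is another lint-clean shape with the same behavior. A tiny sketch on a made-up parent-list graph:

    // Tiny stand-in graph: each entry lists the parents of that node by index.
    fn main() {
        let vg: Vec<Vec<usize>> = vec![vec![], vec![0], vec![0, 1]];

        // Indexing with `for i in 0..vg.len()` would trip needless_range_loop;
        // `enumerate` (or a `for_each` over the range, as in the changeset)
        // expresses the same iteration without it.
        let mut ancestor_counts = Vec::new();
        for (i, parents) in vg.iter().enumerate() {
            ancestor_counts.push((i, parents.len()));
        }
        assert_eq!(ancestor_counts, vec![(0, 0), (1, 1), (2, 2)]);
    }
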