Fix various lints.
olson-sean-k committed Feb 13, 2024
1 parent bbe626c commit e22598a
Showing 8 changed files with 47 additions and 77 deletions.
7 changes: 2 additions & 5 deletions src/lib.rs
@@ -76,7 +76,7 @@ use thiserror::Error;
use crate::encode::CompileError;
use crate::rule::{Checked, RuleError};
use crate::token::{
-    ConcatenationTree, ExpressionMetadata, ParseError, Text, Token, TokenTree, Tokenized, When,
+    ConcatenationTree, ExpressionMetadata, ParseError, Text, Token, TokenTree, Tokenized,
};
#[cfg(feature = "walk")]
use crate::walk::WalkError;
@@ -750,10 +750,7 @@ impl<'t> Glob<'t> {
/// As with Unix paths, a glob expression has a root if it begins with a separator `/`.
/// Patterns other than separators may also root an expression, such as `/**` or `</root:1,>`.
pub fn has_root(&self) -> bool {
-        match self.tree.as_ref().as_token().has_root() {
-            When::Always => true,
-            _ => false,
-        }
+        self.tree.as_ref().as_token().has_root().is_always()
}

/// Returns `true` if the glob has literals that have non-nominal semantics on the target
19 changes: 8 additions & 11 deletions src/rule.rs
@@ -367,7 +367,7 @@ impl<'t> TryFrom<&'t str> for Checked<Tokenized<'t, ExpressionMetadata>> {
}
}

-pub fn check<'t, A>(tree: Tokenized<'t, A>) -> Result<Checked<Tokenized<'t, A>>, RuleError<'t>>
+pub fn check<A>(tree: Tokenized<'_, A>) -> Result<Checked<Tokenized<'_, A>>, RuleError<'_>>
where
A: Spanned,
{
@@ -457,10 +457,7 @@

impl<'i, 't, A> Clone for Outer<'i, 't, A> {
fn clone(&self) -> Self {
-        Outer {
-            left: self.left,
-            right: self.right,
-        }
+        *self
}
}

@@ -490,27 +487,27 @@
})
}

-fn is_boundary<'t, A>(token: &Token<'t, A>) -> bool {
+fn is_boundary<A>(token: &Token<'_, A>) -> bool {
token.boundary().is_some()
}

-fn is_zom<'t, A>(token: &Token<'t, A>) -> bool {
+fn is_zom<A>(token: &Token<'_, A>) -> bool {
matches!(token.as_leaf(), Some(Wildcard(ZeroOrMore(_))))
}

-fn has_starting_boundary<'t, A>(token: Option<&Token<'t, A>>) -> bool {
+fn has_starting_boundary<A>(token: Option<&Token<'_, A>>) -> bool {
is_some_and_any_in(token, walk::starting, is_boundary)
}

-fn has_ending_boundary<'t, A>(token: Option<&Token<'t, A>>) -> bool {
+fn has_ending_boundary<A>(token: Option<&Token<'_, A>>) -> bool {
is_some_and_any_in(token, walk::ending, is_boundary)
}

-fn has_starting_zom<'t, A>(token: Option<&Token<'t, A>>) -> bool {
+fn has_starting_zom<A>(token: Option<&Token<'_, A>>) -> bool {
is_some_and_any_in(token, walk::starting, is_zom)
}

-fn has_ending_zom<'t, A>(token: Option<&Token<'t, A>>) -> bool {
+fn has_ending_zom<A>(token: Option<&Token<'_, A>>) -> bool {
is_some_and_any_in(token, walk::ending, is_zom)
}

61 changes: 30 additions & 31 deletions src/token/mod.rs
@@ -94,8 +94,7 @@ impl When {
match (self, other) {
(Always, Always) => Always,
(Never, Never) => Never,
-            (Sometimes, _) | (_, Sometimes) => Sometimes,
-            (Always, _) | (_, Always) => Sometimes,
+            (Sometimes, _) | (_, Sometimes) | (Always, _) | (_, Always) => Sometimes,
}
}

@@ -223,17 +222,17 @@ impl<T> Composition<T, T> {
impl<T> AsMut<T> for Composition<T, T> {
fn as_mut(&mut self) -> &mut T {
match self {
-            Composition::Conjunctive(ref mut inner) => inner,
-            Composition::Disjunctive(ref mut inner) => inner,
+            Composition::Conjunctive(ref mut inner) | Composition::Disjunctive(ref mut inner) => {
+                inner
+            },
}
}
}

impl<T> AsRef<T> for Composition<T, T> {
fn as_ref(&self) -> &T {
match self {
-            Composition::Conjunctive(ref inner) => inner,
-            Composition::Disjunctive(ref inner) => inner,
+            Composition::Conjunctive(ref inner) | Composition::Disjunctive(ref inner) => inner,
}
}
}
@@ -374,7 +373,7 @@ impl<'t, A> Token<'t, A> {

fn pop_prefix_tokens_with<F>(mut self, n: usize, f: F) -> (Vec<Self>, Option<Self>)
where
-        F: FnOnce(&mut Self) -> (),
+        F: FnOnce(&mut Self),
{
if let Some(concatenation) = self.as_concatenation_mut() {
if n >= concatenation.tokens().len() {
@@ -390,15 +389,13 @@
(tokens, Some(self))
}
}
+        else if n == 0 {
+            // Yield the token as-is if there are no tokens to pop.
+            (vec![], Some(self))
+        }
else {
-            if n == 0 {
-                // Yield the token as-is if there are no tokens to pop.
-                (vec![], Some(self))
-            }
-            else {
-                // Pop the entire token if it is not a concatenation (and `n` is not zero).
-                (vec![self], None)
-            }
+            // Pop the entire token if it is not a concatenation (and `n` is not zero).
+            (vec![self], None)
}
}

@@ -438,17 +435,15 @@ impl<'t, A> Token<'t, A> {
if let Some(literal) = component.literal() {
return Some((component, literal));
}
-            else {
-                components.extend(
-                    component
-                        .tokens()
-                        .iter()
-                        .filter_map(Token::as_branch)
-                        .flat_map(|branch| self::components(branch.tokens().into_inner())),
-                );
-            }
+            components.extend(
+                component
+                    .tokens()
+                    .iter()
+                    .filter_map(Token::as_branch)
+                    .flat_map(|branch| self::components(branch.tokens().into_inner())),
+            );
}
-        return None;
+        None
})
}

@@ -463,7 +458,7 @@

pub fn composition(&self) -> TokenComposition<()> {
self.as_branch()
-            .map_or(Composition::Conjunctive(()), |branch| branch.composition())
+            .map_or(Composition::Conjunctive(()), BranchKind::composition)
}

pub fn topology(&self) -> &TokenTopology<'t, A> {
@@ -519,7 +514,7 @@ impl<'t, A> Token<'t, A> {
let prefix = head.get_or_insert_with(Prefix::default);
prefix.index = n;
prefix.text.push_str(text.to_string().as_ref());
-                    if let Some(_) = token.boundary() {
+                    if token.is_boundary() {
checkpoint = head.clone();
}
},
@@ -543,7 +538,7 @@
type Term = When;

fn sequencer() -> Self::Sequencer {
-        Starting::default()
+        Starting
}

fn fold(
@@ -581,6 +576,10 @@
.any(|token| token.boundary().is_some())
}

+    pub fn is_boundary(&self) -> bool {
+        self.boundary().is_some()
+    }

pub fn is_capturing(&self) -> bool {
match self.topology {
TokenTopology::Branch(ref branch) => branch.is_capturing(),
@@ -600,7 +599,7 @@
// serious bug. False positives in negative patterns cause matching to incorrectly
// discard directory trees.
pub fn is_exhaustive(&self) -> bool {
-        self.fold(TreeExhaustiveness::default())
+        self.fold(TreeExhaustiveness)
.as_ref()
.map_or(false, Variance::is_exhaustive)
}
@@ -1506,7 +1505,7 @@ impl<'i, 't, A> Component<'i, 't, A> {
}
else {
self.tokens().iter().all(Token::is_literal).then(|| {
-                LiteralSequence(self.tokens().iter().flat_map(Token::as_literal).collect())
+                LiteralSequence(self.tokens().iter().filter_map(Token::as_literal).collect())
})
}
}
@@ -1544,7 +1543,7 @@
}

fn components<'i, 't, A>(tokens: &'i [Token<'t, A>]) -> impl Iterator<Item = Component<'i, 't, A>> {
-    tokens.into_iter().enumerate().peekable().batching(|batch| {
+    tokens.iter().enumerate().peekable().batching(|batch| {
let mut first = batch.next();
while matches!(
first.and_then(|(_, token)| token.boundary()),
2 changes: 1 addition & 1 deletion src/token/variance/bound.rs
@@ -44,7 +44,7 @@ impl Cobound for NonZeroBound {
type Bound = NonZeroUsize;

fn cobound(&self) -> Boundedness<Self::Bound> {
-        self.clone()
+        *self
}
}

2 changes: 1 addition & 1 deletion src/token/variance/invariant/mod.rs
@@ -124,6 +124,6 @@ impl Product<NonZeroUsize> for Boundedness<UnitBound> {
type Output = Self;

fn product(self, _: NonZeroUsize) -> Self::Output {
-        self.into()
+        self
}
}
9 changes: 4 additions & 5 deletions src/token/variance/mod.rs
@@ -125,7 +125,7 @@ impl<T, B> Variance<T, Boundedness<B>> {
pub fn is_unbounded(&self) -> bool {
self.as_ref()
.variant()
-            .map_or(false, |bound| bound.is_unbounded())
+            .map_or(false, Boundedness::is_unbounded)
}
}

@@ -269,7 +269,7 @@
type Term = GlobVariance<T>;

fn sequencer() -> Self::Sequencer {
-        Forward::default()
+        Forward
}

fn fold(&mut self, branch: &BranchKind<'t, A>, terms: Vec<Self::Term>) -> Option<Self::Term> {
@@ -313,7 +313,7 @@ impl<'t, A> Fold<'t, A> for TreeExhaustiveness {
type Term = GlobVariance<Depth>;

fn sequencer() -> Self::Sequencer {
-        Self::default()
+        Self
}

fn fold(&mut self, branch: &BranchKind<'t, A>, terms: Vec<Self::Term>) -> Option<Self::Term> {
@@ -444,8 +444,7 @@ mod tests {
use Variance::Variant;

fn range(lower: usize, upper: Option<usize>) -> GlobVariance<Size> {
-            NaturalRange::from_closed_and_open(lower, upper)
-                .map_invariant(|size| usize::from(size).into())
+            NaturalRange::from_closed_and_open(lower, upper).map_invariant(From::from)
}

let token = token::parse("**").unwrap().into_token();
2 changes: 1 addition & 1 deletion src/token/variance/ops.rs
@@ -40,7 +40,7 @@ impl Product for NonZeroUsize {
type Output = Self;

fn product(self, rhs: Self) -> Self::Output {
-        self.checked_mul(rhs.into())
+        self.checked_mul(rhs)
.expect("overflow determining product of unsigned word")
}
}
22 changes: 0 additions & 22 deletions src/token/walk.rs
@@ -427,18 +427,6 @@ impl Sequencer for Forward {
}
}

-#[derive(Default)]
-pub struct Reverse;
-
-impl Sequencer for Reverse {
-    fn enqueue<'i, 't, A>(
-        &mut self,
-        parent: ParentToken<'i, 't, A>,
-    ) -> impl Iterator<Item = ChildToken<'i, 't, A>> {
-        parent.into_tokens().rev()
-    }
-}
-
#[derive(Default)]
pub struct Starting;

@@ -659,16 +647,6 @@
self::with_sequence(tree, Forward)
}

-pub fn reverse<'i, 't, T>(
-    tree: &'i T,
-) -> impl 'i + HierarchicalIterator<Feed = TokenFeed<'i, 't, T::Annotation>>
-where
-    't: 'i,
-    T: TokenTree<'t>,
-{
-    self::with_sequence(tree, Reverse)
-}
-
pub fn starting<'i, 't, T>(
tree: &'i T,
) -> impl 'i + HierarchicalIterator<Feed = TokenFeed<'i, 't, T::Annotation>>
