Mirror of https://github.com/BurntSushi/ripgrep.git
Some minor performance tweaks.
This includes moving basename-only globs into separate regexes. The hope is that if the regex processes less input, it will be faster.
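Roughly, the change keeps two regex sets: one matched against the whole path and one matched only against the file name, so basename-only globs scan far fewer bytes. A minimal sketch of that idea using the regex crate's RegexSet, with made-up names (GlobSet, path_regexes, base_regexes) rather than ripgrep's actual types:

use regex::RegexSet;

// Hypothetical, simplified version of the split: basename-only globs are
// compiled into their own RegexSet and matched against just the file name,
// so those regexes see far less input than the full-path regexes.
struct GlobSet {
    path_regexes: RegexSet, // globs that must see the whole path
    path_map: Vec<usize>,   // regex index -> original pattern index
    base_regexes: RegexSet, // globs that only inspect the basename
    base_map: Vec<usize>,
}

impl GlobSet {
    fn matches_into(&self, path: &str, into: &mut Vec<usize>) {
        into.clear();
        // Full-path globs run over the entire path string.
        for i in self.path_regexes.matches(path) {
            into.push(self.path_map[i]);
        }
        // Basename-only globs run over the (much shorter) file name.
        if let Some(name) = path.rsplit('/').next() {
            for i in self.base_regexes.matches(name) {
                into.push(self.base_map[i]);
            }
        }
        into.sort();
    }
}

fn main() {
    // e.g. "*.rs" only needs the basename; "src/**/*.c" needs the whole path.
    let set = GlobSet {
        path_regexes: RegexSet::new([r"^src/.*\.c$"]).unwrap(),
        path_map: vec![1],
        base_regexes: RegexSet::new([r"^.*\.rs$"]).unwrap(),
        base_map: vec![0],
    };
    let mut hits = vec![];
    set.matches_into("src/main.rs", &mut hits);
    assert_eq!(hits, vec![0]);
}

The payoff is simply that a glob such as *.rs only ever inspects the basename instead of the full path.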
Files changed include src/glob.rs (37 lines).
@ -124,6 +124,8 @@ pub struct Set {
    base_prefixes_map: Vec<usize>,
    base_suffixes: Vec<Vec<u8>>,
    base_suffixes_map: Vec<usize>,
    base_regexes: RegexSet,
    base_regexes_map: Vec<usize>,
    regexes: RegexSet,
    regexes_map: Vec<usize>,
}
@ -195,7 +197,14 @@ impl Set {
                }
            }
        }
        into.extend(self.regexes.matches(path_bytes));
        if let Some(ref basename) = basename {
            for i in self.base_regexes.matches(&**basename) {
                into.push(self.base_regexes_map[i]);
            }
        }
        for i in self.regexes.matches(path_bytes) {
            into.push(self.regexes_map[i]);
        }
        into.sort();
    }
@ -207,6 +216,7 @@ impl Set {
        let (mut base_prefixes, mut base_prefixes_map) = (vec![], vec![]);
        let (mut base_suffixes, mut base_suffixes_map) = (vec![], vec![]);
        let (mut regexes, mut regexes_map) = (vec![], vec![]);
        let (mut base_regexes, mut base_regexes_map) = (vec![], vec![]);
        for (i, &(ref p, ref o)) in pats.iter().enumerate() {
            if let Some(ext) = p.ext() {
                exts.entry(ext).or_insert(vec![]).push(i);
@ -221,6 +231,10 @@ impl Set {
            } else if let Some(literal) = p.base_literal_suffix() {
                base_suffixes.push(literal.into_bytes());
                base_suffixes_map.push(i);
            } else if p.is_only_basename() {
                let part = format!("(?:{})", p.to_regex_with(o));
                base_regexes.push(part);
                base_regexes_map.push(i);
            } else {
                let part = format!("(?:{})", p.to_regex_with(o));
                regexes.push(part);
@ -236,6 +250,8 @@ impl Set {
            base_prefixes_map: base_prefixes_map,
            base_suffixes: base_suffixes,
            base_suffixes_map: base_suffixes_map,
            base_regexes: try!(RegexSet::new(base_regexes)),
            base_regexes_map: base_regexes_map,
            regexes: try!(RegexSet::new(regexes)),
            regexes_map: regexes_map,
        })
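For reference, the construction above wraps every translated glob in a non-capturing group and hands the list to the regex crate's RegexSet, which reports all matching alternates in a single pass; the *_map vectors translate those indices back to the original glob positions. A small self-contained illustration of that API (the pattern strings are invented for the example):

use regex::RegexSet;

fn main() {
    // Each pattern is wrapped in a non-capturing group, mirroring the
    // `format!("(?:{})", ...)` calls above.
    let pats = vec![r"(?:^.*\.rs$)", r"(?:^Makefile$)", r"(?:^.*\.c$)"];
    let set = RegexSet::new(&pats).unwrap();

    // matches() runs all patterns in one pass and yields the indices of the
    // ones that matched; the real code maps these indices back to the
    // original glob positions via the *_map vectors.
    let matched: Vec<usize> = set.matches("foo.rs").into_iter().collect();
    assert_eq!(matched, vec![0]);
}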
@ -402,6 +418,25 @@ impl Pattern {
        Some(lit)
    }

    /// Returns true if and only if this pattern only inspects the basename
    /// of a path.
    pub fn is_only_basename(&self) -> bool {
        match self.tokens.get(0) {
            Some(&Token::RecursivePrefix) => {}
            _ => return false,
        }
        for t in &self.tokens[1..] {
            match *t {
                Token::Literal(c) if c == '/' || c == '\\' => return false,
                Token::RecursivePrefix
                | Token::RecursiveSuffix
                | Token::RecursiveZeroOrMore => return false,
                _ => {}
            }
        }
        true
    }

    /// Returns the pattern as a literal if and only if the pattern must match
    /// an entire path exactly.
    ///
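Going by the token walk above, a glob qualifies as basename-only when it starts with a recursive prefix (**/) and the remainder never crosses a directory boundary. A self-contained sketch of the same decision using a simplified token enum, not the real Pattern type (which has more variants):

// Simplified stand-in for the glob tokens used above; the real parser has
// more variants (character classes, alternations, ...).
enum Token {
    Literal(char),
    Any,                 // `*`
    RecursivePrefix,     // leading `**/`
    RecursiveSuffix,     // trailing `/**`
    RecursiveZeroOrMore, // interior `/**/`
}

// Mirrors is_only_basename: true iff the pattern starts with `**/` and the
// rest never introduces a directory separator or recursive component.
fn is_only_basename(tokens: &[Token]) -> bool {
    match tokens.first() {
        Some(&Token::RecursivePrefix) => {}
        _ => return false,
    }
    for t in &tokens[1..] {
        match *t {
            Token::Literal(c) if c == '/' || c == '\\' => return false,
            Token::RecursivePrefix
            | Token::RecursiveSuffix
            | Token::RecursiveZeroOrMore => return false,
            _ => {}
        }
    }
    true
}

fn main() {
    // `**/*.rs`: recursive prefix, then `*.rs`, so only the basename matters.
    let basename_glob = vec![
        Token::RecursivePrefix,
        Token::Any,
        Token::Literal('.'),
        Token::Literal('r'),
        Token::Literal('s'),
    ];
    // `**/src/*.rs` contains a literal `/`, so the full path is needed.
    let path_glob = vec![
        Token::RecursivePrefix,
        Token::Literal('s'),
        Token::Literal('r'),
        Token::Literal('c'),
        Token::Literal('/'),
        Token::Any,
        Token::Literal('.'),
        Token::Literal('r'),
        Token::Literal('s'),
    ];
    assert!(is_only_basename(&basename_glob));
    assert!(!is_only_basename(&path_glob));
}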
@ -19,6 +19,7 @@ use std::io;
use std::path::{Path, PathBuf};

use gitignore::{self, Gitignore, GitignoreBuilder, Match, Pattern};
use pathutil::is_hidden;
use types::Types;

const IGNORE_NAMES: &'static [&'static str] = &[
@ -377,14 +378,6 @@ impl Overrides {
    }
}

fn is_hidden<P: AsRef<Path>>(path: P) -> bool {
    if let Some(name) = path.as_ref().file_name() {
        name.to_str().map(|s| s.starts_with(".")).unwrap_or(false)
    } else {
        false
    }
}

#[cfg(test)]
mod tests {
    use std::path::Path;
@ -11,6 +11,8 @@ improvement on just listing the files to search (!).
use std::ffi::OsStr;
use std::path::Path;

use memchr::memrchr;

/// Strip `prefix` from the `path` and return the remainder.
///
/// If `path` doesn't have a prefix `prefix`, then return `None`.
@ -58,13 +60,7 @@ pub fn file_name<'a, P: AsRef<Path> + ?Sized>(
    } else if path.len() >= 2 && &path[path.len() - 2..] == &b".."[..] {
        return None;
    }
    let mut last_slash = 0;
    for (i, &b) in path.iter().enumerate().rev() {
        if b == b'/' {
            last_slash = i + 1;
            break;
        }
    }
    let last_slash = memrchr(b'/', path).map(|i| i + 1).unwrap_or(0);
    Some(OsStr::from_bytes(&path[last_slash..]))
}
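This hunk drops the hand-written reverse scan for the last slash in favor of memrchr from the memchr crate, which does the same search with an optimized routine. A minimal standalone sketch of the byte-level basename extraction, assuming slash-separated Unix-style paths:

use memchr::memrchr;

/// Returns the final component of a `/`-separated byte path, i.e. everything
/// after the last slash, or the whole input if there is no slash.
fn basename(path: &[u8]) -> &[u8] {
    // memrchr scans from the end; adding 1 skips past the slash itself.
    let start = memrchr(b'/', path).map(|i| i + 1).unwrap_or(0);
    &path[start..]
}

fn main() {
    assert_eq!(basename(b"src/glob.rs"), &b"glob.rs"[..]);
    assert_eq!(basename(b"glob.rs"), &b"glob.rs"[..]);
    assert_eq!(basename(b"src/"), &b""[..]);
}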
@ -78,3 +74,25 @@ pub fn file_name<'a, P: AsRef<Path> + ?Sized>(
) -> Option<&'a OsStr> {
    path.as_ref().file_name()
}

/// Returns true if and only if this file path is considered to be hidden.
#[cfg(unix)]
pub fn is_hidden<P: AsRef<Path>>(path: P) -> bool {
    use std::os::unix::ffi::OsStrExt;

    if let Some(name) = file_name(path.as_ref()) {
        name.as_bytes().get(0) == Some(&b'.')
    } else {
        false
    }
}

/// Returns true if and only if this file path is considered to be hidden.
#[cfg(not(unix))]
pub fn is_hidden<P: AsRef<Path>>(path: P) -> bool {
    if let Some(name) = file_name(path) {
        name.to_str().map(|s| s.starts_with(".")).unwrap_or(false)
    } else {
        false
    }
}
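On Unix the new helper looks at the raw first byte of the file name via OsStrExt, so no UTF-8 check is needed; the non-Unix fallback goes through to_str as before. A rough usage sketch of the same dot-file check written against plain std Paths rather than the pathutil helper:

use std::path::Path;

// Simplified version of the check: a path is "hidden" when its final
// component starts with a dot. The real Unix helper inspects raw bytes
// via OsStrExt so that non-UTF-8 file names are handled too.
fn is_hidden(path: &Path) -> bool {
    path.file_name()
        .and_then(|name| name.to_str())
        .map(|name| name.starts_with('.'))
        .unwrap_or(false)
}

fn main() {
    assert!(is_hidden(Path::new("src/.gitignore")));
    assert!(!is_hidden(Path::new("src/main.rs")));
    // A path ending in `..` has no file name, so it is not hidden.
    assert!(!is_hidden(Path::new("src/..")));
}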
@ -26,6 +26,7 @@ impl Iter {
    }

    /// Returns true if this entry should be skipped.
    #[inline(always)]
    fn skip_entry(&self, ent: &DirEntry) -> bool {
        if ent.depth() == 0 {
            // Never skip the root directory.
@ -41,6 +42,7 @@ impl Iter {
impl Iterator for Iter {
    type Item = DirEntry;

    #[inline(always)]
    fn next(&mut self) -> Option<DirEntry> {
        while let Some(ev) = self.it.next() {
            match ev {
@ -108,6 +110,7 @@ impl From<WalkDir> for WalkEventIter {
impl Iterator for WalkEventIter {
    type Item = walkdir::Result<WalkEvent>;

    #[inline(always)]
    fn next(&mut self) -> Option<walkdir::Result<WalkEvent>> {
        let dent = self.next.take().or_else(|| self.it.next());
        let depth = match dent {
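The remaining hunks, apparently in the directory-walking iterators, add #[inline(always)] to the hot next() methods. A toy sketch of the attribute on a trivial iterator; the names here are illustrative, not the real walker:

// A toy iterator standing in for the directory walker: the attribute asks
// the compiler to inline `next` into callers, which matters when it is
// called once per directory entry in a tight loop.
struct Countdown(u32);

impl Iterator for Countdown {
    type Item = u32;

    #[inline(always)]
    fn next(&mut self) -> Option<u32> {
        if self.0 == 0 {
            None
        } else {
            self.0 -= 1;
            Some(self.0)
        }
    }
}

fn main() {
    let total: u32 = Countdown(5).sum();
    assert_eq!(total, 0 + 1 + 2 + 3 + 4);
}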