refactor(typos): Focus API on primary use case
Ed Page committed Jan 2, 2021
1 parent aba85df commit 692f0ac
Showing 2 changed files with 9 additions and 60 deletions.
49 changes: 2 additions & 47 deletions crates/typos/src/parser.rs
@@ -2,6 +2,7 @@ use crate::tokens;
 use crate::Dictionary;
 use std::borrow::Cow;
 
+/// Extract typos from the buffer.
 #[derive(Clone)]
 pub struct ParserBuilder<'p, 'd> {
     tokenizer: Option<&'p tokens::Tokenizer>,
@@ -30,26 +31,12 @@ impl<'p, 'd> ParserBuilder<'p, 'd> {
     }
 
     /// Extract typos from the buffer.
-    pub fn typos(&self) -> TyposParser<'p, 'd> {
+    pub fn build(&self) -> TyposParser<'p, 'd> {
         TyposParser {
             tokenizer: self.tokenizer.unwrap_or_else(|| &DEFAULT_TOKENIZER),
             dictionary: self.dictionary,
         }
     }
-
-    /// Parse for Identifiers.
-    pub fn identifiers(&self) -> IdentifiersParser<'p> {
-        IdentifiersParser {
-            tokenizer: self.tokenizer.unwrap_or_else(|| &DEFAULT_TOKENIZER),
-        }
-    }
-
-    /// Parse for Words.
-    pub fn words(&self) -> WordsParser<'p> {
-        WordsParser {
-            tokenizer: self.tokenizer.unwrap_or_else(|| &DEFAULT_TOKENIZER),
-        }
-    }
 }
 
 impl<'p> Default for ParserBuilder<'p, 'static> {
@@ -158,35 +145,3 @@ impl<'m> Default for Typo<'m> {
         }
     }
 }
-
-/// Parse for Identifiers.
-#[derive(Debug, Clone)]
-pub struct IdentifiersParser<'p> {
-    tokenizer: &'p tokens::Tokenizer,
-}
-
-impl<'p> IdentifiersParser<'p> {
-    pub fn parse_str(&self, buffer: &'p str) -> impl Iterator<Item = tokens::Identifier<'p>> {
-        self.tokenizer.parse_str(buffer)
-    }
-
-    pub fn parse_bytes(&self, buffer: &'p [u8]) -> impl Iterator<Item = tokens::Identifier<'p>> {
-        self.tokenizer.parse_bytes(buffer)
-    }
-}
-
-/// Parse for Words.
-#[derive(Debug, Clone)]
-pub struct WordsParser<'p> {
-    tokenizer: &'p tokens::Tokenizer,
-}
-
-impl<'p> WordsParser<'p> {
-    pub fn parse_str(&self, buffer: &'p str) -> impl Iterator<Item = tokens::Word<'p>> {
-        self.tokenizer.parse_str(buffer).flat_map(|i| i.split())
-    }
-
-    pub fn parse_bytes(&self, buffer: &'p [u8]) -> impl Iterator<Item = tokens::Word<'p>> {
-        self.tokenizer.parse_bytes(buffer).flat_map(|i| i.split())
-    }
-}
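
For orientation, a minimal sketch of the renamed entry point, assuming the typos crate as a dependency. The builder chain matches the updated call sites in src/checks.rs below; the only caller-visible change is `.build()` where `.typos()` used to be. The wrapper function and its name are illustrative, not part of the crate.

    use typos::tokens;

    // Illustrative wrapper, not part of the crate: it only demonstrates the
    // renamed entry point. The builder chain is the same one used by the
    // updated checks; the resulting parser is dropped because this sketch
    // only covers construction.
    fn build_typos_parser(tokenizer: &tokens::Tokenizer, dictionary: &dyn typos::Dictionary) {
        let _parser = typos::ParserBuilder::new()
            .tokenizer(tokenizer)
            .dictionary(dictionary)
            .build(); // formerly `.typos()`
    }
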
20 changes: 7 additions & 13 deletions src/checks.rs
@@ -118,7 +118,7 @@ impl Check for Typos {
         let parser = typos::ParserBuilder::new()
             .tokenizer(tokenizer)
             .dictionary(dictionary)
-            .typos();
+            .build();
 
         if self.check_filenames {
             if let Some(file_name) = path.file_name().and_then(|s| s.to_str()) {
@@ -180,7 +180,7 @@ impl Check for FixTypos {
         let parser = typos::ParserBuilder::new()
             .tokenizer(tokenizer)
             .dictionary(dictionary)
-            .typos();
+            .build();
 
         if self.check_files {
             let (buffer, content_type) = read_file(path, reporter)?;
@@ -265,7 +265,7 @@ impl Check for DiffTypos {
         let parser = typos::ParserBuilder::new()
             .tokenizer(tokenizer)
             .dictionary(dictionary)
-            .typos();
+            .build();
 
         let mut content = Vec::new();
         let mut new_content = Vec::new();
@@ -379,13 +379,9 @@ impl Check for Identifiers {
         _dictionary: &dyn Dictionary,
         reporter: &dyn report::Report,
     ) -> Result<(), std::io::Error> {
-        let parser = typos::ParserBuilder::new()
-            .tokenizer(tokenizer)
-            .identifiers();
-
         if self.check_filenames {
             if let Some(file_name) = path.file_name().and_then(|s| s.to_str()) {
-                for word in parser.parse_str(file_name) {
+                for word in tokenizer.parse_str(file_name) {
                     let msg = report::Parse {
                         context: Some(report::PathContext { path }.into()),
                         kind: report::ParseKind::Identifier,
@@ -402,7 +398,7 @@ impl Check for Identifiers {
                 let msg = report::BinaryFile { path };
                 reporter.report(msg.into())?;
             } else {
-                for word in parser.parse_bytes(&buffer) {
+                for word in tokenizer.parse_bytes(&buffer) {
                     // HACK: Don't look up the line_num per entry to better match the performance
                     // of Typos for comparison purposes. We don't really get much out of it
                     // anyway.
@@ -437,11 +433,9 @@ impl Check for Words {
         _dictionary: &dyn Dictionary,
         reporter: &dyn report::Report,
     ) -> Result<(), std::io::Error> {
-        let parser = typos::ParserBuilder::new().tokenizer(tokenizer).words();
-
         if self.check_filenames {
             if let Some(file_name) = path.file_name().and_then(|s| s.to_str()) {
-                for word in parser.parse_str(file_name) {
+                for word in tokenizer.parse_str(file_name).flat_map(|i| i.split()) {
                     let msg = report::Parse {
                         context: Some(report::PathContext { path }.into()),
                         kind: report::ParseKind::Word,
@@ -458,7 +452,7 @@ impl Check for Words {
                 let msg = report::BinaryFile { path };
                 reporter.report(msg.into())?;
             } else {
-                for word in parser.parse_bytes(&buffer) {
+                for word in tokenizer.parse_bytes(&buffer).flat_map(|i| i.split()) {
                     // HACK: Don't look up the line_num per entry to better match the performance
                     // of Typos for comparison purposes. We don't really get much out of it
                     // anyway.
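
With `IdentifiersParser` and `WordsParser` gone, the two checks above show the replacement pattern: identifiers come straight from the tokenizer, and words are obtained by splitting each identifier. A minimal sketch of that pattern follows; the helper names and lifetime spelling are illustrative, while `parse_str` and `split` are the calls used in the updated checks (`parse_bytes` works the same way for byte buffers).

    use typos::tokens;

    // Illustrative helpers mirroring the updated Identifiers and Words checks.
    // Identifiers are produced directly by the tokenizer...
    fn identifiers<'t>(
        tokenizer: &'t tokens::Tokenizer,
        buffer: &'t str,
    ) -> impl Iterator<Item = tokens::Identifier<'t>> {
        tokenizer.parse_str(buffer)
    }

    // ...and words come from splitting each identifier, as the Words check
    // now does with `.flat_map(|i| i.split())`.
    fn words<'t>(
        tokenizer: &'t tokens::Tokenizer,
        buffer: &'t str,
    ) -> impl Iterator<Item = tokens::Word<'t>> {
        tokenizer.parse_str(buffer).flat_map(|i| i.split())
    }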
