Commit
fix doctests
trinity-1686a committed Mar 3, 2023
1 parent 9fd9266 commit 59f7417
Showing 6 changed files with 23 additions and 15 deletions.
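
All six files make the same mechanical change: doctests that assembled a `TextAnalyzer` by chaining filters onto `TextAnalyzer::from(...)` now go through the builder API and finish with `.build()`. A minimal before/after sketch of the pattern, using only names that appear in the hunks below:

```rust
use tantivy::tokenizer::*;

// Old form removed by this commit:
// let analyzer = TextAnalyzer::from(SimpleTokenizer)
//     .filter(LowerCaser);

// New form: start a builder, chain filters, finalize with `.build()`.
let analyzer = TextAnalyzer::builder(SimpleTokenizer)
    .filter(LowerCaser)
    .build();

let mut stream = analyzer.token_stream("Hello");
assert_eq!(stream.next().unwrap().text, "hello");
```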
10 changes: 6 additions & 4 deletions src/tokenizer/alphanum_only.rs
@@ -2,16 +2,18 @@
 //! ```rust
 //! use tantivy::tokenizer::*;
 //!
-//! let tokenizer = TextAnalyzer::from(RawTokenizer)
-//!     .filter(AlphaNumOnlyFilter);
+//! let tokenizer = TextAnalyzer::builder(RawTokenizer)
+//!     .filter(AlphaNumOnlyFilter)
+//!     .build();
 //!
 //! let mut stream = tokenizer.token_stream("hello there");
 //! // is none because the raw filter emits one token that
 //! // contains a space
 //! assert!(stream.next().is_none());
 //!
-//! let tokenizer = TextAnalyzer::from(SimpleTokenizer)
-//!     .filter(AlphaNumOnlyFilter);
+//! let tokenizer = TextAnalyzer::builder(SimpleTokenizer)
+//!     .filter(AlphaNumOnlyFilter)
+//!     .build();
 //!
 //! let mut stream = tokenizer.token_stream("hello there 💣");
 //! assert!(stream.next().is_some());
10 changes: 6 additions & 4 deletions src/tokenizer/mod.rs
@@ -66,10 +66,11 @@
 //! ```rust
 //! use tantivy::tokenizer::*;
 //!
-//! let en_stem = TextAnalyzer::from(SimpleTokenizer)
+//! let en_stem = TextAnalyzer::builder(SimpleTokenizer)
 //!     .filter(RemoveLongFilter::limit(40))
 //!     .filter(LowerCaser)
-//!     .filter(Stemmer::new(Language::English));
+//!     .filter(Stemmer::new(Language::English))
+//!     .build();
 //! ```
 //!
 //! Once your tokenizer is defined, you need to
@@ -112,9 +113,10 @@
 //! let index = Index::create_in_ram(schema);
 //!
 //! // We need to register our tokenizer :
-//! let custom_en_tokenizer = TextAnalyzer::from(SimpleTokenizer)
+//! let custom_en_tokenizer = TextAnalyzer::builder(SimpleTokenizer)
 //!     .filter(RemoveLongFilter::limit(40))
-//!     .filter(LowerCaser);
+//!     .filter(LowerCaser)
+//!     .build();
 //! index
 //!     .tokenizers()
 //!     .register("custom_en", custom_en_tokenizer);
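
The mod.rs example stops at registration. For orientation, a rough sketch of where the registered name goes next, assuming tantivy's usual schema API (the field name and options here are illustrative, not part of this commit):

```rust
use tantivy::schema::{IndexRecordOption, Schema, TextFieldIndexing, TextOptions};

// Sketch: a text field wired to the "custom_en" tokenizer registered
// above; the schema refers to the analyzer by its registered name.
let indexing = TextFieldIndexing::default()
    .set_tokenizer("custom_en")
    .set_index_option(IndexRecordOption::WithFreqsAndPositions);
let options = TextOptions::default().set_indexing_options(indexing);

let mut schema_builder = Schema::builder();
schema_builder.add_text_field("title", options);
let schema = schema_builder.build();
```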
5 changes: 3 additions & 2 deletions src/tokenizer/remove_long.rs
@@ -2,8 +2,9 @@
 //! ```rust
 //! use tantivy::tokenizer::*;
 //!
-//! let tokenizer = TextAnalyzer::from(SimpleTokenizer)
-//!     .filter(RemoveLongFilter::limit(5));
+//! let tokenizer = TextAnalyzer::builder(SimpleTokenizer)
+//!     .filter(RemoveLongFilter::limit(5))
+//!     .build();
 //!
 //! let mut stream = tokenizer.token_stream("toolong nice");
 //! // because `toolong` is more than 5 characters, it is filtered
6 changes: 4 additions & 2 deletions src/tokenizer/split_compound_words.rs
@@ -23,9 +23,11 @@ use super::{Token, TokenFilter, TokenStream, Tokenizer};
 /// use tantivy::tokenizer::{SimpleTokenizer, SplitCompoundWords, TextAnalyzer};
 ///
 /// let tokenizer =
-///     TextAnalyzer::from(SimpleTokenizer).filter(SplitCompoundWords::from_dictionary([
+///     TextAnalyzer::builder(SimpleTokenizer)
+///         .filter(SplitCompoundWords::from_dictionary([
 ///             "dampf", "schiff", "fahrt", "brot", "backen", "automat",
-///         ]));
+///         ]))
+///         .build();
 ///
 /// let mut stream = tokenizer.token_stream("dampfschifffahrt");
 /// assert_eq!(stream.next().unwrap().text, "dampf");
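
The hunk is cut off after the first assertion. A self-contained sketch of the full decomposition, assuming the filter replaces a token with its parts when the whole word is covered by dictionary entries (the last two assertions are my guess at the truncated doctest, not shown in this diff):

```rust
use tantivy::tokenizer::{SimpleTokenizer, SplitCompoundWords, TextAnalyzer};

// "dampfschifffahrt" = "dampf" + "schiff" + "fahrt", all in the dictionary,
// so the compound should come out as three tokens.
let tokenizer = TextAnalyzer::builder(SimpleTokenizer)
    .filter(SplitCompoundWords::from_dictionary([
        "dampf", "schiff", "fahrt", "brot", "backen", "automat",
    ]))
    .build();

let mut stream = tokenizer.token_stream("dampfschifffahrt");
assert_eq!(stream.next().unwrap().text, "dampf");
assert_eq!(stream.next().unwrap().text, "schiff");
assert_eq!(stream.next().unwrap().text, "fahrt");
```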
5 changes: 3 additions & 2 deletions src/tokenizer/stop_word_filter/mod.rs
@@ -2,8 +2,9 @@
 //! ```rust
 //! use tantivy::tokenizer::*;
 //!
-//! let tokenizer = TextAnalyzer::from(SimpleTokenizer)
-//!     .filter(StopWordFilter::remove(vec!["the".to_string(), "is".to_string()]));
+//! let tokenizer = TextAnalyzer::builder(SimpleTokenizer)
+//!     .filter(StopWordFilter::remove(vec!["the".to_string(), "is".to_string()]))
+//!     .build();
 //!
 //! let mut stream = tokenizer.token_stream("the fox is crafty");
 //! assert_eq!(stream.next().unwrap().text, "fox");
2 changes: 1 addition & 1 deletion src/tokenizer/tokenizer.rs
@@ -54,7 +54,7 @@ impl<T: Tokenizer> TextAnalyzerBuilder<T> {
 /// ```rust
 /// use tantivy::tokenizer::*;
 ///
-/// let en_stem = TextAnalyzer::build(SimpleTokenizer)
+/// let en_stem = TextAnalyzer::builder(SimpleTokenizer)
 ///     .filter(RemoveLongFilter::limit(40))
 ///     .filter(LowerCaser)
 ///     .filter(Stemmer::default())
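
This last hunk is also truncated before the end of the chain; a plausible completion under the same pattern as the other files, assuming `Stemmer::default()` stems English:

```rust
use tantivy::tokenizer::*;

// Sketch: the doctest presumably closes with `.build()` like the others.
let en_stem = TextAnalyzer::builder(SimpleTokenizer)
    .filter(RemoveLongFilter::limit(40))
    .filter(LowerCaser)
    .filter(Stemmer::default())
    .build();

let mut stream = en_stem.token_stream("Searching");
// Lowercased then stemmed: "Searching" -> "searching" -> "search".
assert_eq!(stream.next().unwrap().text, "search");
```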
