Skip to content

Commit

Permalink
Fixing test_alignment_methods
Browse files Browse the repository at this point in the history
  • Loading branch information
Narsil committed Aug 31, 2022
1 parent cc60f86 commit 4714139
Show file tree
Hide file tree
Showing 6 changed files with 184 additions and 10 deletions.
2 changes: 1 addition & 1 deletion bindings/python/Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 2 additions & 2 deletions tokenizers/src/pre_tokenizers/byte_level.rs
Original file line number Diff line number Diff line change
Expand Up @@ -484,7 +484,7 @@ mod tests {
);
let expected = Encoding::new(
vec![0; 5],
vec![],
vec![0; 5],
vec![
"Ġ".into(),
"ĠĠĠĠHelloĠĠ".into(),
Expand All @@ -508,7 +508,7 @@ mod tests {

let pair_expected = Encoding::new(
vec![0; 10],
vec![],
vec![0, 0, 0, 0, 0, 1, 1, 1, 1, 1],
vec![
"Ġ".into(),
"ĠĠĠĠHelloĠĠ".into(),
Expand Down
87 changes: 87 additions & 0 deletions tokenizers/src/processors/bert.rs
Original file line number Diff line number Diff line change
Expand Up @@ -195,4 +195,91 @@ mod tests {
bert
);
}

#[test]
fn bert_processing() {
    use crate::Token;

    let processor = BertProcessing::default();
    // BERT adds [CLS]/[SEP] around a single sequence (2 tokens) and an
    // extra [SEP] after the pair (3 tokens total).
    assert_eq!(processor.added_tokens(false), 2);
    assert_eq!(processor.added_tokens(true), 3);

    let encoding = Encoding::from_tokens(
        vec![
            Token::new(12, "Hello".into(), (0, 5)),
            Token::new(14, "there".into(), (6, 11)),
        ],
        0,
    );
    let pair = Encoding::from_tokens(vec![Token::new(15, "pair".into(), (0, 4))], 0);

    // Single sequence with special tokens: [CLS] Hello there [SEP]
    let single_encoding = processor.process(encoding.clone(), None, true).unwrap();
    let expected_single = Encoding::new(
        vec![101, 12, 14, 102],
        vec![0, 0, 0, 0],
        vec![
            "[CLS]".into(),
            "Hello".into(),
            "there".into(),
            "[SEP]".into(),
        ],
        vec![None; 4],
        vec![(0, 0), (0, 5), (6, 11), (0, 0)],
        vec![1, 0, 0, 1],
        vec![1; 4],
        vec![],
        // Sequence 0 occupies token indices 1..3 (special tokens excluded).
        HashMap::from_iter(vec![(0, 1..3)]),
    );
    assert_eq!(single_encoding, expected_single);
    // Special tokens map to no sequence.
    assert_eq!(single_encoding.token_to_sequence(2), Some(0));
    assert_eq!(single_encoding.token_to_sequence(3), None);

    // Pair with special tokens: [CLS] Hello there [SEP] pair [SEP]
    let pair_encoding = processor
        .process(encoding.clone(), Some(pair.clone()), true)
        .unwrap();
    let expected_pair = Encoding::new(
        vec![101, 12, 14, 102, 15, 102],
        vec![0, 0, 0, 0, 1, 1],
        vec![
            "[CLS]".into(),
            "Hello".into(),
            "there".into(),
            "[SEP]".into(),
            "pair".into(),
            "[SEP]".into(),
        ],
        vec![None; 6],
        vec![(0, 0), (0, 5), (6, 11), (0, 0), (0, 4), (0, 0)],
        vec![1, 0, 0, 1, 0, 1],
        vec![1; 6],
        vec![],
        // Sequence 0 at 1..3, sequence 1 at 4..5.
        HashMap::from_iter(vec![(0, 1..3), (1, 4..5)]),
    );
    assert_eq!(pair_encoding, expected_pair);
    assert_eq!(pair_encoding.token_to_sequence(2), Some(0));
    assert_eq!(pair_encoding.token_to_sequence(3), None);
    assert_eq!(pair_encoding.token_to_sequence(4), Some(1));
    assert_eq!(pair_encoding.token_to_sequence(5), None);

    // Without special tokens every token belongs to a sequence.
    let pair_encoding = processor.process(encoding, Some(pair), false).unwrap();
    let expected_plain = Encoding::new(
        vec![12, 14, 15],
        vec![0, 0, 1],
        vec!["Hello".into(), "there".into(), "pair".into()],
        vec![None; 3],
        vec![(0, 5), (6, 11), (0, 4)],
        vec![0; 3],
        vec![1; 3],
        vec![],
        HashMap::from_iter(vec![(0, 0..2), (1, 2..3)]),
    );
    assert_eq!(pair_encoding, expected_plain);
    assert_eq!(pair_encoding.token_to_sequence(0), Some(0));
    assert_eq!(pair_encoding.token_to_sequence(1), Some(0));
    assert_eq!(pair_encoding.token_to_sequence(2), Some(1));
}
}
88 changes: 86 additions & 2 deletions tokenizers/src/processors/roberta.rs
Original file line number Diff line number Diff line change
Expand Up @@ -146,7 +146,7 @@ impl PostProcessor for RobertaProcessing {
)
} else {
let pair_ids = [&[self.sep.1], encoding.get_ids(), &[self.sep.1]].concat();
let pair_type_ids = vec![0; encoding.get_ids().len() + 2];
let pair_type_ids = vec![1; encoding.get_ids().len() + 2];
let pair_tokens = [
&[self.sep.0.clone()],
encoding.get_tokens(),
Expand Down Expand Up @@ -176,7 +176,7 @@ impl PostProcessor for RobertaProcessing {
.map(|encoding| {
let pair_ids =
[&[self.sep.1], encoding.get_ids(), &[self.sep.1]].concat();
let pair_type_ids = vec![0; encoding.get_ids().len() + 2];
let pair_type_ids = vec![1; encoding.get_ids().len() + 2];
let pair_tokens = [
&[self.sep.0.clone()],
encoding.get_tokens(),
Expand Down Expand Up @@ -240,4 +240,88 @@ mod tests {
roberta
);
}

#[test]
fn roberta_processing() {
    use crate::Token;

    let processor = RobertaProcessing::default();
    // RoBERTa wraps a single sequence in <s>…</s> (2 tokens) and wraps the
    // pair in its own </s>…</s> as well (4 tokens total).
    assert_eq!(processor.added_tokens(false), 2);
    assert_eq!(processor.added_tokens(true), 4);

    let encoding = Encoding::from_tokens(
        vec![
            Token::new(12, "Hello".into(), (0, 5)),
            Token::new(14, "there".into(), (6, 11)),
        ],
        0,
    );
    let pair = Encoding::from_tokens(vec![Token::new(15, "pair".into(), (0, 4))], 0);

    // Single sequence with special tokens: <s> Hello there </s>
    let single_encoding = processor.process(encoding.clone(), None, true).unwrap();
    let expected_single = Encoding::new(
        vec![0, 12, 14, 2],
        vec![0, 0, 0, 0],
        vec!["<s>".into(), "Hello".into(), "there".into(), "</s>".into()],
        vec![None; 4],
        vec![(0, 0), (0, 5), (6, 11), (0, 0)],
        vec![1, 0, 0, 1],
        vec![1; 4],
        vec![],
        // Sequence 0 occupies token indices 1..3 (special tokens excluded).
        HashMap::from_iter(vec![(0, 1..3)]),
    );
    assert_eq!(single_encoding, expected_single);
    assert_eq!(single_encoding.token_to_sequence(2), Some(0));
    assert_eq!(single_encoding.token_to_sequence(3), None);

    // Pair with special tokens: <s> Hello there </s> </s> pair </s>
    let pair_encoding = processor
        .process(encoding.clone(), Some(pair.clone()), true)
        .unwrap();
    let expected_pair = Encoding::new(
        vec![0, 12, 14, 2, 2, 15, 2],
        vec![0, 0, 0, 0, 1, 1, 1],
        vec![
            "<s>".into(),
            "Hello".into(),
            "there".into(),
            "</s>".into(),
            "</s>".into(),
            "pair".into(),
            "</s>".into(),
        ],
        vec![None; 7],
        vec![(0, 0), (0, 5), (6, 11), (0, 0), (0, 0), (0, 4), (0, 0)],
        vec![1, 0, 0, 1, 1, 0, 1],
        vec![1; 7],
        vec![],
        // Sequence 0 at 1..3; sequence 1 sits between the two pair </s> at 5..6.
        HashMap::from_iter(vec![(0, 1..3), (1, 5..6)]),
    );
    assert_eq!(pair_encoding, expected_pair);
    assert_eq!(pair_encoding.token_to_sequence(2), Some(0));
    assert_eq!(pair_encoding.token_to_sequence(3), None);
    assert_eq!(pair_encoding.token_to_sequence(4), None);
    assert_eq!(pair_encoding.token_to_sequence(5), Some(1));
    assert_eq!(pair_encoding.token_to_sequence(6), None);

    // Without special tokens every token belongs to a sequence.
    let pair_encoding = processor.process(encoding, Some(pair), false).unwrap();
    let expected_plain = Encoding::new(
        vec![12, 14, 15],
        vec![0, 0, 1],
        vec!["Hello".into(), "there".into(), "pair".into()],
        vec![None; 3],
        vec![(0, 5), (6, 11), (0, 4)],
        vec![0; 3],
        vec![1; 3],
        vec![],
        HashMap::from_iter(vec![(0, 0..2), (1, 2..3)]),
    );
    assert_eq!(pair_encoding, expected_plain);
    assert_eq!(pair_encoding.token_to_sequence(0), Some(0));
    assert_eq!(pair_encoding.token_to_sequence(1), Some(0));
    assert_eq!(pair_encoding.token_to_sequence(2), Some(1));
}
}
6 changes: 3 additions & 3 deletions tokenizers/src/processors/sequence.rs
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ mod tests {
fn process_chain() {
let start = Encoding::new(
vec![0; 5],
vec![],
vec![0; 5],
vec![
"Ġ".into(),
"ĠĠĠĠHelloĠĠ".into(),
Expand All @@ -67,7 +67,7 @@ mod tests {
let sequence = Sequence::new(vec![PostProcessorWrapper::ByteLevel(bytelevel)]);
let expected = Encoding::new(
vec![0; 5],
vec![],
vec![0; 5],
vec![
"Ġ".into(),
"ĠĠĠĠHelloĠĠ".into(),
Expand All @@ -94,7 +94,7 @@ mod tests {

let pair_expected = Encoding::new(
vec![0; 10],
vec![],
vec![0, 0, 0, 0, 0, 1, 1, 1, 1, 1],
vec![
"Ġ".into(),
"ĠĠĠĠHelloĠĠ".into(),
Expand Down
7 changes: 5 additions & 2 deletions tokenizers/src/tokenizer/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -100,14 +100,17 @@ pub trait PostProcessor {
pair_encoding: Option<Encoding>,
add_special_tokens: bool,
) -> Result<Encoding> {
let encodings = if let Some(pair_encoding) = pair_encoding {
let mut encodings = if let Some(pair_encoding) = pair_encoding {
vec![encoding, pair_encoding]
} else {
vec![encoding]
};
encodings.iter_mut().enumerate().for_each(|(i, encoding)| {
encoding.set_sequence_id(i);
encoding.set_type_ids(vec![i as u32; encoding.len()]);
});

let encodings = self.process_encodings(encodings, add_special_tokens)?;

Ok(Encoding::merge(encodings, false))
}

Expand Down

0 comments on commit 4714139

Please sign in to comment.