Update dev-dependencies
wooorm committed Jan 7, 2025
1 parent 9ed45b4 commit 5c9eba1
Showing 8 changed files with 39 additions and 43 deletions.
2 changes: 1 addition & 1 deletion Cargo.toml
@@ -13,7 +13,7 @@ criterion = "0.5"
 env_logger = "0.11"
 pretty_assertions = { workspace = true }
 serde_json = { version = "1" }
-swc_core = { version = "0.100", features = [
+swc_core = { version = "10", features = [
   "common",
   "ecma_ast",
   "ecma_parser",
3 changes: 2 additions & 1 deletion mdast_util_to_markdown/src/handle/list_item.rs
@@ -102,5 +102,6 @@ impl Handle for ListItem {
 }

 fn compute_size(a: usize) -> usize {
-    ((a + 4 - 1) / 4) * 4
+    // `a.div_ceil(4)` is `((a + 4 - 1) / 4)`
+    a.div_ceil(4) * 4
 }
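For reference, `usize::div_ceil` (stable since Rust 1.73) is ceiling division, so the new helper rounds up to a multiple of 4 exactly like the old manual expression. A minimal standalone sketch checking the equivalence (not part of the crate):

```rust
fn compute_size(a: usize) -> usize {
    // Round `a` up to the nearest multiple of 4.
    a.div_ceil(4) * 4
}

fn main() {
    // `div_ceil(4)` matches the old manual rounding `(a + 4 - 1) / 4`.
    for a in 0..32usize {
        assert_eq!(a.div_ceil(4) * 4, ((a + 4 - 1) / 4) * 4);
    }
    assert_eq!(compute_size(5), 8);
    assert_eq!(compute_size(8), 8);
}
```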
2 changes: 1 addition & 1 deletion src/construct/content.rs
@@ -182,7 +182,7 @@ pub fn resolve(tokenizer: &mut Tokenizer) -> Result<Option<Subresult>, message::
     let result = subtokenize(
         &mut tokenizer.events,
         tokenizer.parse_state,
-        &Some(Content::Content),
+        Some(&Content::Content),
     )?;

     Ok(Some(result))
65 changes: 30 additions & 35 deletions src/construct/document.rs
@@ -304,26 +304,24 @@ pub fn containers_after(tokenizer: &mut Tokenizer) -> State {
         != tokenizer.tokenize_state.document_container_stack.len();
     child.define_skip(tokenizer.point.clone());

-    match tokenizer.current {
-        // Note: EOL is part of data.
-        None => State::Retry(StateName::DocumentFlowEnd),
-        Some(_) => {
-            let current = tokenizer.events.len();
-            let previous = tokenizer.tokenize_state.document_data_index;
-            if let Some(previous) = previous {
-                tokenizer.events[previous].link.as_mut().unwrap().next = Some(current);
-            }
-            tokenizer.tokenize_state.document_data_index = Some(current);
-            tokenizer.enter_link(
-                Name::Data,
-                Link {
-                    previous,
-                    next: None,
-                    content: Content::Flow,
-                },
-            );
-            State::Retry(StateName::DocumentFlowInside)
-        }
-    }
+    if tokenizer.current.is_none() {
+        State::Retry(StateName::DocumentFlowEnd)
+    } else {
+        let current = tokenizer.events.len();
+        let previous = tokenizer.tokenize_state.document_data_index;
+        if let Some(previous) = previous {
+            tokenizer.events[previous].link.as_mut().unwrap().next = Some(current);
+        }
+        tokenizer.tokenize_state.document_data_index = Some(current);
+        tokenizer.enter_link(
+            Name::Data,
+            Link {
+                previous,
+                next: None,
+                content: Content::Flow,
+            },
+        );
+        State::Retry(StateName::DocumentFlowInside)
+    }
 }

@@ -450,23 +448,20 @@ pub fn flow_end(tokenizer: &mut Tokenizer) -> State {
         debug_assert!(result.is_ok(), "did not expect error when exiting");
     }

-    match tokenizer.current {
-        None => {
-            tokenizer.tokenize_state.document_continued = 0;
-            if let Err(message) = exit_containers(tokenizer, &Phase::Eof) {
-                return State::Error(message);
-            }
-            resolve(tokenizer);
-            State::Ok
-        }
-        Some(_) => {
-            tokenizer.tokenize_state.document_continued = 0;
-            tokenizer.tokenize_state.document_lazy_accepting_before =
-                document_lazy_continuation_current;
-            // Containers would only be interrupting if we’ve continued.
-            tokenizer.interrupt = false;
-            State::Retry(StateName::DocumentContainerExistingBefore)
-        }
-    }
+    if tokenizer.current.is_none() {
+        tokenizer.tokenize_state.document_continued = 0;
+        if let Err(message) = exit_containers(tokenizer, &Phase::Eof) {
+            return State::Error(message);
+        }
+        resolve(tokenizer);
+        State::Ok
+    } else {
+        tokenizer.tokenize_state.document_continued = 0;
+        tokenizer.tokenize_state.document_lazy_accepting_before =
+            document_lazy_continuation_current;
+        // Containers would only be interrupting if we’ve continued.
+        tokenizer.interrupt = false;
+        State::Retry(StateName::DocumentContainerExistingBefore)
+    }
 }

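Both hunks above replace a `match` on `tokenizer.current` whose `Some(_)` arm ignored the matched value with an `is_none()` check, dropping one level of nesting without changing behavior. A stripped-down sketch of the pattern, with illustrative names rather than the crate's real types:

```rust
#[derive(Debug, PartialEq)]
enum State {
    FlowEnd,
    FlowInside,
}

// Hypothetical stand-in for `tokenizer.current`: `None` means end of input.
fn next_state(current: Option<char>) -> State {
    // Equivalent to `match current { None => ..., Some(_) => ... }`
    // when the `Some` arm never uses the inner value.
    if current.is_none() {
        State::FlowEnd
    } else {
        State::FlowInside
    }
}

fn main() {
    assert_eq!(next_state(None), State::FlowEnd);
    assert_eq!(next_state(Some('a')), State::FlowInside);
}
```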
2 changes: 1 addition & 1 deletion src/mdast.rs
@@ -96,7 +96,7 @@ impl serde::ser::Serialize for AlignKind {
 struct AlignKindVisitor;

 #[cfg(feature = "serde")]
-impl<'de> serde::de::Visitor<'de> for AlignKindVisitor {
+impl serde::de::Visitor<'_> for AlignKindVisitor {
     type Value = AlignKind;

     fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
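The explicit `'de` lifetime on the visitor impl can be elided with `'_` because `AlignKindVisitor` never ties its output to the input lifetime. A minimal sketch of the same pattern, assuming `serde` and `serde_json` dependencies and an illustrative visitor type:

```rust
use serde::Deserializer as _;
use std::fmt;

struct U8Visitor;

// `'_` works here because the visitor never borrows from the deserializer input.
impl serde::de::Visitor<'_> for U8Visitor {
    type Value = u8;

    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str("a small unsigned integer")
    }

    fn visit_u64<E>(self, value: u64) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        u8::try_from(value).map_err(E::custom)
    }
}

fn main() {
    let mut de = serde_json::Deserializer::from_str("200");
    assert_eq!(de.deserialize_u64(U8Visitor).ok(), Some(200_u8));
}
```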
2 changes: 1 addition & 1 deletion src/parser.rs
@@ -74,6 +74,6 @@ pub fn parse<'a>(
             return Ok((events, parse_state));
         }

-        result = subtokenize(&mut events, &parse_state, &None)?;
+        result = subtokenize(&mut events, &parse_state, None)?;
     }
 }
4 changes: 2 additions & 2 deletions src/subtokenize.rs
@@ -78,7 +78,7 @@ pub fn link_to(events: &mut [Event], previous: usize, next: usize) {
 pub fn subtokenize(
     events: &mut Vec<Event>,
     parse_state: &ParseState,
-    filter: &Option<Content>,
+    filter: Option<&Content>,
 ) -> Result<Subresult, message::Message> {
     let mut map = EditMap::new();
     let mut index = 0;
@@ -98,7 +98,7 @@ pub fn subtokenize(

         // No need to enter linked events again.
         if link.previous.is_none()
-            && (filter.is_none() || &link.content == filter.as_ref().unwrap())
+            && (filter.is_none() || &link.content == *filter.as_ref().unwrap())
         {
             // Index into `events` pointing to a chunk.
             let mut link_index = Some(index);
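The `subtokenize` signature now takes `Option<&Content>` instead of `&Option<Content>`, the usual preference for optional reference parameters: callers pass `None` or `Some(&value)` directly, with no `&Option<_>` temporary, which is why the call sites in src/construct/content.rs and src/parser.rs above now pass `Some(&Content::Content)` and `None`. A small sketch of the pattern with illustrative types (not the crate's API):

```rust
#[derive(Debug, PartialEq)]
enum Content {
    Flow,
    Content,
}

// Taking `Option<&Content>` instead of `&Option<Content>` lets callers pass
// `None` or `Some(&value)` directly, and the callee compares without `as_ref()`.
fn matches_filter(content: &Content, filter: Option<&Content>) -> bool {
    filter.is_none() || filter == Some(content)
}

fn main() {
    let link_content = Content::Flow;
    assert!(matches_filter(&link_content, None));
    assert!(matches_filter(&link_content, Some(&Content::Flow)));
    assert!(!matches_filter(&link_content, Some(&Content::Content)));
}
```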
2 changes: 1 addition & 1 deletion src/util/sanitize_uri.rs
@@ -55,7 +55,7 @@ pub fn sanitize_with_protocols(value: &str, protocols: &[&str]) -> String {
     let value = sanitize(value);

     let end = value.find(|c| matches!(c, '?' | '#' | '/'));
-    let mut colon = value.find(|c| matches!(c, ':'));
+    let mut colon = value.find(':');

     // If the first colon is after `?`, `#`, or `/`, it’s not a protocol.
     if let Some(end) = end {
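`str::find` accepts any pattern, including a plain `char`, so the closure around `matches!` is unnecessary when searching for a single character. A quick standalone check of the equivalence:

```rust
fn main() {
    let value = "https://example.com/#fragment";

    // A `char` pattern and the closure form find the same index.
    assert_eq!(value.find(':'), value.find(|c| matches!(c, ':')));
    assert_eq!(value.find(':'), Some(5));

    // Multi-character alternatives still need the closure form.
    assert_eq!(value.find(|c| matches!(c, '?' | '#' | '/')), Some(6));
}
```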
