Switch back to using std
Fixes #63.
kmcallister committed Apr 19, 2015
1 parent 5614706 commit 5293725
Showing 22 changed files with 34 additions and 136 deletions.
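
The diffs below repeat a single pattern across the crate: imports that previously went through the freestanding facade crates (core, collections, alloc) now come straight from std. As a minimal standalone sketch of that pattern (an illustration, not an excerpt from any of the changed files):

// Before, under #![no_std] with the facade crates linked explicitly:
//     use core::default::Default;
//     use collections::string::String;
//     use alloc::boxed::Box;
//
// After, with an ordinary std build:
use std::default::Default;
use std::string::String; // String (and Box) are in the std prelude, so this
                         // import is shown only for symmetry with the old code.

fn main() {
    // Behaviour is unchanged; only the crate paths move.
    let s: String = Default::default();
    let b = Box::new(s);
    println!("boxed empty string has length {}", b.len());
}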
7 changes: 2 additions & 5 deletions src/driver.rs
@@ -9,14 +9,11 @@
 
 //! High-level interface to the parser.
 
-use core::prelude::*;
-
 use tokenizer::{TokenizerOpts, Tokenizer, TokenSink};
 use tree_builder::{TreeBuilderOpts, TreeBuilder, TreeSink};
 
-use core::default::Default;
-use core::option;
-use collections::string::String;
+use std::option;
+use std::default::Default;
 
 use string_cache::{Atom, QualName};
9 changes: 2 additions & 7 deletions src/for_c/common.rs
@@ -7,14 +7,9 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use core::prelude::*;
-
-use core::ptr;
-use core::slice;
-use core::str;
-use core::marker::PhantomData;
+use std::{ptr, slice, str};
+use std::marker::PhantomData;
 use std::borrow::Cow;
-use collections::string::String;
 
 use libc::{size_t, c_int, c_char, strlen};
9 changes: 3 additions & 6 deletions src/for_c/tokenizer.rs
@@ -9,18 +9,15 @@
 
 #![allow(non_camel_case_types)]
 
-use core::prelude::*;
-
 use for_c::common::{LifetimeBuf, AsLifetimeBuf, h5e_buf, c_bool};
 
 use tokenizer::{TokenSink, Token, Doctype, Tag, ParseError, DoctypeToken};
 use tokenizer::{CommentToken, CharacterTokens, NullCharacterToken};
 use tokenizer::{TagToken, StartTag, EndTag, EOFToken, Tokenizer};
 
-use core::mem;
-use core::default::Default;
-use alloc::boxed::Box;
-use collections::String;
+use std::mem;
+use std::default::Default;
 
 use libc::{c_void, c_int, size_t};
 
 #[repr(C)]
16 changes: 1 addition & 15 deletions src/lib.rs
@@ -10,31 +10,17 @@
 #![crate_name="html5ever"]
 #![crate_type="dylib"]
 
-#![feature(plugin, box_syntax, no_std, core, collections, alloc, str_char, slice_patterns)]
+#![feature(plugin, box_syntax, core, collections, alloc, str_char, slice_patterns)]
 #![deny(warnings)]
 #![allow(unused_parens)]
 
 #![plugin(phf_macros)]
 #![plugin(string_cache_plugin)]
 #![plugin(html5ever_macros)]
 
-// FIXME(#63): switch back to using std
-#![no_std]
-
-extern crate alloc;
-
-#[macro_use]
-extern crate core;
-
-#[macro_use]
-extern crate std;
-
 #[cfg(for_c)]
 extern crate libc;
 
-#[macro_use]
-extern crate collections;
-
 #[macro_use]
 extern crate log;
 
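As a side note on why the extern crate block above can simply disappear: in a normal std build the macros and collection types those crates supplied are linked and imported implicitly. A small runnable sketch (not part of the commit) illustrating this:

use std::collections::VecDeque;

fn main() {
    // format!, vec!, String and VecDeque all come from std with no
    // extern crate or #[macro_use] declarations at the crate root.
    let mut q: VecDeque<String> = VecDeque::new();
    q.push_back(format!("{} additions", 34));
    q.push_back(vec!["136", "deletions"].join(" "));
    for item in &q {
        println!("{}", item);
    }
}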
5 changes: 1 addition & 4 deletions src/serialize/mod.rs
@@ -7,11 +7,8 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use core::prelude::*;
-
 use std::io::{self, Write};
-use core::default::Default;
-use collections::vec::Vec;
+use std::default::Default;
 
 use string_cache::{Atom, QualName};
2 changes: 0 additions & 2 deletions src/sink/common.rs
@@ -9,8 +9,6 @@
 
 use tokenizer::Attribute;
 
-use collections::vec::Vec;
-use collections::string::String;
 use string_cache::QualName;
 
 pub use self::NodeEnum::{Document, Doctype, Text, Comment, Element};
14 changes: 4 additions & 10 deletions src/sink/owned_dom.rs
@@ -19,8 +19,6 @@
 //! This is believed to be memory safe, but if you want to be extra
 //! careful you can use `RcDom` instead.
 
-use core::prelude::*;
-
 use sink::common::{NodeEnum, Document, Doctype, Text, Comment, Element};
 
 use tokenizer::Attribute;
@@ -31,14 +29,10 @@ use serialize::TraversalScope;
 use serialize::TraversalScope::{IncludeNode, ChildrenOnly};
 use driver::ParseResult;
 
-use core::cell::UnsafeCell;
-use core::default::Default;
-use core::mem::transmute;
-use core::mem;
-use core::ptr;
-use alloc::boxed::Box;
-use collections::vec::Vec;
-use collections::string::String;
+use std::{mem, ptr};
+use std::cell::UnsafeCell;
+use std::default::Default;
+use std::mem::transmute;
 use std::borrow::Cow;
 use std::io::{self, Write};
 use std::collections::HashSet;
10 changes: 3 additions & 7 deletions src/sink/rcdom.rs
@@ -12,8 +12,6 @@
 //! This is sufficient as a static parse tree, but don't build a
 //! web browser using it. :)
 
-use core::prelude::*;
-
 use sink::common::{NodeEnum, Document, Doctype, Text, Comment, Element};
 
 use tokenizer::Attribute;
@@ -24,11 +22,9 @@ use serialize::TraversalScope;
 use serialize::TraversalScope::{IncludeNode, ChildrenOnly};
 use driver::ParseResult;
 
-use core::cell::RefCell;
-use core::default::Default;
-use alloc::rc::{Rc, Weak};
-use collections::vec::Vec;
-use collections::string::String;
+use std::cell::RefCell;
+use std::default::Default;
+use std::rc::{Rc, Weak};
 use std::borrow::Cow;
 use std::io::{self, Write};
 use std::ops::DerefMut;
9 changes: 2 additions & 7 deletions src/tokenizer/buffer_queue.rs
@@ -7,14 +7,11 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use core::prelude::*;
-
 use util::str::AsciiCast;
 use util::smallcharset::SmallCharSet;
 
-use core::str::CharRange;
-use collections::string::String;
-use collections::VecDeque;
+use std::str::CharRange;
+use std::collections::VecDeque;
 
 pub use self::SetResult::{FromSet, NotFromSet};
 
@@ -177,8 +174,6 @@ impl BufferQueue {
 #[cfg(test)]
 #[allow(non_snake_case)]
 mod test {
-    use core::prelude::*;
-    use collections::string::String;
     use super::{BufferQueue, FromSet, NotFromSet};
 
     #[test]
2 changes: 0 additions & 2 deletions src/tokenizer/char_ref/data.rs
@@ -7,8 +7,6 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use core::prelude::*;
-
 use phf::Map;
 
 /// The spec replaces most characters in the ISO-2022 C1 control code range
5 changes: 1 addition & 4 deletions src/tokenizer/char_ref/mod.rs
@@ -7,15 +7,12 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use core::prelude::*;
-
 use super::{Tokenizer, TokenSink};
 
 use util::str::{is_ascii_alnum, empty_str};
 
-use core::char::from_u32;
+use std::char::from_u32;
 use std::borrow::Cow::Borrowed;
-use collections::string::String;
 
 pub use self::Status::*;
 use self::State::*;
6 changes: 0 additions & 6 deletions src/tokenizer/interface.rs
@@ -7,15 +7,9 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use core::option::Option::{self, None};
-use core::clone::Clone;
-
 use tokenizer::states;
 
-use collections::vec::Vec;
-use collections::string::String;
 use std::borrow::Cow;
-use std::marker::Send;
 
 use string_cache::{Atom, QualName};
15 changes: 2 additions & 13 deletions src/tokenizer/mod.rs
@@ -11,11 +11,6 @@
 #![allow(unused_imports)]
 
-use core::clone::Clone;
-use core::cmp::Ord;
-use core::iter::Iterator;
-use core::option::Option::{self, Some, None};
-
 pub use self::interface::{Doctype, Attribute, TagKind, StartTag, EndTag, Tag};
 pub use self::interface::{Token, DoctypeToken, TagToken, CommentToken};
 pub use self::interface::{CharacterTokens, NullCharacterToken, EOFToken, ParseError};
@@ -34,11 +29,8 @@ use self::buffer_queue::{BufferQueue, SetResult, FromSet, NotFromSet};
 use util::str::{lower_ascii, lower_ascii_letter, empty_str};
 use util::smallcharset::SmallCharSet;
 
-use core::mem::replace;
-use core::default::Default;
-use alloc::boxed::Box;
-use collections::vec::Vec;
-use collections::string::{String, ToString};
+use std::mem::replace;
+use std::default::Default;
 use std::borrow::Cow::{self, Borrowed};
 use std::collections::BTreeMap;
 
@@ -1329,9 +1321,6 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
 #[cfg(test)]
 #[allow(non_snake_case)]
 mod test {
-    use core::prelude::*;
-    use collections::vec::Vec;
-    use collections::string::String;
     use super::{option_push, append_strings}; // private items
 
     #[test]
2 changes: 0 additions & 2 deletions src/tokenizer/states.rs
@@ -12,8 +12,6 @@
 //! This is public for use by the tokenizer tests. Other library
 //! users should not have to care about this.
 
-use core::prelude::*;
-
 pub use self::ScriptEscapeKind::*;
 pub use self::DoctypeIdKind::*;
 pub use self::RawKind::*;
15 changes: 5 additions & 10 deletions src/tree_builder/actions.rs
@@ -12,8 +12,6 @@
 //! Many of these are named within the spec, e.g. "reset the insertion
 //! mode appropriately".
 
-use core::prelude::*;
-
 use tree_builder::types::*;
 use tree_builder::tag_sets::*;
 use tree_builder::interface::{TreeSink, QuirksMode, NodeOrText, AppendNode, AppendText};
@@ -24,12 +22,9 @@ use tokenizer::states::{RawData, RawKind};
 
 use util::str::{AsciiExt, to_escaped_string};
 
-use core::mem::replace;
-use core::iter::{Rev, Enumerate};
-use core::slice;
-use core::fmt::Debug;
-use collections::vec::Vec;
-use collections::string::String;
+use std::{slice, fmt};
+use std::mem::replace;
+use std::iter::{Rev, Enumerate};
 use std::borrow::Cow::Borrowed;
 
 use string_cache::{Atom, Namespace, QualName};
@@ -62,7 +57,7 @@ enum Bookmark<Handle> {
 
 // These go in a trait so that we can control visibility.
 pub trait TreeBuilderActions<Handle> {
-    fn unexpected<T: Debug>(&mut self, thing: &T) -> ProcessResult;
+    fn unexpected<T: fmt::Debug>(&mut self, thing: &T) -> ProcessResult;
     fn assert_named(&mut self, node: Handle, name: Atom);
     fn clear_active_formatting_to_marker(&mut self);
     fn create_formatting_element_for(&mut self, tag: Tag) -> Handle;
@@ -131,7 +126,7 @@ impl<Handle, Sink> TreeBuilderActions<Handle>
     where Handle: Clone,
           Sink: TreeSink<Handle=Handle>,
 {
-    fn unexpected<T: Debug>(&mut self, _thing: &T) -> ProcessResult {
+    fn unexpected<T: fmt::Debug>(&mut self, _thing: &T) -> ProcessResult {
         self.sink.parse_error(format_if!(
             self.opts.exact_errors,
             "Unexpected token",
4 changes: 0 additions & 4 deletions src/tree_builder/data.rs
@@ -7,14 +7,10 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use core::prelude::*;
-
 use tokenizer::Doctype;
 use tree_builder::interface::{QuirksMode, Quirks, LimitedQuirks, NoQuirks};
 use util::str::AsciiExt;
 
-use collections::string::String;
-
 // These should all be lowercase, for ASCII-case-insensitive matching.
 static QUIRKY_PUBLIC_PREFIXES: &'static [&'static str] = &[
     "-//advasoft ltd//dtd html 3.0 aswedit + extensions//",
4 changes: 0 additions & 4 deletions src/tree_builder/interface.rs
@@ -10,12 +10,8 @@
 //! The interface for consumers of the tree builder (and thus the
 //! parser overall).
 
-use core::prelude::*;
-
 use tokenizer::Attribute;
 
-use collections::vec::Vec;
-use collections::string::String;
 use std::borrow::Cow;
 
 use string_cache::QualName;
10 changes: 3 additions & 7 deletions src/tree_builder/mod.rs
@@ -9,8 +9,6 @@
 
 //! The HTML5 tree builder.
 
-use core::prelude::*;
-
 pub use self::interface::{QuirksMode, Quirks, LimitedQuirks, NoQuirks};
 pub use self::interface::{NodeOrText, AppendNode, AppendText};
 pub use self::interface::{TreeSink, Tracer, NextParserState};
@@ -28,12 +26,10 @@ use tokenizer::states as tok_state;
 
 use util::str::{is_ascii_whitespace, char_run};
 
-use core::default::Default;
-use core::mem::replace;
-use collections::vec::Vec;
-use collections::string::String;
+use std::default::Default;
+use std::mem::replace;
 use std::borrow::Cow::Borrowed;
-use collections::VecDeque;
+use std::collections::VecDeque;
 
 #[macro_use] mod tag_sets;
 // "pub" is a workaround for rust#18241 (?)
5 changes: 1 addition & 4 deletions src/tree_builder/rules.rs
@@ -9,8 +9,6 @@
 
 //! The tree builder rules, as a single, enormous nested match expression.
 
-use core::prelude::*;
-
 use tree_builder::types::*;
 use tree_builder::tag_sets::*;
 use tree_builder::actions::TreeBuilderActions;
@@ -21,8 +19,7 @@ use tokenizer::states::{Rcdata, Rawtext, ScriptData, Plaintext, Quiescent};
 
 use util::str::{AsciiExt, is_ascii_whitespace};
 
-use core::mem::replace;
-use collections::string::String;
+use std::mem::replace;
 use std::borrow::Cow::Borrowed;
 use std::borrow::ToOwned;