diff --git a/scraper/README.md b/scraper/README.md
index beebfe15..f2d9af90 100644
--- a/scraper/README.md
+++ b/scraper/README.md
@@ -134,15 +134,17 @@ assert_eq!(vec!["Hello, ", "world!"], text);
```rust
use html5ever::tree_builder::TreeSink;
-use scraper::{Html, Selector};
+use scraper::{Html, Selector, HtmlTreeSink};
let html = "
hello
REMOVE ME
";
let selector = Selector::parse(".hello").unwrap();
let mut document = Html::parse_document(html);
let node_ids: Vec<_> = document.select(&selector).map(|x| x.id()).collect();
+let tree = HtmlTreeSink::new(document);
for id in node_ids {
- document.remove_from_parent(&id);
+ tree.remove_from_parent(&id);
}
+let document = tree.finish();
assert_eq!(document.html(), "<html><head></head><body>hello</body></html>");
```
diff --git a/scraper/src/lib.rs b/scraper/src/lib.rs
index 46ca0647..50201190 100644
--- a/scraper/src/lib.rs
+++ b/scraper/src/lib.rs
@@ -1,128 +1,4 @@
-//! HTML parsing and querying with CSS selectors.
-//!
-//! `scraper` is on [Crates.io][crate] and [GitHub][github].
-//!
-//! [crate]: https://crates.io/crates/scraper
-//! [github]: https://github.com/programble/scraper
-//!
-//! Scraper provides an interface to Servo's `html5ever` and `selectors` crates, for browser-grade
-//! parsing and querying.
-//!
-//! # Examples
-//!
-//! ## Parsing a document
-//!
-//! ```
-//! use scraper::Html;
-//!
-//! let html = r#"
-//!
-//!
-//! Hello, world!
-//!
Hello, world!
-//! "#;
-//!
-//! let document = Html::parse_document(html);
-//! ```
-//!
-//! ## Parsing a fragment
-//!
-//! ```
-//! use scraper::Html;
-//! let fragment = Html::parse_fragment("
Hello, world!
");
-//! ```
-//!
-//! ## Parsing a selector
-//!
-//! ```
-//! use scraper::Selector;
-//! let selector = Selector::parse("h1.foo").unwrap();
-//! ```
-//!
-//! ## Selecting elements
-//!
-//! ```
-//! use scraper::{Html, Selector};
-//!
-//! let html = r#"
-//!
-//!
Foo
-//!
Bar
-//!
Baz
-//!
-//! "#;
-//!
-//! let fragment = Html::parse_fragment(html);
-//! let selector = Selector::parse("li").unwrap();
-//!
-//! for element in fragment.select(&selector) {
-//! assert_eq!("li", element.value().name());
-//! }
-//! ```
-//!
-//! ## Selecting descendent elements
-//!
-//! ```
-//! use scraper::{Html, Selector};
-//!
-//! let html = r#"
-//!
-//!
Foo
-//!
Bar
-//!
Baz
-//!
-//! "#;
-//!
-//! let fragment = Html::parse_fragment(html);
-//! let ul_selector = Selector::parse("ul").unwrap();
-//! let li_selector = Selector::parse("li").unwrap();
-//!
-//! let ul = fragment.select(&ul_selector).next().unwrap();
-//! for element in ul.select(&li_selector) {
-//! assert_eq!("li", element.value().name());
-//! }
-//! ```
-//!
-//! ## Accessing element attributes
-//!
-//! ```
-//! use scraper::{Html, Selector};
-//!
-//! let fragment = Html::parse_fragment(r#""#);
-//! let selector = Selector::parse(r#"input[name="foo"]"#).unwrap();
-//!
-//! let input = fragment.select(&selector).next().unwrap();
-//! assert_eq!(Some("bar"), input.value().attr("value"));
-//! ```
-//!
-//! ## Serializing HTML and inner HTML
-//!
-//! ```
-//! use scraper::{Html, Selector};
-//!
-//! let fragment = Html::parse_fragment("
Hello, world!
");
-//! let selector = Selector::parse("h1").unwrap();
-//!
-//! let h1 = fragment.select(&selector).next().unwrap();
-//!
-//! assert_eq!("
Hello, world!
", h1.html());
-//! assert_eq!("Hello, world!", h1.inner_html());
-//! ```
-//!
-//! ## Accessing descendent text
-//!
-//! ```
-//! use scraper::{Html, Selector};
-//!
-//! let fragment = Html::parse_fragment("
Hello, world!
");
-//! let selector = Selector::parse("h1").unwrap();
-//!
-//! let h1 = fragment.select(&selector).next().unwrap();
-//! let text = h1.text().collect::<Vec<_>>();
-//!
-//! assert_eq!(vec!["Hello, ", "world!"], text);
-//! ```
-
+#![doc = include_str!("../README.md")]
#![warn(
missing_docs,
missing_debug_implementations,