diff --git a/src/tools/linkchecker/Cargo.lock b/src/tools/linkchecker/Cargo.lock
index 8e94137d2139..ed5fe081ffb2 100644
--- a/src/tools/linkchecker/Cargo.lock
+++ b/src/tools/linkchecker/Cargo.lock
@@ -2,30 +2,22 @@
 name = "linkchecker"
 version = "0.1.0"
 dependencies = [
- "url 0.5.5 (registry+/~https://github.com/rust-lang/crates.io-index)",
+ "url 1.2.0 (registry+/~https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
-name = "libc"
-version = "0.2.8"
-source = "registry+/~https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "matches"
-version = "0.1.2"
-source = "registry+/~https://github.com/rust-lang/crates.io-index"
-
-[[package]]
-name = "rand"
-version = "0.3.14"
+name = "idna"
+version = "0.1.0"
 source = "registry+/~https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "libc 0.2.8 (registry+/~https://github.com/rust-lang/crates.io-index)",
+ "matches 0.1.2 (registry+/~https://github.com/rust-lang/crates.io-index)",
+ "unicode-bidi 0.2.3 (registry+/~https://github.com/rust-lang/crates.io-index)",
+ "unicode-normalization 0.1.2 (registry+/~https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
-name = "rustc-serialize"
-version = "0.3.18"
+name = "matches"
+version = "0.1.2"
 source = "registry+/~https://github.com/rust-lang/crates.io-index"
 
 [[package]]
@@ -43,22 +35,10 @@ source = "registry+/~https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "url"
-version = "0.5.5"
+version = "1.2.0"
 source = "registry+/~https://github.com/rust-lang/crates.io-index"
 dependencies = [
+ "idna 0.1.0 (registry+/~https://github.com/rust-lang/crates.io-index)",
  "matches 0.1.2 (registry+/~https://github.com/rust-lang/crates.io-index)",
- "rustc-serialize 0.3.18 (registry+/~https://github.com/rust-lang/crates.io-index)",
- "unicode-bidi 0.2.3 (registry+/~https://github.com/rust-lang/crates.io-index)",
- "unicode-normalization 0.1.2 (registry+/~https://github.com/rust-lang/crates.io-index)",
- "uuid 0.1.18 (registry+/~https://github.com/rust-lang/crates.io-index)",
-]
-
-[[package]]
-name = "uuid"
-version = "0.1.18"
-source = "registry+/~https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "rand 0.3.14 (registry+/~https://github.com/rust-lang/crates.io-index)",
- "rustc-serialize 0.3.18 (registry+/~https://github.com/rust-lang/crates.io-index)",
 ]
diff --git a/src/tools/linkchecker/Cargo.toml b/src/tools/linkchecker/Cargo.toml
index 29fc78a65e91..415b6f056728 100644
--- a/src/tools/linkchecker/Cargo.toml
+++ b/src/tools/linkchecker/Cargo.toml
@@ -4,7 +4,7 @@ version = "0.1.0"
 authors = ["Alex Crichton <alex@alexcrichton.com>"]
 
 [dependencies]
-url = "0.5"
+url = "1.2"
 
 [[bin]]
 name = "linkchecker"
diff --git a/src/tools/linkchecker/main.rs b/src/tools/linkchecker/main.rs
index 80c37d559759..27adabbc72e5 100644
--- a/src/tools/linkchecker/main.rs
+++ b/src/tools/linkchecker/main.rs
@@ -33,7 +33,7 @@ use std::path::{Path, PathBuf};
 use std::collections::{HashMap, HashSet};
 use std::collections::hash_map::Entry;
 
-use url::{Url, UrlParser};
+use url::Url;
 
 use Redirect::*;
 
@@ -92,7 +92,7 @@ fn walk(cache: &mut Cache, root: &Path, dir: &Path, url: &mut Url, errors: &mut
     for entry in t!(dir.read_dir()).map(|e| t!(e)) {
         let path = entry.path();
         let kind = t!(entry.file_type());
-        url.path_mut().unwrap().push(entry.file_name().into_string().unwrap());
+        url.path_segments_mut().unwrap().push(entry.file_name().to_str().unwrap());
         if kind.is_dir() {
             walk(cache, root, &path, url, errors);
         } else {
@@ -104,7 +104,7 @@ fn walk(cache: &mut Cache, root: &Path, dir: &Path, url: &mut Url, errors: &mut
                 entry.source = String::new();
             }
         }
-        url.path_mut().unwrap().pop();
+        url.path_segments_mut().unwrap().pop();
     }
 }
 
@@ -138,9 +138,6 @@ fn check(cache: &mut Cache,
         return None;
     }
 
-    let mut parser = UrlParser::new();
-    parser.base_url(base);
-
     let res = load_file(cache, root, PathBuf::from(file), SkipRedirect);
     let (pretty_file, contents) = match res {
         Ok(res) => res,
@@ -162,7 +159,7 @@ fn check(cache: &mut Cache,
         }
         // Once we've plucked out the URL, parse it using our base url and
         // then try to extract a file path.
-        let (parsed_url, path) = match url_to_file_path(&parser, url) {
+        let (parsed_url, path) = match url_to_file_path(&base, url) {
             Some((url, path)) => (url, PathBuf::from(path)),
             None => {
                 *errors = true;
@@ -203,7 +200,7 @@ fn check(cache: &mut Cache,
             Err(LoadError::IsRedirect) => unreachable!(),
         };
 
-        if let Some(ref fragment) = parsed_url.fragment {
+        if let Some(ref fragment) = parsed_url.fragment() {
             // Fragments like `#1-6` are most likely line numbers to be
             // interpreted by javascript, so we're ignoring these
             if fragment.splitn(2, '-')
@@ -214,7 +211,7 @@ fn check(cache: &mut Cache,
 
             let entry = &mut cache.get_mut(&pretty_path).unwrap();
             entry.parse_ids(&pretty_path, &contents, errors);
-            if !entry.ids.contains(fragment) {
+            if !entry.ids.contains(*fragment) {
                 *errors = true;
                 print!("{}:{}: broken link fragment ",
                        pretty_file.display(),
@@ -271,10 +268,8 @@ fn load_file(cache: &mut Cache,
         }
     };
     let base = Url::from_file_path(&file).unwrap();
-    let mut parser = UrlParser::new();
-    parser.base_url(&base);
 
-    match maybe_redirect.and_then(|url| url_to_file_path(&parser, &url)) {
+    match maybe_redirect.and_then(|url| url_to_file_path(&base, &url)) {
         Some((_, redirect_file)) => {
             let path = PathBuf::from(redirect_file);
             load_file(cache, root, path, FromRedirect(true))
@@ -299,8 +294,8 @@ fn maybe_redirect(source: &str) -> Option<String> {
     })
 }
 
-fn url_to_file_path(parser: &UrlParser, url: &str) -> Option<(Url, PathBuf)> {
-    parser.parse(url)
+fn url_to_file_path(parser: &Url, url: &str) -> Option<(Url, PathBuf)> {
+    parser.join(url)
           .ok()
           .and_then(|parsed_url| parsed_url.to_file_path().ok().map(|f| (parsed_url, f)))
 }
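
For reviewers not familiar with the url 0.5 -> 1.x API changes this patch leans on, here is a small standalone sketch of the new calls: relative references are resolved with Url::join on a base Url (replacing the removed UrlParser builder), path_segments_mut() replaces path_mut(), and fragment() is now an accessor rather than a public field. It is illustrative only and not part of the patch; the base URL, relative link, and fragment below are invented examples.

// Minimal sketch (not part of the patch) of the url 1.x API used above.
extern crate url;

use url::Url;

fn main() {
    // url 1.x resolves relative references with `Url::join` on a base URL,
    // replacing the removed `UrlParser::new()` / `base_url(..)` builder.
    let base = Url::parse("file:///docs/std/index.html").unwrap();
    let link = base.join("../core/option/enum.Option.html#method.map").unwrap();

    // `path_segments_mut` replaces the old `path_mut`; it returns an Err for
    // URLs that cannot be a base, hence the `unwrap` in the linkchecker.
    let mut dir_url = Url::parse("file:///docs/").unwrap();
    dir_url.path_segments_mut().unwrap().push("std");
    dir_url.path_segments_mut().unwrap().pop();

    // `fragment` is now an accessor returning `Option<&str>` rather than a
    // public `Option<String>` field, which is why the patch adds `()` and
    // dereferences the borrowed fragment before the HashSet lookup.
    if let Some(fragment) = link.fragment() {
        println!("fragment: {}", fragment);
    }

    // `to_file_path` still converts a file:// URL back into a PathBuf.
    if let Ok(path) = link.to_file_path() {
        println!("resolved: {}", path.display());
    }
}

Because join resolves the reference against the base in a single call, the two parser-setup lines in check() and load_file() could simply be deleted and the base Url passed straight to url_to_file_path.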