diff --git a/src/bin/bench.rs b/src/bin/bench.rs index 655fbff60e5..7e658997d28 100644 --- a/src/bin/bench.rs +++ b/src/bin/bench.rs @@ -5,7 +5,7 @@ use cargo::util::important_paths::{find_root_manifest_for_cwd}; #[derive(RustcDecodable)] struct Options { flag_no_run: bool, - flag_package: Option, + flag_package: Vec, flag_jobs: Option, flag_features: Vec, flag_no_default_features: bool, @@ -29,22 +29,22 @@ Usage: cargo bench [options] [--] [...] Options: - -h, --help Print this message - --lib Benchmark only this package's library - --bin NAME Benchmark only the specified binary - --example NAME Benchmark only the specified example - --test NAME Benchmark only the specified test target - --bench NAME Benchmark only the specified bench target - --no-run Compile, but don't run benchmarks - -p SPEC, --package SPEC Package to run benchmarks for - -j N, --jobs N The number of jobs to run in parallel - --features FEATURES Space-separated list of features to also build - --no-default-features Do not build the `default` feature - --target TRIPLE Build for the target triple - --manifest-path PATH Path to the manifest to build benchmarks for - -v, --verbose Use verbose output - -q, --quiet No output printed to stdout - --color WHEN Coloring: auto, always, never + -h, --help Print this message + --lib Benchmark only this package's library + --bin NAME Benchmark only the specified binary + --example NAME Benchmark only the specified example + --test NAME Benchmark only the specified test target + --bench NAME Benchmark only the specified bench target + --no-run Compile, but don't run benchmarks + -p SPEC, --package SPEC ... Package to run benchmarks for + -j N, --jobs N The number of jobs to run in parallel + --features FEATURES Space-separated list of features to also build + --no-default-features Do not build the `default` feature + --target TRIPLE Build for the target triple + --manifest-path PATH Path to the manifest to build benchmarks for + -v, --verbose Use verbose output + -q, --quiet No output printed to stdout + --color WHEN Coloring: auto, always, never All of the trailing arguments are passed to the benchmark binaries generated for filtering benchmarks and generally providing options configuring how they @@ -75,7 +75,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { target: options.flag_target.as_ref().map(|s| &s[..]), features: &options.flag_features, no_default_features: options.flag_no_default_features, - spec: options.flag_package.as_ref().map(|s| &s[..]), + spec: &options.flag_package, exec_engine: None, release: true, mode: ops::CompileMode::Bench, diff --git a/src/bin/build.rs b/src/bin/build.rs index 984a0e752a3..27bcf57b122 100644 --- a/src/bin/build.rs +++ b/src/bin/build.rs @@ -7,7 +7,7 @@ use cargo::util::{CliResult, CliError, Config}; #[derive(RustcDecodable)] struct Options { - flag_package: Option, + flag_package: Vec, flag_jobs: Option, flag_features: Vec, flag_no_default_features: bool, @@ -31,22 +31,22 @@ Usage: cargo build [options] Options: - -h, --help Print this message - -p SPEC, --package SPEC Package to build - -j N, --jobs N The number of jobs to run in parallel - --lib Build only this package's library - --bin NAME Build only the specified binary - --example NAME Build only the specified example - --test NAME Build only the specified test target - --bench NAME Build only the specified benchmark target - --release Build artifacts in release mode, with optimizations - --features FEATURES Space-separated list of features to also build - 
--no-default-features Do not build the `default` feature - --target TRIPLE Build for the target triple - --manifest-path PATH Path to the manifest to compile - -v, --verbose Use verbose output - -q, --quiet No output printed to stdout - --color WHEN Coloring: auto, always, never + -h, --help Print this message + -p SPEC, --package SPEC ... Package to build + -j N, --jobs N The number of jobs to run in parallel + --lib Build only this package's library + --bin NAME Build only the specified binary + --example NAME Build only the specified example + --test NAME Build only the specified test target + --bench NAME Build only the specified benchmark target + --release Build artifacts in release mode, with optimizations + --features FEATURES Space-separated list of features to also build + --no-default-features Do not build the `default` feature + --target TRIPLE Build for the target triple + --manifest-path PATH Path to the manifest to compile + -v, --verbose Use verbose output + -q, --quiet No output printed to stdout + --color WHEN Coloring: auto, always, never If the --package argument is given, then SPEC is a package id specification which indicates which package should be built. If it is not given, then the @@ -72,7 +72,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { target: options.flag_target.as_ref().map(|t| &t[..]), features: &options.flag_features, no_default_features: options.flag_no_default_features, - spec: options.flag_package.as_ref().map(|s| &s[..]), + spec: &options.flag_package, exec_engine: None, mode: ops::CompileMode::Build, release: options.flag_release, diff --git a/src/bin/clean.rs b/src/bin/clean.rs index 18c6111e56e..8aea04d35a7 100644 --- a/src/bin/clean.rs +++ b/src/bin/clean.rs @@ -6,7 +6,7 @@ use cargo::util::important_paths::{find_root_manifest_for_cwd}; #[derive(RustcDecodable)] struct Options { - flag_package: Option, + flag_package: Vec, flag_target: Option, flag_manifest_path: Option, flag_verbose: bool, @@ -21,13 +21,13 @@ Usage: cargo clean [options] Options: - -h, --help Print this message - -p SPEC, --package SPEC Package to clean artifacts for - --manifest-path PATH Path to the manifest to the package to clean - --target TRIPLE Target triple to clean output for (default all) - -v, --verbose Use verbose output - -q, --quiet No output printed to stdout - --color WHEN Coloring: auto, always, never + -h, --help Print this message + -p SPEC, --package SPEC ... Package to clean artifacts for + --manifest-path PATH Path to the manifest to the package to clean + --target TRIPLE Target triple to clean output for (default all) + -v, --verbose Use verbose output + -q, --quiet No output printed to stdout + --color WHEN Coloring: auto, always, never If the --package argument is given, then SPEC is a package id specification which indicates which package's artifacts should be cleaned out. 
If it is not @@ -43,7 +43,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path)); let opts = ops::CleanOptions { config: config, - spec: options.flag_package.as_ref().map(|s| &s[..]), + spec: &options.flag_package, target: options.flag_target.as_ref().map(|s| &s[..]), }; ops::clean(&root, &opts).map(|_| None).map_err(|err| { diff --git a/src/bin/doc.rs b/src/bin/doc.rs index 8fa2bae7cc0..16fd12430ec 100644 --- a/src/bin/doc.rs +++ b/src/bin/doc.rs @@ -15,7 +15,7 @@ struct Options { flag_release: bool, flag_quiet: bool, flag_color: Option, - flag_package: Option, + flag_package: Vec, } pub const USAGE: &'static str = " @@ -25,19 +25,19 @@ Usage: cargo doc [options] Options: - -h, --help Print this message - --open Opens the docs in a browser after the operation - -p SPEC, --package SPEC Package to document - --no-deps Don't build documentation for dependencies - -j N, --jobs N The number of jobs to run in parallel - --release Build artifacts in release mode, with optimizations - --features FEATURES Space-separated list of features to also build - --no-default-features Do not build the `default` feature - --target TRIPLE Build for the target triple - --manifest-path PATH Path to the manifest to document - -v, --verbose Use verbose output - -q, --quiet No output printed to stdout - --color WHEN Coloring: auto, always, never + -h, --help Print this message + --open Opens the docs in a browser after the operation + -p SPEC, --package SPEC ... Package to document + --no-deps Don't build documentation for dependencies + -j N, --jobs N The number of jobs to run in parallel + --release Build artifacts in release mode, with optimizations + --features FEATURES Space-separated list of features to also build + --no-default-features Do not build the `default` feature + --target TRIPLE Build for the target triple + --manifest-path PATH Path to the manifest to document + -v, --verbose Use verbose output + -q, --quiet No output printed to stdout + --color WHEN Coloring: auto, always, never By default the documentation for the local package and all dependencies is built. The output is all placed in `target/doc` in rustdoc's usual format. 
@@ -62,7 +62,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { target: options.flag_target.as_ref().map(|t| &t[..]), features: &options.flag_features, no_default_features: options.flag_no_default_features, - spec: options.flag_package.as_ref().map(|s| &s[..]), + spec: &options.flag_package, exec_engine: None, filter: ops::CompileFilter::Everything, release: options.flag_release, diff --git a/src/bin/run.rs b/src/bin/run.rs index c1c04295886..2483dcc8dfc 100644 --- a/src/bin/run.rs +++ b/src/bin/run.rs @@ -68,7 +68,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { target: options.flag_target.as_ref().map(|t| &t[..]), features: &options.flag_features, no_default_features: options.flag_no_default_features, - spec: None, + spec: &[], exec_engine: None, release: options.flag_release, mode: ops::CompileMode::Build, diff --git a/src/bin/rustc.rs b/src/bin/rustc.rs index 8a589091aae..54c0dd36bcf 100644 --- a/src/bin/rustc.rs +++ b/src/bin/rustc.rs @@ -75,7 +75,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { target: options.flag_target.as_ref().map(|t| &t[..]), features: &options.flag_features, no_default_features: options.flag_no_default_features, - spec: options.flag_package.as_ref().map(|s| &s[..]), + spec: &options.flag_package.map_or(Vec::new(), |s| vec![s]), exec_engine: None, mode: ops::CompileMode::Build, release: options.flag_release, diff --git a/src/bin/test.rs b/src/bin/test.rs index ade46016528..09981e0bed8 100644 --- a/src/bin/test.rs +++ b/src/bin/test.rs @@ -10,7 +10,7 @@ struct Options { flag_manifest_path: Option, flag_no_default_features: bool, flag_no_run: bool, - flag_package: Option, + flag_package: Vec, flag_target: Option, flag_lib: bool, flag_bin: Vec, @@ -31,24 +31,24 @@ Usage: cargo test [options] [--] [...] Options: - -h, --help Print this message - --lib Test only this package's library - --bin NAME Test only the specified binary - --example NAME Test only the specified example - --test NAME Test only the specified integration test target - --bench NAME Test only the specified benchmark target - --no-run Compile, but don't run tests - -p SPEC, --package SPEC Package to run tests for - -j N, --jobs N The number of jobs to run in parallel - --release Build artifacts in release mode, with optimizations - --features FEATURES Space-separated list of features to also build - --no-default-features Do not build the `default` feature - --target TRIPLE Build for the target triple - --manifest-path PATH Path to the manifest to build tests for - -v, --verbose Use verbose output - -q, --quiet No output printed to stdout - --color WHEN Coloring: auto, always, never - --no-fail-fast Run all tests regardless of failure + -h, --help Print this message + --lib Test only this package's library + --bin NAME Test only the specified binary + --example NAME Test only the specified example + --test NAME Test only the specified integration test target + --bench NAME Test only the specified benchmark target + --no-run Compile, but don't run tests + -p SPEC, --package SPEC ... 
Package to run tests for + -j N, --jobs N The number of jobs to run in parallel + --release Build artifacts in release mode, with optimizations + --features FEATURES Space-separated list of features to also build + --no-default-features Do not build the `default` feature + --target TRIPLE Build for the target triple + --manifest-path PATH Path to the manifest to build tests for + -v, --verbose Use verbose output + -q, --quiet No output printed to stdout + --color WHEN Coloring: auto, always, never + --no-fail-fast Run all tests regardless of failure All of the trailing arguments are passed to the test binaries generated for filtering tests and generally providing options configuring how they run. For @@ -81,7 +81,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { target: options.flag_target.as_ref().map(|s| &s[..]), features: &options.flag_features, no_default_features: options.flag_no_default_features, - spec: options.flag_package.as_ref().map(|s| &s[..]), + spec: &options.flag_package, exec_engine: None, release: options.flag_release, mode: ops::CompileMode::Test, diff --git a/src/bin/update.rs b/src/bin/update.rs index cd4cd1173aa..75cac7a70ef 100644 --- a/src/bin/update.rs +++ b/src/bin/update.rs @@ -6,7 +6,7 @@ use cargo::util::important_paths::find_root_manifest_for_cwd; #[derive(RustcDecodable)] struct Options { - flag_package: Option, + flag_package: Vec, flag_aggressive: bool, flag_precise: Option, flag_manifest_path: Option, @@ -22,14 +22,14 @@ Usage: cargo update [options] Options: - -h, --help Print this message - -p SPEC, --package SPEC Package to update - --aggressive Force updating all dependencies of as well - --precise PRECISE Update a single dependency to exactly PRECISE - --manifest-path PATH Path to the manifest to compile - -v, --verbose Use verbose output - -q, --quiet No output printed to stdout - --color WHEN Coloring: auto, always, never + -h, --help Print this message + -p SPEC, --package SPEC ... Package to update + --aggressive Force updating all dependencies of as well + --precise PRECISE Update a single dependency to exactly PRECISE + --manifest-path PATH Path to the manifest to compile + -v, --verbose Use verbose output + -q, --quiet No output printed to stdout + --color WHEN Coloring: auto, always, never This command requires that a `Cargo.lock` already exists as generated by `cargo build` or related commands. 
@@ -58,12 +58,10 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..]))); let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path)); - let spec = options.flag_package.as_ref(); - let update_opts = ops::UpdateOptions { aggressive: options.flag_aggressive, precise: options.flag_precise.as_ref().map(|s| &s[..]), - to_update: spec.map(|s| &s[..]), + to_update: &options.flag_package, config: config, }; diff --git a/src/cargo/core/registry.rs b/src/cargo/core/registry.rs index a5880b0af12..1c21e0d9607 100644 --- a/src/cargo/core/registry.rs +++ b/src/cargo/core/registry.rs @@ -111,7 +111,7 @@ impl<'cfg> PackageRegistry<'cfg> { self.sources } - fn ensure_loaded(&mut self, namespace: &SourceId) -> CargoResult<()> { + fn ensure_loaded(&mut self, namespace: &SourceId, kind: Kind) -> CargoResult<()> { match self.source_ids.get(namespace) { // We've previously loaded this source, and we've already locked it, // so we're not allowed to change it even if `namespace` has a @@ -143,13 +143,13 @@ impl<'cfg> PackageRegistry<'cfg> { } } - try!(self.load(namespace, Kind::Normal)); + try!(self.load(namespace, kind)); Ok(()) } pub fn add_sources(&mut self, ids: &[SourceId]) -> CargoResult<()> { for id in ids.iter() { - try!(self.load(id, Kind::Locked)); + try!(self.ensure_loaded(id, Kind::Locked)); } Ok(()) } @@ -288,7 +288,7 @@ impl<'cfg> Registry for PackageRegistry<'cfg> { let ret = if overrides.len() == 0 { // Ensure the requested source_id is loaded - try!(self.ensure_loaded(dep.source_id())); + try!(self.ensure_loaded(dep.source_id(), Kind::Normal)); let mut ret = Vec::new(); for (id, src) in self.sources.sources_mut() { if id == dep.source_id() { diff --git a/src/cargo/ops/cargo_clean.rs b/src/cargo/ops/cargo_clean.rs index c3303423e8b..f9e4107e9f5 100644 --- a/src/cargo/ops/cargo_clean.rs +++ b/src/cargo/ops/cargo_clean.rs @@ -9,7 +9,7 @@ use util::{CargoResult, human, ChainError, Config}; use ops::{self, Layout, Context, BuildConfig, Kind}; pub struct CleanOptions<'a> { - pub spec: Option<&'a str>, + pub spec: &'a [String], pub target: Option<&'a str>, pub config: &'a Config, } @@ -21,26 +21,17 @@ pub fn clean(manifest_path: &Path, opts: &CleanOptions) -> CargoResult<()> { // If we have a spec, then we need to delete some packages, otherwise, just // remove the whole target directory and be done with it! 
- let spec = match opts.spec { - Some(spec) => spec, - None => return rm_rf(&target_dir), - }; + if opts.spec.len() == 0 { + return rm_rf(&target_dir); + } - // Load the lockfile (if one's available), and resolve spec to a pkgid + // Load the lockfile (if one's available) let lockfile = root.root().join("Cargo.lock"); let source_id = root.package_id().source_id(); let resolve = match try!(ops::load_lockfile(&lockfile, source_id)) { Some(resolve) => resolve, None => return Err(human("A Cargo.lock must exist before cleaning")) }; - let pkgid = try!(resolve.query(spec)); - - // Translate the PackageId to a Package - let pkg = { - let mut source = pkgid.source_id().load(opts.config); - try!(source.update()); - (try!(source.get(&[pkgid.clone()]))).into_iter().next().unwrap() - }; // Create a compilation context to have access to information like target // filenames and such @@ -49,20 +40,32 @@ pub fn clean(manifest_path: &Path, opts: &CleanOptions) -> CargoResult<()> { let profiles = Profiles::default(); let cx = try!(Context::new(&resolve, &srcs, &pkgs, opts.config, Layout::at(target_dir), - None, &pkg, BuildConfig::default(), + None, BuildConfig::default(), &profiles)); - // And finally, clean everything out! - for target in pkg.targets().iter() { - // TODO: `cargo clean --release` - let layout = Layout::new(opts.config, &root, opts.target, "debug"); - try!(rm_rf(&layout.fingerprint(&pkg))); - let profiles = [Profile::default_dev(), Profile::default_test()]; - for profile in profiles.iter() { - for filename in try!(cx.target_filenames(&pkg, target, profile, - Kind::Target)).iter() { - try!(rm_rf(&layout.dest().join(&filename))); - try!(rm_rf(&layout.deps().join(&filename))); + // resolve package specs and remove the corresponding packages + for spec in opts.spec { + let pkgid = try!(resolve.query(spec)); + + // Translate the PackageId to a Package + let pkg = { + let mut source = pkgid.source_id().load(opts.config); + try!(source.update()); + (try!(source.get(&[pkgid.clone()]))).into_iter().next().unwrap() + }; + + // And finally, clean everything out! + for target in pkg.targets().iter() { + // TODO: `cargo clean --release` + let layout = Layout::new(opts.config, &root, opts.target, "debug"); + try!(rm_rf(&layout.fingerprint(&pkg))); + let profiles = [Profile::default_dev(), Profile::default_test()]; + for profile in profiles.iter() { + for filename in try!(cx.target_filenames(&pkg, target, profile, + Kind::Target)).iter() { + try!(rm_rf(&layout.dest().join(&filename))); + try!(rm_rf(&layout.deps().join(&filename))); + } } } } diff --git a/src/cargo/ops/cargo_compile.rs b/src/cargo/ops/cargo_compile.rs index 74c073f269f..b52cae00213 100644 --- a/src/cargo/ops/cargo_compile.rs +++ b/src/cargo/ops/cargo_compile.rs @@ -47,7 +47,7 @@ pub struct CompileOptions<'a> { /// Flag if the default feature should be built for the root package pub no_default_features: bool, /// Root package to build (if None it's the current one) - pub spec: Option<&'a str>, + pub spec: &'a [String], /// Filter to apply to the root package to select which targets will be /// built. 
pub filter: CompileFilter<'a>, @@ -95,7 +95,8 @@ pub fn compile<'a>(manifest_path: &Path, compile_pkg(&package, options) } -pub fn compile_pkg<'a>(package: &Package, +#[allow(deprecated)] // connect => join in 1.3 +pub fn compile_pkg<'a>(root_package: &Package, options: &CompileOptions<'a>) -> CargoResult> { let CompileOptions { config, jobs, target, spec, features, @@ -108,7 +109,7 @@ pub fn compile_pkg<'a>(package: &Package, s.split(' ') }).map(|s| s.to_string()).collect::>(); - if spec.is_some() && (no_default_features || features.len() > 0) { + if spec.len() > 0 && (no_default_features || features.len() > 0) { return Err(human("features cannot be modified when the main package \ is not being built")) } @@ -116,14 +117,14 @@ pub fn compile_pkg<'a>(package: &Package, return Err(human("jobs must be at least 1")) } - let override_ids = try!(source_ids_from_config(config, package.root())); + let override_ids = try!(source_ids_from_config(options.config, root_package.root())); let (packages, resolve_with_overrides, sources) = { - let mut registry = PackageRegistry::new(config); + let mut registry = PackageRegistry::new(options.config); - // First, resolve the package's *listed* dependencies, as well as + // First, resolve the root_package's *listed* dependencies, as well as // downloading and updating all remotes and such. - let resolve = try!(ops::resolve_pkg(&mut registry, package)); + let resolve = try!(ops::resolve_pkg(&mut registry, root_package)); // Second, resolve with precisely what we're doing. Filter out // transitive dependencies if necessary, specify features, handle @@ -132,14 +133,14 @@ pub fn compile_pkg<'a>(package: &Package, try!(registry.add_overrides(override_ids)); - let method = Method::Required { + let method = Method::Required{ dev_deps: true, // TODO: remove this option? features: &features, uses_default_features: !no_default_features, }; let resolved_with_overrides = - try!(ops::resolve_with_previous(&mut registry, package, method, + try!(ops::resolve_with_previous(&mut registry, root_package, method, Some(&resolve), None)); let packages = try!(ops::get_resolved_packages(&resolved_with_overrides, &mut registry)); @@ -147,33 +148,65 @@ pub fn compile_pkg<'a>(package: &Package, (packages, resolved_with_overrides, registry.move_sources()) }; - let pkgid = match spec { - Some(spec) => try!(resolve_with_overrides.query(spec)), - None => package.package_id(), + let mut invalid_spec = vec![]; + let pkgids = if spec.len() > 0 { + spec.iter().filter_map(|p| { + match resolve_with_overrides.query(&p) { + Ok(p) => Some(p), + Err(..) 
=> { invalid_spec.push(p.to_string()); None } + } + }).collect::>() + } else { + vec![root_package.package_id()] }; - let to_build = packages.iter().find(|p| p.package_id() == pkgid).unwrap(); - let targets = try!(generate_targets(to_build, mode, filter, release)); - - let target_with_args = match *target_rustc_args { - Some(args) if targets.len() == 1 => { - let (target, profile) = targets[0]; - let mut profile = profile.clone(); - profile.rustc_args = Some(args.to_vec()); - Some((target, profile)) + + if spec.len() > 0 && invalid_spec.len() > 0 { + return Err(human(format!("could not find package matching spec `{}`", + invalid_spec.connect(", ")))); + } + + let to_builds = packages.iter().filter(|p| pkgids.contains(&p.package_id())) + .collect::>(); + + let mut general_targets = Vec::new(); + let mut package_targets = Vec::new(); + + match *target_rustc_args { + Some(args) => { + if to_builds.len() == 1 { + let targets = try!(generate_targets(to_builds[0], mode, filter, release)); + if targets.len() == 1 { + let (target, profile) = targets[0]; + let mut profile = profile.clone(); + profile.rustc_args = Some(args.to_vec()); + general_targets.push((target, profile)); + } else { + return Err(human("extra arguments to `rustc` can only be \ + passed to one target, consider \ + filtering\nthe package by passing e.g. \ + `--lib` or `--bin NAME` to specify \ + a single target")) + + } + } else { + panic!("`rustc` should not accept multiple `-p` flags") + } } - Some(_) => { - return Err(human("extra arguments to `rustc` can only be passed to \ - one target, consider filtering\nthe package by \ - passing e.g. `--lib` or `--bin NAME` to specify \ - a single target")) + None => { + for &to_build in to_builds.iter() { + let targets = try!(generate_targets(to_build, mode, filter, release)); + package_targets.push((to_build, targets)); + } } - None => None, }; - let targets = target_with_args.as_ref().map(|&(t, ref p)| vec![(t, p)]) - .unwrap_or(targets); + for &(target, ref profile) in &general_targets { + for &to_build in to_builds.iter() { + package_targets.push((to_build, vec![(target, profile)])); + } + } - let ret = { + let mut ret = { let _p = profile::start("compiling"); let mut build_config = try!(scrape_build_config(config, jobs, target)); build_config.exec_engine = exec_engine.clone(); @@ -182,15 +215,18 @@ pub fn compile_pkg<'a>(package: &Package, build_config.doc_all = deps; } - try!(ops::compile_targets(&targets, to_build, + try!(ops::compile_targets(&package_targets, &PackageSet::new(&packages), &resolve_with_overrides, &sources, config, build_config, - to_build.manifest().profiles())) + root_package.manifest().profiles(), + )) }; + ret.to_doc_test = to_builds.iter().map(|&p| p.clone()).collect(); + return Ok(ret); } diff --git a/src/cargo/ops/cargo_doc.rs b/src/cargo/ops/cargo_doc.rs index 11e5d5bf736..296b792c061 100644 --- a/src/cargo/ops/cargo_doc.rs +++ b/src/cargo/ops/cargo_doc.rs @@ -18,7 +18,7 @@ pub fn doc(manifest_path: &Path, let mut lib_names = HashSet::new(); let mut bin_names = HashSet::new(); - if options.compile_opts.spec.is_none() { + if options.compile_opts.spec.len() == 0 { for target in package.targets().iter().filter(|t| t.documented()) { if target.is_lib() { assert!(lib_names.insert(target.crate_name())); @@ -39,13 +39,16 @@ pub fn doc(manifest_path: &Path, try!(ops::compile(manifest_path, &options.compile_opts)); if options.open_result { - let name = match options.compile_opts.spec { - Some(spec) => try!(PackageIdSpec::parse(spec)).name().replace("-", 
"_").to_string(), - None => { - match lib_names.iter().chain(bin_names.iter()).nth(0) { - Some(s) => s.to_string(), - None => return Ok(()) - } + let name = if options.compile_opts.spec.len() > 1 { + return Err(human("Passing multiple packages and `open` is not \ + supported")) + } else if options.compile_opts.spec.len() == 1 { + try!(PackageIdSpec::parse(&options.compile_opts.spec[0])) + .name().replace("-", "_").to_string() + } else { + match lib_names.iter().chain(bin_names.iter()).nth(0) { + Some(s) => s.to_string(), + None => return Ok(()) } }; diff --git a/src/cargo/ops/cargo_generate_lockfile.rs b/src/cargo/ops/cargo_generate_lockfile.rs index 5edca1acef7..ac5578e0c9e 100644 --- a/src/cargo/ops/cargo_generate_lockfile.rs +++ b/src/cargo/ops/cargo_generate_lockfile.rs @@ -11,7 +11,7 @@ use util::{CargoResult, human}; pub struct UpdateOptions<'a> { pub config: &'a Config, - pub to_update: Option<&'a str>, + pub to_update: &'a [String], pub precise: Option<&'a str>, pub aggressive: bool, } @@ -44,15 +44,18 @@ pub fn update_lockfile(manifest_path: &Path, let mut registry = PackageRegistry::new(opts.config); let mut to_avoid = HashSet::new(); - match opts.to_update { - Some(name) => { + if opts.to_update.len() == 0 { + to_avoid.extend(previous_resolve.iter()); + } else { + let mut sources = Vec::new(); + for name in opts.to_update { let dep = try!(previous_resolve.query(name)); if opts.aggressive { fill_with_deps(&previous_resolve, dep, &mut to_avoid, &mut HashSet::new()); } else { to_avoid.insert(dep); - match opts.precise { + sources.push(match opts.precise { Some(precise) => { // TODO: see comment in `resolve.rs` as well, but this // seems like a pretty hokey reason to single out @@ -62,19 +65,15 @@ pub fn update_lockfile(manifest_path: &Path, } else { precise.to_string() }; - let precise = dep.source_id().clone() - .with_precise(Some(precise)); - try!(registry.add_sources(&[precise])); + dep.source_id().clone().with_precise(Some(precise)) } None => { - let imprecise = dep.source_id().clone() - .with_precise(None); - try!(registry.add_sources(&[imprecise])); + dep.source_id().clone().with_precise(None) } - } + }); } } - None => to_avoid.extend(previous_resolve.iter()), + try!(registry.add_sources(&sources)); } let resolve = try!(ops::resolve_with_previous(&mut registry, diff --git a/src/cargo/ops/cargo_package.rs b/src/cargo/ops/cargo_package.rs index cf0bd9d831b..a1b3cd5afd5 100644 --- a/src/cargo/ops/cargo_package.rs +++ b/src/cargo/ops/cargo_package.rs @@ -2,11 +2,13 @@ use std::io::prelude::*; use std::fs::{self, File}; use std::path::{self, Path, PathBuf}; +use semver::VersionReq; use tar::Archive; use flate2::{GzBuilder, Compression}; use flate2::read::GzDecoder; use core::{SourceId, Package, PackageId}; +use core::dependency::Kind; use sources::PathSource; use util::{self, CargoResult, human, internal, ChainError, Config}; use ops; @@ -35,6 +37,8 @@ pub fn package(manifest_path: &Path, try!(check_metadata(&pkg, config)); } + try!(check_dependencies(&pkg, config)); + if list { let root = pkg.root(); let mut list: Vec<_> = try!(src.list_files(&pkg)).iter().map(|file| { @@ -102,6 +106,32 @@ fn check_metadata(pkg: &Package, config: &Config) -> CargoResult<()> { Ok(()) } +// Warn about wildcard deps which will soon be prohibited on crates.io +#[allow(deprecated)] // connect => join in 1.3 +fn check_dependencies(pkg: &Package, config: &Config) -> CargoResult<()> { + let wildcard = VersionReq::parse("*").unwrap(); + + let mut wildcard_deps = vec![]; + for dep in pkg.dependencies() { + 
if dep.kind() != Kind::Development && dep.version_req() == &wildcard { + wildcard_deps.push(dep.name()); + } + } + + if !wildcard_deps.is_empty() { + let deps = wildcard_deps.connect(", "); + try!(config.shell().warn( + "warning: some dependencies have wildcard (\"*\") version constraints. \ + On December 11th, 2015, crates.io will begin rejecting packages with \ + wildcard dependency constraints. See \ + http://doc.crates.io/crates-io.html#using-crates.io-based-crates \ + for information on version constraints.")); + try!(config.shell().warn( + &format!("dependencies for these crates have wildcard constraints: {}", deps))); + } + Ok(()) +} + fn tar(pkg: &Package, src: &PathSource, config: &Config, dst: &Path) -> CargoResult<()> { @@ -184,7 +214,7 @@ fn run_verify(config: &Config, pkg: &Package, tar: &Path) target: None, features: &[], no_default_features: false, - spec: None, + spec: &[], filter: ops::CompileFilter::Everything, exec_engine: None, release: false, diff --git a/src/cargo/ops/cargo_rustc/compilation.rs b/src/cargo/ops/cargo_rustc/compilation.rs index bd6be3c4c84..2404c1ede96 100644 --- a/src/cargo/ops/cargo_rustc/compilation.rs +++ b/src/cargo/ops/cargo_rustc/compilation.rs @@ -17,7 +17,7 @@ pub struct Compilation<'cfg> { pub libraries: HashMap>, /// An array of all tests created during this compilation. - pub tests: Vec<(String, PathBuf)>, + pub tests: Vec<(Package, Vec<(String, PathBuf)>)>, /// An array of all binaries created. pub binaries: Vec, @@ -39,8 +39,7 @@ pub struct Compilation<'cfg> { /// be passed to future invocations of programs. pub extra_env: HashMap, - /// Top-level package that was compiled - pub package: Package, + pub to_doc_test: Vec, /// Features enabled during this compilation. pub features: HashSet, @@ -49,7 +48,7 @@ pub struct Compilation<'cfg> { } impl<'cfg> Compilation<'cfg> { - pub fn new(pkg: &Package, config: &'cfg Config) -> Compilation<'cfg> { + pub fn new(config: &'cfg Config) -> Compilation<'cfg> { Compilation { libraries: HashMap::new(), native_dirs: HashMap::new(), // TODO: deprecated, remove @@ -58,7 +57,7 @@ impl<'cfg> Compilation<'cfg> { tests: Vec::new(), binaries: Vec::new(), extra_env: HashMap::new(), - package: pkg.clone(), + to_doc_test: Vec::new(), features: HashSet::new(), config: config, } diff --git a/src/cargo/ops/cargo_rustc/context.rs b/src/cargo/ops/cargo_rustc/context.rs index 1ba89283575..15c5d3dc22e 100644 --- a/src/cargo/ops/cargo_rustc/context.rs +++ b/src/cargo/ops/cargo_rustc/context.rs @@ -18,6 +18,7 @@ use super::fingerprint::Fingerprint; use super::layout::{Layout, LayoutProxy}; use super::{Kind, Compilation, BuildConfig}; use super::{ProcessEngine, ExecEngine}; +use super::PackagesToBuild; #[derive(Debug, Clone, Copy)] pub enum Platform { @@ -59,7 +60,6 @@ impl<'a, 'cfg> Context<'a, 'cfg> { config: &'cfg Config, host: Layout, target_layout: Option, - root_pkg: &Package, build_config: BuildConfig, profiles: &'a Profiles) -> CargoResult> { let target = build_config.requested_target.clone(); @@ -90,7 +90,7 @@ impl<'a, 'cfg> Context<'a, 'cfg> { host_dylib: host_dylib, host_exe: host_exe, requirements: HashMap::new(), - compilation: Compilation::new(root_pkg, config), + compilation: Compilation::new(config), build_state: Arc::new(BuildState::new(&build_config, deps)), build_config: build_config, exec_engine: engine, @@ -143,36 +143,35 @@ impl<'a, 'cfg> Context<'a, 'cfg> { /// Prepare this context, ensuring that all filesystem directories are in /// place. 
- pub fn prepare(&mut self, pkg: &'a Package, - targets: &[(&'a Target, &'a Profile)]) - -> CargoResult<()> { + pub fn prepare(&mut self, root: &Package, + pkgs: &'a PackagesToBuild<'a>) -> CargoResult<()> { let _p = profile::start("preparing layout"); try!(self.host.prepare().chain_error(|| { - internal(format!("couldn't prepare build directories for `{}`", - pkg.name())) + internal(format!("couldn't prepare build directories")) })); match self.target { Some(ref mut target) => { try!(target.prepare().chain_error(|| { - internal(format!("couldn't prepare build directories \ - for `{}`", pkg.name())) + internal(format!("couldn't prepare build directories")) })); } None => {} } - for &(target, profile) in targets { - self.build_requirements(pkg, target, profile, Kind::from(target)); + for &(pkg, ref targets) in pkgs { + for &(target, profile) in targets { + self.build_requirements(pkg, target, profile, Kind::from(target)); + } } let jobs = self.jobs(); self.compilation.extra_env.insert("NUM_JOBS".to_string(), jobs.to_string()); self.compilation.root_output = - self.layout(pkg, Kind::Target).proxy().dest().to_path_buf(); + self.layout(root, Kind::Target).proxy().dest().to_path_buf(); self.compilation.deps_output = - self.layout(pkg, Kind::Target).proxy().deps().to_path_buf(); + self.layout(root, Kind::Target).proxy().deps().to_path_buf(); return Ok(()); } diff --git a/src/cargo/ops/cargo_rustc/custom_build.rs b/src/cargo/ops/cargo_rustc/custom_build.rs index a12f27bfd6f..c84bfeaf10b 100644 --- a/src/cargo/ops/cargo_rustc/custom_build.rs +++ b/src/cargo/ops/cargo_rustc/custom_build.rs @@ -8,11 +8,12 @@ use std::sync::Mutex; use core::{Package, Target, PackageId, PackageSet, Profile}; use util::{CargoResult, human, Human}; use util::{internal, ChainError, profile}; +use util::Freshness; use super::job::Work; use super::{fingerprint, process, Kind, Context, Platform}; use super::CommandType; -use util::Freshness; +use super::PackagesToBuild; /// Contains the parsed output of a custom build script. #[derive(Clone, Debug)] @@ -350,12 +351,13 @@ impl BuildOutput { /// The given set of targets to this function is the initial set of /// targets/profiles which are being built. pub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, - pkg: &'b Package, - targets: &[(&'b Target, &'b Profile)]) { + pkgs: &'b PackagesToBuild<'b>) { let mut ret = HashMap::new(); - for &(target, profile) in targets { - build(&mut ret, Kind::Target, pkg, target, profile, cx); - build(&mut ret, Kind::Host, pkg, target, profile, cx); + for &(pkg, ref targets) in pkgs { + for &(target, profile) in targets { + build(&mut ret, Kind::Target, pkg, target, profile, cx); + build(&mut ret, Kind::Host, pkg, target, profile, cx); + } } // Make the output a little more deterministic by sorting all dependencies diff --git a/src/cargo/ops/cargo_rustc/mod.rs b/src/cargo/ops/cargo_rustc/mod.rs index a13a74dc00f..55ef3dcc0cf 100644 --- a/src/cargo/ops/cargo_rustc/mod.rs +++ b/src/cargo/ops/cargo_rustc/mod.rs @@ -52,10 +52,11 @@ pub struct TargetConfig { pub overrides: HashMap, } +pub type PackagesToBuild<'a> = [(&'a Package,Vec<(&'a Target,&'a Profile)>)]; + // Returns a mapping of the root package plus its immediate dependencies to // where the compiled libraries are all located. 
-pub fn compile_targets<'a, 'cfg: 'a>(targets: &[(&'a Target, &'a Profile)], - pkg: &'a Package, +pub fn compile_targets<'a, 'cfg: 'a>(pkg_targets: &'a PackagesToBuild<'a>, deps: &'a PackageSet, resolve: &'a Resolve, sources: &'a SourceMap<'cfg>, @@ -63,88 +64,96 @@ pub fn compile_targets<'a, 'cfg: 'a>(targets: &[(&'a Target, &'a Profile)], build_config: BuildConfig, profiles: &'a Profiles) -> CargoResult> { - if targets.is_empty() { - return Ok(Compilation::new(pkg, config)) - } - - debug!("compile_targets: {}", pkg); try!(links::validate(deps)); let dest = if build_config.release {"release"} else {"debug"}; - let root = if resolve.root() == pkg.package_id() { - pkg - } else { - deps.iter().find(|p| p.package_id() == resolve.root()).unwrap() - }; + let root = deps.iter().find(|p| p.package_id() == resolve.root()).unwrap(); let host_layout = Layout::new(config, root, None, &dest); let target_layout = build_config.requested_target.as_ref().map(|target| { layout::Layout::new(config, root, Some(&target), &dest) }); let mut cx = try!(Context::new(resolve, sources, deps, config, - host_layout, target_layout, pkg, + host_layout, target_layout, build_config, profiles)); let mut queue = JobQueue::new(cx.resolve, deps, cx.jobs()); - // Prep the context's build requirements and see the job graph for all - // packages initially. { let _p = profile::start("preparing build directories"); - try!(cx.prepare(pkg, targets)); - prepare_init(&mut cx, pkg, &mut queue, &mut HashSet::new()); - custom_build::build_map(&mut cx, pkg, targets); + // Prep the context's build requirements and see the job graph for all + // packages initially. + + + try!(cx.prepare(root, pkg_targets)); + let mut visited = HashSet::new(); + for &(pkg, _) in pkg_targets { + prepare_init(&mut cx, pkg, &mut queue, &mut visited); + } + custom_build::build_map(&mut cx, pkg_targets); } - // Build up a list of pending jobs, each of which represent compiling a - // particular package. No actual work is executed as part of this, that's - // all done next as part of the `execute` function which will run - // everything in order with proper parallelism. - try!(compile(targets, pkg, &mut cx, &mut queue)); + for &(pkg, ref targets) in pkg_targets { + // Build up a list of pending jobs, each of which represent + // compiling a particular package. No actual work is executed as + // part of this, that's all done next as part of the `execute` + // function which will run everything in order with proper + // parallelism. + try!(compile(targets, pkg, &mut cx, &mut queue)); + } // Now that we've figured out everything that we're going to do, do it! 
try!(queue.execute(cx.config)); - let out_dir = cx.layout(pkg, Kind::Target).build_out(pkg) - .display().to_string(); - cx.compilation.extra_env.insert("OUT_DIR".to_string(), out_dir); + for &(pkg, ref targets) in pkg_targets.iter() { + let out_dir = cx.layout(pkg, Kind::Target).build_out(pkg) + .display().to_string(); + cx.compilation.extra_env.insert("OUT_DIR".to_string(), out_dir); + + let mut tests = vec![]; + + for &(target, profile) in targets { + let kind = Kind::from(target); + for filename in try!(cx.target_filenames(pkg, target, profile, + kind)).iter() { + let dst = cx.out_dir(pkg, kind, target).join(filename); + if profile.test { + tests.push((target.name().to_string(), dst)); + } else if target.is_bin() || target.is_example() { + cx.compilation.binaries.push(dst); + } else if target.is_lib() { + let pkgid = pkg.package_id().clone(); + cx.compilation.libraries.entry(pkgid).or_insert(Vec::new()) + .push((target.clone(), dst)); + } + if !target.is_lib() { continue } - for &(target, profile) in targets { - let kind = Kind::from(target); - for filename in try!(cx.target_filenames(pkg, target, profile, - kind)).iter() { - let dst = cx.out_dir(pkg, kind, target).join(filename); - if profile.test { - cx.compilation.tests.push((target.name().to_string(), dst)); - } else if target.is_bin() || target.is_example() { - cx.compilation.binaries.push(dst); - } else if target.is_lib() { - let pkgid = pkg.package_id().clone(); - cx.compilation.libraries.entry(pkgid).or_insert(Vec::new()) - .push((target.clone(), dst)); - } - if !target.is_lib() { continue } + // Include immediate lib deps as well + for dep in &cx.dep_targets(pkg, target, kind, profile) { + let (pkg, target, profile) = *dep; + let pkgid = pkg.package_id(); + if !target.is_lib() { continue } + if profile.doc { continue } + if cx.compilation.libraries.contains_key(&pkgid) { + continue + } - // Include immediate lib deps as well - for dep in cx.dep_targets(pkg, target, kind, profile) { - let (pkg, target, profile) = dep; - let pkgid = pkg.package_id(); - if !target.is_lib() { continue } - if profile.doc { continue } - if cx.compilation.libraries.contains_key(&pkgid) { continue } - - let kind = kind.for_target(target); - let v = try!(cx.target_filenames(pkg, target, profile, kind)); - let v = v.into_iter().map(|f| { - (target.clone(), cx.out_dir(pkg, kind, target).join(f)) - }).collect::>(); - cx.compilation.libraries.insert(pkgid.clone(), v); + let kind = kind.for_target(target); + let v = try!(cx.target_filenames(pkg, target, profile, kind)); + let v = v.into_iter().map(|f| { + (target.clone(), cx.out_dir(pkg, kind, target).join(f)) + }).collect::>(); + cx.compilation.libraries.insert(pkgid.clone(), v); + } } } + + cx.compilation.tests.push((pkg.clone(), tests)); + } - if let Some(feats) = cx.resolve.features(pkg.package_id()) { + if let Some(feats) = cx.resolve.features(root.package_id()) { cx.compilation.features.extend(feats.iter().cloned()); } diff --git a/src/cargo/ops/cargo_test.rs b/src/cargo/ops/cargo_test.rs index 555b239c0a2..a34556dd890 100644 --- a/src/cargo/ops/cargo_test.rs +++ b/src/cargo/ops/cargo_test.rs @@ -62,7 +62,10 @@ fn compile_tests<'a>(manifest_path: &Path, options: &TestOptions<'a>) -> CargoResult> { let mut compilation = try!(ops::compile(manifest_path, &options.compile_opts)); - compilation.tests.sort(); + for tests in compilation.tests.iter_mut() { + tests.1.sort(); + } + Ok(compilation) } @@ -76,24 +79,26 @@ fn run_unit_tests(options: &TestOptions, let mut errors = Vec::new(); - for &(_, ref exe) in 
&compilation.tests { - let to_display = match util::without_prefix(exe, &cwd) { - Some(path) => path, - None => &**exe, - }; - let mut cmd = try!(compilation.target_process(exe, &compilation.package)); - cmd.args(test_args); - try!(config.shell().concise(|shell| { - shell.status("Running", to_display.display().to_string()) - })); - try!(config.shell().verbose(|shell| { - shell.status("Running", cmd.to_string()) - })); - - if let Err(e) = ExecEngine::exec(&mut ProcessEngine, cmd) { - errors.push(e); - if !options.no_fail_fast { - break + for &(ref pkg, ref tests) in &compilation.tests { + for &(_, ref exe) in tests { + let to_display = match util::without_prefix(exe, &cwd) { + Some(path) => path, + None => &**exe, + }; + let mut cmd = try!(compilation.target_process(exe, pkg)); + cmd.args(test_args); + try!(config.shell().concise(|shell| { + shell.status("Running", to_display.display().to_string()) + })); + try!(config.shell().verbose(|shell| { + shell.status("Running", cmd.to_string()) + })); + + if let Err(e) = ExecEngine::exec(&mut ProcessEngine, cmd) { + errors.push(e); + if !options.no_fail_fast { + break + } } } } @@ -107,63 +112,68 @@ fn run_doc_tests(options: &TestOptions, -> CargoResult> { let mut errors = Vec::new(); let config = options.compile_opts.config; - let libs = compilation.package.targets().iter() - .filter(|t| t.doctested()) - .map(|t| (t.src_path(), t.name(), t.crate_name())); - for (lib, name, crate_name) in libs { - try!(config.shell().status("Doc-tests", name)); - let mut p = try!(compilation.rustdoc_process(&compilation.package)); - p.arg("--test").arg(lib) - .arg("--crate-name").arg(&crate_name) - .cwd(compilation.package.root()); - - for &rust_dep in &[&compilation.deps_output, &compilation.root_output] { - let mut arg = OsString::from("dependency="); - arg.push(rust_dep); - p.arg("-L").arg(arg); - } - for native_dep in compilation.native_dirs.values() { - p.arg("-L").arg(native_dep); - } - if test_args.len() > 0 { - p.arg("--test-args").arg(&test_args.connect(" ")); - } + let libs = compilation.to_doc_test.iter().map(|package| { + (package, package.targets().iter().filter(|t| t.doctested()) + .map(|t| (t.src_path(), t.name(), t.crate_name()))) + }); + + for (package, tests) in libs { + for (lib, name, crate_name) in tests { + try!(config.shell().status("Doc-tests", name)); + let mut p = try!(compilation.rustdoc_process(package)); + p.arg("--test").arg(lib) + .arg("--crate-name").arg(&crate_name) + .cwd(package.root()); + + for &rust_dep in &[&compilation.deps_output, &compilation.root_output] { + let mut arg = OsString::from("dependency="); + arg.push(rust_dep); + p.arg("-L").arg(arg); + } + for native_dep in compilation.native_dirs.values() { + p.arg("-L").arg(native_dep); + } - for feat in compilation.features.iter() { - p.arg("--cfg").arg(&format!("feature=\"{}\"", feat)); - } + if test_args.len() > 0 { + p.arg("--test-args").arg(&test_args.connect(" ")); + } - for (_, libs) in compilation.libraries.iter() { - for &(ref target, ref lib) in libs.iter() { - // Note that we can *only* doctest rlib outputs here. A - // staticlib output cannot be linked by the compiler (it just - // doesn't do that). A dylib output, however, can be linked by - // the compiler, but will always fail. Currently all dylibs are - // built as "static dylibs" where the standard library is - // statically linked into the dylib. The doc tests fail, - // however, for now as they try to link the standard library - // dynamically as well, causing problems. 
As a result we only - // pass `--extern` for rlib deps and skip out on all other - // artifacts. - if lib.extension() != Some(OsStr::new("rlib")) && - !target.for_host() { - continue + for feat in compilation.features.iter() { + p.arg("--cfg").arg(&format!("feature=\"{}\"", feat)); + } + + for (_, libs) in compilation.libraries.iter() { + for &(ref target, ref lib) in libs.iter() { + // Note that we can *only* doctest rlib outputs here. A + // staticlib output cannot be linked by the compiler (it just + // doesn't do that). A dylib output, however, can be linked by + // the compiler, but will always fail. Currently all dylibs are + // built as "static dylibs" where the standard library is + // statically linked into the dylib. The doc tests fail, + // however, for now as they try to link the standard library + // dynamically as well, causing problems. As a result we only + // pass `--extern` for rlib deps and skip out on all other + // artifacts. + if lib.extension() != Some(OsStr::new("rlib")) && + !target.for_host() { + continue + } + let mut arg = OsString::from(target.crate_name()); + arg.push("="); + arg.push(lib); + p.arg("--extern").arg(&arg); } - let mut arg = OsString::from(target.crate_name()); - arg.push("="); - arg.push(lib); - p.arg("--extern").arg(&arg); } - } - try!(config.shell().verbose(|shell| { - shell.status("Running", p.to_string()) - })); - if let Err(e) = ExecEngine::exec(&mut ProcessEngine, p) { - errors.push(e); - if !options.no_fail_fast { - break + try!(config.shell().verbose(|shell| { + shell.status("Running", p.to_string()) + })); + if let Err(e) = ExecEngine::exec(&mut ProcessEngine, p) { + errors.push(e); + if !options.no_fail_fast { + return Ok(errors); + } } } } diff --git a/tests/support/mod.rs b/tests/support/mod.rs index 0f7602e99da..4411dd62582 100644 --- a/tests/support/mod.rs +++ b/tests/support/mod.rs @@ -269,7 +269,8 @@ pub struct Execs { expect_stdout: Option, expect_stdin: Option, expect_stderr: Option, - expect_exit_code: Option + expect_exit_code: Option, + expect_stdout_contains: Vec } impl Execs { @@ -289,6 +290,11 @@ impl Execs { self } + pub fn with_stdout_contains(mut self, expected: S) -> Execs { + self.expect_stdout_contains.push(expected.to_string()); + self + } + fn match_output(&self, actual: &Output) -> ham::MatchResult { self.match_status(actual) .and(self.match_stdout(actual)) @@ -312,6 +318,8 @@ impl Execs { fn match_stdout(&self, actual: &Output) -> ham::MatchResult { self.match_std(self.expect_stdout.as_ref(), &actual.stdout, "stdout", &actual.stderr) + .and(self.match_contains(self.expect_stdout_contains.as_ref(), + &actual.stdout, "stdout")) } fn match_stderr(&self, actual: &Output) -> ham::MatchResult { @@ -319,6 +327,40 @@ impl Execs { "stderr", &actual.stdout) } + #[allow(deprecated)] // connect => join in 1.3 + fn match_contains(&self, expect: &[String], actual: &[u8], + description: &str) -> ham::MatchResult { + for s in expect { + let a: Vec<&str> = match str::from_utf8(actual) { + Err(..) 
=> return Err(format!("{} was not utf8 encoded", + description)), + Ok(actual) => actual.lines().collect(), + }; + let e: Vec<&str> = s.lines().collect(); + + let first = e.first().unwrap(); + let mut ai = a.iter(); + match ai.position(|s| lines_match(first, s)) { + Some(_) => { + let match_count = ai.zip(e.iter().skip(1)) + .take_while(|&(a, e)| lines_match(a, e)).count(); + if match_count != (e.len() - 1) { + return ham::expect(false, + format!("expected: {}\n\ + actual: {}", + e.connect("\n"), + a.iter().take(e.len()).map(|&s| s) + .collect::>().connect("\n"))); + } + }, + None => { + return ham::expect(false, format!("no match")); + } + }; + } + ham::expect(true, format!("OK")) + } + #[allow(deprecated)] // connect => join in 1.3 fn match_std(&self, expected: Option<&String>, actual: &[u8], description: &str, extra: &[u8]) -> ham::MatchResult { @@ -446,7 +488,8 @@ pub fn execs() -> Execs { expect_stdout: None, expect_stderr: None, expect_stdin: None, - expect_exit_code: None + expect_exit_code: None, + expect_stdout_contains: vec![] } } diff --git a/tests/test_cargo_bench.rs b/tests/test_cargo_bench.rs index 5e3b21cad53..1439172586d 100644 --- a/tests/test_cargo_bench.rs +++ b/tests/test_cargo_bench.rs @@ -922,3 +922,88 @@ test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured ", compiling = COMPILING, running = RUNNING))); }); + +test!(test_bench_multiple_packages { + if !::is_nightly() { return } + + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "foo" + authors = [] + version = "0.1.0" + + [dependencies.bar] + path = "../bar" + + [dependencies.baz] + path = "../baz" + "#) + .file("src/lib.rs", ""); + + let bar = project("bar") + .file("Cargo.toml", r#" + [project] + name = "bar" + authors = [] + version = "0.1.0" + + [[bench]] + name = "bbar" + test = true + "#) + .file("src/lib.rs", "") + .file("benches/bbar.rs", r#" + #![feature(test)] + extern crate test; + + use test::Bencher; + + #[bench] + fn bench_bar(_b: &mut Bencher) {} + "#); + bar.build(); + + let baz = project("baz") + .file("Cargo.toml", r#" + [project] + name = "baz" + authors = [] + version = "0.1.0" + + [[bench]] + name = "bbaz" + test = true + "#) + .file("src/lib.rs", "") + .file("benches/bbaz.rs", r#" + #![feature(test)] + extern crate test; + + use test::Bencher; + + #[bench] + fn bench_baz(_b: &mut Bencher) {} + "#); + baz.build(); + + + assert_that(p.cargo_process("bench").arg("-p").arg("bar").arg("-p").arg("baz"), + execs().with_status(0) + .with_stdout_contains(&format!("\ +{running} target[..]release[..]bbaz-[..] + +running 1 test +test bench_baz ... bench: 0 ns/iter (+/- 0) + +test result: ok. 0 passed; 0 failed; 0 ignored; 1 measured +", running = RUNNING)) + .with_stdout_contains(&format!("\ +{running} target[..]release[..]bbar-[..] + +running 1 test +test bench_bar ... bench: 0 ns/iter (+/- 0) + +test result: ok. 
0 passed; 0 failed; 0 ignored; 1 measured
+", running = RUNNING)));
+});
diff --git a/tests/test_cargo_clean.rs b/tests/test_cargo_clean.rs
index 9ccd8f7314f..ff3313d9979 100644
--- a/tests/test_cargo_clean.rs
+++ b/tests/test_cargo_clean.rs
@@ -1,5 +1,7 @@
+use std::env;
+
 use support::{project, execs, main_file, basic_bin_manifest};
-use hamcrest::{assert_that, existing_dir, is_not};
+use hamcrest::{assert_that, existing_dir, existing_file, is_not};
 
 fn setup() {
 }
@@ -30,3 +32,64 @@ test!(different_dir {
                 execs().with_status(0).with_stdout(""));
     assert_that(&p.build_dir(), is_not(existing_dir()));
 });
+
+test!(clean_multiple_packages {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.d1]
+            path = "d1"
+            [dependencies.d2]
+            path = "d2"
+
+            [[bin]]
+            name = "foo"
+        "#)
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .file("d1/Cargo.toml", r#"
+            [package]
+            name = "d1"
+            version = "0.0.1"
+            authors = []
+
+            [[bin]]
+            name = "d1"
+        "#)
+        .file("d1/src/main.rs", "fn main() { println!(\"d1\"); }")
+        .file("d2/Cargo.toml", r#"
+            [package]
+            name = "d2"
+            version = "0.0.1"
+            authors = []
+
+            [[bin]]
+            name = "d2"
+        "#)
+        .file("d2/src/main.rs", "fn main() { println!(\"d2\"); }");
+    p.build();
+
+    assert_that(p.cargo_process("build").arg("-p").arg("d1").arg("-p").arg("d2")
+                                        .arg("-p").arg("foo"),
+                execs().with_status(0));
+
+    let d1_path = &p.build_dir().join("debug").join("deps")
+                                .join(format!("d1{}", env::consts::EXE_SUFFIX));
+    let d2_path = &p.build_dir().join("debug").join("deps")
+                                .join(format!("d2{}", env::consts::EXE_SUFFIX));
+
+
+    assert_that(&p.bin("foo"), existing_file());
+    assert_that(d1_path, existing_file());
+    assert_that(d2_path, existing_file());
+
+    assert_that(p.cargo("clean").arg("-p").arg("d1").arg("-p").arg("d2")
+                 .cwd(&p.root().join("src")),
+                execs().with_status(0).with_stdout(""));
+    assert_that(&p.bin("foo"), existing_file());
+    assert_that(d1_path, is_not(existing_file()));
+    assert_that(d2_path, is_not(existing_file()));
+});
diff --git a/tests/test_cargo_compile.rs b/tests/test_cargo_compile.rs
index b8930f2b872..cd1714c1876 100644
--- a/tests/test_cargo_compile.rs
+++ b/tests/test_cargo_compile.rs
@@ -1925,3 +1925,103 @@ test!(rustc_no_trans {
     assert_that(p.cargo("rustc").arg("-v").arg("--").arg("-Zno-trans"),
                 execs().with_status(0));
 });
+
+test!(build_multiple_packages {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.d1]
+            path = "d1"
+            [dependencies.d2]
+            path = "d2"
+
+            [[bin]]
+            name = "foo"
+        "#)
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .file("d1/Cargo.toml", r#"
+            [package]
+            name = "d1"
+            version = "0.0.1"
+            authors = []
+
+            [[bin]]
+            name = "d1"
+        "#)
+        .file("d1/src/lib.rs", "")
+        .file("d1/src/main.rs", "fn main() { println!(\"d1\"); }")
+        .file("d2/Cargo.toml", r#"
+            [package]
+            name = "d2"
+            version = "0.0.1"
+            authors = []
+
+            [[bin]]
+            name = "d2"
+            doctest = false
+        "#)
+        .file("d2/src/main.rs", "fn main() { println!(\"d2\"); }");
+    p.build();
+
+    assert_that(p.cargo_process("build").arg("-p").arg("d1").arg("-p").arg("d2")
+                                        .arg("-p").arg("foo"),
+                execs());
+
+    assert_that(&p.bin("foo"), existing_file());
+    assert_that(process(&p.bin("foo")).unwrap(),
+                execs().with_stdout("i am foo\n"));
+
+    let d1_path = &p.build_dir().join("debug").join("deps")
+                                .join(format!("d1{}", env::consts::EXE_SUFFIX));
+    let d2_path = &p.build_dir().join("debug").join("deps")
+                                .join(format!("d2{}", env::consts::EXE_SUFFIX));
+
+    assert_that(d1_path, existing_file());
+    assert_that(process(d1_path).unwrap(), execs().with_stdout("d1"));
+
+    assert_that(d2_path, existing_file());
+    assert_that(process(d2_path).unwrap(),
+                execs().with_stdout("d2"));
+});
+
+test!(invalid_spec {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.d1]
+            path = "d1"
+
+            [[bin]]
+            name = "foo"
+        "#)
+        .file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
+        .file("d1/Cargo.toml", r#"
+            [package]
+            name = "d1"
+            version = "0.0.1"
+            authors = []
+
+            [[bin]]
+            name = "d1"
+        "#)
+        .file("d1/src/lib.rs", "")
+        .file("d1/src/main.rs", "fn main() { println!(\"d1\"); }");
+    p.build();
+
+    assert_that(p.cargo_process("build").arg("-p").arg("notAValidDep"),
+                execs().with_status(101).with_stderr(
+                    "could not find package matching spec `notAValidDep`".to_string()));
+
+    assert_that(p.cargo_process("build").arg("-p").arg("d1").arg("-p").arg("notAValidDep"),
+                execs().with_status(101).with_stderr(
+                    "could not find package matching spec `notAValidDep`".to_string()));
+
+});
diff --git a/tests/test_cargo_doc.rs b/tests/test_cargo_doc.rs
index 57d37129d6c..93517837978 100644
--- a/tests/test_cargo_doc.rs
+++ b/tests/test_cargo_doc.rs
@@ -432,3 +432,48 @@ test!(doc_release {
 {running} `rustdoc src[..]lib.rs [..]`
 ", compiling = COMPILING, running = RUNNING)));
 });
+
+test!(doc_multiple_deps {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "bar"
+
+            [dependencies.baz]
+            path = "baz"
+        "#)
+        .file("src/lib.rs", r#"
+            extern crate bar;
+            pub fn foo() {}
+        "#)
+        .file("bar/Cargo.toml", r#"
+            [package]
+            name = "bar"
+            version = "0.0.1"
+            authors = []
+        "#)
+        .file("bar/src/lib.rs", r#"
+            pub fn bar() {}
+        "#)
+        .file("baz/Cargo.toml", r#"
+            [package]
+            name = "baz"
+            version = "0.0.1"
+            authors = []
+        "#)
+        .file("baz/src/lib.rs", r#"
+            pub fn baz() {}
+        "#);
+
+    assert_that(p.cargo_process("doc").arg("-p").arg("bar").arg("-p").arg("baz"),
+                execs().with_status(0));
+
+    assert_that(&p.root().join("target/doc"), existing_dir());
+    assert_that(&p.root().join("target/doc/bar/index.html"), existing_file());
+    assert_that(&p.root().join("target/doc/baz/index.html"), existing_file());
+});
diff --git a/tests/test_cargo_package.rs b/tests/test_cargo_package.rs
index 5ca6b1cf632..6e59a364773 100644
--- a/tests/test_cargo_package.rs
+++ b/tests/test_cargo_package.rs
@@ -9,7 +9,8 @@ use git2;
 use tar::Archive;
 
 use support::{project, execs, cargo_dir, paths, git, path2url};
-use support::{PACKAGING, VERIFYING, COMPILING, ARCHIVING};
+use support::{PACKAGING, VERIFYING, COMPILING, ARCHIVING, UPDATING, DOWNLOADING};
+use support::registry as r;
 use hamcrest::{assert_that, existing_file};
 
 fn setup() {
@@ -141,6 +142,61 @@ http://doc.crates.io/manifest.html#package-metadata for more info."));
                        dir = p.url())));
 });
 
+test!(wildcard_deps {
+    r::init();
+
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+            license = "MIT"
+            description = "foo"
+            repository = "bar"
+
+            [dependencies]
+            bar = "*"
+
+            [build-dependencies]
+            baz = "*"
+
+            [dev-dependencies]
+            buz = "*"
+        "#)
+        .file("src/main.rs", "fn main() {}");
+
+    r::mock_pkg("baz", "0.0.1", &[]);
+    r::mock_pkg("bar", "0.0.1", &[("baz", "0.0.1", "normal")]);
+    r::mock_pkg("buz", "0.0.1", &[("bar", "0.0.1", "normal")]);
+
+    assert_that(p.cargo_process("package"),
+                execs().with_status(0).with_stdout(&format!("\
+{packaging} foo v0.0.1 ({dir})
+{verifying} foo v0.0.1 ({dir})
+{updating} registry `{reg}`
+{downloading} [..] v0.0.1 (registry file://[..])
+{downloading} [..] v0.0.1 (registry file://[..])
+{downloading} [..] v0.0.1 (registry file://[..])
+{compiling} baz v0.0.1 (registry file://[..])
+{compiling} bar v0.0.1 (registry file://[..])
+{compiling} foo v0.0.1 ({dir}[..])
+",
+        packaging = PACKAGING,
+        verifying = VERIFYING,
+        updating = UPDATING,
+        downloading = DOWNLOADING,
+        compiling = COMPILING,
+        dir = p.url(),
+        reg = r::registry()))
+                       .with_stderr("\
+warning: some dependencies have wildcard (\"*\") version constraints. On December 11th, 2015, \
+crates.io will begin rejecting packages with wildcard dependency constraints. See \
+http://doc.crates.io/crates-io.html#using-crates.io-based-crates for information on version \
+constraints.
+dependencies for these crates have wildcard constraints: bar, baz"));
+});
+
 test!(package_verbose {
     let root = paths::root().join("all");
     let p = git::repo(&root)
diff --git a/tests/test_cargo_registry.rs b/tests/test_cargo_registry.rs
index 632511c6830..8209b5bb293 100644
--- a/tests/test_cargo_registry.rs
+++ b/tests/test_cargo_registry.rs
@@ -829,3 +829,63 @@ test!(update_backtracking_ok {
 {updating} registry `[..]`
 ", updating = UPDATING)));
 });
+
+test!(update_multiple_packages {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+
+            [dependencies]
+            a = "*"
+            b = "*"
+            c = "*"
+        "#)
+        .file("src/main.rs", "fn main() {}");
+    p.build();
+
+    r::mock_pkg("a", "0.1.0", &[]);
+    r::mock_pkg("b", "0.1.0", &[]);
+    r::mock_pkg("c", "0.1.0", &[]);
+
+    assert_that(p.cargo("fetch"),
+                execs().with_status(0));
+
+    r::mock_pkg("a", "0.1.1", &[]);
+    r::mock_pkg("b", "0.1.1", &[]);
+    r::mock_pkg("c", "0.1.1", &[]);
+
+    assert_that(p.cargo("update").arg("-pa").arg("-pb"),
+                execs().with_status(0)
+                       .with_stdout(format!("\
+{updating} registry `[..]`
+{updating} a v0.1.0 (registry [..]) -> v0.1.1
+{updating} b v0.1.0 (registry [..]) -> v0.1.1
+", updating = UPDATING)));
+
+    assert_that(p.cargo("update").arg("-pb").arg("-pc"),
+                execs().with_status(0)
+                       .with_stdout(format!("\
+{updating} registry `[..]`
+{updating} c v0.1.0 (registry [..]) -> v0.1.1
+", updating = UPDATING)));
+
+    assert_that(p.cargo("build"),
+                execs().with_status(0)
+                       .with_stdout_contains(format!("\
+{downloading} a v0.1.1 (registry file://[..])", downloading = DOWNLOADING))
+                       .with_stdout_contains(format!("\
+{downloading} b v0.1.1 (registry file://[..])", downloading = DOWNLOADING))
+                       .with_stdout_contains(format!("\
+{downloading} c v0.1.1 (registry file://[..])", downloading = DOWNLOADING))
+                       .with_stdout_contains(format!("\
+{compiling} a v0.1.1 (registry [..])", compiling = COMPILING))
+                       .with_stdout_contains(format!("\
+{compiling} b v0.1.1 (registry [..])", compiling = COMPILING))
+                       .with_stdout_contains(format!("\
+{compiling} c v0.1.1 (registry [..])", compiling = COMPILING))
+                       .with_stdout_contains(format!("\
+{compiling} foo v0.5.0 ([..])", compiling = COMPILING)));
+});
diff --git a/tests/test_cargo_rustc.rs b/tests/test_cargo_rustc.rs
index 8000517932c..559a5f722cc 100644
--- a/tests/test_cargo_rustc.rs
+++ b/tests/test_cargo_rustc.rs
@@ -3,6 +3,7 @@
 use support::{execs, project};
 use support::{COMPILING, RUNNING};
 use hamcrest::{assert_that};
+
 
 fn setup() {
 }
@@ -296,3 +297,59 @@ test!(build_only_bar_dependency {
                     compiling = COMPILING, running = RUNNING, url = foo.url())));
 });
+
+test!(fail_with_multiple_packages {
+    let foo = project("foo")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.bar]
+            path = "../bar"
+
+            [dependencies.baz]
+            path = "../baz"
+        "#)
+        .file("src/main.rs", r#"
+            fn main() {}
+        "#);
+    foo.build();
+
+    let bar = project("bar")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "bar"
+            version = "0.1.0"
+            authors = []
+        "#)
+        .file("src/main.rs", r#"
+            fn main() {
+                if cfg!(flag = "1") { println!("Yeah from bar!"); }
+            }
+        "#);
+    bar.build();
+
+    let baz = project("baz")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "baz"
+            version = "0.1.0"
+            authors = []
+        "#)
+        .file("src/main.rs", r#"
+            fn main() {
+                if cfg!(flag = "1") { println!("Yeah from baz!"); }
+            }
+        "#);
+    baz.build();
+
+    assert_that(foo.cargo("rustc").arg("-v").arg("-p").arg("bar")
+                                  .arg("-p").arg("baz"),
+                execs().with_status(1).with_stderr("\
+Invalid arguments.
+
+Usage:
+    cargo rustc [options] [--] [...]".to_string()));
+});
diff --git a/tests/test_cargo_test.rs b/tests/test_cargo_test.rs
index ab29bcc47e5..32787d998a7 100644
--- a/tests/test_cargo_test.rs
+++ b/tests/test_cargo_test.rs
@@ -1935,3 +1935,63 @@ test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
 
 ", compiling = COMPILING, running = RUNNING, doctest = DOCTEST)))
 });
+
+test!(test_multiple_packages {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "foo"
+            version = "0.0.1"
+            authors = []
+
+            [dependencies.d1]
+            path = "d1"
+            [dependencies.d2]
+            path = "d2"
+
+            [lib]
+            name = "foo"
+            doctest = false
+        "#)
+        .file("src/lib.rs", "")
+        .file("d1/Cargo.toml", r#"
+            [package]
+            name = "d1"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "d1"
+            doctest = false
+        "#)
+        .file("d1/src/lib.rs", "")
+        .file("d2/Cargo.toml", r#"
+            [package]
+            name = "d2"
+            version = "0.0.1"
+            authors = []
+
+            [lib]
+            name = "d2"
+            doctest = false
+        "#)
+        .file("d2/src/lib.rs", "");
+    p.build();
+
+    assert_that(p.cargo("test").arg("-p").arg("d1").arg("-p").arg("d2"),
+                execs().with_status(0)
+                       .with_stdout_contains(&format!("\
+{running} target[..]debug[..]d1-[..]
+
+running 0 tests
+
+test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
+", running = RUNNING))
+                       .with_stdout_contains(&format!("\
+{running} target[..]debug[..]d2-[..]
+
+running 0 tests
+
+test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
+", running = RUNNING)));
+});