diff --git a/src/bin/git_checkout.rs b/src/bin/git_checkout.rs index a453055147c..a78a481df63 100644 --- a/src/bin/git_checkout.rs +++ b/src/bin/git_checkout.rs @@ -1,6 +1,6 @@ use cargo::core::source::{Source, SourceId, GitReference}; use cargo::sources::git::{GitSource}; -use cargo::util::{Config, CliResult, CliError, human, ToUrl}; +use cargo::util::{Config, CliResult, CliError, ToUrl}; #[derive(RustcDecodable)] pub struct Options { @@ -31,11 +31,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { &options.flag_color)); let Options { flag_url: url, flag_reference: reference, .. } = options; - let url = try!(url.to_url().map_err(|e| { - human(format!("The URL `{}` you passed was \ - not a valid URL: {}", url, e)) - }) - .map_err(|e| CliError::from_boxed(e, 1))); + let url = try!(url.to_url()); let reference = GitReference::Branch(reference.clone()); let source_id = SourceId::for_git(&url, reference); diff --git a/src/bin/install.rs b/src/bin/install.rs index bd2c3191bf3..9ee5a141d7d 100644 --- a/src/bin/install.rs +++ b/src/bin/install.rs @@ -1,6 +1,6 @@ use cargo::ops; use cargo::core::{SourceId, GitReference}; -use cargo::util::{CliResult, Config, ToUrl, human}; +use cargo::util::{CliResult, Config, ToUrl}; #[derive(RustcDecodable)] pub struct Options { @@ -104,7 +104,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { }; let source = if let Some(url) = options.flag_git { - let url = try!(url.to_url().map_err(human)); + let url = try!(url.to_url()); let gitref = if let Some(branch) = options.flag_branch { GitReference::Branch(branch) } else if let Some(tag) = options.flag_tag { @@ -120,7 +120,7 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { } else if options.arg_crate == None { try!(SourceId::for_path(&config.cwd())) } else { - try!(SourceId::for_central(config)) + try!(SourceId::crates_io(config)) }; let krate = options.arg_crate.as_ref().map(|s| &s[..]); diff --git a/src/bin/login.rs 
b/src/bin/login.rs index bfb8418e40e..dbd93e485b0 100644 --- a/src/bin/login.rs +++ b/src/bin/login.rs @@ -37,10 +37,10 @@ pub fn execute(options: Options, config: &Config) -> CliResult> { let token = match options.arg_token.clone() { Some(token) => token, None => { - let src = try!(SourceId::for_central(config)); - let mut src = RegistrySource::new(&src, config); + let src = try!(SourceId::crates_io(config)); + let mut src = RegistrySource::remote(&src, config); try!(src.update()); - let config = try!(src.config()); + let config = try!(src.config()).unwrap(); let host = options.flag_host.clone().unwrap_or(config.api); println!("please visit {}me and paste the API Token below", host); let mut line = String::new(); diff --git a/src/cargo/core/dependency.rs b/src/cargo/core/dependency.rs index a030de626f9..02cd9668793 100644 --- a/src/cargo/core/dependency.rs +++ b/src/cargo/core/dependency.rs @@ -251,6 +251,7 @@ impl Dependency { pub fn is_transitive(&self) -> bool { self.inner.is_transitive() } pub fn is_build(&self) -> bool { self.inner.is_build() } pub fn is_optional(&self) -> bool { self.inner.is_optional() } + /// Returns true if the default features of the dependency are requested. 
pub fn uses_default_features(&self) -> bool { self.inner.uses_default_features() @@ -265,6 +266,17 @@ impl Dependency { pub fn matches_id(&self, id: &PackageId) -> bool { self.inner.matches_id(id) } + + pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) + -> Dependency { + if self.source_id() != to_replace { + self + } else { + Rc::try_unwrap(self.inner).unwrap_or_else(|r| (*r).clone()) + .set_source_id(replace_with.clone()) + .into_dependency() + } + } } impl Platform { diff --git a/src/cargo/core/manifest.rs b/src/cargo/core/manifest.rs index 5a4a48182b4..3c0bdc6a470 100644 --- a/src/cargo/core/manifest.rs +++ b/src/cargo/core/manifest.rs @@ -5,7 +5,7 @@ use std::path::{PathBuf, Path}; use semver::Version; use rustc_serialize::{Encoder, Encodable}; -use core::{Dependency, PackageId, Summary}; +use core::{Dependency, PackageId, Summary, SourceId}; use core::package_id::Metadata; use util::{CargoResult, human}; @@ -202,6 +202,14 @@ impl Manifest { pub fn set_summary(&mut self, summary: Summary) { self.summary = summary; } + + pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) + -> Manifest { + Manifest { + summary: self.summary.map_source(to_replace, replace_with), + ..self + } + } } impl Target { diff --git a/src/cargo/core/package.rs b/src/cargo/core/package.rs index c8362ffa286..75e53994476 100644 --- a/src/cargo/core/package.rs +++ b/src/cargo/core/package.rs @@ -89,6 +89,14 @@ impl Package { pub fn generate_metadata(&self) -> Metadata { self.package_id().generate_metadata() } + + pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) + -> Package { + Package { + manifest: self.manifest.map_source(to_replace, replace_with), + manifest_path: self.manifest_path, + } + } } impl fmt::Display for Package { diff --git a/src/cargo/core/package_id.rs b/src/cargo/core/package_id.rs index 0d968aeeef0..b29b8ca11d5 100644 --- a/src/cargo/core/package_id.rs +++ b/src/cargo/core/package_id.rs @@ -13,12 +13,12 @@ use 
util::{CargoResult, CargoError, short_hash, ToSemver}; use core::source::SourceId; /// Identifier for a specific version of a package in a specific source. -#[derive(Clone, Debug)] +#[derive(Clone)] pub struct PackageId { inner: Arc, } -#[derive(PartialEq, PartialOrd, Eq, Ord, Debug)] +#[derive(PartialEq, PartialOrd, Eq, Ord)] struct PackageIdInner { name: String, version: semver::Version, @@ -38,13 +38,19 @@ impl Decodable for PackageId { fn decode(d: &mut D) -> Result { let string: String = try!(Decodable::decode(d)); let regex = Regex::new(r"^([^ ]+) ([^ ]+) \(([^\)]+)\)$").unwrap(); - let captures = regex.captures(&string).expect("invalid serialized PackageId"); + let captures = try!(regex.captures(&string).ok_or_else(|| { + d.error("invalid serialized PackageId") + })); let name = captures.at(1).unwrap(); let version = captures.at(2).unwrap(); let url = captures.at(3).unwrap(); - let version = semver::Version::parse(version).ok().expect("invalid version"); - let source_id = SourceId::from_url(url.to_string()); + let version = try!(semver::Version::parse(version).map_err(|_| { + d.error("invalid version") + })); + let source_id = try!(SourceId::from_url(url).map_err(|e| { + d.error(&e.to_string()) + })); Ok(PackageId { inner: Arc::new(PackageIdInner { @@ -151,6 +157,16 @@ impl PackageId { }), } } + + pub fn with_source_id(&self, source: &SourceId) -> PackageId { + PackageId { + inner: Arc::new(PackageIdInner { + name: self.inner.name.to_string(), + version: self.inner.version.clone(), + source_id: source.clone(), + }), + } + } } impl Metadata { @@ -173,16 +189,26 @@ impl fmt::Display for PackageId { } } +impl fmt::Debug for PackageId { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + f.debug_struct("PackageId") + .field("name", &self.inner.name) + .field("version", &self.inner.version.to_string()) + .field("source", &self.inner.source_id.to_string()) + .finish() + } +} + #[cfg(test)] mod tests { use super::PackageId; use core::source::SourceId; - use 
sources::RegistrySource; + use sources::CRATES_IO; use util::ToUrl; #[test] fn invalid_version_handled_nicely() { - let loc = RegistrySource::default_url().to_url().unwrap(); + let loc = CRATES_IO.to_url().unwrap(); let repo = SourceId::for_registry(&loc); assert!(PackageId::new("foo", "1.0", &repo).is_err()); diff --git a/src/cargo/core/registry.rs b/src/cargo/core/registry.rs index c005703860a..a0bfb088a99 100644 --- a/src/cargo/core/registry.rs +++ b/src/cargo/core/registry.rs @@ -3,6 +3,7 @@ use std::collections::{HashSet, HashMap}; use core::{Source, SourceId, SourceMap, Summary, Dependency, PackageId, Package}; use core::PackageSet; use util::{CargoResult, ChainError, Config, human, profile}; +use sources::config::SourceConfigMap; /// Source of information about a group of packages. /// @@ -10,6 +11,14 @@ use util::{CargoResult, ChainError, Config, human, profile}; pub trait Registry { /// Attempt to find the packages that match a dependency request. fn query(&mut self, name: &Dependency) -> CargoResult>; + + /// Returns whether or not this registry will return summaries with + /// checksums listed. + /// + /// By default, registries do not support checksums. + fn supports_checksums(&self) -> bool { + false + } } impl Registry for Vec { @@ -26,6 +35,12 @@ impl Registry for Vec { } } +impl<'a, T: ?Sized + Registry + 'a> Registry for Box { + fn query(&mut self, name: &Dependency) -> CargoResult> { + (**self).query(name) + } +} + /// This structure represents a registry of known packages. It internally /// contains a number of `Box` instances which are used to load a /// `Package` from. @@ -41,7 +56,6 @@ impl Registry for Vec { /// operations if necessary) and is ready to be queried for packages. pub struct PackageRegistry<'cfg> { sources: SourceMap<'cfg>, - config: &'cfg Config, // A list of sources which are considered "overrides" which take precedent // when querying for packages. 
@@ -65,6 +79,7 @@ pub struct PackageRegistry<'cfg> { source_ids: HashMap, locked: HashMap)>>>, + source_config: SourceConfigMap<'cfg>, } #[derive(PartialEq, Eq, Clone, Copy)] @@ -75,14 +90,15 @@ enum Kind { } impl<'cfg> PackageRegistry<'cfg> { - pub fn new(config: &'cfg Config) -> PackageRegistry<'cfg> { - PackageRegistry { + pub fn new(config: &'cfg Config) -> CargoResult> { + let source_config = try!(SourceConfigMap::new(config)); + Ok(PackageRegistry { sources: SourceMap::new(), source_ids: HashMap::new(), overrides: vec![], - config: config, + source_config: source_config, locked: HashMap::new(), - } + }) } pub fn get(self, package_ids: &[PackageId]) -> PackageSet<'cfg> { @@ -158,7 +174,7 @@ impl<'cfg> PackageRegistry<'cfg> { fn load(&mut self, source_id: &SourceId, kind: Kind) -> CargoResult<()> { (|| { - let mut source = source_id.load(self.config); + let mut source = try!(self.source_config.load(source_id)); // Ensure the source has fetched all necessary remote data. let p = profile::start(format!("updating: {}", source_id)); diff --git a/src/cargo/core/resolver/encode.rs b/src/cargo/core/resolver/encode.rs index c2d187c3f09..425e42505ff 100644 --- a/src/cargo/core/resolver/encode.rs +++ b/src/cargo/core/resolver/encode.rs @@ -1,10 +1,12 @@ use std::collections::{HashMap, BTreeMap}; +use std::fmt; +use std::str::FromStr; use regex::Regex; use rustc_serialize::{Encodable, Encoder, Decodable, Decoder}; use core::{Package, PackageId, SourceId}; -use util::{CargoResult, Graph, Config}; +use util::{CargoResult, CargoError, Graph, internal, ChainError, Config}; use super::Resolve; @@ -18,7 +20,7 @@ pub struct EncodableResolve { pub type Metadata = BTreeMap; impl EncodableResolve { - pub fn to_resolve(&self, root: &Package, config: &Config) + pub fn to_resolve(self, root: &Package, config: &Config) -> CargoResult { let mut path_deps = HashMap::new(); try!(build_path_deps(root, &mut path_deps, config)); @@ -81,12 +83,55 @@ impl EncodableResolve { 
try!(add_dependencies(id, pkg)); } } + let mut metadata = self.metadata.unwrap_or(BTreeMap::new()); + + // Parse out all package checksums. After we do this we can be in a few + // situations: + // + // * We parsed no checksums. In this situation we're dealing with an old + // lock file and we're gonna fill them all in. + // * We parsed some checksums, but not one for all packages listed. It + // could have been the case that some were listed, then an older Cargo + // client added more dependencies, and now we're going to fill in the + // missing ones. + // * There are too many checksums listed, indicative of an older Cargo + // client removing a package but not updating the checksums listed. + // + // In all of these situations they're part of normal usage, so we don't + // really worry about it. We just try to slurp up as many checksums as + // possible. + let mut checksums = HashMap::new(); + let prefix = "checksum "; + let mut to_remove = Vec::new(); + for (k, v) in metadata.iter().filter(|p| p.0.starts_with(prefix)) { + to_remove.push(k.to_string()); + let k = &k[prefix.len()..]; + let id: EncodablePackageId = try!(k.parse().chain_error(|| { + internal("invalid encoding of checksum in lockfile") + })); + let id = try!(to_package_id(&id.name, + &id.version, + id.source.as_ref(), + default, + &path_deps)); + let v = if v == "" { + None + } else { + Some(v.to_string()) + }; + checksums.insert(id, v); + } + + for k in to_remove { + metadata.remove(&k); + } Ok(Resolve { graph: g, root: root, features: HashMap::new(), - metadata: self.metadata.clone(), + checksums: checksums, + metadata: metadata, }) } } @@ -146,29 +191,32 @@ pub struct EncodablePackageId { source: Option } -impl Encodable for EncodablePackageId { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - let mut out = format!("{} {}", self.name, self.version); +impl fmt::Display for EncodablePackageId { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + try!(write!(f, "{} {}", self.name, 
self.version)); if let Some(ref s) = self.source { - out.push_str(&format!(" ({})", s.to_url())); + try!(write!(f, " ({})", s.to_url())); } - out.encode(s) + Ok(()) } } -impl Decodable for EncodablePackageId { - fn decode(d: &mut D) -> Result { - let string: String = try!(Decodable::decode(d)); +impl FromStr for EncodablePackageId { + type Err = Box; + + fn from_str(s: &str) -> CargoResult { let regex = Regex::new(r"^([^ ]+) ([^ ]+)(?: \(([^\)]+)\))?$").unwrap(); - let captures = regex.captures(&string) - .expect("invalid serialized PackageId"); + let captures = try!(regex.captures(s).ok_or_else(|| { + internal("invalid serialized PackageId") + })); let name = captures.at(1).unwrap(); let version = captures.at(2).unwrap(); - let source = captures.at(3); - - let source_id = source.map(|s| SourceId::from_url(s.to_string())); + let source_id = match captures.at(3) { + Some(s) => Some(try!(SourceId::from_url(s))), + None => None, + }; Ok(EncodablePackageId { name: name.to_string(), @@ -178,21 +226,49 @@ impl Decodable for EncodablePackageId { } } +impl Encodable for EncodablePackageId { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + self.to_string().encode(s) + } +} + +impl Decodable for EncodablePackageId { + fn decode(d: &mut D) -> Result { + String::decode(d).and_then(|string| { + string.parse::() + .map_err(|e| d.error(&e.to_string())) + }) + } +} + impl Encodable for Resolve { fn encode(&self, s: &mut S) -> Result<(), S::Error> { - let mut ids: Vec<&PackageId> = self.graph.iter().collect(); + let mut ids: Vec<_> = self.graph.iter().collect(); ids.sort(); let encodable = ids.iter().filter_map(|&id| { if self.root == *id { return None; } Some(encodable_resolve_node(id, &self.graph)) - }).collect::>(); + }).collect::>(); + + let mut metadata = self.metadata.clone(); + + for id in ids.iter().filter(|id| ***id != self.root) { + let checksum = match self.checksums[*id] { + Some(ref s) => &s[..], + None => "", + }; + let id = encodable_package_id(id); + 
metadata.insert(format!("checksum {}", id.to_string()), + checksum.to_string()); + } + let metadata = if metadata.len() == 0 {None} else {Some(metadata)}; EncodableResolve { package: Some(encodable), root: encodable_resolve_node(&self.root, &self.graph), - metadata: self.metadata.clone(), + metadata: metadata, }.encode(s) } } diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs index 8304756d1b8..fa2481d180b 100644 --- a/src/cargo/core/resolver/mod.rs +++ b/src/cargo/core/resolver/mod.rs @@ -47,7 +47,7 @@ //! over the place. use std::cmp::Ordering; -use std::collections::{HashSet, HashMap, BinaryHeap}; +use std::collections::{HashSet, HashMap, BinaryHeap, BTreeMap}; use std::fmt; use std::ops::Range; use std::rc::Rc; @@ -74,8 +74,9 @@ mod encode; pub struct Resolve { graph: Graph, features: HashMap>, + checksums: HashMap>, root: PackageId, - metadata: Option, + metadata: Metadata, } #[derive(Clone, Copy)] @@ -99,21 +100,93 @@ type ResolveResult = CargoResult>>; type DepInfo = (Dependency, Vec>, Vec); impl Resolve { - fn new(root: PackageId) -> Resolve { - let mut g = Graph::new(); - g.add(root.clone(), &[]); - Resolve { graph: g, root: root, features: HashMap::new(), metadata: None } - } + pub fn merge_from(&mut self, previous: &Resolve) -> CargoResult<()> { + // Given a previous instance of resolve, it should be forbidden to ever + // have a checksums which *differ*. If the same package id has differing + // checksums, then something has gone wrong such as: + // + // * Something got seriously corrupted + // * A "mirror" isn't actually a mirror as some changes were made + // * A replacement source wasn't actually a replacment, some changes + // were made + // + // In all of these cases, we want to report an error to indicate that + // something is awry. Normal execution (esp just using crates.io) should + // never run into this. 
+ for (id, cksum) in previous.checksums.iter() { + if let Some(mine) = self.checksums.get(id) { + if mine == cksum { + continue + } + + // If the previous checksum wasn't calculated, the current + // checksum is `Some`. This may indicate that a source was + // erroneously replaced or was replaced with something that + // desires stronger checksum guarantees than can be afforded + // elsewhere. + if cksum.is_none() { + bail!("\ +checksum for `{}` was not previously calculated, but a checksum could now \ +be calculated + +this could be indicative of a few possible situations: + + * the source `{}` did not previously support checksums, + but was replaced with one that does + * newer Cargo implementations know how to checksum this source, but this + older implementation does not + * the lock file is corrupt +", id, id.source_id()) + + // If our checksum hasn't been calculated, then it could mean + // that future Cargo figured out how to checksum something or + // more realistically we were overridden with a source that does + // not have checksums. + } else if mine.is_none() { + bail!("\ +checksum for `{}` could not be calculated, but a checksum is listed in \ +the existing lock file + +this could be indicative of a few possible situations: + + * the source `{}` supports checksums, + but was replaced with one that doesn't + * the lock file is corrupt + +unable to verify that `{0}` was the same as before in either situation +", id, id.source_id()) + + // If the checksums aren't equal, and neither is None, then they + // must both be Some, in which case the checksum now differs. + // That's quite bad! + } else { + bail!("\ +checksum for `{}` changed between lock files + +this could be indicative of a few possible errors: + + * the lock file is corrupt + * a replacement source in use (e.g. 
a mirror) returned a different checksum + * the source itself may be corrupt in one way or another + +unable to verify that `{0}` was the same as before in any situation +", id); + } + } + } - pub fn copy_metadata(&mut self, other: &Resolve) { - self.metadata = other.metadata.clone(); + // Be sure to just copy over any unknown metadata. + self.metadata = previous.metadata.clone(); + Ok(()) } pub fn iter(&self) -> Nodes { self.graph.iter() } - pub fn root(&self) -> &PackageId { &self.root } + pub fn root(&self) -> &PackageId { + &self.root + } pub fn deps(&self, pkg: &PackageId) -> Option> { self.graph.edges(pkg) @@ -142,23 +215,40 @@ impl fmt::Debug for Resolve { #[derive(Clone)] struct Context { activations: HashMap<(String, SourceId), Vec>>, - resolve: Resolve, + resolve_graph: Graph, + resolve_features: HashMap>, } /// Builds the list of all packages required to build the first argument. -pub fn resolve(summary: &Summary, method: &Method, - registry: &mut Registry) -> CargoResult { +pub fn resolve(summary: &Summary, method: &Method, registry: &mut Registry) + -> CargoResult { trace!("resolve; summary={}", summary.package_id()); - let summary = Rc::new(summary.clone()); let cx = Context { - resolve: Resolve::new(summary.package_id().clone()), + resolve_graph: Graph::new(), + resolve_features: HashMap::new(), activations: HashMap::new(), }; let _p = profile::start(format!("resolving: {}", summary.package_id())); - let cx = try!(activate_deps_loop(cx, registry, summary, method)); - try!(check_cycles(&cx)); - Ok(cx.resolve) + let cx = try!(activate_deps_loop(cx, registry, Rc::new(summary.clone()), + method)); + try!(check_cycles(&cx, summary.package_id())); + + let mut resolve = Resolve { + graph: cx.resolve_graph, + features: cx.resolve_features, + root: summary.package_id().clone(), + checksums: HashMap::new(), + metadata: BTreeMap::new(), + }; + + for summary in cx.activations.values().flat_map(|v| v.iter()) { + let cksum = summary.checksum().map(|s| 
s.to_string()); + resolve.checksums.insert(summary.package_id().clone(), cksum); + } + + trace!("resolved: {:?}", resolve); + Ok(resolve) } /// Attempts to activate the summary `parent` in the context `cx`. @@ -398,12 +488,12 @@ fn activate_deps_loop(mut cx: Context, trace!("{}[{}]>{} trying {}", parent.name(), cur, dep.name(), candidate.version()); - cx.resolve.graph.link(parent.package_id().clone(), + cx.resolve_graph.link(parent.package_id().clone(), candidate.package_id().clone()); remaining_deps.extend(try!(activate(&mut cx, registry, candidate, &method))); } - trace!("resolved: {:?}", cx.resolve); + Ok(cx) } @@ -444,8 +534,8 @@ fn activation_error(cx: &Context, dep.name(), parent.name(), dep.name()); 'outer: for v in prev_active.iter() { - for node in cx.resolve.graph.iter() { - let edges = match cx.resolve.graph.edges(node) { + for node in cx.resolve_graph.iter() { + let edges = match cx.resolve_graph.edges(node) { Some(edges) => edges, None => continue, }; @@ -630,7 +720,7 @@ impl Context { let key = (id.name().to_string(), id.source_id().clone()); let prev = self.activations.entry(key).or_insert(Vec::new()); if !prev.iter().any(|c| c == summary) { - self.resolve.graph.add(id.clone(), &[]); + self.resolve_graph.add(id.clone(), &[]); prev.push(summary.clone()); return false } @@ -643,7 +733,7 @@ impl Context { }; let has_default_feature = summary.features().contains_key("default"); - match self.resolve.features(id) { + match self.resolve_features.get(id) { Some(prev) => { features.iter().all(|f| prev.contains(f)) && (!use_default || prev.contains("default") || @@ -740,7 +830,7 @@ impl Context { // Record what list of features is active for this package. 
if !used_features.is_empty() { let pkgid = parent.package_id(); - self.resolve.features.entry(pkgid.clone()) + self.resolve_features.entry(pkgid.clone()) .or_insert(HashSet::new()) .extend(used_features); } @@ -749,18 +839,18 @@ impl Context { } } -fn check_cycles(cx: &Context) -> CargoResult<()> { +fn check_cycles(cx: &Context, root: &PackageId) -> CargoResult<()> { let mut summaries = HashMap::new(); for summary in cx.activations.values().flat_map(|v| v) { summaries.insert(summary.package_id(), &**summary); } - return visit(&cx.resolve, - cx.resolve.root(), + return visit(&cx.resolve_graph, + root, &summaries, &mut HashSet::new(), &mut HashSet::new()); - fn visit<'a>(resolve: &'a Resolve, + fn visit<'a>(resolve: &'a Graph, id: &'a PackageId, summaries: &HashMap<&'a PackageId, &Summary>, visited: &mut HashSet<&'a PackageId>, @@ -781,7 +871,7 @@ fn check_cycles(cx: &Context) -> CargoResult<()> { // dependencies. if checked.insert(id) { let summary = summaries[id]; - for dep in resolve.deps(id).into_iter().flat_map(|a| a) { + for dep in resolve.edges(id).into_iter().flat_map(|a| a) { let is_transitive = summary.dependencies().iter().any(|d| { d.matches_id(dep) && d.is_transitive() }); diff --git a/src/cargo/core/source.rs b/src/cargo/core/source.rs index 64779af3731..86cd296bfc3 100644 --- a/src/cargo/core/source.rs +++ b/src/cargo/core/source.rs @@ -5,13 +5,16 @@ use std::hash; use std::mem; use std::path::Path; use std::sync::Arc; -use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; +use std::sync::atomic::{AtomicBool, ATOMIC_BOOL_INIT}; +use std::sync::atomic::Ordering::SeqCst; +use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; use url::Url; use core::{Package, PackageId, Registry}; -use sources::{PathSource, GitSource, RegistrySource}; +use ops; use sources::git; +use sources::{PathSource, GitSource, RegistrySource, CRATES_IO}; use util::{human, Config, CargoResult, ToUrl}; /// A Source finds and downloads remote packages based on names 
and @@ -38,6 +41,20 @@ pub trait Source: Registry { fn fingerprint(&self, pkg: &Package) -> CargoResult; } +impl<'a, T: Source + ?Sized + 'a> Source for Box { + fn update(&mut self) -> CargoResult<()> { + (**self).update() + } + + fn download(&mut self, id: &PackageId) -> CargoResult { + (**self).download(id) + } + + fn fingerprint(&self, pkg: &Package) -> CargoResult { + (**self).fingerprint(pkg) + } +} + #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] enum Kind { /// Kind::Git() represents a git repository @@ -46,6 +63,8 @@ enum Kind { Path, /// represents the central registry Registry, + /// represents a local filesystem-based registry + LocalRegistry, } #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] @@ -90,16 +109,16 @@ impl SourceId { /// use cargo::core::SourceId; /// SourceId::from_url("git+https://github.com/alexcrichton/\ /// libssh2-static-sys#80e71a3021618eb05\ - /// 656c58fb7c5ef5f12bc747f".to_string()); + /// 656c58fb7c5ef5f12bc747f"); /// ``` - pub fn from_url(string: String) -> SourceId { + pub fn from_url(string: &str) -> CargoResult { let mut parts = string.splitn(2, '+'); let kind = parts.next().unwrap(); let url = parts.next().unwrap(); match kind { "git" => { - let mut url = url.to_url().unwrap(); + let mut url = try!(url.to_url()); let mut reference = GitReference::Branch("master".to_string()); let pairs = url.query_pairs().unwrap_or(Vec::new()); for &(ref k, ref v) in pairs.iter() { @@ -115,19 +134,19 @@ impl SourceId { } url.query = None; let precise = mem::replace(&mut url.fragment, None); - SourceId::for_git(&url, reference) - .with_precise(precise) + Ok(SourceId::for_git(&url, reference) + .with_precise(precise)) }, "registry" => { - let url = url.to_url().unwrap(); - SourceId::new(Kind::Registry, url) - .with_precise(Some("locked".to_string())) + let url = try!(url.to_url()); + Ok(SourceId::new(Kind::Registry, url) + .with_precise(Some("locked".to_string()))) } "path" => { - let url = 
url.to_url().unwrap(); - SourceId::new(Kind::Path, url) + let url = try!(url.to_url()); + Ok(SourceId::new(Kind::Path, url)) } - _ => panic!("Unsupported serialized SourceId") + kind => Err(human(format!("unsupported source protocol: {}", kind))) } } @@ -152,12 +171,15 @@ impl SourceId { SourceIdInner { kind: Kind::Registry, ref url, .. } => { format!("registry+{}", url) } + SourceIdInner { kind: Kind::LocalRegistry, ref url, .. } => { + format!("local-registry+{}", url) + } } } // Pass absolute path pub fn for_path(path: &Path) -> CargoResult { - let url = try!(path.to_url().map_err(human)); + let url = try!(path.to_url()); Ok(SourceId::new(Kind::Path, url)) } @@ -169,17 +191,38 @@ impl SourceId { SourceId::new(Kind::Registry, url.clone()) } + pub fn for_local_registry(path: &Path) -> CargoResult { + let url = try!(path.to_url()); + Ok(SourceId::new(Kind::LocalRegistry, url)) + } + /// Returns the `SourceId` corresponding to the main repository. /// /// This is the main cargo registry by default, but it can be overridden in /// a `.cargo/config`. - pub fn for_central(config: &Config) -> CargoResult { - Ok(SourceId::for_registry(&try!(RegistrySource::url(config)))) + pub fn crates_io(config: &Config) -> CargoResult { + let cfg = try!(ops::registry_configuration(config)); + let url = if let Some(ref index) = cfg.index { + static WARNED: AtomicBool = ATOMIC_BOOL_INIT; + if !WARNED.swap(true, SeqCst) { + try!(config.shell().warn("custom registry support via \ + the `registry.index` configuration is \ + being removed, this functionality \ + will not work in the future")); + } + &index[..] 
+ } else { + CRATES_IO + }; + let url = try!(url.to_url()); + Ok(SourceId::for_registry(&url)) } pub fn url(&self) -> &Url { &self.inner.url } pub fn is_path(&self) -> bool { self.inner.kind == Kind::Path } - pub fn is_registry(&self) -> bool { self.inner.kind == Kind::Registry } + pub fn is_registry(&self) -> bool { + self.inner.kind == Kind::Registry || self.inner.kind == Kind::LocalRegistry + } pub fn is_git(&self) -> bool { match self.inner.kind { @@ -200,7 +243,14 @@ impl SourceId { }; Box::new(PathSource::new(&path, self, config)) } - Kind::Registry => Box::new(RegistrySource::new(self, config)), + Kind::Registry => Box::new(RegistrySource::remote(self, config)), + Kind::LocalRegistry => { + let path = match self.inner.url.to_file_path() { + Ok(p) => p, + Err(()) => panic!("path sources cannot be remote"), + }; + Box::new(RegistrySource::local(self, &path, config)) + } } } @@ -229,7 +279,7 @@ impl SourceId { Kind::Registry => {} _ => return false, } - self.inner.url.to_string() == RegistrySource::default_url() + self.inner.url.to_string() == CRATES_IO } } @@ -263,8 +313,10 @@ impl Encodable for SourceId { impl Decodable for SourceId { fn decode(d: &mut D) -> Result { - let string: String = Decodable::decode(d).ok().expect("Invalid encoded SourceId"); - Ok(SourceId::from_url(string)) + let string: String = try!(Decodable::decode(d)); + SourceId::from_url(&string).map_err(|e| { + d.error(&e.to_string()) + }) } } @@ -284,7 +336,8 @@ impl fmt::Display for SourceId { } Ok(()) } - SourceIdInner { kind: Kind::Registry, ref url, .. } => { + SourceIdInner { kind: Kind::Registry, ref url, .. } | + SourceIdInner { kind: Kind::LocalRegistry, ref url, .. 
} => { write!(f, "registry {}", url) } } diff --git a/src/cargo/core/summary.rs b/src/cargo/core/summary.rs index 01697171d2f..219e21bcadb 100644 --- a/src/cargo/core/summary.rs +++ b/src/cargo/core/summary.rs @@ -15,6 +15,7 @@ pub struct Summary { package_id: PackageId, dependencies: Vec, features: HashMap>, + checksum: Option, } impl Summary { @@ -60,6 +61,7 @@ impl Summary { package_id: pkg_id, dependencies: dependencies, features: features, + checksum: None, }) } @@ -69,18 +71,39 @@ impl Summary { pub fn source_id(&self) -> &SourceId { self.package_id.source_id() } pub fn dependencies(&self) -> &[Dependency] { &self.dependencies } pub fn features(&self) -> &HashMap> { &self.features } + pub fn checksum(&self) -> Option<&str> { + self.checksum.as_ref().map(|s| &s[..]) + } pub fn override_id(mut self, id: PackageId) -> Summary { self.package_id = id; self } + pub fn set_checksum(mut self, cksum: String) -> Summary { + self.checksum = Some(cksum); + self + } + pub fn map_dependencies(mut self, f: F) -> Summary where F: FnMut(Dependency) -> Dependency { let deps = mem::replace(&mut self.dependencies, Vec::new()); self.dependencies = deps.into_iter().map(f).collect(); self } + + pub fn map_source(self, to_replace: &SourceId, replace_with: &SourceId) + -> Summary { + let me = if self.package_id().source_id() == to_replace { + let new_id = self.package_id().with_source_id(replace_with); + self.override_id(new_id) + } else { + self + }; + me.map_dependencies(|dep| { + dep.map_source(to_replace, replace_with) + }) + } } impl PartialEq for Summary { @@ -88,15 +111,3 @@ impl PartialEq for Summary { self.package_id == other.package_id } } - -pub trait SummaryVec { - fn names(&self) -> Vec; -} - -impl SummaryVec for Vec { - // TODO: Move to Registry - fn names(&self) -> Vec { - self.iter().map(|summary| summary.name().to_string()).collect() - } - -} diff --git a/src/cargo/ops/cargo_compile.rs b/src/cargo/ops/cargo_compile.rs index ad82d4f773e..3eda2a431b9 100644 --- 
a/src/cargo/ops/cargo_compile.rs +++ b/src/cargo/ops/cargo_compile.rs @@ -105,7 +105,7 @@ pub fn resolve_dependencies<'a>(root_package: &Package, no_default_features: bool) -> CargoResult<(PackageSet<'a>, Resolve)> { - let mut registry = PackageRegistry::new(config); + let mut registry = try!(PackageRegistry::new(config)); if let Some(source) = source { registry.add_preloaded(root_package.package_id().source_id(), source); @@ -389,6 +389,7 @@ fn add_overrides<'a>(registry: &mut PackageRegistry<'a>, Some(list) => list, None => return Ok(()) }; + let paths = paths.val.iter().map(|&(ref s, ref p)| { // The path listed next to the string is the config file in which the // key was located, so we want to pop off the `.cargo/config` component diff --git a/src/cargo/ops/cargo_fetch.rs b/src/cargo/ops/cargo_fetch.rs index e8f401d5333..f47853a5458 100644 --- a/src/cargo/ops/cargo_fetch.rs +++ b/src/cargo/ops/cargo_fetch.rs @@ -10,7 +10,7 @@ pub fn fetch<'a>(manifest_path: &Path, config: &'a Config) -> CargoResult<(Resolve, PackageSet<'a>)> { let package = try!(Package::for_path(manifest_path, config)); - let mut registry = PackageRegistry::new(config); + let mut registry = try!(PackageRegistry::new(config)); let resolve = try!(ops::resolve_pkg(&mut registry, &package, config)); let packages = get_resolved_packages(&resolve, registry); for id in resolve.iter() { diff --git a/src/cargo/ops/cargo_generate_lockfile.rs b/src/cargo/ops/cargo_generate_lockfile.rs index 1b8bb0087f0..133ed6e6ade 100644 --- a/src/cargo/ops/cargo_generate_lockfile.rs +++ b/src/cargo/ops/cargo_generate_lockfile.rs @@ -19,7 +19,7 @@ pub struct UpdateOptions<'a> { pub fn generate_lockfile(manifest_path: &Path, config: &Config) -> CargoResult<()> { let package = try!(Package::for_path(manifest_path, config)); - let mut registry = PackageRegistry::new(config); + let mut registry = try!(PackageRegistry::new(config)); let resolve = try!(ops::resolve_with_previous(&mut registry, &package, Method::Everything, 
None, None)); @@ -41,7 +41,7 @@ pub fn update_lockfile(manifest_path: &Path, bail!("cannot specify both aggressive and precise simultaneously") } - let mut registry = PackageRegistry::new(opts.config); + let mut registry = try!(PackageRegistry::new(opts.config)); let mut to_avoid = HashSet::new(); if opts.to_update.is_empty() { diff --git a/src/cargo/ops/cargo_install.rs b/src/cargo/ops/cargo_install.rs index 666c75aa887..601d7f68e77 100644 --- a/src/cargo/ops/cargo_install.rs +++ b/src/cargo/ops/cargo_install.rs @@ -12,7 +12,7 @@ use toml; use core::{SourceId, Source, Package, Registry, Dependency, PackageIdSpec}; use core::PackageId; use ops::{self, CompileFilter}; -use sources::{GitSource, PathSource, RegistrySource}; +use sources::{GitSource, PathSource, SourceConfigMap}; use util::{CargoResult, ChainError, Config, human, internal}; #[derive(RustcDecodable, RustcEncodable)] @@ -44,6 +44,7 @@ pub fn install(root: Option<&str>, opts: &ops::CompileOptions) -> CargoResult<()> { let config = opts.config; let root = try!(resolve_root(root, config)); + let map = try!(SourceConfigMap::new(config)); let (pkg, source) = if source_id.is_git() { try!(select_pkg(GitSource::new(source_id, config), source_id, krate, vers, &mut |git| git.read_packages())) @@ -60,7 +61,7 @@ pub fn install(root: Option<&str>, source_id, krate, vers, &mut |path| path.read_packages())) } else { - try!(select_pkg(RegistrySource::new(source_id, config), + try!(select_pkg(try!(map.load(source_id)), source_id, krate, vers, &mut |_| Err(human("must specify a crate to install from \ crates.io, or use --path or --git to \ diff --git a/src/cargo/ops/cargo_package.rs b/src/cargo/ops/cargo_package.rs index bf173f1642c..fbb421fc44d 100644 --- a/src/cargo/ops/cargo_package.rs +++ b/src/cargo/ops/cargo_package.rs @@ -187,21 +187,21 @@ fn run_verify(config: &Config, pkg: &Package, tar: &Path) try!(archive.unpack(dst.parent().unwrap())); let manifest_path = dst.join("Cargo.toml"); - // When packages are uploaded 
to the registry, all path dependencies are - // implicitly converted to registry-based dependencies, so we rewrite those + // When packages are uploaded to a registry, all path dependencies are + // implicitly converted to registry dependencies, so we rewrite those // dependencies here. // // We also make sure to point all paths at `dst` instead of the previous // location that the package was originally read from. In locking the // `SourceId` we're telling it that the corresponding `PathSource` will be // considered updated and we won't actually read any packages. - let registry = try!(SourceId::for_central(config)); + let cratesio = try!(SourceId::crates_io(config)); let precise = Some("locked".to_string()); let new_src = try!(SourceId::for_path(&dst)).with_precise(precise); let new_pkgid = try!(PackageId::new(pkg.name(), pkg.version(), &new_src)); let new_summary = pkg.summary().clone().map_dependencies(|d| { if !d.source_id().is_path() { return d } - d.clone_inner().set_source_id(registry.clone()).into_dependency() + d.clone_inner().set_source_id(cratesio.clone()).into_dependency() }); let mut new_manifest = pkg.manifest().clone(); new_manifest.set_summary(new_summary.override_id(new_pkgid)); diff --git a/src/cargo/ops/registry.rs b/src/cargo/ops/registry.rs index a02b80a6ba2..7643cd70110 100644 --- a/src/cargo/ops/registry.rs +++ b/src/cargo/ops/registry.rs @@ -139,18 +139,19 @@ pub fn registry(config: &Config, // Parse all configuration options let RegistryConfig { token: token_config, - index: index_config, + index: _index_config, } = try!(registry_configuration(config)); let token = token.or(token_config); - let index = index.or(index_config).unwrap_or(RegistrySource::default_url()); - let index = try!(index.to_url().map_err(human)); - let sid = SourceId::for_registry(&index); + let sid = match index { + Some(index) => SourceId::for_registry(&try!(index.to_url())), + None => try!(SourceId::crates_io(config)), + }; let api_host = { - let mut src = 
RegistrySource::new(&sid, config); + let mut src = RegistrySource::remote(&sid, config); try!(src.update().chain_error(|| { - human(format!("failed to update registry {}", index)) + human(format!("failed to update {}", sid)) })); - (try!(src.config())).api + (try!(src.config())).unwrap().api }; let handle = try!(http_handle(config)); Ok((Registry::new_handle(api_host, token, handle), sid)) diff --git a/src/cargo/ops/resolve.rs b/src/cargo/ops/resolve.rs index f5925a46c50..cb84faa2cbc 100644 --- a/src/cargo/ops/resolve.rs +++ b/src/cargo/ops/resolve.rs @@ -111,9 +111,8 @@ pub fn resolve_with_previous<'a>(registry: &mut PackageRegistry, }; let mut resolved = try!(resolver::resolve(&summary, &method, registry)); - match previous { - Some(r) => resolved.copy_metadata(r), - None => {} + if let Some(previous) = previous { + try!(resolved.merge_from(previous)); } return Ok(resolved); diff --git a/src/cargo/sources/config.rs b/src/cargo/sources/config.rs new file mode 100644 index 00000000000..3be5156452c --- /dev/null +++ b/src/cargo/sources/config.rs @@ -0,0 +1,175 @@ +//! Implementation of configuration for various sources +//! +//! This module will parse the various `source.*` TOML configuration keys into a +//! structure usable by Cargo itself. Currently this is primarily used to map +//! sources to one another via the `replace-with` key in `.cargo/config`. 
+ +use std::collections::HashMap; +use std::path::{Path, PathBuf}; + +use url::Url; + +use core::{Source, SourceId}; +use sources::ReplacedSource; +use util::{CargoResult, Config, ChainError, human, ToUrl}; +use util::config::ConfigValue; + +pub struct SourceConfigMap<'cfg> { + cfgs: HashMap, + id2name: HashMap, + config: &'cfg Config, +} + +/// Configuration for a particular source, found in TOML looking like: +/// +/// ```toml +/// [source.crates-io] +/// registry = 'https://github.com/rust-lang/crates.io-index' +/// replace-with = 'foo' # optional +/// ``` +struct SourceConfig { + // id this source corresponds to, inferred from the various defined keys in + // the configuration + id: SourceId, + + // Name of the source that this source should be replaced with. This field + // is a tuple of (name, path) where path is where this configuration key was + // defined (the literal `.cargo/config` file). + replace_with: Option<(String, PathBuf)>, +} + +impl<'cfg> SourceConfigMap<'cfg> { + pub fn new(config: &'cfg Config) -> CargoResult> { + let mut base = try!(SourceConfigMap::empty(config)); + if let Some(table) = try!(config.get_table("source")) { + for (key, value) in table.val.iter() { + try!(base.add_config(key, value)); + } + } + Ok(base) + } + + pub fn empty(config: &'cfg Config) -> CargoResult> { + let mut base = SourceConfigMap { + cfgs: HashMap::new(), + id2name: HashMap::new(), + config: config, + }; + base.add("crates-io", SourceConfig { + id: try!(SourceId::crates_io(config)), + replace_with: None, + }); + Ok(base) + } + + pub fn load(&self, id: &SourceId) -> CargoResult> { + debug!("loading: {}", id); + let mut name = match self.id2name.get(id) { + Some(name) => name, + None => return Ok(id.load(self.config)), + }; + let mut path = Path::new("/"); + let orig_name = name; + let new_id; + loop { + let cfg = match self.cfgs.get(name) { + Some(cfg) => cfg, + None => bail!("could not find a configured source with the \ + name `{}` when 
attempting to lookup `{}` \ + (configuration in `{}`)", + name, orig_name, path.display()), + }; + match cfg.replace_with { + Some((ref s, ref p)) => { + name = s; + path = p; + } + None if *id == cfg.id => return Ok(id.load(self.config)), + None => { + new_id = cfg.id.with_precise(id.precise() + .map(|s| s.to_string())); + break + } + } + debug!("following pointer to {}", name); + if name == orig_name { + bail!("detected a cycle of `replace-with` sources, the source \ + `{}` is eventually replaced with itself \ + (configuration in `{}`)", name, path.display()) + } + } + let new_src = new_id.load(self.config); + let old_src = id.load(self.config); + if new_src.supports_checksums() != old_src.supports_checksums() { + let (supports, no_support) = if new_src.supports_checksums() { + (name, orig_name) + } else { + (orig_name, name) + }; + bail!("\ +cannot replace `{orig}` with `{name}`, the source `{supports}` supports \ +checksums, but `{no_support}` does not + +a lock file compatible with `{orig}` cannot be generated in this situation +", orig = orig_name, name = name, supports = supports, no_support = no_support); + } + Ok(Box::new(ReplacedSource::new(id, &new_id, new_src))) + } + + fn add(&mut self, name: &str, cfg: SourceConfig) { + self.id2name.insert(cfg.id.clone(), name.to_string()); + self.cfgs.insert(name.to_string(), cfg); + } + + fn add_config(&mut self, name: &str, cfg: &ConfigValue) -> CargoResult<()> { + let (table, _path) = try!(cfg.table(&format!("source.{}", name))); + let mut srcs = Vec::new(); + if let Some(val) = table.get("registry") { + let url = try!(url(val, &format!("source.{}.registry", name))); + srcs.push(SourceId::for_registry(&url)); + } + if let Some(val) = table.get("local-registry") { + let (s, path) = try!(val.string(&format!("source.{}.local-registry", + name))); + let mut path = path.to_path_buf(); + path.pop(); + path.pop(); + path.push(s); + srcs.push(try!(SourceId::for_local_registry(&path))); + } + + let mut srcs = 
srcs.into_iter(); + let src = try!(srcs.next().chain_error(|| { + human(format!("no source URL specified for `source.{}`, need \ + either `registry` or `local-registry` defined", + name)) + })); + if srcs.next().is_some() { + return Err(human(format!("more than one source URL specified for \ + `source.{}`", name))) + } + + let mut replace_with = None; + if let Some(val) = table.get("replace-with") { + let (s, path) = try!(val.string(&format!("source.{}.replace-with", + name))); + replace_with = Some((s.to_string(), path.to_path_buf())); + } + + self.add(name, SourceConfig { + id: src, + replace_with: replace_with, + }); + + return Ok(()); + + fn url(cfg: &ConfigValue, key: &str) -> CargoResult { + let (url, path) = try!(cfg.string(key)); + url.to_url().chain_error(|| { + human(format!("configuration key `{}` specified an invalid \ + URL (in {})", key, path.display())) + + }) + } + } +} diff --git a/src/cargo/sources/git/utils.rs b/src/cargo/sources/git/utils.rs index d865262d080..83909f75712 100644 --- a/src/cargo/sources/git/utils.rs +++ b/src/cargo/sources/git/utils.rs @@ -257,7 +257,7 @@ impl<'a> GitCheckout<'a> { })); } - let url = try!(source.to_url().map_err(human)); + let url = try!(source.to_url()); let url = url.to_string(); let repo = try!(git2::Repository::clone(&url, into).chain_error(|| { internal(format!("failed to clone {} into {}", source.display(), @@ -278,7 +278,7 @@ impl<'a> GitCheckout<'a> { fn fetch(&self) -> CargoResult<()> { info!("fetch {}", self.repo.path().display()); - let url = try!(self.database.path.to_url().map_err(human)); + let url = try!(self.database.path.to_url()); let url = url.to_string(); let refspec = "refs/heads/*:refs/heads/*"; try!(fetch(&self.repo, &url, refspec)); diff --git a/src/cargo/sources/mod.rs b/src/cargo/sources/mod.rs index 7db73619311..53c573aa38c 100644 --- a/src/cargo/sources/mod.rs +++ b/src/cargo/sources/mod.rs @@ -1,7 +1,11 @@ pub use self::path::PathSource; pub use self::git::GitSource; -pub use 
self::registry::RegistrySource; +pub use self::registry::{RegistrySource, CRATES_IO}; +pub use self::replaced::ReplacedSource; +pub use self::config::SourceConfigMap; pub mod path; pub mod git; pub mod registry; +pub mod config; +pub mod replaced; diff --git a/src/cargo/sources/registry.rs b/src/cargo/sources/registry.rs deleted file mode 100644 index 1c68475e057..00000000000 --- a/src/cargo/sources/registry.rs +++ /dev/null @@ -1,561 +0,0 @@ -//! A `Source` for registry-based packages. -//! -//! # What's a Registry? -//! -//! Registries are central locations where packages can be uploaded to, -//! discovered, and searched for. The purpose of a registry is to have a -//! location that serves as permanent storage for versions of a crate over time. -//! -//! Compared to git sources, a registry provides many packages as well as many -//! versions simultaneously. Git sources can also have commits deleted through -//! rebasings where registries cannot have their versions deleted. -//! -//! # The Index of a Registry -//! -//! One of the major difficulties with a registry is that hosting so many -//! packages may quickly run into performance problems when dealing with -//! dependency graphs. It's infeasible for cargo to download the entire contents -//! of the registry just to resolve one package's dependencies, for example. As -//! a result, cargo needs some efficient method of querying what packages are -//! available on a registry, what versions are available, and what the -//! dependencies for each version is. -//! -//! One method of doing so would be having the registry expose an HTTP endpoint -//! which can be queried with a list of packages and a response of their -//! dependencies and versions is returned. This is somewhat inefficient however -//! as we may have to hit the endpoint many times and we may have already -//! queried for much of the data locally already (for other packages, for -//! example). 
This also involves inventing a transport format between the -//! registry and Cargo itself, so this route was not taken. -//! -//! Instead, Cargo communicates with registries through a git repository -//! referred to as the Index. The Index of a registry is essentially an easily -//! query-able version of the registry's database for a list of versions of a -//! package as well as a list of dependencies for each version. -//! -//! Using git to host this index provides a number of benefits: -//! -//! * The entire index can be stored efficiently locally on disk. This means -//! that all queries of a registry can happen locally and don't need to touch -//! the network. -//! -//! * Updates of the index are quite efficient. Using git buys incremental -//! updates, compressed transmission, etc for free. The index must be updated -//! each time we need fresh information from a registry, but this is one -//! update of a git repository that probably hasn't changed a whole lot so -//! it shouldn't be too expensive. -//! -//! Additionally, each modification to the index is just appending a line at -//! the end of a file (the exact format is described later). This means that -//! the commits for an index are quite small and easily applied/compressable. -//! -//! ## The format of the Index -//! -//! The index is a store for the list of versions for all packages known, so its -//! format on disk is optimized slightly to ensure that `ls registry` doesn't -//! produce a list of all packages ever known. The index also wants to ensure -//! that there's not a million files which may actually end up hitting -//! filesystem limits at some point. To this end, a few decisions were made -//! about the format of the registry: -//! -//! 1. Each crate will have one file corresponding to it. Each version for a -//! crate will just be a line in this file. -//! 2. There will be two tiers of directories for crate names, under which -//! crates corresponding to those tiers will be located. -//! 
-//! As an example, this is an example hierarchy of an index: -//! -//! ```notrust -//! . -//! ├── 3 -//! │   └── u -//! │   └── url -//! ├── bz -//! │   └── ip -//! │   └── bzip2 -//! ├── config.json -//! ├── en -//! │   └── co -//! │   └── encoding -//! └── li -//!    ├── bg -//!    │   └── libgit2 -//!    └── nk -//!    └── link-config -//! ``` -//! -//! The root of the index contains a `config.json` file with a few entries -//! corresponding to the registry (see `RegistryConfig` below). -//! -//! Otherwise, there are three numbered directories (1, 2, 3) for crates with -//! names 1, 2, and 3 characters in length. The 1/2 directories simply have the -//! crate files underneath them, while the 3 directory is sharded by the first -//! letter of the crate name. -//! -//! Otherwise the top-level directory contains many two-letter directory names, -//! each of which has many sub-folders with two letters. At the end of all these -//! are the actual crate files themselves. -//! -//! The purpose of this layout is to hopefully cut down on `ls` sizes as well as -//! efficient lookup based on the crate name itself. -//! -//! ## Crate files -//! -//! Each file in the index is the history of one crate over time. Each line in -//! the file corresponds to one version of a crate, stored in JSON format (see -//! the `RegistryPackage` structure below). -//! -//! As new versions are published, new lines are appended to this file. The only -//! modifications to this file that should happen over time are yanks of a -//! particular version. -//! -//! # Downloading Packages -//! -//! The purpose of the Index was to provide an efficient method to resolve the -//! dependency graph for a package. So far we only required one network -//! interaction to update the registry's repository (yay!). After resolution has -//! been performed, however we need to download the contents of packages so we -//! can read the full manifest and build the source code. -//! -//! 
To accomplish this, this source's `download` method will make an HTTP -//! request per-package requested to download tarballs into a local cache. These -//! tarballs will then be unpacked into a destination folder. -//! -//! Note that because versions uploaded to the registry are frozen forever that -//! the HTTP download and unpacking can all be skipped if the version has -//! already been downloaded and unpacked. This caching allows us to only -//! download a package when absolutely necessary. -//! -//! # Filesystem Hierarchy -//! -//! Overall, the `$HOME/.cargo` looks like this when talking about the registry: -//! -//! ```notrust -//! # A folder under which all registry metadata is hosted (similar to -//! # $HOME/.cargo/git) -//! $HOME/.cargo/registry/ -//! -//! # For each registry that cargo knows about (keyed by hostname + hash) -//! # there is a folder which is the checked out version of the index for -//! # the registry in this location. Note that this is done so cargo can -//! # support multiple registries simultaneously -//! index/ -//! registry1-/ -//! registry2-/ -//! ... -//! -//! # This folder is a cache for all downloaded tarballs from a registry. -//! # Once downloaded and verified, a tarball never changes. -//! cache/ -//! registry1-/-.crate -//! ... -//! -//! # Location in which all tarballs are unpacked. Each tarball is known to -//! # be frozen after downloading, so transitively this folder is also -//! # frozen once its unpacked (it's never unpacked again) -//! src/ -//! registry1-/-/... -//! ... -//! 
``` - -use std::collections::HashMap; -use std::fs::{self, File}; -use std::io::prelude::*; -use std::path::PathBuf; - -use curl::http; -use flate2::read::GzDecoder; -use git2; -use rustc_serialize::hex::ToHex; -use rustc_serialize::json; -use tar::Archive; -use url::Url; - -use core::{Source, SourceId, PackageId, Package, Summary, Registry}; -use core::dependency::{Dependency, DependencyInner, Kind}; -use sources::{PathSource, git}; -use util::{CargoResult, Config, internal, ChainError, ToUrl, human}; -use util::{hex, Sha256, paths}; -use ops; - -static DEFAULT: &'static str = "https://github.com/rust-lang/crates.io-index"; - -pub struct RegistrySource<'cfg> { - source_id: SourceId, - checkout_path: PathBuf, - cache_path: PathBuf, - src_path: PathBuf, - config: &'cfg Config, - handle: Option, - hashes: HashMap<(String, String), String>, // (name, vers) => cksum - cache: HashMap>, - updated: bool, -} - -#[derive(RustcDecodable)] -pub struct RegistryConfig { - /// Download endpoint for all crates. This will be appended with - /// `///download` and then will be hit with an HTTP GET - /// request to download the tarball for a crate. - pub dl: String, - - /// API endpoint for the registry. This is what's actually hit to perform - /// operations like yanks, owner modifications, publish new crates, etc. 
- pub api: String, -} - -#[derive(RustcDecodable)] -struct RegistryPackage { - name: String, - vers: String, - deps: Vec, - features: HashMap>, - cksum: String, - yanked: Option, -} - -#[derive(RustcDecodable)] -struct RegistryDependency { - name: String, - req: String, - features: Vec, - optional: bool, - default_features: bool, - target: Option, - kind: Option, -} - -impl<'cfg> RegistrySource<'cfg> { - pub fn new(source_id: &SourceId, - config: &'cfg Config) -> RegistrySource<'cfg> { - let hash = hex::short_hash(source_id); - let ident = source_id.url().host().unwrap().to_string(); - let part = format!("{}-{}", ident, hash); - RegistrySource { - checkout_path: config.registry_index_path().join(&part), - cache_path: config.registry_cache_path().join(&part), - src_path: config.registry_source_path().join(&part), - config: config, - source_id: source_id.clone(), - handle: None, - hashes: HashMap::new(), - cache: HashMap::new(), - updated: false, - } - } - - /// Get the configured default registry URL. - /// - /// This is the main cargo registry by default, but it can be overridden in - /// a .cargo/config - pub fn url(config: &Config) -> CargoResult { - let config = try!(ops::registry_configuration(config)); - let url = config.index.unwrap_or(DEFAULT.to_string()); - url.to_url().map_err(human) - } - - /// Get the default url for the registry - pub fn default_url() -> String { - DEFAULT.to_string() - } - - /// Decode the configuration stored within the registry. - /// - /// This requires that the index has been at least checked out. - pub fn config(&self) -> CargoResult { - let contents = try!(paths::read(&self.checkout_path.join("config.json"))); - let config = try!(json::decode(&contents)); - Ok(config) - } - - /// Open the git repository for the index of the registry. - /// - /// This will attempt to open an existing checkout, and failing that it will - /// initialize a fresh new directory and git checkout. No remotes will be - /// configured by default. 
- fn open(&self) -> CargoResult { - match git2::Repository::open(&self.checkout_path) { - Ok(repo) => return Ok(repo), - Err(..) => {} - } - - try!(fs::create_dir_all(&self.checkout_path)); - let _ = fs::remove_dir_all(&self.checkout_path); - let repo = try!(git2::Repository::init(&self.checkout_path)); - Ok(repo) - } - - /// Download the given package from the given url into the local cache. - /// - /// This will perform the HTTP request to fetch the package. This function - /// will only succeed if the HTTP download was successful and the file is - /// then ready for inspection. - /// - /// No action is taken if the package is already downloaded. - fn download_package(&mut self, pkg: &PackageId, url: &Url) - -> CargoResult { - // TODO: should discover filename from the S3 redirect - let filename = format!("{}-{}.crate", pkg.name(), pkg.version()); - let dst = self.cache_path.join(&filename); - if fs::metadata(&dst).is_ok() { return Ok(dst) } - try!(self.config.shell().status("Downloading", pkg)); - - try!(fs::create_dir_all(dst.parent().unwrap())); - let expected_hash = try!(self.hash(pkg)); - let handle = match self.handle { - Some(ref mut handle) => handle, - None => { - self.handle = Some(try!(ops::http_handle(self.config))); - self.handle.as_mut().unwrap() - } - }; - // TODO: don't download into memory (curl-rust doesn't expose it) - let resp = try!(handle.get(url.to_string()).follow_redirects(true).exec()); - if resp.get_code() != 200 && resp.get_code() != 0 { - return Err(internal(format!("failed to get 200 response from {}\n{}", - url, resp))) - } - - // Verify what we just downloaded - let actual = { - let mut state = Sha256::new(); - state.update(resp.get_body()); - state.finish() - }; - if actual.to_hex() != expected_hash { - bail!("failed to verify the checksum of `{}`", pkg) - } - - try!(paths::write(&dst, resp.get_body())); - Ok(dst) - } - - /// Return the hash listed for a specified PackageId. 
- fn hash(&mut self, pkg: &PackageId) -> CargoResult { - let key = (pkg.name().to_string(), pkg.version().to_string()); - if let Some(s) = self.hashes.get(&key) { - return Ok(s.clone()) - } - // Ok, we're missing the key, so parse the index file to load it. - try!(self.summaries(pkg.name())); - self.hashes.get(&key).chain_error(|| { - internal(format!("no hash listed for {}", pkg)) - }).map(|s| s.clone()) - } - - /// Unpacks a downloaded package into a location where it's ready to be - /// compiled. - /// - /// No action is taken if the source looks like it's already unpacked. - fn unpack_package(&self, pkg: &PackageId, tarball: PathBuf) - -> CargoResult { - let dst = self.src_path.join(&format!("{}-{}", pkg.name(), - pkg.version())); - if fs::metadata(&dst.join(".cargo-ok")).is_ok() { return Ok(dst) } - - try!(fs::create_dir_all(dst.parent().unwrap())); - let f = try!(File::open(&tarball)); - let gz = try!(GzDecoder::new(f)); - let mut tar = Archive::new(gz); - try!(tar.unpack(dst.parent().unwrap())); - try!(File::create(&dst.join(".cargo-ok"))); - Ok(dst) - } - - /// Parse the on-disk metadata for the package provided - pub fn summaries(&mut self, name: &str) -> CargoResult<&Vec<(Summary, bool)>> { - if self.cache.contains_key(name) { - return Ok(self.cache.get(name).unwrap()); - } - // see module comment for why this is structured the way it is - let path = self.checkout_path.clone(); - let fs_name = name.chars().flat_map(|c| c.to_lowercase()).collect::(); - let path = match fs_name.len() { - 1 => path.join("1").join(&fs_name), - 2 => path.join("2").join(&fs_name), - 3 => path.join("3").join(&fs_name[..1]).join(&fs_name), - _ => path.join(&fs_name[0..2]) - .join(&fs_name[2..4]) - .join(&fs_name), - }; - let summaries = match File::open(&path) { - Ok(mut f) => { - let mut contents = String::new(); - try!(f.read_to_string(&mut contents)); - let ret: CargoResult>; - ret = contents.lines().filter(|l| l.trim().len() > 0) - .map(|l| self.parse_registry_package(l)) - 
.collect(); - try!(ret.chain_error(|| { - internal(format!("failed to parse registry's information \ - for: {}", name)) - })) - } - Err(..) => Vec::new(), - }; - let summaries = summaries.into_iter().filter(|summary| { - summary.0.package_id().name() == name - }).collect(); - self.cache.insert(name.to_string(), summaries); - Ok(self.cache.get(name).unwrap()) - } - - /// Parse a line from the registry's index file into a Summary for a - /// package. - /// - /// The returned boolean is whether or not the summary has been yanked. - fn parse_registry_package(&mut self, line: &str) - -> CargoResult<(Summary, bool)> { - let RegistryPackage { - name, vers, cksum, deps, features, yanked - } = try!(json::decode::(line)); - let pkgid = try!(PackageId::new(&name, &vers, &self.source_id)); - let deps: CargoResult> = deps.into_iter().map(|dep| { - self.parse_registry_dependency(dep) - }).collect(); - let deps = try!(deps); - self.hashes.insert((name, vers), cksum); - Ok((try!(Summary::new(pkgid, deps, features)), yanked.unwrap_or(false))) - } - - /// Converts an encoded dependency in the registry to a cargo dependency - fn parse_registry_dependency(&self, dep: RegistryDependency) - -> CargoResult { - let RegistryDependency { - name, req, features, optional, default_features, target, kind - } = dep; - - let dep = try!(DependencyInner::parse(&name, Some(&req), - &self.source_id)); - let kind = match kind.as_ref().map(|s| &s[..]).unwrap_or("") { - "dev" => Kind::Development, - "build" => Kind::Build, - _ => Kind::Normal, - }; - - let platform = match target { - Some(target) => Some(try!(target.parse())), - None => None, - }; - - // Unfortunately older versions of cargo and/or the registry ended up - // publishing lots of entries where the features array contained the - // empty feature, "", inside. This confuses the resolution process much - // later on and these features aren't actually valid, so filter them all - // out here. 
- let features = features.into_iter().filter(|s| !s.is_empty()).collect(); - - Ok(dep.set_optional(optional) - .set_default_features(default_features) - .set_features(features) - .set_platform(platform) - .set_kind(kind) - .into_dependency()) - } - - /// Actually perform network operations to update the registry - fn do_update(&mut self) -> CargoResult<()> { - if self.updated { return Ok(()) } - - try!(self.config.shell().status("Updating", - format!("registry `{}`", self.source_id.url()))); - let repo = try!(self.open()); - - // git fetch origin - let url = self.source_id.url().to_string(); - let refspec = "refs/heads/*:refs/remotes/origin/*"; - try!(git::fetch(&repo, &url, refspec).chain_error(|| { - internal(format!("failed to fetch `{}`", url)) - })); - - // git reset --hard origin/master - let reference = "refs/remotes/origin/master"; - let oid = try!(repo.refname_to_id(reference)); - trace!("[{}] updating to rev {}", self.source_id, oid); - let object = try!(repo.find_object(oid, None)); - try!(repo.reset(&object, git2::ResetType::Hard, None)); - self.updated = true; - self.cache.clear(); - Ok(()) - } -} - -impl<'cfg> Registry for RegistrySource<'cfg> { - fn query(&mut self, dep: &Dependency) -> CargoResult> { - // If this is a precise dependency, then it came from a lockfile and in - // theory the registry is known to contain this version. If, however, we - // come back with no summaries, then our registry may need to be - // updated, so we fall back to performing a lazy update. - if dep.source_id().precise().is_some() { - let mut summaries = try!(self.summaries(dep.name())).iter().map(|s| { - s.0.clone() - }).collect::>(); - if try!(summaries.query(dep)).is_empty() { - try!(self.do_update()); - } - } - - let mut summaries = { - let summaries = try!(self.summaries(dep.name())); - summaries.iter().filter(|&&(_, yanked)| { - dep.source_id().precise().is_some() || !yanked - }).map(|s| s.0.clone()).collect::>() - }; - - // Handle `cargo update --precise` here. 
If specified, our own source - // will have a precise version listed of the form `=` where - // `` is the name of a crate on this source and `` is the - // version requested (agument to `--precise`). - summaries.retain(|s| { - match self.source_id.precise() { - Some(p) if p.starts_with(dep.name()) && - p[dep.name().len()..].starts_with("=") => { - let vers = &p[dep.name().len() + 1..]; - s.version().to_string() == vers - } - _ => true, - } - }); - summaries.query(dep) - } -} - -impl<'cfg> Source for RegistrySource<'cfg> { - fn update(&mut self) -> CargoResult<()> { - // If we have an imprecise version then we don't know what we're going - // to look for, so we always attempt to perform an update here. - // - // If we have a precise version, then we'll update lazily during the - // querying phase. Note that precise in this case is only - // `Some("locked")` as other `Some` values indicate a `cargo update - // --precise` request - if self.source_id.precise() != Some("locked") { - try!(self.do_update()); - } - Ok(()) - } - - fn download(&mut self, package: &PackageId) -> CargoResult { - let config = try!(self.config()); - let url = try!(config.dl.to_url().map_err(internal)); - let mut url = url.clone(); - url.path_mut().unwrap().push(package.name().to_string()); - url.path_mut().unwrap().push(package.version().to_string()); - url.path_mut().unwrap().push("download".to_string()); - let path = try!(self.download_package(package, &url).chain_error(|| { - internal(format!("failed to download package `{}` from {}", - package, url)) - })); - let path = try!(self.unpack_package(package, path).chain_error(|| { - internal(format!("failed to unpack package `{}`", package)) - })); - - let mut src = PathSource::new(&path, &self.source_id, self.config); - try!(src.update()); - src.download(package) - } - - fn fingerprint(&self, pkg: &Package) -> CargoResult { - Ok(pkg.package_id().version().to_string()) - } -} diff --git a/src/cargo/sources/registry/index.rs 
b/src/cargo/sources/registry/index.rs new file mode 100644 index 00000000000..260f5a43d5d --- /dev/null +++ b/src/cargo/sources/registry/index.rs @@ -0,0 +1,167 @@ +use std::collections::HashMap; +use std::io::prelude::*; +use std::fs::File; +use std::path::{Path, PathBuf}; + +use rustc_serialize::json; + +use core::dependency::{Dependency, DependencyInner, Kind}; +use core::{SourceId, Summary, PackageId, Registry}; +use sources::registry::{RegistryPackage, RegistryDependency}; +use util::{CargoResult, ChainError, internal}; + +pub struct RegistryIndex { + source_id: SourceId, + path: PathBuf, + cache: HashMap>, + hashes: HashMap<(String, String), String>, // (name, vers) => cksum +} + +impl RegistryIndex { + pub fn new(id: &SourceId, path: &Path) -> RegistryIndex { + RegistryIndex { + source_id: id.clone(), + path: path.to_path_buf(), + cache: HashMap::new(), + hashes: HashMap::new(), + } + } + + /// Return the hash listed for a specified PackageId. + pub fn hash(&mut self, pkg: &PackageId) -> CargoResult { + let key = (pkg.name().to_string(), pkg.version().to_string()); + if let Some(s) = self.hashes.get(&key) { + return Ok(s.clone()) + } + // Ok, we're missing the key, so parse the index file to load it. + try!(self.summaries(pkg.name())); + self.hashes.get(&key).chain_error(|| { + internal(format!("no hash listed for {}", pkg)) + }).map(|s| s.clone()) + } + + /// Parse the on-disk metadata for the package provided + /// + /// Returns a list of pairs of (summary, yanked) for the package name + /// specified. 
+ pub fn summaries(&mut self, name: &str) -> CargoResult<&Vec<(Summary, bool)>> { + if self.cache.contains_key(name) { + return Ok(self.cache.get(name).unwrap()); + } + // see module comment for why this is structured the way it is + let path = self.path.clone(); + let fs_name = name.chars().flat_map(|c| c.to_lowercase()).collect::(); + let path = match fs_name.len() { + 1 => path.join("1").join(&fs_name), + 2 => path.join("2").join(&fs_name), + 3 => path.join("3").join(&fs_name[..1]).join(&fs_name), + _ => path.join(&fs_name[0..2]) + .join(&fs_name[2..4]) + .join(&fs_name), + }; + let summaries = match File::open(&path) { + Ok(mut f) => { + let mut contents = String::new(); + try!(f.read_to_string(&mut contents)); + let ret: CargoResult>; + ret = contents.lines().filter(|l| l.trim().len() > 0) + .map(|l| self.parse_registry_package(l)) + .collect(); + try!(ret.chain_error(|| { + internal(format!("failed to parse registry's information \ + for: {}", name)) + })) + } + Err(..) => Vec::new(), + }; + let summaries = summaries.into_iter().filter(|summary| { + summary.0.package_id().name() == name + }).collect(); + self.cache.insert(name.to_string(), summaries); + Ok(self.cache.get(name).unwrap()) + } + + /// Parse a line from the registry's index file into a Summary for a + /// package. + /// + /// The returned boolean is whether or not the summary has been yanked. 
+ fn parse_registry_package(&mut self, line: &str) + -> CargoResult<(Summary, bool)> { + let RegistryPackage { + name, vers, cksum, deps, features, yanked + } = try!(json::decode::(line)); + let pkgid = try!(PackageId::new(&name, &vers, &self.source_id)); + let deps: CargoResult> = deps.into_iter().map(|dep| { + self.parse_registry_dependency(dep) + }).collect(); + let deps = try!(deps); + let summary = try!(Summary::new(pkgid, deps, features)); + let summary = summary.set_checksum(cksum.clone()); + self.hashes.insert((name, vers), cksum); + Ok((summary, yanked.unwrap_or(false))) + } + + /// Converts an encoded dependency in the registry to a cargo dependency + fn parse_registry_dependency(&self, dep: RegistryDependency) + -> CargoResult { + let RegistryDependency { + name, req, features, optional, default_features, target, kind + } = dep; + + let dep = try!(DependencyInner::parse(&name, Some(&req), + &self.source_id)); + let kind = match kind.as_ref().map(|s| &s[..]).unwrap_or("") { + "dev" => Kind::Development, + "build" => Kind::Build, + _ => Kind::Normal, + }; + + let platform = match target { + Some(target) => Some(try!(target.parse())), + None => None, + }; + + // Unfortunately older versions of cargo and/or the registry ended up + // publishing lots of entries where the features array contained the + // empty feature, "", inside. This confuses the resolution process much + // later on and these features aren't actually valid, so filter them all + // out here. 
+ let features = features.into_iter().filter(|s| !s.is_empty()).collect(); + + Ok(dep.set_optional(optional) + .set_default_features(default_features) + .set_features(features) + .set_platform(platform) + .set_kind(kind) + .into_dependency()) + } +} + +impl Registry for RegistryIndex { + fn query(&mut self, dep: &Dependency) -> CargoResult> { + let mut summaries = { + let summaries = try!(self.summaries(dep.name())); + summaries.iter().filter(|&&(_, yanked)| { + dep.source_id().precise().is_some() || !yanked + }).map(|s| s.0.clone()).collect::>() + }; + + // Handle `cargo update --precise` here. If specified, our own source + // will have a precise version listed of the form `=` where + // `` is the name of a crate on this source and `` is the + // version requested (agument to `--precise`). + summaries.retain(|s| { + match self.source_id.precise() { + Some(p) if p.starts_with(dep.name()) && + p[dep.name().len()..].starts_with("=") => { + let vers = &p[dep.name().len() + 1..]; + s.version().to_string() == vers + } + _ => true, + } + }); + summaries.query(dep) + } + + fn supports_checksums(&self) -> bool { true } +} diff --git a/src/cargo/sources/registry/local.rs b/src/cargo/sources/registry/local.rs new file mode 100644 index 00000000000..aa44e4cbc0d --- /dev/null +++ b/src/cargo/sources/registry/local.rs @@ -0,0 +1,89 @@ +use std::fs::{self, File}; +use std::io::prelude::*; +use std::path::{PathBuf, Path}; + +use rustc_serialize::hex::ToHex; + +use core::PackageId; +use sources::registry::{RegistryData, RegistryConfig}; +use util::{Config, CargoResult, ChainError, human, Sha256}; + +pub struct LocalRegistry<'cfg> { + index_path: PathBuf, + root: PathBuf, + src_path: PathBuf, + config: &'cfg Config, +} + +impl<'cfg> LocalRegistry<'cfg> { + pub fn new(root: &Path, + config: &'cfg Config, + name: &str) -> LocalRegistry<'cfg> { + LocalRegistry { + src_path: config.registry_source_path().join(name), + index_path: root.join("index"), + root: root.to_path_buf(), + 
config: config, + } + } +} + +impl<'cfg> RegistryData for LocalRegistry<'cfg> { + fn index_path(&self) -> &Path { + &self.index_path + } + + fn config(&self) -> CargoResult> { + // Local registries don't have configuration for remote APIs or anything + // like that + Ok(None) + } + + fn update_index(&mut self) -> CargoResult<()> { + // Nothing to update, we just use what's on disk. Verify it actually + // exists though + if !self.root.is_dir() { + bail!("local registry path is not a directory: {}", + self.root.display()) + } + if !self.index_path.is_dir() { + bail!("local registry index path is not a directory: {}", + self.index_path.display()) + } + Ok(()) + } + + fn download(&mut self, pkg: &PackageId, checksum: &str) + -> CargoResult { + let crate_file = format!("{}-{}.crate", pkg.name(), pkg.version()); + let crate_file = self.root.join(&crate_file); + + // If we've already got an unpacked version of this crate, then skip the + // checksum below as it is in theory already verified. + let dst = format!("{}-{}", pkg.name(), pkg.version()); + let dst = self.src_path.join(&dst); + if fs::metadata(&dst).is_ok() { + return Ok(crate_file) + } + + try!(self.config.shell().status("Unpacking", pkg)); + + // We don't actually need to download anything per-se, we just need to + // verify the checksum matches the .crate file itself. 
+ let mut file = try!(File::open(&crate_file).chain_error(|| { + human(format!("failed to read `{}` for `{}`", crate_file.display(), + pkg)) + })); + let mut data = Vec::new(); + try!(file.read_to_end(&mut data).chain_error(|| { + human(format!("failed to read `{}`", crate_file.display())) + })); + let mut state = Sha256::new(); + state.update(&data); + if state.finish().to_hex() != checksum { + bail!("failed to verify the checksum of `{}`", pkg) + } + + Ok(crate_file) + } +} diff --git a/src/cargo/sources/registry/mod.rs b/src/cargo/sources/registry/mod.rs new file mode 100644 index 00000000000..5d408e513fe --- /dev/null +++ b/src/cargo/sources/registry/mod.rs @@ -0,0 +1,352 @@ +//! A `Source` for registry-based packages. +//! +//! # What's a Registry? +//! +//! Registries are central locations where packages can be uploaded to, +//! discovered, and searched for. The purpose of a registry is to have a +//! location that serves as permanent storage for versions of a crate over time. +//! +//! Compared to git sources, a registry provides many packages as well as many +//! versions simultaneously. Git sources can also have commits deleted through +//! rebasings where registries cannot have their versions deleted. +//! +//! # The Index of a Registry +//! +//! One of the major difficulties with a registry is that hosting so many +//! packages may quickly run into performance problems when dealing with +//! dependency graphs. It's infeasible for cargo to download the entire contents +//! of the registry just to resolve one package's dependencies, for example. As +//! a result, cargo needs some efficient method of querying what packages are +//! available on a registry, what versions are available, and what the +//! dependencies for each version is. +//! +//! One method of doing so would be having the registry expose an HTTP endpoint +//! which can be queried with a list of packages and a response of their +//! dependencies and versions is returned. 
This is somewhat inefficient however +//! as we may have to hit the endpoint many times and we may have already +//! queried for much of the data locally already (for other packages, for +//! example). This also involves inventing a transport format between the +//! registry and Cargo itself, so this route was not taken. +//! +//! Instead, Cargo communicates with registries through a git repository +//! referred to as the Index. The Index of a registry is essentially an easily +//! query-able version of the registry's database for a list of versions of a +//! package as well as a list of dependencies for each version. +//! +//! Using git to host this index provides a number of benefits: +//! +//! * The entire index can be stored efficiently locally on disk. This means +//! that all queries of a registry can happen locally and don't need to touch +//! the network. +//! +//! * Updates of the index are quite efficient. Using git buys incremental +//! updates, compressed transmission, etc for free. The index must be updated +//! each time we need fresh information from a registry, but this is one +//! update of a git repository that probably hasn't changed a whole lot so +//! it shouldn't be too expensive. +//! +//! Additionally, each modification to the index is just appending a line at +//! the end of a file (the exact format is described later). This means that +//! the commits for an index are quite small and easily applied/compressable. +//! +//! ## The format of the Index +//! +//! The index is a store for the list of versions for all packages known, so its +//! format on disk is optimized slightly to ensure that `ls registry` doesn't +//! produce a list of all packages ever known. The index also wants to ensure +//! that there's not a million files which may actually end up hitting +//! filesystem limits at some point. To this end, a few decisions were made +//! about the format of the registry: +//! +//! 1. Each crate will have one file corresponding to it. 
Each version for a +//! crate will just be a line in this file. +//! 2. There will be two tiers of directories for crate names, under which +//! crates corresponding to those tiers will be located. +//! +//! As an example, this is an example hierarchy of an index: +//! +//! ```notrust +//! . +//! ├── 3 +//! │   └── u +//! │   └── url +//! ├── bz +//! │   └── ip +//! │   └── bzip2 +//! ├── config.json +//! ├── en +//! │   └── co +//! │   └── encoding +//! └── li +//!    ├── bg +//!    │   └── libgit2 +//!    └── nk +//!    └── link-config +//! ``` +//! +//! The root of the index contains a `config.json` file with a few entries +//! corresponding to the registry (see `RegistryConfig` below). +//! +//! Otherwise, there are three numbered directories (1, 2, 3) for crates with +//! names 1, 2, and 3 characters in length. The 1/2 directories simply have the +//! crate files underneath them, while the 3 directory is sharded by the first +//! letter of the crate name. +//! +//! Otherwise the top-level directory contains many two-letter directory names, +//! each of which has many sub-folders with two letters. At the end of all these +//! are the actual crate files themselves. +//! +//! The purpose of this layout is to hopefully cut down on `ls` sizes as well as +//! efficient lookup based on the crate name itself. +//! +//! ## Crate files +//! +//! Each file in the index is the history of one crate over time. Each line in +//! the file corresponds to one version of a crate, stored in JSON format (see +//! the `RegistryPackage` structure below). +//! +//! As new versions are published, new lines are appended to this file. The only +//! modifications to this file that should happen over time are yanks of a +//! particular version. +//! +//! # Downloading Packages +//! +//! The purpose of the Index was to provide an efficient method to resolve the +//! dependency graph for a package. So far we only required one network +//! 
interaction to update the registry's repository (yay!). After resolution has +//! been performed, however we need to download the contents of packages so we +//! can read the full manifest and build the source code. +//! +//! To accomplish this, this source's `download` method will make an HTTP +//! request per-package requested to download tarballs into a local cache. These +//! tarballs will then be unpacked into a destination folder. +//! +//! Note that because versions uploaded to the registry are frozen forever that +//! the HTTP download and unpacking can all be skipped if the version has +//! already been downloaded and unpacked. This caching allows us to only +//! download a package when absolutely necessary. +//! +//! # Filesystem Hierarchy +//! +//! Overall, the `$HOME/.cargo` looks like this when talking about the registry: +//! +//! ```notrust +//! # A folder under which all registry metadata is hosted (similar to +//! # $HOME/.cargo/git) +//! $HOME/.cargo/registry/ +//! +//! # For each registry that cargo knows about (keyed by hostname + hash) +//! # there is a folder which is the checked out version of the index for +//! # the registry in this location. Note that this is done so cargo can +//! # support multiple registries simultaneously +//! index/ +//! registry1-/ +//! registry2-/ +//! ... +//! +//! # This folder is a cache for all downloaded tarballs from a registry. +//! # Once downloaded and verified, a tarball never changes. +//! cache/ +//! registry1-/-.crate +//! ... +//! +//! # Location in which all tarballs are unpacked. Each tarball is known to +//! # be frozen after downloading, so transitively this folder is also +//! # frozen once its unpacked (it's never unpacked again) +//! src/ +//! registry1-/-/... +//! ... +//! 
``` + +use std::collections::HashMap; +use std::fs::{self, File}; +use std::path::{PathBuf, Path}; + +use flate2::read::GzDecoder; +use tar::Archive; + +use core::{Source, SourceId, PackageId, Package, Summary, Registry}; +use core::dependency::Dependency; +use sources::PathSource; +use util::{CargoResult, Config, internal, ChainError}; +use util::hex; + +pub static CRATES_IO: &'static str = "https://github.com/rust-lang/crates.io-index"; + +pub struct RegistrySource<'cfg> { + source_id: SourceId, + src_path: PathBuf, + config: &'cfg Config, + updated: bool, + ops: Box, + index: index::RegistryIndex, +} + +#[derive(RustcDecodable)] +pub struct RegistryConfig { + /// Download endpoint for all crates. This will be appended with + /// `///download` and then will be hit with an HTTP GET + /// request to download the tarball for a crate. + pub dl: String, + + /// API endpoint for the registry. This is what's actually hit to perform + /// operations like yanks, owner modifications, publish new crates, etc. 
+ pub api: String, +} + +#[derive(RustcDecodable)] +struct RegistryPackage { + name: String, + vers: String, + deps: Vec, + features: HashMap>, + cksum: String, + yanked: Option, +} + +#[derive(RustcDecodable)] +struct RegistryDependency { + name: String, + req: String, + features: Vec, + optional: bool, + default_features: bool, + target: Option, + kind: Option, +} + +pub trait RegistryData { + fn index_path(&self) -> &Path; + fn config(&self) -> CargoResult>; + fn update_index(&mut self) -> CargoResult<()>; + fn download(&mut self, + pkg: &PackageId, + checksum: &str) -> CargoResult; +} + +mod index; +mod remote; +mod local; + +fn short_name(id: &SourceId) -> String { + let hash = hex::short_hash(id); + let ident = id.url().host().unwrap().to_string(); + format!("{}-{}", ident, hash) +} + +impl<'cfg> RegistrySource<'cfg> { + pub fn remote(source_id: &SourceId, + config: &'cfg Config) -> RegistrySource<'cfg> { + let name = short_name(source_id); + let ops = remote::RemoteRegistry::new(source_id, config, &name); + RegistrySource::new(source_id, config, &name, Box::new(ops)) + } + + pub fn local(source_id: &SourceId, + path: &Path, + config: &'cfg Config) -> RegistrySource<'cfg> { + let name = short_name(source_id); + let ops = local::LocalRegistry::new(path, config, &name); + RegistrySource::new(source_id, config, &name, Box::new(ops)) + } + + fn new(source_id: &SourceId, + config: &'cfg Config, + name: &str, + ops: Box) -> RegistrySource<'cfg> { + RegistrySource { + src_path: config.registry_source_path().join(name), + config: config, + source_id: source_id.clone(), + updated: false, + index: index::RegistryIndex::new(source_id, ops.index_path()), + ops: ops, + } + } + + /// Decode the configuration stored within the registry. + /// + /// This requires that the index has been at least checked out. + pub fn config(&self) -> CargoResult> { + self.ops.config() + } + + /// Unpacks a downloaded package into a location where it's ready to be + /// compiled. 
+ /// + /// No action is taken if the source looks like it's already unpacked. + fn unpack_package(&self, pkg: &PackageId, tarball: PathBuf) + -> CargoResult { + let dst = self.src_path.join(&format!("{}-{}", pkg.name(), + pkg.version())); + if fs::metadata(&dst.join(".cargo-ok")).is_ok() { + return Ok(dst) + } + + try!(fs::create_dir_all(dst.parent().unwrap())); + let f = try!(File::open(&tarball)); + let gz = try!(GzDecoder::new(f)); + let mut tar = Archive::new(gz); + try!(tar.unpack(dst.parent().unwrap())); + try!(File::create(&dst.join(".cargo-ok"))); + Ok(dst) + } + + fn do_update(&mut self) -> CargoResult<()> { + try!(self.ops.update_index()); + let path = self.ops.index_path(); + self.index = index::RegistryIndex::new(&self.source_id, path); + Ok(()) + } +} + +impl<'cfg> Registry for RegistrySource<'cfg> { + fn query(&mut self, dep: &Dependency) -> CargoResult> { + // If this is a precise dependency, then it came from a lockfile and in + // theory the registry is known to contain this version. If, however, we + // come back with no summaries, then our registry may need to be + // updated, so we fall back to performing a lazy update. + if dep.source_id().precise().is_some() && !self.updated { + if try!(self.index.query(dep)).is_empty() { + try!(self.do_update()); + } + } + + self.index.query(dep) + } + + fn supports_checksums(&self) -> bool { + true + } +} + +impl<'cfg> Source for RegistrySource<'cfg> { + fn update(&mut self) -> CargoResult<()> { + // If we have an imprecise version then we don't know what we're going + // to look for, so we always attempt to perform an update here. + // + // If we have a precise version, then we'll update lazily during the + // querying phase. 
Note that precise in this case is only + // `Some("locked")` as other `Some` values indicate a `cargo update + // --precise` request + if self.source_id.precise() != Some("locked") { + try!(self.do_update()); + } + Ok(()) + } + + fn download(&mut self, package: &PackageId) -> CargoResult { + let hash = try!(self.index.hash(package)); + let path = try!(self.ops.download(package, &hash)); + let path = try!(self.unpack_package(package, path).chain_error(|| { + internal(format!("failed to unpack package `{}`", package)) + })); + let mut src = PathSource::new(&path, &self.source_id, self.config); + try!(src.update()); + src.download(package) + } + + fn fingerprint(&self, pkg: &Package) -> CargoResult { + Ok(pkg.package_id().version().to_string()) + } +} diff --git a/src/cargo/sources/registry/remote.rs b/src/cargo/sources/registry/remote.rs new file mode 100644 index 00000000000..2e820fced33 --- /dev/null +++ b/src/cargo/sources/registry/remote.rs @@ -0,0 +1,131 @@ +use std::fs; +use std::path::{PathBuf, Path}; + +use curl::http; +use git2; +use rustc_serialize::json; +use rustc_serialize::hex::ToHex; +use url::Url; + +use core::{PackageId, SourceId}; +use ops; +use sources::git; +use sources::registry::{RegistryData, RegistryConfig}; +use util::paths; +use util::{Config, CargoResult, ChainError, human, internal, Sha256, ToUrl}; + +pub struct RemoteRegistry<'cfg> { + index_path: PathBuf, + cache_path: PathBuf, + source_id: SourceId, + config: &'cfg Config, + handle: Option, +} + +impl<'cfg> RemoteRegistry<'cfg> { + pub fn new(source_id: &SourceId, config: &'cfg Config, name: &str) + -> RemoteRegistry<'cfg> { + RemoteRegistry { + index_path: config.registry_index_path().join(name), + cache_path: config.registry_cache_path().join(name), + source_id: source_id.clone(), + config: config, + handle: None, + } + } + + fn download(&mut self, url: &Url) -> CargoResult> { + let handle = match self.handle { + Some(ref mut handle) => handle, + None => { + self.handle = 
Some(try!(ops::http_handle(self.config))); + self.handle.as_mut().unwrap() + } + }; + // TODO: don't download into memory (curl-rust doesn't expose it) + let resp = try!(handle.get(url.to_string()).follow_redirects(true).exec()); + if resp.get_code() != 200 && resp.get_code() != 0 { + Err(internal(format!("failed to get 200 response from {}\n{}", + url, resp))) + } else { + Ok(resp.move_body()) + } + } +} + +impl<'cfg> RegistryData for RemoteRegistry<'cfg> { + fn index_path(&self) -> &Path { + &self.index_path + } + + fn config(&self) -> CargoResult> { + let contents = try!(paths::read(&self.index_path.join("config.json"))); + let config = try!(json::decode(&contents)); + Ok(Some(config)) + } + + fn update_index(&mut self) -> CargoResult<()> { + let msg = format!("registry `{}`", self.source_id.url()); + try!(self.config.shell().status("Updating", msg)); + + let repo = match git2::Repository::open(&self.index_path) { + Ok(repo) => repo, + Err(..) => { + try!(fs::create_dir_all(&self.index_path)); + let _ = fs::remove_dir_all(&self.index_path); + try!(git2::Repository::init(&self.index_path)) + } + }; + + // git fetch origin + let url = self.source_id.url().to_string(); + let refspec = "refs/heads/*:refs/remotes/origin/*"; + try!(git::fetch(&repo, &url, refspec).chain_error(|| { + human(format!("failed to fetch `{}`", url)) + })); + + // git reset --hard origin/master + let reference = "refs/remotes/origin/master"; + let oid = try!(repo.refname_to_id(reference)); + trace!("[{}] updating to rev {}", self.source_id, oid); + let object = try!(repo.find_object(oid, None)); + try!(repo.reset(&object, git2::ResetType::Hard, None)); + Ok(()) + } + + fn download(&mut self, pkg: &PackageId, checksum: &str) + -> CargoResult { + // TODO: should discover filename from the S3 redirect + let filename = format!("{}-{}.crate", pkg.name(), pkg.version()); + let dst = self.cache_path.join(&filename); + if fs::metadata(&dst).is_ok() { + return Ok(dst) + } + + 
try!(fs::create_dir_all(dst.parent().unwrap())); + + let config = try!(self.config()).unwrap(); + let mut url = try!(config.dl.to_url().map_err(internal)); + url.path_mut().unwrap().push(pkg.name().to_string()); + url.path_mut().unwrap().push(pkg.version().to_string()); + url.path_mut().unwrap().push("download".to_string()); + + try!(self.config.shell().status("Downloading", pkg)); + let data = try!(self.download(&url).chain_error(|| { + human(format!("failed to download package `{}` from {}", pkg, url)) + })); + + // Verify what we just downloaded + let actual = { + let mut state = Sha256::new(); + state.update(&data); + state.finish() + }; + if actual.to_hex() != checksum { + bail!("failed to verify the checksum of `{}`", pkg) + } + + try!(paths::write(&dst, &data)); + Ok(dst) + } +} diff --git a/src/cargo/sources/replaced.rs b/src/cargo/sources/replaced.rs new file mode 100644 index 00000000000..cd0ffd4b1ea --- /dev/null +++ b/src/cargo/sources/replaced.rs @@ -0,0 +1,55 @@ +use core::{Source, Registry, PackageId, Package, Dependency, Summary, SourceId}; +use util::{CargoResult, ChainError, human}; + +pub struct ReplacedSource<'cfg> { + to_replace: SourceId, + replace_with: SourceId, + inner: Box, +} + +impl<'cfg> ReplacedSource<'cfg> { + pub fn new(to_replace: &SourceId, + replace_with: &SourceId, + src: Box) -> ReplacedSource<'cfg> { + ReplacedSource { + to_replace: to_replace.clone(), + replace_with: replace_with.clone(), + inner: src, + } + } +} + +impl<'cfg> Registry for ReplacedSource<'cfg> { + fn query(&mut self, dep: &Dependency) -> CargoResult> { + let dep = dep.clone().map_source(&self.to_replace, &self.replace_with); + let ret = try!(self.inner.query(&dep).chain_error(|| { + human(format!("failed to query replaced source `{}`", + self.to_replace)) + })); + Ok(ret.into_iter().map(|summary| { + summary.map_source(&self.replace_with, &self.to_replace) + }).collect()) + } +} + +impl<'cfg> Source for ReplacedSource<'cfg> { + fn update(&mut self) -> 
CargoResult<()> { + self.inner.update().chain_error(|| { + human(format!("failed to update replaced source `{}`", + self.to_replace)) + }) + } + + fn download(&mut self, id: &PackageId) -> CargoResult { + let id = id.with_source_id(&self.replace_with); + let pkg = try!(self.inner.download(&id).chain_error(|| { + human(format!("failed to download replaced source `{}`", + self.to_replace)) + })); + Ok(pkg.map_source(&self.replace_with, &self.to_replace)) + } + + fn fingerprint(&self, id: &Package) -> CargoResult { + self.inner.fingerprint(id) + } +} diff --git a/src/cargo/util/config.rs b/src/cargo/util/config.rs index 4df18656db4..61c09dafb78 100644 --- a/src/cargo/util/config.rs +++ b/src/cargo/util/config.rs @@ -264,7 +264,7 @@ impl Config { } pub fn expected(&self, ty: &str, key: &str, val: CV) -> CargoResult { - val.expected(ty).map_err(|e| { + val.expected(ty, key).map_err(|e| { human(format!("invalid configuration for key `{}`\n{}", key, e)) }) } @@ -477,38 +477,39 @@ impl ConfigValue { Ok(()) } - pub fn i64(&self) -> CargoResult<(i64, &Path)> { + pub fn i64(&self, key: &str) -> CargoResult<(i64, &Path)> { match *self { CV::Integer(i, ref p) => Ok((i, p)), - _ => self.expected("integer"), + _ => self.expected("integer", key), } } - pub fn string(&self) -> CargoResult<(&str, &Path)> { + pub fn string(&self, key: &str) -> CargoResult<(&str, &Path)> { match *self { CV::String(ref s, ref p) => Ok((s, p)), - _ => self.expected("string"), + _ => self.expected("string", key), } } - pub fn table(&self) -> CargoResult<(&HashMap, &Path)> { + pub fn table(&self, key: &str) + -> CargoResult<(&HashMap, &Path)> { match *self { CV::Table(ref table, ref p) => Ok((table, p)), - _ => self.expected("table"), + _ => self.expected("table", key), } } - pub fn list(&self) -> CargoResult<&[(String, PathBuf)]> { + pub fn list(&self, key: &str) -> CargoResult<&[(String, PathBuf)]> { match *self { CV::List(ref list, _) => Ok(list), - _ => self.expected("list"), + _ => 
self.expected("list", key), } } - pub fn boolean(&self) -> CargoResult<(bool, &Path)> { + pub fn boolean(&self, key: &str) -> CargoResult<(bool, &Path)> { match *self { CV::Boolean(b, ref p) => Ok((b, p)), - _ => self.expected("bool"), + _ => self.expected("bool", key), } } @@ -532,10 +533,10 @@ impl ConfigValue { } } - fn expected(&self, wanted: &str) -> CargoResult { - Err(internal(format!("expected a {}, but found a {} in {}", - wanted, self.desc(), - self.definition_path().display()))) + fn expected(&self, wanted: &str, key: &str) -> CargoResult { + Err(human(format!("expected a {}, but found a {} for `{}` in {}", + wanted, self.desc(), key, + self.definition_path().display()))) } fn into_toml(self) -> toml::Value { diff --git a/src/cargo/util/to_url.rs b/src/cargo/util/to_url.rs index 2e3365cb255..2155ed836f6 100644 --- a/src/cargo/util/to_url.rs +++ b/src/cargo/util/to_url.rs @@ -1,34 +1,37 @@ -use url::{self, Url, UrlParser}; use std::path::Path; +use url::{self, Url, UrlParser}; + +use util::{human, CargoResult}; + pub trait ToUrl { - fn to_url(self) -> Result; + fn to_url(self) -> CargoResult; } impl ToUrl for Url { - fn to_url(self) -> Result { + fn to_url(self) -> CargoResult { Ok(self) } } impl<'a> ToUrl for &'a Url { - fn to_url(self) -> Result { + fn to_url(self) -> CargoResult { Ok(self.clone()) } } impl<'a> ToUrl for &'a str { - fn to_url(self) -> Result { + fn to_url(self) -> CargoResult { UrlParser::new().scheme_type_mapper(mapper).parse(self).map_err(|s| { - format!("invalid url `{}`: {}", self, s) + human(format!("invalid url `{}`: {}", self, s)) }) } } impl<'a> ToUrl for &'a Path { - fn to_url(self) -> Result { + fn to_url(self) -> CargoResult { Url::from_file_path(self).map_err(|()| { - format!("invalid path url `{}`", self.display()) + human(format!("invalid path url `{}`", self.display())) }) } } diff --git a/src/cargo/util/toml.rs b/src/cargo/util/toml.rs index 98c9aa1e546..12f5152e719 100644 --- a/src/cargo/util/toml.rs +++ 
b/src/cargo/util/toml.rs @@ -694,9 +694,7 @@ fn process_dependencies(cx: &mut Context, let new_source_id = match details.git { Some(ref git) => { - let loc = try!(git.to_url().map_err(|e| { - human(e) - })); + let loc = try!(git.to_url()); Some(SourceId::for_git(&loc, reference)) } None => { @@ -722,7 +720,7 @@ fn process_dependencies(cx: &mut Context, None => None, } } - }.unwrap_or(try!(SourceId::for_central(cx.config))); + }.unwrap_or(try!(SourceId::crates_io(cx.config))); let version = details.version.as_ref().map(|v| &v[..]); let mut dep = try!(DependencyInner::parse(&n, version, &new_source_id)); diff --git a/tests/support/mod.rs b/tests/support/mod.rs index 1135849184a..b689da99722 100644 --- a/tests/support/mod.rs +++ b/tests/support/mod.rs @@ -666,3 +666,4 @@ pub static UPLOADING: &'static str = " Uploading"; pub static VERIFYING: &'static str = " Verifying"; pub static ARCHIVING: &'static str = " Archiving"; pub static INSTALLING: &'static str = " Installing"; +pub static UNPACKING: &'static str = " Unpacking"; diff --git a/tests/support/registry.rs b/tests/support/registry.rs index 5452fe58373..f5fde354d65 100644 --- a/tests/support/registry.rs +++ b/tests/support/registry.rs @@ -24,9 +24,10 @@ pub struct Package { deps: Vec<(String, String, &'static str, String)>, files: Vec<(String, String)>, yanked: bool, + local: bool, } -fn init() { +pub fn init() { let config = paths::home().join(".cargo/config"); fs::create_dir_all(config.parent().unwrap()).unwrap(); if fs::metadata(&config).is_ok() { @@ -34,16 +35,23 @@ fn init() { } File::create(&config).unwrap().write_all(format!(r#" [registry] - index = "{reg}" token = "api-token" + + [source.crates-io] + registry = 'https://wut' + replace-with = 'dummy-registry' + + [source.dummy-registry] + registry = '{reg}' "#, reg = registry()).as_bytes()).unwrap(); // Init a new registry repo(®istry_path()) .file("config.json", &format!(r#" - {{"dl":"{}","api":""}} + {{"dl":"{0}","api":"{0}"}} "#, dl_url())) .build(); + 
fs::create_dir_all(dl_path().join("api/v1/crates")).unwrap(); } impl Package { @@ -55,9 +63,15 @@ impl Package { deps: Vec::new(), files: Vec::new(), yanked: false, + local: false, } } + pub fn local(&mut self, local: bool) -> &mut Package { + self.local = local; + self + } + pub fn file(&mut self, name: &str, contents: &str) -> &mut Package { self.files.push((name.to_string(), contents.to_string())); self @@ -89,7 +103,6 @@ impl Package { self } - #[allow(deprecated)] // connect => join in 1.3 pub fn publish(&self) { self.make_archive(); @@ -102,7 +115,7 @@ impl Package { \"target\":{},\ \"optional\":false,\ \"kind\":\"{}\"}}", name, req, target, kind) - }).collect::>().connect(","); + }).collect::>().join(","); let cksum = { let mut c = Vec::new(); File::open(&self.archive_dst()).unwrap() @@ -121,7 +134,11 @@ impl Package { }; // Write file/line in the index - let dst = registry_path().join(&file); + let dst = if self.local { + registry_path().join("index").join(&file) + } else { + registry_path().join(&file) + }; let mut prev = String::new(); let _ = File::open(&dst).and_then(|mut f| f.read_to_string(&mut prev)); fs::create_dir_all(dst.parent().unwrap()).unwrap(); @@ -129,20 +146,22 @@ impl Package { .write_all((prev + &line[..] 
+ "\n").as_bytes()).unwrap(); // Add the new file to the index - let repo = git2::Repository::open(®istry_path()).unwrap(); - let mut index = repo.index().unwrap(); - index.add_path(Path::new(&file)).unwrap(); - index.write().unwrap(); - let id = index.write_tree().unwrap(); - - // Commit this change - let tree = repo.find_tree(id).unwrap(); - let sig = repo.signature().unwrap(); - let parent = repo.refname_to_id("refs/heads/master").unwrap(); - let parent = repo.find_commit(parent).unwrap(); - repo.commit(Some("HEAD"), &sig, &sig, - "Another commit", &tree, - &[&parent]).unwrap(); + if !self.local { + let repo = git2::Repository::open(®istry_path()).unwrap(); + let mut index = repo.index().unwrap(); + index.add_path(Path::new(&file)).unwrap(); + index.write().unwrap(); + let id = index.write_tree().unwrap(); + + // Commit this change + let tree = repo.find_tree(id).unwrap(); + let sig = repo.signature().unwrap(); + let parent = repo.refname_to_id("refs/heads/master").unwrap(); + let parent = repo.find_commit(parent).unwrap(); + repo.commit(Some("HEAD"), &sig, &sig, + "Another commit", &tree, + &[&parent]).unwrap(); + } } fn make_archive(&self) { @@ -192,7 +211,12 @@ impl Package { } pub fn archive_dst(&self) -> PathBuf { - dl_path().join(&self.name).join(&self.vers).join("download") + if self.local { + registry_path().join(format!("{}-{}.crate", self.name, + self.vers)) + } else { + dl_path().join(&self.name).join(&self.vers).join("download") + } } } diff --git a/tests/test_bad_config.rs b/tests/test_bad_config.rs index 32428e41771..a1008f30ff3 100644 --- a/tests/test_bad_config.rs +++ b/tests/test_bad_config.rs @@ -73,7 +73,7 @@ test!(bad3 { assert_that(foo.cargo_process("publish").arg("-v"), execs().with_status(101).with_stderr(&format!("\ {error} invalid configuration for key `http.proxy` -expected a string, but found a boolean in [..]config +expected a string, but found a boolean for `http.proxy` in [..]config ", error = ERROR))); }); @@ -90,7 +90,7 @@ 
test!(bad4 { Caused by: invalid configuration for key `cargo-new.name` -expected a string, but found a boolean in [..]config +expected a string, but found a boolean for `cargo-new.name` in [..]config ", error = ERROR))); }); @@ -423,3 +423,187 @@ warning: dependency (foo) specified without providing a local path, Git reposito to use. This will be considered an error in future versions ")); }); + +test!(bad_source_config1 { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [source.foo] + "#); + + assert_that(p.cargo_process("build"), + execs().with_status(101).with_stderr(&format!("\ +{error} no source URL specified for `source.foo`, need [..] +", error = ERROR))); +}); + +test!(bad_source_config2 { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = "*" + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [source.crates-io] + registry = 'http://example.com' + replace-with = 'bar' + "#); + + assert_that(p.cargo_process("build"), + execs().with_status(101).with_stderr(&format!("\ +{error} Unable to update registry https://[..] + +Caused by: + could not find a configured source with the name `bar` \ + when attempting to lookup `crates-io` (configuration in [..]) +", error = ERROR))); +}); + +test!(bad_source_config3 { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = "*" + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [source.crates-io] + registry = 'http://example.com' + replace-with = 'crates-io' + "#); + + assert_that(p.cargo_process("build"), + execs().with_status(101).with_stderr(&format!("\ +{error} Unable to update registry https://[..] + +Caused by: + detected a cycle of `replace-with` sources, [..] 
+", error = ERROR))); +}); + +test!(bad_source_config4 { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = "*" + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [source.crates-io] + registry = 'http://example.com' + replace-with = 'bar' + + [source.bar] + registry = 'http://example.com' + replace-with = 'crates-io' + "#); + + assert_that(p.cargo_process("build"), + execs().with_status(101).with_stderr(&format!("\ +{error} Unable to update registry https://[..] + +Caused by: + detected a cycle of `replace-with` sources, the source `crates-io` is \ + eventually replaced with itself (configuration in [..]) +", error = ERROR))); +}); + +test!(bad_source_config5 { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = "*" + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [source.crates-io] + registry = 'http://example.com' + replace-with = 'bar' + + [source.bar] + registry = 'not a url' + "#); + + assert_that(p.cargo_process("build"), + execs().with_status(101).with_stderr(&format!("\ +{error} configuration key `source.bar.registry` specified an invalid URL (in [..]) + +Caused by: + invalid url `not a url`: [..] +", error = ERROR))); +}); + +test!(bad_source_config6 { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = "*" + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [source.crates-io] + registry = 'http://example.com' + replace-with = ['not', 'a', 'string'] + "#); + + assert_that(p.cargo_process("build"), + execs().with_status(101).with_stderr(&format!("\ +{error} expected a string, but found a array for `source.crates-io.replace-with` in [..] 
+", error = ERROR))); +}); + +test!(bad_source_config7 { + let p = project("foo") + .file("Cargo.toml", r#" + [package] + name = "foo" + version = "0.0.0" + authors = [] + + [dependencies] + bar = "*" + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [source.foo] + registry = 'http://example.com' + local-registry = 'file:///another/file' + "#); + + assert_that(p.cargo_process("build"), + execs().with_status(101).with_stderr(&format!("\ +{error} more than one source URL specified for `source.foo` +", error = ERROR))); +}); diff --git a/tests/test_cargo_cfg.rs b/tests/test_cargo_cfg.rs index dbb2fb58335..c58906ae411 100644 --- a/tests/test_cargo_cfg.rs +++ b/tests/test_cargo_cfg.rs @@ -215,8 +215,8 @@ test!(works_through_the_registry { {updating} registry [..] {downloading} [..] {downloading} [..] -{compiling} foo v0.1.0 ([..]) -{compiling} bar v0.1.0 ([..]) +{compiling} foo v0.1.0 +{compiling} bar v0.1.0 {compiling} a v0.0.1 ([..]) ", compiling = COMPILING, updating = UPDATING, downloading = DOWNLOADING))); }); diff --git a/tests/test_cargo_install.rs b/tests/test_cargo_install.rs index 13d50376df7..30ff528d1f5 100644 --- a/tests/test_cargo_install.rs +++ b/tests/test_cargo_install.rs @@ -64,7 +64,7 @@ test!(simple { execs().with_status(0).with_stdout(&format!("\ {updating} registry `[..]` {downloading} foo v0.0.1 (registry file://[..]) -{compiling} foo v0.0.1 (registry file://[..]) +{compiling} foo v0.0.1 {installing} {home}[..]bin[..]foo[..] ", updating = UPDATING, @@ -91,7 +91,7 @@ test!(pick_max_version { execs().with_status(0).with_stdout(&format!("\ {updating} registry `[..]` {downloading} foo v0.0.2 (registry file://[..]) -{compiling} foo v0.0.2 (registry file://[..]) +{compiling} foo v0.0.2 {installing} {home}[..]bin[..]foo[..] 
", updating = UPDATING, @@ -106,7 +106,7 @@ test!(missing { pkg("foo", "0.0.1"); assert_that(cargo_process("install").arg("bar"), execs().with_status(101).with_stderr(&format!("\ -{error} could not find `bar` in `registry file://[..]` +{error} could not find `bar` in `registry [..]` ", error = ERROR))); }); @@ -115,7 +115,7 @@ test!(bad_version { pkg("foo", "0.0.1"); assert_that(cargo_process("install").arg("foo").arg("--vers=0.2.0"), execs().with_status(101).with_stderr(&format!("\ -{error} could not find `foo` in `registry file://[..]` with version `0.2.0` +{error} could not find `foo` in `registry [..]` with version `0.2.0` ", error = ERROR))); }); @@ -466,9 +466,9 @@ test!(list { execs().with_status(0)); assert_that(cargo_process("install").arg("--list"), execs().with_status(0).with_stdout("\ -bar v0.2.1 (registry [..]): +bar v0.2.1: bar[..] -foo v0.0.1 (registry [..]): +foo v0.0.1: foo[..] ")); }); @@ -488,7 +488,7 @@ test!(uninstall_bin_does_not_exist { execs().with_status(0)); assert_that(cargo_process("uninstall").arg("foo").arg("--bin=bar"), execs().with_status(101).with_stderr(&format!("\ -{error} binary `bar[..]` not installed as part of `foo v0.0.1 ([..])` +{error} binary `bar[..]` not installed as part of `foo v0.0.1` ", error = ERROR))); }); diff --git a/tests/test_cargo_local_registry.rs b/tests/test_cargo_local_registry.rs new file mode 100644 index 00000000000..b59fc66854f --- /dev/null +++ b/tests/test_cargo_local_registry.rs @@ -0,0 +1,333 @@ +use std::fs::{self, File}; +use std::io::prelude::*; + +use hamcrest::assert_that; + +use support::{project, execs, COMPILING, UPDATING, UNPACKING, ERROR}; +use support::paths::{self, CargoPathExt}; +use support::registry::Package; + +fn setup() { + let root = paths::root(); + fs::create_dir(&root.join(".cargo")).unwrap(); + File::create(root.join(".cargo/config")).unwrap().write_all(br#" + [source.crates-io] + registry = 'https://wut' + replace-with = 'my-awesome-local-directory' + + 
[source.my-awesome-local-directory] + local-registry = 'registry' + "#).unwrap(); +} + +test!(simple { + Package::new("foo", "0.0.1") + .local(true) + .file("src/lib.rs", "pub fn foo() {}") + .publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.0.1" + "#) + .file("src/lib.rs", r#" + extern crate foo; + pub fn bar() { + foo::foo(); + } + "#); + + assert_that(p.cargo_process("build"), + execs().with_status(0).with_stdout(&format!("\ +{unpacking} foo v0.0.1 ([..]) +{compiling} foo v0.0.1 +{compiling} bar v0.0.1 ({dir}) +", + compiling = COMPILING, + unpacking = UNPACKING, + dir = p.url()))); + assert_that(p.cargo("build"), execs().with_status(0).with_stdout("")); + assert_that(p.cargo("test"), execs().with_status(0)); +}); + +test!(multiple_versions { + Package::new("foo", "0.0.1").local(true).publish(); + Package::new("foo", "0.1.0") + .local(true) + .file("src/lib.rs", "pub fn foo() {}") + .publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "*" + "#) + .file("src/lib.rs", r#" + extern crate foo; + pub fn bar() { + foo::foo(); + } + "#); + + assert_that(p.cargo_process("build"), + execs().with_status(0).with_stdout(&format!("\ +{unpacking} foo v0.1.0 ([..]) +{compiling} foo v0.1.0 +{compiling} bar v0.0.1 ({dir}) +", + compiling = COMPILING, + unpacking = UNPACKING, + dir = p.url()))); + + Package::new("foo", "0.2.0") + .local(true) + .file("src/lib.rs", "pub fn foo() {}") + .publish(); + + assert_that(p.cargo("update").arg("-v"), + execs().with_status(0).with_stdout(&format!("\ +{updating} foo v0.1.0 -> v0.2.0 +", + updating = UPDATING))); +}); + +test!(multiple_names { + Package::new("foo", "0.0.1") + .local(true) + .file("src/lib.rs", "pub fn foo() {}") + .publish(); + Package::new("bar", "0.1.0") + .local(true) + .file("src/lib.rs", "pub fn bar() {}") + 
.publish(); + + let p = project("local") + .file("Cargo.toml", r#" + [project] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "*" + bar = "*" + "#) + .file("src/lib.rs", r#" + extern crate foo; + extern crate bar; + pub fn local() { + foo::foo(); + bar::bar(); + } + "#); + + assert_that(p.cargo_process("build"), + execs().with_status(0).with_stdout(&format!("\ +{unpacking} [..] +{unpacking} [..] +{compiling} [..] +{compiling} [..] +{compiling} local v0.0.1 ({dir}) +", + compiling = COMPILING, + unpacking = UNPACKING, + dir = p.url()))); +}); + +test!(interdependent { + Package::new("foo", "0.0.1") + .local(true) + .file("src/lib.rs", "pub fn foo() {}") + .publish(); + Package::new("bar", "0.1.0") + .local(true) + .dep("foo", "*") + .file("src/lib.rs", "extern crate foo; pub fn bar() {}") + .publish(); + + let p = project("local") + .file("Cargo.toml", r#" + [project] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "*" + bar = "*" + "#) + .file("src/lib.rs", r#" + extern crate foo; + extern crate bar; + pub fn local() { + foo::foo(); + bar::bar(); + } + "#); + + assert_that(p.cargo_process("build"), + execs().with_status(0).with_stdout(&format!("\ +{unpacking} [..] +{unpacking} [..] 
+{compiling} foo v0.0.1 +{compiling} bar v0.1.0 +{compiling} local v0.0.1 ({dir}) +", + compiling = COMPILING, + unpacking = UNPACKING, + dir = p.url()))); +}); + +test!(path_dep_rewritten { + Package::new("foo", "0.0.1") + .local(true) + .file("src/lib.rs", "pub fn foo() {}") + .publish(); + Package::new("bar", "0.1.0") + .local(true) + .dep("foo", "*") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.1.0" + authors = [] + + [dependencies] + foo = { path = "foo", version = "*" } + "#) + .file("src/lib.rs", "extern crate foo; pub fn bar() {}") + .file("foo/Cargo.toml", r#" + [project] + name = "foo" + version = "0.0.1" + authors = [] + "#) + .file("foo/src/lib.rs", "pub fn foo() {}") + .publish(); + + let p = project("local") + .file("Cargo.toml", r#" + [project] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "*" + bar = "*" + "#) + .file("src/lib.rs", r#" + extern crate foo; + extern crate bar; + pub fn local() { + foo::foo(); + bar::bar(); + } + "#); + + assert_that(p.cargo_process("build"), + execs().with_status(0).with_stdout(&format!("\ +{unpacking} [..] +{unpacking} [..] +{compiling} foo v0.0.1 +{compiling} bar v0.1.0 +{compiling} local v0.0.1 ({dir}) +", + compiling = COMPILING, + unpacking = UNPACKING, + dir = p.url()))); +}); + +test!(invalid_dir_bad { + let p = project("local") + .file("Cargo.toml", r#" + [project] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "*" + "#) + .file("src/lib.rs", "") + .file(".cargo/config", r#" + [source.crates-io] + registry = 'https://wut' + replace-with = 'my-awesome-local-directory' + + [source.my-awesome-local-directory] + local-registry = '/path/to/nowhere' + "#); + + + assert_that(p.cargo_process("build"), + execs().with_status(101).with_stderr(&format!("\ +{error} Unable to update registry https://[..] 
+ +Caused by: + failed to update replaced source `registry https://[..]` + +Caused by: + local registry path is not a directory: [..]path[..]to[..]nowhere +", error = ERROR))); +}); + +test!(different_directory_replacing_the_registry_is_bad { + // Move our test's .cargo/config to a temporary location and publish a + // registry package we're going to use first. + let config = paths::root().join(".cargo"); + let config_tmp = paths::root().join(".cargo-old"); + fs::rename(&config, &config_tmp).unwrap(); + + let p = project("local") + .file("Cargo.toml", r#" + [project] + name = "local" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "*" + "#) + .file("src/lib.rs", ""); + p.build(); + + // Generate a lock file against the crates.io registry + Package::new("foo", "0.0.1").publish(); + assert_that(p.cargo("build"), execs().with_status(0)); + + // Switch back to our directory source, and now that we're replacing + // crates.io make sure that this fails because we're replacing with a + // different checksum + let _ = config.rm_rf(); + fs::rename(&config_tmp, &config).unwrap(); + Package::new("foo", "0.0.1") + .file("src/lib.rs", "invalid") + .local(true) + .publish(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr(&format!("\ +{error} checksum for `foo v0.0.1` changed between lock files + +this could be indicative of a few possible errors: + + * the lock file is corrupt + * a replacement source in use (e.g. 
a mirror) returned a different checksum + * the source itself may be corrupt in one way or another + +unable to verify that `foo v0.0.1` was the same as before in any situation + +", error = ERROR))); +}); diff --git a/tests/test_cargo_metadata.rs b/tests/test_cargo_metadata.rs index 543f357d241..8453e9cefd6 100644 --- a/tests/test_cargo_metadata.rs +++ b/tests/test_cargo_metadata.rs @@ -73,10 +73,10 @@ test!(cargo_metadata_with_deps_and_version { { "dependencies": [], "features": {}, - "id": "baz 0.0.1 (registry+file:[..])", + "id": "baz 0.0.1 (registry+[..])", "manifest_path": "[..]Cargo.toml", "name": "baz", - "source": "registry+file:[..]", + "source": "registry+[..]", "targets": [ { "kind": [ @@ -96,16 +96,16 @@ test!(cargo_metadata_with_deps_and_version { "name": "baz", "optional": false, "req": "^0.0.1", - "source": "registry+file:[..]", + "source": "registry+[..]", "target": null, "uses_default_features": true } ], "features": {}, - "id": "bar 0.0.1 (registry+file:[..])", + "id": "bar 0.0.1 (registry+[..])", "manifest_path": "[..]Cargo.toml", "name": "bar", - "source": "registry+file:[..]", + "source": "registry+[..]", "targets": [ { "kind": [ @@ -125,7 +125,7 @@ test!(cargo_metadata_with_deps_and_version { "name": "bar", "optional": false, "req": "*", - "source": "registry+file:[..]", + "source": "registry+[..]", "target": null, "uses_default_features": true } @@ -151,19 +151,19 @@ test!(cargo_metadata_with_deps_and_version { "nodes": [ { "dependencies": [ - "bar 0.0.1 (registry+file:[..])" + "bar 0.0.1 (registry+[..])" ], "id": "foo 0.5.0 (path+file:[..]foo)" }, { "dependencies": [ - "baz 0.0.1 (registry+file:[..])" + "baz 0.0.1 (registry+[..])" ], - "id": "bar 0.0.1 (registry+file:[..])" + "id": "bar 0.0.1 (registry+[..])" }, { "dependencies": [], - "id": "baz 0.0.1 (registry+file:[..])" + "id": "baz 0.0.1 (registry+[..])" } ], "root": "foo 0.5.0 (path+file:[..]foo)" diff --git a/tests/test_cargo_publish.rs b/tests/test_cargo_publish.rs index 
15e33f2b53e..1aaf0cfc4d2 100644 --- a/tests/test_cargo_publish.rs +++ b/tests/test_cargo_publish.rs @@ -1,43 +1,22 @@ use std::io::prelude::*; -use std::fs::{self, File}; +use std::fs::File; use std::io::SeekFrom; -use std::path::PathBuf; use flate2::read::GzDecoder; use tar::Archive; -use url::Url; use support::{project, execs}; use support::{UPDATING, PACKAGING, UPLOADING, ERROR}; -use support::paths; -use support::git::repo; +use support::registry; use hamcrest::assert_that; -fn registry_path() -> PathBuf { paths::root().join("registry") } -fn registry() -> Url { Url::from_file_path(&*registry_path()).ok().unwrap() } -fn upload_path() -> PathBuf { paths::root().join("upload") } -fn upload() -> Url { Url::from_file_path(&*upload_path()).ok().unwrap() } - fn setup() { - let config = paths::root().join(".cargo/config"); - fs::create_dir_all(config.parent().unwrap()).unwrap(); - File::create(&config).unwrap().write_all(&format!(r#" - [registry] - index = "{reg}" - token = "api-token" - "#, reg = registry()).as_bytes()).unwrap(); - fs::create_dir_all(&upload_path().join("api/v1/crates")).unwrap(); - - repo(®istry_path()) - .file("config.json", &format!(r#"{{ - "dl": "{0}", - "api": "{0}" - }}"#, upload())) - .build(); } test!(simple { + registry::init(); + let p = project("foo") .file("Cargo.toml", r#" [project] @@ -49,7 +28,8 @@ test!(simple { "#) .file("src/main.rs", "fn main() {}"); - assert_that(p.cargo_process("publish").arg("--no-verify"), + assert_that(p.cargo_process("publish").arg("--no-verify") + .arg("--host").arg(registry::registry().to_string()), execs().with_status(0).with_stdout(&format!("\ {updating} registry `{reg}` {packaging} foo v0.0.1 ({dir}) @@ -59,9 +39,9 @@ test!(simple { uploading = UPLOADING, packaging = PACKAGING, dir = p.url(), - reg = registry()))); + reg = registry::registry()))); - let mut f = File::open(&upload_path().join("api/v1/crates/new")).unwrap(); + let mut f = File::open(®istry::dl_path().join("api/v1/crates/new")).unwrap(); // 
Skip the metadata payload and the size of the tarball let mut sz = [0; 4]; assert_eq!(f.read(&mut sz).unwrap(), 4); @@ -88,6 +68,8 @@ test!(simple { }); test!(git_deps { + registry::init(); + let p = project("foo") .file("Cargo.toml", r#" [project] @@ -111,6 +93,8 @@ error = ERROR))); }); test!(path_dependency_no_version { + registry::init(); + let p = project("foo") .file("Cargo.toml", r#" [project] @@ -141,6 +125,8 @@ error = ERROR))); }); test!(unpublishable_crate { + registry::init(); + let p = project("foo") .file("Cargo.toml", r#" [project] diff --git a/tests/test_cargo_registry.rs b/tests/test_cargo_registry.rs index 7596ca2acb8..2fccf98633e 100644 --- a/tests/test_cargo_registry.rs +++ b/tests/test_cargo_registry.rs @@ -30,8 +30,8 @@ test!(simple { assert_that(p.cargo_process("build"), execs().with_status(0).with_stdout(&format!("\ {updating} registry `{reg}` -{downloading} bar v0.0.1 (registry file://[..]) -{compiling} bar v0.0.1 (registry file://[..]) +{downloading} bar v0.0.1 (registry [..]) +{compiling} bar v0.0.1 {compiling} foo v0.0.1 ({dir}) ", updating = UPDATING, @@ -44,7 +44,7 @@ test!(simple { assert_that(p.cargo_process("build"), execs().with_status(0).with_stdout(&format!("\ {updating} registry `{reg}` -[..] bar v0.0.1 (registry file://[..]) +[..] bar v0.0.1 [..] foo v0.0.1 ({dir}) ", updating = UPDATING, @@ -71,10 +71,10 @@ test!(deps { assert_that(p.cargo_process("build"), execs().with_status(0).with_stdout(&format!("\ {updating} registry `{reg}` -{downloading} [..] v0.0.1 (registry file://[..]) -{downloading} [..] v0.0.1 (registry file://[..]) -{compiling} baz v0.0.1 (registry file://[..]) -{compiling} bar v0.0.1 (registry file://[..]) +{downloading} [..] v0.0.1 (registry [..]) +{downloading} [..] 
v0.0.1 (registry [..]) +{compiling} baz v0.0.1 +{compiling} bar v0.0.1 {compiling} foo v0.0.1 ({dir}) ", updating = UPDATING, @@ -102,7 +102,7 @@ test!(nonexistent { assert_that(p.cargo_process("build"), execs().with_status(101).with_stderr(&format!("\ {error} no matching package named `nonexistent` found (required by `foo`) -location searched: registry file://[..] +location searched: registry [..] version required: >= 0.0.0 ", error = ERROR))); @@ -127,7 +127,7 @@ test!(wrong_version { assert_that(p.cargo_process("build"), execs().with_status(101).with_stderr(&format!("\ {error} no matching package named `foo` found (required by `foo`) -location searched: registry file://[..] +location searched: registry [..] version required: >= 1.0.0 versions found: 0.0.2, 0.0.1 ", @@ -139,7 +139,7 @@ error = ERROR))); assert_that(p.cargo_process("build"), execs().with_status(101).with_stderr(&format!("\ {error} no matching package named `foo` found (required by `foo`) -location searched: registry file://[..] +location searched: registry [..] version required: >= 1.0.0 versions found: 0.0.4, 0.0.3, 0.0.2, ... ", @@ -168,7 +168,7 @@ test!(bad_cksum { {error} unable to get packages from source Caused by: - failed to download package `bad-cksum v0.0.1 (registry file://[..])` from [..] + failed to download replaced source `registry https://[..]` Caused by: failed to verify the checksum of `bad-cksum v0.0.1 (registry file://[..])` @@ -194,7 +194,7 @@ test!(update_registry { assert_that(p.cargo_process("build"), execs().with_status(101).with_stderr(&format!("\ {error} no matching package named `notyet` found (required by `foo`) -location searched: registry file://[..] +location searched: registry [..] 
version required: >= 0.0.0 ", error = ERROR))); @@ -204,8 +204,8 @@ error = ERROR))); assert_that(p.cargo("build"), execs().with_status(0).with_stdout(&format!("\ {updating} registry `{reg}` -{downloading} notyet v0.0.1 (registry file://[..]) -{compiling} notyet v0.0.1 (registry file://[..]) +{downloading} notyet v0.0.1 (registry [..]) +{compiling} notyet v0.0.1 {compiling} foo v0.0.1 ({dir}) ", updating = UPDATING, @@ -248,7 +248,7 @@ test!(package_with_path_deps { Caused by: no matching package named `notyet` found (required by `foo`) -location searched: registry file://[..] +location searched: registry [..] version required: ^0.0.1 ", error = ERROR))); @@ -260,8 +260,8 @@ error = ERROR))); {packaging} foo v0.0.1 ({dir}) {verifying} foo v0.0.1 ({dir}) {updating} registry `[..]` -{downloading} notyet v0.0.1 (registry file://[..]) -{compiling} notyet v0.0.1 (registry file://[..]) +{downloading} notyet v0.0.1 (registry [..]) +{compiling} notyet v0.0.1 {compiling} foo v0.0.1 ({dir}[..]) ", packaging = PACKAGING, @@ -292,8 +292,8 @@ test!(lockfile_locks { assert_that(p.cargo("build"), execs().with_status(0).with_stdout(&format!("\ {updating} registry `[..]` -{downloading} bar v0.0.1 (registry file://[..]) -{compiling} bar v0.0.1 (registry file://[..]) +{downloading} bar v0.0.1 (registry [..]) +{compiling} bar v0.0.1 {compiling} foo v0.0.1 ({dir}) ", updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING, dir = p.url()))); @@ -325,10 +325,10 @@ test!(lockfile_locks_transitively { assert_that(p.cargo("build"), execs().with_status(0).with_stdout(&format!("\ {updating} registry `[..]` -{downloading} [..] v0.0.1 (registry file://[..]) -{downloading} [..] v0.0.1 (registry file://[..]) -{compiling} baz v0.0.1 (registry file://[..]) -{compiling} bar v0.0.1 (registry file://[..]) +{downloading} [..] v0.0.1 (registry [..]) +{downloading} [..] 
v0.0.1 (registry [..]) +{compiling} baz v0.0.1 +{compiling} bar v0.0.1 {compiling} foo v0.0.1 ({dir}) ", updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING, dir = p.url()))); @@ -363,10 +363,10 @@ test!(yanks_are_not_used { assert_that(p.cargo("build"), execs().with_status(0).with_stdout(&format!("\ {updating} registry `[..]` -{downloading} [..] v0.0.1 (registry file://[..]) -{downloading} [..] v0.0.1 (registry file://[..]) -{compiling} baz v0.0.1 (registry file://[..]) -{compiling} bar v0.0.1 (registry file://[..]) +{downloading} [..] v0.0.1 (registry [..]) +{downloading} [..] v0.0.1 (registry [..]) +{compiling} baz v0.0.1 +{compiling} bar v0.0.1 {compiling} foo v0.0.1 ({dir}) ", updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING, dir = p.url()))); @@ -393,7 +393,7 @@ test!(relying_on_a_yank_is_bad { assert_that(p.cargo("build"), execs().with_status(101).with_stderr(&format!("\ {error} no matching package named `baz` found (required by `bar`) -location searched: registry file://[..] +location searched: registry [..] version required: = 0.0.2 versions found: 0.0.1 ", @@ -419,7 +419,7 @@ test!(yanks_in_lockfiles_are_ok { assert_that(p.cargo("build"), execs().with_status(0)); - fs::remove_dir_all(®istry::registry_path().join("3")).unwrap(); + registry::registry_path().join("3").rm_rf().unwrap(); Package::new("bar", "0.0.1").yanked(true).publish(); @@ -429,7 +429,7 @@ test!(yanks_in_lockfiles_are_ok { assert_that(p.cargo("update"), execs().with_status(101).with_stderr(&format!("\ {error} no matching package named `bar` found (required by `foo`) -location searched: registry file://[..] +location searched: registry [..] 
version required: * ", error = ERROR))); @@ -458,7 +458,7 @@ test!(update_with_lockfile_if_packages_missing { assert_that(p.cargo("build"), execs().with_status(0).with_stdout(&format!("\ {updating} registry `[..]` -{downloading} bar v0.0.1 (registry file://[..]) +{downloading} bar v0.0.1 (registry [..]) ", updating = UPDATING, downloading = DOWNLOADING))); }); @@ -489,14 +489,14 @@ test!(update_lockfile { .arg("-p").arg("bar").arg("--precise").arg("0.0.2"), execs().with_status(0).with_stdout(&format!("\ {updating} registry `[..]` -{updating} bar v0.0.1 (registry file://[..]) -> v0.0.2 +{updating} bar v0.0.1 -> v0.0.2 ", updating = UPDATING))); println!("0.0.2 build"); assert_that(p.cargo("build"), execs().with_status(0).with_stdout(&format!("\ -{downloading} [..] v0.0.2 (registry file://[..]) -{compiling} bar v0.0.2 (registry file://[..]) +{downloading} [..] v0.0.2 (registry [..]) +{compiling} bar v0.0.2 {compiling} foo v0.0.1 ({dir}) ", downloading = DOWNLOADING, compiling = COMPILING, dir = p.url()))); @@ -506,14 +506,14 @@ test!(update_lockfile { .arg("-p").arg("bar"), execs().with_status(0).with_stdout(&format!("\ {updating} registry `[..]` -{updating} bar v0.0.2 (registry file://[..]) -> v0.0.3 +{updating} bar v0.0.2 -> v0.0.3 ", updating = UPDATING))); println!("0.0.3 build"); assert_that(p.cargo("build"), execs().with_status(0).with_stdout(&format!("\ -{downloading} [..] v0.0.3 (registry file://[..]) -{compiling} bar v0.0.3 (registry file://[..]) +{downloading} [..] 
v0.0.3 (registry [..]) +{compiling} bar v0.0.3 {compiling} foo v0.0.1 ({dir}) ", downloading = DOWNLOADING, compiling = COMPILING, dir = p.url()))); @@ -525,8 +525,8 @@ test!(update_lockfile { .arg("-p").arg("bar"), execs().with_status(0).with_stdout(&format!("\ {updating} registry `[..]` -{updating} bar v0.0.3 (registry file://[..]) -> v0.0.4 -{adding} spam v0.2.5 (registry file://[..]) +{updating} bar v0.0.3 -> v0.0.4 +{adding} spam v0.2.5 ", updating = UPDATING, adding = ADDING))); println!("new dependencies update"); @@ -535,8 +535,8 @@ test!(update_lockfile { .arg("-p").arg("bar"), execs().with_status(0).with_stdout(&format!("\ {updating} registry `[..]` -{updating} bar v0.0.4 (registry file://[..]) -> v0.0.5 -{removing} spam v0.2.5 (registry file://[..]) +{updating} bar v0.0.4 -> v0.0.5 +{removing} spam v0.2.5 ", updating = UPDATING, removing = REMOVING))); }); @@ -560,8 +560,8 @@ test!(dev_dependency_not_used { assert_that(p.cargo("build"), execs().with_status(0).with_stdout(&format!("\ {updating} registry `[..]` -{downloading} [..] v0.0.1 (registry file://[..]) -{compiling} bar v0.0.1 (registry file://[..]) +{downloading} [..] 
v0.0.1 (registry [..]) +{compiling} bar v0.0.1 {compiling} foo v0.0.1 ({dir}) ", updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING, dir = p.url()))); @@ -624,8 +624,8 @@ test!(updating_a_dep { assert_that(p.cargo("build"), execs().with_status(0).with_stdout(&format!("\ {updating} registry `[..]` -{downloading} bar v0.0.1 (registry file://[..]) -{compiling} bar v0.0.1 (registry file://[..]) +{downloading} bar v0.0.1 (registry [..]) +{compiling} bar v0.0.1 {compiling} a v0.0.1 ({dir}/a) {compiling} foo v0.0.1 ({dir}) ", updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING, @@ -646,8 +646,8 @@ test!(updating_a_dep { assert_that(p.cargo("build"), execs().with_status(0).with_stdout(&format!("\ {updating} registry `[..]` -{downloading} bar v0.1.0 (registry file://[..]) -{compiling} bar v0.1.0 (registry file://[..]) +{downloading} bar v0.1.0 (registry [..]) +{compiling} bar v0.1.0 {compiling} a v0.0.1 ({dir}/a) {compiling} foo v0.0.1 ({dir}) ", updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING, @@ -690,8 +690,8 @@ test!(git_and_registry_dep { execs().with_status(0).with_stdout(&format!("\ {updating} [..] {updating} [..] -{downloading} a v0.0.1 (registry file://[..]) -{compiling} a v0.0.1 (registry [..]) +{downloading} a v0.0.1 (registry [..]) +{compiling} a v0.0.1 {compiling} b v0.0.1 ([..]) {compiling} foo v0.0.1 ({dir}) ", updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING, @@ -704,6 +704,8 @@ test!(git_and_registry_dep { }); test!(update_publish_then_update { + // First generate a Cargo.lock and a clone of the registry index at the + // "head" of the current registry. 
let p = project("foo") .file("Cargo.toml", r#" [project] @@ -716,27 +718,44 @@ test!(update_publish_then_update { "#) .file("src/main.rs", "fn main() {}"); p.build(); - Package::new("a", "0.1.0").publish(); - assert_that(p.cargo("build"), execs().with_status(0)); + // Next, publish a new package and back up the copy of the registry we just + // created. Package::new("a", "0.1.1").publish(); + let registry = paths::home().join(".cargo/registry"); + let backup = paths::root().join("registry-backup"); + fs::rename(&registry, &backup).unwrap(); - let lock = p.root().join("Cargo.lock"); - let mut s = String::new(); - File::open(&lock).unwrap().read_to_string(&mut s).unwrap(); - File::create(&lock).unwrap() - .write_all(s.replace("0.1.0", "0.1.1").as_bytes()).unwrap(); - println!("second"); + // Generate a Cargo.lock with the newer version, and then move the old copy + // of the registry back into place. + let p2 = project("foo2") + .file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.5.0" + authors = [] - fs::remove_dir_all(&p.root().join("target")).unwrap(); + [dependencies] + a = "0.1.1" + "#) + .file("src/main.rs", "fn main() {}"); + assert_that(p2.cargo_process("build"), + execs().with_status(0)); + registry.rm_rf().unwrap(); + fs::rename(&backup, &registry).unwrap(); + fs::rename(p2.root().join("Cargo.lock"), p.root().join("Cargo.lock")).unwrap(); + + // Finally, build the first project again (with our newer Cargo.lock) which + // should force an update of the old registry, download the new crate, and + // then build everything again. assert_that(p.cargo("build"), execs().with_status(0).with_stdout(&format!("\ {updating} [..]
-{downloading} a v0.1.1 (registry file://[..]) -{compiling} a v0.1.1 (registry [..]) +{downloading} a v0.1.1 (registry [..]) +{compiling} a v0.1.1 {compiling} foo v0.5.0 ({dir}) ", updating = UPDATING, downloading = DOWNLOADING, compiling = COMPILING, dir = p.url()))); @@ -793,15 +812,15 @@ test!(update_transitive_dependency { execs().with_status(0) .with_stdout(format!("\ {updating} registry `[..]` -{updating} b v0.1.0 (registry [..]) -> v0.1.1 +{updating} b v0.1.0 -> v0.1.1 ", updating = UPDATING))); assert_that(p.cargo("build"), execs().with_status(0) .with_stdout(format!("\ -{downloading} b v0.1.1 (registry file://[..]) -{compiling} b v0.1.1 (registry [..]) -{compiling} a v0.1.0 (registry [..]) +{downloading} b v0.1.1 (registry [..]) +{compiling} b v0.1.1 +{compiling} a v0.1.0 {compiling} foo v0.5.0 ([..]) ", downloading = DOWNLOADING, compiling = COMPILING))); }); @@ -873,31 +892,31 @@ test!(update_multiple_packages { execs().with_status(0) .with_stdout(format!("\ {updating} registry `[..]` -{updating} a v0.1.0 (registry [..]) -> v0.1.1 -{updating} b v0.1.0 (registry [..]) -> v0.1.1 +{updating} a v0.1.0 -> v0.1.1 +{updating} b v0.1.0 -> v0.1.1 ", updating = UPDATING))); assert_that(p.cargo("update").arg("-pb").arg("-pc"), execs().with_status(0) .with_stdout(format!("\ {updating} registry `[..]` -{updating} c v0.1.0 (registry [..]) -> v0.1.1 +{updating} c v0.1.0 -> v0.1.1 ", updating = UPDATING))); assert_that(p.cargo("build"), execs().with_status(0) .with_stdout_contains(format!("\ -{downloading} a v0.1.1 (registry file://[..])", downloading = DOWNLOADING)) +{downloading} a v0.1.1 (registry [..])", downloading = DOWNLOADING)) .with_stdout_contains(format!("\ -{downloading} b v0.1.1 (registry file://[..])", downloading = DOWNLOADING)) +{downloading} b v0.1.1 (registry [..])", downloading = DOWNLOADING)) .with_stdout_contains(format!("\ -{downloading} c v0.1.1 (registry file://[..])", downloading = DOWNLOADING)) +{downloading} c v0.1.1 (registry [..])", 
downloading = DOWNLOADING)) .with_stdout_contains(format!("\ -{compiling} a v0.1.1 (registry [..])", compiling = COMPILING)) +{compiling} a v0.1.1", compiling = COMPILING)) .with_stdout_contains(format!("\ -{compiling} b v0.1.1 (registry [..])", compiling = COMPILING)) +{compiling} b v0.1.1", compiling = COMPILING)) .with_stdout_contains(format!("\ -{compiling} c v0.1.1 (registry [..])", compiling = COMPILING)) +{compiling} c v0.1.1", compiling = COMPILING)) .with_stdout_contains(format!("\ {compiling} foo v0.5.0 ([..])", compiling = COMPILING))); }); @@ -1011,7 +1030,33 @@ test!(only_download_relevant { execs().with_status(0).with_stdout(&format!("\ {updating} registry `[..]` {downloading} baz v0.1.0 ([..]) -{compiling} baz v0.1.0 ([..]) +{compiling} baz v0.1.0 {compiling} bar v0.5.0 ([..]) ", downloading = DOWNLOADING, compiling = COMPILING, updating = UPDATING))); }); + +test!(registry_index_is_old_and_deprecated { + let p = project("foo") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.5.0" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/main.rs", "fn main() {}") + .file(".cargo/config", &format!(r#" + [registry] + index = '{}' + "#, registry::registry())); + p.build(); + + Package::new("foo", "0.1.0").publish(); + assert_that(p.cargo("build"), + execs().with_status(0).with_stderr("\ +custom registry support via the `registry.index` configuration is being removed, \ +this functionality will not work in the future +")); +}); diff --git a/tests/test_lockfile_compat.rs b/tests/test_lockfile_compat.rs new file mode 100644 index 00000000000..c25709614ae --- /dev/null +++ b/tests/test_lockfile_compat.rs @@ -0,0 +1,260 @@ +use std::fs::File; +use std::io::prelude::*; + +use hamcrest::assert_that; + +use support::{execs, project}; +use support::registry::Package; +use support::git; + +fn setup() {} + +test!(oldest_lockfile_still_works { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file("Cargo.toml", 
r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", ""); + p.build(); + + let lockfile = r#" +[root] +name = "bar" +version = "0.0.1" +dependencies = [ + "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "foo" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +"#; + File::create(p.root().join("Cargo.lock")).unwrap() + .write_all(lockfile.as_bytes()).unwrap(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + + let mut lock = String::new(); + File::open(p.root().join("Cargo.lock")).unwrap() + .read_to_string(&mut lock).unwrap(); + assert!(lock.starts_with(lockfile.trim())); +}); + +test!(totally_wild_checksums_works { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", ""); + p.build(); + + File::create(p.root().join("Cargo.lock")).unwrap().write_all(br#" +[root] +name = "bar" +version = "0.0.1" +dependencies = [ + "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "foo" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[metadata] +"checksum baz 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum" +"checksum foo 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum" +"#).unwrap(); + + assert_that(p.cargo("build"), + execs().with_status(0)); + + let mut lock = String::new(); + File::open(p.root().join("Cargo.lock")).unwrap() + .read_to_string(&mut lock).unwrap(); + assert!(lock.starts_with(r#" +[root] +name = "bar" +version = "0.0.1" +dependencies = [ + "foo 0.1.0 
(registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "foo" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[metadata] +"#.trim())); +}); + +test!(wrong_checksum_is_an_error { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", ""); + p.build(); + + File::create(p.root().join("Cargo.lock")).unwrap().write_all(br#" +[root] +name = "bar" +version = "0.0.1" +dependencies = [ + "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "foo" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[metadata] +"checksum foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "checksum" +"#).unwrap(); + + assert_that(p.cargo("build"), + execs().with_status(101).with_stderr_contains(&format!("\ +{error} checksum for `foo v0.1.0` changed between lock files +", error = ERROR))); +}); + +// If the checksum is unlisted in the lockfile (e.g. <none>) yet we can +// calculate it (e.g. it's a registry dep), then we should in theory just fill +// it in.
+test!(unlisted_checksum_is_bad_if_we_calculate { + Package::new("foo", "0.1.0").publish(); + + let p = project("bar") + .file("Cargo.toml", r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = "0.1.0" + "#) + .file("src/lib.rs", ""); + p.build(); + + File::create(p.root().join("Cargo.lock")).unwrap().write_all(br#" +[root] +name = "bar" +version = "0.0.1" +dependencies = [ + "foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "foo" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[metadata] +"checksum foo 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "<none>" +"#).unwrap(); + + assert_that(p.cargo("fetch"), + execs().with_status(101).with_stderr(&format!("\ +{error} checksum for `foo v0.1.0` was not previously calculated, but a checksum could \ +now be calculated + +this could be indicative of a few possible situations: + + * the source `[..]` did not previously support checksums, + but was replaced with one that does + * newer Cargo implementations know how to checksum this source, but this + older implementation does not + * the lock file is corrupt + +", error = ERROR))); +}); + +// If the checksum is listed in the lockfile yet we cannot calculate it (e.g. +// git dependencies as of today), then make sure we choke.
+test!(listed_checksum_bad_if_we_cannot_compute { + let git = git::new("foo", |p| { + p.file("Cargo.toml", r#" + [project] + name = "foo" + version = "0.1.0" + authors = [] + "#) + .file("src/lib.rs", "") + }).unwrap(); + + let p = project("bar") + .file("Cargo.toml", &format!(r#" + [project] + name = "bar" + version = "0.0.1" + authors = [] + + [dependencies] + foo = {{ git = '{}' }} + "#, git.url())) + .file("src/lib.rs", ""); + p.build(); + + let lockfile = format!(r#" +[root] +name = "bar" +version = "0.0.1" +dependencies = [ + "foo 0.1.0 (git+{0})" +] + +[[package]] +name = "foo" +version = "0.1.0" +source = "git+{0}" + +[metadata] +"checksum foo 0.1.0 (git+{0})" = "checksum" +"#, git.url()); + File::create(p.root().join("Cargo.lock")).unwrap() + .write_all(lockfile.as_bytes()).unwrap(); + + assert_that(p.cargo("fetch"), + execs().with_status(101).with_stderr(&format!("\ +{error} checksum for `foo v0.1.0 ([..])` could not be calculated, but a \ +checksum is listed in the existing lock file[..] + +this could be indicative of a few possible situations: + + * the source `[..]` supports checksums, + but was replaced with one that doesn't + * the lock file is corrupt + +unable to verify that `foo v0.1.0 ([..])` was the same as before in either situation + +", error = ERROR))); +}); diff --git a/tests/tests.rs b/tests/tests.rs index 6b252a3e2e7..28422efdf9e 100644 --- a/tests/tests.rs +++ b/tests/tests.rs @@ -72,6 +72,8 @@ mod test_cargo_version; mod test_shell; mod test_cargo_death; mod test_cargo_cfg; +mod test_cargo_local_registry; +mod test_lockfile_compat; thread_local!(static RUSTC: Rustc = Rustc::new("rustc").unwrap());