diff --git a/Cargo.lock b/Cargo.lock index 04bf0fc303..dfdb141c60 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3,14 +3,18 @@ name = "bldr" version = "0.4.0" dependencies = [ "ansi_term 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "bincode 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "bitflags 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "clap 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "fnv 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "gpgme 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "hyper 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", "iron 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", + "lazy_static 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)", "libarchive 0.1.0", "libc 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", + "lmdb-sys 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", "mustache 0.6.1 (git+https://github.com/adamhjk/rust-mustache?branch=fallback_on_missing_extension)", "rand 0.3.13 (registry+https://github.com/rust-lang/crates.io-index)", @@ -50,6 +54,17 @@ name = "ansi_term" version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "bincode" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "byteorder 0.3.13 (registry+https://github.com/rust-lang/crates.io-index)", + "num 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "bitflags" version = "0.3.3" @@ -60,6 +75,11 @@ name = "bitflags" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "byteorder" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "clap" version = "2.0.2" @@ -277,6 +297,16 @@ dependencies = [ "pnacl-build-helper 1.4.10 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "lmdb-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "gcc 0.3.21 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", + "pkg-config 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "log" version = "0.3.5" diff --git a/Cargo.toml b/Cargo.toml index 6c8410b9b9..bff6f6ad95 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -16,12 +16,16 @@ doc = false name = "functional" [dependencies] +bincode = "*" +bitflags = "*" rustc-serialize = "*" log = "*" env_logger = "*" ansi_term = "*" gpgme = "*" hyper = "*" +lazy_static = "*" +lmdb-sys = "*" tempdir = "*" toml = "*" regex = "*" diff --git a/src/bldr/command/configure.rs b/src/bldr/command/configure.rs index eaf73bd16b..ca331765e7 100644 --- a/src/bldr/command/configure.rs +++ b/src/bldr/command/configure.rs @@ -1,4 +1,4 @@ -// Copyright:: Copyright (c) 2015 Chef Software, Inc. +// Copyright:: Copyright (c) 2015-2016 Chef Software, Inc. 
// // The terms of the Evaluation Agreement (Bldr) between Chef Software Inc. and the party accessing // this file ("Licensee") apply to Licensee's use of the Software until such time that the Software @@ -29,11 +29,7 @@ use package::Package; /// * If the default.toml does not exist, or cannot be read /// * If we can't read the file into a string pub fn display(config: &Config) -> BldrResult<()> { - let package = try!(Package::load(config.deriv(), - config.package(), - config.version().clone(), - config.release().clone(), - None)); + let package = try!(Package::load(config.package(), None)); let mut file = try!(File::open(package.join_path("default.toml"))); let mut s = String::new(); try!(file.read_to_string(&mut s)); diff --git a/src/bldr/command/install.rs b/src/bldr/command/install.rs index 985c47d208..904515ae8d 100644 --- a/src/bldr/command/install.rs +++ b/src/bldr/command/install.rs @@ -46,11 +46,12 @@ //! use std::fs; +use std::str::FromStr; use fs::PACKAGE_CACHE; use error::BldrResult; -use package::Package; -use repo; +use package::{Package, PackageIdent}; +use repo::{self, data_object}; static LOGKEY: &'static str = "CI"; @@ -61,38 +62,30 @@ static LOGKEY: &'static str = "CI"; /// /// * Fails if it cannot create `/opt/bldr/cache/pkgs` /// * Fails if it cannot download the package from the upstream -pub fn from_url(repo: &str, - deriv: &str, - name: &str, - version: Option, - release: Option) - -> BldrResult { - let package = try!(repo::client::show_package(repo, deriv, name, version, release)); +pub fn from_url(repo: &str, ident: &PackageIdent) -> BldrResult { + let package = try!(repo::client::show_package(repo, ident)); try!(fs::create_dir_all(PACKAGE_CACHE)); - let mut installed: Vec = vec![]; - if let Some(ref pkgs) = package.deps { - for pkg in pkgs { - try!(install(repo, &pkg, &mut installed)); - } + let mut installed: Vec = vec![]; + for dep in &package.deps { + let ident = try!(PackageIdent::from_str(&dep.to_string())); + try!(install(repo, &ident, &mut installed)); } - try!(install(repo, &package, &mut installed)); + try!(install(repo, &ident, &mut installed)); Ok(package) } -fn install(repo: &str, package: &Package, acc: &mut Vec) -> BldrResult<()> { +fn install(repo: &str, package: &PackageIdent, acc: &mut Vec) -> BldrResult<()> { if acc.contains(&package) { return Ok(()); } - let archive = try!(repo::client::fetch_package_exact(repo, package, PACKAGE_CACHE)); - try!(archive.verify()); - let package = try!(archive.unpack()); + let archive = try!(repo::client::fetch_package(repo, package, PACKAGE_CACHE)); + let package = try!(archive.ident()); + let deps = try!(archive.deps()); + try!(archive.unpack()); outputln!("Installed {}", package); - let deps = package.deps.clone(); acc.push(package); - if let Some(ref pkgs) = deps { - for pkg in pkgs { - try!(install(repo, &pkg, acc)) - } + for dep in deps { + try!(install(repo, &dep, acc)) } Ok(()) } diff --git a/src/bldr/command/key.rs b/src/bldr/command/key.rs index e90017edc7..948f85d6dd 100644 --- a/src/bldr/command/key.rs +++ b/src/bldr/command/key.rs @@ -6,17 +6,19 @@ use std::fs; use std::path::Path; -use fs::KEY_CACHE; -use config::Config; -use error::{BldrResult, ErrorKind}; -use util::gpg; -use regex::Regex; +use std::process::{Command, Stdio, Child}; + use ansi_term::Colour::{Yellow, Red}; use time::strptime; use rpassword::read_password; -use package::Package; -use std::process::{Command, Stdio, Child}; +use regex::Regex; + +use config::Config; +use error::{BldrResult, ErrorKind}; +use fs::KEY_CACHE; +use 
package::{Package, PackageIdent}; use repo; +use util::gpg; static LOGKEY: &'static str = "KU"; // "key utils" static USER_KEY_COMMENT: &'static str = "bldr user key"; @@ -298,7 +300,6 @@ fn gen_service_key_email(keyname: &str, group: &str) -> String { /// generate a user key name in the form of /// `bldr_keyname`. If a user key is already in the /// form BLDR_KEY_PREFIX.+, then just return it - fn gen_user_key_name(keyname: &str) -> String { let re = String::from(BLDR_KEY_PREFIX) + ".+"; let regex = Regex::new(&re).unwrap(); @@ -325,12 +326,11 @@ impl Drop for DroppableChildProcess { } } - /// run rngd in the background to generate entropy while generating keys. /// The process is killed when it goes out of scope via `DroppableChildProcess`. fn run_rngd() -> BldrResult { debug!("Spawning rngd in the background"); - let res = try!(Package::load("chef", "rngd", None, None, None)); + let res = try!(Package::load(&PackageIdent::new("chef", "rngd", None, None), None)); let rngdpath = res.join_path("sbin/rngd"); debug!("RNGD path = {}", rngdpath); let child = Command::new(rngdpath) diff --git a/src/bldr/command/repo.rs b/src/bldr/command/repo.rs index 3d77af6732..6d877fa6b5 100644 --- a/src/bldr/command/repo.rs +++ b/src/bldr/command/repo.rs @@ -25,17 +25,78 @@ //! Does the same, but the data is stored in `/tmp/whatever`. use config::Config; -use error::BldrResult; -use repo; +use error::{BldrError, BldrResult, ErrorKind}; +use repo::{self, data_object}; +use repo::data_store::{self, Cursor, Database, Transaction}; static LOGKEY: &'static str = "CR"; -/// Starts the repository. +/// Create a repository with the given name in the database. /// /// # Failures /// -/// * Fails if the repository fails to start - canot bind to the port, etc. +/// * The database cannot be read +/// * A write transaction cannot be acquired. +pub fn create_repository(name: &str, config: &Config) -> BldrResult<()> { + let repo = try!(repo::Repo::new(String::from(config.path()))); + let txn = try!(repo.datastore.views.txn_rw()); + let object = data_object::View::new(name); + try!(repo.datastore.views.write(&txn, &object)); + Ok(()) +} + +/// List all repositories in the database. +/// +/// # Failures +/// +/// * The database cannot be read +/// * A read transaction cannot be acquired. +pub fn list_repositories(config: &Config) -> BldrResult<()> { + let repo = try!(repo::Repo::new(String::from(config.path()))); + let mut views: Vec = vec![]; + let txn = try!(repo.datastore.views.txn_ro()); + let mut cursor = try!(txn.cursor_ro()); + match cursor.first() { + Err(BldrError {err: ErrorKind::MdbError(data_store::MdbError::NotFound), ..}) => { + outputln!("No repositories. Create one with `bldr repo-create`."); + return Ok(()); + } + Err(e) => return Err(e), + Ok(value) => views.push(value), + } + loop { + match cursor.next() { + Ok((_, value)) => views.push(value), + Err(_) => break, + } + } + outputln!("Listing {} repositories", views.len()); + for view in views.iter() { + outputln!(" {}", view); + } + Ok(()) +} + +/// Starts the depot server. +/// +/// # Failures +/// +/// * Fails if the depot server fails to start - canot bind to the port, etc. pub fn start(config: &Config) -> BldrResult<()> { outputln!("Repo listening on {:?}", config.repo_addr()); repo::run(&config) } + +/// Analyzes the integrity of the depot's metadata by comparing the metadata with the packages +/// on disk. 
If a package is found on disk that is not present in the metadata it is added to the +/// metadata and if an entry in the metadata doesn't have a matching package archive on disk the +/// entry is dropped from the database. +/// +/// # Failures +/// +/// * The database cannot be read +/// * A write transaction cannot be acquired +pub fn repair(config: &Config) -> BldrResult<()> { + outputln!("Repairing repo at {:?}", config.path()); + repo::repair(&config) +} diff --git a/src/bldr/command/start.rs b/src/bldr/command/start.rs index 803e3789d9..0f59f1906d 100644 --- a/src/bldr/command/start.rs +++ b/src/bldr/command/start.rs @@ -55,7 +55,7 @@ use std::env; use fs::PACKAGE_CACHE; use error::{BldrResult, ErrorKind}; use config::Config; -use package::Package; +use package::{Package, PackageIdent}; use topology::{self, Topology}; use command::install; use repo; @@ -71,22 +71,26 @@ static LOGKEY: &'static str = "CS"; /// * Fails if the `run` method for the topology fails /// * Fails if an unknown topology was specified on the command line pub fn package(config: &Config) -> BldrResult<()> { - match Package::load(config.deriv(), config.package(), None, None, None) { - Ok(mut package) => { + match Package::load(config.package(), None) { + Ok(package) => { if let Some(ref url) = *config.url() { outputln!("Checking remote for newer versions..."); - let latest_pkg = try!(repo::client::show_package(&url, - &package.derivation, - &package.name, - None, - None)); + // It is important to pass `config.package()` to `show_package()` instead of the + // package identifier of the loaded package. This will ensure that if the operator + // starts a package while specifying a version number, they will only automaticaly + // receive release updates for the started package. + // + // If the operator does not specify a version number they will automatically receive + // updates for any releases, regardless of version number, for the started package. 
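The comment above spells out the intended auto-update rule. Below is a minimal, self-contained sketch of just that rule; the `Spec` type and `eligible` function are illustrative stand-ins rather than crate APIs, and whether a candidate is actually *newer* is still decided by the `latest_pkg > package` comparison that follows.

```rust
/// Illustrative stand-in for the operator-supplied spec (`config.package()`).
struct Spec<'a> {
    /// `Some(..)` when the operator pinned a version at start time.
    version: Option<&'a str>,
}

/// A remote release is a candidate for automatic install only if it matches the pin, if any.
fn eligible(spec: &Spec, remote_version: &str) -> bool {
    match spec.version {
        Some(pinned) => remote_version == pinned, // pinned: release updates of that version only
        None => true,                             // unpinned: any version may be offered
    }
}

fn main() {
    let pinned = Spec { version: Some("3.0.1") };
    let unpinned = Spec { version: None };
    assert!(eligible(&pinned, "3.0.1"));   // a new release of 3.0.1 is still eligible
    assert!(!eligible(&pinned, "3.2.0"));  // a different version is filtered out by the pin
    assert!(eligible(&unpinned, "3.2.0")); // no pin: any newer version is a candidate
}
```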
+ let latest_pkg: Package = try!(repo::client::show_package(&url, config.package())) + .into(); if latest_pkg > package { outputln!("Downloading latest version from remote: {}", &latest_pkg); - let archive = try!(repo::client::fetch_package_exact(&url, - &latest_pkg, - PACKAGE_CACHE)); + let archive = try!(repo::client::fetch_package(&url, + &PackageIdent::from(latest_pkg), + PACKAGE_CACHE)); try!(archive.verify()); - package = try!(archive.unpack()); + try!(archive.unpack()); } else { outputln!("Already running latest."); }; @@ -95,25 +99,16 @@ pub fn package(config: &Config) -> BldrResult<()> { } Err(_) => { outputln!("{} not found in local cache", - Yellow.bold().paint(config.package())); + Yellow.bold().paint(config.package().to_string())); match *config.url() { Some(ref url) => { outputln!("Searching for {} in remote {}", - Yellow.bold().paint(config.package()), + Yellow.bold().paint(config.package().to_string()), url); - let package = try!(install::from_url(url, - config.deriv(), - config.package(), - config.version().clone(), - config.release().clone())); + let package: Package = try!(install::from_url(url, config.package())).into(); start_package(package, config) } - None => { - Err(bldr_error!(ErrorKind::PackageNotFound(config.deriv().to_string(), - config.package().to_string(), - config.release().clone(), - config.release().clone()))) - } + None => Err(bldr_error!(ErrorKind::PackageNotFound(config.package().clone()))), } } } diff --git a/src/bldr/command/upload.rs b/src/bldr/command/upload.rs index 3d607d5ab6..1db164d15a 100644 --- a/src/bldr/command/upload.rs +++ b/src/bldr/command/upload.rs @@ -20,9 +20,10 @@ //! complex than just latest version. //! -use error::BldrResult; -use config::Config; +use hyper::status::StatusCode; +use error::{BldrResult, BldrError, ErrorKind}; +use config::Config; use package::Package; use repo; @@ -38,13 +39,22 @@ static LOGKEY: &'static str = "CU"; /// * Fails if it cannot upload the file pub fn package(config: &Config) -> BldrResult<()> { let url = config.url().as_ref().unwrap(); - let package = try!(Package::load(config.deriv(), - config.package(), - config.version().clone(), - config.release().clone(), - None)); + let package = try!(Package::load(config.package(), None)); outputln!("Uploading from {}", package.cache_file().to_string_lossy()); - try!(repo::client::put_package(url, &package)); + match repo::client::put_package(url, &package) { + Ok(()) => (), + Err(BldrError{err: ErrorKind::HTTP(StatusCode::Conflict), ..}) => { + outputln!("Package already exists on remote; skipping."); + } + Err(BldrError{err: ErrorKind::HTTP(StatusCode::UnprocessableEntity), ..}) => { + return Err(bldr_error!(ErrorKind::PackageArchiveMalformed(format!("{}", + package.cache_file().to_string_lossy())))); + } + Err(e) => { + outputln!("Unexpected response from remote"); + return Err(e); + } + } outputln!("Complete"); Ok(()) } diff --git a/src/bldr/config.rs b/src/bldr/config.rs index 883a30b1f4..898a9e8457 100644 --- a/src/bldr/config.rs +++ b/src/bldr/config.rs @@ -13,11 +13,13 @@ //! See the [Config](struct.Config.html) struct for the specific options available. 
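The configuration module gains depot-management subcommands and now carries a structured `PackageIdent` in place of the old derivation/name/version/release strings. A short caller-side sketch under those assumptions; `parse_cli` is a hypothetical helper, while `Command::from_str`, `Config::new`, `set_package`, and `PackageIdent::from_str` are the calls shown in this change:

```rust
use std::str::FromStr;

use config::{Command, Config};
use error::BldrResult;
use package::PackageIdent;

/// Hypothetical CLI-side helper showing how the new pieces fit together.
fn parse_cli(subcommand: &str, pkg_spec: &str) -> BldrResult<(Command, Config)> {
    let command = try!(Command::from_str(subcommand)); // e.g. "repo-create" or "depot-repair"
    let mut config = Config::new();
    config.set_package(try!(PackageIdent::from_str(pkg_spec))); // e.g. "chef/redis/3.0.1"
    Ok((command, config))
}
```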
use std::net; -use gossip::server::GOSSIP_DEFAULT_PORT; -use topology::Topology; -use repo; use std::str::FromStr; + use error::{BldrError, ErrorKind}; +use gossip::server::GOSSIP_DEFAULT_PORT; +use package::PackageIdent; +use repo; +use topology::Topology; static LOGKEY: &'static str = "CFG"; @@ -38,11 +40,12 @@ pub enum Command { Decrypt, Shell, Repo, + RepoCreate, + RepoList, + RepoRepair, Upload, - Configuration, } - impl FromStr for Command { type Err = BldrError; fn from_str(s: &str) -> Result { @@ -51,6 +54,7 @@ impl FromStr for Command { "config" => Ok(Command::Config), "decrypt" => Ok(Command::Decrypt), "depot" => Ok(Command::Repo), + "depot-repair" => Ok(Command::RepoRepair), "download-depot-key" => Ok(Command::DownloadRepoKey), "encrypt" => Ok(Command::Encrypt), "export-key" => Ok(Command::ExportKey), @@ -59,6 +63,8 @@ impl FromStr for Command { "import-key" => Ok(Command::ImportKey), "install" => Ok(Command::Install), "list-keys" => Ok(Command::ListKeys), + "repo-create" => Ok(Command::RepoCreate), + "repo-list" => Ok(Command::RepoList), "sh" => Ok(Command::Shell), "start" => Ok(Command::Start), "upload-depot-key" => Ok(Command::UploadRepoKey), @@ -79,14 +85,11 @@ impl Default for Command { #[derive(Default, Debug)] pub struct Config { command: Command, - package: String, + package: PackageIdent, url: Option, topology: Topology, group: String, path: String, - deriv: String, - version: Option, - release: Option, watch: Vec, key: String, password: Option, @@ -197,17 +200,6 @@ impl Config { &self.outfile } - /// Set the package name - pub fn set_package(&mut self, package: String) -> &mut Config { - self.package = package; - self - } - - /// Return the package name - pub fn package(&self) -> &str { - &self.package - } - /// Set the key expire days pub fn set_expire_days(&mut self, expire_days: u16) -> &mut Config { self.expire_days = Some(expire_days); @@ -218,39 +210,6 @@ impl Config { &self.expire_days } - /// Set the derivation - pub fn set_deriv(&mut self, deriv: String) -> &mut Config { - self.deriv = deriv; - self - } - - /// Return the derivation - pub fn deriv(&self) -> &str { - &self.deriv - } - - /// Set the version - pub fn set_version(&mut self, version: String) -> &mut Config { - self.version = Some(version); - self - } - - /// Return the version - pub fn version(&self) -> &Option { - &self.version - } - - /// Set the release - pub fn set_release(&mut self, release: String) -> &mut Config { - self.release = Some(release); - self - } - - /// Return the release - pub fn release(&self) -> &Option { - &self.release - } - /// Set the path pub fn set_path(&mut self, path: String) -> &mut Config { self.path = path; @@ -347,21 +306,13 @@ impl Config { self } - pub fn package_id(&self) -> String { - if self.version.is_some() && self.release.is_some() { - format!("{}/{}/{}/{}", - &self.deriv, - &self.package, - self.version.as_ref().unwrap(), - self.release.as_ref().unwrap()) - } else if self.version.is_some() { - format!("{}/{}/{}", - self.deriv, - self.package, - self.version.as_ref().unwrap()) - } else { - format!("{}/{}", self.deriv, self.package) - } + pub fn set_package(&mut self, ident: PackageIdent) -> &mut Config { + self.package = ident; + self + } + + pub fn package(&self) -> &PackageIdent { + &self.package } } @@ -390,13 +341,6 @@ mod tests { assert_eq!(c.key(), "foolio"); } - #[test] - fn package() { - let mut c = Config::new(); - c.set_package(String::from("foolio")); - assert_eq!(c.package(), "foolio"); - } - #[test] fn path() { let mut c = Config::new(); diff 
--git a/src/bldr/error.rs b/src/bldr/error.rs index 8c5c043231..69d1c33875 100644 --- a/src/bldr/error.rs +++ b/src/bldr/error.rs @@ -44,11 +44,12 @@ use uuid; use wonder::actor; use ansi_term::Colour::Red; use rustc_serialize::json; - use hyper; use toml; use mustache; use regex; + +use repo::data_store; use package; use output::StructuredOutput; @@ -91,6 +92,7 @@ pub enum ErrorKind { ArchiveError(libarchive::error::ArchiveError), Io(io::Error), CommandNotImplemented, + DbInvalidPath, InstallFailed, WriteSyncFailed, HyperError(hyper::error::Error), @@ -100,9 +102,12 @@ pub enum ErrorKind { GPGError(gpgme::Error), UnpackFailed, TomlParser(Vec), + MdbError(data_store::MdbError), MustacheEncoderError(mustache::encoder::Error), MetaFileNotFound(package::MetaFile), MetaFileMalformed, + MetaFileIO(io::Error), + PackageArchiveMalformed(String), PermissionFailed, BadVersion, RegexParse(regex::Error), @@ -111,8 +116,9 @@ pub enum ErrorKind { FileNotFound(String), KeyNotFound(String), PackageLoad(String), - PackageNotFound(String, String, Option, Option), - RemotePackageNotFound(String, String, Option, Option), + PackageNotFound(package::PackageIdent), + PackageIdentMismatch(String, String), + RemotePackageNotFound(package::PackageIdent), MustacheMergeOnlyMaps, SupervisorSignalFailed, StringFromUtf8Error(string::FromUtf8Error), @@ -152,6 +158,7 @@ impl fmt::Display for BldrError { ErrorKind::ArchiveError(ref err) => format!("{}", err), ErrorKind::Io(ref err) => format!("{}", err), ErrorKind::CommandNotImplemented => format!("Command is not yet implemented!"), + ErrorKind::DbInvalidPath => format!("Invalid filepath to internal datastore"), ErrorKind::InstallFailed => format!("Could not install package!"), ErrorKind::HyperError(ref err) => format!("{}", err), ErrorKind::HTTP(ref e) => format!("{}", e), @@ -167,6 +174,7 @@ impl fmt::Display for BldrError { ErrorKind::TomlParser(ref errs) => { format!("Failed to parse toml:\n{}", toml_parser_string(errs)) } + ErrorKind::MdbError(ref err) => format!("{}", err), ErrorKind::MustacheEncoderError(ref me) => { match *me { mustache::encoder::Error::IoError(ref e) => format!("{}", e), @@ -177,7 +185,12 @@ impl fmt::Display for BldrError { format!("Couldn't read MetaFile: {}, not found", e) } ErrorKind::MetaFileMalformed => { - "Metafile was blank or didn't contain a valid UTF-8 string".to_string() + "MetaFile was blank or didn't contain a valid UTF-8 string".to_string() + } + ErrorKind::MetaFileIO(ref e) => format!("IO error while accessing MetaFile: {:?}", e), + ErrorKind::PackageArchiveMalformed(ref e) => { + format!("Package archive was unreadable or contained unexpected contents: {:?}", + e) } ErrorKind::PermissionFailed => format!("Failed to set permissions"), ErrorKind::BadVersion => format!("Failed to parse a version number"), @@ -187,38 +200,23 @@ impl fmt::Display for BldrError { ErrorKind::FileNotFound(ref e) => format!("File not found at: {}", e), ErrorKind::KeyNotFound(ref e) => format!("Key not found in key cache: {}", e), ErrorKind::PackageLoad(ref e) => format!("Unable to load package from: {}", e), - ErrorKind::PackageNotFound(ref d, ref n, ref v, ref r) => { - if v.is_some() && r.is_some() { - format!("Cannot find package: {}/{}/{}/{}", - d, - n, - v.as_ref().unwrap(), - r.as_ref().unwrap()) - } else if v.is_some() { - format!("Cannot find a release of package: {}/{}/{}", - d, - n, - v.as_ref().unwrap()) + ErrorKind::PackageNotFound(ref pkg) => { + if pkg.fully_qualified() { + format!("Cannot find package: {}", pkg) } else { - format!("Cannot 
find a release of package: {}/{}", d, n) + format!("Cannot find a release of package: {}", pkg) } } - ErrorKind::RemotePackageNotFound(ref d, ref n, ref v, ref r) => { - if v.is_some() && r.is_some() { - format!("Cannot find package in any sources: {}/{}/{}/{}", - d, - n, - v.as_ref().unwrap(), - r.as_ref().unwrap()) - } else if v.is_some() { - format!("Cannot find a release of package in any sources: {}/{}/{}", - d, - n, - v.as_ref().unwrap()) + ErrorKind::PackageIdentMismatch(ref expect, ref got) => { + format!("Encountered an unexpected package identity: expected={}, got={}", + expect, + got) + } + ErrorKind::RemotePackageNotFound(ref pkg) => { + if pkg.fully_qualified() { + format!("Cannot find package in any sources: {}", pkg) } else { - format!("Cannot find a release of package in any sources: {}/{}", - d, - n) + format!("Cannot find a release of package in any sources: {}", pkg) } } ErrorKind::MustacheMergeOnlyMaps => format!("Can only merge two Mustache::Data::Maps"), @@ -257,8 +255,9 @@ impl fmt::Display for BldrError { derivation/name (example: chef/redis)", e) } - ErrorKind::InvalidKeyParameter(ref e) => - format!("Invalid parameter for key generation: {:?}", e), + ErrorKind::InvalidKeyParameter(ref e) => { + format!("Invalid parameter for key generation: {:?}", e) + } ErrorKind::JsonEncode(ref e) => format!("JSON encoding error: {}", e), ErrorKind::JsonDecode(ref e) => format!("JSON decoding error: {}", e), ErrorKind::InitialPeers => format!("Failed to contact initial peers"), @@ -282,6 +281,7 @@ impl Error for BldrError { ErrorKind::ArchiveReadFailed(_) => "Failed to read contents of package archive", ErrorKind::Io(ref err) => err.description(), ErrorKind::CommandNotImplemented => "Command is not yet implemented!", + ErrorKind::DbInvalidPath => "A bad filepath was provided for an internal datastore", ErrorKind::InstallFailed => "Could not install package!", ErrorKind::WriteSyncFailed => { "Could not write to destination; bytes written was 0 on a non-0 buffer" @@ -293,10 +293,15 @@ impl Error for BldrError { ErrorKind::GPGError(_) => "gpgme error", ErrorKind::UnpackFailed => "Failed to unpack a package", ErrorKind::TomlParser(_) => "Failed to parse toml!", + ErrorKind::MdbError(_) => "Database error", ErrorKind::MustacheEncoderError(_) => "Failed to encode mustache template", ErrorKind::MetaFileNotFound(_) => "Failed to read an archive's metafile", ErrorKind::MetaFileMalformed => { - "Metafile was blank or didn't contain a valid UTF-8 string" + "MetaFile was blank or didn't contain a valid UTF-8 string" + } + ErrorKind::MetaFileIO(_) => "MetaFile could not be read or written to", + ErrorKind::PackageArchiveMalformed(_) => { + "Package archive was unreadable or had unexpected contents" } ErrorKind::PermissionFailed => "Failed to set permissions", ErrorKind::BadVersion => "Failed to parse a version number", @@ -306,8 +311,11 @@ impl Error for BldrError { ErrorKind::FileNotFound(_) => "File not found", ErrorKind::KeyNotFound(_) => "Key not found in key cache", ErrorKind::PackageLoad(_) => "Unable to load package from path", - ErrorKind::PackageNotFound(_, _, _, _) => "Cannot find a package", - ErrorKind::RemotePackageNotFound(_, _, _, _) => "Cannot find a package in any sources", + ErrorKind::PackageNotFound(_) => "Cannot find a package", + ErrorKind::PackageIdentMismatch(_, _) => { + "Expected a package identity but received another" + } + ErrorKind::RemotePackageNotFound(_) => "Cannot find a package in any sources", ErrorKind::MustacheMergeOnlyMaps => "Can only merge two 
Mustache::Data::Maps", ErrorKind::SupervisorSignalFailed => { "Failed to send a signal to the process supervisor" @@ -315,9 +323,7 @@ impl Error for BldrError { ErrorKind::StringFromUtf8Error(_) => { "Failed to convert a string from a Vec as UTF-8" } - ErrorKind::StrFromUtf8Error(_) => { - "Failed to convert a str from a &[u8] as UTF-8" - } + ErrorKind::StrFromUtf8Error(_) => "Failed to convert a str from a &[u8] as UTF-8", ErrorKind::SupervisorDied => "The supervisor died", ErrorKind::NulError(_) => { "An attempt was made to build a CString with a null byte inside it" @@ -434,6 +440,12 @@ impl From for BldrError { } } +impl From for BldrError { + fn from(err: data_store::MdbError) -> BldrError { + bldr_error!(ErrorKind::MdbError(err)) + } +} + impl From for BldrError { fn from(err: actor::ActorError) -> Self { bldr_error!(ErrorKind::ActorError(err)) diff --git a/src/bldr/lib.rs b/src/bldr/lib.rs index 946aa2d19c..367f749751 100644 --- a/src/bldr/lib.rs +++ b/src/bldr/lib.rs @@ -32,6 +32,9 @@ //! * [The bldr Repo; http based package repository](repo) //! +extern crate bincode; +#[macro_use] +extern crate bitflags; #[macro_use] extern crate hyper; #[macro_use] @@ -42,12 +45,15 @@ extern crate rustc_serialize; extern crate toml; extern crate ansi_term; extern crate gpgme; +#[macro_use] +extern crate lazy_static; extern crate libarchive; extern crate regex; extern crate libc; extern crate url; extern crate fnv; extern crate iron; +extern crate lmdb_sys; #[macro_use] extern crate router; extern crate time; diff --git a/src/bldr/package/archive.rs b/src/bldr/package/archive.rs index 9af7810ed3..270cd8c5eb 100644 --- a/src/bldr/package/archive.rs +++ b/src/bldr/package/archive.rs @@ -7,7 +7,7 @@ use std::fmt; use std::io::{Seek, SeekFrom}; use std::path::PathBuf; -use std::str; +use std::str::{self, FromStr}; use libarchive::writer; use libarchive::reader::{self, Reader}; @@ -15,7 +15,7 @@ use libarchive::archive::{Entry, ReadFilter, ReadFormat}; use regex::Regex; use error::{BldrResult, BldrError, ErrorKind}; -use package::Package; +use package::PackageIdent; use util::gpg; static LOGKEY: &'static str = "PA"; @@ -23,7 +23,9 @@ static LOGKEY: &'static str = "PA"; #[derive(Debug)] pub enum MetaFile { CFlags, + Config, Deps, + TDeps, Exposes, Ident, LdRunPath, @@ -36,7 +38,9 @@ impl fmt::Display for MetaFile { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let id = match *self { MetaFile::CFlags => "CFLAGS", + MetaFile::Config => "default.toml", MetaFile::Deps => "DEPS", + MetaFile::TDeps => "TDEPS", MetaFile::Exposes => "EXPOSES", MetaFile::Ident => "IDENT", MetaFile::LdRunPath => "LD_RUN_PATH", @@ -58,53 +62,92 @@ impl PackageArchive { PackageArchive { path: path } } - /// A package struct representing the contents of this archive. + pub fn cflags(&self) -> BldrResult> { + match self.read_metadata(MetaFile::CFlags) { + Ok(data) => Ok(Some(data)), + Err(BldrError{err: ErrorKind::MetaFileNotFound(_), ..}) => Ok(None), + Err(e) => Err(e), + } + } + + pub fn config(&self) -> BldrResult> { + match self.read_metadata(MetaFile::Config) { + Ok(data) => Ok(Some(data)), + Err(BldrError{err: ErrorKind::MetaFileNotFound(_), ..}) => Ok(None), + Err(e) => Err(e), + } + } + + /// Returns a list of package identifiers representing the runtime package dependencies for + /// this archive. 
/// /// # Failures /// - /// * If an `IDENT` metafile is not found in the archive + /// * If a `DEPS` metafile is not found in the archive /// * If the archive cannot be read /// * If the archive cannot be verified - pub fn package(&self) -> BldrResult { - let body = try!(self.read_metadata(MetaFile::Ident)); - let mut package = try!(Package::from_ident(&body)); - match self.deps() { - Ok(Some(deps)) => { - for dep in deps { - package.add_dep(dep); - } - } - Ok(None) => {} - Err(e) => return Err(e), - } - Ok(package) + pub fn deps(&self) -> BldrResult> { + self.read_deps(MetaFile::Deps) } - /// List of package structs representing the package dependencies for this archive. + /// Returns a list of package identifiers representing the transitive runtime package + /// dependencies for this archive. /// /// # Failures /// - /// * If a `DEPS` metafile is not found in the archive + /// * If a `TDEPS` metafile is not found in the archive /// * If the archive cannot be read /// * If the archive cannot be verified - pub fn deps(&self) -> BldrResult>> { - match self.read_metadata(MetaFile::Deps) { - Ok(body) => { - let dep_strs: Vec<&str> = body.split("\n").collect(); - let mut deps = vec![]; - for dep in &dep_strs { - match Package::from_ident(&dep) { - Ok(package) => deps.push(package), - Err(_) => continue, - } - } - Ok(Some(deps)) + pub fn tdeps(&self) -> BldrResult> { + self.read_deps(MetaFile::TDeps) + } + + pub fn exposes(&self) -> BldrResult> { + match self.read_metadata(MetaFile::Exposes) { + Ok(data) => { + let ports: Vec = data.split(" ") + .filter_map(|port| port.parse::().ok()) + .collect(); + Ok(ports) } + Err(BldrError{err: ErrorKind::MetaFileNotFound(_), ..}) => Ok(vec![]), + Err(e) => Err(e), + } + } + + pub fn ident(&self) -> BldrResult { + let data = try!(self.read_metadata(MetaFile::Ident)); + PackageIdent::from_str(&data) + } + + pub fn ld_run_path(&self) -> BldrResult> { + match self.read_metadata(MetaFile::LdRunPath) { + Ok(data) => Ok(Some(data)), Err(BldrError{err: ErrorKind::MetaFileNotFound(_), ..}) => Ok(None), Err(e) => Err(e), } } + pub fn ldflags(&self) -> BldrResult> { + match self.read_metadata(MetaFile::LdFlags) { + Ok(data) => Ok(Some(data)), + Err(BldrError {err: ErrorKind::MetaFileNotFound(_), ..}) => Ok(None), + Err(e) => Err(e), + } + } + + pub fn manifest(&self) -> BldrResult { + self.read_metadata(MetaFile::Manifest) + } + + pub fn path(&self) -> BldrResult> { + match self.read_metadata(MetaFile::Path) { + Ok(data) => Ok(Some(data)), + Err(BldrError {err: ErrorKind::MetaFileNotFound(_), ..}) => Ok(None), + Err(e) => Err(e), + } + } + /// A plain string representation of the archive's file name. 
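Taken together, the accessors above let a consumer inspect an archive's metadata without unpacking it. A usage sketch, assuming the crate API as introduced here (`inspect` is a hypothetical helper; note that a missing optional metafile surfaces as `Ok(None)` rather than an error):

```rust
use std::path::PathBuf;

use error::BldrResult;
use package::PackageArchive;

fn inspect(path: PathBuf) -> BldrResult<()> {
    let archive = PackageArchive::new(path);
    let ident = try!(archive.ident()); // IDENT is mandatory, so a missing metafile is an error
    println!("{} exposes ports {:?}", ident, try!(archive.exposes()));
    match try!(archive.path()) {
        Some(path) => println!("PATH metafile: {}", path),
        None => println!("no PATH metafile"), // an absent optional metafile is not an error
    }
    for dep in try!(archive.deps()) {
        println!("runtime dependency: {}", dep);
    }
    Ok(())
}
```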
pub fn file_name(&self) -> String { self.path.file_name().unwrap().to_string_lossy().into_owned() @@ -129,7 +172,7 @@ impl PackageArchive { /// # Failures /// /// * If the package cannot be unpacked via gpg - pub fn unpack(&self) -> BldrResult { + pub fn unpack(&self) -> BldrResult<()> { let file = self.path.to_str().unwrap().to_string(); let mut out = try!(gpg::verify(&file)); try!(out.seek(SeekFrom::Start(0))); @@ -141,7 +184,29 @@ impl PackageArchive { try!(writer.set_standard_lookup()); try!(writer.write(&mut reader, Some("/"))); try!(writer.close()); - self.package() + Ok(()) + } + + fn read_deps(&self, file: MetaFile) -> BldrResult> { + let mut deps: Vec = vec![]; + match self.read_metadata(file) { + Ok(body) => { + let ids: Vec = body.split("\n").map(|d| d.to_string()).collect(); + for id in &ids { + let package = try!(PackageIdent::from_str(id)); + if !package.fully_qualified() { + // JW TODO: use a more appropriate erorr to describe the invalid + // user input here. (user because a package was generated by a user + // and read into program) + return Err(bldr_error!(ErrorKind::InvalidPackageIdent(package.to_string()))); + } + deps.push(package); + } + Ok(deps) + } + Err(BldrError{err: ErrorKind::MetaFileNotFound(_), ..}) => Ok(deps), + Err(e) => Err(e), + } } fn read_metadata(&self, file: MetaFile) -> BldrResult { @@ -167,7 +232,7 @@ impl PackageArchive { match reader.read_block() { Ok(Some(bytes)) => { match str::from_utf8(bytes) { - Ok(content) => Ok(content.to_string()), + Ok(content) => Ok(content.trim().to_string()), Err(_) => Err(bldr_error!(ErrorKind::MetaFileMalformed)), } } diff --git a/src/bldr/package/mod.rs b/src/bldr/package/mod.rs index 6e3b2ca32b..80756e971b 100644 --- a/src/bldr/package/mod.rs +++ b/src/bldr/package/mod.rs @@ -7,6 +7,7 @@ pub mod archive; pub mod hooks; pub mod updater; + pub use self::archive::{PackageArchive, MetaFile}; pub use self::updater::{PackageUpdater, PackageUpdaterActor, UpdaterMessage}; pub use self::hooks::HookType; @@ -16,9 +17,10 @@ use std::fmt; use std::fs::{self, DirEntry, File}; use std::os::unix; use std::path::{Path, PathBuf}; +use std::str::FromStr; +use std::string::ToString; use std::io::prelude::*; use std::process::Command; -use std::io::BufReader; use std::env; use regex::Regex; @@ -35,23 +37,161 @@ const HEALTHCHECK_FILENAME: &'static str = "health_check"; const RECONFIGURE_FILENAME: &'static str = "reconfigure"; const RUN_FILENAME: &'static str = "run"; -#[derive(Debug, Clone, Eq, RustcDecodable, RustcEncodable)] +#[derive(Debug, Clone, RustcDecodable, RustcEncodable)] pub struct Package { pub derivation: String, pub name: String, pub version: String, pub release: String, - pub deps: Option>, + pub deps: Vec, + pub tdeps: Vec, } impl fmt::Display for Package { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, - "{}/{}/{}/{}", - self.derivation, - self.name, - self.version, - self.release) + write!(f, "{}", self.ident()) + } +} + +#[derive(RustcEncodable, RustcDecodable, PartialEq, Debug, Clone)] +pub struct PackageIdent { + pub derivation: String, + pub name: String, + pub version: Option, + pub release: Option, +} + +impl PackageIdent { + /// Creates a new package identifier + pub fn new>(deriv: T, name: T, version: Option, release: Option) -> Self { + PackageIdent { + derivation: deriv.into(), + name: name.into(), + version: version.map(|v| v.into()), + release: release.map(|v| v.into()), + } + } + + pub fn fully_qualified(&self) -> bool { + self.version.is_some() && self.release.is_some() + } + + pub 
fn satisfies(&self, other: &Self) -> bool { + if self.derivation != other.derivation || self.name != other.name { + return false; + } + if self.version.is_some() { + if other.version.is_none() { + return true; + } + if *self.version.as_ref().unwrap() != *other.version.as_ref().unwrap() { + return false; + } + } + if self.release.is_some() { + if other.release.is_none() { + return true; + } + if *self.release.as_ref().unwrap() != *other.release.as_ref().unwrap() { + return false; + } + } + true + } +} + +impl Default for PackageIdent { + fn default() -> PackageIdent { + PackageIdent::new("", "", None, None) + } +} + +impl fmt::Display for PackageIdent { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + if self.version.is_some() && self.release.is_some() { + write!(f, "{}/{}/{}/{}", + self.derivation, + self.name, + self.version.as_ref().unwrap(), + self.release.as_ref().unwrap()) + } else if self.version.is_some() { + write!(f, "{}/{}/{}", + self.derivation, + self.name, + self.version.as_ref().unwrap()) + } else { + write!(f, "{}/{}", self.derivation, self.name) + } + } +} + +impl From for PackageIdent { + fn from(value: Package) -> PackageIdent { + PackageIdent::new(value.derivation, value.name, Some(value.version), Some(value.release)) + } +} + +impl FromStr for PackageIdent { + type Err = BldrError; + + fn from_str(value: &str) -> Result { + let items: Vec<&str> = value.split("/").collect(); + let (deriv, name, ver, rel) = match items.len() { + 2 => (items[0], items[1], None, None), + 3 => (items[0], items[1], Some(items[2]), None), + 4 => (items[0], items[1], Some(items[2]), Some(items[3])), + _ => return Err(bldr_error!(ErrorKind::InvalidPackageIdent(value.to_string()))), + }; + Ok(PackageIdent::new(deriv, name, ver, rel)) + } +} + +impl PartialOrd for PackageIdent { + /// Packages can be compared according to the following: + /// + /// * Derivation is ignored in the comparison - my redis and + /// your redis compare the same. + /// * If the names are not equal, they cannot be compared. + /// * If the versions are greater/lesser, return that as + /// the ordering. + /// * If the versions are equal, return the greater/lesser + /// for the release. 
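A brief usage sketch of `PackageIdent`, tying together construction, parsing, qualification, spec matching, and the ordering rules listed above (assumes the API in this diff; the release strings are made-up examples):

```rust
use std::str::FromStr;

use package::PackageIdent;

fn demo() {
    // A spec may omit version and release; an installed package is always fully qualified.
    let spec = PackageIdent::new("chef", "redis", Some("3.0.1"), None);
    let installed = PackageIdent::from_str("chef/redis/3.0.1/20160104221111").unwrap();
    assert!(!spec.fully_qualified());
    assert!(installed.fully_qualified());

    // `satisfies` is asked of the concrete package against the (possibly partial) spec,
    // exactly as in `Package::load`'s filter.
    assert!(installed.satisfies(&spec));
    assert!(!installed.satisfies(&PackageIdent::new("chef", "redis", Some("3.2.0"), None)));

    // Ordering ignores the derivation and compares version, then release.
    let newer = PackageIdent::from_str("chef/redis/3.0.1/20160201000000").unwrap();
    assert!(newer > installed);
    assert_eq!(format!("{}", spec), "chef/redis/3.0.1");
}
```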
+ fn partial_cmp(&self, other: &PackageIdent) -> Option { + if self.name != other.name { + return None; + } + if self.version.is_none() && other.version.is_none() { + return None; + } + if self.version.is_none() && other.version.is_some() { + return Some(Ordering::Less); + } + if self.version.is_some() && other.version.is_none() { + return Some(Ordering::Greater); + } + if self.release.is_none() && other.release.is_none() { + return None; + } + if self.release.is_none() && other.release.is_some() { + return Some(Ordering::Less); + } + if self.release.is_some() && other.release.is_none() { + return Some(Ordering::Greater); + } + let ord = match version_sort(self.version.as_ref().unwrap(), other.version.as_ref().unwrap()) { + Ok(ord) => ord, + Err(e) => { + error!("This was a very bad version number: {:?}", e); + return None; + } + }; + match ord { + Ordering::Greater => return Some(Ordering::Greater), + Ordering::Less => return Some(Ordering::Less), + Ordering::Equal => { + return Some(self.release.cmp(&other.release)); + } + } } } @@ -84,47 +224,64 @@ pub enum Signal { } impl Package { - pub fn new(deriv: String, name: String, version: String, release: String) -> Self { - Package { - derivation: deriv, - name: name, - version: version, - release: release, - deps: None, - } + pub fn deps>(ident: &PackageIdent, home: P) -> BldrResult> { + Self::read_deps(home.as_ref().join(ident.to_string()), MetaFile::Deps) } - pub fn add_dep(&mut self, dep: Package) -> &mut Self { - if let Some(ref mut deps) = self.deps { - deps.push(dep); - } else { - self.deps = Some(vec![dep]); - } - self + pub fn tdeps>(ident: &PackageIdent, home: P) -> BldrResult> { + Self::read_deps(home.as_ref().join(ident.to_string()), MetaFile::TDeps) } - pub fn from_ident(id: &str) -> BldrResult { - let items: Vec<&str> = id.split("/").collect(); - match items.len() { - 4 => { - Ok(Self::new(items[0].trim().to_string(), - items[1].trim().to_string(), - items[2].trim().to_string(), - items[3].trim().to_string())) + /// Helper function for reading metafiles containing dependencies represented by package + /// identifiers separated by new lines + /// + /// # Failures + /// + /// * Metafile could not be found + /// * Contents of the metafile could not be read + /// * Contents of the metafile are unreadable or malformed + fn read_deps>(path: P, file: MetaFile) -> BldrResult> { + let mut deps: Vec = vec![]; + match Self::read_metadata(path.as_ref(), file) { + Ok(body) => { + let ids: Vec = body.split("\n").map(|d| d.to_string()).collect(); + for id in &ids { + let package = try!(PackageIdent::from_str(id)); + if !package.fully_qualified() { + return Err(bldr_error!(ErrorKind::InvalidPackageIdent(package.to_string()))); + } + deps.push(package); + } + Ok(deps) } - _ => Err(bldr_error!(ErrorKind::InvalidPackageIdent(id.to_string()))), + Err(BldrError{err: ErrorKind::MetaFileNotFound(_), ..}) => Ok(deps), + Err(e) => Err(e), } } - pub fn from_path(spath: &str) -> BldrResult { - if Path::new(spath).starts_with(PACKAGE_HOME) { - let items: Vec<&str> = spath.split("/").collect(); - Ok(Self::new(items[3].trim().to_string(), - items[4].trim().to_string(), - items[5].trim().to_string(), - items[6].trim().to_string())) - } else { - Err(bldr_error!(ErrorKind::PackageLoad(spath.to_string()))) + /// Read the contents of the given metafile from a package at the given filepath. 
+ /// + /// # Failures + /// + /// * A metafile could not be found + /// * Contents of the metafile could not be read + /// * Contents of the metafile are unreadable or malformed + fn read_metadata>(path: P, file: MetaFile) -> BldrResult { + let filepath = path.as_ref().join(file.to_string()); + match fs::metadata(&filepath) { + Ok(_) => { + match File::open(&filepath) { + Ok(mut f) => { + let mut data = String::new(); + if f.read_to_string(&mut data).is_err() { + return Err(bldr_error!(ErrorKind::MetaFileMalformed)); + } + Ok(data.trim().to_string()) + }, + Err(e) => Err(bldr_error!(ErrorKind::MetaFileIO(e))) + } + }, + Err(_) => Err(bldr_error!(ErrorKind::MetaFileNotFound(file))) } } @@ -135,49 +292,52 @@ impl Package { /// package will be returned if their optional value is not specified. If only a version is /// specified, the latest release of that package derivation, name, and version is returned. /// - /// An optional `home` path may be provided to search for a package in the non-default path. - pub fn load(deriv: &str, - pkg: &str, - ver: Option, - rel: Option, - home: Option<&str>) - -> BldrResult { + /// An optional `home` path may be provided to search for a package in a non-default path. + pub fn load(ident: &PackageIdent, home: Option<&str>) -> BldrResult { let path = home.unwrap_or(PACKAGE_HOME); - let pl = try!(Self::package_list(path)); - let latest: Option = pl.iter() - .filter(|&p| { - if ver.is_some() && rel.is_some() { - p.name == pkg && p.derivation == deriv && - p.version == *ver.as_ref().unwrap() && - p.release == *rel.as_ref().unwrap() - } else if ver.is_some() { - p.name == pkg && p.derivation == deriv && - p.version == *ver.as_ref().unwrap() - } else { - p.name == pkg && p.derivation == deriv - } - }) - .fold(None, |winner, b| { - match winner { - Some(a) => { - match a.partial_cmp(&b) { - Some(Ordering::Greater) => Some(a), - Some(Ordering::Equal) => Some(a), - Some(Ordering::Less) => Some(b.clone()), - None => Some(a), + if ident.fully_qualified() { + Ok(Package { + derivation: ident.derivation.clone(), + name: ident.name.clone(), + version: ident.version.as_ref().unwrap().clone(), + release: ident.release.as_ref().unwrap().clone(), + deps: try!(Self::deps(ident, path)), + tdeps: try!(Self::tdeps(ident, path)), + }) + } else { + let pl = try!(Self::package_list(path)); + let latest: Option = pl.iter() + .filter(|&p| p.satisfies(ident)) + .fold(None, |winner, b| { + match winner { + Some(a) => { + match a.partial_cmp(&b) { + Some(Ordering::Greater) => Some(a), + Some(Ordering::Equal) => Some(a), + Some(Ordering::Less) => Some(b.clone()), + None => Some(a), + } } + None => Some(b.clone()), } - None => Some(b.clone()), - } - }); - latest.ok_or(bldr_error!(ErrorKind::PackageNotFound(deriv.to_string(), - pkg.to_string(), - ver, - rel))) + }); + if let Some(id) = latest { + Ok(Package { + deps: try!(Self::deps(&id, path)), + tdeps: try!(Self::tdeps(&id, path)), + derivation: id.derivation, + name: id.name, + version: id.version.unwrap(), + release: id.release.unwrap(), + }) + } else { + Err(bldr_error!(ErrorKind::PackageNotFound(ident.clone()))) + } + } } pub fn signal(&self, signal: Signal) -> BldrResult { - let runit_pkg = try!(Self::load("chef", "runit", None, None, None)); + let runit_pkg = try!(Self::load(&PackageIdent::new("chef", "runit", None, None), None)); let signal_arg = match signal { Signal::Status => "status", Signal::Up => "up", @@ -260,30 +420,6 @@ impl Package { } } - /// Creates a Package for every TDEP - pub fn tdeps(&self) -> BldrResult> { - let 
mut tdeps = Vec::new(); - match fs::metadata(self.join_path("TDEPS")) { - Ok(_) => { - let tdep_file = File::open(self.join_path("TDEPS")).unwrap(); - let reader = BufReader::new(tdep_file); - for line in reader.lines() { - match line { - Ok(tdep) => { - let pkg = try!(Package::from_ident(&tdep)); - tdeps.push(pkg); - } - Err(e) => { - outputln!("Package {} has malformed TDEPS: {}", self, e); - } - } - } - } - Err(_) => {} - } - Ok(tdeps) - } - /// Returns a string with the full run path for this package. This path is composed of any /// binary paths specified by this package, or its TDEPS, plus bldr or its TDEPS, plus the /// existing value of the PATH variable. @@ -292,23 +428,23 @@ impl Package { /// to worry much about context. pub fn run_path(&self) -> BldrResult { let mut run_path = String::new(); - if let Some(path) = self.path_meta() { + if let Some(path) = try!(self.path_meta()) { run_path = format!("{}", path); } - let tdeps = try!(self.tdeps()); + let tdeps: Vec = try!(self.load_tdeps()); for dep in tdeps.iter() { - if let Some(path) = dep.path_meta() { + if let Some(path) = try!(dep.path_meta()) { run_path = format!("{}:{}", run_path, path); } } if self.name != "bldr" { - let bldr_pkg = try!(Package::load("chef", "bldr", None, None, None)); - if let Some(path) = bldr_pkg.path_meta() { + let bldr_pkg = try!(Package::load(&PackageIdent::new("chef", "bldr", None, None), None)); + if let Some(path) = try!(bldr_pkg.path_meta()) { run_path = format!("{}:{}", run_path, path); } - let tdeps = try!(bldr_pkg.tdeps()); + let tdeps: Vec = try!(bldr_pkg.load_tdeps()); for dep in tdeps.iter() { - if let Some(path) = dep.path_meta() { + if let Some(path) = try!(dep.path_meta()) { run_path = format!("{}:{}", run_path, path); } } @@ -325,21 +461,18 @@ impl Package { } /// Return the PATH string from the package metadata, if it exists - pub fn path_meta(&self) -> Option { - match fs::metadata(self.join_path("PATH")) { - Ok(_) => { - let mut path_file = File::open(self.join_path("PATH")).unwrap(); - let mut path_string = String::new(); - path_file.read_to_string(&mut path_string).unwrap(); - Some(String::from(path_string.trim_right_matches("\n"))) - } - Err(_) => { - None - } + /// + /// # Failures + /// + /// * The package contains a Path metafile but it could not be read or it was malformed + pub fn path_meta(&self) -> BldrResult> { + match Self::read_metadata(self.path(), MetaFile::Path) { + Ok(data) => Ok(Some(data)), + Err(BldrError {err: ErrorKind::MetaFileNotFound(_), ..}) => Ok(None), + Err(e) => Err(e), } } - pub fn hook_template_path(&self, hook_type: &HookType) -> PathBuf { let base = PathBuf::from(self.join_path("hooks")); match *hook_type { @@ -445,14 +578,7 @@ impl Package { /// helpers above. 
pub fn config_files(&self) -> BldrResult> { let mut files: Vec = Vec::new(); - let config_dir = self.join_path("config"); - { - let p = Path::new(&config_dir); - if !p.exists() { - return Ok(files); - } - } - for config in try!(fs::read_dir(config_dir)) { + for config in try!(fs::read_dir(self.join_path("config"))) { let config = try!(config); match config.path().file_name() { Some(filename) => { @@ -464,6 +590,14 @@ impl Package { Ok(files) } + pub fn ident(&self) -> String { + format!("{}/{}/{}/{}", + self.derivation, + self.name, + self.version, + self.release) + } + /// Run iniitalization hook if present pub fn initialize(&self, context: &ServiceConfig) -> BldrResult<()> { if let Some(hook) = self.hooks().init_hook { @@ -555,9 +689,25 @@ impl Package { Ok(result) } + /// Attempts to load the extracted package for each transitive dependency and returns a + /// `Package` struct representation of each in the returned vector. + /// + /// # Failures + /// + /// * Any transitive dependency could not be located or it's contents could not be read + /// from disk + fn load_tdeps(&self) -> BldrResult> { + let mut deps = Vec::with_capacity(self.tdeps.len()); + for dep in self.tdeps.iter() { + let package = try!(Package::load(dep, None)); + deps.push(package); + } + Ok(deps) + } + /// Returns a list of package structs built from the contents of the given directory. - fn package_list(path: &str) -> BldrResult> { - let mut package_list: Vec = Vec::new(); + fn package_list(path: &str) -> BldrResult> { + let mut package_list: Vec = vec![]; if try!(fs::metadata(path)).is_dir() { try!(Self::walk_derivations(&path, &mut package_list)); } @@ -567,7 +717,7 @@ impl Package { /// Helper function for package_list. Walks the given path for derivation directories /// and builds on the given package list by recursing into name, version, and release /// directories. - fn walk_derivations(path: &str, packages: &mut Vec) -> BldrResult<()> { + fn walk_derivations(path: &str, packages: &mut Vec) -> BldrResult<()> { for entry in try!(fs::read_dir(path)) { let derivation = try!(entry); if try!(fs::metadata(derivation.path())).is_dir() { @@ -579,7 +729,7 @@ impl Package { /// Helper function for walk_derivations. Walks the given derivation DirEntry for name /// directories and recurses into them to find version and release directories. - fn walk_names(derivation: &DirEntry, packages: &mut Vec) -> BldrResult<()> { + fn walk_names(derivation: &DirEntry, packages: &mut Vec) -> BldrResult<()> { for name in try!(fs::read_dir(derivation.path())) { let name = try!(name); let derivation = derivation.file_name().to_string_lossy().into_owned().to_string(); @@ -594,7 +744,7 @@ impl Package { /// into them to find release directories. 
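These directory walkers are what `Package::load` leans on when it has to resolve a partial identifier to the newest installed release. A caller-side sketch of both load paths, assuming the crate API in this diff (`load_examples` is a hypothetical helper and the release string is a made-up example):

```rust
use std::str::FromStr;

use error::{BldrError, BldrResult, ErrorKind};
use package::{Package, PackageIdent};

fn load_examples() -> BldrResult<()> {
    // A fully qualified ident loads that exact release from the package home.
    let exact = try!(PackageIdent::from_str("chef/redis/3.0.1/20160104221111"));
    let _pkg = try!(Package::load(&exact, None));

    // A partial ident resolves to the newest installed release that satisfies it,
    // or fails with `PackageNotFound` carrying the ident that could not be matched.
    match Package::load(&PackageIdent::new("chef", "redis", None, None), None) {
        Ok(latest) => println!("newest installed release: {}", latest),
        Err(BldrError { err: ErrorKind::PackageNotFound(ref ident), .. }) => {
            println!("nothing installed satisfies {}", ident)
        }
        Err(e) => return Err(e),
    }
    Ok(())
}
```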
fn walk_versions(derivation: &String, name: &DirEntry, - packages: &mut Vec) + packages: &mut Vec) -> BldrResult<()> { for version in try!(fs::read_dir(name.path())) { let version = try!(version); @@ -612,13 +762,13 @@ impl Package { fn walk_releases(derivation: &String, name: &String, version: &DirEntry, - packages: &mut Vec) + packages: &mut Vec) -> BldrResult<()> { for release in try!(fs::read_dir(version.path())) { let release = try!(release).file_name().to_string_lossy().into_owned().to_string(); let version = version.file_name().to_string_lossy().into_owned().to_string(); - let package = Package::new(derivation.clone(), name.clone(), version, release); - packages.push(package) + let ident = PackageIdent::new(derivation.clone(), name.clone(), Some(version), Some(release)); + packages.push(ident) } Ok(()) } @@ -770,33 +920,33 @@ impl PartialOrd for Package { #[cfg(test)] mod tests { - use super::{Package, split_version, version_sort}; + use super::{Package, PackageIdent, split_version, version_sort}; use std::cmp::Ordering; use std::cmp::PartialOrd; #[test] - fn package_partial_eq() { - let a = Package::new("bldr".to_string(), + fn package_ident_partial_eq() { + let a = PackageIdent::new("bldr".to_string(), "bldr".to_string(), - "1.0.0".to_string(), - "20150521131555".to_string()); - let b = Package::new("bldr".to_string(), + Some("1.0.0".to_string()), + Some("20150521131555".to_string())); + let b = PackageIdent::new("bldr".to_string(), "bldr".to_string(), - "1.0.0".to_string(), - "20150521131555".to_string()); + Some("1.0.0".to_string()), + Some("20150521131555".to_string())); assert_eq!(a, b); } #[test] - fn package_partial_ord() { - let a = Package::new("bldr".to_string(), + fn package_ident_partial_ord() { + let a = PackageIdent::new("bldr".to_string(), "bldr".to_string(), - "1.0.1".to_string(), - "20150521131555".to_string()); - let b = Package::new("bldr".to_string(), + Some("1.0.1".to_string()), + Some("20150521131555".to_string())); + let b = PackageIdent::new("bldr".to_string(), "bldr".to_string(), - "1.0.0".to_string(), - "20150521131555".to_string()); + Some("1.0.0".to_string()), + Some("20150521131555".to_string())); match a.partial_cmp(&b) { Some(ord) => assert_eq!(ord, Ordering::Greater), None => panic!("Ordering should be greater"), @@ -804,15 +954,15 @@ mod tests { } #[test] - fn package_partial_ord_bad_name() { - let a = Package::new("bldr".to_string(), + fn package_ident_partial_ord_bad_name() { + let a = PackageIdent::new("bldr".to_string(), "snoopy".to_string(), - "1.0.1".to_string(), - "20150521131555".to_string()); - let b = Package::new("bldr".to_string(), + Some("1.0.1".to_string()), + Some("20150521131555".to_string())); + let b = PackageIdent::new("bldr".to_string(), "bldr".to_string(), - "1.0.0".to_string(), - "20150521131555".to_string()); + Some("1.0.0".to_string()), + Some("20150521131555".to_string())); match a.partial_cmp(&b) { Some(_) => panic!("We tried to return an order"), None => assert!(true), @@ -820,15 +970,15 @@ mod tests { } #[test] - fn package_partial_ord_different_derivation() { - let a = Package::new("adam".to_string(), + fn package_ident_partial_ord_different_derivation() { + let a = PackageIdent::new("adam".to_string(), "bldr".to_string(), - "1.0.0".to_string(), - "20150521131555".to_string()); - let b = Package::new("bldr".to_string(), + Some("1.0.0".to_string()), + Some("20150521131555".to_string())); + let b = PackageIdent::new("bldr".to_string(), "bldr".to_string(), - "1.0.0".to_string(), - "20150521131555".to_string()); + 
Some("1.0.0".to_string()), + Some("20150521131555".to_string())); match a.partial_cmp(&b) { Some(ord) => assert_eq!(ord, Ordering::Equal), None => panic!("We failed to return an order"), @@ -836,15 +986,15 @@ mod tests { } #[test] - fn package_partial_ord_release() { - let a = Package::new("adam".to_string(), + fn package_ident_partial_ord_release() { + let a = PackageIdent::new("adam".to_string(), "bldr".to_string(), - "1.0.0".to_string(), - "20150521131556".to_string()); - let b = Package::new("bldr".to_string(), + Some("1.0.0".to_string()), + Some("20150521131556".to_string())); + let b = PackageIdent::new("bldr".to_string(), "bldr".to_string(), - "1.0.0".to_string(), - "20150521131555".to_string()); + Some("1.0.0".to_string()), + Some("20150521131555".to_string())); match a.partial_cmp(&b) { Some(ord) => assert_eq!(ord, Ordering::Greater), None => panic!("We failed to return an order"), @@ -907,4 +1057,12 @@ mod tests { Err(e) => panic!("{:?}", e), } } + + #[test] + fn check_fully_qualified_package_id() { + let partial = PackageIdent::new("chef", "libarchive", None, None); + let full = PackageIdent::new("chef", "libarchive", Some("1.2.3"), Some("1234")); + assert!(!partial.fully_qualified()); + assert!(full.fully_qualified()); + } } diff --git a/src/bldr/package/updater.rs b/src/bldr/package/updater.rs index 94443dc793..ae8809ece9 100644 --- a/src/bldr/package/updater.rs +++ b/src/bldr/package/updater.rs @@ -5,13 +5,14 @@ // is made available under an open source license such as the Apache 2.0 License. use std::sync::{Arc, RwLock}; +use std::str::FromStr; use wonder; use wonder::actor::{GenServer, InitResult, HandleResult, ActorSender, ActorResult}; use error::BldrError; use fs::PACKAGE_CACHE; -use package::Package; +use package::{Package, PackageIdent}; use repo; const TIMEOUT_MS: u64 = 60_000; @@ -82,14 +83,18 @@ impl GenServer for PackageUpdater { state: &mut Self::S) -> HandleResult { let package = state.package.read().unwrap(); - match repo::client::show_package(&state.repo, - &package.derivation, - &package.name, - None, - None) { - Ok(latest) => { + // JW TODO: Store and use the version if the package was started with a specific version. + // This will allow an operator to lock to a version and receive security updates + // in the form of release updates for a package. 
+ let ident = PackageIdent::new(package.derivation.clone(), package.name.clone(), None, None); + match repo::client::show_package(&state.repo, &ident) { + Ok(remote) => { + let latest: Package = remote.into(); if latest > *package { - match repo::client::fetch_package_exact(&state.repo, &latest, PACKAGE_CACHE) { + match repo::client::fetch_package(&state.repo, + &PackageIdent::from_str(&latest.ident()) + .unwrap(), + PACKAGE_CACHE) { Ok(archive) => { debug!("Updater downloaded new package to {:?}", archive); // JW TODO: actually handle verify and unpack results diff --git a/src/bldr/repo/client.rs b/src/bldr/repo/client.rs index 295db7ad23..6bc603470c 100644 --- a/src/bldr/repo/client.rs +++ b/src/bldr/repo/client.rs @@ -13,9 +13,9 @@ use hyper::client::{Client, Body}; use hyper::status::StatusCode; use rustc_serialize::json; -use super::XFileName; +use super::{XFileName, data_object}; use error::{BldrResult, BldrError, ErrorKind}; -use package::{Package, PackageArchive}; +use package::{Package, PackageArchive, PackageIdent}; static LOGKEY: &'static str = "RC"; @@ -31,39 +31,6 @@ pub fn fetch_key(repo: &str, key: &str, path: &str) -> BldrResult { download(key, &url, path) } -/// Download a sepcific package identified by it's derivation, name, version, and release, to the -/// given filepath. -/// -/// # Failures -/// -/// * Package cannot be found -/// * Remote repository is not available -/// * File cannot be created and written to -pub fn fetch_package_exact(repo: &str, - package: &Package, - store: &str) - -> BldrResult { - let url = format!("{}/pkgs/{}/{}/{}/{}/download", - repo, - package.derivation, - package.name, - package.version, - package.release); - match download(&package.name, &url, store) { - Ok(file) => { - let path = PathBuf::from(file); - Ok(PackageArchive::new(path)) - } - Err(BldrError{ err: ErrorKind::HTTP(StatusCode::NotFound), ..}) => { - Err(bldr_error!(ErrorKind::RemotePackageNotFound(package.derivation.clone(), - package.name.clone(), - Some(package.version.clone()), - Some(package.release.clone())))) - } - Err(e) => Err(e), - } -} - /// Download the latest release of a package. 
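With identifiers doing the heavy lifting, the client URLs collapse to a single pattern. A small sketch of the endpoints used by `fetch_package` and `url_show_package` below (the `demo_urls` helper is hypothetical; the endpoint shapes are taken from this diff):

```rust
use package::PackageIdent;

fn demo_urls(repo: &str, ident: &PackageIdent) {
    // Downloads always go through the ident's `Display` form, however qualified it is.
    println!("{}/pkgs/{}/download", repo, ident);
    // Metadata lookups hit the exact record for a fully qualified ident,
    // and the depot's `latest` resolution otherwise.
    if ident.fully_qualified() {
        println!("{}/pkgs/{}", repo, ident);
    } else {
        println!("{}/pkgs/{}/latest", repo, ident);
    }
}
```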
/// /// An optional version and release can be specified @@ -77,38 +44,17 @@ pub fn fetch_package_exact(repo: &str, /// * Remote repository is not available /// * File cannot be created and written to pub fn fetch_package(repo: &str, - derivation: &str, - package: &str, - version: &Option, - release: &Option, + package: &PackageIdent, store: &str) -> BldrResult { - let url = if release.is_some() && version.is_some() { - format!("{}/pkgs/{}/{}/{}/{}/download", - repo, - derivation, - package, - release.as_ref().unwrap(), - version.as_ref().unwrap()) - } else if release.is_some() { - format!("{}/pkgs/{}/{}/{}/download", - repo, - derivation, - package, - release.as_ref().unwrap()) - } else { - format!("{}/pkgs/{}/{}/download", repo, derivation, package) - }; - match download(package, &url, store) { + let url = format!("{}/pkgs/{}/download", repo, package); + match download(&package.name, &url, store) { Ok(file) => { let path = PathBuf::from(file); Ok(PackageArchive::new(path)) } Err(BldrError { err: ErrorKind::HTTP(StatusCode::NotFound), ..}) => { - Err(bldr_error!(ErrorKind::RemotePackageNotFound(derivation.to_string(), - package.to_string(), - version.clone(), - release.clone()))) + Err(bldr_error!(ErrorKind::RemotePackageNotFound(package.clone()))) } Err(e) => Err(e), } @@ -123,32 +69,20 @@ pub fn fetch_package(repo: &str, /// /// * Package cannot be found /// * Remote repository is not available -pub fn show_package(repo: &str, - derivation: &str, - name: &str, - version: Option, - release: Option) - -> BldrResult { - let url = url_show_package(repo, derivation, name, &version, &release); +pub fn show_package(repo: &str, ident: &PackageIdent) -> BldrResult { + let url = url_show_package(repo, ident); let client = Client::new(); let request = client.get(&url); let mut res = try!(request.send()); if res.status != hyper::status::StatusCode::Ok { - let ver = if version.is_some() { - Some(version.unwrap().to_string()) - } else { - None - }; - return Err(bldr_error!(ErrorKind::RemotePackageNotFound(derivation.to_string(), - name.to_string(), - ver, - None))); + return Err(bldr_error!(ErrorKind::RemotePackageNotFound(ident.clone()))); } let mut encoded = String::new(); try!(res.read_to_string(&mut encoded)); - let package: Package = json::decode(&encoded).unwrap(); + debug!("Body: {:?}", encoded); + let package: data_object::Package = json::decode(&encoded).unwrap(); Ok(package) } @@ -182,27 +116,11 @@ pub fn put_package(repo: &str, package: &Package) -> BldrResult<()> { upload(&url, &mut file) } -fn url_show_package(repo: &str, - derivation: &str, - name: &str, - version: &Option, - release: &Option) - -> String { - if version.is_some() && release.is_some() { - format!("{}/pkgs/{}/{}/{}/{}", - repo, - derivation, - name, - version.as_ref().unwrap(), - release.as_ref().unwrap()) - } else if version.is_some() { - format!("{}/pkgs/{}/{}/{}", - repo, - derivation, - name, - version.as_ref().unwrap()) +fn url_show_package(repo: &str, package: &PackageIdent) -> String { + if package.fully_qualified() { + format!("{}/pkgs/{}", repo, package) } else { - format!("{}/pkgs/{}/{}", repo, derivation, name) + format!("{}/pkgs/{}/latest", repo, package) } } @@ -273,9 +191,13 @@ fn upload(url: &str, file: &mut File) -> BldrResult<()> { debug!("Uploading to {}", url); let client = Client::new(); let metadata = try!(file.metadata()); - let res = try!(client.post(url).body(Body::SizedBody(file, metadata.len())).send()); - debug!("Response {:?}", res); - Ok(()) + let response = 
try!(client.post(url).body(Body::SizedBody(file, metadata.len())).send()); + if response.status.is_success() { + Ok(()) + } else { + debug!("Response {:?}", response); + Err(bldr_error!(ErrorKind::HTTP(response.status))) + } } fn progress(status: &str, written: i64, length: &str, finished: bool) { diff --git a/src/bldr/repo/data_object.rs b/src/bldr/repo/data_object.rs new file mode 100644 index 0000000000..681e86cd6b --- /dev/null +++ b/src/bldr/repo/data_object.rs @@ -0,0 +1,197 @@ +// Copyright:: Copyright (c) 2015-2016 Chef Software, Inc. +// +// The terms of the Evaluation Agreement (Bldr) between Chef Software Inc. and the party accessing +// this file ("Licensee") apply to Licensee's use of the Software until such time that the Software +// is made available under an open source license such as the Apache 2.0 License. + +use std::collections::HashSet; +use std::fmt; +use std::str::FromStr; + +use rustc_serialize::{Encoder, Decoder, Encodable, Decodable}; + +use error::{BldrResult, ErrorKind}; +use package; +use super::data_store::ToMdbValue; + +static LOGKEY: &'static str = "DO"; + +pub trait DataObject : Encodable + Decodable { + type Key: ToMdbValue + fmt::Display; + fn ident(&self) -> &Self::Key; +} + +#[repr(C)] +#[derive(PartialEq, Debug, Clone)] +pub struct PackageIdent(String); + +impl PackageIdent { + pub fn new(ident: String) -> Self { + PackageIdent(ident) + } + + pub fn deriv_idx(&self) -> String { + format!("{}", self.parts()[0]) + } + + pub fn name_idx(&self) -> String { + let vec: Vec<&str> = self.parts(); + format!("{}/{}", vec[0], vec[1]) + } + + pub fn version_idx(&self) -> String { + let vec: Vec<&str> = self.parts(); + format!("{}/{}/{}", vec[0], vec[1], vec[2]) + } + + pub fn parts(&self) -> Vec<&str> { + self.0.split("/").collect() + } +} + +impl fmt::Display for PackageIdent { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +impl Encodable for PackageIdent { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + let p = self.parts(); + try!(s.emit_struct("PackageIdent", p.len(), |s| { + try!(s.emit_struct_field("derivation", 0, |s| p[0].encode(s))); + try!(s.emit_struct_field("name", 1, |s| p[1].encode(s))); + if p.len() > 2 { + try!(s.emit_struct_field("version", 2, |s| p[2].encode(s))); + } + if p.len() > 3 { + try!(s.emit_struct_field("release", 3, |s| p[3].encode(s))); + } + Ok(()) + })); + Ok(()) + } +} + +impl Decodable for PackageIdent { + fn decode(d: &mut D) -> Result { + d.read_struct("PackageIdent", 4, |d| { + let derivation: String = try!(d.read_struct_field("derivation", 0, |d| Decodable::decode(d))); + let name: String = try!(d.read_struct_field("name", 1, |d| Decodable::decode(d))); + let version: String = try!(d.read_struct_field("version", 2, |d| Decodable::decode(d))); + let release: String = try!(d.read_struct_field("release", 3, |d| Decodable::decode(d))); + Ok(PackageIdent::new(format!("{}/{}/{}/{}", derivation, name, version, release))) + }) + } +} + +impl DataObject for PackageIdent { + type Key = String; + + fn ident<'a>(&'a self) -> &'a String { + &self.0 + } +} + +impl Into for PackageIdent { + fn into(self) -> package::PackageIdent { + FromStr::from_str(&self.0).unwrap() + } +} + +#[repr(C)] +#[derive(RustcEncodable, RustcDecodable, PartialEq, Debug)] +pub struct View { + pub ident: String, + pub packages: HashSet<::Key>, +} + +impl View { + pub fn new(name: &str) -> Self { + View { + ident: String::from(name), + packages: HashSet::new(), + } + } + + pub fn add_package(&mut self, package: 
::Key) -> &mut Self { + self.packages.insert(package); + self + } +} + +impl DataObject for View { + type Key = String; + + fn ident<'a>(&'a self) -> &'a String { + &self.ident + } +} + +impl fmt::Display for View { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", self.ident) + } +} + +#[repr(C)] +#[derive(RustcEncodable, RustcDecodable, PartialEq, Debug)] +pub struct Package { + pub ident: PackageIdent, + pub manifest: String, + pub deps: Vec, + pub tdeps: Vec, + pub exposes: Vec, + pub config: Option, + pub views: HashSet<::Key>, +} + +impl Package { + pub fn from_archive(archive: &package::PackageArchive) -> BldrResult { + let ident = match archive.ident() { + Ok(value) => { + if !value.fully_qualified() { + return Err(bldr_error!(ErrorKind::InvalidPackageIdent(value.to_string()))); + } + PackageIdent::new(value.to_string()) + } + Err(e) => return Err(e), + }; + Ok(Package { + ident: ident, + manifest: try!(archive.manifest()), + deps: try!(archive.deps()).iter().map(|d| PackageIdent::new(d.to_string())).collect(), + tdeps: try!(archive.tdeps()).iter().map(|d| PackageIdent::new(d.to_string())).collect(), + exposes: try!(archive.exposes()), + config: try!(archive.config()), + views: HashSet::new(), + }) + } + + pub fn add_view(&mut self, view: ::Key) -> &mut Self { + self.views.insert(view); + self + } +} + +impl Into for Package { + fn into(self) -> package::Package { + let ident = self.ident.parts(); + package::Package { + derivation: ident[0].to_string(), + name: ident[1].to_string(), + version: ident[2].to_string(), + release: ident[3].to_string(), + deps: self.deps.into_iter().map(|d| d.into()).collect(), + tdeps: self.tdeps.into_iter().map(|d| d.into()).collect(), + } + } +} + +impl DataObject for Package { + type Key = String; + + fn ident<'a>(&'a self) -> &'a String { + &self.ident.0 + } +} diff --git a/src/bldr/repo/data_store.rs b/src/bldr/repo/data_store.rs new file mode 100644 index 0000000000..dd8e0a7816 --- /dev/null +++ b/src/bldr/repo/data_store.rs @@ -0,0 +1,1470 @@ +// Copyright:: Copyright (c) 2015-2016 Chef Software, Inc. +// +// The terms of the Evaluation Agreement (Bldr) between Chef Software Inc. and the party accessing +// this file ("Licensee") apply to Licensee's use of the Software until such time that the Software +// is made available under an open source license such as the Apache 2.0 License. + +use std::any::Any; +use std::ffi::{CStr, CString}; +use std::fmt; +use std::fs; +use std::marker::PhantomData; +use std::mem; +use std::path::Path; +use std::ptr; +use std::slice; +use std::sync::{Arc, Mutex}; + +use bincode::{self, SizeLimit}; +use bincode::rustc_serialize::{encode, decode}; +use libc::{c_void, c_int, c_uint, mode_t, size_t}; +use lmdb_sys; +use rustc_serialize::{Encodable, Decodable}; + +use error::{BldrResult, ErrorKind}; +use super::data_object::{self, DataObject}; +use package::PackageIdent; + +static LOGKEY: &'static str = "DS"; + +lazy_static! { + static ref OPEN_LOCK: Mutex<()> = Mutex::new(()); +} + +bitflags! { + flags EnvFlags: c_uint { + const ENV_NO_SYNC = lmdb_sys::MDB_NOSYNC, + const ENV_NO_META_SYNC = lmdb_sys::MDB_NOMETASYNC, + const ENV_MAP_ASYNC = lmdb_sys::MDB_MAPASYNC, + const ENV_NO_MEM_INIT = lmdb_sys::MDB_NOMEMINIT, + } +} + +bitflags! 
{ + flags EnvCreateFlags: c_uint { + const ENV_CREATE_FIXED_MAP = lmdb_sys::MDB_FIXEDMAP, + const ENV_CREATE_NO_SUB_DIR = lmdb_sys::MDB_NOSUBDIR, + const ENV_CREATE_READ_ONLY = lmdb_sys::MDB_RDONLY, + const ENV_CREATE_WRITE_MAP = lmdb_sys::MDB_WRITEMAP, + const ENV_CREATE_NO_META_SYNC = lmdb_sys::MDB_NOMETASYNC, + const ENV_CREATE_NO_SYNC = lmdb_sys::MDB_NOSYNC, + const ENV_CREATE_MAP_ASYNC = lmdb_sys::MDB_MAPASYNC, + const ENV_CREATE_NO_TLS = lmdb_sys::MDB_NOTLS, + const ENV_CREATE_NO_LOCK = lmdb_sys::MDB_NOLOCK, + const ENV_CREATE_NO_READ_AHEAD = lmdb_sys::MDB_NORDAHEAD, + const ENV_CREATE_NO_MEM_INIT = lmdb_sys::MDB_NOMEMINIT, + } +} + +bitflags! { + flags DatabaseFlags: c_uint { + const DB_REVERSE_KEY = lmdb_sys::MDB_REVERSEKEY, + const DB_ALLOW_DUPS = lmdb_sys::MDB_DUPSORT, + const DB_INTEGER_KEY = lmdb_sys::MDB_INTEGERKEY, + const DB_DUPS_FIXED = lmdb_sys::MDB_DUPFIXED, + const DB_ALLOW_INT_DUPS = lmdb_sys::MDB_INTEGERDUP, + const DB_REVERSE_DUPS = lmdb_sys::MDB_REVERSEDUP, + const DB_CREATE = lmdb_sys::MDB_CREATE, + } +} + +bitflags! { + flags WriteFlags: c_uint { + const CURRENT = lmdb_sys::MDB_CURRENT, + const NO_DUP_DATA = lmdb_sys::MDB_NODUPDATA, + const NO_OVERWRITE = lmdb_sys::MDB_NOOVERWRITE, + const RESERVE = lmdb_sys::MDB_RESERVE, + const APPEND = lmdb_sys::MDB_APPEND, + const APPEND_DUP = lmdb_sys::MDB_APPENDDUP, + const MULTIPLE = lmdb_sys::MDB_MULTIPLE, + } +} + +/// Name of the package database +pub const PACKAGE_DB: &'static str = "packages"; +/// Name of the package index database +pub const PACKAGE_INDEX: &'static str = "package-index"; +/// Name of the views database +pub const VIEW_DB: &'static str = "views"; +/// Value for how many databases can be opened within a DataStore's environment. Increase this +/// count for each new database added and decrease this count if databases are removed from the +/// DataStore. +pub const MAX_DBS: u32 = 3; + +macro_rules! try_mdb { + ($e: expr) => (match $e { + lmdb_sys::MDB_SUCCESS => (), + _ => return Err(bldr_error!(ErrorKind::MdbError(MdbError::from($e)))) + }) +} + +macro_rules! handle_mdb { + ($e: expr) => (match $e { + lmdb_sys::MDB_SUCCESS => Ok(()), + _ => Err(bldr_error!(ErrorKind::MdbError(MdbError::from($e)))) + }) +} + +macro_rules! assert_txn_state_eq { + ($cur:expr, $exp:expr) => ( + { + let c = $cur.clone(); + let e = $exp; + if c == e { + () + } else { + return Err(bldr_error!(ErrorKind::MdbError(MdbError::StateError(c, e)))) + } + } + ) +} + +#[derive(Debug)] +pub enum MdbError { + /// The specified DBI was changed unexpectedly + BadDbi, + /// Invalid reuse of reader locktable slot + BadRslot, + /// Transaction must abort, has a child, or is invalid + BadTxn, + /// Unsupported size of key/database name/data or wrong DUPFIXED size + BadValSize, + /// Unable to decode data from database + DecodingError(bincode::rustc_serialize::DecodingError), + /// Unable to encode data into database + EncodingError(bincode::rustc_serialize::EncodingError), + /// Key/Data pair not found + NotFound, + /// Key/Data pair already exists + KeyExists, + /// Cursor stack too deep (internal error) + CursorFull, + /// Environment maxdbs reached + DbsFull, + /// Operation and database incompatible or database type has changed. 
This can mean: + /// + /// * Operation expects the database to have the `DB_ALLOW_DUPS`/`DB_DUPS_FIXED` flag set + /// * Opening a named database when the unnamed database has the `DB_ALLOW_DUPS`/`DB_INTEGER_KEY` + /// flag set + /// * Accessing a data record as a database or vice versa + /// * The database was dropped and recreated with different flags + Incompatible, + /// File is not a valid LMDB file + Invalid, + /// Environment mapsize reached + MapFull, + /// Database contents grew beyond environment mapsize + MapResized, + /// Page has not enough space (internal error) + PageFull, + /// Requested page not found (usually indicates data corruption) + PageNotFound, + /// Environment maxreaders reached + ReadersFull, + /// Located page was wrong type + Corrupted, + /// Update of meta page failed or environment encountered a fatal error + Panic, + /// Too many TLS keys in use (Windows only) + TLSFull, + /// Transaction has too many dirty pages + TxnFull, + /// Transaction was in an unexpected state at time of execution + StateError(TxnState, TxnState), + /// Catch all for undefined error codes + Undefined(c_int, String), + /// Environment version mismatch + VersionMismatch, +} + +impl fmt::Display for MdbError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{:?}", self) + } +} + +impl From<c_int> for MdbError { + fn from(code: c_int) -> MdbError { + match code { + lmdb_sys::MDB_BAD_DBI => MdbError::BadDbi, + lmdb_sys::MDB_BAD_RSLOT => MdbError::BadRslot, + lmdb_sys::MDB_BAD_TXN => MdbError::BadTxn, + lmdb_sys::MDB_BAD_VALSIZE => MdbError::BadValSize, + lmdb_sys::MDB_NOTFOUND => MdbError::NotFound, + lmdb_sys::MDB_KEYEXIST => MdbError::KeyExists, + lmdb_sys::MDB_CURSOR_FULL => MdbError::CursorFull, + lmdb_sys::MDB_DBS_FULL => MdbError::DbsFull, + lmdb_sys::MDB_INCOMPATIBLE => MdbError::Incompatible, + lmdb_sys::MDB_INVALID => MdbError::Invalid, + lmdb_sys::MDB_MAP_FULL => MdbError::MapFull, + lmdb_sys::MDB_MAP_RESIZED => MdbError::MapResized, + lmdb_sys::MDB_PAGE_FULL => MdbError::PageFull, + lmdb_sys::MDB_PAGE_NOTFOUND => MdbError::PageNotFound, + lmdb_sys::MDB_READERS_FULL => MdbError::ReadersFull, + lmdb_sys::MDB_CORRUPTED => MdbError::Corrupted, + lmdb_sys::MDB_PANIC => MdbError::Panic, + lmdb_sys::MDB_TLS_FULL => MdbError::TLSFull, + lmdb_sys::MDB_TXN_FULL => MdbError::TxnFull, + lmdb_sys::MDB_VERSION_MISMATCH => MdbError::VersionMismatch, + _ => { + let msg = unsafe { + String::from_utf8(CStr::from_ptr(lmdb_sys::mdb_strerror(code)) + .to_bytes() + .to_vec()) + .unwrap() + }; + MdbError::Undefined(code, msg) + } + } + } +} + +impl From<bincode::rustc_serialize::DecodingError> for MdbError { + fn from(value: bincode::rustc_serialize::DecodingError) -> MdbError { + MdbError::DecodingError(value) + } +} + +impl From<bincode::rustc_serialize::EncodingError> for MdbError { + fn from(value: bincode::rustc_serialize::EncodingError) -> MdbError { + MdbError::EncodingError(value) + } +} + +fn create_txn(env: &Environment, + flags: c_uint, + parent: Option<&mut lmdb_sys::MDB_txn>) + -> BldrResult<*mut lmdb_sys::MDB_txn> { + let mut handle: *mut lmdb_sys::MDB_txn = ptr::null_mut(); + let parent = if parent.is_some() { + parent.unwrap() as *mut lmdb_sys::MDB_txn + } else { + ptr::null_mut() as *mut lmdb_sys::MDB_txn + }; + unsafe { + try_mdb!(lmdb_sys::mdb_txn_begin(env.handle, parent, flags, &mut handle)); + } + Ok(handle) +} + +fn cursor_get<'a, K, D>(cursor: *mut lmdb_sys::MDB_cursor, + key: Option<&K>, + value: Option<&D>, + op: CursorOp) + -> BldrResult<(Option<&'a K>, D)> + where K: ToMdbValue, + D: Encodable + Decodable +{ + unsafe { + let mut kval = 
if key.is_some() { + key.unwrap().to_mdb_value() + } else { + val_for::(None) + }; + let mut dval = encoded_val_for::(value); + let key_ptr = kval.mv_data; + try_mdb!(lmdb_sys::mdb_cursor_get(cursor, &mut kval, &mut dval, op as u32)); + let kout = if key_ptr != kval.mv_data { + let kout: &K = &*(kval.mv_data as *const K); + Some(kout) + } else { + None + }; + let bytes: &[u8] = slice::from_raw_parts(dval.mv_data as *const u8, dval.mv_size); + match decode(bytes) { + Ok(dout) => Ok((kout, dout)), + Err(e) => Err(bldr_error!(ErrorKind::MdbError(MdbError::from(e)))), + } + } +} + +unsafe fn val_for(data: Option<&T>) -> lmdb_sys::MDB_val { + match data { + Some(d) => { + lmdb_sys::MDB_val { + mv_data: d as *const T as *mut c_void, + mv_size: mem::size_of::() as size_t, + } + } + None => { + lmdb_sys::MDB_val { + mv_data: ptr::null_mut(), + mv_size: 0, + } + } + } +} + +unsafe fn encoded_val_for(data: Option<&T>) -> lmdb_sys::MDB_val { + match data { + Some(d) => { + // JW TODO: this should be set to the max size the database allows. Infinite is fine + // for now. + let mut encoded: Vec = encode(d, SizeLimit::Infinite).unwrap(); + let bytes: &mut [u8] = &mut encoded[..]; + lmdb_sys::MDB_val { + mv_data: bytes.as_ptr() as *mut c_void, + mv_size: bytes.len() as size_t, + } + } + None => { + lmdb_sys::MDB_val { + mv_data: ptr::null_mut() as *mut c_void, + mv_size: 0, + } + } + } +} + +pub unsafe trait ToMdbValue { + fn to_mdb_value(&self) -> lmdb_sys::MDB_val; +} + +unsafe impl ToMdbValue for String { + fn to_mdb_value(&self) -> lmdb_sys::MDB_val { + let t: &str = self; + lmdb_sys::MDB_val { + mv_data: t.as_ptr() as *mut c_void, + mv_size: t.len(), + } + } +} + +pub struct DataStore { + pub packages: PkgDatabase, + pub views: ViewDatabase, + #[allow(dead_code)] + env: Arc, +} + +impl DataStore { + /// Instantiates a new LMDB backed datastore. + /// + /// # Failures + /// + /// * Cannot read/write to the given path + /// * Cannot obtain a lock to create the environment + /// * Could not create the environment or any of it's databases + pub fn open(path: &Path) -> BldrResult { + let mut flags = EnvCreateFlags::empty(); + flags.toggle(ENV_CREATE_NO_SUB_DIR); + match path.parent() { + Some(root) => try!(fs::create_dir_all(root)), + None => return Err(bldr_error!(ErrorKind::DbInvalidPath)), + } + let env = try!(Environment::new().max_databases(MAX_DBS).flags(flags).open(&path, 0o744)); + let env1 = Arc::new(env); + let env2 = env1.clone(); + let env3 = env1.clone(); + let pkg_database = try!(PkgDatabase::new().create(env2)); + let view_database = try!(ViewDatabase::new().create(env3)); + Ok(DataStore { + env: env1, + packages: pkg_database, + views: view_database, + }) + } + + /// Truncates every database in the datastore. + /// + /// # Failures + /// + /// * If a read-write transaction could not be acquired for any of the databases in the + /// datastore + pub fn clear(&self) -> BldrResult<()> { + let txn = try!(self.packages.txn_rw()); + try!(self.packages.clear(&txn)); + try!(txn.commit()); + let txn = try!(self.views.txn_rw()); + try!(self.views.clear(&txn)); + txn.commit() + } +} + +pub struct EnvironmentBuilder { + flags: EnvCreateFlags, + map_size: Option, + max_databases: Option, + max_readers: Option, +} + +impl EnvironmentBuilder { + /// Set environment flags. + pub fn flags(mut self, flags: EnvCreateFlags) -> Self { + // JW TODO: don't let people set flags themselves, the lmdb API is hostile and requires + // that you have knowledge of what flags are and are not allowed to be mixed. 
Instead expose + // setters that return a result when setting flags that may or may not be compatible with + // the current state of the environment being built + self.flags = flags; + self + } + + /// Set the maximum number of named databases for the environment. + /// + /// This function is only needed if multiple databases will be used in the environment. Simpler + /// applications that use the environment as a single unnamed database can ignore this option. + pub fn max_databases(mut self, count: u32) -> Self { + self.max_databases = Some(count); + self + } + + /// Set the maximum number of threads/reader slots for the environment. + /// + /// This defines the number of slots in the lock table that is used to track readers in the + /// environment. The default is 126. Starting a read-only transaction normally ties a lock table + /// slot to the current thread until the environment closes or the thread exits. If the + /// `ENV_CREATE_NO_TLS` flag is set, starting a new transaction instead ties the slot to the + /// transaction until it, or the environment, is destroyed. + pub fn max_readers(mut self, count: u32) -> Self { + self.max_readers = Some(count); + self + } + + /// Set the size of the memory map to use for this environment. + /// + /// The size should be a multiple of the OS page size. The default is 10485760 bytes. The size + /// of the memory map is also the maximum size of the database. The value should be chosen + /// as large as possible to accommodate future growth of the database. + pub fn map_size(mut self, size: u64) -> Self { + self.map_size = Some(size); + self + } + + /// Create/open the database + pub fn open(self, path: &Path, permissions: u32) -> BldrResult<Environment> { + let handle: *mut lmdb_sys::MDB_env = ptr::null_mut(); + unsafe { + try_mdb!(lmdb_sys::mdb_env_create(mem::transmute(&handle))); + } + + if let Some(map_size) = self.map_size { + unsafe { + try_mdb!(lmdb_sys::mdb_env_set_mapsize(handle, map_size as usize)); + } + } + + if let Some(count) = self.max_databases { + unsafe { try_mdb!(lmdb_sys::mdb_env_set_maxdbs(handle, count)) } + } + + if let Some(count) = self.max_readers { + unsafe { try_mdb!(lmdb_sys::mdb_env_set_maxreaders(handle, count)) } + } + + // JW TODO: if read only flag is set, let's return a read only environment. 
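// A usage sketch (illustrative only, not part of this change) for the builder above. It mirrors
// how DataStore::open later in this file constructs its environment, but also exercises
// max_readers() and map_size(). The path, the 64 MB map size and the reader count are example
// values, not defaults introduced by this diff; all other names come from this file.
fn open_env_example(path: &Path) -> BldrResult<Environment> {
    let mut flags = EnvCreateFlags::empty();
    flags.toggle(ENV_CREATE_NO_SUB_DIR);      // keep the datastore in a single file at `path`
    Environment::new()
        .max_databases(MAX_DBS)               // packages, package-index and views
        .max_readers(126)                     // the documented default, shown explicitly
        .map_size(64 * 1024 * 1024)           // must be a multiple of the OS page size
        .flags(flags)
        .open(path, 0o744)
}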
+ + unsafe { + let path_str = try!(path.to_str().ok_or(bldr_error!(ErrorKind::DbInvalidPath))); + let path_ptr = try!(CString::new(path_str) + .map_err(|_| bldr_error!(ErrorKind::DbInvalidPath))) + .as_ptr(); + + match lmdb_sys::mdb_env_open(handle, + path_ptr, + self.flags.bits(), + permissions as mode_t) { + lmdb_sys::MDB_SUCCESS => Ok(Environment { handle: handle }), + code => { + lmdb_sys::mdb_env_close(handle); + Err(bldr_error!(ErrorKind::MdbError(MdbError::from(code)))) + } + } + } + } +} + +impl Default for EnvironmentBuilder { + fn default() -> EnvironmentBuilder { + EnvironmentBuilder { + flags: EnvCreateFlags::empty(), + max_databases: None, + max_readers: None, + map_size: None, + } + } +} + +pub struct Environment { + handle: *mut lmdb_sys::MDB_env, +} + +impl Environment { + pub fn new() -> EnvironmentBuilder { + EnvironmentBuilder::default() + } +} + +impl Drop for Environment { + fn drop(&mut self) { + if self.handle != ptr::null_mut() { + unsafe { lmdb_sys::mdb_env_close(self.handle) } + } + } +} + +unsafe impl Send for Environment {} +unsafe impl Sync for Environment {} + +pub enum Txn { + Read, + ReadWrite, +} + +#[derive(PartialEq, Debug, Eq, Copy, Clone)] +pub enum TxnState { + Normal, + Released, + Invalid, +} + +impl Default for TxnState { + fn default() -> TxnState { + TxnState::Normal + } +} + +#[repr(C)] +#[derive(Copy, Clone, Debug)] +#[allow(dead_code)] +/// Cursor Get operations +/// +/// This is the set of all operations for retrieving data using a cursor. +enum CursorOp { + /// Position at the first key/data item + First = lmdb_sys::MDB_FIRST as isize, + /// Position at the first data item of current key. Valid only if the database has the + /// `DB_ALLOW_DUPS` flag set + FirstDup = lmdb_sys::MDB_FIRST_DUP as isize, + /// Position at key/data pair. Valid only if the database has the `DB_ALLOW_DUPS` flag set + GetBoth = lmdb_sys::MDB_GET_BOTH as isize, + /// Position at key, nearest data. Valid only if the database has the `DB_ALLOW_DUPS` flag set + GetBothRange = lmdb_sys::MDB_GET_BOTH_RANGE as isize, + /// Return the key and data at the cursor's current position + GetCurrent = lmdb_sys::MDB_GET_CURRENT as isize, + /// Return the key and up to a page of duplicate data items from the cursor's current position. + /// Move the cursor to prepare for `NextMultiple` cursor operation. Valid only if the database + /// has the `DB_DUPS_FIXED` flag set + GetMultiple = lmdb_sys::MDB_GET_MULTIPLE as isize, + /// Position the cursor at the last key and data item + Last = lmdb_sys::MDB_LAST as isize, + /// Position the cursor at the last data item of the current key. Valid only if the database has + /// the `DB_ALLOW_DUPS` flag set + LastDup = lmdb_sys::MDB_LAST_DUP as isize, + /// Position at the next data item + Next = lmdb_sys::MDB_NEXT as isize, + /// Position at the next data item of the current key. Valid only if the database has the + /// `DB_ALLOW_DUPS` flag set + NextDup = lmdb_sys::MDB_NEXT_DUP as isize, + /// Return key and up to a page of duplicate data items from next cursor position. Move cursor + /// to prepare for `NextMultiple` cursor operation. Valid only if the database has the + /// `DB_DUPS_FIXED` flag set + NextMultiple = lmdb_sys::MDB_NEXT_MULTIPLE as isize, + /// Position at first data item of next key + NextNoDup = lmdb_sys::MDB_NEXT_NODUP as isize, + /// Position at previous data item + Prev = lmdb_sys::MDB_PREV as isize, + /// Position at previous data item of current key. 
Valid only if the database has the + /// `DB_ALLOW_DUPS` flag set + PrevDup = lmdb_sys::MDB_PREV_DUP as isize, + /// Position at the last data item of previous key + PrevNoDup = lmdb_sys::MDB_PREV_NODUP as isize, + /// Position at the specified key + Set = lmdb_sys::MDB_SET as isize, + /// Position at the specified key and return the key and it's data + SetKey = lmdb_sys::MDB_SET_KEY as isize, + /// Position at first key greater than or equal to the specified key + SetRange = lmdb_sys::MDB_SET_RANGE as isize, +} + +/// Common behaviour for cursors +pub trait Cursor<'a, 'd, D: 'a + 'd + Database, T: Transaction<'a, D>> { + /// Returns a raw pointer to the cursor's handle. + fn handle(&self) -> *mut lmdb_sys::MDB_cursor; + + /// Returns a raw pointer to the cursor's transaction handle. + fn txn(&self) -> &'a mut lmdb_sys::MDB_txn; + + /// Returns a reference to the current state of the cursor's transaction. + fn state(&self) -> &TxnState; + + /// Returns the cursor's database handle. + fn database(&self) -> lmdb_sys::MDB_dbi; + + /// Return count of duplicates for current key. + /// + /// This call is only valid on databases that support sorted duplicate items by the + /// `DB_ALLOW_DUPS` flag. + fn dup_count(&self) -> BldrResult { + assert_txn_state_eq!(self.state(), TxnState::Normal); + let mut count: size_t = 0; + unsafe { + try_mdb!(lmdb_sys::mdb_cursor_count(self.handle(), &mut count)); + } + Ok(count as u64) + } + + /// Position cursor at the first key/data item and return the data for the item. + fn first(&mut self) -> BldrResult { + assert_txn_state_eq!(self.state(), TxnState::Normal); + match cursor_get::<::Key, D::Object>(self.handle(), + None, + None, + CursorOp::First) { + Ok((_, value)) => Ok(value), + Err(e) => Err(e), + } + } + + /// Position the cursor at the first data item of the current key. + /// + /// This call is only valid on databases that support sorted duplicate items by the + /// `DB_ALLOW_DUPS` flag. + fn first_dup(&mut self) -> BldrResult { + assert_txn_state_eq!(self.state(), TxnState::Normal); + match cursor_get::<::Key, D::Object>(self.handle(), + None, + None, + CursorOp::FirstDup) { + Ok((_, value)) => Ok(value), + Err(e) => Err(e), + } + } + + /// Position the cursor at the last key/data item and return the data for that item. + fn last(&mut self) -> BldrResult { + assert_txn_state_eq!(self.state(), TxnState::Normal); + match cursor_get::<::Key, D::Object>(self.handle(), + None, + None, + CursorOp::Last) { + Ok((_, value)) => Ok(value), + Err(e) => Err(e), + } + } + + /// Position the cursor at the last data item of the current key. + /// + /// This call is only valid on databases that support sorted duplicate items by the + /// `DB_ALLOW_DUPS` flag. + fn last_dup(&mut self) -> BldrResult { + assert_txn_state_eq!(self.state(), TxnState::Normal); + match cursor_get::<::Key, D::Object>(self.handle(), + None, + None, + CursorOp::LastDup) { + Ok((_, value)) => Ok(value), + Err(e) => Err(e), + } + } + + /// Position the cursor at the next data item. + fn next(&mut self) -> BldrResult<(&'a ::Key, D::Object)> { + assert_txn_state_eq!(self.state(), TxnState::Normal); + match cursor_get::<::Key, D::Object>(self.handle(), + None, + None, + CursorOp::Next) { + Ok((Some(key), value)) => Ok((key, value)), + Ok(_) => unreachable!(), + Err(e) => Err(e), + } + } + + /// Position the cursor at the next data item of the current key. + /// + /// This call is only valid on databases taht support sorted duplicate items by the + /// `DB_ALLOW_DUPS` flag. 
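// A sketch (illustrative only, not part of this change) of how the depot handlers later in this
// diff use these cursor operations against the package index: a DB_ALLOW_DUPS database keyed by
// partial idents such as "chef/redis" (see data_object::PackageIdent::name_idx). set_key()
// positions on the key's first duplicate and last_dup() jumps to the newest one; next_dup()
// would walk every release instead. The helper name is an assumption.
fn latest_under_key(ds: &DataStore, key: &String) -> BldrResult<data_object::PackageIdent> {
    let txn = try!(ds.packages.index.txn_ro());
    let mut cursor = try!(txn.cursor_ro());
    try!(cursor.set_key(key));   // position on the requested partial ident
    cursor.last_dup()            // newest duplicate recorded under that ident
}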
+ fn next_dup(&mut self) -> BldrResult<(&'a ::Key, D::Object)> { + assert_txn_state_eq!(self.state(), TxnState::Normal); + match cursor_get::<::Key, D::Object>(self.handle(), + None, + None, + CursorOp::NextDup) { + Ok((Some(key), value)) => Ok((key, value)), + Ok(_) => unreachable!(), + Err(e) => Err(e), + } + } + + /// Position the cursor at the specified key and return the key and data. + fn set_key(&mut self, + key: &::Key) + -> BldrResult<(&'d ::Key, D::Object)> { + assert_txn_state_eq!(self.state(), TxnState::Normal); + match cursor_get::<::Key, D::Object>(self.handle(), + Some(key), + None, + CursorOp::SetKey) { + Ok((Some(key), value)) => Ok((key, value)), + Ok(_) => unreachable!(), + Err(e) => Err(e), + } + } +} + +/// Read-only cursor +pub struct RoCursor<'a, D: 'a + Database, T: 'a + Transaction<'a, D>> { + txn: &'a T, + cursor: *mut lmdb_sys::MDB_cursor, + _marker: PhantomData, +} + +impl<'a, D, T> RoCursor<'a, D, T> + where D: Database, + T: Transaction<'a, D> +{ + fn open(txn: &'a T) -> BldrResult { + assert_txn_state_eq!(txn.state(), TxnState::Normal); + let mut cursor: *mut lmdb_sys::MDB_cursor = ptr::null_mut(); + unsafe { + try_mdb!(lmdb_sys::mdb_cursor_open(txn.handle(), txn.database().handle(), &mut cursor)); + } + Ok(RoCursor { + txn: txn, + cursor: cursor, + _marker: PhantomData, + }) + } +} + +impl<'a, 'b, D, T> Cursor<'a, 'b, D, T> for RoCursor<'a, D, T> + where D: 'a + 'b + Database, + T: 'a + Transaction<'a, D> +{ + fn handle(&self) -> *mut lmdb_sys::MDB_cursor { + self.cursor + } + + fn txn(&self) -> &'a mut lmdb_sys::MDB_txn { + self.txn.handle() + } + + fn database(&self) -> lmdb_sys::MDB_dbi { + self.txn.database().handle() + } + + fn state(&self) -> &TxnState { + self.txn.state() + } +} + +impl<'a, D, T> Drop for RoCursor<'a, D, T> + where D: Database, + T: Transaction<'a, D> +{ + fn drop(&mut self) { + unsafe { lmdb_sys::mdb_cursor_close(self.cursor) } + } +} + +/// Read-write cursor +pub struct RwCursor<'a, D: 'a + Database> { + txn: &'a RwTransaction<'a, D>, + cursor: *mut lmdb_sys::MDB_cursor, + _marker: PhantomData, +} + +impl<'a, D: Database> RwCursor<'a, D> { + fn open(txn: &'a RwTransaction) -> BldrResult { + assert_txn_state_eq!(txn.state(), TxnState::Normal); + let mut cursor: *mut lmdb_sys::MDB_cursor = ptr::null_mut(); + unsafe { + try_mdb!(lmdb_sys::mdb_cursor_open(txn.handle(), txn.database().handle(), &mut cursor)); + } + Ok(RwCursor { + txn: txn, + cursor: cursor, + _marker: PhantomData, + }) + } +} + +impl<'a, 'b, D, T> Cursor<'a, 'b, D, T> for RwCursor<'a, D> + where D: 'a + 'b + Database, + T: 'a + Transaction<'a, D> +{ + fn handle(&self) -> *mut lmdb_sys::MDB_cursor { + self.cursor + } + + fn txn(&self) -> &'a mut lmdb_sys::MDB_txn { + self.txn.handle() + } + + fn database(&self) -> lmdb_sys::MDB_dbi { + self.txn.database().handle() + } + + fn state(&self) -> &TxnState { + self.txn.state() + } +} + +impl<'a, D: Database> Drop for RwCursor<'a, D> { + fn drop(&mut self) { + unsafe { lmdb_sys::mdb_cursor_close(self.cursor) } + } +} + +pub struct DatabaseBuilder { + pub name: Option, + pub flags: DatabaseFlags, + txn_flags: c_uint, + _marker: PhantomData, +} + +impl DatabaseBuilder { + /// Create a new database + pub fn create(mut self, env: Arc) -> BldrResult { + self.flags = self.flags | DB_CREATE; + let handle = try!(self.open_database(&env)); + T::open(env, handle) + } + + /// Open an existing database + pub fn open(mut self, env: Arc) -> BldrResult { + self.flags = self.flags - DB_CREATE; + let handle = try!(self.open_database(&env)); + 
T::open(env, handle) + } + + /// Configures the database to be a named database + pub fn named(&mut self, name: String) -> &mut Self { + self.name = Some(name); + self + } + + /// Set database flags + pub fn flags(&mut self, flags: DatabaseFlags) -> &mut Self { + self.flags = flags; + self + } + + /// Set the database to be read-only + pub fn readonly(&mut self) -> &mut Self { + self.txn_flags = lmdb_sys::MDB_RDONLY; + self + } + + /// Set the database to be writeable + pub fn writable(&mut self) -> &mut Self { + self.txn_flags = 0; + self + } + + fn open_database(&self, env: &Environment) -> BldrResult { + match OPEN_LOCK.lock() { + Ok(_) => { + let name_ptr = if self.name.is_some() { + try!(CString::new(self.name.as_ref().unwrap().as_bytes())).as_ptr() + } else { + ptr::null() + }; + let mut handle: lmdb_sys::MDB_dbi = 0; + let txn = try!(create_txn(env, self.txn_flags, None)); + unsafe { + match handle_mdb!(lmdb_sys::mdb_dbi_open(txn, + name_ptr, + self.flags.bits(), + &mut handle)) { + Ok(()) => { + try_mdb!(lmdb_sys::mdb_txn_commit(txn)); + Ok(handle) + } + Err(e) => { + lmdb_sys::mdb_txn_abort(txn); + return Err(e); + } + } + } + } + Err(e) => panic!("Internal data access error: {:?}", e), + } + } +} + +/// Common behaviour for databases +pub trait Database : Sized { + type Object: DataObject + Any; + + /// Drop all entries from the database + fn clear<'a, T: Transaction<'a, Self>>(&'a self, txn: &'a T) -> BldrResult<()> { + txn.clear() + } + + /// Write an object into the database + fn write<'a>(&self, txn: &'a RwTransaction<'a, Self>, object: &Self::Object) -> BldrResult<()> { + txn.put(object.ident(), object) + } + + /// Open the database + fn open(env: Arc, handle: lmdb_sys::MDB_dbi) -> BldrResult; + + /// Returns a reference to the database's environment + fn env(&self) -> &Environment; + + /// Returns the database handle + fn handle(&self) -> lmdb_sys::MDB_dbi; + + /// Begin a read-only transaction + fn txn_ro<'b>(&'b self) -> BldrResult> { + RoTransaction::begin(self) + } + + /// Begin a read-write transaction + fn txn_rw<'b>(&'b self) -> BldrResult> { + RwTransaction::begin(self) + } +} + +/// Common behaviour for transactions +pub trait Transaction<'a, D: 'a + Database> : Sized { + /// Begin a transaction + fn begin(database: &'a D) -> BldrResult; + + /// Abandon all the operations of the transaction instead of saving them. + fn abort(self) -> (); + + /// Commit all the operations of a transaction into the database. + fn commit(self) -> BldrResult<()>; + + /// Return a reference to the transaction's database + fn database(&self) -> &'a D; + + /// Return a reference of the transaction's handle + fn handle(&self) -> &mut lmdb_sys::MDB_txn; + + /// Return a reference of the transaction's current state + fn state(&self) -> &TxnState; + + /// Returns a read-only cursor + fn cursor_ro(&'a self) -> BldrResult> { + RoCursor::open(self) + } + + /// Begins a read-only nested transaction within the current transaction into the given + /// database. + fn new_child(&'a self, database: &'a D2) -> BldrResult> { + let handle = try!(create_txn(database.env(), lmdb_sys::MDB_RDONLY, Some(self.handle()))); + Ok(RoTransaction { + database: database, + parent: Some(self.handle()), + handle: handle, + state: TxnState::default(), + }) + } + + /// Begins a read-write nested transaction within the current transaction into the given + /// database. 
+ fn new_child_rw(&'a self, database: &'a D2) -> BldrResult> { + let handle = try!(create_txn(database.env(), 0, Some(self.handle()))); + Ok(RwTransaction { + database: database, + parent: Some(self.handle()), + handle: handle, + state: TxnState::default(), + }) + } + + /// Clear all data in the database. + fn clear(&self) -> BldrResult<()> { + assert_txn_state_eq!(self.state(), TxnState::Normal); + unsafe { + try_mdb!(lmdb_sys::mdb_drop(self.handle(), self.database().handle(), 0)); + } + Ok(()) + } + + /// Return a value from the database. + fn get(&self, k: &::Key) -> BldrResult { + assert_txn_state_eq!(self.state(), TxnState::Normal); + unsafe { + let mut key = k.to_mdb_value(); + let mut data = encoded_val_for::<()>(None); + try_mdb!(lmdb_sys::mdb_get(self.handle() as *mut lmdb_sys::MDB_txn, + self.database().handle(), + &mut key, + &mut data)); + let bytes: &[u8] = slice::from_raw_parts(data.mv_data as *const u8, data.mv_size); + match decode(bytes) { + Ok(value) => Ok(value), + Err(e) => Err(bldr_error!(ErrorKind::MdbError(MdbError::from(e)))), + } + } + } +} + +pub struct RoTransaction<'a, D: 'a + Database> { + database: &'a D, + #[allow(dead_code)] + parent: Option<&'a mut lmdb_sys::MDB_txn>, + handle: *mut lmdb_sys::MDB_txn, + state: TxnState, +} + +impl<'a, D: Database> RoTransaction<'a, D> { + /// Like `abort()`, but doesn't consume the transaction, keeping it alive for later use. + #[allow(dead_code)] + fn reset(&mut self) { + if *self.state() != TxnState::Normal { + return (); + } + unsafe { + lmdb_sys::mdb_txn_reset(self.handle()); + } + self.state = TxnState::Released; + } + + /// Acquires a new reader lock for a transaction that had been previously released by `renew()`. + #[allow(dead_code)] + fn renew(&mut self) -> BldrResult<()> { + assert_txn_state_eq!(self.state(), TxnState::Released); + unsafe { + try_mdb!(lmdb_sys::mdb_txn_renew(self.handle())); + } + self.state = TxnState::Normal; + Ok(()) + } +} + +impl<'a, D: 'a + Database> Transaction<'a, D> for RoTransaction<'a, D> { + fn begin(database: &'a D) -> BldrResult { + let handle = try!(create_txn(database.env(), lmdb_sys::MDB_RDONLY, None)); + Ok(RoTransaction { + database: database, + parent: None, + handle: handle, + state: TxnState::default(), + }) + } + + fn abort(self) { + if *self.state() != TxnState::Normal { + return (); + } + unsafe { + lmdb_sys::mdb_txn_abort(self.handle()); + } + } + + fn commit(mut self) -> BldrResult<()> { + assert_txn_state_eq!(self.state(), TxnState::Normal); + unsafe { + try_mdb!(lmdb_sys::mdb_txn_commit(self.handle())); + } + self.state = TxnState::Released; + Ok(()) + } + + fn database(&self) -> &'a D { + self.database + } + + fn handle(&self) -> &mut lmdb_sys::MDB_txn { + unsafe { &mut *self.handle } + } + + fn state(&self) -> &TxnState { + &self.state + } +} + +impl<'a, D: 'a + Database> Drop for RoTransaction<'a, D> { + fn drop(&mut self) { + if self.state == TxnState::Normal { + unsafe { + lmdb_sys::mdb_txn_commit(self.handle); + } + self.state = TxnState::Released; + } + } +} + +pub struct RwTransaction<'a, D: 'a + Database> { + database: &'a D, + #[allow(dead_code)] + parent: Option<&'a mut lmdb_sys::MDB_txn>, + handle: *mut lmdb_sys::MDB_txn, + state: TxnState, +} + +impl<'a, D: Database> RwTransaction<'a, D> { + /// Returns a read-write cursor + pub fn cursor_rw(&'a self) -> BldrResult> { + RwCursor::open(self) + } + + /// Write a key/value into the database. 
+ /// + /// The default behaviour is to enter a new key/data pair, replacing any previously existing + /// key if duplicates are not allowed, or adding a new duplicate data item if duplicates are + /// allowed. + /// + /// Duplicates can be allowed by setting the `DB_ALLOW_DUPS` flag on the database. + fn put(&self, key: &::Key, value: &D::Object) -> BldrResult<()> { + assert_txn_state_eq!(self.state(), TxnState::Normal); + // JW TODO: these flags represent different types of "writes" and are dependent upon the + // flags used to open the database. This would mean that this `put` function is the + // foundation for writing data into the database and functions like `write`, `update`, and + // `append` are the public API hanging off of the database itself. + let flags = 0; + unsafe { + let mut kval = key.to_mdb_value(); + let mut dval = encoded_val_for::(Some(value)); + try_mdb!(lmdb_sys::mdb_put(self.handle, + self.database.handle(), + &mut kval, + &mut dval, + flags)); + } + Ok(()) + } + + /// Delete items from the database. + /// + /// If the database does not support duplicate data items the value argument is ignored. If + /// the database supports duplicate data items and the value argument is `None`, all of the + /// duplicate data items for the key are deleted. Otherwise, if the data option is present only + /// the matching data item will be deleted. + /// + /// # Failures + /// + /// * MdbError::NotFound if the specified key/data pair is not in the database + #[allow(dead_code)] + fn delete(&self, + key: &::Key, + value: Option<&D::Object>) + -> BldrResult<()> { + unsafe { + let mut kval = key.to_mdb_value(); + let dval: *mut lmdb_sys::MDB_val = { + if value.is_some() { + &mut encoded_val_for::(value) + } else { + ptr::null_mut() + } + }; + try_mdb!(lmdb_sys::mdb_del(self.handle, self.database.handle(), &mut kval, dval)); + } + Ok(()) + } +} + +impl<'a, D: 'a + Database> Transaction<'a, D> for RwTransaction<'a, D> { + fn begin(database: &'a D) -> BldrResult { + let handle = try!(create_txn(database.env(), 0, None)); + Ok(RwTransaction { + database: database, + parent: None, + handle: handle, + state: TxnState::default(), + }) + } + + fn abort(self) { + if *self.state() != TxnState::Normal { + return (); + } + unsafe { + lmdb_sys::mdb_txn_abort(self.handle()); + } + } + + fn commit(mut self) -> BldrResult<()> { + assert_txn_state_eq!(self.state(), TxnState::Normal); + unsafe { + try_mdb!(lmdb_sys::mdb_txn_commit(self.handle())); + } + self.state = TxnState::Invalid; + Ok(()) + } + + fn database(&self) -> &'a D { + self.database + } + + fn handle(&self) -> &mut lmdb_sys::MDB_txn { + unsafe { &mut *self.handle } + } + + fn state(&self) -> &TxnState { + &self.state + } +} + +impl<'a, D: 'a + Database> Drop for RwTransaction<'a, D> { + fn drop(&mut self) { + if self.state == TxnState::Normal { + unsafe { + lmdb_sys::mdb_txn_commit(self.handle); + } + self.state = TxnState::Invalid; + } + } +} + +/// Contains metadata entries for each package known by the Depot +pub struct PkgDatabase { + pub index: PkgIndex, + env: Arc, + handle: lmdb_sys::MDB_dbi, +} + +impl PkgDatabase { + pub fn new() -> DatabaseBuilder { + DatabaseBuilder::default() + } +} + +impl Database for PkgDatabase { + type Object = data_object::Package; + + fn open(env: Arc, handle: lmdb_sys::MDB_dbi) -> BldrResult { + let env2 = env.clone(); + let index = try!(PkgIndex::new().create(env2)); + Ok(PkgDatabase { + env: env, + handle: handle, + index: index, + }) + } + + fn clear<'a, T: Transaction<'a, Self>>(&'a self, txn: &'a 
T) -> BldrResult<()> { + try!(txn.clear()); + let nested = try!(txn.new_child_rw(&self.index)); + try!(self.index.clear(&nested)); + try!(nested.commit()); + Ok(()) + } + + fn write<'a>(&self, txn: &RwTransaction<'a, Self>, object: &Self::Object) -> BldrResult<()> { + try!(txn.put(object.ident(), object)); + let nested = try!(txn.new_child_rw(&self.index)); + try!(self.index.write(&nested, &object.ident)); + try!(nested.commit()); + Ok(()) + } + + fn env(&self) -> &Environment { + &self.env + } + + fn handle(&self) -> lmdb_sys::MDB_dbi { + self.handle + } +} + +impl Default for DatabaseBuilder { + fn default() -> DatabaseBuilder { + DatabaseBuilder { + name: Some(PACKAGE_DB.to_string()), + flags: DatabaseFlags::empty(), + txn_flags: 0, + _marker: PhantomData, + } + } +} + +impl Drop for PkgDatabase { + fn drop(&mut self) { + unsafe { lmdb_sys::mdb_dbi_close(self.env.handle, self.handle()) } + } +} + +/// Contains an index of package identifiers to easily find the latest version/release of a +/// specified package. +pub struct PkgIndex { + env: Arc, + handle: lmdb_sys::MDB_dbi, +} + +impl PkgIndex { + pub fn new() -> DatabaseBuilder { + DatabaseBuilder::default() + } +} + +impl Database for PkgIndex { + type Object = data_object::PackageIdent; + + fn open(env: Arc, handle: lmdb_sys::MDB_dbi) -> BldrResult { + Ok(PkgIndex { + env: env, + handle: handle, + }) + } + + fn env(&self) -> &Environment { + &self.env + } + + fn handle(&self) -> lmdb_sys::MDB_dbi { + self.handle + } + + fn write<'a>(&self, txn: &RwTransaction<'a, Self>, object: &Self::Object) -> BldrResult<()> { + try!(txn.put(&object.deriv_idx(), object)); + try!(txn.put(&object.name_idx(), object)); + txn.put(&object.version_idx(), object) + } +} + +impl Default for DatabaseBuilder { + fn default() -> DatabaseBuilder { + let mut flags = DatabaseFlags::empty(); + flags.toggle(DB_ALLOW_DUPS); + DatabaseBuilder { + name: Some(PACKAGE_INDEX.to_string()), + flags: flags, + txn_flags: 0, + _marker: PhantomData, + } + } +} + +impl Drop for PkgIndex { + fn drop(&mut self) { + unsafe { lmdb_sys::mdb_dbi_close(self.env.handle, self.handle()) } + } +} + +/// Contains a mapping of repository names and the packages found within that repository. +/// +/// This is how packages will be "promoted" between environments without duplicating data on disk. +pub struct ViewDatabase { + env: Arc, + handle: lmdb_sys::MDB_dbi, +} + +impl ViewDatabase { + pub fn new() -> DatabaseBuilder { + DatabaseBuilder::default() + } +} + +impl Database for ViewDatabase { + type Object = data_object::View; + + fn open(env: Arc, handle: lmdb_sys::MDB_dbi) -> BldrResult { + Ok(ViewDatabase { + env: env, + handle: handle, + }) + } + + fn env(&self) -> &Environment { + &self.env + } + + fn handle(&self) -> lmdb_sys::MDB_dbi { + self.handle + } +} + +impl Default for DatabaseBuilder { + fn default() -> DatabaseBuilder { + DatabaseBuilder { + name: Some(VIEW_DB.to_string()), + flags: DatabaseFlags::empty(), + txn_flags: 0, + _marker: PhantomData, + } + } +} + +impl Drop for ViewDatabase { + fn drop(&mut self) { + unsafe { lmdb_sys::mdb_dbi_close(self.env.handle, self.handle()) } + } +} + +#[cfg(test)] +mod tests { + use std::path::Path; + use std::collections::HashSet; + use super::*; + use super::super::data_object::*; + use error::{BldrError, ErrorKind}; + + // JW TODO: This test is ignored while I track down a bug preventing multiple transactions + // being opened from different threads. 
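// A sketch (illustrative only, not part of this change) of the write/read round trip the ignored
// tests below exercise, shown as a single helper. Note that PkgDatabase::write above also records
// the ident in the package index through a nested read-write transaction, which is what the
// latest-release cursor lookups rely on. It assumes the DataStore, Database and Transaction items
// from this file; the helper name is an assumption.
fn write_then_read(ds: &DataStore, pkg: &data_object::Package) -> BldrResult<data_object::Package> {
    let txn = try!(ds.packages.txn_rw());
    try!(ds.packages.write(&txn, pkg));       // writes the package and its index entries
    try!(txn.commit());
    let txn = try!(ds.packages.txn_ro());
    let saved = try!(txn.get(pkg.ident()));   // key is the fully qualified ident string
    txn.abort();
    Ok(saved)
}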
+ #[test] + #[ignore] + fn read_write_composite_data_object() { + let ds = open_datastore(); + let key: String = "chef/redis/3.0.1/1234".to_string(); + { + let pkg = Package { + ident: PackageIdent::new(key.clone()), + manifest: "my-manifest".to_string(), + deps: vec![], + tdeps: vec![], + exposes: vec![], + config: Some("configuration".to_string()), + views: HashSet::new(), + }; + let txn = ds.packages.txn_rw().unwrap(); + txn.put(&pkg.ident(), &pkg).unwrap(); + txn.commit().unwrap(); + } + let txn = ds.packages.txn_ro().unwrap(); + let saved = txn.get(&"chef/redis/3.0.1/1234".to_string()).unwrap(); + txn.abort(); + assert_eq!(saved.ident(), "chef/redis/3.0.1/1234"); + assert_eq!(saved.manifest, "my-manifest"); + assert_eq!(saved.config, Some("configuration".to_string())); + } + + // JW TODO: This test is ignored while I track down a bug preventing multiple transactions + // being opened from different threads. + #[test] + #[ignore] + fn transaction_read_write() { + let ds = open_datastore(); + { + let mut view = View::new("my-view"); + view.add_package("my-package".to_string()); + let txn = ds.views.txn_rw().unwrap(); + txn.put(view.ident(), &view).unwrap(); + txn.commit().unwrap(); + }; + let txn = ds.views.txn_ro().unwrap(); + let saved = txn.get(&"my-view".to_string()).unwrap(); + txn.abort(); + assert_eq!(saved.ident(), "my-view"); + assert_eq!(saved.packages.len(), 1); + } + + // JW TODO: This test is ignored while I track down a bug preventing multiple transactions + // being opened from different threads. + #[test] + #[ignore] + fn transaction_delete() { + let ds = open_datastore(); + { + let mut view = View::new("my-view"); + view.add_package("my-package".to_string()); + let txn = ds.views.txn_rw().unwrap(); + txn.put(view.ident(), &view).unwrap(); + txn.commit().unwrap(); + }; + let txn = ds.views.txn_rw().unwrap(); + txn.delete(&"my-view".to_string(), None).unwrap(); + match txn.get(&"my-view".to_string()) { + Err(BldrError { err: ErrorKind::MdbError(MdbError::NotFound), .. }) => { + txn.abort(); + assert!(true) + } + _ => { + txn.abort(); + assert!(false) + } + } + } + + fn open_datastore() -> DataStore { + let ds = DataStore::open(Path::new("/opt/bldr/test")).unwrap(); + ds.clear().unwrap(); + ds + } +} diff --git a/src/bldr/repo/mod.rs b/src/bldr/repo/mod.rs index 47829083dd..1ea7ea3ffb 100644 --- a/src/bldr/repo/mod.rs +++ b/src/bldr/repo/mod.rs @@ -5,12 +5,14 @@ // is made available under an open source license such as the Apache 2.0 License. pub mod client; +pub mod data_object; +pub mod data_store; use iron::prelude::*; use iron::status; use iron::request::Body; use iron::headers; -use router::Router; +use router::{Params, Router}; use rustc_serialize::json; use std::net; @@ -19,71 +21,50 @@ use std::fs::{self, File}; use std::io::{Read, Write, BufWriter}; use std::path::{Path, PathBuf}; -use error::{BldrResult, ErrorKind}; +use error::{BldrError, BldrResult, ErrorKind}; use config::Config; - -use package::{Package, PackageArchive}; +use self::data_store::{Cursor, DataStore, Database, Transaction}; +use self::data_object::DataObject; +use package::{self, Package, PackageArchive}; static LOGKEY: &'static str = "RE"; header! 
{ (XFileName, "X-Filename") => [String] } -struct Repo { +pub struct Repo { pub path: String, + pub datastore: DataStore, } impl Repo { - fn new(path: &str) -> BldrResult> { - Ok(Arc::new(Repo { path: String::from(path) })) + pub fn new(path: String) -> BldrResult> { + let dbpath = Path::new(&path).join("datastore"); + let datastore = try!(DataStore::open(dbpath.as_path())); + Ok(Arc::new(Repo { + path: path, + datastore: datastore, + })) } // Return a PackageArchive representing the given package. None is returned if the repository // doesn't have an archive for the given package. - fn archive(&self, - derivation: &str, - name: &str, - version: &str, - release: &str) - -> Option { - let file = self.archive_path(derivation, name, version, release); + fn archive(&self, ident: &package::PackageIdent) -> Option { + let file = self.archive_path(&ident); match fs::metadata(&file) { Ok(_) => Some(PackageArchive::new(file)), Err(_) => None, } } - // Return a PackageArchive representing the latest release available for the given package - // derivation, name, and version (optional). - // - // If a version is specified the latest release of that version will be returned, if it is - // omitted the latest release of the latest version is returned. - fn archive_latest(&self, - derivation: &str, - name: &str, - version: Option<&str>) - -> Option { - match Package::load(derivation, - name, - version.map(String::from), - None, - Some(self.packages_path().to_str().unwrap())) { - Ok(package) => self.archive(&package.derivation, - &package.name, - &package.version, - &package.release), - Err(_) => None, - } - } - // Return a formatted string representing the filename of an archive for the given package // identifier pieces. - fn archive_path(&self, derivation: &str, name: &str, version: &str, release: &str) -> PathBuf { + fn archive_path(&self, ident: &package::PackageIdent) -> PathBuf { self.packages_path() - .join(derivation) - .join(name) - .join(version) - .join(release) - .join(format!("{}-{}-{}-{}.bldr", &derivation, &name, &version, &release)) + .join(&ident.derivation) + .join(&ident.name) + .join(ident.version.as_ref().unwrap()) + .join(ident.release.as_ref().unwrap()) + .join(format!("{}-{}-{}-{}.bldr", &ident.derivation, &ident.name, ident.version.as_ref().unwrap(), ident.release.as_ref().unwrap())) } fn key_path(&self, name: &str) -> PathBuf { @@ -116,6 +97,30 @@ impl Default for ListenPort { } } +impl<'a> Into for &'a Params { + fn into(self) -> package::PackageIdent { + package::PackageIdent::new(self.find("deriv").unwrap(), self.find("pkg").unwrap(), self.find("version"), self.find("release")) + } +} + +impl<'a> Into for &'a Params { + fn into(self) -> data_object::PackageIdent { + let deriv = self.find("deriv").unwrap(); + let name = self.find("pkg"); + let version = self.find("version"); + let release = self.find("release"); + if release.is_some() && version.is_some() && name.is_some() { + data_object::PackageIdent::new(format!("{}/{}/{}/{}", deriv, name.unwrap(), version.unwrap(), release.unwrap())) + } else if version.is_some() && name.is_some() { + data_object::PackageIdent::new(format!("{}/{}/{}", deriv, name.unwrap(), version.unwrap())) + } else if name.is_some() { + data_object::PackageIdent::new(format!("{}/{}", deriv, name.unwrap())) + } else { + data_object::PackageIdent::new(deriv.to_string()) + } + } +} + fn write_file(filename: &PathBuf, body: &mut Body) -> BldrResult { let path = filename.parent().unwrap(); try!(fs::create_dir_all(path)); @@ -165,31 +170,54 @@ fn 
upload_key(repo: &Repo, req: &mut Request) -> IronResult { fn upload_package(repo: &Repo, req: &mut Request) -> IronResult { outputln!("Upload {:?}", req); - let rext = req.extensions.get::().unwrap(); + let ident: package::PackageIdent = { + let params = req.extensions.get::().unwrap(); + params.into() + }; - let deriv = rext.find("deriv").unwrap(); - let pkg = rext.find("pkg").unwrap(); - let version = rext.find("version").unwrap(); - let release = rext.find("release").unwrap(); + if !ident.fully_qualified() { + return Ok(Response::with((status::BadRequest))); + } - let filename = repo.archive_path(deriv, pkg, version, release); - try!(write_file(&filename, &mut req.body)); + let txn = try!(repo.datastore.packages.txn_rw()); + if let Ok(_) = txn.get(&ident.to_string()) { + if let Some(_) = repo.archive(&ident) { + return Ok(Response::with((status::Conflict))); + } else { + // This should never happen. Writing the package to disk and recording it's existence + // in the metadata is a transactional operation and one cannot exist without the other. + // + // JW TODO: write the depot repair tool and wire it into the `bldr-depot repair` command + panic!("Inconsistent package metadata! Exit and run `bldr-depot repair` to fix data integrity."); + } + } - let mut response = Response::with((status::Created, - format!("/pkgs/{}/{}/{}/{}/download", - deriv, - pkg, - version, - release))); - let mut base_url = req.url.clone(); - base_url.path = vec![String::from("pkgs"), - String::from(deriv), - String::from(pkg), - String::from(version), - String::from(release), - String::from("download")]; - response.headers.set(headers::Location(format!("{}", base_url))); - Ok(response) + let filename = repo.archive_path(&ident); + try!(write_file(&filename, &mut req.body)); + let archive = PackageArchive::new(filename); + let object = match data_object::Package::from_archive(&archive) { + Ok(object) => object, + Err(_) => return Ok(Response::with(status::UnprocessableEntity)), + }; + if ident.satisfies(&object.ident.clone().into()) { + // JW TODO: handle failure here? 
+ try!(repo.datastore.packages.write(&txn, &object)); + try!(txn.commit()); + + let mut response = Response::with((status::Created, format!("/pkgs/{}/download", object.ident))); + let mut base_url = req.url.clone(); + let parts: Vec<&str> = object.ident.parts(); + base_url.path = vec![String::from("pkgs"), + parts[0].to_string(), + parts[1].to_string(), + parts[2].to_string(), + parts[3].to_string(), + String::from("download")]; + response.headers.set(headers::Location(format!("{}", base_url))); + Ok(response) + } else { + Ok(Response::with(status::UnprocessableEntity)) + } } fn download_key(repo: &Repo, req: &mut Request) -> IronResult { @@ -213,76 +241,140 @@ fn download_key(repo: &Repo, req: &mut Request) -> IronResult { fn download_package(repo: &Repo, req: &mut Request) -> IronResult { outputln!("Download {:?}", req); - let rext = req.extensions.get::().unwrap(); - - let deriv = match rext.find("deriv") { - Some(deriv) => deriv, - None => return Ok(Response::with(status::BadRequest)), - }; - let pkg = match rext.find("pkg") { - Some(pkg) => pkg, - None => return Ok(Response::with(status::BadRequest)), - }; - let param_ver = rext.find("version"); - let param_rel = rext.find("release"); - - let archive = if param_ver.is_some() && param_rel.is_some() { - match repo.archive(&deriv, - &pkg, - param_ver.as_ref().unwrap(), - param_rel.as_ref().unwrap()) { - Some(archive) => archive, - None => return Ok(Response::with(status::NotFound)), + let params = req.extensions.get::().unwrap(); + // JW TODO: check for repo param + let ident: data_object::PackageIdent = params.into(); + + let result = if ident.parts().len() == 4 { + let txn = try!(repo.datastore.packages.txn_ro()); + match txn.get(&ident.to_string()) { + Ok(package) => { + let value: package::PackageIdent = package.ident.into(); + Ok(value) + }, + Err(e) => Err(e) } } else { - match repo.archive_latest(&deriv, &pkg, param_ver) { - Some(archive) => archive, - None => return Ok(Response::with(status::NotFound)), + // JW TODO: fix scoping of cursor/transactions and refactor this + let r = { + let idx = try!(repo.datastore.packages.index.txn_ro()); + let mut cursor = try!(idx.cursor_ro()); + if let Some(e) = cursor.set_key(&ident.to_string()).err() { + Err(e) + } else { + cursor.last_dup() + } + }; + match r { + Ok(v) => { + let txn = try!(repo.datastore.packages.txn_ro()); + let value: package::PackageIdent = try!(txn.get(&v.ident())).ident.into(); + Ok(value) + }, + Err(e) => Err(BldrError::from(e)) } }; - match fs::metadata(&archive.path) { - Ok(_) => { - let mut response = Response::with((status::Ok, archive.path.clone())); - response.headers.set(XFileName(archive.file_name())); - Ok(response) - } - Err(_) => { - Ok(Response::with(status::NotFound)) - } + match result { + Ok(ident) => { + if let Some(archive) = repo.archive(&ident) { + match fs::metadata(&archive.path) { + Ok(_) => { + let mut response = Response::with((status::Ok, archive.path.clone())); + response.headers.set(XFileName(archive.file_name())); + Ok(response) + } + Err(_) => Ok(Response::with(status::NotFound)), + } + } else { + // This should never happen. Writing the package to disk and recording it's existence + // in the metadata is a transactional operation and one cannot exist without the other. + // + // JW TODO: write the depot repair tool and wire it into the `bldr-depot repair` command + panic!("Inconsistent package metadata! 
+            }
+        },
+        Err(BldrError { err: ErrorKind::MdbError(data_store::MdbError::NotFound), ..}) => Ok(Response::with((status::NotFound))),
+        Err(_) => unreachable!("unknown error"),
+    }
+}
+
+fn list_packages(repo: &Repo, req: &mut Request) -> IronResult<Response> {
+    let params = req.extensions.get::<Router>().unwrap();
+    let ident: data_object::PackageIdent = params.into();
+    let mut packages: Vec = vec![];
+
+    let txn = try!(repo.datastore.packages.index.txn_ro());
+    let mut cursor = try!(txn.cursor_ro());
+    let result = match cursor.set_key(ident.ident()) {
+        Ok((_, value)) => {
+            packages.push(value.into());
+            loop {
+                match cursor.next_dup() {
+                    Ok((_, value)) => packages.push(value.into()),
+                    Err(_) => break
+                }
+            }
+            Ok(())
+        },
+        Err(e) => Err(BldrError::from(e))
+    };
+
+    match result {
+        Ok(()) => {
+            let body = json::encode(&packages).unwrap();
+            Ok(Response::with((status::Ok, body)))
+        },
+        Err(BldrError { err: ErrorKind::MdbError(data_store::MdbError::NotFound), ..}) => Ok(Response::with((status::NotFound))),
+        Err(_) => unreachable!("unknown error"),
     }
 }
 
 fn show_package(repo: &Repo, req: &mut Request) -> IronResult<Response> {
-    let rext = req.extensions.get::<Router>().unwrap();
-    let package = rext.find("pkg").unwrap();
-    let deriv = rext.find("deriv").unwrap();
-    let version = rext.find("version");
-    let release = rext.find("release");
-
-    let archive = if version.is_some() && release.is_some() {
-        match repo.archive(&deriv,
-                           &package,
-                           version.as_ref().unwrap(),
-                           release.as_ref().unwrap()) {
-            Some(archive) => archive,
-            None => return Ok(Response::with(status::NotFound)),
-        }
+    let params = req.extensions.get::<Router>().unwrap();
+    let ident: package::PackageIdent = params.into();
+
+    let result = if ident.fully_qualified() {
+        let txn = try!(repo.datastore.packages.txn_ro());
+        txn.get(&ident.to_string())
     } else {
-        match repo.archive_latest(&deriv, &package, version) {
-            Some(archive) => archive,
-            None => return Ok(Response::with(status::NotFound)),
+        let r = {
+            let idx = try!(repo.datastore.packages.index.txn_ro());
+            let mut cursor = try!(idx.cursor_ro());
+            if let Some(e) = cursor.set_key(&ident.to_string()).err() {
+                Err(e)
+            } else {
+                cursor.last_dup()
+            }
+        };
+        match r {
+            Ok(v) => {
+                let txn = try!(repo.datastore.packages.txn_ro());
+                txn.get(&v.ident())
+            },
+            Err(e) => Err(e)
         }
     };
 
+    match result {
+        Ok(data) => {
+            // JW TODO: re-enable proper json encoding when I have a plan for proper decoding
+            // let body = json::encode(&data.to_json()).unwrap();
+            let body = json::encode(&data).unwrap();
+            Ok(Response::with((status::Ok, body)))
+        },
+        Err(BldrError { err: ErrorKind::MdbError(data_store::MdbError::NotFound), ..}) => Ok(Response::with((status::NotFound))),
+        Err(e) => unreachable!("unknown error: {:?}", e),
+    }
+}
 
-    let package = try!(archive.package());
-    let body = json::encode(&package).unwrap();
-    Ok(Response::with((status::Ok, body)))
+pub fn repair(config: &Config) -> BldrResult<()> {
+    let repo = try!(Repo::new(String::from(config.path())));
+    repo.datastore.clear()
 }
 
 pub fn run(config: &Config) -> BldrResult<()> {
-    let repo = try!(Repo::new(config.path()));
-    let repo2 = repo.clone();
+    let repo = try!(Repo::new(String::from(config.path())));
+    // let repo2 = repo.clone();
     let repo3 = repo.clone();
     let repo4 = repo.clone();
     let repo5 = repo.clone();
@@ -290,17 +382,33 @@ pub fn run(config: &Config) -> BldrResult<()> {
     let repo7 = repo.clone();
     let repo8 = repo.clone();
     let repo9 = repo.clone();
+    let repo10 = repo.clone();
+    let repo11 = repo.clone();
+    let repo12 = repo.clone();
+    let repo13 = repo.clone();
+    let repo14 = repo.clone();
+    let repo15 = repo.clone();
+    let repo16 = repo.clone();
+    let repo17 = repo.clone();
     let router = router!(
-        post "/pkgs/:deriv/:pkg/:version/:release" => move |r: &mut Request| upload_package(&repo, r),
-        get "/pkgs/:deriv/:pkg/:version/:release/download" => move |r: &mut Request| download_package(&repo2, r),
-        get "/pkgs/:deriv/:pkg/:version/download" => move |r: &mut Request| download_package(&repo3, r),
-        get "/pkgs/:deriv/:pkg/download" => move |r: &mut Request| download_package(&repo4, r),
-        get "/pkgs/:deriv/:pkg/:version/:release" => move |r: &mut Request| show_package(&repo5, r),
-        get "/pkgs/:deriv/:pkg/:version" => move |r: &mut Request| show_package(&repo6, r),
-        get "/pkgs/:deriv/:pkg" => move |r: &mut Request| show_package(&repo7, r),
-
-        post "/keys/:key" => move |r: &mut Request| upload_key(&repo8, r),
-        get "/keys/:key" => move |r: &mut Request| download_key(&repo9, r)
+        // JW TODO: update list/show/download function to cover scoping rules of these routes repos
+        get "/pkgs/:deriv/:pkg/:version" => move |r: &mut Request| list_packages(&repo3, r),
+        get "/pkgs/:deriv/:pkg" => move |r: &mut Request| list_packages(&repo4, r),
+        get "/pkgs/:deriv/:pkg/latest" => move |r: &mut Request| show_package(&repo5, r),
+        get "/pkgs/:deriv/:pkg/:version/:release/download" => move |r: &mut Request| download_package(&repo6, r),
+        get "/pkgs/:deriv/:pkg/:version/download" => move |r: &mut Request| download_package(&repo7, r),
+        get "/pkgs/:deriv/:pkg/download" => move |r: &mut Request| download_package(&repo8, r),
+
+        post "/pkgs/:deriv/:pkg/:version/:release" => move |r: &mut Request| upload_package(&repo9, r),
+        get "/pkgs/:deriv/:pkg/:version/latest" => move |r: &mut Request| show_package(&repo10, r),
+        get "/pkgs/:deriv/:pkg/:version/:release" => move |r: &mut Request| show_package(&repo11, r),
+        get "/pkgs/:deriv/:pkg/latest" => move |r: &mut Request| show_package(&repo12, r),
+        get "/pkgs/:deriv/:pkg/:version" => move |r: &mut Request| list_packages(&repo13, r),
+        get "/pkgs/:deriv/:pkg" => move |r: &mut Request| list_packages(&repo14, r),
+        get "/pkgs/:deriv" => move |r: &mut Request| list_packages(&repo15, r),
+
+        post "/keys/:key" => move |r: &mut Request| upload_key(&repo16, r),
+        get "/keys/:key" => move |r: &mut Request| download_key(&repo17, r)
     );
 
     Iron::new(router).http(config.repo_addr()).unwrap();
     Ok(())
diff --git a/src/bldr/topology/standalone.rs b/src/bldr/topology/standalone.rs
index b4425cf6dc..0b48196595 100644
--- a/src/bldr/topology/standalone.rs
+++ b/src/bldr/topology/standalone.rs
@@ -20,7 +20,7 @@ use std::io::prelude::*;
 
 use fs::SERVICE_HOME;
 use error::{BldrResult, BldrError, ErrorKind};
-use package::Package;
+use package::{Package, PackageIdent};
 use state_machine::StateMachine;
 use topology::{self, State, Worker};
 use config::Config;
@@ -63,7 +63,7 @@ pub fn state_initializing(worker: &mut Worker) -> BldrResult<(State, u64)> {
 pub fn state_starting(worker: &mut Worker) -> BldrResult<(State, u64)> {
     outputln!(P: &worker.package_name, "Starting");
     let package = worker.package_name.clone();
-    let runit_pkg = try!(Package::load("chef", "runit", None, None, None));
+    let runit_pkg = try!(Package::load(&PackageIdent::new("chef", "runit", None, None), None));
     let mut child = try!(Command::new(runit_pkg.join_path("bin/runsv"))
                              .arg(&format!("{}/{}", SERVICE_HOME, worker.package_name))
                              .stdin(Stdio::null())
diff --git a/src/main.rs b/src/main.rs
index 686d4dfd41..1593fd7ab0 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -24,6 +24,7 @@ use std::ptr;
 use bldr::config::{Command, Config};
 use bldr::error::{BldrResult, BldrError, ErrorKind};
 use bldr::command::*;
+use bldr::package::PackageIdent;
 use bldr::topology::Topology;
 
 /// Our output key
@@ -36,7 +37,6 @@ const VERSION: &'static str = env!("CARGO_PKG_VERSION");
 /// CLI defaults
 static DEFAULT_GROUP: &'static str = "default";
 static DEFAULT_PATH: &'static str = "/opt/bldr/srvc/bldr/data";
-const DEFAULT_TOPOLOGY: bldr::topology::Topology = Topology::Standalone;
 static DEFAULT_GOSSIP_LISTEN: &'static str = "0.0.0.0:9634";
 
 /// Creates a [Config](config/struct.Config.html) from global args
@@ -48,46 +48,31 @@ fn config_from_args(args: &ArgMatches,
     let mut config = Config::new();
     let command = try!(Command::from_str(subcommand));
     config.set_command(command);
-
     if let Some(ref package) = sub_args.value_of("package") {
-        let (deriv, name, version, release) = try!(split_package_arg(package));
-        config.set_deriv(deriv);
-        config.set_package(name);
-        if let Some(ver) = version {
-            config.set_version(ver);
-        }
-        if let Some(rel) = release {
-            config.set_release(rel);
-        }
+        let ident = try!(PackageIdent::from_str(package));
+        config.set_package(ident);
     }
-
     if let Some(key) = sub_args.value_of("key") {
         config.set_key(key.to_string());
     }
-
     if let Some(password) = sub_args.value_of("password") {
         config.set_password(password.to_string());
     }
-
     if let Some(email) = sub_args.value_of("email") {
         config.set_email(email.to_string());
     }
-
     if let Some(user) = sub_args.value_of("user") {
         config.set_user_key(user.to_string());
     }
-
     if let Some(service) = sub_args.value_of("service") {
         config.set_service_key(service.to_string());
     }
-
     if let Some(infile) = sub_args.value_of("infile") {
         config.set_infile(infile.to_string());
     }
     if let Some(outfile) = sub_args.value_of("outfile") {
         config.set_outfile(outfile.to_string());
     }
-
     if let Some(topology) = sub_args.value_of("topology") {
         match topology.as_ref() {
             "standalone" => {
@@ -101,57 +86,39 @@
             }
             t => return Err(bldr_error!(ErrorKind::UnknownTopology(String::from(t)))),
         }
-    } else {
-        // none set, use the default
-        config.set_topology(DEFAULT_TOPOLOGY);
     }
-
     if sub_args.value_of("port").is_some() {
         let p = value_t!(sub_args.value_of("port"), u16).unwrap_or_else(|e| e.exit());
         config.set_port(p);
     }
-
     if sub_args.value_of("expire-days").is_some() {
         let ed = value_t!(sub_args.value_of("expire-days"), u16).unwrap_or_else(|e| e.exit());
         config.set_expire_days(ed);
     }
-
     if let Some(url) = sub_args.value_of("url") {
         config.set_url(url.to_string());
     }
-
     config.set_group(sub_args.value_of("group").unwrap_or(DEFAULT_GROUP).to_string());
-
     let watches = match sub_args.values_of("watch") {
         Some(ws) => ws.map(|s| s.to_string()).collect(),
         None => vec![],
     };
     config.set_watch(watches);
-
     config.set_path(sub_args.value_of("path").unwrap_or(DEFAULT_PATH).to_string());
-
     config.set_gossip_listen(sub_args.value_of("gossip-listen")
                                  .unwrap_or(DEFAULT_GOSSIP_LISTEN)
                                  .to_string());
-
     let gossip_peers = match sub_args.values_of("gossip-peer") {
         Some(gp) => gp.map(|s| s.to_string()).collect(),
         None => vec![],
     };
     config.set_gossip_peer(gossip_peers);
-
     if sub_args.value_of("gossip-permanent").is_some() {
         config.set_gossip_permanent(true);
     }
-
-    // -------------------------------------
-    // begin global args
-    // -------------------------------------
     if args.value_of("verbose").is_some() {
         bldr::output::set_verbose(true);
     }
-
-
args.value_of("no-color").is_some() { bldr::output::set_no_color(true); } @@ -159,7 +126,6 @@ fn config_from_args(args: &ArgMatches, Ok(config) } - type Handler = fn(&Config) -> Result<(), bldr::error::BldrError>; /// The primary loop for bldr. @@ -179,22 +145,18 @@ fn main() { .takes_value(true) .help("Use the specified package depot url") }; - let arg_group = || { Arg::with_name("group") .long("group") .takes_value(true) .help("The service group; shared config and topology [default: default].") }; - - let arg_infile = || { Arg::with_name("infile") .long("infile") .takes_value(true) .help("Input filename") }; - let arg_outfile = || { Arg::with_name("outfile") .long("outfile") @@ -202,9 +164,6 @@ fn main() { .help("Output filename") }; - - // subcommand definitions - let sub_install = SubCommand::with_name("install") .about("Install a bldr package from a depot") .arg(Arg::with_name("package") @@ -212,7 +171,6 @@ fn main() { .required(true) .help("Name of bldr package to install")) .arg(arg_url()); - let sub_start = SubCommand::with_name("start") .about("Start a bldr package") .arg(Arg::with_name("package") @@ -244,11 +202,8 @@ fn main() { .short("I") .long("gossip-permanent") .help("If this service is a permanent gossip peer")); - let sub_sh = SubCommand::with_name("sh").about("Start an interactive shell"); - let sub_bash = SubCommand::with_name("bash").about("Start an interactive shell"); - let sub_depot = SubCommand::with_name("depot") .about("Run a bldr package depot") .arg(Arg::with_name("path") @@ -260,7 +215,31 @@ fn main() { .long("port") .value_name("port") .help("Depot port. [default: 9632]")); - + let sub_repair = SubCommand::with_name("depot-repair") + .about("Repair a bldr package depot") + .arg(Arg::with_name("path") + .short("p") + .long("path") + .value_name("path") + .help("A path")); + let sub_repo_list = SubCommand::with_name("repo-list") + .about("List repositories in the bldr depot") + .arg(Arg::with_name("path") + .short("p") + .long("path") + .value_name("path") + .help("A path")); + let sub_repo_create = SubCommand::with_name("repo-create") + .about("Create a new repository in the bldr depot") + .arg(Arg::with_name("repo") + .index(1) + .required(true) + .help("Name of the repository to create")) + .arg(Arg::with_name("path") + .short("p") + .long("path") + .value_name("path") + .help("A path")); let sub_upload = SubCommand::with_name("upload") .about("Upload a package to a bldr depot") .arg(Arg::with_name("package") @@ -268,7 +247,6 @@ fn main() { .required(true) .help("Name of package to upload")) .arg(arg_url().required(true)); - let sub_generate_user_key = SubCommand::with_name("generate-user-key") .about("Generate a bldr user key") .arg(Arg::with_name("user") @@ -291,7 +269,6 @@ fn main() { .takes_value(true) .value_name("expire-days") .help("Number of days before a key expires")); - let sub_generate_service_key = SubCommand::with_name("generate-service-key") .about("Generate a bldr service key") .arg(Arg::with_name("service") @@ -304,7 +281,6 @@ fn main() { .takes_value(true) .value_name("expire-days") .help("Number of days before a key expires")); - let sub_encrypt = SubCommand::with_name("encrypt") .about("Encrypt and sign a message with a service as the recipient") .arg(Arg::with_name("user") @@ -325,12 +301,10 @@ fn main() { .takes_value(true) .help("User key password")) .arg(arg_group()); - let sub_decrypt = SubCommand::with_name("decrypt") .about("Decrypt and verify a message") .arg(arg_infile().required(true)) .arg(arg_outfile().required(true)); - let 
     let sub_import_key = SubCommand::with_name("import-key")
         .about("Import a public bldr key")
         .arg(arg_infile())
@@ -343,7 +317,6 @@
         .group(ArgGroup::with_name("input-method")
             .required(true)
             .args(&["infile", "key"]));
-
     let sub_export_key = SubCommand::with_name("export-key")
         .about("Export a public bldr key")
         .arg(Arg::with_name("user")
@@ -359,31 +332,25 @@
             .args(&["user", "service"]))
         .arg(arg_outfile().required(true))
         .arg(arg_group());
-
     let sub_download_depot_key = SubCommand::with_name("download-depot-key")
         .about("Not implemented")
         .arg(Arg::with_name("key")
             .index(1)
             .required(true)
             .help("Name of key"));
-
     let sub_upload_depot_key = SubCommand::with_name("upload-depot-key")
         .about("Not implemented")
         .arg(Arg::with_name("key")
             .index(1)
             .required(true)
             .help("Name of key"));
-
     let sub_list_keys = SubCommand::with_name("list-keys").about("List user and service keys");
-
     let sub_config = SubCommand::with_name("config")
         .about("Print the default.toml for a given package")
         .arg(Arg::with_name("package")
             .index(1)
             .required(true)
             .help("Name of package"));
-
-
     let args = App::new("bldr")
         .version(VERSION)
         .setting(AppSettings::VersionlessSubcommands)
@@ -402,6 +369,9 @@
         .subcommand(sub_sh)
         .subcommand(sub_bash)
         .subcommand(sub_depot)
+        .subcommand(sub_repair)
+        .subcommand(sub_repo_list)
+        .subcommand(sub_repo_create)
         .subcommand(sub_upload)
         .subcommand(sub_generate_user_key)
         .subcommand(sub_generate_service_key)
@@ -413,45 +383,37 @@
         .subcommand(sub_upload_depot_key)
         .subcommand(sub_list_keys)
         .subcommand(sub_config);
-
     let matches = args.get_matches();
     debug!("clap matches {:?}", matches);
-
     // we use AppSettings::SubcommandRequiredElseHelp above, so it's safe to unwrap
     let subcommand_name = matches.subcommand_name().unwrap();
+    let subcommand_matches = matches.subcommand_matches(subcommand_name).unwrap();
 
-    let handler = match subcommand_name {
-        "bash" => Some(shell as Handler),
-        "config" => Some(configure as Handler),
-        "decrypt" => Some(decrypt as Handler),
-        "depot" => Some(depot as Handler),
-        "download-depot-key" => Some(download_depot_key as Handler),
-        "encrypt" => Some(encrypt as Handler),
-        "export-key" => Some(export_key as Handler),
-        "generate-service-key" => Some(generate_service_key as Handler),
-        "generate-user-key" => Some(generate_user_key as Handler),
-        "import-key" => Some(import_key as Handler),
-        "install" => Some(install as Handler),
-        "list-keys" => Some(list_keys as Handler),
-        "sh" => Some(shell as Handler),
-        "start" => Some(start as Handler),
-        "upload-depot-key" => Some(upload_depot_key as Handler),
-        "upload" => Some(upload as Handler),
-        _ => None,
+    let config = match config_from_args(&matches, subcommand_name, &subcommand_matches) {
+        Ok(config) => config,
+        Err(e) => return exit_with(e, 1),
     };
 
-    // we use AppSettings::SubcommandRequiredElseHelp above, so it's safe to unwrap
-    let subcommand_matches = matches.subcommand_matches(subcommand_name).unwrap();
-
-    let result = match handler {
-        Some(h) => {
-            match config_from_args(&matches, subcommand_name, &subcommand_matches) {
-                Ok(config) => h(&config),
-                Err(e) => Err(e),
-            }
-        }
-        None => Err(bldr_error!(ErrorKind::CommandNotImplemented)),
+    let result = match config.command() {
+        Command::Shell => shell(&config),
+        Command::Config => configure(&config),
+        Command::Decrypt => decrypt(&config),
+        Command::Repo => depot(&config),
+        Command::RepoRepair => repair(&config),
+        Command::RepoList => repo_list(&config),
+        Command::RepoCreate => repo_create(subcommand_matches.value_of("repo").unwrap(), &config),
+        Command::DownloadRepoKey => download_depot_key(&config),
+        Command::Encrypt => encrypt(&config),
+        Command::ExportKey => export_key(&config),
+        Command::GenerateServiceKey => generate_service_key(&config),
+        Command::GenerateUserKey => generate_user_key(&config),
+        Command::ImportKey => import_key(&config),
+        Command::Install => install(&config),
+        Command::ListKeys => list_keys(&config),
+        Command::Start => start(&config),
+        Command::UploadRepoKey => upload_depot_key(&config),
+        Command::Upload => upload(&config),
     };
 
     match result {
@@ -460,7 +422,6 @@
     }
 }
-
 /// Exit with an error message and the right status code
 #[allow(dead_code)]
 fn exit_with(e: BldrError, code: i32) {
@@ -468,31 +429,6 @@
     process::exit(code)
 }
 
-fn split_package_arg(arg: &str) -> BldrResult<(String, String, Option<String>, Option<String>)> {
-    let items: Vec<&str> = arg.split("/").collect();
-    match items.len() {
-        2 => Ok((items[0].to_string(), items[1].to_string(), None, None)),
-        3 => {
-            Ok((items[0].to_string(),
-                items[1].to_string(),
-                Some(items[2].to_string()),
-                None))
-        }
-        4 => {
-            Ok((items[0].to_string(),
-                items[1].to_string(),
-                Some(items[2].to_string()),
-                Some(items[3].to_string())))
-        }
-        _ => Err(bldr_error!(ErrorKind::InvalidPackageIdent(arg.to_string()))),
-    }
-}
-
-
-// -------------------
-// Handlers
-// -------------------
-
 /// Start a shell
 #[allow(dead_code)]
 fn shell(_config: &Config) -> BldrResult<()> {
@@ -517,21 +453,20 @@ fn configure(config: &Config) -> BldrResult<()> {
 /// Install a package
 #[allow(dead_code)]
 fn install(config: &Config) -> BldrResult<()> {
-    outputln!("Installing {}", Yellow.bold().paint(config.package_id()));
-    try!(install::from_url(&config.url().as_ref().unwrap(),
-                           config.deriv(),
-                           config.package(),
-                           config.version().clone(),
-                           config.release().clone()));
+    outputln!("Installing {}",
+              Yellow.bold().paint(config.package().to_string()));
+    try!(install::from_url(&config.url().as_ref().unwrap(), config.package()));
     Ok(())
 }
 
 /// Start a service
 #[allow(dead_code)]
 fn start(config: &Config) -> BldrResult<()> {
-    outputln!("Starting {}", Yellow.bold().paint(config.package_id()));
+    outputln!("Starting {}",
+              Yellow.bold().paint(config.package().to_string()));
     try!(start::package(config));
-    outputln!("Finished with {}", Yellow.bold().paint(config.package_id()));
+    outputln!("Finished with {}",
+              Yellow.bold().paint(config.package().to_string()));
     Ok(())
 }
 
@@ -541,7 +476,25 @@ fn depot(config: &Config) -> BldrResult<()> {
     outputln!("Starting Bldr Depot at {}",
               Yellow.bold().paint(config.path()));
     try!(repo::start(&config));
-    outputln!("Finished with {}", Yellow.bold().paint(config.package_id()));
+    outputln!("Finished with {}",
+              Yellow.bold().paint(config.package().to_string()));
+    Ok(())
+}
+
+fn repair(config: &Config) -> BldrResult<()> {
+    outputln!("Verifying data integrity of Depot at {}",
+              Yellow.bold().paint(config.path()));
+    try!(repo::repair(config));
+    Ok(())
+}
+
+fn repo_create(name: &str, config: &Config) -> BldrResult<()> {
+    try!(repo::create_repository(name, config));
+    Ok(())
+}
+
+fn repo_list(config: &Config) -> BldrResult<()> {
+    try!(repo::list_repositories(config));
     Ok(())
 }
 
@@ -549,9 +502,10 @@ fn depot(config: &Config) -> BldrResult<()> {
 #[allow(dead_code)]
 fn upload(config: &Config) -> BldrResult<()> {
     outputln!("Upload Bldr Package {}",
-              Yellow.bold().paint(config.package()));
+              Yellow.bold().paint(config.package().to_string()));
     try!(upload::package(&config));
-    outputln!("Finished with {}", Yellow.bold().paint(config.package_id()));
+    outputln!("Finished with {}",
+              Yellow.bold().paint(config.package().to_string()));
     Ok(())
 }