Skip to content

Commit

Permalink
command line arguments for different stats work now
Browse files Browse the repository at this point in the history
  • Loading branch information
aaronshim committed Jun 2, 2017
1 parent ecaa306 commit 9b18c19
Show file tree
Hide file tree
Showing 5 changed files with 151 additions and 59 deletions.
2 changes: 1 addition & 1 deletion .vscode/launch.json
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
"type": "gdb",
"request": "launch",
"target": "./target/debug/zi",
"arguments": "-p test.zip",
"arguments": "test.zip",
"cwd": "${workspaceRoot}"
}
]
Expand Down
29 changes: 19 additions & 10 deletions src/flat_writer.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
use ::glob;
use ::zip;
use std::fs;

use zip_info::WriteZipInfo;
use zip_info::StatArgs;

/// Info Writer for multiple archive files:
pub struct MultiArchiveFlatWriter<'a> {
Expand All @@ -17,12 +19,12 @@ impl<'a> MultiArchiveFlatWriter<'a> {
impl<'a> WriteZipInfo for MultiArchiveFlatWriter<'a> {
/// Given path names for multiple archives, concatenate their
/// names, contents, and stats:
fn write_zip_info(&mut self, exclude: &str) -> String {
fn write_zip_info(&mut self, exclude: &str, stat_args: &StatArgs) -> String {
let mut output = Vec::new();

for path_name in self.path_names {
let archive_info = ZipInfoFlatWriter::new(path_name.as_str())
.write_zip_info(exclude);
.write_zip_info(exclude, stat_args);

output.push(archive_info);
}
Expand All @@ -49,7 +51,7 @@ impl<'a> ZipInfoFlatWriter<'a> {

impl<'a> WriteZipInfo for ZipInfoFlatWriter<'a> {
/// Concatenate Zip file name with indented stats for an archive:
fn write_zip_info(&mut self, exclude: &str) -> String {
fn write_zip_info(&mut self, exclude: &str, stat_args: &StatArgs) -> String {
let mut info = format!("{}", self.path_name);

let exclude_pattern = glob::Pattern::new(exclude).unwrap();
Expand All @@ -58,29 +60,36 @@ impl<'a> WriteZipInfo for ZipInfoFlatWriter<'a> {
let archive_item = self.archive.by_index(i).unwrap();

if !exclude_pattern.matches(archive_item.name()) {
info = format!("{}{}", info, info_for_archive_item(archive_item));
info = format!("{}{}", info, info_for_archive_item(archive_item, stat_args));
}
}

info
}
}

fn info_for_archive_item(archive_item: zip::read::ZipFile) -> String {
fn info_for_archive_item(archive_item: zip::read::ZipFile, stat_args: &StatArgs) -> String {
let mut info = String::new();
let item_path = archive_item.name();
info = format!("{}\n\t{}", info, item_path);

let compression_type = archive_item.compression();
info = format!("{}\n\t\tCompression type: {}", info, compression_type);
if stat_args.flag_compression_type {
let compression_type = archive_item.compression();
info = format!("{}\n\t\tCompression type: {}", info, compression_type);
}

// Can't think of a way to avoid evaluating original_size or compressed_size without dependent typing...
let original_size = archive_item.size();
info = format!("{}\n\t\tOriginal size: {}", info, original_size);
if stat_args.flag_original_size {
info = format!("{}\n\t\tOriginal size: {}", info, original_size);
}

let compressed_size = archive_item.compressed_size();
info = format!("{}\n\t\tCompressed size: {}", info, compressed_size);
if stat_args.flag_compressed_size {
info = format!("{}\n\t\tCompressed size: {}", info, compressed_size);
}

if original_size > 0 {
if stat_args.flag_compression_rate && original_size > 0 {
let compression_rate =
(original_size as f64 - compressed_size as f64)
/ original_size as f64;
Expand Down
100 changes: 64 additions & 36 deletions src/json_writer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,14 +4,20 @@ use ::zip;
use std::collections::HashMap;
use std::fs;

pub fn zips_to_json(file_paths: &[&str], exclude: &str) -> String {
let multi_archive = MultiArchiveJsonWriter::from(file_paths, exclude);
serde_json::to_string(&multi_archive).unwrap()
use zip_info::StatArgs;

/// Serialize info for the given Zip archives as compact JSON.
///
/// `exclude` is a glob pattern; archive members whose name matches it
/// are skipped. `stat_args` selects which per-file stats are included
/// in the output.
pub fn zips_to_json(file_paths: &[&str], exclude: &str, stat_args: &StatArgs) -> String {
    zips_to_json_with_printer(file_paths, exclude, stat_args, serde_json::to_string)
}

/// Serialize info for the given Zip archives as indented, human-readable
/// JSON. Same semantics as `zips_to_json`, but uses
/// `serde_json::to_string_pretty` for the output.
pub fn zips_to_json_pretty(file_paths: &[&str], exclude: &str, stat_args: &StatArgs) -> String {
    zips_to_json_with_printer(file_paths, exclude, stat_args, serde_json::to_string_pretty)
}

pub fn zips_to_json_pretty(file_paths: &[&str], exclude: &str) -> String {
let multi_archive = MultiArchiveJsonWriter::from(file_paths,exclude);
serde_json::to_string_pretty(&multi_archive).unwrap()
/// Shared driver for the JSON printers: build the archive-info structure
/// once, then serialize it with the supplied `printer` (either
/// `serde_json::to_string` or `serde_json::to_string_pretty`).
///
/// # Panics
/// Panics if serialization fails. Serializing our own
/// `#[derive(Serialize)]` struct should never fail, so a failure here
/// indicates a bug; surface it with a clear message rather than a bare
/// `unwrap()`.
fn zips_to_json_with_printer<F>(file_paths: &[&str], exclude: &str, stat_args: &StatArgs, printer: F) -> String
where F: Fn(&MultiArchiveJsonWriter) -> serde_json::Result<String> {
    let multi_archive = MultiArchiveJsonWriter::from(file_paths, exclude, stat_args);
    printer(&multi_archive).expect("failed to serialize archive info to JSON")
}

#[derive(Serialize, Deserialize, Debug, PartialEq)]
Expand All @@ -28,13 +34,13 @@ impl MultiArchiveJsonWriter {
/// Create and fill MultiArchiveJsonWriter representing
/// zero to many .zip files:
pub fn from(
file_paths: &[&str], exclude: &str) -> MultiArchiveJsonWriter {
file_paths: &[&str], exclude: &str, stat_args: &StatArgs) -> MultiArchiveJsonWriter {
let mut multi_archive = MultiArchiveJsonWriter::new();

for path in file_paths {
multi_archive.archives.insert(
String::from(*path),
ZipArchiveJsonWriter::from(*path, exclude),
ZipArchiveJsonWriter::from(*path, exclude, stat_args),
);
}

Expand All @@ -55,7 +61,7 @@ impl ZipArchiveJsonWriter {

/// Create and fill ZipArchiveJsonWriter representing a
/// .zip file:
pub fn from(file_path: &str, exclude: &str) -> ZipArchiveJsonWriter {
pub fn from(file_path: &str, exclude: &str, stat_args: &StatArgs) -> ZipArchiveJsonWriter {
let mut archive_writer = ZipArchiveJsonWriter::new();
let exclude_pattern = glob::Pattern::new(exclude).unwrap();

Expand All @@ -71,10 +77,15 @@ impl ZipArchiveJsonWriter {
if !exclude_pattern.matches(zip_object.name()) {
archive_writer.objects.insert(
String::from(zip_object.name()),
// I wanted to prevent the evaluation of as many of these parameters at this level as possible
// but it seems that because compression rate depends on both .size() and .compressed_size(), we
// need both to be passed in to calculate compression_rate if needed. (No way to encode this
// dependency here without dependent types.)
ZipObjectJsonWriter::new(
zip_object.compression(),
if stat_args.flag_compression_type { Some(zip_object.compression()) } else { None },
zip_object.size(),
zip_object.compressed_size(),
stat_args,
),
);
}
Expand All @@ -84,33 +95,47 @@ impl ZipArchiveJsonWriter {
}
}

// We are going to use Option types here to denote that not
// all of these are going to be calculated, based on certain
// command-line flags given to this program.
//
// TODO: Figure out how to get the JSON serializer to dump fields
// that are set to null, if this is something that we want in the
// future.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct ZipObjectJsonWriter {
compression_type: String,
original_size: u64,
compressed_size: u64,
compression_rate: String,
compression_type: Option<String>,
original_size: Option<u64>,
compressed_size: Option<u64>,
compression_rate: Option<String>,
}

impl ZipObjectJsonWriter {
pub fn new(
compression_type: zip::CompressionMethod,
compression_type: Option<zip::CompressionMethod>,
original_size: u64,
compressed_size: u64,
stat_args: &StatArgs,
) -> ZipObjectJsonWriter {
let compression_rate = match original_size {
0 => 0.0 as f64,
_ => {
(original_size as f64 - compressed_size as f64)
/ original_size as f64
},
};
let compression_rate =
if stat_args.flag_compression_rate {
Some(match original_size {
0 => 0.0 as f64,
_ => {
(original_size as f64 - compressed_size as f64)
/ original_size as f64
},
})
}
else {
None
};

ZipObjectJsonWriter {
compression_type: format!("{}", compression_type),
original_size: original_size,
compressed_size: compressed_size,
compression_rate: format!("{:.*}%", 2, compression_rate * 100.0),
compression_type: compression_type.and_then(|v| Some(format!("{}", v))),
original_size: if stat_args.flag_original_size { Some(original_size) } else { None },
compressed_size: if stat_args.flag_compressed_size { Some(compressed_size) } else { None },
compression_rate: compression_rate.and_then(|v| Some(format!("{:.*}%", 2, v * 100.0))),
}
}
}
Expand All @@ -121,10 +146,10 @@ mod tests {

fn get_zip_object() -> ZipObjectJsonWriter {
ZipObjectJsonWriter {
compression_type: format!("{}", zip::CompressionMethod::Deflated),
original_size: 100,
compressed_size: 50,
compression_rate: String::from("50%"),
compression_type: Some(format!("{}", zip::CompressionMethod::Deflated)),
original_size: Some(100),
compressed_size: Some(50),
compression_rate: Some(String::from("50%")),
}
}

Expand All @@ -137,28 +162,31 @@ mod tests {
#[test]
fn test_new_zip_object_calculates_percentages() {
let zip_object = ZipObjectJsonWriter::new(
zip::CompressionMethod::Deflated,
Some(zip::CompressionMethod::Deflated),
100,
50,
&StatArgs::default(),
);

assert_eq!("50.00%", zip_object.compression_rate);
assert_eq!("50.00%", zip_object.compression_rate.unwrap_or_default());

let zip_object_empty = ZipObjectJsonWriter::new(
zip::CompressionMethod::Stored,
Some(zip::CompressionMethod::Stored),
0,
0,
&StatArgs::default(),
);

assert_eq!("0.00%", zip_object_empty.compression_rate);
assert_eq!("0.00%", zip_object_empty.compression_rate.unwrap_or_default());

let zip_object_grew = ZipObjectJsonWriter::new(
zip::CompressionMethod::Deflated,
Some(zip::CompressionMethod::Deflated),
100,
150,
&StatArgs::default(),
);

assert_eq!("-50.00%", zip_object_grew.compression_rate);
assert_eq!("-50.00%", zip_object_grew.compression_rate.unwrap_or_default());
}

#[test]
Expand Down
23 changes: 12 additions & 11 deletions src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,10 +15,12 @@ mod flat_writer;
mod json_writer;

use zip_info::WriteZipInfo;
use zip_info::Args;
use zip_info::StatArgs;

/// The Docopt usage string
const USAGE: &'static str = "
Usage: zi [-j | -p] [--exclude=<glob>] <path> ...
Usage: zi [-j | -p] [--exclude=<glob>] [options] <path> ...
zi --help
zi presents information about Zip archives.
Expand All @@ -29,21 +31,20 @@ Common options:
-p, --pretty-json Structure the output in easy-to-read JSON
--exclude=<glob> Ignore objects in the archives whose name
is like this glob pattern.
More options:
--compression-type Show the compression type of each file.
--original-size Show the original size of each file.
--compressed-size Show the compressed size of each file.
--compression-rate Show the compression rate of each file.
";

#[derive(Debug, RustcDecodable)]
struct Args {
arg_path: Vec<String>,
flag_json: bool,
flag_pretty_json: bool,
flag_exclude: String,
}

fn main() {
let args: Args = Docopt::new(USAGE)
.and_then(|d| d.decode())
.unwrap_or_else(|e| e.exit());

let stat_args = StatArgs::new(&args);

// XXX We divide up the display functionality here since
// the original flat writer works differently than the JSON
// writer. Ultimately, the JSON writer should be a generic
Expand All @@ -54,7 +55,7 @@ fn main() {
args.arg_path.as_slice()
);

println!("{}", multiwriter.write_zip_info(args.flag_exclude.as_str()));
println!("{}", multiwriter.write_zip_info(args.flag_exclude.as_str(), &stat_args));
} else {
// Convert String to &str for json printing since
// Docopt appears not to be able to handle Vec<&str>:
Expand All @@ -69,6 +70,6 @@ fn main() {
_ => json_writer::zips_to_json_pretty,
};

println!("{}", s(path_names.as_slice(), args.flag_exclude.as_str()));
println!("{}", s(path_names.as_slice(), args.flag_exclude.as_str(), &stat_args));
}
}
56 changes: 55 additions & 1 deletion src/zip_info.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,59 @@
pub trait WriteZipInfo {
fn write_zip_info(&mut self, exclude: &str) -> String;
fn write_zip_info(&mut self, exclude: &str, stat_args: &StatArgs) -> String;
}

// Command-line arguments decoded by Docopt from the USAGE string in
// main.rs (`arg_*` = positional arguments, `flag_*` = option flags).
#[derive(Debug, RustcDecodable)]
pub struct Args {
    // Positional <path> arguments: the Zip archives to inspect.
    pub arg_path: Vec<String>,
    // --json: emit compact JSON output.
    pub flag_json: bool,
    // --pretty-json: emit indented, easy-to-read JSON output.
    pub flag_pretty_json: bool,
    // --exclude=<glob>: skip archive members whose name matches this pattern.
    pub flag_exclude: String,
    // Individual stat toggles; if none of these is set, all stats are
    // shown (see StatArgs::new).
    pub flag_compression_type: bool,
    pub flag_original_size: bool,
    pub flag_compressed_size: bool,
    pub flag_compression_rate: bool,
}

// We will need to pass this down in the main thread, to the individual printers,
// and there is no point in passing down the whole Args struct, in the spirit of encapsulation.
// StatArgs is the projection of Args onto just the stat-selection flags.
#[derive(Debug)]
pub struct StatArgs {
    // Show the compression type of each file.
    pub flag_compression_type: bool,
    // Show the original (uncompressed) size of each file.
    pub flag_original_size: bool,
    // Show the compressed size of each file.
    pub flag_compressed_size: bool,
    // Show the compression rate of each file.
    pub flag_compression_rate: bool,
}

impl Default for StatArgs {
    /// All stats enabled — the behavior when the user specifies no
    /// individual stat flags on the command line.
    ///
    /// Implementing the standard `Default` trait (instead of an ad-hoc
    /// inherent `default()` method, which clippy's
    /// `should_implement_trait` lint flags) keeps existing
    /// `StatArgs::default()` call sites working unchanged while also
    /// integrating with `unwrap_or_default`-style APIs.
    fn default() -> StatArgs {
        StatArgs {
            flag_compression_type: true,
            flag_original_size: true,
            flag_compressed_size: true,
            flag_compression_rate: true,
        }
    }
}

impl StatArgs {
    /// Project the stat-selection flags out of the full `Args`.
    ///
    /// If the user set none of the individual stat flags, fall back to
    /// printing all stats (`StatArgs::default()`).
    pub fn new(args: &Args) -> StatArgs {
        let stat_args = StatArgs {
            flag_compression_type: args.flag_compression_type,
            flag_original_size: args.flag_original_size,
            flag_compressed_size: args.flag_compressed_size,
            flag_compression_rate: args.flag_compression_rate,
        };

        // No explicit stat flag means "show everything".
        if stat_args.is_all_false() {
            StatArgs::default()
        } else {
            stat_args
        }
    }

    /// True when no stat flag was set on the command line.
    fn is_all_false(&self) -> bool {
        !(self.flag_compression_type || self.flag_original_size || self.flag_compressed_size || self.flag_compression_rate)
    }
}

#[cfg(test)]
Expand Down

0 comments on commit 9b18c19

Please sign in to comment.