Improved error messages
Partially fixes andrewchambers#367
piegamesde committed Apr 17, 2023
1 parent f41b349 commit 02982b2
Showing 5 changed files with 101 additions and 17 deletions.
67 changes: 65 additions & 2 deletions Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion Cargo.toml
@@ -27,7 +27,7 @@ itertools = "0.10"
rusqlite = { version = "0.25", features = ["bundled"] }
lz4 = "1.2"
zstd-safe = { version = "6.0", features = ["std", "experimental"] }
anyhow = "1"
anyhow = { version = "1.0.69", features = [ "backtrace" ] }
thiserror = "1.0"
libc = "0.2"
getopts = "0.2"
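For context on the `Cargo.toml` change: anyhow's `backtrace` feature makes the crate capture a backtrace at the point an error is created (on Rust 1.65+ backtraces come from `std::backtrace` anyway; the feature mainly guarantees capture on older toolchains). Combined with the switch to `{:?}` formatting further down in `main.rs`, a failure can then print its full context chain plus a backtrace when `RUST_BACKTRACE=1` is set. A minimal, self-contained sketch of that behaviour — the `load_settings` helper and the path below are illustrative, not taken from bupstash:

```rust
use anyhow::{Context, Result};

// Hypothetical helper: fails because the path does not exist.
fn load_settings() -> Result<String> {
    std::fs::read_to_string("/no/such/settings.toml")
        .context("Failed to read the settings file")
}

fn main() {
    if let Err(err) = load_settings() {
        // The Debug rendering prints the whole context chain, roughly:
        //   Failed to read the settings file
        //   Caused by: No such file or directory (os error 2)
        // and, with backtrace capture enabled and RUST_BACKTRACE=1,
        // also appends a stack backtrace.
        eprintln!("{:?}", err);
        std::process::exit(1);
    }
}
```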
15 changes: 10 additions & 5 deletions src/main.rs
@@ -40,6 +40,7 @@ pub mod xglobset;
pub mod xid;
pub mod xtar;

use anyhow::Context;
use plmap::PipelineMap;
use std::collections::{BTreeMap, HashMap};
use std::fmt::Write as FmtWrite;
@@ -1172,7 +1173,8 @@ fn put_main(args: Vec<String>) -> Result<(), anyhow::Error> {
action,
ty,
path.as_os_str().to_string_lossy()
)?;
)
.context("Failed to write to stderr")?;
Ok(())
}))
} else {
@@ -1302,7 +1304,8 @@ fn put_main(args: Vec<String>) -> Result<(), anyhow::Error> {
&progress,
ServeProcessCliOpts::default(),
protocol::OpenMode::ReadWrite,
)?;
)
.context("Could not start serve process")?;
let mut serve_out = serve_proc.proc.stdout.as_mut().unwrap();
let mut serve_in = serve_proc.proc.stdin.as_mut().unwrap();

@@ -1328,7 +1331,8 @@ fn put_main(args: Vec<String>) -> Result<(), anyhow::Error> {
threads,
};

let (id, stats) = client::put(ctx, &mut serve_out, &mut serve_in, tags, data_source)?;
let (id, stats) = client::put(ctx, &mut serve_out, &mut serve_in, tags, data_source)
.context("Failed to backup the data")?;
client::hangup(&mut serve_in)?;
serve_proc.wait()?;

@@ -3147,9 +3151,10 @@ fn main() {
// Support unix style pipelines, don't print an error on EPIPE.
match err.root_cause().downcast_ref::<std::io::Error>() {
Some(io_error) if io_error.kind() == std::io::ErrorKind::BrokenPipe => {
std::process::exit(1)
// Use distinct exit code here for diagnostic
std::process::exit(2)
}
_ => die(format!("bupstash {}: {}", subcommand, err)),
_ => die(format!("bupstash {}: {:?}", subcommand, err)),
}
}
}
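Two details of the `main.rs` hunks above are worth spelling out. First, the new `use anyhow::Context;` import is what brings `.context()` into scope, and `{}` on an `anyhow::Error` prints only the outermost message, while `{:?}` prints the message, every added context layer, and the root cause (plus a backtrace when available) — that is what makes the added contexts visible to the user. Second, the broken-pipe special case works by walking to the root cause and downcasting it to `std::io::Error`. A reduced sketch of both patterns, with a hypothetical error chain standing in for a real bupstash failure:

```rust
use anyhow::Context;

// Build a layered error the way the bupstash code does: an io::Error
// root cause wrapped in human-readable context messages.
fn fake_failure() -> Result<(), anyhow::Error> {
    Err(std::io::Error::from(std::io::ErrorKind::BrokenPipe))
        .context("Failed to write to stderr")
        .context("Failed to backup the data")
}

fn main() {
    if let Err(err) = fake_failure() {
        // `{}`  -> just "Failed to backup the data".
        // `{:?}` -> the full chain, ending in the broken-pipe root cause.
        eprintln!("short: {}", err);
        eprintln!("full: {:?}", err);

        // Same shape as the EPIPE special case in main(): inspect the
        // root cause and pick a distinct exit code for broken pipes.
        match err.root_cause().downcast_ref::<std::io::Error>() {
            Some(io_err) if io_err.kind() == std::io::ErrorKind::BrokenPipe => {
                std::process::exit(2)
            }
            _ => std::process::exit(1),
        }
    }
}
```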
32 changes: 24 additions & 8 deletions src/put.rs
@@ -22,6 +22,7 @@ use super::oplog;
use super::protocol;
use super::rollsum;
use super::sendlog;
use anyhow::Context;
use plmap::{PipelineMap, ScopedPipelineMap};
use std::borrow::Cow;
use std::collections::BTreeMap;
@@ -590,7 +591,8 @@ impl<'a, 'b> plmap::Mapper<Result<Vec<(PathBuf, index::IndexEntry)>, anyhow::Err
&mut self,
file_batch: Result<Vec<(PathBuf, index::IndexEntry)>, anyhow::Error>,
) -> Self::Out {
self.process_batch(file_batch?)
self.process_batch(file_batch.context("Received `Err` batch for processing")?)
.context("Failed to process a batch")
}
}

@@ -672,7 +674,8 @@ impl<'a, 'b> BatchFileProcessor<'a, 'b> {
.unwrap()
.lock()
.unwrap()
.stat_cache_lookup_and_update(&stat_cache_key.unwrap())?
.stat_cache_lookup_and_update(&stat_cache_key.unwrap())
.context("Failed to update stat cache")?
} else {
None
};
@@ -682,7 +685,10 @@ impl<'a, 'b> BatchFileProcessor<'a, 'b> {
let mut uncompressed_data_size = 0;
for (i, (ref path, ref mut ent)) in file_batch.iter_mut().enumerate() {
uncompressed_data_size += ent.size.0;
self.log_file_action('~', ent.type_display_char(), path)?;
self.log_file_action('~', ent.type_display_char(), path)
.with_context(|| {
format!("Failed to log file action for {}", path.display())
})?;
ent.data_hash = cache_entry.hashes[i];
ent.data_cursor = cache_entry.data_cursors[i];
}
@@ -707,16 +713,23 @@ impl<'a, 'b> BatchFileProcessor<'a, 'b> {
let file_batch_len = file_batch.len();

for (i, (ref path, ref mut ent)) in file_batch.iter_mut().enumerate() {
self.log_file_action('+', ent.type_display_char(), path)?;
self.log_file_action('+', ent.type_display_char(), path)
.context("Failed to log file action")?;

let ent_data_chunk_start_idx = data_addresses.len() as u64;
let ent_start_byte_offset = self.data_chunker.buffered_count() as u64;

if ent.is_file() {
match file_opener.next_file().unwrap() {
(_, Ok(f)) => {
(ent_path, Ok(f)) => {
let stat_size = ent.size.0;
self.chunk_and_hash_file_data(f, &mut data_addresses, ent)?;
self.chunk_and_hash_file_data(f, &mut data_addresses, ent)
.with_context(|| {
format!(
"Failed to process file data of {}",
ent_path.display()
)
})?;
if stat_size != ent.size.0 {
// The files size changed, don't cache
// this result in the stat cache.
@@ -750,7 +763,9 @@ impl<'a, 'b> BatchFileProcessor<'a, 'b> {
crypto::keyed_content_address(&chunk, &self.ctx.data_hash_key);
let chunk = compression::compress(self.ctx.compression, chunk);
let chunk = self.ctx.data_ectx.encrypt_data(chunk);
self.sender.write_chunk(&address, chunk)?;
self.sender
.write_chunk(&address, chunk)
.context("Failed to write chunk")?;
data_addresses.push(address);
self.ctx.progress.inc(chunk_len);
}
@@ -793,7 +808,8 @@ impl<'a, 'b> BatchFileProcessor<'a, 'b> {
data_cursors: Cow::Borrowed(&data_cursors),
hashes: Cow::Borrowed(&content_hashes),
},
)?;
)
.context("Failed to add stat cache data")?;
}

data_addresses
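The `put.rs` hunks mix two forms of attaching context. `.context("...")` takes a ready-made message, while `.with_context(|| format!(...))` takes a closure, so the `format!` (and the `path.display()` call) only runs when the operation actually fails — a reasonable choice inside the per-file batch loop, which sits on the hot path. A standalone sketch of the two forms, using a hypothetical `archive_file` helper rather than the real batch processor:

```rust
use anyhow::{Context, Result};
use std::path::Path;

// Hypothetical stand-in for the per-file work done in process_batch.
fn archive_file(path: &Path) -> Result<u64> {
    let data = std::fs::read(path)
        // Lazy form: the message (and path.display()) is only built on error.
        .with_context(|| format!("Failed to read file data of {}", path.display()))?;
    Ok(data.len() as u64)
}

fn main() -> Result<()> {
    let total = archive_file(Path::new("/tmp/example.bin"))
        // Eager form: fine for a fixed message with no formatting cost.
        .context("Failed to process a batch")?;
    println!("archived {} bytes", total);
    Ok(())
}
```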
2 changes: 1 addition & 1 deletion src/repository.rs
@@ -274,7 +274,7 @@ impl Repo {
schema_version = txn.read_string("meta/schema_version")?;
if schema_version != CURRENT_SCHEMA_VERSION {
anyhow::bail!(
"the current version of bupstash expects repository schema version {}, got {}",
"the current version of bupstash expects repository schema version {}, got {} (if the latter number is greater than the former, this means that you need to update bupstash)",
CURRENT_SCHEMA_VERSION,
schema_version
);
