Hi!

This patch series adds logging via the log and env_logger crates to coffeepaste.

Feel free to suggest changes!

Best,
Matthias

Matthias Beyer (3):
  Add log and env_logger
  Add initialization of env_logger
  Change output to use log facade

 Cargo.toml   |  2 ++
 src/main.rs  | 27 +++++++++++++++------------
 src/purge.rs | 10 +++++-----
 3 files changed, 22 insertions(+), 17 deletions(-)

-- 
2.25.4
These changes look reasonable. I'm kinda new to sr.ht, so having only ever
accepted pull requests/merge requests from github/gitlab, I'm not 100% sure how
to do this.

I'm trying to run the command sr.ht suggests:

    curl -s https://lists.sr.ht/~mort/public-inbox/patches/10755/mbox | git am -3

However, I just get this error message from git:

    Applying: fixup! Add initialization of env_logger
    error: sha1 information is lacking or useless (src/main.rs).
    error: could not build fake ancestor
    Patch failed at 0002 fixup! Add initialization of env_logger
    hint: Use 'git am --show-current-patch' to see the failed patch
    When you have resolved this problem, run "git am --continue".
    If you prefer to skip this patch, run "git am --skip" instead.
    To restore the original branch and stop patching, run "git am --abort".

It also seems like the patches are out of order, which I'd guess is why I'm
having trouble; your fixup patches and your "Add log and env_logger" patches
are intermixed in the "patches" view at
https://lists.sr.ht/~mort/public-inbox/patches/10755. It seems like sourcehut
sorts them alphabetically instead of chronologically for some reason.

Do you know if there's something I can do to merge successfully (in a way
which preserves attribution), or would you have to send a new set of changes?

Martin
Here's simply a re-send with the fixup commits squashed away. I saw that you
applied the first patch of my series, so this re-send does not contain it
anymore.

You should be able to apply these from your email client (for mutt, use
"| git am" to simply pipe one patch after another to "git am" within your
working copy of the repository).

Best,
Matthias

Matthias Beyer (2):
  Add initialization of env_logger
  Change output to use log facade

 src/main.rs  | 29 +++++++++++++++++------------
 src/purge.rs | 10 +++++-----
 2 files changed, 22 insertions(+), 17 deletions(-)
Signed-off-by: Matthias Beyer <mail@beyermatthias.de>
---
 Cargo.toml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/Cargo.toml b/Cargo.toml
index 150959a..c2d32b3 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -13,6 +13,8 @@ rand = "0.7"
 futures = "0.3"
 libc = "0.2"
 chrono = "0.4"
+log = "0.4"
+env_logger = "0.7"
 
 [build-dependencies.config_struct]
 version = "0.4"
-- 
2.25.4
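For context, since the rest of the series builds on these two dependencies: log provides the logging macros (the facade), while env_logger is one backend that decides at runtime which messages actually get printed. A minimal, self-contained sketch of how the two interact; this is illustrative only and not code from the series:

    use log::{debug, error, info};

    fn main() {
        // env_logger reads the RUST_LOG environment variable to pick the
        // log level; with a plain init() the default filter is "error".
        env_logger::init();

        error!("shown with the default filter");
        info!("shown with RUST_LOG=info or more verbose");
        debug!("shown with RUST_LOG=debug or more verbose");
    }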
Signed-off-by: Matthias Beyer <mail@beyermatthias.de>
---
 src/main.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/main.rs b/src/main.rs
index 685bf80..8fd7c1d 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -307,7 +307,7 @@ async fn on_request(req: Request<Body>) -> Result<Response<Body>, Infallible> {
 
 #[tokio::main]
 async fn main() {
-    let _ = env_logger::from_env(Env::default().default_filter_or("info")).init();
+    let _ = env_logger::from_env(env_logger::Env::default().default_filter_or("info")).init();
     debug!("Logger initialized");
 
     let addr: SocketAddr = CONFIG.listen.parse()
-- 
2.25.4
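The fixup spells out env_logger::Env because the bare Env is not in scope without an import. An equivalent way to write the same initialization, assuming env_logger 0.7 as pinned in Cargo.toml, is to add a use once; and since Builder::init() returns (), the let _ = binding is optional:

    use env_logger::Env;

    fn main() {
        // Same behaviour as the series' init line: default to "info"
        // unless RUST_LOG overrides it.
        env_logger::from_env(Env::default().default_filter_or("info")).init();
    }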
Signed-off-by: Matthias Beyer <mail@beyermatthias.de>
---
 src/main.rs | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/src/main.rs b/src/main.rs
index 0067e25..7f81f2a 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -317,6 +317,9 @@ async fn on_request(req: Request<Body>) -> Result<Response<Body>, Infallible> {
 
 #[tokio::main]
 async fn main() {
+    let _ = env_logger::from_env(env_logger::Env::default().default_filter_or("info")).init();
+    debug!("Logger initialized");
+
     let addr: SocketAddr = CONFIG.listen.parse()
         .expect("Unable to parse socket address");
 
-- 
2.25.4
Signed-off-by: Matthias Beyer <mail@beyermatthias.de>
---
 src/main.rs | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/src/main.rs b/src/main.rs
index 2b42136..34f7811 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -307,6 +307,9 @@ async fn on_request(req: Request<Body>) -> Result<Response<Body>, Infallible> {
 
 #[tokio::main]
 async fn main() {
+    let _ = env_logger::from_env(Env::default().default_filter_or("info")).init();
+    debug!("Logger initialized");
+
     let addr: SocketAddr = CONFIG.listen.parse()
         .expect("Unable to parse socket address");
 
-- 
2.25.4
Signed-off-by: Matthias Beyer <mail@beyermatthias.de>
---
 src/main.rs | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/src/main.rs b/src/main.rs
index 8fd7c1d..609d71f 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,3 +1,5 @@
+#[macro_use] extern crate log;
+
 mod config;
 mod purge;
 
-- 
2.25.4
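The crate-level #[macro_use] attribute makes the log macros visible in every module, including purge.rs. On the 2018 edition (which the async/await code implies) the same effect can be had with per-module imports instead; a purely illustrative sketch of that style, not a suggested change to the series:

    use log::info;

    mod purge {
        // Each module imports only the macros it uses.
        use log::error;

        pub fn purge() {
            error!("Purge failed: example");
        }
    }

    fn main() {
        env_logger::init();
        info!("starting");
        purge::purge();
    }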
Signed-off-by: Matthias Beyer <mail@beyermatthias.de>
---
 src/main.rs  | 26 ++++++++++++++------------
 src/purge.rs | 10 +++++-----
 2 files changed, 19 insertions(+), 17 deletions(-)

diff --git a/src/main.rs b/src/main.rs
index 7f81f2a..a2be060 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,3 +1,5 @@
+#[macro_use] extern crate log;
+
 mod config;
 mod purge;
 mod exif;
@@ -73,7 +75,7 @@ fn create_random_file(dir: &str) -> io::Result<(File, String)> {
 fn delete_file(dir: &str, name: &str) {
     let pathbuf = Path::new(dir).join(name);
     if let Err(err) = fs::remove_file(pathbuf) {
-        println!("Failed to remove {}/{}: {}", dir, name, err);
+        error!("Failed to remove {}/{}: {}", dir, name, err);
     }
 }
 
@@ -193,7 +195,7 @@ async fn on_get(req: Request<Body>) -> Result<Response<Body>, Box<dyn Error>> {
     let file = tokio::fs::File::open(&pathbuf).await;
     match file {
         Err(err) => {
-            println!("Couldn't open {}: {}", pathbuf.display(), err);
+            error!("Couldn't open {}: {}", pathbuf.display(), err);
             return Ok(respond_404()?);
         },
         Ok(file) => {
@@ -241,7 +243,7 @@ async fn on_put(req: Request<Body>) -> Result<Response<Body>, Infallible> {
 
     let fileres = create_random_file(&CONFIG.data);
     if let Err(err) = fileres {
-        println!("Failed to create random file: {}", err);
+        error!("Failed to create random file: {}", err);
         return Ok(Response::builder()
             .status(500)
             .body(Body::from(ERR_500_HTML))
@@ -249,7 +251,7 @@ async fn on_put(req: Request<Body>) -> Result<Response<Body>, Infallible> {
     }
 
     let (mut file, name) = fileres.unwrap();
-    println!("Created file '{}/{}'", CONFIG.data, name);
+    info!("Created file '{}/{}'", CONFIG.data, name);
 
     let mut url = CONFIG.url.to_string() + "/" + &name;
     if let Some(e) = ext {
@@ -267,21 +269,21 @@ async fn on_put(req: Request<Body>) -> Result<Response<Body>, Infallible> {
 
         let chunk = chunkopt.unwrap();
         if let Err(err) = chunk {
-            println!("Upload file transfer error: {}", err);
+            error!("Upload file transfer error: {}", err);
             delete_file(&CONFIG.data, &name);
             return respond_500();
         }
-        
+
         let ch = chunk.unwrap();
         if bodysize + ch.len() > CONFIG.max_file_size as usize {
-            println!("Uploaded file exceeds max size ({} bytes)", CONFIG.max_file_size);
+            error!("Uploaded file exceeds max size ({} bytes)", CONFIG.max_file_size);
             delete_file(&CONFIG.data, &name);
             return respond_413();
         }
         bodysize += ch.len();
 
         if let Err(err) = file.write(&ch) {
-            println!("File write error: {}", err);
+            error!("File write error: {}", err);
             delete_file(&CONFIG.data, &name);
             return respond_500();
         }
@@ -299,7 +301,7 @@ async fn on_put(req: Request<Body>) -> Result<Response<Body>, Infallible> {
 }
 
 async fn on_request(req: Request<Body>) -> Result<Response<Body>, Infallible> {
-    println!("{} {}", req.method(), req.uri());
+    debug!("{} {}", req.method(), req.uri());
 
     let res = match req.method() {
         &Method::GET | &Method::HEAD => on_get(req).await,
@@ -308,7 +310,7 @@ async fn on_request(req: Request<Body>) -> Result<Response<Body>, Infallible> {
     };
 
     if let Err(res) = res {
-        println!("Error: {}", res);
+        error!("Error: {}", res);
         return respond_500();
     } else {
         return Ok(res.unwrap());
@@ -340,9 +342,9 @@ async fn main() {
 
     let server = Server::bind(&addr).serve(make_svc);
 
-    println!("Server listening on {}", addr);
+    info!("Server listening on {}", addr);
 
     if let Err(e) = server.await {
-        eprintln!("Server error: {}", e);
+        error!("Server error: {}", e);
     }
 }
diff --git a/src/purge.rs b/src/purge.rs
index fc41f49..cbbbf54 100644
--- a/src/purge.rs
+++ b/src/purge.rs
@@ -47,7 +47,7 @@ fn do_purge_file(ent: &fs::DirEntry, old_time: time::SystemTime, now: time::Syst
 
     let file_age = chrono::Duration::from_std(now.duration_since(atime)?)?;
     let file_access_time = chrono::DateTime::<chrono::offset::Utc>::from(atime);
-    println!("Deleting '{}' (last accessed at {}; {}d{}h old)",
+    debug!("Deleting '{}' (last accessed at {}; {}d{}h old)",
         ent.file_name().to_string_lossy(),
         file_access_time.format("%Y-%m-%d %H:%M"),
         file_age.num_days(),
@@ -64,14 +64,14 @@ fn do_purge(dir: &str, old_time: time::SystemTime, now: time::SystemTime)
     for ent in readdir {
         let ent = match ent {
             Err(err) => {
-                println!("Purge: Readdir error: {}", err);
+                error!("Purge: Readdir error: {}", err);
                 continue;
             },
             Ok(ent) => ent,
         };
 
         if let Err(err) = do_purge_file(&ent, old_time, now) {
-            println!("Purge: {}: {}", ent.file_name().to_string_lossy(), err);
+            error!("Purge: {}: {}", ent.file_name().to_string_lossy(), err);
             continue;
         }
     }
@@ -83,13 +83,13 @@ pub fn purge(dir: &str, expiration: time::Duration) {
     let now = time::SystemTime::now();
     let old_time = match now.checked_sub(expiration) {
         None => {
-            println!("Purge failed: now.checked_sub(old) returned None.");
+            error!("Purge failed: now.checked_sub(old) returned None.");
             return;
         },
         Some(t) => t,
     };
 
     if let Err(err) = do_purge(dir, old_time, now) {
-        println!("Purge failed: {}", err);
+        error!("Purge failed: {}", err);
     }
 }
-- 
2.25.4
Signed-off-by: Matthias Beyer <mail@beyermatthias.de>
---
 src/main.rs  | 24 ++++++++++++------------
 src/purge.rs | 10 +++++-----
 2 files changed, 17 insertions(+), 17 deletions(-)

diff --git a/src/main.rs b/src/main.rs
index 34f7811..6480b4a 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -72,7 +72,7 @@ fn create_random_file(dir: &str) -> io::Result<(File, String)> {
 fn delete_file(dir: &str, name: &str) {
     let pathbuf = Path::new(dir).join(name);
     if let Err(err) = fs::remove_file(pathbuf) {
-        println!("Failed to remove {}/{}: {}", dir, name, err);
+        error!("Failed to remove {}/{}: {}", dir, name, err);
     }
 }
 
@@ -192,7 +192,7 @@ async fn on_get(req: Request<Body>) -> Result<Response<Body>, Box<dyn Error>> {
     let file = tokio::fs::File::open(&pathbuf).await;
     match file {
         Err(err) => {
-            println!("Couldn't open {}: {}", pathbuf.display(), err);
+            error!("Couldn't open {}: {}", pathbuf.display(), err);
             return Ok(respond_404()?);
         },
         Ok(file) => {
@@ -239,7 +239,7 @@ async fn on_put(req: Request<Body>) -> Result<Response<Body>, Infallible> {
 
     let fileres = create_random_file(&CONFIG.data);
     if let Err(err) = fileres {
-        println!("Failed to create random file: {}", err);
+        error!("Failed to create random file: {}", err);
         return Ok(Response::builder()
             .status(500)
             .body(Body::from(ERR_500_HTML))
@@ -247,7 +247,7 @@ async fn on_put(req: Request<Body>) -> Result<Response<Body>, Infallible> {
     }
 
     let (mut file, name) = fileres.unwrap();
-    println!("Created file '{}/{}'", CONFIG.data, name);
+    info!("Created file '{}/{}'", CONFIG.data, name);
 
     let mut reqbody = req.into_body();
     let mut bodysize: usize = 0;
@@ -259,21 +259,21 @@ async fn on_put(req: Request<Body>) -> Result<Response<Body>, Infallible> {
 
         let chunk = chunkopt.unwrap();
         if let Err(err) = chunk {
-            println!("Upload file transfer error: {}", err);
+            error!("Upload file transfer error: {}", err);
             delete_file(&CONFIG.data, &name);
             return respond_500();
         }
-        
+
         let ch = chunk.unwrap();
         if bodysize + ch.len() > CONFIG.max_file_size as usize {
-            println!("Uploaded file exceeds max size ({} bytes)", CONFIG.max_file_size);
+            error!("Uploaded file exceeds max size ({} bytes)", CONFIG.max_file_size);
             delete_file(&CONFIG.data, &name);
             return respond_413();
         }
         bodysize += ch.len();
 
         if let Err(err) = file.write(&ch) {
-            println!("File write error: {}", err);
+            error!("File write error: {}", err);
             delete_file(&CONFIG.data, &name);
             return respond_500();
         }
@@ -289,7 +289,7 @@ async fn on_put(req: Request<Body>) -> Result<Response<Body>, Infallible> {
 }
 
 async fn on_request(req: Request<Body>) -> Result<Response<Body>, Infallible> {
-    println!("{} {}", req.method(), req.uri());
+    debug!("{} {}", req.method(), req.uri());
 
     let res = match req.method() {
         &Method::GET | &Method::HEAD => on_get(req).await,
@@ -298,7 +298,7 @@ async fn on_request(req: Request<Body>) -> Result<Response<Body>, Infallible> {
     };
 
     if let Err(res) = res {
-        println!("Error: {}", res);
+        error!("Error: {}", res);
         return respond_500();
     } else {
         return Ok(res.unwrap());
@@ -330,9 +330,9 @@ async fn main() {
 
     let server = Server::bind(&addr).serve(make_svc);
 
-    println!("Server listening on {}", addr);
+    info!("Server listening on {}", addr);
 
     if let Err(e) = server.await {
-        eprintln!("Server error: {}", e);
+        error!("Server error: {}", e);
     }
 }
diff --git a/src/purge.rs b/src/purge.rs
index fc41f49..cbbbf54 100644
--- a/src/purge.rs
+++ b/src/purge.rs
@@ -47,7 +47,7 @@ fn do_purge_file(ent: &fs::DirEntry, old_time: time::SystemTime, now: time::Syst
 
     let file_age = chrono::Duration::from_std(now.duration_since(atime)?)?;
     let file_access_time = chrono::DateTime::<chrono::offset::Utc>::from(atime);
-    println!("Deleting '{}' (last accessed at {}; {}d{}h old)",
+    debug!("Deleting '{}' (last accessed at {}; {}d{}h old)",
         ent.file_name().to_string_lossy(),
         file_access_time.format("%Y-%m-%d %H:%M"),
         file_age.num_days(),
@@ -64,14 +64,14 @@ fn do_purge(dir: &str, old_time: time::SystemTime, now: time::SystemTime)
     for ent in readdir {
         let ent = match ent {
             Err(err) => {
-                println!("Purge: Readdir error: {}", err);
+                error!("Purge: Readdir error: {}", err);
                 continue;
             },
             Ok(ent) => ent,
         };
 
         if let Err(err) = do_purge_file(&ent, old_time, now) {
-            println!("Purge: {}: {}", ent.file_name().to_string_lossy(), err);
+            error!("Purge: {}: {}", ent.file_name().to_string_lossy(), err);
             continue;
         }
     }
@@ -83,13 +83,13 @@ pub fn purge(dir: &str, expiration: time::Duration) {
     let now = time::SystemTime::now();
     let old_time = match now.checked_sub(expiration) {
         None => {
-            println!("Purge failed: now.checked_sub(old) returned None.");
+            error!("Purge failed: now.checked_sub(old) returned None.");
             return;
         },
         Some(t) => t,
     };
 
     if let Err(err) = do_purge(dir, old_time, now) {
-        println!("Purge failed: {}", err);
+        error!("Purge failed: {}", err);
     }
 }
-- 
2.25.4
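Taken together, the conversion settles on a consistent scheme: error! for failures, info! for file creation and server startup, and debug! for the per-request line and the purge details. With the default_filter_or("info") initialization from earlier in the series, error and info output appears by default, while the debug output only shows up when RUST_LOG raises the verbosity. A standalone sketch of that behaviour, assuming log 0.4 and env_logger 0.7 as added at the start of the series (the messages are placeholders, not coffeepaste output):

    use env_logger::Env;
    use log::{debug, error, info};

    fn main() {
        // Same default filter as in the series: "info" unless RUST_LOG
        // overrides it, e.g. RUST_LOG=debug for the per-request output.
        env_logger::Builder::from_env(Env::default().default_filter_or("info")).init();

        error!("printed by default");
        info!("printed by default");
        debug!("printed only with RUST_LOG=debug or trace");
    }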