From d3fff194f4bd81d0511e9c3505ec81e6d29207b6 Mon Sep 17 00:00:00 2001
From: Rushmore75
Date: Thu, 17 Apr 2025 08:17:37 -0600
Subject: [PATCH] logging updates

---
 src/db.rs     |   1 -
 src/main.rs   | 100 ++++++++++++++++++++++++++------------------
 src/parser.rs |   9 ++---
 3 files changed, 57 insertions(+), 53 deletions(-)

diff --git a/src/db.rs b/src/db.rs
index 06cddde..49fbd6e 100644
--- a/src/db.rs
+++ b/src/db.rs
@@ -43,7 +43,6 @@ impl Website {
     }
 
     pub fn set_crawled(&mut self) {
-        trace!("Set crawled to true");
         self.crawled = true
     }
 
diff --git a/src/main.rs b/src/main.rs
index bdfc55c..1f2d256 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -20,7 +20,7 @@ use metrics_exporter_prometheus::PrometheusBuilder;
 use serde::Deserialize;
 use surrealdb::{engine::remote::ws::Client, Surreal};
 use tokio::{io::AsyncWriteExt, task::JoinSet};
-use tracing::{debug, error, info, instrument, level_filters::LevelFilter, trace, trace_span};
+use tracing::{debug, error, info, instrument, level_filters::LevelFilter, trace, trace_span, warn};
 use tracing_subscriber::{fmt, layer::SubscriberExt, EnvFilter, Layer, Registry};
 
 mod db;
@@ -174,58 +174,64 @@ async fn process(mut site: Website, db: Surreal<Client>, reqwest: reqwest::Clien
         // Send the http request (get)
         if let Ok(response) = request_builder.send().await {
             // Get body from response
+            let headers = response.headers();
+            let ct = headers.get("Content-Type");
 
-            let path = filesystem::as_path(&site.site);
+            if let Some(ct) = ct {
+                let path = filesystem::as_path(&site.site, ct);
 
-            // make sure that the file is good to go
-            if let Some(mut file) = filesystem::init(&path).await {
-                let should_parse = path.to_string_lossy().ends_with(".html");
-                let mut buf: Vec<u8> = Vec::new();
+                // make sure that the file is good to go
+                if let Some(mut file) = filesystem::init(&path).await {
+                    let should_parse = path.to_string_lossy().ends_with(".html");
+                    let mut buf: Vec<u8> = Vec::new();
 
-                // stream the response onto the disk
-                let mut stream = response.bytes_stream();
-                while let Some(data) = stream.next().await {
-                    match data {
-                        Ok(data) => {
-                            debug!("Writing at: {:?}", path);
-                            let _ = file.write_all(&data).await;
-                            // If we are going to parse this file later, we will save it
-                            // into memory as well as the disk.
-                            if should_parse {
-                                data.iter().for_each(|f| buf.push(*f));
-                            }
-                        },
-                        Err(err) => {
-                            eprintln!("{}", err)
-                        },
-                    }
-                }
+                    // stream the response onto the disk
+                    let mut stream = response.bytes_stream();
+                    while let Some(data) = stream.next().await {
+                        match data {
+                            Ok(data) => {
+                                debug!("Writing at: {:?}", path);
+                                let _ = file.write_all(&data).await;
+                                // If we are going to parse this file later, we will save it
+                                // into memory as well as the disk.
+                                if should_parse {
+                                    data.iter().for_each(|f| buf.push(*f));
+                                }
+                            },
+                            Err(err) => {
+                                eprintln!("{}", err)
+                            },
+                        }
+                    }
 
-                if should_parse {
-                    // Parse document and get relationships
-                    let sites = parser::parse(&site, &buf).await;
-                    // De-duplicate this list
-                    let prev_len = sites.len();
-                    let set = sites.into_iter().fold(HashSet::new(), |mut set, item| {
-                        set.insert(item);
-                        set
-                    });
-                    let de_dupe_sites: Vec<Website> = set.into_iter().collect();
-                    let diff = prev_len - de_dupe_sites.len();
-                    trace!("Saved {diff} from being entered into the db by de-duping");
-                    // Store all the other sites so that we can link to them.
-                    let _ = Website::store_all(de_dupe_sites, &db).await;
-                }
+                    if should_parse {
+                        // Parse document and get relationships
+                        let sites = parser::parse(&site, &buf).await;
+                        // De-duplicate this list
+                        let prev_len = sites.len();
+                        let set = sites.into_iter().fold(HashSet::new(), |mut set, item| {
+                            set.insert(item);
+                            set
+                        });
+                        let de_dupe_sites: Vec<Website> = set.into_iter().collect();
+                        let diff = prev_len - de_dupe_sites.len();
+                        trace!("Saved {diff} from being entered into the db by de-duping");
+                        // Store all the other sites so that we can link to them.
+                        let _ = Website::store_all(de_dupe_sites, &db).await;
+                    }
 
-                // METRICS
-                g.decrement(1);
-                counter!(GET_METRIC).increment(1);
+                    // METRICS
+                    g.decrement(1);
+                    counter!(GET_METRIC).increment(1);
 
-                // update self in db
-                site.set_crawled();
-                Website::store_all(vec![site], &db).await;
-            } else {
-                error!("File failed to cooperate: {:?}", path);
+                    // update self in db
+                    site.set_crawled();
+                    Website::store_all(vec![site], &db).await;
+                } else {
+                    error!("File failed to cooperate: {:?}", path);
+                }
+            } else {
+                warn!("Server did not respond with Content-Type header: {}", site.site.to_string());
             }
         } else {
             error!("Failed to get: {}", &site.site);
diff --git a/src/parser.rs b/src/parser.rs
index 61469d5..28bbc04 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -127,16 +127,15 @@ fn try_get_url(parent: &Url, link: &str) -> Option<Url> {
         }
 
         let url = origin.clone() + link;
-        trace!("Built `{url}` from `{origin} + `{}`", link.to_string());
-
         if let Ok(url) = Url::parse(&url) {
-            trace!("Saved relative url `{}` AS: `{}`", link, url);
+            trace!("Built `{url}` from `{origin} + `{}`", link.to_string());
             Some(url)
        } else {
             error!(
-                "Failed to reconstruct a url from relative url: `{}` on site: `{}`",
+                "Failed to reconstruct a url from relative url: `{}` on site: `{}`. Failed url was: {}",
                 link,
-                parent.to_string()
+                parent.to_string(),
+                url
             );
             None
         }