commit d3fff194f4
parent 3497312fd4

    logging updates
@@ -43,7 +43,6 @@ impl Website {
     }

     pub fn set_crawled(&mut self) {
-        trace!("Set crawled to true");
         self.crawled = true
     }

--- a/src/main.rs
+++ b/src/main.rs
@@ -20,7 +20,7 @@ use metrics_exporter_prometheus::PrometheusBuilder;
 use serde::Deserialize;
 use surrealdb::{engine::remote::ws::Client, Surreal};
 use tokio::{io::AsyncWriteExt, task::JoinSet};
-use tracing::{debug, error, info, instrument, level_filters::LevelFilter, trace, trace_span};
+use tracing::{debug, error, info, instrument, level_filters::LevelFilter, trace, trace_span, warn};
 use tracing_subscriber::{fmt, layer::SubscriberExt, EnvFilter, Layer, Registry};

 mod db;
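
The newly imported `warn` macro emits through the same layered tracing stack as the other macros; the `fmt`, `SubscriberExt`, `EnvFilter`, `Layer`, and `Registry` imports are the usual building blocks for such a stack. A minimal sketch of how one is typically assembled, assuming a configuration along these lines rather than this repo's exact setup:

    use tracing::level_filters::LevelFilter;
    use tracing_subscriber::{fmt, layer::SubscriberExt, EnvFilter, Layer, Registry};

    // Sketch only: a typical layered subscriber. The filter directive and
    // layer choices here are assumptions, not this repo's configuration.
    fn init_tracing() {
        let filter = EnvFilter::builder()
            .with_default_directive(LevelFilter::INFO.into())
            .from_env_lossy();
        let subscriber = Registry::default().with(fmt::layer().with_filter(filter));
        tracing::subscriber::set_global_default(subscriber)
            .expect("a global subscriber was already set");
    }

With a stack like this in place, the `warn!` call added below goes through the same pipeline as the existing `trace!`/`debug!`/`error!` calls.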
@@ -174,58 +174,64 @@ async fn process(mut site: Website, db: Surreal<Client>, reqwest: reqwest::Clien
     // Send the http request (get)
     if let Ok(response) = request_builder.send().await {
         // Get body from response
         let headers = response.headers();
         let ct = headers.get("Content-Type");

-        let path = filesystem::as_path(&site.site);
+        if let Some(ct) = ct {
+            let path = filesystem::as_path(&site.site, ct);

-        // make sure that the file is good to go
-        if let Some(mut file) = filesystem::init(&path).await {
-            let should_parse = path.to_string_lossy().ends_with(".html");
-            let mut buf: Vec<u8> = Vec::new();
+            // make sure that the file is good to go
+            if let Some(mut file) = filesystem::init(&path).await {
+                let should_parse = path.to_string_lossy().ends_with(".html");
+                let mut buf: Vec<u8> = Vec::new();

-            // stream the response onto the disk
-            let mut stream = response.bytes_stream();
-            while let Some(data) = stream.next().await {
-                match data {
-                    Ok(data) => {
-                        debug!("Writing at: {:?}", path);
-                        let _ = file.write_all(&data).await;
-                        // If we are going to parse this file later, we will save it
-                        // into memory as well as the disk.
-                        if should_parse {
-                            data.iter().for_each(|f| buf.push(*f));
-                        }
-                    },
-                    Err(err) => {
-                        eprintln!("{}", err)
-                    },
-                }
-            }
+                // stream the response onto the disk
+                let mut stream = response.bytes_stream();
+                while let Some(data) = stream.next().await {
+                    match data {
+                        Ok(data) => {
+                            debug!("Writing at: {:?}", path);
+                            let _ = file.write_all(&data).await;
+                            // If we are going to parse this file later, we will save it
+                            // into memory as well as the disk.
+                            if should_parse {
+                                data.iter().for_each(|f| buf.push(*f));
+                            }
+                        },
+                        Err(err) => {
+                            eprintln!("{}", err)
+                        },
+                    }
+                }

-            if should_parse {
-                // Parse document and get relationships
-                let sites = parser::parse(&site, &buf).await;
-                // De-duplicate this list
-                let prev_len = sites.len();
-                let set = sites.into_iter().fold(HashSet::new(), |mut set, item| {
-                    set.insert(item);
-                    set
-                });
-                let de_dupe_sites: Vec<Website> = set.into_iter().collect();
-                let diff = prev_len - de_dupe_sites.len();
-                trace!("Saved {diff} from being entered into the db by de-duping");
-                // Store all the other sites so that we can link to them.
-                let _ = Website::store_all(de_dupe_sites, &db).await;
-            }
+                if should_parse {
+                    // Parse document and get relationships
+                    let sites = parser::parse(&site, &buf).await;
+                    // De-duplicate this list
+                    let prev_len = sites.len();
+                    let set = sites.into_iter().fold(HashSet::new(), |mut set, item| {
+                        set.insert(item);
+                        set
+                    });
+                    let de_dupe_sites: Vec<Website> = set.into_iter().collect();
+                    let diff = prev_len - de_dupe_sites.len();
+                    trace!("Saved {diff} from being entered into the db by de-duping");
+                    // Store all the other sites so that we can link to them.
+                    let _ = Website::store_all(de_dupe_sites, &db).await;
+                }

-            // METRICS
-            g.decrement(1);
-            counter!(GET_METRIC).increment(1);
+                // METRICS
+                g.decrement(1);
+                counter!(GET_METRIC).increment(1);

-            // update self in db
-            site.set_crawled();
-            Website::store_all(vec![site], &db).await;
-        } else {
-            error!("File failed to cooperate: {:?}", path);
+                // update self in db
+                site.set_crawled();
+                Website::store_all(vec![site], &db).await;
+            } else {
+                error!("File failed to cooperate: {:?}", path);
+            }
+        } else {
+            warn!("Server did not respond with Content-Type header: {}", site.site.to_string());
         }
     } else {
         error!("Failed to get: {}", &site.site);
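
The substantive change above is threading the `Content-Type` header into `filesystem::as_path`, so the on-disk path can be derived from the MIME type rather than from the URL alone; when the header is absent, the site is skipped with a `warn!` instead of being written to a guessed path. The definition of `as_path` is outside this diff; a sketch of what the updated signature could look like, where the directory layout and the MIME-to-extension mapping are illustrative assumptions, not this repo's code:

    use std::path::PathBuf;
    use reqwest::header::HeaderValue;
    use url::Url;

    // Sketch only: not the implementation from this repo. Derives a local
    // path for a crawled URL, falling back to the Content-Type header for
    // the file extension when the URL itself does not carry one.
    pub fn as_path(url: &Url, content_type: &HeaderValue) -> PathBuf {
        let mut path = PathBuf::from("downloaded");
        path.push(url.host_str().unwrap_or("unknown"));
        path.push(url.path().trim_start_matches('/'));

        if path.extension().is_none() {
            if let Ok(mime) = content_type.to_str() {
                // e.g. "text/html; charset=utf-8" -> "html" (assumed mapping)
                let ext = match mime.split(';').next().unwrap_or("").trim() {
                    "text/html" => "html",
                    "text/plain" => "txt",
                    "application/json" => "json",
                    _ => "bin",
                };
                path.set_extension(ext);
            }
        }
        path
    }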
@@ -127,16 +127,15 @@ fn try_get_url(parent: &Url, link: &str) -> Option<Url> {
     }
     let url = origin.clone() + link;

-    trace!("Built `{url}` from `{origin} + `{}`", link.to_string());
-
     if let Ok(url) = Url::parse(&url) {
-        trace!("Saved relative url `{}` AS: `{}`", link, url);
+        trace!("Built `{url}` from `{origin} + `{}`", link.to_string());
         Some(url)
     } else {
         error!(
-            "Failed to reconstruct a url from relative url: `{}` on site: `{}`",
+            "Failed to reconstruct a url from relative url: `{}` on site: `{}`. Failed url was: {}",
             link,
-            parent.to_string()
+            parent.to_string(),
+            url
         );
         None
     }
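
This hunk moves the "Built ..." trace inside the successful parse branch, so it only fires for URLs that actually parse, and the error path now reports the candidate string it failed to parse. The code builds that candidate by string concatenation; the `url` crate can also perform standard RFC 3986 relative-reference resolution directly. A sketch of that alternative (not what this repo does), using the same logging shape:

    use url::Url;

    // Sketch only: resolves a relative link against its parent page with
    // Url::join instead of string concatenation. Not this repo's code.
    fn resolve(parent: &Url, link: &str) -> Option<Url> {
        match parent.join(link) {
            Ok(url) => {
                tracing::trace!("Resolved `{link}` against `{parent}` as `{url}`");
                Some(url)
            }
            Err(err) => {
                tracing::error!("Failed to resolve `{link}` on site `{parent}`: {err}");
                None
            }
        }
    }

`Url::join` handles leading slashes, `..` segments, and scheme-relative links, which is why it is the common choice when the parent URL is already parsed.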