logging updates

parent 3497312fd4
commit d3fff194f4

@@ -43,7 +43,6 @@ impl Website {
     }
 
     pub fn set_crawled(&mut self) {
-        trace!("Set crawled to true");
         self.crawled = true
     }
 
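Side note: the struct this hunk belongs to isn't shown in full. From the usages in this commit (`self.crawled` here, `site.site` in src/main.rs) it plausibly looks like the sketch below; both fields are inferred, not confirmed.

```rust
use url::Url;

// Inferred sketch of the struct this hunk lives in; field names come
// from usages elsewhere in the diff (`site.site`, `self.crawled`).
pub struct Website {
    pub site: Url, // passed to filesystem::as_path in src/main.rs
    crawled: bool, // flipped by set_crawled()
}
```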

src/main.rs

@@ -20,7 +20,7 @@ use metrics_exporter_prometheus::PrometheusBuilder;
 use serde::Deserialize;
 use surrealdb::{engine::remote::ws::Client, Surreal};
 use tokio::{io::AsyncWriteExt, task::JoinSet};
-use tracing::{debug, error, info, instrument, level_filters::LevelFilter, trace, trace_span};
+use tracing::{debug, error, info, instrument, level_filters::LevelFilter, trace, trace_span, warn};
 use tracing_subscriber::{fmt, layer::SubscriberExt, EnvFilter, Layer, Registry};
 
 mod db;
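The only change here is pulling `warn` into scope for the new Content-Type warning below. As an aside, the `fmt`/`EnvFilter`/`Layer`/`Registry` imports correspond to a layered subscriber setup; the crawler's actual init code isn't in this diff, but a minimal sketch of that pattern (assuming tracing-subscriber's env-filter feature) looks like this:

```rust
use tracing::level_filters::LevelFilter;
use tracing_subscriber::{fmt, layer::SubscriberExt, EnvFilter, Layer, Registry};

fn init_tracing() {
    // Honor RUST_LOG if set, otherwise default to TRACE (an assumption
    // for this sketch, not necessarily what the crawler defaults to).
    let filter = EnvFilter::builder()
        .with_default_directive(LevelFilter::TRACE.into())
        .from_env_lossy();

    // Registry plus a formatted output layer, filtered by the EnvFilter.
    let subscriber = Registry::default().with(fmt::layer().with_filter(filter));

    tracing::subscriber::set_global_default(subscriber)
        .expect("setting the global tracing subscriber failed");
}
```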
@@ -174,58 +174,64 @@ async fn process(mut site: Website, db: Surreal<Client>, reqwest: reqwest::Client
     // Send the http request (get)
     if let Ok(response) = request_builder.send().await {
         // Get body from response
-        let path = filesystem::as_path(&site.site);
+        let headers = response.headers();
+        let ct = headers.get("Content-Type");
+
+        if let Some(ct) = ct {
+            let path = filesystem::as_path(&site.site, ct);
 
             // make sure that the file is good to go
             if let Some(mut file) = filesystem::init(&path).await {
                 let should_parse = path.to_string_lossy().ends_with(".html");
                 let mut buf: Vec<u8> = Vec::new();
 
                 // stream the response onto the disk
                 let mut stream = response.bytes_stream();
                 while let Some(data) = stream.next().await {
                     match data {
                         Ok(data) => {
                             debug!("Writing at: {:?}", path);
                             let _ = file.write_all(&data).await;
                             // If we are going to parse this file later, we will save it
                             // into memory as well as the disk.
                             if should_parse {
                                 data.iter().for_each(|f| buf.push(*f));
                             }
                         },
                         Err(err) => {
                             eprintln!("{}", err)
                         },
                     }
                 }
 
                 if should_parse {
                     // Parse document and get relationships
                     let sites = parser::parse(&site, &buf).await;
                     // De-duplicate this list
                     let prev_len = sites.len();
                     let set = sites.into_iter().fold(HashSet::new(), |mut set, item| {
                         set.insert(item);
                         set
                     });
                     let de_dupe_sites: Vec<Website> = set.into_iter().collect();
                     let diff = prev_len - de_dupe_sites.len();
                     trace!("Saved {diff} from being entered into the db by de-duping");
                     // Store all the other sites so that we can link to them.
                     let _ = Website::store_all(de_dupe_sites, &db).await;
                 }
 
                 // METRICS
                 g.decrement(1);
                 counter!(GET_METRIC).increment(1);
 
                 // update self in db
                 site.set_crawled();
                 Website::store_all(vec![site], &db).await;
             } else {
                 error!("File failed to cooperate: {:?}", path);
             }
+        } else {
+            warn!("Server did not respond with Content-Type header: {}", site.site.to_string());
+        }
     } else {
         error!("Failed to get: {}", &site.site);
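The new `ct` argument implies `filesystem::as_path` now picks the on-disk file name from the response's Content-Type rather than from the URL alone. The filesystem module isn't part of this commit, so the following is only a guess at its shape, assuming a `(&Url, &HeaderValue) -> PathBuf` signature:

```rust
use reqwest::header::HeaderValue;
use std::path::PathBuf;
use url::Url;

// Hypothetical sketch of filesystem::as_path after this commit; the real
// module is not included in the diff, so names and layout are guesses.
pub fn as_path(url: &Url, content_type: &HeaderValue) -> PathBuf {
    let mut path = PathBuf::from("./downloaded");
    path.push(url.host_str().unwrap_or("unknown"));
    path.push(url.path().trim_start_matches('/'));

    // Give HTML responses an .html extension so the caller's
    // `ends_with(".html")` check can route the file to the parser.
    if let Ok(ct) = content_type.to_str() {
        if ct.starts_with("text/html") && path.extension().is_none() {
            path.set_extension("html");
        }
    }

    path
}
```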
@@ -127,16 +127,15 @@ fn try_get_url(parent: &Url, link: &str) -> Option<Url> {
     }
     let url = origin.clone() + link;
 
-    trace!("Built `{url}` from `{origin} + `{}`", link.to_string());
-
     if let Ok(url) = Url::parse(&url) {
-        trace!("Saved relative url `{}` AS: `{}`", link, url);
+        trace!("Built `{url}` from `{origin} + `{}`", link.to_string());
         Some(url)
     } else {
         error!(
-            "Failed to reconstruct a url from relative url: `{}` on site: `{}`",
+            "Failed to reconstruct a url from relative url: `{}` on site: `{}`. Failed url was: {}",
             link,
-            parent.to_string()
+            parent.to_string(),
+            url
         );
         None
     }
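One observation on `try_get_url`: it builds the candidate with string concatenation (`origin.clone() + link`) and then re-parses. The `url` crate can resolve relative links directly with `Url::join`, which also handles `../` segments and root-relative paths; a self-contained example, not code from this repo:

```rust
use url::Url;

fn main() {
    let parent = Url::parse("https://example.com/a/b/page.html").unwrap();

    // join() resolves relative to the parent URL, like a browser would.
    assert_eq!(
        parent.join("img/logo.png").unwrap().as_str(),
        "https://example.com/a/b/img/logo.png"
    );

    // A leading slash resolves against the origin instead.
    assert_eq!(
        parent.join("/about").unwrap().as_str(),
        "https://example.com/about"
    );
}
```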