work off content-type header
parent 7fab961d76
commit 647c4cd324
119 src/main.rs
@@ -19,7 +19,7 @@ use metrics::{counter, gauge};
 use metrics_exporter_prometheus::PrometheusBuilder;
 use serde::Deserialize;
 use surrealdb::{engine::remote::ws::Client, Surreal};
-use tokio::{io::AsyncWriteExt, task::JoinSet};
+use tokio::{io::{AsyncWriteExt, BufWriter}, task::JoinSet};
 use tracing::{debug, error, info, instrument, level_filters::LevelFilter, trace, trace_span, warn};
 use tracing_subscriber::{fmt, layer::SubscriberExt, EnvFilter, Layer, Registry};
 
@@ -175,63 +175,78 @@ async fn process(mut site: Website, db: Surreal<Client>, reqwest: reqwest::Client
     if let Ok(response) = request_builder.send().await {
         // Get body from response
         let headers = response.headers();
-        let ct = headers.get("Content-Type");
-
-        if let Some(ct) = ct {
-            let path = filesystem::as_path(&site.site, ct);
-
-            // make sure that the file is good to go
-            if let Some(mut file) = filesystem::init(&path).await {
-                let should_parse = path.to_string_lossy().ends_with(".html");
-                let mut buf: Vec<u8> = Vec::new();
-
-                // stream the response onto the disk
-                let mut stream = response.bytes_stream();
-                while let Some(data) = stream.next().await {
-                    match data {
-                        Ok(data) => {
-                            debug!("Writing at: {:?}", path);
-                            let _ = file.write_all(&data).await;
-                            // If we are going to parse this file later, we will save it
-                            // into memory as well as the disk.
-                            if should_parse {
-                                data.iter().for_each(|f| buf.push(*f));
-                            }
-                        },
-                        Err(err) => {
-                            eprintln!("{}", err)
-                        },
-                    }
-                }
-
-                if should_parse {
-                    // Parse document and get relationships
-                    let sites = parser::parse(&site, &buf).await;
-                    // De-duplicate this list
-                    let prev_len = sites.len();
-                    let set = sites.into_iter().fold(HashSet::new(), |mut set, item| {
-                        set.insert(item);
-                        set
-                    });
-                    let de_dupe_sites: Vec<Website> = set.into_iter().collect();
-                    let diff = prev_len - de_dupe_sites.len();
-                    trace!("Saved {diff} from being entered into the db by de-duping");
-                    // Store all the other sites so that we can link to them.
-                    let _ = Website::store_all(de_dupe_sites, &db).await;
-                }
-
-                // METRICS
-                g.decrement(1);
-                counter!(GET_METRIC).increment(1);
-
-                // update self in db
-                site.set_crawled();
-                Website::store_all(vec![site], &db).await;
-            } else {
-                error!("File failed to cooperate: {:?}", path);
-            }
-        } else {
-            warn!("Server did not respond with Content-Type header: {}", site.site.to_string());
-        }
+        #[allow(non_snake_case)]
+        let CT = headers.get("Content-Type");
+        let ct = headers.get("content-type");
+
+        let ct = match (CT, ct) {
+            (None, None) => {
+                warn!("Server did not respond with Content-Type header. Url: {} Headers: ({:?})", site.site.to_string(), headers);
+                return
+            },
+            (None, Some(a)) => a,
+            (Some(a), None) => a,
+            (Some(a), Some(_)) => a,
+        };
+
+        let path = filesystem::as_path(&site.site, ct);
+
+        // make sure that the file is good to go
+        if let Some(file) = filesystem::init(&path).await {
+            let should_parse = path.to_string_lossy().ends_with(".html");
+            let mut buf: Vec<u8> = Vec::new();
+
+            let mut writer = BufWriter::new(file);
+
+            // stream the response onto the disk
+            let mut stream = response.bytes_stream();
+
+            info!("Writing at: {:?}", path);
+            while let Some(data) = stream.next().await {
+                match data {
+                    Ok(data) => {
+                        let _ = writer.write_all(&data).await;
+                        // If we are going to parse this file later, we will save it
+                        // into memory as well as the disk.
+                        if should_parse {
+                            data.iter().for_each(|f| buf.push(*f));
+                        }
+                    },
+                    Err(err) => {
+                        eprintln!("{}", err)
+                    },
+                }
+            }
+            let _ = writer.flush().await;
+
+            if should_parse {
+                // Parse document and get relationships
+                let sites = parser::parse(&site, &buf).await;
+                // De-duplicate this list
+                let prev_len = sites.len();
+                let set = sites.into_iter().fold(HashSet::new(), |mut set, item| {
+                    set.insert(item);
+                    set
+                });
+                let de_dupe_sites: Vec<Website> = set.into_iter().collect();
+                let diff = prev_len - de_dupe_sites.len();
+                trace!("Saved {diff} from being entered into the db by de-duping");
+                // Store all the other sites so that we can link to them.
+                let _ = Website::store_all(de_dupe_sites, &db).await;
+            }
+
+            // METRICS
+            g.decrement(1);
+            counter!(GET_METRIC).increment(1);
+
+            // update self in db
+            site.set_crawled();
+            Website::store_all(vec![site], &db).await;
+        } else {
+            error!("File failed to cooperate: {:?}", path);
+        }
     } else {
         error!("Failed to get: {}", &site.site);
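A note on the new buffered writes: with tokio, flush() is itself async, and BufWriter cannot flush from its destructor, so the buffered tail of a response only reliably reaches disk once the flush future is awaited. A minimal standalone sketch of the pattern (the /tmp/demo.txt path is arbitrary):

use tokio::io::{AsyncWriteExt, BufWriter};

#[tokio::main]
async fn main() -> std::io::Result<()> {
    let file = tokio::fs::File::create("/tmp/demo.txt").await?;
    let mut writer = BufWriter::new(file);

    // Small writes accumulate in the in-memory buffer instead of
    // hitting the file descriptor one syscall at a time.
    writer.write_all(b"buffered bytes").await?;

    // flush() only returns a future; without the .await it is never
    // polled and the buffered data may never be written out.
    writer.flush().await?;
    Ok(())
}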
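For context on the paired Content-Type lookups: reqwest exposes the http crate's HeaderMap, whose &str lookups are case-insensitive (header names are normalized to lowercase), so both spellings should resolve to the same entry and the mixed match arms act as a belt-and-braces fallback. A quick standalone check, assuming only the http crate:

use http::header::{HeaderMap, HeaderValue, CONTENT_TYPE};

fn main() {
    let mut headers = HeaderMap::new();
    headers.insert(CONTENT_TYPE, HeaderValue::from_static("text/html"));

    // &str lookups are normalized before the search, so any casing
    // finds the same stored entry.
    assert!(headers.get("Content-Type").is_some());
    assert!(headers.get("content-type").is_some());
    assert_eq!(headers.get("Content-Type"), headers.get("content-type"));
}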