cleanup and more accurately use metrics

Rushmore75 2025-04-15 09:07:16 -06:00
parent 94912e9125
commit c08a20ac00


@@ -24,6 +24,8 @@ const GET_IN_FLIGHT: &str = "gets_in_flight";
 const SITES_CRAWLED: &str = "pages_crawled";
 const BEING_PROCESSED: &str = "pages_being_processed";
+const BATCH_SIZE: usize = 2;
+
 #[derive(Deserialize)]
 struct Config {
     surreal_ns: String,
@@ -109,13 +111,7 @@ async fn main() {
     let span = trace_span!("Loop");
     let span = span.enter();
     while crawled < config.budget {
-        let get_num = if config.budget - crawled < 100 {
-            config.budget - crawled
-        } else {
-            100
-        };
-        let uncrawled = get_uncrawled_links(&db, get_num, config.crawl_filter.clone()).await;
+        let uncrawled = get_uncrawled_links(&db, config.budget - crawled, config.crawl_filter.clone()).await;
         if uncrawled.is_empty() {
             info!("Had more budget but finished crawling everything.");
             return;
@@ -170,26 +166,26 @@ async fn process(mut site: Website, db: Surreal<Client>, reqwest: reqwest::Clien
     // Send the http request (get)
     if let Ok(response) = request_builder.send().await {
-        // METRICS
-        g.decrement(1);
-        counter!(GET_METRIC).increment(1);
+        // TODO if this will fail if the object we are downloading is
+        // larger than the memory of the device it's running on.
+        // We should store it *as* we download it then parse it in-place.
         // Get body from response
         let data = response
             .text()
             .await
             .expect("Failed to read http response's body!");
-        // Store document
-        filesystem::store(&data, &site.site).await;
+        // METRICS
+        g.decrement(1);
+        counter!(GET_METRIC).increment(1);
+
+        // Store document
+        let should_parse = filesystem::store(&data, &site.site).await;
+        if should_parse {
             // Parse document and get relationships
             let sites = parser::parse(&site, &data).await;
-            // update self in db
-            site.set_crawled();
-            Website::store_all(vec![site], &db).await;
             // De-duplicate this list
             let prev_len = sites.len();
             let set = sites.into_iter().fold(HashSet::new(), |mut set,item| {
@@ -202,6 +198,11 @@ async fn process(mut site: Website, db: Surreal<Client>, reqwest: reqwest::Clien
             // Store all the other sites so that we can link to them.
             let _ = Website::store_all(de_dupe_sites, &db).await;
+        }
+
+        // update self in db
+        site.set_crawled();
+        Website::store_all(vec![site], &db).await;
     } else {
         error!("Failed to get: {}", &site.site);
@@ -215,9 +216,11 @@ async fn get_uncrawled_links(
     mut count: usize,
     filter: String,
 ) -> Vec<Website> {
-    if count > 100 {
-        count = 100
+    if count > BATCH_SIZE {
+        count = BATCH_SIZE;
     }
     debug!("Getting uncrawled links");
     let mut response = db
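
The TODO added in process() notes that reading the whole body with .text() buffers the entire document in memory, which can fail for downloads larger than the device's RAM. A minimal sketch of the "store it as we download it" idea, assuming reqwest is built with the "stream" feature and tokio with async file I/O; stream_to_disk and save_path are hypothetical names, not part of this crawler, and parsing in place would still need changes since parser::parse currently takes the full text:

use futures_util::StreamExt;
use std::path::Path;
use tokio::{fs::File, io::AsyncWriteExt};

// Sketch only: write each chunk of the response body to disk as it arrives,
// so peak memory stays around the chunk size instead of the document size.
async fn stream_to_disk(
    response: reqwest::Response,
    save_path: &Path,
) -> Result<(), Box<dyn std::error::Error>> {
    let mut file = File::create(save_path).await?;
    let mut stream = response.bytes_stream();

    while let Some(chunk) = stream.next().await {
        // Each item is a Result<Bytes, reqwest::Error>; bail out on either
        // a network error or a filesystem error.
        file.write_all(&chunk?).await?;
    }
    file.flush().await?;
    Ok(())
}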