Compare commits


No commits in common. "c08a20ac003baeda95f3bb607da9b397f0ffbdc8" and "a9465dda6e32d86a92d0b14e23a1513db9272bf4" have entirely different histories.

3 changed files with 61 additions and 1491 deletions


@@ -5,10 +5,7 @@ use tracing::{debug, error, instrument, trace, warn};
 use url::Url;
 
 #[instrument(skip(data))]
-/// Returns whether or not the saved file should be parsed.
-/// If the file is just data, like an image, it doesn't need to be parsed.
-/// If it's html, then it does need to be parsed.
-pub async fn store(data: &str, url: &Url) -> bool {
+pub async fn store(data: &str, url: &Url) {
     // extract data from url to save it accurately
     let url_path = PathBuf::from("./downloaded/".to_string() + url.domain().unwrap_or("UnknownDomain") + url.path());
@@ -24,20 +21,19 @@ pub async fn store(data: &str, url: &Url) -> bool {
         (url_path.clone(), "index.html".into())
     };
 
-    let should_parse = filename.ends_with(".html");
-
     debug!("Writing at: {:?} {:?}", basepath, filename);
 
     // create the folders
     if let Err(err) = fs::create_dir_all(&basepath).await {
         error!("Dir creation: {err} {:?}", basepath);
     } else {
-        // FIXME I don't think this handles index.html files well...
-        // TODO this should probably append .html to non-described files
-        // create the file if that was successful
         if let Err(err) = fs::write(&basepath.join(filename), data).await {
             error!("File creation: {err} {:?}", url_path);
         }
     }
-
-    should_parse
 }
 
 fn valid_file_extension(take: &&OsStr) -> bool {
@@ -45,14 +41,35 @@ fn valid_file_extension(take: &&OsStr) -> bool {
     let all = los.split('.');
     match all.last() {
         Some(s) => {
-            // FIXME it's worth noting that the dumb tlds like .zip are in here,
-            // which could cause problems
-            let all_domains = include_str!("tlds-alpha-by-domain.txt");
-
-            // check if it is a domain
-            match all_domains.lines().map(str::to_lowercase).find(|x| x==s.to_lowercase().as_str()) {
-                Some(_) => false,
-                None => true
+            match s.to_lowercase().as_str() {
+                "html" => true,
+                "css" => true,
+                "js" => true,
+                "ts" => true,
+                "otf" => true, // font
+                "png" => true,
+                "svg" => true,
+                "jpg" => true,
+                "jpeg" => true,
+                "mp4" => true,
+                "mp3" => true,
+                "webp" => true,
+                "pdf" => true,
+                "json" => true,
+                "xml" => true,
+                // IGNORE
+                // TODO Should this be a list of all domains?
+                "org" => false,
+                "com" => false,
+                "net" => false,
+                _ => {
+                    warn!("Might be forgetting a file extension: {s}");
+                    false
+                }
             }
         },
         None => false,
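For reference, the last-extension check that both versions build by hand with `split('.').last()` can also be expressed with std's `Path::extension`. A minimal, self-contained sketch, not code from either commit, with a shortened extension list standing in for the full match above:

    use std::ffi::OsStr;
    use std::path::Path;

    // Sketch only: same idea as `valid_file_extension`, but using Path::extension
    // instead of splitting the file name on '.' by hand.
    fn has_parseable_extension(name: &OsStr) -> bool {
        match Path::new(name).extension().and_then(OsStr::to_str) {
            Some(ext) => matches!(ext.to_lowercase().as_str(), "html" | "css" | "js"),
            None => false,
        }
    }

    fn main() {
        assert!(has_parseable_extension(OsStr::new("index.html")));
        assert!(!has_parseable_extension(OsStr::new("archive.tar.gz")));
        println!("extension checks passed");
    }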


@@ -24,8 +24,6 @@ const GET_IN_FLIGHT: &str = "gets_in_flight";
 const SITES_CRAWLED: &str = "pages_crawled";
 const BEING_PROCESSED: &str = "pages_being_processed";
 
-const BATCH_SIZE: usize = 2;
-
 #[derive(Deserialize)]
 struct Config {
     surreal_ns: String,
@@ -111,7 +109,13 @@ async fn main() {
     let span = trace_span!("Loop");
     let span = span.enter();
     while crawled < config.budget {
-        let uncrawled = get_uncrawled_links(&db, config.budget - crawled, config.crawl_filter.clone()).await;
+        let get_num = if config.budget - crawled < 100 {
+            config.budget - crawled
+        } else {
+            100
+        };
+
+        let uncrawled = get_uncrawled_links(&db, get_num, config.crawl_filter.clone()).await;
         if uncrawled.is_empty() {
             info!("Had more budget but finished crawling everything.");
             return;
@@ -166,44 +170,39 @@ async fn process(mut site: Website, db: Surreal<Client>, reqwest: reqwest::Client
     // Send the http request (get)
     if let Ok(response) = request_builder.send().await {
-        // TODO if this will fail if the object we are downloading is
-        // larger than the memory of the device it's running on.
-        // We should store it *as* we download it then parse it in-place.
+        // METRICS
+        g.decrement(1);
+        counter!(GET_METRIC).increment(1);
 
         // Get body from response
         let data = response
            .text()
            .await
            .expect("Failed to read http response's body!");
 
-        // METRICS
-        g.decrement(1);
-        counter!(GET_METRIC).increment(1);
-
         // Store document
-        let should_parse = filesystem::store(&data, &site.site).await;
+        filesystem::store(&data, &site.site).await;
 
-        if should_parse {
-            // Parse document and get relationships
-            let sites = parser::parse(&site, &data).await;
-
-            // De-duplicate this list
-            let prev_len = sites.len();
-            let set = sites.into_iter().fold(HashSet::new(), |mut set,item| {
-                set.insert(item);
-                set
-            });
-            let de_dupe_sites: Vec<Website> = set.into_iter().collect();
-            let diff = prev_len - de_dupe_sites.len();
-            trace!("Saved {diff} from being entered into the db by de-duping");
-
-            // Store all the other sites so that we can link to them.
-            let _ = Website::store_all(de_dupe_sites, &db).await;
-        }
+        // Parse document and get relationships
+        let sites = parser::parse(&site, &data).await;
 
         // update self in db
         site.set_crawled();
         Website::store_all(vec![site], &db).await;
+
+        // De-duplicate this list
+        let prev_len = sites.len();
+        let set = sites.into_iter().fold(HashSet::new(), |mut set,item| {
+            set.insert(item);
+            set
+        });
+        let de_dupe_sites: Vec<Website> = set.into_iter().collect();
+        let diff = prev_len - de_dupe_sites.len();
+        trace!("Saved {diff} from being entered into the db by de-duping");
+
+        // Store all the other sites so that we can link to them.
+        let _ = Website::store_all(de_dupe_sites, &db).await;
     } else {
         error!("Failed to get: {}", &site.site);
     }
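The de-duplication step that changes position in this hunk is otherwise identical in both commits: fold the parsed links into a HashSet, then collect back into a Vec. The same effect can be had by collecting straight into a HashSet; a minimal sketch with plain strings standing in for the crawler's `Website` type (not code from either commit):

    use std::collections::HashSet;

    // Sketch only: de-duplicate a list by round-tripping it through a HashSet,
    // as the crawler does before calling Website::store_all.
    fn main() {
        let sites = vec!["a.example", "b.example", "a.example"];
        let prev_len = sites.len();

        let set: HashSet<_> = sites.into_iter().collect();
        let de_dupe_sites: Vec<_> = set.into_iter().collect();

        println!("Saved {} from being entered into the db by de-duping", prev_len - de_dupe_sites.len());
    }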
@@ -216,11 +215,9 @@ async fn get_uncrawled_links(
     mut count: usize,
     filter: String,
 ) -> Vec<Website> {
-    if count > BATCH_SIZE {
-        count = BATCH_SIZE;
+    if count > 100 {
+        count = 100
     }
 
     debug!("Getting uncrawled links");
     let mut response = db
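Both versions of `get_uncrawled_links` clamp the requested count by hand, one against `BATCH_SIZE` and the other against a hard-coded 100, and the `get_num` block added in `main` repeats the same pattern inline. For reference, a minimal equivalent using `usize::min`; a sketch, not code from either commit:

    // Sketch only: clamp a requested batch size to an upper bound, equivalent
    // to the hand-written `if count > ...` seen in both versions.
    fn clamp_batch(requested: usize, bound: usize) -> usize {
        requested.min(bound)
    }

    fn main() {
        assert_eq!(clamp_batch(250, 100), 100); // over the bound: capped
        assert_eq!(clamp_batch(42, 100), 42);   // under the bound: unchanged
        println!("clamp checks passed");
    }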

File diff suppressed because it is too large