extern crate markup5ever_rcdom as rcdom;
extern crate html5ever;

use std::{rc::Rc, time::Instant};

use db::{connect, Website};
use html5ever::{parse_document, tendril::TendrilSink, tree_builder::TreeBuilderOpts, ParseOpts};
use rcdom::{Node, RcDom};
use surrealdb::{engine::remote::ws::Client, sql::Thing, Surreal};
use tracing::{debug, info, instrument, trace, trace_span, warn};
use tracing_subscriber::EnvFilter;

mod db;

#[tokio::main]
async fn main() {
    tracing_subscriber::fmt()
        .with_env_filter(EnvFilter::from_default_env())
        .with_line_number(true)
        .without_time()
        .init();
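
    // Note: EnvFilter::from_default_env() reads the standard RUST_LOG env var,
    // so e.g. `RUST_LOG=trace` enables the trace-level output used below.
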
    debug!("Starting...");

    // Would probably take these in as parameters from a cli
    let url = "https://oliveratkinson.net/";
    let budget = 50;
    let mut crawled = 0;

    let db = connect().await.expect("Failed to connect to db, aborting.");

    // Kick off the whole machine - this Website object doesn't matter, it's
    // just here to let get() work.
    let span = trace_span!("Pre-Loop");
    let pre_loop_span = span.enter();
    let mut site = Website::new(url, false);
    let dom = get(&mut site, &db).await.expect("Initial page returned None.");
    crawl_wrapper(&dom, &db, &site, &mut crawled).await;
    drop(pre_loop_span);

    let span = trace_span!("Loop");
    let span = span.enter();
    while crawled < budget {
        let get_num = if budget - crawled < 100 {
            budget - crawled
        } else {
            100
        };
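        // (Equivalent shorthand: let get_num = (budget - crawled).min(100);)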

        let uncrawled = get_uncrawled_links(&db, get_num).await;
        debug!("Crawling {} pages...", uncrawled.len());

        let span = trace_span!("Crawling");
        // Binding the guard to `_` would drop it immediately and never
        // actually enter the span, so give it a real name.
        let _guard = span.enter();

        for mut site in uncrawled {
            if let Some(dom) = get(&mut site, &db).await {
                trace!("Pre-walk checkpoint");

                crawl_wrapper(&dom, &db, &site, &mut crawled).await;

                let percent = format!("{:.2}%", (crawled as f32 / budget as f32) * 100f32);
                info!("Crawled {crawled} out of {budget} pages. ({percent})");
            } else {
                warn!("Failed to get {}", site.to_string());
            }
        }
    }
    drop(span);

    info!("Done");
}

/// Walks the freshly fetched DOM for links, records them on `site`, and bumps
/// the crawl counter.
async fn crawl_wrapper(dom: &Rc<Node>, db: &Surreal<Client>, site: &Website, count: &mut usize) {
    let mut buffer = Vec::new();
    let now = Instant::now();
    walk(dom, db, site, &mut buffer).await;
    let dif = now.elapsed().as_micros();
    trace!("{}", format!("Walked in {:.3}ms", dif as f64 / 1000.));
    site.links_to(buffer, db).await;
    *count += 1;
}

/// A quick helper function for downloading a url
#[instrument(skip_all)]
async fn get(site: &mut Website, db: &Surreal<Client>) -> Option<Rc<Node>> {
    trace!("Get: {}", site.to_string());
    let now = Instant::now();
    if let Ok(response) = reqwest::get(site.to_string()).await {
        let dif = now.elapsed().as_micros();
        trace!("{}", format!("Got page in {:.3}ms", dif as f64 / 1000.));

        let data = response.text().await.unwrap();
        let opts = ParseOpts {
            tree_builder: TreeBuilderOpts {
                drop_doctype: true,
                ..Default::default()
            },
            ..Default::default()
        };

        let dom = parse_document(RcDom::default(), opts)
            .from_utf8()
            .read_from(&mut data.as_bytes())
            .unwrap();
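        // (dom.document is the RcDom root Handle that walk() later recurses from.)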

        site.set_crawled();
        site.store(db).await;
        trace!("Got: {}", site.to_string());
        return Some(dom.document);
    }
    trace!("Failed to get: {}", site.to_string());
    None
}

/// Walks the given site, placing its findings in the database
async fn walk(node: &rcdom::Handle, db: &Surreal<Client>, site: &Website, links_to: &mut Vec<Thing>) {
    let span = trace_span!("Walk");
    let span = span.enter();

    match &node.data {
        rcdom::NodeData::Element { name, attrs, .. } => {
            for attr in attrs.borrow().clone() {
                // Only follow the href attribute; other attributes of an <a>
                // tag (class, id, ...) aren't links.
                if name.local.to_string() == "a" && attr.name.local.to_string() == "href" {
                    if attr.value.starts_with("mailto") {
                        trace!("Is mailto");
                        // mailto link, lol
                        let _created: Option<db::Record> = db.create("email").content(db::Email {
                            email: attr.value.to_string(),
                            on: site.domain_str().to_owned(),
                        }).await.unwrap();
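                        // (attr.value keeps the full URI, scheme included,
                        // e.g. "mailto:user@example.com".)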
                    } else {
                        let mut web = site.clone();
                        let url = web.mut_url();

                        // TODO remove #xyz
                        let joined = url.join(&attr.value).unwrap();
                        *url = joined;
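                        // (For that TODO, url::Url::set_fragment(None) on the
                        // joined URL would strip a trailing #xyz.)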

                        let crawled = web.crawled();
                        *crawled = false;

                        if let Some(id) = web.store(db).await {
                            links_to.push(id);
                        }
                    }
                }
            }
        },
        _ => {},
    }
    drop(span);
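
    // A recursive async fn would have an infinitely-sized future, so the
    // recursive call is boxed before being awaited.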
    for child in node.children.borrow().iter() {
        Box::pin(walk(child, db, site, links_to)).await;
    }
}

/// Returns uncrawled links
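///
/// A sketch of the intended call pattern (illustrative only; this is a
/// private async fn, so the snippet isn't compiled as a doctest):
///
/// ```ignore
/// let batch = get_uncrawled_links(&db, 50).await; // up to 50 uncrawled Websites
/// ```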
async fn get_uncrawled_links(db: &Surreal<Client>, mut count: usize) -> Vec<Website> {
    if count > 100 {
        count = 100;
    }
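
    // bind() passes `count` as the $count parameter rather than interpolating
    // it into the query string.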
    let mut response = db
        .query("SELECT * FROM website WHERE crawled = false LIMIT $count")
        .bind(("count", count))
        .await
        .expect("Hard-coded query failed..?");

    response.take(0).expect("Returned websites couldn't be parsed")
}