// internet_mapper/src/main.rs

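//! A small breadth-first web crawler: it fetches pages with `reqwest`,
//! parses them with `html5ever`, and records the resulting link graph in
//! SurrealDB until a fixed page budget runs out.
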
extern crate markup5ever_rcdom as rcdom;
extern crate html5ever;
use std::{rc::Rc, time::Instant};
use db::{connect, Website};
use html5ever::{local_name, parse_document, tendril::TendrilSink, tree_builder::TreeBuilderOpts, ParseOpts};
use rcdom::{Node, RcDom};
use surrealdb::{engine::remote::ws::Client, sql::Thing, Surreal};
use tracing::{debug, info, instrument, trace, trace_span, warn};
use tracing_subscriber::EnvFilter;
mod db;

#[tokio::main]
async fn main() {
    tracing_subscriber::fmt()
        .with_env_filter(EnvFilter::from_default_env())
        .with_line_number(true)
        .without_time()
        .init();
    debug!("Starting...");

    // Would probably take these in as parameters from a cli
    // let url = "https://oliveratkinson.net/";
    let url = "http://localhost:5500";
    let budget = 50;
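
    // A minimal sketch of reading these from the command line instead, using
    // only the standard library (hypothetical - not wired in yet):
    //
    // let url = std::env::args().nth(1)
    //     .unwrap_or_else(|| "http://localhost:5500".to_string());
    // let budget: usize = std::env::args().nth(2)
    //     .and_then(|n| n.parse().ok())
    //     .unwrap_or(50);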
    let mut crawled = 0;

    let db = connect().await.expect("Failed to connect to db, aborting.");
    let client = reqwest::Client::builder()
        // .use_rustls_tls()
        .build()
        .unwrap();

    // Kick off the whole machine - this Website object doesn't matter, it's
    // just to allow for get() to work.
    let span = trace_span!("Pre-Loop");
    let pre_loop_span = span.enter();
    // Download the site
    let mut site = Website::new(url, false);
    let dom = get(&mut site, &db, &client).await.expect("Initial page returned None.");
    crawl_wrapper(&dom, &db, &site, &mut crawled).await;
    drop(pre_loop_span);

    let span = trace_span!("Loop");
    let loop_span = span.enter();
    while crawled < budget {
        // Fetch at most 100 pages per batch, and never more than the
        // remaining budget.
        let get_num = if budget - crawled < 100 {
            budget - crawled
        } else {
            100
        };

        let uncrawled = get_uncrawled_links(&db, get_num).await;
        if uncrawled.is_empty() {
            info!("Had more budget but finished crawling everything.");
            return;
        }
        debug!("Crawling {} pages...", uncrawled.len());

        let span = trace_span!("Crawling");
        // Bind the guard to a named variable; `let _ = span.enter()` would
        // drop it immediately and the span would never cover the loop body.
        let _guard = span.enter();

        for mut site in uncrawled {
            if let Some(dom) = get(&mut site, &db, &client).await {
                crawl_wrapper(&dom, &db, &site, &mut crawled).await;
                let percent = format!("{:.2}%", (crawled as f32 / budget as f32) * 100f32);
                info!("Crawled {crawled} out of {budget} pages. ({percent})");
            } else {
                warn!("Failed to get {}", site.to_string());
            }
        }
    }
    drop(loop_span);

    info!("Done");
}
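
/// Processes one downloaded page: walks its DOM, stores every outgoing link
/// as an uncrawled `Website`, records the `links_to` relations, and bumps
/// the crawled-page counter.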
async fn crawl_wrapper(dom: &Rc<Node>, db: &Surreal<Client>, site: &Website, count: &mut usize) {
    let mut buffer = Vec::new();
    let timer = Timer::start("Walked");
    walk(dom, db, site, &mut buffer).await;
    drop(timer);
    site.links_to(buffer, db).await;
    *count += 1;
}

/// A quick helper function for downloading a URL
#[instrument(skip_all)]
async fn get(site: &mut Website, db: &Surreal<Client>, getter: &reqwest::Client) -> Option<Rc<Node>> {
    trace!("Get: {}", site.to_string());
    let timer = Timer::start("Got page");

    if let Ok(response) = getter.get(site.to_string()).send().await {
        drop(timer);
        let data = response.text().await.unwrap();
        let opts = ParseOpts {
            tree_builder: TreeBuilderOpts {
                drop_doctype: true,
                ..Default::default()
            },
            ..Default::default()
        };
        let dom = parse_document(RcDom::default(), opts)
            .from_utf8()
            .read_from(&mut data.as_bytes())
            .unwrap();

        // TODO save the dom to minio if a flag is set
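        //
        // A minimal sketch of that idea (the `minio_client` here is
        // hypothetical and not part of this crate; `html5ever::serialize`
        // is the real API for turning the DOM back into HTML):
        //
        // use html5ever::serialize::{serialize, SerializeOpts};
        // use rcdom::SerializableHandle;
        //
        // let mut html: Vec<u8> = Vec::new();
        // let handle = SerializableHandle::from(dom.document.clone());
        // serialize(&mut html, &handle, SerializeOpts::default()).unwrap();
        // minio_client.put_object("crawl-cache", &site.to_string(), html).await;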
        site.set_crawled();
        site.store(db).await;
        trace!("Got: {}", site.to_string());
        return Some(dom.document);
    }

    trace!("Failed to get: {}", site.to_string());
    None
}

/// Walks the given site's DOM, storing each link it finds as a `Website`
/// in the database and collecting the stored ids into `links_to`.
async fn walk(node: &rcdom::Handle, db: &Surreal<Client>, site: &Website, links_to: &mut Vec<Thing>) {
    let span = trace_span!("Walk");
    let span = span.enter();

    // Match each node - node basically means element.
    match &node.data {
        rcdom::NodeData::Element { name, attrs, .. } => {
            for attr in attrs.borrow().clone() {
                match name.local {
                    local_name!("a") |
                    local_name!("audio") |
                    local_name!("area") |
                    local_name!("img") |
                    local_name!("link") |
                    local_name!("object") |
                    local_name!("source") |
                    local_name!("base") |
                    local_name!("video") => {
                        let attribute_name = attr.name.local.to_string();
                        if attribute_name == "src" || attribute_name == "href" || attribute_name == "data" {
                            // Get a clone of the current site object
                            let mut web = site.clone();

                            // Set url
                            let url = web.mut_url();
                            url.set_fragment(None); // removes #xyz
                            // `join` can fail on values that aren't valid
                            // relative URLs, so skip those instead of panicking.
                            if let Ok(joined) = url.join(&attr.value) {
                                *url = joined;

                                // Set other attributes
                                web.crawled = false;
                                // TODO set element name
                                // let element_name = name.local.to_string();

                                if let Some(id) = web.store(db).await {
                                    links_to.push(id);
                                }
                            }
                        }
                    },
                    local_name!("button") |
                    local_name!("meta") |
                    local_name!("iframe") => {
                        // dbg!(attrs);
                    }
                    _ => { /**/ }
                };
            }
        },
        _ => {},
    };
    drop(span);

    // A recursive `async fn` must box the recursive call, otherwise the
    // future type would be infinitely sized.
    for child in node.children.borrow().iter() {
        Box::pin(walk(child, db, site, links_to)).await;
    }
}

/// Returns up to `count` uncrawled links from the database (capped at 100).
async fn get_uncrawled_links(db: &Surreal<Client>, mut count: usize) -> Vec<Website> {
    if count > 100 {
        count = 100;
    }

    let mut response = db
        .query("SELECT * FROM website WHERE crawled = false LIMIT $count")
        .bind(("count", count))
        .await
        .expect("Hard-coded query failed..?");
    response.take(0).expect("Returned websites couldn't be parsed")
}
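
/// RAII timer: on `drop` it logs, at debug level, how long it was alive.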
pub struct Timer<'a> {
    start: Instant,
    msg: &'a str,
}

impl<'a> Timer<'a> {
    #[inline]
    pub fn start(msg: &'a str) -> Self {
        Self { start: Instant::now(), msg }
    }
}

impl<'a> Drop for Timer<'a> {
    fn drop(&mut self) {
        let diff = self.start.elapsed().as_micros();
        debug!("{} in {:.3}ms", self.msg, diff as f64 / 1000.);
    }
}