extern crate html5ever;
use std::time::Instant;
use db::{connect, Website};
use s3::S3;
use surrealdb::{engine::remote::ws::Client, Surreal};
use tracing::{debug, info, instrument, trace, trace_span};
use tracing_subscriber::EnvFilter;
mod db;
mod s3;
mod parser;
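/// Connection settings for SurrealDB and the S3-compatible store (MinIO here).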
struct Config<'a> {
surreal_ns: &'a str,
surreal_db: &'a str,
surreal_url: &'a str,
surreal_username: &'a str,
surreal_password: &'a str,
s3_url: &'a str,
s3_bucket: &'a str,
s3_access_key: &'a str,
s3_secret_key: &'a str,
}
#[tokio::main]
async fn main() {
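    // Set up logging; EnvFilter::from_default_env() reads the RUST_LOG environment variable.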
tracing_subscriber::fmt()
.with_env_filter(EnvFilter::from_default_env())
.with_line_number(true)
.without_time()
.init();
debug!("Starting...");
let config = Config {
surreal_ns: "test",
surreal_db: "v1.7",
surreal_url: "localhost:8000",
surreal_username: "root",
surreal_password: "root",
s3_url: "http://localhost:9000",
s3_bucket: "v1.7",
s3_access_key: "8tUJn7e1paMFZQr0PKIT",
s3_secret_key: "uSMvYxNOeCejCUgXVqgTfYlUEcmiZY0xcZ91M9E0",
};
    // These would probably be taken in as CLI parameters rather than hard-coded.
let starting_url = "https://oliveratkinson.net/";
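    // Hard cap on the number of pages fetched this run; `crawled` counts progress toward it.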
let budget = 15;
let mut crawled = 0;
let s3 = S3::connect(&config).await.expect("Failed to connect to minio, aborting.");
let db = connect(&config).await.expect("Failed to connect to surreal, aborting.");
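    // A single reqwest::Client is shared so every request reuses its connection pool.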
    let reqwest = reqwest::Client::builder()
        // .use_rustls_tls()
        .build()
        .expect("Failed to build reqwest client.");
    // Kick off the whole machine. This initial Website object only exists so the first
    // get() call has something to fetch.
let span = trace_span!("Pre-Loop");
let pre_loop_span = span.enter();
// Download the site
    let mut site = Website::new(starting_url, false);
get(&mut site, &db, &reqwest, &s3, &mut crawled).await;
drop(pre_loop_span);
let span = trace_span!("Loop");
let span = span.enter();
while crawled < budget {
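        // Fetch uncrawled links in batches of at most 100, shrinking as the budget runs out.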
        let get_num = (budget - crawled).min(100);
let uncrawled = get_uncrawled_links(&db, get_num).await;
        if uncrawled.is_empty() {
            info!("Had more budget but finished crawling everything.");
            break;
        }
debug!("Crawling {} pages...", uncrawled.len());
let span = trace_span!("Crawling");
let _ = span.enter();
for mut site in uncrawled {
get(&mut site, &db, &reqwest, &s3, &mut crawled).await;
let percent = format!("{:.2}%", (crawled as f32 / budget as f32) * 100f32);
info!("Crawled {crawled} out of {budget} pages. ({percent})");
}
}
    drop(loop_span);
info!("Done");
}
/// Downloads a webpage, stores the raw document in S3, and parses it for further links.
#[instrument(skip_all)]
async fn get(
site: &mut Website,
    db: &Surreal<Client>,
reqwest: &reqwest::Client,
s3: &S3,
count: &mut usize,
) {
trace!("Get: {}", site.to_string());
let timer = Timer::start("Got page");
if let Ok(response) = reqwest.get(site.to_string()).send().await {
timer.stop();
        // Get the response body
        let data = response.text().await.expect("Failed to read response body");
        // Store the raw document
        s3.store(&data, &site.site).await;
        // Parse the document and store the discovered relationships
        parser::parse(db, site, data).await;
        *count += 1;
    } else {
        trace!("Failed to get: {}", site.to_string());
    }
}
/// Returns up to `count` uncrawled links from the database (capped at 100).
async fn get_uncrawled_links(db: &Surreal<Client>, mut count: usize) -> Vec<Website> {
if count > 100 {
count = 100
}
let mut response = db
.query("SELECT * FROM website WHERE crawled = false LIMIT $count")
.bind(("count", count))
.await
.expect("Hard-coded query failed..?");
response
.take(0)
.expect("Returned websites couldn't be parsed")
}
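/// Wall-clock timer that logs its elapsed time, either when `stop()` is called
/// or when the value is dropped at the end of a scope.
///
/// Typical usage (a sketch; the message is arbitrary):
///
/// ```ignore
/// let timer = Timer::start("Got page");
/// // ... the work being measured ...
/// timer.stop(); // logs "Got page in 1.234ms" and disarms the Drop logging
/// ```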
pub struct Timer<'a> {
start: Instant,
msg: &'a str,
}
impl<'a> Timer<'a> {
    #[inline]
    pub fn start(msg: &'a str) -> Self {
        Self {
            start: Instant::now(),
            msg,
        }
    }

    /// Logs the elapsed time and consumes the timer so `Drop` doesn't log it a second time.
    pub fn stop(self) -> f64 {
        let ms = self.log();
        // Skip the Drop impl, which would otherwise log the same measurement again.
        std::mem::forget(self);
        ms
    }

    fn log(&self) -> f64 {
        let ms = self.start.elapsed().as_micros() as f64 / 1000.;
        debug!("{} in {:.3}ms", self.msg, ms);
        ms
    }
}

impl Drop for Timer<'_> {
    fn drop(&mut self) {
        self.log();
    }
}