#![feature(type_alias_impl_trait)]
#![feature(const_async_blocks)]

extern crate html5ever;

use std::time::Instant;

use db::{connect, Website};
use s3::S3;
use surrealdb::{engine::remote::ws::Client, Surreal};
use tokio::task::JoinSet;
use tracing::{debug, info, instrument, trace, trace_span, warn};
use tracing_subscriber::{fmt::{self, time::LocalTime}, layer::{Filter, SubscriberExt}, EnvFilter, Layer, Registry};

mod db;
mod parser;
mod s3;

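/// Connection settings for the SurrealDB database and the S3-compatible (MinIO) object store.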
struct Config<'a> {
    surreal_ns: &'a str,
    surreal_db: &'a str,
    surreal_url: &'a str,
    surreal_username: &'a str,
    surreal_password: &'a str,

    s3_url: &'a str,
    s3_bucket: &'a str,
    s3_access_key: &'a str,
    s3_secret_key: &'a str,
}

#[tokio::main]
async fn main() {
    let total_runtime = Timer::start("Completed");

    let writer = std::fs::OpenOptions::new()
        .append(true)
        .create(true)
        .open("./docker/logs/tracing.log")
        .expect("Couldn't make log file!");

    let registry = Registry::default()
        .with(
            fmt::layer()
                .with_line_number(true)
                .with_thread_ids(true)
                .with_file(true)
                // .with_timer(LocalTime::rfc_3339()) // Loki or alloy does this automatically
                .json()
                .with_writer(writer),
        );

    tracing::subscriber::set_global_default(registry).expect("Failed to set default subscriber");

    // tracing_subscriber::fmt()
    //     .with_env_filter(EnvFilter::from_default_env())
    //     .with_line_number(true)
    //     .with_thread_ids(true)
    //     .with_file(true)
    //     .with_timer(LocalTime::rfc_3339())
    //     .init();

    debug!("Starting...");
    // Would probably take these in as parameters from a cli
    let starting_url = "https://en.wikipedia.org/";
    // When getting uncrawled pages, the site name must contain this string; "" effectively disables the filter.
    let crawl_filter = "en.wikipedia.org/";
    let budget = 50;
    let mut crawled = 0;

    let config = Config {
        surreal_url: "localhost:8000",
        surreal_username: "root",
        surreal_password: "root",
        surreal_ns: "test",
        surreal_db: "v1.12",
        s3_bucket: "v1.12",
        s3_url: "http://localhost:9000",
        s3_access_key: "p8gXIZEO2FnWqWBiJYwo",
        s3_secret_key: "1mRO0EYA2YAQ0xsKrlbkIIz4AT8KNXy6QIQPtxUu",
    };

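    // Open the SurrealDB and MinIO connections before any crawling starts.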
    let db = connect(&config)
        .await
        .expect("Failed to connect to surreal, aborting.");
    let s3 = S3::connect(&config)
        .await
        .expect("Failed to connect to minio, aborting.\n\nThis probably means you need to login to the minio console and get a new access key!\n\n(Probably here) http://localhost:9001/access-keys/new-account\n\n");

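    // Shared HTTP client; `gzip(true)` lets reqwest transparently decompress gzip-encoded responses.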
    let reqwest = reqwest::Client::builder()
        // .use_rustls_tls()
        .gzip(true)
        .build()
        .expect("Failed to build reqwest client.");

    // Kick off the whole machine - this Website object doesn't matter much, it just gives
    // the first call to get() something to work on.
    let span = trace_span!("Pre-Loop");
    let pre_loop_span = span.enter();
    // Download the site
    let site = Website::new(&starting_url, false);
    get(site, db.clone(), reqwest.clone(), s3.clone()).await;

    drop(pre_loop_span);

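    // Main crawl loop: fetch batches of uncrawled links (at most 100 at a time) and
    // crawl them concurrently until the budget is spent.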
let span = trace_span!("Loop");
|
|
let span = span.enter();
|
|
while crawled < budget {
|
|
let get_num = if budget - crawled < 100 {
|
|
budget - crawled
|
|
} else {
|
|
100
|
|
};
|
|
|
|
let uncrawled = get_uncrawled_links(&db, get_num, crawl_filter.to_string()).await;
|
|
if uncrawled.len() == 0 {
|
|
info!("Had more budget but finished crawling everything.");
|
|
return;
|
|
}
|
|
debug!("Crawling {} pages...", uncrawled.len());
|
|
|
|
let span = trace_span!("Crawling");
|
|
let _ = span.enter();
|
|
|
|
        {
            let mut futures = JoinSet::new();
            for site in uncrawled {
                futures.spawn(get(site, db.clone(), reqwest.clone(), s3.clone()));
                // Technically the site hasn't been crawled *yet*, but the future
                // that will crawl it has been set up.
                crawled += 1;
                // let percent = format!("{:.2}%", (crawled as f32 / budget as f32) * 100f32);
                // info!("Crawled {crawled} out of {budget} pages. ({percent})");
            }
            debug!("Joining {} futures...", futures.len());
            // join all the gets together
            let _ = futures.join_all().await;
        }
    }
    drop(span);

    info!("Done");
    drop(total_runtime);
}

/// Downloads a single webpage, stores it in S3, and parses it for further links.
/// It is acceptable to clone `db`, `reqwest`, and `s3` because they all use `Arc`s internally. - Noted by Oliver
#[instrument(skip(db, s3, reqwest))]
async fn get(mut site: Website, db: Surreal<Client>, reqwest: reqwest::Client, s3: S3) {
    trace!("Get: {}", site.to_string());

    let timer = Timer::start("Built request");
    let request_builder = reqwest.get(site.to_string());
    timer.stop();

    let timer = Timer::start("Got page");
    if let Ok(response) = request_builder.send().await {
        timer.stop();
        debug!("Getting body...");

        // Get body
        let data = response.text().await.expect("Failed to read http response's body!");
        // Store document
        s3.store(&data, &site.site).await;
        // Parse document and store relationships
        parser::parse(&db, &mut site, &data).await;
        return;
    }
    trace!("Failed to get: {}", site.to_string());
}

/// Returns up to `count` uncrawled links (capped at 100) whose site name matches `filter`.
#[instrument(skip(db))]
async fn get_uncrawled_links(
    db: &Surreal<Client>,
    mut count: usize,
    filter: String,
) -> Vec<Website> {
    if count > 100 {
        count = 100;
    }
    debug!("Getting uncrawled links");

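    // Select sites that haven't been crawled yet and whose name matches the filter, up to `count`.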
    let mut response = db
        .query("SELECT * FROM website WHERE crawled = false AND site ~ type::string($format) LIMIT $count;")
        .bind(("format", filter))
        .bind(("count", count))
        .await
        .expect("Hard-coded query failed..?");
    response
        .take(0)
        .expect("Returned websites couldn't be parsed")
}

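/// Lightweight timing helper: records an `Instant` when started and logs the elapsed
/// time (at `warn` level above 200 ms, otherwise `trace`) when stopped or dropped.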
pub struct Timer<'a> {
    start: Instant,
    msg: &'a str,
    // Whether the elapsed time has already been logged (see `stop` below).
    stopped: std::cell::Cell<bool>,
}

impl<'a> Timer<'a> {
    #[inline]
    pub fn start(msg: &'a str) -> Self {
        Self {
            start: Instant::now(),
            msg,
            stopped: std::cell::Cell::new(false),
        }
    }

    pub fn stop(&self) -> f64 {
        let dif = self.start.elapsed().as_micros();
        let ms = dif as f64 / 1000.;

        // Only log the first time the timer is stopped; otherwise an explicit `stop()`
        // followed by the `Drop` impl below would log the same timer twice.
        if !self.stopped.replace(true) {
            if ms > 200. {
                warn!("{} in {:.3}ms", self.msg, ms);
            } else {
                trace!("{} in {:.3}ms", self.msg, ms);
            }
        }

        ms
    }
}

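// Log the elapsed time even if the timer goes out of scope without an explicit `stop()`.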
impl Drop for Timer<'_> {
    fn drop(&mut self) {
        self.stop();
    }
}