back to spider-rs

Oliver Atkinson 2024-12-12 11:04:45 -07:00
parent 0f8a3d7215
commit fd71a8bc13
5 changed files with 1300 additions and 421 deletions
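
Summary (as read from the diff below): the hand-rolled reqwest download loop and its get() helper are replaced with the spider crate. Crawling is driven by spider::website::Website, crawled pages arrive on a broadcast channel via subscribe(), and each page is stored in minio (S3) and SurrealDB from a tokio task. parser::parse no longer writes to the database; it now returns the discovered links as Vec<Website> and leaves storage to the caller.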

Cargo.lock (generated): 1542 lines changed; file diff suppressed because it is too large.

Cargo.toml

@@ -8,9 +8,10 @@ html5ever = "0.29.0"
 # minio = "0.1.0"
 minio = {git="https://github.com/minio/minio-rs.git", rev = "c28f576"}
 reqwest = "0.12.9"
-serde = { version = "1.0.214", features = ["derive"] }
-surrealdb = "2.0.4"
-tokio = { version="1.41.0", features = ["full"] }
-tracing = "0.1.40"
-tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
+serde = { version = "1.0", features = ["derive"] }
+spider = { version = "2.21", features = ["sync"] }
+surrealdb = "2.0"
+tokio = { version="1.41", features = ["full"] }
+tracing = "0.1"
+tracing-subscriber = { version = "0.3", features = ["env-filter"] }
 url = { version = "2.5.3", features = ["serde"] }

src/db.rs

@@ -22,31 +22,20 @@ pub struct Website {
 impl Website {
     /// Creates a blank site (assumes that url param is site's root)
-    pub fn new(url: &str, crawled: bool) -> Self {
-        let site = match Url::parse(url) {
-            Ok(a) => a,
-            Err(_) => todo!(),
-        };
+    pub fn new(url: Url, crawled: bool) -> Self {
         Self {
             id: None,
             crawled,
-            site,
+            site: url,
         }
     }

-    pub fn set_crawled(&mut self) {
-        trace!("Set crawled to true");
-        self.crawled = true
-    }
-
     #[instrument(skip_all)]
     pub async fn links_to(&self, other: Vec<Thing>, db: &Surreal<Client>) {
         let len = other.len();
         if len == 0 {return}

         let from = self.site.to_string();
-        // let to = other.site.to_string();
-        trace!("Linking {from} to {} other pages.", other.len());

         let msg = format!("Linked {len} pages");
         let timer = Timer::start(&msg);
         // prevent the timer from being dropped instantly.
@@ -65,7 +54,6 @@ impl Website {
         let _: Vec<usize> = vec;
         if let Some(num) = vec.get(0) {
             if *num == len {
-                trace!("Link OK");
                 return;
             } else {
                 warn!("Didn't link all the records. {num}/{len}");

src/main.rs

@@ -3,14 +3,17 @@ extern crate html5ever;
 use std::time::Instant;

 use db::{connect, Website};
+use parser::parse;
 use s3::S3;
 use surrealdb::{engine::remote::ws::Client, Surreal};
-use tracing::{debug, info, instrument, trace, trace_span};
+use tokio::sync::broadcast::Receiver;
+use tracing::{debug, info, trace, trace_span};
 use tracing_subscriber::EnvFilter;
+use url::Url;

 mod db;
-mod s3;
 mod parser;
+mod s3;

 struct Config<'a> {
     surreal_ns: &'a str,
@@ -33,94 +36,82 @@ async fn main() {
         .without_time()
         .init();
     debug!("Starting...");

     let config = Config {
-        surreal_ns: "test",
-        surreal_db: "v1.7",
         surreal_url: "localhost:8000",
         surreal_username: "root",
         surreal_password: "root",
+        surreal_ns: "test",
+        surreal_db: "v1.9",
+        s3_bucket: "v1.9",
         s3_url: "http://localhost:9000",
-        s3_bucket: "v1.7",
-        s3_access_key: "8tUJn7e1paMFZQr0PKIT",
-        s3_secret_key: "uSMvYxNOeCejCUgXVqgTfYlUEcmiZY0xcZ91M9E0",
+        s3_access_key: "0zv7GbLQsw4ZI8TclMps",
+        s3_secret_key: "5dB7QkGFw7fYbUJ5LpHk2GbWR7Bl710HlRz4NbzB",
     };

     // Would probably take these in as parameters from a cli
-    let starting_url = "https://oliveratkinson.net/";
-    let budget = 15;
-    let mut crawled = 0;
+    // let starting_url = "https://oliveratkinson.net/";
+    let starting_url = "https://en.wikipedia.org/wiki/Main_Page";

-    let s3 = S3::connect(&config).await.expect("Failed to connect to minio, aborting.");
-    let db = connect(&config).await.expect("Failed to connect to surreal, aborting.");
-
-    let reqwest = reqwest::Client::builder()
-        // .use_rustls_tls()
+    let s3 = S3::connect(&config)
+        .await
+        .expect("Failed to connect to minio, aborting.");
+    let db = connect(&config)
+        .await
+        .expect("Failed to connect to surreal, aborting.");
+
+    let mut site = spider::website::Website::new(&starting_url)
+        .with_limit(4)
+        .with_depth(0)
         .build()
         .unwrap();

-    // Kick off the whole machine - This Website object doesn't matter, it's just to allow for
-    // get() to work.
-    let span = trace_span!("Pre-Loop");
-    let pre_loop_span = span.enter();
-    // Download the site
-    let mut site = Website::new(&starting_url, false);
-    get(&mut site, &db, &reqwest, &s3, &mut crawled).await;
-    drop(pre_loop_span);
+    let mut rx: Receiver<spider::page::Page> = site.subscribe(0).unwrap();

-    let span = trace_span!("Loop");
-    let span = span.enter();
-    while crawled < budget {
-        let get_num = if budget - crawled < 100 { budget - crawled } else { 100 };
-        let uncrawled = get_uncrawled_links(&db, get_num).await;
-        if uncrawled.len() == 0 {
-            info!("Had more budget but finished crawling everything.");
-            return;
+    let subscriber = tokio::spawn(async move {
+        let span = trace_span!("Sub");
+        let span = span.enter();
+        while let Ok(res) = rx.recv().await {
+            // Get body
+            let data = res.get_html();
+            let url = Url::parse(res.get_url()).unwrap();
+            trace!("Got '{}'", url.to_string());
+
+            // Store document
+            s3.store(&data, &url).await;
+
+            // Parse document and store relationships
+            let mut page = Website::new(url, true);
+            page.store(&db).await;
+
+            // Relate this page to all the pages it links to
+            let span = trace_span!("Linking");
+            let span = span.enter();
+            let found_links = parse(&page, data).await;
+            let mut stored_links = Vec::new();
+            for mut link in found_links {
+                if let Some(id) = link.store(&db).await {
+                    stored_links.push(id);
+                }
+            }
+            page.links_to(stored_links, &db).await;
+            drop(span);
         }
-        debug!("Crawling {} pages...", uncrawled.len());
-
-        let span = trace_span!("Crawling");
-        let _ = span.enter();
-
-        for mut site in uncrawled {
-            get(&mut site, &db, &reqwest, &s3, &mut crawled).await;
-            let percent = format!("{:.2}%", (crawled as f32 / budget as f32) * 100f32);
-            info!("Crawled {crawled} out of {budget} pages. ({percent})");
-        }
-    }
-    drop(span);
+        drop(span);
+    });
+
+    let timer = Timer::start("Crawled");
+    site.crawl().await;
+    site.unsubscribe();
+    drop(timer);
+
+    subscriber.await.unwrap();

     info!("Done");
 }

-#[instrument(skip_all)]
-/// Downloads and crawls and stores a webpage.
-async fn get(
-    site: &mut Website,
-    db: &Surreal<Client>,
-    reqwest: &reqwest::Client,
-    s3: &S3,
-    count: &mut usize,
-) {
-    trace!("Get: {}", site.to_string());
-    let timer = Timer::start("Got page");
-    if let Ok(response) = reqwest.get(site.to_string()).send().await {
-        timer.stop();
-
-        // Get body
-        let data = response.text().await.unwrap();
-        // Store document
-        s3.store(&data, &site.site).await;
-        // Parse document and store relationships
-        parser::parse(db, site, data).await;
-        *count += 1;
-    }
-    trace!("Failed to get: {}", site.to_string());
-}
-
 /// Returns uncrawled links
 async fn get_uncrawled_links(db: &Surreal<Client>, mut count: usize) -> Vec<Website> {
     if count > 100 {

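For context, here is the subscribe-then-crawl pattern the new main.rs is built around, reduced to a self-contained sketch. It assumes spider 2.x with the "sync" feature (which the Cargo.toml change enables) and tokio as the runtime; the println! body stands in for the S3/SurrealDB storage and link parsing, so only the channel wiring is shown:

    use spider::website::Website;

    #[tokio::main]
    async fn main() {
        // Build the crawler the same way the new main.rs does.
        let mut site = Website::new("https://en.wikipedia.org/wiki/Main_Page")
            .with_limit(4) // stop after 4 pages
            .with_depth(0) // no depth cap
            .build()
            .unwrap();

        // With the "sync" feature, subscribe() yields a tokio broadcast Receiver<Page>.
        let mut rx = site.subscribe(0).unwrap();

        // Drain pages concurrently while the crawl runs.
        let subscriber = tokio::spawn(async move {
            while let Ok(page) = rx.recv().await {
                // Real code would store to S3/SurrealDB here.
                println!("got {} ({} bytes)", page.get_url(), page.get_html().len());
            }
        });

        site.crawl().await;
        site.unsubscribe(); // drop the sender so the receiver loop exits
        subscriber.await.unwrap();
    }

The unsubscribe() after crawl() matters: it closes the broadcast channel, so rx.recv() returns an error and the spawned task ends instead of blocking forever.
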
src/parser.rs

@@ -5,8 +5,7 @@ use html5ever::tokenizer::{BufferQueue, TokenizerResult};
 use html5ever::tokenizer::{StartTag, TagToken};
 use html5ever::tokenizer::{Token, TokenSink, TokenSinkResult, Tokenizer, TokenizerOpts};
 use html5ever::{local_name, tendril::*};
-use surrealdb::engine::remote::ws::Client;
-use surrealdb::Surreal;
+use tracing::{instrument, trace};

 use crate::db::Website;
@@ -67,11 +66,10 @@ impl TokenSink for LinkParser<'_> {
         }
     }

-pub async fn parse(db: &Surreal<Client>, site: &mut Website, data: String) {
-    site.set_crawled();
-    site.store(db).await;
+#[instrument(skip_all)]
+pub async fn parse(site: &Website, data: String) -> Vec<Website> {
     let sink = LinkParser { site };
     let chunk = Tendril::from_str(&data).unwrap();
     let mut input = BufferQueue::default();
@@ -79,17 +77,16 @@ pub async fn parse(db: &Surreal<Client>, site: &mut Website, data: String) {
     let token = Tokenizer::new(sink.clone(), TokenizerOpts::default());

-    let mut links_to = Vec::new();
+    // let mut links_to = Vec::new();
+    let mut res = Vec::new();

     while !input.is_empty() {
-        if let TokenizerResult::Script(s) = token.feed(&mut input) {
-            for mut web in s {
-                if let Some(id) = web.store(db).await {
-                    links_to.push(id);
-                }
-            }
+        if let TokenizerResult::Script(mut s) = token.feed(&mut input) {
+            res.append(&mut s);
         }
     }

-    sink.site.links_to(links_to, db).await;
+    trace!("Found {} links.", res.len());

     assert!(input.is_empty());
     token.end();
+
+    res
 }
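
Design note: parse() is now a pure function over the fetched HTML. It tokenizes the document with html5ever, collects the discovered links as Vec<Website>, and leaves all persistence (store(), links_to()) to the subscriber loop in main.rs, which is why it no longer needs a Surreal<Client> handle.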