back to spider-rs

parent 0f8a3d7215
commit fd71a8bc13
Cargo.lock (generated): 1542 changes
File diff suppressed because it is too large
Cargo.toml: 11 changes

@@ -8,9 +8,10 @@ html5ever = "0.29.0"
 # minio = "0.1.0"
 minio = {git="https://github.com/minio/minio-rs.git", rev = "c28f576"}
 reqwest = "0.12.9"
-serde = { version = "1.0.214", features = ["derive"] }
-surrealdb = "2.0.4"
-tokio = { version="1.41.0", features = ["full"] }
-tracing = "0.1.40"
-tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
+serde = { version = "1.0", features = ["derive"] }
+spider = { version = "2.21", features = ["sync"] }
+surrealdb = "2.0"
+tokio = { version="1.41", features = ["full"] }
+tracing = "0.1"
+tracing-subscriber = { version = "0.3", features = ["env-filter"] }
 url = { version = "2.5.3", features = ["serde"] }
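Review note: the new spider dependency is the heart of this commit; it replaces the hand-rolled reqwest fetch loop with spider's subscribe-then-crawl pattern. A minimal sketch of that pattern, using only calls that appear in the src/main.rs diff below (the URL and limits are placeholders, and reading with_depth(0) as "no depth cap" is an assumption about spider's builder):

// Minimal sketch, assuming spider 2.x with the "sync" feature and a tokio runtime.
#[tokio::main]
async fn main() {
    let mut site = spider::website::Website::new("https://example.com/")
        .with_limit(4) // stop after 4 pages (placeholder value)
        .with_depth(0) // assumption: 0 disables the depth cap
        .build()
        .unwrap();

    // Subscribe before crawling; crawled pages arrive on a broadcast channel.
    let mut rx = site.subscribe(0).unwrap();
    let consumer = tokio::spawn(async move {
        while let Ok(page) = rx.recv().await {
            println!("got {}", page.get_url());
        }
    });

    site.crawl().await; // drives the crawl and feeds every subscriber
    site.unsubscribe(); // closes the channel so recv() errors and the task ends
    consumer.await.unwrap();
}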
src/db.rs: 16 changes

@@ -22,31 +22,20 @@ pub struct Website {
 
 impl Website {
     /// Creates a blank site (assumes that url param is site's root)
-    pub fn new(url: &str, crawled: bool) -> Self {
-        let site = match Url::parse(url) {
-            Ok(a) => a,
-            Err(_) => todo!(),
-        };
+    pub fn new(url: Url, crawled: bool) -> Self {
         Self {
             id: None,
             crawled,
-            site,
+            site: url,
         }
     }
 
     pub fn set_crawled(&mut self) {
         trace!("Set crawled to true");
         self.crawled = true
     }
 
     #[instrument(skip_all)]
     pub async fn links_to(&self, other: Vec<Thing>, db: &Surreal<Client>) {
         let len = other.len();
         if len == 0 {return}
 
         let from = self.site.to_string();
         // let to = other.site.to_string();
         trace!("Linking {from} to {} other pages.", other.len());
         let msg = format!("Linked {len} pages");
         let timer = Timer::start(&msg);
         // prevent the timer from being dropped instantly.
@@ -65,7 +54,6 @@ impl Website {
         let _: Vec<usize> = vec;
         if let Some(num) = vec.get(0) {
             if *num == len {
                 trace!("Link OK");
                 return;
             } else {
                 warn!("Didn't link all the records. {num}/{len}");
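Review note: the practical effect of the db.rs change is that URL parsing moves out of Website::new, so the todo!() on parse failure disappears and the caller decides how to handle bad input. A before/after sketch of the call site (the URL is a placeholder):

use url::Url;

// Before: Website::new("https://example.com/", false) parsed the string
// internally and hit todo!() on invalid input.
// After: the caller parses, so the failure surfaces where the URL originates.
let url = Url::parse("https://example.com/").expect("invalid starting URL");
let page = Website::new(url, false);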
src/main.rs: 121 changes

@@ -3,14 +3,17 @@ extern crate html5ever;
 use std::time::Instant;
 
 use db::{connect, Website};
 use parser::parse;
 use s3::S3;
 use surrealdb::{engine::remote::ws::Client, Surreal};
-use tracing::{debug, info, instrument, trace, trace_span};
+use tokio::sync::broadcast::Receiver;
+use tracing::{debug, info, trace, trace_span};
 use tracing_subscriber::EnvFilter;
+use url::Url;
 
 mod db;
-mod s3;
 mod parser;
+mod s3;
 
 struct Config<'a> {
     surreal_ns: &'a str,
@@ -35,92 +38,80 @@ async fn main() {
     debug!("Starting...");
 
     let config = Config {
-        surreal_ns: "test",
-        surreal_db: "v1.7",
         surreal_url: "localhost:8000",
         surreal_username: "root",
         surreal_password: "root",
+        surreal_ns: "test",
+        surreal_db: "v1.9",
+        s3_bucket: "v1.9",
         s3_url: "http://localhost:9000",
-        s3_bucket: "v1.7",
-        s3_access_key: "8tUJn7e1paMFZQr0PKIT",
-        s3_secret_key: "uSMvYxNOeCejCUgXVqgTfYlUEcmiZY0xcZ91M9E0",
+        s3_access_key: "0zv7GbLQsw4ZI8TclMps",
+        s3_secret_key: "5dB7QkGFw7fYbUJ5LpHk2GbWR7Bl710HlRz4NbzB",
     };
 
     // Would probably take these in as parameters from a cli
-    let starting_url = "https://oliveratkinson.net/";
-    let budget = 15;
-    let mut crawled = 0;
+    // let starting_url = "https://oliveratkinson.net/";
+    let starting_url = "https://en.wikipedia.org/wiki/Main_Page";
 
-    let s3 = S3::connect(&config).await.expect("Failed to connect to minio, aborting.");
-    let db = connect(&config).await.expect("Failed to connect to surreal, aborting.");
+    let s3 = S3::connect(&config)
+        .await
+        .expect("Failed to connect to minio, aborting.");
+    let db = connect(&config)
+        .await
+        .expect("Failed to connect to surreal, aborting.");
 
-    let reqwest = reqwest::Client::builder()
-        // .use_rustls_tls()
+    let mut site = spider::website::Website::new(&starting_url)
+        .with_limit(4)
+        .with_depth(0)
         .build()
         .unwrap();
 
-    // Kick off the whole machine - This Website object doesn't matter, it's just to allow for
-    // get() to work.
-    let span = trace_span!("Pre-Loop");
-    let pre_loop_span = span.enter();
-    // Download the site
-    let mut site = Website::new(&starting_url, false);
-    get(&mut site, &db, &reqwest, &s3, &mut crawled).await;
+    let mut rx: Receiver<spider::page::Page> = site.subscribe(0).unwrap();
 
-    drop(pre_loop_span);
-
-    let span = trace_span!("Loop");
-    while crawled < budget {
-        let get_num = if budget - crawled < 100 { budget - crawled } else { 100 };
+    let subscriber = tokio::spawn(async move {
+        let span = trace_span!("Sub");
+        let span = span.enter();
+        while let Ok(res) = rx.recv().await {
+            // Get body
+            let data = res.get_html();
+            let url = Url::parse(res.get_url()).unwrap();
 
-        let uncrawled = get_uncrawled_links(&db, get_num).await;
-        if uncrawled.len() == 0 {
-            info!("Had more budget but finished crawling everything.");
-            return;
-        }
-        debug!("Crawling {} pages...", uncrawled.len());
+            trace!("Got '{}'", url.to_string());
+            // Store document
+            s3.store(&data, &url).await;
 
-        let span = trace_span!("Crawling");
-        let _ = span.enter();
+            // Parse document and store relationships
+            let mut page = Website::new(url, true);
+            page.store(&db).await;
 
-        for mut site in uncrawled {
-            get(&mut site, &db, &reqwest, &s3, &mut crawled).await;
-
-            let percent = format!("{:.2}%", (crawled as f32 / budget as f32) * 100f32);
-            info!("Crawled {crawled} out of {budget} pages. ({percent})");
-        }
-    }
+            // Relate this page to all the pages it links to
+            let span = trace_span!("Linking");
+            let span = span.enter();
+            let found_links = parse(&page, data).await;
+            let mut stored_links = Vec::new();
+            for mut link in found_links {
+                if let Some(id) = link.store(&db).await {
+                    stored_links.push(id);
+                }
+            }
+            page.links_to(stored_links, &db).await;
+            drop(span);
+        }
+        drop(span);
+    });
 
     let timer = Timer::start("Crawled");
 
+    site.crawl().await;
+    site.unsubscribe();
 
     drop(timer);
 
+    subscriber.await.unwrap();
 
     info!("Done");
 }
 
-#[instrument(skip_all)]
-/// Downloads and crawls and stores a webpage.
-async fn get(
-    site: &mut Website,
-    db: &Surreal<Client>,
-    reqwest: &reqwest::Client,
-    s3: &S3,
-    count: &mut usize,
-) {
-    trace!("Get: {}", site.to_string());
-    let timer = Timer::start("Got page");
-    if let Ok(response) = reqwest.get(site.to_string()).send().await {
-        timer.stop();
-
-        // Get body
-        let data = response.text().await.unwrap();
-        // Store document
-        s3.store(&data, &site.site).await;
-        // Parse document and store relationships
-        parser::parse(db, site, data).await;
-        *count += 1;
-    }
-    trace!("Failed to get: {}", site.to_string());
-}
-
-/// Returns uncrawled links
-async fn get_uncrawled_links(db: &Surreal<Client>, mut count: usize) -> Vec<Website> {
-    if count > 100 {
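Review note: the new subscriber loop reads as a five-step per-page pipeline. Below is a sketch with the loop body factored into a hypothetical handle_page helper for clarity; the commit itself inlines all of this in the tokio::spawn loop, and S3, Website, and parse are this crate's own items (assumed in scope via the mod/use lines above):

use surrealdb::{engine::remote::ws::Client, Surreal};
use url::Url;

// Hypothetical helper mirroring the subscriber loop body above.
async fn handle_page(res: spider::page::Page, db: &Surreal<Client>, s3: &S3) {
    let data = res.get_html();
    let url = Url::parse(res.get_url()).unwrap();

    // 1. Archive the raw document in S3.
    s3.store(&data, &url).await;

    // 2. Record the page itself, already marked as crawled.
    let mut page = Website::new(url, true);
    page.store(&db).await;

    // 3. Extract outbound links from the HTML.
    let found_links = parse(&page, data).await;

    // 4. Upsert each link target, keeping the returned record ids.
    let mut stored_links = Vec::new();
    for mut link in found_links {
        if let Some(id) = link.store(&db).await {
            stored_links.push(id);
        }
    }

    // 5. Relate this page to everything it links to.
    page.links_to(stored_links, &db).await;
}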
src/parser.rs

@@ -5,8 +5,7 @@ use html5ever::tokenizer::{BufferQueue, TokenizerResult};
 use html5ever::tokenizer::{StartTag, TagToken};
 use html5ever::tokenizer::{Token, TokenSink, TokenSinkResult, Tokenizer, TokenizerOpts};
 use html5ever::{local_name, tendril::*};
-use surrealdb::engine::remote::ws::Client;
-use surrealdb::Surreal;
+use tracing::{instrument, trace};
 
 use crate::db::Website;
 
@@ -67,10 +66,9 @@ impl TokenSink for LinkParser<'_> {
     }
 }
 
-pub async fn parse(db: &Surreal<Client>, site: &mut Website, data: String) {
-
-    site.set_crawled();
-    site.store(db).await;
+#[instrument(skip_all)]
+pub async fn parse(site: &Website, data: String) -> Vec<Website> {
 
     let sink = LinkParser { site };
     let chunk = Tendril::from_str(&data).unwrap();
@@ -79,17 +77,16 @@ pub async fn parse(db: &Surreal<Client>, site: &mut Website, data: String) {
 
     let token = Tokenizer::new(sink.clone(), TokenizerOpts::default());
 
-    let mut links_to = Vec::new();
+    // let mut links_to = Vec::new();
+    let mut res = Vec::new();
     while !input.is_empty() {
-        if let TokenizerResult::Script(s) = token.feed(&mut input) {
-            for mut web in s {
-                if let Some(id) = web.store(db).await {
-                    links_to.push(id);
-                }
-            }
-        }
+        if let TokenizerResult::Script(mut s) = token.feed(&mut input) {
+            res.append(&mut s);
+        }
     }
-    sink.site.links_to(links_to, db).await;
+    trace!("Found {} links.", res.len());
 
     assert!(input.is_empty());
     token.end();
+    res
 }
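Review note: the net effect of the parser change is that parse() becomes a read-only extraction pass with no database access, so it drops the Surreal imports entirely. Persistence is now the caller's job, which is exactly what the subscriber loop in src/main.rs does; in miniature (names as in the diff):

// parse() borrows the page immutably and returns candidates instead of storing them.
let found_links: Vec<Website> = parse(&page, data).await;
// Storing and linking happen at the call site, and the crawled flag
// is set at construction via Website::new(url, true).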