Rushmore75 2025-03-20 15:11:01 -06:00
parent b9c1f0b492
commit 7df19a480f
9 changed files with 63 additions and 105 deletions

Cargo.lock (generated, 38 changed lines)

@@ -1961,6 +1961,25 @@ dependencies = [
  "generic-array",
 ]

+[[package]]
+name = "internet_mapper"
+version = "0.1.0"
+dependencies = [
+ "base64 0.22.1",
+ "html5ever 0.29.1",
+ "metrics",
+ "metrics-exporter-prometheus",
+ "minio",
+ "reqwest",
+ "serde",
+ "surrealdb",
+ "tokio",
+ "toml",
+ "tracing",
+ "tracing-subscriber",
+ "url",
+]
+
 [[package]]
 name = "ipnet"
 version = "2.11.0"
@@ -4112,25 +4131,6 @@ version = "2.6.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"

-[[package]]
-name = "surreal_spider"
-version = "0.1.0"
-dependencies = [
- "base64 0.22.1",
- "html5ever 0.29.1",
- "metrics",
- "metrics-exporter-prometheus",
- "minio",
- "reqwest",
- "serde",
- "surrealdb",
- "tokio",
- "toml",
- "tracing",
- "tracing-subscriber",
- "url",
-]
-
 [[package]]
 name = "surrealdb"
 version = "2.2.1"

Cargo.toml

@@ -1,5 +1,5 @@
 [package]
-name = "surreal_spider"
+name = "internet_mapper"
 version = "0.1.0"
 edition = "2021"

Crawler config (.toml)

@@ -3,13 +3,13 @@ surreal_url = "localhost:8000"
 surreal_username = "root"
 surreal_password = "root"
 surreal_ns = "test"
-surreal_db = "v1.15.4"
+surreal_db = "v1.17"

 # Minio config
-s3_bucket = "v1.15.4"
+s3_bucket = "v1.17"
 s3_url = "http://localhost:9000"
-s3_access_key = "3ptjsHhRHCHlpCmgFy9n"
-s3_secret_key = "68CmV07YExeCxb8kJhosSauEizj5CAE7PINZIfQz"
+s3_access_key = "Ok6s9uQEvKrqRoGZdacm"
+s3_secret_key = "qubeSkP787c7QZu4TvtnuwPTGIAq6ETPupCxvv6K"

 # Crawler config
 crawl_filter = "en.wikipedia.com"
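
For orientation, a minimal sketch (assumed, not part of this commit) of how a file like this maps onto the crawler's `Config` struct via the `toml` crate. The field names are inferred from the keys above; the filename and any extra fields (e.g. a crawl budget) are assumptions:

```rust
use serde::Deserialize;

// Field names mirror the config keys above; the real struct in src/main.rs
// may carry more fields (e.g. the crawl budget) that this hunk doesn't show.
#[derive(Deserialize)]
struct Config {
    surreal_url: String,
    surreal_username: String,
    surreal_password: String,
    surreal_ns: String,
    surreal_db: String,
    s3_bucket: String,
    s3_url: String,
    s3_access_key: String,
    s3_secret_key: String,
    crawl_filter: String,
}

fn load_config() -> Config {
    // "Crawler.toml" is an assumed path; the diff doesn't show the filename.
    let raw = std::fs::read_to_string("Crawler.toml").expect("readable config file");
    toml::from_str(&raw).expect("TOML matching Config's fields")
}
```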

README.md

@@ -2,7 +2,6 @@
 Crawls sites, saving all the found links to a surrealdb database. It then proceeds to take batches of 100 uncrawled links until the crawl budget is reached. It saves the data of each site in a minio bucket.
 ### TODO
 - [ ] Domain filtering - prevent the crawler from going on alternate versions of wikipedia.
@@ -11,5 +10,14 @@ Crawls sites saving all the found links to a surrealdb database. It then proceed
 - [x] Better asynchronous getting of the sites. Currently it all happens serially.
 - [ ] Allow for storing asynchronously
-3/19/25: Took 20min to crawl 100 pages
+3/17/25: Took >1hr to crawl 100 pages
+This meant we stored 100 pages, 142,997 URLs, and 1,425,798 links between the two.
+3/19/25: Took 20min to crawl 1000 pages
+This meant we stored 1000 pages, 142,997 URLs, and 1,425,798 links between the two.
+3/20/25: Took 5min to crawl 1000 pages
+# About
+![Screenshot](/pngs/graphana.png)
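
The batch-and-budget behaviour described at the top of the README reduces to roughly this loop. A simplified sketch: `config.budget` and the exact call shapes are assumptions here; the real loop, with a `JoinSet` and metrics, is in src/main.rs below:

```rust
// Simplified sketch of the crawl loop; not the literal code from src/main.rs.
let mut crawled = 0;
while crawled < config.budget {
    // Pull a batch of up to 100 uncrawled links matching the filter.
    let uncrawled = get_uncrawled_links(&db, 100, config.crawl_filter.clone()).await;
    if uncrawled.is_empty() {
        break; // budget remaining, but nothing left to crawl
    }
    for site in uncrawled {
        process(site, db.clone(), reqwest.clone(), s3.clone()).await;
        crawled += 1;
    }
}
```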

BIN pngs/graphana.png (new file, 264 KiB; binary not shown)

src/db.rs

@@ -10,14 +10,13 @@ use surrealdb::{
     engine::remote::ws::{Client, Ws},
     opt::auth::Root,
     sql::Thing,
-    Error::Api,
     Response, Surreal,
 };
 use tokio::sync::Mutex;
 use tracing::{error, instrument, trace, warn};
 use url::Url;

-use crate::{Config, Timer};
+use crate::Config;

 // static LOCK: LazyLock<Arc<Mutex<bool>>> = LazyLock::new(|| Arc::new(Mutex::new(true)));
 static LOCK: LazyLock<Mutex<bool>> = LazyLock::new(|| Mutex::new(true));
@@ -25,9 +24,9 @@ static LOCK: LazyLock<Mutex<bool>> = LazyLock::new(|| Mutex::new(true));
 const CUSTOM_ENGINE: engine::GeneralPurpose =
     engine::GeneralPurpose::new(&alphabet::URL_SAFE, general_purpose::NO_PAD);

-const TIME_SPENT_ON_LOCK: &'static str = "surql_lock_waiting_ms";
-const STORE: &'static str = "surql_store_calls";
-const LINK: &'static str = "surql_link_calls";
+const TIME_SPENT_ON_LOCK: &str = "surql_lock_waiting_ms";
+const STORE: &str = "surql_store_calls";
+const LINK: &str = "surql_link_calls";
 #[derive(Deserialize, Clone)]
 pub struct Website {
@@ -86,6 +85,7 @@ impl Website {
         domain + path
     }
+
     pub fn get_url_as_b64_path(site: &Url) -> String {
         let domain = site.domain().unwrap_or("DOMAIN").to_string();
         let path = &CUSTOM_ENGINE.encode(site.path());
@@ -104,10 +104,6 @@ impl Website {
         // let to = other.site.to_string();
         trace!("Linking {} pages to {from}", other.len());
-        let msg = format!("Linked {len} pages to {from}");
-        let timer = Timer::start(&msg);
-        // prevent the timer from being dropped instantly.
-        let _ = timer;
         counter!(LINK).increment(1);
         match db
             .query("COUNT(RELATE (SELECT id FROM website WHERE site = $in) -> links_to -> $out)")
@@ -121,7 +117,7 @@
             let _: Response = e;
             if let Ok(vec) = e.take(0) {
                 let _: Vec<usize> = vec;
-                if let Some(num) = vec.get(0) {
+                if let Some(num) = vec.first() {
                     if *num == len {
                         trace!("Link for {from} OK - {num}/{len}");
                         return;
@@ -167,13 +163,7 @@
         {
             Ok(mut id) => match id.take::<Vec<Thing>>(0) {
                 Ok(mut x) => things.append(&mut x),
-                Err(err) => match err {
-                    Api(error) => {
-                        eprintln!("{:?}", error);
-                        error!("{:?}", error);
-                    }
-                    _ => error!("{:?}", err),
-                },
+                Err(err) => error!("{:?}", err),
             },
             Err(err) => {
                 error!("{:?}", err);
@@ -224,3 +214,4 @@ pub async fn connect(config: &Config) -> surrealdb::Result<Surreal<Client>> {
     Ok(db)
 }
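
The `CUSTOM_ENGINE` constant above (URL-safe alphabet, no padding) is what turns a page's path into a string safe to use as an object key. A self-contained sketch of the same encoding, with an illustrative URL:

```rust
use base64::{alphabet, engine::{self, general_purpose}, Engine as _};

// Same configuration as db.rs: URL-safe alphabet and no '=' padding, so the
// output can double as a filename / S3 object key.
const CUSTOM_ENGINE: engine::GeneralPurpose =
    engine::GeneralPurpose::new(&alphabet::URL_SAFE, general_purpose::NO_PAD);

fn main() {
    let site = url::Url::parse("https://en.wikipedia.org/wiki/Rust_(programming_language)")
        .expect("valid URL");
    let domain = site.domain().unwrap_or("DOMAIN");
    // Prints the domain followed by the base64 of "/wiki/Rust_(programming_language)".
    println!("{domain}{}", CUSTOM_ENGINE.encode(site.path()));
}
```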

src/main.rs

@@ -6,7 +6,6 @@ use std::{
     fs::File,
     io::Read,
     net::{IpAddr, Ipv4Addr},
-    time::Instant,
 };

 use db::{connect, Website};
@@ -16,17 +15,17 @@ use s3::S3;
 use serde::Deserialize;
 use surrealdb::{engine::remote::ws::Client, Surreal};
 use tokio::task::JoinSet;
-use tracing::{debug, error, info, instrument, trace, trace_span, warn};
+use tracing::{debug, error, info, instrument, trace, trace_span};
 use tracing_subscriber::{fmt, layer::SubscriberExt, EnvFilter, Layer, Registry};

 mod db;
 mod parser;
 mod s3;

-const GET_METRIC: &'static str = "total_gets";
-const GET_IN_FLIGHT: &'static str = "gets_in_flight";
-const SITES_CRAWLED: &'static str = "pages_crawled";
-const BEING_PROCESSED: &'static str = "pages_being_processed";
+const GET_METRIC: &str = "total_gets";
+const GET_IN_FLIGHT: &str = "gets_in_flight";
+const SITES_CRAWLED: &str = "pages_crawled";
+const BEING_PROCESSED: &str = "pages_being_processed";
 #[derive(Deserialize)]
 struct Config {
@@ -47,8 +46,6 @@ struct Config {
 #[tokio::main]
 async fn main() {
-    let total_runtime = Timer::start("Completed");
-
     let writer = std::fs::OpenOptions::new()
         .append(true)
         .create(true)
@@ -110,7 +107,7 @@ async fn main() {
     let span = trace_span!("Pre-Loop");
     let pre_loop_span = span.enter();
     // Download the site
-    let site = Website::new(&starting_url, false);
+    let site = Website::new(starting_url, false);
     process(site, db.clone(), reqwest.clone(), s3.clone()).await;
     drop(pre_loop_span);
@@ -125,7 +122,7 @@ async fn main() {
         };
         let uncrawled = get_uncrawled_links(&db, get_num, config.crawl_filter.clone()).await;
-        if uncrawled.len() == 0 {
+        if uncrawled.is_empty() {
             info!("Had more budget but finished crawling everything.");
             return;
         }
@@ -146,7 +143,7 @@ async fn main() {
         let c = counter!(SITES_CRAWLED);
         // As futures complete runs code in while block
-        while let Some(_) = futures.join_next().await {
+        while futures.join_next().await.is_some() {
             c.increment(1);
             gauge!(BEING_PROCESSED).decrement(1);
             crawled += 1;
@@ -156,7 +153,6 @@ async fn main() {
     drop(span);
     debug!("Done");
-    drop(total_runtime);
 }
 #[instrument(skip(db, s3, reqwest))]
@@ -166,22 +162,17 @@ async fn process(mut site: Website, db: Surreal<Client>, reqwest: reqwest::Clien
     // METRICS
     trace!("Process: {}", &site.site);
-    let timer = Timer::start("Built request");
     // Build the request
-    let request_builder = reqwest.get(&site.site.to_string());
+    let request_builder = reqwest.get(site.site.to_string());
-    // METRICS
-    timer.stop();

     // METRICS
     let g = gauge!(GET_IN_FLIGHT);
     g.increment(1);
-    let timer = Timer::start("Got page");
     // Send the http request (get)
     if let Ok(response) = request_builder.send().await {
         // METRICS
-        timer.stop();
         g.decrement(1);
         counter!(GET_METRIC).increment(1);
@@ -198,14 +189,14 @@ async fn process(mut site: Website, db: Surreal<Client>, reqwest: reqwest::Clien
         // update self in db
         site.set_crawled();
-        Website::store_all(vec![site.clone()], &db).await;
+        Website::store_all(vec![site], &db).await;

         // Store all the other sites so that we can link to them.
         // let mut links_to = Vec::new();
-        let others = Website::store_all(sites, &db).await;
+        let _ = Website::store_all(sites, &db).await;

         // Make the database's links reflect the html links between sites
-        site.links_to(others, &db).await;
+        // site.links_to(others, &db).await;
     } else {
         error!("Failed to get: {}", &site.site);
     }
@@ -234,33 +225,3 @@ async fn get_uncrawled_links(
         .expect("Returned websites couldn't be parsed")
 }
-
-pub struct Timer<'a> {
-    start: Instant,
-    msg: &'a str,
-}
-
-impl<'a> Timer<'a> {
-    #[inline]
-    pub fn start(msg: &'a str) -> Self {
-        Self {
-            start: Instant::now(),
-            msg,
-        }
-    }
-
-    pub fn stop(&self) -> f64 {
-        let dif = self.start.elapsed().as_micros();
-        let ms = dif as f64 / 1000.;
-
-        if ms > 200. {
-            warn!("{}", format!("{} in {:.3}ms", self.msg, ms));
-        }
-
-        ms
-    }
-}
-
-impl Drop for Timer<'_> {
-    fn drop(&mut self) {
-        self.stop();
-    }
-}
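
With the hand-rolled `Timer` deleted outright, per-request timing has to come from somewhere else. One option, an assumption rather than something this commit adds, is a histogram from the `metrics` crate the project already depends on, which Prometheus can turn into percentiles instead of the old warn-over-200ms log line:

```rust
use std::time::Instant;
use metrics::histogram;

// "get_request_ms" is a hypothetical metric name, not one from this repo.
async fn timed_get(client: &reqwest::Client, url: &str) -> Option<reqwest::Response> {
    let start = Instant::now();
    let response = client.get(url).send().await.ok();
    // Record the elapsed time in milliseconds; the Prometheus exporter set up
    // in main() would surface this alongside the existing counters and gauges.
    histogram!("get_request_ms").record(start.elapsed().as_secs_f64() * 1000.0);
    response
}
```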

src/parser.rs

@@ -8,7 +8,6 @@ use html5ever::{local_name, tendril::*};
 use tracing::instrument;

 use crate::db::Website;
-use crate::Timer;

 impl TokenSink for Website {
     type Handle = Vec<Website>;
@@ -69,12 +68,11 @@ impl TokenSink for Website {
 pub async fn parse(site: &Website, data: &str) -> Vec<Website> {
     // prep work
     let mut other_sites: Vec<Website> = Vec::new();
-    let _t = Timer::start("Parsed page");

     // change data into something that can be tokenized
-    let chunk = Tendril::from_str(&data).expect("Failed to parse string into Tendril!");
+    let chunk = Tendril::from_str(data).expect("Failed to parse string into Tendril!");
     // create buffer of tokens and push our input into it
-    let mut token_buffer = BufferQueue::default();
+    let token_buffer = BufferQueue::default();
     token_buffer.push_back(
         chunk
             .try_reinterpret::<fmt::UTF8>()
@@ -84,7 +82,7 @@ pub async fn parse(site: &Website, data: &str) -> Vec<Website> {
     let tokenizer = Tokenizer::new(site.clone(), TokenizerOpts::default());
     // go thru buffer
-    while let TokenizerResult::Script(mut sites) = tokenizer.feed(&mut token_buffer) {
+    while let TokenizerResult::Script(mut sites) = tokenizer.feed(&token_buffer) {
         other_sites.append(&mut sites);
         // other_sites.push(sites);
     }
@@ -93,3 +91,4 @@ pub async fn parse(site: &Website, data: &str) -> Vec<Website> {
     other_sites
 }
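
A sketch of exercising `parse` as a test inside this module (hypothetical; `Website::new`'s signature is inferred from the call in src/main.rs):

```rust
// Hypothetical test: feed a single anchor tag through the tokenizer loop above.
#[tokio::test]
async fn parse_finds_links() {
    let site = Website::new("https://en.wikipedia.org/wiki/Main_Page", false);
    let html = r#"<a href="/wiki/Rust_(programming_language)">Rust</a>"#;
    let found = parse(&site, html).await;
    // The one href above should surface as a discovered Website.
    assert!(!found.is_empty());
}
```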

src/s3.rs

@@ -10,9 +10,9 @@ use minio::s3::{
 use tracing::{instrument, trace, warn};
 use url::Url;

-use crate::{db::Website, Config, Timer};
+use crate::{db::Website, Config};

-const S3_ROUND_TRIP_METRIC: &'static str = "s3_trips";
+const S3_ROUND_TRIP_METRIC: &str = "s3_trips";

 #[derive(Clone)]
 pub struct S3 {
@@ -65,14 +65,12 @@ impl S3 {
     #[instrument(name = "s3_store", skip_all)]
     pub async fn store(&self, data: &str, url: &Url) {
         let counter = counter!(S3_ROUND_TRIP_METRIC);
-        let t = Timer::start("Stored page");
-        let _ = t; // prevent compiler drop
         let filename = Website::get_url_as_string(url);
         trace!("Storing {} as {filename}", url.to_string());
         counter.increment(1);

-        let _ = match &self
+        match &self
             .client
             .put_object_content(&self.bucket_name, &filename, data.to_owned())
             .send()
@@ -99,3 +97,4 @@ impl S3 {
         };
     }
 }
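
Finally, a sketch of a call into the store path above (the `connect` constructor and its signature are assumed; only `store` itself appears in this diff):

```rust
// Hypothetical call site, inside an async context with a loaded Config:
let s3 = S3::connect(&config).await.expect("S3 reachable");
let url = url::Url::parse("https://en.wikipedia.org/wiki/Main_Page").unwrap();
// The object key is derived from the URL via Website::get_url_as_string.
s3.store("<!DOCTYPE html><html></html>", &url).await;
```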