diff --git a/Cargo.lock b/Cargo.lock
index b005446..f00c940 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1862,6 +1862,7 @@ dependencies = [
  "opentelemetry",
  "opentelemetry-otlp",
  "opentelemetry_sdk",
+ "rand 0.9.1",
  "reqwest",
  "serde",
  "surrealdb",
@@ -2610,7 +2611,7 @@ dependencies = [
  "futures-util",
  "opentelemetry",
  "percent-encoding",
- "rand 0.9.0",
+ "rand 0.9.1",
  "serde_json",
  "thiserror 2.0.12",
 ]
@@ -2998,7 +2999,7 @@ checksum = "b820744eb4dc9b57a3398183639c511b5a26d2ed702cedd3febaa1393caa22cc"
 dependencies = [
  "bytes",
  "getrandom 0.3.2",
- "rand 0.9.0",
+ "rand 0.9.1",
  "ring",
  "rustc-hash 2.1.1",
  "rustls",
@@ -3069,13 +3070,12 @@ dependencies = [
 
 [[package]]
 name = "rand"
-version = "0.9.0"
+version = "0.9.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3779b94aeb87e8bd4e834cee3650289ee9e0d5677f976ecdb6d219e5f4f6cd94"
+checksum = "9fbfd9d094a40bf3ae768db9361049ace4c0e04a4fd6b359518bd7b73a73dd97"
 dependencies = [
  "rand_chacha 0.9.0",
  "rand_core 0.9.3",
- "zerocopy 0.8.23",
 ]
 
 [[package]]
@@ -4711,7 +4711,7 @@ version = "1.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "470dbf6591da1b39d43c14523b2b469c86879a53e8b758c8e090a470fe7b1fbe"
 dependencies = [
- "rand 0.9.0",
+ "rand 0.9.1",
  "serde",
  "web-time",
 ]
diff --git a/Cargo.toml b/Cargo.toml
index f1e599b..91deff9 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -12,6 +12,7 @@ metrics-exporter-prometheus = { version = "0.16.2", features=["http-listener"]}
 opentelemetry = "0.30.0"
 opentelemetry-otlp = { version = "0.30.0", features = ["metrics", "trace", "logs", "grpc-tonic"] }
 opentelemetry_sdk = "0.30.0"
+rand = "0.9.1"
 reqwest = { version = "0.12", features = ["gzip", "default", "rustls-tls", "stream"] }
 serde = { version = "1.0", features = ["derive"] }
 surrealdb = "2.2"
diff --git a/src/db.rs b/src/db.rs
index df20963..ea1a714 100644
--- a/src/db.rs
+++ b/src/db.rs
@@ -1,12 +1,13 @@
 use metrics::counter;
-use std::fmt::Debug;
 use serde::{Deserialize, Serialize};
+use std::{fmt::Debug, time::Duration};
 use surrealdb::{
     engine::remote::ws::{Client, Ws},
     opt::auth::Root,
     sql::Thing,
     Surreal,
 };
+use tokio::time::sleep;
 use tracing::{error, instrument, trace};
 use url::Url;
 
@@ -16,6 +17,7 @@ const STORE: &str = "surql_store_calls";
 
 #[derive(Serialize, Deserialize, Clone, Eq, PartialEq, Hash)]
 pub struct Website {
+    pub id: Option<Thing>,
     /// The url that this data is found at
     pub site: Url,
     /// Whether or not this link has been crawled yet
@@ -46,6 +48,7 @@ impl Website {
             crawled,
             site,
             status_code: 0,
+            id: None,
         }
     }
 
@@ -63,6 +66,7 @@
                 ON DUPLICATE KEY UPDATE
                     accessed_at = time::now(),
                     status_code = $input.status_code,
+                    processing = false,
                     crawled = crawled OR $input.crawled
                 RETURN VALUE id;
             ",
@@ -82,18 +86,47 @@
     }
 }
 
+/// Returns uncrawled links
+#[instrument(skip(db, config))]
+pub async fn get_next(db: &Surreal<Client>, config: &Config) -> Option<Website> {
+    let mut res: Option<Website> = None;
+    let mut fails = 0;
+
+    while res.is_none() {
+        let mut response = db
+            .query("fn::get_next($format)")
+            .bind(("format", config.crawl_filter.to_string()))
+            .await
+            .expect("Hard-coded query failed..?");
+
+        res = match response.take(0) {
+            Ok(ok) => ok,
+            Err(_err) => {
+                // basically just CSMA/CA
+                let delay = rand::random_range(10..500);
+                sleep(Duration::from_millis(delay)).await;
+                fails += 1;
+                // Don't get stuck here forever, failing...
+                // (most I've seen is 1)
+                if fails > 5 {
+                    error!("Max attempts to get_next() reached... ({fails})");
+                    return None;
+                }
+                None
+            }
+        };
+    }
+
+    res
+}
+
 #[derive(Debug, Serialize)]
+#[allow(dead_code)]
 pub struct Email {
     pub email: String,
     pub on: String,
 }
 
-#[derive(Debug, Deserialize)]
-pub struct Record {
-    #[allow(dead_code)]
-    pub id: Thing,
-}
-
 #[instrument(skip_all, name = "SurrealDB")]
 pub async fn connect(config: &Config) -> surrealdb::Result<Surreal<Client>> {
     trace!("Establishing connection to surreal...");
@@ -122,4 +155,3 @@ pub async fn connect(config: &Config) -> surrealdb::Result<Surreal<Client>> {
 
     Ok(db)
 }
-
diff --git a/src/main.rs b/src/main.rs
index b4575bf..53fad8a 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -9,7 +9,7 @@ use std::{
 };
 
 use futures_util::StreamExt;
-use opentelemetry::{global::{self, BoxedTracer}, metrics::{Counter, Meter, UpDownCounter}, trace::{Span, Tracer}};
+use opentelemetry::{global::{self}, metrics::{Counter, Meter, UpDownCounter}};
 use opentelemetry_otlp::{Protocol, WithExportConfig};
 use db::{connect, Website};
 use serde::Deserialize;
@@ -18,6 +18,8 @@ use tokio::{io::{AsyncWriteExt, BufWriter}, sync::RwLock, task::JoinSet};
 use tracing::{debug, error, info, instrument, level_filters::LevelFilter, trace, warn};
 use tracing_subscriber::{fmt, layer::SubscriberExt, EnvFilter, Layer, Registry};
 
+use crate::db::get_next;
+
 mod db;
 mod filesystem;
 mod parser;
@@ -56,6 +58,15 @@ static SITES_CRAWLED: LazyLock<Counter<u64>> = LazyLock::new(||
     .build()
 );
 
+static CONFIG: LazyLock<Config> = LazyLock::new(|| {
+    let mut file = File::open("./Crawler.toml").expect("Failed to read Crawler.toml");
+    let mut buf = String::new();
+    let _ = file.read_to_string(&mut buf);
+
+    let config: Config = toml::from_str(&buf).expect("Failed to parse Crawler.toml");
+    config
+});
+
 // FIXME Traces aren't working on multiple threads, they block
 // static TRACER: LazyLock<BoxedTracer> = LazyLock::new(|| global::tracer("Internet_Mapper"));
 
@@ -78,78 +89,18 @@ async fn main() {
     println!("Logs and metrics are provided to the Grafana dashboard");
 
     // Start TRACE / LOGGING / METRICS
-//  let otlp_log = opentelemetry_otlp::LogExporter::builder()
-//      .with_tonic()
-//      .with_endpoint(endpoint)
-//      .build()
-//      .unwrap();
-    // Send metrics to Prometheus
-    let otlp_metrics = opentelemetry_otlp::MetricExporter::builder()
-        .with_http()
-        .with_protocol(Protocol::HttpBinary)
-        .with_endpoint("http://localhost:9090/api/v1/otlp/v1/metrics")
-        .build()
-        .unwrap();
-    // Send spans to Alloy (which will send them to Tempo)
-    let otlp_span = opentelemetry_otlp::SpanExporter::builder()
-        .with_tonic()
-        .with_endpoint("http://localhost:4317")
-        .build()
-        .unwrap();
-
-    let tracer_provider = opentelemetry_sdk::trace::SdkTracerProvider::builder()
-        .with_simple_exporter(otlp_span)
-        .build();
-    // let logger_provider = opentelemetry_sdk::logs::SdkLoggerProvider::builder()
-    //     .with_simple_exporter(otlp_log)
-    //     .build();
-    let metrics_provider = opentelemetry_sdk::metrics::SdkMeterProvider::builder()
-        .with_periodic_exporter(otlp_metrics) // default delay is 60s, turn down to like 15
-        .build();
-
-    global::set_tracer_provider(tracer_provider);
-    global::set_meter_provider(metrics_provider);
-    // How to set logger?
-
-    // End TRACE
-
-    // Start LOGGING
-    let writer = std::fs::OpenOptions::new()
-        .append(true)
-        .create(true)
-        .open("./docker/logs/tracing.log")
-        .expect("Couldn't make log file!");
-
-    let filter = EnvFilter::builder()
-        .with_default_directive(LevelFilter::DEBUG.into())
-        .from_env_lossy();
-
-    let registry = Registry::default().with(
-        fmt::layer()
-            .with_line_number(true)
-            .with_thread_ids(true)
-            .with_file(true)
-            .json()
-            .with_writer(writer)
-            .with_filter(filter)
-    );
-
-    tracing::subscriber::set_global_default(registry).expect("Failed to set default subscriber");
-    // End LOGGING
+    load_tracing();
+    load_logging();
+    load_metrics();
 
     // When getting uncrawled pages, name must contain this variable. "" will effectively get ignored.
     // let crawl_filter = "en.wikipedia.org/";
     // let budget = 50;
     let crawled = Arc::new(RwLock::new(0));
 
-    let mut file = File::open("./Crawler.toml").expect("Failed to read Crawler.toml");
-    let mut buf = String::new();
-    let _ = file.read_to_string(&mut buf);
+    let starting_url = &CONFIG.start_url;
 
-    let config: Arc<Config> = Arc::new(toml::from_str(&buf).expect("Failed to parse Crawler.toml"));
-    let starting_url = &config.start_url;
-
-    let db = connect(&config)
+    let db = connect(&CONFIG)
         .await
         .expect("Failed to connect to surreal, aborting.");
@@ -168,8 +119,8 @@ async fn main() {
 
     // let mut main_loop_span = TRACER.start("Main-Loop");
     let mut futures = JoinSet::new();
-    for _ in 0..config.batch_size {
-        futures.spawn(process_single_thread(config.clone(), db.clone(), reqwest.clone(), crawled.clone()));
+    for _ in 0..CONFIG.batch_size {
+        futures.spawn(process_single_thread(&CONFIG, db.clone(), reqwest.clone(), crawled.clone()));
     }
     futures.join_all().await;
     // main_loop_span.end();
@@ -177,19 +128,21 @@ async fn main() {
 
     info!("Done");
 }
 
-async fn process_single_thread(config: Arc<Config>, db: Surreal<Client>, reqwest: reqwest::Client, crawled: Arc<RwLock<usize>>) {
+async fn process_single_thread(config: &Config, db: Surreal<Client>, reqwest: reqwest::Client, crawled: Arc<RwLock<usize>>) {
     while *(crawled.read().await) < config.budget {
-        let uncrawled = get_uncrawled_links(&db.clone(), 1, &config).await;
-        if uncrawled.is_empty() {
-            return
-        }
-
-        for site in uncrawled {
-            process(site, db.clone(), reqwest.clone()).await;
-            SITES_CRAWLED.add(1, &[]);
-            // Somehow this write doesn't hang on the while's read?
-            let mut c = crawled.write().await;
-            *c += 1;
+        let uncrawled = get_next(&db.clone(), &config).await;
+        match uncrawled {
+            Some(site) => {
+                process(site, db.clone(), reqwest.clone()).await;
+                SITES_CRAWLED.add(1, &[]);
+                // Somehow this write doesn't hang on the while's read?
+                let mut c = crawled.write().await;
+                *c += 1;
+            },
+            None => {
+                warn!("fn::get_next() returned None");
+                return;
+            },
         }
     }
 }
@@ -200,7 +153,7 @@ async fn process(mut site: Website, db: Surreal<Client>, reqwest: reqwest::Client) {
 
     // METRICS
-    trace!(url = &site.site.as_str(), "Process: {}", &site.site);
+    debug!(url = &site.site.as_str(), "Process: {}", &site.site);
     BEING_PROCESSED.add(1, &[]);
     // let mut process_span = TRACER.start("Process");
 
@@ -326,26 +279,65 @@ async fn process(mut site: Website, db: Surreal<Client>, reqwest: reqwest::Clien
     BEING_PROCESSED.add(-1, &[]);
 }
 
-/// Returns uncrawled links
-#[instrument(skip(db, config))]
-async fn get_uncrawled_links(
-    db: &Surreal<Client>,
-    mut count: usize,
-    config: &Config,
-) -> Vec<Website> {
-    if count > config.batch_size {
-        count = config.batch_size;
-    }
+fn load_tracing() {
+    // Send spans to Alloy (which will send them to Tempo)
+    let otlp_span = opentelemetry_otlp::SpanExporter::builder()
+        .with_tonic()
+        .with_endpoint("http://localhost:4317")
+        .build()
+        .unwrap();
+    let tracer_provider = opentelemetry_sdk::trace::SdkTracerProvider::builder()
+        .with_simple_exporter(otlp_span)
+        .build();
 
-    debug!("Getting {} uncrawled links from DB", count);
-
-    let mut response = db
-        .query("SELECT * FROM website WHERE crawled = false AND site ~ type::string($format) LIMIT $count;")
-        .bind(("format", config.crawl_filter.to_string()))
-        .bind(("count", count))
-        .await
-        .expect("Hard-coded query failed..?");
-    response
-        .take(0)
-        .expect("Returned websites couldn't be parsed")
+    global::set_tracer_provider(tracer_provider);
 }
+
+fn load_logging() {
+//  let otlp_log = opentelemetry_otlp::LogExporter::builder()
+//      .with_tonic()
+//      .with_endpoint(endpoint)
+//      .build()
+//      .unwrap();
+    // let logger_provider = opentelemetry_sdk::logs::SdkLoggerProvider::builder()
+    //     .with_simple_exporter(otlp_log)
+    //     .build();
+    let writer = std::fs::OpenOptions::new()
+        .append(true)
+        .create(true)
+        .open("./docker/logs/tracing.log")
+        .expect("Couldn't make log file!");
+
+    let filter = EnvFilter::builder()
+        .with_default_directive(LevelFilter::DEBUG.into())
+        .from_env_lossy();
+
+    let registry = Registry::default().with(
+        fmt::layer()
+            .with_line_number(true)
+            .with_thread_ids(true)
+            .with_file(true)
+            .json()
+            .with_writer(writer)
+            .with_filter(filter)
+    );
+
+    tracing::subscriber::set_global_default(registry).expect("Failed to set default subscriber");
+}
+
+fn load_metrics() {
+    // Send metrics to Prometheus
+    let otlp_metrics = opentelemetry_otlp::MetricExporter::builder()
+        .with_http()
+        .with_protocol(Protocol::HttpBinary)
+        .with_endpoint("http://localhost:9090/api/v1/otlp/v1/metrics")
+        .build()
+        .unwrap();
+    let metrics_provider = opentelemetry_sdk::metrics::SdkMeterProvider::builder()
+        .with_periodic_exporter(otlp_metrics) // default delay is 60s, turn down to like 15
+        .build();
+
+    global::set_meter_provider(metrics_provider);
+}
+
+
diff --git a/src/parser.rs b/src/parser.rs
index dc04373..6f50d15 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -115,7 +115,7 @@ fn try_get_url(parent: &Url, link: &str) -> Option<Url> {
             }
         }
     } else {
-        // # This is some sort of realative url, gonna try patching it up into an absolute
+        // # This is some sort of relative url, gonna try patching it up into an absolute
        // url
        match e {
            url::ParseError::RelativeUrlWithoutBase => {
diff --git a/src/setup.surql b/src/setup.surql
index 17b89f5..ab3d387 100644
--- a/src/setup.surql
+++ b/src/setup.surql
@@ -4,6 +4,13 @@ DEFINE FIELD IF NOT EXISTS site ON TABLE website TYPE string;
 DEFINE INDEX IF NOT EXISTS idx ON TABLE website COLUMNS site UNIQUE;
 DEFINE FIELD IF NOT EXISTS crawled ON TABLE website TYPE bool;
+DEFINE FIELD IF NOT EXISTS processing ON TABLE website TYPE bool DEFAULT false;
 DEFINE FIELD IF NOT EXISTS accessed_at ON TABLE website VALUE time::now();
 DEFINE FIELD IF NOT EXISTS first_accessed_at ON TABLE website VALUE time::now();
+
+DEFINE FUNCTION OVERWRITE fn::get_next($filter: string) {
+    LET $site = SELECT * FROM ONLY website WHERE crawled = false AND processing = false AND site ~ type::string($filter) LIMIT 1;
+    UPDATE $site.id SET processing = true;
+    RETURN $site
+};
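
The interesting piece of this commit is the pairing of `fn::get_next` with the retry loop in `get_next()`: when two workers race for the same row, the loser backs off for a random 10-500 ms and tries again, much like CSMA/CA in networking. Below is a minimal sketch of that backoff pattern in isolation, using only the `rand 0.9` and `tokio` APIs already in the dependency tree; the `with_backoff` helper and `MAX_ATTEMPTS` constant are illustrative stand-ins, not part of the commit.

    use std::time::Duration;
    use tokio::time::sleep;

    // Illustrative bound, mirroring the `fails > 5` check in get_next().
    const MAX_ATTEMPTS: u32 = 5;

    /// Retry an async operation, sleeping a random 10-500 ms between
    /// failures so competing workers don't retry in lockstep.
    async fn with_backoff<T, F, Fut>(mut op: F) -> Option<T>
    where
        F: FnMut() -> Fut,
        Fut: std::future::Future<Output = Result<T, surrealdb::Error>>,
    {
        let mut fails = 0;
        loop {
            match op().await {
                Ok(value) => return Some(value),
                Err(_) => {
                    fails += 1;
                    if fails > MAX_ATTEMPTS {
                        return None;
                    }
                    // rand 0.9 exposes `random_range` at the crate root.
                    let delay = rand::random_range(10..500);
                    sleep(Duration::from_millis(delay)).await;
                }
            }
        }
    }

The randomness is the point: a fixed delay would keep colliding workers retrying in lockstep, while jitter spreads their attempts out.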
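One caveat with the claim-and-return pattern: a worker that claims a row through `fn::get_next` and then crashes before `store()` runs leaves `processing = true` behind, since only the `ON DUPLICATE KEY UPDATE` branch resets it. A sweep at startup is one way to recover such rows; the helper below is a hypothetical addition, not something this commit includes.

    use surrealdb::{engine::remote::ws::Client, Surreal};

    /// Hypothetical crash-recovery sweep: release rows a previous run
    /// claimed (processing = true) but never stored, so fn::get_next
    /// can hand them out again.
    async fn release_stale_claims(db: &Surreal<Client>) -> surrealdb::Result<()> {
        db.query("UPDATE website SET processing = false WHERE processing = true;")
            .await?
            .check()?;
        Ok(())
    }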