Compare commits: 83def7ba27...e535bcc295 (3 commits)

Commits:
  e535bcc295
  a0fd81d956
  5cbba33a09
Cargo.lock: 12 changes (generated)
@@ -1862,6 +1862,7 @@ dependencies = [
  "opentelemetry",
  "opentelemetry-otlp",
  "opentelemetry_sdk",
+ "rand 0.9.1",
  "reqwest",
  "serde",
  "surrealdb",
@@ -2610,7 +2611,7 @@ dependencies = [
  "futures-util",
  "opentelemetry",
  "percent-encoding",
- "rand 0.9.0",
+ "rand 0.9.1",
  "serde_json",
  "thiserror 2.0.12",
 ]
@@ -2998,7 +2999,7 @@ checksum = "b820744eb4dc9b57a3398183639c511b5a26d2ed702cedd3febaa1393caa22cc"
 dependencies = [
  "bytes",
  "getrandom 0.3.2",
- "rand 0.9.0",
+ "rand 0.9.1",
  "ring",
  "rustc-hash 2.1.1",
  "rustls",
@@ -3069,13 +3070,12 @@ dependencies = [

 [[package]]
 name = "rand"
-version = "0.9.0"
+version = "0.9.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3779b94aeb87e8bd4e834cee3650289ee9e0d5677f976ecdb6d219e5f4f6cd94"
+checksum = "9fbfd9d094a40bf3ae768db9361049ace4c0e04a4fd6b359518bd7b73a73dd97"
 dependencies = [
  "rand_chacha 0.9.0",
  "rand_core 0.9.3",
- "zerocopy 0.8.23",
 ]

 [[package]]
@@ -4711,7 +4711,7 @@ version = "1.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "470dbf6591da1b39d43c14523b2b469c86879a53e8b758c8e090a470fe7b1fbe"
 dependencies = [
- "rand 0.9.0",
+ "rand 0.9.1",
  "serde",
  "web-time",
 ]
Cargo.toml

@@ -12,6 +12,7 @@ metrics-exporter-prometheus = { version = "0.16.2", features=["http-listener"]}
 opentelemetry = "0.30.0"
 opentelemetry-otlp = { version = "0.30.0", features = ["metrics", "trace", "logs", "grpc-tonic"] }
 opentelemetry_sdk = "0.30.0"
+rand = "0.9.1"
 reqwest = { version = "0.12", features = ["gzip", "default", "rustls-tls", "stream"] }
 serde = { version = "1.0", features = ["derive"] }
 surrealdb = "2.2"
Crawler.toml

@@ -1,3 +1,11 @@
+# Visability config
+# Alloy (for Tempo)
+tracing_endpoint = "http://localhost:4317"
+# Prometheus
+metrics_endpoint = "http://localhost:9090/api/v1/otlp/v1/metrics"
+# Alloy (for Loki)
+log_file = "./docker/logs/tracing.log"
+
 # Surreal config
 surreal_url = "localhost:8000"
 surreal_username = "root"
src/db.rs: 48 changes
@@ -1,12 +1,13 @@
 use metrics::counter;
-use std::fmt::Debug;
 use serde::{Deserialize, Serialize};
+use std::{fmt::Debug, time::Duration};
 use surrealdb::{
     engine::remote::ws::{Client, Ws},
     opt::auth::Root,
     sql::Thing,
     Surreal,
 };
+use tokio::time::sleep;
 use tracing::{error, instrument, trace};
 use url::Url;

@@ -16,6 +17,7 @@ const STORE: &str = "surql_store_calls";

 #[derive(Serialize, Deserialize, Clone, Eq, PartialEq, Hash)]
 pub struct Website {
+    pub id: Option<Thing>,
     /// The url that this data is found at
     pub site: Url,
     /// Wether or not this link has been crawled yet
@@ -46,6 +48,7 @@ impl Website {
             crawled,
             site,
             status_code: 0,
+            id: None,
         }
     }

@@ -63,6 +66,7 @@ impl Website {
                 ON DUPLICATE KEY UPDATE
                     accessed_at = time::now(),
                     status_code = $input.status_code,
+                    processing = false,
                     crawled = crawled OR $input.crawled
                 RETURN VALUE id;
             ",
@@ -82,18 +86,47 @@ impl Website {
     }
 }

+/// Returns uncrawled links
+#[instrument(skip(db, config))]
+pub async fn get_next(db: &Surreal<Client>, config: &Config) -> Option<Website> {
+    let mut res: Option<Website> = None;
+    let mut fails = 0;
+
+    while res == None {
+        let mut response = db
+            .query("fn::get_next($format)")
+            .bind(("format", config.crawl_filter.to_string()))
+            .await
+            .expect("Hard-coded query failed..?");
+
+        res = match response.take(0) {
+            Ok(ok) => ok,
+            Err(_err) => {
+                // basically just CSMA/CA
+                let delay = rand::random_range(10..500);
+                sleep(Duration::from_millis(delay)).await;
+                fails += 1;
+                // Don't get stuck here forever, failing...
+                // (most I've seen is 1)
+                if fails > 5 {
+                    error!("Max attempts to get_next() reached... ({fails})");
+                    return None
+                }
+                None
+            }
+        };
+    }

+    res
+}
+
 #[derive(Debug, Serialize)]
 #[allow(dead_code)]
 pub struct Email {
     pub email: String,
     pub on: String,
 }

 #[derive(Debug, Deserialize)]
 pub struct Record {
     #[allow(dead_code)]
     pub id: Thing,
 }

 #[instrument(skip_all, name = "SurrealDB")]
 pub async fn connect(config: &Config) -> surrealdb::Result<Surreal<Client>> {
     trace!("Establishing connection to surreal...");
@@ -122,4 +155,3 @@ pub async fn connect(config: &Config) -> surrealdb::Result<Surreal<Client>> {

     Ok(db)
 }
-
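The retry in get_next() is randomized backoff: when two workers collide inside fn::get_next, each sleeps a random 10-500 ms before retrying, so they desynchronize instead of colliding again (hence the "basically just CSMA/CA" comment). A minimal standalone sketch of that pattern, assuming tokio and rand 0.9; the with_random_backoff name and max_fails parameter are illustrative, not from this diff:

    use std::time::Duration;
    use tokio::time::sleep;

    // Retry an async attempt with a random 10..500 ms pause between tries so
    // concurrent workers desynchronize, the same idea get_next() uses.
    // max_fails mirrors get_next() giving up after 5 failures.
    async fn with_random_backoff<T, F, Fut>(mut attempt: F, max_fails: u32) -> Option<T>
    where
        F: FnMut() -> Fut,
        Fut: std::future::Future<Output = Option<T>>,
    {
        let mut fails = 0;
        while fails <= max_fails {
            if let Some(v) = attempt().await {
                return Some(v);
            }
            fails += 1;
            // Random jitter (not exponential) is enough to break ties here.
            let delay = rand::random_range(10..500);
            sleep(Duration::from_millis(delay)).await;
        }
        None
    }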
src/filesystem.rs

@@ -2,7 +2,7 @@ use std::{io::ErrorKind, path::PathBuf};

 use reqwest::header::HeaderValue;
 use tokio::fs;
-use tracing::{error, event, trace, warn, Level};
+use tracing::{error, trace, warn};
 use url::Url;

 pub fn as_path(url: &Url, content_type: &HeaderValue) -> PathBuf {
src/main.rs: 200 changes
@@ -9,7 +9,7 @@ use std::{
 };

 use futures_util::StreamExt;
-use opentelemetry::{global::{self, BoxedTracer}, metrics::{Counter, Meter, UpDownCounter}, trace::{Span, Tracer}};
+use opentelemetry::{global::{self}, metrics::{Counter, Meter, UpDownCounter}};
 use opentelemetry_otlp::{Protocol, WithExportConfig};
 use db::{connect, Website};
 use serde::Deserialize;
@@ -18,6 +18,8 @@ use tokio::{io::{AsyncWriteExt, BufWriter}, sync::RwLock, task::JoinSet};
 use tracing::{debug, error, info, instrument, level_filters::LevelFilter, trace, warn};
 use tracing_subscriber::{fmt, layer::SubscriberExt, EnvFilter, Layer, Registry};

+use crate::db::get_next;
+
 mod db;
 mod filesystem;
 mod parser;
@@ -54,11 +56,24 @@ static SITES_CRAWLED: LazyLock<Counter<u64>> = LazyLock::new(||
         .build()
 );

+static CONFIG: LazyLock<Config> = LazyLock::new(|| {
+    let mut file = File::open("./Crawler.toml").expect("Failed to read Crawler.toml");
+    let mut buf = String::new();
+    let _ = file.read_to_string(&mut buf);
+
+    let config: Config = toml::from_str(&buf).expect("Failed to parse Crawler.toml");
+    config
+});
+
+// FIXME Traces aren't working on multiple threads, they block
+// static TRACER: LazyLock<BoxedTracer> = LazyLock::new(|| global::tracer("Internet_Mapper"));
+
 #[derive(Deserialize)]
 struct Config {
+    tracing_endpoint: String,
+    metrics_endpoint: String,
+    log_file: String,
+
     surreal_ns: String,
     surreal_db: String,
     surreal_url: String,
@@ -76,78 +91,18 @@ async fn main() {
     println!("Logs and metrics are provided to the Grafana dashboard");

-    // Start TRACE / LOGGING / METRICS
-    // let otlp_log = opentelemetry_otlp::LogExporter::builder()
-    //     .with_tonic()
-    //     .with_endpoint(endpoint)
-    //     .build()
-    //     .unwrap();
-    // Send metrics to Prometheus
-    let otlp_metrics = opentelemetry_otlp::MetricExporter::builder()
-        .with_http()
-        .with_protocol(Protocol::HttpBinary)
-        .with_endpoint("http://localhost:9090/api/v1/otlp/v1/metrics")
-        .build()
-        .unwrap();
-    // Send spans to Alloy (which will send them to Tempo)
-    let otlp_span = opentelemetry_otlp::SpanExporter::builder()
-        .with_tonic()
-        .with_endpoint("http://localhost:4317")
-        .build()
-        .unwrap();
-
-    let tracer_provider = opentelemetry_sdk::trace::SdkTracerProvider::builder()
-        .with_simple_exporter(otlp_span)
-        .build();
-    // let logger_provider = opentelemetry_sdk::logs::SdkLoggerProvider::builder()
-    //     .with_simple_exporter(otlp_log)
-    //     .build();
-    let metrics_provider = opentelemetry_sdk::metrics::SdkMeterProvider::builder()
-        .with_periodic_exporter(otlp_metrics) // default delay is 60s, turn down to like 15
-        .build();
-
-    global::set_tracer_provider(tracer_provider);
-    global::set_meter_provider(metrics_provider);
-    // How to set logger?
-
-    // End TRACE
-
-    // Start LOGGING
-    let writer = std::fs::OpenOptions::new()
-        .append(true)
-        .create(true)
-        .open("./docker/logs/tracing.log")
-        .expect("Couldn't make log file!");
-
-    let filter = EnvFilter::builder()
-        .with_default_directive(LevelFilter::DEBUG.into())
-        .from_env_lossy();
-
-    let registry = Registry::default().with(
-        fmt::layer()
-            .with_line_number(true)
-            .with_thread_ids(true)
-            .with_file(true)
-            .json()
-            .with_writer(writer)
-            .with_filter(filter)
-    );
-
-    tracing::subscriber::set_global_default(registry).expect("Failed to set default subscriber");
-    // End LOGGING
+    load_tracing(&CONFIG);
+    load_logging(&CONFIG);
+    load_metrics(&CONFIG);

     // When getting uncrawled pages, name must contain this variable. "" will effectively get ignored.
     // let crawl_filter = "en.wikipedia.org/";
     // let budget = 50;
     let crawled = Arc::new(RwLock::new(0));

-    let mut file = File::open("./Crawler.toml").expect("Failed to read Crawler.toml");
-    let mut buf = String::new();
-    let _ = file.read_to_string(&mut buf);
-
-    let config: Arc<Config> = Arc::new(toml::from_str(&buf).expect("Failed to parse Crawler.toml"));
-    let starting_url = &config.start_url;
+    let starting_url = &CONFIG.start_url;

-    let db = connect(&config)
+    let db = connect(&CONFIG)
         .await
         .expect("Failed to connect to surreal, aborting.");
@@ -166,8 +121,8 @@ async fn main() {

     // let mut main_loop_span= TRACER.start("Main-Loop");
     let mut futures = JoinSet::new();
-    for _ in 0..config.batch_size {
-        futures.spawn(process_single_thread(config.clone(), db.clone(), reqwest.clone(), crawled.clone()));
+    for _ in 0..CONFIG.batch_size {
+        futures.spawn(process_single_thread(&CONFIG, db.clone(), reqwest.clone(), crawled.clone()));
     }
     futures.join_all().await;
     // main_loop_span.end();
@@ -175,19 +130,21 @@ async fn main() {
     info!("Done");
 }

-async fn process_single_thread(config: Arc<Config>, db: Surreal<Client>, reqwest: reqwest::Client, crawled: Arc<RwLock<usize>>) {
+async fn process_single_thread(config: &Config, db: Surreal<Client>, reqwest: reqwest::Client, crawled: Arc<RwLock<usize>>) {
     while *(crawled.read().await) < config.budget {
-        let uncrawled = get_uncrawled_links(&db.clone(), 1, &config).await;
-        if uncrawled.is_empty() {
-            return
-        }
-
-        for site in uncrawled {
-            process(site, db.clone(), reqwest.clone()).await;
-            SITES_CRAWLED.add(1, &[]);
-            // Somehow this write doesn't hang on the while's read?
-            let mut c = crawled.write().await;
-            *c += 1;
+        let uncrawled = get_next(&db.clone(), &config).await;
+        match uncrawled {
+            Some(site) => {
+                process(site, db.clone(), reqwest.clone()).await;
+                SITES_CRAWLED.add(1, &[]);
+                // Somehow this write doesn't hang on the while's read?
+                let mut c = crawled.write().await;
+                *c += 1;
+            },
+            None => {
+                warn!("fn::get_next() returned None");
+                return;
+            },
         }
     }
 }
@@ -198,7 +155,7 @@ async fn process_single_thread(config: Arc<Config>, db: Surreal<Client>, reqwest: reqwest::Client, crawled: Arc<RwLock<usize>>) {
 async fn process(mut site: Website, db: Surreal<Client>, reqwest: reqwest::Client) {

     // METRICS
-    trace!(url = &site.site.as_str(), "Process: {}", &site.site);
+    debug!(url = &site.site.as_str(), "Process: {}", &site.site);
     BEING_PROCESSED.add(1, &[]);
     // let mut process_span = TRACER.start("Process");

@@ -324,26 +281,65 @@ async fn process(mut site: Website, db: Surreal<Client>, reqwest: reqwest::Client) {
     BEING_PROCESSED.add(-1, &[]);
 }

-/// Returns uncrawled links
-#[instrument(skip(db, config))]
-async fn get_uncrawled_links(
-    db: &Surreal<Client>,
-    mut count: usize,
-    config: &Config,
-) -> Vec<Website> {
-    if count > config.batch_size {
-        count = config.batch_size;
-    }
-
-    debug!("Getting {} uncrawled links from DB", count);
-
-    let mut response = db
-        .query("SELECT * FROM website WHERE crawled = false AND site ~ type::string($format) LIMIT $count;")
-        .bind(("format", config.crawl_filter.to_string()))
-        .bind(("count", count))
-        .await
-        .expect("Hard-coded query failed..?");
-    response
-        .take(0)
-        .expect("Returned websites couldn't be parsed")
-}
+fn load_tracing(config: &Config) {
+    // Send spans to Alloy (which will send them to Tempo)
+    let otlp_span = opentelemetry_otlp::SpanExporter::builder()
+        .with_tonic()
+        .with_endpoint(config.tracing_endpoint.clone())
+        .build()
+        .unwrap();
+    let tracer_provider = opentelemetry_sdk::trace::SdkTracerProvider::builder()
+        .with_simple_exporter(otlp_span)
+        .build();
+
+    global::set_tracer_provider(tracer_provider);
+}
+
+fn load_logging(config: &Config) {
+    // let otlp_log = opentelemetry_otlp::LogExporter::builder()
+    //     .with_tonic()
+    //     .with_endpoint(endpoint)
+    //     .build()
+    //     .unwrap();
+    // let logger_provider = opentelemetry_sdk::logs::SdkLoggerProvider::builder()
+    //     .with_simple_exporter(otlp_log)
+    //     .build();
+    let writer = std::fs::OpenOptions::new()
+        .append(true)
+        .create(true)
+        .open(config.log_file.clone())
+        .expect("Couldn't make log file!");
+
+    let filter = EnvFilter::builder()
+        .with_default_directive(LevelFilter::DEBUG.into())
+        .from_env_lossy();
+
+    let registry = Registry::default().with(
+        fmt::layer()
+            .with_line_number(true)
+            .with_thread_ids(true)
+            .with_file(true)
+            .json()
+            .with_writer(writer)
+            .with_filter(filter)
+    );
+
+    tracing::subscriber::set_global_default(registry).expect("Failed to set default subscriber");
+}
+
+fn load_metrics(config: &Config) {
+    // Send metrics to Prometheus
+    let otlp_metrics = opentelemetry_otlp::MetricExporter::builder()
+        .with_http()
+        .with_protocol(Protocol::HttpBinary)
+        .with_endpoint(config.metrics_endpoint.clone())
+        .build()
+        .unwrap();
+    let metrics_provider = opentelemetry_sdk::metrics::SdkMeterProvider::builder()
+        .with_periodic_exporter(otlp_metrics) // default delay is 60s, turn down to like 15
+        .build();
+
+    global::set_meter_provider(metrics_provider);
+}
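The main() refactor swaps the Arc<Config> that was cloned into every task for a process-wide static. LazyLock runs its closure once, on first dereference, and every thread afterwards shares the same &'static Config, which is why process_single_thread can now take a plain &Config. A minimal sketch of the pattern, assuming the serde and toml crates; the single budget field stands in for the real struct:

    use std::sync::LazyLock;

    #[derive(serde::Deserialize)]
    struct Config {
        budget: usize, // stand-in for the full field set
    }

    // The closure runs exactly once, on first access; later derefs are free.
    static CONFIG: LazyLock<Config> = LazyLock::new(|| {
        let buf = std::fs::read_to_string("./Crawler.toml").expect("Failed to read Crawler.toml");
        toml::from_str(&buf).expect("Failed to parse Crawler.toml")
    });

    fn main() {
        // &CONFIG coerces to &'static Config, so it can be handed to spawned
        // tasks without an Arc clone.
        println!("budget = {}", CONFIG.budget);
    }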
src/parser.rs

@@ -115,7 +115,7 @@ fn try_get_url(parent: &Url, link: &str) -> Option<Url> {
             }
         }
     } else {
-        // # This is some sort of realative url, gonna try patching it up into an absolute
+        // # This is some sort of relative url, gonna try patching it up into an absolute
         // url
         match e {
             url::ParseError::RelativeUrlWithoutBase => {
@@ -4,6 +4,13 @@ DEFINE FIELD IF NOT EXISTS site ON TABLE website TYPE string;
 DEFINE INDEX IF NOT EXISTS idx ON TABLE website COLUMNS site UNIQUE;

 DEFINE FIELD IF NOT EXISTS crawled ON TABLE website TYPE bool;
+DEFINE FIELD IF NOT EXISTS processing ON TABLE website TYPE bool DEFAULT false;

 DEFINE FIELD IF NOT EXISTS accessed_at ON TABLE website VALUE time::now();
 DEFINE FIELD IF NOT EXISTS first_accessed_at ON TABLE website VALUE time::now();
+
+DEFINE FUNCTION OVERWRITE fn::get_next($filter: string) {
+    LET $site = SELECT * FROM ONLY website WHERE crawled = false AND processing = false AND site ~ type::string($filter) LIMIT 1;
+    UPDATE $site.id SET processing = true;
+    RETURN $site
+};
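The function pairs with the processing column added above: selecting one unclaimed row and flipping processing to true in the same call is what keeps two workers from crawling the same site, and the CSMA/CA retry in src/db.rs covers the case where concurrent calls still conflict. A minimal sketch of driving it from Rust, mirroring the call in src/db.rs; the claim_next name and the $filter bind key are illustrative:

    use surrealdb::{engine::remote::ws::Client, Surreal};

    use crate::db::Website;

    // Ask the database for one unclaimed website; fn::get_next marks the row
    // processing = true server-side before returning it.
    async fn claim_next(db: &Surreal<Client>, filter: &str) -> surrealdb::Result<Option<Website>> {
        let mut response = db
            .query("fn::get_next($filter)")
            .bind(("filter", filter.to_string()))
            .await?;
        response.take(0)
    }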