update how the database interactions work

parent 76e78cc745
commit 5cbba33a09

Cargo.lock (generated, 12 changes)

@@ -1862,6 +1862,7 @@ dependencies = [
  "opentelemetry",
  "opentelemetry-otlp",
  "opentelemetry_sdk",
+ "rand 0.9.1",
  "reqwest",
  "serde",
  "surrealdb",
@@ -2610,7 +2611,7 @@ dependencies = [
  "futures-util",
  "opentelemetry",
  "percent-encoding",
- "rand 0.9.0",
+ "rand 0.9.1",
  "serde_json",
  "thiserror 2.0.12",
 ]
@@ -2998,7 +2999,7 @@ checksum = "b820744eb4dc9b57a3398183639c511b5a26d2ed702cedd3febaa1393caa22cc"
 dependencies = [
  "bytes",
  "getrandom 0.3.2",
- "rand 0.9.0",
+ "rand 0.9.1",
  "ring",
  "rustc-hash 2.1.1",
  "rustls",
@@ -3069,13 +3070,12 @@ dependencies = [
 
 [[package]]
 name = "rand"
-version = "0.9.0"
+version = "0.9.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3779b94aeb87e8bd4e834cee3650289ee9e0d5677f976ecdb6d219e5f4f6cd94"
+checksum = "9fbfd9d094a40bf3ae768db9361049ace4c0e04a4fd6b359518bd7b73a73dd97"
 dependencies = [
  "rand_chacha 0.9.0",
  "rand_core 0.9.3",
- "zerocopy 0.8.23",
 ]
 
 [[package]]
@@ -4711,7 +4711,7 @@ version = "1.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "470dbf6591da1b39d43c14523b2b469c86879a53e8b758c8e090a470fe7b1fbe"
 dependencies = [
- "rand 0.9.0",
+ "rand 0.9.1",
  "serde",
  "web-time",
 ]

Cargo.toml

@@ -12,6 +12,7 @@ metrics-exporter-prometheus = { version = "0.16.2", features=["http-listener"]}
 opentelemetry = "0.30.0"
 opentelemetry-otlp = { version = "0.30.0", features = ["metrics", "trace", "logs", "grpc-tonic"] }
 opentelemetry_sdk = "0.30.0"
+rand = "0.9.1"
 reqwest = { version = "0.12", features = ["gzip", "default", "rustls-tls", "stream"] }
 serde = { version = "1.0", features = ["derive"] }
 surrealdb = "2.2"
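
The manifest change here is just the new direct dependency on rand 0.9.1; the Cargo.lock edits above mostly track the 0.9.0 to 0.9.1 bump. The crate is pulled in for the randomized retry delay that src/db.rs adds below. A minimal sketch of that API, assuming a Tokio runtime for the async sleep:

    use std::time::Duration;
    use tokio::time::sleep;

    // rand 0.9 exposes a top-level helper, so no explicit RNG handle is needed.
    async fn backoff_jitter() {
        // Pick a delay between 10 and 499 ms, mirroring the range used in db::get_next().
        let delay_ms: u64 = rand::random_range(10..500);
        sleep(Duration::from_millis(delay_ms)).await;
    }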

src/db.rs (48 changes)

@@ -1,12 +1,13 @@
 use metrics::counter;
-use std::fmt::Debug;
 use serde::{Deserialize, Serialize};
+use std::{fmt::Debug, time::Duration};
 use surrealdb::{
     engine::remote::ws::{Client, Ws},
     opt::auth::Root,
     sql::Thing,
     Surreal,
 };
+use tokio::time::sleep;
 use tracing::{error, instrument, trace};
 use url::Url;
 
@@ -16,6 +17,7 @@ const STORE: &str = "surql_store_calls";
 
 #[derive(Serialize, Deserialize, Clone, Eq, PartialEq, Hash)]
 pub struct Website {
+    pub id: Option<Thing>,
     /// The url that this data is found at
     pub site: Url,
     /// Wether or not this link has been crawled yet
@@ -46,6 +48,7 @@ impl Website {
             crawled,
             site,
             status_code: 0,
+            id: None,
         }
     }
 
@@ -63,6 +66,7 @@ impl Website {
                 ON DUPLICATE KEY UPDATE
                     accessed_at = time::now(),
                     status_code = $input.status_code,
+                    processing = false,
                     crawled = crawled OR $input.crawled
                 RETURN VALUE id;
             ",
@@ -82,18 +86,47 @@ impl Website {
     }
 }
 
+/// Returns uncrawled links
+#[instrument(skip(db, config))]
+pub async fn get_next(db: &Surreal<Client>, config: &Config) -> Option<Website> {
+    let mut res: Option<Website> = None;
+    let mut fails = 0;
+
+    while res == None {
+        let mut response = db
+            .query("fn::get_next($format)")
+            .bind(("format", config.crawl_filter.to_string()))
+            .await
+            .expect("Hard-coded query failed..?");
+
+        res = match response.take(0) {
+            Ok(ok) => ok,
+            Err(_err) => {
+                // basically just CSMA/CA
+                let delay = rand::random_range(10..500);
+                sleep(Duration::from_millis(delay)).await;
+                fails += 1;
+                // Don't get stuck here forever, failing...
+                // (most I've seen is 1)
+                if fails > 5 {
+                    error!("Max attempts to get_next() reached... ({fails})");
+                    return None
+                }
+                None
+            }
+        };
+    }
+
+    res
+}
+
 #[derive(Debug, Serialize)]
+#[allow(dead_code)]
 pub struct Email {
     pub email: String,
     pub on: String,
 }
 
-#[derive(Debug, Deserialize)]
-pub struct Record {
-    #[allow(dead_code)]
-    pub id: Thing,
-}
-
 #[instrument(skip_all, name = "SurrealDB")]
 pub async fn connect(config: &Config) -> surrealdb::Result<Surreal<Client>> {
     trace!("Establishing connection to surreal...");
@@ -122,4 +155,3 @@ pub async fn connect(config: &Config) -> surrealdb::Result<Surreal<Client>> {
 
     Ok(db)
 }
-

src/main.rs (196 changes)

@@ -9,7 +9,7 @@ use std::{
 };
 
 use futures_util::StreamExt;
-use opentelemetry::{global::{self, BoxedTracer}, metrics::{Counter, Meter, UpDownCounter}, trace::{Span, Tracer}};
+use opentelemetry::{global::{self}, metrics::{Counter, Meter, UpDownCounter}};
 use opentelemetry_otlp::{Protocol, WithExportConfig};
 use db::{connect, Website};
 use serde::Deserialize;
@@ -18,6 +18,8 @@ use tokio::{io::{AsyncWriteExt, BufWriter}, sync::RwLock, task::JoinSet};
 use tracing::{debug, error, info, instrument, level_filters::LevelFilter, trace, warn};
 use tracing_subscriber::{fmt, layer::SubscriberExt, EnvFilter, Layer, Registry};
 
+use crate::db::get_next;
+
 mod db;
 mod filesystem;
 mod parser;
@@ -56,6 +58,15 @@ static SITES_CRAWLED: LazyLock<Counter<u64>> = LazyLock::new(||
         .build()
 );
 
+static CONFIG: LazyLock<Config> = LazyLock::new(|| {
+    let mut file = File::open("./Crawler.toml").expect("Failed to read Crawler.toml");
+    let mut buf = String::new();
+    let _ = file.read_to_string(&mut buf);
+
+    let config: Config = toml::from_str(&buf).expect("Failed to parse Crawler.toml");
+    config
+});
+
 // FIXME Traces aren't working on multiple threads, they block
 // static TRACER: LazyLock<BoxedTracer> = LazyLock::new(|| global::tracer("Internet_Mapper"));
 
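
The new CONFIG static parses Crawler.toml once, on first access; the next hunk removes the equivalent read-and-parse into an Arc<Config> from main(). The Config struct itself is not part of this diff; the sketch below is only a guess at its shape, limited to the fields the changed code reads (start_url, crawl_filter, budget, batch_size), with assumed types:

    use serde::Deserialize;

    // Hypothetical shape; field names come from the call sites in this commit,
    // the types are assumptions.
    #[derive(Debug, Deserialize)]
    struct Config {
        start_url: String,    // seed URL handed to the first Website
        crawl_filter: String, // substring a site must match in fn::get_next
        budget: usize,        // stop once this many pages have been crawled
        batch_size: usize,    // number of worker tasks to spawn
    }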
@@ -78,78 +89,18 @@ async fn main() {
     println!("Logs and metrics are provided to the Grafana dashboard");
 
     // Start TRACE / LOGGING / METRICS
-    // let otlp_log = opentelemetry_otlp::LogExporter::builder()
-    //     .with_tonic()
-    //     .with_endpoint(endpoint)
-    //     .build()
-    //     .unwrap();
-    // Send metrics to Prometheus
-    let otlp_metrics = opentelemetry_otlp::MetricExporter::builder()
-        .with_http()
-        .with_protocol(Protocol::HttpBinary)
-        .with_endpoint("http://localhost:9090/api/v1/otlp/v1/metrics")
-        .build()
-        .unwrap();
-    // Send spans to Alloy (which will send them to Tempo)
-    let otlp_span = opentelemetry_otlp::SpanExporter::builder()
-        .with_tonic()
-        .with_endpoint("http://localhost:4317")
-        .build()
-        .unwrap();
-
-    let tracer_provider = opentelemetry_sdk::trace::SdkTracerProvider::builder()
-        .with_simple_exporter(otlp_span)
-        .build();
-    // let logger_provider = opentelemetry_sdk::logs::SdkLoggerProvider::builder()
-    //     .with_simple_exporter(otlp_log)
-    //     .build();
-    let metrics_provider = opentelemetry_sdk::metrics::SdkMeterProvider::builder()
-        .with_periodic_exporter(otlp_metrics) // default delay is 60s, turn down to like 15
-        .build();
-
-    global::set_tracer_provider(tracer_provider);
-    global::set_meter_provider(metrics_provider);
-    // How to set logger?
-
-    // End TRACE
-
-    // Start LOGGING
-    let writer = std::fs::OpenOptions::new()
-        .append(true)
-        .create(true)
-        .open("./docker/logs/tracing.log")
-        .expect("Couldn't make log file!");
-
-    let filter = EnvFilter::builder()
-        .with_default_directive(LevelFilter::DEBUG.into())
-        .from_env_lossy();
-
-    let registry = Registry::default().with(
-        fmt::layer()
-            .with_line_number(true)
-            .with_thread_ids(true)
-            .with_file(true)
-            .json()
-            .with_writer(writer)
-            .with_filter(filter)
-    );
-
-    tracing::subscriber::set_global_default(registry).expect("Failed to set default subscriber");
-    // End LOGGING
+    load_tracing();
+    load_logging();
+    load_metrics();
 
     // When getting uncrawled pages, name must contain this variable. "" will effectively get ignored.
     // let crawl_filter = "en.wikipedia.org/";
     // let budget = 50;
     let crawled = Arc::new(RwLock::new(0));
 
-    let mut file = File::open("./Crawler.toml").expect("Failed to read Crawler.toml");
-    let mut buf = String::new();
-    let _ = file.read_to_string(&mut buf);
-
-    let config: Arc<Config> = Arc::new(toml::from_str(&buf).expect("Failed to parse Crawler.toml"));
-    let starting_url = &config.start_url;
-
-    let db = connect(&config)
+    let starting_url = &CONFIG.start_url;
+
+    let db = connect(&CONFIG)
         .await
         .expect("Failed to connect to surreal, aborting.");
 
@@ -168,8 +119,8 @@ async fn main() {
 
     // let mut main_loop_span= TRACER.start("Main-Loop");
     let mut futures = JoinSet::new();
-    for _ in 0..config.batch_size {
-        futures.spawn(process_single_thread(config.clone(), db.clone(), reqwest.clone(), crawled.clone()));
+    for _ in 0..CONFIG.batch_size {
+        futures.spawn(process_single_thread(&CONFIG, db.clone(), reqwest.clone(), crawled.clone()));
     }
     futures.join_all().await;
     // main_loop_span.end();
@@ -177,19 +128,21 @@ async fn main() {
     info!("Done");
 }
 
-async fn process_single_thread(config: Arc<Config>, db: Surreal<Client>, reqwest: reqwest::Client, crawled: Arc<RwLock<usize>>) {
+async fn process_single_thread(config: &Config, db: Surreal<Client>, reqwest: reqwest::Client, crawled: Arc<RwLock<usize>>) {
     while *(crawled.read().await) < config.budget {
-        let uncrawled = get_uncrawled_links(&db.clone(), 1, &config).await;
-        if uncrawled.is_empty() {
-            return
-        }
-
-        for site in uncrawled {
-            process(site, db.clone(), reqwest.clone()).await;
-            SITES_CRAWLED.add(1, &[]);
-            // Somehow this write doesn't hang on the while's read?
-            let mut c = crawled.write().await;
-            *c += 1;
+        let uncrawled = get_next(&db.clone(), &config).await;
+        match uncrawled {
+            Some(site) => {
+                process(site, db.clone(), reqwest.clone()).await;
+                SITES_CRAWLED.add(1, &[]);
+                // Somehow this write doesn't hang on the while's read?
+                let mut c = crawled.write().await;
+                *c += 1;
+            },
+            None => {
+                warn!("fn::get_next() returned None");
+                return;
+            },
         }
     }
 }
@@ -200,7 +153,7 @@ async fn process_single_thread(config: Arc<Config>, db: Surreal<Client>, reqwest
 async fn process(mut site: Website, db: Surreal<Client>, reqwest: reqwest::Client) {
 
     // METRICS
-    trace!(url = &site.site.as_str(), "Process: {}", &site.site);
+    debug!(url = &site.site.as_str(), "Process: {}", &site.site);
     BEING_PROCESSED.add(1, &[]);
     // let mut process_span = TRACER.start("Process");
 
@@ -326,26 +279,65 @@ async fn process(mut site: Website, db: Surreal<Client>, reqwest: reqwest::Clien
     BEING_PROCESSED.add(-1, &[]);
 }
 
-/// Returns uncrawled links
-#[instrument(skip(db, config))]
-async fn get_uncrawled_links(
-    db: &Surreal<Client>,
-    mut count: usize,
-    config: &Config,
-) -> Vec<Website> {
-    if count > config.batch_size {
-        count = config.batch_size;
-    }
-
-    debug!("Getting {} uncrawled links from DB", count);
+fn load_tracing() {
+    // Send spans to Alloy (which will send them to Tempo)
+    let otlp_span = opentelemetry_otlp::SpanExporter::builder()
+        .with_tonic()
+        .with_endpoint("http://localhost:4317")
+        .build()
+        .unwrap();
+    let tracer_provider = opentelemetry_sdk::trace::SdkTracerProvider::builder()
+        .with_simple_exporter(otlp_span)
+        .build();
+
+    global::set_tracer_provider(tracer_provider);
 
-    let mut response = db
-        .query("SELECT * FROM website WHERE crawled = false AND site ~ type::string($format) LIMIT $count;")
-        .bind(("format", config.crawl_filter.to_string()))
-        .bind(("count", count))
-        .await
-        .expect("Hard-coded query failed..?");
-    response
-        .take(0)
-        .expect("Returned websites couldn't be parsed")
 }
+
+fn load_logging() {
+    // let otlp_log = opentelemetry_otlp::LogExporter::builder()
+    //     .with_tonic()
+    //     .with_endpoint(endpoint)
+    //     .build()
+    //     .unwrap();
+    // let logger_provider = opentelemetry_sdk::logs::SdkLoggerProvider::builder()
+    //     .with_simple_exporter(otlp_log)
+    //     .build();
+    let writer = std::fs::OpenOptions::new()
+        .append(true)
+        .create(true)
+        .open("./docker/logs/tracing.log")
+        .expect("Couldn't make log file!");
+
+    let filter = EnvFilter::builder()
+        .with_default_directive(LevelFilter::DEBUG.into())
+        .from_env_lossy();
+
+    let registry = Registry::default().with(
+        fmt::layer()
+            .with_line_number(true)
+            .with_thread_ids(true)
+            .with_file(true)
+            .json()
+            .with_writer(writer)
+            .with_filter(filter)
+    );
+
+    tracing::subscriber::set_global_default(registry).expect("Failed to set default subscriber");
+}
+
+fn load_metrics() {
+    // Send metrics to Prometheus
+    let otlp_metrics = opentelemetry_otlp::MetricExporter::builder()
+        .with_http()
+        .with_protocol(Protocol::HttpBinary)
+        .with_endpoint("http://localhost:9090/api/v1/otlp/v1/metrics")
+        .build()
+        .unwrap();
+    let metrics_provider = opentelemetry_sdk::metrics::SdkMeterProvider::builder()
+        .with_periodic_exporter(otlp_metrics) // default delay is 60s, turn down to like 15
+        .build();
+
+    global::set_meter_provider(metrics_provider);
+}

src/parser.rs

@@ -115,7 +115,7 @@ fn try_get_url(parent: &Url, link: &str) -> Option<Url> {
             }
         }
     } else {
-        // # This is some sort of realative url, gonna try patching it up into an absolute
+        // # This is some sort of relative url, gonna try patching it up into an absolute
        // url
         match e {
             url::ParseError::RelativeUrlWithoutBase => {

[SurrealDB schema file]

@@ -4,6 +4,13 @@ DEFINE FIELD IF NOT EXISTS site ON TABLE website TYPE string;
 DEFINE INDEX IF NOT EXISTS idx ON TABLE website COLUMNS site UNIQUE;
 
 DEFINE FIELD IF NOT EXISTS crawled ON TABLE website TYPE bool;
+DEFINE FIELD IF NOT EXISTS processing ON TABLE website TYPE bool DEFAULT false;
+
 DEFINE FIELD IF NOT EXISTS accessed_at ON TABLE website VALUE time::now();
 DEFINE FIELD IF NOT EXISTS first_accessed_at ON TABLE website VALUE time::now();
+
+DEFINE FUNCTION OVERWRITE fn::get_next($filter: string) {
+    LET $site = SELECT * FROM ONLY website WHERE crawled = false AND processing = false AND site ~ type::string($filter) LIMIT 1;
+    UPDATE $site.id SET processing = true;
+    RETURN $site
+};
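
fn::get_next is the server-side half of the new hand-off: it selects one row that is neither crawled nor already claimed and matches the filter, flags it processing = true, and returns the row, which the Rust caller deserializes into Option<Website>. A minimal sketch of that round trip, assuming an already-connected Surreal<Client> handle and the Website struct from src/db.rs (the real caller, db::get_next(), wraps this in the randomized retry loop shown earlier):

    use surrealdb::{engine::remote::ws::Client, Surreal};

    use crate::db::Website;

    // Hypothetical stand-alone version of the call made in db::get_next();
    // errors bubble up with `?` here instead of triggering the retry/backoff.
    async fn claim_next(db: &Surreal<Client>, filter: &str) -> surrealdb::Result<Option<Website>> {
        let mut response = db
            .query("fn::get_next($format)")       // function defined in the schema above
            .bind(("format", filter.to_string())) // substring the site URL must contain
            .await?;
        // The function RETURNs the claimed row (or NONE), so statement 0 holds it.
        response.take(0)
    }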