Compare commits: 4b557a923c ... bdb1094a30

2 commits in this range:
  bdb1094a30
  9aa2d9ce22
.vscode/launch.json (vendored): 19 changed lines

@@ -7,18 +7,15 @@
         {
             "type": "lldb",
             "request": "launch",
-            "name": "Debug executable 'surreal_spider'",
-            "env": {
-                "RUST_LOG": "surreal_spider=trace,reqwest=info",
-            },
+            "name": "Debug executable 'internet_mapper'",
             "cargo": {
                 "args": [
                     "build",
-                    "--bin=surreal_spider",
-                    "--package=surreal_spider"
+                    "--bin=internet_mapper",
+                    "--package=internet_mapper"
                 ],
                 "filter": {
-                    "name": "surreal_spider",
+                    "name": "internet_mapper",
                     "kind": "bin"
                 }
             },
@@ -28,16 +25,16 @@
         {
             "type": "lldb",
             "request": "launch",
-            "name": "Debug unit tests in executable 'surreal_spider'",
+            "name": "Debug unit tests in executable 'internet_mapper'",
             "cargo": {
                 "args": [
                     "test",
                     "--no-run",
-                    "--bin=surreal_spider",
-                    "--package=surreal_spider"
+                    "--bin=internet_mapper",
+                    "--package=internet_mapper"
                 ],
                 "filter": {
-                    "name": "surreal_spider",
+                    "name": "internet_mapper",
                     "kind": "bin"
                 }
             },
.vscode/settings.json (vendored): 2 changed lines

@@ -3,6 +3,6 @@
         "creds",
         "reqwest",
         "rustls",
-        "surql"
+        "surql",
     ]
 }
Cargo.lock (generated): 1 changed line

@@ -1966,6 +1966,7 @@ name = "internet_mapper"
 version = "0.1.0"
 dependencies = [
  "base64 0.22.1",
+ "futures-util",
  "html5ever 0.29.1",
  "metrics",
  "metrics-exporter-prometheus",
Cargo.toml

@@ -5,12 +5,13 @@ edition = "2021"
 
 [dependencies]
 base64 = "0.22.1"
+futures-util = "0.3.31"
 html5ever = "0.29"
 metrics = "0.24.1"
 metrics-exporter-prometheus = { version = "0.16.2", features=["http-listener"]}
 # minio = "0.1.0"
 minio = {git="https://github.com/minio/minio-rs.git", rev = "c28f576"}
-reqwest = { version = "0.12", features = ["gzip", "default", "rustls-tls"] }
+reqwest = { version = "0.12", features = ["gzip", "default", "rustls-tls", "stream"] }
 serde = { version = "1.0", features = ["derive"] }
 surrealdb = "2.2"
 tokio = { version="1.41.0", features = ["full"] }
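The two Cargo.toml additions work together: the "stream" feature exposes reqwest's Response::bytes_stream(), and futures-util provides the StreamExt::next() adapter used to drain it. A minimal sketch of that pattern (standalone illustration, not code from this repo; names are made up):

    use futures_util::StreamExt;
    use tokio::io::AsyncWriteExt;

    // Stream an HTTP body to disk chunk by chunk instead of buffering it whole.
    async fn download_to_disk(
        client: &reqwest::Client,
        url: &str,
        dest: &std::path::Path,
    ) -> Result<(), Box<dyn std::error::Error>> {
        let response = client.get(url).send().await?;
        let mut file = tokio::fs::File::create(dest).await?;
        let mut stream = response.bytes_stream(); // needs reqwest's "stream" feature
        while let Some(chunk) = stream.next().await {
            file.write_all(&chunk?).await?;
        }
        Ok(())
    }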
src/filesystem.rs

@@ -1,14 +1,10 @@
-use std::{ffi::OsStr, path::PathBuf};
+use std::{ffi::OsStr, io::ErrorKind, path::PathBuf};
 
 use tokio::fs;
-use tracing::{debug, error, instrument, trace, warn};
+use tracing::{error, trace};
 use url::Url;
 
-#[instrument(skip(data))]
-/// Returns whether or not the saved file should be parsed.
-/// If the file is just data, like an image, it doesn't need to be parsed.
-/// If it's html, then it does need to be parsed.
-pub async fn store(data: &str, url: &Url) -> bool {
+pub fn as_path(url: &Url) -> PathBuf {
     // extract data from url to save it accurately
     let url_path = PathBuf::from("./downloaded/".to_string() + url.domain().unwrap_or("UnknownDomain") + url.path());
 
@@ -24,20 +20,42 @@ pub async fn store(data: &str, url: &Url) -> bool {
         (url_path.clone(), "index.html".into())
     };
 
-    let should_parse = filename.ends_with(".html");
+    let mut path = PathBuf::new();
+    path = path.join(basepath);
+    path = path.join(filename);
 
-    debug!("Writing at: {:?} {:?}", basepath, filename);
+    path
+}
 
+pub async fn init(filename: &PathBuf) -> Option<fs::File> {
+    let file = async || tokio::fs::OpenOptions::new()
+        .append(true)
+        .create(true)
+        .open(&filename).await;
+
+    match file().await {
+        Ok(ok) => Some(ok),
+        Err(err) => {
+            // the file/folder isn't found
+            if err.kind() == ErrorKind::NotFound {
+                if let Some(parent ) = &filename.parent() {
     // create the folders
-    if let Err(err) = fs::create_dir_all(&basepath).await {
-        error!("Dir creation: {err} {:?}", basepath);
+                    if let Err(err) = fs::create_dir_all(&parent).await {
+                        error!("Dir creation: {err} {:?}", filename);
+                        eprintln!("{}", err)
+                    } else if let Ok(ok) = file().await {
+                        return Some(ok);
+                    }
     } else {
-        if let Err(err) = fs::write(&basepath.join(filename), data).await {
-            error!("File creation: {err} {:?}", url_path);
+                    error!("Couldn't get file's parents: {:?}", &filename);
+                }
+            } else {
+                error!("File creation: {err} {:?}", filename);
+            }
+            // we don't care about other errors, we can't/shouldn't fix them
+            None
         }
     }
-
-    should_parse
 }
 
 fn valid_file_extension(take: &&OsStr) -> bool {
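The old store() both built the path and wrote the data; it is now split into as_path(), a pure URL-to-path mapping, and init(), which opens the file in append mode and creates missing parent folders on a first failure. A rough usage sketch of the new pair (hypothetical caller, mirroring what process() in main.rs does below):

    use tokio::io::AsyncWriteExt;
    use url::Url;

    async fn save_page() {
        if let Ok(url) = Url::parse("https://example.com/about/") {
            // e.g. ./downloaded/example.com/about/index.html
            let path = filesystem::as_path(&url);
            if let Some(mut file) = filesystem::init(&path).await {
                let _ = file.write_all(b"<html>...</html>").await;
            }
        }
    }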
src/main.rs: 90 changed lines

@@ -1,9 +1,17 @@
 #![feature(ip_from)]
+#![feature(async_closure)]
+#![warn(clippy::expect_used)]
+#![deny(clippy::unwrap_used)]
 
 extern crate html5ever;
 
+use futures_util::StreamExt;
+
 use std::{
-    collections::HashSet, fs::File, io::Read, net::{IpAddr, Ipv4Addr}
+    collections::HashSet,
+    fs::File,
+    io::Read,
+    net::{IpAddr, Ipv4Addr},
 };
 
 use db::{connect, Website};
@@ -11,21 +19,19 @@ use metrics::{counter, gauge};
 use metrics_exporter_prometheus::PrometheusBuilder;
 use serde::Deserialize;
 use surrealdb::{engine::remote::ws::Client, Surreal};
-use tokio::task::JoinSet;
+use tokio::{io::AsyncWriteExt, task::JoinSet};
 use tracing::{debug, error, info, instrument, level_filters::LevelFilter, trace, trace_span};
 use tracing_subscriber::{fmt, layer::SubscriberExt, EnvFilter, Layer, Registry};
 
 mod db;
-mod parser;
 mod filesystem;
+mod parser;
 
 const GET_METRIC: &str = "total_gets";
 const GET_IN_FLIGHT: &str = "gets_in_flight";
 const SITES_CRAWLED: &str = "pages_crawled";
 const BEING_PROCESSED: &str = "pages_being_processed";
 
-const BATCH_SIZE: usize = 2;
-
 #[derive(Deserialize)]
 struct Config {
     surreal_ns: String,
@@ -37,6 +43,7 @@ struct Config {
     crawl_filter: String,
     start_url: String,
     budget: usize,
+    batch_size: usize,
 }
 
 #[tokio::main]
@@ -111,7 +118,8 @@ async fn main() {
     let span = trace_span!("Loop");
     let span = span.enter();
     while crawled < config.budget {
-        let uncrawled = get_uncrawled_links(&db, config.budget - crawled, config.crawl_filter.clone()).await;
+        let uncrawled =
+            get_uncrawled_links(&db, config.budget - crawled, config.crawl_filter.clone(), &config).await;
         if uncrawled.is_empty() {
             info!("Had more budget but finished crawling everything.");
             return;
@@ -137,14 +145,15 @@ async fn main() {
         }
         drop(span);
 
-        if let Ok(mut ok) = db.query("count(select id from website where crawled = true)").await {
+        if let Ok(mut ok) = db
+            .query("count(select id from website where crawled = true)")
+            .await
+        {
             let res = ok.take::<Option<usize>>(0);
-            if let Ok(i) = res {
-                if let Some(n) = i {
+            if let Ok(Some(n)) = res {
                 info!("Total crawled pages now equals {n}");
             }
         }
-    }
 
     info!("Done");
 }
@@ -153,7 +162,6 @@ async fn main() {
 /// Downloads and crawls and stores a webpage.
 /// It is acceptable to clone `db`, `reqwest`, and `s3` because they all use `Arc`s internally. - Noted by Oliver
 async fn process(mut site: Website, db: Surreal<Client>, reqwest: reqwest::Client) {
-
     // METRICS
     trace!("Process: {}", &site.site);
     // Build the request
@@ -165,60 +173,75 @@ async fn process(mut site: Website, db: Surreal<Client>, reqwest: reqwest::Client) {
 
     // Send the http request (get)
     if let Ok(response) = request_builder.send().await {
-
-        // TODO if this will fail if the object we are downloading is
-        // larger than the memory of the device it's running on.
-        // We should store it *as* we download it then parse it in-place.
         // Get body from response
-        let data = response
-            .text()
-            .await
-            .expect("Failed to read http response's body!");
-
-        // METRICS
-        g.decrement(1);
-        counter!(GET_METRIC).increment(1);
-
-        // Store document
-        let should_parse = filesystem::store(&data, &site.site).await;
+        let path = filesystem::as_path(&site.site);
+
+        // make sure that the file is good to go
+        if let Some(mut file) = filesystem::init(&path).await {
+            let should_parse = path.to_string_lossy().ends_with(".html");
+            let mut buf: Vec<u8> = Vec::new();
 
+            // stream the response onto the disk
+            let mut stream = response.bytes_stream();
+            while let Some(data) = stream.next().await {
+                match data {
+                    Ok(data) => {
+                        debug!("Writing at: {:?}", path);
+                        let _ = file.write_all(&data).await;
+                        // If we are going to parse this file later, we will save it
+                        // into memory as well as the disk.
+                        if should_parse {
+                            data.iter().for_each(|f| buf.push(*f));
+                        }
+                    },
+                    Err(err) => {
+                        eprintln!("{}", err)
+                    },
+                }
+            }
+
         if should_parse {
             // Parse document and get relationships
-            let sites = parser::parse(&site, &data).await;
+            let sites = parser::parse(&site, &buf).await;
 
             // De-duplicate this list
             let prev_len = sites.len();
-            let set = sites.into_iter().fold(HashSet::new(), |mut set,item| {
+            let set = sites.into_iter().fold(HashSet::new(), |mut set, item| {
                 set.insert(item);
                 set
             });
             let de_dupe_sites: Vec<Website> = set.into_iter().collect();
             let diff = prev_len - de_dupe_sites.len();
             trace!("Saved {diff} from being entered into the db by de-duping");
 
             // Store all the other sites so that we can link to them.
             let _ = Website::store_all(de_dupe_sites, &db).await;
         }
 
+            // METRICS
+            g.decrement(1);
+            counter!(GET_METRIC).increment(1);
+
         // update self in db
         site.set_crawled();
         Website::store_all(vec![site], &db).await;
+        } else {
+            error!("File failed to cooperate: {:?}", path);
+        }
     } else {
         error!("Failed to get: {}", &site.site);
     }
 }
 
 /// Returns uncrawled links
-#[instrument(skip(db))]
+#[instrument(skip(db, config))]
 async fn get_uncrawled_links(
     db: &Surreal<Client>,
     mut count: usize,
     filter: String,
+    config: &Config,
 ) -> Vec<Website> {
-    if count > BATCH_SIZE {
-        count = BATCH_SIZE;
+    if count > config.batch_size {
+        count = config.batch_size;
     }
 
     debug!("Getting uncrawled links");
@@ -233,4 +256,3 @@ async fn get_uncrawled_links(
         .take(0)
         .expect("Returned websites couldn't be parsed")
 }
-
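The new #![feature(async_closure)] attribute is there for the async closure that filesystem::init() uses: unlike a one-shot async block, an async closure can be called and awaited more than once, so the same open can be retried after the missing parent directory has been created. A nightly-only sketch of that shape (paths are illustrative, not taken from the crawler):

    async fn open_or_create_dir() -> Option<tokio::fs::File> {
        // The closure builds a fresh future on each call, so it can be awaited twice.
        let open = async || tokio::fs::OpenOptions::new()
            .append(true)
            .create(true)
            .open("downloaded/example.com/index.html")
            .await;

        match open().await {
            Ok(file) => Some(file),
            Err(_) => {
                // First attempt failed (likely a missing folder); create it and retry.
                let _ = tokio::fs::create_dir_all("downloaded/example.com").await;
                open().await.ok()
            }
        }
    }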
src/parser.rs

@@ -1,5 +1,4 @@
 use std::default::Default;
-use std::str::FromStr;
 
 use html5ever::tokenizer::{BufferQueue, TokenizerResult};
 use html5ever::tokenizer::{StartTag, TagToken};
@@ -63,12 +62,14 @@ impl TokenSink for Website {
 
 #[instrument(skip_all)]
 /// Parses the passed site and returns all the sites it links to.
-pub async fn parse(site: &Website, data: &str) -> Vec<Website> {
+pub async fn parse(site: &Website, data: &[u8]) -> Vec<Website> {
+    debug!("Parsing {}", site.site.to_string());
     // prep work
     let mut other_sites: Vec<Website> = Vec::new();
 
     // change data into something that can be tokenized
-    let chunk = Tendril::from_str(data).expect("Failed to parse string into Tendril!");
+    let s: Result<Tendril<fmt::UTF8>, ()> = Tendril::try_from_byte_slice(data);
+    if let Ok(chunk) = s {
     // create buffer of tokens and push our input into it
     let token_buffer = BufferQueue::default();
     token_buffer.push_back(
@@ -86,6 +87,9 @@ pub async fn parse(site: &Website, data: &str) -> Vec<Website> {
     }
     assert!(token_buffer.is_empty());
     tokenizer.end();
+    } else {
+        warn!("Tendril failed to parse on: {}", site.site.to_string());
+    }
 
     other_sites
 }
@@ -107,7 +111,7 @@ fn try_get_url(parent: &Url, link: &str) -> Option<Url> {
         match Url::parse(&format!("{scheme}://{}", link)) {
             Ok(url) => Some(url),
             Err(err) => {
-                error!("Failed parsing realative scheme url: {}", err);
+                error!("Failed parsing relative scheme url: {}", err);
                 None
             }
         }
@@ -117,10 +121,13 @@ fn try_get_url(parent: &Url, link: &str) -> Option<Url> {
         match e {
             url::ParseError::RelativeUrlWithoutBase => {
                 // Is: scheme://host:port
-                let origin = parent.origin().ascii_serialization();
+                let mut origin = parent.origin().ascii_serialization();
+                if !origin.ends_with('/') && !link.starts_with('/') {
+                    origin += "/";
+                }
                 let url = origin.clone() + link;
 
-                trace!("Built `{url}` from `{origin} + {}`", link.to_string());
+                trace!("Built `{url}` from `{origin} + `{}`", link.to_string());
 
                 if let Ok(url) = Url::parse(&url) {
                     trace!("Saved relative url `{}` AS: `{}`", link, url);
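Because parse() now receives the bytes buffered off the response stream rather than a String from reqwest, the input is no longer guaranteed to be valid UTF-8, which is why the infallible from_str + expect gives way to the fallible try_from_byte_slice plus a warn! on failure. A tiny standard-library illustration of the failure mode:

    fn main() {
        // 0xFF never occurs in well-formed UTF-8, so byte-to-text conversion must be fallible.
        let bytes: &[u8] = &[b'<', b'p', b'>', 0xFF];
        assert!(std::str::from_utf8(bytes).is_err());
    }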