Compare commits
12 Commits: bdb1094a30...status_code

| Author | SHA1 | Date |
|---|---|---|
| | f7a3ca8fd7 | |
| | 6790061e22 | |
| | 50606bb69e | |
| | 5850f19cab | |
| | 2c8546e30a | |
| | 4e619d0ebc | |
| | 647c4cd324 | |
| | 7fab961d76 | |
| | d3fff194f4 | |
| | 3497312fd4 | |
| | 0fd76b1734 | |
| | 9bfa8f9108 | |
```diff
@@ -3,9 +3,10 @@ surreal_url = "localhost:8000"
 surreal_username = "root"
 surreal_password = "root"
 surreal_ns = "test"
-surreal_db = "v1.19.5"
+surreal_db = "v1.21.1"
 
 # Crawler config
-crawl_filter = "en.wikipedia.org"
-start_url = "https://en.wikipedia.org"
-budget = 100
+crawl_filter = "https://ftpgeoinfo.msl.mt.gov/Data/Spatial/MSDI"
+start_url = "https://ftpgeoinfo.msl.mt.gov/Data/Spatial/MSDI"
+budget = 10000
+batch_size = 50
```
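The new `batch_size` key implies a matching field wherever this file is deserialized. A minimal sketch of such a config struct, assuming (this is not shown in the diff) that the field names mirror the TOML keys one-to-one, as serde would expect:

```rust
use serde::Deserialize;

// Hypothetical mirror of the TOML above; the struct name and field types
// are assumptions, only the key names come from the config file itself.
#[derive(Debug, Deserialize)]
struct Config {
    surreal_url: String,
    surreal_username: String,
    surreal_password: String,
    surreal_ns: String,
    surreal_db: String,
    crawl_filter: String,
    start_url: String,
    budget: usize,
    // New in this change: how many uncrawled links to pull per DB query
    // (used by get_uncrawled_links in src/main.rs further down).
    batch_size: usize,
}
```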
```diff
@@ -7,7 +7,7 @@ scrape_configs:
     static_configs:
       # change this your machine's ip, localhost won't work
       # because localhost refers to the docker container.
-      - targets: ['172.20.239.48:2500']
+      - targets: ['192.168.1.200:2500']
       #- targets: ['192.168.8.209:2500']
   - job_name: loki
     static_configs:
```
src/db.rs
```diff
@@ -20,12 +20,18 @@ pub struct Website {
     pub site: Url,
     /// Wether or not this link has been crawled yet
     pub crawled: bool,
+    /// 200, 404, etc
+    pub status_code: u16,
 }
 
 // manual impl to make tracing look nicer
 impl Debug for Website {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        f.debug_struct("Website").field("site", &self.site).finish()
+        f.debug_struct("Website")
+            .field("host", &self.site.host())
+            .field("path", &self.site.path())
+            .field("status_code", &self.status_code)
+            .finish()
     }
 }
```
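The reworked `Debug` impl trades the raw `Url` for host, path, and the new status code, which keeps tracing output compact. A self-contained sketch of the same pattern, using only the fields this hunk shows:

```rust
use std::fmt::{self, Debug};
use url::Url;

struct Website {
    site: Url,
    status_code: u16,
}

// Same approach as the diff: list the fields worth reading in a trace
// instead of deriving Debug over the whole struct.
impl Debug for Website {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Website")
            .field("host", &self.site.host())
            .field("path", &self.site.path())
            .field("status_code", &self.status_code)
            .finish()
    }
}

fn main() {
    let w = Website {
        site: Url::parse("https://en.wikipedia.org/wiki/Rust").expect("static url"),
        status_code: 0,
    };
    // Website { host: Some(Domain("en.wikipedia.org")), path: "/wiki/Rust", status_code: 0 }
    println!("{w:?}");
}
```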
```diff
@@ -38,15 +44,11 @@ impl Website {
         };
         Self {
             crawled,
-            site
+            site,
+            status_code: 0,
         }
     }
 
-    pub fn set_crawled(&mut self) {
-        trace!("Set crawled to true");
-        self.crawled = true
-    }
-
     // Insert ever item in the vec into surreal, crawled state will be preserved as TRUE
     // if already in the database as such or incoming data is TRUE.
     #[instrument(skip(db))]
```
```diff
@@ -54,11 +56,13 @@ impl Website {
         counter!(STORE).increment(1);
         let mut things = Vec::with_capacity(all.len());
 
+        // FIXME failes *sometimes* because "Resource Busy"
         match db
             .query(
                 "INSERT INTO website $array
                     ON DUPLICATE KEY UPDATE
                         accessed_at = time::now(),
+                        status_code = $input.status_code,
                         crawled = crawled OR $input.crawled
                     RETURN VALUE id;
             ",
```
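The `crawled = crawled OR $input.crawled` clause makes crawled-ness sticky: an upsert can flip a row to `true` but never back to `false`, which is exactly what the comment above promises. A hedged sketch of driving this query with the SurrealDB Rust SDK; the `$array` binding name matches the query text, but the surrounding error handling is an assumption, not taken from the diff:

```rust
// Sketch only: assumes `db: &Surreal<Client>` as in the surrounding code
// and that Website derives Serialize.
let to_store = vec![Website::new("https://example.com/", false)];
match db
    .query(
        "INSERT INTO website $array
            ON DUPLICATE KEY UPDATE
                accessed_at = time::now(),
                status_code = $input.status_code,
                crawled = crawled OR $input.crawled
            RETURN VALUE id;",
    )
    .bind(("array", to_store))
    .await
{
    Ok(mut response) => {
        // RETURN VALUE id hands back just the record ids
        let ids: Vec<surrealdb::sql::Thing> = response.take(0).unwrap_or_default();
        trace!("Upserted {} records", ids.len());
    }
    // The FIXME above notes this can intermittently fail with "Resource Busy"
    Err(e) => error!("{e}"),
}
```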
```diff
@@ -1,30 +1,37 @@
-use std::{ffi::OsStr, io::ErrorKind, path::PathBuf};
+use std::{io::ErrorKind, path::PathBuf};
 
+use reqwest::header::HeaderValue;
 use tokio::fs;
-use tracing::{error, trace};
+use tracing::{error, trace, warn};
 use url::Url;
 
-pub fn as_path(url: &Url) -> PathBuf {
+pub fn as_path(url: &Url, content_type: &HeaderValue) -> PathBuf {
     // extract data from url to save it accurately
-    let url_path = PathBuf::from("./downloaded/".to_string() + url.domain().unwrap_or("UnknownDomain") + url.path());
+    let mut url_path = PathBuf::from("./downloaded/".to_string() + url.domain().unwrap_or("UnknownDomain") + url.path());
 
-    // if it's a file
-    let (basepath, filename) = if url_path.extension().filter(valid_file_extension).is_some() {
-        // get everything up till the file
-        let basepath = url_path.ancestors().skip(1).take(1).collect::<PathBuf>();
-        // get the file name
-        let filename = url_path.file_name().expect("This should exist").to_string_lossy();
-        trace!("Save path: {:?} and base path: {:?}", &url_path, &basepath);
-        (basepath, filename.to_string())
-    } else {
-        (url_path.clone(), "index.html".into())
-    };
+    if let Ok(header) = content_type.to_str() {
+        // text/html; charset=UTF-8; option=value
+        let ttype = if let Some((t, _)) = header.split_once(';') {
+            t
+        } else {
+            header
+        };
 
-    let mut path = PathBuf::new();
-    path = path.join(basepath);
-    path = path.join(filename);
+        if let Some((ttype, subtype)) = ttype.split_once('/') {
+            trace!("Found Content-Type to be: {ttype}/{subtype} for {}", url.to_string());
+            // If the Content-Type header is "*/html" (most likely "text/html") and the path's
+            // extension is anything but html:
+            if subtype=="html" && !url_path.extension().is_some_and(|f| f=="html" || f=="htm" ) {
+                // time to slap a index.html to the end of that path there!
+                url_path = url_path.join("index.html");
+            }
+        }
+    } else {
+        warn!("Header: {:?} couldn't be parsed into a string!", content_type);
+    }
+    trace!("Final path for {} is: {:?}", url, url_path);
 
-    path
+    url_path
 }
 
 pub async fn init(filename: &PathBuf) -> Option<fs::File> {
```
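The effect: a URL whose path lacks an `.html`/`.htm` extension but arrives with an HTML Content-Type now gets `index.html` appended, so directory-style URLs save cleanly. A rough usage sketch (assuming this function is the `filesystem::as_path` that src/main.rs calls below):

```rust
use reqwest::header::HeaderValue;
use url::Url;

let url = Url::parse("https://en.wikipedia.org/wiki/Rust").expect("static url");
let ct = HeaderValue::from_static("text/html; charset=UTF-8");

// "/wiki/Rust" has no html extension and the subtype is "html", so:
// ./downloaded/en.wikipedia.org/wiki/Rust/index.html
let path = as_path(&url, &ct);
assert!(path.ends_with("index.html"));
```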
```diff
@@ -50,29 +57,10 @@ pub async fn init(filename: &PathBuf) -> Option<fs::File> {
                     error!("Couldn't get file's parents: {:?}", &filename);
                 }
             } else {
-                error!("File creation: {err} {:?}", filename);
+                error!("File open error: {err} {:?}", filename);
             }
+            // we don't care about other errors, we can't/shouldn't fix them
            None
         }
     }
 }
-
-fn valid_file_extension(take: &&OsStr) -> bool {
-    let los = take.to_string_lossy();
-    let all = los.split('.');
-    match all.last() {
-        Some(s) => {
-            // FIXME it's worth noting that the dumb tlds like .zip are in here,
-            // which could cause problems
-            let all_domains = include_str!("tlds-alpha-by-domain.txt");
-
-            // check if it is a domain
-            match all_domains.lines().map(str::to_lowercase).find(|x| x==s.to_lowercase().as_str()) {
-                Some(_) => false,
-                None => true
-            }
-        },
-        None => false,
-    }
-}
```
src/main.rs
```diff
@@ -1,5 +1,4 @@
 #![feature(ip_from)]
-#![feature(async_closure)]
 #![warn(clippy::expect_used)]
 #![deny(clippy::unwrap_used)]
 
```
```diff
@@ -19,8 +18,8 @@ use metrics::{counter, gauge};
 use metrics_exporter_prometheus::PrometheusBuilder;
 use serde::Deserialize;
 use surrealdb::{engine::remote::ws::Client, Surreal};
-use tokio::{io::AsyncWriteExt, task::JoinSet};
-use tracing::{debug, error, info, instrument, level_filters::LevelFilter, trace, trace_span};
+use tokio::{io::{AsyncWriteExt, BufWriter}, task::JoinSet};
+use tracing::{debug, debug_span, error, info, instrument, level_filters::LevelFilter, trace, trace_span, warn};
 use tracing_subscriber::{fmt, layer::SubscriberExt, EnvFilter, Layer, Registry};
 
 mod db;
```
```diff
@@ -173,24 +172,45 @@ async fn process(mut site: Website, db: Surreal<Client>, reqwest: reqwest::Clien
     // Send the http request (get)
     if let Ok(response) = request_builder.send().await {
-        // Get body from response
         let headers = response.headers();
+        let code = response.status();
 
-        let path = filesystem::as_path(&site.site);
+        #[allow(non_snake_case)]
+        let CT = headers.get("Content-Type");
+        let ct = headers.get("content-type");
+
+        let ct = match (CT,ct) {
+            (None, None) => {
+                warn!("Server did not respond with Content-Type header. Url: {} Headers: ({:?})", site.site.to_string(), headers);
+                return
+            },
+            (None, Some(a)) => a,
+            (Some(a), None) => a,
+            (Some(a), Some(_)) => a,
+        };
+
+        // create filepath (handles / -> /index.html)
+        let path = filesystem::as_path(&site.site, ct);
 
         // make sure that the file is good to go
-        if let Some(mut file) = filesystem::init(&path).await {
-            let should_parse = path.to_string_lossy().ends_with(".html");
-            let mut buf: Vec<u8> = Vec::new();
-
+        if let Some(file) = filesystem::init(&path).await {
+            // Get body from response
             // stream the response onto the disk
             let mut stream = response.bytes_stream();
 
+            let should_parse = path.to_string_lossy().ends_with(".html");
+            let mut writer = BufWriter::new(file);
+            let mut buf: Vec<u8> = Vec::new();
+
             // Write file to disk
+            info!("Writing at: {:?}", path);
             while let Some(data) = stream.next().await {
                 match data {
                     Ok(data) => {
-                        debug!("Writing at: {:?}", path);
-                        let _ = file.write_all(&data).await;
+                        let _ = writer.write_all(&data).await;
                         // If we are going to parse this file later, we will save it
                         // into memory as well as the disk.
+                        // We do this because the data here might be incomplete
                         if should_parse {
                             data.iter().for_each(|f| buf.push(*f));
                         }
```
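One detail worth knowing about the double header lookup: `reqwest::header::HeaderMap` normalizes names to lowercase, so `get("Content-Type")` and `get("content-type")` always return the same entry, and the `(None, Some)` / `(Some, None)` arms cover cases that can't actually diverge. A quick standalone check:

```rust
use reqwest::header::{HeaderMap, HeaderValue};

fn main() {
    let mut headers = HeaderMap::new();
    headers.insert("Content-Type", HeaderValue::from_static("text/html"));

    // Lookups are case-insensitive: both return the same Some(&value).
    assert_eq!(headers.get("Content-Type"), headers.get("content-type"));
    println!("header lookups matched");
}
```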
```diff
@@ -200,8 +220,14 @@ async fn process(mut site: Website, db: Surreal<Client>, reqwest: reqwest::Clien
                     },
                 }
             }
+            let _ = writer.flush();
+
+
             // (If needed) Parse the file
             if should_parse {
+                let span = debug_span!("Should Parse");
+                let enter = span.enter();
+
                 // Parse document and get relationships
                 let sites = parser::parse(&site, &buf).await;
                 // De-duplicate this list
```
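One thing to flag for a follow-up commit: `flush()` on a tokio `BufWriter` is async, and `let _ = writer.flush();` creates the future but never awaits it; since Rust futures are lazy, the final buffered chunk may never reach the disk. A corrected tail, assuming the same `writer` and `path` bindings as above:

```rust
use tokio::io::AsyncWriteExt;

// Awaiting actually drives the flush; dropping the future does nothing.
if let Err(e) = writer.flush().await {
    error!("Failed to flush {:?}: {e}", path);
}
```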
```diff
@@ -215,6 +241,8 @@ async fn process(mut site: Website, db: Surreal<Client>, reqwest: reqwest::Clien
                 trace!("Saved {diff} from being entered into the db by de-duping");
                 // Store all the other sites so that we can link to them.
                 let _ = Website::store_all(de_dupe_sites, &db).await;
+
+                drop(enter);
             }
 
             // METRICS
```
```diff
@@ -222,11 +250,14 @@ async fn process(mut site: Website, db: Surreal<Client>, reqwest: reqwest::Clien
             counter!(GET_METRIC).increment(1);
 
             // update self in db
-            site.set_crawled();
-            Website::store_all(vec![site], &db).await;
+            site.crawled = true;
+            site.status_code = code.as_u16();
+            Website::store_all(vec![site.clone()], &db).await;
+        } else {
+            error!("File failed to cooperate: {:?}", path);
         }
 
         trace!("Done processing: {}", &site.site);
     } else {
         error!("Failed to get: {}", &site.site);
     }
```
```diff
@@ -244,7 +275,7 @@ async fn get_uncrawled_links(
         count = config.batch_size;
     }
 
-    debug!("Getting uncrawled links");
+    debug!("Getting {} uncrawled links", count);
 
     let mut response = db
         .query("SELECT * FROM website WHERE crawled = false AND site ~ type::string($format) LIMIT $count;")
```
```diff
@@ -39,7 +39,7 @@ impl TokenSink for Website {
                 if let Some(mut parsed) = url {
                     parsed.set_query(None);
                     parsed.set_fragment(None);
-                    debug!("Final cleaned URL: `{}`", parsed.to_string());
+                    trace!("Final cleaned URL: `{}`", parsed.to_string());
                     let web = Website::new(&parsed.to_string(), false);
                     links.push(web);
                 }
```
```diff
@@ -127,16 +127,15 @@ fn try_get_url(parent: &Url, link: &str) -> Option<Url> {
     }
     let url = origin.clone() + link;
 
-    trace!("Built `{url}` from `{origin} + `{}`", link.to_string());
-
     if let Ok(url) = Url::parse(&url) {
-        trace!("Saved relative url `{}` AS: `{}`", link, url);
+        trace!("Built `{url}` from `{origin} + `{}`", link.to_string());
        Some(url)
     } else {
         error!(
-            "Failed to reconstruct a url from relative url: `{}` on site: `{}`",
+            "Failed to reconstruct a url from relative url: `{}` on site: `{}`. Failed url was: {}",
             link,
-            parent.to_string()
+            parent.to_string(),
+            url
         );
         None
     }
```
File diff suppressed because it is too large