Compare commits

17 Commits

a9465dda6e ... status_cod
| Author | SHA1 | Date |
|---|---|---|
| | f7a3ca8fd7 | |
| | 6790061e22 | |
| | 50606bb69e | |
| | 5850f19cab | |
| | 2c8546e30a | |
| | 4e619d0ebc | |
| | 647c4cd324 | |
| | 7fab961d76 | |
| | d3fff194f4 | |
| | 3497312fd4 | |
| | 0fd76b1734 | |
| | 9bfa8f9108 | |
| | bdb1094a30 | |
| | 9aa2d9ce22 | |
| | 4b557a923c | |
| | c08a20ac00 | |
| | 94912e9125 | |

.vscode/launch.json (19 changes, vendored)
```diff
@@ -7,18 +7,15 @@
         {
             "type": "lldb",
             "request": "launch",
-            "name": "Debug executable 'surreal_spider'",
-            "env": {
-                "RUST_LOG": "surreal_spider=trace,reqwest=info",
-            },
+            "name": "Debug executable 'internet_mapper'",
             "cargo": {
                 "args": [
                     "build",
-                    "--bin=surreal_spider",
-                    "--package=surreal_spider"
+                    "--bin=internet_mapper",
+                    "--package=internet_mapper"
                 ],
                 "filter": {
-                    "name": "surreal_spider",
+                    "name": "internet_mapper",
                     "kind": "bin"
                 }
             },
@@ -28,16 +25,16 @@
         {
             "type": "lldb",
             "request": "launch",
-            "name": "Debug unit tests in executable 'surreal_spider'",
+            "name": "Debug unit tests in executable 'internet_mapper'",
             "cargo": {
                 "args": [
                     "test",
                     "--no-run",
-                    "--bin=surreal_spider",
-                    "--package=surreal_spider"
+                    "--bin=internet_mapper",
+                    "--package=internet_mapper"
                 ],
                 "filter": {
-                    "name": "surreal_spider",
+                    "name": "internet_mapper",
                     "kind": "bin"
                 }
             },
```

.vscode/settings.json (2 changes, vendored)
```diff
@@ -3,6 +3,6 @@
         "creds",
         "reqwest",
         "rustls",
-        "surql"
+        "surql",
     ]
 }
```

Cargo.lock (1 change, generated)
							@@ -1966,6 +1966,7 @@ name = "internet_mapper"
 | 
			
		||||
version = "0.1.0"
 | 
			
		||||
dependencies = [
 | 
			
		||||
 "base64 0.22.1",
 | 
			
		||||
 "futures-util",
 | 
			
		||||
 "html5ever 0.29.1",
 | 
			
		||||
 "metrics",
 | 
			
		||||
 "metrics-exporter-prometheus",
 | 
			
		||||
 
 | 
			
		||||
Cargo.toml

```diff
@@ -5,12 +5,13 @@ edition = "2021"
 
 [dependencies]
 base64 = "0.22.1"
+futures-util = "0.3.31"
 html5ever = "0.29"
 metrics = "0.24.1"
 metrics-exporter-prometheus = { version = "0.16.2", features=["http-listener"]}
 # minio = "0.1.0"
 minio = {git="https://github.com/minio/minio-rs.git", rev = "c28f576"}
-reqwest = { version = "0.12", features = ["gzip", "default", "rustls-tls"] }
+reqwest = { version = "0.12", features = ["gzip", "default", "rustls-tls", "stream"] }
 serde = { version = "1.0", features = ["derive"] }
 surrealdb = "2.2"
 tokio = { version="1.41.0", features = ["full"] }
```
Crawler configuration:

```diff
@@ -3,9 +3,10 @@ surreal_url = "localhost:8000"
 surreal_username = "root"
 surreal_password = "root"
 surreal_ns = "test"
-surreal_db = "v1.19.5"
+surreal_db = "v1.21.1"
 
 # Crawler config
-crawl_filter = "en.wikipedia.org" 
-start_url = "https://en.wikipedia.org"
-budget = 100
+crawl_filter = "https://ftpgeoinfo.msl.mt.gov/Data/Spatial/MSDI" 
+start_url = "https://ftpgeoinfo.msl.mt.gov/Data/Spatial/MSDI"
+budget = 10000
+batch_size = 50
```
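For context on how these keys are consumed: the `Config` struct in src/main.rs (extended with `batch_size` further down in this compare) derives `Deserialize`, so the file maps straight onto it. Below is a minimal sketch, assuming the `toml` crate is available; `crawler.toml` is a hypothetical path, and the `surreal_*` field names simply mirror the keys shown in the hunk above rather than the project's exact struct.

```rust
// Minimal sketch: deserialize the crawler config shown above into a serde struct.
// Assumption: the `toml` crate is a dependency; "crawler.toml" is a placeholder path.
use serde::Deserialize;

#[derive(Deserialize, Debug)]
struct Config {
    surreal_url: String,
    surreal_username: String,
    surreal_password: String,
    surreal_ns: String,
    surreal_db: String,
    crawl_filter: String,
    start_url: String,
    budget: usize,
    batch_size: usize,
}

fn main() {
    // Read and parse the TOML file, then use a couple of fields.
    let raw = std::fs::read_to_string("crawler.toml").expect("config file should exist");
    let config: Config = toml::from_str(&raw).expect("config should be valid TOML");
    println!("Crawling {} with a budget of {}", config.start_url, config.budget);
}
```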
Prometheus scrape configuration:

```diff
@@ -7,7 +7,7 @@ scrape_configs:
     static_configs:
     # change this your machine's ip, localhost won't work
     # because localhost refers to the docker container.
-      - targets: ['172.20.239.48:2500']
+      - targets: ['192.168.1.200:2500']
        #- targets: ['192.168.8.209:2500']
   - job_name: loki
     static_configs:
```

src/db.rs (18 changes)
```diff
@@ -20,12 +20,18 @@ pub struct Website {
     pub site: Url,
     /// Wether or not this link has been crawled yet
     pub crawled: bool,
+    /// 200, 404, etc
+    pub status_code: u16,
 }
 
 // manual impl to make tracing look nicer
 impl Debug for Website {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        f.debug_struct("Website").field("site", &self.site).finish()
+        f.debug_struct("Website")
+            .field("host", &self.site.host())
+            .field("path", &self.site.path())
+            .field("status_code", &self.status_code)
+            .finish()
     }
 }
 
@@ -38,15 +44,11 @@ impl Website {
         };
         Self {
             crawled,
-            site
+            site,
+            status_code: 0,
         }
     }
 
-    pub fn set_crawled(&mut self) {
-        trace!("Set crawled to true");
-        self.crawled = true
-    }
-
     // Insert ever item in the vec into surreal, crawled state will be preserved as TRUE
     // if already in the database as such or incoming data is TRUE.
     #[instrument(skip(db))]
@@ -54,11 +56,13 @@ impl Website {
         counter!(STORE).increment(1);
         let mut things = Vec::with_capacity(all.len());
 
+        // FIXME failes *sometimes* because "Resource Busy"
         match db
             .query(
                 "INSERT INTO website $array
                     ON DUPLICATE KEY UPDATE
                         accessed_at = time::now(),
+                        status_code = $input.status_code,
                         crawled = crawled OR $input.crawled
                     RETURN VALUE id;
                  ",
```
src/filesystem.rs

```diff
@@ -1,77 +1,66 @@
-use std::{ffi::OsStr, path::PathBuf};
+use std::{io::ErrorKind, path::PathBuf};
 
+use reqwest::header::HeaderValue;
 use tokio::fs;
-use tracing::{debug, error, instrument, trace, warn};
+use tracing::{error, trace, warn};
 use url::Url;
 
-#[instrument(skip(data))]
-pub async fn store(data: &str, url: &Url) {
+pub fn as_path(url: &Url, content_type: &HeaderValue) -> PathBuf {
     // extract data from url to save it accurately
-    let url_path = PathBuf::from("./downloaded/".to_string() + url.domain().unwrap_or("UnknownDomain") + url.path());
+    let mut url_path = PathBuf::from("./downloaded/".to_string() + url.domain().unwrap_or("UnknownDomain") + url.path());
 
-    // if it's a file
-    let (basepath, filename) = if url_path.extension().filter(valid_file_extension).is_some() {
-        // get everything up till the file
-        let basepath = url_path.ancestors().skip(1).take(1).collect::<PathBuf>();
-        // get the file name
-        let filename = url_path.file_name().expect("This should exist").to_string_lossy();
-        trace!("Save path: {:?} and base path: {:?}", &url_path, &basepath);
-        (basepath, filename.to_string())
+    if let Ok(header) = content_type.to_str() {
+        // text/html; charset=UTF-8; option=value
+        let ttype = if let Some((t, _)) = header.split_once(';') {
+            t
+        } else {
+            header
+        };
+
+        if let Some((ttype, subtype)) = ttype.split_once('/') {
+            trace!("Found Content-Type to be: {ttype}/{subtype} for {}", url.to_string());
+            // If the Content-Type header is "*/html" (most likely "text/html") and the path's
+            // extension is anything but html:
+            if subtype=="html" && !url_path.extension().is_some_and(|f| f=="html" || f=="htm" ) {
+                // time to slap a index.html to the end of that path there!
+                url_path = url_path.join("index.html");
+            }
+        }
     } else {
-        (url_path.clone(), "index.html".into())
-    };
+        warn!("Header: {:?} couldn't be parsed into a string!", content_type);
+    }
+    trace!("Final path for {} is: {:?}", url, url_path);
 
-    debug!("Writing at: {:?} {:?}", basepath, filename);
+    url_path
+}
 
-    // create the folders
-    if let Err(err) = fs::create_dir_all(&basepath).await {
-        error!("Dir creation: {err} {:?}", basepath);
-    } else {
-        // FIXME I don't think this handles index.html files well...
-        // TODO this should probably append .html to non-described files
-        // create the file if that was successful
-        if let Err(err) = fs::write(&basepath.join(filename), data).await {
-            error!("File creation: {err} {:?}", url_path);
+pub async fn init(filename: &PathBuf) -> Option<fs::File> {
+    let file = async || tokio::fs::OpenOptions::new()
+        .append(true)
+        .create(true)
+        .open(&filename).await;
+
+    match file().await {
+        Ok(ok) => Some(ok),
+        Err(err) => {
+            // the file/folder isn't found
+            if err.kind() == ErrorKind::NotFound {
+                if let Some(parent ) = &filename.parent() {
+                    // create the folders
+                    if let Err(err) = fs::create_dir_all(&parent).await {
+                        error!("Dir creation: {err} {:?}", filename);
+                        eprintln!("{}", err)
+                    } else if let Ok(ok) = file().await {
+                        return Some(ok);
+                    }
+                } else {
+                    error!("Couldn't get file's parents: {:?}", &filename);
+                }
+            } else {
+                error!("File open error: {err} {:?}", filename);
+            }
+            // we don't care about other errors, we can't/shouldn't fix them
+            None
         }
     }
 }
 
-fn valid_file_extension(take: &&OsStr) -> bool {
-    let los = take.to_string_lossy();
-    let all = los.split('.');
-    match all.last() {
-        Some(s) => {
-            match s.to_lowercase().as_str() {
-                "html" => true,
-                "css" => true,
-                "js" => true,
-                "ts" => true,
-                "otf" => true, // font
-
-                "png" => true,
-                "svg" => true,
-                "jpg" => true,
-                "jpeg" => true,
-                "mp4" => true,
-                "mp3" => true,
-                "webp" => true,
-
-                "pdf" => true,
-                "json" => true,
-                "xml" => true,
-
-                // IGNORE
-                // TODO Should this be a list of all domains?
-                "org" => false,
-                "com" => false,
-                "net" => false,
-
-                _ => {
-                    warn!("Might be forgetting a file extension: {s}");
-                    false
-                }
-            }
-        },
-        None => false,
-    }
-}
```

src/main.rs (152 changes)
```diff
@@ -1,9 +1,16 @@
 #![feature(ip_from)]
+#![warn(clippy::expect_used)]
+#![deny(clippy::unwrap_used)]
 
 extern crate html5ever;
 
+use futures_util::StreamExt;
+
 use std::{
-    collections::HashSet, fs::File, io::Read, net::{IpAddr, Ipv4Addr}
+    collections::HashSet,
+    fs::File,
+    io::Read,
+    net::{IpAddr, Ipv4Addr},
 };
 
 use db::{connect, Website};
@@ -11,13 +18,13 @@ use metrics::{counter, gauge};
 use metrics_exporter_prometheus::PrometheusBuilder;
 use serde::Deserialize;
 use surrealdb::{engine::remote::ws::Client, Surreal};
-use tokio::task::JoinSet;
-use tracing::{debug, error, info, instrument, level_filters::LevelFilter, trace, trace_span};
+use tokio::{io::{AsyncWriteExt, BufWriter}, task::JoinSet};
+use tracing::{debug, debug_span, error, info, instrument, level_filters::LevelFilter, trace, trace_span, warn};
 use tracing_subscriber::{fmt, layer::SubscriberExt, EnvFilter, Layer, Registry};
 
 mod db;
-mod parser;
 mod filesystem;
+mod parser;
 
 const GET_METRIC: &str = "total_gets";
 const GET_IN_FLIGHT: &str = "gets_in_flight";
@@ -35,6 +42,7 @@ struct Config {
     crawl_filter: String,
     start_url: String,
     budget: usize,
+    batch_size: usize,
 }
 
 #[tokio::main]
@@ -109,13 +117,8 @@ async fn main() {
     let span = trace_span!("Loop");
     let span = span.enter();
     while crawled < config.budget {
-        let get_num = if config.budget - crawled < 100 {
-            config.budget - crawled
-        } else {
-            100
-        };
-
-        let uncrawled = get_uncrawled_links(&db, get_num, config.crawl_filter.clone()).await;
+        let uncrawled =
+            get_uncrawled_links(&db, config.budget - crawled, config.crawl_filter.clone(), &config).await;
         if uncrawled.is_empty() {
             info!("Had more budget but finished crawling everything.");
             return;
@@ -141,12 +144,13 @@ async fn main() {
     }
     drop(span);
 
-    if let Ok(mut ok) = db.query("count(select id from website where crawled = true)").await {
+    if let Ok(mut ok) = db
+        .query("count(select id from website where crawled = true)")
+        .await
+    {
         let res = ok.take::<Option<usize>>(0);
-        if let Ok(i) = res {
-            if let Some(n) = i {
-                info!("Total crawled pages now equals {n}");
-            }
+        if let Ok(Some(n)) = res {
+            info!("Total crawled pages now equals {n}");
         }
     }
 
@@ -157,7 +161,6 @@ async fn main() {
 /// Downloads and crawls and stores a webpage.
 /// It is acceptable to clone `db`, `reqwest`, and `s3` because they all use `Arc`s internally. - Noted by Oliver
 async fn process(mut site: Website, db: Surreal<Client>, reqwest: reqwest::Client) {
-    
     // METRICS
     trace!("Process: {}", &site.site);
     // Build the request
```
```diff
@@ -169,56 +172,110 @@ async fn process(mut site: Website, db: Surreal<Client>, reqwest: reqwest::Client) {
 
     // Send the http request (get)
     if let Ok(response) = request_builder.send().await {
+        let headers = response.headers();
+        let code = response.status();
 
-        // METRICS
-        g.decrement(1);
-        counter!(GET_METRIC).increment(1);
+        #[allow(non_snake_case)]
+        let CT = headers.get("Content-Type");
+        let ct = headers.get("content-type");
 
-        // Get body from response
-        let data = response
-            .text()
-            .await
-            .expect("Failed to read http response's body!");
+        let ct = match (CT,ct) {
+            (None, None) => {
+                warn!("Server did not respond with Content-Type header. Url: {} Headers: ({:?})", site.site.to_string(), headers);
+                return
+            },
+            (None, Some(a)) => a,
+            (Some(a), None) => a,
+            (Some(a), Some(_)) => a,
+        };
 
-        // Store document
-        filesystem::store(&data, &site.site).await;
+        // create filepath (handles / -> /index.html)
+        let path = filesystem::as_path(&site.site, ct);
 
-        // Parse document and get relationships
-        let sites = parser::parse(&site, &data).await;
+        // make sure that the file is good to go
+        if let Some(file) = filesystem::init(&path).await {
+            // Get body from response
+            // stream the response onto the disk
+            let mut stream = response.bytes_stream();
 
-        // update self in db
-        site.set_crawled();
-        Website::store_all(vec![site], &db).await;
+            let should_parse = path.to_string_lossy().ends_with(".html");
+            let mut writer = BufWriter::new(file);
+            let mut buf: Vec<u8> = Vec::new();
 
-        // De-duplicate this list
-        let prev_len = sites.len();
-        let set = sites.into_iter().fold(HashSet::new(), |mut set,item| {
-            set.insert(item);
-            set
-        });
-        let de_dupe_sites: Vec<Website> = set.into_iter().collect();
-        let diff = prev_len - de_dupe_sites.len();
-        trace!("Saved {diff} from being entered into the db by de-duping");
+            // Write file to disk
+            info!("Writing at: {:?}", path);
+            while let Some(data) = stream.next().await {
+                match data {
+                    Ok(data) => {
+                        let _ = writer.write_all(&data).await;
+                        // If we are going to parse this file later, we will save it
+                        // into memory as well as the disk.
+                        // We do this because the data here might be incomplete
+                        if should_parse {
+                            data.iter().for_each(|f| buf.push(*f));
+                        }
+                    },
+                    Err(err) => {
+                        eprintln!("{}", err)
+                    },
+                }
+            }
+            let _ = writer.flush();
 
-        // Store all the other sites so that we can link to them.
-        let _ = Website::store_all(de_dupe_sites, &db).await;
-
+            // (If needed) Parse the file
+            if should_parse {
+                let span = debug_span!("Should Parse");
+                let enter = span.enter();
+
+                // Parse document and get relationships
+                let sites = parser::parse(&site, &buf).await;
+                // De-duplicate this list
+                let prev_len = sites.len();
+                let set = sites.into_iter().fold(HashSet::new(), |mut set, item| {
+                    set.insert(item);
+                    set
+                });
+                let de_dupe_sites: Vec<Website> = set.into_iter().collect();
+                let diff = prev_len - de_dupe_sites.len();
+                trace!("Saved {diff} from being entered into the db by de-duping");
+                // Store all the other sites so that we can link to them.
+                let _ = Website::store_all(de_dupe_sites, &db).await;
+
+                drop(enter);
+            }
+
+            // METRICS
+            g.decrement(1);
+            counter!(GET_METRIC).increment(1);
+
+            // update self in db
+            site.crawled = true;
+            site.status_code = code.as_u16();
+            Website::store_all(vec![site.clone()], &db).await;
+        } else {
+            error!("File failed to cooperate: {:?}", path);
+        }
 
         trace!("Done processing: {}", &site.site);
     } else {
         error!("Failed to get: {}", &site.site);
     }
 }
 
 /// Returns uncrawled links
-#[instrument(skip(db))]
+#[instrument(skip(db, config))]
 async fn get_uncrawled_links(
     db: &Surreal<Client>,
     mut count: usize,
     filter: String,
+    config: &Config,
 ) -> Vec<Website> {
-    if count > 100 {
-        count = 100
+    if count > config.batch_size {
+        count = config.batch_size;
     }
-    debug!("Getting uncrawled links");
 
+    debug!("Getting {} uncrawled links", count);
 
     let mut response = db
         .query("SELECT * FROM website WHERE crawled = false AND site ~ type::string($format) LIMIT $count;")
@@ -230,4 +287,3 @@ async fn get_uncrawled_links(
         .take(0)
         .expect("Returned websites couldn't be parsed")
 }
-
```
src/parser.rs

```diff
@@ -1,5 +1,4 @@
-use std::default::Default;
 use std::str::FromStr;
 
 use html5ever::tokenizer::{BufferQueue, TokenizerResult};
 use html5ever::tokenizer::{StartTag, TagToken};
@@ -40,7 +39,7 @@ impl TokenSink for Website {
                                     if let Some(mut parsed) = url {
                                         parsed.set_query(None);
                                         parsed.set_fragment(None);
-                                        debug!("Final cleaned URL: `{}`", parsed.to_string());
+                                        trace!("Final cleaned URL: `{}`", parsed.to_string());
                                         let web = Website::new(&parsed.to_string(), false);
                                         links.push(web);
                                     }
@@ -63,29 +62,34 @@ impl TokenSink for Website {
 
 #[instrument(skip_all)]
 /// Parses the passed site and returns all the sites it links to.
-pub async fn parse(site: &Website, data: &str) -> Vec<Website> {
+pub async fn parse(site: &Website, data: &[u8]) -> Vec<Website> {
     debug!("Parsing {}", site.site.to_string());
     // prep work
     let mut other_sites: Vec<Website> = Vec::new();
 
-    // change data into something that can be tokenized
-    let chunk = Tendril::from_str(data).expect("Failed to parse string into Tendril!");
-    // create buffer of tokens and push our input into it
-    let token_buffer = BufferQueue::default();
-    token_buffer.push_back(
-        chunk
-            .try_reinterpret::<fmt::UTF8>()
-            .expect("Failed to reinterpret chunk!"),
-    );
-    // create the tokenizer
-    let tokenizer = Tokenizer::new(site.clone(), TokenizerOpts::default());
+    let s: Result<Tendril<fmt::UTF8>, ()> = Tendril::try_from_byte_slice(data);
+    if let Ok(chunk) = s {
+        // create buffer of tokens and push our input into it
+        let token_buffer = BufferQueue::default();
+        token_buffer.push_back(
+            chunk
+                .try_reinterpret::<fmt::UTF8>()
+                .expect("Failed to reinterpret chunk!"),
+        );
+        // create the tokenizer
+        let tokenizer = Tokenizer::new(site.clone(), TokenizerOpts::default());
 
-    // go thru buffer
-    while let TokenizerResult::Script(mut sites) = tokenizer.feed(&token_buffer) {
-        other_sites.append(&mut sites);
-        // other_sites.push(sites);
+        // go thru buffer
+        while let TokenizerResult::Script(mut sites) = tokenizer.feed(&token_buffer) {
+            other_sites.append(&mut sites);
+            // other_sites.push(sites);
+        }
+        assert!(token_buffer.is_empty());
+        tokenizer.end();
+    } else {
+        warn!("Tendril failed to parse on: {}", site.site.to_string());
     }
-    assert!(token_buffer.is_empty());
-    tokenizer.end();
 
     other_sites
 }
@@ -107,7 +111,7 @@ fn try_get_url(parent: &Url, link: &str) -> Option<Url> {
                 match Url::parse(&format!("{scheme}://{}", link)) {
                     Ok(url) => Some(url),
                     Err(err) => {
-                        error!("Failed parsing realative scheme url: {}", err);
+                        error!("Failed parsing relative scheme url: {}", err);
                         None
                     }
                 }
@@ -117,19 +121,21 @@ fn try_get_url(parent: &Url, link: &str) -> Option<Url> {
                 match e {
                     url::ParseError::RelativeUrlWithoutBase => {
                         // Is: scheme://host:port
-                        let origin = parent.origin().ascii_serialization();
+                        let mut origin = parent.origin().ascii_serialization();
+                        if !origin.ends_with('/') && !link.starts_with('/') {
+                            origin += "/";
+                        }
                         let url = origin.clone() + link;
 
-                        trace!("Built `{url}` from `{origin} + {}`", link.to_string());
-
                         if let Ok(url) = Url::parse(&url) {
-                            trace!("Saved relative url `{}` AS: `{}`", link, url);
+                            trace!("Built `{url}` from `{origin} + `{}`", link.to_string());
                             Some(url)
                         } else {
                             error!(
-                                "Failed to reconstruct a url from relative url: `{}` on site: `{}`",
+                                "Failed to reconstruct a url from relative url: `{}` on site: `{}`. Failed url was: {}",
                                 link,
-                                parent.to_string()
+                                parent.to_string(),
+                                url
                             );
                             None
                         }
```