Added support for nearly all HTML tags that can contain a link
This commit is contained in:
parent
7c32600694
commit
720adaa552
@ -15,7 +15,7 @@ pub struct Website {
|
|||||||
/// The url that this data is found at
|
/// The url that this data is found at
|
||||||
site: Url,
|
site: Url,
|
||||||
/// Whether or not this link has been crawled yet
|
/// Whether or not this link has been crawled yet
|
||||||
crawled: bool,
|
pub crawled: bool,
|
||||||
#[serde(skip_serializing)]
|
#[serde(skip_serializing)]
|
||||||
id: Option<Thing>,
|
id: Option<Thing>,
|
||||||
}
|
}
|
||||||
@ -39,10 +39,6 @@ impl Website {
|
|||||||
self.crawled = true
|
self.crawled = true
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn crawled(&mut self) -> &mut bool {
|
|
||||||
&mut self.crawled
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn mut_url(&mut self) -> &mut Url {
|
pub fn mut_url(&mut self) -> &mut Url {
|
||||||
&mut self.site
|
&mut self.site
|
||||||
}
|
}
|
||||||
@ -105,9 +101,11 @@ impl Website {
|
|||||||
if let Some(old) = response.take::<Option<Website>>(0).unwrap() {
|
if let Some(old) = response.take::<Option<Website>>(0).unwrap() {
|
||||||
// site exists already
|
// site exists already
|
||||||
if let Some(id) = old.id {
|
if let Some(id) = old.id {
|
||||||
|
// make sure to preserve the "crawled status"
|
||||||
let mut new = self.clone();
|
let mut new = self.clone();
|
||||||
new.crawled = old.crawled | new.crawled;
|
new.crawled = old.crawled | new.crawled;
|
||||||
|
|
||||||
|
// update the record
|
||||||
match db.upsert((id.tb, id.id.to_string())).content(new).await {
|
match db.upsert((id.tb, id.id.to_string())).content(new).await {
|
||||||
Ok(e) => {
|
Ok(e) => {
|
||||||
if let Some(a) = e {
|
if let Some(a) = e {
|
||||||
|
74
src/main.rs
74
src/main.rs
@ -1,9 +1,9 @@
|
|||||||
extern crate markup5ever_rcdom as rcdom;
|
extern crate markup5ever_rcdom as rcdom;
|
||||||
extern crate html5ever;
|
extern crate html5ever;
|
||||||
|
|
||||||
use std::{rc::Rc, time::Instant};
|
use std::{path::is_separator, rc::Rc, time::Instant};
|
||||||
use db::{connect, Website};
|
use db::{connect, Website};
|
||||||
use html5ever::{parse_document, tendril::TendrilSink, tree_builder::TreeBuilderOpts, ParseOpts};
|
use html5ever::{local_name, parse_document, tendril::TendrilSink, tree_builder::TreeBuilderOpts, ParseOpts};
|
||||||
use rcdom::{Node, RcDom};
|
use rcdom::{Node, RcDom};
|
||||||
use surrealdb::{engine::remote::ws::Client, sql::Thing, Surreal};
|
use surrealdb::{engine::remote::ws::Client, sql::Thing, Surreal};
|
||||||
use tracing::{debug, info, instrument, trace, trace_span, warn};
|
use tracing::{debug, info, instrument, trace, trace_span, warn};
|
||||||
@ -21,7 +21,8 @@ async fn main() {
|
|||||||
debug!("Starting...");
|
debug!("Starting...");
|
||||||
|
|
||||||
// Would probably take these in as parameters from a cli
|
// Would probably take these in as parameters from a cli
|
||||||
let url = "https://oliveratkinson.net/";
|
// let url = "https://oliveratkinson.net/";
|
||||||
|
let url = "http://localhost:5500";
|
||||||
let budget = 50;
|
let budget = 50;
|
||||||
let mut crawled = 0;
|
let mut crawled = 0;
|
||||||
|
|
||||||
@ -36,9 +37,12 @@ async fn main() {
|
|||||||
// get() to work.
|
// get() to work.
|
||||||
let span = trace_span!("Pre-Loop");
|
let span = trace_span!("Pre-Loop");
|
||||||
let pre_loop_span = span.enter();
|
let pre_loop_span = span.enter();
|
||||||
|
// Download the site
|
||||||
let mut site = Website::new(&url, false);
|
let mut site = Website::new(&url, false);
|
||||||
let dom = get(&mut site, &db, &client).await.expect("Inital page returned None.");
|
let dom = get(&mut site, &db, &client).await.expect("Inital page returned None.");
|
||||||
|
|
||||||
crawl_wrapper(&dom, &db, &site, &mut crawled).await;
|
crawl_wrapper(&dom, &db, &site, &mut crawled).await;
|
||||||
|
|
||||||
drop(pre_loop_span);
|
drop(pre_loop_span);
|
||||||
|
|
||||||
let span = trace_span!("Loop");
|
let span = trace_span!("Loop");
|
||||||
@ -49,6 +53,10 @@ async fn main() {
|
|||||||
} else {100};
|
} else {100};
|
||||||
|
|
||||||
let uncrawled = get_uncrawled_links(&db, get_num).await;
|
let uncrawled = get_uncrawled_links(&db, get_num).await;
|
||||||
|
if uncrawled.len() == 0 {
|
||||||
|
info!("Had more budget but finished crawling everything.");
|
||||||
|
return;
|
||||||
|
}
|
||||||
debug!("Crawling {} pages...", uncrawled.len());
|
debug!("Crawling {} pages...", uncrawled.len());
|
||||||
|
|
||||||
let span = trace_span!("Crawling");
|
let span = trace_span!("Crawling");
|
||||||
@ -100,6 +108,8 @@ async fn get(site: &mut Website, db: &Surreal<Client>, getter: &reqwest::Client)
|
|||||||
.from_utf8()
|
.from_utf8()
|
||||||
.read_from(&mut data.as_bytes())
|
.read_from(&mut data.as_bytes())
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
|
// TODO save the dom to minio if a flag is set
|
||||||
|
|
||||||
site.set_crawled();
|
site.set_crawled();
|
||||||
site.store(db).await;
|
site.store(db).await;
|
||||||
@ -114,34 +124,48 @@ async fn get(site: &mut Website, db: &Surreal<Client>, getter: &reqwest::Client)
|
|||||||
async fn walk(node: &rcdom::Handle, db: &Surreal<Client> , site: &Website, links_to: &mut Vec<Thing>) {
|
async fn walk(node: &rcdom::Handle, db: &Surreal<Client> , site: &Website, links_to: &mut Vec<Thing>) {
|
||||||
let span = trace_span!("Walk");
|
let span = trace_span!("Walk");
|
||||||
let span = span.enter();
|
let span = span.enter();
|
||||||
|
// Match each node - node basically means element.
|
||||||
match &node.data {
|
match &node.data {
|
||||||
rcdom::NodeData::Element { name, attrs, template_contents, mathml_annotation_xml_integration_point } => {
|
rcdom::NodeData::Element { name, attrs, template_contents, mathml_annotation_xml_integration_point } => {
|
||||||
for attr in attrs.borrow().clone() {
|
for attr in attrs.borrow().clone() {
|
||||||
if name.local.to_string() == "a" {
|
match name.local {
|
||||||
if attr.value.starts_with("mailto") {
|
local_name!("a") |
|
||||||
trace!("Is mailto");
|
local_name!("audio") |
|
||||||
// mailto link, lol
|
local_name!("area") |
|
||||||
let _created: Option<db::Record> = db.create("email").content(db::Email {
|
local_name!("img") |
|
||||||
email: attr.value.to_string(),
|
local_name!("link") |
|
||||||
on: site.domain_str().to_owned(),
|
local_name!("object") |
|
||||||
}).await.unwrap();
|
local_name!("source") |
|
||||||
} else {
|
local_name!("base") |
|
||||||
let mut web = site.clone();
|
local_name!("video") => {
|
||||||
let url = web.mut_url();
|
let attribute_name = attr.name.local.to_string();
|
||||||
|
if attribute_name == "src" || attribute_name == "href" || attribute_name == "data" {
|
||||||
|
// Get clone of the current site object
|
||||||
|
let mut web = site.clone();
|
||||||
|
|
||||||
|
// Set url
|
||||||
|
let url = web.mut_url();
|
||||||
|
url.set_fragment(None); // removes #xyz
|
||||||
|
let joined = url.join(&attr.value).unwrap();
|
||||||
|
*url = joined;
|
||||||
|
|
||||||
// TODO remove #xyz
|
// Set other attributes
|
||||||
let joined = url.join(&attr.value).unwrap();
|
web.crawled = false;
|
||||||
*url = joined;
|
// TODO set element name
|
||||||
|
// let element_name = name.local.to_string();
|
||||||
|
|
||||||
let crawled = web.crawled();
|
if let Some(id) = web.store(db).await {
|
||||||
*crawled = false;
|
links_to.push(id);
|
||||||
|
}
|
||||||
if let Some(id) = web.store(db).await {
|
|
||||||
links_to.push(id);
|
|
||||||
}
|
}
|
||||||
|
},
|
||||||
|
local_name!("button") |
|
||||||
|
local_name!("meta") |
|
||||||
|
local_name!("iframe") => {
|
||||||
|
// dbg!(attrs);
|
||||||
}
|
}
|
||||||
}
|
_ => {/**/}
|
||||||
|
};
|
||||||
};
|
};
|
||||||
},
|
},
|
||||||
_ => {},
|
_ => {},
|
||||||
@ -183,4 +207,4 @@ impl<'a> Drop for Timer<'a> {
|
|||||||
let dif = self.start.elapsed().as_micros();
|
let dif = self.start.elapsed().as_micros();
|
||||||
debug!("{}", format!("{} in {:.3}ms", self.msg, dif as f64/1000.));
|
debug!("{}", format!("{} in {:.3}ms", self.msg, dif as f64/1000.));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
Loading…
Reference in New Issue
Block a user