Compare commits

foss_stora...a9465dda6e (4 commits)

| SHA1 |
|---|
| a9465dda6e |
| add6f00ed6 |
| 4a433a1a77 |
| 03cbcd9ae0 |
Crawler.toml

@@ -3,8 +3,9 @@ surreal_url = "localhost:8000"
 surreal_username = "root"
 surreal_password = "root"
 surreal_ns = "test"
-surreal_db = "v1.19.2"
+surreal_db = "v1.19.5"

 # Crawler config
-crawl_filter = "en.wikipedia.com"
-budget = 1000
+crawl_filter = "en.wikipedia.org"
+start_url = "https://en.wikipedia.org"
+budget = 100
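For reference, the full `Crawler.toml` after this change should read roughly as follows, reconstructed from the hunk's context and added lines:

```toml
surreal_url = "localhost:8000"
surreal_username = "root"
surreal_password = "root"
surreal_ns = "test"
surreal_db = "v1.19.5"

# Crawler config
crawl_filter = "en.wikipedia.org"
start_url = "https://en.wikipedia.org"
budget = 100
```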
README.md (34 lines changed)
@@ -2,13 +2,43 @@

 Crawls sites, saving all the found links to a SurrealDB database. It then proceeds to take batches of 100 uncrawled links until the crawl budget is reached. It saves the data of each site in a MinIO database.
+
+## How to use
+
+1. Clone the repo and `cd` into it.
+2. Build the repo with `cargo build -r`
+3. Start the docker containers
+   1. cd into the docker folder `cd docker`
+   2. Bring up the docker containers `docker compose up -d`
+4. From the project's root, edit the `Crawler.toml` file to your liking.
+5. Run with `./target/release/internet_mapper`
+
+You can view stats of the project at `http://<your-ip>:3000/dashboards`
+
+```bash
+# Untested script but probably works
+git clone https://git.oliveratkinson.net/Oliver/internet_mapper.git
+cd internet_mapper
+
+cargo build -r
+
+cd docker
+docker compose up -d
+cd ..
+
+$EDITOR Crawler.toml
+
+./target/release/internet_mapper
+```

 ### TODO

-- [ ] Domain filtering - prevent the crawler from going on alternate versions of wikipedia.
+- [x] Domain filtering - prevent the crawler from going on alternate versions of wikipedia.
 - [ ] Conditionally save content - based on filename or file contents
 - [x] GUI / TUI ? - Grafana
 - [x] Better asynchronous getting of the sites. Currently it all happens serially.
-- [ ] Allow for storing asynchronously
+- [x] Allow for storing asynchronously - dropping the "links to" logic fixes this need
+- [x] Control crawler via config file (no recompilation needed)
+
+3/17/25: Took >1hr to crawl 100 pages
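The README's "batches of 100 uncrawled links until the crawl budget is reached" description suggests a loop along these lines. This is only an illustrative sketch: `Site`, `get_uncrawled`, and `crawl` are hypothetical stand-ins, not the project's real API.

```rust
// Hypothetical stand-ins: the real project pulls these from SurrealDB/MinIO.
struct Config { crawl_filter: String, budget: usize }
struct Site { url: String }

async fn get_uncrawled(limit: usize, filter: &str) -> Vec<Site> {
    // Stub: the real version would query SurrealDB for uncrawled links
    // whose URL contains `filter`, limited to `limit` rows.
    let _ = (limit, filter);
    Vec::new()
}

async fn crawl(site: &Site) {
    // Stub: the real version fetches the page, stores its body in MinIO,
    // and inserts discovered links back into SurrealDB.
    let _ = &site.url;
}

// The batch loop the README describes: take batches of up to 100
// uncrawled links until the crawl budget is reached.
async fn crawl_until_budget(config: &Config) {
    let mut crawled = 0;
    while crawled < config.budget {
        let batch = get_uncrawled(100, &config.crawl_filter).await;
        if batch.is_empty() {
            break; // nothing left matching the filter
        }
        for site in batch {
            crawl(&site).await;
            crawled += 1;
        }
    }
}
```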
@@ -66,4 +66,3 @@ volumes:
   grafana_storage:
   alloy_storage:
   surrealdb_storage:
   minio_storage:
@@ -49,6 +49,7 @@ impl Website {
     // Insert every item in the vec into surreal; crawled state will be preserved as TRUE
     // if already in the database as such or if the incoming data is TRUE.
     #[instrument(skip(db))]
     pub async fn store_all(all: Vec<Self>, db: &Surreal<Client>) -> Vec<Thing> {
         counter!(STORE).increment(1);
         let mut things = Vec::with_capacity(all.len());
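The preserved-as-TRUE behaviour the comment describes boils down to a logical OR between the stored flag and the incoming one. A minimal plain-Rust sketch of that rule (not the project's actual Surreal query):

```rust
// The "crawled stays TRUE" rule: a site counts as crawled if either
// the existing row or the incoming data says so.
fn merge_crawled(existing: bool, incoming: bool) -> bool {
    existing || incoming
}

#[cfg(test)]
mod tests {
    use super::merge_crawled;

    #[test]
    fn crawled_state_is_sticky() {
        assert!(merge_crawled(true, false));  // already crawled: preserved
        assert!(merge_crawled(false, true));  // incoming data says crawled
        assert!(!merge_crawled(false, false));
    }
}
```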
@@ -59,6 +59,13 @@ fn valid_file_extension(take: &&OsStr) -> bool {
         "pdf" => true,
         "json" => true,
         "xml" => true,
+
+        // IGNORE
+        // TODO Should this be a list of all domains?
+        "org" => false,
+        "com" => false,
+        "net" => false,
+
         _ => {
             warn!("Might be forgetting a file extension: {s}");
             false
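Assembled from the hunk above, the whole filter plausibly looks like the sketch below. The lowercasing step and the `warn!` wiring are assumptions; `eprintln!` stands in for the logging macro so the sketch is self-contained.

```rust
use std::ffi::OsStr;

// Sketch of the extension filter, assembled from the hunk above.
fn valid_file_extension(take: &&OsStr) -> bool {
    let raw = take.to_string_lossy().to_lowercase();
    let s = raw.as_str();
    match s {
        // KEEP: extensions worth saving
        "pdf" => true,
        "json" => true,
        "xml" => true,

        // IGNORE: domain suffixes that only look like file extensions
        "org" => false,
        "com" => false,
        "net" => false,

        _ => {
            // warn!(...) in the real code; eprintln! keeps this dependency-free.
            eprintln!("Might be forgetting a file extension: {s}");
            false
        }
    }
}
```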
src/main.rs (16 lines changed)
@@ -33,11 +33,14 @@ struct Config {
     surreal_password: String,

     crawl_filter: String,
+    start_url: String,
     budget: usize,
 }

 #[tokio::main]
 async fn main() {
+    println!("Logs and metrics are provided to the Grafana dashboard");
+
     let writer = std::fs::OpenOptions::new()
         .append(true)
         .create(true)
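Since a later hunk parses this struct with `toml::from_str`, the full struct presumably derives serde's `Deserialize`. A plausible full shape, with field names mirroring the Crawler.toml keys (the surreal_* fields beyond `surreal_password` are inferred from the config file, not from this hunk):

```rust
use serde::Deserialize;

// Plausible full Config struct, matching the Crawler.toml keys.
#[derive(Deserialize)]
struct Config {
    surreal_url: String,
    surreal_username: String,
    surreal_password: String,
    surreal_ns: String,
    surreal_db: String,

    crawl_filter: String,
    start_url: String,
    budget: usize,
}
```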
@@ -70,8 +73,7 @@ async fn main() {
         .expect("failed to install recorder/exporter");

     info!("Starting...");
-    // Would probably take these in as parameters from a cli
-    let starting_url = "https://en.wikipedia.org/";
+
     // When getting uncrawled pages, name must contain this variable. "" will effectively get ignored.
     // let crawl_filter = "en.wikipedia.org/";
     // let budget = 50;
@@ -82,6 +84,7 @@ async fn main() {
     let _ = file.read_to_string(&mut buf);

     let config: Config = toml::from_str(&buf).expect("Failed to parse Crawler.toml");
+    let starting_url = &config.start_url;

     let db = connect(&config)
         .await
@@ -138,6 +141,15 @@ async fn main() {
     }
     drop(span);

+    if let Ok(mut ok) = db.query("count(select id from website where crawled = true)").await {
+        let res = ok.take::<Option<usize>>(0);
+        if let Ok(i) = res {
+            if let Some(n) = i {
+                info!("Total crawled pages now equals {n}");
+            }
+        }
+    }
+
     info!("Done");
 }
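The nested if-lets in that added block can be collapsed by pattern-matching the `Ok(Some(n))` case in one step. A minimal equivalent sketch, assuming the same `db` handle and `info!` macro shown in the hunk:

```rust
// Flatter version of the count check above (sketch, same behavior).
if let Ok(mut ok) = db
    .query("count(select id from website where crawled = true)")
    .await
{
    if let Ok(Some(n)) = ok.take::<Option<usize>>(0) {
        info!("Total crawled pages now equals {n}");
    }
}
```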