Compare commits

1 commit: a9465dda6e...foss_stora
| Author | SHA1 | Date |
|---|---|---|
|  | 4989a59ddf |  |

Cargo.lock (generated, 2287 lines changed)

File diff suppressed because it is too large.
Cargo.toml (name inferred from the hunk content; the file header was not captured)

```diff
@@ -11,8 +11,8 @@ metrics-exporter-prometheus = { version = "0.16.2", features=["http-listener"]}
 # minio = "0.1.0"
 minio = {git="https://github.com/minio/minio-rs.git", rev = "c28f576"}
 reqwest = { version = "0.12", features = ["gzip", "default", "rustls-tls"] }
+rusqlite = { version = "0.34.0", features = ["bundled"] }
 serde = { version = "1.0", features = ["derive"] }
-surrealdb = "2.2"
 tokio = { version="1.41.0", features = ["full"] }
 toml = "0.8.20"
 tracing = "0.1"
```
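The dependency swap above is the core of the branch: `surrealdb = "2.2"` goes out and `rusqlite` 0.34 comes in with the `bundled` feature, which compiles SQLite from source so no system `libsqlite3` is required. A minimal smoke test of the new dependency, not part of the commit, just a sketch:

```rust
use rusqlite::Connection;

fn main() -> rusqlite::Result<()> {
    // An in-memory database is enough to verify the bundled SQLite builds and links.
    let db = Connection::open_in_memory()?;
    let version: String = db.query_row("SELECT sqlite_version()", [], |row| row.get(0))?;
    println!("bundled sqlite: {version}");
    Ok(())
}
```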
Crawler.toml (name inferred from the hunk content; the file header was not captured)

```diff
@@ -3,9 +3,8 @@ surreal_url = "localhost:8000"
 surreal_username = "root"
 surreal_password = "root"
 surreal_ns = "test"
-surreal_db = "v1.19.5"
+surreal_db = "v1.19.2"
 
 # Crawler config
-crawl_filter = "en.wikipedia.org"
-start_url = "https://en.wikipedia.org"
-budget = 100
+crawl_filter = "en.wikipedia.com"
+budget = 1000
```
							
								
								
									
README.md (34 lines changed)
````diff
@@ -2,43 +2,13 @@
 
 Crawls sites saving all the found links to a surrealdb database. It then proceeds to take batches of 100 uncrawled links untill the crawl budget is reached. It saves the data of each site in a minio database.
 
-## How to use
-
-1. Clone the repo and `cd` into it.
-2. Build the repo with `cargo build -r`
-3. Start the docker conatiners
-	1. cd into the docker folder `cd docker`
-	2. Bring up the docker containers `docker compose up -d`
-4. From the project's root, edit the `Crawler.toml` file to your liking.
-5. Run with `./target/release/internet_mapper`
-
-You can view stats of the project at `http://<your-ip>:3000/dashboards`
-
-```bash
-# Untested script but probably works
-git clone https://git.oliveratkinson.net/Oliver/internet_mapper.git
-cd internet_mapper
-
-cargo build -r
-
-cd docker
-docker compose up -d
-cd ..
-
-$EDITOR Crawler.toml
-
-./target/release/internet_mapper
-
-```
-
 ### TODO
 
-- [x] Domain filtering - prevent the crawler from going on alternate versions of wikipedia.
+- [ ] Domain filtering - prevent the crawler from going on alternate versions of wikipedia.
 - [ ] Conditionally save content - based on filename or file contents
 - [x] GUI / TUI ? - Graphana
 - [x] Better asynchronous getting of the sites. Currently it all happens serially.
-- [x] Allow for storing asynchronously - dropping the "links to" logic fixes this need
-- [x] Control crawler via config file (no recompliation needed)
+- [ ] Allow for storing asynchronously
 
 3/17/25: Took >1hr to crawl 100 pages
 
````
Docker compose file (file name not captured)

```diff
@@ -66,3 +66,4 @@ volumes:
   grafana_storage:
   alloy_storage:
   surrealdb_storage:
+  minio_storage:
```
							
								
								
									
src/db.rs (46 lines changed)
```diff
@@ -1,12 +1,7 @@
 use metrics::counter;
+use rusqlite::Connection;
 use std::fmt::Debug;
 use serde::{Deserialize, Serialize};
-use surrealdb::{
-    engine::remote::ws::{Client, Ws},
-    opt::auth::Root,
-    sql::Thing,
-    Surreal,
-};
 use tracing::{error, instrument, trace};
 use url::Url;
 
@@ -49,11 +44,16 @@ impl Website {
 
     // Insert ever item in the vec into surreal, crawled state will be preserved as TRUE
     // if already in the database as such or incoming data is TRUE.
     #[instrument(skip(db))]
-    pub async fn store_all(all: Vec<Self>, db: &Surreal<Client>) -> Vec<Thing> {
+    pub async fn store_all(all: Vec<Self>, db: &Connection) {
         counter!(STORE).increment(1);
-        let mut things = Vec::with_capacity(all.len());
+
+        rusqlite::ParamsFromIter;
+
+        db.execute("",
+            params![]
+        );
 
         match db
             .query(
                 "INSERT INTO website $array
```
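The new `store_all` body above is visibly unfinished: a bare `rusqlite::ParamsFromIter;` path statement, a `db.execute("", params![])` call with empty SQL, and the old surreal `match db.query(...)` still sitting below it, which will not compile against a `&Connection`. A sketch of what a completed rusqlite version might look like, assuming a hypothetical `website(url TEXT PRIMARY KEY, crawled INTEGER)` table (no schema ships in this diff), a hypothetical `site` field, and a blocking signature, since rusqlite is a synchronous API:

```rust
use rusqlite::{params, Connection, Result};

pub struct Website {
    pub site: String,   // hypothetical field; the real struct is not shown in this diff
    pub crawled: bool,
}

impl Website {
    pub fn store_all(all: Vec<Self>, db: &mut Connection) -> Result<()> {
        // One transaction for the whole batch: far cheaper than autocommit per row.
        let tx = db.transaction()?;
        {
            // Upsert that keeps crawled = TRUE when either the stored row or the
            // incoming one says so, matching the comment on the original function.
            let mut stmt = tx.prepare_cached(
                "INSERT INTO website (url, crawled) VALUES (?1, ?2)
                 ON CONFLICT(url) DO UPDATE SET crawled = crawled OR excluded.crawled",
            )?;
            for site in &all {
                stmt.execute(params![site.site, site.crawled])?;
            }
        } // statement dropped here so the transaction can be committed
        tx.commit()
    }
}
```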
src/db.rs, continued:

```diff
@@ -90,32 +90,10 @@ pub struct Record {
     pub id: Thing,
 }
 
-#[instrument(skip_all, name = "SurrealDB")]
-pub async fn connect(config: &Config) -> surrealdb::Result<Surreal<Client>> {
-    trace!("Establishing connection to surreal...");
+#[instrument(skip_all, name = "sqlite_connect")]
+pub async fn connect(config: &Config) -> Result<Connection, rusqlite::Error> {
+    trace!("Establishing connection to sqlite...");
-    // Connect to the server
-    let db = Surreal::new::<Ws>(&config.surreal_url).await?;
-
-    trace!("Logging in...");
-    // Signin as a namespace, database, or root user
-    db.signin(Root {
-        username: &config.surreal_username,
-        password: &config.surreal_password,
-    })
-    .await?;
-
-    // Select a specific namespace / database
-    db.use_ns(&config.surreal_ns)
-        .use_db(&config.surreal_db)
-        .await?;
-
-    let setup = include_bytes!("setup.surql");
-    let file = setup.iter().map(|c| *c as char).collect::<String>();
-
-    db.query(file)
-        .await
-        .expect("Failed to setup surreal tables.");
 
-    Ok(db)
+    Connection::open("./squeelite.db")
 }
 
```
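The old surreal `connect` bootstrapped its schema by running `setup.surql`; the committed sqlite replacement only calls `Connection::open("./squeelite.db")` (note the filename) and creates no tables at all. A hedged sketch of an equivalent bootstrap step, with the table and its schema assumed rather than taken from the repo:

```rust
use rusqlite::Connection;

pub fn connect() -> Result<Connection, rusqlite::Error> {
    let db = Connection::open("./squeelite.db")?;
    // Hypothetical counterpart to setup.surql; the commit ships no sqlite schema.
    db.execute_batch(
        "CREATE TABLE IF NOT EXISTS website (
             url     TEXT PRIMARY KEY,
             crawled INTEGER NOT NULL DEFAULT 0
         );",
    )?;
    Ok(db)
}
```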
Unnamed source file (header lost in capture; contains `valid_file_extension`)

```diff
@@ -59,13 +59,6 @@ fn valid_file_extension(take: &&OsStr) -> bool {
                 "pdf" => true,
                 "json" => true,
                 "xml" => true,
-
-                // IGNORE
-                // TODO Should this be a list of all domains?
-                "org" => false,
-                "com" => false,
-                "net" => false,
-
                 _ => {
                     warn!("Might be forgetting a file extension: {s}");
                     false
```
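Removing the explicit `"org"` / `"com"` / `"net"` arms does not change the returned value for those inputs: they now fall through to the wildcard arm, which also returns `false`, but each hit now emits the `warn!` line. A sketch of the resulting control flow; everything outside the arms shown in the hunk is assumed:

```rust
use std::ffi::OsStr;
use tracing::warn;

fn valid_file_extension(take: &&OsStr) -> bool {
    match take.to_str() {
        Some(s) => match s {
            "pdf" => true,
            "json" => true,
            "xml" => true,
            // "org" / "com" / "net" used to be explicit `false` arms; they now
            // land here, so the result is the same but the log gets noisier.
            _ => {
                warn!("Might be forgetting a file extension: {s}");
                false
            }
        },
        None => false,
    }
}
```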
							
								
								
									
src/main.rs (16 lines changed)
```diff
@@ -33,14 +33,11 @@ struct Config {
     surreal_password: String,
 
-    crawl_filter: String,
-    start_url: String,
-    budget: usize,
 }
 
 #[tokio::main]
 async fn main() {
     println!("Logs and metrics are provided to the Grafana dashboard");
 
     let writer = std::fs::OpenOptions::new()
         .append(true)
         .create(true)
```
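After this hunk, `Config` no longer declares `crawl_filter`, `start_url`, or `budget`, while the branch's `Crawler.toml` (earlier in this compare) still sets `crawl_filter` and `budget`. Parsing still succeeds because serde ignores unknown keys by default. A sketch, with the field set assumed from the hunks in this compare:

```rust
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct Config {
    surreal_url: String,
    surreal_username: String,
    surreal_password: String,
    surreal_ns: String,
    surreal_db: String,
    // crawl_filter / start_url / budget are gone; adding
    // #[serde(deny_unknown_fields)] on the struct would make stale keys an error.
}

fn main() {
    let buf = r#"
        surreal_url = "localhost:8000"
        surreal_username = "root"
        surreal_password = "root"
        surreal_ns = "test"
        surreal_db = "v1.19.2"
        crawl_filter = "en.wikipedia.com" # silently ignored
        budget = 1000                     # silently ignored
    "#;
    let config: Config = toml::from_str(buf).expect("Failed to parse Crawler.toml");
    println!("{config:?}");
}
```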
src/main.rs, continued:

```diff
@@ -73,7 +70,8 @@ async fn main() {
         .expect("failed to install recorder/exporter");
 
     info!("Starting...");
-
+    // Would probably take these in as parameters from a cli
+    let starting_url = "https://en.wikipedia.org/";
     // When getting uncrawled pages, name must contain this variable. "" will effectively get ignored.
     // let crawl_filter = "en.wikipedia.org/";
     // let budget = 50;
@@ -84,7 +82,6 @@ async fn main() {
     let _ = file.read_to_string(&mut buf);
 
     let config: Config = toml::from_str(&buf).expect("Failed to parse Crawler.toml");
-    let starting_url = &config.start_url;
 
     let db = connect(&config)
         .await
@@ -141,15 +138,6 @@ async fn main() {
     }
     drop(span);
 
-    if let Ok(mut ok) = db.query("count(select id from website where crawled = true)").await {
-        let res = ok.take::<Option<usize>>(0);
-        if let Ok(i) = res {
-            if let Some(n) = i {
-                info!("Total crawled pages now equals {n}");
-            }
-        }
-    }
-
     info!("Done");
 }
 
```
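The final hunk deletes the end-of-run report outright: `db.query("count(...)")` is SurrealDB API and has no counterpart on a rusqlite `Connection`. A hedged sqlite equivalent, reusing the hypothetical `website` table from the db.rs sketches:

```rust
use rusqlite::Connection;
use tracing::{error, info};

fn report_total_crawled(db: &Connection) {
    // Hypothetical stand-in for the removed surreal count query.
    match db.query_row(
        "SELECT COUNT(*) FROM website WHERE crawled = TRUE",
        [],
        |row| row.get::<_, i64>(0),
    ) {
        Ok(n) => info!("Total crawled pages now equals {n}"),
        Err(e) => error!("failed to count crawled pages: {e}"),
    }
}
```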