Compare commits
2 commits: e4bf320ecd and fd71a8bc13

.gitignore (vendored, 2 changes)
@@ -4,5 +4,3 @@
perf.data
flamegraph.svg
perf.data.old
/docker/logs/*
/downloaded

.vscode/launch.json (vendored, 19 changes)
@@ -7,15 +7,18 @@
        {
            "type": "lldb",
            "request": "launch",
            "name": "Debug executable 'internet_mapper'",
            "name": "Debug executable 'surreal_spider'",
            "env": {
                "RUST_LOG": "surreal_spider=trace,reqwest=info",
            },
            "cargo": {
                "args": [
                    "build",
                    "--bin=internet_mapper",
                    "--package=internet_mapper"
                    "--bin=surreal_spider",
                    "--package=surreal_spider"
                ],
                "filter": {
                    "name": "internet_mapper",
                    "name": "surreal_spider",
                    "kind": "bin"
                }
            },
@@ -25,16 +28,16 @@
        {
            "type": "lldb",
            "request": "launch",
            "name": "Debug unit tests in executable 'internet_mapper'",
            "name": "Debug unit tests in executable 'surreal_spider'",
            "cargo": {
                "args": [
                    "test",
                    "--no-run",
                    "--bin=internet_mapper",
                    "--package=internet_mapper"
                    "--bin=surreal_spider",
                    "--package=surreal_spider"
                ],
                "filter": {
                    "name": "internet_mapper",
                    "name": "surreal_spider",
                    "kind": "bin"
                }
            },

.vscode/settings.json (vendored, 8 changes, file deleted)
@@ -1,8 +0,0 @@
{
    "cSpell.words": [
        "creds",
        "reqwest",
        "rustls",
        "surql",
    ]
}

Cargo.lock (generated, 2024 changes)
File diff suppressed because it is too large.

Cargo.toml (20 changes)
@@ -1,21 +1,17 @@
[package]
name = "internet_mapper"
name = "surreal_spider"
version = "0.1.0"
edition = "2021"

[dependencies]
base64 = "0.22.1"
futures-util = "0.3.31"
html5ever = "0.29"
metrics = "0.24.1"
metrics-exporter-prometheus = { version = "0.16.2", features=["http-listener"]}
html5ever = "0.29.0"
# minio = "0.1.0"
minio = {git="https://github.com/minio/minio-rs.git", rev = "c28f576"}
reqwest = { version = "0.12", features = ["gzip", "default", "rustls-tls", "stream"] }
reqwest = "0.12.9"
serde = { version = "1.0", features = ["derive"] }
surrealdb = "2.2"
tokio = { version="1.41.0", features = ["full"] }
toml = "0.8.20"
spider = { version = "2.21", features = ["sync"] }
surrealdb = "2.0"
tokio = { version="1.41", features = ["full"] }
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter", "local-time", "json"] }
url = { version = "2.5", features = ["serde"] }
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
url = { version = "2.5.3", features = ["serde"] }

Crawler.toml (14 changes, file deleted)
@@ -1,14 +0,0 @@
# Surreal config
surreal_url = "localhost:8000"
surreal_username = "root"
surreal_password = "root"
surreal_ns = "test"
surreal_db = "v1.20.3"

# Crawler config
crawl_filter = "https://ftpgeoinfo.msl.mt.gov/Data/Spatial/MSDI/Imagery/2023_NAIP/UTM_County_Mosaics/"
# crawl_filter = "https://oliveratkinson.net"
start_url = "https://ftpgeoinfo.msl.mt.gov/Data/Spatial/MSDI/Imagery/2023_NAIP/UTM_County_Mosaics/"
# start_url = "https://oliveratkinson.net"
budget = 100
batch_size = 5
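
This file is deserialized directly into the `Config` struct shown in the src/main.rs hunk further down. A minimal sketch of that load path, with field names taken from the file above (it mirrors the shown code rather than adding behavior):

```rust
use serde::Deserialize;

// Mirrors the Config struct and the Crawler.toml load from the src/main.rs hunk below.
#[derive(Deserialize)]
struct Config {
    surreal_url: String,
    surreal_username: String,
    surreal_password: String,
    surreal_ns: String,
    surreal_db: String,
    crawl_filter: String,
    start_url: String,
    budget: usize,
    batch_size: usize,
}

fn load_config() -> Config {
    // main.rs uses File::open + read_to_string; read_to_string is the one-call equivalent.
    let buf = std::fs::read_to_string("./Crawler.toml").expect("Failed to read Crawler.toml");
    toml::from_str(&buf).expect("Failed to parse Crawler.toml")
}
```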

README.md (50 changes)
@@ -2,54 +2,10 @@

Crawls sites, saving all the found links to a SurrealDB database. It then proceeds to take batches of 100 uncrawled links until the crawl budget is reached. It saves the data of each site in a MinIO bucket.

## How to use

1. Clone the repo and `cd` into it.
2. Build the repo with `cargo build -r`
3. Start the docker containers
    1. cd into the docker folder: `cd docker`
    2. Bring up the docker containers: `docker compose up -d`
4. From the project's root, edit the `Crawler.toml` file to your liking.
5. Run with `./target/release/internet_mapper`

You can view stats of the project at `http://<your-ip>:3000/dashboards`

```bash
# Untested script, but probably works
git clone https://git.oliveratkinson.net/Oliver/internet_mapper.git
cd internet_mapper

cargo build -r

cd docker
docker compose up -d
cd ..

$EDITOR Crawler.toml

./target/release/internet_mapper
```

### TODO

- [x] Domain filtering - prevent the crawler from going on alternate versions of wikipedia.
- [ ] Domain filtering - prevent the crawler from going on alternate versions of wikipedia.
- [ ] Conditionally save content - based on filename or file contents
- [x] GUI / TUI ? - Grafana
- [x] Better asynchronous getting of the sites. Currently it all happens serially.
- [x] Allow for storing asynchronously - dropping the "links to" logic fixes this need
- [x] Control crawler via config file (no recompilation needed)

3/17/25: Took >1hr to crawl 100 pages

3/19/25: Took 20min to crawl 1000 pages
This meant we stored 1000 pages, 142,997 urls, and 1,425,798 links between the two.

3/20/25: Took 5min to crawl 1000 pages

3/21/25: Took 3min to crawl 1000 pages

# About

[screenshot]

- [ ] GUI / TUI ?
- [ ] Better asynchronous getting of the sites. Currently it all happens serially.
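
The budgeted batch loop described above, and implemented in the src/main.rs hunk further down, reduces to a simple pattern. A self-contained sketch with a plain queue standing in for the SurrealDB uncrawled-links query (the example.com URLs are hypothetical):

```rust
// Budget/batch crawl skeleton: drain the queue in batches until the crawl
// budget is spent or nothing uncrawled remains. A real run replaces the
// queue with get_uncrawled_links() and the loop body with process().
fn main() {
    let budget = 100;
    let batch_size = 5;
    let mut queue: Vec<String> = (0..250).map(|i| format!("https://example.com/{i}")).collect();
    let mut crawled = 0;

    while crawled < budget && !queue.is_empty() {
        let take = batch_size.min(budget - crawled).min(queue.len());
        for url in queue.drain(..take) {
            // fetching and storing `url` would happen here
            let _ = url;
            crawled += 1;
        }
    }
    println!("crawled {crawled} pages");
}
```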

compose.yml (31 lines, new file)
@@ -0,0 +1,31 @@
services:
  surreal:
    image: surrealdb/surrealdb:latest-dev
    ports:
      - 8000:8000
    volumes:
      - ./.surrealdb/:/mydata
    command:
      - start
      - --log
      - debug
      - --user
      - root
      - --pass
      - root
      - rocksdb:/mydata/database.db
  minio:
    image: quay.io/minio/minio
    ports:
      - 9000:9000
      - 9001:9001
    environment:
      - MINIO_ROOT_USER=root
      - MINIO_ROOT_PASSWORD=an8charpassword
    volumes:
      - ./.minio/:/data
    command:
      - server
      - /data
      - --console-address
      - ":9001"

Deleted file: Grafana Alloy config (mounted as ./alloy.conf by the old docker compose)
@@ -1,14 +0,0 @@
local.file_match "tmplogs" {
    path_targets = [{"__path__" = "/tmp/alloy-logs/*.log"}]
}

loki.source.file "local_files" {
    targets    = local.file_match.tmplogs.targets
    forward_to = [loki.write.local_loki.receiver]
}

loki.write "local_loki" {
    endpoint {
        url = "http://loki:3100/loki/api/v1/push"
    }
}

Deleted file: the docker/ compose stack (68 lines)
@@ -1,68 +0,0 @@
services:
  surreal:
    image: surrealdb/surrealdb:latest-dev
    ports:
      - 8000:8000
    volumes:
      - surrealdb_storage:/mydata
    command:
      - start
      - --log
      - debug
      - --user
      - root
      - --pass
      - root
      - rocksdb:/mydata/database.db

  alloy:
    image: grafana/alloy:latest
    ports:
      - 12345:12345
    volumes:
      # if you change this, you also need to change it in the alloy config file
      - ./logs/:/tmp/alloy-logs
      - ./alloy.conf:/etc/alloy/config.alloy
      - alloy_storage:/var/lib/alloy
    command: run --server.http.listen-addr=0.0.0.0:12345 --storage.path=/var/lib/alloy/data /etc/alloy/config.alloy

  # logs
  loki:
    image: grafana/loki:latest
    ports:
      - 3100:3100
    command: -config.file=/etc/loki/local-config.yaml
    volumes:
      - ./loki.yaml:/etc/loki/local-config.yaml

  # Metrics collector
  prometheus:
    image: prom/prometheus:latest
    expose:
      - 9090
    volumes:
      - ./prometheus.yaml:/etc/prometheus/prometheus.yml
      # persist data
      - prometheus_storage:/prometheus
    command: --web.enable-lifecycle --config.file=/etc/prometheus/prometheus.yml

  # Everything viewer
  grafana:
    image: grafana/grafana:latest
    volumes:
      - ./grafana.yaml:/etc/grafana/provisioning/datasources/datasources.yaml
      - ./dashboards:/var/lib/grafana/dashboards
      - grafana_storage:/var/lib/grafana
    environment:
      - GF_AUTH_ANONYMOUS_ENABLED=true
      - GF_AUTH_ANONYMOUS_ORG_ROLE=Admin
      - GF_AUTH_DISABLE_LOGIN_FORM=true
      - GF_FEATURE_TOGGLES_ENABLE=traceqlEditor
    ports:
      - 3000:3000

volumes:
  prometheus_storage:
  grafana_storage:
  alloy_storage:
  surrealdb_storage:

Deleted file: Grafana dashboard "Crawler" (648-line JSON export, served from ./dashboards in the old docker compose)
@@ -1,648 +0,0 @@
File diff suppressed because it is too large. The removed dashboard (title "Crawler", uid ceg90x34pqgowd, schemaVersion 40, built for Grafana 11.3.1 against Prometheus and Loki datasources) defined:
- "Crawler stats": timeseries panel over surql_trips, s3_trips, pages_crawled, pages_being_processed, and gets_in_flight
- "Surreal stats": timeseries panel over surql_trips, surql_link_calls, surql_store_calls, and pages_being_processed
- "Time spend waiting on lock": stat panel in ms over surql_lock_waiting_ms, with the note "This is across all threads, so this isn't wall clock time"
- "Errors", "Debug", "Trace", and "Warnings": Loki logs panels querying {filename="/tmp/alloy-logs/tracing.log"} | json, filtered by level (ERROR, DEBUG, TRACE, WARN) and formatted with line_format on fields_message
- a default time range of now-5m to now, browser timezone, and an ad-hoc "Filters" variable on the Loki datasource

Deleted file: Grafana datasource provisioning (mounted as ./grafana.yaml by the old docker compose)
@@ -1,24 +0,0 @@
apiVersion: 1

datasources:
  - name: Loki
    type: loki
    access: proxy
    orgId: 1
    url: http://loki:3100
    basicAuth: false
    isDefault: true
    version: 1
    editable: false
  - name: Prometheus
    type: prometheus
    uid: prometheus
    access: proxy
    orgId: 1
    url: http://prometheus:9090
    basicAuth: false
    isDefault: false
    version: 1
    editable: false
    jsonData:
      httpMethod: GET

Deleted file: Loki config (mounted as ./loki.yaml by the old docker compose)
@@ -1,62 +0,0 @@
# this is mostly the default config from grafana's website

auth_enabled: false

server:
  http_listen_port: 3100
  grpc_listen_port: 9096
  log_level: info
  grpc_server_max_concurrent_streams: 1000

common:
  instance_addr: 127.0.0.1
  path_prefix: /tmp/loki
  storage:
    filesystem:
      chunks_directory: /tmp/loki/chunks
      rules_directory: /tmp/loki/rules
  replication_factor: 1
  ring:
    kvstore:
      store: inmemory

query_range:
  results_cache:
    cache:
      embedded_cache:
        enabled: true
        max_size_mb: 100

limits_config:
  metric_aggregation_enabled: true

schema_config:
  configs:
    - from: 2020-10-24
      store: tsdb
      object_store: filesystem
      schema: v13
      index:
        prefix: index_
        period: 24h

pattern_ingester:
  enabled: true
  metric_aggregation:
    loki_address: localhost:3100

frontend:
  encoding: protobuf

# By default, Loki will send anonymous, but uniquely-identifiable usage and configuration
# analytics to Grafana Labs. These statistics are sent to https://stats.grafana.org/
#
# Statistics help us better understand how Loki is used, and they show us performance
# levels for most users. This helps us prioritize features and documentation.
# For more information on what's sent, look at
# https://github.com/grafana/loki/blob/main/pkg/analytics/stats.go
# Refer to the buildReport method to see what goes into a report.
#
# If you would like to disable reporting, uncomment the following lines:
analytics:
  reporting_enabled: false

Deleted file: Prometheus config (mounted as ./prometheus.yaml by the old docker compose)
@@ -1,17 +0,0 @@
global:
  scrape_interval: 5s
  query_log_file: /etc/prometheus/query.log

scrape_configs:
  - job_name: crawler
    static_configs:
      # change this to your machine's ip, localhost won't work
      # because localhost refers to the docker container.
      - targets: ['172.20.239.48:2500']
      #- targets: ['192.168.8.209:2500']
  - job_name: loki
    static_configs:
      - targets: ['loki:3100']
  - job_name: prometheus
    static_configs:
      - targets: ['localhost:9090']

Deleted file: TypeScript config (filename not shown in the extraction)
@@ -1,16 +0,0 @@
{
  "compilerOptions": {
    "module": "ESNext",
    "moduleResolution": "Bundler",
    "target": "ES2022",
    "jsx": "react",
    "allowImportingTsExtensions": true,
    "strictNullChecks": true,
    "strictFunctionTypes": true
  },
  "exclude": [
    "node_modules",
    "**/node_modules/*"
  ],
  "typeAcquisition": {"include": ["firefox-webext-browser"]}
}

Deleted binary file (image, 264 KiB): binary file not shown.

src/db.rs (154 changes)
@@ -1,79 +1,123 @@
use metrics::counter;
use std::fmt::Debug;
use serde::{Deserialize, Serialize};
use surrealdb::{
    engine::remote::ws::{Client, Ws},
    opt::auth::Root,
    sql::Thing,
    Surreal,
    Response, Surreal,
};
use tracing::{error, instrument, trace};
use tracing::{error, instrument, trace, warn};
use url::Url;

use crate::Config;
use crate::{Config, Timer};

const STORE: &str = "surql_store_calls";

#[derive(Serialize, Deserialize, Clone, Eq, PartialEq, Hash)]
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Website {
    /// The url that this data is found at
    pub site: Url,
    /// Whether or not this link has been crawled yet
    pub crawled: bool,
}

// manual impl to make tracing look nicer
impl Debug for Website {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Website").field("site", &self.site).finish()
    }
    #[serde(skip_serializing)]
    id: Option<Thing>,
}

impl Website {
    /// Creates a blank site (assumes that url param is site's root)
    pub fn new(url: &str, crawled: bool) -> Self {
        let site = match Url::parse(url) {
            Ok(a) => a,
            Err(_) => todo!(),
        };
    pub fn new(url: Url, crawled: bool) -> Self {
        Self {
            id: None,
            crawled,
            site
            site: url,
        }
    }

    pub fn set_crawled(&mut self) {
        self.crawled = true
    }

    // Insert every item in the vec into surreal; crawled state will be preserved as TRUE
    // if already in the database as such or incoming data is TRUE.
    #[instrument(skip(db))]
    pub async fn store_all(all: Vec<Self>, db: &Surreal<Client>) -> Vec<Thing> {
        counter!(STORE).increment(1);
        let mut things = Vec::with_capacity(all.len());
    #[instrument(skip_all)]
    pub async fn links_to(&self, other: Vec<Thing>, db: &Surreal<Client>) {
        let len = other.len();
        if len == 0 {return}

        let from = self.site.to_string();
        let msg = format!("Linked {len} pages");
        let timer = Timer::start(&msg);
        // prevent the timer from being dropped instantly.
        let _ = timer;
        match db
            .query(
                "INSERT INTO website $array
                    ON DUPLICATE KEY UPDATE
                        accessed_at = time::now(),
                        crawled = crawled OR $input.crawled
                    RETURN VALUE id;
                ",
            )
            .bind(("array", all))
            .query("COUNT(RELATE (SELECT id FROM website WHERE site = $in) -> links_to -> $out)")
            .bind(("in", from))
            .bind(("out", other))
            .await
        {
            Ok(mut id) => match id.take::<Vec<Thing>>(0) {
                Ok(mut x) => things.append(&mut x),
                Err(err) => error!("{:?}", err),
            Ok(mut e) => {
                // The relate could technically "fail" (not relate anything), this just means that
                // the query was ok.
                let _: Response = e;
                if let Ok(vec) = e.take(0) {
                    let _: Vec<usize> = vec;
                    if let Some(num) = vec.get(0) {
                        if *num == len {
                            return;
                        } else {
                            warn!("Didn't link all the records. {num}/{len}");
                            return;
                        }
                    }
                }
                warn!("Linking request succeeded but couldn't verify the results.");
            },
            Err(e) => {
                error!("{}", e.to_string());
            },
            Err(err) => {
                error!("{:?}", err);
            }
        }
        things

    #[instrument(skip_all)]
    pub async fn store(&mut self, db: &Surreal<Client>) -> Option<Thing> {
        // check if it's been gone through before
        let mut response = db
            .query("SELECT * FROM ONLY website WHERE site = $site LIMIT 1")
            .bind(("site", self.site.to_string()))
            .await
            .unwrap();

        if let Some(old) = response.take::<Option<Website>>(0).unwrap() {
            // site exists already
            if let Some(id) = old.id {
                // make sure to preserve the "crawled" status
                let mut new = self.clone();
                new.crawled = old.crawled | new.crawled;

                // update the record
                match db.upsert((id.tb, id.id.to_string())).content(new).await {
                    Ok(e) => {
                        if let Some(a) = e {
                            let _: Record = a;
                            return Some(a.id);
                        }
                    }
                    Err(e) => {
                        error!("{}", e);
                    }
                };
            }
        } else {
            // site doesn't exist yet
            match db.create("website").content(self.clone()).await {
                Ok(e) => {
                    let _: Option<Record> = e;
                    if let Some(a) = e {
                        let _: Record = a;
                        return Some(a.id);
                    }
                }
                Err(a) => error!("{:?}", a),
            };
        }
        None
    }
}

impl ToString for Website {
    fn to_string(&self) -> String {
        self.site.to_string()
    }
}

@@ -90,31 +134,29 @@ pub struct Record
}

#[instrument(skip_all, name = "SurrealDB")]
pub async fn connect(config: &Config) -> surrealdb::Result<Surreal<Client>> {
pub async fn connect(config: &Config<'_>) -> surrealdb::Result<Surreal<Client>> {
    trace!("Establishing connection to surreal...");
    // Connect to the server
    let db = Surreal::new::<Ws>(&config.surreal_url).await?;
    let db = Surreal::new::<Ws>(config.surreal_url).await?;

    trace!("Logging in...");
    // Signin as a namespace, database, or root user
    db.signin(Root {
        username: &config.surreal_username,
        password: &config.surreal_password,
        username: config.surreal_username,
        password: config.surreal_password,
    })
    .await?;

    // Select a specific namespace / database
    db.use_ns(&config.surreal_ns)
        .use_db(&config.surreal_db)
    db
        .use_ns(config.surreal_ns)
        .use_db(config.surreal_db)
        .await?;

    let setup = include_bytes!("setup.surql");
    let file = setup.iter().map(|c| *c as char).collect::<String>();

    db.query(file)
        .await
        .expect("Failed to setup surreal tables.");
    db.query(file).await.expect("Failed to setup surreal tables.");

    Ok(db)
}
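
One detail worth noting in store_all above: the upsert clause `crawled = crawled OR $input.crawled` makes the crawled flag sticky, so re-inserting an already-crawled site with crawled = false never resets it. A self-contained illustration of that merge rule in plain Rust, with no database involved:

```rust
// The "sticky crawled" rule from store_all's upsert, shown in plain Rust:
// an existing record's crawled flag survives a re-insert of the same site
// with crawled = false.
struct Row { site: String, crawled: bool }

fn upsert(table: &mut Vec<Row>, incoming: Row) {
    match table.iter_mut().find(|r| r.site == incoming.site) {
        // ON DUPLICATE KEY UPDATE: crawled = crawled OR $input.crawled
        Some(existing) => existing.crawled = existing.crawled || incoming.crawled,
        None => table.push(incoming),
    }
}

fn main() {
    let mut table = Vec::new();
    upsert(&mut table, Row { site: "https://example.com/".into(), crawled: true });
    upsert(&mut table, Row { site: "https://example.com/".into(), crawled: false });
    assert!(table[0].crawled); // still true: OR keeps the crawled state
}
```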

Deleted file: src/filesystem.rs (matching the `mod filesystem` removed from main.rs)
@@ -1,66 +0,0 @@
use std::{io::ErrorKind, path::PathBuf};

use reqwest::header::HeaderValue;
use tokio::fs;
use tracing::{error, trace, warn};
use url::Url;

pub fn as_path(url: &Url, content_type: &HeaderValue) -> PathBuf {
    // extract data from url to save it accurately
    let mut url_path = PathBuf::from("./downloaded/".to_string() + url.domain().unwrap_or("UnknownDomain") + url.path());

    if let Ok(header) = content_type.to_str() {
        // text/html; charset=UTF-8; option=value
        let ttype = if let Some((t, _)) = header.split_once(';') {
            t
        } else {
            header
        };

        if let Some((ttype, subtype)) = ttype.split_once('/') {
            trace!("Found Content-Type to be: {ttype}/{subtype} for {}", url.to_string());
            // If the Content-Type header is "*/html" (most likely "text/html") and the path's
            // extension is anything but html:
            if subtype=="html" && !url_path.extension().is_some_and(|f| f=="html" || f=="htm" ) {
                // time to slap an index.html on the end of that path there!
                url_path = url_path.join("index.html");
            }
        }
    } else {
        warn!("Header: {:?} couldn't be parsed into a string!", content_type);
    }
    trace!("Final path for {} is: {:?}", url, url_path);

    url_path
}

pub async fn init(filename: &PathBuf) -> Option<fs::File> {
    let file = async || tokio::fs::OpenOptions::new()
        .append(true)
        .create(true)
        .open(&filename).await;

    match file().await {
        Ok(ok) => Some(ok),
        Err(err) => {
            // the file/folder isn't found
            if err.kind() == ErrorKind::NotFound {
                if let Some(parent) = &filename.parent() {
                    // create the folders
                    if let Err(err) = fs::create_dir_all(&parent).await {
                        error!("Dir creation: {err} {:?}", filename);
                        eprintln!("{}", err)
                    } else if let Ok(ok) = file().await {
                        return Some(ok);
                    }
                } else {
                    error!("Couldn't get file's parents: {:?}", &filename);
                }
            } else {
                error!("File open error: {err} {:?}", filename);
            }
            // we don't care about other errors, we can't/shouldn't fix them
            None
        }
    }
}
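
The as_path rule above deserves a concrete example: HTML responses whose URL path has no .html/.htm extension get index.html appended, so directory-style URLs store cleanly on disk. A trimmed, self-contained re-implementation of just that rule (paths and domains are hypothetical):

```rust
use std::path::PathBuf;

// Condensed version of as_path's extension logic: html responses whose path
// lacks an .html/.htm extension get index.html appended before storage.
fn as_path_sketch(domain: &str, url_path: &str, content_subtype: &str) -> PathBuf {
    let mut p = PathBuf::from(format!("./downloaded/{domain}{url_path}"));
    let is_htmlish = p.extension().is_some_and(|e| e == "html" || e == "htm");
    if content_subtype == "html" && !is_htmlish {
        p = p.join("index.html");
    }
    p
}

fn main() {
    assert_eq!(
        as_path_sketch("example.com", "/blog/post", "html"),
        PathBuf::from("./downloaded/example.com/blog/post/index.html")
    );
    assert_eq!(
        as_path_sketch("example.com", "/logo.png", "png"),
        PathBuf::from("./downloaded/example.com/logo.png")
    );
}
```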

src/main.rs (340 changes)
@@ -1,275 +1,126 @@
#![feature(ip_from)]
#![feature(async_closure)]
#![warn(clippy::expect_used)]
#![deny(clippy::unwrap_used)]

extern crate html5ever;

use futures_util::StreamExt;

use std::{
    collections::HashSet,
    fs::File,
    io::Read,
    net::{IpAddr, Ipv4Addr},
};
use std::time::Instant;

use db::{connect, Website};
use metrics::{counter, gauge};
use metrics_exporter_prometheus::PrometheusBuilder;
use serde::Deserialize;
use parser::parse;
use s3::S3;
use surrealdb::{engine::remote::ws::Client, Surreal};
use tokio::{io::{AsyncWriteExt, BufWriter}, task::JoinSet};
use tracing::{debug, error, info, instrument, level_filters::LevelFilter, trace, trace_span, warn};
use tracing_subscriber::{fmt, layer::SubscriberExt, EnvFilter, Layer, Registry};
use tokio::sync::broadcast::Receiver;
use tracing::{debug, info, trace, trace_span};
use tracing_subscriber::EnvFilter;
use url::Url;

mod db;
mod filesystem;
mod parser;
mod s3;

const GET_METRIC: &str = "total_gets";
const GET_IN_FLIGHT: &str = "gets_in_flight";
const SITES_CRAWLED: &str = "pages_crawled";
const BEING_PROCESSED: &str = "pages_being_processed";
struct Config<'a> {
    surreal_ns: &'a str,
    surreal_db: &'a str,
    surreal_url: &'a str,
    surreal_username: &'a str,
    surreal_password: &'a str,

#[derive(Deserialize)]
struct Config {
    surreal_ns: String,
    surreal_db: String,
    surreal_url: String,
    surreal_username: String,
    surreal_password: String,

    crawl_filter: String,
    start_url: String,
    budget: usize,
    batch_size: usize,
    s3_url: &'a str,
    s3_bucket: &'a str,
    s3_access_key: &'a str,
    s3_secret_key: &'a str,
}

#[tokio::main]
async fn main() {
    println!("Logs and metrics are provided to the Grafana dashboard");

    let writer = std::fs::OpenOptions::new()
        .append(true)
        .create(true)
        .open("./docker/logs/tracing.log")
        .expect("Couldn't make log file!");

    let filter = EnvFilter::builder()
        .with_default_directive(LevelFilter::DEBUG.into())
        .from_env_lossy();

    let registry = Registry::default().with(
        fmt::layer()
    let total_runtime = Timer::start("Completed");
    tracing_subscriber::fmt()
        .with_env_filter(EnvFilter::from_default_env())
            .with_line_number(true)
            .with_thread_ids(true)
            .with_file(true)
            .json()
            .with_writer(writer)
            .with_filter(filter)
    );
        // .without_time()
        .init();
    debug!("Starting...");

    tracing::subscriber::set_global_default(registry).expect("Failed to set default subscriber");
    let config = Config {
        surreal_url: "localhost:8000",
        surreal_username: "root",
        surreal_password: "root",
        surreal_ns: "test",
        surreal_db: "spider",
        s3_bucket: "spider",
        s3_url: "http://localhost:9000",
        s3_access_key: "0zv7GbLQsw4ZI8TclMps",
        s3_secret_key: "5dB7QkGFw7fYbUJ5LpHk2GbWR7Bl710HlRz4NbzB",
    };

    let builder = PrometheusBuilder::new();
    builder
        .with_http_listener(std::net::SocketAddr::new(
            IpAddr::V4(Ipv4Addr::from_octets([0, 0, 0, 0])),
            2500,
        ))
        .install()
        .expect("failed to install recorder/exporter");

    info!("Starting...");

    // When getting uncrawled pages, name must contain this variable. "" will effectively get ignored.
    // let crawl_filter = "en.wikipedia.org/";
    // let budget = 50;
    let mut crawled = 0;

    let mut file = File::open("./Crawler.toml").expect("Failed to read Crawler.toml");
    let mut buf = String::new();
    let _ = file.read_to_string(&mut buf);

    let config: Config = toml::from_str(&buf).expect("Failed to parse Crawler.toml");
    let starting_url = &config.start_url;
    // Would probably take these in as parameters from a cli
    let starting_url = "https://oliveratkinson.net/";

    let s3 = S3::connect(&config)
        .await
        .expect("Failed to connect to minio, aborting.");
    let db = connect(&config)
        .await
        .expect("Failed to connect to surreal, aborting.");

    let reqwest = reqwest::Client::builder()
        // .use_rustls_tls()
        .gzip(true)
    let mut site = spider::website::Website::new(&starting_url)
        .with_limit(5)
        .with_depth(0)
        .build()
        .expect("Failed to build reqwest client.");
        .unwrap();

    // Kick off the whole machine - This Website object doesn't matter, it's just to allow for
    // get() to work.
    let span = trace_span!("Pre-Loop");
    let pre_loop_span = span.enter();
    // Download the site
    let site = Website::new(starting_url, false);
    process(site, db.clone(), reqwest.clone()).await;
    let mut rx: Receiver<spider::page::Page> = site.subscribe(0).unwrap();

    drop(pre_loop_span);

    let span = trace_span!("Loop");
    let subscriber = tokio::spawn(async move {
        let span = trace_span!("Sub");
        let span = span.enter();
    while crawled < config.budget {
        let uncrawled =
            get_uncrawled_links(&db, config.budget - crawled, config.crawl_filter.clone(), &config).await;
        if uncrawled.is_empty() {
            info!("Had more budget but finished crawling everything.");
            return;
        }
        while let Ok(res) = rx.recv().await {
            // Get body
            let data = res.get_html();
            let url = Url::parse(res.get_url()).unwrap();

        {
            let mut futures = JoinSet::new();
            for site in uncrawled {
                gauge!(BEING_PROCESSED).increment(1);
                futures.spawn(process(site, db.clone(), reqwest.clone()));
                // let percent = format!("{:.2}%", (crawled as f32 / budget as f32) * 100f32);
                // info!("Crawled {crawled} out of {budget} pages. ({percent})");
            }
            trace!("Got '{}'", url.to_string());
            // Store document
            s3.store(&data, &url).await;

            let c = counter!(SITES_CRAWLED);
            // As futures complete, the body of this while block runs
            while futures.join_next().await.is_some() {
                c.increment(1);
                gauge!(BEING_PROCESSED).decrement(1);
                crawled += 1;
            // Parse document and store relationships
            let mut page = Website::new(url, true);
            page.store(&db).await;

            // Relate this page to all the pages it links to
            let span = trace_span!("Linking");
            let span = span.enter();
            let found_links = parse(&page, data).await;
            let mut stored_links = Vec::new();
            for mut link in found_links {
                if let Some(id) = link.store(&db).await {
                    stored_links.push(id);
                }
            }
            page.links_to(stored_links, &db).await;
            drop(span);
        }
        drop(span);
    });

    if let Ok(mut ok) = db
        .query("count(select id from website where crawled = true)")
        .await
    {
        let res = ok.take::<Option<usize>>(0);
        if let Ok(Some(n)) = res {
            info!("Total crawled pages now equals {n}");
        }
    }
    let timer = Timer::start("Crawled");

    site.crawl().await;
    site.unsubscribe();

    drop(timer);

    subscriber.await.unwrap();

    info!("Done");
}

#[instrument(skip(db, reqwest))]
/// Downloads, crawls, and stores a webpage.
/// It is acceptable to clone `db`, `reqwest`, and `s3` because they all use `Arc`s internally. - Noted by Oliver
async fn process(mut site: Website, db: Surreal<Client>, reqwest: reqwest::Client) {
    // METRICS
    trace!("Process: {}", &site.site);
    // Build the request
    let request_builder = reqwest.get(site.site.to_string());

    // METRICS
    let g = gauge!(GET_IN_FLIGHT);
    g.increment(1);

    // Send the http request (get)
    if let Ok(response) = request_builder.send().await {
        // Get body from response
        let headers = response.headers();

        #[allow(non_snake_case)]
        let CT = headers.get("Content-Type");
        let ct = headers.get("content-type");

        if CT.is_none() && ct.is_none() {
        }
        let ct = match (CT, ct) {
            (None, None) => {
                warn!("Server did not respond with Content-Type header. Url: {} Headers: ({:?})", site.site.to_string(), headers);
                return
            },
            (None, Some(a)) => a,
            (Some(a), None) => a,
            (Some(a), Some(_)) => a,
        };

        let path = filesystem::as_path(&site.site, ct);

        // make sure that the file is good to go
        if let Some(file) = filesystem::init(&path).await {
            let should_parse = path.to_string_lossy().ends_with(".html");
            let mut buf: Vec<u8> = Vec::new();

            let mut writer = BufWriter::new(file);

            // stream the response onto the disk
            let mut stream = response.bytes_stream();

            info!("Writing at: {:?}", path);
            while let Some(data) = stream.next().await {
                match data {
                    Ok(data) => {
                        let _ = writer.write_all(&data).await;
                        // If we are going to parse this file later, we will save it
                        // into memory as well as the disk.
                        if should_parse {
                            data.iter().for_each(|f| buf.push(*f));
                        }
                    },
                    Err(err) => {
                        eprintln!("{}", err)
                    },
                }
            }
            let _ = writer.flush();

            if should_parse {
                // Parse document and get relationships
                let sites = parser::parse(&site, &buf).await;
                // De-duplicate this list
                let prev_len = sites.len();
                let set = sites.into_iter().fold(HashSet::new(), |mut set, item| {
                    set.insert(item);
                    set
                });
                let de_dupe_sites: Vec<Website> = set.into_iter().collect();
                let diff = prev_len - de_dupe_sites.len();
                trace!("Saved {diff} from being entered into the db by de-duping");
                // Store all the other sites so that we can link to them.
                let _ = Website::store_all(de_dupe_sites, &db).await;
            }

            // METRICS
            g.decrement(1);
            counter!(GET_METRIC).increment(1);

            // update self in db
            site.set_crawled();
            Website::store_all(vec![site], &db).await;
        } else {
            error!("File failed to cooperate: {:?}", path);
        }
    } else {
        error!("Failed to get: {}", &site.site);
    }
    drop(total_runtime);
}

/// Returns uncrawled links
#[instrument(skip(db, config))]
async fn get_uncrawled_links(
    db: &Surreal<Client>,
    mut count: usize,
    filter: String,
    config: &Config,
) -> Vec<Website> {
    if count > config.batch_size {
        count = config.batch_size;
async fn get_uncrawled_links(db: &Surreal<Client>, mut count: usize) -> Vec<Website> {
    if count > 100 {
        count = 100
    }

    debug!("Getting uncrawled links");

    let mut response = db
        .query("SELECT * FROM website WHERE crawled = false AND site ~ type::string($format) LIMIT $count;")
        .bind(("format", filter))
        .query("SELECT * FROM website WHERE crawled = false LIMIT $count")
        .bind(("count", count))
        .await
        .expect("Hard-coded query failed..?");
@@ -277,3 +128,30 @@ async fn get_uncrawled_links
        .take(0)
        .expect("Returned websites couldn't be parsed")
}

pub struct Timer<'a> {
    start: Instant,
    msg: &'a str,
}

impl<'a> Timer<'a> {
    #[inline]
    pub fn start(msg: &'a str) -> Self {
        Self {
            start: Instant::now(),
            msg,
        }
    }
    pub fn stop(&self) -> f64 {
        let dif = self.start.elapsed().as_micros();
        let ms = dif as f64 / 1000.;
        debug!("{}", format!("{} in {:.3}ms", self.msg, ms));
        ms
    }
}

impl Drop for Timer<'_> {
    fn drop(&mut self) {
        self.stop();
    }
}
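
The Timer added at the end of the new main.rs is a small RAII helper: stop() runs from Drop, so holding a Timer for a scope logs how long that scope took. A self-contained sketch of the same pattern, with println! standing in for the real tracing debug! call:

```rust
use std::time::Instant;

// RAII timing in the style of the Timer at the end of src/main.rs:
// stop() fires on drop, so elapsed time is logged when the value
// leaves scope.
struct Timer<'a> { start: Instant, msg: &'a str }

impl<'a> Timer<'a> {
    fn start(msg: &'a str) -> Self { Self { start: Instant::now(), msg } }
    fn stop(&self) -> f64 {
        let ms = self.start.elapsed().as_micros() as f64 / 1000.0;
        println!("{} in {ms:.3}ms", self.msg);
        ms
    }
}

impl Drop for Timer<'_> {
    fn drop(&mut self) { self.stop(); }
}

fn main() {
    let _t = Timer::start("Crawled"); // logs when main returns
    std::thread::sleep(std::time::Duration::from_millis(25));
}
```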

src/parser.rs (131 changes)
@@ -1,24 +1,27 @@
use std::default::Default;
use std::str::FromStr;

use html5ever::tokenizer::{BufferQueue, TokenizerResult};
use html5ever::tokenizer::{StartTag, TagToken};
use html5ever::tokenizer::{Token, TokenSink, TokenSinkResult, Tokenizer, TokenizerOpts};
use html5ever::{local_name, tendril::*};
use tracing::{debug, error, instrument, trace, warn};
use url::Url;
use tracing::{instrument, trace};

use crate::db::Website;

impl TokenSink for Website {
#[derive(Clone)]
struct LinkParser<'a> {
    site: &'a Website,
}

impl TokenSink for LinkParser<'_> {
    type Handle = Vec<Website>;

    #[instrument(skip(token, _line_number))]
    fn process_token(&self, token: Token, _line_number: u64) -> TokenSinkResult<Self::Handle> {
        match token {
            TagToken(tag) => {
                if tag.kind == StartTag {
                    match tag.name {
                        // this should be all the html elements that have links
                        local_name!("a")
                        | local_name!("audio")
                        | local_name!("area")
@@ -33,18 +36,21 @@ impl TokenSink for Website
                                let attr_name = attr.name.local.to_string();
                                if attr_name == "src" || attr_name == "href" || attr_name == "data"
                                {
                                    trace!("Found `{}` in html `{}` tag", &attr.value, tag.name);
                                    let url = try_get_url(&self.site, &attr.value);
                                    // Get clone of the current site object
                                    let mut web = self.site.clone();

                                    // Set url
                                    let mut url = web.site;
                                    url.set_fragment(None); // removes #xyz
                                    let joined = url.join(&attr.value).unwrap();
                                    web.site = joined;

                                    web.crawled = false;

                                    if let Some(mut parsed) = url {
                                        parsed.set_query(None);
                                        parsed.set_fragment(None);
                                        trace!("Final cleaned URL: `{}`", parsed.to_string());
                                        let web = Website::new(&parsed.to_string(), false);
                                        links.push(web);
                                    }
                                }
                            }

                            return TokenSinkResult::Script(links);
                        }
                        local_name!("button") | local_name!("meta") | local_name!("iframe") => {
@@ -60,92 +66,27 @@ impl TokenSink for Website
        }
    }
}

#[instrument(skip_all)]
/// Parses the passed site and returns all the sites it links to.
pub async fn parse(site: &Website, data: &[u8]) -> Vec<Website> {
    debug!("Parsing {}", site.site.to_string());
    // prep work
    let mut other_sites: Vec<Website> = Vec::new();
pub async fn parse(site: &Website, data: String) -> Vec<Website> {

    // change data into something that can be tokenized
    let s: Result<Tendril<fmt::UTF8>, ()> = Tendril::try_from_byte_slice(data);
    if let Ok(chunk) = s {
        // create buffer of tokens and push our input into it
        let token_buffer = BufferQueue::default();
        token_buffer.push_back(
            chunk
                .try_reinterpret::<fmt::UTF8>()
                .expect("Failed to reinterpret chunk!"),
        );
        // create the tokenizer
        let tokenizer = Tokenizer::new(site.clone(), TokenizerOpts::default());
    let sink = LinkParser { site };
    let chunk = Tendril::from_str(&data).unwrap();
    let mut input = BufferQueue::default();
    input.push_back(chunk.try_reinterpret::<fmt::UTF8>().unwrap());

        // go through the buffer
        while let TokenizerResult::Script(mut sites) = tokenizer.feed(&token_buffer) {
            other_sites.append(&mut sites);
            // other_sites.push(sites);
    let token = Tokenizer::new(sink.clone(), TokenizerOpts::default());

    // let mut links_to = Vec::new();
    let mut res = Vec::new();
    while !input.is_empty() {
        if let TokenizerResult::Script(mut s) = token.feed(&mut input) {
            res.append(&mut s);
        }
        assert!(token_buffer.is_empty());
        tokenizer.end();
    } else {
        warn!("Tendril failed to parse on: {}", site.site.to_string());
    }
    trace!("Found {} links.", res.len());

    other_sites
}

#[instrument]
fn try_get_url(parent: &Url, link: &str) -> Option<Url> {
    match Url::parse(link) {
        Ok(ok) => Some(ok),
        Err(e) => {
            if link.starts_with('#') {
                trace!("Rejecting # url");
                None
            } else if link.starts_with("//") {
                // if a url starts with "//" it is assumed that it will adopt
                // the same scheme as its parent
                // https://stackoverflow.com/questions/9646407/two-forward-slashes-in-a-url-src-href-attribute
                let scheme = parent.scheme();

                match Url::parse(&format!("{scheme}://{}", link)) {
                    Ok(url) => Some(url),
                    Err(err) => {
                        error!("Failed parsing relative scheme url: {}", err);
                        None
                    }
                }
            } else {
                // This is some sort of relative url; try patching it up into an
                // absolute url
                match e {
                    url::ParseError::RelativeUrlWithoutBase => {
                        // Is: scheme://host:port
                        let mut origin = parent.origin().ascii_serialization();
                        if !origin.ends_with('/') && !link.starts_with('/') {
                            origin += "/";
                        }
                        let url = origin.clone() + link;

                        if let Ok(url) = Url::parse(&url) {
                            trace!("Built `{url}` from `{origin}` + `{}`", link.to_string());
                            Some(url)
                        } else {
                            error!(
                                "Failed to reconstruct a url from relative url: `{}` on site: `{}`. Failed url was: {}",
                                link,
                                parent.to_string(),
                                url
                            );
                            None
                        }
                    }
                    _ => {
                        error!("MISC error: {:?} {:?}", e, link);
                        None
                    }
                }
            }
        }
    }
}
    assert!(input.is_empty());
    token.end();
    res
}
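
Both sides of this diff solve the same problem of turning href/src attribute values into absolute URLs: the removed try_get_url handles `#`, scheme-relative `//`, and path-relative links by hand, while the added code relies on Url::join, which covers the same cases. A self-contained check of that equivalence with the url crate (the example.com URLs are hypothetical):

```rust
use url::Url;

// Url::join resolves the cases try_get_url handled manually: absolute URLs,
// scheme-relative "//host/..." links, and path-relative links.
fn main() {
    let parent = Url::parse("https://example.com/a/b.html").unwrap();

    // path-relative link
    assert_eq!(parent.join("c.html").unwrap().as_str(), "https://example.com/a/c.html");

    // scheme-relative link adopts the parent's scheme
    assert_eq!(parent.join("//cdn.example.com/x.js").unwrap().as_str(), "https://cdn.example.com/x.js");

    // fragment-only link resolves to the same document once the fragment
    // is stripped, as the parser does before storing
    let mut frag = parent.join("#section").unwrap();
    frag.set_fragment(None);
    assert_eq!(frag.as_str(), "https://example.com/a/b.html");
}
```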

src/s3.rs (76 lines, new file)
@@ -0,0 +1,76 @@
use minio::s3::{
    args::{BucketExistsArgs, MakeBucketArgs}, client::ClientBuilder, creds::StaticProvider, error::Error, http::BaseUrl, types::S3Api, Client
};
use tracing::{instrument, trace};
use url::Url;

use crate::Config;

pub struct S3 {
    bucket_name: String,
    client: Client,
}

impl S3 {
    #[instrument(skip_all, name = "S3")]
    pub async fn connect(config: &Config<'_>) -> Result<Self, Error> {
        let base_url = config.s3_url.parse::<BaseUrl>().unwrap();

        let static_provider =
            StaticProvider::new(&config.s3_access_key, &config.s3_secret_key, None);

        let client = ClientBuilder::new(base_url)
            .provider(Some(Box::new(static_provider)))
            .build()?;

        trace!("Checking bucket...");
        let exists = client
            .bucket_exists(&BucketExistsArgs::new(&config.s3_bucket).unwrap())
            .await?;

        if !exists {
            trace!("Creating bucket...");
            client
                .make_bucket(&MakeBucketArgs::new(&config.s3_bucket).unwrap())
                .await?;
        }

        trace!("Connection successful");

        Ok(Self {
            bucket_name: config.s3_bucket.to_owned(),
            client: client,
        })
    }

    pub async fn store(&self, data: &str, name: &Url) {
        if let Some(domain) = name.domain() {
            let filename = domain.to_string() + name.path();

            let _ = &self
                .client
                .put_object_content(&self.bucket_name, &filename, data.to_owned())
                .send()
                .await
                .unwrap();
        }
    }

    pub async fn _get(&self, name: &Url) -> Option<String> {
        if let Some(domain) = name.domain() {
            let filename = domain.to_string() + name.path();

            let data = self
                .client
                .get_object(&self.bucket_name, &filename)
                .send()
                .await
                .unwrap();

            if let Ok(segments) = data.content.to_segmented_bytes().await {
                return Some(segments.to_bytes().iter().map(|c| *c as char).collect::<String>())
            }
        }
        None
    }
}
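
How main.rs wires this type up, sketched under the assumption of a running MinIO and the Config values from the old main.rs hunk (this fragment leans on the S3 type above, so it is not standalone):

```rust
use url::Url;

// Sketch only: assumes the S3 wrapper defined above and a reachable MinIO
// configured with the s3_url / s3_bucket / key fields from Config.
async fn store_example(s3: &S3) {
    // store() keys the object by domain + path, so this page body lands at
    // "oliveratkinson.net/index.html" inside the configured bucket.
    let url = Url::parse("https://oliveratkinson.net/index.html").unwrap();
    s3.store("<html>...</html>", &url).await;
}
```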

src/setup.surql (the file loaded by `include_bytes!("setup.surql")` in db.rs)
@@ -1,9 +1,2 @@
DEFINE TABLE IF NOT EXISTS website SCHEMALESS;

DEFINE FIELD IF NOT EXISTS site ON TABLE website TYPE string;
DEFINE INDEX IF NOT EXISTS idx ON TABLE website COLUMNS site UNIQUE;

DEFINE FIELD IF NOT EXISTS crawled ON TABLE website TYPE bool;

DEFINE FIELD IF NOT EXISTS accessed_at ON TABLE website VALUE time::now();
DEFINE FIELD IF NOT EXISTS first_accessed_at ON TABLE website VALUE time::now();