Compare commits

17 Commits

SHA1 Message Date
4989a59ddf checkpoint 2025-07-10 18:46:25 -06:00
6fc71c7a78 add speed improvements 2025-03-21 12:14:29 -06:00
96a3ca092a :) 2025-03-21 12:11:05 -06:00
b750d88d48 working filesystem storage 2025-03-21 11:42:43 -06:00
808790a7c3 file patch; 2025-03-21 07:11:51 +00:00
2de01b2a0e remove removed code 2025-03-21 06:48:39 +00:00
be0fd5505b i think the files work better 2025-03-21 06:48:17 +00:00
a23429104c dead code removal 2025-03-21 06:03:34 +00:00
66581cc453 getting there 2025-03-21 05:59:40 +00:00
7df19a480f updates 2025-03-20 15:11:01 -06:00
b9c1f0b492 readme updates 2025-03-19 15:05:32 -06:00
71b7b2d7bc it works and it is awesome 2025-03-19 15:04:00 -06:00
bac3cd9d1d add most recent long run 2025-03-19 15:03:49 -06:00
1f6a0acce3 shutup spellchecker 2025-03-19 15:03:39 -06:00
53dbf53ab9 newest settings 2025-03-19 15:03:24 -06:00
0477bb26e4 viz improvements 2025-03-19 15:03:11 -06:00
6409baaffb Reducted trips to surreal by x500 2025-03-19 12:41:08 -06:00
17 changed files with 1005 additions and 2908 deletions

1 .gitignore vendored

@@ -5,3 +5,4 @@ perf.data
 flamegraph.svg
 perf.data.old
 /docker/logs/*
+/downloaded

8 .vscode/settings.json vendored Normal file

@@ -0,0 +1,8 @@
{
"cSpell.words": [
"creds",
"reqwest",
"rustls",
"surql"
]
}

2311 Cargo.lock generated

File diff suppressed because it is too large

Cargo.toml

@@ -1,5 +1,5 @@
 [package]
-name = "surreal_spider"
+name = "internet_mapper"
 version = "0.1.0"
 edition = "2021"
@@ -11,8 +11,8 @@ metrics-exporter-prometheus = { version = "0.16.2", features=["http-listener"]}
 # minio = "0.1.0"
 minio = {git="https://github.com/minio/minio-rs.git", rev = "c28f576"}
 reqwest = { version = "0.12", features = ["gzip", "default", "rustls-tls"] }
+rusqlite = { version = "0.34.0", features = ["bundled"] }
 serde = { version = "1.0", features = ["derive"] }
-surrealdb = "2.2"
 tokio = { version="1.41.0", features = ["full"] }
 toml = "0.8.20"
 tracing = "0.1"

Crawler.toml

@@ -3,14 +3,8 @@ surreal_url = "localhost:8000"
 surreal_username = "root"
 surreal_password = "root"
 surreal_ns = "test"
-surreal_db = "v1.12"
+surreal_db = "v1.19.2"

-# Minio config
-s3_bucket = "v1.12"
-s3_url = "http://localhost:9000"
-s3_access_key = "jLDPKGuu513VENc8kJwX"
-s3_secret_key = "4T1nymEzsGYOlKSAb1WX7V3stnQn9a5ZoTQjDfcL"

 # Crawler config
 crawl_filter = "en.wikipedia.com"
-budget = 200
+budget = 1000

README.md

@@ -2,10 +2,24 @@
 Crawls sites saving all the found links to a surrealdb database. It then proceeds to take batches of 100 uncrawled links until the crawl budget is reached. It saves the data of each site in a minio database.

 ### TODO
 - [ ] Domain filtering - prevent the crawler from going on alternate versions of wikipedia.
 - [ ] Conditionally save content - based on filename or file contents
-- [ ] GUI / TUI ?
-- [ ] Better asynchronous getting of the sites. Currently it all happens serially.
+- [x] GUI / TUI ? - Grafana
+- [x] Better asynchronous getting of the sites. Currently it all happens serially.
+- [ ] Allow for storing asynchronously
+
+3/17/25: Took >1hr to crawl 100 pages
+
+3/19/25: Took 20min to crawl 1000 pages
+This meant we stored 1000 pages, 142,997 urls, and 1,425,798 links between the two.
+
+3/20/25: Took 5min to crawl 1000 pages
+
+3/21/25: Took 3min to crawl 1000 pages
+
+# About
+
+![Screenshot](/pngs/graphana.png)
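
The batch-and-budget scheme the README describes maps onto a small loop. Below is a minimal, self-contained sketch of that shape; `process` and the synthetic URL source are hypothetical stand-ins for the real fetch logic and database query in src/main.rs further down:

```rust
use tokio::task::JoinSet;

// Hypothetical stand-in for crawling one page: fetch, store, extract links.
async fn process(url: String) {
    println!("crawled {url}");
}

#[tokio::main]
async fn main() {
    let budget = 250; // total pages to crawl
    let batch_size = 100; // pages requested per trip to the database
    let mut crawled = 0;

    while crawled < budget {
        // Stand-in for `get_uncrawled_links`: take up to 100 uncrawled URLs.
        let batch: Vec<String> = (crawled..budget.min(crawled + batch_size))
            .map(|i| format!("https://example.com/page/{i}"))
            .collect();
        if batch.is_empty() {
            break; // finished everything before the budget ran out
        }

        // Crawl the whole batch concurrently and count completions.
        let mut futures = JoinSet::new();
        for url in batch {
            futures.spawn(process(url));
        }
        while futures.join_next().await.is_some() {
            crawled += 1;
        }
    }
    println!("done: {crawled} pages");
}
```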

docker-compose.yml

@@ -14,21 +14,6 @@ services:
       - --pass
       - root
       - rocksdb:/mydata/database.db
-  minio:
-    image: quay.io/minio/minio
-    ports:
-      - 9000:9000
-      - 9001:9001
-    environment:
-      - MINIO_ROOT_USER=root
-      - MINIO_ROOT_PASSWORD=an8charpassword
-    volumes:
-      - minio_storage:/data
-    command:
-      - server
-      - /data
-      - --console-address
-      - ":9001"

   alloy:
     image: grafana/alloy:latest
@@ -66,6 +51,7 @@ services:
     image: grafana/grafana:latest
     volumes:
       - ./grafana.yaml:/etc/grafana/provisioning/datasources/datasources.yaml
+      - ./dashboards:/var/lib/grafana/dashboards
       - grafana_storage:/var/lib/grafana
     environment:
       - GF_AUTH_ANONYMOUS_ENABLED=true


@@ -0,0 +1,648 @@
{
"__inputs": [
{
"name": "DS_PROMETHEUS",
"label": "Prometheus",
"description": "",
"type": "datasource",
"pluginId": "prometheus",
"pluginName": "Prometheus"
},
{
"name": "DS_LOKI",
"label": "Loki",
"description": "",
"type": "datasource",
"pluginId": "loki",
"pluginName": "Loki"
}
],
"__elements": {},
"__requires": [
{
"type": "grafana",
"id": "grafana",
"name": "Grafana",
"version": "11.3.1"
},
{
"type": "panel",
"id": "logs",
"name": "Logs",
"version": ""
},
{
"type": "datasource",
"id": "loki",
"name": "Loki",
"version": "1.0.0"
},
{
"type": "datasource",
"id": "prometheus",
"name": "Prometheus",
"version": "1.0.0"
},
{
"type": "panel",
"id": "stat",
"name": "Stat",
"version": ""
},
{
"type": "panel",
"id": "timeseries",
"name": "Time series",
"version": ""
}
],
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": {
"type": "grafana",
"uid": "-- Grafana --"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"type": "dashboard"
}
]
},
"editable": true,
"fiscalYearStartMonth": 0,
"graphTooltip": 0,
"id": null,
"links": [],
"panels": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisBorderShow": false,
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"barWidthFactor": 0.6,
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"spanNulls": 300000,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
}
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 8,
"x": 0,
"y": 0
},
"id": 5,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"disableTextWrap": false,
"editorMode": "builder",
"expr": "surql_trips",
"fullMetaSearch": false,
"includeNullMetadata": true,
"legendFormat": "Trips to Surreal",
"range": true,
"refId": "A",
"useBackend": false
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"disableTextWrap": false,
"editorMode": "builder",
"expr": "s3_trips",
"fullMetaSearch": false,
"hide": false,
"includeNullMetadata": true,
"instant": false,
"legendFormat": "Trips to S3",
"range": true,
"refId": "B",
"useBackend": false
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"disableTextWrap": false,
"editorMode": "builder",
"expr": "pages_crawled",
"fullMetaSearch": false,
"hide": false,
"includeNullMetadata": true,
"instant": false,
"legendFormat": "total crawled",
"range": true,
"refId": "C",
"useBackend": false
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"disableTextWrap": false,
"editorMode": "builder",
"expr": "pages_being_processed",
"fullMetaSearch": false,
"hide": false,
"includeNullMetadata": true,
"instant": false,
"legendFormat": "Pages being processed",
"range": true,
"refId": "E",
"useBackend": false
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"disableTextWrap": false,
"editorMode": "builder",
"expr": "gets_in_flight",
"fullMetaSearch": false,
"hide": false,
"includeNullMetadata": true,
"instant": false,
"legendFormat": "__auto",
"range": true,
"refId": "D",
"useBackend": false
}
],
"title": "Crawler stats",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisBorderShow": false,
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"barWidthFactor": 0.6,
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"spanNulls": 300000,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
}
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 9,
"x": 8,
"y": 0
},
"id": 6,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"disableTextWrap": false,
"editorMode": "builder",
"expr": "surql_trips",
"fullMetaSearch": false,
"includeNullMetadata": true,
"legendFormat": "Trips to Surreal",
"range": true,
"refId": "A",
"useBackend": false
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"disableTextWrap": false,
"editorMode": "builder",
"expr": "surql_link_calls",
"fullMetaSearch": false,
"hide": false,
"includeNullMetadata": true,
"instant": false,
"legendFormat": "link calls",
"range": true,
"refId": "B",
"useBackend": false
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"disableTextWrap": false,
"editorMode": "builder",
"expr": "surql_store_calls",
"fullMetaSearch": false,
"hide": false,
"includeNullMetadata": true,
"instant": false,
"legendFormat": "store calls",
"range": true,
"refId": "C",
"useBackend": false
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"disableTextWrap": false,
"editorMode": "builder",
"expr": "pages_being_processed",
"fullMetaSearch": false,
"hide": false,
"includeNullMetadata": true,
"instant": false,
"legendFormat": "Pages being processed",
"range": true,
"refId": "E",
"useBackend": false
}
],
"title": "Surreal stats",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"description": "This is across all threads, so this isn't wall clock time",
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
}
]
},
"unit": "ms"
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 7,
"x": 17,
"y": 0
},
"id": 7,
"options": {
"colorMode": "value",
"graphMode": "area",
"justifyMode": "auto",
"orientation": "auto",
"percentChangeColorMode": "standard",
"reduceOptions": {
"calcs": [
"lastNotNull"
],
"fields": "",
"values": false
},
"showPercentChange": false,
"textMode": "auto",
"wideLayout": true
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"disableTextWrap": false,
"editorMode": "builder",
"expr": "surql_lock_waiting_ms",
"fullMetaSearch": false,
"includeNullMetadata": true,
"legendFormat": "__auto",
"range": true,
"refId": "A",
"useBackend": false
}
],
"title": "Time spend waiting on lock",
"type": "stat"
},
{
"datasource": {
"type": "loki",
"uid": "${DS_LOKI}"
},
"gridPos": {
"h": 18,
"w": 24,
"x": 0,
"y": 8
},
"id": 1,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "${DS_LOKI}"
},
"editorMode": "code",
"expr": "{filename=\"/tmp/alloy-logs/tracing.log\"} | json | level = `ERROR` | line_format \"{{.threadId}} {{.filename_extracted}}:{{.line_number}} {{.fields_message}}\"",
"queryType": "range",
"refId": "A"
}
],
"title": "Errors",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "${DS_LOKI}"
},
"gridPos": {
"h": 8,
"w": 12,
"x": 0,
"y": 26
},
"id": 2,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "${DS_LOKI}"
},
"editorMode": "code",
"expr": "{filename=\"/tmp/alloy-logs/tracing.log\"} | json | level = `DEBUG` | line_format \"{{.fields_message}}\"",
"queryType": "range",
"refId": "A"
}
],
"title": "Debug",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "${DS_LOKI}"
},
"gridPos": {
"h": 16,
"w": 12,
"x": 12,
"y": 26
},
"id": 4,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "${DS_LOKI}"
},
"editorMode": "code",
"expr": "{filename=\"/tmp/alloy-logs/tracing.log\"} | json | level = `TRACE` | line_format \"{{.fields_message}}\"",
"queryType": "range",
"refId": "A"
}
],
"title": "Trace",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "${DS_LOKI}"
},
"gridPos": {
"h": 8,
"w": 12,
"x": 0,
"y": 34
},
"id": 3,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "${DS_LOKI}"
},
"editorMode": "code",
"expr": "{filename=\"/tmp/alloy-logs/tracing.log\"} | json | level = `WARN` | line_format \"{{.fields_message}}\"",
"queryType": "range",
"refId": "A"
}
],
"title": "Warnings",
"type": "logs"
}
],
"schemaVersion": 40,
"tags": [],
"templating": {
"list": [
{
"datasource": {
"type": "loki",
"uid": "P8E80F9AEF21F6940"
},
"filters": [],
"name": "Filters",
"type": "adhoc"
}
]
},
"time": {
"from": "now-5m",
"to": "now"
},
"timepicker": {},
"timezone": "browser",
"title": "Crawler",
"uid": "ceg90x34pqgowd",
"version": 21,
"weekStart": ""
}


@@ -1,223 +0,0 @@
{
"__inputs": [
{
"name": "DS_LOKI",
"label": "Loki",
"description": "",
"type": "datasource",
"pluginId": "loki",
"pluginName": "Loki"
}
],
"__elements": {},
"__requires": [
{
"type": "grafana",
"id": "grafana",
"name": "Grafana",
"version": "11.3.1"
},
{
"type": "panel",
"id": "logs",
"name": "Logs",
"version": ""
},
{
"type": "datasource",
"id": "loki",
"name": "Loki",
"version": "1.0.0"
}
],
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": {
"type": "grafana",
"uid": "-- Grafana --"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"type": "dashboard"
}
]
},
"editable": true,
"fiscalYearStartMonth": 0,
"graphTooltip": 0,
"id": null,
"links": [],
"panels": [
{
"datasource": {
"type": "loki",
"uid": "${DS_LOKI}"
},
"gridPos": {
"h": 8,
"w": 12,
"x": 0,
"y": 0
},
"id": 1,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "${DS_LOKI}"
},
"editorMode": "code",
"expr": "{filename=\"/tmp/alloy-logs/tracing.log\"} | json | level = `ERROR` | line_format \"{{.fields_message}}\"",
"queryType": "range",
"refId": "A"
}
],
"title": "Errors",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "${DS_LOKI}"
},
"gridPos": {
"h": 8,
"w": 12,
"x": 12,
"y": 0
},
"id": 3,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "${DS_LOKI}"
},
"editorMode": "code",
"expr": "{filename=\"/tmp/alloy-logs/tracing.log\"} | json | level = `WARN` | line_format \"{{.fields_message}}\"",
"queryType": "range",
"refId": "A"
}
],
"title": "Warnings",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "${DS_LOKI}"
},
"gridPos": {
"h": 8,
"w": 12,
"x": 0,
"y": 8
},
"id": 2,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "${DS_LOKI}"
},
"editorMode": "code",
"expr": "{filename=\"/tmp/alloy-logs/tracing.log\"} | json | level = `DEBUG` | line_format \"{{.fields_message}}\"",
"queryType": "range",
"refId": "A"
}
],
"title": "Debug",
"type": "logs"
},
{
"datasource": {
"type": "loki",
"uid": "${DS_LOKI}"
},
"gridPos": {
"h": 8,
"w": 12,
"x": 12,
"y": 8
},
"id": 4,
"options": {
"dedupStrategy": "none",
"enableLogDetails": true,
"prettifyLogMessage": false,
"showCommonLabels": false,
"showLabels": false,
"showTime": false,
"sortOrder": "Descending",
"wrapLogMessage": false
},
"pluginVersion": "11.3.1",
"targets": [
{
"datasource": {
"type": "loki",
"uid": "${DS_LOKI}"
},
"editorMode": "code",
"expr": "{filename=\"/tmp/alloy-logs/tracing.log\"} | json | level = `TRACE` | line_format \"{{.fields_message}}\"",
"queryType": "range",
"refId": "A"
}
],
"title": "Trace",
"type": "logs"
}
],
"schemaVersion": 40,
"tags": [],
"templating": {
"list": []
},
"time": {
"from": "now-6h",
"to": "now"
},
"timepicker": {},
"timezone": "browser",
"title": "New dashboard",
"uid": "ceg90x34pqgowd",
"version": 4,
"weekStart": ""
}


@@ -7,7 +7,8 @@ scrape_configs:
     static_configs:
       # change this your machine's ip, localhost won't work
       # because localhost refers to the docker container.
-      - targets: ['192.168.8.209:2500']
+      - targets: ['172.20.239.48:2500']
+      #- targets: ['192.168.8.209:2500']
   - job_name: loki
     static_configs:
       - targets: ['loki:3100']

BIN pngs/graphana.png Normal file

Binary file not shown (264 KiB).

184 src/db.rs

@@ -1,33 +1,26 @@
-use std::fmt::Debug;
 use metrics::counter;
+use rusqlite::Connection;
+use std::fmt::Debug;
 use serde::{Deserialize, Serialize};
-use surrealdb::{
-    engine::remote::ws::{Client, Ws}, error::Db, opt::auth::Root, sql::Thing, Response, Surreal
-};
-use tracing::{error, instrument, trace, warn};
+use tracing::{error, instrument, trace};
 use url::Url;

-use crate::{Config, Timer};
+use crate::Config;

-const ROUND_TRIP_METRIC: &'static str = "surql_trips";
-const STORE: &'static str = "surql_store_calls";
-const LINK: &'static str = "surql_link_calls";
+const STORE: &str = "surql_store_calls";

-#[derive(Serialize, Deserialize, Clone)]
+#[derive(Serialize, Deserialize, Clone, Eq, PartialEq, Hash)]
 pub struct Website {
     /// The url that this data is found at
     pub site: Url,
     /// Wether or not this link has been crawled yet
     pub crawled: bool,
-    #[serde(skip_serializing)]
-    id: Option<Thing>,
 }

 // manual impl to make tracing look nicer
 impl Debug for Website {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        let site = (self.site.domain().unwrap_or("n/a")).to_string() + self.site.path();
-        f.debug_struct("Website").field("site", &site).finish()
+        f.debug_struct("Website").field("site", &self.site).finish()
     }
 }

@@ -39,9 +32,8 @@ impl Website {
             Err(_) => todo!(),
         };
         Self {
-            id: None,
             crawled,
-            site,
+            site
         }
     }

@@ -50,119 +42,39 @@ impl Website {
         self.crawled = true
     }

-    #[instrument(skip_all)]
-    pub async fn links_to(&self, other: Vec<Thing>, db: &Surreal<Client>) {
-        let len = other.len();
-        if len == 0 {return}
-
-        let from = self.site.to_string();
-        // let to = other.site.to_string();
-        trace!("Linking {from} to {} other pages.", other.len());
-        let msg = format!("Linked {len} pages");
-        let timer = Timer::start(&msg);
-        // prevent the timer from being dropped instantly.
-        let _ = timer;
-
-        counter!(ROUND_TRIP_METRIC).increment(1);
-        counter!(LINK).increment(1);
-        match db
-            .query("COUNT(RELATE (SELECT id FROM website WHERE site = $in) -> links_to -> $out)")
-            .bind(("in", from))
-            .bind(("out", other))
-            .await
-        {
-            Ok(mut e) => {
-                // The relate could technically "fail" (not relate anything), this just means that
-                // the query was ok.
-                let _: Response = e;
-                if let Ok(vec) = e.take(0) {
-                    let _: Vec<usize> = vec;
-                    if let Some(num) = vec.get(0) {
-                        if *num == len {
-                            trace!("Link OK");
-                            return;
-                        } else {
-                            warn!("Didn't link all the records. {num}/{len}");
-                            return;
-                        }
-                    }
-                }
-                warn!("Linking request succeeded but couldn't verify the results.");
-            },
-            Err(e) => {
-                error!("{}", e.to_string());
-            },
-        }
-    }
-
-    #[instrument(name = "surql_store", skip_all)]
-    pub async fn store(&self, db: &Surreal<Client>) -> Option<Thing> {
-        counter!(STORE).increment(1);
-        let counter = counter!(ROUND_TRIP_METRIC);
-        let t = Timer::start("Stored link");
-        let _ = t;
-        counter.increment(1);
-
-        // check if it's been gone thru before
-        let mut response = db
-            .query("SELECT * FROM ONLY website WHERE site = $site LIMIT 1")
-            .bind(("site", self.site.to_string()))
-            .await
-            .expect("Failed to check surreal for duplicates!");
-
-        if let Some(old) = response.take::<Option<Website>>(0).expect("Failed to read response from surreal for duplicates.") {
-            // site exists already
-            if let Some(id) = old.id {
-                // make sure to preserve the "crawled status"
-                let mut new = self.clone();
-                new.crawled = old.crawled | new.crawled;
-                counter.increment(1);
-                // update the record
-                match db.upsert((id.tb, id.id.to_string())).content(new).await {
-                    Ok(e) => {
-                        if let Some(a) = e {
-                            let _: Record = a;
-                            return Some(a.id);
-                        }
-                    }
-                    Err(e) => {
-                        match e {
-                            surrealdb::Error::Db(error) => {
-                                match error {
-                                    Db::QueryCancelled => todo!(),
-                                    Db::QueryNotExecuted => todo!(),
-                                    Db::QueryNotExecutedDetail { message: _ } => todo!(),
-                                    _=>{},
-                                }
-                            },
-                            _=>{},
-                        }
-                        // error!("{}", e);
-                    }
-                };
-            }
-        } else {
-            counter.increment(1);
-            // sites hasn't existed yet
-            match db.create("website").content(self.clone()).await {
-                Ok(e) => {
-                    let _: Option<Record> = e;
-                    if let Some(a) = e {
-                        let _: Record = a;
-                        return Some(a.id);
-                    }
-                }
-                Err(a) => error!("{:?}", a),
-            };
-        }
-        None
-    }
-}
-
-impl ToString for Website {
-    fn to_string(&self) -> String {
-        self.site.to_string()
-    }
+    // Insert ever item in the vec into surreal, crawled state will be preserved as TRUE
+    // if already in the database as such or incoming data is TRUE.
+    pub async fn store_all(all: Vec<Self>, db: &Connection) {
+        counter!(STORE).increment(1);
+        let mut things = Vec::with_capacity(all.len());
+        rusqlite::ParamsFromIter;
+
+        db.execute("",
+            params![]
+        );
+
+        match db
+            .query(
+                "INSERT INTO website $array
+                    ON DUPLICATE KEY UPDATE
+                        accessed_at = time::now(),
+                        crawled = crawled OR $input.crawled
+                    RETURN VALUE id;
+                ",
+            )
+            .bind(("array", all))
+            .await
+        {
+            Ok(mut id) => match id.take::<Vec<Thing>>(0) {
+                Ok(mut x) => things.append(&mut x),
+                Err(err) => error!("{:?}", err),
+            },
+            Err(err) => {
+                error!("{:?}", err);
+            }
+        }
+        things
+    }
 }

@@ -178,30 +90,10 @@ pub struct Record {
     pub id: Thing,
 }

-#[instrument(skip_all, name = "SurrealDB")]
-pub async fn connect(config: &Config) -> surrealdb::Result<Surreal<Client>> {
-    trace!("Establishing connection to surreal...");
+#[instrument(skip_all, name = "sqlite_connect")]
+pub async fn connect(config: &Config) -> Result<Connection, rusqlite::Error> {
+    trace!("Establishing connection to sqlite...");
     // Connect to the server
-    let db = Surreal::new::<Ws>(&config.surreal_url).await?;
-
-    trace!("Logging in...");
-    // Signin as a namespace, database, or root user
-    db.signin(Root {
-        username: &config.surreal_username,
-        password: &config.surreal_password,
-    })
-    .await?;
-
-    // Select a specific namespace / database
-    db
-        .use_ns(&config.surreal_ns)
-        .use_db(&config.surreal_db)
-        .await?;
-
-    let setup = include_bytes!("setup.surql");
-    let file = setup.iter().map(|c| *c as char).collect::<String>();
-    db.query(file).await.expect("Failed to setup surreal tables.");
-
-    Ok(db)
+    Connection::open("./squeelite.db")
 }
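
As reconstructed, the new `store_all` still issues a SurrealDB-style `INSERT ... ON DUPLICATE KEY UPDATE` query through what is now a rusqlite `Connection`, alongside a stray `rusqlite::ParamsFromIter;` statement, so this checkpoint likely does not compile as-is. For orientation only, a pure-rusqlite version of the same crawled-preserving upsert could look like the sketch below; the table layout and names here are assumptions, not part of this diff:

```rust
use rusqlite::{params, Connection, Result};

// Hypothetical rusqlite equivalent of the upsert sketched in store_all():
// insert each site once, and never flip crawled back from true to false.
fn store_all(all: &[(String, bool)], db: &Connection) -> Result<()> {
    let mut stmt = db.prepare(
        "INSERT INTO website (site, crawled) VALUES (?1, ?2)
         ON CONFLICT(site) DO UPDATE SET crawled = crawled OR excluded.crawled",
    )?;
    for (site, crawled) in all {
        stmt.execute(params![site, crawled])?;
    }
    Ok(())
}

fn main() -> Result<()> {
    let db = Connection::open_in_memory()?;
    // Assumed schema; the real one would come from a migration or setup script.
    db.execute("CREATE TABLE website (site TEXT UNIQUE, crawled BOOL)", [])?;
    store_all(&[("https://example.com".to_string(), false)], &db)?;
    Ok(())
}
```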

70 src/filesystem.rs Normal file

@@ -0,0 +1,70 @@
use std::{ffi::OsStr, path::PathBuf};

use tokio::fs;
use tracing::{debug, error, instrument, trace, warn};
use url::Url;

#[instrument(skip(data))]
pub async fn store(data: &str, url: &Url) {
    // extract data from url to save it accurately
    let url_path = PathBuf::from("./downloaded/".to_string() + url.domain().unwrap_or("UnknownDomain") + url.path());

    // if it's a file
    let (basepath, filename) = if url_path.extension().filter(valid_file_extension).is_some() {
        // get everything up till the file
        let basepath = url_path.ancestors().skip(1).take(1).collect::<PathBuf>();
        // get the file name
        let filename = url_path.file_name().expect("This should exist").to_string_lossy();
        trace!("Save path: {:?} and base path: {:?}", &url_path, &basepath);
        (basepath, filename.to_string())
    } else {
        (url_path.clone(), "index.html".into())
    };

    debug!("Writing at: {:?} {:?}", basepath, filename);

    // create the folders
    if let Err(err) = fs::create_dir_all(&basepath).await {
        error!("Dir creation: {err} {:?}", basepath);
    } else {
        // FIXME I don't think this handles index.html files well...
        // TODO this should probably append .html to non-described files
        // create the file if that was successful
        if let Err(err) = fs::write(&basepath.join(filename), data).await {
            error!("File creation: {err} {:?}", url_path);
        }
    }
}

fn valid_file_extension(take: &&OsStr) -> bool {
    let los = take.to_string_lossy();
    let all = los.split('.');
    match all.last() {
        Some(s) => {
            match s.to_lowercase().as_str() {
                "html" => true,
                "css" => true,
                "js" => true,
                "ts" => true,
                "otf" => true, // font
                "png" => true,
                "svg" => true,
                "jpg" => true,
                "jpeg" => true,
                "mp4" => true,
                "mp3" => true,
                "webp" => true,
                "pdf" => true,
                "json" => true,
                "xml" => true,
                _ => {
                    warn!("Might be forgetting a file extension: {s}");
                    false
                }
            }
        },
        None => false,
    }
}
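
The basepath/filename split above is the heart of this module. Here is a standalone sketch of the same decision rule, simplified (no extension whitelist, and `unwrap` in place of the logging used above); the example URLs are hypothetical:

```rust
use std::path::PathBuf;

// URLs whose last path segment has a file extension keep their filename;
// everything else is treated as a page and falls back to index.html.
fn main() {
    for raw in ["en.wikipedia.org/wiki/Rust", "en.wikipedia.org/static/logo.png"] {
        let url_path = PathBuf::from(format!("./downloaded/{raw}"));
        let (base, file) = if url_path.extension().is_some() {
            (
                url_path.parent().unwrap().to_path_buf(),
                url_path.file_name().unwrap().to_string_lossy().into_owned(),
            )
        } else {
            (url_path.clone(), String::from("index.html"))
        };
        println!("{base:?} + {file}");
    }
}
```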

src/main.rs

@@ -2,26 +2,27 @@
 extern crate html5ever;

-use std::{fs::File, io::Read, net::{IpAddr, Ipv4Addr}, time::Instant};
+use std::{
+    collections::HashSet, fs::File, io::Read, net::{IpAddr, Ipv4Addr}
+};

 use db::{connect, Website};
 use metrics::{counter, gauge};
 use metrics_exporter_prometheus::PrometheusBuilder;
-use s3::S3;
 use serde::Deserialize;
 use surrealdb::{engine::remote::ws::Client, Surreal};
 use tokio::task::JoinSet;
-use tracing::{debug, info, instrument, trace, trace_span, warn};
+use tracing::{debug, error, info, instrument, level_filters::LevelFilter, trace, trace_span};
 use tracing_subscriber::{fmt, layer::SubscriberExt, EnvFilter, Layer, Registry};

 mod db;
 mod parser;
-mod s3;
+mod filesystem;

-const GET_METRIC: &'static str = "total_gets";
-const GET_IN_FLIGHT: &'static str = "gets_in_flight";
-const SITES_CRAWLED: &'static str = "pages_crawled";
-const BEING_PROCESSED: &'static str = "pages_being_processed";
+const GET_METRIC: &str = "total_gets";
+const GET_IN_FLIGHT: &str = "gets_in_flight";
+const SITES_CRAWLED: &str = "pages_crawled";
+const BEING_PROCESSED: &str = "pages_being_processed";

 #[derive(Deserialize)]
 struct Config {
@@ -31,48 +32,44 @@ struct Config {
     surreal_username: String,
     surreal_password: String,

-    s3_url: String,
-    s3_bucket: String,
-    s3_access_key: String,
-    s3_secret_key: String,

     crawl_filter: String,
     budget: usize,
 }

 #[tokio::main]
 async fn main() {
-    let total_runtime = Timer::start("Completed");
-
     let writer = std::fs::OpenOptions::new()
         .append(true)
         .create(true)
         .open("./docker/logs/tracing.log")
         .expect("Couldn't make log file!");

-    let registry = Registry::default()
-        .with(
+    let filter = EnvFilter::builder()
+        .with_default_directive(LevelFilter::DEBUG.into())
+        .from_env_lossy();
+
+    let registry = Registry::default().with(
         fmt::layer()
             .with_line_number(true)
             .with_thread_ids(true)
             .with_file(true)
-            // .with_timer(LocalTime::rfc_3339())
+            // Loki or alloy does this automatically
             .json()
             .with_writer(writer)
-            // .with_filter(EnvFilter::from_default_env())
+            .with_filter(filter)
     );

     tracing::subscriber::set_global_default(registry).expect("Failed to set default subscriber");

     let builder = PrometheusBuilder::new();
-    builder.with_http_listener(
-        std::net::SocketAddr::new(IpAddr::V4(Ipv4Addr::from_octets([0,0,0,0])), 2500)
-    )
+    builder
+        .with_http_listener(std::net::SocketAddr::new(
+            IpAddr::V4(Ipv4Addr::from_octets([0, 0, 0, 0])),
+            2500,
+        ))
         .install()
         .expect("failed to install recorder/exporter");

-    debug!("Starting...");
+    info!("Starting...");

     // Would probably take these in as parameters from a cli
     let starting_url = "https://en.wikipedia.org/";
     // When getting uncrawled pages, name must contain this variable. "" will effectively get ignored.
@@ -80,7 +77,6 @@ async fn main() {
     // let budget = 50;
     let mut crawled = 0;
-
     let mut file = File::open("./Crawler.toml").expect("Failed to read Crawler.toml");
     let mut buf = String::new();
     let _ = file.read_to_string(&mut buf);
@@ -90,9 +86,6 @@ async fn main() {
     let db = connect(&config)
         .await
        .expect("Failed to connect to surreal, aborting.");
-
-    let s3 = S3::connect(&config)
-        .await
-        .expect("Failed to connect to minio, aborting.\n\nThis probably means you need to login to the minio console and get a new access key!\n\n(Probably here) http://localhost:9001/access-keys/new-account\n\n");

     let reqwest = reqwest::Client::builder()
         // .use_rustls_tls()
@@ -105,8 +98,8 @@ async fn main() {
     let span = trace_span!("Pre-Loop");
     let pre_loop_span = span.enter();
     // Download the site
-    let site = Website::new(&starting_url, false);
-    get(site, db.clone(), reqwest.clone(), s3.clone()).await;
+    let site = Website::new(starting_url, false);
+    process(site, db.clone(), reqwest.clone()).await;

     drop(pre_loop_span);
@@ -120,28 +113,23 @@ async fn main() {
         };
         let uncrawled = get_uncrawled_links(&db, get_num, config.crawl_filter.clone()).await;
-        if uncrawled.len() == 0 {
+        if uncrawled.is_empty() {
             info!("Had more budget but finished crawling everything.");
             return;
         }
-        debug!("Crawling {} pages...", uncrawled.len());
-
-        let span = trace_span!("Crawling");
-        let _ = span.enter();

         {
             let mut futures = JoinSet::new();
             for site in uncrawled {
                 gauge!(BEING_PROCESSED).increment(1);
-                futures.spawn(get(site, db.clone(), reqwest.clone(), s3.clone()));
+                futures.spawn(process(site, db.clone(), reqwest.clone()));
                 // let percent = format!("{:.2}%", (crawled as f32 / budget as f32) * 100f32);
                 // info!("Crawled {crawled} out of {budget} pages. ({percent})");
             }
-            debug!("Joining {} futures...", futures.len());

             let c = counter!(SITES_CRAWLED);
             // As futures complete runs code in while block
-            while let Some(_) = futures.join_next().await {
+            while futures.join_next().await.is_some() {
                 c.increment(1);
                 gauge!(BEING_PROCESSED).decrement(1);
                 crawled += 1;
@@ -151,39 +139,61 @@ async fn main() {
     drop(span);

     info!("Done");
-    drop(total_runtime);
 }

-#[instrument(skip (db, s3, reqwest))]
+#[instrument(skip(db, reqwest))]
 /// Downloads and crawls and stores a webpage.
 /// It is acceptable to clone `db`, `reqwest`, and `s3` because they all use `Arc`s internally. - Noted by Oliver
-async fn get(mut site: Website, db: Surreal<Client>, reqwest: reqwest::Client, s3: S3) {
-    trace!("Get: {}", site.to_string());
-    let timer = Timer::start("Built request");
-    let request_builder = reqwest.get(site.to_string());
-    timer.stop();
+async fn process(mut site: Website, db: Surreal<Client>, reqwest: reqwest::Client) {
+    // METRICS
+    trace!("Process: {}", &site.site);
+    // Build the request
+    let request_builder = reqwest.get(site.site.to_string());

+    // METRICS
     let g = gauge!(GET_IN_FLIGHT);
     g.increment(1);
-    let timer = Timer::start("Got page");

+    // Send the http request (get)
     if let Ok(response) = request_builder.send().await {
-        timer.stop();
+        // METRICS
         g.decrement(1);
         counter!(GET_METRIC).increment(1);
-        debug!("Getting body...");

-        // Get body
-        let data = response.text().await.expect("Failed to read http response's body!");
+        // Get body from response
+        let data = response
+            .text()
+            .await
+            .expect("Failed to read http response's body!");

         // Store document
-        s3.store(&data, &site.site).await;
-        // Parse document and store relationships
-        parser::parse(&db, &mut site, &data).await;
-        return;
+        filesystem::store(&data, &site.site).await;

+        // Parse document and get relationships
+        let sites = parser::parse(&site, &data).await;
+
+        // update self in db
+        site.set_crawled();
+        Website::store_all(vec![site], &db).await;
+
+        // De-duplicate this list
+        let prev_len = sites.len();
+        let set = sites.into_iter().fold(HashSet::new(), |mut set,item| {
+            set.insert(item);
+            set
+        });
+        let de_dupe_sites: Vec<Website> = set.into_iter().collect();
+        let diff = prev_len - de_dupe_sites.len();
+        trace!("Saved {diff} from being entered into the db by de-duping");
+
+        // Store all the other sites so that we can link to them.
+        let _ = Website::store_all(de_dupe_sites, &db).await;
+    } else {
+        error!("Failed to get: {}", &site.site);
     }
-    trace!("Failed to get: {}", site.to_string());
 }

/// Returns uncrawled links
@@ -209,35 +219,3 @@ async fn get_uncrawled_links(
         .expect("Returned websites couldn't be parsed")
 }
-
-pub struct Timer<'a> {
-    start: Instant,
-    msg: &'a str,
-}
-
-impl<'a> Timer<'a> {
-    #[inline]
-    pub fn start(msg: &'a str) -> Self {
-        Self {
-            start: Instant::now(),
-            msg,
-        }
-    }
-    pub fn stop(&self) -> f64 {
-        let dif = self.start.elapsed().as_micros();
-        let ms = dif as f64 / 1000.;
-        if ms > 200. {
-            warn!("{}", format!("{} in {:.3}ms", self.msg, ms));
-        } else {
-            trace!("{}", format!("{} in {:.3}ms", self.msg, ms));
-        }
-        ms
-    }
-}
-
-impl Drop for Timer<'_> {
-    fn drop(&mut self) {
-        self.stop();
-    }
-}
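
The Prometheus wiring this file keeps is compact enough to demonstrate on its own. A minimal sketch of the same exporter setup, reusing the `total_gets` metric name from the constants above; everything else here is a hypothetical standalone program, not the repo's code:

```rust
use std::net::{IpAddr, Ipv4Addr, SocketAddr};

use metrics::counter;
use metrics_exporter_prometheus::PrometheusBuilder;

fn main() {
    // Serve Prometheus-format metrics on 0.0.0.0:2500, as the diff configures.
    PrometheusBuilder::new()
        .with_http_listener(SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), 2500))
        .install()
        .expect("failed to install recorder/exporter");

    // Any counter bumped after install shows up on the scrape endpoint.
    counter!("total_gets").increment(1);
}
```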

src/parser.rs

@@ -5,21 +5,21 @@ use html5ever::tokenizer::{BufferQueue, TokenizerResult};
 use html5ever::tokenizer::{StartTag, TagToken};
 use html5ever::tokenizer::{Token, TokenSink, TokenSinkResult, Tokenizer, TokenizerOpts};
 use html5ever::{local_name, tendril::*};
-use surrealdb::engine::remote::ws::Client;
-use surrealdb::Surreal;
-use tracing::instrument;
+use tracing::{debug, error, instrument, trace, warn};
+use url::Url;

 use crate::db::Website;
-use crate::Timer;

 impl TokenSink for Website {
     type Handle = Vec<Website>;

+    #[instrument(skip(token, _line_number))]
     fn process_token(&self, token: Token, _line_number: u64) -> TokenSinkResult<Self::Handle> {
         match token {
             TagToken(tag) => {
                 if tag.kind == StartTag {
                     match tag.name {
+                        // this should be all the html elements that have links
                         local_name!("a")
                         | local_name!("audio")
                         | local_name!("area")
@@ -34,21 +34,18 @@ impl TokenSink for Website {
                         let attr_name = attr.name.local.to_string();
                         if attr_name == "src" || attr_name == "href" || attr_name == "data"
                         {
-                            // Get clone of the current site object
-                            let mut web = self.clone();
-
-                            // Set url
-                            let mut url = web.site;
-                            url.set_fragment(None); // removes #xyz
-                            let joined = url.join(&attr.value).expect("Failed to join url during parsing!");
-                            web.site = joined;
-
-                            web.crawled = false;
-
-                            links.push(web);
+                            trace!("Found `{}` in html `{}` tag", &attr.value, tag.name);
+                            let url = try_get_url(&self.site, &attr.value);
+
+                            if let Some(mut parsed) = url {
+                                parsed.set_query(None);
+                                parsed.set_fragment(None);
+                                debug!("Final cleaned URL: `{}`", parsed.to_string());
+                                let web = Website::new(&parsed.to_string(), false);
+                                links.push(web);
+                            }
                         }
                     }
                     return TokenSinkResult::Script(links);
                 }
                 local_name!("button") | local_name!("meta") | local_name!("iframe") => {
@@ -65,45 +62,84 @@ impl TokenSink for Website {
 }

 #[instrument(skip_all)]
-pub async fn parse(db: &Surreal<Client>, site: &mut Website, data: &str) {
-    // update self in db
-    site.set_crawled();
-    site.store(db).await;
-
+/// Parses the passed site and returns all the sites it links to.
+pub async fn parse(site: &Website, data: &str) -> Vec<Website> {
     // prep work
     let mut other_sites: Vec<Website> = Vec::new();
-    { // using blocks to prevent compiler's async worries
-        let _t = Timer::start("Parsed page");

-        // change data into something that can be tokenized
-        let chunk = Tendril::from_str(&data).expect("Failed to parse string into Tendril!");
-        // create buffer of tokens and push our input into it
-        let mut token_buffer = BufferQueue::default();
-        token_buffer.push_back(chunk.try_reinterpret::<fmt::UTF8>().expect("Failed to reinterprt chunk!"));
-        // create the tokenizer
-        let tokenizer = Tokenizer::new(site.clone(), TokenizerOpts::default());
+    // change data into something that can be tokenized
+    let chunk = Tendril::from_str(data).expect("Failed to parse string into Tendril!");
+    // create buffer of tokens and push our input into it
+    let token_buffer = BufferQueue::default();
+    token_buffer.push_back(
+        chunk
+            .try_reinterpret::<fmt::UTF8>()
+            .expect("Failed to reinterpret chunk!"),
+    );
+    // create the tokenizer
+    let tokenizer = Tokenizer::new(site.clone(), TokenizerOpts::default());

-        // go thru buffer
-        while let TokenizerResult::Script(mut sites) = tokenizer.feed(&mut token_buffer) {
-            other_sites.append(&mut sites);
-            // other_sites.push(sites);
-        }
-        assert!(token_buffer.is_empty());
-        tokenizer.end();
-    }
+    // go thru buffer
+    while let TokenizerResult::Script(mut sites) = tokenizer.feed(&token_buffer) {
+        other_sites.append(&mut sites);
+        // other_sites.push(sites);
+    }
+    assert!(token_buffer.is_empty());
+    tokenizer.end();

-    {
-        let mut links_to = Vec::with_capacity(other_sites.len());
-
-        for a in other_sites {
-            let other = a.store(db).await;
-            if let Some(o) = other {
-                links_to.push(o);
-            }
-        }
-
-        site.links_to(links_to, db).await;
-    }
+    other_sites
 }
+
+#[instrument]
+fn try_get_url(parent: &Url, link: &str) -> Option<Url> {
+    match Url::parse(link) {
+        Ok(ok) => Some(ok),
+        Err(e) => {
+            if link.starts_with('#') {
+                trace!("Rejecting # url");
+                None
+            } else if link.starts_with("//") {
+                // if a url starts with "//" is assumed that it will adopt
+                // the same scheme as it's parent
+                // https://stackoverflow.com/questions/9646407/two-forward-slashes-in-a-url-src-href-attribute
+                let scheme = parent.scheme();
+                match Url::parse(&format!("{scheme}://{}", link)) {
+                    Ok(url) => Some(url),
+                    Err(err) => {
+                        error!("Failed parsing realative scheme url: {}", err);
+                        None
+                    }
+                }
+            } else {
+                // # This is some sort of realative url, gonna try patching it up into an absolute
+                // url
+                match e {
+                    url::ParseError::RelativeUrlWithoutBase => {
+                        // Is: scheme://host:port
+                        let origin = parent.origin().ascii_serialization();
+                        let url = origin.clone() + link;
+                        trace!("Built `{url}` from `{origin} + {}`", link.to_string());
+
+                        if let Ok(url) = Url::parse(&url) {
+                            trace!("Saved relative url `{}` AS: `{}`", link, url);
+                            Some(url)
+                        } else {
+                            error!(
+                                "Failed to reconstruct a url from relative url: `{}` on site: `{}`",
+                                link,
+                                parent.to_string()
+                            );
+                            None
+                        }
+                    }
+                    _ => {
+                        error!("MISC error: {:?} {:?}", e, link);
+                        None
+                    }
+                }
+            }
+        }
+    }
+}
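
The three URL shapes `try_get_url` distinguishes (absolute, scheme-relative, and path-relative) can be checked directly against the url crate. A standalone sketch, with hypothetical example URLs:

```rust
use url::{ParseError, Url};

fn main() {
    let parent = Url::parse("https://en.wikipedia.org/wiki/Rust").unwrap();

    // 1. Already absolute: parses as-is.
    assert!(Url::parse("https://example.com/x").is_ok());

    // 2. Scheme-relative ("//host/path"): prepend the parent's scheme.
    let fixed = format!("{}:{}", parent.scheme(), "//example.com/x");
    assert!(Url::parse(&fixed).is_ok());

    // 3. Path-relative: parsing fails with RelativeUrlWithoutBase,
    //    so rebuild an absolute URL from the parent's origin.
    assert_eq!(Url::parse("/wiki/Ferris"), Err(ParseError::RelativeUrlWithoutBase));
    let origin = parent.origin().ascii_serialization(); // "https://en.wikipedia.org"
    let rebuilt = Url::parse(&(origin + "/wiki/Ferris")).unwrap();
    assert_eq!(rebuilt.as_str(), "https://en.wikipedia.org/wiki/Ferris");
}
```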

105 src/s3.rs

@@ -1,105 +0,0 @@
use base64::{alphabet, engine::{self, general_purpose}, Engine};
use metrics::counter;
use minio::s3::{
    args::{BucketExistsArgs, MakeBucketArgs},
    client::ClientBuilder,
    creds::StaticProvider,
    error::Error,
    http::BaseUrl,
    Client,
};
use tracing::{instrument, trace, warn};
use url::Url;

use crate::{Config, Timer};

const CUSTOM_ENGINE: engine::GeneralPurpose = engine::GeneralPurpose::new(&alphabet::URL_SAFE, general_purpose::NO_PAD);

const ROUND_TRIP_METRIC: &'static str = "s3_trips";

#[derive(Clone)]
pub struct S3 {
    bucket_name: String,
    client: Client,
}

impl S3 {
    #[instrument(skip_all, name = "S3")]
    pub async fn connect(config: &Config) -> Result<Self, Error> {
        let base_url = config
            .s3_url
            .parse::<BaseUrl>()
            .expect("Failed to parse url into BaseUrl");

        let static_provider =
            StaticProvider::new(&config.s3_access_key, &config.s3_secret_key, None);

        let client = ClientBuilder::new(base_url)
            .provider(Some(Box::new(static_provider)))
            .build()?;

        trace!("Checking bucket...");
        let exists = client
            .bucket_exists(
                &BucketExistsArgs::new(&config.s3_bucket)
                    .expect("Failed to check if bucket exists"),
            )
            .await?;

        if !exists {
            trace!("Creating bucket...");
            client
                .make_bucket(
                    &MakeBucketArgs::new(&config.s3_bucket).expect("Failed to create bucket!"),
                )
                .await?;
        }

        trace!("Connection successful");

        Ok(Self {
            bucket_name: config.s3_bucket.to_owned(),
            client: client,
        })
    }

    #[instrument(name = "s3_store", skip_all)]
    pub async fn store(&self, data: &str, url: &Url) {
        let counter = counter!(ROUND_TRIP_METRIC);
        let t = Timer::start("Stored page");
        let _ = t; // prevent compiler drop

        if let Some(domain) = url.domain() {
            let filename = domain.to_owned() + url.path();
            trace!("Created filename: {filename} from raw: {}", url.to_string());

            counter.increment(1);
            let _ = match &self
                .client
                .put_object_content(&self.bucket_name, &filename, data.to_owned())
                .send()
                .await {
                    Ok(_) => {},
                    Err(err) => {
                        match err {
                            Error::InvalidObjectName(_) => {
                                warn!("Tried storing invalid object name, retrying with Base64 encoding. Last try.");
                                let filename: String = domain.to_owned() + &CUSTOM_ENGINE.encode(url.path());

                                counter.increment(1);
                                let _ = &self
                                    .client
                                    .put_object_content(&self.bucket_name, &filename, data.to_owned())
                                    .send()
                                    .await
                                    .unwrap();
                            },
                            _ => {},
                        }
                    },
                };
        }
    }
}

src/setup.surql

@@ -1,3 +1,9 @@
 DEFINE TABLE IF NOT EXISTS website SCHEMALESS;
-DEFINE FIELD IF NOT EXISTS accessed_at ON TABLE website VALUE time::now();
+DEFINE FIELD IF NOT EXISTS site ON TABLE website TYPE string;
 DEFINE INDEX IF NOT EXISTS idx ON TABLE website COLUMNS site UNIQUE;
+
+DEFINE FIELD IF NOT EXISTS crawled ON TABLE website TYPE bool;
+
+DEFINE FIELD IF NOT EXISTS accessed_at ON TABLE website VALUE time::now();
+DEFINE FIELD IF NOT EXISTS first_accessed_at ON TABLE website VALUE time::now();