updated to new storage schema
@@ -19,12 +19,17 @@ function handle_header(req) {
 let old = ""
 
 function url_redirect (req)  {
-    let b64 = btoa(req.url)
+
+    let url = new URL(req.url);
+    let path = url.pathname;
+    let domain = url.hostname;
+
+    let b64 = btoa(path)
     if (!req.url.startsWith("http://localhost")) {
         old = req.url
         // console.debug(`Loading: ${req.url} || ${b64}`);
         return {
-            redirectUrl: `http://localhost:4433/s3/${b64}`
+            redirectUrl: `http://localhost:4433/s3/${domain}/${b64}`
         }
     }
 }
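For context, a minimal sketch of the key scheme this change introduces, written as a standalone Rust helper. The s3_key name, the url crate, and the example URL are illustrative assumptions, not part of the commit; the extension itself builds the key with btoa in the background script above.

// Hypothetical helper illustrating the new key scheme (not part of the commit):
// the original URL is split into hostname and path, only the path is base64-encoded,
// and the S3 key becomes "<domain>/<b64(path)>". The URL-safe alphabet here matches
// the server's BASE64_URL_SAFE decoder; the extension itself calls btoa.
use base64::prelude::*;
use url::Url;

fn s3_key(original: &str) -> Option<String> {
    let parsed = Url::parse(original).ok()?;
    let domain = parsed.host_str()?;                  // e.g. "example.com"
    let b64 = BASE64_URL_SAFE.encode(parsed.path());  // "/index.html" -> "L2luZGV4Lmh0bWw="
    Some(format!("{domain}/{b64}"))                   // fetched back at /s3/<domain>/<file>
}

So a request for http://example.com/index.html would be redirected to the key example.com/L2luZGV4Lmh0bWw= under the new bucket layout.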
							
								
								
									
src/main.rs (16 lines changed)
@@ -21,10 +21,10 @@ async fn main() {
         .init();
 
     let config = Config {
-        s3_bucket: "b64v1",
+        s3_bucket: "b64v2",
         s3_url: "http://localhost:9000",
-        s3_access_key: "8UO76z8wCs9DnpxSbQUY",
-        s3_secret_key: "xwKVMpf2jzgprsdo85Dvo74UmO84y0aRrAUorYY5",
+        s3_access_key: "Eyp21VVclXP8xx49bJ6D",
+        s3_secret_key: "VxqU9hQ2gl7TvHIFzCM74Og6rQUZcpgYpHNUBnVm",
     };
 
     let s3 = S3::connect(&config).await.expect("Failed to connect to minio, aborting.");
@@ -58,13 +58,15 @@ impl<'r> FromRequest<'r> for Og{
 }
 
 
-#[get("/s3/<path>")]
-async fn get_s3_content(path: &str, db: &State<S3>, og: Og) -> (Status, (ContentType, String)) {
+#[get("/s3/<domain>/<file>")]
+async fn get_s3_content(domain: &str, file: &str, db: &State<S3>, og: Og) -> (Status, (ContentType, String)) {
     use base64::prelude::*;
+    let filename = domain.to_owned() + "/" + file;
 
     info!("{:?}", og.og);
 
-    if let Some(resp) = db.get(&path).await {
-        let content_type = if let Ok(url) = BASE64_URL_SAFE.decode(path) {
+    if let Some(resp) = db.get(&filename).await {
+        let content_type = if let Ok(url) = BASE64_URL_SAFE.decode(file) {
             let url = url.into_iter().map(|f| f as char).collect::<String>();
             if let Some(filetype) = url.split('.').collect::<Vec<&str>>().last() {
                 ContentType::from_extension(&filetype).unwrap_or(ContentType::HTML)
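The tail of this handler is cut off in the view above. As a rough standalone sketch of the content-type branch it performs (the content_type_for name is an assumption for illustration): the <file> segment is base64-decoded back to the original path, and the extension after the last '.' selects the ContentType, falling back to HTML.

// Condensed, illustrative sketch of the content-type lookup shown above;
// the function name is an assumption, the calls mirror the handler's logic.
use base64::prelude::*;
use rocket::http::ContentType;

fn content_type_for(file_segment: &str) -> ContentType {
    if let Ok(bytes) = BASE64_URL_SAFE.decode(file_segment) {
        // Recover the original path, e.g. "L2luZGV4Lmh0bWw=" -> "/index.html"
        let path = String::from_utf8_lossy(&bytes);
        if let Some(ext) = path.rsplit('.').next() {
            return ContentType::from_extension(ext).unwrap_or(ContentType::HTML);
        }
    }
    ContentType::HTML
}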