Commit d21d08a7 authored by peterrabbit

website structure

parent be44541a
@@ -377,6 +377,8 @@ dependencies = [
  "actix-web",
  "rustls",
  "rustls-pemfile",
+ "serde",
+ "serde_json",
 ]

 [[package]]
@@ -1019,9 +1021,23 @@ checksum = "93f6841e709003d68bb2deee8c343572bf446003ec20a583e76f7b15cebf3711"

 [[package]]
 name = "serde"
-version = "1.0.142"
+version = "1.0.143"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e590c437916fb6b221e1d00df6e3294f3fccd70ca7e92541c475d6ed6ef5fee2"
+checksum = "53e8e5d5b70924f74ff5c6d64d9a5acd91422117c60f48c4e07855238a254553"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.143"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d3d8e8de557aee63c26b85b947f5e59b690d0454c753f3adeb5cd7835ab88391"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]

 [[package]]
 name = "serde_json"
......
@@ -9,3 +9,5 @@ edition = "2021"
 actix-web = { version = "4.1.0", features = ["rustls", "secure-cookies"] }
 rustls = "0.20.6"
 rustls-pemfile = "1.0.1"
+serde = { version = "1.0.143", features = ["derive"] }
+serde_json = "1.0.83"
......
+mod website;
+
 use actix_web::{get, App, HttpResponse, HttpServer, Responder};
+use website::WebSite;
+
+fn load_website_template() -> WebSite {
+    let site_template_path = std::env::current_dir()
+        .unwrap()
+        .join("templates")
+        .join("new_website.json");
+    let site_template = std::fs::read_to_string(site_template_path).unwrap();
+    WebSite::new(serde_json::from_str(&site_template).unwrap())
+}
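An aside, not part of this commit: the loader above panics on a missing or malformed template. A fallible variant is sketched below; the name try_load_website_template is hypothetical.

use std::io;

fn try_load_website_template() -> io::Result<WebSite> {
    let path = std::env::current_dir()?
        .join("templates")
        .join("new_website.json");
    let raw = std::fs::read_to_string(path)?;
    // Fold the serde_json parse error into io::Error so one Result type covers both.
    let tree: website::PagesTree = serde_json::from_str(&raw)
        .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
    Ok(WebSite::new(tree))
}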
 #[get("/{pth:.*}")]
-async fn page(pth: actix_web::web::Path<String>) -> impl Responder {
-    HttpResponse::Ok().body(format!("Page path {}", pth))
+async fn page(
+    website: actix_web::web::Data<WebSite>,
+    pth: actix_web::web::Path<String>,
+) -> impl Responder {
+    // BUG: use a trailing-slash middleware? or match root vs. non-root explicitly
+    let pth = format!("/{}/", pth.into_inner());
+    match website.get_page_by_url(&pth) {
+        Some(page) => HttpResponse::Ok().body(page.html_doc.clone()),
+        None => HttpResponse::NotFound().body(format!("Not found {}", pth)),
+    }
 }
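One way to settle the trailing-slash BUG note above (a suggestion, not what the commit does): actix-web 4 ships a NormalizePath middleware that can force a trailing slash before routing, so the handler no longer rebuilds it by hand.

use actix_web::middleware::{NormalizePath, TrailingSlash};

// In the HttpServer factory:
App::new()
    .wrap(NormalizePath::new(TrailingSlash::Always)) // "/subpage" -> "/subpage/"
    .service(page)

// The handler would then stop appending its own slash:
// let pth = format!("/{}", pth.into_inner());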
#[get("/admin/dashboard")]
@@ -17,15 +35,16 @@ async fn admin_login() -> impl Responder {
 #[actix_web::main]
 async fn main() -> std::io::Result<()> {
+    let website = load_website_template();
     // GET HOST AND CERTS DIR FROM CLI ARGUMENT
     // Get port from arg, or get context from arg and define default port, or set default port to standard
-    // LOAD A WEBSITE SCHEMA (JSON) FROM CLI ARGUMENT PATH OR CREATE A NEW ONE
+    // LOAD A WEBSITE SCHEMA (JSON) FROM CLI ARGUMENT PATH, OR search in /var/{sitename} and load it, OR CREATE A NEW ONE
     // create pages resources with templates and the contents from the json file
     // Save the resources in an appstate
-    // create the static dir
-    // create the static files index (like Arc<Mutex<Index>>)
+    // create the static dir in a standard location if it doesn't exist (like /var/{sitename}/static)
+    // create the static files index (like Arc<Mutex<StaticFilesIndex>>)
     // create a Rest service at root with extensive path argument: like #[get(/{pth:.*})]
     // Then parse the website document and return the corresponding template, or the 404 template
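A minimal sketch of the port TODO above, assuming the port arrives as the first CLI argument (the 8443 fallback is illustrative, not the author's choice):

let port: u16 = std::env::args()
    .nth(1)
    .and_then(|arg| arg.parse().ok()) // ignore a non-numeric argument
    .unwrap_or(8443);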
@@ -51,9 +70,11 @@ async fn main() -> std::io::Result<()> {
         .with_single_cert(vec![cert], key)
         .expect("bad certificate/key");

-    HttpServer::new(|| {
+    HttpServer::new(move || {
         App::new()
             .wrap(actix_web::middleware::Logger::default())
+            .wrap(actix_web::middleware::Compress::default())
+            .app_data(actix_web::web::Data::new(website.clone()))
             .service(admin_dashboard)
             .service(admin_login)
             .service(page)
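Note that the factory closure runs once per worker thread, so website.clone() above deep-copies the whole WebSite for each worker. A common actix-web alternative (a sketch, not what this commit does) wraps the site in web::Data once, outside the factory, so each worker clones a cheap Arc handle:

let website = actix_web::web::Data::new(load_website_template());
HttpServer::new(move || {
    App::new()
        .app_data(website.clone()) // clones the Arc, not the WebSite
        .service(page)
    // ...bind and run as before
})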
......
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

/// Raw page content and metadata, as stored in the site's JSON schema.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct PageData {
    pub title: String,
    pub lang: String,
    pub description: String,
    pub slug: String,
    pub html_body: String,
}

/// A renderable page: its source data plus the final HTML document.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct WebPage {
    pub page_data: PageData,
    pub html_doc: String,
}

impl PageData {
    pub fn to_web_page(&self) -> WebPage {
        WebPage {
            page_data: self.clone(),
            html_doc: self.html_body.clone(), // TMP: no document template applied yet
        }
    }
}
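The TMP note above suggests html_doc will eventually wrap html_body in a complete document. A guess at that step, using only the fields PageData already has (the method name is hypothetical, not part of the commit):

impl PageData {
    fn render_html_doc(&self) -> String {
        format!(
            "<!DOCTYPE html><html lang=\"{}\"><head><meta charset=\"utf-8\"><title>{}</title><meta name=\"description\" content=\"{}\"></head><body>{}</body></html>",
            self.lang, self.title, self.description, self.html_body
        )
    }
}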
/// The site's page hierarchy: each node owns its data and optional children.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct PagesTree {
    pub page_data: PageData,
    pub sub_pages: Option<Vec<PagesTree>>,
}

#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct WebSite {
    pages_tree: PagesTree,
    pages_index_by_url: HashMap<String, WebPage>,
}

impl WebSite {
    pub fn new(pages_tree: PagesTree) -> Self {
        let mut pages_index_by_url = HashMap::new();
        WebSite::create_index_by_url(&mut pages_index_by_url, &pages_tree, String::new());
        WebSite {
            pages_tree,
            pages_index_by_url,
        }
    }

    /// Walks the tree depth-first, building each page's URL from its
    /// ancestors' slugs ("" -> "/", "subpage" -> "/subpage/", ...).
    fn create_index_by_url(
        index: &mut HashMap<String, WebPage>,
        pages_tree: &PagesTree,
        from_url: String,
    ) {
        let page_data = pages_tree.page_data.clone();
        let url = format!("{}{}/", from_url, page_data.slug);
        index.insert(url.to_owned(), page_data.to_web_page());
        if let Some(sub_pages) = &pages_tree.sub_pages {
            for pt in sub_pages {
                WebSite::create_index_by_url(index, pt, url.to_owned());
            }
        }
    }

    pub fn get_page_by_url(&self, url: &str) -> Option<&WebPage> {
        self.pages_index_by_url.get(url)
    }
}
#[cfg(test)]
mod test_website {
    use super::*;

    const JSON_TEMPLATE: &str = r#"
    {
        "page_data": {
            "title": "Test Website",
            "slug": "",
            "lang": "en",
            "description": "A test website",
            "html_body": "<h1>Test Website</h1>"
        },
        "sub_pages": [
            {
                "page_data": {
                    "title": "A sub page",
                    "slug": "subpage",
                    "lang": "en",
                    "description": "A sub page of the testing web site",
                    "html_body": "<h1>A sub page</h1>"
                },
                "sub_pages": [
                    {
                        "page_data": {
                            "title": "Another page",
                            "lang": "en",
                            "slug": "otherpage",
                            "description": "Another testing page",
                            "html_body": "<h1>Another page</h1>"
                        }
                    }
                ]
            }
        ]
    }
    "#;

    #[test]
    fn test_index_pages_by_slug() {
        let pages_tree: PagesTree = serde_json::from_str(JSON_TEMPLATE).unwrap();
        let website = WebSite::new(pages_tree);

        let root_page = website.get_page_by_url("/");
        assert!(root_page.is_some());
        let root_page = root_page.unwrap();
        assert_eq!(root_page.page_data.html_body, "<h1>Test Website</h1>");

        let sub_page = website.get_page_by_url("/subpage/");
        assert!(sub_page.is_some());
        let sub_page = sub_page.unwrap();
        assert_eq!(sub_page.page_data.html_body, "<h1>A sub page</h1>");
    }
}
......
{
    "page_data": {
        "title": "New Website",
        "lang": "en",
        "slug": "",
        "description": "A new website",
        "html_body": "<h1>New Website</h1>"
    },
    "sub_pages": [
        {
            "page_data": {
                "title": "A sub page",
                "lang": "en",
                "slug": "subpage",
                "description": "A sub page of the new web site",
                "html_body": "<h1>A sub page</h1>"
            }
        },
        {
            "page_data": {
                "title": "Some other page",
                "lang": "en",
                "slug": "otherpage",
                "description": "Some other page of the new web site",
                "html_body": "<h1>Another page</h1>"
            },
            "sub_pages": [
                {
                    "page_data": {
                        "title": "A sub page of the other page",
                        "lang": "en",
                        "slug": "othersubpage",
                        "description": "A sub page of the other page of the new web site",
                        "html_body": "<h1>A subpage</h1>"
                    }
                }
            ]
        }
    ]
}