From 9dd20cacbb5af6d2cc67ec8fba697199712cc725 Mon Sep 17 00:00:00 2001
From: peterrabbit <peterrabbit@msi.home>
Date: Thu, 18 Aug 2022 14:04:33 +0200
Subject: [PATCH] html doc formatting

---
 Cargo.lock              |  1 +
 Cargo.toml              |  1 +
 src/main.rs             | 11 +++---
 src/website.rs          | 81 +++++++++++++++++++++++++++++++++--------
 templates/html_doc.html | 17 +++++++++
 5 files changed, 91 insertions(+), 20 deletions(-)
 create mode 100644 templates/html_doc.html

diff --git a/Cargo.lock b/Cargo.lock
index 69056ae..f69647c 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -375,6 +375,7 @@ name = "cms_rust"
 version = "0.1.0"
 dependencies = [
  "actix-web",
+ "regex",
  "rustls",
  "rustls-pemfile",
  "serde",
diff --git a/Cargo.toml b/Cargo.toml
index ac24c85..63e3fed 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -11,3 +11,4 @@ rustls = "0.20.6"
 rustls-pemfile = "1.0.1"
 serde = { version = "1.0.143", features = ["derive"] }
 serde_json = "1.0.83"
+regex = "1.6"
diff --git a/src/main.rs b/src/main.rs
index 8e35f65..e785b97 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,5 +1,6 @@
 mod website;
 use actix_web::{get, App, HttpResponse, HttpServer, Responder};
+use std::path::PathBuf;
 use website::WebSite;
 
 fn load_website_template() -> WebSite {
@@ -8,18 +9,18 @@ fn load_website_template() -> WebSite {
         .join("templates")
         .join("new_website.json");
     let site_template = std::fs::read_to_string(site_template_path).unwrap();
-    WebSite::new(serde_json::from_str(&site_template).unwrap())
+    WebSite::from_json_str(&site_template)
 }
 
 #[get("/{pth:.*}")]
 async fn page(
     website: actix_web::web::Data<WebSite>,
-    pth: actix_web::web::Path<String>,
+    pth: actix_web::web::Path<PathBuf>,
 ) -> impl Responder {
-    let pth = format!("/{}/", pth.into_inner()); // BUG Use trailing slash middleware ? or match root or not root
+    let pth = pth.into_inner();
     match website.get_page_by_url(&pth) {
-        Some(page) => HttpResponse::Ok().body(page.html_doc.clone()),
-        None => HttpResponse::NotFound().body(format!("Not found {}", pth)),
+        Some(page) => HttpResponse::Ok().body(page.html_doc.to_string()),
+        None => HttpResponse::NotFound().body(format!("Not found {}", pth.display())),
     }
 }
 
diff --git a/src/website.rs b/src/website.rs
index 648e010..bfb564c 100644
--- a/src/website.rs
+++ b/src/website.rs
@@ -1,5 +1,7 @@
+use regex::{Captures, Regex};
 use serde::{Deserialize, Serialize};
 use std::collections::HashMap;
+use std::path::PathBuf;
 
 #[derive(Debug, Serialize, Deserialize, Clone)]
 pub struct PageData {
@@ -8,19 +10,65 @@ pub struct PageData {
     pub description: String,
     pub slug: String,
     pub html_body: String,
+    pub css_src: Option<String>,
+    pub js_src: Option<String>,
+}
+
+#[derive(Debug, Serialize, Deserialize, Clone)]
+pub struct HtmlDoc(String);
+
+impl HtmlDoc {
+    pub fn from_page_data(page_data: &PageData) -> Self {
+        let html_doc = std::fs::read_to_string(
+            std::env::current_dir()
+                .unwrap()
+                .join("templates")
+                .join("html_doc.html"),
+        )
+        .expect("Missing html_doc template");
+
+        let re = Regex::new(r"\{[a-z]+\}").unwrap();
+
+        let html_doc = re
+            .replace_all(&html_doc, |captures: &Captures| {
+                let placeholder = captures.get(0).unwrap().as_str();
+                let placeholder = placeholder[1..placeholder.len() - 1].to_owned();
+                page_data.field_from_str_key(placeholder)
+            })
+            .to_string();
+
+        HtmlDoc(html_doc)
+    }
+
+    pub fn to_string(&self) -> String {
+        self.0.clone()
+    }
 }
 
 #[derive(Debug, Serialize, Deserialize, Clone)]
 pub struct WebPage {
     pub page_data: PageData,
-    pub html_doc: String,
+    pub html_doc: HtmlDoc,
 }
 
 impl PageData {
     pub fn to_web_page(&self) -> WebPage {
         WebPage {
             page_data: self.clone(),
-            html_doc: self.html_body.clone(), // TMP
+            html_doc: HtmlDoc::from_page_data(self),
+        }
+    }
+
+    pub fn field_from_str_key(&self, key: String) -> String {
+        match key.as_str() {
+            "title" => self.title.to_owned(),
+            "lang" => self.lang.to_owned(),
+            "description" => self.description.to_owned(),
+            "slug" => self.slug.to_owned(),
+            "body" => self.html_body.to_owned(),
+            "css" => self.css_src.clone().unwrap_or_default(),
+            "js" => self.js_src.clone().unwrap_or_default(),
+            _ => String::new(),
         }
     }
 }
@@ -34,13 +82,13 @@ pub struct PagesTree {
 #[derive(Debug, Serialize, Deserialize, Clone)]
 pub struct WebSite {
     pages_tree: PagesTree,
-    pages_index_by_url: HashMap<String, WebPage>,
+    pages_index_by_url: HashMap<PathBuf, WebPage>,
 }
 
 impl WebSite {
     pub fn new(pages_tree: PagesTree) -> Self {
         let mut pages_index_by_url = HashMap::new();
-        WebSite::create_index_by_url(&mut pages_index_by_url, &pages_tree, String::new());
+        WebSite::create_index_by_url(&mut pages_index_by_url, &pages_tree, PathBuf::from("/"));
 
         WebSite {
             pages_tree,
@@ -48,25 +96,29 @@ impl WebSite {
         }
     }
 
+    pub fn from_json_str(json: &str) -> Self {
+        WebSite::new(serde_json::from_str(json).unwrap())
+    }
+
     fn create_index_by_url(
-        index: &mut HashMap<String, WebPage>,
+        index: &mut HashMap<PathBuf, WebPage>,
         pages_tree: &PagesTree,
-        from_url: String,
+        from_url: PathBuf,
     ) {
         let page_data = pages_tree.page_data.clone();
-        let url = format!("{}{}/", from_url, page_data.slug);
+        let url = from_url.join(&page_data.slug);
 
-        index.insert(url.to_owned(), page_data.to_web_page());
+        index.insert(url.clone(), page_data.to_web_page());
 
         if let Some(sub_pages) = &pages_tree.sub_pages {
             for pt in sub_pages {
-                WebSite::create_index_by_url(index, pt, url.to_owned());
+                WebSite::create_index_by_url(index, pt, url.clone());
             }
         }
     }
 
-    pub fn get_page_by_url(&self, url: &String) -> Option<&WebPage> {
-        self.pages_index_by_url.get(url)
+    pub fn get_page_by_url(&self, url: &PathBuf) -> Option<&WebPage> {
+        self.pages_index_by_url.get(&PathBuf::from("/").join(url))
     }
 }
 
@@ -109,14 +161,13 @@ mod test_website {
 
     #[test]
     fn test_index_pages_by_slug() {
-        let pages_tree: PagesTree = serde_json::from_str(JSON_TEMPLATE).unwrap();
-        let website = WebSite::new(pages_tree);
-        let root_page = website.get_page_by_url(&"/".to_string());
+        let website = WebSite::from_json_str(JSON_TEMPLATE);
+        let root_page = website.get_page_by_url(&PathBuf::from("/"));
         assert!(root_page.is_some());
         let root_page = root_page.unwrap();
         assert_eq!(root_page.page_data.html_body, "<h1>Test Website</h1>");
 
-        let sub_page = website.get_page_by_url(&"/subpage/".to_string());
+        let sub_page = website.get_page_by_url(&PathBuf::from("/subpage"));
         assert!(sub_page.is_some());
         let sub_page = sub_page.unwrap();
         assert_eq!(sub_page.page_data.html_body, "<h1>A sub page</h1>");
diff --git a/templates/html_doc.html b/templates/html_doc.html
new file mode 100644
index 0000000..e856898
--- /dev/null
+++ b/templates/html_doc.html
@@ -0,0 +1,17 @@
+<html lang="{lang}">
+
+<head>
+    <meta charset="UTF-8">
+    <meta http-equiv="X-UA-Compatible" content="IE=edge">
+    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+    <meta name="description" content="{description}">
+    <title>{title}</title>
+    <link rel="stylesheet" href="{css}">
+</head>
+
+<body>
+    {body}
+</body>
+<script src="{js}"></script>
+
+</html>
\ No newline at end of file
-- 
GitLab