Commit c0676f8f authored by peterrabbit

wip model structure

parent 99f17aa8
@@ -6,7 +6,7 @@ use std::path::PathBuf;
pub async fn page(website: web::Data<WebSite>, pth: web::Path<PathBuf>) -> impl Responder {
let pth = pth.into_inner();
match website.get_page_by_url(&pth) {
Some(page) => HttpResponse::Ok().body(page.html_doc.to_string()),
Some(page) => HttpResponse::Ok().body(page.html.to_string()),
None => HttpResponse::NotFound().body(format!("Not found {}", pth.display())),
}
}
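
For context, a minimal sketch of how this `page` handler might be wired into an actix-web server. The commit does not show the server setup, so the route pattern, bind address, and the `AppConfig::default()` constructor below are assumptions, not code from this repository.

    use actix_web::{web, App, HttpServer};

    // Hypothetical server setup; assumes `page`, `WebSite`, and `AppConfig`
    // from this crate are in scope. The catch-all `{path:.*}` segment lets
    // nested slugs such as "subpage/nested" reach the handler as a PathBuf.
    #[actix_web::main]
    async fn main() -> std::io::Result<()> {
        let config = AppConfig::default(); // assumed constructor
        let website = web::Data::new(WebSite::load(&config));
        HttpServer::new(move || {
            App::new()
                .app_data(website.clone())
                .route("/{path:.*}", web::get().to(page))
        })
        .bind(("127.0.0.1", 8080))?
        .run()
        .await
    }
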
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct StyleSheet(pub HashMap<String, String>);
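
The new `StyleSheet` newtype is just a property/value map for now; nothing in this commit renders it. A hypothetical sketch of how such a map could be flattened into a CSS declaration block, purely to illustrate the shape of the data (the function name and semantics are not from the commit):

    use std::collections::HashMap;

    // Hypothetical: turn {"display": "flex", "gap": "1rem"} into "display: flex; gap: 1rem;"
    fn declarations(rules: &HashMap<String, String>) -> String {
        rules
            .iter()
            .map(|(prop, val)| format!("{}: {};", prop, val))
            .collect::<Vec<_>>()
            .join(" ")
    }

    fn main() {
        let mut rules = HashMap::new();
        rules.insert("display".to_string(), "flex".to_string());
        rules.insert("gap".to_string(), "1rem".to_string());
        println!("{}", declarations(&rules)); // order follows HashMap iteration
    }
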
use crate::website::page::PageData;
use super::page::Page;
use regex::{Captures, Regex};
use serde::{Deserialize, Serialize};
pub const HTML_DOC_TEMPLATE: &'static str = "
// const CSS_LINK_FRAGMENT: &'static str = "<link rel='stylesheet' href='{url}'>";
// const SCRIPT_FRAGMENT: &'static str = "<script src='{url}'></script>";
const HTML_DOC_TEMPLATE: &'static str = "
<html lang='{lang}'>
<head>
<meta charset='UTF-8'>
@@ -10,14 +13,14 @@ pub const HTML_DOC_TEMPLATE: &'static str = "
<meta name='viewport' content='width=device-width, initial-scale=1.0'>
<meta name='description' content='{description}'>
<title>{title}</title>
<link rel='stylesheet' href='{css}'>
{css}
</head>
<body>
{body}
</body>
<script src='{js}'></script>
{js}
</html>
";
@@ -26,21 +29,23 @@ pub const HTML_DOC_TEMPLATE: &'static str = "
pub struct HtmlDoc(String);
impl HtmlDoc {
pub fn from_page_data(page_data: &PageData) -> Self {
pub fn from_page(page: &Page) -> Self {
let re = Regex::new(r#"\{[a-z]+\}"#).unwrap();
let html = re
.replace_all(HTML_DOC_TEMPLATE, |captures: &Captures| {
let placeholder = captures.iter().next().unwrap().unwrap().as_str();
let placeholder = placeholder[1..placeholder.len() - 1].to_owned();
page_data.field_from_str_key(placeholder)
page.text_from_key(placeholder)
})
.to_string();
HtmlDoc(html)
}
}
pub fn to_string(&self) -> String {
self.0.clone()
impl std::fmt::Display for HtmlDoc {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "{}", self.0)
}
}
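
A self-contained sketch of the placeholder-substitution technique that `HtmlDoc::from_page` uses, with a plain `HashMap` standing in for the `Page` lookup; the `fill_template` name and the tiny template are illustrative only:

    use regex::{Captures, Regex};
    use std::collections::HashMap;

    fn fill_template(template: &str, values: &HashMap<&str, String>) -> String {
        // Matches placeholders of the form {lang}, {title}, {body}, ...
        let re = Regex::new(r#"\{[a-z]+\}"#).unwrap();
        re.replace_all(template, |caps: &Captures| {
            let raw = caps.get(0).unwrap().as_str();
            let key = &raw[1..raw.len() - 1]; // strip the surrounding braces
            values.get(key).cloned().unwrap_or_default() // unknown keys become ""
        })
        .to_string()
    }

    fn main() {
        let mut values = HashMap::new();
        values.insert("title", "Test Website".to_string());
        values.insert("body", "<h1>Test Website</h1>".to_string());
        let html = fill_template("<title>{title}</title>\n<body>{body}</body>", &values);
        assert!(html.contains("<h1>Test Website</h1>"));
    }
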
use super::css::StyleSheet;
use serde::{Deserialize, Serialize};
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Item {
contents: Vec<ItemContent>,
layout: StyleSheet,
}
impl std::fmt::Display for Item {
fn fmt(&self, _f: &mut std::fmt::Formatter) -> std::fmt::Result {
unimplemented!()
}
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct ItemContent {}
mod css;
mod html;
mod item;
mod page;
mod website;
use crate::website::html::HtmlDoc;
use super::css::StyleSheet;
use super::html::HtmlDoc;
use super::item::*;
use serde::{Deserialize, Serialize};
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct PageData {
pub title: String,
pub lang: String,
pub description: String,
pub slug: String,
pub html_body: String,
pub css_src: Option<String>,
pub js_src: Option<String>,
pub struct Page {
template: PageTemplate,
body: PageBody,
pub metadata: PageMetadata,
pub sub_pages: Vec<Page>,
pub html: HtmlDoc,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct WebPage {
pub page_data: PageData,
pub html_doc: HtmlDoc,
pub struct PageTemplate {
layout: StyleSheet,
name: String,
fixed_contents: Vec<ItemContent>,
}
impl PageData {
pub fn to_web_page(&self) -> WebPage {
WebPage {
page_data: self.clone(),
html_doc: HtmlDoc::from_page_data(&self),
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct PageBody(Vec<Item>);
impl std::fmt::Display for PageBody {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(
f,
"{}",
&self
.0
.iter()
.map(|i| i.to_string())
.collect::<Vec<String>>()
.join("")
)
}
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct PageMetadata {
pub title: String,
pub lang: String,
pub description: String,
pub url_slug: String,
pub css_src: Vec<String>,
pub js_src: Vec<String>,
}
impl Page {
pub fn build_html(&mut self) {
self.html = HtmlDoc::from_page(self);
}
pub fn field_from_str_key(&self, key: String) -> String {
pub fn text_from_key(&self, key: String) -> String {
match &key[..] {
"title" => self.title.to_owned(),
"lang" => self.lang.to_owned(),
"description" => self.description.to_owned(),
"slug" => self.slug.to_owned(),
"body" => self.html_body.to_owned(),
"css" => self.css_src.as_ref().unwrap_or(&String::new()).to_owned(),
"js" => self.js_src.as_ref().unwrap_or(&String::new()).to_owned(),
"title" => self.metadata.title.to_owned(),
"lang" => self.metadata.lang.to_owned(),
"description" => self.metadata.description.to_owned(),
"slug" => self.metadata.url_slug.to_owned(),
"body" => self.body.to_string(),
// "css" => self.css_src.as_ref().unwrap_or(&String::new()).to_owned(),
// "js" => self.js_src.as_ref().unwrap_or(&String::new()).to_owned(),
_ => String::new(),
}
}
}
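
Note that with the `css` and `js` arms commented out, the `{css}` and `{js}` placeholders in the new template now fall through to the `_` arm and render as empty strings. A stripped-down, self-contained illustration of that dispatch shape (the `Meta` struct and its field set are illustrative, not the commit's types):

    struct Meta {
        title: String,
        lang: String,
    }

    // Same shape as Page::text_from_key: known keys map to fields,
    // anything else (including the disabled "css"/"js") yields "".
    fn text_from_key(meta: &Meta, key: &str) -> String {
        match key {
            "title" => meta.title.clone(),
            "lang" => meta.lang.clone(),
            _ => String::new(),
        }
    }

    fn main() {
        let meta = Meta { title: "Test Website".into(), lang: "en".into() };
        assert_eq!(text_from_key(&meta, "title"), "Test Website");
        assert_eq!(text_from_key(&meta, "css"), ""); // no arm, so empty
    }
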
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct PagesTree {
pub page_data: PageData,
pub sub_pages: Option<Vec<PagesTree>>,
}
use crate::app::AppConfig;
use crate::website::page::{PagesTree, WebPage};
use crate::website::page::{Page, PageTemplate};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::PathBuf;
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct WebSite {
pages_tree: PagesTree,
pages_index_by_url: HashMap<PathBuf, WebPage>,
root_page: Page,
assets_index: Vec<String>,
templates: Vec<PageTemplate>,
pages_index: HashMap<PathBuf, Page>,
}
impl WebSite {
pub fn new(pages_tree: PagesTree) -> Self {
let mut pages_index_by_url = HashMap::new();
WebSite::create_index_by_url(&mut pages_index_by_url, &pages_tree, PathBuf::from("/"));
WebSite {
pages_tree,
pages_index_by_url,
pub fn from_json(json: &str) -> Self {
let mut obj: Self = serde_json::from_str(json).unwrap();
obj.build_assets_index();
obj.build_pages_index(obj.root_page.clone(), PathBuf::from("/"));
obj.root_page.build_html();
for p in obj.root_page.sub_pages.iter_mut() {
p.build_html();
}
}
pub fn from_json_str(json: &str) -> Self {
WebSite::new(serde_json::from_str(json).unwrap())
obj
}
pub fn load(config: &AppConfig) -> WebSite {
@@ -34,52 +33,49 @@ impl WebSite {
Some(pth) => pth.clone(),
};
WebSite::from_json_str(&std::fs::read_to_string(file_path).unwrap())
WebSite::from_json(&std::fs::read_to_string(file_path).unwrap())
}
fn create_index_by_url(
index: &mut HashMap<PathBuf, WebPage>,
pages_tree: &PagesTree,
from_url: PathBuf,
) {
let page_data = pages_tree.page_data.clone();
let url = from_url.join(&page_data.slug);
fn build_pages_index(&mut self, root_page: Page, from_url: PathBuf) {
let url = from_url.join(&root_page.metadata.url_slug);
index.insert(url.clone(), page_data.to_web_page());
self.pages_index.insert(url.clone(), root_page.clone());
if let Some(sub_pages) = &pages_tree.sub_pages {
for pt in sub_pages {
WebSite::create_index_by_url(index, pt, url.clone());
}
for p in root_page.sub_pages {
self.build_pages_index(p, url.clone());
}
}
pub fn get_page_by_url(&self, url: &PathBuf) -> Option<&WebPage> {
self.pages_index_by_url.get(&PathBuf::from("/").join(url))
fn build_assets_index(&mut self) {
unimplemented!();
}
pub fn get_page_by_url(&self, url: &PathBuf) -> Option<&Page> {
self.pages_index.get(&PathBuf::from("/").join(url))
}
}
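
A self-contained sketch of the recursive URL-indexing approach that `build_pages_index` takes: each page's slug is joined onto its parent's URL and the walk recurses into `sub_pages`. `MiniPage` and the empty root slug are assumptions made for the sake of a runnable example (the old test fixture resolved the root page at "/"):

    use std::collections::HashMap;
    use std::path::PathBuf;

    // Minimal stand-in for Page, just enough to show the recursion.
    struct MiniPage {
        url_slug: String,
        sub_pages: Vec<MiniPage>,
    }

    fn index_pages(index: &mut HashMap<PathBuf, String>, page: &MiniPage, from_url: PathBuf) {
        let url = from_url.join(&page.url_slug);
        index.insert(url.clone(), page.url_slug.clone());
        for p in &page.sub_pages {
            index_pages(index, p, url.clone());
        }
    }

    fn main() {
        let root = MiniPage {
            url_slug: String::new(), // assumed: root slug is empty, so its URL stays "/"
            sub_pages: vec![MiniPage {
                url_slug: "subpage".into(),
                sub_pages: vec![MiniPage { url_slug: "nested".into(), sub_pages: vec![] }],
            }],
        };
        let mut index = HashMap::new();
        index_pages(&mut index, &root, PathBuf::from("/"));
        assert!(index.contains_key(&PathBuf::from("/")));
        assert!(index.contains_key(&PathBuf::from("/subpage/nested")));
    }
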
#[cfg(test)]
mod test_website {
use super::*;
use crate::testing::TEST_JSON_WEBSITE;
// #[cfg(test)]
// mod test_website {
// use super::*;
// use crate::testing::TEST_JSON_WEBSITE;
#[test]
fn test_index_pages_by_slug() {
let website = WebSite::from_json_str(TEST_JSON_WEBSITE);
let root_page = website.get_page_by_url(&PathBuf::from("/"));
assert!(root_page.is_some());
let root_page = root_page.unwrap();
assert_eq!(root_page.page_data.html_body, "<h1>Test Website</h1>");
// #[test]
// fn test_index_pages_by_slug() {
// let website = WebSite::from_json(TEST_JSON_WEBSITE);
// let root_page = website.get_page_by_url(&PathBuf::from("/"));
// assert!(root_page.is_some());
// let root_page = root_page.unwrap();
// assert_eq!(root_page.page_data.html_body, "<h1>Test Website</h1>");
let sub_page = website.get_page_by_url(&PathBuf::from("subpage"));
assert!(sub_page.is_some());
let sub_page = sub_page.unwrap();
assert_eq!(sub_page.page_data.html_body, "<h1>A sub page</h1>");
// let sub_page = website.get_page_by_url(&PathBuf::from("subpage"));
// assert!(sub_page.is_some());
// let sub_page = sub_page.unwrap();
// assert_eq!(sub_page.page_data.html_body, "<h1>A sub page</h1>");
let nested_page = website.get_page_by_url(&PathBuf::from("subpage/nested"));
assert!(nested_page.is_some());
let nested_page = nested_page.unwrap();
assert_eq!(nested_page.page_data.html_body, "<h1>Nested page</h1>");
}
}
// let nested_page = website.get_page_by_url(&PathBuf::from("subpage/nested"));
// assert!(nested_page.is_some());
// let nested_page = nested_page.unwrap();
// assert_eq!(nested_page.page_data.html_body, "<h1>Nested page</h1>");
// }
// }
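
The disabled tests above still assert against the old `page_data.html_body` field, which no longer exists. A hedged sketch of how the root-page assertion might look once the fixture matches the new model; it assumes `TEST_JSON_WEBSITE` deserializes into the new `WebSite` shape, and it would still panic today because `build_assets_index` is `unimplemented!()`:

    // Hypothetical re-enabled test against the new Page model.
    #[cfg(test)]
    mod test_website {
        use super::*;
        use crate::testing::TEST_JSON_WEBSITE;

        #[test]
        fn test_index_pages_by_slug() {
            let website = WebSite::from_json(TEST_JSON_WEBSITE);
            let root_page = website
                .get_page_by_url(&PathBuf::from("/"))
                .expect("root page should be indexed at /");
            // The rendered document now lives in `page.html`, not `page_data.html_body`.
            assert!(root_page.html.to_string().contains("<h1>Test Website</h1>"));
        }
    }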