diff --git a/Cargo.lock b/Cargo.lock
index 2f84d36..a591968 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -2169,6 +2169,7 @@ dependencies = [
"notify",
"num_cpus",
"once_cell",
+ "rayon",
"rouille",
"serde",
"serde_json",
diff --git a/Cargo.toml b/Cargo.toml
index 26b96d1..ec98fb8 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -18,6 +18,7 @@ minify-html = "0.15.0"
notify = "6.1.1"
num_cpus = "1.16.0"
once_cell = "1.20.2"
+rayon = "1.10"
rouille = "3.6.2"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0.120"
diff --git a/generate.sh b/generate.sh
deleted file mode 100755
index 26e4f47..0000000
--- a/generate.sh
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/bin/bash
-
-rm -rf ./testing
-mkdir testing
-cd testing
-mkdir src
-
-cd src
-mkdir components
-mkdir data
-mkdir pages
-mkdir public
-mkdir templates
-
-for i in {1..1000}; do
- random_string=$(openssl rand -base64 24 | tr -dc 'A-Z')
- # touch "components/Component$random_string.html"
- # touch "data/Data$random_string.json"
- # touch "pages/page$random_string.html"
- # touch "public/public$random_string.txt"
- # touch "templates/Template$random_string.html"
-
- echo "
Component $random_string
" > "components/$random_string.component.html"
- echo "[{\"A\": \"$random_string\"}]" > "data/$random_string.data.json"
- echo "Template - {A} - $random_string
" > "templates/$random_string.template.html"
- echo "Page $random_string
<$random_string />
<-{$random_string} />" > "pages/$random_string.html"
- # echo "Public $random_string" > "public/public$random_string.txt"
-
-done
diff --git a/src/dev/mod.rs b/src/dev/mod.rs
index 14d6ef1..f22080e 100644
--- a/src/dev/mod.rs
+++ b/src/dev/mod.rs
@@ -14,7 +14,6 @@ use std::sync::mpsc::Receiver;
use std::sync::{Arc, Mutex};
use std::thread;
use std::time::Duration;
-use WithItem::None;
pub static WS_PORT: OnceCell<u16> = OnceCell::new();
pub const SCRIPT: &str = include_str!("./inline_script.html");
@@ -30,7 +29,7 @@ fn dev_rebuild(res: Result) -> Result<(), Vec Err(vec![ProcessError {
error_type: ErrorType::Other,
- item: None,
+ item: WithItem::None,
path: PathBuf::from("Watcher"),
message: Some(format!("{e} (internal watcher error)")),
}]),
@@ -39,14 +38,25 @@ fn dev_rebuild(res: Result) -> Result<(), Vec, src: PathBuf, ws_port: u16) {
let clients: Arc<Mutex<HashMap<u64, Responder>>> = Arc::new(Mutex::new(HashMap::new()));
- let event_hub = simple_websockets::launch(ws_port)
- .unwrap_or_else(|_| panic!("failed to listen on port {}", ws_port));
+ let event_hub = match simple_websockets::launch(ws_port) {
+ Ok(hub) => hub,
+ Err(e) => {
+ eprintln!("Failed to launch websocket server on port {}: {:?}", ws_port, e);
+ return;
+ }
+ };
// Spawn thread to handle messages from receiver
let clients_clone = Arc::clone(&clients);
thread::spawn(move || loop {
- let message = receiver.recv().unwrap();
- let locked_clients = clients_clone.lock().unwrap();
+ let message = match receiver.recv() {
+ Ok(msg) => msg,
+ Err(_) => return, // Channel closed, exit thread
+ };
+ let locked_clients = match clients_clone.lock() {
+ Ok(c) => c,
+ Err(_) => return, // Mutex poisoned, exit thread
+ };
let json = serde_json::json!({
"message": if message == "reload" { "reload" } else { &message }
@@ -62,13 +72,15 @@ fn spawn_websocket_handler(receiver: Receiver, src: PathBuf, ws_port: u1
loop {
match event_hub.poll_event() {
Event::Connect(client_id, responder) => {
- let mut locked_clients = clients.lock().unwrap();
- locked_clients.insert(client_id, responder);
+ if let Ok(mut locked_clients) = clients.lock() {
+ locked_clients.insert(client_id, responder);
+ }
}
Event::Disconnect(client_id) => {
- let mut locked_clients = clients.lock().unwrap();
- locked_clients.remove(&client_id);
+ if let Ok(mut locked_clients) = clients.lock() {
+ locked_clients.remove(&client_id);
+ }
}
Event::Message(_, msg) => {
@@ -84,8 +96,14 @@ fn spawn_websocket_handler(receiver: Receiver, src: PathBuf, ws_port: u1
fn handle_markdown_update(json: &serde_json::Value, src: &PathBuf) {
if json["type"] == "markdown_update" {
- let content = json["content"].as_str().unwrap().trim();
- let original = json["originalContent"].as_str().unwrap().trim();
+ let content = match json["content"].as_str() {
+ Some(s) => s.trim(),
+ None => return,
+ };
+ let original = match json["originalContent"].as_str() {
+ Some(s) => s.trim(),
+ None => return,
+ };
if let Ok(files) = utils::walk_dir(src) {
for path in files {
if let Ok(file_content) = fs::read_to_string(&path) {
@@ -143,38 +161,44 @@ pub fn spawn_watcher(args: Vec) {
.with_compare_contents(true)
.with_poll_interval(Duration::from_millis(200));
- let mut watcher = notify::PollWatcher::new(
+ let mut watcher = match notify::PollWatcher::new(
move |res| {
let result = dev_rebuild(res);
if result.is_ok() {
- let send = sender.send("reload".to_string());
- if send.is_err() {
- let e = send.unwrap_err();
+ if let Err(e) = sender.send("reload".to_string()) {
cprintln!("Warning: failed to send reload signal: >>: {e}");
}
- } else {
- let e = result.unwrap_err();
+ } else if let Err(e) = result {
let _ = sender.send(utils::format_errs(&e));
utils::print_vec_errs(&e);
}
},
config,
- )
- .unwrap();
+ ) {
+ Ok(w) => w,
+ Err(e) => {
+ eprintln!("Failed to create watcher: {}", e);
+ return;
+ }
+ };
- watcher
- .watch(&src, RecursiveMode::Recursive)
- .expect("watch failed");
+ if let Err(e) = watcher.watch(&src, RecursiveMode::Recursive) {
+ eprintln!("Failed to watch directory: {}", e);
+ return;
+ }
let preview_addr = format!("0.0.0.0:{}", preview_port);
rouille::start_server(preview_addr, move |request| {
{
- let mut response = rouille::match_assets(request, dist.to_str().unwrap());
+ let dist_str = match dist.to_str() {
+ Some(s) => s,
+ None => return Response::html("500 Internal Server Error").with_status_code(500),
+ };
+ let mut response = rouille::match_assets(request, dist_str);
if request.url() == "/" {
- let f = fs::File::open(dist.join("index").with_extension("html"));
- if f.is_ok() {
- response = Response::from_file("text/html", f.unwrap());
+ if let Ok(f) = fs::File::open(dist.join("index").with_extension("html")) {
+ response = Response::from_file("text/html", f);
}
}
if response.is_success() {
diff --git a/src/error.rs b/src/error.rs
index ced1f32..0e5bb07 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -47,11 +47,10 @@ impl fmt::Display for ProcessError {
Some(msg) => cformat!("{msg}>"),
None => String::new(),
};
- let path = &self
+ let path = self
.path
.to_str()
- .to_owned()
- .expect("Couldn't turn PathBuf into string whilst formatting error message.");
+ .unwrap_or("");
let err_msg = match self.error_type {
ErrorType::Io => {
cformat!("The {item} {path}> encountered an IO error. {msg_fmt}")
@@ -98,11 +97,7 @@ impl MapProcErr for Result {
message: Option,
) -> Result {
self.map_err(|e| {
- let msg = if message.is_some() {
- message.unwrap()
- } else {
- format!("{}", e)
- };
+ let msg = message.unwrap_or_else(|| format!("{}", e));
ProcessError {
error_type,
item,
diff --git a/src/handlers/components.rs b/src/handlers/components.rs
index 4cf5083..30e426b 100644
--- a/src/handlers/components.rs
+++ b/src/handlers/components.rs
@@ -158,6 +158,12 @@ pub fn process_component(
let mut errors: Vec<ProcessError> = Vec::new();
let mut output = input;
+
+ // Early return if no matches
+ if !regex.is_match(&output).unwrap_or(false) {
+ return ProcessResult { output, errors };
+ }
+
let mut replacements = Vec::new();
for f in regex.find_iter(&output) {
@@ -180,7 +186,7 @@ pub fn process_component(
ComponentTypes::SelfClosing => {
let result = get_component_self(src, name, targets, hist.clone());
errors.extend(result.errors);
- replacements.push((found_str.to_string(), result.output));
+ replacements.push((found_str.to_owned(), result.output));
}
ComponentTypes::Wrapping => {
let end = format!("{}>", name);
@@ -193,7 +199,7 @@ pub fn process_component(
replacements.push((content, String::new()));
}
replacements.push((end, String::new()));
- replacements.push((found_str.to_string(), result.output));
+ replacements.push((found_str.to_owned(), result.output));
}
}
}
diff --git a/src/handlers/entries.rs b/src/handlers/entries.rs
index 4a2c023..5771044 100644
--- a/src/handlers/entries.rs
+++ b/src/handlers/entries.rs
@@ -16,7 +16,7 @@ pub fn process_entry(
kv: Vec<(&str, &str)>,
) -> Vec {
let mut errors: Vec<ProcessError> = Vec::new();
- let is_dev = *IS_DEV.get().unwrap();
+ let is_dev = *IS_DEV.get().unwrap_or(&false);
// Reset KaTeX usage flag for this page
katex_assets::reset_katex_flag();
@@ -37,9 +37,21 @@ pub fn process_entry(
.join("templates")
.join(name.replace(":", "/"))
.with_extension("frame.html");
- let result_path = src
- .parent()
- .unwrap()
+
+ let src_parent = match src.parent() {
+ Some(p) => p,
+ None => {
+ errors.push(ProcessError {
+ error_type: ErrorType::Io,
+ item: WithItem::File,
+ path: src.clone(),
+ message: Some("Source directory has no parent".to_string()),
+ });
+ return errors;
+ }
+ };
+
+ let result_path = src_parent
.join(if is_dev { "dev" } else { "dist" })
.join(result_path.trim_start_matches("/"));
@@ -121,10 +133,9 @@ pub fn process_entry(
if is_dev && !s.contains("// * SCRIPT INCLUDED IN DEV MODE") {
s = s.replace("", &format!("{}", SCRIPT));
- s = s.replace(
- "__SIMPLE_WS_PORT_PLACEHOLDER__",
- &WS_PORT.get().unwrap().to_string(),
- );
+ if let Some(ws_port) = WS_PORT.get() {
+ s = s.replace("__SIMPLE_WS_PORT_PLACEHOLDER__", &ws_port.to_string());
+ }
}
let output = minify(&s.into_bytes(), &minify_html::Cfg::spec_compliant());
diff --git a/src/handlers/markdown.rs b/src/handlers/markdown.rs
index 51d8443..f9acab0 100644
--- a/src/handlers/markdown.rs
+++ b/src/handlers/markdown.rs
@@ -43,7 +43,10 @@ fn render_katex(html: &str) -> Result {
for captures in MATH_SPAN_REGEX.captures_iter(html) {
if let Ok(cap) = captures {
- let mat = cap.get(0).unwrap();
+ let mat = match cap.get(0) {
+ Some(m) => m,
+ None => continue,
+ };
let start = mat.start();
let end = mat.end();
@@ -51,8 +54,14 @@ fn render_katex(html: &str) -> Result {
result.push_str(&html[last_end..start]);
// Extract math style and content
- let style = cap.get(1).unwrap().as_str();
- let latex = cap.get(2).unwrap().as_str();
+ let style = match cap.get(1) {
+ Some(m) => m.as_str(),
+ None => continue,
+ };
+ let latex = match cap.get(2) {
+ Some(m) => m.as_str(),
+ None => continue,
+ };
// Configure KaTeX options
let opts = Opts::builder()
@@ -90,6 +99,11 @@ fn render_katex(html: &str) -> Result {
}
pub fn render_markdown(input: String) -> String {
+ // Early return if no markdown
+ if !input.contains("") {
+ return input;
+ }
+
let mut plugins = Plugins::default();
plugins.render.codefence_syntax_highlighter = Some(&*SYNTAX_HIGHLIGHTER);
let options = create_markdown_options();
@@ -114,7 +128,8 @@ pub fn render_markdown(input: String) -> String {
Ok(html) => html,
Err(e) => {
eprintln!("KaTeX rendering error: {}", e);
- std::process::exit(1);
+ // Return the rendered HTML without KaTeX processing on error
+ rendered
}
};
diff --git a/src/handlers/pages.rs b/src/handlers/pages.rs
index 2d3a311..b5f77d2 100644
--- a/src/handlers/pages.rs
+++ b/src/handlers/pages.rs
@@ -7,9 +7,8 @@ use crate::handlers::templates::process_template;
use crate::utils::ProcessResult;
use crate::IS_DEV;
use minify_html::minify;
-use std::sync::mpsc;
+use rayon::prelude::*;
use std::sync::Arc;
-use std::thread;
use std::{collections::HashSet, fs, path::PathBuf};
fn process_step(
@@ -88,7 +87,6 @@ pub fn process_pages(
};
let working_dir = if dev { "dev" } else { "dist" };
- let (sender, receiver) = mpsc::channel();
let minify_cfg = Arc::new(minify_html::Cfg::spec_compliant());
let mut file_tasks = Vec::new();
@@ -105,40 +103,36 @@ pub fn process_pages(
}
}
+ // Process directories sequentially
for (dir, src, source, path) in dir_tasks {
if let Err(mut errs) = process_pages(&dir, &src, source, path) {
errors.append(&mut errs);
}
}
+ // Process files in parallel using rayon
if !file_tasks.is_empty() {
- let max_threads = std::cmp::min(file_tasks.len(), num_cpus::get());
- let chunk_size = (file_tasks.len() + max_threads - 1) / max_threads;
-
- for chunk in file_tasks.chunks(chunk_size) {
- for path in chunk {
- let sender = sender.clone();
- let dir = dir.clone();
- let src = src.clone();
- let path = path.clone();
- let working_dir = working_dir.to_string();
- let minify_cfg = Arc::clone(&minify_cfg);
-
- thread::spawn(move || {
- let result = process_single_file(path, dir, src, working_dir, dev, minify_cfg);
- sender.send(result).unwrap();
- });
+ let results: Vec<Result<(), Vec<ProcessError>>> = file_tasks
+ .par_iter()
+ .map(|path| {
+ process_single_file(
+ path.clone(),
+ dir.clone(),
+ src.clone(),
+ working_dir.to_string(),
+ dev,
+ Arc::clone(&minify_cfg),
+ )
+ })
+ .collect();
+
+ for result in results {
+ if let Err(e) = result {
+ errors.extend(e);
}
}
}
- drop(sender);
- for result in receiver {
- if let Err(e) = result {
- errors.extend(e);
- }
- }
-
if errors.is_empty() {
Ok(())
} else {
@@ -171,12 +165,33 @@ fn process_single_file(
let result = page(&src, file_content, HashSet::new());
errors.extend(result.errors);
- let out_path = dir.join(&working_dir).join(
- path.strip_prefix(&src)
- .unwrap()
- .strip_prefix("pages")
- .unwrap(),
- );
+ let relative_to_src = match path.strip_prefix(&src) {
+ Ok(rel) => rel,
+ Err(e) => {
+ errors.push(ProcessError {
+ error_type: ErrorType::Io,
+ item: WithItem::File,
+ path: path.clone(),
+ message: Some(format!("Failed to strip src prefix: {}", e)),
+ });
+ return Err(errors);
+ }
+ };
+
+ let relative_to_pages = match relative_to_src.strip_prefix("pages") {
+ Ok(rel) => rel,
+ Err(e) => {
+ errors.push(ProcessError {
+ error_type: ErrorType::Io,
+ item: WithItem::File,
+ path: path.clone(),
+ message: Some(format!("Failed to strip pages prefix: {}", e)),
+ });
+ return Err(errors);
+ }
+ };
+
+ let out_path = dir.join(&working_dir).join(relative_to_pages);
if let Some(parent) = out_path.parent() {
if let Err(e) = fs::create_dir_all(parent) {
diff --git a/src/handlers/templates.rs b/src/handlers/templates.rs
index 5ede490..febe73a 100644
--- a/src/handlers/templates.rs
+++ b/src/handlers/templates.rs
@@ -199,6 +199,12 @@ pub fn get_template(src: &PathBuf, name: &str, mut hist: HashSet) -> Pr
pub fn process_template(src: &PathBuf, input: String, hist: HashSet) -> ProcessResult {
let mut errors = Vec::new();
let mut output = input;
+
+ // Early return if no templates
+ if !TEMPLATE_REGEX.is_match(&output).unwrap_or(false) {
+ return ProcessResult { output, errors };
+ }
+
let mut replacements = Vec::new();
for f in TEMPLATE_REGEX.find_iter(&output) {
@@ -214,7 +220,7 @@ pub fn process_template(src: &PathBuf, input: String, hist: HashSet) ->
let result = get_template(src, template_name, hist.clone());
errors.extend(result.errors);
- replacements.push((found_str.to_string(), result.output));
+ replacements.push((found_str.to_owned(), result.output));
}
}
diff --git a/src/main.rs b/src/main.rs
index 66b62ad..a4b253e 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -67,7 +67,7 @@ fn build(args: Vec) -> Result<(), Vec> {
let src = dir.join("src");
- let working_dir = if *IS_DEV.get().unwrap() {
+ let working_dir = if *IS_DEV.get().unwrap_or(&false) {
"dev"
} else {
"dist"
diff --git a/src/utils.rs b/src/utils.rs
index 22f48f0..d3b449a 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -44,12 +44,20 @@ pub fn get_targets_kv<'a>(
Ok(targets)
}
-pub fn kv_replace(kv: Vec<(&str, &str)>, mut from: String) -> String {
+pub fn kv_replace(kv: Vec<(&str, &str)>, from: String) -> String {
+ if kv.is_empty() {
+ return from;
+ }
+
+ let mut result = from;
for (k, v) in kv {
+ if !result.contains(k) {
+ continue;
+ }
let key = format!("${{{k}}}");
- from = from.replace(&key, v);
+ result = result.replace(&key, v);
}
- from
+ result
}
pub fn get_inside(input: String, from: &str, to: &str) -> Option {
@@ -60,38 +68,37 @@ pub fn get_inside(input: String, from: &str, to: &str) -> Option {
if start_pos >= end_index {
None
} else {
- Some(input[start_pos..end_index].to_string())
+ Some(input[start_pos..end_index].to_owned())
}
}
pub fn copy_into(public: &PathBuf, dist: &PathBuf) -> Result<(), ProcessError> {
if !dist.exists() {
- fs::create_dir_all(dist).map_proc_err(File, Io, &PathBuf::from(dist), None)?;
+ fs::create_dir_all(dist).map_proc_err(File, Io, dist, None)?;
}
- let entries = fs::read_dir(public).map_proc_err(File, Io, &PathBuf::from(public), None)?;
-
- for entry in entries {
- let entry = entry.unwrap().path();
- let dest_path = dist.join(entry.strip_prefix(public).unwrap());
-
- if entry.is_dir() {
- copy_into(&entry, &dest_path)?;
+ let entries = fs::read_dir(public).map_proc_err(File, Io, public, None)?;
+
+ for entry_result in entries {
+ let entry = entry_result.map_proc_err(File, Io, public, None)?;
+ let entry_path = entry.path();
+ let relative = entry_path
+ .strip_prefix(public)
+ .map_err(|e| ProcessError {
+ error_type: ErrorType::Io,
+ item: File,
+ path: entry_path.clone(),
+ message: Some(format!("Failed to strip prefix: {}", e)),
+ })?;
+ let dest_path = dist.join(relative);
+
+ if entry_path.is_dir() {
+ copy_into(&entry_path, &dest_path)?;
} else {
if let Some(parent) = dest_path.parent() {
- fs::create_dir_all(parent).map_proc_err(
- File,
- Io,
- &PathBuf::from(&dest_path),
- None,
- )?;
+ fs::create_dir_all(parent).map_proc_err(File, Io, &dest_path, None)?;
}
- fs::copy(&entry, &dest_path).map_proc_err(
- File,
- Io,
- &PathBuf::from(&dest_path),
- None,
- )?;
+ fs::copy(&entry_path, &dest_path).map_proc_err(File, Io, &dest_path, None)?;
}
}
Ok(())
@@ -181,7 +188,7 @@ fn walk_dir_internal(dir: &Path, files: &mut Vec) -> Result<(), Process
pub fn find_next_available_port(start_port: u16) -> u16 {
(start_port..65535)
.find(|port| is_port_available(*port))
- .expect("No available ports found")
+ .unwrap_or(start_port)
}
fn is_port_available(port: u16) -> bool {