summaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--src/skaldpress/file_metadata_extract.rs80
-rw-r--r--src/skaldpress/filelists.rs225
-rw-r--r--src/skaldpress/main.rs227
-rw-r--r--src/skaldpress/mod.rs2
4 files changed, 462 insertions, 72 deletions
diff --git a/src/skaldpress/file_metadata_extract.rs b/src/skaldpress/file_metadata_extract.rs
new file mode 100644
index 0000000..e68969c
--- /dev/null
+++ b/src/skaldpress/file_metadata_extract.rs
@@ -0,0 +1,80 @@
+use std::collections::HashMap;
+use std::fs;
+use std::path::Path;
+use std::path::PathBuf;
+use std::time::SystemTime;
+
+use crate::skaldpress::error::SkaldpressError;
+use crate::skaldpress::error::SP_COMPILE_FILE_EXTENSION_ERROR_2;
+use crate::skaldpress::metadata_parser::extract_parse_yaml_metadata;
+use crate::skaldpress::metadata_parser::YamlValue;
+
+// Resolve a template name to a filesystem path by prepending the template
+// directory. NOTE(review): plain string concatenation — assumes
+// `template_dir` already ends with a path separator; `Path::join` would be
+// safer. TODO confirm against callers.
+fn get_template_path(template: &str, template_dir: &str) -> PathBuf {
+    Path::new(&format!("{}{}", template_dir, template)).to_path_buf()
+}
+
+/// Collect the effective metadata for `file_path`.
+///
+/// Parses the file's YAML front-matter, merges it on top of the
+/// caller-supplied `meta`, and — if the merged map contains a `template`
+/// key — recurses into that template (resolved under `template_dir`) so the
+/// template's metadata forms the base layer under the file's own keys.
+///
+/// Returns the merged metadata, the resulting extension (the template
+/// chain's extension wins when a template is used), and the newest
+/// modification time seen across the file and its templates.
+pub fn get_all_meta(
+    file_path: &Path,
+    template_dir: &str,
+    meta: HashMap<String, YamlValue>,
+) -> Result<(HashMap<String, YamlValue>, String, SystemTime), SkaldpressError> {
+    // Extension of the file itself; empty when the path has none. Errors
+    // only when the extension is not valid UTF-8.
+    let extension = file_path
+        .extension()
+        .unwrap_or(std::ffi::OsStr::new(""))
+        .to_str()
+        .ok_or(SkaldpressError::PathOperationError(
+            SP_COMPILE_FILE_EXTENSION_ERROR_2,
+            None,
+        ))?;
+
+    let fs_metadata = match fs::metadata(&file_path) {
+        Ok(metadata) => metadata,
+        Err(e) => {
+            return Err(SkaldpressError::MetadataError(87, e));
+        }
+    };
+
+    let fs_modified = match fs_metadata.modified() {
+        Ok(r) => r,
+        Err(e) => {
+            return Err(SkaldpressError::MetadataError(89, e));
+        }
+    };
+
+    let file_content = fs::read_to_string(file_path).map_err(|e| {
+        SkaldpressError::FileReadError(
+            1,
+            e,
+            file_path.to_str().unwrap_or("unknown file").to_string(),
+        )
+    })?;
+
+    // Front-matter, if present; otherwise an empty map and the untouched
+    // content. The stripped content itself is unused here.
+    let (map_with_meta, _file_content) = match extract_parse_yaml_metadata(&file_content) {
+        Some((map, file_content)) => (map, file_content),
+        None => (HashMap::new(), file_content.as_str()),
+    };
+    // File-level keys override the caller-provided base metadata.
+    let mut map_base = meta;
+    map_base.extend(map_with_meta);
+
+    // No template referenced: this file's own values are final.
+    let Some(template) = &map_base.get("template") else {
+        return Ok((map_base, extension.to_string(), fs_modified));
+    };
+
+    let template_file = get_template_path(
+        &TryInto::<String>::try_into(*template)
+            .map_err(|e| SkaldpressError::MetadataError(12, std::io::Error::other(e)))?,
+        template_dir,
+    );
+
+    // Recurse into the template chain; its metadata becomes the base
+    // layer, overridden by this file's own keys.
+    let (mut map_templated, extension, template_fs_modified) =
+        get_all_meta(&template_file, template_dir, HashMap::new())?;
+    map_templated.extend(map_base);
+    // Should really add a custom extend function to the hashmap,
+    // so lists can be merged and such
+
+    Ok((
+        map_templated,
+        String::from(extension),
+        std::cmp::max(fs_modified, template_fs_modified),
+    ))
+}
diff --git a/src/skaldpress/filelists.rs b/src/skaldpress/filelists.rs
new file mode 100644
index 0000000..c6495e3
--- /dev/null
+++ b/src/skaldpress/filelists.rs
@@ -0,0 +1,225 @@
+use crate::skaldpress::error::SkaldpressError;
+use crate::skaldpress::file_metadata_extract::get_all_meta;
+use crate::skaldpress::metadata_parser::YamlValue;
+use std::collections::HashMap;
+use std::fs;
+use std::path::Path;
+use std::path::PathBuf;
+use std::time::SystemTime;
+
+/// Action to take for a file when the work queue is processed.
+#[derive(Clone, Debug)]
+pub enum FileListFileTargetAction {
+    COMPILE, // run through the compile/template pipeline
+    COPY,    // copy verbatim into the build dir
+    NONE,    // tracked, but nothing to do
+    DELETE,  // present in destination only: remove it
+}
+
+/// One file discovered by `make_filelist`, with everything needed to decide
+/// whether and how it must be (re)processed.
+/// NOTE(review): `Box<PathBuf>` is double indirection — `PathBuf` alone is
+/// already heap-backed; consider dropping the `Box`.
+#[derive(Clone, Debug)]
+pub struct FileListFile {
+    pub file_path: Box<PathBuf>, // path as found on disk
+    pub file_rel: Box<PathBuf>,  // path relative to the scanned base dir (extension may be rewritten)
+    pub change_time: SystemTime, // mtime; includes template chain when metadata was read
+    pub size: u64,               // file size in bytes
+    pub dependencies: Vec<Box<PathBuf>>, // currently always empty — TODO populate
+    pub target_action: FileListFileTargetAction, // pending action for the work queue
+    pub metadata: HashMap<String, YamlValue>, // parsed front-matter (empty if not read)
+}
+
+/// Collection of files keyed by their relative path rendered as a string.
+#[derive(Debug)]
+pub struct FileList {
+    pub files: HashMap<String, FileListFile>,
+}
+
+impl FileList {
+    /// Empty list.
+    pub fn new() -> FileList {
+        FileList {
+            files: HashMap::new(),
+        }
+    }
+
+    /// Insert (or replace) a file, keyed by its relative path.
+    pub fn add(&mut self, file: FileListFile) {
+        let f = file.file_rel.to_str().unwrap_or("").to_string();
+        self.files.insert(f, file);
+    }
+
+    /// Set the pending action for `filename`.
+    /// FIXME(review): panics when `filename` is not tracked — should return
+    /// a proper error instead of unwrapping.
+    pub fn set_action(&mut self, filename: &String, action: FileListFileTargetAction) {
+        self.files.get_mut(filename).unwrap().target_action = action;
+    }
+
+    /// Merge another list into this one; entries sharing a relative path
+    /// are overwritten by `filelist`'s.
+    pub fn extend(&mut self, filelist: FileList) {
+        for (_, file) in filelist.files {
+            self.add(file);
+        }
+    }
+
+    /// Number of files tracked.
+    pub fn len(&self) -> usize {
+        self.files.len()
+    }
+
+    /// Files present in `self` but absent from `other`, compared by
+    /// relative path. Matching entries are cloned into the result.
+    pub fn missing_from(&self, other: &FileList) -> FileList {
+        let mut filelist = FileList::new();
+        for (fname, file) in &self.files {
+            if !other.files.contains_key(fname.as_str()) {
+                filelist.add(file.clone());
+            }
+        }
+        filelist
+    }
+
+    /// Files present in both lists whose `change_time` in `self` is newer
+    /// than in `other`. Files missing from `other` are skipped here (they
+    /// are reported by `missing_from`).
+    pub fn changed_from(&self, other: &FileList) -> FileList {
+        let mut filelist = FileList::new();
+        for (fname, file) in &self.files {
+            let Some(o_file) = other.files.get(fname.as_str()) else {
+                continue;
+            };
+            if o_file.change_time < file.change_time {
+                filelist.add(file.clone());
+            }
+        }
+        filelist
+    }
+}
+
+/// Minimal glob matching used by the include/exclude filters: exact
+/// equality, a leading `*` (suffix match) or a trailing `*` (prefix match).
+/// NOTE(review): a pattern with `*` at both ends is not handled as a
+/// contains-match — the leading-`*` branch compares the suffix including
+/// the trailing `*`, so such patterns effectively never match. Confirm
+/// whether that is intended.
+pub fn file_pat_match(file: &String, pat: &str) -> bool {
+    if file == pat {
+        return true;
+    }
+    if pat.starts_with("*") && file.ends_with(pat.trim_start_matches("*")) {
+        return true;
+    }
+    if pat.ends_with("*") && file.starts_with(pat.trim_end_matches("*")) {
+        return true;
+    }
+    false
+}
+
+/// Decide whether `file` should be skipped.
+///
+/// Returns true when the file matches any `exclude` pattern, or when a
+/// non-empty `filters` (include) list is given and the file matches none of
+/// its patterns. An empty include list admits everything not excluded.
+fn file_filtered(file: &String, filters: &Vec<String>, exclude: &Vec<String>) -> bool {
+    // Excludes take precedence over includes.
+    for filter in exclude {
+        if file_pat_match(file, filter) {
+            return true;
+        }
+    }
+    if filters.len() == 0 {
+        return false;
+    }
+    for filter in filters {
+        if file_pat_match(file, filter) {
+            return false;
+        }
+    }
+    true
+}
+
+/// Recursively scan `directory`, building a `FileList` of all files that
+/// survive the include/exclude filters.
+///
+/// * `base_dir` — prefix stripped from each path to form the relative path.
+/// * `include` / `exclude` — simple `*` patterns (see `file_pat_match`).
+/// * `target_action` — action recorded on every file found.
+/// * `read_metadata` — when true, front-matter (and the template chain) is
+///   parsed via `get_all_meta`, and the file's recorded mtime and output
+///   extension are adjusted accordingly.
+/// * `additional_metadata` — base metadata handed to `get_all_meta`.
+///
+/// NOTE(review): per-file errors (stat, mtime, strip_prefix) are logged and
+/// the file is skipped, but a `get_all_meta` error aborts the entire scan
+/// via `?` — confirm that asymmetry is intended.
+pub fn make_filelist(
+    directory: &Path,
+    base_dir: &Path,
+    include: &Vec<String>,
+    exclude: &Vec<String>,
+    target_action: FileListFileTargetAction,
+    read_metadata: bool,
+    template_dir: &str,
+    additional_metadata: &HashMap<String, YamlValue>,
+) -> Result<FileList, SkaldpressError> {
+    // 1. iterate all files in source
+    // 2. read metadata for all included files to compile
+    // 3. remove deleted files
+    // 4. copy/compile all files, use a set/queue or something and store all files which depends on
+    //    the ones that are compiled
+    //    (this is a bit hard to do properly, with all the extra macros,
+    //    one "fix" could be to define a metadata key which can store dependencies)
+    // 5. Repeat step 4 until the queue is empty
+    let mut filelist = FileList::new();
+    for entry in fs::read_dir(directory).map_err(|e| {
+        SkaldpressError::DirectoryReadError(
+            8,
+            e,
+            directory.to_str().unwrap_or("unknown dir").to_string(),
+        )
+    })? {
+        let entry = match entry {
+            Ok(entry) => entry,
+            Err(e) => {
+                println!("\x1b[31mError getting file info {:#?}\x1b[0m", e);
+                continue;
+            }
+        };
+        let path = entry.path();
+        let metadata = match fs::metadata(&path) {
+            Ok(metadata) => metadata,
+            Err(e) => {
+                println!("\x1b[31mError getting file metadata {:#?}\x1b[0m", e);
+                continue;
+            }
+        };
+
+        if metadata.is_file() {
+            let mut rel_path = match path.strip_prefix(base_dir) {
+                Ok(r) => r.to_owned(),
+                Err(e) => {
+                    // NOTE(review): message says "copying" but this is
+                    // strip_prefix failing — fix the label.
+                    println!("\x1b[31mError copying {:#?}: {}\x1b[0m", path.as_path(), e);
+                    continue;
+                }
+            };
+            // Filtering is applied to the full source path, not rel_path.
+            if file_filtered(&path.to_str().unwrap_or("").to_string(), &include, &exclude) {
+                continue;
+            }
+
+            let mut modified = match metadata.modified() {
+                Ok(r) => r,
+                Err(e) => {
+                    println!(
+                        "\x1b[31mError finding modified time {:#?}: {}\x1b[0m",
+                        path.as_path(),
+                        e
+                    );
+                    continue;
+                }
+            };
+
+            let meta;
+            if read_metadata {
+                let extension;
+                let oldest_modified;
+                // The template chain may change both the effective mtime
+                // and the output extension (e.g. md -> html).
+                (meta, extension, oldest_modified) =
+                    get_all_meta(path.as_path(), template_dir, additional_metadata.clone())?;
+                modified = oldest_modified;
+                rel_path = rel_path.with_extension(&extension);
+            } else {
+                meta = HashMap::new();
+            }
+
+            filelist.add(FileListFile {
+                file_path: Box::new(path.clone()),
+                file_rel: Box::new(rel_path.to_owned()),
+                change_time: modified,
+                size: metadata.len(),
+                dependencies: vec![],
+                target_action: target_action.clone(),
+                metadata: meta,
+            });
+        } else if metadata.is_dir() {
+            // Recurse; a failing subdirectory is logged but does not abort
+            // the scan of its siblings.
+            match make_filelist(
+                path.as_path(),
+                base_dir,
+                include,
+                exclude,
+                target_action.clone(),
+                read_metadata,
+                template_dir,
+                additional_metadata,
+            ) {
+                Ok(fl) => filelist.extend(fl),
+                Err(e) => {
+                    println!(
+                        "\x1b[31mError processing directory {:#?}: {}\x1b[0m",
+                        path.as_path(),
+                        e
+                    );
+                }
+            };
+        }
+    }
+    Ok(filelist)
+}
diff --git a/src/skaldpress/main.rs b/src/skaldpress/main.rs
index 25060f7..48c96d5 100644
--- a/src/skaldpress/main.rs
+++ b/src/skaldpress/main.rs
@@ -1,10 +1,13 @@
use skaldpress::macro_processor::error::SMPError;
use skaldpress::macro_processor::macro_processor::{MacroProcessorWarning, MacroType};
+use skaldpress::skaldpress::filelists::{make_filelist, FileList, FileListFileTargetAction};
use skaldpress::skaldpress::parseopts::{parseopts, Opts};
use std::cmp::Ordering;
use std::collections::HashMap;
+use std::collections::VecDeque;
use std::fs;
use std::path::Path;
+use std::time::Instant;
use skaldpress::macro_processor::MacroProcessor;
use skaldpress::skaldpress::error::SkaldpressError;
@@ -597,87 +600,165 @@ fn compile_files_in_directory(directory: &Path, opts: &Opts) -> Result<(), Skald
Ok(())
}
-fn copy_files_in_directory(directory: &Path, opts: &Opts) -> Result<(), SkaldpressError> {
- for entry in fs::read_dir(directory).map_err(|e| {
- SkaldpressError::DirectoryReadError(
- 8,
- e,
- directory.to_str().unwrap_or("unknown dir").to_string(),
- )
- })? {
- let entry = match entry {
- Ok(entry) => entry,
- Err(e) => {
- println!("\x1b[31mError getting file info {:#?}\x1b[0m", e);
- continue;
- }
- };
- let path = entry.path();
- let metadata = match fs::metadata(&path) {
- Ok(metadata) => metadata,
- Err(e) => {
- println!("\x1b[31mError getting file metadata {:#?}\x1b[0m", e);
- continue;
- }
- };
+fn main() -> Result<(), SkaldpressError> {
+ unsafe {
+ COMPILED_FILES_BY_TAG = Some(HashMap::new());
+ }
+ let mut opts = parseopts().build();
+
+ let emptyvec: Vec<String> = Vec::new();
+ let now = Instant::now();
+ let mut filelist_dest = make_filelist(
+ &Path::new(&opts.build_dir),
+ &Path::new(&opts.build_dir),
+ &emptyvec,
+ &emptyvec,
+ FileListFileTargetAction::NONE,
+ false,
+ &opts.template_dir,
+ &opts.metadata,
+ )?;
+ let mut filelist_src = FileList::new();
+ filelist_src.extend(make_filelist(
+ &Path::new(&opts.static_dir),
+ &Path::new(&opts.static_dir),
+ &emptyvec,
+ &opts.static_exclude,
+ FileListFileTargetAction::COPY,
+ false,
+ &opts.template_dir,
+ &opts.metadata,
+ )?);
+ filelist_src.extend(make_filelist(
+ &Path::new(&opts.content_dir),
+ &Path::new(&opts.content_dir),
+ &opts.filter,
+ &opts.exclude,
+ FileListFileTargetAction::COMPILE,
+ true,
+ &opts.template_dir,
+ &opts.metadata,
+ )?);
+
+ let elapsed = now.elapsed();
+ let mut work_queue: VecDeque<(String, FileListFileTargetAction)> = VecDeque::new();
+
+ // We also will not currently discover empty directories from build target,
+ // we should attempt to do that.
+ for (filename, _file) in filelist_dest.missing_from(&filelist_src).files {
+ filelist_dest.set_action(&filename, FileListFileTargetAction::DELETE);
+ work_queue.push_back((filename, FileListFileTargetAction::DELETE));
+ }
+ for (filename, file) in filelist_src.missing_from(&filelist_dest).files {
+ work_queue.push_back((filename, file.target_action));
+ }
+ for (filename, file) in filelist_src.changed_from(&filelist_dest).files {
+ work_queue.push_back((filename, file.target_action));
+ }
+
+ let mut dependants: Vec<(String, String)> = Vec::new();
+ for (filename, file) in &filelist_src.files {
+ if !file.metadata.contains_key("dependencies") {
+ continue;
+ }
+ match file.metadata["dependencies"].clone() {
+ YamlValue::Scalar(s) => dependants.push((s, filename.clone())),
+ _ => {}
+ }
+ }
+
+ println!(
+ "Generated filelist in {:#?} seconds, {} in destination, {} in source",
+ elapsed,
+ filelist_dest.len(),
+ filelist_src.len()
+ );
+ println!("Total file actions to take {}", work_queue.len());
+
+ //let mut compiled_files: Vec<String> = Vec::with_capacity(work_queue.len());
- if metadata.is_file() {
- let real_path = match path.strip_prefix(&opts.static_dir) {
- Ok(r) => r,
- Err(e) => {
- println!("\x1b[31mError copying {:#?}: {}\x1b[0m", path.as_path(), e);
+ while let Some((filename, action)) = work_queue.pop_front() {
+ println!("> {:#?} {:#?}", action, filename);
+ match action {
+ FileListFileTargetAction::DELETE => {
+ // This should be some flag that can be toggled (like with -del in rsync)
+ if false {
continue;
}
- };
- let e: Vec<String> = Vec::new();
- if file_filtered(
- &path.to_str().unwrap_or("").to_string(),
- &e,
- &opts.static_exclude,
- ) {
- continue;
+
+ let file = filelist_dest.files.get(&filename).expect("SP87");
+ println!(" Deleting {:#?}", file.file_path);
+ if let Err(e) = std::fs::remove_file(&file.file_path.as_path()) {
+ println!(
+ "\x1b[31mError copying {:#?}: {}\x1b[0m",
+ file.file_path.as_path(),
+ e
+ );
+ }
}
- let dest_file_path = Path::new(&opts.build_dir).join(real_path);
- println!(
- "< Copying {:#?} -> {:#?}",
- path.as_path(),
- dest_file_path.as_path()
- );
- let Some(dest_dir) = &dest_file_path.parent() else {
- println!("\x1b[31mError creating dir {:#?}\x1b[0m", dest_file_path);
- continue;
- };
- if let Err(e) = std::fs::create_dir_all(&dest_dir) {
- println!("\x1b[31mError creating dir {:#?}: {}\x1b[0m", dest_dir, e);
- continue;
+ FileListFileTargetAction::COPY => {
+ let file = filelist_src.files.get(&filename).expect("SP87");
+ let dest_file_path = Path::new(&opts.build_dir).join(file.file_rel.as_path());
+ println!(" Copying {:#?}", file.file_path);
+ println!(" {:#?}", dest_file_path);
+ let Some(dest_dir) = &dest_file_path.parent() else {
+ println!("\x1b[31mError creating dir {:#?}\x1b[0m", dest_file_path);
+ continue;
+ };
+ if let Err(e) = std::fs::create_dir_all(&dest_dir) {
+ println!("\x1b[31mError creating dir {:#?}: {}\x1b[0m", dest_dir, e);
+ continue;
+ }
+ if let Err(e) = std::fs::copy(&file.file_path.as_path(), dest_file_path) {
+ println!(
+ "\x1b[31mError copying {:#?}: {}\x1b[0m",
+ file.file_path.as_path(),
+ e
+ );
+ }
}
- if let Err(e) = std::fs::copy(&path, dest_file_path) {
- println!("\x1b[31mError copying {:#?}: {}\x1b[0m", path.as_path(), e);
+ FileListFileTargetAction::COMPILE => {
+            // This is an idea for something; have not figured out how to deal with having files
+ // depending on all other files (index e.g.), as I cannot see how to get around
+ // recompiling everything anyway.
+ //
+ // I guess we could technically get away with a single compilation instead of two
+            // independent ones in some circumstances though, unless the file needs to be recompiled
+ // anyway, which would be the case for any files with TOC e.g.
+ // let file = filelist_src.files.get(&filename).expect("SP87");
+ // println!(" Compiling {:#?}", file.file_path);
+ //
+ // // check list of already compiled files, to see if all dependants are compiled
+ // // if not, move yourself to the end of the queue (basically just reinsert yourself
+ // // without compiling
+ // //
+ // // Check if the file has been compiled already, if so, it does not need to be added
+ // // to queue
+ //
+ // // If any dependants depend on this file, add them to the end of the work queue,
+ // // if they are not already there.
+ // 'dependants: for i in 0..dependants.len() {
+ // let (pat, target) = dependants[i].clone();
+ // if filename == target {
+ // continue;
+ // }
+ // if file_pat_match(&filename, &pat) {
+ // for (workel, _action) in &work_queue {
+ // if workel == &target {
+ // continue 'dependants;
+ // }
+ // }
+ // work_queue.push_back((target, action.clone()));
+ // }
+ //
+ // }
+
+ // compiled_files.push(filename.clone());
}
- } else if metadata.is_dir() {
- if let Err(e) = copy_files_in_directory(path.as_path(), opts) {
- println!(
- "\x1b[31mError processing directory {:#?}: {}\x1b[0m",
- path.as_path(),
- e
- );
- };
+ FileListFileTargetAction::NONE => {}
}
}
- Ok(())
-}
-fn main() {
- unsafe {
- COMPILED_FILES_BY_TAG = Some(HashMap::new());
- }
-
- let mut opts = parseopts().build();
-
- println!("Removing {:#?}", opts.build_dir);
- let _ = std::fs::remove_dir_all(Path::new(&opts.build_dir));
- println!("Copying static content");
- let _ = copy_files_in_directory(Path::new(&opts.static_dir), &opts);
// Running compilation twice, needed for some macros which depends on compiled content
// We should make some kind of file-list, and only re-compile files which has changed.
println!("Compiling content");
@@ -685,4 +766,6 @@ fn main() {
println!("Rerun compilation");
opts.first_run = false;
let _ = compile_files_in_directory(Path::new(&opts.content_dir), &opts);
+
+ Ok(())
}
diff --git a/src/skaldpress/mod.rs b/src/skaldpress/mod.rs
index 5245cd8..64c5d8e 100644
--- a/src/skaldpress/mod.rs
+++ b/src/skaldpress/mod.rs
@@ -1,3 +1,5 @@
pub mod error;
+pub mod file_metadata_extract;
+pub mod filelists;
pub mod metadata_parser;
pub mod parseopts;