Diffstat (limited to 'skaldpress_main.rs')
-rw-r--r--  skaldpress_main.rs  324
1 file changed, 324 insertions, 0 deletions
diff --git a/skaldpress_main.rs b/skaldpress_main.rs
new file mode 100644
index 0000000..5e6530c
--- /dev/null
+++ b/skaldpress_main.rs
@@ -0,0 +1,324 @@
+use skaldpress::macro_processor::error::SMPError;
+use skaldpress::macro_processor::macro_processor::{MacroProcessorWarning, MacroType};
+use skaldpress::skaldpress::filelists::{make_filelist, FileList, FileListFileTargetAction};
+use skaldpress::skaldpress::parseopts::{parseopts, Opts};
+use std::cmp::Ordering;
+use std::collections::HashMap;
+use std::collections::VecDeque;
+use std::fs;
+use std::path::Path;
+use std::time::Instant;
+
+use skaldpress::macro_processor::MacroProcessor;
+use skaldpress::skaldpress::error::SkaldpressError;
+use skaldpress::skaldpress::error::{
+ SP_COMPILE_FILE_EXTENSION_ERROR_2, SP_COMPILE_FILE_MACRO_PROCESS_ERROR,
+ SP_COMPILE_FILE_TEMPLATE_READ_ERROR, SP_COMPILE_TEMPLATE_MACRO_PROCESS_ERROR,
+ SP_GEN_DEST_STRIP_PREFIX_ERROR,
+};
+use skaldpress::skaldpress::metadata_parser::extract_parse_yaml_metadata;
+use skaldpress::skaldpress::metadata_parser::YamlValue;
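+
+// NOTE: `CompiledFile`, `sp_template`, `print_warnings`, `compile_files_in_directory`,
+// and the static mut caches used below are assumed to be defined elsewhere in the
+// crate, roughly:
+// static mut COMPILED_FILES: Vec<CompiledFile> = Vec::new();
+// static mut COMPILED_FILES_BY_TAG: Option<HashMap<String, Vec<usize>>> = None;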
+
+/// Convenience function for doing `cmp` on a metadata key of two arbitrary indexes.
+/// This takes a macro processor as input, so that it can emit warnings if the ordering fails in
+/// any way.
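+///
+/// Hypothetical usage (mirroring the sort calls in `sp_all_tagged_by` below; names are
+/// illustrative):
+///   files.sort_by(|a, b| order_index_by_cached_data(smp, "date", a, b));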
+fn order_index_by_cached_data(
+ smp: &mut MacroProcessor,
+ key: &str,
+ a: &usize,
+ b: &usize,
+) -> Ordering {
+    let compiled_files: &Vec<CompiledFile> = unsafe { COMPILED_FILES.as_ref() };
+ if *a >= compiled_files.len() {
+ smp.warnings.push(MacroProcessorWarning::new(format!(
+ "\"a\" is not a cached file {} >= {}",
+ *a,
+ compiled_files.len()
+ )));
+ return Ordering::Equal;
+ }
+ if *b >= compiled_files.len() {
+ smp.warnings.push(MacroProcessorWarning::new(format!(
+ "\"b\" is not a cached file {} >= {}",
+ *b,
+ compiled_files.len()
+ )));
+ return Ordering::Equal;
+ }
+    let Some(a) = compiled_files[*a].metadata.get(key) else {
+ smp.warnings.push(MacroProcessorWarning::new(format!(
+ "Key {:?} not found for ordering data in {:?}",
+ key, compiled_files[*a].source_path
+ )));
+ return Ordering::Equal;
+ };
+    let Some(b) = compiled_files[*b].metadata.get(key) else {
+ smp.warnings.push(MacroProcessorWarning::new(format!(
+ "Key {:?} not found for ordering data in {:?}",
+ key, compiled_files[*b].source_path
+ )));
+ return Ordering::Equal;
+ };
+ a.cmp(b)
+}
+
+/// SMP macro for getting all files with a specific tag; this is only _really_ effective on the second run.
+///
+/// Usage in files:
+/// all_tagged_by(<tag name>, <template> [, <field to sort by>] [, reversed])
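+///
+/// Hypothetical example (tag, template and field names are made up):
+/// all_tagged_by(blog, templates/post_listing.html, date, reversed)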
+fn sp_all_tagged_by(
+ smp: &mut MacroProcessor,
+ macro_name: &str,
+ args: &mut [String],
+) -> Result<String, SMPError> {
+    if args.len() < 2 {
+        // Need both a tag name and a template; args[1] is used unconditionally below.
+        return Ok(macro_name.to_string());
+    }
+
+    let compiled_files: &Vec<CompiledFile> = unsafe { COMPILED_FILES.as_ref() };
+    let compiled_files_by_tag = unsafe { COMPILED_FILES_BY_TAG.as_ref().unwrap() };
+ let Some(tagged_files) = compiled_files_by_tag.get(&args[0]) else {
+ println!(" \x1b[35mNo tags for {}\x1b[0m", args[0]);
+ return Ok(String::new());
+ };
+
+ let mut out = String::new();
+
+ let mut tagged_files = tagged_files.clone();
+ if args.len() > 2 {
+ if args.len() > 3 && args[3] == "reversed" {
+ tagged_files.sort_by(|a, b| order_index_by_cached_data(smp, &args[2], b, a));
+ } else {
+ tagged_files.sort_by(|a, b| order_index_by_cached_data(smp, &args[2], b, a));
+ }
+ }
+
+ for doc_i in tagged_files {
+ let file = &compiled_files[doc_i];
+ let mut smp_local = smp.clone();
+ macro_processor_initialize(&file.metadata, &mut smp_local, None);
+ out.push_str(&sp_template(
+ &mut smp_local,
+ "template",
+ &mut [args[1].clone(), file.content.clone()],
+ )?);
+ print_warnings(&smp_local);
+ }
+ Ok(out)
+}
+
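+/// Registers the skaldpress built-in macros (`all_tagged_by`, `template`) on the given
+/// macro processor, defines a `METADATA_<key>` macro for each metadata entry not already
+/// defined, and merges in any additional state.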
+fn macro_processor_initialize(
+    metadata: &HashMap<String, YamlValue>,
+    macro_processor: &mut MacroProcessor,
+    additional_state: Option<&HashMap<String, MacroType>>,
+) {
+ macro_processor.define_macro(
+ String::from("all_tagged_by"),
+ MacroType::Function(sp_all_tagged_by),
+ );
+ macro_processor.define_macro(String::from("template"), MacroType::Function(sp_template));
+ for (key, value) in metadata {
+ let macro_name = format!("METADATA_{}", key);
+ if !macro_processor.macros.contains_key(&macro_name) {
+ let value = match value {
+ YamlValue::List(l) => {
+ let mut out = Vec::new();
+ for el in l {
+ out.push(MacroType::String(el.to_string()));
+ }
+ MacroType::Array(out)
+ }
+ value => MacroType::String(value.to_string()),
+ };
+ macro_processor.define_macro(macro_name, value);
+ }
+ }
+ if let Some(additional_state) = additional_state {
+ for (key, value) in additional_state {
+ macro_processor.define_macro(key.to_string(), value.clone());
+ }
+ }
+}
+
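+/// Note: plain concatenation, so `opts.template_dir` is assumed to end with a path separator.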
+fn get_template_path(template: &str, opts: &Opts) -> String {
+ format!("{}{}", opts.template_dir, template)
+}
+
+fn main() -> Result<(), SkaldpressError> {
+ unsafe {
+ COMPILED_FILES_BY_TAG = Some(HashMap::new());
+ }
+ let mut opts = parseopts().build();
+
+ let emptyvec: Vec<String> = Vec::new();
+ let now = Instant::now();
+ let mut filelist_dest = make_filelist(
+ &Path::new(&opts.build_dir),
+ &Path::new(&opts.build_dir),
+ &emptyvec,
+ &emptyvec,
+ FileListFileTargetAction::NONE,
+ false,
+ &opts.template_dir,
+ &opts.metadata,
+ )?;
+ let mut filelist_src = FileList::new();
+ filelist_src.extend(make_filelist(
+ &Path::new(&opts.static_dir),
+ &Path::new(&opts.static_dir),
+ &emptyvec,
+ &opts.static_exclude,
+ FileListFileTargetAction::COPY,
+ false,
+ &opts.template_dir,
+ &opts.metadata,
+ )?);
+ filelist_src.extend(make_filelist(
+ &Path::new(&opts.content_dir),
+ &Path::new(&opts.content_dir),
+ &opts.filter,
+ &opts.exclude,
+ FileListFileTargetAction::COMPILE,
+ true,
+ &opts.template_dir,
+ &opts.metadata,
+ )?);
+
+ let elapsed = now.elapsed();
+ let mut work_queue: VecDeque<(String, FileListFileTargetAction)> = VecDeque::new();
+
+    // We also do not currently discover empty directories in the build target;
+    // we should attempt to do that.
+ for (filename, _file) in filelist_dest.missing_from(&filelist_src).files {
+ filelist_dest.set_action(&filename, FileListFileTargetAction::DELETE);
+ work_queue.push_back((filename, FileListFileTargetAction::DELETE));
+ }
+ for (filename, file) in filelist_src.missing_from(&filelist_dest).files {
+ work_queue.push_back((filename, file.target_action));
+ }
+ for (filename, file) in filelist_src.changed_from(&filelist_dest).files {
+ work_queue.push_back((filename, file.target_action));
+ }
+
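+    // Collect (pattern, filename) pairs from the "dependencies" metadata key;
+    // only scalar values are handled for now.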
+ let mut dependants: Vec<(String, String)> = Vec::new();
+ for (filename, file) in &filelist_src.files {
+ if !file.metadata.contains_key("dependencies") {
+ continue;
+ }
+        if let YamlValue::Scalar(s) = file.metadata["dependencies"].clone() {
+            dependants.push((s, filename.clone()));
+        }
+ }
+
+    println!(
+        "Generated filelist in {:?}, {} in destination, {} in source",
+        elapsed,
+        filelist_dest.len(),
+        filelist_src.len()
+    );
+ println!("Total file actions to take {}", work_queue.len());
+
+ //let mut compiled_files: Vec<String> = Vec::with_capacity(work_queue.len());
+
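+    // Drain the work queue: delete files that have disappeared from the source,
+    // copy static files into the build dir, and (eventually) compile content files.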
+ while let Some((filename, action)) = work_queue.pop_front() {
+ println!("> {:#?} {:#?}", action, filename);
+ match action {
+ FileListFileTargetAction::DELETE => {
+                // Whether deletions happen should be controlled by a flag
+                // (like rsync's --del); it is hardcoded for now.
+                if false {
+                    continue;
+                }
+
+ let file = filelist_dest.files.get(&filename).expect("SP87");
+ println!(" Deleting {:#?}", file.file_path);
+                if let Err(e) = std::fs::remove_file(file.file_path.as_path()) {
+                    println!(
+                        "\x1b[31mError deleting {:#?}: {}\x1b[0m",
+                        file.file_path.as_path(),
+                        e
+                    );
+                }
+ }
+ FileListFileTargetAction::COPY => {
+ let file = filelist_src.files.get(&filename).expect("SP87");
+ let dest_file_path = Path::new(&opts.build_dir).join(file.file_rel.as_path());
+ println!(" Copying {:#?}", file.file_path);
+ println!(" {:#?}", dest_file_path);
+                let Some(dest_dir) = dest_file_path.parent() else {
+                    println!("\x1b[31mCould not determine parent dir of {:#?}\x1b[0m", dest_file_path);
+                    continue;
+                };
+ if let Err(e) = std::fs::create_dir_all(&dest_dir) {
+ println!("\x1b[31mError creating dir {:#?}: {}\x1b[0m", dest_dir, e);
+ continue;
+ }
+                if let Err(e) = std::fs::copy(file.file_path.as_path(), dest_file_path) {
+ println!(
+ "\x1b[31mError copying {:#?}: {}\x1b[0m",
+ file.file_path.as_path(),
+ e
+ );
+ }
+ }
+ FileListFileTargetAction::COMPILE => {
+                // This is an idea for something; I have not figured out how to deal with files
+                // depending on all other files (e.g. an index), as I cannot see how to get around
+                // recompiling everything anyway.
+                //
+                // I guess we could technically get away with a single compilation instead of two
+                // independent ones in some circumstances though, unless the file needs to be
+                // recompiled anyway, which would be the case for any file with a TOC, e.g.
+ // let file = filelist_src.files.get(&filename).expect("SP87");
+ // println!(" Compiling {:#?}", file.file_path);
+ //
+ // // check list of already compiled files, to see if all dependants are compiled
+ // // if not, move yourself to the end of the queue (basically just reinsert yourself
+                // // without compiling)
+ // //
+ // // Check if the file has been compiled already, if so, it does not need to be added
+ // // to queue
+ //
+ // // If any dependants depend on this file, add them to the end of the work queue,
+ // // if they are not already there.
+ // 'dependants: for i in 0..dependants.len() {
+ // let (pat, target) = dependants[i].clone();
+ // if filename == target {
+ // continue;
+ // }
+ // if file_pat_match(&filename, &pat) {
+ // for (workel, _action) in &work_queue {
+ // if workel == &target {
+ // continue 'dependants;
+ // }
+ // }
+ // work_queue.push_back((target, action.clone()));
+ // }
+ //
+ // }
+
+ // compiled_files.push(filename.clone());
+ }
+ FileListFileTargetAction::NONE => {}
+ }
+ }
+
+    // Compilation runs twice, as some macros depend on already-compiled content.
+    // We should make some kind of file list, and only re-compile files which have changed.
+ println!("Compiling content");
+ let _ = compile_files_in_directory(Path::new(&opts.content_dir), &opts);
+ println!("Rerun compilation");
+ opts.first_run = false;
+ let _ = compile_files_in_directory(Path::new(&opts.content_dir), &opts);
+
+ Ok(())
+}