diff options
author | Qrius <[email protected]> | 2025-03-05 08:41:05 +0100 |
---|---|---|
committer | Qrius <[email protected]> | 2025-03-05 08:41:08 +0100 |
commit | 313e059f367f2ac291cd409e77d22673f9595848 (patch) | |
tree | 6440ebdd65e015f8c6b0147fd8eb5f877f35c34e | |
parent | 5fb097851d88b42188ee0502270d8c336102783a (diff) | |
download | skaldpress-313e059f367f2ac291cd409e77d22673f9595848.tar.gz skaldpress-313e059f367f2ac291cd409e77d22673f9595848.zip |
First iteration of py skaldpress version
-rw-r--r-- | Makefile | 38 | ||||
-rw-r--r-- | pyproject.toml | 4 | ||||
-rw-r--r-- | skaldpress.1 | 96 | ||||
-rw-r--r-- | skaldpress_main.rs | 324 | ||||
-rw-r--r-- | smp.1 | 105 | ||||
-rw-r--r-- | src/skaldpress/__init__.py | 6 | ||||
-rw-r--r-- | src/skaldpress/file_metadata_extract.rs | 0 | ||||
-rw-r--r-- | src/skaldpress/main.py | 326 | ||||
-rw-r--r-- | src/skaldpress/metadata_parser.py | 74 | ||||
-rw-r--r-- | src/skaldpress/smp_macros.py | 1 | ||||
-rw-r--r-- | src/smp/__init__.py | 13 | ||||
-rw-r--r-- | src/smp/builtins.py | 6 | ||||
-rw-r--r-- | src/smp/macro_processor.py | 76 | ||||
-rw-r--r-- | tests/skaldpress/content/article.md | 11 | ||||
-rw-r--r-- | tests/skaldpress/templates/article.html | 22 | ||||
-rw-r--r-- | tests/skaldpress/templates/base.html | 14 | ||||
-rw-r--r-- | tests/smp/array_each_1 (renamed from tests/input_files/array_each_1) | 0 | ||||
-rw-r--r-- | tests/smp/array_push_1 (renamed from tests/input_files/array_push_1) | 0 | ||||
-rw-r--r-- | tests/smp/array_push_2 (renamed from tests/input_files/array_push_2) | 0 | ||||
-rw-r--r-- | tests/smp/array_push_3 (renamed from tests/input_files/array_push_3) | 0 | ||||
-rw-r--r-- | tests/smp/define_1 (renamed from tests/input_files/define_1) | 0 | ||||
-rw-r--r-- | tests/smp/define_2 (renamed from tests/input_files/define_2) | 0 | ||||
-rw-r--r-- | tests/smp/dnl_1 (renamed from tests/input_files/dnl_1) | 0 | ||||
-rw-r--r-- | tests/smp/dnl_2 (renamed from tests/input_files/dnl_2) | 0 | ||||
-rw-r--r-- | tests/smp/dnl_3 (renamed from tests/input_files/dnl_3) | 0 | ||||
-rw-r--r-- | tests/smp/dnl_4 (renamed from tests/input_files/dnl_4) | 0 | ||||
-rw-r--r-- | tests/smp/explode_1 (renamed from tests/input_files/explode_1) | 0 | ||||
-rw-r--r-- | tests/smp/explode_2 (renamed from tests/input_files/explode_2) | 0 | ||||
-rw-r--r-- | tests/smp/format_time_1 (renamed from tests/input_files/format_time_1) | 0 | ||||
-rw-r--r-- | tests/smp/ifdef_1 (renamed from tests/input_files/ifdef_1) | 0 | ||||
-rw-r--r-- | tests/smp/ifdef_2 (renamed from tests/input_files/ifdef_2) | 0 | ||||
-rw-r--r-- | tests/smp/ifdef_3 (renamed from tests/input_files/ifdef_3) | 0 | ||||
-rw-r--r-- | tests/smp/ifeq_1 (renamed from tests/input_files/ifeq_1) | 0 | ||||
-rw-r--r-- | tests/smp/ifeq_2 (renamed from tests/input_files/ifeq_2) | 0 | ||||
-rw-r--r-- | tests/smp/ifeq_3 (renamed from tests/input_files/ifeq_3) | 0 | ||||
-rw-r--r-- | tests/smp/ifeq_4 (renamed from tests/input_files/ifeq_4) | 0 | ||||
-rw-r--r-- | tests/smp/ifndef_1 (renamed from tests/input_files/ifndef_1) | 0 | ||||
-rw-r--r-- | tests/smp/ifndef_2 (renamed from tests/input_files/ifndef_2) | 0 | ||||
-rw-r--r-- | tests/smp/ifndef_3 (renamed from tests/input_files/ifndef_3) | 0 | ||||
-rw-r--r-- | tests/smp/ifneq_1 (renamed from tests/input_files/ifneq_1) | 0 | ||||
-rw-r--r-- | tests/smp/ifneq_2 (renamed from tests/input_files/ifneq_2) | 0 | ||||
-rw-r--r-- | tests/smp/ifneq_3 (renamed from tests/input_files/ifneq_3) | 0 | ||||
-rw-r--r-- | tests/smp/ifneq_4 (renamed from tests/input_files/ifneq_4) | 0 | ||||
-rw-r--r-- | tests/smp/include_1 (renamed from tests/input_files/include_1) | 0 | ||||
-rw-r--r-- | tests/smp/include_2 (renamed from tests/input_files/include_2) | 0 | ||||
-rw-r--r-- | tests/smp/markdown_html_1 (renamed from tests/input_files/markdown_html_1) | 0 | ||||
-rw-r--r-- | tests/smp/markdown_html_2 (renamed from tests/input_files/markdown_html_2) | 0 | ||||
-rw-r--r-- | tests/smp/non_macro_html (renamed from tests/input_files/non_macro_html) | 0 | ||||
-rw-r--r-- | tests/smp/shell_1 (renamed from tests/input_files/shell_1) | 0 | ||||
-rw-r--r-- | tests/smp/snnl_1 (renamed from tests/input_files/snnl_1) | 0 | ||||
-rw-r--r-- | tests/smp/snnl_2 (renamed from tests/input_files/snnl_2) | 0 | ||||
-rw-r--r-- | tests/smp/snnl_3 (renamed from tests/input_files/snnl_3) | 0 | ||||
-rw-r--r-- | tests/smp/whitespace_deleting_1 (renamed from tests/input_files/whitespace_deleting_1) | 0 | ||||
-rw-r--r-- | tests/smp/whitespace_deleting_2 (renamed from tests/input_files/whitespace_deleting_2) | 0 | ||||
-rw-r--r-- | tests/smp/wodl_1 (renamed from tests/input_files/wodl_1) | 0 | ||||
-rw-r--r-- | tests/smp/wodl_2 (renamed from tests/input_files/wodl_2) | 0 | ||||
-rwxr-xr-x | tests/test_macro_processor.sh | 4 | ||||
-rwxr-xr-x | tests/test_skaldpress.sh | 7 | ||||
-rwxr-xr-x | tests/test_unittests.sh | 27 | ||||
-rw-r--r-- | tests/unit_tests/metadata_parser_1.py | 28 |
60 files changed, 1141 insertions, 41 deletions
diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..dc25faa --- /dev/null +++ b/Makefile @@ -0,0 +1,38 @@ +.PHONY: test build minorbump publish install clean + +venv/bin/tt: src/timetracker/*.py venv + . venv/bin/activate && \ + python -m pip install -e .[dev] + +venv: pyproject.toml + python3 -m venv venv + +build: venv + . venv/bin/activate && \ + python -m pip install build twine && \ + python -m build && \ + python -m twine check dist/* + +minorbump: + . venv/bin/activate && \ + bumpver update --patch + +publish: minorbump build + . venv/bin/activate && \ + twine upload dist/* + +install: venv + python -m pip install . + +clean: + rm -rf venv + rm -rf dist + +test: + . venv/bin/activate && \ + black --check src && \ + mypy src/ && \ + pyflakes src/ && \ + ./tests/test_macro_processor.sh && \ + ./tests/test_skaldpress.sh + diff --git a/pyproject.toml b/pyproject.toml index e467c66..56b91d6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,7 +26,7 @@ classifiers = [ dependencies = ["markdown", "py-gfm"] [project.optional-dependencies] -dev = ["check-manifest", "black"] +dev = ["check-manifest", "black", "pyflakes", "mypy", "types-Markdown"] test = ["coverage"] [project.urls] @@ -34,6 +34,6 @@ test = ["coverage"] [project.scripts] smp = "smp:main" -#skaldpress = "skaldpress:main" +skaldpress = "skaldpress.main:main" [tool.setuptools] diff --git a/skaldpress.1 b/skaldpress.1 new file mode 100644 index 0000000..5ed2046 --- /dev/null +++ b/skaldpress.1 @@ -0,0 +1,96 @@ +.TH SKALDPRESS 1 2024-06-08 +.SH name +Skaldpress \- Templating engine +.SH SYNOPSIS +.B skaldpress [OPTIONS] + +.SH DESCRIPTION +.B smp +smp is a macro processor, made specifically for a usecase when doing templating of websites. + + +Macros which are available using skaldpress, in addition to the builtin smp(1)-macros +.IP "\fBtemplate(<template>,<content>)\fR" +Will process the content using a template, in-place. +This macro is used by the \fBall_tagged_by\fR-macro. 
+ +.IP "\fBall_tagged_by(<tag_name>,<template>[,<field to sort by>][,<reversed>])\fR" +Will output all documents which had the specified tag, using the specified template. +If a field to sort by is specified, it will output ascending based on that field, +or reversed if a fourth argument \fBreversed\fR is specified. + +.PP +All input files can have a metadata-block at the beginning. +Any keys will be defined as macros as \fBMETADATA_<key>\fR. +There are a few special keys which has a specific effect: + +.IP "\fBskip_smp\fR" +If this is \fItrue\fR, macro processing will be skipped for the file, and it will be +included verbatim. + +.IP "\fBskip_build\fR" +If this is \fItrue\fR, the file will not be written to build-dir, but it will be compiled and available for macros like \fBall_tagged_by\fR. + +.IP "\fBtags\fR" +This is a list, and all files with entries here will be registered during the first compilation round. +During the second compilation, macros like \fBall_tagged_by\fR will then +make all files with the relevant content available and compile it in. + +.IP "\fBtarget_filename\fR" +By default, a file will have the same name in the \fIoutput\fR-directory (unless a template has a different extension). +This overrides the output-filename, but it keeps the file in the same directory, and it will not affect the extension. + +.IP "\fBtemplate\fR" +If specified, the file will be compiled using a template. +In the template, all \fBMETADATA_<key>\fR macros will be available, +additionally templates can use the \fBCONTENT\fR-macro to get the expanded content of the file. +This is recursive, meaning that templates can use other templates, with their own metadata-block, +since it is recursive, any metadata in templates will overwrite any metadata from their children, +but will keep metadata that is not overwritten. +This means templates can add additional context. 
+ +.IP "\fBkeep_states\fR" +List or string where every listed state/variable will be kept for subsequent compilations. +Meaning that you can e.g. construct an array in the first compilation, which is then used the second time around. +Setting this means that the file will always be recompiled, regardless of other instances (unless a \fB--filter\fR is set). + +.SH OPTIONS +.IP "\fB-o, --out, --output\fR \fIpath\fR +Specifies the directory to output the compiled files to, defaults to \fIbuild\fR. + +.IP "\fB-i, --input\fR \fIpath\fR +Specifies the directory where all files are iterated, defaults to \fIcontent\fR. + +.IP "\fB-s, --static\fR \fIpath\fR +Specifies the directory where static files are copied from, defaults to \fIstatic\fR. + +.IP "\fB-t, --templates\fR \fIpath\fR +Specifies the directory where templates are stored, defaults to \fItemplates\fR. + +.IP "\fB-f, --filter\fR \fIfilter\fR +Comma-separated list of files to compile, if not specified, all files in \fB--input\fR +will be compiled. + +.IP "\fB-e, --exclude\fR \fIfilter\fR +Comma-separated list of files to exclude, if not specified, no files +will be excluded. + +.IP "\fB-m, --metadata\fR \fIkey\fR=\fIvalue\fR +Metadata to add to compiled file, this can e.g. set a template for all files if you don't want to manually add YAML-blocks. +Specify multiple times, to set multiple fields. + +.SH EXAMPLES + +To run skaldpress on a simple project, simply call it with no arguments +in a directory where you have a \fBcontent\fR-directory, +and optionally a \fBtemplates\fR-directory.
+.PP +.nf +.RS +skaldpress +.RE +.fi +.PP + +.SH SEE ALSO +smp(1) diff --git a/skaldpress_main.rs b/skaldpress_main.rs new file mode 100644 index 0000000..5e6530c --- /dev/null +++ b/skaldpress_main.rs @@ -0,0 +1,324 @@ +use skaldpress::macro_processor::error::SMPError; +use skaldpress::macro_processor::macro_processor::{MacroProcessorWarning, MacroType}; +use skaldpress::skaldpress::filelists::{make_filelist, FileList, FileListFileTargetAction}; +use skaldpress::skaldpress::parseopts::{parseopts, Opts}; +use std::cmp::Ordering; +use std::collections::HashMap; +use std::collections::VecDeque; +use std::fs; +use std::path::Path; +use std::time::Instant; + +use skaldpress::macro_processor::MacroProcessor; +use skaldpress::skaldpress::error::SkaldpressError; +use skaldpress::skaldpress::error::{ + SP_COMPILE_FILE_EXTENSION_ERROR_2, SP_COMPILE_FILE_MACRO_PROCESS_ERROR, + SP_COMPILE_FILE_TEMPLATE_READ_ERROR, SP_COMPILE_TEMPLATE_MACRO_PROCESS_ERROR, + SP_GEN_DEST_STRIP_PREFIX_ERROR, +}; +use skaldpress::skaldpress::metadata_parser::extract_parse_yaml_metadata; +use skaldpress::skaldpress::metadata_parser::YamlValue; + +/// Convenience function for doing cmp on a metadata key of two arbitrary indexes +/// This takes a macro_processor as input, so that it can emit warnings if the ordering failed in +/// any way. 
+fn order_index_by_cached_data( + smp: &mut MacroProcessor, + key: &str, + a: &usize, + b: &usize, +) -> Ordering { + let compiled_files: &Vec<CompiledFile>; + unsafe { + compiled_files = COMPILED_FILES.as_ref(); + } + if *a >= compiled_files.len() { + smp.warnings.push(MacroProcessorWarning::new(format!( + "\"a\" is not a cached file {} >= {}", + *a, + compiled_files.len() + ))); + return Ordering::Equal; + } + if *b >= compiled_files.len() { + smp.warnings.push(MacroProcessorWarning::new(format!( + "\"b\" is not a cached file {} >= {}", + *b, + compiled_files.len() + ))); + return Ordering::Equal; + } + let Some(a) = &compiled_files[*a].metadata.get(key) else { + smp.warnings.push(MacroProcessorWarning::new(format!( + "Key {:?} not found for ordering data in {:?}", + key, compiled_files[*a].source_path + ))); + return Ordering::Equal; + }; + let Some(b) = &compiled_files[*b].metadata.get(key) else { + smp.warnings.push(MacroProcessorWarning::new(format!( + "Key {:?} not found for ordering data in {:?}", + key, compiled_files[*b].source_path + ))); + return Ordering::Equal; + }; + a.cmp(b) +} + +/// SMP Macro for getting all files with specific tag, this is only _really_ effective the second run +/// +/// Usage in files: +/// all_tagged_by(<tag name>, <template> [, <field to sort by>] [, reversed]) +fn sp_all_tagged_by( + smp: &mut MacroProcessor, + macro_name: &str, + args: &mut [String], +) -> Result<String, SMPError> { + if args.len() < 1 { + return Ok(macro_name.to_string()); + } + + let compiled_files: &Vec<CompiledFile>; + let compiled_files_by_tag; + unsafe { + compiled_files_by_tag = COMPILED_FILES_BY_TAG.as_ref().unwrap(); + compiled_files = COMPILED_FILES.as_ref(); + } + let Some(tagged_files) = compiled_files_by_tag.get(&args[0]) else { + println!(" \x1b[35mNo tags for {}\x1b[0m", args[0]); + return Ok(String::new()); + }; + + let mut out = String::new(); + + let mut tagged_files = tagged_files.clone(); + if args.len() > 2 { + if args.len() > 3 && 
args[3] == "reversed" { + tagged_files.sort_by(|a, b| order_index_by_cached_data(smp, &args[2], b, a)); + } else { + tagged_files.sort_by(|a, b| order_index_by_cached_data(smp, &args[2], b, a)); + } + } + + for doc_i in tagged_files { + let file = &compiled_files[doc_i]; + let mut smp_local = smp.clone(); + macro_processor_initialize(&file.metadata, &mut smp_local, None); + out.push_str(&sp_template( + &mut smp_local, + "template", + &mut [args[1].clone(), file.content.clone()], + )?); + print_warnings(&smp_local); + } + Ok(out) +} + +fn macro_processor_initialize( + metadata: &HashMap<String, YamlValue>, + old_macro_processor: &mut MacroProcessor, + additional_state: Option<&HashMap<String, MacroType>>, +) { + let macro_processor = old_macro_processor; + macro_processor.define_macro( + String::from("all_tagged_by"), + MacroType::Function(sp_all_tagged_by), + ); + macro_processor.define_macro(String::from("template"), MacroType::Function(sp_template)); + for (key, value) in metadata { + let macro_name = format!("METADATA_{}", key); + if !macro_processor.macros.contains_key(¯o_name) { + let value = match value { + YamlValue::List(l) => { + let mut out = Vec::new(); + for el in l { + out.push(MacroType::String(el.to_string())); + } + MacroType::Array(out) + } + value => MacroType::String(value.to_string()), + }; + macro_processor.define_macro(macro_name, value); + } + } + if let Some(additional_state) = additional_state { + for (key, value) in additional_state { + macro_processor.define_macro(key.to_string(), value.clone()); + } + } +} + +fn get_template_path(template: &str, opts: &Opts) -> String { + format!("{}{}", opts.template_dir, template) +} + + +fn main() -> Result<(), SkaldpressError> { + unsafe { + COMPILED_FILES_BY_TAG = Some(HashMap::new()); + } + let mut opts = parseopts().build(); + + let emptyvec: Vec<String> = Vec::new(); + let now = Instant::now(); + let mut filelist_dest = make_filelist( + &Path::new(&opts.build_dir), + &Path::new(&opts.build_dir), 
+ &emptyvec, + &emptyvec, + FileListFileTargetAction::NONE, + false, + &opts.template_dir, + &opts.metadata, + )?; + let mut filelist_src = FileList::new(); + filelist_src.extend(make_filelist( + &Path::new(&opts.static_dir), + &Path::new(&opts.static_dir), + &emptyvec, + &opts.static_exclude, + FileListFileTargetAction::COPY, + false, + &opts.template_dir, + &opts.metadata, + )?); + filelist_src.extend(make_filelist( + &Path::new(&opts.content_dir), + &Path::new(&opts.content_dir), + &opts.filter, + &opts.exclude, + FileListFileTargetAction::COMPILE, + true, + &opts.template_dir, + &opts.metadata, + )?); + + let elapsed = now.elapsed(); + let mut work_queue: VecDeque<(String, FileListFileTargetAction)> = VecDeque::new(); + + // We also will not currently discover empty directories from build target, + // we should attempt to do that. + for (filename, _file) in filelist_dest.missing_from(&filelist_src).files { + filelist_dest.set_action(&filename, FileListFileTargetAction::DELETE); + work_queue.push_back((filename, FileListFileTargetAction::DELETE)); + } + for (filename, file) in filelist_src.missing_from(&filelist_dest).files { + work_queue.push_back((filename, file.target_action)); + } + for (filename, file) in filelist_src.changed_from(&filelist_dest).files { + work_queue.push_back((filename, file.target_action)); + } + + let mut dependants: Vec<(String, String)> = Vec::new(); + for (filename, file) in &filelist_src.files { + if !file.metadata.contains_key("dependencies") { + continue; + } + match file.metadata["dependencies"].clone() { + YamlValue::Scalar(s) => dependants.push((s, filename.clone())), + _ => {} + } + } + + println!( + "Generated filelist in {:#?} seconds, {} in destination, {} in source", + elapsed, + filelist_dest.len(), + filelist_src.len() + ); + println!("Total file actions to take {}", work_queue.len()); + + //let mut compiled_files: Vec<String> = Vec::with_capacity(work_queue.len()); + + while let Some((filename, action)) = 
work_queue.pop_front() { + println!("> {:#?} {:#?}", action, filename); + match action { + FileListFileTargetAction::DELETE => { + // This should be some flag that can be toggled (like with -del in rsync) + if false { + continue; + } + + let file = filelist_dest.files.get(&filename).expect("SP87"); + println!(" Deleting {:#?}", file.file_path); + if let Err(e) = std::fs::remove_file(&file.file_path.as_path()) { + println!( + "\x1b[31mError copying {:#?}: {}\x1b[0m", + file.file_path.as_path(), + e + ); + } + } + FileListFileTargetAction::COPY => { + let file = filelist_src.files.get(&filename).expect("SP87"); + let dest_file_path = Path::new(&opts.build_dir).join(file.file_rel.as_path()); + println!(" Copying {:#?}", file.file_path); + println!(" {:#?}", dest_file_path); + let Some(dest_dir) = &dest_file_path.parent() else { + println!("\x1b[31mError creating dir {:#?}\x1b[0m", dest_file_path); + continue; + }; + if let Err(e) = std::fs::create_dir_all(&dest_dir) { + println!("\x1b[31mError creating dir {:#?}: {}\x1b[0m", dest_dir, e); + continue; + } + if let Err(e) = std::fs::copy(&file.file_path.as_path(), dest_file_path) { + println!( + "\x1b[31mError copying {:#?}: {}\x1b[0m", + file.file_path.as_path(), + e + ); + } + } + FileListFileTargetAction::COMPILE => { + // This is a idea fo something, have not figured out how to deal with having files + // depending on all other files (index e.g.), as I cannot see how to get around + // recompiling everything anyway. + // + // I guess we could technically get away with a single compilation instead of two + // independent ones in some circumstances tho, unless the file needs to recompiled + // anyway, which would be the case for any files with TOC e.g. 
+ // let file = filelist_src.files.get(&filename).expect("SP87"); + // println!(" Compiling {:#?}", file.file_path); + // + // // check list of already compiled files, to see if all dependants are compiled + // // if not, move yourself to the end of the queue (basically just reinsert yourself + // // without compiling + // // + // // Check if the file has been compiled already, if so, it does not need to be added + // // to queue + // + // // If any dependants depend on this file, add them to the end of the work queue, + // // if they are not already there. + // 'dependants: for i in 0..dependants.len() { + // let (pat, target) = dependants[i].clone(); + // if filename == target { + // continue; + // } + // if file_pat_match(&filename, &pat) { + // for (workel, _action) in &work_queue { + // if workel == &target { + // continue 'dependants; + // } + // } + // work_queue.push_back((target, action.clone())); + // } + // + // } + + // compiled_files.push(filename.clone()); + } + FileListFileTargetAction::NONE => {} + } + } + + // Running compilation twice, needed for some macros which depends on compiled content + // We should make some kind of file-list, and only re-compile files which has changed. + println!("Compiling content"); + let _ = compile_files_in_directory(Path::new(&opts.content_dir), &opts); + println!("Rerun compilation"); + opts.first_run = false; + let _ = compile_files_in_directory(Path::new(&opts.content_dir), &opts); + + Ok(()) +} @@ -0,0 +1,105 @@ +.TH SMP 1 2024-06-08 +.SH name +Skaldpress Macro Processor \- Macro processor +.SH SYNOPSIS +.B smp [\fIinput_file\fB] + +.SH DESCRIPTION +.B smp +smp is a macro processor, made specifically for a usecase when doing templating of websites. + + +.SS Macros +.IP "\fBinclude(<file>)\fR" +Includes a file in-place, performing macro-expansion on it. +NOTE, THERE IS NO LOOP PROTECTION HERE! + +.IP "\fBinclude_varbatim(<file>)\fR" +Same as \fBinclude\fR, but does not expand macros. 
+ +.IP "\fBdefine(<macro_name> [, <macro content>])\fR" +This defines a macro, optionally with some content. +The optional content will be expanded immediately, +and later the already processed content will be included in the output. + +.IP "\fBifdef(<macro_name>, <output if defined> [, <output if not defined>])\fR" + +.IP "\fBifndef(<macro_name>, <output if not defined> [, <output if defined>])\fR" + +.IP "\fBifeq(<arg1>, <arg2>, <output if equal> [, <output if not equal>])\fR" + +.IP "\fBifneq(<arg1>, <arg2>, <output if not equal> [, <output if equal>])\fR" + +.IP "\fBshell(<command>)\fR" +Runs command on shell, and includes the command output in the output + +.IP "\fBexpr(<arg1>, <arg2>, ..., <argN>)\fR" +Shorthand for running the expr command, expands all arguments, and executes it on the shell. + +.IP "\fBdefine_array(<macro_name>)\fR" +Defines a macro as a array, this can later be used with macros like \fBarray_push\fR and \fBarray_each\fR. + +.IP "\fBarray_push(<macro_name>, <value>[, <additional value(s)>])\fR" +On a macro that is defined as a array, this will add one or more elements. + +.IP "\fBarray_each(<macro_name>, <template macro>])\fR" +Push any arguments to array macro +Process each element in a array as a macro-invokation on the second argument +Not the best way to do this, it is not sensibly recursive. + +.IP "\fBarray_size(<macro_name>)\fR" +Will return number of elements in a macro array. + +.IP "\fBexplode(<array_name>, <delimiter>, <input>)\fR" +Explode a input into individual array elements. + +.IP "\fBformat_time(<format>, <time>)\fR" +Format a RFC3339-timestamp to the specified format. Format is similar to strftime. +Only available if the \fBtime\fR-feature was enabled during compilation. + +.IP "\fBhtml_from_markdown(<markdown>)\fR" +If compiled with \fBmarkdown\fR, this macro will convert it's argument from markdown to html. +It will process it's input twice before actually doing the conversion. 
This is to remove the outer quotes of an argument. +So you should probably always quote arguments to this. + +.IP "\fBwodl(<url>)\fR" +If compiled with \fBdeadlinks\fR, this macro will always return its argument. +But it will also perform a request to the link, and emit a warning if it doesn't return HTTP 200 OK. +This will significantly slow down compile times! + +.SS "\fBCompile-Time Feature Flags\fR" +There are some compile-time flags that can enable/disable features, most are enabled by default, +and exist either because they trigger an external dependency, or because they are unlikely to work without glibc. + +.IP "\fBreadline\fR" 2 +\fBEnabled by default\fR. +Enables the use of libc readline in REPL mode. If not enabled, normal stdio will be used for input. + +Requires readline to be installed on the system. + +.IP "\fBtime\fR" 2 +\fBEnabled by default\fR. +Enables the \fIformat_time\fR-macro, this will compile the chrono dependency to format time. + +.IP "\fBmarkdown\fR" 2 +\fBEnabled by default\fR. +Enables the \fIhtml_from_markdown\fR-macro, this will compile the markdown dependency. And can convert markdown into html. + +.IP "\fBdeadlinks\fR" 2 +\fBEnabled by default\fR. +Enables the \fIwodl\fR-macro, this will compile the minreq dependency. +Allows testing the return code of links, and warns when the request is unsuccessful. + +.IP "\fBguile\fR" 2 +If compiled with guile, the macro processor supports running guile-code inline. +This is enabled with \fI%()%\fR in the input, and anything inside the parentheses will be evaluated as guile. + +Requires libguile-3.0 to be installed on the system. + +\fBExperimental feature!\fR +.SH OPTIONS +If an input file is provided, that will be processed. +If not, a sort of interactive REPL will be started instead.
+ +.SH SEE ALSO +skaldpress(1) diff --git a/src/skaldpress/__init__.py b/src/skaldpress/__init__.py new file mode 100644 index 0000000..f00b84f --- /dev/null +++ b/src/skaldpress/__init__.py @@ -0,0 +1,6 @@ +__version__ = "0.0.1" +# import skaldpress.smp_macros +# +# __all__ = [ +# "skaldpress.smp_macros", +# ] diff --git a/src/skaldpress/file_metadata_extract.rs b/src/skaldpress/file_metadata_extract.rs new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/src/skaldpress/file_metadata_extract.rs diff --git a/src/skaldpress/main.py b/src/skaldpress/main.py new file mode 100644 index 0000000..66fd0b1 --- /dev/null +++ b/src/skaldpress/main.py @@ -0,0 +1,326 @@ +import os +from argparse import ArgumentParser +from dataclasses import dataclass +import smp.macro_processor +from skaldpress.metadata_parser import extract_parse_yaml_metadata + + +@dataclass +class CompiledFile: + content: str + metadata: dict + extension: str + stored_smp_state: dict + source_path: str + needs_recompilation: bool + + +COMPILED_FILES: list[CompiledFile] = list() +COMPILED_FILES_BY_TAG: dict[str, CompiledFile] = dict() + + +class SkaldpressError(Exception): + def __init__(self, code, error, path=None): + self.code = code + self.error = error + self.path = path + + +def sp_template(macro_processor, template, content): + with open(template, "r") as f: + file_content = f.read() + macro_processor.macros["CONTENT"] = content + return macro_processor.process_input(file_content) + + +def get_template_path(template: str, opts): + return f"{opts.template_dir}{template}" + + +def cached_file_id_by_path(source_path: str) -> int | None: + for i in range(len(COMPILED_FILES)): + if COMPILED_FILES[i] == source_path: + return i + return None + + +def print_warnings(macro_processor): + for warning in macro_processor.warnings: + print(f" \u001b[33m{warning.description}\u001b[0m") + + +def file_pat_match(file: str, pat: str) -> bool: + if file == pat: + return True + if pat.startswith("*") and 
file.endswith(pat.removeprefix("*")): + return True + if pat.startswith("*") and file.endswith(pat.removeprefix("*")): + return True + return False + + +def file_filtered(file: str, filters: list[str], exclude: list[str]) -> bool: + for filter in exclude: + if file_pat_match(file, filter): + return True + if len(filters) == 0: + return False + for filter in filters: + if file_pat_match(file, filter): + return False + return True + + +def macro_processor_initialize(metadata, old_macro_processor, additional_state=None): + macro_processor = old_macro_processor + # macro_processor.define_macro("all_tagged_by", sp_all_tagged_by) + macro_processor.define_macro("template", sp_template) + + for key, value in metadata.items(): + macro_name = f"METADATA_{key}" + if macro_name not in macro_processor.macros: + if isinstance(value, list): + out = [str(el) for el in value] + macro_value = out + else: + macro_value = str(value) + macro_processor.define_macro(macro_name, macro_value) + + if additional_state: + for key, value in additional_state.items(): + macro_processor.define_macro(key, value) + + +def extract_requested_macro_processor_state(macro_processor): + requested_keys = macro_processor.macros.get("METADATA_keep_states") + if requested_keys: + if isinstance(requested_keys, list): + requested_keys = [str(el) for el in requested_keys] + elif isinstance(requested_keys, str): + requested_keys = [str(requested_keys)] + else: + macro_processor.warnings.append( + "keep_states specification must be list or scalar" + ) + return {} + + res = {} + for stored_key in requested_keys: + stored_value = macro_processor.macros.get(stored_key) + if stored_value: + res[stored_key] = stored_value + return res + return {} + + +def needs_recompilation(macro_processor): + if "METADATA_keep_states" in macro_processor.macros: + return True + for macro_name in macro_processor.macro_invocations: + if macro_name == "all_tagged_by": + return True + return False + + +def wrap_template(macro_processor, 
def wrap_template(macro_processor, template_file, file_content, opts):
    """Expand *file_content* into *template_file*'s CONTENT slot.

    The template's own front matter is parsed and injected as macros; if it
    names a parent ``template``, wrapping recurses upwards.  Returns the
    rendered content together with the extension of the outermost template.

    Raises SkaldpressError(1) when the template cannot be read and
    SkaldpressError(2) when macro expansion fails.
    """
    try:
        with open(template_file, "r") as f:
            template = f.read()
    except OSError as e:
        raise SkaldpressError(1, e, template_file)

    template_extension = os.path.splitext(template_file)[1][1:] or ""

    template_metadata, template_content = extract_parse_yaml_metadata(template) or (
        {},
        template,
    )

    macro_processor_initialize(template_metadata, macro_processor, None)
    macro_processor.define_macro_string("CONTENT", file_content)
    try:
        content = macro_processor.process_input(template_content)
    except Exception as e:
        # CONSISTENCY FIX: include the offending file like the OSError branch
        # does, and chain the cause so the original traceback survives.
        raise SkaldpressError(2, e, template_file) from e

    template_parent = template_metadata.get("template")
    if not template_parent:
        return content, template_extension

    template_parent = str(template_parent)
    print(f" Wrapping in template {template_parent}")
    return wrap_template(
        macro_processor, get_template_path(template_parent, opts), content, opts
    )


def compile_file(file_path, opts):
    """Compile one content file and return it as a CompiledFile.

    Front matter is parsed into metadata, the body is run through the macro
    processor (markdown bodies are wrapped in html_from_markdown), and the
    result is wrapped in its template chain if one is declared.

    Raises SkaldpressError(3) for extension-less paths and
    SkaldpressError(1) when the file cannot be read.
    """
    extension = os.path.splitext(file_path)[1][1:] or ""
    if not extension:
        raise SkaldpressError(3, None)

    try:
        with open(file_path, "r") as f:
            file_content = f.read()
    except OSError as e:
        raise SkaldpressError(1, e, file_path)

    # Renamed from `map`, which shadowed the builtin.
    metadata, file_content = extract_parse_yaml_metadata(file_content) or (
        {},
        file_content,
    )
    metadata.update(opts.metadata)
    filename = os.path.relpath(file_path, opts.content_dir)
    metadata["filename"] = os.path.splitext(filename)[0]

    # BUGFIX: metadata values are not guaranteed to be strings (the parser
    # yields ints/datetimes too), so coerce before .lower().
    skip_smp = str(metadata.get("skip_smp", "")).lower() == "true"
    if opts.compilefilter and not file_filtered(file_path, opts.compilefilter, []):
        skip_smp = True

    if skip_smp:
        return CompiledFile(
            content=file_content,
            metadata=metadata,
            extension=extension,
            source_path=file_path,
            needs_recompilation=False,
            stored_smp_state={},
        )

    # Re-inject any state this file asked to keep from the previous pass.
    stored_smp_state = None
    cfile_i = cached_file_id_by_path(file_path)
    if cfile_i is not None:
        stored_smp_state = COMPILED_FILES[cfile_i].stored_smp_state

    macro_processor = smp.macro_processor.MacroProcessor()
    macro_processor_initialize(metadata, macro_processor, stored_smp_state)

    if extension == "md":
        file_content = f'html_from_markdown(%"{file_content}"%)'

    if "template" not in metadata:
        file_content = macro_processor.process_input(file_content)
        print_warnings(macro_processor)
        return CompiledFile(
            content=file_content,
            stored_smp_state=extract_requested_macro_processor_state(macro_processor),
            metadata=metadata,
            extension=extension,
            source_path=file_path,
            needs_recompilation=needs_recompilation(macro_processor),
        )

    template_file = get_template_path(metadata["template"], opts)
    content, template_extension = wrap_template(
        macro_processor, template_file, file_content, opts
    )

    print_warnings(macro_processor)
    return CompiledFile(
        content=content,
        stored_smp_state=extract_requested_macro_processor_state(macro_processor),
        metadata=metadata,
        extension=template_extension,
        source_path=file_path,
        needs_recompilation=needs_recompilation(macro_processor),
    )


def compile_file_and_write(source_file_path, opts):
    """Compile *source_file_path* and write the result under opts.build_dir.

    On the first run the compiled file is appended to the global cache and
    indexed by tag; on later runs it replaces its cached predecessor.  Files
    with ``skip_build: true`` are compiled (for their side effects on global
    state) but never written out.
    """
    compiled_file = compile_file(source_file_path, opts)

    if opts.first_run:
        COMPILED_FILES.append(compiled_file)
        cfile_i = len(COMPILED_FILES) - 1
        cfile = COMPILED_FILES[cfile_i]

        tags = cfile.metadata.get("tags")
        if tags and isinstance(tags, list):
            for tag in tags:
                COMPILED_FILES_BY_TAG.setdefault(tag, []).append(cfile_i)
    else:
        cfile_i = cached_file_id_by_path(compiled_file.source_path)
        COMPILED_FILES[cfile_i], compiled_file = compiled_file, COMPILED_FILES[cfile_i]
        cfile = COMPILED_FILES[cfile_i]

    # BUGFIX: guard the type before .lower() — metadata values may be non-str.
    skip_build = cfile.metadata.get("skip_build")
    if isinstance(skip_build, str) and skip_build.lower() == "true":
        return

    dest_file_path = os.path.join(
        opts.build_dir, os.path.relpath(source_file_path, opts.content_dir)
    )
    dest_file_path = os.path.splitext(dest_file_path)[0] + "." + cfile.extension

    target_filename = cfile.metadata.get("target_filename")
    if target_filename and isinstance(target_filename, str):
        dest_file_path = os.path.join(
            os.path.dirname(dest_file_path), target_filename + "." + cfile.extension
        )

    dest_dir = os.path.dirname(dest_file_path)
    os.makedirs(dest_dir, exist_ok=True)

    print(f"> Writing {source_file_path} to {dest_file_path}")
    with open(dest_file_path, "w") as f:
        f.write(cfile.content)
def compile_files_in_directory(directory, opts):
    """Recursively compile every matching file below *directory*.

    On re-runs (``opts.first_run`` is False) only files previously flagged
    as needing recompilation are compiled again.
    """
    try:
        entries = os.listdir(directory)
    except OSError as e:
        # CONSISTENCY FIX: raise the project error type (code 8) instead of a
        # bare Exception, so the recursive handler below can actually catch
        # directory failures.
        raise SkaldpressError(8, e, directory)

    for entry in entries:
        path = os.path.join(directory, entry)

        needs_recompilation = False
        cfile_i = cached_file_id_by_path(path)
        if cfile_i is not None:
            needs_recompilation = COMPILED_FILES[cfile_i].needs_recompilation

        should_compile = (opts.first_run or needs_recompilation) and not file_filtered(
            path, opts.filter, opts.exclude
        )
        if os.path.isfile(path) and should_compile:
            print(f"< Compiling {path}")
            try:
                compile_file_and_write(path, opts)
            except Exception as e:
                print(f"\033[31mError compiling {path}: {e}\033[0m")
                raise  # bare raise keeps the original traceback
        elif os.path.isdir(path):
            try:
                compile_files_in_directory(path, opts)
            except SkaldpressError as e:
                print(f"\033[31mError processing directory {path}: {e}\033[0m")


def main():
    """Command-line entry point: parse arguments and compile the site."""
    parser = ArgumentParser()
    parser.add_argument(
        "-o", "--out", "--output", metavar="path", default="build/", dest="build_dir"
    )
    parser.add_argument(
        "-i", "--input", metavar="path", default="content/", dest="content_dir"
    )
    parser.add_argument("-s", "--static", metavar="path", default="static/")
    parser.add_argument(
        "-t", "--templates", metavar="path", default="templates/", dest="template_dir"
    )
    # BUGFIX: these options are consumed as *lists* of patterns; without
    # action="append" a single use produced a plain string, which
    # file_filtered would then iterate character by character.
    parser.add_argument("-f", "--filter", metavar="filter", action="append", default=[])
    parser.add_argument(
        "-e", "--exclude", metavar="filter", action="append", default=[]
    )
    parser.add_argument("-m", "--metadata", nargs="+", metavar="key=value", default=[])
    parser.add_argument(
        "-c", "--compilefilter", metavar="filter", action="append", default=[]
    )
    parser.add_argument("-x", "--xclude", metavar="filter", action="append", default=[])
    args = parser.parse_args()

    # BUGFIX: compile_file() feeds this into dict.update(), so the raw
    # "key=value" strings must be parsed into a mapping first.
    args.metadata = dict(kv.partition("=")[::2] for kv in args.metadata)

    args.first_run = True

    compile_files_in_directory(args.content_dir, args)


def str_to_yaml_value(in_str: str) -> Any:
    """Coerce a scalar front-matter string to int, ISO-8601 datetime, or str."""
    in_str = in_str.strip()

    # Narrowed from bare `except:` — only the expected parse failures.
    try:
        return int(in_str)
    except ValueError:
        pass

    try:
        return datetime.datetime.strptime(in_str, "%Y-%m-%dT%H:%M:%S%z")
    except ValueError:
        pass

    return in_str


def extract_parse_yaml_metadata(file_content, newline="\n") -> tuple[dict, str]:
    """Parse a leading ``---``-delimited YAML-ish front-matter block.

    Supports scalar values (int / ISO datetime / string) and simple
    ``- item`` lists.  ``change_date`` defaults to ``publish_date``.
    Returns (metadata, remaining content); files without front matter are
    returned unchanged with empty metadata.
    """
    file_lines = file_content.split(newline)
    if not file_lines or file_lines[0].strip() != "---":
        return {}, file_content

    yaml_map: dict[str, Any] = {}
    yaml_started = yaml_ended = False
    end_index = 0
    current_key = None
    current_list: list = []

    for i, line in enumerate(file_lines):
        stripped = line.strip()
        if stripped == "---":
            if yaml_started:
                yaml_ended = True
                # Byte offset of the first character after the closing marker.
                end_index = sum(len(x) + len(newline) for x in file_lines[: i + 1])
                break
            yaml_started = True
        elif yaml_started and not yaml_ended:
            if stripped.startswith("-") and current_key is not None:
                current_list.append(stripped.lstrip("-").strip())
            elif ":" in line:
                key, value = line.split(":", 1)
                if current_key is not None and current_list:
                    # BUGFIX: the accumulated list belongs to the key that
                    # opened it (current_key), not the key on this line —
                    # the original assigned it to `key`, losing the list.
                    yaml_map[current_key] = current_list
                    current_list = []

                current_key = key.strip()
                if value.strip() != "":
                    yaml_map[current_key] = str_to_yaml_value(value.strip())
                    current_key = None

    if current_key is not None and current_list:
        yaml_map[current_key] = current_list

    if not yaml_ended:
        end_index = len(file_content)

    if "publish_date" in yaml_map and "change_date" not in yaml_map:
        yaml_map["change_date"] = yaml_map["publish_date"]

    return yaml_map, file_content[end_index:]
macro_name, macro_value=None): @@ -147,7 +147,7 @@ def smp_builtin_html_from_markdown(macro_processor, text, extensions=list()): global LINK_CACHE -LINK_CACHE = dict() +LINK_CACHE: dict[str, tuple[bool, int, str]] = dict() def smp_builtin_wodl(macro_processor, link, timeout_seconds=5): diff --git a/src/smp/macro_processor.py b/src/smp/macro_processor.py index e85fbe9..8fa9d91 100644 --- a/src/smp/macro_processor.py +++ b/src/smp/macro_processor.py @@ -41,6 +41,8 @@ class MacroProcessor: warnings: list[Any] """ Global environment for python execution """ py_global_env: dict + py_local_env_alt: dict + py_local_env_current: dict special_macros: dict[str, tuple[Any, Any]] @@ -49,32 +51,40 @@ class MacroProcessor: self.macro_invocations = list() self.warnings = list() self.py_global_env = dict() - self._define_builtins(prefix=prefix) - - def _define_builtins(self, prefix=""): - self.macros[f"{prefix}define"] = smp.builtins.smp_builtin_define - self.macros[f"{prefix}undefine"] = smp.builtins.smp_builtin_undefine - self.macros[f"{prefix}define_array"] = smp.builtins.smp_builtin_define_array - self.macros[f"{prefix}ifdef"] = smp.builtins.smp_builtin_ifdef - self.macros[f"{prefix}ifndef"] = smp.builtins.smp_builtin_ifndef - self.macros[f"{prefix}ifeq"] = smp.builtins.smp_builtin_ifeq - self.macros[f"{prefix}ifneq"] = smp.builtins.smp_builtin_ifneq - self.macros[f"{prefix}include"] = smp.builtins.smp_builtin_include - self.macros[f"{prefix}include_verbatim"] = ( - smp.builtins.smp_builtin_include_verbatim - ) - self.macros[f"{prefix}shell"] = smp.builtins.smp_builtin_shell - self.macros[f"{prefix}dumpenv"] = smp.builtins.smp_builtin_dumpenv - self.macros[f"{prefix}eval"] = smp.builtins.smp_builtin_eval - self.macros[f"{prefix}array_push"] = smp.builtins.smp_builtin_array_push - self.macros[f"{prefix}array_each"] = smp.builtins.smp_builtin_array_each - self.macros[f"{prefix}array_size"] = smp.builtins.smp_builtin_array_size - self.macros[f"{prefix}explode"] = 
smp.builtins.smp_builtin_explode - self.macros[f"{prefix}format_time"] = smp.builtins.smp_builtin_format_time - self.macros[f"{prefix}html_from_markdown"] = ( - smp.builtins.smp_builtin_html_from_markdown - ) - self.macros[f"{prefix}wodl"] = smp.builtins.smp_builtin_wodl + self.py_local_env_alt = dict() + self.py_local_env_current = self.macros + self.indent_level = "" + + self._define_builtins(self.macros, prefix=prefix) + self._define_builtins(self.py_local_env_alt, prefix=prefix) + + def _define_builtins(self, env, prefix=""): + env[f"{prefix}macro_processor"] = self + env[f"{prefix}define"] = smp.builtins.smp_builtin_define + env[f"{prefix}undefine"] = smp.builtins.smp_builtin_undefine + env[f"{prefix}define_array"] = smp.builtins.smp_builtin_define_array + env[f"{prefix}ifdef"] = smp.builtins.smp_builtin_ifdef + env[f"{prefix}ifndef"] = smp.builtins.smp_builtin_ifndef + env[f"{prefix}ifeq"] = smp.builtins.smp_builtin_ifeq + env[f"{prefix}ifneq"] = smp.builtins.smp_builtin_ifneq + env[f"{prefix}include"] = smp.builtins.smp_builtin_include + env[f"{prefix}include_verbatim"] = smp.builtins.smp_builtin_include_verbatim + env[f"{prefix}shell"] = smp.builtins.smp_builtin_shell + env[f"{prefix}dumpenv"] = smp.builtins.smp_builtin_dumpenv + env[f"{prefix}eval"] = smp.builtins.smp_builtin_eval + env[f"{prefix}array_push"] = smp.builtins.smp_builtin_array_push + env[f"{prefix}array_each"] = smp.builtins.smp_builtin_array_each + env[f"{prefix}array_size"] = smp.builtins.smp_builtin_array_size + env[f"{prefix}explode"] = smp.builtins.smp_builtin_explode + env[f"{prefix}format_time"] = smp.builtins.smp_builtin_format_time + env[f"{prefix}html_from_markdown"] = smp.builtins.smp_builtin_html_from_markdown + env[f"{prefix}wodl"] = smp.builtins.smp_builtin_wodl + + def define_macro_string(self, macro_name, macro_value): + self.define_macro(macro_name, str(macro_value)) + + def define_macro(self, macro_name, macro_value): + self.macros[macro_name] = macro_value def 
expand_macro(self, macro_name: str, args: list[str] = list()) -> str: # Ignore trailing underscore in macro name, the parser will pop a space in front if @@ -104,14 +114,18 @@ class MacroProcessor: if callable(macro): signature = inspect.signature(macro) - macro_args = [] + macro_args: list[Any] = [] if ( "macro_processor" in signature.parameters or "smp" in signature.parameters ): macro_args.append(self) macro_args.extend(args) - return str(macro(*macro_args)) + try: + return str(macro(*macro_args)) + except Exception as e: + s = f"{macro_name}({','.join([repr(x) for x in macro_args])})" + raise Exception(s) if isinstance(macro, str): expanded = macro for i, arg in enumerate(args): @@ -143,8 +157,11 @@ class MacroProcessor: skip_next_line_ending = False + line_begin = True + # We should keep track of filename, linenumber, and character number on line here # So we can give sensible error messages + # Probably add to python stack trace? quote_level = 0 parens_level = 0 @@ -153,7 +170,6 @@ class MacroProcessor: while i < len(input): c = input[i] peek = None if i + 1 >= len(input) else input[i + 1] - # import sys # print(f"[{i:4}] {repr(c):4} -> {repr(peek):4} [{state}] = {repr(output)}", file=sys.stderr) @@ -264,7 +280,7 @@ class MacroProcessor: try: f = StringIO() with redirect_stdout(f): - exec(py_expr, self.py_global_env, self.macros) + exec(py_expr, self.py_global_env, self.py_local_env_current) s = f.getvalue() if s != "": output += s diff --git a/tests/skaldpress/content/article.md b/tests/skaldpress/content/article.md new file mode 100644 index 0000000..b20468c --- /dev/null +++ b/tests/skaldpress/content/article.md @@ -0,0 +1,11 @@ +--- +title: Example-article +target_filename: example-article +template: article.html +publish_date: 2025-01-01T00:00:00Z +summary: A short example article +tags: + - article +--- + +This is a example article diff --git a/tests/skaldpress/templates/article.html b/tests/skaldpress/templates/article.html new file mode 100644 index 
0000000..cb3b58b --- /dev/null +++ b/tests/skaldpress/templates/article.html @@ -0,0 +1,22 @@ +--- +template: base.html +table_of_contents: true +keep_states: + - TOC_ITEMS +--- +DNL include(templates/common_macros.smp)DNL +<header> + <h1>METADATA_title</h1> +</header> + +<main> + <div class="article-meta"> + <span>Published <time datetime="METADATA_publish_date">format_time(%d %B %Y,METADATA_publish_date)</time></span> + ifneq(METADATA_change_date, METADATA_publish_date,DNL + (<span>Changed <time datetime="METADATA_change_date">format_time(%d %B %Y,METADATA_change_date)</time></span>) DNL + ) + </div> + + CONTENT +</main> + diff --git a/tests/skaldpress/templates/base.html b/tests/skaldpress/templates/base.html new file mode 100644 index 0000000..aa73ffc --- /dev/null +++ b/tests/skaldpress/templates/base.html @@ -0,0 +1,14 @@ +<!DOCTYPE html> +<html lang="en"> + <body> + <nav> + <a href="/">Home</a> + </nav> + + CONTENT + + <footer> + </footer> + </body> +</html> + diff --git a/tests/input_files/array_each_1 b/tests/smp/array_each_1 index 5844d71..5844d71 100644 --- a/tests/input_files/array_each_1 +++ b/tests/smp/array_each_1 diff --git a/tests/input_files/array_push_1 b/tests/smp/array_push_1 index fb59320..fb59320 100644 --- a/tests/input_files/array_push_1 +++ b/tests/smp/array_push_1 diff --git a/tests/input_files/array_push_2 b/tests/smp/array_push_2 index 3b4d652..3b4d652 100644 --- a/tests/input_files/array_push_2 +++ b/tests/smp/array_push_2 diff --git a/tests/input_files/array_push_3 b/tests/smp/array_push_3 index 60417b4..60417b4 100644 --- a/tests/input_files/array_push_3 +++ b/tests/smp/array_push_3 diff --git a/tests/input_files/define_1 b/tests/smp/define_1 index 68002ea..68002ea 100644 --- a/tests/input_files/define_1 +++ b/tests/smp/define_1 diff --git a/tests/input_files/define_2 b/tests/smp/define_2 index 016294a..016294a 100644 --- a/tests/input_files/define_2 +++ b/tests/smp/define_2 diff --git a/tests/input_files/dnl_1 b/tests/smp/dnl_1 
index c12bc01..c12bc01 100644 --- a/tests/input_files/dnl_1 +++ b/tests/smp/dnl_1 diff --git a/tests/input_files/dnl_2 b/tests/smp/dnl_2 index 356aee9..356aee9 100644 --- a/tests/input_files/dnl_2 +++ b/tests/smp/dnl_2 diff --git a/tests/input_files/dnl_3 b/tests/smp/dnl_3 index 356aee9..356aee9 100644 --- a/tests/input_files/dnl_3 +++ b/tests/smp/dnl_3 diff --git a/tests/input_files/dnl_4 b/tests/smp/dnl_4 index 67c1c0e..67c1c0e 100644 --- a/tests/input_files/dnl_4 +++ b/tests/smp/dnl_4 diff --git a/tests/input_files/explode_1 b/tests/smp/explode_1 index 79b70ba..79b70ba 100644 --- a/tests/input_files/explode_1 +++ b/tests/smp/explode_1 diff --git a/tests/input_files/explode_2 b/tests/smp/explode_2 index 7ef0c08..7ef0c08 100644 --- a/tests/input_files/explode_2 +++ b/tests/smp/explode_2 diff --git a/tests/input_files/format_time_1 b/tests/smp/format_time_1 index bd9a097..bd9a097 100644 --- a/tests/input_files/format_time_1 +++ b/tests/smp/format_time_1 diff --git a/tests/input_files/ifdef_1 b/tests/smp/ifdef_1 index a308141..a308141 100644 --- a/tests/input_files/ifdef_1 +++ b/tests/smp/ifdef_1 diff --git a/tests/input_files/ifdef_2 b/tests/smp/ifdef_2 index ecef66c..ecef66c 100644 --- a/tests/input_files/ifdef_2 +++ b/tests/smp/ifdef_2 diff --git a/tests/input_files/ifdef_3 b/tests/smp/ifdef_3 index 4867034..4867034 100644 --- a/tests/input_files/ifdef_3 +++ b/tests/smp/ifdef_3 diff --git a/tests/input_files/ifeq_1 b/tests/smp/ifeq_1 index 3e49f68..3e49f68 100644 --- a/tests/input_files/ifeq_1 +++ b/tests/smp/ifeq_1 diff --git a/tests/input_files/ifeq_2 b/tests/smp/ifeq_2 index fe7c0ac..fe7c0ac 100644 --- a/tests/input_files/ifeq_2 +++ b/tests/smp/ifeq_2 diff --git a/tests/input_files/ifeq_3 b/tests/smp/ifeq_3 index 42646b8..42646b8 100644 --- a/tests/input_files/ifeq_3 +++ b/tests/smp/ifeq_3 diff --git a/tests/input_files/ifeq_4 b/tests/smp/ifeq_4 index d069530..d069530 100644 --- a/tests/input_files/ifeq_4 +++ b/tests/smp/ifeq_4 diff --git 
a/tests/input_files/ifndef_1 b/tests/smp/ifndef_1 index 857204b..857204b 100644 --- a/tests/input_files/ifndef_1 +++ b/tests/smp/ifndef_1 diff --git a/tests/input_files/ifndef_2 b/tests/smp/ifndef_2 index fbe0ba3..fbe0ba3 100644 --- a/tests/input_files/ifndef_2 +++ b/tests/smp/ifndef_2 diff --git a/tests/input_files/ifndef_3 b/tests/smp/ifndef_3 index 82a180d..82a180d 100644 --- a/tests/input_files/ifndef_3 +++ b/tests/smp/ifndef_3 diff --git a/tests/input_files/ifneq_1 b/tests/smp/ifneq_1 index 5b443c5..5b443c5 100644 --- a/tests/input_files/ifneq_1 +++ b/tests/smp/ifneq_1 diff --git a/tests/input_files/ifneq_2 b/tests/smp/ifneq_2 index 8145e9f..8145e9f 100644 --- a/tests/input_files/ifneq_2 +++ b/tests/smp/ifneq_2 diff --git a/tests/input_files/ifneq_3 b/tests/smp/ifneq_3 index 163d871..163d871 100644 --- a/tests/input_files/ifneq_3 +++ b/tests/smp/ifneq_3 diff --git a/tests/input_files/ifneq_4 b/tests/smp/ifneq_4 index c93a572..c93a572 100644 --- a/tests/input_files/ifneq_4 +++ b/tests/smp/ifneq_4 diff --git a/tests/input_files/include_1 b/tests/smp/include_1 index 1621de2..1621de2 100644 --- a/tests/input_files/include_1 +++ b/tests/smp/include_1 diff --git a/tests/input_files/include_2 b/tests/smp/include_2 index 2714eff..2714eff 100644 --- a/tests/input_files/include_2 +++ b/tests/smp/include_2 diff --git a/tests/input_files/markdown_html_1 b/tests/smp/markdown_html_1 index 6a9f143..6a9f143 100644 --- a/tests/input_files/markdown_html_1 +++ b/tests/smp/markdown_html_1 diff --git a/tests/input_files/markdown_html_2 b/tests/smp/markdown_html_2 index 6a9f143..6a9f143 100644 --- a/tests/input_files/markdown_html_2 +++ b/tests/smp/markdown_html_2 diff --git a/tests/input_files/non_macro_html b/tests/smp/non_macro_html index 6f49b4a..6f49b4a 100644 --- a/tests/input_files/non_macro_html +++ b/tests/smp/non_macro_html diff --git a/tests/input_files/shell_1 b/tests/smp/shell_1 index 3eb1f14..3eb1f14 100644 --- a/tests/input_files/shell_1 +++ b/tests/smp/shell_1 diff 
--git a/tests/input_files/snnl_1 b/tests/smp/snnl_1 index 47a10b9..47a10b9 100644 --- a/tests/input_files/snnl_1 +++ b/tests/smp/snnl_1 diff --git a/tests/input_files/snnl_2 b/tests/smp/snnl_2 index 2fcd0ba..2fcd0ba 100644 --- a/tests/input_files/snnl_2 +++ b/tests/smp/snnl_2 diff --git a/tests/input_files/snnl_3 b/tests/smp/snnl_3 index 4a7dad7..4a7dad7 100644 --- a/tests/input_files/snnl_3 +++ b/tests/smp/snnl_3 diff --git a/tests/input_files/whitespace_deleting_1 b/tests/smp/whitespace_deleting_1 index ccda45f..ccda45f 100644 --- a/tests/input_files/whitespace_deleting_1 +++ b/tests/smp/whitespace_deleting_1 diff --git a/tests/input_files/whitespace_deleting_2 b/tests/smp/whitespace_deleting_2 index 8777c39..8777c39 100644 --- a/tests/input_files/whitespace_deleting_2 +++ b/tests/smp/whitespace_deleting_2 diff --git a/tests/input_files/wodl_1 b/tests/smp/wodl_1 index 6466022..6466022 100644 --- a/tests/input_files/wodl_1 +++ b/tests/smp/wodl_1 diff --git a/tests/input_files/wodl_2 b/tests/smp/wodl_2 index 51be3e1..51be3e1 100644 --- a/tests/input_files/wodl_2 +++ b/tests/smp/wodl_2 diff --git a/tests/test_macro_processor.sh b/tests/test_macro_processor.sh index 1a7accd..daee5d2 100755 --- a/tests/test_macro_processor.sh +++ b/tests/test_macro_processor.sh @@ -3,7 +3,7 @@ ALL_OUTPUT=0 test () { - file_content=$(<tests/input_files/$1) + file_content=$(<tests/smp/$1) readarray -t sections < <(awk -v RS="---" '{print}' <<< "$file_content") var1=$(echo "$file_content" | awk -v RS="\n---\n" 'NR==1') var2=$(echo "$file_content" | awk -v RS="\n---\n" 'NR==2') @@ -36,7 +36,7 @@ if [ "$#" -eq 1 ]; then ALL_OUTPUT=1 test $1 else - for file in tests/input_files/*; do + for file in tests/smp/*; do test "$(basename $file)" done fi diff --git a/tests/test_skaldpress.sh b/tests/test_skaldpress.sh new file mode 100755 index 0000000..4e57a44 --- /dev/null +++ b/tests/test_skaldpress.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +cd tests/skaldpress +rm -rf build +mkdir -p build + +skaldpress 
diff --git a/tests/test_unittests.sh b/tests/test_unittests.sh new file mode 100755 index 0000000..802cc49 --- /dev/null +++ b/tests/test_unittests.sh @@ -0,0 +1,27 @@ +#!/bin/bash + +ALL_OUTPUT=0 + +test () { + python tests/unit_tests/$1 + if [ "$?" -eq "1" ]; then + tput setaf 1 + echo "$1 NOT OK:" + tput sgr0 + exit 1 + else + tput setaf 2 + echo "$1 OK" + tput sgr0 + fi +} + +if [ "$#" -eq 1 ]; then + ALL_OUTPUT=1 + test $1 +else + for file in tests/unit_tests/*; do + test "$(basename $file)" + done +fi + diff --git a/tests/unit_tests/metadata_parser_1.py b/tests/unit_tests/metadata_parser_1.py new file mode 100644 index 0000000..6465845 --- /dev/null +++ b/tests/unit_tests/metadata_parser_1.py @@ -0,0 +1,28 @@ +import datetime +from skaldpress.metadata_parser import extract_parse_yaml_metadata + +res = extract_parse_yaml_metadata("""--- +title: Example-article +target_filename: example-article +template: article.html +publish_date: 2025-01-01T00:00:00Z +summary: A short example article +num: 2 +tags: + - article +--- +Article content +""") + +meta = { + "title": "Example-article", + "target_filename": "example-article", + "template": "article.html", + "publish_date": datetime.datetime(2025, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), + "change_date": datetime.datetime(2025, 1, 1, 0, 0, tzinfo=datetime.timezone.utc), + "summary": "A short example article", + "num": 2, + "tags": ["article"] +} +assert res[0] == meta +assert res[1] == "Article content\n" |