author    Qrius <[email protected]>    2025-03-05 11:10:07 +0100
committer Qrius <[email protected]>    2025-03-05 11:10:10 +0100
commit    12d07e873ea21263fbacb5e0193d38893ceff846 (patch)
tree      8a3ce2bc9b3ffbb9825efe181901d3728ad7e8e3
parent    313e059f367f2ac291cd409e77d22673f9595848 (diff)
download  skaldpress-12d07e873ea21263fbacb5e0193d38893ceff846.tar.gz
          skaldpress-12d07e873ea21263fbacb5e0193d38893ceff846.zip
Seemingly up to scratch now
-rw-r--r--  skaldpress_main.rs           324
-rw-r--r--  src/skaldpress/main.py        81
-rw-r--r--  src/smp/builtins.py           13
-rw-r--r--  src/smp/macro_processor.py     7
4 files changed, 78 insertions(+), 347 deletions(-)
diff --git a/skaldpress_main.rs b/skaldpress_main.rs
deleted file mode 100644
index 5e6530c..0000000
--- a/skaldpress_main.rs
+++ /dev/null
@@ -1,324 +0,0 @@
-use skaldpress::macro_processor::error::SMPError;
-use skaldpress::macro_processor::macro_processor::{MacroProcessorWarning, MacroType};
-use skaldpress::skaldpress::filelists::{make_filelist, FileList, FileListFileTargetAction};
-use skaldpress::skaldpress::parseopts::{parseopts, Opts};
-use std::cmp::Ordering;
-use std::collections::HashMap;
-use std::collections::VecDeque;
-use std::fs;
-use std::path::Path;
-use std::time::Instant;
-
-use skaldpress::macro_processor::MacroProcessor;
-use skaldpress::skaldpress::error::SkaldpressError;
-use skaldpress::skaldpress::error::{
- SP_COMPILE_FILE_EXTENSION_ERROR_2, SP_COMPILE_FILE_MACRO_PROCESS_ERROR,
- SP_COMPILE_FILE_TEMPLATE_READ_ERROR, SP_COMPILE_TEMPLATE_MACRO_PROCESS_ERROR,
- SP_GEN_DEST_STRIP_PREFIX_ERROR,
-};
-use skaldpress::skaldpress::metadata_parser::extract_parse_yaml_metadata;
-use skaldpress::skaldpress::metadata_parser::YamlValue;
-
-/// Convenience function for comparing a metadata key of two cached-file indexes.
-/// This takes a macro_processor as input, so that it can emit warnings if the
-/// ordering fails in any way.
-fn order_index_by_cached_data(
- smp: &mut MacroProcessor,
- key: &str,
- a: &usize,
- b: &usize,
-) -> Ordering {
- let compiled_files: &Vec<CompiledFile>;
- unsafe {
- compiled_files = COMPILED_FILES.as_ref();
- }
- if *a >= compiled_files.len() {
- smp.warnings.push(MacroProcessorWarning::new(format!(
- "\"a\" is not a cached file {} >= {}",
- *a,
- compiled_files.len()
- )));
- return Ordering::Equal;
- }
- if *b >= compiled_files.len() {
- smp.warnings.push(MacroProcessorWarning::new(format!(
- "\"b\" is not a cached file {} >= {}",
- *b,
- compiled_files.len()
- )));
- return Ordering::Equal;
- }
- let Some(a) = &compiled_files[*a].metadata.get(key) else {
- smp.warnings.push(MacroProcessorWarning::new(format!(
- "Key {:?} not found for ordering data in {:?}",
- key, compiled_files[*a].source_path
- )));
- return Ordering::Equal;
- };
- let Some(b) = &compiled_files[*b].metadata.get(key) else {
- smp.warnings.push(MacroProcessorWarning::new(format!(
- "Key {:?} not found for ordering data in {:?}",
- key, compiled_files[*b].source_path
- )));
- return Ordering::Equal;
- };
- a.cmp(b)
-}
-
-/// SMP macro for getting all files with a specific tag; this is only _really_ effective on the second run
-///
-/// Usage in files:
-/// all_tagged_by(<tag name>, <template> [, <field to sort by>] [, reversed])
-fn sp_all_tagged_by(
- smp: &mut MacroProcessor,
- macro_name: &str,
- args: &mut [String],
-) -> Result<String, SMPError> {
- if args.len() < 1 {
- return Ok(macro_name.to_string());
- }
-
- let compiled_files: &Vec<CompiledFile>;
- let compiled_files_by_tag;
- unsafe {
- compiled_files_by_tag = COMPILED_FILES_BY_TAG.as_ref().unwrap();
- compiled_files = COMPILED_FILES.as_ref();
- }
- let Some(tagged_files) = compiled_files_by_tag.get(&args[0]) else {
- println!(" \x1b[35mNo tags for {}\x1b[0m", args[0]);
- return Ok(String::new());
- };
-
- let mut out = String::new();
-
- let mut tagged_files = tagged_files.clone();
- if args.len() > 2 {
- if args.len() > 3 && args[3] == "reversed" {
- tagged_files.sort_by(|a, b| order_index_by_cached_data(smp, &args[2], b, a));
- } else {
-            tagged_files.sort_by(|a, b| order_index_by_cached_data(smp, &args[2], a, b));
- }
- }
-
- for doc_i in tagged_files {
- let file = &compiled_files[doc_i];
- let mut smp_local = smp.clone();
- macro_processor_initialize(&file.metadata, &mut smp_local, None);
- out.push_str(&sp_template(
- &mut smp_local,
- "template",
- &mut [args[1].clone(), file.content.clone()],
- )?);
- print_warnings(&smp_local);
- }
- Ok(out)
-}
-
-fn macro_processor_initialize(
- metadata: &HashMap<String, YamlValue>,
- old_macro_processor: &mut MacroProcessor,
- additional_state: Option<&HashMap<String, MacroType>>,
-) {
- let macro_processor = old_macro_processor;
- macro_processor.define_macro(
- String::from("all_tagged_by"),
- MacroType::Function(sp_all_tagged_by),
- );
- macro_processor.define_macro(String::from("template"), MacroType::Function(sp_template));
- for (key, value) in metadata {
- let macro_name = format!("METADATA_{}", key);
- if !macro_processor.macros.contains_key(&macro_name) {
- let value = match value {
- YamlValue::List(l) => {
- let mut out = Vec::new();
- for el in l {
- out.push(MacroType::String(el.to_string()));
- }
- MacroType::Array(out)
- }
- value => MacroType::String(value.to_string()),
- };
- macro_processor.define_macro(macro_name, value);
- }
- }
- if let Some(additional_state) = additional_state {
- for (key, value) in additional_state {
- macro_processor.define_macro(key.to_string(), value.clone());
- }
- }
-}
-
-fn get_template_path(template: &str, opts: &Opts) -> String {
- format!("{}{}", opts.template_dir, template)
-}
-
-
-fn main() -> Result<(), SkaldpressError> {
- unsafe {
- COMPILED_FILES_BY_TAG = Some(HashMap::new());
- }
- let mut opts = parseopts().build();
-
- let emptyvec: Vec<String> = Vec::new();
- let now = Instant::now();
- let mut filelist_dest = make_filelist(
- &Path::new(&opts.build_dir),
- &Path::new(&opts.build_dir),
- &emptyvec,
- &emptyvec,
- FileListFileTargetAction::NONE,
- false,
- &opts.template_dir,
- &opts.metadata,
- )?;
- let mut filelist_src = FileList::new();
- filelist_src.extend(make_filelist(
- &Path::new(&opts.static_dir),
- &Path::new(&opts.static_dir),
- &emptyvec,
- &opts.static_exclude,
- FileListFileTargetAction::COPY,
- false,
- &opts.template_dir,
- &opts.metadata,
- )?);
- filelist_src.extend(make_filelist(
- &Path::new(&opts.content_dir),
- &Path::new(&opts.content_dir),
- &opts.filter,
- &opts.exclude,
- FileListFileTargetAction::COMPILE,
- true,
- &opts.template_dir,
- &opts.metadata,
- )?);
-
- let elapsed = now.elapsed();
- let mut work_queue: VecDeque<(String, FileListFileTargetAction)> = VecDeque::new();
-
-    // We also do not currently discover empty directories in the build target;
-    // we should attempt to do that.
- for (filename, _file) in filelist_dest.missing_from(&filelist_src).files {
- filelist_dest.set_action(&filename, FileListFileTargetAction::DELETE);
- work_queue.push_back((filename, FileListFileTargetAction::DELETE));
- }
- for (filename, file) in filelist_src.missing_from(&filelist_dest).files {
- work_queue.push_back((filename, file.target_action));
- }
- for (filename, file) in filelist_src.changed_from(&filelist_dest).files {
- work_queue.push_back((filename, file.target_action));
- }
-
- let mut dependants: Vec<(String, String)> = Vec::new();
- for (filename, file) in &filelist_src.files {
- if !file.metadata.contains_key("dependencies") {
- continue;
- }
- match file.metadata["dependencies"].clone() {
- YamlValue::Scalar(s) => dependants.push((s, filename.clone())),
- _ => {}
- }
- }
-
- println!(
- "Generated filelist in {:#?} seconds, {} in destination, {} in source",
- elapsed,
- filelist_dest.len(),
- filelist_src.len()
- );
- println!("Total file actions to take {}", work_queue.len());
-
- //let mut compiled_files: Vec<String> = Vec::with_capacity(work_queue.len());
-
- while let Some((filename, action)) = work_queue.pop_front() {
- println!("> {:#?} {:#?}", action, filename);
- match action {
- FileListFileTargetAction::DELETE => {
- // This should be some flag that can be toggled (like with -del in rsync)
- if false {
- continue;
- }
-
- let file = filelist_dest.files.get(&filename).expect("SP87");
- println!(" Deleting {:#?}", file.file_path);
- if let Err(e) = std::fs::remove_file(&file.file_path.as_path()) {
- println!(
- "\x1b[31mError copying {:#?}: {}\x1b[0m",
- file.file_path.as_path(),
- e
- );
- }
- }
- FileListFileTargetAction::COPY => {
- let file = filelist_src.files.get(&filename).expect("SP87");
- let dest_file_path = Path::new(&opts.build_dir).join(file.file_rel.as_path());
- println!(" Copying {:#?}", file.file_path);
- println!(" {:#?}", dest_file_path);
- let Some(dest_dir) = &dest_file_path.parent() else {
- println!("\x1b[31mError creating dir {:#?}\x1b[0m", dest_file_path);
- continue;
- };
- if let Err(e) = std::fs::create_dir_all(&dest_dir) {
- println!("\x1b[31mError creating dir {:#?}: {}\x1b[0m", dest_dir, e);
- continue;
- }
- if let Err(e) = std::fs::copy(&file.file_path.as_path(), dest_file_path) {
- println!(
- "\x1b[31mError copying {:#?}: {}\x1b[0m",
- file.file_path.as_path(),
- e
- );
- }
- }
- FileListFileTargetAction::COMPILE => {
-                // This is an idea for something; I have not figured out how to deal with
-                // files depending on all other files (e.g. an index), as I cannot see how
-                // to get around recompiling everything anyway.
-                //
-                // I guess we could technically get away with a single compilation instead
-                // of two independent ones in some circumstances though, unless the file
-                // needs to be recompiled anyway, which would be the case for any file with a TOC, e.g.
- // let file = filelist_src.files.get(&filename).expect("SP87");
- // println!(" Compiling {:#?}", file.file_path);
- //
-                // // check the list of already compiled files to see if all dependants are compiled;
-                // // if not, move yourself to the end of the queue (basically just reinsert yourself
-                // // without compiling)
- // //
- // // Check if the file has been compiled already, if so, it does not need to be added
- // // to queue
- //
- // // If any dependants depend on this file, add them to the end of the work queue,
- // // if they are not already there.
- // 'dependants: for i in 0..dependants.len() {
- // let (pat, target) = dependants[i].clone();
- // if filename == target {
- // continue;
- // }
- // if file_pat_match(&filename, &pat) {
- // for (workel, _action) in &work_queue {
- // if workel == &target {
- // continue 'dependants;
- // }
- // }
- // work_queue.push_back((target, action.clone()));
- // }
- //
- // }
-
- // compiled_files.push(filename.clone());
- }
- FileListFileTargetAction::NONE => {}
- }
- }
-
-    // Running compilation twice, needed for some macros which depend on compiled content.
-    // We should make some kind of file list, and only re-compile files which have changed.
- println!("Compiling content");
- let _ = compile_files_in_directory(Path::new(&opts.content_dir), &opts);
- println!("Rerun compilation");
- opts.first_run = false;
- let _ = compile_files_in_directory(Path::new(&opts.content_dir), &opts);
-
- Ok(())
-}
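
For reference, the queue construction in the deleted main() above reduces to set comparisons between the two listings: build-dir files missing from the source become DELETE actions, while source files that are missing from or changed against the build keep their own COPY/COMPILE action. A minimal Python sketch under assumed names (plain dicts standing in for the real FileList API):

    from collections import deque

    def build_work_queue(src: dict, dest: dict) -> deque:
        # src/dest map a relative path to an (mtime, action) tuple; these
        # names are illustrative, not the skaldpress API.
        queue = deque()
        for path in dest.keys() - src.keys():    # only in the build dir
            queue.append((path, "DELETE"))
        for path in src.keys() - dest.keys():    # new in the source tree
            queue.append((path, src[path][1]))   # its own COPY/COMPILE action
        for path in src.keys() & dest.keys():    # present in both listings
            if src[path][0] != dest[path][0]:    # changed since last build
                queue.append((path, src[path][1]))
        return queue
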
diff --git a/src/skaldpress/main.py b/src/skaldpress/main.py
index 66fd0b1..cdc4464 100644
--- a/src/skaldpress/main.py
+++ b/src/skaldpress/main.py
@@ -2,6 +2,7 @@ import os
from argparse import ArgumentParser
from dataclasses import dataclass
import smp.macro_processor
+from copy import deepcopy
from skaldpress.metadata_parser import extract_parse_yaml_metadata
@@ -16,14 +17,7 @@ class CompiledFile:
COMPILED_FILES: list[CompiledFile] = list()
-COMPILED_FILES_BY_TAG: dict[str, CompiledFile] = dict()
-
-
-class SkaldpressError(Exception):
- def __init__(self, code, error, path=None):
- self.code = code
- self.error = error
- self.path = path
+COMPILED_FILES_BY_TAG: dict[str, list[int]] = dict()
def sp_template(macro_processor, template, content):
@@ -33,20 +27,65 @@ def sp_template(macro_processor, template, content):
return macro_processor.process_input(file_content)
+# SMP macro for getting all files with a specific tag; this is only _really_ effective on the second run
+#
+# Usage in files:
+# all_tagged_by(<tag name>, <template> [, <field to sort by>] [, reversed])
+def sp_all_tagged_by(
+ macro_processor, tag: str, template: str, field=None, reversed=""
+) -> str:
+ global COMPILED_FILES, COMPILED_FILES_BY_TAG
+
+ if tag not in COMPILED_FILES_BY_TAG:
+ print(f" \u001b[35mNo tags for {tag}\u001b[0m")
+ return ""
+ tagged_files = deepcopy(COMPILED_FILES_BY_TAG[tag])
+
+ out = ""
+
+    # if field is not None:
+    #     tagged_files.sort(
+    #         key=lambda i: str(COMPILED_FILES[i].metadata.get(field, "")),
+    #         reverse=(reversed == "reversed"),
+    #     )
+    # TODO: emit a warning, like the old Rust order_index_by_cached_data did,
+    # when a file is missing the sort field.
+
+ for doc_i in tagged_files:
+ file = COMPILED_FILES[doc_i]
+ smp_local = deepcopy(macro_processor)
+
+ macro_processor_initialize(file.metadata, smp_local, None)
+
+ out += sp_template(smp_local, template, file.content)
+
+ print_warnings(smp_local)
+ return out
+
+
+class SkaldpressError(Exception):
+ def __init__(self, code, error, path=None):
+ self.code = code
+ self.error = error
+ self.path = path
+
+
def get_template_path(template: str, opts):
return f"{opts.template_dir}{template}"
def cached_file_id_by_path(source_path: str) -> int | None:
+ global COMPILED_FILES
for i in range(len(COMPILED_FILES)):
- if COMPILED_FILES[i] == source_path:
+ if COMPILED_FILES[i].source_path == source_path:
return i
return None
def print_warnings(macro_processor):
for warning in macro_processor.warnings:
- print(f" \u001b[33m{warning.description}\u001b[0m")
+ # print(f" \u001b[33m{warning.description}\u001b[0m")
+ print(f" \u001b[33m{warning}\u001b[0m")
def file_pat_match(file: str, pat: str) -> bool:
@@ -73,15 +112,14 @@ def file_filtered(file: str, filters: list[str], exclude: list[str]) -> bool:
def macro_processor_initialize(metadata, old_macro_processor, additional_state=None):
macro_processor = old_macro_processor
- # macro_processor.define_macro("all_tagged_by", sp_all_tagged_by)
+ macro_processor.define_macro("all_tagged_by", sp_all_tagged_by)
macro_processor.define_macro("template", sp_template)
for key, value in metadata.items():
macro_name = f"METADATA_{key}"
if macro_name not in macro_processor.macros:
if isinstance(value, list):
- out = [str(el) for el in value]
- macro_value = out
+ macro_value = [str(el) for el in value]
else:
macro_value = str(value)
macro_processor.define_macro(macro_name, macro_value)
@@ -116,7 +154,7 @@ def extract_requested_macro_processor_state(macro_processor):
def needs_recompilation(macro_processor):
if "METADATA_keep_states" in macro_processor.macros:
return True
- for macro_name in macro_processor.macro_invocations:
+ for macro_name, args in macro_processor.macro_invocations:
if macro_name == "all_tagged_by":
return True
return False
@@ -155,6 +193,8 @@ def wrap_template(macro_processor, template_file, file_content, opts):
def compile_file(file_path, opts):
+ global COMPILED_FILES
+
extension = os.path.splitext(file_path)[1][1:] or ""
if not extension:
raise SkaldpressError(3, None)
@@ -224,6 +264,7 @@ def compile_file(file_path, opts):
def compile_file_and_write(source_file_path, opts):
+ global COMPILED_FILES
compiled_file = compile_file(source_file_path, opts)
if opts.first_run:
@@ -240,6 +281,8 @@ def compile_file_and_write(source_file_path, opts):
compiled_files_by_tag[tag].append(cfile_i)
else:
cfile_i = cached_file_id_by_path(compiled_file.source_path)
+ if cfile_i is None:
+ return
COMPILED_FILES[cfile_i], compiled_file = compiled_file, COMPILED_FILES[cfile_i]
cfile = COMPILED_FILES[cfile_i]
@@ -267,6 +310,8 @@ def compile_file_and_write(source_file_path, opts):
def compile_files_in_directory(directory, opts):
+ global COMPILED_FILES
+
try:
entries = os.listdir(directory)
except OSError as e:
@@ -274,11 +319,6 @@ def compile_files_in_directory(directory, opts):
for entry in entries:
path = os.path.join(directory, entry)
- # try:
- # metadata = os.stat(path)
- # except OSError as e:
- # print(f"\033[31mError getting file metadata {e}\033[0m")
- # continue
needs_recompilation = False
cfile_i = cached_file_id_by_path(path)
@@ -324,3 +364,6 @@ def main():
args.first_run = True
compile_files_in_directory(args.content_dir, args)
+ print("\n=======================\n")
+ args.first_run = False
+ compile_files_in_directory(args.content_dir, args)
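
With all_tagged_by registered again in macro_processor_initialize, a content page can invoke it directly, per the usage comment above; the tag, template, and field names below are made up for illustration:

    all_tagged_by(blog, post_summary.html, date, reversed)

On the first pass this expands to an empty string because COMPILED_FILES_BY_TAG has not been filled yet; the second compile_files_in_directory() call added at the end of main() is what lets it see every compiled file. Note the sort arguments are accepted but not yet applied while the sort body stays commented out.
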
diff --git a/src/smp/builtins.py b/src/smp/builtins.py
index 9a27864..c1d67ce 100644
--- a/src/smp/builtins.py
+++ b/src/smp/builtins.py
@@ -2,6 +2,7 @@
import subprocess
import urllib.request
import urllib.error
+import urllib.parse
import datetime
import markdown
from gfm import AutolinkExtension, TaskListExtension # type: ignore
@@ -151,6 +152,14 @@ LINK_CACHE: dict[str, tuple[bool, int, str]] = dict()
def smp_builtin_wodl(macro_processor, link, timeout_seconds=5):
+ url = urllib.parse.urlparse(link)
+ link = (
+ url.scheme
+ + "://"
+ + url.netloc.encode("idna").decode("ascii")
+ + urllib.parse.quote(url.path)
+ )
+
if link in LINK_CACHE:
return LINK_CACHE[link]
@@ -179,3 +188,7 @@ def smp_builtin_dumpenv(macro_processor):
out += f"{repr(key)}: {repr(val)}\n"
out += "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
return out
+
+
+# TODO Add a macro that spawns an interactive shell with the Python env, which would allow interactive debugging :)
+# It needs to have a continue function or something (probably on C-d)
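
The new normalization in smp_builtin_wodl can be exercised on its own. A sketch with a made-up link; note that the rebuilt URL keeps only scheme, host, and path, so as written any query string or fragment is dropped before the cache lookup:

    import urllib.parse

    link = "https://bücher.example/weg zum ziel?q=1"
    url = urllib.parse.urlparse(link)
    normalized = (
        url.scheme
        + "://"
        + url.netloc.encode("idna").decode("ascii")  # punycode the host
        + urllib.parse.quote(url.path)               # percent-encode the path
    )
    print(normalized)  # https://xn--bcher-kva.example/weg%20zum%20ziel ("?q=1" is lost)
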
diff --git a/src/smp/macro_processor.py b/src/smp/macro_processor.py
index 8fa9d91..bda6c6f 100644
--- a/src/smp/macro_processor.py
+++ b/src/smp/macro_processor.py
@@ -124,8 +124,9 @@ class MacroProcessor:
try:
return str(macro(*macro_args))
except Exception as e:
- s = f"{macro_name}({','.join([repr(x) for x in macro_args])})"
- raise Exception(s)
+ s = f"{macro_name}({','.join([repr(x) for x in args])})"
+ self.warnings.append(f"Error expanding macro {s} ({e})")
+ return s
if isinstance(macro, str):
expanded = macro
for i, arg in enumerate(args):
@@ -157,8 +158,6 @@ class MacroProcessor:
skip_next_line_ending = False
- line_begin = True
-
# We should keep track of filename, line number, and character number on the line here,
# so we can give sensible error messages.
# Probably add to the Python stack trace?
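
The net effect of the last hunk: a macro that raises no longer aborts the run; the invocation is rebuilt from the unexpanded args, returned verbatim into the output, and the failure is queued on MacroProcessor.warnings (which print_warnings() in main.py now prints directly). A rough usage sketch, assuming MacroProcessor() constructs without arguments and process_input() drives expansion as sp_template suggests:

    from smp.macro_processor import MacroProcessor

    mp = MacroProcessor()
    mp.define_macro("boom", lambda x: 1 / 0)  # a macro that always raises
    out = mp.process_input("before boom(arg) after")
    # `out` keeps the literal invocation text in place of an expansion,
    # and the error is recorded rather than raised:
    for warning in mp.warnings:
        print(warning)  # e.g. Error expanding macro boom('arg') (division by zero)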