Diffstat (limited to 'src')
-rw-r--r--  src/skaldpress/main.py      81
-rw-r--r--  src/smp/builtins.py         13
-rw-r--r--  src/smp/macro_processor.py   7
3 files changed, 78 insertions, 23 deletions
diff --git a/src/skaldpress/main.py b/src/skaldpress/main.py
index 66fd0b1..cdc4464 100644
--- a/src/skaldpress/main.py
+++ b/src/skaldpress/main.py
@@ -2,6 +2,7 @@ import os
from argparse import ArgumentParser
from dataclasses import dataclass
import smp.macro_processor
+from copy import deepcopy
from skaldpress.metadata_parser import extract_parse_yaml_metadata
@@ -16,14 +17,7 @@ class CompiledFile:
COMPILED_FILES: list[CompiledFile] = list()
-COMPILED_FILES_BY_TAG: dict[str, CompiledFile] = dict()
-
-
-class SkaldpressError(Exception):
- def __init__(self, code, error, path=None):
- self.code = code
- self.error = error
- self.path = path
+COMPILED_FILES_BY_TAG: dict[str, list[int]] = dict()
def sp_template(macro_processor, template, content):
@@ -33,20 +27,65 @@ def sp_template(macro_processor, template, content):
return macro_processor.process_input(file_content)
+# SMP macro for getting all files with a specific tag; this is only _really_ effective on the second run
+#
+# Usage in files:
+# all_tagged_by(<tag name>, <template> [, <field to sort by>] [, reversed])
+def sp_all_tagged_by(
+ macro_processor, tag: str, template: str, field=None, reversed=""
+) -> str:
+ global COMPILED_FILES, COMPILED_FILES_BY_TAG
+
+ if tag not in COMPILED_FILES_BY_TAG:
+        print(f"  \u001b[35mNo files tagged {tag}\u001b[0m")
+ return ""
+ tagged_files = deepcopy(COMPILED_FILES_BY_TAG[tag])
+
+ out = ""
+
+    # Sorting by the optional metadata field is still disabled; the intended logic:
+    # if field is not None:
+    #     tagged_files = sorted(
+    #         tagged_files,
+    #         key=lambda i: str(COMPILED_FILES[i].metadata.get(field, "")),
+    #         reverse=(reversed == "reversed"),
+    #     )
+
+ for doc_i in tagged_files:
+ file = COMPILED_FILES[doc_i]
+ smp_local = deepcopy(macro_processor)
+
+ macro_processor_initialize(file.metadata, smp_local, None)
+
+ out += sp_template(smp_local, template, file.content)
+
+ print_warnings(smp_local)
+ return out
+
+
+class SkaldpressError(Exception):
+ def __init__(self, code, error, path=None):
+ self.code = code
+ self.error = error
+ self.path = path
+
+
def get_template_path(template: str, opts):
return f"{opts.template_dir}{template}"
def cached_file_id_by_path(source_path: str) -> int | None:
+ global COMPILED_FILES
for i in range(len(COMPILED_FILES)):
- if COMPILED_FILES[i] == source_path:
+ if COMPILED_FILES[i].source_path == source_path:
return i
return None
def print_warnings(macro_processor):
for warning in macro_processor.warnings:
- print(f" \u001b[33m{warning.description}\u001b[0m")
+ # print(f" \u001b[33m{warning.description}\u001b[0m")
+ print(f" \u001b[33m{warning}\u001b[0m")
def file_pat_match(file: str, pat: str) -> bool:
@@ -73,15 +112,14 @@ def file_filtered(file: str, filters: list[str], exclude: list[str]) -> bool:
def macro_processor_initialize(metadata, old_macro_processor, additional_state=None):
macro_processor = old_macro_processor
- # macro_processor.define_macro("all_tagged_by", sp_all_tagged_by)
+ macro_processor.define_macro("all_tagged_by", sp_all_tagged_by)
macro_processor.define_macro("template", sp_template)
for key, value in metadata.items():
macro_name = f"METADATA_{key}"
if macro_name not in macro_processor.macros:
if isinstance(value, list):
- out = [str(el) for el in value]
- macro_value = out
+ macro_value = [str(el) for el in value]
else:
macro_value = str(value)
macro_processor.define_macro(macro_name, macro_value)
@@ -116,7 +154,7 @@ def extract_requested_macro_processor_state(macro_processor):
def needs_recompilation(macro_processor):
if "METADATA_keep_states" in macro_processor.macros:
return True
- for macro_name in macro_processor.macro_invocations:
+ for macro_name, args in macro_processor.macro_invocations:
if macro_name == "all_tagged_by":
return True
return False
@@ -155,6 +193,8 @@ def wrap_template(macro_processor, template_file, file_content, opts):
def compile_file(file_path, opts):
+ global COMPILED_FILES
+
extension = os.path.splitext(file_path)[1][1:] or ""
if not extension:
raise SkaldpressError(3, None)
@@ -224,6 +264,7 @@ def compile_file(file_path, opts):
def compile_file_and_write(source_file_path, opts):
+ global COMPILED_FILES
compiled_file = compile_file(source_file_path, opts)
if opts.first_run:
@@ -240,6 +281,8 @@ def compile_file_and_write(source_file_path, opts):
compiled_files_by_tag[tag].append(cfile_i)
else:
cfile_i = cached_file_id_by_path(compiled_file.source_path)
+ if cfile_i is None:
+ return
COMPILED_FILES[cfile_i], compiled_file = compiled_file, COMPILED_FILES[cfile_i]
cfile = COMPILED_FILES[cfile_i]
@@ -267,6 +310,8 @@ def compile_file_and_write(source_file_path, opts):
def compile_files_in_directory(directory, opts):
+ global COMPILED_FILES
+
try:
entries = os.listdir(directory)
except OSError as e:
@@ -274,11 +319,6 @@ def compile_files_in_directory(directory, opts):
for entry in entries:
path = os.path.join(directory, entry)
- # try:
- # metadata = os.stat(path)
- # except OSError as e:
- # print(f"\033[31mError getting file metadata {e}\033[0m")
- # continue
needs_recompilation = False
cfile_i = cached_file_id_by_path(path)
@@ -324,3 +364,6 @@ def main():
args.first_run = True
compile_files_in_directory(args.content_dir, args)
+ print("\n=======================\n")
+ args.first_run = False
+ compile_files_in_directory(args.content_dir, args)
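With the second pass added at the end of main(), all_tagged_by only expands to a real listing on that second run: the first pass merely fills COMPILED_FILES and COMPILED_FILES_BY_TAG, and needs_recompilation() then flags every file that invoked the macro. A hypothetical invocation in a content file, following the usage comment in sp_all_tagged_by (the tag name, template and sort field are made-up values):

    all_tagged_by(blog, tag_listing.html, date, reversed)
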
diff --git a/src/smp/builtins.py b/src/smp/builtins.py
index 9a27864..c1d67ce 100644
--- a/src/smp/builtins.py
+++ b/src/smp/builtins.py
@@ -2,6 +2,7 @@
import subprocess
import urllib.request
import urllib.error
+import urllib.parse
import datetime
import markdown
from gfm import AutolinkExtension, TaskListExtension # type: ignore
@@ -151,6 +152,14 @@ LINK_CACHE: dict[str, tuple[bool, int, str]] = dict()
def smp_builtin_wodl(macro_processor, link, timeout_seconds=5):
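+    # Normalize the link first: IDNA-encode the hostname and percent-encode the path
+    # so equivalent URLs map to one LINK_CACHE entry.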
+ url = urllib.parse.urlparse(link)
+ link = (
+ url.scheme
+ + "://"
+ + url.netloc.encode("idna").decode("ascii")
+ + urllib.parse.quote(url.path)
+ )
+
if link in LINK_CACHE:
return LINK_CACHE[link]
@@ -179,3 +188,7 @@ def smp_builtin_dumpenv(macro_processor):
out += f"{repr(key)}: {repr(val)}\n"
out += "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
return out
+
+
+# TODO: Add a macro that spawns an interactive shell with the Python env; it would allow interactive debugging :)
+# It needs a continue function or something (probably on C-d).
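
A minimal sketch of the shell macro that TODO describes, using only the standard-library code module (the name smp_builtin_shell and its wiring are assumptions, not part of this commit; code.interact() returns on C-d, which gives the wished-for "continue" behaviour):

import code


def smp_builtin_shell(macro_processor):
    # Drop into a Python REPL with the macro processor in scope; expansion
    # resumes when the shell exits on C-d / EOF, and the macro expands to nothing.
    code.interact(
        banner="smp debug shell (C-d to continue expansion)",
        local={"macro_processor": macro_processor},
    )
    return ""
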
diff --git a/src/smp/macro_processor.py b/src/smp/macro_processor.py
index 8fa9d91..bda6c6f 100644
--- a/src/smp/macro_processor.py
+++ b/src/smp/macro_processor.py
@@ -124,8 +124,9 @@ class MacroProcessor:
try:
return str(macro(*macro_args))
except Exception as e:
- s = f"{macro_name}({','.join([repr(x) for x in macro_args])})"
- raise Exception(s)
+ s = f"{macro_name}({','.join([repr(x) for x in args])})"
+ self.warnings.append(f"Error expanding macro {s} ({e})")
+ return s
if isinstance(macro, str):
expanded = macro
for i, arg in enumerate(args):
@@ -157,8 +158,6 @@ class MacroProcessor:
skip_next_line_ending = False
- line_begin = True
-
# We should keep track of filename, linenumber, and character number on line here
# So we can give sensible error messages
# Probably add to python stack trace?
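
For the filename / line number / column tracking wished for in that closing comment, a small position object carried along with the parser state would be enough. A sketch only; SourcePosition does not exist in the current code:

from dataclasses import dataclass


@dataclass
class SourcePosition:
    filename: str
    line: int = 1
    column: int = 1

    def advance(self, char: str) -> None:
        # Advance past one consumed character, resetting the column on newlines,
        # so warnings and stack traces can cite an exact location.
        if char == "\n":
            self.line += 1
            self.column = 1
        else:
            self.column += 1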