1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
|
use skaldpress::macro_processor::error::SMPError;
use skaldpress::macro_processor::macro_processor::{MacroProcessorWarning, MacroType};
use skaldpress::skaldpress::filelists::{make_filelist, FileList, FileListFileTargetAction};
use skaldpress::skaldpress::parseopts::{parseopts, Opts};
use std::cmp::Ordering;
use std::collections::HashMap;
use std::collections::VecDeque;
use std::fs;
use std::path::Path;
use std::time::Instant;
use skaldpress::macro_processor::MacroProcessor;
use skaldpress::skaldpress::error::SkaldpressError;
use skaldpress::skaldpress::error::{
SP_COMPILE_FILE_EXTENSION_ERROR_2, SP_COMPILE_FILE_MACRO_PROCESS_ERROR,
SP_COMPILE_FILE_TEMPLATE_READ_ERROR, SP_COMPILE_TEMPLATE_MACRO_PROCESS_ERROR,
SP_GEN_DEST_STRIP_PREFIX_ERROR,
};
use skaldpress::skaldpress::metadata_parser::extract_parse_yaml_metadata;
use skaldpress::skaldpress::metadata_parser::YamlValue;
/// Convenience function for doing cmp on a metadata key of two arbitrary indexes
/// This takes a macro_processor as input, so that it can emit warnings if the ordering failed in
/// any way.
fn order_index_by_cached_data(
    smp: &mut MacroProcessor,
    key: &str,
    a: &usize,
    b: &usize,
) -> Ordering {
    // NOTE(review): reads the global compiled-file cache; assumes a
    // single-threaded compilation run — confirm before adding threads.
    let compiled_files: &Vec<CompiledFile> = unsafe { COMPILED_FILES.as_ref() };
    // Validate both indexes up front; an out-of-range index means we have no
    // ordering information, so report it and treat the pair as equal.
    for (name, idx) in [("a", *a), ("b", *b)] {
        if idx >= compiled_files.len() {
            smp.warnings.push(MacroProcessorWarning::new(format!(
                "\"{}\" is not a cached file {} >= {}",
                name,
                idx,
                compiled_files.len()
            )));
            return Ordering::Equal;
        }
    }
    // Fetch the sort key from each file's metadata; a missing key also
    // degrades to Ordering::Equal with a warning rather than a panic.
    let lhs = match compiled_files[*a].metadata.get(key) {
        Some(value) => value,
        None => {
            smp.warnings.push(MacroProcessorWarning::new(format!(
                "Key {:?} not found for ordering data in {:?}",
                key, compiled_files[*a].source_path
            )));
            return Ordering::Equal;
        }
    };
    let rhs = match compiled_files[*b].metadata.get(key) {
        Some(value) => value,
        None => {
            smp.warnings.push(MacroProcessorWarning::new(format!(
                "Key {:?} not found for ordering data in {:?}",
                key, compiled_files[*b].source_path
            )));
            return Ordering::Equal;
        }
    };
    lhs.cmp(rhs)
}
/// SMP Macro for getting all files with specific tag, this is only _really_ effective the second run
///
/// Usage in files:
/// all_tagged_by(<tag name>, <template> [, <field to sort by>] [, reversed])
fn sp_all_tagged_by(
    smp: &mut MacroProcessor,
    macro_name: &str,
    args: &mut [String],
) -> Result<String, SMPError> {
    // Without at least a tag name there is nothing to expand; echo the macro
    // invocation back unchanged.
    if args.is_empty() {
        return Ok(macro_name.to_string());
    }
    let compiled_files: &Vec<CompiledFile>;
    let compiled_files_by_tag;
    // NOTE(review): global caches; assumed only mutated between compile passes.
    unsafe {
        compiled_files_by_tag = COMPILED_FILES_BY_TAG.as_ref().unwrap();
        compiled_files = COMPILED_FILES.as_ref();
    }
    let Some(tagged_files) = compiled_files_by_tag.get(&args[0]) else {
        println!("  \x1b[35mNo tags for {}\x1b[0m", args[0]);
        return Ok(String::new());
    };
    let mut out = String::new();
    let mut tagged_files = tagged_files.clone();
    // Optional third argument: metadata key to sort by.
    // Optional fourth argument: the literal string "reversed" for descending order.
    if args.len() > 2 {
        if args.len() > 3 && args[3] == "reversed" {
            // Descending: compare with the operands swapped.
            tagged_files.sort_by(|a, b| order_index_by_cached_data(smp, &args[2], b, a));
        } else {
            // BUGFIX: ascending order. Previously this branch was identical to
            // the reversed one (both passed `b, a`), so "reversed" was a no-op.
            tagged_files.sort_by(|a, b| order_index_by_cached_data(smp, &args[2], a, b));
        }
    }
    for doc_i in tagged_files {
        let file = &compiled_files[doc_i];
        // Expand the template in a scratch copy of the processor so per-file
        // METADATA_* macros do not leak into the caller's state.
        let mut smp_local = smp.clone();
        macro_processor_initialize(&file.metadata, &mut smp_local, None);
        out.push_str(&sp_template(
            &mut smp_local,
            "template",
            &mut [args[1].clone(), file.content.clone()],
        )?);
        print_warnings(&smp_local);
    }
    Ok(out)
}
/// Seed a macro processor with the standard skaldpress macros, per-file
/// metadata (exposed as `METADATA_<key>` macros), and any extra
/// caller-provided state.
///
/// Existing `METADATA_*` definitions are never overwritten; entries from
/// `additional_state` always overwrite.
fn macro_processor_initialize(
    metadata: &HashMap<String, YamlValue>,
    old_macro_processor: &mut MacroProcessor,
    additional_state: Option<&HashMap<String, MacroType>>,
) {
    let macro_processor = old_macro_processor;
    macro_processor.define_macro(
        String::from("all_tagged_by"),
        MacroType::Function(sp_all_tagged_by),
    );
    macro_processor.define_macro(String::from("template"), MacroType::Function(sp_template));
    for (key, value) in metadata {
        let macro_name = format!("METADATA_{}", key);
        // BUGFIX: `&macro_name` was garbled to `¯o_name` (HTML-entity
        // mojibake), which does not compile.
        if macro_processor.macros.contains_key(&macro_name) {
            continue;
        }
        let macro_value = match value {
            // Lists become macro arrays, element-wise stringified.
            YamlValue::List(items) => MacroType::Array(
                items
                    .iter()
                    .map(|el| MacroType::String(el.to_string()))
                    .collect(),
            ),
            // Everything else is exposed as its string form.
            other => MacroType::String(other.to_string()),
        };
        macro_processor.define_macro(macro_name, macro_value);
    }
    if let Some(extra) = additional_state {
        for (key, value) in extra {
            macro_processor.define_macro(key.to_string(), value.clone());
        }
    }
}
/// Resolve a template name to its path inside the configured template
/// directory by plain string concatenation (the directory is expected to
/// already carry its trailing separator).
fn get_template_path(template: &str, opts: &Opts) -> String {
    let mut path = String::with_capacity(opts.template_dir.len() + template.len());
    path.push_str(&opts.template_dir);
    path.push_str(template);
    path
}
/// Entry point: scans destination, static and content directories into file
/// lists, diffs them into a work queue of DELETE/COPY actions (COMPILE is
/// currently handled by the whole-directory pass below), executes the queue,
/// then runs content compilation twice — some macros (e.g. `all_tagged_by`)
/// depend on output produced by the first pass.
fn main() -> Result<(), SkaldpressError> {
    // Initialize the global tag cache before anything can read it.
    unsafe {
        COMPILED_FILES_BY_TAG = Some(HashMap::new());
    }
    let mut opts = parseopts().build();
    let emptyvec: Vec<String> = Vec::new();
    let now = Instant::now();
    // What is currently in the build output.
    let mut filelist_dest = make_filelist(
        &Path::new(&opts.build_dir),
        &Path::new(&opts.build_dir),
        &emptyvec,
        &emptyvec,
        FileListFileTargetAction::NONE,
        false,
        &opts.template_dir,
        &opts.metadata,
    )?;
    // What should be there: static files (copied) plus content files (compiled).
    let mut filelist_src = FileList::new();
    filelist_src.extend(make_filelist(
        &Path::new(&opts.static_dir),
        &Path::new(&opts.static_dir),
        &emptyvec,
        &opts.static_exclude,
        FileListFileTargetAction::COPY,
        false,
        &opts.template_dir,
        &opts.metadata,
    )?);
    filelist_src.extend(make_filelist(
        &Path::new(&opts.content_dir),
        &Path::new(&opts.content_dir),
        &opts.filter,
        &opts.exclude,
        FileListFileTargetAction::COMPILE,
        true,
        &opts.template_dir,
        &opts.metadata,
    )?);
    let elapsed = now.elapsed();
    let mut work_queue: VecDeque<(String, FileListFileTargetAction)> = VecDeque::new();
    // Files present in the build dir but in no source dir are stale — delete.
    // We also will not currently discover empty directories from build target,
    // we should attempt to do that.
    for (filename, _file) in filelist_dest.missing_from(&filelist_src).files {
        filelist_dest.set_action(&filename, FileListFileTargetAction::DELETE);
        work_queue.push_back((filename, FileListFileTargetAction::DELETE));
    }
    // New and changed source files get their configured action (COPY/COMPILE).
    for (filename, file) in filelist_src.missing_from(&filelist_dest).files {
        work_queue.push_back((filename, file.target_action));
    }
    for (filename, file) in filelist_src.changed_from(&filelist_dest).files {
        work_queue.push_back((filename, file.target_action));
    }
    // Collect (pattern, dependant-file) pairs declared via the "dependencies"
    // metadata key; only scalar values are understood.
    let mut dependants: Vec<(String, String)> = Vec::new();
    for (filename, file) in &filelist_src.files {
        if !file.metadata.contains_key("dependencies") {
            continue;
        }
        if let YamlValue::Scalar(s) = file.metadata["dependencies"].clone() {
            dependants.push((s, filename.clone()));
        }
    }
    println!(
        "Generated filelist in {:#?} seconds, {} in destination, {} in source",
        elapsed,
        filelist_dest.len(),
        filelist_src.len()
    );
    println!("Total file actions to take {}", work_queue.len());
    //let mut compiled_files: Vec<String> = Vec::with_capacity(work_queue.len());
    while let Some((filename, action)) = work_queue.pop_front() {
        println!("> {:#?} {:#?}", action, filename);
        match action {
            FileListFileTargetAction::DELETE => {
                // This should be some flag that can be toggled (like with -del in rsync)
                if false {
                    continue;
                }
                let file = filelist_dest.files.get(&filename).expect("SP87");
                println!("  Deleting {:#?}", file.file_path);
                if let Err(e) = std::fs::remove_file(&file.file_path.as_path()) {
                    // BUGFIX: message previously said "Error copying" —
                    // copy-pasted from the COPY arm.
                    println!(
                        "\x1b[31mError deleting {:#?}: {}\x1b[0m",
                        file.file_path.as_path(),
                        e
                    );
                }
            }
            FileListFileTargetAction::COPY => {
                let file = filelist_src.files.get(&filename).expect("SP87");
                let dest_file_path = Path::new(&opts.build_dir).join(file.file_rel.as_path());
                println!("  Copying {:#?}", file.file_path);
                println!("  {:#?}", dest_file_path);
                // Ensure the destination directory exists before copying.
                let Some(dest_dir) = &dest_file_path.parent() else {
                    println!("\x1b[31mError creating dir {:#?}\x1b[0m", dest_file_path);
                    continue;
                };
                if let Err(e) = std::fs::create_dir_all(&dest_dir) {
                    println!("\x1b[31mError creating dir {:#?}: {}\x1b[0m", dest_dir, e);
                    continue;
                }
                if let Err(e) = std::fs::copy(&file.file_path.as_path(), dest_file_path) {
                    println!(
                        "\x1b[31mError copying {:#?}: {}\x1b[0m",
                        file.file_path.as_path(),
                        e
                    );
                }
            }
            FileListFileTargetAction::COMPILE => {
                // This is a idea fo something, have not figured out how to deal with having files
                // depending on all other files (index e.g.), as I cannot see how to get around
                // recompiling everything anyway.
                //
                // I guess we could technically get away with a single compilation instead of two
                // independent ones in some circumstances tho, unless the file needs to recompiled
                // anyway, which would be the case for any files with TOC e.g.
                // let file = filelist_src.files.get(&filename).expect("SP87");
                // println!("  Compiling {:#?}", file.file_path);
                //
                // // check list of already compiled files, to see if all dependants are compiled
                // // if not, move yourself to the end of the queue (basically just reinsert yourself
                // // without compiling
                // //
                // // Check if the file has been compiled already, if so, it does not need to be added
                // // to queue
                //
                // // If any dependants depend on this file, add them to the end of the work queue,
                // // if they are not already there.
                // 'dependants: for i in 0..dependants.len() {
                //     let (pat, target) = dependants[i].clone();
                //     if filename == target {
                //         continue;
                //     }
                //     if file_pat_match(&filename, &pat) {
                //         for (workel, _action) in &work_queue {
                //             if workel == &target {
                //                 continue 'dependants;
                //             }
                //         }
                //         work_queue.push_back((target, action.clone()));
                //     }
                //
                // }
                // compiled_files.push(filename.clone());
            }
            FileListFileTargetAction::NONE => {}
        }
    }
    // Running compilation twice, needed for some macros which depends on compiled content
    // We should make some kind of file-list, and only re-compile files which has changed.
    println!("Compiling content");
    let _ = compile_files_in_directory(Path::new(&opts.content_dir), &opts);
    println!("Rerun compilation");
    opts.first_run = false;
    let _ = compile_files_in_directory(Path::new(&opts.content_dir), &opts);
    Ok(())
}
|