From ae23669169b32d4986af06c1ae9483cc9c52d39d Mon Sep 17 00:00:00 2001 From: Ralph Amissah Date: Sat, 7 Jul 2018 13:55:43 -0400 Subject: 0.26.4 file renames, cleaning, reorganisation --- org/default_paths.org | 155 ++++----- org/imports.org | 2 +- org/meta_debugs.org | 14 +- org/meta_read_source_files.org | 733 ----------------------------------------- org/output_sqlite.org | 9 +- org/output_xmls.org | 190 +++++------ org/sdp.org | 16 +- org/source_files_read.org | 733 +++++++++++++++++++++++++++++++++++++++++ 8 files changed, 914 insertions(+), 938 deletions(-) delete mode 100644 org/meta_read_source_files.org create mode 100644 org/source_files_read.org (limited to 'org') diff --git a/org/default_paths.org b/org/default_paths.org index b8ff2a4..5b7141a 100644 --- a/org/default_paths.org +++ b/org/default_paths.org @@ -18,13 +18,13 @@ * 0. source paths :module:sdp:paths_source: ** 0. module template -#+BEGIN_SRC d :tangle ../src/sdp/output/paths_source.d +#+BEGIN_SRC d :tangle ../src/sdp/source/paths_source.d /++ read configuration files
- read config files
meta_config_files.d +/ -module sdp.output.paths_source; +module sdp.source.paths_source; import std.array, std.file, std.path, @@ -49,11 +49,10 @@ template PodManifest() { P _pth ) { struct ManifestFile_ { - auto pod_manifest_filename() { - string _manifest_file = "sisupod.manifest"; - return _manifest_file; + string pod_manifest_filename() { + return "sisupod.manifest"; } - auto pod_manifest_path() { + string pod_manifest_path() { string _manifest_path; if ((isValidPath(_pth) && exists(_pth)!=0 && _pth.isDir) && (exists(_pth.chainPath(pod_manifest_filename).array)!=0 @@ -115,11 +114,11 @@ sisupod template PathMatters() { mixin SiSUrgxInit; static auto rgx = Rgx(); - auto PathMatters(O,E,P,F)( - O _opt_actions, - E _env, - P _pth, - F _fns = "", + auto PathMatters(O,E)( + O _opt_actions, + E _env, + string _pth, + string _fns = "", char[][] _manifest_fn_list = [[]], ) { auto _manifest = PodManifest!()(_pth); @@ -145,15 +144,13 @@ template PathMatters() { } return Opt_(); } - auto src_is_pod() { - auto _src_is_pod = (_manifest.pod_manifest_path.length > 0) ? true : false; - return _src_is_pod; + bool src_is_pod() { + return (_manifest.pod_manifest_path.length > 0) ? true : false; } auto pod() { struct Pod_ { - auto src_is_pod() { - auto _src_is_pod = (_manifest.pod_manifest_path.length > 0) ? true : false; - return _src_is_pod; + bool src_is_pod() { + return (_manifest.pod_manifest_path.length > 0) ? true : false; } auto collection_root() { auto _collection_root = asNormalizedPath(chainPath(_manifest.pod_manifest_path, "..")).array; @@ -164,34 +161,34 @@ template PathMatters() { } return _collection_root; } - auto manifest_filename() { + string manifest_filename() { return _manifest.pod_manifest_filename; } - auto manifest_path() { + string manifest_path() { return _manifest.pod_manifest_path; } - auto pod_name() { // TODO decide what returned if src_is_pod == false + string pod_name() { return _manifest.pod_manifest_path.baseName; } - auto manifest_file_with_path() { + string manifest_file_with_path() { return _manifest.pod_manifest_file_with_path; } - auto config_sisu_document_make_dirs() { // TODO sisu_document_make + string[] config_sisu_document_make_dirs() { string[] _config_dirs; return _config_dirs; } - auto config_local_site_dirs() { // TODO sisu_document_make + string[] config_local_site_dirs() { string[] _config_dirs; return _config_dirs; } - auto image_dirs() { // TODO + string[] image_dirs() { string[] _image_dirs; return _image_dirs; } auto manifest_list_of_filenames() { return _manifest_fn_list; } - auto manifest_list_of_languages() { + string[] manifest_list_of_languages() { string[] _lngs; foreach (filename_; manifest_list_of_filenames) { string _k = "en"; @@ -211,41 +208,32 @@ template PathMatters() { auto _env = _env; string _sep = "␣"; struct SRC_ { - auto is_pod() { - auto _src_is_pod = (_manifest.pod_manifest_path.length > 0) ? true : false; - return _src_is_pod; + bool is_pod() { + return (_manifest.pod_manifest_path.length > 0) ? true : false; } - auto path_and_fn() { + string path_and_fn() { return _fns; } - auto pod_name() { /+ work on +/ - auto _pod_name = (is_pod) - ? _manifest.pod_manifest_path - : ""; - return _pod_name; + string pod_name() { + return (is_pod) ? 
_manifest.pod_manifest_path : ""; } - auto filename() { - auto _fn = (path_and_fn).baseName; - return _fn; + string filename() { + return path_and_fn.baseName; } - auto filename_base() { - auto _fn = filename.stripExtension; - return _fn; + string filename_base() { + return filename.stripExtension; } - auto filename_extension() { - auto _ext = filename.match(rgx.src_pth_sst_or_ssm).captures["extension"]; - return _ext; + string filename_extension() { + return filename.match(rgx.src_pth_sst_or_ssm).captures["extension"]; } - auto lng() { + string lng() { string _k; if (auto m = path_and_fn.match(rgx.language_code_and_filename)) { _k = m.captures[1]; - } else { - _k = "en"; - } + } else {_k = "en"; } return _k; } - auto docname_composite_unique_per_src_doc() { + string docname_composite_unique_per_src_doc() { /+ z pod name if any + src filename + lng code filename ~ "." ~ lng @@ -263,7 +251,7 @@ template PathMatters() { } return _fn; } - auto docname_composite_unique_per_src_pod() { + string docname_composite_unique_per_src_pod() { /+ z pod name if any + src filename (without lng code) filename ~ _sep ~ lng @@ -282,18 +270,16 @@ template PathMatters() { } return _fn; } - auto language() { + string language() { return lng(); } - auto file_with_absolute_path() { - string _pth = _env["pwd"].chainPath(path_and_fn).array; - return _pth; + string file_with_absolute_path() { + return _env["pwd"].chainPath(path_and_fn).array; } - auto absolute_path_to_src() { - string _pth = (_env["pwd"].chainPath(path_and_fn)).dirName.array; - return _pth; + string absolute_path_to_src() { + return (_env["pwd"].chainPath(path_and_fn)).dirName.array; } - auto base_dir() { + string base_dir() { string _dir; if ( // TODO this should catch generated --source sisupod, untested, needs manifest auto m = (absolute_path_to_src) @@ -311,7 +297,7 @@ template PathMatters() { } return _dir; } - auto base_parent_dir_path() { + string base_parent_dir_path() { string _dir; if ( // TODO this should catch generated --source sisupod, untested, needs manifest auto m = (absolute_path_to_src) @@ -323,7 +309,7 @@ template PathMatters() { } return _dir; } - auto base_dir_path() { // looks like there is work to do + string base_dir_path() { string _dir; if ( auto m = (absolute_path_to_src) @@ -343,11 +329,11 @@ template PathMatters() { } return _dir; } - auto media_dir_path() { // TODO rework, can base directly on src fn path + string media_dir_path() { string _dir = asNormalizedPath(base_dir_path.chainPath("media")).array; return _dir; } - auto image_dir_path() { + string image_dir_path() { string _paths; string[] _possible_img_pths = [ "./image", "../image", "../../image" ]; string _img_pth_found = ""; @@ -372,8 +358,7 @@ template PathMatters() { return _img_pth_found; } auto conf_dir_path() { - auto _dir = asNormalizedPath(base_dir_path.chainPath("conf")).array; - return _dir; + return asNormalizedPath(base_dir_path.chainPath("conf")).array; } auto base_parent_dir() { string _dir; @@ -390,18 +375,16 @@ template PathMatters() { } return _dir; } - auto config_dirs() { // TODO + string[] config_dirs() { string[] _config_dirs; if (is_pod) { - } else { - } + } else {} return _config_dirs; } - auto image_dirs() { // TODO + string[] image_dirs() { string[] _image_dirs; if (is_pod) { - } else { - } + } else {} return _image_dirs; } } @@ -453,15 +436,15 @@ template ConfigFilePaths() { E _env, ) { struct ConfFilePaths { - auto config_filename_document_toml() { + string config_filename_document_toml() { return "sisu_document_make"; } - auto 
config_filename_site_toml() { + string config_filename_site_toml() { return "config_local_site"; } auto possible_config_path_locations() { struct _ConfFilePaths { - auto sisu_document_make() { + string[] sisu_document_make() { /+ FIX clean up conf paths ↓ +/ /+ config local site (file system only, not in pod) +/ /+ return paths +/ @@ -504,7 +487,7 @@ template ConfigFilePaths() { +/ return _possible_config_path_locations; } - auto config_local_site() { + string[] config_local_site() { /+ FIX clean up conf paths ↓ +/ /+ config local site (file system only, not in pod) +/ string _dot_pwd = asNormalizedPath(chainPath(to!string(_env["pwd"]), ".sisu")).array; @@ -617,7 +600,7 @@ template SiSUpathsSRC() { auto pwd() { return _pwd; } - auto language() { + string language() { // use command line info as well? string _k; if (auto m = _fn_src_and_path.match(rgx.language_code_and_filename)) { @@ -627,7 +610,7 @@ template SiSUpathsSRC() { } return _k; } - auto doc_root() { + string doc_root() { return "sisudoc"; } auto media_root() { @@ -944,13 +927,9 @@ template SiSUoutPaths() { ) { struct _PathsStruct { string output_root() { - string out_root_; - if (output_pth_root.length > 0) { - out_root_ = output_pth_root; - } else { - out_root_ = "sisugen"; - } - return out_root_; + return (output_pth_root.length > 0) + ? output_pth_root + : "sisugen"; } string output_base() { return asNormalizedPath(output_root.chainPath(lng)).array; @@ -965,13 +944,9 @@ template SiSUoutPathSQLite() { ) { struct _PathsStruct { string output_root() { - string out_root_; - if (output_pth_root.length > 0) { - out_root_ = output_pth_root; - } else { - out_root_ = "sisugen"; - } - return out_root_; + return (output_pth_root.length > 0) + ? output_pth_root + : "sisugen"; } string output_base() { return asNormalizedPath(output_root).array; @@ -1006,7 +981,7 @@ template SiSUoutPathsFnPd() { - else if pod_name != file_name - pod_name.file_name +/ - auto _fn_src = fn_src_pth.baseName.stripExtension; + string _fn_src = fn_src_pth.baseName.stripExtension; string _output_base_name; if (!(pod_name.empty)) { if (pod_name == _fn_src) { diff --git a/org/imports.org b/org/imports.org index 64bc4c3..d82da9e 100644 --- a/org/imports.org +++ b/org/imports.org @@ -73,7 +73,7 @@ public import // std.uni, std.utf; public import - sdp.output.paths_source, + sdp.source.paths_source, sdp.output.defaults, sdp.output.paths_output, sdp.output.rgx; diff --git a/org/meta_debugs.org b/org/meta_debugs.org index 8542a32..1807ec8 100644 --- a/org/meta_debugs.org +++ b/org/meta_debugs.org @@ -26,8 +26,8 @@ module sdp.meta.doc_debugs; template SiSUdebugs() { <> auto SiSUdebugs(S,T)( - auto return ref const S contents, - auto return ref T doc_matters, + const S contents, + T doc_matters, ) { mixin SiSUrgxInit; mixin InternalMarkup; @@ -136,8 +136,8 @@ debug(dumpdoc) { #+BEGIN_SRC d debug(checkdoc) { void out_segnames(S,T)( - auto return ref const S contents, - auto return ref T doc_matters, + const S contents, + T doc_matters, ) { foreach (key; doc_matters.xml.keys_seq.seg) { if (contents[key].length > 1) { @@ -159,7 +159,7 @@ debug(checkdoc) { #+BEGIN_SRC d debug(checkdoc) { void out_toc(S)( - auto return ref const S contents, + const S contents, string key, ) { if (contents[key].length > 1) { @@ -183,7 +183,7 @@ debug(checkdoc) { #+BEGIN_SRC d debug(checkdoc) { void out_endnotes(S)( - auto return ref const S contents, + const S contents, string key, ) { if (contents[key].length > 1) { @@ -205,7 +205,7 @@ debug(checkdoc) { #+BEGIN_SRC d debug(checkdoc) { void 
out_bookindex(S)( - auto return ref const S contents, + const S contents, string key, ) { if (contents[key].length > 1) { diff --git a/org/meta_read_source_files.org b/org/meta_read_source_files.org deleted file mode 100644 index b726631..0000000 --- a/org/meta_read_source_files.org +++ /dev/null @@ -1,733 +0,0 @@ -#+TITLE: sdp markup source raw -#+AUTHOR: Ralph Amissah -#+EMAIL: [[mailto:ralph.amissah@gmail.com][ralph.amissah@gmail.com]] -#+DESCRIPTION: documents - structuring, publishing in multiple formats & search -#+KEYWORDS -#+LANGUAGE: en -#+STARTUP: indent content -#+OPTIONS: H:3 num:nil toc:t \n:nil @:t ::t |:t ^:nil _:nil -:t f:t *:t <:t -#+OPTIONS: TeX:t LaTeX:t skip:nil d:nil todo:t pri:nil tags:not-in-toc -#+OPTIONS: author:nil email:nil creator:nil timestamp:nil -#+PROPERTY: header-args :padline no :exports code :noweb yes -#+EXPORT_SELECT_TAGS: export -#+EXPORT_EXCLUDE_TAGS: noexport -#+FILETAGS: :sdp:rel:meta:read:file -#+TAGS: assert(a) class(c) debug(d) mixin(m) sdp(s) tangle(T) template(t) WEB(W) noexport(n) - -[[./sdp.org][sdp]] [[./][org/]] -* imports - -#+name: imports_std -#+BEGIN_SRC d -import - sdp.meta, - sdp.output.paths_source, - std.file, - std.path; -#+END_SRC - -* A. get _config file_ (read in) :module:sdp:meta_read_config_files: -** 0. module template - -#+BEGIN_SRC d :tangle ../src/sdp/meta/read_config_files.d -/++ - read configuration files
- - read config files
- meta_config_files.d -+/ -module sdp.meta.read_config_files; -<> -<> -<> -#+END_SRC - -*** 0. read config files (config_local_site & sisu_document_make) toml -**** 1. site configuration - -#+name: meta_config_file_hub -#+BEGIN_SRC d -static template readConfigSite() { - <> - final auto readConfigSite(M,E)(M _manifest, E _env) { - string config_file_str; - string conf_filename = "NONE"; - auto _conf_file_details = ConfigFilePaths!()(_manifest, _env); - auto possible_config_path_locations = _conf_file_details.possible_config_path_locations.config_local_site; - foreach(conf_fn; [_conf_file_details.config_filename_site_toml]) { - foreach(pth; possible_config_path_locations) { - auto conf_file = asNormalizedPath(chainPath(pth.to!string, conf_fn)).array; - conf_filename = conf_fn; - if (config_file_str.length > 0) { - // conf_filename = conf_fn; - break; - } - try { - if (exists(conf_file)) { - debug(io) { - writeln("WARNING (io debug) in config file found: ", conf_file); - // writeln(__LINE__, ": found: ", conf_file, " in ", pth); - } - config_file_str = conf_file.readText; - break; - } - } catch (ErrnoException ex) { - } catch (FileException ex) { - } - } - if (config_file_str.length > 0) { break; } - } - struct _ConfContent { - string filename() { - return conf_filename; - } - string filetype() { - return conf_filename.extension.chompPrefix("."); - } - auto content() { - return config_file_str; - } - } - return _ConfContent(); - } -} -#+END_SRC - -**** 2. document make/config - -#+name: meta_config_file_hub -#+BEGIN_SRC d -static template readConfigDoc() { - <> - final auto readConfigDoc(M,E)(M _manifest, E _env) { - string config_file_str; - string conf_filename = "NONE"; - auto _conf_file_details = ConfigFilePaths!()(_manifest, _env); - auto possible_config_path_locations = _conf_file_details.possible_config_path_locations.sisu_document_make; - foreach(conf_fn; [_conf_file_details.config_filename_document_toml]) { - foreach(pth; possible_config_path_locations) { - auto conf_file = asNormalizedPath(chainPath(pth.to!string, conf_fn)).array; - conf_filename = conf_fn; - if (config_file_str.length > 0) { - // conf_filename = conf_fn; - break; - } - try { - if (exists(conf_file)) { - debug(io) { - writeln("WARNING (io debug) in config file found: ", conf_file); - } - config_file_str = conf_file.readText; - break; - } - } - catch (ErrnoException ex) { - } - catch (FileException ex) { - } - } - if (config_file_str.length > 0) { break; } - } - struct _ConfContent { - string filename() { - return conf_filename; - } - string filetype() { - return conf_filename.extension.chompPrefix("."); - } - auto content() { - return config_file_str; - } - } - return _ConfContent(); - } -} -#+END_SRC - -** A. TOML -*** 1. 
TOML read config files (config_local_site & sisu_document_make) :file:config: -**** TOML config_local_site - -#+name: meta_config_file_in -#+BEGIN_SRC d -static template configReadInSiteTOML() { - <> - final string configReadInSiteTOML(M,E)(M manifest, E env) { - auto conf_file_details = ConfigFilePaths!()(manifest, env); - string conf_toml = conf_file_details.config_filename_site_toml; - auto possible_config_path_locations = conf_file_details.possible_config_path_locations.config_local_site; - string config_file_str; - debug(io) { - writeln("WARNING (io debug) in config filename: ", conf_toml); - writeln("WARNING (io debug) in config possible path locations: ", possible_config_path_locations); - } - foreach(pth; possible_config_path_locations) { - auto conf_file = asNormalizedPath(chainPath(pth.to!string, conf_toml)).array; - if (config_file_str.length > 0) { - break; - } - try { - if (exists(conf_file)) { - debug(io) { - writeln("WARNING (io debug) in config file found: ", conf_file); - } - config_file_str = conf_file.readText; - break; - } - } - catch (ErrnoException ex) { - } - catch (FileException ex) { - } - } - return config_file_str; - } -} -#+END_SRC - -**** TOML sisu_document_make - -#+name: meta_config_file_in -#+BEGIN_SRC d -static template configReadInDocTOML() { - <> - final string configReadInDocTOML(M,E)(M manifest, E env) { - auto conf_file_details = ConfigFilePaths!()(manifest, env); - string conf_toml = conf_file_details.config_filename_document_toml; - auto possible_config_path_locations = conf_file_details.possible_config_path_locations.sisu_document_make; - string config_file_str; - debug(io) { - writeln("WARNING (io debug) in config filename: ", conf_toml); - writeln("WARNING (io debug) in config possible path locations: ", possible_config_path_locations); - } - foreach(pth; possible_config_path_locations) { - auto conf_file = asNormalizedPath(chainPath(pth.to!string, conf_toml)).array; - if (config_file_str.length > 0) { - break; - } - try { - if (exists(conf_file)) { - debug(io) { - writeln("WARNING (io debug) in config file found: ", conf_file); - } - config_file_str = conf_file.readText; - break; - } - } - catch (ErrnoException ex) { - } - catch (FileException ex) { - } - } - return config_file_str; - } -} -#+END_SRC - -*** 2. TOML config files get - -#+name: meta_config_file_toml -#+BEGIN_SRC d -static template configTOML() { - import toml; // - <> - auto configTOML(string configuration, string conf_toml_filename) { - TOMLDocument _toml_conf; - try { - _toml_conf = parseTOML(configuration); // parseTOML(cast(string)(configuration)); - } - catch(ErrnoException e) { - stderr.writeln("Toml problem with content for ", conf_toml_filename); - stderr.writeln(e.msg); - } - return _toml_conf; - } -} -#+END_SRC - -*** 3. 
TOML config (config_local_site & sisu_document_make) :file:config:hub: - -#+name: meta_config_file_hub -#+BEGIN_SRC d -static template configReadSiteTOML() { - <> - import toml; - final auto configReadSiteTOML(M,E)(M _manifest, E _env) { - auto _configuration = configReadInSiteTOML!()(_manifest, _env); - auto _conf_file_details = ConfigFilePaths!()(_manifest, _env); - string _conf_toml = _conf_file_details.config_filename_site_toml; - auto _toml_conf = configTOML!()(_configuration, _conf_toml); - return _toml_conf; - } -} -static template configReadDocTOML() { - <> - import toml; - final auto configReadDocTOML(M,E)(M _manifest, E _env) { - auto _configuration = configReadInDocTOML!()(_manifest, _env); - auto _conf_file_details = ConfigFilePaths!()(_manifest, _env); - string _conf_toml = _conf_file_details.config_filename_document_toml; - auto _toml_conf = configTOML!()(_configuration, _conf_toml); - return _toml_conf; - } -} -#+END_SRC - -* B. get _markup source_, read file :module:sdp:meta_read_source_files: -** 0. module template (includes tuple) - -#+BEGIN_SRC d :tangle ../src/sdp/meta/read_source_files.d -/++ - module meta_read_source_files;
- - open markup files
- - if master file scan for addional files to import/insert -+/ -module sdp.meta.read_source_files; -static template SiSUrawMarkupContent() { - import - sdp.meta.rgx; - <> - mixin SiSUrgxInit; - static auto rgx = Rgx(); - string[] _images=[]; - auto _extract_images(S)(S content_block) { - string[] images_; - auto _content_block = content_block.to!string; - if (auto m = _content_block.matchAll(rgx.image)) { - images_ ~= m.captures[1].to!string; - } - return images_; - } - auto rawsrc = RawMarkupContent(); - auto SiSUrawMarkupContent(O,Fn)(O _opt_action, Fn fn_src) { - auto _0_header_1_body_content_2_insert_filelist_tuple - = rawsrc.sourceContentSplitIntoHeaderAndBody(_opt_action, rawsrc.sourceContent(fn_src), fn_src); - return _0_header_1_body_content_2_insert_filelist_tuple; - } - struct RawMarkupContent { - final sourceContent(in string fn_src) { - auto raw = MarkupRawUnit(); - auto source_txt_str - = raw.markupSourceReadIn(fn_src); - return source_txt_str; - } - final auto sourceContentSplitIntoHeaderAndBody(O)(O _opt_action, in string source_txt_str, in string fn_src="") { - auto raw = MarkupRawUnit(); - string[] insert_file_list; - string[] images_list; - auto t - = raw.markupSourceHeaderContentRawLineTupleArray(source_txt_str); - auto header_raw = t[0]; - auto sourcefile_body_content = t[1]; - if (fn_src.match(rgx.src_fn_master)) { // filename with path needed if master file (.ssm) not otherwise - auto ins = Inserts(); - auto tu - = ins.scan_master_src_for_insert_files_and_import_content(_opt_action, sourcefile_body_content, fn_src); - static assert(!isTypeTuple!(tu)); - sourcefile_body_content = tu[0]; - insert_file_list = tu[1].dup; - images_list = tu[2].dup; - } else if (_opt_action.source || _opt_action.sisupod) { - auto ins = Inserts(); - auto tu - = ins.scan_master_src_for_insert_files_and_import_content(_opt_action, sourcefile_body_content, fn_src); - static assert(!isTypeTuple!(tu)); - images_list = tu[2].dup; - } - t = tuple( - header_raw, - sourcefile_body_content, - insert_file_list, - images_list - ); - static assert(t.length==4); - return t; - } - } - struct MarkupRawUnit { - import std.file; - <> - <> - <> - <> - <> - <> - } - struct Inserts { - auto scan_subdoc_source(O)( - O _opt_action, - char[][] markup_sourcefile_insert_content, - string fn_src - ) { - mixin SiSUrgxInitFlags; - <> - foreach (line; markup_sourcefile_insert_content) { - <> - } // end src subdoc (inserts) loop - <> - } - auto scan_master_src_for_insert_files_and_import_content(O)( - O _opt_action, - char[][] sourcefile_body_content, - string fn_src - ) { - import std.algorithm; - mixin SiSUrgxInitFlags; - <> - foreach (line; sourcefile_body_content) { - <> - } // end src doc loop - <> - } - } -} -#+END_SRC - -** get markup source, read file :source:markup: -*** read file, source string [#A] :string: - -#+name: meta_markup_source_raw_read_file_source_string -#+BEGIN_SRC d -final private string readInMarkupSource(in char[] fn_src) { - enforce( - exists(fn_src)!=0, - "file not found: «" ~ - fn_src ~ "»" - ); - string source_txt_str; - try { - if (exists(fn_src)) { - debug(io) { - writeln("in src, markup source file found: ", fn_src); - } - source_txt_str = fn_src.readText; - } - } - catch (ErrnoException ex) { - } - catch (UTFException ex) { - // Handle validation errors - } - catch (FileException ex) { - // Handle errors - } - std.utf.validate(source_txt_str); - return source_txt_str; -} -#+END_SRC - -*** document header & content, array.length == 2 [#A] :array: - -here you split document header and 
body, an array.length == 2 -split is on first match of level A~ (which is required) - -#+name: meta_markup_source_raw_doc_header_and_content_split -#+BEGIN_SRC d -final private char[][] header0Content1(in string src_text) { - /+ split string on _first_ match of "^:?A~\s" into [header, content] array/tuple +/ - char[][] header_and_content; - auto m = (cast(char[]) src_text).matchFirst(rgx.heading_a); - header_and_content ~= m.pre; - header_and_content ~= m.hit ~ m.post; - assert(header_and_content.length == 2, - "document markup is broken, header body split == " - ~ header_and_content.length.to!string - ~ "; (header / body array split should == 2 (split is on level A~))" - ); - return header_and_content; -} -#+END_SRC - -*** source line array :array: - -#+name: meta_markup_source_raw_source_line_array -#+BEGIN_SRC d -final private char[][] markupSourceLineArray(in char[] src_text) { - char[][] source_line_arr - = (cast(char[]) src_text).split(rgx.newline_eol_strip_preceding); - return source_line_arr; -} -#+END_SRC - -*** source content raw line array :array: -- used for regular .sst files; master .ssm files and; .ssi inserts -- regex is passed for relevant enforce match - -**** read in file - -#+name: meta_markup_source_raw_read_in_file -#+BEGIN_SRC d -auto markupSourceReadIn(in string fn_src) { - static auto rgx = Rgx(); - enforce( - fn_src.match(rgx.src_pth_sst_or_ssm), - "not a sisu markup filename: «" ~ - fn_src ~ "»" - ); - auto source_txt_str = readInMarkupSource(fn_src); - return source_txt_str; -} -#+END_SRC - -**** tuple (a) header, (b) body content, (c) file insert list & (d) image list? - -- header -- body content -- file insert list -- [image list?] - -#+name: meta_markup_source_raw_tuple_of_header_and_body -#+BEGIN_SRC d -auto markupSourceHeaderContentRawLineTupleArray(in string source_txt_str) { - string[] file_insert_list = []; - string[] images_list = []; - auto hc = header0Content1(source_txt_str); - auto header = hc[0]; - char[] source_txt = hc[1]; - auto source_line_arr = markupSourceLineArray(source_txt); - auto t = tuple( - header, - source_line_arr, - file_insert_list, - images_list - ); - return t; -} -#+END_SRC - -**** get insert source line array - -#+name: meta_markup_source_raw_get_insert_source_line_array -#+BEGIN_SRC d -final char[][] getInsertMarkupSourceContentRawLineArray( - in char[] fn_src_insert, - Regex!(char) rgx_file -) { - enforce( - fn_src_insert.match(rgx_file), - "not a sisu markup filename: «" ~ - fn_src_insert ~ "»" - ); - auto source_txt_str = readInMarkupSource(fn_src_insert); - auto source_line_arr = markupSourceLineArray(source_txt_str); - return source_line_arr; -} -#+END_SRC - -** get markup source, master file & inserts :masterfile:inserts: -[[./sdp.org][sdp]] [[./][org/]] - -*** scan inserts (sub-document) source :scan_insert_src: -**** scan subdoc source - -#+name: meta_inserts_scan -#+BEGIN_SRC d -char[][] contents_insert; -auto type1 = flags_type_init; -auto fn_pth_full = fn_src.match(rgx.src_pth_sst_or_ssm); -auto markup_src_file_path = fn_pth_full.captures[1]; -#+END_SRC - -**** loop insert (sub-document) - -#+name: meta_inserts_scan_loop -#+BEGIN_SRC d -if (type1["curly_code"] == 1) { - type1["header_make"] = 0; - type1["header_meta"] = 0; - if (line.matchFirst(rgx.block_curly_code_close)) { - type1["curly_code"] = 0; - } - contents_insert ~= line; -} else if (line.matchFirst(rgx.block_curly_code_open)) { - type1["curly_code"] = 1; - type1["header_make"] = 0; - type1["header_meta"] = 0; - contents_insert ~= line; -} else if 
(type1["tic_code"] == 1) { - type1["header_make"] = 0; - type1["header_meta"] = 0; - if (line.matchFirst(rgx.block_tic_close)) { - type1["tic_code"] = 0; - } - contents_insert ~= line; -} else if (line.matchFirst(rgx.block_tic_code_open)) { - type1["tic_code"] = 1; - type1["header_make"] = 0; - type1["header_meta"] = 0; - contents_insert ~= line; -} else if ( - (type1["header_make"] == 1) - && line.matchFirst(rgx.native_header_sub) -) { - type1["header_make"] = 1; - type1["header_meta"] = 0; -} else if ( - (type1["header_meta"] == 1) - && line.matchFirst(rgx.native_header_sub) -) { - type1["header_meta"] = 1; - type1["header_make"] = 0; -} else if (auto m = line.match(rgx.insert_src_fn_ssi_or_sst)) { - type1["header_make"] = 0; - type1["header_meta"] = 0; - auto insert_fn = m.captures[2]; - auto insert_sub_pth = m.captures[1]; - auto fn_src_insert - = chainPath(markup_src_file_path, insert_sub_pth ~ insert_fn).array; - auto raw = MarkupRawUnit(); - auto markup_sourcesubfile_insert_content - = raw.getInsertMarkupSourceContentRawLineArray(fn_src_insert, rgx.src_fn_find_inserts); - debug(insert_file) { - tell_l("red", line); - tell_l("red", fn_src_insert); - tell_l("fuchsia", "ERROR"); - writeln( - " length contents insert array: ", - markup_sourcesubfile_insert_content.length - ); - } - if (_opt_action.source || _opt_action.sisupod) { - _images ~= _extract_images(markup_sourcesubfile_insert_content); - } - auto ins = Inserts(); - /+ - - 1. load file - - 2. read lines - - 3. scan lines - - a. if filename insert, and insert filename - - repeat 1 - - b. else - - add line to new array; - - build image list, search for any image files to add to image list - +/ -} else { - type1["header_make"] = 0; - type1["header_meta"] = 0; - contents_insert ~= line; // images to extract for image list? 
- if (_opt_action.source || _opt_action.sisupod) { - auto _image_linelist = _extract_images(line); - if (_image_linelist.length > 0) { - _images ~= _image_linelist; - } - } -} -#+END_SRC - -**** post loop - -#+name: meta_inserts_scan_post -#+BEGIN_SRC d -auto t = tuple( - contents_insert, - _images -); -return t; -#+END_SRC - -*** scan document source :scan_src: -**** scan doc source - -#+name: meta_master_doc_scan_for_insert_filenames -#+BEGIN_SRC d -char[][] contents; -auto type = flags_type_init; -auto fn_pth_full = fn_src.match(rgx.src_pth_sst_or_ssm); -auto markup_src_file_path = fn_pth_full.captures[1]; -char[][] contents_insert; -string[] _images =[]; -string[] insert_file_list =[]; -#+END_SRC - -**** include inserts: _loop master_ scan for inserts (insert documents) - -#+name: meta_master_doc_scan_for_insert_filenames_loop -#+BEGIN_SRC d -if (type["curly_code"] == 1) { - if (line.matchFirst(rgx.block_curly_code_close)) { - type["curly_code"] = 0; - } - contents ~= line; -} else if (line.matchFirst(rgx.block_curly_code_open)) { - type["curly_code"] = 1; - contents ~= line; -} else if (type["tic_code"] == 1) { - if (line.matchFirst(rgx.block_tic_close)) { - type["tic_code"] = 0; - } - contents ~= line; -} else if (line.matchFirst(rgx.block_tic_code_open)) { - type["tic_code"] = 1; - contents ~= line; -} else if (auto m = line.match(rgx.insert_src_fn_ssi_or_sst)) { - auto insert_fn = m.captures[2]; - auto insert_sub_pth = m.captures[1]; - auto fn_src_insert - = chainPath(markup_src_file_path, insert_sub_pth ~ insert_fn).array; - insert_file_list ~= fn_src_insert.to!string; - auto raw = MarkupRawUnit(); - /+ TODO +/ - auto markup_sourcefile_insert_content - = raw.getInsertMarkupSourceContentRawLineArray(fn_src_insert, rgx.src_fn_find_inserts); - debug(insert_file) { - tell_l("red", line); - tell_l("red", fn_src_insert); - writeln( - " length contents insert array: ", - markup_sourcefile_insert_content.length - ); - } - auto ins = Inserts(); - auto contents_insert_tu = ins.scan_subdoc_source( - _opt_action, - markup_sourcefile_insert_content, - fn_src_insert.to!string - ); - contents ~= contents_insert_tu[0]; // images to extract for image list? - if (_opt_action.source || _opt_action.sisupod) { - auto _image_linelist = _extract_images(contents_insert_tu[0]); - if (_image_linelist.length > 0) { - _images ~= _image_linelist; - } - } - /+ - - 1. load file - - 2. read lines - - 3. scan lines - - a. if filename insert, and insert filename - - repeat 1 - - b. 
else - - add line to new array; - - build image list, search for any image files to add to image list - +/ -} else { - contents ~= line; - if (_opt_action.source || _opt_action.sisupod) { - auto _image_linelist = _extract_images(line); - if (_image_linelist.length > 0) { - _images ~= _image_linelist; - } - } -} -#+END_SRC - -**** post loop - -#+name: meta_master_doc_scan_for_insert_filenames_post -#+BEGIN_SRC d -string[] images = []; -foreach(i; uniq(_images.sort())) { - images ~= i; -} -debug(insert_file) { - writeln(__LINE__); - writeln(contents.length); -} -auto t = tuple( - contents, - insert_file_list, - images -); -return t; -#+END_SRC - -* __END__ diff --git a/org/output_sqlite.org b/org/output_sqlite.org index e8a29bf..dc76da4 100644 --- a/org/output_sqlite.org +++ b/org/output_sqlite.org @@ -1223,10 +1223,11 @@ CREATE TABLE doc_objects ( seg VARCHAR(256) NULL, lev_an VARCHAR(1), lev SMALLINT NULL, - t_of VARCHAR(16), - t_is VARCHAR(16), + is_of_type VARCHAR(16), + is_a VARCHAR(16), node VARCHAR(16) NULL, parent VARCHAR(16) NULL, + last_decendant VARCHAR(16) NULL, /* headings only */ digest_clean CHAR(256), digest_all CHAR(256), types CHAR(1) NULL @@ -1276,8 +1277,8 @@ _insert_doc_objects_row = format(q"¶ clean, body, lev, - t_of, - t_is + is_of_type, + is_a ) #+END_SRC diff --git a/org/output_xmls.org b/org/output_xmls.org index 122b15c..0aa50d5 100644 --- a/org/output_xmls.org +++ b/org/output_xmls.org @@ -98,8 +98,8 @@ string div_delimit( #+name: xhtml_format_objects #+BEGIN_SRC d string special_characters(O)( - auto ref const O obj, - string _txt + const O obj, + string _txt ){ _txt = (_txt) .replaceAll(rgx.xhtml_ampersand, "&") @@ -421,10 +421,10 @@ auto tail() { #+name: xhtml_format_objects #+BEGIN_SRC d auto inline_images(O)( - auto ref const O obj, - string _txt, - string _suffix = ".html", - string _xml_type = "seg", + const O obj, + string _txt, + string _suffix = ".html", + string _xml_type = "seg", ) { string _img_pth; if (_xml_type == "epub") { @@ -450,10 +450,10 @@ auto inline_images(O)( #+name: xhtml_format_objects #+BEGIN_SRC d auto inline_links(O)( - auto ref const O obj, - string _txt, - string _suffix = ".html", - string _xml_type = "seg", + const O obj, + string _txt, + string _suffix = ".html", + string _xml_type = "seg", ) { if (obj.has.inline_links) { if ((_txt.match(rgx.mark_internal_site_lnk)) @@ -502,8 +502,8 @@ auto inline_links(O)( #+name: xhtml_format_objects #+BEGIN_SRC d auto inline_notes_scroll(O)( - auto ref const O obj, - string _txt, + const O obj, + string _txt, ) { if (obj.has.inline_notes_reg) { _txt = font_face(_txt); @@ -531,8 +531,8 @@ auto inline_notes_scroll(O)( #+name: xhtml_format_objects #+BEGIN_SRC d auto inline_notes_seg(O)( - auto ref const O obj, - string _txt, + const O obj, + string _txt, ) { string[] _endnotes; if (obj.has.inline_notes_reg) { @@ -576,9 +576,9 @@ auto inline_notes_seg(O)( #+name: xhtml_format_objects #+BEGIN_SRC d auto inline_markup_scroll(O)( - auto ref const O obj, - string _txt, - string _suffix = ".html", + const O obj, + string _txt, + string _suffix = ".html", ) { _txt = inline_images(obj, _txt, _suffix, "scroll"); _txt = inline_links(obj, _txt, _suffix, "scroll"); @@ -592,10 +592,10 @@ auto inline_markup_scroll(O)( #+name: xhtml_format_objects #+BEGIN_SRC d auto inline_markup_seg(O)( - auto ref const O obj, - string _txt, - string _suffix = ".html", - string _xml_type = "seg", + const O obj, + string _txt, + string _suffix = ".html", + string _xml_type = "seg", ) { _txt = inline_images(obj, _txt, _suffix, 
_xml_type); _txt = inline_links(obj, _txt, _suffix, _xml_type); @@ -610,7 +610,7 @@ auto inline_markup_seg(O)( #+name: xhtml_format_objects #+BEGIN_SRC d string lev4_heading_subtoc(O)( - auto ref const O obj, + const O obj, ) { char[] lev4_subtoc; lev4_subtoc ~= "
\n"; @@ -642,7 +642,7 @@ string lev4_heading_subtoc(O)( #+name: xhtml_format_objects #+BEGIN_SRC d auto nav_pre_next_svg(O)( - auto ref const O obj, + const O obj, ) { string prev, next, toc; if (obj.tags.segment_anchor_tag == "toc") { @@ -726,9 +726,9 @@ auto nav_pre_next_svg(O)( #+name: xhtml_format_objects #+BEGIN_SRC d auto heading(O)( - auto ref const O obj, - string _txt, - string _xml_type = "html", + const O obj, + string _txt, + string _xml_type = "html", ) { auto tags = _xhtml_anchor_tags(obj.tags.anchor_tags); string _horizontal_rule = "
"; @@ -781,9 +781,9 @@ auto heading(O)( #+name: xhtml_format_objects #+BEGIN_SRC d auto heading_scroll(O)( - auto ref const O obj, - string _txt, - string _suffix = ".html", + const O obj, + string _txt, + string _suffix = ".html", ) { auto tags = _xhtml_anchor_tags(obj.tags.anchor_tags); _txt = inline_markup_scroll(obj, _txt, _suffix); @@ -797,10 +797,10 @@ auto heading_scroll(O)( #+name: xhtml_format_objects #+BEGIN_SRC d auto heading_seg(O)( - auto ref const O obj, - string _txt, - string _suffix = ".html", - string _xml_type = "html", + const O obj, + string _txt, + string _suffix = ".html", + string _xml_type = "html", ) { auto t = inline_markup_seg(obj, _txt, _suffix, _xml_type); _txt = t[0]; @@ -820,8 +820,8 @@ auto heading_seg(O)( #+name: xhtml_format_objects #+BEGIN_SRC d auto para(O)( - auto ref const O obj, - string _txt, + const O obj, + string _txt, ) { auto tags = _xhtml_anchor_tags(obj.tags.anchor_tags); _txt = font_face(_txt); @@ -865,9 +865,9 @@ auto para(O)( #+name: xhtml_format_objects #+BEGIN_SRC d auto para_scroll(O)( - auto ref const O obj, - string _txt, - string _suffix = ".html", + const O obj, + string _txt, + string _suffix = ".html", ) { auto tags = _xhtml_anchor_tags(obj.tags.anchor_tags); _txt = inline_markup_scroll(obj, _txt, _suffix); @@ -881,10 +881,10 @@ auto para_scroll(O)( #+name: xhtml_format_objects #+BEGIN_SRC d auto para_seg(O)( - auto ref const O obj, - string _txt, - string _suffix = ".html", - string _xml_type = "html", + const O obj, + string _txt, + string _suffix = ".html", + string _xml_type = "html", ) { auto t = inline_markup_seg(obj, _txt, _suffix, _xml_type); _txt = t[0].to!string; @@ -904,8 +904,8 @@ auto para_seg(O)( #+name: xhtml_format_objects #+BEGIN_SRC d auto quote(O)( - auto ref const O obj, - string _txt, + const O obj, + string _txt, ) { _txt = font_face(_txt); string o; @@ -941,9 +941,9 @@ auto quote(O)( #+name: xhtml_format_objects #+BEGIN_SRC d auto quote_scroll(O)( - auto ref const O obj, - string _txt, - string _suffix = ".html", + const O obj, + string _txt, + string _suffix = ".html", ) { auto tags = _xhtml_anchor_tags(obj.tags.anchor_tags); _txt = inline_markup_scroll(obj, _txt, _suffix); @@ -957,10 +957,10 @@ auto quote_scroll(O)( #+name: xhtml_format_objects #+BEGIN_SRC d auto quote_seg(O)( - auto ref const O obj, - string _txt, - string _suffix = ".html", - string _xml_type = "html", + const O obj, + string _txt, + string _suffix = ".html", + string _xml_type = "html", ) { auto t = inline_markup_seg(obj, _txt, _suffix, _xml_type); _txt = t[0].to!string; @@ -980,8 +980,8 @@ auto quote_seg(O)( #+name: xhtml_format_objects #+BEGIN_SRC d auto group(O)( - auto ref const O obj, - string _txt, + const O obj, + string _txt, ) { _txt = font_face(_txt); string o; @@ -1017,10 +1017,10 @@ auto group(O)( #+name: xhtml_format_objects #+BEGIN_SRC d auto group_scroll(O)( - auto ref const O obj, - string _txt, - string _suffix = ".html", - string _xml_type = "html", + const O obj, + string _txt, + string _suffix = ".html", + string _xml_type = "html", ) { auto tags = _xhtml_anchor_tags(obj.tags.anchor_tags); _txt = inline_markup_scroll(obj, _txt, _suffix); @@ -1034,10 +1034,10 @@ auto group_scroll(O)( #+name: xhtml_format_objects #+BEGIN_SRC d auto group_seg(O)( - auto ref const O obj, - string _txt, - string _suffix = ".html", - string _xml_type = "html", + const O obj, + string _txt, + string _suffix = ".html", + string _xml_type = "html", ) { auto t = inline_markup_seg(obj, _txt, _suffix, _xml_type); _txt = t[0].to!string; @@ 
-1057,8 +1057,8 @@ auto group_seg(O)( #+name: xhtml_format_objects #+BEGIN_SRC d auto block(O)( - auto ref const O obj, - string _txt, + const O obj, + string _txt, ) { _txt = font_face(_txt); string o; @@ -1090,10 +1090,10 @@ auto block(O)( #+name: xhtml_format_objects #+BEGIN_SRC d auto block_scroll(O)( - auto ref const O obj, - string _txt, - string _suffix = ".html", - string _xml_type = "html", + const O obj, + string _txt, + string _suffix = ".html", + string _xml_type = "html", ) { auto tags = _xhtml_anchor_tags(obj.tags.anchor_tags); _txt = inline_markup_scroll(obj, _txt, _suffix); @@ -1107,10 +1107,10 @@ auto block_scroll(O)( #+name: xhtml_format_objects #+BEGIN_SRC d auto block_seg(O)( - auto ref const O obj, - string _txt, - string _suffix = ".html", - string _xml_type = "html", + const O obj, + string _txt, + string _suffix = ".html", + string _xml_type = "html", ) { auto t = inline_markup_seg(obj, _txt, _suffix, _xml_type); _txt = t[0].to!string; @@ -1130,8 +1130,8 @@ auto block_seg(O)( #+name: xhtml_format_objects #+BEGIN_SRC d auto verse(O)( - auto ref const O obj, - string _txt, + const O obj, + string _txt, ) { _txt = font_face(_txt); string o; @@ -1163,10 +1163,10 @@ auto verse(O)( #+name: xhtml_format_objects #+BEGIN_SRC d auto verse_scroll(O)( - auto ref const O obj, - string _txt, - string _suffix = ".html", - string _xml_type = "html", + const O obj, + string _txt, + string _suffix = ".html", + string _xml_type = "html", ) { auto tags = _xhtml_anchor_tags(obj.tags.anchor_tags); _txt = inline_markup_scroll(obj, _txt, _suffix); @@ -1180,10 +1180,10 @@ auto verse_scroll(O)( #+name: xhtml_format_objects #+BEGIN_SRC d auto verse_seg(O)( - auto ref const O obj, - string _txt, - string _suffix = ".html", - string _xml_type = "html", + const O obj, + string _txt, + string _suffix = ".html", + string _xml_type = "html", ) { auto t = inline_markup_seg(obj, _txt, _suffix, _xml_type); _txt = t[0].to!string; @@ -1202,8 +1202,8 @@ auto verse_seg(O)( #+name: xhtml_format_objects_code #+BEGIN_SRC d auto code(O)( - auto ref const O obj, - string _txt, + const O obj, + string _txt, ) { string o; if (obj.metainfo.object_number.empty) { @@ -1243,8 +1243,8 @@ align="left|right|center" #+name: xhtml_format_objects #+BEGIN_SRC d auto tablarize(O)( - auto ref const O obj, - string _txt, + const O obj, + string _txt, ) { string[] _table_rows = (_txt).split(rgx.table_delimiter_row); string[] _table_cols; @@ -1282,8 +1282,8 @@ auto tablarize(O)( #+name: xhtml_format_objects #+BEGIN_SRC d auto table(O)( - auto ref const O obj, - string _txt, + const O obj, + string _txt, ) { auto tags = _xhtml_anchor_tags(obj.tags.anchor_tags); _txt = font_face(_txt); @@ -1317,8 +1317,8 @@ auto table(O)( #+name: xhtml_format_objects #+BEGIN_SRC d auto endnote(O)( - auto ref const O obj, - string _txt, + const O obj, + string _txt, ) { string o; o = format(q"¶

@@ -1354,8 +1354,8 @@ template outputHTML() { #+name: output_html_scroll #+BEGIN_SRC d void scroll(D,I)( - auto ref const D doc_abstraction, - auto ref I doc_matters, + const D doc_abstraction, + I doc_matters, ) { mixin SiSUoutputRgxInit; auto xhtml_format = outputXHTMLs(); @@ -1535,8 +1535,8 @@ void scroll_write_output(M,C)( #+name: output_html_seg #+BEGIN_SRC d void seg(D,M)( - auto ref const D doc_abstraction, - auto ref M doc_matters, + const D doc_abstraction, + M doc_matters, ) { mixin SiSUoutputRgxInit; auto rgx = Rgx(); @@ -2216,8 +2216,8 @@ string epub2_oebps_toc_ncx(D,I)(D doc_abstraction, I doc_matters) { #+name: output_epub3_xhtml_seg #+BEGIN_SRC d void outputEPub3(D,I)( - auto ref const D doc_abstraction, - auto ref I doc_matters, + const D doc_abstraction, + I doc_matters, ) { mixin SiSUoutputRgxInit; auto xhtml_format = outputXHTMLs(); diff --git a/org/sdp.org b/org/sdp.org index fe34840..b26f317 100644 --- a/org/sdp.org +++ b/org/sdp.org @@ -26,7 +26,7 @@ struct Version { int minor; int patch; } -enum ver = Version(0, 26, 3); +enum ver = Version(0, 26, 4); #+END_SRC ** compilation restrictions (supported compilers) @@ -120,11 +120,11 @@ import sdp.meta.conf_make_meta_json, sdp.meta.defaults, sdp.meta.doc_debugs, - sdp.meta.read_config_files, - sdp.meta.read_source_files, sdp.meta.rgx, - sdp.output.hub, - sdp.output.paths_source; + sdp.source.paths_source, + sdp.source.read_config_files, + sdp.source.read_source_files, + sdp.output.hub; #+END_SRC ****** notes @@ -141,7 +141,7 @@ import [[./meta_conf_make_meta.org][meta_conf_make_meta]] [[./meta_defaults.org][meta_defaults]] [[./meta_output_debugs.org][meta_output_debugs]] -[[./meta_read_source_files.org][meta_read_source_files]] +[[./source_read_files.org][source_read_files]] [[./compile_time_info.org][compile time info]] [[./output.org][output]] [[./sdp.org][sdp]] @@ -201,7 +201,7 @@ static auto rgx = Rgx(); scope(success) { debug(checkdoc) { writefln( - "~ run complete, ok ~ (sdp-%s.%s.%s, %s v%s, %s %s)", + "~ run complete, ok ~ (sdp-%s.%s.%s, %s D:%s, %s %s)", ver.major, ver.minor, ver.patch, __VENDOR__, __VERSION__, bits, os, @@ -740,7 +740,7 @@ template SiSUabstraction() { #+END_SRC ** 1. raw file content split, doc: _header_, _content_ +(lists: subdocs? images?) >> -- [[./meta_read_source_files.org][meta_read_source_files]] +- [[./source_read_files.org][source_read_files]] - read in the _marked up source document_ and - split the document into: diff --git a/org/source_files_read.org b/org/source_files_read.org new file mode 100644 index 0000000..899e628 --- /dev/null +++ b/org/source_files_read.org @@ -0,0 +1,733 @@ +#+TITLE: sdp markup source raw +#+AUTHOR: Ralph Amissah +#+EMAIL: [[mailto:ralph.amissah@gmail.com][ralph.amissah@gmail.com]] +#+DESCRIPTION: documents - structuring, publishing in multiple formats & search +#+KEYWORDS +#+LANGUAGE: en +#+STARTUP: indent content +#+OPTIONS: H:3 num:nil toc:t \n:nil @:t ::t |:t ^:nil _:nil -:t f:t *:t <:t +#+OPTIONS: TeX:t LaTeX:t skip:nil d:nil todo:t pri:nil tags:not-in-toc +#+OPTIONS: author:nil email:nil creator:nil timestamp:nil +#+PROPERTY: header-args :padline no :exports code :noweb yes +#+EXPORT_SELECT_TAGS: export +#+EXPORT_EXCLUDE_TAGS: noexport +#+FILETAGS: :sdp:rel:meta:read:file +#+TAGS: assert(a) class(c) debug(d) mixin(m) sdp(s) tangle(T) template(t) WEB(W) noexport(n) + +[[./sdp.org][sdp]] [[./][org/]] +* imports + +#+name: imports_std +#+BEGIN_SRC d +import + sdp.meta, + sdp.source.paths_source, + std.file, + std.path; +#+END_SRC + +* A. 
get _config file_ (read in) :module:sdp:source_read_config_files: +** 0. module template + +#+BEGIN_SRC d :tangle ../src/sdp/source/read_config_files.d +/++ + read configuration files
+ - read config files
+ meta_config_files.d ++/ +module sdp.source.read_config_files; +<> +<> +<> +#+END_SRC + +*** 0. read config files (config_local_site & sisu_document_make) toml +**** 1. site configuration + +#+name: meta_config_file_hub +#+BEGIN_SRC d +static template readConfigSite() { + <> + final auto readConfigSite(M,E)(M _manifest, E _env) { + string config_file_str; + string conf_filename = "NONE"; + auto _conf_file_details = ConfigFilePaths!()(_manifest, _env); + auto possible_config_path_locations = _conf_file_details.possible_config_path_locations.config_local_site; + foreach(conf_fn; [_conf_file_details.config_filename_site_toml]) { + foreach(pth; possible_config_path_locations) { + auto conf_file = asNormalizedPath(chainPath(pth.to!string, conf_fn)).array; + conf_filename = conf_fn; + if (config_file_str.length > 0) { + // conf_filename = conf_fn; + break; + } + try { + if (exists(conf_file)) { + debug(io) { + writeln("WARNING (io debug) in config file found: ", conf_file); + // writeln(__LINE__, ": found: ", conf_file, " in ", pth); + } + config_file_str = conf_file.readText; + break; + } + } catch (ErrnoException ex) { + } catch (FileException ex) { + } + } + if (config_file_str.length > 0) { break; } + } + struct _ConfContent { + string filename() { + return conf_filename; + } + string filetype() { + return conf_filename.extension.chompPrefix("."); + } + auto content() { + return config_file_str; + } + } + return _ConfContent(); + } +} +#+END_SRC + +**** 2. document make/config + +#+name: meta_config_file_hub +#+BEGIN_SRC d +static template readConfigDoc() { + <> + final auto readConfigDoc(M,E)(M _manifest, E _env) { + string config_file_str; + string conf_filename = "NONE"; + auto _conf_file_details = ConfigFilePaths!()(_manifest, _env); + auto possible_config_path_locations = _conf_file_details.possible_config_path_locations.sisu_document_make; + foreach(conf_fn; [_conf_file_details.config_filename_document_toml]) { + foreach(pth; possible_config_path_locations) { + auto conf_file = asNormalizedPath(chainPath(pth.to!string, conf_fn)).array; + conf_filename = conf_fn; + if (config_file_str.length > 0) { + // conf_filename = conf_fn; + break; + } + try { + if (exists(conf_file)) { + debug(io) { + writeln("WARNING (io debug) in config file found: ", conf_file); + } + config_file_str = conf_file.readText; + break; + } + } + catch (ErrnoException ex) { + } + catch (FileException ex) { + } + } + if (config_file_str.length > 0) { break; } + } + struct _ConfContent { + string filename() { + return conf_filename; + } + string filetype() { + return conf_filename.extension.chompPrefix("."); + } + auto content() { + return config_file_str; + } + } + return _ConfContent(); + } +} +#+END_SRC + +** A. TOML +*** 1. 
TOML read config files (config_local_site & sisu_document_make) :file:config: +**** TOML config_local_site + +#+name: meta_config_file_in +#+BEGIN_SRC d +static template configReadInSiteTOML() { + <> + final string configReadInSiteTOML(M,E)(M manifest, E env) { + auto conf_file_details = ConfigFilePaths!()(manifest, env); + string conf_toml = conf_file_details.config_filename_site_toml; + auto possible_config_path_locations = conf_file_details.possible_config_path_locations.config_local_site; + string config_file_str; + debug(io) { + writeln("WARNING (io debug) in config filename: ", conf_toml); + writeln("WARNING (io debug) in config possible path locations: ", possible_config_path_locations); + } + foreach(pth; possible_config_path_locations) { + auto conf_file = asNormalizedPath(chainPath(pth.to!string, conf_toml)).array; + if (config_file_str.length > 0) { + break; + } + try { + if (exists(conf_file)) { + debug(io) { + writeln("WARNING (io debug) in config file found: ", conf_file); + } + config_file_str = conf_file.readText; + break; + } + } + catch (ErrnoException ex) { + } + catch (FileException ex) { + } + } + return config_file_str; + } +} +#+END_SRC + +**** TOML sisu_document_make + +#+name: meta_config_file_in +#+BEGIN_SRC d +static template configReadInDocTOML() { + <> + final string configReadInDocTOML(M,E)(M manifest, E env) { + auto conf_file_details = ConfigFilePaths!()(manifest, env); + string conf_toml = conf_file_details.config_filename_document_toml; + auto possible_config_path_locations = conf_file_details.possible_config_path_locations.sisu_document_make; + string config_file_str; + debug(io) { + writeln("WARNING (io debug) in config filename: ", conf_toml); + writeln("WARNING (io debug) in config possible path locations: ", possible_config_path_locations); + } + foreach(pth; possible_config_path_locations) { + auto conf_file = asNormalizedPath(chainPath(pth.to!string, conf_toml)).array; + if (config_file_str.length > 0) { + break; + } + try { + if (exists(conf_file)) { + debug(io) { + writeln("WARNING (io debug) in config file found: ", conf_file); + } + config_file_str = conf_file.readText; + break; + } + } + catch (ErrnoException ex) { + } + catch (FileException ex) { + } + } + return config_file_str; + } +} +#+END_SRC + +*** 2. TOML config files get + +#+name: meta_config_file_toml +#+BEGIN_SRC d +static template configTOML() { + import toml; // + <> + auto configTOML(string configuration, string conf_toml_filename) { + TOMLDocument _toml_conf; + try { + _toml_conf = parseTOML(configuration); // parseTOML(cast(string)(configuration)); + } + catch(ErrnoException e) { + stderr.writeln("Toml problem with content for ", conf_toml_filename); + stderr.writeln(e.msg); + } + return _toml_conf; + } +} +#+END_SRC + +*** 3. 
TOML config (config_local_site & sisu_document_make) :file:config:hub: + +#+name: meta_config_file_hub +#+BEGIN_SRC d +static template configReadSiteTOML() { + <> + import toml; + final auto configReadSiteTOML(M,E)(M _manifest, E _env) { + auto _configuration = configReadInSiteTOML!()(_manifest, _env); + auto _conf_file_details = ConfigFilePaths!()(_manifest, _env); + string _conf_toml = _conf_file_details.config_filename_site_toml; + auto _toml_conf = configTOML!()(_configuration, _conf_toml); + return _toml_conf; + } +} +static template configReadDocTOML() { + <> + import toml; + final auto configReadDocTOML(M,E)(M _manifest, E _env) { + auto _configuration = configReadInDocTOML!()(_manifest, _env); + auto _conf_file_details = ConfigFilePaths!()(_manifest, _env); + string _conf_toml = _conf_file_details.config_filename_document_toml; + auto _toml_conf = configTOML!()(_configuration, _conf_toml); + return _toml_conf; + } +} +#+END_SRC + +* B. get _markup source_, read file :module:sdp:source_read_source_files: +** 0. module template (includes tuple) + +#+BEGIN_SRC d :tangle ../src/sdp/source/read_source_files.d +/++ + module source_read_source_files;
+ - open markup files
+ - if master file scan for addional files to import/insert ++/ +module sdp.source.read_source_files; +static template SiSUrawMarkupContent() { + import + sdp.meta.rgx; + <> + mixin SiSUrgxInit; + static auto rgx = Rgx(); + string[] _images=[]; + auto _extract_images(S)(S content_block) { + string[] images_; + auto _content_block = content_block.to!string; + if (auto m = _content_block.matchAll(rgx.image)) { + images_ ~= m.captures[1].to!string; + } + return images_; + } + auto rawsrc = RawMarkupContent(); + auto SiSUrawMarkupContent(O,Fn)(O _opt_action, Fn fn_src) { + auto _0_header_1_body_content_2_insert_filelist_tuple + = rawsrc.sourceContentSplitIntoHeaderAndBody(_opt_action, rawsrc.sourceContent(fn_src), fn_src); + return _0_header_1_body_content_2_insert_filelist_tuple; + } + struct RawMarkupContent { + final sourceContent(in string fn_src) { + auto raw = MarkupRawUnit(); + auto source_txt_str + = raw.markupSourceReadIn(fn_src); + return source_txt_str; + } + final auto sourceContentSplitIntoHeaderAndBody(O)(O _opt_action, in string source_txt_str, in string fn_src="") { + auto raw = MarkupRawUnit(); + string[] insert_file_list; + string[] images_list; + auto t + = raw.markupSourceHeaderContentRawLineTupleArray(source_txt_str); + auto header_raw = t[0]; + auto sourcefile_body_content = t[1]; + if (fn_src.match(rgx.src_fn_master)) { // filename with path needed if master file (.ssm) not otherwise + auto ins = Inserts(); + auto tu + = ins.scan_master_src_for_insert_files_and_import_content(_opt_action, sourcefile_body_content, fn_src); + static assert(!isTypeTuple!(tu)); + sourcefile_body_content = tu[0]; + insert_file_list = tu[1].dup; + images_list = tu[2].dup; + } else if (_opt_action.source || _opt_action.sisupod) { + auto ins = Inserts(); + auto tu + = ins.scan_master_src_for_insert_files_and_import_content(_opt_action, sourcefile_body_content, fn_src); + static assert(!isTypeTuple!(tu)); + images_list = tu[2].dup; + } + t = tuple( + header_raw, + sourcefile_body_content, + insert_file_list, + images_list + ); + static assert(t.length==4); + return t; + } + } + struct MarkupRawUnit { + import std.file; + <> + <> + <> + <> + <> + <> + } + struct Inserts { + auto scan_subdoc_source(O)( + O _opt_action, + char[][] markup_sourcefile_insert_content, + string fn_src + ) { + mixin SiSUrgxInitFlags; + <> + foreach (line; markup_sourcefile_insert_content) { + <> + } // end src subdoc (inserts) loop + <> + } + auto scan_master_src_for_insert_files_and_import_content(O)( + O _opt_action, + char[][] sourcefile_body_content, + string fn_src + ) { + import std.algorithm; + mixin SiSUrgxInitFlags; + <> + foreach (line; sourcefile_body_content) { + <> + } // end src doc loop + <> + } + } +} +#+END_SRC + +** get markup source, read file :source:markup: +*** read file, source string [#A] :string: + +#+name: meta_markup_source_raw_read_file_source_string +#+BEGIN_SRC d +final private string readInMarkupSource(in char[] fn_src) { + enforce( + exists(fn_src)!=0, + "file not found: «" ~ + fn_src ~ "»" + ); + string source_txt_str; + try { + if (exists(fn_src)) { + debug(io) { + writeln("in src, markup source file found: ", fn_src); + } + source_txt_str = fn_src.readText; + } + } + catch (ErrnoException ex) { + } + catch (UTFException ex) { + // Handle validation errors + } + catch (FileException ex) { + // Handle errors + } + std.utf.validate(source_txt_str); + return source_txt_str; +} +#+END_SRC + +*** document header & content, array.length == 2 [#A] :array: + +here you split document header and 
body, an array.length == 2 +split is on first match of level A~ (which is required) + +#+name: meta_markup_source_raw_doc_header_and_content_split +#+BEGIN_SRC d +final private char[][] header0Content1(in string src_text) { + /+ split string on _first_ match of "^:?A~\s" into [header, content] array/tuple +/ + char[][] header_and_content; + auto m = (cast(char[]) src_text).matchFirst(rgx.heading_a); + header_and_content ~= m.pre; + header_and_content ~= m.hit ~ m.post; + assert(header_and_content.length == 2, + "document markup is broken, header body split == " + ~ header_and_content.length.to!string + ~ "; (header / body array split should == 2 (split is on level A~))" + ); + return header_and_content; +} +#+END_SRC + +*** source line array :array: + +#+name: meta_markup_source_raw_source_line_array +#+BEGIN_SRC d +final private char[][] markupSourceLineArray(in char[] src_text) { + char[][] source_line_arr + = (cast(char[]) src_text).split(rgx.newline_eol_strip_preceding); + return source_line_arr; +} +#+END_SRC + +*** source content raw line array :array: +- used for regular .sst files; master .ssm files and; .ssi inserts +- regex is passed for relevant enforce match + +**** read in file + +#+name: meta_markup_source_raw_read_in_file +#+BEGIN_SRC d +auto markupSourceReadIn(in string fn_src) { + static auto rgx = Rgx(); + enforce( + fn_src.match(rgx.src_pth_sst_or_ssm), + "not a sisu markup filename: «" ~ + fn_src ~ "»" + ); + auto source_txt_str = readInMarkupSource(fn_src); + return source_txt_str; +} +#+END_SRC + +**** tuple (a) header, (b) body content, (c) file insert list & (d) image list? + +- header +- body content +- file insert list +- [image list?] + +#+name: meta_markup_source_raw_tuple_of_header_and_body +#+BEGIN_SRC d +auto markupSourceHeaderContentRawLineTupleArray(in string source_txt_str) { + string[] file_insert_list = []; + string[] images_list = []; + auto hc = header0Content1(source_txt_str); + auto header = hc[0]; + char[] source_txt = hc[1]; + auto source_line_arr = markupSourceLineArray(source_txt); + auto t = tuple( + header, + source_line_arr, + file_insert_list, + images_list + ); + return t; +} +#+END_SRC + +**** get insert source line array + +#+name: meta_markup_source_raw_get_insert_source_line_array +#+BEGIN_SRC d +final char[][] getInsertMarkupSourceContentRawLineArray( + in char[] fn_src_insert, + Regex!(char) rgx_file +) { + enforce( + fn_src_insert.match(rgx_file), + "not a sisu markup filename: «" ~ + fn_src_insert ~ "»" + ); + auto source_txt_str = readInMarkupSource(fn_src_insert); + auto source_line_arr = markupSourceLineArray(source_txt_str); + return source_line_arr; +} +#+END_SRC + +** get markup source, master file & inserts :masterfile:inserts: +[[./sdp.org][sdp]] [[./][org/]] + +*** scan inserts (sub-document) source :scan_insert_src: +**** scan subdoc source + +#+name: meta_inserts_scan +#+BEGIN_SRC d +char[][] contents_insert; +auto type1 = flags_type_init; +auto fn_pth_full = fn_src.match(rgx.src_pth_sst_or_ssm); +auto markup_src_file_path = fn_pth_full.captures[1]; +#+END_SRC + +**** loop insert (sub-document) + +#+name: meta_inserts_scan_loop +#+BEGIN_SRC d +if (type1["curly_code"] == 1) { + type1["header_make"] = 0; + type1["header_meta"] = 0; + if (line.matchFirst(rgx.block_curly_code_close)) { + type1["curly_code"] = 0; + } + contents_insert ~= line; +} else if (line.matchFirst(rgx.block_curly_code_open)) { + type1["curly_code"] = 1; + type1["header_make"] = 0; + type1["header_meta"] = 0; + contents_insert ~= line; +} else if 
(type1["tic_code"] == 1) { + type1["header_make"] = 0; + type1["header_meta"] = 0; + if (line.matchFirst(rgx.block_tic_close)) { + type1["tic_code"] = 0; + } + contents_insert ~= line; +} else if (line.matchFirst(rgx.block_tic_code_open)) { + type1["tic_code"] = 1; + type1["header_make"] = 0; + type1["header_meta"] = 0; + contents_insert ~= line; +} else if ( + (type1["header_make"] == 1) + && line.matchFirst(rgx.native_header_sub) +) { + type1["header_make"] = 1; + type1["header_meta"] = 0; +} else if ( + (type1["header_meta"] == 1) + && line.matchFirst(rgx.native_header_sub) +) { + type1["header_meta"] = 1; + type1["header_make"] = 0; +} else if (auto m = line.match(rgx.insert_src_fn_ssi_or_sst)) { + type1["header_make"] = 0; + type1["header_meta"] = 0; + auto insert_fn = m.captures[2]; + auto insert_sub_pth = m.captures[1]; + auto fn_src_insert + = chainPath(markup_src_file_path, insert_sub_pth ~ insert_fn).array; + auto raw = MarkupRawUnit(); + auto markup_sourcesubfile_insert_content + = raw.getInsertMarkupSourceContentRawLineArray(fn_src_insert, rgx.src_fn_find_inserts); + debug(insert_file) { + tell_l("red", line); + tell_l("red", fn_src_insert); + tell_l("fuchsia", "ERROR"); + writeln( + " length contents insert array: ", + markup_sourcesubfile_insert_content.length + ); + } + if (_opt_action.source || _opt_action.sisupod) { + _images ~= _extract_images(markup_sourcesubfile_insert_content); + } + auto ins = Inserts(); + /+ + - 1. load file + - 2. read lines + - 3. scan lines + - a. if filename insert, and insert filename + - repeat 1 + - b. else + - add line to new array; + - build image list, search for any image files to add to image list + +/ +} else { + type1["header_make"] = 0; + type1["header_meta"] = 0; + contents_insert ~= line; // images to extract for image list? 
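+  /+ when source or sisupod output is requested (_opt_action.source /
+     _opt_action.sisupod) the line is also scanned for image references,
+     so that any images it names are added to the image list for bundling +/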
+ if (_opt_action.source || _opt_action.sisupod) { + auto _image_linelist = _extract_images(line); + if (_image_linelist.length > 0) { + _images ~= _image_linelist; + } + } +} +#+END_SRC + +**** post loop + +#+name: meta_inserts_scan_post +#+BEGIN_SRC d +auto t = tuple( + contents_insert, + _images +); +return t; +#+END_SRC + +*** scan document source :scan_src: +**** scan doc source + +#+name: meta_master_doc_scan_for_insert_filenames +#+BEGIN_SRC d +char[][] contents; +auto type = flags_type_init; +auto fn_pth_full = fn_src.match(rgx.src_pth_sst_or_ssm); +auto markup_src_file_path = fn_pth_full.captures[1]; +char[][] contents_insert; +string[] _images =[]; +string[] insert_file_list =[]; +#+END_SRC + +**** include inserts: _loop master_ scan for inserts (insert documents) + +#+name: meta_master_doc_scan_for_insert_filenames_loop +#+BEGIN_SRC d +if (type["curly_code"] == 1) { + if (line.matchFirst(rgx.block_curly_code_close)) { + type["curly_code"] = 0; + } + contents ~= line; +} else if (line.matchFirst(rgx.block_curly_code_open)) { + type["curly_code"] = 1; + contents ~= line; +} else if (type["tic_code"] == 1) { + if (line.matchFirst(rgx.block_tic_close)) { + type["tic_code"] = 0; + } + contents ~= line; +} else if (line.matchFirst(rgx.block_tic_code_open)) { + type["tic_code"] = 1; + contents ~= line; +} else if (auto m = line.match(rgx.insert_src_fn_ssi_or_sst)) { + auto insert_fn = m.captures[2]; + auto insert_sub_pth = m.captures[1]; + auto fn_src_insert + = chainPath(markup_src_file_path, insert_sub_pth ~ insert_fn).array; + insert_file_list ~= fn_src_insert.to!string; + auto raw = MarkupRawUnit(); + /+ TODO +/ + auto markup_sourcefile_insert_content + = raw.getInsertMarkupSourceContentRawLineArray(fn_src_insert, rgx.src_fn_find_inserts); + debug(insert_file) { + tell_l("red", line); + tell_l("red", fn_src_insert); + writeln( + " length contents insert array: ", + markup_sourcefile_insert_content.length + ); + } + auto ins = Inserts(); + auto contents_insert_tu = ins.scan_subdoc_source( + _opt_action, + markup_sourcefile_insert_content, + fn_src_insert.to!string + ); + contents ~= contents_insert_tu[0]; // images to extract for image list? + if (_opt_action.source || _opt_action.sisupod) { + auto _image_linelist = _extract_images(contents_insert_tu[0]); + if (_image_linelist.length > 0) { + _images ~= _image_linelist; + } + } + /+ + - 1. load file + - 2. read lines + - 3. scan lines + - a. if filename insert, and insert filename + - repeat 1 + - b. else + - add line to new array; + - build image list, search for any image files to add to image list + +/ +} else { + contents ~= line; + if (_opt_action.source || _opt_action.sisupod) { + auto _image_linelist = _extract_images(line); + if (_image_linelist.length > 0) { + _images ~= _image_linelist; + } + } +} +#+END_SRC + +**** post loop + +#+name: meta_master_doc_scan_for_insert_filenames_post +#+BEGIN_SRC d +string[] images = []; +foreach(i; uniq(_images.sort())) { + images ~= i; +} +debug(insert_file) { + writeln(__LINE__); + writeln(contents.length); +} +auto t = tuple( + contents, + insert_file_list, + images +); +return t; +#+END_SRC + +* __END__ -- cgit v1.2.3