2024-01-31 16:51:48 +01:00
|
|
|
#!/usr/bin/zsh
#
# Quick Site Generator 2 is a static website generator inspired by Nikola.
# It is written for the Z shell (zsh) because that's what I use and also
# because I like it better than Bash.
#
# This script is an almost complete rewrite of my old script because it
# became overly complicated and had way too many bugs, even though it
# worked on simple sites.
#
# https://github.com/kekePower/qsgen2/
#

VERSION="0.4.0 beta 1" # Wed-2024-02-14
QSGEN="Quick Site Generator 2"

# Set to true or false.
# This will show debug information from every function in this script.
# You can also set debug=true in a single function if you want to debug
# only that specific one.
globaldebug=false

# Use Zsh fpath to set the path to some extra functions
fpath=(${HOME}/bin/include/common ${HOME}/bin/include/qsgen2/lang $fpath)

# In this case, let's load the 'include' function
autoload include

# Including some colors to the script
include common/colors

echo "${magenta}${blue_bg} ${QSGEN} ${end}${bold_white}${blue_bg}${VERSION} ${end}"

# Loading Zsh modules
zmodload zsh/files
zmodload zsh/datetime
|
2024-01-28 18:24:38 +01:00
|
|
|
|
|
|
|
# Check for, and source, the config file for this specific website.
# Without a config there is nothing to build, so abort with a non-zero
# status if it is missing (the old code exited with status 0 here).
if [[ -f $(pwd)/config ]]; then
    if (${globaldebug}); then echo "${red}Config file found and sourced${end}\n${yellow} - $(pwd)/config${end}"; fi
    builtin source $(pwd)/config
else
    echo "${red}Cannot find configuration file.${end}"
    echo "${yellow} - Please create the file 'config' in your project directory.${end}"
    exit 1
fi
|
2024-01-31 17:50:56 +01:00
|
|
|
|
2024-02-14 11:58:21 +01:00
|
|
|
# Load the language file named by ${language} (set in the site config).
# Every directory in fpath is searched; the first match is sourced and
# is expected to populate the qsgenlang translation table.
typeset -A qsgenlang
found=false

for dir in $fpath; do
    if [[ -f "${dir}/${language}" ]]; then
        # echo "Language file: ${dir}/${language}"
        source "${dir}/${language}"
        found=true
        break
    fi
done

if ! $found; then
    echo "Language file '${language}' not found in fpath."
    # Fallback or error handling
fi
|
2024-02-14 11:58:21 +01:00
|
|
|
|
2024-02-06 09:53:22 +01:00
|
|
|
# When global debugging is on, dump the (comment-stripped) contents of
# the config file so the user can see exactly what was loaded.
if (${globaldebug}); then
    # grep -v '#' drops any line containing '#'; awk strips the first
    # word (the config key prefix) from each remaining line.
    # (The old 'cat file | grep' was a useless use of cat.)
    qsconfig=$( grep -v '#' "$(pwd)/config" | awk '{print substr($0, index($0, " ") + 1)}' )
    echo "Content of Config file"
    # ${(f)...} splits on newlines; zsh does NOT word-split an unquoted
    # scalar, so the old loop ran once over the whole blob.
    for qslines in ${(f)qsconfig}
    do
        echo "${yellow}${qslines}${end}"
    done
fi
|
|
|
|
|
2024-01-31 14:52:12 +01:00
|
|
|
function _msg() {
    # Print a localized, color-coded message line.
    #
    # Usage: _msg <type> <key-or-string>...
    #   type - std|info|debug|other|sub|main, selects the color
    #   Each remaining argument is looked up in the qsgenlang
    #   translation table; arguments with no translation are emitted
    #   verbatim, so keys and literal strings can be mixed freely.
    local type=$1
    shift # $@ now contains only keys or additional strings

    # Assemble the message, translating each argument when possible.
    local full_msg=""
    local arg
    for arg in "$@"; do
        if [[ -n "${qsgenlang[$arg]}" ]]; then
            full_msg+="${qsgenlang[$arg]}"
        else
            full_msg+="$arg"
        fi
    done

    # Determine the color based on the type.
    local color="${end}" # Default to no color if type is unrecognized
    case $type in
        std)   color="${green}" ;;
        info)  color="${yellow}" ;;
        debug) color="${red}" ;;
        other) color="${bold_yellow}" ;;
        sub)   color="${magenta}" ;;
        main)  color="${white}${green_bg}" ;;
    esac

    # %b interprets backslash escapes in the message. The color codes
    # and message are passed as arguments, never interpolated into the
    # format string (a '%' in a message would otherwise corrupt output).
    printf '%b%b%b\n' "${color}" "${full_msg}" "${end}"
}
|
|
|
|
|
2024-02-14 11:58:21 +01:00
|
|
|
function _version() {
    # Print the version/copyright banner (localized), the project URL
    # and a usage hint, then terminate the script.
    # $1 is the script's basename, used in the usage hint.
    _msg info "_qsgen2_msg_7" "-$(strftime "%Y")"
    echo "${yellow}- https://github.com/kekePower/qsgen2/${end}"
    _msg info "_qsgen2_msg_8" " '${1} help' " "_qsgen2_msg_8.1"
    exit
}
|
|
|
|
|
|
|
|
function _help() {
    # Placeholder for the full help text.
    # This will also be translated some time in the future.
    printf '%s\n' "This is where I'll write the Help documentation."
    exit
}
|
|
|
|
|
|
|
|
# Top-level argument dispatch: handle version/help flags before doing
# any real work. ${0:t} is the script basename (zsh ':t' modifier).
case "$1" in
    version|-v|--version) _version ${0:t} ;;
    help|-h|--help)       _help ${0:t} ;;
esac

# Define cache files for blogs and pages
blog_cache_file="${project_dir}/.blog_cache"
pages_cache_file="${project_dir}/.pages_cache"
2024-02-05 16:20:39 +01:00
|
|
|
# 'force' rebuilds everything: emptying both cache files makes every
# page and blog post look new/changed on this run. Any other argument
# falls through untouched.
if [[ "${1}" == "force" ]]; then
    _msg sub "_qsgen2_msg_2"
    : >| "$blog_cache_file"  # Truncate the blog cache before doing update
    : >| "$pages_cache_file" # Truncate the page cache before doing update
fi
|
|
|
|
|
2024-01-29 21:40:26 +01:00
|
|
|
# Refuse to run when this site cannot (or should not) be generated:
# either 'generator' is unset in the config, or the current directory
# is a git checkout (e.g. the qsgen2 source tree itself).
if [[ -z ${generator} || -d $(pwd)/.git ]]; then
    _msg debug "_qsgen2_msg_3"
    exit
fi
|
|
|
|
|
|
|
|
# We define the variable 'engine' based on what's in the 'config' file.
if [[ ${generator} == "native" ]]; then
    # Usage: ${engine} ${1} - Where 1 is the file you want to convert
    engine=_zhtml
    export file_ext="qst"
elif [[ ${generator} == "markdown" ]]; then
    # Locate pandoc anywhere on PATH instead of hard-coding
    # /usr/local/bin/pandoc (an install there is still found via PATH).
    engine=$(command -v pandoc)
    if [[ -z ${engine} ]]; then
        _msg other "_qsgen2_msg_4"
        _msg other "https://github.com/jgm/pandoc/releases"
        exit 1
    else
        # Usage: ${engine} ${1} - Where 1 is the file you want parsed
        engine_opts=
        export file_ext="md"
    fi
else
    # Unknown generator value: nothing we can do.
    _msg debug "_qsgen2_msg_5"
    exit 1
fi
|
2024-02-02 12:36:11 +01:00
|
|
|
|
2024-02-14 18:50:48 +01:00
|
|
|
function _run_engine() {
    # Convert one unit of content to HTML with the configured engine.
    # Usage: _run_engine <input>
    #   native:   <input> is handed to the _zhtml engine as-is
    #   markdown: <input> is the page content, piped through pandoc
    local debug=false

    if [[ ${generator} == "native" ]]; then
        ${engine} ${1}
    elif [[ ${generator} == "markdown" ]]; then
        # Pipe the content INTO the engine. The old code had the quotes
        # around the whole pipeline, so it echoed the literal string
        # "<content> | pandoc ..." instead of executing it.
        echo "${1}" | ${engine} ${engine_opts}
    else
        _msg debug "ERROR running engine: ${engine}!"
        _msg info "Usage: _run_engine <input>"
        exit 1
    fi
}
|
|
|
|
|
2024-02-14 11:58:21 +01:00
|
|
|
if (${globaldebug}); then _msg debug "_qsgen2_msg_6"; fi

# Work from the project root from here on.
builtin cd ${project_dir}

# Timestamps used by the page/blog templates (zsh/datetime strftime).
export today=$(strftime "%Y-%m-%d - %T")
export blogdate=$(strftime "%a-%Y-%b-%d")
|
function _list_pages() {
    # Collect every page source (*.${file_ext}) in the project root
    # into pages_file_array. Exports no_pages_found=true when the
    # project has no page sources at all.

    local debug=false
    [[ ${globaldebug} == "true" ]] && debug=true

    # Start from a clean slate
    pages_file_array=()
    export no_pages_found=false

    # Unmatched patterns expand to nothing (only within this function)
    setopt local_options null_glob

    # Capture matching files directly into an array
    local -a page_sources=(*.${file_ext})

    if (( ${#page_sources} == 0 )); then
        if ${debug}; then _msg debug "${0:t}_msg_1" " ${file_ext}."; fi
        export no_pages_found=true
        return
    fi

    for file in "${page_sources[@]}"; do
        if ${debug}; then _msg debug "${0:t}_msg_2" " ${file}"; fi
        pages_file_array+=("$file")
    done
}
|
|
|
|
|
2024-01-31 22:22:22 +01:00
|
|
|
function _list_blogs() {
|
2024-02-05 21:19:22 +01:00
|
|
|
|
2024-02-15 13:33:11 +01:00
|
|
|
if [[ ${globaldebug} == "true" ]]; then
|
|
|
|
local debug=true
|
|
|
|
else
|
|
|
|
local debug=false
|
|
|
|
fi
|
2024-01-31 22:22:22 +01:00
|
|
|
|
2024-02-05 21:19:22 +01:00
|
|
|
# Initialize or clear the blogs array to ensure it's empty before adding files
|
|
|
|
blogs_file_array=()
|
|
|
|
|
|
|
|
export no_blogs_found=false
|
|
|
|
|
|
|
|
# Temporarily set null_glob for this function
|
|
|
|
setopt local_options null_glob
|
|
|
|
|
|
|
|
# Directly capture matching blog files into an array
|
|
|
|
local -a blog_files=(blog/*.blog(On))
|
|
|
|
|
|
|
|
if (( ${#blog_files[@]} == 0 )); then
|
2024-02-14 15:51:46 +01:00
|
|
|
if ${debug}; then _msg debug "${0:t}_msg_1"; fi
|
2024-02-05 21:19:22 +01:00
|
|
|
export no_blogs_found=true
|
|
|
|
return
|
|
|
|
else
|
|
|
|
for file in "${blog_files[@]}"
|
|
|
|
do
|
2024-02-14 15:51:46 +01:00
|
|
|
if ${debug}; then _msg debug "${0:t}_msg_2" " $file"; fi
|
2024-02-05 21:19:22 +01:00
|
|
|
blogs_file_array+=("$file")
|
|
|
|
done
|
|
|
|
fi
|
|
|
|
|
2024-01-31 20:59:08 +01:00
|
|
|
}
|
|
|
|
|
2024-02-03 20:30:06 +01:00
|
|
|
|
2024-01-28 18:24:38 +01:00
|
|
|
# BLOG CACHE
function _blog_cache() {
    # Decide which blog sources need regeneration by comparing each
    # file's md5 hash against the entries stored in $blog_cache_file.
    # Changed or new files are collected in make_blog_array, and the
    # cache file is rewritten with the current hashes.

    local debug=false
    [[ ${globaldebug} == "true" ]] && debug=true

    _list_blogs

    # filename -> md5 of its content, as of the previous run
    typeset -A blog_cache

    # Load the existing cache (one "name:hash" entry per line)
    if [[ -f $blog_cache_file ]]; then
        while IFS=':' read -r name hash; do
            blog_cache[$name]=$hash
            if ${debug}; then _msg debug "${0:t}_msg_1" " ${blog_cache[${name}]}"; fi
        done < "$blog_cache_file"
    fi

    # Blog files that need (re)generation this run
    make_blog_array=()

    for blog_file in ${blogs_file_array[@]}; do
        # Hash the current content
        current_hash=$(md5sum "$blog_file" | awk '{print $1}')

        if ${debug}; then _msg debug "${0:t}_msg_2" " ${blog_file}"; fi
        if ${debug}; then _msg debug "${0:t}_msg_3" " ${current_hash}"; fi

        # New file (no cache entry) or content changed (hash differs)
        if [[ ${blog_cache[$blog_file]} != "$current_hash" ]]; then
            if ${debug}; then _msg debug "${0:t}_msg_4" " ${blog_file}"; fi
            if ${debug}; then _msg debug "${0:t}_msg_5" " ${current_hash}"; fi
            make_blog_array+=("$blog_file")
            blog_cache[$blog_file]=$current_hash
        fi
    done

    # Persist the updated cache: truncate, then rewrite every entry
    : >| "$blog_cache_file"
    for name in "${(@k)blog_cache}"; do
        echo "$name:${blog_cache[$name]}" >> "$blog_cache_file"
    done
}
|
|
|
|
|
|
|
|
|
|
|
|
# PAGES CACHE
# Returns the array pages_array()
function _pages_cache() {
    # Same hashing scheme as _blog_cache, but for page sources: pages
    # whose md5 differs from the entry in $pages_cache_file (or that
    # have no entry) end up in pages_array, and the cache file is
    # rewritten with the current hashes.

    local debug=false
    [[ ${globaldebug} == "true" ]] && debug=true

    # filename -> md5 of its content, as of the previous run
    typeset -A pages_cache

    _list_pages

    # Load the existing cache (one "name:hash" entry per line)
    if [[ -f $pages_cache_file ]]; then
        while IFS=':' read -r name hash; do
            pages_cache[$name]=$hash
            if ${debug}; then _msg debug "${0:t}_msg_1" " ${pages_cache[${name}]}"; fi
        done < "$pages_cache_file"
    fi

    # Page files that need (re)generation this run
    pages_array=()

    for file in ${pages_file_array[@]}; do
        # Hash the current content
        current_hash=$(md5sum "$file" | awk '{print $1}')

        if ${debug}; then _msg debug "${0:t}_msg_2" " ${pages_cache[$file]}"; fi
        if ${debug}; then _msg debug "${0:t}_msg_3" " current_cache: ${current_hash}"; fi

        # New file (no cache entry) or content changed (hash differs)
        if [[ ${pages_cache[$file]} != "$current_hash" ]]; then
            if ${debug}; then _msg debug "${0:t}_msg_4" " ${pages_cache[$file]}"; fi
            if ${debug}; then _msg debug "${0:t}_msg_5" " current_cache: ${current_hash}"; fi
            pages_array+=("$file")
            pages_cache[$file]=$current_hash
        fi
    done

    # Persist the updated cache: truncate, then rewrite every entry
    : >| "$pages_cache_file"
    for name in "${(@k)pages_cache}"; do
        echo "$name:${pages_cache[$name]}" >> "$pages_cache_file"
    done
}
|
|
|
|
|
|
|
|
function _last_updated() {
    # Replace the #updated placeholder in a string buffer with a
    # "Last updated ..." footer line and print the result to stdout.

    local debug=false
    [[ ${globaldebug} == "true" ]] && debug=true

    local upd_msg="Last updated ${today} by <a href=\"https://blog.kekepower.com/qsgen2.html\">${QSGEN} ${VERSION}</a>"

    if ${debug}; then _msg debug "${0:t}_msg_1"; fi
    if ${debug}; then _msg debug "${0:t}_msg_2" " ${upd_msg}"; fi

    local buffer="${1}"

    # sed with '|' delimiters because upd_msg is full of '/'.
    # Emit the substituted buffer directly.
    printf '%s\n' "${buffer}" | sed -e "s|#updated|${upd_msg}|"
}
|
|
|
|
|
2024-01-30 15:28:34 +01:00
|
|
|
function _f_last_updated() {
    # Replace the #updated placeholder with a "Last updated ..."
    # footer directly inside the file given as ${1} (in-place sed).

    local debug=false
    [[ ${globaldebug} == "true" ]] && debug=true

    local upd_msg="Last updated ${today} by <a href=\"https://blog.kekepower.com/qsgen2.html\">${QSGEN} ${VERSION}</a>"

    if ${debug}; then
        _msg debug "${0:t}_msg_1" " ${1}"
        _msg debug "${0:t}_msg_2" " ${upd_msg}"
    fi

    # NOTE: the old implementation also read the whole file into a
    # variable and computed a ${content//#updated/...} replacement, but
    # never used the result (and in zsh '//#pat' anchors the pattern at
    # the start of the string anyway); that dead code has been removed.
    if [[ -f "${1}" ]]; then
        # '|' delimiter because upd_msg contains '/'
        sed -i -e "s|#updated|${upd_msg}|" "${1}"
    else
        _msg debug "${0:t}_msg_3" " '${1}' " "${0:t}_msg_3.1"
    fi
}
|
|
|
|
|
2024-01-29 21:40:26 +01:00
|
|
|
function _file_to_lower() {
    # Normalize a source file name for publishing: spaces become
    # dashes, everything is lowercased, leading junk is stripped, and
    # runs of characters outside [a-z0-9_.-] collapse to a single dash.
    local filename="${1}"

    # Replace spaces with dashes
    filename="${filename// /-}"

    # Lowercase and sanitize. The old code referenced a nonexistent
    # 'unknown' command (so the function returned nothing), never
    # lowercased despite its name, and used BRE where '+' is a literal.
    # '.' is kept valid so file extensions survive for the callers'
    # ${name%.*} handling.
    filename=$(printf '%s' "${filename}" \
        | tr '[:upper:]' '[:lower:]' \
        | sed -E -e 's/^[^a-z0-9_.]+//' -e 's/[^a-z0-9_.-]+/-/g')

    echo "${filename}"
}
|
|
|
|
|
|
|
|
|
2024-01-28 18:24:38 +01:00
|
|
|
function _pages() {
    # Generates all the new and updated Pages.
    # For each changed page (from pages_array, filled by _pages_cache):
    # load the theme template, extract the title, convert the body to
    # HTML via the configured engine, substitute template tags and
    # write the result under ${www_root}. Exports new_updated_pages so
    # later steps know whether any page work was done.

    if [[ ${globaldebug} == "true" ]]; then
        local debug=true
    else
        local debug=false
    fi

    # Load the cache for Pages
    if (${debug}) _msg debug "${0:t}_msg_1"
    _pages_cache

    if [[ ${no_pages_found} == "true" ]]; then
        _msg sub "${0:t}_msg_1"
        return
    fi

    _msg main "${0:t}_msg_3"

    if (( ${#pages_array[@]} > 0 )); then

        # If pages_array is not empty, we do work
        if (${debug}) _msg debug "${0:t}_msg_4"

        for pages_in_array in ${pages_array[@]}
        do
            if (${debug}) _msg debug "${0:t}_msg_5"
            local pages=${project_dir}/themes/${theme}/pages.tpl

            # Let's check if we can access the pages.tpl file.
            # If not, exit script.
            if [[ ! -f ${pages} ]]; then
                _msg info "${0:t}_msg_6" " ${pages}"
                exit
            else
                # Read template once per page (tag substitution below
                # overwrites pages_tpl, so it must be re-read each pass).
                if (${debug}) _msg debug "${0:t}_msg_7"
                local pages_tpl="$(<${pages})"
            fi

            _msg std " - ${pages_in_array%.*}.html"
            # Read the page source once
            if (${debug}) _msg debug "${0:t}_msg_9" " ${pages_in_array}"
            local page_content="$(<${pages_in_array})"

            # Grab the title from the Page
            if (${debug}) _msg debug "${0:t}_msg_10"
            if [[ ${generator} == "native" ]]; then
                # native: the first '#title=...' line wins
                while read -r line
                do
                    if [[ "$line" =~ ^#title=(.*) ]]; then
                        local page_title=${match[1]}
                        break
                        #local page_title=$( echo ${page_content} | head -2 | grep \#title | cut -d= -f2 )
                    fi
                done <<< "$page_content"
            elif [[ ${generator} == "markdown" ]]; then
                # markdown: the first '#'/'##' heading wins
                while IFS= read -r line
                do
                    # Check if the line starts with '#' and capture the line
                    if [[ "$line" == \#* ]]; then
                        # Remove all leading '#' characters and the first space (if present)
                        local page_title="${line#\#}" # Remove the first '#' character
                        page_title="${page_title#\#}" # Remove the second '#' character if present
                        page_title="${page_title#"${page_title%%[![:space:]]*}"}" # Trim leading whitespace
                        break # Exit the loop after finding the first heading
                    fi
                done <<< ${page_content}
            fi
            if (${debug}) _msg debug "${0:t}_msg_11" " ${page_title}"

            # Remove the #title line from the buffer. No longer needed.
            # (zsh does not word-split unquoted scalars, so newlines in
            # ${page_content} survive the unquoted echo here.)
            if (${debug}) _msg debug "${0:t}_msg_12"
            page_content=$( echo ${page_content} | grep -v \#title )

            # HTML'ify the page content
            if (${debug}) _msg debug "${0:t}_msg_13" " ${pages_in_array}"
            page_content=$( _run_engine "$page_content" )

            # Look for links, images and videos and convert them if present.
            if (${debug}) _msg debug "${0:t}_msg_14"
            if [[ $( echo ${page_content} | grep \#link ) ]]; then
                if (${debug}) _msg debug "${0:t}_msg_15"
                page_content=$( _link "${page_content}" )
            fi

            if [[ $( echo ${page_content} | grep \#showimg ) ]]; then
                if (${debug}) _msg debug "${0:t}_msg_16"
                page_content=$( _image "${page_content}" )
            fi

            if [[ $( echo ${page_content} | grep \#ytvideo ) ]]; then
                if (${debug}) _msg debug "${0:t}_msg_17"
                page_content=$( _youtube "${page_content}" )
            fi

            # Replace every #pagetitle, #tagline and #sitename in pages_tpl
            if (${debug}) _msg debug "${0:t}_msg_18"
            pages_tpl=$(echo "${pages_tpl}" | perl -pe "s|#pagetitle|${page_title}|gs; s|#tagline|${site_tagline}|gs; s|#sitename|${site_name}|gs")

            if (${debug}) _msg debug "${0:t}_msg_19"
            # Use awk for multi-line and special character handling
            pages_tpl=$( awk -v new_body="$page_content" '{sub(/BODY/, new_body)} 1' <(echo "${pages_tpl}") )

            # Replace #updated with today's date and #version with Name and Version to footer
            if (${debug}) _msg debug "${0:t}_msg_20"
            pages_tpl=$( _last_updated ${pages_tpl} )

            # Always use lowercase for file names
            if (${debug}) _msg debug "${0:t}_msg_21"
            pages_title_lower=$( _file_to_lower "${pages_in_array}" )

            # Clean up unused tags, if any
            if (${debug}) _msg debug "${0:t}_msg_22"
            pages_tpl=$( _cleanup "${pages_tpl}" )

            # Write pages_tpl to disk
            # _msg std "Writing ${www_root}/${pages_title_lower%.*}.html to disk."
            echo "${pages_tpl}" > ${www_root}/${pages_title_lower%.*}.html

            # Insert the blog to the front page if blog_in_index is true
            # and the file in the array is index.file_ext,
            # and if index.tmp.html exists and is not empty
            if [[ ${pages_in_array} == "index.${file_ext}" && ${blog_in_index} == "true" && -s "${project_dir}/blog/index.tmp.html" ]]; then
                if (${debug}) _msg sub "${0:t}_msg_23" " ${pages_in_array}"
                if (${debug}) _msg sub "${0:t}_msg_24" " ${blog_in_index}"
                if (${debug}) _msg sub "${0:t}_msg_25"
                if (${debug}) ls -l ${project_dir}/blog/index.tmp.html
                _add_blog_list_to_index
            fi

        done

        export new_updated_pages=true

    else
        # No pages changed. Still insert the blog list into the front
        # page when blog_in_index is set and blog/index.tmp.html exists
        # and is not empty.
        if [[ ${blog_in_index} == "true" && -s "${project_dir}/blog/index.tmp.html" ]]; then
            _msg std "${0:t}_msg_26"
            # NOTE(review): ${pages_in_array} is unset on this branch
            # (the loop above never ran), so this debug message prints
            # an empty value — confirm intended.
            if (${debug}) _msg sub "${0:t}_msg_27" " ${pages_in_array}"
            if (${debug}) _msg sub "${0:t}_msg_28" " ${blog_in_index}"
            if (${debug}) _msg sub "${0:t}_msg_25"
            if (${debug}) ls -l ${project_dir}/blog/index.tmp.html
            _add_blog_list_to_index
        fi

        _msg sub "${0:t}_msg_29"
        export new_updated_pages=false

    fi

}
|
|
|
|
|
|
|
|
function _blogs() {
  # Generate one HTML file per new/updated blog post and collect per-post
  # metadata into BLOG_META_STR_ARRAY for the blog index builders.
  # Relies on helpers defined elsewhere in this file: _list_blogs (sets
  # no_blogs_found), _blog_cache (fills make_blog_array), _run_engine,
  # _link, _image, _youtube, _last_updated, _cleanup and _msg.
  # Exports: BLOG_META_STR_ARRAY, new_updated_blogs.

  # Per-function debug switch, inherited from the global flag.
  if [[ ${globaldebug} == "true" ]]; then
    local debug=true
  else
    local debug=false
  fi

  # Running function _list_blogs
  if (${debug}) _msg debug "${0:t}_msg_1"
  _list_blogs

  # Nothing to do when _list_blogs found no posts.
  if [[ ${no_blogs_found} == "true" ]]; then
    _msg sub "${0:t}_msg_2"
    return
  fi

  _msg main "${0:t}_msg_3"

  # Running function _blog_cache
  if (${debug}) _msg debug "${0:t}_msg_4"
  _blog_cache

  # make_blog_array holds the blog source files that need (re)generation.
  if (( ${#make_blog_array[@]} > 0 )); then

    # Declare the array to hold metadata strings for each blog
    BLOG_META_STR_ARRAY=()

    # Regular blog creation process: the theme template is mandatory.
    if [[ -f ${project_dir}/themes/${theme}/blogs.tpl ]]; then
      local blog_tpl=$(<"${project_dir}/themes/${theme}/blogs.tpl")
    else
      _msg info "${0:t}_msg_5"
      exit
    fi

    for blog in "${make_blog_array[@]}"; do
      if (${debug}) _msg debug "${0:t}_msg_6" " ${blog}"

      # Whole source file of the post.
      local content="$(<"${blog}")"
      local sdate btitle ingress body blog_index blog_dir blog_url

      # Initialize variables to track if DATE and BLOG_TITLE are found
      local date_found=false
      local title_found=false

      # Process content line by line to validate the required metadata headers.
      while IFS= read -r line
      do
        # Check for the DATE line
        if [[ "${line}" == "DATE "* ]]; then
          if (${debug}) _msg debug "${0:t}_msg_7"
          date_found=true
        fi
        # Check for the BLOG_TITLE line
        if [[ "${line}" == "BLOG_TITLE "* ]]; then
          if (${debug}) _msg debug "${0:t}_msg_8"
          title_found=true
        fi
        # If both DATE and BLOG_TITLE are found, no need to continue checking
        if [[ "${date_found}" == true && "${title_found}" == true ]]; then
          break
        fi
      done <<< "${content}"

      # Check if DATE or BLOG_TITLE metadata is missing and log message
      if [[ "${date_found}" == false ]]; then
        _msg debug "${0:t}_msg_9" " ${blog}."
        continue # Skip this file and move to the next
      fi
      if [[ "${title_found}" == false ]]; then
        _msg debug "${0:t}_msg_10" " ${blog}."
        continue # Skip this file and move to the next
      fi

      # Extract blog information.
      # sdate splits the 'DATE a-b-c-d' header on '-'; based on the CAL*
      # substitutions below it appears to be (weekday, year, month, day)
      # (zsh arrays are 1-indexed) — TODO confirm against a source post.
      sdate=( $( echo ${content} | grep DATE | sed "s|DATE\ ||" | sed "s|\-|\ |g" ) )

      # Title extraction depends on the configured generator:
      # 'native' reads the BLOG_TITLE header, 'markdown' the first heading.
      if [[ ${generator} == "native" ]]; then
        while IFS= read -r line; do
          if [[ "$line" == "BLOG_TITLE "* ]]; then
            btitle="${line#BLOG_TITLE }"
            break
          fi
        done <<< "$content"
      elif [[ ${generator} == "markdown" ]]; then
        while IFS= read -r line; do
          if [[ "$line" == \#* ]]; then
            btitle="${line#\#}" # Remove the first '#' character
            btitle="${btitle#\#}" # Remove the second '#' character if present
            btitle="${btitle#"${btitle%%[![:space:]]*}"}" # Trim leading whitespace
            break # Exit the loop after finding the first heading
          fi
        done <<< "$content"
      fi

      # Pull the ingress and body out of their #..._START/#..._STOP markers.
      # Note: 'xargs' collapses the text onto one line so grep -Po can span it.
      ingress=$( echo ${content} | sed "s/'/\\\'/g" | xargs | grep -Po "#INGRESS_START\K(.*?)#INGRESS_STOP" | sed "s|\ \#INGRESS_STOP||" | sed "s|^\ ||" )
      body=$( echo ${content} | sed "s/'/\\\'/g" | xargs | grep -Po "#BODY_START\K(.*?)#BODY_STOP" | sed "s|\ \#BODY_STOP||" | sed "s|^\ ||" )

      # Slug for the output filename: lowercased title, spaces to '_',
      # punctuation stripped (${btitle:l} is zsh lowercasing).
      blog_index=$(echo "${btitle:l}" | sed 's/ /_/g; s/,//g; s/\.//g; s/://g; s/[()]//g')

      # Target path /blog/YEAR/month/DAY/slug.html (month lowercased).
      blog_dir="/blog/${sdate[2]}/${sdate[3]:l}/${sdate[4]}"
      blog_url="${blog_dir}/${blog_index}.html"

      if (${debug}) _msg debug "${0:t}_msg_11" " ${blog} " "${0:t}_msg_11.1"

      # Concatenate all metadata into a single string for the current blog
      # ('||' is the field separator used by _blog_idx_for_index).
      local metadata_str="SDATE: ${sdate[@]}||BTITLE: ${btitle}||INGRESS: ${ingress}||URL: ${blog_url}"
      # Append this metadata string to the array
      BLOG_META_STR_ARRAY+=("${metadata_str}")

      if (${debug}) _msg debug "${0:t}_msg_12" " ${blog}"

      _msg std " - ${blog_index}.html"

      # Prepare the blog template: substitute title/url/ingress/body.
      if (${debug}) _msg debug "${0:t}_msg_14" " ${blog}"
      local blog_content=$(
        echo "${blog_tpl}" | \
        perl -pe "\
        s|BLOGTITLE|${btitle}|g; \
        s|BLOGURL|${blog_url}|g; \
        s|\QINGRESS\E|${ingress}|g; \
        s|\QBODY\E|${body}|g \
        ")

      # Calendar placeholders in the template, filled from the DATE header.
      blog_content="${blog_content//CALNDAY/${sdate[4]}}"
      blog_content="${blog_content//CALYEAR/${sdate[2]}}"
      blog_content="${blog_content//CALMONTH/${sdate[3]}}"
      blog_content="${blog_content//CALADAY/${sdate[1]}}"

      # Run the configured markup engine (QStags or Markdown) over the page.
      if (${debug}) _msg debug "${0:t}_msg_15" " ${blog}"
      blog_content=$( _run_engine "${blog_content}" )

      # Look for links, images and videos and convert them if present.
      if (${debug}) _msg debug "${0:t}_msg_16"
      if [[ $( echo ${blog_content} | grep \#link ) ]]; then
        if (${debug}) _msg debug "${0:t}_msg_17"
        blog_content=$(_link "${blog_content}")
      fi
      if [[ $( echo ${blog_content} | grep \#showimg ) ]]; then
        if (${debug}) _msg debug "${0:t}_msg_18"
        blog_content=$(_image "${blog_content}")
      fi
      if [[ $( echo ${blog_content} | grep \#ytvideo ) ]]; then
        if (${debug}) _msg debug "${0:t}_msg_19"
        blog_content=$(_youtube "${blog_content}")
      fi

      # Replace every #tagline in blog_content
      if (${debug}) _msg debug "${0:t}_msg_20"
      blog_content=$( echo ${blog_content} | perl -pe "s|#tagline|${site_tagline}|gs; s|#sitename|${site_name}|gs; s|#pagetitle|${page_title}|gs" )

      blog_content=$(_last_updated "${blog_content}")
      blog_content=$(_cleanup "${blog_content}")

      # Create directory if it doesn't exist
      if (${debug}) _msg debug "${0:t}_msg_21" " ${www_root}${blog_dir}"
      [[ ! -d "${www_root}/${blog_dir}" ]] && mkdir -p "${www_root}/${blog_dir}"

      # Write to file
      if (${debug}) _msg debug "${0:t}_msg_22" " ${www_root}${blog_url}"
      echo "${blog_content}" > "${www_root}${blog_url}"

      # Drop per-post state before processing the next file.
      unset sdate btitle ingress body blog_index blog_dir blog_url

    done

    # Now BLOG_META_STR_ARRAY contains the metadata string for each blog post
    export BLOG_META_STR_ARRAY
    if (${debug}) _msg debug "${0:t}_msg_23"
    export new_updated_blogs=true

  else
    # Nothing new or updated.
    _msg sub "${0:t}_msg_24"
    export new_updated_blogs=false
  fi

  # Rebuild the blog index artifacts only when something changed.
  if [[ ${new_updated_blogs} == "true" ]]; then
    if (${debug}) _msg sub "${0:t}_msg_25"
    _blog_idx_for_index
    if (${debug}) _msg sub "${0:t}_msg_26"
    _blog_index
  fi

}
|
|
|
|
|
|
|
|
function _blog_idx_for_index() {
  # This function generates the file blog/index.tmp.html: one rendered
  # blog_list.tpl snippet per entry in BLOG_META_STR_ARRAY (filled by _blogs).
  # Relies on _msg and _run_engine, defined elsewhere in this file.

  if [[ ${globaldebug} == "true" ]]; then
    local debug=true
  else
    local debug=false
  fi

  _msg sub "${0:t}_msg_1" " ${project_dir}/blog/index.tmp.html"

  if (${debug}) _msg debug "${0:t}_msg_2"

  local blog_list_tpl=$(<${project_dir}/themes/${theme}/blog_list.tpl)
  local blog_list_content=""

  # Truncate file before writing new one (>| also overrides noclobber).
  : >| "${project_dir}/blog/index.tmp.html"

  # if (${debug}) _msg debug "${0:t}_msg_3" " ${BLOG_META_STR_ARRAY[@]}"

  # Unquoted array expansion is safe here because zsh does not word-split
  # parameter expansions; each element stays one metadata string.
  for meta_str in ${BLOG_META_STR_ARRAY[@]}
  do
    if (${debug}) _msg debug "${0:t}_msg_4"
    if (${debug}) _msg debug "${0:t}_msg_5" " ${meta_str}"

    # Split meta_str into individual metadata components
    # (zsh 's' flag: split on the literal '||' separator).
    local -a meta_array=("${(@s/||/)meta_str}")

    # Initialize variables to store each component
    local sdate btitle ingress url

    # Iterate over each component and extract information
    if (${debug}) _msg debug "${0:t}_msg_6"
    for component in "${meta_array[@]}"
    do
      case "${component}" in
        SDATE:*) sdate=${component#SDATE: } ;;
        BTITLE:*) btitle=${component#BTITLE: } ;;
        INGRESS:*) ingress=${component#INGRESS: } ;;
        URL:*) url=${component#URL: } ;;
      esac

    done

    # Re-split the space-separated date string into an array; the order
    # appears to be (weekday, year, month, day) — see _blogs; TODO confirm.
    local adate=( $( echo ${sdate} ) )
    local caladay="${adate[1]}"
    local calyear="${adate[2]}"
    local calmonth="${adate[3]}"
    local calnday="${adate[4]}"

    # Human-readable date shown in the list, e.g. "Wed - 14/02/2024".
    local bdate="${adate[1]} - ${adate[4]}/${adate[3]}/${adate[2]}"

    # Render this entry through the list template and append it.
    blog_list_content+=$(
      echo "${blog_list_tpl}" | \
      perl -pe "\
      s|BLOGURL|${site_url}${url}|g; \
      s|BLOGTITLE|${btitle}|g; \
      s|INGRESS|${ingress}|g; \
      s|BLOGDATE|${bdate}|g; \
      s|CALADAY|${caladay}|g; \
      s|CALNDAY|${calnday}|g; \
      s|CALMONTH|${calmonth}|g; \
      s|CALYEAR|${calyear}|g \
      ")

    # Reset per-entry state before the next metadata string.
    unset sdate btitle ingress url adate caladay calyear calmonth calnday

  done

  if (${debug}) _msg debug "${0:t}_msg_7" " ${engine} " "${0:t}_msg_7.1"
  # Catch any QStags or Markdown in the Ingress
  blog_list_content=$( _run_engine ${blog_list_content} )
  if (${debug}) _msg debug "${0:t}_msg_8" " ${project_dir}/blog/index.tmp.html"
  #if (${debug}) _msg debug "${0:t}_msg_9" " ${blog_list_content}"
  echo ${blog_list_content} > ${project_dir}/blog/index.tmp.html

}
|
|
|
|
|
|
|
|
function _blog_index() {
  # Build www_root/blog/index.html from the blog_index.tpl theme template
  # and the list generated into blog/index.tmp.html by _blog_idx_for_index.
  # Only runs when the blog is NOT embedded in the front page and something
  # was added/updated. Relies on _msg and _f_last_updated (defined elsewhere).

  if [[ ${globaldebug} == "true" ]]; then
    local debug=true
  else
    local debug=false
  fi

  # This function generates the www_root/blog/index.html file that gets its data from _blog_list_for_index()
  # ${new_updated_blogs} comes from the function _blogs if anything new or updated is detected
  if [[ ${blog_in_index} == "false" ]] && [[ ${new_updated_blogs} = "true" ]]; then

    if (${debug}) _msg debug "${0:t}_msg_1" "${blog_in_index}"
    if (${debug}) _msg debug "${0:t}_msg_2" "${new_updated_blogs}"
    if (${debug}) _msg debug "${0:t}_msg_3"
    if (${debug}) _msg debug "${0:t}_msg_4" " ${blog_in_index}"

    _msg std "${0:t}_msg_5" " ${www_root}/blog/index.html"

    # Template for the page and the pre-rendered list of posts.
    local blog_index_tpl=$(<${project_dir}/themes/${theme}/blog_index.tpl)
    local blog_index_list=$(<${project_dir}/blog/index.tmp.html)

    if (${debug}) _msg debug "${0:t}_msg_6"
    local blog_index_content=$(echo "${blog_index_tpl}" | perl -pe "s|#sitename|${site_name}|gs; s|#tagline|${site_tagline}|gs")
    if (${debug}) _msg debug "${0:t}_msg_7" " ${project_dir}/blog/index.tmp.html"
    # awk sub() replaces the first BODY placeholder with the list; '1' prints
    # every line. awk is used because the list may contain chars that would
    # confuse a sed/perl replacement pattern.
    blog_index_content=$( awk -v new_body="$blog_index_list" '{sub(/BODY/, new_body)} 1' <(echo "${blog_index_content}") )

    if (${debug}); then
      _msg debug "${0:t}_msg_8" " ${www_root}/blog/index.html"
      _msg debug "${0:t}_msg_9" " ${#blog_index_content}"
    fi
    echo "$blog_index_content" > ${www_root}/blog/index.html
    # Stamp the generated file with its "last updated" footer.
    _f_last_updated ${www_root}/blog/index.html

  fi

}
|
|
|
|
|
2024-02-02 16:09:11 +01:00
|
|
|
function _add_blog_list_to_index() {
|
|
|
|
|
2024-02-15 13:33:11 +01:00
|
|
|
if [[ ${globaldebug} == "true" ]]; then
|
|
|
|
local debug=true
|
|
|
|
else
|
|
|
|
local debug=false
|
|
|
|
fi
|
2024-02-02 16:09:11 +01:00
|
|
|
|
2024-02-03 20:37:34 +01:00
|
|
|
# Let's find the file 'index.qst' and add the blog if blog_in_index is true
|
2024-02-14 17:32:49 +01:00
|
|
|
if (${debug}) _msg debug "${0:t}_msg_1"
|
2024-02-05 20:55:22 +01:00
|
|
|
local blog_index_list=$(<${project_dir}/blog/index.tmp.html)
|
|
|
|
local site_index_file=$(<${www_root}/index.html)
|
|
|
|
echo "${site_index_file}" | awk -v new_body="${blog_index_list}" '{sub(/BLOGINDEX/, new_body)} 1' > "${www_root}/index.html"
|
2024-02-02 16:09:11 +01:00
|
|
|
|
|
|
|
}
|
|
|
|
|
2024-01-31 17:28:28 +01:00
|
|
|
function _sitemap() {
  # Regenerate www_root/sitemap.xml listing every generated HTML page.
  # Runs only when sitemap generation is enabled AND a blog or page was
  # actually added/updated in this run. Relies on _msg (defined elsewhere).

  if [[ ${globaldebug} == "true" ]]; then
    local debug=true
  else
    local debug=false
  fi

  # Check if sitemap is set to true and if there are updated Blogs or Pages before updating the sitemap.xml file.
  if [[ ${sitemap} == "true" ]] && ( [[ ${new_updated_blogs} == "true" ]] || [[ ${new_updated_pages} == "true" ]] ); then

    _msg main "${0:t}_msg_1"

    local sitemap_file="${www_root}/sitemap.xml"
    #local sitemap_blog="${www_root}/sitemap-blogs.xml"
    #local sitemap_page="${www_root}/sitemap-pages.xml"

    # Working on providing 2 sitemaps, 1 for pages and 1 for blogs:
    # sitemap-pages.xml, sitemap-blogs.xml, linked from the main sitemap.xml.

    # Start of the XML file. One grouped redirection instead of a redirect
    # per echo, and the target is always quoted so a www_root containing
    # spaces cannot break or truncate the wrong path.
    {
      echo '<?xml version="1.0" encoding="UTF-8"?>'
      echo "<!-- Sitemap generated by ${QSGEN} ${VERSION} - https://github.com/kekePower/qsgen2 -->"
      echo "<?xml-stylesheet type=\"text/xsl\" href=\"${site_url}/css/default-sitemap.xsl?sitemap=page\"?>"
      echo '<urlset'
      echo ' xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"'
      echo ' xmlns:xhtml="http://www.w3.org/1999/xhtml"'
      echo ' xmlns:image="http://www.google.com/schemas/sitemap-image/1.1"'
      echo '>'
    } > "${sitemap_file}"

    # Add each URL to the sitemap. The find output is streamed line by line,
    # which avoids building the whole file list in memory first.
    find "${www_root}" -type f -name "*.html" | while IFS= read -r file
    do
      # Remove www_root from the path and prepend site_url
      local url="${site_url}${file#$www_root}"
      # NOTE(review): 'stat -c %y' is GNU coreutils syntax; BSD/macOS stat
      # would need 'stat -f' instead — confirm target platforms.
      local lastmod=$(stat -c %y "${file}" 2>/dev/null | cut -d' ' -f1,2 | sed 's/ /T/' | sed 's/\..*$//')

      {
        echo " <url>"
        echo " <loc>${url}</loc>"
        echo " <lastmod><![CDATA[${lastmod}+01:00]]></lastmod>"
        echo " <changefreq><![CDATA[always]]></changefreq>"
        echo " <priority><![CDATA[1]]></priority>"
        echo " </url>"
      } >> "${sitemap_file}"
    done

    # End of the XML file
    echo '</urlset>' >> "${sitemap_file}"

    if ${debug}; then _msg debug "${0:t}_msg_2" " ${sitemap_file}"; fi

  fi

}
|
|
|
|
|
|
|
|
function _link() {
  # Convert '#link URL¤TEXT' tags in the given string into HTML anchors.
  # External (http/https) destinations get a small "external site" icon.
  #
  # $1     - text that may contain '#link' tags
  # stdout - the text with every '#link' tag expanded

  if [[ ${globaldebug} == "true" ]]; then
    local debug=true
  else
    local debug=false
  fi

  local content="${1}"
  local modified_content=""
  local line

  # Process the content line by line. A here-string keeps the loop in the
  # current shell (no pipeline subshell) and spawns fewer processes.
  while IFS= read -r line; do
    if [[ ${line} == *"#link"* ]]; then
      if ${debug}; then _msg debug "${0:t}_msg_1" " ${line}"; fi

      # Extract the URL and the link text ('¤' separates them)
      local url_full=$(echo "${line}" | awk -F'#link ' '{print $2}' | awk -F'¤' '{print $1 "¤" $2}')
      local url_dest=$(echo "${url_full}" | awk -F'¤' '{print $1}')
      local url_txt=$(echo "${url_full}" | awk -F'¤' '{print $2}')

      if ${debug}; then _msg debug "${0:t}_msg_2" " ${url_dest}"; fi
      if ${debug}; then _msg debug "${0:t}_msg_3" " ${url_txt}"; fi

      # Form the replacement HTML link
      local modified_link="<a href=\"${url_dest}\">${url_txt}"
      if [[ ${url_dest} =~ ^https?:// ]]; then
        # Add external link icon for external URLs
        modified_link+="<img class=\"exticon\" alt=\"External site icon\" src=\"/images/ext-black-top.svg\" width=\"12\" />"
      fi
      modified_link+="</a>"
      line=${line//"#link ${url_full}"/${modified_link}}
    fi
    # Append a real newline instead of a literal '\n' so the result never
    # needs 'echo -e', which would also mangle any backslash sequences
    # present in the page content itself.
    modified_content+="${line}"$'\n'
  done <<< "${content}"

  # Return the modified content; printf does no backslash processing.
  printf '%s' "${modified_content}"

}
|
|
|
|
|
|
|
|
function _image() {
  # Replace '#showimg IMAGE¤ALT' tags with HTML <img> tags.
  # IMAGE may be an absolute URL, an absolute site path, or a bare filename
  # (which is served from /images/).
  #
  # $1     - text that may contain '#showimg' tags
  # stdout - the text with every '#showimg' tag expanded

  if [[ ${globaldebug} == "true" ]]; then
    local debug=true
  else
    local debug=false
  fi

  local content="${1}"
  local modified_content=""
  local line

  # Process the content line by line. A here-string keeps the loop in the
  # current shell (no pipeline subshell) and spawns fewer processes.
  while IFS= read -r line; do
    if [[ ${line} == *"#showimg"* ]]; then
      if ${debug}; then _msg debug "${0:t}_msg_1" " ${line}"; fi

      # Extract image link and alt text ('¤' separates them)
      local img_link=$(echo "${line}" | awk -F'#showimg ' '{print $2}')
      local image=$(echo "${img_link}" | awk -F'¤' '{print $1}')
      local img_alt=$(echo "${img_link}" | awk -F'¤' '{print $2}')

      # Determine the source of the image
      local real_image=""
      if [[ ${image} =~ ^https?:// ]]; then
        real_image=${image}
      elif [[ ${image} =~ ^\/ ]]; then
        real_image=${image}
      else
        real_image="/images/${image}"
      fi

      # Form the replacement HTML image tag
      local img_tag="<img src=\"${real_image}\" alt=\"${img_alt}\" width=\"500\" />"
      line=${line//"#showimg ${img_link}"/${img_tag}}
    fi
    # Real newline instead of a literal '\n' — avoids the 'echo -e' pass
    # that would also corrupt backslash sequences in the page content.
    modified_content+="${line}"$'\n'
  done <<< "${content}"

  # Return the modified content; printf does no backslash processing.
  printf '%s' "${modified_content}"

}
|
|
|
|
|
|
|
|
function _youtube() {
  # Replace '#ytvideo VIDEO_ID' tags with a YouTube iframe embed.
  #
  # $1     - text that may contain '#ytvideo' tags
  # stdout - the text with every '#ytvideo' tag expanded

  if [[ ${globaldebug} == "true" ]]; then
    local debug=true
  else
    local debug=false
  fi

  local content="${1}"
  local modified_content=""
  local line

  # Process the content line by line. A here-string keeps the loop in the
  # current shell (no pipeline subshell) and spawns fewer processes.
  while IFS= read -r line; do
    if [[ ${line} == *"#ytvideo"* ]]; then
      if ${debug}; then _msg debug "${0:t}_msg_1" " ${line}"; fi

      # Extract YouTube video ID
      local yt_id=$(echo "${line}" | awk -F'#ytvideo ' '{print $2}')

      # Form the replacement YouTube iframe embed
      local yt_iframe="<iframe width=\"560\" height=\"315\" src=\"https://www.youtube.com/embed/${yt_id}\" title=\"YouTube video player\" frameborder=\"0\" allow=\"accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; web-share\" allowfullscreen></iframe>"
      line=${line//"#ytvideo ${yt_id}"/${yt_iframe}}
    fi
    # Real newline instead of a literal '\n' — avoids the 'echo -e' pass
    # that would also corrupt backslash sequences in the page content.
    modified_content+="${line}"$'\n'
  done <<< "${content}"

  # Return the modified content; printf does no backslash processing.
  printf '%s' "${modified_content}"

}
|
|
|
|
|
|
|
|
function _cleanup() {
  # Remove tags used in the templates that may be left over for some reason
  # ('¤' separators and unprocessed '#showimg ', '#ytvideo ', '#link ').
  #
  # $1     - text to clean
  # stdout - the cleaned text

  if [[ ${globaldebug} == "true" ]]; then
    local debug=true
  else
    local debug=false
  fi

  local content="${1}"

  if ${debug}; then _msg debug "${0:t}_msg_1"; fi

  # Perform the cleanup. The content is fed to sed through a here-string:
  # the zsh 'echo' builtin interprets backslash escapes by default and
  # could corrupt page content containing sequences like '\n'.
  # -e "s|BLOGINDEX\ ||g"
  local cleaned_content
  cleaned_content=$(sed \
    -e "s|¤||g" \
    -e "s|#showimg\ ||g" \
    -e "s|#ytvideo\ ||g" \
    -e "s|#link\ ||g" \
    <<< "${content}")

  # Return the cleaned content without further escape processing.
  printf '%s\n' "${cleaned_content}"

}
|
|
|
|
|
2024-02-03 08:30:39 +01:00
|
|
|
function _html() {
|
2024-02-05 20:55:22 +01:00
|
|
|
|
2024-02-15 13:33:11 +01:00
|
|
|
if [[ ${globaldebug} == "true" ]]; then
|
|
|
|
local debug=true
|
|
|
|
else
|
|
|
|
local debug=false
|
|
|
|
fi
|
2024-02-05 20:55:22 +01:00
|
|
|
|
2024-02-06 09:53:22 +01:00
|
|
|
local content="${1}"
|
2024-01-28 18:24:38 +01:00
|
|
|
|
2024-02-05 20:55:22 +01:00
|
|
|
if ${debug}; then
|
2024-02-14 17:32:49 +01:00
|
|
|
_msg debug "${0:t}_msg_1"
|
2024-02-05 20:55:22 +01:00
|
|
|
fi
|
2024-02-01 15:53:48 +01:00
|
|
|
|
2024-02-05 20:55:22 +01:00
|
|
|
# Use perl to convert QStags to HTML
|
|
|
|
perl -0777 -pe '
|
2024-02-01 15:53:48 +01:00
|
|
|
BEGIN {
|
2024-02-15 13:33:11 +01:00
|
|
|
@qstags = (
|
2024-02-05 20:55:22 +01:00
|
|
|
"#BR", "<br/>\n",
|
|
|
|
"#BD", "<b>", "#EBD", "</b>",
|
|
|
|
"#I", "<i>", "#EI", "</i>\n",
|
|
|
|
"#P", "<p>", "#EP", "</p>\n",
|
|
|
|
"#Q", "<blockquote>", "#EQ", "</blockquote>\n",
|
|
|
|
"#C", "<code>", "#EC", "</code>\n",
|
|
|
|
"#H1", "<h1>", "#EH1", "</h1>\n",
|
|
|
|
"#H2", "<h2>", "#EH2", "</h2>\n",
|
|
|
|
"#H3", "<h3>", "#EH3", "</h3>\n",
|
|
|
|
"#H4", "<h4>", "#EH4", "</h4>\n",
|
|
|
|
"#H5", "<h5>", "#EH5", "</h5>\n",
|
|
|
|
"#H6", "<h6>", "#EH6", "</h6>\n",
|
|
|
|
"#STRONG", "<strong>", "#ESTRONG", "</strong>\n",
|
|
|
|
"#EM", "<em>", "#EEM", "</em>\n",
|
|
|
|
"#DV", "<div>", "#EDV", "</div>\n",
|
|
|
|
"#SPN", "<span>", "#ESPN", "</span>\n",
|
|
|
|
"#UL", "<ul>", "#EUL", "</ul>\n",
|
|
|
|
"#OL", "<ol>", "#EOL", "</ol>\n",
|
|
|
|
"#LI", "<li>", "#ELI", "</li>\n",
|
|
|
|
"#IU", "<u>", "#EIU", "</u>\n",
|
|
|
|
"#TBL", "<table>", "#ETBL", "</table>\n",
|
|
|
|
"#TR", "<tr>", "#ETR", "</tr>\n",
|
|
|
|
"#TD", "<td>", "#ETD", "</td>\n",
|
|
|
|
"#TH", "<th>", "#ETH", "</th>\n",
|
|
|
|
"#ART", "<article>", "#EART", "</article>\n",
|
|
|
|
"#SEC", "<section>", "#ESEC", "</section>\n",
|
|
|
|
"#ASIDE", "<aside>", "#EASIDE", "</aside>\n",
|
|
|
|
"#NAV", "<nav>", "#ENAV", "</nav>\n",
|
|
|
|
"#BTN", "<button>", "#EBTN", "</button>\n",
|
|
|
|
"#SEL", "<select>", "#ESEL", "</select>\n",
|
|
|
|
"#OPT", "<option>", "#EOPT", "</option>\n",
|
|
|
|
"#LT", "<", "#GT", ">", "#NUM", "#"
|
|
|
|
);
|
2024-02-01 13:19:21 +01:00
|
|
|
}
|
2024-02-05 20:55:22 +01:00
|
|
|
|
2024-02-15 13:33:11 +01:00
|
|
|
for (my $i = 0; $i < $#qstags; $i += 2) {
|
|
|
|
my $qstag = $qstags[$i];
|
|
|
|
my $html = $qstags[$i + 1];
|
|
|
|
s/\Q$qstag\E/$html/g;
|
2024-02-01 15:53:48 +01:00
|
|
|
}
|
|
|
|
' <<< "$content"
|
2024-02-05 20:55:22 +01:00
|
|
|
|
2024-01-28 18:24:38 +01:00
|
|
|
}
|
|
|
|
|
2024-02-03 08:30:39 +01:00
|
|
|
function _zhtml() {
    # Convert QStags (#P, #EP, #BR, ...) in the input text to HTML and
    # print the result on stdout.
    #
    # Arguments:
    #   $1 - text containing QStags
    # Outputs:
    #   the text with every QStag replaced by its HTML equivalent

    # Per-function debug: follow the global switch unless overridden here.
    local debug=false
    if [[ ${globaldebug} == "true" ]]; then
        debug=true
    fi

    local content="${1}"

    if ${debug}; then
        _msg debug "${0:t}_msg_1"
    fi

    # Tag/HTML pairs as an ORDERED flat array. The previous associative
    # array was a bug: hash iteration order is arbitrary, so when a tag
    # is a prefix of another (#I/#IU, #EI/#EIU) the short tag could be
    # substituted first and corrupt the longer one. Longer tags are
    # therefore listed before their prefixes, and #NUM -> # runs last so
    # an inserted '#' can never fuse with following text into a new tag
    # (this matches the perl path, which also replaces #NUM last).
    # Newlines are embedded as real newlines ($'\n') instead of literal
    # "\n" sequences expanded by echo at the end, which also mangled any
    # backslash sequences present in the user's own content.
    local -a qstags
    qstags=(
        "#BR"     $'<br/>\n'
        "#BD"     "<b>"           "#EBD"     "</b>"
        "#IU"     "<u>"           "#EIU"     $'</u>\n'
        "#I"      "<i>"           "#EI"      $'</i>\n'
        "#P"      "<p>"           "#EP"      $'</p>\n'
        "#Q"      "<blockquote>"  "#EQ"      $'</blockquote>\n'
        "#C"      "<code>"        "#EC"      $'</code>\n'
        "#H1"     "<h1>"          "#EH1"     $'</h1>\n'
        "#H2"     "<h2>"          "#EH2"     $'</h2>\n'
        "#H3"     "<h3>"          "#EH3"     $'</h3>\n'
        "#H4"     "<h4>"          "#EH4"     $'</h4>\n'
        "#H5"     "<h5>"          "#EH5"     $'</h5>\n'
        "#H6"     "<h6>"          "#EH6"     $'</h6>\n'
        "#STRONG" "<strong>"      "#ESTRONG" $'</strong>\n'
        "#EM"     "<em>"          "#EEM"     $'</em>\n'
        "#DV"     "<div>"         "#EDV"     $'</div>\n'
        "#SPN"    "<span>"        "#ESPN"    $'</span>\n'
        "#UL"     "<ul>"          "#EUL"     $'</ul>\n'
        "#OL"     "<ol>"          "#EOL"     $'</ol>\n'
        # NOTE(review): the perl path emits a plain <li> here; confirm
        # which of the two is intended.
        "#LI"     "<li class=\"libody\">"  "#ELI" $'</li>\n'
        "#TBL"    "<table>"       "#ETBL"    $'</table>\n'
        "#TR"     "<tr>"          "#ETR"     $'</tr>\n'
        "#TD"     "<td>"          "#ETD"     $'</td>\n'
        "#TH"     "<th>"          "#ETH"     $'</th>\n'
        "#ART"    "<article>"     "#EART"    $'</article>\n'
        "#SEC"    "<section>"     "#ESEC"    $'</section>\n'
        "#ASIDE"  "<aside>"       "#EASIDE"  $'</aside>\n'
        "#NAV"    "<nav>"         "#ENAV"    $'</nav>\n'
        "#BTN"    "<button>"      "#EBTN"    $'</button>\n'
        "#SEL"    "<select>"      "#ESEL"    $'</select>\n'
        "#OPT"    "<option>"      "#EOPT"    $'</option>\n'
        "#LT"     "<"             "#GT"      ">"
        "#NUM"    "#"
    )

    # Walk the pairs in order. A substitution on an absent tag is a
    # no-op, so the old per-tag pre-check (one sed subprocess plus a
    # zsh/regex '=~' match for every tag) is unnecessary, and the
    # 'zmodload zsh/regex' that supported it has been dropped.
    local tag html
    set -- "${qstags[@]}"
    while (( $# >= 2 )); do
        tag=$1
        html=$2
        shift 2
        content=${content//"$tag"/$html}
    done

    # Plain %s output: unlike zsh's echo, printf does not reinterpret
    # backslash escapes embedded in the content.
    printf '%s\n' "${content}"
}
|
2024-02-03 08:30:39 +01:00
|
|
|
|
2024-01-30 13:42:01 +01:00
|
|
|
# Build the site: generate the blog posts, then the standalone pages,
# then the sitemap.
# NOTE(review): _blogs, _pages and _sitemap are presumably defined
# earlier in this file; confirm before reordering these calls.
_blogs
_pages
_sitemap
|