#!/usr/bin/zsh

###############################################################################
###############################################################################
#
# Quick Site Generator 2 is a static website generator inspired by Nikola.
# It is written for the Z shell (zsh) because that's what I use and also
# because I like it better than Bash.
#
# This script is an almost complete rewrite of my old script, which had become
# overly complicated and had far too many bugs, even though it worked on
# simple sites.
#
# https://github.com/kekePower/qsgen2/
#
###############################################################################
###############################################################################

VERSION="0.4.0" # Thu-2024-02-15
QSGEN="Quick Site Generator 2"

# Set to true or false.
# When true, every function in this script prints debug information.
# You can also set debug=true inside a single function to debug only that one.
globaldebug=false

# Use the Zsh fpath to add the directories with some extra functions
fpath=(${HOME}/bin/include/common ${HOME}/bin/include/qsgen2/lang $fpath)
# In this case, let's load the 'include' function
autoload include

# Include some colors for the script
include common/colors

echo "${magenta}${blue_bg} ${QSGEN} ${end}${bold_white}${blue_bg}${VERSION} ${end}"

# Check for, and source, the config file for this specific website
if [[ -f $(pwd)/config ]]; then
  if (${globaldebug}); then echo "${red}Config file found and sourced${end}\n${yellow} - $(pwd)/config${end}"; fi
  builtin source $(pwd)/config
else
  echo "${red}Cannot find configuration file.${end}"
  echo "${yellow} - Please create the file 'config' in your project directory.${end}"
  echo "${yellow} - See 'config.example' in the git source tree.${end}"
  exit
fi
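# For reference, a minimal 'config' could look like the sketch below. The
# variable names are the ones this script reads; the values are illustrative
# only and are not copied from config.example.
#
#   project_dir="${HOME}/sites/example.com"
#   www_root="/var/www/example.com"
#   generator="native"        # "native" (QStags) or "markdown" (pandoc)
#   theme="default"
#   site_name="Example Site"
#   site_tagline="Just an example"
#   site_url="https://example.com"
#   blog_in_index="true"
#   sitemap="true"
#   language="en_US"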

# Load language as defined in config
typeset -A qsgenlang
lang_found=false
for dir in $fpath; do
  if [[ -f "${dir}/${language}" ]]; then
    # echo "Language file: ${dir}/${language}"
    source "${dir}/${language}"
    lang_found=true
    break
  fi
done
if [[ ${lang_found} == "false" ]]; then
  # Fall back to en_US if defined language isn't found
  echo "Defined language, ${language}, not found. Using en_US."
  source "${HOME}/bin/include/qsgen2/lang/en_US"
fi

if (${globaldebug}); then
  qsconfig=$( cat $(pwd)/config | grep -v \# | awk '{print substr($0, index($0, " ") + 1)}' )
  echo "Content of Config file"
  for qslines in ${qsconfig}
  do
    echo "${yellow}${qslines}${end}"
  done
fi

function _msg() {
  local type=$1
  shift # Remove the first argument so $@ now contains only keys or additional strings

  local full_msg=""
  for arg in "$@"; do
    if [[ -n "${qsgenlang[$arg]}" ]]; then
      full_msg+="${qsgenlang[$arg]}"
    else
      full_msg+="$arg"
    fi
  done

  # Determine the color based on the type
  local color="${end}" # Default to no color if type is unrecognized
  case $type in
    std) color="${green}" ;;
    info) color="${yellow}" ;;
    debug) color="${red}" ;;
    other) color="${bold_yellow}" ;;
    sub) color="${magenta}" ;;
    main) color="${white}${green_bg}" ;;
  esac

  # Use printf with %b to allow backslash escape interpretation
  printf "${color}%b${end}\n" "${full_msg}"
}
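# Illustrative note (not copied from the real language files): each file in
# ${HOME}/bin/include/qsgen2/lang is expected to populate the qsgenlang
# associative array, along the lines of
#   qsgenlang[_qsgen2_msg_7]="Copyright 2018"
# so that a call like
#   _msg info "_qsgen2_msg_7" "-$(strftime "%Y")"
# prints the translated string followed by the literal "-<year>" in the
# 'info' color. Arguments that are not known keys are printed as-is.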

function _version() {
  # strftime lives in the zsh/datetime module; load it here because _version
  # can run before the zmodload calls further down in the script.
  zmodload zsh/datetime
  _msg info "_qsgen2_msg_7" "-$(strftime "%Y")"
  echo "${yellow}- https://github.com/kekePower/qsgen2/${end}"
  _msg info "_qsgen2_msg_8" " '${1} help' " "_qsgen2_msg_8.1"
  exit
}

function _help() {
  # This will also be translated some time in the future
  echo "This is where I'll write the Help documentation."
  exit
}

if [[ "$1" == "version" || "$1" == "-v" || "$1" == "--version" ]]; then
  _version ${0:t}
elif [[ "$1" == "help" || "$1" == "-h" || "$1" == "--help" ]]; then
  _help ${0:t}
fi

# Define cache files for blogs and pages
blog_cache_file="${project_dir}/.blog_cache"
pages_cache_file="${project_dir}/.pages_cache"

case ${1} in
  force)
    _msg sub "_qsgen2_msg_2"
    : >| "$blog_cache_file" # Truncate the blog cache to force a full rebuild
    : >| "$pages_cache_file" # Truncate the pages cache to force a full rebuild
  ;;
  *)
    # Nothing
  ;;
esac

# Check that 'generator' is set in the config; also refuse to run inside a
# git working tree (for example the qsgen2 source tree itself).
if [[ ! ${generator} ]] || [[ -d $(pwd)/.git ]]; then
  _msg debug "_qsgen2_msg_3"
  exit
fi

# We define the variable 'engine' based on what's in the 'config' file.
if [[ ${generator} == "native" ]]; then
  # Usage: ${engine} ${1} - Where 1 is the file you want to convert
  engine=_zhtml
  export file_ext="qst"
elif [[ ${generator} == "markdown" ]]; then
  if [[ ! -f /usr/local/bin/pandoc ]]; then
    _msg other "_qsgen2_msg_4"
    _msg other "https://github.com/jgm/pandoc/releases"
    exit
  else
    # Usage: ${engine} ${1} - Where 1 is the file you want parsed
    engine="/usr/local/bin/pandoc"
    engine_opts=
    export file_ext="md"
  fi
else
  _msg debug "_qsgen2_msg_5"
  exit
fi

function _run_engine() {
  # Usage: _run_engine <input>
  local debug=false

  if [[ ${generator} == "native" ]]; then
    ${engine} ${1}
  elif [[ ${generator} == "markdown" ]]; then
    echo "${1}" | ${engine} ${engine_opts}
  else
    _msg debug "ERROR running engine: ${engine}!"
    _msg info "Usage: _run_engine <input>"
    exit
  fi
}
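# Typical use (as seen in _pages and _blogs below): pass the whole buffer and
# capture the converted HTML, for example
#   page_content=$( _run_engine "$page_content" )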

if (${globaldebug}); then _msg debug "_qsgen2_msg_6"; fi

builtin cd ${project_dir}

# Loading Zsh modules
zmodload zsh/files
zmodload zsh/datetime

# Let's put these here for now.
export today=$(strftime "%Y-%m-%d - %T")
export blogdate=$(strftime "%a-%Y-%b-%d")

# Let's create arrays of all the files we'll be working on

function _list_pages() {

  if [[ ${globaldebug} == "true" ]]; then
    local debug=true
  else
    local debug=false
  fi

  # Initialize or clear the array to ensure it's empty before adding files
  pages_file_array=()

  export no_pages_found=false

  # Temporarily set null_glob for this function
  setopt local_options null_glob

  # Using an array to directly capture matching files
  local -a pages_files=(*.${file_ext})

  if (( ${#pages_files} == 0 )); then
    if ${debug}; then _msg debug "${0:t}_msg_1" " ${file_ext}."; fi
    export no_pages_found=true
    return
  else
    for file in "${pages_files[@]}"; do
      if ${debug}; then _msg debug "${0:t}_msg_2" " ${file}"; fi
      pages_file_array+=("$file")
    done
  fi

}

function _list_blogs() {

  if [[ ${globaldebug} == "true" ]]; then
    local debug=true
  else
    local debug=false
  fi

  # Initialize or clear the blogs array to ensure it's empty before adding files
  blogs_file_array=()

  export no_blogs_found=false

  # Temporarily set null_glob for this function
  setopt local_options null_glob

  # Directly capture matching blog files into an array
  local -a blog_files=(blog/*.blog(On))

  if (( ${#blog_files[@]} == 0 )); then
    if ${debug}; then _msg debug "${0:t}_msg_1"; fi
    export no_blogs_found=true
    return
  else
    for file in "${blog_files[@]}"
    do
      if ${debug}; then _msg debug "${0:t}_msg_2" " $file"; fi
      blogs_file_array+=("$file")
    done
  fi

}

# BLOG CACHE
function _blog_cache() {

  if [[ ${globaldebug} == "true" ]]; then
    local debug=true
  else
    local debug=false
  fi

  _list_blogs

  # Create an associative array for the blog cache
  typeset -A blog_cache

  # Load the existing blog cache
  if [[ -f $blog_cache_file ]]; then
    while IFS=':' read -r name hash; do
      blog_cache[$name]=$hash
      if (${debug}) _msg debug "${0:t}_msg_1" " ${blog_cache[${name}]}"
    done < "$blog_cache_file"
  fi

  # Initialize the array for storing blog files to process
  make_blog_array=()

  # Process blog files
  for blog_file in ${blogs_file_array[@]}; do
    # Compute the current blog file hash
    current_hash=$(md5sum "$blog_file" | awk '{print $1}')

    if (${debug}) _msg debug "${0:t}_msg_2" " ${blog_file}"
    if (${debug}) _msg debug "${0:t}_msg_3" " ${current_hash}"

    # Check if the blog file is new or has changed
    if [[ ${blog_cache[$blog_file]} != "$current_hash" ]]; then
      if (${debug}) _msg debug "${0:t}_msg_4" " ${blog_file}"
      if (${debug}) _msg debug "${0:t}_msg_5" " ${current_hash}"
      # Blog file is new or has changed; add it to the processing array
      make_blog_array+=("$blog_file")

      # Update the blog cache with the new hash
      blog_cache[$blog_file]=$current_hash
    fi
  done

  # Rebuild the blog cache file from scratch
  : >| "$blog_cache_file" # Truncate the file before writing
  for name in "${(@k)blog_cache}"; do
    echo "$name:${blog_cache[$name]}" >> "$blog_cache_file"
  done

}
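# The cache files are plain "path:md5" lines, one per source file, for example
# (hypothetical content of .blog_cache):
#   blog/first-post.blog:9b74c9897bac770ffc029102a200c5de
# A blog or page is only rebuilt when its current md5sum no longer matches the
# cached value.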

# PAGES CACHE
# Returns the array pages_array()
function _pages_cache() {

  if [[ ${globaldebug} == "true" ]]; then
    local debug=true
  else
    local debug=false
  fi

  # Create an associative array for the pages cache
  typeset -A pages_cache

  _list_pages

  # Load the existing pages cache
  if [[ -f $pages_cache_file ]]; then
    while IFS=':' read -r name hash; do
      pages_cache[$name]=$hash
      if (${debug}) _msg debug "${0:t}_msg_1" " ${pages_cache[${name}]}"
    done < "$pages_cache_file"
  fi

  # Initialize the array for storing pages files to process
  pages_array=()

  # Process pages files
  for file in ${pages_file_array[@]}; do
    # Compute the current pages file hash
    current_hash=$(md5sum "$file" | awk '{print $1}')

    if (${debug}) _msg debug "${0:t}_msg_2" " ${pages_cache[$file]}"
    if (${debug}) _msg debug "${0:t}_msg_3" " current_cache: ${current_hash}"

    # Check if the pages file is new or has changed
    if [[ ${pages_cache[$file]} != "$current_hash" ]]; then
      if (${debug}) _msg debug "${0:t}_msg_4" " ${pages_cache[$file]}"
      if (${debug}) _msg debug "${0:t}_msg_5" " current_cache: ${current_hash}"

      # Pages file is new or has changed; add it to the processing array
      pages_array+=("$file")

      # Update the pages cache with the new hash
      pages_cache[$file]=$current_hash
    fi
  done

  # Rebuild the pages cache file from scratch
  : >| "$pages_cache_file" # Truncate the file before writing
  for name in "${(@k)pages_cache}"; do
    echo "$name:${pages_cache[$name]}" >> "$pages_cache_file"
  done

}

function _last_updated() {
  # Replaces the #updated tag in the provided string; the replacement text
  # also carries the generator name and version.

  if [[ ${globaldebug} == "true" ]]; then
    local debug=true
  else
    local debug=false
  fi

  local upd_msg="Last updated ${today} by <a href=\"https://blog.kekepower.com/qsgen2.html\">${QSGEN} ${VERSION}</a>"

  if (${debug}); then _msg debug "${0:t}_msg_1"; fi
  if (${debug}); then _msg debug "${0:t}_msg_2" " ${upd_msg}"; fi

  local content="${1}"

  # Perform the replacement
  local updated_content=$(echo "${content}" | sed \
    -e "s|#updated|${upd_msg}|")

  # Return the updated content
  echo "${updated_content}"

}

function _f_last_updated() {
  # Replaces the #updated tag directly in the provided file

  if [[ ${globaldebug} == "true" ]]; then
    local debug=true
  else
    local debug=false
  fi

  local upd_msg="Last updated ${today} by <a href=\"https://blog.kekepower.com/qsgen2.html\">${QSGEN} ${VERSION}</a>"

  if ${debug}; then
    _msg debug "${0:t}_msg_1" " ${1}"
    _msg debug "${0:t}_msg_2" " ${upd_msg}"
  fi

  # Perform the replacement in place
  if [[ -f "${1}" ]]; then
    sed -i -e "s|#updated|${upd_msg}|" "${1}"
  else
    _msg debug "${0:t}_msg_3" " '${1}' " "${0:t}_msg_3.1"
  fi

}

function _file_to_lower() {

  local filename="${1}"

  # Replace spaces with dashes
  filename="${filename// /-}"

  # Convert to lowercase and remove invalid characters
  filename="${filename:l}"
  filename=$(echo "${filename}" | sed -E -e 's/^[^a-zA-Z0-9_.]+//' -e 's/[^a-zA-Z0-9_.-]+/-/g')

  echo "${filename}"

}
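# Example (illustrative): _file_to_lower "My Page.qst" prints "my-page.qst",
# which _pages then turns into the output file name "my-page.html".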

function _pages() {
  # This function generates all the new and updated Pages

  if [[ ${globaldebug} == "true" ]]; then
    local debug=true
  else
    local debug=false
  fi

  # Load the cache for Pages
  if (${debug}) _msg debug "${0:t}_msg_1"
  _pages_cache

  if [[ ${no_pages_found} == "true" ]]; then
    _msg sub "${0:t}_msg_1"
    return
  fi

  _msg main "${0:t}_msg_3"

  if (( ${#pages_array[@]} > 0 )); then

    # If pages_array is not empty, we do work
    if (${debug}) _msg debug "${0:t}_msg_4"

    for pages_in_array in ${pages_array[@]}
    do
      if (${debug}) _msg debug "${0:t}_msg_5"
      local pages=${project_dir}/themes/${theme}/pages.tpl

      # Let's check if we can access the pages.tpl file.
      # If not, exit the script.
      if [[ ! -f ${pages} ]]; then
        _msg info "${0:t}_msg_6" " ${pages}"
        exit
      else
        # Read the template once
        if (${debug}) _msg debug "${0:t}_msg_7"
        local pages_tpl="$(<${pages})"
      fi

      _msg std " - ${pages_in_array%.*}.html"
      # Read the file once
      if (${debug}) _msg debug "${0:t}_msg_9" " ${pages_in_array}"
      local page_content="$(<${pages_in_array})"

      # Grab the title from the Page
      if (${debug}) _msg debug "${0:t}_msg_10"
      if [[ ${generator} == "native" ]]; then
        while read -r line
        do
          if [[ "$line" =~ ^#title=(.*) ]]; then
            local page_title=${match[1]}
            break
          fi
        done <<< "$page_content"
      elif [[ ${generator} == "markdown" ]]; then
        while IFS= read -r line
        do
          # Check if the line starts with '#' and capture the line
          if [[ "$line" == \#* ]]; then
            # Remove all leading '#' characters and the first space (if present)
            local page_title="${line#\#}" # Remove the first '#' character
            page_title="${page_title#\#}" # Remove the second '#' character if present
            page_title="${page_title#"${page_title%%[![:space:]]*}"}" # Trim leading whitespace
            break # Exit the loop after finding the first heading
          fi
        done <<< ${page_content}
      fi
      if (${debug}) _msg debug "${0:t}_msg_11" " ${page_title}"

      # Remove the #title line from the buffer. No longer needed.
      if (${debug}) _msg debug "${0:t}_msg_12"
      page_content=$( echo ${page_content} | grep -v \#title )

      # HTML'ify the page content
      if (${debug}) _msg debug "${0:t}_msg_13" " ${pages_in_array}"
      page_content=$( _run_engine "$page_content" )
      # Look for links, images and videos and convert them if present.
      if (${debug}) _msg debug "${0:t}_msg_14"
      if [[ $( echo ${page_content} | grep \#link ) ]]; then
        if (${debug}) _msg debug "${0:t}_msg_15"
        page_content=$( _link "${page_content}" )
      fi
      if [[ $( echo ${page_content} | grep \#showimg ) ]]; then
        if (${debug}) _msg debug "${0:t}_msg_16"
        page_content=$( _image "${page_content}" )
      fi
      if [[ $( echo ${page_content} | grep \#ytvideo ) ]]; then
        if (${debug}) _msg debug "${0:t}_msg_17"
        page_content=$( _youtube "${page_content}" )
      fi

      # Replace every #pagetitle in pages_tpl
      if (${debug}) _msg debug "${0:t}_msg_18"
      pages_tpl=$(echo "${pages_tpl}" | perl -pe "s|#pagetitle|${page_title}|gs; s|#tagline|${site_tagline}|gs; s|#sitename|${site_name}|gs")

      if (${debug}) _msg debug "${0:t}_msg_19"
      # Use awk for multi-line and special character handling
      pages_tpl=$( awk -v new_body="$page_content" '{sub(/BODY/, new_body)} 1' <(echo "${pages_tpl}") )

      # Replace #updated in the footer with today's date plus the generator name and version
      if (${debug}) _msg debug "${0:t}_msg_20"
      pages_tpl=$( _last_updated "${pages_tpl}" )

      # Always use lowercase for file names
      if (${debug}) _msg debug "${0:t}_msg_21"
      pages_title_lower=$( _file_to_lower "${pages_in_array}" )

      # Clean up unused tags, if any
      if (${debug}) _msg debug "${0:t}_msg_22"
      pages_tpl=$( _cleanup "${pages_tpl}" )

      # Write pages_tpl to disk
      # _msg std "Writing ${www_root}/${pages_title_lower%.*}.html to disk."
      echo "${pages_tpl}" > ${www_root}/${pages_title_lower%.*}.html

      # Insert the blog list into the front page if blog_in_index is true, the current
      # file is index.${file_ext}, and index.tmp.html exists and is not empty
      if [[ ${pages_in_array} == "index.${file_ext}" && ${blog_in_index} == "true" && -s "${project_dir}/blog/index.tmp.html" ]]; then
        if (${debug}) _msg sub "${0:t}_msg_23" " ${pages_in_array}"
        if (${debug}) _msg sub "${0:t}_msg_24" " ${blog_in_index}"
        if (${debug}) _msg sub "${0:t}_msg_25"
        if (${debug}) ls -l ${project_dir}/blog/index.tmp.html
        _add_blog_list_to_index
      fi

    done

    export new_updated_pages=true

  else
    # Insert the blog list into the front page if blog_in_index is true
    # and index.tmp.html exists and is not empty
    if [[ ${blog_in_index} == "true" && -s "${project_dir}/blog/index.tmp.html" ]]; then
      _msg std "${0:t}_msg_26"
      if (${debug}) _msg sub "${0:t}_msg_27" " ${pages_in_array}"
      if (${debug}) _msg sub "${0:t}_msg_28" " ${blog_in_index}"
      if (${debug}) _msg sub "${0:t}_msg_25"
      if (${debug}) ls -l ${project_dir}/blog/index.tmp.html
      _add_blog_list_to_index
    fi

    _msg sub "${0:t}_msg_29"
    export new_updated_pages=false

  fi

}
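# For reference, a 'native' page source (for example index.qst) is expected to
# carry a "#title=" line plus QStags, while a markdown page gets its title from
# its first "# Heading" line. A minimal, made-up native example:
#   #title=Welcome
#   #P This is the front page. #EP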

function _blogs() {
  # This function generates new and updated blog posts and collects their metadata

  if [[ ${globaldebug} == "true" ]]; then
    local debug=true
  else
    local debug=false
  fi

  # Running function _list_blogs
  if (${debug}) _msg debug "${0:t}_msg_1"
  _list_blogs

  if [[ ${no_blogs_found} == "true" ]]; then
    _msg sub "${0:t}_msg_2"
    return
  fi

  _msg main "${0:t}_msg_3"

  # Running function _blog_cache
  if (${debug}) _msg debug "${0:t}_msg_4"
  _blog_cache

  if (( ${#make_blog_array[@]} > 0 )); then

    # Declare the array to hold metadata strings for each blog
    BLOG_META_STR_ARRAY=()

    # Regular blog creation process

    if [[ -f ${project_dir}/themes/${theme}/blogs.tpl ]]; then
      local blog_tpl=$(<"${project_dir}/themes/${theme}/blogs.tpl")
    else
      _msg info "${0:t}_msg_5"
      exit
    fi

    for blog in "${make_blog_array[@]}"; do

      if (${debug}) _msg debug "${0:t}_msg_6" " ${blog}"

      local content="$(<"${blog}")"
      local sdate btitle ingress body blog_index blog_dir blog_url

      # Initialize variables to track if DATE and BLOG_TITLE are found
      local date_found=false
      local title_found=false

      # Process content line by line
      while IFS= read -r line
      do
        # Check for the DATE line
        if [[ "${line}" == "DATE "* ]]; then
          if (${debug}) _msg debug "${0:t}_msg_7"
          date_found=true
        fi
        # Check for the BLOG_TITLE line
        if [[ "${line}" == "BLOG_TITLE "* ]]; then
          if (${debug}) _msg debug "${0:t}_msg_8"
          title_found=true
        fi
        # If both DATE and BLOG_TITLE are found, no need to continue checking
        if [[ "${date_found}" == true && "${title_found}" == true ]]; then
          break
        fi
      done <<< "${content}"

      # Check if DATE or BLOG_TITLE metadata is missing and log a message
      if [[ "${date_found}" == false ]]; then
        _msg debug "${0:t}_msg_9" " ${blog}."
        continue # Skip this file and move to the next
      fi
      if [[ "${title_found}" == false ]]; then
        _msg debug "${0:t}_msg_10" " ${blog}."
        continue # Skip this file and move to the next
      fi

      # Extract blog information
      sdate=( $( echo ${content} | grep DATE | sed "s|DATE\ ||" | sed "s|\-|\ |g" ) )
      if [[ ${generator} == "native" ]]; then
        while IFS= read -r line; do
          if [[ "$line" == "BLOG_TITLE "* ]]; then
            btitle="${line#BLOG_TITLE }"
            break
          fi
        done <<< "$content"
      elif [[ ${generator} == "markdown" ]]; then
        while IFS= read -r line; do
          if [[ "$line" == \#* ]]; then
            btitle="${line#\#}" # Remove the first '#' character
            btitle="${btitle#\#}" # Remove the second '#' character if present
            btitle="${btitle#"${btitle%%[![:space:]]*}"}" # Trim leading whitespace
            break # Exit the loop after finding the first heading
          fi
        done <<< "$content"
      fi
      ingress=$( echo ${content} | sed "s/'/\\\'/g" | xargs | grep -Po "#INGRESS_START\K(.*?)#INGRESS_STOP" | sed "s|\ \#INGRESS_STOP||" | sed "s|^\ ||" )
      body=$( echo ${content} | sed "s/'/\\\'/g" | xargs | grep -Po "#BODY_START\K(.*?)#BODY_STOP" | sed "s|\ \#BODY_STOP||" | sed "s|^\ ||" )

      blog_index=$(echo "${btitle:l}" | sed 's/ /_/g; s/,//g; s/\.//g; s/://g; s/[()]//g')

      blog_dir="/blog/${sdate[2]}/${sdate[3]:l}/${sdate[4]}"
      blog_url="${blog_dir}/${blog_index}.html"

      if (${debug}) _msg debug "${0:t}_msg_11" " ${blog} " "${0:t}_msg_11.1"

      # Concatenate all metadata into a single string for the current blog
      local metadata_str="SDATE: ${sdate[@]}||BTITLE: ${btitle}||INGRESS: ${ingress}||URL: ${blog_url}"
      # Append this metadata string to the array
      BLOG_META_STR_ARRAY+=("${metadata_str}")

      if (${debug}) _msg debug "${0:t}_msg_12" " ${blog}"

      _msg std " - ${blog_index}.html"

      # Prepare the blog template
      if (${debug}) _msg debug "${0:t}_msg_14" " ${blog}"
      local blog_content=$(
        echo "${blog_tpl}" | \
        perl -pe "\
          s|BLOGTITLE|${btitle}|g; \
          s|BLOGURL|${blog_url}|g; \
          s|\QINGRESS\E|${ingress}|g; \
          s|\QBODY\E|${body}|g \
        ")
      blog_content="${blog_content//CALNDAY/${sdate[4]}}"
      blog_content="${blog_content//CALYEAR/${sdate[2]}}"
      blog_content="${blog_content//CALMONTH/${sdate[3]}}"
      blog_content="${blog_content//CALADAY/${sdate[1]}}"

      if (${debug}) _msg debug "${0:t}_msg_15" " ${blog}"
      blog_content=$( _run_engine "${blog_content}" )
      # Look for links, images and videos and convert them if present.
      if (${debug}) _msg debug "${0:t}_msg_16"
      if [[ $( echo ${blog_content} | grep \#link ) ]]; then
        if (${debug}) _msg debug "${0:t}_msg_17"
        blog_content=$(_link "${blog_content}")
      fi
      if [[ $( echo ${blog_content} | grep \#showimg ) ]]; then
        if (${debug}) _msg debug "${0:t}_msg_18"
        blog_content=$(_image "${blog_content}")
      fi
      if [[ $( echo ${blog_content} | grep \#ytvideo ) ]]; then
        if (${debug}) _msg debug "${0:t}_msg_19"
        blog_content=$(_youtube "${blog_content}")
      fi

      # Replace every #tagline in blog_content
      if (${debug}) _msg debug "${0:t}_msg_20"
      blog_content=$( echo ${blog_content} | perl -pe "s|#tagline|${site_tagline}|gs; s|#sitename|${site_name}|gs; s|#pagetitle|${page_title}|gs" )

      blog_content=$(_last_updated "${blog_content}")
      blog_content=$(_cleanup "${blog_content}")

      # Create directory if it doesn't exist
      if (${debug}) _msg debug "${0:t}_msg_21" " ${www_root}${blog_dir}"
      [[ ! -d "${www_root}/${blog_dir}" ]] && mkdir -p "${www_root}/${blog_dir}"

      # Write to file
      if (${debug}) _msg debug "${0:t}_msg_22" " ${www_root}${blog_url}"
      echo "${blog_content}" > "${www_root}${blog_url}"

      unset sdate btitle ingress body blog_index blog_dir blog_url

    done
    # Now BLOG_META_STR_ARRAY contains the metadata string for each blog post
    export BLOG_META_STR_ARRAY
    if (${debug}) _msg debug "${0:t}_msg_23"
    export new_updated_blogs=true

  else
    _msg sub "${0:t}_msg_24"
    export new_updated_blogs=false
  fi

  if [[ ${new_updated_blogs} == "true" ]]; then
    if (${debug}) _msg sub "${0:t}_msg_25"
    _blog_idx_for_index
    if (${debug}) _msg sub "${0:t}_msg_26"
    _blog_index
  fi

}
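# For reference, a blog source file (blog/*.blog) is expected to provide the
# markers parsed above. A minimal, made-up example:
#   DATE Thu-2024-Feb-15
#   BLOG_TITLE My first post
#   #INGRESS_START A short teaser paragraph. #INGRESS_STOP
#   #BODY_START #P The full post body goes here. #EP #BODY_STOP
# The DATE value follows the same format as ${blogdate} above
# (strftime "%a-%Y-%b-%d"); _blogs splits it into day name, year, month and
# day number to build the /blog/<year>/<month>/<day>/ output directory.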

function _blog_idx_for_index() {
  # This function generates the file blog/index.tmp.html

  if [[ ${globaldebug} == "true" ]]; then
    local debug=true
  else
    local debug=false
  fi

  _msg sub "${0:t}_msg_1" " ${project_dir}/blog/index.tmp.html"

  if (${debug}) _msg debug "${0:t}_msg_2"

  local blog_list_tpl=$(<${project_dir}/themes/${theme}/blog_list.tpl)
  local blog_list_content=""

  # Truncate the file before writing the new one
  : >| "${project_dir}/blog/index.tmp.html"

  # if (${debug}) _msg debug "${0:t}_msg_3" " ${BLOG_META_STR_ARRAY[@]}"

  for meta_str in ${BLOG_META_STR_ARRAY[@]}
  do
    if (${debug}) _msg debug "${0:t}_msg_4"
    if (${debug}) _msg debug "${0:t}_msg_5" " ${meta_str}"

    # Split meta_str into individual metadata components
    local -a meta_array=("${(@s/||/)meta_str}")

    # Initialize variables to store each component
    local sdate btitle ingress url

    # Iterate over each component and extract information
    if (${debug}) _msg debug "${0:t}_msg_6"
    for component in "${meta_array[@]}"
    do
      case "${component}" in
        SDATE:*) sdate=${component#SDATE: } ;;
        BTITLE:*) btitle=${component#BTITLE: } ;;
        INGRESS:*) ingress=${component#INGRESS: } ;;
        URL:*) url=${component#URL: } ;;
      esac

    done

    local adate=( $( echo ${sdate} ) )
    local caladay="${adate[1]}"
    local calyear="${adate[2]}"
    local calmonth="${adate[3]}"
    local calnday="${adate[4]}"

    local bdate="${adate[1]} - ${adate[4]}/${adate[3]}/${adate[2]}"
    blog_list_content+=$(
      echo "${blog_list_tpl}" | \
      perl -pe "\
        s|BLOGURL|${site_url}${url}|g; \
        s|BLOGTITLE|${btitle}|g; \
        s|INGRESS|${ingress}|g; \
        s|BLOGDATE|${bdate}|g; \
        s|CALADAY|${caladay}|g; \
        s|CALNDAY|${calnday}|g; \
        s|CALMONTH|${calmonth}|g; \
        s|CALYEAR|${calyear}|g \
      ")

    unset sdate btitle ingress url adate caladay calyear calmonth calnday

  done
  if (${debug}) _msg debug "${0:t}_msg_7" " ${engine} " "${0:t}_msg_7.1"
  # Catch any QStags or Markdown in the Ingress
  blog_list_content=$( _run_engine ${blog_list_content} )
  if (${debug}) _msg debug "${0:t}_msg_8" " ${project_dir}/blog/index.tmp.html"
  #if (${debug}) _msg debug "${0:t}_msg_9" " ${blog_list_content}"
  echo ${blog_list_content} > ${project_dir}/blog/index.tmp.html

}

function _blog_index() {

  if [[ ${globaldebug} == "true" ]]; then
    local debug=true
  else
    local debug=false
  fi

  # This function generates the www_root/blog/index.html file that gets its data from _blog_idx_for_index()
  # ${new_updated_blogs} comes from the function _blogs if anything new or updated is detected
  if [[ ${blog_in_index} == "false" ]] && [[ ${new_updated_blogs} = "true" ]]; then

    if (${debug}) _msg debug "${0:t}_msg_1" "${blog_in_index}"
    if (${debug}) _msg debug "${0:t}_msg_2" "${new_updated_blogs}"
    if (${debug}) _msg debug "${0:t}_msg_3"
    if (${debug}) _msg debug "${0:t}_msg_4" " ${blog_in_index}"

    _msg std "${0:t}_msg_5" " ${www_root}/blog/index.html"

    local blog_index_tpl=$(<${project_dir}/themes/${theme}/blog_index.tpl)
    local blog_index_list=$(<${project_dir}/blog/index.tmp.html)

    if (${debug}) _msg debug "${0:t}_msg_6"
    local blog_index_content=$(echo "${blog_index_tpl}" | perl -pe "s|#sitename|${site_name}|gs; s|#tagline|${site_tagline}|gs")
    if (${debug}) _msg debug "${0:t}_msg_7" " ${project_dir}/blog/index.tmp.html"
    blog_index_content=$( awk -v new_body="$blog_index_list" '{sub(/BODY/, new_body)} 1' <(echo "${blog_index_content}") )

    if (${debug}); then
      _msg debug "${0:t}_msg_8" " ${www_root}/blog/index.html"
      _msg debug "${0:t}_msg_9" " ${#blog_index_content}"
    fi
    echo "$blog_index_content" > ${www_root}/blog/index.html
    _f_last_updated ${www_root}/blog/index.html

  fi

}

function _add_blog_list_to_index() {

  if [[ ${globaldebug} == "true" ]]; then
    local debug=true
  else
    local debug=false
  fi

  # Insert the blog list into the generated index.html (replacing the BLOGINDEX
  # placeholder) when blog_in_index is true
  if (${debug}) _msg debug "${0:t}_msg_1"
  local blog_index_list=$(<${project_dir}/blog/index.tmp.html)
  local site_index_file=$(<${www_root}/index.html)
  echo "${site_index_file}" | awk -v new_body="${blog_index_list}" '{sub(/BLOGINDEX/, new_body)} 1' > "${www_root}/index.html"

}

function _sitemap() {

  if [[ ${globaldebug} == "true" ]]; then
    local debug=true
  else
    local debug=false
  fi

  # Check if sitemap is set to true and if there are updated Blogs or Pages before updating the sitemap.xml file.
  if [[ ${sitemap} == "true" ]] && ( [[ ${new_updated_blogs} == "true" ]] || [[ ${new_updated_pages} == "true" ]] ); then

    _msg main "${0:t}_msg_1"

    local sm_file="sitemap.xml"
    local b_file="sitemap-blogs.xml"
    local p_file="sitemap-pages.xml"
    local sitemap_file="${www_root}/${sm_file}"
    local sitemap_blog="${www_root}/${b_file}"
    local sitemap_page="${www_root}/${p_file}"

    # Find all HTML files and store them in an array
    # local -a html_files=("${(@f)$(find "${www_root}" -type f -name "*.html")}")
    local -a html_files=(${www_root}/**/[a-z]*.html(.))
    # Split them into blog and page files using zsh pattern filtering
    local -a blog_files=(${(M)html_files:#*blog*})
    local -a page_files=(${html_files:#*blog*})
    local -a xml_files=(${www_root}/[a-z]*.xml(.))
    xml_files=(${xml_files:#*sitemap.xml})

    # Start of the XML file for BLOGS
    echo '<?xml version="1.0" encoding="UTF-8"?>' > ${sitemap_blog}
    echo "<!-- Sitemap generated by ${QSGEN} ${VERSION} - https://github.com/kekePower/qsgen2 -->" >> ${sitemap_blog}
    echo "<?xml-stylesheet type=\"text/xsl\" href=\"${site_url}/css/default-sitemap.xsl?sitemap=page\"?>" >> ${sitemap_blog}
    echo '<urlset' >> ${sitemap_blog}
    echo ' xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"' >> ${sitemap_blog}
    echo ' xmlns:xhtml="http://www.w3.org/1999/xhtml"' >> ${sitemap_blog}
    echo ' xmlns:image="http://www.google.com/schemas/sitemap-image/1.1"' >> ${sitemap_blog}
    echo '>' >> ${sitemap_blog}

    # Add each URL to the sitemap
    for file in "${blog_files[@]}"
    do
      # Remove www_root from the path and prepend site_url
      local url="${site_url}${file#$www_root}"
      local lastmod=$(stat -c %y "${file}" 2>/dev/null | cut -d' ' -f1,2 | sed 's/ /T/' | sed 's/\..*$//')

      echo " <url>" >> ${sitemap_blog}
      echo " <loc>${url}</loc>" >> ${sitemap_blog}
      echo " <lastmod><![CDATA[${lastmod}+01:00]]></lastmod>" >> ${sitemap_blog}
      echo " <changefreq><![CDATA[always]]></changefreq>" >> ${sitemap_blog}
      echo " <priority><![CDATA[1]]></priority>" >> ${sitemap_blog}
      echo " </url>" >> ${sitemap_blog}
    done

    # End of the XML file
    echo '</urlset>' >> "${sitemap_blog}"
    _msg std " - ${b_file}"

    # Start of the XML file for PAGES
    echo '<?xml version="1.0" encoding="UTF-8"?>' > ${sitemap_page}
    echo "<!-- Sitemap generated by ${QSGEN} ${VERSION} - https://github.com/kekePower/qsgen2 -->" >> ${sitemap_page}
    echo "<?xml-stylesheet type=\"text/xsl\" href=\"${site_url}/css/default-sitemap.xsl?sitemap=page\"?>" >> ${sitemap_page}
    echo '<urlset' >> ${sitemap_page}
    echo ' xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"' >> ${sitemap_page}
    echo ' xmlns:xhtml="http://www.w3.org/1999/xhtml"' >> ${sitemap_page}
    echo ' xmlns:image="http://www.google.com/schemas/sitemap-image/1.1"' >> ${sitemap_page}
    echo '>' >> ${sitemap_page}

    # Add each URL to the sitemap
    for file in "${page_files[@]}"
    do
      # Remove www_root from the path and prepend site_url
      local url="${site_url}${file#$www_root}"
      local lastmod=$(stat -c %y "${file}" 2>/dev/null | cut -d' ' -f1,2 | sed 's/ /T/' | sed 's/\..*$//')

      echo " <url>" >> ${sitemap_page}
      echo " <loc>${url}</loc>" >> ${sitemap_page}
      echo " <lastmod><![CDATA[${lastmod}+01:00]]></lastmod>" >> ${sitemap_page}
      echo " <changefreq><![CDATA[always]]></changefreq>" >> ${sitemap_page}
      echo " <priority><![CDATA[1]]></priority>" >> ${sitemap_page}
      echo " </url>" >> ${sitemap_page}
    done

    # End of the XML file
    echo '</urlset>' >> "${sitemap_page}"
    _msg std " - ${p_file}"

    # Start of the main sitemap.xml file
    echo '<?xml version="1.0" encoding="UTF-8"?>' > ${sitemap_file}
    echo "<!-- Sitemap generated by ${QSGEN} ${VERSION} - https://github.com/kekePower/qsgen2 -->" >> ${sitemap_file}
    echo "<?xml-stylesheet type=\"text/xsl\" href=\"${site_url}/css/default-sitemap.xsl?sitemap=page\"?>" >> ${sitemap_file}
    echo '<urlset' >> ${sitemap_file}
    echo ' xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"' >> ${sitemap_file}
    echo ' xmlns:xhtml="http://www.w3.org/1999/xhtml"' >> ${sitemap_file}
    echo ' xmlns:image="http://www.google.com/schemas/sitemap-image/1.1"' >> ${sitemap_file}
    echo '>' >> ${sitemap_file}

    # Add each URL to the sitemap
    for file in "${xml_files[@]}"
    do
      # Remove www_root from the path and prepend site_url
      local url="${site_url}${file#$www_root}"
      local lastmod=$(stat -c %y "${file}" 2>/dev/null | cut -d' ' -f1,2 | sed 's/ /T/' | sed 's/\..*$//')

      echo " <url>" >> ${sitemap_file}
      echo " <loc>${url}</loc>" >> ${sitemap_file}
      echo " <lastmod><![CDATA[${lastmod}+01:00]]></lastmod>" >> "${sitemap_file}"
      echo " <changefreq><![CDATA[always]]></changefreq>" >> ${sitemap_file}
      echo " <priority><![CDATA[1]]></priority>" >> ${sitemap_file}
      echo " </url>" >> ${sitemap_file}
    done

    # End of the XML file
    echo '</urlset>' >> "${sitemap_file}"
    _msg std " - ${sm_file}"

    if (${debug}); then _msg debug "${0:t}_msg_2" " ${sitemap_file}"; fi

  fi

}

function _link() {
  # This converts #link tags to actual clickable links in a provided string

  if [[ ${globaldebug} == "true" ]]; then
    local debug=true
  else
    local debug=false
  fi

  local content="${1}"
  local modified_content=""

  # Process the content line by line
  echo "${content}" | while IFS= read -r line; do
    if [[ ${line} == *"#link"* ]]; then
      if (${debug}) _msg debug "${0:t}_msg_1" " ${line}"

      # Extract the URL and the link text
      local url_full=$(echo "${line}" | awk -F'#link ' '{print $2}' | awk -F'¤' '{print $1 "¤" $2}')
      local url_dest=$(echo "${url_full}" | awk -F'¤' '{print $1}')
      local url_txt=$(echo "${url_full}" | awk -F'¤' '{print $2}')

      if (${debug}) _msg debug "${0:t}_msg_2" " ${url_dest}"
      if (${debug}) _msg debug "${0:t}_msg_3" " ${url_txt}"

      # Form the replacement HTML link
      local modified_link="<a href=\"${url_dest}\">${url_txt}"
      if [[ ${url_dest} =~ ^https?:// ]]; then
        # Add external link icon for external URLs
        modified_link+="<img class=\"exticon\" alt=\"External site icon\" src=\"/images/ext-black-top.svg\" width=\"12\" />"
      fi
      modified_link+="</a>"
      line=${line//"#link ${url_full}"/${modified_link}}
    fi
    modified_content+="${line}\n"
  done

  # Return the modified content
  echo -e "${modified_content}"

}
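# Example (illustrative): a line containing
#   #link https://example.com¤Example site
# becomes
#   <a href="https://example.com">Example site<img class="exticon" ...external icon... /></a>
# The '¤' character separates the link destination from the link text.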

function _image() {
  # This replaces #showimg tags with actual HTML img tags in a provided string

  if [[ ${globaldebug} == "true" ]]; then
    local debug=true
  else
    local debug=false
  fi

  local content="${1}"
  local modified_content=""

  # Process the content line by line
  echo "${content}" | while IFS= read -r line; do
    if [[ ${line} == *"#showimg"* ]]; then
      if (${debug}) _msg debug "${0:t}_msg_1" " ${line}"

      # Extract image link and alt text
      local img_link=$(echo "${line}" | awk -F'#showimg ' '{print $2}')
      local image=$(echo "${img_link}" | awk -F'¤' '{print $1}')
      local img_alt=$(echo "${img_link}" | awk -F'¤' '{print $2}')

      # Determine the source of the image
      local real_image=""
      if [[ ${image} =~ ^https?:// ]]; then
        real_image=${image}
      elif [[ ${image} =~ ^\/ ]]; then
        real_image=${image}
      else
        real_image="/images/${image}"
      fi

      # Form the replacement HTML image tag
      local img_tag="<img src=\"${real_image}\" alt=\"${img_alt}\" width=\"500\" />"
      line=${line//"#showimg ${img_link}"/${img_tag}}
    fi
    modified_content+="${line}\n"
  done

  # Return the modified content
  echo -e "${modified_content}"

}

function _youtube() {
  # This embeds a YouTube video in a provided string

  if [[ ${globaldebug} == "true" ]]; then
    local debug=true
  else
    local debug=false
  fi

  local content="${1}"
  local modified_content=""

  # Process the content line by line
  echo "${content}" | while IFS= read -r line; do
    if [[ ${line} == *"#ytvideo"* ]]; then
      if (${debug}) _msg debug "${0:t}_msg_1" " ${line}"

      # Extract YouTube video ID
      local yt_id=$(echo "${line}" | awk -F'#ytvideo ' '{print $2}')

      # Form the replacement YouTube iframe embed
      local yt_iframe="<iframe width=\"560\" height=\"315\" src=\"https://www.youtube.com/embed/${yt_id}\" title=\"YouTube video player\" frameborder=\"0\" allow=\"accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; web-share\" allowfullscreen></iframe>"
      line=${line//"#ytvideo ${yt_id}"/${yt_iframe}}
    fi
    modified_content+="${line}\n"
  done

  # Return the modified content
  echo -e "${modified_content}"

}
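# Examples (illustrative):
#   #showimg picture.png¤Alt text   -> <img src="/images/picture.png" alt="Alt text" width="500" />
#   #ytvideo VIDEO_ID               -> the embedded YouTube player iframe for that video ID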

function _cleanup() {
  # This removes tags used in the templates that may be left over for some reason

  if [[ ${globaldebug} == "true" ]]; then
    local debug=true
  else
    local debug=false
  fi

  local content="${1}"

  if (${debug}) _msg debug "${0:t}_msg_1"

  # Perform the cleanup
  # -e "s|BLOGINDEX\ ||g"
  local cleaned_content=$(echo "${content}" | sed \
    -e "s|¤||g" \
    -e "s|#showimg\ ||g" \
    -e "s|#ytvideo\ ||g" \
    -e "s|#link\ ||g" \
    )

  # Return the cleaned content
  echo "${cleaned_content}"

}

function _html() {

  if [[ ${globaldebug} == "true" ]]; then
    local debug=true
  else
    local debug=false
  fi

  local content="${1}"

  if ${debug}; then
    _msg debug "${0:t}_msg_1"
  fi

  # Use perl to convert QStags to HTML
  perl -0777 -pe '
    BEGIN {
      @qstags = (
        "#BR", "<br/>\n",
        "#BD", "<b>", "#EBD", "</b>",
        "#I", "<i>", "#EI", "</i>\n",
        "#P", "<p>", "#EP", "</p>\n",
        "#Q", "<blockquote>", "#EQ", "</blockquote>\n",
        "#C", "<code>", "#EC", "</code>\n",
        "#H1", "<h1>", "#EH1", "</h1>\n",
        "#H2", "<h2>", "#EH2", "</h2>\n",
        "#H3", "<h3>", "#EH3", "</h3>\n",
        "#H4", "<h4>", "#EH4", "</h4>\n",
        "#H5", "<h5>", "#EH5", "</h5>\n",
        "#H6", "<h6>", "#EH6", "</h6>\n",
        "#STRONG", "<strong>", "#ESTRONG", "</strong>\n",
        "#EM", "<em>", "#SEM", "</em>\n",
        "#DV", "<div>", "#EDV", "</div>\n",
        "#SPN", "<span>", "#ESPN", "</span>\n",
        "#UL", "<ul>", "#EUL", "</ul>\n",
        "#OL", "<ol>", "#EOL", "</ol>\n",
        "#LI", "<li>", "#ELI", "</li>\n",
        "#UD", "<u>", "#EUD", "</u>\n",
        "#TBL", "<table>", "#ETBL", "</table>\n",
        "#TR", "<tr>", "#ETR", "</tr>\n",
        "#TD", "<td>", "#ETD", "</td>\n",
        "#TH", "<th>", "#ETH", "</th>\n",
        "#ART", "<article>", "#EART", "</article>\n",
        "#SEC", "<section>", "#ESEC", "</section>\n",
        "#ASIDE", "<aside>", "#EASIDE", "</aside>\n",
        "#NAV", "<nav>", "#ENAV", "</nav>\n",
        "#BTN", "<button>", "#EBTN", "</button>\n",
        "#SEL", "<select>", "#ESEL", "</select>\n",
        "#OPT", "<option>", "#EOPT", "</option>\n",
        "#LT", "<", "#GT", ">", "#NUM", "#"
      );
    }

    for (my $i = 0; $i < $#qstags; $i += 2) {
      my $qstag = $qstags[$i];
      my $html = $qstags[$i + 1];
      s/\Q$qstag\E/$html/g;
    }
  ' <<< "$content"

}

function _zhtml() {

  # This function uses the regex module from Zsh to parse the QStags

  if [[ ${globaldebug} == "true" ]]; then
    local debug=true
  else
    local debug=false
  fi

  local content="${1}"

  if ${debug}; then
    _msg debug "${0:t}_msg_1"
  fi

  # Load regex module
  zmodload zsh/regex

  # Define tag replacements as an associative array
  typeset -A qstags=(
    "#BR" "<br/>\n"
    "#BD" "<b>" "#EBD" "</b>"
    "#I" "<i>" "#EI" "</i>\n"
    "#P" "<p>" "#EP" "</p>\n"
    "#Q" "<blockquote>" "#EQ" "</blockquote>\n"
    "#C" "<code>" "#EC" "</code>\n"
    "#H1" "<h1>" "#EH1" "</h1>\n"
    "#H2" "<h2>" "#EH2" "</h2>\n"
    "#H3" "<h3>" "#EH3" "</h3>\n"
    "#H4" "<h4>" "#EH4" "</h4>\n"
    "#H5" "<h5>" "#EH5" "</h5>\n"
    "#H6" "<h6>" "#EH6" "</h6>\n"
    "#STRONG" "<strong>" "#ESTRONG" "</strong>\n"
    "#EM" "<em>" "#SEM" "</em>\n"
    "#DV" "<div>" "#EDV" "</div>\n"
    "#SPN" "<span>" "#ESPN" "</span>\n"
    "#UL" "<ul>" "#EUL" "</ul>\n"
    "#OL" "<ol>" "#EOL" "</ol>\n"
    "#LI" "<li class=\"libody\">" "#ELI" "</li>\n"
    "#UD" "<u>" "#EUD" "</u>\n"
    "#TBL" "<table>" "#ETBL" "</table>\n"
    "#TR" "<tr>" "#ETR" "</tr>\n"
    "#TD" "<td>" "#ETD" "</td>\n"
    "#TH" "<th>" "#ETH" "</th>\n"
    "#ART" "<article>" "#EART" "</article>\n"
    "#SEC" "<section>" "#ESEC" "</section>\n"
    "#ASIDE" "<aside>" "#EASIDE" "</aside>\n"
    "#NAV" "<nav>" "#ENAV" "</nav>\n"
    "#BTN" "<button>" "#EBTN" "</button>\n"
    "#SEL" "<select>" "#ESEL" "</select>\n"
    "#OPT" "<option>" "#EOPT" "</option>\n"
    "#LT" "<" "#GT" ">" "#NUM" "#"
  )

  for qstag html (${(kv)qstags})
  do
    # Escape tag for regex use
    local escapedTag=$(printf '%s' "$qstag" | sed 's/[].\[^$*]/\\&/g')
    if [[ "$content" =~ "$escapedTag" ]]; then
      content=${content//($qstag)/$html}
    fi
  done

  echo "${content}"

}
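# Example (illustrative): given the tag table above, QStags input such as
#   #H1 Hello #EH1#BR
# is rewritten by _zhtml (or _html) to roughly
#   <h1> Hello </h1><br/>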

_blogs
_pages
_sitemap