#!/usr/bin/env zsh
###############################################################################
###############################################################################
#
# Quick Site Generator 2 is a static website generator inspired by Nikola.
# It is written for the Z shell (zsh) because that's what I use and also because
# I like it better than Bash.
#
# This script is an almost complete rewrite of my old script because it became
# overly complicated and had way too many bugs, even though it worked on simple
# sites.
#
# https://github.com/kekePower/qsgen2/
#
###############################################################################
###############################################################################
VERSION="0.6.0" # Sun-2025-05-18
QSGEN="Quick Site Generator 2"
# Exit immediately if a command exits with a non-zero status
# Do not allow unset variables
# Exit if any command in a pipeline fails
set -euo pipefail
# Set the default locale and handle all filenames correctly
# NOTE(review): forcing LC_ALL=C/LANG=C here means the `locale`-based
# language auto-detection in _load_messages will always see "C" and
# fall back to en_US — confirm this is intended.
LC_ALL=C
LANG=C
IFS=$' \n\t'
# Set the umask to prevent world-writable files
umask 0022
# Enable advanced pattern matching and extended globbing
setopt extended_glob
setopt glob_star_short
# Global associative arrays
typeset -gA config # Configuration parameters
typeset -gA config_cache # Cached configuration values
typeset -ga BLOG_META_STR_ARRAY # Blog metadata array
typeset -gA messages # Localized messages
# Load messages with auto-detection and fallback to en_US
# Load messages with auto-detection and fallback to en_US
# Populates the global `messages` associative array by sourcing a
# language file from include/qsgen2/lang next to this script.
# Selection order: $SITE_LANG, then the system LANG (via `locale`),
# then en_US. Exits 1 when the chosen language file does not exist.
_load_messages() {
local lang_dir="${0:h}/include/qsgen2/lang"
# Default to en_US if no language is specified
local lang="en_US"
# 1. Try configured language first
if [[ -n "${SITE_LANG:-}" ]]; then
lang="$SITE_LANG"
# 2. Try auto-detected system language
# NOTE(review): LANG is forced to "C" at the top of this script, so
# this branch will normally not find a matching language file.
elif command -v locale >/dev/null; then
local sys_lang=$(locale | grep '^LANG=' | cut -d= -f2 | cut -d. -f1 | tr -d '"')
if [[ -n "$sys_lang" && -f "$lang_dir/$sys_lang" ]]; then
lang="$sys_lang"
fi
fi
# Initialize messages array
typeset -gA messages
# Load the language file
if [[ -f "$lang_dir/$lang" ]]; then
# Source the language file which should define the messages array
source "$lang_dir/$lang"
_msg debug "Using language: $lang"
else
_msg error "Language file not found: $lang_dir/$lang"
exit 1
fi
# If no messages were loaded, initialize with default messages
if (( ${#messages[@]} == 0 )); then
_msg warning "No messages loaded from language file. Using default messages."
messages=(
[error.config_not_found]="Configuration file not found"
[error.invalid_config]="Invalid configuration"
[warning.legacy_config_used]="Using legacy config file. Consider renaming to 'site.conf'"
[info.create_config]="Please create 'site.conf' in your project directory."
# Add more default messages as needed
)
fi
}
# Get a localized message
# Get a localized message
_i18n() {
  # Usage: _i18n <key> [arg...]
  # Looks up <key> in the global `messages` associative array and
  # substitutes each positional argument for a literal `%s` placeholder,
  # left to right. Falls back to printing the key itself when no message
  # is defined, so callers always get some output.
  local key="$1"
  shift
  # Single lookup: the original dispatched on the key prefix with a case
  # statement whose branches were identical, and kept an unused counter.
  local msg="${messages[$key]:-}"
  # Fall back to the key so missing translations remain visible
  if [[ -z "$msg" ]]; then
    msg="$key"
  fi
  # Replace one %s placeholder per argument, in order
  local arg
  for arg in "$@"; do
    msg=${msg/\%s/"$arg"}
  done
  printf '%s' "$msg"
}
# Original _msg function that handles the actual message output
# Original _msg function that handles the actual message output
_original_msg() {
  # Usage: _original_msg <level> <text...>
  # Prints a formatted, colorized message; error/warning/debug go to
  # stderr. debug is suppressed unless DEBUG is set; info is suppressed
  # when QUIET is set.
  local level="$1"
  shift
  # Default DEBUG/QUIET to empty: the script runs with `set -u`, so the
  # original unguarded $DEBUG/$QUIET aborted when they were not exported.
  case "$level" in
    debug) [[ -z "${DEBUG:-}" ]] && return ;;
    info) [[ -n "${QUIET:-}" ]] && return ;;
  esac
  # Output the message with appropriate formatting
  case "$level" in
    error) echo -e "\e[1;31m[ERROR] $*\e[0m" >&2 ;;
    warning) echo -e "\e[1;33m[WARNING] $*\e[0m" >&2 ;;
    success) echo -e "\e[1;32m[SUCCESS] $*\e[0m" ;;
    info) echo -e "[INFO] $*" ;;
    debug) echo -e "\e[2m[DEBUG] $*\e[0m" >&2 ;;
    *) echo -e "[$level] $*" ;;
  esac
}
# Wrapper for _msg to support i18n
# Wrapper for _msg to support i18n
_msg() {
  # Usage: _msg <level> [i18n <key>] [args...]
  # With the literal word "i18n" as the second argument, the key is
  # resolved through _i18n before being handed to _original_msg.
  local level="$1"
  shift
  # ${1:-}: guard against calls that carry only a level — the script
  # runs with `set -u`, so a bare "$1" would abort on such calls.
  if [[ "${1:-}" == "i18n" ]]; then
    shift
    local key="$1"
    shift
    _original_msg "$level" "$(_i18n "$key" "$@")"
  else
    _original_msg "$level" "$@"
  fi
}
# Core required tools and their minimum versions
declare -A REQUIRED_TOOLS=(
[zsh]="5.8"
[grep]="3.0"
[sed]="4.5"
[find]="4.7"
)
# Optional tools and their minimum versions
declare -A OPTIONAL_TOOLS=(
[pandoc]="2.0" # Only needed for markdown support
)
# Check for required tools and their versions
# Check for required tools and their versions
_check_dependencies() {
  # Verify that required/optional external tools exist and meet their
  # minimum versions. Missing required tools are fatal (returns 1);
  # everything else only warns.
  # The original reported missing and outdated dependencies twice and
  # carried an unreachable duplicate block after an early `return 1`.
  local tool version min_version
  local missing_deps=()
  local outdated_deps=()
  # Check core required tools
  for tool in "${(@k)REQUIRED_TOOLS}"; do
    if ! command -v "$tool" &>/dev/null; then
      missing_deps+=("$tool")
      continue
    fi
    min_version="${REQUIRED_TOOLS[$tool]}"
    version=$(_get_tool_version "$tool")
    if [[ "$(_version_compare "$version" "$min_version")" == "<" ]]; then
      outdated_deps+=("$tool (installed: $version, required: $min_version)")
    fi
  done
  # Check optional tools once; absence is only a warning, but markdown
  # support explicitly depends on them, so say so when relevant.
  for tool in "${(@k)OPTIONAL_TOOLS}"; do
    if ! command -v "$tool" &>/dev/null; then
      _msg warning i18n warning.optional_dependency "$tool" "${OPTIONAL_TOOLS[$tool]}"
      if [[ "${config[project_generator]:-}" == "markdown" ]]; then
        _msg warning "Optional tool '$tool' is required for markdown support but not found"
      fi
      continue
    fi
    min_version="${OPTIONAL_TOOLS[$tool]}"
    version=$(_get_tool_version "$tool")
    if [[ "$(_version_compare "$version" "$min_version")" == "<" ]]; then
      _msg warning i18n warning.dependency_version "$tool" "$version" "$min_version"
    fi
  done
  # Outdated required dependencies: warn, but keep going
  if (( ${#outdated_deps[@]} > 0 )); then
    _msg warning i18n warning.outdated_dependencies
    for dep in "${outdated_deps[@]}"; do
      _msg warning "- $dep"
    done
  fi
  # Missing required dependencies are fatal
  if (( ${#missing_deps[@]} > 0 )); then
    _msg error i18n error.missing_dependencies "${(j:, :)missing_deps}"
    return 1
  fi
  return 0
}
# Get version of a tool
# Get version of a tool
_get_tool_version() {
  # Usage: _get_tool_version <tool>
  # Prints the first X.Y[.Z] number found in `<tool> --version` output.
  # Fixes two defects in the original: `&1` was a typo for `2>&1`, and
  # only zsh was handled, so every other tool yielded an empty version
  # that _version_compare then flagged as outdated.
  local tool="$1"
  "$tool" --version 2>&1 | grep -oE '[0-9]+\.[0-9]+(\.[0-9]+)?' | head -n1
}
# Compare version numbers
# Compare version numbers
# Usage: _version_compare "1.2.3" "1.10"
# Splits both dotted version strings on "." (zsh (s/./) flag) and
# compares numerically component by component; missing components count
# as 0. Prints "<", ">" or "=" on stdout.
_version_compare() {
local v1=("${(s/./)1}")
local v2=("${(s/./)2}")
local i
# zsh arrays are 1-based, hence the loop starts at 1
for ((i=1; i <= ${#v1} || i <= ${#v2}; i++)); do
if (( ${v1[i]:-0} < ${v2[i]:-0} )); then
echo "<"
return 0
elif (( ${v1[i]:-0} > ${v2[i]:-0} )); then
echo ">"
return 0
fi
done
echo "="
}
# Enable/disable debug mode for the entire script
# Can be overridden in individual functions for targeted debugging
globaldebug=false
# Load messages early
_load_messages
# Set debug mode if needed. Default DEBUG to empty: the script runs with
# `set -u`, so the original unguarded $DEBUG aborted the whole script
# whenever DEBUG was not exported by the caller.
[[ -n "${DEBUG:-}" ]] && globaldebug=true
# Use Zsh fpath to set the path to some extra functions
fpath=(${HOME}/bin/include/common ${HOME}/bin/include/qsgen2/lang $fpath)
# In this case, let's load the 'include' function
autoload include
autoload zini
# Including some colors to the script
include common/colors
# Modern file reading function using zsh features
_read_file() {
local file="$1"
[[ -r "$file" ]] || return 1
# Use zsh's mapfile equivalent with proper error handling
local content
content="${(j: :f)$(<"$file")}" || return 1
# Remove trailing newline if present
echo -n "${content%$'\n'}"
return 0
}
# Print the startup banner (color variables come from common/colors)
echo "${magenta}${blue_bg} ${QSGEN} ${end}${bold_white}${blue_bg}${VERSION} ${end}"
# Validate configuration values
# Validate configuration values
_validate_config() {
  # Checks required config keys, URL format, directories and theme, then
  # sources the theme's own configuration. Exits 1 on any failure.
  # Reordered vs. the original: theme.conf is only sourced after the
  # basic validation has passed, so arbitrary theme code no longer runs
  # when required keys are missing or the theme directory is absent.
  local -a required=(
    "site_name" "site_url" "site_theme" "site_root" "project_root"
    "site_lang" "site_tagline" "site_description"
  )
  local valid=true
  # Check required fields
  for key in "${required[@]}"; do
    if [[ -z "${config[$key]:-}" ]]; then
      _msg error i18n "error.missing_required_config" "${key}"
      valid=false
    fi
  done
  # Validate URLs
  if [[ -n "${config[site_url]:-}" ]]; then
    if ! [[ "${config[site_url]}" =~ ^https?:// ]]; then
      _msg error i18n "error.invalid_url_format"
      valid=false
    fi
  fi
  # Validate directories
  local -a dirs=("site_root" "project_root")
  for dir in "${dirs[@]}"; do
    if [[ -n "${config[$dir]:-}" && ! -d "${config[$dir]}" ]]; then
      _msg error i18n "error.directory_not_found" "${config[$dir]}"
      valid=false
    fi
  done
  # Validate theme
  if [[ -n "${config[site_theme]:-}" && ! -d "${config[project_root]}/themes/${config[site_theme]}" ]]; then
    _msg error i18n "error.theme_not_found" "${config[project_root]}/themes/${config[site_theme]}"
    valid=false
  fi
  # Fail before sourcing anything when validation already failed
  if [[ "$valid" == false ]]; then
    _msg error i18n "error.config_validation_failed"
    exit 1
  fi
  # Load the theme configuration
  local theme_conf="${config[project_root]}/themes/${config[site_theme]}/theme.conf"
  if [[ -f "$theme_conf" ]]; then
    source "$theme_conf"
    _msg debug i18n "debug.theme_config_loaded" "$theme_conf"
  else
    _msg error i18n "error.theme_config_not_found" "$theme_conf"
    exit 1
  fi
}
# Load and validate configuration
# Load and validate configuration
_load_config() {
  # Usage: _load_config <path>
  # Parses <path> with zini into the global config array.
  # Returns 0 on success, 1 when the file is missing or fails to parse.
  local cfg_path="$1"
  # Guard clause: nothing to do for a missing file
  [[ -f "$cfg_path" ]] || return 1
  if ${globaldebug}; then _msg debug i18n "debug.loading_config" "$cfg_path"; fi
  if ! zini "$cfg_path"; then
    _msg error i18n "error.config_load_failed" "$cfg_path"
    return 1
  fi
  return 0
}
# Check for, and source, the config file for this specific website
# Prefer site.conf; fall back to the legacy "config" name with a warning.
config_loaded=false
if _load_config "$(pwd)/site.conf"; then
config_loaded=true
elif _load_config "$(pwd)/config"; then
_msg warning i18n "warning.legacy_config_used"
config_loaded=true
else
_msg error i18n "error.config_not_found"
_msg info i18n "info.config_help"
_msg info i18n "info.config_template"
_msg info i18n "info.git_repo_help"
exit 1
fi
# Validate the loaded configuration
_validate_config
# Set default values for optional parameters
# NOTE(review): nproc is GNU coreutils and not available on BSD/macOS —
# confirm the supported platforms.
config[parallel_jobs]="${config[parallel_jobs]:-$(nproc)}"
config[site_author]="${config[site_author]:-${USER}}"
config[site_timezone]="${config[site_timezone]:-$(date +%Z)}"
# Ensure paths are absolute
if [[ -n "${config[site_root]:-}" && "${config[site_root]}" != /* ]]; then
config[site_root]="$(pwd)/${config[site_root]}"
fi
if [[ -n "${config[project_root]:-}" && "${config[project_root]}" != /* ]]; then
config[project_root]="$(pwd)/${config[project_root]}"
fi
# Safe file operations
# Safe file operations
_safe_path() {
  # Resolve a path against a base directory and refuse any result that
  # escapes it (directory-traversal guard).
  # Usage: _safe_path "base/dir" "relative/path"
  # Prints the normalized absolute path; returns 1 when it falls
  # outside the base directory or cannot be normalized.
  local base_dir="$1"
  local candidate="$2"
  # Anchor relative paths at the base directory
  [[ "$candidate" == /* ]] || candidate="$base_dir/$candidate"
  # Collapse . and .. without requiring the path to exist (-m)
  local resolved
  resolved=$(realpath -m -- "$candidate" 2>/dev/null || echo "")
  # Accept only the base itself or something strictly inside it
  if [[ -n "$resolved" ]] && { [[ "$resolved" == "$base_dir"/* ]] || [[ "$resolved" == "$base_dir" ]]; }; then
    echo "$resolved"
    return 0
  fi
  _msg error i18n error.invalid_path "$candidate"
  return 1
}
# Safe file writing with atomic operation
_safe_write() {
# Usage: _safe_write "content" "/path/to/file"
local content="$1"
local target_file="$2"
local tmp_file
# Create parent directories if they don't exist
local dir_name
dir_name=$(dirname -- "$target_file")
mkdir -p -- "$dir_name"
# Create a temporary file in the same directory for atomic write
tmp_file=$(mktemp -p "$dir_name" "${target_file##*/}.XXXXXXXXXX")
# Write content to temporary file
echo -n "$content" > "$tmp_file" || {
_msg error i18n error.write_failed "$tmp_file"
rm -f -- "$tmp_file"
return 1
}
# Atomically move the file into place
mv -f -- "$tmp_file" "$target_file" || {
_msg error i18n error.move_failed "$tmp_file" "$target_file"
rm -f -- "$tmp_file"
return 1
}
return 0
}
# Debug: Show loaded configuration
if (${globaldebug}); then
_msg debug i18n debug.loaded_config
# zsh (kv) flag: iterate the config associative array as key/value pairs
for key value in ${(kv)config}; do
_msg debug i18n debug.config_key_value "$key" "$value"
done
_msg debug i18n debug.config_end
fi
# Check if we're in a git repository (but don't fail, just warn)
# NOTE(review): this warns when we ARE inside a git work tree; if the
# intent is to recommend version control, the condition looks inverted —
# confirm against the warning.git_repo message text.
if git rev-parse --is-inside-work-tree &>/dev/null; then
_msg warning i18n warning.git_repo
_msg info i18n info.git_repo_help
fi
# Create necessary directories if they don't exist
for dir in "${config[project_root]}/themes" "${config[project_root]}/pages" "${config[project_root]}/blog"; do
if [[ ! -d "$dir" ]]; then
mkdir -p "$dir"
_msg debug i18n debug.created_directory "$dir"
fi
done
# Set up the appropriate engine and file extension
if [[ ${config[project_generator]} == "qstags" ]]; then
engine=_qstags
export file_ext="qst"
elif [[ ${config[project_generator]} == "markdown" ]]; then
if ! command -v pandoc &>/dev/null; then
_msg error i18n pandoc.install
_msg info i18n pandoc.download
exit 1
else
engine="pandoc"
export file_ext="md"
fi
else
_msg error i18n generator.not_found
exit 1
fi
_msg debug i18n debug.using_engine "$engine" "$file_ext"
function _generate_sample_page() {
  # Usage: _generate_sample_page
  # Creates pages/about.<file_ext> with localized sample content unless
  # the file already exists. Returns 1 when writing fails.
  local sample_page="${config[project_root]}/pages/about.${file_ext}"
  if [[ ! -f "${sample_page}" ]]; then
    _msg info i18n info.creating_sample_page
    # Fixes vs. original: it called the undefined `_atomic_write file
    # content` (this file defines _safe_write with content first), used
    # a bare `i18n` instead of _i18n, and had broken \" quoting around
    # the path argument.
    if ! _safe_write "# $(_i18n page.about_me_title)
## $(_i18n page.welcome_title)
$(_i18n page.welcome_message "${sample_page}")
## $(_i18n page.my_story_title)
$(_i18n page.my_story_content)
## $(_i18n page.skills_title)
- $(_i18n page.skill_webdev)
- $(_i18n page.skill_design)
- $(_i18n page.skill_opensource)
## $(_i18n page.contact_title)
$(_i18n page.contact_content)
## $(_i18n page.about_site_title)
$(_i18n page.about_site_content)" "${sample_page}"; then
      _msg error i18n error.create_sample_page_failed "${sample_page}"
      return 1
    fi
    _msg success i18n success.sample_page_created "${sample_page}"
  else
    _msg debug i18n debug.sample_page_exists "${sample_page}"
  fi
  return 0
}
function _generate_sample_blog_post() {
  # Usage: _generate_sample_blog_post
  # Creates blog/hello-world.<file_ext> with localized sample content
  # unless the file already exists. Returns 1 when writing fails.
  local sample_post="${config[project_root]}/blog/hello-world.${file_ext}"
  if [[ ! -f "${sample_post}" ]]; then
    _msg info i18n info.creating_sample_post
    # Fixes vs. original: it called the undefined `_atomic_write file
    # content` (this file defines _safe_write with content first), used
    # a bare `i18n` instead of _i18n, and had broken \" quoting around
    # the path argument.
    if ! _safe_write "# $(_i18n blog.hello_world_title)
*$(_i18n blog.published_on) $(date +'%Y-%m-%d')*
$(_i18n blog.welcome_message)
## $(_i18n blog.getting_started)
$(_i18n blog.edit_this_post "${sample_post}")
## $(_i18n blog.features)
- $(_i18n blog.feature_markdown)
- $(_i18n blog.feature_easy_customize)
- $(_i18n blog.feature_fast_lightweight)
## $(_i18n blog.next_steps)
1. $(_i18n blog.step_edit_post)
2. $(_i18n blog.step_add_posts)
3. $(_i18n blog.step_customize_theme)
4. $(_i18n blog.step_publish_site)
$(_i18n blog.happy_blogging)" "${sample_post}"; then
      _msg error i18n error.create_sample_post_failed "${sample_post}"
      return 1
    fi
    _msg success i18n success.sample_post_created "${sample_post}"
  else
    _msg debug i18n debug.sample_post_exists "${sample_post}"
  fi
  return 0
}
function _generate_blog_index() {
  # Usage: _generate_blog_index
  # Creates blog/index.<file_ext> with placeholder sections (latest
  # posts, categories, archives, tags) unless the file already exists.
  local blog_index="${config[project_root]}/blog/index.${file_ext}"
  if [[ ! -f "${blog_index}" ]]; then
    _msg info i18n info.creating_blog_index
    # Fixes vs. original: it called the undefined `_atomic_write file
    # content` (this file defines _safe_write with content first), used
    # a bare `i18n` instead of _i18n, and had broken \" quoting around
    # the path argument.
    if ! _safe_write "# ${config[site_name]:-Blog}
${config[site_tagline]:+> ${config[site_tagline]}}
## $(_i18n blog.latest_posts)
- [$(_i18n blog.sample_post) 1](blog/post1.${file_ext})
- [$(_i18n blog.sample_post) 2](blog/post2.${file_ext})
## $(_i18n blog.categories)
- [$(_i18n blog.sample_category) 1](blog/category1.${file_ext})
- [$(_i18n blog.sample_category) 2](blog/category2.${file_ext})
## $(_i18n blog.archives)
- [2023](blog/2023.${file_ext})
- [2024](blog/2024.${file_ext})
## $(_i18n blog.tags)
- [tag1](blog/tag1.${file_ext})
- [tag2](blog/tag2.${file_ext})
## $(_i18n blog.about)
$(_i18n blog.about_text "${blog_index}")" "${blog_index}"; then
      _msg error i18n error.create_blog_index_failed "${blog_index}"
      return 1
    fi
    _msg success i18n success.blog_index_created "${blog_index}"
  else
    _msg debug i18n debug.blog_index_exists "${blog_index}"
  fi
  return 0
}
function _run_engine() {
  # Usage: _run_engine <content>
  # Dispatches the input to the configured conversion engine and prints
  # the converted HTML on stdout. Returns 1 when the engine is missing
  # or the generator is unknown.
  if [[ ${config[project_generator]} == "qstags" ]]; then
    # $engine is the shell function _qstags here; the original tested it
    # with [[ -x ]], which checks for an executable *file* and therefore
    # always failed for the function. Accept a defined function or any
    # resolvable command.
    if ! typeset -f "$engine" > /dev/null 2>&1 && ! command -v "$engine" &>/dev/null; then
      _msg error i18n error.engine_not_found "$engine"
      return 1
    fi
    "$engine" "$1"
  elif [[ ${config[project_generator]} == "markdown" ]]; then
    if ! command -v "$engine" &>/dev/null; then
      _msg error i18n error.engine_not_found "$engine"
      return 1
    fi
    "$engine" -f markdown -t html "$1"
  else
    _msg error i18n error.unknown_generator "${config[project_generator]}"
    return 1
  fi
}
if (${globaldebug}); then _msg debug "_qsgen2_msg_6"; fi
# Quote the path so a project root containing spaces or glob characters
# does not word-split (the original cd was unquoted).
builtin cd "${config[project_root]}"
# Loading Zsh modules
zmodload zsh/files
zmodload zsh/datetime
zmodload zsh/regex
# Let's put these here for now.
export today=$(strftime "%Y-%m-%d - %T")
export blogdate=$(strftime "%a-%Y-%b-%d")
# Let's create arrays of all the files we'll be working on
# Let's create arrays of all the files we'll be working on
function _list_pages() {
  # Fill the global pages_file_array with *.<file_ext> files in the
  # current (project root) directory. Sets no_pages_found=true when
  # nothing matches.
  local debug=false
  [[ ${globaldebug} == "true" ]] && debug=true
  # Start from a clean slate on every invocation
  pages_file_array=()
  export no_pages_found=false
  # null_glob: an unmatched pattern expands to nothing instead of erroring
  setopt local_options null_glob
  local -a candidates=(*.${file_ext})
  if (( ${#candidates[@]} == 0 )); then
    if ${debug}; then _msg debug "${0:t}_msg_1" " ${file_ext}."; fi
    export no_pages_found=true
    return
  fi
  local entry
  for entry in "${candidates[@]}"; do
    if ${debug}; then _msg debug "${0:t}_msg_2" " ${entry}"; fi
    pages_file_array+=("$entry")
  done
}
function _list_blogs() {
  # Fill the global blogs_file_array with blog/*.blog files, name-sorted
  # descending via the (On) glob qualifier. Sets no_blogs_found=true
  # when nothing matches.
  local debug=false
  [[ ${globaldebug} == "true" ]] && debug=true
  # Start from a clean slate on every invocation
  blogs_file_array=()
  export no_blogs_found=false
  # null_glob: an unmatched pattern expands to nothing instead of erroring
  setopt local_options null_glob
  local -a found=(blog/*.blog(On))
  if (( ${#found[@]} == 0 )); then
    if ${debug}; then _msg debug "${0:t}_msg_1"; fi
    export no_blogs_found=true
    return
  fi
  local entry
  for entry in "${found[@]}"; do
    if ${debug}; then _msg debug "${0:t}_msg_2" " $entry"; fi
    blogs_file_array+=("$entry")
  done
}
# BLOG CACHE
# Cache file mapping blog file names to md5 hashes, one "name:hash" per
# line; consumed/rebuilt by _blog_cache.
blog_cache_file="${config[project_root]}/.blogindex.cache"
function _update_blog_cache() {
  # Persist the freshly generated blog index (blog/index.tmp.html) into
  # the blog cache file so later runs can reuse it.
  local debug=false
  [[ "${globaldebug:-}" == "true" ]] && debug=true
  if ${debug}; then _msg debug "Updating blog cache at ${blog_cache_file}"; fi
  local index_file="${config[project_root]}/blog/index.tmp.html"
  # Guard: the original read the file unconditionally, which aborts the
  # whole script under `set -e` when no index has been generated yet.
  if [[ ! -f "${index_file}" ]]; then
    if ${debug}; then _msg debug "No blog index to cache: ${index_file}"; fi
    return 1
  fi
  # Get the current blog index content and store it in the cache file
  local blog_index_content="$(<${index_file})"
  echo "${blog_index_content}" > "${blog_cache_file}"
  if ${debug}; then _msg debug "Blog cache updated with ${#blog_index_content} bytes"; fi
}
function _load_blog_cache() {
  # Print the cached blog index on stdout; returns 1 when no cache exists.
  # The original referenced a `debug` variable that was never defined in
  # this function, aborting under `set -u`; derive it from globaldebug
  # like the sibling functions do.
  local debug=false
  [[ "${globaldebug:-}" == "true" ]] && debug=true
  if [[ -f "${blog_cache_file}" ]]; then
    if ${debug}; then _msg debug "Loading blog index from cache"; fi
    cat "${blog_cache_file}"
    return 0
  fi
  return 1
}
function _is_blog_cache_stale() {
  # Returns 0 (success) if cache is stale or doesn't exist, 1 otherwise.
  local debug=false
  [[ "${globaldebug:-}" == "true" ]] && debug=true
  # Default to "false": new_updated_blogs is only set after a blog scan,
  # and the original unguarded expansion aborted under `set -u`.
  if [[ "${new_updated_blogs:-false}" == "true" ]]; then
    if ${debug}; then _msg debug "Blog cache stale: New or updated blogs detected"; fi
    return 0
  fi
  # If cache file doesn't exist, it's stale
  if [[ ! -f "${blog_cache_file}" ]]; then
    if ${debug}; then _msg debug "Blog cache stale: Cache file does not exist"; fi
    return 0
  fi
  # Check if cache is older than 1 hour (3600 seconds)
  # NOTE(review): `stat -c %Y` is GNU-specific; BSD stat needs `-f %m`
  local cache_mtime=$(stat -c %Y "${blog_cache_file}" 2>/dev/null || echo 0)
  local current_time=$(date +%s)
  if (( current_time - cache_mtime > 3600 )); then
    if ${debug}; then _msg debug "Blog cache stale: Cache is older than 1 hour"; fi
    return 0
  fi
  if ${debug}; then _msg debug "Blog cache is fresh"; fi
  return 1
}
function _blog_cache() {
  # Decide which blog files need rebuilding: compare each file's md5
  # against the cached hash, collect changed/new files in the global
  # make_blog_array, and rewrite the cache file ("name:hash" per line).
  local debug=false
  [[ "${globaldebug:-}" == "true" ]] && debug=true
  _list_blogs
  # The original declared config_cache here but then used blog_cache,
  # which was never declared associative — declare the array it uses.
  typeset -gA blog_cache
  # Load the existing blog cache
  if [[ -f $blog_cache_file ]]; then
    while IFS=':' read -r name hash; do
      blog_cache[$name]=$hash
      if ${debug}; then _msg debug "${0:t}_msg_1" " ${blog_cache[${name}]}"; fi
    done < "$blog_cache_file"
  fi
  # Initialize the array for storing blog files to process
  make_blog_array=()
  # Process blog files (quoted expansion: names may contain spaces)
  local blog_file current_hash
  for blog_file in "${blogs_file_array[@]}"; do
    # Compute the current blog file hash
    current_hash=$(md5sum "$blog_file" | awk '{print $1}')
    if ${debug}; then _msg debug "${0:t}_msg_2" " ${blog_file}"; fi
    if ${debug}; then _msg debug "${0:t}_msg_3" " ${current_hash}"; fi
    # New or changed file -> schedule it and refresh its cache entry
    # (:- guards the lookup for files not in the cache under `set -u`)
    if [[ "${blog_cache[$blog_file]:-}" != "$current_hash" ]]; then
      if ${debug}; then _msg debug "${0:t}_msg_4" " ${blog_file}"; fi
      if ${debug}; then _msg debug "${0:t}_msg_5" " ${current_hash}"; fi
      make_blog_array+=("$blog_file")
      blog_cache[$blog_file]=$current_hash
    fi
  done
  # Rebuild the blog cache file from scratch
  : >| "$blog_cache_file" # Truncate the file before writing
  local name
  for name in "${(@k)blog_cache}"; do
    echo "$name:${blog_cache[$name]}" >> "$blog_cache_file"
  done
}
# PAGES CACHE
# Returns the array pages_array()
function _pages_cache() {
  # Decide which pages need rebuilding: compare each file's md5 against
  # the cached hash, collect changed/new files in the global pages_array,
  # and rewrite the cache file ("name:hash" per line).
  local debug=false
  [[ "${globaldebug:-}" == "true" ]] && debug=true
  # The original declared config_cache here but then used pages_cache —
  # declare the array it actually uses.
  typeset -gA pages_cache
  # pages_cache_file is never defined anywhere in this script and would
  # abort under `set -u`; default it next to the blog cache.
  # NOTE(review): confirm the intended cache file name.
  : ${pages_cache_file:=${config[project_root]}/.pagesindex.cache}
  _list_pages
  # Load the existing pages cache
  if [[ -f $pages_cache_file ]]; then
    while IFS=':' read -r name hash; do
      pages_cache[$name]=$hash
      if ${debug}; then _msg debug "${0:t}_msg_1" " ${pages_cache[${name}]}"; fi
    done < "$pages_cache_file"
  fi
  # Initialize the array for storing pages files to process
  pages_array=()
  # Process pages files (quoted expansion: names may contain spaces)
  local file current_hash
  for file in "${pages_file_array[@]}"; do
    # Compute the current pages file hash
    current_hash=$(md5sum "$file" | awk '{print $1}')
    if ${debug}; then _msg debug "${0:t}_msg_2" " ${pages_cache[$file]:-}"; fi
    if ${debug}; then _msg debug "${0:t}_msg_3" " current_cache: ${current_hash}"; fi
    # New or changed file -> schedule it and refresh its cache entry
    if [[ "${pages_cache[$file]:-}" != "$current_hash" ]]; then
      if ${debug}; then _msg debug "${0:t}_msg_4" " ${pages_cache[$file]:-}"; fi
      if ${debug}; then _msg debug "${0:t}_msg_5" " current_cache: ${current_hash}"; fi
      pages_array+=("$file")
      pages_cache[$file]=$current_hash
    fi
  done
  # Rebuild the pages cache file from scratch
  : >| "$pages_cache_file" # Truncate the file before writing
  local name
  for name in "${(@k)pages_cache}"; do
    echo "$name:${pages_cache[$name]}" >> "$pages_cache_file"
  done
}
function _last_updated() {
  # Replace the #updated placeholder in the given string with a
  # timestamped "Last updated ..." line and print the result.
  # Usage: _last_updated <content>
  local debug=false
  [[ ${globaldebug} == "true" ]] && debug=true
  local stamp="Last updated ${today} by ${QSGEN} ${VERSION}"
  if ${debug}; then _msg debug "${0:t}_msg_1"; fi
  if ${debug}; then _msg debug "${0:t}_msg_2" " ${stamp}"; fi
  # sed keeps the original semantics: first #updated per line is replaced
  echo "${1}" | sed -e "s|#updated|${stamp}|"
}
function _f_last_updated() {
  # Replace the #updated placeholder directly in a file (in place).
  # Usage: _f_last_updated <file>; returns 1 when the file is missing.
  local debug=false
  [[ "${globaldebug:-}" == "true" ]] && debug=true
  local upd_msg="Last updated ${today} by ${QSGEN} ${VERSION}"
  if ${debug}; then
    _msg debug "${0:t}_msg_1" " ${1}"
    _msg debug "${0:t}_msg_2" " ${upd_msg}"
  fi
  # Check for the file *before* touching it: the original read it with
  # $(<file) unconditionally (aborting under `set -e` when missing) and
  # computed an in-memory replacement that it then discarded before
  # running sed anyway.
  if [[ ! -f "${1}" ]]; then
    _msg debug "${0:t}_msg_3" " '${1}' " "${0:t}_msg_3.1"
    return 1
  fi
  sed -i -e "s|#updated|${upd_msg}|" "${1}"
}
function _file_to_lower() {
  # Normalize a file name for publishing: spaces -> dashes, lowercase,
  # strip leading junk, squash runs of other invalid characters to "-".
  # Usage: _file_to_lower <filename>; prints the result on stdout.
  local filename="${1}"
  # Replace spaces with dashes
  filename="${filename// /-}"
  # Fixes vs. original: it piped the output of an undefined `unknown`
  # command (so the input was ignored), used BRE `+` which matches a
  # literal plus, and never lowercased despite the function's name.
  # Dots are kept valid so the caller's ${name%.*}.html extension
  # handling keeps working.
  printf '%s' "$filename" \
    | tr '[:upper:]' '[:lower:]' \
    | sed -E -e 's/^[^a-zA-Z0-9_.]+//' -e 's/[^a-zA-Z0-9_.-]+/-/g'
}
function _pages() {
# This function generates all the new and updated Pages
# Flow: load the pages cache, then for each new/updated page read it,
# extract the title, convert it to HTML with the configured engine,
# expand plugin tags (#link/#showimg/#linkimg/#ytvideo), merge the
# result into the theme's pages.tpl and write site_root/<name>.html.
if [[ ${globaldebug} == "true" ]]; then
local debug=true
else
local debug=false
fi
_msg main "${0:t}_msg_3"
# Load the cache for Pages
if (${debug}) _msg debug "${0:t}_msg_1"
_pages_cache
if [[ ${no_pages_found} == "true" ]]; then
_msg sub "${0:t}_msg_1"
return
fi
if (( ${#pages_array[@]} > 0 )); then
# If pages_array is not empty, we do work
if (${debug}) _msg debug "${0:t}_msg_4"
for pages_in_array in ${pages_array[@]}
do
if (${debug}) _msg debug "${0:t}_msg_5"
local pages=${config[project_root]}/themes/${config[site_theme]}/pages.tpl
# Let's check if we can access the pages.tpl file.
# It not, exit script.
if [[ ! -f ${pages} ]]; then
_msg info "${0:t}_msg_6" " ${pages}"
exit
else
# Read template once
if (${debug}) _msg debug "${0:t}_msg_7"
local pages_tpl="$(<${pages})"
fi
# _msg std " - ${pages_in_array%.*}.html"
# Read the file once
if (${debug}) _msg debug "${0:t}_msg_9" " ${pages_in_array}"
local page_content="$(<${pages_in_array})"
# Grab the title from the Page
if (${debug}) _msg debug "${0:t}_msg_10"
if [[ ${config[project_generator]} == "native" ]]; then
# Native pages carry a "#title=..." metadata line; capture via zsh $match
while read -r line
do
if [[ "$line" =~ ^#title=(.*) ]]; then
local page_title=${match[1]}
break
#local page_title=$( echo ${page_content} | head -2 | grep \#title | cut -d= -f2 )
fi
done <<< "$page_content"
elif [[ ${config[project_generator]} == "markdown" ]]; then
# Markdown pages: the first ATX heading becomes the page title
while IFS= read -r line
do
# Check if the line starts with '#' and capture the line
if [[ "$line" == \#* ]]; then
# Remove all leading '#' characters and the first space (if present)
local page_title="${line#\#}" # Remove the first '#' character
page_title="${page_title#\#}" # Remove the second '#' character if present
page_title="${page_title#"${page_title%%[![:space:]]*}"}" # Trim leading whitespace
break # Exit the loop after finding the first heading
fi
done <<< ${page_content}
fi
_msg std " - ${page_title}"
if (${debug}) _msg debug "${0:t}_msg_11" " ${page_title}"
# Remove the #title line from the buffer. No longer needed.
if (${debug}) _msg debug "${0:t}_msg_12"
page_content=$( echo ${page_content} | grep -v \#title )
# HTML'ify the page content
if (${debug}) _msg debug "${0:t}_msg_13" " ${pages_in_array}"
page_content=$( _run_engine "$page_content" )
# Look for links, images and videos and convert them if present.
if (${debug}) _msg debug "${0:t}_msg_14"
if [[ $( echo ${page_content} | grep \#link ) ]]; then
if (${debug}) _msg debug "${0:t}_msg_15"
page_content=$( _link "${page_content}" )
fi
if [[ $( echo ${page_content} | grep \#showimg ) ]]; then
if (${debug}) _msg debug "${0:t}_msg_16"
page_content=$( _image "${page_content}" )
fi
if [[ $( echo ${page_content} | grep \#linkimg ) ]]; then
if (${debug}) _msg debug "${0:t}_msg_17"
page_content=$( _linkimg "${page_content}" )
fi
if [[ $( echo ${page_content} | grep \#ytvideo ) ]]; then
if (${debug}) _msg debug "${0:t}_msg_18"
page_content=$( _youtube "${page_content}" )
fi
# Replace every #pagetitle in pages_tpl
# NOTE(review): titles are interpolated straight into the perl pattern;
# a title containing | or regex metacharacters would break this — confirm.
if (${debug}) _msg debug "${0:t}_msg_19"
pages_tpl=$(echo "${pages_tpl}" | perl -pe "s|#pagetitle|${page_title}|gs; s|#tagline|${config[site_tagline]}|gs; s|#sitename|${config[site_name]}|gs")
if (${debug}) _msg debug "${0:t}_msg_20"
# Use awk for multi-line and special character handling
pages_tpl=$( awk -v new_body="$page_content" '{sub(/BODY/, new_body)} 1' <(echo "${pages_tpl}") )
# Replace #updated with today's date and #version with Name and Version to footer
if (${debug}) _msg debug "${0:t}_msg_21"
pages_tpl=$( _last_updated ${pages_tpl} )
# Always use lowercase for file names
if (${debug}) _msg debug "${0:t}_msg_22"
pages_title_lower=$( _file_to_lower "${pages_in_array}" )
# Clean up unused tags, if any
if (${debug}) _msg debug "${0:t}_msg_23"
pages_tpl=$( _cleanup "${pages_tpl}" )
# Write pages_tpl to disk
# _msg std "Writing ${config[site_root]}/${pages_title_lower%.*}.html to disk."
echo "${pages_tpl}" > ${config[site_root]}/${pages_title_lower%.*}.html
# Insert the blog to the front page is blog_in_index is true and the file in the array is index.file_ext
# and if index.tmp.html exist and is not empty
if [[ ${pages_in_array} == "index.${file_ext}" && ${config[site_blog]} == "true" && -s "${config[project_root]}/blog/index.tmp.html" ]]; then
if (${debug}) _msg sub "${0:t}_msg_24" " ${pages_in_array}"
if (${debug}) _msg sub "${0:t}_msg_25" " ${config[site_blog]}"
if (${debug}) _msg sub "${0:t}_msg_26"
if (${debug}) ls -l ${config[project_root]}/blog/index.tmp.html
_add_blog_list_to_index
fi
done
export new_updated_pages=true
else
# Insert the blog to the front page is blog_in_index is true and the file in the array is index.file_ext
# and if index.tmp.html exist and is not empty
# NOTE(review): this branch (no changed pages) still references
# ${pages_in_array}, which is unset here — confirm intended behavior.
if [[ ${config[site_blog]} == "true" && -s "${config[project_root]}/blog/index.tmp.html" ]]; then
_msg std "${0:t}_msg_27"
if (${debug}) _msg sub "${0:t}_msg_28" " ${pages_in_array}"
if (${debug}) _msg sub "${0:t}_msg_29" " ${config[site_blog]}"
if (${debug}) _msg sub "${0:t}_msg_30"
if (${debug}) ls -l ${config[project_root]}/blog/index.tmp.html
_add_blog_list_to_index
fi
_msg sub "${0:t}_msg_31"
export new_updated_pages=false
fi
}
function _blogs() {
# This function either generates blog files or exports metadata based on the argument
if [[ ${globaldebug} == "true" ]]; then
local debug=true
else
local debug=false
fi
_msg main "${0:t}_msg_3"
# Running function _list_blogs
if (${debug}) _msg debug "${0:t}_msg_1"
_list_blogs
if [[ ${no_blogs_found} == "true" ]]; then
_msg sub "${0:t}_msg_2"
return
fi
# Running function _blog_cache
if (${debug}) _msg debug "${0:t}_msg_4"
_blog_cache
if (( ${#make_blog_array[@]} > 0 )); then
# Declare arrays to hold blog content and metadata
typeset -ga BLOG_META_STR_ARRAY
typeset -A blog_contents
typeset -A blog_metadata
# Array to store PIDs of background jobs
local pids=()
# Counter for active jobs
local active_jobs=0
# Maximum number of parallel jobs (configurable, default to number of CPU cores)
local max_jobs=${config[parallel_jobs]:-$(nproc)}
# Load template once before processing
if [[ -f ${config[project_root]}/themes/${config[site_theme]}/blogs.tpl ]]; then
local blog_tpl=$(<"${config[project_root]}/themes/${config[site_theme]}/blogs.tpl")
else
_msg info "${0:t}_msg_5"
exit
fi
# Create a FIFO for inter-process communication
local fifo=$(mktemp -u)
mkfifo "${fifo}"
# Function to process a single blog file
_process_blog() {
local blog="$1"
local debug="$2"
local blog_tpl="$3"
# Process in a subshell to avoid variable conflicts
(
# Process blog in memory
local content="$(<"${blog}")"
local sdate btitle ingress body blog_index blog_dir blog_url
local date_found=false
local title_found=false
# Process content line by line
while IFS= read -r line; do
[[ "${line}" == "DATE "* ]] && { date_found=true; sdate=(${line#DATE }); }
[[ "${line}" == "BLOG_TITLE "* ]] && { title_found=true; btitle="${line#BLOG_TITLE }"; }
[[ "${date_found}" == true && "${title_found}" == true ]] && break
done <<< "${content}"
# Skip if required metadata is missing
[[ "${date_found}" != true || "${title_found}" != true ]] && return 1
# Extract blog content
ingress=$(echo "${content}" | sed "s/'/\\\'/g" | xargs |
grep -Po "#INGRESS_START\K(.*?)#INGRESS_STOP" |
sed "s|\ \#INGRESS_STOP||; s|^\ ||")
body=$(echo "${content}" | sed "s/'/\\\'/g" | xargs |
grep -Po "#BODY_START\K(.*?)#BODY_STOP" |
sed "s|\ \#BODY_STOP||; s|^\ ||")
# Process blog metadata
sdate=($(echo ${sdate} | sed 's|-| |g'))
blog_index=$(echo "${btitle:l}" | sed 's/ /_/g; s/[,.:()]//g')
blog_dir="/blog/${sdate[2]}/${sdate[3]:l}/${sdate[4]}"
blog_url="${blog_dir}/${blog_index}.html"
# Generate blog content with template
local blog_content=$(echo "${blog_tpl}" | \
perl -pe "
s|BLOGTITLE|${btitle}|g;
s|BLOGURL|${blog_url}|g;
s|\QINGRESS\E|${ingress}|g;
s|\QBODY\E|${body}|g")
# Apply template variables
blog_content="${blog_content//CALNDAY/${sdate[4]}}"
blog_content="${blog_content//CALYEAR/${sdate[2]}}"
blog_content="${blog_content//CALMONTH/${sdate[3]}}"
blog_content="${blog_content//CALADAY/${sdate[1]}}"
# Process content with engine and plugins
blog_content=$(_run_engine "${blog_content}")
[[ "${blog_content}" == *"#link"* ]] && blog_content=$(_link "${blog_content}")
[[ "${blog_content}" == *"#showimg"* ]] && blog_content=$(_image "${blog_content}")
[[ "${blog_content}" == *"#linkimg"* ]] && blog_content=$(_linkimg "${blog_content}")
[[ "${blog_content}" == *"#ytvideo"* ]] && blog_content=$(_youtube "${blog_content}")
# Apply site-wide variables
blog_content=$(echo "${blog_content}" | \
perl -pe "s|#tagline|${config[site_tagline]}|gs;
s|#sitename|${config[site_name]}|gs;
s|#pagetitle|${page_title}|gs")
# Final processing
blog_content=$(_last_updated "${blog_content}")
blog_content=$(_cleanup "${blog_content}")
# Output metadata and content through FIFO
echo "${blog}:"
echo " dir: ${blog_dir}"
echo " url: ${blog_url}"
echo " title: ${btitle}"
echo " content: |"
echo " ${blog_content//$'\n'/$'\n '}"
echo "---"
) > "${fifo}.${blog//\//_}" &
return $?
}
# Start a background process to read from FIFO
# NOTE(review): this reader runs in a backgrounded subshell, so every
# assignment to blog_metadata/blog_contents below is lost to the parent
# shell — the write-out loop further down iterates arrays the parent never
# populated. The records should be parsed in the current shell instead.
local reader_pid
(
while IFS= read -r line; do
# A line ending in ':' opens a new record; the rest are keyed fields.
if [[ $line == *":" ]]; then
current_blog="${line%:}"
elif [[ $line == " dir: "* ]]; then
blog_metadata["${current_blog}_dir"]="${line# dir: }"
elif [[ $line == " url: "* ]]; then
blog_metadata["${current_blog}_url"]="${line# url: }"
elif [[ $line == " title: "* ]]; then
blog_metadata["${current_blog}_title"]="${line# title: }"
elif [[ $line == " content: |" ]]; then
# Indented lines that follow belong to the content block.
blog_content=""
while IFS= read -r content_line; do
[[ $content_line == " "* ]] || break
# NOTE(review): "\n" in double quotes is a literal backslash-n in
# zsh, not a newline — later consumers must expand it (echo -e).
blog_content+="${content_line# }\n"
done
blog_contents["${current_blog}"]="${blog_content%\n}"
fi
done < <(cat "${fifo}".* 2>/dev/null)
) &
reader_pid=$!
# Export functions and variables needed in subshells
# NOTE(review): `export -f` is a bash feature — zsh's export has no -f —
# and exporting an associative array (config) does not propagate it.
# The background jobs work only because zsh subshells inherit the parent's
# functions and variables anyway; these two lines likely error or no-op.
export -f _run_engine _link _image _linkimg _youtube _last_updated _cleanup
export config
# Process blogs in parallel
for blog in "${make_blog_array[@]}"; do
# Wait for a slot if we've reached max jobs
while (( active_jobs >= max_jobs )); do
# Check for completed jobs
local new_pids=()
local completed=0
for pid in "${pids[@]}"; do
if kill -0 "$pid" 2>/dev/null; then
new_pids+=("$pid")
else
((completed++))
fi
done
pids=("${new_pids[@]}")
((active_jobs -= completed))
# If still at max, wait a bit
(( active_jobs >= max_jobs )) && sleep 0.1
done
# Start a new job
if (${debug}) _msg debug "Processing blog: ${blog}"
_process_blog "${blog}" "${debug}" "${blog_tpl}" &
pids+=($!)
((active_jobs++))
done
# Wait for all background jobs to complete
wait "${pids[@]}" 2>/dev/null
# Signal the reader to finish
# NOTE(review): nothing actually signals the reader; its `cat` consumed
# whatever FIFO files existed when it started, so records written after
# that point are silently missed.
wait $reader_pid 2>/dev/null
# Write all blogs to disk in a single pass
# NOTE(review): "${!blog_contents[@]}" is bash syntax for the keys of an
# associative array; in zsh this is a bad substitution — the zsh form is
# "${(k)blog_contents[@]}".
for blog in "${!blog_contents[@]}"; do
local dir="${blog_metadata[${blog}_dir]}"
local url="${blog_metadata[${blog}_url]}"
local title="${blog_metadata[${blog}_title]}"
local content="${blog_contents[$blog]}"
# Skip if required fields are missing
if [[ -z "$dir" || -z "$url" || -z "$content" ]]; then
_msg warning "Skipping blog post due to missing metadata: ${blog}"
continue
fi
# Construct the full output path safely
local output_path
output_path=$(_safe_path "${config[site_root]}" "${url#/}") || {
_msg error "Invalid output path for blog: $url"
continue
}
# Create output directory if it doesn't exist
local output_dir
output_dir=$(dirname -- "$output_path")
if ! mkdir -p -- "$output_dir"; then
_msg error "Failed to create directory: $output_dir"
continue
fi
# Write the blog content safely
if ! _safe_write "$content" "$output_path"; then
_msg error "Failed to write blog post: $output_path"
continue
fi
# Add to metadata array for index generation
# NOTE(review): the key is written "SDATA:" here, but _blog_idx_for_index
# parses "SDATE:" (with a trailing space) — one side must change or the
# date field never round-trips.
BLOG_META_STR_ARRAY+=("SDATA:${dir}||BTITLE:${title}||INGRESS:${content}||URL:${url}")
if (${debug}); then
_msg debug "Successfully wrote blog post: $output_path"
fi
done
# Clean up FIFO files
rm -f "${fifo}" "${fifo}".* 2>/dev/null
# Now BLOG_META_STR_ARRAY contains the metadata string for each blog post
export BLOG_META_STR_ARRAY
# NOTE(review): the `else` below pairs with an `if` earlier in this
# function (above this excerpt) that checks whether any blogs were built.
if (${debug}) _msg debug "${0:t}_msg_24"
export new_updated_blogs=true
else
_msg sub "${0:t}_msg_25"
export new_updated_blogs=false
fi
if [[ ${new_updated_blogs} == "true" ]]; then
if (${debug}) _msg sub "${0:t}_msg_26"
_blog_idx_for_index
if (${debug}) _msg sub "${0:t}_msg_27"
_blog_index
fi
}
function _blog_idx_for_index() {
# This function generates the file blog/index.tmp.html:
# one rendered blog_list.tpl entry per record in BLOG_META_STR_ARRAY,
# concatenated and run through the markup engine.
if [[ ${globaldebug} == "true" ]]; then
local debug=true
else
local debug=false
fi
_msg sub "${0:t}_msg_1" " ${config[project_root]}/blog/index.tmp.html"
if (${debug}) _msg debug "${0:t}_msg_2"
# Load the per-entry template once; it is reused for every post.
local blog_list_tpl=$(<${config[project_root]}/themes/${config[site_theme]}/blog_list.tpl)
local blog_list_content=""
# Truncate file before writing new one
: >| "${config[project_root]}/blog/index.tmp.html"
# if (${debug}) _msg debug "${0:t}_msg_3" " ${BLOG_META_STR_ARRAY[@]}"
# NOTE(review): unquoted array expansion — zsh does not word-split
# parameters by default so each element stays intact, but quoting
# ("${BLOG_META_STR_ARRAY[@]}") would be clearer and safer.
for meta_str in ${BLOG_META_STR_ARRAY[@]}
do
if (${debug}) _msg debug "${0:t}_msg_4"
if (${debug}) _msg debug "${0:t}_msg_5" " ${meta_str}"
# Split meta_str into individual metadata components
# (zsh expansion flag (s/||/) splits on the literal '||' delimiter)
local -a meta_array=("${(@s/||/)meta_str}")
# Initialize variables to store each component
local sdate btitle ingress url
# Iterate over each component and extract information
if (${debug}) _msg debug "${0:t}_msg_6"
for component in "${meta_array[@]}"
do
# NOTE(review): these strip a "KEY: " prefix *with* a trailing space,
# but the producer writes "SDATA:${dir}" etc. with no space (and key
# SDATA vs SDATE does not match) — verify the record format; as written
# the prefix is never removed and sdate keeps its "SDATE:..." text.
case "${component}" in
SDATE:*) sdate=${component#SDATE: } ;;
BTITLE:*) btitle=${component#BTITLE: } ;;
INGRESS:*) ingress=${component#INGRESS: } ;;
URL:*) url=${component#URL: } ;;
esac
done
# Word-split the stored date string into its fields via echo.
local adate=( $( echo ${sdate} ) )
local caladay="${adate[1]}"
local calyear="${adate[2]}"
local calmonth="${adate[3]}"
local calnday="${adate[4]}"
local bdate="${adate[1]} - ${adate[4]}/${adate[3]}/${adate[2]}"
# Render this entry by substituting placeholders in the template.
# NOTE(review): values are interpolated into the perl program; a '|' in
# any field breaks the substitution — consider env vars instead.
blog_list_content+=$(
echo "${blog_list_tpl}" | \
perl -pe "\
s|BLOGURL|${config[site_url]}${url}|g; \
s|BLOGTITLE|${btitle}|g; \
s|INGRESS|${ingress}|g; \
s|BLOGDATE|${bdate}|g; \
s|CALADAY|${caladay}|g; \
s|CALNDAY|${calnday}|g; \
s|CALMONTH|${calmonth}|g; \
s|CALYEAR|${calyear}|g \
")
unset sdate btitle ingress url adate caladay calyear calmonth calnday
done
if (${debug}) _msg debug "${0:t}_msg_7" " ${engine} " "${0:t}_msg_7.1"
# Catch any QStags or Markdown in the Ingress
blog_list_content=$( _run_engine ${blog_list_content} )
if (${debug}) _msg debug "${0:t}_msg_8" " ${config[project_root]}/blog/index.tmp.html"
#if (${debug}) _msg debug "${0:t}_msg_9" " ${blog_list_content}"
echo ${blog_list_content} > ${config[project_root]}/blog/index.tmp.html
_update_blog_cache
}
function _blog_index() {
# Build blog/index.html from blog_index.tpl plus the generated blog list,
# falling back to the cached copy when nothing changed.
if [[ ${globaldebug} == "true" ]]; then
local debug=true
else
local debug=false
fi
# Check if we need to update the blog index
# NOTE(review): new_updated_blogs is a "true"/"false" string exported by
# the blog build; under `set -u` this expansion errors if it was never
# exported — confirm the build always runs first.
if [[ ${new_updated_blogs} == "true" ]] || _is_blog_cache_stale; then
if (${debug}) _msg debug "Generating new blog index"
# Get the template
local blog_index_tpl=$(<${config[project_root]}/themes/${config[site_theme]}/blog_index.tpl)
# Get the blog list content
local blog_index_list
if [[ ${new_updated_blogs} == "true" ]]; then
# If we have new/updated blogs, use the fresh content
blog_index_list=$(<${config[project_root]}/blog/index.tmp.html)
else
# Otherwise try to load from cache
blog_index_list=$(_load_blog_cache) || {
# If cache load fails, use the fresh content
blog_index_list=$(<${config[project_root]}/blog/index.tmp.html)
}
fi
# Generate the final content
# Site-wide placeholders first (perl), then splice the multi-line list in
# for BODY via awk.
local blog_index_content=$(echo "${blog_index_tpl}" | \
perl -pe "s|#sitename|${config[site_name]}|gs; s|#tagline|${config[site_tagline]}|gs")
# NOTE(review): awk's sub() treats '&' in the replacement as "the matched
# text" — a literal '&' in the blog list would corrupt the output.
blog_index_content=$(awk -v new_body="$blog_index_list" '{sub(/BODY/, new_body)} 1' <(echo "${blog_index_content}"))
# Create output directory if it doesn't exist
mkdir -p "${config[site_root]}/blog"
# Write the index file
echo "$blog_index_content" > "${config[site_root]}/blog/index.html"
_f_last_updated "${config[site_root]}/blog/index.html"
# Update the cache with the new content
_update_blog_cache
if (${debug}); then
_msg debug "Generated new blog index at ${config[site_root]}/blog/index.html"
_msg debug "Blog index size: ${#blog_index_content} bytes"
fi
else
# Use cached content
if (${debug}) _msg debug "Using cached blog index"
local cached_content=$(_load_blog_cache)
mkdir -p "${config[site_root]}/blog"
echo "$cached_content" > "${config[site_root]}/blog/index.html"
fi
}
function _add_blog_list_to_index() {
    # Splice the generated blog list into the site's front page by replacing
    # the BLOGINDEX placeholder in site_root/index.html.
    local debug=false
    if [[ ${globaldebug} == "true" ]]; then
        debug=true
    fi
    # Let's find the file 'index.qst' and add the blog if blog_in_index is true
    if ${debug}; then
        _msg debug "${0:t}_msg_1"
    fi
    # Read both files fully before rewriting index.html in place.
    local fresh_list=$(<${config[project_root]}/blog/index.tmp.html)
    local front_page=$(<${config[site_root]}/index.html)
    echo "${front_page}" | \
        awk -v new_body="${fresh_list}" '{sub(/BLOGINDEX/, new_body)} 1' \
        > "${config[site_root]}/index.html"
}
function _sitemap() {
# Generate XML sitemaps (sitemap.xml, sitemap-blogs.xml, sitemap-pages.xml)
# under site_root when sitemaps are enabled and content changed.
if [[ ${globaldebug} == "true" ]]; then
local debug=true
else
local debug=false
fi
# Check if sitemap is set to true and if there are updated Blogs, Pages, or if cache is stale
if ([[ ${config[site_sitemap]} == "true" ]] &&
([[ ${new_updated_blogs} == "true" ]] ||
[[ ${new_updated_pages} == "true" ]] ||
_is_blog_cache_stale)) ||
[[ ${sitemap_force} == "true" ]]; then
setopt extendedglob
_msg main "${0:t}_msg_1"
local sm_file="sitemap.xml"
local b_file="sitemap-blogs.xml"
local p_file="sitemap-pages.xml"
local sitemap_file="${config[site_root]}/${sm_file}"
local sitemap_blog="${config[site_root]}/${b_file}"
local sitemap_page="${config[site_root]}/${p_file}"
# Find all HTML files and store them in an array
# NOTE(review): this cd changes the working directory for the rest of the
# script run — consider a subshell or pushd/popd.
builtin cd ${config[site_root]}
# (.) glob qualifier: plain files only; **/ recurses via glob_star_short.
local -a html_files=(**/[a-z]*.html(.))
local -a blog_files=()
local -a page_files=()
for file in "${html_files[@]}"; do
if [[ $file == *blog* ]]; then
blog_files+=("$file")
else
page_files+=("$file")
fi
done
# Start of the XML file for BLOGS
# NOTE(review): this function is corrupted from here on — the XML
# declaration/<urlset> literals are empty, the next echo opens a quote
# that is never closed, and the remaining lines (down to the closing
# brace) actually belong to the tail of the _link plugin function.
# Restore this region from version control; do not ship as-is.
echo '' > ${sitemap_blog}
echo "" >> ${sitemap_blog}
echo "" >> ${sitemap_blog}
echo '"
fi
modified_link+=""
line=${line//"#link ${url_full}"/${modified_link}}
fi
modified_content+="${line}\n"
done
# Return the modified content
echo -e "${modified_content}"
}
function _image() {
    # Replace '#showimg <image>¤<alt text>' tags with HTML <img> tags.
    #
    # Image source resolution:
    #   http(s)://...  -> used verbatim
    #   /path...       -> used verbatim (site-absolute)
    #   anything else  -> prefixed with /images/
    #
    # NOTE(review): the original <img> markup was lost from this file; the
    # tag built below is a reconstruction — confirm it matches the theme.
    local debug=false
    # ${globaldebug:-} guards against `set -u` when the global is unset.
    if [[ ${globaldebug:-} == "true" ]]; then
        debug=true
    fi
    local content="${1}"
    local modified_content=""
    local line
    # Read via redirection (not a pipe) so modified_content survives the
    # loop in bash as well as zsh.
    while IFS= read -r line; do
        if [[ ${line} == *"#showimg"* ]]; then
            if ${debug}; then
                _msg debug "${0:t}_msg_1" " ${line}"
            fi
            # Extract image link and alt text ('¤' separates the fields)
            local img_link=$(echo "${line}" | awk -F'#showimg ' '{print $2}')
            local image=$(echo "${img_link}" | awk -F'¤' '{print $1}')
            local img_alt=$(echo "${img_link}" | awk -F'¤' '{print $2}')
            # Determine the source of the image
            local real_image=""
            if [[ ${image} =~ ^https?:// ]]; then
                real_image=${image}
            elif [[ ${image} =~ ^\/ ]]; then
                real_image=${image}
            else
                real_image="/images/${image}"
            fi
            # Form the replacement HTML image tag
            local img_tag="<img src=\"${real_image}\" alt=\"${img_alt}\">"
            line=${line//"#showimg ${img_link}"/${img_tag}}
        fi
        # '\n' markers are expanded to real newlines by echo -e below.
        modified_content+="${line}\n"
    done <<< "${content}"
    # Return the modified content
    echo -e "${modified_content}"
}
function _linkimg() {
    # Replace '#linkimg <image>¤<alt text>' tags with an <img> tag wrapped
    # in an <a> link pointing at the (full-size) image itself.
    #
    # Fixes: the original comment here was corrupted and left a bare word
    # ('tags') on its own line, which executed as a command and aborted the
    # script under `set -e`. The <a>/<img> markup was also lost; the tags
    # below are a reconstruction — confirm against the theme's markup.
    local debug=false
    # ${globaldebug:-} guards against `set -u` when the global is unset.
    if [[ ${globaldebug:-} == "true" ]]; then
        debug=true
    fi
    local content="${1}"
    local modified_content=""
    local line
    # Read via redirection (not a pipe) so modified_content survives the
    # loop in bash as well as zsh.
    while IFS= read -r line; do
        if [[ ${line} == *"#linkimg"* ]]; then
            if ${debug}; then
                _msg debug "${0:t}_msg_1" " ${line}"
            fi
            # Extract image and alt text ('¤' separates the fields)
            local img_link=$(echo "${line}" | awk -F'#linkimg ' '{print $2}')
            local img_url=$(echo "${img_link}" | awk -F'¤' '{print $1}')
            local img_alt=$(echo "${img_link}" | awk -F'¤' '{print $2}')
            # Determine the source of the image
            local real_image=""
            if [[ ${img_url} =~ ^https?:// ]]; then
                real_image=${img_url}
            elif [[ ${img_url} =~ ^\/ ]]; then
                real_image=${img_url}
            else
                real_image="/images/${img_url}"
            fi
            # Form the replacement HTML link and image tag
            local img_tag="<a href=\"${real_image}\"><img src=\"${real_image}\" alt=\"${img_alt}\"></a>"
            line=${line//"#linkimg ${img_link}"/${img_tag}}
        fi
        # '\n' markers are expanded to real newlines by echo -e below.
        modified_content+="${line}\n"
    done <<< "${content}"
    # Return the modified content
    echo -e "${modified_content}"
}
function _youtube() {
    # Replace '#ytvideo <video-id>' tags with a YouTube <iframe> embed.
    #
    # NOTE(review): the original iframe markup was lost from this file; the
    # standard 560x315 embed below is a reconstruction — confirm the
    # dimensions/attributes against the theme.
    local debug=false
    # ${globaldebug:-} guards against `set -u` when the global is unset.
    if [[ ${globaldebug:-} == "true" ]]; then
        debug=true
    fi
    local content="${1}"
    local modified_content=""
    local line
    # Read via redirection (not a pipe) so modified_content survives the
    # loop in bash as well as zsh.
    while IFS= read -r line; do
        if [[ ${line} == *"#ytvideo"* ]]; then
            if ${debug}; then
                _msg debug "${0:t}_msg_1" " ${line}"
            fi
            # Extract YouTube video ID
            local yt_id=$(echo "${line}" | awk -F'#ytvideo ' '{print $2}')
            # Form the replacement YouTube iframe embed
            local yt_iframe="<iframe width=\"560\" height=\"315\" src=\"https://www.youtube.com/embed/${yt_id}\" frameborder=\"0\" allowfullscreen></iframe>"
            line=${line//"#ytvideo ${yt_id}"/${yt_iframe}}
        fi
        # '\n' markers are expanded to real newlines by echo -e below.
        modified_content+="${line}\n"
    done <<< "${content}"
    # Return the modified content
    echo -e "${modified_content}"
}
function _cleanup() {
    # Strip leftover plugin tags (#showimg/#ytvideo/#link/#linkimg) and the
    # '¤' field separator that templates may leave behind.
    local debug=false
    if [[ ${globaldebug} == "true" ]]; then
        debug=true
    fi
    local raw="${1}"
    if ${debug}; then
        _msg debug "${0:t}_msg_1"
    fi
    # Perform the cleanup
    # -e "s|BLOGINDEX\ ||g"
    local scrubbed=$(echo "${raw}" | sed \
        -e "s|¤||g" \
        -e "s|#showimg\ ||g" \
        -e "s|#ytvideo\ ||g" \
        -e "s|#link\ ||g" \
        -e "s|#linkimg\ ||g" \
    )
    # Return the cleaned content
    echo "${scrubbed}"
}
function _p_qstags() {
if [[ ${globaldebug} == "true" ]]; then
local debug=true
else
local debug=false
fi
local content="${1}"
if ${debug}; then
_msg debug "${0:t}_msg_1"
fi
# Use perl to convert QStags to HTML
perl -0777 -pe '
BEGIN {
@qstags = (
"#BR", "
\n",
"#BD", "", "#EBD", "",
"#I", "", "#EI", "\n",
"#P", "
", "#EP", "
\n", "#Q", "", "#EQ", "\n", "#C", "
", "#EC", "
\n",
"#H1", "" "#EP" "
\n" "#Q" "" "#EQ" "\n" "#C" "
" "#EC" "
\n"
"#H1" "