qsgen2/qsgen2
Stig-Ørjan Smelror 91b0bbd112 feat(i18n): add Norwegian (nb_NO) translation and clean up
- Add complete Norwegian (nb_NO) language file
- Update .gitignore to exclude temporary and backup files
- Remove temporary and backup files from language directory
- Clean up scripts directory and add to .gitignore
- Update language file format to use key-value pairs
2025-05-18 19:01:39 +02:00

2276 lines
69 KiB
Bash
Executable File

#!/usr/bin/env zsh
###############################################################################
###############################################################################
#
# Quick Site Generator 2 is a static website generator inspired by Nikola.
# It is written for the Z shell (zsh) because that's what I use and also because
# I like it better than Bash.
#
# This script is an almost complete rewrite of my old script because it became
# overly complicated and had way too many bugs, even though it worked on simple
# sites.
#
# https://github.com/kekePower/qsgen2/
#
###############################################################################
###############################################################################
VERSION="0.6.0" # Sun-2025-05-18
QSGEN="Quick Site Generator 2"
# Strict mode: -e exit on error, -u error on unset variables,
# -o pipefail fail a pipeline when any stage fails.
# NOTE(review): with -u enabled, later bare expansions of optional env
# vars (e.g. $DEBUG at L265) must use ${VAR:-} or the script aborts.
set -euo pipefail
# Byte-wise C locale for predictable sorting and globbing.
# NOTE(review): LC_ALL/LANG reach child processes only if they were
# already exported by the caller's environment -- confirm intent.
LC_ALL=C
LANG=C
IFS=$' \n\t'
# Set the umask to prevent world-writable files
umask 0022
# Enable advanced pattern matching and extended globbing
setopt extended_glob
setopt glob_star_short
# Global associative arrays
typeset -gA config # Configuration parameters
typeset -gA config_cache # Cached configuration values
typeset -ga BLOG_META_STR_ARRAY # Blog metadata array
typeset -gA messages # Localized messages
# Load localized messages with auto-detection and fallback to en_US.
# Selection order: $SITE_LANG, then the system locale (via `locale`),
# then en_US. Populates the global associative array `messages` by
# sourcing the matching key-value file under include/qsgen2/lang.
# Fix: a configured/detected language whose file is missing now falls
# back to en_US instead of aborting, matching the documented intent.
_load_messages() {
    local lang_dir="${0:h}/include/qsgen2/lang"
    # NOTE(review): inside a zsh function $0 is the function name while
    # FUNCTION_ARGZERO is set, so ${0:h} may resolve to "." -- confirm.
    local lang="en_US"
    # 1. Try configured language first
    if [[ -n "${SITE_LANG:-}" ]]; then
        lang="$SITE_LANG"
    # 2. Try auto-detected system language
    elif command -v locale >/dev/null; then
        local sys_lang
        sys_lang=$(locale | grep '^LANG=' | cut -d= -f2 | cut -d. -f1 | tr -d '"')
        if [[ -n "$sys_lang" && -f "$lang_dir/$sys_lang" ]]; then
            lang="$sys_lang"
        fi
    fi
    typeset -gA messages
    # Fall back to en_US when the requested language file is missing.
    if [[ ! -f "$lang_dir/$lang" && "$lang" != "en_US" ]]; then
        _msg warning "Language file not found: $lang_dir/$lang. Falling back to en_US."
        lang="en_US"
    fi
    if [[ -f "$lang_dir/$lang" ]]; then
        # The language file defines entries of the messages array.
        source "$lang_dir/$lang"
        _msg debug "Using language: $lang"
    else
        # Even en_US is missing -- the install is broken.
        _msg error "Language file not found: $lang_dir/$lang"
        exit 1
    fi
    # Seed minimal defaults if the language file defined nothing.
    if (( ${#messages[@]} == 0 )); then
        _msg warning "No messages loaded from language file. Using default messages."
        messages=(
            [error.config_not_found]="Configuration file not found"
            [error.invalid_config]="Invalid configuration"
            [warning.legacy_config_used]="Using legacy config file. Consider renaming to 'site.conf'"
            [info.create_config]="Please create 'site.conf' in your project directory."
        )
    fi
}
# Look up a localized message by key and substitute placeholders.
# $1   - message key (e.g. error.config_not_found)
# $2.. - values substituted for successive %s placeholders
# Prints the resolved message (or the key itself when no translation
# exists) without a trailing newline.
# Fixes: the old case statement had two identical branches (collapsed
# to one lookup); the `i` counter was dead code; printf replaces
# `echo -n`, which interprets backslash escapes under zsh.
_i18n() {
    local key="$1"
    shift
    local msg="${messages[$key]:-}"
    # Fall back to the key so missing translations stay visible.
    [[ -z "$msg" ]] && msg="$key"
    # Each argument replaces the next %s placeholder, in order.
    local arg
    for arg in "$@"; do
        msg=${msg/\%s/"$arg"}
    done
    printf '%s' "$msg"
}
# Print a formatted log message by severity level.
# $1   - level: error|warning|success|info|debug (anything else is
#        printed verbatim as [LEVEL])
# $2.. - message text
# debug is suppressed unless DEBUG is set; info is suppressed when
# QUIET is set. error/warning/debug go to stderr.
# Fix: ${DEBUG:-}/${QUIET:-} guards keep `set -u` (enabled at startup)
# from aborting when those environment variables are undefined.
_original_msg() {
    local level="$1"
    shift
    case "$level" in
        debug) [[ -z "${DEBUG:-}" ]] && return ;;
        info) [[ -n "${QUIET:-}" ]] && return ;;
    esac
    case "$level" in
        error) echo -e "\e[1;31m[ERROR] $*\e[0m" >&2 ;;
        warning) echo -e "\e[1;33m[WARNING] $*\e[0m" >&2 ;;
        success) echo -e "\e[1;32m[SUCCESS] $*\e[0m" ;;
        info) echo -e "[INFO] $*" ;;
        debug) echo -e "\e[2m[DEBUG] $*\e[0m" >&2 ;;
        *) echo -e "[$level] $*" ;;
    esac
}
# Dispatch a log message, optionally translating it first.
# Usage: _msg LEVEL TEXT...            (raw message)
#        _msg LEVEL i18n KEY [ARGS...] (localized message)
# When the second word is "i18n", KEY is resolved through _i18n before
# being handed to _original_msg.
# Fix: ${1:-} guards keep `set -u` from aborting on short calls.
_msg() {
    local level="$1"
    shift
    if [[ "${1:-}" == "i18n" ]]; then
        shift
        local key="${1:-}"
        (( $# )) && shift
        _original_msg "$level" "$(_i18n "$key" "$@")"
    else
        _original_msg "$level" "$@"
    fi
}
# Minimum versions for the external tools the generator depends on.
# REQUIRED_TOOLS must all be present; OPTIONAL_TOOLS are only needed
# for specific features (pandoc for markdown input).
typeset -A REQUIRED_TOOLS
REQUIRED_TOOLS=(
    [zsh]="5.8"
    [grep]="3.0"
    [sed]="4.5"
    [find]="4.7"
)
typeset -A OPTIONAL_TOOLS
OPTIONAL_TOOLS=(
    [pandoc]="2.0" # Only needed for markdown support
)
# Verify required external tools exist at their minimum versions.
# Warns about missing/outdated optional tools, warns about outdated
# required tools, and returns 1 (after an error message) when a
# required tool is missing entirely.
# Fix: the original reported missing/outdated dependencies twice and
# re-checked optional tools for the markdown generator (L203-226 were
# duplicated or unreachable after the early `return 1`); one pass each
# now, and outdated tools are reported before the fatal return so the
# user sees everything at once.
_check_dependencies() {
    local tool version min_version
    local missing_deps=()
    local outdated_deps=()
    # Required tools: collect problems, report once below.
    for tool in "${(@k)REQUIRED_TOOLS}"; do
        if ! command -v "$tool" &>/dev/null; then
            missing_deps+=("$tool")
            continue
        fi
        min_version="${REQUIRED_TOOLS[$tool]}"
        version=$(_get_tool_version "$tool")
        if [[ "$(_version_compare "$version" "$min_version")" == "<" ]]; then
            outdated_deps+=("$tool (installed: $version, required: $min_version)")
        fi
    done
    # Optional tools: warn only (covers the markdown/pandoc case too).
    for tool in "${(@k)OPTIONAL_TOOLS}"; do
        if ! command -v "$tool" &>/dev/null; then
            _msg warning i18n warning.optional_dependency "$tool" "${OPTIONAL_TOOLS[$tool]}"
            continue
        fi
        min_version="${OPTIONAL_TOOLS[$tool]}"
        version=$(_get_tool_version "$tool")
        if [[ "$(_version_compare "$version" "$min_version")" == "<" ]]; then
            _msg warning i18n warning.dependency_version "$tool" "$version" "$min_version"
        fi
    done
    # Outdated required tools are non-fatal.
    if (( ${#outdated_deps[@]} > 0 )); then
        _msg warning i18n warning.outdated_dependencies
        local dep
        for dep in "${outdated_deps[@]}"; do
            _msg warning "- $dep"
        done
    fi
    # Missing required tools are fatal for the caller.
    if (( ${#missing_deps[@]} > 0 )); then
        _msg error i18n error.missing_dependencies "${(j:, :)missing_deps}"
        return 1
    fi
    return 0
}
# Print the version number (X.Y or X.Y.Z) reported by a tool's
# `--version` output. zsh and pandoc have dedicated branches; any
# other tool falls through to a generic "first version-looking token"
# scan of stdout+stderr.
_get_tool_version() {
    local cmd="$1"
    local version_re='[0-9]+\.[0-9]+(\.[0-9]+)?'
    case "$cmd" in
        zsh)
            # </dev/null keeps the probed shell from waiting on stdin.
            "$cmd" --version </dev/null | head -n1 | grep -oE "$version_re"
            ;;
        pandoc)
            "$cmd" --version | head -n1 | grep -oE "$version_re"
            ;;
        *)
            "$cmd" --version 2>&1 | grep -oE "$version_re" | head -n1
            ;;
    esac
}
# Compare two dotted version strings component by component.
# Prints "<", ">" or "=" on stdout; always returns 0.
# zsh-specific: ${(s/./)1} splits the argument on "."; zsh arrays are
# 1-indexed, hence the loop starting at i=1. Missing components
# default to 0, so "1.2" compares equal to "1.2.0".
_version_compare() {
local v1=("${(s/./)1}")
local v2=("${(s/./)2}")
local i
# Walk as far as the longer of the two component lists.
for ((i=1; i <= ${#v1} || i <= ${#v2}; i++)); do
if (( ${v1[i]:-0} < ${v2[i]:-0} )); then
echo "<"
return 0
elif (( ${v1[i]:-0} > ${v2[i]:-0} )); then
echo ">"
return 0
fi
done
# Every component matched.
echo "="
}
# --- Bootstrap: debug flag, i18n messages, helper functions ---
# Global debug switch; individual functions may override it locally.
globaldebug=false
# Load localized messages early so _msg can translate keys.
_load_messages
# Fix: ${DEBUG:-} keeps `set -u` (enabled above) from aborting when
# the DEBUG environment variable is not set.
[[ -n "${DEBUG:-}" ]] && globaldebug=true
# Use Zsh fpath to set the path to some extra functions
fpath=(${HOME}/bin/include/common ${HOME}/bin/include/qsgen2/lang $fpath)
# Autoload the 'include' and 'zini' helpers from fpath
autoload include
autoload zini
# Pull in terminal color variables (magenta, blue_bg, ...)
include common/colors
# Read a file and print its content verbatim (minus trailing newline).
# $1 - path to a readable file
# Returns 1 when the file is missing or unreadable.
# Fix: the old implementation expanded the content through the
# (j: :f) flags, which split on newlines and re-joined with spaces,
# corrupting every multi-line file.
_read_file() {
    local file="$1"
    [[ -r "$file" ]] || return 1
    local content
    # $(<file) reads in-shell; the substitution strips trailing newlines.
    content="$(<"$file")" || return 1
    printf '%s' "$content"
}
# Startup banner; color variables come from `include common/colors`.
echo "${magenta}${blue_bg} ${QSGEN} ${end}${bold_white}${blue_bg}${VERSION} ${end}"
# Validate the global config array, then load the theme configuration.
# Checks required keys, the site_url scheme, directory existence and
# theme presence. Exits 1 on any failure.
# Fix: every validation error is now reported before exiting, and
# theme.conf is only sourced after validation has succeeded (the old
# order could source a theme and exit before reporting collected
# validation errors).
_validate_config() {
    local -a required=(
        "site_name" "site_url" "site_theme" "site_root" "project_root"
        "site_lang" "site_tagline" "site_description"
    )
    local valid=true
    local key
    # Required keys must be present and non-empty.
    for key in "${required[@]}"; do
        if [[ -z "${config[$key]:-}" ]]; then
            _msg error i18n "error.missing_required_config" "${key}"
            valid=false
        fi
    done
    # site_url must be an http(s) URL.
    if [[ -n "${config[site_url]:-}" ]]; then
        if ! [[ "${config[site_url]}" =~ ^https?:// ]]; then
            _msg error i18n "error.invalid_url_format"
            valid=false
        fi
    fi
    # Both roots must be existing directories.
    local dir
    for dir in "site_root" "project_root"; do
        if [[ -n "${config[$dir]:-}" && ! -d "${config[$dir]}" ]]; then
            _msg error i18n "error.directory_not_found" "${config[$dir]}"
            valid=false
        fi
    done
    # The chosen theme directory must exist under the project root.
    if [[ -n "${config[site_theme]:-}" && ! -d "${config[project_root]}/themes/${config[site_theme]}" ]]; then
        _msg error i18n "error.theme_not_found" "${config[project_root]}/themes/${config[site_theme]}"
        valid=false
    fi
    # Bail out only after every problem has been reported.
    if [[ "$valid" == false ]]; then
        _msg error i18n "error.config_validation_failed"
        exit 1
    fi
    # Safe to load the theme configuration now.
    local theme_conf="${config[project_root]}/themes/${config[site_theme]}/theme.conf"
    if [[ -f "$theme_conf" ]]; then
        source "$theme_conf"
        _msg debug i18n "debug.theme_config_loaded" "$theme_conf"
    else
        _msg error i18n "error.theme_config_not_found" "$theme_conf"
        exit 1
    fi
}
# Attempt to load a configuration file through zini.
# $1 - path to the config file
# Returns 0 when the file exists and parses cleanly, 1 otherwise
# (missing file is silent; a parse failure logs an error).
_load_config() {
    local config_file="$1"
    [[ -f "$config_file" ]] || return 1
    if (${globaldebug}); then _msg debug i18n "debug.loading_config" "$config_file"; fi
    if ! zini "$config_file"; then
        _msg error i18n "error.config_load_failed" "$config_file"
        return 1
    fi
    return 0
}
# --- Load site configuration: prefer site.conf, accept legacy 'config' ---
config_loaded=false
if _load_config "$(pwd)/site.conf"; then
config_loaded=true
elif _load_config "$(pwd)/config"; then
_msg warning i18n "warning.legacy_config_used"
config_loaded=true
else
# No configuration at all: explain how to create one, then stop.
_msg error i18n "error.config_not_found"
_msg info i18n "info.config_help"
_msg info i18n "info.config_template"
_msg info i18n "info.git_repo_help"
exit 1
fi
# Validate the loaded configuration (exits on failure)
_validate_config
# Defaults for optional parameters
# NOTE(review): nproc is GNU-specific and may be absent on macOS/BSD -- confirm.
config[parallel_jobs]="${config[parallel_jobs]:-$(nproc)}"
config[site_author]="${config[site_author]:-${USER}}"
config[site_timezone]="${config[site_timezone]:-$(date +%Z)}"
# Make site_root/project_root absolute, relative to the current directory
if [[ -n "${config[site_root]:-}" && "${config[site_root]}" != /* ]]; then
config[site_root]="$(pwd)/${config[site_root]}"
fi
if [[ -n "${config[project_root]:-}" && "${config[project_root]}" != /* ]]; then
config[project_root]="$(pwd)/${config[project_root]}"
fi
# Resolve a path and ensure it stays inside a base directory
# (prevents directory traversal such as "../../etc/passwd").
# $1 - base directory; $2 - absolute or base-relative path
# Prints the normalized absolute path on success; returns 1 when the
# result escapes the base directory or cannot be resolved.
# Fix: the base directory is now canonicalized too, so a base
# containing ".." or trailing oddities cannot skew the prefix check.
_safe_path() {
    local base_dir="$1"
    local target_path="$2"
    # Canonicalize the base first so the containment test is sound.
    base_dir=$(realpath -m -- "$base_dir" 2>/dev/null || echo "")
    # Anchor relative targets at the base directory.
    if [[ "$target_path" != /* ]]; then
        target_path="$base_dir/$target_path"
    fi
    # Resolve . and .. without requiring the path to exist (-m).
    local normalized_path
    normalized_path=$(realpath -m -- "$target_path" 2>/dev/null || echo "")
    # Reject anything that is not the base itself or strictly below it.
    if [[ -z "$base_dir" || -z "$normalized_path" || "$normalized_path" != "$base_dir"/* && "$normalized_path" != "$base_dir" ]]; then
        _msg error i18n error.invalid_path "$target_path"
        return 1
    fi
    echo "$normalized_path"
    return 0
}
# Atomically write content to a file.
# $1 - content; $2 - destination path
# Writes to a mktemp file in the destination directory and mv's it
# into place, so readers never observe a partial file. Returns 1 on
# any failure. Fixes: printf instead of `echo -n` (content starting
# with "-" or holding backslashes must be written verbatim); mkdir and
# mktemp failures are now checked; mktemp's private 0600 mode is
# widened so generated site files stay world-readable.
_safe_write() {
    local content="$1"
    local target_file="$2"
    local tmp_file
    # Ensure the destination directory exists.
    local dir_name
    dir_name=$(dirname -- "$target_file")
    mkdir -p -- "$dir_name" || return 1
    # Temp file must live on the same filesystem for mv to be atomic.
    tmp_file=$(mktemp -p "$dir_name" "${target_file##*/}.XXXXXXXXXX") || return 1
    printf '%s' "$content" > "$tmp_file" || {
        _msg error i18n error.write_failed "$tmp_file"
        rm -f -- "$tmp_file"
        return 1
    }
    # mktemp creates 0600 files; restore normal publish permissions.
    chmod 0644 -- "$tmp_file"
    # Atomically move the file into place.
    mv -f -- "$tmp_file" "$target_file" || {
        _msg error i18n error.move_failed "$tmp_file" "$target_file"
        rm -f -- "$tmp_file"
        return 1
    }
    return 0
}
# Debug: dump the fully-resolved configuration
if (${globaldebug}); then
_msg debug i18n debug.loaded_config
# zsh: ${(kv)config} expands to alternating key/value words, which the
# two loop variables consume pairwise.
for key value in ${(kv)config}; do
_msg debug i18n debug.config_key_value "$key" "$value"
done
_msg debug i18n debug.config_end
fi
# Warn (don't fail) when the project directory is a git work tree
if git rev-parse --is-inside-work-tree &>/dev/null; then
_msg warning i18n warning.git_repo
_msg info i18n info.git_repo_help
fi
# Create the standard project directory layout if missing
for dir in "${config[project_root]}/themes" "${config[project_root]}/pages" "${config[project_root]}/blog"; do
if [[ ! -d "$dir" ]]; then
mkdir -p "$dir"
_msg debug i18n debug.created_directory "$dir"
fi
done
# Select the conversion engine and source-file extension from the
# configured generator: "qstags" uses the internal _qstags function on
# *.qst files; "markdown" uses pandoc on *.md files.
# NOTE(review): _qstags is not defined in this part of the file --
# presumably provided further down; confirm.
if [[ ${config[project_generator]} == "qstags" ]]; then
engine=_qstags
export file_ext="qst"
elif [[ ${config[project_generator]} == "markdown" ]]; then
if ! command -v pandoc &>/dev/null; then
_msg error i18n pandoc.install
_msg info i18n pandoc.download
exit 1
else
engine="pandoc"
export file_ext="md"
fi
else
# Any other generator value is a configuration error.
_msg error i18n generator.not_found
exit 1
fi
_msg debug i18n debug.using_engine "$engine" "$file_ext"
# Create a starter "about" page if none exists; returns 1 on write
# failure. Content is assembled from localized strings.
# NOTE(review): relies on `i18n` and `_atomic_write`, which are not
# defined in this part of the file -- presumably provided further down.
# Fixes: the path placeholder was missing its closing \" (unbalanced
# quoting), and the `[[ $? -ne 0 ]]` check was dead under `set -e`;
# the write is now tested directly with `if !`.
function _generate_sample_page() {
    local sample_page="${config[project_root]}/pages/about.${file_ext}"
    if [[ ! -f "${sample_page}" ]]; then
        _msg info i18n info.creating_sample_page
        if ! _atomic_write "${sample_page}" "# $(i18n page.about_me_title)
## $(i18n page.welcome_title)
$(i18n page.welcome_message \"${sample_page}\")
## $(i18n page.my_story_title)
$(i18n page.my_story_content)
## $(i18n page.skills_title)
- $(i18n page.skill_webdev)
- $(i18n page.skill_design)
- $(i18n page.skill_opensource)
## $(i18n page.contact_title)
$(i18n page.contact_content)
## $(i18n page.about_site_title)
$(i18n page.about_site_content)"; then
            _msg error i18n error.create_sample_page_failed "${sample_page}"
            return 1
        fi
        _msg success i18n success.sample_page_created "${sample_page}"
    else
        _msg debug i18n debug.sample_page_exists "${sample_page}"
    fi
    return 0
}
# Create a starter "hello world" blog post if none exists; returns 1
# on write failure.
# NOTE(review): relies on `i18n` and `_atomic_write`, which are not
# defined in this part of the file -- presumably provided further down.
# Fixes: unbalanced \" around the path placeholder; dead `$?` check
# under `set -e` replaced with a direct `if !` test.
function _generate_sample_blog_post() {
    local sample_post="${config[project_root]}/blog/hello-world.${file_ext}"
    if [[ ! -f "${sample_post}" ]]; then
        _msg info i18n info.creating_sample_post
        if ! _atomic_write "${sample_post}" "# $(i18n blog.hello_world_title)
*$(i18n blog.published_on) $(date +'%Y-%m-%d')*
$(i18n blog.welcome_message)
## $(i18n blog.getting_started)
$(i18n blog.edit_this_post \"${sample_post}\")
## $(i18n blog.features)
- $(i18n blog.feature_markdown)
- $(i18n blog.feature_easy_customize)
- $(i18n blog.feature_fast_lightweight)
## $(i18n blog.next_steps)
1. $(i18n blog.step_edit_post)
2. $(i18n blog.step_add_posts)
3. $(i18n blog.step_customize_theme)
4. $(i18n blog.step_publish_site)
$(i18n blog.happy_blogging)"; then
            _msg error i18n error.create_sample_post_failed "${sample_post}"
            return 1
        fi
        _msg success i18n success.sample_post_created "${sample_post}"
    else
        _msg debug i18n debug.sample_post_exists "${sample_post}"
    fi
    return 0
}
# Create a starter blog index page if none exists; returns 1 on write
# failure.
# NOTE(review): relies on `i18n` and `_atomic_write`, which are not
# defined in this part of the file -- presumably provided further down.
# Fixes: unbalanced \" around the path placeholder; dead `$?` check
# under `set -e` replaced with a direct `if !` test.
function _generate_blog_index() {
    local blog_index="${config[project_root]}/blog/index.${file_ext}"
    if [[ ! -f "${blog_index}" ]]; then
        _msg info i18n info.creating_blog_index
        if ! _atomic_write "${blog_index}" "# ${config[site_name]:-Blog}
${config[site_tagline]:+> ${config[site_tagline]}}
## $(i18n blog.latest_posts)
- [$(i18n blog.sample_post) 1](blog/post1.${file_ext})
- [$(i18n blog.sample_post) 2](blog/post2.${file_ext})
## $(i18n blog.categories)
- [$(i18n blog.sample_category) 1](blog/category1.${file_ext})
- [$(i18n blog.sample_category) 2](blog/category2.${file_ext})
## $(i18n blog.archives)
- [2023](blog/2023.${file_ext})
- [2024](blog/2024.${file_ext})
## $(i18n blog.tags)
- [tag1](blog/tag1.${file_ext})
- [tag2](blog/tag2.${file_ext})
## $(i18n blog.about)
$(i18n blog.about_text \"${blog_index}\")"; then
            _msg error i18n error.create_blog_index_failed "${blog_index}"
            return 1
        fi
        _msg success i18n success.blog_index_created "${blog_index}"
    else
        _msg debug i18n debug.blog_index_exists "${blog_index}"
    fi
    return 0
}
# Run the configured conversion engine on one input.
# $1 - page content (qstags) or input file (markdown)
# Returns 1 when the engine is unavailable or the generator unknown.
# Fix: for "qstags" the engine is the shell *function* _qstags, so
# existence is checked with `typeset -f` -- the old `[[ -x ]]` tested
# for an executable file and always failed for function names.
function _run_engine() {
    if [[ ${config[project_generator]} == "qstags" ]]; then
        if ! typeset -f -- "$engine" >/dev/null 2>&1; then
            _msg error i18n error.engine_not_found "$engine"
            return 1
        fi
        "$engine" "$1"
    elif [[ ${config[project_generator]} == "markdown" ]]; then
        if ! command -v "$engine" &>/dev/null; then
            _msg error i18n error.engine_not_found "$engine"
            return 1
        fi
        "$engine" -f markdown -t html "$1"
    else
        _msg error i18n error.unknown_generator "${config[project_generator]}"
        return 1
    fi
}
# Marker before switching into the project directory
if (${globaldebug}); then _msg debug "_qsgen2_msg_6"; fi
# All generation below runs relative to the project root
builtin cd ${config[project_root]}
# Loading Zsh modules
zmodload zsh/files
zmodload zsh/datetime # provides the strftime builtin used below
zmodload zsh/regex
# Timestamps used in footers and blog metadata
export today=$(strftime "%Y-%m-%d - %T")
export blogdate=$(strftime "%a-%Y-%b-%d")
# Collect page source files (*.${file_ext}) from the current directory
# into the global pages_file_array. Exports no_pages_found=true when
# nothing matches, false otherwise.
function _list_pages() {
    local debug=false
    [[ ${globaldebug} == "true" ]] && debug=true
    # Start from a clean slate on every invocation.
    pages_file_array=()
    export no_pages_found=false
    # null_glob (scoped to this function): an unmatched pattern expands
    # to nothing instead of raising an error.
    setopt local_options null_glob
    local -a page_sources=(*.${file_ext})
    if (( ${#page_sources} == 0 )); then
        if ${debug}; then _msg debug "${0:t}_msg_1" " ${file_ext}."; fi
        export no_pages_found=true
        return
    fi
    for file in "${page_sources[@]}"; do
        if ${debug}; then _msg debug "${0:t}_msg_2" " ${file}"; fi
        pages_file_array+=("$file")
    done
}
# Collect blog source files (blog/*.blog) into the global
# blogs_file_array. Exports no_blogs_found accordingly.
# NOTE(review): blog sources use a fixed ".blog" extension while pages
# and the sample generators use ${file_ext} -- confirm this mismatch
# is intentional.
function _list_blogs() {
if [[ ${globaldebug} == "true" ]]; then
local debug=true
else
local debug=false
fi
# Initialize or clear the blogs array to ensure it's empty before adding files
blogs_file_array=()
export no_blogs_found=false
# Temporarily set null_glob for this function
setopt local_options null_glob
# zsh glob qualifier (On): sort matches by name, descending
local -a blog_files=(blog/*.blog(On))
if (( ${#blog_files[@]} == 0 )); then
if ${debug}; then _msg debug "${0:t}_msg_1"; fi
export no_blogs_found=true
return
else
for file in "${blog_files[@]}"
do
if ${debug}; then _msg debug "${0:t}_msg_2" " $file"; fi
blogs_file_array+=("$file")
done
fi
}
# BLOG CACHE
# Path of the name:md5 cache used to detect changed blog posts.
blog_cache_file="${config[project_root]}/.blogindex.cache"
# Snapshot the rendered blog index (blog/index.tmp.html) into the
# blog cache file.
# Fix: printf replaces `echo`, which interprets backslash escapes
# under zsh and would corrupt cached HTML containing backslashes.
function _update_blog_cache() {
    local debug=false
    [[ ${globaldebug} == "true" ]] && debug=true
    if ${debug}; then _msg debug "Updating blog cache at ${blog_cache_file}"; fi
    # Read the current blog index content in-shell.
    local blog_index_content="$(<"${config[project_root]}/blog/index.tmp.html")"
    # Store the content verbatim in the cache file.
    printf '%s\n' "${blog_index_content}" > "${blog_cache_file}"
    if ${debug}; then _msg debug "Blog cache updated with ${#blog_index_content} bytes"; fi
}
# Print the cached blog index if the cache file exists.
# Returns 0 (and prints the cache) on hit, 1 on miss.
# Fix: the old version referenced $debug without ever defining it in
# this function, which aborts under `set -u`; it is now derived from
# globaldebug like the sibling functions.
function _load_blog_cache() {
    local debug=false
    [[ ${globaldebug:-false} == "true" ]] && debug=true
    if [[ -f "${blog_cache_file}" ]]; then
        if ${debug}; then _msg debug "Loading blog index from cache"; fi
        cat "${blog_cache_file}"
        return 0
    fi
    return 1
}
# Decide whether the blog index cache must be rebuilt.
# Returns 0 (stale) when new/updated blogs were detected, the cache
# file is missing, or it is older than one hour; returns 1 (fresh)
# otherwise.
# Fix: ${new_updated_blogs:-false} guards against a `set -u` abort
# when the flag has not been exported yet.
function _is_blog_cache_stale() {
    local debug=false
    [[ ${globaldebug} == "true" ]] && debug=true
    # New or updated blogs always invalidate the cache.
    if [[ ${new_updated_blogs:-false} == "true" ]]; then
        if ${debug}; then _msg debug "Blog cache stale: New or updated blogs detected"; fi
        return 0
    fi
    if [[ ! -f "${blog_cache_file}" ]]; then
        if ${debug}; then _msg debug "Blog cache stale: Cache file does not exist"; fi
        return 0
    fi
    # Age check: anything older than 1 hour (3600 s) is stale.
    # NOTE(review): stat -c %Y is GNU-specific (BSD uses stat -f %m).
    local cache_mtime=$(stat -c %Y "${blog_cache_file}" 2>/dev/null || echo 0)
    local current_time=$(date +%s)
    if (( current_time - cache_mtime > 3600 )); then
        if ${debug}; then _msg debug "Blog cache stale: Cache is older than 1 hour"; fi
        return 0
    fi
    if ${debug}; then _msg debug "Blog cache is fresh"; fi
    return 1
}
# Determine which blog posts changed since the last run.
# Loads name:md5 pairs from $blog_cache_file into the associative
# array blog_cache, fills make_blog_array with new/changed blog files
# and rewrites the cache file with the fresh hashes.
# Fixes: the original declared `config_cache` but then used the never-
# declared `blog_cache` (associative subscripts fail in zsh without
# `typeset -A`); `${blog_cache[$blog_file]:-}` guards `set -u` for
# files not yet in the cache.
function _blog_cache() {
    local debug=false
    [[ ${globaldebug} == "true" ]] && debug=true
    _list_blogs
    # Maps blog file name -> md5 of its content.
    typeset -gA blog_cache
    # Load the existing cache, one "name:hash" line per entry.
    if [[ -f $blog_cache_file ]]; then
        local name hash
        while IFS=':' read -r name hash; do
            blog_cache[$name]=$hash
            if ${debug}; then _msg debug "${0:t}_msg_1" " ${blog_cache[${name}]}"; fi
        done < "$blog_cache_file"
    fi
    # Blog files that need (re)generation.
    make_blog_array=()
    local blog_file current_hash
    for blog_file in ${blogs_file_array[@]}; do
        current_hash=$(md5sum "$blog_file" | awk '{print $1}')
        if ${debug}; then _msg debug "${0:t}_msg_2" " ${blog_file}"; fi
        if ${debug}; then _msg debug "${0:t}_msg_3" " ${current_hash}"; fi
        # New or changed file: schedule it and record the new hash.
        if [[ ${blog_cache[$blog_file]:-} != "$current_hash" ]]; then
            if ${debug}; then _msg debug "${0:t}_msg_4" " ${blog_file}"; fi
            if ${debug}; then _msg debug "${0:t}_msg_5" " ${current_hash}"; fi
            make_blog_array+=("$blog_file")
            blog_cache[$blog_file]=$current_hash
        fi
    done
    # Rebuild the cache file from scratch (>| overrides noclobber).
    : >| "$blog_cache_file"
    for name in "${(@k)blog_cache}"; do
        echo "$name:${blog_cache[$name]}" >> "$blog_cache_file"
    done
}
# PAGES CACHE
# Determine which pages changed since the last run; mirrors
# _blog_cache. Loads name:md5 pairs from $pages_cache_file into the
# associative array pages_cache, fills pages_array with new/changed
# page files and rewrites the cache file.
# Fixes: the original declared `config_cache` but used the never-
# declared `pages_cache`; `:-` guards added for `set -u`.
# NOTE(review): pages_cache_file is not assigned anywhere in this part
# of the file -- confirm it is defined alongside blog_cache_file.
function _pages_cache() {
    local debug=false
    [[ ${globaldebug} == "true" ]] && debug=true
    # Maps page file name -> md5 of its content.
    typeset -gA pages_cache
    _list_pages
    # Load the existing cache, one "name:hash" line per entry.
    if [[ -f $pages_cache_file ]]; then
        local name hash
        while IFS=':' read -r name hash; do
            pages_cache[$name]=$hash
            if ${debug}; then _msg debug "${0:t}_msg_1" " ${pages_cache[${name}]}"; fi
        done < "$pages_cache_file"
    fi
    # Page files that need (re)generation.
    pages_array=()
    local current_hash
    for file in ${pages_file_array[@]}; do
        current_hash=$(md5sum "$file" | awk '{print $1}')
        if ${debug}; then _msg debug "${0:t}_msg_2" " ${pages_cache[$file]:-}"; fi
        if ${debug}; then _msg debug "${0:t}_msg_3" " current_cache: ${current_hash}"; fi
        # New or changed file: schedule it and record the new hash.
        if [[ ${pages_cache[$file]:-} != "$current_hash" ]]; then
            if ${debug}; then _msg debug "${0:t}_msg_4" " ${pages_cache[$file]:-}"; fi
            if ${debug}; then _msg debug "${0:t}_msg_5" " current_cache: ${current_hash}"; fi
            pages_array+=("$file")
            pages_cache[$file]=$current_hash
        fi
    done
    # Rebuild the cache file from scratch (>| overrides noclobber).
    : >| "$pages_cache_file"
    for name in "${(@k)pages_cache}"; do
        echo "$name:${pages_cache[$name]}" >> "$pages_cache_file"
    done
}
# Replace the #updated placeholder in a string with a "Last updated"
# footer line and print the result.
# $1 - content buffer
# Fix: printf replaces `echo`, which interprets backslash escapes
# under zsh and would corrupt template content. (Only #updated is
# handled here; the old comment also claimed #version, which no code
# ever substituted.)
function _last_updated() {
    local debug=false
    [[ ${globaldebug} == "true" ]] && debug=true
    local upd_msg="Last updated ${today} by <a href=\"https://blog.kekepower.com/qsgen2.html\">${QSGEN} ${VERSION}</a>"
    if ${debug}; then _msg debug "${0:t}_msg_1"; fi
    if ${debug}; then _msg debug "${0:t}_msg_2" " ${upd_msg}"; fi
    local content="${1}"
    # sed with '|' delimiters: upd_msg contains slashes (the URL).
    local updated_content
    updated_content=$(printf '%s\n' "${content}" | sed -e "s|#updated|${upd_msg}|")
    printf '%s\n' "${updated_content}"
}
# Replace the #updated placeholder in a file, in place (via sed -i).
# $1 - path to the file to update
# Fixes: the old version read the file into a variable *before*
# checking that it exists (an abort under `set -e` for missing files),
# and that in-memory replacement was dead code -- its result was never
# used because sed -i rewrote the file anyway.
function _f_last_updated() {
    local debug=false
    [[ ${globaldebug} == "true" ]] && debug=true
    local upd_msg="Last updated ${today} by <a href=\"https://blog.kekepower.com/qsgen2.html\">${QSGEN} ${VERSION}</a>"
    if ${debug}; then
        _msg debug "${0:t}_msg_1" " ${1}"
        _msg debug "${0:t}_msg_2" " ${upd_msg}"
    fi
    if [[ -f "${1}" ]]; then
        # '|' delimiters because upd_msg contains slashes (the URL).
        sed -i -e "s|#updated|${upd_msg}|" "${1}"
    else
        _msg debug "${0:t}_msg_3" " '${1}' " "${0:t}_msg_3.1"
    fi
}
# Normalize a source filename into a lowercase, URL-safe name:
# spaces become dashes, letters are lowercased, leading punctuation is
# stripped and runs of disallowed characters collapse to one dash.
# Dots are preserved so the caller can strip the extension with %.*.
# Fix: the original body echoed literal "$(unknown)" placeholders and
# never lowercased or printed the processed name; reconstructed from
# the function's name, comments and its caller in _pages.
function _file_to_lower() {
    local filename="${1}"
    # Replace spaces with dashes
    filename="${filename// /-}"
    # Lowercase, then sanitize with extended regexes (the old sed used
    # ERE '+' without -E, so it never matched as intended).
    filename=$(printf '%s' "$filename" \
        | tr '[:upper:]' '[:lower:]' \
        | sed -E -e 's/^[^a-zA-Z0-9_.]+//' -e 's/[^a-zA-Z0-9_.-]+/-/g')
    printf '%s\n' "$filename"
}
# Generate HTML for all new/updated Pages.
# Flow: refresh the pages cache, then for each changed page: read the
# theme template and the page source, extract the title, convert the
# body with the configured engine, expand #link/#showimg/#linkimg/
# #ytvideo helpers, splice everything into the template and write
# <site_root>/<lowercased-name>.html. Exports new_updated_pages so
# later steps know whether any page was rebuilt.
# NOTE(review): _msg is called with levels main/sub/std, which fall
# through to the generic branch of _original_msg -- confirm intended.
function _pages() {
# This function generates all the new and updated Pages
if [[ ${globaldebug} == "true" ]]; then
local debug=true
else
local debug=false
fi
_msg main "${0:t}_msg_3"
# Load the cache for Pages
if (${debug}) _msg debug "${0:t}_msg_1"
_pages_cache
if [[ ${no_pages_found} == "true" ]]; then
_msg sub "${0:t}_msg_1"
return
fi
if (( ${#pages_array[@]} > 0 )); then
# If pages_array is not empty, we do work
if (${debug}) _msg debug "${0:t}_msg_4"
for pages_in_array in ${pages_array[@]}
do
if (${debug}) _msg debug "${0:t}_msg_5"
local pages=${config[project_root]}/themes/${config[site_theme]}/pages.tpl
# Let's check if we can access the pages.tpl file.
# It not, exit script.
if [[ ! -f ${pages} ]]; then
_msg info "${0:t}_msg_6" " ${pages}"
exit
else
# Read template once
if (${debug}) _msg debug "${0:t}_msg_7"
local pages_tpl="$(<${pages})"
fi
# _msg std " - ${pages_in_array%.*}.html"
# Read the file once
if (${debug}) _msg debug "${0:t}_msg_9" " ${pages_in_array}"
local page_content="$(<${pages_in_array})"
# Grab the title from the Page
if (${debug}) _msg debug "${0:t}_msg_10"
# NOTE(review): the engine setup above only ever sets the generator to
# "qstags" or "markdown"; this "native" branch looks unreachable --
# confirm whether it should read "qstags".
if [[ ${config[project_generator]} == "native" ]]; then
while read -r line
do
# zsh: ${match[1]} holds the first =~ capture group
if [[ "$line" =~ ^#title=(.*) ]]; then
local page_title=${match[1]}
break
#local page_title=$( echo ${page_content} | head -2 | grep \#title | cut -d= -f2 )
fi
done <<< "$page_content"
elif [[ ${config[project_generator]} == "markdown" ]]; then
while IFS= read -r line
do
# Check if the line starts with '#' and capture the line
if [[ "$line" == \#* ]]; then
# Remove all leading '#' characters and the first space (if present)
local page_title="${line#\#}" # Remove the first '#' character
page_title="${page_title#\#}" # Remove the second '#' character if present
page_title="${page_title#"${page_title%%[![:space:]]*}"}" # Trim leading whitespace
break # Exit the loop after finding the first heading
fi
done <<< ${page_content}
fi
_msg std " - ${page_title}"
if (${debug}) _msg debug "${0:t}_msg_11" " ${page_title}"
# Remove the #title line from the buffer. No longer needed.
if (${debug}) _msg debug "${0:t}_msg_12"
page_content=$( echo ${page_content} | grep -v \#title )
# HTML'ify the page content
if (${debug}) _msg debug "${0:t}_msg_13" " ${pages_in_array}"
page_content=$( _run_engine "$page_content" )
# Look for links, images and videos and convert them if present.
if (${debug}) _msg debug "${0:t}_msg_14"
if [[ $( echo ${page_content} | grep \#link ) ]]; then
if (${debug}) _msg debug "${0:t}_msg_15"
page_content=$( _link "${page_content}" )
fi
if [[ $( echo ${page_content} | grep \#showimg ) ]]; then
if (${debug}) _msg debug "${0:t}_msg_16"
page_content=$( _image "${page_content}" )
fi
if [[ $( echo ${page_content} | grep \#linkimg ) ]]; then
if (${debug}) _msg debug "${0:t}_msg_17"
page_content=$( _linkimg "${page_content}" )
fi
if [[ $( echo ${page_content} | grep \#ytvideo ) ]]; then
if (${debug}) _msg debug "${0:t}_msg_18"
page_content=$( _youtube "${page_content}" )
fi
# Replace every #pagetitle, #tagline and #sitename in pages_tpl
if (${debug}) _msg debug "${0:t}_msg_19"
pages_tpl=$(echo "${pages_tpl}" | perl -pe "s|#pagetitle|${page_title}|gs; s|#tagline|${config[site_tagline]}|gs; s|#sitename|${config[site_name]}|gs")
if (${debug}) _msg debug "${0:t}_msg_20"
# Use awk for multi-line and special character handling
pages_tpl=$( awk -v new_body="$page_content" '{sub(/BODY/, new_body)} 1' <(echo "${pages_tpl}") )
# Replace #updated with today's date and #version with Name and Version to footer
if (${debug}) _msg debug "${0:t}_msg_21"
pages_tpl=$( _last_updated ${pages_tpl} )
# Always use lowercase for file names
if (${debug}) _msg debug "${0:t}_msg_22"
pages_title_lower=$( _file_to_lower "${pages_in_array}" )
# Clean up unused tags, if any
if (${debug}) _msg debug "${0:t}_msg_23"
pages_tpl=$( _cleanup "${pages_tpl}" )
# Write pages_tpl to disk
# _msg std "Writing ${config[site_root]}/${pages_title_lower%.*}.html to disk."
echo "${pages_tpl}" > ${config[site_root]}/${pages_title_lower%.*}.html
# Insert the blog to the front page is blog_in_index is true and the file in the array is index.file_ext
# and if index.tmp.html exist and is not empty
if [[ ${pages_in_array} == "index.${file_ext}" && ${config[site_blog]} == "true" && -s "${config[project_root]}/blog/index.tmp.html" ]]; then
if (${debug}) _msg sub "${0:t}_msg_24" " ${pages_in_array}"
if (${debug}) _msg sub "${0:t}_msg_25" " ${config[site_blog]}"
if (${debug}) _msg sub "${0:t}_msg_26"
if (${debug}) ls -l ${config[project_root]}/blog/index.tmp.html
_add_blog_list_to_index
fi
done
export new_updated_pages=true
else
# Insert the blog to the front page is blog_in_index is true and the file in the array is index.file_ext
# and if index.tmp.html exist and is not empty
if [[ ${config[site_blog]} == "true" && -s "${config[project_root]}/blog/index.tmp.html" ]]; then
_msg std "${0:t}_msg_27"
# NOTE(review): ${pages_in_array} is never set in this branch (no
# pages were processed); under `set -u` this debug line aborts --
# confirm.
if (${debug}) _msg sub "${0:t}_msg_28" " ${pages_in_array}"
if (${debug}) _msg sub "${0:t}_msg_29" " ${config[site_blog]}"
if (${debug}) _msg sub "${0:t}_msg_30"
if (${debug}) ls -l ${config[project_root]}/blog/index.tmp.html
_add_blog_list_to_index
fi
_msg sub "${0:t}_msg_31"
export new_updated_pages=false
fi
}
function _blogs() {
# This function either generates blog files or exports metadata based on the argument
#
# Orchestrates the whole blog build: discovers posts (_list_blogs), consults
# the cache (_blog_cache), renders changed posts in parallel worker
# subshells, writes the results to disk, and finally triggers blog index
# generation via _blog_idx_for_index/_blog_index.
if [[ ${globaldebug} == "true" ]]; then
local debug=true
else
local debug=false
fi
_msg main "${0:t}_msg_3"
# Running function _list_blogs
if (${debug}) _msg debug "${0:t}_msg_1"
_list_blogs
# _list_blogs signals "nothing to do" through this global flag
if [[ ${no_blogs_found} == "true" ]]; then
_msg sub "${0:t}_msg_2"
return
fi
# Running function _blog_cache
if (${debug}) _msg debug "${0:t}_msg_4"
_blog_cache
# make_blog_array is filled by _blog_cache with the posts needing a rebuild
if (( ${#make_blog_array[@]} > 0 )); then
# Declare arrays to hold blog content and metadata
typeset -ga BLOG_META_STR_ARRAY
typeset -A blog_contents
typeset -A blog_metadata
# Array to store PIDs of background jobs
local pids=()
# Counter for active jobs
local active_jobs=0
# Maximum number of parallel jobs (configurable, default to number of CPU cores)
local max_jobs=${config[parallel_jobs]:-$(nproc)}
# Load template once before processing
if [[ -f ${config[project_root]}/themes/${config[site_theme]}/blogs.tpl ]]; then
local blog_tpl=$(<"${config[project_root]}/themes/${config[site_theme]}/blogs.tpl")
else
_msg info "${0:t}_msg_5"
exit
fi
# Create a FIFO for inter-process communication
# NOTE(review): the FIFO node itself is never opened below — workers write
# to regular files named "${fifo}.<blog>" and the reader cats those files;
# the mkfifo node is only removed at cleanup. Confirm the FIFO is needed.
local fifo=$(mktemp -u)
mkfifo "${fifo}"
# Function to process a single blog file
_process_blog() {
local blog="$1"
local debug="$2"
local blog_tpl="$3"
# Process in a subshell to avoid variable conflicts
(
# Process blog in memory
local content="$(<"${blog}")"
local sdate btitle ingress body blog_index blog_dir blog_url
local date_found=false
local title_found=false
# Process content line by line
# DATE and BLOG_TITLE headers are required metadata for every post
while IFS= read -r line; do
[[ "${line}" == "DATE "* ]] && { date_found=true; sdate=(${line#DATE }); }
[[ "${line}" == "BLOG_TITLE "* ]] && { title_found=true; btitle="${line#BLOG_TITLE }"; }
[[ "${date_found}" == true && "${title_found}" == true ]] && break
done <<< "${content}"
# Skip if required metadata is missing
[[ "${date_found}" != true || "${title_found}" != true ]] && return 1
# Extract blog content
# Ingress and body are delimited by #INGRESS_START/#INGRESS_STOP and
# #BODY_START/#BODY_STOP; xargs flattens the text onto one line so the
# grep -Po pattern can match across the original line breaks.
ingress=$(echo "${content}" | sed "s/'/\\\'/g" | xargs |
grep -Po "#INGRESS_START\K(.*?)#INGRESS_STOP" |
sed "s|\ \#INGRESS_STOP||; s|^\ ||")
body=$(echo "${content}" | sed "s/'/\\\'/g" | xargs |
grep -Po "#BODY_START\K(.*?)#BODY_STOP" |
sed "s|\ \#BODY_STOP||; s|^\ ||")
# Process blog metadata
# sdate is assumed to be "<weekday>-<year>-<month>-<day>" — split on '-'
sdate=($(echo ${sdate} | sed 's|-| |g'))
blog_index=$(echo "${btitle:l}" | sed 's/ /_/g; s/[,.:()]//g')
blog_dir="/blog/${sdate[2]}/${sdate[3]:l}/${sdate[4]}"
blog_url="${blog_dir}/${blog_index}.html"
# Generate blog content with template
local blog_content=$(echo "${blog_tpl}" | \
perl -pe "
s|BLOGTITLE|${btitle}|g;
s|BLOGURL|${blog_url}|g;
s|\QINGRESS\E|${ingress}|g;
s|\QBODY\E|${body}|g")
# Apply template variables
blog_content="${blog_content//CALNDAY/${sdate[4]}}"
blog_content="${blog_content//CALYEAR/${sdate[2]}}"
blog_content="${blog_content//CALMONTH/${sdate[3]}}"
blog_content="${blog_content//CALADAY/${sdate[1]}}"
# Process content with engine and plugins
blog_content=$(_run_engine "${blog_content}")
[[ "${blog_content}" == *"#link"* ]] && blog_content=$(_link "${blog_content}")
[[ "${blog_content}" == *"#showimg"* ]] && blog_content=$(_image "${blog_content}")
[[ "${blog_content}" == *"#linkimg"* ]] && blog_content=$(_linkimg "${blog_content}")
[[ "${blog_content}" == *"#ytvideo"* ]] && blog_content=$(_youtube "${blog_content}")
# Apply site-wide variables
blog_content=$(echo "${blog_content}" | \
perl -pe "s|#tagline|${config[site_tagline]}|gs;
s|#sitename|${config[site_name]}|gs;
s|#pagetitle|${page_title}|gs")
# Final processing
blog_content=$(_last_updated "${blog_content}")
blog_content=$(_cleanup "${blog_content}")
# Output metadata and content through FIFO
echo "${blog}:"
echo " dir: ${blog_dir}"
echo " url: ${blog_url}"
echo " title: ${btitle}"
echo " content: |"
echo " ${blog_content//$'\n'/$'\n '}"
echo "---"
) > "${fifo}.${blog//\//_}" &
return $?
}
# Start a background process to read from FIFO
# NOTE(review): this reader runs in a background subshell, so its writes to
# blog_metadata/blog_contents are lost when it exits — the parent's copies
# stay empty. It also cats the "${fifo}.*" files before the workers have
# necessarily written them. Confirm the results loop below ever sees data.
local reader_pid
(
while IFS= read -r line; do
if [[ $line == *":" ]]; then
current_blog="${line%:}"
elif [[ $line == " dir: "* ]]; then
blog_metadata["${current_blog}_dir"]="${line# dir: }"
elif [[ $line == " url: "* ]]; then
blog_metadata["${current_blog}_url"]="${line# url: }"
elif [[ $line == " title: "* ]]; then
blog_metadata["${current_blog}_title"]="${line# title: }"
elif [[ $line == " content: |" ]]; then
blog_content=""
while IFS= read -r content_line; do
[[ $content_line == " "* ]] || break
blog_content+="${content_line# }\n"
done
blog_contents["${current_blog}"]="${blog_content%\n}"
fi
done < <(cat "${fifo}".* 2>/dev/null)
) &
reader_pid=$!
# Export functions and variables needed in subshells
# NOTE(review): 'export -f' is a bash builtin option — zsh's export does not
# accept -f, and zsh cannot export associative arrays either; both lines
# look like bash leftovers. Verify under zsh.
export -f _run_engine _link _image _linkimg _youtube _last_updated _cleanup
export config
# Process blogs in parallel
for blog in "${make_blog_array[@]}"; do
# Wait for a slot if we've reached max jobs
while (( active_jobs >= max_jobs )); do
# Check for completed jobs
local new_pids=()
local completed=0
for pid in "${pids[@]}"; do
if kill -0 "$pid" 2>/dev/null; then
new_pids+=("$pid")
else
((completed++))
fi
done
pids=("${new_pids[@]}")
((active_jobs -= completed))
# If still at max, wait a bit
(( active_jobs >= max_jobs )) && sleep 0.1
done
# Start a new job
if (${debug}) _msg debug "Processing blog: ${blog}"
_process_blog "${blog}" "${debug}" "${blog_tpl}" &
pids+=($!)
((active_jobs++))
done
# Wait for all background jobs to complete
wait "${pids[@]}" 2>/dev/null
# Signal the reader to finish
wait $reader_pid 2>/dev/null
# Write all blogs to disk in a single pass
# NOTE(review): "${!blog_contents[@]}" is bash key-expansion syntax; in zsh
# this is a bad substitution — the zsh spelling is "${(k)blog_contents[@]}".
for blog in "${!blog_contents[@]}"; do
local dir="${blog_metadata[${blog}_dir]}"
local url="${blog_metadata[${blog}_url]}"
local title="${blog_metadata[${blog}_title]}"
local content="${blog_contents[$blog]}"
# Skip if required fields are missing
if [[ -z "$dir" || -z "$url" || -z "$content" ]]; then
_msg warning "Skipping blog post due to missing metadata: ${blog}"
continue
fi
# Construct the full output path safely
local output_path
output_path=$(_safe_path "${config[site_root]}" "${url#/}") || {
_msg error "Invalid output path for blog: $url"
continue
}
# Create output directory if it doesn't exist
local output_dir
output_dir=$(dirname -- "$output_path")
if ! mkdir -p -- "$output_dir"; then
_msg error "Failed to create directory: $output_dir"
continue
fi
# Write the blog content safely
if ! _safe_write "$content" "$output_path"; then
_msg error "Failed to write blog post: $output_path"
continue
fi
# Add to metadata array for index generation
# NOTE(review): the key is spelled SDATA here, but _blog_idx_for_index
# parses SDATE (and strips a "KEY: " prefix including a space, which is
# never written here); the value is the output directory rather than the
# post date, and INGRESS carries the whole rendered post. Confirm the
# intended metadata format on both sides.
BLOG_META_STR_ARRAY+=("SDATA:${dir}||BTITLE:${title}||INGRESS:${content}||URL:${url}")
if (${debug}); then
_msg debug "Successfully wrote blog post: $output_path"
fi
done
# Clean up FIFO files
rm -f "${fifo}" "${fifo}".* 2>/dev/null
# Process each blog in parallel
# Now BLOG_META_STR_ARRAY contains the metadata string for each blog post
export BLOG_META_STR_ARRAY
if (${debug}) _msg debug "${0:t}_msg_24"
export new_updated_blogs=true
else
_msg sub "${0:t}_msg_25"
export new_updated_blogs=false
fi
if [[ ${new_updated_blogs} == "true" ]]; then
if (${debug}) _msg sub "${0:t}_msg_26"
_blog_idx_for_index
if (${debug}) _msg sub "${0:t}_msg_27"
_blog_index
fi
}
function _blog_idx_for_index() {
# This function generates the file blog/index.tmp.html
#
# Renders one blog_list.tpl entry per element of BLOG_META_STR_ARRAY
# (produced by _blogs) and concatenates them into the temporary index
# fragment that later gets spliced into blog/index.html and the front page.
if [[ ${globaldebug} == "true" ]]; then
local debug=true
else
local debug=false
fi
_msg sub "${0:t}_msg_1" " ${config[project_root]}/blog/index.tmp.html"
if (${debug}) _msg debug "${0:t}_msg_2"
local blog_list_tpl=$(<${config[project_root]}/themes/${config[site_theme]}/blog_list.tpl)
local blog_list_content=""
# Truncate file before writing new one
: >| "${config[project_root]}/blog/index.tmp.html"
# if (${debug}) _msg debug "${0:t}_msg_3" " ${BLOG_META_STR_ARRAY[@]}"
for meta_str in ${BLOG_META_STR_ARRAY[@]}
do
if (${debug}) _msg debug "${0:t}_msg_4"
if (${debug}) _msg debug "${0:t}_msg_5" " ${meta_str}"
# Split meta_str into individual metadata components
local -a meta_array=("${(@s/||/)meta_str}")
# Initialize variables to store each component
local sdate btitle ingress url
# Iterate over each component and extract information
if (${debug}) _msg debug "${0:t}_msg_6"
# NOTE(review): _blogs currently emits "SDATA:<dir>" (not SDATE) and writes
# no space after any colon, while these patterns match "SDATE:" and strip a
# "KEY: " prefix including a space — so sdate stays empty and the other
# values keep their "KEY:" prefixes. Confirm the format on both sides.
for component in "${meta_array[@]}"
do
case "${component}" in
SDATE:*) sdate=${component#SDATE: } ;;
BTITLE:*) btitle=${component#BTITLE: } ;;
INGRESS:*) ingress=${component#INGRESS: } ;;
URL:*) url=${component#URL: } ;;
esac
done
# Word-split the date string: weekday, year, month, day (assumed order)
local adate=( $( echo ${sdate} ) )
local caladay="${adate[1]}"
local calyear="${adate[2]}"
local calmonth="${adate[3]}"
local calnday="${adate[4]}"
local bdate="${adate[1]} - ${adate[4]}/${adate[3]}/${adate[2]}"
blog_list_content+=$(
echo "${blog_list_tpl}" | \
perl -pe "\
s|BLOGURL|${config[site_url]}${url}|g; \
s|BLOGTITLE|${btitle}|g; \
s|INGRESS|${ingress}|g; \
s|BLOGDATE|${bdate}|g; \
s|CALADAY|${caladay}|g; \
s|CALNDAY|${calnday}|g; \
s|CALMONTH|${calmonth}|g; \
s|CALYEAR|${calyear}|g \
")
unset sdate btitle ingress url adate caladay calyear calmonth calnday
done
if (${debug}) _msg debug "${0:t}_msg_7" " ${engine} " "${0:t}_msg_7.1"
# Catch any QStags or Markdown in the Ingress
blog_list_content=$( _run_engine ${blog_list_content} )
if (${debug}) _msg debug "${0:t}_msg_8" " ${config[project_root]}/blog/index.tmp.html"
#if (${debug}) _msg debug "${0:t}_msg_9" " ${blog_list_content}"
echo ${blog_list_content} > ${config[project_root]}/blog/index.tmp.html
_update_blog_cache
}
function _blog_index() {
# Builds the final blog index page (blog/index.html) from the theme's
# blog_index.tpl: substitutes site-wide placeholders, splices the list of
# posts into the BODY placeholder, and refreshes the blog cache. Falls back
# to cached content when nothing changed.
local debug=false
[[ ${globaldebug} == "true" ]] && debug=true
local list_file="${config[project_root]}/blog/index.tmp.html"
local out_dir="${config[site_root]}/blog"
local out_file="${out_dir}/index.html"
if [[ ${new_updated_blogs} == "true" ]] || _is_blog_cache_stale; then
if ${debug}; then _msg debug "Generating new blog index"; fi
# Load the theme template for the blog index page
local tpl=$(<${config[project_root]}/themes/${config[site_theme]}/blog_index.tpl)
# Pick the freshest post list: new content first, cache as fallback
local post_list
if [[ ${new_updated_blogs} == "true" ]]; then
post_list=$(<${list_file})
else
post_list=$(_load_blog_cache) || {
post_list=$(<${list_file})
}
fi
# Substitute site-wide placeholders, then splice the post list into BODY
local rendered=$(echo "${tpl}" | \
perl -pe "s|#sitename|${config[site_name]}|gs; s|#tagline|${config[site_tagline]}|gs")
rendered=$(awk -v new_body="$post_list" '{sub(/BODY/, new_body)} 1' <(echo "${rendered}"))
mkdir -p "${out_dir}"
echo "$rendered" > "${out_file}"
_f_last_updated "${out_file}"
# Refresh the cache with what was just written
_update_blog_cache
if ${debug}; then
_msg debug "Generated new blog index at ${out_file}"
_msg debug "Blog index size: ${#rendered} bytes"
fi
else
# Nothing changed: reuse the cached index verbatim
if ${debug}; then _msg debug "Using cached blog index"; fi
local cached=$(_load_blog_cache)
mkdir -p "${out_dir}"
echo "$cached" > "${out_file}"
fi
}
function _add_blog_list_to_index() {
# Splices the generated blog list (blog/index.tmp.html) into the site's
# already-rendered front page by replacing the first BLOGINDEX placeholder
# per line in index.html.
local debug=false
[[ ${globaldebug} == "true" ]] && debug=true
if ${debug}; then _msg debug "${0:t}_msg_1"; fi
# Read both the blog list fragment and the current front page
local blog_fragment=$(<${config[project_root]}/blog/index.tmp.html)
local front_page=$(<${config[site_root]}/index.html)
# awk handles the multi-line replacement body safely
echo "${front_page}" | awk -v new_body="${blog_fragment}" '{sub(/BLOGINDEX/, new_body)} 1' > "${config[site_root]}/index.html"
}
function _sitemap_urlset() {
# Private helper for _sitemap: writes one complete <urlset> sitemap file.
# $1 - absolute output file
# $2 - short file name used in the status message
# $3… - HTML files (paths relative to the current directory)
local out_file="$1" label="$2"
shift 2
local html_file page_url page_lastmod
{
echo '<?xml version="1.0" encoding="UTF-8"?>'
echo "<!-- Sitemap generated by ${QSGEN} ${VERSION} - https://github.com/kekePower/qsgen2 -->"
echo "<?xml-stylesheet type=\"text/xsl\" href=\"${config[site_url]}/css/default-sitemap.xsl?sitemap=page\"?>"
echo '<urlset'
echo ' xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"'
echo ' xmlns:xhtml="http://www.w3.org/1999/xhtml"'
echo ' xmlns:image="http://www.google.com/schemas/sitemap-image/1.1"'
echo '>'
for html_file in "$@"; do
# Build the public URL from the site root-relative path
page_url="${config[site_url]}/${html_file}"
# NOTE(review): 'stat -c' is GNU coreutils; BSD/macOS needs 'stat -f' — confirm target platforms
page_lastmod=$(stat -c %y "${html_file}" 2>/dev/null | cut -d' ' -f1,2 | sed 's/ /T/' | sed 's/\..*$//')
echo " <url>"
echo " <loc>${page_url}</loc>"
echo " <lastmod><![CDATA[${page_lastmod}+01:00]]></lastmod>"
echo " <changefreq><![CDATA[always]]></changefreq>"
echo " <priority><![CDATA[1]]></priority>"
echo " </url>"
done
echo '</urlset>'
} > "${out_file}"
_msg std " - ${label}"
}
function _sitemap() {
# Generates sitemap-blogs.xml, sitemap-pages.xml and the sitemap.xml index
# when sitemaps are enabled and blogs/pages changed, the blog cache is
# stale, or regeneration is forced ('qsgen2 sitemap').
if [[ ${globaldebug:-} == "true" ]]; then
local debug=true
else
local debug=false
fi
# The :- defaults matter: new_updated_blogs/new_updated_pages/sitemap_force
# are unset when _sitemap is invoked directly, and the script runs under
# 'set -u', which would otherwise abort right here.
if ([[ ${config[site_sitemap]} == "true" ]] &&
([[ ${new_updated_blogs:-} == "true" ]] ||
[[ ${new_updated_pages:-} == "true" ]] ||
_is_blog_cache_stale)) ||
[[ ${sitemap_force:-} == "true" ]]; then
setopt extendedglob
_msg main "${0:t}_msg_1"
local sm_file="sitemap.xml"
local b_file="sitemap-blogs.xml"
local p_file="sitemap-pages.xml"
local sitemap_file="${config[site_root]}/${sm_file}"
local sitemap_blog="${config[site_root]}/${b_file}"
local sitemap_page="${config[site_root]}/${p_file}"
# Collect every generated HTML file (plain files only) and split blog
# posts from regular pages by path
builtin cd ${config[site_root]}
local -a html_files=(**/[a-z]*.html(.))
local -a blog_files=()
local -a page_files=()
local file
for file in "${html_files[@]}"; do
if [[ $file == *blog* ]]; then
blog_files+=("$file")
else
page_files+=("$file")
fi
done
# One <urlset> file per content type — identical format, different list
_sitemap_urlset "${sitemap_blog}" "${b_file}" "${blog_files[@]}"
_sitemap_urlset "${sitemap_page}" "${p_file}" "${page_files[@]}"
# Update the blog cache after generating sitemap
_update_blog_cache
if ${debug}; then _msg debug "${0:t}_msg_2" " ${sitemap_file}"; fi
# The master sitemap is an index pointing at the two urlset files
{
echo '<?xml version="1.0" encoding="UTF-8"?>'
echo "<sitemapindex xmlns=\"http://www.sitemaps.org/schemas/sitemap/0.9\">"
local part part_lastmod
for part in "${b_file}" "${p_file}"; do
echo " <sitemap>"
echo " <loc>${config[site_url]}/${part}</loc>"
part_lastmod=$(stat -c %y "${part}" 2>/dev/null | cut -d' ' -f1,2 | sed 's/ /T/' | sed 's/\..*$//')
echo " <lastmod>${part_lastmod}</lastmod>"
echo " </sitemap>"
done
echo "</sitemapindex>"
} > "${sitemap_file}"
_msg std " - ${sm_file}"
builtin cd ${config[project_root]}
fi
}
function _link() {
# Expands "#link <dest>¤<text>" markers into HTML anchor tags. The link
# text runs to the end of the line; external (http/https) destinations get
# a small "external site" icon appended inside the anchor.
local debug=false
[[ ${globaldebug} == "true" ]] && debug=true
local input="${1}"
local result=""
local line
while IFS= read -r line; do
if [[ ${line} == *"#link"* ]]; then
if ${debug}; then _msg debug "${0:t}_msg_1" " ${line}"; fi
# Everything after '#link ' is "<dest>¤<text>"
local pair=$(echo "${line}" | awk -F'#link ' '{print $2}' | awk -F'¤' '{print $1 "¤" $2}')
local dest=$(echo "${pair}" | awk -F'¤' '{print $1}')
local label=$(echo "${pair}" | awk -F'¤' '{print $2}')
if ${debug}; then _msg debug "${0:t}_msg_2" " ${dest}"; fi
if ${debug}; then _msg debug "${0:t}_msg_3" " ${label}"; fi
# Build the replacement anchor
local anchor="<a href=\"${dest}\">${label}"
if [[ ${dest} =~ ^https?:// ]]; then
# External destination: append the external-link icon
anchor+="<img class=\"exticon\" alt=\"External site icon\" src=\"/images/ext-black-top.svg\" width=\"12\" />"
fi
anchor+="</a>"
line=${line//"#link ${pair}"/${anchor}}
fi
result+="${line}\n"
done <<< "${input}"
# echo -e turns the literal \n separators back into newlines
echo -e "${result}"
}
function _image() {
# Expands "#showimg <image>¤<alt>" markers into HTML <img> tags. Full URLs
# and absolute paths are used as-is; bare filenames resolve under /images/.
local debug=false
[[ ${globaldebug} == "true" ]] && debug=true
local input="${1}"
local result=""
local line
while IFS= read -r line; do
if [[ ${line} == *"#showimg"* ]]; then
if ${debug}; then _msg debug "${0:t}_msg_1" " ${line}"; fi
# Everything after '#showimg ' is "<image>¤<alt text>"
local spec=$(echo "${line}" | awk -F'#showimg ' '{print $2}')
local src=$(echo "${spec}" | awk -F'¤' '{print $1}')
local alt_text=$(echo "${spec}" | awk -F'¤' '{print $2}')
# Resolve the final image source
local resolved=""
if [[ ${src} =~ ^https?:// ]]; then
resolved=${src}
elif [[ ${src} =~ ^\/ ]]; then
resolved=${src}
else
resolved="/images/${src}"
fi
local tag="<img src=\"${resolved}\" alt=\"${alt_text}\" width=\"500\" />"
line=${line//"#showimg ${spec}"/${tag}}
fi
result+="${line}\n"
done <<< "${input}"
# echo -e turns the literal \n separators back into newlines
echo -e "${result}"
}
function _linkimg() {
# Expands "#linkimg <image>¤<alt>" markers into an <img> wrapped in a link
# to the image itself. Full URLs and absolute paths are used as-is; bare
# filenames resolve under /images/.
local debug=false
[[ ${globaldebug} == "true" ]] && debug=true
local input="${1}"
local result=""
local line
while IFS= read -r line; do
if [[ ${line} == *"#linkimg"* ]]; then
if ${debug}; then _msg debug "${0:t}_msg_1" " ${line}"; fi
# Everything after '#linkimg ' is "<image>¤<alt text>"
local spec=$(echo "${line}" | awk -F'#linkimg ' '{print $2}')
local target=$(echo "${spec}" | awk -F'¤' '{print $1}')
local alt_text=$(echo "${spec}" | awk -F'¤' '{print $2}')
# Resolve the final image source
local resolved=""
if [[ ${target} =~ ^https?:// ]]; then
resolved=${target}
elif [[ ${target} =~ ^\/ ]]; then
resolved=${target}
else
resolved="/images/${target}"
fi
local tag="<a href=\"${resolved}\"><img src=\"${resolved}\" alt=\"${alt_text}\" width=\"500\" /></a>"
line=${line//"#linkimg ${spec}"/${tag}}
fi
result+="${line}\n"
done <<< "${input}"
# echo -e turns the literal \n separators back into newlines
echo -e "${result}"
}
function _youtube() {
# Expands "#ytvideo <id>" markers into a standard YouTube iframe embed.
local debug=false
[[ ${globaldebug} == "true" ]] && debug=true
local input="${1}"
local result=""
local line
while IFS= read -r line; do
if [[ ${line} == *"#ytvideo"* ]]; then
if ${debug}; then _msg debug "${0:t}_msg_1" " ${line}"; fi
# The video ID is everything after '#ytvideo '
local video_id=$(echo "${line}" | awk -F'#ytvideo ' '{print $2}')
local embed="<iframe width=\"560\" height=\"315\" src=\"https://www.youtube.com/embed/${video_id}\" title=\"YouTube video player\" frameborder=\"0\" allow=\"accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; web-share\" allowfullscreen></iframe>"
line=${line//"#ytvideo ${video_id}"/${embed}}
fi
result+="${line}\n"
done <<< "${input}"
# echo -e turns the literal \n separators back into newlines
echo -e "${result}"
}
function _cleanup() {
# Strips leftover generator tags (the ¤ field separator and orphaned
# #showimg/#ytvideo/#link/#linkimg markers) that survived earlier passes.
local debug=false
[[ ${globaldebug} == "true" ]] && debug=true
local input="${1}"
if ${debug}; then _msg debug "${0:t}_msg_1"; fi
# One sed pass; '#link ' is removed before '#linkimg ' (the trailing space
# keeps the first pattern from touching '#linkimg' itself)
# -e "s|BLOGINDEX\ ||g"
local scrubbed=$(sed 's|¤||g; s|#showimg ||g; s|#ytvideo ||g; s|#link ||g; s|#linkimg ||g' <<< "${input}")
echo "${scrubbed}"
}
function _p_qstags() {
# Converts QStags (e.g. #P, #H1, #LT) to HTML in a single perl pass.
# The whole input is slurped (-0777) so replacements work across lines;
# the tag table is a flat (tag, replacement) pair list walked two elements
# at a time, with each tag literal-quoted via \Q...\E before substitution.
# The pair order in @qstags is the substitution order.
if [[ ${globaldebug} == "true" ]]; then
local debug=true
else
local debug=false
fi
local content="${1}"
if ${debug}; then
_msg debug "${0:t}_msg_1"
fi
# Use perl to convert QStags to HTML
perl -0777 -pe '
BEGIN {
@qstags = (
"#BR", "<br/>\n",
"#BD", "<b>", "#EBD", "</b>",
"#I", "<i>", "#EI", "</i>\n",
"#P", "<p>", "#EP", "</p>\n",
"#Q", "<blockquote>", "#EQ", "</blockquote>\n",
"#C", "<code>", "#EC", "</code>\n",
"#H1", "<h1>", "#EH1", "</h1>\n",
"#H2", "<h2>", "#EH2", "</h2>\n",
"#H3", "<h3>", "#EH3", "</h3>\n",
"#H4", "<h4>", "#EH4", "</h4>\n",
"#H5", "<h5>", "#EH5", "</h5>\n",
"#H6", "<h6>", "#EH6", "</h6>\n",
"#STRONG", "<strong>", "#ESTRONG", "</strong>\n",
"#EM", "<em>", "#SEM", "</em>\n",
"#DV", "<div>", "#EDV", "</div>\n",
"#SPN", "<span>", "#ESPN", "</span>\n",
"#UL", "<ul>", "#EUL", "</ul>\n",
"#OL", "<ol>", "#EOL", "</ol>\n",
"#LI", "<li>", "#ELI", "</li>\n",
"#UD", "<u>", "#EUD", "</u>\n",
"#TBL", "<table>", "#ETBL", "</table>\n",
"#TR", "<tr>", "#ETR", "</tr>\n",
"#TD", "<td>", "#ETD", "</td>\n",
"#TH", "<th>", "#ETH", "</th>\n",
"#ART", "<article>", "#EART", "</article>\n",
"#SEC", "<section>", "#ESEC", "</section>\n",
"#ASIDE", "<aside>", "#EASIDE", "</aside>\n",
"#NAV", "<nav>", "#ENAV", "</nav>\n",
"#BTN", "<button>", "#EBTN", "</button>\n",
"#SEL", "<select>", "#ESEL", "</select>\n",
"#OPT", "<option>", "#EOPT", "</option>\n",
"#LT", "&lt;", "#GT", "&gt;", "#NUM", "&num;"
);
}
for (my $i = 0; $i < $#qstags; $i += 2) {
my $qstag = $qstags[$i];
my $html = $qstags[$i + 1];
s/\Q$qstag\E/$html/g;
}
' <<< "$content"
}
###############################################################################
# MARKUP CONVERSION FUNCTIONS
# These functions handle conversion between QSTags and Markdown formats
###############################################################################
# _convert_markup - Main function for converting between QSTags and Markdown
#
# This function provides a unified interface for converting between QSTags (used internally
# by qsgen2) and Markdown (a more standard markup format). It supports both single file
# and batch processing modes.
#
# Usage:
# _convert_markup [--to-markdown|--to-qstags] [--all] [input_file] [output_file]
#
# Options:
# --to-markdown Convert from QSTags to Markdown format
# --to-qstags Convert from Markdown to QSTags format
# --all Process all .qst and .blog files in the project
#
# If no files are provided, reads from stdin and writes to stdout.
# If no mode is specified, auto-detects based on file extension or content.
#
# Examples:
# # Convert a single file
# _convert_markup --to-markdown page.qst page.md
#
# # Convert all files in batch mode
# _convert_markup --to-markdown --all
function _convert_markup() {
# Entry point for QSTags <-> Markdown conversion (see the usage block above).
#
# Modes: --to-markdown | --to-qstags; --all converts every page (*.qst) and
# blog post (blog/*.blog) in the project. Without --all, converts a single
# file ("-" or omitted means stdin/stdout), auto-detecting the direction
# from the input extension when no mode flag is given.
#
# NOTE(review): the 'function _convert_markup() {' opener was missing in
# the previous revision, leaving 'local'/'return' at top level and an
# unmatched closing brace; restored here.
local mode=""
local process_all=false
local input_file="-" # Default to stdin
local output_file="-" # Default to stdout
# Parse arguments
while [[ $# -gt 0 ]]; do
case "$1" in
--to-markdown)
mode="to_markdown"
shift
;;
--to-qstags)
mode="to_qstags"
shift
;;
--all)
process_all=true
shift
;;
*)
# First bare argument is the input file, the second the output file
if [[ -z "$input_file" || "$input_file" == "-" ]]; then
input_file="$1"
else
output_file="$1"
fi
shift
;;
esac
done
# If --all is specified, process all .qst and .blog files
if [[ "$process_all" == true ]]; then
if [[ -z "$mode" ]]; then
_msg error "Please specify --to-markdown or --to-qstags with --all"
return 1
fi
local page_count=0
local blog_count=0
# Process pages (.qst files in the project root)
for page in "${config[project_root]}"/*.qst; do
[[ -f "$page" ]] || continue
local output_file
if [[ "$mode" == "to_markdown" ]]; then
output_file="${page%.qst}.md"
else
output_file="${page%.qst}.qst" # Keep original extension
fi
_msg info "Converting ${page##*/} to ${output_file##*/}"
_convert_single_file "$page" "$output_file" "$mode"
# Plain assignment: '((page_count++))' yields exit status 1 when the
# old value is 0, which would abort the script under 'set -e'.
page_count=$((page_count + 1))
done
# Process blog posts (blog/*.blog)
if [[ -d "${config[project_root]}/blog" ]]; then
for blog in "${config[project_root]}/blog/"*.blog; do
[[ -f "$blog" ]] || continue
local output_file
if [[ "$mode" == "to_markdown" ]]; then
output_file="${blog%.blog}.md"
else
output_file="${blog%.blog}.blog" # Keep original extension
fi
_msg info "Converting ${blog##*/} to ${output_file##*/}"
_convert_single_file "$blog" "$output_file" "$mode"
blog_count=$((blog_count + 1))
done
fi
_msg success "Converted $page_count pages and $blog_count blog posts"
return 0
fi
# If no mode specified, auto-detect from the input file extension
if [[ -z "$mode" ]]; then
if [[ "$input_file" != "-" ]] && [[ "$input_file" == *.md || "$input_file" == *.markdown ]]; then
mode="to_qstags"
else
mode="to_markdown"
fi
fi
# Process a single file
_convert_single_file "$input_file" "$output_file" "$mode"
}
function _convert_single_file() {
# Converts a single document between QSTags and Markdown.
# $1 - input file path, or "-" for stdin
# $2 - output file path, or "-" for stdout
# $3 - mode: "to_markdown" or "to_qstags"
# Returns 1 if the input file does not exist.
local input_file="$1"
local output_file="$2"
local mode="$3"
# Read input
local content
if [[ "$input_file" == "-" ]]; then
content=$(cat -)
else
[[ -f "$input_file" ]] || { _msg error "Input file not found: $input_file"; return 1; }
content=$(<"$input_file")
fi
# Conversion logic
#
# The conversion uses sed with extended regular expressions to perform pattern
# matching and substitution. Each pattern handles a specific QSTag or Markdown
# construct, converting it to the target format.
#
# Note: The order of replacements is significant as some patterns may be
# prefixes of others.
#
# NOTE(review): these sed -E scripts rely on '(.*?)' non-greedy quantifiers
# and '\s', neither of which POSIX ERE defines — GNU sed treats '*?' as
# greedy (so two tags on one line over-match) and '\s' is a GNU extension.
# sed also processes line by line, so patterns involving '\n' (the #P
# paragraph rule in to_qstags) can never match. perl, already used
# elsewhere in this file, would be a safer engine — confirm before relying
# on round-trip conversions.
case "$mode" in
to_markdown)
# Convert QSTags to Markdown
#
# Pattern explanations:
# s/#BR/\n/g - Line breaks
# s/#BD(.*?)#EBD/**\1**/g - Bold text
# s/#I(.*?)#EI/*\1*/g - Italic text
# s/#P\s*/\n\n/g; s/#EP//g - Paragraphs
# s/#Q\s*/> /g; s/#EQ//g - Blockquotes
# s/#C(.*?)#EC/`\1`/g - Inline code
# s/#H1\s*/# /g; s/#EH1//g - Headers (H1-H6)
# ... and so on for other QSTags
content=$(echo "$content" | sed -E '\
s/#BR/\n/g;\
s/#BD(.*?)#EBD/**\1**/g;\
s/#I(.*?)#EI/*\1*/g;\
s/#P\s*/\n\n/g; s/#EP//g;\
s/#Q\s*/> /g; s/#EQ//g;\
s/#C(.*?)#EC/`\1`/g;\
s/#H1\s*/# /g; s/#EH1//g;\
s/#H2\s*/## /g; s/#EH2//g;\
s/#H3\s*/### /g; s/#EH3//g;\
s/#H4\s*/#### /g; s/#EH4//g;\
s/#H5\s*/##### /g; s/#EH5//g;\
s/#H6\s*/###### /g; s/#EH6//g;\
s/#STRONG(.*?)#ESTRONG/**\1**/g;\
s/#EM(.*?)#SEM/*\1*/g;\
s/#UL\s*/\n/g; s/#EUL//g;\
s/#OL\s*/\n/g; s/#EOL//g;\
s/#LI\s*/- /g; s/#ELI//g;\
s/#TBL\s*/\n/g; s/#ETBL//g;\
s/#TR//g; s/#ETR//g;\
s/#TD/| /g; s/#ETD/ |/g;\
s/#TH/| **/g; s/#ETH/** |/g;\
s/#LT/</g; s/#GT/>/g; s/#NUM/#/g\
')
;;
to_qstags)
# Convert Markdown to QSTags
#
# This is the inverse of the to_markdown conversion, translating
# Markdown syntax back to QSTags. The patterns are designed to handle
# the most common Markdown constructs while being reasonably robust
# against variations in whitespace and formatting.
#
# Note: Some Markdown features may not have direct QSTag equivalents
# and will be preserved as plain text.
content=$(echo "$content" | sed -E '\
s/\*\*(.*?)\*\*/#BD\1#EBD/g;\
s/\*(.*?)\*/#I\1#EI/g;\
s/^# (.*)$/#H1 \1#EH1/g;\
s/^## (.*)$/#H2 \1#EH2/g;\
s/^### (.*)$/#H3 \1#EH3/g;\
s/^#### (.*)$/#H4 \1#EH4/g;\
s/^##### (.*)$/#H5 \1#EH5/g;\
s/^###### (.*)$/#H6 \1#EH6/g;\
s/^> (.*)$/#Q \1#EQ/g;\
s/`([^`]+)`/#C\1#EC/g;\
s/^\s*[-*+]\s+(.*)$/#LI \1#ELI/g;\
s/\n\s*\n/#P\n/g;\
s/</#LT/g; s/>/#GT/g\
')
;;
esac
# Write output
if [[ "$output_file" == "-" ]]; then
echo "$content"
else
echo "$content" > "$output_file"
fi
}
function _qstags() {
# Translates QStags into HTML using plain zsh parameter substitution —
# one global replacement per tag. Replacement order follows the hash's
# (arbitrary) key order; no tag string is a substring of another tag, so
# the order does not affect the result.
if [[ ${globaldebug} == "true" ]]; then
local debug=true
else
local debug=false
fi
local text="${1}"
if ${debug}; then
_msg debug "${0:t}_msg_1"
fi
# Tag -> HTML lookup table; the "\n" sequences become real newlines when
# echo expands escapes on output below
typeset -A tag_map=(
"#BR" "<br/>\n"
"#BD" "<b>" "#EBD" "</b>"
"#I" "<i>" "#EI" "</i>\n"
"#P" "<p>" "#EP" "</p>\n"
"#Q" "<blockquote>" "#EQ" "</blockquote>\n"
"#C" "<code>" "#EC" "</code>\n"
"#H1" "<h1>" "#EH1" "</h1>\n"
"#H2" "<h2>" "#EH2" "</h2>\n"
"#H3" "<h3>" "#EH3" "</h3>\n"
"#H4" "<h4>" "#EH4" "</h4>\n"
"#H5" "<h5>" "#EH5" "</h5>\n"
"#H6" "<h6>" "#EH6" "</h6>\n"
"#STRONG" "<strong>" "#ESTRONG" "</strong>\n"
"#EM" "<em>" "#SEM" "</em>\n"
"#DV" "<div>" "#EDV" "</div>\n"
"#SPN" "<span>" "#ESPN" "</span>\n"
"#UL" "<ul>" "#EUL" "</ul>\n"
"#OL" "<ol>" "#EOL" "</ol>\n"
"#LI" "<li>" "#ELI" "</li>\n"
"#UD" "<u>" "#EUD" "</u>\n"
"#TBL" "<table>" "#ETBL" "</table>\n"
"#TR" "<tr>" "#ETR" "</tr>\n"
"#TD" "<td>" "#ETD" "</td>\n"
"#TH" "<th>" "#ETH" "</th>\n"
"#ART" "<article>" "#EART" "</article>\n"
"#SEC" "<section>" "#ESEC" "</section>\n"
"#ASIDE" "<aside>" "#EASIDE" "</aside>\n"
"#NAV" "<nav>" "#ENAV" "</nav>\n"
"#BTN" "<button>" "#EBTN" "</button>\n"
"#SEL" "<select>" "#ESEL" "</select>\n"
"#OPT" "<option>" "#EOPT" "</option>\n"
"#LT" "&lt;" "#GT" "&gt;" "#NUM" "&num;"
)
local tag
for tag in "${(k)tag_map[@]}"; do
text=${text//${tag}/${tag_map[$tag]}}
done
echo "${text}"
}
# Command-line dispatch.
# force - truncate both caches so everything is regenerated
# sitemap - regenerate only the sitemap files, then exit
# default - normal incremental build
# The :- default is required: the script runs under 'set -u', so a bare
# ${1} aborts with "parameter not set" when no argument is given.
case ${1:-} in
force)
_msg sub "_qsgen2_msg_2"
: >| "$blog_cache_file" # Truncate the blog cache before doing update
: >| "$pages_cache_file" # Truncate the page cache before doing update
;;
sitemap)
_msg sub "Updating sitemaps"
export sitemap_force=true
_sitemap
exit
;;
*)
# Nothing
;;
esac
_blogs
_pages
_sitemap