#!/bin/dash
# arguments: filename
saferm()
{
if [ -f "$1" ]
then
rm "$1"
fi
}
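# arguments: filename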
safecat()
{
if [ -f "$1" ]
then
cat "$1"
fi
}
# arguments: inputname
pagedump()
{
# if we have pandoc...
if [ -n "$(command -v pandoc)" ]
then
# always check for markdown first
if [ -f "$1.md" ]
then
pandoc -t html -f markdown "$1.md"
return
fi
fi
# else, fall back to plain text, which is passed through verbatim (newlines intact)
if [ -f "$1.txt" ]
then
cat "$1.txt"
return
fi
# ...or HTML. last resort!
if [ -f "$1.html" ]
then
cat "$1.html"
return
fi
echo "ERROR: content for $1 is not present."
}
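# lookup order, e.g. for pagedump "./sites/example.com/header" (illustrative name):
#   1) header.md   -- only when pandoc is installed
#   2) header.txt
#   3) header.html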
# arguments: filename, title, site
html_start()
{
echo "" > "$1"
echo "" >> "$1"
echo "
" >> "$1"
echo "$2" >> "$1"
echo "" >> "$1"
echo "" >> "$1"
echo "" >> "$1"
echo "> "$1"
safecat "./sites/$3/etc/body" >> "$1"
echo ">" >> "$1"
echo "" >> "$1"
echo "" >> "$1" # DC ignores this, but need this to look consistent on other browsers
pagedump "./sites/$3/header" >> "$1"
echo "
" >> "$1"
echo "" >> "$1"
echo "" >> "$1"
echo "" >> "$1"
## this is where the internal layout management starts
echo "" >> "$1"
echo "" >> "$1"
echo "" >> "$1"
# right nav-bar start (always needed for News)
#if [ -d "./sites/$3/pages_left" ]
#then
echo "> "$1"
safecat "./sites/$3/etc/navtd" >> "$1"
echo ">$(cat ./sites/$3/tmp_pagesl) | " >> "$1"
#fi
# right nav-bar end
echo "" >> "$1"
# after this, we're expected to put content into this column...
}
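# at this point the document has been left open as, roughly:
#   <body><center> header <hr> <table><tr><td> <table><tr> <td>nav</td> <td valign="top">
# html_end() closes all of this in reverse order, so the two must be called as a pair.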
# arguments: filename, site
html_end()
{
echo " | " >> "$1"
# end of content here!
# left nav-bar start
if [ -d "./sites/$2/pages_right" ]
then
echo "> "$1"
safecat "./sites/$2/etc/navtd" >> "$1"
echo ">$(cat ./sites/$2/tmp_pagesr) | " >> "$1"
fi
# left nav-bar end
# close row, then the whole table
echo " " >> "$1"
echo "" >> "$1"
echo " " >> "$1"
## this is where the internal layout management ends.
echo " | " >> "$1"
echo "
" >> "$1"
echo "
" >> "$1"
echo "
" >> "$1"
pagedump "./sites/$2/footer" >> "$1"
echo "" >> "$1" # not part of HTML 3.2
echo "" >> "$1"
echo "" >> "$1"
echo "" >> "$1"
}
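# usage sketch (hypothetical page "about" on site "example.com"):
#   html_start "./out/example.com/about.html" "About" "example.com"
#   pagedump "./sites/example.com/pages_left/about/content" >> "./out/example.com/about.html"
#   html_end "./out/example.com/about.html" "example.com"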
# arguments: site, filename, news-id
make_news_header()
{
AUTHOR=$(safecat "./sites/$1/news/$3/author")
DATE=$(safecat "./sites/$1/news/$3/date")
TITLE=$(safecat "./sites/$1/news/$3/title")
if [ -z "$TITLE" ]
then
TITLE="Unknown Title"
fi
if [ -z "$AUTHOR" ]
then
AUTHOR="Unknown Author"
fi
if [ -z "$DATE" ]
then
DATE="Unknown Date"
fi
# add an anchor to a temp file from which we'll build our news listing
echo "<table" >> "$2"
safecat "./sites/$1/etc/table" >> "$2" # copy the table style
echo ">" >> "$2"
# first row: the title, linking to the article's own page
echo "<tr>" >> "$2"
echo "<td" >> "$2"
safecat "./sites/$1/etc/td" >> "$2" # copy the cell style
echo ">" >> "$2"
echo "<a href=\"news-$3.html\">$TITLE</a>" >> "$2"
echo "</td>" >> "$2"
echo "</tr>" >> "$2"
# second row: the byline
echo "<tr>" >> "$2"
echo "<td" >> "$2"
safecat "./sites/$1/etc/td" >> "$2" # copy the cell style
echo ">" >> "$2"
echo "Posted by $AUTHOR on $DATE" >> "$2"
echo "</td>" >> "$2"
echo "</tr>" >> "$2"
echo "</table>" >> "$2"
}
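# a news entry lives in sites/<site>/news/<id>/ as plain files, e.g. (illustrative):
#   sites/example.com/news/001/title    -> "Hello world"
#   sites/example.com/news/001/author   -> "staff"
#   sites/example.com/news/001/date     -> "2024-01-01"
#   sites/example.com/news/001/content.md (or .txt/.html, see pagedump)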
# arguments: site, page side, page, title
site_button()
{
if [ "$2" = "news" ]
then
BTN_SEARCHPATH="./sites/$1/news"
BTN_NAME="news"
BTN_PAGE="index"
else
BTN_SEARCHPATH="./sites/$1/$2/$3"
BTN_NAME="$3"
BTN_PAGE="$3"
fi
BTN_OUT="_pb_$BTN_NAME"
if [ -f "$BTN_SEARCHPATH/$BTN_NAME.gif" ]
then
cp "$BTN_SEARCHPATH/$BTN_NAME.gif" "./out/$1/$BTN_OUT.gif"
echo "
"
return
fi
if [ -f "$BTN_SEARCHPATH/$BTN_NAME.jpg" ]
then
cp "$BTN_SEARCHPATH/$BTN_NAME.jpg" "./out/$1/$BTN_OUT.jpg"
echo "
"
return
fi
if [ -f "$BTN_SEARCHPATH/$BTN_NAME.png" ]
then
cp "$BTN_SEARCHPATH/$BTN_NAME.png" "./out/$1/$BTN_OUT.png"
echo "
"
return
fi
echo "• $4
"
}
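# resolution order is gif, then jpg, then png, then the text link; e.g. (hypothetical):
#   site_button "example.com" "pages_left" "about" "About Us" >> "$TMP_PAGES_L"
# copies pages_left/about/about.gif to out/example.com/_pb_about.gif if it exists
# and emits the matching <a><img></a> snippet on stdout.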
# arguments: site-name (folder inside ./sites)
site_process()
{
TMP_NEWS="./sites/$1/tmp_news"
TMP_PAGES_R="./sites/$1/tmp_pagesr"
TMP_PAGES_L="./sites/$1/tmp_pagesl"
OUT_INDEX="./out/$1/index.html"
SITE_TITLE=$(cat "./sites/$1/domain")
# delete the output dir if it exists
if [ -d "./out/$1" ]
then
rm -rf "./out/$1"
fi
# create the output-dir for our site
mkdir -p "./out/$1"
# clean up 1
saferm "$TMP_NEWS"
saferm "$TMP_PAGES_R"
saferm "$TMP_PAGES_L"
# before we collect all pages, add a 'News' button:
site_button "$1" "news" "index" "News" >> "$TMP_PAGES_L"
#echo "• News
" >> "$TMP_PAGES_L"
# enumerate and list the right-aligned pages
if [ -d "./sites/$1/pages_right" ]
then
find "./sites/$1/pages_right" -name title | sort | while read -r LINE
do
DIR=$(dirname "$LINE")
ID_NAME=$(basename "$DIR")
TITLE=$(cat "$DIR/title")
if [ -f "./sites/$1/pages_right/$ID_NAME/hidden" ]
then
continue
fi
site_button "$1" "pages_right" "$ID_NAME" "$TITLE" >> "$TMP_PAGES_R"
#echo "• $TITLE
" >> "$TMP_PAGES_R"
done
fi
# enumerate and list the left-aligned pages
if [ -d "./sites/$1/pages_left" ]
then
find "./sites/$1/pages_left" -name title | sort | while read -r LINE
do
DIR=$(dirname "$LINE")
ID_NAME=$(basename "$DIR")
TITLE=$(cat "$DIR/title")
if [ -f "./sites/$1/pages_left/$ID_NAME/hidden" ]
then
continue
fi
site_button "$1" "pages_left" "$ID_NAME" "$TITLE" >> "$TMP_PAGES_L"
#echo "• $TITLE
" >> "$TMP_PAGES_L"
done
fi
# collect all news articles, newest first (reverse sort on the id)
find "./sites/$1/news" -name title | sort -r | while read -r LINE
do
DIR=$(dirname "$LINE")
ID_NAME=$(basename "$DIR")
TITLE=$(cat "$DIR/title")
OUTFILE="./out/$1/news-$ID_NAME.html"
# add an anchor to the temp file from which we'll build our news listing
make_news_header "$1" "$TMP_NEWS" "$ID_NAME"
echo "
" >> "$TMP_NEWS"
# generate the individual news page
html_start "$OUTFILE" "$TITLE" "$1"
make_news_header "$1" "$OUTFILE" "$ID_NAME"
pagedump "$DIR/content" >> "$OUTFILE"
html_end "$OUTFILE" "$1"
done
# build right-aligned pages
if [ -d "./sites/$1/pages_right" ]
then
find "./sites/$1/pages_right" -name title | sort | while read -r LINE
do
DIR=$(dirname "$LINE")
ID_NAME=$(basename "$DIR")
TITLE=$(cat "$DIR/title")
OUTFILE="./out/$1/$ID_NAME.html"
# generate the individual page
html_start "$OUTFILE" "$TITLE" "$1"
pagedump "$DIR/content" >> "$OUTFILE"
html_end "$OUTFILE" "$1"
done
fi
# Build left-aligned pages
if [ -d "./sites/$1/pages_left" ]
then
find "./sites/$1/pages_left" -name title | sort | while read -r LINE
do
DIR=$(dirname "$LINE")
ID_NAME=$(basename "$DIR")
TITLE=$(cat "$DIR/title")
OUTFILE="./out/$1/$ID_NAME.html"
# generate the individual page
html_start "$OUTFILE" "$TITLE" "$1"
pagedump "$DIR/content" >> "$OUTFILE"
html_end "$OUTFILE" "$1"
done
fi
# generate the index full of news-pages
html_start "$OUT_INDEX" "$SITE_TITLE" "$1"
cat "$TMP_NEWS" >> "$OUT_INDEX"
html_end "$OUT_INDEX" "$1"
# copy over data
if [ -d "./sites/$1/data" ]
then
rsync -ra "./sites/$1/data/" "./out/$1/"
fi
# clean up 2
saferm "$TMP_NEWS"
saferm "$TMP_PAGES_R"
saferm "$TMP_PAGES_L"
}
# iterate through all of the websites
find ./sites -name domain | while read -r SITE
do
DOMAIN=$(basename "$(dirname "$SITE")")
site_process "$DOMAIN"
done
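# expected input tree (illustrative), one directory per site under ./sites:
#   sites/example.com/domain                      site title; marks the directory as a site
#   sites/example.com/header.{md,txt,html}        page header (resolved via pagedump)
#   sites/example.com/footer.{md,txt,html}        page footer
#   sites/example.com/etc/{body,table,td,navtd}   attribute snippets spliced into tags
#   sites/example.com/news/<id>/{title,author,date,content.*}
#   sites/example.com/pages_left/<name>/{title,content.*,hidden?}
#   sites/example.com/pages_right/<name>/{title,content.*,hidden?}
#   sites/example.com/data/                       copied verbatim into ./out/example.com/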