#!/bin/sh

## wag is a tool to generate static web sites
##
## Commands:

unset CDPATH
export LC_ALL=C
IFS=' '

cmd=$(command -v "$0")
dest=./public

footer() {
    :
}

# front parses front matter ("key: value" lines between --- markers) into
# front_* shell variables; a "title: ..." line sets $front_title for header.
front() {
    {
        read a b && [ "$a" = "---" ] && [ "$b" = "" ] || return
        while read a b; do
            [ "$a" = "---" ] && [ "$b" = "" ] && break
            eval "front_${a%:}=\"$b\""
        done
    } < "$1"
}

## gen generates site content from source files
gen() {
    [ -d "$dest" ] || mkdir -p "$dest"
    for f in *.md; do
        [ "$f" = "*.md" ] && continue
        g="$dest/${f%.md}.html"
        [ "$f" -ot "$g" ] && continue
        echo "f: $f $g"
        front "$f"
        {
            header
            md2html "$f"
            footer
        } > "$g"
    done
}

header() {
    cat <<- EOT
$front_title
EOT
}

## help prints this program documentation
help() { awk '/^## / {print substr($0, 4)}' "$cmd"; }

lipsum='Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.'

# An overly simplified http request parser for static web sites.
http_request() {
    read -r cmd uri proto
    case $uri in
    */) uri="${uri}index.html" ;;
    esac
    # Drain the request headers up to the blank separator line.
    while true; do
        read -r line || break
        [ ${#line} = 0 ] && break
    done
    printf 'HTTP/1.1 200 OK\n\n' && cat "${uri#/}"
}

md2h() { want="$2"; got=$(echo "$1" | md2html -); }

# md2html converts markdown read from a file (or stdin when given "-") to
# HTML. A first awk pass copies the "[name]: url" reference definitions to
# the front of the stream so the main pass can resolve [name] links that are
# defined later in the input.
md2html() {
    tmp=$(mktemp -u)
    trap "rm -f '$tmp'" EXIT
    {
        cat "${1:--}" | tee "$tmp" | awk '/^[ ]*\[[^]]+\]:/'
        cat "$tmp"
    } | awk '
    function newblock(nblock) {
        if (text)
            print "<" block ">" text "</" block ">"
        text = ""
        out = 1
        block = nblock ? nblock : "p"
    }
    function subinline(tgl, inl) {
        while (match($0, tgl)) {
            if (inline[ni] == inl)
                ni -= sub(tgl, "</" inl ">")
            else if (sub(tgl, "<" inl ">"))
                inline[++ni] = inl
        }
    }
    function dolink(href, lnk) {
        # Undo escaped html in uris
        gsub(/&amp;/, "\\&", href)
        gsub(/&lt;/, "<", href)
        gsub(/&gt;/, ">", href)
        # & can be tricky, and not standard:
        gsub(/&/, "\\\\\\&", href)
        gsub(/&/, "\\\\\\&", lnk)
        return "<a href=\"" href "\">" lnk "</a>"
    }
    BEGIN {
        ni = 0    # inlines
        nl = 0    # nested lists
        out = 0   # 0 if no output so far
        text = ""
        block = "p"
    }
    # Skip front matter.
    out == 0 && $0 == "---" {
        do
            getline
        while ($0 != "---")
        next
    }
    # Escape HTML.
    esc != "false" {
        gsub("&", "\\&amp;")
        gsub("<", "\\&lt;")
        gsub(">", "\\&gt;")
    }
    # Internal references.
    match($0, /^[ ]*\[[^]]+\]:/) > 0 {
        k = substr($0, RSTART+1, RLENGTH-3)
        v = substr($0, RLENGTH+1)
        sub(/^[ ]/, "", v)
        sub(/[ ]$/, "", v)
        ref[k] = v
        next
    }
    # Horizontal rules.
    /^[ ]*([-*_] ?)+[ ]*$/ && text == "" {
        print "<hr/>"
        next
    }
    # Tables. Syntax:
    # Right Align| Center Align |Left Align
    /([ ]\|)|(\|[ ])/ {
        if (block != "table")
            newblock("table")
        nc = split($0, cells, "|")
        $0 = "<tr>\n"
        for (i = 1; i <= nc; i++) {
            align = "left"
            if (sub(/^[ ]+/, "", cells[i])) {
                if (sub(/[ ]+$/, "", cells[i]))
                    align = "center"
                else
                    align = "right"
            }
            sub(/[ ]+$/, "", cells[i])
            $0 = $0 "<td align=\"" align "\">" cells[i] "</td>\n"
        }
        $0 = $0 "</tr>"
    }
    # Ordered and unordered (possibly nested) lists.
    /^[ ]*([*+-]|(([0-9]+[.-]?)+))[ ]/ {
        newblock("li")
        nnl = 1
        while (match($0, /^[ ]/)) {
            sub(/^[ ]/, "")
            nnl++
        }
        while (nl > nnl)
            print "</" list[nl--] ">"
        while (nl < nnl) {
            list[++nl] = "ol"
            if (match($0, /^[*+-]/))
                list[nl] = "ul"
            print "<" list[nl] ">"
        }
        sub(/^([*+-]|(([0-9]+[.-]?)+))[ ]/, "")
    }
    # Multi line list items.
    block == "li" {
        sub(/^(\t*)|(    *)/, "")
    }
    # Code blocks.
    /^(\t|    )/ {
        if (block != "code")
            newblock("code")
        sub(/^(\t|    )/, "")
        text = text $0 "\n"
        next
    }
    # Paragraphs.
    /^$/ {
        newblock()
        while (nl > 0)
            print "</" list[nl--] ">"
    }
    # Headers.
    /^#+ / {
        newblock()
        match($0, /#+/)
        n = RLENGTH
        if (n > 6)
            n = 6
        text = substr($0, RLENGTH + 1)
        sub(/^ */, "", text)
        block = "h" n
        next
    }
    # Alternate headers (underlined).
    /^=+$/ {
        block = "h" 1
        next
    }
    /^-+$/ {
        block = "h" 2
        next
    }
    {
        # Images.
        while (match($0, /!\[[^]]+\]\([^)]+\)/)) {
            split(substr($0, RSTART, RLENGTH), a, /(!\[)|\)|(\]\()/)
            sub(/!\[[^]]+\]\([^)]+\)/, "<img src=\"" a[3] "\" alt=\"" a[2] "\"/>")
        }
        # Links.
        while (match($0, /\[[^]]+\]\([^)]+\)/)) {
            split(substr($0, RSTART, RLENGTH), a, /[[)]|(\]\()/)
            sub(/\[[^]]+\]\([^)]+\)/, dolink(a[3], a[2]))
        }
        # Internal references.
        while (match($0, /\[[^]]+\]/)) {
            k = substr($0, RSTART+1, RLENGTH-2)
            sub(/\[[^]]+\]/, dolink(ref[k], k))
        }
        # Auto links (uri matching is poor).
        na = split($0, a, /(^\()|[ ]|([,.)]([ ]|$))/)
        for (i = 1; i <= na; i++)
            if (match(a[i], /^(((https?|ftp|file|news|irc):\/\/)|(mailto:)).+$/))
                sub(a[i], dolink(a[i], a[i]))
        # Inline.
        subinline("(\\*\\*)|(__)", "strong")
        subinline("\\*", "em")
        subinline("`", "code")
        text = text (text ? " " : "") $0
    }
    END {
        while (ni > 0)
            text = text "</" inline[ni--] ">"
        newblock()
        while (nl > 0)
            print "</" list[nl--] ">"
    }'
}

# serve uses busybox nc to listen on port 1500; every connection re-executes
# this script with WAG_FUN=http_request set, so http_request above handles
# the request.
#serve() { while true; do busybox nc -kl -p 1500 -e "$cmd http_request"; done; }
serve() { WAG_FUN=http_request busybox nc -kl -p 1500 -e "$cmd"; }
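# test runs the built-in cases below and prints pass/fail totals. Arguments
# are used as a shell case pattern to filter cases, e.g. "./wag test 'md2h#3'"
# runs only the third md2h case.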

test() {
    fail=0
    pass=0
    skip=0
    tfilter="$*"
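    # Each case below is "md2h INPUT WANT": md2h converts INPUT with md2html
    # and test_run then compares the generated HTML in $got against $want.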

    test_run md2h 'abc __def__ ghi' '<p>abc <strong>def</strong> ghi</p>'
    test_run md2h 'abc **def** ghi' '<p>abc <strong>def</strong> ghi</p>'
    test_run md2h 'abc *def* ghi' '<p>abc <em>def</em> ghi</p>'
    test_run md2h 'abc ***def*** ghi' '<p>abc <strong><em>def</strong></em> ghi</p>'
    test_run md2h 'abc `def` ghi' '<p>abc <code>def</code> ghi</p>'
    test_run md2h '# h1' '<h1>h1</h1>'
    test_run md2h '## h2' '<h2>h2</h2>'
    test_run md2h 'h1
==' '<h1>h1</h1>'
    test_run md2h 'h2
--' '<h2>h2</h2>'
    test_run md2h 'abc [github] def' '<p>abc <a href="">github</a> def</p>'
    test_run md2h 'abc [github](https://github.com) def' '<p>abc <a href="https://github.com">github</a> def</p>'
    test_run md2h 'abc [github] def
[github]: https://github.com' '<p>abc <a href="https://github.com">github</a> def</p>'
    test_run md2h '---
Title: front matter test
---

Hello [world].

---

Bye.

[world]: http://example.com' '<p>Hello <a href="http://example.com">world</a>.</p>
<hr/>
<p>Bye.</p>'

    echo "Total: $((pass + fail + skip)), Passed: $pass, Failed: $fail, Skip: $skip"
    return "$fail"
}

test_run() {
    # Count how many runs each test function has had: test_md2h -> $ti.
    eval "test_$1=\$((test_$1 + 1)); ti=\"\$test_$1\""
    [ "$tfilter" ] && case "$1#$ti" in
    $tfilter) ;;
    *) skip=$((skip + 1)); return 0 ;;
    esac
    "$@"
    [ "$got" = "$want" ] && {
        pass=$((pass + 1))
        return 0
    }
    fail=$((fail + 1))
    printf "%s FAIL\n Got: %s\n Want: %s\n" "$1#$ti" "$got" "$want" >&2
    return 1
}

[ "$WAG_FUN" ] && { $WAG_FUN; exit; }

# Execute command line
[ "$1" ] || help && "$@"
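# Example usage, assuming this script is saved as "wag" and is executable:
#   ./wag gen     # render ./*.md into ./public/*.html
#   ./wag test    # run the built-in md2html tests
#   ./wag serve   # serve the current directory on port 1500 (needs busybox)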