Build snippet for Markdown slides with reveal.js
#!/usr/bin/env bash
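# Build script for reveal.js-based training materials: copies git-tracked
# sources into ./build, generates exercise pages, HTML slide decks, PDF
# handouts, and per-course zip archives.
# It cd's to the parent of its own directory, so it is assumed to live in a
# subdirectory of the repo root (likely bin/, since ./bin/_strict.sh is
# sourced from there). Pass "fast" as the only argument to skip the slow
# PDF and zip steps.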
base_build() {
  build_dir="./build"
  mkdir -p "${build_dir}"
  # macOS support: use GNU tar (gtar) instead of BSD tar. An alias would not
  # take effect inside this already-parsed function body, so select the tar
  # command via a variable instead.
  local tar_cmd=tar
  if [[ "$(uname)" == "Darwin" ]]; then
    tar_cmd=gtar
  fi
  # This takes all files that have been at least added to git,
  # tars them up, and extracts them in the build dir
  git ls-files \
    | "${tar_cmd}" --files-from - --create --file - \
    | "${tar_cmd}" --directory "${build_dir}" \
        --exclude='trainer-docs.md' \
        --extract --file -
}
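# Bundle the shared JS dependencies with browserify, then generate an HTML
# page for every *-exercises.js file in the build dir, using
# common/exercises.html as the template.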
exercises() {
  browserify -r tape -r tape-catch -r tape-dom > "${build_dir}/common/tape.js"
  browserify -r events > "${build_dir}/common/events.js"
  find "${build_dir}" -type f -name '*-exercises.js' | {
    while IFS= read -r exercises; do
      topic_name=$(basename "${exercises}" -exercises.js)
      course_name=$(basename "$(dirname "${exercises}")")
      # if [[ ! -e "${build_dir}/${course_name}/01-welcome-exercises.js" ]]; then
      #   cp "${build_dir}/common/01-welcome-"* "${build_dir}/${course_name}"
      # fi
      sed -e "s/TITLE/${topic_name}/g" \
          -e "s/TOPIC/${topic_name}/g" \
          "${build_dir}/common/exercises.html" > \
          "${build_dir}/${course_name}/${topic_name}-exercises.html"
      echo -n "."
    done
  }
}
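# Copy a slimmed-down reveal.js into the build dir, then wrap every
# *-slides.md file with the common slide header/footer to produce a
# standalone HTML slide deck per topic.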
slides_html() {
  cp -r node_modules/reveal.js "${build_dir}"
  # reveal.js node deps are huge (esp socket.io)
  rm -rf "${build_dir}/reveal.js/node_modules"
  # fonts for other themes are huge
  rm -rf "${build_dir}/reveal.js/lib/font"
  find "${build_dir}" -type f -name '*-slides.md' | {
    while IFS= read -r markdown; do
      local topic_name
      topic_name=$(basename "${markdown}" -slides.md)
      local course_dir
      course_dir="$(dirname "${markdown}")"
      mkdir -p "${course_dir}"
      ##### reveal.js for live presentations #####
      cat "${build_dir}/common/slides-header.html" "${markdown}" "${build_dir}/common/slides-footer.html" \
        | sed -e "s/TITLE/${topic_name}/g" \
        > "${course_dir}/${topic_name}-slides.html"
      echo -n "."
    done
  }
}
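# Render a PDF handout for every *-slides.md file: convert the Markdown to
# HTML with marked, feed it to wkhtmltopdf, then remove the intermediate
# HTML file.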
slides_pdf() {
  find "${build_dir}" -type f -name '*-slides.md' | {
    while IFS= read -r markdown; do
      local topic_name
      topic_name=$(basename "${markdown}" -slides.md)
      local course_dir
      course_dir="$(dirname "${markdown}")"
      ##### markdown to html for learners #####
      cp "${build_dir}/common/pdf-header.html" "${course_dir}/${topic_name}.html"
      "$(npm bin)/marked" --gfm "${markdown}" >> "${course_dir}/${topic_name}.html"
      ##### html to pdf for learners #####
      wkhtmltopdf --quiet --enable-local-file-access "${course_dir}/${topic_name}.html" "${course_dir}/${topic_name}-slides.pdf"
      rm "${course_dir}/${topic_name}.html"
      echo -n "."
    done
  }
  # echo "Waiting for PDFs to finish"
  # local failed_count=0
  # for job in `jobs -p`; do
  #   echo $job
  #   wait $job || failed_count=$((${failed_count} + 1))
  # done
  #
  # if [[ ${failed_count} -ne 0 ]];
  # then
  #   echo "Failed to render ${failed_count} PDFs"
  #   exit ${failed_count}
  # fi
}
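# Package each course directory in the build dir, together with the shared
# common and reveal.js directories, into a date-stamped zip archive.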
build_zip() {
  cd "${build_dir}" > /dev/null
  local stamp
  stamp=$(date +%Y%m%d)
  find . -mindepth 1 -maxdepth 1 -type d -not -name bin -not -name common -not -name reveal.js | {
    while IFS= read -r course_dir; do
      zip --quiet --recurse-paths --symlinks "${course_dir}-${stamp}.zip" "${course_dir}" common reveal.js
    done
  }
  # zip --quiet --recurse-paths --symlinks "all-courses-$(date +%Y%m%d).zip" . --exclude '*.zip'
  cd - > /dev/null
}
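# List the generated exercise files.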
print_paths() {
  find "${build_dir}" -type f -name '*-exercises.*'
}
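# Run the build steps in order, printing a progress marker per step.
# Pass "fast" as the first argument to skip the PDF and zip steps.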
main() {
  cd "$(dirname "$0")/.." || exit
  source "./bin/_strict.sh"
  PATH="$(npm bin):${PATH}"
  echo -ne "base git build…"
  base_build
  echo -ne "✓\nslides md -> html"
  slides_html
  echo -ne "✓\nexercises"
  exercises
  echo -ne "✓\nslides html -> pdf"
  # ${1:-} keeps this safe when no argument is passed under `set -u`
  # (likely enabled by _strict.sh)
  if [[ "${1:-}" == "fast" ]]; then
    return
  fi
  slides_pdf
  echo -ne "✓\nzip…"
  build_zip
  echo ""
  print_paths
}
main "$@"