View bil-to-hgt.sh
#!/bin/bash
function gdal_pixel_count() {
    if [ -z "$1" ]; then
        echo "Missing arguments. Syntax:"
        echo "  gdal_pixel_count <input_raster>"
        return
    fi
    # Pull the "Size is X, Y" line out of gdalinfo and reduce it to "X Y".
    EXTENT=$(gdalinfo "$1" |\
        grep "Size is" |\
        sed "s/Size is //g;s/,//;s/\n//")
    echo -n "$EXTENT"
}
View build-gdal-combined-lib.sh
#!/bin/bash
PREFIX="$(pwd)/install/"
rm -rf "$PREFIX"
mkdir "$PREFIX"

LOG=./log
rm -rf "$LOG"
mkdir "$LOG"

if [ -e "${PREFIX}" ]
then
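    # Sketch, not the original script: a typical continuation configures and
    # installs into $PREFIX, capturing build output under $LOG.
    ./configure --prefix="$PREFIX" > "$LOG/configure.log" 2>&1
    make > "$LOG/make.log" 2>&1
    make install > "$LOG/install.log" 2>&1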
View function.js
var AWS = require('aws-sdk');
var url = require('url');
var https = require('https');
var hookUrl, kmsEncryptedHookUrl, slackChannel;
kmsEncryptedHookUrl = ''; // Enter the base-64 encoded, encrypted key (CiphertextBlob)
slackChannel = '#alerts'; // Enter the Slack channel to send a message to
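One way to produce that CiphertextBlob (a sketch; the key alias and webhook URL are placeholders, and with AWS CLI v2 the plaintext must be passed base64-encoded or via fileb://):

aws kms encrypt \
  --key-id alias/lambda-slack \
  --plaintext "https://hooks.slack.com/services/T000/B000/XXXX" \
  --query CiphertextBlob --output text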
View README.md

Deploys updates to AWS ECS services based on GitHub commits. Posts deploy notifications to Slack.

Assumptions:

- Task definitions map 1:1 with services, and they have the same name
- Code is stored on GitHub
- You want to deploy the latest commit in a branch
- Docker images are tagged with the commit SHA
- Docker images are stored in AWS ECR
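Under those assumptions the deploy boils down to something like the following sketch (cluster, service, repo and branch names are placeholders, and jq is used here to rewrite the image tag):

CLUSTER=my-cluster
SERVICE=my-service
BRANCH=master
REPO=123456789012.dkr.ecr.ap-southeast-2.amazonaws.com/$SERVICE

# Latest commit on the branch is the ECR image tag to deploy.
SHA=$(git ls-remote https://github.com/my-org/my-repo.git "refs/heads/$BRANCH" | cut -f1)

# Copy the current task definition, swap in the new image, register it as a
# new revision, then point the service at it.
aws ecs describe-task-definition --task-definition "$SERVICE" --query taskDefinition |
  jq --arg img "$REPO:$SHA" '.containerDefinitions[0].image = $img
    | del(.taskDefinitionArn, .revision, .status, .requiresAttributes,
          .compatibilities, .registeredAt, .registeredBy)' > taskdef.json
aws ecs register-task-definition --cli-input-json file://taskdef.json
aws ecs update-service --cluster "$CLUSTER" --service "$SERVICE" --task-definition "$SERVICE"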
View aws.conf
[plugins]
cwlogs = cwlogs
[default]
region = ap-southeast-2
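Assuming this is the awscli.conf consumed by the CloudWatch Logs agent, installing and starting the agent on Amazon Linux looks roughly like this (on Amazon Linux 2 the service is awslogsd):

sudo yum install -y awslogs
sudo cp aws.conf /etc/awslogs/awscli.conf   # log streams are defined in /etc/awslogs/awslogs.conf
sudo service awslogs start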
View convert-geopdf.py
#!/usr/bin/env python
#based on http://lists.osgeo.org/pipermail/gdal-dev/2013-January/035269.html
from osgeo import gdal, osr
import os
import sys
from optparse import OptionParser
import tempfile
from shapely import wkt
from shapely.geometry import mapping
from fiona import collection, crs
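The imports suggest the script works from the GeoPDF's neatline, which GDAL's PDF driver exposes as a NEATLINE metadata item containing a WKT polygon; it can be inspected directly (input.pdf is a placeholder):

gdalinfo input.pdf | grep NEATLINE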
View cleanup.js
"use strict";
var AWS = require("aws-sdk");
var ec2 = new AWS.EC2();
var securityGroup = "sg-XXX";
exports.handler = (event, context, callback) => {
  ec2.describeSecurityGroups({ GroupIds: [securityGroup] }, function(
    err,
    data
View publish_queue_size.py
#!/usr/bin/env python
from __future__ import with_statement, print_function
from pyrabbit.api import Client
from boto.ec2.cloudwatch import CloudWatchConnection
import os
from time import sleep
def get_queue_depths(host, username, password, vhost):
    cl = Client(host, username, password)
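pyrabbit is a client for the RabbitMQ management HTTP API, so the same queue depths can be checked directly with curl (host, credentials and vhost are placeholders; %2F is the URL-encoded default vhost, and each queue object in the response includes a "messages" count):

curl -s -u guest:guest "http://localhost:15672/api/queues/%2F"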
View worldclim_to_cog.md

http://www.worldclim.org/current

wget http://biogeo.ucdavis.edu/data/climate/worldclim/1_4/grid/cur/tmin_30s_bil.zip
wget http://biogeo.ucdavis.edu/data/climate/worldclim/1_4/grid/cur/tmax_30s_bil.zip
wget http://biogeo.ucdavis.edu/data/climate/worldclim/1_4/grid/cur/tmean_30s_bil.zip
wget http://biogeo.ucdavis.edu/data/climate/worldclim/1_4/grid/cur/prec_30s_bil.zip

unzip tmin_30s_bil.zip
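A conversion step along these lines then turns each BIL into a tiled, compressed GeoTIFF with overviews (a sketch; the creation options are an assumption, not the original recipe):

for i in *.bil; do
  gdal_translate -co TILED=YES -co COMPRESS=DEFLATE "$i" "${i%.bil}.tif"
  gdaladdo -r average "${i%.bil}.tif" 2 4 8 16 32
done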
View nlcd_to_cog.md

NLCD data is published in 3x3 degree chunks, as TIFF files inside zip files. For processing, it's far more convenient if the files are available directly as TIFFs without having to unzip each one.

s3cmd get --skip-existing -r s3://prd-tnm/StagedProducts/NLCD/data/2011/landcover/3x3/
for i in *.zip; do unzip "$i" '*.tif'; done

mkdir tmp
mkdir clouded

for i in *.tif; do
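  # (sketch; the loop body and creation options here are an assumption) the
  # usual recipe: tile and compress into tmp/, build overviews, then copy with
  # COPY_SRC_OVERVIEWS into clouded/ as a cloud optimised GeoTIFF.
  gdal_translate -co TILED=YES -co COMPRESS=DEFLATE "$i" "tmp/$i"
  gdaladdo -r average "tmp/$i" 2 4 8 16 32
  gdal_translate -co TILED=YES -co COMPRESS=DEFLATE -co COPY_SRC_OVERVIEWS=YES "tmp/$i" "clouded/$i"
done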