Created
July 10, 2014 22:13
-
-
Save danhammer/8fdb1744db2e35e682de to your computer and use it in GitHub Desktop.
FORMA playground code (working example)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// FORMA, Hammer et al. (2014) | |
// Objective: | |
// Alerts of forest disturbance from MODIS imagery | |
// GEE core assets: | |
// MOD44B_C4_TREE_2000 (Vegetation Continuous Fields, annual 250m) | |
// MOD13Q1 (Vegetation indices, 16-day 250m) | |
// NOAA/PRECL_05D (Precipitation Reconstruction over Land, monthly 0.5 degree) | |
// users/thau/htrop_hotspots_500m_2 TODO: replace with UMD data set | |
// Supplementary assets: | |
// WWF ecoregions (fusion table: ft:1Ey1pQ7wNwkU7o-MzkZjOLrlGMHuUEckzb40k64A) | |
// FORMA coefficients (fusion table: ft:1z7_bmktFDOGPSpPGuqLvnCi5uMqAHe81Vi0x0dw) | |
// Reference: | |
// Hammer, Dan and Robin Kraft and David Wheeler. 2014. "Alerts of forest disturbance | |
// from MODIS imagery". International Journal of Applied Earth Observation and | |
// Geoinformation, Vol. 33, 1-9. | |
// Fusion table holding the WWF ecoregion polygons; used to scope training
// and prediction to a single ecoregion.
var ECOREGION_FT = "ft:1Ey1pQ7wNwkU7o-MzkZjOLrlGMHuUEckzb40k64A";
// Fusion table of pre-fitted FORMA coefficients (properties b0..b8 per
// ecoregion feature; b0 is the intercept).
var COEFS = ee.FeatureCollection("ft:1z7_bmktFDOGPSpPGuqLvnCi5uMqAHe81Vi0x0dw");
// WWF eco_id of the ecoregion under study.
var ECOID = 40160;
// Score threshold above which a pixel counts as a disturbance "hit"
// (see timeAboveThresh).
var THRESH = 1;
var prepVCF = function(threshold) {
  // Builds the forest mask used to define the study area: pixels of the
  // MODIS Vegetation Continuous Fields layer whose percent tree cover is
  // at or above `threshold` map to 1, everything else to 0. The FCLH 2005
  // data employs a threshold of 25; the UMD 2012 data employs 10.
  var treeCover = ee.Image("MOD44B_C4_TREE_2000");
  return treeCover.gte(threshold);
};
var getEcoPolygon = function(ecoid) {
  // Looks up the WWF ecoregion with the given id and dissolves its pieces
  // into a single MultiPolygon feature defining the extent for local
  // training.
  var matches = ee.FeatureCollection(ECOREGION_FT)
      .filter(ee.Filter.eq('eco_id', ecoid));
  return matches.union();
};
var extractFeature = function(product, feature_name, start_date, end_date) {
  // Accepts a product name as a string (e.g., 'MOD13Q1') and the name of a
  // single band (e.g., 'NDVI') along with the start and end dates. Returns
  // an image collection restricted to the supplied band over the interval
  // [start_date, end_date), non-inclusive on the end_date.
  // Improvement: use the collection-level select() rather than mapping a
  // per-image select over the collection — same result, simpler and avoids
  // the extra mapped function (whose return also relied on ASI).
  return ee.ImageCollection(product)
      .filterDate(start_date, end_date)
      .select([feature_name]);
};
var getInput = function(start_date, end_date) {
  // Builds the feature vector used for classification: FORMA trend
  // statistics on NDVI (with rainfall as a covariate) plus their
  // neighborhood means, masked to forested pixels. Note that the original
  // algorithm trains on `start_date = '2000-02-18'` through
  // `end_date = '2005-12-31'`; subsequent predictions keep the start date
  // and telescope the end date forward.
  var ndvi = extractFeature('MOD13Q1', 'NDVI', start_date, end_date);
  var rain = extractFeature('NOAA/PRECL_05D', 'precip_mm', start_date, end_date);
  // Compute FORMA trends, then mask out non-forest pixels (25% VCF cutoff).
  var forest = prepVCF(25);
  var trends = ndvi.formaTrend(rain, 9).mask(forest);
  // Mean of the trend bands over a small circular neighborhood, appended
  // to the trend bands themselves as the final feature vector.
  var hood = trends.reduceNeighborhood(ee.Reducer.mean(), ee.Kernel.circle(1.5));
  return trends.addBands(hood);
};
var grabCoefficientVector = function(ecoid) {
  // Accepts the unique ecoregion ID and returns the fitted coefficient
  // vector [b0..b8], ready for calculation of probabilities. The filtered
  // collection contains redundant rows — the ecoregion is technically a
  // multipolygon and every element carries the same coefficients as
  // properties — so the first feature suffices (client-side getInfo call).
  var rows = COEFS.filterMetadata('eco_id', 'equals', ecoid);
  var props = rows.getInfo().features[0].properties;
  return [props.b0, props.b1, props.b2, props.b3, props.b4,
          props.b5, props.b6, props.b7, props.b8];
};
function addSecs(img, dt) {
  // Stamps `img` with a 'secs' metadata property: the supplied date
  // expressed as seconds since the start of the UNIX epoch (the date's
  // millisecond value is fetched client-side via getInfo).
  var millis = ee.Date(dt).getInfo().value;
  return img.set({'secs': millis / 1000});
}
var calculateProbabilities = function(start_date, end_date, ecoid) {
  // Accepts the start and end date for the estimation and the ecoregion
  // identifier. Returns the raw score as an image: the per-pixel feature
  // vector dotted with the ecoregion's fitted coefficients (b1..b8) plus
  // the intercept (b0), with the end date attached as a 'secs' metadata
  // property via addSecs.
  var eco_poly = getEcoPolygon(ecoid);
  var feature_image = getInput(start_date, end_date).clip(eco_poly);
  var coefs = grabCoefficientVector(ecoid);
  // Dot product of coefficients and feature bands, adding the intercept.
  var probs = feature_image.multiply(coefs.slice(1)).reduce("sum").add(coefs[0]);
  // Fixed: terminate the statement explicitly instead of relying on ASI.
  var prob_img = addSecs(probs, end_date);
  return prob_img;
};
var timeAboveThresh = function (img) {
  // Per pixel: keep the image's 'secs' timestamp wherever the score meets
  // the ecoregion threshold (global THRESH), and zero it out elsewhere.
  var below = img.gte(THRESH).not();
  return img.metadata('secs').where(below, 0);
};
var firstHit = function (coll) {
  // Earliest alert date per pixel, conditional on there being a hit at
  // all: map each score image to its hit timestamp, mask away the zeros
  // (non-hits), and take the per-pixel minimum across the collection.
  var stamped = coll.map(timeAboveThresh);
  var masked = stamped.map(function (img) {
    return img.mask(img.gt(0));
  });
  return masked.min();
};
// Evaluate the image at a random location in Indonesia, just to show the | |
// structure of the result. | |
function show(desc, image) { | |
print(desc); | |
print(ee.ImageCollection([image]) | |
.getRegion(ee.Feature.Point(101.73615,1.45550), 30) | |
.getInfo()); | |
} | |
// Example hook into the script: calculates scores for a specific ecoregion
// over five nested periods, smooths them with a 3-period moving average,
// and maps the date of the first above-threshold alert as pixel values.
var exampleCalc = function() {
  // Five cumulative estimation windows: fixed start date, telescoping end.
  var p0 = calculateProbabilities('2000-02-18', '2005-12-31', ECOID);
  var p1 = calculateProbabilities('2000-02-18', '2006-12-31', ECOID);
  var p2 = calculateProbabilities('2000-02-18', '2007-12-31', ECOID);
  var p3 = calculateProbabilities('2000-02-18', '2008-12-31', ECOID);
  var p4 = calculateProbabilities('2000-02-18', '2009-12-31', ECOID);
  var coll = ee.ImageCollection.fromImages([p0, p1, p2, p3, p4]);
  // Get all values in each band in array form. Note the result has the
  // same number of bands as the image collection, unlike the other cases
  // that all produce a single band called 'array'.
  var image_arrayPerBand = coll.toArrayPerBand();
  show('collection.toArrayPerBand()', coll.toArrayPerBand());
  // Slice the per-pixel time array into three staggered views so that
  // element i of each slice lines up with periods i, i+1, and i+2.
  var slice1 = image_arrayPerBand.arraySlice(0,0,-2);
  var slice2 = image_arrayPerBand.arraySlice(0,1,-1);
  var slice3 = image_arrayPerBand.arraySlice(0,2);
  // Equal-weight (1/3 each) moving average over three consecutive periods.
  var collection_weighted = slice1.multiply(1.0/3)
  .add(slice2.multiply(1.0/3))
  .add(slice3.multiply(1.0/3));
  show('collection_weighted', collection_weighted);
  // Unpack the three smoothed values and stamp each with the end date of
  // the latest period it averages over.
  var weighted_012 = collection_weighted.arrayGet([0]);
  var weighted_123 = collection_weighted.arrayGet([1]);
  var weighted_234 = collection_weighted.arrayGet([2]);
  var w0 = addSecs(weighted_012, '2007-12-31');
  var w1 = addSecs(weighted_123, '2008-12-31');
  var w2 = addSecs(weighted_234, '2009-12-31');
  var smoothed_coll = ee.ImageCollection.fromImages(
  [w0, w1, w2]
  );
  // Clip and mask to the area of interest and display the final data:
  // pixel value is the epoch-seconds date of the first alert.
  var vcf = prepVCF(25);
  var eco_poly = getEcoPolygon(ECOID);
  var hits = firstHit(smoothed_coll);
  var final = hits.mask(vcf).clip(eco_poly);
  addToMap(final.mask(final), {palette:'FF0000'}, "forma hits");
  centerMap(102.07397, 1.32923, 10);
};
// Entry point: run the example calculation and render the alerts on the map.
exampleCalc();
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment