Ryan Hill ryan-hill

#FTP location
ftpdir <- 'ftp://newftp.epa.gov/EPADataCommons/ORD/NHDPlusLandscapeAttributes/StreamCat/States/'
#Your output directory
out_dir <- 'D:/Lixo/'
#Desired tables (change to names of desired tables)
tables <- c('NLCD2001RipBuf100_CA','Kffact_CA','ImperviousSurfaces2001_CA')
#Get URL listing, split the returned string, and keep only the desired tables by name ('tables' above)
library(RCurl)
url_list <- getURL(ftpdir, dirlistonly = TRUE)
url_list <- strsplit(url_list, split = '\r\n')[[1]]
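The preview cuts off before the table names are matched or anything is downloaded. A minimal sketch of the remaining steps, assuming each matched entry in url_list is a zip file name that can be appended to ftpdir (out_dir is defined above; grepl and download.file are base R):
#Keep only the entries that match one of the requested tables
matched <- url_list[grepl(paste(tables, collapse = '|'), url_list)]
#Download each matched file to the output directory (binary mode for zip files)
for (f in matched) {
  download.file(paste0(ftpdir, f), paste0(out_dir, f), mode = 'wb')
}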
@ryan-hill
ryan-hill / batch-StreamCat-download.R
Last active June 11, 2018 22:54
Batch download StreamCat files for entire US based on desired table
#FTP location
ftpdir <- 'ftp://newftp.epa.gov/EPADataCommons/ORD/NHDPlusLandscapeAttributes/StreamCat/HydroRegions/'
#Desired table (change to name of desired table)
table <- 'PredictedBioCondition'
#Get URL, split returned list, select out only desired tables by name ('table' above)
library(RCurl)
url_list <- getURL(ftpdir, dirlistonly = TRUE)
url_list <- strsplit(url_list, split = '\r\n')[[1]]
url_list <- url_list[grep(table, url_list)]
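As with the state-level version above, the preview stops after the file list is filtered. A hedged continuation, assuming the matched entries are zipped regional csv files and that an output directory is defined (the path below is only an illustration, not part of the gist):
out_dir <- 'C:/StreamCat/'  #assumed output directory, not shown in the original gist
for (f in url_list) {
  dest <- paste0(out_dir, f)
  download.file(paste0(ftpdir, f), dest, mode = 'wb')
  unzip(dest, exdir = out_dir)  #unpack the regional table, assuming the FTP files are zip archives
}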
#States need to be in an equal-area projection
#5070 is the EPSG code for the USGS Albers (CONUS Albers Equal Area) projection
library(sf); library(dplyr)  #sf for the st_* functions, dplyr for the pipe used below
states <- st_transform(states, crs = 5070)
#Create centroids of states
cntr <- st_centroid(states)
#Bind these coordinates to the centroid feature
cntr <- cbind(cntr, st_coordinates(cntr))
#Get centroid for conterminous US
xy <- states %>% st_union() %>%
st_centroid() %>% st_coordinates()
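A common follow-up is to use these coordinates for labeling. A brief sketch, assuming ggplot2 is available and that the states layer has a NAME column (that column name is a guess, not part of the gist):
library(ggplot2)
ggplot(states) +
  geom_sf() +
  geom_text(data = cntr, aes(x = X, y = Y, label = NAME), size = 2) +
  annotate('point', x = xy[1, 'X'], y = xy[1, 'Y'], colour = 'red')  #centroid of the conterminous US from above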
library(jsonlite); library(sf); library(sp); library(geojsonio)
watershed = function(state, lon, lat, sf = TRUE){
  #Build the StreamStats watershed-delineation request URL from its pieces
  p1 = 'https://streamstats.usgs.gov/streamstatsservices/watershed.geojson?rcode='
  p2 = '&xlocation='
  p3 = '&ylocation='
  p4 = '&crs=4326&includeparameters=false&includeflowtypes=false&includefeatures=true&simplify=true'
  query <- paste0(p1, state, p2, toString(lon), p3, toString(lat), p4)
  #Query the service and pull the watershed polygon out of the returned feature collection
  mydata <- fromJSON(query, simplifyVector = FALSE, simplifyDataFrame = FALSE)
  poly_geojsonsting <- toJSON(mydata$featurecollection[[2]]$feature, auto_unbox = TRUE)
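The rest of the function body is cut off in the preview, but the intended usage appears to be one call per site. A hedged example, with arbitrary illustrative coordinates near Portland, OR:
ws <- watershed(state = 'OR', lon = -122.68, lat = 45.52)
#If sf = TRUE ultimately returns an sf polygon (assumed here), it can be plotted directly
plot(st_geometry(ws))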
@ryan-hill
ryan-hill / gages-near-portland2.R
Last active May 6, 2018 18:03
Gist used to create hidden answers for challenge questions in Intro to GIS in R course (https://ryan-hill.github.io/sfs-r-gis-2018/)
#Read in gages data and convert to spatial points data frame
#Give it the **pts** CRS and reproject to **pts2** CRS
#Select out Portland and use `gBuffer` from `rgeos` package with width = 50,000 meters.
#Use `over` function from `sp` package to identify overlapping points with 50 km buffer.
library(sp); library(rgeos)
gages <- read.csv('./data/StreamGages.csv')
gages <- SpatialPointsDataFrame(gages[c('LON_SITE','LAT_SITE')], gages)
gages@proj4string <- pts@proj4string
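The buffer-and-overlay steps described in the comments are not shown in the preview. A hedged sketch of how they might look, assuming a projected cities layer pts2 in meters and a hypothetical name column used to pull out Portland:
#Reproject gages to the pts2 CRS (assumed to be in meters)
gages2 <- spTransform(gages, pts2@proj4string)
portland <- pts2[pts2$name == 'Portland', ]  #'name' is a hypothetical column
buf50 <- gBuffer(portland, width = 50000)    #50 km buffer around Portland
inside <- over(gages2, buf50)                #NA where a gage falls outside the buffer
sum(!is.na(inside))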
@ryan-hill
ryan-hill / gages-near-portland.R
Last active May 6, 2018 18:02
Gist used to create hidden answers for challenge questions in Intro to GIS in R course (https://ryan-hill.github.io/sfs-r-gis-2018/)
#Read in gages data and convert to spatial points data frame
#Give it the **pts** CRS and reproject to **pts2** CRS
#Select out Portland and use `gDistance` from `rgeos` package with Portland as x and gages as y in the function.
#Sum across TRUE/FALSE values in query. R will count TRUE == 1 and FALSE == 0.
library(sp); library(rgeos)
gages <- read.csv('./data/StreamGages.csv')
gages <- SpatialPointsDataFrame(gages[c('LON_SITE','LAT_SITE')], gages)
gages@proj4string <- pts@proj4string
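Again, the distance step itself is not shown in the preview. A hedged sketch, reusing the reprojected gages2 and the hypothetical Portland selection from the previous gist:
d <- gDistance(portland, gages2, byid = TRUE)  #pairwise distances in meters
sum(d <= 50000)                                #TRUE counts as 1, FALSE as 0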
@ryan-hill
ryan-hill / walk_up.py
Created October 19, 2017 16:50 — forked from debboutr/walk_up.py
recursive function to walk up flow table. NOTE: this will not handle braided flowlines!
from StreamCat_functions import dbf2DF
pre = 'D:/NHDPlusV21/NHDPlusGL/NHDPlus04'
fline = dbf2DF('%s/NHDSnapshot/Hydrography/NHDFlowline.dbf' % pre)
flow = dbf2DF('%s/NHDPlusAttributes/PlusFlow.dbf' % pre)[['TOCOMID','FROMCOMID']]

def recurs(val, ups):
    print val
    ups = ups + flow.ix[flow.TOCOMID == val].FROMCOMID.tolist()
    if 0 in ups:
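The recursion is truncated in the preview. For readers working in R, a rough equivalent of the same upstream walk (a sketch only, assuming a data frame flow with TOCOMID/FROMCOMID columns and, like the Python version, no handling of braided flowlines):
#Recursively collect every FROMCOMID upstream of a starting COMID
recurs_up <- function(val, flow, ups = c()) {
  froms <- flow$FROMCOMID[flow$TOCOMID == val]
  froms <- froms[froms != 0]  #0 marks no upstream flowline
  ups <- c(ups, froms)
  for (f in froms) {
    ups <- recurs_up(f, flow, ups)
  }
  unique(ups)
}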
#Code loops through StreamCat files on drive and combines into long table
#Also removes ancillary columns
combine_streamcat = function(x, wd){
  hydro.rgns <- c("01","02","03S","03N","03W","04","05","06","07","08","09","10L","10U","11","12","13","14","15","16","17","18")
  for(i in 1:length(hydro.rgns)){
    print(hydro.rgns[i])
    if(i == 1){
      outDF = read.csv(paste0(wd, x, '_Region', hydro.rgns[i], '.csv'))
    }else{
      tmpDF = read.csv(paste0(wd, x, '_Region', hydro.rgns[i], '.csv'))
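The preview ends inside the loop, but the intended call pattern is clear from the function signature. A hypothetical usage example (the table name and folder path are placeholders, not from the gist):
#Combine all regional files for one StreamCat table stored in a local folder
nlcd_all <- combine_streamcat('NLCD2011', 'C:/StreamCat/')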
import numpy as np
import pandas as pd
#Read in .csv file
nlcd = pd.read_csv('NLCD2011_FINAL.csv')
#Select desired columns - keeps the first column plus columns from index 23 (the 24th column) to the end
nlcd = nlcd.iloc[:, np.r_[:1, 23:len(nlcd.columns)]]
#Strip out 'Ws' string from column names that contain it
newnames = [w.replace('Ws', '') for w in nlcd.columns]
#rename columns
nlcd.columns = newnames
@ryan-hill
ryan-hill / float2int.py
Created October 14, 2015 00:35 — forked from bryanluman/float2int.py
Converting a large DEM with rasterio
import numpy as np
import rasterio
"""
2014-02-13
Bryan Luman
Use it however you like at your own risk
Problem:
I have a huge DEM converted from LiDAR LAS points. I'd like to make it slightly