miscellaneous scripts
# PowerShell: print a tab-separated report of subfolder sizes (MB) under c:\data
$startfolder = "c:\data\*"
$folders = Get-ChildItem $startfolder | Where-Object { $_.PSIsContainer }
"Directory Name`tDirectory Size (MB)"
foreach ($fol in $folders){
    $colItems = (Get-ChildItem $fol.FullName -Recurse | Measure-Object -Property length -Sum)
    $size = "{0:N2}" -f ($colItems.Sum / 1MB)
    "$($fol.Name)`t$size"
}
#install.packages("RSQLite")
#install.packages("XML")
#install.packages("ggplot2")
#install.packages(c("maps","mapproj"))
#install.packages("stalkR_0.02.zip", repos=NULL, type="source")
#install.packages("C:/Users/ermias/Documents/stalkR.zip", repos=NULL, type="source")
#install.packages("C:/Users/ermias/Documents/stalkR_0.02.zip",lib="C:/Users/ermias/Documents/R/win-library/2.13", repos=NULL, type="source")
#install.packages('RgoogleMaps')
library(RgoogleMaps)
Df<-read.csv("/media/data/Dropbox/R/mapping/pmap3.csv",sep=";",header=FALSE)
names(Df)<-c("Latitude", "Longitude","key")
bb <- qbbox(lat=range(Df$Latitude), lon=range(Df$Longitude))
m <- c(mean(Df$Latitude), mean(Df$Longitude))
zoom <- min(MaxZoom(latrange=bb$latR,lonrange=bb$lonR))
Map <- GetMap.bbox(bb$lonR, bb$latR, zoom=zoom, maptype="mobile",
NEWMAP=TRUE, destfile="tempmap.jpg", RETURNIMAGE=TRUE, GRAYSCALE=TRUE)
tmp <- PlotOnStaticMap(lat=Df$Latitude, lon=Df$Longitude,
cex=.7,pch=20,col="red", MyMap=Map, NEWMAP=FALSE)
pm=read.csv("pm10.txt",sep=",",header=TRUE)
pm=pm[,c(1,2,5)]
bb <- qbbox(lat=range(pm$Lat), lon=range(pm$Long))
m <- c(mean(pm$Lat), mean(pm$Long))
zoom <- min(MaxZoom(latrange=bb$latR,lonrange=bb$lonR))
Map <- GetMap.bbox(bb$lonR, bb$latR, zoom=zoom, maptype="mobile",
NEWMAP=TRUE, destfile="tempmap.jpg", RETURNIMAGE=TRUE, GRAYSCALE=TRUE)
tmp <- PlotOnStaticMap(lat=pm$Lat, lon=pm$Long,
cex=1,pch=20,col="red", MyMap=Map, NEWMAP=FALSE)
##OR
tmp <- PlotOnStaticMap(lat=pm$Lat, lon=pm$Long,
cex=1,pch=20,col=heat.colors(nrow(pm), alpha = pm$pm10/max(pm$pm10)), MyMap=Map, NEWMAP=FALSE) # alpha must be in [0,1], so rescale pm10
### meusezinc data
library(rgdal)
Df=read.csv("/media/data/Dropbox/R/mapping/meusezinc.csv",sep=" ",header=TRUE)
coordinates(Df)=~ x + y
proj4string(Df) = CRS("+init=epsg:28992")
bbox(Df)
Df1 = spTransform(Df, CRS("+init=epsg:4326"))
bbox(Df1)
Df2=as.data.frame(Df1)
names(Df2)<-c("zinc","Longitude","Latitude")
bb <- qbbox(lat=range(Df2$Latitude), lon=range(Df2$Longitude))
m <- c(mean(Df2$Latitude), mean(Df2$Longitude))
zoom <- min(MaxZoom(latrange=bb$latR,lonrange=bb$lonR))
Map <- GetMap.bbox(bb$lonR, bb$latR, zoom=zoom, maptype="mobile",
NEWMAP=TRUE, destfile="tempmap.jpg", RETURNIMAGE=TRUE, GRAYSCALE=TRUE)
## rescale=(0.5*(Df2$zinc+max(Df2$zinc))-min(Df2$zinc))/(max(Df2$zinc)-min(Df2$zinc))
rescaled=(Df2$zinc-min(Df2$zinc))/(max(Df2$zinc)-min(Df2$zinc))
tmp <- PlotOnStaticMap(lat=Df2$Latitude, lon=Df2$Longitude,
cex=2,pch=20,col=heat.colors(nrow(Df2), alpha = rescaled), MyMap=Map, NEWMAP=FALSE)
#### Meuse data coordinate transformation
library(gstat)
library(rgdal)
data(meuse)
coordinates(meuse) =~ x + y
proj4string(meuse) = CRS("+init=epsg:28992")
bbox(meuse)
meuse1 = spTransform(meuse, CRS(paste("+proj=stere +lat_0=90",
"+lon_0=0.0 +lat_ts=60.0 +a=6378388 +b=6356912 +x_0=0 +y_0=0")))
bbox(meuse1)
meuse2 <- spTransform(meuse1, CRS("+init=epsg:28992"))
bbox(meuse2)
all.equal(bbox(meuse), bbox(meuse2))
###########################################
#### Meuse data plot on google
library(gstat)
library(rgdal)
library(ReadImages)
library(RgoogleMaps)
data(meuse)
coordinates(meuse) =~ x + y
proj4string(meuse) = CRS("+init=epsg:28992")
bbox(meuse)
meuse1 = spTransform(meuse, CRS("+init=epsg:4326"))
bbox(meuse1)
meuse2=as.data.frame(meuse1)
mzn=meuse2[,c(14,13,4)]
names(mzn)<-c("Latitude","Longitude","zinc")
bb <- qbbox(lat=range(mzn$Latitude), lon=range(mzn$Longitude))
m <- c(mean(mzn$Latitude), mean(mzn$Longitude))
zoom <- min(MaxZoom(latrange=bb$latR,lonrange=bb$lonR))
Map <- GetMap.bbox(bb$lonR, bb$latR, zoom=zoom, maptype="mobile",
NEWMAP=TRUE, destfile="tempmap.jpg", RETURNIMAGE=TRUE, GRAYSCALE=TRUE)
## rescale=(0.5*(mzn$zinc+max(mzn$zinc))-min(mzn$zinc))/(max(mzn$zinc)-min(mzn$zinc))
rescaled=(mzn$zinc-min(mzn$zinc))/(max(mzn$zinc)-min(mzn$zinc))
tmp <- PlotOnStaticMap(lat=mzn$Latitude, lon=mzn$Longitude,
cex=2,pch=20,col=heat.colors(nrow(mzn), alpha = rescaled), MyMap=Map, NEWMAP=FALSE)
##### OPEN STREET MAP
library(RgoogleMaps)
png(filename="GetMap.OSM_%03d_med.png", width=480, height=480)
CologneMap <- GetMap.OSM(lonR= c(6.89, 7.09), latR = c(50.87, 51), scale = 150000, destfile = "Cologne.png");
PlotOnStaticMap(CologneMap, mar=rep(4,4), NEWMAP = FALSE, TrueProj = FALSE, axes= TRUE);
PrincetonMap <- GetMap.OSM(lonR= c(-74.67102, -74.63943), latR = c(40.33804,40.3556), scale = 12500, destfile = "Princeton.png");
png("PrincetonWithAxes.png", 1004, 732)
PlotOnStaticMap(PrincetonMap, axes = TRUE, mar = rep(4,4));
dev.off()
###### Meuse on OSM
library(gstat)
library(rgdal)
library(ReadImages)
library(RgoogleMaps)
data(meuse)
coordinates(meuse) =~ x + y
proj4string(meuse) = CRS("+init=epsg:28992")
bbox(meuse)
meuse1 = spTransform(meuse, CRS("+init=epsg:4326"))
bbox(meuse1)
meuse2=as.data.frame(meuse1)
mzn=meuse2[,c(14,13,4)]
names(mzn)<-c("Latitude","Longitude","zinc")
bb <- qbbox(lat=range(mzn$Latitude), lon=range(mzn$Longitude))
m <- c(mean(mzn$Latitude), mean(mzn$Longitude))
zoom <- min(MaxZoom(latrange=bb$latR,lonrange=bb$lonR))
Map <- GetMap.bbox(bb$lonR, bb$latR, zoom=zoom, maptype="mobile", ## or satellite
NEWMAP=TRUE, destfile="tempmap.jpg", RETURNIMAGE=TRUE, GRAYSCALE=FALSE)
##rescaled=(0.5*(mzn$zinc+max(mzn$zinc))-min(mzn$zinc))/(max(mzn$zinc)-min(mzn$zinc))
rescaled=(mzn$zinc-min(mzn$zinc))/(max(mzn$zinc)-min(mzn$zinc))
tmp <- PlotOnStaticMap(lat=mzn$Latitude, lon=mzn$Longitude,
cex=2,pch=20,col=heat.colors(nrow(mzn), alpha = rescaled), MyMap=Map, NEWMAP=TRUE)
### with OSM
MMap <- GetMap.OSM(bb$lonR, bb$latR, scale = 12500, destfile = "tempmap.png", NEWMAP = TRUE, RETURNIMAGE=TRUE);
tmp <- PlotOnStaticMap(lat=mzn$Latitude, lon=mzn$Longitude,
cex=2,pch=20,col=heat.colors(nrow(mzn), alpha = rescaled),axes = TRUE, MyMap=MMap, mar = rep(4,4), NEWMAP=FALSE)
####### Coso major faults
#install.packages("geomapdata")
library(rgdal)
library(ReadImages)
library(RgoogleMaps)
library(geomapdata)
data(cosomap)
bb <- qbbox(lon=cosomap$POINTS$lon-360,lat=cosomap$POINTS$lat)
MyMap <- GetMap.bbox(bb$lonR, bb$latR,destfile = "Coso.png",
maptype="satellite",zoom=11)
tmp <- PlotOnStaticMap(MyMap,lon=cosomap$POINTS$lon-360,
lat=cosomap$POINTS$lat, pch=20,cex = .5,col='red',verbose=0);
dev.off()
############ earthquake data
library(rgdal)
library(ReadImages)
library(RgoogleMaps)
#setwd("F:././Documents/R/")
pm<-read.csv("eqs7day-M1.txt", header=TRUE)
bb <- qbbox(lat=range(pm$Lat), lon=range(pm$Lon))
m <- c(mean(pm$Lat), mean(pm$Lon))
zoom <- min(MaxZoom(latrange=bb$latR,lonrange=bb$lonR))
Map <- GetMap.bbox(bb$lonR, bb$latR, zoom=zoom, maptype="mobile",
NEWMAP=TRUE, destfile="tempmap.jpg", RETURNIMAGE=TRUE, GRAYSCALE=TRUE)
tmp <- PlotOnStaticMap(lat=pm$Lat, lon=pm$Lon,
cex=1,pch=20,col="red", MyMap=Map, NEWMAP=FALSE)
##OR
tmp <- PlotOnStaticMap(lat=pm$Lat, lon=pm$Lon,
cex=1,pch=20,col=heat.colors(nrow(pm), alpha = pm$Magnitude/max(pm$Magnitude)), MyMap=Map, NEWMAP=FALSE) # alpha must be in [0,1], so rescale
######## Mengist Data
library(rgdal)
library(ReadImages)
library(RgoogleMaps)
setwd("F:././Documents/R/")
geoch<-read.csv("020093942011521183834.csv", header=TRUE)
geo<-geoch[,c(6,7,8,9)]
names(geo)<-c("Lat","Lon","LatMax","LonMax")
geo<-geo[-(99:109),]
pm<-geo
bb <- qbbox(lat=range(pm$Lat), lon=range(pm$Lon))
m <- c(mean(pm$Lat), mean(pm$Lon))
zoom <- min(MaxZoom(latrange=bb$latR,lonrange=bb$lonR))
Map <- GetMap.bbox(bb$lonR, bb$latR, zoom=zoom, maptype="mobile",
NEWMAP=TRUE, destfile="tempmap.jpg", RETURNIMAGE=TRUE, GRAYSCALE=TRUE)
tmp <- PlotOnStaticMap(lat=pm$Lat, lon=pm$Lon,
cex=1,pch=20,col="red", MyMap=Map, NEWMAP=FALSE)
## OR with translucency (this dataset has no Magnitude column, so use a fixed alpha)
tmp <- PlotOnStaticMap(lat=pm$Lat, lon=pm$Lon,
cex=1,pch=20,col=heat.colors(nrow(pm), alpha = 0.7), MyMap=Map, NEWMAP=FALSE)
#interpolation of xyz data and projection onto a map
map.xyz <- function(xyz_data, file_name="map.xyz.png",
projection="stereographic", orientation=NULL,
par=NULL, fill=FALSE, nside=40, breaks=100, res=100,
xlim = NULL, ylim = NULL
){
if(length(which(rowSums(is.na(xyz_data)) != 0)) > 0){
xyz_data <- xyz_data[-which(rowSums(is.na(xyz_data)) != 0),]
}
require(akima)
require(maps)
require(mapproj)
temp<-interp(
xyz_data[,1], xyz_data[,2], xyz_data[,3],
xo=seq(min(xyz_data[,1]), max(xyz_data[,1]), length = nside),
yo=seq(min(xyz_data[,2]), max(xyz_data[,2]), length = nside, extrap=TRUE)
)
polys<-vector("list", length(as.vector(temp$z)))
for(i in 1:length(polys)){
lonx <- pos2coord(pos=i, mat=temp$z)$coord[1]
laty <- pos2coord(pos=i, mat=temp$z)$coord[2]
ifelse(laty < length(temp$y), neigh_y<-c(laty+1,lonx), neigh_y<-c(laty-1,lonx))
ifelse(lonx < length(temp$x), neigh_x<-c(laty,lonx+1), neigh_x<-c(laty,lonx-1))
dist_y <- earth.dist(temp$x[lonx], temp$y[laty], temp$x[neigh_y[2]], temp$y[neigh_y[1]])
dist_x <- earth.dist(temp$x[lonx], temp$y[laty], temp$x[neigh_x[2]], temp$y[neigh_x[1]])
s1 = new.lon.lat(temp$x[lonx], temp$y[laty], 180, dist_y/2)
s3 = new.lon.lat(temp$x[lonx], temp$y[laty], 0, dist_y/2)
s2 = new.lon.lat(temp$x[lonx], temp$y[laty], 270, dist_x/2)
s4 = new.lon.lat(temp$x[lonx], temp$y[laty], 90, dist_x/2)
polys[[i]] = cbind(c(s2[1], s2[1], s4[1], s4[1]), c(s1[2], s3[2], s3[2], s1[2]))
}
M.range=range(temp$z, na.rm=TRUE)
M.breaks <- pretty(M.range, n=breaks)
M.cols=colorRampPalette(c("blue","cyan", "yellow", "red"),space="rgb")
colorlut <- M.cols(length(M.breaks)) # color lookup table
colorvalues <- colorlut[((as.vector(temp$z)-M.breaks[1])/(range(M.breaks)[2]-range(M.breaks)[1])*length(M.breaks))+1] # assign colors to heights for each point
if(is.null(xlim)){xlim <- range(xyz_data[,1], na.rm=TRUE)}
if(is.null(ylim)){ylim <- range(xyz_data[,2], na.rm=TRUE)}
png(filename=file_name, res=res)
map("world",projection=projection, orientation=orientation, par=par, fill=fill, xlim=xlim, ylim=ylim)
for(i in 1:length(polys)){
polygon(mapproject(x=polys[[i]][,1], y=polys[[i]][,2]), col=colorvalues[i], border=NA)
print(i)
}
map("world", add=TRUE, projection="", par=par, fill=fill, xlim=xlim, ylim=ylim)
map.grid(c(-180, 180, -80, 80), nx=10, ny=18, labels=FALSE, col="grey")
dev.off()
}
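## Hypothetical usage sketch (assumes the helpers pos2coord, earth.dist and
## new.lon.lat defined below are already sourced; the random data, file name
## and projection centre are made up for illustration):
# set.seed(1)
# xyz <- cbind(lon = runif(200, -10, 30), lat = runif(200, 35, 60), z = rnorm(200))
# map.xyz(xyz, file_name = "map.xyz.demo.png", orientation = c(90, 0, 10), nside = 20)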
# convert between a linear (column-major) position and a (row, col) coordinate of a matrix
pos2coord<-function(pos=NULL, coord=NULL, mat=NULL){
  if(is.null(pos) & is.null(coord) & is.null(mat)){
    stop("must supply either 'pos' or 'coord', and 'mat'")
  }
  if(is.null(pos) & !is.null(coord) & !is.null(mat)){
    pos <- ((coord[2]-1)*dim(mat)[1])+coord[1]
  }
  if(!is.null(pos) & is.null(coord) & !is.null(mat)){
    coord <- NA*c(1:2)
    coord[1] <- ((pos-1) %% dim(mat)[1]) +1
    coord[2] <- ((pos-1) %/% dim(mat)[1])+1
  }
  list(pos=pos, coord=coord)
}
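## quick sanity check: in a 3x4 matrix, linear (column-major) position 7
## is row 1, column 3:
# pos2coord(pos = 7, mat = matrix(1:12, nrow = 3))$coord   # 1 3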
#distance in kilometers between two long/lat positions (from "fossil" package)
earth.dist <- function (long1, lat1, long2, lat2)
{
  rad <- pi/180
  a1 <- lat1 * rad
  a2 <- long1 * rad
  b1 <- lat2 * rad
  b2 <- long2 * rad
  dlon <- b2 - a2
  dlat <- b1 - a1
  a <- (sin(dlat/2))^2 + cos(a1) * cos(b1) * (sin(dlon/2))^2
  c <- 2 * atan2(sqrt(a), sqrt(1 - a))
  R <- 6378.145
  d <- R * c
  return(d)
}
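## e.g. great-circle distance Oslo -> Tromsø (roughly 1150 km):
# earth.dist(10.75, 59.91, 18.96, 69.65)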
#new long/lat position given a starting lon/lat and degree bearing (from "fossil" package)
new.lon.lat <- function (lon, lat, bearing, distance) {
  rad <- pi/180
  a1 <- lat * rad
  a2 <- lon * rad
  tc <- bearing * rad
  d <- distance/6378.145
  nlat <- asin(sin(a1) * cos(d) + cos(a1) * sin(d) * cos(tc))
  dlon <- atan2(sin(tc) * sin(d) * cos(a1), cos(d) - sin(a1) *
                  sin(nlat))
  nlon <- ((a2 + dlon + pi)%%(2 * pi)) - pi
  npts <- c(nlon/rad, nlat/rad)
  return(npts)
}
#degree bearing between two long/lat positions (from "fossil" package)
earth.bear <- function (long1, lat1, long2, lat2)
{
  rad <- pi/180
  a1 <- lat1 * rad
  a2 <- long1 * rad
  b1 <- lat2 * rad
  b2 <- long2 * rad
  dlon <- b2 - a2
  bear <- atan2(sin(dlon) * cos(b1), cos(a1) * sin(b1) - sin(a1) *
                  cos(b1) * cos(dlon))
  deg <- (bear%%(2 * pi)) * (180/pi)
  return(deg)
}
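## e.g. bearings from the origin: due north is 0 degrees, due east is 90:
# earth.bear(0, 0, 0, 10)   # 0
# earth.bear(0, 0, 10, 0)   # 90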
lon.lat.filter <- function (lon_vector, lat_vector, west, east, north, south)
{
  # handle boxes that cross the dateline (west > east) by shifting to 0-360 longitudes
  if(west>east) {
    lon_vector_new=replace(lon_vector, which(lon_vector<0), lon_vector[which(lon_vector<0)]+360)
    east_new=east+360
  } else {
    lon_vector_new=lon_vector
    east_new=east
  }
  hits=which(lon_vector_new < east_new & lon_vector_new > west & lat_vector < north & lat_vector > south)
  return(hits)
}
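## e.g. indices of points falling in a box that crosses the dateline
## (west = 170, east = -170): returns 1 2
# lon.lat.filter(c(175, -178, -160), c(10, 12, 11), 170, -170, 20, 0)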
import numpy
from functools import reduce  # needed on Python 3

def mapFunc(row):
    "Calculate the statistics for a single row of data."
    return (numpy.size(row), numpy.mean(row), numpy.var(row))

def reduceFunc(row1, row2):
    "Calculate the combined statistics from two rows of data."
    n_a, mean_a, var_a = row1
    n_b, mean_b, var_b = row2
    n_ab = n_a + n_b
    mean_ab = ((mean_a * n_a) + (mean_b * n_b)) / n_ab
    # pooled (population) variance plus a between-means correction term
    var_ab = (((n_a * var_a) + (n_b * var_b)) / n_ab) + ((n_a * n_b) * ((mean_b - mean_a) / n_ab)**2)
    return (n_ab, mean_ab, var_ab)

numRows = 100
numSamplesPerRow = 500
x = numpy.random.rand(numRows, numSamplesPerRow)
y = reduce(reduceFunc, map(mapFunc, x))
print("n=%d, mean=%f, var=%f" % y)
# TODO: Add comment
#
# Author: ERMIAS
###############################################################################
# Circle lengths
j = seq(0.1,1.9,.08)
par(bg = "black")
plot(-2,-2,pch=".",xlim=c(-2,2),ylim=c(-2,2),col="white")
# How many dots around the circle?
dots = 1000
# Create an offkilter circle
rads = seq(0,2*pi,2*pi/dots)
for(aLength in j) {
# Pick a random color
myCol = paste("#",paste(sample(c(1:9,"A","B","C","D","E","F"),6,replace=T),collapse=""),collapse="",sep="")
# Start at length = 1, then walk.
myLength = rep(aLength,dots)
for(i in 2:dots) {
myLength[i] = myLength[(i-1)] + rnorm(1,0,sd=.005)
# Closer we are to end, faster we return to where started so circle closes
dist = aLength - myLength[i]
myLength[i] = aLength - (dist*((dots-(i/4))/(dots)))
}
for(i in 1:dots) {
cat(myLength[i]*cos(rads[i]),myLength[i]*sin(rads[i]),"\n")
points(myLength[i]*cos(rads[i]),myLength[i]*sin(rads[i]),col=myCol,pch=20,cex=2)
}
}
########################################################
##################### ERITREA ###########################
library(sp)
#install.packages("RColorBrewer")
# get spatial data for ERITREA on regions level
con <- url("http://www4.uji.es/~al143368/ERI_adm2.RData")
print(load(con))
close(con)
# plot Eritrea with random colors
col = rainbow(length(levels(gadm$NAME_2)))
spplot(gadm, "NAME_2", col.regions=col, main="Eritrean Regions",
colorkey = FALSE, lwd=.4, col="white")
################### NASA TEMPERATURE ANOMALY ############
setwd("C:\\Users\\ermias\\Documents\\IFGI\\Geostatistics_SS2010")
#giss<-read.table("AnnualMeanSurfaceAirTemperatureChange.txt",header=T)
library(pwr)
library(ggplot2)
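## NB: this section uses the pre-0.9 ggplot2 API: opts(), theme_line(),
## theme_rect() and theme_blank() correspond to theme(), element_line(),
## element_rect(), element_blank() and ggtitle() in current ggplot2.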
URL <- "http://www4.uji.es/"
PATH <- "~al143368/"
FILE <- "AnnualMeanSurfaceAirTemperatureChange.txt"
#download.file(paste(URL,PATH,FILE,sep=""),"AirTemp.txt")
#giss<-read.table(file = "AirTemp.txt",header=T)
airTemp<-url(paste(URL, PATH, FILE, sep = ""),open = "rt")
giss<-read.table(airTemp,header=T)
### OR
airTemp<-url(paste("http://www4.uji.es/~al143368/AnnualMeanSurfaceAirTemperatureChange.txt"),open = "rt")
giss<-read.table(airTemp,header=T)
#download.file("http://www4.uji.es/~al143368/AnnualMeanSurfaceAirTemperatureChange.txt","AirTemp.txt")
#giss<-read.table(file = "AirTemp2.txt",header=T)
head(giss, 2)
## Year Annual_Mean X5_Year_Mean
## 1 1880 -0.28 NA
## 2 1881 -0.21 NA
colnames(giss)[1]= "Year"
colnames(giss)[2]= "Annual_Mean"
colnames(giss)[3]= "X5_Year_Mean"
## calculate power for different sample sizes
n <- 130 # most recent year index
m <- 100 # oldest year index
f2 <- 0.35 # Cohen's f2: 0.02, 0.15, and 0.35 represent small, medium, and large effect sizes
alpha <- 0.05
pwr.graph <- function(n, m, f2, alpha) {
  giss.n <- (m+4):n-m+1 # sample size
  giss.pwr <- cbind(giss.n, 0, 0, 0)
  for (i in giss.n - 4) {
    giss.data <- giss[(n-i-4+1):n,]
    giss.lm <- lm(Annual_Mean ~ Year, data = giss.data)
    giss.pwr[i,3] <- giss.r2 <- summary(giss.lm)$r.squared
    giss.pwr[i,2] <- pwr.f2.test(summary(giss.lm)$fstatistic[2], summary(giss.lm)$fstatistic[3],
                                 giss.r2 / (1 - giss.r2), alpha)$power
    giss.pwr[i,4] <- as.numeric(tail(giss.data, 1)[1])
  }
  giss.pwr
}
n <- 130
giss.pwr <- pwr.graph(n, n-30, f2, alpha)
for (n in rev(seq(70, n-1, by = 1))) {
giss.pwr <- rbind(giss.pwr, pwr.graph(n,n-30,f2,alpha))
}
colnames(giss.pwr) <- c("Sample Size", "Power", "R^2", "Latest Year")
fac.name <- function(value, name) {
  x <- value
  names(x) <- name
  x
}
Latest.Year <- giss.pwr[,4]
num.cuts <- 7 # cut(unique(Latest.Year), num.cuts) should be whole numbers
giss.legend.name <- as.numeric(sub("[^,]*,([^]]*)\\]", "\\1", unique(cut(unique(Latest.Year), num.cuts))))
names(giss.legend.name) <- unique(cut(unique(Latest.Year), num.cuts))
p1 <- ggplot(aes(Sample.Size, Power), data = data.frame(giss.pwr)) +
geom_point(aes(fill = cut(Latest.Year, num.cuts), col = cut(Latest.Year, num.cuts)), alpha = I(.2), size = I(3), position = "jitter") +
stat_smooth(aes(fill = cut(Latest.Year, num.cuts), col = cut(Latest.Year, num.cuts)), fullrange = T) +
stat_summary(aes(group = 1), fun.data = "mean_cl_boot", geom = "errorbar", color = "white", size = I(1.1)) +
scale_colour_manual(name = "Latest Year", values = as.factor(fac.name(rgb(seq(1,0,len=num.cuts),0,0), names(giss.legend.name)))) + #, breaks = giss.legend.name, labels = names(giss.legend.name)) +
scale_fill_manual(name = "Latest Year", values = as.factor(fac.name(rgb(seq(1,0,len=num.cuts),0,0), names(giss.legend.name)))) +#, breaks = giss.legend.name, labels = names(giss.legend.name))
opts(title = expression(paste("GISS Temps Power Analysis of Trends: Effect Size Cohen's ", f^2))) +
ylim(0,1.1) + xlab("Sample Size (subtract from Latest Year to get Start Year)")
p2 <- ggplot(aes(Year, Annual_Mean), data = giss) +
scale_fill_gradient(low = "black", high = "red") +
opts(panel.grid.minor = theme_line(colour = NA),
panel.grid.major = theme_line(colour = NA),
panel.border = theme_rect(fill = NA, colour = NA),
panel.background = theme_rect(fill = NA, colour = NA),
plot.background = theme_rect(fill = NA, colour = NA),
legend.position = "none")
p2 <- p2 + geom_polygon(aes(c(Year, rev(Year)),
c(pmin(Annual_Mean, min(giss$Annual_Mean, na.rm = T), na.rm = T),
rev(pmax(Annual_Mean, max(giss$Annual_Mean, na.rm = T), na.rm = T))),
fill = Year,
group = c(cut(Year, num.cuts), rev(cut(Year, num.cuts)))),
subset = .(Year > min(giss.legend.name) - num.cuts),
alpha = I(.5)) + geom_point(col = I("white"))
# theoretical values w/ strong effect size .35
s <- 5:30 # sample sizes
s.p <- pwr.f2.test(1, s-2, .35, .05)$power
s.df <- data.frame(s, s.p)
p3 <- geom_line(aes(s, s.p), data = s.df, col = I("yellow"))
grid.newpage()
library(Hmisc)
p1 + opts(panel.border = theme_blank()) + p3
print(p2, vp = viewport(just = c("left", "top"), x = unit(.05, "npc"), y = unit(.96, "npc"), width = .4, height = .3))
################## MAPPING PACKAGE #####################
library(maps)
getDocNodeVal=function(doc, path)
{
  sapply(getNodeSet(doc, path), function(el) xmlValue(el))
}
gGeoCode=function(str)
{
  library(XML)
  # URL-encode spaces before building the request URL
  str=gsub(' ','%20',str)
  u=paste('http://maps.google.com/maps/api/geocode/xml?sensor=false&address=',str,sep='')
  doc = xmlTreeParse(u, useInternal=TRUE)
  lat=getDocNodeVal(doc, "/GeocodeResponse/result/geometry/location/lat")
  lng=getDocNodeVal(doc, "/GeocodeResponse/result/geometry/location/lng")
  # return c(lon, lat) so the result plots directly with points(loc[1], loc[2])
  c(lng,lat)
}
bullseyeEtc=function(str)
{
  for (i in 1:10){points(loc[1],loc[2],col=heat.colors(10)[i],cex=i)}
  for (i in 1:10){points(loc[1],loc[2],col=heat.colors(10)[i],cex=i*.5)}
  title(main='The R User Conference 2010', sub='July 20-23, 2010')
  mtext("NIST: Gaithersburg, Maryland, USA")
  mtext("http://user2010.org",4)
}
loc=gGeoCode('100 Bureau Drive Gaithersburg, MD, 20899, USA')
# World
map('world', plot=TRUE, fill=TRUE);bullseyeEtc()
X11()
# USA
map('usa', plot=TRUE, fill=TRUE);bullseyeEtc()
# State
map('state','Maryland', plot=TRUE, fill=TRUE);bullseyeEtc()
########################################################
################# STACK OVERFLOW SOLUTION ##############
# improved list of objects
.ls.objects <- function (pos = 1, pattern, order.by,
                         decreasing=FALSE, head=FALSE, n=5) {
  napply <- function(names, fn) sapply(names, function(x)
    fn(get(x, pos = pos)))
  names <- ls(pos = pos, pattern = pattern)
  obj.class <- napply(names, function(x) as.character(class(x))[1])
  obj.mode <- napply(names, mode)
  obj.type <- ifelse(is.na(obj.class), obj.mode, obj.class)
  obj.size <- napply(names, object.size)
  obj.dim <- t(napply(names, function(x)
    as.numeric(dim(x))[1:2]))
  vec <- is.na(obj.dim)[, 1] & (obj.type != "function")
  obj.dim[vec, 1] <- napply(names, length)[vec]
  out <- data.frame(obj.type, obj.size, obj.dim)
  names(out) <- c("Type", "Size", "Rows", "Columns")
  if (!missing(order.by))
    out <- out[order(out[[order.by]], decreasing=decreasing), ]
  if (head)
    out <- head(out, n)
  out
}
# shorthand
lsos <- function(..., n=10) {
.ls.objects(..., order.by="Size", decreasing=TRUE, head=TRUE, n=n)
}
############### edited StackOverflow manager
.ls.objects <- function (pos = 1, pattern, order.by,
                         decreasing=FALSE, head=FALSE, n=5) {
  napply <- function(names, fn) sapply(names, function(x)
    fn(get(x, pos = pos)))
  names <- ls(pos = pos, pattern = pattern)
  obj.class <- napply(names, function(x) as.character(class(x))[1])
  obj.mode <- napply(names, mode)
  obj.type <- ifelse(is.na(obj.class), obj.mode, obj.class)
  obj.size <- napply(names, object.size)
  obj.prettysize <- sapply(obj.size, function(r) prettyNum(r, big.mark = ",") )
  obj.dim <- t(napply(names, function(x)
    as.numeric(dim(x))[1:2]))
  vec <- is.na(obj.dim)[, 1] & (obj.type != "function")
  obj.dim[vec, 1] <- napply(names, length)[vec]
  out <- data.frame(obj.type, obj.size, obj.prettysize, obj.dim)
  names(out) <- c("Type", "Size", "PrettySize", "Rows", "Columns")
  if (!missing(order.by))
    out <- out[order(out[[order.by]], decreasing=decreasing), ]
  out <- out[c("Type", "PrettySize", "Rows", "Columns")]
  names(out) <- c("Type", "Size", "Rows", "Columns")
  if (head)
    out <- head(out, n)
  out
}
##############COLORS############
n <- 20
plot(1:n, pch=16, cex=1:n, col=rainbow(n)) # pch 16 = filled circle
################## World bank data##################
#install.packages("WDI")
library(WDI)
library(ggplot2)
DF <- WDI(country=c("US","CA","MX"), indicator="NY.GDP.MKTP.KD.ZG", start=1990, end=2008)
ggplot(DF, aes(year, NY.GDP.MKTP.KD.ZG, color=country)) +
  geom_line(stat="identity") + theme_bw() +
  xlab("Year") + opts(title="Annual GDP Growth rate (%)") + ylab("")
#################### World Bank 2 ###################
library('XML')
plotWorldBank = function (country='US',
                          indicator='NY.GDP.MKTP.CD',
                          start_year=2002,
                          end_year=2008,
                          color='blue'
){
  # Construct the URL
  url = paste('http://open.worldbank.org/countries/',
              country,
              '/indicators/',
              indicator,
              '?date=',
              start_year,
              ':',
              end_year,
              sep='')
  # Print URL for reference
  print(url)
  # Parse the XML
  doc = xmlTreeParse(url, useInternal = TRUE)
  # Extract the relevant values
  indicator = xmlValue(getNodeSet(doc, "//wb:indicator")[[1]])
  countryName = xmlValue(getNodeSet(doc, "//wb:country")[[1]])
  # values come back as text, so coerce to numeric before plotting
  values = as.numeric(sapply(getNodeSet(doc, "//wb:value"), function(el) xmlValue(el)))
  dates = sapply(getNodeSet(doc, "//wb:date"), function(el) xmlValue(el))
  names(values) = dates
  # Plot the data
  par(las=2, mar=c(4, 8, 1, 2) + 0.1)
  barplot(t(rev(values)), main=paste(countryName, indicator),
          col=color)
}
#####################GERMANY UNEMPLOYMENT########################
library(sp)
library(RColorBrewer)
# get spatial data for Germany on county level
con <- url("http://gadm.org/data/rda/DEU_adm3.RData")
print(load(con))
close(con)
# plot Germany with random colors
col = rainbow(length(levels(gadm$NAME_3)))
spplot(gadm, "NAME_3", col.regions=col, main="German Regions",
colorkey = FALSE, lwd=.4, col="white")
###############################################################
###############################################################
### DATA PREP ###
# loading the unemployment data
setwd("C:\\Users\\ermias\\Documents\\IFGI\\Geostatistics_SS2010")
unempl <- read.delim2(file="data_germany_unemployment_by_county.txt", header = TRUE, sep = "\t",
dec=",", stringsAsFactors=F)
# due to Mac OS encoding, otherwise not needed
gadm_names <- gadm$NAME_3
# fuzzy matching of data: quick & dirty
# caution: this step takes some time ~ 2 min.
# parsing out "Städte"
gadm_names_n <- gsub("Städte", "", gadm_names)
total <- length(gadm_names_n)
# create progress bar
pb <- txtProgressBar(min = 0, max = total, style = 3)
order <- vector()
for (i in 1:total){
order[i] <- agrep(gadm_names_n[i], unempl$Landkreis,
max.distance = 0.2)[1]
setTxtProgressBar(pb, i) # update progress bar
}
# choose color by unemployment rate
col_no <- as.factor(as.numeric(cut(unempl$Wert[order],
                                   c(0,2.5,5,7.5,10,15,100))))
levels(col_no) <- c("<2,5%", "2,5-5%", "5-7,5%",
                    "7,5-10%", "10-15%", ">15%")
gadm$col_no <- col_no
myPalette<-brewer.pal(6,"Purples")
# plotting
spplot(gadm, "col_no", col=grey(.9), col.regions=myPalette,
main="Unemployment in Germany by district")
###############################################################
###############################################################
library(sp)
library(maptools)
nc1 <- readShapePoly("./data/DEU_adm/DEU_adm1.dbf",
                     proj4string=CRS("+proj=longlat +datum=WGS84"))
nc3 <- readShapePoly("./data/DEU_adm/DEU_adm3.dbf",
                     proj4string=CRS("+proj=longlat +datum=WGS84"))
# col_no comes from the calculations above
par(mar=c(0,0,0,0))
plot(nc3, col=myPalette[col_no], border=grey(.9), lwd=.5)
plot(nc1, col=NA, border=grey(.5), lwd=1, add=TRUE)
###############################################################
###############################################################
################### READ RDATA###########
#########################################
> infilepath <- "C:/R/workdir/projectname/lab.RData"
> outfilepath <- "C:/R/workdir/projectname/lab.txt"
> load(infilepath)
## now assume that there is a single table in the .RData file called my.data
> ls()
[1] "my.data"
> write.table(my.data, file = outfilepath)
# That should do the trick. Of course, this is a bit different if you are trying
# to output something other than a table. If you want to print out summaries
# exactly as they appear in R, use capture.output; see
> ?capture.output
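## e.g., a minimal sketch (the output file name here is just an example):
> out <- capture.output(summary(my.data))
> writeLines(out, "C:/R/workdir/projectname/lab_summary.txt")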
################ Weekend ART
# More aRt
par(bg="white")
par(mar=c(0,0,0,0))
plot(c(0,1),c(0,1),col="white",pch=".",xlim=c(0,1),ylim=c(0,1))
iters = 500
for(i in 1:iters) {
  center = runif(2)
  size = 1/rbeta(2,1,3)
  # Let's create random HTML-style colors
  color = sample(c(0:9,"A","B","C","D","E","F"),12,replace=T)
  fill = paste("#", paste(color[1:6],collapse=""),sep="")
  brdr = paste("#", paste(color[7:12],collapse=""),sep="")
  points(center[1], center[2], col=fill, pch=20, cex=size)
  # use brdr (computed but previously unused) as the ring colour
  points(center[1], center[2], col=brdr, pch=21, cex=size, lwd=runif(1,1,4))
}
############### 3D volcanic terrain view
z <- 2 * volcano # Exaggerate the relief
x <- 10 * (1:nrow(z)) # 10 meter spacing (S to N)
y <- 10 * (1:ncol(z)) # 10 meter spacing (E to W)
z0 <- min(z) - 20
z <- rbind(z0, cbind(z0, z, z0), z0)
x <- c(min(x) - 1e-10, x, max(x) + 1e-10)
y <- c(min(y) - 1e-10, y, max(y) + 1e-10)
fill <- matrix("green3", nrow = nrow(z)-1, ncol = ncol(z)-1)
fill[ , i2 <- c(1,ncol(fill))] <- "gray"
fill[i1 <- c(1,nrow(fill)) , ] <- "gray"
par(bg = "lightblue",mar=c(.5,.5,2.5,.5))
persp(x, y, z, theta = 120, phi = 15, col = fill, scale = FALSE, axes = FALSE)
title(main = "ALID VOLCANO\nOne of the Eritrean volcanoes in the East African Rift System.",font.main = 4)
x11()
z <- 2 * volcano # Exaggerate the relief
x <- 10 * (1:nrow(z)) # 10 meter spacing (S to N)
y <- 10 * (1:ncol(z)) # 10 meter spacing (E to W)
z0 <- min(z) - 20
z <- rbind(z0, cbind(z0, z, z0), z0)
x <- c(min(x) - 1e-10, x, max(x) + 1e-10)
y <- c(min(y) - 1e-10, y, max(y) + 1e-10)
fill <- matrix("green3", nrow = nrow(z)-1, ncol = ncol(z)-1)
fill[ , i2 <- c(1,ncol(fill))] <- "gray"
fill[i1 <- c(1,nrow(fill)) , ] <- "gray"
par(bg = "slategray",mar=rep(.5,4))
persp(x, y, z, theta = 135, phi = 30, col = fill, scale = FALSE,
ltheta = -120, lphi = 15, shade = 0.65, axes = FALSE)
title(main = "ALID VOLCANO\nOne of the Eritrean volcanoes in the East African Rift System.",font.main = 4)
############### TERRAIN WITHOUT COLOR ################
x11()
z <- 2 * volcano # Exaggerate the relief
x <- 10 * (1:nrow(z)) # 10 meter spacing (S to N)
y <- 10 * (1:ncol(z)) # 10 meter spacing (E to W)
par(mar=rep(.5,4))
persp(x, y, z, theta = 120, phi = 15, scale = FALSE, axes = FALSE)
############## seewave sound spectrogram
#install.packages("seewave")
require(seewave)
data(pellucens)
par(bg = "black", col = "white")
pellucenszoom <- cutw(pellucens, f = 22050, from = 1, plot = FALSE)
spectro(pellucenszoom, f = 22050, wl = 512, ovlp = 85, collevels = seq(-25,
0, 0.5), osc = TRUE, colgrid = "white", palette = rev.heat.colors,
colwave = "white", colaxis = "white", collab = "white", colline = "white")
########## SINE WAVE #######
x<-seq(0,20,by=0.001)
y<-2*sin(2*pi*.5*x) #amplitude =2, frequency=0.5
plot(x,y,type="l")
# see: http://pbil.univ-lyon1.fr/ADE-4/ade4-html/add.scatter.html
######## Plotting with Poisson noise
x = seq(.001,50,.001)
par(bg="black")
par(mar=c(0,0,0,0))
plot(x,sin(1/x)*rpois(length(x),x),pch=20,col="blue")
#install.packages(c("Rcpp","RInside","inline"))
# .C interface example: calls a compiled C routine "convolve"
# (the routine must already be loaded, e.g. via dyn.load)
conv <- function(a, b)
  .C("convolve",
     as.double(a),
     as.integer(length(a)),
     as.double(b),
     as.integer(length(b)),
     ab = double(length(a) + length(b) - 1))$ab
###### Code and brief instruction for graphing Twitter with R
# Load twitteR package
library(twitteR)
# Load igraph package
library(igraph)
# Set up friends and followers as empty character vectors. This, along with the
# looping below, is not strictly necessary; it is a workaround for dealing with
# the twitteR user objects (see the shorter sapply alternative after the loops).
friends <- as.character()
followers <- as.character()
# Start a Twitter session. Note that the user through whom the session is started
# doesn't have to be the one you search for in the next step: authenticate with
# your own username and then search for somebody else if you like.
sess <- initSession('ermiasbet', 'ermi243')
# Retrieve a maximum of 500 friends for user 'ermiasbet'.
friends.object <- userFriends('ermiasbet', n=500, sess)
# Retrieve a maximum of 500 followers for 'ermiasbet'. Note that retrieving many/all
# of your followers will create a very busy graph, so when experimenting it's better
# to start with a small number of people (e.g. 25).
followers.object <- userFollowers('ermiasbet', n=500, sess)
# Loop over the user objects and pull out the name field.
for (i in 1:length(friends.object))
{
  friends <- c(friends, friends.object[[i]]@name)
}
for (i in 1:length(followers.object))
{
  followers <- c(followers, followers.object[[i]]@name)
}
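# Shorter alternative to the two loops above (assumes, as the loops do, that
# each twitteR user object stores the screen name in its 'name' slot):
# friends <- sapply(friends.object, function(u) u@name)
# followers <- sapply(followers.object, function(u) u@name)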
# Create data frames that relate friends and followers to the user you search for and merge them.
relations.1 <- data.frame(User='ermiasbet', Follower=friends)
relations.2 <- data.frame(User=followers, Follower='ermiasbet')
relations <- merge(relations.1, relations.2, all=T)
# Create graph from relations.
g <- graph.data.frame(relations, directed = T)
# Assign labels to the graph (=people's names)
V(g)$label <- V(g)$name
# Plot the graph.
plot(g)
############# Zone of Instability
# Take a wacky walk, return the final "track" steps
wackyWalk <- function(iters, track=iters) {
  locations = c()
  mean2use = 0
  sd2use = 1
  for (i in 1:iters) {
    mean2use = rnorm(1,mean2use,sd2use)
    # The farther from the center, the smaller the variance
    sd2use = abs(1/mean2use)
    if(track > (iters - i) ) {
      locations = c(locations, mean2use)
    }
  }
  return(locations)
}
# How many steps to take
iters = 300
track = 300
locations = wackyWalk(iters,track)
# Start us off with a plot
plot(0,0,xlim=c(min(locations),max(locations)),ylim=c(0,iters),pch=20,col="white")
for (i in 1:track) {
  points(locations[i],i,pch=20,col="blue")
  # To create a pseudo animation, take a break between plotting points
  Sys.sleep(.10)
}
# How does the number of steps compare with distance from center
meta = c()
for (j in 1:20) {
  iters = 2^j
  track = 1
  meta = c(meta, wackyWalk(iters,track))
}
plot(1:20, abs(meta), pch=20, col="blue",xlab="2^x",ylab="abs value of final number in sequence")
######################################################################################################
################ FRACTALS ###############
#########################################
library(ggplot2)
max_iter=25
cl=colours()
step=seq(-2,0.8,by=0.005)
points=array(0,dim=c(length(step)^2,3))
t=0
for(a in step)
{
  for(b in step+0.6)
  {
    x=0;y=0;n=0;dist=0
    while(n<max_iter & dist<4)
    {
      n=n+1
      newx=a+x^2-y^2
      newy=b+2*x*y
      dist=newx^2+newy^2
      x=newx;y=newy
    }
    if(dist<4)
    {
      color=24 # black
    }
    else
    {
      color=n*floor(length(cl)/max_iter)
    }
    t=t+1
    points[t,]=c(a,b,color)
  }
}
df=as.data.frame(points)
# Can change the colors by fiddling with the following.
# last_plot() + scale_colour_manual(values=sort(c("#00000000", rainbow(23)), decreasing=FALSE))
ggplot(data=df, aes(V1, V2, color=cl[V3])) +
  geom_point() +
  opts(panel.background=theme_blank(),
       panel.grid.major=theme_blank(),
       panel.grid.minor=theme_blank(),
       axis.ticks=theme_blank(),
       axis.text.x=theme_blank(),
       axis.text.y=theme_blank(),
       axis.title.x=theme_blank(),
       axis.title.y=theme_blank(), legend.position = 'none')
ggsave('mandelbrot_ggplot2.png')
print('Image Saved.')
dev.off()
#################### MANDELBROT SET ####################
Limits=c(-2,0.8)
MaxIter=25
cl=colours()
Step=seq(Limits[1],Limits[2],by=0.005)
S=floor(length(cl)/MaxIter)
Dist=0
PointsMatrix=array(0,dim=c(length(Step)*length(Step),3))
t=0
for(a in Step)
{
  for(b in Step+0.6)
  {
    x=0;y=0;n=0;Dist=0
    while(n<MaxIter & Dist<4)
    {
      n=n+1
      newx=a+x^2-y^2
      newy=b+2*x*y
      Dist=newx^2+newy^2
      x=newx;y=newy
    }
    if(Dist<4) colour=24 # black colour
    else colour=n*S
    t=t+1
    PointsMatrix[t,]=c(a,b,colour)
  }
}
X11()
plot(PointsMatrix[,1], PointsMatrix[,2], xlim=Limits, ylim=Limits+0.6, col=cl[PointsMatrix[,3]], pch=".")
Option Explicit
Const SAVEPATH = "c:\NPI_projects\Amora\Bareno\email\"
Sub CustomMailMessageRule(Item As Outlook.MailItem)
    Dim oAttachment As Outlook.Attachment
    Dim iAttachCnt As Integer
    Dim i As Integer
    ' get count of attachments
    iAttachCnt = Item.Attachments.Count
    If iAttachCnt > 0 Then
        For i = 1 To iAttachCnt
            Item.Attachments.Item(i).SaveAsFile _
                createFilename(Item.Attachments(i).filename)
        Next i
    End If
    'MsgBox "you have an attachment"
End Sub
' creates a unique filename for the file
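' Note: "New FileSystemObject" is early bound, which assumes a project
' reference to Microsoft Scripting Runtime (scrrun.dll)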
Function createFilename(ByVal filename As String) As String
    Dim fs As New FileSystemObject
    Dim tempFN1 As String
    Dim tempFN2 As String
    Dim tempInt As Integer
    Dim newFN As String
    tempInt = 1000
    newFN = filename
    Do Until Not (fs.FileExists(SAVEPATH & newFN))
        If InStrRev(filename, ".") > 0 Then
            tempFN1 = Left(filename, InStrRev(filename, ".") - 1)
            tempFN2 = Mid(filename, InStrRev(filename, "."))
            newFN = tempFN1 & CStr(tempInt) & tempFN2
            tempInt = tempInt + 1
        Else
            newFN = newFN & CStr(tempInt)
            tempInt = tempInt + 1
        End If
    Loop
    createFilename = SAVEPATH & newFN
End Function
##### GET TRACKING DATA FROM API.NPOLAR.NO IN JSON FORMAT ####################
# On PowerShell: save the command history on exit and reload it at startup (profile snippet)
Register-EngineEvent PowerShell.Exiting {
Get-History -Count 32767 | Group CommandLine |
Foreach {$_.Group[0]} | Export-CliXml "$home\pshist.xml" } -SupportEvent
Import-Clixml ".\pshist.xml" | Add-History
cd E:\Data\Projects
PS S:\> (Invoke-RestMethod -URI "http://api.npolar.no/tracking/?q=&start=0&limit=3000&sort=&filter-platform=9690&filter-type=diag&format=json").feed.entries | ConvertTo-Json | Out-File E:\Data\tracking\tracking.json
PS E:\Data> (Invoke-RestMethod -URI "http://api.npolar.no/tracking/?q=&start=0&limit=2500&sort=&filter-platform=9690&filter-type=diag&format=json").feed.entries | ConvertTo-Csv -delimiter ";" -NoTypeInformation |Out-File E:\Data\tracking\tracking.csv
# html to rst
PS S:\> pandoc -f html -t rst E:\Github\geodata.npolar.no\documentation\index.html -o E:\Github\geodata.npolar.no\documentation\index.rst
#### Check arcgis caching job status
http://willem3.npolar.no:6080/arcgis/rest/services/System/CachingTools/GPServer/Manage%20Map%20Cache%20Tiles/jobs/jcbf2ce4b6a4f4b36bc2fd8e94b9d31e6
C:\OSGeoW64\bin\gdal_calc.py -A E:\Data\Projects\IceCharts\Data\2012\09\ice20120901_EPSG3575.tif -B E:\Data\Projects\IceCharts\Data\2012\09\ice20120902_EPSG3575.tif --outfile=E:\Data\result.tif --calc="(A+B)/2"
############ SORT CODED VALUE DOMAIN (Data Management) of geodatabases ########
import arcpy
from arcpy import env
env.workspace = "C:/data"
arcpy.SortCodedValueDomain_management("montgomery.gdb", "material", "CODE", "ASCENDING")
########## File list bat cmd ############
cd barents_shp
dir *.shp > aug_filelist.bat
#### WMS
# Seabird colonies (sjøfugl kolonier):
http://wms.nina.no/seapop/wms.aspx?SERVICE=WMS&REQUEST=GetCapabilities
# Maritime borders (sjøgrenser):
http://wms.geonorge.no/skwms1/wms.sjogrenser2?SERVICE=WMS&REQUEST=GetCapabilities
ssh -N -L 3000:127.8.167.1:5432 45f903d529e6499682ea438c35e5c124@geoserver-ermias.rhcloud.com
ssh 45f903d529e6499682ea438c35e5c124@geoserver-ermias.rhcloud.com
#### MYSQL enable remote access to MySQL on Windows
mysql -u root --password=
GRANT ALL PRIVILEGES ON *.* TO 'USERNAME'@'IP' IDENTIFIED BY 'PASSWORD';
FLUSH PRIVILEGES;
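# Note: the server must also listen on a remote-reachable interface: check the
# bind-address setting in the MySQL config (my.ini on Windows) and open port 3306.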
#### WGET a site
## WGET for windows
Invoke-WebRequest http://www.google.com/ -OutFile c:\google.html
Invoke-WebRequest http://umberto.npolar.no/ris15/ssfprojects.kml -OutFile S:\ssfprojects.kml
C:\OSGeo4W\bin\ogr2ogr.exe -f "ESRI Shapefile" ssfprojects.shp .\ssfprojects.kml
############ WGET a site ####################
# wget: recursively download all files from a certain directory listed by Apache
# Case: recursively download all the files that are in the 'ddd' folder of the url 'http://hostname/aaa/bbb/ccc/ddd/'
# Solution:
wget -r -np -nH --cut-dirs=3 -R index.html http://hostname/aaa/bbb/ccc/ddd/
# Explanation:
# It will download all files and subfolders in the ddd directory:
# recursively (-r),
# not going to upper directories, like ccc/... (-np),
# not saving files to a hostname folder (-nH),
# but to ddd by omitting the first 3 folders aaa, bbb, ccc (--cut-dirs=3),
# excluding index.html files (-R index.html)
# Solution to libjvm.so bug on gdal and Ubuntu precise
LD_LIBRARY_PATH=/usr/lib/jvm/java-7-openjdk-amd64/jre/lib/amd64/server
export LD_LIBRARY_PATH
wget -r -np -nH --cut-dirs=1 http://kgs.uky.edu/kgsmap/kgsgeoserver/viewer.asp
wget -r -np -nH --cut-dirs=2 -R index.html http://gcoos.org/products/maps/glidertracker/
######### SSH copy
scp -r Desktop/Glaciers/ 45f903d529e6499682ea438c35e5c124@geoserver-ermias.rhcloud.com:app-root/data
############# Arcgis license #######################
Feature: Arc/Info
Server name: ipaddress
License path: 27000@ipaddress;
The modified Mercator projection used by Google, Bing, and ArcGIS Online is not designed to minimize distortion at all. Instead, it was engineered for convenience in working with cached map tiles: it fits the entire globe (well, most of the latitudes anyway) into a square area that can be covered by 256 x 256 pixel tiles.
A point to note: in ArcGIS 10 the Web Mercator projection, which previously had an Esri WKID of 102100 (Google used a WKID of 900913 because it looks like "google"), has the EPSG WKID 3857 ( http://www.epsg-registry.org/export.htm?gml=urn:ogc:def:crs:EPSG::3857 ). A WKID of 102100 will still work if used when programming the data frame, but a WMS will report it correctly as EPSG:3857.
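As a rough illustration, the spherical formulas behind this tiling scheme are simple enough to check by hand. A minimal R sketch (the function name and the Oslo test coordinates are just examples):
# lon/lat (EPSG:4326) -> Web Mercator (EPSG:3857) on the sphere of radius 6378137 m
lonlat_to_webmercator <- function(lon, lat) {
  R <- 6378137
  cbind(x = R * lon * pi/180, y = R * log(tan(pi/4 + lat * pi/360)))
}
lonlat_to_webmercator(10.75, 59.91)   # roughly x = 1.2e6, y = 8.4e6 metres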
if (!require("gWidgetsRGtk2")) install.packages("gWidgetsRGtk2")
if (!require("cairoDevice")) install.packages("cairoDevice")
library(gWidgetsRGtk2)
options(guiToolkit = "RGtk2")
tbl = glayout(container = gwindow("Power of the F Test"),
spacing = 0)
tbl[1, 1:4, anchor = c(0, 0), expand = TRUE] = g.f = ggraphics(container = tbl,
expand = TRUE, ps = 11)
tbl[2, 1, anchor = c(1, 0)] = "numerator df"
tbl[2, 2, anchor = c(0, 0), expand = TRUE] = g.dfn = gslider(from = 1,
to = 50, value = 3, container = tbl, handler = function(h,
...) {
p.Ftest(dfn = svalue(h$obj))
})
tbl[2, 3, anchor = c(1, 0)] = "denominator df"
tbl[2, 4, anchor = c(0, 0), expand = TRUE] = g.dfd = gslider(from = 1,
to = 50, value = 20, container = tbl, handler = function(h,
...) {
p.Ftest(dfd = svalue(h$obj))
})
tbl[3, 1, anchor = c(1, 0)] = "delta^2"
tbl[3, 2, anchor = c(0, 0), expand = TRUE] = g.ncp = gslider(from = 0,
to = 100, value = 10, container = tbl, handler = function(h,
...) {
p.Ftest(ncp = svalue(h$obj))
})
tbl[3, 3, anchor = c(1, 0)] = "alpha"
tbl[3, 4, anchor = c(0, 0), expand = TRUE] = g.alpha = gslider(from = 0,
to = 1, by = 0.01, value = 0.05, container = tbl, handler = function(h,
...) {
p.Ftest(alpha = svalue(h$obj))
})
tbl[4, 1, anchor = c(1, 0)] = "x range"
tbl[4, 2:4, anchor = c(0, 0), expand = TRUE] = g.xr = gslider(from = 1,
to = 50, value = 15, container = tbl, handler = function(h,
...) {
p.Ftest(xr = svalue(h$obj))
})
## draw the graph
p.Ftest = function(dfn = svalue(g.dfn), dfd = svalue(g.dfd),
ncp = svalue(g.ncp), alpha = svalue(g.alpha), xr = svalue(g.xr)) {
x = seq(0.001, xr, length.out = 300)
yc = df(x, dfn, dfd)
yn = df(x, dfn, dfd, ncp = ncp)
par(mar = c(4.5, 4, 1, 0.05))
plot(x, yc, type = "n", ylab = "Density", ylim = c(0, max(yc,
yn)))
xq = qf(1 - alpha, dfn, dfd)
polygon(c(xq, x[x >= xq], xr), c(0, yn[x > xq], 0), col = "gray",
border = NA)
lines(x, yc, lty = 1)
lines(x, yn, lty = 2)
legend("topright", c(as.expression(substitute(F[list(df1,
df2)] ~ " density", list(df1 = dfn, df2 = dfd))), as.expression(substitute(F[list(df1,
df2)](ncp) ~ " density", list(df1 = dfn, df2 = dfd, ncp = ncp))),
as.expression(substitute("Power = " ~ p, list(p = round(1 -
pf(xq, dfn, dfd, ncp = ncp), 4))))), lty = c(1:2,
NA), fill = c(NA, NA, "gray"), border = NA, bty = "n")
return(1 - pf(xq, dfn, dfd, ncp = ncp))
}
p.Ftest()
wunder_station_daily <- function(station, date)
{
  base_url <- 'http://www.wunderground.com/weatherstation/WXDailyHistory.asp?'
  # parse date
  m <- as.integer(format(date, '%m'))
  d <- as.integer(format(date, '%d'))
  y <- format(date, '%Y')
  # compose final url
  final_url <- paste(base_url,
                     'ID=', station,
                     '&month=', m,
                     '&day=', d,
                     '&year=', y,
                     '&format=1', sep='')
  # reading in as raw lines from the web server
  # contains <br> tags on every other line
  u <- url(final_url)
  the_data <- readLines(u)
  close(u)
  # only keep records with more than 5 rows of data
  if(length(the_data) > 5 )
  {
    # remove the first and last lines
    the_data <- the_data[-c(1, length(the_data))]
    # remove odd numbers starting from 3 --> end
    the_data <- the_data[-seq(3, length(the_data), by=2)]
    # extract header and cleanup
    the_header <- the_data[1]
    the_header <- make.names(strsplit(the_header, ',')[[1]])
    # convert to CSV, without header
    tC <- textConnection(paste(the_data, collapse='\n'))
    the_data <- read.csv(tC, as.is=TRUE, row.names=NULL, header=FALSE, skip=1)
    close(tC)
    # remove the last column, created by trailing comma
    the_data <- the_data[, -ncol(the_data)]
    # assign column names
    names(the_data) <- the_header
    # convert Time column into properly encoded date time
    the_data$Time <- as.POSIXct(strptime(the_data$Time, format='%Y-%m-%d %H:%M:%S'))
    # remove UTC and software type columns
    the_data$DateUTC.br. <- NULL
    the_data$SoftwareType <- NULL
    # sort and fix rownames
    the_data <- the_data[order(the_data$Time), ]
    row.names(the_data) <- 1:nrow(the_data)
    # done
    return(the_data)
  }
}
w <- wunder_station_daily('ITROMSTR5', as.Date('2014-05-01'))
plot(w[1:2],type="n", col = "black")
lines(w[1:2],col = "red")
# get data for a range of dates
library(plyr)
date.range <- seq.Date(from=as.Date('2013-05-01'), to=as.Date('2014-01-01'), by='1 day')
# pre-allocate list (vector(mode='list', ...), not list(mode='list', ...))
l <- vector(mode='list', length=length(date.range))
# loop over dates, and fetch data
for(i in seq_along(date.range))
{
  print(date.range[i])
  l[[i]] <- wunder_station_daily('ITROMSTR5', date.range[i])
}
# stack elements of list into DF, filling missing columns with NA
d <- ldply(l)
write.csv(d, file=gzfile('ITROMSTR5.csv.gz'), row.names=FALSE)