Fabian Gans (@meggart)

  • Max-Planck-Institute for Biogeochemistry
  • Jena, Germany
@meggart
meggart / gist:20503e86e18ad21c9bbd
Created November 25, 2014 15:55
Use label_components with non-Array output
import Base: getindex   # extend Base indexing for the new array type

type Naive_Sparse_3{T} <: AbstractArray{T,3}
    size::(Int,Int,Int)
    vals::Dict{Int,T}
end

# Linear indexing: look up the stored value for this index
function getindex(A::Naive_Sparse_3, i::Int)
    A.vals[i]
end

# Cartesian indexing: convert (i1,i2,i3) to the column-major linear index
function getindex(A::Naive_Sparse_3, i1::Int, i2::Int, i3::Int)
    i = ((i3-1)*A.size[2] + (i2-1))*A.size[1] + i1
    getindex(A, i)
end
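A minimal usage sketch, not part of the original gist. It assumes a size method
(Base.size(A::Naive_Sparse_3) = A.size) is also defined so that generic
AbstractArray code can traverse the type; the dimensions, stored values, and
the label_components call are illustrative only.

# Hypothetical example: a 4x3x2 sparse cube with two stored entries
A = Naive_Sparse_3{Int}((4,3,2), Dict(1=>1, 2=>1))
A[2,1,1]                  # == 1, via the cartesian-to-linear conversion above
# using Images
# labels = label_components(A)   # intended use from the gist title, assuming
#                                # it accepts any AbstractArray input/output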
@meggart
meggart / .juliarc.jl
Created July 16, 2015 15:26
Macroexpand shortcut in the REPL
if VERSION > v"0.4.0-dev+4268"
    const macroexpandkeys = Dict{Any,Any}("^I" => function (s,o...)
        if !isempty(s)
            # Parse the current REPL input line,
            line = parse(Base.LineEdit.input_string(s))
            # stash it in the kill buffer, clear the prompt,
            s.kill_buffer = Base.LineEdit.input_string(s)
            Base.LineEdit.edit_clear(s)
            # and insert the macro-expanded form in its place
            Base.LineEdit.edit_insert(s, string(macroexpand(line)))
        end
    end)
end
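To illustrate what the key handler does to the typed line (this example is not
part of the gist), the same parse-then-macroexpand step can be run directly
with the Julia 0.4-era API the gist targets:

# Stand-alone sketch of the transformation performed by the shortcut
line = parse("@assert 1 + 1 == 2")   # parse the typed line into an Expr
expanded = macroexpand(line)         # expand the macro call
println(string(expanded))            # this string is what gets re-inserted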
"Get the exact version of a package."
function version(name::AbstractString)
packages = JSON.parse(readall(`$conda list --json`))
for package in packages
if startswith(package, name)
return package
end
end
warn("Could not find the $name package")
return "Not found"
@meggart
meggart / tinytestcubecube.py
Created January 5, 2016 10:04
A draft for a tiny test data cube that can be used for unit testing the Data Access APIs in different languages.
from datetime import datetime, timedelta
from cablab import Cube, CubeConfig, CubeSourceProvider
import numpy
class MiniCubeProvider(CubeSourceProvider):

    def __init__(self, cube_config):
        if cube_config.grid_width != 6 or cube_config.grid_height != 3:
            raise ValueError('illegal cube configuration, cube dimension must be 6x3')
import cablab
from cablab import cube_gen
from cablab import cube_config
from collections import OrderedDict
from textwrap import TextWrapper
class varInfo:

    def __init__(self):
        # Collect all registered cube source providers, dropping the 'test' provider
        plist = cube_gen._load_source_providers()
        plist.pop('test')
"""
broadcast_reduce(f,op,v0,A,Bs...)
Should behave like mapreduce, with the only difference that singleton dimension are expanded like in broadcast.
"""
function broadcast_reduce(f,op,v0,A,Bs...)
shape = Base.Broadcast.broadcast_indices(A,Bs...)
iter = CartesianRange(shape)
keeps, Idefaults = Base.Broadcast.map_newindexer(shape, A, Bs)
_broadcast_reduce(f,op,v0,keeps,Idefaults,A, Bs,Val{length(Bs)}(),iter)
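A hedged usage sketch, not part of the gist, taking the docstring at its word;
it assumes the _broadcast_reduce helper defined further down in the (truncated)
gist is loaded.

A = [1.0 2.0]                        # 1x2 row
B = [1.0, 2.0]                       # length-2 column, broadcast to 2x2 against A
broadcast_reduce(*, +, 0.0, A, B)    # should equal sum(A .* B) == 9.0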
(Truncated Jupyter notebook; only the start of its first code cell is recoverable:)

import numpy as np
import numpy.fft as fft
FROM jupyter/datascience-notebook:eb149a8c333a
MAINTAINER dev@brockmann-consult.de
LABEL version='0.6.1'
USER root
RUN sudo apt-get update && sudo apt-get upgrade -y
RUN sudo apt-get install -y git unzip apt-utils vim
ADD start.sh /usr/local/bin/start.sh
@meggart
meggart / parseprj.jl
Created August 20, 2019 07:04
Parse prj from Shapefile
import Tokenize.Tokens: IDENTIFIER, LSQUARE, COMMA, RSQUARE, STRING, ENDMARKER

function parsetokens(t, ar)
    if t[1].kind == IDENTIFIER
        @assert t[2].kind == LSQUARE
        # Scan forward, tracking bracket depth, until the matching ] is found
        brackcount = 1
        i = 3
        while true
            t[i].kind == LSQUARE && (brackcount += 1)
            t[i].kind == RSQUARE && (brackcount -= 1)
            brackcount == 0 && break
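To see what parsetokens receives, here is a hedged sketch (not part of the
gist): it tokenizes a small, made-up WKT fragment of the kind stored in a .prj
file, using the imports above.

using Tokenize
wkt = """GEOGCS["WGS 84",DATUM["WGS_1984"]]"""
t = collect(Tokenize.tokenize(wkt))
t[1].kind == IDENTIFIER    # GEOGCS
t[2].kind == LSQUARE       # [
t[3].kind == STRING        # "WGS 84"
# parsetokens would then walk these tokens, matching brackets as above.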