-
Go to Heroku (https://www.heroku.com/) and create an account if you don’t have one
-
Install the heroku toolbelt from https://toolbelt.heroku.com/ or using Homebrew:
/> brew install heroku-toolbelt
-
Send your heroku account email to
brian@binnacle.io
-
You’ll receive an email from heroku about being invited to test the alpha version of the binnacle add-on
-
Go to https://dashboard.heroku.com/apps to create a new app (+)
-
Clone https://github.com/binnacle-io/binnacle-rails-showcase:
View binnacle-heroku-add-on-test-instructions.asc
View gist:003b54a64dcdcc675bc3
/usr/local/lib | |
└─┬ kong-dashboard@1.0.1 | |
├─┬ bower@1.6.9 | |
│ ├── abbrev@1.0.7 | |
│ ├─┬ bower-config@1.2.3 | |
│ │ ├── graceful-fs@4.1.2 | |
│ │ ├─┬ optimist@0.6.1 | |
│ │ │ └── wordwrap@0.0.3 | |
│ │ └─┬ osenv@0.1.3 | |
│ │ ├── os-homedir@1.0.1 |
View emr-spark-launch.sh
# elastic-mapreduce --create --name "Spark Example Project" --instance-type m1.xlarge --instance-count 3 \ | |
# --jobflow-role SparkRole --service-role EMR_DefaultRole \ | |
# --bootstrap-action s3://support.elasticmapreduce/spark/install-spark --bootstrap-name "Install Spark" \ | |
# --jar s3://elasticmapreduce/libs/script-runner/script-runner.jar --step-name "Run Spark Example Project" \ | |
# --step-action TERMINATE_JOB_FLOW \ | |
# --arg s3://snowplow-hosted-assets/common/spark/run-spark-job-0.1.0.sh \ | |
# --arg s3://sparkcluster/spark-example-project-0.2.0.jar \ | |
# --arg com.snowplowanalytics.spark.WordCountJob \ | |
# --arg s3n://sparkcluster/hello.txt \ | |
# --arg s3n://sparkcluster/results |
View geowave-emr-launch.sh
# https://blogs.aws.amazon.com/bigdata/post/Tx15973X6QHUM43/Running-Apache-Accumulo-on-Amazon-EMR | |
aws emr create-cluster --name GeoWaveCluster --no-auto-terminate --bootstrap-actions Path=s3://elasticmapreduce.bootstrapactions/accumulo/1.6.1/install-accumulo_mj,Name=Install_Accumulo Path=s3://rbt-emrbootstrap/install-geowave,Name=Install_GeoWave Path=s3://rbt-emrbootstrap/initialize-geowave.sh,Name=Init_GeoWave Path=s3://support.elasticmapreduce/spark/install-spark,Name=Install_Spark --ami-version 3.6.0 --instance-groups InstanceGroupType=MASTER,InstanceCount=1,InstanceType=m3.xlarge InstanceGroupType=CORE,InstanceCount=2,InstanceType=m3.xlarge --ec2-attributes KeyName=sparkcluster,InstanceProfile=SparkRole --service-role EMR_DefaultRole |
View gist:4a2303e773015aa06adb
* Trying 192.168.99.100... | |
* Connected to 192.168.99.100 (192.168.99.100) port 32770 (#0) | |
* Server auth using Basic with user 'admin' | |
> GET /go/api/agents HTTP/1.1 | |
> Host: 192.168.99.100:32770 | |
> Authorization: Basic YWRtaW46Zm9vYmFyOTk= | |
> User-Agent: curl/7.43.0 | |
> Accept: application/vnd.go.cd.v1+json | |
> | |
* Empty reply from server |
View pg state
postgresql-repo: | |
pkg.installed: | |
- sources: | |
- pgdg-centos94: http://{{ pillar['pg_yum_domain']}}/9.4/redhat/rhel-6-x86_64/pgdg-centos94-9.4-1.noarch.rpm | |
postgresql-install: | |
pkg.installed: | |
- names: | |
- postgresql94 | |
- postgresql94-server | |
- postgis2_94 |
View build_ossim.txt
Append to .bashrc or .zshrc | |
# OSSIM environment vars | |
export OSSIM_VERSION=1.8.19 | |
export OSSIM_DATA=/data | |
# Wherever you checked out the code to: | |
export OSSIM_DEV_HOME=${HOME}/ossim_trunk | |
export OSSIM_DEPENDENCIES=$OSSIM_DEV_HOME/ossim_dependencies |
View ossim_preferences
elevation_manager.elevation_source1.connection_string: $(OSSIM_DATA)/elevation/srtm/1arc | |
elevation_manager.elevation_source1.enabled: true | |
elevation_manager.elevation_source1.type: srtm_directory | |
elevation_manager.elevation_source1.min_open_cells: 25 | |
elevation_manager.elevation_source1.max_open_cells: 50 | |
elevation_manager.elevation_source1.memory_map_cells: false | |
elevation_manager.elevation_source1.geoid.type: geoid1996 | |
elevation_manager.elevation_source2.connection_string: $(OSSIM_DATA)/elevation/dted/level1 | |
elevation_manager.elevation_source2.enabled: false |
View configure.sh
#!/bin/bash | |
cmake "$@" \ | |
-DCMAKE_BUILD_TYPE=Release \ | |
-DCMAKE_OSX_ARCHITECTURES="x86_64" \ | |
-DCMAKE_LIBRARY_PATH=$OSSIM_DEPENDENCIES/lib \ | |
-DCMAKE_INCLUDE_PATH=$OSSIM_DEPENDENCIES/include \ | |
-DCMAKE_FRAMEWORK_PATH=$OSSIM_DEPENDENCIES/Frameworks \ | |
-DCMAKE_INSTALL_PREFIX=$OSSIM_INSTALL_PREFIX \ | |
-DCMAKE_PREFIX_PATH=$OSSIM_DEPENDENCIES \ | |
-DBUILD_OSSIMPREDATOR=ON\ |
View gist:6478ad038ee46f992b9c
[hadoop@ip-172-31-6-148 helloaccumulo]$ java -jar target/helloaccumulo-standalone.jar -i instance -n geowave.test -u geowave -p geowave -z ip-172-31-6-148.ec2.internal | |
Attempting to get zookeeper instance | |
1 [main] DEBUG org.apache.accumulo.fate.zookeeper.ZooSession - Connecting to ip-172-31-6-148.ec2.internal with timeout 30000 with auth | |
16 [main] INFO org.apache.zookeeper.ZooKeeper - Client environment:zookeeper.version=3.3.6-1366786, built on 07/29/2012 06:22 GMT | |
16 [main] INFO org.apache.zookeeper.ZooKeeper - Client environment:host.name=ip-172-31-6-148.ec2.internal | |
16 [main] INFO org.apache.zookeeper.ZooKeeper - Client environment:java.version=1.7.0_71 | |
16 [main] INFO org.apache.zookeeper.ZooKeeper - Client environment:java.vendor=Oracle Corporation | |
16 [main] INFO org.apache.zookeeper.ZooKeeper - Client environment:java.home=/usr/java/jdk1.7.0_71/jre | |
16 [main] INFO org.apache.zookeeper.ZooKeeper - Client environment:java.class.path=target/helloaccumulo-standalone.jar | |
16 [main] INFO org.apache.zookee |
Newer Older