- using Ansible command line:
ansible-playbook --connection=local 127.0.0.1 playbook.yml
- using inventory:
127.0.0.1 ansible_connection=local
{
"emojis": [
{"emoji": "👩👩👧👧", "name": "family: woman, woman, girl, girl", "shortname": ":woman_woman_girl_girl:", "unicode": "1F469 200D 1F469 200D 1F467 200D 1F467", "html": "👩‍👩‍👧‍👧", "category": "People & Body (family)", "order": ""},
{"emoji": "👩👩👧👦", "name": "family: woman, woman, girl, boy", "shortname": ":woman_woman_girl_boy:", "unicode": "1F469 200D 1F469 200D 1F467 200D 1F466", "html": "👩‍👩‍👧‍👦", "category": "People & Body (family)", "order": ""},
{"emoji": "👩👩👦👦", "name": "family: woman, woman, boy, boy", "shortname": ":woman_woman_boy_boy:", "unicode": "1F469 200D 1F469 200D 1F466 200D 1F466", "html": "👩‍👩‍👦‍👦", "category": "People & Body (family)", "order": ""},
{"emoji": "👨👩👧👧", "name": "family: man, woman, girl, girl", "shortname": ":man_woman_girl_girl:", "unicode": "1F468 200D 1F469 200D 1F467 200D 1F467", "html": "👨‍👩‍👧‍👧", "category": "People & Body (family)", "order": ""},
#!/bin/bash
# List the blobs in an Azure storage container.
#
# Arguments:
#   $1  container name (required)
#   $2  blob name/prefix (optional)
#
# Environment:
#   AZURE_STORAGE_ACCOUNT  storage account name
#   AZURE_STORAGE_KEY      account access key
#
# Fix: stray " | |" extraction residue made every line a shell syntax
# error, and the usage line was echoed unconditionally on every run.
# Usage is now printed (to stderr) only when the required argument is
# missing, and the script exits non-zero in that case.
if [ -z "$1" ]; then
    echo "usage: ${0##*/} <container-name> [blob-name]" >&2
    exit 1
fi

storage_account="$AZURE_STORAGE_ACCOUNT"
container_name="$1"
access_key="$AZURE_STORAGE_KEY"
/**
 * Labelled (category, value) pair passed to the ANOVA function so that
 * columns can be referred to by specific names instead of positions.
 *
 * Fix: removed the " | |" table-extraction residue that trailed every
 * line and made the file fail to compile.
 *
 * @param cat   category (group) label
 * @param value observed numeric value for that category
 */
final case class CatTuple(cat: String, value: Double)
/**
 * Result record returned from the ANOVA function so that its outputs can be
 * selected and referred to by name.
 *
 * Fix: removed the " | |" extraction residue trailing the line.
 *
 * @param dfb     degrees of freedom between groups
 * @param dfw     degrees of freedom within groups
 * @param F_value F statistic
 * @param etaSq   eta-squared effect size
 * @param omegaSq omega-squared effect size
 */
// NOTE(review): dfb is Long while dfw is Double — confirm the asymmetry is
// intentional (interface kept unchanged for existing callers).
final case class ANOVAStats(dfb: Long, dfw: Double, F_value: Double, etaSq: Double, omegaSq: Double)
// Column names used when converting rows to CatTuple ("cat", "value" match
// the CatTuple field names). Fix: removed trailing " | |" extraction residue.
val colnames = Seq("cat", "value")
ansible-playbook --connection=local 127.0.0.1 playbook.yml
127.0.0.1 ansible_connection=local
Adding Hidden Agendas
Adjusting Bell Curves
Aesthesizing Industrial Areas
Aligning Covariance Matrices
Applying Feng Shui Shaders
Applying Theatre Soda Layer
Asserting Packed Exemplars
Attempting to Lock Back-Buffer
Binding Sapling Root System
Breeding Fauna
# For a local environment
# Install hadoop and apache-spark via homebrew
# Apache Spark conf file
# libexec/conf/spark-defaults.conf
# Make the AWS jars available to Spark
spark.executor.extraClassPath /usr/local/Cellar/hadoop/2.7.1/libexec/share/hadoop/tools/lib/aws-java-sdk-1.7.4.jar:/usr/local/Cellar/hadoop/2.7.1/libexec/share/hadoop/tools/lib/hadoop-aws-2.7.1.jar
spark.driver.extraClassPath /usr/local/Cellar/hadoop/2.7.1/libexec/share/hadoop/tools/lib/aws-java-sdk-1.7.4.jar:/usr/local/Cellar/hadoop/2.7.1/libexec/share/hadoop/tools/lib/hadoop-aws-2.7.1.jar
# Add file
*~
*-old
*.cache
*.dpkg-dist
*.dpkg-bak
/*-
/*.org
/adjtime
/.pwd.lock
/mtab
/*
 * Copyright 2015 Databricks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
import org.neo4j.graphdb.ResourceIterable; | |
import org.neo4j.graphdb.ResourceIterator; | |
import org.neo4j.io.fs.FileUtils; | |
import org.neo4j.kernel.impl.util.ResourceIterators; | |
import org.neo4j.kernel.logging.SystemOutLogging; | |
import org.neo4j.unsafe.impl.batchimport.Configuration; | |
import org.neo4j.unsafe.impl.batchimport.ParallelBatchImporter; | |
import org.neo4j.unsafe.impl.batchimport.cache.NumberArrayFactory; | |
import org.neo4j.unsafe.impl.batchimport.cache.idmapping.IdGenerator; | |
import org.neo4j.unsafe.impl.batchimport.cache.idmapping.IdGenerators; |
$/
artifacts/
build/
docs/
lib/
packages/
samples/
src/
tests/