Each of these commands will run an ad hoc HTTP static server in your current (or specified) directory, available at http://localhost:8000. Use this power wisely.
$ python -m SimpleHTTPServer 8000
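SimpleHTTPServer is the Python 2 module name; on Python 3 the equivalent one-liner uses the http.server module instead:
$ python3 -m http.server 8000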
// We'll use Puppeteer as our browser automation framework.
const puppeteer = require('puppeteer');

// This is where we'll put the code to get around the tests.
const preparePageForTests = async (page) => {
  // Pass the User-Agent Test.
  const userAgent = 'Mozilla/5.0 (X11; Linux x86_64) ' +
    'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.39 Safari/537.36';
  await page.setUserAgent(userAgent);
};
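For context, a minimal usage sketch could look like the following; the target URL is a placeholder and not part of the original snippet:

// Launch a browser, apply the test workarounds, then visit the page under test.
(async () => {
  const browser = await puppeteer.launch();
  const page = await browser.newPage();
  await preparePageForTests(page);
  await page.goto('https://example.com'); // Placeholder URL.
  await browser.close();
})();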
The first step is to remove any older version of PostGIS, if one is installed.
sudo apt-get purge postgis
wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -
sudo sh -c 'echo "deb http://apt.postgresql.org/pub/repos/apt/ trusty-pgdg main" >> /etc/apt/sources.list.d/postgresql.list'
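With the repository and key in place, the remaining steps are typically an apt-get update followed by installing the PostGIS package; the package name below is an assumption and depends on the PostgreSQL and PostGIS versions you target:
sudo apt-get update
sudo apt-get install postgresql-9.5-postgis-2.2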
import logging
import unicodecsv
from datetime import datetime as dt

from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
from airflow.exceptions import AirflowException
from airflow.hooks.postgres_hook import PostgresHook
from airflow.plugins_manager import AirflowPlugin
from psycopg2.extras import DictCursor
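These imports point at a custom operator that reads rows from Postgres and writes them out as CSV. A rough sketch of what such an operator could look like (the class name, parameters, and CSV logic are illustrative assumptions, not the original plugin's code):

class PostgresToCsvOperator(BaseOperator):
    """Illustrative operator: dump the results of a SQL query to a CSV file."""

    @apply_defaults
    def __init__(self, sql, output_path, postgres_conn_id='postgres_default',
                 *args, **kwargs):
        super(PostgresToCsvOperator, self).__init__(*args, **kwargs)
        self.sql = sql
        self.output_path = output_path
        self.postgres_conn_id = postgres_conn_id

    def execute(self, context):
        # Fetch rows through the Postgres connection configured in Airflow.
        hook = PostgresHook(postgres_conn_id=self.postgres_conn_id)
        conn = hook.get_conn()
        cursor = conn.cursor(cursor_factory=DictCursor)
        cursor.execute(self.sql)
        rows = cursor.fetchall()
        if not rows:
            raise AirflowException('Query returned no rows: %s' % self.sql)
        # Write a header row followed by the data, using unicodecsv for py2 safety.
        with open(self.output_path, 'wb') as f:
            writer = unicodecsv.writer(f)
            writer.writerow([col[0] for col in cursor.description])
            writer.writerows(rows)
        logging.info('Wrote %d rows to %s', len(rows), self.output_path)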
from airflow import DAG
from airflow.operators import PythonOperator, TriggerDagRunOperator
from datetime import datetime, timedelta
import sys

sys.path.append('/home/pablo/workspace/scratch/')
from default_test import default_test

default_args = {
    'owner': 'airflow',
    'depends_on_past': False,
}
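A minimal DAG wiring these operators together might look roughly like the following; the dag_id, schedule, and downstream dag_id are illustrative placeholders, and default_test is the callable imported from the local module above:

dag = DAG('example_trigger_dag',
          default_args=default_args,
          start_date=datetime(2016, 1, 1),
          schedule_interval=timedelta(days=1))

run_test = PythonOperator(
    task_id='run_default_test',
    python_callable=default_test,
    dag=dag)

def conditionally_trigger(context, dag_run_obj):
    # Illustrative: always trigger the downstream run, with no extra payload.
    return dag_run_obj

trigger_next = TriggerDagRunOperator(
    task_id='trigger_downstream_dag',
    trigger_dag_id='example_downstream_dag',
    python_callable=conditionally_trigger,
    dag=dag)

run_test.set_downstream(trigger_next)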
; Configuration for Airflow webserver and scheduler in Supervisor
[program:airflow]
command=/bin/airflow webserver
stopsignal=QUIT
stopasgroup=true
user=airflow
stdout_logfile=/var/log/airflow/airflow-stdout.log
stderr_logfile=/var/log/airflow/airflow-stderr.log
environment=HOME="/home/airflow",AIRFLOW_HOME="/etc/airflow",TMPDIR="/storage/airflow_tmp"
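The header comment mentions the scheduler as well; a matching program block would follow the same pattern (the log paths and user below simply mirror the webserver entry and are assumptions):

[program:airflow-scheduler]
command=/bin/airflow scheduler
stopsignal=QUIT
stopasgroup=true
user=airflow
stdout_logfile=/var/log/airflow/airflow-scheduler-stdout.log
stderr_logfile=/var/log/airflow/airflow-scheduler-stderr.log
environment=HOME="/home/airflow",AIRFLOW_HOME="/etc/airflow",TMPDIR="/storage/airflow_tmp"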
##################### ElasticSearch Configuration Example #####################
# This file contains an overview of various configuration settings,
# targeted at operations staff. Application developers should
# consult the guide at <http://elasticsearch.org/guide>.
#
# The installation procedure is covered at
# <http://elasticsearch.org/guide/en/elasticsearch/reference/current/setup.html>.
#
# ElasticSearch comes with reasonable defaults for most settings,
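Below that header, the settings most often edited in elasticsearch.yml are the cluster and node identity and the data path; the values shown here are placeholders, not defaults:

cluster.name: my-cluster
node.name: "node-1"
path.data: /var/data/elasticsearch
network.host: 127.0.0.1
http.port: 9200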
# Elasticsearch Cheatsheet - an overview of commonly used Elasticsearch API commands
# cat paths
/_cat/allocation
/_cat/shards
/_cat/shards/{index}
/_cat/master
/_cat/nodes
/_cat/indices
/_cat/indices/{index}
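Each of these paths is queried over HTTP against the cluster, and appending ?v adds column headers to the output. The host, port, and index name below are placeholders for a local default install:

curl -XGET 'http://localhost:9200/_cat/nodes?v'
curl -XGET 'http://localhost:9200/_cat/indices?v'
curl -XGET 'http://localhost:9200/_cat/shards/my-index?v'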