View parallel_paths_include_tlist_cost_v5.patch
diff --git a/src/backend/optimizer/geqo/geqo_eval.c b/src/backend/optimizer/geqo/geqo_eval.c
index b5cab0c..faa5bb7 100644
--- a/src/backend/optimizer/geqo/geqo_eval.c
+++ b/src/backend/optimizer/geqo/geqo_eval.c
@@ -40,7 +40,7 @@ typedef struct
} Clump;
static List *merge_clump(PlannerInfo *root, List *clumps, Clump *new_clump,
- bool force);
+ int num_gene, bool force);
View random-test-data.sql
--
-- Polygons contained in a 10000x10000
-- square, with just enough size/density to mostly
-- cover the whole area.
--
DROP TABLE IF EXISTS polygon_table_10000;
CREATE TABLE polygon_table_10000 AS
SELECT ST_Buffer(
ST_SetSRID(
ST_MakePoint(random() * 10000, random() * 10000),
View curl-options.csv
CURL_OPT VERSION LIBCURL_VERSION_NUM URL
CURLOPT_ABSTRACT_UNIX_SOCKET 7.53.0 0x073500 http://curl.haxx.se/libcurl/c/CURLOPT_ABSTRACT_UNIX_SOCKET.html
CURLOPT_ACCEPTTIMEOUT_MS 7.24.0 0x071800 http://curl.haxx.se/libcurl/c/CURLOPT_ACCEPTTIMEOUT_MS.html
CURLOPT_ACCEPT_ENCODING 7.21.6 0x071506 http://curl.haxx.se/libcurl/c/CURLOPT_ACCEPT_ENCODING.html
CURLOPT_ADDRESS_SCOPE 7.19.0 0x071300 http://curl.haxx.se/libcurl/c/CURLOPT_ADDRESS_SCOPE.html
CURLOPT_APPEND 7.16.4 0x071004 http://curl.haxx.se/libcurl/c/CURLOPT_APPEND.html
CURLOPT_BUFFERSIZE 7.53.0 0x073500 http://curl.haxx.se/libcurl/c/CURLOPT_BUFFERSIZE.html
CURLOPT_CHUNK_BGN_FUNCTION 7.21.0 0x071500 http://curl.haxx.se/libcurl/c/CURLOPT_CHUNK_BGN_FUNCTION.html
CURLOPT_CHUNK_DATA 7.21.0 0x071500 http://curl.haxx.se/libcurl/c/CURLOPT_CHUNK_DATA.html
CURLOPT_CHUNK_END_FUNCTION 7.21.0 0x071500 http://curl.haxx.se/libcurl/c/CURLOPT_CHUNK_END_FUNCTION.html
View encrypt.sh
#
# Simple commandline encryption aliases for OSX.
# Put this in your .bash_profile
#
# encrypt myfile
# takes your password, encrypts myfile to myfile.enc,
# writes some random data into myfile and then deletes it
#
# decrypt myfile.enc
# takes your password, decrypts to myfile,
View address_match_pgsql.py
#
# This script expects that
#
# - as many tables have been loaded as possible,
# to allow as much longitudinal data as possible to be used in
# identifying "identical" addresses
# - a table of locality congruencies has been created, 'locality_corpus'
# that maps an id key to different localities it shows up in. So, for
# example:
#
View pgsql.txt
blocking...
creating blocking_map database
creating inverted indexes
writing blocking map
Traceback (most recent call last):
File "1_identify_address_entities.py", line 208, in <module>
csv_writer.writerows(b_data)
File "/Library/Python/2.7/site-packages/dedupe/blocking.py", line 42, in __call__
block_keys = predicate(instance)
File "/Library/Python/2.7/site-packages/dedupe/predicates.py", line 224, in __call__
View localities.txt
from | to
----------------------+----------------------
100 Mile House | 108 Mile Ranch
100 Mile House | 93 Mile
100 Mile House | Buffalo Creek
100 Mile House | Lone Butte
103 Mile | 100 Mile House
108 Mile House | 108 Mile Ranch
108 Mile Ranch | 100 Mile House
150 Mile House | 141 Mile House
View match_address_voters.py
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
This code demonstrates how to use dedupe to match messy records
against a deduplicated, canonical dataset. In this example, we'll be
matching voters list strings against a list of valid addresses
"""
import os
import csv
View log.txt
Butterfly:~/Code/pgsql-ogr-fdw pramsey(readwrite)$ cat data/pt_two.dbf
Peter 45 5.6019650412 Paul 33 5.8419710325
Butterfly:~/Code/pgsql-ogr-fdw pramsey(readwrite)$ psql fdw
psql (9.5.1)
Type "help" for help.
fdw=# select * from pt_1 ;
fid | geom | name | age | height | birthdate
-----+--------------------------------------------+-------+-----+--------+------------
View gist:ff7cbf70dbe581189565
max_parallel_degree 0
Sets the maximum degree of parallelism for an individual parallel
operation. Note that the requested number of workers may not actually
be available at runtime. Parallel workers are taken from the pool
of processes established by `max_worker_processes`
parallel_tuple_cost 0.1