Transfers attributes from layers to features of a master layer whose feature geometries are equal to or contained by geometries of these layers
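Note: the script targets the QGIS 2.x API (Python 2 / PyQt4) and is meant to be run from the QGIS Python console, with the input layers already loaded in the project; layer names, field names and TEMP_DIR are configured near the top of the script.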
# -*- coding: utf-8 -*-
"""
/***************************************************************************
Integrate
Transfers attributes from layers to features of a master layer whose
feature geometries are equal to or contained by geometries of these layers
-------------------
date : 2016-03-09
version : 6.5.1
copyright : (C) 2015-2016 by Detlev Neumann
Dr. Neumann Consulting - Geospatial Services
email : dneumann@geospatial-services.de
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from PyQt4.QtCore import QVariant
# when run from the QGIS Python console these names are already in the
# namespace; the explicit imports make the script runnable as a script file
from qgis.core import (QgsMapLayerRegistry, QgsFeatureRequest, QgsVectorLayer,
                       QgsField, QgsFeature, QgsGeometry, QgsPoint,
                       QgsVectorFileWriter, NULL)
from os import path
import datetime
import cPickle as pickle
# helper function
def find(pattern, src_path, depth=0):
    """Returns all files in directory *src_path* matching *pattern*.

    Files are searched in *src_path* and its subdirectories down to
    *depth* levels.
    """
    from os import path, walk
    import fnmatch
    for root, dirs, files in walk(src_path):
        depth -= 1
        for name in files:
            if fnmatch.fnmatch(name, pattern):
                yield path.join(root, name)
        if depth < 0:
            break
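# example (hypothetical file names): collect cached chunks from a previous
# run, searching only TEMP_DIR itself (the default depth=0):
#     list(find('segments*.tmp', TEMP_DIR))
#     -> ['E:\\Temp\\segments.tmp', 'E:\\Temp\\segments_1.tmp', ...]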
# TODO: Full unicode support (file names, layer names, field names and values)
# TODO: run processes in separate worker threads
USE_SELECTED_ONLY = False
SAVE_TEMPORARY_FILES = False # True
ENABLE_SQLITE_OUTPUT = False
TRANSACTION_SIZE = 65536
SPLIT_DICTS = True
DICT_MAX_ITEMS = 500000
temporary_files = []
TEMP_DIR = 'E:\\Temp'
error_msgs = {}
if not SAVE_TEMPORARY_FILES:
    SPLIT_DICTS = False
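# note: SPLIT_DICTS only has an effect together with SAVE_TEMPORARY_FILES
# (hence the reset above); it pickles the segment dict to TEMP_DIR every
# DICT_MAX_ITEMS entries to bound memory use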
t = datetime.timedelta(0)
t0 = datetime.datetime.now()
print 'Starting at: ', t0
# get layers; one line for each layer to evaluate, with *network* containing the master geometry
# reading a layer from file consumes less memory than adding it to the map layer registry
#network = QgsMapLayerRegistry.instance().mapLayersByName('thtn-vbg')[0]
#network = QgsVectorLayer('e:\PUB\thtn-vbg.shp', 'thtn-vbg', 'ogr')
network = QgsMapLayerRegistry.instance().mapLayersByName('layer1')[0]
layer2 = QgsMapLayerRegistry.instance().mapLayersByName('layer2')[0]
#layer3 = QgsMapLayerRegistry.instance().mapLayersByName('layer3')[0]
# TODO: keep selected attributes from network layer
# TODO: create list of fields read from layers, instead of list of names and types
# how attributes_dict works:
# specify for each layer the attribute to transfer to the master geometry
#attributes_dict = {layer2: [{'fname': 'NAMN', 'ftype': QVariant.String, 'alias': 'NAMN_6'}, {'fname': 'ORDNING', 'ftype': QVariant.Int, 'alias': 'ORDNING_6'}, {'fname': 'RLID', 'ftype': QVariant.String, 'alias': 'RLID_6'}],
# layer6: [{'fname': 'NAMN', 'ftype': QVariant.String, 'alias': 'NAMN_9'}, {'fname': 'KONSTRTION', 'ftype': QVariant.String}, {'fname': 'RLID', 'ftype': QVariant.String, 'alias': 'RLID_9'}]}
attributes_dict = {layer2: [{'fname': 'wert', 'ftype': QVariant.Int}, {'fname': 'field2', 'ftype': QVariant.Int}],
                   # layer3: [{'fname': 'wert', 'ftype': QVariant.Int}, {'fname': 'field2', 'ftype': QVariant.Int}]
                   }
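# with the mapping above every output feature ends up with the fields 'fid',
# 'wert' and 'field2' (or the alias, when one is given), plus a trailing
# 'length' field that is appended when the output layer is defined below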
# field against which conditional checks are performed
field_to_compare = 'wert'
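# where several features cover the same segment, the feature with the
# smaller 'wert' value wins (see the comparison logic in the loops below)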
# split the dataset into chunks of max_features
n = 1 # counter for features read
start_feature = 1 # feature id to begin with
max_features = 0 # max count of features to process
# select condition, e.g. "RLID" LIKE '1000:%'
request = QgsFeatureRequest()
#request = QgsFeatureRequest().setFilterExpression('"RLID" LIKE \'1000:%\'')
# process the basic network layer (master) that contributes the geometry
fn = path.join(TEMP_DIR, path.splitext(network.name())[0]+'.tmp')
if (not SAVE_TEMPORARY_FILES) or (SAVE_TEMPORARY_FILES and not path.exists(fn)):
    # split lines into segments and save all segments into a dict along with the provided attributes
    # master geometry; must be equal to, contained by, or disjoint from the other geometries
    x = 0  # controls frequency of messages
    p = 1  # controls frequency of messages
    s = 0  # counter for total segments processed
    m = 0  # counter for segments per dict
    u = 0  # name postfix for parts of cached dicts
    cnt = network.featureCount()
    cnt50 = max(1, cnt / 25)  # print message every 4% of processed polylines
    segments = {}
    if USE_SELECTED_ONLY:
        features = network.selectedFeatures()
    else:
        features = network.dataProvider().getFeatures(request)
    for feat in features:
        if max_features > 0:
            if n < start_feature:
                n += 1
                continue
        # a tuple of tuples as key is faster than any surrogate key (e.g. a concatenated string)
        # a single tuple is slightly faster, but needs more time to reconstruct the original
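        # e.g. the key of a segment from (x1, y1) to (x2, y2) maps to its
        # owning master feature: ((x1, y1), (x2, y2)) -> {'fid': 42, 'seq': 0}
        # (values hypothetical)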
        c = 0  # enumerator for segments of a given polyline
        try:
            if feat.geometry().wkbType() == 2:
                lines = [feat.geometry().asPolyline()]
            elif feat.geometry().wkbType() == 5:
                lines = feat.geometry().asMultiPolyline()
            else:
                continue
            for line in lines:
                for i in xrange(len(line)-1):
                    segments[((line[i][0], line[i][1]), (line[i+1][0], line[i+1][1]))] = \
                        {'fid': feat.id(), 'seq': c}
                    c += 1
        except AttributeError:
            print 'WARNING: line %d has errors' % feat.id()
        s += c
        x += 1
        if x >= cnt50:
            t1 = datetime.datetime.now()
            t = t1 - t0
            try:
                remaining_secs = datetime.timedelta(seconds=(cnt - p * cnt50) / (p * cnt50 * 1.0) * t.total_seconds())
            except ZeroDivisionError:
                remaining_secs = 0
            print 'Building segments progress: %d of %d (%s) Time remaining: %s' % (p*cnt50, cnt, network.name(), remaining_secs)
            x = 0
            p += 1
        m += 1
        if SPLIT_DICTS and m >= DICT_MAX_ITEMS:
            if u == 0:
                fn = path.join(TEMP_DIR, path.splitext(network.name())[0]+'.tmp')
            else:
                fn = path.join(TEMP_DIR, path.splitext(network.name())[0]+'_'+str(u)+'.tmp')
            t96 = datetime.datetime.now()
            handle = open(fn, 'wb')
            pickle.dump(segments, handle, 2)
            handle.close()
            t97 = datetime.datetime.now()
            t = t97 - t96
            print '%d segments from %d lines dumped to file %s (%s)' % (len(segments), m, fn, t)
            if u == 0:
                temporary_files.append([fn])
            else:
                temporary_files[-1].append(fn)
            segments = {}
            u += 1
            m = 0
        if max_features > 0:
            if n > start_feature + max_features - 1:
                break
            n += 1
    if SAVE_TEMPORARY_FILES:
        if u == 0:
            fn = path.join(TEMP_DIR, path.splitext(network.name())[0]+'.tmp')
            temporary_files.append([fn])
        else:
            fn = path.join(TEMP_DIR, path.splitext(network.name())[0]+'_'+str(u)+'.tmp')
            temporary_files[-1].append(fn)
        t96 = datetime.datetime.now()
        handle = open(fn, 'wb')
        pickle.dump(segments, handle, 2)
        handle.close()
        t97 = datetime.datetime.now()
        t = t97 - t96
        print '%d segments from %d lines dumped to file %s (%s)' % (len(segments), m, fn, t)
        segments = None
    t1 = datetime.datetime.now()
    t = t1 - t0
    print 'Total network segments : %d' % s
    print 'Network layer processed ', t
else:
    temporary_files.append([fn for fn in find(path.splitext(network.name())[0]+'*.tmp', TEMP_DIR)])
    t1 = datetime.datetime.now()
    print 'Information: Using network from previous run'
error_count = 0
list_of_dicts = []
len_of_dicts = []
y = 1 # counter for max features per layer
s = 0 # counter for total segments processed
l = 0
# other geometries with attributes
for the_layer, the_attribs in attributes_dict.items():
    # catch the case of invalid layers
    if the_layer is None:
        print 'ERROR: Unable to load layer %d of %d' % (l+1, len(attributes_dict))
        l += 1
        continue
    t8 = datetime.datetime.now()
    fn = path.join(TEMP_DIR, path.splitext(the_layer.name())[0]+'.tmp')
    if (not SAVE_TEMPORARY_FILES) or (SAVE_TEMPORARY_FILES and not path.exists(fn)):
        print 'Exploding lines from layer %s' % the_layer.name()
        d = {}
        x = 0  # controls frequency of messages
        p = 1  # controls frequency of messages
        r = 0  # counter for segments processed per layer
        m = 0  # counter for lines per dict
        u = 0  # name postfix for parts of cached dicts
        cnt = the_layer.featureCount()
        step = max(1, min(25, cnt))
        cnt50 = cnt / step  # print message every 4% of processed polylines
        if USE_SELECTED_ONLY:
            features = the_layer.selectedFeatures()
        else:
            features = the_layer.dataProvider().getFeatures(request)
        for feat in features:
            try:
                # Experimental: aggregation and condition (overlapping features from the same layer)
                # TODO: when aggregating attributes, only one of them must be retained, unless it is aliased
                attribs = {the_attribs[k]['fname'] if the_attribs[k].get('alias', None) is None else the_attribs[k]['alias']: feat.attribute(the_attribs[k]['fname']) for k in range(len(the_attribs))}
            except AttributeError:
                print 'WARNING: line %d has attribute errors' % feat.id()
            except KeyError, e:
                print 'ERROR: KeyError. Cannot find field %s' % e
                print 'Abort processing layer %s' % the_layer.name()
                break
            try:
                if feat.geometry().wkbType() == 2:
                    lines = [feat.geometry().asPolyline()]
                elif feat.geometry().wkbType() == 5:
                    lines = feat.geometry().asMultiPolyline()
                elif feat.geometry().wkbType() == 3:
                    lines = feat.geometry().asPolygon()
                elif feat.geometry().wkbType() == 6:
                    # multipolygons are not supported yet; skip them instead of
                    # falling through and re-processing the previous feature's lines
                    continue
                else:
                    continue
                for line in lines:
                    for i in xrange(len(line)-1):
                        # trying the update and catching the exception is faster than checking for existence first
                        try:
                            # updating a dict element takes about 2.5x the time of creating it
                            # updating with a dict is faster than adding single elements
                            segmentx = ((line[i][0], line[i][1]), (line[i+1][0], line[i+1][1]))
                            if d.get(segmentx, None):
                                if d[segmentx].get(field_to_compare, None):
                                    if d[segmentx][field_to_compare] > attribs[field_to_compare]:
                                        d[segmentx] = attribs
                                else:
                                    d[segmentx] = attribs
                            else:
                                d[segmentx] = attribs
                            r += 1
                        except KeyError:
                            pass
            except AttributeError:
                print 'WARNING: line %d has invalid geometry' % feat.id()
            x += 1
            if x >= cnt50:
                t9 = datetime.datetime.now()
                t = t9 - t8
                remaining_secs = datetime.timedelta(seconds=(cnt - p * cnt50) / (p * cnt50 * 1.0) * t.total_seconds())
                print 'Exploding lines progress: %d of %d (layer %d of %d) Time remaining: %s' % (p*cnt50, cnt, l+1, len(attributes_dict), remaining_secs)
                x = 0
                p += 1
            m += 1
            if SPLIT_DICTS and m >= DICT_MAX_ITEMS:
                if u == 0:
                    fn = path.join(TEMP_DIR, path.splitext(the_layer.name())[0]+'.tmp')
                else:
                    fn = path.join(TEMP_DIR, path.splitext(the_layer.name())[0]+'_'+str(u)+'.tmp')
                t96 = datetime.datetime.now()
                handle = open(fn, 'wb')
                pickle.dump(d, handle, 2)
                handle.close()
                t97 = datetime.datetime.now()
                t = t97 - t96
                print '%d segments from %d lines dumped to file %s (%s)' % (len(d), m, fn, t)
                if u == 0:
                    temporary_files.append([fn])
                else:
                    temporary_files[-1].append(fn)
                d = {}
                u += 1
                m = 0
        # process last chunk
        if len(d) > 0:  # in case of an error d may be empty
            if SAVE_TEMPORARY_FILES:
                if u == 0:
                    fn = path.join(TEMP_DIR, path.splitext(the_layer.name())[0]+'.tmp')
                    temporary_files.append([fn])
                else:
                    fn = path.join(TEMP_DIR, path.splitext(the_layer.name())[0]+'_'+str(u)+'.tmp')
                    temporary_files[-1].append(fn)
                t96 = datetime.datetime.now()
                handle = open(fn, 'wb')
                pickle.dump(d, handle, 2)
                handle.close()
                t97 = datetime.datetime.now()
                t = t97 - t96
                print '%d segments from %d lines dumped to file %s (%s)' % (len(d), m, fn, t)
            else:
                list_of_dicts.append(d.copy())
        # output total count of segments per layer
        print 'Exploded %d lines to %d segments (%s)' % (cnt, r, the_layer.name())
        s += r
        d = None
        l += 1
    else:
        temporary_files.append([fn for fn in find(path.splitext(the_layer.name())[0]+'*.tmp', TEMP_DIR)])
        print 'Information: Using %s segments from previous run (layer %d of %d)' % (the_layer.name(), l+1, len(attributes_dict))
        l += 1
# if field names are aliased for output, it's time to restructure attributes_dict
for the_layer, the_attribs in attributes_dict.items():
    for i in range(len(the_attribs)):
        alias = the_attribs[i].get('alias', None)
        if alias is not None:
            the_attribs[i]['fname'] = alias
t2 = datetime.datetime.now()
t = t2 - t1
print 'Segments exploded : ', t
print 'Total segments processed: %d' % s
# combine all dicts into one
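# temporary_files[0] holds the chunk files of the network (master) layer,
# temporary_files[1:] one list of chunk files per attribute layer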
if SAVE_TEMPORARY_FILES:
    # combined chunks are written as segments.tmp, segments_1.tmp, ...
    u = 0
    for sn in temporary_files[0]:
        if u == 0:
            fn = path.join(TEMP_DIR, 'segments.tmp')
        else:
            fn = path.join(TEMP_DIR, 'segments'+'_'+str(u)+'.tmp')
        u += 1
        if path.exists(fn):
            print 'Information: %s exists. Segments will be read from previous run' % fn
        else:
            print 'Loading network segments from %s' % sn
            t96 = datetime.datetime.now()
            handle = open(sn, 'rb')
            segments = pickle.load(handle)
            handle.close()
            t97 = datetime.datetime.now()
            t = t97 - t96
            print '%d network segments loaded from file %s (%s)' % (len(segments), sn, t)
            l = 1
            for lns in temporary_files[1:]:
                for ln in lns:
                    # read the other cached layers
                    print 'Loading segments from %s' % ln
                    t96 = datetime.datetime.now()
                    handle = open(ln, 'rb')
                    layerx = pickle.load(handle)
                    handle.close()
                    t97 = datetime.datetime.now()
                    t = t97 - t96
                    print '%d segments loaded from file %s (%s)' % (len(layerx), ln, t)
                    print 'Integrating %s and %s (layer %d of %d)' % (path.basename(sn), path.basename(ln), l, len(attributes_dict))
                    x = 0
                    p = 1
                    cnt = len(layerx)
                    step = max(1, min(25, cnt / 10))
                    cnt50 = cnt / step  # print message every 4% of processed polylines
                    for segmentx in layerx:
                        try:
                            # next line is the original version
                            # segments[segmentx].update(layerx[segmentx])
                            # Experimental: aggregation and condition (overlapping features from different layers)
                            if field_to_compare in segments[segmentx]:
                                if segments[segmentx][field_to_compare] > layerx[segmentx][field_to_compare]:
                                    segments[segmentx].update(layerx[segmentx])
                            else:
                                segments[segmentx].update(layerx[segmentx])
                            # print 'P: ', segments[segmentx][field_to_compare], layerx[segmentx][field_to_compare]
                            # Experimental: end
                        except KeyError:
                            pass
                        x += 1
                        if x >= cnt50:
                            print 'Integrating segments progress: %d of %d (layer %d of %d)' % (p*cnt50, cnt, l, len(attributes_dict))
                            x = 0
                            p += 1
                    l += 1
            # dump the combined segments
            t96 = datetime.datetime.now()
            handle = open(fn, 'wb')
            pickle.dump(segments, handle, 2)
            handle.close()
            t97 = datetime.datetime.now()
            t = t97 - t96
            print '%d segments dumped to file %s (%s)' % (len(segments), fn, t)
            segments = None
else:
    l = 1
    for layerx in list_of_dicts:
        x = 0
        p = 1
        cnt = len(layerx)
        step = max(1, min(25, cnt))
        cnt50 = cnt / step  # print message every 4% of processed polylines
        for segmentx in layerx:
            try:
                # next line is the original version
                # segments[segmentx].update(layerx[segmentx])
                # Experimental: aggregation and condition (overlapping features from different layers)
                if field_to_compare in segments[segmentx]:
                    if segments[segmentx][field_to_compare] > layerx[segmentx][field_to_compare]:
                        segments[segmentx].update(layerx[segmentx])
                else:
                    segments[segmentx].update(layerx[segmentx])
                # print 'P: ', segments[segmentx][field_to_compare], layerx[segmentx][field_to_compare]
                # Experimental: end
            except KeyError:
                pass
            x += 1
            if x >= cnt50:
                print 'Integrating segments progress: %d of %d (layer %d of %d)' % (p*cnt50, cnt, l, len(attributes_dict))
                x = 0
                p += 1
        l += 1
t3 = datetime.datetime.now()
t = t3 - t2
print 'Segments integrated: ', t
# define the QGIS output layer with attributes
layers = QgsMapLayerRegistry.instance().mapLayersByName('Result')
if len(layers) > 0:
    layer = layers[0]
    print 'Appending output to existing layer %s' % layer.name()
else:
    # TODO: CRS as parameter
    # TODO: other output formats (SQLite, shapefile)
    # TODO: field definitions
    layer = QgsVectorLayer('LineString?crs=EPSG:4326', 'Result', 'memory')
    print 'Creating output layer %s' % layer.source()
prov = layer.dataProvider()
_attributes = [QgsField('fid', QVariant.Int)]
try:
    for the_layer, the_attribs in attributes_dict.items():
        for attrib in the_attribs:
            _attributes.append(QgsField(attrib['fname'], attrib['ftype']))
except IndexError, e:
    pass
_attributes.append(QgsField('length', QVariant.Double))
prov.addAttributes(_attributes)
layer.updateFields()
# TODO: how to apply the ogr2ogr -gt option? (-gt = transaction chunk size)
if ENABLE_SQLITE_OUTPUT:
    fn = path.join(TEMP_DIR, 'result.sqlite')
    error = QgsVectorFileWriter.writeAsVectorFormat(layer, fn, 'utf-8', layer.crs(), driverName='SQLite', layerOptions=['LAUNDER=NO', 'COMPRESS_GEOM=YES'])
    if error == 0:
        layer = QgsVectorLayer(fn, 'Result', 'ogr')
        prov = layer.dataProvider()
else:
    QgsMapLayerRegistry.instance().addMapLayer(layer)
    print 'Output layer added to map legend as %s' % layer.name()
# create features
layer.startEditing()
if SAVE_TEMPORARY_FILES:
    for fn in find('segments*.tmp', TEMP_DIR):
        t3 = datetime.datetime.now()
        t96 = datetime.datetime.now()
        handle = open(fn, 'rb')
        segments = pickle.load(handle)
        handle.close()
        t97 = datetime.datetime.now()
        t = t97 - t96
        print 'Information: %d segments read from %s (%s)' % (len(segments), fn, t)
        # building attributes
        print 'Building attributes'
        error_count = 0
        composed = {}
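        # composed maps each master feature id to its output record:
        #   {fid: {'attributes': [...], 'vertices': [(seq, (x1, y1), (x2, y2)), ...]}}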
        for segment in segments:
            fid = segments[segment]['fid']
            if fid in composed:
                composed[fid]['vertices'].append((segments[segment]['seq'], (segment[0][0], segment[0][1]), (segment[1][0], segment[1][1])))
            else:
                _attributes = [segments[segment]['fid']]
                # if a field contains a NULL value, replace it with None
                for the_layer, the_attribs in attributes_dict.items():
                    for attrib in the_attribs:
                        try:
                            if segments[segment].get(attrib['fname'], None) != NULL:
                                _attributes.append(segments[segment].get(attrib['fname'], None))
                            else:
                                _attributes.append(None)
                        except IndexError, err_msg:
                            try:
                                error_msgs[err_msg.args[0]] += 1
                            except KeyError:
                                error_msgs[err_msg.args[0]] = 1
                                print 'Warning: %s' % err_msg
                            error_count += 1
                composed[fid] = {'attributes': _attributes, 'vertices': [(segments[segment]['seq'], (segment[0][0], segment[0][1]), (segment[1][0], segment[1][1]))]}
        # free resources
        segments = None
        for k, v in error_msgs.items():
            print 'Information: Error %s suppressed %d times' % (k, v)
        t4 = datetime.datetime.now()
        t = t4 - t3
        print 'Segments prepared: ', t, ' with %d errors' % error_count
        # sorting segments and dissolving to polylines
        print 'Sorting and dissolving segments'
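        # e.g. the sorted segments (0, A, B), (1, B, C), (2, C, D) are chained
        # to the polyline A-B-C-D: both vertices of the first segment, then
        # the end vertex of each following segment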
        f = 0  # counter for features to save
        w = 0  # counter for chunks of features
        feats = []
        for compose in composed:
            # sort segments according to their sequence number
            composed[compose]['vertices'].sort()
            feat = QgsFeature()
            # from the first segment take both vertices
            vertices = [QgsPoint(composed[compose]['vertices'][0][1][0], composed[compose]['vertices'][0][1][1]),
                        QgsPoint(composed[compose]['vertices'][0][2][0], composed[compose]['vertices'][0][2][1])]
            # from all other segments take the last vertex
            vertices.extend([QgsPoint(composed[compose]['vertices'][idx][2][0], composed[compose]['vertices'][idx][2][1])
                             for idx in range(1, len(composed[compose]['vertices']))])
            # from this list of vertices build a polyline
            feat.setGeometry(QgsGeometry.fromPolyline(vertices))
            # append the length of the new polyline to the attributes and assign them to the new feature
            composed[compose]['attributes'].append(feat.geometry().length())
            feat.setAttributes(composed[compose]['attributes'])
            feats.append(feat)
            f += 1
            # add features in chunks of TRANSACTION_SIZE each and save the edits
            if f >= TRANSACTION_SIZE:
                prov.addFeatures(feats)
                layer.commitChanges()
                feats = []
                f = 0
                w += TRANSACTION_SIZE
                print 'Writing output feature # %d (%s)' % (w, datetime.datetime.now())
        # free resources
        composed = None
        # and save the remaining features
        prov.addFeatures(feats)
        layer.commitChanges()
        feats = []
        w += f
        print 'Writing output feature # %d (%s)' % (w, datetime.datetime.now())
else:
    # building attributes
    print 'Building attributes'
    error_count = 0
    composed = {}
    for segment in segments:
        fid = segments[segment]['fid']
        if fid in composed:
            composed[fid]['vertices'].append((segments[segment]['seq'], (segment[0][0], segment[0][1]), (segment[1][0], segment[1][1])))
        else:
            _attributes = [segments[segment]['fid']]
            # if a field contains a NULL value, replace it with None
            for the_layer, the_attribs in attributes_dict.items():
                for attrib in the_attribs:
                    try:
                        if segments[segment].get(attrib['fname'], None) != NULL:
                            _attributes.append(segments[segment].get(attrib['fname'], None))
                        else:
                            _attributes.append(None)
                    except IndexError, err_msg:
                        try:
                            error_msgs[err_msg.args[0]] += 1
                        except KeyError:
                            error_msgs[err_msg.args[0]] = 1
                            print 'Warning: %s' % err_msg
                        error_count += 1
            composed[fid] = {'attributes': _attributes, 'vertices': [(segments[segment]['seq'], (segment[0][0], segment[0][1]), (segment[1][0], segment[1][1]))]}
    # free resources
    segments = None
    for k, v in error_msgs.items():
        print 'Information: Error %s suppressed %d times' % (k, v)
    t4 = datetime.datetime.now()
    t = t4 - t3
    print 'Segments prepared: ', t, ' with %d errors' % error_count
    # sorting segments and dissolving to polylines
    print 'Sorting and dissolving segments'
    f = 0  # counter for features to save
    w = 0  # counter for chunks of features
    feats = []
    for compose in composed:
        # sort segments according to their sequence number
        composed[compose]['vertices'].sort()
        feat = QgsFeature()
        # from the first segment take both vertices
        vertices = [QgsPoint(composed[compose]['vertices'][0][1][0], composed[compose]['vertices'][0][1][1]),
                    QgsPoint(composed[compose]['vertices'][0][2][0], composed[compose]['vertices'][0][2][1])]
        # from all other segments take the last vertex
        vertices.extend([QgsPoint(composed[compose]['vertices'][idx][2][0], composed[compose]['vertices'][idx][2][1])
                         for idx in range(1, len(composed[compose]['vertices']))])
        # from this list of vertices build a polyline
        feat.setGeometry(QgsGeometry.fromPolyline(vertices))
        # append the length of the new polyline to the attributes and assign them to the new feature
        composed[compose]['attributes'].append(feat.geometry().length())
        feat.setAttributes(composed[compose]['attributes'])
        feats.append(feat)
        f += 1
        # add features in chunks of TRANSACTION_SIZE each and save the edits
        if f >= TRANSACTION_SIZE:
            prov.addFeatures(feats)
            layer.commitChanges()
            feats = []
            f = 0
            w += TRANSACTION_SIZE
            print 'Writing output feature # %d (%s)' % (w, datetime.datetime.now())
    # free resources
    composed = None
    # and save the remaining features
    prov.addFeatures(feats)
    layer.commitChanges()
    feats = []
    w += f
    print 'Writing output feature # %d (%s)' % (w, datetime.datetime.now())
t5 = datetime.datetime.now()
t = t5 - t4
print 'Lines built: ', t
# finally
layer.updateExtents()
# save the layer to SQLite just to be sure
if ENABLE_SQLITE_OUTPUT:
    t96 = datetime.datetime.now()
    fn = path.join(TEMP_DIR, 'result.sqlite')
    error = QgsVectorFileWriter.writeAsVectorFormat(layer, fn, 'utf-8', layer.crs(), driverName='SQLite', layerOptions=['LAUNDER=NO', 'COMPRESS_GEOM=YES'])
    t97 = datetime.datetime.now()
    t = t97 - t96
    print 'Layer %s dumped to file %s (%s)' % (layer.name(), fn, t)
t6 = datetime.datetime.now()
t = t6 - t0
print 'Finished: ', t6
print 'Total time elapsed: ', t