Commit

fix Join attributes by location (#7103)
alexbruy committed Mar 13, 2013
1 parent a57aa0c · commit a915c41
Showing 1 changed file with 15 additions and 11 deletions.
python/plugins/fTools/tools/doSpatialJoin.py (26 changes: 15 additions & 11 deletions)
@@ -123,23 +123,23 @@ def outFile(self):
def compute(self, inName, joinName, outName, summary, sumList, keep, progressBar):
layer1 = ftools_utils.getVectorLayerByName(inName)
provider1 = layer1.dataProvider()
-fieldList1 = ftools_utils.getFieldList(layer1).values()
+fieldList1 = ftools_utils.getFieldList(layer1).toList()

layer2 = ftools_utils.getVectorLayerByName(joinName)
provider2 = layer2.dataProvider()

-fieldList2 = ftools_utils.getFieldList(layer2)
+fieldList2 = ftools_utils.getFieldList(layer2).toList()
fieldList = []
if provider1.crs() != provider2.crs():
QMessageBox.warning(self, self.tr("CRS warning!"), self.tr("Warning: Input layers have non-matching CRS.\nThis may cause unexpected results."))
if not summary:
-fieldList2 = ftools_utils.testForUniqueness(fieldList1, fieldList2.values())
+fieldList2 = ftools_utils.testForUniqueness(fieldList1, fieldList2)
seq = range(0, len(fieldList1) + len(fieldList2))
fieldList1.extend(fieldList2)
fieldList1 = dict(zip(seq, fieldList1))
else:
numFields = {}
-for j in fieldList2.keys():
+for j in xrange(len(fieldList2)):
if fieldList2[j].type() == QVariant.Int or fieldList2[j].type() == QVariant.Double:
numFields[j] = []
for i in sumList:
@@ -153,7 +153,8 @@ def compute(self, inName, joinName, outName, summary, sumList, keep, progressBar
fieldList1 = dict(zip(seq, fieldList1))

# check for correct field names
-longNames = ftools_utils.checkFieldNameLength( fieldList1 )
+print fieldList1
+longNames = ftools_utils.checkFieldNameLength( fieldList1.values() )
if not longNames.isEmpty():
QMessageBox.warning( self, self.tr( 'Incorrect field names' ),
self.tr( 'No output will be created.\nFollowing field names are longer than 10 characters:\n%1' )
@@ -168,7 +169,10 @@ def compute(self, inName, joinName, outName, summary, sumList, keep, progressBar
QMessageBox.warning( self, self.tr( 'Error deleting shapefile' ),
self.tr( "Can't delete existing shapefile\n%1" ).arg( self.shapefileName ) )
return False
-writer = QgsVectorFileWriter(self.shapefileName, self.encoding, fieldList1, provider1.geometryType(), sRs)
+fields = QgsFields()
+for f in fieldList1.values():
+    fields.append(f)
+writer = QgsVectorFileWriter(self.shapefileName, self.encoding, fields, provider1.geometryType(), sRs)
#writer = QgsVectorFileWriter(outName, "UTF-8", fieldList1, provider1.geometryType(), sRs)
inFeat = QgsFeature()
outFeat = QgsFeature()
@@ -179,7 +183,7 @@ def compute(self, inName, joinName, outName, summary, sumList, keep, progressBar
add = 85.00 / provider1.featureCount()

index = ftools_utils.createIndex(provider2)
-fit1 = provider1.getFeatures()
+fit1 = provider1.getFeatures()
while fit1.nextFeature(inFeat):
inGeom = inFeat.geometry()
atMap1 = inFeat.attributes()
@@ -211,16 +215,16 @@ def compute(self, inName, joinName, outName, summary, sumList, keep, progressBar
none = False
atMap2 = inFeatB.attributes()
if not summary:
-atMap = atMap1.values()
-atMap2 = atMap2.values()
+atMap = atMap1
+atMap2 = atMap2
atMap.extend(atMap2)
atMap = dict(zip(seq, atMap))
break
else:
for j in numFields.keys():
numFields[j].append(atMap2[j].toDouble()[0])
if summary and not none:
-atMap = atMap1.values()
+atMap = atMap1
for j in numFields.keys():
for k in sumList:
if k == "SUM": atMap.append(QVariant(sum(numFields[j])))
@@ -234,7 +238,7 @@ def compute(self, inName, joinName, outName, summary, sumList, keep, progressBar
if none:
outFeat.setAttributes(atMap1)
else:
-outFeat.setAttributes(atMap)
+outFeat.setAttributes(atMap.values())
if keep: # keep all records
writer.addFeature(outFeat)
else: # keep only matching records
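
The diff adapts the fTools "Join attributes by location" tool to the list-based field and attribute handling of the QGIS 2.0 Python API: getFieldList() results are converted with toList(), output fields are collected in a QgsFields container before constructing the writer, and feature attributes are handled as plain lists. Below is a minimal sketch of that pattern, assuming a QGIS 2.0-era environment with PyQt4 (Python 2); the input path, layer name, field names and output path are hypothetical placeholders, not taken from the commit.

# Minimal sketch of the API pattern the commit moves to. Paths, layer and
# field names are hypothetical, not part of the plugin code.
from PyQt4.QtCore import QVariant
from qgis.core import (QgsVectorLayer, QgsFields, QgsField, QgsFeature,
                       QgsVectorFileWriter)

layer = QgsVectorLayer("/tmp/input.shp", "input", "ogr")  # hypothetical input
provider = layer.dataProvider()

# Collect output fields in a QgsFields container, mirroring the
# fields = QgsFields(); fields.append(f) loop in the diff.
fields = QgsFields()
fields.append(QgsField("id", QVariant.Int))
fields.append(QgsField("value", QVariant.Double))

writer = QgsVectorFileWriter("/tmp/output.shp", "UTF-8", fields,
                             provider.geometryType(), layer.crs())
if writer.hasError() != QgsVectorFileWriter.NoError:
    print "Error creating output:", writer.errorMessage()

# Pull features through an iterator and set attributes as a plain list,
# as in the fit1 = provider1.getFeatures() loop in the diff.
feat = QgsFeature()
fit = provider.getFeatures()
while fit.nextFeature(feat):
    outFeat = QgsFeature()
    outFeat.setGeometry(feat.geometry())
    outFeat.setAttributes([feat.id(), 0.0])
    writer.addFeature(outFeat)

del writer  # flushes buffers and closes the shapefile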
