improve duplicates filtering routing (and fix spelling)
parent 66087e14dc
commit 40995a6e44

1 changed file with 16 additions and 21 deletions
@@ -53,9 +53,9 @@ class Row(BaseRow):
     @staticmethod
     def getContextAttr(obj, attr):
         return getattr(obj.context, attr)
 
     def getCategories(self):
-        return [self.getRawValue(f.__name__) for f in
+        return [self.getRawValue(f.name) for f in
                 self.parent.context.fields if 'category' in f.executionSteps]
 
 
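Note on this hunk: the only change is reading f.name instead of f.__name__, suggesting the report fields expose a plain name attribute rather than the Zope-style __name__ (the same rename recurs in the hunks below). A minimal sketch of what getCategories() collects, where Field, the field names, and the sample row are hypothetical stand-ins, not the real classes:

# Hypothetical stand-in for a report field; only the attributes
# used by getCategories() are modeled here.
class Field:
    def __init__(self, name, executionSteps=()):
        self.name = name
        self.executionSteps = executionSteps

fields = [Field('customer', ('query', 'category')),
          Field('day', ('category',)),
          Field('effort', ('output',))]
row = {'customer': 'acme', 'day': '2011-05-05', 'effort': 2.5}

# Mirrors the new list comprehension: collect the raw values of
# every field that takes part in the 'category' execution step.
categories = [row[f.name] for f in fields if 'category' in f.executionSteps]
print(categories)  # -> ['acme', '2011-05-05']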
@@ -63,31 +63,27 @@ class ResultSet(object):
 
     def __init__(self, context, data, rowFactory=Row,
                  sortCriteria=None, queryCriteria=BaseQueryCriteria(),
-                 filterDublicate=False):
+                 filterDuplicates=False):
         self.context = context  # the report or report instance
         self.data = data
         self.rowFactory = rowFactory
         self.sortCriteria = sortCriteria
         self.queryCriteria = queryCriteria
-        self.filterDublicate = filterDublicate
+        self.filterDuplicates = filterDuplicates
         self.totals = BaseRow(None, self)
 
-    def filterDublicateRows(self, result):
-        res = []
+    def filterDuplicateRows(self, result):
+        seen = set()
         for row in result:
-            add = True
-            for r in res:
-                for f in self.categoryColumns:
-                    if row.getRawValue(f.__name__) == r.getRawValue(f.__name__):
-                        add = False
-            if add:
-                res.append(row)
-        return res
+            cats = tuple(row.getRawValue(f.name) for f in self.categoryColumns)
+            if cats not in seen:
+                seen.add(cats)
+                yield row
 
     def getResult(self):
         result = [self.rowFactory(item, self) for item in self.data]
-        if self.filterDublicate:
-            result = [row for row in self.filterDublicateRows(result)]
+        if self.filterDuplicates:
+            result = self.filterDuplicateRows(result)
         result = [row for row in result if self.queryCriteria.check(row)]
         if self.sortCriteria:
             result.sort(key=lambda x: [f.getSortValue(x) for f in self.sortCriteria])
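What changed here, beyond the Dublicate→Duplicates spelling fix: filterDuplicateRows() is now a generator that makes a single pass, packing each row's category values into a tuple and yielding the row only if that tuple has not been seen before. This replaces the old quadratic loop, which also dropped a row as soon as any single category column matched an already-kept row; the tuple version requires the whole category combination to repeat, which looks like the intended behavior. In getResult(), the generator is assigned directly and materialized by the query-criteria list comprehension on the next line. A self-contained sketch of the technique, with Field, Row, and the sample data as hypothetical stand-ins:

# Hypothetical stand-ins; only the pieces the dedupe touches are modeled.
class Field:
    def __init__(self, name):
        self.name = name

class Row:
    def __init__(self, values):
        self.values = values
    def getRawValue(self, name):
        return self.values[name]

def filterDuplicateRows(rows, categoryColumns):
    # Keep only the first row for each combination of category values;
    # a set of tuples gives O(1) membership tests instead of the old
    # O(n^2) pairwise comparison.
    seen = set()
    for row in rows:
        cats = tuple(row.getRawValue(f.name) for f in categoryColumns)
        if cats not in seen:
            seen.add(cats)
            yield row

columns = [Field('customer'), Field('project')]
rows = [Row({'customer': 'acme', 'project': 'p1'}),
        Row({'customer': 'acme', 'project': 'p1'}),  # duplicate, dropped
        Row({'customer': 'acme', 'project': 'p2'})]
print(len(list(filterDuplicateRows(rows, columns))))  # -> 2

One consequence of the new routing: the result of filterDuplicateRows() can only be iterated once, which is fine here because getResult() consumes it immediately.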
@@ -108,20 +104,19 @@ class ResultSet(object):
 
 
 class CombinedResultSet(ResultSet):
 
     def __init__(self, context, categorySet, resultSet):
         self.context = context
         self.categorySet = categorySet
         self.resultSet = resultSet
         self.totals = BaseRow(None, self)
 
     def getResult(self):
         result = []
         for row in self.categorySet:
             result.append(row)
             for res in self.resultSet:
                 for f in self.categoryColumns:
-                    if res.getRawValue(f.__name__) == row.getRawValue(f.__name__):
+                    if res.getRawValue(f.name) == row.getRawValue(f.name):
                         result.append(res)
         return result
 
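For context, CombinedResultSet.getResult() interleaves each category row with the detail rows whose category values match; only the f.__name__ → f.name access changed here. A sketch of the merge shape, using plain dicts as hypothetical stand-ins for rows (note that the loop, as written, appends a detail row once per matching category column, which is only safe when a single category column is in play):

# Sketch of the CombinedResultSet merge: after each category row,
# append every detail row whose category column value matches.
# Dicts stand in for the real row objects.
def combine(categoryRows, detailRows, categoryColumns):
    result = []
    for row in categoryRows:
        result.append(row)
        for res in detailRows:
            for name in categoryColumns:
                if res[name] == row[name]:
                    result.append(res)
    return result

cats = [{'customer': 'acme'}, {'customer': 'zeta'}]
details = [{'customer': 'acme', 'effort': 1.0},
           {'customer': 'zeta', 'effort': 2.0}]
for r in combine(cats, details, ['customer']):
    print(r)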