Author: croberts
Date: 2012-01-18 20:21:39 +0000 (Wed, 18 Jan 2012)
New Revision: 5189
Modified:
trunk/cumin/python/cumin/inventory/system.py
Log:
Addressing BZ 782902 by ensuring that we are fetching all of the sesame data before
merging it with the wallaby data. Previously, we were only getting the first 25 sesame
records that came back...not always in the same order. This would cause the sesame data
to "flicker" in the interface.
Modified: trunk/cumin/python/cumin/inventory/system.py
===================================================================
--- trunk/cumin/python/cumin/inventory/system.py 2012-01-18 19:20:57 UTC (rev 5188)
+++ trunk/cumin/python/cumin/inventory/system.py 2012-01-18 20:21:39 UTC (rev 5189)
@@ -296,30 +296,33 @@
return len(self.get_data(values, options))
def get_data(self, values, options):
- #first, fetch all the sql data, unsorted since we will sort it after merging the
wallaby data
+ #first, fetch all the sql data, unsorted since we will sort and limit it after
merging the wallaby data
requested_sort_field = options.sort_field
+ requested_limit = options.limit
+ requested_offset = options.offset
options.sort_field = None
+ options.limit = 10000
+ options.offset = 0
sqldata = super(WallabyAndSqlAdapter, self).get_data(values, options)
#now get the wallaby data
wallaby_nodes = self.app.wallaby.get_data(WBTypes.NODES)
data = list()
-
if len(sqldata) > 0 and len(wallaby_nodes) > 0: #means that we have both
sesame and wallaby data
#now merge them
for i, node in enumerate(wallaby_nodes):
- match_index = [i for i, y in enumerate(sqldata) if y[1] == node.name]
- if len(match_index) > 0:
- ## merge-in the wallaby data to the matched node entry in sqldata
- new_record = list(sqldata[match_index[0]])
- new_record.append(",
".join(self.app.wallaby.get_tag_names(node)))
- new_record.append(node.last_checkin)
- sqldata[match_index[0]] = tuple(new_record)
- else:
- # there was no match found, add a wallaby-only row
- data_row = tuple([node.name, node.name, "", "",
"", "", ", ".join(self.app.wallaby.get_tag_names(node)),
node.last_checkin])
- sqldata.append(data_row)
+ match_index = [i for i, y in enumerate(sqldata) if y[1] == node.name]
+ if len(match_index) > 0:
+ ## merge-in the wallaby data to the matched node entry in sqldata
+ new_record = list(sqldata[match_index[0]])
+ new_record.append(",
".join(self.app.wallaby.get_tag_names(node)))
+ new_record.append(node.last_checkin)
+ sqldata[match_index[0]] = tuple(new_record)
+ else:
+ # there was no match found, add a wallaby-only row
+ data_row = tuple([node.name, node.name, "", "",
"", "", ", ".join(self.app.wallaby.get_tag_names(node)),
node.last_checkin])
+ sqldata.append(data_row)
data = sqldata
elif len(sqldata) > 0: # we only have sesame data
for node in sqldata:
@@ -333,10 +336,12 @@
data_row = tuple([node.name, node.name, "", "",
"", "", ", ".join(self.app.wallaby.get_tag_names(node)),
node.last_checkin])
data.append(data_row)
- options.sort_field = requested_sort_field
+ options.sort_field = requested_sort_field
+ options.limit = requested_limit
+ options.offset = requested_offset
data = self.sort_rows(data, options)
data = self.limit_rows(data, options)
-
+
return data
def limit_rows(self, rows, options):