[FIX] base_module_quality: speed_test
author: Christophe Simonis <chs@tinyerp.com>
Thu, 18 Feb 2010 15:23:03 +0000 (16:23 +0100)
committer: Christophe Simonis <chs@tinyerp.com>
Thu, 18 Feb 2010 15:23:03 +0000 (16:23 +0100)
lp bug: https://launchpad.net/bugs/522161 fixed

bzr revid: chs@tinyerp.com-20100218152303-77pvb17rnezajnfs

addons/base_module_quality/speed_test/speed_test.py

index afbcb6a..fd1a3ab 100644 (file)
 #
 ##############################################################################
 
-from osv import fields, osv
 from tools.translate import _
 import pooler
 
 from base_module_quality import base_module_quality
 
+class CounterCursor(object):
+    def __init__(self, real_cursor):
+        self.cr = real_cursor
+        self.count = 0
+
+    def reset(self):
+        self.count = 0
+
+    def execute(self, query, params=None):
+        if query.lower().startswith('select '):
+            self.count += 1
+        return self.cr.execute(query, params)
+
+    def __getattr__(self, attr):
+        return getattr(self.cr, attr)
+
+
 class quality_test(base_module_quality.abstract_quality_check):
 
     def __init__(self):
@@ -63,6 +79,7 @@ This test checks the speed of the module. Note that at least 5 demo data is need
         result_dict = {}
         result_dict2 = {}
         self.result_details += _("<html>O(1) means that the number of SQL requests to read the object does not depand on the number of objects we are reading. This feature is hardly wished.\n</html>")
+        ccr = CounterCursor(cr)
         for obj, ids in obj_ids.items():
             code_base_complexity = 0
             code_half_complexity = 0
@@ -73,22 +90,22 @@ This test checks the speed of the module. Note that at least 5 demo data is need
             list2 = []
             if size:
                 speed_list = []
-                #we perform the operation twice, and count the number of queries in the second run. This allows to avoid the cache effect. (like translated terms that asks for more queries)
                 try:
-                    pool.get(obj).read(cr, uid, [ids[0]])
-                    cnt = cr.count
-                    pool.get(obj).read(cr, uid, [ids[0]])
-                    code_base_complexity = cr.count - cnt
+                    # perform the operation once to put data in cache
+                    pool.get(obj).read(cr, uid, ids)
 
-                    pool.get(obj).read(cr, uid, ids[:size/2])
-                    cnt = cr.count
-                    pool.get(obj).read(cr, uid, ids[:size/2])
-                    code_half_complexity = cr.count - cnt
+                    ccr.reset()
+                    pool.get(obj).read(ccr, uid, [ids[0]])
+                    code_base_complexity = ccr.count
+
+                    ccr.reset()
+                    pool.get(obj).read(ccr, uid, ids[:size/2])
+                    code_half_complexity = ccr.count
+
+                    ccr.reset()
+                    pool.get(obj).read(ccr, uid, ids)
+                    code_size_complexity = ccr.count
 
-                    pool.get(obj).read(cr, uid, ids)
-                    cnt = cr.count
-                    pool.get(obj).read(cr, uid, ids)
-                    code_size_complexity = cr.count - cnt
                 except Exception, e:
                     list2 = [obj, _("Error in Read method")]
                     speed_list = [obj, size, code_base_complexity, code_half_complexity, code_size_complexity, _("Error in Read method:" + str(e))]