[IMP] Added YAML for demo data.
diff --git a/bin/sql_db.py b/bin/sql_db.py
index dc46252..17bfa34 100644
--- a/bin/sql_db.py
+++ b/bin/sql_db.py
@@ -21,7 +21,7 @@
 
 __all__ = ['db_connect', 'close_db']
 
-import netsvc
+import logging
 from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT, ISOLATION_LEVEL_READ_COMMITTED, ISOLATION_LEVEL_SERIALIZABLE
 from psycopg2.psycopg1 import cursor as psycopg1cursor
 from psycopg2.pool import PoolError
@@ -54,20 +54,17 @@ from tools.func import wraps
 from datetime import datetime as mdt
 from datetime import timedelta
 import threading
+from inspect import stack
 
 import re
 re_from = re.compile('.* from "?([a-zA-Z_0-9]+)"? .*$');
 re_into = re.compile('.* into "?([a-zA-Z_0-9]+)"? .*$');
 
-
-def log(msg, lvl=netsvc.LOG_DEBUG2):
-    logger = netsvc.Logger()
-    logger.notifyChannel('sql', lvl, msg)
-
 sql_counter = 0
 
 class Cursor(object):
     IN_MAX = 1000
+    __logger = logging.getLogger('db.cursor')
 
     def check(f):
         @wraps(f)
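
The hunk above drops the module-level log() helper built on netsvc in favour of per-class loggers from the standard logging module. A minimal sketch of the new call pattern, assuming only the stdlib; basicConfig() stands in for whatever logging setup the server really does:

import logging

# 'db.cursor' is the channel name the patch gives the Cursor class logger.
logger = logging.getLogger('db.cursor')

# Hypothetical configuration for this sketch only.
logging.basicConfig(level=logging.DEBUG)

# Arguments after the format string are interpolated lazily, only when the
# record is actually emitted.
logger.debug("query: %s", "SELECT 1")
logger.warning("Cursor was created at %s:%s", "bin/sql_db.py", 99)
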
@@ -91,10 +88,7 @@ class Cursor(object):
         self._obj = self._cnx.cursor(cursor_factory=psycopg1cursor)
         self.__closed = False   # real initialisation value
         self.autocommit(False)
-
-        if tools.config['log_level'] in (netsvc.LOG_DEBUG, netsvc.LOG_DEBUG_RPC):
-            from inspect import stack
-            self.__caller = tuple(stack()[2][1:3])
+        self.__caller = tuple(stack()[2][1:3])
 
     def __del__(self):
         if not self.__closed:
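
With the log-level guard gone, inspect.stack() is now consulted unconditionally to remember where each cursor was created. A small sketch of what stack()[2][1:3] extracts; the helper names here are invented for illustration:

from inspect import stack

def _record_caller():
    # stack()[0] is this frame, [1] is fake_cursor_factory below, and [2] is
    # whoever called fake_cursor_factory(); in the real file those roles are
    # Cursor.__init__, Connection.cursor and the application code.
    # Each record is (frame, filename, lineno, function, code_context, index),
    # so the [1:3] slice keeps just (filename, lineno).
    return tuple(stack()[2][1:3])

def fake_cursor_factory():
    return _record_caller()

print(fake_cursor_factory())   # e.g. ('example.py', 15): the call site of fake_cursor_factory()
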
@@ -103,40 +97,38 @@ class Cursor(object):
             # but the database connection is not put back into the connection
             # pool, preventing some operation on the database like dropping it.
             # This can also lead to a server overload.
-            if tools.config['log_level'] in (netsvc.LOG_DEBUG, netsvc.LOG_DEBUG_RPC):
-                msg = "Cursor not closed explicitly\n"  \
-                      "Cursor was created at %s:%s" % self.__caller
-                log(msg, netsvc.LOG_WARNING)
+            msg = "Cursor not closed explicitly\n"  \
+                  "Cursor was created at %s:%s"
+            self.__logger.warn(msg, *self.__caller)
             self.close()
 
     @check
     def execute(self, query, params=None):
         if '%d' in query or '%f' in query:
-            log(query, netsvc.LOG_WARNING)
-            log("SQL queries cannot contain %d or %f anymore. Use only %s", netsvc.LOG_WARNING)
+            self.__logger.warn(query)
+            self.__logger.warn("SQL queries cannot contain %d or %f anymore. "
+                               "Use only %s")
             if params:
                 query = query.replace('%d', '%s').replace('%f', '%s')
 
         if self.sql_log:
             now = mdt.now()
-        
+
         try:
             params = params or None
             res = self._obj.execute(query, params)
         except psycopg2.ProgrammingError, pe:
-            logger= netsvc.Logger()
-            logger.notifyChannel('sql_db', netsvc.LOG_ERROR, "Programming error: %s, in query %s" % (pe, query))
+            self.__logger.error("Programming error: %s, in query %s" % (pe, query))
             raise
-        except Exception, e:
-            log("bad query: %s" % self._obj.query)
-            log(e)
+        except Exception:
+            self.__logger.exception("bad query: %s", self._obj.query)
             raise
 
         if self.sql_log:
             delay = mdt.now() - now
             delay = delay.seconds * 1E6 + delay.microseconds
 
-            log("query: %s" % self._obj.query)
+            self.__logger.debug("query: %s", self._obj.query)
             self.sql_log_count+=1
             res_from = re_from.match(query.lower())
             if res_from:
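
Two properties of the logging module replace the hand-rolled calls in execute(): arguments are interpolated only if the record is emitted, and Logger.exception() logs at ERROR level with the active traceback attached. A minimal illustration with a made-up query string:

import logging

logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger('db.cursor')

try:
    raise RuntimeError("relation does not exist")
except RuntimeError:
    # Replaces the old pair log("bad query: ...") / log(e): one record,
    # ERROR level, traceback appended automatically.
    log.exception("bad query: %s", 'SELECT * FROM missing_table')

# Deferred interpolation: the %-substitution is skipped entirely when the
# 'db.cursor' channel is not enabled for DEBUG.
log.debug("query: %s", 'SELECT 1')
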
@@ -161,14 +153,16 @@ class Cursor(object):
             if sqllogs[type]:
                 sqllogitems = sqllogs[type].items()
                 sqllogitems.sort(key=lambda k: k[1][1])
-                log("SQL LOG %s:" % (type,))
+                self.__logger.debug("SQL LOG %s:", type)
                 for r in sqllogitems:
                     delay = timedelta(microseconds=r[1][1])
-                    log("table: %s: %s/%s" %(r[0], str(delay), r[1][0]))
+                    self.__logger.debug("table: %s: %s/%s",
+                                        r[0], delay, r[1][0])
                     sum+= r[1][1]
                 sqllogs[type].clear()
             sum = timedelta(microseconds=sum)
-            log("SUM %s:%s/%d [%d]" % (type, str(sum), self.sql_log_count, sql_counter))
+            self.__logger.debug("SUM %s:%s/%d [%d]",
+                                type, sum, self.sql_log_count, sql_counter)
             sqllogs[type].clear()
         process('from')
         process('into')
@@ -200,11 +194,11 @@ class Cursor(object):
     def autocommit(self, on):
         offlevel = [ISOLATION_LEVEL_READ_COMMITTED, ISOLATION_LEVEL_SERIALIZABLE][bool(self._serialized)]
         self._cnx.set_isolation_level([offlevel, ISOLATION_LEVEL_AUTOCOMMIT][bool(on)])
-    
+
     @check
     def commit(self):
         return self._cnx.commit()
-    
+
     @check
     def rollback(self):
         return self._cnx.rollback()
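
The autocommit() context above picks an isolation level by indexing a two-element list with a bool; a short equivalent sketch (the function name is invented, the constants are real psycopg2 ones):

from psycopg2.extensions import (ISOLATION_LEVEL_AUTOCOMMIT,
                                 ISOLATION_LEVEL_READ_COMMITTED,
                                 ISOLATION_LEVEL_SERIALIZABLE)

def isolation_level_for(serialized, autocommit_on):
    # bool() yields 0 or 1, so the list index works as a conditional:
    # serialized cursors use SERIALIZABLE, others READ COMMITTED,
    # and autocommit overrides both.
    offlevel = [ISOLATION_LEVEL_READ_COMMITTED,
                ISOLATION_LEVEL_SERIALIZABLE][bool(serialized)]
    return [offlevel, ISOLATION_LEVEL_AUTOCOMMIT][bool(autocommit_on)]
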
@@ -216,6 +210,8 @@ class Cursor(object):
 
 class ConnectionPool(object):
 
+    __logger = logging.getLogger('db.connection_pool')
+
     def locked(fun):
         @wraps(fun)
         def _locked(self, *args, **kwargs):
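
Only the first line of the locked decorator's wrapper is visible in this hunk. A plausible sketch of how such a decorator serializes access to the pool; functools.wraps stands in for tools.func.wraps and the body is assumed, not copied from the file:

import threading
from functools import wraps

def locked(fun):
    @wraps(fun)
    def _locked(self, *args, **kwargs):
        # Hold the pool-wide lock for the whole call so that borrow,
        # give_back and close_all never interleave on the same pool.
        self._lock.acquire()
        try:
            return fun(self, *args, **kwargs)
        finally:
            self._lock.release()
    return _locked
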
@@ -231,18 +227,19 @@ class ConnectionPool(object):
         self._connections = []
         self._maxconn = max(maxconn, 1)
         self._lock = threading.Lock()
-        self._logger = netsvc.Logger()
 
-    def _log(self, msg):
-        #self._logger.notifyChannel('ConnectionPool', netsvc.LOG_INFO, msg)
-        pass
+    def __repr__(self):
+        used = len([1 for c, u in self._connections[:] if u])
+        count = len(self._connections)
+        return "ConnectionPool(used=%d/count=%d/max=%d)" % (used, count, self._maxconn)
+
     def _debug(self, msg):
-        #self._logger.notifyChannel('ConnectionPool', netsvc.LOG_DEBUG, msg)
-        pass
+        self.__logger.debug(repr(self))
+        self.__logger.debug(msg)
 
     @locked
     def borrow(self, dsn):
-        self._log('Borrow connection to %s' % (dsn,))
+        self._debug('Borrow connection to %s' % (dsn,))
 
         result = None
         for i, (cnx, used) in enumerate(self._connections):
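
The net effect of the new __repr__ plus _debug is that every pool message is preceded by a one-line usage summary. A self-contained stand-in (the class name and DSN are invented; only the two methods mirror the patch):

import logging
logging.basicConfig(level=logging.DEBUG)

class PoolSketch:
    __logger = logging.getLogger('db.connection_pool')

    def __init__(self, maxconn):
        self._connections = []      # (connection, in_use) pairs
        self._maxconn = maxconn

    def __repr__(self):
        used = len([1 for c, u in self._connections[:] if u])
        return "ConnectionPool(used=%d/count=%d/max=%d)" % (
            used, len(self._connections), self._maxconn)

    def _debug(self, msg):
        self.__logger.debug(repr(self))   # current pool state first
        self.__logger.debug(msg)          # then the actual message

PoolSketch(64)._debug('Borrow connection to dbname=demo')
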
@@ -258,7 +255,7 @@ class ConnectionPool(object):
             return result
 
         if len(self._connections) >= self._maxconn:
-            # try to remove the older connection not used
+            # try to remove the oldest connection not used
             for i, (cnx, used) in enumerate(self._connections):
                 if not used:
                     self._debug('Removing old connection at index %d: %s' % (i, cnx.dsn))
@@ -266,7 +263,7 @@ class ConnectionPool(object):
                     break
             else:
                 # note: this code is called only if the for loop has completed (no break)
-                raise PoolError('Connection Pool Full')
+                raise PoolError('The Connection Pool Is Full')
 
         self._debug('Create new connection')
         result = psycopg2.connect(dsn=dsn)
@@ -275,7 +272,7 @@ class ConnectionPool(object):
 
     @locked
     def give_back(self, connection):
-        self._log('Give back connection to %s' % (connection.dsn,))
+        self._debug('Give back connection to %s' % (connection.dsn,))
         for i, (cnx, used) in enumerate(self._connections):
             if cnx is connection:
                 self._connections.pop(i)
@@ -286,6 +283,7 @@ class ConnectionPool(object):
 
     @locked
     def close_all(self, dsn):
+        self._debug('Close all connections to %s' % (dsn,))
         for i, (cnx, used) in tools.reverse_enumerate(self._connections):
             if dsn_are_equals(cnx.dsn, dsn):
                 cnx.close()
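
close_all() pops entries while walking the pool, which only works because tools.reverse_enumerate (not shown in this file) presumably yields (index, item) pairs from the end of the list towards the start. A sketch of that assumed behaviour:

def reverse_enumerate(seq):
    # Assumption about tools.reverse_enumerate: iterate from the last index
    # down to 0 so that seq.pop(i) never shifts an index still to be visited.
    for i in range(len(seq) - 1, -1, -1):
        yield i, seq[i]

conns = ['cnx-a', 'cnx-b', 'cnx-c', 'cnx-d']
for i, cnx in reverse_enumerate(conns):
    if cnx in ('cnx-b', 'cnx-d'):     # pretend these match the DSN being closed
        conns.pop(i)
print(conns)   # ['cnx-a', 'cnx-c']
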
@@ -293,37 +291,15 @@ class ConnectionPool(object):
 
 
 class Connection(object):
-    __LOCKS = {}
+    __logger = logging.getLogger('db.connection')
 
-    def __init__(self, pool, dbname, unique=False):
+    def __init__(self, pool, dbname):
         self.dbname = dbname
         self._pool = pool
-        self._unique = unique
-
-    def __enter__(self):
-        if self._unique:
-            self.lock()
-        return self
-    
-    def __exit__(self, exc_type, exc_value, traceback):
-        if self._unique:
-            self.release()
-
-    def lock(self):
-        if self.dbname not in self.__LOCKS:
-            self.__LOCKS[self.dbname] = threading.Lock()
-        self.__LOCKS[self.dbname].acquire()
-        
-    def release(self):
-        close_db(self.dbname)
-        self.__LOCKS[self.dbname].release()
 
     def cursor(self, serialized=False):
-        if self._unique:
-            lock = self.__LOCKS.get(self.dbname, None)
-            if not (lock and lock.locked()):
-                netsvc.Logger().notifyChannel('Connection', netsvc.LOG_WARNING, 'Unprotected connection to %s' % (self.dbname,))
-
+        cursor_type = serialized and 'serialized ' or ''
+        self.__logger.debug('create %scursor to "%s"' % (cursor_type, self.dbname,))
         return Cursor(self._pool, self.dbname, serialized=serialized)
 
     def serialized_cursor(self):
@@ -359,8 +335,7 @@ def dsn_are_equals(first, second):
 _Pool = ConnectionPool(int(tools.config['db_maxconn']))
 
 def db_connect(db_name):
-    unique = db_name in ['template1', 'template0']
-    return Connection(_Pool, db_name, unique)
+    return Connection(_Pool, db_name)
 
 def close_db(db_name):
     _Pool.close_all(dsn(db_name))
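
With the template-database locking removed, db_connect() is a plain factory. A hypothetical end-to-end use of the public helpers; the database name and query are made up, and fetchone() is assumed to be proxied to the underlying psycopg2 cursor as elsewhere in this file:

import sql_db                        # this module (bin/sql_db.py), assuming it is importable

cnx = sql_db.db_connect('demo_db')   # plain Connection, no template-db lock any more
cr = cnx.cursor()                    # DEBUG: create cursor to "demo_db"
try:
    cr.execute('SELECT count(*) FROM res_users', None)
    print(cr.fetchone())
    cr.commit()
finally:
    cr.close()                       # otherwise __del__ warns "Cursor not closed explicitly"
sql_db.close_db('demo_db')           # closes every pooled connection to that database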