X-Git-Url: http://git.inspyration.org/?a=blobdiff_plain;f=openerp%2Fnetsvc.py;h=755f6513b259b74c911e0f3e4b4db9239a53c9f0;hb=a1e5f69cb3cf7cd36f2dc354392aa2367b9d018c;hp=3de345a0adfa93436ed9c32e4ce26928af4e37e0;hpb=aa7d42acfc43465049665405dfec81d185e23403;p=odoo%2Fodoo.git

diff --git a/openerp/netsvc.py b/openerp/netsvc.py
index 3de345a..755f651 100644
--- a/openerp/netsvc.py
+++ b/openerp/netsvc.py
@@ -19,29 +19,23 @@
 #
 ##############################################################################
 
+
 import logging
 import logging.handlers
 import os
 import release
 import sys
 import threading
+import time
+import types
 
 from pprint import pformat
-
-import psycopg2
+import psutil
 import tools
 import openerp
-import sql_db
 
 _logger = logging.getLogger(__name__)
 
-def log(logger, level, prefix, msg, depth=None):
-    indent=''
-    indent_after=' '*len(prefix)
-    for line in (prefix+pformat(msg, depth=depth)).split('\n'):
-        logger.log(level, indent+line)
-        indent=indent_after
-
 def LocalService(name):
     """
     The openerp.netsvc.LocalService() function is deprecated. It still works
@@ -68,36 +62,6 @@ def LocalService(name):
                 with registry.cursor() as cr:
                     return registry['ir.actions.report.xml']._lookup_report(cr, name[len('report.'):])
 
-class PostgreSQLHandler(logging.Handler):
-    """ PostgreSQL Loggin Handler will store logs in the database, by default
-    the current database, can be set using --log-db=DBNAME
-    """
-    def emit(self, record):
-        ct = threading.current_thread()
-        ct_db = getattr(ct, 'dbname', None)
-        ct_uid = getattr(ct, 'uid', None)
-        dbname = tools.config['log_db'] or ct_db
-        if dbname:
-            cr = None
-            try:
-                cr = sql_db.db_connect(dbname).cursor()
-                msg = unicode(record.msg)
-                traceback = getattr(record, 'exc_text', '')
-                if traceback:
-                    msg = "%s\n%s" % (msg, traceback)
-                level = logging.getLevelName(record.levelno)
-                val = (ct_uid, ct_uid, 'server', dbname, record.name, level, msg, record.pathname, record.lineno, record.funcName)
-                cr.execute("""
-                    INSERT INTO ir_logging(create_date, write_date, create_uid, write_uid, type, dbname, name, level, message, path, line, func)
-                    VALUES (NOW() at time zone 'UTC', NOW() at time zone 'UTC', %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
-                """, val )
-                cr.commit()
-            except Exception, e:
-                pass
-            finally:
-                if cr:
-                    cr.close()
-
 BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE, _NOTHING, DEFAULT = range(10)
 #The background is set with 40 plus the number of the color, and the foreground with 30
 #These are the sequences need to get colored ouput
@@ -137,7 +101,7 @@ def init_logger():
         if os.name == 'nt':
            handler = logging.handlers.NTEventLogHandler("%s %s" % (release.description, release.version))
         else:
-            handler = logging.handlers.SysLogHandler()
+            handler = logging.handlers.SysLogHandler('/dev/log')
         format = '%s %s' % (release.description, release.version) \
                 + ':%(dbname)s:%(levelname)s:%(name)s:%(message)s'
 
@@ -145,12 +109,11 @@ def init_logger():
         # LogFile Handler
         logf = tools.config['logfile']
         try:
-            # We check we have the right location for the log files
            dirname = os.path.dirname(logf)
            if dirname and not os.path.isdir(dirname):
                os.makedirs(dirname)
            if tools.config['logrotate'] is not False:
-                handler = logging.handlers.TimedRotatingFileHandler(filename=logf, when='D', interval=1, backupCount=30)
+                handler = logging.handlers.TimedRotatingFileHandler(logf,'D',1,30)
            elif os.name == 'posix':
                handler = logging.handlers.WatchedFileHandler(logf)
            else:
@@ -166,10 +129,9 @@ def init_logger():
     # behind Apache with mod_wsgi, handler.stream will have type mod_wsgi.Log,
     # which has no fileno() method. (mod_wsgi.Log is what is being bound to
     # sys.stderr when the logging.StreamHandler is being constructed above.)
-    def is_a_tty(stream):
-        return hasattr(stream, 'fileno') and os.isatty(stream.fileno())
-
-    if isinstance(handler, logging.StreamHandler) and is_a_tty(handler.stream):
+    if isinstance(handler, logging.StreamHandler) \
+            and hasattr(handler.stream, 'fileno') \
+            and os.isatty(handler.stream.fileno()):
        formatter = ColoredFormatter(format)
     else:
        formatter = DBFormatter(format)
@@ -180,9 +142,6 @@ def init_logger():
 
     logconfig = tools.config['log_handler']
 
-    postgresqlHandler = PostgreSQLHandler()
-    postgresqlHandler.setLevel(logging.WARNING)
-
     logging_configurations = DEFAULT_LOG_CONFIGURATION + pseudo_config + logconfig
     for logconfig_item in logging_configurations:
         loggername, level = logconfig_item.split(':')
@@ -191,7 +150,6 @@ def init_logger():
         logger.handlers = []
         logger.setLevel(level)
         logger.addHandler(handler)
-        logger.addHandler(postgresqlHandler)
         if loggername != '':
             logger.propagate = False
 
@@ -217,4 +175,99 @@ PSEUDOCONFIG_MAPPER = {
     'critical': ['openerp:CRITICAL'],
 }
 
+# A alternative logging scheme for automated runs of the
+# server intended to test it.
+def init_alternative_logger():
+    class H(logging.Handler):
+        def emit(self, record):
+            if record.levelno > 20:
+                print record.levelno, record.pathname, record.msg
+    handler = H()
+    # Add the handler to the 'openerp' logger.
+    logger = logging.getLogger('openerp')
+    logger.addHandler(handler)
+    logger.setLevel(logging.ERROR)
+
+def replace_request_password(args):
+    # password is always 3rd argument in a request, we replace it in RPC logs
+    # so it's easier to forward logs for diagnostics/debugging purposes...
+    if len(args) > 2:
+        args = list(args)
+        args[2] = '*'
+    return tuple(args)
+
+def log(logger, level, prefix, msg, depth=None):
+    indent=''
+    indent_after=' '*len(prefix)
+    for line in (prefix+pformat(msg, depth=depth)).split('\n'):
+        logger.log(level, indent+line)
+        indent=indent_after
+
+def dispatch_rpc(service_name, method, params):
+    """ Handle a RPC call.
+
+    This is pure Python code, the actual marshalling (from/to XML-RPC) is done
+    in a upper layer.
+    """
+    try:
+        rpc_request = logging.getLogger(__name__ + '.rpc.request')
+        rpc_response = logging.getLogger(__name__ + '.rpc.response')
+        rpc_request_flag = rpc_request.isEnabledFor(logging.DEBUG)
+        rpc_response_flag = rpc_response.isEnabledFor(logging.DEBUG)
+        if rpc_request_flag or rpc_response_flag:
+            start_time = time.time()
+            start_rss, start_vms = 0, 0
+            start_rss, start_vms = psutil.Process(os.getpid()).get_memory_info()
+            if rpc_request and rpc_response_flag:
+                log(rpc_request,logging.DEBUG,'%s.%s'%(service_name,method), replace_request_password(params))
+
+        threading.current_thread().uid = None
+        threading.current_thread().dbname = None
+        if service_name == 'common':
+            dispatch = openerp.service.common.dispatch
+        elif service_name == 'db':
+            dispatch = openerp.service.db.dispatch
+        elif service_name == 'object':
+            dispatch = openerp.service.model.dispatch
+        elif service_name == 'report':
+            dispatch = openerp.service.report.dispatch
+        else:
+            dispatch = openerp.service.wsgi_server.rpc_handlers.get(service_name)
+        result = dispatch(method, params)
+
+        if rpc_request_flag or rpc_response_flag:
+            end_time = time.time()
+            end_rss, end_vms = 0, 0
+            end_rss, end_vms = psutil.Process(os.getpid()).get_memory_info()
+            logline = '%s.%s time:%.3fs mem: %sk -> %sk (diff: %sk)' % (service_name, method, end_time - start_time, start_vms / 1024, end_vms / 1024, (end_vms - start_vms)/1024)
+            if rpc_response_flag:
+                log(rpc_response,logging.DEBUG, logline, result)
+            else:
+                log(rpc_request,logging.DEBUG, logline, replace_request_password(params), depth=1)
+
+        return result
+    except openerp.osv.orm.except_orm:
+        raise
+    except openerp.exceptions.AccessError:
+        raise
+    except openerp.exceptions.AccessDenied:
+        raise
+    except openerp.exceptions.Warning:
+        raise
+    except openerp.exceptions.RedirectWarning:
+        raise
+    except openerp.exceptions.DeferredException, e:
+        _logger.exception(tools.exception_to_unicode(e))
+        post_mortem(e.traceback)
+        raise
+    except Exception, e:
+        _logger.exception(tools.exception_to_unicode(e))
+        post_mortem(sys.exc_info())
+        raise
+
+def post_mortem(info):
+    if tools.config['debug_mode'] and isinstance(info[2], types.TracebackType):
+        import pdb
+        pdb.post_mortem(info[2])
+
 # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
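
The heart of this patch is the new dispatch_rpc() above: all of the per-call bookkeeping (wall-clock timing, psutil memory sampling, pretty-printed request and response dumps) is guarded by isEnabledFor(logging.DEBUG) on two dedicated loggers. Because the module is openerp/netsvc.py, those loggers are named openerp.netsvc.rpc.request and openerp.netsvc.rpc.response, so they can be enabled selectively through the name:LEVEL pairs that init_logger() reads from the log_handler option. The snippet below is a minimal stand-alone sketch of that pattern, not code from the patch: the demo.* logger names, the dispatch() wrapper and the handler callable are invented for illustration, and the memory sampling is left out because the patch uses psutil's old get_memory_info() method (later psutil releases call it memory_info()).

import logging
import time
from pprint import pformat

# Illustrative logger names; the patch derives the real ones from __name__,
# yielding openerp.netsvc.rpc.request and openerp.netsvc.rpc.response.
rpc_request = logging.getLogger('demo.rpc.request')
rpc_response = logging.getLogger('demo.rpc.response')

def replace_request_password(args):
    # Same convention as the patch: the password is the 3rd positional
    # argument of a request, so mask it before it reaches the logs.
    if len(args) > 2:
        args = list(args)
        args[2] = '*'
    return tuple(args)

def log(logger, level, prefix, msg, depth=None):
    # Same idea as the log() helper in the patch: emit the prefix once, then
    # indent every following line of the pretty-printed payload to align it.
    indent = ''
    indent_after = ' ' * len(prefix)
    for line in (prefix + pformat(msg, depth=depth)).split('\n'):
        logger.log(level, indent + line)
        indent = indent_after

def dispatch(service_name, method, params, handler):
    # handler is a hypothetical callable standing in for the per-service
    # dispatchers (openerp.service.*.dispatch) that the patch selects by name.
    request_flag = rpc_request.isEnabledFor(logging.DEBUG)
    response_flag = rpc_response.isEnabledFor(logging.DEBUG)
    if request_flag or response_flag:
        start_time = time.time()
        log(rpc_request, logging.DEBUG,
            '%s.%s ' % (service_name, method), replace_request_password(params))
    result = handler(method, params)
    if response_flag:
        logline = '%s.%s time:%.3fs ' % (service_name, method, time.time() - start_time)
        log(rpc_response, logging.DEBUG, logline, result)
    return result

if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    dispatch('object', 'execute',
             ('db_name', 1, 'secret', 'res.partner', 'read', [1]),
             lambda method, params: [{'id': 1, 'name': 'Partner'}])

The patch's version additionally reports the process memory usage before and after the call (the "mem: ... (diff: ...)" part of the log line) and, when only the request logger is enabled, logs the password-masked request parameters together with that timing line instead of the response.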