#
##############################################################################
-import time
from datetime import datetime
from dateutil.relativedelta import relativedelta
+import logging
from operator import itemgetter
+import time
-import netsvc
-import pooler
-from osv import fields, osv
import decimal_precision as dp
+from osv import fields, osv
+import pooler
from tools.translate import _
+_logger = logging.getLogger(__name__)
+
def check_cycle(self, cr, uid, ids, context=None):
""" climbs the ``self._table.parent_id`` chains for 100 levels or
until it can't find any more parent(s)
_name = "account.account"
_description = "Account"
_parent_store = True
- logger = netsvc.Logger()
def search(self, cr, uid, args, offset=0, limit=None, order=None,
context=None, count=False):
if aml_query.strip():
wheres.append(aml_query.strip())
filters = " AND ".join(wheres)
- self.logger.notifyChannel('addons.'+self._name, netsvc.LOG_DEBUG,
- 'Filters: %s'%filters)
+ _logger.debug('Filters: %s', filters)
# IN might not work ideally in case there are too many
# children_and_consolidated, in that case join on a
# values() e.g.:
" GROUP BY l.account_id")
params = (tuple(children_and_consolidated),) + query_params
cr.execute(request, params)
- self.logger.notifyChannel('addons.'+self._name, netsvc.LOG_DEBUG,
- 'Status: %s'%cr.statusmessage)
+ _logger.debug('Status: %s', cr.statusmessage)
for res in cr.dictfetchall():
accounts[res['id']] = res
}
def compute(self, cr, uid, taxes, price_unit, quantity, address_id=None, product=None, partner=None):
- logger = netsvc.Logger()
- logger.notifyChannel("warning", netsvc.LOG_WARNING,
+ _logger.warning(
"Deprecated, use compute_all(...)['taxes'] instead of compute(...) to manage prices with tax included")
return self._compute(cr, uid, taxes, price_unit, quantity, address_id, product, partner)
import time
from report import report_sxw
-import pooler
-import netsvc
-logger=netsvc.Logger()
class bank_statement_balance_report(report_sxw.rml_parse):
def set_context(self, objects, data, ids, report_type=None):
- #logger.notifyChannel('addons.'+__name__, netsvc.LOG_WARNING, 'set_context, objects = %s, data = %s, ids = %s' % (objects, data, ids))
cr = self.cr
uid = self.uid
context = self.context
##############################################################################
import time
-from osv import osv, fields
+
import decimal_precision as dp
-import netsvc
+from osv import osv, fields
from tools.translate import _
-logger=netsvc.Logger()
class coda_bank_account(osv.osv):
_name= 'coda.bank.account'
#
##############################################################################
-import time
import base64
-from osv import fields,osv
-from tools.translate import _
-import netsvc
import re
-from traceback import format_exception
from sys import exc_info
-logger=netsvc.Logger()
+import logging
+import time
+from traceback import format_exception
+
+from osv import fields,osv
+from tools.translate import _
+
+_logger = logging.getLogger(__name__)
class account_coda_import(osv.osv_memory):
_name = 'account.coda.import'
ttype = line['type'] == 'supplier' and 'payment' or 'receipt',
date = line['val_date'],
context = context)
- #logger.notifyChannel('addons.'+self._name, netsvc.LOG_WARNING, 'voucher_dict = %s' % voucher_dict)
voucher_line_vals = False
if voucher_dict['value']['line_ids']:
for line_dict in voucher_dict['value']['line_ids']:
nb_err += 1
err_string += _('\nError ! ') + str(e)
tb = ''.join(format_exception(*exc_info()))
- logger.notifyChannel('addons.'+self._name, netsvc.LOG_ERROR,
- 'Application Error while processing Statement %s\n%s' % (statement.get('name', '/'),tb))
+ _logger.error('Application Error while processing Statement %s\n%s',
+ statement.get('name', '/'), tb)
except Exception, e:
cr.rollback()
nb_err += 1
err_string += _('\nSystem Error : ') + str(e)
tb = ''.join(format_exception(*exc_info()))
- logger.notifyChannel('addons.'+self._name, netsvc.LOG_ERROR,
- 'System Error while processing Statement %s\n%s' % (statement.get('name', '/'),tb))
+ _logger.error('System Error while processing Statement %s\n%s',
+ statement.get('name', '/'), tb)
except :
cr.rollback()
nb_err += 1
err_string = _('\nUnknown Error : ') + str(e)
tb = ''.join(format_exception(*exc_info()))
- logger.notifyChannel('addons.'+self._name, netsvc.LOG_ERROR,
- 'Unknown Error while processing Statement %s\n%s' % (statement.get('name', '/'),tb))
+ _logger.error('Unknown Error while processing Statement %s\n%s',
+ statement.get('name', '/'), tb)
# end 'for statement in coda_statements'
#
##############################################################################
+import logging
import time
from osv import osv, fields
import netsvc
+_logger = logging.getLogger(__name__)
+
class payment_mode(osv.osv):
_name= 'payment.mode'
_description= 'Payment Mode'
#dead code
def get_wizard(self, type):
- logger = netsvc.Logger()
- logger.notifyChannel("warning", netsvc.LOG_WARNING,
- "No wizard found for the payment type '%s'." % type)
+ _logger.warning("No wizard found for the payment type '%s'.", type)
return None
def _total(self, cursor, user, ids, name, args, context=None):
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
-from os.path import join
import base64
-import tempfile
-import tarfile
import httplib
+import logging
+import os
+from os.path import join
+import tarfile
+import tempfile
-import netsvc
import wizard
import pooler
-import os
-import tools
+
+_logger = logging.getLogger(__name__)
choose_file_form = '''<?xml version="1.0"?>
<form string="Create Technical Guide in rst format">
if res.status in (200, ):
status_good = True
except (Exception, ), e:
- logger = netsvc.Logger()
msg = "error connecting to server '%s' with link '%s'. Error message: %s" % (server, link, str(e))
- logger.notifyChannel("base_module_doc_rst", netsvc.LOG_ERROR, msg)
+ _logger.error(msg)
status_good = False
return status_good
def _write_objects(self):
def write_field(field_def):
if not isinstance(field_def, tuple):
- logger = netsvc.Logger()
msg = "Error on Object %s: field_def: %s [type: %s]" % (obj_name.encode('utf8'), field_def.encode('utf8'), type(field_def))
- logger.notifyChannel("base_module_doc_rst", netsvc.LOG_ERROR, msg)
+ _logger.error(msg)
return ""
field_name = field_def[0]
try:
os.unlink(tgz_tmp_filename)
except Exception, e:
- logger = netsvc.Logger()
msg = "Temporary file %s could not be deleted. (%s)" % (tgz_tmp_filename, e)
- logger.notifyChannel("warning", netsvc.LOG_WARNING, msg)
+ _logger.warning(msg)
return {
'rst_file': base64.encodestring(out),
res = modobj.fields_get(cr, uid).items()
return res
else:
- logger = netsvc.Logger()
msg = "Object %s not found" % (obj)
- logger.notifyChannel("base_module_doc_rst", netsvc.LOG_ERROR, msg)
+ _logger.error(msg)
return ""
states = {
from osv import osv, fields
import logging
import addons
+
+_logger = logging.getLogger(__name__)
+
class abstract_quality_check(object):
'''
This Class is abstract class for all test
#This variable used to give message if test result is good or not
self.message = ''
- self.log = logging.getLogger('module.quality')
#The tests have to subscribe itselfs in this list, that contains
#all the test that have to be performed.
model_data = pool.get('ir.model.data').browse(cr, uid, ids2)
for model in model_data:
model_list.append(model.res_id)
- self.log.debug('get_objects() model_list: %s', ','.join(map(str, model_list)))
+ _logger.debug('get_objects() model_list: %s', ','.join(map(str, model_list)))
obj_list = []
for mod in pool.get('ir.model').browse(cr, uid, model_list):
obj_list.append(str(mod.model))
- self.log.debug('get_objects() obj_list: %s', ','.join(obj_list))
+ _logger.debug('get_objects() obj_list: %s', ','.join(obj_list))
return obj_list
def get_model_ids(self, cr, uid, models=[]):
if not models:
return []
pool = pooler.get_pool(cr.dbname)
- self.log.debug('get_model_ids([%s])', ', '.join(models))
+ _logger.debug('get_model_ids([%s])', ', '.join(models))
return pool.get('ir.model').search(cr, uid, [('model', 'in', models)])
def get_ids(self, cr, uid, object_list):
So here the detail result is in html format and summary will be in text_wiki format.
'''
pool = pooler.get_pool(cr.dbname)
- log = logging.getLogger('module.quality')
obj_module = pool.get('ir.module.module')
if not module_state:
module_id = obj_module.search(cr, uid, [('name', '=', module_name)])
ponderation_sum = 0.0
create_ids = []
module_path = addons.get_module_path(module_name)
- log.info('Performing quality tests for %s', module_name)
+ _logger.info('Performing quality tests for %s', module_name)
for test in abstract_obj.tests:
val = test.quality_test()
if not val.active:
- log.info('Skipping inactive step %s for %s', val.name, module_name)
+ _logger.info('Skipping inactive step %s for %s', val.name, module_name)
continue
- log.info('Performing step %s for %s', val.name, module_name)
+ _logger.info('Performing step %s for %s', val.name, module_name)
# Get a separate cursor per test, so that an SQL error in one
# will not block the others.
cr2 = pooler.get_db(cr.dbname).cursor()
'summary': _("The module has to be installed before running this test.")
}
create_ids.append((0, 0, data))
- log.info('Finished quality test step')
+ _logger.info('Finished quality test step')
except Exception, e:
- log.exception("Could not finish test step %s due to %s", val.name, e)
+ _logger.exception("Could not finish test step %s due to %s", val.name, e)
finally:
cr2.rollback()
cr2.close()
import nodes
import logging
+_logger = logging.getLogger(__name__)
+
DMS_ROOT_PATH = tools.config.get('document_path', os.path.join(tools.config['root_path'], 'filestore'))
class document_file(osv.osv):
parent_id = self.pool.get('document.directory')._get_root_directory(cr,uid)
if not parent_id:
- logging.getLogger('document').warning("at _attach_parent_id(), still not able to set the parent!")
+ _logger.warning("at _attach_parent_id(), still not able to set the parent!")
return False
if ids is not None:
if r:
unres.append(r)
else:
- logging.getLogger('document').warning("Unlinking attachment #%s %s that has no storage",
+ _logger.warning("Unlinking attachment #%s %s that has no storage",
f.id, f.name)
res = super(document_file, self).unlink(cr, uid, ids, context)
stor.do_unlink(cr, uid, unres)
#
##############################################################################
+import logging
from osv import osv, fields
from osv.orm import except_orm
import nodes
from tools.translate import _
+_logger = logging.getLogger(__name__)
+
class document_directory(osv.osv):
_name = 'document.directory'
_description = 'Directory'
root_id = objid.read(cr, uid, mid, ['res_id'])['res_id']
return root_id
except Exception, e:
- import netsvc
- logger = netsvc.Logger()
- logger.notifyChannel("document", netsvc.LOG_WARNING, 'Cannot set directory root:'+ str(e))
+ _logger.warning('Cannot set directory root:' + str(e))
return False
return objid.browse(cr, uid, mid, context=context).res_id
import nodes
from content_index import cntIndex
+_logger = logging.getLogger(__name__)
+
DMS_ROOT_PATH = tools.config.get('document_path', os.path.join(tools.config.get('root_path'), 'filestore'))
mime, icont = cntIndex.doIndex(None, filename=filename,
content_type=None, realfname=fname)
except Exception:
- logging.getLogger('document.storage').debug('Cannot index file:', exc_info=True)
+ _logger.debug('Cannot index file:', exc_info=True)
pass
try:
cr.commit()
cr.close()
except Exception:
- logging.getLogger('document.storage').warning('Cannot save file indexed content:', exc_info=True)
+ _logger.warning('Cannot save file indexed content:', exc_info=True)
elif self.mode in ('a', 'a+' ):
try:
cr.commit()
cr.close()
except Exception:
- logging.getLogger('document.storage').warning('Cannot save file appended content:', exc_info=True)
+ _logger.warning('Cannot save file appended content:', exc_info=True)
elif mode == 'a':
StringIO.__init__(self, None)
else:
- logging.getLogger('document.storage').error("Incorrect mode %s specified", mode)
+ _logger.error("Incorrect mode %s specified", mode)
raise IOError(errno.EINVAL, "Invalid file mode")
self.mode = mode
mime, icont = cntIndex.doIndex(data, filename=filename,
content_type=None, realfname=None)
except Exception:
- logging.getLogger('document.storage').debug('Cannot index file:', exc_info=True)
+ _logger.debug('Cannot index file:', exc_info=True)
pass
try:
(out, len(data), par.file_id))
cr.commit()
except Exception:
- logging.getLogger('document.storage').exception('Cannot update db file #%d for close:', par.file_id)
+ _logger.exception('Cannot update db file #%d for close:', par.file_id)
raise
finally:
cr.close()
elif mode == 'a':
StringIO.__init__(self, None)
else:
- logging.getLogger('document.storage').error("Incorrect mode %s specified", mode)
+ _logger.error("Incorrect mode %s specified", mode)
raise IOError(errno.EINVAL, "Invalid file mode")
self.mode = mode
mime, icont = cntIndex.doIndex(data, filename=filename,
content_type=None, realfname=None)
except Exception:
- logging.getLogger('document.storage').debug('Cannot index file:', exc_info=True)
+ _logger.debug('Cannot index file:', exc_info=True)
pass
try:
(base64.encodestring(data), len(data), par.file_id))
cr.commit()
except Exception:
- logging.getLogger('document.storage').exception('Cannot update db file #%d for close:', par.file_id)
+ _logger.exception('Cannot update db file #%d for close:', par.file_id)
raise
finally:
cr.close()
"""
_name = 'document.storage'
_description = 'Storage Media'
- _doclog = logging.getLogger('document')
_columns = {
'name': fields.char('Name', size=64, required=True, select=1),
# npath may contain empty elements, for root directory etc.
npath = filter(lambda x: x is not None, npath)
- # if self._debug:
- # self._doclog.debug('Npath: %s', npath)
for n in npath:
if n == '..':
raise ValueError("Invalid '..' element in path")
dpath += npath[:-1]
path = os.path.join(*dpath)
if not os.path.isdir(path):
- self._doclog.debug("Create dirs: %s", path)
+ _logger.debug("Create dirs: %s", path)
os.makedirs(path)
return path, npath
# try to fix their directory.
if mode in ('r','r+'):
if ira.file_size:
- self._doclog.warning( "ir.attachment #%d does not have a filename, but is at filestore, fix it!" % ira.id)
+ _logger.warning( "ir.attachment #%d does not have a filename, but is at filestore, fix it!" % ira.id)
raise IOError(errno.ENOENT, 'No file can be located')
else:
store_fname = self.__get_random_fname(boo.path)
# On a migrated db, some files may have the wrong storage type
# try to fix their directory.
if ira.file_size:
- self._doclog.warning( "ir.attachment #%d does not have a filename, but is at filestore, fix it!" % ira.id)
+ _logger.warning( "ir.attachment #%d does not have a filename, but is at filestore, fix it!" % ira.id)
return None
fpath = os.path.join(boo.path, ira.store_fname)
return file(fpath, 'rb').read()
# On a migrated db, some files may have the wrong storage type
# try to fix their directory.
if ira.file_size:
- self._doclog.warning("ir.attachment #%d does not have a filename, trying the name." %ira.id)
+ _logger.warning("ir.attachment #%d does not have a filename, trying the name." %ira.id)
# sfname = ira.name
fpath = os.path.join(boo.path,ira.store_fname or ira.name)
if os.path.exists(fpath):
if boo.readonly:
raise IOError(errno.EPERM, "Readonly medium")
- self._doclog.debug( "Store data for ir.attachment #%d" % ira.id)
+ _logger.debug( "Store data for ir.attachment #%d" % ira.id)
store_fname = None
fname = None
if boo.type == 'filestore':
fp.write(data)
finally:
fp.close()
- self._doclog.debug( "Saved data to %s" % fname)
+ _logger.debug( "Saved data to %s" % fname)
filesize = len(data) # os.stat(fname).st_size
# TODO Here, an old file would be left hanging.
except Exception, e:
- self._doclog.warning( "Couldn't save data to %s", path, exc_info=True)
+ _logger.warning( "Couldn't save data to %s", path, exc_info=True)
raise except_orm(_('Error!'), str(e))
elif boo.type == 'db':
filesize = len(data)
fp.write(data)
finally:
fp.close()
- self._doclog.debug("Saved data to %s", fname)
+ _logger.debug("Saved data to %s", fname)
filesize = len(data) # os.stat(fname).st_size
store_fname = os.path.join(*npath)
# TODO Here, an old file would be left hanging.
except Exception,e :
- self._doclog.warning("Couldn't save data:", exc_info=True)
+ _logger.warning("Couldn't save data:", exc_info=True)
raise except_orm(_('Error!'), str(e))
elif boo.type == 'virtual':
mime, icont = cntIndex.doIndex(data, ira.datas_fname,
ira.file_type or None, fname)
except Exception:
- self._doclog.debug('Cannot index file:', exc_info=True)
+ _logger.debug('Cannot index file:', exc_info=True)
pass
try:
file_node.content_type = mime
return True
except Exception, e :
- self._doclog.warning("Couldn't save data:", exc_info=True)
+ _logger.warning("Couldn't save data:", exc_info=True)
# should we really rollback once we have written the actual data?
# at the db case (only), that rollback would be safe
raise except_orm(_('Error at doc write!'), str(e))
try:
os.unlink(fname)
except Exception:
- self._doclog.warning("Could not remove file %s, please remove manually.", fname, exc_info=True)
+ _logger.warning("Could not remove file %s, please remove manually.", fname, exc_info=True)
else:
- self._doclog.warning("Unknown unlink key %s" % ktype)
+ _logger.warning("Unknown unlink key %s" % ktype)
return True
fname = ira.store_fname
if not fname:
- self._doclog.warning("Trying to rename a non-stored file")
+ _logger.warning("Trying to rename a non-stored file")
if fname != os.path.join(*npath):
- self._doclog.warning("inconsistency in realstore: %s != %s" , fname, repr(npath))
+ _logger.warning("inconsistency in realstore: %s != %s" , fname, repr(npath))
oldpath = os.path.join(path, npath[-1])
newpath = os.path.join(path, new_name)
break
par = par.parent_id
if file_node.storage_id != psto:
- self._doclog.debug('Cannot move file %r from %r to %r', file_node, file_node.parent, ndir_bro.name)
+ _logger.debug('Cannot move file %r from %r to %r', file_node, file_node.parent, ndir_bro.name)
raise NotImplementedError('Cannot move files between storage media')
if sbro.type in ('filestore', 'db', 'db64'):
fname = ira.store_fname
if not fname:
- self._doclog.warning("Trying to rename a non-stored file")
+ _logger.warning("Trying to rename a non-stored file")
if fname != os.path.join(*opath):
- self._doclog.warning("inconsistency in realstore: %s != %s" , fname, repr(opath))
+ _logger.warning("inconsistency in realstore: %s != %s" , fname, repr(opath))
oldpath = os.path.join(path, opath[-1])
npath = filter(lambda x: x is not None, npath)
newdir = os.path.join(*npath)
if not os.path.isdir(newdir):
- self._doclog.debug("Must create dir %s", newdir)
+ _logger.debug("Must create dir %s", newdir)
os.makedirs(newdir)
npath.append(opath[-1])
newpath = os.path.join(*npath)
- self._doclog.debug("Going to move %s from %s to %s", opath[-1], oldpath, newpath)
+ _logger.debug("Going to move %s from %s to %s", opath[-1], oldpath, newpath)
shutil.move(oldpath, newpath)
store_path = npath[1:] + [opath[-1],]
# root: if we are at the first directory of a ressource
#
-logger = logging.getLogger('doc2.nodes')
+_logger = logging.getLogger(__name__)
def _str2time(cre):
""" Convert a string with time representation (from db) into time (float)
if self.DAV_M_NS.has_key(ns):
prefix = self.DAV_M_NS[ns]
else:
- logger.debug('No namespace: %s ("%s")',ns, prop)
+ _logger.debug('No namespace: %s ("%s")',ns, prop)
return None
mname = prefix + "_" + prop.replace('-','_')
r = m(cr)
return r
except AttributeError:
- logger.debug('Property %s not supported' % prop, exc_info=True)
+ _logger.debug('Property %s not supported' % prop, exc_info=True)
return None
def get_dav_resourcetype(self, cr):
def create_child(self, cr, path, data=None):
""" Create a regular file under this node
"""
- logger.warning("Attempted to create a file under %r, not possible.", self)
+ _logger.warning("Attempted to create a file under %r, not possible.", self)
raise IOError(errno.EPERM, "Not allowed to create files here")
def create_child_collection(self, cr, objname):
""" Create a child collection (directory) under self
"""
- logger.warning("Attempted to create a collection under %r, not possible.", self)
+ _logger.warning("Attempted to create a collection under %r, not possible.", self)
raise IOError(errno.EPERM, "Not allowed to create folders here")
def rm(self, cr):
assert self.parent
if self.parent != ndir_node:
- logger.debug('Cannot move dir %r from %r to %r', self, self.parent, ndir_node)
+ _logger.debug('Cannot move dir %r from %r to %r', self, self.parent, ndir_node)
raise NotImplementedError('Cannot move dir to another dir')
ret = {}
def get_dav_eprop_DEPR(self, cr, ns, prop):
# Deprecated!
if ns != 'http://groupdav.org/' or prop != 'resourcetype':
- logger.warning("Who asked for %s:%s?" % (ns, prop))
+ _logger.warning("Who asked for %s:%s?" % (ns, prop))
return None
cntobj = self.context._dirobj.pool.get('document.directory.content')
uid = self.context.uid
ret = {}
if ndir_node and self.parent != ndir_node:
if not (isinstance(self.parent, node_dir) and isinstance(ndir_node, node_dir)):
- logger.debug('Cannot move file %r from %r to %r', self, self.parent, ndir_node)
+ _logger.debug('Cannot move file %r from %r to %r', self, self.parent, ndir_node)
raise NotImplementedError('Cannot move files between dynamic folders')
if not ndir_obj:
elif mode == 'a':
StringIO.__init__(self, None)
else:
- logging.getLogger('document.content').error("Incorrect mode %s specified", mode)
+ _logger.error("Incorrect mode %s specified", mode)
raise IOError(errno.EINVAL, "Invalid file mode")
self.mode = mode
raise NotImplementedError
cr.commit()
except Exception:
- logging.getLogger('document.content').exception('Cannot update db content #%d for close:', par.cnt_id)
+ _logger.exception('Cannot update db content #%d for close:', par.cnt_id)
raise
finally:
cr.close()
elif mode == 'a':
StringIO.__init__(self, None)
else:
- logging.getLogger('document.nodes').error("Incorrect mode %s specified", mode)
+ _logger.error("Incorrect mode %s specified", mode)
raise IOError(errno.EINVAL, "Invalid file mode")
self.mode = mode
raise NotImplementedError
cr.commit()
except Exception:
- logging.getLogger('document.nodes').exception('Cannot update db content #%d for close:', par.cnt_id)
+ _logger.exception('Cannot update db content #%d for close:', par.cnt_id)
raise
finally:
cr.close()
import sys, zipfile, xml.dom.minidom
import logging
+_logger = logging.getLogger(__name__)
+
def _to_unicode(s):
try:
return s.decode('utf-8')
(data, _) = pop.communicate()
return _to_unicode(data)
except OSError:
- logger = logging.getLogger('document.DocIndex')
- logger.warn("Failed attempt to execute antiword (MS Word reader). Antiword is necessary to index the file %s of MIME type %s. Detailed error available at DEBUG level.", fname, self._getMimeTypes()[0])
- logger.debug("Trace of the failed file indexing attempt: ", exc_info=True)
+ _logger.warning("Failed attempt to execute antiword (MS Word reader). Antiword is necessary to index the file %s of MIME type %s. Detailed error available at DEBUG level.", fname, self._getMimeTypes()[0])
+ _logger.debug("Trace of the failed file indexing attempt: ", exc_info=True)
return False
cntIndex.register(DocIndex())
#
##############################################################################
+import logging
import threading
import ftpserver
import authorizer
import netsvc
from tools import config
+_logger = logging.getLogger(__name__)
+
def start_server():
HOST = config.get('ftp_server_host', '127.0.0.1')
PORT = int(config.get('ftp_server_port', '8021'))
class ftp_server(threading.Thread):
def log(self, level, message):
- logger = netsvc.Logger()
- logger.notifyChannel('FTP', level, message)
+ _logger.log(level, message)
def run(self):
autho = authorizer.authorizer()
ftpd.serve_forever()
if HOST.lower() == 'none':
- netsvc.Logger().notifyChannel("FTP", netsvc.LOG_INFO, "\n Server FTP Not Started\n")
+ _logger.info("\n Server FTP Not Started\n")
else:
- netsvc.Logger().notifyChannel("FTP", netsvc.LOG_INFO, "\n Serving FTP on %s:%s\n" % (HOST, PORT))
+ _logger.info("\n Serving FTP on %s:%s\n", HOST, PORT)
ds = ftp_server()
ds.daemon = True
ds.start()
try:
import controllers
except ImportError:
- logging.getLogger('init.load').warn(
+ logging.getLogger(__name__).warning(
"""Could not load openerp-web section of EDI, EDI will not behave correctly
To fix, launch openerp-web in embedded mode""")
import netsvc
import openerp
-_logger = logging.getLogger('edi.service')
+_logger = logging.getLogger(__name__)
class edi(netsvc.ExportService):
from tools.translate import _
from tools.safe_eval import safe_eval as eval
+_logger = logging.getLogger(__name__)
+
EXTERNAL_ID_PATTERN = re.compile(r'^([^.:]+)(?::([^.]+))?\.(\S+)$')
EDI_VIEW_WEB_URL = '%s/edi/view?debug=1&db=%s&token=%s'
EDI_PROTOCOL_VERSION = 1 # arbitrary ever-increasing version number
return record_log.get('write_date') or record_log.get('create_date') or False
return False
-_logger = logging.getLogger('edi')
-
class edi_document(osv.osv):
_name = 'edi.document'
_description = 'EDI Document'
return record_id
-# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
\ No newline at end of file
+# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
from openerp import SUPERUSER_ID
from tools.translate import _
+_logger = logging.getLogger(__name__)
+
RES_PARTNER_ADDRESS_EDI_STRUCT = {
'name': True,
'email': True,
code, label = 'edi_generic', 'Generic Bank Type (auto-created for EDI)'
bank_code_ids = res_partner_bank_type.search(cr, uid, [('code','=',code)], context=context)
if not bank_code_ids:
- logging.getLogger('edi.res_partner').info('Normal bank account type is missing, creating '
+ _logger.info('Normal bank account type is missing, creating '
'a generic bank account type for EDI.')
self.res_partner_bank_type.create(cr, SUPERUSER_ID, {'name': label,
'code': label})
bank_name, ext_bank_id, context=import_ctx)
except osv.except_osv:
# failed to import it, try again with unrestricted default type
- logging.getLogger('edi.res_partner').warning('Failed to import bank account using'
+ _logger.warning('Failed to import bank account using'
'bank type: %s, ignoring', import_ctx['default_state'],
exc_info=True)
return address_id
import tools
from tools.translate import _
-logger = logging.getLogger('fetchmail')
+_logger = logging.getLogger(__name__)
class fetchmail_server(osv.osv):
"""Incoming POP/IMAP mail server account"""
connection = server.connect()
server.write({'state':'done'})
except Exception, e:
- logger.exception("Failed to connect to %s server %s", server.type, server.name)
+ _logger.exception("Failed to connect to %s server %s", server.type, server.name)
raise osv.except_osv(_("Connection test failed!"), _("Here is what we got instead:\n %s") % tools.ustr(e))
finally:
try:
mail_thread = self.pool.get('mail.thread')
action_pool = self.pool.get('ir.actions.server')
for server in self.browse(cr, uid, ids, context=context):
- logger.info('start checking for new emails on %s server %s', server.type, server.name)
+ _logger.info('start checking for new emails on %s server %s', server.type, server.name)
context.update({'fetchmail_server_id': server.id, 'server_type': server.type})
count = 0
if server.type == 'imap':
imap_server.store(num, '+FLAGS', '\\Seen')
cr.commit()
count += 1
- logger.info("fetched/processed %s email(s) on %s server %s", count, server.type, server.name)
+ _logger.info("fetched/processed %s email(s) on %s server %s", count, server.type, server.name)
except Exception, e:
- logger.exception("Failed to fetch mail from %s server %s", server.type, server.name)
+ _logger.exception("Failed to fetch mail from %s server %s", server.type, server.name)
finally:
if imap_server:
imap_server.close()
action_pool.run(cr, uid, [server.action_id.id], {'active_id': res_id, 'active_ids':[res_id]})
pop_server.dele(num)
cr.commit()
- logger.info("fetched/processed %s email(s) on %s server %s", numMsgs, server.type, server.name)
+ _logger.info("fetched/processed %s email(s) on %s server %s", numMsgs, server.type, server.name)
except Exception, e:
- logger.exception("Failed to fetch mail from %s server %s", server.type, server.name)
+ _logger.exception("Failed to fetch mail from %s server %s", server.type, server.name)
finally:
if pop_server:
pop_server.quit()
import logging
import StringIO
import traceback
-pp = pprint.PrettyPrinter(indent=4)
-
+_logger = logging.getLogger(__name__)
+pp = pprint.PrettyPrinter(indent=4)
class import_framework(Thread):
"""
self.context = context or {}
self.email = email_to_notify
self.table_list = []
- self.logger = logging.getLogger(module_name)
self.initialize()
"""
data_i is a map external field_name => value
and each data_i have a external id => in data_id['id']
"""
- self.logger.info(' Importing %s into %s' % (table, model))
+ _logger.info(' Importing %s into %s', table, model)
if not datas:
return (0, 'No data found')
mapping['id'] = 'id_new'
model_obj = self.obj.pool.get(model)
if not model_obj:
raise ValueError(_("%s is not a valid model name") % model)
- self.logger.debug(_(" fields imported : ") + str(fields))
+ _logger.debug(_(" fields imported : ") + str(fields))
(p, r, warning, s) = model_obj.import_data(self.cr, self.uid, fields, res, mode='update', current_module=self.module_name, noupdate=True, context=self.context)
for (field, field_name) in self_dependencies:
self._import_self_dependencies(model_obj, field, datas)
'auto_delete' : True})
email_obj.send(self.cr, self.uid, [email_id])
if error:
- self.logger.error(_("Import failed due to an unexpected error"))
+ _logger.error(_("Import failed due to an unexpected error"))
else:
- self.logger.info(_("Import finished, notification email sended"))
+ _logger.info(_("Import finished, notification email sended"))
def get_email_subject(self, result, error=False):
"""
-# -*- encoding: utf-8 -*-\r
-##############################################################################\r
-#\r
-# OpenERP, Open Source Management Solution\r
-# \r
-# Copyright (c) 2011 Noviat nv/sa (www.noviat.be). All rights reserved.\r
-# \r
-# This program is free software: you can redistribute it and/or modify\r
-# it under the terms of the GNU Affero General Public License as\r
-# published by the Free Software Foundation, either version 3 of the\r
-# License, or (at your option) any later version.\r
-#\r
-# This program is distributed in the hope that it will be useful,\r
-# but WITHOUT ANY WARRANTY; without even the implied warranty of\r
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\r
-# GNU Affero General Public License for more details.\r
-#\r
-# You should have received a copy of the GNU Affero General Public License\r
-# along with this program. If not, see <http://www.gnu.org/licenses/>.\r
-#\r
-##############################################################################\r
-\r
-import re, time, random\r
-from osv import fields, osv\r
-from tools.translate import _\r
-import netsvc\r
-logger=netsvc.Logger()\r
-\r
-"""\r
-account.invoice object:\r
- - Add support for Belgian structured communication\r
- - Rename 'reference' field labels to 'Communication'\r
-"""\r
-\r
-class account_invoice(osv.osv):\r
- _inherit = 'account.invoice'\r
-\r
- def _get_reference_type(self, cursor, user, context=None):\r
- """Add BBA Structured Communication Type and change labels from 'reference' into 'communication' """ \r
- res = super(account_invoice, self)._get_reference_type(cursor, user,\r
- context=context)\r
- res[[i for i,x in enumerate(res) if x[0] == 'none'][0]] = ('none', 'Free Communication')\r
- res.append(('bba', 'BBA Structured Communication'))\r
- #logger.notifyChannel('addons.'+self._name, netsvc.LOG_WARNING, 'reference_type = %s' %res ) \r
- return res\r
-\r
- def check_bbacomm(self, val):\r
- supported_chars = '0-9+*/ '\r
- pattern = re.compile('[^' + supported_chars + ']')\r
- if pattern.findall(val or ''):\r
- return False \r
- bbacomm = re.sub('\D', '', val or '')\r
- if len(bbacomm) == 12:\r
- base = int(bbacomm[:10])\r
- mod = base % 97 or 97 \r
- if mod == int(bbacomm[-2:]):\r
- return True\r
- return False\r
-\r
- def _check_communication(self, cr, uid, ids):\r
- for inv in self.browse(cr, uid, ids):\r
- if inv.reference_type == 'bba':\r
- return self.check_bbacomm(inv.reference)\r
- return True\r
-\r
- def onchange_partner_id(self, cr, uid, ids, type, partner_id,\r
- date_invoice=False, payment_term=False, partner_bank_id=False, company_id=False): \r
- result = super(account_invoice, self).onchange_partner_id(cr, uid, ids, type, partner_id,\r
- date_invoice, payment_term, partner_bank_id, company_id)\r
-# reference_type = self.default_get(cr, uid, ['reference_type'])['reference_type']\r
-# logger.notifyChannel('addons.'+self._name, netsvc.LOG_WARNING, 'partner_id %s' % partner_id)\r
- reference = False\r
- reference_type = 'none'\r
- if partner_id: \r
- if (type == 'out_invoice'):\r
- reference_type = self.pool.get('res.partner').browse(cr, uid, partner_id).out_inv_comm_type\r
- if reference_type:\r
- algorithm = self.pool.get('res.partner').browse(cr, uid, partner_id).out_inv_comm_algorithm\r
- if not algorithm:\r
- algorithm = 'random' \r
- reference = self.generate_bbacomm(cr, uid, ids, type, reference_type, algorithm, partner_id, '')['value']['reference']\r
- res_update = { \r
- 'reference_type': reference_type or 'none',\r
- 'reference': reference,\r
- }\r
- result['value'].update(res_update)\r
- return result \r
-\r
- def generate_bbacomm(self, cr, uid, ids, type, reference_type, algorithm, partner_id, reference):\r
- partner_obj = self.pool.get('res.partner')\r
- reference = reference or '' \r
- if (type == 'out_invoice'):\r
- if reference_type == 'bba':\r
- if not algorithm:\r
- if partner_id:\r
- algorithm = partner_obj.browse(cr, uid, partner_id).out_inv_comm_algorithm\r
- if not algorithm:\r
- if not algorithm: \r
- algorithm = 'random'\r
- if algorithm == 'date':\r
- if not self.check_bbacomm(reference):\r
- doy = time.strftime('%j')\r
- year = time.strftime('%Y')\r
- seq = '001'\r
- seq_ids = self.search(cr, uid, \r
- [('type', '=', 'out_invoice'), ('reference_type', '=', 'bba'),\r
- ('reference', 'like', '+++%s/%s/%%' % (doy, year))], order='reference')\r
- if seq_ids:\r
- prev_seq = int(self.browse(cr, uid, seq_ids[-1]).reference[12:15])\r
- if prev_seq < 999:\r
- seq = '%03d' % (prev_seq + 1)\r
- else:\r
- raise osv.except_osv(_('Warning!'),\r
- _('The daily maximum of outgoing invoices with an automatically generated BBA Structured Communications has been exceeded!' \\r
- '\nPlease create manually a unique BBA Structured Communication.'))\r
- bbacomm = doy + year + seq\r
- base = int(bbacomm)\r
- mod = base % 97 or 97 \r
- reference = '+++%s/%s/%s%02d+++' % (doy, year, seq, mod)\r
- elif algorithm == 'partner_ref':\r
- if not self.check_bbacomm(reference):\r
- partner_ref = self.pool.get('res.partner').browse(cr, uid, partner_id).ref\r
- partner_ref_nr = re.sub('\D', '', partner_ref or '')\r
- if (len(partner_ref_nr) < 3) or (len(partner_ref_nr) > 7):\r
- raise osv.except_osv(_('Warning!'),\r
- _('The Partner should have a 3-7 digit Reference Number for the generation of BBA Structured Communications!' \\r
- '\nPlease correct the Partner record.')) \r
- else:\r
- partner_ref_nr = partner_ref_nr.ljust(7, '0')\r
- seq = '001'\r
- seq_ids = self.search(cr, uid, \r
- [('type', '=', 'out_invoice'), ('reference_type', '=', 'bba'),\r
- ('reference', 'like', '+++%s/%s/%%' % (partner_ref_nr[:3], partner_ref_nr[3:]))], order='reference') \r
- if seq_ids:\r
- prev_seq = int(self.browse(cr, uid, seq_ids[-1]).reference[12:15])\r
- if prev_seq < 999:\r
- seq = '%03d' % (prev_seq + 1)\r
- else:\r
- raise osv.except_osv(_('Warning!'),\r
- _('The daily maximum of outgoing invoices with an automatically generated BBA Structured Communications has been exceeded!' \\r
- '\nPlease create manually a unique BBA Structured Communication.')) \r
- bbacomm = partner_ref_nr + seq\r
- base = int(bbacomm)\r
- mod = base % 97 or 97 \r
- reference = '+++%s/%s/%s%02d+++' % (partner_ref_nr[:3], partner_ref_nr[3:], seq, mod)\r
- elif algorithm == 'random':\r
- if not self.check_bbacomm(reference):\r
- base = random.randint(1, 9999999999)\r
- bbacomm = str(base).rjust(7, '0')\r
- base = int(bbacomm)\r
- mod = base % 97 or 97\r
- mod = str(mod).rjust(2, '0') \r
- reference = '+++%s/%s/%s%s+++' % (bbacomm[:3], bbacomm[3:7], bbacomm[7:], mod)\r
- else:\r
- raise osv.except_osv(_('Error!'),\r
- _("Unsupported Structured Communication Type Algorithm '%s' !" \\r
- "\nPlease contact your OpenERP support channel.") % algorithm) \r
- return {'value': {'reference': reference}} \r
- \r
- def create(self, cr, uid, vals, context=None):\r
- if vals.has_key('reference_type'):\r
- reference_type = vals['reference_type']\r
- if reference_type == 'bba': \r
- if vals.has_key('reference'):\r
- bbacomm = vals['reference']\r
- else:\r
- raise osv.except_osv(_('Warning!'),\r
- _('Empty BBA Structured Communication!' \\r
- '\nPlease fill in a unique BBA Structured Communication.')) \r
- if self.check_bbacomm(bbacomm):\r
- reference = re.sub('\D', '', bbacomm)\r
- vals['reference'] = '+++' + reference[0:3] + '/' + reference[3:7] + '/' + reference[7:] + '+++' \r
- same_ids = self.search(cr, uid, \r
- [('type', '=', 'out_invoice'), ('reference_type', '=', 'bba'),\r
- ('reference', '=', vals['reference'])])\r
- if same_ids:\r
- raise osv.except_osv(_('Warning!'),\r
- _('The BBA Structured Communication has already been used!' \\r
- '\nPlease create manually a unique BBA Structured Communication.')) \r
- return super(account_invoice, self).create(cr, uid, vals, context=context) \r
-\r
- def write(self, cr, uid, ids, vals, context={}):\r
- if isinstance(ids, (int, long)):\r
- ids = [ids]\r
- for inv in self.browse(cr, uid, ids, context): \r
- if vals.has_key('reference_type'):\r
- reference_type = vals['reference_type']\r
- else: \r
- reference_type = inv.reference_type or ''\r
- if reference_type == 'bba': \r
- if vals.has_key('reference'):\r
- bbacomm = vals['reference']\r
- else:\r
- bbacomm = inv.reference or ''\r
- if self.check_bbacomm(bbacomm):\r
- reference = re.sub('\D', '', bbacomm)\r
- vals['reference'] = '+++' + reference[0:3] + '/' + reference[3:7] + '/' + reference[7:] + '+++' \r
- same_ids = self.search(cr, uid, \r
- [('id', '!=', inv.id), ('type', '=', 'out_invoice'), \r
- ('reference_type', '=', 'bba'), ('reference', '=', vals['reference'])])\r
- if same_ids:\r
- raise osv.except_osv(_('Warning!'),\r
- _('The BBA Structured Communication has already been used!' \\r
- '\nPlease create manually a unique BBA Structured Communication.')) \r
- return super(account_invoice, self).write(cr, uid, ids, vals, context) \r
-\r
- _columns = {\r
- 'reference': fields.char('Communication', size=64, help="The partner reference of this invoice."),\r
- 'reference_type': fields.selection(_get_reference_type, 'Communication Type',\r
- required=True),\r
- }\r
- \r
- _constraints = [\r
- (_check_communication, 'Invalid BBA Structured Communication !', ['Communication']),\r
- ]\r
-\r
-account_invoice()\r
+# -*- encoding: utf-8 -*-
+##############################################################################
+#
+# OpenERP, Open Source Management Solution
+#
+# Copyright (c) 2011 Noviat nv/sa (www.noviat.be). All rights reserved.
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+##############################################################################
+
+import random
+import re
+import time
+
+from osv import fields, osv
+from tools.translate import _
+
+"""
+account.invoice object:
+ - Add support for Belgian structured communication
+ - Rename 'reference' field labels to 'Communication'
+"""
+
+class account_invoice(osv.osv):
+ _inherit = 'account.invoice'
+
+ def _get_reference_type(self, cursor, user, context=None):
+ """Add BBA Structured Communication Type and change labels from 'reference' into 'communication' """
+ res = super(account_invoice, self)._get_reference_type(cursor, user,
+ context=context)
+ res[[i for i,x in enumerate(res) if x[0] == 'none'][0]] = ('none', 'Free Communication')
+ res.append(('bba', 'BBA Structured Communication'))
+ return res
+
+ def check_bbacomm(self, val):
+ supported_chars = '0-9+*/ '
+ pattern = re.compile('[^' + supported_chars + ']')
+ if pattern.findall(val or ''):
+ return False
+ bbacomm = re.sub('\D', '', val or '')
+ if len(bbacomm) == 12:
+ base = int(bbacomm[:10])
+ mod = base % 97 or 97
+ if mod == int(bbacomm[-2:]):
+ return True
+ return False
+
+ def _check_communication(self, cr, uid, ids):
+ for inv in self.browse(cr, uid, ids):
+ if inv.reference_type == 'bba':
+ return self.check_bbacomm(inv.reference)
+ return True
+
+ def onchange_partner_id(self, cr, uid, ids, type, partner_id,
+ date_invoice=False, payment_term=False, partner_bank_id=False, company_id=False):
+ result = super(account_invoice, self).onchange_partner_id(cr, uid, ids, type, partner_id,
+ date_invoice, payment_term, partner_bank_id, company_id)
+# reference_type = self.default_get(cr, uid, ['reference_type'])['reference_type']
+ reference = False
+ reference_type = 'none'
+ if partner_id:
+ if (type == 'out_invoice'):
+ reference_type = self.pool.get('res.partner').browse(cr, uid, partner_id).out_inv_comm_type
+ if reference_type:
+ algorithm = self.pool.get('res.partner').browse(cr, uid, partner_id).out_inv_comm_algorithm
+ if not algorithm:
+ algorithm = 'random'
+ reference = self.generate_bbacomm(cr, uid, ids, type, reference_type, algorithm, partner_id, '')['value']['reference']
+ res_update = {
+ 'reference_type': reference_type or 'none',
+ 'reference': reference,
+ }
+ result['value'].update(res_update)
+ return result
+
+ def generate_bbacomm(self, cr, uid, ids, type, reference_type, algorithm, partner_id, reference):
+ partner_obj = self.pool.get('res.partner')
+ reference = reference or ''
+ if (type == 'out_invoice'):
+ if reference_type == 'bba':
+ if not algorithm:
+ if partner_id:
+ algorithm = partner_obj.browse(cr, uid, partner_id).out_inv_comm_algorithm
+ if not algorithm:
+ if not algorithm:
+ algorithm = 'random'
+ if algorithm == 'date':
+ if not self.check_bbacomm(reference):
+ doy = time.strftime('%j')
+ year = time.strftime('%Y')
+ seq = '001'
+ seq_ids = self.search(cr, uid,
+ [('type', '=', 'out_invoice'), ('reference_type', '=', 'bba'),
+ ('reference', 'like', '+++%s/%s/%%' % (doy, year))], order='reference')
+ if seq_ids:
+ prev_seq = int(self.browse(cr, uid, seq_ids[-1]).reference[12:15])
+ if prev_seq < 999:
+ seq = '%03d' % (prev_seq + 1)
+ else:
+ raise osv.except_osv(_('Warning!'),
+ _('The daily maximum of outgoing invoices with an automatically generated BBA Structured Communications has been exceeded!' \
+ '\nPlease create manually a unique BBA Structured Communication.'))
+ bbacomm = doy + year + seq
+ base = int(bbacomm)
+ mod = base % 97 or 97
+ reference = '+++%s/%s/%s%02d+++' % (doy, year, seq, mod)
+ elif algorithm == 'partner_ref':
+ if not self.check_bbacomm(reference):
+ partner_ref = self.pool.get('res.partner').browse(cr, uid, partner_id).ref
+ partner_ref_nr = re.sub('\D', '', partner_ref or '')
+ if (len(partner_ref_nr) < 3) or (len(partner_ref_nr) > 7):
+ raise osv.except_osv(_('Warning!'),
+ _('The Partner should have a 3-7 digit Reference Number for the generation of BBA Structured Communications!' \
+ '\nPlease correct the Partner record.'))
+ else:
+ partner_ref_nr = partner_ref_nr.ljust(7, '0')
+ seq = '001'
+ seq_ids = self.search(cr, uid,
+ [('type', '=', 'out_invoice'), ('reference_type', '=', 'bba'),
+ ('reference', 'like', '+++%s/%s/%%' % (partner_ref_nr[:3], partner_ref_nr[3:]))], order='reference')
+ if seq_ids:
+ prev_seq = int(self.browse(cr, uid, seq_ids[-1]).reference[12:15])
+ if prev_seq < 999:
+ seq = '%03d' % (prev_seq + 1)
+ else:
+ raise osv.except_osv(_('Warning!'),
+ _('The daily maximum of outgoing invoices with an automatically generated BBA Structured Communications has been exceeded!' \
+ '\nPlease create manually a unique BBA Structured Communication.'))
+ bbacomm = partner_ref_nr + seq
+ base = int(bbacomm)
+ mod = base % 97 or 97
+ reference = '+++%s/%s/%s%02d+++' % (partner_ref_nr[:3], partner_ref_nr[3:], seq, mod)
+ elif algorithm == 'random':
+ if not self.check_bbacomm(reference):
+ base = random.randint(1, 9999999999)
+ bbacomm = str(base).rjust(7, '0')
+ base = int(bbacomm)
+ mod = base % 97 or 97
+ mod = str(mod).rjust(2, '0')
+ reference = '+++%s/%s/%s%s+++' % (bbacomm[:3], bbacomm[3:7], bbacomm[7:], mod)
+ else:
+ raise osv.except_osv(_('Error!'),
+ _("Unsupported Structured Communication Type Algorithm '%s' !" \
+ "\nPlease contact your OpenERP support channel.") % algorithm)
+ return {'value': {'reference': reference}}
+
+ def create(self, cr, uid, vals, context=None):
+ if vals.has_key('reference_type'):
+ reference_type = vals['reference_type']
+ if reference_type == 'bba':
+ if vals.has_key('reference'):
+ bbacomm = vals['reference']
+ else:
+ raise osv.except_osv(_('Warning!'),
+ _('Empty BBA Structured Communication!' \
+ '\nPlease fill in a unique BBA Structured Communication.'))
+ if self.check_bbacomm(bbacomm):
+ reference = re.sub('\D', '', bbacomm)
+ vals['reference'] = '+++' + reference[0:3] + '/' + reference[3:7] + '/' + reference[7:] + '+++'
+ same_ids = self.search(cr, uid,
+ [('type', '=', 'out_invoice'), ('reference_type', '=', 'bba'),
+ ('reference', '=', vals['reference'])])
+ if same_ids:
+ raise osv.except_osv(_('Warning!'),
+ _('The BBA Structured Communication has already been used!' \
+ '\nPlease create manually a unique BBA Structured Communication.'))
+ return super(account_invoice, self).create(cr, uid, vals, context=context)
+
+ def write(self, cr, uid, ids, vals, context={}):
+ if isinstance(ids, (int, long)):
+ ids = [ids]
+ for inv in self.browse(cr, uid, ids, context):
+ if vals.has_key('reference_type'):
+ reference_type = vals['reference_type']
+ else:
+ reference_type = inv.reference_type or ''
+ if reference_type == 'bba':
+ if vals.has_key('reference'):
+ bbacomm = vals['reference']
+ else:
+ bbacomm = inv.reference or ''
+ if self.check_bbacomm(bbacomm):
+ reference = re.sub('\D', '', bbacomm)
+ vals['reference'] = '+++' + reference[0:3] + '/' + reference[3:7] + '/' + reference[7:] + '+++'
+ same_ids = self.search(cr, uid,
+ [('id', '!=', inv.id), ('type', '=', 'out_invoice'),
+ ('reference_type', '=', 'bba'), ('reference', '=', vals['reference'])])
+ if same_ids:
+ raise osv.except_osv(_('Warning!'),
+ _('The BBA Structured Communication has already been used!' \
+ '\nPlease create manually a unique BBA Structured Communication.'))
+ return super(account_invoice, self).write(cr, uid, ids, vals, context)
+
+ _columns = {
+ 'reference': fields.char('Communication', size=64, help="The partner reference of this invoice."),
+ 'reference_type': fields.selection(_get_reference_type, 'Communication Type',
+ required=True),
+ }
+
+ _constraints = [
+ (_check_communication, 'Invalid BBA Structured Communication !', ['Communication']),
+ ]
+
+account_invoice()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
#
##############################################################################
-from osv import fields, osv
import time
+
+from osv import fields, osv
from tools.translate import _
-import netsvc
-logger=netsvc.Logger()
class res_partner(osv.osv):
""" add field to indicate default 'Communication Type' on customer invoices """
#
##############################################################################
-from osv import fields, osv
+import logging
import os
+
+from osv import fields, osv
from tools.translate import _
-import netsvc
-logger=netsvc.Logger()
+
+_logger = logging.getLogger(__name__)
class wizard_multi_charts_accounts(osv.osv_memory):
"""
if context.get('lang') == lang:
self.pool.get(out_obj._name).write(cr, uid, out_ids[j], {in_field: value[in_id]})
else:
- logger.notifyChannel('addons.'+self._name, netsvc.LOG_WARNING,
- 'Language: %s. Translation from template: there is no translation available for %s!' %(lang, src[in_id]))#out_obj._name))
+ _logger.warning(
+ 'Language: %s. Translation from template: there is no translation available for %s!',
+ lang, src[in_id])
return True
def execute(self, cr, uid, ids, context=None):
from osv import fields
from tools.translate import _
-_logger = logging.getLogger('mail')
+_logger = logging.getLogger(__name__)
def format_date_tz(date, tz=None):
if not date:
import email
from email.utils import parsedate
-import logging
import xmlrpclib
from osv import osv, fields
from tools.translate import _
from mail_message import decode, to_email
-_logger = logging.getLogger('mail')
-
class mail_thread(osv.osv):
'''Mixin model, meant to be inherited by any model that needs to
act as a discussion topic on which messages can be attached.
##############################################################################
from osv import fields, osv
-import netsvc
class messages(osv.osv):
"""
Message from one user to another within a project
"""
_name = 'project.messages'
- logger = netsvc.Logger()
_columns = {
'create_date': fields.datetime('Creation Date', readonly=True),
from mako.lookup import TemplateLookup
from mako import exceptions
-import netsvc
import pooler
from report_helper import WebKitHelper
from report.report_sxw import *
from tools.translate import _
from osv.osv import except_osv
-logger = logging.getLogger('report_webkit')
+_logger = logging.getLogger(__name__)
def mako_template(text):
"""Build a Mako template.
htmls.append(html)
except Exception, e:
msg = exceptions.text_error_template().render()
- logger.error(msg)
+ _logger.error(msg)
raise except_osv(_('Webkit render'), msg)
else:
try :
htmls.append(html)
except Exception, e:
msg = exceptions.text_error_template().render()
- logger.error(msg)
+ _logger.error(msg)
raise except_osv(_('Webkit render'), msg)
head_mako_tpl = mako_template(header)
try :
**self.parser_instance.localcontext)
except:
msg = exceptions.text_error_template().render()
- logger.error(msg)
+ _logger.error(msg)
raise except_osv(_('Webkit render'), msg)
if report_xml.webkit_debug :
try :
**self.parser_instance.localcontext)
except Exception, e:
msg = exceptions.text_error_template().render()
- logger.error(msg)
+ _logger.error(msg)
raise except_osv(_('Webkit render'), msg)
return (deb, 'html')
bin = self.get_lib(cursor, uid, company.id)
import decimal_precision as dp
import logging
+_logger = logging.getLogger(__name__)
#----------------------------------------------------------
# Incoterms
# so we ROLLBACK to the SAVEPOINT to restore the transaction to its earlier
# state, we return False as if the products were not available, and log it:
cr.execute("ROLLBACK TO stock_location_product_reserve")
- logger = logging.getLogger('stock.location')
- logger.warn("Failed attempt to reserve %s x product %s, likely due to another transaction already in progress. Next attempt is likely to work. Detailed error available at DEBUG level.", product_qty, product_id)
- logger.debug("Trace of the failed product reservation attempt: ", exc_info=True)
+ _logger.warning("Failed attempt to reserve %s x product %s, likely due to another transaction already in progress. Next attempt is likely to work. Detailed error available at DEBUG level.", product_qty, product_id)
+ _logger.debug("Trace of the failed product reservation attempt: ", exc_info=True)
return False
# XXX TODO: rewrite this with one single query, possibly even the quantity conversion