'addons_path': None,
'root_path': None,
'debug_mode': False,
- 'commit_mode': False,
+ 'import_partial': "",
'pidfile': None,
'logfile': None,
'secure': False,
group.add_option("--db_host", dest="db_host", help="specify the database host")
group.add_option("--db_port", dest="db_port", help="specify the database port")
group.add_option("--db_maxconn", dest="db_maxconn", default='64', help="specify the maximum number of physical connections to postgresql")
# default='' (not False) so the optparse default matches the declared
# 'import_partial' default; both are falsy, so unset stays a no-op.
group.add_option("-C", "--commit-mode", dest="import_partial", help="Several commit during one file importation. Use this for big data importation. Provide a filename to store intermediate state.", default='')
parser.add_option_group(group)
group = optparse.OptionGroup(parser, "Internationalisation options",
self.options['pidfile'] = False
for arg in ('interface', 'port', 'db_name', 'db_user', 'db_password', 'db_host',
- 'db_port', 'logfile', 'pidfile', 'secure', 'smtp_ssl', 'email_from', 'smtp_server', 'smtp_user', 'smtp_password', 'price_accuracy', 'netinterface', 'netport', 'db_maxconn', 'commit_mode', 'addons_path'):
+ 'db_port', 'logfile', 'pidfile', 'secure', 'smtp_ssl', 'email_from', 'smtp_server', 'smtp_user', 'smtp_password', 'price_accuracy', 'netinterface', 'netport', 'db_maxconn', 'import_partial', 'addons_path'):
if getattr(opt, arg):
self.options[arg] = getattr(opt, arg)
id = self.pool.get('ir.model.data')._update(cr, self.uid, rec_model, self.module, res, rec_id or False, not self.isnoupdate(data_node), noupdate=self.isnoupdate(data_node), mode=self.mode )
if rec_id:
self.idref[rec_id] = int(id)
- if config.get('commit_mode', False):
+ if config.get('import_partial', False):
cr.commit()
return rec_model, id
pool = pooler.get_pool(cr.dbname)
+ import pickle
+ if config.get('import_partial'):
+ data = pickle.load(config.get('import_partial'))
+ if fname in data:
+ if not data[fname]:
+ return
input = StringIO.StringIO(csvcontent)
reader = csv.reader(input, quotechar='"', delimiter=',')
fields = reader.next()
continue
datas.append( map(lambda x:x.decode('utf8').encode('utf8'), line))
pool.get(model).import_data(cr, uid, fields, datas,mode, module,noupdate)
+ if config.get('import_partial'):
+ data = pickle.load(config.get('import_partial'))
+ data[fname] = 0
+ pickle.save(config.get('import_partial'), data)
#
# xml import/export