Commit e3bbd996 authored by Denis Laxalde's avatar Denis Laxalde
Browse files

Get rid of cubicweb <= 3.23 monkeypatches

Related to #17133718.
parent 2b4d0044cccb
......@@ -22,7 +22,6 @@ from six import text_type, binary_type
import cubicweb
# (major, minor) of the installed cubicweb release, e.g. (3, 23).
CW_VERSION = tuple(int(part) for part in cubicweb.__version__.split('.')[:2])
# True when running against cubicweb 3.23 or later (monkeypatches below
# are only needed on earlier releases).
POST_323 = CW_VERSION >= (3, 23)
def register_skos_concept_rdf_list_mapping(reg):
......
# copyright 2015-2016 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# copyright 2015 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr -- mailto:contact@logilab.fr
#
# This program is free software: you can redistribute it and/or modify it under
......@@ -14,91 +14,3 @@
# You should have received a copy of the GNU Lesser General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
"""cubicweb-skos site customizations"""
# pylint: disable=wrong-import-order
from datetime import datetime
from logilab.common.decorators import monkeypatch
from cubes.skos import POST_323
if not POST_323:
# asynchronous source synchronization from the UI (https://www.cubicweb.org/ticket/10468967)
# other part in views/__init__.py
from pytz import utc
from cubicweb import ObjectNotFound
from cubicweb.server.sources import datafeed
@monkeypatch(datafeed.DataFeedSource)
def pull_data(self, cnx, force=False, raise_on_error=False, import_log_eid=None):
    """Launch synchronization of the source if needed.

    This method is responsible to handle commit/rollback on the given
    connection.
    """
    # Skip when the source is still fresh (unless forced), or when another
    # synchronization currently holds the lock.  Short-circuit order matters:
    # fresh() is only consulted when force is false, and the lock is only
    # attempted when a pull is actually due.
    if (not force and self.fresh()) \
            or not self.acquire_synchronization_lock(cnx, force):
        return {}
    try:
        return self._pull_data(cnx, force, raise_on_error, import_log_eid)
    finally:
        # rollback first in case there is some dirty transaction remaining
        cnx.rollback()
        self.release_synchronization_lock(cnx)
@monkeypatch(datafeed.DataFeedSource)
def _pull_data(self, cnx, force=False, raise_on_error=False, import_log_eid=None):
    """Perform the actual pull, recording progress in a CWDataImport log.

    Returns the parser's stats dict (empty when no parser is registered).
    """
    log = self.init_import_log(cnx, import_log_eid)
    source_uris = self.source_cwuris(cnx)
    try:
        parser = self._get_parser(cnx, sourceuris=source_uris, import_log=log)
    except ObjectNotFound:
        # no parser registered for this source: nothing to pull
        return {}
    had_errors = self.process_urls(parser, self.urls, raise_on_error)
    if had_errors:
        self.warning("some error occurred, don't attempt to delete entities")
    else:
        # only delete entities no longer present upstream on a clean run
        parser.handle_deletion(self.config, cnx, source_uris)
    self.update_latest_retrieval(cnx)
    stats = parser.stats
    for action, verb in (('created', 'added'), ('updated', 'updated')):
        entities = stats.get(action)
        if entities:
            log.record_info('%s %s entities' % (verb, len(entities)))
    log.write_log(cnx, end_timestamp=self.latest_retrieval)
    cnx.commit()
    return stats
@monkeypatch(datafeed.DataFeedSource)
def init_import_log(self, cnx, import_log_eid=None, **kwargs):
    """Return the CWDataImport entity tracking this pull.

    Reuse the entity designated by `import_log_eid` when given, otherwise
    create a fresh one linked to this source.
    """
    start = datetime.now(tz=utc)
    if import_log_eid is not None:
        import_log = cnx.entity_from_eid(import_log_eid)
        import_log.cw_set(start_timestamp=start, **kwargs)
    else:
        import_log = cnx.create_entity('CWDataImport', cw_import_of=self,
                                       start_timestamp=start, **kwargs)
    cnx.commit()
    import_log.init()
    return import_log
@monkeypatch(datafeed.DataFeedSource)
def acquire_synchronization_lock(self, cnx, force=False):
    """Attempt to mark this source as being synchronized.

    Return True when the lock was acquired, False when a concurrent
    synchronization is already in progress (unless it outlived
    `max_lock_lifetime`, in which case the stale lock is taken over).
    """
    # XXX race condition until WHERE of SET queries is executed using
    # 'SELECT FOR UPDATE'
    now = datetime.now(tz=utc)
    # when forced, steal the lock regardless of its age
    maxdt = now if force else now - self.max_lock_lifetime
    acquired = bool(cnx.execute(
        'SET X in_synchronization %(now)s WHERE X eid %(x)s, '
        'X in_synchronization NULL OR X in_synchronization < %(maxdt)s',
        {'x': self.eid, 'now': now, 'maxdt': maxdt}))
    if not acquired:
        self.error('concurrent synchronization detected, skip pull')
    cnx.commit()
    return acquired
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment