Commit 786c41f8 authored by Sylvain Thénault

[repo] use logilab.db instead of lgc.adbh/lgc.db/lgc.sqlgen/indexer, test new date extraction functions
parent 3b79a0fc91db
......@@ -998,7 +998,7 @@ application_configuration = deprecated('use instance_configuration')(instance_co
_EXT_REGISTERED = False
def register_stored_procedures():
from logilab.common.adbh import FunctionDescr
from logilab.db import FunctionDescr
from rql.utils import register_function, iter_funcnode_variables
global _EXT_REGISTERED
......@@ -1010,8 +1010,7 @@ def register_stored_procedures():
supported_backends = ('postgres', 'sqlite',)
rtype = 'String'
@classmethod
def st_description(cls, funcnode, mainindex, tr):
def st_description(self, funcnode, mainindex, tr):
return ', '.join(sorted(term.get_description(mainindex, tr)
for term in iter_funcnode_variables(funcnode)))
......@@ -1023,6 +1022,7 @@ def register_stored_procedures():
register_function(CONCAT_STRINGS) # XXX bw compat
class GROUP_CONCAT(CONCAT_STRINGS):
supported_backends = ('mysql', 'postgres', 'sqlite',)
......@@ -1033,8 +1033,7 @@ def register_stored_procedures():
supported_backends = ('postgres', 'sqlite',)
rtype = 'String'
@classmethod
def st_description(cls, funcnode, mainindex, tr):
def st_description(self, funcnode, mainindex, tr):
return funcnode.children[0].get_description(mainindex, tr)
register_function(LIMIT_SIZE)
......@@ -1046,7 +1045,6 @@ def register_stored_procedures():
register_function(TEXT_LIMIT_SIZE)
class FSPATH(FunctionDescr):
supported_backends = ('postgres', 'sqlite',)
rtype = 'Bytes'
......
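The hunks above move the stored-procedure declarations from logilab.common.adbh to logilab.db and turn st_description into a plain instance method. A minimal sketch of that pattern, assuming the import paths and signatures shown in this diff (the DUMMY_CONCAT name is invented for illustration):

from logilab.db import FunctionDescr
from rql.utils import register_function, iter_funcnode_variables

class DUMMY_CONCAT(FunctionDescr):
    # illustrative function only; the real declarations live in register_stored_procedures()
    supported_backends = ('postgres', 'sqlite',)
    rtype = 'String'

    def st_description(self, funcnode, mainindex, tr):
        # instance method in the new API (previously a classmethod)
        return ', '.join(sorted(term.get_description(mainindex, tr)
                                for term in iter_funcnode_variables(funcnode)))

register_function(DUMMY_CONCAT)
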
......@@ -33,7 +33,7 @@ XB-Python-Version: ${python:Versions}
Conflicts: cubicweb-multisources
Replaces: cubicweb-multisources
Provides: cubicweb-multisources
Depends: ${python:Depends}, cubicweb-common (= ${source:Version}), cubicweb-ctl (= ${source:Version}), python-indexer (>= 0.6.1), python-psycopg2 | python-mysqldb | python-pysqlite2
Depends: ${python:Depends}, cubicweb-common (= ${source:Version}), cubicweb-ctl (= ${source:Version}), python-logilab-database, python-psycopg2 | python-mysqldb | python-pysqlite2
Recommends: pyro, cubicweb-documentation (= ${source:Version})
Description: server part of the CubicWeb framework
CubicWeb is a semantic web application framework.
......
......@@ -7,9 +7,7 @@
"""
__docformat__ = "restructuredtext en"
from logilab.common.adbh import get_adv_func_helper
from indexer import get_indexer
from logilab.db import get_db_helper
from cubicweb.req import RequestSessionBase
from cubicweb.cwvreg import CubicWebVRegistry
......@@ -200,12 +198,7 @@ class FakeRepo(object):
class FakeSource(object):
dbhelper = get_adv_func_helper('sqlite')
indexer = get_indexer('sqlite', 'UTF8')
dbhelper.fti_uid_attr = indexer.uid_attr
dbhelper.fti_table = indexer.table
dbhelper.fti_restriction_sql = indexer.restriction_sql
dbhelper.fti_need_distinct_query = indexer.need_distinct
dbhelper = get_db_helper('sqlite')
def __init__(self, uri):
self.uri = uri
......
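In the fake test source a single get_db_helper call replaces the old helper/indexer pair; the assumption suggested by the removed lines is that the full-text-index attributes formerly copied over from the indexer package now live on the helper itself. A hedged sketch:

from logilab.db import get_db_helper

dbhelper = get_db_helper('sqlite')
# assumption based on the removed lines above: these attributes are now
# provided directly by the helper instead of being copied from an indexer
print dbhelper.fti_table, dbhelper.fti_uid_attr
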
......@@ -16,7 +16,7 @@ if __name__ == '__main__':
cw_gen = ModuleGenerator('cubicweb', '../..')
cw_gen.generate("../book/en/annexes/api_cubicweb.rst",
EXCLUDE_DIRS + ('cwdesklets', 'misc', 'skel', 'skeleton'))
for modname in ('indexer', 'logilab', 'rql', 'yams'):
for modname in ('logilab', 'rql', 'yams'):
cw_gen = ModuleGenerator(modname, '../../../' + modname)
cw_gen.generate("../book/en/annexes/api_%s.rst" % modname,
EXCLUDE_DIRS + ('tools',))
......@@ -894,12 +894,12 @@ class Entity(AppObject, dict):
"""used by the full text indexer to get words to index
this method should only be used on the repository side since it depends
on the indexer package
on the logilab.db package
:rtype: list
:return: the list of indexable word of this entity
"""
from indexer.query_objects import tokenize
from logilab.db.fti import tokenize
# take care to cases where we're modyfying the schema
pending = self._cw.transaction_data.setdefault('pendingrdefs', set())
words = []
......
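The entity word-extraction method above now pulls tokenize from logilab.db.fti instead of indexer.query_objects. A hedged sketch of what the call is expected to do (only the import path is taken from this diff; the sample text is invented):

from logilab.db.fti import tokenize

# expected to return the list of normalized words to feed the full-text index
words = tokenize(u'CubicWeb is a semantic web application framework')
print words
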
......@@ -145,12 +145,9 @@ def init_repository(config, interactive=True, drop=False, vreg=None):
# can't skip entities table even if system source doesn't support them,
# they are used sometimes by generated sql. Keeping them empty is much
# simpler than fixing this...
if sqlcnx.logged_user != source['db-user']:
schemasql = sqlschema(schema, driver, user=source['db-user'])
else:
schemasql = sqlschema(schema, driver)
#skip_entities=[str(e) for e in schema.entities()
# if not repo.system_source.support_entity(str(e))])
schemasql = sqlschema(schema, driver)
#skip_entities=[str(e) for e in schema.entities()
# if not repo.system_source.support_entity(str(e))])
sqlexec(schemasql, execute, pbtitle=_title)
sqlcursor.close()
sqlcnx.commit()
......@@ -237,8 +234,8 @@ def initialize_schema(config, schema, mhandler, event='create'):
config.set_hooks_mode(oldmode)
# sqlite'stored procedures have to be registered at connexion opening time
SQL_CONNECT_HOOKS = {}
# sqlite'stored procedures have to be registered at connection opening time
from logilab.db import SQL_CONNECT_HOOKS
# add to this set relations which should have their add security checking done
# *BEFORE* adding the actual relation (done after by default)
......
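SQL_CONNECT_HOOKS is no longer a module-level dict defined here; it is imported from logilab.db, so per-driver connection hooks are shared with the library. A sketch of registering one, following the sqlite registration visible near the end of this diff (the hook body is invented):

from logilab.db import SQL_CONNECT_HOOKS

def init_my_sqlite_connexion(cnx):
    # hypothetical hook run each time a sqlite connection is opened
    cnx.create_function('NOOP', 1, lambda value: value)

SQL_CONNECT_HOOKS.setdefault('sqlite', []).append(init_my_sqlite_connexion)
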
......@@ -70,13 +70,9 @@ def reindex_entities(schema, session, withpb=True):
# to be updated due to the reindexation
repo = session.repo
cursor = session.pool['system']
if not repo.system_source.indexer.has_fti_table(cursor):
from indexer import get_indexer
if not repo.system_source.dbhelper.has_fti_table(cursor):
print 'no text index table'
indexer = get_indexer(repo.system_source.dbdriver)
# XXX indexer.init_fti(cursor) once index 0.7 is out
indexer.init_extensions(cursor)
cursor.execute(indexer.sql_init_fti())
dbhelper.init_fti(cursor)
repo.config.disabled_hooks_categories.add('metadata')
repo.config.disabled_hooks_categories.add('integrity')
repo.system_source.do_fti = True # ensure full-text indexation is activated
......
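The reindexation helper now asks the db helper directly whether the full-text table exists and lets it (re)create it. A minimal sketch of that step, assuming the has_fti_table/init_fti names shown above and a system cursor obtained elsewhere:

def ensure_fti_table(dbhelper, cursor):
    # recreate the full-text index table if it is missing, as reindex_entities does
    if not dbhelper.has_fti_table(cursor):
        print 'no text index table'
        dbhelper.init_fti(cursor)
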
......@@ -63,9 +63,11 @@ def source_cnx(source, dbname=None, special_privs=False, verbose=True):
password = getpass('password: ')
extra_args = source.get('db-extra-arguments')
extra = extra_args and {'extra_args': extra_args} or {}
return get_connection(driver, dbhost, dbname, user, password=password,
port=source.get('db-port'),
**extra)
cnx = get_connection(driver, dbhost, dbname, user, password=password,
port=source.get('db-port'),
**extra)
cnx.logged_user = logged_user
return cnx
def system_source_cnx(source, dbms_system_base=False,
special_privs='CREATE/DROP DATABASE', verbose=True):
......@@ -75,8 +77,8 @@ def system_source_cnx(source, dbms_system_base=False,
create/drop the instance database)
"""
if dbms_system_base:
from logilab.common.adbh import get_adv_func_helper
system_db = get_adv_func_helper(source['db-driver']).system_database()
from logilab.db import get_db_helper
system_db = get_db_helper(source['db-driver']).system_database()
return source_cnx(source, system_db, special_privs=special_privs, verbose=verbose)
return source_cnx(source, special_privs=special_privs, verbose=verbose)
......@@ -85,11 +87,11 @@ def _db_sys_cnx(source, what, db=None, user=None, verbose=True):
or a database
"""
import logilab.common as lgp
from logilab.common.adbh import get_adv_func_helper
from logilab.db import get_db_helper
lgp.USE_MX_DATETIME = False
special_privs = ''
driver = source['db-driver']
helper = get_adv_func_helper(driver)
helper = get_db_helper(driver)
if user is not None and helper.users_support:
special_privs += '%s USER' % what
if db is not None:
......@@ -202,10 +204,10 @@ class RepositoryDeleteHandler(CommandHandler):
def cleanup(self):
"""remove instance's configuration and database"""
from logilab.common.adbh import get_adv_func_helper
from logilab.db import get_db_helper
source = self.config.sources()['system']
dbname = source['db-name']
helper = get_adv_func_helper(source['db-driver'])
helper = get_db_helper(source['db-driver'])
if ASK.confirm('Delete database %s ?' % dbname):
user = source['db-user'] or None
cnx = _db_sys_cnx(source, 'DROP DATABASE', user=user)
......@@ -285,8 +287,7 @@ class CreateInstanceDBCommand(Command):
)
def run(self, args):
"""run the command with its specific arguments"""
from logilab.common.adbh import get_adv_func_helper
from indexer import get_indexer
from logilab.db import get_db_helper
verbose = self.get('verbose')
automatic = self.get('automatic')
appid = pop_arg(args, msg='No instance specified !')
......@@ -295,7 +296,7 @@ class CreateInstanceDBCommand(Command):
dbname = source['db-name']
driver = source['db-driver']
create_db = self.config.create_db
helper = get_adv_func_helper(driver)
helper = get_db_helper(driver)
if driver == 'sqlite':
if os.path.exists(dbname) and automatic or \
ASK.confirm('Database %s already exists -- do you want to drop it ?' % dbname):
......@@ -330,8 +331,7 @@ class CreateInstanceDBCommand(Command):
raise
cnx = system_source_cnx(source, special_privs='LANGUAGE C', verbose=verbose)
cursor = cnx.cursor()
indexer = get_indexer(driver)
indexer.init_extensions(cursor)
dbhelper.init_fti_extensions(cursor)
# postgres specific stuff
if driver == 'postgres':
# install plpythonu/plpgsql language if not installed by the cube
......
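Throughout serverctl the driver-specific helper is now obtained with get_db_helper; it answers for the maintenance database name, user support and full-text extensions. A sketch of those calls, with the driver name as an example and the method names as they appear in this changeset:

from logilab.db import get_db_helper

helper = get_db_helper('postgres')
system_db = helper.system_database()   # maintenance database used to create/drop instances
if helper.users_support:
    print 'backend supports per-user privileges'
# at instance-creation time the full-text extensions are installed through the
# same helper, e.g. helper.init_fti_extensions(cursor)
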
......@@ -20,12 +20,6 @@ class ConnectionWrapper(object):
self.source = source
self._cnx = None
@property
def logged_user(self):
if self._cnx is None:
self._cnx = self.source._sqlcnx
return self._cnx.logged_user
def cursor(self):
if self._cnx is None:
self._cnx = self.source._sqlcnx
......
......@@ -21,10 +21,8 @@ from logilab.common.compat import any
from logilab.common.cache import Cache
from logilab.common.decorators import cached, clear_cache
from logilab.common.configuration import Method
from logilab.common.adbh import get_adv_func_helper
from logilab.common.shellutils import getlogin
from indexer import get_indexer
from logilab.db import get_db_helper
from cubicweb import UnknownEid, AuthenticationError, Binary, server
from cubicweb.cwconfig import CubicWebNoAppConfiguration
......@@ -151,16 +149,9 @@ class NativeSQLSource(SQLAdapterMixIn, AbstractSource):
*args, **kwargs)
# sql generator
self._rql_sqlgen = self.sqlgen_class(appschema, self.dbhelper,
self.encoding, ATTR_MAP.copy())
ATTR_MAP.copy())
# full text index helper
self.do_fti = not repo.config['delay-full-text-indexation']
if self.do_fti:
self.indexer = get_indexer(self.dbdriver, self.encoding)
# XXX should go away with logilab.db
self.dbhelper.fti_uid_attr = self.indexer.uid_attr
self.dbhelper.fti_table = self.indexer.table
self.dbhelper.fti_restriction_sql = self.indexer.restriction_sql
self.dbhelper.fti_need_distinct_query = self.indexer.need_distinct
# sql queries cache
self._cache = Cache(repo.config['rql-cache-size'])
self._temp_table_data = {}
......@@ -207,7 +198,7 @@ class NativeSQLSource(SQLAdapterMixIn, AbstractSource):
pool.pool_set()
# check full text index availibility
if self.do_fti:
if not self.indexer.has_fti_table(pool['system']):
if not self.dbhelper.has_fti_table(pool['system']):
if not self.repo.config.creating:
self.critical('no text index table')
self.do_fti = False
......@@ -321,8 +312,7 @@ class NativeSQLSource(SQLAdapterMixIn, AbstractSource):
assert isinstance(sql, basestring), repr(sql)
try:
cursor = self.doexec(session, sql, args)
except (self.dbapi_module.OperationalError,
self.dbapi_module.InterfaceError):
except (self.OperationalError, self.InterfaceError):
# FIXME: better detection of deconnection pb
self.info("request failed '%s' ... retry with a new cursor", sql)
session.pool.reconnect(self)
......@@ -342,7 +332,7 @@ class NativeSQLSource(SQLAdapterMixIn, AbstractSource):
prefix='ON THE FLY temp data insertion into %s from' % table)
# generate sql queries if we are able to do so
sql, query_args = self._rql_sqlgen.generate(union, args, varmap)
query = 'INSERT INTO %s %s' % (table, sql.encode(self.encoding))
query = 'INSERT INTO %s %s' % (table, sql.encode(self._dbencoding))
self.doexec(session, query, self.merge_args(args, query_args))
def manual_insert(self, results, table, session):
......@@ -359,7 +349,7 @@ class NativeSQLSource(SQLAdapterMixIn, AbstractSource):
row = tuple(row)
for index, cell in enumerate(row):
if isinstance(cell, Binary):
cell = self.binary(cell.getvalue())
cell = self._binary(cell.getvalue())
kwargs[str(index)] = cell
kwargs_list.append(kwargs)
self.doexecmany(session, query, kwargs_list)
......@@ -614,7 +604,7 @@ class NativeSQLSource(SQLAdapterMixIn, AbstractSource):
index
"""
try:
self.indexer.cursor_unindex_object(eid, session.pool['system'])
self.dbhelper.cursor_unindex_object(eid, session.pool['system'])
except Exception: # let KeyboardInterrupt / SystemExit propagate
self.exception('error while unindexing %s', eid)
......@@ -625,8 +615,8 @@ class NativeSQLSource(SQLAdapterMixIn, AbstractSource):
try:
# use cursor_index_object, not cursor_reindex_object since
# unindexing done in the FTIndexEntityOp
self.indexer.cursor_index_object(entity.eid, entity,
session.pool['system'])
self.dbhelper.cursor_index_object(entity.eid, entity,
session.pool['system'])
except Exception: # let KeyboardInterrupt / SystemExit propagate
self.exception('error while reindexing %s', entity)
......@@ -659,7 +649,7 @@ class FTIndexEntityOp(hook.LateOperation):
def sql_schema(driver):
helper = get_adv_func_helper(driver)
helper = get_db_helper(driver)
tstamp_col_type = helper.TYPE_MAPPING['Datetime']
schema = """
/* Create the repository's system database */
......@@ -692,7 +682,7 @@ CREATE INDEX deleted_entities_extid_idx ON deleted_entities(extid);
def sql_drop_schema(driver):
helper = get_adv_func_helper(driver)
helper = get_db_helper(driver)
return """
%s
DROP TABLE entities;
......
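In the native source the former indexer object disappears: the db helper itself carries the FTI entry points (has_fti_table, cursor_index_object, cursor_unindex_object). A hedged sketch of the index/unindex steps, factored as standalone functions since the real calls live inside NativeSQLSource:

def fti_index_entity(dbhelper, cursor, entity):
    # mirrors the indexing call in NativeSQLSource above
    dbhelper.cursor_index_object(entity.eid, entity, cursor)

def fti_unindex_entity(dbhelper, cursor, eid):
    # mirrors the unindexing call in NativeSQLSource above
    dbhelper.cursor_unindex_object(eid, cursor)
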
......@@ -332,10 +332,10 @@ class SQLGenerator(object):
protected by a lock
"""
def __init__(self, schema, dbms_helper, dbencoding='UTF-8', attrmap=None):
def __init__(self, schema, dbms_helper, attrmap=None):
self.schema = schema
self.dbms_helper = dbms_helper
self.dbencoding = dbencoding
self.dbencoding = dbms_helper.dbencoding
self.keyword_map = {'NOW' : self.dbms_helper.sql_current_timestamp,
'TODAY': self.dbms_helper.sql_current_date,
}
......@@ -977,10 +977,9 @@ class SQLGenerator(object):
def visit_function(self, func):
"""generate SQL name for a function"""
# function_description will check function is supported by the backend
sqlname = self.dbms_helper.func_sqlname(func.name)
return '%s(%s)' % (sqlname, ', '.join(c.accept(self)
for c in func.children))
# func_sql_call will check function is supported by the backend
return self.dbms_helper.func_as_sql(func.name,
[c.accept(self) for c in func.children])
def visit_constant(self, constant):
"""generate SQL name for a constant"""
......
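visit_function now delegates both the backend-support check and the rendering to the helper's func_as_sql, which takes the function name and the already-generated SQL of its arguments. An illustrative call (helper, column name and expected output are examples, not taken from the library's documentation):

from logilab.db import get_db_helper

helper = get_db_helper('postgres')
sql = helper.func_as_sql('UPPER', ['_P.cw_name'])
# expected to produce something like: UPPER(_P.cw_name)
print sql
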
......@@ -92,9 +92,8 @@ class BytesFileSystemStorage(Storage):
cu = sysource.doexec(entity._cw,
'SELECT cw_%s FROM cw_%s WHERE cw_eid=%s' % (
attr, entity.__regid__, entity.eid))
dbmod = sysource.dbapi_module
return dbmod.process_value(cu.fetchone()[0], [None, dbmod.BINARY],
binarywrap=str)
return sysource._process_value(cu.fetchone()[0], [None, dbmod.BINARY],
binarywrap=str)
class AddFileOp(Operation):
......
......@@ -11,21 +11,17 @@ import os
import subprocess
from datetime import datetime, date
import logilab.common as lgc
from logilab.common import db
from logilab import db, common as lgc
from logilab.common.shellutils import ProgressBar
from logilab.common.adbh import get_adv_func_helper
from logilab.common.sqlgen import SQLGenerator
from logilab.common.date import todate, todatetime
from indexer import get_indexer
from logilab.db.sqlgen import SQLGenerator
from cubicweb import Binary, ConfigurationError
from cubicweb.uilib import remove_html_tags
from cubicweb.schema import PURE_VIRTUAL_RTYPES
from cubicweb.server import SQL_CONNECT_HOOKS
from cubicweb.server.utils import crypt_password
from rql.utils import RQL_FUNCTIONS_REGISTRY
lgc.USE_MX_DATETIME = False
SQL_PREFIX = 'cw_'
......@@ -77,8 +73,8 @@ def sqlgrants(schema, driver, user,
w(native.grant_schema(user, set_owner))
w('')
if text_index:
indexer = get_indexer(driver)
w(indexer.sql_grant_user(user))
dbhelper = db.get_db_helper(driver)
w(dbhelper.sql_grant_user_on_fti(user))
w('')
w(grant_schema(schema, user, set_owner, skip_entities=skip_entities, prefix=SQL_PREFIX))
return '\n'.join(output)
......@@ -96,11 +92,10 @@ def sqlschema(schema, driver, text_index=True,
w = output.append
w(native.sql_schema(driver))
w('')
dbhelper = db.get_db_helper(driver)
if text_index:
indexer = get_indexer(driver)
w(indexer.sql_init_fti())
w(dbhelper.sql_init_fti())
w('')
dbhelper = get_adv_func_helper(driver)
w(schema2sql(dbhelper, schema, prefix=SQL_PREFIX,
skip_entities=skip_entities, skip_relations=skip_relations))
if dbhelper.users_support and user:
......@@ -120,8 +115,8 @@ def sqldropschema(schema, driver, text_index=True,
w(native.sql_drop_schema(driver))
w('')
if text_index:
indexer = get_indexer(driver)
w(indexer.sql_drop_fti())
dbhelper = db.get_db_helper(driver)
w(dbhelper.sql_drop_fti())
w('')
w(dropschema2sql(schema, prefix=SQL_PREFIX,
skip_entities=skip_entities,
......@@ -137,55 +132,42 @@ class SQLAdapterMixIn(object):
def __init__(self, source_config):
try:
self.dbdriver = source_config['db-driver'].lower()
self.dbname = source_config['db-name']
dbname = source_config['db-name']
except KeyError:
raise ConfigurationError('missing some expected entries in sources file')
self.dbhost = source_config.get('db-host')
dbhost = source_config.get('db-host')
port = source_config.get('db-port')
self.dbport = port and int(port) or None
self.dbuser = source_config.get('db-user')
self.dbpasswd = source_config.get('db-password')
self.encoding = source_config.get('db-encoding', 'UTF-8')
self.dbapi_module = db.get_dbapi_compliant_module(self.dbdriver)
self.dbdriver_extra_args = source_config.get('db-extra-arguments')
self.binary = self.dbapi_module.Binary
self.dbhelper = self.dbapi_module.adv_func_helper
dbport = port and int(port) or None
dbuser = source_config.get('db-user')
dbpassword = source_config.get('db-password')
dbencoding = source_config.get('db-encoding', 'UTF-8')
dbextraargs = source_config.get('db-extra-arguments')
self.dbhelper = db.get_db_helper(self.dbdriver)
self.dbhelper.record_connection_info(dbname, dbhost, dbport, dbuser,
dbpassword, dbextraargs,
dbencoding)
self.sqlgen = SQLGenerator()
def get_connection(self, user=None, password=None):
# copy back some commonly accessed attributes
dbapi_module = self.dbhelper.dbapi_module
self.OperationalError = dbapi_module.OperationalError
self.InterfaceError = dbapi_module.InterfaceError
self._binary = dbapi_module.Binary
self._process_value = dbapi_module.process_value
self._dbencoding = dbencoding
def get_connection(self):
"""open and return a connection to the database"""
if user or self.dbuser:
self.info('connecting to %s@%s for user %s', self.dbname,
self.dbhost or 'localhost', user or self.dbuser)
else:
self.info('connecting to %s@%s', self.dbname,
self.dbhost or 'localhost')
extra = {}
if self.dbdriver_extra_args:
extra = {'extra_args': self.dbdriver_extra_args}
cnx = self.dbapi_module.connect(self.dbhost, self.dbname,
user or self.dbuser,
password or self.dbpasswd,
port=self.dbport,
**extra)
init_cnx(self.dbdriver, cnx)
#self.dbapi_module.type_code_test(cnx.cursor())
return cnx
return self.dbhelper.get_connection()
def backup_to_file(self, backupfile):
for cmd in self.dbhelper.backup_commands(self.dbname, self.dbhost,
self.dbuser, backupfile,
dbport=self.dbport,
for cmd in self.dbhelper.backup_commands(backupfile,
keepownership=False):
if _run_command(cmd):
if not confirm(' [Failed] Continue anyway?', default='n'):
raise Exception('Failed command: %s' % cmd)
def restore_from_file(self, backupfile, confirm, drop=True):
for cmd in self.dbhelper.restore_commands(self.dbname, self.dbhost,
self.dbuser, backupfile,
self.encoding,
dbport=self.dbport,
for cmd in self.dbhelper.restore_commands(backupfile,
keepownership=False,
drop=drop):
if _run_command(cmd):
......@@ -198,7 +180,7 @@ class SQLAdapterMixIn(object):
for key, val in args.iteritems():
# convert cubicweb binary into db binary
if isinstance(val, Binary):
val = self.binary(val.getvalue())
val = self._binary(val.getvalue())
newargs[key] = val
# should not collide
newargs.update(query_args)
......@@ -208,10 +190,12 @@ class SQLAdapterMixIn(object):
def process_result(self, cursor):
"""return a list of CubicWeb compliant values from data in the given cursor
"""
# begin bind to locals for optimization
descr = cursor.description
encoding = self.encoding
process_value = self.dbapi_module.process_value
encoding = self._dbencoding
process_value = self._process_value
binary = Binary
# /end
results = cursor.fetchall()
for i, line in enumerate(results):
result = []
......@@ -242,14 +226,14 @@ class SQLAdapterMixIn(object):
value = value.getvalue()
else:
value = crypt_password(value)
value = self.binary(value)
value = self._binary(value)
# XXX needed for sqlite but I don't think it is for other backends
elif atype == 'Datetime' and isinstance(value, date):
value = todatetime(value)
elif atype == 'Date' and isinstance(value, datetime):
value = todate(value)
elif isinstance(value, Binary):
value = self.binary(value.getvalue())
value = self._binary(value.getvalue())
attrs[SQL_PREFIX+str(attr)] = value
return attrs
......@@ -259,12 +243,8 @@ from cubicweb import set_log_methods
set_log_methods(SQLAdapterMixIn, getLogger('cubicweb.sqladapter'))
def init_sqlite_connexion(cnx):
# XXX should not be publicly exposed
#def comma_join(strings):
# return ', '.join(strings)
#cnx.create_function("COMMA_JOIN", 1, comma_join)
class concat_strings(object):
class group_concat(object):
def __init__(self):
self.values = []
def step(self, value):
......@@ -272,10 +252,7 @@ def init_sqlite_connexion(cnx):
self.values.append(value)
def finalize(self):
return ', '.join(self.values)
# renamed to GROUP_CONCAT in cubicweb 2.45, keep old name for bw compat for
# some time
cnx.create_aggregate("CONCAT_STRINGS", 1, concat_strings)
cnx.create_aggregate("GROUP_CONCAT", 1, concat_strings)
cnx.create_aggregate("GROUP_CONCAT", 1, group_concat)
def _limit_size(text, maxsize, format='text/plain'):
if len(text) < maxsize:
......@@ -293,9 +270,9 @@ def init_sqlite_connexion(cnx):
def limit_size2(text, maxsize):
return _limit_size(text, maxsize)
cnx.create_function("TEXT_LIMIT_SIZE", 2, limit_size2)
import yams.constraints
if hasattr(yams.constraints, 'patch_sqlite_decimal'):
yams.constraints.patch_sqlite_decimal()
yams.constraints.patch_sqlite_decimal()
def fspath(eid, etype, attr):
try:
......@@ -320,10 +297,5 @@ def init_sqlite_connexion(cnx):
raise
cnx.create_function('_fsopen', 1, _fsopen)
sqlite_hooks = SQL_CONNECT_HOOKS.setdefault('sqlite', [])
sqlite_hooks.append(init_sqlite_connexion)
def init_cnx(driver, cnx):
for hook in SQL_CONNECT_HOOKS.get(driver, ()):
hook(cnx)
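
The connection workflow in SQLAdapterMixIn changes shape: all connection parameters are recorded once on the helper via record_connection_info, after which get_connection(), backup_commands() and restore_commands() no longer take host/name/user arguments. A sketch of that sequence with example values (argument order taken from the __init__ above):

from logilab.db import get_db_helper

dbhelper = get_db_helper('sqlite')
dbhelper.record_connection_info('instance.sqlite',  # db-name (a file for sqlite)
                                None, None,          # db-host, db-port
                                None, None,          # db-user, db-password
                                None,                # db-extra-arguments
                                'UTF-8')             # db-encoding
cnx = dbhelper.get_connection()
cursor = cnx.cursor()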