mirror of
https://github.com/yweber/lodel2.git
synced 2026-03-19 09:42:01 +01:00
sqlwrapper deletion and sqlsetup enhancement
Moved the database initialisation functionality to sqlsetup and fixed all the code to handle the sqlwrapper deletion
This commit is contained in:
parent
43bb085667
commit
a3b9daf319
10 changed files with 189 additions and 733 deletions
|
|
@ -1,85 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
import os
|
||||
import logging as logger
|
||||
|
||||
import sqlalchemy as sql
|
||||
|
||||
from Database.sqlwrapper import SqlWrapper
|
||||
|
||||
class SqlQueryBuilder():
    """ Helper that incrementally builds an sqlalchemy statement on a table.

    The statement under construction is stored in self.proxy; call Execute()
    to run it through the wrapper's read or write connection.
    """

    def __init__(self, sqlwrapper, table):
        """ Instanciate a new query builder.
        @param sqlwrapper SqlWrapper: wrapper providing the db connections
        @param table: the sqlalchemy Table targeted by Update/Insert
        @raise TypeError if sqlwrapper is not a SqlWrapper
        """
        if not type(sqlwrapper) is SqlWrapper:
            logger.error("Unable to instanciate, bad argument...")
            raise TypeError('Expected a SqlWrapper not a ' + str(type(sqlwrapper)))
        self.table = table
        self.sqlwrapper = sqlwrapper
        self.proxy = None  # statement currently being built

    def Select(self, arg):
        """ Alias for select clause
        @param arg iterable: arg must be a Python list or other iterable and contain eather table name/type or colum/literal_column
        """
        self.proxy = sql.select(arg)
        return self.proxy

    def Where(self, arg):
        """ Alias for where clause
        @param arg SQL expression object or string
        """
        self.proxy = self.proxy.where(arg)

    def From(self, arg):
        """ Alias for select_from clause
        @param arg Table or table('tablename') or join clause
        """
        self.proxy = self.proxy.select_from(arg)

    def Update(self):
        """ Start an UPDATE statement on the builder's table """
        self.proxy = self.table.update()

    def Insert(self):
        """ Start an INSERT statement on the builder's table """
        self.proxy = self.table.insert()

    def Delete(self):
        """ Turn the current statement into a DELETE """
        self.proxy = self.proxy.delete()

    def Value(self, arg):
        """ Allow you to specifies the VALUES or SET clause of the statement.
        @param arg: VALUES or SET clause
        """
        self.proxy = self.proxy.values(arg)

    def Execute(self, bindedparam):
        """ Execute the sql query constructed in the proxy and return the result.

        If no query then return False.
        @param bindedparam list: list of dict, the values for the bindparam clauses (if any)
        @return: query result on success else False
        """
        # Guard first: the original code stringified self.proxy before testing
        # it for None, so the "no query" branch could never be reached.
        if self.proxy is None:
            return False

        query_str = str(self.proxy)
        # SELECT statements run on the read connection, everything else on the
        # write connection.  (The original compared str.split() -- a list --
        # to 'SELECT', which is always False, sending SELECTs to wconn.)
        if query_str.split()[0] == 'SELECT':
            conn = self.sqlwrapper.rconn
        else:
            conn = self.sqlwrapper.wconn

        if 'bindparam' in query_str:
            # Test separately the presence of the bindparam clause and the type
            # of the corresponding argument: if the clause is present but the
            # argument is malformed we must return False, not run the query.
            if type(bindedparam) is list and type(bindedparam[0]) is dict:
                return conn.execute(self.proxy, bindedparam)
            return False
        return conn.execute(self.proxy)
|
|
@ -1,120 +1,191 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
from Database.sqlwrapper import SqlWrapper
|
||||
import sqlalchemy as sql
|
||||
|
||||
class SQLSetup(object):
|
||||
|
||||
def initDb(self, dbconfname = 'default', alchemy_logs=None):
|
||||
db = SqlWrapper(read_db = dbconfname, write_db = dbconfname, alchemy_logs=alchemy_logs)
|
||||
tables = self.get_schema()
|
||||
db.dropAll()
|
||||
db.createAllFromConf(tables)
|
||||
|
||||
def get_schema(self):
|
||||
tables = []
|
||||
|
||||
default_columns = [
|
||||
{"name":"uid", "type":"INTEGER", "extra":{"foreignkey":"uids.uid", "nullable":False, "primarykey":True}},
|
||||
{"name":"name", "type":"VARCHAR(50)", "extra":{"nullable":False, "unique":True}},
|
||||
{"name":"string", "type":"TEXT"},
|
||||
{"name":"help", "type":"TEXT"},
|
||||
{"name":"rank", "type":"INTEGER"},
|
||||
{"name":"date_create", "type":"DATETIME"},
|
||||
{"name":"date_update", "type":"DATETIME"},
|
||||
]
|
||||
|
||||
# Table listing all objects created by lodel, giving them an unique id
|
||||
uids = {
|
||||
"name":"uids",
|
||||
"columns":[
|
||||
{"name":"uid", "type":"INTEGER", "extra":{"nullable":False, "primarykey":True, 'autoincrement':True}},
|
||||
{"name":"table", "type":"VARCHAR(50)"}
|
||||
]
|
||||
}
|
||||
tables.append(uids)
|
||||
import re #Converting string to sqlalchemy types
|
||||
from Database import sqlutils
|
||||
|
||||
|
||||
# Table listing the classes
|
||||
em_class = {"name":"em_class"}
|
||||
em_class['columns'] = default_columns + [
|
||||
{"name":"classtype", "type":"VARCHAR(50)"},
|
||||
{"name":"sortcolumn", "type":"VARCHAR(50)", "extra":{"default":"rank"}},
|
||||
{"name":"icon", "type":"INTEGER"},
|
||||
]
|
||||
tables.append(em_class)
|
||||
def init_db(dbconfname = 'default', alchemy_logs=None, schema=None):
    """ (Re)initialise the database: drop every existing table then create the
    tables described by the schema.

    @param dbconfname str: name of the db configuration to use
    @param alchemy_logs bool|None: forwarded to the engine factory to toggle sqlalchemy logging
    @param schema list|None: list of table description dicts; defaults to get_schema()
    """
    dbe = sqlutils.getEngine(dbconfname, alchemy_logs)
    meta = sqlutils.meta(dbe)
    meta.reflect()
    meta.drop_all(dbe)
    # refresh meta after the drop (maybe useless -- TODO confirm)
    meta = sqlutils.meta(dbe)
    meta.reflect()

    if schema is None:
        schema = get_schema()

    for table in schema:
        # Everything except 'name' and 'columns' is passed as a table option
        topt = table.copy()
        del topt['columns']
        name = topt.pop('name')
        cur_table = sql.Table(name, meta, **topt)
        for col in table['columns']:
            cur_table.append_column(create_column(**col))

    meta.create_all(bind=dbe)
|
||||
|
||||
|
||||
|
||||
# Table listing the types
|
||||
em_type = {"name":"em_type"}
|
||||
em_type['columns'] = default_columns + [
|
||||
{"name":"class_id", "type":"INTEGER", "extra":{"foreignkey":"em_class.uid", "nullable":False}},
|
||||
{"name":"sortcolumn", "type":"VARCHAR(50)", "extra":{"default":"rank"}},
|
||||
{"name":"icon", "type":"INTEGER"},
|
||||
]
|
||||
tables.append(em_type)
|
||||
def get_schema():
|
||||
tables = []
|
||||
|
||||
# relation between types: which type can be a child of another
|
||||
em_type_hierarchy = {"name":"em_type_hierarchy"}
|
||||
em_type_hierarchy['columns'] = [
|
||||
{"name":"superior_id", "type":"INTEGER", "extra":{"foreignkey":"em_type.uid", "nullable":False, "primarykey":True}},
|
||||
{"name":"subordinate_id", "type":"INTEGER", "extra":{"foreignkey":"em_type.uid", "nullable":False, "primarykey":True}},
|
||||
{"name":"nature", "type":"VARCHAR(50)", "extra":{"primarykey":True}},
|
||||
]
|
||||
tables.append(em_type_hierarchy)
|
||||
default_columns = [
|
||||
{"name":"uid", "type":"INTEGER", "extra":{"foreignkey":"uids.uid", "nullable":False, "primarykey":True}},
|
||||
{"name":"name", "type":"VARCHAR(50)", "extra":{"nullable":False, "unique":True}},
|
||||
{"name":"string", "type":"TEXT"},
|
||||
{"name":"help", "type":"TEXT"},
|
||||
{"name":"rank", "type":"INTEGER"},
|
||||
{"name":"date_create", "type":"DATETIME"},
|
||||
{"name":"date_update", "type":"DATETIME"},
|
||||
]
|
||||
|
||||
# Table listing the fieldgroups of a class
|
||||
em_fieldgroup = {"name":"em_fieldgroup"}
|
||||
em_fieldgroup['columns'] = default_columns + [
|
||||
{"name":"class_id", "type":"INTEGER", "extra":{"foreignkey":"em_class.uid", "nullable":False}},
|
||||
]
|
||||
tables.append(em_fieldgroup)
|
||||
|
||||
# Table listing the fields of a fieldgroup
|
||||
em_field = {"name":"em_field"}
|
||||
em_field['columns'] = default_columns + [
|
||||
{"name":"fieldtype", "type":"VARCHAR(50)", "extra":{"nullable":False}},
|
||||
{"name":"fieldgroup_id", "type":"INTEGER", "extra":{"foreignkey":"em_fieldgroup.uid", "nullable":False}},
|
||||
{"name":"rel_to_type_id", "type":"INTEGER", "extra":{"foreignkey":"em_type.uid", "nullable":True, "server_default": sql.text('NULL')}}, # if relational: type this field refer to
|
||||
{"name":"rel_field_id", "type":"INTEGER", "extra":{"foreignkey":"em_type.uid", "nullable":True, "server_default": sql.text('NULL')}}, # if relational: field that specify the rel_to_type_id
|
||||
{"name":"optional", "type":"BOOLEAN"},
|
||||
{"name":"internal", "type":"BOOLEAN"},
|
||||
{"name":"icon", "type":"INTEGER"},
|
||||
]
|
||||
tables.append(em_field)
|
||||
|
||||
# selected field for each type
|
||||
em_field_type = {"name":"em_field_type"}
|
||||
em_field_type['columns'] = [
|
||||
{"name":"type_id", "type":"INTEGER", "extra":{"foreignkey":"em_type.uid", "nullable":False, "primarykey":True}},
|
||||
{"name":"field_id", "type":"INTEGER", "extra":{"foreignkey":"em_field.uid", "nullable":False, "primarykey":True}},
|
||||
]
|
||||
tables.append(em_field_type)
|
||||
|
||||
# Table of the objects created by the user (instance of the types)
|
||||
objects = {
|
||||
"name":"objects",
|
||||
# Table listing all objects created by lodel, giving them an unique id
|
||||
uids = {
|
||||
"name":"uids",
|
||||
"columns":[
|
||||
{"name":"uid", "type":"INTEGER", "extra":{"foreignkey":"uids.uid", "nullable":False, "primarykey":True}},
|
||||
{"name":"string", "type":"VARCHAR(50)"},
|
||||
{"name":"class_id", "type":"INTEGER", "extra":{"foreignkey":"em_class.uid"}},
|
||||
{"name":"type_id", "type":"INTEGER", "extra":{"foreignkey":"em_type.uid"}},
|
||||
{"name":"date_create", "type":"DATETIME"},
|
||||
{"name":"date_update", "type":"DATETIME"},
|
||||
{"name":"history", "type":"TEXT"}
|
||||
{"name":"uid", "type":"INTEGER", "extra":{"nullable":False, "primarykey":True, 'autoincrement':True}},
|
||||
{"name":"table", "type":"VARCHAR(50)"}
|
||||
]
|
||||
}
|
||||
tables.append(objects)
|
||||
tables.append(uids)
|
||||
|
||||
# Table listing all files
|
||||
# TODO Préciser les colonnes à ajouter
|
||||
files = {
|
||||
"name":"files",
|
||||
"columns":[
|
||||
{"name":"uid", "type":"INTEGER", "extra":{"foreignkey":"uids.uid", "nullable":False, "primarykey":True}},
|
||||
{"name":"field1", "type":"VARCHAR(50)"}
|
||||
]
|
||||
}
|
||||
tables.append(files)
|
||||
|
||||
return tables
|
||||
# Table listing the classes
|
||||
em_class = {"name":"em_class"}
|
||||
em_class['columns'] = default_columns + [
|
||||
{"name":"classtype", "type":"VARCHAR(50)"},
|
||||
{"name":"sortcolumn", "type":"VARCHAR(50)", "extra":{"default":"rank"}},
|
||||
{"name":"icon", "type":"INTEGER"},
|
||||
]
|
||||
tables.append(em_class)
|
||||
|
||||
|
||||
# Table listing the types
|
||||
em_type = {"name":"em_type"}
|
||||
em_type['columns'] = default_columns + [
|
||||
{"name":"class_id", "type":"INTEGER", "extra":{"foreignkey":"em_class.uid", "nullable":False}},
|
||||
{"name":"sortcolumn", "type":"VARCHAR(50)", "extra":{"default":"rank"}},
|
||||
{"name":"icon", "type":"INTEGER"},
|
||||
]
|
||||
tables.append(em_type)
|
||||
|
||||
# relation between types: which type can be a child of another
|
||||
em_type_hierarchy = {"name":"em_type_hierarchy"}
|
||||
em_type_hierarchy['columns'] = [
|
||||
{"name":"superior_id", "type":"INTEGER", "extra":{"foreignkey":"em_type.uid", "nullable":False, "primarykey":True}},
|
||||
{"name":"subordinate_id", "type":"INTEGER", "extra":{"foreignkey":"em_type.uid", "nullable":False, "primarykey":True}},
|
||||
{"name":"nature", "type":"VARCHAR(50)", "extra":{"primarykey":True}},
|
||||
]
|
||||
tables.append(em_type_hierarchy)
|
||||
|
||||
# Table listing the fieldgroups of a class
|
||||
em_fieldgroup = {"name":"em_fieldgroup"}
|
||||
em_fieldgroup['columns'] = default_columns + [
|
||||
{"name":"class_id", "type":"INTEGER", "extra":{"foreignkey":"em_class.uid", "nullable":False}},
|
||||
]
|
||||
tables.append(em_fieldgroup)
|
||||
|
||||
# Table listing the fields of a fieldgroup
|
||||
em_field = {"name":"em_field"}
|
||||
em_field['columns'] = default_columns + [
|
||||
{"name":"fieldtype", "type":"VARCHAR(50)", "extra":{"nullable":False}},
|
||||
{"name":"fieldgroup_id", "type":"INTEGER", "extra":{"foreignkey":"em_fieldgroup.uid", "nullable":False}},
|
||||
{"name":"rel_to_type_id", "type":"INTEGER", "extra":{"foreignkey":"em_type.uid", "nullable":True, "server_default": sql.text('NULL')}}, # if relational: type this field refer to
|
||||
{"name":"rel_field_id", "type":"INTEGER", "extra":{"foreignkey":"em_type.uid", "nullable":True, "server_default": sql.text('NULL')}}, # if relational: field that specify the rel_to_type_id
|
||||
{"name":"optional", "type":"BOOLEAN"},
|
||||
{"name":"internal", "type":"BOOLEAN"},
|
||||
{"name":"icon", "type":"INTEGER"},
|
||||
]
|
||||
tables.append(em_field)
|
||||
|
||||
# selected field for each type
|
||||
em_field_type = {"name":"em_field_type"}
|
||||
em_field_type['columns'] = [
|
||||
{"name":"type_id", "type":"INTEGER", "extra":{"foreignkey":"em_type.uid", "nullable":False, "primarykey":True}},
|
||||
{"name":"field_id", "type":"INTEGER", "extra":{"foreignkey":"em_field.uid", "nullable":False, "primarykey":True}},
|
||||
]
|
||||
tables.append(em_field_type)
|
||||
|
||||
# Table of the objects created by the user (instance of the types)
|
||||
objects = {
|
||||
"name":"objects",
|
||||
"columns":[
|
||||
{"name":"uid", "type":"INTEGER", "extra":{"foreignkey":"uids.uid", "nullable":False, "primarykey":True}},
|
||||
{"name":"string", "type":"VARCHAR(50)"},
|
||||
{"name":"class_id", "type":"INTEGER", "extra":{"foreignkey":"em_class.uid"}},
|
||||
{"name":"type_id", "type":"INTEGER", "extra":{"foreignkey":"em_type.uid"}},
|
||||
{"name":"date_create", "type":"DATETIME"},
|
||||
{"name":"date_update", "type":"DATETIME"},
|
||||
{"name":"history", "type":"TEXT"}
|
||||
]
|
||||
}
|
||||
tables.append(objects)
|
||||
|
||||
# Table listing all files
|
||||
# TODO Préciser les colonnes à ajouter
|
||||
files = {
|
||||
"name":"files",
|
||||
"columns":[
|
||||
{"name":"uid", "type":"INTEGER", "extra":{"foreignkey":"uids.uid", "nullable":False, "primarykey":True}},
|
||||
{"name":"field1", "type":"VARCHAR(50)"}
|
||||
]
|
||||
}
|
||||
tables.append(files)
|
||||
|
||||
return tables
|
||||
|
||||
def create_column(**kwargs):
    """ Build a sqlalchemy Column from a column description dict.

    Accepted keys:
      - name: the column name
      - type or type_: the column type, as a string (see _strToSqlAType) or a ready type
      - extra: dict of extra column options, e.g.
        {"primarykey": True, "nullable": False, "foreignkey": "uids.uid", ...}
    @return a sqlalchemy Column (with its ForeignKey appended when requested)
    """
    # Convert the string type into a sqlalchemy type
    if 'type_' not in kwargs and 'type' in kwargs:
        kwargs['type_'] = _strToSqlAType(kwargs['type'])
        del kwargs['type']

    # Flatten the extra options into kwargs
    if 'extra' in kwargs:
        kwargs.update(kwargs.pop('extra'))

    # ForeignKey cannot be given as a Column kwarg; instanciate it aside
    fk = None
    if 'foreignkey' in kwargs:
        fk = sql.ForeignKey(kwargs.pop('foreignkey'))

    # sqlalchemy expects primary_key, our schema dicts say primarykey
    if 'primarykey' in kwargs:
        kwargs['primary_key'] = kwargs.pop('primarykey')

    col = sql.Column(**kwargs)

    if fk is not None:
        col.append_foreign_key(fk)

    return col
|
||||
|
||||
def _strToSqlAType(strtype):
    """ Convert a string to an sqlAlchemy column type.

    @param strtype str: a type name such as 'INTEGER', 'TEXT' or 'VARCHAR(50)'
    @return the matching sqlalchemy type
    @raise NameError when the name matches no sqlalchemy type
    """
    if 'VARCHAR' in strtype:
        return _strToVarchar(strtype)
    # The trailing unreachable `pass` of the original has been dropped
    try:
        return getattr(sql, strtype)
    except AttributeError:
        raise NameError("Unknown type '" + strtype + "'")
|
||||
|
||||
def _strToVarchar(vstr):
    """ Convert a string like 'VARCHAR(XX)' (with XX an integer) to a SqlAlchemy varchar type.

    When no length is given (plain 'VARCHAR'), an unbounded VARCHAR is returned.
    """
    # Raw string: '\(' and '\d' are invalid escape sequences in a normal
    # string literal (DeprecationWarning, SyntaxWarning in recent Python).
    check_length = re.search(re.compile(r'VARCHAR\((\d+)\)', re.IGNORECASE), vstr)
    column_length = int(check_length.groups()[0]) if check_length else None
    return sql.VARCHAR(length=column_length)
|
||||
|
||||
|
|
|
|||
|
|
@ -75,7 +75,7 @@ def getEngine(ename = 'default', sqlalogging = None):
|
|||
# @param engine sqlalchemy.engine : A sqlalchemy engine
|
||||
# @return an sql alechemy MetaData instance bind to engine
|
||||
def meta(engine):
    """ Return an sqlalchemy MetaData instance bound to engine, with the
    current database schema reflected into it.

    @param engine sqlalchemy.engine: A sqlalchemy engine
    @return an sqlalchemy MetaData instance bound to engine
    """
    # Bind at construction time; the unbound MetaData() previously created
    # here was immediately overwritten and has been removed.
    res = sqla.MetaData(bind=engine)
    res.reflect(bind=engine)
    return res
|
||||
|
||||
|
|
|
|||
|
|
@ -1,515 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import os
|
||||
import re
|
||||
import logging
|
||||
|
||||
import sqlalchemy as sqla
|
||||
from sqlalchemy.ext.compiler import compiles
|
||||
from django.conf import settings
|
||||
|
||||
from Database.sqlalter import *
|
||||
|
||||
#Logger config
|
||||
#logger.getLogger().setLevel('WARNING')
|
||||
logger = logging.getLogger('lodel2.Database.sqlwrapper')
|
||||
logger.setLevel('CRITICAL')
|
||||
logger.propagate = False
|
||||
#To be able to use dango confs
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Lodel.settings")
|
||||
|
||||
class SqlWrapper(object):
|
||||
""" A wrapper class to sqlalchemy
|
||||
|
||||
Usefull to provide a standart API
|
||||
|
||||
__Note__ : This class is not thread safe (sqlAlchemy connections are not). Create a new instance of the class to use in different threads or use SqlWrapper::copy
|
||||
"""
|
||||
ENGINES = {'mysql': {
|
||||
'driver': 'pymysql',
|
||||
'encoding': 'utf8'
|
||||
},
|
||||
'postgresql': {
|
||||
'driver': 'psycopg2',
|
||||
'encoding': 'utf8',
|
||||
},
|
||||
'sqlite': {
|
||||
'driver': 'pysqlite',
|
||||
'encoding': 'utf8',
|
||||
},
|
||||
}
|
||||
|
||||
##Configuration dict alias for class access
|
||||
config=settings.LODEL2SQLWRAPPER
|
||||
|
||||
##Wrapper instance list
|
||||
wrapinstance = dict()
|
||||
|
||||
def __init__(self, name=None, alchemy_logs=None, read_db = "default", write_db = "default"):
|
||||
""" Instanciate a new SqlWrapper
|
||||
@param name str: The wrapper name
|
||||
@param alchemy_logs bool: If true activate sqlalchemy logger
|
||||
@param read_db str: The name of the db conf
|
||||
@param write_db str: The name of the db conf
|
||||
|
||||
@todo Better use of name (should use self.cfg['wrapper'][name] to get engines configs
|
||||
@todo Is it a really good idea to store instance in class scope ? Maybe not !!
|
||||
"""
|
||||
|
||||
self.sqlalogging = False if alchemy_logs == None else bool(alchemy_logs)
|
||||
|
||||
if name == None:
|
||||
self.name = read_db+':'+write_db
|
||||
else:
|
||||
self.name = name
|
||||
|
||||
self.r_dbconf = read_db
|
||||
self.w_dbconf = write_db
|
||||
|
||||
self._checkConf() #raise if errors in configuration
|
||||
|
||||
if self.name in self.__class__.wrapinstance:
|
||||
logger.warning("A SqlWrapper with the name "+self.name+" allready exist. Replacing the old one by the new one")
|
||||
SqlWrapper.wrapinstance[self.name] = self
|
||||
|
||||
#Engine and wrapper initialisation
|
||||
self.r_engine = self._getEngine(True, self.sqlalogging)
|
||||
self.w_engine = self._getEngine(False, self.sqlalogging)
|
||||
self.r_conn = None
|
||||
self.w_conn = None
|
||||
|
||||
|
||||
self.metadata = None #TODO : use it to load all db schema in 1 request and don't load it each table instanciation
|
||||
self.meta_crea = None
|
||||
|
||||
logger.debug("New wrapper instance : <"+self.name+" read:"+str(self.r_engine)+" write:"+str(self.w_engine))
|
||||
pass
|
||||
|
||||
@classmethod
|
||||
def getEngine(c, ename = 'default', logs = None):
|
||||
return c(read_db = ename, write_db = ename, alchemy_logs = logs).r_engine
|
||||
|
||||
@property
|
||||
def cfg(self):
|
||||
""" Return the SqlWrapper.config dict """
|
||||
return self.__class__.config;
|
||||
@property
|
||||
def _engines_cfg(self):
|
||||
return self.__class__.ENGINES;
|
||||
|
||||
@property
|
||||
def meta(self):
|
||||
if self.metadata == None:
|
||||
self.renewMetaData()
|
||||
return self.metadata
|
||||
|
||||
def renewMetaData(self):
|
||||
""" (Re)load the database schema """
|
||||
self.metadata = sqla.MetaData(bind=self.r_engine)
|
||||
self.metadata.reflect()
|
||||
|
||||
@property
|
||||
def rconn(self):
|
||||
""" Return the read connection
|
||||
@warning Do not store the connection, call this method each time you need it
|
||||
"""
|
||||
return self._getConnection(True)
|
||||
@property
|
||||
def wconn(self):
|
||||
""" Return the write connection
|
||||
@warning Do not store the connection, call this method each time you need it
|
||||
"""
|
||||
return self._getConnection(False)
|
||||
|
||||
def _getConnection(self, read):
|
||||
""" Return an opened connection
|
||||
@param read bool: If true return the reading connection
|
||||
@return A sqlAlchemy db connection
|
||||
@private
|
||||
"""
|
||||
if read:
|
||||
r = self.r_conn
|
||||
else:
|
||||
r = self.w_conn
|
||||
|
||||
if r == None:
|
||||
#Connection not yet opened
|
||||
self.connect(read)
|
||||
r = self._getConnection(read) #TODO : Un truc plus safe/propre qu'un appel reccursif ?
|
||||
return r
|
||||
|
||||
|
||||
def connect(self, read = None):
|
||||
""" Open a connection to a database
|
||||
@param read bool|None: If None connect both, if True only connect the read side (False the write side)
|
||||
@return None
|
||||
"""
|
||||
if read or read == None:
|
||||
if self.r_conn != None:
|
||||
logger.debug(' SqlWrapper("'+self.name+'") Unable to connect, already connected')
|
||||
else:
|
||||
self.r_conn = self.r_engine.connect()
|
||||
|
||||
if not read or read == None:
|
||||
if self.w_conn != None:
|
||||
logger.debug(' SqlWrapper("'+self.name+'") Unable to connect, already connected')
|
||||
else:
|
||||
self.w_conn = self.w_engine.connect()
|
||||
|
||||
def disconnect(self, read = None):
|
||||
""" Close a connection to a database
|
||||
@param read bool|None: If None disconnect both, if True only connect the read side (False the write side)
|
||||
@return None
|
||||
"""
|
||||
if read or read == None:
|
||||
if self.r_conn == None:
|
||||
logger.info('Unable to close read connection : connection not opened')
|
||||
else:
|
||||
self.r_conn.close()
|
||||
self.r_conn = None
|
||||
|
||||
if not read or read == None:
|
||||
if self.r_conn == None:
|
||||
logger.info('Unable to close write connection : connection not opened')
|
||||
else:
|
||||
self.w_conn.close()
|
||||
self.w_conn = None
|
||||
|
||||
def reconnect(self, read = None):
|
||||
""" Close and reopen a connection to a database
|
||||
@param read bool|None: If None disconnect both, if True only connect the read side (False the write side)
|
||||
@return None
|
||||
"""
|
||||
self.disconnect(read)
|
||||
self.connect(read)
|
||||
|
||||
@classmethod
|
||||
def reconnectAll(c, read = None):
|
||||
""" Reconnect all the wrappers
|
||||
@static
|
||||
"""
|
||||
for wname in c.wrapinstance:
|
||||
c.wrapinstance[wname].reconnect(read)
|
||||
|
||||
def Table(self, tname):
|
||||
""" Instanciate a new SqlAlchemy Table
|
||||
@param tname str: The table name
|
||||
@return A new instance of SqlAlchemy::Table
|
||||
"""
|
||||
if not isinstance(tname, str):
|
||||
return TypeError('Excepting a <class str> but got a '+str(type(tname)))
|
||||
#return sqla.Table(tname, self.meta, autoload_with=self.r_engine, autoload=True)
|
||||
return sqla.Table(tname, self.meta)
|
||||
|
||||
def _getEngine(self, read=True, sqlalogging = None):
|
||||
""" Return a sqlalchemy engine
|
||||
@param read bool: If True return the read engine, else
|
||||
return the write one
|
||||
@return a sqlachemy engine instance
|
||||
|
||||
@todo Put the check on db config in SqlWrapper.checkConf()
|
||||
"""
|
||||
#Loading confs
|
||||
cfg = self.cfg['db'][self.r_dbconf if read else self.w_dbconf]
|
||||
|
||||
edata = self._engines_cfg[cfg['ENGINE']] #engine infos
|
||||
conn_str = ""
|
||||
|
||||
if cfg['ENGINE'] == 'sqlite':
|
||||
#Sqlite connection string
|
||||
conn_str = '%s+%s:///%s'%( cfg['ENGINE'],
|
||||
edata['driver'],
|
||||
cfg['NAME'])
|
||||
else:
|
||||
#Mysql and Postgres connection string
|
||||
user = cfg['USER']
|
||||
user += (':'+cfg['PASSWORD'] if 'PASSWORD' in cfg else '')
|
||||
|
||||
if 'HOST' not in cfg:
|
||||
logger.info('Not HOST in configuration, using localhost')
|
||||
host = 'localhost'
|
||||
else:
|
||||
host = cfg['HOST']
|
||||
|
||||
host += (':'+cfg['PORT'] if 'PORT' in cfg else '')
|
||||
|
||||
conn_str = '%s+%s://'%(cfg['ENGINE'], edata['driver'])
|
||||
conn_str += '%s@%s/%s'%(user,host,cfg['NAME'])
|
||||
|
||||
|
||||
ret = sqla.create_engine(conn_str, encoding=edata['encoding'], echo=self.sqlalogging)
|
||||
|
||||
logger.debug("Getting engine :"+str(ret))
|
||||
|
||||
return ret
|
||||
|
||||
@classmethod
|
||||
def getWrapper(c, name):
|
||||
""" Return a wrapper instance from a wrapper name
|
||||
@param name str: The wrapper name
|
||||
@return a SqlWrapper instance
|
||||
|
||||
@throw KeyError
|
||||
"""
|
||||
if name not in c.wrapinstance:
|
||||
raise KeyError("No wrapper named '"+name+"' exists")
|
||||
return c.wrapinstance[name]
|
||||
|
||||
def _checkConf(self):
|
||||
""" Class method that check the configuration
|
||||
|
||||
Configuration looks like
|
||||
- db (mandatory)
|
||||
- ENGINE (mandatory)
|
||||
- NAME (mandatory)
|
||||
- USER
|
||||
- PASSWORD
|
||||
- engines (mandatory)
|
||||
- driver (mandatory)
|
||||
- encoding (mandatory)
|
||||
- dbread (mandatory if no default db)
|
||||
- dbwrite (mandatory if no default db)
|
||||
"""
|
||||
err = []
|
||||
if 'db' not in self.cfg:
|
||||
err.append('Missing "db" in configuration')
|
||||
else:
|
||||
for dbname in [self.r_dbconf, self.w_dbconf]:
|
||||
if dbname not in self.cfg['db']:
|
||||
err.append('Missing "'+dbname+'" db configuration')
|
||||
else:
|
||||
db = self.cfg['db'][dbname]
|
||||
if db['ENGINE'] not in self._engines_cfg:
|
||||
err.append('Unknown engine "'+db['ENGINE']+'"')
|
||||
elif db['ENGINE'] != 'sqlite' and 'USER' not in db:
|
||||
err.append('Missing "User" in configuration of database "'+dbname+'"')
|
||||
if 'NAME' not in db:
|
||||
err.append('Missing "NAME" in database "'+dbname+'"')
|
||||
|
||||
if len(err)>0:
|
||||
err_str = "\n"
|
||||
for e in err:
|
||||
err_str += "\t\t"+e+"\n"
|
||||
raise NameError('Configuration errors in LODEL2SQLWRAPPER:'+err_str)
|
||||
|
||||
def dropAll(self):
|
||||
""" Drop ALL tables from the database """
|
||||
if not settings.DEBUG:
|
||||
logger.critical("Trying to drop all tables but we are not in DEBUG !!!")
|
||||
raise RuntimeError("Trying to drop all tables but we are not in DEBUG !!!")
|
||||
meta = sqla.MetaData(bind=self.w_engine)
|
||||
meta.reflect()
|
||||
meta.drop_all()
|
||||
pass
|
||||
|
||||
def createAllFromConf(self, schema):
|
||||
""" Create a bunch of tables from a schema
|
||||
@param schema list: A list of table schema
|
||||
@see SqlWrapper::createTable()
|
||||
"""
|
||||
self.meta_crea = sqla.MetaData()
|
||||
|
||||
logger.info("Running function createAllFromConf")
|
||||
for i,table in enumerate(schema):
|
||||
if not isinstance(table, dict):
|
||||
raise TypeError("Excepted a list of dict but got a "+str(type(schema))+" in the list")
|
||||
self.createTable(**table)
|
||||
|
||||
self.meta_crea.create_all(bind = self.w_engine)
|
||||
logger.info("All tables created")
|
||||
self.meta_crea = None
|
||||
self.renewMetaData()
|
||||
pass
|
||||
|
||||
def createTable(self, name, columns, **kw):
|
||||
""" Create a table
|
||||
@param name str: The table name
|
||||
@param columns list: A list of columns description dict
|
||||
@param extra dict: Extra arguments for table creation
|
||||
@see SqlWrapper::createColumn()
|
||||
"""
|
||||
|
||||
if self.meta_crea == None:
|
||||
self.meta_crea = sqla.MetaData()
|
||||
crea_now = True
|
||||
else:
|
||||
crea_now = False
|
||||
|
||||
if not isinstance(name, str):
|
||||
raise TypeError("<class str> excepted for table name, but got "+type(name))
|
||||
|
||||
#if not 'mysql_engine' in kw and self.w_engine.dialect.name == 'mysql':
|
||||
# kw['mysql_engine'] = 'InnoDB'
|
||||
|
||||
res = sqla.Table(name, self.meta_crea, **kw)
|
||||
for i,col in enumerate(columns):
|
||||
res.append_column(self.createColumn(**col))
|
||||
|
||||
if crea_now:
|
||||
self.meta_crea.create_all(self.w_engine)
|
||||
logger.debug("Table '"+name+"' created")
|
||||
|
||||
pass
|
||||
|
||||
def createColumn(self, **kwargs):
|
||||
""" Create a Column
|
||||
|
||||
Accepte named parameters :
|
||||
- name : The column name
|
||||
- type : see SqlWrapper::_strToSqlAType()
|
||||
- extra : a dict like { "primarykey":True, "nullable":False, "default":"test"...}
|
||||
@param **kwargs
|
||||
"""
|
||||
if not 'name' in kwargs or ('type' not in kwargs and 'type_' not in kwargs):
|
||||
pass#ERROR
|
||||
|
||||
#Converting parameters
|
||||
if 'type_' not in kwargs and 'type' in kwargs:
|
||||
kwargs['type_'] = self._strToSqlAType(kwargs['type'])
|
||||
del kwargs['type']
|
||||
|
||||
if 'extra' in kwargs:
|
||||
#put the extra keys in kwargs
|
||||
for exname in kwargs['extra']:
|
||||
kwargs[exname] = kwargs['extra'][exname]
|
||||
del kwargs['extra']
|
||||
|
||||
if 'foreignkey' in kwargs:
|
||||
#Instanciate a fk
|
||||
fk = sqla.ForeignKey(kwargs['foreignkey'])
|
||||
del kwargs['foreignkey']
|
||||
else:
|
||||
fk = None
|
||||
|
||||
if 'primarykey' in kwargs:
|
||||
#renaming primary_key in primarykey in kwargs
|
||||
kwargs['primary_key'] = kwargs['primarykey']
|
||||
del kwargs['primarykey']
|
||||
|
||||
res = sqla.Column(**kwargs)
|
||||
|
||||
if fk != None:
|
||||
res.append_foreign_key(fk)
|
||||
|
||||
#logger.debug("Column '"+kwargs['name']+"' created")
|
||||
return res
|
||||
|
||||
def _strToSqlAType(self, strtype):
|
||||
""" Convert a string to an sqlAlchemy column type """
|
||||
|
||||
if 'VARCHAR' in strtype:
|
||||
return self._strToVarchar(strtype)
|
||||
else:
|
||||
try:
|
||||
return getattr(sqla, strtype)
|
||||
except AttributeError:
|
||||
raise NameError("Unknown type '"+strtype+"'")
|
||||
pass
|
||||
|
||||
def _strToVarchar(self, vstr):
|
||||
""" Convert a string like 'VARCHAR(XX)' (with XX an integer) to a SqlAlchemy varchar type"""
|
||||
check_length = re.search(re.compile('VARCHAR\(([\d]+)\)', re.IGNORECASE), vstr)
|
||||
column_length = int(check_length.groups()[0]) if check_length else None
|
||||
return sqla.VARCHAR(length=column_length)
|
||||
|
||||
|
||||
def dropColumn(self, tname, colname):
|
||||
""" Drop a column from a table
|
||||
@param tname str|sqlalchemy.Table: The table name or a Table object
|
||||
@param colname str|sqlalchemy.Column: The column name or a column object
|
||||
@return None
|
||||
"""
|
||||
if tname not in self.meta.tables: #Useless ?
|
||||
raise NameError("The table '"+tname+"' dont exist")
|
||||
table = self.Table(tname)
|
||||
col = sqla.Column(colname)
|
||||
|
||||
ddl = DropColumn(table, col)
|
||||
sql = ddl.compile(dialect=self.w_engine.dialect)
|
||||
sql = str(sql)
|
||||
logger.debug("Executing SQL : '"+sql+"'")
|
||||
ret = bool(self.w_engine.execute(sql))
|
||||
|
||||
self.renewMetaData()
|
||||
return ret
|
||||
|
||||
|
||||
def addColumn(self, tname, colname, coltype):
|
||||
""" Add a column to a table
|
||||
@param tname str: The table name
|
||||
@param colname str: The column name
|
||||
@param coltype str: The new column type
|
||||
|
||||
@return True if query success False if it fails
|
||||
"""
|
||||
if tname not in self.meta.tables: #Useless ?
|
||||
raise NameError("The table '"+tname+"' dont exist")
|
||||
table = self.Table(tname)
|
||||
newcol = self.createColumn(name=colname, type_ = coltype)
|
||||
|
||||
ddl = AddColumn(table, newcol)
|
||||
sql = ddl.compile(dialect=self.w_engine.dialect)
|
||||
sql = str(sql)
|
||||
logger.debug("Executing SQL : '"+sql+"'")
|
||||
ret = bool(self.wconn.execute(sql))
|
||||
|
||||
self.renewMetaData()
|
||||
return ret
|
||||
|
||||
## AddColumnObject
#
# Adds a column from a SQLAlchemy Column Object
#
# @param tname str: Name of the table in which to add the column
# @param column Column: Column object to add to the table
# @return True if query is successful, False if it fails
def addColumnObject(self, tname, column):
    if tname not in self.meta.tables:
        raise NameError("The table '%s' doesn't exist" % tname)

    # Build the ALTER TABLE ... ADD COLUMN statement for the write dialect.
    ddl = AddColumn(self.Table(tname), column)
    statement = str(ddl.compile(dialect=self.w_engine.dialect))
    logger.debug("Executing SQL : '%s'" % statement)
    result = bool(self.wconn.execute(statement))
    # Refresh cached metadata so the new column is visible.
    self.renewMetaData()
    return result
|
||||
|
||||
def alterColumn(self, tname, colname, col_newtype):
    """ Change the type of a column.

    @param tname str: The table name
    @param colname str: The column name
    @param col_newtype str: The column new type

    @return True if query success False if it fails
    """
    if tname not in self.meta.tables:  # Useless ?
        raise NameError("The table '" + tname + "' dont exist")

    # Column object carrying the new type for the ALTER statement.
    col = self.createColumn(name=colname, type_=col_newtype)
    table = self.Table(tname)

    # BUGFIX: was `AlterColumn(table, newcol)` — `newcol` is never defined in
    # this method (copy/paste from addColumn) and raised NameError on every
    # call; the intended local is `col` built just above.
    ddl = AlterColumn(table, col)
    sql = str(ddl.compile(dialect=self.w_engine.dialect))
    logger.debug("Executing SQL : '" + sql + "'")
    ret = bool(self.wconn.execute(sql))

    # Schema changed: refresh the cached metadata.
    self.renewMetaData()
    return ret
|
||||
|
||||
def _debug__printSchema(self):
    """ Debug function to print the db schema """
    print(self.meta)
    # Dump every known table, one by one.
    for table_name in self.meta.tables:
        self._debug__printTable(table_name)
|
||||
|
||||
def _debug__printTable(self, tname):
    """ Debug helper: print one table's columns and their types """
    table = self.meta.tables[tname]
    # Assemble the dump as a list of fragments, then join once.
    parts = ['Table : "' + tname + '" :\n']
    for column in table.c:
        parts.append('\t\t"' + column.name + '"(' + str(column.type) + ') \n')
    print(''.join(parts))
|
||||
|
||||
|
||||
|
|
@ -175,8 +175,7 @@ class EmComponent(object):
|
|||
dbe = cls.db_engine()
|
||||
conn = dbe.connect()
|
||||
|
||||
#kwargs['rank'] = cls.get_max_rank(kwargs[cls.ranked_in]) + 1 #Warning !!!
|
||||
kwargs['rank'] = -1
|
||||
kwargs['rank'] = -1 #Warning !!!
|
||||
|
||||
table = sql.Table(cls.table, sqlutils.meta(dbe))
|
||||
req = table.insert(kwargs)
|
||||
|
|
|
|||
|
|
@ -16,8 +16,7 @@ from EditorialModel.types import EmType
|
|||
from EditorialModel.fields import EmField
|
||||
import EditorialModel.fieldtypes as fieldTypes
|
||||
|
||||
from Database.sqlsetup import SQLSetup
|
||||
from Database import sqlutils
|
||||
from Database import sqlutils, sqlsetup
|
||||
import sqlalchemy as sqla
|
||||
|
||||
|
||||
|
|
@ -33,8 +32,7 @@ class ClassesTestCase(TestCase):
|
|||
# run before every instanciation of the class
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
sql = SQLSetup()
|
||||
sql.initDb()
|
||||
sqlsetup.init_db()
|
||||
|
||||
# run before every function of the class
|
||||
def setUp(self):
|
||||
|
|
|
|||
|
|
@ -21,9 +21,8 @@ from EditorialModel.test.utils import *
|
|||
|
||||
from Lodel.utils.mlstring import MlString
|
||||
|
||||
from Database.sqlsetup import SQLSetup
|
||||
from Database.sqlwrapper import SqlWrapper
|
||||
from Database import sqlutils
|
||||
from Database import sqlsetup
|
||||
import sqlalchemy as sqla
|
||||
|
||||
|
||||
|
|
@ -57,8 +56,7 @@ def setUpModule():
|
|||
logging.basicConfig(level=logging.CRITICAL)
|
||||
|
||||
#testDB setup
|
||||
sqls = SQLSetup()
|
||||
tables = sqls.get_schema()
|
||||
tables = sqlsetup.get_schema()
|
||||
ttest = { 'name':'ttest',
|
||||
'columns': [
|
||||
{"name":"uid", "type":"INTEGER", "extra":{"foreignkey":"uids.uid", "nullable":False, "primarykey":True}},
|
||||
|
|
@ -73,7 +71,6 @@ def setUpModule():
|
|||
}
|
||||
tables.append(ttest)
|
||||
|
||||
sqlwrap = globals()['dbwrapper'] = SqlWrapper(read_db='default', write_db = 'default', alchemy_logs=False)
|
||||
globals()['tables'] = tables
|
||||
|
||||
#Creating db structure
|
||||
|
|
@ -81,9 +78,9 @@ def setUpModule():
|
|||
initTestDb(TEST_COMPONENT_DBNAME)
|
||||
setDbConf(TEST_COMPONENT_DBNAME)
|
||||
|
||||
sqlwrap.createAllFromConf(tables)
|
||||
sqlsetup.init_db('default', False, tables)
|
||||
|
||||
dbe = sqlwrap.r_engine
|
||||
dbe = sqlutils.getEngine('default')
|
||||
|
||||
# Insertion of testings datas
|
||||
conn = dbe.connect()
|
||||
|
|
@ -143,16 +140,12 @@ class ComponentTestCase(TestCase):
|
|||
{ 'uid': 1025, 'name': 'name', 'string': '{}', 'help': '{}', 'rank': 5},
|
||||
]
|
||||
|
||||
@property
|
||||
def db(self):
|
||||
return globals()['dbwrapper']
|
||||
@property
|
||||
def tables(self):
|
||||
return globals()['tables']
|
||||
|
||||
def setUp(self):
|
||||
self.dber = globals()['dbwrapper'].r_engine
|
||||
self.dbew = globals()['dbwrapper'].w_engine
|
||||
self.dber = sqlutils.getEngine('default')
|
||||
self.test_values = self.__class__.test_values
|
||||
#Db RAZ
|
||||
#shutil.copyfile(TEST_COMPONENT_DBNAME+'_bck', globals()['component_test_dbfilename'])
|
||||
|
|
|
|||
|
|
@ -16,9 +16,6 @@ from EditorialModel.fields_types import Em_Field_Type
|
|||
from EditorialModel.test.utils import *
|
||||
from EditorialModel.fieldtypes import *
|
||||
|
||||
from Database.sqlsetup import SQLSetup
|
||||
from Database.sqlwrapper import SqlWrapper
|
||||
from Database.sqlquerybuilder import SqlQueryBuilder
|
||||
from Database import sqlutils
|
||||
|
||||
import sqlalchemy as sqla
|
||||
|
|
@ -45,8 +42,7 @@ class FieldTestCase(TestCase):
|
|||
|
||||
@classmethod
|
||||
def setUpClass(cls):
|
||||
sqls = SQLSetup()
|
||||
sqls.initDb()
|
||||
sqlsetup.init_db()
|
||||
|
||||
# Generation of the test data
|
||||
testclass = EmClass.create("testclass1",EmClassType.entity)
|
||||
|
|
|
|||
|
|
@ -18,7 +18,6 @@ from EditorialModel.fieldtypes import *
|
|||
|
||||
from EditorialModel.test.utils import *
|
||||
|
||||
from Database.sqlsetup import SQLSetup
|
||||
from Database import sqlutils
|
||||
|
||||
import sqlalchemy as sqla
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@ import logging
|
|||
import shutil
|
||||
|
||||
from django.conf import settings
|
||||
from Database.sqlsetup import SQLSetup
|
||||
from Database import sqlsetup
|
||||
|
||||
|
||||
_TESTDB_DEFAULT_DIR = '/tmp/'
|
||||
|
|
@ -42,7 +42,7 @@ def initTestDb(dbname = _TESTDB_DEFAULT_NAME, dbdir = _TESTDB_DEFAULT_DIR):
|
|||
'ENGINE': 'sqlite',
|
||||
'NAME': db_default,
|
||||
}
|
||||
SQLSetup().initDb(dbconfname = 'dbtest_default')
|
||||
sqlsetup.init_db(dbconfname = 'dbtest_default')
|
||||
#Make the backup copy
|
||||
shutil.copyfile(db_default, db_copy)
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue