
Merge branch 'master' of git@git.labocleo.org:lodel2

This commit is contained in:
Quentin Bonaventure 2017-03-28 12:27:35 +02:00
commit cb95014a9b
27 changed files with 993 additions and 822 deletions

View file

@ -11,243 +11,251 @@ LodelContext.expose_modules(globals(), {
'lodel.logger': 'logger',
'lodel.plugin': [('SessionHandlerPlugin', 'SessionHandler')],
'lodel.auth.exceptions': ['ClientError', 'ClientAuthenticationFailure',
'ClientPermissionDenied', 'ClientAuthenticationError'],
'lodel.leapi.query': ['LeGetQuery'],})
'ClientPermissionDenied', 'ClientAuthenticationError'],
'lodel.leapi.query': ['LeGetQuery'], })
##@brief Client metaclass designed to implements container accessor on
#Client Class
# @brief Client metaclass designed to implement a container accessor on the
# Client class
#
#@todo Maybe we can delete this metaclass....
class ClientMetaclass(type):
def __init__(self, name, bases, attrs):
return super(ClientMetaclass, self).__init__(name, bases, attrs)
def __getitem__(self, key):
return self.datas()[key]
return self.data()[key]
def __delitem__(self, key):
del(self.datas()[key])
del(self.data()[key])
def __setitem__(self, key, value):
if self.get_session_token() is None:
self.set_session_token(SessionHandler.start())
datas = self.datas()
datas[key] = value
data = self.data()
data[key] = value
def __str__(self):
return str(self._instance)
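# Hedged usage sketch (not part of this commit): the metaclass makes the Client
# *class itself* behave like a dict backed by the session data. WebUiClient and
# the session key are hypothetical names.
# >>> WebUiClient['csrf_token'] = 'abc123'   # __setitem__ starts a session if none exists
# >>> WebUiClient['csrf_token']              # __getitem__ reads the session data dict
# 'abc123'
# >>> del WebUiClient['csrf_token']          # __delitem__ removes the key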
##@brief Abstract singleton class designed to handle client informations
# @brief Abstract singleton class designed to handle client information
#
# This class is designed to handle client authentication and sessions
class Client(object, metaclass = ClientMetaclass):
##@brief Singleton instance
class Client(object, metaclass=ClientMetaclass):
# @brief Singleton instance
_instance = None
##@brief List of dict that stores field ref for login and password
# @brief List of dict that stores field ref for login and password
#
# Storage specs :
# Storage specs :
#
# A list of dict, with keys 'login' and 'password', items are tuple.
#- login tuple contains (LeObjectChild, FieldName, link_field) with:
# - login tuple contains (LeObjectChild, FieldName, link_field) with:
# - LeObjectChild the dynclass containing the login
# - Fieldname the fieldname of LeObjectChild containing the login
# - link_field None if both login and password are in the same
# LeObjectChild. Else contains the field that make the link between
# login LeObject and password LeObject
#- password typle contains (LeObjectChild, FieldName)
# - password tuple contains (LeObjectChild, FieldName)
_infos_fields = None
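# Hedged illustration of the storage spec above (not part of this commit; the
# dynclasses User, Account and UserPassword are hypothetical):
# _infos_fields = [
#     {'login': (User, 'login', None),
#      'password': (User, 'password')},            # login & password in the same LeObject
#     {'login': (Account, 'login', 'linked_password'),
#      'password': (UserPassword, 'password')},    # password stored in a linked LeObject
# ]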
##@brief Constant that stores the session key that stores authentication
#informations
_AUTH_DATANAME = '__auth_user_infos'
##@brief Constructor
# @brief Constant that stores the session key holding authentication
# information
_AUTH_DATANAME = '__auth_user_infos'
# @brief Constructor
#@param session_token mixed : Session token provided by client to interface
def __init__(self,session_token = None):
def __init__(self, session_token=None):
logger.debug(session_token)
if self.__class__ == Client:
raise NotImplementedError("Abstract class")
logger.debug("New instance of Client child class %s" %
self.__class__.__name__)
self.__class__.__name__)
if Client._instance is not None:
old = Client._instance
Client._instance = None
del(old)
logger.debug("Replacing old Client instance by a new one")
else:
#first instanciation, fetching settings
# first instanciation, fetching settings
self.fetch_settings()
##@brief Stores infos for authenticated users (None == anonymous)
# @brief Stores infos for authenticated users (None == anonymous)
self.__user = None
##@brief Stores the session handler
# @brief Stores the session handler
Client._instance = self
##@brief Stores LodelSession instance
self.__datas = dict()
# @brief Stores LodelSession instance
self.__data = dict()
if session_token is not None:
self.__datas = SessionHandler.restore(session_token)
self.__data = SessionHandler.restore(session_token)
self.__session_token = session_token
logger.debug("New client : %s" % self)
def __del__(self):
del(self.__session_token)
del(self.__datas)
del(self.__data)
# @brief Returns the session data
#@return the dict which stores the session data
@classmethod
def datas(cls):
return cls._instance.__datas
def data(cls):
return cls._instance.__data
# @brief Returns the user's information contained in the session's data
@classmethod
def user(cls):
if '__auth_user_infos' in cls._instance.__datas:
return cls._instance.__datas['__auth_user_infos']
if '__auth_user_infos' in cls._instance.__data:
return cls._instance.__data['__auth_user_infos']
else:
return None
# @brief Returns the session's token
@classmethod
def get_session_token(cls):
return cls._instance.__session_token
# @brief Set the session's token
#@param value : the value of the token
@classmethod
def set_session_token(cls, value):
cls._instance.__session_token = value
##@brief Try to authenticate a user with a login and a password
# @brief Try to authenticate a user with a login and a password
#@param login str : provided login
#@param password str : provided password (hash)
#@warning breaks composed UIDs
#@note implemets multiple login/password sources (useless ?)
#@note implements multiple login/password sources (useless ?)
#@todo composed UID broken method
#@todo allow to provide an authentication source
@classmethod
def authenticate(self, login = None, password = None):
#Authenticate
def authenticate(self, login=None, password=None):
# Authenticate
for infos in self._infos_fields:
logger.debug(self._infos_fields)
login_cls = infos['login'][0]
pass_cls = infos['password'][0]
qfilter = "{passfname} = {passhash}"
uid_fname = login_cls.uid_fieldname()[0] #COMPOSED UID BROKEN
uid_fname = login_cls.uid_fieldname()[0] # COMPOSED UID BROKEN
if login_cls == pass_cls:
#Same EmClass for login & pass
# Same EmClass for login & pass
qfilter = qfilter.format(
passfname = infos['password'][1],
passhash = password)
passfname=infos['password'][1],
passhash=password)
else:
#Different EmClass, building a relational filter
# Different EmClass, building a relational filter
passfname = "%s.%s" % (infos['login'][2], infos['password'][1])
qfilter = qfilter.format(
passfname = passfname,
passhash = password)
passfname=passfname,
passhash=password)
getq = LeGetQuery(infos['login'][0], qfilter,
field_list = [uid_fname], limit = 1)
field_list=[uid_fname], limit=1)
req = getq.execute()
if len(req) == 1:
self.__set_authenticated(infos['login'][0],req[0][uid_fname])
self.__set_authenticated(infos['login'][0], req[0][uid_fname])
break
if self.is_anonymous():
self.authentication_failure() #Security logging
##@brief Attempt to restore a session given a session token
self.authentication_failure() # Security logging
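# Hedged usage sketch (not part of this commit; WebUiClient and the hash are
# hypothetical). The loop above builds one LeGetQuery per (login, password)
# source, e.g. "password = <hash>" when both fields live in the same EmClass or
# "linked_user.password = <hash>" when a link field joins two EmClasses, and
# flags the client as authenticated on the first match:
# >>> WebUiClient.authenticate(login='alice', password=hashed_password)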
# @brief Attempt to restore a session given a session token
#@param token mixed : a session token
#@return Session datas (a dict)
#@return Session data (a dict)
#@throw ClientAuthenticationFailure if token is not valid or not
#existing
# existing
@classmethod
def restore_session(cls, token):
cls._assert_instance()
if cls._instance.__session_token is not None:
raise ClientAuthenticationError("Trying to restore a session, but \
a session is allready started !!!")
a session is already started !!!")
try:
cls._instance.__datas = SessionHandler.restore(token)
cls._instance.__data = SessionHandler.restore(token)
cls._instance.__session_token = token
except ClientAuthenticationFailure:
logger.warning("Session restoring fails")
return copy.copy(cls._instance.datas)
##@brief Return the current session token or None
logger.warning("Session restoring failed")
return copy.copy(cls._instance.data)
# @brief Returns the current session token or None
#@return A session token or None
@classmethod
def session_token(cls):
cls._assert_instance()
return cls._instance.__session_token
##@brief Delete current session
# @brief Deletes current session
@classmethod
def destroy(cls):
cls._assert_instance()
SessionHandler.destroy(cls._instance.__session_token)
cls._instance.__session_token = None
cls._instance.__datas = dict()
##@brief Delete current client and save its session
cls._instance.__data = dict()
# @brief Deletes current client and saves its session
@classmethod
def clean(cls):
if cls._instance.__session_token is not None:
SessionHandler.save(cls._instance.__session_token, cls._instance.__datas)
SessionHandler.save(cls._instance.__session_token, cls._instance.__data)
if Client._instance is not None:
del(Client._instance)
Client._instance = None
##@brief Test wether a client is anonymous or logged in
# @brief Tests if a client is anonymous or logged in
#@return True if client is anonymous
@classmethod
def is_anonymous(cls):
return Client._instance.user() is None
##@brief Method to call on authentication failure
# @brief Method to be called on authentication failure
#@throw ClientAuthenticationFailure
#@throw LodelFatalError if no Client child instance found
#@throw LodelFatalError if no Client child instance is found
@classmethod
def authentication_failure(cls):
cls._generic_error(ClientAuthenticationFailure)
##@brief Method to call on authentication error
# @brief Method to be called on authentication error
#@throw ClientAuthenticationError
#@throw LodelFatalError if no Client child instance found
#@throw LodelFatalError if no Client child instance is found
@classmethod
def authentication_error(cls, msg = "Unknow error"):
def authentication_error(cls, msg="Unknown error"):
cls._generic_error(ClientAuthenticationError, msg)
##@brief Method to call on permission denied error
# @brief Method to be called on permission denied error
#@throw ClientPermissionDenied
#@throw LodelFatalError if no Client child instance found
#@throw LodelFatalError if no Client child instance is found
@classmethod
def permission_denied_error(cls, msg = ""):
def permission_denied_error(cls, msg=""):
cls._generic_error(ClientPermissionDenied, msg)
##@brief Generic error method
# @brief Generic error method
#@see Client::authentication_failure() Client::authentication_error()
#Client::permission_denied_error()
#@throw LodelFatalError if no Client child instance found
# Client::permission_denied_error()
#@throw LodelFatalError if no Client child instance is found
@classmethod
def _generic_error(cls, expt, msg = ""):
def _generic_error(cls, expt, msg=""):
cls._assert_instance()
raise expt(Client._instance, msg)
##@brief Assert that an instance of Client child class exists
#@throw LodelFataError if no instance of Client child class found
# @brief Asserts that an instance of Client child class exists
#@throw LodelFatalError if no instance of Client child class is found
@classmethod
def _assert_instance(cls):
if Client._instance is None:
raise LodelFatalError("No client instance found. Aborting.")
##@brief Class method that fetches conf
# @brief Class method that fetches conf
#
#This method populates Client._infos_fields . This attribute stores
#informations on login and password location (LeApi object & field)
# This method populates Client._infos_fields. This attribute stores
# information on login and password locations (LeApi object & field)
@classmethod
def fetch_settings(cls):
LodelContext.expose_dyncode(globals(), 'dyncode')
if cls._infos_fields is None:
cls._infos_fields = list()
else:
#Allready fetched
# Already fetched
return
infos = (
Settings.auth.login_classfield,
@ -266,21 +274,21 @@ a session is allready started !!!")
if fdh.is_reference() and res_infos[1][0] in fdh.linked_classes():
link_field = fname
if link_field is None:
#Unable to find link between login & password EmClasses
# Unable to find a link between login & password EmClasses
raise AuthenticationError("Unable to find a link between \
login EmClass '%s' and password EmClass '%s'. Aborting..." % (
res_infos[0][0], res_infos[1][0]))
res_infos[0] = (res_infos[0][0], res_infos[0][1], link_field)
cls._infos_fields.append(
{'login':res_infos[0], 'password':res_infos[1]})
{'login': res_infos[0], 'password': res_infos[1]})
##@brief Set a user as authenticated and start a new session
# @brief Sets a user as authenticated and starts a new session
#@param leo LeObject child class : the LeObject the user is stored in
#@param uid str : uniq id (in leo)
#@return None
@classmethod
def __set_authenticated(cls, leo, uid):
cls._instance.__user = {'classname': leo.__name__, 'uid': uid, 'leoclass': leo}
#Store auth infos in session
cls._instance.__datas[cls._instance.__class__._AUTH_DATANAME] = copy.copy(cls._instance.__user)
# Store auth infos in session
cls._instance.__data[cls._instance.__class__._AUTH_DATANAME] = copy.copy(
cls._instance.__user)

View file

@ -36,11 +36,13 @@ class EmComponent(MlNamedObject):
self.group = group
super().__init__(display_name, help_text)
# @brief Returns the display_name of the component if it is not None, its uid otherwise
def __str__(self):
if self.display_name is None:
return str(self.uid)
return str(self.display_name)
# @brief Returns a hash code for the component
def d_hash(self):
m = hashlib.md5()
for data in (
@ -57,7 +59,7 @@ class EmComponent(MlNamedObject):
#@ingroup lodel2_em
class EmClass(EmComponent):
# @brief Instanciate a new EmClass
# @brief Instanciates a new EmClass
#@param uid str : uniq identifier
#@param display_name MlString|str|dict : component display_name
#@param abstract bool : set the class as abstract if True
@ -78,7 +80,7 @@ class EmClass(EmComponent):
self.pure_abstract = bool(pure_abstract)
self.__datasource = datasources
if not isinstance(datasources, str) and len(datasources) != 2:
raise ValueError("datasources arguement can be a single datasource\
raise ValueError("datasources argument can be a single datasource\
name or two names in a tuple or a list")
if self.pure_abstract:
self.abtract = True
@ -115,8 +117,9 @@ class EmClass(EmComponent):
internal=True,
group=group)
# @brief Property that represent a dict of all fields (the EmField defined in this class and all its parents)
# @todo use Settings.editorialmodel.groups to determine wich fields should be returned
# @brief Property that represents a dict of all fields
# (the EmField objects defined in this class and all its parents)
# @todo use Settings.editorialmodel.groups to determine which fields should be returned
@property
def __all_fields(self):
res = dict()
@ -130,9 +133,9 @@ class EmClass(EmComponent):
def datasource(self):
return self.__datasource
# @brief Return the list of all dependencies
# @brief Returns the list of all dependencies
#
# Reccursive parents listing
# Recursive parents listing
@property
def parents_recc(self):
if len(self.parents) == 0:
@ -155,7 +158,7 @@ class EmClass(EmComponent):
except KeyError:
raise EditorialModelError("No such EmField '%s'" % uid)
# @brief Keep in __fields only fields contained in active groups
# @brief Keeps in __fields only fields contained in active groups
def _set_active_fields(self, active_groups):
if not Settings.editorialmodel.editormode:
active_fields = []
@ -165,10 +168,10 @@ class EmClass(EmComponent):
self.__fields = {fname: fdh for fname, fdh in self.__fields.items()
if fdh in active_fields}
# @brief Add a field to the EmClass
# @brief Adds a field to the EmClass
# @param emfield EmField : an EmField instance
# @warning do not add an EmField allready in another class !
# @throw EditorialModelException if an EmField with same uid allready in this EmClass (overwritting allowed from parents)
# @warning do not add an EmField already in another class !
# @throw EditorialModelException if an EmField with same uid already in this EmClass (overwriting allowed from parents)
# @todo End the override checks (needs methods in data_handlers)
def add_field(self, emfield):
assert_edit()
@ -180,11 +183,11 @@ class EmClass(EmComponent):
parent_field = self.__all_fields[emfield.uid]
if not emfield.data_handler_instance.can_override(parent_field.data_handler_instance):
raise AttributeError(
"'%s' field override a parent field, but data_handles are not compatible" % emfield.uid)
"'%s' field overrides a parent field, but data_handlers are not compatible" % emfield.uid)
self.__fields[emfield.uid] = emfield
return emfield
# @brief Create a new EmField and add it to the EmClass
# @brief Creates a new EmField and adds it to the EmClass
# @param data_handler str : A DataHandler name
# @param uid str : the EmField uniq id
# @param **field_kwargs : EmField constructor parameters ( see @ref EmField.__init__() )
@ -221,7 +224,7 @@ class EmClass(EmComponent):
#@ingroup lodel2_em
class EmField(EmComponent):
# @brief Instanciate a new EmField
# @brief Instanciates a new EmField
# @param uid str : uniq identifier
# @param display_name MlString|str|dict : field display_name
# @param data_handler str : A DataHandler name
@ -256,7 +259,7 @@ class EmField(EmComponent):
def get_data_handler_cls(self):
return copy.copy(self.data_handler_cls)
##@brief Returne the uid of the emclass which contains this field
##@brief Returns the uid of the emclass which contains this field
def get_emclass_uid(self):
return self._emclass.uid
@ -277,7 +280,7 @@ class EmField(EmComponent):
class EmGroup(MlNamedObject):
# @brief Create a new EmGroup
# @brief Creates a new EmGroup
# @note you should NEVER call the constructor yourself. Use Model.add_group instead
# @param uid str : Uniq identifier
# @param depends list : A list of EmGroup dependencies
@ -297,10 +300,10 @@ class EmGroup(MlNamedObject):
for grp in depends:
if not isinstance(grp, EmGroup):
raise ValueError("EmGroup expected in depends argument but %s found" % grp)
self.add_dependencie(grp)
self.add_dependency(grp)
# @brief Returns EmGroup dependencie
# @param recursive bool : if True return all dependencies and their dependencies
# @brief Returns EmGroup dependencies
# @param recursive bool : if True returns all dependencies and their own dependencies
# @return a dict of EmGroup identified by uid
def dependencies(self, recursive=False):
res = copy.copy(self.require)
@ -316,7 +319,7 @@ class EmGroup(MlNamedObject):
return res
# @brief Returns EmGroup applicants
# @param recursive bool : if True return all dependencies and their dependencies
# @param recursive bool : if True returns all applicants and their own applicants
# @returns a dict of EmGroup identified by uid
def applicants(self, recursive=False):
res = copy.copy(self.required_by)
@ -337,7 +340,7 @@ class EmGroup(MlNamedObject):
return (self.__components).copy()
# @brief Returns EmGroup display_name
# @param lang str | None : If None return default lang translation
# @param lang str | None : If None returns default lang translation
# @returns None if display_name is None, a str for display_name else
def get_display_name(self, lang=None):
name = self.display_name
@ -346,7 +349,7 @@ class EmGroup(MlNamedObject):
return name.get(lang)
# @brief Returns EmGroup help_text
# @param lang str | None : If None return default lang translation
# @param lang str | None : If None returns default lang translation
# @returns None if display_name is None, a str for display_name else
def get_help_text(self, lang=None):
help = self.help_text
@ -354,7 +357,7 @@ class EmGroup(MlNamedObject):
return None
return help.get(lang)
# @brief Add components in a group
# @brief Adds components in a group
# @param components list : EmComponent instances list
def add_components(self, components):
assert_edit()
@ -369,20 +372,20 @@ class EmGroup(MlNamedObject):
"Expecting components to be a list of EmComponent, but %s found in the list" % type(component))
self.__components |= set(components)
# @brief Add a dependencie
# @param em_group EmGroup|iterable : an EmGroup instance or list of instance
def add_dependencie(self, grp):
# @brief Add a dependency
# @param em_group EmGroup|iterable : an EmGroup instance or list of instances
def add_dependency(self, grp):
assert_edit()
try:
for group in grp:
self.add_dependencie(group)
self.add_dependency(group)
return
except TypeError:
pass
if grp.uid in self.require:
return
if self.__circular_dependencie(grp):
if self.__circular_dependency(grp):
raise EditorialModelError("Circular dependency detected, cannot add dependency")
self.require[grp.uid] = grp
grp.required_by[self.uid] = self
@ -406,9 +409,9 @@ class EmGroup(MlNamedObject):
self.required_by[grp.uid] = grp
grp.require[self.uid] = self
# @brief Search for circular dependencie
# @brief Search for circular dependency
# @return True if circular dep found else False
def __circular_dependencie(self, new_dep):
def __circular_dependency(self, new_dep):
return self.uid in new_dep.dependencies(True)
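# Hedged usage sketch (not part of this commit; uids and constructor arguments
# are illustrative):
# >>> g_base = EmGroup('base')
# >>> g_extra = EmGroup('extra', depends=[g_base])   # 'extra' now requires 'base'
# >>> g_base.add_dependency(g_extra)                 # raises EditorialModelError (circular dependency)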
# @brief Search for circular applicant
@ -424,6 +427,8 @@ class EmGroup(MlNamedObject):
else:
return self.display_name.get()
# @brief Computes a d-hash code for the EmGroup
# @return a string
def d_hash(self):
payload = "%s%s%s" % (

View file

@ -1,13 +1,20 @@
#-*- coding: utf-8 -*-
## @package lodel.editorial_model.exceptions
# This module contains the specific exceptions related to the EditorialModel Management.
## @brief Editorial Model specific exception.
class EditorialModelError(Exception):
pass
## @brief Asserts that the editorial model is editable (editormode is ON).
# @raise EditorialModelError if editormode is OFF
def assert_edit():
try:
from lodel import Settings
except ImportError: #Very dirty, but don't know how to fix the tests
except ImportError: # Very dirty, but don't know how to fix the tests
return
if not Settings.editorialmodel.editormode:
raise EditorialModelError("EM is readonly : editormode is OFF")

View file

@ -42,7 +42,8 @@ class EditorialModel(MlNamedObject):
super().__init__(display_name, help_text)
# @brief EmClass uids accessor
#@return a dict of emclasses
#@return a copy of the dict containing all emclasses of the model if uid is None
# else a copy of the class with uid uid
def all_classes(self, uid=None):
if uid is None:
return copy.copy(self.__classes)
@ -52,6 +53,9 @@ class EditorialModel(MlNamedObject):
except KeyError:
raise EditorialModelException("EmClass not found : '%s'" % uid)
# @brief EmClass uids accessor
#@return the dict containing all emclasses of the model if uid is None
# else the class with uid uid
def all_classes_ref(self, uid=None):
if uid is None:
return self.__classes
@ -62,12 +66,13 @@ class EditorialModel(MlNamedObject):
raise EditorialModelException("EmGroup not found : '%s'" % uid)
# @brief active EmClass uids accessor
#@return a list of class uids
#@return a list of active class uids
def active_classes_uids(self):
return list(self.__active_classes.keys())
# @brief EmGroups accessor
#@return a dict of groups
#@return a copy of the dict of the model's groups if uid is None
# else a copy of the group with uniq id uid
def all_groups(self, uid=None):
if uid is None:
return copy.copy(self.__groups)
@ -78,7 +83,8 @@ class EditorialModel(MlNamedObject):
raise EditorialModelException("EmGroup not found : '%s'" % uid)
# @brief EmGroups accessor
#@return a dict of groups
#@return the dict of the model's groups if uid is None
# else the group with uniq id uid
def all_groups_ref(self, uid=None):
if uid is None:
return self.__groups
@ -89,7 +95,7 @@ class EditorialModel(MlNamedObject):
raise EditorialModelException("EmGroup not found : '%s'" % uid)
# @brief active EmClass uids accessor
#@return a list of class uids
#@return a list of active group uids
def active_groups_uids(self):
return list(self.__active_groups.keys())
@ -97,7 +103,7 @@ class EditorialModel(MlNamedObject):
#@param uid None | str : give this argument to get a specific EmClass
#@return if uid is given returns an EmClass else returns an EmClass
# iterator
#@todo use Settings.editorialmodel.groups to determine wich classes should
#@todo use Settings.editorialmodel.groups to determine which classes should
# be returned
def classes(self, uid=None):
try:

View file

@ -1,4 +1,4 @@
## @package lodel.editorial_model.translator Editorial model translators
#
# This package contains modules that provides a save and a load function able to load and save
# lodel.editorial_model.model.EditorialModel
# This package is dedicated to the translation of an EditorialModel into several formats like pickle files or XML files. \n
# Each module provides save and load functions to read/write a lodel.editorial_model.model.EditorialModel object from and to a file.

View file

@ -11,63 +11,76 @@ LodelContext.expose_modules(globals(), {
'EmGroup'],
'lodel.utils.mlstring': ['MlString']})
##@package lodel.editorial_model.translator.xmlfile Translator module designed
#to load & save EM in XML
## @package lodel.editorial_model.translator.xmlfile
# This module is a translator toolkit between an editorial model and an XML file.
#
# Structure of a xml file which represents an editorial model:
# The XML file representing an editorial model is composed of several nodes.
#
# @par \<name\>
# The name of the model. It matches with the <b><em>name</em></b> field of the <b><em>EditorialModel class</em></b>
#
# @par \<description\>
# This is the description of a composed element. Inside this node, we can have as many child nodes as there are languages into which it is translated. \n
# Each translation is given by a node, using the following scheme :
# <ul>
# <li>\<name\>: name of the model, field <b><em>name</em></b> in class <b><em>EditorialModel</em></b>
# <li>\<description\>: field <b><em>description</em></b> of a composed element, one for each language translation named
# <ul><li>\<fre\> for french,
# <li>\<eng\> for english,
# <li>\<esp\> for spanish,
# <li>\<ger\> for german</ul>
# <li>\<classes\>: set of all <b><em>EmClass</em></b> in the model \n
# for each classe: \n
# \<class\><ul>
# <li>\<uid\>the class's id
# <li>\<display_name\> The name of the class, field <b><em>display_name</em></b> of the <b><em>EmClass</em></b> , in different languages if they're available :
# <ul><li>\<fre\> for french,
# <li>\<eng\> for english,
# <li>\<esp\> for spanish,
# <li>\<ger> for german</ul>
# <li>\<help_text\> Short explanation of the class's purpose, in different languages, as above
# <li>\<abstract\> True or False, field <b><em>abstract</em></b> of the <b><em>EmClass</em></b>
# <li>\<pure_abstract\> True or False, field <b><em>pure_bastract</em></b> of the <b><em>EmClass</em></b>
# <li>\<group\><b><em>uid</em></b> of the group of the field <b><em>group</em></b> of the <b><em>EmClass</em></b>
# <li>\<fields\>: set of all the <b><em>EmField</em></b> of the <b><em>EmClass</em></b>\n
# for each field: \n
# \<field\>
# <ul><li>\<uid\> uid of the <b><em>EmField</em></b>
# <li>\<display_name\> field <b><em>display_name</em></b> of the <b><em>EmField</em></b>, in different languages, as above
# <li>\<help_text\> Short explanation of the class's purpose, in different languages, as above
# <li>\<group\><b><em>uid</em></b> of the group of the field <b><em>group</em></b> of the <b><em>EmClass</em></b>
# <li>\<datahandler_name\> field <b><em>datahandler_name</em></b> of the Emfield, the name of a datahandler
# <li>\<datahandler_options\>, a list of xml items, each of them named with an option name and contains its value</ul></ul>
# <li>\<groups\>: set of all the groups <b><em>EmGroup</em></b> in the model\n
# for each group:\n
# <ul><li>\<uid\> uid of the <b><em>EmField</em></b>
# <li>\<display_name\> field <b><em>display_name</em></b> of the <b><em>EmField</em></b>, in different languages, as above
# <li>\<help_text\> Short explanation of the class's purpose, in different languages, as above
# <li>\<requires\> all uids of the <b><em>EmGroups</em></b> required by this group and which are in the fields <b><em>require</em></b>
# <li>\<components\> Set of all components of the <b><em>EmGroups</em></b>, representation of the field <b><em>__components</em></b> \n
# this item is splitted in two parts :\
# <ul><li>\<emfields\> all the emfields with, for each of them:\n
# \<emfield\> \n
# <ul><li> \<uid\> <b><em>uid</em></b> of the <b><em>EmField</em></b></ul>
# <li>\<emclasses\> all the emclasses with, for each of them:\n
# \<emclass\> \n
# <ul><li> \<uid\> <b><em>uid</em></b> of the <b><em>EmClass</em></b></ul></ul></ul>
# <li><b>\<fre\></b> : french
# <li><b>\<eng\></b> : english
# <li><b>\<esp\></b> : spanish
# <li><b>\<ger\></b> : german
# </ul>
#
# @par \<classes\>
# This node contains a set of all the <b><em>EmClass</em></b> classes we can find in the model, each represented by a <b>\<class\></b> child node.
#
# @par \<class\>
# It is the representation of a single <b><em>EmClass</em></b> class. It is contained in the <b><em>\<classes\></em></b> node. It contains the following child nodes :
# <ul>
# <li><b>\<uid\></b> : The identifier of the class.
# <li><b>\<display_name\></b> : The class' name, given by the <b><em>display_name</em></b> field of the <b><em>EmClass</em></b> class. This node contains the same language child nodes as the \<description\> node.
# <li><b>\<help_text\></b> : A short description of the purpose of this class, using the same child nodes for each language, as above.
# <li><b>\<abstract\></b> : Boolean node, with True or False as values, corresponding to the field <b><em>abstract</em></b> of the <b><em>EmClass</em></b> object.
# <li><b>\<pure_abstract\></b> : Boolean node, with True or False as values, corresponding to the field <b><em>pure_abstract</em></b> of the <b><em>EmClass</em></b> object.
# <li><b>\<group\></b> : The unique identifier of the group stored in the <b><em>group</em></b> field of the <b><em>EmClass</em></b> object.
# <li><b>\<fields\></b> : A set of all the <b><em>EmField</em></b> fields attached to an <b><em>EmClass</em></b> class. Each of them is represented by a <b>\<field\></b> child node.
# </ul>
#
# @par \<field\>
# This node is the XML representation of an <b><em>EmField</em></b> class. It contains the following child nodes :
# <ul>
# <li><b>\<uid\></b> : The identifier of the field.
# <li><b>\<display_name\></b> : Displayed name, in different languages (same child nodes as above), corresponding to the <b><em>display_name</em></b> property of the <b><em>EmField</em></b>.
# <li><b>\<help_text\></b> : Short explanation of the purpose of the field, in different languages (one child node for each translation, see above).
# <li><b>\<group\></b> : <b><em>uid</em></b> of the group of the field <b><em>group</em></b> in the <b><em>EmField</em></b>
# <li><b>\<datahandler_name\></b> : The name of the datahandler attached to this field (corresponds to the field <b><em>datahandler_name</em></b> of the Emfield)
# <li><b>\<datahandler_options\></b> : A list of xml items, each of them named with an option name and containing its value
# </ul>
#
# @par \<groups\>
# This node contains a set of all the groups in the model (represented by <b><em>EmGroup</em></b> objects) with a <b>\<group\></b> child node for each one.
#
# @par \<group\>
# Represents a single group. This node contains the following child nodes :
# <ul>
# <li><b>\<uid\></b> : unique id of the <b><em>EmGroup</em></b>.
# <li><b>\<display_name\></b> : Corresponds to the <b><em>display_name</em></b> property of the <b><em>EmGroup</em></b>, in different languages (see above)
# <li><b>\<help_text\></b> : Short explanation of the group's purpose, in different languages (see above)
# <li><b>\<requires\></b> : All the unique identifiers of the <b><em>EmGroups</em></b> required by this group and which are in the fields <b><em>require</em></b>.
# <li><b>\<components\></b> : A set of all components of the <b><em>EmGroups</em></b>, representation of the field <b><em>__components</em></b>. This node is split into two parts :
# <ul>
# <li><b>\<emfields\></b> : all the emfields with, for each of them:\n
# <b>\<emfield\></b> \n
# <b>\<uid\></b> : <b><em>uid</em></b> of the <b><em>EmField</em></b>
# <li><b>\<emclasses\></b> : all the emclasses with, for each of them:\n
# <b>\<emclass\></b> \n
# <b>\<uid\></b> : <b><em>uid</em></b> of the <b><em>EmClass</em></b>
# </ul>
# </ul>
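# A minimal, hypothetical example of the layout described above (names, uids and
# datahandler options are invented; real files are produced by save() below):
#
# <editorial_model>
#   <name><eng>example model</eng></name>
#   <description><eng>demo model</eng></description>
#   <classes>
#     <class>
#       <uid>person</uid>
#       <display_name><eng>Person</eng></display_name>
#       <help_text><eng>A person</eng></help_text>
#       <abstract>False</abstract>
#       <pure_abstract>False</pure_abstract>
#       <group>base_group</group>
#       <fields>
#         <field>
#           <uid>lastname</uid>
#           <datahandler_name>varchar</datahandler_name>
#           <datahandler_options><max_length>64</max_length></datahandler_options>
#         </field>
#       </fields>
#     </class>
#   </classes>
#   <groups>
#     <group>
#       <uid>base_group</uid>
#       <requires></requires>
#       <components>
#         <emfields><emfield><uid>lastname</uid></emfield></emfields>
#         <emclasses><emclass><uid>person</uid></emclass></emclasses>
#       </components>
#     </group>
#   </groups>
# </editorial_model>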
##@brief Saves a model in a xml file
## @brief Saves a model in an XML file
# @param model EditorialModel : the model to save
# @param filename str|None : if None display on stdout else writes in the file filename
# @param kwargs dict : additional options.
# - filename str|None : if None display on stdout else writes in the file filename
def save(model, **kwargs):
Em = etree.Element("editorial_model")
em_name = etree.SubElement(Em, 'name')
@ -100,19 +113,19 @@ def save(model, **kwargs):
outfile.close()
##@brief Writes a representation of a MlString in xml
# @param etree : the xml object
# @param elem : the element which represents a MlString
# @param mlstr : the mlstr to write
## @brief Writes a representation of a MlString in XML
# @param etree Element : the XML object
# @param elem Element : the element which represents a MlString
# @param mlstr MlString: the mlstr to write
def write_mlstring_xml(etree, elem, mlstr):
for lang in mlstr.values:
ss_mlstr = etree.SubElement(elem,lang)
ss_mlstr.text = mlstr.get(lang)
##@brief Writes the definition of a datahandler in xml
## @brief Writes the definition of a datahandler in xml
# @param etree : the xml object
# @param elem : the element which defines a datahandler
# @param dhdl_name : the name of the datahandler
# @param elem Element : the element which defines a datahandler
# @param dhdl_name str : the name of the datahandler
# @param kwargs : the options of the datahandler
def write_datahandler_xml(etree, elem, dhdl_name, **kwargs):
dhdl = etree.SubElement(elem,'datahandler_name')
@ -138,15 +151,15 @@ def write_datahandler_xml(etree, elem, dhdl_name, **kwargs):
opt_val = str(argu)
arg.text = opt_val
##@brief Writes a representation in xml of a EmField
## @brief Writes a representation in XML of an EmField
# @param etree : the xml object
# @param elem : the element for the EmField
# @param uid : the uid of the EmField
# @param name : the name of the field
# @param help_text : explanations of the EmField
# @param group_uid : the uid of a group, can be None
# @datahandler_name
# @**kwargs : options of the datahandler
# @param elem Element: the element for the EmField
# @param uid str : the uid of the EmField
# @param name str : the name of the field
# @param help_text MlString: explanations of the EmField
# @param group str|None: the uid of a group, can be None
# @param datahandler_name str: Name of the datahandler attached to the field
# @param **kwargs dict : options of the datahandler
def write_emfield_xml(etree, elem, uid, name, help_text, group, datahandler_name, **kwargs):
emfield = etree.SubElement(elem,'field')
emfield_uid = etree.SubElement(emfield, 'uid')
@ -169,10 +182,12 @@ def write_emfield_xml(etree, elem, uid, name, help_text, group, datahandler_name
##@brief Writes a representation of a EmGroup in xml
# @param etree : the xml object
# @param elem : the element for the EmGroup
# @param name : the name of the group
# @param help_text : explanations of the EmGroup
# @param requires : a list of the group's uids whose this group depends
# @param elem Element : the element for the EmGroup
# @param uid str : the uid of the EmGroup
# @param name str : the name of the group
# @param help_text MlString : explanations of the EmGroup
# @param requires list : a list of the uids of the groups this group depends on
# @param components list : a list of the EmComponent objects contained in the group
def write_emgroup_xml(etree, elem, uid, name, help_text, requires, components):
emgroup = etree.SubElement(elem, 'group')
emgroup_uid = etree.SubElement(emgroup, 'uid')
@ -204,15 +219,16 @@ def write_emgroup_xml(etree, elem, uid, name, help_text, requires, components):
em_group_comp_cls_ins = etree.SubElement(emgroup_comp_cls, 'emclass')
em_group_comp_cls_ins.text = component.uid
##@brief Writes a representation of a EmClass in xml
# @param etree : the xml object
# @param elem : the element for the EmClass
# @param name : the name of the group
# @param help_text : explanations of the EmClass
# @param fields : a dict
# @param parents : a list of EmClass uids
# @param abstract : a boolean
# @param pure_abstract : a boolean
## @brief Writes a representation of an EmClass in XML
# @param etree : the XML object
# @param elem Element : the element for the EmClass
# @param uid str : the unique identifier of the EmClass
# @param name str : the name of the group
# @param help_text MlString : explanations of the EmClass
# @param fields dict : a dict representing all the fields of the class
# @param parents list : a list of the EmClass uids of this class' parents
# @param abstract bool : a boolean
# @param pure_abstract bool : a boolean
def write_emclass_xml(etree, elem, uid, name, help_text, group, fields, parents, abstract = False, pure_abstract = False):
emclass = etree.SubElement(elem, 'class')
emclass_uid = etree.SubElement(emclass, 'uid')
@ -244,11 +260,10 @@ def write_emclass_xml(etree, elem, uid, name, help_text, group, fields, parents,
emclass_parents = etree.SubElement(emclass, 'parents')
emclass_parents.text = ",".join(parents_list)
##@brief Loads a model from a xml file
# @param model EditorialModel : the model to load
## @brief Loads a model from an XML file
# @param filename str : The file from which the editorial model will be loaded
# @return a new EditorialModel object
def load(filename):
Em = etree.parse(filename)
emodel = Em.getroot()
name = emodel.find('name')
@ -270,9 +285,10 @@ def load(filename):
grp = model.add_group(grp)
return model
##@brief Creates a EmClass from a xml description
# @param elem : the element which represents the EmClass
# @param model : the model which will contain the new class
## @brief Creates an EmClass from an XML description
# @param model EditorialModel : the model which will contain the new class
# @param elem Element: the element which represents the EmClass
# @return a new EmClass object
def load_class_xml(model, elem):
uid = elem.find('uid').text
@ -332,11 +348,11 @@ def load_class_xml(model, elem):
return emclass
##@brief Creates a EmField from a xml description
#@param elem : the element which represents the EmField
#@param model : the model which will contain the new field
#@param emclass EmClass : the EmClass of the field
#@return a new EmField object
## @brief Creates an EmField from an XML description
# @param model EditorialModel: the model which will contain the new field
# @param elem Element : the element which represents the EmField
# @param emclass EmClass : the EmClass of the field
# @return a new EmField object
def load_field_xml(model, elem, emclass):
uid = elem.find('uid').text
if elem.find('display_name').text is None:
@ -369,10 +385,11 @@ def load_field_xml(model, elem, emclass):
return emfield
##@brief Returns datahandler options from a xml description
# @param elem : the element which represents the datahandler
# @param model : the model which will contain the new field
# @return datahandler options
## @brief Returns datahandler options from an XML description
# @param elem Element : the element which represents the datahandler
# @param model EditorialModel : the model which will contain the new field
# @return dict
def load_dhdl_options_xml(model, elem):
dhdl_options=dict()
for opt in elem:
@ -396,10 +413,10 @@ def load_dhdl_options_xml(model, elem):
return dhdl_options
##@brief Creates a EmGroup from a xml description
# @param elem : the element which represents the EmGroup
# @param model : the model which will contain the new group
# @return a new EmGroup object
## @brief Creates an EmGroup from an XML description
# @param model EditorialModel : the model which will contain the new group
# @param elem Element : the element which represents the EmGroup
# @return EmGroup
def load_group_xml(model, elem):
uid = elem.find('uid')
@ -443,17 +460,16 @@ def load_group_xml(model, elem):
group = model.all_groups_ref(uid.text)
group.display_name = name
group.help_text = help_text
group.add_dependencie(requires)
group.add_dependency(requires)
else:
group = EmGroup(uid.text, requires, name, help_text)
group.add_components(comp)
return group
##@brief Constructs a MlString from a xml description
# @param elem : the element which represents the MlString
# @param model : the model which will contain the new group
# @return a new MlString object
## @brief Constructs a MlString from an XML description
# @param elem Element : the element which represents the MlString
# @return MlString
def load_mlstring_xml(elem):
mlstr = dict()
for lang in elem:

View file

@ -1,4 +1,9 @@
#-*- coding: utf-8 -*-
## @package lodel.leapi.datahandlers.datas
# This module contains specific datahandlers extending the basic ones from the lodel.leapi.datahandlers.datas_base module.
import warnings
import inspect
import re
@ -12,22 +17,29 @@ LodelContext.expose_modules(globals(), {
'LodelFatalError', 'DataNoneValid', 'FieldValidationError']})
##@brief Data field designed to handle formated strings
## @brief Data field designed to handle formatted strings
class FormatString(Varchar):
help = 'Automatic string field, designed to use the str % operator to \
build its content'
help = 'Automatic string field, designed to use the str % operator to build its content'
base_type = 'char'
##@brief Build its content with a field list and a format string
# @param format_string str
# @param field_list list : List of field to use
# @param **kwargs
## @brief Constructor
# @param format_string str : the format string used to build the content
# @param field_list list : list of field names whose values feed the format string
# @param **kwargs : additional options
def __init__(self, format_string, field_list, **kwargs):
self._field_list = field_list
self._format_string = format_string
super().__init__(internal='automatic', **kwargs)
## @brief constructs the formatted string data
# The string can be truncated depending on the maximum length defined for this field.
#
# @param emcomponent EmComponent
# @param fname str
# @param datas dict
# @param cur_value str
# @return str
def _construct_data(self, emcomponent, fname, datas, cur_value):
ret = self._format_string % tuple(
datas[fname] for fname in self._field_list)
@ -35,27 +47,28 @@ build its content'
warnings.warn("Format field overflow. Truncating value")
ret = ret[:self.max_length]
return ret
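# Hedged illustration (not part of this commit; field names are hypothetical):
# >>> fs = FormatString('%s, %s', ['lastname', 'firstname'], max_length=32)
# >>> fs._construct_data(emcomponent, 'fullname', {'lastname': 'Doe', 'firstname': 'Jane'}, None)
# 'Doe, Jane'
# Values longer than max_length are truncated and a warning is emitted.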
##@brief Varchar validated by a regex
## @brief Varchar validated by a regex
class Regex(Varchar):
help = 'String field validated with a regex. Takes two options : \
max_length and regex'
base_type = 'char'
##@brief A string field validated by a regex
# @param regex str : a regex string (passed as argument to re.compile())
## @brief A string field validated by a regex
# @param regex str : a regex string (passed as argument to re.compile()), default value is an empty string
# @param max_length int : the max length for this field (default : 10)
# @param **kwargs
# @param **kwargs : additional options
def __init__(self, regex='', max_length=10, **kwargs):
self.regex = regex
self.compiled_re = re.compile(regex) # trigger an error if invalid regex
super(self.__class__, self).__init__(max_length=max_length, **kwargs)
##@brief Check and cast value in appropriate type
#@param value *
#@throw FieldValidationError if value is unappropriate or can not be cast
#@return value
## @brief Check and cast value in appropriate type
# @param value *
# @throw FieldValidationError if value is inappropriate or cannot be cast
# @return str
def _check_data_value(self, value):
value = super()._check_data_value(value)
if not self.compiled_re.match(value) or len(value) > self.max_length:
@ -63,6 +76,10 @@ max_length and regex'
raise FieldValidationError(msg)
return value
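# Hedged illustration (not part of this commit; the pattern is hypothetical):
# >>> dh = Regex(regex=r'^[a-z0-9-]+$', max_length=32)
# >>> dh._check_data_value('my-slug-01')
# 'my-slug-01'
# >>> dh._check_data_value('Not Valid!')   # raises FieldValidationError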
## @brief checks if another datahandler can override this one
#
# @param data_handler Datahandler
# @return bool
def can_override(self, data_handler):
if not super().can_override(data_handler):
return False
@ -71,36 +88,58 @@ max_length and regex'
return False
return True
##@brief Handles uniq ID
class UniqID(Integer):
help = 'Fieldtype designed to handle editorial model UID'
base_type = 'int'
##@brief A uid field
# @param **kwargs
## @brief A uid field
#
# @param **kwargs dict
def __init__(self, **kwargs):
kwargs['internal'] = 'automatic'
super(self.__class__, self).__init__(primary_key = True, **kwargs)
## @brief Constructs the field's data
# @param emcomponent EmComponent : Component corresponding to the field
# @param fname
# @param datas
# @param cur_value int : current value to use (a new id is requested from the datasource if None)
# @return int
# @remarks fname and datas are not used and should become non mandatory, cur_value should have a None default value
def construct_data(self, emcomponent, fname, datas, cur_value):
if cur_value is None:
#Ask datasource to provide a new uniqID
return emcomponent._ro_datasource.new_numeric_id(emcomponent)
return cur_value
## @brief Class representing a LeObject subclass
class LeobjectSubclassIdentifier(Varchar):
help = 'Datahandler designed to handle LeObject subclass identifier in DB'
base_type = 'varchar'
## @brief Constructor
# @param kwargs dict : additional options
# @throw RuntimeError
# @todo define the "internal" option that can be given in the kwargs, and document its meaning
def __init__(self, **kwargs):
if 'internal' in kwargs and not kwargs['internal']:
raise RuntimeError(self.__class__.__name__+" datahandler can only \
be internal")
kwargs['internal'] = True
super().__init__(**kwargs)
## @brief Returns the class' name
# @param emcomponent EmComponent : Component corresponding to the field
# @param fname
# @param datas
# @param cur_value
# @return str
# @remarks fname, datas and cur_value should be given default values as they are not mandatory here.
def construct_data(self, emcomponent, fname, datas, cur_value):
cls = emcomponent
if not inspect.isclass(emcomponent):
@ -108,13 +147,13 @@ be internal")
return cls.__name__
##@brief Data field designed to handle concatenated fields
## @brief Data field designed to handle concatenated fields
class Concat(FormatString):
help = 'Automatic strings concatenation'
base_type = 'char'
##@brief Build its content with a field list and a separator
# @param field_list list : List of field to use
## @brief Build its content with a field list and a separator
# @param field_list list : List of fields to concatenate
# @param separator str
# @param **kwargs
def __init__(self, field_list, separator=' ', **kwargs):
@ -124,22 +163,34 @@ class Concat(FormatString):
**kwargs)
## @brief Datahandler managing a password
class Password(Varchar):
help = 'Handle passwords'
base_type = 'password'
pass
## @brief Datahandler turning a string into a list
class VarcharList(Varchar):
help = 'DataHandler designed to make a list out of a string.'
base_type = 'varchar'
## @brief Constructor
# @param delimiter str : default value is a whitespace character
# @param **kwargs : additional options
# @throw LodelException : this exception is raised when the delimiter is not a string
def __init__(self, delimiter=' ', **kwargs):
if not isinstance(delimiter, str):
raise LodelException("The delimiter has to be a string, %s given" % type(delimiter))
self.delimiter = str(delimiter)
super().__init__(**kwargs)
## @brief Constructs the field's data
# @param emcomponent EmComponent
# @param fname
# @param datas
# @param cur_value : current value to use
# @return list
# @remarks emcomponent, fname and datas should be given a default value as they seem to be non mandatory
def construct_data(self, emcomponent, fname, datas, cur_value):
result = cur_value.split(self.delimiter)
return result
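# Hedged illustration (not part of this commit; values are hypothetical):
# >>> vl = VarcharList(delimiter=',')
# >>> vl.construct_data(emcomponent, 'tags', datas, 'news,sport,tech')
# ['news', 'sport', 'tech']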

View file

@ -1,3 +1,5 @@
## @brief Exception classes for datahandlers
class LodelDataHandlerException(Exception):
pass

View file

@ -9,12 +9,13 @@ LodelContext.expose_modules(globals(), {
'LodelFatalError', 'DataNoneValid',
'FieldValidationError']})
## @brief Child class of SingleRef. The object referenced must exist
class Link(SingleRef):
pass
## @brief Child class of MultipleRef where references are represented in the form of a python list
# All the objects referenced must exist
class List(MultipleRef):
## @brief instanciates a list reference
@ -97,13 +98,16 @@ class Map(MultipleRef):
## @brief This Reference class is designed to handle hierarchies with some constraints
class Hierarch(MultipleRef):
directly_editable = False
## @brief Instanciate a data handler handling hierarchical relation with constraints
# @param back_reference tuple : Here it is mandatory to have a back ref (like a parent field)
# @param max_depth int | None : limit of depth
# @param max_childs int | None : maximum number of children per node
# @param kwargs :
# - allowed_classes list | None : list of allowed em classes if None no restriction
# - internal bool : if False, the field is not internal
def __init__(self, back_reference, max_depth=None, max_childs=None, **kwargs):
super().__init__(back_reference=back_reference,
max_depth=max_depth,

View file

@ -4,6 +4,7 @@ from lodel.context import LodelContext
LodelContext.expose_modules(globals(), {
'lodel.exceptions': ['LodelExceptions', 'LodelException']})
##@brief Handles LeApi error
class LeApiError(LodelException):
pass
@ -13,20 +14,20 @@ class LeApiErrors(LodelExceptions, LeApiError):
pass
##@brief When an error concerns a datas
##@brief When an error concerns data
class LeApiDataCheckError(LeApiError):
pass
##@brief Handles LeApi data errors
class LeApiDataCheckErrors(LodelExceptions, LeApiError):
pass
##@brief Handles leapi query errors
class LeApiQueryError(LeApiError):
pass
##@brief Handles mulitple query errors
##@brief Handles multiple query errors
class LeApiQueryErrors(LodelExceptions, LeApiQueryError):
pass

View file

@ -1,22 +1,25 @@
#-*- coding: utf-8 -*-
import os, os.path
import os
import os.path
import functools
from lodel.context import LodelContext
LodelContext.expose_modules(globals(), {
'lodel.editorial_model.components': ['EmComponent', 'EmClass', 'EmField',
'EmGroup'],
'EmGroup'],
'lodel.leapi.leobject': ['LeObject'],
'lodel.leapi.datahandlers.base_classes': ['DataHandler'],
'lodel.logger': 'logger'})
##@brief Generate python module code from a given model
# @brief Generates python module code from a given model
# @param model lodel.editorial_model.model.EditorialModel
def dyncode_from_em(model):
# Generation of LeObject child classes code
cls_code, modules, bootstrap_instr = generate_classes(model)
cls_code, bootstrap_instr = generate_classes(model)
# Header
imports = """from lodel.context import LodelContext
@ -25,10 +28,8 @@ LodelContext.expose_modules(globals(), {
'lodel.leapi.datahandlers.base_classes': ['DataField'],
'lodel.plugin.hooks': ['LodelHook']})
"""
for module in modules:
imports += "import %s\n" % module
class_list = [ LeObject.name2objname(cls.uid) for cls in get_classes(model) ]
# generates the list of all classes in the editorial model
class_list = [LeObject.name2objname(cls.uid) for cls in get_classes(model)]
# formating all components of output
res_code = """#-*- coding: utf-8 -*-
@ -41,17 +42,21 @@ dynclasses = {class_list}
dynclasses_dict = {class_dict}
{common_code}
""".format(
imports = imports,
classes = cls_code,
bootstrap_instr = bootstrap_instr,
class_list = '[' + (', '.join([cls for cls in class_list]))+']',
class_dict = '{' + (', '.join([ "'%s': %s" % (cls, cls)
for cls in class_list]))+'}',
common_code = common_code(),
imports=imports,
classes=cls_code,
bootstrap_instr=bootstrap_instr,
class_list='[' + (', '.join([cls for cls in class_list])) + ']',
class_dict='{' + (', '.join(["'%s': %s" % (cls, cls)
for cls in class_list])) + '}',
common_code=common_code(),
)
return res_code
##@brief Return the content of lodel.leapi.lefactory_common
# @brief Returns the content of lodel.leapi.lefactory_common
#
# @return a string
def common_code():
res = ""
fname = os.path.dirname(__file__)
@ -61,23 +66,32 @@ def common_code():
if not line.startswith('#-'):
res += line
return res
##@brief return A list of EmClass sorted by dependencies
# @brief Returns a list of EmClass sorted by dependencies
#
# The first elts in the list depends on nothing, etc.
# The first elements in the list depend on nothing, etc.
# @param emclass_list list : a list of EmClass instances to be sorted
# @return a list of EmClass instances
def emclass_sorted_by_deps(emclass_list):
def emclass_deps_cmp(cls_a, cls_b):
return len(cls_a.parents_recc) - len(cls_b.parents_recc)
ret = sorted(emclass_list, key = functools.cmp_to_key(emclass_deps_cmp))
ret = sorted(emclass_list, key=functools.cmp_to_key(emclass_deps_cmp))
return ret
##@brief Returns a list of EmClass that will be represented as LeObject child classes
def get_classes(model):
return [ cls for cls in emclass_sorted_by_deps(model.classes()) if not cls.pure_abstract ]
# @brief Returns a list of EmClass instances that will be represented as LeObject child classes
# @param model : an EditorialModel instance
# @return a list of EmClass instances
def get_classes(model):
return [cls for cls in emclass_sorted_by_deps(model.classes()) if not cls.pure_abstract]
# @brief Given an EmField returns the data_handler constructor suitable for dynamic code
# @param emfield EmField : an EmField instance
# @return a string
##@brief Given an EmField returns the data_handler constructor suitable for dynamic code
def data_handler_constructor(emfield):
#dh_module_name = DataHandler.module_name(emfield.data_handler_name)+'.DataHandler'
get_handler_class_instr = 'DataField.from_name(%s)' % repr(emfield.data_handler_name)
@ -85,60 +99,65 @@ def data_handler_constructor(emfield):
for name, val in emfield.data_handler_options.items():
if name == 'back_reference' and isinstance(val, tuple):
options.append('{optname}: ({leo_name}, {fieldname})'.format(
optname = repr(name),
leo_name = LeObject.name2objname(val[0]),
fieldname = repr(val[1]),))
optname=repr(name),
leo_name=LeObject.name2objname(val[0]),
fieldname=repr(val[1]),))
else:
options.append(repr(name)+': '+forge_optval(val))
options.append(repr(name) + ': ' + forge_optval(val))
return '{handler_instr}(**{{ {options} }})'.format(
handler_instr = get_handler_class_instr,
options = ', '.join(options))
##@brief Return a python repr of option values
handler_instr=get_handler_class_instr,
options=', '.join(options))
# @brief Return a python repr of option values
# @param optval : a value of any type representing an option
# @return a string
def forge_optval(optval):
if isinstance(optval, dict):
return '{' + (', '.join( [ '%s: %s' % (repr(name), forge_optval(val)) for name, val in optval.items()])) + '}'
return '{' + (', '.join(['%s: %s' % (repr(name), forge_optval(val)) for name, val in optval.items()])) + '}'
if isinstance(optval, (set, list, tuple)):
return '[' + (', '.join([forge_optval(val) for val in optval])) + ']'
if isinstance(optval, EmField):
return "{leobject}.data_handler({fieldname})".format(
leobject = LeObject.name2objname(optval._emclass.uid),
fieldname = repr(optval.uid)
)
elif isinstance(optval, EmClass):
leobject=LeObject.name2objname(optval._emclass.uid),
fieldname=repr(optval.uid)
)
if isinstance(optval, EmClass):
return LeObject.name2objname(optval.uid)
else:
return repr(optval)
##@brief Generate dyncode from an EmClass
# @param model EditorialModel :
# @todo delete imports. It is never used; the return parameters were changed accordingly.
return repr(optval)
# @brief Generate dyncode from an EmClass
# @param model EditorialModel :
# @return a tuple with the emclass python code and the python instructions to bootstrap the dynamic code, in this order
def generate_classes(model):
res = ""
imports = list()
bootstrap = ""
# Generating field list for LeObjects generated from EmClass
for em_class in get_classes(model):
logger.info("Generating a dynamic class for %s" % em_class.uid)
uid = list() # List of fieldnames that are part of the EmClass primary key
parents = list() # List of parents EmClass
# Determine pk
uid = list() # List for fieldnames that are part of the EmClass primary key
parents = list() # List for em_class's parents
# Determines primary key
for field in em_class.fields():
if field.data_handler_instance.is_primary_key():
uid.append(field.uid)
# Determine parent for inheritance
# Determines parents for inheritance
if len(em_class.parents) > 0:
for parent in em_class.parents:
parents.append(LeObject.name2objname(parent.uid))
parents.append(LeObject.name2objname(parent.uid))
else:
parents.append('LeObject')
datasource_name = em_class.datasource
# Dynamic code generation for LeObject childs classes
# Dynamic code generation for LeObject child classes
em_cls_code = """
class {clsname}({parents}):
_abstract = {abstract}
@ -150,12 +169,12 @@ class {clsname}({parents}):
_child_classes = None
""".format(
clsname = LeObject.name2objname(em_class.uid),
parents = ', '.join(parents),
abstract = 'True' if em_class.abstract else 'False',
uid_list = repr(uid),
datasource_name = repr(datasource_name),
)
clsname=LeObject.name2objname(em_class.uid),
parents=', '.join(parents),
abstract='True' if em_class.abstract else 'False',
uid_list=repr(uid),
datasource_name=repr(datasource_name),
)
res += em_cls_code
# Dyncode fields bootstrap instructions
child_classes = model.get_class_childs(em_class.uid)
@ -163,14 +182,14 @@ class {clsname}({parents}):
child_classes = 'tuple()'
else:
child_classes = '(%s,)' % (', '.join(
[ LeObject.name2objname(emcls.uid) for emcls in child_classes]))
[LeObject.name2objname(emcls.uid) for emcls in child_classes]))
bootstrap += """{classname}._set__fields({fields})
{classname}._child_classes = {child_classes}
""".format(
classname = LeObject.name2objname(em_class.uid),
fields = '{' + (', '.join(['\n\t%s: %s' % (repr(emfield.uid),data_handler_constructor(emfield)) for emfield in em_class.fields()])) + '}',
child_classes = child_classes,
)
classname=LeObject.name2objname(em_class.uid),
fields='{' + (', '.join(['\n\t%s: %s' % (repr(emfield.uid),
data_handler_constructor(emfield)) for emfield in em_class.fields()])) + '}',
child_classes=child_classes,
)
bootstrap += "\n"
return res, set(imports), bootstrap
return res, bootstrap
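A minimal sketch of how the two returned strings are meant to be consumed; the model variable and the exec-based loading shown here are assumptions for illustration, not code from this commit:

# Assuming `model` is an EditorialModel instance:
class_code, bootstrap_code = generate_classes(model)
namespace = {'LeObject': LeObject}
exec(class_code, namespace)      # defines one dynamic class per EmClass
exec(bootstrap_code, namespace)  # injects _fields and _child_classes into them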


@ -5,7 +5,7 @@
#- All lines that begins with #- will be deleted from dynamically generated
#- code...
##@brief Return a dynamically generated class given it's name
##@brief Returns a dynamically generated class given its name
#@param name str : The dynamic class name
#@return False or a child class of LeObject
def name2class(name):
@ -14,7 +14,7 @@ def name2class(name):
return dynclasses_dict[name]
##@brief Return a dynamically generated class given it's name
##@brief Returns a dynamically generated class given its name
#@note Case insensitive version of name2class
#@param name str
#@return False or a child class of LeObject
@ -26,11 +26,10 @@ def lowername2class(name):
return new_dict[name]
##@brief Trigger dynclasses datasources initialisation
##@brief Triggers dynclasses datasources initialisation
@LodelHook("lodel2_plugins_loaded")
def lodel2_dyncode_datasources_init(self, caller, payload):
for cls in dynclasses:
cls._init_datasources()
LodelContext.expose_modules(globals(), {'lodel.plugin.hooks': ['LodelHook']})
LodelHook.call_hook("lodel2_dyncode_loaded", __name__, dynclasses)


@ -1,5 +1,9 @@
#-*- coding: utf-8 -*-
## @package lodel.leapi.leobject
# This module is centered around the basic LeObject class, which is the main class for all the objects managed by lodel.
import importlib
import warnings
import copy
@ -22,20 +26,16 @@ LodelContext.expose_modules(globals(), {
'lodel.plugin': ['Plugin', 'DatasourcePlugin'],
'lodel.leapi.datahandlers.base_classes': ['DatasConstructor', 'Reference']})
# @brief Stores the name of the field present in each LeObject that indicates
# the name of LeObject subclass represented by this object
## @brief Stores the name of the field present in each LeObject that indicates the name of LeObject subclass represented by this object
CLASS_ID_FIELDNAME = "classname"
# @brief Wrapper class for LeObject getter & setter
## @brief Wrapper class for LeObject getter & setter
#
# This class intend to provide easy & friendly access to LeObject fields values
# without name collision problems
# This class intends to provide easy & friendly access to LeObject field values without name collision problems
# @note Wrapped methods are : LeObject.data() & LeObject.set_data()
class LeObjectValues(object):
# @brief Construct a new LeObjectValues
# @param fieldnames_callback method
# @param set_callback method : The LeObject.set_datas() method of corresponding LeObject class
# @param get_callback method : The LeObject.get_datas() method of corresponding LeObject class
@ -43,47 +43,49 @@ class LeObjectValues(object):
self._setter = set_callback
self._getter = get_callback
# @brief Provide read access to datas values
## @brief Provides read access to datas values
# @note Read access should be provided for all fields
# @param fname str : Field name
# @return method
def __getattribute__(self, fname):
getter = super().__getattribute__('_getter')
return getter(fname)
# @brief Provide write access to datas values
## @brief Provides write access to datas values
# @note Write access shouldn't be provided for internal or immutable fields
# @param fname str : Field name
# @param fval * : the field value
# @return method
def __setattribute__(self, fname, fval):
setter = super().__getattribute__('_setter')
return setter(fname, fval)
## @brief Represents a handled object in Lodel.
class LeObject(object):
# @brief boolean that tells if an object is abtract or not
## @brief boolean that tells if an object is abstract or not
_abstract = None
# @brief A dict that stores DataHandler instances indexed by field name
## @brief A dict that stores DataHandler instances indexed by field name
_fields = None
# @brief A tuple of fieldname (or a uniq fieldname) representing uid
## @brief A tuple of fieldnames (or a single fieldname) representing the uid
_uid = None
# @brief Read only datasource ( see @ref lodel2_datasources )
## @brief Read only datasource ( see @ref lodel2_datasources )
_ro_datasource = None
# @brief Read & write datasource ( see @ref lodel2_datasources )
## @brief Read & write datasource ( see @ref lodel2_datasources )
_rw_datasource = None
# @brief Store the list of child classes
## @brief Store the list of child classes
_child_classes = None
# @brief Name of the datasource plugin
## @brief Name of the datasource plugin
_datasource_name = None
def __new__(cls, **kwargs):
self = object.__new__(cls)
# @brief A dict that stores fieldvalues indexed by fieldname
## @brief A dict that stores fieldvalues indexed by fieldname
self.__datas = {fname: None for fname in self._fields}
# @brief Store a list of initianilized fields when instanciation not complete else store True
## @brief Stores a list of initialized fields while instanciation is not complete, else stores True
self.__initialized = list()
# @brief Datas accessor. Instance of @ref LeObjectValues
## @brief Datas accessor. Instance of @ref LeObjectValues
self.d = LeObjectValues(self.fieldnames, self.set_data, self.data)
for fieldname, fieldval in kwargs.items():
self.__datas[fieldname] = fieldval
@ -92,8 +94,10 @@ class LeObject(object):
self.__set_initialized()
return self
# @brief Construct an object representing an Editorial component
# @note Can be considered as EmClass instance
# @param **kwargs
# @throw NotImplementedError when the class being instantiated is marked as abstract and therefore should not be instantiated.
# @throw LeApiError in case of missing or invalid data.
def __init__(self, **kwargs):
if self._abstract:
raise NotImplementedError(
@ -130,19 +134,21 @@ class LeObject(object):
# Fields datas handling methods #
#-----------------------------------#
# @brief Property method True if LeObject is initialized else False
## @brief Property method True if LeObject is initialized else False
# @return bool
@property
def initialized(self):
return self.__is_initialized
# @return The uid field name
## @brief Returns the uid field name
# @return str
@classmethod
def uid_fieldname(cls):
return cls._uid
# @brief Return a list of fieldnames
# @param include_ro bool : if True include read only field names
# @return a list of str
## @brief Returns a list of fieldnames
# @param include_ro bool : if True includes the read only field names
# @return list of string
@classmethod
def fieldnames(cls, include_ro=False):
if not include_ro:
@ -150,13 +156,17 @@ class LeObject(object):
else:
return list(cls._fields.keys())
## @brief Returns a name, capitalizing the first character of each word
# @param name str
# @return str
@classmethod
def name2objname(cls, name):
return name.title()
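Since the implementation simply delegates to str.title(), the mapping is deterministic; for example:

LeObject.name2objname('article')       # -> 'Article'
LeObject.name2objname('text_section')  # -> 'Text_Section'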
# @brief Return the datahandler asssociated with a LeObject field
# @param fieldname str : The fieldname
## @brief Returns the datahandler associated with a LeObject field
# @param fieldname str : The field's name
# @return A data handler instance
# @throw NameError when the given field name doesn't exist
#@todo update class of exception raised
@classmethod
def data_handler(cls, fieldname):
@ -164,9 +174,9 @@ class LeObject(object):
raise NameError("No field named '%s' in %s" % (fieldname, cls.__name__))
return cls._fields[fieldname]
# @brief Getter for references datahandlers
#@param with_backref bool : if true return only references with back_references
#@return <code>{'fieldname': datahandler, ...}</code>
## @brief Returns a dictionary containing the reference datahandlers
# @param with_backref bool : if true return only references with back_references
# @return dict : <code>{'fieldname': datahandler, ...}</code>
@classmethod
def reference_handlers(cls, with_backref=True):
return {fname: fdh
@ -174,11 +184,12 @@ class LeObject(object):
if fdh.is_reference() and
(not with_backref or fdh.back_reference is not None)}
# @brief Return a LeObject child class from a name
## @brief Returns a LeObject child class from a name
# @warning This method has to be called from dynamically generated LeObjects
# @param leobject_name str : LeObject name
# @return A LeObject child class
# @throw NameError if invalid name given
# @throw NotImplementedError if the method is abstract (if we use the LeObject class)
# @throw LeApiError if a nonexistent name is given
@classmethod
def name2class(cls, leobject_name):
if cls.__module__ == 'lodel.leapi.leobject':
@ -189,14 +200,16 @@ class LeObject(object):
except (AttributeError, TypeError):
raise LeApiError("No LeObject named '%s'" % leobject_name)
## @brief Checks if the class is abstract or not
# @return bool
@classmethod
def is_abstract(cls):
return cls._abstract
# @brief Field data handler getter
#@param fieldname str : The field name
#@return A datahandler instance
#@throw NameError if the field doesn't exist
## @brief Field data handler getter
# @param fieldname str : The field name
# @return A datahandler instance
# @throw NameError if the field doesn't exist
@classmethod
def field(cls, fieldname):
try:
@ -204,8 +217,9 @@ class LeObject(object):
except KeyError:
raise NameError("No field named '%s' in %s" % (fieldname,
cls.__name__))
# @return A dict with fieldname as key and datahandler as instance
## @brief Returns the fields' datahandlers as a dictionary
# @param include_ro bool : if True, includes the read-only fields (default value : False)
# @return dict
@classmethod
def fields(cls, include_ro=False):
if include_ro:
@ -214,14 +228,12 @@ class LeObject(object):
return {fname: cls._fields[fname] for fname in cls._fields\
if not cls._fields[fname].is_internal()}
# @brief Return the list of parents classes
## @brief Return the list of parents classes
#
#@note the first item of the list is the current class, the second is it's
# parent etc...
#@param cls
#@warning multiple inheritance broken by this method
#@return a list of LeObject child classes
#@todo multiple parent capabilities implementation
# @note the first item of the list is the current class, the second is its parent etc...
# @warning multiple inheritance broken by this method
# @return a list of LeObject child classes
# @todo multiple parent capabilities implementation
@classmethod
def hierarch(cls):
res = [cls]
@ -234,16 +246,15 @@ class LeObject(object):
res.append(cur)
return res
# @brief Return a tuple a child classes
#@return a tuple of child classes
## @brief Returns a tuple of child classes
# @return tuple
@classmethod
def child_classes(cls):
return copy.copy(cls._child_classes)
# @brief Return the parent class that is the "source" of uid
## @brief Returns the parent class that defines the unique id
#
# The method goal is to return the parent class that defines UID.
#@return a LeObject child class or false if no UID defined
# @return a LeObject child class or false if no UID defined
@classmethod
def uid_source(cls):
if cls._uid is None or len(cls._uid) == 0:
@ -259,11 +270,11 @@ class LeObject(object):
prev = pcls
return prev
# @brief Initialise both datasources (ro and rw)
## @brief Initialise both datasources (ro and rw)
#
# This method is used once at dyncode load to replace the datasource string
# by a datasource instance to avoid doing this operation for each query
#@see LeObject::_init_datasource()
# @see LeObject::_init_datasource()
@classmethod
def _init_datasources(cls):
if isinstance(cls._datasource_name, str):
@ -291,16 +302,16 @@ class LeObject(object):
log_msg %= (ro_ds, cls.__name__)
logger.debug(log_msg)
# @brief Return the uid of the current LeObject instance
#@return the uid value
#@warning Broke multiple uid capabilities
## @brief Returns the uid of the current LeObject instance
# @return str
# @warning Breaks multiple uid capabilities
def uid(self):
return self.data(self._uid[0])
# @brief Read only access to all datas
## @brief Returns the value of a field
# @note for fancy data accessor use @ref LeObject.g attribute @ref LeObjectValues instance
# @param field_name str : field name
# @return the Value
# @param field_name str : field's name
# @return the value
# @throw RuntimeError if the field is not initialized yet
# @throw NameError if name is not an existing field name
def data(self, field_name):
@ -311,18 +322,19 @@ class LeObject(object):
"The field %s is not initialized yet (and have no value)" % field_name)
return self.__datas[field_name]
# @brief Read only access to all datas
#@return a dict representing datas of current instance
## @brief Returns a dictionary containing all the fields' values
# @return dict
def datas(self, internal=False):
return {fname: self.data(fname) for fname in self.fieldnames(internal)}
# @brief Datas setter
## @brief Datas setter
# @note for fancy data accessor use @ref LeObject.g attribute @ref LeObjectValues instance
# @param fname str : field name
# @param fname str : field's name
# @param fval * : field value
# @return the value that is really set
# @throw NameError if fname is not valid
# @throw AttributeError if the field is not writtable
# @throw LeApiErrors if the data check generates an error
def set_data(self, fname, fval):
if fname not in self.fieldnames(include_ro=False):
if fname not in self._fields.keys():
@ -353,18 +365,18 @@ class LeObject(object):
else:
self.__datas[fname] = val
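A short, hedged illustration of the read/write accessors (Article, the uid value and the 'title' field are hypothetical names):

article = Article.get_from_uid(42)       # hypothetical dynclass and uid
article.set_data('title', 'New title')   # checked write access
article.data('title')                    # -> 'New title'
article.d.title                          # same value through the LeObjectValues wrapper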
# @brief Update the __initialized attribute according to LeObject internal state
## @brief Updates the __initialized attribute according to LeObject internal state
#
# Check the list of initialized fields and set __initialized to True if all fields initialized
# Checks the list of initialized fields and sets __initialized to True if all fields are initialized
def __set_initialized(self):
if isinstance(self.__initialized, list):
expected_fields = self.fieldnames(include_ro=False) + self._uid
if set(expected_fields) == set(self.__initialized):
self.__is_initialized = True
# @brief Designed to be called when datas are modified
## @brief Designed to be called when datas are modified
#
# Make different checks on the LeObject given it's state (fully initialized or not)
# Makes different checks on the LeObject given its state (fully initialized or not)
# @return None if checks succeeded, else returns an exception list
def __check_modified_values(self):
err_list = dict()
@ -409,24 +421,24 @@ class LeObject(object):
# Other methods #
#--------------------#
# @brief Temporary method to set private fields attribute at dynamic code generation
## @brief Temporary method to set private fields attribute at dynamic code generation
#
# This method is used in the generated dynamic code to set the _fields attribute
# at the end of the dyncode parse
# @warning This method is deleted once the dynamic code loaded
# @param field_list list : list of EmField instance
# @param cls
# @param field_list list : list of EmField instance
@classmethod
def _set__fields(cls, field_list):
cls._fields = field_list
# @brief Check that datas are valid for this type
## @brief Checks if the data is valid for this type
# @param datas dict : keys are field names, values are field values
# @param complete bool : if True expect that datas provide values for all non internal fields
# @param allow_internal bool : if True don't raise an error if a field is internal
# @param complete bool : if True expects that values are provided for all non internal fields
# @param allow_internal bool : if True does not raise an error if a field is internal
# @param cls
# @return Checked datas
# @throw LeApiDataCheckError if errors reported during check
# @throw LeApiDataCheckErrors if errors are reported during check
@classmethod
def check_datas_value(cls, datas, complete=False, allow_internal=True):
err_l = dict() # Error storing
@ -459,14 +471,13 @@ class LeObject(object):
raise LeApiDataCheckErrors("Error while checking datas", err_l)
return checked_datas
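A hedged sketch of the expected behaviour, assuming a dynclass Article with a 'title' field:

checked = Article.check_datas_value({'title': 'Hello'}, complete=False)
# returns the checked datas dict; raises LeApiDataCheckErrors with one
# entry per faulty field when a value is rejected by its datahandler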
# @brief Check and prepare datas
## @brief Checks and prepares all the data
#
# @warning when complete = False we are not able to run construct_datas() and _check_data_consistency()
#
# @param datas dict : {fieldname : fieldvalue, ...}
# @param complete bool : If True you MUST give all the datas
# @param allow_internal : Wether or not interal fields are expected in datas
# @param cls
# @param complete bool : If True you MUST give all the datas (default value : False)
# @param allow_internal : Whether or not internal fields are expected in datas (default value : True)
# @return Datas ready for use
# @todo: complete is very unsafe, find a way to get rid of it
@classmethod
@ -482,9 +493,8 @@ construction and consitency when datas are not complete\n")
cls._check_datas_consistency(ret_datas)
return ret_datas
# @brief Construct datas values
## @brief Constructs datas values
#
# @param cls
# @param datas dict : Datas that have been returned by LeCrud.check_datas_value() methods
# @return A new dict of datas
# @todo IMPLEMENTATION
@ -498,12 +508,12 @@ construction and consitency when datas are not complete\n")
}
return ret
# @brief Check datas consistency
## @brief Checks datas consistency
# 
# @warning assert that datas is complete
# @param cls
# @param datas dict : Datas that have been returned by LeCrud._construct_datas() method
# @throw LeApiDataCheckError if fails
# @throw LeApiDataCheckError in case of failure
@classmethod
def _check_datas_consistency(cls, datas):
err_l = []
@ -516,27 +526,28 @@ construction and consitency when datas are not complete\n")
if len(err_l) > 0:
raise LeApiDataCheckError("Datas consistency checks fails", err_l)
# @brief Check datas consistency
## @brief Checks data consistency
# 
# @warning assert that datas is complete
# @param cls
# @param datas dict : Datas that have been returned by LeCrud.prepare_datas() method
# @param datas dict : Data that have been returned by prepare_datas() method
# @param type_query str : Type of query to be performed , default value : insert
@classmethod
def make_consistency(cls, datas, type_query='insert'):
for fname, dh in cls._fields.items():
ret = dh.make_consistency(fname, datas, type_query)
# @brief Add a new instance of LeObject
# @return a new uid en case of success, False otherwise
## @brief Adds a new instance of LeObject
# @param datas dict : LeObject's data
# @return a new uid in case of success, False otherwise
@classmethod
def insert(cls, datas):
query = LeInsertQuery(cls)
return query.execute(datas)
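A hedged usage sketch, assuming a generated dynclass Article with 'title' and 'content' fields:

new_uid = Article.insert({'title': 'Hello', 'content': 'Lorem ipsum'})
if new_uid is False:
    pass  # insertion failed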
# @brief Update an instance of LeObject
## @brief Update an instance of LeObject
#
#@param datas : list of new datas
# @param datas dict : the new datas (defaults to the instance's current datas)
# @return LeObject
def update(self, datas=None):
datas = self.datas(internal=False) if datas is None else datas
uids = self._uid
@ -555,9 +566,9 @@ construction and consitency when datas are not complete\n")
return result
# @brief Delete an instance of LeObject
## @brief Delete an instance of LeObject
#
#@return 1 if the objet has been deleted
# @return 1 if the object has been deleted
def delete(self):
uids = self._uid
query_filter = list()
@ -570,9 +581,9 @@ construction and consitency when datas are not complete\n")
return result
# @brief Delete instances of LeObject
#@param query_filters list
#@returns the number of deleted items
## @brief Deletes instances of LeObject
# @param query_filters list
# @return the number of deleted items
@classmethod
def delete_bundle(cls, query_filters):
deleted = 0
@ -589,16 +600,16 @@ construction and consitency when datas are not complete\n")
deleted += result
return deleted
# @brief Get instances of LeObject
## @brief Gets instances of LeObject
#
#@param query_filters dict : (filters, relational filters), with filters is a list of tuples : (FIELD, OPERATOR, VALUE) )
#@param field_list list|None : list of string representing fields see
#@ref leobject_filters
#@param order list : A list of field names or tuple (FIELDNAME,[ASC | DESC])
#@param group list : A list of field names or tuple (FIELDNAME,[ASC | DESC])
#@param limit int : The maximum number of returned results
#@param offset int : offset
#@return a list of items (lists of (fieldname, fieldvalue))
# @param query_filters dict : (filters, relational filters), where filters is a list of tuples : (FIELD, OPERATOR, VALUE)
# @param field_list list|None : list of strings representing fields, see
# @ref leobject_filters
# @param order list : A list of field names or tuple (FIELDNAME,[ASC | DESC])
# @param group list : A list of field names or tuple (FIELDNAME,[ASC | DESC])
# @param limit int : The maximum number of returned results
# @param offset int : offset (default value : 0)
# @return a list of items (lists of (fieldname, fieldvalue))
@classmethod
def get(cls, query_filters, field_list=None, order=None, group=None, limit=None, offset=0):
if field_list is not None:
@ -628,7 +639,10 @@ construction and consitency when datas are not complete\n")
return objects
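A hedged usage sketch of the documented signature (Article and its fields are hypothetical names):

articles = Article.get(
    [('title', 'like', '%lodel%')],   # query_filters : (FIELD, OPERATOR, VALUE) tuples
    field_list=['title', 'date'],
    order=[('date', 'DESC')],
    limit=10)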
# @brief Retrieve an object given an UID
## @brief Retrieves an object given an UID
# @param uid str : Unique ID of the searched LeObject
# @return LeObject
# @throw LodelFatalError if the class does not have such a UID defined or if duplicates are found
#@todo broken multiple UID
@classmethod
def get_from_uid(cls, uid):
@ -645,7 +659,7 @@ construction and consitency when datas are not complete\n")
while len(res_cp) > 0:
cur_res = res_cp.pop()
if cur_res.uid() in [r.uid() for r in res_cp]:
logger.error("DOUBLON detected in query results !!!")
logger.error("Duplicates detected in query results !!!")
else:
res.append(cur_res)
if len(res) > 1:


@ -8,20 +8,22 @@ import warnings
from lodel.context import LodelContext
LodelContext.expose_modules(globals(), {
'lodel.leapi.exceptions': ['LeApiError', 'LeApiErrors',
'LeApiDataCheckError', 'LeApiDataCheckErrors', 'LeApiQueryError',
'LeApiQueryErrors'],
'LeApiDataCheckError', 'LeApiDataCheckErrors', 'LeApiQueryError',
'LeApiQueryErrors'],
'lodel.plugin.hooks': ['LodelHook'],
'lodel.logger': ['logger']})
##@todo check datas when running query
# @todo check data when running query
class LeQuery(object):
##@brief Hookname prefix
# @brief Hookname prefix
_hook_prefix = None
##@brief arguments for the LeObject.check_data_value()
# @brief arguments for the LeObject.check_data_value()
_data_check_args = {'complete': False, 'allow_internal': False}
##@brief Abstract constructor
# @brief Abstract constructor
# @param target_class LeObject : class of object the query is about
def __init__(self, target_class):
from .leobject import LeObject
@ -29,77 +31,80 @@ class LeQuery(object):
raise NotImplementedError("Abstract class")
if not inspect.isclass(target_class) or \
not issubclass(target_class, LeObject):
raise TypeError("target class has to be a child class of LeObject but %s given"% target_class)
raise TypeError(
"target class has to be a child class of LeObject but %s given" % target_class)
self._target_class = target_class
self._ro_datasource = target_class._ro_datasource
self._rw_datasource = target_class._rw_datasource
##@brief Execute a query and return the result
#@param **datas
# @brief Executes a query and returns the result
#@param **data
#@return the query result
#@see LeQuery._query()
#@todo check that the check_datas_value is not duplicated/useless
def execute(self, datas):
if not datas is None:
def execute(self, data):
if data is not None:
self._target_class.check_datas_value(
datas,
**self._data_check_args)
self._target_class.prepare_datas(datas) #not yet implemented
data,
**self._data_check_args)
self._target_class.prepare_datas(data) # not yet implemented
if self._hook_prefix is None:
raise NotImplementedError("Abstract method")
LodelHook.call_hook(self._hook_prefix+'pre',
self._target_class,
datas)
ret = self._query(datas=datas)
ret = LodelHook.call_hook(self._hook_prefix+'post',
self._target_class,
ret)
LodelHook.call_hook(self._hook_prefix + 'pre',
self._target_class,
data)
ret = self._query(data=data)
ret = LodelHook.call_hook(self._hook_prefix + 'post',
self._target_class,
ret)
return ret
##@brief Childs classes implements this method to execute the query
#@param **datas
# @brief Child classes implement this method to execute the query
#@param **data
#@return query result
def _query(self, **datas):
def _query(self, **data):
raise NotImplementedError("Asbtract method")
##@return a dict with query infos
# @return a dict with query infos
def dump_infos(self):
return {'target_class': self._target_class}
def __repr__(self):
ret = "<{classname} target={target_class}>"
return ret.format(
classname=self.__class__.__name__,
target_class = self._target_class)
classname=self.__class__.__name__,
target_class=self._target_class)
# @brief Abstract class handling query with filters
##@brief Abstract class handling query with filters
class LeFilteredQuery(LeQuery):
##@brief The available operators used in query definitions
# @brief The available operators used in query definitions
_query_operators = [
' = ',
' <= ',
' >= ',
' != ',
' < ',
' > ',
' in ',
' not in ',
' like ',
' not like ']
' = ',
' <= ',
' >= ',
' != ',
' < ',
' > ',
' in ',
' not in ',
' like ',
' not like ']
##@brief Regular expression to process filters
# @brief Regular expression to process filters
_query_re = None
##@brief Abtract constructor for queries with filter
# @brief Abstract constructor for queries with filter
#@param target_class LeObject : class of object the query is about
#@param query_filters list : a single filter tuple, a list of
# tuples, or a dict: {OP: list(filters)} with OP = 'OR' or 'AND', each tuple being
# (FIELD, OPERATOR, VALUE)
def __init__(self, target_class, query_filters=None):
super().__init__(target_class)
##@brief The query filter tuple(std_filter, relational_filters)
# @brief The query filter tuple(std_filter, relational_filters)
self._query_filter = None
##@brief Stores potential subqueries (used when a query implies
# @brief Stores potential subqueries (used when a query implies
# more than one datasource.
#
# Subqueries are tuple(target_class_ref_field, LeGetQuery)
@ -107,11 +112,11 @@ class LeFilteredQuery(LeQuery):
query_filters = [] if query_filters is None else query_filters
self.set_query_filter(query_filters)
##@brief Abstract FilteredQuery execution method
# @brief Abstract FilteredQuery execution method
#
# This method takes care to execute subqueries before calling super execute
def execute(self, datas=None):
#copy originals filters
def execute(self, data=None):
# copy originals filters
orig_filters = copy.copy(self._query_filter)
std_filters, rel_filters = self._query_filter
@ -123,17 +128,17 @@ class LeFilteredQuery(LeQuery):
try:
filters, rel_filters = self._query_filter
res = super().execute(datas)
res = super().execute(data)
except Exception as e:
#restoring filters even if an exception is raised
# restoring filters even if an exception is raised
self.__query_filter = orig_filters
raise e #reraise
#restoring filters
raise e # reraise
# restoring filters
self._query_filter = orig_filters
return res
##@brief Add filter(s) to the query
# @brief Add filter(s) to the query
#
# This method is also able to slice the query if different datasources are
# involved in the request
@ -144,39 +149,39 @@ class LeFilteredQuery(LeQuery):
def set_query_filter(self, query_filter):
if isinstance(query_filter, str):
query_filter = [query_filter]
#Query filter prepration
# Query filter preparation
filters_orig, rel_filters = self._prepare_filters(query_filter)
# Here we know that each relational filter concerns only one datasource
# thanks to _prepare_relational_fields
#Multiple datasources detection
# Multiple datasources detection
self_ds_name = self._target_class._datasource_name
result_rel_filters = list() # The filters that will stay in the query
result_rel_filters = list() # The filters that will stay in the query
other_ds_filters = dict()
for rfilter in rel_filters:
(rfield, ref_dict), op, value = rfilter
#rfield : the field in self._target_class
tmp_rel_filter = dict() #designed to stores rel_field of same DS
# rfield : the field in self._target_class
tmp_rel_filter = dict()  # designed to store rel_fields of the same DS
# First step : simplification
# Trying to delete relational filters done on referenced class uid
for tclass, tfield in copy.copy(ref_dict).items():
#tclass : reference target class
#tfield : referenced field from target class
# tclass : reference target class
# tfield : referenced field from target class
#
# !!!WARNING!!!
# The line below breaks multi UID support
#
if tfield == tclass.uid_fieldname()[0]:
#This relational filter can be simplified as
# This relational filter can be simplified as
# ref_field, op, value
# Note : we will have to dedup filters_orig
filters_orig.append((rfield, op, value))
del(ref_dict[tclass])
if len(ref_dict) == 0:
continue
#Determine what to do with other relational filters given
# Determine what to do with other relational filters given
# referenced class datasource
#Remember : each class in a relational filter has the same
# Remember : each class in a relational filter has the same
# datasource
tclass = list(ref_dict.keys())[0]
cur_ds = tclass._datasource_name
@ -189,23 +194,23 @@ class LeFilteredQuery(LeQuery):
other_ds_filters[cur_ds] = list()
other_ds_filters[cur_ds].append(
((rfield, ref_dict), op, value))
#deduplication of std filters
# deduplication of std filters
filters_cp = set()
if not isinstance(filters_orig, set):
for i, cfilt in enumerate(filters_orig):
a, b, c = cfilt
if isinstance(c, list): #list are not hashable
if isinstance(c, list): # list are not hashable
newc = tuple(c)
else:
newc = c
old_len = len(filters_cp)
filters_cp |= set((a,b,newc))
filters_cp |= set((a, b, newc))
if len(filters_cp) == old_len:
del(filters_orig[i])
# Sets _query_filter attribute of self query
self._query_filter = (filters_orig, result_rel_filters)
#Sub queries creation
# Sub queries creation
subq = list()
for ds, rfilters in other_ds_filters.items():
for rfilter in rfilters:
@ -218,7 +223,7 @@ class LeFilteredQuery(LeQuery):
subq.append((rfield, query))
self.subqueries = subq
##@return informations
# @return informations
def dump_infos(self):
ret = super().dump_infos()
ret['query_filter'] = self._query_filter
@ -238,16 +243,16 @@ class LeFilteredQuery(LeQuery):
res += '>'
return res
## @brief Prepare filters for datasource
# @brief Prepare filters for datasource
#
#A filter can be a string or a tuple with len = 3.
# A filter can be a string or a tuple with len = 3.
#
#This method divide filters in two categories :
# This method divides filters into two categories :
#
#@par Simple filters
#
#Those filters concerns fields that represent object values (a title,
#the content, etc.) They are composed of three elements : FIELDNAME OP
# These filters concern fields that represent object values (a title,
# the content, etc.). They are composed of three elements : FIELDNAME OP
# VALUE . Where :
#- FIELDNAME is the name of the field
#- OP is one of the authorized comparison operands (see
@ -256,14 +261,14 @@ class LeFilteredQuery(LeQuery):
#
#@par Relational filters
#
#Those filters concerns on reference fields (see the corresponding
#abstract datahandler @ref lodel.leapi.datahandlers.base_classes.Reference)
#The filter as quite the same composition than simple filters :
# These filters concern reference fields (see the corresponding
# abstract datahandler @ref lodel.leapi.datahandlers.base_classes.Reference)
# The filter has nearly the same composition as simple filters :
# FIELDNAME[.REF_FIELD] OP VALUE . Where :
#- FIELDNAME is the name of the reference field
#- REF_FIELD is an optional addon to the base field. It indicates on which
#field of the referenced object the comparison as to be done. If no
#REF_FIELD is indicated the comparison will be done on identifier.
# field of the referenced object the comparison has to be done. If no
# REF_FIELD is indicated the comparison will be done on the identifier.
#
#@param cls
#@param filters_l list : This list of str or tuple (or both)
@ -271,11 +276,11 @@ class LeFilteredQuery(LeQuery):
#@todo move this doc in another place (a dedicated page ?)
#@warning Does not support multiple UIDs for an EmClass
def _prepare_filters(self, filters_l):
filters=list()
filters = list()
res_filters = list()
rel_filters = list()
err_l = dict()
#Splitting in tuple if necessary
# Splitting in tuple if necessary
for i, fil in enumerate(filters_l):
if len(fil) == 3 and not isinstance(fil, str):
filters.append(tuple(fil))
@ -286,7 +291,7 @@ class LeFilteredQuery(LeQuery):
err_l["filter %d" % i] = e
for field, operator, value in filters:
err_key = "%s %s %s" % (field, operator, value) #to push in err_l
err_key = "%s %s %s" % (field, operator, value) # to push in err_l
# Splitting the field name to be able to detect a relational field
field_spl = field.split('.')
if len(field_spl) == 2:
@ -310,12 +315,12 @@ field name" % field)
# inconsistency
err_l[field] = NameError("The field '%s' in %s is not \
a relational field, but %s.%s was present in the filter"
% (field,
self._target_class.__name__,
field,
ref_field))
% (field,
self._target_class.__name__,
field,
ref_field))
if field_datahandler.is_reference():
#Relationnal field
# Relationnal field
if ref_field is None:
# ref_field default value
#
@ -350,14 +355,14 @@ field to use for the relational filter"
value, error = field_datahandler.check_data_value(value)
if isinstance(error, Exception):
value = value_orig
res_filters.append((field,operator, value))
res_filters.append((field, operator, value))
if len(err_l) > 0:
raise LeApiDataCheckErrors(
"Error while preparing filters : ",
err_l)
"Error while preparing filters : ",
err_l)
return (res_filters, rel_filters)
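A hedged illustration of the split performed above ('title' and 'author.name' are hypothetical field names):

std, rel = self._prepare_filters(['title = Hello', 'author.name = Doe'])
# 'title = Hello' stays a simple (FIELD, OPERATOR, VALUE) filter, while
# 'author.name = Doe' is turned into a relational filter on the 'name' field
# of the class(es) referenced by the 'author' field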
## @brief Check and split a query filter
# @brief Check and split a query filter
# @note The query_filter format is "FIELD OPERATOR VALUE"
# @param query_filter str : A query_filter string
# @param cls
@ -382,18 +387,18 @@ field to use for the relational filter"
raise ValueError(msg % query_filter)
return result
## @brief Compile the regex for query_filter processing
# @brief Compile the regex for query_filter processing
# @note Set _LeObject._query_re
@classmethod
def __compile_query_re(cls):
op_re_piece = '(?P<operator>(%s)'
op_re_piece %= cls._query_operators[0].replace(' ', '\s')
for operator in cls._query_operators[1:]:
op_re_piece += '|(%s)'%operator.replace(' ', '\s')
op_re_piece += '|(%s)' % operator.replace(' ', '\s')
op_re_piece += ')'
re_full = '^\s*(?P<field>([a-z_][a-z0-9\-_]*\.)?[a-z_][a-z0-9\-_]*)\s*'
re_full += op_re_piece+'\s*(?P<value>.*)\s*$'
re_full += op_re_piece + '\s*(?P<value>.*)\s*$'
cls._query_re = re.compile(re_full, flags=re.IGNORECASE)
pass
@ -407,10 +412,10 @@ field to use for the relational filter"
msg %= (fieldname, target_class.__name__)
return NameError(msg)
##@brief Prepare a relational filter
# @brief Prepare a relational filter
#
#Relational filters are composed of a tuple like the simple filters
#but the first element of this tuple is a tuple to :
# Relational filters are composed of a tuple like the simple filters
# but the first element of this tuple is itself a tuple :
#
#<code>((FIELDNAME, {REF_CLASS: REF_FIELD}), OP, VALUE)</code>
# Where :
@ -419,9 +424,9 @@ field to use for the relational filter"
# - REF_CLASS as key. It's a LeObject child class
# - REF_FIELD as value. The name of the referenced field in the REF_CLASS
#
#Visibly the REF_FIELD value of the dict will vary only when
#no REF_FIELD is explicitly given in the filter string notation
#and REF_CLASSES has differents uid
# Visibly the REF_FIELD value of the dict will vary only when
# no REF_FIELD is explicitly given in the filter string notation
# and REF_CLASSES have different uids
#
#@par String notation examples
#<pre>contributeur IN (1,2,3,5)</pre> will be transformed into :
@ -439,7 +444,7 @@ field to use for the relational filter"
#
#@param fieldname str : The relational field name
#@param ref_field str|None : The referenced field name (if None use
#uniq identifiers as referenced field
# unique identifiers as the referenced field)
#@return a well formed relational filter tuple or an Exception instance
def _prepare_relational_fields(self, fieldname, ref_field=None):
datahandler = self._target_class.field(fieldname)
@ -467,12 +472,12 @@ the relational filter %s"
logger.debug(msg)
if len(ref_dict) == 0:
return NameError("No field named '%s' in referenced objects [%s]"
% (ref_field,
','.join([rc.__name__ for rc in ref_classes])))
% (ref_field,
','.join([rc.__name__ for rc in ref_classes])))
return (fieldname, ref_dict)
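A hedged sketch of the returned structure, assuming a hypothetical 'contributeur' reference field pointing to a Person class:

self._prepare_relational_fields('contributeur')
# -> ('contributeur', {Person: <Person uid fieldname>})
# the caller then wraps it as ((FIELDNAME, {REF_CLASS: REF_FIELD}), OP, VALUE)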
##@brief A query to insert a new object
# @brief A query to insert a new object
class LeInsertQuery(LeQuery):
_hook_prefix = 'leapi_insert_'
_data_check_args = {'complete': True, 'allow_internal': False}
@ -483,49 +488,49 @@ class LeInsertQuery(LeQuery):
abstract LeObject : %s" % target_class)
super().__init__(target_class)
## @brief Implements an insert query operation, with only one insertion
# @param datas : datas to be inserted
def _query(self, datas):
datas = self._target_class.prepare_datas(datas, True, False)
id_inserted = self._rw_datasource.insert(self._target_class, datas)
#  @brief Implements an insert query operation, with only one insertion
# @param data : data to be inserted
def _query(self, data):
data = self._target_class.prepare_datas(data, True, False)
id_inserted = self._rw_datasource.insert(self._target_class, data)
return id_inserted
"""
## @brief Implements an insert query operation, with multiple insertions
# @param datas : list of **datas to be inserted
def _query(self, datas):
# @param data : list of **data to be inserted
def _query(self, data):
nb_inserted = self._datasource.insert_multi(
self._target_class,datas_list)
self._target_class,data_list)
if nb_inserted < 0:
raise LeApiQueryError("Multiple insertions error")
return nb_inserted
"""
## @brief Execute the insert query
def execute(self, datas):
return super().execute(datas=datas)
#  @brief Execute the insert query
def execute(self, data):
return super().execute(data=data)
##@brief A query to update datas for a given object
# @brief A query to update data for a given object
#
#@todo Change behavior, Huge optimization problem when updating using filters
#and not instance. We have to run a GET and then 1 update by fecthed object...
# and not an instance. We have to run a GET and then one update per fetched object...
class LeUpdateQuery(LeFilteredQuery):
_hook_prefix = 'leapi_update_'
_data_check_args = {'complete': False, 'allow_internal': False}
##@brief Instanciate an update query
# @brief Instanciate an update query
#
#If a class and not an instance is given, no query_filters are expected
#and the update will be fast and simple. Else we have to run a get query
#before updating (to fetch datas, update them and then, construct them
#and check their consistency)
# If a class and not an instance is given, no query_filters are expected
# and the update will be fast and simple. Else we have to run a get query
# before updating (to fetch the data, update it, then construct it
# and check its consistency)
#@param target LeObject class or instance
#@param query_filters list|None
#@todo change strategy with instance update. We have to accept datas for
#the execute method
#@todo change strategy with instance update. We have to accept data for
# the execute method
def __init__(self, target, query_filters=None):
##@brief This attr is set only if the target argument is an
#instance of a LeObject subclass
# @brief This attr is set only if the target argument is an
# instance of a LeObject subclass
self.__leobject_instance_datas = None
target_class = target
@ -542,16 +547,16 @@ target to LeUpdateQuery constructor"
super().__init__(target_class, query_filters)
##@brief Implements an update query
#@param datas dict : datas to update
# @brief Implements an update query
#@param data dict : data to be updated
#@returns the number of updated items
#@todo change stategy for instance update. Datas should be allowed
#for execute method (and query)
def _query(self, datas):
#@todo change stategy for instance update. Data should be allowed
# for execute method (and query)
def _query(self, data):
uid_name = self._target_class._uid[0]
if self.__leobject_instance_datas is not None:
#Instance update
#Building query_filter
# Instance update
# Building query_filter
filters = [(
uid_name,
'=',
@ -560,59 +565,60 @@ target to LeUpdateQuery constructor"
self._target_class, filters, [],
self.__leobject_instance_datas)
else:
#Update by filters, we have to fetch datas before updating
# Update by filters, we have to fetch data before updating
res = self._ro_datasource.select(
self._target_class, self._target_class.fieldnames(True),
self._query_filter[0],
self._query_filter[1])
#Checking and constructing datas
upd_datas = dict()
# Checking and constructing data
upd_data = dict()
for res_data in res:
res_data.update(datas)
res_datas = self._target_class.prepare_datas(
res_data.update(data)
res_data = self._target_class.prepare_datas(
res_data, True, True)
filters = [(uid_name, '=', res_data[uid_name])]
res = self._rw_datasource.update(
self._target_class, filters, [],
res_datas)
res_data)
return res
## @brief Execute the update query
def execute(self, datas=None):
if self.__leobject_instance_datas is not None and datas is not None:
raise LeApiQueryError("No datas expected when running an update \
#  @brief Execute the update query
def execute(self, data=None):
if self.__leobject_instance_datas is not None and data is not None:
raise LeApiQueryError("No data expected when running an update \
query on an instance")
if self.__leobject_instance_datas is None and datas is None:
raise LeApiQueryError("Datas are mandatory when running an update \
if self.__leobject_instance_datas is None and data is None:
raise LeApiQueryError("Data are mandatory when running an update \
query on a class with filters")
return super().execute(datas=datas)
return super().execute(data=data)
##@brief A query to delete an object
# @brief A query to delete an object
class LeDeleteQuery(LeFilteredQuery):
_hook_prefix = 'leapi_delete_'
def __init__(self, target_class, query_filter):
super().__init__(target_class, query_filter)
## @brief Execute the delete query
# @param datas
def execute(self, datas=None):
#  @brief Execute the delete query
# @param data
def execute(self, data=None):
return super().execute()
##@brief Implements delete query operations
# @param datas
# @brief Implements delete query operations
# @param data
#@returns the number of deleted items
def _query(self, datas=None):
def _query(self, data=None):
filters, rel_filters = self._query_filter
nb_deleted = self._rw_datasource.delete(
self._target_class, filters, rel_filters)
return nb_deleted
class LeGetQuery(LeFilteredQuery):
_hook_prefix = 'leapi_get_'
##@brief Instanciate a new get query
# @brief Instanciate a new get query
#@param target_class LeObject : class of object the query is about
#@param query_filters dict : {OP, list of query filters}
# or tuple (FIELD, OPERATOR, VALUE) )
@ -624,33 +630,33 @@ class LeGetQuery(LeFilteredQuery):
# - offset int : offset
def __init__(self, target_class, query_filters, **kwargs):
super().__init__(target_class, query_filters)
##@brief The fields to get
# @brief The fields to get
self._field_list = None
##@brief An equivalent to the SQL ORDER BY
# @brief An equivalent to the SQL ORDER BY
self._order = None
##@brief An equivalent to the SQL GROUP BY
# @brief An equivalent to the SQL GROUP BY
self._group = None
##@brief An equivalent to the SQL LIMIT x
# @brief An equivalent to the SQL LIMIT x
self._limit = None
##@brief An equivalent to the SQL LIMIT x, OFFSET
# @brief An equivalent to the SQL LIMIT x, OFFSET
self._offset = 0
# Checking kwargs and assigning default values if some are missing
for argname in kwargs:
if argname not in (
'field_list', 'order', 'group', 'limit', 'offset'):
'field_list', 'order', 'group', 'limit', 'offset'):
raise TypeError("Unexpected argument '%s'" % argname)
if 'field_list' not in kwargs:
self.set_field_list(target_class.fieldnames(include_ro = True))
self.set_field_list(target_class.fieldnames(include_ro=True))
else:
self.set_field_list(kwargs['field_list'])
if 'order' in kwargs:
#check kwargs['order']
# check kwargs['order']
self._order = kwargs['order']
if 'group' in kwargs:
#check kwargs['group']
# check kwargs['group']
self._group = kwargs['group']
if 'limit' in kwargs and kwargs['limit'] is not None:
try:
@ -669,7 +675,7 @@ class LeGetQuery(LeFilteredQuery):
msg = "offset argument expected to be an integer >= 0"
raise ValueError(msg)
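A hedged construction example using the keyword arguments checked above (Article and its fields are hypothetical names):

query = LeGetQuery(
    Article,
    [('title', 'like', '%lodel%')],
    field_list=['title'],
    order=[('title', 'ASC')],
    limit=5,
    offset=0)
results = query.execute()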
##@brief Set the field list
# @brief Set the field list
# @param field_list list | None : If None use all fields
# @return None
# @throw LeApiQueryError if unknown field given
@ -682,41 +688,41 @@ class LeGetQuery(LeFilteredQuery):
msg = "No field named '%s' in %s"
msg %= (fieldname, self._target_class.__name__)
expt = NameError(msg)
err_l[fieldname] = expt
err_l[fieldname] = expt
if len(err_l) > 0:
msg = "Error while setting field_list in a get query"
raise LeApiQueryErrors(msg = msg, exceptions = err_l)
raise LeApiQueryErrors(msg=msg, exceptions=err_l)
self._field_list = list(set(field_list))
##@brief Execute the get query
def execute(self, datas=None):
# @brief Execute the get query
def execute(self, data=None):
return super().execute()
##@brief Implements select query operations
# @brief Implements select query operations
# @returns a list containing the item(s)
def _query(self, datas=None):
# select datas corresponding to query_filter
def _query(self, data=None):
# select data corresponding to query_filter
fl = list(self._field_list) if self._field_list is not None else None
l_datas=self._ro_datasource.select(
target = self._target_class,
field_list = fl,
filters = self._query_filter[0],
relational_filters = self._query_filter[1],
order = self._order,
group = self._group,
limit = self._limit,
offset = self._offset)
return l_datas
l_data = self._ro_datasource.select(
target=self._target_class,
field_list=fl,
filters=self._query_filter[0],
relational_filters=self._query_filter[1],
order=self._order,
group=self._group,
limit=self._limit,
offset=self._offset)
return l_data
##@return a dict with query infos
# @return a dict with query infos
def dump_infos(self):
ret = super().dump_infos()
ret.update({ 'field_list' : self._field_list,
'order' : self._order,
'group' : self._group,
'limit' : self._limit,
'offset': self._offset,
})
ret.update({'field_list': self._field_list,
'order': self._order,
'group': self._group,
'limit': self._limit,
'offset': self._offset,
})
return ret
def __repr__(self):
@ -725,7 +731,7 @@ field_list={field_list} order={order} group={group} limit={limit} \
offset={offset}"
res = res.format(**self.dump_infos())
if len(self.subqueries) > 0:
for n,subq in enumerate(self.subqueries):
for n, subq in enumerate(self.subqueries):
res += "\n\tSubquerie %d : %s"
res %= (n, subq)
res += ">"


@ -0,0 +1,5 @@
## @package lodel.mlnamedobject A package dedicated to managing the objects' properties which can be
# translated into several languages.
#
# Each object in Lodel which can be translated will see its display name and help text being
# managed by a MlNamedObject instance, a class that is part of this package.


@ -4,15 +4,18 @@ from lodel.context import LodelContext
LodelContext.expose_modules(globals(), {
'lodel.utils.mlstring': ['MlString']})
# @package lodel.mlnamedobject Lodel2 description of objects module
## @package lodel.mlnamedobject.mlnamedobject Lodel2 description of objects module
#
# Display name and Description of a lodel2 object
# @brief Class allows display name and help text for lodel2 objects and fields
## @brief Represents a multi-language object (dealing with its translations)
class MlNamedObject(object):
##
# @param display_name str|dict : displayed string to name the object (either a string or a dictionary of the translated strings can be passed)
# @param help_text str|dict : description text for this object (either a string or a dictionary of the translated strings can be passed)
def __init__(self, display_name=None, help_text=None):
## @brief The object's name which will be used in all the user interfaces
self.display_name = None if display_name is None else MlString(display_name)
## @brief Description text for this object
self.help_text = None if help_text is None else MlString(help_text)
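A hedged instantiation example; the language codes are only illustrative, the accepted ones depend on MlString:

label = MlNamedObject(
    display_name={'eng': 'Article', 'fre': 'Article'},
    help_text={'eng': 'A generic content', 'fre': 'Un contenu generique'})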


@ -6,11 +6,15 @@ LodelContext.expose_modules(globals(), {
'lodel.settings': ['Settings'],
'lodel.logger': 'logger'})
##@package lodel.plugin.core_hooks
#@brief Lodel2 internal hooks declaration
#@ingroup lodel2_plugins
## @package lodel.plugin.core_hooks
# @brief Lodel2 internal hooks declaration
# @ingroup lodel2_plugins
##@brief Bootstrap hook to check datasources configuration
## @brief Bootstrap hook that checks datasources configuration
# @param hook_name str
# @param caller * : the hook's caller
# @param payload * : data to be given to the hook
# @throw NameError when a configured datasource family name cannot be found, or when a datasource identifier does not match any configured datasource.
@LodelHook('lodel2_bootstraped')
def datasources_bootstrap_hook(hook_name, caller, payload):
for ds_name in Settings.datasources._fields:
@ -32,12 +36,14 @@ def datasources_bootstrap_hook(hook_name, caller, payload):
msg %= identifier
raise NameError(msg)
log_msg = "Found a datasource named '%s' identified by '%s'"
log_msg %= (ds_name, identifier)
logger.debug(log_msg)
##@brief Bootstrap hook that print debug infos about registered hooks
## @brief Bootstrap hook that prints debug infos about registered hooks
# @param name str
# @param caller * : the hook's caller
# @param payload * : data to be given to the hook
@LodelHook('lodel2_bootstraped')
def list_hook_debug_hook(name, caller, payload):
LodelContext.expose_modules(globals(), {
@ -55,7 +61,9 @@ def list_hook_debug_hook(name, caller, payload):
##@brief Hooks that trigger custom methods injection in dynmic classes
## @brief Hook that triggers custom methods injection in dynamic classes
# @param caller * : the hook's caller
# @param dynclasses list : a list of classes in which the injection will occur
@LodelHook("lodel2_dyncode_loaded")
def lodel2_plugins_custom_methods(self, caller, dynclasses):
LodelContext.expose_modules(globals(), {


@ -1,3 +1,8 @@
## @package lodel.plugin.datasource_plugin Datasource plugins management module
#
# It contains the base classes for all the datasource plugins that could be added to Lodel
from lodel.context import LodelContext
LodelContext.expose_modules(globals(), {
'lodel.plugin.plugins': ['Plugin'],
@ -7,58 +12,61 @@ LodelContext.expose_modules(globals(), {
'lodel.exceptions': ['LodelException', 'LodelExceptions',
'LodelFatalError', 'DataNoneValid', 'FieldValidationError']})
## @brief The plugin type that is used in the global settings of Lodel
_glob_typename = 'datasource'
##@brief Datasource class in plugins HAVE TO inherit from this abstract class
## @brief Main abstract class from which the plugins' datasource classes must inherit.
class AbstractDatasource(object):
##@brief Trigger LodelFatalError when abtract method called
## @brief Triggers LodelFatalError when an abstract method is called
# @throw LodelFatalError if there is an attempt to instanciate an object from this class
@staticmethod
def _abs_err():
raise LodelFatalError("This method is abstract and HAVE TO be \
reimplemented by plugin datasource child class")
##@brief The constructor
##
# @param *conn_args
# @param **conn_kwargs
def __init__(self, *conn_args, **conn_kwargs):
self._abs_err()
##@brief Provide a new uniq numeric ID
#@param emcomp LeObject subclass (not instance) : To know on wich things we
#have to be uniq
#@return an integer
## @brief Provides a new unique numeric ID
# @param emcomp LeObject subclass (not instance) : defines against which object type the id should be unique
# @return int
def new_numeric_id(self, emcomp):
self._abs_err()
##@brief returns a selection of documents from the datasource
#@param target Emclass
#@param field_list list
#@param filters list : List of filters
#@param rel_filters list : List of relational filters
#@param order list : List of column to order. ex: order = [('title', 'ASC'),]
#@param group list : List of tupple representing the column to group together. ex: group = [('title', 'ASC'),]
#@param limit int : Number of records to be returned
#@param offset int: used with limit to choose the start record
#@param instanciate bool : If true, the records are returned as instances, else they are returned as dict
#@return list
## @brief Returns a selection of documents from the datasource
# @param target Emclass : class of the documents
# @param field_list list : fields to get from the datasource
# @param filters list : List of filters
# @param rel_filters list : List of relational filters (default value : None)
# @param order list : List of columns to order by. ex: order = [('title', 'ASC'),] (default value : None)
# @param group list : List of tuples representing the columns to group together. ex: group = [('title', 'ASC'),] (default value : None)
# @param limit int : Number of records to be returned (default value None)
# @param offset int: used with limit to choose the start record (default value : 0)
# @param instanciate bool : If true, the records are returned as instances, else they are returned as dict (default value : True)
# @return list
def select(self, target, field_list, filters, rel_filters=None, order=None, group=None, limit=None, offset=0,
instanciate=True):
self._abs_err()
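A hedged call sketch against a concrete Datasource implementation (Article is a hypothetical dynclass and ds an already initialized datasource instance):

rows = ds.select(
    target=Article,
    field_list=['title', 'date'],
    filters=[('title', '=', 'Hello')],
    rel_filters=None,
    order=[('date', 'ASC')],
    limit=10,
    offset=0)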
##@brief Deletes records according to given filters
#@param target Emclass : class of the record to delete
#@param filters list : List of filters
#@param relational_filters list : List of relational filters
#@return int : number of deleted records
## @brief Deletes records according to given filters
# @param target Emclass : class of the record to delete
# @param filters list : List of filters
# @param relational_filters list : List of relational filters
# @return int : number of deleted records
def delete(self, target, filters, relational_filters):
self._abs_err()
## @brief updates records according to given filters
#@param target Emclass : class of the object to insert
#@param filters list : List of filters
#@param relational_filters list : List of relational filters
#@param upd_datas dict : datas to update (new values)
#@return int : Number of updated records
# @param target Emclass : class of the object to insert
# @param filters list : List of filters
# @param relational_filters list : List of relational filters
# @param upd_datas dict : datas to update (new values)
# @return int : Number of updated records
def update(self, target, filters, relational_filters, upd_datas):
self._abs_err()
@ -77,22 +85,21 @@ reimplemented by plugin datasource child class")
self._abs_err()
##@brief Designed to handles datasources plugins
## @brief Represents a Datasource plugin
#
#A datasource provide data access to LeAPI typically a connector on a DB
#or an API
# It will provide an access to a data collection to LeAPI (i.e. database connector, API ...).
#
#Provide methods to initialize datasource attribute in LeAPI LeObject child
#classes (see @ref leapi.leobject.LeObject._init_datasources() )
# It provides the methods needed to initialize the datasource attribute in LeAPI LeObject child
# classes (see @ref leapi.leobject.LeObject._init_datasources() )
#
#@note For the moment implementation is done with a retro-compatibilities
#priority and not with a convenience priority.
#@todo Refactor and rewrite lodel2 datasource handling
#@todo Write abstract classes for Datasource and MigrationHandler !!!
# @note For the moment implementation is done with a retro-compatibilities priority and not with a convenience priority.
# @todo Refactor and rewrite lodel2 datasource handling
# @todo Write abstract classes for Datasource and MigrationHandler !!!
class DatasourcePlugin(Plugin):
_type_conf_name = _glob_typename
##@brief Stores confspecs indicating where DatasourcePlugin list is stored
## @brief Stores confspecs indicating where DatasourcePlugin list is stored
_plist_confspecs = {
'section': 'lodel2',
'key': 'datasource_connectors',
@ -104,15 +111,17 @@ class DatasourcePlugin(Plugin):
'none_is_valid': False})
}
##@brief Construct a DatasourcePlugin
#@param name str : plugin name
#@see plugins.Plugin
##
# @param name str : plugin's name
# @see plugins.Plugin
def __init__(self, name):
super().__init__(name)
self.__datasource_cls = None
##@brief Accessor to the datasource class
#@return A python datasource class
## @brief Returns the plugin's datasource class
# @return A python datasource class
# @throw DatasourcePluginError if the plugin's datasource class is not a child of
# @ref lodel.plugin.datasource_plugin.AbstractDatasource
def datasource_cls(self):
if self.__datasource_cls is None:
self.__datasource_cls = self.loader_module().Datasource
@ -122,17 +131,15 @@ class DatasourcePlugin(Plugin):
lodel.plugin.datasource_plugin.AbstractDatasource" % (self.name))
return self.__datasource_cls
##@brief Accessor to migration handler class
#@return A python migration handler class
## @brief Returns the plugin's migration handler class
# @return A python migration handler class
def migration_handler_cls(self):
return self.loader_module().migration_handler_class()
##@brief Return an initialized Datasource instance
#@param ds_name str : The name of the datasource to instanciate
#@param ro bool
#@return A properly initialized Datasource instance
#@throw SettingsError if an error occurs in settings
#@throw DatasourcePluginError for various errors
## @brief Returns an initialized Datasource instance
# @param ds_name str : The name of the datasource to instanciate
# @param ro bool : if True the datasource instance is meant for read-only access, else for write access
# @return A properly initialized Datasource instance
@classmethod
def init_datasource(cls, ds_name, ro):
plugin_name, ds_identifier = cls.plugin_name(ds_name, ro)
@ -140,9 +147,10 @@ lodel.plugin.datasource_plugin.AbstractDatasource" % (self.name))
ds_cls = cls.get_datasource(plugin_name)
return ds_cls(**ds_conf)
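## A minimal usage sketch (the datasource name 'main' is an assumption, see
# the configuration example at the bottom of this file) :
#<pre>
#ro_ds = DatasourcePlugin.init_datasource('main', ro=True)   # read-only instance
#rw_ds = DatasourcePlugin.init_datasource('main', ro=False)  # instance used for writes
#</pre>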
##@brief Return an initialized MigrationHandler instance
#@param ds_name str : The datasource name
#@return A properly initialized MigrationHandler instance
## @brief Returns an initialized MigrationHandler instance
# @param ds_name str : The datasource name
# @return A properly initialized MigrationHandler instance
# @throw PluginError if a read only datasource instance was given to the migration handler.
@classmethod
def init_migration_handler(cls, ds_name):
plugin_name, ds_identifier = cls.plugin_name(ds_name, False)
@ -156,13 +164,12 @@ migration handler !!!")
return mh_cls(**ds_conf)
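## A minimal usage sketch (the datasource name is an assumption) ; the
# migration handler is always built from a writable datasource configuration :
#<pre>
#mh = DatasourcePlugin.init_migration_handler('main')
#</pre>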
##@brief Given a datasource name returns a DatasourcePlugin name
#@param ds_name str : datasource name
#@param ro bool : if true consider the datasource as readonly
#@return a DatasourcePlugin name
#@throw PluginError if datasource name not found
#@throw DatasourcePermError if datasource is read_only but ro flag arg is
#false
## @brief Given a datasource name returns a DatasourcePlugin name
# @param ds_name str : datasource name
# @param ro bool : if true consider the datasource as readonly
# @return a DatasourcePlugin name
# @throw DatasourcePluginError if the given datasource is unknown or not configured, or if there is a conflict in its "read-only" property (between the instance and the settings).
# @throw SettingsError if there are misconfigured datasource settings.
@staticmethod
def plugin_name(ds_name, ro):
LodelContext.expose_modules(globals(), {
@ -195,11 +202,11 @@ True found in settings for datasource '%s'" % ds_name)
DS_PLUGIN_NAME.DS_INSTANCE_NAME. But got %s" % ds_identifier)
return res
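## Illustrative mapping sketch : with a datasource declared as
#<pre>
#[lodel2.datasources.main]
#identifier = mysql.Core
#</pre>
# a call such as DatasourcePlugin.plugin_name('main', False) would be expected
# to return ('mysql', 'Core') (the names are taken from the configuration
# example below and are only illustrative).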
##@brief Try to fetch a datasource configuration
#@param ds_identifier str : datasource name
#@param ds_plugin_name : datasource plugin name
#@return a dict containing datasource initialisation options
#@throw NameError if a datasource plugin or instance cannot be found
## @brief Returns a datasource's configuration
# @param ds_identifier str : datasource name
# @param ds_plugin_name : datasource plugin name
# @return a dict containing datasource initialisation options
# @throw DatasourcePluginError if a datasource plugin or instance cannot be found
@staticmethod
def _get_ds_connection_conf(ds_identifier,ds_plugin_name):
LodelContext.expose_modules(globals(), {
@ -216,56 +223,55 @@ DS_PLUGIN_NAME.DS_INSTANCE_NAME. But got %s" % ds_identifier)
ds_conf = getattr(ds_conf, ds_identifier)
return {k: getattr(ds_conf,k) for k in ds_conf._fields }
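## The returned dict maps every field of the datasource configuration section
# to its value ; for a hypothetical mysql datasource it could look like
#<pre>
#{'host': 'localhost', 'user': 'lodel', 'password': 'secret', 'db_name': 'lodel2'}
#</pre>
# (all keys above are assumptions, the actual fields depend on the plugin CONFSPEC).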
##@brief DatasourcePlugin instance accessor
#@param ds_name str : plugin name
#@return a DatasourcePlugin instance
#@throw PluginError if no plugin named ds_name found
#@throw PluginTypeError if ds_name ref to a plugin that is not a
#DatasourcePlugin
## @brief Returns a DatasourcePlugin instance from a plugin's name
# @param ds_name str : plugin name
# @return DatasourcePlugin
# @throw PluginError if no plugin named ds_name is found (@see lodel.plugin.plugins.Plugin)
# @throw PluginTypeError if ds_name refers to a plugin that is not a DatasourcePlugin
@classmethod
def get(cls, ds_name):
pinstance = super().get(ds_name) #Will raise PluginError if bad name
pinstance = super().get(ds_name) # Will raise PluginError if bad name
if not isinstance(pinstance, DatasourcePlugin):
raise PluginTypeErrror("A name of a DatasourcePlugin was expected \
but %s is a %s" % (ds_name, pinstance.__class__.__name__))
return pinstance
##@brief Return a datasource class given a datasource name
#@param ds_plugin_name str : datasource plugin name
#@throw PluginError if ds_name is not an existing plugin name
#@throw PluginTypeError if ds_name is not the name of a DatasourcePlugin
## @brief Returns a datasource class given a datasource name
# @param ds_plugin_name str : datasource plugin name
# @return Datasource class
@classmethod
def get_datasource(cls, ds_plugin_name):
return cls.get(ds_plugin_name).datasource_cls()
##@brief Given a plugin name returns a migration handler class
#@param ds_plugin_name str : a datasource plugin name
## @brief Returns a migration handler class, given a plugin name
# @param ds_plugin_name str : a datasource plugin name
# @return MigrationHandler class
@classmethod
def get_migration_handler(cls, ds_plugin_name):
return cls.get(ds_plugin_name).migration_handler_cls()
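## Illustrative accessor usage (the plugin name 'mysql' is an assumption) :
#<pre>
#plugin = DatasourcePlugin.get('mysql')                    # DatasourcePlugin instance
#ds_cls = DatasourcePlugin.get_datasource('mysql')         # Datasource class
#mh_cls = DatasourcePlugin.get_migration_handler('mysql')  # MigrationHandler class
#</pre>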
##@page lodel2_datasources Lodel2 datasources
## @page lodel2_datasources Lodel2 datasources
#
#@par lodel2_datasources_intro Intro
# @par lodel2_datasources_intro Introduction
# A single lodel2 website can interact with multiple datasources. This page
# aims to describe configuration & organisation of datasources in lodel2.
# aims to describe configuration and organisation of datasources in lodel2.
# Each object is attached to a datasource. This association is done in the
# editorial model, the datasource is identified by a name.
# editorial model, in which the datasource is identified by its name.
#
#@par Datasources declaration
# To define a datasource you have to write something like this in confs file :
#<pre>
#[lodel2.datasources.DATASOURCE_NAME]
#identifier = DATASOURCE_FAMILY.SOURCE_NAME
#</pre>
# See below for DATASOURCE_FAMILY & SOURCE_NAME
# @par Datasources declaration
# To define a datasource you have to write something like this in the configuration file :
# <pre>
# [lodel2.datasources.DATASOURCE_NAME]
# identifier = DATASOURCE_FAMILY.SOURCE_NAME
# </pre>
# See below for DATASOURCE_FAMILY & SOURCE_NAME
#
#@par Datasources plugins
# Each datasource family is a plugin (
#@ref plugin_doc "More informations on plugins" ). For example mysql or a
#mongodb plugins. Here is the CONFSPEC variable templates for datasources
#plugin
# @par Datasources plugins
# Each datasource family is a plugin ( @ref plugin_doc "More information on plugins" ).
# For example the mysql or mongodb plugins. \n
#
# Here is the CONFSPEC variable templates for datasources plugin
#<pre>
#CONFSPEC = {
# 'lodel2.datasource.example.*' : {
@ -275,7 +281,8 @@ but %s is a %s" % (ds_name, pinstance.__class__.__name__))
# }
#}
#</pre>
#MySQL example
#
#MySQL example \n
#<pre>
#CONFSPEC = {
# 'lodel2.datasource.mysql.*' : {
@ -291,8 +298,8 @@ but %s is a %s" % (ds_name, pinstance.__class__.__name__))
#}
#</pre>
#
#@par Configuration example
#<pre>
# @par Configuration example
# <pre>
# [lodel2.datasources.main]
# identifier = mysql.Core
# [lodel2.datasources.revues_write]

View file

@ -1,3 +1,6 @@
## @package lodel.plugin.exceptions Plugin management specific exceptions
class PluginError(Exception):
pass

View file

@ -1,3 +1,6 @@
## @package lodel.plugin.extensions A package to manage the Extension plugins
from lodel.context import LodelContext
LodelContext.expose_modules(globals(), {
'lodel.plugin.plugins': ['Plugin'],
@ -7,9 +10,12 @@ LodelContext.expose_modules(globals(), {
_glob_typename = 'extension'
## @brief A class representing a basic Extension plugin
#
# This class will be extended for each plugin of this type.
class Extension(Plugin):
## @brief Specifies the settings linked to this plugin
_plist_confspecs = {
'section': 'lodel2',
'key': 'extensions',
@ -20,6 +26,8 @@ class Extension(Plugin):
'ptype': _glob_typename,
'none_is_valid': False})
}
## @brief A property defining the type name of this plugin.
# By default, it's the global type name ("extension" here).
_type_conf_name = _glob_typename

View file

@ -1,20 +1,22 @@
#-*- coding: utf-8 -*-
## @package lodel.plugin.hooks This module deals with the Hook management in Lodel
import os
import copy
from lodel.context import LodelContext
##@brief Class designed to handle a hook's callback with a priority
## @brief Class designed to handle a hook's callback with a priority
class DecoratedWrapper(object):
##@brief Constructor
##
# @param hook function : the function to wrap
# @param priority int : the callback priority
def __init__(self, hook, priority):
self._priority = priority
self._hook = hook
##@brief Call the callback
## @brief Calls the callback
# @param hook_name str : The name of the called hook
# @param caller * : The caller (depends on the hook)
# @param payload * : Data that depends on the hook
@ -22,12 +24,14 @@ class DecoratedWrapper(object):
def __call__(self, hook_name, caller, payload):
return self._hook(hook_name, caller, payload)
## @brief Returns the string representation of the wrapped hook
# It shows the name and the priority of the hook
def __str__(self):
return "<LodelHook '%s' priority = %s>" % (
self._hook.__name__, self._priority)
##@brief Decorator designed to register hook's callbacks
#@ingroup lodel2_plugins
## @brief Decorator designed to register hook's callbacks
# @ingroup lodel2_plugins
#
# @note Decorated functions are expected to take 3 arguments :
# - hook_name : the name of the called hook
@ -35,17 +39,17 @@ class DecoratedWrapper(object):
# - payload : data depending on the hook
class LodelHook(object):
##@brief Stores all hooks (DecoratedWrapper instances)
## @brief Stores all hooks (DecoratedWrapper instances)
_hooks = dict()
##@brief Decorator constructor
##
# @param hook_name str : the name of the hook to register to
# @param priority int : the hook priority
# @param priority int : the hook priority (default value : None)
def __init__(self, hook_name, priority = None):
self._hook_name = hook_name
self._priority = 0xFFFF if priority is None else priority
##@brief called just after __init__
## @brief Called just after __init__
# @param hook function : the decorated function
# @return the hook argument
def __call__(self, hook):
@ -56,11 +60,10 @@ class LodelHook(object):
self._hooks[self._hook_name] = sorted(self._hooks[self._hook_name], key = lambda h: h._priority)
return hook
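## Registration sketch (the hook name and priority below are illustrative) :
#<pre>
#@LodelHook('my_hook_name', priority = 10)
#def my_callback(hook_name, caller, payload):
#    # inspect or modify the payload here, then return it
#    return payload
#</pre>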
##@brief Call hooks
## @brief Calls a hook
# @param hook_name str : the hook's name
# @param caller * : the hook caller (depends on the hook)
# @param payload * : datas for the hook
# @param cls
# @return modified payload
@classmethod
def call_hook(cls, hook_name, caller, payload):
@ -73,10 +76,9 @@ class LodelHook(object):
payload = hook(hook_name, caller, payload)
return payload
##@brief Fetch registered hooks
# @param names list | None : optionnal filter on name
# @param cls
# @return a list of functions
## @brief Fetches registered hooks
# @param names list | None : optional filter on hook names (default value : None)
# @return dict containing for each name a list of the hooks and their priorities
@classmethod
def hook_list(cls, names = None):
res = None
@ -86,8 +88,7 @@ class LodelHook(object):
res = copy.copy(cls._hooks)
return { name: [(hook._hook, hook._priority) for hook in hooks] for name, hooks in res.items() }
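## Usage sketch for triggering and inspecting hooks (names are illustrative
# and assume the registration sketch above) :
#<pre>
#payload = LodelHook.call_hook('my_hook_name', caller=None, payload={'result': []})
#registered = LodelHook.hook_list(['my_hook_name'])
## registered == {'my_hook_name': [(my_callback, 10)]}
#</pre>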
##@brief Unregister all hooks
# @param cls
## @brief Unregisters all hooks
# @warning REALLY NOT a good idea !
# @note implemented for testing purpose
@classmethod

View file

@ -1,3 +1,5 @@
## @package lodel.plugin.interface Handles the Interface type plugins
from lodel.context import LodelContext
LodelContext.expose_modules(globals(), {
'lodel.plugin.plugins': ['Plugin'],
@ -5,16 +7,18 @@ LodelContext.expose_modules(globals(), {
'LodelScriptError', 'DatasourcePluginError'],
'lodel.validator.validator': ['Validator']})
## @brief Global type name used in the settings of Lodel for this type of plugins
_glob_typename = 'ui'
##@brief Handles interfaces plugin
##@brief An Interface plugin
#@note It's a singleton class. Only one interface is allowed per instance.
class InterfacePlugin(Plugin):
##@brief Singleton instance storage
## @brief Singleton instance storage
_instance = None
## @brief Settings description
_plist_confspecs = {
'section': 'lodel2',
'key': 'interface',
@ -22,16 +26,20 @@ class InterfacePlugin(Plugin):
'validator': Validator(
'plugin', none_is_valid = True, ptype = _glob_typename)}
## @brief plugin type name
_type_conf_name = _glob_typename
##
# @param name str : Name of the interface plugin
# @throw PluginError if an interface plugin has already been instantiated
def __init__(self, name):
if InterfacePlugin._instance is not None:
raise PluginError("Maximum one interface allowed")
super().__init__(name)
self._instance = self
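## Instantiation sketch (the plugin name 'webui' is an assumption) ; a second
# instantiation raises PluginError since only one interface is allowed :
#<pre>
#ui = InterfacePlugin('webui')
#InterfacePlugin('another_ui')   # raises PluginError("Maximum one interface allowed")
#</pre>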
##@brief Clear class
#@see plugins.Plugin::clear()
## @brief Clears the singleton from its active instance
# @see plugins.Plugin::clear()
@classmethod
def clear_cls(cls):
if cls._instance is not None:

View file

@ -84,9 +84,9 @@ class MongoDbDatasource(AbstractDatasource):
target = emcomp.uid_source()
tuid = target._uid[0] # Multiple UID broken here
results = self.select(
target, field_list = [tuid], filters = [],
target, field_list = [tuid], filters = [],
order=[(tuid, 'DESC')], limit = 1)
if len(results) == 0:
if len(results) == 0:
return 1
return results[0][tuid]+1
@ -95,23 +95,23 @@ class MongoDbDatasource(AbstractDatasource):
#@param field_list list
#@param filters list : List of filters
#@param relational_filters list : List of relational filters
#@param order list : List of column to order. ex: order =
#@param order list : List of column to order. ex: order =
#[('title', 'ASC'),]
#@param group list : List of tupple representing the column used as
#@param group list : List of tupple representing the column used as
#"group by" fields. ex: group = [('title', 'ASC'),]
#@param limit int : Number of records to be returned
#@param offset int: used with limit to choose the start record
#@return list
#@todo Implement group for abstract LeObject childs
def select(self, target, field_list, filters = None,
relational_filters=None, order=None, group=None, limit=None,
def select(self, target, field_list, filters = None,
relational_filters=None, order=None, group=None, limit=None,
offset=0):
if target.is_abstract():
#Recursive calls for abstract LeObject children
results = self.__act_on_abstract(target, filters,
relational_filters, self.select, field_list = field_list,
order = order, group = group, limit = limit)
#Here we may implement the group
#If sorted query we have to sort again
if order is not None:
@ -138,11 +138,11 @@ class MongoDbDatasource(AbstractDatasource):
query_filters = self.__process_filters(
target, filters, relational_filters)
query_result_ordering = None
if order is not None:
query_result_ordering = utils.parse_query_order(order)
if group is None:
if field_list is None:
field_list = dict()
@ -189,7 +189,7 @@ class MongoDbDatasource(AbstractDatasource):
results = list()
for document in cursor:
results.append(document)
return results
##@brief Deletes records according to given filters
@ -236,7 +236,7 @@ abstract, preparing reccursiv calls" % (target, filters, relational_filters))
self.__update_backref_filtered(target, filters, relational_filters,
upd_datas, old_datas_l)
return res
##@brief Designed to be called by backref update in order to avoid
#infinite updates between back references
#@see update()
@ -269,7 +269,7 @@ abstract, preparing reccursiv calls" % (target, filters, relational_filters))
raise MongoDataSourceError("Missing UID data while inserting a new \
%s" % target.__class__)
res = self.__collection(target).insert(new_datas)
self.__update_backref(target, new_datas[uidname], None, new_datas)
self.__update_backref(target, new_datas[uidname], None, new_datas)
return str(res)
## @brief Inserts a list of records in a given collection
@ -281,10 +281,10 @@ abstract, preparing reccursiv calls" % (target, filters, relational_filters))
self._data_cast(datas)
res = self.__collection(target).insert_many(datas_list)
for new_datas in datas_list:
self.__update_backref(target, None, new_datas)
self.__update_backref(target, None, new_datas)
target.make_consistency(datas=new_datas)
return list(res.inserted_ids)
##@brief Update backref giving an action
#@param target leObject child class
#@param filters
@ -303,7 +303,7 @@ abstract, preparing reccursiv calls" % (target, filters, relational_filters))
old_datas_l = self.__collection(target).find(
mongo_filters)
old_datas_l = list(old_datas_l)
uidname = target.uid_fieldname()[0] #MULTIPLE UID BROKEN HERE
for old_datas in old_datas_l:
self.__update_backref(
@ -312,7 +312,7 @@ abstract, preparing reccursiv calls" % (target, filters, relational_filters))
##@brief Update back references of an object
#@ingroup plugin_mongodb_bref_op
#
#old_datas and new_datas arguments are set to None to indicate
#old_datas and new_datas arguments are set to None to indicate
#insertion or deletion. Calls examples :
#@par LeObject insert __update backref call
#<pre>
@ -441,8 +441,8 @@ abstract, preparing reccursiv calls" % (target, filters, relational_filters))
self.__update_no_backref(
leo.__class__, [(leo.uid_fieldname()[0], '=', uidval)],
[], datas)
##@brief Utility function designed to handle the upd_dict of
##@brief Utility function designed to handle the upd_dict of
#__update_backref()
#
#Basically checks if a key exists at some level, if not create it with
@ -453,7 +453,7 @@ abstract, preparing reccursiv calls" % (target, filters, relational_filters))
#@param uid_val mixed : the UID of the referenced object
#@return the updated version of upd_dict
@staticmethod
def __update_backref_upd_dict_prepare(upd_dict,bref_infos, bref_fname,
def __update_backref_upd_dict_prepare(upd_dict,bref_infos, bref_fname,
uid_val):
bref_cls, bref_leo, bref_dh, bref_value = bref_infos
if bref_cls not in upd_dict:
@ -463,8 +463,8 @@ abstract, preparing reccursiv calls" % (target, filters, relational_filters))
if bref_fname not in upd_dict[bref_cls][uid_val]:
upd_dict[bref_cls][uid_val][1][bref_fname] = bref_value
return upd_dict
##@brief Prepare a one value back reference update
#@param fname str : the source Reference field name
#@param fdh DataHandler : the source Reference DataHandler
@ -520,7 +520,7 @@ have expected value. Expected was %s but found %s in %s" % (
return bref_val
elif oldd and not newdd:
#deletion
if not hasattr(bref_dh, "default"):
if not hasattr(bref_dh, "default"):
raise MongoDbConsistencyError("Unable to delete a \
value for a back reference update. The concerned field doesn't have a default \
value : in %s field %s" % (bref_leo,fname))
@ -528,7 +528,7 @@ value : in %s field %s" % (bref_leo,fname))
elif not oldd and newdd:
bref_val = tuid
return bref_val
##@brief Fetch back reference informations
#@warning thanks to __update_backref_act() this method is useless
#@param bref_cls LeObject child class : __back_reference[0]
@ -608,7 +608,7 @@ on non abstract childs" % act.__name__)
port = self.__db_infos['port'],
db_name = db_name,
ro = ro)
self.__conn_hash = conn_h = hash(conn_string)
if conn_h in self._connections:
self._connections[conn_h]['conn_count'] += 1
@ -619,7 +619,7 @@ on non abstract childs" % act.__name__)
'conn_count': 1,
'db': utils.connect(conn_string)}
return self._connections[conn_h]['db'][self.__db_infos['db_name']]
##@brief Return a pymongo collection given a LeObject child class
#@param leobject LeObject child class (no instance)
@ -760,7 +760,7 @@ on non abstract childs" % act.__name__)
rfilters[fname][repr_leo][rfield] = list()
rfilters[fname][repr_leo][rfield].append((op, value))
return rfilters
##@brief Convert lodel2 filters to pymongo conditions
#@param filters list : list of lodel filters
#@return dict representing pymongo conditions
@ -859,7 +859,7 @@ field/operator couple in a query. We will keep only the first one")
1 if (a[fname]>b[fname] if cmpdir == 'ASC' else a[fname]<b[fname])\
else -1)
##@brief Corrects some data before handing it to pymongo
#
#For example sets have to be cast to lists
@ -874,13 +874,3 @@ field/operator couple in a query. We will keep only the first one")
#with sets
datas[dname] = list(datas[dname])
return datas
##@brief Tool to check if a record with unique id uid is set in the target_class representation
#@param target_class : class to check in
#@param uid : a unique id in target_class
#@returns true if a record with unique id uid exists in the target_class representation, false if not
def is_exist(self, target_class, uid):
# find the table that corresponds to target_class
# check whether or not a record containing uid exists
result = self.select(self, target_class, [target_class.uid_fieldname], filters = [(target_class.uid_fieldname, '=', uid)])
return len(result) == 1

View file

@ -23,7 +23,7 @@ class EditorialModelTestCase(unittest.TestCase):
grp1.add_components((cls1, c1f1))
grp2 = model.new_group('testgroup2')
grp2.add_components((cls2, c1f2, c2f1, c2f2))
grp2.add_dependencie(grp1)
grp2.add_dependency(grp1)
e_hash = 0x250eab75e782e51bbf212f47c6159571
self.assertEqual(model.d_hash(), e_hash)
@ -181,10 +181,10 @@ class EmGroupTestCase(unittest.TestCase):
grp3 = EmGroup('grp3')
grp4 = EmGroup('grp4')
grp2.add_dependencie(grp1)
grp3.add_dependencie(grp2)
grp4.add_dependencie(grp2)
grp4.add_dependencie(grp1)
grp2.add_dependency(grp1)
grp3.add_dependency(grp2)
grp4.add_dependency(grp2)
grp4.add_dependency(grp1)
self.assertEqual(set(grp1.dependencies().values()), set())
self.assertEqual(set(grp2.dependencies().values()), set([grp1]))
@ -261,10 +261,10 @@ class EmGroupTestCase(unittest.TestCase):
def test_deps_complex(self):
""" More complex dependencies handling test """
grps = [ EmGroup('group%d' % i) for i in range(6) ]
grps[5].add_dependencie( (grps[1], grps[2], grps[4]) )
grps[4].add_dependencie( (grps[1], grps[3]) )
grps[3].add_dependencie( (grps[0],) )
grps[1].add_dependencie( (grps[2], grps[0]) )
grps[5].add_dependency( (grps[1], grps[2], grps[4]) )
grps[4].add_dependency( (grps[1], grps[3]) )
grps[3].add_dependency( (grps[0],) )
grps[1].add_dependency( (grps[2], grps[0]) )
self.assertEqual(
set(grps[5].dependencies(True).values()),
set( grps[i] for i in range(5))
@ -273,7 +273,7 @@ class EmGroupTestCase(unittest.TestCase):
set(grps[4].dependencies(True).values()),
set( grps[i] for i in range(4))
)
grps[2].add_dependencie(grps[0])
grps[2].add_dependency(grps[0])
self.assertEqual(
set(grps[5].dependencies(True).values()),
set( grps[i] for i in range(5))
@ -284,18 +284,18 @@ class EmGroupTestCase(unittest.TestCase):
)
# Inserting circular deps
with self.assertRaises(EditorialModelError):
grps[0].add_dependencie(grps[5])
grps[0].add_dependency(grps[5])
def test_circular_dep(self):
""" Test circular dependencies detection """
grps = [ EmGroup('group%d' % i) for i in range(10) ]
for i in range(1,10):
grps[i].add_dependencie(grps[i-1])
grps[i].add_dependency(grps[i-1])
for i in range(1,10):
for j in range(i+1,10):
with self.assertRaises(EditorialModelError):
grps[i].add_dependencie(grps[j])
grps[i].add_dependency(grps[j])
def test_d_hash(self):
""" Test the deterministic hash method """

View file

@ -26,7 +26,7 @@ class PickleFileTestCase(unittest.TestCase):
grp2 = model.new_group('testgroup2')
grp2.add_components((cls2, c1f2, c2f1, c2f2))
grp2.add_dependencie(grp1)
grp2.add_dependency(grp1)
tmpfd, temp_file = tempfile.mkstemp()
os.close(tmpfd)

View file

@ -44,7 +44,7 @@ class XmlFileTestCase(unittest.TestCase):
grp2 = emmodel.new_group('testgroup2')
grp2.add_components((cls2, c1f2, c2f1, c2f2))
grp2.add_dependencie(grp1)
grp2.add_dependency(grp1)
f_tmp, file_name = tempfile.mkstemp()
os.close(f_tmp)

View file

@ -105,7 +105,7 @@ class LeQueryDatasourceTestCase(unittest.TestCase):
[(('alias', {cls: 'firstname'}), '=', 'foo')])
self.check_nocall(read = False, exclude = ['delete'])
self.check_nocall(read = True)
@unittest.skip("Waiting references checks stack implementation")
def test_insert(self):
""" Testing LeInsertQuery mocking datasource """
@ -145,8 +145,8 @@ class LeQueryDatasourceTestCase(unittest.TestCase):
query = LeUpdateQuery(inst)
with self.assertRaises(LeApiQueryError):
# Bad call, giving datas while an instance was given to __init__
query.execute(datas = {'firstname': 'ooba'})
# Bad call, giving data while an instance was given to __init__
query.execute(data = {'firstname': 'ooba'})
query.execute()
self.mockwrite.update.assert_called_once_with(