
Delete is_exist

prieto 2017-03-23 15:34:35 +01:00
commit cdd13d9b4b
15 changed files with 494 additions and 454 deletions

View file

@ -11,243 +11,251 @@ LodelContext.expose_modules(globals(), {
'lodel.logger': 'logger',
'lodel.plugin': [('SessionHandlerPlugin', 'SessionHandler')],
'lodel.auth.exceptions': ['ClientError', 'ClientAuthenticationFailure',
'ClientPermissionDenied', 'ClientAuthenticationError'],
'lodel.leapi.query': ['LeGetQuery'],})
'ClientPermissionDenied', 'ClientAuthenticationError'],
'lodel.leapi.query': ['LeGetQuery'], })
##@brief Client metaclass designed to implements container accessor on
#Client Class
# @brief Client metaclass designed to implement container accessors on the
# Client class
#
#@todo Maybe we can delete this metaclass....
class ClientMetaclass(type):
def __init__(self, name, bases, attrs):
return super(ClientMetaclass, self).__init__(name, bases, attrs)
def __getitem__(self, key):
return self.datas()[key]
return self.data()[key]
def __delitem__(self, key):
del(self.datas()[key])
del(self.data()[key])
def __setitem__(self, key, value):
if self.get_session_token() is None:
self.set_session_token(SessionHandler.start())
datas = self.datas()
datas[key] = value
data = self.data()
data[key] = value
def __str__(self):
return str(self._instance)
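
For illustration, here is a minimal, self-contained sketch of the container-accessor pattern this metaclass provides (a simplified stand-in, not the lodel2 implementation): defining __getitem__/__setitem__/__delitem__ on the metaclass lets the class itself be indexed like a dict backed by its session data.

# Illustrative sketch only -- a simplified stand-in for ClientMetaclass.
class ContainerMeta(type):
    def __getitem__(cls, key):
        return cls.data()[key]
    def __setitem__(cls, key, value):
        cls.data()[key] = value
    def __delitem__(cls, key):
        del cls.data()[key]

class DemoClient(metaclass=ContainerMeta):
    _data = {}                      # stands in for the session data dict
    @classmethod
    def data(cls):
        return cls._data

DemoClient['lang'] = 'fr'           # routed to ContainerMeta.__setitem__
assert DemoClient['lang'] == 'fr'   # routed to ContainerMeta.__getitem__
del DemoClient['lang']
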
##@brief Abstract singleton class designed to handle client informations
# @brief Abstract singleton class designed to handle client information
#
# This class is designed to handle client authentication and sessions
class Client(object, metaclass = ClientMetaclass):
##@brief Singleton instance
class Client(object, metaclass=ClientMetaclass):
# @brief Singleton instance
_instance = None
##@brief List of dict that stores field ref for login and password
# @brief List of dict that stores field ref for login and password
#
# Storage specs :
# Storage specs :
#
# A list of dict, with keys 'login' and 'password', items are tuple.
#- login tuple contains (LeObjectChild, FieldName, link_field) with:
# - login tuple contains (LeObjectChild, FieldName, link_field) with:
# - LeObjectChild the dynclass containing the login
# - Fieldname the fieldname of LeObjectChild containing the login
# - link_field None if both login and password are in the same
# LeObjectChild. Else contains the field that make the link between
# login LeObject and password LeObject
#- password typle contains (LeObjectChild, FieldName)
# - password tuple contains (LeObjectChild, FieldName)
_infos_fields = None
##@brief Constant that stores the session key that stores authentication
#informations
_AUTH_DATANAME = '__auth_user_infos'
##@brief Constructor
# @brief Constant holding the session key under which authentication
# information is stored
_AUTH_DATANAME = '__auth_user_infos'
# @brief Constructor
#@param session_token mixed : Session token provided by client to interface
def __init__(self,session_token = None):
def __init__(self, session_token=None):
logger.debug(session_token)
if self.__class__ == Client:
raise NotImplementedError("Abstract class")
logger.debug("New instance of Client child class %s" %
self.__class__.__name__)
self.__class__.__name__)
if Client._instance is not None:
old = Client._instance
Client._instance = None
del(old)
logger.debug("Replacing old Client instance by a new one")
else:
#first instanciation, fetching settings
# first instantiation, fetching settings
self.fetch_settings()
##@brief Stores infos for authenticated users (None == anonymous)
# @brief Stores infos for authenticated users (None == anonymous)
self.__user = None
##@brief Stores the session handler
# @brief Stores the session handler
Client._instance = self
##@brief Stores LodelSession instance
self.__datas = dict()
# @brief Stores LodelSession instance
self.__data = dict()
if session_token is not None:
self.__datas = SessionHandler.restore(session_token)
self.__data = SessionHandler.restore(session_token)
self.__session_token = session_token
logger.debug("New client : %s" % self)
def __del__(self):
del(self.__session_token)
del(self.__datas)
del(self.__data)
# @brief Returns the session data
#@return the dict which stores the session data
@classmethod
def datas(cls):
return cls._instance.__datas
def data(cls):
return cls._instance.__data
# @brief Returns the user's information contained in the session's data
@classmethod
def user(cls):
if '__auth_user_infos' in cls._instance.__datas:
return cls._instance.__datas['__auth_user_infos']
if '__auth_user_infos' in cls._instance.__data:
return cls._instance.__data['__auth_user_infos']
else:
return None
# @brief Returns the session's token
@classmethod
def get_session_token(cls):
return cls._instance.__session_token
# @brief Set the session's token
#@param the value of the token
@classmethod
def set_session_token(cls, value):
cls._instance.__session_token = value
##@brief Try to authenticate a user with a login and a password
# @brief Try to authenticate a user with a login and a password
#@param login str : provided login
#@param password str : provided password (hash)
#@warning breaks composed UID
#@note implemets multiple login/password sources (useless ?)
#@note implements multiple login/password sources (useless ?)
#@todo composed UID broken method
#@todo allow to provide an authentication source
@classmethod
def authenticate(self, login = None, password = None):
#Authenticate
def authenticate(self, login=None, password=None):
# Authenticate
for infos in self._infos_fields:
logger.debug(self._infos_fields)
login_cls = infos['login'][0]
pass_cls = infos['password'][0]
qfilter = "{passfname} = {passhash}"
uid_fname = login_cls.uid_fieldname()[0] #COMPOSED UID BROKEN
uid_fname = login_cls.uid_fieldname()[0] # COMPOSED UID BROKEN
if login_cls == pass_cls:
#Same EmClass for login & pass
# Same EmClass for login & pass
qfilter = qfilter.format(
passfname = infos['password'][1],
passhash = password)
passfname=infos['password'][1],
passhash=password)
else:
#Different EmClass, building a relational filter
# Different EmClass, building a relational filter
passfname = "%s.%s" % (infos['login'][2], infos['password'][1])
qfilter = qfilter.format(
passfname = passfname,
passhash = password)
passfname=passfname,
passhash=password)
getq = LeGetQuery(infos['login'][0], qfilter,
field_list = [uid_fname], limit = 1)
field_list=[uid_fname], limit=1)
req = getq.execute()
if len(req) == 1:
self.__set_authenticated(infos['login'][0],req[0][uid_fname])
self.__set_authenticated(infos['login'][0], req[0][uid_fname])
break
if self.is_anonymous():
self.authentication_failure() #Security logging
##@brief Attempt to restore a session given a session token
self.authentication_failure() # Security logging
# @brief Attempt to restore a session given a session token
#@param token mixed : a session token
#@return Session datas (a dict)
#@return Session data (a dict)
#@throw ClientAuthenticationFailure if token is not valid or not
#existing
# existing
@classmethod
def restore_session(cls, token):
cls._assert_instance()
if cls._instance.__session_token is not None:
raise ClientAuthenticationError("Trying to restore a session, but \
a session is allready started !!!")
a session is already started !!!")
try:
cls._instance.__datas = SessionHandler.restore(token)
cls._instance.__data = SessionHandler.restore(token)
cls._instance.__session_token = token
except ClientAuthenticationFailure:
logger.warning("Session restoring fails")
return copy.copy(cls._instance.datas)
##@brief Return the current session token or None
logger.warning("Session restoring failed")
return copy.copy(cls._instance.data)
# @brief Returns the current session token or None
#@return A session token or None
@classmethod
def session_token(cls):
cls._assert_instance()
return cls._instance.__session_token
##@brief Delete current session
# @brief Deletes current session
@classmethod
def destroy(cls):
cls._assert_instance()
SessionHandler.destroy(cls._instance.__session_token)
cls._instance.__session_token = None
cls._instance.__datas = dict()
##@brief Delete current client and save its session
cls._instance.__data = dict()
# @brief Deletes current client and saves its session
@classmethod
def clean(cls):
if cls._instance.__session_token is not None:
SessionHandler.save(cls._instance.__session_token, cls._instance.__datas)
SessionHandler.save(cls._instance.__session_token, cls._instance.__data)
if Client._instance is not None:
del(Client._instance)
Client._instance = None
##@brief Test wether a client is anonymous or logged in
# @brief Tests if a client is anonymous or logged in
#@return True if client is anonymous
@classmethod
def is_anonymous(cls):
return Client._instance.user() is None
##@brief Method to call on authentication failure
# @brief Method to be called on authentication failure
#@throw ClientAuthenticationFailure
#@throw LodelFatalError if no Client child instance found
#@throw LodelFatalError if no Client child instance is found
@classmethod
def authentication_failure(cls):
cls._generic_error(ClientAuthenticationFailure)
##@brief Method to call on authentication error
# @brief Method to be called on authentication error
#@throw ClientAuthenticationError
#@throw LodelFatalError if no Client child instance found
#@throw LodelFatalError if no Client child instance is found
@classmethod
def authentication_error(cls, msg = "Unknow error"):
def authentication_error(cls, msg="Unknow error"):
cls._generic_error(ClientAuthenticationError, msg)
##@brief Method to call on permission denied error
# @brief Method to be called on permission denied error
#@throw ClientPermissionDenied
#@throw LodelFatalError if no Client child instance found
#@throw LodelFatalError if no Client child instance is found
@classmethod
def permission_denied_error(cls, msg = ""):
def permission_denied_error(cls, msg=""):
cls._generic_error(ClientPermissionDenied, msg)
##@brief Generic error method
# @brief Generic error method
#@see Client::authentication_failure() Client::authentication_error()
#Client::permission_denied_error()
#@throw LodelFatalError if no Client child instance found
# Client::permission_denied_error()
#@throw LodelFatalError if no Client child instance is found
@classmethod
def _generic_error(cls, expt, msg = ""):
def _generic_error(cls, expt, msg=""):
cls._assert_instance()
raise expt(Client._instance, msg)
##@brief Assert that an instance of Client child class exists
#@throw LodelFataError if no instance of Client child class found
# @brief Asserts that an instance of Client child class exists
#@throw LodelFataError if no instance of Client child class is found
@classmethod
def _assert_instance(cls):
if Client._instance is None:
raise LodelFatalError("No client instance found. Abording.")
##@brief Class method that fetches conf
# @brief Class method that fetches conf
#
#This method populates Client._infos_fields . This attribute stores
#informations on login and password location (LeApi object & field)
# This method populates Client._infos_fields. This attribute stores
# information on login and password location (LeApi object & field)
@classmethod
def fetch_settings(cls):
LodelContext.expose_dyncode(globals(), 'dyncode')
if cls._infos_fields is None:
cls._infos_fields = list()
else:
#Allready fetched
# Already fetched
return
infos = (
Settings.auth.login_classfield,
@ -266,21 +274,21 @@ a session is allready started !!!")
if fdh.is_reference() and res_infos[1][0] in fdh.linked_classes():
link_field = fname
if link_field is None:
#Unable to find link between login & password EmClasses
# Unable to find a link between login & password EmClasses
raise AuthenticationError("Unable to find a link between \
login EmClass '%s' and password EmClass '%s'. Abording..." % (
res_infos[0][0], res_infos[1][0]))
res_infos[0] = (res_infos[0][0], res_infos[0][1], link_field)
cls._infos_fields.append(
{'login':res_infos[0], 'password':res_infos[1]})
{'login': res_infos[0], 'password': res_infos[1]})
##@brief Set a user as authenticated and start a new session
# @brief Sets a user as authenticated and starts a new session
#@param leo LeObject child class : the LeObject the user is stored in
#@param uid str : uniq id (in leo)
#@return None
@classmethod
def __set_authenticated(cls, leo, uid):
cls._instance.__user = {'classname': leo.__name__, 'uid': uid, 'leoclass': leo}
#Store auth infos in session
cls._instance.__datas[cls._instance.__class__._AUTH_DATANAME] = copy.copy(cls._instance.__user)
# Store auth infos in session
cls._instance.__data[cls._instance.__class__._AUTH_DATANAME] = copy.copy(
cls._instance.__user)
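
As an aside, a hedged sketch of how authenticate() above builds its query filter; the field and link names below are hypothetical, only the two branches mirror the code.

# Sketch of the filter construction in authenticate() (hypothetical names).
qfilter = "{passfname} = {passhash}"
password_hash = "5f4dcc3b..."                        # hypothetical hash value

# Case 1: login and password stored on the same EmClass
same_class_filter = qfilter.format(passfname="password", passhash=password_hash)
# -> "password = 5f4dcc3b..."

# Case 2: login and password on different EmClasses, joined by a link field
link_field, pass_fieldname = "account", "password"   # hypothetical link
relational_filter = qfilter.format(
    passfname="%s.%s" % (link_field, pass_fieldname), passhash=password_hash)
# -> "account.password = 5f4dcc3b..."

# The resulting filter is then passed to
# LeGetQuery(login_class, qfilter, field_list=[uid_fname], limit=1);
# exactly one result means the credentials matched.
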

View file

@ -36,11 +36,13 @@ class EmComponent(MlNamedObject):
self.group = group
super().__init__(display_name, help_text)
# @brief Returns the display_name of the component if it is not None, otherwise its uid
def __str__(self):
if self.display_name is None:
return str(self.uid)
return str(self.display_name)
# @brief Returns a hash code for the component
def d_hash(self):
m = hashlib.md5()
for data in (
@ -57,7 +59,7 @@ class EmComponent(MlNamedObject):
#@ingroup lodel2_em
class EmClass(EmComponent):
# @brief Instanciate a new EmClass
# @brief Instanciates a new EmClass
#@param uid str : uniq identifier
#@param display_name MlString|str|dict : component display_name
#@param abstract bool : set the class as abstract if True
@ -78,7 +80,7 @@ class EmClass(EmComponent):
self.pure_abstract = bool(pure_abstract)
self.__datasource = datasources
if not isinstance(datasources, str) and len(datasources) != 2:
raise ValueError("datasources arguement can be a single datasource\
raise ValueError("datasources argument can be a single datasource\
name or two names in a tuple or a list")
if self.pure_abstract:
self.abstract = True
@ -115,8 +117,9 @@ class EmClass(EmComponent):
internal=True,
group=group)
# @brief Property that represent a dict of all fields (the EmField defined in this class and all its parents)
# @todo use Settings.editorialmodel.groups to determine wich fields should be returned
# @brief Property that represents a dict of all fields
# (the EmField objects defined in this class and in all its parents)
# @todo use Settings.editorialmodel.groups to determine which fields should be returned
@property
def __all_fields(self):
res = dict()
@ -130,9 +133,9 @@ class EmClass(EmComponent):
def datasource(self):
return self.__datasource
# @brief Return the list of all dependencies
# @brief Returns the list of all dependencies
#
# Reccursive parents listing
# Recursive parents listing
@property
def parents_recc(self):
if len(self.parents) == 0:
@ -155,7 +158,7 @@ class EmClass(EmComponent):
except KeyError:
raise EditorialModelError("No such EmField '%s'" % uid)
# @brief Keep in __fields only fields contained in active groups
# @brief Keeps in __fields only fields contained in active groups
def _set_active_fields(self, active_groups):
if not Settings.editorialmodel.editormode:
active_fields = []
@ -165,10 +168,10 @@ class EmClass(EmComponent):
self.__fields = {fname: fdh for fname, fdh in self.__fields.items()
if fdh in active_fields}
# @brief Add a field to the EmClass
# @brief Adds a field to the EmClass
# @param emfield EmField : an EmField instance
# @warning do not add an EmField allready in another class !
# @throw EditorialModelException if an EmField with same uid allready in this EmClass (overwritting allowed from parents)
# @warning do not add an EmField already in another class !
# @throw EditorialModelException if an EmField with same uid already in this EmClass (overwriting allowed from parents)
# @todo End the override checks (needs methods in data_handlers)
def add_field(self, emfield):
assert_edit()
@ -180,11 +183,11 @@ class EmClass(EmComponent):
parent_field = self.__all_fields[emfield.uid]
if not emfield.data_handler_instance.can_override(parent_field.data_handler_instance):
raise AttributeError(
"'%s' field override a parent field, but data_handles are not compatible" % emfield.uid)
"'%s' field overrides a parent field, but data_handlers are not compatible" % emfield.uid)
self.__fields[emfield.uid] = emfield
return emfield
# @brief Create a new EmField and add it to the EmClass
# @brief Creates a new EmField and adds it to the EmClass
# @param data_handler str : A DataHandler name
# @param uid str : the EmField uniq id
# @param **field_kwargs : EmField constructor parameters ( see @ref EmField.__init__() )
@ -221,7 +224,7 @@ class EmClass(EmComponent):
#@ingroup lodel2_em
class EmField(EmComponent):
# @brief Instanciate a new EmField
# @brief Instanciates a new EmField
# @param uid str : uniq identifier
# @param display_name MlString|str|dict : field display_name
# @param data_handler str : A DataHandler name
@ -256,7 +259,7 @@ class EmField(EmComponent):
def get_data_handler_cls(self):
return copy.copy(self.data_handler_cls)
##@brief Returne the uid of the emclass which contains this field
##@brief Returns the uid of the emclass which contains this field
def get_emclass_uid(self):
return self._emclass.uid
@ -277,7 +280,7 @@ class EmField(EmComponent):
class EmGroup(MlNamedObject):
# @brief Create a new EmGroup
# @brief Creates a new EmGroup
# @note you should NEVER call the constructor yourself. Use Model.add_group instead
# @param uid str : Uniq identifier
# @param depends list : A list of EmGroup dependencies
@ -297,10 +300,10 @@ class EmGroup(MlNamedObject):
for grp in depends:
if not isinstance(grp, EmGroup):
raise ValueError("EmGroup expected in depends argument but %s found" % grp)
self.add_dependencie(grp)
self.add_dependency(grp)
# @brief Returns EmGroup dependencie
# @param recursive bool : if True return all dependencies and their dependencies
# @brief Returns EmGroup dependencies
# @param recursive bool : if True returns all dependencies and their own dependencies
# @return a dict of EmGroup identified by uid
def dependencies(self, recursive=False):
res = copy.copy(self.require)
@ -316,7 +319,7 @@ class EmGroup(MlNamedObject):
return res
# @brief Returns EmGroup applicants
# @param recursive bool : if True return all dependencies and their dependencies
# @param recursive bool : if True returns all dependencies and their dependencies
# @returns a dict of EmGroup identified by uid
def applicants(self, recursive=False):
res = copy.copy(self.required_by)
@ -337,7 +340,7 @@ class EmGroup(MlNamedObject):
return (self.__components).copy()
# @brief Returns EmGroup display_name
# @param lang str | None : If None return default lang translation
# @param lang str | None : If None returns default lang translation
# @returns None if display_name is None, else display_name as a str
def get_display_name(self, lang=None):
name = self.display_name
@ -346,7 +349,7 @@ class EmGroup(MlNamedObject):
return name.get(lang)
# @brief Returns EmGroup help_text
# @param lang str | None : If None return default lang translation
# @param lang str | None : If None returns default lang translation
# @returns None if help_text is None, else help_text as a str
def get_help_text(self, lang=None):
help = self.help_text
@ -354,7 +357,7 @@ class EmGroup(MlNamedObject):
return None
return help.get(lang)
# @brief Add components in a group
# @brief Adds components in a group
# @param components list : EmComponent instances list
def add_components(self, components):
assert_edit()
@ -369,20 +372,20 @@ class EmGroup(MlNamedObject):
"Expecting components to be a list of EmComponent, but %s found in the list" % type(component))
self.__components |= set(components)
# @brief Add a dependencie
# @param em_group EmGroup|iterable : an EmGroup instance or list of instance
def add_dependencie(self, grp):
# @brief Add a dependency
# @param em_group EmGroup|iterable : an EmGroup instance or list of instances
def add_dependency(self, grp):
assert_edit()
try:
for group in grp:
self.add_dependencie(group)
self.add_dependency(group)
return
except TypeError:
pass
if grp.uid in self.require:
return
if self.__circular_dependencie(grp):
if self.__circular_dependency(grp):
raise EditorialModelError("Circular dependencie detected, cannot add dependencie")
self.require[grp.uid] = grp
grp.required_by[self.uid] = self
@ -406,9 +409,9 @@ class EmGroup(MlNamedObject):
self.required_by[grp.uid] = grp
grp.require[self.uid] = self
# @brief Search for circular dependencie
# @brief Search for circular dependency
# @return True if circular dep found else False
def __circular_dependencie(self, new_dep):
def __circular_dependency(self, new_dep):
return self.uid in new_dep.dependencies(True)
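
A self-contained sketch of the recursive dependency walk and the circular-dependency guard that add_dependency() relies on (simplified; the real EmGroup stores EmGroup instances keyed by uid):

# Simplified stand-in for EmGroup.dependencies() / __circular_dependency().
class Group:
    def __init__(self, uid):
        self.uid = uid
        self.require = {}      # uid -> Group this group depends on

    def dependencies(self, recursive=False):
        res = dict(self.require)
        if not recursive:
            return res
        for grp in list(res.values()):
            res.update(grp.dependencies(True))
        return res

    def add_dependency(self, grp):
        if self.uid in grp.dependencies(True) or grp.uid == self.uid:
            raise ValueError("Circular dependency detected")
        self.require[grp.uid] = grp

a, b, c = Group('a'), Group('b'), Group('c')
a.add_dependency(b)
b.add_dependency(c)
assert set(a.dependencies(True)) == {'b', 'c'}
# c.add_dependency(a) would raise: a already (transitively) depends on c
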
# @brief Search for circular applicant
@ -424,6 +427,8 @@ class EmGroup(MlNamedObject):
else:
return self.display_name.get()
# @brief Computes a d-hash code for the EmGroup
# @return a string
def d_hash(self):
payload = "%s%s%s" % (

View file

@ -42,7 +42,8 @@ class EditorialModel(MlNamedObject):
super().__init__(display_name, help_text)
# @brief EmClass uids accessor
#@return a dict of emclasses
#@return a copy of the dict containing all emclasses of the model if uid is None
# else a copy of the class with the given uid
def all_classes(self, uid=None):
if uid is None:
return copy.copy(self.__classes)
@ -52,6 +53,9 @@ class EditorialModel(MlNamedObject):
except KeyError:
raise EditorialModelException("EmClass not found : '%s'" % uid)
# @brief EmClass uids accessor
#@return the dict containing all emclasses of the model if uid is None
# else the class with the given uid
def all_classes_ref(self, uid=None):
if uid is None:
return self.__classes
@ -62,12 +66,13 @@ class EditorialModel(MlNamedObject):
raise EditorialModelException("EmGroup not found : '%s'" % uid)
# @brief active EmClass uids accessor
#@return a list of class uids
#@return a list of active class uids
def active_classes_uids(self):
return list(self.__active_classes.keys())
# @brief EmGroups accessor
#@return a dict of groups
#@return a copy of the dict of the model's groups if uid is None
# else a copy of the group with the given uniq id
def all_groups(self, uid=None):
if uid is None:
return copy.copy(self.__groups)
@ -78,7 +83,8 @@ class EditorialModel(MlNamedObject):
raise EditorialModelException("EmGroup not found : '%s'" % uid)
# @brief EmGroups accessor
#@return a dict of groups
#@return the dict of the model's groups if uid is None
# else the group with the given uniq id
def all_groups_ref(self, uid=None):
if uid is None:
return self.__groups
@ -89,7 +95,7 @@ class EditorialModel(MlNamedObject):
raise EditorialModelException("EmGroup not found : '%s'" % uid)
# @brief active EmClass uids accessor
#@return a list of class uids
#@return a list of active group uids
def active_groups_uids(self):
return list(self.__active_groups.keys())
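
The distinction the updated docstrings draw between all_classes()/all_groups() and their *_ref() counterparts is the usual copy-versus-reference one; a minimal sketch:

import copy

registry = {'article': object()}

def all_classes():          # returns a shallow copy: safe to mutate
    return copy.copy(registry)

def all_classes_ref():      # returns the live dict: mutations affect the model
    return registry

all_classes()['page'] = object()
assert 'page' not in registry          # the copy left the model untouched
all_classes_ref()['page'] = object()
assert 'page' in registry              # the reference modified the model
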
@ -97,7 +103,7 @@ class EditorialModel(MlNamedObject):
#@param uid None | str : give this argument to get a specific EmClass
#@return if uid is given returns an EmClass else returns an EmClass
# iterator
#@todo use Settings.editorialmodel.groups to determine wich classes should
#@todo use Settings.editorialmodel.groups to determine which classes should
# be returned
def classes(self, uid=None):
try:

View file

@ -460,7 +460,7 @@ def load_group_xml(model, elem):
group = model.all_groups_ref(uid.text)
group.display_name = name
group.help_text = help_text
group.add_dependencie(requires)
group.add_dependency(requires)
else:
group = EmGroup(uid.text, requires, name, help_text)

View file

@ -1,3 +1,5 @@
## @brief Exception classes for datahandlers
class LodelDataHandlerException(Exception):
pass

View file

@ -9,12 +9,13 @@ LodelContext.expose_modules(globals(), {
'LodelFatalError', 'DataNoneValid',
'FieldValidationError']})
## @brief Child class of SingleRef. The object referenced must exist
class Link(SingleRef):
pass
## @brief Child class of MultipleRef where references are represented in the form of a python list
# All the objects referenced must exist
class List(MultipleRef):
## @brief instanciates a list reference
@ -97,13 +98,16 @@ class Map(MultipleRef):
## @brief This Reference class is designed to handle hierarchy with some constraints
class Hierarch(MultipleRef):
directly_editable = False
## @brief Instanciates a data handler handling hierarchical relations with constraints
# @param back_reference tuple : Here it is mandatory to have a back ref (like a parent field)
# @param max_depth int | None : limit of depth
# @param max_childs int | None : maximum number of childs per node
# @param kwargs :
# - allowed_classes list | None : list of allowed em classes if None no restriction
# - internal bool : if False, the field is not internal
def __init__(self, back_reference, max_depth=None, max_childs=None, **kwargs):
super().__init__(back_reference=back_reference,
max_depth=max_depth,
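
A hedged usage sketch of the Hierarch constructor documented above; the back-reference target and limits are hypothetical, not taken from a real model:

# Hypothetical example: a hierarchical field limited to a depth of 3 and at
# most 10 childs per node; ('section', 'parent') is an illustrative back ref.
children_handler = Hierarch(
    back_reference=('section', 'parent'),
    max_depth=3,
    max_childs=10,
    allowed_classes=None,   # no restriction on referenced classes
    internal=False)
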

View file

@ -4,6 +4,7 @@ from lodel.context import LodelContext
LodelContext.expose_modules(globals(), {
'lodel.exceptions': ['LodelExceptions', 'LodelException']})
##@brief Handles LeApi error
class LeApiError(LodelException):
pass
@ -13,20 +14,20 @@ class LeApiErrors(LodelExceptions, LeApiError):
pass
##@brief When an error concerns a datas
##@brief When an error concerns data
class LeApiDataCheckError(LeApiError):
pass
##@brief Handles LeApi data errors
class LeApiDataCheckErrors(LodelExceptions, LeApiError):
pass
##@brief Handles leapi query errors
class LeApiQueryError(LeApiError):
pass
##@brief Handles mulitple query errors
##@brief Handles multiple query errors
class LeApiQueryErrors(LodelExceptions, LeApiQueryError):
pass

View file

@ -1,22 +1,25 @@
#-*- coding: utf-8 -*-
import os, os.path
import os
import os.path
import functools
from lodel.context import LodelContext
LodelContext.expose_modules(globals(), {
'lodel.editorial_model.components': ['EmComponent', 'EmClass', 'EmField',
'EmGroup'],
'EmGroup'],
'lodel.leapi.leobject': ['LeObject'],
'lodel.leapi.datahandlers.base_classes': ['DataHandler'],
'lodel.logger': 'logger'})
##@brief Generate python module code from a given model
# @brief Generates python module code from a given model
# @param model lodel.editorial_model.model.EditorialModel
def dyncode_from_em(model):
# Generation of LeObject child classes code
cls_code, modules, bootstrap_instr = generate_classes(model)
cls_code, bootstrap_instr = generate_classes(model)
# Header
imports = """from lodel.context import LodelContext
@ -25,10 +28,8 @@ LodelContext.expose_modules(globals(), {
'lodel.leapi.datahandlers.base_classes': ['DataField'],
'lodel.plugin.hooks': ['LodelHook']})
"""
for module in modules:
imports += "import %s\n" % module
class_list = [ LeObject.name2objname(cls.uid) for cls in get_classes(model) ]
# generates the list of all classes in the editorial model
class_list = [LeObject.name2objname(cls.uid) for cls in get_classes(model)]
# formatting all components of the output
res_code = """#-*- coding: utf-8 -*-
@ -41,17 +42,21 @@ dynclasses = {class_list}
dynclasses_dict = {class_dict}
{common_code}
""".format(
imports = imports,
classes = cls_code,
bootstrap_instr = bootstrap_instr,
class_list = '[' + (', '.join([cls for cls in class_list]))+']',
class_dict = '{' + (', '.join([ "'%s': %s" % (cls, cls)
for cls in class_list]))+'}',
common_code = common_code(),
imports=imports,
classes=cls_code,
bootstrap_instr=bootstrap_instr,
class_list='[' + (', '.join([cls for cls in class_list])) + ']',
class_dict='{' + (', '.join(["'%s': %s" % (cls, cls)
for cls in class_list])) + '}',
common_code=common_code(),
)
return res_code
##@brief Return the content of lodel.leapi.lefactory_common
# @brief Returns the content of lodel.leapi.lefactory_common
#
# @return a string
def common_code():
res = ""
fname = os.path.dirname(__file__)
@ -61,23 +66,32 @@ def common_code():
if not line.startswith('#-'):
res += line
return res
##@brief return A list of EmClass sorted by dependencies
# @brief Returns a list of EmClass sorted by dependencies
#
# The first elts in the list depends on nothing, etc.
# The first elts in the list depend on nothing, etc.
# @param a list of Emclass instances to be sorted
# @return a list of EmClass instances
def emclass_sorted_by_deps(emclass_list):
def emclass_deps_cmp(cls_a, cls_b):
return len(cls_a.parents_recc) - len(cls_b.parents_recc)
ret = sorted(emclass_list, key = functools.cmp_to_key(emclass_deps_cmp))
ret = sorted(emclass_list, key=functools.cmp_to_key(emclass_deps_cmp))
return ret
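
emclass_sorted_by_deps() simply orders classes by how many recursive parents they have; the same cmp_to_key pattern in a self-contained sketch:

import functools

# Stand-in objects: each has a parents_recc list, like EmClass does.
class FakeClass:
    def __init__(self, name, parents_recc):
        self.name = name
        self.parents_recc = parents_recc

base = FakeClass('base', [])
child = FakeClass('child', [base])
grandchild = FakeClass('grandchild', [base, child])

def deps_cmp(cls_a, cls_b):
    return len(cls_a.parents_recc) - len(cls_b.parents_recc)

ordered = sorted([grandchild, base, child], key=functools.cmp_to_key(deps_cmp))
assert [c.name for c in ordered] == ['base', 'child', 'grandchild']
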
##@brief Returns a list of EmClass that will be represented as LeObject child classes
def get_classes(model):
return [ cls for cls in emclass_sorted_by_deps(model.classes()) if not cls.pure_abstract ]
# @brief Returns a list of EmClass instances that will be represented as LeObject child classes
# @param model : an EditorialModel instance
# @return a list of EmClass instances
def get_classes(model):
return [cls for cls in emclass_sorted_by_deps(model.classes()) if not cls.pure_abstract]
# @brief Given an EmField returns the data_handler constructor suitable for dynamic code
# @param emfield an EmField instance
# @return a string
##@brief Given an EmField returns the data_handler constructor suitable for dynamic code
def data_handler_constructor(emfield):
#dh_module_name = DataHandler.module_name(emfield.data_handler_name)+'.DataHandler'
get_handler_class_instr = 'DataField.from_name(%s)' % repr(emfield.data_handler_name)
@ -85,60 +99,65 @@ def data_handler_constructor(emfield):
for name, val in emfield.data_handler_options.items():
if name == 'back_reference' and isinstance(val, tuple):
options.append('{optname}: ({leo_name}, {fieldname})'.format(
optname = repr(name),
leo_name = LeObject.name2objname(val[0]),
fieldname = repr(val[1]),))
optname=repr(name),
leo_name=LeObject.name2objname(val[0]),
fieldname=repr(val[1]),))
else:
options.append(repr(name)+': '+forge_optval(val))
options.append(repr(name) + ': ' + forge_optval(val))
return '{handler_instr}(**{{ {options} }})'.format(
handler_instr = get_handler_class_instr,
options = ', '.join(options))
##@brief Return a python repr of option values
handler_instr=get_handler_class_instr,
options=', '.join(options))
# @brief Return a python repr of option values
# @param optval A value of any type which represents an option
# @return a string
def forge_optval(optval):
if isinstance(optval, dict):
return '{' + (', '.join( [ '%s: %s' % (repr(name), forge_optval(val)) for name, val in optval.items()])) + '}'
return '{' + (', '.join(['%s: %s' % (repr(name), forge_optval(val)) for name, val in optval.items()])) + '}'
if isinstance(optval, (set, list, tuple)):
return '[' + (', '.join([forge_optval(val) for val in optval])) + ']'
if isinstance(optval, EmField):
return "{leobject}.data_handler({fieldname})".format(
leobject = LeObject.name2objname(optval._emclass.uid),
fieldname = repr(optval.uid)
)
elif isinstance(optval, EmClass):
leobject=LeObject.name2objname(optval._emclass.uid),
fieldname=repr(optval.uid)
)
if isinstance(optval, EmClass):
return LeObject.name2objname(optval.uid)
else:
return repr(optval)
##@brief Generate dyncode from an EmClass
# @param model EditorialModel :
# @todo delete imports. It is never use, consequently changed return parameters.
return repr(optval)
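
The pure-python part of forge_optval() can be sketched on its own (the real function also special-cases EmField and EmClass instances):

# Simplified forge_optval: handles only the dict / sequence / scalar cases.
def forge_optval_sketch(optval):
    if isinstance(optval, dict):
        return '{' + ', '.join('%s: %s' % (repr(k), forge_optval_sketch(v))
                               for k, v in optval.items()) + '}'
    if isinstance(optval, (set, list, tuple)):
        return '[' + ', '.join(forge_optval_sketch(v) for v in optval) + ']'
    return repr(optval)

assert forge_optval_sketch({'max': 5, 'tags': ('a', 'b')}) == \
    "{'max': 5, 'tags': ['a', 'b']}"
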
# @brief Generate dyncode from an EmClass
# @param model EditorialModel :
# @return a tuple with the emclass python code and a list of python instructions to bootstrap the dynamic code, in this order
def generate_classes(model):
res = ""
imports = list()
bootstrap = ""
# Generating field list for LeObjects generated from EmClass
for em_class in get_classes(model):
logger.info("Generating a dynamic class for %s" % em_class.uid)
uid = list() # List of fieldnames that are part of the EmClass primary key
parents = list() # List of parents EmClass
# Determine pk
uid = list() # List for fieldnames that are part of the EmClass primary key
parents = list() # List for em_class's parents
# Determines primary key
for field in em_class.fields():
if field.data_handler_instance.is_primary_key():
uid.append(field.uid)
# Determine parent for inheritance
# Determines parents for inheritance
if len(em_class.parents) > 0:
for parent in em_class.parents:
parents.append(LeObject.name2objname(parent.uid))
parents.append(LeObject.name2objname(parent.uid))
else:
parents.append('LeObject')
datasource_name = em_class.datasource
# Dynamic code generation for LeObject childs classes
# Dynamic code generation for LeObject child classes
em_cls_code = """
class {clsname}({parents}):
_abstract = {abstract}
@ -150,12 +169,12 @@ class {clsname}({parents}):
_child_classes = None
""".format(
clsname = LeObject.name2objname(em_class.uid),
parents = ', '.join(parents),
abstract = 'True' if em_class.abstract else 'False',
uid_list = repr(uid),
datasource_name = repr(datasource_name),
)
clsname=LeObject.name2objname(em_class.uid),
parents=', '.join(parents),
abstract='True' if em_class.abstract else 'False',
uid_list=repr(uid),
datasource_name=repr(datasource_name),
)
res += em_cls_code
# Dyncode fields bootstrap instructions
child_classes = model.get_class_childs(em_class.uid)
@ -163,14 +182,14 @@ class {clsname}({parents}):
child_classes = 'tuple()'
else:
child_classes = '(%s,)' % (', '.join(
[ LeObject.name2objname(emcls.uid) for emcls in child_classes]))
[LeObject.name2objname(emcls.uid) for emcls in child_classes]))
bootstrap += """{classname}._set__fields({fields})
{classname}._child_classes = {child_classes}
""".format(
classname = LeObject.name2objname(em_class.uid),
fields = '{' + (', '.join(['\n\t%s: %s' % (repr(emfield.uid),data_handler_constructor(emfield)) for emfield in em_class.fields()])) + '}',
child_classes = child_classes,
)
classname=LeObject.name2objname(em_class.uid),
fields='{' + (', '.join(['\n\t%s: %s' % (repr(emfield.uid),
data_handler_constructor(emfield)) for emfield in em_class.fields()])) + '}',
child_classes=child_classes,
)
bootstrap += "\n"
return res, set(imports), bootstrap
return res, bootstrap
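
To make the template above concrete, here is roughly what generate_classes() emits for a single hypothetical EmClass; the class and field names are assumed, and the attribute lines elided by this diff hunk are not shown:

# Hypothetical dyncode output for an EmClass 'article' with no parent class:
class Article(LeObject):
    _abstract = False
    # ...uid and datasource attributes elided in the hunk above...
    _child_classes = None

# Followed by bootstrap instructions of the form (field name assumed):
# Article._set__fields({'title': DataField.from_name('varchar')(**{ ... })})
# Article._child_classes = tuple()
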

View file

@ -5,7 +5,7 @@
#- All lines that begin with #- will be deleted from dynamically generated
#- code...
##@brief Return a dynamically generated class given it's name
##@brief Returns a dynamically generated class given its name
#@param name str : The dynamic class name
#@return False or a child class of LeObject
def name2class(name):
@ -14,7 +14,7 @@ def name2class(name):
return dynclasses_dict[name]
##@brief Return a dynamically generated class given it's name
##@brief Returns a dynamically generated class given its name
#@note Case insensitive version of name2class
#@param name str
#@return False or a child class of LeObject
@ -26,11 +26,10 @@ def lowername2class(name):
return new_dict[name]
##@brief Trigger dynclasses datasources initialisation
##@brief Triggers dynclasses datasources initialisation
@LodelHook("lodel2_plugins_loaded")
def lodel2_dyncode_datasources_init(self, caller, payload):
for cls in dynclasses:
cls._init_datasources()
LodelContext.expose_modules(globals(), {'lodel.plugin.hooks': ['LodelHook']})
LodelHook.call_hook("lodel2_dyncode_loaded", __name__, dynclasses)

View file

@ -8,20 +8,22 @@ import warnings
from lodel.context import LodelContext
LodelContext.expose_modules(globals(), {
'lodel.leapi.exceptions': ['LeApiError', 'LeApiErrors',
'LeApiDataCheckError', 'LeApiDataCheckErrors', 'LeApiQueryError',
'LeApiQueryErrors'],
'LeApiDataCheckError', 'LeApiDataCheckErrors', 'LeApiQueryError',
'LeApiQueryErrors'],
'lodel.plugin.hooks': ['LodelHook'],
'lodel.logger': ['logger']})
##@todo check datas when running query
# @todo check data when running query
class LeQuery(object):
##@brief Hookname prefix
# @brief Hookname prefix
_hook_prefix = None
##@brief arguments for the LeObject.check_data_value()
# @brief arguments for the LeObject.check_data_value()
_data_check_args = {'complete': False, 'allow_internal': False}
##@brief Abstract constructor
# @brief Abstract constructor
# @param target_class LeObject : class of object the query is about
def __init__(self, target_class):
from .leobject import LeObject
@ -29,77 +31,80 @@ class LeQuery(object):
raise NotImplementedError("Abstract class")
if not inspect.isclass(target_class) or \
not issubclass(target_class, LeObject):
raise TypeError("target class has to be a child class of LeObject but %s given"% target_class)
raise TypeError(
"target class has to be a child class of LeObject but %s given" % target_class)
self._target_class = target_class
self._ro_datasource = target_class._ro_datasource
self._rw_datasource = target_class._rw_datasource
##@brief Execute a query and return the result
#@param **datas
# @brief Executes a query and returns the result
#@param **data
#@return the query result
#@see LeQuery._query()
#@todo check that the check_datas_value is not duplicated/useless
def execute(self, datas):
if not datas is None:
def execute(self, data):
if data is not None:
self._target_class.check_datas_value(
datas,
**self._data_check_args)
self._target_class.prepare_datas(datas) #not yet implemented
data,
**self._data_check_args)
self._target_class.prepare_datas(data) # not yet implemented
if self._hook_prefix is None:
raise NotImplementedError("Abstract method")
LodelHook.call_hook(self._hook_prefix+'pre',
self._target_class,
datas)
ret = self._query(datas=datas)
ret = LodelHook.call_hook(self._hook_prefix+'post',
self._target_class,
ret)
LodelHook.call_hook(self._hook_prefix + 'pre',
self._target_class,
data)
ret = self._query(data=data)
ret = LodelHook.call_hook(self._hook_prefix + 'post',
self._target_class,
ret)
return ret
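
execute() wraps _query() between a 'pre' and a 'post' hook; a generic, self-contained sketch of that pattern (not the real LodelHook API):

# Generic pre/post hook pattern, independent of LodelHook.
_hooks = {}

def call_hook(name, caller, payload):
    for handler in _hooks.get(name, []):
        payload = handler(caller, payload)
    return payload

def run_query(data):                      # stands in for _query()
    return {'rows': [], 'asked': data}

def execute(data):
    call_hook('leapi_get_pre', 'DemoQuery', data)   # notify, return ignored
    result = run_query(data)
    return call_hook('leapi_get_post', 'DemoQuery', result)  # may rewrite result

_hooks.setdefault('leapi_get_post', []).append(
    lambda caller, result: dict(result, post_processed=True))
assert execute({'uid': 1})['post_processed'] is True
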
##@brief Childs classes implements this method to execute the query
#@param **datas
# @brief Child classes implement this method to execute the query
#@param **data
#@return query result
def _query(self, **datas):
def _query(self, **data):
raise NotImplementedError("Asbtract method")
##@return a dict with query infos
# @return a dict with query infos
def dump_infos(self):
return {'target_class': self._target_class}
def __repr__(self):
ret = "<{classname} target={target_class}>"
return ret.format(
classname=self.__class__.__name__,
target_class = self._target_class)
classname=self.__class__.__name__,
target_class=self._target_class)
# @brief Abstract class handling query with filters
##@brief Abstract class handling query with filters
class LeFilteredQuery(LeQuery):
##@brief The available operators used in query definitions
# @brief The available operators used in query definitions
_query_operators = [
' = ',
' <= ',
' >= ',
' != ',
' < ',
' > ',
' in ',
' not in ',
' like ',
' not like ']
' = ',
' <= ',
' >= ',
' != ',
' < ',
' > ',
' in ',
' not in ',
' like ',
' not like ']
##@brief Regular expression to process filters
# @brief Regular expression to process filters
_query_re = None
##@brief Abtract constructor for queries with filter
# @brief Abstract constructor for queries with filters
#@param target_class LeObject : class of object the query is about
#@param query_filters list : a single filter tuple (FIELD, OPERATOR, VALUE),
# a list of such tuples, or a dict {OP: list(filters)} with OP = 'OR'
# or 'AND'
def __init__(self, target_class, query_filters=None):
super().__init__(target_class)
##@brief The query filter tuple(std_filter, relational_filters)
# @brief The query filter tuple(std_filter, relational_filters)
self._query_filter = None
##@brief Stores potential subqueries (used when a query implies
# @brief Stores potential subqueries (used when a query implies
# more than one datasource).
#
# Subqueries are tuple(target_class_ref_field, LeGetQuery)
@ -107,11 +112,11 @@ class LeFilteredQuery(LeQuery):
query_filters = [] if query_filters is None else query_filters
self.set_query_filter(query_filters)
##@brief Abstract FilteredQuery execution method
# @brief Abstract FilteredQuery execution method
#
# This method takes care to execute subqueries before calling super execute
def execute(self, datas=None):
#copy originals filters
def execute(self, data=None):
# copy originals filters
orig_filters = copy.copy(self._query_filter)
std_filters, rel_filters = self._query_filter
@ -123,17 +128,17 @@ class LeFilteredQuery(LeQuery):
try:
filters, rel_filters = self._query_filter
res = super().execute(datas)
res = super().execute(data)
except Exception as e:
#restoring filters even if an exception is raised
# restoring filters even if an exception is raised
self.__query_filter = orig_filters
raise e #reraise
#restoring filters
raise e # reraise
# restoring filters
self._query_filter = orig_filters
return res
##@brief Add filter(s) to the query
# @brief Add filter(s) to the query
#
# This method is also able to slice query if different datasources are
# implied in the request
@ -144,39 +149,39 @@ class LeFilteredQuery(LeQuery):
def set_query_filter(self, query_filter):
if isinstance(query_filter, str):
query_filter = [query_filter]
#Query filter prepration
# Query filter preparation
filters_orig, rel_filters = self._prepare_filters(query_filter)
# Here we know that each relational filter concerns only one datasource
# thanks to _prepare_relational_fields
#Multiple datasources detection
# Multiple datasources detection
self_ds_name = self._target_class._datasource_name
result_rel_filters = list() # The filters that will stay in the query
result_rel_filters = list() # The filters that will stay in the query
other_ds_filters = dict()
for rfilter in rel_filters:
(rfield, ref_dict), op, value = rfilter
#rfield : the field in self._target_class
tmp_rel_filter = dict() #designed to stores rel_field of same DS
# rfield : the field in self._target_class
tmp_rel_filter = dict()  # designed to store rel_fields of the same DS
# First step : simplification
# Trying to delete relational filters done on referenced class uid
for tclass, tfield in copy.copy(ref_dict).items():
#tclass : reference target class
#tfield : referenced field from target class
# tclass : reference target class
# tfield : referenced field from target class
#
# !!!WARNING!!!
# The line below breaks multi UID support
#
if tfield == tclass.uid_fieldname()[0]:
#This relational filter can be simplified as
# This relational filter can be simplified as
# ref_field, op, value
# Note : we will have to dedup filters_orig
filters_orig.append((rfield, op, value))
del(ref_dict[tclass])
if len(ref_dict) == 0:
continue
#Determine what to do with other relational filters given
# Determine what to do with other relational filters given
# referenced class datasource
#Remember : each class in a relational filter has the same
# Remember : each class in a relational filter has the same
# datasource
tclass = list(ref_dict.keys())[0]
cur_ds = tclass._datasource_name
@ -189,23 +194,23 @@ class LeFilteredQuery(LeQuery):
other_ds_filters[cur_ds] = list()
other_ds_filters[cur_ds].append(
((rfield, ref_dict), op, value))
#deduplication of std filters
# deduplication of std filters
filters_cp = set()
if not isinstance(filters_orig, set):
for i, cfilt in enumerate(filters_orig):
a, b, c = cfilt
if isinstance(c, list): #list are not hashable
if isinstance(c, list): # list are not hashable
newc = tuple(c)
else:
newc = c
old_len = len(filters_cp)
filters_cp |= set((a,b,newc))
filters_cp |= set((a, b, newc))
if len(filters_cp) == old_len:
del(filters_orig[i])
# Sets _query_filter attribute of self query
self._query_filter = (filters_orig, result_rel_filters)
#Sub queries creation
# Sub queries creation
subq = list()
for ds, rfilters in other_ds_filters.items():
for rfilter in rfilters:
@ -218,7 +223,7 @@ class LeFilteredQuery(LeQuery):
subq.append((rfield, query))
self.subqueries = subq
##@return informations
# @return informations
def dump_infos(self):
ret = super().dump_infos()
ret['query_filter'] = self._query_filter
@ -238,16 +243,16 @@ class LeFilteredQuery(LeQuery):
res += '>'
return res
## @brief Prepare filters for datasource
# @brief Prepare filters for datasource
#
#A filter can be a string or a tuple with len = 3.
# A filter can be a string or a tuple with len = 3.
#
#This method divide filters in two categories :
# This method divides filters into two categories :
#
#@par Simple filters
#
#Those filters concerns fields that represent object values (a title,
#the content, etc.) They are composed of three elements : FIELDNAME OP
# Those filters concern fields that represent object values (a title,
# the content, etc.). They are composed of three elements : FIELDNAME OP
# VALUE . Where :
#- FIELDNAME is the name of the field
#- OP is one of the authorized comparison operands (see
@ -256,14 +261,14 @@ class LeFilteredQuery(LeQuery):
#
#@par Relational filters
#
#Those filters concerns on reference fields (see the corresponding
#abstract datahandler @ref lodel.leapi.datahandlers.base_classes.Reference)
#The filter as quite the same composition than simple filters :
# Those filters concern reference fields (see the corresponding
# abstract datahandler @ref lodel.leapi.datahandlers.base_classes.Reference)
# The filter has roughly the same composition as simple filters :
# FIELDNAME[.REF_FIELD] OP VALUE . Where :
#- FIELDNAME is the name of the reference field
#- REF_FIELD is an optional addon to the base field. It indicates on which
#field of the referenced object the comparison as to be done. If no
#REF_FIELD is indicated the comparison will be done on identifier.
# field of the referenced object the comparison has to be done. If no
# REF_FIELD is indicated the comparison will be done on the identifier.
#
#@param cls
#@param filters_l list : This list of str or tuple (or both)
@ -271,11 +276,11 @@ class LeFilteredQuery(LeQuery):
#@todo move this doc in another place (a dedicated page ?)
#@warning Does not support multiple UIDs for an EmClass
def _prepare_filters(self, filters_l):
filters=list()
filters = list()
res_filters = list()
rel_filters = list()
err_l = dict()
#Splitting in tuple if necessary
# Splitting in tuple if necessary
for i, fil in enumerate(filters_l):
if len(fil) == 3 and not isinstance(fil, str):
filters.append(tuple(fil))
@ -286,7 +291,7 @@ class LeFilteredQuery(LeQuery):
err_l["filter %d" % i] = e
for field, operator, value in filters:
err_key = "%s %s %s" % (field, operator, value) #to push in err_l
err_key = "%s %s %s" % (field, operator, value) # to push in err_l
# Spliting field name to be able to detect a relational field
field_spl = field.split('.')
if len(field_spl) == 2:
@ -310,12 +315,12 @@ field name" % field)
# inconsistency
err_l[field] = NameError("The field '%s' in %s is not \
a relational field, but %s.%s was present in the filter"
% (field,
self._target_class.__name__,
field,
ref_field))
% (field,
self._target_class.__name__,
field,
ref_field))
if field_datahandler.is_reference():
#Relationnal field
# Relationnal field
if ref_field is None:
# ref_field default value
#
@ -350,14 +355,14 @@ field to use for the relational filter"
value, error = field_datahandler.check_data_value(value)
if isinstance(error, Exception):
value = value_orig
res_filters.append((field,operator, value))
res_filters.append((field, operator, value))
if len(err_l) > 0:
raise LeApiDataCheckErrors(
"Error while preparing filters : ",
err_l)
"Error while preparing filters : ",
err_l)
return (res_filters, rel_filters)
## @brief Check and split a query filter
# @brief Check and split a query filter
# @note The query_filter format is "FIELD OPERATOR VALUE"
# @param query_filter str : A query_filter string
# @param cls
@ -382,18 +387,18 @@ field to use for the relational filter"
raise ValueError(msg % query_filter)
return result
## @brief Compile the regex for query_filter processing
# @brief Compile the regex for query_filter processing
# @note Set _LeObject._query_re
@classmethod
def __compile_query_re(cls):
op_re_piece = '(?P<operator>(%s)'
op_re_piece %= cls._query_operators[0].replace(' ', '\s')
for operator in cls._query_operators[1:]:
op_re_piece += '|(%s)'%operator.replace(' ', '\s')
op_re_piece += '|(%s)' % operator.replace(' ', '\s')
op_re_piece += ')'
re_full = '^\s*(?P<field>([a-z_][a-z0-9\-_]*\.)?[a-z_][a-z0-9\-_]*)\s*'
re_full += op_re_piece+'\s*(?P<value>.*)\s*$'
re_full += op_re_piece + '\s*(?P<value>.*)\s*$'
cls._query_re = re.compile(re_full, flags=re.IGNORECASE)
pass
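
The compiled regex splits a filter string into FIELD, OPERATOR and VALUE; a runnable sketch of the same idea (rebuilt here, not imported from the class):

import re

# Simplified rebuild of the filter-splitting regex.
operators = [' = ', ' <= ', ' >= ', ' != ', ' < ', ' > ', ' in ', ' not in ',
             ' like ', ' not like ']
op_re = '(?P<operator>' + '|'.join(
    '(%s)' % op.replace(' ', r'\s') for op in operators) + ')'
query_re = re.compile(
    r'^\s*(?P<field>([a-z_][a-z0-9\-_]*\.)?[a-z_][a-z0-9\-_]*)\s*'
    + op_re + r'\s*(?P<value>.*)\s*$',
    flags=re.IGNORECASE)

m = query_re.match("contributeur.nom = Doe")
assert (m.group('field'), m.group('operator').strip(), m.group('value')) \
    == ('contributeur.nom', '=', 'Doe')
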
@ -407,10 +412,10 @@ field to use for the relational filter"
msg %= (fieldname, target_class.__name__)
return NameError(msg)
##@brief Prepare a relational filter
# @brief Prepare a relational filter
#
#Relational filters are composed of a tuple like the simple filters
#but the first element of this tuple is a tuple to :
# Relational filters are composed of a tuple like the simple filters
# but the first element of this tuple is a tuple to :
#
#<code>((FIELDNAME, {REF_CLASS: REF_FIELD}), OP, VALUE)</code>
# Where :
@ -419,9 +424,9 @@ field to use for the relational filter"
# - REF_CLASS as key. It's a LeObject child class
# - REF_FIELD as value. The name of the referenced field in the REF_CLASS
#
#Visibly the REF_FIELD value of the dict will vary only when
#no REF_FIELD is explicitly given in the filter string notation
#and REF_CLASSES has differents uid
# In practice the REF_FIELD value of the dict will vary only when
# no REF_FIELD is explicitly given in the filter string notation
# and the REF_CLASSes have different uids
#
#@par String notation examples
#<pre>contributeur IN (1,2,3,5)</pre> will be transformed into :
@ -439,7 +444,7 @@ field to use for the relational filter"
#
#@param fieldname str : The relational field name
#@param ref_field str|None : The referenced field name (if None use
#uniq identifiers as referenced field
# uniq identifiers as referenced field)
#@return a well formed relational filter tuple or an Exception instance
def _prepare_relational_fields(self, fieldname, ref_field=None):
datahandler = self._target_class.field(fieldname)
@ -467,12 +472,12 @@ the relational filter %s"
logger.debug(msg)
if len(ref_dict) == 0:
return NameError("No field named '%s' in referenced objects [%s]"
% (ref_field,
','.join([rc.__name__ for rc in ref_classes])))
% (ref_field,
','.join([rc.__name__ for rc in ref_classes])))
return (fieldname, ref_dict)
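
Tying the notation above together, the tuple built for a filter such as "contributeur.nom = Doe" would look roughly like this (the referenced class is a hypothetical stand-in):

class Person:        # stand-in for the referenced LeObject child class
    pass

relational_filter = (
    ('contributeur', {Person: 'nom'}),   # (FIELDNAME, {REF_CLASS: REF_FIELD})
    ' = ',                               # OP
    'Doe')                               # VALUE
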
##@brief A query to insert a new object
# @brief A query to insert a new object
class LeInsertQuery(LeQuery):
_hook_prefix = 'leapi_insert_'
_data_check_args = {'complete': True, 'allow_internal': False}
@ -483,49 +488,49 @@ class LeInsertQuery(LeQuery):
abstract LeObject : %s" % target_class)
super().__init__(target_class)
## @brief Implements an insert query operation, with only one insertion
# @param datas : datas to be inserted
def _query(self, datas):
datas = self._target_class.prepare_datas(datas, True, False)
id_inserted = self._rw_datasource.insert(self._target_class, datas)
#  @brief Implements an insert query operation, with only one insertion
# @param data : data to be inserted
def _query(self, data):
data = self._target_class.prepare_datas(data, True, False)
id_inserted = self._rw_datasource.insert(self._target_class, data)
return id_inserted
"""
## @brief Implements an insert query operation, with multiple insertions
# @param datas : list of **datas to be inserted
def _query(self, datas):
# @param data : list of **data to be inserted
def _query(self, data):
nb_inserted = self._datasource.insert_multi(
self._target_class,datas_list)
self._target_class,data_list)
if nb_inserted < 0:
raise LeApiQueryError("Multiple insertions error")
return nb_inserted
"""
## @brief Execute the insert query
def execute(self, datas):
return super().execute(datas=datas)
#  @brief Execute the insert query
def execute(self, data):
return super().execute(data=data)
##@brief A query to update datas for a given object
# @brief A query to update data for a given object
#
#@todo Change behavior, Huge optimization problem when updating using filters
#and not instance. We have to run a GET and then 1 update by fecthed object...
# and not an instance. We have to run a GET and then one update per fetched object...
class LeUpdateQuery(LeFilteredQuery):
_hook_prefix = 'leapi_update_'
_data_check_args = {'complete': False, 'allow_internal': False}
##@brief Instanciate an update query
# @brief Instanciate an update query
#
#If a class and not an instance is given, no query_filters are expected
#and the update will be fast and simple. Else we have to run a get query
#before updating (to fetch datas, update them and then, construct them
#and check their consistency)
# If a class and not an instance is given, no query_filters are expected
# and the update will be fast and simple. Otherwise we have to run a get query
# before updating (to fetch the data, update it, construct it
# and check its consistency)
#@param target LeObject clas or instance
#@param query_filters list|None
#@todo change strategy with instance update. We have to accept datas for
#the execute method
#@todo change strategy with instance update. We have to accept data for
# the execute method
def __init__(self, target, query_filters=None):
##@brief This attr is set only if the target argument is an
#instance of a LeObject subclass
# @brief This attr is set only if the target argument is an
# instance of a LeObject subclass
self.__leobject_instance_datas = None
target_class = target
@ -542,16 +547,16 @@ target to LeUpdateQuery constructor"
super().__init__(target_class, query_filters)
##@brief Implements an update query
#@param datas dict : datas to update
# @brief Implements an update query
#@param data dict : data to be updated
#@returns the number of updated items
#@todo change stategy for instance update. Datas should be allowed
#for execute method (and query)
def _query(self, datas):
#@todo change strategy for instance update. Data should be allowed
# for execute method (and query)
def _query(self, data):
uid_name = self._target_class._uid[0]
if self.__leobject_instance_datas is not None:
#Instance update
#Building query_filter
# Instance update
# Building query_filter
filters = [(
uid_name,
'=',
@ -560,59 +565,60 @@ target to LeUpdateQuery constructor"
self._target_class, filters, [],
self.__leobject_instance_datas)
else:
#Update by filters, we have to fetch datas before updating
# Update by filters, we have to fetch data before updating
res = self._ro_datasource.select(
self._target_class, self._target_class.fieldnames(True),
self._query_filter[0],
self._query_filter[1])
#Checking and constructing datas
upd_datas = dict()
# Checking and constructing data
upd_data = dict()
for res_data in res:
res_data.update(datas)
res_datas = self._target_class.prepare_datas(
res_data.update(data)
res_data = self._target_class.prepare_datas(
res_data, True, True)
filters = [(uid_name, '=', res_data[uid_name])]
res = self._rw_datasource.update(
self._target_class, filters, [],
res_datas)
res_data)
return res
## @brief Execute the update query
def execute(self, datas=None):
if self.__leobject_instance_datas is not None and datas is not None:
raise LeApiQueryError("No datas expected when running an update \
#  @brief Execute the update query
def execute(self, data=None):
if self.__leobject_instance_datas is not None and data is not None:
raise LeApiQueryError("No data expected when running an update \
query on an instance")
if self.__leobject_instance_datas is None and datas is None:
raise LeApiQueryError("Datas are mandatory when running an update \
if self.__leobject_instance_datas is None and data is None:
raise LeApiQueryError("Data are mandatory when running an update \
query on a class with filters")
return super().execute(datas=datas)
return super().execute(data=data)
##@brief A query to delete an object
# @brief A query to delete an object
class LeDeleteQuery(LeFilteredQuery):
_hook_prefix = 'leapi_delete_'
def __init__(self, target_class, query_filter):
super().__init__(target_class, query_filter)
## @brief Execute the delete query
# @param datas
def execute(self, datas=None):
#  @brief Execute the delete query
# @param data
def execute(self, data=None):
return super().execute()
##@brief Implements delete query operations
# @param datas
# @brief Implements delete query operations
# @param data
#@returns the number of deleted items
def _query(self, datas=None):
def _query(self, data=None):
filters, rel_filters = self._query_filter
nb_deleted = self._rw_datasource.delete(
self._target_class, filters, rel_filters)
return nb_deleted
class LeGetQuery(LeFilteredQuery):
_hook_prefix = 'leapi_get_'
##@brief Instanciate a new get query
# @brief Instanciate a new get query
#@param target_class LeObject : class of object the query is about
#@param query_filters dict : {OP, list of query filters}
# or tuple (FIELD, OPERATOR, VALUE) )
@ -624,33 +630,33 @@ class LeGetQuery(LeFilteredQuery):
# - offset int : offset
def __init__(self, target_class, query_filters, **kwargs):
super().__init__(target_class, query_filters)
##@brief The fields to get
# @brief The fields to get
self._field_list = None
##@brief An equivalent to the SQL ORDER BY
# @brief An equivalent to the SQL ORDER BY
self._order = None
##@brief An equivalent to the SQL GROUP BY
# @brief An equivalent to the SQL GROUP BY
self._group = None
##@brief An equivalent to the SQL LIMIT x
# @brief An equivalent to the SQL LIMIT x
self._limit = None
##@brief An equivalent to the SQL LIMIT x, OFFSET
# @brief An equivalent to the SQL LIMIT x, OFFSET
self._offset = 0
# Checking kwargs and assigning default values if there is some
for argname in kwargs:
if argname not in (
'field_list', 'order', 'group', 'limit', 'offset'):
'field_list', 'order', 'group', 'limit', 'offset'):
raise TypeError("Unexpected argument '%s'" % argname)
if 'field_list' not in kwargs:
self.set_field_list(target_class.fieldnames(include_ro = True))
self.set_field_list(target_class.fieldnames(include_ro=True))
else:
self.set_field_list(kwargs['field_list'])
if 'order' in kwargs:
#check kwargs['order']
# check kwargs['order']
self._order = kwargs['order']
if 'group' in kwargs:
#check kwargs['group']
# check kwargs['group']
self._group = kwargs['group']
if 'limit' in kwargs and kwargs['limit'] is not None:
try:
@@ -669,7 +675,7 @@ class LeGetQuery(LeFilteredQuery):
msg = "offset argument expected to be an integer >= 0"
raise ValueError(msg)
##@brief Set the field list
# @brief Set the field list
# @param field_list list | None : If None use all fields
# @return None
# @throw LeApiQueryError if unknown field given
@@ -682,41 +688,41 @@ class LeGetQuery(LeFilteredQuery):
msg = "No field named '%s' in %s"
msg %= (fieldname, self._target_class.__name__)
expt = NameError(msg)
err_l[fieldname] = expt
err_l[fieldname] = expt
if len(err_l) > 0:
msg = "Error while setting field_list in a get query"
raise LeApiQueryErrors(msg = msg, exceptions = err_l)
raise LeApiQueryErrors(msg=msg, exceptions=err_l)
self._field_list = list(set(field_list))
##@brief Execute the get query
def execute(self, datas=None):
# @brief Execute the get query
def execute(self, data=None):
return super().execute()
##@brief Implements select query operations
# @brief Implements select query operations
# @returns a list containing the item(s)
def _query(self, datas=None):
# select datas corresponding to query_filter
def _query(self, data=None):
# select data corresponding to query_filter
fl = list(self._field_list) if self._field_list is not None else None
l_datas=self._ro_datasource.select(
target = self._target_class,
field_list = fl,
filters = self._query_filter[0],
relational_filters = self._query_filter[1],
order = self._order,
group = self._group,
limit = self._limit,
offset = self._offset)
return l_datas
l_data = self._ro_datasource.select(
target=self._target_class,
field_list=fl,
filters=self._query_filter[0],
relational_filters=self._query_filter[1],
order=self._order,
group=self._group,
limit=self._limit,
offset=self._offset)
return l_data
##@return a dict with query infos
# @return a dict with query infos
def dump_infos(self):
ret = super().dump_infos()
ret.update({ 'field_list' : self._field_list,
'order' : self._order,
'group' : self._group,
'limit' : self._limit,
'offset': self._offset,
})
ret.update({'field_list': self._field_list,
'order': self._order,
'group': self._group,
'limit': self._limit,
'offset': self._offset,
})
return ret
def __repr__(self):
@@ -725,7 +731,7 @@ field_list={field_list} order={order} group={group} limit={limit} \
offset={offset}"
res = res.format(**self.dump_infos())
if len(self.subqueries) > 0:
for n,subq in enumerate(self.subqueries):
for n, subq in enumerate(self.subqueries):
res += "\n\tSubquerie %d : %s"
res %= (n, subq)
res += ">"


@@ -84,9 +84,9 @@ class MongoDbDatasource(AbstractDatasource):
target = emcomp.uid_source()
tuid = target._uid[0] # Multiple UID broken here
results = self.select(
target, field_list = [tuid], filters = [],
target, field_list = [tuid], filters = [],
order=[(tuid, 'DESC')], limit = 1)
if len(results) == 0:
if len(results) == 0:
return 1
return results[0][tuid]+1
@@ -95,23 +95,23 @@ class MongoDbDatasource(AbstractDatasource):
#@param field_list list
#@param filters list : List of filters
#@param relational_filters list : List of relational filters
#@param order list : List of column to order. ex: order =
#@param order list : List of column to order. ex: order =
#[('title', 'ASC'),]
#@param group list : List of tupple representing the column used as
#@param group list : List of tupple representing the column used as
#"group by" fields. ex: group = [('title', 'ASC'),]
#@param limit int : Number of records to be returned
#@param offset int: used with limit to choose the start record
#@return list
#@todo Implement group for abstract LeObject childs
def select(self, target, field_list, filters = None,
relational_filters=None, order=None, group=None, limit=None,
def select(self, target, field_list, filters = None,
relational_filters=None, order=None, group=None, limit=None,
offset=0):
if target.is_abstract():
#Recursive calls for abstract LeObject child
results = self.__act_on_abstract(target, filters,
relational_filters, self.select, field_list = field_list,
order = order, group = group, limit = limit)
#Here we may implement the group
#If the query is ordered we have to sort the merged results again
if order is not None:
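
One possible shape for that re-sort step, not taken from the patch: once the results of the recursive calls on non-abstract children are merged, they can be re-ordered in pure Python with successive stable sorts, least significant key first:

def resort(results, order):
    # order is a list of (fieldname, 'ASC'|'DESC') tuples, as given to select()
    for fname, direction in reversed(order):
        results.sort(key=lambda doc: doc[fname],
                     reverse=(direction == 'DESC'))
    return results
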
@@ -138,11 +138,11 @@ class MongoDbDatasource(AbstractDatasource):
query_filters = self.__process_filters(
target, filters, relational_filters)
query_result_ordering = None
if order is not None:
query_result_ordering = utils.parse_query_order(order)
if group is None:
if field_list is None:
field_list = dict()
@@ -189,7 +189,7 @@ class MongoDbDatasource(AbstractDatasource):
results = list()
for document in cursor:
results.append(document)
return results
##@brief Deletes records according to given filters
@@ -236,7 +236,7 @@ abstract, preparing reccursiv calls" % (target, filters, relational_filters))
self.__update_backref_filtered(target, filters, relational_filters,
upd_datas, old_datas_l)
return res
##@brief Designed to be called by backref update in order to avoid
#infinite updates between back references
#@see update()
@@ -269,7 +269,7 @@ abstract, preparing reccursiv calls" % (target, filters, relational_filters))
raise MongoDataSourceError("Missing UID data will inserting a new \
%s" % target.__class__)
res = self.__collection(target).insert(new_datas)
self.__update_backref(target, new_datas[uidname], None, new_datas)
self.__update_backref(target, new_datas[uidname], None, new_datas)
return str(res)
## @brief Inserts a list of records in a given collection
@@ -281,10 +281,10 @@ abstract, preparing reccursiv calls" % (target, filters, relational_filters))
self._data_cast(datas)
res = self.__collection(target).insert_many(datas_list)
for new_datas in datas_list:
self.__update_backref(target, None, new_datas)
self.__update_backref(target, None, new_datas)
target.make_consistency(datas=new_datas)
return list(res.inserted_ids)
##@brief Update backref giving an action
#@param target leObject child class
#@param filters
@@ -303,7 +303,7 @@ abstract, preparing reccursiv calls" % (target, filters, relational_filters))
old_datas_l = self.__collection(target).find(
mongo_filters)
old_datas_l = list(old_datas_l)
uidname = target.uid_fieldname()[0] #MULTIPLE UID BROKEN HERE
for old_datas in old_datas_l:
self.__update_backref(
@@ -312,7 +312,7 @@ abstract, preparing reccursiv calls" % (target, filters, relational_filters))
##@brief Update back references of an object
#@ingroup plugin_mongodb_bref_op
#
#old_datas and new_datas arguments are set to None to indicate
#old_datas and new_datas arguments are set to None to indicate
#insertion or deletion. Calls examples :
#@par LeObject insert __update backref call
#<pre>
@@ -441,8 +441,8 @@ abstract, preparing reccursiv calls" % (target, filters, relational_filters))
self.__update_no_backref(
leo.__class__, [(leo.uid_fieldname()[0], '=', uidval)],
[], datas)
##@brief Utility function designed to handle the upd_dict of
##@brief Utility function designed to handle the upd_dict of
#__update_backref()
#
#Basically checks if a key exists at some level, if not create it with
@@ -453,7 +453,7 @@ abstract, preparing reccursiv calls" % (target, filters, relational_filters))
#@param uid_val mixed : the UID of the referenced object
#@return the updated version of upd_dict
@staticmethod
def __update_backref_upd_dict_prepare(upd_dict,bref_infos, bref_fname,
def __update_backref_upd_dict_prepare(upd_dict,bref_infos, bref_fname,
uid_val):
bref_cls, bref_leo, bref_dh, bref_value = bref_infos
if bref_cls not in upd_dict:
@@ -463,8 +463,8 @@ abstract, preparing reccursiv calls" % (target, filters, relational_filters))
if bref_fname not in upd_dict[bref_cls][uid_val]:
upd_dict[bref_cls][uid_val][1][bref_fname] = bref_value
return upd_dict
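
Restated as a standalone sketch (parameter names kept, datasource glue dropped), the preparation step above just makes sure each nesting level of upd_dict exists before the per-field value is written; treating the per-UID entry as a (bref_leo, field_dict) pair is an assumption drawn from the indexing visible above:

def prepare_upd_dict(upd_dict, bref_infos, bref_fname, uid_val):
    bref_cls, bref_leo, bref_dh, bref_value = bref_infos
    upd_dict.setdefault(bref_cls, {})
    upd_dict[bref_cls].setdefault(uid_val, (bref_leo, {}))   # assumed default
    upd_dict[bref_cls][uid_val][1].setdefault(bref_fname, bref_value)
    return upd_dict
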
##@brief Prepare a one value back reference update
#@param fname str : the source Reference field name
#@param fdh DataHandler : the source Reference DataHandler
@@ -520,7 +520,7 @@ have expected value. Expected was %s but found %s in %s" % (
return bref_val
elif oldd and not newdd:
#deletion
if not hasattr(bref_dh, "default"):
if not hasattr(bref_dh, "default"):
raise MongoDbConsistencyError("Unable to delete a \
value for a back reference update. The concerned field doesn't have a default \
value : in %s field %s" % (bref_leo,fname))
@@ -528,7 +528,7 @@ value : in %s field %s" % (bref_leo,fname))
elif not oldd and newdd:
bref_val = tuid
return bref_val
##@brief Fetch back reference informations
#@warning thanks to __update_backref_act() this method is useless
#@param bref_cls LeObject child class : __back_reference[0]
@@ -608,7 +608,7 @@ on non abstract childs" % act.__name__)
port = self.__db_infos['port'],
db_name = db_name,
ro = ro)
self.__conn_hash = conn_h = hash(conn_string)
if conn_h in self._connections:
self._connections[conn_h]['conn_count'] += 1
@@ -619,7 +619,7 @@ on non abstract childs" % act.__name__)
'conn_count': 1,
'db': utils.connect(conn_string)}
return self._connections[conn_h]['db'][self.__db_infos['db_name']]
##@brief Return a pymongo collection given a LeObject child class
#@param leobject LeObject child class (no instance)
@@ -760,7 +760,7 @@ on non abstract childs" % act.__name__)
rfilters[fname][repr_leo][rfield] = list()
rfilters[fname][repr_leo][rfield].append((op, value))
return rfilters
##@brief Convert lodel2 filters to pymongo conditions
#@param filters list : list of lodel filters
#@return dict representing pymongo conditions
@@ -859,7 +859,7 @@ field/operator couple in a query. We will keep only the first one")
1 if (a[fname]>b[fname] if cmpdir == 'ASC' else a[fname]<b[fname])\
else -1)
##@brief Correct some datas before giving them to pymongo
#
#For example sets have to be cast to lists
@@ -874,13 +874,3 @@ field/operator couple in a query. We will keep only the first one")
#with sets
datas[dname] = list(datas[dname])
return datas
##@brief Tool to check if a record with unique id uid is set in the target_class representation
#@param target_class : class to check in
#@param uid : a unique id in target_class
#@returns true if a record with unique id uid exists in the target_class representation, false if not
def is_exist(self, target_class, uid):
# retrouver la table qui correspond à target_class
# vérifier qu'il existe, ou pas, un enregistrement contenant uid
result = self.select(self, target_class, [target_class.uid_fieldname], filters = [(target_class.uid_fieldname, '=', uid)])
return len(result) == 1
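
Since this commit removes is_exist(), callers that still need an existence test can build one on select() directly. A minimal sketch, assuming a single-field UID as elsewhere in this datasource (not part of the patch):

def record_exists(datasource, target_class, uid):
    uid_fname = target_class.uid_fieldname()[0]   # multiple UIDs not handled
    found = datasource.select(
        target_class,
        field_list=[uid_fname],
        filters=[(uid_fname, '=', uid)],
        limit=1)
    return len(found) == 1
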


@@ -23,7 +23,7 @@ class EditorialModelTestCase(unittest.TestCase):
grp1.add_components((cls1, c1f1))
grp2 = model.new_group('testgroup2')
grp2.add_components((cls2, c1f2, c2f1, c2f2))
grp2.add_dependencie(grp1)
grp2.add_dependency(grp1)
e_hash = 0x250eab75e782e51bbf212f47c6159571
self.assertEqual(model.d_hash(), e_hash)
@@ -181,10 +181,10 @@ class EmGroupTestCase(unittest.TestCase):
grp3 = EmGroup('grp3')
grp4 = EmGroup('grp4')
grp2.add_dependencie(grp1)
grp3.add_dependencie(grp2)
grp4.add_dependencie(grp2)
grp4.add_dependencie(grp1)
grp2.add_dependency(grp1)
grp3.add_dependency(grp2)
grp4.add_dependency(grp2)
grp4.add_dependency(grp1)
self.assertEqual(set(grp1.dependencies().values()), set())
self.assertEqual(set(grp2.dependencies().values()), set([grp1]))
@@ -261,10 +261,10 @@ class EmGroupTestCase(unittest.TestCase):
def test_deps_complex(self):
""" More complex dependencies handling test """
grps = [ EmGroup('group%d' % i) for i in range(6) ]
grps[5].add_dependencie( (grps[1], grps[2], grps[4]) )
grps[4].add_dependencie( (grps[1], grps[3]) )
grps[3].add_dependencie( (grps[0],) )
grps[1].add_dependencie( (grps[2], grps[0]) )
grps[5].add_dependency( (grps[1], grps[2], grps[4]) )
grps[4].add_dependency( (grps[1], grps[3]) )
grps[3].add_dependency( (grps[0],) )
grps[1].add_dependency( (grps[2], grps[0]) )
self.assertEqual(
set(grps[5].dependencies(True).values()),
set( grps[i] for i in range(5))
@@ -273,7 +273,7 @@ class EmGroupTestCase(unittest.TestCase):
set(grps[4].dependencies(True).values()),
set( grps[i] for i in range(4))
)
grps[2].add_dependencie(grps[0])
grps[2].add_dependency(grps[0])
self.assertEqual(
set(grps[5].dependencies(True).values()),
set( grps[i] for i in range(5))
@@ -284,18 +284,18 @@ class EmGroupTestCase(unittest.TestCase):
)
# Inserting circular deps
with self.assertRaises(EditorialModelError):
grps[0].add_dependencie(grps[5])
grps[0].add_dependency(grps[5])
def test_circular_dep(self):
""" Test circular dependencies detection """
grps = [ EmGroup('group%d' % i) for i in range(10) ]
for i in range(1,10):
grps[i].add_dependencie(grps[i-1])
grps[i].add_dependency(grps[i-1])
for i in range(1,10):
for j in range(i+1,10):
with self.assertRaises(EditorialModelError):
grps[i].add_dependencie(grps[j])
grps[i].add_dependency(grps[j])
def test_d_hash(self):
""" Test the deterministic hash method """


@@ -26,7 +26,7 @@ class PickleFileTestCase(unittest.TestCase):
grp2 = model.new_group('testgroup2')
grp2.add_components((cls2, c1f2, c2f1, c2f2))
grp2.add_dependencie(grp1)
grp2.add_dependency(grp1)
tmpfd, temp_file = tempfile.mkstemp()
os.close(tmpfd)


@@ -44,7 +44,7 @@ class XmlFileTestCase(unittest.TestCase):
grp2 = emmodel.new_group('testgroup2')
grp2.add_components((cls2, c1f2, c2f1, c2f2))
grp2.add_dependencie(grp1)
grp2.add_dependency(grp1)
f_tmp, file_name = tempfile.mkstemp()
os.close(f_tmp)


@@ -105,7 +105,7 @@ class LeQueryDatasourceTestCase(unittest.TestCase):
[(('alias', {cls: 'firstname'}), '=', 'foo')])
self.check_nocall(read = False, exclude = ['delete'])
self.check_nocall(read = True)
@unittest.skip("Waiting references checks stack implementation")
def test_insert(self):
""" Testing LeInsertQuery mocking datasource """
@@ -145,8 +145,8 @@ class LeQueryDatasourceTestCase(unittest.TestCase):
query = LeUpdateQuery(inst)
with self.assertRaises(LeApiQueryError):
# Bad call, giving datas while an instance was given to __init__
query.execute(datas = {'firstname': 'ooba'})
# Bad call, giving data while an instance was given to __init__
query.execute(data = {'firstname': 'ooba'})
query.execute()
self.mockwrite.update.assert_called_once_with(