mirror of https://github.com/yweber/lodel2.git — synced 2025-10-29 02:29:03 +01:00

Query refactoring

This commit is contained in:
parent 10b24a43ff
commit 61dd475e8c
3 changed files with 263 additions and 294 deletions
@@ -1,3 +1,6 @@
Local configuration:
First of all, copy settings.ini to settings_local.ini and replace the values with the correct paths.

Doxygen documentation generation:
doxygen
@@ -273,10 +273,107 @@ class LeObject(object):
    # at the end of the dyncode parse
    # @warning This method is deleted once the dynamic code is loaded
    # @param field_list list : list of EmField instances
    # @param cls
    @classmethod
    def _set__fields(cls, field_list):
        cls._fields = field_list

    ## @brief Check that datas are valid for this type
    # @param datas dict : keys are field names, values are field values
    # @param complete bool : if True, expect that datas provide values for all non-internal fields
    # @param allow_internal bool : if True, don't raise an error if a field is internal
    # @param cls
    # @return Checked datas
    # @throw LeApiDataCheckError if errors are reported during the check
    @classmethod
    def check_datas_value(cls, datas, complete=False, allow_internal=True):
        err_l = dict() # error storing
        correct = set() # valid field names
        mandatory = set() # mandatory field names
        for fname, datahandler in cls._fields.items():
            if allow_internal or not datahandler.is_internal():
                correct.add(fname)
                if complete and not hasattr(datahandler, 'default'):
                    mandatory.add(fname)
        provided = set(datas.keys())
        # searching for unknown fields
        for u_f in provided - correct:
            # Here we could check whether the field is invalid or rejected because
            # it is internal
            err_l[u_f] = AttributeError("Unknown or unauthorized field '%s'" % u_f)
        # searching for missing mandatory fields
        for missing in mandatory - provided:
            err_l[missing] = AttributeError("The data for field '%s' is missing" % missing)
        # check datas
        checked_datas = dict()
        for name, value in [(name, value) for name, value in datas.items() if name in correct]:
            dh = cls._fields[name]
            res = dh.check_data_value(value)
            checked_datas[name], err = res
            if err:
                err_l[name] = err

        if len(err_l) > 0:
            raise LeApiDataCheckError("Error while checking datas", err_l)
        return checked_datas

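# A minimal standalone sketch of the field-checking logic above (it is not part
# of the lodel codebase): unknown fields are "provided - correct", missing
# mandatory fields are "mandatory - provided". The handler class and field
# names below are purely illustrative.
class _DummyHandler:
    def __init__(self, internal=False, **kwargs):
        self.__internal = internal
        if 'default' in kwargs:
            self.default = kwargs['default']

    def is_internal(self):
        return self.__internal

    def check_data_value(self, value):
        return value, None  # a real handler would cast/validate here

_fields = {'title': _DummyHandler(), 'date': _DummyHandler(default=None)}

def _check(datas, complete=False, allow_internal=True):
    correct = {f for f, dh in _fields.items() if allow_internal or not dh.is_internal()}
    mandatory = {f for f in correct if complete and not hasattr(_fields[f], 'default')}
    errors = {}
    for unknown in set(datas) - correct:
        errors[unknown] = AttributeError("Unknown or unauthorized field '%s'" % unknown)
    for missing in mandatory - set(datas):
        errors[missing] = AttributeError("The data for field '%s' is missing" % missing)
    if errors:
        raise ValueError(errors)
    return {f: _fields[f].check_data_value(v)[0] for f, v in datas.items() if f in correct}

print(_check({'title': 'hello'}, complete=True))  # 'date' has a default, so only 'title' is mandatory
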
    ##@brief Check and prepare datas
    #
    # @warning when complete = False we are not able to run _construct_datas() and _check_datas_consistency()
    #
    # @param datas dict : {fieldname : fieldvalue, ...}
    # @param complete bool : If True you MUST give all the datas
    # @param allow_internal : Whether or not internal fields are expected in datas
    # @param cls
    # @return Datas ready for use
    # @todo complete is very unsafe, find a way to get rid of it
    @classmethod
    def prepare_datas(cls, datas, complete=False, allow_internal=True):
        if not complete:
            warnings.warn("\nThe current implementation can make datas construction and consistency unsafe when datas are not complete\n")
        ret_datas = cls.check_datas_value(datas, complete, allow_internal)
        if isinstance(ret_datas, Exception):
            raise ret_datas

        if complete:
            ret_datas = cls._construct_datas(ret_datas)
            cls._check_datas_consistency(ret_datas)
        return ret_datas

    ## @brief Construct datas values
    #
    # @param cls
    # @param datas dict : Datas that have been returned by LeCrud.check_datas_value() method
    # @return A new dict of datas
    # @todo IMPLEMENTATION
    @classmethod
    def _construct_datas(cls, datas):
        """
        constructor = DatasConstructor(cls, datas, cls.fieldtypes())
        ret = {
                fname: constructor[fname]
                for fname, ftype in cls.fieldtypes().items()
                if not ftype.is_internal() or ftype.internal != 'autosql'
        }
        return ret
        """
        pass

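# _construct_datas() is still a stub; the sketch below is only a guess at the
# intended behaviour (filling in default values for fields the caller did not
# provide). The handlers here are plain objects with an optional 'default'
# attribute, purely for illustration; the real implementation is expected to
# rely on a DatasConstructor instead.
class _Handler:
    pass

_title_h = _Handler()
_date_h = _Handler()
_date_h.default = None  # assumed semantics of a field default

_sketch_fields = {'title': _title_h, 'date': _date_h}

def _construct(datas, fields):
    constructed = dict(datas)
    for fname, dh in fields.items():
        if fname not in constructed and hasattr(dh, 'default'):
            constructed[fname] = dh.default
    return constructed

print(_construct({'title': 'hello'}, _sketch_fields))  # {'title': 'hello', 'date': None}
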
    ## @brief Check datas consistency
    #
    # @warning assert that datas are complete
    # @param cls
    # @param datas dict : Datas that have been returned by LeCrud._construct_datas() method
    # @throw LeApiDataCheckError if it fails
    @classmethod
    def _check_datas_consistency(cls, datas):
        err_l = dict()
        for fname, dh in cls._fields.items():
            ret = dh.check_data_consistency(cls, fname, datas)
            if isinstance(ret, Exception):
                err_l[fname] = ret

        if len(err_l) > 0:
            raise LeApiDataCheckError("Datas consistency checks failed", err_l)

@@ -2,322 +2,191 @@

import re
from .leobject import LeObject, LeApiErrors, LeApiDataCheckError

from lodel.plugin.hooks import LodelHook


class LeQueryError(Exception):
    pass


class LeQuery(object):

    ##@brief Hook name prefix
    _hook_prefix = None
    ##@brief Arguments for LeObject.check_datas_value()
    _data_check_args = { 'complete': False, 'allow_internal': False }

    ##@brief The datasource object used for this query
    datasource = None

    ##@brief The available operators used in query definitions
    query_operators = ['=', '<=', '>=', '!=', '<', '>', ' in ', ' not in ', ' like ', ' not like ']

    ##@brief Constructor
    # @param target_class EmClass : class of the object to query about
    ##@brief Abstract constructor
    # @param target_class LeObject : class of object the query is about
    def __init__(self, target_class):
        if self._hook_prefix is None:
            raise NotImplementedError("Abstract class")
        if not issubclass(target_class, LeObject):
            raise TypeError("target class has to be a child class of LeObject")
        self.target_class = target_class

##@brief Class representing an Insert query
class LeInsertQuery(LeQuery):

    ##@brief Constructor
    # @param target_class EmClass : class corresponding to the inserted object
    # @param datas dict : datas to insert
    def __init__(self, target_class, datas):
        super().__init__(target_class)
        self.datas = datas

    ##@brief Executes the insert query
    # @return bool
    # @TODO reactivate the LodelHook calls when this class is implemented
    def execute(self):
        datas = self.datas  # LodelHook.call_hook('leapi_insert_pre', self.target_class, self.datas)
        ret = self.__insert(**datas)
        # ret = LodelHook.call_hook('leapi_insert_post', self.target_class, ret)
        self.__target_class = target_class

    ##@brief Execute a query and return the result
    # @param **datas
    # @return the query result
    # @see LeQuery.__query()
    #
    # @note maybe the datasource is not an argument but should be determined
    # elsewhere
    def execute(self, datasource, **datas):
        if len(datas) > 0:
            self.__target_class.check_datas_value(datas, **self._data_check_args)
            self.__target_class.prepare_datas()  # not yet implemented
        if self._hook_prefix is None:
            raise NotImplementedError("Abstract method")
        LodelHook.call_hook(self._hook_prefix + '_pre',
                            self.__target_class,
                            datas)
        ret = self.__query(datasource, **datas)
        ret = LodelHook.call_hook(self._hook_prefix + '_post',
                                  self.__target_class,
                                  ret)
        return ret

    ##@brief Child classes implement this method to execute the query
    # @param **datas
    # @return query result
    def __query(self, **datas):
        raise NotImplementedError("Abstract method")

    ##@brief Calls the datasource to perform the insert command
    # @param datas dict : formatted datas corresponding to the insert
    # @return str : the uid of the inserted object
    def __insert(self, **datas):
        insert_datas = self.target_class.prepare_datas(datas, complete=True, allow_internal=True)
        res = self.datasource.insert(self.target_class, **insert_datas)
        return res

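# A standalone sketch (not lodel code) of the pre/post hook pattern that
# LeQuery.execute() follows above: fire '<prefix>_pre' with the datas, run the
# query, then pass the result through '<prefix>_post'. The registry below is
# illustrative; the real API lives in lodel.plugin.hooks.LodelHook.
_HOOKS = {}

def _call_hook(name, target, payload):
    for handler in _HOOKS.get(name, []):
        payload = handler(target, payload)
    return payload

def _execute(hook_prefix, target, query_func, **datas):
    _call_hook(hook_prefix + '_pre', target, datas)
    ret = query_func(**datas)
    return _call_hook(hook_prefix + '_post', target, ret)

_HOOKS['leapi_insert_post'] = [lambda target, ret: {'uid': ret}]
print(_execute('leapi_insert', object, lambda **d: 42, title='hello'))  # {'uid': 42}
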
##@brief Class representing an abstract filtered query
class LeFilteredQuery(LeQuery):

    ##@brief The available operators used in query definitions
    query_operators = [
                        '=',
                        '<=',
                        '>=',
                        '!=',
                        '<',
                        '>',
                        ' in ',
                        ' not in ',
                        ' like ',
                        ' not like ']

    ##@brief Abstract constructor for queries with a filter
    # @param target_class LeObject : class of object the query is about
    # @param query_filters list : list of query filter strings (or tuples
    # (FIELD, OPERATOR, VALUE) )
    def __init__(self, target_class, query_filter):
        super().__init__(target_class)
        ##@brief The query filter
        self.__query_filter = None
        self.set_query_filter(query_filter)

    ##@brief Set the query filter for a query
    def set_query_filter(self, query_filter):
        #
        # Query filter check & prepare should be done here
        #
        self.__query_filter = query_filter

##@brief A query to insert a new object
class LeInsertQuery(LeQuery):

    _hook_prefix = 'leapi_insert_'
    _data_check_args = { 'complete': True, 'allow_internal': False }

    ##@brief Constructor
    # @param target_class EmClass : Object of the query
    def __init__(self, target_class):
        super().__init__(target_class)

    ## @brief Implements the insert query operations
    # @param **datas : datas to be inserted
    def __query(self, datasource, **datas):
        pass

    ##@brief Validates the query filters
    # @param query_filters list
    # @return bool
    # @raise LeQueryError if one of the filters is not valid
    @classmethod
    def validate_query_filters(cls, query_filters):
        for query_filter in query_filters:
            if query_filter[1] not in cls.query_operators:
                raise LeQueryError("The operator %s is not valid." % query_filter[1])
        return True

    ##@brief Checks if a field is relational
    # @param field str : Name of the field
    # @return bool
    @classmethod
    def is_relational_field(cls, field):
        return field.startswith('superior.') or field.startswith('subordinate.')

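# Standalone illustration (not lodel code) of how the filter validation and the
# relational-field test above operate on already-split (FIELD, OPERATOR, VALUE)
# tuples; the operator must be one of query_operators, spaces included for the
# textual operators. The filter values are illustrative.
_operators = ['=', '<=', '>=', '!=', '<', '>', ' in ', ' not in ', ' like ', ' not like ']

def _validate(query_filters):
    for query_filter in query_filters:
        if query_filter[1] not in _operators:
            raise ValueError("The operator %s is not valid." % query_filter[1])
    return True

def _is_relational(field):
    return field.startswith('superior.') or field.startswith('subordinate.')

print(_validate([('title', ' like ', '%lodel%'), ('superior.id', '=', '42')]))  # True
print(_is_relational('superior.id'), _is_relational('title'))  # True False
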
##@brief Class representing a Get query
class LeGetQuery(LeFilteredQuery):

    ##@brief Constructor
    # @param target_class EmClass : main class
    # @param query_filters
    # @param field_list list
    # @param order list : list of tuples corresponding to the fields used to sort the results
    # @param group list : list of tuples corresponding to the fields used to group the results
    # @param limit int : Maximum number of results to get
    # @param offset int
    # @param instanciate bool : if True, objects will be returned instead of dictionaries
    def __init__(self, target_class, query_filters, field_list=None, order=None, group=None, limit=None, offset=0, instanciate=True):
        super().__init__(target_class)
        self.query_filters = query_filters
        self.default_field_list = []
        self.field_list = field_list if field_list is not None else self.target_class.fieldnames()
        self.order = order
        self.group = group
        self.limit = limit
        self.offset = offset
        self.instanciate = instanciate

    ##@brief Executes the query
    # @return list
    # @TODO activate LodelHook calls
    def execute(self):
        datas = self.datas  # LodelHook.call_hook('leapi_get_pre', self.target_object, self.datas)
        ret = self.__get(**datas)
        # ret = LodelHook.call_hook('leapi_get_post', self.target_object, ret)
        return ret

    def __get(self, **datas):
        field_list = self.__prepare_field_list()

        query_filters, relational_filters = self.__prepare_filters()

        # Preparing the "order" parameters
        if self.order:
            order = self.__prepare_order()
            if isinstance(order, Exception):
                raise order  # could be buffered and raised later, but __prepare_filters raises when it fails

        # Preparing the "group" parameters
        if self.group:
            group = self.__prepare_order()  # note: reuses __prepare_order(), which reads self.order
            if isinstance(group, Exception):
                raise group  # could be buffered and raised later

        # checks the limit and offset values
        if self.limit is not None and self.limit <= 0:
            raise ValueError('Invalid limit given')

        if self.offset is not None and self.offset < 0:
            raise ValueError('Invalid offset given : %d' % self.offset)

        results = self._datasource.select()  # TODO add the correct arguments for the datasource's method call
        return results

    ##@brief Prepares the field list
    # @return list
    # @raise LeApiDataCheckError
    def __prepare_field_list(self):
        errors = dict()
        ret_field_list = list()
        for field in self.field_list:
            if self.is_relational(field):
                ret = self.__prepare_relational_field(field)
            else:
                ret = self.__check_field(field)

            if isinstance(ret, Exception):
                errors[field] = ret
            else:
                ret_field_list.append(ret)

        if len(errors) > 0:
            raise LeApiDataCheckError(errors)

        return ret_field_list

    ##@brief Prepares a relational field
    def __prepare_relational_field(self, field):
        # TODO Implement the method
        return field

    ##@brief Splits a filter string into a tuple (FIELD, OPERATOR, VALUE)
    # @param filter str
    # @return tuple
    # @raise ValueError
    def __split_filter(self, filter):
        if self.query_re is None:
            self.__compile_query_re()

        matches = self.query_re.match(filter)
        if not matches:
            raise ValueError("The query_filter '%s' seems to be invalid" % filter)

        result = (matches.group('field'), re.sub(r'\s', ' ', matches.group('operator')), matches.group('value').strip())
        for r in result:
            if len(r) == 0:
                raise ValueError("The query_filter '%s' seems to be invalid" % filter)

        return result

    def __compile_query_re(self):
        op_re_piece = '(?P<operator>(%s)' % self._query_operators[0].replace(' ', r'\s')
        for operator in self._query_operators[1:]:
            op_re_piece += '|(%s)' % operator.replace(' ', r'\s')
        op_re_piece += ')'
        self.query_re = re.compile(r'^\s*(?P<field>(((superior)|(subordinate))\.)?[a-z_][a-z0-9\-_]*)\s*' + op_re_piece + r'\s*(?P<value>[^<>=!].*)\s*$', flags=re.IGNORECASE)

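# A standalone, runnable version (not lodel code) of the filter parsing that
# __compile_query_re() / __split_filter() implement above: one alternation is
# built from the operator list, then field, operator and value are captured.
# The example filter strings are illustrative.
import re

_query_operators = ['=', '<=', '>=', '!=', '<', '>', ' in ', ' not in ', ' like ', ' not like ']
_op_re = '|'.join('(%s)' % op.replace(' ', r'\s') for op in _query_operators)
_query_re = re.compile(
    r'^\s*(?P<field>(((superior)|(subordinate))\.)?[a-z_][a-z0-9\-_]*)\s*'
    + '(?P<operator>' + _op_re + r')\s*(?P<value>[^<>=!].*)\s*$',
    flags=re.IGNORECASE)

def _split_filter(filter_str):
    m = _query_re.match(filter_str)
    if not m:
        raise ValueError("The query_filter '%s' seems to be invalid" % filter_str)
    return (m.group('field'), re.sub(r'\s', ' ', m.group('operator')), m.group('value').strip())

print(_split_filter('superior.title like %lodel%'))  # ('superior.title', ' like ', '%lodel%')
print(_split_filter('lodel_id = 42'))                # ('lodel_id', '=', '42')
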
    ##@brief Checks if a field is in the target class of the query
    # @param field str
    # @return str
    # @raise ValueError
    def __check_field(self, field):
        if field not in self.target_class.fieldnames():
            return ValueError("No such field '%s' in %s" % (field, self.target_class))
        return field

    ##@brief Prepares the filters (relational and others)
    # @return tuple
    def __prepare_filters(self):
        filters = list()
        errors = dict()
        res_filters = list()
        rel_filters = list()

        # Splitting into tuples if necessary
        for filter in self.query_filters:
            if len(filter) == 3 and not isinstance(filter, str):
                filters.append(tuple(filter))
            else:
                filters.append(self.__split_filter(filter))

        for field, operator, value in filters:
            # TODO check the relational filters
            ret = self.__check_field(field)
            if isinstance(ret, Exception):
                errors[field] = ret
            else:
                res_filters.append((ret, operator, value))

        if len(errors) > 0:
            raise LeApiDataCheckError("Error while preparing filters : ", errors)

        return (res_filters, rel_filters)

        datas = dict()
        if LeFilteredQuery.validate_query_filters(self.query_filters):
            datas['query_filters'] = self.query_filters
        datas['target_class'] = self.target_class
        return datas

    ##@brief Prepares the "order" parameters
    # @return list
    def __prepare_order(self):
        errors = dict()
        result = []
        for order_field in self.order:
            if not isinstance(order_field, tuple):
                order_field = (order_field, 'ASC')
            if len(order_field) != 2 or order_field[1].upper() not in ['ASC', 'DESC']:
                errors[order_field] = ValueError("Expected a string or a tuple with (FIELDNAME, ['ASC'|'DESC']) but got : %s" % order_field)
            else:
                ret = self.target_class.check_field(order_field[0])
                if isinstance(ret, Exception):
                    errors[order_field] = ret
                order_field = (order_field[0], order_field[1].upper())
                result.append(order_field)

        if len(errors) > 0:
            raise LeApiErrors("Errors when preparing ordering fields", errors)
        return result

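# Standalone sketch (not lodel code) of the normalization done by
# __prepare_order() above: bare field names default to ascending order and
# everything is normalized into (FIELDNAME, 'ASC'|'DESC') tuples. The
# valid-field set and field names are illustrative.
def _prepare_order(order, valid_fields):
    errors, result = {}, []
    for order_field in order:
        if not isinstance(order_field, tuple):
            order_field = (order_field, 'ASC')
        if len(order_field) != 2 or order_field[1].upper() not in ('ASC', 'DESC'):
            errors[order_field] = ValueError("Expected a string or a (FIELDNAME, 'ASC'|'DESC') tuple, got : %s" % (order_field,))
        elif order_field[0] not in valid_fields:
            errors[order_field] = ValueError("No such field '%s'" % order_field[0])
        else:
            result.append((order_field[0], order_field[1].upper()))
    if errors:
        raise ValueError(errors)
    return result

print(_prepare_order(['title', ('date', 'desc')], {'title', 'date'}))
# [('title', 'ASC'), ('date', 'DESC')]
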
##@brief A query to update datas for a given object
class LeUpdateQuery(LeFilteredQuery):

    _hook_prefix = 'leapi_update_'
    _data_check_args = { 'complete': True, 'allow_internal': False }

    def __init__(self, target_class, target_uid, query_filters):
        super().__init__(target_class)
        self.query_filters = query_filters
        self.target_uid = target_uid

    def execute(self):
        # LodelHook.call_hook('leapi_update_pre', self.target_object, None)
        ret = self.__update()
        # ret = LodelHook.call_hook('leapi_update_post', self.target_object, ret)
        return ret

    ##@brief Calls the datasource's update method and the corresponding hooks
    # @return bool
    # @TODO change the behavior in case of error in the update process
    def __update(self):
        updated_datas = self.__prepare()
        ret = self.datasource.update(self.target_uid, **updated_datas)  # TODO add the correct arguments for the datasource's method
        if ret == 1:
            return True
        else:
            return False

    ##@brief Prepares the query_filters to be used as arguments for the datasource's update method
    def __prepare(self):
        datas = dict()
        if LeFilteredQuery.validate_query_filters(self.query_filters):
            datas['query_filters'] = self.query_filters

        datas['target_uid'] = self.target_uid
        datas['target_class'] = self._target_class
        return datas

    def __init__(self, target_class, query_filter):
        super().__init__(target_class, query_filter)

    ##@brief Implements an update query
    # @param **datas : datas to update
    def __query(self, datasource, **datas):
        pass

##@brief A query to delete an object
class LeDeleteQuery(LeFilteredQuery):

    _hook_prefix = 'leapi_delete_'

    def __init__(self, target_class, target_uid, query_filters):
        super().__init__(self._target_class)
        self.target_uid = target_uid
        self.query_filters = query_filters

    def __init__(self, target_class, query_filter):
        super().__init__(target_class, query_filter)

    def execute(self):
        # LodelHook.call_hook('leapi_delete_pre', self.target_uid, None)
        ret = self.__delete()
        # ret = LodelHook.call_hook('leapi_delete_post', self.target_object, ret)
        return ret

    ## @brief Execute the delete query
    def execute(self, datasource):
        super().execute()

    ##@brief Implements the delete query operations
    def __query(self, datasource):
        pass

    ##@brief Calls the datasource's delete method
    # @return bool
    # @TODO change the behavior in case of error in the delete process
    def __delete(self):
        delete_datas = self.__prepare()
        ret = self._datasource.delete(**delete_datas)
        return ret

class LeGetQuery(LeFilteredQuery):

    _hook_prefix = 'leapi_get_'

    def __prepare(self):
        datas = dict()
        if LeFilteredQuery.validate_query_filters(self.query_filters):
            datas['query_filters'] = self.query_filters

    ##@brief Instantiate a new get query
    # @param target_class LeObject : class of object the query is about
    # @param query_filters list : list of query filter strings (or tuples
    # (FIELD, OPERATOR, VALUE) )
    # @param field_list list|None : list of strings representing fields, see @ref leobject_filters
    # @param order list : A list of field names or tuples (FIELDNAME, ['ASC' | 'DESC'])
    # @param group list : A list of field names or tuples (FIELDNAME, ['ASC' | 'DESC'])
    # @param limit int : The maximum number of returned results
    # @param offset int : offset
    def __init__(self, target_class, query_filter, **kwargs):
        super().__init__(target_class, query_filter)

        ##@brief The fields to get
        self.__field_list = None
        ##@brief An equivalent to the SQL ORDER BY
        self.__order = None
        ##@brief An equivalent to the SQL GROUP BY
        self.__group = None
        ##@brief An equivalent to the SQL LIMIT x
        self.__limit = None
        ##@brief An equivalent to the SQL LIMIT x OFFSET y
        self.__offset = 0

        # Checking kwargs and assigning default values if there are some
        for argname in kwargs:
            if argname not in ('order', 'group', 'limit', 'offset'):
                raise TypeError("Unexpected argument '%s'" % argname)

        datas['target_uid'] = self.target_uid
        datas['target_class'] = self._target_class
        if 'field_list' not in kwargs:
            #field_list = target_class.get_field_list
            pass
        else:
            #target_class.check_fields(kwargs['field_list'])
            field_list = kwargs['field_list']
        if 'order' in kwargs:
            #check kwargs['order']
            self.__order = kwargs['order']
        if 'group' in kwargs:
            #check kwargs['group']
            self.__group = kwargs['group']
        if 'limit' in kwargs:
            try:
                self.__limit = int(kwargs['limit'])
                if self.__limit <= 0:
                    raise ValueError()
            except ValueError:
                raise ValueError("limit argument expected to be an integer > 0")
        if 'offset' in kwargs:
            try:
                self.__offset = int(kwargs['offset'])
                if self.__offset < 0:
                    raise ValueError()
            except ValueError:
                raise ValueError("offset argument expected to be an integer >= 0")

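# Standalone illustration (not lodel code) of how the keyword handling above is
# meant to behave from a caller's point of view: unknown keywords are rejected
# and limit/offset are coerced and range-checked. This sketch assumes that
# field_list is also an accepted keyword, since __init__ later reads it.
def _get_query_kwargs(**kwargs):
    for argname in kwargs:
        if argname not in ('field_list', 'order', 'group', 'limit', 'offset'):
            raise TypeError("Unexpected argument '%s'" % argname)
    limit = int(kwargs['limit']) if 'limit' in kwargs else None
    if limit is not None and limit <= 0:
        raise ValueError("limit argument expected to be an integer > 0")
    offset = int(kwargs.get('offset', 0))
    if offset < 0:
        raise ValueError("offset argument expected to be an integer >= 0")
    return limit, offset

print(_get_query_kwargs(order=[('title', 'ASC')], limit='10'))  # (10, 0)
try:
    _get_query_kwargs(instanciate=True)
except TypeError as expected:
    print(expected)  # Unexpected argument 'instanciate'
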
    ##@brief Execute the get query
    def execute(self, datasource):
        super().execute(datasource)

        return datas