
Documentation cleaning in mongodb_datasource

Roland Haroutiounian 7 years ago
parent
commit 3b942fcfbb

lodel/plugins/mongodb_datasource/confspec.py (+3, -3)

@@ -4,10 +4,10 @@ from lodel.context import LodelContext
 LodelContext.expose_modules(globals(), {
     'lodel.validator.validator': ['Validator']})
 
-##@brief Mongodb datasource plugin confspec
-#@ingroup plugin_mongodb_datasource
+## @brief Mongodb datasource plugin confspec
+# @ingroup plugin_mongodb_datasource
 #
-#Describe mongodb plugin configuration. Keys are :
+# Describes mongodb plugin configuration and the corresponding validators
 CONFSPEC = {
     'lodel2.datasource.mongodb_datasource.*':{
         'read_only': (False, Validator('bool')),
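Reviewer note (not part of this commit): a minimal, purely illustrative sketch of how a confspec shaped like the one above could be consumed. It only assumes the {section: {option: (default, validator)}} structure visible in the hunk, plus the assumption that a Validator instance can be called on a raw value, which this diff does not show.

# Illustrative sketch only -- not code from this commit.
# Assumes the {section: {option: (default, validator)}} shape seen above,
# and assumes validator objects are callable on a raw setting value.
def apply_confspec(confspec, raw_settings):
    validated = {}
    for section, options in confspec.items():
        validated[section] = {}
        for opt, (default, validator) in options.items():
            # fall back to the declared default when the setting is absent
            raw = raw_settings.get(section, {}).get(opt, default)
            validated[section][opt] = validator(raw)
    return validated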

lodel/plugins/mongodb_datasource/datasource.py (+225, -229)

@@ -1,4 +1,9 @@
-object# -*- coding: utf-8 -*-
+# -*- coding: utf-8 -*-
+
+## @package plugins.mongodb_datasource.datasource Main datasource module
+#
+# In this class, there is the MongoDbDatasource class, that handles the basic
+# operations that can be done (CRUD ones).
 
 import re
 import warnings
@@ -23,51 +28,54 @@ from .utils import object_collection_name, collection_name, \
     MONGODB_SORT_OPERATORS_MAP, connection_string, mongo_fieldname
 
 
-##@brief Datasource class
-#@ingroup plugin_mongodb_datasource
+## @brief Datasource class
+# @ingroup plugin_mongodb_datasource
 class MongoDbDatasource(AbstractDatasource):
 
-    ##@brief Stores existing connections
+    ## @brief Stores existing connections
     #
-    #The key of this dict is a hash of the connection string + ro parameter.
-    #The value is a dict with 2 keys :
+    # The key of this dict is a hash built upon the connection string and the
+    # ro (read-only) parameter.
+    #
+    # The value is a dict with 2 keys :
     # - conn_count : the number of instanciated datasource that use this
-    #connection
+    # connection
     # - db : the pymongo database object instance
     _connections = dict()
 
-    ##@brief Mapping from lodel2 operators to mongodb operator
+    ## @brief Mapping from lodel2 operators to mongodb operators
     lodel2mongo_op_map = {
         '=':'$eq', '<=':'$lte', '>=':'$gte', '!=':'$ne', '<':'$lt',
         '>':'$gt', 'in':'$in', 'not in':'$nin' }
-    ##@brief List of mongodb operators that expect re as value
+
+    ## @brief List of mongodb operators that expect re as value
     mongo_op_re = ['$in', '$nin']
     wildcard_re = re.compile('[^\\\\]\*')
 
-    ##@brief instanciates a database object given a connection name
-    #@param host str : hostname or IP
-    #@param port int : mongodb listening port
-    #@param db_name str
-    #@param username str
-    #@param password str
-    #@param read_only bool : If True the Datasource is for read only, else the
-    #Datasource is write only !
+    ## @brief instanciates a database object given a connection name
+    # @param host str : hostname or IP
+    # @param port int : mongodb listening port
+    # @param db_name str
+    # @param username str
+    # @param password str
+    # @param read_only bool : If True the Datasource is for read only, else the
+    # Datasource is write only !
     def __init__(self, host, port, db_name, username, password, read_only = False):
-        ##@brief Connections infos that can be kept securly
+        ## @brief Connections infos that can be kept securly
         self.__db_infos = {'host': host, 'port': port, 'db_name': db_name}
-        ##@brief Is the instance read only ? (if not it's write only)
+        ## @brief Is the instance read only ? (if not it's write only)
         self.__read_only = bool(read_only)
-        ##@brief Uniq ID for mongodb connection
+        ## @brief Uniq ID for mongodb connection
         self.__conn_hash= None
-        ##@brief Stores the database cursor
+        ## @brief Stores the database cursor
         self.database = self.__connect(
             username, password, db_name, self.__read_only)
 
-    ##@brief Destructor that attempt to close connection to DB
+    ## @brief Destructor that attempt to close connection to DB
     #
-    #Decrease the conn_count of associated MongoDbDatasource::_connections
-    #item. If it reach 0 close the connection to the db
-    #@see MongoDbDatasource::__connect()
+    # Decrease the conn_count of associated MongoDbDatasource::_connections
+    # item. If it reach 0 close the connection to the db
+    # @see MongoDbDatasource::__connect()
     def __del__(self):
         self._connections[self.__conn_hash]['conn_count'] -= 1
         if self._connections[self.__conn_hash]['conn_count'] <= 0:
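Reviewer note (not part of this commit): the class's real filter conversion goes through __filters2mongo()/__op_value_conv() further down, with wildcard and cast handling, but the core of lodel2mongo_op_map boils down to something like the following standalone sketch, which only reuses the operator table shown above.

# Standalone sketch, not the class's actual implementation: turn
# (FIELDNAME, OP, VALUE) lodel2 filters into pymongo conditions of the
# documented form {FIELD: {OPERATOR: VALUE}} using the table above.
LODEL2MONGO_OP_MAP = {
    '=': '$eq', '<=': '$lte', '>=': '$gte', '!=': '$ne', '<': '$lt',
    '>': '$gt', 'in': '$in', 'not in': '$nin'}

def filters_to_mongo_sketch(filters):
    res = {}
    for fieldname, op, value in filters:
        res.setdefault(fieldname, {})[LODEL2MONGO_OP_MAP[op]] = value
    return res

# filters_to_mongo_sketch([('title', '=', 'foo'), ('rank', '>', 2)])
# == {'title': {'$eq': 'foo'}, 'rank': {'$gt': 2}}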
@@ -75,11 +83,11 @@ class MongoDbDatasource(AbstractDatasource):
             del(self._connections[self.__conn_hash])
             logger.info("Closing connection to database")
 
-    ##@brief Provide a new uniq numeric ID
-    #@param emcomp LeObject subclass (not instance) : To know on wich things we
-    #have to be uniq
-    #@warning multiple UID broken by this method
-    #@return an integer
+    ## @brief Provides a new uniq numeric ID
+    # @param emcomp LeObject subclass (not instance) : To know on wich things we
+    # have to be uniq
+    # @warning multiple UID broken by this method
+    # @return an integer
     def new_numeric_id(self, emcomp):
         target = emcomp.uid_source()
         tuid = target._uid[0] # Multiple UID broken here
@@ -90,35 +98,34 @@ class MongoDbDatasource(AbstractDatasource):
             return 1
         return results[0][tuid]+1
 
-    ##@brief returns a selection of documents from the datasource
-    #@param target Emclass
-    #@param field_list list
-    #@param filters list : List of filters
-    #@param relational_filters list : List of relational filters
-    #@param order list : List of column to order. ex: order =
-    #[('title', 'ASC'),]
-    #@param group list : List of tupple representing the column used as
-    #"group by" fields. ex: group = [('title', 'ASC'),]
-    #@param limit int : Number of records to be returned
-    #@param offset int: used with limit to choose the start record
-    #@return list
-    #@todo Implement group for abstract LeObject childs
+    ## @brief returns a selection of documents from the datasource
+    # @param target Emclass
+    # @param field_list list
+    # @param filters list : List of filters
+    # @param relational_filters list : List of relational filters
+    # @param order list : List of column to order. ex: order = [('title', 'ASC'),]
+    # @param group list : List of tupple representing the column used as
+    # "group by" fields. ex: group = [('title', 'ASC'),]
+    # @param limit int : Number of records to be returned
+    # @param offset int: used with limit to choose the start record
+    # @return list
+    # @todo Implement group for abstract LeObject childs
     def select(self, target, field_list, filters = None,
             relational_filters=None, order=None, group=None, limit=None,
             offset=0):
         if target.is_abstract():
-            #Reccursiv calls for abstract LeObject child
+            # Reccursive calls for abstract LeObject child
             results =  self.__act_on_abstract(target, filters,
                 relational_filters, self.select, field_list = field_list,
                 order = order, group = group, limit = limit)
 
-            #Here we may implement the group
-            #If sorted query we have to sort again
+            # Here we may implement the group
+            # If sorted query we have to sort again
             if order is not None:
                 key_fun = functools.cmp_to_key(
                     self.__generate_lambda_cmp_order(order))
                 results = sorted(results, key=key_fun)
-            #If limit given apply limit again
+            # If limit given apply limit again
             if offset > len(results):
                 results = list()
             else:
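Reviewer note (not part of this commit): the abstract-class branch of select() above re-sorts and re-limits the merged child results. A rough standalone equivalent of that post-processing is sketched below, with a hand-written comparison standing in for __generate_lambda_cmp_order(), whose exact behaviour is not shown in this diff; the offset/limit slicing is likewise an assumption.

import functools

# Sketch of the post-processing applied to results merged from the
# recursive calls on an abstract class: re-sort, then apply offset/limit.
def merge_results_sketch(results, order=None, offset=0, limit=None):
    if order is not None:
        # order is a list of (fieldname, 'ASC'|'DESC') tuples, as documented
        def cmp_order(a, b):
            for fname, direction in order:
                if a[fname] == b[fname]:
                    continue
                lower = a[fname] < b[fname]
                return -1 if lower == (direction == 'ASC') else 1
            return 0
        results = sorted(results, key=functools.cmp_to_key(cmp_order))
    if offset > len(results):
        return []
    end = None if limit is None else offset + limit
    return results[offset:end]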
@@ -192,35 +199,35 @@ abstract, preparing reccursiv calls" % (target, filters, relational_filters))
 
         return results
 
-    ##@brief Deletes records according to given filters
-    #@param target Emclass : class of the record to delete
-    #@param filters list : List of filters
-    #@param relational_filters list : List of relational filters
-    #@return int : number of deleted records
+    ## @brief Deletes records according to given filters
+    # @param target Emclass : class of the record to delete
+    # @param filters list : List of filters
+    # @param relational_filters list : List of relational filters
+    # @return int : number of deleted records
     def delete(self, target, filters, relational_filters):
         if target.is_abstract():
             logger.debug("Delete called on %s filtered by (%s,%s). Target is \
 abstract, preparing reccursiv calls" % (target, filters, relational_filters))
-            #Deletion with abstract LeObject as target (reccursiv calls)
+            # Deletion with abstract LeObject as target (reccursiv calls)
             return self.__act_on_abstract(target, filters,
                 relational_filters, self.delete)
         logger.debug("Delete called on %s filtered by (%s,%s)." % (
             target, filters, relational_filters))
-        #Non abstract beahavior
+        # Non abstract beahavior
         mongo_filters = self.__process_filters(
             target, filters, relational_filters)
-        #Updating backref before deletion
+        # Updating backref before deletion
         self.__update_backref_filtered(target, filters, relational_filters,
             None)
         res = self.__collection(target).remove(mongo_filters)
         return res['n']
 
-    ##@brief updates records according to given filters
-    #@param target Emclass : class of the object to insert
-    #@param filters list : List of filters
-    #@param relational_filters list : List of relational filters
-    #@param upd_datas dict : datas to update (new values)
-    #@return int : Number of updated records
+    ## @brief updates records according to given filters
+    # @param target Emclass : class of the object to insert
+    # @param filters list : List of filters
+    # @param relational_filters list : List of relational filters
+    # @param upd_datas dict : datas to update (new values)
+    # @return int : Number of updated records
     def update(self, target, filters, relational_filters, upd_datas):
         self._data_cast(upd_datas)
         #fetching current datas state
@@ -237,9 +244,9 @@ abstract, preparing reccursiv calls" % (target, filters, relational_filters))
             upd_datas, old_datas_l)
         return res
 
-    ##@brief Designed to be called by backref update in order to avoid
-    #infinite updates between back references
-    #@see update()
+    ## @brief Designed to be called by backref update in order to avoid
+    # infinite updates between back references
+    # @see update()
     def __update_no_backref(self, target, filters, relational_filters,
             upd_datas):
         logger.debug("Update called on %s filtered by (%s,%s) with datas \
@@ -285,18 +292,18 @@ abstract, preparing reccursiv calls" % (target, filters, relational_filters))
             target.make_consistency(datas=new_datas)
         return list(res.inserted_ids)
 
-    ##@brief Update backref giving an action
-    #@param target leObject child class
-    #@param filters
-    #@param relational_filters,
-    #@param new_datas None | dict : optional new datas if None mean we are deleting
-    #@param old_datas_l None | list : if None fetch old datas from db (usefull
-    #when modifications are made on instance before updating backrefs)
-    #@return nothing (for the moment
+    ## @brief Update backref giving an action
+    # @param target leObject child class
+    # @param filters
+    # @param relational_filters,
+    # @param new_datas None | dict : optional new datas if None mean we are deleting
+    # @param old_datas_l None | list : if None fetch old datas from db (usefull
+    # when modifications are made on instance before updating backrefs)
+    # @return nothing (for the moment
     def __update_backref_filtered(self, target,
             filters, relational_filters, new_datas = None, old_datas_l = None):
-        #Getting all the UID of the object that will be deleted in order
-        #to update back_references
+        # Getting all the UID of the object that will be deleted in order
+        # to update back_references
         if old_datas_l is None:
             mongo_filters = self.__process_filters(
                 target, filters, relational_filters)
@@ -304,46 +311,45 @@ abstract, preparing reccursiv calls" % (target, filters, relational_filters))
                 mongo_filters)
             old_datas_l = list(old_datas_l)
 
-        uidname = target.uid_fieldname()[0] #MULTIPLE UID BROKEN HERE
+        uidname = target.uid_fieldname()[0]  # MULTIPLE UID BROKEN HERE
         for old_datas in old_datas_l:
             self.__update_backref(
                 target, old_datas[uidname], old_datas, new_datas)
 
-    ##@brief Update back references of an object
-    #@ingroup plugin_mongodb_bref_op
+    ## @brief Update back references of an object
+    # @ingroup plugin_mongodb_bref_op
     #
-    #old_datas and new_datas arguments are set to None to indicate
-    #insertion or deletion. Calls examples :
-    #@par LeObject insert __update backref call
-    #<pre>
-    #Insert(datas):
-    #  self.make_insert(datas)
-    #  self.__update_backref(self.__class__, None, datas)
-    #</pre>
-    #@par LeObject delete __update backref call
-    #Delete()
-    #  old_datas = self.datas()
-    #  self.make_delete()
-    #  self.__update_backref(self.__class__, old_datas, None)
-    #@par LeObject update __update_backref call
-    #<pre>
-    #Update(new_datas):
-    #  old_datas = self.datas()
-    #  self.make_udpdate(new_datas)
-    #  self.__update_backref(self.__class__, old_datas, new_datas)
-    #</pre>
+    # old_datas and new_datas arguments are set to None to indicate
+    # insertion or deletion. Calls examples :
+    # @par LeObject insert __update backref call
+    # <pre>
+    # Insert(datas):
+    #   self.make_insert(datas)
+    #   self.__update_backref(self.__class__, None, datas)
+    # </pre>
+    # @par LeObject delete __update backref call
+    # Delete()
+    #   old_datas = self.datas()
+    #   self.make_delete()
+    #   self.__update_backref(self.__class__, old_datas, None)
+    # @par LeObject update __update_backref call
+    # <pre>
+    # Update(new_datas):
+    #   old_datas = self.datas()
+    #   self.make_udpdate(new_datas)
+    #   self.__update_backref(self.__class__, old_datas, new_datas)
+    # </pre>
     #
-    #@param target LeObject child classa
-    #@param tuid mixed : The target UID (the value that will be inserted in
-    #back references)
-    #@param old_datas dict : datas state before update
-    #@param new_datas dict : datas state after the update process
-    #retun None
+    # @param target LeObject child classa
+    # @param tuid mixed : The target UID (the value that will be inserted in
+    # back references)
+    # @param old_datas dict : datas state before update
+    # @param new_datas dict : datas state after the update process
     def __update_backref(self, target, tuid, old_datas, new_datas):
         #upd_dict is the dict that will allow to run updates in an optimized
         #way (or try to help doing it)
         #
-        #It's struct looks like :
+        #Its structure looks like :
         # { LeoCLASS : {
         #       UID1: (
         #           LeoINSTANCE,
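Reviewer note (not part of this commit): the upd_dict structure whose first comment lines appear above can be inferred from how it is used later in this diff, namely upd_dict[bref_cls][uid_val][1][bref_fname] and "for uidval, (leo, datas) in uid_dict.items()". A purely illustrative instance, with placeholder strings standing in for real LeObject classes and instances:

# Inferred shape (hedged reconstruction, not an authoritative definition):
#   { LeoClass: { uid_value: (leo_instance, {bref_fieldname: new_bref_value}) } }
upd_dict_example = {
    'ExampleLeoClass': {                      # placeholder for a LeObject child class
        42: ('example_leo_instance',          # placeholder for the instance holding the backref
             {'linked_items': [1, 7, 42]}),   # hypothetical back reference field and value
    },
}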
@@ -362,17 +368,17 @@ abstract, preparing reccursiv calls" % (target, filters, relational_filters))
                 and not new_datas[fname] is None
             if (oldd and newd and old_datas[fname] == new_datas[fname])\
                     or not(oldd or newd):
-                #No changes or not concerned
+                # No changes or not concerned
                 continue
             bref_cls = fdh.back_reference[0]
             bref_fname = fdh.back_reference[1]
             if not fdh.is_singlereference():
-                #fdh is a multiple ref. So the update preparation will be
-                #divided into two loops :
-                #- one loop for deleting old datas
-                #- one loop for inserting updated datas
+                # fdh is a multiple reference. So the update preparation will be
+                # divided into two loops :
+                # - one loop for deleting old datas
+                # - one loop for inserting updated datas
                 #
-                #Preparing the list of values to delete or to add
+                # Preparing the list of values to delete or to add
                 if newd and oldd:
                     old_values = old_datas[fname]
                     new_values = new_datas[fname]
@@ -388,29 +394,29 @@ abstract, preparing reccursiv calls" % (target, filters, relational_filters))
                 elif not oldd and newd:
                     to_del = []
                     to_add = new_datas[fname]
-                #Calling __back_ref_upd_one_value() with good arguments
+                # Calling __back_ref_upd_one_value() with good arguments
                 for vtype, vlist in [('old',to_del), ('new', to_add)]:
                     for value in vlist:
-                        #fetching backref infos
+                        # fetching backref infos
                         bref_infos = self.__bref_get_check(
                             bref_cls, value, bref_fname)
-                        #preparing the upd_dict
+                        # preparing the upd_dict
                         upd_dict = self.__update_backref_upd_dict_prepare(
                             upd_dict, bref_infos, bref_fname, value)
-                        #preparing updated bref_infos
+                        # preparing updated bref_infos
                         bref_cls, bref_leo, bref_dh, bref_value = bref_infos
                         bref_infos = (bref_cls, bref_leo, bref_dh,
                             upd_dict[bref_cls][value][1][bref_fname])
                         vdict = {vtype: value}
-                        #fetch and store updated value
+                        # fetch and store updated value
                         new_bref_val = self.__back_ref_upd_one_value(
                             fname, fdh, tuid, bref_infos, **vdict)
                         upd_dict[bref_cls][value][1][bref_fname] = new_bref_val
             else:
-                #fdh is a single ref so the process is simpler, we do not have
-                #to loop and we may do an update in only one
-                #__back_ref_upd_one_value() call by giving both old and new
-                #value
+                # fdh is a single ref so the process is simpler, we do not have
+                # to loop and we may do an update in only one
+                # __back_ref_upd_one_value() call by giving both old and new
+                # value
                 vdict = {}
                 if oldd:
                     vdict['old'] = old_datas[fname]
@@ -419,39 +425,38 @@ abstract, preparing reccursiv calls" % (target, filters, relational_filters))
                     vdict['new'] = new_datas[fname]
                     if not oldd:
                         uid_val = vdict['new']
-                #Fetching back ref infos
+                # Fetching back ref infos
                 bref_infos = self.__bref_get_check(
                     bref_cls, uid_val, bref_fname)
-                #prepare the upd_dict
+                # prepare the upd_dict
                 upd_dict = self.__update_backref_upd_dict_prepare(
                     upd_dict, bref_infos, bref_fname, uid_val)
-                #forging update bref_infos
+                # forging update bref_infos
                 bref_cls, bref_leo, bref_dh, bref_value = bref_infos
                 bref_infos = (bref_cls, bref_leo, bref_dh,
                         upd_dict[bref_cls][uid_val][1][bref_fname])
-                #fetche and store updated value
+                # fetch and store updated value
                 new_bref_val = self.__back_ref_upd_one_value(
                     fname, fdh, tuid, bref_infos, **vdict)
                 upd_dict[bref_cls][uid_val][1][bref_fname] = new_bref_val
-        #Now we've got our upd_dict ready.
-        #running the updates
+        # Now we've got our upd_dict ready.
+        # running the updates
         for bref_cls, uid_dict in upd_dict.items():
             for uidval, (leo, datas) in uid_dict.items():
-                #MULTIPLE UID BROKEN 2 LINES BELOW
+                # MULTIPLE UID BROKEN 2 LINES BELOW
                 self.__update_no_backref(
                     leo.__class__, [(leo.uid_fieldname()[0], '=', uidval)],
                     [], datas)
 
-    ##@brief Utility function designed to handle the upd_dict of
-    #__update_backref()
+    ## @brief Utility function designed to handle the upd_dict of __update_backref()
     #
-    #Basically checks if a key exists at some level, if not create it with
-    #the good default value (in most case dict())
-    #@param upd_dict dict : in & out args modified by reference
-    #@param bref_infos tuple : as returned by __bref_get_check()
-    #@param bref_fname str : name of the field in referenced class
-    #@param uid_val mixed : the UID of the referenced object
-    #@return the updated version of upd_dict
+    # Basically checks if a key exists at some level, if not create it with
+    # the good default value (in most case dict())
+    # @param upd_dict dict : in & out args modified by reference
+    # @param bref_infos tuple : as returned by __bref_get_check()
+    # @param bref_fname str : name of the field in referenced class
+    # @param uid_val mixed : the UID of the referenced object
+    # @return the updated version of upd_dict
     @staticmethod
     def __update_backref_upd_dict_prepare(upd_dict,bref_infos, bref_fname,
             uid_val):
@@ -465,14 +470,14 @@ abstract, preparing reccursiv calls" % (target, filters, relational_filters))
         return upd_dict
 
 
-    ##@brief Prepare a one value back reference update
-    #@param fname str : the source Reference field name
-    #@param fdh DataHandler : the source Reference DataHandler
-    #@param tuid mixed : the uid of the Leo that make reference to the backref
-    #@param bref_infos tuple : as returned by __bref_get_check() method
-    #@param old mixed : (optional **values) the old value
-    #@param new mixed : (optional **values) the new value
-    #@return the new back reference field value
+    ## @brief Prepare a one value back reference update
+    # @param fname str : the source Reference field name
+    # @param fdh DataHandler : the source Reference DataHandler
+    # @param tuid mixed : the uid of the Leo that make reference to the backref
+    # @param bref_infos tuple : as returned by __bref_get_check() method
+    # @param old mixed : (optional **values) the old value
+    # @param new mixed : (optional **values) the new value
+    # @return the new back reference field value
     def __back_ref_upd_one_value(self, fname, fdh, tuid, bref_infos, **values):
         bref_cls, bref_leo, bref_dh, bref_val = bref_infos
         oldd = 'old' in values
@@ -487,7 +492,7 @@ delete in this back reference update was not found in the back referenced \
 object : %s. Value was : '%s'" % (bref_leo, tuid))
                 return bref_val
             elif oldd and not newdd:
-                #deletion
+                # deletion
                 old_value = values['old']
                 if tuid not in bref_val:
                     raise MongoDbConsistencyError("The value we want to \
@@ -511,7 +516,7 @@ object : %s. Value was : '%s'" % (bref_leo, tuid))
                 else:
                     bref_val.append(tuid)
         else:
-            #Single value backref
+            # Single value backref
             if oldd and newdd:
                 if bref_val != tuid:
                     raise MongoDbConsistencyError("The backreference doesn't \
@@ -519,7 +524,7 @@ have expected value. Expected was %s but found %s in %s" % (
                         tuid, bref_val, bref_leo))
                 return bref_val
             elif oldd and not newdd:
-                #deletion
+                # deletion
                 if not hasattr(bref_dh, "default"):
                     raise MongoDbConsistencyError("Unable to delete a \
 value for a back reference update. The concerned field don't have a default \
@@ -529,17 +534,14 @@ value : in %s field %s" % (bref_leo,fname))
                 bref_val = tuid
         return bref_val
 
-    ##@brief Fetch back reference informations
-    #@warning thank's to __update_backref_act() this method is useless
-    #@param bref_cls LeObject child class : __back_reference[0]
-    #@param uidv mixed : UID value (the content of the reference field)
-    #@param bref_fname str : the name of the back_reference field
-    #@return tuple(bref_class, bref_LeObect_instance, bref_datahandler,
-    #bref_value)
-    #@throw MongoDbConsistencyError when LeObject instance not found given
-    #uidv
-    #@throw LodelFatalError if the back reference field is not a Reference
-    #subclass (major failure)
+    ## @brief Fetch back reference informations
+    # @warning thank's to __update_backref_act() this method is useless
+    # @param bref_cls LeObject child class : __back_reference[0]
+    # @param uidv mixed : UID value (the content of the reference field)
+    # @param bref_fname str : the name of the back_reference field
+    # @return tuple(bref_class, bref_LeObect_instance, bref_datahandler, bref_value)
+    # @throw MongoDbConsistencyError when LeObject instance not found given uidv
+    # @throw LodelFatalError if the back reference field is not a Reference subclass (major failure)
     def __bref_get_check(self, bref_cls, uidv, bref_fname):
         bref_leo = bref_cls.get_from_uid(uidv)
         if bref_leo is None:
@@ -552,17 +554,17 @@ is not a reference : '%s' field '%s'" % (bref_leo, bref_fname))
         bref_val = bref_leo.data(bref_fname)
         return (bref_leo.__class__, bref_leo, bref_dh, bref_val)
 
-    ##@brief Act on abstract LeObject child
+    ## @brief Act on abstract LeObject child
     #
-    #This method is designed to be called by insert, select and delete method
-    #when they encounter an abtract class
-    #@param target LeObject child class
-    #@param filters
-    #@param relational_filters
-    #@param act function : the caller method
-    #@param **kwargs other arguments
-    #@return sum of results (if it's an array it will result in a concat)
-    #@todo optimization implementing a cache for __bref_get_check()
+    # This method is designed to be called by insert, select and delete method
+    # when they encounter an abtract class
+    # @param target LeObject child class
+    # @param filters
+    # @param relational_filters
+    # @param act function : the caller method
+    # @param **kwargs other arguments
+    # @return sum of results (if it's an array it will result in a concat)
+    # @todo optimization implementing a cache for __bref_get_check()
     def __act_on_abstract(self,
         target, filters, relational_filters, act, **kwargs):
 
@@ -577,7 +579,7 @@ on non abstract childs" % act.__name__)
         for target_child in target_childs:
             logger.debug(
                 "Abstract %s on %s" % (act.__name__, target_child.__name__))
-            #Add target_child to filter
+            # Add target_child to filter
             new_filters = copy.copy(filters)
             for i in range(len(filters)):
                 fname, op, val = filters[i]
@@ -595,12 +597,11 @@ on non abstract childs" % act.__name__)
                 **kwargs)
         return result
 
-    ##@brief Connect to database
-    #@note this method avoid opening two times the same connection using
-    #MongoDbDatasource::_connections static attribute
-    #@param username str
-    #@param password str
-    #@param ro bool : If True the Datasource is for read only, else the
+    ## @brief Connect to database
+    # @note this method avoid opening two times the same connection using MongoDbDatasource::_connections static attribute
+    # @param username str
+    # @param password str
+    # @param ro bool : If True the Datasource is for read only, else it will be write only
     def __connect(self, username, password, db_name, ro):
         conn_string = connection_string(
             username = username, password = password,
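Reviewer note (not part of this commit): to make the pooling that __del__() and __connect() document concrete, there is one shared client per hash of (connection string, read-only flag), reference-counted through conn_count. The standalone sketch below is assumption-laden: it supposes the connection string is a MongoDB URI usable with pymongo.MongoClient, and uses md5 as a stand-in for whatever hash the real code computes, neither of which this diff shows.

import hashlib

import pymongo

_connections = {}  # mirrors MongoDbDatasource._connections: conn_hash -> {'conn_count', 'db'}

def connect_sketch(conn_string, ro):
    # Key the pool on the connection string plus the read-only flag, as the
    # docstring above describes; reuse the client when the hash is known.
    conn_h = hashlib.md5((conn_string + str(ro)).encode('utf-8')).hexdigest()
    if conn_h not in _connections:
        _connections[conn_h] = {
            'conn_count': 0,
            'db': pymongo.MongoClient(conn_string),  # assumes a mongodb:// URI
        }
    _connections[conn_h]['conn_count'] += 1
    return conn_h, _connections[conn_h]['db']

def disconnect_sketch(conn_h):
    # Counterpart of __del__(): close the shared client once it is unused.
    _connections[conn_h]['conn_count'] -= 1
    if _connections[conn_h]['conn_count'] <= 0:
        _connections[conn_h]['db'].close()
        del _connections[conn_h]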
@@ -621,31 +622,26 @@ on non abstract childs" % act.__name__)
             return self._connections[conn_h]['db'][self.__db_infos['db_name']]
 
 
-    ##@brief Return a pymongo collection given a LeObject child class
-    #@param leobject LeObject child class (no instance)
-    #return a pymongo.collection instance
+    ## @brief Return a pymongo collection given a LeObject child class
+    # @param leobject LeObject child class (no instance)
+    # @return a pymongo.collection instance
     def __collection(self, leobject):
         return self.database[object_collection_name(leobject)]
 
-    ##@brief Perform subqueries implies by relational filters and append the
+    ## @brief Perform subqueries implies by relational filters and append the
     # result to existing filters
     #
-    #The processing is divided in multiple steps :
-    # - determine (for each relational field of the target)  every collection
-    #that are involved
-    # - generate subqueries for relational_filters that concerns a different
-    #collection than target collection
-    #filters
-    # - execute subqueries
-    # - transform subqueries results in filters
-    # - merge subqueries generated filters with existing filters
+    # The processing is divided in multiple steps :
+    #  - determine (for each relational field of the target)  every collection that are involved
+    #  - generate subqueries for relational_filters that concerns a different collection than target collection filters
+    #  - execute subqueries
+    #  - transform subqueries results in filters
+    #  - merge subqueries generated filters with existing filters
     #
-    #@param target LeObject subclass (no instance) : Target class
-    #@param filters list : List of tuple(FIELDNAME, OP, VALUE)
-    #@param relational_filters : same composition thant filters except that
-    # FIELD is represented by a tuple(FIELDNAME, {CLASS1:RFIELD1,
-    # CLASS2:RFIELD2})
-    #@return a list of pymongo filters ( dict {FIELD:{OPERATOR:VALUE}} )
+    # @param target LeObject subclass (no instance) : Target class
+    # @param filters list : List of tuple(FIELDNAME, OP, VALUE)
+    # @param relational_filters : same composition thant filters except that FIELD is represented by a tuple(FIELDNAME, {CLASS1:RFIELD1, CLASS2:RFIELD2})
+    # @return a list of pymongo filters ( dict {FIELD:{OPERATOR:VALUE}} )
     def __process_filters(self,target, filters, relational_filters):
         # Simple filters lodel2 -> pymongo converting
         res = self.__filters2mongo(filters, target)
@@ -688,21 +684,19 @@ on non abstract childs" % act.__name__)
             logger.debug("End of subquery execution")
         return res
 
-    ##@brief Generate subqueries from rfilters tree
+    ## @brief Generate subqueries from rfilters tree
     #
-    #Returned struct organization :
-    # - 1st level keys : relational field name of target
-    # - 2nd level keys : referenced leobject
-    # - 3th level values : pymongo filters (dict)
+    # Returned struct organization :
+    #  - 1st level keys : relational field name of target
+    #  - 2nd level keys : referenced leobject
+    #  - 3th level values : pymongo filters (dict)
     #
-    #@note The only caller of this method is __process_filters
-    #@warning No return value, the rfilters arguement is modified by
-    #reference
+    # @note The only caller of this method is __process_filters
+    # @warning No return value, the rfilters arguement is modified by reference
     #
-    #@param target LeObject subclass (no instance) : Target class
-    #@param rfilters dict : A struct as returned by
-    #MongoDbDatasource.__prepare_relational_filters()
-    #@return None, the rfilters argument is modified by reference
+    # @param target LeObject subclass (no instance) : Target class
+    # @param rfilters dict : A struct as returned by MongoDbDatasource.__prepare_relational_filters()
+    # @return None, the rfilters argument is modified by reference
     @classmethod
     def __subqueries_from_relational_filters(cls, target, rfilters):
         for fname in rfilters:
@@ -715,21 +709,20 @@ on non abstract childs" % act.__name__)
                         rfilters[fname][leobject][rfield], target.field(fname))
                     rfilters[fname][leobject][rfield] = mongofilters
 
-    ##@brief Generate a tree from relational_filters
+    ## @brief Generate a tree from relational_filters
     #
-    #The generated struct is a dict with :
-    # - 1st level keys : relational field name of target
-    # - 2nd level keys : referenced leobject
-    # - 3th level keys : referenced field in referenced class
-    # - 4th level values : list of tuple(op, value)
+    # The generated struct is a dict with :
+    #  - 1st level keys : relational field name of target
+    #  - 2nd level keys : referenced leobject
+    #  - 3th level keys : referenced field in referenced class
+    #  - 4th level values : list of tuple(op, value)
     #
-    #@note The only caller of this method is __process_filters
-    #@warning An assertion is done : if two leobject are stored in the same
-    #collection they share the same uid
+    # @note The only caller of this method is __process_filters
+    # @warning An assertion is done : if two leobject are stored in the same collection they share the same uid
     #
-    #@param target LeObject subclass (no instance) : Target class
-    #@param relational_filters : same composition thant filters except that
-    #@return a struct as described above
+    # @param target LeObject subclass (no instance) : Target class
+    # @param relational_filters : same composition thant filters except that
+    # @return a struct as described above
     @classmethod
     def __prepare_relational_filters(cls, target, relational_filters):
         # We are going to regroup relationnal filters by reference field
@@ -761,9 +754,9 @@ on non abstract childs" % act.__name__)
                 rfilters[fname][repr_leo][rfield].append((op, value))
         return rfilters
 
-    ##@brief Convert lodel2 filters to pymongo conditions
-    #@param filters list : list of lodel filters
-    #@return dict representing pymongo conditions
+    ## @brief Convert lodel2 filters to pymongo conditions
+    # @param filters list : list of lodel filters
+    # @return dict representing pymongo conditions
     @classmethod
     def __filters2mongo(cls, filters, target):
         res = dict()
@@ -797,12 +790,13 @@ by an equality filter")
         return res
 
 
-    ##@brief Convert lodel2 operator and value to pymongo struct
+    ## @brief Convert lodel2 operator and value to pymongo struct
     #
-    #Convertion is done using MongoDbDatasource::lodel2mongo_op_map
-    #@param op str : take value in LeFilteredQuery::_query_operators
-    #@param value mixed : the value
-    #@return a tuple(mongo_op, mongo_value)
+    # Convertion is done using MongoDbDatasource::lodel2mongo_op_map
+    # @param op str : take value in LeFilteredQuery::_query_operators
+    # @param value mixed : the value
+    # @param dhdl
+    # @return a tuple(mongo_op, mongo_value)
     @classmethod
     def __op_value_conv(cls, op, value, dhdl):
         if op not in cls.lodel2mongo_op_map:
@@ -810,8 +804,8 @@ by an equality filter")
             raise MongoDbDataSourceError(msg)
         mongop = cls.lodel2mongo_op_map[op]
         mongoval = value
-        #Converting lodel2 wildcarded string into a case insensitive
-        #mongodb re
+        # Converting lodel2 wildcarded string into a case insensitive
+        # mongodb re
        if mongop in cls.mongo_op_re:
             if value.startswith('(') and value.endswith(')'):
                 if (dhdl.cast_type is not None):
@@ -831,8 +825,10 @@ by an equality filter")
             mongoval = {'$regex': mongoval, '$options': 'i'}
         return (op, mongoval)
 
-    ##@brief Convert a list of tuple(OP, VALUE) into a pymongo filter dict
-    #@return a dict with mongo op as key and value as value...
+    ## @brief Convert a list of tuple(OP, VALUE) into a pymongo filter dict
+    # @param op_value_list list
+    # @param dhdl
+    # @return a dict with mongo op as key and value as value...
     @classmethod
     def __op_value_listconv(cls, op_value_list, dhdl):
         result = dict()
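Reviewer note (not part of this commit): the wildcard handling referenced above (wildcard_re and the final {'$regex': ..., '$options': 'i'} clause) roughly amounts to turning an unescaped '*' in a lodel2 filter value into '.*' inside a case-insensitive regex. A hedged approximation follows; the escaping details of the real __op_value_conv() almost certainly differ.

import re

def wildcard_to_mongo_regex_sketch(value):
    # Escape regex metacharacters, then turn the (escaped) lodel2 wildcard
    # back into '.*' and wrap it in a case-insensitive $regex clause, as the
    # hunk above does for its final mongoval.
    pattern = re.escape(value).replace(r'\*', '.*')
    return {'$regex': pattern, '$options': 'i'}

# wildcard_to_mongo_regex_sketch('foo*bar')
# == {'$regex': 'foo.*bar', '$options': 'i'}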
