
Source Code for Module jsondata.jsondataserializer

# -*- coding: utf-8 -*-
"""Basic features for the persistence of JSON based in-memory data.

* import and export of JSON data from/into files
* modular import and export of JSON branches from/into files
* validation by JSON schema
"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division

import os
import sys

# pylint: disable-msg=F0401
if sys.modules.get('ujson'):
    import ujson as myjson  # @UnusedImport @Reimport @UnresolvedImport
else:
    import json as myjson  # @Reimport

from jsondata import ISSTR, MS_OFF, MODE_SCHEMA_DEFAULT, \
    MATCH_NO, MATCH_KEY, MATCH_CHLDATTR, MATCH_INDEX, MATCH_MEM, \
    JSONDataError, JSONDataValueError, JSONDataModeError, \
    JSONDataSourceFileError, JSONDataTargetFileError, \
    JSONDataAmbiguityError, JSONDataParameterError, \
    mode2mj, MJ_RFC4627, MJ_DEFAULT, \
    B_ADD, B_AND, B_OR, B_XOR

from jsondata.jsondata import JSONData

import jsondata

# pylint: enable-msg=F0401


_debug = jsondata._debug  # pylint: disable=protected-access
_verbose = jsondata._verbose  # pylint: disable=protected-access


__author__ = 'Arno-Can Uestuensoez'
__maintainer__ = 'Arno-Can Uestuensoez'
__license__ = "Artistic-License-2.0 + Forced-Fairplay-Constraints"
__copyright__ = "Copyright (C) 2015-2016 Arno-Can Uestuensoez" \
                " @Ingenieurbuero Arno-Can Uestuensoez"
__version__ = '0.2.21'
__uuid__ = '63b597d6-4ada-4880-9f99-f5e0961351fb'

class JSONDataSerializer(JSONData):
    """Persistency for *JSONData*.
    """

    def __init__(self, jdata, **kargs):
        """Creates a serializable instance of *JSONData*, optionally loads
        and validates a JSON definition.

        Args:
            **jdata**:
                The initial data of the current instance, see *JSONData*.

            kargs:
                Keywords are also passed to *JSONData*.

                **datafile**:
                    Filepathname of the JSON data file; when provided, a further
                    search by pathlist, filelist, and filepathlist is suppressed.
                    Therefore it has to be a valid filepathname.

                **mode**:
                    The mode of JSON processing: ::

                       mode := (
                            MJ_RFC4627
                          | MJ_RFC7493   # currently not supported, mapped to RFC7159
                          | MJ_RFC7159
                          | MJ_RFC8259
                          | MJ_ECMA404   # same as RFC8259
                       )

                       default := MJ_RFC7159

                **schemafile**:
                    Filepathname of the JSON schema file.

                **schema**:
                    Could be used instead of *schemafile*, see *JSONData*.

                **validator**:
                    See *JSONData*.

        Returns:
            Results in an initialized object.

        Raises:
            NameError

            JSONDataSourceFileError

            JSONDataAmbiguityError

            JSONDataValueError

            jsonschema.ValidationError

            jsonschema.SchemaError

        """
        self.debug = kargs.get('debug', _debug)
        self.verbose = kargs.get('verbose', _verbose)

        #
        # JSON processing mode
        #
        self.mode_json = kargs.get('mode', MJ_DEFAULT)
        try:
            self.mode_json = mode2mj[self.mode_json]
            if self.mode_json in (MJ_RFC4627,) and type(jdata) not in (dict, list,):
                raise JSONDataModeError(
                    "mode rfc4627 requires dict or list, got:"
                    + str(type(jdata))
                )

        except KeyError:
            raise JSONDataParameterError("Unknown mode:" + str(self.mode_json))

        #
        # data file
        #
        self.datafile = kargs.get('datafile')
        if self.datafile and not os.path.isfile(self.datafile):
            #
            # must exist when provided
            #
            raise JSONDataSourceFileError(
                "value", "datasource",
                str(self.datafile))

        #
        # schema file
        #
        self.schema = kargs.get('schema')
        self.schemafile = kargs.get('schemafile')
        if self.schemafile:
            self.schemafile = os.path.abspath(self.schemafile)
            if not os.path.isfile(self.schemafile):
                raise JSONDataSourceFileError(
                    "open", "schemafile", str(self.schemafile))

            if self.schema:
                #
                # 'schema' and 'schemafile' are mutually exclusive
                #
                raise JSONDataAmbiguityError(
                    "value", "schema + schemafile",
                    str(self.schemafile))

            elif not os.path.isfile(self.schemafile):
                #
                # must exist when provided
                #
                raise JSONDataSourceFileError(
                    "value", "schema",
                    str(self.schemafile))

            with open(self.schemafile) as schema_file:
                self.schema = myjson.load(schema_file)
            if not self.schema:
                raise JSONDataSourceFileError(
                    "read", "schemafile", str(self.schemafile))

        validator = kargs.get('validator', MODE_SCHEMA_DEFAULT)

        #
        # load data when specified;
        # use json_import() for rfc7159 primitives as target too,
        # even though import requires a container as target
        #
        if type(jdata) in (list, dict):
            _j = jdata
            _k = None
        else:
            _j = []
            _k = 0

        # prepare the data container
        JSONData.__init__(
            self,
            _j,
            schema=self.schema
        )

        if self.datafile:
            if self.schema:
                self.json_import(
                    self.datafile,
                    _j,
                    _k,
                    schema=self.schema,
                    validator=validator,
                )

            else:
                # should not be reached
                self.json_import(
                    self.datafile,
                    _j,
                    _k,
                    schemafile=self.schemafile,
                    validator=validator,
                )

        if type(jdata) not in (list, dict):
            if _j:
                self.data = _j[0]
            else:
                self.data = None

        if self.schemafile:
            # this is the initial creation, so initialize the schema for the object
            kargs['schemafile'] = self.schemafile
            self.set_schema(**kargs)

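    # Usage sketch (not part of the original module): constructing a serializer
    # from a JSON file with schema validation. The file names 'data.json' and
    # 'data.jsd' are hypothetical placeholders; the keyword names and
    # MODE_SCHEMA_DEFAULT are taken from the code above.
    #
    #     from jsondata.jsondataserializer import JSONDataSerializer
    #     from jsondata import MODE_SCHEMA_DEFAULT
    #
    #     serializer = JSONDataSerializer(
    #         {},                             # initial in-memory data
    #         datafile='data.json',           # must exist, else JSONDataSourceFileError
    #         schemafile='data.jsd',          # loaded and used for validation
    #         validator=MODE_SCHEMA_DEFAULT,
    #     )
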
    def json_export(self, datafile, sourcenode=None, **kargs):
        """Exports current data into a file.

        Args:
            **datafile**:
                File name for the exported data.

            **sourcenode**:
                Base of sub-tree for export.
                None for complete JSON document.

                default := *self.data*

            kargs:
                **force**:
                    Forces the overwrite of existing files.

                **pretty**:
                    Defines the syntax format of the data. ::

                       pretty := (
                            True    # tree view
                          | False   # all in one line
                       )

                    When set, the value is fetched from
                    *self.indent*.

                    default := *True*

        Returns:
            When successful returns 'True', else returns either 'False',
            or raises an exception.

        Raises:
            JSONDataTargetFileError:
        """
        _force = kargs.get('force')

        if kargs.get('pretty'):
            _ind = self.indent
        else:
            _ind = None

        f = os.path.abspath(os.path.normpath(datafile))
        if os.path.exists(f) and not _force:
            raise JSONDataTargetFileError("Exists, use the force to replace: " + str(f))

        if sourcenode == None:
            sourcenode = self.data

        try:
            with open(f, 'w') as fp:
                myjson.dump(sourcenode, fp, indent=_ind)
        except Exception as e:
            raise JSONDataTargetFileError("open-" + str(e), "data.dump", str(datafile))

        return True

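    # Usage sketch (not part of the original module): exporting the whole
    # document or a single branch. 'export.json' and 'branch.json' are
    # hypothetical placeholders, and the example assumes self.data is a dict
    # containing a 'sub' key; 'force' and 'pretty' are the keywords
    # documented above.
    #
    #     serializer.json_export('export.json', force=True, pretty=True)
    #
    #     # export a sub-branch only (assumes a 'sub' entry exists)
    #     serializer.json_export('branch.json', serializer.data['sub'], force=True)
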
    def json_import(self, datafile, targetnode=None, key=None, **kargs):
        """Imports and validates data from a file.

        The schema and validator for the imported data could be set
        independently of the schema of the main data.

        Args:
            **datafile**:
                JSON data file name containing the subtree for the target branch. ::

                   datafile := <filepathname>

            **targetnode**:
                Target container for the inclusion of the loaded branch.

                .. parsed-literal::

                   targetnode := (
                        JSONPointer                  # [RFC6901]_ or [RELPOINTER]_
                      | <rfc6901-string>             # [RFC6901]_
                      | <relative-pointer-string>    # [RELPOINTER]_
                      | <pointer-items-list>         # non-URI-fragment pointer path items of [RFC6901]_
                   )

                default := *self.data*

            **key**:
                The optional index/key-hook within the *targetnode*.

                default := None

            kargs:
                **mechanic**:
                    The import mechanic. Selects either the RFC6902-conformant
                    *branch_add*, or the flexible mapping by *branch_superpose*.
                    The latter is more suitable for the application of modular
                    templates. ::

                       mechanic := (
                            B_ADD | 'add'    # branch_add
                          | B_AND | 'and'    # branch_superpose(map=B_AND)
                          | B_OR  | 'or'     # branch_superpose(map=B_OR)
                          | B_XOR | 'xor'    # branch_superpose(map=B_XOR)
                       )

                **matchcondition**:
                    Defines the criteria for comparison of present child nodes
                    in the target container. The value is a list of criteria
                    combined by logical AND. The criteria may vary due to
                    the requirement and the type of applied container.

                **schema**:
                    JSON-Schema for validation of the subtree/branch.

                    default := *self.schema*  # the pre-loaded schema

                **schemafile**:
                    JSON-Schema filename for validation of the subtree/branch.

                    default := *self.schema*  # the pre-loaded schema

                **subpointer**:
                    The path of the sub-tree of the serialized document
                    to be imported.

                    default := ''  # whole serialized document

                **validator**:
                    Sets the schema validator for the data file.
                    The current release relies on *jsonschema*, which
                    at the time of writing supports draft-03 and
                    draft-04.

                    The values are: ::

                       validator := (
                            MS_DRAFT3           | 'draft3'
                          | MS_DRAFT4           | 'draft4'
                          | MS_ON               | 'on'
                          | MS_OFF              | 'off'
                          | MODE_SCHEMA_DEFAULT | 'default'
                       )

                    default := MS_OFF

        Returns:
            When successful returns 'True', else returns either 'False', or
            raises an exception.

        Raises:
            JSONDataError

            JSONDataValueError

            JSONDataSourceFileError

        """
        jval = None

        schemafile = kargs.get('schemafile')
        schema = kargs.get('schema')
        subpointer = kargs.get('subpointer')

        mechanic = kargs.get('mechanic')
        _call = self.branch_superpose
        if mechanic in (B_ADD, 'add'):
            _call = self.branch_add
        # elif mechanic in (B_AND, 'and'):
        #     _call = self.branch_superpose
        # elif mechanic in (B_OR, 'or'):
        #     _call = self.branch_superpose
        # elif mechanic in (B_XOR, 'xor'):
        #     _call = self.branch_superpose

        matchcondition = kargs.get('matchcondition')
        if matchcondition:
            if matchcondition in ('key', MATCH_KEY):
                matchcondition.append(MATCH_KEY)
            elif matchcondition in ('no', MATCH_NO):
                matchcondition.append(MATCH_NO)
            elif matchcondition in ('child_attr_list', MATCH_CHLDATTR):
                matchcondition.append(MATCH_CHLDATTR)
            elif matchcondition in ('index', MATCH_INDEX):
                matchcondition.append(MATCH_INDEX)
            elif matchcondition in ('mem', MATCH_MEM):
                matchcondition.append(MATCH_MEM)
            else:
                raise JSONDataValueError('matchcondition', str(matchcondition))

        try:
            validator = kargs.get('validator', self.validator)
        except AttributeError:
            validator = kargs.get('validator', MODE_SCHEMA_DEFAULT)

        # INPUT-BRANCH: schema for validation
        if validator != MS_OFF:  # validation requested, requires schema
            if not schemafile:  # no new import, use present data
                if not self.schema:  # no schema data present
                    raise JSONDataError("value", "schema", self.schema)

            else:
                schemafile = os.path.abspath(schemafile)
                if not os.path.isfile(schemafile):
                    raise JSONDataSourceFileError("open", "schemafile",
                                                  str(schemafile))
                with open(schemafile) as schema_file:
                    schema = myjson.load(schema_file)
                if not schema:
                    raise JSONDataSourceFileError("read", "schemafile",
                                                  str(schemafile))

        # INPUT-BRANCH: data
        datafile = os.path.abspath(datafile)
        if not os.path.isfile(datafile):
            raise JSONDataSourceFileError("open", "datafile", str(datafile))
        try:
            with open(datafile) as data_file:  # load data
                jval = myjson.load(data_file)
        except Exception as e:
            raise JSONDataSourceFileError("open", "datafile", str(datafile), str(e))

        # INPUT-BRANCH: validate data
        self.validate(jval, schema, validator)

        # now - after validation - use the requested sub-branch only, default is the whole branch
        if subpointer:
            jval = JSONPointer(subpointer)(jval)

        # TARGET-CONTAINER: manage new branch data
        if isinstance(targetnode, JSONData):
            return _call(jval, targetnode.data, key)
        elif type(targetnode) in (dict, list):
            return _call(jval, targetnode, key)
        elif isinstance(targetnode, JSONPointer):
            return _call(jval, targetnode, key)
        elif type(targetnode) in ISSTR:
            return _call(jval, targetnode, key)
        elif targetnode == None:
            if self.data != None:
                return _call(jval, self.data)
            return _call(jval, '')

        # if isinstance(targetnode, JSONData):
        #     return self.branch_add(jval, targetnode.data, key)
        # elif type(targetnode) in (dict, list):
        #     return self.branch_add(jval, targetnode, key)
        # elif isinstance(targetnode, JSONPointer):
        #     return self.branch_add(jval, targetnode, key)
        # elif type(targetnode) in ISSTR:
        #     return self.branch_add(jval, targetnode, key)
        # elif targetnode == None:
        #     if self.data != None:
        #         return self.branch_add(jval, self.data)
        #     return self.branch_add(jval, '')

        raise JSONDataParameterError("import requires a container: object(dict) or array(list).")

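    # Usage sketch (not part of the original module): importing a branch file
    # into the current document. 'branch.json' and the key 'sub' are
    # hypothetical placeholders; B_ADD, MS_OFF, and the keyword names are
    # taken from the code above.
    #
    #     from jsondata import B_ADD, MS_OFF
    #
    #     # merge into the whole document with the default branch_superpose mechanic
    #     serializer.json_import('branch.json', validator=MS_OFF)
    #
    #     # RFC 6902 style add into the current data, hooked at key 'sub'
    #     serializer.json_import('branch.json', serializer.data, 'sub',
    #                            mechanic=B_ADD, validator=MS_OFF)
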
    def dump_data(self, pretty=True, **kargs):
        """Dumps structured data by calling *json.dumps()*.

        Args:
            **pretty**:
                Activates pretty printer for treeview, else flat.

            kargs:
                The remaining keyword arguments are passed
                through to *json.dumps()*.

                **ensure_ascii**:
                    See *json.dumps()*.

                    default := False

                **indent**:
                    Sets indent when *pretty* is *True*.

                **sort_keys**:
                    Sorts keys.

                    default := False

                **sourcefile**:
                    Loads data from 'sourcefile' into 'source'.

                    default := None

                **source**:
                    Prints data within 'source'.

                    default := self.data

        Returns:
            When successful returns the dump string, else either 'None',
            or raises an exception.

        Raises:
            JSONDataAmbiguityError:

                forwarded from 'json'

        """
        try:
            source = kargs.pop('source')
        except KeyError:
            source = None

        try:
            sourcefile = kargs.pop('sourcefile')
        except KeyError:
            sourcefile = None

        if sourcefile and source:
            raise JSONDataAmbiguityError('sourcefile/source',
                                         "sourcefile=" + str(sourcefile),
                                         "source=" + str(source))
        if sourcefile:
            source = open(sourcefile)
            source = myjson.load(source)
        elif not source:
            source = self.data  # yes, almost the same...

        if not kargs.get('indent') and pretty:
            kargs['indent'] = self.indent
        if not kargs.get('ensure_ascii'):
            kargs['ensure_ascii'] = False
        # if not kargs.get('sort_keys'):
        #     kargs['sort_keys'] = False

        return myjson.dumps(source, **kargs)

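    # Usage sketch (not part of the original module): serializing the current
    # data, or an explicitly passed structure, into a string. The keyword
    # names are those documented above; the literal data is illustrative only.
    #
    #     text = serializer.dump_data(pretty=True, sort_keys=True)
    #     flat = serializer.dump_data(pretty=False, source={"a": [1, 2]})
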
    def dump_schema(self, pretty=True, **kargs):
        """Dumps structured schema by calling *json.dumps()*.

        Args:
            **pretty**:
                Activates pretty printer for treeview, else flat.

            kargs:
                The remaining keyword arguments are passed
                through to *json.dumps()*.

                **ensure_ascii**:
                    See *json.dumps()*.

                    default := False

                **indent**:
                    Sets indent when *pretty* is *True*.

                **sort_keys**:
                    Sorts keys.

                    default := False

                **sourcefile**:
                    Loads schema from 'sourcefile' into 'source'.

                    default := None

                **source**:
                    Prints schema within 'source'.

                    default := self.schema

        Returns:
            When successful returns the dump string, else either 'None',
            or raises an exception.

        Raises:
            JSONDataAmbiguityError:

                forwarded from 'json'

        """
        try:
            source = kargs.pop('source')
        except KeyError:
            source = None

        try:
            sourcefile = kargs.pop('sourcefile')
        except KeyError:
            sourcefile = None

        if sourcefile and source:
            raise JSONDataAmbiguityError('sourcefile/source',
                                         "sourcefile=" + str(sourcefile),
                                         "source=" + str(source))
        if sourcefile:
            source = open(sourcefile)
            source = myjson.load(source)
        elif not source:
            source = self.schema  # yes, almost the same as dump_data...

        if not kargs.get('indent') and pretty:
            kargs['indent'] = self.indent
        if not kargs.get('ensure_ascii'):
            kargs['ensure_ascii'] = False
        # if not kargs.get('sort_keys'):
        #     kargs['sort_keys'] = False
        return myjson.dumps(source, **kargs)

    def set_schema(self, schemafile=None, targetnode=None, **kargs):
        """Sets the schema, or inserts a new branch into the current schema.

        The main schema (targetnode == None) is the schema of the current
        instance. Additional branches could be added by importing the
        specific schema definitions. These could either be kept volatile
        as a temporary runtime extension, or stored persistently.

        Args:
            **schemafile**:
                JSON-Schema filename for validation of the
                subtree/branch, see also *kargs['schema']*.

            **targetnode**:
                Target container hook for the inclusion of
                the loaded branch.

            kargs:
                **schema**:
                    In-memory JSON-Schema as an alternative
                    to *schemafile*; when provided, the 'schemafile'
                    is ignored.

                    default := None

                **persistent**:
                    Stores the 'schema' persistently into 'schemafile'
                    after the completion of the update, requires a
                    valid 'schemafile'.

                    default := False

        Returns:
            When successful returns 'True', else returns either 'False', or
            raises an exception.

        Raises:
            JSONDataError

            JSONDataSourceFileError

            JSONDataValueError

        """
        schema = kargs.get('schema')
        persistent = kargs.get('persistent', False)

        if schemafile:
            self.schemafile = schemafile
        elif self.schemafile != None:  # use present
            schemafile = self.schemafile

        if not schemafile:
            if persistent:  # persistence requires storage
                raise JSONDataTargetFileError("open", "JSONSchemaFilename",
                                              schemafile)

        if schemafile:  # load from file
            schemafile = os.path.abspath(schemafile)
            self.schemafile = schemafile
            if not os.path.isfile(schemafile):
                raise JSONDataSourceFileError("open", "schemafile", str(schemafile))
            with open(schemafile) as schema_file:
                schema = myjson.load(schema_file)
            if schema == None:
                raise JSONDataSourceFileError("read", "schemafile", str(schemafile))

        else:  # missing at all
            raise JSONDataSourceFileError("open", "schemafile", str(schemafile))

        #
        # manage new branch data
        #
        if not targetnode:
            self.schema = schema

        else:  # data history present, so decide how to handle

            # the container hook has to match for insertion
            if type(targetnode) != type(schema):
                raise JSONDataError(
                    "type", "target!=branch",
                    str(type(targetnode)) + "!=" + str(type(schema)))

            self.branch_add(schema, targetnode, None)

        return schema != None


from jsondata.jsonpointer import JSONPointer
# avoid nested recursion problems