docs: cleanup: Rephrase and correct typos
[barectf.git] / barectf / config_parse_common.py
1 # The MIT License (MIT)
2 #
3 # Copyright (c) 2015-2020 Philippe Proulx <pproulx@efficios.com>
4 #
5 # Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
7 # "Software"), to deal in the Software without restriction, including
8 # without limitation the rights to use, copy, modify, merge, publish,
9 # distribute, sublicense, and/or sell copies of the Software, and to
10 # permit persons to whom the Software is furnished to do so, subject to
11 # the following conditions:
12 #
13 # The above copyright notice and this permission notice shall be
14 # included in all copies or substantial portions of the Software.
15 #
16 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
17 # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
19 # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
20 # CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 # TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
23
24 import pkg_resources
25 import collections
26 import jsonschema # type: ignore
27 import os.path
28 import yaml
29 import copy
30 import os
31 from barectf.typing import VersionNumber, _OptStr
32 from typing import Optional, List, Dict, Any, TextIO, MutableMapping, Union, Set, Iterable, Callable, Tuple
33 import typing
34
35
# Single context of a configuration parsing error.
#
# Such a context object contains a name and an optional message.
class _ConfigurationParseErrorContext:
    def __init__(self, name: str, message: _OptStr = None):
        # keep both values private; expose them through the read-only
        # properties below
        self._ctx_name = name
        self._ctx_msg = message

    # context name
    @property
    def name(self) -> str:
        return self._ctx_name

    # context message, or `None` if none
    @property
    def message(self) -> _OptStr:
        return self._ctx_msg
51
52
# A configuration parsing error.
#
# Such an error object contains a list of contexts (`context`
# property): the first context of this list is the most specific one,
# while the last is the most general one.
#
# When you catch this exception and want to raise it again, use
# _append_ctx() to add a context, or the _append_error_ctx() module
# function to append a context and re-raise in a single call.
class _ConfigurationParseError(Exception):
    def __init__(self, init_ctx_obj_name, init_ctx_msg=None):
        super().__init__()
        self._ctx: List[_ConfigurationParseErrorContext] = []
        self._append_ctx(init_ctx_obj_name, init_ctx_msg)

    # contexts of this error, most specific first
    @property
    def context(self) -> List[_ConfigurationParseErrorContext]:
        return self._ctx

    # Appends a context having the name `name` and the optional
    # message `msg` to this error.
    def _append_ctx(self, name: str, msg: _OptStr = None):
        self._ctx.append(_ConfigurationParseErrorContext(name, msg))

    def __str__(self):
        # format a single context as `name:` or `name: message`
        def format_ctx(ctx: _ConfigurationParseErrorContext) -> str:
            if ctx.message is None:
                return f'{ctx.name}:'

            return f'{ctx.name}: {ctx.message}'

        # print from the most general to the most specific context
        return '\n'.join(format_ctx(ctx) for ctx in reversed(self._ctx))
88
89
# Appends the context having the object name `obj_name` and the
# (optional) message `message` to the `_ConfigurationParseError`
# exception `exc` and then raises `exc` again.
#
# This function never returns normally: it always raises `exc`.
def _append_error_ctx(exc: _ConfigurationParseError, obj_name: str, message: _OptStr = None):
    exc._append_ctx(obj_name, message)
    raise exc
96
97
# barectf 3 identifier and file name prefixes (see
# _v3_prefixes_from_v2_prefix()).
_V3Prefixes = collections.namedtuple('_V3Prefixes', ['identifier', 'file_name'])
99
100
# Converts a v2 prefix to v3 prefixes.
#
# The identifier prefix is `v2_prefix` as is; the file name prefix is
# `v2_prefix` without its trailing underscores.
def _v3_prefixes_from_v2_prefix(v2_prefix: str) -> _V3Prefixes:
    return _V3Prefixes(v2_prefix, v2_prefix.rstrip('_'))
104
105
# JSON schema reference resolver whose only purpose is to catch
# attempts to resolve a remote URI.
#
# All the schemas of barectf are local, therefore such an attempt
# would indicate a programming or schema error.
class _RefResolver(jsonschema.RefResolver):
    def resolve_remote(self, uri: str):
        # remote resolution must never happen in barectf
        raise RuntimeError(f'Missing local schema with URI `{uri}`')
114
115
# Type of a loaded YAML map node.
#
# Not all static type checkers support type recursion, so let's just use
# `Any` as a map node's value's type.
_MapNode = MutableMapping[str, Any]
119
120
# Schema validator which considers all the schemas found in the
# subdirectories `subdirs` (at build time) of the barectf package's
# `schemas` directory.
#
# The only public method is validate() which accepts an instance to
# validate as well as a schema short ID.
class _SchemaValidator:
    def __init__(self, subdirs: Iterable[str]):
        schemas_dir = pkg_resources.resource_filename(__name__, 'schemas')

        # schema store: full schema ID (`$id` property) to parsed
        # schema node (result of yaml.load())
        self._store: Dict[str, _MapNode] = {}

        for subdir in subdirs:
            dir = os.path.join(schemas_dir, subdir)

            for file_name in os.listdir(dir):
                if not file_name.endswith('.yaml'):
                    continue

                with open(os.path.join(dir, file_name)) as f:
                    schema = yaml.load(f, Loader=yaml.SafeLoader)

                # every schema file must have a unique `$id` property
                assert '$id' in schema
                schema_id = schema['$id']
                assert schema_id not in self._store
                self._store[schema_id] = schema

    # Recursively converts the `collections.OrderedDict` object `obj`
    # (and any nested ordered dictionary or list) to a plain `dict`
    # object and returns the result; returns `obj` as is if it's not
    # an ordered dictionary.
    @staticmethod
    def _dict_from_ordered_dict(obj):
        if type(obj) is not collections.OrderedDict:
            return obj

        dct = {}

        for k, v in obj.items():
            new_v = v

            if type(v) is collections.OrderedDict:
                new_v = _SchemaValidator._dict_from_ordered_dict(v)
            elif type(v) is list:
                new_v = [_SchemaValidator._dict_from_ordered_dict(elem) for elem in v]

            dct[k] = new_v

        return dct

    # Validates `instance` using the schema having the short ID
    # `schema_short_id`, letting any `jsonschema` exception escape.
    def _validate(self, instance: _MapNode, schema_short_id: str):
        # retrieve full schema ID from short ID
        schema_id = f'https://barectf.org/schemas/{schema_short_id}.json'
        assert schema_id in self._store

        # retrieve full schema
        schema = self._store[schema_id]

        # Create a reference resolver for this schema using this
        # validator's schema store.
        resolver = _RefResolver(base_uri=schema_id, referrer=schema,
                                store=self._store)

        # create a JSON schema validator using this reference resolver
        validator = jsonschema.Draft7Validator(schema, resolver=resolver)

        # Validate the instance, converting its
        # `collections.OrderedDict` objects to `dict` objects so as to
        # make any error message easier to read (because
        # validator.validate() below uses str() for error messages, and
        # collections.OrderedDict.__str__() returns a somewhat bulky
        # representation).
        validator.validate(self._dict_from_ordered_dict(instance))

    # Validates `instance` using the schema having the short ID
    # `schema_short_id`.
    #
    # A schema short ID is the part between `schemas/` and `.json` in
    # its URI.
    #
    # Raises a `_ConfigurationParseError` object, hiding any
    # `jsonschema` exception, on validation failure.
    def validate(self, instance: _MapNode, schema_short_id: str):
        try:
            self._validate(instance, schema_short_id)
        except jsonschema.ValidationError as exc:
            # convert to barectf `_ConfigurationParseError` exception
            contexts = ['Configuration object']

            # Each element of the instance's absolute path is either an
            # integer (array element's index) or a string (object
            # property's name).
            for elem in exc.absolute_path:
                if type(elem) is int:
                    ctx = f'Element #{elem + 1}'
                else:
                    ctx = f'`{elem}` property'

                contexts.append(ctx)

            schema_ctx = ''

            if len(exc.context) > 0:
                # According to the documentation of
                # jsonschema.ValidationError.context(), the method
                # returns a
                #
                # > list of errors from the subschemas
                #
                # This contains additional information about the
                # validation failure which can help the user figure out
                # what's wrong exactly.
                #
                # Join each message with `; ` and append this to our
                # configuration parsing error's message.
                msgs = '; '.join([e.message for e in exc.context])
                schema_ctx = f': {msgs}'

            # The most specific context is the last one: create the
            # exception with it, then append the more general ones.
            new_exc = _ConfigurationParseError(contexts.pop(),
                                               f'{exc.message}{schema_ctx} (from schema `{schema_short_id}`)')

            for ctx in reversed(contexts):
                new_exc._append_ctx(ctx)

            raise new_exc
241
242
# barectf 3 YAML configuration node.
#
# _yaml_load() wraps a YAML map node having the barectf 3
# configuration tag within such an object.
class _ConfigNodeV3:
    def __init__(self, config_node: _MapNode):
        # underlying YAML map node
        self._node = config_node

    # wrapped configuration map node
    @property
    def config_node(self) -> _MapNode:
        return self._node
251
252
# YAML tag of a barectf 3 configuration document.
_CONFIG_V3_YAML_TAG = 'tag:barectf.org,2020/3/config'
254
255
# Loads the content of the YAML file-like object `file` as a Python
# object and returns it.
#
# If the file's object has the barectf 3 configuration tag, then this
# function returns a `_ConfigNodeV3` object. Otherwise, it returns a
# `collections.OrderedDict` object.
#
# All YAML maps are loaded as `collections.OrderedDict` objects.
def _yaml_load(file: TextIO) -> Union[_ConfigNodeV3, _MapNode]:
    class Loader(yaml.Loader):
        pass

    # constructs an ordered map node, preserving the YAML property order
    def construct_map(loader, node) -> _MapNode:
        loader.flatten_mapping(node)
        return collections.OrderedDict(loader.construct_pairs(node))

    # constructs a barectf 3 configuration node wrapper
    def construct_config(loader, node) -> _ConfigNodeV3:
        if not isinstance(node, yaml.MappingNode):
            problem = f'Expecting a map for the tag `{node.tag}`'
            raise yaml.constructor.ConstructorError(problem=problem)

        return _ConfigNodeV3(construct_map(loader, node))

    Loader.add_constructor(_CONFIG_V3_YAML_TAG, construct_config)
    Loader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, construct_map)

    # YAML -> Python
    try:
        return yaml.load(file, Loader=Loader)
    except (yaml.YAMLError, OSError, IOError) as exc:
        raise _ConfigurationParseError('YAML loader', f'Cannot load file: {exc}')
288
289
# Loads the content of the YAML file having the path `path` like
# _yaml_load() does and returns it.
def _yaml_load_path(path: str) -> Union[_ConfigNodeV3, _MapNode]:
    with open(path) as yaml_file:
        return _yaml_load(yaml_file)
293
294
# Dumps the content of the Python object `node`
# (`collections.OrderedDict` or `_ConfigNodeV3`) as a YAML string and
# returns it.
def _yaml_dump(node: _MapNode, **kwds) -> str:
    class Dumper(yaml.Dumper):
        pass

    # represents a barectf 3 configuration node wrapper with its tag
    def represent_config(dumper, config_node):
        return dumper.represent_mapping(_CONFIG_V3_YAML_TAG, config_node.config_node.items())

    # represents an ordered map node as a plain YAML map
    def represent_ordered_dict(dumper, map_node):
        return dumper.represent_mapping(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                                        map_node.items())

    Dumper.add_representer(_ConfigNodeV3, represent_config)
    Dumper.add_representer(collections.OrderedDict, represent_ordered_dict)

    # Python -> YAML
    return yaml.dump(node, Dumper=Dumper, version=(1, 2), **kwds)
314
315
# A common barectf YAML configuration parser.
#
# This is the base class of any barectf YAML configuration parser. It
# mostly contains helpers.
class _Parser:
    # Builds a base barectf YAML configuration parser to process the
    # configuration node `node` (already loaded from the file-like
    # object `file`).
    #
    # For its _process_node_include() method, the parser considers the
    # package inclusion directory as well as `include_dirs`, and ignores
    # nonexistent inclusion files if `ignore_include_not_found` is
    # `True`.
    def __init__(self, root_file: TextIO, node: Union[_MapNode, _ConfigNodeV3],
                 with_pkg_include_dir: bool, include_dirs: Optional[List[str]],
                 ignore_include_not_found: bool, major_version: VersionNumber):
        self._root_file = root_file
        self._root_node = node

        # names of field type node properties which can contain a
        # nested field type node or a field type alias name
        self._ft_prop_names = [
            # barectf 2.1+
            '$inherit',

            # barectf 2
            'inherit',
            'value-type',
            'element-type',

            # barectf 3
            'element-field-type',
        ]

        if include_dirs is None:
            include_dirs = []

        self._include_dirs = copy.copy(include_dirs)

        if with_pkg_include_dir:
            self._include_dirs.append(pkg_resources.resource_filename(__name__, f'include/{major_version}'))

        self._ignore_include_not_found = ignore_include_not_found

        # stack of normalized paths of the inclusion files being
        # processed (used to detect inclusion cycles)
        self._include_stack: List[str] = []

        # names of the field type aliases which are already resolved
        self._resolved_ft_aliases: Set[str] = set()
        self._schema_validator = _SchemaValidator({'config/common', f'config/{major_version}'})
        self._major_version = major_version

    # root configuration node
    @property
    def root_node(self):
        return self._root_node

    # Name of the property of a structure field type node which
    # contains its member nodes (depends on the configuration's major
    # version).
    @property
    def _struct_ft_node_members_prop_name(self) -> str:
        if self._major_version == 2:
            return 'fields'
        else:
            return 'members'

    # Returns the last included file name from the parser's inclusion
    # file name stack, or `N/A` if the root file does not have an
    # associated path under the `name` property.
    def _get_last_include_file(self) -> str:
        if self._include_stack:
            return self._include_stack[-1]

        if hasattr(self._root_file, 'name'):
            return typing.cast(str, self._root_file.name)

        return 'N/A'

    # Loads the inclusion file having the path `yaml_path` and returns
    # its content as a `collections.OrderedDict` object.
    #
    # Returns `None` when the file cannot be found in the inclusion
    # directories and this parser ignores nonexistent inclusion files.
    def _load_include(self, yaml_path: str) -> Optional[_MapNode]:
        for inc_dir in self._include_dirs:
            # Current inclusion dir + file name path.
            #
            # Note: os.path.join() only takes the last argument if it's
            # absolute.
            inc_path = os.path.join(inc_dir, yaml_path)

            # real path (symbolic links resolved)
            real_path = os.path.realpath(inc_path)

            # normalized path (weird stuff removed!)
            norm_path = os.path.normpath(real_path)

            if not os.path.isfile(norm_path):
                # file doesn't exist: skip
                continue

            if norm_path in self._include_stack:
                base_path = self._get_last_include_file()
                raise _ConfigurationParseError(f'File `{base_path}`',
                                               f'Cannot recursively include file `{norm_path}`')

            self._include_stack.append(norm_path)

            # load raw content
            return typing.cast(_MapNode, _yaml_load_path(norm_path))

        if not self._ignore_include_not_found:
            base_path = self._get_last_include_file()
            raise _ConfigurationParseError(f'File `{base_path}`',
                                           f'Cannot include file `{yaml_path}`: file not found in inclusion directories')

        return None

    # Returns a list of all the inclusion file paths as found in the
    # inclusion node `include_node`.
    def _get_include_paths(self, include_node: _MapNode) -> List[str]:
        if include_node is None:
            # none
            return []

        if type(include_node) is str:
            # wrap as array
            return [typing.cast(str, include_node)]

        # already an array
        assert type(include_node) is list
        return typing.cast(List[str], include_node)

    # Updates the node `base_node` with an overlay node `overlay_node`.
    #
    # Both the inclusion and field type node inheritance features use
    # this update mechanism.
    def _update_node(self, base_node: _MapNode, overlay_node: _MapNode):
        # see the comment about the `members` property below
        def update_members_node(base_value: List[Any], olay_value: List[Any]):
            for olay_item in olay_value:
                # assume we append `olay_item` to `base_value` initially
                append_olay_item = True

                if type(olay_item) is collections.OrderedDict:
                    # overlay item is an object
                    if len(olay_item) == 1:
                        # overlay object item contains a single property
                        olay_name = list(olay_item)[0]

                        # find corresponding base item
                        for base_item in base_value:
                            if type(base_item) is collections.OrderedDict:
                                # NOTE(review): this tests
                                # `len(olay_item)` again (always 1 at
                                # this point); `len(base_item)` was
                                # presumably intended — confirm.
                                if len(olay_item) == 1:
                                    base_name = list(base_item)[0]

                                    if olay_name == base_name:
                                        # Names match: update with usual
                                        # strategy.
                                        self._update_node(base_item, olay_item)

                                        # Do _not_ append `olay_item` to
                                        # `base_value`: we just updated
                                        # `base_item`.
                                        append_olay_item = False
                                        break

                if append_olay_item:
                    base_value.append(copy.deepcopy(olay_item))

        for olay_key, olay_value in overlay_node.items():
            if olay_key in base_node:
                base_value = base_node[olay_key]

                if type(olay_value) is collections.OrderedDict and type(base_value) is collections.OrderedDict:
                    # merge both objects
                    self._update_node(base_value, olay_value)
                elif type(olay_value) is list and type(base_value) is list:
                    if olay_key == 'members' and self._major_version == 3:
                        # This is a "temporary" hack.
                        #
                        # In barectf 2, a structure field type node
                        # looks like this:
                        #
                        #     class: struct
                        #     fields:
                        #       hello: uint8
                        #       world: string
                        #
                        # Having an overlay such as
                        #
                        #     fields:
                        #       hello: float
                        #
                        # will result in
                        #
                        #     class: struct
                        #     fields:
                        #       hello: float
                        #       world: string
                        #
                        # because the `fields` property is a map.
                        #
                        # In barectf 3, this is fixed (a YAML map is not
                        # ordered), so that the same initial structure
                        # field type node looks like this:
                        #
                        #     class: struct
                        #     members:
                        #       - hello: uint8
                        #       - world:
                        #           field-type:
                        #             class: str
                        #
                        # Although the `members` property is
                        # syntactically an array, it's semantically an
                        # ordered map, where an entry's key is the array
                        # item's map's first key (like YAML's `!!omap`).
                        #
                        # Having an overlay such as
                        #
                        #     members:
                        #       - hello: float
                        #
                        # would result in
                        #
                        #     class: struct
                        #     members:
                        #       - hello: uint8
                        #       - world:
                        #           field-type:
                        #             class: str
                        #       - hello: float
                        #
                        # with the naive strategy, while what we really
                        # want is:
                        #
                        #     class: struct
                        #     members:
                        #       - hello: float
                        #       - world:
                        #           field-type:
                        #             class: str
                        #
                        # As of this version of barectf, the _only_
                        # property with a list value which acts as an
                        # ordered map is named `members`. This is why we
                        # can only check the value of `olay_key`,
                        # whatever our context.
                        #
                        # update_members_node() attempts to perform
                        # this below. For a given item of `olay_value`,
                        # if
                        #
                        # * It's not an object.
                        #
                        # * It contains more than one property.
                        #
                        # * Its single property's name does not match
                        #   the name of the single property of any
                        #   object item of `base_value`.
                        #
                        # then we append the item to `base_value` as
                        # usual.
                        update_members_node(base_value, olay_value)
                    else:
                        # append extension array items to base items
                        base_value += copy.deepcopy(olay_value)
                else:
                    # fall back to replacing base property
                    base_node[olay_key] = copy.deepcopy(olay_value)
            else:
                # set base property from overlay property
                base_node[olay_key] = copy.deepcopy(olay_value)

    # Processes inclusions using `last_overlay_node` as the last overlay
    # node to use to "patch" the node.
    #
    # If `last_overlay_node` contains an `$include` property, then this
    # method patches the current base node (initially empty) in order
    # using the content of the inclusion files (recursively).
    #
    # At the end, this method removes the `$include` property of
    # `last_overlay_node` and then patches the current base node with
    # its other properties before returning the result (always a deep
    # copy).
    def _process_node_include(self, last_overlay_node: _MapNode,
                              process_base_include_cb: Callable[[_MapNode], _MapNode],
                              process_children_include_cb: Optional[Callable[[_MapNode], None]] = None) -> _MapNode:
        # process children inclusions first
        if process_children_include_cb is not None:
            process_children_include_cb(last_overlay_node)

        incl_prop_name = '$include'

        if incl_prop_name in last_overlay_node:
            include_node = last_overlay_node[incl_prop_name]
        else:
            # no inclusions!
            return last_overlay_node

        include_paths = self._get_include_paths(include_node)
        cur_base_path = self._get_last_include_file()
        base_node = None

        # keep the inclusion paths and remove the `$include` property
        include_paths = copy.deepcopy(include_paths)
        del last_overlay_node[incl_prop_name]

        for include_path in include_paths:
            # load raw YAML from included file
            overlay_node = self._load_include(include_path)

            if overlay_node is None:
                # Cannot find inclusion file, but we're ignoring those
                # errors, otherwise _load_include() itself raises a
                # config error.
                continue

            # recursively process inclusions
            try:
                overlay_node = process_base_include_cb(overlay_node)
            except _ConfigurationParseError as exc:
                _append_error_ctx(exc, f'File `{cur_base_path}`')

            # pop inclusion stack now that we're done including
            del self._include_stack[-1]

            # At this point, `base_node` is fully resolved (does not
            # contain any `$include` property).
            if base_node is None:
                base_node = overlay_node
            else:
                self._update_node(base_node, overlay_node)

        # Finally, update the latest base node with our last overlay
        # node.
        if base_node is None:
            # Nothing was included, which is possible when we're
            # ignoring inclusion errors.
            return last_overlay_node

        self._update_node(base_node, last_overlay_node)
        return base_node

    # Generates pairs of member node and field type node property name
    # (in the member node) for the structure field type node's members
    # node `node`.
    def _struct_ft_member_fts_iter(self,
                                   node: Union[List[_MapNode], _MapNode]) -> Iterable[Tuple[_MapNode, str]]:
        if type(node) is list:
            # barectf 3
            assert self._major_version == 3
            node = typing.cast(List[_MapNode], node)

            for member_node in node:
                assert type(member_node) is collections.OrderedDict
                member_node = typing.cast(_MapNode, member_node)
                name, val = list(member_node.items())[0]

                if type(val) is collections.OrderedDict:
                    # long form: the member's map contains the field
                    # type under a `field-type` property
                    member_node = val
                    name = 'field-type'

                yield member_node, name
        else:
            # barectf 2
            assert self._major_version == 2
            assert type(node) is collections.OrderedDict
            node = typing.cast(_MapNode, node)

            for name in node:
                yield node, name

    # Resolves the field type alias `key` in the node `parent_node`, as
    # well as any nested field type aliases, using the aliases of the
    # `ft_aliases_node` node.
    #
    # If `key` is not in `parent_node`, this method returns.
    #
    # This method can modify `ft_aliases_node` and `parent_node[key]`.
    #
    # `ctx_obj_name` is the context's object name when this method
    # raises a `_ConfigurationParseError` exception.
    def _resolve_ft_alias(self, ft_aliases_node: _MapNode, parent_node: _MapNode, key: str,
                          ctx_obj_name: str, alias_set: Optional[Set[str]] = None):
        if key not in parent_node:
            return

        node = parent_node[key]

        if node is None:
            # some nodes can be null to use their default value
            return

        # This set holds all the field type aliases to be expanded,
        # recursively. This is used to detect cycles.
        if alias_set is None:
            alias_set = set()

        if type(node) is str:
            alias = node

            # Make sure this alias names an existing field type node, at
            # least.
            if alias not in ft_aliases_node:
                raise _ConfigurationParseError(ctx_obj_name,
                                               f'Field type alias `{alias}` does not exist')

            if alias not in self._resolved_ft_aliases:
                # Only check for a field type alias cycle when we didn't
                # resolve the alias yet, as a given node can refer to
                # the same field type alias more than once.
                if alias in alias_set:
                    msg = f'Cycle detected during the `{alias}` field type alias resolution'
                    raise _ConfigurationParseError(ctx_obj_name, msg)

                # Resolve it.
                #
                # Add `alias` to the set of encountered field type
                # aliases before calling self._resolve_ft_alias() to
                # detect cycles.
                alias_set.add(alias)
                self._resolve_ft_alias(ft_aliases_node, ft_aliases_node, alias, ctx_obj_name,
                                       alias_set)
                self._resolved_ft_aliases.add(alias)

            # replace alias with field type node copy
            parent_node[key] = copy.deepcopy(ft_aliases_node[alias])
            return

        # resolve nested field type aliases
        for pkey in self._ft_prop_names:
            self._resolve_ft_alias(ft_aliases_node, node, pkey, ctx_obj_name, alias_set)

        # Resolve field type aliases of structure field type node member
        # nodes.
        pkey = self._struct_ft_node_members_prop_name

        if pkey in node:
            for member_node, ft_prop_name in self._struct_ft_member_fts_iter(node[pkey]):
                self._resolve_ft_alias(ft_aliases_node, member_node, ft_prop_name,
                                       ctx_obj_name, alias_set)

    # Like _resolve_ft_alias(), but builds the context object name from
    # `key` for any raised `_ConfigurationParseError` exception.
    def _resolve_ft_alias_from(self, ft_aliases_node: _MapNode, parent_node: _MapNode, key: str):
        self._resolve_ft_alias(ft_aliases_node, parent_node, key, f'`{key}` property')

    # Applies field type node inheritance to the property `key` of
    # `parent_node`.
    #
    # `parent_node[key]`, if it exists, must not contain any field type
    # alias (all field type objects are complete).
    #
    # This method can modify `parent_node[key]`.
    #
    # When this method returns, no field type node has an `$inherit` or
    # `inherit` property.
    def _apply_ft_inheritance(self, parent_node: _MapNode, key: str):
        if key not in parent_node:
            return

        node = parent_node[key]

        if node is None:
            return

        # process children first
        for pkey in self._ft_prop_names:
            self._apply_ft_inheritance(node, pkey)

        # Process the field types of structure field type node member
        # nodes.
        pkey = self._struct_ft_node_members_prop_name

        if pkey in node:
            for member_node, ft_prop_name in self._struct_ft_member_fts_iter(node[pkey]):
                self._apply_ft_inheritance(member_node, ft_prop_name)

        # apply inheritance for this node
        if 'inherit' in node:
            # barectf 2.1: `inherit` property was renamed to `$inherit`
            assert '$inherit' not in node
            node['$inherit'] = node['inherit']
            del node['inherit']

        inherit_key = '$inherit'

        if inherit_key in node:
            assert type(node[inherit_key]) is collections.OrderedDict

            # apply inheritance below
            self._apply_ft_inheritance(node, inherit_key)

            # `node` is an overlay on the `$inherit` node
            base_node = node[inherit_key]
            del node[inherit_key]
            self._update_node(base_node, node)

            # set updated base node as this node
            parent_node[key] = base_node
This page took 0.057064 seconds and 4 git commands to generate.