1 # The MIT License (MIT)
3 # Copyright (c) 2015-2020 Philippe Proulx <pproulx@efficios.com>
5 # Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
7 # "Software"), to deal in the Software without restriction, including
8 # without limitation the rights to use, copy, modify, merge, publish,
9 # distribute, sublicense, and/or sell copies of the Software, and to
10 # permit persons to whom the Software is furnished to do so, subject to
11 # the following conditions:
13 # The above copyright notice and this permission notice shall be
14 # included in all copies or substantial portions of the Software.
16 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
17 # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
19 # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
20 # CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 # TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import pkg_resources
import collections
import jsonschema  # type: ignore
import os.path
import yaml
import copy
import os
import typing
from barectf.typing import VersionNumber, _OptStr
from typing import Optional, List, Dict, Any, TextIO, MutableMapping, Union, Set, Iterable, Callable, Tuple
36 # The context of a configuration parsing error.
38 # Such a context object has a name and, optionally, a message.
class _ConfigurationParseErrorContext:
    # Context of a configuration parsing error: an object name and an
    # optional message.
    #
    # NOTE(review): the accessor bodies and their `@property` decorators
    # are elided in this view; reconstructed from the visible signatures
    # and from how `_ConfigurationParseError.__str__()` reads `ctx.name`
    # and `ctx.message` without calling them — confirm against the full
    # file.
    def __init__(self, name: str, message: _OptStr = None):
        self._name = name
        self._msg = message

    # Context's object name.
    @property
    def name(self) -> str:
        return self._name

    # Context's message, or `None` if none.
    @property
    def message(self) -> _OptStr:
        return self._msg
53 # A configuration parsing error.
55 # Such an error object contains a list of contexts (`context` property).
57 # The first context of this list is the most specific context, while the
58 # last is the more general.
60 # Use _append_ctx() to append a context to an existing configuration
61 # parsing error when you catch it before raising it again. You can use
62 # _append_error_ctx() to do exactly this in a single call.
class _ConfigurationParseError(Exception):
    # Configuration parsing error.
    #
    # Holds a list of `_ConfigurationParseErrorContext` objects
    # (`context` property), from most specific (first) to most general
    # (last).
    def __init__(self, init_ctx_obj_name, init_ctx_msg=None):
        super().__init__()
        self._ctx: List[_ConfigurationParseErrorContext] = []
        self._append_ctx(init_ctx_obj_name, init_ctx_msg)

    # Contexts of this error, most specific first.
    @property
    def context(self) -> List[_ConfigurationParseErrorContext]:
        return self._ctx

    # Appends a context having the name `name` and the optional message
    # `msg` to this error.
    def _append_ctx(self, name: str, msg: _OptStr = None):
        self._ctx.append(_ConfigurationParseErrorContext(name, msg))

    def __str__(self):
        # One line per context, most general first.
        #
        # NOTE(review): parts of this method are elided in this view;
        # the exact line format (context name, colon, then the optional
        # message — the visible `+= f' {ctx.message}'` implies the name
        # line ends with a separator) is reconstructed — confirm
        # against the full file.
        lines = []

        for ctx in reversed(self._ctx):
            line = f'{ctx.name}:'

            if ctx.message is not None:
                line += f' {ctx.message}'

            lines.append(line)

        return '\n'.join(lines)
90 # Appends the context having the object name `obj_name` and the
91 # (optional) message `message` to the `_ConfigurationParseError`
92 # exception `exc` and then raises `exc` again.
def _append_error_ctx(exc: _ConfigurationParseError, obj_name: str, message: _OptStr = None):
    # Appends the context having the object name `obj_name` and the
    # optional message `message` to `exc`, then raises `exc` again.
    #
    # Fix: the comment above documents that this helper re-raises
    # `exc`, but the visible body never did, so callers catching and
    # delegating to this helper would silently fall through; re-raise
    # explicitly.
    exc._append_ctx(obj_name, message)
    raise exc
# Pair of barectf 3 prefixes: C identifier prefix and file name prefix.
_V3Prefixes = collections.namedtuple('_V3Prefixes', ['identifier', 'file_name'])
# Converts a v2 prefix to v3 prefixes.
def _v3_prefixes_from_v2_prefix(v2_prefix: str) -> _V3Prefixes:
    # A barectf 2 configuration has a single prefix: reuse it verbatim
    # as the v3 identifier prefix, and derive the v3 file name prefix
    # by dropping any trailing underscores.
    file_name_prefix = v2_prefix.rstrip('_')
    return _V3Prefixes(identifier=v2_prefix, file_name=file_name_prefix)
106 # This JSON schema reference resolver only serves to detect when it
107 # needs to resolve a remote URI.
109 # This must never happen in barectf because all our schemas are local;
110 # it would mean a programming or schema error.
class _RefResolver(jsonschema.RefResolver):
    # Called by `jsonschema` when a `$ref` needs a remote URI; always
    # an internal (programming or schema) error here, because every
    # barectf schema is local.
    def resolve_remote(self, uri: str):
        raise RuntimeError(f'Missing local schema with URI `{uri}`')
116 # Not all static type checkers support type recursion, so let's just use
117 # `Any` as a map node's value's type.
_MapNode = MutableMapping[str, Any]  # YAML map node: string keys, any values
121 # Schema validator which considers all the schemas found in the
122 # subdirectories `subdirs` (at build time) of the barectf package's
123 # `schemas` directory.
125 # The only public method is validate() which accepts an instance to
126 # validate as well as a schema short ID.
class _SchemaValidator:
    # Schema validator which considers all the schemas found in the
    # subdirectories `subdirs` (at build time) of the barectf package's
    # `schemas` directory.
    #
    # The only public method is validate().
    def __init__(self, subdirs: Iterable[str]):
        schemas_dir = pkg_resources.resource_filename(__name__, 'schemas')

        # Schema store: full schema ID -> schema node.
        #
        # Fix: the original annotation was `Dict[str, str]`, but the
        # stored values are loaded YAML schema nodes (maps), not
        # strings.
        self._store: Dict[str, Any] = {}

        for subdir in subdirs:
            # renamed from `dir` so as not to shadow the builtin
            subdir_path = os.path.join(schemas_dir, subdir)

            for file_name in os.listdir(subdir_path):
                if not file_name.endswith('.yaml'):
                    # not a schema file: skip
                    continue

                with open(os.path.join(subdir_path, file_name)) as f:
                    schema = yaml.load(f, Loader=yaml.SafeLoader)

                # each schema must have a unique `$id`
                assert '$id' in schema
                schema_id = schema['$id']
                assert schema_id not in self._store
                self._store[schema_id] = schema

    # Converts `obj`, recursively, from `collections.OrderedDict` to
    # plain `dict` objects (also within lists) so that validation error
    # messages are easier to read (validator.validate() uses str(),
    # and collections.OrderedDict.__str__() is bulky).
    @staticmethod
    def _dict_from_ordered_dict(obj):
        if type(obj) is not collections.OrderedDict:
            # not an ordered map: return as is
            return obj

        dct = {}

        for k, v in obj.items():
            new_v = v

            if type(v) is collections.OrderedDict:
                # convert nested map
                new_v = _SchemaValidator._dict_from_ordered_dict(v)
            elif type(v) is list:
                # convert each element
                new_v = [_SchemaValidator._dict_from_ordered_dict(elem) for elem in v]

            dct[k] = new_v

        return dct

    # Validates `instance` against the schema having the short ID
    # `schema_short_id`, raising a `jsonschema` exception on failure.
    def _validate(self, instance: _MapNode, schema_short_id: str):
        # retrieve full schema ID from short ID
        schema_id = f'https://barectf.org/schemas/{schema_short_id}.json'
        assert schema_id in self._store

        # retrieve full schema
        schema = self._store[schema_id]

        # Create a reference resolver for this schema using this
        # validator's schema store (local schemas only).
        resolver = _RefResolver(base_uri=schema_id, referrer=schema,
                                store=self._store)

        # create a JSON schema validator using this reference resolver
        validator = jsonschema.Draft7Validator(schema, resolver=resolver)

        # Validate the instance, converting its
        # `collections.OrderedDict` objects to `dict` objects so as to
        # make any error message easier to read.
        validator.validate(self._dict_from_ordered_dict(instance))

    # Validates `instance` using the schema having the short ID
    # `schema_short_id` (the part between `schemas/` and `.json` in the
    # schema URI).
    #
    # Raises a `_ConfigurationParseError` object, hiding any
    # `jsonschema` exception, on validation failure.
    def validate(self, instance: _MapNode, schema_short_id: str):
        try:
            self._validate(instance, schema_short_id)
        except jsonschema.ValidationError as exc:
            # convert to barectf `_ConfigurationParseError` exception
            contexts = ['Configuration object']

            # Each element of the instance's absolute path is either an
            # integer (array element's index) or a string (object
            # property name).
            for elem in exc.absolute_path:
                if type(elem) is int:
                    ctx = f'Element #{elem + 1}'
                else:
                    ctx = f'`{elem}` property'

                contexts.append(ctx)

            schema_ctx = ''

            if len(exc.context) > 0:
                # `exc.context` contains the errors from the
                # subschemas: additional information which can help the
                # user figure out what's wrong exactly. Join each
                # message with `; ` and append this to our
                # configuration parsing error's message.
                msgs = '; '.join([e.message for e in exc.context])
                schema_ctx = f': {msgs}'

            # most specific context becomes the new error's own context
            new_exc = _ConfigurationParseError(contexts.pop(),
                                               f'{exc.message}{schema_ctx} (from schema `{schema_short_id}`)')

            for ctx in reversed(contexts):
                new_exc._append_ctx(ctx)

            raise new_exc
243 # barectf 3 YAML configuration node.
# barectf 3 YAML configuration node wrapper constructor.
#
# NOTE(review): in the full file this is presumably a method of a
# `_ConfigNodeV3` class — the class statement is not visible here.
def __init__(self, config_node: _MapNode):
    # wrapped root mapping node
    self._config_node = config_node
# Wrapped configuration mapping node.
#
# NOTE(review): presumably decorated with `@property` in the full file
# (decorator line not visible here) — it is read without parentheses
# elsewhere in this file (`node.config_node.items()`).
def config_node(self) -> _MapNode:
    return self._config_node
# YAML tag of a barectf 3 configuration document's root node.
_CONFIG_V3_YAML_TAG = 'tag:barectf.org,2020/3/config'
256 # Loads the content of the YAML file-like object `file` as a Python
257 # object and returns it.
259 # If the file's object has the barectf 3 configuration tag, then this
260 # function returns a `_ConfigNodeV3` object. Otherwise, it returns a
261 # `collections.OrderedDict` object.
263 # All YAML maps are loaded as `collections.OrderedDict` objects.
def _yaml_load(file: TextIO) -> Union[_ConfigNodeV3, _MapNode]:
    # Loads the content of the YAML file-like object `file` as a Python
    # object and returns it.
    #
    # If the file's object has the barectf 3 configuration tag, then
    # this function returns a `_ConfigNodeV3` object. Otherwise, it
    # returns a `collections.OrderedDict` object. All YAML maps are
    # loaded as `collections.OrderedDict` objects.
    #
    # Raises `_ConfigurationParseError` when the file cannot be loaded.
    class Loader(yaml.Loader):
        # local loader class so that constructors don't leak globally
        pass

    def config_ctor(loader, node) -> _ConfigNodeV3:
        # the barectf 3 configuration tag must tag a map
        if not isinstance(node, yaml.MappingNode):
            problem = f'Expecting a map for the tag `{node.tag}`'
            raise yaml.constructor.ConstructorError(problem=problem)

        loader.flatten_mapping(node)
        return _ConfigNodeV3(collections.OrderedDict(loader.construct_pairs(node)))

    def mapping_ctor(loader, node) -> _MapNode:
        # plain map: keep key order
        loader.flatten_mapping(node)
        return collections.OrderedDict(loader.construct_pairs(node))

    Loader.add_constructor(_CONFIG_V3_YAML_TAG, config_ctor)
    Loader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, mapping_ctor)

    try:
        return yaml.load(file, Loader=Loader)
    except (yaml.YAMLError, OSError, IOError) as exc:
        raise _ConfigurationParseError('YAML loader', f'Cannot load file: {exc}')
def _yaml_load_path(path: str) -> Union[_ConfigNodeV3, _MapNode]:
    # Loads the YAML file at `path` and returns the resulting node (see
    # _yaml_load()).
    #
    # NOTE(review): the `with` body is elided in this view; the call to
    # _yaml_load() is reconstructed from the function's name and return
    # annotation — confirm against the full file.
    with open(path) as f:
        return _yaml_load(f)
295 # Dumps the content of the Python object `obj`
296 # (`collections.OrderedDict` or `_ConfigNodeV3`) as a YAML string and
def _yaml_dump(node: _MapNode, **kwds) -> str:
    # Dumps the content of the Python object `node`
    # (`collections.OrderedDict` or `_ConfigNodeV3`) as a YAML string,
    # forwarding `kwds` to yaml.dump().
    class Dumper(yaml.Dumper):
        # local dumper class so that representers don't leak globally
        pass

    def config_repr(dumper, node):
        # barectf 3 configuration node: tagged map
        return dumper.represent_mapping(_CONFIG_V3_YAML_TAG, node.config_node.items())

    def mapping_repr(dumper, node):
        # plain map: keep the ordered map's key order
        return dumper.represent_mapping(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                                        node.items())

    Dumper.add_representer(_ConfigNodeV3, config_repr)
    Dumper.add_representer(collections.OrderedDict, mapping_repr)

    return yaml.dump(node, Dumper=Dumper, version=(1, 2), **kwds)
316 # A common barectf YAML configuration parser.
318 # This is the base class of any barectf YAML configuration parser. It
319 # mostly contains helpers.
321 # Builds a base barectf YAML configuration parser to process the
322 # configuration node `node` (already loaded from the file-like
325 # For its _process_node_include() method, the parser considers the
326 # package inclusion directory as well as `include_dirs`, and ignores
327 # nonexistent inclusion files if `ignore_include_not_found` is
def __init__(self, root_file: TextIO, node: Union[_MapNode, _ConfigNodeV3],
             with_pkg_include_dir: bool, include_dirs: Optional[List[str]],
             ignore_include_not_found: bool, major_version: VersionNumber):
    # Builds a base barectf YAML configuration parser to process the
    # configuration node `node` (already loaded from `root_file`).
    #
    # For its _process_node_include() method, the parser considers the
    # package inclusion directory (when `with_pkg_include_dir` is true)
    # as well as `include_dirs`, and ignores nonexistent inclusion
    # files if `ignore_include_not_found` is true.
    self._root_file = root_file
    self._root_node = node

    # Names of field type node properties which can contain a nested
    # field type node or field type alias.
    #
    # NOTE(review): most entries of this list are elided in this view;
    # reconstructed from the barectf 2/3 property names this parser
    # handles elsewhere — confirm against the full file.
    self._ft_prop_names = [
        # barectf 2.1+
        '$inherit',
        # barectf 2
        'inherit',
        'value-type',
        'fields',
        # barectf 3
        'field-type',
        'element-field-type',
    ]

    if include_dirs is None:
        include_dirs = []

    # own copy so that appending below doesn't mutate the caller's list
    self._include_dirs = copy.copy(include_dirs)

    if with_pkg_include_dir:
        self._include_dirs.append(pkg_resources.resource_filename(__name__,
                                                                  f'include/{major_version}'))

    self._ignore_include_not_found = ignore_include_not_found

    # stack of absolute paths of the files being included (cycle
    # detection and error messages)
    self._include_stack: List[str] = []

    # names of the field type aliases which are already resolved
    self._resolved_ft_aliases: Set[str] = set()
    self._schema_validator = _SchemaValidator({'config/common', f'config/{major_version}'})
    self._major_version = major_version
@property
def root_node(self) -> Union[_MapNode, _ConfigNodeV3]:
    # Root configuration node this parser processes.
    #
    # NOTE(review): the `def` line and decorator of this accessor are
    # elided in this view; name and `@property` reconstructed from the
    # visible body — confirm against the full file.
    return self._root_node
@property
def _struct_ft_node_members_prop_name(self) -> str:
    # Name of the structure field type node property which contains
    # the member nodes: `fields` (barectf 2) or `members` (barectf 3).
    #
    # `@property` and the return lines are elided in this view;
    # reconstructed from paren-less reads and the literal `'members'`
    # check in _update_node().
    if self._major_version == 2:
        return 'fields'
    else:
        return 'members'
372 # Returns the last included file name from the parser's inclusion
373 # file name stack, or `N/A` if the root file does not have an
374 # associated path under the `name` property.
def _get_last_include_file(self) -> str:
    # Returns the last included file name from the parser's inclusion
    # file name stack, the root file's path when it has one, or `N/A`
    # otherwise (grounded by the comment above this method).
    if self._include_stack:
        return self._include_stack[-1]

    if hasattr(self._root_file, 'name'):
        return typing.cast(str, self._root_file.name)

    # root file has no associated path
    return 'N/A'
384 # Loads the inclusion file having the path `yaml_path` and returns
385 # its content as a `collections.OrderedDict` object.
def _load_include(self, yaml_path) -> Optional[_MapNode]:
    # Loads the inclusion file having the path `yaml_path` and returns
    # its content as a `collections.OrderedDict` object, or `None` when
    # the file is not found and the parser ignores missing inclusion
    # files.
    #
    # Raises `_ConfigurationParseError` on recursive inclusion, or when
    # the file is not found and missing files are not ignored.
    for inc_dir in self._include_dirs:
        # Current inclusion dir + file name path.
        #
        # Note: os.path.join() only takes the last argument if it's
        # absolute.
        inc_path = os.path.join(inc_dir, yaml_path)

        # real path (symbolic links resolved)
        real_path = os.path.realpath(inc_path)

        # normalized path (weird stuff removed!)
        norm_path = os.path.normpath(real_path)

        if not os.path.isfile(norm_path):
            # file doesn't exist: skip this inclusion directory
            continue

        if norm_path in self._include_stack:
            base_path = self._get_last_include_file()
            raise _ConfigurationParseError(f'File `{base_path}`',
                                           f'Cannot recursively include file `{norm_path}`')

        self._include_stack.append(norm_path)

        # load raw content
        return typing.cast(_MapNode, _yaml_load_path(norm_path))

    if not self._ignore_include_not_found:
        base_path = self._get_last_include_file()
        raise _ConfigurationParseError(f'File `{base_path}`',
                                       f'Cannot include file `{yaml_path}`: file not found in inclusion directories')

    # not found, but we're ignoring missing inclusion files
    return None
421 # Returns a list of all the inclusion file paths as found in the
422 # inclusion node `include_node`.
def _get_include_paths(self, include_node: _MapNode) -> List[str]:
    # Returns the list of inclusion file paths as found in the
    # inclusion node `include_node`, which is either `None` (no
    # inclusions), a single string, or a list of strings.
    if include_node is None:
        # no `$include` property value
        return []

    if type(include_node) is str:
        # a single inclusion path
        return [typing.cast(str, include_node)]

    # a list of inclusion paths
    assert type(include_node) is list
    return typing.cast(List[str], include_node)
436 # Updates the node `base_node` with an overlay node `overlay_node`.
438 # Both the inclusion and field type node inheritance features use
439 # this update mechanism.
def _update_node(self, base_node: _MapNode, overlay_node: _MapNode):
    # Updates the node `base_node` with the overlay node
    # `overlay_node`.
    #
    # Both the inclusion and the field type node inheritance features
    # use this update mechanism.

    # Updates a barectf 3 `members` list. Although syntactically an
    # array, `members` is semantically an ordered map (like YAML's
    # `!!omap`): each item is a single-property object whose property
    # name is the member name. Matching members are merged instead of
    # naively appended.
    def update_members_node(base_value: List[Any], olay_value: List[Any]):
        for olay_item in olay_value:
            # assume we append `olay_item` to `base_value` initially
            append_olay_item = True

            if type(olay_item) is collections.OrderedDict:
                # overlay item is an object
                if len(olay_item) == 1:
                    # overlay object item contains a single property
                    olay_name = list(olay_item)[0]

                    # find corresponding base item
                    for base_item in base_value:
                        if type(base_item) is collections.OrderedDict:
                            # Fix: the original tested
                            # `len(olay_item) == 1` here, which is
                            # always true at this point (checked
                            # above); the *base* item must be tested so
                            # that only single-property base objects
                            # are treated as named members.
                            if len(base_item) == 1:
                                base_name = list(base_item)[0]

                                if olay_name == base_name:
                                    # Names match: update with the
                                    # usual node update mechanism.
                                    self._update_node(base_item, olay_item)

                                    # Do _not_ append `olay_item` to
                                    # `base_value`: we just updated the
                                    # corresponding base item.
                                    append_olay_item = False
                                    break

            if append_olay_item:
                base_value.append(copy.deepcopy(olay_item))

    for olay_key, olay_value in overlay_node.items():
        if olay_key in base_node:
            base_value = base_node[olay_key]

            if type(olay_value) is collections.OrderedDict and type(base_value) is collections.OrderedDict:
                # update maps recursively
                self._update_node(base_value, olay_value)
            elif type(olay_value) is list and type(base_value) is list:
                if olay_key == 'members' and self._major_version == 3:
                    # Semantically an ordered map, not a plain array:
                    # merge by member name (see update_members_node()
                    # above). As of this version of barectf, the only
                    # list-valued property which acts as an ordered map
                    # is named `members`, which is why checking
                    # `olay_key` suffices whatever the context.
                    update_members_node(base_value, olay_value)
                else:
                    # append extension array items to base items
                    base_value += copy.deepcopy(olay_value)
            else:
                # fall back to replacing the base property
                base_node[olay_key] = copy.deepcopy(olay_value)
        else:
            # set base property from overlay property
            base_node[olay_key] = copy.deepcopy(olay_value)
578 # Processes inclusions using `last_overlay_node` as the last overlay
579 # node to use to "patch" the node.
581 # If `last_overlay_node` contains an `$include` property, then this
582 # method patches the current base node (initially empty) in order
583 # using the content of the inclusion files (recursively).
585 # At the end, this method removes the `$include` property of
586 # `last_overlay_node` and then patches the current base node with
587 # its other properties before returning the result (always a deep
def _process_node_include(self, last_overlay_node: _MapNode,
                          process_base_include_cb: Callable[[_MapNode], _MapNode],
                          process_children_include_cb: Optional[Callable[[_MapNode], None]] = None) -> _MapNode:
    # Processes inclusions using `last_overlay_node` as the last
    # overlay node to use to "patch" the node.
    #
    # If `last_overlay_node` contains an `$include` property, then this
    # method patches the current base node (initially empty) in order
    # using the content of the inclusion files (recursively).
    #
    # At the end, this method removes the `$include` property of
    # `last_overlay_node` and then patches the current base node with
    # its other properties before returning the result.

    # process children inclusions first
    if process_children_include_cb is not None:
        process_children_include_cb(last_overlay_node)

    incl_prop_name = '$include'

    if incl_prop_name in last_overlay_node:
        include_node = last_overlay_node[incl_prop_name]
    else:
        # no inclusions: nothing to patch
        return last_overlay_node

    include_paths = self._get_include_paths(include_node)
    cur_base_path = self._get_last_include_file()
    base_node = None

    # keep the inclusion paths and remove the `$include` property
    include_paths = copy.deepcopy(include_paths)
    del last_overlay_node[incl_prop_name]

    for include_path in include_paths:
        # load raw YAML from included file
        overlay_node = self._load_include(include_path)

        if overlay_node is None:
            # Cannot find inclusion file, but we're ignoring those
            # errors, otherwise _load_include() itself raises a
            # configuration parsing error.
            continue

        # recursively process inclusions
        try:
            overlay_node = process_base_include_cb(overlay_node)
        except _ConfigurationParseError as exc:
            _append_error_ctx(exc, f'File `{cur_base_path}`')

        # pop inclusion stack now that we're done including
        del self._include_stack[-1]

        # At this point, `base_node` is fully resolved (does not
        # contain any `$include` property).
        if base_node is None:
            base_node = overlay_node
        else:
            self._update_node(base_node, overlay_node)

    # Finally, update the latest base node with our last overlay node.
    if base_node is None:
        # Nothing was included, which is possible when we're ignoring
        # inclusion errors.
        return last_overlay_node

    self._update_node(base_node, last_overlay_node)
    return base_node
648 # Generates pairs of member node and field type node property name
649 # (in the member node) for the structure field type node's members
def _struct_ft_member_fts_iter(self,
                               node: Union[List[_MapNode], _MapNode]) -> Iterable[Tuple[_MapNode, str]]:
    # Generates pairs of (member node, field type node property name
    # within that member node) for the structure field type members
    # node `node`.
    if type(node) is list:
        # barectf 3: `members` is a list of single-property maps
        assert self._major_version == 3
        node = typing.cast('List[_MapNode]', node)

        for member_node in node:
            assert type(member_node) is collections.OrderedDict
            member_node = typing.cast('_MapNode', member_node)
            name, val = list(member_node.items())[0]

            if type(val) is collections.OrderedDict:
                # NOTE(review): the body of this branch is elided in
                # this view; reconstructed so that a map-valued member
                # yields its inner `field-type` property — confirm
                # against the full file.
                member_node = val
                name = 'field-type'

            yield member_node, name
    else:
        # barectf 2: `fields` is a map of member name to field type
        assert self._major_version == 2
        assert type(node) is collections.OrderedDict
        node = typing.cast('_MapNode', node)

        for name in list(node):
            yield node, name
677 # Resolves the field type alias `key` in the node `parent_node`, as
678 # well as any nested field type aliases, using the aliases of the
679 # `ft_aliases_node` node.
681 # If `key` is not in `parent_node`, this method returns.
683 # This method can modify `ft_aliases_node` and `parent_node[key]`.
685 # `ctx_obj_name` is the context's object name when this method
686 # raises a `_ConfigurationParseError` exception.
def _resolve_ft_alias(self, ft_aliases_node: _MapNode, parent_node: _MapNode, key: str,
                      ctx_obj_name: str, alias_set: Optional[Set[str]] = None):
    # Resolves the field type alias `key` in the node `parent_node`,
    # as well as any nested field type aliases, using the aliases of
    # the `ft_aliases_node` node.
    #
    # If `key` is not in `parent_node`, this method returns.
    #
    # This method can modify `ft_aliases_node` and `parent_node[key]`.
    #
    # `ctx_obj_name` is the context's object name when this method
    # raises a `_ConfigurationParseError` exception.
    if key not in parent_node:
        return

    node = parent_node[key]

    if node is None:
        # some nodes can be null to use their default value
        return

    # This set holds all the field type aliases to be expanded,
    # recursively. This is used to detect cycles.
    if alias_set is None:
        alias_set = set()

    if type(node) is str:
        alias = node

        # Make sure this alias names an existing field type node, at
        # least.
        if alias not in ft_aliases_node:
            raise _ConfigurationParseError(ctx_obj_name,
                                           f'Field type alias `{alias}` does not exist')

        if alias not in self._resolved_ft_aliases:
            # Only check for a field type alias cycle when we didn't
            # resolve the alias yet, as a given node can refer to
            # the same field type alias more than once.
            if alias in alias_set:
                msg = f'Cycle detected during the `{alias}` field type alias resolution'
                raise _ConfigurationParseError(ctx_obj_name, msg)

            # Add `alias` to the set of encountered field type
            # aliases before calling self._resolve_ft_alias() to
            # detect cycles.
            alias_set.add(alias)
            self._resolve_ft_alias(ft_aliases_node, ft_aliases_node, alias, ctx_obj_name,
                                   alias_set)
            self._resolved_ft_aliases.add(alias)

        # replace alias with field type node copy
        parent_node[key] = copy.deepcopy(ft_aliases_node[alias])
        return

    # resolve nested field type aliases
    for pkey in self._ft_prop_names:
        self._resolve_ft_alias(ft_aliases_node, node, pkey, ctx_obj_name, alias_set)

    # Resolve field type aliases of structure field type node member
    # nodes.
    pkey = self._struct_ft_node_members_prop_name

    if pkey in node:
        for member_node, ft_prop_name in self._struct_ft_member_fts_iter(node[pkey]):
            self._resolve_ft_alias(ft_aliases_node, member_node, ft_prop_name,
                                   ctx_obj_name, alias_set)
747 # Like _resolve_ft_alias(), but builds a context object name for any
748 # `ctx_obj_name` exception.
# Like _resolve_ft_alias(), but builds the context object name from
# `key` for any raised `_ConfigurationParseError`.
def _resolve_ft_alias_from(self, ft_aliases_node: _MapNode, parent_node: _MapNode, key: str):
    self._resolve_ft_alias(ft_aliases_node, parent_node, key, f'`{key}` property')
752 # Applies field type node inheritance to the property `key` of
755 # `parent_node[key]`, if it exists, must not contain any field type
756 # alias (all field type objects are complete).
758 # This method can modify `parent[key]`.
760 # When this method returns, no field type node has an `$inherit` or
761 # `inherit` property.
def _apply_ft_inheritance(self, parent_node: _MapNode, key: str):
    # Applies field type node inheritance to the property `key` of
    # `parent_node`.
    #
    # `parent_node[key]`, if it exists, must not contain any field type
    # alias (all field type objects are complete).
    #
    # This method can modify `parent_node[key]`. When this method
    # returns, no field type node has an `$inherit` or `inherit`
    # property.
    if key not in parent_node:
        return

    node = parent_node[key]

    if node is None:
        # null node: default value, nothing to inherit
        return

    # process children first
    for pkey in self._ft_prop_names:
        self._apply_ft_inheritance(node, pkey)

    # Process the field types of structure field type node member
    # nodes.
    pkey = self._struct_ft_node_members_prop_name

    if pkey in node:
        for member_node, ft_prop_name in self._struct_ft_member_fts_iter(node[pkey]):
            self._apply_ft_inheritance(member_node, ft_prop_name)

    # apply inheritance for this node
    if 'inherit' in node:
        # barectf 2.1: `inherit` property was renamed to `$inherit`
        assert '$inherit' not in node
        node['$inherit'] = node['inherit']
        del node['inherit']

    inherit_key = '$inherit'

    if inherit_key in node:
        assert type(node[inherit_key]) is collections.OrderedDict

        # apply inheritance below first
        self._apply_ft_inheritance(node, inherit_key)

        # `node` is an overlay on the `$inherit` node
        base_node = node[inherit_key]
        del node[inherit_key]
        self._update_node(base_node, node)

        # set updated base node as this node
        parent_node[key] = base_node