return obj.to_public()
+# Pseudo object base class.
+#
+# A concrete pseudo object contains the same data as its public version,
+# but it's mutable.
+#
+# The to_public() method converts the pseudo object to an equivalent
+# public, immutable object, caching the result so as to always return
+# the same Python object.
class _PseudoObj:
def __init__(self):
self._public = None
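+    # A minimal sketch (hypothetical `metadata.Thing` class) of the
+    # caching pattern which a concrete to_public() method is expected
+    # to follow:
+    #
+    #     def to_public(self):
+    #         if self._public is None:
+    #             self._public = metadata.Thing(...)
+    #
+    #         return self._public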
# it would mean a programming or schema error.
class _RefResolver(jsonschema.RefResolver):
def resolve_remote(self, uri):
- # this must never happen: all our schemas are local
raise RuntimeError('Missing local schema with URI `{}`'.format(uri))
# `collections.OrderedDict` objects to `dict` objects so as to
# make any error message easier to read (because
# validator.validate() below uses str() for error messages, and
- # collections.OrderedDict.__str__() is bulky).
+ # collections.OrderedDict.__str__() returns a somewhat bulky
+ # representation).
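+        # (For example, the string representation of a one-entry
+        # ordered dictionary is `OrderedDict([('a', 1)])` rather than
+        # `{'a': 1}`.)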
validator.validate(self._dict_from_ordered_dict(instance))
# Validates `instance` using the schema having the short ID
# convert to barectf `_ConfigParseError` exception
contexts = ['Configuration object']
+ # Each element of the instance's absolute path is either an
+ # integer (array element's index) or a string (object
+ # property's name).
for elem in exc.absolute_path:
if type(elem) is int:
ctx = 'Element {}'.format(elem)
schema_ctx = ''
if len(exc.context) > 0:
+                # According to the documentation of
+                # `jsonschema.ValidationError.context`,
+                # this property contains a
+ #
+ # > list of errors from the subschemas
+ #
+ # This contains additional information about the
+ # validation failure which can help the user figure out
+ # what's wrong exactly.
+ #
+ # Join each message with `; ` and append this to our
+ # configuration parsing error's message.
msgs = '; '.join([e.message for e in exc.context])
schema_ctx = ': {}'.format(msgs)
raise new_exc
+# Converts the byte order string `bo_str` to a `metadata.ByteOrder`
+# enumerator.
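+#
+# For example, `_byte_order_str_to_bo('BE')` returns
+# `metadata.ByteOrder.BE`.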
def _byte_order_str_to_bo(bo_str):
bo_str = bo_str.lower()
return metadata.ByteOrder.BE
+# Converts the encoding string `encoding_str` to a `metadata.Encoding`
+# enumerator.
def _encoding_str_to_encoding(encoding_str):
encoding_str = encoding_str.lower()
return metadata.Encoding.NONE
+# Validates the TSDL identifier `iden`, raising a `_ConfigParseError`
+# exception using `ctx_obj_name` and `prop` to format the message if
+# it's invalid.
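+#
+# For example, `stream` is a TSDL keyword:
+# _validate_identifier('stream', ...) raises a `_ConfigParseError`
+# exception.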
def _validate_identifier(iden, ctx_obj_name, prop):
assert type(iden) is str
ctf_keywords = {
raise _ConfigParseError(ctx_obj_name, fmt.format(prop, iden))
+# Validates the alignment `align`, raising a `_ConfigParseError`
+# exception using `ctx_obj_name` if it's invalid.
def _validate_alignment(align, ctx_obj_name):
assert align >= 1
'Invalid alignment: {}'.format(align))
+# Appends the context having the object name `obj_name` and the
+# (optional) message `msg` to the `_ConfigParseError` exception `exc`
+# and then raises `exc` again.
def _append_error_ctx(exc, obj_name, msg=None):
exc.append_ctx(obj_name, msg)
raise
EVENT_PAYLOAD = 5
-# This validator validates the configured metadata for barectf specific
-# needs.
+# A validator which validates the configured metadata for
+# barectf-specific needs.
#
# barectf needs:
#
-# * All header/contexts are at least byte-aligned.
-# * No nested structures or arrays.
+# * The alignments of all header/context field types are at least 8.
+#
+# * There are no nested structure or array field types, except for
+#   the packet header field type's `uuid` field.
class _BarectfMetadataValidator:
def __init__(self):
self._type_to_validate_type_func = {
if t is None:
return
- # make sure entity is byte-aligned
+ # make sure root field type has a real alignment of at least 8
if t.real_align < 8:
raise _ConfigParseError('Root type',
'Alignment must be at least 8')
assert type(t) is _Struct
- # validate types
+ # validate field types
self._validate_type(t, True)
def _validate_entities_and_names(self, meta):
_append_error_ctx(exc, 'Stream `{}`'.format(stream_name))
def _validate_default_stream(self, meta):
- if meta.default_stream_name:
+ if meta.default_stream_name is not None:
if meta.default_stream_name not in meta.streams.keys():
fmt = 'Default stream name (`{}`) does not exist'
raise _ConfigParseError('barectf metadata',
self._validate_default_stream(meta)
-# This validator validates special fields of trace, stream, and event
+# A validator which validates special fields of trace, stream, and event
# types.
class _MetadataSpecialFieldsValidator:
+ # Validates the packet header field type `t`.
def _validate_trace_packet_header_type(self, t):
- # needs `stream_id` field?
+ # If there's more than one stream type, then the `stream_id`
+ # (stream type ID) field is required.
if len(self._meta.streams) > 1:
- # yes
if t is None:
raise _ConfigParseError('`packet-header-type` property',
'Need `stream_id` field (more than one stream), but trace packet header type is missing')
if t is None:
return
- # `magic` and `stream_id`
+ # The `magic` field type must be the first one.
+ #
+ # The `stream_id` field type's size (bits) must be large enough
+        # to accommodate any stream type ID.
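+        #
+        # For example (hypothetical numbers), a 4-bit `stream_id`
+        # field type can only accommodate 16 (1 << 4) stream types.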
for i, (field_name, field_type) in enumerate(t.fields.items()):
if field_name == 'magic':
if i != 0:
raise _ConfigParseError('`packet-header-type` property',
                                            '`magic` field must be the first field of the trace packet header type')
elif field_name == 'stream_id':
- # `id` size can fit all event IDs
if len(self._meta.streams) > (1 << field_type.size):
raise _ConfigParseError('`packet-header-type` property',
                                            '`stream_id` field\'s size is too small for the number of trace streams')
+ # Validates the trace type of the metadata object `meta`.
def _validate_trace(self, meta):
self._validate_trace_packet_header_type(meta.trace.packet_header_type)
+ # Validates the packet context field type of the stream type
+ # `stream`.
def _validate_stream_packet_context(self, stream):
t = stream.packet_context_type
assert t is not None
- # `timestamp_begin` and `timestamp_end`
+ # The `timestamp_begin` and `timestamp_end` field types must be
+ # mapped to the `value` property of the same clock.
ts_begin = t.fields.get('timestamp_begin')
ts_end = t.fields.get('timestamp_end')
- # `timestamp_begin` and `timestamp_end` are mapped to the same clock
if ts_begin is not None and ts_end is not None:
if ts_begin.property_mappings[0].object.name != ts_end.property_mappings[0].object.name:
raise _ConfigParseError('`timestamp_begin` and `timestamp_end` fields must be mapped to the same clock object in stream packet context type')
- # `packet_size` size must be greater than or equal to `content_size` size
+ # The `packet_size` field type's size must be greater than or
+ # equal to the `content_size` field type's size.
if t.fields['content_size'].size > t.fields['packet_size'].size:
raise _ConfigParseError('`packet-context-type` property',
                                    '`content_size` field size must be less than or equal to `packet_size` field size')
+ # Validates the event header field type of the stream type `stream`.
def _validate_stream_event_header(self, stream):
t = stream.event_header_type
- # needs `id` field?
+ # If there's more than one event type, then the `id` (event type
+ # ID) field is required.
if len(stream.events) > 1:
- # yes
if t is None:
raise _ConfigParseError('`event-header-type` property',
'Need `id` field (more than one event), but stream event header type is missing')
if t is None:
return
- # `id`
+ # The `id` field type's size (bits) must be large enough to
+        # accommodate any event type ID.
eid = t.fields.get('id')
if eid is not None:
- # `id` size can fit all event IDs
if len(stream.events) > (1 << eid.size):
raise _ConfigParseError('`event-header-type` property',
                                            '`id` field\'s size is too small for the number of stream events')
+ # Validates the stream type `stream`.
def _validate_stream(self, stream):
self._validate_stream_packet_context(stream)
self._validate_stream_event_header(stream)
+ # Validates the trace and stream types of the metadata object
+ # `meta`.
def validate(self, meta):
self._meta = meta
self._validate_trace(meta)
_append_error_ctx(exc, 'Stream `{}`'.format(stream.name), 'Invalid')
+# A barectf YAML configuration parser.
+#
+# Build such a parser and then call parse() to get the resulting
+# `config.Config` object.
+#
+# See the comments of parse() for more implementation details about the
+# parsing stages and general strategy.
class _YamlConfigParser:
+ # Builds a barectf YAML configuration parser which considers the
+ # inclusion directories `include_dirs`, ignores nonexistent
+ # inclusion files if `ignore_include_not_found` is `True`, and dumps
+ # the effective configuration (as YAML) if `dump_config` is `True`.
def __init__(self, include_dirs, ignore_include_not_found, dump_config):
self._class_name_to_create_type_func = {
'int': self._create_integer,
self._dump_config = dump_config
self._schema_validator = _SchemaValidator()
+ # Sets the default byte order as found in the `metadata_node` node.
def _set_byte_order(self, metadata_node):
self._bo = _byte_order_str_to_bo(metadata_node['trace']['byte-order'])
assert self._bo is not None
+ # Sets the clock value property mapping of the pseudo integer field
+ # type object `int_obj` as found in the `prop_mapping_node` node.
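+    #
+    # A property mapping node is expected to look something like this
+    # (hypothetical clock type name):
+    #
+    #     type: clock
+    #     name: my_clock
+    #     property: value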
def _set_int_clock_prop_mapping(self, int_obj, prop_mapping_node):
clock_name = prop_mapping_node['name']
clock = self._clocks.get(clock_name)
prop_mapping.prop = 'value'
int_obj.property_mappings.append(prop_mapping)
+ # Creates a pseudo integer field type from the node `node` and
+ # returns it.
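+    #
+    # For example (hypothetical values), an integer field type node
+    # can look like this:
+    #
+    #     class: int
+    #     size: 32
+    #     align: 8
+    #     signed: false
+    #     byte-order: le
+    #     base: hex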
def _create_integer(self, node):
obj = _Integer()
-
- # size
obj.size = node['size']
-
- # align
align_node = node.get('align')
if align_node is not None:
_validate_alignment(align_node, 'Integer type')
obj.align = align_node
- # signed
signed_node = node.get('signed')
if signed_node is not None:
obj.signed = signed_node
- # byte order
obj.byte_order = self._bo
bo_node = node.get('byte-order')
if bo_node is not None:
obj.byte_order = _byte_order_str_to_bo(bo_node)
- # base
base_node = node.get('base')
if base_node is not None:
assert base_node == 'hex'
obj.base = 16
- # encoding
encoding_node = node.get('encoding')
if encoding_node is not None:
obj.encoding = _encoding_str_to_encoding(encoding_node)
- # property mappings
pm_node = node.get('property-mappings')
if pm_node is not None:
return obj
+ # Creates a pseudo floating point number field type from the node
+ # `node` and returns it.
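+    #
+    # For example (hypothetical values), a floating point number field
+    # type node can look like this:
+    #
+    #     class: float
+    #     size:
+    #       exp: 8
+    #       mant: 24
+    #     align: 32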
def _create_float(self, node):
obj = _FloatingPoint()
-
- # size
size_node = node['size']
obj.exp_size = size_node['exp']
obj.mant_size = size_node['mant']
-
- # align
align_node = node.get('align')
if align_node is not None:
_validate_alignment(align_node, 'Floating point number type')
obj.align = align_node
- # byte order
obj.byte_order = self._bo
bo_node = node.get('byte-order')
return obj
+ # Creates a pseudo enumeration field type from the node `node` and
+ # returns it.
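+    #
+    # For example (hypothetical member names), an enumeration field
+    # type node is expected to look something like this, where a
+    # member's value can be a `[min, max]` range and a missing value
+    # continues from the previous member's maximum value:
+    #
+    #     class: enum
+    #     value-type:
+    #       class: int
+    #       size: 8
+    #     members:
+    #       - LOW
+    #       - label: HIGH
+    #         value: [5, 99]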
def _create_enum(self, node):
obj = _Enum()
- # value type
+ # value (integer) field type
try:
obj.value_type = self._create_type(node['value-type'])
except _ConfigParseError as exc:
value = (mn, mx)
cur = mx + 1
+                    # Make sure that all the integral values of this
+                    # range fit the range of the enumeration field
+                    # type's integer value field type, given its size
+                    # (bits).
name_fmt = 'Enumeration type\'s member `{}`'
msg_fmt = 'Value {} is outside the value type range [{}, {}]'
return obj
+ # Creates a pseudo string field type from the node `node` and
+ # returns it.
def _create_string(self, node):
obj = _String()
-
- # encoding
encoding_node = node.get('encoding')
if encoding_node is not None:
return obj
+ # Creates a pseudo structure field type from the node `node` and
+ # returns it.
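+    #
+    # For example (hypothetical field names), a structure field type
+    # node can look like this:
+    #
+    #     class: struct
+    #     min-align: 8
+    #     fields:
+    #       msg_id:
+    #         class: int
+    #         size: 8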
def _create_struct(self, node):
obj = _Struct()
-
- # minimum alignment
min_align_node = node.get('min-align')
if min_align_node is not None:
_validate_alignment(min_align_node, 'Structure type')
obj.min_align = min_align_node
- # fields
fields_node = node.get('fields')
if fields_node is not None:
return obj
+ # Creates a pseudo array field type from the node `node` and returns
+ # it.
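+    #
+    # For example, an array field type node can look like this:
+    #
+    #     class: array
+    #     length: 4
+    #     element-type:
+    #       class: int
+    #       size: 16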
def _create_array(self, node):
obj = _Array()
-
- # length
obj.length = node['length']
- # element type
try:
obj.element_type = self._create_type(node['element-type'])
except _ConfigParseError as exc:
return obj
+ # Creates a pseudo field type from the node `node` and returns it.
+ #
+ # This method checks the `class` property of `node` to determine
+ # which function of `self._class_name_to_create_type_func` to call
+ # to create the corresponding pseudo field type.
def _create_type(self, type_node):
return self._class_name_to_create_type_func[type_node['class']](type_node)
+ # Creates a pseudo clock type from the node `node` and returns it.
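+    #
+    # For example (hypothetical values), a clock type node can look
+    # like this:
+    #
+    #     freq: 1000000000
+    #     offset:
+    #       seconds: 1434072888
+    #     $return-ctype: uint64_t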
def _create_clock(self, node):
- # create clock object
clock = _Clock()
-
- # UUID
uuid_node = node.get('uuid')
if uuid_node is not None:
except:
raise _ConfigParseError('Clock', 'Malformed UUID: `{}`'.format(uuid_node))
- # description
descr_node = node.get('description')
if descr_node is not None:
clock.description = descr_node
- # frequency
freq_node = node.get('freq')
if freq_node is not None:
clock.freq = freq_node
- # error cycles
error_cycles_node = node.get('error-cycles')
if error_cycles_node is not None:
clock.error_cycles = error_cycles_node
- # offset
offset_node = node.get('offset')
if offset_node is not None:
- # cycles
offset_cycles_node = offset_node.get('cycles')
if offset_cycles_node is not None:
clock.offset_cycles = offset_cycles_node
- # seconds
offset_seconds_node = offset_node.get('seconds')
if offset_seconds_node is not None:
clock.offset_seconds = offset_seconds_node
- # absolute
absolute_node = node.get('absolute')
if absolute_node is not None:
return_ctype_node = node.get('$return-ctype')
if return_ctype_node is None:
+ # barectf 2.1: `return-ctype` property was renamed to
+ # `$return-ctype`
return_ctype_node = node.get('return-ctype')
if return_ctype_node is not None:
return clock
+ # Registers all the clock types of the metadata node
+ # `metadata_node`, creating pseudo clock types during the process,
+ # within this parser.
+ #
+ # The pseudo clock types in `self._clocks` are then accessible when
+ # creating a pseudo integer field type (see _create_integer() and
+ # _set_int_clock_prop_mapping()).
def _register_clocks(self, metadata_node):
self._clocks = collections.OrderedDict()
clocks_node = metadata_node.get('clocks')
clock.name = clock_name
self._clocks[clock_name] = clock
+ # Creates an environment object (`collections.OrderedDict`) from the
+ # metadata node `metadata_node` and returns it.
def _create_env(self, metadata_node):
env_node = metadata_node.get('env')
return copy.deepcopy(env_node)
+ # Creates a pseudo trace type from the metadata node `metadata_node`
+ # and returns it.
def _create_trace(self, metadata_node):
- # create trace object
trace = _Trace()
-
trace_node = metadata_node['trace']
-
- # set byte order (already parsed)
trace.byte_order = self._bo
-
- # UUID
uuid_node = trace_node.get('uuid')
if uuid_node is not None:
+ # The `uuid` property of the trace type node can be `auto`
+ # to make barectf generate a UUID.
if uuid_node == 'auto':
trace.uuid = uuid.uuid1()
else:
raise _ConfigParseError('Trace',
'Malformed UUID: `{}`'.format(uuid_node))
- # packet header type
pht_node = trace_node.get('packet-header-type')
if pht_node is not None:
return trace
+ # Creates a pseudo event type from the event node `event_node` and
+ # returns it.
def _create_event(self, event_node):
- # create event object
event = _Event()
-
log_level_node = event_node.get('log-level')
if log_level_node is not None:
return event
+ # Creates a pseudo stream type named `stream_name` from the stream
+ # node `stream_node` and returns it.
def _create_stream(self, stream_name, stream_node):
- # create stream object
stream = _Stream()
-
pct_node = stream_node.get('packet-context-type')
if pct_node is not None:
return stream
+    # Creates, from the metadata node `metadata_node`, a
+    # `collections.OrderedDict` object which maps stream type names to
+    # pseudo stream types, and returns it.
def _create_streams(self, metadata_node):
streams = collections.OrderedDict()
streams_node = metadata_node['streams']
return streams
+ # Creates a pseudo metadata object from the configuration node
+ # `root` and returns it.
def _create_metadata(self, root):
self._meta = _Metadata()
metadata_node = root['metadata']
self._meta.trace = self._create_trace(metadata_node)
self._meta.streams = self._create_streams(metadata_node)
- # validate metadata
+ # validate the pseudo metadata object
try:
_MetadataSpecialFieldsValidator().validate(self._meta)
except _ConfigParseError as exc:
return self._meta
+ # Gets and validates the tracing prefix as found in the
+ # configuration node `config_node` and returns it.
def _get_prefix(self, config_node):
prefix = config_node.get('prefix', 'barectf_')
_validate_identifier(prefix, '`prefix` property', 'prefix')
return prefix
+ # Gets the options as found in the configuration node `config_node`
+ # and returns a corresponding `config.ConfigOptions` object.
def _get_options(self, config_node):
gen_prefix_def = False
gen_default_stream_def = False
return config.ConfigOptions(gen_prefix_def, gen_default_stream_def)
+ # Returns the last included file name from the parser's inclusion
+ # file name stack.
def _get_last_include_file(self):
if self._include_stack:
return self._include_stack[-1]
return self._root_yaml_path
+ # Loads the inclusion file having the path `yaml_path` and returns
+ # its content as a `collections.OrderedDict` object.
def _load_include(self, yaml_path):
for inc_dir in self._include_dirs:
# Current inclusion dir + file name path.
        raise _ConfigParseError('In `{}`'.format(self._get_last_include_file()),
                                'Cannot include file `{}`: file not found in include directories'.format(yaml_path))
-
+ # Returns a list of all the inclusion file paths as found in the
+ # inclusion node `include_node`.
def _get_include_paths(self, include_node):
if include_node is None:
# none
assert type(include_node) is list
return include_node
+ # Updates the node `base_node` with an overlay node `overlay_node`.
+ #
+ # Both the inclusion and field type inheritance features use this
+ # update mechanism.
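+    #
+    # For example (hypothetical properties), updating the base node
+    #
+    #     size: 16
+    #     align: 8
+    #     property-mappings:
+    #       - a
+    #
+    # with the overlay node
+    #
+    #     size: 32
+    #     property-mappings:
+    #       - b
+    #
+    # results in
+    #
+    #     size: 32
+    #     align: 8
+    #     property-mappings:
+    #       - a
+    #       - b
+    #
+    # because scalar properties are replaced, list properties are
+    # appended to, and missing base properties are created.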
def _update_node(self, base_node, overlay_node):
for olay_key, olay_value in overlay_node.items():
if olay_key in base_node:
base_value = base_node[olay_key]
if type(olay_value) is collections.OrderedDict and type(base_value) is collections.OrderedDict:
- # merge dictionaries
+ # merge both objects
self._update_node(base_value, olay_value)
elif type(olay_value) is list and type(base_value) is list:
# append extension array items to base items
base_value += olay_value
else:
- # fall back to replacing
+ # fall back to replacing base property
base_node[olay_key] = olay_value
else:
+ # set base property from overlay property
base_node[olay_key] = olay_value
+    # Processes inclusions, using `last_overlay_node` as the last
+    # overlay node with which to patch the current base node.
+ #
+ # If `last_overlay_node` contains an `$include` property, then this
+ # method patches the current base node (initially empty) in order
+ # using the content of the inclusion files (recursively).
+ #
+ # At the end, this method removes the `$include` of
+ # `last_overlay_node` and then patches the current base node with
+ # its other properties before returning the result (always a deep
+ # copy).
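+    #
+    # For example (hypothetical file names), if `last_overlay_node` is
+    #
+    #     $include:
+    #       - base.yaml
+    #       - more.yaml
+    #     size: 32
+    #
+    # then this method patches an (initially empty) base node with the
+    # effective contents of `base.yaml`, then of `more.yaml`, and
+    # finally with the `size: 32` property.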
def _process_node_include(self, last_overlay_node,
process_base_include_cb,
process_children_include_cb=None):
self._update_node(base_node, last_overlay_node)
return base_node
+    # Processes the inclusions of the event type node `event_node`,
+    # returning the effective node.
def _process_event_include(self, event_node):
- # Make sure the event object is valid for the inclusion
+ # Make sure the event type node is valid for the inclusion
# processing stage.
self._schema_validator.validate(event_node,
'2/config/event-pre-include')
return self._process_node_include(event_node,
self._process_event_include)
+    # Processes the inclusions of the stream type node `stream_node`,
+    # returning the effective node.
def _process_stream_include(self, stream_node):
def process_children_include(stream_node):
if 'events' in stream_node:
for key in list(events_node):
events_node[key] = self._process_event_include(events_node[key])
- # Make sure the stream object is valid for the inclusion
+ # Make sure the stream type node is valid for the inclusion
# processing stage.
self._schema_validator.validate(stream_node,
'2/config/stream-pre-include')
self._process_stream_include,
process_children_include)
+    # Processes the inclusions of the trace type node `trace_node`,
+    # returning the effective node.
def _process_trace_include(self, trace_node):
- # Make sure the trace object is valid for the inclusion
+ # Make sure the trace type node is valid for the inclusion
# processing stage.
self._schema_validator.validate(trace_node,
'2/config/trace-pre-include')
return self._process_node_include(trace_node,
self._process_trace_include)
+    # Processes the inclusions of the clock type node `clock_node`,
+    # returning the effective node.
def _process_clock_include(self, clock_node):
- # Make sure the clock object is valid for the inclusion
+ # Make sure the clock type node is valid for the inclusion
# processing stage.
self._schema_validator.validate(clock_node,
'2/config/clock-pre-include')
return self._process_node_include(clock_node,
self._process_clock_include)
+    # Processes the inclusions of the metadata node `metadata_node`,
+    # returning the effective node.
def _process_metadata_include(self, metadata_node):
def process_children_include(metadata_node):
if 'trace' in metadata_node:
for key in list(streams_node):
streams_node[key] = self._process_stream_include(streams_node[key])
- # Make sure the metadata object is valid for the inclusion
+ # Make sure the metadata node is valid for the inclusion
# processing stage.
self._schema_validator.validate(metadata_node,
'2/config/metadata-pre-include')
self._process_metadata_include,
process_children_include)
+    # Processes the inclusions of the configuration node `config_node`,
+    # returning the effective node.
def _process_config_includes(self, config_node):
# Process inclusions in this order:
#
- # 1. Clock object, event objects, and trace objects (the order
- # between those is not important).
+ # 1. Clock type node, event type nodes, and trace type nodes
+ # (the order between those is not important).
#
- # 2. Stream objects.
+ # 2. Stream type nodes.
#
- # 3. Metadata object.
+ # 3. Metadata node.
#
# This is because:
#
- # * A metadata object can include clock objects, a trace object,
- # stream objects, and event objects (indirectly).
+ # * A metadata node can include clock type nodes, a trace type
+ # node, stream type nodes, and event type nodes (indirectly).
#
- # * A stream object can include event objects.
+ # * A stream type node can include event type nodes.
#
# We keep a stack of absolute paths to included files
# (`self._include_stack`) to detect recursion.
self._schema_validator.validate(config_node,
'2/config/config-pre-include')
- # Process metadata object inclusions.
+ # Process metadata node inclusions.
#
# self._process_metadata_include() returns a new (or the same)
- # metadata object without any `$include` property in it,
+ # metadata node without any `$include` property in it,
# recursively.
config_node['metadata'] = self._process_metadata_include(config_node['metadata'])
return config_node
+ # Expands the field type aliases found in the metadata node
+ # `metadata_node` using the aliases of the `type_aliases_node` node.
+ #
+ # This method modifies `metadata_node`.
+ #
+ # When this method returns:
+ #
+ # * Any field type alias is replaced with its full field type
+ # equivalent.
+ #
+ # * The `type-aliases` property of `metadata_node` is removed.
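+    #
+    # For example (hypothetical alias name), with
+    #
+    #     type-aliases:
+    #       my-int:
+    #         class: int
+    #         size: 32
+    #
+    # a field type node which refers to the `my-int` alias by name is
+    # replaced with the full `class: int`/`size: 32` field type node.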
def _expand_field_type_aliases(self, metadata_node, type_aliases_node):
def resolve_field_type_aliases(parent_node, key, from_descr,
alias_set=None):
# set of resolved field type aliases
resolved_aliases = set()
- # expand field type aliases within trace, streams, and events now
+ # Expand field type aliases within trace, stream, and event
+ # types now.
resolve_field_type_aliases_from(metadata_node['trace'],
'packet-header-type', 'trace')
try:
for event_name, event in stream['events'].items():
- resolve_field_type_aliases_from(event, 'context-type', 'event',
- event_name)
- resolve_field_type_aliases_from(event, 'payload-type', 'event',
- event_name)
+ resolve_field_type_aliases_from(event, 'context-type',
+ 'event', event_name)
+ resolve_field_type_aliases_from(event, 'payload-type',
+ 'event', event_name)
except _ConfigParseError as exc:
_append_error_ctx(exc, 'Stream `{}`'.format(stream_name))
- # we don't need the `type-aliases` node anymore
+ # remove the (now unneeded) `type-aliases` node
del metadata_node['type-aliases']
+ # Applies field type inheritance to all field types found in
+ # `metadata_node`.
+ #
+ # This method modifies `metadata_node`.
+ #
+ # When this method returns, no field type node has an `$inherit` or
+ # `inherit` property.
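+    #
+    # For example (hypothetical alias name), a field type node such as
+    #
+    #     $inherit: base-int
+    #     size: 64
+    #
+    # becomes the full `base-int` field type node patched (see
+    # _update_node()) with the `size: 64` property.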
def _expand_field_type_inheritance(self, metadata_node):
def apply_inheritance(parent_node, key):
if key not in parent_node:
apply_inheritance(event, 'context-type')
apply_inheritance(event, 'payload-type')
+ # Calls _expand_field_type_aliases() and
+ # _expand_field_type_inheritance() if the metadata node
+ # `metadata_node` has a `type-aliases` property.
def _expand_field_types(self, metadata_node):
type_aliases_node = metadata_node.get('type-aliases')
# next, apply inheritance to create effective field types
self._expand_field_type_inheritance(metadata_node)
+ # Replaces the textual log levels in event type nodes of the
+ # metadata node `metadata_node` with their numeric equivalent (as
+ # found in the `$log-levels` or `log-levels` node of
+ # `metadata_node`).
+ #
+ # This method modifies `metadata_node`.
+ #
+    # When this method returns, the `$log-levels` (or `log-levels`)
+    # property of `metadata_node` is removed.
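+    #
+    # For example (hypothetical log level name), with
+    #
+    #     $log-levels:
+    #       error: 14
+    #
+    # an event type node's `log-level: error` property becomes
+    # `log-level: 14`.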
def _expand_log_levels(self, metadata_node):
if 'log-levels' in metadata_node:
- # barectf 2.1: `log-levels` property was renamed to `$log-levels`
+ # barectf 2.1: `log-levels` property was renamed to
+ # `$log-levels`
assert '$log-levels' not in node
node['$log-levels'] = node['log-levels']
del node['log-levels']
except _ConfigParseError as exc:
_append_error_ctx(exc, 'Stream `{}`'.format(stream_name))
+ # Dumps the node `node` as YAML, passing `kwds` to yaml.dump().
def _yaml_ordered_dump(self, node, **kwds):
class ODumper(yaml.Dumper):
pass
# Python -> YAML
return yaml.dump(node, Dumper=ODumper, **kwds)
+ # Loads the content of the YAML file having the path `yaml_path` as
+ # a Python object.
+ #
+ # All YAML maps are loaded as `collections.OrderedDict` objects.
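+    #
+    # A typical way to achieve this (a sketch; the actual constructor
+    # registration may differ) is:
+    #
+    #     def construct_mapping(loader, node):
+    #         loader.flatten_mapping(node)
+    #         return collections.OrderedDict(loader.construct_pairs(node))
+    #
+    #     OLoader.add_constructor('tag:yaml.org,2002:map',
+    #                             construct_mapping)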
def _yaml_ordered_load(self, yaml_path):
class OLoader(yaml.Loader):
pass
return node
+    # Resets the parser's state before parsing a new configuration.
def _reset(self):
self._version = None
self._include_stack = []
# After this step, the `type-aliases` property of the
# `metadata` node is gone.
#
- # 2. Applies inheritance following the `$inherit`/`inherit`
+ # 2. Applies inheritance, following the `$inherit`/`inherit`
# properties.
#
# After this step, field type objects do not contain