1 # The MIT License (MIT)
3 # Copyright (c) 2015-2020 Philippe Proulx <pproulx@efficios.com>
5 # Permission is hereby granted, free of charge, to any person obtaining
6 # a copy of this software and associated documentation files (the
7 # "Software"), to deal in the Software without restriction, including
8 # without limitation the rights to use, copy, modify, merge, publish,
9 # distribute, sublicense, and/or sell copies of the Software, and to
10 # permit persons to whom the Software is furnished to do so, subject to
11 # the following conditions:
13 # The above copyright notice and this permission notice shall be
14 # included in all copies or substantial portions of the Software.
16 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
17 # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
19 # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
20 # CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 # TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
24 from barectf
import metadata
25 from barectf
import config
37 # The context of a configuration parsing error.
39 # Such a context object has a name and, optionally, a message.
# NOTE(review): extraction garbled this block (stray original line
# numbers are fused into the text and interior lines are elided);
# code left byte-identical.
#
# A configuration parsing error context: a context object name plus an
# optional message (see the comment above this class).
40 class _ConfigParseErrorCtx
:
# `name`: context object name; `msg`: optional detail message.
# The constructor body (and the `name`/`msg` accessors) are elided by
# extraction — presumably they store the arguments; TODO confirm
# against the original file.
41 def __init__(self
, name
, msg
=None):
54 # Appends the context having the object name `obj_name` and the
55 # (optional) message `msg` to the `_ConfigParseError` exception `exc`
56 # and then raises `exc` again.
57 def _append_error_ctx(exc
, obj_name
, msg
=None):
58 exc
.append_ctx(obj_name
, msg
)
# A configuration parsing error.
#
# Such an error object contains a list of contexts (`ctx` property).
#
# The first context of this list is the most specific context, while
# the last is the more general.
#
# Use append_ctx() to append a context to an existing configuration
# parsing error when you catch it before raising it again. You can use
# _append_error_ctx() to do exactly this in a single call.
class _ConfigParseError(RuntimeError):
    def __init__(self, init_ctx_name, init_ctx_msg=None):
        # Context list must exist before the first append_ctx() call
        # below: the visible code used `self._ctx` in append_ctx()
        # without ever initializing it, which would raise
        # `AttributeError` on construction.
        self._ctx = []
        self.append_ctx(init_ctx_name, init_ctx_msg)

    @property
    def ctx(self):
        # Contexts, most specific first (promised by the class
        # comment above).
        return self._ctx

    def append_ctx(self, name, msg=None):
        # Appends a new, more general context level.
        self._ctx.append(_ConfigParseErrorCtx(name, msg))
85 def _opt_to_public(obj
):
89 return obj
.to_public()
92 # Pseudo object base class.
94 # A concrete pseudo object contains the same data as its public version,
97 # The to_public() method converts the pseudo object to an equivalent
98 # public, immutable object, caching the result so as to always return
99 # the same Python object.
# NOTE(review): the `class _PseudoObj:` header, its __init__(), and
# the `def to_public(self):` line were elided by extraction; the
# fragment below is to_public()'s cache check and fill.
105 if self
._public
is None:
106 self
._public
= self
._to
_public
()
# Conversion hook: concrete subclasses must override this to build
# the corresponding public object.
110 def _to_public(self
):
111 raise NotImplementedError
# Pseudo property mapping: links an integer field type to a clock
# type's property. `self.object` is the pseudo clock type and
# `self.prop` the property name — both assigned by the parser (see
# _set_int_clock_prop_mapping() later in this file). The __init__()
# of this class is elided by extraction.
114 class _PropertyMapping(_PseudoObj
):
# Builds the public, immutable `metadata.PropertyMapping`.
120 def _to_public(self
):
121 return metadata
.PropertyMapping(self
.object.to_public(), self
.prop
)
124 class _Integer(_PseudoObj
):
128 self
.byte_order
= None
132 self
.encoding
= metadata
.Encoding
.NONE
133 self
.property_mappings
= []
136 def real_align(self
):
137 if self
.align
is None:
138 if self
.size
% 8 == 0:
145 def _to_public(self
):
146 prop_mappings
= [pm
.to_public() for pm
in self
.property_mappings
]
147 return metadata
.Integer(self
.size
, self
.byte_order
, self
.align
,
148 self
.signed
, self
.base
, self
.encoding
,
152 class _FloatingPoint(_PseudoObj
):
156 self
.mant_size
= None
157 self
.byte_order
= None
161 def real_align(self
):
164 def _to_public(self
):
165 return metadata
.FloatingPoint(self
.exp_size
, self
.mant_size
,
166 self
.byte_order
, self
.align
)
169 class _Enum(_PseudoObj
):
172 self
.value_type
= None
173 self
.members
= collections
.OrderedDict()
176 def real_align(self
):
177 return self
.value_type
.real_align
179 def _to_public(self
):
180 return metadata
.Enum(self
.value_type
.to_public(), self
.members
)
# Pseudo string field type (defaults to UTF-8 encoding).
183 class _String(_PseudoObj
):
# Fragment of __init__() (its `def` line was elided by extraction):
# the default encoding is UTF-8.
186 self
.encoding
= metadata
.Encoding
.UTF8
# Effective alignment (body elided by extraction; presumably the
# fixed 8-bit alignment of a CTF string, and presumably decorated
# with @property like the other real_align accessors — TODO confirm).
189 def real_align(self
):
# Builds the public, immutable `metadata.String`.
192 def _to_public(self
):
193 return metadata
.String(self
.encoding
)
196 class _Array(_PseudoObj
):
199 self
.element_type
= None
203 def real_align(self
):
204 return self
.element_type
.real_align
206 def _to_public(self
):
207 return metadata
.Array(self
.element_type
.to_public(), self
.length
)
210 class _Struct(_PseudoObj
):
214 self
.fields
= collections
.OrderedDict()
217 def real_align(self
):
218 align
= self
.min_align
220 for pseudo_field
in self
.fields
.values():
221 if pseudo_field
.real_align
> align
:
222 align
= pseudo_field
.real_align
226 def _to_public(self
):
229 for name
, pseudo_field
in self
.fields
.items():
230 fields
.append((name
, pseudo_field
.to_public()))
232 return metadata
.Struct(self
.min_align
, collections
.OrderedDict(fields
))
# Pseudo trace type.
235 class _Trace(_PseudoObj
):
# Fragment of __init__() (its `def` line, and presumably a
# `self.uuid` default, are elided by extraction — TODO confirm):
# optional attributes default to None.
238 self
.byte_order
= None
240 self
.packet_header_type
= None
# Builds the public `metadata.Trace`; the packet header field type is
# optional, hence _opt_to_public().
242 def _to_public(self
):
243 return metadata
.Trace(self
.byte_order
, self
.uuid
,
244 _opt_to_public(self
.packet_header_type
))
247 class _Clock(_PseudoObj
):
252 self
.description
= None
254 self
.error_cycles
= 0
255 self
.offset_seconds
= 0
256 self
.offset_cycles
= 0
257 self
.absolute
= False
258 self
.return_ctype
= 'uint32_t'
260 def _to_public(self
):
261 return metadata
.Clock(self
.name
, self
.uuid
, self
.description
, self
.freq
,
262 self
.error_cycles
, self
.offset_seconds
,
263 self
.offset_cycles
, self
.absolute
,
267 class _Event(_PseudoObj
):
272 self
.log_level
= None
273 self
.payload_type
= None
274 self
.context_type
= None
276 def _to_public(self
):
277 return metadata
.Event(self
.id, self
.name
, self
.log_level
,
278 _opt_to_public(self
.payload_type
),
279 _opt_to_public(self
.context_type
))
282 class _Stream(_PseudoObj
):
287 self
.packet_context_type
= None
288 self
.event_header_type
= None
289 self
.event_context_type
= None
290 self
.events
= collections
.OrderedDict()
292 def is_event_empty(self
, event
):
295 if self
.event_header_type
is not None:
296 total_fields
+= len(self
.event_header_type
.fields
)
298 if self
.event_context_type
is not None:
299 total_fields
+= len(self
.event_context_type
.fields
)
301 if event
.context_type
is not None:
302 total_fields
+= len(event
.context_type
.fields
)
304 if event
.payload_type
is not None:
305 total_fields
+= len(event
.payload_type
.fields
)
307 return total_fields
== 0
309 def _to_public(self
):
312 for name
, pseudo_ev
in self
.events
.items():
313 events
.append((name
, pseudo_ev
.to_public()))
315 return metadata
.Stream(self
.id, self
.name
,
316 _opt_to_public(self
.packet_context_type
),
317 _opt_to_public(self
.event_header_type
),
318 _opt_to_public(self
.event_context_type
),
319 collections
.OrderedDict(events
))
322 class _Metadata(_PseudoObj
):
329 self
.default_stream_name
= None
331 def _to_public(self
):
334 for name
, pseudo_clock
in self
.clocks
.items():
335 clocks
.append((name
, pseudo_clock
.to_public()))
339 for name
, pseudo_stream
in self
.streams
.items():
340 streams
.append((name
, pseudo_stream
.to_public()))
342 return metadata
.Metadata(self
.trace
.to_public(), self
.env
,
343 collections
.OrderedDict(clocks
),
344 collections
.OrderedDict(streams
),
345 self
.default_stream_name
)
# This JSON schema reference resolver only serves to detect when it
# needs to resolve a remote URI.
#
# This must never happen in barectf because all our schemas are local;
# it would mean a programming or schema error.
class _RefResolver(jsonschema.RefResolver):
    def resolve_remote(self, uri):
        # Every schema is preloaded into a local store by
        # `_SchemaValidator`, so any remote resolution request is a
        # bug in barectf or in a schema, not a user error.
        raise RuntimeError(f'Missing local schema with URI `{uri}`')
358 # Schema validator which considers all the schemas found in the barectf
359 # package's `schemas` directory.
361 # The only public method is validate() which accepts an instance to
362 # validate as well as a schema short ID.
363 class _SchemaValidator
:
365 subdirs
= ['config', os
.path
.join('2', 'config')]
366 schemas_dir
= pkg_resources
.resource_filename(__name__
, 'schemas')
369 for subdir
in subdirs
:
370 dir = os
.path
.join(schemas_dir
, subdir
)
372 for file_name
in os
.listdir(dir):
373 if not file_name
.endswith('.yaml'):
376 with
open(os
.path
.join(dir, file_name
)) as f
:
377 schema
= yaml
.load(f
, Loader
=yaml
.SafeLoader
)
379 assert '$id' in schema
380 schema_id
= schema
['$id']
381 assert schema_id
not in self
._store
382 self
._store
[schema_id
] = schema
385 def _dict_from_ordered_dict(o_dict
):
388 for k
, v
in o_dict
.items():
391 if type(v
) is collections
.OrderedDict
:
392 new_v
= _SchemaValidator
._dict
_from
_ordered
_dict
(v
)
398 def _validate(self
, instance
, schema_short_id
):
399 # retrieve full schema ID from short ID
400 schema_id
= f
'https://barectf.org/schemas/{schema_short_id}.json'
401 assert schema_id
in self
._store
403 # retrieve full schema
404 schema
= self
._store
[schema_id
]
406 # Create a reference resolver for this schema using this
407 # validator's schema store.
408 resolver
= _RefResolver(base_uri
=schema_id
, referrer
=schema
,
411 # create a JSON schema validator using this reference resolver
412 validator
= jsonschema
.Draft7Validator(schema
, resolver
=resolver
)
414 # Validate the instance, converting its
415 # `collections.OrderedDict` objects to `dict` objects so as to
416 # make any error message easier to read (because
417 # validator.validate() below uses str() for error messages, and
418 # collections.OrderedDict.__str__() returns a somewhat bulky
420 validator
.validate(self
._dict
_from
_ordered
_dict
(instance
))
422 # Validates `instance` using the schema having the short ID
425 # A schema short ID is the part between `schemas/` and `.json` in
428 # Raises a `_ConfigParseError` object, hiding any `jsonschema`
429 # exception, on validation failure.
430 def validate(self
, instance
, schema_short_id
):
432 self
._validate
(instance
, schema_short_id
)
433 except jsonschema
.ValidationError
as exc
:
434 # convert to barectf `_ConfigParseError` exception
435 contexts
= ['Configuration object']
437 # Each element of the instance's absolute path is either an
438 # integer (array element's index) or a string (object
440 for elem
in exc
.absolute_path
:
441 if type(elem
) is int:
442 ctx
= f
'Element {elem}'
444 ctx
= f
'`{elem}` property'
450 if len(exc
.context
) > 0:
451 # According to the documentation of
452 # jsonschema.ValidationError.context(),
453 # the method returns a
455 # > list of errors from the subschemas
457 # This contains additional information about the
458 # validation failure which can help the user figure out
459 # what's wrong exactly.
461 # Join each message with `; ` and append this to our
462 # configuration parsing error's message.
463 msgs
= '; '.join([e
.message
for e
in exc
.context
])
464 schema_ctx
= f
': {msgs}'
466 new_exc
= _ConfigParseError(contexts
.pop(),
467 f
'{exc.message}{schema_ctx} (from schema `{schema_short_id}`)')
469 for ctx
in reversed(contexts
):
470 new_exc
.append_ctx(ctx
)
475 # Converts the byte order string `bo_str` to a `metadata.ByteOrder`
# enumerator (the comparison is case-insensitive).
477 def _byte_order_str_to_bo(bo_str
):
478 bo_str
= bo_str
.lower()
# NOTE(review): the `if`/`elif` condition lines were elided by
# extraction; presumably `'le'` selects LE and `'be'` selects BE —
# TODO confirm against the original file.
481 return metadata
.ByteOrder
.LE
483 return metadata
.ByteOrder
.BE
486 # Converts the encoding string `encoding_str` to a `metadata.Encoding`
488 def _encoding_str_to_encoding(encoding_str
):
489 encoding_str
= encoding_str
.lower()
491 if encoding_str
== 'utf-8' or encoding_str
== 'utf8':
492 return metadata
.Encoding
.UTF8
493 elif encoding_str
== 'ascii':
494 return metadata
.Encoding
.ASCII
495 elif encoding_str
== 'none':
496 return metadata
.Encoding
.NONE
499 # Validates the TSDL identifier `iden`, raising a `_ConfigParseError`
500 # exception using `ctx_obj_name` and `prop` to format the message if
# it's invalid. `prop` names the kind of property being validated
# (e.g. 'field name', 'event type name') for the error message.
502 def _validate_identifier(iden
, ctx_obj_name
, prop
):
503 assert type(iden
) is str
# NOTE(review): the lines building `ctf_keywords` (presumably the set
# of reserved TSDL keywords) were elided by extraction — TODO confirm
# its contents against the original file.
522 if iden
in ctf_keywords
:
523 msg
= f
'Invalid {prop} (not a valid identifier): `{iden}`'
524 raise _ConfigParseError(ctx_obj_name
, msg
)
527 # Validates the alignment `align`, raising a `_ConfigParseError`
528 # exception using `ctx_obj_name` if it's invalid.
529 def _validate_alignment(align
, ctx_obj_name
):
532 if (align
& (align
- 1)) != 0:
533 raise _ConfigParseError(ctx_obj_name
,
534 f
'Invalid alignment (not a power of two): {align}')
539 # Order of values is important here.
541 class _Entity(enum
.IntEnum
):
542 TRACE_PACKET_HEADER
= 0
543 STREAM_PACKET_CONTEXT
= 1
544 STREAM_EVENT_HEADER
= 2
545 STREAM_EVENT_CONTEXT
= 3
550 # A validator which validates the configured metadata for barectf
555 # * The alignments of all header/context field types are at least 8.
557 # * There are no nested structure or array field types, except the
558 # packet header field type's `uuid` field
560 class _BarectfMetadataValidator
:
562 self
._type
_to
_validate
_type
_func
= {
563 _Struct
: self
._validate
_struct
_type
,
564 _Array
: self
._validate
_array
_type
,
567 def _validate_struct_type(self
, t
, entity_root
):
569 raise _ConfigParseError('Structure field type',
570 'Inner structure field types are not supported as of this version')
572 for field_name
, field_type
in t
.fields
.items():
573 if entity_root
and self
._cur
_entity
is _Entity
.TRACE_PACKET_HEADER
:
574 if field_name
== 'uuid':
579 self
._validate
_type
(field_type
, False)
580 except _ConfigParseError
as exc
:
581 _append_error_ctx(exc
,
582 f
'Structure field type\'s field `{field_name}`')
def _validate_array_type(self, t, entity_root):
    # Pseudo array field types are rejected outright: barectf does
    # not support them in this version. The `t` and `entity_root`
    # parameters exist only to match the dispatch signature used by
    # `self._type_to_validate_type_func` and are unused.
    raise _ConfigParseError('Array field type',
                            'Not supported as of this version')
588 def _validate_type(self
, t
, entity_root
):
589 func
= self
._type
_to
_validate
_type
_func
.get(type(t
))
594 def _validate_entity(self
, t
):
598 # make sure root field type has a real alignment of at least 8
600 raise _ConfigParseError('Root field type',
601 f
'Effective alignment must be at least 8 (got {t.real_align})')
603 assert type(t
) is _Struct
605 # validate field types
606 self
._validate
_type
(t
, True)
608 def _validate_event_entities_and_names(self
, stream
, ev
):
610 _validate_identifier(ev
.name
, 'Event type', 'event type name')
612 self
._cur
_entity
= _Entity
.EVENT_CONTEXT
615 self
._validate
_entity
(ev
.context_type
)
616 except _ConfigParseError
as exc
:
617 _append_error_ctx(exc
, 'Event type',
618 'Invalid context field type')
620 self
._cur
_entity
= _Entity
.EVENT_PAYLOAD
623 self
._validate
_entity
(ev
.payload_type
)
624 except _ConfigParseError
as exc
:
625 _append_error_ctx(exc
, 'Event type',
626 'Invalid payload field type')
628 if stream
.is_event_empty(ev
):
629 raise _ConfigParseError('Event type', 'Empty')
630 except _ConfigParseError
as exc
:
631 _append_error_ctx(exc
, f
'Event type `{ev.name}`')
633 def _validate_stream_entities_and_names(self
, stream
):
635 _validate_identifier(stream
.name
, 'Stream type', 'stream type name')
636 self
._cur
_entity
= _Entity
.STREAM_PACKET_CONTEXT
639 self
._validate
_entity
(stream
.packet_context_type
)
640 except _ConfigParseError
as exc
:
641 _append_error_ctx(exc
, 'Stream type',
642 'Invalid packet context field type')
644 self
._cur
_entity
= _Entity
.STREAM_EVENT_HEADER
647 self
._validate
_entity
(stream
.event_header_type
)
648 except _ConfigParseError
as exc
:
649 _append_error_ctx(exc
, 'Stream type',
650 'Invalid event header field type')
652 self
._cur
_entity
= _Entity
.STREAM_EVENT_CONTEXT
655 self
._validate
_entity
(stream
.event_context_type
)
656 except _ConfigParseError
as exc
:
657 _append_error_ctx(exc
, 'Stream type',
658 'Invalid event context field type')
660 for ev
in stream
.events
.values():
661 self
._validate
_event
_entities
_and
_names
(stream
, ev
)
662 except _ConfigParseError
as exc
:
663 _append_error_ctx(exc
, f
'Stream type `{stream.name}`')
665 def _validate_entities_and_names(self
, meta
):
666 self
._cur
_entity
= _Entity
.TRACE_PACKET_HEADER
669 self
._validate
_entity
(meta
.trace
.packet_header_type
)
670 except _ConfigParseError
as exc
:
671 _append_error_ctx(exc
, 'Trace type',
672 'Invalid packet header field type')
674 for stream
in meta
.streams
.values():
675 self
._validate
_stream
_entities
_and
_names
(stream
)
677 def _validate_default_stream(self
, meta
):
678 if meta
.default_stream_name
is not None:
679 if meta
.default_stream_name
not in meta
.streams
.keys():
680 msg
= f
'Default stream type name (`{meta.default_stream_name}`) does not name an existing stream type'
681 raise _ConfigParseError('Metadata', msg
)
683 def validate(self
, meta
):
685 self
._validate
_entities
_and
_names
(meta
)
686 self
._validate
_default
_stream
(meta
)
687 except _ConfigParseError
as exc
:
688 _append_error_ctx(exc
, 'barectf metadata')
691 # A validator which validates special fields of trace, stream, and event
693 class _MetadataSpecialFieldsValidator
:
694 # Validates the packet header field type `t`.
695 def _validate_trace_packet_header_type(self
, t
):
696 ctx_obj_name
= '`packet-header-type` property'
698 # If there's more than one stream type, then the `stream_id`
699 # (stream type ID) field is required.
700 if len(self
._meta
.streams
) > 1:
702 raise _ConfigParseError('Trace type',
703 '`stream_id` field is required (because there\'s more than one stream type), but packet header field type is missing')
705 if 'stream_id' not in t
.fields
:
706 raise _ConfigParseError(ctx_obj_name
,
707 '`stream_id` field is required (because there\'s more than one stream type)')
712 # The `magic` field type must be the first one.
714 # The `stream_id` field type's size (bits) must be large enough
715 # to accomodate any stream type ID.
716 for i
, (field_name
, field_type
) in enumerate(t
.fields
.items()):
717 if field_name
== 'magic':
719 raise _ConfigParseError(ctx_obj_name
,
720 '`magic` field must be the first packet header field type\'s field')
721 elif field_name
== 'stream_id':
722 if len(self
._meta
.streams
) > (1 << field_type
.size
):
723 raise _ConfigParseError(ctx_obj_name
,
724 f
'`stream_id` field\'s size is too small to accomodate {len(self._meta.streams)} stream types')
726 # Validates the trace type of the metadata object `meta`.
727 def _validate_trace(self
, meta
):
728 self
._validate
_trace
_packet
_header
_type
(meta
.trace
.packet_header_type
)
730 # Validates the packet context field type of the stream type
732 def _validate_stream_packet_context(self
, stream
):
733 ctx_obj_name
= '`packet-context-type` property'
734 t
= stream
.packet_context_type
737 # The `timestamp_begin` and `timestamp_end` field types must be
738 # mapped to the `value` property of the same clock.
739 ts_begin
= t
.fields
.get('timestamp_begin')
740 ts_end
= t
.fields
.get('timestamp_end')
742 if ts_begin
is not None and ts_end
is not None:
743 if ts_begin
.property_mappings
[0].object.name
!= ts_end
.property_mappings
[0].object.name
:
744 raise _ConfigParseError(ctx_obj_name
,
745 '`timestamp_begin` and `timestamp_end` fields must be mapped to the same clock value')
747 # The `packet_size` field type's size must be greater than or
748 # equal to the `content_size` field type's size.
749 if t
.fields
['content_size'].size
> t
.fields
['packet_size'].size
:
750 raise _ConfigParseError(ctx_obj_name
,
751 '`content_size` field\'s size must be less than or equal to `packet_size` field\'s size')
753 # Validates the event header field type of the stream type `stream`.
754 def _validate_stream_event_header(self
, stream
):
755 ctx_obj_name
= '`event-header-type` property'
756 t
= stream
.event_header_type
758 # If there's more than one event type, then the `id` (event type
759 # ID) field is required.
760 if len(stream
.events
) > 1:
762 raise _ConfigParseError('Stream type',
763 '`id` field is required (because there\'s more than one event type), but event header field type is missing')
765 if 'id' not in t
.fields
:
766 raise _ConfigParseError(ctx_obj_name
,
767 '`id` field is required (because there\'s more than one event type)')
772 # The `id` field type's size (bits) must be large enough to
773 # accomodate any event type ID.
774 eid
= t
.fields
.get('id')
777 if len(stream
.events
) > (1 << eid
.size
):
778 raise _ConfigParseError(ctx_obj_name
,
779 f
'`id` field\'s size is too small to accomodate {len(stream.events)} event types')
781 # Validates the stream type `stream`.
782 def _validate_stream(self
, stream
):
783 self
._validate
_stream
_packet
_context
(stream
)
784 self
._validate
_stream
_event
_header
(stream
)
786 # Validates the trace and stream types of the metadata object
788 def validate(self
, meta
):
793 self
._validate
_trace
(meta
)
794 except _ConfigParseError
as exc
:
795 _append_error_ctx(exc
, 'Trace type')
797 for stream
in meta
.streams
.values():
799 self
._validate
_stream
(stream
)
800 except _ConfigParseError
as exc
:
801 _append_error_ctx(exc
, f
'Stream type `{stream.name}`')
802 except _ConfigParseError
as exc
:
803 _append_error_ctx(exc
, 'Metadata')
806 # A barectf YAML configuration parser.
808 # When you build such a parser, it parses the configuration file and
809 # creates a corresponding `config.Config` object which you can get with
810 # the `config` property.
812 # See the comments of _parse() for more implementation details about the
813 # parsing stages and general strategy.
814 class _YamlConfigParser
:
815 # Builds a barectf YAML configuration parser and parses the
816 # configuration file having the path `path`.
818 # The parser considers the inclusion directories `include_dirs`,
819 # ignores nonexistent inclusion files if `ignore_include_not_found`
820 # is `True`, and dumps the effective configuration (as YAML) if
821 # `dump_config` is `True`.
823 # Raises `_ConfigParseError` on parsing error.
824 def __init__(self
, path
, include_dirs
, ignore_include_not_found
,
826 self
._root
_path
= path
827 self
._class
_name
_to
_create
_field
_type
_func
= {
828 'int': self
._create
_integer
_field
_type
,
829 'integer': self
._create
_integer
_field
_type
,
830 'flt': self
._create
_float
_field
_type
,
831 'float': self
._create
_float
_field
_type
,
832 'floating-point': self
._create
_float
_field
_type
,
833 'enum': self
._create
_enum
_field
_type
,
834 'enumeration': self
._create
_enum
_field
_type
,
835 'str': self
._create
_string
_field
_type
,
836 'string': self
._create
_string
_field
_type
,
837 'struct': self
._create
_struct
_field
_type
,
838 'structure': self
._create
_struct
_field
_type
,
839 'array': self
._create
_array
_field
_type
,
841 self
._include
_dirs
= include_dirs
842 self
._ignore
_include
_not
_found
= ignore_include_not_found
843 self
._dump
_config
= dump_config
844 self
._schema
_validator
= _SchemaValidator()
847 # Sets the default byte order as found in the `metadata_node` node.
848 def _set_byte_order(self
, metadata_node
):
849 self
._bo
= _byte_order_str_to_bo(metadata_node
['trace']['byte-order'])
850 assert self
._bo
is not None
852 # Sets the clock value property mapping of the pseudo integer field
853 # type object `int_obj` as found in the `prop_mapping_node` node.
854 def _set_int_clock_prop_mapping(self
, int_obj
, prop_mapping_node
):
855 clock_name
= prop_mapping_node
['name']
856 clock
= self
._clocks
.get(clock_name
)
859 exc
= _ConfigParseError('`property-mappings` property',
860 f
'Clock type `{clock_name}` does not exist')
861 exc
.append_ctx('Integer field type')
864 prop_mapping
= _PropertyMapping()
865 prop_mapping
.object = clock
866 prop_mapping
.prop
= 'value'
867 int_obj
.property_mappings
.append(prop_mapping
)
869 # Creates a pseudo integer field type from the node `node` and
871 def _create_integer_field_type(self
, node
):
873 obj
.size
= node
['size']
874 align_node
= node
.get('align')
876 if align_node
is not None:
877 _validate_alignment(align_node
, 'Integer field type')
878 obj
.align
= align_node
880 signed_node
= node
.get('signed')
882 if signed_node
is not None:
883 obj
.signed
= signed_node
885 obj
.byte_order
= self
._bo
886 bo_node
= node
.get('byte-order')
888 if bo_node
is not None:
889 obj
.byte_order
= _byte_order_str_to_bo(bo_node
)
891 base_node
= node
.get('base')
893 if base_node
is not None:
894 if base_node
== 'bin':
896 elif base_node
== 'oct':
898 elif base_node
== 'dec':
901 assert base_node
== 'hex'
904 encoding_node
= node
.get('encoding')
906 if encoding_node
is not None:
907 obj
.encoding
= _encoding_str_to_encoding(encoding_node
)
909 pm_node
= node
.get('property-mappings')
911 if pm_node
is not None:
912 assert len(pm_node
) == 1
913 self
._set
_int
_clock
_prop
_mapping
(obj
, pm_node
[0])
917 # Creates a pseudo floating point number field type from the node
918 # `node` and returns it.
919 def _create_float_field_type(self
, node
):
920 obj
= _FloatingPoint()
921 size_node
= node
['size']
922 obj
.exp_size
= size_node
['exp']
923 obj
.mant_size
= size_node
['mant']
924 align_node
= node
.get('align')
926 if align_node
is not None:
927 _validate_alignment(align_node
, 'Floating point number field type')
928 obj
.align
= align_node
930 obj
.byte_order
= self
._bo
931 bo_node
= node
.get('byte-order')
933 if bo_node
is not None:
934 obj
.byte_order
= _byte_order_str_to_bo(bo_node
)
938 # Creates a pseudo enumeration field type from the node `node` and
940 def _create_enum_field_type(self
, node
):
941 ctx_obj_name
= 'Enumeration field type'
944 # value (integer) field type
946 obj
.value_type
= self
._create
_type
(node
['value-type'])
947 except _ConfigParseError
as exc
:
948 _append_error_ctx(exc
, ctx_obj_name
,
949 'Cannot create value (integer) field type')
952 members_node
= node
.get('members')
954 if members_node
is not None:
955 if obj
.value_type
.signed
:
956 value_min
= -(1 << obj
.value_type
.size
- 1)
957 value_max
= (1 << (obj
.value_type
.size
- 1)) - 1
960 value_max
= (1 << obj
.value_type
.size
) - 1
964 for m_node
in members_node
:
965 if type(m_node
) is str:
970 assert type(m_node
) is collections
.OrderedDict
971 label
= m_node
['label']
972 value
= m_node
['value']
974 if type(value
) is int:
976 value
= (value
, value
)
978 assert type(value
) is list
979 assert len(value
) == 2
984 exc
= _ConfigParseError(ctx_obj_name
)
985 exc
.append_ctx(f
'Member `{label}`',
986 f
'Invalid integral range ({mn} > {mx})')
992 # Make sure that all the integral values of the range
993 # fits the enumeration field type's integer value field
994 # type depending on its size (bits).
995 member_obj_name
= f
'Member `{label}`'
996 msg
= f
'Value {value[0]} is outside the value type range [{value_min}, {value_max}]'
999 if value
[0] < value_min
or value
[0] > value_max
:
1000 raise _ConfigParseError(member_obj_name
, msg
)
1002 if value
[1] < value_min
or value
[1] > value_max
:
1003 raise _ConfigParseError(member_obj_name
, msg
)
1004 except _ConfigParseError
as exc
:
1005 _append_error_ctx(exc
, ctx_obj_name
)
1007 obj
.members
[label
] = value
1011 # Creates a pseudo string field type from the node `node` and
1013 def _create_string_field_type(self
, node
):
1015 encoding_node
= node
.get('encoding')
1017 if encoding_node
is not None:
1018 obj
.encoding
= _encoding_str_to_encoding(encoding_node
)
1022 # Creates a pseudo structure field type from the node `node` and
1024 def _create_struct_field_type(self
, node
):
1025 ctx_obj_name
= 'Structure field type'
1027 min_align_node
= node
.get('min-align')
1029 if min_align_node
is not None:
1030 _validate_alignment(min_align_node
, ctx_obj_name
)
1031 obj
.min_align
= min_align_node
1033 fields_node
= node
.get('fields')
1035 if fields_node
is not None:
1036 for field_name
, field_node
in fields_node
.items():
1037 _validate_identifier(field_name
, ctx_obj_name
, 'field name')
1040 obj
.fields
[field_name
] = self
._create
_type
(field_node
)
1041 except _ConfigParseError
as exc
:
1042 _append_error_ctx(exc
, ctx_obj_name
,
1043 f
'Cannot create field `{field_name}`')
1047 # Creates a pseudo array field type from the node `node` and returns
1049 def _create_array_field_type(self
, node
):
1051 obj
.length
= node
['length']
1054 obj
.element_type
= self
._create
_type
(node
['element-type'])
1055 except _ConfigParseError
as exc
:
1056 _append_error_ctx(exc
, 'Array field type',
1057 'Cannot create element field type')
# Creates a pseudo field type from the node `type_node` and returns it.
#
# This method checks the `class` property of `type_node` to determine
# which function of `self._class_name_to_create_field_type_func` to
# call to create the corresponding pseudo field type.
def _create_type(self, type_node):
    create_func = self._class_name_to_create_field_type_func[type_node['class']]
    return create_func(type_node)
1069 # Creates a pseudo clock type from the node `node` and returns it.
1070 def _create_clock(self
, node
):
1072 uuid_node
= node
.get('uuid')
1074 if uuid_node
is not None:
1076 clock
.uuid
= uuid
.UUID(uuid_node
)
1077 except ValueError as exc
:
1078 raise _ConfigParseError('Clock type',
1079 f
'Malformed UUID `{uuid_node}`: {exc}')
1081 descr_node
= node
.get('description')
1083 if descr_node
is not None:
1084 clock
.description
= descr_node
1086 freq_node
= node
.get('freq')
1088 if freq_node
is not None:
1089 clock
.freq
= freq_node
1091 error_cycles_node
= node
.get('error-cycles')
1093 if error_cycles_node
is not None:
1094 clock
.error_cycles
= error_cycles_node
1096 offset_node
= node
.get('offset')
1098 if offset_node
is not None:
1099 offset_cycles_node
= offset_node
.get('cycles')
1101 if offset_cycles_node
is not None:
1102 clock
.offset_cycles
= offset_cycles_node
1104 offset_seconds_node
= offset_node
.get('seconds')
1106 if offset_seconds_node
is not None:
1107 clock
.offset_seconds
= offset_seconds_node
1109 absolute_node
= node
.get('absolute')
1111 if absolute_node
is not None:
1112 clock
.absolute
= absolute_node
1114 return_ctype_node
= node
.get('$return-ctype')
1116 if return_ctype_node
is None:
1117 # barectf 2.1: `return-ctype` property was renamed to
1119 return_ctype_node
= node
.get('return-ctype')
1121 if return_ctype_node
is not None:
1122 clock
.return_ctype
= return_ctype_node
1126 # Registers all the clock types of the metadata node
1127 # `metadata_node`, creating pseudo clock types during the process,
1128 # within this parser.
1130 # The pseudo clock types in `self._clocks` are then accessible when
1131 # creating a pseudo integer field type (see
1132 # _create_integer_field_type() and _set_int_clock_prop_mapping()).
1133 def _register_clocks(self
, metadata_node
):
1134 self
._clocks
= collections
.OrderedDict()
1135 clocks_node
= metadata_node
.get('clocks')
1137 if clocks_node
is None:
1140 for clock_name
, clock_node
in clocks_node
.items():
1141 _validate_identifier(clock_name
, 'Metadata', 'clock type name')
1142 assert clock_name
not in self
._clocks
1145 clock
= self
._create
_clock
(clock_node
)
1146 except _ConfigParseError
as exc
:
1147 _append_error_ctx(exc
, 'Metadata',
1148 f
'Cannot create clock type `{clock}`')
1150 clock
.name
= clock_name
1151 self
._clocks
[clock_name
] = clock
1153 # Creates an environment object (`collections.OrderedDict`) from the
1154 # metadata node `metadata_node` and returns it.
1155 def _create_env(self
, metadata_node
):
1156 env_node
= metadata_node
.get('env')
1158 if env_node
is None:
1159 return collections
.OrderedDict()
1161 for env_name
, env_value
in env_node
.items():
1162 _validate_identifier(env_name
, 'Metadata',
1163 'environment variable name')
1165 return copy
.deepcopy(env_node
)
1167 # Creates a pseudo trace type from the metadata node `metadata_node`
1169 def _create_trace(self
, metadata_node
):
1170 ctx_obj_name
= 'Trace type'
1172 trace_node
= metadata_node
['trace']
1173 trace
.byte_order
= self
._bo
1174 uuid_node
= trace_node
.get('uuid')
1176 if uuid_node
is not None:
1177 # The `uuid` property of the trace type node can be `auto`
1178 # to make barectf generate a UUID.
1179 if uuid_node
== 'auto':
1180 trace
.uuid
= uuid
.uuid1()
1183 trace
.uuid
= uuid
.UUID(uuid_node
)
1184 except ValueError as exc
:
1185 raise _ConfigParseError(ctx_obj_name
,
1186 f
'Malformed UUID `{uuid_node}`: {exc}')
1188 pht_node
= trace_node
.get('packet-header-type')
1190 if pht_node
is not None:
1192 trace
.packet_header_type
= self
._create
_type
(pht_node
)
1193 except _ConfigParseError
as exc
:
1194 _append_error_ctx(exc
, ctx_obj_name
,
1195 'Cannot create packet header field type')
1199 # Creates a pseudo event type from the event node `event_node` and
1201 def _create_event(self
, event_node
):
1202 ctx_obj_name
= 'Event type'
1204 log_level_node
= event_node
.get('log-level')
1206 if log_level_node
is not None:
1207 assert type(log_level_node
) is int
1208 event
.log_level
= metadata
.LogLevel(None, log_level_node
)
1210 ct_node
= event_node
.get('context-type')
1212 if ct_node
is not None:
1214 event
.context_type
= self
._create
_type
(ct_node
)
1215 except _ConfigParseError
as exc
:
1216 _append_error_ctx(exc
, ctx_obj_name
,
1217 'Cannot create context field type')
1219 pt_node
= event_node
.get('payload-type')
1221 if pt_node
is not None:
1223 event
.payload_type
= self
._create
_type
(pt_node
)
1224 except _ConfigParseError
as exc
:
1225 _append_error_ctx(exc
, ctx_obj_name
,
1226 'Cannot create payload field type')
1230 # Creates a pseudo stream type named `stream_name` from the stream
1231 # node `stream_node` and returns it.
1232 def _create_stream(self
, stream_name
, stream_node
):
1233 ctx_obj_name
= 'Stream type'
1235 pct_node
= stream_node
.get('packet-context-type')
1237 if pct_node
is not None:
1239 stream
.packet_context_type
= self
._create
_type
(pct_node
)
1240 except _ConfigParseError
as exc
:
1241 _append_error_ctx(exc
, ctx_obj_name
,
1242 'Cannot create packet context field type')
1244 eht_node
= stream_node
.get('event-header-type')
1246 if eht_node
is not None:
1248 stream
.event_header_type
= self
._create
_type
(eht_node
)
1249 except _ConfigParseError
as exc
:
1250 _append_error_ctx(exc
, ctx_obj_name
,
1251 'Cannot create event header field type')
1253 ect_node
= stream_node
.get('event-context-type')
1255 if ect_node
is not None:
1257 stream
.event_context_type
= self
._create
_type
(ect_node
)
1258 except _ConfigParseError
as exc
:
1259 _append_error_ctx(exc
, ctx_obj_name
,
1260 'Cannot create event context field type')
1262 events_node
= stream_node
['events']
1265 for ev_name
, ev_node
in events_node
.items():
1267 ev
= self
._create
_event
(ev_node
)
1268 except _ConfigParseError
as exc
:
1269 _append_error_ctx(exc
, ctx_obj_name
,
1270 f
'Cannot create event type `{ev_name}`')
1274 stream
.events
[ev_name
] = ev
1277 default_node
= stream_node
.get('$default')
1279 if default_node
is not None:
1280 if self
._meta
.default_stream_name
is not None and self
._meta
.default_stream_name
!= stream_name
:
1281 msg
= f
'Cannot specify more than one default stream type (default stream type already set to `{self._meta.default_stream_name}`)'
1282 raise _ConfigParseError('Stream type', msg
)
1284 self
._meta
.default_stream_name
= stream_name
1288 # Creates a `collections.OrderedDict` object where keys are stream
1289 # type names and values are pseudo stream types from the metadata
1290 # node `metadata_node` and returns it.
1291 def _create_streams(self
, metadata_node
):
1292 streams
= collections
.OrderedDict()
1293 streams_node
= metadata_node
['streams']
1296 for stream_name
, stream_node
in streams_node
.items():
1298 stream
= self
._create
_stream
(stream_name
, stream_node
)
1299 except _ConfigParseError
as exc
:
1300 _append_error_ctx(exc
, 'Metadata',
1301 f
'Cannot create stream type `{stream_name}`')
1304 stream
.name
= stream_name
1305 streams
[stream_name
] = stream
1310 # Creates a pseudo metadata object from the configuration node
1311 # `root` and returns it.
1312 def _create_metadata(self
, root
):
1313 self
._meta
= _Metadata()
1314 metadata_node
= root
['metadata']
1316 if '$default-stream' in metadata_node
and metadata_node
['$default-stream'] is not None:
1317 default_stream_node
= metadata_node
['$default-stream']
1318 self
._meta
.default_stream_name
= default_stream_node
1320 self
._set
_byte
_order
(metadata_node
)
1321 self
._register
_clocks
(metadata_node
)
1322 self
._meta
.clocks
= self
._clocks
1323 self
._meta
.env
= self
._create
_env
(metadata_node
)
1324 self
._meta
.trace
= self
._create
_trace
(metadata_node
)
1325 self
._meta
.streams
= self
._create
_streams
(metadata_node
)
1327 # validate the pseudo metadata object
1328 _MetadataSpecialFieldsValidator().validate(self
._meta
)
1329 _BarectfMetadataValidator().validate(self
._meta
)
1333 # Gets and validates the tracing prefix as found in the
1334 # configuration node `config_node` and returns it.
1335 def _get_prefix(self
, config_node
):
1336 prefix
= config_node
.get('prefix', 'barectf_')
1337 _validate_identifier(prefix
, '`prefix` property', 'prefix')
1340 # Gets the options as found in the configuration node `config_node`
1341 # and returns a corresponding `config.ConfigOptions` object.
1342 def _get_options(self
, config_node
):
1343 gen_prefix_def
= False
1344 gen_default_stream_def
= False
1345 options_node
= config_node
.get('options')
1347 if options_node
is not None:
1348 gen_prefix_def
= options_node
.get('gen-prefix-def',
1350 gen_default_stream_def
= options_node
.get('gen-default-stream-def',
1351 gen_default_stream_def
)
1353 return config
.ConfigOptions(gen_prefix_def
, gen_default_stream_def
)
1355 # Returns the last included file name from the parser's inclusion
1357 def _get_last_include_file(self
):
1358 if self
._include
_stack
:
1359 return self
._include
_stack
[-1]
1361 return self
._root
_path
1363 # Loads the inclusion file having the path `yaml_path` and returns
1364 # its content as a `collections.OrderedDict` object.
1365 def _load_include(self
, yaml_path
):
1366 for inc_dir
in self
._include
_dirs
:
1367 # Current inclusion dir + file name path.
1369 # Note: os.path.join() only takes the last argument if it's
1371 inc_path
= os
.path
.join(inc_dir
, yaml_path
)
1373 # real path (symbolic links resolved)
1374 real_path
= os
.path
.realpath(inc_path
)
1376 # normalized path (weird stuff removed!)
1377 norm_path
= os
.path
.normpath(real_path
)
1379 if not os
.path
.isfile(norm_path
):
1380 # file doesn't exist: skip
1383 if norm_path
in self
._include
_stack
:
1384 base_path
= self
._get
_last
_include
_file
()
1385 raise _ConfigParseError(f
'File `{base_path}`',
1386 f
'Cannot recursively include file `{norm_path}`')
1388 self
._include
_stack
.append(norm_path
)
1391 return self
._yaml
_ordered
_load
(norm_path
)
1393 if not self
._ignore
_include
_not
_found
:
1394 base_path
= self
._get
_last
_include
_file
()
1395 raise _ConfigParseError(f
'File `{base_path}`',
1396 f
'Cannot include file `{yaml_path}`: file not found in inclusion directories')
1398 # Returns a list of all the inclusion file paths as found in the
1399 # inclusion node `include_node`.
1400 def _get_include_paths(self
, include_node
):
1401 if include_node
is None:
1405 if type(include_node
) is str:
1407 return [include_node
]
1410 assert type(include_node
) is list
1413 # Updates the node `base_node` with an overlay node `overlay_node`.
1415 # Both the inclusion and field type inheritance features use this
1417 def _update_node(self
, base_node
, overlay_node
):
1418 for olay_key
, olay_value
in overlay_node
.items():
1419 if olay_key
in base_node
:
1420 base_value
= base_node
[olay_key
]
1422 if type(olay_value
) is collections
.OrderedDict
and type(base_value
) is collections
.OrderedDict
:
1423 # merge both objects
1424 self
._update
_node
(base_value
, olay_value
)
1425 elif type(olay_value
) is list and type(base_value
) is list:
1426 # append extension array items to base items
1427 base_value
+= olay_value
1429 # fall back to replacing base property
1430 base_node
[olay_key
] = olay_value
1432 # set base property from overlay property
1433 base_node
[olay_key
] = olay_value
1435 # Processes inclusions using `last_overlay_node` as the last overlay
1436 # node to use to "patch" the node.
1438 # If `last_overlay_node` contains an `$include` property, then this
1439 # method patches the current base node (initially empty) in order
1440 # using the content of the inclusion files (recursively).
1442 # At the end, this method removes the `$include` of
1443 # `last_overlay_node` and then patches the current base node with
1444 # its other properties before returning the result (always a deep
1446 def _process_node_include(self
, last_overlay_node
,
1447 process_base_include_cb
,
1448 process_children_include_cb
=None):
1449 # process children inclusions first
1450 if process_children_include_cb
is not None:
1451 process_children_include_cb(last_overlay_node
)
1453 incl_prop_name
= '$include'
1455 if incl_prop_name
in last_overlay_node
:
1456 include_node
= last_overlay_node
[incl_prop_name
]
1459 return last_overlay_node
1461 include_paths
= self
._get
_include
_paths
(include_node
)
1462 cur_base_path
= self
._get
_last
_include
_file
()
1465 # keep the inclusion paths and remove the `$include` property
1466 include_paths
= copy
.deepcopy(include_paths
)
1467 del last_overlay_node
[incl_prop_name
]
1469 for include_path
in include_paths
:
1470 # load raw YAML from included file
1471 overlay_node
= self
._load
_include
(include_path
)
1473 if overlay_node
is None:
1474 # Cannot find inclusion file, but we're ignoring those
1475 # errors, otherwise _load_include() itself raises a
1479 # recursively process inclusions
1481 overlay_node
= process_base_include_cb(overlay_node
)
1482 except _ConfigParseError
as exc
:
1483 _append_error_ctx(exc
, f
'File `{cur_base_path}`')
1485 # pop inclusion stack now that we're done including
1486 del self
._include
_stack
[-1]
1488 # At this point, `base_node` is fully resolved (does not
1489 # contain any `$include` property).
1490 if base_node
is None:
1491 base_node
= overlay_node
1493 self
._update
_node
(base_node
, overlay_node
)
1495 # Finally, update the latest base node with our last overlay
1497 if base_node
is None:
1498 # Nothing was included, which is possible when we're
1499 # ignoring inclusion errors.
1500 return last_overlay_node
1502 self
._update
_node
(base_node
, last_overlay_node
)
1505 # Process the inclusions of the event type node `event_node`,
1506 # returning the effective node.
1507 def _process_event_include(self
, event_node
):
1508 # Make sure the event type node is valid for the inclusion
1510 self
._schema
_validator
.validate(event_node
,
1511 '2/config/event-pre-include')
1513 # process inclusions
1514 return self
._process
_node
_include
(event_node
,
1515 self
._process
_event
_include
)
1517 # Process the inclusions of the stream type node `stream_node`,
1518 # returning the effective node.
1519 def _process_stream_include(self
, stream_node
):
1520 def process_children_include(stream_node
):
1521 if 'events' in stream_node
:
1522 events_node
= stream_node
['events']
1524 for key
in list(events_node
):
1525 events_node
[key
] = self
._process
_event
_include
(events_node
[key
])
1527 # Make sure the stream type node is valid for the inclusion
1529 self
._schema
_validator
.validate(stream_node
,
1530 '2/config/stream-pre-include')
1532 # process inclusions
1533 return self
._process
_node
_include
(stream_node
,
1534 self
._process
_stream
_include
,
1535 process_children_include
)
1537 # Process the inclusions of the trace type node `trace_node`,
1538 # returning the effective node.
1539 def _process_trace_include(self
, trace_node
):
1540 # Make sure the trace type node is valid for the inclusion
1542 self
._schema
_validator
.validate(trace_node
,
1543 '2/config/trace-pre-include')
1545 # process inclusions
1546 return self
._process
_node
_include
(trace_node
,
1547 self
._process
_trace
_include
)
1549 # Process the inclusions of the clock type node `clock_node`,
1550 # returning the effective node.
1551 def _process_clock_include(self
, clock_node
):
1552 # Make sure the clock type node is valid for the inclusion
1554 self
._schema
_validator
.validate(clock_node
,
1555 '2/config/clock-pre-include')
1557 # process inclusions
1558 return self
._process
_node
_include
(clock_node
,
1559 self
._process
_clock
_include
)
1561 # Process the inclusions of the metadata node `metadata_node`,
1562 # returning the effective node.
1563 def _process_metadata_include(self
, metadata_node
):
1564 def process_children_include(metadata_node
):
1565 if 'trace' in metadata_node
:
1566 metadata_node
['trace'] = self
._process
_trace
_include
(metadata_node
['trace'])
1568 if 'clocks' in metadata_node
:
1569 clocks_node
= metadata_node
['clocks']
1571 for key
in list(clocks_node
):
1572 clocks_node
[key
] = self
._process
_clock
_include
(clocks_node
[key
])
1574 if 'streams' in metadata_node
:
1575 streams_node
= metadata_node
['streams']
1577 for key
in list(streams_node
):
1578 streams_node
[key
] = self
._process
_stream
_include
(streams_node
[key
])
1580 # Make sure the metadata node is valid for the inclusion
1582 self
._schema
_validator
.validate(metadata_node
,
1583 '2/config/metadata-pre-include')
1585 # process inclusions
1586 return self
._process
_node
_include
(metadata_node
,
1587 self
._process
_metadata
_include
,
1588 process_children_include
)
1590 # Process the inclusions of the configuration node `config_node`,
1591 # returning the effective node.
1592 def _process_config_includes(self
, config_node
):
1593 # Process inclusions in this order:
1595 # 1. Clock type node, event type nodes, and trace type nodes
1596 # (the order between those is not important).
1598 # 2. Stream type nodes.
1604 # * A metadata node can include clock type nodes, a trace type
1605 # node, stream type nodes, and event type nodes (indirectly).
1607 # * A stream type node can include event type nodes.
1609 # We keep a stack of absolute paths to included files
1610 # (`self._include_stack`) to detect recursion.
1612 # First, make sure the configuration object itself is valid for
1613 # the inclusion processing stage.
1614 self
._schema
_validator
.validate(config_node
,
1615 '2/config/config-pre-include')
1617 # Process metadata node inclusions.
1619 # self._process_metadata_include() returns a new (or the same)
1620 # metadata node without any `$include` property in it,
1622 config_node
['metadata'] = self
._process
_metadata
_include
(config_node
['metadata'])
1626 # Expands the field type aliases found in the metadata node
1627 # `metadata_node` using the aliases of the `type_aliases_node` node.
1629 # This method modifies `metadata_node`.
1631 # When this method returns:
1633 # * Any field type alias is replaced with its full field type
1636 # * The `type-aliases` property of `metadata_node` is removed.
1637 def _expand_field_type_aliases(self
, metadata_node
, type_aliases_node
):
1638 def resolve_field_type_aliases(parent_node
, key
, from_descr
,
1640 if key
not in parent_node
:
1643 # This set holds all the aliases we need to expand,
1644 # recursively. This is used to detect cycles.
1645 if alias_set
is None:
1648 node
= parent_node
[key
]
1653 if type(node
) is str:
1656 if alias
not in resolved_aliases
:
1657 # Only check for a field type alias cycle when we
1658 # didn't resolve the alias yet, as a given node can
1659 # refer to the same field type alias more than once.
1660 if alias
in alias_set
:
1661 msg
= f
'Cycle detected during the `{alias}` field type alias resolution'
1662 raise _ConfigParseError(from_descr
, msg
)
1664 # try to load field type alias node named `alias`
1665 if alias
not in type_aliases_node
:
1666 raise _ConfigParseError(from_descr
,
1667 f
'Field type alias `{alias}` does not exist')
1670 alias_set
.add(alias
)
1671 resolve_field_type_aliases(type_aliases_node
, alias
,
1672 from_descr
, alias_set
)
1673 resolved_aliases
.add(alias
)
1675 parent_node
[key
] = copy
.deepcopy(type_aliases_node
[node
])
1678 # traverse, resolving field type aliases as needed
1679 for pkey
in ['$inherit', 'inherit', 'value-type', 'element-type']:
1680 resolve_field_type_aliases(node
, pkey
, from_descr
, alias_set
)
1682 # structure field type fields
1686 assert type(node
[pkey
]) is collections
.OrderedDict
1688 for field_name
in node
[pkey
]:
1689 resolve_field_type_aliases(node
[pkey
], field_name
,
1690 from_descr
, alias_set
)
1692 def resolve_field_type_aliases_from(parent_node
, key
):
1693 resolve_field_type_aliases(parent_node
, key
,
1694 f
'`{key}` property')
1696 # set of resolved field type aliases
1697 resolved_aliases
= set()
1699 # Expand field type aliases within trace, stream, and event
1702 resolve_field_type_aliases_from(metadata_node
['trace'],
1703 'packet-header-type')
1704 except _ConfigParseError
as exc
:
1705 _append_error_ctx(exc
, 'Trace type')
1707 for stream_name
, stream
in metadata_node
['streams'].items():
1709 resolve_field_type_aliases_from(stream
, 'packet-context-type')
1710 resolve_field_type_aliases_from(stream
, 'event-header-type')
1711 resolve_field_type_aliases_from(stream
, 'event-context-type')
1713 for event_name
, event
in stream
['events'].items():
1715 resolve_field_type_aliases_from(event
, 'context-type')
1716 resolve_field_type_aliases_from(event
, 'payload-type')
1717 except _ConfigParseError
as exc
:
1718 _append_error_ctx(exc
, f
'Event type `{event_name}`')
1719 except _ConfigParseError
as exc
:
1720 _append_error_ctx(exc
, f
'Stream type `{stream_name}`')
1722 # remove the (now unneeded) `type-aliases` node
1723 del metadata_node
['type-aliases']
1725 # Applies field type inheritance to all field types found in
1728 # This method modifies `metadata_node`.
1730 # When this method returns, no field type node has an `$inherit` or
1731 # `inherit` property.
1732 def _expand_field_type_inheritance(self
, metadata_node
):
1733 def apply_inheritance(parent_node
, key
):
1734 if key
not in parent_node
:
1737 node
= parent_node
[key
]
1742 # process children first
1743 for pkey
in ['$inherit', 'inherit', 'value-type', 'element-type']:
1744 apply_inheritance(node
, pkey
)
1746 # structure field type fields
1750 assert type(node
[pkey
]) is collections
.OrderedDict
1752 for field_name
, field_type
in node
[pkey
].items():
1753 apply_inheritance(node
[pkey
], field_name
)
1755 # apply inheritance of this node
1756 if 'inherit' in node
:
1757 # barectf 2.1: `inherit` property was renamed to `$inherit`
1758 assert '$inherit' not in node
1759 node
['$inherit'] = node
['inherit']
1762 inherit_key
= '$inherit'
1764 if inherit_key
in node
:
1765 assert type(node
[inherit_key
]) is collections
.OrderedDict
1767 # apply inheritance below
1768 apply_inheritance(node
, inherit_key
)
1770 # `node` is an overlay on the `$inherit` node
1771 base_node
= node
[inherit_key
]
1772 del node
[inherit_key
]
1773 self
._update
_node
(base_node
, node
)
1775 # set updated base node as this node
1776 parent_node
[key
] = base_node
1778 apply_inheritance(metadata_node
['trace'], 'packet-header-type')
1780 for stream
in metadata_node
['streams'].values():
1781 apply_inheritance(stream
, 'packet-context-type')
1782 apply_inheritance(stream
, 'event-header-type')
1783 apply_inheritance(stream
, 'event-context-type')
1785 for event
in stream
['events'].values():
1786 apply_inheritance(event
, 'context-type')
1787 apply_inheritance(event
, 'payload-type')
1789 # Calls _expand_field_type_aliases() and
1790 # _expand_field_type_inheritance() if the metadata node
1791 # `metadata_node` has a `type-aliases` property.
1792 def _expand_field_types(self
, metadata_node
):
1793 type_aliases_node
= metadata_node
.get('type-aliases')
1795 if type_aliases_node
is None:
1796 # If there's no `type-aliases` node, then there's no field
1797 # type aliases and therefore no possible inheritance.
1800 # first, expand field type aliases
1801 self
._expand
_field
_type
_aliases
(metadata_node
, type_aliases_node
)
1803 # next, apply inheritance to create effective field types
1804 self
._expand
_field
_type
_inheritance
(metadata_node
)
1806 # Replaces the textual log levels in event type nodes of the
1807 # metadata node `metadata_node` with their numeric equivalent (as
1808 # found in the `$log-levels` or `log-levels` node of
1811 # This method modifies `metadata_node`.
1813 # When this method returns, the `$log-levels` or `log-level`
1814 # property of `metadata_node` is removed.
1815 def _expand_log_levels(self
, metadata_node
):
1816 if 'log-levels' in metadata_node
:
1817 # barectf 2.1: `log-levels` property was renamed to
1819 assert '$log-levels' not in metadata_node
1820 metadata_node
['$log-levels'] = metadata_node
['log-levels']
1821 del metadata_node
['log-levels']
1823 log_levels_key
= '$log-levels'
1824 log_levels_node
= metadata_node
.get(log_levels_key
)
1826 if log_levels_node
is None:
1827 # no log level aliases
1830 # not needed anymore
1831 del metadata_node
[log_levels_key
]
1833 for stream_name
, stream
in metadata_node
['streams'].items():
1835 for event_name
, event
in stream
['events'].items():
1836 prop_name
= 'log-level'
1837 ll_node
= event
.get(prop_name
)
1842 if type(ll_node
) is str:
1843 if ll_node
not in log_levels_node
:
1844 exc
= _ConfigParseError('`log-level` property',
1845 f
'Log level alias `{ll_node}` does not exist')
1846 exc
.append_ctx(f
'Event type `{event_name}`')
1849 event
[prop_name
] = log_levels_node
[ll_node
]
1850 except _ConfigParseError
as exc
:
1851 _append_error_ctx(exc
, f
'Stream type `{stream_name}`')
1853 # Dumps the node `node` as YAML, passing `kwds` to yaml.dump().
1854 def _yaml_ordered_dump(self
, node
, **kwds
):
1855 class ODumper(yaml
.Dumper
):
1858 def dict_representer(dumper
, node
):
1859 return dumper
.represent_mapping(yaml
.resolver
.BaseResolver
.DEFAULT_MAPPING_TAG
,
1862 ODumper
.add_representer(collections
.OrderedDict
, dict_representer
)
1865 return yaml
.dump(node
, Dumper
=ODumper
, **kwds
)
1867 # Loads the content of the YAML file having the path `yaml_path` as
1870 # All YAML maps are loaded as `collections.OrderedDict` objects.
1871 def _yaml_ordered_load(self
, yaml_path
):
1872 class OLoader(yaml
.Loader
):
1875 def construct_mapping(loader
, node
):
1876 loader
.flatten_mapping(node
)
1878 return collections
.OrderedDict(loader
.construct_pairs(node
))
1880 OLoader
.add_constructor(yaml
.resolver
.BaseResolver
.DEFAULT_MAPPING_TAG
,
1885 with
open(yaml_path
, 'r') as f
:
1886 node
= yaml
.load(f
, OLoader
)
1887 except (OSError, IOError) as exc
:
1888 raise _ConfigParseError(f
'File `{yaml_path}`',
1889 f
'Cannot open file: {exc}')
1891 assert type(node
) is collections
.OrderedDict
1895 self
._version
= None
1896 self
._include
_stack
= []
1898 # load the configuration object as is from the root YAML file
1900 config_node
= self
._yaml
_ordered
_load
(self
._root
_path
)
1901 except _ConfigParseError
as exc
:
1902 _append_error_ctx(exc
, 'Configuration',
1903 f
'Cannot parse YAML file `{self._root_path}`')
1905 # Make sure the configuration object is minimally valid, that
1906 # is, it contains a valid `version` property.
1908 # This step does not validate the whole configuration object
1909 # yet because we don't have an effective configuration object;
1912 # * Process inclusions.
1913 # * Expand field types (inheritance and aliases).
1914 self
._schema
_validator
.validate(config_node
, 'config/config-min')
1916 # Process configuration object inclusions.
1918 # self._process_config_includes() returns a new (or the same)
1919 # configuration object without any `$include` property in it,
1921 config_node
= self
._process
_config
_includes
(config_node
)
1923 # Make sure that the current configuration object is valid
1924 # considering field types are not expanded yet.
1925 self
._schema
_validator
.validate(config_node
,
1926 '2/config/config-pre-field-type-expansion')
1928 # Expand field types.
1932 # 1. Replaces field type aliases with "effective" field
1933 # types, recursively.
1935 # After this step, the `type-aliases` property of the
1936 # `metadata` node is gone.
1938 # 2. Applies inheritance, following the `$inherit`/`inherit`
1941 # After this step, field type objects do not contain
1942 # `$inherit` or `inherit` properties.
1944 # This is done blindly, in that the process _doesn't_ validate
1945 # field type objects at this point.
1946 self
._expand
_field
_types
(config_node
['metadata'])
1948 # Make sure that the current configuration object is valid
1949 # considering log levels are not expanded yet.
1950 self
._schema
_validator
.validate(config_node
,
1951 '2/config/config-pre-log-level-expansion')
1953 # Expand log levels, that is, replace log level strings with
1954 # their equivalent numeric values.
1955 self
._expand
_log
_levels
(config_node
['metadata'])
1957 # validate the whole, effective configuration object
1958 self
._schema
_validator
.validate(config_node
, '2/config/config')
1960 # dump config if required
1961 if self
._dump
_config
:
1962 print(self
._yaml
_ordered
_dump
(config_node
, indent
=2,
1963 default_flow_style
=False))
1965 # get prefix, options, and metadata pseudo-object
1966 prefix
= self
._get
_prefix
(config_node
)
1967 opts
= self
._get
_options
(config_node
)
1968 pseudo_meta
= self
._create
_metadata
(config_node
)
1970 # create public configuration
1971 self
._config
= config
.Config(pseudo_meta
.to_public(), prefix
, opts
)
# Creates and returns a barectf configuration object from the YAML
# configuration file having the path `path`.
#
# `include_dirs` is the list of inclusion directories,
# `ignore_include_not_found` controls whether or not missing inclusion
# files are ignored, and `dump_config` makes the parser print the
# effective YAML configuration.
def _from_file(path, include_dirs, ignore_include_not_found, dump_config):
    try:
        return _YamlConfigParser(path, include_dirs, ignore_include_not_found,
                                 dump_config).config
    except _ConfigParseError as exc:
        _append_error_ctx(exc, 'Configuration',
                          f'Cannot create configuration from YAML file `{path}`')