barectf: reflow licence headers for 72 columns
[barectf.git] / barectf / config_parse.py
# The MIT License (MIT)
#
# Copyright (c) 2015-2020 Philippe Proulx <pproulx@efficios.com>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

from barectf import metadata
from barectf import config
import pkg_resources
import collections
import jsonschema
import datetime
import barectf
import os.path
import enum
import yaml
import uuid
import copy
import re
import os

class _ConfigParseErrorCtx:
    def __init__(self, name, msg=None):
        self._name = name
        self._msg = msg

    @property
    def name(self):
        return self._name

    @property
    def msg(self):
        return self._msg


class ConfigParseError(RuntimeError):
    def __init__(self, init_ctx_name, init_ctx_msg=None):
        self._ctx = []
        self.append_ctx(init_ctx_name, init_ctx_msg)

    @property
    def ctx(self):
        return self._ctx

    def append_ctx(self, name, msg=None):
        self._ctx.append(_ConfigParseErrorCtx(name, msg))

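# A parsing error is built up as a chain of contexts, from the most
# specific to the most general. A minimal usage sketch (hypothetical
# object names):
#
#     try:
#         raise ConfigParseError('Event `my-event`', 'Empty')
#     except ConfigParseError as exc:
#         exc.append_ctx('Stream `my-stream`')
#         exc.append_ctx('Metadata')
#
#         # `exc.ctx` now holds three contexts, innermost first
#         for ctx in exc.ctx:
#             print(ctx.name, ctx.msg)
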
def _opt_to_public(obj):
    if obj is None:
        return

    return obj.to_public()


class _PseudoObj:
    def __init__(self):
        self._public = None

    def to_public(self):
        if self._public is None:
            self._public = self._to_public()

        return self._public

    def _to_public(self):
        raise NotImplementedError


class _PropertyMapping(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.object = None
        self.prop = None

    def _to_public(self):
        return metadata.PropertyMapping(self.object.to_public(), self.prop)


class _Integer(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.size = None
        self.byte_order = None
        self.align = None
        self.signed = False
        self.base = 10
        self.encoding = metadata.Encoding.NONE
        self.property_mappings = []

    @property
    def real_align(self):
        if self.align is None:
            if self.size % 8 == 0:
                return 8
            else:
                return 1
        else:
            return self.align

    def _to_public(self):
        prop_mappings = [pm.to_public() for pm in self.property_mappings]
        return metadata.Integer(self.size, self.byte_order, self.align,
                                self.signed, self.base, self.encoding,
                                prop_mappings)

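# Note that, when no alignment is set explicitly, an integer type is
# byte-aligned if its size is a multiple of 8 bits, and bit-aligned
# otherwise. A sketch with illustrative values only:
#
#     int_t = _Integer()
#     int_t.size = 32
#     assert int_t.real_align == 8   # 32 % 8 == 0: byte-aligned
#     int_t.size = 12
#     assert int_t.real_align == 1   # not a multiple of 8: bit-aligned
#     int_t.align = 16
#     assert int_t.real_align == 16  # explicit alignment wins
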
class _FloatingPoint(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.exp_size = None
        self.mant_size = None
        self.byte_order = None
        self.align = 8

    @property
    def real_align(self):
        return self.align

    def _to_public(self):
        return metadata.FloatingPoint(self.exp_size, self.mant_size,
                                      self.byte_order, self.align)


class _Enum(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.value_type = None
        self.members = collections.OrderedDict()

    @property
    def real_align(self):
        return self.value_type.real_align

    def _to_public(self):
        return metadata.Enum(self.value_type.to_public(), self.members)


class _String(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.encoding = metadata.Encoding.UTF8

    @property
    def real_align(self):
        return 8

    def _to_public(self):
        return metadata.String(self.encoding)


class _Array(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.element_type = None
        self.length = None

    @property
    def real_align(self):
        return self.element_type.real_align

    def _to_public(self):
        return metadata.Array(self.element_type.to_public(), self.length)


class _Struct(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.min_align = 1
        self.fields = collections.OrderedDict()

    @property
    def real_align(self):
        align = self.min_align

        for pseudo_field in self.fields.values():
            if pseudo_field.real_align > align:
                align = pseudo_field.real_align

        return align

    def _to_public(self):
        fields = []

        for name, pseudo_field in self.fields.items():
            fields.append((name, pseudo_field.to_public()))

        return metadata.Struct(self.min_align, collections.OrderedDict(fields))

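# The effective alignment of a structure type is the maximum of its
# minimum alignment and of the effective alignments of its fields. A
# sketch with illustrative values only:
#
#     struct_t = _Struct()
#     struct_t.min_align = 8
#     int_t = _Integer()
#     int_t.size = 64
#     int_t.align = 64
#     struct_t.fields['ts'] = int_t
#     assert struct_t.real_align == 64
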
class _Trace(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.byte_order = None
        self.uuid = None
        self.packet_header_type = None

    def _to_public(self):
        return metadata.Trace(self.byte_order, self.uuid,
                              _opt_to_public(self.packet_header_type))


class _Clock(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.name = None
        self.uuid = None
        self.description = None
        self.freq = int(1e9)
        self.error_cycles = 0
        self.offset_seconds = 0
        self.offset_cycles = 0
        self.absolute = False
        self.return_ctype = 'uint32_t'

    def _to_public(self):
        return metadata.Clock(self.name, self.uuid, self.description, self.freq,
                              self.error_cycles, self.offset_seconds,
                              self.offset_cycles, self.absolute,
                              self.return_ctype)


class _Event(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.id = None
        self.name = None
        self.log_level = None
        self.payload_type = None
        self.context_type = None

    def _to_public(self):
        return metadata.Event(self.id, self.name, self.log_level,
                              _opt_to_public(self.payload_type),
                              _opt_to_public(self.context_type))


class _Stream(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.id = None
        self.name = None
        self.packet_context_type = None
        self.event_header_type = None
        self.event_context_type = None
        self.events = collections.OrderedDict()

    def is_event_empty(self, event):
        total_fields = 0

        if self.event_header_type is not None:
            total_fields += len(self.event_header_type.fields)

        if self.event_context_type is not None:
            total_fields += len(self.event_context_type.fields)

        if event.context_type is not None:
            total_fields += len(event.context_type.fields)

        if event.payload_type is not None:
            total_fields += len(event.payload_type.fields)

        return total_fields == 0

    def _to_public(self):
        events = []

        for name, pseudo_ev in self.events.items():
            events.append((name, pseudo_ev.to_public()))

        return metadata.Stream(self.id, self.name,
                               _opt_to_public(self.packet_context_type),
                               _opt_to_public(self.event_header_type),
                               _opt_to_public(self.event_context_type),
                               collections.OrderedDict(events))


class _Metadata(_PseudoObj):
    def __init__(self):
        super().__init__()
        self.trace = None
        self.env = None
        self.clocks = None
        self.streams = None
        self.default_stream_name = None

    def _to_public(self):
        clocks = []

        for name, pseudo_clock in self.clocks.items():
            clocks.append((name, pseudo_clock.to_public()))

        streams = []

        for name, pseudo_stream in self.streams.items():
            streams.append((name, pseudo_stream.to_public()))

        return metadata.Metadata(self.trace.to_public(), self.env,
                                 collections.OrderedDict(clocks),
                                 collections.OrderedDict(streams),
                                 self.default_stream_name)

# This JSON schema reference resolver only serves to detect when it
# needs to resolve a remote URI.
#
# This must never happen in barectf because all our schemas are local;
# it would mean a programming or schema error.
class _RefResolver(jsonschema.RefResolver):
    def resolve_remote(self, uri):
        # this must never happen: all our schemas are local
        raise RuntimeError('Missing local schema with URI `{}`'.format(uri))


# Schema validator which considers all the schemas found in the barectf
# package's `schemas` directory.
#
# The only public method is validate() which accepts an instance to
# validate as well as a schema short ID.
class _SchemaValidator:
    def __init__(self):
        subdirs = ['config', os.path.join('2', 'config')]
        schemas_dir = pkg_resources.resource_filename(__name__, 'schemas')
        self._store = {}

        for subdir in subdirs:
            dir = os.path.join(schemas_dir, subdir)

            for file_name in os.listdir(dir):
                if not file_name.endswith('.yaml'):
                    continue

                with open(os.path.join(dir, file_name)) as f:
                    schema = yaml.load(f, Loader=yaml.SafeLoader)

                assert '$id' in schema
                schema_id = schema['$id']
                assert schema_id not in self._store
                self._store[schema_id] = schema

    @staticmethod
    def _dict_from_ordered_dict(o_dict):
        dct = {}

        for k, v in o_dict.items():
            new_v = v

            if type(v) is collections.OrderedDict:
                new_v = _SchemaValidator._dict_from_ordered_dict(v)

            dct[k] = new_v

        return dct

    def _validate(self, instance, schema_short_id):
        # retrieve full schema ID from short ID
        schema_id = 'https://barectf.org/schemas/{}.json'.format(schema_short_id)
        assert schema_id in self._store

        # retrieve full schema
        schema = self._store[schema_id]

        # Create a reference resolver for this schema using this
        # validator's schema store.
        resolver = _RefResolver(base_uri=schema_id, referrer=schema,
                                store=self._store)

        # create a JSON schema validator using this reference resolver
        validator = jsonschema.Draft7Validator(schema, resolver=resolver)

        # Validate the instance, converting its
        # `collections.OrderedDict` objects to `dict` objects so as to
        # make any error message easier to read (because
        # validator.validate() below uses str() for error messages, and
        # collections.OrderedDict.__str__() is bulky).
        validator.validate(self._dict_from_ordered_dict(instance))

    # Validates `instance` using the schema having the short ID
    # `schema_short_id`.
    #
    # A schema short ID is the part between `schemas/` and `.json` in
    # its URI.
    #
    # Raises a `ConfigParseError` object, hiding any `jsonschema`
    # exception, on validation failure.
    def validate(self, instance, schema_short_id):
        try:
            self._validate(instance, schema_short_id)
        except jsonschema.ValidationError as exc:
            # convert to barectf `ConfigParseError` exception
            contexts = ['Configuration object']
            contexts += ['`{}` property'.format(p) for p in exc.absolute_path]
            schema_ctx = ''

            if len(exc.context) > 0:
                msgs = '; '.join([e.message for e in exc.context])
                schema_ctx = ': {}'.format(msgs)

            new_exc = ConfigParseError(contexts.pop(),
                                       '{}{} (from schema `{}`)'.format(exc.message,
                                                                        schema_ctx,
                                                                        schema_short_id))

            for ctx in reversed(contexts):
                new_exc.append_ctx(ctx)

            raise new_exc

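# Typical (hypothetical) usage of the validator above, reusing the
# `2/config/config-pre-include` short ID which appears later in this
# module:
#
#     validator = _SchemaValidator()
#
#     try:
#         validator.validate(config_node, '2/config/config-pre-include')
#     except ConfigParseError as exc:
#         # `exc` carries a readable context chain instead of a raw
#         # `jsonschema` exception
#         ...
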
def _byte_order_str_to_bo(bo_str):
    bo_str = bo_str.lower()

    if bo_str == 'le':
        return metadata.ByteOrder.LE
    elif bo_str == 'be':
        return metadata.ByteOrder.BE


def _encoding_str_to_encoding(encoding_str):
    encoding_str = encoding_str.lower()

    if encoding_str == 'utf-8' or encoding_str == 'utf8':
        return metadata.Encoding.UTF8
    elif encoding_str == 'ascii':
        return metadata.Encoding.ASCII
    elif encoding_str == 'none':
        return metadata.Encoding.NONE


def _validate_identifier(iden, ctx_obj_name, prop):
    assert type(iden) is str
    ctf_keywords = {
        'align',
        'callsite',
        'clock',
        'enum',
        'env',
        'event',
        'floating_point',
        'integer',
        'stream',
        'string',
        'struct',
        'trace',
        'typealias',
        'typedef',
        'variant',
    }

    if iden in ctf_keywords:
        fmt = 'Invalid {} (not a valid identifier): `{}`'
        raise ConfigParseError(ctx_obj_name, fmt.format(prop, iden))


def _validate_alignment(align, ctx_obj_name):
    assert align >= 1

    if (align & (align - 1)) != 0:
        raise ConfigParseError(ctx_obj_name,
                               'Invalid alignment: {}'.format(align))

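# _validate_alignment() uses the classic bit trick to check for a power
# of two: a power of two has a single bit set, so subtracting 1 flips
# all the lower bits, and ANDing the two values yields 0. For example:
#
#     8 & 7 == 0b1000 & 0b0111 == 0  -> valid alignment
#     6 & 5 == 0b0110 & 0b0101 == 4  -> invalid alignment
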
# Appends the context having the object name `obj_name` and the
# optional message `msg` to the `ConfigParseError` exception `exc`,
# then re-raises the current exception (this must be called from an
# `except` block).
def _append_error_ctx(exc, obj_name, msg=None):
    exc.append_ctx(obj_name, msg)
    raise


# Entities.
#
# Order of values is important here.
@enum.unique
class _Entity(enum.IntEnum):
    TRACE_PACKET_HEADER = 0
    STREAM_PACKET_CONTEXT = 1
    STREAM_EVENT_HEADER = 2
    STREAM_EVENT_CONTEXT = 3
    EVENT_CONTEXT = 4
    EVENT_PAYLOAD = 5

# This validator validates the configured metadata for barectf-specific
# needs.
#
# barectf needs:
#
# * All headers/contexts are at least byte-aligned.
# * No nested structures or arrays.
class _BarectfMetadataValidator:
    def __init__(self):
        self._type_to_validate_type_func = {
            _Struct: self._validate_struct_type,
            _Array: self._validate_array_type,
        }

    def _validate_struct_type(self, t, entity_root):
        if not entity_root:
            raise ConfigParseError('Structure type',
                                   'Inner structure types are not supported as of this version')

        for field_name, field_type in t.fields.items():
            if entity_root and self._cur_entity is _Entity.TRACE_PACKET_HEADER:
                if field_name == 'uuid':
                    # allow
                    continue

            try:
                self._validate_type(field_type, False)
            except ConfigParseError as exc:
                _append_error_ctx(exc, 'Structure type\'s field `{}`'.format(field_name))

    def _validate_array_type(self, t, entity_root):
        raise ConfigParseError('Array type', 'Not supported as of this version')

    def _validate_type(self, t, entity_root):
        func = self._type_to_validate_type_func.get(type(t))

        if func is not None:
            func(t, entity_root)

    def _validate_entity(self, t):
        if t is None:
            return

        # make sure entity is byte-aligned
        if t.real_align < 8:
            raise ConfigParseError('Root type',
                                   'Alignment must be at least 8')

        assert type(t) is _Struct

        # validate types
        self._validate_type(t, True)

    def _validate_entities_and_names(self, meta):
        self._cur_entity = _Entity.TRACE_PACKET_HEADER

        try:
            self._validate_entity(meta.trace.packet_header_type)
        except ConfigParseError as exc:
            _append_error_ctx(exc, 'Trace', 'Invalid packet header type')

        for stream_name, stream in meta.streams.items():
            _validate_identifier(stream_name, 'Trace', 'stream name')
            self._cur_entity = _Entity.STREAM_PACKET_CONTEXT

            try:
                self._validate_entity(stream.packet_context_type)
            except ConfigParseError as exc:
                _append_error_ctx(exc, 'Stream `{}`'.format(stream_name),
                                  'Invalid packet context type')

            self._cur_entity = _Entity.STREAM_EVENT_HEADER

            try:
                self._validate_entity(stream.event_header_type)
            except ConfigParseError as exc:
                _append_error_ctx(exc, 'Stream `{}`'.format(stream_name),
                                  'Invalid event header type')

            self._cur_entity = _Entity.STREAM_EVENT_CONTEXT

            try:
                self._validate_entity(stream.event_context_type)
            except ConfigParseError as exc:
                _append_error_ctx(exc, 'Stream `{}`'.format(stream_name),
                                  'Invalid event context type')

            try:
                for ev_name, ev in stream.events.items():
                    _validate_identifier(ev_name,
                                         'Stream `{}`'.format(stream_name),
                                         'event name')

                    self._cur_entity = _Entity.EVENT_CONTEXT

                    try:
                        self._validate_entity(ev.context_type)
                    except ConfigParseError as exc:
                        _append_error_ctx(exc, 'Event `{}`'.format(ev_name),
                                          'Invalid context type')

                    self._cur_entity = _Entity.EVENT_PAYLOAD

                    try:
                        self._validate_entity(ev.payload_type)
                    except ConfigParseError as exc:
                        _append_error_ctx(exc, 'Event `{}`'.format(ev_name),
                                          'Invalid payload type')

                    if stream.is_event_empty(ev):
                        raise ConfigParseError('Event `{}`'.format(ev_name), 'Empty')
            except ConfigParseError as exc:
                _append_error_ctx(exc, 'Stream `{}`'.format(stream_name))

    def _validate_default_stream(self, meta):
        if meta.default_stream_name:
            if meta.default_stream_name not in meta.streams.keys():
                fmt = 'Default stream name (`{}`) does not exist'
                raise ConfigParseError('barectf metadata',
                                       fmt.format(meta.default_stream_name))

    def validate(self, meta):
        self._validate_entities_and_names(meta)
        self._validate_default_stream(meta)

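# As an example, the validator above rejects a configuration in which a
# field of a root structure type is itself a structure or an array
# (hypothetical YAML snippet):
#
#     payload-type:
#       class: struct
#       fields:
#         inner:
#           class: struct    # rejected: inner structure type
#           fields:
#             a:
#               class: int
#               size: 8
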
# This validator validates special fields of trace, stream, and event
# types.
#
# For example, it checks that the `stream_id` field exists in the trace
# packet header if there's more than one stream, and much more.
class _MetadataSpecialFieldsValidator:
    def _validate_trace_packet_header_type(self, t):
        # needs `stream_id` field?
        if len(self._meta.streams) > 1:
            # yes
            if t is None:
                raise ConfigParseError('`packet-header-type` property',
                                       'Need `stream_id` field (more than one stream), but trace packet header type is missing')

            if type(t) is not _Struct:
                raise ConfigParseError('`packet-header-type` property',
                                       'Need `stream_id` field (more than one stream), but trace packet header type is not a structure type')

            if 'stream_id' not in t.fields:
                raise ConfigParseError('`packet-header-type` property',
                                       'Need `stream_id` field (more than one stream)')

        # validate `magic` and `stream_id` types
        if type(t) is not _Struct:
            return

        for i, (field_name, field_type) in enumerate(t.fields.items()):
            if field_name == 'magic':
                if type(field_type) is not _Integer:
                    raise ConfigParseError('`packet-header-type` property',
                                           '`magic` field must be an integer type')

                if field_type.signed or field_type.size != 32:
                    raise ConfigParseError('`packet-header-type` property',
                                           '`magic` field must be a 32-bit unsigned integer type')

                if i != 0:
                    raise ConfigParseError('`packet-header-type` property',
                                           '`magic` field must be the first trace packet header type\'s field')
            elif field_name == 'stream_id':
                if type(field_type) is not _Integer:
                    raise ConfigParseError('`packet-header-type` property',
                                           '`stream_id` field must be an integer type')

                if field_type.signed:
                    raise ConfigParseError('`packet-header-type` property',
                                           '`stream_id` field must be an unsigned integer type')

                # `stream_id` size can fit all stream IDs
                if len(self._meta.streams) > (1 << field_type.size):
                    raise ConfigParseError('`packet-header-type` property',
                                           '`stream_id` field\'s size is too small for the number of trace streams')
            elif field_name == 'uuid':
                if self._meta.trace.uuid is None:
                    raise ConfigParseError('`packet-header-type` property',
                                           '`uuid` field specified, but no trace UUID provided')

                if type(field_type) is not _Array:
                    raise ConfigParseError('`packet-header-type` property',
                                           '`uuid` field must be an array')

                if field_type.length != 16:
                    raise ConfigParseError('`packet-header-type` property',
                                           '`uuid` field must be an array of 16 bytes')

                element_type = field_type.element_type

                if type(element_type) is not _Integer:
                    raise ConfigParseError('`packet-header-type` property',
                                           '`uuid` field must be an array of 16 unsigned bytes')

                if element_type.size != 8:
                    raise ConfigParseError('`packet-header-type` property',
                                           '`uuid` field must be an array of 16 unsigned bytes')

                if element_type.signed:
                    raise ConfigParseError('`packet-header-type` property',
                                           '`uuid` field must be an array of 16 unsigned bytes')

                if element_type.real_align != 8:
                    raise ConfigParseError('`packet-header-type` property',
                                           '`uuid` field must be an array of 16 unsigned, byte-aligned bytes')

    def _validate_trace(self, meta):
        self._validate_trace_packet_header_type(meta.trace.packet_header_type)

    def _validate_stream_packet_context(self, stream):
        t = stream.packet_context_type

        if t is None:
            raise ConfigParseError('Stream',
                                   'Missing `packet-context-type` property')

        if type(t) is not _Struct:
            raise ConfigParseError('`packet-context-type` property',
                                   'Expecting a structure type')

        # `timestamp_begin`, if it exists, must be an unsigned integer
        # type mapped to a clock
        ts_begin = None

        if 'timestamp_begin' in t.fields:
            ts_begin = t.fields['timestamp_begin']

            if type(ts_begin) is not _Integer:
                raise ConfigParseError('`packet-context-type` property',
                                       '`timestamp_begin` field must be an integer type')

            if ts_begin.signed:
                raise ConfigParseError('`packet-context-type` property',
                                       '`timestamp_begin` field must be an unsigned integer type')

            if not ts_begin.property_mappings:
                raise ConfigParseError('`packet-context-type` property',
                                       '`timestamp_begin` field must be mapped to a clock')

        # `timestamp_end`, if it exists, must be an unsigned integer
        # type mapped to a clock
        ts_end = None

        if 'timestamp_end' in t.fields:
            ts_end = t.fields['timestamp_end']

            if type(ts_end) is not _Integer:
                raise ConfigParseError('`packet-context-type` property',
                                       '`timestamp_end` field must be an integer type')

            if ts_end.signed:
                raise ConfigParseError('`packet-context-type` property',
                                       '`timestamp_end` field must be an unsigned integer type')

            if not ts_end.property_mappings:
                raise ConfigParseError('`packet-context-type` property',
                                       '`timestamp_end` field must be mapped to a clock')

        # `timestamp_begin` and `timestamp_end` must exist together
        if (('timestamp_begin' in t.fields) ^ ('timestamp_end' in t.fields)):
            raise ConfigParseError('`packet-context-type` property',
                                   '`timestamp_begin` and `timestamp_end` fields must be defined together in stream packet context type')

        # `timestamp_begin` and `timestamp_end` must be mapped to the
        # same clock
        if ts_begin is not None and ts_end is not None:
            if ts_begin.property_mappings[0].object.name != ts_end.property_mappings[0].object.name:
                raise ConfigParseError('`packet-context-type` property',
                                       '`timestamp_begin` and `timestamp_end` fields must be mapped to the same clock object in stream packet context type')

        # `events_discarded`, if it exists, must be an unsigned integer
        # type
        if 'events_discarded' in t.fields:
            events_discarded = t.fields['events_discarded']

            if type(events_discarded) is not _Integer:
                raise ConfigParseError('`packet-context-type` property',
                                       '`events_discarded` field must be an integer type')

            if events_discarded.signed:
                raise ConfigParseError('`packet-context-type` property',
                                       '`events_discarded` field must be an unsigned integer type')

        # `packet_size` must exist
        if 'packet_size' not in t.fields:
            raise ConfigParseError('`packet-context-type` property',
                                   'Missing `packet_size` field in stream packet context type')

        packet_size = t.fields['packet_size']

        # `content_size` must exist
        if 'content_size' not in t.fields:
            raise ConfigParseError('`packet-context-type` property',
                                   'Missing `content_size` field in stream packet context type')

        content_size = t.fields['content_size']

        # `packet_size` must be an unsigned integer type
        if type(packet_size) is not _Integer:
            raise ConfigParseError('`packet-context-type` property',
                                   '`packet_size` field in stream packet context type must be an integer type')

        if packet_size.signed:
            raise ConfigParseError('`packet-context-type` property',
                                   '`packet_size` field in stream packet context type must be an unsigned integer type')

        # `content_size` must be an unsigned integer type
        if type(content_size) is not _Integer:
            raise ConfigParseError('`packet-context-type` property',
                                   '`content_size` field in stream packet context type must be an integer type')

        if content_size.signed:
            raise ConfigParseError('`packet-context-type` property',
                                   '`content_size` field in stream packet context type must be an unsigned integer type')

        # `packet_size` size must be greater than or equal to
        # `content_size` size
        if content_size.size > packet_size.size:
            raise ConfigParseError('`packet-context-type` property',
                                   '`content_size` field size must be less than or equal to `packet_size` field size')

    def _validate_stream_event_header(self, stream):
        t = stream.event_header_type

        # needs `id` field?
        if len(stream.events) > 1:
            # yes
            if t is None:
                raise ConfigParseError('`event-header-type` property',
                                       'Need `id` field (more than one event), but stream event header type is missing')

            if type(t) is not _Struct:
                raise ConfigParseError('`event-header-type` property',
                                       'Need `id` field (more than one event), but stream event header type is not a structure type')

            if 'id' not in t.fields:
                raise ConfigParseError('`event-header-type` property',
                                       'Need `id` field (more than one event)')

        # validate `id` and `timestamp` types
        if type(t) is not _Struct:
            return

        # `timestamp`, if it exists, must be an unsigned integer type
        # mapped to a clock
        if 'timestamp' in t.fields:
            ts = t.fields['timestamp']

            if type(ts) is not _Integer:
                raise ConfigParseError('`event-header-type` property',
                                       '`timestamp` field must be an integer type')

            if ts.signed:
                raise ConfigParseError('`event-header-type` property',
                                       '`timestamp` field must be an unsigned integer type')

            if not ts.property_mappings:
                raise ConfigParseError('`event-header-type` property',
                                       '`timestamp` field must be mapped to a clock')

        if 'id' in t.fields:
            eid = t.fields['id']

            # `id` must be an unsigned integer type
            if type(eid) is not _Integer:
                raise ConfigParseError('`event-header-type` property',
                                       '`id` field must be an integer type')

            if eid.signed:
                raise ConfigParseError('`event-header-type` property',
                                       '`id` field must be an unsigned integer type')

            # `id` size can fit all event IDs
            if len(stream.events) > (1 << eid.size):
                raise ConfigParseError('`event-header-type` property',
                                       '`id` field\'s size is too small for the number of stream events')

    def _validate_stream(self, stream):
        self._validate_stream_packet_context(stream)
        self._validate_stream_event_header(stream)

    def validate(self, meta):
        self._meta = meta
        self._validate_trace(meta)

        for stream in meta.streams.values():
            try:
                self._validate_stream(stream)
            except ConfigParseError as exc:
                _append_error_ctx(exc, 'Stream `{}`'.format(stream.name), 'Invalid')

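# A (hypothetical) stream packet context which satisfies the validator
# above could look like this in YAML, assuming `default` is the name of
# a declared clock object:
#
#     packet-context-type:
#       class: struct
#       fields:
#         timestamp_begin:
#           class: int
#           size: 64
#           property-mappings:
#             - type: clock
#               name: default
#               property: value
#         timestamp_end:
#           ... (same type as `timestamp_begin`)
#         packet_size:
#           class: int
#           size: 32
#         content_size:
#           class: int
#           size: 32
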
class _YamlConfigParser:
    def __init__(self, include_dirs, ignore_include_not_found, dump_config):
        self._class_name_to_create_type_func = {
            'int': self._create_integer,
            'integer': self._create_integer,
            'flt': self._create_float,
            'float': self._create_float,
            'floating-point': self._create_float,
            'enum': self._create_enum,
            'enumeration': self._create_enum,
            'str': self._create_string,
            'string': self._create_string,
            'struct': self._create_struct,
            'structure': self._create_struct,
            'array': self._create_array,
        }
        self._include_dirs = include_dirs
        self._ignore_include_not_found = ignore_include_not_found
        self._dump_config = dump_config
        self._schema_validator = _SchemaValidator()

    def _set_byte_order(self, metadata_node):
        self._bo = _byte_order_str_to_bo(metadata_node['trace']['byte-order'])
        assert self._bo is not None

    def _set_int_clock_prop_mapping(self, int_obj, prop_mapping_node):
        clock_name = prop_mapping_node['name']
        clock = self._clocks.get(clock_name)

        if clock is None:
            raise ConfigParseError('Integer type\'s clock property mapping',
                                   'Invalid clock name `{}`'.format(clock_name))

        prop_mapping = _PropertyMapping()
        prop_mapping.object = clock
        prop_mapping.prop = 'value'
        int_obj.property_mappings.append(prop_mapping)

    def _create_integer(self, node):
        obj = _Integer()

        # size
        obj.size = node['size']

        # align
        align_node = node.get('align')

        if align_node is not None:
            _validate_alignment(align_node, 'Integer type')
            obj.align = align_node

        # signed
        signed_node = node.get('signed')

        if signed_node is not None:
            obj.signed = signed_node

        # byte order
        obj.byte_order = self._bo
        bo_node = node.get('byte-order')

        if bo_node is not None:
            obj.byte_order = _byte_order_str_to_bo(bo_node)

        # base
        base_node = node.get('base')

        if base_node is not None:
            if base_node == 'bin':
                obj.base = 2
            elif base_node == 'oct':
                obj.base = 8
            elif base_node == 'dec':
                obj.base = 10
            else:
                assert base_node == 'hex'
                obj.base = 16

        # encoding
        encoding_node = node.get('encoding')

        if encoding_node is not None:
            obj.encoding = _encoding_str_to_encoding(encoding_node)

        # property mappings
        pm_node = node.get('property-mappings')

        if pm_node is not None:
            assert len(pm_node) == 1
            self._set_int_clock_prop_mapping(obj, pm_node[0])

        return obj

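    # Sketch of a YAML integer type node which the method above
    # accepts (property names follow the version 2 configuration
    # schema; the clock name is hypothetical):
    #
    #     class: int
    #     size: 32
    #     align: 8
    #     signed: false
    #     base: hex
    #     property-mappings:
    #       - type: clock
    #         name: my_clock
    #         property: value
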
    def _create_float(self, node):
        obj = _FloatingPoint()

        # size
        size_node = node['size']
        obj.exp_size = size_node['exp']
        obj.mant_size = size_node['mant']

        # align
        align_node = node.get('align')

        if align_node is not None:
            _validate_alignment(align_node, 'Floating point number type')
            obj.align = align_node

        # byte order
        obj.byte_order = self._bo
        bo_node = node.get('byte-order')

        if bo_node is not None:
            obj.byte_order = _byte_order_str_to_bo(bo_node)

        return obj

    def _create_enum(self, node):
        obj = _Enum()

        # value type
        try:
            obj.value_type = self._create_type(node['value-type'])
        except ConfigParseError as exc:
            _append_error_ctx(exc, 'Enumeration type',
                              'Cannot create integer type')

        # members
        members_node = node.get('members')

        if members_node is not None:
            if obj.value_type.signed:
                value_min = -(1 << (obj.value_type.size - 1))
                value_max = (1 << (obj.value_type.size - 1)) - 1
            else:
                value_min = 0
                value_max = (1 << obj.value_type.size) - 1

            cur = 0

            for m_node in members_node:
                if type(m_node) is str:
                    label = m_node
                    value = (cur, cur)
                    cur += 1
                else:
                    assert type(m_node) is collections.OrderedDict
                    label = m_node['label']
                    value = m_node['value']

                    if type(value) is int:
                        cur = value + 1
                        value = (value, value)
                    else:
                        assert type(value) is list
                        assert len(value) == 2
                        mn = value[0]
                        mx = value[1]

                        if mn > mx:
                            raise ConfigParseError('Enumeration type',
                                                   'Invalid member (`{}`): invalid range ({} > {})'.format(label, mn, mx))

                        value = (mn, mx)
                        cur = mx + 1

                name_fmt = 'Enumeration type\'s member `{}`'
                msg_fmt = 'Value {} is outside the value type range [{}, {}]'

                if value[0] < value_min or value[0] > value_max:
                    raise ConfigParseError(name_fmt.format(label),
                                           msg_fmt.format(value[0],
                                                          value_min,
                                                          value_max))

                if value[1] < value_min or value[1] > value_max:
                    raise ConfigParseError(name_fmt.format(label),
                                           msg_fmt.format(value[1],
                                                          value_min,
                                                          value_max))

                obj.members[label] = value

        return obj

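    # Sketch of a YAML enumeration type node which the method above
    # accepts: plain strings get successive values starting at 0, while
    # mappings can give an explicit value or a [min, max] range:
    #
    #     class: enum
    #     value-type:
    #       class: int
    #       size: 8
    #     members:
    #       - A               # value (0, 0)
    #       - label: B
    #         value: 5        # value (5, 5); next implicit value is 6
    #       - label: C
    #         value: [10, 15] # value (10, 15); next implicit value is 16
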
    def _create_string(self, node):
        obj = _String()

        # encoding
        encoding_node = node.get('encoding')

        if encoding_node is not None:
            obj.encoding = _encoding_str_to_encoding(encoding_node)

        return obj

    def _create_struct(self, node):
        obj = _Struct()

        # minimum alignment
        min_align_node = node.get('min-align')

        if min_align_node is not None:
            _validate_alignment(min_align_node, 'Structure type')
            obj.min_align = min_align_node

        # fields
        fields_node = node.get('fields')

        if fields_node is not None:
            for field_name, field_node in fields_node.items():
                _validate_identifier(field_name, 'Structure type', 'field name')

                try:
                    obj.fields[field_name] = self._create_type(field_node)
                except ConfigParseError as exc:
                    _append_error_ctx(exc, 'Structure type',
                                      'Cannot create field `{}`'.format(field_name))

        return obj

    def _create_array(self, node):
        obj = _Array()

        # length
        obj.length = node['length']

        # element type
        try:
            obj.element_type = self._create_type(node['element-type'])
        except ConfigParseError as exc:
            _append_error_ctx(exc, 'Array type', 'Cannot create element type')

        return obj

    def _create_type(self, type_node):
        return self._class_name_to_create_type_func[type_node['class']](type_node)

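    # _create_type() dispatches on the `class` property of a type node.
    # For example, this (hypothetical) node creates an array of eight
    # 16-bit integers within a structure:
    #
    #     class: struct
    #     fields:
    #       samples:
    #         class: array
    #         length: 8
    #         element-type:
    #           class: int
    #           size: 16
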
    def _create_clock(self, node):
        # create clock object
        clock = _Clock()

        # UUID
        uuid_node = node.get('uuid')

        if uuid_node is not None:
            try:
                clock.uuid = uuid.UUID(uuid_node)
            except:
                raise ConfigParseError('Clock', 'Malformed UUID: `{}`'.format(uuid_node))

        # description
        descr_node = node.get('description')

        if descr_node is not None:
            clock.description = descr_node

        # frequency
        freq_node = node.get('freq')

        if freq_node is not None:
            clock.freq = freq_node

        # error cycles
        error_cycles_node = node.get('error-cycles')

        if error_cycles_node is not None:
            clock.error_cycles = error_cycles_node

        # offset
        offset_node = node.get('offset')

        if offset_node is not None:
            # cycles
            offset_cycles_node = offset_node.get('cycles')

            if offset_cycles_node is not None:
                clock.offset_cycles = offset_cycles_node

            # seconds
            offset_seconds_node = offset_node.get('seconds')

            if offset_seconds_node is not None:
                clock.offset_seconds = offset_seconds_node

        # absolute
        absolute_node = node.get('absolute')

        if absolute_node is not None:
            clock.absolute = absolute_node

        # return C type (`$return-ctype`, with `return-ctype` as a
        # fallback)
        return_ctype_node = node.get('$return-ctype')

        if return_ctype_node is None:
            return_ctype_node = node.get('return-ctype')

        if return_ctype_node is not None:
            clock.return_ctype = return_ctype_node

        return clock

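    # Sketch of a YAML clock object node which the method above accepts
    # (all properties are optional; the values below are illustrative
    # only):
    #
    #     freq: 1000000000
    #     description: Monotonic system clock
    #     offset:
    #       seconds: 1600000000
    #       cycles: 0
    #     absolute: true
    #     $return-ctype: uint64_t
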
    def _register_clocks(self, metadata_node):
        self._clocks = collections.OrderedDict()
        clocks_node = metadata_node.get('clocks')

        if clocks_node is None:
            return

        for clock_name, clock_node in clocks_node.items():
            _validate_identifier(clock_name, 'Metadata', 'clock name')
            assert clock_name not in self._clocks

            try:
                clock = self._create_clock(clock_node)
            except ConfigParseError as exc:
                _append_error_ctx(exc, 'Metadata',
                                  'Cannot create clock `{}`'.format(clock_name))

            clock.name = clock_name
            self._clocks[clock_name] = clock

    def _create_env(self, metadata_node):
        env_node = metadata_node.get('env')

        if env_node is None:
            return collections.OrderedDict()

        for env_name, env_value in env_node.items():
            _validate_identifier(env_name, 'Metadata',
                                 'environment variable name')

        return copy.deepcopy(env_node)

    def _create_trace(self, metadata_node):
        # create trace object
        trace = _Trace()

        trace_node = metadata_node['trace']

        # set byte order (already parsed)
        trace.byte_order = self._bo

        # UUID
        uuid_node = trace_node.get('uuid')

        if uuid_node is not None:
            if uuid_node == 'auto':
                trace.uuid = uuid.uuid1()
            else:
                try:
                    trace.uuid = uuid.UUID(uuid_node)
                except:
                    raise ConfigParseError('Trace',
                                           'Malformed UUID: `{}`'.format(uuid_node))

        # packet header type
        pht_node = trace_node.get('packet-header-type')

        if pht_node is not None:
            try:
                trace.packet_header_type = self._create_type(pht_node)
            except ConfigParseError as exc:
                _append_error_ctx(exc, 'Trace',
                                  'Cannot create packet header type')

        return trace

    def _create_event(self, event_node):
        # create event object
        event = _Event()

        log_level_node = event_node.get('log-level')

        if log_level_node is not None:
            assert type(log_level_node) is int
            event.log_level = metadata.LogLevel(None, log_level_node)

        ct_node = event_node.get('context-type')

        if ct_node is not None:
            try:
                event.context_type = self._create_type(ct_node)
            except ConfigParseError as exc:
                _append_error_ctx(exc, 'Event',
                                  'Cannot create context type object')

        pt_node = event_node.get('payload-type')

        if pt_node is not None:
            try:
                event.payload_type = self._create_type(pt_node)
            except ConfigParseError as exc:
                _append_error_ctx(exc, 'Event',
                                  'Cannot create payload type object')

        return event

    def _create_stream(self, stream_name, stream_node):
        # create stream object
        stream = _Stream()

        pct_node = stream_node.get('packet-context-type')

        if pct_node is not None:
            try:
                stream.packet_context_type = self._create_type(pct_node)
            except ConfigParseError as exc:
                _append_error_ctx(exc, 'Stream',
                                  'Cannot create packet context type object')

        eht_node = stream_node.get('event-header-type')

        if eht_node is not None:
            try:
                stream.event_header_type = self._create_type(eht_node)
            except ConfigParseError as exc:
                _append_error_ctx(exc, 'Stream',
                                  'Cannot create event header type object')

        ect_node = stream_node.get('event-context-type')

        if ect_node is not None:
            try:
                stream.event_context_type = self._create_type(ect_node)
            except ConfigParseError as exc:
                _append_error_ctx(exc, 'Stream',
                                  'Cannot create event context type object')

        events_node = stream_node['events']
        cur_id = 0

        for ev_name, ev_node in events_node.items():
            try:
                ev = self._create_event(ev_node)
            except ConfigParseError as exc:
                _append_error_ctx(exc, 'Stream',
                                  'Cannot create event `{}`'.format(ev_name))

            ev.id = cur_id
            ev.name = ev_name
            stream.events[ev_name] = ev
            cur_id += 1

        default_node = stream_node.get('$default')

        if default_node is not None:
            if self._meta.default_stream_name is not None and self._meta.default_stream_name != stream_name:
                fmt = 'Cannot specify more than one default stream (default stream already set to `{}`)'
                raise ConfigParseError('Stream',
                                       fmt.format(self._meta.default_stream_name))

            self._meta.default_stream_name = stream_name

        return stream

    def _create_streams(self, metadata_node):
        streams = collections.OrderedDict()
        streams_node = metadata_node['streams']
        cur_id = 0

        for stream_name, stream_node in streams_node.items():
            try:
                stream = self._create_stream(stream_name, stream_node)
            except ConfigParseError as exc:
                _append_error_ctx(exc, 'Metadata',
                                  'Cannot create stream `{}`'.format(stream_name))

            stream.id = cur_id
            stream.name = stream_name
            streams[stream_name] = stream
            cur_id += 1

        return streams

    def _create_metadata(self, root):
        self._meta = _Metadata()
        metadata_node = root['metadata']

        if '$default-stream' in metadata_node and metadata_node['$default-stream'] is not None:
            default_stream_node = metadata_node['$default-stream']
            self._meta.default_stream_name = default_stream_node

        self._set_byte_order(metadata_node)
        self._register_clocks(metadata_node)
        self._meta.clocks = self._clocks
        self._meta.env = self._create_env(metadata_node)
        self._meta.trace = self._create_trace(metadata_node)
        self._meta.streams = self._create_streams(metadata_node)

        # validate metadata
        try:
            _MetadataSpecialFieldsValidator().validate(self._meta)
        except ConfigParseError as exc:
            _append_error_ctx(exc, 'Metadata')

        try:
            _BarectfMetadataValidator().validate(self._meta)
        except ConfigParseError as exc:
            _append_error_ctx(exc, 'barectf metadata')

        return self._meta

    def _get_prefix(self, config_node):
        prefix = config_node.get('prefix', 'barectf_')
        _validate_identifier(prefix, '`prefix` property', 'prefix')
        return prefix

    def _get_options(self, config_node):
        gen_prefix_def = False
        gen_default_stream_def = False
        options_node = config_node.get('options')

        if options_node is not None:
            gen_prefix_def = options_node.get('gen-prefix-def',
                                              gen_prefix_def)
            gen_default_stream_def = options_node.get('gen-default-stream-def',
                                                      gen_default_stream_def)

        return config.ConfigOptions(gen_prefix_def, gen_default_stream_def)

    def _get_last_include_file(self):
        if self._include_stack:
            return self._include_stack[-1]

        return self._root_yaml_path

    def _load_include(self, yaml_path):
        for inc_dir in self._include_dirs:
            # Current inclusion dir + file name path.
            #
            # Note: os.path.join() returns only the last argument if
            # it's an absolute path.
            inc_path = os.path.join(inc_dir, yaml_path)

            # real path (symbolic links resolved)
            real_path = os.path.realpath(inc_path)

            # normalized path (weird stuff removed!)
            norm_path = os.path.normpath(real_path)

            if not os.path.isfile(norm_path):
                # file doesn't exist: skip
                continue

            if norm_path in self._include_stack:
                base_path = self._get_last_include_file()
                raise ConfigParseError('In `{}`'.format(base_path),
                                       'Cannot recursively include file `{}`'.format(norm_path))

            self._include_stack.append(norm_path)

            # load raw content
            return self._yaml_ordered_load(norm_path)

        if not self._ignore_include_not_found:
            base_path = self._get_last_include_file()
            raise ConfigParseError('In `{}`'.format(base_path),
                                   'Cannot include file `{}`: file not found in include directories'.format(yaml_path))

    def _get_include_paths(self, include_node):
        if include_node is None:
            # none
            return []

        if type(include_node) is str:
            # wrap as array
            return [include_node]

        # already an array
        assert type(include_node) is list
        return include_node

    def _update_node(self, base_node, overlay_node):
        for olay_key, olay_value in overlay_node.items():
            if olay_key in base_node:
                base_value = base_node[olay_key]

                if type(olay_value) is collections.OrderedDict and type(base_value) is collections.OrderedDict:
                    # merge dictionaries
                    self._update_node(base_value, olay_value)
                elif type(olay_value) is list and type(base_value) is list:
                    # append extension array items to base items
                    base_value += olay_value
                else:
                    # fall back to replacing
                    base_node[olay_key] = olay_value
            else:
                base_node[olay_key] = olay_value

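    # The overlay merge implemented above is recursive for mappings,
    # appending for arrays, and replacing for scalars. A sketch with
    # hypothetical nodes (plain `dict` literals shown for brevity):
    #
    #     base:    {a: 1, b: [1], c: {d: 2}}
    #     overlay: {a: 9, b: [2], c: {e: 3}}
    #     result:  {a: 9, b: [1, 2], c: {d: 2, e: 3}}
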
    def _process_node_include(self, last_overlay_node,
                              process_base_include_cb,
                              process_children_include_cb=None):
        # process children inclusions first
        if process_children_include_cb is not None:
            process_children_include_cb(last_overlay_node)

        incl_prop_name = '$include'

        if incl_prop_name in last_overlay_node:
            include_node = last_overlay_node[incl_prop_name]
        else:
            # no inclusions!
            return last_overlay_node

        include_paths = self._get_include_paths(include_node)
        cur_base_path = self._get_last_include_file()
        base_node = None

        # keep the inclusion paths and remove the `$include` property
        include_paths = copy.deepcopy(include_paths)
        del last_overlay_node[incl_prop_name]

        for include_path in include_paths:
            # load raw YAML from included file
            overlay_node = self._load_include(include_path)

            if overlay_node is None:
                # Cannot find inclusion file, but we're ignoring those
                # errors, otherwise _load_include() itself raises a
                # config error.
                continue

            # recursively process inclusions
            try:
                overlay_node = process_base_include_cb(overlay_node)
            except ConfigParseError as exc:
                _append_error_ctx(exc, 'In `{}`'.format(cur_base_path))

            # pop inclusion stack now that we're done including
            del self._include_stack[-1]

            # At this point, `overlay_node` is fully resolved (does not
            # contain any `$include` property).
            if base_node is None:
                base_node = overlay_node
            else:
                self._update_node(base_node, overlay_node)

        # Finally, update the latest base node with our last overlay
        # node.
        if base_node is None:
            # Nothing was included, which is possible when we're
            # ignoring inclusion errors.
            return last_overlay_node

        self._update_node(base_node, last_overlay_node)
        return base_node

    def _process_event_include(self, event_node):
        # Make sure the event object is valid for the inclusion
        # processing stage.
        self._schema_validator.validate(event_node,
                                        '2/config/event-pre-include')

        # process inclusions
        return self._process_node_include(event_node,
                                          self._process_event_include)

    def _process_stream_include(self, stream_node):
        def process_children_include(stream_node):
            if 'events' in stream_node:
                events_node = stream_node['events']

                for key in list(events_node):
                    events_node[key] = self._process_event_include(events_node[key])

        # Make sure the stream object is valid for the inclusion
        # processing stage.
        self._schema_validator.validate(stream_node,
                                        '2/config/stream-pre-include')

        # process inclusions
        return self._process_node_include(stream_node,
                                          self._process_stream_include,
                                          process_children_include)

    def _process_trace_include(self, trace_node):
        # Make sure the trace object is valid for the inclusion
        # processing stage.
        self._schema_validator.validate(trace_node,
                                        '2/config/trace-pre-include')

        # process inclusions
        return self._process_node_include(trace_node,
                                          self._process_trace_include)

    def _process_clock_include(self, clock_node):
        # Make sure the clock object is valid for the inclusion
        # processing stage.
        self._schema_validator.validate(clock_node,
                                        '2/config/clock-pre-include')

        # process inclusions
        return self._process_node_include(clock_node,
                                          self._process_clock_include)

    def _process_metadata_include(self, metadata_node):
        def process_children_include(metadata_node):
            if 'trace' in metadata_node:
                metadata_node['trace'] = self._process_trace_include(metadata_node['trace'])

            if 'clocks' in metadata_node:
                clocks_node = metadata_node['clocks']

                for key in list(clocks_node):
                    clocks_node[key] = self._process_clock_include(clocks_node[key])

            if 'streams' in metadata_node:
                streams_node = metadata_node['streams']

                for key in list(streams_node):
                    streams_node[key] = self._process_stream_include(streams_node[key])

        # Make sure the metadata object is valid for the inclusion
        # processing stage.
        self._schema_validator.validate(metadata_node,
                                        '2/config/metadata-pre-include')

        # process inclusions
        return self._process_node_include(metadata_node,
                                          self._process_metadata_include,
                                          process_children_include)

    def _process_config_includes(self, config_node):
        # Process inclusions in this order:
        #
        # 1. Clock objects, event objects, and trace objects (the order
        #    between those is not important).
        #
        # 2. Stream objects.
        #
        # 3. Metadata object.
        #
        # This is because:
        #
        # * A metadata object can include clock objects, a trace
        #   object, stream objects, and event objects (indirectly).
        #
        # * A stream object can include event objects.
        #
        # We keep a stack of absolute paths to included files
        # (`self._include_stack`) to detect recursion.
        #
        # First, make sure the configuration object itself is valid for
        # the inclusion processing stage.
        self._schema_validator.validate(config_node,
                                        '2/config/config-pre-include')

        # Process metadata object inclusions.
        #
        # self._process_metadata_include() returns a new (or the same)
        # metadata object without any `$include` property in it,
        # recursively.
        config_node['metadata'] = self._process_metadata_include(config_node['metadata'])

        return config_node

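    # As an example of the inclusion mechanism, a stream object can
    # pull its properties from another file (hypothetical file and
    # object names):
    #
    #     # main configuration
    #     streams:
    #       my-stream:
    #         $include: [my-stream-base.yaml]
    #         events:
    #           my-event:
    #             payload-type: ...
    #
    # Properties defined here (the overlay) win over the properties of
    # `my-stream-base.yaml` (the base), following _update_node().
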
6839ffba
PP
    def _expand_field_type_aliases(self, metadata_node, type_aliases_node):
        def resolve_field_type_aliases(parent_node, key, from_descr,
                                       alias_set=None):
            if key not in parent_node:
                return

            # This set holds all the aliases we need to expand,
            # recursively. This is used to detect cycles.
            if alias_set is None:
                alias_set = set()

            node = parent_node[key]

            if node is None:
                return

            if type(node) is str:
                alias = node

                if alias not in resolved_aliases:
                    # Only check for a field type alias cycle when we
                    # didn't resolve the alias yet, as a given node can
                    # refer to the same field type alias more than once.
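                    # An illustrative cycle (hypothetical alias names):
                    # a `type-aliases` node `{a: b, b: a}`, where
                    # resolving `a` requires `b` and vice versa.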
                    if alias in alias_set:
                        fmt = 'Cycle detected during the resolution of type alias `{}`'
                        raise ConfigParseError(from_descr, fmt.format(alias))

                    # try to load the field type alias node named `alias`
                    if alias not in type_aliases_node:
                        raise ConfigParseError(from_descr,
                                               'Type alias `{}` does not exist'.format(alias))

                    # resolve it
                    alias_set.add(alias)
                    resolve_field_type_aliases(type_aliases_node, alias,
                                               from_descr, alias_set)
                    resolved_aliases.add(alias)

                parent_node[key] = copy.deepcopy(type_aliases_node[node])
                return

            # traverse, resolving field type aliases as needed
            for pkey in ['$inherit', 'inherit', 'value-type', 'element-type']:
                resolve_field_type_aliases(node, pkey, from_descr, alias_set)

            # structure field type fields
            pkey = 'fields'

            if pkey in node:
                assert type(node[pkey]) is collections.OrderedDict

                for field_name in node[pkey]:
                    resolve_field_type_aliases(node[pkey], field_name,
                                               from_descr, alias_set)

        def resolve_field_type_aliases_from(parent_node, key, parent_node_type_name,
                                            parent_node_name=None):
            from_descr = '`{}` property of {}'.format(key,
                                                      parent_node_type_name)

            if parent_node_name is not None:
                from_descr += ' `{}`'.format(parent_node_name)

            resolve_field_type_aliases(parent_node, key, from_descr)

        # set of resolved field type aliases
        resolved_aliases = set()

        # expand field type aliases within the trace, stream, and
        # event objects now
        resolve_field_type_aliases_from(metadata_node['trace'],
                                        'packet-header-type', 'trace')

        for stream_name, stream in metadata_node['streams'].items():
            resolve_field_type_aliases_from(stream, 'packet-context-type',
                                            'stream', stream_name)
            resolve_field_type_aliases_from(stream, 'event-header-type',
                                            'stream', stream_name)
            resolve_field_type_aliases_from(stream, 'event-context-type',
                                            'stream', stream_name)

            try:
                for event_name, event in stream['events'].items():
                    resolve_field_type_aliases_from(event, 'context-type',
                                                    'event', event_name)
                    resolve_field_type_aliases_from(event, 'payload-type',
                                                    'event', event_name)
            except ConfigParseError as exc:
                _append_error_ctx(exc, 'Stream `{}`'.format(stream_name))

        # we don't need the `type-aliases` node anymore
        del metadata_node['type-aliases']

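    # NOTE: Illustrative only, and assuming self._update_node()
    # performs the recursive overlay merge it's used for below: after
    # alias expansion, a (hypothetical) field type
    #
    #     $inherit:
    #       class: int
    #       size: 16
    #     size: 32
    #
    # becomes the effective field type `{class: int, size: 32}`.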
    def _expand_field_type_inheritance(self, metadata_node):
        def apply_inheritance(parent_node, key):
            if key not in parent_node:
                return

            node = parent_node[key]

            if node is None:
                return

            # process children first
            for pkey in ['$inherit', 'inherit', 'value-type', 'element-type']:
                apply_inheritance(node, pkey)

            # structure field type fields
            pkey = 'fields'

            if pkey in node:
                assert type(node[pkey]) is collections.OrderedDict

                for field_name in node[pkey]:
                    apply_inheritance(node[pkey], field_name)

            # apply the inheritance of this node itself
            if 'inherit' in node:
                # barectf 2.1: the `inherit` property was renamed to
                # `$inherit`
                assert '$inherit' not in node
                node['$inherit'] = node['inherit']
                del node['inherit']

            inherit_key = '$inherit'

            if inherit_key in node:
                assert type(node[inherit_key]) is collections.OrderedDict

                # apply inheritance below
                apply_inheritance(node, inherit_key)

                # `node` is an overlay on the `$inherit` node
                base_node = node[inherit_key]
                del node[inherit_key]
                self._update_node(base_node, node)

                # set the updated base node as this node
                parent_node[key] = base_node

        apply_inheritance(metadata_node['trace'], 'packet-header-type')

        for stream in metadata_node['streams'].values():
            apply_inheritance(stream, 'packet-context-type')
            apply_inheritance(stream, 'event-header-type')
            apply_inheritance(stream, 'event-context-type')

            for event in stream['events'].values():
                apply_inheritance(event, 'context-type')
                apply_inheritance(event, 'payload-type')

    def _expand_field_types(self, metadata_node):
        type_aliases_node = metadata_node.get('type-aliases')

        if type_aliases_node is None:
            # If there's no `type-aliases` node, then there are no
            # field type aliases and therefore no possible inheritance.
            return

        # first, expand field type aliases
        self._expand_field_type_aliases(metadata_node, type_aliases_node)

        # next, apply inheritance to create effective field types
        self._expand_field_type_inheritance(metadata_node)

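    # NOTE: Illustrative only: given a (hypothetical) `$log-levels`
    # node `{WARNING: 4}`, an event object's `log-level: WARNING`
    # property becomes `log-level: 4` after the expansion below.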
    def _expand_log_levels(self, metadata_node):
        if 'log-levels' in metadata_node:
            # barectf 2.1: the `log-levels` property was renamed to
            # `$log-levels`
            assert '$log-levels' not in metadata_node
            metadata_node['$log-levels'] = metadata_node['log-levels']
            del metadata_node['log-levels']

        log_levels_key = '$log-levels'
        log_levels_node = metadata_node.get(log_levels_key)

        if log_levels_node is None:
            # no log level aliases
            return

        # not needed anymore
        del metadata_node[log_levels_key]

        for stream_name, stream in metadata_node['streams'].items():
            try:
                for event_name, event in stream['events'].items():
                    prop_name = 'log-level'
                    ll_node = event.get(prop_name)

                    if ll_node is None:
                        continue

                    if type(ll_node) is str:
                        if ll_node not in log_levels_node:
                            raise ConfigParseError('Event `{}`'.format(event_name),
                                                   'Log level `{}` does not exist'.format(ll_node))

                        event[prop_name] = log_levels_node[ll_node]
            except ConfigParseError as exc:
                _append_error_ctx(exc, 'Stream `{}`'.format(stream_name))

    def _yaml_ordered_dump(self, node, **kwds):
        class ODumper(yaml.Dumper):
            pass

        def dict_representer(dumper, node):
            return dumper.represent_mapping(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                                            node.items())

        ODumper.add_representer(collections.OrderedDict, dict_representer)

        # Python -> YAML
        return yaml.dump(node, Dumper=ODumper, **kwds)

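    # NOTE: A minimal round-trip sketch (hypothetical file name):
    #
    #     node = self._yaml_ordered_load('cfg.yaml')
    #     print(self._yaml_ordered_dump(node, indent=2))
    #
    # Both directions preserve mapping order, which matters because
    # the order of structure field type fields is significant.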
    def _yaml_ordered_load(self, yaml_path):
        class OLoader(yaml.Loader):
            pass

        def construct_mapping(loader, node):
            loader.flatten_mapping(node)

            return collections.OrderedDict(loader.construct_pairs(node))

        OLoader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                                construct_mapping)

        # YAML -> Python
        try:
            with open(yaml_path, 'r') as f:
                node = yaml.load(f, OLoader)
        except (OSError, IOError):
            raise ConfigParseError('Configuration',
                                   'Cannot open file `{}`'.format(yaml_path))
        except ConfigParseError as exc:
            _append_error_ctx(exc, 'Configuration',
                              'Unknown error while trying to load file `{}`'.format(yaml_path))

        # the loaded node must be an associative array
        if type(node) is not collections.OrderedDict:
            raise ConfigParseError('Configuration',
                                   'Root of YAML file `{}` must be an associative array'.format(yaml_path))

        return node

    def _reset(self):
        self._version = None
        self._include_stack = []

    def parse(self, yaml_path):
        self._reset()
        self._root_yaml_path = yaml_path

        # load the configuration object as is from the root YAML file
        try:
            config_node = self._yaml_ordered_load(yaml_path)
        except ConfigParseError as exc:
            _append_error_ctx(exc, 'Configuration',
                              'Cannot parse YAML file `{}`'.format(yaml_path))

        # Make sure the configuration object is minimally valid, that
        # is, that it contains a valid `version` property.
        #
        # This step does not validate the whole configuration object
        # yet because we don't have an effective configuration object;
        # we still need to:
        #
        # * Process inclusions.
        # * Expand field types (inheritance and aliases).
        self._schema_validator.validate(config_node, 'config/config-min')

        # Process configuration object inclusions.
        #
        # self._process_config_includes() returns a new (or the same)
        # configuration object without any `$include` property in it,
        # recursively.
        config_node = self._process_config_includes(config_node)

        # Make sure that the current configuration object is valid,
        # considering that field types are not expanded yet.
        self._schema_validator.validate(config_node,
                                        '2/config/config-pre-field-type-expansion')

        # Expand field types.
        #
        # This process:
        #
        # 1. Replaces field type aliases with "effective" field
        #    types, recursively.
        #
        #    After this step, the `type-aliases` property of the
        #    `metadata` node is gone.
        #
        # 2. Applies inheritance, following the `$inherit`/`inherit`
        #    properties.
        #
        #    After this step, field type objects do not contain
        #    `$inherit` or `inherit` properties.
        #
        # This is done blindly, in that the process _doesn't_ validate
        # field type objects at this point.
        self._expand_field_types(config_node['metadata'])

        # Make sure that the current configuration object is valid,
        # considering that log levels are not expanded yet.
        self._schema_validator.validate(config_node,
                                        '2/config/config-pre-log-level-expansion')

        # Expand log levels, that is, replace log level strings with
        # their equivalent numeric values.
        self._expand_log_levels(config_node['metadata'])

        # validate the whole, effective configuration object
        self._schema_validator.validate(config_node, '2/config/config')

        # dump the configuration if required
        if self._dump_config:
            print(self._yaml_ordered_dump(config_node, indent=2,
                                          default_flow_style=False))

        # get the prefix, options, and metadata pseudo-object
        prefix = self._get_prefix(config_node)
        opts = self._get_options(config_node)
        pseudo_meta = self._create_metadata(config_node)

        # create the public configuration
        return config.Config(pseudo_meta.to_public(), prefix, opts)


def _from_file(path, include_dirs, ignore_include_not_found, dump_config):
    try:
        parser = _YamlConfigParser(include_dirs, ignore_include_not_found,
                                   dump_config)
        return parser.parse(path)
    except ConfigParseError as exc:
        _append_error_ctx(exc, 'Configuration',
                          'Cannot create configuration from YAML file `{}`'.format(path))
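
# NOTE: A minimal usage sketch, with a hypothetical file name and no
# include directories:
#
#     cfg = _from_file('config.yaml', include_dirs=[],
#                      ignore_include_not_found=False, dump_config=False)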