From 6b5a6c2fb74c862851e2cd5775024e014098e56c Mon Sep 17 00:00:00 2001 From: Philippe Proulx Date: Fri, 15 May 2020 15:28:59 -0400 Subject: [PATCH] Append context to a `ConfigError` instead of wrapping another exception Signed-off-by: Philippe Proulx --- barectf/cli.py | 28 +- barectf/config.py | 876 ++++++++++++++++++++++++++++++---------------- 2 files changed, 585 insertions(+), 319 deletions(-) diff --git a/barectf/cli.py b/barectf/cli.py index 1734117..8b12f15 100644 --- a/barectf/cli.py +++ b/barectf/cli.py @@ -39,28 +39,16 @@ def _perror(msg): sys.exit(1) -def _pconfig_error(e): - lines = [] - - while True: - if e is None: - break - - lines.append(str(e)) - - if not hasattr(e, 'prev'): - break - - e = e.prev - - if len(lines) == 1: - _perror(lines[0]) - +def _pconfig_error(exc): cprint('Error:', 'red', file=sys.stderr) - for i, line in enumerate(lines): - suf = ':' if i < len(lines) - 1 else '' - cprint(' ' + line + suf, 'red', attrs=['bold'], file=sys.stderr) + for ctx in reversed(exc.ctx): + if ctx.msg is not None: + msg = ' {}'.format(ctx.msg) + else: + msg = '' + cprint(' {}:{}'.format(ctx.name, msg), 'red', attrs=['bold'], + file=sys.stderr) sys.exit(1) diff --git a/barectf/config.py b/barectf/config.py index 7b574db..dbccaf5 100644 --- a/barectf/config.py +++ b/barectf/config.py @@ -32,14 +32,31 @@ import re import os +class _ConfigErrorCtx: + def __init__(self, name, msg=None): + self._name = name + self._msg = msg + + @property + def name(self): + return self._name + + @property + def msg(self): + return self._msg + + class ConfigError(RuntimeError): - def __init__(self, msg, prev=None): - super().__init__(msg) - self._prev = prev + def __init__(self, init_ctx_name, init_ctx_msg=None): + self._ctx = [] + self.append_ctx(init_ctx_name, init_ctx_msg) @property - def prev(self): - return self._prev + def ctx(self): + return self._ctx + + def append_ctx(self, name, msg=None): + self._ctx.append(_ConfigErrorCtx(name, msg)) class Config: @@ -55,14 +72,16 @@ class Config: validator.validate(meta) validator = _MetadataSpecialFieldsValidator() validator.validate(meta) - except Exception as e: - raise ConfigError('metadata error', e) + except ConfigError as exc: + exc.append_ctx('metadata') + raise try: validator = _BarectfMetadataValidator() validator.validate(meta) - except Exception as e: - raise ConfigError('barectf metadata error', e) + except ConfigError as exc: + exc.append_ctx('barectf metadata') + raise def _augment_metadata_env(self, meta): version_tuple = barectf.get_version_tuple() @@ -103,7 +122,8 @@ class Config: @prefix.setter def prefix(self, value): if not _is_valid_identifier(value): - raise ConfigError('configuration prefix must be a valid C identifier') + raise ConfigError('configuration', + 'prefix must be a valid C identifier') self._prefix = value @@ -241,22 +261,22 @@ class _BarectfMetadataValidator: def _validate_int_type(self, t, entity_root): if t.size > 64: - raise ConfigError('integer type\'s size must be lesser than or equal to 64 bits') + raise ConfigError('integer type', 'size must be lesser than or equal to 64 bits') def _validate_float_type(self, t, entity_root): if t.size > 64: - raise ConfigError('floating point number type\'s size must be lesser than or equal to 64 bits') + raise ConfigError('floating point number type', 'size must be lesser than or equal to 64 bits') def _validate_enum_type(self, t, entity_root): if t.value_type.size > 64: - raise ConfigError('enumeration type\'s integer type\'s size must be lesser than or equal to 64 bits') + 
raise ConfigError('enumeration type', 'integer type\'s size must be lesser than or equal to 64 bits')
 
     def _validate_string_type(self, t, entity_root):
         pass
 
     def _validate_struct_type(self, t, entity_root):
         if not entity_root:
-            raise ConfigError('inner structure types are not supported as of this version')
+            raise ConfigError('structure type', 'inner structure types are not supported as of this version')
 
         for field_name, field_type in t.fields.items():
             if entity_root and self._cur_entity is _Entity.TRACE_PACKET_HEADER:
             try:
                 self._validate_type(field_type, False)
-            except Exception as e:
-                raise ConfigError('in structure type\'s field "{}"'.format(field_name), e)
+            except ConfigError as exc:
+                exc.append_ctx('structure type\'s field "{}"'.format(field_name))
+                raise
 
     def _validate_array_type(self, t, entity_root):
-        raise ConfigError('array types are not supported as of this version')
+        raise ConfigError('array type', 'not supported as of this version')
 
     def _validate_type(self, t, entity_root):
         self._type_to_validate_type_func[type(t)](t, entity_root)
@@ -281,11 +302,11 @@ class _BarectfMetadataValidator:
         # make sure entity is byte-aligned
         if t.align < 8:
-            raise ConfigError('type\'s alignment must be at least byte-aligned')
+            raise ConfigError('root type', 'alignment must be at least byte-aligned')
 
         # make sure entity is a structure
         if type(t) is not metadata.Struct:
-            raise ConfigError('expecting a structure type')
+            raise ConfigError('root type', 'expecting a structure type')
 
         # validate types
         self._validate_type(t, True)
@@ -295,62 +316,75 @@ class _BarectfMetadataValidator:
         try:
             self._validate_entity(meta.trace.packet_header_type)
-        except Exception as e:
-            raise ConfigError('invalid trace packet header type', e)
+        except ConfigError as exc:
+            exc.append_ctx('trace', 'invalid packet header type')
+            raise
 
         for stream_name, stream in meta.streams.items():
             if not _is_valid_identifier(stream_name):
-                raise ConfigError('stream name "{}" is not a valid C identifier'.format(stream_name))
+                raise ConfigError('trace', 'stream name "{}" is not a valid C identifier'.format(stream_name))
 
             self._cur_entity = _Entity.STREAM_PACKET_CONTEXT
 
             try:
                 self._validate_entity(stream.packet_context_type)
-            except Exception as e:
-                raise ConfigError('invalid packet context type in stream "{}"'.format(stream_name), e)
+            except ConfigError as exc:
+                exc.append_ctx('stream "{}"'.format(stream_name),
+                               'invalid packet context type')
+                raise
 
             self._cur_entity = _Entity.STREAM_EVENT_HEADER
 
             try:
                 self._validate_entity(stream.event_header_type)
-            except Exception as e:
-                raise ConfigError('invalid event header type in stream "{}"'.format(stream_name), e)
+            except ConfigError as exc:
+                exc.append_ctx('stream "{}"'.format(stream_name),
+                               'invalid event header type')
+                raise
 
             self._cur_entity = _Entity.STREAM_EVENT_CONTEXT
 
             try:
                 self._validate_entity(stream.event_context_type)
-            except Exception as e:
-                raise ConfigError('invalid event context type in stream "{}"'.format(stream_name), e)
+            except ConfigError as exc:
+                exc.append_ctx('stream "{}"'.format(stream_name),
+                               'invalid event context type')
+                raise
 
             try:
                 for ev_name, ev in stream.events.items():
                     if not _is_valid_identifier(ev_name):
-                        raise ConfigError('event name "{}" is not a valid C identifier'.format(ev_name))
+                        raise ConfigError('stream "{}"'.format(stream_name),
+                                          'event name "{}" is not a valid C identifier'.format(ev_name))
 
                     self._cur_entity = _Entity.EVENT_CONTEXT
 
                     try:
self._validate_entity(ev.context_type) - except Exception as e: - raise ConfigError('invalid context type in event "{}"'.format(ev_name), e) + except ConfigError as exc: + exc.append_ctx('event "{}"'.format(ev_name), + 'invalid context type') + raise self._cur_entity = _Entity.EVENT_PAYLOAD try: self._validate_entity(ev.payload_type) - except Exception as e: - raise ConfigError('invalid payload type in event "{}"'.format(ev_name), e) + except ConfigError as exc: + exc.append_ctx('event "{}"'.format(ev_name), + 'invalid payload type') + raise if stream.is_event_empty(ev): - raise ConfigError('event "{}" is empty'.format(ev_name)) - except Exception as e: - raise ConfigError('invalid stream "{}"'.format(stream_name), e) + raise ConfigError('event "{}"'.format(ev_name), 'empty') + except ConfigError as exc: + exc.append_ctx('stream "{}"'.format(stream_name)) + raise def _validate_default_stream(self, meta): if meta.default_stream_name: if meta.default_stream_name not in meta.streams.keys(): - raise ConfigError('default stream name ("{}") does not exist'.format(meta.default_stream_name)) + raise ConfigError('barectf metadata', 'default stream name ("{}") does not exist'.format(meta.default_stream_name)) def validate(self, meta): self._validate_entities_and_names(meta) @@ -366,13 +400,16 @@ class _MetadataSpecialFieldsValidator: if len(self._meta.streams) > 1: # yes if t is None: - raise ConfigError('need "stream_id" field in trace packet header type (more than one stream), but trace packet header type is missing') + raise ConfigError('"packet-header-type" property', + 'need "stream_id" field (more than one stream), but trace packet header type is missing') if type(t) is not metadata.Struct: - raise ConfigError('need "stream_id" field in trace packet header type (more than one stream), but trace packet header type is not a structure type') + raise ConfigError('"packet-header-type" property', + 'need "stream_id" field (more than one stream), but trace packet header type is not a structure type') if 'stream_id' not in t.fields: - raise ConfigError('need "stream_id" field in trace packet header type (more than one stream)') + raise ConfigError('"packet-header-type" property', + 'need "stream_id" field (more than one stream)') # validate "magic" and "stream_id" types if type(t) is not metadata.Struct: @@ -381,46 +418,59 @@ class _MetadataSpecialFieldsValidator: for i, (field_name, field_type) in enumerate(t.fields.items()): if field_name == 'magic': if type(field_type) is not metadata.Integer: - raise ConfigError('"magic" field in trace packet header type must be an integer type') + raise ConfigError('"packet-header-type" property', + '"magic" field must be an integer type') if field_type.signed or field_type.size != 32: - raise ConfigError('"magic" field in trace packet header type must be a 32-bit unsigned integer type') + raise ConfigError('"packet-header-type" property', + '"magic" field must be a 32-bit unsigned integer type') if i != 0: - raise ConfigError('"magic" field must be the first trace packet header type\'s field') + raise ConfigError('"packet-header-type" property', + '"magic" field must be the first trace packet header type\'s field') elif field_name == 'stream_id': if type(field_type) is not metadata.Integer: - raise ConfigError('"stream_id" field in trace packet header type must be an integer type') + raise ConfigError('"packet-header-type" property', + '"stream_id" field must be an integer type') if field_type.signed: - raise ConfigError('"stream_id" field in trace packet header 
type must be an unsigned integer type') + raise ConfigError('"packet-header-type" property', + '"stream_id" field must be an unsigned integer type') # "id" size can fit all event IDs if len(self._meta.streams) > (1 << field_type.size): - raise ConfigError('"stream_id" field\' size in trace packet header type is too small for the number of trace streams') + raise ConfigError('"packet-header-type" property', + '"stream_id" field\' size is too small for the number of trace streams') elif field_name == 'uuid': if self._meta.trace.uuid is None: - raise ConfigError('"uuid" field in trace packet header type specified, but no trace UUID provided') + raise ConfigError('"packet-header-type" property', + '"uuid" field specified, but no trace UUID provided') if type(field_type) is not metadata.Array: - raise ConfigError('"uuid" field in trace packet header type must be an array') + raise ConfigError('"packet-header-type" property', + '"uuid" field must be an array') if field_type.length != 16: - raise ConfigError('"uuid" field in trace packet header type must be an array of 16 bytes') + raise ConfigError('"packet-header-type" property', + '"uuid" field must be an array of 16 bytes') element_type = field_type.element_type if type(element_type) is not metadata.Integer: - raise ConfigError('"uuid" field in trace packet header type must be an array of 16 unsigned bytes') + raise ConfigError('"packet-header-type" property', + '"uuid" field must be an array of 16 unsigned bytes') if element_type.size != 8: - raise ConfigError('"uuid" field in trace packet header type must be an array of 16 unsigned bytes') + raise ConfigError('"packet-header-type" property', + '"uuid" field must be an array of 16 unsigned bytes') if element_type.signed: - raise ConfigError('"uuid" field in trace packet header type must be an array of 16 unsigned bytes') + raise ConfigError('"packet-header-type" property', + '"uuid" field must be an array of 16 unsigned bytes') if element_type.align != 8: - raise ConfigError('"uuid" field in trace packet header type must be an array of 16 unsigned, byte-aligned bytes') + raise ConfigError('"packet-header-type" property', + '"uuid" field must be an array of 16 unsigned, byte-aligned bytes') def _validate_trace(self, meta): self._validate_trace_packet_header_type(meta.trace.packet_header_type) @@ -429,10 +479,12 @@ class _MetadataSpecialFieldsValidator: t = stream.packet_context_type if type(t) is None: - raise ConfigError('missing "packet-context-type" property in stream object') + raise ConfigError('stream', + 'missing "packet-context-type" property') if type(t) is not metadata.Struct: - raise ConfigError('"packet-context-type": expecting a structure type') + raise ConfigError('"packet-context-type" property', + 'expecting a structure type') # "timestamp_begin", if exists, is an unsigned integer type, # mapped to a clock @@ -442,13 +494,16 @@ class _MetadataSpecialFieldsValidator: ts_begin = t.fields['timestamp_begin'] if type(ts_begin) is not metadata.Integer: - raise ConfigError('"timestamp_begin" field in stream packet context type must be an integer type') + raise ConfigError('"packet-context-type" property', + '"timestamp_begin" field must be an integer type') if ts_begin.signed: - raise ConfigError('"timestamp_begin" field in stream packet context type must be an unsigned integer type') + raise ConfigError('"packet-context-type" property', + '"timestamp_begin" field must be an unsigned integer type') if not ts_begin.property_mappings: - raise ConfigError('"timestamp_begin" field in 
stream packet context type must be mapped to a clock') + raise ConfigError('"packet-context-type" property', + '"timestamp_begin" field must be mapped to a clock') # "timestamp_end", if exists, is an unsigned integer type, # mapped to a clock @@ -458,13 +513,16 @@ class _MetadataSpecialFieldsValidator: ts_end = t.fields['timestamp_end'] if type(ts_end) is not metadata.Integer: - raise ConfigError('"timestamp_end" field in stream packet context type must be an integer type') + raise ConfigError('"packet-context-type" property', + '"timestamp_end" field must be an integer type') if ts_end.signed: - raise ConfigError('"timestamp_end" field in stream packet context type must be an unsigned integer type') + raise ConfigError('"packet-context-type" property', + '"timestamp_end" field must be an unsigned integer type') if not ts_end.property_mappings: - raise ConfigError('"timestamp_end" field in stream packet context type must be mapped to a clock') + raise ConfigError('"packet-context-type" property', + '"timestamp_end" field must be mapped to a clock') # "timestamp_begin" and "timestamp_end" exist together if (('timestamp_begin' in t.fields) ^ ('timestamp_end' in t.fields)): @@ -480,40 +538,49 @@ class _MetadataSpecialFieldsValidator: events_discarded = t.fields['events_discarded'] if type(events_discarded) is not metadata.Integer: - raise ConfigError('"events_discarded" field in stream packet context type must be an integer type') + raise ConfigError('"packet-context-type" property', + '"events_discarded" field must be an integer type') if events_discarded.signed: - raise ConfigError('"events_discarded" field in stream packet context type must be an unsigned integer type') + raise ConfigError('"packet-context-type" property', + '"events_discarded" field must be an unsigned integer type') # "packet_size" and "content_size" must exist if 'packet_size' not in t.fields: - raise ConfigError('missing "packet_size" field in stream packet context type') + raise ConfigError('"packet-context-type" property', + 'missing "packet_size" field in stream packet context type') packet_size = t.fields['packet_size'] # "content_size" and "content_size" must exist if 'content_size' not in t.fields: - raise ConfigError('missing "content_size" field in stream packet context type') + raise ConfigError('"packet-context-type" property', + 'missing "content_size" field in stream packet context type') content_size = t.fields['content_size'] # "packet_size" is an unsigned integer type if type(packet_size) is not metadata.Integer: - raise ConfigError('"packet_size" field in stream packet context type must be an integer type') + raise ConfigError('"packet-context-type" property', + '"packet_size" field in stream packet context type must be an integer type') if packet_size.signed: - raise ConfigError('"packet_size" field in stream packet context type must be an unsigned integer type') + raise ConfigError('"packet-context-type" property', + '"packet_size" field in stream packet context type must be an unsigned integer type') # "content_size" is an unsigned integer type if type(content_size) is not metadata.Integer: - raise ConfigError('"content_size" field in stream packet context type must be an integer type') + raise ConfigError('"packet-context-type" property', + '"content_size" field in stream packet context type must be an integer type') if content_size.signed: - raise ConfigError('"content_size" field in stream packet context type must be an unsigned integer type') + raise ConfigError('"packet-context-type" property', 
+ '"content_size" field in stream packet context type must be an unsigned integer type') # "packet_size" size should be greater than or equal to "content_size" size if content_size.size > packet_size.size: - raise ConfigError('"content_size" field size must be lesser than or equal to "packet_size" field size') + raise ConfigError('"packet-context-type" property', + '"content_size" field size must be lesser than or equal to "packet_size" field size') def _validate_stream_event_header(self, stream): t = stream.event_header_type @@ -522,13 +589,16 @@ class _MetadataSpecialFieldsValidator: if len(stream.events) > 1: # yes if t is None: - raise ConfigError('need "id" field in stream event header type (more than one event), but stream event header type is missing') + raise ConfigError('"event-header-type" property', + 'need "id" field (more than one event), but stream event header type is missing') if type(t) is not metadata.Struct: - raise ConfigError('need "id" field in stream event header type (more than one event), but stream event header type is not a structure type') + raise ConfigError('"event-header-type" property', + 'need "id" field (more than one event), but stream event header type is not a structure type') if 'id' not in t.fields: - raise ConfigError('need "id" field in stream event header type (more than one event)') + raise ConfigError('"event-header-type" property', + 'need "id" field (more than one event)') # validate "id" and "timestamp" types if type(t) is not metadata.Struct: @@ -540,27 +610,33 @@ class _MetadataSpecialFieldsValidator: ts = t.fields['timestamp'] if type(ts) is not metadata.Integer: - raise ConfigError('"timestamp" field in stream event header type must be an integer type') + raise ConfigError('"event-header-type" property', + '"timestamp" field must be an integer type') if ts.signed: - raise ConfigError('"timestamp" field in stream event header type must be an unsigned integer type') + raise ConfigError('"event-header-type" property', + '"timestamp" field must be an unsigned integer type') if not ts.property_mappings: - raise ConfigError('"timestamp" field in stream event header type must be mapped to a clock') + raise ConfigError('"event-header-type" property', + '"timestamp" field must be mapped to a clock') if 'id' in t.fields: eid = t.fields['id'] # "id" is an unsigned integer type if type(eid) is not metadata.Integer: - raise ConfigError('"id" field in stream event header type must be an integer type') + raise ConfigError('"event-header-type" property', + '"id" field must be an integer type') if eid.signed: - raise ConfigError('"id" field in stream event header type must be an unsigned integer type') + raise ConfigError('"event-header-type" property', + '"id" field must be an unsigned integer type') # "id" size can fit all event IDs if len(stream.events) > (1 << eid.size): - raise ConfigError('"id" field\' size in stream event header type is too small for the number of stream events') + raise ConfigError('"event-header-type" property', + '"id" field\' size is too small for the number of stream events') def _validate_stream(self, stream): self._validate_stream_packet_context(stream) @@ -573,8 +649,9 @@ class _MetadataSpecialFieldsValidator: for stream in meta.streams.values(): try: self._validate_stream(stream) - except Exception as e: - raise ConfigError('invalid stream "{}"'.format(stream.name), e) + except ConfigError as exc: + exc.append_ctx('stream "{}"'.format(stream.name), 'invalid') + raise # Entities. Order of values is important here. 
@@ -606,29 +683,32 @@ class _MetadataTypesHistologyValidator:
     def _validate_integer_histology(self, t):
         # size is set
         if t.size is None:
-            raise ConfigError('missing integer type\'s size')
+            raise ConfigError('integer type', 'missing size')
 
     def _validate_float_histology(self, t):
         # exponent digits is set
         if t.exp_size is None:
-            raise ConfigError('missing floating point number type\'s exponent size')
+            raise ConfigError('floating point number type',
+                              'missing exponent size')
 
         # mantissa digits is set
         if t.mant_size is None:
-            raise ConfigError('missing floating point number type\'s mantissa size')
+            raise ConfigError('floating point number type',
+                              'missing mantissa size')
 
         # exponent and mantissa sum is a multiple of 8
        if (t.exp_size + t.mant_size) % 8 != 0:
-            raise ConfigError('floating point number type\'s mantissa and exponent sizes sum must be a multiple of 8')
+            raise ConfigError('floating point number type',
+                              'mantissa and exponent sizes sum must be a multiple of 8')
 
     def _validate_enum_histology(self, t):
         # integer type is set
         if t.value_type is None:
-            raise ConfigError('missing enumeration type\'s value type')
+            raise ConfigError('enumeration type', 'missing value type')
 
         # there's at least one member
         if not t.members:
-            raise ConfigError('enumeration type needs at least one member')
+            raise ConfigError('enumeration type', 'at least one member required')
 
         # no overlapping values and all values are valid considering
         # the value type
@@ -644,15 +724,19 @@ class _MetadataTypesHistologyValidator:
         for label, value in t.members.items():
             for rg in ranges:
                 if value[0] <= rg[1] and rg[0] <= value[1]:
-                    raise ConfigError('enumeration type\'s member "{}" overlaps another member'.format(label))
+                    raise ConfigError('enumeration type\'s member "{}"'.format(label),
+                                      'overlaps another member')
 
-            fmt = 'enumeration type\'s member "{}": value {} is outside the value type range [{}, {}]'
+            name_fmt = 'enumeration type\'s member "{}"'
+            msg_fmt = 'value {} is outside the value type range [{}, {}]'
 
             if value[0] < value_min or value[0] > value_max:
-                raise ConfigError(fmt.format(label, value[0], value_min, value_max))
+                raise ConfigError(name_fmt.format(label),
+                                  msg_fmt.format(value[0], value_min, value_max))
 
             if value[1] < value_min or value[1] > value_max:
-                raise ConfigError(fmt.format(label, value[1], value_min, value_max))
+                raise ConfigError(name_fmt.format(label),
+                                  msg_fmt.format(value[1], value_min, value_max))
 
             ranges.append(value)
 
@@ -665,23 +749,25 @@ class _MetadataTypesHistologyValidator:
         for field_name, field_type in t.fields.items():
             try:
                 self._validate_type_histology(field_type)
-            except Exception as e:
-                raise ConfigError('invalid structure type\'s field "{}"'.format(field_name), e)
+            except ConfigError as exc:
+                exc.append_ctx('structure type\'s field "{}"'.format(field_name))
+                raise
 
     def _validate_array_histology(self, t):
         # length is set
         if t.length is None:
-            raise ConfigError('missing array type\'s length')
+            raise ConfigError('array type', 'missing length')
 
         # element type is set
         if t.element_type is None:
-            raise ConfigError('missing array type\'s element type')
+            raise ConfigError('array type', 'missing element type')
 
         # element type is valid
         try:
             self._validate_type_histology(t.element_type)
-        except Exception as e:
-            raise ConfigError('invalid array type\'s element type', e)
+        except ConfigError as exc:
+            exc.append_ctx('array type', 'invalid element type')
+            raise
 
     def _validate_type_histology(self, t):
         if t is None:
@@ -694,7 +780,7 @@
return if type(t) is not metadata.Struct: - raise ConfigError('expecting a structure type') + raise ConfigError('root type', 'expecting a structure type') self._validate_type_histology(t) @@ -704,14 +790,18 @@ class _MetadataTypesHistologyValidator: # validate event context type try: self._validate_entity_type_histology(ev.context_type) - except Exception as e: - raise ConfigError('invalid event context type for event "{}"'.format(ev_name), e) + except ConfigError as exc: + exc.append_ctx('event "{}"'.format(ev.name), + 'invalid context type') + raise # validate event payload type try: self._validate_entity_type_histology(ev.payload_type) - except Exception as e: - raise ConfigError('invalid event payload type for event "{}"'.format(ev_name), e) + except ConfigError as exc: + exc.append_ctx('event "{}"'.format(ev.name), + 'invalid payload type') + raise def _validate_stream_types_histology(self, stream): stream_name = stream.name @@ -719,34 +809,43 @@ class _MetadataTypesHistologyValidator: # validate stream packet context type try: self._validate_entity_type_histology(stream.packet_context_type) - except Exception as e: - raise ConfigError('invalid stream packet context type for stream "{}"'.format(stream_name), e) + except ConfigError as exc: + exc.append_ctx('stream "{}"'.format(stream_name), + 'invalid packet context type') + raise # validate stream event header type try: self._validate_entity_type_histology(stream.event_header_type) - except Exception as e: - raise ConfigError('invalid stream event header type for stream "{}"'.format(stream_name), e) + except ConfigError as exc: + exc.append_ctx('stream "{}"'.format(stream_name), + 'invalid event header type') + raise # validate stream event context type try: self._validate_entity_type_histology(stream.event_context_type) - except Exception as e: - raise ConfigError('invalid stream event context type for stream "{}"'.format(stream_name), e) + except ConfigError as exc: + exc.append_ctx('stream "{}"'.format(stream_name), + 'invalid event context type') + raise # validate events for ev in stream.events.values(): try: self._validate_event_types_histology(ev) - except Exception as e: - raise ConfigError('invalid event in stream "{}"'.format(stream_name), e) + except ConfigError as exc: + exc.append_ctx('stream "{}"'.format(stream_name), + 'invalid event') + raise def validate(self, meta): # validate trace packet header type try: self._validate_entity_type_histology(meta.trace.packet_header_type) - except Exception as e: - raise ConfigError('invalid trace packet header type', e) + except ConfigError as exc: + exc.append_ctx('metadata\'s trace', 'invalid packet header type') + raise # validate streams for stream in meta.streams.values(): @@ -783,25 +882,29 @@ class _YamlConfigParser: def _set_byte_order(self, metadata_node): if 'trace' not in metadata_node: - raise ConfigError('missing "trace" property (metadata)') + raise ConfigError('metadata', 'missing "trace" property') trace_node = metadata_node['trace'] if not _is_assoc_array_prop(trace_node): - raise ConfigError('"trace" property (metadata) must be an associative array') + raise ConfigError('metadata\'s "trace" property', + 'must be an associative array') if 'byte-order' not in trace_node: - raise ConfigError('missing "byte-order" property (trace)') + raise ConfigError('metadata\'s "trace" property', + 'missing "byte-order" property') bo_node = trace_node['byte-order'] if not _is_str_prop(bo_node): - raise ConfigError('"byte-order" property of trace object must be a string ("le" or 
"be")') + raise ConfigError('metadata\'s "trace" property', + '"byte-order" property must be a string ("le" or "be")') self._bo = _byte_order_str_to_bo(bo_node) if self._bo is None: - raise ConfigError('invalid "byte-order" property (trace): must be "le" or "be"') + raise ConfigError('metadata\'s "trace" property', + 'invalid "byte-order" property: must be "le" or "be"') def _lookup_type_alias(self, name): if name in self._tas: @@ -811,28 +914,35 @@ class _YamlConfigParser: unk_prop = _get_first_unknown_prop(prop_mapping_node, ['type', 'name', 'property']) if unk_prop: - raise ConfigError('unknown property in integer type object\'s clock property mapping: "{}"'.format(unk_prop)) + raise ConfigError('integer type\'s clock property mapping', + 'unknown property: "{}"'.format(unk_prop)) if 'name' not in prop_mapping_node: - raise ConfigError('missing "name" property in integer type object\'s clock property mapping') + raise ConfigError('integer type\'s clock property mapping', + 'missing "name" property') if 'property' not in prop_mapping_node: - raise ConfigError('missing "property" property in integer type object\'s clock property mapping') + raise ConfigError('integer type\'s clock property mapping', + 'missing "property" property') clock_name = prop_mapping_node['name'] prop = prop_mapping_node['property'] if not _is_str_prop(clock_name): - raise ConfigError('"name" property of integer type object\'s clock property mapping must be a string') + raise ConfigError('integer type\'s clock property mapping', + '"name" property must be a string') if not _is_str_prop(prop): - raise ConfigError('"property" property of integer type object\'s clock property mapping must be a string') + raise ConfigError('integer type\'s clock property mapping', + '"property" property must be a string') if clock_name not in self._clocks: - raise ConfigError('invalid clock name "{}" in integer type object\'s clock property mapping'.format(clock_name)) + raise ConfigError('integer type\'s clock property mapping', + 'invalid clock name "{}"'.format(clock_name)) if prop != 'value': - raise ConfigError('invalid "property" property in integer type object\'s clock property mapping: "{}"'.format(prop)) + raise ConfigError('integer type\'s clock property mapping', + 'invalid "property" property: "{}"'.format(prop)) mapped_clock = self._clocks[clock_name] int_obj.property_mappings.append(metadata.PropertyMapping(mapped_clock, prop)) @@ -861,17 +971,20 @@ class _YamlConfigParser: ]) if unk_prop: - raise ConfigError('unknown integer type object property: "{}"'.format(unk_prop)) + raise ConfigError('integer type', + 'unknown property: "{}"'.format(unk_prop)) # size if 'size' in node: size = node['size'] if not _is_int_prop(size): - raise ConfigError('"size" property of integer type object must be an integer') + raise ConfigError('integer type', + '"size" property of integer type object must be an integer') if size < 1: - raise ConfigError('invalid integer size: {}'.format(size)) + raise ConfigError('integer type', + 'invalid integer size: {}'.format(size)) obj.size = size @@ -883,10 +996,12 @@ class _YamlConfigParser: obj.set_default_align() else: if not _is_int_prop(align): - raise ConfigError('"align" property of integer type object must be an integer') + raise ConfigError('integer type', + '"align" property of integer type object must be an integer') if not _is_valid_alignment(align): - raise ConfigError('invalid alignment: {}'.format(align)) + raise ConfigError('integer type', + 'invalid alignment: {}'.format(align)) 
obj.align = align @@ -898,7 +1013,8 @@ class _YamlConfigParser: obj.set_default_signed() else: if not _is_bool_prop(signed): - raise ConfigError('"signed" property of integer type object must be a boolean') + raise ConfigError('integer type', + '"signed" property of integer type object must be a boolean') obj.signed = signed @@ -910,12 +1026,14 @@ class _YamlConfigParser: obj.byte_order = self._bo else: if not _is_str_prop(byte_order): - raise ConfigError('"byte-order" property of integer type object must be a string ("le" or "be")') + raise ConfigError('integer type', + '"byte-order" property of integer type object must be a string ("le" or "be")') byte_order = _byte_order_str_to_bo(byte_order) if byte_order is None: - raise ConfigError('invalid "byte-order" property in integer type object') + raise ConfigError('integer type', + 'invalid "byte-order" property in integer type object') obj.byte_order = byte_order else: @@ -929,7 +1047,8 @@ class _YamlConfigParser: obj.set_default_base() else: if not _is_str_prop(base): - raise ConfigError('"base" property of integer type object must be a string ("bin", "oct", "dec", or "hex")') + raise ConfigError('integer type', + '"base" property of integer type object must be a string ("bin", "oct", "dec", or "hex")') if base == 'bin': base = 2 @@ -940,7 +1059,8 @@ class _YamlConfigParser: elif base == 'hex': base = 16 else: - raise ConfigError('unknown "base" property value: "{}" ("bin", "oct", "dec", and "hex" are accepted)'.format(base)) + raise ConfigError('integer type', + 'unknown "base" property value: "{}" ("bin", "oct", "dec", and "hex" are accepted)'.format(base)) obj.base = base @@ -952,12 +1072,14 @@ class _YamlConfigParser: obj.set_default_encoding() else: if not _is_str_prop(encoding): - raise ConfigError('"encoding" property of integer type object must be a string ("none", "ascii", or "utf-8")') + raise ConfigError('integer type', + '"encoding" property of integer type object must be a string ("none", "ascii", or "utf-8")') encoding = _encoding_str_to_encoding(encoding) if encoding is None: - raise ConfigError('invalid "encoding" property in integer type object') + raise ConfigError('integer type', + 'invalid "encoding" property in integer type object') obj.encoding = encoding @@ -969,27 +1091,33 @@ class _YamlConfigParser: obj.set_default_property_mappings() else: if not _is_array_prop(prop_mappings): - raise ConfigError('"property-mappings" property of integer type object must be an array') + raise ConfigError('integer type', + '"property-mappings" property of integer type object must be an array') if len(prop_mappings) > 1: - raise ConfigError('length of "property-mappings" array in integer type object must be 1') + raise ConfigError('integer type', + 'length of "property-mappings" array in integer type object must be 1') for index, prop_mapping in enumerate(prop_mappings): if not _is_assoc_array_prop(prop_mapping): - raise ConfigError('elements of "property-mappings" property of integer type object must be associative arrays') + raise ConfigError('integer type', + 'elements of "property-mappings" property of integer type object must be associative arrays') if 'type' not in prop_mapping: - raise ConfigError('missing "type" property in integer type object\'s "property-mappings" array\'s element #{}'.format(index)) + raise ConfigError('integer type', + 'missing "type" property in integer type object\'s "property-mappings" array\'s element #{}'.format(index)) prop_type = prop_mapping['type'] if not _is_str_prop(prop_type): - raise 
ConfigError('"type" property of integer type object\'s "property-mappings" array\'s element #{} must be a string'.format(index)) + raise ConfigError('integer type', + '"type" property of integer type object\'s "property-mappings" array\'s element #{} must be a string'.format(index)) if prop_type == 'clock': self._set_int_clock_prop_mapping(obj, prop_mapping) else: - raise ConfigError('unknown property mapping type "{}" in integer type object\'s "property-mappings" array\'s element #{}'.format(prop_type, index)) + raise ConfigError('integer type', + 'unknown property mapping type "{}" in integer type object\'s "property-mappings" array\'s element #{}'.format(prop_type, index)) return obj @@ -1005,28 +1133,33 @@ class _YamlConfigParser: ]) if unk_prop: - raise ConfigError('unknown floating point number type object property: "{}"'.format(unk_prop)) + raise ConfigError('floating point number type', + 'unknown property: "{}"'.format(unk_prop)) # size if 'size' in node: size = node['size'] if not _is_assoc_array_prop(size): - raise ConfigError('"size" property of floating point number type object must be an associative array') + raise ConfigError('floating point number type', + '"size" property must be an associative array') unk_prop = _get_first_unknown_prop(size, ['exp', 'mant']) if unk_prop: - raise ConfigError('unknown floating point number type object\'s "size" property: "{}"'.format(unk_prop)) + raise ConfigError('floating point number type\'s "size" property', + 'unknown property: "{}"'.format(unk_prop)) if 'exp' in size: exp = size['exp'] if not _is_int_prop(exp): - raise ConfigError('"exp" property of floating point number type object\'s "size" property must be an integer') + raise ConfigError('floating point number type\'s "size" property', + '"exp" property must be an integer') if exp < 1: - raise ConfigError('invalid floating point number exponent size: {}') + raise ConfigError('floating point number type\'s "size" property', + 'invalid exponent size: {}') obj.exp_size = exp @@ -1034,10 +1167,12 @@ class _YamlConfigParser: mant = size['mant'] if not _is_int_prop(mant): - raise ConfigError('"mant" property of floating point number type object\'s "size" property must be an integer') + raise ConfigError('floating point number type\'s "size" property', + '"mant" property must be an integer') if mant < 1: - raise ConfigError('invalid floating point number mantissa size: {}') + raise ConfigError('floating point number type\'s "size" property', + 'invalid mantissa size: {}') obj.mant_size = mant @@ -1049,10 +1184,12 @@ class _YamlConfigParser: obj.set_default_align() else: if not _is_int_prop(align): - raise ConfigError('"align" property of floating point number type object must be an integer') + raise ConfigError('floating point number type', + '"align" property must be an integer') if not _is_valid_alignment(align): - raise ConfigError('invalid alignment: {}'.format(align)) + raise ConfigError('floating point number type', + 'invalid alignment: {}'.format(align)) obj.align = align @@ -1064,12 +1201,14 @@ class _YamlConfigParser: obj.byte_order = self._bo else: if not _is_str_prop(byte_order): - raise ConfigError('"byte-order" property of floating point number type object must be a string ("le" or "be")') + raise ConfigError('floating point number type', + '"byte-order" property must be a string ("le" or "be")') byte_order = _byte_order_str_to_bo(byte_order) if byte_order is None: - raise ConfigError('invalid "byte-order" property in floating point number type object') + raise 
ConfigError('floating point number type', + 'invalid "byte-order" property') else: obj.byte_order = self._bo @@ -1086,7 +1225,8 @@ class _YamlConfigParser: ]) if unk_prop: - raise ConfigError('unknown enumeration type object property: "{}"'.format(unk_prop)) + raise ConfigError('enumeration type', + 'unknown property: "{}"'.format(unk_prop)) # value type if 'value-type' in node: @@ -1094,15 +1234,17 @@ class _YamlConfigParser: try: obj.value_type = self._create_type(value_type_node) - except Exception as e: - raise ConfigError('cannot create enumeration type\'s integer type', e) + except ConfigError as exc: + exc.append_ctx('enumeration type', 'cannot create integer type') + raise # members if 'members' in node: members_node = node['members'] if not _is_array_prop(members_node): - raise ConfigError('"members" property of enumeration type object must be an array') + raise ConfigError('enumeration type', + '"members" property must be an array') cur = 0 last_value = obj.last_value @@ -1114,7 +1256,8 @@ class _YamlConfigParser: for index, m_node in enumerate(members_node): if not _is_str_prop(m_node) and not _is_assoc_array_prop(m_node): - raise ConfigError('invalid enumeration member #{}: expecting a string or an associative array'.format(index)) + raise ConfigError('enumeration type', + 'invalid member #{}: expecting a string or an associative array'.format(index)) if _is_str_prop(m_node): label = m_node @@ -1127,36 +1270,43 @@ class _YamlConfigParser: ]) if unk_prop: - raise ConfigError('unknown enumeration type member object property: "{}"'.format(unk_prop)) + raise ConfigError('enumeration type', + 'unknown member object property: "{}"'.format(unk_prop)) if 'label' not in m_node: - raise ConfigError('missing "label" property in enumeration member #{}'.format(index)) + raise ConfigError('enumeration type', + 'missing "label" property in member #{}'.format(index)) label = m_node['label'] if not _is_str_prop(label): - raise ConfigError('"label" property of enumeration member #{} must be a string'.format(index)) + raise ConfigError('enumeration type', + '"label" property of member #{} must be a string'.format(index)) if 'value' not in m_node: - raise ConfigError('missing "value" property in enumeration member ("{}")'.format(label)) + raise ConfigError('enumeration type', + 'missing "value" property in member ("{}")'.format(label)) value = m_node['value'] if not _is_int_prop(value) and not _is_array_prop(value): - raise ConfigError('invalid enumeration member ("{}"): expecting an integer or an array'.format(label)) + raise ConfigError('enumeration type', + 'invalid member ("{}"): expecting an integer or an array'.format(label)) if _is_int_prop(value): cur = value + 1 value = (value, value) else: if len(value) != 2: - raise ConfigError('invalid enumeration member ("{}"): range must have exactly two items'.format(label)) + raise ConfigError('enumeration type', + 'invalid member ("{}"): range must have exactly two items'.format(label)) mn = value[0] mx = value[1] if mn > mx: - raise ConfigError('invalid enumeration member ("{}"): invalid range ({} > {})'.format(label, mn, mx)) + raise ConfigError('enumeration type', + 'invalid member ("{}"): invalid range ({} > {})'.format(label, mn, mx)) value = (mn, mx) cur = mx + 1 @@ -1175,7 +1325,8 @@ class _YamlConfigParser: ]) if unk_prop: - raise ConfigError('unknown string type object property: "{}"'.format(unk_prop)) + raise ConfigError('string type', + 'unknown object property: "{}"'.format(unk_prop)) # encoding if 'encoding' in node: @@ -1185,12 
+1336,14 @@ class _YamlConfigParser: obj.set_default_encoding() else: if not _is_str_prop(encoding): - raise ConfigError('"encoding" property of string type object must be a string ("none", "ascii", or "utf-8")') + raise ConfigError('string type', + '"encoding" property of must be a string ("none", "ascii", or "utf-8")') encoding = _encoding_str_to_encoding(encoding) if encoding is None: - raise ConfigError('invalid "encoding" property in string type object') + raise ConfigError('string type', + 'invalid "encoding" property') obj.encoding = encoding @@ -1207,7 +1360,8 @@ class _YamlConfigParser: ]) if unk_prop: - raise ConfigError('unknown string type object property: "{}"'.format(unk_prop)) + raise ConfigError('structure type', + 'unknown object property: "{}"'.format(unk_prop)) # minimum alignment if 'min-align' in node: @@ -1217,10 +1371,12 @@ class _YamlConfigParser: obj.set_default_min_align() else: if not _is_int_prop(min_align): - raise ConfigError('"min-align" property of structure type object must be an integer') + raise ConfigError('structure type', + '"min-align" property must be an integer') if not _is_valid_alignment(min_align): - raise ConfigError('invalid minimum alignment: {}'.format(min_align)) + raise ConfigError('structure type', + 'invalid minimum alignment: {}'.format(min_align)) obj.min_align = min_align @@ -1232,16 +1388,20 @@ class _YamlConfigParser: obj.set_default_fields() else: if not _is_assoc_array_prop(fields): - raise ConfigError('"fields" property of structure type object must be an associative array') + raise ConfigError('structure type', + '"fields" property must be an associative array') for field_name, field_node in fields.items(): if not _is_valid_identifier(field_name): - raise ConfigError('"{}" is not a valid field name for structure type'.format(field_name)) + raise ConfigError('structure type', + '"{}" is not a valid field name'.format(field_name)) try: obj.fields[field_name] = self._create_type(field_node) - except Exception as e: - raise ConfigError('cannot create structure type\'s field "{}"'.format(field_name), e) + except ConfigError as exc: + exc.append_ctx('structure type', + 'cannot create field "{}"'.format(field_name)) + raise return obj @@ -1256,17 +1416,20 @@ class _YamlConfigParser: ]) if unk_prop: - raise ConfigError('unknown array type object property: "{}"'.format(unk_prop)) + raise ConfigError('array type', + 'unknown property: "{}"'.format(unk_prop)) # length if 'length' in node: length = node['length'] if not _is_int_prop(length): - raise ConfigError('"length" property of array type object must be an integer') + raise ConfigError('array type', + '"length" property must be an integer') if type(length) is int and length < 0: - raise ConfigError('invalid static array length: {}'.format(length)) + raise ConfigError('array type', + 'invalid length: {}'.format(length)) obj.length = length @@ -1276,8 +1439,9 @@ class _YamlConfigParser: try: obj.element_type = self._create_type(node['element-type']) - except Exception as e: - raise ConfigError('cannot create array type\'s element type', e) + except ConfigError as exc: + exc.append_ctx('array type', 'cannot create element type') + raise return obj @@ -1286,12 +1450,14 @@ class _YamlConfigParser: t = self._lookup_type_alias(type_node) if t is None: - raise ConfigError('unknown type alias "{}"'.format(type_node)) + raise ConfigError('type', + 'unknown type alias "{}"'.format(type_node)) return t if not _is_assoc_array_prop(type_node): - raise ConfigError('type objects must be associative 
arrays or strings (type alias name)') + raise ConfigError('type', + 'expecting associative arrays or string (type alias name)') # inherit: # v2.0: "inherit" @@ -1306,35 +1472,41 @@ class _YamlConfigParser: if self._version >= 201: if '$inherit' in type_node: if inherit_node is not None: - raise ConfigError('cannot specify both "inherit" and "$inherit" properties of type object: prefer "$inherit"') + raise ConfigError('type', + 'cannot specify both "inherit" and "$inherit" properties of type object: prefer "$inherit"') inherit_prop = '$inherit' inherit_node = type_node[inherit_prop] if inherit_node is not None and 'class' in type_node: - raise ConfigError('cannot specify both "{}" and "class" properties in type object'.format(inherit_prop)) + raise ConfigError('type', + 'cannot specify both "{}" and "class" properties in type object'.format(inherit_prop)) if inherit_node is not None: if not _is_str_prop(inherit_node): - raise ConfigError('"{}" property of type object must be a string'.format(inherit_prop)) + raise ConfigError('type', + '"{}" property of type object must be a string'.format(inherit_prop)) base = self._lookup_type_alias(inherit_node) if base is None: - raise ConfigError('cannot inherit from type alias "{}": type alias does not exist at this point'.format(inherit_node)) + raise ConfigError('type', + 'cannot inherit from type alias "{}": type alias does not exist at this point'.format(inherit_node)) func = self._type_to_create_type_func[type(base)] else: if 'class' not in type_node: - raise ConfigError('type objects which do not inherit must have a "class" property') + raise ConfigError('type', + 'does not inherit, therefore must have a "class" property') class_name = type_node['class'] if type(class_name) is not str: - raise ConfigError('type objects\' "class" property must be a string') + raise ConfigError('type', '"class" property must be a string') if class_name not in self._class_name_to_create_type_func: - raise ConfigError('unknown type class "{}"'.format(class_name)) + raise ConfigError('type', + 'unknown class "{}"'.format(class_name)) base = None func = self._class_name_to_create_type_func[class_name] @@ -1353,16 +1525,20 @@ class _YamlConfigParser: return if not _is_assoc_array_prop(ta_node): - raise ConfigError('"type-aliases" property (metadata) must be an associative array') + raise ConfigError('metadata', + '"type-aliases" property must be an associative array') for ta_name, ta_type in ta_node.items(): if ta_name in self._tas: - raise ConfigError('duplicate type alias "{}"'.format(ta_name)) + raise ConfigError('metadata', + 'duplicate type alias "{}"'.format(ta_name)) try: t = self._create_type(ta_type) - except Exception as e: - raise ConfigError('cannot create type alias "{}"'.format(ta_name), e) + except ConfigError as exc: + exc.append_ctx('metadata', + 'cannot create type alias "{}"'.format(ta_name)) + raise self._tas[ta_name] = t @@ -1371,7 +1547,8 @@ class _YamlConfigParser: clock = metadata.Clock() if not _is_assoc_array_prop(node): - raise ConfigError('clock objects must be associative arrays') + raise ConfigError('metadata', + 'clock objects must be associative arrays') known_props = [ 'uuid', @@ -1389,7 +1566,8 @@ class _YamlConfigParser: unk_prop = _get_first_unknown_prop(node, known_props) if unk_prop: - raise ConfigError('unknown clock object property: "{}"'.format(unk_prop)) + raise ConfigError('clock', + 'unknown property: "{}"'.format(unk_prop)) # UUID if 'uuid' in node: @@ -1399,12 +1577,13 @@ class _YamlConfigParser: clock.set_default_uuid() 
else: if not _is_str_prop(uuidp): - raise ConfigError('"uuid" property of clock object must be a string') + raise ConfigError('clock', + '"uuid" property must be a string') try: uuidp = uuid.UUID(uuidp) except: - raise ConfigError('malformed UUID (clock object): "{}"'.format(uuidp)) + raise ConfigError('clock', 'malformed UUID: "{}"'.format(uuidp)) clock.uuid = uuidp @@ -1416,7 +1595,8 @@ class _YamlConfigParser: clock.set_default_description() else: if not _is_str_prop(desc): - raise ConfigError('"description" property of clock object must be a string') + raise ConfigError('clock', + '"description" property must be a string') clock.description = desc @@ -1428,10 +1608,12 @@ class _YamlConfigParser: clock.set_default_freq() else: if not _is_int_prop(freq): - raise ConfigError('"freq" property of clock object must be an integer') + raise ConfigError('clock', + '"freq" property must be an integer') if freq < 1: - raise ConfigError('invalid clock frequency: {}'.format(freq)) + raise ConfigError('clock', + 'invalid frequency: {}'.format(freq)) clock.freq = freq @@ -1443,10 +1625,12 @@ class _YamlConfigParser: clock.set_default_error_cycles() else: if not _is_int_prop(error_cycles): - raise ConfigError('"error-cycles" property of clock object must be an integer') + raise ConfigError('clock', + '"error-cycles" property must be an integer') if error_cycles < 0: - raise ConfigError('invalid clock error cycles: {}'.format(error_cycles)) + raise ConfigError('clock', + 'invalid error cycles: {}'.format(error_cycles)) clock.error_cycles = error_cycles @@ -1459,12 +1643,14 @@ class _YamlConfigParser: clock.set_default_offset_cycles() else: if not _is_assoc_array_prop(offset): - raise ConfigError('"offset" property of clock object must be an associative array') + raise ConfigError('clock', + '"offset" property must be an associative array') unk_prop = _get_first_unknown_prop(offset, ['cycles', 'seconds']) if unk_prop: - raise ConfigError('unknown clock object\'s offset property: "{}"'.format(unk_prop)) + raise ConfigError('clock', + 'unknown offset property: "{}"'.format(unk_prop)) # cycles if 'cycles' in offset: @@ -1474,10 +1660,12 @@ class _YamlConfigParser: clock.set_default_offset_cycles() else: if not _is_int_prop(offset_cycles): - raise ConfigError('"cycles" property of clock object\'s offset property must be an integer') + raise ConfigError('clock\'s "offset" property', + '"cycles" property must be an integer') if offset_cycles < 0: - raise ConfigError('invalid clock offset cycles: {}'.format(offset_cycles)) + raise ConfigError('clock\'s "offset" property', + 'invalid cycles: {}'.format(offset_cycles)) clock.offset_cycles = offset_cycles @@ -1489,10 +1677,12 @@ class _YamlConfigParser: clock.set_default_offset_seconds() else: if not _is_int_prop(offset_seconds): - raise ConfigError('"seconds" property of clock object\'s offset property must be an integer') + raise ConfigError('clock\'s "offset" property', + '"seconds" property must be an integer') if offset_seconds < 0: - raise ConfigError('invalid clock offset seconds: {}'.format(offset_seconds)) + raise ConfigError('clock\'s "offset" property', + 'invalid seconds: {}'.format(offset_seconds)) clock.offset_seconds = offset_seconds @@ -1504,7 +1694,8 @@ class _YamlConfigParser: clock.set_default_absolute() else: if not _is_bool_prop(absolute): - raise ConfigError('"absolute" property of clock object must be a boolean') + raise ConfigError('clock', + '"absolute" property must be a boolean') clock.absolute = absolute @@ -1521,7 +1712,8 @@ class 
_YamlConfigParser: if self._version >= 201: if '$return-ctype' in node: if return_ctype_node is not None: - raise ConfigError('cannot specify both "return-ctype" and "$return-ctype" properties of clock object: prefer "$return-ctype"') + raise ConfigError('clock', + 'cannot specify both "return-ctype" and "$return-ctype" properties: prefer "$return-ctype"') return_ctype_prop = '$return-ctype' return_ctype_node = node[return_ctype_prop] @@ -1531,7 +1723,8 @@ class _YamlConfigParser: clock.set_default_return_ctype() else: if not _is_str_prop(return_ctype_node): - raise ConfigError('"{}" property of clock object must be a string'.format(return_ctype_prop)) + raise ConfigError('clock', + '"{}" property of must be a string'.format(return_ctype_prop)) clock.return_ctype = return_ctype_node @@ -1549,19 +1742,24 @@ class _YamlConfigParser: return if not _is_assoc_array_prop(clocks_node): - raise ConfigError('"clocks" property (metadata) must be an associative array') + raise ConfigError('metadata', + '"clocks" property must be an associative array') for clock_name, clock_node in clocks_node.items(): if not _is_valid_identifier(clock_name): - raise ConfigError('invalid clock name: "{}"'.format(clock_name)) + raise ConfigError('metadata', + 'invalid clock name: "{}"'.format(clock_name)) if clock_name in self._clocks: - raise ConfigError('duplicate clock "{}"'.format(clock_name)) + raise ConfigError('metadata', + 'duplicate clock "{}"'.format(clock_name)) try: clock = self._create_clock(clock_node) - except Exception as e: - raise ConfigError('cannot create clock "{}"'.format(clock_name), e) + except ConfigError as exc: + exc.append_ctx('metadata', + 'cannot create clock "{}"'.format(clock_name)) + raise clock.name = clock_name self._clocks[clock_name] = clock @@ -1578,17 +1776,21 @@ class _YamlConfigParser: return env if not _is_assoc_array_prop(env_node): - raise ConfigError('"env" property (metadata) must be an associative array') + raise ConfigError('metadata', + '"env" property must be an associative array') for env_name, env_value in env_node.items(): if env_name in env: - raise ConfigError('duplicate environment variable "{}"'.format(env_name)) + raise ConfigError('metadata', + 'duplicate environment variable "{}"'.format(env_name)) if not _is_valid_identifier(env_name): - raise ConfigError('invalid environment variable name: "{}"'.format(env_name)) + raise ConfigError('metadata', + 'invalid environment variable name: "{}"'.format(env_name)) if not _is_int_prop(env_value) and not _is_str_prop(env_value): - raise ConfigError('invalid environment variable value ("{}"): expecting integer or string'.format(env_name)) + raise ConfigError('metadata', + 'invalid environment variable value ("{}"): expecting integer or string'.format(env_name)) env[env_name] = env_value @@ -1610,7 +1812,8 @@ class _YamlConfigParser: if self._version >= 201: if '$log-levels' in metadata_node: if log_levels_node is not None: - raise ConfigError('cannot specify both "log-levels" and "$log-levels" properties of metadata object: prefer "$log-levels"') + raise ConfigError('metadata', + 'cannot specify both "log-levels" and "$log-levels" properties of metadata object: prefer "$log-levels"') log_levels_prop = '$log-levels' log_levels_node = metadata_node[log_levels_prop] @@ -1619,17 +1822,21 @@ class _YamlConfigParser: return if not _is_assoc_array_prop(log_levels_node): - raise ConfigError('"{}" property (metadata) must be an associative array'.format(log_levels_prop)) + raise ConfigError('metadata', + '"{}" property 
(metadata) must be an associative array'.format(log_levels_prop))
 
         for ll_name, ll_value in log_levels_node.items():
             if ll_name in self._log_levels:
-                raise ConfigError('duplicate log level entry "{}"'.format(ll_name))
+                raise ConfigError('"{}" property'.format(log_levels_prop),
+                                  'duplicate entry "{}"'.format(ll_name))
 
             if not _is_int_prop(ll_value):
-                raise ConfigError('invalid log level entry ("{}"): expecting an integer'.format(ll_name))
+                raise ConfigError('"{}" property'.format(log_levels_prop),
+                                  'invalid entry ("{}"): expecting an integer'.format(ll_name))
 
             if ll_value < 0:
-                raise ConfigError('invalid log level entry ("{}"): log level value must be positive'.format(ll_name))
+                raise ConfigError('"{}" property'.format(log_levels_prop),
+                                  'invalid entry ("{}"): value must be positive'.format(ll_name))
 
             self._log_levels[ll_name] = ll_value
 
@@ -1638,12 +1845,13 @@ class _YamlConfigParser:
         trace = metadata.Trace()
 
         if 'trace' not in metadata_node:
-            raise ConfigError('missing "trace" property (metadata)')
+            raise ConfigError('metadata', 'missing "trace" property')
 
         trace_node = metadata_node['trace']
 
         if not _is_assoc_array_prop(trace_node):
-            raise ConfigError('"trace" property (metadata) must be an associative array')
+            raise ConfigError('metadata',
+                              '"trace" property must be an associative array')
 
         unk_prop = _get_first_unknown_prop(trace_node, [
             'byte-order',
@@ -1652,7 +1860,8 @@ class _YamlConfigParser:
         ])
 
         if unk_prop:
-            raise ConfigError('unknown trace object property: "{}"'.format(unk_prop))
+            raise ConfigError('trace',
+                              'unknown property: "{}"'.format(unk_prop))
 
         # set byte order (already parsed)
         trace.byte_order = self._bo
@@ -1662,7 +1871,8 @@ class _YamlConfigParser:
             uuidp = trace_node['uuid']
 
             if not _is_str_prop(uuidp):
-                raise ConfigError('"uuid" property of trace object must be a string')
+                raise ConfigError('trace',
+                                  '"uuid" property must be a string')
 
             if uuidp == 'auto':
                 uuidp = uuid.uuid1()
@@ -1670,7 +1880,8 @@ class _YamlConfigParser:
                 try:
                     uuidp = uuid.UUID(uuidp)
                 except:
-                    raise ConfigError('malformed UUID (trace object): "{}"'.format(uuidp))
+                    raise ConfigError('trace',
+                                      'malformed UUID: "{}"'.format(uuidp))
 
             trace.uuid = uuidp
@@ -1678,8 +1889,10 @@ class _YamlConfigParser:
         if 'packet-header-type' in trace_node and trace_node['packet-header-type'] is not None:
             try:
                 ph_type = self._create_type(trace_node['packet-header-type'])
-            except Exception as e:
-                raise ConfigError('cannot create packet header type (trace)', e)
+            except ConfigError as exc:
+                exc.append_ctx('trace',
+                               'cannot create packet header type')
+                raise
 
             trace.packet_header_type = ph_type
@@ -1695,7 +1908,8 @@ class _YamlConfigParser:
         event = metadata.Event()
 
         if not _is_assoc_array_prop(event_node):
-            raise ConfigError('event objects must be associative arrays')
+            raise ConfigError('event',
+                              'expecting associative array')
 
         unk_prop = _get_first_unknown_prop(event_node, [
             'log-level',
@@ -1704,7 +1918,8 @@ class _YamlConfigParser:
         ])
 
         if unk_prop:
-            raise ConfigError('unknown event object property: "{}"'.format(unk_prop))
+            raise ConfigError('event',
+                              'unknown property: "{}"'.format(unk_prop))
 
         if 'log-level' in event_node and event_node['log-level'] is not None:
             ll_node = event_node['log-level']
 
             if _is_str_prop(ll_node):
                 ll_value = self._lookup_log_level(event_node['log-level'])
 
                 if ll_value is None:
-                    raise ConfigError('cannot find log level "{}"'.format(ll_node))
+                    raise ConfigError('event\'s "log-level" property',
+                                      'cannot find log level "{}"'.format(ll_node))
 
                 ll = 
metadata.LogLevel(event_node['log-level'], ll_value) elif _is_int_prop(ll_node): if ll_node < 0: - raise ConfigError('invalid log level value {}: value must be positive'.format(ll_node)) + raise ConfigError('event\'s "log-level" property', + 'invalid value {}: value must be positive'.format(ll_node)) ll = metadata.LogLevel(None, ll_node) else: - raise ConfigError('"log-level" property must be either a string or an integer') + raise ConfigError('event\'s "log-level" property', + 'must be either a string or an integer') event.log_level = ll @@ -1731,16 +1949,20 @@ class _YamlConfigParser: try: t = self._create_type(event_node['context-type']) - except Exception as e: - raise ConfigError('cannot create event\'s context type object', e) + except ConfigError as exc: + exc.append_ctx('event', + 'cannot create context type object') + raise event.context_type = t if 'payload-type' in event_node and event_node['payload-type'] is not None: try: t = self._create_type(event_node['payload-type']) - except Exception as e: - raise ConfigError('cannot create event\'s payload type object', e) + except ConfigError as exc: + exc.append_ctx('event', + 'cannot create payload type object') + raise event.payload_type = t @@ -1770,51 +1992,62 @@ class _YamlConfigParser: if unk_prop == '$default': add = ' (use version 2.2 or greater)' - raise ConfigError('unknown stream object property{}: "{}"'.format(add, unk_prop)) + raise ConfigError('stream', + 'unknown property{}: "{}"'.format(add, unk_prop)) if 'packet-context-type' in stream_node and stream_node['packet-context-type'] is not None: try: t = self._create_type(stream_node['packet-context-type']) - except Exception as e: - raise ConfigError('cannot create stream\'s packet context type object', e) + except ConfigError as exc: + exc.append_ctx('stream', + 'cannot create packet context type object') + raise stream.packet_context_type = t if 'event-header-type' in stream_node and stream_node['event-header-type'] is not None: try: t = self._create_type(stream_node['event-header-type']) - except Exception as e: - raise ConfigError('cannot create stream\'s event header type object', e) + except ConfigError as exc: + exc.append_ctx('stream', + 'cannot create event header type object') + raise stream.event_header_type = t if 'event-context-type' in stream_node and stream_node['event-context-type'] is not None: try: t = self._create_type(stream_node['event-context-type']) - except Exception as e: - raise ConfigError('cannot create stream\'s event context type object', e) + except ConfigError as exc: + exc.append_ctx('stream', + 'cannot create event context type object') + raise stream.event_context_type = t if 'events' not in stream_node: - raise ConfigError('missing "events" property in stream object') + raise ConfigError('stream', + 'missing "events" property') events = stream_node['events'] if events is not None: if not _is_assoc_array_prop(events): - raise ConfigError('"events" property of stream object must be an associative array') + raise ConfigError('stream', + '"events" property must be an associative array') if not events: - raise ConfigError('at least one event is needed within a stream object') + raise ConfigError('stream', 'at least one event is needed') cur_id = 0 for ev_name, ev_node in events.items(): try: ev = self._create_event(ev_node) - except Exception as e: - raise ConfigError('cannot create event "{}"'.format(ev_name), e) + except ConfigError as exc: + exc.append_ctx('stream', + 'cannot create event "{}"'.format(ev_name)) + raise ev.id = cur_id 
ev.name = ev_name @@ -1825,12 +2058,14 @@ class _YamlConfigParser: default_node = stream_node['$default'] if not _is_bool_prop(default_node): - raise ConfigError('invalid "$default" property in stream object: expecting a boolean') + raise ConfigError('stream', + 'invalid "$default" property: expecting a boolean') if default_node: if self._meta.default_stream_name is not None and self._meta.default_stream_name != stream_name: fmt = 'cannot specify more than one default stream (default stream already set to "{}")' - raise ConfigError(fmt.format(self._meta.default_stream_name)) + raise ConfigError('stream', + fmt.format(self._meta.default_stream_name)) self._meta.default_stream_name = stream_name @@ -1840,23 +2075,28 @@ class _YamlConfigParser: streams = collections.OrderedDict() if 'streams' not in metadata_node: - raise ConfigError('missing "streams" property (metadata)') + raise ConfigError('metadata', + 'missing "streams" property') streams_node = metadata_node['streams'] if not _is_assoc_array_prop(streams_node): - raise ConfigError('"streams" property (metadata) must be an associative array') + raise ConfigError('metadata', + '"streams" property must be an associative array') if not streams_node: - raise ConfigError('at least one stream is needed (metadata)') + raise ConfigError('metadata\'s "streams" property', + 'at least one stream is needed') cur_id = 0 for stream_name, stream_node in streams_node.items(): try: stream = self._create_stream(stream_name, stream_node) - except Exception as e: - raise ConfigError('cannot create stream "{}"'.format(stream_name), e) + except ConfigError as exc: + exc.append_ctx('metadata', + 'cannot create stream "{}"'.format(stream_name)) + raise stream.id = cur_id stream.name = str(stream_name) @@ -1869,12 +2109,14 @@ class _YamlConfigParser: self._meta = metadata.Metadata() if 'metadata' not in root: - raise ConfigError('missing "metadata" property (configuration)') + raise ConfigError('configuration', + 'missing "metadata" property') metadata_node = root['metadata'] if not _is_assoc_array_prop(metadata_node): - raise ConfigError('"metadata" property (configuration) must be an associative array') + raise ConfigError('configuration\'s "metadata" property', + 'must be an associative array') known_props = [ 'type-aliases', @@ -1902,13 +2144,15 @@ class _YamlConfigParser: if unk_prop == '$default-stream': add = ' (use version 2.2 or greater)' - raise ConfigError('unknown metadata property{}: "{}"'.format(add, unk_prop)) + raise ConfigError('metadata', + 'unknown property{}: "{}"'.format(add, unk_prop)) if '$default-stream' in metadata_node and metadata_node['$default-stream'] is not None: default_stream_node = metadata_node['$default-stream'] if not _is_str_prop(default_stream_node): - raise ConfigError('invalid "$default-stream" property (metadata): expecting a string') + raise ConfigError('metadata\'s "$default-stream" property', + 'expecting a string') self._meta.default_stream_name = default_stream_node @@ -1925,17 +2169,20 @@ class _YamlConfigParser: def _get_version(self, root): if 'version' not in root: - raise ConfigError('missing "version" property (configuration)') + raise ConfigError('configuration', + 'missing "version" property') version_node = root['version'] if not _is_str_prop(version_node): - raise ConfigError('"version" property (configuration) must be a string') + raise ConfigError('configuration\'s "version" property', + 'must be a string') version_node = version_node.strip() if version_node not in ['2.0', '2.1', '2.2']: - raise 
ConfigError('unsupported version ({}): versions 2.0, 2.1, and 2.2 are supported'.format(version_node))
+            raise ConfigError('configuration',
+                              'unsupported version ({}): versions 2.0, 2.1, and 2.2 are supported'.format(version_node))
 
         # convert version string to comparable version integer
         parts = version_node.split('.')
@@ -1955,10 +2202,12 @@ class _YamlConfigParser:
             return def_prefix
 
         if not _is_str_prop(prefix_node):
-            raise ConfigError('"prefix" property (configuration) must be a string')
+            raise ConfigError('configuration\'s "prefix" property',
+                              'must be a string')
 
         if not _is_valid_identifier(prefix_node):
-            raise ConfigError('"prefix" property (configuration) must be a valid C identifier')
+            raise ConfigError('configuration\'s "prefix" property',
+                              'must be a valid C identifier')
 
         return prefix_node
 
@@ -1971,7 +2220,8 @@ class _YamlConfigParser:
         options_node = root['options']
 
         if not _is_assoc_array_prop(options_node):
-            raise ConfigError('"options" property (configuration) must be an associative array')
+            raise ConfigError('configuration\'s "options" property',
+                              'must be an associative array')
 
         known_props = [
             'gen-prefix-def',
@@ -1980,13 +2230,15 @@ class _YamlConfigParser:
         unk_prop = _get_first_unknown_prop(options_node, known_props)
 
         if unk_prop:
-            raise ConfigError('unknown configuration option property: "{}"'.format(unk_prop))
+            raise ConfigError('configuration\'s "options" property',
+                              'unknown property: "{}"'.format(unk_prop))
 
         if 'gen-prefix-def' in options_node and options_node['gen-prefix-def'] is not None:
             gen_prefix_def_node = options_node['gen-prefix-def']
 
             if not _is_bool_prop(gen_prefix_def_node):
-                raise ConfigError('invalid configuration option "gen-prefix-def": expecting a boolean')
+                raise ConfigError('configuration\'s "options" property',
+                                  'invalid option "gen-prefix-def": expecting a boolean')
 
             cfg_options.gen_prefix_def = gen_prefix_def_node
 
@@ -1994,7 +2246,8 @@ class _YamlConfigParser:
             gen_default_stream_def_node = options_node['gen-default-stream-def']
 
             if not _is_bool_prop(gen_default_stream_def_node):
-                raise ConfigError('invalid configuration option "gen-default-stream-def": expecting a boolean')
+                raise ConfigError('configuration\'s "options" property',
+                                  'invalid option "gen-default-stream-def": expecting a boolean')
 
             cfg_options.gen_default_stream_def = gen_default_stream_def_node
 
@@ -2024,7 +2277,8 @@ class _YamlConfigParser:
         if norm_path in self._include_stack:
             base_path = self._get_last_include_file()
-            raise ConfigError('in "{}": cannot recursively include file "{}"'.format(base_path, norm_path))
+            raise ConfigError('in "{}"'.format(base_path),
+                              'cannot recursively include file "{}"'.format(norm_path))
 
         self._include_stack.append(norm_path)
 
@@ -2033,7 +2287,8 @@ class _YamlConfigParser:
             if not self._ignore_include_not_found:
                 base_path = self._get_last_include_file()
-                raise ConfigError('in "{}": cannot include file "{}": file not found in include directories'.format(base_path, yaml_path))
+                raise ConfigError('in "{}"'.format(base_path),
+                                  'cannot include file "{}": file not found in include directories'.format(yaml_path))
 
             return None
 
@@ -2047,11 +2302,13 @@ class _YamlConfigParser:
         if _is_array_prop(include_node):
             for include_path in include_node:
                 if not _is_str_prop(include_path):
-                    raise ConfigError('invalid include property: expecting array of strings')
+                    raise ConfigError('"$include" property',
+                                      'expecting array of strings')
 
             return include_node
 
-        raise ConfigError('invalid include property: expecting string or array of strings')
+        raise ConfigError('"$include" 
property', + 'expecting string or array of strings') def _update_node(self, base_node, overlay_node): for olay_key, olay_value in overlay_node.items(): @@ -2074,7 +2331,8 @@ class _YamlConfigParser: process_base_include_cb, process_children_include_cb=None): if not _is_assoc_array_prop(last_overlay_node): - raise ConfigError('{} objects must be associative arrays'.format(name)) + raise ConfigError('"$include" property', + '{} objects must be associative arrays'.format(name)) # process children inclusions first if process_children_include_cb: @@ -2107,8 +2365,9 @@ class _YamlConfigParser: # recursively process includes try: overlay_node = process_base_include_cb(overlay_node) - except Exception as e: - raise ConfigError('in "{}"'.format(cur_base_path), e) + except ConfigError as exc: + exc.append_ctx('in "{}"'.format(cur_base_path)) + raise # pop include stack now that we're done including del self._include_stack[-1] @@ -2141,7 +2400,8 @@ class _YamlConfigParser: events_node = stream_node['events'] if not _is_assoc_array_prop(events_node): - raise ConfigError('"events" property must be an associative array') + raise ConfigError('"$include" property', + '"events" property must be an associative array') events_node_keys = list(events_node.keys()) @@ -2150,8 +2410,10 @@ class _YamlConfigParser: try: events_node[key] = self._process_event_include(event_node) - except Exception as e: - raise ConfigError('cannot process includes of event object "{}"'.format(key), e) + except ConfigError as exc: + exc.append_ctx('"$include" property', + 'cannot process includes of event object "{}"'.format(key)) + raise return self._process_node_include(stream_node, 'stream', self._process_stream_include, @@ -2174,7 +2436,8 @@ class _YamlConfigParser: clocks_node = metadata_node['clocks'] if not _is_assoc_array_prop(clocks_node): - raise ConfigError('"clocks" property (metadata) must be an associative array') + raise ConfigError('"$include" property', + '"clocks" property must be an associative array') clocks_node_keys = list(clocks_node.keys()) @@ -2183,14 +2446,17 @@ class _YamlConfigParser: try: clocks_node[key] = self._process_clock_include(clock_node) - except Exception as e: - raise ConfigError('cannot process includes of clock object "{}"'.format(key), e) + except ConfigError as exc: + exc.append_ctx('"$include" property', + 'cannot process includes of clock object "{}"'.format(key)) + raise if 'streams' in metadata_node: streams_node = metadata_node['streams'] if not _is_assoc_array_prop(streams_node): - raise ConfigError('"streams" property (metadata) must be an associative array') + raise ConfigError('"$include" property', + '"streams" property must be an associative array') streams_node_keys = list(streams_node.keys()) @@ -2199,8 +2465,10 @@ class _YamlConfigParser: try: streams_node[key] = self._process_stream_include(stream_node) - except Exception as e: - raise ConfigError('cannot process includes of stream object "{}"'.format(key), e) + except ConfigError as exc: + exc.append_ctx('"$include" property', + 'cannot process includes of stream object "{}"'.format(key)) + raise return self._process_node_include(metadata_node, 'metadata', self._process_metadata_include, @@ -2257,13 +2525,17 @@ class _YamlConfigParser: with open(yaml_path, 'r') as f: node = yaml.load(f, OLoader) except (OSError, IOError) as e: - raise ConfigError('cannot open file "{}"'.format(yaml_path)) - except Exception as e: - raise ConfigError('unknown error while trying to load file "{}"'.format(yaml_path), e) + raise 
ConfigError('configuration', + 'cannot open file "{}"'.format(yaml_path)) + except ConfigError as exc: + exc.append_ctx('configuration', + 'unknown error while trying to load file "{}"'.format(yaml_path)) + raise # loaded node must be an associate array if not _is_assoc_array_prop(node): - raise ConfigError('root of YAML file "{}" must be an associative array'.format(yaml_path)) + raise ConfigError('configuration', + 'root of YAML file "{}" must be an associative array'.format(yaml_path)) return node @@ -2277,11 +2549,14 @@ class _YamlConfigParser: try: root = self._yaml_ordered_load(yaml_path) - except Exception as e: - raise ConfigError('cannot parse YAML file "{}"'.format(yaml_path), e) + except ConfigError as exc: + exc.append_ctx('configuration', + 'cannot parse YAML file "{}"'.format(yaml_path)) + raise if not _is_assoc_array_prop(root): - raise ConfigError('configuration must be an associative array') + raise ConfigError('configuration', + 'must be an associative array') # get the config version self._version = self._get_version(root) @@ -2303,7 +2578,8 @@ class _YamlConfigParser: if unk_prop == 'options': add = ' (use version 2.2 or greater)' - raise ConfigError('unknown configuration property{}: "{}"'.format(add, unk_prop)) + raise ConfigError('configuration', + 'unknown property{}: "{}"'.format(add, unk_prop)) # process includes if supported if self._version >= 201: @@ -2329,5 +2605,7 @@ def from_yaml_file(path, include_dirs, ignore_include_not_found, dump_config): cfg = parser.parse(path) return cfg - except Exception as e: - raise ConfigError('cannot create configuration from YAML file "{}"'.format(path), e) + except ConfigError as exc: + exc.append_ctx('configuration', + 'cannot create configuration from YAML file "{}"'.format(path)) + raise -- 2.34.1
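
For reference, the error-reporting flow this patch introduces reads roughly as follows: the innermost parsing step creates the ConfigError with an initial context frame, each caller appends its own frame with append_ctx() and re-raises the same exception object, and the accumulated frames are finally printed outermost-first. The snippet below is a minimal sketch, assuming a barectf checkout with this patch applied; _parse_clock() and _parse_metadata() are hypothetical stand-ins, not actual barectf functions.

    # Minimal sketch of the context-appending pattern (assumption: barectf
    # with this patch applied is importable; the parser functions below are
    # hypothetical stand-ins).
    from barectf.config import ConfigError

    def _parse_clock(node):
        # innermost failure: create the error with its first context frame
        raise ConfigError('clock', '"freq" property must be an integer')

    def _parse_metadata(node):
        try:
            _parse_clock(node)
        except ConfigError as exc:
            # outer layers append context to the same object and re-raise
            exc.append_ctx('metadata', 'cannot create clock "default"')
            raise

    try:
        _parse_metadata({'freq': 'fast'})
    except ConfigError as exc:
        # print the accumulated frames outermost-first
        for ctx in reversed(exc.ctx):
            print('{}: {}'.format(ctx.name, ctx.msg if ctx.msg is not None else ''))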