return self._msg
-class ConfigParseError(RuntimeError):
+class _ConfigParseError(RuntimeError):
def __init__(self, init_ctx_name, init_ctx_msg=None):
self._ctx = []
self.append_ctx(init_ctx_name, init_ctx_msg)
# A schema short ID is the part between `schemas/` and `.json` in
# its URI.
#
- # Raises a `ConfigParseError` object, hiding any `jsonschema`
+ # Raises a `_ConfigParseError` object, hiding any `jsonschema`
# exception, on validation failure.
def validate(self, instance, schema_short_id):
try:
self._validate(instance, schema_short_id)
except jsonschema.ValidationError as exc:
- # convert to barectf `ConfigParseError` exception
+ # convert to barectf `_ConfigParseError` exception
contexts = ['Configuration object']
for elem in exc.absolute_path:
msgs = '; '.join([e.message for e in exc.context])
schema_ctx = ': {}'.format(msgs)
- new_exc = ConfigParseError(contexts.pop(),
- '{}{} (from schema `{}`)'.format(exc.message,
- schema_ctx,
- schema_short_id))
+ new_exc = _ConfigParseError(contexts.pop(),
+ '{}{} (from schema `{}`)'.format(exc.message,
+ schema_ctx,
+ schema_short_id))
for ctx in reversed(contexts):
new_exc.append_ctx(ctx)
if iden in ctf_keywords:
fmt = 'Invalid {} (not a valid identifier): `{}`'
- raise ConfigParseError(ctx_obj_name, fmt.format(prop, iden))
+ raise _ConfigParseError(ctx_obj_name, fmt.format(prop, iden))
def _validate_alignment(align, ctx_obj_name):
assert align >= 1
if (align & (align - 1)) != 0:
- raise ConfigParseError(ctx_obj_name,
- 'Invalid alignment: {}'.format(align))
+ raise _ConfigParseError(ctx_obj_name,
+ 'Invalid alignment: {}'.format(align))
def _append_error_ctx(exc, obj_name, msg=None):
def _validate_struct_type(self, t, entity_root):
if not entity_root:
- raise ConfigParseError('Structure type',
- 'Inner structure types are not supported as of this version')
+ raise _ConfigParseError('Structure type',
+ 'Inner structure types are not supported as of this version')
for field_name, field_type in t.fields.items():
if entity_root and self._cur_entity is _Entity.TRACE_PACKET_HEADER:
try:
self._validate_type(field_type, False)
- except ConfigParseError as exc:
+ except _ConfigParseError as exc:
_append_error_ctx(exc, 'Structure type\'s field `{}`'.format(field_name))
def _validate_array_type(self, t, entity_root):
- raise ConfigParseError('Array type', 'Not supported as of this version')
+ raise _ConfigParseError('Array type', 'Not supported as of this version')
def _validate_type(self, t, entity_root):
func = self._type_to_validate_type_func.get(type(t))
# make sure entity is byte-aligned
if t.real_align < 8:
- raise ConfigParseError('Root type',
- 'Alignment must be at least 8')
+ raise _ConfigParseError('Root type',
+ 'Alignment must be at least 8')
assert type(t) is _Struct
try:
self._validate_entity(meta.trace.packet_header_type)
- except ConfigParseError as exc:
+ except _ConfigParseError as exc:
_append_error_ctx(exc, 'Trace', 'Invalid packet header type')
for stream_name, stream in meta.streams.items():
try:
self._validate_entity(stream.packet_context_type)
- except ConfigParseError as exc:
+ except _ConfigParseError as exc:
_append_error_ctx(exc, 'Stream `{}`'.format(stream_name),
'Invalid packet context type')
try:
self._validate_entity(stream.event_header_type)
- except ConfigParseError as exc:
+ except _ConfigParseError as exc:
_append_error_ctx(exc, 'Stream `{}`'.format(stream_name),
'Invalid event header type')
try:
self._validate_entity(stream.event_context_type)
- except ConfigParseError as exc:
+ except _ConfigParseError as exc:
_append_error_ctx(exc, 'Stream `{}`'.format(stream_name),
'Invalid event context type'.format(stream_name))
try:
self._validate_entity(ev.context_type)
- except ConfigParseError as exc:
+ except _ConfigParseError as exc:
_append_error_ctx(exc, 'Event `{}`'.format(ev_name),
'Invalid context type')
try:
self._validate_entity(ev.payload_type)
- except ConfigParseError as exc:
+ except _ConfigParseError as exc:
_append_error_ctx(exc, 'Event `{}`'.format(ev_name),
'Invalid payload type')
if stream.is_event_empty(ev):
- raise ConfigParseError('Event `{}`'.format(ev_name), 'Empty')
- except ConfigParseError as exc:
+ raise _ConfigParseError('Event `{}`'.format(ev_name), 'Empty')
+ except _ConfigParseError as exc:
_append_error_ctx(exc, 'Stream `{}`'.format(stream_name))
def _validate_default_stream(self, meta):
if meta.default_stream_name:
if meta.default_stream_name not in meta.streams.keys():
fmt = 'Default stream name (`{}`) does not exist'
- raise ConfigParseError('barectf metadata',
- fmt.format(meta.default_stream_name))
+ raise _ConfigParseError('barectf metadata',
+ fmt.format(meta.default_stream_name))
def validate(self, meta):
self._validate_entities_and_names(meta)
if len(self._meta.streams) > 1:
# yes
if t is None:
- raise ConfigParseError('`packet-header-type` property',
- 'Need `stream_id` field (more than one stream), but trace packet header type is missing')
+ raise _ConfigParseError('`packet-header-type` property',
+ 'Need `stream_id` field (more than one stream), but trace packet header type is missing')
if 'stream_id' not in t.fields:
- raise ConfigParseError('`packet-header-type` property',
- 'Need `stream_id` field (more than one stream)')
+ raise _ConfigParseError('`packet-header-type` property',
+ 'Need `stream_id` field (more than one stream)')
if t is None:
return
for i, (field_name, field_type) in enumerate(t.fields.items()):
if field_name == 'magic':
if i != 0:
- raise ConfigParseError('`packet-header-type` property',
- '`magic` field must be the first trace packet header type\'s field')
+ raise _ConfigParseError('`packet-header-type` property',
+ '`magic` field must be the first trace packet header type\'s field')
elif field_name == 'stream_id':
# `id` size can fit all event IDs
if len(self._meta.streams) > (1 << field_type.size):
- raise ConfigParseError('`packet-header-type` property',
- '`stream_id` field\' size is too small for the number of trace streams')
+ raise _ConfigParseError('`packet-header-type` property',
+ '`stream_id` field\' size is too small for the number of trace streams')
def _validate_trace(self, meta):
self._validate_trace_packet_header_type(meta.trace.packet_header_type)
# `timestamp_begin` and `timestamp_end` are mapped to the same clock
if ts_begin is not None and ts_end is not None:
if ts_begin.property_mappings[0].object.name != ts_end.property_mappings[0].object.name:
- raise ConfigParseError('`timestamp_begin` and `timestamp_end` fields must be mapped to the same clock object in stream packet context type')
+ raise _ConfigParseError('`timestamp_begin` and `timestamp_end` fields must be mapped to the same clock object in stream packet context type')
# `packet_size` size must be greater than or equal to `content_size` size
if t.fields['content_size'].size > t.fields['packet_size'].size:
- raise ConfigParseError('`packet-context-type` property',
- '`content_size` field size must be lesser than or equal to `packet_size` field size')
+ raise _ConfigParseError('`packet-context-type` property',
+ '`content_size` field size must be lesser than or equal to `packet_size` field size')
def _validate_stream_event_header(self, stream):
t = stream.event_header_type
if len(stream.events) > 1:
# yes
if t is None:
- raise ConfigParseError('`event-header-type` property',
- 'Need `id` field (more than one event), but stream event header type is missing')
+ raise _ConfigParseError('`event-header-type` property',
+ 'Need `id` field (more than one event), but stream event header type is missing')
if 'id' not in t.fields:
- raise ConfigParseError('`event-header-type` property',
- 'Need `id` field (more than one event)')
+ raise _ConfigParseError('`event-header-type` property',
+ 'Need `id` field (more than one event)')
if t is None:
return
if eid is not None:
# `id` size can fit all event IDs
if len(stream.events) > (1 << eid.size):
- raise ConfigParseError('`event-header-type` property',
- '`id` field\' size is too small for the number of stream events')
+ raise _ConfigParseError('`event-header-type` property',
+ '`id` field\' size is too small for the number of stream events')
def _validate_stream(self, stream):
self._validate_stream_packet_context(stream)
for stream in meta.streams.values():
try:
self._validate_stream(stream)
- except ConfigParseError as exc:
+ except _ConfigParseError as exc:
_append_error_ctx(exc, 'Stream `{}`'.format(stream.name), 'Invalid')
clock = self._clocks.get(clock_name)
if clock is None:
- raise ConfigParseError('Integer type\'s clock property mapping',
- 'Invalid clock name `{}`'.format(clock_name))
+ raise _ConfigParseError('Integer type\'s clock property mapping',
+ 'Invalid clock name `{}`'.format(clock_name))
prop_mapping = _PropertyMapping()
prop_mapping.object = clock
# value type
try:
obj.value_type = self._create_type(node['value-type'])
- except ConfigParseError as exc:
+ except _ConfigParseError as exc:
_append_error_ctx(exc, 'Enumeration type',
'Cannot create integer type')
mx = value[1]
if mn > mx:
- raise ConfigParseError('Enumeration type',
- 'Invalid member (`{}`): invalid range ({} > {})'.format(label, mn, mx))
+ raise _ConfigParseError('Enumeration type',
+ 'Invalid member (`{}`): invalid range ({} > {})'.format(label, mn, mx))
value = (mn, mx)
cur = mx + 1
msg_fmt = 'Value {} is outside the value type range [{}, {}]'
if value[0] < value_min or value[0] > value_max:
- raise ConfigParseError(name_fmt.format(label),
- msg_fmt.format(value[0],
- value_min,
- value_max))
+ raise _ConfigParseError(name_fmt.format(label),
+ msg_fmt.format(value[0],
+ value_min,
+ value_max))
if value[1] < value_min or value[1] > value_max:
- raise ConfigParseError(name_fmt.format(label),
- msg_fmt.format(value[0],
- value_min,
- value_max))
+ raise _ConfigParseError(name_fmt.format(label),
+ msg_fmt.format(value[0],
+ value_min,
+ value_max))
obj.members[label] = value
try:
obj.fields[field_name] = self._create_type(field_node)
- except ConfigParseError as exc:
+ except _ConfigParseError as exc:
_append_error_ctx(exc, 'Structure type',
'Cannot create field `{}`'.format(field_name))
# element type
try:
obj.element_type = self._create_type(node['element-type'])
- except ConfigParseError as exc:
+ except _ConfigParseError as exc:
_append_error_ctx(exc, 'Array type', 'Cannot create element type')
return obj
try:
clock.uuid = uuid.UUID(uuid_node)
except:
- raise ConfigParseError('Clock', 'Malformed UUID: `{}`'.format(uuid_node))
+ raise _ConfigParseError('Clock', 'Malformed UUID: `{}`'.format(uuid_node))
# description
descr_node = node.get('description')
try:
clock = self._create_clock(clock_node)
- except ConfigParseError as exc:
+ except _ConfigParseError as exc:
_append_error_ctx(exc, 'Metadata',
'Cannot create clock `{}`'.format(clock_name))
try:
trace.uuid = uuid.UUID(uuid_node)
except:
- raise ConfigParseError('Trace',
- 'Malformed UUID: `{}`'.format(uuid_node))
+ raise _ConfigParseError('Trace',
+ 'Malformed UUID: `{}`'.format(uuid_node))
# packet header type
pht_node = trace_node.get('packet-header-type')
if pht_node is not None:
try:
trace.packet_header_type = self._create_type(pht_node)
- except ConfigParseError as exc:
+ except _ConfigParseError as exc:
_append_error_ctx(exc, 'Trace',
'Cannot create packet header type')
if ct_node is not None:
try:
event.context_type = self._create_type(ct_node)
- except ConfigParseError as exc:
+ except _ConfigParseError as exc:
_append_error_ctx(exc, 'Event',
'Cannot create context type object')
if pt_node is not None:
try:
event.payload_type = self._create_type(pt_node)
- except ConfigParseError as exc:
+ except _ConfigParseError as exc:
_append_error_ctx(exc, 'Event',
'Cannot create payload type object')
if pct_node is not None:
try:
stream.packet_context_type = self._create_type(pct_node)
- except ConfigParseError as exc:
+ except _ConfigParseError as exc:
_append_error_ctx(exc, 'Stream',
'Cannot create packet context type object')
if eht_node is not None:
try:
stream.event_header_type = self._create_type(eht_node)
- except ConfigParseError as exc:
+ except _ConfigParseError as exc:
_append_error_ctx(exc, 'Stream',
'Cannot create event header type object')
if ect_node is not None:
try:
stream.event_context_type = self._create_type(ect_node)
- except ConfigParseError as exc:
+ except _ConfigParseError as exc:
_append_error_ctx(exc, 'Stream',
'Cannot create event context type object')
for ev_name, ev_node in events_node.items():
try:
ev = self._create_event(ev_node)
- except ConfigParseError as exc:
+ except _ConfigParseError as exc:
_append_error_ctx(exc, 'Stream',
'Cannot create event `{}`'.format(ev_name))
if default_node is not None:
if self._meta.default_stream_name is not None and self._meta.default_stream_name != stream_name:
fmt = 'Cannot specify more than one default stream (default stream already set to `{}`)'
- raise ConfigParseError('Stream',
- fmt.format(self._meta.default_stream_name))
+ raise _ConfigParseError('Stream',
+ fmt.format(self._meta.default_stream_name))
self._meta.default_stream_name = stream_name
for stream_name, stream_node in streams_node.items():
try:
stream = self._create_stream(stream_name, stream_node)
- except ConfigParseError as exc:
+ except _ConfigParseError as exc:
_append_error_ctx(exc, 'Metadata',
'Cannot create stream `{}`'.format(stream_name))
# validate metadata
try:
_MetadataSpecialFieldsValidator().validate(self._meta)
- except ConfigParseError as exc:
+ except _ConfigParseError as exc:
_append_error_ctx(exc, 'Metadata')
try:
_BarectfMetadataValidator().validate(self._meta)
- except ConfigParseError as exc:
+ except _ConfigParseError as exc:
_append_error_ctx(exc, 'barectf metadata')
return self._meta
if norm_path in self._include_stack:
base_path = self._get_last_include_file()
- raise ConfigParseError('In `{}`',
- 'Cannot recursively include file `{}`'.format(base_path,
+ raise _ConfigParseError('In `{}`'.format(base_path),
+ 'Cannot recursively include file `{}`'.format(
norm_path))
self._include_stack.append(norm_path)
if not self._ignore_include_not_found:
base_path = self._get_last_include_file()
- raise ConfigParseError('In `{}`',
- 'Cannot include file `{}`: file not found in include directories'.format(base_path,
+ raise _ConfigParseError('In `{}`'.format(base_path),
+ 'Cannot include file `{}`: file not found in include directories'.format(
yaml_path))
def _get_include_paths(self, include_node):
# recursively process inclusions
try:
overlay_node = process_base_include_cb(overlay_node)
- except ConfigParseError as exc:
+ except _ConfigParseError as exc:
_append_error_ctx(exc, 'In `{}`'.format(cur_base_path))
# pop inclusion stack now that we're done including
# refer to the same field type alias more than once.
if alias in alias_set:
fmt = 'Cycle detected during the `{}` type alias resolution'
- raise ConfigParseError(from_descr, fmt.format(alias))
+ raise _ConfigParseError(from_descr, fmt.format(alias))
# try to load field type alias node named `alias`
if alias not in type_aliases_node:
- raise ConfigParseError(from_descr,
- 'Type alias `{}` does not exist'.format(alias))
+ raise _ConfigParseError(from_descr,
+ 'Type alias `{}` does not exist'.format(alias))
# resolve it
alias_set.add(alias)
event_name)
resolve_field_type_aliases_from(event, 'payload-type', 'event',
event_name)
- except ConfigParseError as exc:
+ except _ConfigParseError as exc:
_append_error_ctx(exc, 'Stream `{}`'.format(stream_name))
# we don't need the `type-aliases` node anymore
if type(ll_node) is str:
if ll_node not in log_levels_node:
- raise ConfigParseError('Event `{}`'.format(event_name),
- 'Log level `{}` does not exist'.format(ll_node))
+ raise _ConfigParseError('Event `{}`'.format(event_name),
+ 'Log level `{}` does not exist'.format(ll_node))
event[prop_name] = log_levels_node[ll_node]
- except ConfigParseError as exc:
+ except _ConfigParseError as exc:
_append_error_ctx(exc, 'Stream `{}`'.format(stream_name))
def _yaml_ordered_dump(self, node, **kwds):
with open(yaml_path, 'r') as f:
node = yaml.load(f, OLoader)
except (OSError, IOError) as e:
- raise ConfigParseError('Configuration',
- 'Cannot open file `{}`'.format(yaml_path))
- except ConfigParseError as exc:
+ raise _ConfigParseError('Configuration',
+ 'Cannot open file `{}`'.format(yaml_path))
+ except _ConfigParseError as exc:
_append_error_ctx(exc, 'Configuration',
- 'Unknown error while trying to load file `{}`'.format(yaml_path))
+ 'Unknown error while trying to load file `{}`'.format(yaml_path))
# loaded node must be an associate array
if type(node) is not collections.OrderedDict:
- raise ConfigParseError('Configuration',
- 'Root of YAML file `{}` must be an associative array'.format(yaml_path))
+ raise _ConfigParseError('Configuration',
+ 'Root of YAML file `{}` must be an associative array'.format(yaml_path))
return node
# load the configuration object as is from the root YAML file
try:
config_node = self._yaml_ordered_load(yaml_path)
- except ConfigParseError as exc:
+ except _ConfigParseError as exc:
_append_error_ctx(exc, 'Configuration',
'Cannot parse YAML file `{}`'.format(yaml_path))
parser = _YamlConfigParser(include_dirs, ignore_include_not_found,
dump_config)
return parser.parse(path)
- except ConfigParseError as exc:
+ except _ConfigParseError as exc:
_append_error_ctx(exc, 'Configuration',
- 'Cannot create configuration from YAML file `{}`'.format(path))
+ 'Cannot create configuration from YAML file `{}`'.format(path))