/*
 * SPDX-License-Identifier: MIT
 *
 * Copyright 2018 Philippe Proulx <pproulx@efficios.com>
 */
#include <stdlib.h>
#include <string.h>

#include <babeltrace2/babeltrace.h>

#include "common/assert.h"
#include "common/macros.h"

#include "ctf-meta-visitors.hpp"
20 bt_self_component
*self_comp
;
21 bt_trace_class
*ir_tc
;
22 bt_stream_class
*ir_sc
;
23 struct ctf_trace_class
*tc
;
24 struct ctf_stream_class
*sc
;
25 struct ctf_event_class
*ec
;
29 static inline bt_field_class
*ctf_field_class_to_ir(struct ctx
*ctx
, struct ctf_field_class
*fc
);
31 static inline void ctf_field_class_int_set_props(struct ctf_field_class_int
*fc
,
32 bt_field_class
*ir_fc
)
34 bt_field_class_integer_set_field_value_range(ir_fc
, fc
->base
.size
);
35 bt_field_class_integer_set_preferred_display_base(ir_fc
, fc
->disp_base
);
38 static inline bt_field_class
*ctf_field_class_int_to_ir(struct ctx
*ctx
,
39 struct ctf_field_class_int
*fc
)
41 bt_field_class
*ir_fc
;
44 ir_fc
= bt_field_class_integer_signed_create(ctx
->ir_tc
);
46 ir_fc
= bt_field_class_integer_unsigned_create(ctx
->ir_tc
);
50 ctf_field_class_int_set_props(fc
, ir_fc
);
54 static inline bt_field_class
*ctf_field_class_enum_to_ir(struct ctx
*ctx
,
55 struct ctf_field_class_enum
*fc
)
58 bt_field_class
*ir_fc
;
61 if (fc
->base
.is_signed
) {
62 ir_fc
= bt_field_class_enumeration_signed_create(ctx
->ir_tc
);
64 ir_fc
= bt_field_class_enumeration_unsigned_create(ctx
->ir_tc
);
68 ctf_field_class_int_set_props(&fc
->base
, ir_fc
);
70 for (i
= 0; i
< fc
->mappings
->len
; i
++) {
71 struct ctf_field_class_enum_mapping
*mapping
=
72 ctf_field_class_enum_borrow_mapping_by_index(fc
, i
);
73 bt_integer_range_set_signed
*range_set_signed
= NULL
;
74 bt_integer_range_set_unsigned
*range_set_unsigned
= NULL
;
77 if (fc
->base
.is_signed
) {
78 range_set_signed
= bt_integer_range_set_signed_create();
79 BT_ASSERT(range_set_signed
);
81 range_set_unsigned
= bt_integer_range_set_unsigned_create();
82 BT_ASSERT(range_set_unsigned
);
85 for (range_i
= 0; range_i
< mapping
->ranges
->len
; range_i
++) {
86 struct ctf_range
*range
=
87 ctf_field_class_enum_mapping_borrow_range_by_index(mapping
, range_i
);
89 if (fc
->base
.is_signed
) {
90 ret
= bt_integer_range_set_signed_add_range(range_set_signed
, range
->lower
.i
,
93 ret
= bt_integer_range_set_unsigned_add_range(range_set_unsigned
, range
->lower
.u
,
100 if (fc
->base
.is_signed
) {
101 ret
= bt_field_class_enumeration_signed_add_mapping(ir_fc
, mapping
->label
->str
,
103 BT_INTEGER_RANGE_SET_SIGNED_PUT_REF_AND_RESET(range_set_signed
);
105 ret
= bt_field_class_enumeration_unsigned_add_mapping(ir_fc
, mapping
->label
->str
,
107 BT_INTEGER_RANGE_SET_UNSIGNED_PUT_REF_AND_RESET(range_set_unsigned
);
116 static inline bt_field_class
*ctf_field_class_float_to_ir(struct ctx
*ctx
,
117 struct ctf_field_class_float
*fc
)
119 bt_field_class
*ir_fc
;
121 if (fc
->base
.size
== 32) {
122 ir_fc
= bt_field_class_real_single_precision_create(ctx
->ir_tc
);
124 ir_fc
= bt_field_class_real_double_precision_create(ctx
->ir_tc
);
131 static inline bt_field_class
*ctf_field_class_string_to_ir(struct ctx
*ctx
,
132 struct ctf_field_class_string
*)
134 bt_field_class
*ir_fc
= bt_field_class_string_create(ctx
->ir_tc
);
140 static inline void translate_struct_field_class_members(struct ctx
*ctx
,
141 struct ctf_field_class_struct
*fc
,
142 bt_field_class
*ir_fc
,
144 struct ctf_field_class_struct
*)
149 for (i
= 0; i
< fc
->members
->len
; i
++) {
150 struct ctf_named_field_class
*named_fc
=
151 ctf_field_class_struct_borrow_member_by_index(fc
, i
);
152 bt_field_class
*member_ir_fc
;
153 const char *name
= named_fc
->name
->str
;
155 if (!named_fc
->fc
->in_ir
) {
159 member_ir_fc
= ctf_field_class_to_ir(ctx
, named_fc
->fc
);
160 BT_ASSERT(member_ir_fc
);
161 ret
= bt_field_class_structure_append_member(ir_fc
, name
, member_ir_fc
);
163 bt_field_class_put_ref(member_ir_fc
);
167 static inline bt_field_class
*ctf_field_class_struct_to_ir(struct ctx
*ctx
,
168 struct ctf_field_class_struct
*fc
)
170 bt_field_class
*ir_fc
= bt_field_class_structure_create(ctx
->ir_tc
);
173 translate_struct_field_class_members(ctx
, fc
, ir_fc
, false, NULL
);
177 static inline bt_field_class
*borrow_ir_fc_from_field_path(struct ctx
*ctx
,
178 struct ctf_field_path
*field_path
)
180 bt_field_class
*ir_fc
= NULL
;
181 struct ctf_field_class
*fc
=
182 ctf_field_path_borrow_field_class(field_path
, ctx
->tc
, ctx
->sc
, ctx
->ec
);
193 static inline const bt_field_class_enumeration_mapping
*
194 find_ir_enum_field_class_mapping_by_label(const bt_field_class
*fc
, const char *label
,
197 const bt_field_class_enumeration_mapping
*mapping
= NULL
;
200 for (i
= 0; i
< bt_field_class_enumeration_get_mapping_count(fc
); i
++) {
201 const bt_field_class_enumeration_mapping
*this_mapping
;
202 const bt_field_class_enumeration_signed_mapping
*signed_this_mapping
= NULL
;
203 const bt_field_class_enumeration_unsigned_mapping
*unsigned_this_mapping
= NULL
;
206 signed_this_mapping
=
207 bt_field_class_enumeration_signed_borrow_mapping_by_index_const(fc
, i
);
208 BT_ASSERT(signed_this_mapping
);
210 bt_field_class_enumeration_signed_mapping_as_mapping_const(signed_this_mapping
);
212 unsigned_this_mapping
=
213 bt_field_class_enumeration_unsigned_borrow_mapping_by_index_const(fc
, i
);
214 BT_ASSERT(unsigned_this_mapping
);
216 bt_field_class_enumeration_unsigned_mapping_as_mapping_const(unsigned_this_mapping
);
219 BT_ASSERT(this_mapping
);
221 if (strcmp(bt_field_class_enumeration_mapping_get_label(this_mapping
), label
) == 0) {
222 mapping
= this_mapping
;
231 static inline bt_field_class
*ctf_field_class_variant_to_ir(struct ctx
*ctx
,
232 struct ctf_field_class_variant
*fc
)
235 bt_field_class
*ir_fc
;
237 bt_field_class
*ir_tag_fc
= NULL
;
239 if (fc
->tag_path
.root
!= CTF_SCOPE_PACKET_HEADER
&&
240 fc
->tag_path
.root
!= CTF_SCOPE_EVENT_HEADER
) {
241 ir_tag_fc
= borrow_ir_fc_from_field_path(ctx
, &fc
->tag_path
);
242 BT_ASSERT(ir_tag_fc
);
245 ir_fc
= bt_field_class_variant_create(ctx
->ir_tc
, ir_tag_fc
);
248 for (i
= 0; i
< fc
->options
->len
; i
++) {
249 struct ctf_named_field_class
*named_fc
=
250 ctf_field_class_variant_borrow_option_by_index(fc
, i
);
251 bt_field_class
*option_ir_fc
;
253 BT_ASSERT(named_fc
->fc
->in_ir
);
254 option_ir_fc
= ctf_field_class_to_ir(ctx
, named_fc
->fc
);
255 BT_ASSERT(option_ir_fc
);
259 * At this point the trace IR selector
260 * (enumeration) field class already exists if
261 * the variant is tagged (`ir_tag_fc`). This one
262 * already contains range sets for its mappings,
263 * so we just reuse the same, finding them by
264 * matching a variant field class's option's
265 * _original_ name (with a leading underscore,
266 * possibly) with a selector field class's
269 if (fc
->tag_fc
->base
.is_signed
) {
270 const bt_field_class_enumeration_signed_mapping
*mapping
=
271 (bt_field_class_enumeration_signed_mapping
*)
272 find_ir_enum_field_class_mapping_by_label(ir_tag_fc
,
273 named_fc
->orig_name
->str
, true);
274 const bt_integer_range_set_signed
*range_set
;
277 range_set
= bt_field_class_enumeration_signed_mapping_borrow_ranges_const(mapping
);
278 BT_ASSERT(range_set
);
279 ret
= bt_field_class_variant_with_selector_field_integer_signed_append_option(
280 ir_fc
, named_fc
->name
->str
, option_ir_fc
, range_set
);
282 const bt_field_class_enumeration_unsigned_mapping
*mapping
=
283 (bt_field_class_enumeration_unsigned_mapping
*)
284 find_ir_enum_field_class_mapping_by_label(ir_tag_fc
,
285 named_fc
->orig_name
->str
, false);
286 const bt_integer_range_set_unsigned
*range_set
;
290 bt_field_class_enumeration_unsigned_mapping_borrow_ranges_const(mapping
);
291 BT_ASSERT(range_set
);
292 ret
= bt_field_class_variant_with_selector_field_integer_unsigned_append_option(
293 ir_fc
, named_fc
->name
->str
, option_ir_fc
, range_set
);
296 ret
= bt_field_class_variant_without_selector_append_option(ir_fc
, named_fc
->name
->str
,
301 bt_field_class_put_ref(option_ir_fc
);
307 static inline bt_field_class
*ctf_field_class_array_to_ir(struct ctx
*ctx
,
308 struct ctf_field_class_array
*fc
)
310 bt_field_class
*ir_fc
;
311 bt_field_class
*elem_ir_fc
;
313 if (fc
->base
.is_text
) {
314 ir_fc
= bt_field_class_string_create(ctx
->ir_tc
);
319 elem_ir_fc
= ctf_field_class_to_ir(ctx
, fc
->base
.elem_fc
);
320 BT_ASSERT(elem_ir_fc
);
321 ir_fc
= bt_field_class_array_static_create(ctx
->ir_tc
, elem_ir_fc
, fc
->length
);
323 bt_field_class_put_ref(elem_ir_fc
);
329 static inline bt_field_class
*ctf_field_class_sequence_to_ir(struct ctx
*ctx
,
330 struct ctf_field_class_sequence
*fc
)
332 bt_field_class
*ir_fc
;
333 bt_field_class
*elem_ir_fc
;
334 bt_field_class
*length_fc
= NULL
;
336 if (fc
->base
.is_text
) {
337 ir_fc
= bt_field_class_string_create(ctx
->ir_tc
);
342 elem_ir_fc
= ctf_field_class_to_ir(ctx
, fc
->base
.elem_fc
);
343 BT_ASSERT(elem_ir_fc
);
345 if (fc
->length_path
.root
!= CTF_SCOPE_PACKET_HEADER
&&
346 fc
->length_path
.root
!= CTF_SCOPE_EVENT_HEADER
) {
347 length_fc
= borrow_ir_fc_from_field_path(ctx
, &fc
->length_path
);
348 BT_ASSERT(length_fc
);
351 ir_fc
= bt_field_class_array_dynamic_create(ctx
->ir_tc
, elem_ir_fc
, length_fc
);
353 bt_field_class_put_ref(elem_ir_fc
);
360 static inline bt_field_class
*ctf_field_class_to_ir(struct ctx
*ctx
, struct ctf_field_class
*fc
)
362 bt_field_class
*ir_fc
= NULL
;
365 BT_ASSERT(fc
->in_ir
);
368 case CTF_FIELD_CLASS_TYPE_INT
:
369 ir_fc
= ctf_field_class_int_to_ir(ctx
, ctf_field_class_as_int(fc
));
371 case CTF_FIELD_CLASS_TYPE_ENUM
:
372 ir_fc
= ctf_field_class_enum_to_ir(ctx
, ctf_field_class_as_enum(fc
));
374 case CTF_FIELD_CLASS_TYPE_FLOAT
:
375 ir_fc
= ctf_field_class_float_to_ir(ctx
, ctf_field_class_as_float(fc
));
377 case CTF_FIELD_CLASS_TYPE_STRING
:
378 ir_fc
= ctf_field_class_string_to_ir(ctx
, ctf_field_class_as_string(fc
));
380 case CTF_FIELD_CLASS_TYPE_STRUCT
:
381 ir_fc
= ctf_field_class_struct_to_ir(ctx
, ctf_field_class_as_struct(fc
));
383 case CTF_FIELD_CLASS_TYPE_ARRAY
:
384 ir_fc
= ctf_field_class_array_to_ir(ctx
, ctf_field_class_as_array(fc
));
386 case CTF_FIELD_CLASS_TYPE_SEQUENCE
:
387 ir_fc
= ctf_field_class_sequence_to_ir(ctx
, ctf_field_class_as_sequence(fc
));
389 case CTF_FIELD_CLASS_TYPE_VARIANT
:
390 ir_fc
= ctf_field_class_variant_to_ir(ctx
, ctf_field_class_as_variant(fc
));
401 ctf_field_class_struct_has_immediate_member_in_ir(struct ctf_field_class_struct
*fc
)
404 bool has_immediate_member_in_ir
= false;
407 * If the structure field class has no members at all, then it
408 * was an empty structure in the beginning, so leave it existing
411 if (fc
->members
->len
== 0) {
412 has_immediate_member_in_ir
= true;
416 for (i
= 0; i
< fc
->members
->len
; i
++) {
417 struct ctf_named_field_class
*named_fc
=
418 ctf_field_class_struct_borrow_member_by_index(fc
, i
);
420 if (named_fc
->fc
->in_ir
) {
421 has_immediate_member_in_ir
= true;
427 return has_immediate_member_in_ir
;
430 static inline bt_field_class
*scope_ctf_field_class_to_ir(struct ctx
*ctx
)
432 bt_field_class
*ir_fc
= NULL
;
433 struct ctf_field_class
*fc
= NULL
;
435 switch (ctx
->scope
) {
436 case CTF_SCOPE_PACKET_CONTEXT
:
437 fc
= ctx
->sc
->packet_context_fc
;
439 case CTF_SCOPE_EVENT_COMMON_CONTEXT
:
440 fc
= ctx
->sc
->event_common_context_fc
;
442 case CTF_SCOPE_EVENT_SPECIFIC_CONTEXT
:
443 fc
= ctx
->ec
->spec_context_fc
;
445 case CTF_SCOPE_EVENT_PAYLOAD
:
446 fc
= ctx
->ec
->payload_fc
;
452 if (fc
&& ctf_field_class_struct_has_immediate_member_in_ir(ctf_field_class_as_struct(fc
))) {
453 ir_fc
= ctf_field_class_to_ir(ctx
, fc
);
459 static inline void ctf_event_class_to_ir(struct ctx
*ctx
)
462 bt_event_class
*ir_ec
= NULL
;
463 bt_field_class
*ir_fc
;
467 if (ctx
->ec
->is_translated
) {
468 ir_ec
= bt_stream_class_borrow_event_class_by_id(ctx
->ir_sc
, ctx
->ec
->id
);
473 ir_ec
= bt_event_class_create_with_id(ctx
->ir_sc
, ctx
->ec
->id
);
475 bt_event_class_put_ref(ir_ec
);
476 ctx
->scope
= CTF_SCOPE_EVENT_SPECIFIC_CONTEXT
;
477 ir_fc
= scope_ctf_field_class_to_ir(ctx
);
479 ret
= bt_event_class_set_specific_context_field_class(ir_ec
, ir_fc
);
481 bt_field_class_put_ref(ir_fc
);
484 ctx
->scope
= CTF_SCOPE_EVENT_PAYLOAD
;
485 ir_fc
= scope_ctf_field_class_to_ir(ctx
);
487 ret
= bt_event_class_set_payload_field_class(ir_ec
, ir_fc
);
489 bt_field_class_put_ref(ir_fc
);
492 if (ctx
->ec
->name
->len
> 0) {
493 ret
= bt_event_class_set_name(ir_ec
, ctx
->ec
->name
->str
);
497 if (ctx
->ec
->emf_uri
->len
> 0) {
498 ret
= bt_event_class_set_emf_uri(ir_ec
, ctx
->ec
->emf_uri
->str
);
502 if (ctx
->ec
->is_log_level_set
) {
503 bt_event_class_set_log_level(ir_ec
, ctx
->ec
->log_level
);
506 ctx
->ec
->is_translated
= true;
507 ctx
->ec
->ir_ec
= ir_ec
;
513 static inline void ctf_stream_class_to_ir(struct ctx
*ctx
)
516 bt_field_class
*ir_fc
;
520 if (ctx
->sc
->is_translated
) {
521 ctx
->ir_sc
= bt_trace_class_borrow_stream_class_by_id(ctx
->ir_tc
, ctx
->sc
->id
);
522 BT_ASSERT(ctx
->ir_sc
);
526 ctx
->ir_sc
= bt_stream_class_create_with_id(ctx
->ir_tc
, ctx
->sc
->id
);
527 BT_ASSERT(ctx
->ir_sc
);
528 bt_stream_class_put_ref(ctx
->ir_sc
);
530 if (ctx
->sc
->default_clock_class
) {
531 BT_ASSERT(ctx
->sc
->default_clock_class
->ir_cc
);
532 ret
= bt_stream_class_set_default_clock_class(ctx
->ir_sc
,
533 ctx
->sc
->default_clock_class
->ir_cc
);
537 bt_stream_class_set_supports_packets(ctx
->ir_sc
, BT_TRUE
, ctx
->sc
->packets_have_ts_begin
,
538 ctx
->sc
->packets_have_ts_end
);
539 bt_stream_class_set_supports_discarded_events(ctx
->ir_sc
, ctx
->sc
->has_discarded_events
,
540 ctx
->sc
->discarded_events_have_default_cs
);
541 bt_stream_class_set_supports_discarded_packets(ctx
->ir_sc
, ctx
->sc
->has_discarded_packets
,
542 ctx
->sc
->discarded_packets_have_default_cs
);
543 ctx
->scope
= CTF_SCOPE_PACKET_CONTEXT
;
544 ir_fc
= scope_ctf_field_class_to_ir(ctx
);
546 ret
= bt_stream_class_set_packet_context_field_class(ctx
->ir_sc
, ir_fc
);
548 bt_field_class_put_ref(ir_fc
);
551 ctx
->scope
= CTF_SCOPE_EVENT_COMMON_CONTEXT
;
552 ir_fc
= scope_ctf_field_class_to_ir(ctx
);
554 ret
= bt_stream_class_set_event_common_context_field_class(ctx
->ir_sc
, ir_fc
);
556 bt_field_class_put_ref(ir_fc
);
559 bt_stream_class_set_assigns_automatic_event_class_id(ctx
->ir_sc
, BT_FALSE
);
560 bt_stream_class_set_assigns_automatic_stream_id(ctx
->ir_sc
, BT_FALSE
);
562 ctx
->sc
->is_translated
= true;
563 ctx
->sc
->ir_sc
= ctx
->ir_sc
;
569 static inline void ctf_clock_class_to_ir(bt_clock_class
*ir_cc
, struct ctf_clock_class
*cc
)
573 if (strlen(cc
->name
->str
) > 0) {
574 ret
= bt_clock_class_set_name(ir_cc
, cc
->name
->str
);
578 if (strlen(cc
->description
->str
) > 0) {
579 ret
= bt_clock_class_set_description(ir_cc
, cc
->description
->str
);
583 bt_clock_class_set_frequency(ir_cc
, cc
->frequency
);
584 bt_clock_class_set_precision(ir_cc
, cc
->precision
);
585 bt_clock_class_set_offset(ir_cc
, cc
->offset_seconds
, cc
->offset_cycles
);
588 bt_clock_class_set_uuid(ir_cc
, cc
->uuid
);
591 bt_clock_class_set_origin_is_unix_epoch(ir_cc
, cc
->is_absolute
);
594 static inline int ctf_trace_class_to_ir(struct ctx
*ctx
)
600 BT_ASSERT(ctx
->ir_tc
);
602 if (ctx
->tc
->is_translated
) {
606 for (i
= 0; i
< ctx
->tc
->clock_classes
->len
; i
++) {
607 ctf_clock_class
*cc
= (ctf_clock_class
*) ctx
->tc
->clock_classes
->pdata
[i
];
609 cc
->ir_cc
= bt_clock_class_create(ctx
->self_comp
);
610 ctf_clock_class_to_ir(cc
->ir_cc
, cc
);
613 bt_trace_class_set_assigns_automatic_stream_class_id(ctx
->ir_tc
, BT_FALSE
);
614 ctx
->tc
->is_translated
= true;
615 ctx
->tc
->ir_tc
= ctx
->ir_tc
;
621 int ctf_trace_class_translate(bt_self_component
*self_comp
, bt_trace_class
*ir_tc
,
622 struct ctf_trace_class
*tc
)
628 ctx
.self_comp
= self_comp
;
631 ret
= ctf_trace_class_to_ir(&ctx
);
636 for (i
= 0; i
< tc
->stream_classes
->len
; i
++) {
638 ctx
.sc
= (ctf_stream_class
*) tc
->stream_classes
->pdata
[i
];
640 ctf_stream_class_to_ir(&ctx
);
642 for (j
= 0; j
< ctx
.sc
->event_classes
->len
; j
++) {
643 ctx
.ec
= (ctf_event_class
*) ctx
.sc
->event_classes
->pdata
[j
];
645 ctf_event_class_to_ir(&ctx
);