struct bt_ctf_field_type *type)
{
enum bt_ctf_byte_order ret = BT_CTF_BYTE_ORDER_UNKNOWN;
- int internal_byte_order = -1;
if (!type) {
goto end;
{
struct bt_ctf_field_type_integer *integer = container_of(
type, struct bt_ctf_field_type_integer, parent);
- internal_byte_order = integer->declaration.byte_order;
+ ret = integer->user_byte_order;
break;
}
case CTF_TYPE_FLOAT:
container_of(type,
struct bt_ctf_field_type_floating_point,
parent);
- internal_byte_order = floating_point->declaration.byte_order;
+ ret = floating_point->user_byte_order;
break;
}
default:
goto end;
}
- switch (internal_byte_order) {
- case LITTLE_ENDIAN:
- ret = BT_CTF_BYTE_ORDER_LITTLE_ENDIAN;
- break;
- case BIG_ENDIAN:
- ret = BT_CTF_BYTE_ORDER_BIG_ENDIAN;
- break;
- case 0:
- ret = BT_CTF_BYTE_ORDER_NATIVE;
- break;
- default:
- ret = BT_CTF_BYTE_ORDER_UNKNOWN;
- }
+ assert(ret == BT_CTF_BYTE_ORDER_NATIVE ||
+ ret == BT_CTF_BYTE_ORDER_LITTLE_ENDIAN ||
+ ret == BT_CTF_BYTE_ORDER_BIG_ENDIAN ||
+ ret == BT_CTF_BYTE_ORDER_NETWORK);
+
end:
return ret;
}
return 0;
}
+/*
+ * Maps a raw byte order value to the public bt_ctf_byte_order enum.
+ *
+ * Accepts either a bt_ctf_byte_order value (passed through unchanged)
+ * or a system <endian.h> constant (LITTLE_ENDIAN / BIG_ENDIAN), which
+ * is translated to the corresponding CTF IR value.  Any other value
+ * maps to BT_CTF_BYTE_ORDER_UNKNOWN.
+ *
+ * NOTE(review): assumes the BT_CTF_BYTE_ORDER_* enumerators do not
+ * collide numerically with the <endian.h> LITTLE_ENDIAN/BIG_ENDIAN
+ * constants; a collision would produce duplicate case labels and a
+ * compile error — confirm against the enum's definition.
+ */
+static
+enum bt_ctf_byte_order get_ctf_ir_byte_order(int byte_order) {
+ enum bt_ctf_byte_order ret;
+
+ switch (byte_order) {
+ case BT_CTF_BYTE_ORDER_LITTLE_ENDIAN:
+ case LITTLE_ENDIAN:
+ ret = BT_CTF_BYTE_ORDER_LITTLE_ENDIAN;
+ break;
+ case BT_CTF_BYTE_ORDER_BIG_ENDIAN:
+ case BIG_ENDIAN:
+ ret = BT_CTF_BYTE_ORDER_BIG_ENDIAN;
+ break;
+ case BT_CTF_BYTE_ORDER_NETWORK:
+ ret = BT_CTF_BYTE_ORDER_NETWORK;
+ break;
+ case BT_CTF_BYTE_ORDER_NATIVE:
+ ret = BT_CTF_BYTE_ORDER_NATIVE;
+ break;
+ default:
+ ret = BT_CTF_BYTE_ORDER_UNKNOWN;
+ break;
+ }
+
+ return ret;
+}
+
static
void bt_ctf_field_type_integer_set_byte_order(struct bt_ctf_field_type *type,
int byte_order, int set_native)
struct bt_ctf_field_type_integer, parent);
if (set_native) {
- integer_type->declaration.byte_order =
- integer_type->declaration.byte_order == 0 ?
- byte_order : integer_type->declaration.byte_order;
+ if (integer_type->user_byte_order == BT_CTF_BYTE_ORDER_NATIVE) {
+ /*
+ * User byte order is native, so we can set
+ * the real byte order.
+ */
+ integer_type->declaration.byte_order =
+ byte_order;
+ }
} else {
+ integer_type->user_byte_order =
+ get_ctf_ir_byte_order(byte_order);
integer_type->declaration.byte_order = byte_order;
}
}
parent);
if (set_native) {
- floating_point_type->declaration.byte_order =
- floating_point_type->declaration.byte_order == 0 ?
- byte_order :
- floating_point_type->declaration.byte_order;
- floating_point_type->sign.byte_order =
- floating_point_type->sign.byte_order == 0 ?
- byte_order : floating_point_type->sign.byte_order;
- floating_point_type->mantissa.byte_order =
- floating_point_type->mantissa.byte_order == 0 ?
- byte_order : floating_point_type->mantissa.byte_order;
- floating_point_type->exp.byte_order =
- floating_point_type->exp.byte_order == 0 ?
- byte_order : floating_point_type->exp.byte_order;
+ if (floating_point_type->user_byte_order ==
+ BT_CTF_BYTE_ORDER_NATIVE) {
+ /*
+ * User byte order is native, so we can set
+ * the real byte order.
+ */
+ floating_point_type->declaration.byte_order =
+ byte_order;
+ floating_point_type->sign.byte_order =
+ byte_order;
+ floating_point_type->mantissa.byte_order =
+ byte_order;
+ floating_point_type->exp.byte_order =
+ byte_order;
+ }
} else {
+ floating_point_type->user_byte_order =
+ get_ctf_ir_byte_order(byte_order);
floating_point_type->declaration.byte_order = byte_order;
floating_point_type->sign.byte_order = byte_order;
floating_point_type->mantissa.byte_order = byte_order;
bt_get(integer->mapped_clock);
copy_integer->mapped_clock = integer->mapped_clock;
}
+
+ copy_integer->user_byte_order = integer->user_byte_order;
+
end:
return copy;
}
copy_float->sign = floating_point->sign;
copy_float->mantissa = floating_point->mantissa;
copy_float->exp = floating_point->exp;
+ copy_float->user_byte_order = floating_point->user_byte_order;
end:
return copy;
}
}
BT_HIDDEN
-int bt_ctf_stream_class_set_byte_order(struct bt_ctf_stream_class *stream_class,
- enum bt_ctf_byte_order byte_order)
+/*
+ * Sets the stream class's byte order and propagates it, as the native
+ * byte order, to the event header, packet context and event context
+ * field types, and to every registered event class.
+ *
+ * byte_order must be a <endian.h> constant: LITTLE_ENDIAN or
+ * BIG_ENDIAN.  This diff turns invalid input from a -1 return into a
+ * programming error (assert), so the function no longer returns a
+ * status — callers must be updated accordingly.
+ */
+void bt_ctf_stream_class_set_byte_order(
+ struct bt_ctf_stream_class *stream_class, int byte_order)
{
- int i, ret = 0;
- int internal_byte_order;
+ int i;
- /* Note that "NATIVE" means the trace's endianness, not the host's. */
- if (!stream_class || byte_order <= BT_CTF_BYTE_ORDER_UNKNOWN ||
- byte_order > BT_CTF_BYTE_ORDER_NETWORK) {
- ret = -1;
- goto end;
- }
-
- switch (byte_order) {
- case BT_CTF_BYTE_ORDER_NETWORK:
- case BT_CTF_BYTE_ORDER_BIG_ENDIAN:
- internal_byte_order = BIG_ENDIAN;
- break;
- case BT_CTF_BYTE_ORDER_LITTLE_ENDIAN:
- internal_byte_order = LITTLE_ENDIAN;
- break;
- default:
- ret = -1;
- goto end;
- }
-
- stream_class->byte_order = internal_byte_order;
+ /* Note that "native" means the trace's endianness, not the host's. */
+ assert(stream_class);
+ assert(byte_order == LITTLE_ENDIAN || byte_order == BIG_ENDIAN);
+ stream_class->byte_order = byte_order;
/* Set native byte order to little or big endian */
bt_ctf_field_type_set_native_byte_order(
- stream_class->event_header_type, stream_class->byte_order);
+ stream_class->event_header_type, byte_order);
bt_ctf_field_type_set_native_byte_order(
- stream_class->packet_context_type, stream_class->byte_order);
+ stream_class->packet_context_type, byte_order);
bt_ctf_field_type_set_native_byte_order(
- stream_class->event_context_type, stream_class->byte_order);
+ stream_class->event_context_type, byte_order);
/* Set all events' native byte order */
for (i = 0; i < stream_class->event_classes->len; i++) {
- struct bt_ctf_event_class *event_class;
+ struct bt_ctf_event_class *event_class =
+ g_ptr_array_index(stream_class->event_classes, i);
- event_class = g_ptr_array_index(stream_class->event_classes, i);
bt_ctf_event_class_set_native_byte_order(event_class,
- stream_class->byte_order);
+ byte_order);
}
-end:
- return ret;
}
BT_HIDDEN