/*
 * SPDX-License-Identifier: MIT
 *
 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 *
 * LTTng UST bytecode specializer.
 */
13 #include <lttng/align.h>
15 #include "context-internal.h"
16 #include "lttng-bytecode.h"
17 #include "ust-events-internal.h"
/*
 * Find last (most significant) bit set in @val, 1-based.
 * Returns 0 when @val is 0, 32 when the top bit is set.
 * Classic branch-based binary search over the bit width.
 */
static int lttng_fls(int val)
{
	int r = 32;
	unsigned int x = (unsigned int) val;

	if (!x)
		return 0;
	if (!(x & 0xFFFF0000U)) {
		x <<= 16;
		r -= 16;
	}
	if (!(x & 0xFF000000U)) {
		x <<= 8;
		r -= 8;
	}
	if (!(x & 0xF0000000U)) {
		x <<= 4;
		r -= 4;
	}
	if (!(x & 0xC0000000U)) {
		x <<= 2;
		r -= 2;
	}
	if (!(x & 0x80000000U)) {
		x <<= 1;
		r -= 1;
	}
	return r;
}
/*
 * Return ceil(log2(count)): the order of the smallest power of two
 * greater than or equal to @count. Rounds up when @count is not
 * itself a power of two.
 */
static int get_count_order(unsigned int count)
{
	int order = lttng_fls(count) - 1;

	/* Non-power-of-two: round up to the next order. */
	if (count & (count - 1))
		order++;
	return order;
}
58 static ssize_t
bytecode_reserve_data(struct bytecode_runtime
*runtime
,
59 size_t align
, size_t len
)
62 size_t padding
= lttng_ust_offset_align(runtime
->data_len
, align
);
63 size_t new_len
= runtime
->data_len
+ padding
+ len
;
64 size_t new_alloc_len
= new_len
;
65 size_t old_alloc_len
= runtime
->data_alloc_len
;
67 if (new_len
> BYTECODE_MAX_DATA_LEN
)
70 if (new_alloc_len
> old_alloc_len
) {
74 max_t(size_t, 1U << get_count_order(new_alloc_len
), old_alloc_len
<< 1);
75 newptr
= realloc(runtime
->data
, new_alloc_len
);
78 runtime
->data
= newptr
;
79 /* We zero directly the memory from start of allocation. */
80 memset(&runtime
->data
[old_alloc_len
], 0, new_alloc_len
- old_alloc_len
);
81 runtime
->data_alloc_len
= new_alloc_len
;
83 runtime
->data_len
+= padding
;
84 ret
= runtime
->data_len
;
85 runtime
->data_len
+= len
;
89 static ssize_t
bytecode_push_data(struct bytecode_runtime
*runtime
,
90 const void *p
, size_t align
, size_t len
)
94 offset
= bytecode_reserve_data(runtime
, align
, len
);
97 memcpy(&runtime
->data
[offset
], p
, len
);
101 static int specialize_load_field(struct vstack_entry
*stack_top
,
102 struct load_op
*insn
)
106 switch (stack_top
->load
.type
) {
109 case LOAD_ROOT_CONTEXT
:
110 case LOAD_ROOT_APP_CONTEXT
:
111 case LOAD_ROOT_PAYLOAD
:
113 dbg_printf("Bytecode warning: cannot load root, missing field name.\n");
117 switch (stack_top
->load
.object_type
) {
119 dbg_printf("op load field s8\n");
120 stack_top
->type
= REG_S64
;
121 if (!stack_top
->load
.rev_bo
)
122 insn
->op
= BYTECODE_OP_LOAD_FIELD_S8
;
124 case OBJECT_TYPE_S16
:
125 dbg_printf("op load field s16\n");
126 stack_top
->type
= REG_S64
;
127 if (!stack_top
->load
.rev_bo
)
128 insn
->op
= BYTECODE_OP_LOAD_FIELD_S16
;
130 case OBJECT_TYPE_S32
:
131 dbg_printf("op load field s32\n");
132 stack_top
->type
= REG_S64
;
133 if (!stack_top
->load
.rev_bo
)
134 insn
->op
= BYTECODE_OP_LOAD_FIELD_S32
;
136 case OBJECT_TYPE_S64
:
137 dbg_printf("op load field s64\n");
138 stack_top
->type
= REG_S64
;
139 if (!stack_top
->load
.rev_bo
)
140 insn
->op
= BYTECODE_OP_LOAD_FIELD_S64
;
142 case OBJECT_TYPE_SIGNED_ENUM
:
143 dbg_printf("op load field signed enumeration\n");
144 stack_top
->type
= REG_PTR
;
147 dbg_printf("op load field u8\n");
148 stack_top
->type
= REG_U64
;
149 insn
->op
= BYTECODE_OP_LOAD_FIELD_U8
;
151 case OBJECT_TYPE_U16
:
152 dbg_printf("op load field u16\n");
153 stack_top
->type
= REG_U64
;
154 if (!stack_top
->load
.rev_bo
)
155 insn
->op
= BYTECODE_OP_LOAD_FIELD_U16
;
157 case OBJECT_TYPE_U32
:
158 dbg_printf("op load field u32\n");
159 stack_top
->type
= REG_U64
;
160 if (!stack_top
->load
.rev_bo
)
161 insn
->op
= BYTECODE_OP_LOAD_FIELD_U32
;
163 case OBJECT_TYPE_U64
:
164 dbg_printf("op load field u64\n");
165 stack_top
->type
= REG_U64
;
166 if (!stack_top
->load
.rev_bo
)
167 insn
->op
= BYTECODE_OP_LOAD_FIELD_U64
;
169 case OBJECT_TYPE_UNSIGNED_ENUM
:
170 dbg_printf("op load field unsigned enumeration\n");
171 stack_top
->type
= REG_PTR
;
173 case OBJECT_TYPE_DOUBLE
:
174 stack_top
->type
= REG_DOUBLE
;
175 insn
->op
= BYTECODE_OP_LOAD_FIELD_DOUBLE
;
177 case OBJECT_TYPE_STRING
:
178 dbg_printf("op load field string\n");
179 stack_top
->type
= REG_STRING
;
180 insn
->op
= BYTECODE_OP_LOAD_FIELD_STRING
;
182 case OBJECT_TYPE_STRING_SEQUENCE
:
183 dbg_printf("op load field string sequence\n");
184 stack_top
->type
= REG_STRING
;
185 insn
->op
= BYTECODE_OP_LOAD_FIELD_SEQUENCE
;
187 case OBJECT_TYPE_DYNAMIC
:
188 dbg_printf("op load field dynamic\n");
189 stack_top
->type
= REG_UNKNOWN
;
190 /* Don't specialize load op. */
192 case OBJECT_TYPE_SEQUENCE
:
193 case OBJECT_TYPE_ARRAY
:
194 case OBJECT_TYPE_STRUCT
:
195 case OBJECT_TYPE_VARIANT
:
196 ERR("Sequences, arrays, struct and variant cannot be loaded (nested types).");
206 static int specialize_get_index_object_type(enum object_type
*otype
,
207 int signedness
, uint32_t elem_len
)
212 *otype
= OBJECT_TYPE_S8
;
214 *otype
= OBJECT_TYPE_U8
;
218 *otype
= OBJECT_TYPE_S16
;
220 *otype
= OBJECT_TYPE_U16
;
224 *otype
= OBJECT_TYPE_S32
;
226 *otype
= OBJECT_TYPE_U32
;
230 *otype
= OBJECT_TYPE_S64
;
232 *otype
= OBJECT_TYPE_U64
;
240 static int specialize_get_index(struct bytecode_runtime
*runtime
,
241 struct load_op
*insn
, uint64_t index
,
242 struct vstack_entry
*stack_top
,
246 struct bytecode_get_index_data gid
;
249 memset(&gid
, 0, sizeof(gid
));
250 switch (stack_top
->load
.type
) {
252 switch (stack_top
->load
.object_type
) {
253 case OBJECT_TYPE_ARRAY
:
255 const struct lttng_integer_type
*integer_type
;
256 const struct lttng_event_field
*field
;
257 uint32_t elem_len
, num_elems
;
260 field
= stack_top
->load
.field
;
261 switch (field
->type
.atype
) {
263 integer_type
= &field
->type
.u
.legacy
.array
.elem_type
.u
.basic
.integer
;
264 num_elems
= field
->type
.u
.legacy
.array
.length
;
266 case atype_array_nestable
:
267 if (field
->type
.u
.array_nestable
.elem_type
->atype
!= atype_integer
) {
271 integer_type
= &field
->type
.u
.array_nestable
.elem_type
->u
.integer
;
272 num_elems
= field
->type
.u
.array_nestable
.length
;
278 elem_len
= integer_type
->size
;
279 signedness
= integer_type
->signedness
;
280 if (index
>= num_elems
) {
284 ret
= specialize_get_index_object_type(&stack_top
->load
.object_type
,
285 signedness
, elem_len
);
288 gid
.offset
= index
* (elem_len
/ CHAR_BIT
);
289 gid
.array_len
= num_elems
* (elem_len
/ CHAR_BIT
);
290 gid
.elem
.type
= stack_top
->load
.object_type
;
291 gid
.elem
.len
= elem_len
;
292 if (integer_type
->reverse_byte_order
)
293 gid
.elem
.rev_bo
= true;
294 stack_top
->load
.rev_bo
= gid
.elem
.rev_bo
;
297 case OBJECT_TYPE_SEQUENCE
:
299 const struct lttng_integer_type
*integer_type
;
300 const struct lttng_event_field
*field
;
304 field
= stack_top
->load
.field
;
305 switch (field
->type
.atype
) {
307 integer_type
= &field
->type
.u
.legacy
.sequence
.elem_type
.u
.basic
.integer
;
309 case atype_sequence_nestable
:
310 if (field
->type
.u
.sequence_nestable
.elem_type
->atype
!= atype_integer
) {
314 integer_type
= &field
->type
.u
.sequence_nestable
.elem_type
->u
.integer
;
320 elem_len
= integer_type
->size
;
321 signedness
= integer_type
->signedness
;
322 ret
= specialize_get_index_object_type(&stack_top
->load
.object_type
,
323 signedness
, elem_len
);
326 gid
.offset
= index
* (elem_len
/ CHAR_BIT
);
327 gid
.elem
.type
= stack_top
->load
.object_type
;
328 gid
.elem
.len
= elem_len
;
329 if (integer_type
->reverse_byte_order
)
330 gid
.elem
.rev_bo
= true;
331 stack_top
->load
.rev_bo
= gid
.elem
.rev_bo
;
334 case OBJECT_TYPE_STRUCT
:
335 /* Only generated by the specialize phase. */
336 case OBJECT_TYPE_VARIANT
: /* Fall-through */
338 ERR("Unexpected get index type %d",
339 (int) stack_top
->load
.object_type
);
344 case LOAD_ROOT_CONTEXT
:
345 case LOAD_ROOT_APP_CONTEXT
:
346 case LOAD_ROOT_PAYLOAD
:
347 ERR("Index lookup for root field not implemented yet.");
351 data_offset
= bytecode_push_data(runtime
, &gid
,
352 __alignof__(gid
), sizeof(gid
));
353 if (data_offset
< 0) {
359 ((struct get_index_u16
*) insn
->data
)->index
= data_offset
;
362 ((struct get_index_u64
*) insn
->data
)->index
= data_offset
;
375 static int specialize_context_lookup_name(struct lttng_ctx
*ctx
,
376 struct bytecode_runtime
*bytecode
,
377 struct load_op
*insn
)
382 offset
= ((struct get_symbol
*) insn
->data
)->offset
;
383 name
= bytecode
->p
.priv
->bc
->bc
.data
+ bytecode
->p
.priv
->bc
->bc
.reloc_offset
+ offset
;
384 return lttng_get_context_index(ctx
, name
);
387 static int specialize_load_object(const struct lttng_event_field
*field
,
388 struct vstack_load
*load
, bool is_context
)
390 load
->type
= LOAD_OBJECT
;
392 switch (field
->type
.atype
) {
394 if (field
->type
.u
.integer
.signedness
)
395 load
->object_type
= OBJECT_TYPE_S64
;
397 load
->object_type
= OBJECT_TYPE_U64
;
398 load
->rev_bo
= false;
401 case atype_enum_nestable
:
403 const struct lttng_integer_type
*itype
;
405 if (field
->type
.atype
== atype_enum
) {
406 itype
= &field
->type
.u
.legacy
.basic
.enumeration
.container_type
;
408 itype
= &field
->type
.u
.enum_nestable
.container_type
->u
.integer
;
410 if (itype
->signedness
)
411 load
->object_type
= OBJECT_TYPE_SIGNED_ENUM
;
413 load
->object_type
= OBJECT_TYPE_UNSIGNED_ENUM
;
414 load
->rev_bo
= false;
418 if (field
->type
.u
.legacy
.array
.elem_type
.atype
!= atype_integer
) {
419 ERR("Array nesting only supports integer types.");
423 load
->object_type
= OBJECT_TYPE_STRING
;
425 if (field
->type
.u
.legacy
.array
.elem_type
.u
.basic
.integer
.encoding
== lttng_encode_none
) {
426 load
->object_type
= OBJECT_TYPE_ARRAY
;
429 load
->object_type
= OBJECT_TYPE_STRING_SEQUENCE
;
433 case atype_array_nestable
:
434 if (field
->type
.u
.array_nestable
.elem_type
->atype
!= atype_integer
) {
435 ERR("Array nesting only supports integer types.");
439 load
->object_type
= OBJECT_TYPE_STRING
;
441 if (field
->type
.u
.array_nestable
.elem_type
->u
.integer
.encoding
== lttng_encode_none
) {
442 load
->object_type
= OBJECT_TYPE_ARRAY
;
445 load
->object_type
= OBJECT_TYPE_STRING_SEQUENCE
;
450 if (field
->type
.u
.legacy
.sequence
.elem_type
.atype
!= atype_integer
) {
451 ERR("Sequence nesting only supports integer types.");
455 load
->object_type
= OBJECT_TYPE_STRING
;
457 if (field
->type
.u
.legacy
.sequence
.elem_type
.u
.basic
.integer
.encoding
== lttng_encode_none
) {
458 load
->object_type
= OBJECT_TYPE_SEQUENCE
;
461 load
->object_type
= OBJECT_TYPE_STRING_SEQUENCE
;
465 case atype_sequence_nestable
:
466 if (field
->type
.u
.sequence_nestable
.elem_type
->atype
!= atype_integer
) {
467 ERR("Sequence nesting only supports integer types.");
471 load
->object_type
= OBJECT_TYPE_STRING
;
473 if (field
->type
.u
.sequence_nestable
.elem_type
->u
.integer
.encoding
== lttng_encode_none
) {
474 load
->object_type
= OBJECT_TYPE_SEQUENCE
;
477 load
->object_type
= OBJECT_TYPE_STRING_SEQUENCE
;
483 load
->object_type
= OBJECT_TYPE_STRING
;
486 load
->object_type
= OBJECT_TYPE_DOUBLE
;
489 load
->object_type
= OBJECT_TYPE_DYNAMIC
;
492 ERR("Structure type cannot be loaded.");
495 ERR("Unknown type: %d", (int) field
->type
.atype
);
501 static int specialize_context_lookup(struct lttng_ctx
*ctx
,
502 struct bytecode_runtime
*runtime
,
503 struct load_op
*insn
,
504 struct vstack_load
*load
)
507 struct lttng_ctx_field
*ctx_field
;
508 struct lttng_event_field
*field
;
509 struct bytecode_get_index_data gid
;
512 idx
= specialize_context_lookup_name(ctx
, runtime
, insn
);
516 ctx_field
= &ctx
->fields
[idx
];
517 field
= &ctx_field
->event_field
;
518 ret
= specialize_load_object(field
, load
, true);
521 /* Specialize each get_symbol into a get_index. */
522 insn
->op
= BYTECODE_OP_GET_INDEX_U16
;
523 memset(&gid
, 0, sizeof(gid
));
525 gid
.elem
.type
= load
->object_type
;
526 gid
.elem
.rev_bo
= load
->rev_bo
;
528 data_offset
= bytecode_push_data(runtime
, &gid
,
529 __alignof__(gid
), sizeof(gid
));
530 if (data_offset
< 0) {
533 ((struct get_index_u16
*) insn
->data
)->index
= data_offset
;
537 static int specialize_app_context_lookup(struct lttng_ctx
**pctx
,
538 struct bytecode_runtime
*runtime
,
539 struct load_op
*insn
,
540 struct vstack_load
*load
)
543 const char *orig_name
;
546 struct lttng_ctx_field
*ctx_field
;
547 struct lttng_event_field
*field
;
548 struct bytecode_get_index_data gid
;
551 offset
= ((struct get_symbol
*) insn
->data
)->offset
;
552 orig_name
= runtime
->p
.priv
->bc
->bc
.data
+ runtime
->p
.priv
->bc
->bc
.reloc_offset
+ offset
;
553 name
= zmalloc(strlen(orig_name
) + strlen("$app.") + 1);
558 strcpy(name
, "$app.");
559 strcat(name
, orig_name
);
560 idx
= lttng_get_context_index(*pctx
, name
);
562 assert(lttng_context_is_app(name
));
563 ret
= lttng_ust_add_app_context_to_ctx_rcu(name
,
567 idx
= lttng_get_context_index(*pctx
, name
);
571 ctx_field
= &(*pctx
)->fields
[idx
];
572 field
= &ctx_field
->event_field
;
573 ret
= specialize_load_object(field
, load
, true);
576 /* Specialize each get_symbol into a get_index. */
577 insn
->op
= BYTECODE_OP_GET_INDEX_U16
;
578 memset(&gid
, 0, sizeof(gid
));
580 gid
.elem
.type
= load
->object_type
;
581 gid
.elem
.rev_bo
= load
->rev_bo
;
583 data_offset
= bytecode_push_data(runtime
, &gid
,
584 __alignof__(gid
), sizeof(gid
));
585 if (data_offset
< 0) {
589 ((struct get_index_u16
*) insn
->data
)->index
= data_offset
;
596 static int specialize_payload_lookup(const struct lttng_event_desc
*event_desc
,
597 struct bytecode_runtime
*runtime
,
598 struct load_op
*insn
,
599 struct vstack_load
*load
)
603 unsigned int i
, nr_fields
;
605 uint32_t field_offset
= 0;
606 const struct lttng_event_field
*field
;
608 struct bytecode_get_index_data gid
;
611 nr_fields
= event_desc
->nr_fields
;
612 offset
= ((struct get_symbol
*) insn
->data
)->offset
;
613 name
= runtime
->p
.priv
->bc
->bc
.data
+ runtime
->p
.priv
->bc
->bc
.reloc_offset
+ offset
;
614 for (i
= 0; i
< nr_fields
; i
++) {
615 field
= &event_desc
->fields
[i
];
616 if (field
->u
.ext
.nofilter
) {
619 if (!strcmp(field
->name
, name
)) {
623 /* compute field offset on stack */
624 switch (field
->type
.atype
) {
627 case atype_enum_nestable
:
628 field_offset
+= sizeof(int64_t);
631 case atype_array_nestable
:
633 case atype_sequence_nestable
:
634 field_offset
+= sizeof(unsigned long);
635 field_offset
+= sizeof(void *);
638 field_offset
+= sizeof(void *);
641 field_offset
+= sizeof(double);
653 ret
= specialize_load_object(field
, load
, false);
657 /* Specialize each get_symbol into a get_index. */
658 insn
->op
= BYTECODE_OP_GET_INDEX_U16
;
659 memset(&gid
, 0, sizeof(gid
));
660 gid
.offset
= field_offset
;
661 gid
.elem
.type
= load
->object_type
;
662 gid
.elem
.rev_bo
= load
->rev_bo
;
664 data_offset
= bytecode_push_data(runtime
, &gid
,
665 __alignof__(gid
), sizeof(gid
));
666 if (data_offset
< 0) {
670 ((struct get_index_u16
*) insn
->data
)->index
= data_offset
;
676 int lttng_bytecode_specialize(const struct lttng_event_desc
*event_desc
,
677 struct bytecode_runtime
*bytecode
)
679 void *pc
, *next_pc
, *start_pc
;
681 struct vstack _stack
;
682 struct vstack
*stack
= &_stack
;
683 struct lttng_ctx
**pctx
= bytecode
->p
.priv
->pctx
;
687 start_pc
= &bytecode
->code
[0];
688 for (pc
= next_pc
= start_pc
; pc
- start_pc
< bytecode
->len
;
690 switch (*(bytecode_opcode_t
*) pc
) {
691 case BYTECODE_OP_UNKNOWN
:
693 ERR("unknown bytecode op %u\n",
694 (unsigned int) *(bytecode_opcode_t
*) pc
);
698 case BYTECODE_OP_RETURN
:
699 if (vstack_ax(stack
)->type
== REG_S64
||
700 vstack_ax(stack
)->type
== REG_U64
)
701 *(bytecode_opcode_t
*) pc
= BYTECODE_OP_RETURN_S64
;
705 case BYTECODE_OP_RETURN_S64
:
706 if (vstack_ax(stack
)->type
!= REG_S64
&&
707 vstack_ax(stack
)->type
!= REG_U64
) {
708 ERR("Unexpected register type\n");
716 case BYTECODE_OP_MUL
:
717 case BYTECODE_OP_DIV
:
718 case BYTECODE_OP_MOD
:
719 case BYTECODE_OP_PLUS
:
720 case BYTECODE_OP_MINUS
:
721 ERR("unsupported bytecode op %u\n",
722 (unsigned int) *(bytecode_opcode_t
*) pc
);
728 struct binary_op
*insn
= (struct binary_op
*) pc
;
730 switch(vstack_ax(stack
)->type
) {
732 ERR("unknown register type\n");
737 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
739 if (vstack_bx(stack
)->type
== REG_STAR_GLOB_STRING
)
740 insn
->op
= BYTECODE_OP_EQ_STAR_GLOB_STRING
;
742 insn
->op
= BYTECODE_OP_EQ_STRING
;
744 case REG_STAR_GLOB_STRING
:
745 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
747 insn
->op
= BYTECODE_OP_EQ_STAR_GLOB_STRING
;
751 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
753 if (vstack_bx(stack
)->type
== REG_S64
||
754 vstack_bx(stack
)->type
== REG_U64
)
755 insn
->op
= BYTECODE_OP_EQ_S64
;
757 insn
->op
= BYTECODE_OP_EQ_DOUBLE_S64
;
760 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
762 if (vstack_bx(stack
)->type
== REG_S64
||
763 vstack_bx(stack
)->type
== REG_U64
)
764 insn
->op
= BYTECODE_OP_EQ_S64_DOUBLE
;
766 insn
->op
= BYTECODE_OP_EQ_DOUBLE
;
769 break; /* Dynamic typing. */
772 if (vstack_pop(stack
)) {
776 vstack_ax(stack
)->type
= REG_S64
;
777 next_pc
+= sizeof(struct binary_op
);
783 struct binary_op
*insn
= (struct binary_op
*) pc
;
785 switch(vstack_ax(stack
)->type
) {
787 ERR("unknown register type\n");
792 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
794 if (vstack_bx(stack
)->type
== REG_STAR_GLOB_STRING
)
795 insn
->op
= BYTECODE_OP_NE_STAR_GLOB_STRING
;
797 insn
->op
= BYTECODE_OP_NE_STRING
;
799 case REG_STAR_GLOB_STRING
:
800 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
802 insn
->op
= BYTECODE_OP_NE_STAR_GLOB_STRING
;
806 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
808 if (vstack_bx(stack
)->type
== REG_S64
||
809 vstack_bx(stack
)->type
== REG_U64
)
810 insn
->op
= BYTECODE_OP_NE_S64
;
812 insn
->op
= BYTECODE_OP_NE_DOUBLE_S64
;
815 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
817 if (vstack_bx(stack
)->type
== REG_S64
||
818 vstack_bx(stack
)->type
== REG_U64
)
819 insn
->op
= BYTECODE_OP_NE_S64_DOUBLE
;
821 insn
->op
= BYTECODE_OP_NE_DOUBLE
;
824 break; /* Dynamic typing. */
827 if (vstack_pop(stack
)) {
831 vstack_ax(stack
)->type
= REG_S64
;
832 next_pc
+= sizeof(struct binary_op
);
838 struct binary_op
*insn
= (struct binary_op
*) pc
;
840 switch(vstack_ax(stack
)->type
) {
842 ERR("unknown register type\n");
846 case REG_STAR_GLOB_STRING
:
847 ERR("invalid register type for > binary operator\n");
851 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
853 insn
->op
= BYTECODE_OP_GT_STRING
;
857 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
859 if (vstack_bx(stack
)->type
== REG_S64
||
860 vstack_bx(stack
)->type
== REG_U64
)
861 insn
->op
= BYTECODE_OP_GT_S64
;
863 insn
->op
= BYTECODE_OP_GT_DOUBLE_S64
;
866 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
868 if (vstack_bx(stack
)->type
== REG_S64
||
869 vstack_bx(stack
)->type
== REG_U64
)
870 insn
->op
= BYTECODE_OP_GT_S64_DOUBLE
;
872 insn
->op
= BYTECODE_OP_GT_DOUBLE
;
875 break; /* Dynamic typing. */
878 if (vstack_pop(stack
)) {
882 vstack_ax(stack
)->type
= REG_S64
;
883 next_pc
+= sizeof(struct binary_op
);
889 struct binary_op
*insn
= (struct binary_op
*) pc
;
891 switch(vstack_ax(stack
)->type
) {
893 ERR("unknown register type\n");
897 case REG_STAR_GLOB_STRING
:
898 ERR("invalid register type for < binary operator\n");
902 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
904 insn
->op
= BYTECODE_OP_LT_STRING
;
908 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
910 if (vstack_bx(stack
)->type
== REG_S64
||
911 vstack_bx(stack
)->type
== REG_U64
)
912 insn
->op
= BYTECODE_OP_LT_S64
;
914 insn
->op
= BYTECODE_OP_LT_DOUBLE_S64
;
917 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
919 if (vstack_bx(stack
)->type
== REG_S64
||
920 vstack_bx(stack
)->type
== REG_U64
)
921 insn
->op
= BYTECODE_OP_LT_S64_DOUBLE
;
923 insn
->op
= BYTECODE_OP_LT_DOUBLE
;
926 break; /* Dynamic typing. */
929 if (vstack_pop(stack
)) {
933 vstack_ax(stack
)->type
= REG_S64
;
934 next_pc
+= sizeof(struct binary_op
);
940 struct binary_op
*insn
= (struct binary_op
*) pc
;
942 switch(vstack_ax(stack
)->type
) {
944 ERR("unknown register type\n");
948 case REG_STAR_GLOB_STRING
:
949 ERR("invalid register type for >= binary operator\n");
953 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
955 insn
->op
= BYTECODE_OP_GE_STRING
;
959 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
961 if (vstack_bx(stack
)->type
== REG_S64
||
962 vstack_bx(stack
)->type
== REG_U64
)
963 insn
->op
= BYTECODE_OP_GE_S64
;
965 insn
->op
= BYTECODE_OP_GE_DOUBLE_S64
;
968 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
970 if (vstack_bx(stack
)->type
== REG_S64
||
971 vstack_bx(stack
)->type
== REG_U64
)
972 insn
->op
= BYTECODE_OP_GE_S64_DOUBLE
;
974 insn
->op
= BYTECODE_OP_GE_DOUBLE
;
977 break; /* Dynamic typing. */
980 if (vstack_pop(stack
)) {
984 vstack_ax(stack
)->type
= REG_U64
;
985 next_pc
+= sizeof(struct binary_op
);
990 struct binary_op
*insn
= (struct binary_op
*) pc
;
992 switch(vstack_ax(stack
)->type
) {
994 ERR("unknown register type\n");
998 case REG_STAR_GLOB_STRING
:
999 ERR("invalid register type for <= binary operator\n");
1003 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
1005 insn
->op
= BYTECODE_OP_LE_STRING
;
1009 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
1011 if (vstack_bx(stack
)->type
== REG_S64
||
1012 vstack_bx(stack
)->type
== REG_U64
)
1013 insn
->op
= BYTECODE_OP_LE_S64
;
1015 insn
->op
= BYTECODE_OP_LE_DOUBLE_S64
;
1018 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
1020 if (vstack_bx(stack
)->type
== REG_S64
||
1021 vstack_bx(stack
)->type
== REG_U64
)
1022 insn
->op
= BYTECODE_OP_LE_S64_DOUBLE
;
1024 insn
->op
= BYTECODE_OP_LE_DOUBLE
;
1027 break; /* Dynamic typing. */
1029 vstack_ax(stack
)->type
= REG_S64
;
1030 next_pc
+= sizeof(struct binary_op
);
1034 case BYTECODE_OP_EQ_STRING
:
1035 case BYTECODE_OP_NE_STRING
:
1036 case BYTECODE_OP_GT_STRING
:
1037 case BYTECODE_OP_LT_STRING
:
1038 case BYTECODE_OP_GE_STRING
:
1039 case BYTECODE_OP_LE_STRING
:
1040 case BYTECODE_OP_EQ_STAR_GLOB_STRING
:
1041 case BYTECODE_OP_NE_STAR_GLOB_STRING
:
1042 case BYTECODE_OP_EQ_S64
:
1043 case BYTECODE_OP_NE_S64
:
1044 case BYTECODE_OP_GT_S64
:
1045 case BYTECODE_OP_LT_S64
:
1046 case BYTECODE_OP_GE_S64
:
1047 case BYTECODE_OP_LE_S64
:
1048 case BYTECODE_OP_EQ_DOUBLE
:
1049 case BYTECODE_OP_NE_DOUBLE
:
1050 case BYTECODE_OP_GT_DOUBLE
:
1051 case BYTECODE_OP_LT_DOUBLE
:
1052 case BYTECODE_OP_GE_DOUBLE
:
1053 case BYTECODE_OP_LE_DOUBLE
:
1054 case BYTECODE_OP_EQ_DOUBLE_S64
:
1055 case BYTECODE_OP_NE_DOUBLE_S64
:
1056 case BYTECODE_OP_GT_DOUBLE_S64
:
1057 case BYTECODE_OP_LT_DOUBLE_S64
:
1058 case BYTECODE_OP_GE_DOUBLE_S64
:
1059 case BYTECODE_OP_LE_DOUBLE_S64
:
1060 case BYTECODE_OP_EQ_S64_DOUBLE
:
1061 case BYTECODE_OP_NE_S64_DOUBLE
:
1062 case BYTECODE_OP_GT_S64_DOUBLE
:
1063 case BYTECODE_OP_LT_S64_DOUBLE
:
1064 case BYTECODE_OP_GE_S64_DOUBLE
:
1065 case BYTECODE_OP_LE_S64_DOUBLE
:
1068 if (vstack_pop(stack
)) {
1072 vstack_ax(stack
)->type
= REG_S64
;
1073 next_pc
+= sizeof(struct binary_op
);
1077 case BYTECODE_OP_BIT_RSHIFT
:
1078 case BYTECODE_OP_BIT_LSHIFT
:
1079 case BYTECODE_OP_BIT_AND
:
1080 case BYTECODE_OP_BIT_OR
:
1081 case BYTECODE_OP_BIT_XOR
:
1084 if (vstack_pop(stack
)) {
1088 vstack_ax(stack
)->type
= REG_S64
;
1089 next_pc
+= sizeof(struct binary_op
);
1094 case BYTECODE_OP_UNARY_PLUS
:
1096 struct unary_op
*insn
= (struct unary_op
*) pc
;
1098 switch(vstack_ax(stack
)->type
) {
1100 ERR("unknown register type\n");
1106 insn
->op
= BYTECODE_OP_UNARY_PLUS_S64
;
1109 insn
->op
= BYTECODE_OP_UNARY_PLUS_DOUBLE
;
1111 case REG_UNKNOWN
: /* Dynamic typing. */
1115 next_pc
+= sizeof(struct unary_op
);
1119 case BYTECODE_OP_UNARY_MINUS
:
1121 struct unary_op
*insn
= (struct unary_op
*) pc
;
1123 switch(vstack_ax(stack
)->type
) {
1125 ERR("unknown register type\n");
1131 insn
->op
= BYTECODE_OP_UNARY_MINUS_S64
;
1134 insn
->op
= BYTECODE_OP_UNARY_MINUS_DOUBLE
;
1136 case REG_UNKNOWN
: /* Dynamic typing. */
1140 next_pc
+= sizeof(struct unary_op
);
1144 case BYTECODE_OP_UNARY_NOT
:
1146 struct unary_op
*insn
= (struct unary_op
*) pc
;
1148 switch(vstack_ax(stack
)->type
) {
1150 ERR("unknown register type\n");
1156 insn
->op
= BYTECODE_OP_UNARY_NOT_S64
;
1159 insn
->op
= BYTECODE_OP_UNARY_NOT_DOUBLE
;
1161 case REG_UNKNOWN
: /* Dynamic typing. */
1165 next_pc
+= sizeof(struct unary_op
);
1169 case BYTECODE_OP_UNARY_BIT_NOT
:
1172 next_pc
+= sizeof(struct unary_op
);
1176 case BYTECODE_OP_UNARY_PLUS_S64
:
1177 case BYTECODE_OP_UNARY_MINUS_S64
:
1178 case BYTECODE_OP_UNARY_NOT_S64
:
1179 case BYTECODE_OP_UNARY_PLUS_DOUBLE
:
1180 case BYTECODE_OP_UNARY_MINUS_DOUBLE
:
1181 case BYTECODE_OP_UNARY_NOT_DOUBLE
:
1184 next_pc
+= sizeof(struct unary_op
);
1189 case BYTECODE_OP_AND
:
1190 case BYTECODE_OP_OR
:
1192 /* Continue to next instruction */
1193 /* Pop 1 when jump not taken */
1194 if (vstack_pop(stack
)) {
1198 next_pc
+= sizeof(struct logical_op
);
1202 /* load field ref */
1203 case BYTECODE_OP_LOAD_FIELD_REF
:
1205 ERR("Unknown field ref type\n");
1209 /* get context ref */
1210 case BYTECODE_OP_GET_CONTEXT_REF
:
1212 if (vstack_push(stack
)) {
1216 vstack_ax(stack
)->type
= REG_UNKNOWN
;
1217 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1220 case BYTECODE_OP_LOAD_FIELD_REF_STRING
:
1221 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
:
1222 case BYTECODE_OP_GET_CONTEXT_REF_STRING
:
1224 if (vstack_push(stack
)) {
1228 vstack_ax(stack
)->type
= REG_STRING
;
1229 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1232 case BYTECODE_OP_LOAD_FIELD_REF_S64
:
1233 case BYTECODE_OP_GET_CONTEXT_REF_S64
:
1235 if (vstack_push(stack
)) {
1239 vstack_ax(stack
)->type
= REG_S64
;
1240 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1243 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
:
1244 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
:
1246 if (vstack_push(stack
)) {
1250 vstack_ax(stack
)->type
= REG_DOUBLE
;
1251 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1255 /* load from immediate operand */
1256 case BYTECODE_OP_LOAD_STRING
:
1258 struct load_op
*insn
= (struct load_op
*) pc
;
1260 if (vstack_push(stack
)) {
1264 vstack_ax(stack
)->type
= REG_STRING
;
1265 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1269 case BYTECODE_OP_LOAD_STAR_GLOB_STRING
:
1271 struct load_op
*insn
= (struct load_op
*) pc
;
1273 if (vstack_push(stack
)) {
1277 vstack_ax(stack
)->type
= REG_STAR_GLOB_STRING
;
1278 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1282 case BYTECODE_OP_LOAD_S64
:
1284 if (vstack_push(stack
)) {
1288 vstack_ax(stack
)->type
= REG_S64
;
1289 next_pc
+= sizeof(struct load_op
)
1290 + sizeof(struct literal_numeric
);
1294 case BYTECODE_OP_LOAD_DOUBLE
:
1296 if (vstack_push(stack
)) {
1300 vstack_ax(stack
)->type
= REG_DOUBLE
;
1301 next_pc
+= sizeof(struct load_op
)
1302 + sizeof(struct literal_double
);
1307 case BYTECODE_OP_CAST_TO_S64
:
1309 struct cast_op
*insn
= (struct cast_op
*) pc
;
1311 switch (vstack_ax(stack
)->type
) {
1313 ERR("unknown register type\n");
1318 case REG_STAR_GLOB_STRING
:
1319 ERR("Cast op can only be applied to numeric or floating point registers\n");
1323 insn
->op
= BYTECODE_OP_CAST_NOP
;
1326 insn
->op
= BYTECODE_OP_CAST_DOUBLE_TO_S64
;
1333 vstack_ax(stack
)->type
= REG_S64
;
1334 next_pc
+= sizeof(struct cast_op
);
1337 case BYTECODE_OP_CAST_DOUBLE_TO_S64
:
1340 vstack_ax(stack
)->type
= REG_S64
;
1341 next_pc
+= sizeof(struct cast_op
);
1344 case BYTECODE_OP_CAST_NOP
:
1346 next_pc
+= sizeof(struct cast_op
);
1351 * Instructions for recursive traversal through composed types.
1353 case BYTECODE_OP_GET_CONTEXT_ROOT
:
1355 if (vstack_push(stack
)) {
1359 vstack_ax(stack
)->type
= REG_PTR
;
1360 vstack_ax(stack
)->load
.type
= LOAD_ROOT_CONTEXT
;
1361 next_pc
+= sizeof(struct load_op
);
1364 case BYTECODE_OP_GET_APP_CONTEXT_ROOT
:
1366 if (vstack_push(stack
)) {
1370 vstack_ax(stack
)->type
= REG_PTR
;
1371 vstack_ax(stack
)->load
.type
= LOAD_ROOT_APP_CONTEXT
;
1372 next_pc
+= sizeof(struct load_op
);
1375 case BYTECODE_OP_GET_PAYLOAD_ROOT
:
1377 if (vstack_push(stack
)) {
1381 vstack_ax(stack
)->type
= REG_PTR
;
1382 vstack_ax(stack
)->load
.type
= LOAD_ROOT_PAYLOAD
;
1383 next_pc
+= sizeof(struct load_op
);
1387 case BYTECODE_OP_LOAD_FIELD
:
1389 struct load_op
*insn
= (struct load_op
*) pc
;
1391 assert(vstack_ax(stack
)->type
== REG_PTR
);
1393 ret
= specialize_load_field(vstack_ax(stack
), insn
);
1397 next_pc
+= sizeof(struct load_op
);
1401 case BYTECODE_OP_LOAD_FIELD_S8
:
1402 case BYTECODE_OP_LOAD_FIELD_S16
:
1403 case BYTECODE_OP_LOAD_FIELD_S32
:
1404 case BYTECODE_OP_LOAD_FIELD_S64
:
1407 vstack_ax(stack
)->type
= REG_S64
;
1408 next_pc
+= sizeof(struct load_op
);
1412 case BYTECODE_OP_LOAD_FIELD_U8
:
1413 case BYTECODE_OP_LOAD_FIELD_U16
:
1414 case BYTECODE_OP_LOAD_FIELD_U32
:
1415 case BYTECODE_OP_LOAD_FIELD_U64
:
1418 vstack_ax(stack
)->type
= REG_U64
;
1419 next_pc
+= sizeof(struct load_op
);
1423 case BYTECODE_OP_LOAD_FIELD_STRING
:
1424 case BYTECODE_OP_LOAD_FIELD_SEQUENCE
:
1427 vstack_ax(stack
)->type
= REG_STRING
;
1428 next_pc
+= sizeof(struct load_op
);
1432 case BYTECODE_OP_LOAD_FIELD_DOUBLE
:
1435 vstack_ax(stack
)->type
= REG_DOUBLE
;
1436 next_pc
+= sizeof(struct load_op
);
1440 case BYTECODE_OP_GET_SYMBOL
:
1442 struct load_op
*insn
= (struct load_op
*) pc
;
1444 dbg_printf("op get symbol\n");
1445 switch (vstack_ax(stack
)->load
.type
) {
1447 ERR("Nested fields not implemented yet.");
1450 case LOAD_ROOT_CONTEXT
:
1451 /* Lookup context field. */
1452 ret
= specialize_context_lookup(*pctx
,
1454 &vstack_ax(stack
)->load
);
1458 case LOAD_ROOT_APP_CONTEXT
:
1459 /* Lookup app context field. */
1460 ret
= specialize_app_context_lookup(pctx
,
1462 &vstack_ax(stack
)->load
);
1466 case LOAD_ROOT_PAYLOAD
:
1467 /* Lookup event payload field. */
1468 ret
= specialize_payload_lookup(event_desc
,
1470 &vstack_ax(stack
)->load
);
1475 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_symbol
);
1479 case BYTECODE_OP_GET_SYMBOL_FIELD
:
1481 /* Always generated by specialize phase. */
1486 case BYTECODE_OP_GET_INDEX_U16
:
1488 struct load_op
*insn
= (struct load_op
*) pc
;
1489 struct get_index_u16
*index
= (struct get_index_u16
*) insn
->data
;
1491 dbg_printf("op get index u16\n");
1493 ret
= specialize_get_index(bytecode
, insn
, index
->index
,
1494 vstack_ax(stack
), sizeof(*index
));
1497 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u16
);
1501 case BYTECODE_OP_GET_INDEX_U64
:
1503 struct load_op
*insn
= (struct load_op
*) pc
;
1504 struct get_index_u64
*index
= (struct get_index_u64
*) insn
->data
;
1506 dbg_printf("op get index u64\n");
1508 ret
= specialize_get_index(bytecode
, insn
, index
->index
,
1509 vstack_ax(stack
), sizeof(*index
));
1512 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u64
);