/*
 * SPDX-License-Identifier: MIT
 *
 * Copyright (C) 2010-2016 Mathieu Desnoyers <mathieu.desnoyers@efficios.com>
 *
 * LTTng UST bytecode specializer.
 */
13 #include <lttng/align.h>
15 #include "context-internal.h"
16 #include "lttng-bytecode.h"
17 #include "ust-events-internal.h"
/*
 * Find last (most significant) bit set, 1-based: returns 32 for the
 * sign bit, 1 for bit 0, and 0 when no bit is set.  Branchless-free
 * binary-search narrowing over half-words, bytes, nibbles, pairs, then
 * the final bit.  (Garbled in the extracted source; reconstructed.)
 */
static int lttng_fls(int val)
{
	int r = 32;
	unsigned int x = (unsigned int) val;

	if (!x)
		return 0;
	if (!(x & 0xFFFF0000U)) {
		x <<= 16;
		r -= 16;
	}
	if (!(x & 0xFF000000U)) {
		x <<= 8;
		r -= 8;
	}
	if (!(x & 0xF0000000U)) {
		x <<= 4;
		r -= 4;
	}
	if (!(x & 0xC0000000U)) {
		x <<= 2;
		r -= 2;
	}
	if (!(x & 0x80000000U)) {
		x <<= 1;
		r -= 1;
	}
	return r;
}
/*
 * Return ceil(log2(count)): the order of the smallest power of two
 * greater than or equal to count.  lttng_fls(count) - 1 gives
 * floor(log2(count)); bump by one when count is not itself a power of
 * two (count & (count - 1) is non-zero exactly in that case).
 * Assumes count >= 1 — lttng_fls(0) would yield order -1; callers pass
 * allocation lengths, which are non-zero.  (Reconstructed from garbled
 * extraction.)
 */
static int get_count_order(unsigned int count)
{
	int order;

	order = lttng_fls(count) - 1;
	if (count & (count - 1))
		order++;
	return order;
}
58 static ssize_t
bytecode_reserve_data(struct bytecode_runtime
*runtime
,
59 size_t align
, size_t len
)
62 size_t padding
= lttng_ust_offset_align(runtime
->data_len
, align
);
63 size_t new_len
= runtime
->data_len
+ padding
+ len
;
64 size_t new_alloc_len
= new_len
;
65 size_t old_alloc_len
= runtime
->data_alloc_len
;
67 if (new_len
> BYTECODE_MAX_DATA_LEN
)
70 if (new_alloc_len
> old_alloc_len
) {
74 max_t(size_t, 1U << get_count_order(new_alloc_len
), old_alloc_len
<< 1);
75 newptr
= realloc(runtime
->data
, new_alloc_len
);
78 runtime
->data
= newptr
;
79 /* We zero directly the memory from start of allocation. */
80 memset(&runtime
->data
[old_alloc_len
], 0, new_alloc_len
- old_alloc_len
);
81 runtime
->data_alloc_len
= new_alloc_len
;
83 runtime
->data_len
+= padding
;
84 ret
= runtime
->data_len
;
85 runtime
->data_len
+= len
;
89 static ssize_t
bytecode_push_data(struct bytecode_runtime
*runtime
,
90 const void *p
, size_t align
, size_t len
)
94 offset
= bytecode_reserve_data(runtime
, align
, len
);
97 memcpy(&runtime
->data
[offset
], p
, len
);
101 static int specialize_load_field(struct vstack_entry
*stack_top
,
102 struct load_op
*insn
)
106 switch (stack_top
->load
.type
) {
109 case LOAD_ROOT_CONTEXT
:
110 case LOAD_ROOT_APP_CONTEXT
:
111 case LOAD_ROOT_PAYLOAD
:
113 dbg_printf("Bytecode warning: cannot load root, missing field name.\n");
117 switch (stack_top
->load
.object_type
) {
119 dbg_printf("op load field s8\n");
120 stack_top
->type
= REG_S64
;
121 if (!stack_top
->load
.rev_bo
)
122 insn
->op
= BYTECODE_OP_LOAD_FIELD_S8
;
124 case OBJECT_TYPE_S16
:
125 dbg_printf("op load field s16\n");
126 stack_top
->type
= REG_S64
;
127 if (!stack_top
->load
.rev_bo
)
128 insn
->op
= BYTECODE_OP_LOAD_FIELD_S16
;
130 case OBJECT_TYPE_S32
:
131 dbg_printf("op load field s32\n");
132 stack_top
->type
= REG_S64
;
133 if (!stack_top
->load
.rev_bo
)
134 insn
->op
= BYTECODE_OP_LOAD_FIELD_S32
;
136 case OBJECT_TYPE_S64
:
137 dbg_printf("op load field s64\n");
138 stack_top
->type
= REG_S64
;
139 if (!stack_top
->load
.rev_bo
)
140 insn
->op
= BYTECODE_OP_LOAD_FIELD_S64
;
142 case OBJECT_TYPE_SIGNED_ENUM
:
143 dbg_printf("op load field signed enumeration\n");
144 stack_top
->type
= REG_PTR
;
147 dbg_printf("op load field u8\n");
148 stack_top
->type
= REG_U64
;
149 insn
->op
= BYTECODE_OP_LOAD_FIELD_U8
;
151 case OBJECT_TYPE_U16
:
152 dbg_printf("op load field u16\n");
153 stack_top
->type
= REG_U64
;
154 if (!stack_top
->load
.rev_bo
)
155 insn
->op
= BYTECODE_OP_LOAD_FIELD_U16
;
157 case OBJECT_TYPE_U32
:
158 dbg_printf("op load field u32\n");
159 stack_top
->type
= REG_U64
;
160 if (!stack_top
->load
.rev_bo
)
161 insn
->op
= BYTECODE_OP_LOAD_FIELD_U32
;
163 case OBJECT_TYPE_U64
:
164 dbg_printf("op load field u64\n");
165 stack_top
->type
= REG_U64
;
166 if (!stack_top
->load
.rev_bo
)
167 insn
->op
= BYTECODE_OP_LOAD_FIELD_U64
;
169 case OBJECT_TYPE_UNSIGNED_ENUM
:
170 dbg_printf("op load field unsigned enumeration\n");
171 stack_top
->type
= REG_PTR
;
173 case OBJECT_TYPE_DOUBLE
:
174 stack_top
->type
= REG_DOUBLE
;
175 insn
->op
= BYTECODE_OP_LOAD_FIELD_DOUBLE
;
177 case OBJECT_TYPE_STRING
:
178 dbg_printf("op load field string\n");
179 stack_top
->type
= REG_STRING
;
180 insn
->op
= BYTECODE_OP_LOAD_FIELD_STRING
;
182 case OBJECT_TYPE_STRING_SEQUENCE
:
183 dbg_printf("op load field string sequence\n");
184 stack_top
->type
= REG_STRING
;
185 insn
->op
= BYTECODE_OP_LOAD_FIELD_SEQUENCE
;
187 case OBJECT_TYPE_DYNAMIC
:
188 dbg_printf("op load field dynamic\n");
189 stack_top
->type
= REG_UNKNOWN
;
190 /* Don't specialize load op. */
192 case OBJECT_TYPE_SEQUENCE
:
193 case OBJECT_TYPE_ARRAY
:
194 case OBJECT_TYPE_STRUCT
:
195 case OBJECT_TYPE_VARIANT
:
196 ERR("Sequences, arrays, struct and variant cannot be loaded (nested types).");
206 static int specialize_get_index_object_type(enum object_type
*otype
,
207 int signedness
, uint32_t elem_len
)
212 *otype
= OBJECT_TYPE_S8
;
214 *otype
= OBJECT_TYPE_U8
;
218 *otype
= OBJECT_TYPE_S16
;
220 *otype
= OBJECT_TYPE_U16
;
224 *otype
= OBJECT_TYPE_S32
;
226 *otype
= OBJECT_TYPE_U32
;
230 *otype
= OBJECT_TYPE_S64
;
232 *otype
= OBJECT_TYPE_U64
;
240 static int specialize_get_index(struct bytecode_runtime
*runtime
,
241 struct load_op
*insn
, uint64_t index
,
242 struct vstack_entry
*stack_top
,
246 struct bytecode_get_index_data gid
;
249 memset(&gid
, 0, sizeof(gid
));
250 switch (stack_top
->load
.type
) {
252 switch (stack_top
->load
.object_type
) {
253 case OBJECT_TYPE_ARRAY
:
255 const struct lttng_integer_type
*integer_type
;
256 const struct lttng_event_field
*field
;
257 uint32_t elem_len
, num_elems
;
260 field
= stack_top
->load
.field
;
261 switch (field
->type
.atype
) {
262 case atype_array_nestable
:
263 if (field
->type
.u
.array_nestable
.elem_type
->atype
!= atype_integer
) {
267 integer_type
= &field
->type
.u
.array_nestable
.elem_type
->u
.integer
;
268 num_elems
= field
->type
.u
.array_nestable
.length
;
274 elem_len
= integer_type
->size
;
275 signedness
= integer_type
->signedness
;
276 if (index
>= num_elems
) {
280 ret
= specialize_get_index_object_type(&stack_top
->load
.object_type
,
281 signedness
, elem_len
);
284 gid
.offset
= index
* (elem_len
/ CHAR_BIT
);
285 gid
.array_len
= num_elems
* (elem_len
/ CHAR_BIT
);
286 gid
.elem
.type
= stack_top
->load
.object_type
;
287 gid
.elem
.len
= elem_len
;
288 if (integer_type
->reverse_byte_order
)
289 gid
.elem
.rev_bo
= true;
290 stack_top
->load
.rev_bo
= gid
.elem
.rev_bo
;
293 case OBJECT_TYPE_SEQUENCE
:
295 const struct lttng_integer_type
*integer_type
;
296 const struct lttng_event_field
*field
;
300 field
= stack_top
->load
.field
;
301 switch (field
->type
.atype
) {
302 case atype_sequence_nestable
:
303 if (field
->type
.u
.sequence_nestable
.elem_type
->atype
!= atype_integer
) {
307 integer_type
= &field
->type
.u
.sequence_nestable
.elem_type
->u
.integer
;
313 elem_len
= integer_type
->size
;
314 signedness
= integer_type
->signedness
;
315 ret
= specialize_get_index_object_type(&stack_top
->load
.object_type
,
316 signedness
, elem_len
);
319 gid
.offset
= index
* (elem_len
/ CHAR_BIT
);
320 gid
.elem
.type
= stack_top
->load
.object_type
;
321 gid
.elem
.len
= elem_len
;
322 if (integer_type
->reverse_byte_order
)
323 gid
.elem
.rev_bo
= true;
324 stack_top
->load
.rev_bo
= gid
.elem
.rev_bo
;
327 case OBJECT_TYPE_STRUCT
:
328 /* Only generated by the specialize phase. */
329 case OBJECT_TYPE_VARIANT
: /* Fall-through */
331 ERR("Unexpected get index type %d",
332 (int) stack_top
->load
.object_type
);
337 case LOAD_ROOT_CONTEXT
:
338 case LOAD_ROOT_APP_CONTEXT
:
339 case LOAD_ROOT_PAYLOAD
:
340 ERR("Index lookup for root field not implemented yet.");
344 data_offset
= bytecode_push_data(runtime
, &gid
,
345 __alignof__(gid
), sizeof(gid
));
346 if (data_offset
< 0) {
352 ((struct get_index_u16
*) insn
->data
)->index
= data_offset
;
355 ((struct get_index_u64
*) insn
->data
)->index
= data_offset
;
368 static int specialize_context_lookup_name(struct lttng_ctx
*ctx
,
369 struct bytecode_runtime
*bytecode
,
370 struct load_op
*insn
)
375 offset
= ((struct get_symbol
*) insn
->data
)->offset
;
376 name
= bytecode
->p
.priv
->bc
->bc
.data
+ bytecode
->p
.priv
->bc
->bc
.reloc_offset
+ offset
;
377 return lttng_get_context_index(ctx
, name
);
380 static int specialize_load_object(const struct lttng_event_field
*field
,
381 struct vstack_load
*load
, bool is_context
)
383 load
->type
= LOAD_OBJECT
;
385 switch (field
->type
.atype
) {
387 if (field
->type
.u
.integer
.signedness
)
388 load
->object_type
= OBJECT_TYPE_S64
;
390 load
->object_type
= OBJECT_TYPE_U64
;
391 load
->rev_bo
= false;
393 case atype_enum_nestable
:
395 const struct lttng_integer_type
*itype
;
397 itype
= &field
->type
.u
.enum_nestable
.container_type
->u
.integer
;
398 if (itype
->signedness
)
399 load
->object_type
= OBJECT_TYPE_SIGNED_ENUM
;
401 load
->object_type
= OBJECT_TYPE_UNSIGNED_ENUM
;
402 load
->rev_bo
= false;
405 case atype_array_nestable
:
406 if (field
->type
.u
.array_nestable
.elem_type
->atype
!= atype_integer
) {
407 ERR("Array nesting only supports integer types.");
411 load
->object_type
= OBJECT_TYPE_STRING
;
413 if (field
->type
.u
.array_nestable
.elem_type
->u
.integer
.encoding
== lttng_encode_none
) {
414 load
->object_type
= OBJECT_TYPE_ARRAY
;
417 load
->object_type
= OBJECT_TYPE_STRING_SEQUENCE
;
421 case atype_sequence_nestable
:
422 if (field
->type
.u
.sequence_nestable
.elem_type
->atype
!= atype_integer
) {
423 ERR("Sequence nesting only supports integer types.");
427 load
->object_type
= OBJECT_TYPE_STRING
;
429 if (field
->type
.u
.sequence_nestable
.elem_type
->u
.integer
.encoding
== lttng_encode_none
) {
430 load
->object_type
= OBJECT_TYPE_SEQUENCE
;
433 load
->object_type
= OBJECT_TYPE_STRING_SEQUENCE
;
439 load
->object_type
= OBJECT_TYPE_STRING
;
442 load
->object_type
= OBJECT_TYPE_DOUBLE
;
445 load
->object_type
= OBJECT_TYPE_DYNAMIC
;
448 ERR("Unknown type: %d", (int) field
->type
.atype
);
454 static int specialize_context_lookup(struct lttng_ctx
*ctx
,
455 struct bytecode_runtime
*runtime
,
456 struct load_op
*insn
,
457 struct vstack_load
*load
)
460 struct lttng_ctx_field
*ctx_field
;
461 struct lttng_event_field
*field
;
462 struct bytecode_get_index_data gid
;
465 idx
= specialize_context_lookup_name(ctx
, runtime
, insn
);
469 ctx_field
= &ctx
->fields
[idx
];
470 field
= &ctx_field
->event_field
;
471 ret
= specialize_load_object(field
, load
, true);
474 /* Specialize each get_symbol into a get_index. */
475 insn
->op
= BYTECODE_OP_GET_INDEX_U16
;
476 memset(&gid
, 0, sizeof(gid
));
478 gid
.elem
.type
= load
->object_type
;
479 gid
.elem
.rev_bo
= load
->rev_bo
;
481 data_offset
= bytecode_push_data(runtime
, &gid
,
482 __alignof__(gid
), sizeof(gid
));
483 if (data_offset
< 0) {
486 ((struct get_index_u16
*) insn
->data
)->index
= data_offset
;
490 static int specialize_app_context_lookup(struct lttng_ctx
**pctx
,
491 struct bytecode_runtime
*runtime
,
492 struct load_op
*insn
,
493 struct vstack_load
*load
)
496 const char *orig_name
;
499 struct lttng_ctx_field
*ctx_field
;
500 struct lttng_event_field
*field
;
501 struct bytecode_get_index_data gid
;
504 offset
= ((struct get_symbol
*) insn
->data
)->offset
;
505 orig_name
= runtime
->p
.priv
->bc
->bc
.data
+ runtime
->p
.priv
->bc
->bc
.reloc_offset
+ offset
;
506 name
= zmalloc(strlen(orig_name
) + strlen("$app.") + 1);
511 strcpy(name
, "$app.");
512 strcat(name
, orig_name
);
513 idx
= lttng_get_context_index(*pctx
, name
);
515 assert(lttng_context_is_app(name
));
516 ret
= lttng_ust_add_app_context_to_ctx_rcu(name
,
520 idx
= lttng_get_context_index(*pctx
, name
);
524 ctx_field
= &(*pctx
)->fields
[idx
];
525 field
= &ctx_field
->event_field
;
526 ret
= specialize_load_object(field
, load
, true);
529 /* Specialize each get_symbol into a get_index. */
530 insn
->op
= BYTECODE_OP_GET_INDEX_U16
;
531 memset(&gid
, 0, sizeof(gid
));
533 gid
.elem
.type
= load
->object_type
;
534 gid
.elem
.rev_bo
= load
->rev_bo
;
536 data_offset
= bytecode_push_data(runtime
, &gid
,
537 __alignof__(gid
), sizeof(gid
));
538 if (data_offset
< 0) {
542 ((struct get_index_u16
*) insn
->data
)->index
= data_offset
;
549 static int specialize_payload_lookup(const struct lttng_ust_event_desc
*event_desc
,
550 struct bytecode_runtime
*runtime
,
551 struct load_op
*insn
,
552 struct vstack_load
*load
)
556 unsigned int i
, nr_fields
;
558 uint32_t field_offset
= 0;
559 const struct lttng_event_field
*field
;
561 struct bytecode_get_index_data gid
;
564 nr_fields
= event_desc
->nr_fields
;
565 offset
= ((struct get_symbol
*) insn
->data
)->offset
;
566 name
= runtime
->p
.priv
->bc
->bc
.data
+ runtime
->p
.priv
->bc
->bc
.reloc_offset
+ offset
;
567 for (i
= 0; i
< nr_fields
; i
++) {
568 field
= &event_desc
->fields
[i
];
569 if (field
->u
.ext
.nofilter
) {
572 if (!strcmp(field
->name
, name
)) {
576 /* compute field offset on stack */
577 switch (field
->type
.atype
) {
579 case atype_enum_nestable
:
580 field_offset
+= sizeof(int64_t);
582 case atype_array_nestable
:
583 case atype_sequence_nestable
:
584 field_offset
+= sizeof(unsigned long);
585 field_offset
+= sizeof(void *);
588 field_offset
+= sizeof(void *);
591 field_offset
+= sizeof(double);
603 ret
= specialize_load_object(field
, load
, false);
607 /* Specialize each get_symbol into a get_index. */
608 insn
->op
= BYTECODE_OP_GET_INDEX_U16
;
609 memset(&gid
, 0, sizeof(gid
));
610 gid
.offset
= field_offset
;
611 gid
.elem
.type
= load
->object_type
;
612 gid
.elem
.rev_bo
= load
->rev_bo
;
614 data_offset
= bytecode_push_data(runtime
, &gid
,
615 __alignof__(gid
), sizeof(gid
));
616 if (data_offset
< 0) {
620 ((struct get_index_u16
*) insn
->data
)->index
= data_offset
;
626 int lttng_bytecode_specialize(const struct lttng_ust_event_desc
*event_desc
,
627 struct bytecode_runtime
*bytecode
)
629 void *pc
, *next_pc
, *start_pc
;
631 struct vstack _stack
;
632 struct vstack
*stack
= &_stack
;
633 struct lttng_ctx
**pctx
= bytecode
->p
.priv
->pctx
;
637 start_pc
= &bytecode
->code
[0];
638 for (pc
= next_pc
= start_pc
; pc
- start_pc
< bytecode
->len
;
640 switch (*(bytecode_opcode_t
*) pc
) {
641 case BYTECODE_OP_UNKNOWN
:
643 ERR("unknown bytecode op %u\n",
644 (unsigned int) *(bytecode_opcode_t
*) pc
);
648 case BYTECODE_OP_RETURN
:
649 if (vstack_ax(stack
)->type
== REG_S64
||
650 vstack_ax(stack
)->type
== REG_U64
)
651 *(bytecode_opcode_t
*) pc
= BYTECODE_OP_RETURN_S64
;
655 case BYTECODE_OP_RETURN_S64
:
656 if (vstack_ax(stack
)->type
!= REG_S64
&&
657 vstack_ax(stack
)->type
!= REG_U64
) {
658 ERR("Unexpected register type\n");
666 case BYTECODE_OP_MUL
:
667 case BYTECODE_OP_DIV
:
668 case BYTECODE_OP_MOD
:
669 case BYTECODE_OP_PLUS
:
670 case BYTECODE_OP_MINUS
:
671 ERR("unsupported bytecode op %u\n",
672 (unsigned int) *(bytecode_opcode_t
*) pc
);
678 struct binary_op
*insn
= (struct binary_op
*) pc
;
680 switch(vstack_ax(stack
)->type
) {
682 ERR("unknown register type\n");
687 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
689 if (vstack_bx(stack
)->type
== REG_STAR_GLOB_STRING
)
690 insn
->op
= BYTECODE_OP_EQ_STAR_GLOB_STRING
;
692 insn
->op
= BYTECODE_OP_EQ_STRING
;
694 case REG_STAR_GLOB_STRING
:
695 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
697 insn
->op
= BYTECODE_OP_EQ_STAR_GLOB_STRING
;
701 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
703 if (vstack_bx(stack
)->type
== REG_S64
||
704 vstack_bx(stack
)->type
== REG_U64
)
705 insn
->op
= BYTECODE_OP_EQ_S64
;
707 insn
->op
= BYTECODE_OP_EQ_DOUBLE_S64
;
710 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
712 if (vstack_bx(stack
)->type
== REG_S64
||
713 vstack_bx(stack
)->type
== REG_U64
)
714 insn
->op
= BYTECODE_OP_EQ_S64_DOUBLE
;
716 insn
->op
= BYTECODE_OP_EQ_DOUBLE
;
719 break; /* Dynamic typing. */
722 if (vstack_pop(stack
)) {
726 vstack_ax(stack
)->type
= REG_S64
;
727 next_pc
+= sizeof(struct binary_op
);
733 struct binary_op
*insn
= (struct binary_op
*) pc
;
735 switch(vstack_ax(stack
)->type
) {
737 ERR("unknown register type\n");
742 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
744 if (vstack_bx(stack
)->type
== REG_STAR_GLOB_STRING
)
745 insn
->op
= BYTECODE_OP_NE_STAR_GLOB_STRING
;
747 insn
->op
= BYTECODE_OP_NE_STRING
;
749 case REG_STAR_GLOB_STRING
:
750 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
752 insn
->op
= BYTECODE_OP_NE_STAR_GLOB_STRING
;
756 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
758 if (vstack_bx(stack
)->type
== REG_S64
||
759 vstack_bx(stack
)->type
== REG_U64
)
760 insn
->op
= BYTECODE_OP_NE_S64
;
762 insn
->op
= BYTECODE_OP_NE_DOUBLE_S64
;
765 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
767 if (vstack_bx(stack
)->type
== REG_S64
||
768 vstack_bx(stack
)->type
== REG_U64
)
769 insn
->op
= BYTECODE_OP_NE_S64_DOUBLE
;
771 insn
->op
= BYTECODE_OP_NE_DOUBLE
;
774 break; /* Dynamic typing. */
777 if (vstack_pop(stack
)) {
781 vstack_ax(stack
)->type
= REG_S64
;
782 next_pc
+= sizeof(struct binary_op
);
788 struct binary_op
*insn
= (struct binary_op
*) pc
;
790 switch(vstack_ax(stack
)->type
) {
792 ERR("unknown register type\n");
796 case REG_STAR_GLOB_STRING
:
797 ERR("invalid register type for > binary operator\n");
801 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
803 insn
->op
= BYTECODE_OP_GT_STRING
;
807 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
809 if (vstack_bx(stack
)->type
== REG_S64
||
810 vstack_bx(stack
)->type
== REG_U64
)
811 insn
->op
= BYTECODE_OP_GT_S64
;
813 insn
->op
= BYTECODE_OP_GT_DOUBLE_S64
;
816 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
818 if (vstack_bx(stack
)->type
== REG_S64
||
819 vstack_bx(stack
)->type
== REG_U64
)
820 insn
->op
= BYTECODE_OP_GT_S64_DOUBLE
;
822 insn
->op
= BYTECODE_OP_GT_DOUBLE
;
825 break; /* Dynamic typing. */
828 if (vstack_pop(stack
)) {
832 vstack_ax(stack
)->type
= REG_S64
;
833 next_pc
+= sizeof(struct binary_op
);
839 struct binary_op
*insn
= (struct binary_op
*) pc
;
841 switch(vstack_ax(stack
)->type
) {
843 ERR("unknown register type\n");
847 case REG_STAR_GLOB_STRING
:
848 ERR("invalid register type for < binary operator\n");
852 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
854 insn
->op
= BYTECODE_OP_LT_STRING
;
858 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
860 if (vstack_bx(stack
)->type
== REG_S64
||
861 vstack_bx(stack
)->type
== REG_U64
)
862 insn
->op
= BYTECODE_OP_LT_S64
;
864 insn
->op
= BYTECODE_OP_LT_DOUBLE_S64
;
867 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
869 if (vstack_bx(stack
)->type
== REG_S64
||
870 vstack_bx(stack
)->type
== REG_U64
)
871 insn
->op
= BYTECODE_OP_LT_S64_DOUBLE
;
873 insn
->op
= BYTECODE_OP_LT_DOUBLE
;
876 break; /* Dynamic typing. */
879 if (vstack_pop(stack
)) {
883 vstack_ax(stack
)->type
= REG_S64
;
884 next_pc
+= sizeof(struct binary_op
);
890 struct binary_op
*insn
= (struct binary_op
*) pc
;
892 switch(vstack_ax(stack
)->type
) {
894 ERR("unknown register type\n");
898 case REG_STAR_GLOB_STRING
:
899 ERR("invalid register type for >= binary operator\n");
903 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
905 insn
->op
= BYTECODE_OP_GE_STRING
;
909 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
911 if (vstack_bx(stack
)->type
== REG_S64
||
912 vstack_bx(stack
)->type
== REG_U64
)
913 insn
->op
= BYTECODE_OP_GE_S64
;
915 insn
->op
= BYTECODE_OP_GE_DOUBLE_S64
;
918 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
920 if (vstack_bx(stack
)->type
== REG_S64
||
921 vstack_bx(stack
)->type
== REG_U64
)
922 insn
->op
= BYTECODE_OP_GE_S64_DOUBLE
;
924 insn
->op
= BYTECODE_OP_GE_DOUBLE
;
927 break; /* Dynamic typing. */
930 if (vstack_pop(stack
)) {
934 vstack_ax(stack
)->type
= REG_U64
;
935 next_pc
+= sizeof(struct binary_op
);
940 struct binary_op
*insn
= (struct binary_op
*) pc
;
942 switch(vstack_ax(stack
)->type
) {
944 ERR("unknown register type\n");
948 case REG_STAR_GLOB_STRING
:
949 ERR("invalid register type for <= binary operator\n");
953 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
955 insn
->op
= BYTECODE_OP_LE_STRING
;
959 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
961 if (vstack_bx(stack
)->type
== REG_S64
||
962 vstack_bx(stack
)->type
== REG_U64
)
963 insn
->op
= BYTECODE_OP_LE_S64
;
965 insn
->op
= BYTECODE_OP_LE_DOUBLE_S64
;
968 if (vstack_bx(stack
)->type
== REG_UNKNOWN
)
970 if (vstack_bx(stack
)->type
== REG_S64
||
971 vstack_bx(stack
)->type
== REG_U64
)
972 insn
->op
= BYTECODE_OP_LE_S64_DOUBLE
;
974 insn
->op
= BYTECODE_OP_LE_DOUBLE
;
977 break; /* Dynamic typing. */
979 vstack_ax(stack
)->type
= REG_S64
;
980 next_pc
+= sizeof(struct binary_op
);
984 case BYTECODE_OP_EQ_STRING
:
985 case BYTECODE_OP_NE_STRING
:
986 case BYTECODE_OP_GT_STRING
:
987 case BYTECODE_OP_LT_STRING
:
988 case BYTECODE_OP_GE_STRING
:
989 case BYTECODE_OP_LE_STRING
:
990 case BYTECODE_OP_EQ_STAR_GLOB_STRING
:
991 case BYTECODE_OP_NE_STAR_GLOB_STRING
:
992 case BYTECODE_OP_EQ_S64
:
993 case BYTECODE_OP_NE_S64
:
994 case BYTECODE_OP_GT_S64
:
995 case BYTECODE_OP_LT_S64
:
996 case BYTECODE_OP_GE_S64
:
997 case BYTECODE_OP_LE_S64
:
998 case BYTECODE_OP_EQ_DOUBLE
:
999 case BYTECODE_OP_NE_DOUBLE
:
1000 case BYTECODE_OP_GT_DOUBLE
:
1001 case BYTECODE_OP_LT_DOUBLE
:
1002 case BYTECODE_OP_GE_DOUBLE
:
1003 case BYTECODE_OP_LE_DOUBLE
:
1004 case BYTECODE_OP_EQ_DOUBLE_S64
:
1005 case BYTECODE_OP_NE_DOUBLE_S64
:
1006 case BYTECODE_OP_GT_DOUBLE_S64
:
1007 case BYTECODE_OP_LT_DOUBLE_S64
:
1008 case BYTECODE_OP_GE_DOUBLE_S64
:
1009 case BYTECODE_OP_LE_DOUBLE_S64
:
1010 case BYTECODE_OP_EQ_S64_DOUBLE
:
1011 case BYTECODE_OP_NE_S64_DOUBLE
:
1012 case BYTECODE_OP_GT_S64_DOUBLE
:
1013 case BYTECODE_OP_LT_S64_DOUBLE
:
1014 case BYTECODE_OP_GE_S64_DOUBLE
:
1015 case BYTECODE_OP_LE_S64_DOUBLE
:
1018 if (vstack_pop(stack
)) {
1022 vstack_ax(stack
)->type
= REG_S64
;
1023 next_pc
+= sizeof(struct binary_op
);
1027 case BYTECODE_OP_BIT_RSHIFT
:
1028 case BYTECODE_OP_BIT_LSHIFT
:
1029 case BYTECODE_OP_BIT_AND
:
1030 case BYTECODE_OP_BIT_OR
:
1031 case BYTECODE_OP_BIT_XOR
:
1034 if (vstack_pop(stack
)) {
1038 vstack_ax(stack
)->type
= REG_S64
;
1039 next_pc
+= sizeof(struct binary_op
);
1044 case BYTECODE_OP_UNARY_PLUS
:
1046 struct unary_op
*insn
= (struct unary_op
*) pc
;
1048 switch(vstack_ax(stack
)->type
) {
1050 ERR("unknown register type\n");
1056 insn
->op
= BYTECODE_OP_UNARY_PLUS_S64
;
1059 insn
->op
= BYTECODE_OP_UNARY_PLUS_DOUBLE
;
1061 case REG_UNKNOWN
: /* Dynamic typing. */
1065 next_pc
+= sizeof(struct unary_op
);
1069 case BYTECODE_OP_UNARY_MINUS
:
1071 struct unary_op
*insn
= (struct unary_op
*) pc
;
1073 switch(vstack_ax(stack
)->type
) {
1075 ERR("unknown register type\n");
1081 insn
->op
= BYTECODE_OP_UNARY_MINUS_S64
;
1084 insn
->op
= BYTECODE_OP_UNARY_MINUS_DOUBLE
;
1086 case REG_UNKNOWN
: /* Dynamic typing. */
1090 next_pc
+= sizeof(struct unary_op
);
1094 case BYTECODE_OP_UNARY_NOT
:
1096 struct unary_op
*insn
= (struct unary_op
*) pc
;
1098 switch(vstack_ax(stack
)->type
) {
1100 ERR("unknown register type\n");
1106 insn
->op
= BYTECODE_OP_UNARY_NOT_S64
;
1109 insn
->op
= BYTECODE_OP_UNARY_NOT_DOUBLE
;
1111 case REG_UNKNOWN
: /* Dynamic typing. */
1115 next_pc
+= sizeof(struct unary_op
);
1119 case BYTECODE_OP_UNARY_BIT_NOT
:
1122 next_pc
+= sizeof(struct unary_op
);
1126 case BYTECODE_OP_UNARY_PLUS_S64
:
1127 case BYTECODE_OP_UNARY_MINUS_S64
:
1128 case BYTECODE_OP_UNARY_NOT_S64
:
1129 case BYTECODE_OP_UNARY_PLUS_DOUBLE
:
1130 case BYTECODE_OP_UNARY_MINUS_DOUBLE
:
1131 case BYTECODE_OP_UNARY_NOT_DOUBLE
:
1134 next_pc
+= sizeof(struct unary_op
);
1139 case BYTECODE_OP_AND
:
1140 case BYTECODE_OP_OR
:
1142 /* Continue to next instruction */
1143 /* Pop 1 when jump not taken */
1144 if (vstack_pop(stack
)) {
1148 next_pc
+= sizeof(struct logical_op
);
1152 /* load field ref */
1153 case BYTECODE_OP_LOAD_FIELD_REF
:
1155 ERR("Unknown field ref type\n");
1159 /* get context ref */
1160 case BYTECODE_OP_GET_CONTEXT_REF
:
1162 if (vstack_push(stack
)) {
1166 vstack_ax(stack
)->type
= REG_UNKNOWN
;
1167 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1170 case BYTECODE_OP_LOAD_FIELD_REF_STRING
:
1171 case BYTECODE_OP_LOAD_FIELD_REF_SEQUENCE
:
1172 case BYTECODE_OP_GET_CONTEXT_REF_STRING
:
1174 if (vstack_push(stack
)) {
1178 vstack_ax(stack
)->type
= REG_STRING
;
1179 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1182 case BYTECODE_OP_LOAD_FIELD_REF_S64
:
1183 case BYTECODE_OP_GET_CONTEXT_REF_S64
:
1185 if (vstack_push(stack
)) {
1189 vstack_ax(stack
)->type
= REG_S64
;
1190 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1193 case BYTECODE_OP_LOAD_FIELD_REF_DOUBLE
:
1194 case BYTECODE_OP_GET_CONTEXT_REF_DOUBLE
:
1196 if (vstack_push(stack
)) {
1200 vstack_ax(stack
)->type
= REG_DOUBLE
;
1201 next_pc
+= sizeof(struct load_op
) + sizeof(struct field_ref
);
1205 /* load from immediate operand */
1206 case BYTECODE_OP_LOAD_STRING
:
1208 struct load_op
*insn
= (struct load_op
*) pc
;
1210 if (vstack_push(stack
)) {
1214 vstack_ax(stack
)->type
= REG_STRING
;
1215 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1219 case BYTECODE_OP_LOAD_STAR_GLOB_STRING
:
1221 struct load_op
*insn
= (struct load_op
*) pc
;
1223 if (vstack_push(stack
)) {
1227 vstack_ax(stack
)->type
= REG_STAR_GLOB_STRING
;
1228 next_pc
+= sizeof(struct load_op
) + strlen(insn
->data
) + 1;
1232 case BYTECODE_OP_LOAD_S64
:
1234 if (vstack_push(stack
)) {
1238 vstack_ax(stack
)->type
= REG_S64
;
1239 next_pc
+= sizeof(struct load_op
)
1240 + sizeof(struct literal_numeric
);
1244 case BYTECODE_OP_LOAD_DOUBLE
:
1246 if (vstack_push(stack
)) {
1250 vstack_ax(stack
)->type
= REG_DOUBLE
;
1251 next_pc
+= sizeof(struct load_op
)
1252 + sizeof(struct literal_double
);
1257 case BYTECODE_OP_CAST_TO_S64
:
1259 struct cast_op
*insn
= (struct cast_op
*) pc
;
1261 switch (vstack_ax(stack
)->type
) {
1263 ERR("unknown register type\n");
1268 case REG_STAR_GLOB_STRING
:
1269 ERR("Cast op can only be applied to numeric or floating point registers\n");
1273 insn
->op
= BYTECODE_OP_CAST_NOP
;
1276 insn
->op
= BYTECODE_OP_CAST_DOUBLE_TO_S64
;
1283 vstack_ax(stack
)->type
= REG_S64
;
1284 next_pc
+= sizeof(struct cast_op
);
1287 case BYTECODE_OP_CAST_DOUBLE_TO_S64
:
1290 vstack_ax(stack
)->type
= REG_S64
;
1291 next_pc
+= sizeof(struct cast_op
);
1294 case BYTECODE_OP_CAST_NOP
:
1296 next_pc
+= sizeof(struct cast_op
);
1301 * Instructions for recursive traversal through composed types.
1303 case BYTECODE_OP_GET_CONTEXT_ROOT
:
1305 if (vstack_push(stack
)) {
1309 vstack_ax(stack
)->type
= REG_PTR
;
1310 vstack_ax(stack
)->load
.type
= LOAD_ROOT_CONTEXT
;
1311 next_pc
+= sizeof(struct load_op
);
1314 case BYTECODE_OP_GET_APP_CONTEXT_ROOT
:
1316 if (vstack_push(stack
)) {
1320 vstack_ax(stack
)->type
= REG_PTR
;
1321 vstack_ax(stack
)->load
.type
= LOAD_ROOT_APP_CONTEXT
;
1322 next_pc
+= sizeof(struct load_op
);
1325 case BYTECODE_OP_GET_PAYLOAD_ROOT
:
1327 if (vstack_push(stack
)) {
1331 vstack_ax(stack
)->type
= REG_PTR
;
1332 vstack_ax(stack
)->load
.type
= LOAD_ROOT_PAYLOAD
;
1333 next_pc
+= sizeof(struct load_op
);
1337 case BYTECODE_OP_LOAD_FIELD
:
1339 struct load_op
*insn
= (struct load_op
*) pc
;
1341 assert(vstack_ax(stack
)->type
== REG_PTR
);
1343 ret
= specialize_load_field(vstack_ax(stack
), insn
);
1347 next_pc
+= sizeof(struct load_op
);
1351 case BYTECODE_OP_LOAD_FIELD_S8
:
1352 case BYTECODE_OP_LOAD_FIELD_S16
:
1353 case BYTECODE_OP_LOAD_FIELD_S32
:
1354 case BYTECODE_OP_LOAD_FIELD_S64
:
1357 vstack_ax(stack
)->type
= REG_S64
;
1358 next_pc
+= sizeof(struct load_op
);
1362 case BYTECODE_OP_LOAD_FIELD_U8
:
1363 case BYTECODE_OP_LOAD_FIELD_U16
:
1364 case BYTECODE_OP_LOAD_FIELD_U32
:
1365 case BYTECODE_OP_LOAD_FIELD_U64
:
1368 vstack_ax(stack
)->type
= REG_U64
;
1369 next_pc
+= sizeof(struct load_op
);
1373 case BYTECODE_OP_LOAD_FIELD_STRING
:
1374 case BYTECODE_OP_LOAD_FIELD_SEQUENCE
:
1377 vstack_ax(stack
)->type
= REG_STRING
;
1378 next_pc
+= sizeof(struct load_op
);
1382 case BYTECODE_OP_LOAD_FIELD_DOUBLE
:
1385 vstack_ax(stack
)->type
= REG_DOUBLE
;
1386 next_pc
+= sizeof(struct load_op
);
1390 case BYTECODE_OP_GET_SYMBOL
:
1392 struct load_op
*insn
= (struct load_op
*) pc
;
1394 dbg_printf("op get symbol\n");
1395 switch (vstack_ax(stack
)->load
.type
) {
1397 ERR("Nested fields not implemented yet.");
1400 case LOAD_ROOT_CONTEXT
:
1401 /* Lookup context field. */
1402 ret
= specialize_context_lookup(*pctx
,
1404 &vstack_ax(stack
)->load
);
1408 case LOAD_ROOT_APP_CONTEXT
:
1409 /* Lookup app context field. */
1410 ret
= specialize_app_context_lookup(pctx
,
1412 &vstack_ax(stack
)->load
);
1416 case LOAD_ROOT_PAYLOAD
:
1417 /* Lookup event payload field. */
1418 ret
= specialize_payload_lookup(event_desc
,
1420 &vstack_ax(stack
)->load
);
1425 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_symbol
);
1429 case BYTECODE_OP_GET_SYMBOL_FIELD
:
1431 /* Always generated by specialize phase. */
1436 case BYTECODE_OP_GET_INDEX_U16
:
1438 struct load_op
*insn
= (struct load_op
*) pc
;
1439 struct get_index_u16
*index
= (struct get_index_u16
*) insn
->data
;
1441 dbg_printf("op get index u16\n");
1443 ret
= specialize_get_index(bytecode
, insn
, index
->index
,
1444 vstack_ax(stack
), sizeof(*index
));
1447 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u16
);
1451 case BYTECODE_OP_GET_INDEX_U64
:
1453 struct load_op
*insn
= (struct load_op
*) pc
;
1454 struct get_index_u64
*index
= (struct get_index_u64
*) insn
->data
;
1456 dbg_printf("op get index u64\n");
1458 ret
= specialize_get_index(bytecode
, insn
, index
->index
,
1459 vstack_ax(stack
), sizeof(*index
));
1462 next_pc
+= sizeof(struct load_op
) + sizeof(struct get_index_u64
);