/* DWARF 2 Expression Evaluator.

   Copyright (C) 2001, 2002, 2003, 2005, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

   Contributed by Daniel Berlin (dan@dberlin.org)

   This file is part of GDB.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program.  If not, see <http://www.gnu.org/licenses/>.  */

#include "defs.h"
#include "symtab.h"
#include "gdbtypes.h"
#include "value.h"
#include "gdbcore.h"
#include "dwarf2.h"
#include "dwarf2expr.h"
#include "gdb_assert.h"

/* Local prototypes.  */

static void execute_stack_op (struct dwarf_expr_context *,
                              const gdb_byte *, const gdb_byte *);

/* Cookie for gdbarch data.  */

static struct gdbarch_data *dwarf_arch_cookie;

/* This holds gdbarch-specific types used by the DWARF expression
   evaluator.  See comments in execute_stack_op.  */

struct dwarf_gdbarch_types
{
  struct type *dw_types[3];
};

/* Allocate and fill in dwarf_gdbarch_types for an arch.  */

static void *
dwarf_gdbarch_types_init (struct gdbarch *gdbarch)
{
  struct dwarf_gdbarch_types *types
    = GDBARCH_OBSTACK_ZALLOC (gdbarch, struct dwarf_gdbarch_types);

  /* The types themselves are lazily initialized.  */

  return types;
}

/* Return the type used for DWARF operations where the type is
   unspecified in the DWARF spec.  Only certain sizes are
   supported.  */

static struct type *
dwarf_expr_address_type (struct dwarf_expr_context *ctx)
{
  struct dwarf_gdbarch_types *types = gdbarch_data (ctx->gdbarch,
                                                    dwarf_arch_cookie);
  int ndx;

  if (ctx->addr_size == 2)
    ndx = 0;
  else if (ctx->addr_size == 4)
    ndx = 1;
  else if (ctx->addr_size == 8)
    ndx = 2;
  else
    error (_("Unsupported address size in DWARF expressions: %d bits"),
           8 * ctx->addr_size);

  if (types->dw_types[ndx] == NULL)
    types->dw_types[ndx]
      = arch_integer_type (ctx->gdbarch,
                           8 * ctx->addr_size,
                           0, "<signed DWARF address type>");

  return types->dw_types[ndx];
}

/* Create a new context for the expression evaluator.  */

struct dwarf_expr_context *
new_dwarf_expr_context (void)
{
  struct dwarf_expr_context *retval;

  retval = xcalloc (1, sizeof (struct dwarf_expr_context));
  retval->stack_len = 0;
  retval->stack_allocated = 10;
  retval->stack = xmalloc (retval->stack_allocated
                           * sizeof (struct dwarf_stack_value));
  retval->num_pieces = 0;
  retval->pieces = 0;
  retval->max_recursion_depth = 0x100;
  return retval;
}

/* Release the memory allocated to CTX.  */

void
free_dwarf_expr_context (struct dwarf_expr_context *ctx)
{
  xfree (ctx->stack);
  xfree (ctx->pieces);
  xfree (ctx);
}

/* Helper for make_cleanup_free_dwarf_expr_context.  */

static void
free_dwarf_expr_context_cleanup (void *arg)
{
  free_dwarf_expr_context (arg);
}

/* Return a cleanup that calls free_dwarf_expr_context.  */

struct cleanup *
make_cleanup_free_dwarf_expr_context (struct dwarf_expr_context *ctx)
{
  return make_cleanup (free_dwarf_expr_context_cleanup, ctx);
}
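
/* Illustrative sketch (not part of this file): a typical caller pairs
   the allocation above with a cleanup, evaluates an expression, and
   reads the result off the stack.  DATA and SIZE below are placeholders
   for the caller's expression bytes.

     struct dwarf_expr_context *ctx = new_dwarf_expr_context ();
     struct cleanup *old_chain = make_cleanup_free_dwarf_expr_context (ctx);

     ctx->gdbarch = ...;  ctx->addr_size = ...;  ctx->baton = ...;
     dwarf_expr_eval (ctx, data, size);
     addr = dwarf_expr_fetch_address (ctx, 0);

     do_cleanups (old_chain);  */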

/* Expand the memory allocated to CTX's stack to contain at least
   NEED more elements than are currently used.  */

static void
dwarf_expr_grow_stack (struct dwarf_expr_context *ctx, size_t need)
{
  if (ctx->stack_len + need > ctx->stack_allocated)
    {
      size_t newlen = ctx->stack_len + need + 10;

      ctx->stack = xrealloc (ctx->stack,
                             newlen * sizeof (struct dwarf_stack_value));
      ctx->stack_allocated = newlen;
    }
}

/* Push VALUE onto CTX's stack.  */

static void
dwarf_expr_push (struct dwarf_expr_context *ctx, struct value *value,
                 int in_stack_memory)
{
  struct dwarf_stack_value *v;

  dwarf_expr_grow_stack (ctx, 1);
  v = &ctx->stack[ctx->stack_len++];
  v->value = value;
  v->in_stack_memory = in_stack_memory;
}
/* Convert VALUE to the DWARF address type and push it onto CTX's
   stack.  */

void
dwarf_expr_push_address (struct dwarf_expr_context *ctx, CORE_ADDR value,
                         int in_stack_memory)
{
  dwarf_expr_push (ctx,
                   value_from_ulongest (dwarf_expr_address_type (ctx), value),
                   in_stack_memory);
}

/* Pop the top item off of CTX's stack.  */

static void
dwarf_expr_pop (struct dwarf_expr_context *ctx)
{
  if (ctx->stack_len <= 0)
    error (_("dwarf expression stack underflow"));
  ctx->stack_len--;
}

/* Retrieve the N'th item on CTX's stack.  */

struct value *
dwarf_expr_fetch (struct dwarf_expr_context *ctx, int n)
{
  if (ctx->stack_len <= n)
    error (_("Asked for position %d of stack, "
             "stack only has %d elements on it."),
           n, ctx->stack_len);
  return ctx->stack[ctx->stack_len - (1 + n)].value;
}

/* Require that TYPE be an integral type; throw an exception if not.  */

static void
dwarf_require_integral (struct type *type)
{
  if (TYPE_CODE (type) != TYPE_CODE_INT
      && TYPE_CODE (type) != TYPE_CODE_CHAR
      && TYPE_CODE (type) != TYPE_CODE_BOOL)
    error (_("integral type expected in DWARF expression"));
}

/* Return the unsigned form of TYPE.  TYPE is necessarily an integral
   type.  */

static struct type *
get_unsigned_type (struct gdbarch *gdbarch, struct type *type)
{
  switch (TYPE_LENGTH (type))
    {
    case 1:
      return builtin_type (gdbarch)->builtin_uint8;
    case 2:
      return builtin_type (gdbarch)->builtin_uint16;
    case 4:
      return builtin_type (gdbarch)->builtin_uint32;
    case 8:
      return builtin_type (gdbarch)->builtin_uint64;
    default:
      error (_("no unsigned variant found for type, while evaluating "
               "DWARF expression"));
    }
}

/* Retrieve the N'th item on CTX's stack, converted to an address.  */

CORE_ADDR
dwarf_expr_fetch_address (struct dwarf_expr_context *ctx, int n)
{
  struct value *result_val = dwarf_expr_fetch (ctx, n);
  enum bfd_endian byte_order = gdbarch_byte_order (ctx->gdbarch);
  ULONGEST result;

  dwarf_require_integral (value_type (result_val));
  result = extract_unsigned_integer (value_contents (result_val),
                                     TYPE_LENGTH (value_type (result_val)),
                                     byte_order);

  /* For most architectures, calling extract_unsigned_integer() alone
     is sufficient for extracting an address.  However, some
     architectures (e.g. MIPS) use signed addresses and using
     extract_unsigned_integer() will not produce a correct
     result.  Make sure we invoke gdbarch_integer_to_address()
     for those architectures which require it.  */
  if (gdbarch_integer_to_address_p (ctx->gdbarch))
    {
      gdb_byte *buf = alloca (ctx->addr_size);
      struct type *int_type = get_unsigned_type (ctx->gdbarch,
                                                 value_type (result_val));

      store_unsigned_integer (buf, ctx->addr_size, byte_order, result);
      return gdbarch_integer_to_address (ctx->gdbarch, int_type, buf);
    }

  return (CORE_ADDR) result;
}

/* Retrieve the in_stack_memory flag of the N'th item on CTX's stack.  */

int
dwarf_expr_fetch_in_stack_memory (struct dwarf_expr_context *ctx, int n)
{
  if (ctx->stack_len <= n)
    error (_("Asked for position %d of stack, "
             "stack only has %d elements on it."),
           n, ctx->stack_len);
  return ctx->stack[ctx->stack_len - (1 + n)].in_stack_memory;
}

/* Return true if the expression stack is empty.  */

static int
dwarf_expr_stack_empty_p (struct dwarf_expr_context *ctx)
{
  return ctx->stack_len == 0;
}

/* Add a new piece to CTX's piece list.  */
static void
add_piece (struct dwarf_expr_context *ctx, ULONGEST size, ULONGEST offset)
{
  struct dwarf_expr_piece *p;

  ctx->num_pieces++;

  ctx->pieces = xrealloc (ctx->pieces,
                          (ctx->num_pieces
                           * sizeof (struct dwarf_expr_piece)));

  p = &ctx->pieces[ctx->num_pieces - 1];
  p->location = ctx->location;
  p->size = size;
  p->offset = offset;

  if (p->location == DWARF_VALUE_LITERAL)
    {
      p->v.literal.data = ctx->data;
      p->v.literal.length = ctx->len;
    }
  else if (dwarf_expr_stack_empty_p (ctx))
    {
      p->location = DWARF_VALUE_OPTIMIZED_OUT;
      /* Also reset the context's location, for our callers.  This is
         a somewhat strange approach, but this lets us avoid setting
         the location to DWARF_VALUE_MEMORY in all the individual
         cases in the evaluator.  */
      ctx->location = DWARF_VALUE_OPTIMIZED_OUT;
    }
  else if (p->location == DWARF_VALUE_MEMORY)
    {
      p->v.mem.addr = dwarf_expr_fetch_address (ctx, 0);
      p->v.mem.in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, 0);
    }
  else if (p->location == DWARF_VALUE_IMPLICIT_POINTER)
    {
      p->v.ptr.die = ctx->len;
      p->v.ptr.offset = value_as_long (dwarf_expr_fetch (ctx, 0));
    }
  else if (p->location == DWARF_VALUE_REGISTER)
    p->v.regno = value_as_long (dwarf_expr_fetch (ctx, 0));
  else
    {
      p->v.value = dwarf_expr_fetch (ctx, 0);
    }
}
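
/* Illustrative note (typical producer output, not code from this
   file): a value split across two registers is usually described by
   an expression such as

     DW_OP_reg3  DW_OP_piece 4  DW_OP_reg4  DW_OP_piece 4

   which yields two 32-bit pieces; add_piece records each one with
   SIZE in bits and a zero OFFSET, since only DW_OP_bit_piece
   supplies a bit offset.  */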

/* Evaluate the expression at ADDR (LEN bytes long) using the context
   CTX.  */

void
dwarf_expr_eval (struct dwarf_expr_context *ctx, const gdb_byte *addr,
                 size_t len)
{
  int old_recursion_depth = ctx->recursion_depth;

  execute_stack_op (ctx, addr, addr + len);

  /* CTX RECURSION_DEPTH becomes invalid if an exception was thrown here.  */

  gdb_assert (ctx->recursion_depth == old_recursion_depth);
}
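
/* Illustrative sketch (not part of this file): evaluating the
   two-byte expression { DW_OP_breg7, 0x10 } reads DWARF register 7
   via the read_reg callback, adds the SLEB128 offset 16, and leaves
   the sum on the stack with the location still DWARF_VALUE_MEMORY,
   i.e. the variable lives at that address.  Which hardware register
   DWARF number 7 denotes is ABI-specific (on x86-64 it is %rsp).  */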

/* Decode the unsigned LEB128 constant at BUF into the variable pointed to
   by R, and return the new value of BUF.  Verify that it doesn't extend
   past BUF_END.  */

const gdb_byte *
read_uleb128 (const gdb_byte *buf, const gdb_byte *buf_end, ULONGEST *r)
{
  unsigned shift = 0;
  ULONGEST result = 0;
  gdb_byte byte;

  while (1)
    {
      if (buf >= buf_end)
        error (_("read_uleb128: Corrupted DWARF expression."));

      byte = *buf++;
      /* Widen before shifting so the accumulation is well defined for
         shifts of 32 bits or more.  */
      result |= (ULONGEST) (byte & 0x7f) << shift;
      if ((byte & 0x80) == 0)
        break;
      shift += 7;
    }
  *r = result;
  return buf;
}
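
/* Worked example (the DWARF spec's own, repeated here for reference):
   the unsigned value 624485 encodes as the bytes 0xe5 0x8e 0x26.
   Decoding accumulates 0x65 << 0, then 0x0e << 7, then 0x26 << 14;
   the final byte's clear high bit terminates the loop.  */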

/* Decode the signed LEB128 constant at BUF into the variable pointed to
   by R, and return the new value of BUF.  Verify that it doesn't extend
   past BUF_END.  */

const gdb_byte *
read_sleb128 (const gdb_byte *buf, const gdb_byte *buf_end, LONGEST *r)
{
  unsigned shift = 0;
  LONGEST result = 0;
  gdb_byte byte;

  while (1)
    {
      if (buf >= buf_end)
        error (_("read_sleb128: Corrupted DWARF expression."));

      byte = *buf++;
      /* As in read_uleb128, widen before shifting to keep large
         shifts well defined.  */
      result |= (LONGEST) (byte & 0x7f) << shift;
      shift += 7;
      if ((byte & 0x80) == 0)
        break;
    }
  /* Sign-extend if the encoding's sign bit (bit 6 of the final byte)
     is set; the widened 1 keeps the shift well defined.  */
  if (shift < (sizeof (*r) * 8) && (byte & 0x40) != 0)
    result |= -((LONGEST) 1 << shift);

  *r = result;
  return buf;
}
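
/* Worked example (for reference): -2 encodes as the single byte 0x7e.
   The low seven bits give 0x7e, the clear high bit stops the loop
   with shift == 7, and bit 6 being set triggers the sign extension,
   producing ...1111110 binary, i.e. -2.  */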

/* Check that the current operator is either at the end of an
   expression, or that it is followed by a composition operator.  */

void
dwarf_expr_require_composition (const gdb_byte *op_ptr, const gdb_byte *op_end,
                                const char *op_name)
{
  /* It seems like DW_OP_GNU_uninit should be handled here.  However,
     it doesn't seem to make sense for DW_OP_*_value, and it was not
     checked at the other place that this function is called.  */
  if (op_ptr != op_end && *op_ptr != DW_OP_piece && *op_ptr != DW_OP_bit_piece)
    error (_("DWARF-2 expression error: `%s' operations must be "
418 "used either alone or in conjuction with DW_OP_piece "
419 "or DW_OP_bit_piece."),
420 op_name);
421 }
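
/* Illustrative note (hypothetical producer output, not code from this
   file): the check above accepts an expression such as

     DW_OP_lit1  DW_OP_stack_value  DW_OP_piece 4

   where the value operation is immediately followed by a composition
   operator, but rejects one in which further computation follows.  */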

/* Return true iff the types T1 and T2 are "the same".  This only does
   checks that might reasonably be needed to compare DWARF base
   types.  */

static int
base_types_equal_p (struct type *t1, struct type *t2)
{
  if (TYPE_CODE (t1) != TYPE_CODE (t2))
    return 0;
  if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
    return 0;
  return TYPE_LENGTH (t1) == TYPE_LENGTH (t2);
}

/* A convenience function to call get_base_type on CTX and return the
   result.  DIE is the DIE whose type we need.  SIZE is non-zero if
   this function should verify that the resulting type has the correct
   size.  */

static struct type *
dwarf_get_base_type (struct dwarf_expr_context *ctx, ULONGEST die, int size)
{
  struct type *result;

  if (ctx->get_base_type)
    {
      result = ctx->get_base_type (ctx, die);
      if (size != 0 && TYPE_LENGTH (result) != size)
        error (_("DW_OP_GNU_const_type has different sizes for type and data"));
    }
  else
    /* Anything will do.  */
    result = builtin_type (ctx->gdbarch)->builtin_int;

  return result;
}

/* The engine for the expression evaluator.  Using the context in CTX,
   evaluate the expression between OP_PTR and OP_END.  */

static void
execute_stack_op (struct dwarf_expr_context *ctx,
                  const gdb_byte *op_ptr, const gdb_byte *op_end)
{
  enum bfd_endian byte_order = gdbarch_byte_order (ctx->gdbarch);
  /* Old-style "untyped" DWARF values need special treatment in a
     couple of places, specifically DW_OP_mod and DW_OP_shr.  We need
     a special type for these values so we can distinguish them from
     values that have an explicit type, because explicitly-typed
     values do not need special treatment.  This special type must be
     different (in the `==' sense) from any base type coming from the
     CU.  */
  struct type *address_type = dwarf_expr_address_type (ctx);

  ctx->location = DWARF_VALUE_MEMORY;
  ctx->initialized = 1;  /* Default is initialized.  */

  if (ctx->recursion_depth > ctx->max_recursion_depth)
    error (_("DWARF-2 expression error: Loop detected (%d)."),
           ctx->recursion_depth);
  ctx->recursion_depth++;

  while (op_ptr < op_end)
    {
      enum dwarf_location_atom op = *op_ptr++;
      ULONGEST result;
      /* Assume the value is not in stack memory.
         Code that knows otherwise sets this to 1.
         Some arithmetic on stack addresses can probably be assumed to still
         be a stack address, but we skip this complication for now.
         This is just an optimization, so it's always ok to punt
         and leave this as 0.  */
      int in_stack_memory = 0;
      ULONGEST uoffset, reg;
      LONGEST offset;
      struct value *result_val = NULL;

      switch (op)
        {
        case DW_OP_lit0:
        case DW_OP_lit1:
        case DW_OP_lit2:
        case DW_OP_lit3:
        case DW_OP_lit4:
        case DW_OP_lit5:
        case DW_OP_lit6:
        case DW_OP_lit7:
        case DW_OP_lit8:
        case DW_OP_lit9:
        case DW_OP_lit10:
        case DW_OP_lit11:
        case DW_OP_lit12:
        case DW_OP_lit13:
        case DW_OP_lit14:
        case DW_OP_lit15:
        case DW_OP_lit16:
        case DW_OP_lit17:
        case DW_OP_lit18:
        case DW_OP_lit19:
        case DW_OP_lit20:
        case DW_OP_lit21:
        case DW_OP_lit22:
        case DW_OP_lit23:
        case DW_OP_lit24:
        case DW_OP_lit25:
        case DW_OP_lit26:
        case DW_OP_lit27:
        case DW_OP_lit28:
        case DW_OP_lit29:
        case DW_OP_lit30:
        case DW_OP_lit31:
          result = op - DW_OP_lit0;
          result_val = value_from_ulongest (address_type, result);
          break;

        case DW_OP_addr:
          result = extract_unsigned_integer (op_ptr,
                                             ctx->addr_size, byte_order);
          op_ptr += ctx->addr_size;
          /* Some versions of GCC emit DW_OP_addr before
             DW_OP_GNU_push_tls_address.  In this case the value is an
             index, not an address.  We don't support things like
             branching between the address and the TLS op.  */
          if (op_ptr >= op_end || *op_ptr != DW_OP_GNU_push_tls_address)
            result += ctx->offset;
          result_val = value_from_ulongest (address_type, result);
          break;

        case DW_OP_const1u:
          result = extract_unsigned_integer (op_ptr, 1, byte_order);
          result_val = value_from_ulongest (address_type, result);
          op_ptr += 1;
          break;
        case DW_OP_const1s:
          result = extract_signed_integer (op_ptr, 1, byte_order);
          result_val = value_from_ulongest (address_type, result);
          op_ptr += 1;
          break;
        case DW_OP_const2u:
          result = extract_unsigned_integer (op_ptr, 2, byte_order);
          result_val = value_from_ulongest (address_type, result);
          op_ptr += 2;
          break;
        case DW_OP_const2s:
          result = extract_signed_integer (op_ptr, 2, byte_order);
          result_val = value_from_ulongest (address_type, result);
          op_ptr += 2;
          break;
        case DW_OP_const4u:
          result = extract_unsigned_integer (op_ptr, 4, byte_order);
          result_val = value_from_ulongest (address_type, result);
          op_ptr += 4;
          break;
        case DW_OP_const4s:
          result = extract_signed_integer (op_ptr, 4, byte_order);
          result_val = value_from_ulongest (address_type, result);
          op_ptr += 4;
          break;
        case DW_OP_const8u:
          result = extract_unsigned_integer (op_ptr, 8, byte_order);
          result_val = value_from_ulongest (address_type, result);
          op_ptr += 8;
          break;
        case DW_OP_const8s:
          result = extract_signed_integer (op_ptr, 8, byte_order);
          result_val = value_from_ulongest (address_type, result);
          op_ptr += 8;
          break;
        case DW_OP_constu:
          op_ptr = read_uleb128 (op_ptr, op_end, &uoffset);
          result = uoffset;
          result_val = value_from_ulongest (address_type, result);
          break;
        case DW_OP_consts:
          op_ptr = read_sleb128 (op_ptr, op_end, &offset);
          result = offset;
          result_val = value_from_ulongest (address_type, result);
          break;

        /* The DW_OP_reg operations are required to occur alone in
           location expressions.  */
        case DW_OP_reg0:
        case DW_OP_reg1:
        case DW_OP_reg2:
        case DW_OP_reg3:
        case DW_OP_reg4:
        case DW_OP_reg5:
        case DW_OP_reg6:
        case DW_OP_reg7:
        case DW_OP_reg8:
        case DW_OP_reg9:
        case DW_OP_reg10:
        case DW_OP_reg11:
        case DW_OP_reg12:
        case DW_OP_reg13:
        case DW_OP_reg14:
        case DW_OP_reg15:
        case DW_OP_reg16:
        case DW_OP_reg17:
        case DW_OP_reg18:
        case DW_OP_reg19:
        case DW_OP_reg20:
        case DW_OP_reg21:
        case DW_OP_reg22:
        case DW_OP_reg23:
        case DW_OP_reg24:
        case DW_OP_reg25:
        case DW_OP_reg26:
        case DW_OP_reg27:
        case DW_OP_reg28:
        case DW_OP_reg29:
        case DW_OP_reg30:
        case DW_OP_reg31:
          if (op_ptr != op_end
              && *op_ptr != DW_OP_piece
              && *op_ptr != DW_OP_bit_piece
              && *op_ptr != DW_OP_GNU_uninit)
            error (_("DWARF-2 expression error: DW_OP_reg operations must be "
641 "used either alone or in conjuction with DW_OP_piece "
642 "or DW_OP_bit_piece."));
643
644 result = op - DW_OP_reg0;
645 result_val = value_from_ulongest (address_type, result);
646 ctx->location = DWARF_VALUE_REGISTER;
647 break;
648
649 case DW_OP_regx:
650 op_ptr = read_uleb128 (op_ptr, op_end, &reg);
651 dwarf_expr_require_composition (op_ptr, op_end, "DW_OP_regx");
652
653 result = reg;
654 result_val = value_from_ulongest (address_type, result);
655 ctx->location = DWARF_VALUE_REGISTER;
656 break;
657
658 case DW_OP_implicit_value:
659 {
660 ULONGEST len;
661
662 op_ptr = read_uleb128 (op_ptr, op_end, &len);
663 if (op_ptr + len > op_end)
664 error (_("DW_OP_implicit_value: too few bytes available."));
665 ctx->len = len;
666 ctx->data = op_ptr;
667 ctx->location = DWARF_VALUE_LITERAL;
668 op_ptr += len;
669 dwarf_expr_require_composition (op_ptr, op_end,
670 "DW_OP_implicit_value");
671 }
672 goto no_push;
673
674 case DW_OP_stack_value:
675 ctx->location = DWARF_VALUE_STACK;
676 dwarf_expr_require_composition (op_ptr, op_end, "DW_OP_stack_value");
677 goto no_push;
678
679 case DW_OP_GNU_implicit_pointer:
680 {
681 ULONGEST die;
682 LONGEST len;
683
684 /* The referred-to DIE. */
685 ctx->len = extract_unsigned_integer (op_ptr, ctx->addr_size,
686 byte_order);
687 op_ptr += ctx->addr_size;
688
689 /* The byte offset into the data. */
690 op_ptr = read_sleb128 (op_ptr, op_end, &len);
691 result = (ULONGEST) len;
692 result_val = value_from_ulongest (address_type, result);
693
694 ctx->location = DWARF_VALUE_IMPLICIT_POINTER;
695 dwarf_expr_require_composition (op_ptr, op_end,
696 "DW_OP_GNU_implicit_pointer");
697 }
698 break;
699
700 case DW_OP_breg0:
701 case DW_OP_breg1:
702 case DW_OP_breg2:
703 case DW_OP_breg3:
704 case DW_OP_breg4:
705 case DW_OP_breg5:
706 case DW_OP_breg6:
707 case DW_OP_breg7:
708 case DW_OP_breg8:
709 case DW_OP_breg9:
710 case DW_OP_breg10:
711 case DW_OP_breg11:
712 case DW_OP_breg12:
713 case DW_OP_breg13:
714 case DW_OP_breg14:
715 case DW_OP_breg15:
716 case DW_OP_breg16:
717 case DW_OP_breg17:
718 case DW_OP_breg18:
719 case DW_OP_breg19:
720 case DW_OP_breg20:
721 case DW_OP_breg21:
722 case DW_OP_breg22:
723 case DW_OP_breg23:
724 case DW_OP_breg24:
725 case DW_OP_breg25:
726 case DW_OP_breg26:
727 case DW_OP_breg27:
728 case DW_OP_breg28:
729 case DW_OP_breg29:
730 case DW_OP_breg30:
731 case DW_OP_breg31:
732 {
733 op_ptr = read_sleb128 (op_ptr, op_end, &offset);
734 result = (ctx->read_reg) (ctx->baton, op - DW_OP_breg0);
735 result += offset;
736 result_val = value_from_ulongest (address_type, result);
737 }
738 break;
739 case DW_OP_bregx:
740 {
741 op_ptr = read_uleb128 (op_ptr, op_end, &reg);
742 op_ptr = read_sleb128 (op_ptr, op_end, &offset);
743 result = (ctx->read_reg) (ctx->baton, reg);
744 result += offset;
745 result_val = value_from_ulongest (address_type, result);
746 }
747 break;
748 case DW_OP_fbreg:
749 {
750 const gdb_byte *datastart;
751 size_t datalen;
752 unsigned int before_stack_len;
753
754 op_ptr = read_sleb128 (op_ptr, op_end, &offset);
755 /* Rather than create a whole new context, we simply
756 record the stack length before execution, then reset it
757 afterwards, effectively erasing whatever the recursive
758 call put there. */
759 before_stack_len = ctx->stack_len;
760 /* FIXME: cagney/2003-03-26: This code should be using
761 get_frame_base_address(), and then implement a dwarf2
762 specific this_base method. */
763 (ctx->get_frame_base) (ctx->baton, &datastart, &datalen);
764 dwarf_expr_eval (ctx, datastart, datalen);
765 if (ctx->location == DWARF_VALUE_MEMORY)
766 result = dwarf_expr_fetch_address (ctx, 0);
767 else if (ctx->location == DWARF_VALUE_REGISTER)
768 result
769 = (ctx->read_reg) (ctx->baton,
770 value_as_long (dwarf_expr_fetch (ctx, 0)));
771 else
772 error (_("Not implemented: computing frame "
773 "base using explicit value operator"));
774 result = result + offset;
775 result_val = value_from_ulongest (address_type, result);
776 in_stack_memory = 1;
777 ctx->stack_len = before_stack_len;
778 ctx->location = DWARF_VALUE_MEMORY;
779 }
780 break;
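
        /* Illustrative note (typical producer output, not code from
           this file): a local variable is usually described as, e.g.,
           DW_OP_fbreg -8, where the frame base is itself given by the
           function's DW_AT_frame_base attribute -- often a register
           operation or DW_OP_call_frame_cfa -- which is the nested
           expression evaluated recursively above.  */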

        case DW_OP_dup:
          result_val = dwarf_expr_fetch (ctx, 0);
          in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, 0);
          break;

        case DW_OP_drop:
          dwarf_expr_pop (ctx);
          goto no_push;

        case DW_OP_pick:
          offset = *op_ptr++;
          result_val = dwarf_expr_fetch (ctx, offset);
          in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, offset);
          break;

        case DW_OP_swap:
          {
            struct dwarf_stack_value t1, t2;

            if (ctx->stack_len < 2)
              error (_("Not enough elements for "
                       "DW_OP_swap.  Need 2, have %d."),
                     ctx->stack_len);
            t1 = ctx->stack[ctx->stack_len - 1];
            t2 = ctx->stack[ctx->stack_len - 2];
            ctx->stack[ctx->stack_len - 1] = t2;
            ctx->stack[ctx->stack_len - 2] = t1;
            goto no_push;
          }

        case DW_OP_over:
          result_val = dwarf_expr_fetch (ctx, 1);
          in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, 1);
          break;

        case DW_OP_rot:
          {
            struct dwarf_stack_value t1, t2, t3;

            if (ctx->stack_len < 3)
              error (_("Not enough elements for "
                       "DW_OP_rot.  Need 3, have %d."),
                     ctx->stack_len);
            t1 = ctx->stack[ctx->stack_len - 1];
            t2 = ctx->stack[ctx->stack_len - 2];
            t3 = ctx->stack[ctx->stack_len - 3];
            ctx->stack[ctx->stack_len - 1] = t2;
            ctx->stack[ctx->stack_len - 2] = t3;
            ctx->stack[ctx->stack_len - 3] = t1;
            goto no_push;
          }

        case DW_OP_deref:
        case DW_OP_deref_size:
        case DW_OP_GNU_deref_type:
          {
            int addr_size = (op == DW_OP_deref ? ctx->addr_size : *op_ptr++);
            gdb_byte *buf = alloca (addr_size);
            CORE_ADDR addr = dwarf_expr_fetch_address (ctx, 0);
            struct type *type;

            dwarf_expr_pop (ctx);

            if (op == DW_OP_GNU_deref_type)
              {
                ULONGEST type_die;

                op_ptr = read_uleb128 (op_ptr, op_end, &type_die);
                type = dwarf_get_base_type (ctx, type_die, 0);
              }
            else
              type = address_type;

            (ctx->read_mem) (ctx->baton, buf, addr, addr_size);
            result_val = value_from_contents_and_address (type, buf, addr);
            break;
          }

        case DW_OP_abs:
        case DW_OP_neg:
        case DW_OP_not:
        case DW_OP_plus_uconst:
          {
            /* Unary operations.  */
            result_val = dwarf_expr_fetch (ctx, 0);
            dwarf_expr_pop (ctx);

            switch (op)
              {
              case DW_OP_abs:
                if (value_less (result_val,
                                value_zero (value_type (result_val), not_lval)))
                  result_val = value_neg (result_val);
                break;
              case DW_OP_neg:
                result_val = value_neg (result_val);
                break;
              case DW_OP_not:
                dwarf_require_integral (value_type (result_val));
                result_val = value_complement (result_val);
                break;
              case DW_OP_plus_uconst:
                dwarf_require_integral (value_type (result_val));
                result = value_as_long (result_val);
                op_ptr = read_uleb128 (op_ptr, op_end, &reg);
                result += reg;
                result_val = value_from_ulongest (address_type, result);
                break;
              }
          }
          break;

        case DW_OP_and:
        case DW_OP_div:
        case DW_OP_minus:
        case DW_OP_mod:
        case DW_OP_mul:
        case DW_OP_or:
        case DW_OP_plus:
        case DW_OP_shl:
        case DW_OP_shr:
        case DW_OP_shra:
        case DW_OP_xor:
        case DW_OP_le:
        case DW_OP_ge:
        case DW_OP_eq:
        case DW_OP_lt:
        case DW_OP_gt:
        case DW_OP_ne:
          {
            /* Binary operations.  */
            struct value *first, *second;

            second = dwarf_expr_fetch (ctx, 0);
            dwarf_expr_pop (ctx);

            first = dwarf_expr_fetch (ctx, 0);
            dwarf_expr_pop (ctx);

            if (! base_types_equal_p (value_type (first), value_type (second)))
              error (_("Incompatible types on DWARF stack"));

            switch (op)
              {
              case DW_OP_and:
                dwarf_require_integral (value_type (first));
                dwarf_require_integral (value_type (second));
                result_val = value_binop (first, second, BINOP_BITWISE_AND);
                break;
              case DW_OP_div:
                result_val = value_binop (first, second, BINOP_DIV);
                break;
              case DW_OP_minus:
                result_val = value_binop (first, second, BINOP_SUB);
                break;
              case DW_OP_mod:
                {
                  int cast_back = 0;
                  struct type *orig_type = value_type (first);

                  /* We have to special-case "old-style" untyped values
                     -- these must have mod computed using unsigned
                     math.  */
                  if (orig_type == address_type)
                    {
                      struct type *utype
                        = get_unsigned_type (ctx->gdbarch, orig_type);

                      cast_back = 1;
                      first = value_cast (utype, first);
                      second = value_cast (utype, second);
                    }
                  /* Note that value_binop doesn't handle float or
                     decimal float here.  This seems unimportant.  */
                  result_val = value_binop (first, second, BINOP_MOD);
                  if (cast_back)
                    result_val = value_cast (orig_type, result_val);
                }
                break;
              case DW_OP_mul:
                result_val = value_binop (first, second, BINOP_MUL);
                break;
              case DW_OP_or:
                dwarf_require_integral (value_type (first));
                dwarf_require_integral (value_type (second));
                result_val = value_binop (first, second, BINOP_BITWISE_IOR);
                break;
              case DW_OP_plus:
                result_val = value_binop (first, second, BINOP_ADD);
                break;
              case DW_OP_shl:
                dwarf_require_integral (value_type (first));
                dwarf_require_integral (value_type (second));
                result_val = value_binop (first, second, BINOP_LSH);
                break;
              case DW_OP_shr:
                dwarf_require_integral (value_type (first));
                dwarf_require_integral (value_type (second));
                if (!TYPE_UNSIGNED (value_type (first)))
                  {
                    struct type *utype
                      = get_unsigned_type (ctx->gdbarch, value_type (first));

                    first = value_cast (utype, first);
                  }

                result_val = value_binop (first, second, BINOP_RSH);
                /* Make sure we wind up with the same type we started
                   with.  */
                if (value_type (result_val) != value_type (second))
                  result_val = value_cast (value_type (second), result_val);
                break;
              case DW_OP_shra:
                dwarf_require_integral (value_type (first));
                dwarf_require_integral (value_type (second));
                result_val = value_binop (first, second, BINOP_RSH);
                break;
              case DW_OP_xor:
                dwarf_require_integral (value_type (first));
                dwarf_require_integral (value_type (second));
                result_val = value_binop (first, second, BINOP_BITWISE_XOR);
                break;
              case DW_OP_le:
                /* A <= B is !(B < A).  */
                result = ! value_less (second, first);
                result_val = value_from_ulongest (address_type, result);
                break;
              case DW_OP_ge:
                /* A >= B is !(A < B).  */
                result = ! value_less (first, second);
                result_val = value_from_ulongest (address_type, result);
                break;
              case DW_OP_eq:
                result = value_equal (first, second);
                result_val = value_from_ulongest (address_type, result);
                break;
              case DW_OP_lt:
                result = value_less (first, second);
                result_val = value_from_ulongest (address_type, result);
                break;
              case DW_OP_gt:
                /* A > B is B < A.  */
                result = value_less (second, first);
                result_val = value_from_ulongest (address_type, result);
                break;
              case DW_OP_ne:
                result = ! value_equal (first, second);
                result_val = value_from_ulongest (address_type, result);
                break;
              default:
                internal_error (__FILE__, __LINE__,
                                _("Can't be reached."));
              }
          }
          break;

        case DW_OP_call_frame_cfa:
          result = (ctx->get_frame_cfa) (ctx->baton);
          result_val = value_from_ulongest (address_type, result);
          in_stack_memory = 1;
          break;

        case DW_OP_GNU_push_tls_address:
          /* The variable is at a constant offset in the thread-local
             storage block of the module (objfile) containing this
             expression, for the current thread.  The top of the stack
             holds the offset from the beginning of the thread control
             block at which the variable is located.  Nothing should
             follow this operator, so the top of the stack is what gets
             returned.  */
          result = value_as_long (dwarf_expr_fetch (ctx, 0));
          dwarf_expr_pop (ctx);
          result = (ctx->get_tls_address) (ctx->baton, result);
          result_val = value_from_ulongest (address_type, result);
          break;

        case DW_OP_skip:
          offset = extract_signed_integer (op_ptr, 2, byte_order);
          op_ptr += 2;
          op_ptr += offset;
          goto no_push;

        case DW_OP_bra:
          {
            struct value *val;

            offset = extract_signed_integer (op_ptr, 2, byte_order);
            op_ptr += 2;
            val = dwarf_expr_fetch (ctx, 0);
            dwarf_require_integral (value_type (val));
            if (value_as_long (val) != 0)
              op_ptr += offset;
            dwarf_expr_pop (ctx);
          }
          goto no_push;
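
        /* Illustrative sketch (hypothetical bytes, not from this
           file): in { DW_OP_lit1, DW_OP_bra, 0x01, 0x00, DW_OP_nop,
           DW_OP_lit2 } the branch pops the nonzero 1, reads the
           16-bit offset 1 (little-endian here), and skips the
           DW_OP_nop, so evaluation resumes at DW_OP_lit2.  The offset
           is relative to the first byte after the two offset bytes.  */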

        case DW_OP_nop:
          goto no_push;

        case DW_OP_piece:
          {
            ULONGEST size;

            /* Record the piece.  */
            op_ptr = read_uleb128 (op_ptr, op_end, &size);
            add_piece (ctx, 8 * size, 0);

            /* Pop off the address/regnum, and reset the location
               type.  */
            if (ctx->location != DWARF_VALUE_LITERAL
                && ctx->location != DWARF_VALUE_OPTIMIZED_OUT)
              dwarf_expr_pop (ctx);
            ctx->location = DWARF_VALUE_MEMORY;
          }
          goto no_push;

        case DW_OP_bit_piece:
          {
            ULONGEST size, offset;

            /* Record the piece.  */
            op_ptr = read_uleb128 (op_ptr, op_end, &size);
            op_ptr = read_uleb128 (op_ptr, op_end, &offset);
            add_piece (ctx, size, offset);

            /* Pop off the address/regnum, and reset the location
               type.  */
            if (ctx->location != DWARF_VALUE_LITERAL
                && ctx->location != DWARF_VALUE_OPTIMIZED_OUT)
              dwarf_expr_pop (ctx);
            ctx->location = DWARF_VALUE_MEMORY;
          }
          goto no_push;

        case DW_OP_GNU_uninit:
          if (op_ptr != op_end)
            error (_("DWARF-2 expression error: DW_OP_GNU_uninit must always "
                     "be the very last op."));

          ctx->initialized = 0;
          goto no_push;

        case DW_OP_call2:
          result = extract_unsigned_integer (op_ptr, 2, byte_order);
          op_ptr += 2;
          ctx->dwarf_call (ctx, result);
          goto no_push;

        case DW_OP_call4:
          result = extract_unsigned_integer (op_ptr, 4, byte_order);
          op_ptr += 4;
          ctx->dwarf_call (ctx, result);
          goto no_push;

        case DW_OP_GNU_entry_value:
          /* This operation is not yet supported by GDB.  */
          ctx->location = DWARF_VALUE_OPTIMIZED_OUT;
          ctx->stack_len = 0;
          ctx->num_pieces = 0;
          goto abort_expression;

        case DW_OP_GNU_const_type:
          {
            ULONGEST type_die;
            int n;
            const gdb_byte *data;
            struct type *type;

            op_ptr = read_uleb128 (op_ptr, op_end, &type_die);
            n = *op_ptr++;
            data = op_ptr;
            op_ptr += n;

            type = dwarf_get_base_type (ctx, type_die, n);
            result_val = value_from_contents (type, data);
          }
          break;

        case DW_OP_GNU_regval_type:
          {
            ULONGEST type_die;
            struct type *type;

            op_ptr = read_uleb128 (op_ptr, op_end, &reg);
            op_ptr = read_uleb128 (op_ptr, op_end, &type_die);

            type = dwarf_get_base_type (ctx, type_die, 0);
            result = (ctx->read_reg) (ctx->baton, reg);
            result_val = value_from_ulongest (type, result);
          }
          break;

        case DW_OP_GNU_convert:
        case DW_OP_GNU_reinterpret:
          {
            ULONGEST type_die;
            struct type *type;

            op_ptr = read_uleb128 (op_ptr, op_end, &type_die);

            type = dwarf_get_base_type (ctx, type_die, 0);

            result_val = dwarf_expr_fetch (ctx, 0);
            dwarf_expr_pop (ctx);

            if (op == DW_OP_GNU_convert)
              result_val = value_cast (type, result_val);
            else if (type == value_type (result_val))
              {
                /* Nothing.  */
              }
            else if (TYPE_LENGTH (type)
                     != TYPE_LENGTH (value_type (result_val)))
              error (_("DW_OP_GNU_reinterpret has wrong size"));
            else
              result_val
                = value_from_contents (type,
                                       value_contents_all (result_val));
          }
          break;

        default:
          error (_("Unhandled dwarf expression opcode 0x%x"), op);
        }

      /* Most things push a result value.  */
      gdb_assert (result_val != NULL);
      dwarf_expr_push (ctx, result_val, in_stack_memory);
    no_push:
      ;
    }

  /* To simplify our main caller, if the result is an implicit
     pointer, then make a pieced value.  This is ok because we can't
     have implicit pointers in contexts where pieces are invalid.  */
  if (ctx->location == DWARF_VALUE_IMPLICIT_POINTER)
    add_piece (ctx, 8 * ctx->addr_size, 0);

 abort_expression:
  ctx->recursion_depth--;
  gdb_assert (ctx->recursion_depth >= 0);
}

void
_initialize_dwarf2expr (void)
{
  dwarf_arch_cookie
    = gdbarch_data_register_post_init (dwarf_gdbarch_types_init);
}