2010-06-21 Michael Snyder <msnyder@vmware.com>
[deliverable/binutils-gdb.git] / gdb / dwarf2expr.c
1 /* DWARF 2 Expression Evaluator.
2
3 Copyright (C) 2001, 2002, 2003, 2005, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
5
6 Contributed by Daniel Berlin (dan@dberlin.org)
7
8 This file is part of GDB.
9
10 This program is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 3 of the License, or
13 (at your option) any later version.
14
15 This program is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 GNU General Public License for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with this program. If not, see <http://www.gnu.org/licenses/>. */
22
23 #include "defs.h"
24 #include "symtab.h"
25 #include "gdbtypes.h"
26 #include "value.h"
27 #include "gdbcore.h"
28 #include "dwarf2.h"
29 #include "dwarf2expr.h"
30 #include "gdb_assert.h"
31
32 /* Local prototypes. */
33
34 static void execute_stack_op (struct dwarf_expr_context *,
35 const gdb_byte *, const gdb_byte *);
36 static struct type *unsigned_address_type (struct gdbarch *, int);
37
38 /* Create a new context for the expression evaluator. */
39
40 struct dwarf_expr_context *
41 new_dwarf_expr_context (void)
42 {
43 struct dwarf_expr_context *retval;
44
45 retval = xcalloc (1, sizeof (struct dwarf_expr_context));
46 retval->stack_len = 0;
47 retval->stack_allocated = 10;
48 retval->stack = xmalloc (retval->stack_allocated
49 * sizeof (struct dwarf_stack_value));
50 retval->num_pieces = 0;
51 retval->pieces = 0;
52 retval->max_recursion_depth = 0x100;
53 return retval;
54 }
55
/* Release the memory allocated to CTX.  */

void
free_dwarf_expr_context (struct dwarf_expr_context *ctx)
{
  /* Free the members owned by CTX (the evaluation stack and the piece
     list, see new_dwarf_expr_context and add_piece) before CTX
     itself.  */
  xfree (ctx->stack);
  xfree (ctx->pieces);
  xfree (ctx);
}
65
/* Helper for make_cleanup_free_dwarf_expr_context: adapts
   free_dwarf_expr_context to the "void *" signature the cleanup
   machinery expects.  ARG is the dwarf_expr_context to free.  */

static void
free_dwarf_expr_context_cleanup (void *arg)
{
  free_dwarf_expr_context (arg);
}
73
/* Return a cleanup that calls free_dwarf_expr_context on CTX when
   run, so CTX is released even if an error is thrown before the
   caller frees it explicitly.  */

struct cleanup *
make_cleanup_free_dwarf_expr_context (struct dwarf_expr_context *ctx)
{
  return make_cleanup (free_dwarf_expr_context_cleanup, ctx);
}
81
82 /* Expand the memory allocated to CTX's stack to contain at least
83 NEED more elements than are currently used. */
84
85 static void
86 dwarf_expr_grow_stack (struct dwarf_expr_context *ctx, size_t need)
87 {
88 if (ctx->stack_len + need > ctx->stack_allocated)
89 {
90 size_t newlen = ctx->stack_len + need + 10;
91
92 ctx->stack = xrealloc (ctx->stack,
93 newlen * sizeof (struct dwarf_stack_value));
94 ctx->stack_allocated = newlen;
95 }
96 }
97
98 /* Push VALUE onto CTX's stack. */
99
100 void
101 dwarf_expr_push (struct dwarf_expr_context *ctx, CORE_ADDR value,
102 int in_stack_memory)
103 {
104 struct dwarf_stack_value *v;
105
106 dwarf_expr_grow_stack (ctx, 1);
107 v = &ctx->stack[ctx->stack_len++];
108 v->value = value;
109 v->in_stack_memory = in_stack_memory;
110 }
111
/* Pop the top item off of CTX's stack.  The value is discarded; use
   dwarf_expr_fetch first if it is still needed.  Throws an error on
   underflow.  */

void
dwarf_expr_pop (struct dwarf_expr_context *ctx)
{
  if (ctx->stack_len <= 0)
    error (_("dwarf expression stack underflow"));
  ctx->stack_len--;
}
121
122 /* Retrieve the N'th item on CTX's stack. */
123
124 CORE_ADDR
125 dwarf_expr_fetch (struct dwarf_expr_context *ctx, int n)
126 {
127 if (ctx->stack_len <= n)
128 error (_("Asked for position %d of stack, stack only has %d elements on it."),
129 n, ctx->stack_len);
130 return ctx->stack[ctx->stack_len - (1 + n)].value;
131
132 }
133
134 /* Retrieve the in_stack_memory flag of the N'th item on CTX's stack. */
135
136 int
137 dwarf_expr_fetch_in_stack_memory (struct dwarf_expr_context *ctx, int n)
138 {
139 if (ctx->stack_len <= n)
140 error (_("Asked for position %d of stack, stack only has %d elements on it."),
141 n, ctx->stack_len);
142 return ctx->stack[ctx->stack_len - (1 + n)].in_stack_memory;
143
144 }
145
/* Return non-zero if CTX's expression stack is empty.  */

static int
dwarf_expr_stack_empty_p (struct dwarf_expr_context *ctx)
{
  return ctx->stack_len == 0;
}
153
/* Add a new piece to CTX's piece list.  SIZE is the piece's size and
   OFFSET its offset within the source location (the callers in
   execute_stack_op pass bit quantities).  The piece's location kind
   is taken from CTX->location as left by the evaluator.  */
static void
add_piece (struct dwarf_expr_context *ctx, ULONGEST size, ULONGEST offset)
{
  struct dwarf_expr_piece *p;

  /* Grow the piece array by one slot and point P at it.  */
  ctx->num_pieces++;

  ctx->pieces = xrealloc (ctx->pieces,
			  (ctx->num_pieces
			   * sizeof (struct dwarf_expr_piece)));

  p = &ctx->pieces[ctx->num_pieces - 1];
  p->location = ctx->location;
  p->size = size;
  p->offset = offset;

  if (p->location == DWARF_VALUE_LITERAL)
    {
      /* Literal pieces carry their bytes directly; CTX->data/CTX->len
	 were set up by DW_OP_implicit_value.  */
      p->v.literal.data = ctx->data;
      p->v.literal.length = ctx->len;
    }
  else if (dwarf_expr_stack_empty_p (ctx))
    {
      /* An empty stack means this piece has no location: treat it as
	 optimized out.  */
      p->location = DWARF_VALUE_OPTIMIZED_OUT;
      /* Also reset the context's location, for our callers.  This is
	 a somewhat strange approach, but this lets us avoid setting
	 the location to DWARF_VALUE_MEMORY in all the individual
	 cases in the evaluator.  */
      ctx->location = DWARF_VALUE_OPTIMIZED_OUT;
    }
  else
    {
      /* Otherwise the piece's address/register number/value is the
	 current top of stack.  */
      p->v.expr.value = dwarf_expr_fetch (ctx, 0);
      p->v.expr.in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, 0);
    }
}
191
/* Evaluate the expression at ADDR (LEN bytes long) using the context
   CTX.  The result is left on CTX's stack and/or in CTX->pieces,
   depending on the operations in the expression.  */

void
dwarf_expr_eval (struct dwarf_expr_context *ctx, const gdb_byte *addr,
		 size_t len)
{
  int old_recursion_depth = ctx->recursion_depth;

  execute_stack_op (ctx, addr, addr + len);

  /* CTX RECURSION_DEPTH becomes invalid if an exception was thrown here.  */

  gdb_assert (ctx->recursion_depth == old_recursion_depth);
}
207
208 /* Decode the unsigned LEB128 constant at BUF into the variable pointed to
209 by R, and return the new value of BUF. Verify that it doesn't extend
210 past BUF_END. */
211
212 const gdb_byte *
213 read_uleb128 (const gdb_byte *buf, const gdb_byte *buf_end, ULONGEST * r)
214 {
215 unsigned shift = 0;
216 ULONGEST result = 0;
217 gdb_byte byte;
218
219 while (1)
220 {
221 if (buf >= buf_end)
222 error (_("read_uleb128: Corrupted DWARF expression."));
223
224 byte = *buf++;
225 result |= (byte & 0x7f) << shift;
226 if ((byte & 0x80) == 0)
227 break;
228 shift += 7;
229 }
230 *r = result;
231 return buf;
232 }
233
234 /* Decode the signed LEB128 constant at BUF into the variable pointed to
235 by R, and return the new value of BUF. Verify that it doesn't extend
236 past BUF_END. */
237
238 const gdb_byte *
239 read_sleb128 (const gdb_byte *buf, const gdb_byte *buf_end, LONGEST * r)
240 {
241 unsigned shift = 0;
242 LONGEST result = 0;
243 gdb_byte byte;
244
245 while (1)
246 {
247 if (buf >= buf_end)
248 error (_("read_sleb128: Corrupted DWARF expression."));
249
250 byte = *buf++;
251 result |= (byte & 0x7f) << shift;
252 shift += 7;
253 if ((byte & 0x80) == 0)
254 break;
255 }
256 if (shift < (sizeof (*r) * 8) && (byte & 0x40) != 0)
257 result |= -(1 << shift);
258
259 *r = result;
260 return buf;
261 }
262
/* Read an address of size ADDR_SIZE from BUF, and verify that it
   doesn't extend past BUF_END.  Return the address as a CORE_ADDR,
   using GDBARCH's byte order.  */

CORE_ADDR
dwarf2_read_address (struct gdbarch *gdbarch, const gdb_byte *buf,
		     const gdb_byte *buf_end, int addr_size)
{
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (buf_end - buf < addr_size)
    error (_("dwarf2_read_address: Corrupted DWARF expression."));

  /* For most architectures, calling extract_unsigned_integer() alone
     is sufficient for extracting an address.  However, some
     architectures (e.g. MIPS) use signed addresses and using
     extract_unsigned_integer() will not produce a correct
     result.  Make sure we invoke gdbarch_integer_to_address()
     for those architectures which require it.

     The use of `unsigned_address_type' in the code below refers to
     the type of buf and has no bearing on the signedness of the
     address being returned.  */

  if (gdbarch_integer_to_address_p (gdbarch))
    return gdbarch_integer_to_address
	     (gdbarch, unsigned_address_type (gdbarch, addr_size), buf);

  return extract_unsigned_integer (buf, addr_size, byte_order);
}
292
293 /* Return the type of an address of size ADDR_SIZE,
294 for unsigned arithmetic. */
295
296 static struct type *
297 unsigned_address_type (struct gdbarch *gdbarch, int addr_size)
298 {
299 switch (addr_size)
300 {
301 case 2:
302 return builtin_type (gdbarch)->builtin_uint16;
303 case 4:
304 return builtin_type (gdbarch)->builtin_uint32;
305 case 8:
306 return builtin_type (gdbarch)->builtin_uint64;
307 default:
308 internal_error (__FILE__, __LINE__,
309 _("Unsupported address size.\n"));
310 }
311 }
312
313 /* Return the type of an address of size ADDR_SIZE,
314 for signed arithmetic. */
315
316 static struct type *
317 signed_address_type (struct gdbarch *gdbarch, int addr_size)
318 {
319 switch (addr_size)
320 {
321 case 2:
322 return builtin_type (gdbarch)->builtin_int16;
323 case 4:
324 return builtin_type (gdbarch)->builtin_int32;
325 case 8:
326 return builtin_type (gdbarch)->builtin_int64;
327 default:
328 internal_error (__FILE__, __LINE__,
329 _("Unsupported address size.\n"));
330 }
331 }
332 \f
333
334 /* Check that the current operator is either at the end of an
335 expression, or that it is followed by a composition operator. */
336
337 void
338 dwarf_expr_require_composition (const gdb_byte *op_ptr, const gdb_byte *op_end,
339 const char *op_name)
340 {
341 /* It seems like DW_OP_GNU_uninit should be handled here. However,
342 it doesn't seem to make sense for DW_OP_*_value, and it was not
343 checked at the other place that this function is called. */
344 if (op_ptr != op_end && *op_ptr != DW_OP_piece && *op_ptr != DW_OP_bit_piece)
345 error (_("DWARF-2 expression error: `%s' operations must be "
346 "used either alone or in conjuction with DW_OP_piece "
347 "or DW_OP_bit_piece."),
348 op_name);
349 }
350
351 /* The engine for the expression evaluator. Using the context in CTX,
352 evaluate the expression between OP_PTR and OP_END. */
353
354 static void
355 execute_stack_op (struct dwarf_expr_context *ctx,
356 const gdb_byte *op_ptr, const gdb_byte *op_end)
357 {
358 enum bfd_endian byte_order = gdbarch_byte_order (ctx->gdbarch);
359
360 ctx->location = DWARF_VALUE_MEMORY;
361 ctx->initialized = 1; /* Default is initialized. */
362
363 if (ctx->recursion_depth > ctx->max_recursion_depth)
364 error (_("DWARF-2 expression error: Loop detected (%d)."),
365 ctx->recursion_depth);
366 ctx->recursion_depth++;
367
368 while (op_ptr < op_end)
369 {
370 enum dwarf_location_atom op = *op_ptr++;
371 CORE_ADDR result;
372 /* Assume the value is not in stack memory.
373 Code that knows otherwise sets this to 1.
374 Some arithmetic on stack addresses can probably be assumed to still
375 be a stack address, but we skip this complication for now.
376 This is just an optimization, so it's always ok to punt
377 and leave this as 0. */
378 int in_stack_memory = 0;
379 ULONGEST uoffset, reg;
380 LONGEST offset;
381
382 switch (op)
383 {
384 case DW_OP_lit0:
385 case DW_OP_lit1:
386 case DW_OP_lit2:
387 case DW_OP_lit3:
388 case DW_OP_lit4:
389 case DW_OP_lit5:
390 case DW_OP_lit6:
391 case DW_OP_lit7:
392 case DW_OP_lit8:
393 case DW_OP_lit9:
394 case DW_OP_lit10:
395 case DW_OP_lit11:
396 case DW_OP_lit12:
397 case DW_OP_lit13:
398 case DW_OP_lit14:
399 case DW_OP_lit15:
400 case DW_OP_lit16:
401 case DW_OP_lit17:
402 case DW_OP_lit18:
403 case DW_OP_lit19:
404 case DW_OP_lit20:
405 case DW_OP_lit21:
406 case DW_OP_lit22:
407 case DW_OP_lit23:
408 case DW_OP_lit24:
409 case DW_OP_lit25:
410 case DW_OP_lit26:
411 case DW_OP_lit27:
412 case DW_OP_lit28:
413 case DW_OP_lit29:
414 case DW_OP_lit30:
415 case DW_OP_lit31:
416 result = op - DW_OP_lit0;
417 break;
418
419 case DW_OP_addr:
420 result = dwarf2_read_address (ctx->gdbarch,
421 op_ptr, op_end, ctx->addr_size);
422 op_ptr += ctx->addr_size;
423 break;
424
425 case DW_OP_const1u:
426 result = extract_unsigned_integer (op_ptr, 1, byte_order);
427 op_ptr += 1;
428 break;
429 case DW_OP_const1s:
430 result = extract_signed_integer (op_ptr, 1, byte_order);
431 op_ptr += 1;
432 break;
433 case DW_OP_const2u:
434 result = extract_unsigned_integer (op_ptr, 2, byte_order);
435 op_ptr += 2;
436 break;
437 case DW_OP_const2s:
438 result = extract_signed_integer (op_ptr, 2, byte_order);
439 op_ptr += 2;
440 break;
441 case DW_OP_const4u:
442 result = extract_unsigned_integer (op_ptr, 4, byte_order);
443 op_ptr += 4;
444 break;
445 case DW_OP_const4s:
446 result = extract_signed_integer (op_ptr, 4, byte_order);
447 op_ptr += 4;
448 break;
449 case DW_OP_const8u:
450 result = extract_unsigned_integer (op_ptr, 8, byte_order);
451 op_ptr += 8;
452 break;
453 case DW_OP_const8s:
454 result = extract_signed_integer (op_ptr, 8, byte_order);
455 op_ptr += 8;
456 break;
457 case DW_OP_constu:
458 op_ptr = read_uleb128 (op_ptr, op_end, &uoffset);
459 result = uoffset;
460 break;
461 case DW_OP_consts:
462 op_ptr = read_sleb128 (op_ptr, op_end, &offset);
463 result = offset;
464 break;
465
466 /* The DW_OP_reg operations are required to occur alone in
467 location expressions. */
468 case DW_OP_reg0:
469 case DW_OP_reg1:
470 case DW_OP_reg2:
471 case DW_OP_reg3:
472 case DW_OP_reg4:
473 case DW_OP_reg5:
474 case DW_OP_reg6:
475 case DW_OP_reg7:
476 case DW_OP_reg8:
477 case DW_OP_reg9:
478 case DW_OP_reg10:
479 case DW_OP_reg11:
480 case DW_OP_reg12:
481 case DW_OP_reg13:
482 case DW_OP_reg14:
483 case DW_OP_reg15:
484 case DW_OP_reg16:
485 case DW_OP_reg17:
486 case DW_OP_reg18:
487 case DW_OP_reg19:
488 case DW_OP_reg20:
489 case DW_OP_reg21:
490 case DW_OP_reg22:
491 case DW_OP_reg23:
492 case DW_OP_reg24:
493 case DW_OP_reg25:
494 case DW_OP_reg26:
495 case DW_OP_reg27:
496 case DW_OP_reg28:
497 case DW_OP_reg29:
498 case DW_OP_reg30:
499 case DW_OP_reg31:
500 if (op_ptr != op_end
501 && *op_ptr != DW_OP_piece
502 && *op_ptr != DW_OP_bit_piece
503 && *op_ptr != DW_OP_GNU_uninit)
504 error (_("DWARF-2 expression error: DW_OP_reg operations must be "
505 "used either alone or in conjuction with DW_OP_piece "
506 "or DW_OP_bit_piece."));
507
508 result = op - DW_OP_reg0;
509 ctx->location = DWARF_VALUE_REGISTER;
510 break;
511
512 case DW_OP_regx:
513 op_ptr = read_uleb128 (op_ptr, op_end, &reg);
514 dwarf_expr_require_composition (op_ptr, op_end, "DW_OP_regx");
515
516 result = reg;
517 ctx->location = DWARF_VALUE_REGISTER;
518 break;
519
520 case DW_OP_implicit_value:
521 {
522 ULONGEST len;
523
524 op_ptr = read_uleb128 (op_ptr, op_end, &len);
525 if (op_ptr + len > op_end)
526 error (_("DW_OP_implicit_value: too few bytes available."));
527 ctx->len = len;
528 ctx->data = op_ptr;
529 ctx->location = DWARF_VALUE_LITERAL;
530 op_ptr += len;
531 dwarf_expr_require_composition (op_ptr, op_end,
532 "DW_OP_implicit_value");
533 }
534 goto no_push;
535
536 case DW_OP_stack_value:
537 ctx->location = DWARF_VALUE_STACK;
538 dwarf_expr_require_composition (op_ptr, op_end, "DW_OP_stack_value");
539 goto no_push;
540
541 case DW_OP_breg0:
542 case DW_OP_breg1:
543 case DW_OP_breg2:
544 case DW_OP_breg3:
545 case DW_OP_breg4:
546 case DW_OP_breg5:
547 case DW_OP_breg6:
548 case DW_OP_breg7:
549 case DW_OP_breg8:
550 case DW_OP_breg9:
551 case DW_OP_breg10:
552 case DW_OP_breg11:
553 case DW_OP_breg12:
554 case DW_OP_breg13:
555 case DW_OP_breg14:
556 case DW_OP_breg15:
557 case DW_OP_breg16:
558 case DW_OP_breg17:
559 case DW_OP_breg18:
560 case DW_OP_breg19:
561 case DW_OP_breg20:
562 case DW_OP_breg21:
563 case DW_OP_breg22:
564 case DW_OP_breg23:
565 case DW_OP_breg24:
566 case DW_OP_breg25:
567 case DW_OP_breg26:
568 case DW_OP_breg27:
569 case DW_OP_breg28:
570 case DW_OP_breg29:
571 case DW_OP_breg30:
572 case DW_OP_breg31:
573 {
574 op_ptr = read_sleb128 (op_ptr, op_end, &offset);
575 result = (ctx->read_reg) (ctx->baton, op - DW_OP_breg0);
576 result += offset;
577 }
578 break;
579 case DW_OP_bregx:
580 {
581 op_ptr = read_uleb128 (op_ptr, op_end, &reg);
582 op_ptr = read_sleb128 (op_ptr, op_end, &offset);
583 result = (ctx->read_reg) (ctx->baton, reg);
584 result += offset;
585 }
586 break;
587 case DW_OP_fbreg:
588 {
589 const gdb_byte *datastart;
590 size_t datalen;
591 unsigned int before_stack_len;
592
593 op_ptr = read_sleb128 (op_ptr, op_end, &offset);
594 /* Rather than create a whole new context, we simply
595 record the stack length before execution, then reset it
596 afterwards, effectively erasing whatever the recursive
597 call put there. */
598 before_stack_len = ctx->stack_len;
599 /* FIXME: cagney/2003-03-26: This code should be using
600 get_frame_base_address(), and then implement a dwarf2
601 specific this_base method. */
602 (ctx->get_frame_base) (ctx->baton, &datastart, &datalen);
603 dwarf_expr_eval (ctx, datastart, datalen);
604 if (ctx->location == DWARF_VALUE_LITERAL
605 || ctx->location == DWARF_VALUE_STACK)
606 error (_("Not implemented: computing frame base using explicit value operator"));
607 result = dwarf_expr_fetch (ctx, 0);
608 if (ctx->location == DWARF_VALUE_REGISTER)
609 result = (ctx->read_reg) (ctx->baton, result);
610 result = result + offset;
611 in_stack_memory = 1;
612 ctx->stack_len = before_stack_len;
613 ctx->location = DWARF_VALUE_MEMORY;
614 }
615 break;
616
617 case DW_OP_dup:
618 result = dwarf_expr_fetch (ctx, 0);
619 in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, 0);
620 break;
621
622 case DW_OP_drop:
623 dwarf_expr_pop (ctx);
624 goto no_push;
625
626 case DW_OP_pick:
627 offset = *op_ptr++;
628 result = dwarf_expr_fetch (ctx, offset);
629 in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, offset);
630 break;
631
632 case DW_OP_swap:
633 {
634 struct dwarf_stack_value t1, t2;
635
636 if (ctx->stack_len < 2)
637 error (_("Not enough elements for DW_OP_swap. Need 2, have %d."),
638 ctx->stack_len);
639 t1 = ctx->stack[ctx->stack_len - 1];
640 t2 = ctx->stack[ctx->stack_len - 2];
641 ctx->stack[ctx->stack_len - 1] = t2;
642 ctx->stack[ctx->stack_len - 2] = t1;
643 goto no_push;
644 }
645
646 case DW_OP_over:
647 result = dwarf_expr_fetch (ctx, 1);
648 in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, 1);
649 break;
650
651 case DW_OP_rot:
652 {
653 struct dwarf_stack_value t1, t2, t3;
654
655 if (ctx->stack_len < 3)
656 error (_("Not enough elements for DW_OP_rot. Need 3, have %d."),
657 ctx->stack_len);
658 t1 = ctx->stack[ctx->stack_len - 1];
659 t2 = ctx->stack[ctx->stack_len - 2];
660 t3 = ctx->stack[ctx->stack_len - 3];
661 ctx->stack[ctx->stack_len - 1] = t2;
662 ctx->stack[ctx->stack_len - 2] = t3;
663 ctx->stack[ctx->stack_len - 3] = t1;
664 goto no_push;
665 }
666
667 case DW_OP_deref:
668 case DW_OP_deref_size:
669 case DW_OP_abs:
670 case DW_OP_neg:
671 case DW_OP_not:
672 case DW_OP_plus_uconst:
673 /* Unary operations. */
674 result = dwarf_expr_fetch (ctx, 0);
675 dwarf_expr_pop (ctx);
676
677 switch (op)
678 {
679 case DW_OP_deref:
680 {
681 gdb_byte *buf = alloca (ctx->addr_size);
682
683 (ctx->read_mem) (ctx->baton, buf, result, ctx->addr_size);
684 result = dwarf2_read_address (ctx->gdbarch,
685 buf, buf + ctx->addr_size,
686 ctx->addr_size);
687 }
688 break;
689
690 case DW_OP_deref_size:
691 {
692 int addr_size = *op_ptr++;
693 gdb_byte *buf = alloca (addr_size);
694
695 (ctx->read_mem) (ctx->baton, buf, result, addr_size);
696 result = dwarf2_read_address (ctx->gdbarch,
697 buf, buf + addr_size,
698 addr_size);
699 }
700 break;
701
702 case DW_OP_abs:
703 if ((signed int) result < 0)
704 result = -result;
705 break;
706 case DW_OP_neg:
707 result = -result;
708 break;
709 case DW_OP_not:
710 result = ~result;
711 break;
712 case DW_OP_plus_uconst:
713 op_ptr = read_uleb128 (op_ptr, op_end, &reg);
714 result += reg;
715 break;
716 }
717 break;
718
719 case DW_OP_and:
720 case DW_OP_div:
721 case DW_OP_minus:
722 case DW_OP_mod:
723 case DW_OP_mul:
724 case DW_OP_or:
725 case DW_OP_plus:
726 case DW_OP_shl:
727 case DW_OP_shr:
728 case DW_OP_shra:
729 case DW_OP_xor:
730 case DW_OP_le:
731 case DW_OP_ge:
732 case DW_OP_eq:
733 case DW_OP_lt:
734 case DW_OP_gt:
735 case DW_OP_ne:
736 {
737 /* Binary operations. Use the value engine to do computations in
738 the right width. */
739 CORE_ADDR first, second;
740 enum exp_opcode binop;
741 struct value *val1 = NULL, *val2 = NULL;
742 struct type *stype, *utype;
743
744 second = dwarf_expr_fetch (ctx, 0);
745 dwarf_expr_pop (ctx);
746
747 first = dwarf_expr_fetch (ctx, 0);
748 dwarf_expr_pop (ctx);
749
750 utype = unsigned_address_type (ctx->gdbarch, ctx->addr_size);
751 stype = signed_address_type (ctx->gdbarch, ctx->addr_size);
752
753 switch (op)
754 {
755 case DW_OP_and:
756 binop = BINOP_BITWISE_AND;
757 break;
758 case DW_OP_div:
759 binop = BINOP_DIV;
760 val1 = value_from_longest (stype, first);
761 val2 = value_from_longest (stype, second);
762 break;
763 case DW_OP_minus:
764 binop = BINOP_SUB;
765 break;
766 case DW_OP_mod:
767 binop = BINOP_MOD;
768 break;
769 case DW_OP_mul:
770 binop = BINOP_MUL;
771 break;
772 case DW_OP_or:
773 binop = BINOP_BITWISE_IOR;
774 break;
775 case DW_OP_plus:
776 binop = BINOP_ADD;
777 break;
778 case DW_OP_shl:
779 binop = BINOP_LSH;
780 break;
781 case DW_OP_shr:
782 binop = BINOP_RSH;
783 break;
784 case DW_OP_shra:
785 binop = BINOP_RSH;
786 val1 = value_from_longest (stype, first);
787 break;
788 case DW_OP_xor:
789 binop = BINOP_BITWISE_XOR;
790 break;
791 case DW_OP_le:
792 binop = BINOP_LEQ;
793 val1 = value_from_longest (stype, first);
794 val2 = value_from_longest (stype, second);
795 break;
796 case DW_OP_ge:
797 binop = BINOP_GEQ;
798 val1 = value_from_longest (stype, first);
799 val2 = value_from_longest (stype, second);
800 break;
801 case DW_OP_eq:
802 binop = BINOP_EQUAL;
803 val1 = value_from_longest (stype, first);
804 val2 = value_from_longest (stype, second);
805 break;
806 case DW_OP_lt:
807 binop = BINOP_LESS;
808 val1 = value_from_longest (stype, first);
809 val2 = value_from_longest (stype, second);
810 break;
811 case DW_OP_gt:
812 binop = BINOP_GTR;
813 val1 = value_from_longest (stype, first);
814 val2 = value_from_longest (stype, second);
815 break;
816 case DW_OP_ne:
817 binop = BINOP_NOTEQUAL;
818 val1 = value_from_longest (stype, first);
819 val2 = value_from_longest (stype, second);
820 break;
821 default:
822 internal_error (__FILE__, __LINE__,
823 _("Can't be reached."));
824 }
825
826 /* We use unsigned operands by default. */
827 if (val1 == NULL)
828 val1 = value_from_longest (utype, first);
829 if (val2 == NULL)
830 val2 = value_from_longest (utype, second);
831
832 result = value_as_long (value_binop (val1, val2, binop));
833 }
834 break;
835
836 case DW_OP_call_frame_cfa:
837 result = (ctx->get_frame_cfa) (ctx->baton);
838 in_stack_memory = 1;
839 break;
840
841 case DW_OP_GNU_push_tls_address:
842 /* Variable is at a constant offset in the thread-local
843 storage block into the objfile for the current thread and
844 the dynamic linker module containing this expression. Here
845 we return returns the offset from that base. The top of the
846 stack has the offset from the beginning of the thread
847 control block at which the variable is located. Nothing
848 should follow this operator, so the top of stack would be
849 returned. */
850 result = dwarf_expr_fetch (ctx, 0);
851 dwarf_expr_pop (ctx);
852 result = (ctx->get_tls_address) (ctx->baton, result);
853 break;
854
855 case DW_OP_skip:
856 offset = extract_signed_integer (op_ptr, 2, byte_order);
857 op_ptr += 2;
858 op_ptr += offset;
859 goto no_push;
860
861 case DW_OP_bra:
862 offset = extract_signed_integer (op_ptr, 2, byte_order);
863 op_ptr += 2;
864 if (dwarf_expr_fetch (ctx, 0) != 0)
865 op_ptr += offset;
866 dwarf_expr_pop (ctx);
867 goto no_push;
868
869 case DW_OP_nop:
870 goto no_push;
871
872 case DW_OP_piece:
873 {
874 ULONGEST size;
875
876 /* Record the piece. */
877 op_ptr = read_uleb128 (op_ptr, op_end, &size);
878 add_piece (ctx, 8 * size, 0);
879
880 /* Pop off the address/regnum, and reset the location
881 type. */
882 if (ctx->location != DWARF_VALUE_LITERAL
883 && ctx->location != DWARF_VALUE_OPTIMIZED_OUT)
884 dwarf_expr_pop (ctx);
885 ctx->location = DWARF_VALUE_MEMORY;
886 }
887 goto no_push;
888
889 case DW_OP_bit_piece:
890 {
891 ULONGEST size, offset;
892
893 /* Record the piece. */
894 op_ptr = read_uleb128 (op_ptr, op_end, &size);
895 op_ptr = read_uleb128 (op_ptr, op_end, &offset);
896 add_piece (ctx, size, offset);
897
898 /* Pop off the address/regnum, and reset the location
899 type. */
900 if (ctx->location != DWARF_VALUE_LITERAL
901 && ctx->location != DWARF_VALUE_OPTIMIZED_OUT)
902 dwarf_expr_pop (ctx);
903 ctx->location = DWARF_VALUE_MEMORY;
904 }
905 goto no_push;
906
907 case DW_OP_GNU_uninit:
908 if (op_ptr != op_end)
909 error (_("DWARF-2 expression error: DW_OP_GNU_uninit must always "
910 "be the very last op."));
911
912 ctx->initialized = 0;
913 goto no_push;
914
915 case DW_OP_call2:
916 result = extract_unsigned_integer (op_ptr, 2, byte_order);
917 op_ptr += 2;
918 ctx->dwarf_call (ctx, result);
919 goto no_push;
920
921 case DW_OP_call4:
922 result = extract_unsigned_integer (op_ptr, 4, byte_order);
923 op_ptr += 4;
924 ctx->dwarf_call (ctx, result);
925 goto no_push;
926
927 default:
928 error (_("Unhandled dwarf expression opcode 0x%x"), op);
929 }
930
931 /* Most things push a result value. */
932 dwarf_expr_push (ctx, result, in_stack_memory);
933 no_push:;
934 }
935
936 ctx->recursion_depth--;
937 gdb_assert (ctx->recursion_depth >= 0);
938 }
This page took 0.049709 seconds and 4 git commands to generate.