1 /* DWARF 2 Expression Evaluator.
3 Copyright (C) 2001, 2002, 2003, 2005, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 Contributed by Daniel Berlin (dan@dberlin.org)
8 This file is part of GDB.
10 This program is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 3 of the License, or
13 (at your option) any later version.
15 This program is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 GNU General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program. If not, see <http://www.gnu.org/licenses/>. */
29 #include "dwarf2expr.h"
30 #include "gdb_assert.h"
/* NOTE(review): this file is a line-numbered listing and the embedded
   numbering shows gaps, i.e. many original source lines are elided.
   Definitions below are therefore incomplete as seen here.  */
32 /* Local prototypes. */
/* Forward declarations for helpers defined later in this file:
   the evaluator engine and the unsigned-address type lookup.  */
34 static void execute_stack_op (struct dwarf_expr_context *,
35 gdb_byte *, gdb_byte *);
36 static struct type *unsigned_address_type (struct gdbarch *, int);
38 /* Create a new context for the expression evaluator. */
/* Allocate and initialize a fresh expression-evaluation context.
   The context is zero-filled (xcalloc), given an initial value stack
   with room for 10 entries, and a recursion cap of 0x100 to bound
   DW_OP_fbreg-style re-entry.  Caller owns the result; release it
   with free_dwarf_expr_context.  */
40 struct dwarf_expr_context *
41 new_dwarf_expr_context (void)
43 struct dwarf_expr_context *retval;
45 retval = xcalloc (1, sizeof (struct dwarf_expr_context));
46 retval->stack_len = 0;
47 retval->stack_allocated = 10;
48 retval->stack = xmalloc (retval->stack_allocated
49 * sizeof (struct dwarf_stack_value));
50 retval->num_pieces = 0;
52 retval->max_recursion_depth = 0x100;
/* NOTE(review): the `return retval;' statement and closing brace are
   on lines elided from this listing (gap after original line 52).  */
56 /* Release the memory allocated to CTX. */
/* NOTE(review): the body of free_dwarf_expr_context (presumably
   freeing ctx->stack, ctx->pieces and ctx itself) is elided here --
   only the signature survives in this listing.  */
59 free_dwarf_expr_context (struct dwarf_expr_context *ctx)
66 /* Helper for make_cleanup_free_dwarf_expr_context. */
/* void*-typed trampoline so the context can be freed by GDB's
   generic cleanup machinery.  */
69 free_dwarf_expr_context_cleanup (void *arg)
71 free_dwarf_expr_context (arg);
74 /* Return a cleanup that calls free_dwarf_expr_context. */
/* Registers CTX with the cleanup chain; ownership of CTX passes to
   the cleanup once this returns.  */
77 make_cleanup_free_dwarf_expr_context (struct dwarf_expr_context *ctx)
79 return make_cleanup (free_dwarf_expr_context_cleanup, ctx);
82 /* Expand the memory allocated to CTX's stack to contain at least
83 NEED more elements than are currently used. */
86 dwarf_expr_grow_stack (struct dwarf_expr_context *ctx, size_t need)
/* Grow only when current capacity cannot hold stack_len + need.  */
88 if (ctx->stack_len + need > ctx->stack_allocated)
/* Over-allocate by 10 slots to amortize repeated pushes.  */
90 size_t newlen = ctx->stack_len + need + 10;
92 ctx->stack = xrealloc (ctx->stack,
93 newlen * sizeof (struct dwarf_stack_value));
94 ctx->stack_allocated = newlen;
98 /* Push VALUE onto CTX's stack. */
/* NOTE(review): the in_stack_memory parameter declaration and the
   `v->value = value;' assignment sit on elided lines (gaps after
   original lines 101 and 107).  */
101 dwarf_expr_push (struct dwarf_expr_context *ctx, CORE_ADDR value,
104 struct dwarf_stack_value *v;
/* Ensure room for one more element, then claim the next slot.  */
106 dwarf_expr_grow_stack (ctx, 1);
107 v = &ctx->stack[ctx->stack_len++];
109 v->in_stack_memory = in_stack_memory;
112 /* Pop the top item off of CTX's stack. */
115 dwarf_expr_pop (struct dwarf_expr_context *ctx)
/* Underflow is a malformed-expression error, not an assert: the
   expression bytes come from debug info and may be corrupt.  */
117 if (ctx->stack_len <= 0)
118 error (_("dwarf expression stack underflow"));
122 /* Retrieve the N'th item on CTX's stack. */
/* N counts from the top of the stack: n == 0 is the top element.
   Out-of-range N raises an error (malformed expression).  */
125 dwarf_expr_fetch (struct dwarf_expr_context *ctx, int n)
127 if (ctx->stack_len <= n)
128 error (_("Asked for position %d of stack, stack only has %d elements on it."),
130 return ctx->stack[ctx->stack_len - (1 + n)].value;
134 /* Retrieve the in_stack_memory flag of the N'th item on CTX's stack. */
/* Same indexing convention as dwarf_expr_fetch (0 == top).  */
137 dwarf_expr_fetch_in_stack_memory (struct dwarf_expr_context *ctx, int n)
139 if (ctx->stack_len <= n)
140 error (_("Asked for position %d of stack, stack only has %d elements on it."),
142 return ctx->stack[ctx->stack_len - (1 + n)].in_stack_memory;
146 /* Return true if the expression stack is empty. */
149 dwarf_expr_stack_empty_p (struct dwarf_expr_context *ctx)
151 return ctx->stack_len == 0;
154 /* Add a new piece to CTX's piece list. */
/* Appends a dwarf_expr_piece describing the current location
   (register, memory, literal, or stack value) of SIZE units at
   OFFSET.  NOTE(review): the num_pieces increment and the piece
   size/offset assignments appear to be on elided lines.  */
156 add_piece (struct dwarf_expr_context *ctx, ULONGEST size, ULONGEST offset)
158 struct dwarf_expr_piece *p;
/* Grow the pieces array by one and fill in the new last entry.  */
162 ctx->pieces = xrealloc (ctx->pieces,
164 * sizeof (struct dwarf_expr_piece)));
166 p = &ctx->pieces[ctx->num_pieces - 1];
167 p->location = ctx->location;
/* A literal piece carries its own data pointer/length ...  */
171 if (p->location == DWARF_VALUE_LITERAL)
173 p->v.literal.data = ctx->data;
174 p->v.literal.length = ctx->len;
/* ... an empty stack means this piece was optimized out ...  */
176 else if (dwarf_expr_stack_empty_p (ctx))
178 p->location = DWARF_VALUE_OPTIMIZED_OUT;
179 /* Also reset the context's location, for our callers. This is
180 a somewhat strange approach, but this lets us avoid setting
181 the location to DWARF_VALUE_MEMORY in all the individual
182 cases in the evaluator. */
183 ctx->location = DWARF_VALUE_OPTIMIZED_OUT;
/* ... otherwise the piece's value comes from the top of stack.  */
187 p->v.expr.value = dwarf_expr_fetch (ctx, 0);
188 p->v.expr.in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, 0);
192 /* Evaluate the expression at ADDR (LEN bytes long) using the context
/* Public entry point: delegates to execute_stack_op and asserts the
   recursion depth is balanced afterwards.  An exception thrown inside
   execute_stack_op leaves the depth unbalanced (see comment).  */
196 dwarf_expr_eval (struct dwarf_expr_context *ctx, gdb_byte *addr, size_t len)
198 int old_recursion_depth = ctx->recursion_depth;
200 execute_stack_op (ctx, addr, addr + len);
202 /* CTX RECURSION_DEPTH becomes invalid if an exception was thrown here. */
204 gdb_assert (ctx->recursion_depth == old_recursion_depth);
207 /* Decode the unsigned LEB128 constant at BUF into the variable pointed to
208 by R, and return the new value of BUF. Verify that it doesn't extend
/* NOTE(review): loop header, byte fetch, and shift increment are on
   elided lines; only the accumulation and termination test remain.  */
212 read_uleb128 (gdb_byte *buf, gdb_byte *buf_end, ULONGEST * r)
221 error (_("read_uleb128: Corrupted DWARF expression."));
/* Standard LEB128: low 7 bits per byte, high bit set on all but
   the final byte.  */
224 result |= (byte & 0x7f) << shift;
225 if ((byte & 0x80) == 0)
233 /* Decode the signed LEB128 constant at BUF into the variable pointed to
234 by R, and return the new value of BUF. Verify that it doesn't extend
238 read_sleb128 (gdb_byte *buf, gdb_byte *buf_end, LONGEST * r)
247 error (_("read_sleb128: Corrupted DWARF expression."));
250 result |= (byte & 0x7f) << shift;
252 if ((byte & 0x80) == 0)
/* Sign-extend if the value's sign bit (bit 6 of the last byte) is
   set and the shift hasn't already filled the destination.
   NOTE(review): `-(1 << shift)' uses a plain int literal -- for
   shift >= 31 this left-shift is undefined behavior in C; the
   usual fix is `-((LONGEST) 1 << shift)'.  Cannot verify the
   surrounding (elided) declarations, so flagging rather than
   changing.  */
255 if (shift < (sizeof (*r) * 8) && (byte & 0x40) != 0)
256 result |= -(1 << shift);
262 /* Read an address of size ADDR_SIZE from BUF, and verify that it
263 doesn't extend past BUF_END. */
266 dwarf2_read_address (struct gdbarch *gdbarch, gdb_byte *buf,
267 gdb_byte *buf_end, int addr_size)
269 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
/* Bounds check first: reading past BUF_END means the expression
   bytes are corrupt.  */
271 if (buf_end - buf < addr_size)
272 error (_("dwarf2_read_address: Corrupted DWARF expression."));
274 /* For most architectures, calling extract_unsigned_integer() alone
275 is sufficient for extracting an address. However, some
276 architectures (e.g. MIPS) use signed addresses and using
277 extract_unsigned_integer() will not produce a correct
278 result. Make sure we invoke gdbarch_integer_to_address()
279 for those architectures which require it.
281 The use of `unsigned_address_type' in the code below refers to
282 the type of buf and has no bearing on the signedness of the
283 address being returned. */
285 if (gdbarch_integer_to_address_p (gdbarch))
286 return gdbarch_integer_to_address
287 (gdbarch, unsigned_address_type (gdbarch, addr_size), buf);
/* Default path: plain unsigned extraction in target byte order.  */
289 return extract_unsigned_integer (buf, addr_size, byte_order);
292 /* Return the type of an address of size ADDR_SIZE,
293 for unsigned arithmetic. */
/* NOTE(review): the switch statement and its `case 2/4/8' labels are
   on elided lines; only the return statements survive.  Presumably
   2 -> uint16, 4 -> uint32, 8 -> uint64, matching the builtin types
   returned below.  */
296 unsigned_address_type (struct gdbarch *gdbarch, int addr_size)
301 return builtin_type (gdbarch)->builtin_uint16;
303 return builtin_type (gdbarch)->builtin_uint32;
305 return builtin_type (gdbarch)->builtin_uint64;
/* Any other address size is a GDB-internal inconsistency.  */
307 internal_error (__FILE__, __LINE__,
308 _("Unsupported address size.\n"));
312 /* Return the type of an address of size ADDR_SIZE,
313 for signed arithmetic. */
/* Signed twin of unsigned_address_type; same (elided) size dispatch.  */
316 signed_address_type (struct gdbarch *gdbarch, int addr_size)
321 return builtin_type (gdbarch)->builtin_int16;
323 return builtin_type (gdbarch)->builtin_int32;
325 return builtin_type (gdbarch)->builtin_int64;
327 internal_error (__FILE__, __LINE__,
328 _("Unsupported address size.\n"));
333 /* Check that the current operator is either at the end of an
334 expression, or that it is followed by a composition operator. */
/* Used after DW_OP_*_value / DW_OP_regx etc. to enforce the DWARF
   rule that such operators stand alone or feed a piece operator.
   OP_NAME is interpolated into the error message (format argument
   is on an elided line).  NOTE(review): "conjuction" in the message
   is a typo for "conjunction" -- it is a runtime string, so left
   untouched here; the same typo recurs in execute_stack_op.  */
337 require_composition (gdb_byte *op_ptr, gdb_byte *op_end, const char *op_name)
339 /* It seems like DW_OP_GNU_uninit should be handled here. However,
340 it doesn't seem to make sense for DW_OP_*_value, and it was not
341 checked at the other place that this function is called. */
342 if (op_ptr != op_end && *op_ptr != DW_OP_piece && *op_ptr != DW_OP_bit_piece)
343 error (_("DWARF-2 expression error: `%s' operations must be "
344 "used either alone or in conjuction with DW_OP_piece "
345 "or DW_OP_bit_piece."),
349 /* The engine for the expression evaluator. Using the context in CTX,
350 evaluate the expression between OP_PTR and OP_END. */
/* NOTE(review): this function is heavily truncated in this listing --
   the `switch (op)' header, most `case' labels, `break's, and whole
   opcode groups (DW_OP_addr..DW_OP_nop handling between the visible
   fragments) are elided.  Comments below annotate only visible code.
   Also NOTE: the character `®' appearing three times below is
   HTML-entity mojibake for the two characters `&reg' (taking the
   address of the local `reg'); it should read `&reg);'.  */
353 execute_stack_op (struct dwarf_expr_context *ctx,
354 gdb_byte *op_ptr, gdb_byte *op_end)
356 enum bfd_endian byte_order = gdbarch_byte_order (ctx->gdbarch);
/* Location defaults to memory; individual opcodes override it.  */
358 ctx->location = DWARF_VALUE_MEMORY;
359 ctx->initialized = 1; /* Default is initialized. */
/* Guard against unbounded mutual recursion via dwarf_expr_eval
   (e.g. DW_OP_fbreg evaluating the frame-base expression).  */
361 if (ctx->recursion_depth > ctx->max_recursion_depth)
362 error (_("DWARF-2 expression error: Loop detected (%d)."),
363 ctx->recursion_depth);
364 ctx->recursion_depth++;
/* Main opcode-dispatch loop over the expression bytes.  */
366 while (op_ptr < op_end)
368 enum dwarf_location_atom op = *op_ptr++;
370 /* Assume the value is not in stack memory.
371 Code that knows otherwise sets this to 1.
372 Some arithmetic on stack addresses can probably be assumed to still
373 be a stack address, but we skip this complication for now.
374 This is just an optimization, so it's always ok to punt
375 and leave this as 0. */
376 int in_stack_memory = 0;
377 ULONGEST uoffset, reg;
/* DW_OP_lit0..DW_OP_lit31: small literal encoded in the opcode.  */
414 result = op - DW_OP_lit0;
/* DW_OP_addr: target address of ctx->addr_size bytes follows.  */
418 result = dwarf2_read_address (ctx->gdbarch,
419 op_ptr, op_end, ctx->addr_size);
420 op_ptr += ctx->addr_size;
/* DW_OP_const{1,2,4,8}{u,s}: fixed-size constants.  The op_ptr
   advances are on elided lines.  */
424 result = extract_unsigned_integer (op_ptr, 1, byte_order);
428 result = extract_signed_integer (op_ptr, 1, byte_order);
432 result = extract_unsigned_integer (op_ptr, 2, byte_order);
436 result = extract_signed_integer (op_ptr, 2, byte_order);
440 result = extract_unsigned_integer (op_ptr, 4, byte_order);
444 result = extract_signed_integer (op_ptr, 4, byte_order);
448 result = extract_unsigned_integer (op_ptr, 8, byte_order);
452 result = extract_signed_integer (op_ptr, 8, byte_order);
/* DW_OP_constu / DW_OP_consts: LEB128-encoded constants.  */
456 op_ptr = read_uleb128 (op_ptr, op_end, &uoffset);
460 op_ptr = read_sleb128 (op_ptr, op_end, &offset);
464 /* The DW_OP_reg operations are required to occur alone in
465 location expressions. */
/* Visible tail of the standalone-ness check: DW_OP_regN may only
   be followed by piece operators or DW_OP_GNU_uninit.  */
499 && *op_ptr != DW_OP_piece
500 && *op_ptr != DW_OP_bit_piece
501 && *op_ptr != DW_OP_GNU_uninit)
502 error (_("DWARF-2 expression error: DW_OP_reg operations must be "
503 "used either alone or in conjuction with DW_OP_piece "
504 "or DW_OP_bit_piece."));
506 result = op - DW_OP_reg0;
507 ctx->location = DWARF_VALUE_REGISTER;
/* DW_OP_regx: register number as ULEB128 (see mojibake note).  */
511 op_ptr = read_uleb128 (op_ptr, op_end, ®);
512 require_composition (op_ptr, op_end, "DW_OP_regx");
515 ctx->location = DWARF_VALUE_REGISTER;
518 case DW_OP_implicit_value:
/* The literal's byte block follows the ULEB128 length.  */
522 op_ptr = read_uleb128 (op_ptr, op_end, &len);
523 if (op_ptr + len > op_end)
524 error (_("DW_OP_implicit_value: too few bytes available."));
527 ctx->location = DWARF_VALUE_LITERAL;
529 require_composition (op_ptr, op_end, "DW_OP_implicit_value");
/* DW_OP_stack_value: top of stack is the value itself, not an
   address.  */
533 case DW_OP_stack_value:
534 ctx->location = DWARF_VALUE_STACK;
535 require_composition (op_ptr, op_end, "DW_OP_stack_value");
/* DW_OP_breg0..31: register contents plus SLEB128 offset.  */
571 op_ptr = read_sleb128 (op_ptr, op_end, &offset);
572 result = (ctx->read_reg) (ctx->baton, op - DW_OP_breg0);
/* DW_OP_bregx: ULEB128 register number, then SLEB128 offset
   (see mojibake note).  */
578 op_ptr = read_uleb128 (op_ptr, op_end, ®);
579 op_ptr = read_sleb128 (op_ptr, op_end, &offset);
580 result = (ctx->read_reg) (ctx->baton, reg);
/* DW_OP_fbreg: evaluate the frame-base expression recursively,
   then add the SLEB128 offset.  */
588 unsigned int before_stack_len;
590 op_ptr = read_sleb128 (op_ptr, op_end, &offset);
591 /* Rather than create a whole new context, we simply
592 record the stack length before execution, then reset it
593 afterwards, effectively erasing whatever the recursive
595 before_stack_len = ctx->stack_len;
596 /* FIXME: cagney/2003-03-26: This code should be using
597 get_frame_base_address(), and then implement a dwarf2
598 specific this_base method. */
599 (ctx->get_frame_base) (ctx->baton, &datastart, &datalen);
600 dwarf_expr_eval (ctx, datastart, datalen);
601 if (ctx->location == DWARF_VALUE_LITERAL
602 || ctx->location == DWARF_VALUE_STACK)
603 error (_("Not implemented: computing frame base using explicit value operator"));
604 result = dwarf_expr_fetch (ctx, 0);
605 if (ctx->location == DWARF_VALUE_REGISTER)
606 result = (ctx->read_reg) (ctx->baton, result);
607 result = result + offset;
/* Discard whatever the recursive evaluation pushed.  */
609 ctx->stack_len = before_stack_len;
610 ctx->location = DWARF_VALUE_MEMORY;
/* DW_OP_dup: duplicate the top stack entry.  */
615 result = dwarf_expr_fetch (ctx, 0);
616 in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, 0);
/* DW_OP_drop (presumably -- case label elided).  */
620 dwarf_expr_pop (ctx);
/* DW_OP_pick: fetch the entry at the given depth.  */
625 result = dwarf_expr_fetch (ctx, offset);
626 in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, offset);
/* DW_OP_swap: exchange the two topmost entries.  */
631 struct dwarf_stack_value t1, t2;
633 if (ctx->stack_len < 2)
634 error (_("Not enough elements for DW_OP_swap. Need 2, have %d."),
636 t1 = ctx->stack[ctx->stack_len - 1];
637 t2 = ctx->stack[ctx->stack_len - 2];
638 ctx->stack[ctx->stack_len - 1] = t2;
639 ctx->stack[ctx->stack_len - 2] = t1;
/* DW_OP_over: copy the second entry onto the top.  */
644 result = dwarf_expr_fetch (ctx, 1);
645 in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, 1);
/* DW_OP_rot: rotate the three topmost entries.  */
650 struct dwarf_stack_value t1, t2, t3;
652 if (ctx->stack_len < 3)
653 error (_("Not enough elements for DW_OP_rot. Need 3, have %d."),
655 t1 = ctx->stack[ctx->stack_len - 1];
656 t2 = ctx->stack[ctx->stack_len - 2];
657 t3 = ctx->stack[ctx->stack_len - 3];
658 ctx->stack[ctx->stack_len - 1] = t2;
659 ctx->stack[ctx->stack_len - 2] = t3;
660 ctx->stack[ctx->stack_len - 3] = t1;
/* Unary-operation group: each pops its operand first, then the
   per-opcode handling below computes `result'.  */
665 case DW_OP_deref_size:
669 case DW_OP_plus_uconst:
670 /* Unary operations. */
671 result = dwarf_expr_fetch (ctx, 0);
672 dwarf_expr_pop (ctx);
/* DW_OP_deref: read an address-sized word from target memory at
   `result'.  */
678 gdb_byte *buf = alloca (ctx->addr_size);
680 (ctx->read_mem) (ctx->baton, buf, result, ctx->addr_size);
681 result = dwarf2_read_address (ctx->gdbarch,
682 buf, buf + ctx->addr_size,
/* DW_OP_deref_size: like deref but with an explicit 1-byte size
   operand.  */
687 case DW_OP_deref_size:
689 int addr_size = *op_ptr++;
690 gdb_byte *buf = alloca (addr_size);
692 (ctx->read_mem) (ctx->baton, buf, result, addr_size);
693 result = dwarf2_read_address (ctx->gdbarch,
694 buf, buf + addr_size,
/* DW_OP_abs (presumably -- label elided): negate if the value is
   negative when viewed as signed.  NOTE(review): the cast to
   `signed int' rather than an address-sized signed type looks
   suspicious for 64-bit targets, but the surrounding elided lines
   prevent confirming intent.  */
700 if ((signed int) result < 0)
/* DW_OP_plus_uconst: add a ULEB128 constant (mojibake note
   applies: `®' should be `&reg);').  */
709 case DW_OP_plus_uconst:
710 op_ptr = read_uleb128 (op_ptr, op_end, ®);
734 /* Binary operations. Use the value engine to do computations in
736 CORE_ADDR first, second;
737 enum exp_opcode binop;
738 struct value *val1 = NULL, *val2 = NULL;
739 struct type *stype, *utype;
/* Pop the two operands: `second' is top of stack, `first' below.  */
741 second = dwarf_expr_fetch (ctx, 0);
742 dwarf_expr_pop (ctx);
744 first = dwarf_expr_fetch (ctx, 0);
745 dwarf_expr_pop (ctx);
747 utype = unsigned_address_type (ctx->gdbarch, ctx->addr_size);
748 stype = signed_address_type (ctx->gdbarch, ctx->addr_size);
/* Per-operator dispatch (most case labels elided): signed
   operators pre-build val1/val2 with the signed type; the rest
   fall through to the unsigned default below.  */
753 binop = BINOP_BITWISE_AND;
757 val1 = value_from_longest (stype, first);
758 val2 = value_from_longest (stype, second);
770 binop = BINOP_BITWISE_IOR;
783 val1 = value_from_longest (stype, first);
786 binop = BINOP_BITWISE_XOR;
790 val1 = value_from_longest (stype, first);
791 val2 = value_from_longest (stype, second);
795 val1 = value_from_longest (stype, first);
796 val2 = value_from_longest (stype, second);
800 val1 = value_from_longest (stype, first);
801 val2 = value_from_longest (stype, second);
805 val1 = value_from_longest (stype, first);
806 val2 = value_from_longest (stype, second);
810 val1 = value_from_longest (stype, first);
811 val2 = value_from_longest (stype, second);
814 binop = BINOP_NOTEQUAL;
815 val1 = value_from_longest (stype, first);
816 val2 = value_from_longest (stype, second);
/* default: an unknown binop here is a GDB bug, not bad input.  */
819 internal_error (__FILE__, __LINE__,
820 _("Can't be reached."));
823 /* We use unsigned operands by default. */
825 val1 = value_from_longest (utype, first);
827 val2 = value_from_longest (utype, second);
/* Delegate the arithmetic to GDB's value engine.  */
829 result = value_as_long (value_binop (val1, val2, binop));
833 case DW_OP_call_frame_cfa:
834 result = (ctx->get_frame_cfa) (ctx->baton);
838 case DW_OP_GNU_push_tls_address:
839 /* Variable is at a constant offset in the thread-local
840 storage block into the objfile for the current thread and
841 the dynamic linker module containing this expression. Here
842 we return returns the offset from that base. The top of the
843 stack has the offset from the beginning of the thread
844 control block at which the variable is located. Nothing
845 should follow this operator, so the top of stack would be
847 result = dwarf_expr_fetch (ctx, 0);
848 dwarf_expr_pop (ctx);
849 result = (ctx->get_tls_address) (ctx->baton, result);
/* DW_OP_skip / DW_OP_bra (labels elided): 2-byte signed branch
   offsets; bra additionally pops and tests the condition.  */
853 offset = extract_signed_integer (op_ptr, 2, byte_order);
859 offset = extract_signed_integer (op_ptr, 2, byte_order);
861 if (dwarf_expr_fetch (ctx, 0) != 0)
863 dwarf_expr_pop (ctx);
/* DW_OP_piece (label elided): size is in bytes, recorded here as
   bits (8 * size), with bit offset 0.  */
873 /* Record the piece. */
874 op_ptr = read_uleb128 (op_ptr, op_end, &size);
875 add_piece (ctx, 8 * size, 0);
877 /* Pop off the address/regnum, and reset the location
879 if (ctx->location != DWARF_VALUE_LITERAL
880 && ctx->location != DWARF_VALUE_OPTIMIZED_OUT)
881 dwarf_expr_pop (ctx);
882 ctx->location = DWARF_VALUE_MEMORY;
886 case DW_OP_bit_piece:
888 ULONGEST size, offset;
890 /* Record the piece. */
891 op_ptr = read_uleb128 (op_ptr, op_end, &size);
892 op_ptr = read_uleb128 (op_ptr, op_end, &offset);
893 add_piece (ctx, size, offset);
895 /* Pop off the address/regnum, and reset the location
897 if (ctx->location != DWARF_VALUE_LITERAL
898 && ctx->location != DWARF_VALUE_OPTIMIZED_OUT)
899 dwarf_expr_pop (ctx);
900 ctx->location = DWARF_VALUE_MEMORY;
904 case DW_OP_GNU_uninit:
905 if (op_ptr != op_end)
906 error (_("DWARF-2 expression error: DW_OP_GNU_uninit must always "
907 "be the very last op."));
909 ctx->initialized = 0;
/* Unknown opcode: report it rather than silently skipping.  */
913 error (_("Unhandled dwarf expression opcode 0x%x"), op);
916 /* Most things push a result value. */
917 dwarf_expr_push (ctx, result, in_stack_memory);
/* Balance the increment from function entry.  */
921 ctx->recursion_depth--;
922 gdb_assert (ctx->recursion_depth >= 0);