1 /* DWARF 2 Expression Evaluator.
3 Copyright (C) 2001, 2002, 2003, 2005, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 Contributed by Daniel Berlin (dan@dberlin.org)
8 This file is part of GDB.
10 This program is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 3 of the License, or
13 (at your option) any later version.
15 This program is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 GNU General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program. If not, see <http://www.gnu.org/licenses/>. */
29 #include "dwarf2expr.h"
30 #include "gdb_assert.h"
32 /* Local prototypes. */
34 static void execute_stack_op (struct dwarf_expr_context *,
35 gdb_byte *, gdb_byte *);
36 static struct type *unsigned_address_type (struct gdbarch *, int);
38 /* Create a new context for the expression evaluator. */
40 struct dwarf_expr_context *
41 new_dwarf_expr_context (void)
43 struct dwarf_expr_context *retval;
45 retval = xcalloc (1, sizeof (struct dwarf_expr_context));
46 retval->stack_len = 0;
47 retval->stack_allocated = 10;
48 retval->stack = xmalloc (retval->stack_allocated
49 * sizeof (struct dwarf_stack_value));
50 retval->num_pieces = 0;
52 retval->max_recursion_depth = 0x100;
56 /* Release the memory allocated to CTX. */
59 free_dwarf_expr_context (struct dwarf_expr_context *ctx)
/* Helper for make_cleanup_free_dwarf_expr_context.  ARG is the
   struct dwarf_expr_context to free, passed as void* to match the
   cleanup callback signature.  */

static void
free_dwarf_expr_context_cleanup (void *arg)
{
  free_dwarf_expr_context (arg);
}
74 /* Return a cleanup that calls free_dwarf_expr_context. */
77 make_cleanup_free_dwarf_expr_context (struct dwarf_expr_context *ctx)
79 return make_cleanup (free_dwarf_expr_context_cleanup, ctx);
82 /* Expand the memory allocated to CTX's stack to contain at least
83 NEED more elements than are currently used. */
86 dwarf_expr_grow_stack (struct dwarf_expr_context *ctx, size_t need)
88 if (ctx->stack_len + need > ctx->stack_allocated)
90 size_t newlen = ctx->stack_len + need + 10;
92 ctx->stack = xrealloc (ctx->stack,
93 newlen * sizeof (struct dwarf_stack_value));
94 ctx->stack_allocated = newlen;
98 /* Push VALUE onto CTX's stack. */
101 dwarf_expr_push (struct dwarf_expr_context *ctx, CORE_ADDR value,
104 struct dwarf_stack_value *v;
106 dwarf_expr_grow_stack (ctx, 1);
107 v = &ctx->stack[ctx->stack_len++];
109 v->in_stack_memory = in_stack_memory;
112 /* Pop the top item off of CTX's stack. */
115 dwarf_expr_pop (struct dwarf_expr_context *ctx)
117 if (ctx->stack_len <= 0)
118 error (_("dwarf expression stack underflow"));
122 /* Retrieve the N'th item on CTX's stack. */
125 dwarf_expr_fetch (struct dwarf_expr_context *ctx, int n)
127 if (ctx->stack_len <= n)
128 error (_("Asked for position %d of stack, stack only has %d elements on it."),
130 return ctx->stack[ctx->stack_len - (1 + n)].value;
134 /* Retrieve the in_stack_memory flag of the N'th item on CTX's stack. */
137 dwarf_expr_fetch_in_stack_memory (struct dwarf_expr_context *ctx, int n)
139 if (ctx->stack_len <= n)
140 error (_("Asked for position %d of stack, stack only has %d elements on it."),
142 return ctx->stack[ctx->stack_len - (1 + n)].in_stack_memory;
146 /* Return true if the expression stack is empty. */
149 dwarf_expr_stack_empty_p (struct dwarf_expr_context *ctx)
151 return ctx->stack_len == 0;
154 /* Add a new piece to CTX's piece list. */
156 add_piece (struct dwarf_expr_context *ctx, ULONGEST size)
158 struct dwarf_expr_piece *p;
163 ctx->pieces = xrealloc (ctx->pieces,
165 * sizeof (struct dwarf_expr_piece)));
167 ctx->pieces = xmalloc (ctx->num_pieces
168 * sizeof (struct dwarf_expr_piece));
170 p = &ctx->pieces[ctx->num_pieces - 1];
171 p->location = ctx->location;
173 if (p->location == DWARF_VALUE_LITERAL)
175 p->v.literal.data = ctx->data;
176 p->v.literal.length = ctx->len;
178 else if (dwarf_expr_stack_empty_p (ctx))
180 p->location = DWARF_VALUE_OPTIMIZED_OUT;
181 /* Also reset the context's location, for our callers. This is
182 a somewhat strange approach, but this lets us avoid setting
183 the location to DWARF_VALUE_MEMORY in all the individual
184 cases in the evaluator. */
185 ctx->location = DWARF_VALUE_OPTIMIZED_OUT;
189 p->v.expr.value = dwarf_expr_fetch (ctx, 0);
190 p->v.expr.in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, 0);
194 /* Evaluate the expression at ADDR (LEN bytes long) using the context
198 dwarf_expr_eval (struct dwarf_expr_context *ctx, gdb_byte *addr, size_t len)
200 int old_recursion_depth = ctx->recursion_depth;
202 execute_stack_op (ctx, addr, addr + len);
204 /* CTX RECURSION_DEPTH becomes invalid if an exception was thrown here. */
206 gdb_assert (ctx->recursion_depth == old_recursion_depth);
209 /* Decode the unsigned LEB128 constant at BUF into the variable pointed to
210 by R, and return the new value of BUF. Verify that it doesn't extend
214 read_uleb128 (gdb_byte *buf, gdb_byte *buf_end, ULONGEST * r)
223 error (_("read_uleb128: Corrupted DWARF expression."));
226 result |= (byte & 0x7f) << shift;
227 if ((byte & 0x80) == 0)
235 /* Decode the signed LEB128 constant at BUF into the variable pointed to
236 by R, and return the new value of BUF. Verify that it doesn't extend
240 read_sleb128 (gdb_byte *buf, gdb_byte *buf_end, LONGEST * r)
249 error (_("read_sleb128: Corrupted DWARF expression."));
252 result |= (byte & 0x7f) << shift;
254 if ((byte & 0x80) == 0)
257 if (shift < (sizeof (*r) * 8) && (byte & 0x40) != 0)
258 result |= -(1 << shift);
264 /* Read an address of size ADDR_SIZE from BUF, and verify that it
265 doesn't extend past BUF_END. */
268 dwarf2_read_address (struct gdbarch *gdbarch, gdb_byte *buf,
269 gdb_byte *buf_end, int addr_size)
271 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
273 if (buf_end - buf < addr_size)
274 error (_("dwarf2_read_address: Corrupted DWARF expression."));
276 /* For most architectures, calling extract_unsigned_integer() alone
277 is sufficient for extracting an address. However, some
278 architectures (e.g. MIPS) use signed addresses and using
279 extract_unsigned_integer() will not produce a correct
280 result. Make sure we invoke gdbarch_integer_to_address()
281 for those architectures which require it.
283 The use of `unsigned_address_type' in the code below refers to
284 the type of buf and has no bearing on the signedness of the
285 address being returned. */
287 if (gdbarch_integer_to_address_p (gdbarch))
288 return gdbarch_integer_to_address
289 (gdbarch, unsigned_address_type (gdbarch, addr_size), buf);
291 return extract_unsigned_integer (buf, addr_size, byte_order);
294 /* Return the type of an address of size ADDR_SIZE,
295 for unsigned arithmetic. */
298 unsigned_address_type (struct gdbarch *gdbarch, int addr_size)
303 return builtin_type (gdbarch)->builtin_uint16;
305 return builtin_type (gdbarch)->builtin_uint32;
307 return builtin_type (gdbarch)->builtin_uint64;
309 internal_error (__FILE__, __LINE__,
310 _("Unsupported address size.\n"));
314 /* Return the type of an address of size ADDR_SIZE,
315 for signed arithmetic. */
318 signed_address_type (struct gdbarch *gdbarch, int addr_size)
323 return builtin_type (gdbarch)->builtin_int16;
325 return builtin_type (gdbarch)->builtin_int32;
327 return builtin_type (gdbarch)->builtin_int64;
329 internal_error (__FILE__, __LINE__,
330 _("Unsupported address size.\n"));
335 /* Check that the current operator is either at the end of an
336 expression, or that it is followed by a composition operator. */
339 require_composition (gdb_byte *op_ptr, gdb_byte *op_end, const char *op_name)
341 /* It seems like DW_OP_GNU_uninit should be handled here. However,
342 it doesn't seem to make sense for DW_OP_*_value, and it was not
343 checked at the other place that this function is called. */
344 if (op_ptr != op_end && *op_ptr != DW_OP_piece && *op_ptr != DW_OP_bit_piece)
345 error (_("DWARF-2 expression error: `%s' operations must be "
346 "used either alone or in conjuction with DW_OP_piece "
347 "or DW_OP_bit_piece."),
/* NOTE(review): The span below is a line-numbered, partially-sampled
   extract of execute_stack_op — the main DWARF expression interpreter.
   Many original lines (the switch statement, most case labels, braces,
   local declarations such as `result', `offset', `datastart'/`datalen',
   and whole opcode ranges) are missing from this extract, and every
   surviving line carries its original line number as a prefix.  The
   fragment is preserved byte-for-byte; only comments have been added.
   The "®" character appearing on some lines looks like mojibake for
   "&reg" — TODO confirm against the upstream source before any repair.  */
351 /* The engine for the expression evaluator. Using the context in CTX,
352 evaluate the expression between OP_PTR and OP_END. */
355 execute_stack_op (struct dwarf_expr_context *ctx,
356 gdb_byte *op_ptr, gdb_byte *op_end)
358 enum bfd_endian byte_order = gdbarch_byte_order (ctx->gdbarch);
/* Every evaluation starts out describing a memory location, initialized.  */
360 ctx->location = DWARF_VALUE_MEMORY;
361 ctx->initialized = 1; /* Default is initialized. */
/* Recursion-depth guard: detects looping expressions (e.g. via the
   recursive dwarf_expr_eval call in the frame-base handling below).  */
363 if (ctx->recursion_depth > ctx->max_recursion_depth)
364 error (_("DWARF-2 expression error: Loop detected (%d)."),
365 ctx->recursion_depth);
366 ctx->recursion_depth++;
/* Main opcode dispatch loop over the expression bytes.  */
368 while (op_ptr < op_end)
370 enum dwarf_location_atom op = *op_ptr++;
372 /* Assume the value is not in stack memory.
373 Code that knows otherwise sets this to 1.
374 Some arithmetic on stack addresses can probably be assumed to still
375 be a stack address, but we skip this complication for now.
376 This is just an optimization, so it's always ok to punt
377 and leave this as 0. */
378 int in_stack_memory = 0;
379 ULONGEST uoffset, reg;
/* NOTE(review): the sampled lines below come from the (missing) switch on
   OP: small literals, DW_OP_addr, and the fixed-size constant opcodes.  */
416 result = op - DW_OP_lit0;
420 result = dwarf2_read_address (ctx->gdbarch,
421 op_ptr, op_end, ctx->addr_size);
422 op_ptr += ctx->addr_size;
426 result = extract_unsigned_integer (op_ptr, 1, byte_order);
430 result = extract_signed_integer (op_ptr, 1, byte_order);
434 result = extract_unsigned_integer (op_ptr, 2, byte_order);
438 result = extract_signed_integer (op_ptr, 2, byte_order);
442 result = extract_unsigned_integer (op_ptr, 4, byte_order);
446 result = extract_signed_integer (op_ptr, 4, byte_order);
450 result = extract_unsigned_integer (op_ptr, 8, byte_order);
454 result = extract_signed_integer (op_ptr, 8, byte_order);
458 op_ptr = read_uleb128 (op_ptr, op_end, &uoffset);
462 op_ptr = read_sleb128 (op_ptr, op_end, &offset);
466 /* The DW_OP_reg operations are required to occur alone in
467 location expressions. */
501 && *op_ptr != DW_OP_piece
502 && *op_ptr != DW_OP_GNU_uninit)
503 error (_("DWARF-2 expression error: DW_OP_reg operations must be "
504 "used either alone or in conjuction with DW_OP_piece."));
506 result = op - DW_OP_reg0;
507 ctx->location = DWARF_VALUE_REGISTER;
511 op_ptr = read_uleb128 (op_ptr, op_end, ®);
512 require_composition (op_ptr, op_end, "DW_OP_regx");
515 ctx->location = DWARF_VALUE_REGISTER;
/* DW_OP_implicit_value: the value itself is embedded in the expression.  */
518 case DW_OP_implicit_value:
522 op_ptr = read_uleb128 (op_ptr, op_end, &len);
523 if (op_ptr + len > op_end)
524 error (_("DW_OP_implicit_value: too few bytes available."));
527 ctx->location = DWARF_VALUE_LITERAL;
529 require_composition (op_ptr, op_end, "DW_OP_implicit_value");
/* DW_OP_stack_value: the top of stack is the value, not its address.  */
533 case DW_OP_stack_value:
534 ctx->location = DWARF_VALUE_STACK;
535 require_composition (op_ptr, op_end, "DW_OP_stack_value");
/* Register-relative addressing (DW_OP_breg0..31 and DW_OP_bregx).  */
571 op_ptr = read_sleb128 (op_ptr, op_end, &offset);
572 result = (ctx->read_reg) (ctx->baton, op - DW_OP_breg0);
578 op_ptr = read_uleb128 (op_ptr, op_end, ®);
579 op_ptr = read_sleb128 (op_ptr, op_end, &offset);
580 result = (ctx->read_reg) (ctx->baton, reg);
/* Frame-base-relative addressing: recursively evaluate the frame-base
   expression supplied by the get_frame_base callback.  */
588 unsigned int before_stack_len;
590 op_ptr = read_sleb128 (op_ptr, op_end, &offset);
591 /* Rather than create a whole new context, we simply
592 record the stack length before execution, then reset it
593 afterwards, effectively erasing whatever the recursive
595 before_stack_len = ctx->stack_len;
596 /* FIXME: cagney/2003-03-26: This code should be using
597 get_frame_base_address(), and then implement a dwarf2
598 specific this_base method. */
599 (ctx->get_frame_base) (ctx->baton, &datastart, &datalen);
600 dwarf_expr_eval (ctx, datastart, datalen);
601 if (ctx->location == DWARF_VALUE_LITERAL
602 || ctx->location == DWARF_VALUE_STACK)
603 error (_("Not implemented: computing frame base using explicit value operator"));
604 result = dwarf_expr_fetch (ctx, 0);
605 if (ctx->location == DWARF_VALUE_REGISTER)
606 result = (ctx->read_reg) (ctx->baton, result);
607 result = result + offset;
/* Erase whatever the recursive evaluation left on the stack.  */
609 ctx->stack_len = before_stack_len;
610 ctx->location = DWARF_VALUE_MEMORY;
/* Stack-manipulation opcodes (dup/drop/pick/swap/over/rot).  */
615 result = dwarf_expr_fetch (ctx, 0);
616 in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, 0);
620 dwarf_expr_pop (ctx);
625 result = dwarf_expr_fetch (ctx, offset);
626 in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, offset);
631 struct dwarf_stack_value t1, t2;
633 if (ctx->stack_len < 2)
634 error (_("Not enough elements for DW_OP_swap. Need 2, have %d."),
636 t1 = ctx->stack[ctx->stack_len - 1];
637 t2 = ctx->stack[ctx->stack_len - 2];
638 ctx->stack[ctx->stack_len - 1] = t2;
639 ctx->stack[ctx->stack_len - 2] = t1;
644 result = dwarf_expr_fetch (ctx, 1);
645 in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, 1);
650 struct dwarf_stack_value t1, t2, t3;
652 if (ctx->stack_len < 3)
653 error (_("Not enough elements for DW_OP_rot. Need 3, have %d."),
655 t1 = ctx->stack[ctx->stack_len - 1];
656 t2 = ctx->stack[ctx->stack_len - 2];
657 t3 = ctx->stack[ctx->stack_len - 3];
658 ctx->stack[ctx->stack_len - 1] = t2;
659 ctx->stack[ctx->stack_len - 2] = t3;
660 ctx->stack[ctx->stack_len - 3] = t1;
/* Unary operations: pop one operand, compute, push the result.  */
665 case DW_OP_deref_size:
669 case DW_OP_plus_uconst:
670 /* Unary operations. */
671 result = dwarf_expr_fetch (ctx, 0);
672 dwarf_expr_pop (ctx);
/* DW_OP_deref: read an address-sized value from target memory.  */
678 gdb_byte *buf = alloca (ctx->addr_size);
680 (ctx->read_mem) (ctx->baton, buf, result, ctx->addr_size);
681 result = dwarf2_read_address (ctx->gdbarch,
682 buf, buf + ctx->addr_size,
/* DW_OP_deref_size: like deref, but with an explicit 1-byte size operand.  */
687 case DW_OP_deref_size:
689 int addr_size = *op_ptr++;
690 gdb_byte *buf = alloca (addr_size);
692 (ctx->read_mem) (ctx->baton, buf, result, addr_size);
693 result = dwarf2_read_address (ctx->gdbarch,
694 buf, buf + addr_size,
/* NOTE(review): presumably DW_OP_abs — negate when interpreted as signed.
   TODO confirm; the case label is missing from this extract.  */
700 if ((signed int) result < 0)
709 case DW_OP_plus_uconst:
710 op_ptr = read_uleb128 (op_ptr, op_end, ®);
/* Binary operations: pop two operands and route them through GDB's
   value engine so the arithmetic is done in target address width.  */
734 /* Binary operations. Use the value engine to do computations in
736 CORE_ADDR first, second;
737 enum exp_opcode binop;
738 struct value *val1 = NULL, *val2 = NULL;
739 struct type *stype, *utype;
741 second = dwarf_expr_fetch (ctx, 0);
742 dwarf_expr_pop (ctx);
744 first = dwarf_expr_fetch (ctx, 0);
745 dwarf_expr_pop (ctx);
747 utype = unsigned_address_type (ctx->gdbarch, ctx->addr_size);
748 stype = signed_address_type (ctx->gdbarch, ctx->addr_size);
/* Each (missing) case picks a BINOP and, for signed operators, wraps
   the operands in signed-typed values; unsigned is the default below.  */
753 binop = BINOP_BITWISE_AND;
757 val1 = value_from_longest (stype, first);
758 val2 = value_from_longest (stype, second);
770 binop = BINOP_BITWISE_IOR;
783 val1 = value_from_longest (stype, first);
786 binop = BINOP_BITWISE_XOR;
790 val1 = value_from_longest (stype, first);
791 val2 = value_from_longest (stype, second);
795 val1 = value_from_longest (stype, first);
796 val2 = value_from_longest (stype, second);
800 val1 = value_from_longest (stype, first);
801 val2 = value_from_longest (stype, second);
805 val1 = value_from_longest (stype, first);
806 val2 = value_from_longest (stype, second);
810 val1 = value_from_longest (stype, first);
811 val2 = value_from_longest (stype, second);
814 binop = BINOP_NOTEQUAL;
815 val1 = value_from_longest (stype, first);
816 val2 = value_from_longest (stype, second);
819 internal_error (__FILE__, __LINE__,
820 _("Can't be reached."));
823 /* We use unsigned operands by default. */
825 val1 = value_from_longest (utype, first);
827 val2 = value_from_longest (utype, second);
829 result = value_as_long (value_binop (val1, val2, binop));
833 case DW_OP_call_frame_cfa:
834 result = (ctx->get_frame_cfa) (ctx->baton);
838 case DW_OP_GNU_push_tls_address:
839 /* Variable is at a constant offset in the thread-local
840 storage block into the objfile for the current thread and
841 the dynamic linker module containing this expression. Here
842 we return returns the offset from that base. The top of the
843 stack has the offset from the beginning of the thread
844 control block at which the variable is located. Nothing
845 should follow this operator, so the top of stack would be
847 result = dwarf_expr_fetch (ctx, 0);
848 dwarf_expr_pop (ctx);
849 result = (ctx->get_tls_address) (ctx->baton, result);
/* Control flow: DW_OP_skip (unconditional) and DW_OP_bra (conditional)
   take a signed 2-byte branch offset.  */
853 offset = extract_signed_integer (op_ptr, 2, byte_order);
859 offset = extract_signed_integer (op_ptr, 2, byte_order);
861 if (dwarf_expr_fetch (ctx, 0) != 0)
863 dwarf_expr_pop (ctx);
/* DW_OP_piece: record a composition piece of the given byte size.  */
873 /* Record the piece. */
874 op_ptr = read_uleb128 (op_ptr, op_end, &size);
875 add_piece (ctx, size);
877 /* Pop off the address/regnum, and reset the location
879 if (ctx->location != DWARF_VALUE_LITERAL
880 && ctx->location != DWARF_VALUE_OPTIMIZED_OUT)
881 dwarf_expr_pop (ctx);
882 ctx->location = DWARF_VALUE_MEMORY;
886 case DW_OP_GNU_uninit:
887 if (op_ptr != op_end)
888 error (_("DWARF-2 expression error: DW_OP_GNU_uninit must always "
889 "be the very last op."));
891 ctx->initialized = 0;
895 error (_("Unhandled dwarf expression opcode 0x%x"), op);
898 /* Most things push a result value. */
899 dwarf_expr_push (ctx, result, in_stack_memory);
/* Balance the recursion-depth increment done on entry.  */
903 ctx->recursion_depth--;
904 gdb_assert (ctx->recursion_depth >= 0);