/* DWARF 2 Expression Evaluator.

   Copyright (C) 2001, 2002, 2003, 2005, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

   Contributed by Daniel Berlin (dan@dberlin.org)

   This file is part of GDB.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program.  If not, see <http://www.gnu.org/licenses/>.  */
29 #include "dwarf2expr.h"
30 #include "gdb_assert.h"
32 /* Local prototypes. */
34 static void execute_stack_op (struct dwarf_expr_context *,
35 const gdb_byte *, const gdb_byte *);
37 /* Cookie for gdbarch data. */
39 static struct gdbarch_data *dwarf_arch_cookie;
41 /* This holds gdbarch-specific types used by the DWARF expression
42 evaluator. See comments in execute_stack_op. */
struct dwarf_gdbarch_types
{
  /* Lazily-created signed address types for 2-, 4- and 8-byte
     addresses (indices 0, 1 and 2 respectively); see
     dwarf_expr_address_type.  */
  struct type *dw_types[3];
};
49 /* Allocate and fill in dwarf_gdbarch_types for an arch. */
52 dwarf_gdbarch_types_init (struct gdbarch *gdbarch)
54 struct dwarf_gdbarch_types *types
55 = GDBARCH_OBSTACK_ZALLOC (gdbarch, struct dwarf_gdbarch_types);
57 /* The types themselves are lazily initialized. */
62 /* Return the type used for DWARF operations where the type is
63 unspecified in the DWARF spec. Only certain sizes are
67 dwarf_expr_address_type (struct dwarf_expr_context *ctx)
69 struct dwarf_gdbarch_types *types = gdbarch_data (ctx->gdbarch,
73 if (ctx->addr_size == 2)
75 else if (ctx->addr_size == 4)
77 else if (ctx->addr_size == 8)
80 error (_("Unsupported address size in DWARF expressions: %d bits"),
83 if (types->dw_types[ndx] == NULL)
85 = arch_integer_type (ctx->gdbarch,
87 0, "<signed DWARF address type>");
89 return types->dw_types[ndx];
92 /* Create a new context for the expression evaluator. */
94 struct dwarf_expr_context *
95 new_dwarf_expr_context (void)
97 struct dwarf_expr_context *retval;
99 retval = xcalloc (1, sizeof (struct dwarf_expr_context));
100 retval->stack_len = 0;
101 retval->stack_allocated = 10;
102 retval->stack = xmalloc (retval->stack_allocated
103 * sizeof (struct dwarf_stack_value));
104 retval->num_pieces = 0;
106 retval->max_recursion_depth = 0x100;
110 /* Release the memory allocated to CTX. */
113 free_dwarf_expr_context (struct dwarf_expr_context *ctx)
120 /* Helper for make_cleanup_free_dwarf_expr_context. */
/* Cleanup-callback adapter: ARG is really a struct dwarf_expr_context *.  */

static void
free_dwarf_expr_context_cleanup (void *arg)
{
  free_dwarf_expr_context (arg);
}
128 /* Return a cleanup that calls free_dwarf_expr_context. */
131 make_cleanup_free_dwarf_expr_context (struct dwarf_expr_context *ctx)
133 return make_cleanup (free_dwarf_expr_context_cleanup, ctx);
136 /* Expand the memory allocated to CTX's stack to contain at least
137 NEED more elements than are currently used. */
140 dwarf_expr_grow_stack (struct dwarf_expr_context *ctx, size_t need)
142 if (ctx->stack_len + need > ctx->stack_allocated)
144 size_t newlen = ctx->stack_len + need + 10;
146 ctx->stack = xrealloc (ctx->stack,
147 newlen * sizeof (struct dwarf_stack_value));
148 ctx->stack_allocated = newlen;
152 /* Push VALUE onto CTX's stack. */
155 dwarf_expr_push (struct dwarf_expr_context *ctx, struct value *value,
158 struct dwarf_stack_value *v;
160 dwarf_expr_grow_stack (ctx, 1);
161 v = &ctx->stack[ctx->stack_len++];
163 v->in_stack_memory = in_stack_memory;
166 /* Push VALUE onto CTX's stack. */
169 dwarf_expr_push_address (struct dwarf_expr_context *ctx, CORE_ADDR value,
172 dwarf_expr_push (ctx,
173 value_from_ulongest (dwarf_expr_address_type (ctx), value),
177 /* Pop the top item off of CTX's stack. */
180 dwarf_expr_pop (struct dwarf_expr_context *ctx)
182 if (ctx->stack_len <= 0)
183 error (_("dwarf expression stack underflow"));
187 /* Retrieve the N'th item on CTX's stack. */
190 dwarf_expr_fetch (struct dwarf_expr_context *ctx, int n)
192 if (ctx->stack_len <= n)
193 error (_("Asked for position %d of stack, "
194 "stack only has %d elements on it."),
196 return ctx->stack[ctx->stack_len - (1 + n)].value;
199 /* Require that TYPE be an integral type; throw an exception if not. */
202 dwarf_require_integral (struct type *type)
204 if (TYPE_CODE (type) != TYPE_CODE_INT
205 && TYPE_CODE (type) != TYPE_CODE_CHAR
206 && TYPE_CODE (type) != TYPE_CODE_BOOL)
207 error (_("integral type expected in DWARF expression"));
210 /* Return the unsigned form of TYPE. TYPE is necessarily an integral
214 get_unsigned_type (struct gdbarch *gdbarch, struct type *type)
216 switch (TYPE_LENGTH (type))
219 return builtin_type (gdbarch)->builtin_uint8;
221 return builtin_type (gdbarch)->builtin_uint16;
223 return builtin_type (gdbarch)->builtin_uint32;
225 return builtin_type (gdbarch)->builtin_uint64;
227 error (_("no unsigned variant found for type, while evaluating "
228 "DWARF expression"));
232 /* Return the signed form of TYPE. TYPE is necessarily an integral
236 get_signed_type (struct gdbarch *gdbarch, struct type *type)
238 switch (TYPE_LENGTH (type))
241 return builtin_type (gdbarch)->builtin_int8;
243 return builtin_type (gdbarch)->builtin_int16;
245 return builtin_type (gdbarch)->builtin_int32;
247 return builtin_type (gdbarch)->builtin_int64;
249 error (_("no signed variant found for type, while evaluating "
250 "DWARF expression"));
254 /* Retrieve the N'th item on CTX's stack, converted to an address. */
257 dwarf_expr_fetch_address (struct dwarf_expr_context *ctx, int n)
259 struct value *result_val = dwarf_expr_fetch (ctx, n);
260 enum bfd_endian byte_order = gdbarch_byte_order (ctx->gdbarch);
263 dwarf_require_integral (value_type (result_val));
264 result = extract_unsigned_integer (value_contents (result_val),
265 TYPE_LENGTH (value_type (result_val)),
268 /* For most architectures, calling extract_unsigned_integer() alone
269 is sufficient for extracting an address. However, some
270 architectures (e.g. MIPS) use signed addresses and using
271 extract_unsigned_integer() will not produce a correct
272 result. Make sure we invoke gdbarch_integer_to_address()
273 for those architectures which require it. */
274 if (gdbarch_integer_to_address_p (ctx->gdbarch))
276 gdb_byte *buf = alloca (ctx->addr_size);
277 struct type *int_type = get_unsigned_type (ctx->gdbarch,
278 value_type (result_val));
280 store_unsigned_integer (buf, ctx->addr_size, byte_order, result);
281 return gdbarch_integer_to_address (ctx->gdbarch, int_type, buf);
284 return (CORE_ADDR) result;
287 /* Retrieve the in_stack_memory flag of the N'th item on CTX's stack. */
290 dwarf_expr_fetch_in_stack_memory (struct dwarf_expr_context *ctx, int n)
292 if (ctx->stack_len <= n)
293 error (_("Asked for position %d of stack, "
294 "stack only has %d elements on it."),
296 return ctx->stack[ctx->stack_len - (1 + n)].in_stack_memory;
299 /* Return true if the expression stack is empty. */
302 dwarf_expr_stack_empty_p (struct dwarf_expr_context *ctx)
304 return ctx->stack_len == 0;
307 /* Add a new piece to CTX's piece list. */
309 add_piece (struct dwarf_expr_context *ctx, ULONGEST size, ULONGEST offset)
311 struct dwarf_expr_piece *p;
315 ctx->pieces = xrealloc (ctx->pieces,
317 * sizeof (struct dwarf_expr_piece)));
319 p = &ctx->pieces[ctx->num_pieces - 1];
320 p->location = ctx->location;
324 if (p->location == DWARF_VALUE_LITERAL)
326 p->v.literal.data = ctx->data;
327 p->v.literal.length = ctx->len;
329 else if (dwarf_expr_stack_empty_p (ctx))
331 p->location = DWARF_VALUE_OPTIMIZED_OUT;
332 /* Also reset the context's location, for our callers. This is
333 a somewhat strange approach, but this lets us avoid setting
334 the location to DWARF_VALUE_MEMORY in all the individual
335 cases in the evaluator. */
336 ctx->location = DWARF_VALUE_OPTIMIZED_OUT;
338 else if (p->location == DWARF_VALUE_MEMORY)
340 p->v.mem.addr = dwarf_expr_fetch_address (ctx, 0);
341 p->v.mem.in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, 0);
343 else if (p->location == DWARF_VALUE_IMPLICIT_POINTER)
345 p->v.ptr.die = ctx->len;
346 p->v.ptr.offset = value_as_long (dwarf_expr_fetch (ctx, 0));
348 else if (p->location == DWARF_VALUE_REGISTER)
349 p->v.regno = value_as_long (dwarf_expr_fetch (ctx, 0));
352 p->v.value = dwarf_expr_fetch (ctx, 0);
356 /* Evaluate the expression at ADDR (LEN bytes long) using the context
360 dwarf_expr_eval (struct dwarf_expr_context *ctx, const gdb_byte *addr,
363 int old_recursion_depth = ctx->recursion_depth;
365 execute_stack_op (ctx, addr, addr + len);
367 /* CTX RECURSION_DEPTH becomes invalid if an exception was thrown here. */
369 gdb_assert (ctx->recursion_depth == old_recursion_depth);
372 /* Decode the unsigned LEB128 constant at BUF into the variable pointed to
373 by R, and return the new value of BUF. Verify that it doesn't extend
374 past BUF_END. R can be NULL, the constant is then only skipped. */
377 read_uleb128 (const gdb_byte *buf, const gdb_byte *buf_end, ULONGEST * r)
386 error (_("read_uleb128: Corrupted DWARF expression."));
389 result |= ((ULONGEST) (byte & 0x7f)) << shift;
390 if ((byte & 0x80) == 0)
399 /* Decode the signed LEB128 constant at BUF into the variable pointed to
400 by R, and return the new value of BUF. Verify that it doesn't extend
401 past BUF_END. R can be NULL, the constant is then only skipped. */
404 read_sleb128 (const gdb_byte *buf, const gdb_byte *buf_end, LONGEST * r)
413 error (_("read_sleb128: Corrupted DWARF expression."));
416 result |= ((ULONGEST) (byte & 0x7f)) << shift;
418 if ((byte & 0x80) == 0)
421 if (shift < (sizeof (*r) * 8) && (byte & 0x40) != 0)
422 result |= -(((LONGEST) 1) << shift);
430 /* Check that the current operator is either at the end of an
431 expression, or that it is followed by a composition operator. */
434 dwarf_expr_require_composition (const gdb_byte *op_ptr, const gdb_byte *op_end,
437 /* It seems like DW_OP_GNU_uninit should be handled here. However,
438 it doesn't seem to make sense for DW_OP_*_value, and it was not
439 checked at the other place that this function is called. */
440 if (op_ptr != op_end && *op_ptr != DW_OP_piece && *op_ptr != DW_OP_bit_piece)
441 error (_("DWARF-2 expression error: `%s' operations must be "
442 "used either alone or in conjunction with DW_OP_piece "
443 "or DW_OP_bit_piece."),
447 /* Return true iff the types T1 and T2 are "the same". This only does
448 checks that might reasonably be needed to compare DWARF base
static int
base_types_equal_p (struct type *t1, struct type *t2)
{
  /* Only code, signedness and length matter for DWARF base types.  */
  if (TYPE_CODE (t1) != TYPE_CODE (t2))
    return 0;
  if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
    return 0;
  return TYPE_LENGTH (t1) == TYPE_LENGTH (t2);
}
461 /* A convenience function to call get_base_type on CTX and return the
462 result. DIE is the DIE whose type we need. SIZE is non-zero if
463 this function should verify that the resulting type has the correct
467 dwarf_get_base_type (struct dwarf_expr_context *ctx, ULONGEST die, int size)
471 if (ctx->funcs->get_base_type)
473 result = ctx->funcs->get_base_type (ctx, die);
475 error (_("Could not find type for DW_OP_GNU_const_type"));
476 if (size != 0 && TYPE_LENGTH (result) != size)
477 error (_("DW_OP_GNU_const_type has different sizes for type and data"));
480 /* Anything will do. */
481 result = builtin_type (ctx->gdbarch)->builtin_int;
486 /* If <BUF..BUF_END] contains DW_FORM_block* with single DW_OP_reg* return the
487 DWARF register number. Otherwise return -1. */
490 dwarf_block_to_dwarf_reg (const gdb_byte *buf, const gdb_byte *buf_end)
496 if (*buf >= DW_OP_reg0 && *buf <= DW_OP_reg31)
498 if (buf_end - buf != 1)
500 return *buf - DW_OP_reg0;
503 if (*buf == DW_OP_GNU_regval_type)
506 buf = read_uleb128 (buf, buf_end, &dwarf_reg);
507 buf = read_uleb128 (buf, buf_end, NULL);
509 else if (*buf == DW_OP_regx)
512 buf = read_uleb128 (buf, buf_end, &dwarf_reg);
516 if (buf != buf_end || (int) dwarf_reg != dwarf_reg)
521 /* If <BUF..BUF_END] contains DW_FORM_block* with single DW_OP_fbreg(X) fill
522 in FB_OFFSET_RETURN with the X offset and return 1. Otherwise return 0. */
525 dwarf_block_to_fb_offset (const gdb_byte *buf, const gdb_byte *buf_end,
526 CORE_ADDR *fb_offset_return)
533 if (*buf != DW_OP_fbreg)
537 buf = read_sleb128 (buf, buf_end, &fb_offset);
538 *fb_offset_return = fb_offset;
539 if (buf != buf_end || fb_offset != (LONGEST) *fb_offset_return)
545 /* If <BUF..BUF_END] contains DW_FORM_block* with single DW_OP_bregSP(X) fill
546 in SP_OFFSET_RETURN with the X offset and return 1. Otherwise return 0.
547 The matched SP register number depends on GDBARCH. */
550 dwarf_block_to_sp_offset (struct gdbarch *gdbarch, const gdb_byte *buf,
551 const gdb_byte *buf_end, CORE_ADDR *sp_offset_return)
558 if (*buf >= DW_OP_breg0 && *buf <= DW_OP_breg31)
560 dwarf_reg = *buf - DW_OP_breg0;
565 if (*buf != DW_OP_bregx)
568 buf = read_uleb128 (buf, buf_end, &dwarf_reg);
571 if (gdbarch_dwarf2_reg_to_regnum (gdbarch, dwarf_reg)
572 != gdbarch_sp_regnum (gdbarch))
575 buf = read_sleb128 (buf, buf_end, &sp_offset);
576 *sp_offset_return = sp_offset;
577 if (buf != buf_end || sp_offset != (LONGEST) *sp_offset_return)
583 /* The engine for the expression evaluator. Using the context in CTX,
584 evaluate the expression between OP_PTR and OP_END. */
/* NOTE(review): this span is a truncated extraction of the opcode
   interpreter -- most case labels, braces and many statements of the
   original are missing, and every line carries line-number residue
   from the extraction.  The code is left byte-for-byte untouched;
   comments added below are hedged, since opcode attribution can only
   be presumed from the surviving statements.  */
587 execute_stack_op (struct dwarf_expr_context *ctx,
588 const gdb_byte *op_ptr, const gdb_byte *op_end)
590 enum bfd_endian byte_order = gdbarch_byte_order (ctx->gdbarch);
591 /* Old-style "untyped" DWARF values need special treatment in a
592 couple of places, specifically DW_OP_mod and DW_OP_shr. We need
593 a special type for these values so we can distinguish them from
594 values that have an explicit type, because explicitly-typed
595 values do not need special treatment. This special type must be
596 different (in the `==' sense) from any base type coming from the
598 struct type *address_type = dwarf_expr_address_type (ctx);
600 ctx->location = DWARF_VALUE_MEMORY;
601 ctx->initialized = 1; /* Default is initialized. */
/* Guard against looping expressions: each recursive evaluation bumps
   recursion_depth, bounded by max_recursion_depth.  */
603 if (ctx->recursion_depth > ctx->max_recursion_depth)
604 error (_("DWARF-2 expression error: Loop detected (%d)."),
605 ctx->recursion_depth);
606 ctx->recursion_depth++;
/* Main interpretation loop: one iteration per DWARF operation.  */
608 while (op_ptr < op_end)
610 enum dwarf_location_atom op = *op_ptr++;
612 /* Assume the value is not in stack memory.
613 Code that knows otherwise sets this to 1.
614 Some arithmetic on stack addresses can probably be assumed to still
615 be a stack address, but we skip this complication for now.
616 This is just an optimization, so it's always ok to punt
617 and leave this as 0. */
618 int in_stack_memory = 0;
619 ULONGEST uoffset, reg;
621 struct value *result_val = NULL;
623 /* The DWARF expression might have a bug causing an infinite
624 loop. In that case, quitting is the only way out. */
/* Presumably DW_OP_lit0..DW_OP_lit31: push the literal op - DW_OP_lit0.  */
661 result = op - DW_OP_lit0;
662 result_val = value_from_ulongest (address_type, result);
/* Presumably DW_OP_addr: address-sized operand, relocated by
   ctx->offset unless followed by DW_OP_GNU_push_tls_address.  */
666 result = extract_unsigned_integer (op_ptr,
667 ctx->addr_size, byte_order);
668 op_ptr += ctx->addr_size;
669 /* Some versions of GCC emit DW_OP_addr before
670 DW_OP_GNU_push_tls_address. In this case the value is an
671 index, not an address. We don't support things like
672 branching between the address and the TLS op. */
673 if (op_ptr >= op_end || *op_ptr != DW_OP_GNU_push_tls_address)
674 result += ctx->offset;
675 result_val = value_from_ulongest (address_type, result);
/* Presumably DW_OP_const{1,2,4,8}{u,s}: fixed-width constants.  */
679 result = extract_unsigned_integer (op_ptr, 1, byte_order);
680 result_val = value_from_ulongest (address_type, result);
684 result = extract_signed_integer (op_ptr, 1, byte_order);
685 result_val = value_from_ulongest (address_type, result);
689 result = extract_unsigned_integer (op_ptr, 2, byte_order);
690 result_val = value_from_ulongest (address_type, result);
694 result = extract_signed_integer (op_ptr, 2, byte_order);
695 result_val = value_from_ulongest (address_type, result);
699 result = extract_unsigned_integer (op_ptr, 4, byte_order);
700 result_val = value_from_ulongest (address_type, result);
704 result = extract_signed_integer (op_ptr, 4, byte_order);
705 result_val = value_from_ulongest (address_type, result);
709 result = extract_unsigned_integer (op_ptr, 8, byte_order);
710 result_val = value_from_ulongest (address_type, result);
714 result = extract_signed_integer (op_ptr, 8, byte_order);
715 result_val = value_from_ulongest (address_type, result);
/* Presumably DW_OP_constu / DW_OP_consts: LEB128-encoded constants.  */
719 op_ptr = read_uleb128 (op_ptr, op_end, &uoffset);
721 result_val = value_from_ulongest (address_type, result);
724 op_ptr = read_sleb128 (op_ptr, op_end, &offset);
726 result_val = value_from_ulongest (address_type, result);
729 /* The DW_OP_reg operations are required to occur alone in
730 location expressions. */
764 && *op_ptr != DW_OP_piece
765 && *op_ptr != DW_OP_bit_piece
766 && *op_ptr != DW_OP_GNU_uninit)
767 error (_("DWARF-2 expression error: DW_OP_reg operations must be "
768 "used either alone or in conjunction with DW_OP_piece "
769 "or DW_OP_bit_piece."))
770 result = op - DW_OP_reg0;
771 result = op - DW_OP_reg0;
772 result_val = value_from_ulongest (address_type, result);
773 ctx->location = DWARF_VALUE_REGISTER;
/* Presumably DW_OP_regx: register number as a ULEB128 operand.  */
777 op_ptr = read_uleb128 (op_ptr, op_end, &reg);
778 dwarf_expr_require_composition (op_ptr, op_end, "DW_OP_regx");
781 result_val = value_from_ulongest (address_type, result);
782 ctx->location = DWARF_VALUE_REGISTER;
785 case DW_OP_implicit_value:
789 op_ptr = read_uleb128 (op_ptr, op_end, &len);
790 if (op_ptr + len > op_end)
791 error (_("DW_OP_implicit_value: too few bytes available."));
794 ctx->location = DWARF_VALUE_LITERAL;
796 dwarf_expr_require_composition (op_ptr, op_end,
797 "DW_OP_implicit_value");
801 case DW_OP_stack_value:
802 ctx->location = DWARF_VALUE_STACK;
803 dwarf_expr_require_composition (op_ptr, op_end, "DW_OP_stack_value");
806 case DW_OP_GNU_implicit_pointer:
811 if (ctx->ref_addr_size == -1)
812 error (_("DWARF-2 expression error: DW_OP_GNU_implicit_pointer "
813 "is not allowed in frame context"));
815 /* The referred-to DIE. */
816 ctx->len = extract_unsigned_integer (op_ptr, ctx->ref_addr_size,
818 op_ptr += ctx->ref_addr_size;
820 /* The byte offset into the data. */
821 op_ptr = read_sleb128 (op_ptr, op_end, &len);
822 result = (ULONGEST) len;
823 result_val = value_from_ulongest (address_type, result);
825 ctx->location = DWARF_VALUE_IMPLICIT_POINTER;
826 dwarf_expr_require_composition (op_ptr, op_end,
827 "DW_OP_GNU_implicit_pointer");
/* Presumably DW_OP_breg0..31 / DW_OP_bregx: register contents plus a
   SLEB128 offset.  */
864 op_ptr = read_sleb128 (op_ptr, op_end, &offset);
865 result = (ctx->funcs->read_reg) (ctx->baton, op - DW_OP_breg0);
867 result_val = value_from_ulongest (address_type, result);
872 op_ptr = read_uleb128 (op_ptr, op_end, &reg);
873 op_ptr = read_sleb128 (op_ptr, op_end, &offset);
874 result = (ctx->funcs->read_reg) (ctx->baton, reg);
876 result_val = value_from_ulongest (address_type, result);
/* Presumably DW_OP_fbreg: evaluate the frame-base expression
   recursively, then add the SLEB128 offset.  */
881 const gdb_byte *datastart;
883 unsigned int before_stack_len;
885 op_ptr = read_sleb128 (op_ptr, op_end, &offset);
886 /* Rather than create a whole new context, we simply
887 record the stack length before execution, then reset it
888 afterwards, effectively erasing whatever the recursive
890 before_stack_len = ctx->stack_len;
891 /* FIXME: cagney/2003-03-26: This code should be using
892 get_frame_base_address(), and then implement a dwarf2
893 specific this_base method. */
894 (ctx->funcs->get_frame_base) (ctx->baton, &datastart, &datalen);
895 dwarf_expr_eval (ctx, datastart, datalen);
896 if (ctx->location == DWARF_VALUE_MEMORY)
897 result = dwarf_expr_fetch_address (ctx, 0);
898 else if (ctx->location == DWARF_VALUE_REGISTER)
899 result = (ctx->funcs->read_reg) (ctx->baton,
900 value_as_long (dwarf_expr_fetch (ctx, 0)));
902 error (_("Not implemented: computing frame "
903 "base using explicit value operator"));
904 result = result + offset;
905 result_val = value_from_ulongest (address_type, result);
907 ctx->stack_len = before_stack_len;
908 ctx->location = DWARF_VALUE_MEMORY;
/* Presumably DW_OP_dup / DW_OP_drop / DW_OP_pick / DW_OP_over /
   DW_OP_swap / DW_OP_rot: pure stack shuffling.  */
913 result_val = dwarf_expr_fetch (ctx, 0);
914 in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, 0);
918 dwarf_expr_pop (ctx);
923 result_val = dwarf_expr_fetch (ctx, offset);
924 in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, offset);
929 struct dwarf_stack_value t1, t2;
931 if (ctx->stack_len < 2)
932 error (_("Not enough elements for "
933 "DW_OP_swap. Need 2, have %d."),
935 t1 = ctx->stack[ctx->stack_len - 1];
936 t2 = ctx->stack[ctx->stack_len - 2];
937 ctx->stack[ctx->stack_len - 1] = t2;
938 ctx->stack[ctx->stack_len - 2] = t1;
943 result_val = dwarf_expr_fetch (ctx, 1);
944 in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, 1);
949 struct dwarf_stack_value t1, t2, t3;
951 if (ctx->stack_len < 3)
952 error (_("Not enough elements for "
953 "DW_OP_rot. Need 3, have %d."),
955 t1 = ctx->stack[ctx->stack_len - 1];
956 t2 = ctx->stack[ctx->stack_len - 2];
957 t3 = ctx->stack[ctx->stack_len - 3];
958 ctx->stack[ctx->stack_len - 1] = t2;
959 ctx->stack[ctx->stack_len - 2] = t3;
960 ctx->stack[ctx->stack_len - 3] = t1;
965 case DW_OP_deref_size:
966 case DW_OP_GNU_deref_type:
968 int addr_size = (op == DW_OP_deref ? ctx->addr_size : *op_ptr++);
969 gdb_byte *buf = alloca (addr_size);
970 CORE_ADDR addr = dwarf_expr_fetch_address (ctx, 0);
973 dwarf_expr_pop (ctx);
975 if (op == DW_OP_GNU_deref_type)
979 op_ptr = read_uleb128 (op_ptr, op_end, &type_die);
980 type = dwarf_get_base_type (ctx, type_die, 0);
985 (ctx->funcs->read_mem) (ctx->baton, buf, addr, addr_size);
987 /* If the size of the object read from memory is different
988 from the type length, we need to zero-extend it. */
989 if (TYPE_LENGTH (type) != addr_size)
992 extract_unsigned_integer (buf, addr_size, byte_order);
994 buf = alloca (TYPE_LENGTH (type));
995 store_unsigned_integer (buf, TYPE_LENGTH (type),
999 result_val = value_from_contents_and_address (type, buf, addr);
1006 case DW_OP_plus_uconst:
1008 /* Unary operations. */
1009 result_val = dwarf_expr_fetch (ctx, 0);
1010 dwarf_expr_pop (ctx);
/* Presumably DW_OP_abs / DW_OP_neg / DW_OP_not / DW_OP_plus_uconst.  */
1015 if (value_less (result_val,
1016 value_zero (value_type (result_val), not_lval)))
1017 result_val = value_neg (result_val);
1020 result_val = value_neg (result_val);
1023 dwarf_require_integral (value_type (result_val));
1024 result_val = value_complement (result_val);
1026 case DW_OP_plus_uconst:
1027 dwarf_require_integral (value_type (result_val));
1028 result = value_as_long (result_val);
1029 op_ptr = read_uleb128 (op_ptr, op_end, &reg);
1031 result_val = value_from_ulongest (address_type, result);
/* Binary operations pop SECOND (top of stack) first, then FIRST.  */
1055 /* Binary operations. */
1056 struct value *first, *second;
1058 second = dwarf_expr_fetch (ctx, 0);
1059 dwarf_expr_pop (ctx);
1061 first = dwarf_expr_fetch (ctx, 0);
1062 dwarf_expr_pop (ctx);
1064 if (! base_types_equal_p (value_type (first), value_type (second)))
1065 error (_("Incompatible types on DWARF stack"));
1070 dwarf_require_integral (value_type (first));
1071 dwarf_require_integral (value_type (second));
1072 result_val = value_binop (first, second, BINOP_BITWISE_AND);
1075 result_val = value_binop (first, second, BINOP_DIV);
1078 result_val = value_binop (first, second, BINOP_SUB);
1083 struct type *orig_type = value_type (first);
1085 /* We have to special-case "old-style" untyped values
1086 -- these must have mod computed using unsigned
1088 if (orig_type == address_type)
1091 = get_unsigned_type (ctx->gdbarch, orig_type);
1094 first = value_cast (utype, first);
1095 second = value_cast (utype, second);
1097 /* Note that value_binop doesn't handle float or
1098 decimal float here. This seems unimportant. */
1099 result_val = value_binop (first, second, BINOP_MOD);
1101 result_val = value_cast (orig_type, result_val);
1105 result_val = value_binop (first, second, BINOP_MUL);
1108 dwarf_require_integral (value_type (first));
1109 dwarf_require_integral (value_type (second));
1110 result_val = value_binop (first, second, BINOP_BITWISE_IOR);
1113 result_val = value_binop (first, second, BINOP_ADD);
1116 dwarf_require_integral (value_type (first));
1117 dwarf_require_integral (value_type (second));
1118 result_val = value_binop (first, second, BINOP_LSH);
/* Presumably DW_OP_shr: logical shift, forced unsigned for untyped
   values.  */
1121 dwarf_require_integral (value_type (first));
1122 dwarf_require_integral (value_type (second));
1123 if (!TYPE_UNSIGNED (value_type (first)))
1126 = get_unsigned_type (ctx->gdbarch, value_type (first));
1128 first = value_cast (utype, first);
1131 result_val = value_binop (first, second, BINOP_RSH);
1132 /* Make sure we wind up with the same type we started
1134 if (value_type (result_val) != value_type (second))
1135 result_val = value_cast (value_type (second), result_val);
/* Presumably DW_OP_shra: arithmetic shift, forced signed.  */
1138 dwarf_require_integral (value_type (first));
1139 dwarf_require_integral (value_type (second));
1140 if (TYPE_UNSIGNED (value_type (first)))
1143 = get_signed_type (ctx->gdbarch, value_type (first));
1145 first = value_cast (stype, first);
1148 result_val = value_binop (first, second, BINOP_RSH);
1149 /* Make sure we wind up with the same type we started
1151 if (value_type (result_val) != value_type (second))
1152 result_val = value_cast (value_type (second), result_val);
1155 dwarf_require_integral (value_type (first));
1156 dwarf_require_integral (value_type (second));
1157 result_val = value_binop (first, second, BINOP_BITWISE_XOR);
1160 /* A <= B is !(B < A). */
1161 result = ! value_less (second, first);
1162 result_val = value_from_ulongest (address_type, result);
1165 /* A >= B is !(A < B). */
1166 result = ! value_less (first, second);
1167 result_val = value_from_ulongest (address_type, result);
1170 result = value_equal (first, second);
1171 result_val = value_from_ulongest (address_type, result);
1174 result = value_less (first, second);
1175 result_val = value_from_ulongest (address_type, result);
1178 /* A > B is B < A. */
1179 result = value_less (second, first);
1180 result_val = value_from_ulongest (address_type, result);
1183 result = ! value_equal (first, second);
1184 result_val = value_from_ulongest (address_type, result);
1187 internal_error (__FILE__, __LINE__,
1188 _("Can't be reached."));
1193 case DW_OP_call_frame_cfa:
1194 result = (ctx->funcs->get_frame_cfa) (ctx->baton);
1195 result_val = value_from_ulongest (address_type, result);
1196 in_stack_memory = 1;
1199 case DW_OP_GNU_push_tls_address:
1200 /* Variable is at a constant offset in the thread-local
1201 storage block into the objfile for the current thread and
1202 the dynamic linker module containing this expression. Here
1203 we return returns the offset from that base. The top of the
1204 stack has the offset from the beginning of the thread
1205 control block at which the variable is located. Nothing
1206 should follow this operator, so the top of stack would be
1208 result = value_as_long (dwarf_expr_fetch (ctx, 0));
1209 dwarf_expr_pop (ctx);
1210 result = (ctx->funcs->get_tls_address) (ctx->baton, result);
1211 result_val = value_from_ulongest (address_type, result);
/* Presumably DW_OP_skip / DW_OP_bra: 2-byte signed branch offsets;
   DW_OP_bra branches only when the popped value is nonzero.  */
1215 offset = extract_signed_integer (op_ptr, 2, byte_order);
1224 offset = extract_signed_integer (op_ptr, 2, byte_order);
1226 val = dwarf_expr_fetch (ctx, 0);
1227 dwarf_require_integral (value_type (val));
1228 if (value_as_long (val) != 0)
1230 dwarf_expr_pop (ctx);
/* Presumably DW_OP_piece: SIZE operand is in bytes, recorded in bits.  */
1241 /* Record the piece. */
1242 op_ptr = read_uleb128 (op_ptr, op_end, &size);
1243 add_piece (ctx, 8 * size, 0);
1245 /* Pop off the address/regnum, and reset the location
1247 if (ctx->location != DWARF_VALUE_LITERAL
1248 && ctx->location != DWARF_VALUE_OPTIMIZED_OUT)
1249 dwarf_expr_pop (ctx);
1250 ctx->location = DWARF_VALUE_MEMORY;
1254 case DW_OP_bit_piece:
1256 ULONGEST size, offset;
1258 /* Record the piece. */
1259 op_ptr = read_uleb128 (op_ptr, op_end, &size);
1260 op_ptr = read_uleb128 (op_ptr, op_end, &offset);
1261 add_piece (ctx, size, offset);
1263 /* Pop off the address/regnum, and reset the location
1265 if (ctx->location != DWARF_VALUE_LITERAL
1266 && ctx->location != DWARF_VALUE_OPTIMIZED_OUT)
1267 dwarf_expr_pop (ctx);
1268 ctx->location = DWARF_VALUE_MEMORY;
1272 case DW_OP_GNU_uninit:
1273 if (op_ptr != op_end)
1274 error (_("DWARF-2 expression error: DW_OP_GNU_uninit must always "
1275 "be the very last op."));
1277 ctx->initialized = 0;
/* Presumably DW_OP_call2 / DW_OP_call4: delegate to the dwarf_call
   callback with the DIE offset operand.  */
1281 result = extract_unsigned_integer (op_ptr, 2, byte_order);
1283 ctx->funcs->dwarf_call (ctx, result);
1287 result = extract_unsigned_integer (op_ptr, 4, byte_order);
1289 ctx->funcs->dwarf_call (ctx, result);
1292 case DW_OP_GNU_entry_value:
1296 CORE_ADDR deref_size;
1298 op_ptr = read_uleb128 (op_ptr, op_end, &len);
1299 if (op_ptr + len > op_end)
1300 error (_("DW_OP_GNU_entry_value: too few bytes available."));
1302 dwarf_reg = dwarf_block_to_dwarf_reg (op_ptr, op_ptr + len);
1303 if (dwarf_reg != -1)
1306 ctx->funcs->push_dwarf_reg_entry_value (ctx, dwarf_reg,
1311 error (_("DWARF-2 expression error: DW_OP_GNU_entry_value is "
1312 "supported only for single DW_OP_reg*"));
1315 case DW_OP_GNU_const_type:
1319 const gdb_byte *data;
1322 op_ptr = read_uleb128 (op_ptr, op_end, &type_die);
1327 type = dwarf_get_base_type (ctx, type_die, n);
1328 result_val = value_from_contents (type, data);
1332 case DW_OP_GNU_regval_type:
1337 op_ptr = read_uleb128 (op_ptr, op_end, &reg);
1338 op_ptr = read_uleb128 (op_ptr, op_end, &type_die);
1340 type = dwarf_get_base_type (ctx, type_die, 0);
1341 result = (ctx->funcs->read_reg) (ctx->baton, reg);
1342 result_val = value_from_ulongest (address_type, result);
1343 result_val = value_from_contents (type,
1344 value_contents_all (result_val));
1348 case DW_OP_GNU_convert:
1349 case DW_OP_GNU_reinterpret:
1354 op_ptr = read_uleb128 (op_ptr, op_end, &type_die);
1357 type = address_type;
1359 type = dwarf_get_base_type (ctx, type_die, 0);
1361 result_val = dwarf_expr_fetch (ctx, 0);
1362 dwarf_expr_pop (ctx);
1364 if (op == DW_OP_GNU_convert)
1365 result_val = value_cast (type, result_val);
1366 else if (type == value_type (result_val))
1370 else if (TYPE_LENGTH (type)
1371 != TYPE_LENGTH (value_type (result_val)))
1372 error (_("DW_OP_GNU_reinterpret has wrong size"));
1375 = value_from_contents (type,
1376 value_contents_all (result_val));
1381 error (_("Unhandled dwarf expression opcode 0x%x"), op);
1384 /* Most things push a result value. */
1385 gdb_assert (result_val != NULL);
1386 dwarf_expr_push (ctx, result_val, in_stack_memory);
1391 /* To simplify our main caller, if the result is an implicit
1392 pointer, then make a pieced value. This is ok because we can't
1393 have implicit pointers in contexts where pieces are invalid. */
1394 if (ctx->location == DWARF_VALUE_IMPLICIT_POINTER)
1395 add_piece (ctx, 8 * ctx->addr_size, 0);
1398 ctx->recursion_depth--;
1399 gdb_assert (ctx->recursion_depth >= 0);
1402 /* Stub dwarf_expr_context_funcs.read_reg implementation. */
1405 ctx_no_read_reg (void *baton, int regnum)
1407 error (_("Registers access is invalid in this context"));
1410 /* Stub dwarf_expr_context_funcs.get_frame_base implementation. */
1413 ctx_no_get_frame_base (void *baton, const gdb_byte **start, size_t *length)
1415 error (_("%s is invalid in this context"), "DW_OP_fbreg");
1418 /* Stub dwarf_expr_context_funcs.get_frame_cfa implementation. */
1421 ctx_no_get_frame_cfa (void *baton)
1423 error (_("%s is invalid in this context"), "DW_OP_call_frame_cfa");
1426 /* Stub dwarf_expr_context_funcs.get_frame_pc implementation. */
1429 ctx_no_get_frame_pc (void *baton)
1431 error (_("%s is invalid in this context"), "DW_OP_GNU_implicit_pointer");
1434 /* Stub dwarf_expr_context_funcs.get_tls_address implementation. */
1437 ctx_no_get_tls_address (void *baton, CORE_ADDR offset)
1439 error (_("%s is invalid in this context"), "DW_OP_GNU_push_tls_address");
1442 /* Stub dwarf_expr_context_funcs.dwarf_call implementation. */
1445 ctx_no_dwarf_call (struct dwarf_expr_context *ctx, size_t die_offset)
1447 error (_("%s is invalid in this context"), "DW_OP_call*");
1450 /* Stub dwarf_expr_context_funcs.get_base_type implementation. */
1453 ctx_no_get_base_type (struct dwarf_expr_context *ctx, size_t die)
1455 error (_("Support for typed DWARF is not supported in this context"));
1458 /* Stub dwarf_expr_context_funcs.push_dwarf_block_entry_value
1462 ctx_no_push_dwarf_reg_entry_value (struct dwarf_expr_context *ctx,
1463 int dwarf_reg, CORE_ADDR fb_offset)
1465 internal_error (__FILE__, __LINE__,
1466 _("Support for DW_OP_GNU_entry_value is unimplemented"));
1470 _initialize_dwarf2expr (void)
1473 = gdbarch_data_register_post_init (dwarf_gdbarch_types_init);