1 /* DWARF 2 Expression Evaluator.
3 Copyright (C) 2001, 2002, 2003, 2005, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc.
6 Contributed by Daniel Berlin (dan@dberlin.org)
8 This file is part of GDB.
10 This program is free software; you can redistribute it and/or modify
11 it under the terms of the GNU General Public License as published by
12 the Free Software Foundation; either version 3 of the License, or
13 (at your option) any later version.
15 This program is distributed in the hope that it will be useful,
16 but WITHOUT ANY WARRANTY; without even the implied warranty of
17 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 GNU General Public License for more details.
20 You should have received a copy of the GNU General Public License
21 along with this program. If not, see <http://www.gnu.org/licenses/>. */
29 #include "dwarf2expr.h"
30 #include "gdb_assert.h"
32 /* Local prototypes. */
34 static void execute_stack_op (struct dwarf_expr_context *,
35 const gdb_byte *, const gdb_byte *);
37 /* Cookie for gdbarch data. */
39 static struct gdbarch_data *dwarf_arch_cookie;
/* This holds gdbarch-specific types used by the DWARF expression
   evaluator.  See comments in execute_stack_op.  */

struct dwarf_gdbarch_types
{
  /* Lazily-created signed integer types for 2-, 4- and 8-byte
     addresses; indexed by dwarf_expr_address_type.  */
  struct type *dw_types[3];
};
49 /* Allocate and fill in dwarf_gdbarch_types for an arch. */
52 dwarf_gdbarch_types_init (struct gdbarch *gdbarch)
54 struct dwarf_gdbarch_types *types
55 = GDBARCH_OBSTACK_ZALLOC (gdbarch, struct dwarf_gdbarch_types);
57 /* The types themselves are lazily initialized. */
62 /* Return the type used for DWARF operations where the type is
63 unspecified in the DWARF spec. Only certain sizes are
67 dwarf_expr_address_type (struct dwarf_expr_context *ctx)
69 struct dwarf_gdbarch_types *types = gdbarch_data (ctx->gdbarch,
73 if (ctx->addr_size == 2)
75 else if (ctx->addr_size == 4)
77 else if (ctx->addr_size == 8)
80 error (_("Unsupported address size in DWARF expressions: %d bits"),
83 if (types->dw_types[ndx] == NULL)
85 = arch_integer_type (ctx->gdbarch,
87 0, "<signed DWARF address type>");
89 return types->dw_types[ndx];
92 /* Create a new context for the expression evaluator. */
94 struct dwarf_expr_context *
95 new_dwarf_expr_context (void)
97 struct dwarf_expr_context *retval;
99 retval = xcalloc (1, sizeof (struct dwarf_expr_context));
100 retval->stack_len = 0;
101 retval->stack_allocated = 10;
102 retval->stack = xmalloc (retval->stack_allocated
103 * sizeof (struct dwarf_stack_value));
104 retval->num_pieces = 0;
106 retval->max_recursion_depth = 0x100;
107 retval->mark = value_mark ();
111 /* Release the memory allocated to CTX. */
114 free_dwarf_expr_context (struct dwarf_expr_context *ctx)
116 value_free_to_mark (ctx->mark);
/* Helper for make_cleanup_free_dwarf_expr_context.  ARG is the
   dwarf_expr_context to free.  */

static void
free_dwarf_expr_context_cleanup (void *arg)
{
  free_dwarf_expr_context (arg);
}
130 /* Return a cleanup that calls free_dwarf_expr_context. */
133 make_cleanup_free_dwarf_expr_context (struct dwarf_expr_context *ctx)
135 return make_cleanup (free_dwarf_expr_context_cleanup, ctx);
138 /* Expand the memory allocated to CTX's stack to contain at least
139 NEED more elements than are currently used. */
142 dwarf_expr_grow_stack (struct dwarf_expr_context *ctx, size_t need)
144 if (ctx->stack_len + need > ctx->stack_allocated)
146 size_t newlen = ctx->stack_len + need + 10;
148 ctx->stack = xrealloc (ctx->stack,
149 newlen * sizeof (struct dwarf_stack_value));
150 ctx->stack_allocated = newlen;
154 /* Push VALUE onto CTX's stack. */
157 dwarf_expr_push (struct dwarf_expr_context *ctx, struct value *value,
160 struct dwarf_stack_value *v;
162 dwarf_expr_grow_stack (ctx, 1);
163 v = &ctx->stack[ctx->stack_len++];
165 v->in_stack_memory = in_stack_memory;
168 /* Push VALUE onto CTX's stack. */
171 dwarf_expr_push_address (struct dwarf_expr_context *ctx, CORE_ADDR value,
174 dwarf_expr_push (ctx,
175 value_from_ulongest (dwarf_expr_address_type (ctx), value),
179 /* Pop the top item off of CTX's stack. */
182 dwarf_expr_pop (struct dwarf_expr_context *ctx)
184 if (ctx->stack_len <= 0)
185 error (_("dwarf expression stack underflow"));
189 /* Retrieve the N'th item on CTX's stack. */
192 dwarf_expr_fetch (struct dwarf_expr_context *ctx, int n)
194 if (ctx->stack_len <= n)
195 error (_("Asked for position %d of stack, "
196 "stack only has %d elements on it."),
198 return ctx->stack[ctx->stack_len - (1 + n)].value;
201 /* Require that TYPE be an integral type; throw an exception if not. */
204 dwarf_require_integral (struct type *type)
206 if (TYPE_CODE (type) != TYPE_CODE_INT
207 && TYPE_CODE (type) != TYPE_CODE_CHAR
208 && TYPE_CODE (type) != TYPE_CODE_BOOL)
209 error (_("integral type expected in DWARF expression"));
212 /* Return the unsigned form of TYPE. TYPE is necessarily an integral
216 get_unsigned_type (struct gdbarch *gdbarch, struct type *type)
218 switch (TYPE_LENGTH (type))
221 return builtin_type (gdbarch)->builtin_uint8;
223 return builtin_type (gdbarch)->builtin_uint16;
225 return builtin_type (gdbarch)->builtin_uint32;
227 return builtin_type (gdbarch)->builtin_uint64;
229 error (_("no unsigned variant found for type, while evaluating "
230 "DWARF expression"));
234 /* Retrieve the N'th item on CTX's stack, converted to an address. */
237 dwarf_expr_fetch_address (struct dwarf_expr_context *ctx, int n)
239 struct value *result_val = dwarf_expr_fetch (ctx, n);
240 enum bfd_endian byte_order = gdbarch_byte_order (ctx->gdbarch);
243 dwarf_require_integral (value_type (result_val));
244 result = extract_unsigned_integer (value_contents (result_val),
245 TYPE_LENGTH (value_type (result_val)),
248 /* For most architectures, calling extract_unsigned_integer() alone
249 is sufficient for extracting an address. However, some
250 architectures (e.g. MIPS) use signed addresses and using
251 extract_unsigned_integer() will not produce a correct
252 result. Make sure we invoke gdbarch_integer_to_address()
253 for those architectures which require it. */
254 if (gdbarch_integer_to_address_p (ctx->gdbarch))
256 gdb_byte *buf = alloca (ctx->addr_size);
257 struct type *int_type = get_unsigned_type (ctx->gdbarch,
258 value_type (result_val));
260 store_unsigned_integer (buf, ctx->addr_size, byte_order, result);
261 return gdbarch_integer_to_address (ctx->gdbarch, int_type, buf);
264 return (CORE_ADDR) result;
267 /* Retrieve the in_stack_memory flag of the N'th item on CTX's stack. */
270 dwarf_expr_fetch_in_stack_memory (struct dwarf_expr_context *ctx, int n)
272 if (ctx->stack_len <= n)
273 error (_("Asked for position %d of stack, "
274 "stack only has %d elements on it."),
276 return ctx->stack[ctx->stack_len - (1 + n)].in_stack_memory;
279 /* Return true if the expression stack is empty. */
282 dwarf_expr_stack_empty_p (struct dwarf_expr_context *ctx)
284 return ctx->stack_len == 0;
287 /* Add a new piece to CTX's piece list. */
289 add_piece (struct dwarf_expr_context *ctx, ULONGEST size, ULONGEST offset)
291 struct dwarf_expr_piece *p;
295 ctx->pieces = xrealloc (ctx->pieces,
297 * sizeof (struct dwarf_expr_piece)));
299 p = &ctx->pieces[ctx->num_pieces - 1];
300 p->location = ctx->location;
304 if (p->location == DWARF_VALUE_LITERAL)
306 p->v.literal.data = ctx->data;
307 p->v.literal.length = ctx->len;
309 else if (dwarf_expr_stack_empty_p (ctx))
311 p->location = DWARF_VALUE_OPTIMIZED_OUT;
312 /* Also reset the context's location, for our callers. This is
313 a somewhat strange approach, but this lets us avoid setting
314 the location to DWARF_VALUE_MEMORY in all the individual
315 cases in the evaluator. */
316 ctx->location = DWARF_VALUE_OPTIMIZED_OUT;
318 else if (p->location == DWARF_VALUE_MEMORY)
320 p->v.mem.addr = dwarf_expr_fetch_address (ctx, 0);
321 p->v.mem.in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, 0);
323 else if (p->location == DWARF_VALUE_IMPLICIT_POINTER)
325 p->v.ptr.die = ctx->len;
326 p->v.ptr.offset = value_as_long (dwarf_expr_fetch (ctx, 0));
328 else if (p->location == DWARF_VALUE_REGISTER)
329 p->v.regno = value_as_long (dwarf_expr_fetch (ctx, 0));
332 p->v.value = dwarf_expr_fetch (ctx, 0);
336 /* Evaluate the expression at ADDR (LEN bytes long) using the context
340 dwarf_expr_eval (struct dwarf_expr_context *ctx, const gdb_byte *addr,
343 int old_recursion_depth = ctx->recursion_depth;
345 execute_stack_op (ctx, addr, addr + len);
347 /* CTX RECURSION_DEPTH becomes invalid if an exception was thrown here. */
349 gdb_assert (ctx->recursion_depth == old_recursion_depth);
352 /* Decode the unsigned LEB128 constant at BUF into the variable pointed to
353 by R, and return the new value of BUF. Verify that it doesn't extend
357 read_uleb128 (const gdb_byte *buf, const gdb_byte *buf_end, ULONGEST * r)
366 error (_("read_uleb128: Corrupted DWARF expression."));
369 result |= (byte & 0x7f) << shift;
370 if ((byte & 0x80) == 0)
378 /* Decode the signed LEB128 constant at BUF into the variable pointed to
379 by R, and return the new value of BUF. Verify that it doesn't extend
383 read_sleb128 (const gdb_byte *buf, const gdb_byte *buf_end, LONGEST * r)
392 error (_("read_sleb128: Corrupted DWARF expression."));
395 result |= (byte & 0x7f) << shift;
397 if ((byte & 0x80) == 0)
400 if (shift < (sizeof (*r) * 8) && (byte & 0x40) != 0)
401 result |= -(1 << shift);
408 /* Check that the current operator is either at the end of an
409 expression, or that it is followed by a composition operator. */
412 dwarf_expr_require_composition (const gdb_byte *op_ptr, const gdb_byte *op_end,
415 /* It seems like DW_OP_GNU_uninit should be handled here. However,
416 it doesn't seem to make sense for DW_OP_*_value, and it was not
417 checked at the other place that this function is called. */
418 if (op_ptr != op_end && *op_ptr != DW_OP_piece && *op_ptr != DW_OP_bit_piece)
419 error (_("DWARF-2 expression error: `%s' operations must be "
420 "used either alone or in conjuction with DW_OP_piece "
421 "or DW_OP_bit_piece."),
/* Return true iff the types T1 and T2 are "the same".  This only does
   checks that might reasonably be needed to compare DWARF base
   types: type code, signedness and length.  */

static int
base_types_equal_p (struct type *t1, struct type *t2)
{
  if (TYPE_CODE (t1) != TYPE_CODE (t2))
    return 0;
  if (TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
    return 0;
  return TYPE_LENGTH (t1) == TYPE_LENGTH (t2);
}
439 /* A convenience function to call get_base_type on CTX and return the
440 result. DIE is the DIE whose type we need. SIZE is non-zero if
441 this function should verify that the resulting type has the correct
445 dwarf_get_base_type (struct dwarf_expr_context *ctx, ULONGEST die, int size)
449 if (ctx->get_base_type)
451 result = ctx->get_base_type (ctx, die);
452 if (size != 0 && TYPE_LENGTH (result) != size)
453 error (_("DW_OP_GNU_const_type has different sizes for type and data"));
456 /* Anything will do. */
457 result = builtin_type (ctx->gdbarch)->builtin_int;
/* NOTE(review): this function is reproduced from a numbered listing in
   which many original source lines were elided (the leading decimal
   column is the original file's line number and it skips values), so
   several case labels, braces, and statements are missing below.
   Restore the elided lines from the original file before compiling.
   A few "®" tokens below look like mojibake for "&reg" -- confirm
   against the original.  Comments describe only what the visible
   lines show.  */
462 /* The engine for the expression evaluator.  Using the context in CTX,
463 evaluate the expression between OP_PTR and OP_END. */
466 execute_stack_op (struct dwarf_expr_context *ctx,
467 const gdb_byte *op_ptr, const gdb_byte *op_end)
469 enum bfd_endian byte_order = gdbarch_byte_order (ctx->gdbarch);
470 /* Old-style "untyped" DWARF values need special treatment in a
471 couple of places, specifically DW_OP_mod and DW_OP_shr. We need
472 a special type for these values so we can distinguish them from
473 values that have an explicit type, because explicitly-typed
474 values do not need special treatment. This special type must be
475 different (in the `==' sense) from any base type coming from the
477 struct type *address_type = dwarf_expr_address_type (ctx);
479 ctx->location = DWARF_VALUE_MEMORY;
480 ctx->initialized = 1; /* Default is initialized. */
/* Guard against runaway recursion through DW_OP_fbreg / DW_OP_call*.  */
482 if (ctx->recursion_depth > ctx->max_recursion_depth)
483 error (_("DWARF-2 expression error: Loop detected (%d)."),
484 ctx->recursion_depth);
485 ctx->recursion_depth++;
/* Main interpreter loop: one opcode per iteration.  */
487 while (op_ptr < op_end)
489 enum dwarf_location_atom op = *op_ptr++;
491 /* Assume the value is not in stack memory.
492 Code that knows otherwise sets this to 1.
493 Some arithmetic on stack addresses can probably be assumed to still
494 be a stack address, but we skip this complication for now.
495 This is just an optimization, so it's always ok to punt
496 and leave this as 0. */
497 int in_stack_memory = 0;
498 ULONGEST uoffset, reg;
500 result_val = NULL;

/* DW_OP_lit0..DW_OP_lit31: push the small literal encoded in the
   opcode itself.  */
536 result = op - DW_OP_lit0;
537 result_val = value_from_ulongest (address_type, result);

/* DW_OP_addr: an address-sized constant, relocated by ctx->offset
   unless it feeds DW_OP_GNU_push_tls_address (see comment below).  */
541 result = extract_unsigned_integer (op_ptr,
542 ctx->addr_size, byte_order);
543 op_ptr += ctx->addr_size;
544 /* Some versions of GCC emit DW_OP_addr before
545 DW_OP_GNU_push_tls_address. In this case the value is an
546 index, not an address. We don't support things like
547 branching between the address and the TLS op. */
548 if (op_ptr >= op_end || *op_ptr != DW_OP_GNU_push_tls_address)
549 result += ctx->offset;
550 result_val = value_from_ulongest (address_type, result);

/* DW_OP_const1u/1s/2u/2s/4u/4s/8u/8s: fixed-width immediate
   constants, pushed as untyped values.  */
554 result = extract_unsigned_integer (op_ptr, 1, byte_order);
555 result_val = value_from_ulongest (address_type, result);
559 result = extract_signed_integer (op_ptr, 1, byte_order);
560 result_val = value_from_ulongest (address_type, result);
564 result = extract_unsigned_integer (op_ptr, 2, byte_order);
565 result_val = value_from_ulongest (address_type, result);
569 result = extract_signed_integer (op_ptr, 2, byte_order);
570 result_val = value_from_ulongest (address_type, result);
574 result = extract_unsigned_integer (op_ptr, 4, byte_order);
575 result_val = value_from_ulongest (address_type, result);
579 result = extract_signed_integer (op_ptr, 4, byte_order);
580 result_val = value_from_ulongest (address_type, result);
584 result = extract_unsigned_integer (op_ptr, 8, byte_order);
585 result_val = value_from_ulongest (address_type, result);
589 result = extract_signed_integer (op_ptr, 8, byte_order);
590 result_val = value_from_ulongest (address_type, result);

/* DW_OP_constu / DW_OP_consts: LEB128-encoded constants.  */
594 op_ptr = read_uleb128 (op_ptr, op_end, &uoffset);
596 result_val = value_from_ulongest (address_type, result);
599 op_ptr = read_sleb128 (op_ptr, op_end, &offset);
601 result_val = value_from_ulongest (address_type, result);

604 /* The DW_OP_reg operations are required to occur alone in
605 location expressions. */
639 && *op_ptr != DW_OP_piece
640 && *op_ptr != DW_OP_bit_piece
641 && *op_ptr != DW_OP_GNU_uninit)
642 error (_("DWARF-2 expression error: DW_OP_reg operations must be "
643 "used either alone or in conjuction with DW_OP_piece "
644 "or DW_OP_bit_piece."));
/* DW_OP_reg0..31 / DW_OP_regx: name a register as the location.  */
646 result = op - DW_OP_reg0;
647 result_val = value_from_ulongest (address_type, result);
648 ctx->location = DWARF_VALUE_REGISTER;
652 op_ptr = read_uleb128 (op_ptr, op_end, ®);
653 dwarf_expr_require_composition (op_ptr, op_end, "DW_OP_regx");
656 result_val = value_from_ulongest (address_type, result);
657 ctx->location = DWARF_VALUE_REGISTER;

/* DW_OP_implicit_value: the value is a literal byte block embedded
   in the expression itself.  */
660 case DW_OP_implicit_value:
664 op_ptr = read_uleb128 (op_ptr, op_end, &len);
665 if (op_ptr + len > op_end)
666 error (_("DW_OP_implicit_value: too few bytes available."));
669 ctx->location = DWARF_VALUE_LITERAL;
671 dwarf_expr_require_composition (op_ptr, op_end,
672 "DW_OP_implicit_value");

/* DW_OP_stack_value: the object's value (not address) is the top of
   the DWARF stack.  */
676 case DW_OP_stack_value:
677 ctx->location = DWARF_VALUE_STACK;
678 dwarf_expr_require_composition (op_ptr, op_end, "DW_OP_stack_value");

/* DW_OP_GNU_implicit_pointer: points at an optimized-out object
   described by another DIE, plus a byte offset into it.  */
681 case DW_OP_GNU_implicit_pointer:
686 /* The referred-to DIE. */
687 ctx->len = extract_unsigned_integer (op_ptr, ctx->addr_size,
689 op_ptr += ctx->addr_size;
691 /* The byte offset into the data. */
692 op_ptr = read_sleb128 (op_ptr, op_end, &len);
693 result = (ULONGEST) len;
694 result_val = value_from_ulongest (address_type, result);
696 ctx->location = DWARF_VALUE_IMPLICIT_POINTER;
697 dwarf_expr_require_composition (op_ptr, op_end,
698 "DW_OP_GNU_implicit_pointer");

/* DW_OP_breg0..31 / DW_OP_bregx: register contents plus an SLEB128
   displacement.  */
735 op_ptr = read_sleb128 (op_ptr, op_end, &offset);
736 result = (ctx->read_reg) (ctx->baton, op - DW_OP_breg0);
738 result_val = value_from_ulongest (address_type, result);
743 op_ptr = read_uleb128 (op_ptr, op_end, ®);
744 op_ptr = read_sleb128 (op_ptr, op_end, &offset);
745 result = (ctx->read_reg) (ctx->baton, reg);
747 result_val = value_from_ulongest (address_type, result);

/* DW_OP_fbreg: offset from the frame base, which is itself described
   by a sub-expression evaluated recursively here.  */
752 const gdb_byte *datastart;
754 unsigned int before_stack_len;
756 op_ptr = read_sleb128 (op_ptr, op_end, &offset);
757 /* Rather than create a whole new context, we simply
758 record the stack length before execution, then reset it
759 afterwards, effectively erasing whatever the recursive
761 before_stack_len = ctx->stack_len;
762 /* FIXME: cagney/2003-03-26: This code should be using
763 get_frame_base_address(), and then implement a dwarf2
764 specific this_base method. */
765 (ctx->get_frame_base) (ctx->baton, &datastart, &datalen);
766 dwarf_expr_eval (ctx, datastart, datalen);
767 if (ctx->location == DWARF_VALUE_MEMORY)
768 result = dwarf_expr_fetch_address (ctx, 0);
769 else if (ctx->location == DWARF_VALUE_REGISTER)
771 = (ctx->read_reg) (ctx->baton,
772 value_as_long (dwarf_expr_fetch (ctx, 0)));
774 error (_("Not implemented: computing frame "
775 "base using explicit value operator"))
776 result = result + offset;
777 result_val = value_from_ulongest (address_type, result);
779 ctx->stack_len = before_stack_len;
780 ctx->location = DWARF_VALUE_MEMORY;

/* DW_OP_dup / DW_OP_drop / DW_OP_pick / DW_OP_over / DW_OP_swap /
   DW_OP_rot: pure stack-shuffling operators.  */
785 result_val = dwarf_expr_fetch (ctx, 0);
786 in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, 0);
790 dwarf_expr_pop (ctx);
795 result_val = dwarf_expr_fetch (ctx, offset);
796 in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, offset);
801 struct dwarf_stack_value t1, t2;
803 if (ctx->stack_len < 2)
804 error (_("Not enough elements for "
805 "DW_OP_swap. Need 2, have %d."),
807 t1 = ctx->stack[ctx->stack_len - 1];
808 t2 = ctx->stack[ctx->stack_len - 2];
809 ctx->stack[ctx->stack_len - 1] = t2;
810 ctx->stack[ctx->stack_len - 2] = t1;
815 result_val = dwarf_expr_fetch (ctx, 1);
816 in_stack_memory = dwarf_expr_fetch_in_stack_memory (ctx, 1);
821 struct dwarf_stack_value t1, t2, t3;
823 if (ctx->stack_len < 3)
824 error (_("Not enough elements for "
825 "DW_OP_rot. Need 3, have %d."),
827 t1 = ctx->stack[ctx->stack_len - 1];
828 t2 = ctx->stack[ctx->stack_len - 2];
829 t3 = ctx->stack[ctx->stack_len - 3];
830 ctx->stack[ctx->stack_len - 1] = t2;
831 ctx->stack[ctx->stack_len - 2] = t3;
832 ctx->stack[ctx->stack_len - 3] = t1;

/* DW_OP_deref / DW_OP_deref_size / DW_OP_GNU_deref_type: pop an
   address and read that many bytes from target memory.  */
837 case DW_OP_deref_size:
838 case DW_OP_GNU_deref_type:
840 int addr_size = (op == DW_OP_deref ? ctx->addr_size : *op_ptr++);
841 gdb_byte *buf = alloca (addr_size);
842 CORE_ADDR addr = dwarf_expr_fetch_address (ctx, 0);
845 dwarf_expr_pop (ctx);
847 if (op == DW_OP_GNU_deref_type)
851 op_ptr = read_uleb128 (op_ptr, op_end, &type_die);
852 type = dwarf_get_base_type (ctx, type_die, 0);
857 (ctx->read_mem) (ctx->baton, buf, addr, addr_size);
858 result_val = value_from_contents_and_address (type, buf, addr);

/* Unary operations (abs, neg, not, plus_uconst): operate on the
   popped top of stack.  */
865 case DW_OP_plus_uconst:
867 /* Unary operations. */
868 result_val = dwarf_expr_fetch (ctx, 0);
869 dwarf_expr_pop (ctx);
874 if (value_less (result_val,
875 value_zero (value_type (result_val), not_lval)))
876 result_val = value_neg (result_val);
879 result_val = value_neg (result_val);
882 dwarf_require_integral (value_type (result_val));
883 result_val = value_complement (result_val);
885 case DW_OP_plus_uconst:
886 dwarf_require_integral (value_type (result_val));
887 result = value_as_long (result_val);
888 op_ptr = read_uleb128 (op_ptr, op_end, ®);
890 result_val = value_from_ulongest (address_type, result);

914 /* Binary operations. */
915 struct value *first, *second;
917 second = dwarf_expr_fetch (ctx, 0);
918 dwarf_expr_pop (ctx);
920 first = dwarf_expr_fetch (ctx, 0);
921 dwarf_expr_pop (ctx);
/* Typed DWARF values may only be combined with same-typed values.  */
923 if (! base_types_equal_p (value_type (first), value_type (second)))
924 error (_("Incompatible types on DWARF stack"));
929 dwarf_require_integral (value_type (first));
930 dwarf_require_integral (value_type (second));
931 result_val = value_binop (first, second, BINOP_BITWISE_AND);
934 result_val = value_binop (first, second, BINOP_DIV);
937 result_val = value_binop (first, second, BINOP_SUB);
/* DW_OP_mod: old-style untyped values must be computed with
   unsigned arithmetic, then cast back.  */
942 struct type *orig_type = value_type (first);
944 /* We have to special-case "old-style" untyped values
945 -- these must have mod computed using unsigned
947 if (orig_type == address_type)
950 = get_unsigned_type (ctx->gdbarch, orig_type);
953 first = value_cast (utype, first);
954 second = value_cast (utype, second);
956 /* Note that value_binop doesn't handle float or
957 decimal float here. This seems unimportant. */
958 result_val = value_binop (first, second, BINOP_MOD);
960 result_val = value_cast (orig_type, result_val);
964 result_val = value_binop (first, second, BINOP_MUL);
967 dwarf_require_integral (value_type (first));
968 dwarf_require_integral (value_type (second));
969 result_val = value_binop (first, second, BINOP_BITWISE_IOR);
972 result_val = value_binop (first, second, BINOP_ADD);
975 dwarf_require_integral (value_type (first));
976 dwarf_require_integral (value_type (second));
977 result_val = value_binop (first, second, BINOP_LSH);
/* DW_OP_shr: logical (unsigned) right shift -- cast an untyped
   operand to unsigned first, then restore the type.  */
980 dwarf_require_integral (value_type (first));
981 dwarf_require_integral (value_type (second));
982 if (!TYPE_UNSIGNED (value_type (first)))
985 = get_unsigned_type (ctx->gdbarch, value_type (first));
987 first = value_cast (utype, first);
990 result_val = value_binop (first, second, BINOP_RSH);
991 /* Make sure we wind up with the same type we started
993 if (value_type (result_val) != value_type (second))
994 result_val = value_cast (value_type (second), result_val);
/* DW_OP_shra: arithmetic (signed) right shift.  */
997 dwarf_require_integral (value_type (first));
998 dwarf_require_integral (value_type (second));
999 result_val = value_binop (first, second, BINOP_RSH);
1002 dwarf_require_integral (value_type (first));
1003 dwarf_require_integral (value_type (second));
1004 result_val = value_binop (first, second, BINOP_BITWISE_XOR);
/* Comparison operators push 0/1 as an untyped value.  */
1007 /* A <= B is !(B < A). */
1008 result = ! value_less (second, first);
1009 result_val = value_from_ulongest (address_type, result);
1012 /* A >= B is !(A < B). */
1013 result = ! value_less (first, second);
1014 result_val = value_from_ulongest (address_type, result);
1017 result = value_equal (first, second);
1018 result_val = value_from_ulongest (address_type, result);
1021 result = value_less (first, second);
1022 result_val = value_from_ulongest (address_type, result);
1025 /* A > B is B < A. */
1026 result = value_less (second, first);
1027 result_val = value_from_ulongest (address_type, result);
1030 result = ! value_equal (first, second);
1031 result_val = value_from_ulongest (address_type, result);
1034 internal_error (__FILE__, __LINE__,
1035 _("Can't be reached."));

/* DW_OP_call_frame_cfa: push the canonical frame address; by
   definition it lives in stack memory.  */
1040 case DW_OP_call_frame_cfa:
1041 result = (ctx->get_frame_cfa) (ctx->baton);
1042 result_val = value_from_ulongest (address_type, result);
1043 in_stack_memory = 1;

1046 case DW_OP_GNU_push_tls_address:
1047 /* Variable is at a constant offset in the thread-local
1048 storage block into the objfile for the current thread and
1049 the dynamic linker module containing this expression. Here
1050 we return returns the offset from that base. The top of the
1051 stack has the offset from the beginning of the thread
1052 control block at which the variable is located. Nothing
1053 should follow this operator, so the top of stack would be
1055 result = value_as_long (dwarf_expr_fetch (ctx, 0));
1056 dwarf_expr_pop (ctx);
1057 result = (ctx->get_tls_address) (ctx->baton, result);
1058 result_val = value_from_ulongest (address_type, result);

/* DW_OP_skip / DW_OP_bra: relative branches within the expression;
   DW_OP_bra is conditional on a popped non-zero value.  */
1062 offset = extract_signed_integer (op_ptr, 2, byte_order);
1071 offset = extract_signed_integer (op_ptr, 2, byte_order);
1073 val = dwarf_expr_fetch (ctx, 0);
1074 dwarf_require_integral (value_type (val));
1075 if (value_as_long (val) != 0)
1077 dwarf_expr_pop (ctx);

/* DW_OP_piece: record a byte-sized piece (size converted to bits
   for add_piece) and reset the location for the next piece.  */
1088 /* Record the piece. */
1089 op_ptr = read_uleb128 (op_ptr, op_end, &size);
1090 add_piece (ctx, 8 * size, 0);
1092 /* Pop off the address/regnum, and reset the location
1094 if (ctx->location != DWARF_VALUE_LITERAL
1095 && ctx->location != DWARF_VALUE_OPTIMIZED_OUT)
1096 dwarf_expr_pop (ctx);
1097 ctx->location = DWARF_VALUE_MEMORY;

1101 case DW_OP_bit_piece:
1103 ULONGEST size, offset;
1105 /* Record the piece. */
1106 op_ptr = read_uleb128 (op_ptr, op_end, &size);
1107 op_ptr = read_uleb128 (op_ptr, op_end, &offset);
1108 add_piece (ctx, size, offset);
1110 /* Pop off the address/regnum, and reset the location
1112 if (ctx->location != DWARF_VALUE_LITERAL
1113 && ctx->location != DWARF_VALUE_OPTIMIZED_OUT)
1114 dwarf_expr_pop (ctx);
1115 ctx->location = DWARF_VALUE_MEMORY;

1119 case DW_OP_GNU_uninit:
1120 if (op_ptr != op_end)
1121 error (_("DWARF-2 expression error: DW_OP_GNU_uninit must always "
1122 "be the very last op."));
1124 ctx->initialized = 0;

/* DW_OP_call2 / DW_OP_call4: evaluate another DIE's location
   expression via the context's dwarf_call hook.  */
1128 result = extract_unsigned_integer (op_ptr, 2, byte_order);
1130 ctx->dwarf_call (ctx, result);
1134 result = extract_unsigned_integer (op_ptr, 4, byte_order);
1136 ctx->dwarf_call (ctx, result);

1139 case DW_OP_GNU_entry_value:
1140 /* This operation is not yet supported by GDB. */
1141 ctx->location = DWARF_VALUE_OPTIMIZED_OUT;
1143 ctx->num_pieces = 0;
1144 goto abort_expression;

/* Typed-operation extensions: constants, register values, and
   conversions carrying an explicit DWARF base type.  */
1146 case DW_OP_GNU_const_type:
1150 const gdb_byte *data;
1153 op_ptr = read_uleb128 (op_ptr, op_end, &type_die);
1158 type = dwarf_get_base_type (ctx, type_die, n);
1159 result_val = value_from_contents (type, data);

1163 case DW_OP_GNU_regval_type:
1168 op_ptr = read_uleb128 (op_ptr, op_end, ®);
1169 op_ptr = read_uleb128 (op_ptr, op_end, &type_die);
1171 type = dwarf_get_base_type (ctx, type_die, 0);
1172 result = (ctx->read_reg) (ctx->baton, reg);
1173 result_val = value_from_ulongest (type, result);

1177 case DW_OP_GNU_convert:
1178 case DW_OP_GNU_reinterpret:
1183 op_ptr = read_uleb128 (op_ptr, op_end, &type_die);
1185 type = dwarf_get_base_type (ctx, type_die, 0);
1187 result_val = dwarf_expr_fetch (ctx, 0);
1188 dwarf_expr_pop (ctx);
1190 if (op == DW_OP_GNU_convert)
1191 result_val = value_cast (type, result_val);
1192 else if (type == value_type (result_val))
1196 else if (TYPE_LENGTH (type)
1197 != TYPE_LENGTH (value_type (result_val)))
1198 error (_("DW_OP_GNU_reinterpret has wrong size"));
1201 = value_from_contents (type,
1202 value_contents_all (result_val));

1207 error (_("Unhandled dwarf expression opcode 0x%x"), op);

1210 /* Most things push a result value. */
1211 gdb_assert (result_val != NULL);
1212 dwarf_expr_push (ctx, result_val, in_stack_memory);

1217 /* To simplify our main caller, if the result is an implicit
1218 pointer, then make a pieced value. This is ok because we can't
1219 have implicit pointers in contexts where pieces are invalid. */
1220 if (ctx->location == DWARF_VALUE_IMPLICIT_POINTER)
1221 add_piece (ctx, 8 * ctx->addr_size, 0);

1224 ctx->recursion_depth--;
1225 gdb_assert (ctx->recursion_depth >= 0);
1229 _initialize_dwarf2expr (void)
1232 = gdbarch_data_register_post_init (dwarf_gdbarch_types_init);