1 /* Expands front end tree to back end RTL for GCC
2 Copyright (C) 1987-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file handles the generation of rtl code from tree structure
21 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
22 The functions whose names start with `expand_' are called by the
23 expander to generate RTL instructions for various kinds of constructs. */
27 #include "coretypes.h"
31 #include "hard-reg-set.h"
37 #include "insn-config.h"
42 #include "diagnostic-core.h"
45 #include "langhooks.h"
51 #include "alloc-pool.h"
52 #include "pretty-print.h"
53 #include "pointer-set.h"
58 /* Functions and data structures for expanding case statements. */
60 /* Case label structure, used to hold info on labels within case
61 statements. We handle "range" labels; for a single-value label
62 as in C, the high and low limits are the same.
64 We start with a vector of case nodes sorted in ascending order, and
65 the default label as the last element in the vector. Before expanding
66 to RTL, we transform this vector into a list linked via the RIGHT
67 fields in the case_node struct. Nodes with higher case values are
70 Switch statements can be output in three forms. A branch table is
71 used if there are more than a few labels and the labels are dense
72 within the range between the smallest and largest case value. If a
73 branch table is used, no further manipulations are done with the case
76 The alternative to the use of a branch table is to generate a series
77 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
78 and PARENT fields to hold a binary tree. Initially the tree is
79 totally unbalanced, with everything on the right. We balance the tree
80 with nodes on the left having lower case values than the parent
81 and nodes on the right having higher values. We then output the tree
84 For very small, suitable switch statements, we can generate a series
85 of simple bit test and branches instead. */
89 struct case_node *left; /* Left son in binary tree */
90 struct case_node *right; /* Right son in binary tree; also node chain */
91 struct case_node *parent; /* Parent of node in binary tree */
92 tree low; /* Lowest index value for this label */
93 tree high; /* Highest index value for this label */
94 tree code_label; /* Label to jump to when node matches */
95 int prob; /* Probability of taking this case. */
96 /* Probability of reaching subtree rooted at this node */
100 typedef struct case_node case_node;
101 typedef struct case_node *case_node_ptr;
103 extern basic_block label_to_block_fn (struct function *, tree);
105 static int n_occurrences (int, const char *);
106 static bool tree_conflicts_with_clobbers_p (tree, HARD_REG_SET *);
107 static void expand_nl_goto_receiver (void);
108 static bool check_operand_nalternatives (tree, tree);
109 static bool check_unique_operand_names (tree, tree, tree);
110 static char *resolve_operand_name_1 (char *, tree, tree, tree);
111 static void expand_null_return_1 (void);
112 static void expand_value_return (rtx);
113 static void balance_case_nodes (case_node_ptr *, case_node_ptr);
114 static int node_has_low_bound (case_node_ptr, tree);
115 static int node_has_high_bound (case_node_ptr, tree);
116 static int node_is_bounded (case_node_ptr, tree);
117 static void emit_case_nodes (rtx, case_node_ptr, rtx, int, tree);
119 /* Return the rtx-label that corresponds to a LABEL_DECL,
120 creating it if necessary. */
123 label_rtx (tree label)
125 gcc_assert (TREE_CODE (label) == LABEL_DECL);
127 if (!DECL_RTL_SET_P (label))
129 rtx r = gen_label_rtx ();
130 SET_DECL_RTL (label, r);
131 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
132 LABEL_PRESERVE_P (r) = 1;
135 return DECL_RTL (label);
138 /* As above, but also put it on the forced-reference list of the
139 function that contains it. */
141 force_label_rtx (tree label)
143 rtx ref = label_rtx (label);
144 tree function = decl_function_context (label);
146 gcc_assert (function);
148 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref, forced_labels);
152 /* Add an unconditional jump to LABEL as the next sequential instruction. */
155 emit_jump (rtx label)
157 do_pending_stack_adjust ();
158 emit_jump_insn (gen_jump (label));
162 /* Emit code to jump to the address
163 specified by the pointer expression EXP. */
166 expand_computed_goto (tree exp)
168 rtx x = expand_normal (exp);
170 x = convert_memory_address (Pmode, x);
172 do_pending_stack_adjust ();
173 emit_indirect_jump (x);
176 /* Handle goto statements and the labels that they can go to. */
178 /* Specify the location in the RTL code of a label LABEL,
179 which is a LABEL_DECL tree node.
181 This is used for the kind of label that the user can jump to with a
182 goto statement, and for alternatives of a switch or case statement.
183 RTL labels generated for loops and conditionals don't go through here;
184 they are generated directly at the RTL level, by other functions below.
186 Note that this has nothing to do with defining label *names*.
187 Languages vary in how they do that and what that even means. */
190 expand_label (tree label)
192 rtx label_r = label_rtx (label);
194 do_pending_stack_adjust ();
195 emit_label (label_r);
196 if (DECL_NAME (label))
197 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
199 if (DECL_NONLOCAL (label))
201 expand_nl_goto_receiver ();
202 nonlocal_goto_handler_labels
203 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
204 nonlocal_goto_handler_labels);
207 if (FORCED_LABEL (label))
208 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);
210 if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
211 maybe_set_first_label_num (label_r);
214 /* Generate RTL code for a `goto' statement with target label LABEL.
215 LABEL should be a LABEL_DECL tree node that was or will later be
216 defined with `expand_label'. */
219 expand_goto (tree label)
221 #ifdef ENABLE_CHECKING
222 /* Check for a nonlocal goto to a containing function. Should have
223 gotten translated to __builtin_nonlocal_goto. */
224 tree context = decl_function_context (label);
225 gcc_assert (!context || context == current_function_decl);
228 emit_jump (label_rtx (label));
/* Return the number of times character C occurs in string S.
   S must be NUL-terminated; returns 0 for the empty string.  */

static int
n_occurrences (int c, const char *s)
{
  int n = 0;

  while (*s)
    n += (*s++ == c);

  return n;
}
241 /* Generate RTL for an asm statement (explicit assembler code).
242 STRING is a STRING_CST node containing the assembler code text,
243 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
244 insn is volatile; don't optimize it. */
247 expand_asm_loc (tree string, int vol, location_t locus)
251 if (TREE_CODE (string) == ADDR_EXPR)
252 string = TREE_OPERAND (string, 0);
254 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
255 ggc_strdup (TREE_STRING_POINTER (string)),
258 MEM_VOLATILE_P (body) = vol;
263 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
264 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
265 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
266 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
267 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
268 constraint allows the use of a register operand. And, *IS_INOUT
269 will be true if the operand is read-write, i.e., if it is used as
270 an input as well as an output. If *CONSTRAINT_P is not in
271 canonical form, it will be made canonical. (Note that `+' will be
272 replaced with `=' as part of this process.)
274 Returns TRUE if all went well; FALSE if an error occurred. */
/* NOTE(review): this copy of the function is incomplete — the embedded
   line numbers jump (e.g. 279 -> 281, 284 -> 289), so the return type,
   braces, local declarations and several statements are missing.  Do not
   edit from this copy; verify against a pristine stmt.c.  */
277 parse_output_constraint (const char **constraint_p, int operand_num,
278 int ninputs, int noutputs, bool *allows_mem,
279 bool *allows_reg, bool *is_inout)
281 const char *constraint = *constraint_p;
284 /* Assume the constraint doesn't allow the use of either a register
289 /* Allow the `=' or `+' to not be at the beginning of the string,
290 since it wasn't explicitly documented that way, and there is a
291 large body of code that puts it last. Swap the character to
292 the front, so as not to uglify any place else. */
293 p = strchr (constraint, '=');
295 p = strchr (constraint, '+');
297 /* If the string doesn't contain an `=', issue an error
301 error ("output operand constraint lacks %<=%>");
305 /* If the constraint begins with `+', then the operand is both read
306 from and written to. */
307 *is_inout = (*p == '+');
309 /* Canonicalize the output constraint so that it begins with `='. */
310 if (p != constraint || *is_inout)
313 size_t c_len = strlen (constraint);
316 warning (0, "output constraint %qc for operand %d "
317 "is not at the beginning",
320 /* Make a copy of the constraint. */
321 buf = XALLOCAVEC (char, c_len + 1);
322 strcpy (buf, constraint);
323 /* Swap the first character and the `=' or `+'. */
324 buf[p - constraint] = buf[0];
325 /* Make sure the first character is an `='. (Until we do this,
326 it might be a `+'.) */
328 /* Replace the constraint with the canonicalized string. */
329 *constraint_p = ggc_alloc_string (buf, c_len);
330 constraint = *constraint_p;
333 /* Loop through the constraint string. */
334 for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
/* NOTE(review): the switch statement driving the cases below is elided;
   only some case labels and their diagnostics survive.  */
339 error ("operand constraint contains incorrectly positioned "
344 if (operand_num + 1 == ninputs + noutputs)
346 error ("%<%%%> constraint used with last operand");
351 case 'V': case TARGET_MEM_CONSTRAINT: case 'o':
355 case '?': case '!': case '*': case '&': case '#':
356 case 'E': case 'F': case 'G': case 'H':
357 case 's': case 'i': case 'n':
358 case 'I': case 'J': case 'K': case 'L': case 'M':
359 case 'N': case 'O': case 'P': case ',':
362 case '0': case '1': case '2': case '3': case '4':
363 case '5': case '6': case '7': case '8': case '9':
365 error ("matching constraint not valid in output operand");
369 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
370 excepting those that expand_call created. So match memory
387 if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
389 #ifdef EXTRA_CONSTRAINT_STR
390 else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
392 else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
396 /* Otherwise we can't assume anything about the nature of
397 the constraint except that it isn't purely registers.
398 Treat it like "g" and hope for the best. */
409 /* Similar, but for input constraints. */
/* NOTE(review): like parse_output_constraint above, this copy is
   incomplete (embedded numbering jumps); the return type, braces and
   many statements — including the *allows_mem/*allows_reg assignments —
   are missing.  Verify against a pristine stmt.c before editing.  */
412 parse_input_constraint (const char **constraint_p, int input_num,
413 int ninputs, int noutputs, int ninout,
414 const char * const * constraints,
415 bool *allows_mem, bool *allows_reg)
417 const char *constraint = *constraint_p;
418 const char *orig_constraint = constraint;
419 size_t c_len = strlen (constraint);
421 bool saw_match = false;
423 /* Assume the constraint doesn't allow the use of either
424 a register or memory. */
428 /* Make sure constraint has neither `=', `+', nor '&'. */
430 for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
431 switch (constraint[j])
433 case '+': case '=': case '&':
434 if (constraint == orig_constraint)
436 error ("input operand constraint contains %qc", constraint[j]);
442 if (constraint == orig_constraint
443 && input_num + 1 == ninputs - ninout)
445 error ("%<%%%> constraint used with last operand");
450 case 'V': case TARGET_MEM_CONSTRAINT: case 'o':
455 case '?': case '!': case '*': case '#':
456 case 'E': case 'F': case 'G': case 'H':
457 case 's': case 'i': case 'n':
458 case 'I': case 'J': case 'K': case 'L': case 'M':
459 case 'N': case 'O': case 'P': case ',':
462 /* Whether or not a numeric constraint allows a register is
463 decided by the matching constraint, and so there is no need
464 to do anything special with them. We must handle them in
465 the default case, so that we don't unnecessarily force
466 operands to memory. */
467 case '0': case '1': case '2': case '3': case '4':
468 case '5': case '6': case '7': case '8': case '9':
475 match = strtoul (constraint + j, &end, 10);
476 if (match >= (unsigned long) noutputs)
478 error ("matching constraint references invalid operand number")
482 /* Try and find the real constraint for this dup. Only do this
483 if the matching constraint is the only alternative. */
485 && (j == 0 || (j == 1 && constraint[0] == '%')))
487 constraint = constraints[match];
488 *constraint_p = constraint;
489 c_len = strlen (constraint);
491 /* ??? At the end of the loop, we will skip the first part of
492 the matched constraint. This assumes not only that the
493 other constraint is an output constraint, but also that
494 the '=' or '+' come first. */
498 j = end - constraint;
499 /* Anticipate increment at end of loop. */
514 if (! ISALPHA (constraint[j]))
516 error ("invalid punctuation %qc in constraint", constraint[j]);
519 if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
522 #ifdef EXTRA_CONSTRAINT_STR
523 else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
525 else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
529 /* Otherwise we can't assume anything about the nature of
530 the constraint except that it isn't purely registers.
531 Treat it like "g" and hope for the best. */
539 if (saw_match && !*allows_reg)
540 warning (0, "matching constraint does not allow a register");
545 /* Return DECL iff there's an overlap between *REGS and DECL, where DECL
546 can be an asm-declared register. Called via walk_tree. */
/* NOTE(review): incomplete copy — the second parameter line (presumably
   `void *data`), the `tree decl = *declp;` declaration, braces and the
   return statements are elided here.  The visible logic checks whether a
   hard-register VAR_DECL's assigned register overlaps the set passed via
   DATA; confirm the full body against a pristine stmt.c.  */
549 decl_overlaps_hard_reg_set_p (tree *declp, int *walk_subtrees ATTRIBUTE_UNUSED,
553 const HARD_REG_SET *const regs = (const HARD_REG_SET *) data;
555 if (TREE_CODE (decl) == VAR_DECL)
557 if (DECL_HARD_REGISTER (decl)
558 && REG_P (DECL_RTL (decl))
559 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
561 rtx reg = DECL_RTL (decl);
563 if (overlaps_hard_reg_set_p (*regs, GET_MODE (reg), REGNO (reg)))
568 else if (TYPE_P (decl) || TREE_CODE (decl) == PARM_DECL)
573 /* If there is an overlap between *REGS and DECL, return the first overlap
576 tree_overlaps_hard_reg_set (tree decl, HARD_REG_SET *regs)
578 return walk_tree (&decl, decl_overlaps_hard_reg_set_p, regs, NULL);
581 /* Check for overlap between registers marked in CLOBBERED_REGS and
582 anything inappropriate in T. Emit error and return the register
583 variable definition for error, NULL_TREE for ok. */
586 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
588 /* Conflicts between asm-declared register variables and the clobber
589 list are not allowed. */
590 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
594 error ("asm-specifier for variable %qE conflicts with asm clobber list",
595 DECL_NAME (overlap));
597 /* Reset registerness to stop multiple errors emitted for a single
599 DECL_REGISTER (overlap) = 0;
606 /* Generate RTL for an asm statement with arguments.
607 STRING is the instruction template.
608 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
609 Each output or input has an expression in the TREE_VALUE and
610 a tree list in TREE_PURPOSE which in turn contains a constraint
611 name in TREE_VALUE (or NULL_TREE) and a constraint string
613 CLOBBERS is a list of STRING_CST nodes each naming a hard register
614 that is clobbered by this insn.
616 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
617 should be the fallthru basic block of the asm goto.
619 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
620 Some elements of OUTPUTS may be replaced with trees representing temporary
621 values. The caller should copy those temporary values to the originally
624 VOL nonzero means the insn is volatile; don't optimize it. */
/* NOTE(review): this copy of the function is heavily elided (embedded
   numbering jumps throughout, e.g. 629 -> 631, 650 -> 652, 989 -> 999);
   the return type, braces, many declarations (t, i, tail, body, obody,
   num, nregs, reg, op, ok, ...) and whole statements are missing.  The
   surviving lines below outline the phases of the function; verify any
   change against a pristine stmt.c.  */
627 expand_asm_operands (tree string, tree outputs, tree inputs,
628 tree clobbers, tree labels, basic_block fallthru_bb,
629 int vol, location_t locus)
631 rtvec argvec, constraintvec, labelvec;
633 int ninputs = list_length (inputs);
634 int noutputs = list_length (outputs);
635 int nlabels = list_length (labels);
638 HARD_REG_SET clobbered_regs;
639 int clobber_conflict_found = 0;
643 /* Vector of RTX's of evaluated output operands. */
644 rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
645 int *inout_opnum = XALLOCAVEC (int, noutputs);
646 rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
647 enum machine_mode *inout_mode = XALLOCAVEC (enum machine_mode, noutputs);
648 const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
649 int old_generating_concat_p = generating_concat_p;
650 rtx fallthru_label = NULL_RTX;
652 /* An ASM with no outputs needs to be treated as volatile, for now. */
656 if (! check_operand_nalternatives (outputs, inputs))
659 string = resolve_asm_operand_names (string, outputs, inputs, labels);
661 /* Collect constraints. */
663 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
664 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
665 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
666 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
668 /* Sometimes we wish to automatically clobber registers across an asm.
669 Case in point is when the i386 backend moved from cc0 to a hard reg --
670 maintaining source-level compatibility means automatically clobbering
671 the flags register. */
672 clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
674 /* Count the number of meaningful clobbered registers, ignoring what
675 we would ignore later. */
677 CLEAR_HARD_REG_SET (clobbered_regs);
678 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
683 if (TREE_VALUE (tail) == error_mark_node)
685 regname = TREE_STRING_POINTER (TREE_VALUE (tail));
687 i = decode_reg_name_and_count (regname, &nregs);
691 error ("unknown register name %qs in %<asm%>", regname);
693 /* Mark clobbered registers. */
698 for (reg = i; reg < i + nregs; reg++)
702 /* Clobbering the PIC register is an error. */
703 if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
705 error ("PIC register clobbered by %qs in %<asm%>", regname);
709 SET_HARD_REG_BIT (clobbered_regs, reg);
714 /* First pass over inputs and outputs checks validity and sets
715 mark_addressable if needed. */
718 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
720 tree val = TREE_VALUE (tail);
721 tree type = TREE_TYPE (val);
722 const char *constraint;
727 /* If there's an erroneous arg, emit no insn. */
728 if (type == error_mark_node)
731 /* Try to parse the output constraint. If that fails, there's
732 no point in going further. */
733 constraint = constraints[i];
734 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
735 &allows_mem, &allows_reg, &is_inout))
742 && REG_P (DECL_RTL (val))
743 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
744 mark_addressable (val);
751 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
753 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
757 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
759 bool allows_reg, allows_mem;
760 const char *constraint;
762 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
763 would get VOIDmode and that could cause a crash in reload. */
764 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
767 constraint = constraints[i + noutputs];
768 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
769 constraints, &allows_mem, &allows_reg))
772 if (! allows_reg && allows_mem)
773 mark_addressable (TREE_VALUE (tail));
776 /* Second pass evaluates arguments. */
778 /* Make sure stack is consistent for asm goto. */
780 do_pending_stack_adjust ();
783 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
785 tree val = TREE_VALUE (tail);
786 tree type = TREE_TYPE (val);
793 ok = parse_output_constraint (&constraints[i], i, ninputs,
794 noutputs, &allows_mem, &allows_reg,
798 /* If an output operand is not a decl or indirect ref and our constraint
799 allows a register, make a temporary to act as an intermediate.
800 Make the asm insn write into that, then our caller will copy it to
801 the real output operand. Likewise for promoted variables. */
803 generating_concat_p = 0;
805 real_output_rtx[i] = NULL_RTX;
806 if ((TREE_CODE (val) == INDIRECT_REF
809 && (allows_mem || REG_P (DECL_RTL (val)))
810 && ! (REG_P (DECL_RTL (val))
811 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
815 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
817 op = validize_mem (op);
819 if (! allows_reg && !MEM_P (op))
820 error ("output number %d not directly addressable", i);
821 if ((! allows_mem && MEM_P (op))
822 || GET_CODE (op) == CONCAT)
824 real_output_rtx[i] = op;
825 op = gen_reg_rtx (GET_MODE (op));
827 emit_move_insn (op, real_output_rtx[i]);
832 op = assign_temp (type, 0, 1);
833 op = validize_mem (op);
834 if (!MEM_P (op) && TREE_CODE (TREE_VALUE (tail)) == SSA_NAME)
835 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail)), op);
836 TREE_VALUE (tail) = make_tree (type, op);
840 generating_concat_p = old_generating_concat_p;
844 inout_mode[ninout] = TYPE_MODE (type);
845 inout_opnum[ninout++] = i;
848 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
849 clobber_conflict_found = 1;
852 /* Make vectors for the expression-rtx, constraint strings,
853 and named operands. */
855 argvec = rtvec_alloc (ninputs);
856 constraintvec = rtvec_alloc (ninputs);
857 labelvec = rtvec_alloc (nlabels);
859 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
860 : GET_MODE (output_rtx[0])),
861 ggc_strdup (TREE_STRING_POINTER (string)),
862 empty_string, 0, argvec, constraintvec,
865 MEM_VOLATILE_P (body) = vol;
867 /* Eval the inputs and put them into ARGVEC.
868 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
870 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
872 bool allows_reg, allows_mem;
873 const char *constraint;
878 constraint = constraints[i + noutputs];
879 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
880 constraints, &allows_mem, &allows_reg);
883 generating_concat_p = 0;
885 val = TREE_VALUE (tail);
886 type = TREE_TYPE (val);
887 /* EXPAND_INITIALIZER will not generate code for valid initializer
888 constants, but will still generate code for other types of operand.
889 This is the behavior we want for constant constraints. */
890 op = expand_expr (val, NULL_RTX, VOIDmode,
891 allows_reg ? EXPAND_NORMAL
892 : allows_mem ? EXPAND_MEMORY
893 : EXPAND_INITIALIZER);
895 /* Never pass a CONCAT to an ASM. */
896 if (GET_CODE (op) == CONCAT)
897 op = force_reg (GET_MODE (op), op);
899 op = validize_mem (op);
901 if (asm_operand_ok (op, constraint, NULL) <= 0)
903 if (allows_reg && TYPE_MODE (type) != BLKmode)
904 op = force_reg (TYPE_MODE (type), op);
905 else if (!allows_mem)
906 warning (0, "asm operand %d probably doesn%'t match constraints",
910 /* We won't recognize either volatile memory or memory
911 with a queued address as available a memory_operand
912 at this point. Ignore it: clearly this *is* a memory. */
918 generating_concat_p = old_generating_concat_p;
919 ASM_OPERANDS_INPUT (body, i) = op;
921 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
922 = gen_rtx_ASM_INPUT (TYPE_MODE (type),
923 ggc_strdup (constraints[i + noutputs]));
925 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
926 clobber_conflict_found = 1;
929 /* Protect all the operands from the queue now that they have all been
932 generating_concat_p = 0;
934 /* For in-out operands, copy output rtx to input rtx. */
935 for (i = 0; i < ninout; i++)
937 int j = inout_opnum[i];
940 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
943 sprintf (buffer, "%d", j);
944 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
945 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
948 /* Copy labels to the vector. */
949 for (i = 0, tail = labels; i < nlabels; ++i, tail = TREE_CHAIN (tail))
952 /* If asm goto has any labels in the fallthru basic block, use
953 a label that we emit immediately after the asm goto. Expansion
954 may insert further instructions into the same basic block after
955 asm goto and if we don't do this, insertion of instructions on
956 the fallthru edge might misbehave. See PR58670. */
958 && label_to_block_fn (cfun, TREE_VALUE (tail)) == fallthru_bb)
960 if (fallthru_label == NULL_RTX)
961 fallthru_label = gen_label_rtx ();
965 r = label_rtx (TREE_VALUE (tail));
966 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
969 generating_concat_p = old_generating_concat_p;
971 /* Now, for each output, construct an rtx
972 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
973 ARGVEC CONSTRAINTS OPNAMES))
974 If there is more than one, put them inside a PARALLEL. */
976 if (nlabels > 0 && nclobbers == 0)
978 gcc_assert (noutputs == 0);
979 emit_jump_insn (body);
981 else if (noutputs == 0 && nclobbers == 0)
983 /* No output operands: put in a raw ASM_OPERANDS rtx. */
986 else if (noutputs == 1 && nclobbers == 0)
988 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
989 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
999 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1001 /* For each output operand, store a SET. */
1002 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1004 XVECEXP (body, 0, i)
1005 = gen_rtx_SET (VOIDmode,
1007 gen_rtx_ASM_OPERANDS
1008 (GET_MODE (output_rtx[i]),
1009 ggc_strdup (TREE_STRING_POINTER (string)),
1010 ggc_strdup (constraints[i]),
1011 i, argvec, constraintvec, labelvec, locus));
1013 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1016 /* If there are no outputs (but there are some clobbers)
1017 store the bare ASM_OPERANDS into the PARALLEL. */
1020 XVECEXP (body, 0, i++) = obody;
1022 /* Store (clobber REG) for each clobbered register specified. */
1024 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1026 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1028 int j = decode_reg_name_and_count (regname, &nregs);
1033 if (j == -3) /* `cc', which is not a register */
1036 if (j == -4) /* `memory', don't cache memory across asm */
1038 XVECEXP (body, 0, i++)
1039 = gen_rtx_CLOBBER (VOIDmode,
1042 gen_rtx_SCRATCH (VOIDmode)));
1046 /* Ignore unknown register, error already signaled. */
1050 for (reg = j; reg < j + nregs; reg++)
1052 /* Use QImode since that's guaranteed to clobber just
1054 clobbered_reg = gen_rtx_REG (QImode, reg);
1056 /* Do sanity check for overlap between clobbers and
1057 respectively input and outputs that hasn't been
1058 handled. Such overlap should have been detected and
1060 if (!clobber_conflict_found)
1064 /* We test the old body (obody) contents to avoid
1065 tripping over the under-construction body. */
1066 for (opno = 0; opno < noutputs; opno++)
1067 if (reg_overlap_mentioned_p (clobbered_reg,
1070 ("asm clobber conflict with output operand");
1072 for (opno = 0; opno < ninputs - ninout; opno++)
1073 if (reg_overlap_mentioned_p (clobbered_reg,
1074 ASM_OPERANDS_INPUT (obody,
1077 ("asm clobber conflict with input operand");
1080 XVECEXP (body, 0, i++)
1081 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1086 emit_jump_insn (body);
1092 emit_label (fallthru_label);
1094 /* For any outputs that needed reloading into registers, spill them
1095 back to where they belong. */
1096 for (i = 0; i < noutputs; ++i)
1097 if (real_output_rtx[i])
1098 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1100 crtl->has_asm_statement = 1;
/* Expand a GIMPLE_ASM statement STMT to RTL: rebuild tree lists from the
   gimple operand vectors, then hand off to expand_asm_loc (basic asm) or
   expand_asm_operands (extended asm), and copy back any intermediate
   output temporaries.
   NOTE(review): incomplete copy — return type, braces and several
   declarations (n, i, s, o, noutputs, ...) are elided (embedded
   numbering jumps, e.g. 1105 -> 1108).  Verify against pristine stmt.c.  */
1105 expand_asm_stmt (gimple stmt)
1108 tree outputs, tail, t;
1112 tree str, out, in, cl, labels;
1113 location_t locus = gimple_location (stmt);
1114 basic_block fallthru_bb = NULL;
1116 /* Meh... convert the gimple asm operands into real tree lists.
1117 Eventually we should make all routines work on the vectors instead
1118 of relying on TREE_CHAIN. */
1120 n = gimple_asm_noutputs (stmt);
1123 t = out = gimple_asm_output_op (stmt, 0);
1124 for (i = 1; i < n; i++)
1125 t = TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
1129 n = gimple_asm_ninputs (stmt);
1132 t = in = gimple_asm_input_op (stmt, 0);
1133 for (i = 1; i < n; i++)
1134 t = TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
1138 n = gimple_asm_nclobbers (stmt);
1141 t = cl = gimple_asm_clobber_op (stmt, 0);
1142 for (i = 1; i < n; i++)
1143 t = TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
1147 n = gimple_asm_nlabels (stmt);
1150 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
1152 fallthru_bb = fallthru->dest;
1153 t = labels = gimple_asm_label_op (stmt, 0);
1154 for (i = 1; i < n; i++)
1155 t = TREE_CHAIN (t) = gimple_asm_label_op (stmt, i);
1158 s = gimple_asm_string (stmt);
1159 str = build_string (strlen (s), s);
1161 if (gimple_asm_input_p (stmt))
1163 expand_asm_loc (str, gimple_asm_volatile_p (stmt), locus);
1168 noutputs = gimple_asm_noutputs (stmt);
1169 /* o[I] is the place that output number I should be written. */
1170 o = (tree *) alloca (noutputs * sizeof (tree));
1172 /* Record the contents of OUTPUTS before it is modified. */
1173 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1174 o[i] = TREE_VALUE (tail);
1176 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1177 OUTPUTS some trees for where the values were actually stored. */
1178 expand_asm_operands (str, outputs, in, cl, labels, fallthru_bb,
1179 gimple_asm_volatile_p (stmt), locus);
1181 /* Copy all the intermediate outputs into the specified outputs. */
1182 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1184 if (o[i] != TREE_VALUE (tail))
1186 expand_assignment (o[i], TREE_VALUE (tail), false);
1189 /* Restore the original value so that it's correct the next
1190 time we expand this function. */
1191 TREE_VALUE (tail) = o[i];
1196 /* A subroutine of expand_asm_operands. Check that all operands have
1197 the same number of alternatives. Return true if so. */
/* NOTE(review): incomplete copy — the return type, braces, the
   `nalternatives` declaration and the loop structure walking outputs
   then inputs are elided (numbering jumps 1197 -> 1200, 1211 -> 1218).
   Verify against a pristine stmt.c before editing.  */
1200 check_operand_nalternatives (tree outputs, tree inputs)
1202 if (outputs || inputs)
1204 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1206 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1209 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1211 error ("too many alternatives in %<asm%>");
1218 const char *constraint
1219 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1221 if (n_occurrences (',', constraint) != nalternatives)
1223 error ("operand constraints for %<asm%> differ "
1224 "in number of alternatives");
1228 if (TREE_CHAIN (tmp))
1229 tmp = TREE_CHAIN (tmp);
1231 tmp = next, next = 0;
1238 /* A subroutine of expand_asm_operands. Check that all operand names
1239 are unique. Return true if so. We rely on the fact that these names
1240 are identifiers, and so have been canonicalized by get_identifier,
1241 so all we need are pointer comparisons. */
/* NOTE(review): incomplete copy — return type, braces, the NULL-name
   skips inside each loop and the failure/return paths are elided; the
   final error() presumably sits under a failure label.  Verify against a
   pristine stmt.c.  */
1244 check_unique_operand_names (tree outputs, tree inputs, tree labels)
1246 tree i, j, i_name = NULL_TREE;
1248 for (i = outputs; i ; i = TREE_CHAIN (i))
1250 i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1254 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1255 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1259 for (i = inputs; i ; i = TREE_CHAIN (i))
1261 i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1265 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1266 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1268 for (j = outputs; j ; j = TREE_CHAIN (j))
1269 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1273 for (i = labels; i ; i = TREE_CHAIN (i))
1275 i_name = TREE_PURPOSE (i);
1279 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1280 if (simple_cst_equal (i_name, TREE_PURPOSE (j)))
1282 for (j = inputs; j ; j = TREE_CHAIN (j))
1283 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1290 error ("duplicate asm operand name %qs", TREE_STRING_POINTER (i_name));
1294 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1295 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1296 STRING and in the constraints to those numbers. */
1299 resolve_asm_operand_names (tree string, tree outputs, tree inputs, tree labels)
1306 check_unique_operand_names (outputs, inputs, labels);
1308 /* Substitute [<name>] in input constraint strings. There should be no
1309 named operands in output constraints. */
1310 for (t = inputs; t ; t = TREE_CHAIN (t))
1312 c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1313 if (strchr (c, '[') != NULL)
/* Do the substitutions in a scratch copy, then rebuild the STRING_CST
   rather than editing the shared tree string in place. */
1315 p = buffer = xstrdup (c);
1316 while ((p = strchr (p, '[')) != NULL)
1317 p = resolve_operand_name_1 (p, outputs, inputs, NULL);
1318 TREE_VALUE (TREE_PURPOSE (t))
1319 = build_string (strlen (buffer), buffer);
1324 /* Now check for any needed substitutions in the template. */
1325 c = TREE_STRING_POINTER (string);
1326 while ((c = strchr (c, '%')) != NULL)
/* "%[" and "%X[" introduce a named operand; "%%" is a literal percent,
   hence the conditional extra skip below. */
1330 else if (ISALPHA (c[1]) && c[2] == '[')
1334 c += 1 + (c[1] == '%');
1341 /* OK, we need to make a copy so we can perform the substitutions.
1342 Assume that we will not need extra space--we get to remove '['
1343 and ']', which means we cannot have a problem until we have more
1344 than 999 operands. */
1345 buffer = xstrdup (TREE_STRING_POINTER (string));
1346 p = buffer + (c - TREE_STRING_POINTER (string));
1348 while ((p = strchr (p, '%')) != NULL)
1352 else if (ISALPHA (p[1]) && p[2] == '[')
1356 p += 1 + (p[1] == '%');
1360 p = resolve_operand_name_1 (p, outputs, inputs, labels);
/* Return a fresh STRING_CST with all names replaced by numbers. */
1363 string = build_string (strlen (buffer), buffer);
1370 /* A subroutine of resolve_operand_names. P points to the '[' for a
1371 potential named operand of the form [<name>]. In place, replace
1372 the name and brackets with a number. Return a pointer to the
1373 balance of the string after substitution. */
1376 resolve_operand_name_1 (char *p, tree outputs, tree inputs, tree labels)
1382 /* Collect the operand name. */
1383 q = strchr (++p, ']');
1386 error ("missing close brace for named operand");
1387 return strchr (p, '\0');
1391 /* Resolve the name to a number. */
/* Operands are numbered outputs first, then inputs, then labels; OP keeps
   counting across all three chains so the result is the asm operand index. */
1392 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
1394 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1395 if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
1398 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
1400 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1401 if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
1404 for (t = labels; t ; t = TREE_CHAIN (t), op++)
1406 tree name = TREE_PURPOSE (t);
1407 if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
1411 error ("undefined named operand %qs", identifier_to_locale (p));
1415 /* Replace the name with the number. Unfortunately, not all libraries
1416 get the return value of sprintf correct, so search for the end of the
1417 generated string by hand. */
/* --p backs up onto the '[' so the digits overwrite the open bracket. */
1418 sprintf (--p, "%d", op);
1419 p = strchr (p, '\0');
1421 /* Verify the no extra buffer space assumption. */
1422 gcc_assert (p <= q);
1424 /* Shift the rest of the buffer down to fill the gap. */
/* Q points at the ']'; copy everything after it, including the NUL. */
1425 memmove (p, q + 1, strlen (q + 1) + 1);
1430 /* Generate RTL to return from the current function, with no value.
1431 (That is, we do not do anything about returning any value.) */
1434 expand_null_return (void)
1436 /* If this function was declared to return a value, but we
1437 didn't, clobber the return registers so that they are not
1438 propagated live to the rest of the function. */
1439 clobber_return_register ();
/* Settle the stack and jump to the function's return label. */
1441 expand_null_return_1 ();
1444 /* Generate RTL to return directly from the current function.
1445 (That is, we bypass any return value.) */
1448 expand_naked_return (void)
/* Flush any queued stack-pointer adjustment before jumping out. */
1452 clear_pending_stack_adjust ();
1453 do_pending_stack_adjust ();
/* Reuse the shared naked_return_label, creating it lazily on first use. */
1455 end_label = naked_return_label;
1457 end_label = naked_return_label = gen_label_rtx ();
1459 emit_jump (end_label);
1462 /* Generate RTL to return from the current function, with value VAL. */
1465 expand_value_return (rtx val)
1467 /* Copy the value to the return location unless it's already there. */
1469 tree decl = DECL_RESULT (current_function_decl);
1470 rtx return_reg = DECL_RTL (decl);
1471 if (return_reg != val)
1473 tree funtype = TREE_TYPE (current_function_decl);
1474 tree type = TREE_TYPE (decl);
1475 int unsignedp = TYPE_UNSIGNED (type);
1476 enum machine_mode old_mode = DECL_MODE (decl);
1477 enum machine_mode mode;
/* Ask the target how the return value is promoted; the final argument
   distinguishes a return by invisible reference (2) from a plain value
   return (1). */
1478 if (DECL_BY_REFERENCE (decl))
1479 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
1481 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
1483 if (mode != old_mode)
1484 val = convert_modes (mode, old_mode, val, unsignedp);
/* A PARALLEL return location describes a value split over several
   hard registers; use a group load in that case. */
1486 if (GET_CODE (return_reg) == PARALLEL)
1487 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
1489 emit_move_insn (return_reg, val);
/* Settle the stack and jump to the function's return label. */
1492 expand_null_return_1 ();
1495 /* Output a return with no value. */
/* Common tail of the expand_*_return routines: settle pending stack
   adjustments and jump to the function's single return_label. */
1498 expand_null_return_1 (void)
1500 clear_pending_stack_adjust ();
1501 do_pending_stack_adjust ();
1502 emit_jump (return_label);
1505 /* Generate RTL to evaluate the expression RETVAL and return it
1506 from the current function. */
1509 expand_return (tree retval)
1515 /* If function wants no value, give it none. */
1516 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
/* Still evaluate RETVAL for its side effects before returning. */
1518 expand_normal (retval);
1519 expand_null_return ();
1523 if (retval == error_mark_node)
1525 /* Treat this like a return of no value from a function that
1527 expand_null_return ();
/* Peel a "RESULT_DECL = rhs" assignment down to its right-hand side. */
1530 else if ((TREE_CODE (retval) == MODIFY_EXPR
1531 || TREE_CODE (retval) == INIT_EXPR)
1532 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
1533 retval_rhs = TREE_OPERAND (retval, 1);
1535 retval_rhs = retval;
1537 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
1539 /* If we are returning the RESULT_DECL, then the value has already
1540 been stored into it, so we don't have to do anything special. */
1541 if (TREE_CODE (retval_rhs) == RESULT_DECL)
1542 expand_value_return (result_rtl);
1544 /* If the result is an aggregate that is being returned in one (or more)
1545 registers, load the registers here. */
1547 else if (retval_rhs != 0
1548 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
1549 && REG_P (result_rtl))
1551 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
1554 /* Use the mode of the result value on the return register. */
1555 PUT_MODE (result_rtl, GET_MODE (val));
1556 expand_value_return (val);
1559 expand_null_return ();
1561 else if (retval_rhs != 0
1562 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
1563 && (REG_P (result_rtl)
1564 || (GET_CODE (result_rtl) == PARALLEL)))
1566 /* Calculate the return value into a temporary (usually a pseudo
/* Build a const-qualified copy of the result type for the temporary. */
1568 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
1569 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
1571 val = assign_temp (nt, 0, 1);
1572 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
/* Ensure the value ends up in a register rather than memory. */
1573 val = force_not_mem (val);
1574 /* Return the calculated value. */
1575 expand_value_return (val);
1579 /* No hard reg used; calculate value into hard return reg. */
1580 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
1581 expand_value_return (result_rtl);
1585 /* Emit code to restore vital registers at the beginning of a nonlocal goto
1588 expand_nl_goto_receiver (void)
1592 /* Clobber the FP when we get here, so we have to make sure it's
1593 marked as used by this function. */
1594 emit_use (hard_frame_pointer_rtx);
1596 /* Mark the static chain as clobbered here so life information
1597 doesn't get messed up for it. */
1598 chain = targetm.calls.static_chain (current_function_decl, true);
1599 if (chain && REG_P (chain))
1600 emit_clobber (chain);
/* Targets with a "nonlocal_goto" pattern handle the FP restore in the
   pattern itself; only do it by hand when the pattern is unavailable. */
1602 #ifdef HAVE_nonlocal_goto
1603 if (! HAVE_nonlocal_goto)
1605 /* First adjust our frame pointer to its actual value. It was
1606 previously set to the start of the virtual area corresponding to
1607 the stacked variables when we branched here and now needs to be
1608 adjusted to the actual hardware fp value.
1610 Assignments are to virtual registers are converted by
1611 instantiate_virtual_regs into the corresponding assignment
1612 to the underlying register (fp in this case) that makes
1613 the original assignment true.
1614 So the following insn will actually be
1615 decrementing fp by STARTING_FRAME_OFFSET. */
1616 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
1618 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
1619 if (fixed_regs[ARG_POINTER_REGNUM])
1621 #ifdef ELIMINABLE_REGS
1622 /* If the argument pointer can be eliminated in favor of the
1623 frame pointer, we don't need to restore it. We assume here
1624 that if such an elimination is present, it can always be used.
1625 This is the case on all known machines; if we don't make this
1626 assumption, we do unnecessary saving on many machines. */
1627 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
1630 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
1631 if (elim_regs[i].from == ARG_POINTER_REGNUM
1632 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
/* I == ARRAY_SIZE means no arg-pointer -> frame-pointer elimination
   exists, so the arg pointer must be reloaded explicitly. */
1635 if (i == ARRAY_SIZE (elim_regs))
1638 /* Now restore our arg pointer from the address at which it
1639 was saved in our stack frame. */
1640 emit_move_insn (crtl->args.internal_arg_pointer,
1641 copy_to_reg (get_arg_pointer_save_area ()));
1646 #ifdef HAVE_nonlocal_goto_receiver
1647 if (HAVE_nonlocal_goto_receiver)
1648 emit_insn (gen_nonlocal_goto_receiver ());
1651 /* We must not allow the code we just generated to be reordered by
1652 scheduling. Specifically, the update of the frame pointer must
1653 happen immediately, not later. */
1654 emit_insn (gen_blockage ());
1657 /* Emit code to save the current value of stack. */
1659 expand_stack_save (void)
/* Settle pending adjustments first so the saved pointer is accurate. */
1663 do_pending_stack_adjust ();
1664 emit_stack_save (SAVE_BLOCK, &ret);
1668 /* Emit code to restore the current value of stack. */
1670 expand_stack_restore (tree var)
1672 rtx prev, sa = expand_normal (var);
/* The saved pointer may have been expanded in ptr_mode; normalize to
   Pmode before handing it to the stack-restore expander. */
1674 sa = convert_memory_address (Pmode, sa);
1676 prev = get_last_insn ();
1677 emit_stack_restore (SAVE_BLOCK, sa);
/* Annotate the insns just emitted with correct REG_ARGS_SIZE notes. */
1678 fixup_args_size_notes (prev, get_last_insn (), 0);
1681 /* Generate code to jump to LABEL if OP0 and OP1 are equal in mode MODE. PROB
1682 is the probability of jumping to LABEL. */
1684 do_jump_if_equal (enum machine_mode mode, rtx op0, rtx op1, rtx label,
1685 int unsignedp, int prob)
/* PROB is on the REG_BR_PROB_BASE scale (or negative for "unknown"). */
1687 gcc_assert (prob <= REG_BR_PROB_BASE);
1688 do_compare_rtx_and_jump (op0, op1, EQ, unsignedp, mode,
1689 NULL_RTX, NULL_RTX, label, prob);
1692 /* Do the insertion of a case label into case_list. The labels are
1693 fed to us in descending order from the sorted vector of case labels used
1694 in the tree part of the middle end. So the list we construct is
1695 sorted in ascending order.
1697 LABEL is the case label to be inserted. LOW and HIGH are the bounds
1698 against which the index is compared to jump to LABEL and PROB is the
1699 estimated probability LABEL is reached from the switch statement. */
1701 static struct case_node *
1702 add_case_node (struct case_node *head, tree low, tree high,
1703 tree label, int prob, alloc_pool case_node_pool)
1705 struct case_node *r;
/* Single-valued cases are canonicalized to HIGH == LOW by the caller,
   so both bounds must be present and share a type. */
1707 gcc_checking_assert (low);
1708 gcc_checking_assert (high && (TREE_TYPE (low) == TREE_TYPE (high)));
1710 /* Add this label to the chain. */
1711 r = (struct case_node *) pool_alloc (case_node_pool);
1714 r->code_label = label;
1715 r->parent = r->left = NULL;
/* subtree_prob starts as the node's own probability; children's
   probabilities are folded in later by balance_case_nodes. */
1717 r->subtree_prob = prob;
1722 /* Dump ROOT, a list or tree of case nodes, to file. */
1725 dump_case_nodes (FILE *f, struct case_node *root,
1726 int indent_step, int indent_level)
1728 HOST_WIDE_INT low, high;
/* In-order traversal: left subtree, this node, right subtree, so the
   dump comes out in ascending case-value order. */
1734 dump_case_nodes (f, root->left, indent_step, indent_level);
1736 low = tree_low_cst (root->low, 0);
1737 high = tree_low_cst (root->high, 0);
/* A single value prints as "N"; a range prints as "N ... M". */
1741 fprintf(f, "%*s" HOST_WIDE_INT_PRINT_DEC,
1742 indent_step * indent_level, "", low);
1744 fprintf(f, "%*s" HOST_WIDE_INT_PRINT_DEC " ... " HOST_WIDE_INT_PRINT_DEC,
1745 indent_step * indent_level, "", low, high);
1748 dump_case_nodes (f, root->right, indent_step, indent_level);
1752 #define HAVE_casesi 0
1755 #ifndef HAVE_tablejump
1756 #define HAVE_tablejump 0
1759 /* Return the smallest number of different values for which it is best to use a
1760 jump-table instead of a tree of conditional branches. */
1763 case_values_threshold (void)
1765 unsigned int threshold = PARAM_VALUE (PARAM_CASE_VALUES_THRESHOLD);
/* Presumably taken only when the param is unset (zero) -- the guard is
   not visible in this excerpt; the target then supplies its default. */
1768 threshold = targetm.case_values_threshold ();
1773 /* Return true if a switch should be expanded as a decision tree.
1774 RANGE is the difference between highest and lowest case.
1775 UNIQ is number of unique case node targets, not counting the default case.
1776 COUNT is the number of comparisons needed, not counting the default case. */
1779 expand_switch_as_decision_tree_p (tree range,
1780 unsigned int uniq ATTRIBUTE_UNUSED,
1785 /* If neither casesi or tablejump is available, or flag_jump_tables
1786 over-ruled us, we really have no choice. */
1787 if (!HAVE_casesi && !HAVE_tablejump)
1789 if (!flag_jump_tables)
/* NOTE(review): without ASM_OUTPUT_ADDR_DIFF_ELT a PC-relative table
   cannot be emitted, presumably forcing a decision tree for PIC code
   here -- the guarded body is not visible in this excerpt. */
1791 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
1796 /* If the switch is relatively small such that the cost of one
1797 indirect jump on the target are higher than the cost of a
1798 decision tree, go with the decision tree.
1800 If range of values is much bigger than number of values,
1801 or if it is too large to represent in a HOST_WIDE_INT,
1802 make a sequence of conditional branches instead of a dispatch.
1804 The definition of "much bigger" depends on whether we are
1805 optimizing for size or for speed. If the former, the maximum
1806 ratio range/count = 3, because this was found to be the optimal
1807 ratio for size on i686-pc-linux-gnu, see PR11823. The ratio
1808 10 is much older, and was probably selected after an extensive
1809 benchmarking investigation on numerous platforms. Or maybe it
1810 just made sense to someone at some point in the history of GCC,
1812 max_ratio = optimize_insn_for_size_p () ? 3 : 10;
1813 if (count < case_values_threshold ()
1814 || ! host_integerp (range, /*pos=*/1)
1815 || compare_tree_int (range, max_ratio * count) > 0)
1821 /* Generate a decision tree, switching on INDEX_EXPR and jumping to
1822 one of the labels in CASE_LIST or to the DEFAULT_LABEL.
1823 DEFAULT_PROB is the estimated probability that it jumps to
1826 We generate a binary decision tree to select the appropriate target
1827 code. This is done as follows:
1829 If the index is a short or char that we do not have
1830 an insn to handle comparisons directly, convert it to
1831 a full integer now, rather than letting each comparison
1832 generate the conversion.
1834 Load the index into a register.
1836 The list of cases is rearranged into a binary tree,
1837 nearly optimal assuming equal probability for each case.
1839 The tree is transformed into RTL, eliminating redundant
1840 test conditions at the same time.
1842 If program flow could reach the end of the decision tree
1843 an unconditional jump to the default code is emitted.
1845 The above process is unaware of the CFG. The caller has to fix up
1846 the CFG itself. This is done in cfgexpand.c. */
1849 emit_case_decision_tree (tree index_expr, tree index_type,
1850 struct case_node *case_list, rtx default_label,
1853 rtx index = expand_normal (index_expr);
/* Widen the index to the narrowest mode that has a compare insn. */
1855 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
1856 && ! have_insn_for (COMPARE, GET_MODE (index)))
1858 int unsignedp = TYPE_UNSIGNED (index_type);
1859 enum machine_mode wider_mode;
1860 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
1861 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
1862 if (have_insn_for (COMPARE, wider_mode))
1864 index = convert_to_mode (wider_mode, index, unsignedp);
1869 do_pending_stack_adjust ();
/* The decision tree compares INDEX many times; keep it in a pseudo. */
1873 index = copy_to_reg (index);
1874 if (TREE_CODE (index_expr) == SSA_NAME)
1875 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (index_expr), index);
/* Turn the sorted list into a nearly balanced binary tree in place. */
1878 balance_case_nodes (&case_list, NULL);
1880 if (dump_file && (dump_flags & TDF_DETAILS))
1882 int indent_step = ceil_log2 (TYPE_PRECISION (index_type)) + 2;
1883 fprintf (dump_file, ";; Expanding GIMPLE switch as decision tree:\n");
1884 dump_case_nodes (dump_file, case_list, indent_step, 0);
1887 emit_case_nodes (index, case_list, default_label, default_prob, index_type);
/* Catch fall-through off the end of the tree. */
1889 emit_jump (default_label);
1892 /* Return the sum of probabilities of outgoing edges of basic block BB. */
1895 get_outgoing_edge_probs (basic_block bb)
1902 FOR_EACH_EDGE(e, ei, bb->succs)
1903 prob_sum += e->probability;
1907 /* Computes the conditional probability of jumping to a target if the branch
1908 instruction is executed.
1909 TARGET_PROB is the estimated probability of jumping to a target relative
1910 to some basic block BB.
1911 BASE_PROB is the probability of reaching the branch instruction relative
1912 to the same basic block BB. */
1915 conditional_probability (int target_prob, int base_prob)
1919 gcc_assert (target_prob >= 0);
1920 gcc_assert (target_prob <= base_prob);
/* Scale the ratio onto the REG_BR_PROB_BASE range, rounding. */
1921 return RDIV (target_prob * REG_BR_PROB_BASE, base_prob);
1926 /* Generate a dispatch tabler, switching on INDEX_EXPR and jumping to
1927 one of the labels in CASE_LIST or to the DEFAULT_LABEL.
1928 MINVAL, MAXVAL, and RANGE are the extrema and range of the case
1929 labels in CASE_LIST. STMT_BB is the basic block containing the statement.
1931 First, a jump insn is emitted. First we try "casesi". If that
1932 fails, try "tablejump". A target *must* have one of them (or both).
1934 Then, a table with the target labels is emitted.
1936 The process is unaware of the CFG. The caller has to fix up
1937 the CFG itself. This is done in cfgexpand.c. */
1940 emit_case_dispatch_table (tree index_expr, tree index_type,
1941 struct case_node *case_list, rtx default_label,
1942 tree minval, tree maxval, tree range,
1943 basic_block stmt_bb)
1946 struct case_node *n;
/* If the default label is unreachable we still need some label for the
   gaps in the table; use the first case's label as the fallback. */
1948 rtx fallback_label = label_rtx (case_list->code_label);
1949 rtx table_label = gen_label_rtx ();
1950 bool has_gaps = false;
1951 edge default_edge = stmt_bb ? EDGE_SUCC(stmt_bb, 0) : NULL;
1952 int default_prob = default_edge ? default_edge->probability : 0;
1953 int base = get_outgoing_edge_probs (stmt_bb);
1954 bool try_with_tablejump = false;
1956 int new_default_prob = conditional_probability (default_prob,
1959 if (! try_casesi (index_type, index_expr, minval, range,
1960 table_label, default_label, fallback_label,
1963 /* Index jumptables from zero for suitable values of minval to avoid
1964 a subtraction. For the rationale see:
1965 "http://gcc.gnu.org/ml/gcc-patches/2001-10/msg01234.html". */
1966 if (optimize_insn_for_speed_p ()
1967 && compare_tree_int (minval, 0) > 0
1968 && compare_tree_int (minval, 3) < 0)
/* Start the table at zero; the range grows accordingly and the
   leading slots become gaps that jump to the default. */
1970 minval = build_int_cst (index_type, 0);
1974 try_with_tablejump = true;
1977 /* Get table of labels to jump to, in order of case index. */
1979 ncases = tree_low_cst (range, 0) + 1;
1980 labelvec = XALLOCAVEC (rtx, ncases);
1981 memset (labelvec, 0, ncases * sizeof (rtx));
1983 for (n = case_list; n; n = n->right)
1985 /* Compute the low and high bounds relative to the minimum
1986 value since that should fit in a HOST_WIDE_INT while the
1987 actual values may not. */
1989 = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
1990 n->low, minval), 1);
1991 HOST_WIDE_INT i_high
1992 = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
1993 n->high, minval), 1);
1996 for (i = i_low; i <= i_high; i ++)
1998 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
2001 /* Fill in the gaps with the default. We may have gaps at
2002 the beginning if we tried to avoid the minval subtraction,
2003 so substitute some label even if the default label was
2004 deemed unreachable. */
2006 default_label = fallback_label;
2007 for (i = 0; i < ncases; i++)
2008 if (labelvec[i] == 0)
2011 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
2016 /* There is at least one entry in the jump table that jumps
2017 to default label. The default label can either be reached
2018 through the indirect jump or the direct conditional jump
2019 before that. Split the probability of reaching the
2020 default label among these two jumps. */
2021 new_default_prob = conditional_probability (default_prob/2,
2024 base -= default_prob;
2028 base -= default_prob;
2033 default_edge->probability = default_prob;
2035 /* We have altered the probability of the default edge. So the probabilities
2036 of all other edges need to be adjusted so that it sums up to
2037 REG_BR_PROB_BASE. */
2042 FOR_EACH_EDGE (e, ei, stmt_bb->succs)
2043 e->probability = RDIV (e->probability * REG_BR_PROB_BASE, base);
2046 if (try_with_tablejump)
2048 bool ok = try_tablejump (index_type, index_expr, minval, range,
2049 table_label, default_label, new_default_prob);
2052 /* Output the table. */
2053 emit_label (table_label);
/* PC-relative entries (ADDR_DIFF_VEC) are required for PIC; absolute
   entries (ADDR_VEC) are used otherwise. */
2055 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
2056 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
2057 gen_rtx_LABEL_REF (Pmode, table_label),
2058 gen_rtvec_v (ncases, labelvec),
2059 const0_rtx, const0_rtx));
2061 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
2062 gen_rtvec_v (ncases, labelvec)));
2064 /* Record no drop-through after the table. */
2068 /* Reset the aux field of all outgoing edges of basic block BB. */
2071 reset_out_edges_aux (basic_block bb)
/* Loop body (presumably clearing e->aux) is not visible in this excerpt. */
2075 FOR_EACH_EDGE(e, ei, bb->succs)
2079 /* Compute the number of case labels that correspond to each outgoing edge of
2080 STMT. Record this information in the aux field of the edge. */
2083 compute_cases_per_edge (gimple stmt)
2085 basic_block bb = gimple_bb (stmt);
2086 reset_out_edges_aux (bb);
2087 int ncases = gimple_switch_num_labels (stmt);
/* Label 0 is the default case; skip it. */
2088 for (int i = ncases - 1; i >= 1; --i)
2090 tree elt = gimple_switch_label (stmt, i);
2091 tree lab = CASE_LABEL (elt);
2092 basic_block case_bb = label_to_block_fn (cfun, lab);
2093 edge case_edge = find_edge (bb, case_bb);
/* The aux pointer itself is used as an integer counter. */
2094 case_edge->aux = (void *)((intptr_t)(case_edge->aux) + 1);
2098 /* Terminate a case (Pascal/Ada) or switch (C) statement
2099 in which ORIG_INDEX is the expression to be tested.
2100 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
2101 type as given in the source before any compiler conversions.
2102 Generate the code to test it and jump to the right place. */
2105 expand_case (gimple stmt)
2107 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
2108 rtx default_label = NULL_RTX;
2109 unsigned int count, uniq;
2111 int ncases = gimple_switch_num_labels (stmt);
2112 tree index_expr = gimple_switch_index (stmt);
2113 tree index_type = TREE_TYPE (index_expr);
2115 basic_block bb = gimple_bb (stmt);
2117 /* A list of case labels; it is first built as a list and it may then
2118 be rearranged into a nearly balanced binary tree. */
2119 struct case_node *case_list = 0;
2121 /* A pool for case nodes. */
2122 alloc_pool case_node_pool;
2124 /* An ERROR_MARK occurs for various reasons including invalid data type.
2125 ??? Can this still happen, with GIMPLE and all? */
2126 if (index_type == error_mark_node)
2129 /* cleanup_tree_cfg removes all SWITCH_EXPR with their index
2130 expressions being INTEGER_CST. */
2131 gcc_assert (TREE_CODE (index_expr) != INTEGER_CST);
2133 case_node_pool = create_alloc_pool ("struct case_node pool",
2134 sizeof (struct case_node),
2137 do_pending_stack_adjust ();
2139 /* Find the default case target label. */
2140 default_label = label_rtx (CASE_LABEL (gimple_switch_default_label (stmt)));
/* By convention the default edge is successor 0 of the switch block. */
2141 edge default_edge = EDGE_SUCC(bb, 0);
2142 int default_prob = default_edge->probability;
2144 /* Get upper and lower bounds of case values. */
2145 elt = gimple_switch_label (stmt, 1);
2146 minval = fold_convert (index_type, CASE_LOW (elt));
2147 elt = gimple_switch_label (stmt, ncases - 1);
2148 if (CASE_HIGH (elt))
2149 maxval = fold_convert (index_type, CASE_HIGH (elt));
2151 maxval = fold_convert (index_type, CASE_LOW (elt));
2153 /* Compute span of values. */
2154 range = fold_build2 (MINUS_EXPR, index_type, maxval, minval);
2156 /* Listify the labels queue and gather some numbers to decide
2157 how to expand this switch(). */
2160 struct pointer_set_t *seen_labels = pointer_set_create ();
2161 compute_cases_per_edge (stmt);
/* Iterate high-to-low so add_case_node builds an ascending list. */
2163 for (i = ncases - 1; i >= 1; --i)
2165 elt = gimple_switch_label (stmt, i);
2166 tree low = CASE_LOW (elt);
2168 tree high = CASE_HIGH (elt);
2169 gcc_assert (! high || tree_int_cst_lt (low, high));
2170 tree lab = CASE_LABEL (elt);
2172 /* Count the elements.
2173 A range counts double, since it requires two compares. */
2178 /* If we have not seen this label yet, then increase the
2179 number of unique case node targets seen. */
2180 if (!pointer_set_insert (seen_labels, lab))
2183 /* The bounds on the case range, LOW and HIGH, have to be converted
2184 to case's index type TYPE. Note that the original type of the
2185 case index in the source code is usually "lost" during
2186 gimplification due to type promotion, but the case labels retain the
2187 original type. Make sure to drop overflow flags. */
2188 low = fold_convert (index_type, low);
2189 if (TREE_OVERFLOW (low))
2190 low = build_int_cst_wide (index_type,
2191 TREE_INT_CST_LOW (low),
2192 TREE_INT_CST_HIGH (low));
2194 /* The canonical from of a case label in GIMPLE is that a simple case
2195 has an empty CASE_HIGH. For the casesi and tablejump expanders,
2196 the back ends want simple cases to have high == low. */
2199 high = fold_convert (index_type, high);
2200 if (TREE_OVERFLOW (high))
2201 high = build_int_cst_wide (index_type,
2202 TREE_INT_CST_LOW (high),
2203 TREE_INT_CST_HIGH (high));
2205 basic_block case_bb = label_to_block_fn (cfun, lab);
2206 edge case_edge = find_edge (bb, case_bb);
/* The edge probability is split evenly among this edge's case labels;
   case_edge->aux holds that count (set by compute_cases_per_edge). */
2207 case_list = add_case_node (
2208 case_list, low, high, lab,
2209 case_edge->probability / (intptr_t)(case_edge->aux),
2212 pointer_set_destroy (seen_labels);
2213 reset_out_edges_aux (bb);
2215 /* cleanup_tree_cfg removes all SWITCH_EXPR with a single
2216 destination, such as one with a default case only.
2217 It also removes cases that are out of range for the switch
2218 type, so we should never get a zero here. */
2219 gcc_assert (count > 0);
2221 rtx before_case = get_last_insn ();
2223 /* Decide how to expand this switch.
2224 The two options at this point are a dispatch table (casesi or
2225 tablejump) or a decision tree. */
2227 if (expand_switch_as_decision_tree_p (range, uniq, count))
2228 emit_case_decision_tree (index_expr, index_type,
2229 case_list, default_label,
2232 emit_case_dispatch_table (index_expr, index_type,
2233 case_list, default_label,
2234 minval, maxval, range, bb);
/* Move the index computation ahead of the case-expansion insns. */
2236 reorder_insns (NEXT_INSN (before_case), get_last_insn (), before_case);
2239 free_alloc_pool (case_node_pool);
2242 /* Expand the dispatch to a short decrement chain if there are few cases
2243 to dispatch to. Likewise if neither casesi nor tablejump is available,
2244 or if flag_jump_tables is set. Otherwise, expand as a casesi or a
2245 tablejump. The index mode is always the mode of integer_type_node.
2246 Trap if no case matches the index.
2248 DISPATCH_INDEX is the index expression to switch on. It should be a
2249 memory or register operand.
2251 DISPATCH_TABLE is a set of case labels. The set should be sorted in
2252 ascending order, be contiguous, starting with value 0, and contain only
2253 single-valued case labels. */
2256 expand_sjlj_dispatch_table (rtx dispatch_index,
2257 vec<tree> dispatch_table)
2259 tree index_type = integer_type_node;
2260 enum machine_mode index_mode = TYPE_MODE (index_type);
2262 int ncases = dispatch_table.length ();
2264 do_pending_stack_adjust ();
2265 rtx before_case = get_last_insn ();
2267 /* Expand as a decrement-chain if there are 5 or fewer dispatch
2268 labels. This covers more than 98% of the cases in libjava,
2269 and seems to be a reasonable compromise between the "old way"
2270 of expanding as a decision tree or dispatch table vs. the "new
2271 way" with decrement chain or dispatch table. */
2272 if (dispatch_table.length () <= 5
2273 || (!HAVE_casesi && !HAVE_tablejump)
2274 || !flag_jump_tables
2276 /* Expand the dispatch as a decrement chain:
2278 "switch(index) {case 0: do_0; case 1: do_1; ...; case N: do_N;}"
2282 if (index == 0) do_0; else index--;
2283 if (index == 0) do_1; else index--;
2285 if (index == 0) do_N; else index--;
2287 This is more efficient than a dispatch table on most machines.
2288 The last "index--" is redundant but the code is trivially dead
2289 and will be cleaned up by later passes. */
2290 rtx index = copy_to_mode_reg (index_mode, dispatch_index);
2291 rtx zero = CONST0_RTX (index_mode);
2292 for (int i = 0; i < ncases; i++)
2294 tree elt = dispatch_table[i];
2295 rtx lab = label_rtx (CASE_LABEL (elt));
/* Probability -1 means "unknown" for the conditional jump. */
2296 do_jump_if_equal (index_mode, index, zero, lab, 0, -1);
2297 force_expand_binop (index_mode, sub_optab,
2298 index, CONST1_RTX (index_mode),
2299 index, 0, OPTAB_DIRECT);
2304 /* Similar to expand_case, but much simpler. */
2305 struct case_node *case_list = 0;
2306 alloc_pool case_node_pool = create_alloc_pool ("struct sjlj_case pool",
2307 sizeof (struct case_node),
2309 tree index_expr = make_tree (index_type, dispatch_index);
/* Per the function comment, labels start at 0 and are contiguous, so
   MINVAL is 0 and RANGE equals the last label's value. */
2310 tree minval = build_int_cst (index_type, 0);
2311 tree maxval = CASE_LOW (dispatch_table.last ());
2312 tree range = maxval;
2313 rtx default_label = gen_label_rtx ();
/* Build the ascending case list (inputs are single-valued: high == low). */
2315 for (int i = ncases - 1; i >= 0; --i)
2317 tree elt = dispatch_table[i];
2318 tree low = CASE_LOW (elt);
2319 tree lab = CASE_LABEL (elt);
2320 case_list = add_case_node (case_list, low, low, lab, 0, case_node_pool);
2323 emit_case_dispatch_table (index_expr, index_type,
2324 case_list, default_label,
2325 minval, maxval, range,
2326 BLOCK_FOR_INSN (before_case));
2327 emit_label (default_label);
2328 free_alloc_pool (case_node_pool);
2331 /* Dispatching something not handled? Trap! */
2332 expand_builtin_trap ();
2334 reorder_insns (NEXT_INSN (before_case), get_last_insn (), before_case);
2340 /* Take an ordered list of case nodes
2341 and transform them into a near optimal binary tree,
2342 on the assumption that any target code selection value is as
2343 likely as any other.
2345 The transformation is performed by splitting the ordered
2346 list into two equal sections plus a pivot. The parts are
2347 then attached to the pivot as left and right branches. Each
2348 branch is then transformed recursively. */
2351 balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
2363 /* Count the number of entries on branch. Also count the ranges. */
2367 if (!tree_int_cst_equal (np->low, np->high))
2376 /* Split this list if it is long enough for that to help. */
2380 /* If there are just three nodes, split at the middle one. */
2382 npp = &(*npp)->right;
2385 /* Find the place in the list that bisects the list's total cost,
2386 where ranges count as 2.
2387 Here I gets half the total cost. */
2388 i = (i + ranges + 1) / 2;
2391 /* Skip nodes while their cost does not reach that amount. */
2392 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
2397 npp = &(*npp)->right;
/* NP is now the pivot; the list before it becomes the left subtree
   and the list after it the right subtree. */
2402 np->parent = parent;
2405 /* Optimize each of the two split parts. */
2406 balance_case_nodes (&np->left, np);
2407 balance_case_nodes (&np->right, np);
/* Propagate subtree probabilities up to the pivot. */
2408 np->subtree_prob = np->prob;
2409 np->subtree_prob += np->left->subtree_prob;
2410 np->subtree_prob += np->right->subtree_prob;
2414 /* Else leave this branch as one level,
2415 but fill in `parent' fields. */
2417 np->parent = parent;
2418 np->subtree_prob = np->prob;
2419 for (; np->right; np = np->right)
2421 np->right->parent = np;
2422 (*head)->subtree_prob += np->right->subtree_prob;
2428 /* Search the parent sections of the case node tree
2429 to see if a test for the lower bound of NODE would be redundant.
2430 INDEX_TYPE is the type of the index expression.
2432 The instructions to generate the case decision tree are
2433 output in the same order as nodes are processed so it is
2434 known that if a parent node checks the range of the current
2435 node minus one that the current node is bounded at its lower
2436 span. Thus the test would be redundant. */
2439 node_has_low_bound (case_node_ptr node, tree index_type)
2442 case_node_ptr pnode;
2444 /* If the lower bound of this node is the lowest value in the index type,
2445 we need not test it. */
2447 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
2450 /* If this node has a left branch, the value at the left must be less
2451 than that at this node, so it cannot be bounded at the bottom and
2452 we need not bother testing any further. */
2457 low_minus_one = fold_build2 (MINUS_EXPR, TREE_TYPE (node->low),
2459 build_int_cst (TREE_TYPE (node->low), 1));
2461 /* If the subtraction above overflowed, we can't verify anything.
2462 Otherwise, look for a parent that tests our value - 1. */
/* low - 1 >= low means the subtraction wrapped around. */
2464 if (! tree_int_cst_lt (low_minus_one, node->low))
2467 for (pnode = node->parent; pnode; pnode = pnode->parent)
2468 if (tree_int_cst_equal (low_minus_one, pnode->high))
2474 /* Search the parent sections of the case node tree
2475 to see if a test for the upper bound of NODE would be redundant.
2476 INDEX_TYPE is the type of the index expression.
2478 The instructions to generate the case decision tree are
2479 output in the same order as nodes are processed so it is
2480 known that if a parent node checks the range of the current
2481 node plus one that the current node is bounded at its upper
2482 span. Thus the test would be redundant. */
/* NOTE(review): sampled extract -- return type, opening brace, the
   declaration of high_plus_one, and the return statements fall on
   missing lines.  Mirror image of node_has_low_bound above.  */
2485 node_has_high_bound (case_node_ptr node, tree index_type)
2488 case_node_ptr pnode;
2490 /* If there is no upper bound, obviously no test is needed. */
/* Unlike TYPE_MIN_VALUE in the low-bound case, TYPE_MAX_VALUE may be
   absent entirely, so that is checked first.  */
2492 if (TYPE_MAX_VALUE (index_type) == NULL)
2495 /* If the upper bound of this node is the highest value in the type
2496 of the index expression, we need not test against it. */
2498 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
2501 /* If this node has a right branch, the value at the right must be greater
2502 than that at this node, so it cannot be bounded at the top and
2503 we need not bother testing any further. */
/* Compute node->high + 1 as a tree constant, in node->high's own type.  */
2508 high_plus_one = fold_build2 (PLUS_EXPR, TREE_TYPE (node->high),
2510 build_int_cst (TREE_TYPE (node->high), 1));
2512 /* If the addition above overflowed, we can't verify anything.
2513 Otherwise, look for a parent that tests our value + 1. */
/* Wrap-around check: a genuine +1 must compare strictly greater than
   node->high; otherwise the addition overflowed.  */
2515 if (! tree_int_cst_lt (node->high, high_plus_one))
/* Walk the ancestor chain for a node whose LOW equals high + 1; such an
   ancestor has already excluded all larger values.  */
2518 for (pnode = node->parent; pnode; pnode = pnode->parent)
2519 if (tree_int_cst_equal (high_plus_one, pnode->low))
2525 /* Search the parent sections of the
2526 case node tree to see if both tests for the upper and lower
2527 bounds of NODE would be redundant. */
/* NOTE(review): sampled extract -- the return type (presumably static
   int) and braces are on missing lines.  A node is "bounded" when the
   ancestors' emitted tests already pin the index inside [low, high], so
   the caller can jump to the case label unconditionally.  */
2530 node_is_bounded (case_node_ptr node, tree index_type)
2532 return (node_has_low_bound (node, index_type)
2533 && node_has_high_bound (node, index_type));
2537 /* Emit step-by-step code to select a case for the value of INDEX.
2538 The thus generated decision tree follows the form of the
2539 case-node binary tree NODE, whose nodes represent test conditions.
2540 INDEX_TYPE is the type of the index of the switch.
2542 Care is taken to prune redundant tests from the decision tree
2543 by detecting any boundary conditions already checked by
2544 emitted rtx. (See node_has_high_bound, node_has_low_bound
2545 and node_is_bounded, above.)
2547 Where the test conditions can be shown to be redundant we emit
2548 an unconditional jump to the target code. As a further
2549 optimization, the subordinates of a tree node are examined to
2550 check for bounded nodes. In this case conditional and/or
2551 unconditional jumps as a result of the boundary check for the
2552 current node are arranged to target the subordinates associated
2553 code for out of bound conditions on the current node.
2555 We can assume that when control reaches the code generated here,
2556 the index value has already been compared with the parents
2557 of this node, and determined to be on the same side of each parent
2558 as this node is. Thus, if this node tests for the value 51,
2559 and a parent tested for 52, we don't need to consider
2560 the possibility of a value greater than 51. If another parent
2561 tests for the value 50, then this node need not test anything. */
/* NOTE(review): sampled extract -- the return type, braces, several
   emit_cmp_and_jump_insns argument lines (the convert_modes wrappers and
   trailing probability arguments), and the declarations of `probability`
   and (in one branch) `test_label` are on missing lines.  All visible
   statements are kept byte-identical; comments below only annotate the
   visible structure.  */
2564 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
2565 int default_prob, tree index_type)
2567 /* If INDEX has an unsigned type, we must make unsigned branches. */
2568 int unsignedp = TYPE_UNSIGNED (index_type);
2570 int prob = node->prob, subtree_prob = node->subtree_prob;
2571 enum machine_mode mode = GET_MODE (index);
2572 enum machine_mode imode = TYPE_MODE (index_type);
2574 /* Handle indices detected as constant during RTL expansion. */
2575 if (mode == VOIDmode)
2578 /* See if our parents have already tested everything for us.
2579 If they have, emit an unconditional jump for this node. */
2580 if (node_is_bounded (node, index_type))
2581 emit_jump (label_rtx (node->code_label));
/* ---- Case 1: NODE is single-valued (low == high). ---- */
2583 else if (tree_int_cst_equal (node->low, node->high))
2585 probability = conditional_probability (prob, subtree_prob + default_prob);
2586 /* Node is single valued. First see if the index expression matches
2587 this node and then check our children, if any. */
2588 do_jump_if_equal (mode, index,
2589 convert_modes (mode, imode,
2590 expand_normal (node->low),
2592 label_rtx (node->code_label), unsignedp, probability);
2593 /* Since this case is taken at this point, reduce its weight from
2595 subtree_prob -= prob;
2596 if (node->right != 0 && node->left != 0)
2598 /* This node has children on both sides.
2599 Dispatch to one side or the other
2600 by comparing the index value with this node's value.
2601 If one subtree is bounded, check that one first,
2602 so we can avoid real branches in the tree. */
2604 if (node_is_bounded (node->right, index_type))
2606 probability = conditional_probability (
2608 subtree_prob + default_prob);
2609 emit_cmp_and_jump_insns (index,
2612 expand_normal (node->high),
2614 GT, NULL_RTX, mode, unsignedp,
2615 label_rtx (node->right->code_label),
2617 emit_case_nodes (index, node->left, default_label, default_prob,
2621 else if (node_is_bounded (node->left, index_type))
2623 probability = conditional_probability (
2625 subtree_prob + default_prob);
2626 emit_cmp_and_jump_insns (index,
2629 expand_normal (node->high),
2631 LT, NULL_RTX, mode, unsignedp,
2632 label_rtx (node->left->code_label),
2634 emit_case_nodes (index, node->right, default_label, default_prob, index_type);
2637 /* If both children are single-valued cases with no
2638 children, finish up all the work. This way, we can save
2639 one ordered comparison. */
2640 else if (tree_int_cst_equal (node->right->low, node->right->high)
2641 && node->right->left == 0
2642 && node->right->right == 0
2643 && tree_int_cst_equal (node->left->low, node->left->high)
2644 && node->left->left == 0
2645 && node->left->right == 0)
2647 /* Neither node is bounded. First distinguish the two sides;
2648 then emit the code for one side at a time. */
/* Two equality tests in sequence replace one ordered comparison plus
   two recursive calls.  */
2650 /* See if the value matches what the right hand side
2652 probability = conditional_probability (
2654 subtree_prob + default_prob);
2655 do_jump_if_equal (mode, index,
2656 convert_modes (mode, imode,
2657 expand_normal (node->right->low),
2659 label_rtx (node->right->code_label),
2660 unsignedp, probability);
2662 /* See if the value matches what the left hand side
2664 probability = conditional_probability (
2666 subtree_prob + default_prob);
2667 do_jump_if_equal (mode, index,
2668 convert_modes (mode, imode,
2669 expand_normal (node->left->low),
2671 label_rtx (node->left->code_label),
2672 unsignedp, probability);
2677 /* Neither node is bounded. First distinguish the two sides;
2678 then emit the code for one side at a time. */
/* General case: synthesize a fresh LABEL_DECL (assigned to test_label,
   whose declaration line is missing here) so the right subtree can be
   emitted after the left one.  */
2681 = build_decl (curr_insn_location (),
2682 LABEL_DECL, NULL_TREE, NULL_TREE);
2684 /* The default label could be reached either through the right
2685 subtree or the left subtree. Divide the probability
2687 probability = conditional_probability (
2688 node->right->subtree_prob + default_prob/2,
2689 subtree_prob + default_prob);
2690 /* See if the value is on the right. */
2691 emit_cmp_and_jump_insns (index,
2694 expand_normal (node->high),
2696 GT, NULL_RTX, mode, unsignedp,
2697 label_rtx (test_label),
2701 /* Value must be on the left.
2702 Handle the left-hand subtree. */
2703 emit_case_nodes (index, node->left, default_label, default_prob, index_type);
2704 /* If left-hand subtree does nothing,
2707 emit_jump (default_label);
2709 /* Code branches here for the right-hand subtree. */
2710 expand_label (test_label);
2711 emit_case_nodes (index, node->right, default_label, default_prob, index_type);
2715 else if (node->right != 0 && node->left == 0)
2717 /* Here we have a right child but no left so we issue a conditional
2718 branch to default and process the right child.
2720 Omit the conditional branch to default if the right child
2721 does not have any children and is single valued; it would
2722 cost too much space to save so little time. */
2724 if (node->right->right || node->right->left
2725 || !tree_int_cst_equal (node->right->low, node->right->high))
2727 if (!node_has_low_bound (node, index_type))
2729 probability = conditional_probability (
2731 subtree_prob + default_prob);
2732 emit_cmp_and_jump_insns (index,
2735 expand_normal (node->high),
2737 LT, NULL_RTX, mode, unsignedp,
2743 emit_case_nodes (index, node->right, default_label, default_prob, index_type);
2747 probability = conditional_probability (
2748 node->right->subtree_prob,
2749 subtree_prob + default_prob);
2750 /* We cannot process node->right normally
2751 since we haven't ruled out the numbers less than
2752 this node's value. So handle node->right explicitly. */
2753 do_jump_if_equal (mode, index,
2756 expand_normal (node->right->low),
2758 label_rtx (node->right->code_label), unsignedp, probability);
2762 else if (node->right == 0 && node->left != 0)
2764 /* Just one subtree, on the left. */
/* Mirror of the right-child-only branch above, with the bound check and
   comparison directions reversed.  */
2765 if (node->left->left || node->left->right
2766 || !tree_int_cst_equal (node->left->low, node->left->high))
2768 if (!node_has_high_bound (node, index_type))
2770 probability = conditional_probability (
2772 subtree_prob + default_prob);
2773 emit_cmp_and_jump_insns (index,
2776 expand_normal (node->high),
2778 GT, NULL_RTX, mode, unsignedp,
2784 emit_case_nodes (index, node->left, default_label,
2785 default_prob, index_type);
2789 probability = conditional_probability (
2790 node->left->subtree_prob,
2791 subtree_prob + default_prob);
2792 /* We cannot process node->left normally
2793 since we haven't ruled out the numbers less than
2794 this node's value. So handle node->left explicitly. */
2795 do_jump_if_equal (mode, index,
2798 expand_normal (node->left->low),
2800 label_rtx (node->left->code_label), unsignedp, probability);
/* ---- Case 2: NODE is a range (low != high). ---- */
2806 /* Node is a range. These cases are very similar to those for a single
2807 value, except that we do not start by testing whether this node
2808 is the one to branch to. */
2810 if (node->right != 0 && node->left != 0)
2812 /* Node has subtrees on both sides.
2813 If the right-hand subtree is bounded,
2814 test for it first, since we can go straight there.
2815 Otherwise, we need to make a branch in the control structure,
2816 then handle the two subtrees. */
2817 tree test_label = 0;
2819 if (node_is_bounded (node->right, index_type))
2821 /* Right hand node is fully bounded so we can eliminate any
2822 testing and branch directly to the target code. */
2823 probability = conditional_probability (
2824 node->right->subtree_prob,
2825 subtree_prob + default_prob);
2826 emit_cmp_and_jump_insns (index,
2829 expand_normal (node->high),
2831 GT, NULL_RTX, mode, unsignedp,
2832 label_rtx (node->right->code_label),
2837 /* Right hand node requires testing.
2838 Branch to a label where we will handle it later. */
2840 test_label = build_decl (curr_insn_location (),
2841 LABEL_DECL, NULL_TREE, NULL_TREE);
2842 probability = conditional_probability (
2843 node->right->subtree_prob + default_prob/2,
2844 subtree_prob + default_prob);
2845 emit_cmp_and_jump_insns (index,
2848 expand_normal (node->high),
2850 GT, NULL_RTX, mode, unsignedp,
2851 label_rtx (test_label),
2856 /* Value belongs to this node or to the left-hand subtree. */
/* index > high was dispatched above; index >= low now selects this
   node itself, else fall through into the left subtree.  */
2858 probability = conditional_probability (
2860 subtree_prob + default_prob);
2861 emit_cmp_and_jump_insns (index,
2864 expand_normal (node->low),
2866 GE, NULL_RTX, mode, unsignedp,
2867 label_rtx (node->code_label),
2870 /* Handle the left-hand subtree. */
2871 emit_case_nodes (index, node->left, default_label, default_prob, index_type);
2873 /* If right node had to be handled later, do that now. */
2877 /* If the left-hand subtree fell through,
2878 don't let it fall into the right-hand subtree. */
2880 emit_jump (default_label);
2882 expand_label (test_label);
2883 emit_case_nodes (index, node->right, default_label, default_prob, index_type);
2887 else if (node->right != 0 && node->left == 0)
2889 /* Deal with values to the left of this node,
2890 if they are possible. */
2891 if (!node_has_low_bound (node, index_type))
2893 probability = conditional_probability (
2895 subtree_prob + default_prob);
2896 emit_cmp_and_jump_insns (index,
2899 expand_normal (node->low),
2901 LT, NULL_RTX, mode, unsignedp,
2907 /* Value belongs to this node or to the right-hand subtree. */
2909 probability = conditional_probability (
2911 subtree_prob + default_prob);
2912 emit_cmp_and_jump_insns (index,
2915 expand_normal (node->high),
2917 LE, NULL_RTX, mode, unsignedp,
2918 label_rtx (node->code_label),
2921 emit_case_nodes (index, node->right, default_label, default_prob, index_type);
2924 else if (node->right == 0 && node->left != 0)
2926 /* Deal with values to the right of this node,
2927 if they are possible. */
2928 if (!node_has_high_bound (node, index_type))
2930 probability = conditional_probability (
2932 subtree_prob + default_prob);
2933 emit_cmp_and_jump_insns (index,
2936 expand_normal (node->high),
2938 GT, NULL_RTX, mode, unsignedp,
2944 /* Value belongs to this node or to the left-hand subtree. */
2946 probability = conditional_probability (
2948 subtree_prob + default_prob);
2949 emit_cmp_and_jump_insns (index,
2952 expand_normal (node->low),
2954 GE, NULL_RTX, mode, unsignedp,
2955 label_rtx (node->code_label),
2958 emit_case_nodes (index, node->left, default_label, default_prob, index_type);
/* Leaf range node: no children.  */
2963 /* Node has no children so we check low and high bounds to remove
2964 redundant tests. Only one of the bounds can exist,
2965 since otherwise this node is bounded--a case tested already. */
2966 int high_bound = node_has_high_bound (node, index_type);
2967 int low_bound = node_has_low_bound (node, index_type);
2969 if (!high_bound && low_bound)
2971 probability = conditional_probability (
2973 subtree_prob + default_prob);
2974 emit_cmp_and_jump_insns (index,
2977 expand_normal (node->high),
2979 GT, NULL_RTX, mode, unsignedp,
2984 else if (!low_bound && high_bound)
2986 probability = conditional_probability (
2988 subtree_prob + default_prob);
2989 emit_cmp_and_jump_insns (index,
2992 expand_normal (node->low),
2994 LT, NULL_RTX, mode, unsignedp,
2998 else if (!low_bound && !high_bound)
3000 /* Widen LOW and HIGH to the same width as INDEX. */
3001 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
3002 tree low = build1 (CONVERT_EXPR, type, node->low);
3003 tree high = build1 (CONVERT_EXPR, type, node->high);
3004 rtx low_rtx, new_index, new_bound;
3006 /* Instead of doing two branches, emit one unsigned branch for
3007 (index-low) > (high-low). */
/* Classic range-check trick: after subtracting LOW, any value outside
   [low, high] wraps to an unsigned value greater than high - low, so a
   single unsigned comparison suffices.  */
3008 low_rtx = expand_expr (low, NULL_RTX, mode, EXPAND_NORMAL);
3009 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
3010 NULL_RTX, unsignedp,
3012 new_bound = expand_expr (fold_build2 (MINUS_EXPR, type,
3014 NULL_RTX, mode, EXPAND_NORMAL);
3016 probability = conditional_probability (
3018 subtree_prob + default_prob);
3019 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
3020 mode, 1, default_label, probability);
/* All out-of-range values have branched to default; fall into an
   unconditional jump to this node's case label.  */
3023 emit_jump (label_rtx (node->code_label));