1 /* Expands front end tree to back end RTL for GCC
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
38 #include "coretypes.h"
47 #include "insn-config.h"
50 #include "hard-reg-set.h"
57 #include "langhooks.h"
63 /* Functions and data structures for expanding case statements. */
65 /* Case label structure, used to hold info on labels within case
66 statements. We handle "range" labels; for a single-value label
67 as in C, the high and low limits are the same.
69 We start with a vector of case nodes sorted in ascending order, and
70 the default label as the last element in the vector. Before expanding
71 to RTL, we transform this vector into a list linked via the RIGHT
72 fields in the case_node struct. Nodes with higher case values are
75 Switch statements can be output in three forms. A branch table is
76 used if there are more than a few labels and the labels are dense
77 within the range between the smallest and largest case value. If a
78 branch table is used, no further manipulations are done with the case
81 The alternative to the use of a branch table is to generate a series
82 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
83 and PARENT fields to hold a binary tree. Initially the tree is
84 totally unbalanced, with everything on the right. We balance the tree
85 with nodes on the left having lower case values than the parent
86 and nodes on the right having higher values. We then output the tree
89 For very small, suitable switch statements, we can generate a series
90 of simple bit test and branches instead. */
/* One case label of a switch statement, covering the value range
   LOW..HIGH (LOW == HIGH for a single-value C case label).  Nodes double
   as both a chained list (via RIGHT) and a binary search tree (LEFT /
   RIGHT / PARENT) -- see the overview comment above.
   NOTE(review): this excerpt elides lines of the original (gaps in the
   fused line numbers -- e.g. the struct's braces); code kept verbatim.  */
92 struct case_node GTY(())
94 struct case_node *left; /* Left son in binary tree */
95 struct case_node *right; /* Right son in binary tree; also node chain */
96 struct case_node *parent; /* Parent of node in binary tree */
97 tree low; /* Lowest index value for this label */
98 tree high; /* Highest index value for this label */
99 tree code_label; /* Label to jump to when node matches */
/* Convenience typedefs used by the case-handling helpers below.  */
102 typedef struct case_node case_node;
103 typedef struct case_node *case_node_ptr;
105 /* These are used by estimate_case_costs and balance_case_nodes. */
107 /* This must be a signed type, and non-ANSI compilers lack signed char. */
/* Per-value cost estimates for case comparisons; 129 slots presumably
   cover the signed-char range -128..127 plus one -- TODO confirm against
   estimate_case_costs, which is not visible in this excerpt.  */
108 static short cost_table_[129];
/* Nonzero when the cost table should be consulted (set elsewhere;
   the setter is not visible in this excerpt).  */
109 static int use_cost_table;
/* Nonzero once cost_table_ has been filled in, so it is built only once.  */
110 static int cost_table_initialized;
112 /* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned; the "+ 1" below biases the index so that -1 maps to
   slot 0 (comment truncated in this excerpt; completion inferred from
   the macro itself).  */
114 #define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
116 /* Stack of control and binding constructs we are currently inside.
118 These constructs begin when you call `expand_start_WHATEVER'
119 and end when you call `expand_end_WHATEVER'. This stack records
120 info about how the construct began that tells the end-function
121 what to do. It also may provide information about the construct
122 to alter the behavior of other constructs within the body.
123 For example, they may affect the behavior of C `break' and `continue'.
125 Each construct gets one `struct nesting' object.
126 All of these objects are chained through the `all' field.
127 `nesting_stack' points to the first object (innermost construct).
128 The position of an entry on `nesting_stack' is in its `depth' field.
130 Each type of construct has its own individual stack.
131 For example, loops have `cond_stack'. Each object points to the
132 next object of the same type through the `next' field.
134 Some constructs are visible to `break' exit-statements and others
135 are not. Which constructs are visible depends on the language.
136 Therefore, the data structure allows each construct to be visible
137 or not, according to the args given when the construct is started.
138 The construct is visible if the `exit_label' field is non-null.
139 In that case, the value should be a CODE_LABEL rtx. */
/* One entry of the nesting stack described above: a currently-open
   control/binding construct (cond, block, or case), discriminated by
   the GTY tag on the `data' union members below.
   NOTE(review): many field declarations (e.g. `all', `depth', the
   labels and rtx members referenced by the comments) are elided in
   this excerpt; code kept verbatim.  */
141 struct nesting GTY(())
144 struct nesting *next;
154 /* For conds (if-then and if-then-else statements). */
157 /* Label for the end of the if construct.
158 There is none if EXITFLAG was not set
159 and no `else' has been seen yet. */
161 /* Label for the end of this alternative.
162 This may be the end of the if or the next else/elseif. */
164 } GTY ((tag ("COND_NESTING"))) cond;
165 /* For variable binding contours. */
168 /* Sequence number of this binding contour within the function,
169 in order of entry. */
170 int block_start_count;
171 /* The NOTE that starts this contour.
172 Used by expand_goto to check whether the destination
173 is within each contour or not. */
175 /* The saved target_temp_slot_level from our outer block.
176 We may reset target_temp_slot_level to be the level of
177 this block, if that is done, target_temp_slot_level
178 reverts to the saved target_temp_slot_level at the very
180 int block_target_temp_slot_level;
181 } GTY ((tag ("BLOCK_NESTING"))) block;
182 /* For switch (C) or case (Pascal) statements. */
185 /* The insn after which the case dispatch should finally
186 be emitted. Zero for a dummy. */
188 /* A list of case labels; it is first built as an AVL tree.
189 During expand_end_case, this is converted to a list, and may be
190 rearranged into a nearly balanced binary tree. */
191 struct case_node *case_list;
192 /* Label to jump to if no case matches. */
194 /* The expression to be dispatched on. */
196 } GTY ((tag ("CASE_NESTING"))) case_stmt;
197 } GTY ((desc ("%1.desc"))) data;
200 /* Allocate and return a new `struct nesting'. */
/* GC-allocated so entries survive as long as the function's data.  */
202 #define ALLOC_NESTING() ggc_alloc (sizeof (struct nesting))
204 /* Pop the nesting stack element by element until we pop off
205 the element which is at the top of STACK.
206 Update all the other stacks, popping off elements from them
207 as we pop them from nesting_stack. */
/* Classic do { ... } while (0) multi-statement macro.  Each popped
   element is removed from whichever per-kind stack (cond/block/case)
   it heads, and nesting_stack follows the `all' chain.
   NOTE(review): `this' as a variable name precludes C++ compilation;
   kept verbatim (this is C-only code).  */
209 #define POPSTACK(STACK) \
210 do { struct nesting *target = STACK; \
211 struct nesting *this; \
212 do { this = nesting_stack; \
213 if (cond_stack == this) \
214 cond_stack = cond_stack->next; \
215 if (block_stack == this) \
216 block_stack = block_stack->next; \
217 if (case_stack == this) \
218 case_stack = case_stack->next; \
219 nesting_depth = nesting_stack->depth - 1; \
220 nesting_stack = this->all; } \
221 while (this != target); } while (0)
/* Per-function statement-expansion state, hung off cfun->stmt.
   The accessor macros below give the fields their historical global
   names.  NOTE(review): some lines (braces, x_nesting_depth field)
   are elided in this excerpt; code kept verbatim.  */
224 struct stmt_status GTY(())
226 /* Chain of all pending binding contours. */
227 struct nesting * x_block_stack;
229 /* If any new stacks are added here, add them to POPSTACKS too. */
231 /* Chain of all pending conditional statements. */
232 struct nesting * x_cond_stack;
234 /* Chain of all pending case or switch statements. */
235 struct nesting * x_case_stack;
237 /* Separate chain including all of the above,
238 chained through the `all' field. */
239 struct nesting * x_nesting_stack;
241 /* Number of entries on nesting_stack now. */
244 /* Number of binding contours started so far in this function. */
245 int x_block_start_count;
247 /* Location of last line-number note, whether we actually
248 emitted it or not. */
249 location_t x_emit_locus;
/* Accessors: the rest of this file uses these short names; all state
   actually lives in the current function's stmt_status.  */
252 #define block_stack (cfun->stmt->x_block_stack)
253 #define cond_stack (cfun->stmt->x_cond_stack)
254 #define case_stack (cfun->stmt->x_case_stack)
255 #define nesting_stack (cfun->stmt->x_nesting_stack)
256 #define nesting_depth (cfun->stmt->x_nesting_depth)
257 #define current_block_start_count (cfun->stmt->x_block_start_count)
258 #define emit_locus (cfun->stmt->x_emit_locus)
/* Forward declarations of file-local helpers (asm expansion, return
   expansion, and switch/case code generation).  */
260 static int n_occurrences (int, const char *);
261 static bool decl_conflicts_with_clobbers_p (tree, const HARD_REG_SET);
262 static void expand_nl_goto_receiver (void);
263 static bool check_operand_nalternatives (tree, tree);
264 static bool check_unique_operand_names (tree, tree);
265 static char *resolve_operand_name_1 (char *, tree, tree);
266 static void expand_null_return_1 (void);
267 static enum br_predictor return_prediction (rtx);
268 static rtx shift_return_value (rtx);
269 static void expand_value_return (rtx);
270 static void do_jump_if_equal (rtx, rtx, rtx, int);
271 static int estimate_case_costs (case_node_ptr);
272 static bool same_case_target_p (rtx, rtx);
273 static bool lshift_cheap_p (void);
274 static int case_bit_test_cmp (const void *, const void *);
275 static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
276 static void balance_case_nodes (case_node_ptr *, case_node_ptr);
277 static int node_has_low_bound (case_node_ptr, tree);
278 static int node_has_high_bound (case_node_ptr, tree);
279 static int node_is_bounded (case_node_ptr, tree);
280 static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
/* Create the zero-initialized per-function statement state
   (see struct stmt_status above).  ggc_alloc_cleared zeroes the
   allocation, so all stacks start empty.
   NOTE(review): return-type line and braces elided in this excerpt.  */
283 init_stmt_for_function (void)
285 cfun->stmt = ggc_alloc_cleared (sizeof (struct stmt_status));
288 /* Record the current file and line. Called from emit_line_note. */
291 set_file_and_line_for_stmt (location_t location)
293 /* If we're outputting an inline function, and we add a line note,
294 there may be no CFUN->STMT information. So, there's no need to
   record the location in that case -- presumably a cfun->stmt guard
   is elided just below (comment truncated in this excerpt).  */
297 emit_locus = location;
300 /* Emit a no-op instruction. */
/* NOTE(review): the function's name/signature line is elided in this
   excerpt (in upstream GCC this is emit_nop).  A nop is emitted only
   when the last insn is a label, or a note with no real insn before
   it -- presumably so the label/note has a real insn to attach to;
   confirm against upstream.  */
307 last_insn = get_last_insn ();
309 && (LABEL_P (last_insn)
310 || (NOTE_P (last_insn)
311 && prev_real_insn (last_insn) == 0)))
312 emit_insn (gen_nop ());
315 /* Return the rtx-label that corresponds to a LABEL_DECL,
316 creating it if necessary. */
319 label_rtx (tree label)
/* Guard: only LABEL_DECLs have rtx labels.  The action taken on
   failure is elided in this excerpt (upstream aborts).  */
321 if (TREE_CODE (label) != LABEL_DECL)
/* First use: create the CODE_LABEL and cache it in the decl.  */
324 if (!DECL_RTL_SET_P (label))
326 rtx r = gen_label_rtx ();
327 SET_DECL_RTL (label, r);
/* Labels whose address is taken, or that are nonlocal goto targets,
   must not be deleted by the optimizers.  */
328 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
329 LABEL_PRESERVE_P (r) = 1;
332 return DECL_RTL (label);
335 /* As above, but also put it on the forced-reference list of the
336 function that contains it. */
338 force_label_rtx (tree label)
340 rtx ref = label_rtx (label);
341 tree function = decl_function_context (label);
/* The label may belong to an outer (containing) function; in that case
   record the forced reference in *that* function's data, not ours.
   NOTE(review): the assignment of `p' for the same-function case is
   elided in this excerpt.  */
347 if (function != current_function_decl)
348 p = find_function_data (function);
352 p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
353 p->expr->x_forced_labels);
357 /* Add an unconditional jump to LABEL as the next sequential instruction. */
360 emit_jump (rtx label)
/* Flush any deferred stack-pointer adjustment before transferring
   control, so the stack is consistent at the jump target.  */
362 do_pending_stack_adjust ();
363 emit_jump_insn (gen_jump (label));
367 /* Emit code to jump to the address
368 specified by the pointer expression EXP. */
371 expand_computed_goto (tree exp)
373 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
/* Normalize the value to the machine's pointer mode before jumping.  */
375 x = convert_memory_address (Pmode, x);
377 do_pending_stack_adjust ();
378 emit_indirect_jump (x);
381 /* Handle goto statements and the labels that they can go to. */
383 /* Specify the location in the RTL code of a label LABEL,
384 which is a LABEL_DECL tree node.
386 This is used for the kind of label that the user can jump to with a
387 goto statement, and for alternatives of a switch or case statement.
388 RTL labels generated for loops and conditionals don't go through here;
389 they are generated directly at the RTL level, by other functions below.
391 Note that this has nothing to do with defining label *names*.
392 Languages vary in how they do that and what that even means. */
395 expand_label (tree label)
397 rtx label_r = label_rtx (label);
399 do_pending_stack_adjust ();
400 emit_label (label_r);
/* Attach the source-level name to the CODE_LABEL for debug output.  */
401 if (DECL_NAME (label))
402 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
/* A nonlocal-goto target needs receiver code and registration on the
   handler-labels list.  */
404 if (DECL_NONLOCAL (label))
406 expand_nl_goto_receiver ();
407 nonlocal_goto_handler_labels
408 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
409 nonlocal_goto_handler_labels);
/* Address-taken labels go on forced_labels so they are never deleted.  */
412 if (FORCED_LABEL (label))
413 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);
415 if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
416 maybe_set_first_label_num (label_r);
419 /* Generate RTL code for a `goto' statement with target label LABEL.
420 LABEL should be a LABEL_DECL tree node that was or will later be
421 defined with `expand_label'. */
424 expand_goto (tree label)
426 #ifdef ENABLE_CHECKING
427 /* Check for a nonlocal goto to a containing function. Should have
428 gotten translated to __builtin_nonlocal_goto. */
429 tree context = decl_function_context (label);
/* The consequence of this check (upstream: abort) is elided in this
   excerpt, as is the closing #endif.  */
430 if (context != 0 && context != current_function_decl)
434 emit_jump (label_rtx (label));
437 /* Return the number of times character C occurs in string S. */
/* NOTE(review): the body of this helper is elided in this excerpt.  */
439 n_occurrences (int c, const char *s)
447 /* Generate RTL for an asm statement (explicit assembler code).
448 STRING is a STRING_CST node containing the assembler code text,
449 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
450 insn is volatile; don't optimize it. */
453 expand_asm (tree string, int vol)
/* Strip the ADDR_EXPR wrapper to reach the STRING_CST itself.  */
457 if (TREE_CODE (string) == ADDR_EXPR)
458 string = TREE_OPERAND (string, 0);
460 body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));
/* Volatility is recorded on the ASM_INPUT rtx so optimizers leave it be.  */
462 MEM_VOLATILE_P (body) = vol;
467 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
468 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
469 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
470 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
471 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
472 constraint allows the use of a register operand. And, *IS_INOUT
473 will be true if the operand is read-write, i.e., if it is used as
474 an input as well as an output. If *CONSTRAINT_P is not in
475 canonical form, it will be made canonical. (Note that `+' will be
476 replaced with `=' as part of this process.)
478 Returns TRUE if all went well; FALSE if an error occurred. */
481 parse_output_constraint (const char **constraint_p, int operand_num,
482 int ninputs, int noutputs, bool *allows_mem,
483 bool *allows_reg, bool *is_inout)
485 const char *constraint = *constraint_p;
488 /* Assume the constraint doesn't allow the use of either a register
493 /* Allow the `=' or `+' to not be at the beginning of the string,
494 since it wasn't explicitly documented that way, and there is a
495 large body of code that puts it last. Swap the character to
496 the front, so as not to uglify any place else. */
497 p = strchr (constraint, '=');
499 p = strchr (constraint, '+');
501 /* If the string doesn't contain an `=', issue an error
505 error ("output operand constraint lacks `='");
509 /* If the constraint begins with `+', then the operand is both read
510 from and written to. */
511 *is_inout = (*p == '+');
513 /* Canonicalize the output constraint so that it begins with `='. */
/* NOTE(review): `is_inout' is a bool* -- as a pointer it is always
   non-null, so this disjunct is always true.  Upstream GCC tests
   `*is_inout' here; this looks like a dropped dereference.  Verify
   against upstream before changing -- code kept verbatim.  */
514 if (p != constraint || is_inout)
517 size_t c_len = strlen (constraint);
520 warning ("output constraint `%c' for operand %d is not at the beginning",
523 /* Make a copy of the constraint. */
524 buf = alloca (c_len + 1);
525 strcpy (buf, constraint);
526 /* Swap the first character and the `=' or `+'. */
527 buf[p - constraint] = buf[0];
528 /* Make sure the first character is an `='. (Until we do this,
529 it might be a `+'.) */
531 /* Replace the constraint with the canonicalized string. */
532 *constraint_p = ggc_alloc_string (buf, c_len);
533 constraint = *constraint_p;
536 /* Loop through the constraint string. */
/* CONSTRAINT_LEN handles multi-character constraints, so the loop
   advances by constraint, not by byte.  */
537 for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
542 error ("operand constraint contains incorrectly positioned '+' or '='");
546 if (operand_num + 1 == ninputs + noutputs)
548 error ("`%%' constraint used with last operand");
/* Memory-only constraints.  */
553 case 'V': case 'm': case 'o':
/* Modifiers and immediate/letter constraints that imply neither
   register nor memory by themselves.  */
557 case '?': case '!': case '*': case '&': case '#':
558 case 'E': case 'F': case 'G': case 'H':
559 case 's': case 'i': case 'n':
560 case 'I': case 'J': case 'K': case 'L': case 'M':
561 case 'N': case 'O': case 'P': case ',':
/* Digits are matching constraints -- not meaningful on an output.  */
564 case '0': case '1': case '2': case '3': case '4':
565 case '5': case '6': case '7': case '8': case '9':
567 error ("matching constraint not valid in output operand");
571 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
572 excepting those that expand_call created. So match memory
589 if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
591 #ifdef EXTRA_CONSTRAINT_STR
592 else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
594 else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
598 /* Otherwise we can't assume anything about the nature of
599 the constraint except that it isn't purely registers.
600 Treat it like "g" and hope for the best. */
611 /* Similar, but for input constraints.  Parses *CONSTRAINT_P for the
   INPUT_NUMth input operand and sets *ALLOWS_MEM / *ALLOWS_REG; a
   digit constraint is resolved against the matching output's
   constraint string from CONSTRAINTS.  Returns false on error.  */
614 parse_input_constraint (const char **constraint_p, int input_num,
615 int ninputs, int noutputs, int ninout,
616 const char * const * constraints,
617 bool *allows_mem, bool *allows_reg)
619 const char *constraint = *constraint_p;
620 const char *orig_constraint = constraint;
621 size_t c_len = strlen (constraint);
623 bool saw_match = false;
625 /* Assume the constraint doesn't allow the use of either
626 a register or memory. */
630 /* Make sure constraint has neither `=', `+', nor '&'. */
632 for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
633 switch (constraint[j])
635 case '+': case '=': case '&':
/* Only diagnose on the operand's own string, not on a constraint we
   substituted while following a matching digit.  */
636 if (constraint == orig_constraint)
638 error ("input operand constraint contains `%c'", constraint[j]);
644 if (constraint == orig_constraint
645 && input_num + 1 == ninputs - ninout)
647 error ("`%%' constraint used with last operand");
652 case 'V': case 'm': case 'o':
657 case '?': case '!': case '*': case '#':
658 case 'E': case 'F': case 'G': case 'H':
659 case 's': case 'i': case 'n':
660 case 'I': case 'J': case 'K': case 'L': case 'M':
661 case 'N': case 'O': case 'P': case ',':
664 /* Whether or not a numeric constraint allows a register is
665 decided by the matching constraint, and so there is no need
666 to do anything special with them. We must handle them in
667 the default case, so that we don't unnecessarily force
668 operands to memory. */
669 case '0': case '1': case '2': case '3': case '4':
670 case '5': case '6': case '7': case '8': case '9':
677 match = strtoul (constraint + j, &end, 10);
678 if (match >= (unsigned long) noutputs)
680 error ("matching constraint references invalid operand number");
684 /* Try and find the real constraint for this dup. Only do this
685 if the matching constraint is the only alternative. */
687 && (j == 0 || (j == 1 && constraint[0] == '%')))
689 constraint = constraints[match];
690 *constraint_p = constraint;
691 c_len = strlen (constraint);
693 /* ??? At the end of the loop, we will skip the first part of
694 the matched constraint. This assumes not only that the
695 other constraint is an output constraint, but also that
696 the '=' or '+' come first. */
700 j = end - constraint;
701 /* Anticipate increment at end of loop. */
716 if (! ISALPHA (constraint[j]))
718 error ("invalid punctuation `%c' in constraint", constraint[j]);
721 if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
724 #ifdef EXTRA_CONSTRAINT_STR
725 else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
727 else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
731 /* Otherwise we can't assume anything about the nature of
732 the constraint except that it isn't purely registers.
733 Treat it like "g" and hope for the best. */
/* A matching constraint that ends up register-forbidding is suspect:
   the matched output presumably wanted a register.  */
741 if (saw_match && !*allows_reg)
742 warning ("matching constraint does not allow a register")
747 /* INPUT is one of the input operands from EXPR, an ASM_EXPR. Returns true
748 if it is an operand which must be passed in memory (i.e. an "m"
749 constraint), false otherwise. */
752 asm_op_is_mem_input (tree input, tree expr)
754 const char *constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (input)));
755 tree outputs = ASM_OUTPUTS (expr);
756 int noutputs = list_length (outputs);
/* Stack-allocated scratch table of output constraint strings, needed
   by parse_input_constraint to resolve matching ("0".."9") digits.  */
757 const char **constraints
758 = (const char **) alloca ((noutputs) * sizeof (const char *));
760 bool allows_mem, allows_reg;
763 /* Collect output constraints. */
764 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
765 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
767 /* We pass 0 for input_num, ninputs and ninout; they are only used for
768 error checking which will be done at expand time. */
769 parse_input_constraint (&constraint, 0, 0, noutputs, 0, constraints,
770 &allows_mem, &allows_reg);
/* Memory-only: memory allowed, register not.  */
771 return (!allows_reg && allows_mem);
774 /* Check for overlap between registers marked in CLOBBERED_REGS and
775 anything inappropriate in DECL. Emit error and return TRUE for error,
   FALSE otherwise (tail of comment elided in this excerpt).  */
779 decl_conflicts_with_clobbers_p (tree decl, const HARD_REG_SET clobbered_regs)
781 /* Conflicts between asm-declared register variables and the clobber
782 list are not allowed. */
/* Only user variables/parameters explicitly placed in a hard register
   (register asm("r")) can conflict.  */
783 if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
784 && DECL_REGISTER (decl)
785 && REG_P (DECL_RTL (decl))
786 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
788 rtx reg = DECL_RTL (decl);
/* Walk every hard register the (possibly multi-word) value occupies.
   NOTE(review): the loop's condition/increment lines are partially
   elided in this excerpt.  */
791 for (regno = REGNO (reg);
793 + hard_regno_nregs[REGNO (reg)][GET_MODE (reg)]);
795 if (TEST_HARD_REG_BIT (clobbered_regs, regno))
797 error ("asm-specifier for variable `%s' conflicts with asm clobber list",
798 IDENTIFIER_POINTER (DECL_NAME (decl)));
800 /* Reset registerness to stop multiple errors emitted for a
   single variable (tail of comment elided in this excerpt).  */
802 DECL_REGISTER (decl) = 0;
809 /* Generate RTL for an asm statement with arguments.
810 STRING is the instruction template.
811 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
812 Each output or input has an expression in the TREE_VALUE and
813 and a tree list in TREE_PURPOSE which in turn contains a constraint
814 name in TREE_VALUE (or NULL_TREE) and a constraint string
816 CLOBBERS is a list of STRING_CST nodes each naming a hard register
817 that is clobbered by this insn.
819 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
820 Some elements of OUTPUTS may be replaced with trees representing temporary
821 values. The caller should copy those temporary values to the originally
824 VOL nonzero means the insn is volatile; don't optimize it. */
/* Expand an extended asm (asm with operands) into RTL: validate the
   output/input constraint strings, evaluate every operand, build an
   ASM_OPERANDS body (wrapped in a PARALLEL with SETs and CLOBBERs as
   needed), and emit it.  See the block comment above the function for
   the parameter contract.  NOTE(review): many lines (braces, several
   declarations and conditions) are elided in this excerpt -- fused
   line numbers show the gaps; code kept verbatim.  */
827 expand_asm_operands (tree string, tree outputs, tree inputs,
828 tree clobbers, int vol, location_t locus)
830 rtvec argvec, constraintvec;
832 int ninputs = list_length (inputs);
833 int noutputs = list_length (outputs);
836 HARD_REG_SET clobbered_regs;
837 int clobber_conflict_found = 0;
841 /* Vector of RTX's of evaluated output operands. */
/* Scratch arrays are alloca'd: sized by operand counts, live only for
   this call.  */
842 rtx *output_rtx = alloca (noutputs * sizeof (rtx));
843 int *inout_opnum = alloca (noutputs * sizeof (int));
844 rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
845 enum machine_mode *inout_mode
846 = alloca (noutputs * sizeof (enum machine_mode));
847 const char **constraints
848 = alloca ((noutputs + ninputs) * sizeof (const char *));
/* Saved so generating_concat_p can be restored after each region where
   it is temporarily cleared below.  */
849 int old_generating_concat_p = generating_concat_p;
851 /* An ASM with no outputs needs to be treated as volatile, for now. */
855 if (! check_operand_nalternatives (outputs, inputs))
858 string = resolve_asm_operand_names (string, outputs, inputs);
860 /* Collect constraints. */
/* Outputs first, then inputs, into one flat array.  */
862 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
863 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
864 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
865 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
867 /* Sometimes we wish to automatically clobber registers across an asm.
868 Case in point is when the i386 backend moved from cc0 to a hard reg --
869 maintaining source-level compatibility means automatically clobbering
870 the flags register. */
871 clobbers = targetm.md_asm_clobbers (clobbers);
873 /* Count the number of meaningful clobbered registers, ignoring what
874 we would ignore later. */
876 CLEAR_HARD_REG_SET (clobbered_regs);
877 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
879 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
/* decode_reg_name: >= 0 is a hard regno; -4 presumably "memory"
   (see the clobber-emission loop below); other negatives are special
   or unknown.  */
881 i = decode_reg_name (regname);
882 if (i >= 0 || i == -4)
885 error ("unknown register name `%s' in `asm'", regname);
887 /* Mark clobbered registers. */
890 /* Clobbering the PIC register is an error */
891 if (i == (int) PIC_OFFSET_TABLE_REGNUM)
893 error ("PIC register `%s' clobbered in `asm'", regname);
897 SET_HARD_REG_BIT (clobbered_regs, i);
901 /* First pass over inputs and outputs checks validity and sets
902 mark_addressable if needed. */
905 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
907 tree val = TREE_VALUE (tail);
908 tree type = TREE_TYPE (val);
909 const char *constraint;
914 /* If there's an erroneous arg, emit no insn. */
915 if (type == error_mark_node)
918 /* Try to parse the output constraint. If that fails, there's
919 no point in going further. */
920 constraint = constraints[i];
921 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
922 &allows_mem, &allows_reg, &is_inout))
929 && REG_P (DECL_RTL (val))
930 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
931 lang_hooks.mark_addressable (val);
938 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
940 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
/* Same first-pass validation for the inputs.  */
944 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
946 bool allows_reg, allows_mem;
947 const char *constraint;
949 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
950 would get VOIDmode and that could cause a crash in reload. */
951 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
954 constraint = constraints[i + noutputs];
955 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
956 constraints, &allows_mem, &allows_reg))
959 if (! allows_reg && allows_mem)
960 lang_hooks.mark_addressable (TREE_VALUE (tail));
963 /* Second pass evaluates arguments. */
966 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
968 tree val = TREE_VALUE (tail);
969 tree type = TREE_TYPE (val);
975 if (!parse_output_constraint (&constraints[i], i, ninputs,
976 noutputs, &allows_mem, &allows_reg,
980 /* If an output operand is not a decl or indirect ref and our constraint
981 allows a register, make a temporary to act as an intermediate.
982 Make the asm insn write into that, then our caller will copy it to
983 the real output operand. Likewise for promoted variables. */
985 generating_concat_p = 0;
987 real_output_rtx[i] = NULL_RTX;
988 if ((TREE_CODE (val) == INDIRECT_REF
991 && (allows_mem || REG_P (DECL_RTL (val)))
992 && ! (REG_P (DECL_RTL (val))
993 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
997 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
999 op = validize_mem (op);
1001 if (! allows_reg && !MEM_P (op))
1002 error ("output number %d not directly addressable", i);
1003 if ((! allows_mem && MEM_P (op))
1004 || GET_CODE (op) == CONCAT)
/* The asm writes a register temporary; the real destination is
   remembered in real_output_rtx and stored back after the asm.  */
1006 real_output_rtx[i] = op;
1007 op = gen_reg_rtx (GET_MODE (op));
1009 emit_move_insn (op, real_output_rtx[i]);
1014 op = assign_temp (type, 0, 0, 1);
1015 op = validize_mem (op);
1016 TREE_VALUE (tail) = make_tree (type, op);
1020 generating_concat_p = old_generating_concat_p;
/* Record read-write ("+") operands so matching inputs can be
   synthesized below.  */
1024 inout_mode[ninout] = TYPE_MODE (type);
1025 inout_opnum[ninout++] = i;
1028 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1029 clobber_conflict_found = 1;
1032 /* Make vectors for the expression-rtx, constraint strings,
1033 and named operands. */
1035 argvec = rtvec_alloc (ninputs);
1036 constraintvec = rtvec_alloc (ninputs);
/* The ASM_OPERANDS takes the mode of output 0, or VOIDmode when there
   are no outputs.  */
1038 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1039 : GET_MODE (output_rtx[0])),
1040 TREE_STRING_POINTER (string),
1041 empty_string, 0, argvec, constraintvec,
1044 MEM_VOLATILE_P (body) = vol;
1046 /* Eval the inputs and put them into ARGVEC.
1047 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1049 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
1051 bool allows_reg, allows_mem;
1052 const char *constraint;
1056 constraint = constraints[i + noutputs];
1057 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1058 constraints, &allows_mem, &allows_reg))
1061 generating_concat_p = 0;
1063 val = TREE_VALUE (tail);
1064 type = TREE_TYPE (val);
1065 op = expand_expr (val, NULL_RTX, VOIDmode,
1066 (allows_mem && !allows_reg
1067 ? EXPAND_MEMORY : EXPAND_NORMAL));
1069 /* Never pass a CONCAT to an ASM. */
1070 if (GET_CODE (op) == CONCAT)
1071 op = force_reg (GET_MODE (op), op);
1072 else if (MEM_P (op))
1073 op = validize_mem (op);
/* The evaluated operand does not satisfy its constraint: coerce it
   (register, memory temporary) or warn.  */
1075 if (asm_operand_ok (op, constraint) <= 0)
1078 op = force_reg (TYPE_MODE (type), op);
1079 else if (!allows_mem)
1080 warning ("asm operand %d probably doesn't match constraints",
1082 else if (MEM_P (op))
1084 /* We won't recognize either volatile memory or memory
1085 with a queued address as available a memory_operand
1086 at this point. Ignore it: clearly this *is* a memory. */
1090 warning ("use of memory input without lvalue in "
1091 "asm operand %d is deprecated", i + noutputs);
1093 if (CONSTANT_P (op))
1095 rtx mem = force_const_mem (TYPE_MODE (type), op);
1097 op = validize_mem (mem);
1099 op = force_reg (TYPE_MODE (type), op);
1102 || GET_CODE (op) == SUBREG
1103 || GET_CODE (op) == CONCAT)
/* Spill the value to a const-qualified stack temporary so it can be
   handed to the asm as memory.  */
1105 tree qual_type = build_qualified_type (type,
1107 | TYPE_QUAL_CONST));
1108 rtx memloc = assign_temp (qual_type, 1, 1, 1);
1109 memloc = validize_mem (memloc);
1110 emit_move_insn (memloc, op);
1116 generating_concat_p = old_generating_concat_p;
1117 ASM_OPERANDS_INPUT (body, i) = op;
1119 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1120 = gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
1122 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1123 clobber_conflict_found = 1;
1126 /* Protect all the operands from the queue now that they have all been
1129 generating_concat_p = 0;
1131 /* For in-out operands, copy output rtx to input rtx. */
/* Each "+" output gets an implicit trailing input whose constraint is
   the matching operand number, printed into BUFFER.  */
1132 for (i = 0; i < ninout; i++)
1134 int j = inout_opnum[i];
1137 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1140 sprintf (buffer, "%d", j);
1141 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1142 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
1145 generating_concat_p = old_generating_concat_p;
1147 /* Now, for each output, construct an rtx
1148 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1149 ARGVEC CONSTRAINTS OPNAMES))
1150 If there is more than one, put them inside a PARALLEL. */
1152 if (noutputs == 1 && nclobbers == 0)
1154 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1155 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1158 else if (noutputs == 0 && nclobbers == 0)
1160 /* No output operands: put in a raw ASM_OPERANDS rtx. */
/* General case: PARALLEL of one SET per output plus one CLOBBER per
   clobbered register.  */
1172 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1174 /* For each output operand, store a SET. */
1175 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1177 XVECEXP (body, 0, i)
1178 = gen_rtx_SET (VOIDmode,
1180 gen_rtx_ASM_OPERANDS
1181 (GET_MODE (output_rtx[i]),
1182 TREE_STRING_POINTER (string),
1183 constraints[i], i, argvec, constraintvec,
1186 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1189 /* If there are no outputs (but there are some clobbers)
1190 store the bare ASM_OPERANDS into the PARALLEL. */
1193 XVECEXP (body, 0, i++) = obody;
1195 /* Store (clobber REG) for each clobbered register specified. */
1197 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1199 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1200 int j = decode_reg_name (regname);
1205 if (j == -3) /* `cc', which is not a register */
1208 if (j == -4) /* `memory', don't cache memory across asm */
1210 XVECEXP (body, 0, i++)
1211 = gen_rtx_CLOBBER (VOIDmode,
1214 gen_rtx_SCRATCH (VOIDmode)));
1218 /* Ignore unknown register, error already signaled. */
1222 /* Use QImode since that's guaranteed to clobber just one reg. */
1223 clobbered_reg = gen_rtx_REG (QImode, j);
1225 /* Do sanity check for overlap between clobbers and respectively
1226 input and outputs that hasn't been handled. Such overlap
1227 should have been detected and reported above. */
1228 if (!clobber_conflict_found)
1232 /* We test the old body (obody) contents to avoid tripping
1233 over the under-construction body. */
1234 for (opno = 0; opno < noutputs; opno++)
1235 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1236 internal_error ("asm clobber conflict with output operand")
1238 for (opno = 0; opno < ninputs - ninout; opno++)
1239 if (reg_overlap_mentioned_p (clobbered_reg,
1240 ASM_OPERANDS_INPUT (obody, opno)))
1241 internal_error ("asm clobber conflict with input operand");
1244 XVECEXP (body, 0, i++)
1245 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1251 /* For any outputs that needed reloading into registers, spill them
1252 back to where they belong. */
1253 for (i = 0; i < noutputs; ++i)
1254 if (real_output_rtx[i])
1255 emit_move_insn (real_output_rtx[i], output_rtx[i]);
/* Expand an ASM_EXPR node EXP into RTL.  A simple (no-operand) asm is
   handed to expand_asm; an extended asm records where the user wanted
   each output (o[]), expands via expand_asm_operands, then copies each
   intermediate output back to the user's lvalue and restores OUTPUTS.
   NOTE(review): interior lines are elided in this excerpt.  */
1261 expand_asm_expr (tree exp)
1267   if (ASM_INPUT_P (exp))
1269       expand_asm (ASM_STRING (exp), ASM_VOLATILE_P (exp));
1273   outputs = ASM_OUTPUTS (exp);
1274   noutputs = list_length (outputs);
1275   /* o[I] is the place that output number I should be written. */
1276   o = (tree *) alloca (noutputs * sizeof (tree));
1278   /* Record the contents of OUTPUTS before it is modified. */
1279   for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1280     o[i] = TREE_VALUE (tail);
1282   /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1283      OUTPUTS some trees for where the values were actually stored. */
1284   expand_asm_operands (ASM_STRING (exp), outputs, ASM_INPUTS (exp),
1285 		       ASM_CLOBBERS (exp), ASM_VOLATILE_P (exp),
1288   /* Copy all the intermediate outputs into the specified outputs. */
1289   for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1291       if (o[i] != TREE_VALUE (tail))
1293 	  expand_assignment (o[i], TREE_VALUE (tail), 0);
1296 	  /* Restore the original value so that it's correct the next
1297 	     time we expand this function. */
1298 	  TREE_VALUE (tail) = o[i];
1303 /* A subroutine of expand_asm_operands. Check that all operands have
1304 the same number of alternatives. Return true if so. */
1307 check_operand_nalternatives (tree outputs, tree inputs)
1309   if (outputs || inputs)
1311       tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
/* Alternatives in a constraint string are comma-separated, so the
   number of commas in the first constraint fixes the expected count.  */
1313 	= n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1316       if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1318 	  error ("too many alternatives in `asm'");
1325 	  const char *constraint
1326 	    = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
/* Every other operand must agree with that comma count.  */
1328 	  if (n_occurrences (',', constraint) != nalternatives)
1330 	      error ("operand constraints for `asm' differ in number of alternatives");
/* Walk the outputs first, then switch over to the inputs chain.  */
1334 	  if (TREE_CHAIN (tmp))
1335 	    tmp = TREE_CHAIN (tmp);
1337 	    tmp = next, next = 0;
1344 /* A subroutine of expand_asm_operands. Check that all operand names
1345 are unique. Return true if so. We rely on the fact that these names
1346 are identifiers, and so have been canonicalized by get_identifier,
1347 so all we need are pointer comparisons. */
1350 check_unique_operand_names (tree outputs, tree inputs)
/* Each output name is checked against the outputs that follow it.  */
1354   for (i = outputs; i ; i = TREE_CHAIN (i))
1356       tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1360       for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1361 	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
/* Each input name is checked against later inputs AND all outputs,
   since inputs and outputs share one operand namespace.  */
1365   for (i = inputs; i ; i = TREE_CHAIN (i))
1367       tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1371       for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1372 	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1374       for (j = outputs; j ; j = TREE_CHAIN (j))
1375 	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1382   error ("duplicate asm operand name '%s'",
1383 	 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1387 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1388 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1389 STRING and in the constraints to those numbers. */
1392 resolve_asm_operand_names (tree string, tree outputs, tree inputs)
1399   check_unique_operand_names (outputs, inputs);
1401   /* Substitute [<name>] in input constraint strings. There should be no
1402      named operands in output constraints. */
1403   for (t = inputs; t ; t = TREE_CHAIN (t))
1405       c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1406       if (strchr (c, '[') != NULL)
/* Work on a private copy; resolve_operand_name_1 edits it in place.  */
1408 	  p = buffer = xstrdup (c);
1409 	  while ((p = strchr (p, '[')) != NULL)
1410 	    p = resolve_operand_name_1 (p, outputs, inputs);
1411 	  TREE_VALUE (TREE_PURPOSE (t))
1412 	    = build_string (strlen (buffer), buffer);
1417   /* Now check for any needed substitutions in the template. */
1418   c = TREE_STRING_POINTER (string);
1419   while ((c = strchr (c, '%')) != NULL)
/* A '%' followed by a letter and '[' introduces a named operand
   reference (e.g. %l[name]) that needs substitution.  */
1423       else if (ISALPHA (c[1]) && c[2] == '[')
1434       /* OK, we need to make a copy so we can perform the substitutions.
1435 	 Assume that we will not need extra space--we get to remove '['
1436 	 and ']', which means we cannot have a problem until we have more
1437 	 than 999 operands. */
1438       buffer = xstrdup (TREE_STRING_POINTER (string));
1439       p = buffer + (c - TREE_STRING_POINTER (string));
1441       while ((p = strchr (p, '%')) != NULL)
1445 	  else if (ISALPHA (p[1]) && p[2] == '[')
1453 	    p = resolve_operand_name_1 (p, outputs, inputs);
1456       string = build_string (strlen (buffer), buffer);
1463 /* A subroutine of resolve_operand_names. P points to the '[' for a
1464 potential named operand of the form [<name>]. In place, replace
1465 the name and brackets with a number. Return a pointer to the
1466 balance of the string after substitution. */
1469 resolve_operand_name_1 (char *p, tree outputs, tree inputs)
1476   /* Collect the operand name. */
1477   q = strchr (p, ']');
1480       error ("missing close brace for named operand");
1481       return strchr (p, '\0');
1485   /* Resolve the name to a number. */
/* Outputs and inputs share a single operand numbering: OP keeps
   incrementing across both loops, so input numbers follow outputs.  */
1486   for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
1488       tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1491 	  const char *c = TREE_STRING_POINTER (name);
1492 	  if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
1496   for (t = inputs; t ; t = TREE_CHAIN (t), op++)
1498       tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1501 	  const char *c = TREE_STRING_POINTER (name);
1502 	  if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
1508   error ("undefined named operand '%s'", p + 1);
1512   /* Replace the name with the number. Unfortunately, not all libraries
1513      get the return value of sprintf correct, so search for the end of the
1514      generated string by hand. */
1515   sprintf (p, "%d", op);
1516   p = strchr (p, '\0');
1518   /* Verify the no extra buffer space assumption. */
1522   /* Shift the rest of the buffer down to fill the gap. */
1523   memmove (p, q + 1, strlen (q + 1) + 1);
1528 /* Generate RTL to evaluate the expression EXP. */
1531 expand_expr_stmt (tree exp)
/* NOTE(review): const0_rtx as the target presumably signals to
   expand_expr that the value may be discarded -- confirm against the
   expand_expr conventions.  */
1536   value = expand_expr (exp, const0_rtx, VOIDmode, 0);
1537   type = TREE_TYPE (exp);
1539   /* If all we do is reference a volatile value in memory,
1540      copy it to a register to be sure it is actually touched. */
1541   if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
1543       if (TYPE_MODE (type) == VOIDmode)
1545       else if (TYPE_MODE (type) != BLKmode)
1546 	value = copy_to_reg (value);
/* BLKmode volatile values can't be copied to a register; force a
   reference by comparing the value against itself instead.  */
1549 	  rtx lab = gen_label_rtx ();
1551 	  /* Compare the value with itself to reference it. */
1552 	  emit_cmp_and_jump_insns (value, value, EQ,
1553 				   expand_expr (TYPE_SIZE (type),
1554 						NULL_RTX, VOIDmode, 0),
1560   /* Free any temporaries used to evaluate this expression. */
1564 /* Warn if EXP contains any computations whose results are not used.
1565 Return 1 if a warning is printed; 0 otherwise. LOCUS is the
1566 (potential) location of the expression. */
1569 warn_if_unused_value (tree exp, location_t locus)
/* A TREE_USED expression has already been consumed somewhere, so it
   is never "unused".  */
1572   if (TREE_USED (exp))
1575   /* Don't warn about void constructs. This includes casting to void,
1576      void function calls, and statement expressions with a final cast
1578   if (VOID_TYPE_P (TREE_TYPE (exp)))
1581   if (EXPR_HAS_LOCATION (exp))
1582     locus = EXPR_LOCATION (exp);
/* Dispatch on the tree code; most cases below narrow EXP to an
   interesting sub-expression and loop or recurse.  NOTE(review):
   several case labels and bodies are elided in this excerpt.  */
1584   switch (TREE_CODE (exp))
1586     case PREINCREMENT_EXPR:
1587     case POSTINCREMENT_EXPR:
1588     case PREDECREMENT_EXPR:
1589     case POSTDECREMENT_EXPR:
1594     case TRY_CATCH_EXPR:
1595     case WITH_CLEANUP_EXPR:
1600       /* For a binding, warn if no side effect within it. */
1601       exp = BIND_EXPR_BODY (exp);
1605       exp = TREE_OPERAND (exp, 0);
1608     case TRUTH_ORIF_EXPR:
1609     case TRUTH_ANDIF_EXPR:
1610       /* In && or ||, warn if 2nd operand has no side effect. */
1611       exp = TREE_OPERAND (exp, 1);
/* Presumably the COMPOUND_EXPR case: warn on the first operand, then
   examine the second -- confirm against the full source.  */
1615       if (TREE_NO_WARNING (exp))
1617       if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
1619       /* Let people do `(foo (), 0)' without a warning. */
1620       if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1622       exp = TREE_OPERAND (exp, 1);
1627     case NON_LVALUE_EXPR:
1628       /* Don't warn about conversions not explicit in the user's program. */
1629       if (TREE_NO_WARNING (exp))
1631       /* Assignment to a cast usually results in a cast of a modify.
1632 	 Don't complain about that. There can be an arbitrary number of
1633 	 casts before the modify, so we must loop until we find the first
1634 	 non-cast expression and then test to see if that is a modify. */
1636 	tree tem = TREE_OPERAND (exp, 0);
1638 	while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1639 	  tem = TREE_OPERAND (tem, 0);
1641 	if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1642 	    || TREE_CODE (tem) == CALL_EXPR)
1648       /* Don't warn about automatic dereferencing of references, since
1649 	 the user cannot control it. */
1650       if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1652 	  exp = TREE_OPERAND (exp, 0);
1658       /* Referencing a volatile value is a side effect, so don't warn. */
1660 	   || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1661 	  && TREE_THIS_VOLATILE (exp))
1664       /* If this is an expression which has no operands, there is no value
1665 	 to be unused. There are no such language-independent codes,
1666 	 but front ends may define such. */
1667       if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
1668 	  && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
1672       /* If this is an expression with side effects, don't warn. */
1673       if (TREE_SIDE_EFFECTS (exp))
1676       warning ("%Hvalue computed is not used", &locus);
1681 /* Generate RTL for the start of an if-then. COND is the expression
1682 whose truth should be tested.
1684 If EXITFLAG is nonzero, this conditional is visible to
1685 `exit_something'. */
1688 expand_start_cond (tree cond, int exitflag)
1690   struct nesting *thiscond = ALLOC_NESTING ();
1692   /* Make an entry on cond_stack for the cond we are entering. */
1694   thiscond->desc = COND_NESTING;
1695   thiscond->next = cond_stack;
1696   thiscond->all = nesting_stack;
1697   thiscond->depth = ++nesting_depth;
1698   thiscond->data.cond.next_label = gen_label_rtx ();
1699   /* Before we encounter an `else', we don't need a separate exit label
1700      unless there are supposed to be exit statements
1701      to exit this conditional. */
1702   thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1703   thiscond->data.cond.endif_label = thiscond->exit_label;
1704   cond_stack = thiscond;
1705   nesting_stack = thiscond;
/* Jump to next_label (past the then-clause) when COND is false;
   do_jump's second argument is the if-false label.  */
1707   do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
1710 /* Generate RTL between then-clause and the elseif-clause
1711 of an if-then-elseif-.... */
1714 expand_start_elseif (tree cond)
/* The endif label is created lazily, on the first else/elseif.  */
1716   if (cond_stack->data.cond.endif_label == 0)
1717     cond_stack->data.cond.endif_label = gen_label_rtx ();
/* End the then-clause: skip over the elseif, then test its condition.  */
1718   emit_jump (cond_stack->data.cond.endif_label);
1719   emit_label (cond_stack->data.cond.next_label);
1720   cond_stack->data.cond.next_label = gen_label_rtx ();
1721   do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1724 /* Generate RTL between the then-clause and the else-clause
1725 of an if-then-else. */
1728 expand_start_else (void)
1730   if (cond_stack->data.cond.endif_label == 0)
1731     cond_stack->data.cond.endif_label = gen_label_rtx ();
/* Jump from the end of the then-clause over the else-clause, then
   place the label where control lands when the condition was false.  */
1733   emit_jump (cond_stack->data.cond.endif_label);
1734   emit_label (cond_stack->data.cond.next_label);
1735   cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
1738 /* After calling expand_start_else, turn this "else" into an "else if"
1739 by providing another condition. */
1742 expand_elseif (tree cond)
/* Re-arm next_label (cleared by expand_start_else) and emit the test;
   control falls through to the new clause only when COND is true.  */
1744   cond_stack->data.cond.next_label = gen_label_rtx ();
1745   do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1748 /* Generate RTL for the end of an if-then.
1749 Pop the record for it off of cond_stack. */
1752 expand_end_cond (void)
1754   struct nesting *thiscond = cond_stack;
1756   do_pending_stack_adjust ();
/* Emit whichever of the two labels were actually created; next_label
   is still set when no else-clause was seen.  */
1757   if (thiscond->data.cond.next_label)
1758     emit_label (thiscond->data.cond.next_label);
1759   if (thiscond->data.cond.endif_label)
1760     emit_label (thiscond->data.cond.endif_label);
1762   POPSTACK (cond_stack);
1765 /* Return nonzero if we should preserve sub-expressions as separate
1766 pseudos. We never do so if we aren't optimizing. We always do so
1767 if -fexpensive-optimizations. */
1770 preserve_subexpressions_p (void)
/* NOTE(review): the return statements for both tests are elided in
   this excerpt; per the header comment the first presumably returns
   nonzero and the second zero -- confirm against the full source.  */
1772   if (flag_expensive_optimizations)
1775   if (optimize == 0 || cfun == 0 || cfun->stmt == 0)
1782 /* Generate RTL to return from the current function, with no value.
1783 (That is, we do not do anything about returning any value.) */
1786 expand_null_return (void)
1788   /* If this function was declared to return a value, but we
1789      didn't, clobber the return registers so that they are not
1790      propagated live to the rest of the function. */
1791   clobber_return_register ();
/* Shared tail: adjust the stack and jump to the return label.  */
1793   expand_null_return_1 ();
1796 /* Generate RTL to return directly from the current function.
1797 (That is, we bypass any return value.) */
1800 expand_naked_return (void)
1804   clear_pending_stack_adjust ();
1805   do_pending_stack_adjust ();
/* Reuse the function-wide naked-return label, creating it on first
   use, and jump to it.  */
1807     end_label = naked_return_label;
1809     end_label = naked_return_label = gen_label_rtx ();
1811   emit_jump (end_label);
1814 /* Try to guess whether the value of return means error code. */
1815 static enum br_predictor
1816 return_prediction (rtx val)
1818   /* Different heuristics for pointers and scalars. */
1819   if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
1821       /* NULL is usually not returned. */
1822       if (val == const0_rtx)
1823 	return PRED_NULL_RETURN;
1827       /* Negative return values are often used to indicate
1829       if (GET_CODE (val) == CONST_INT
1830 	  && INTVAL (val) < 0)
1831 	return PRED_NEGATIVE_RETURN;
1832       /* Constant return values are also usually errors,
1833 	 zero/one often mean booleans so exclude them from the
1835       if (CONSTANT_P (val)
1836 	  && (val != const0_rtx && val != const1_rtx))
1837 	return PRED_CONST_RETURN;
1839   return PRED_NO_PREDICTION;
1843 /* If the current function returns values in the most significant part
1844 of a register, shift return value VAL appropriately. The mode of
1845 the function's return type is known not to be BLKmode. */
1848 shift_return_value (rtx val)
1852   type = TREE_TYPE (DECL_RESULT (current_function_decl));
1853   if (targetm.calls.return_in_msb (type))
1856       HOST_WIDE_INT shift;
1858       target = DECL_RTL (DECL_RESULT (current_function_decl));
/* Shift count = unused low-order bits of the return register, i.e.
   register width minus the value's size in bits.  */
1859       shift = (GET_MODE_BITSIZE (GET_MODE (target))
1860 	       - BITS_PER_UNIT * int_size_in_bytes (type));
1862 	val = expand_shift (LSHIFT_EXPR, GET_MODE (target),
1863 			    gen_lowpart (GET_MODE (target), val),
1864 			    build_int_2 (shift, 0), target, 1);
1870 /* Generate RTL to return from the current function, with value VAL. */
1873 expand_value_return (rtx val)
1876   enum br_predictor pred;
1878   if (flag_guess_branch_prob
1879       && (pred = return_prediction (val)) != PRED_NO_PREDICTION)
1881       /* Emit information for branch prediction. */
1884       note = emit_note (NOTE_INSN_PREDICTION);
1886       NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
1890   return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
1892   /* Copy the value to the return location
1893      unless it's already there. */
1895   if (return_reg != val)
1897       tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
/* If the target promotes function return values, widen VAL to the
   promoted mode before copying it into the return register.  */
1898       if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
1900 	  int unsignedp = TYPE_UNSIGNED (type);
1901 	  enum machine_mode old_mode
1902 	    = DECL_MODE (DECL_RESULT (current_function_decl));
1903 	  enum machine_mode mode
1904 	    = promote_mode (type, old_mode, &unsignedp, 1);
1906 	  if (mode != old_mode)
1907 	    val = convert_modes (mode, old_mode, val, unsignedp);
/* A PARALLEL return location (value split across several registers)
   needs a group load rather than a single move.  */
1909       if (GET_CODE (return_reg) == PARALLEL)
1910 	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
1912 	emit_move_insn (return_reg, val);
1915   expand_null_return_1 ();
1918 /* Output a return with no value. */
1921 expand_null_return_1 (void)
1925   clear_pending_stack_adjust ();
1926   do_pending_stack_adjust ();
/* Reuse the function-wide return label, creating it on first use.  */
1928     end_label = return_label;
1930     end_label = return_label = gen_label_rtx ();
1931   emit_jump (end_label);
1934 /* Generate RTL to evaluate the expression RETVAL and return it
1935 from the current function. */
1938 expand_return (tree retval)
1944   /* If function wants no value, give it none. */
1945   if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
1947       expand_expr (retval, NULL_RTX, VOIDmode, 0);
1948       expand_null_return ();
1952   if (retval == error_mark_node)
1954       /* Treat this like a return of no value from a function that
1956       expand_null_return ();
/* Pull the right-hand side out of `RESULT = value' forms so the
   cases below see just the value being returned.  */
1959   else if (TREE_CODE (retval) == RESULT_DECL)
1960     retval_rhs = retval;
1961   else if ((TREE_CODE (retval) == MODIFY_EXPR
1962 	    || TREE_CODE (retval) == INIT_EXPR)
1963 	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
1964     retval_rhs = TREE_OPERAND (retval, 1);
1966     retval_rhs = retval;
1968   result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
1970   /* If the result is an aggregate that is being returned in one (or more)
1971      registers, load the registers here. The compiler currently can't handle
1972      copying a BLKmode value into registers. We could put this code in a
1973      more general area (for use by everyone instead of just function
1974      call/return), but until this feature is generally usable it is kept here
1975      (and in expand_call). */
1978       && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
1979       && REG_P (result_rtl))
1982       unsigned HOST_WIDE_INT bitpos, xbitpos;
1983       unsigned HOST_WIDE_INT padding_correction = 0;
1984       unsigned HOST_WIDE_INT bytes
1985 	= int_size_in_bytes (TREE_TYPE (retval_rhs));
1986       int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1987       unsigned int bitsize
1988 	= MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
1989       rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
1990       rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
1991       rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
1992       enum machine_mode tmpmode, result_reg_mode;
1996 	  expand_null_return ();
2000       /* If the structure doesn't take up a whole number of words, see
2001 	 whether the register value should be padded on the left or on
2002 	 the right. Set PADDING_CORRECTION to the number of padding
2003 	 bits needed on the left side.
2005 	 In most ABIs, the structure will be returned at the least significant end of
2006 	 the register, which translates to right padding on little-endian
2007 	 targets and left padding on big-endian targets. The opposite
2008 	 holds if the structure is returned at the most significant
2009 	 end of the register. */
2010       if (bytes % UNITS_PER_WORD != 0
2011 	  && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
2013 	      : BYTES_BIG_ENDIAN))
2014 	padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2017       /* Copy the structure BITSIZE bits at a time. */
2018       for (bitpos = 0, xbitpos = padding_correction;
2019 	   bitpos < bytes * BITS_PER_UNIT;
2020 	   bitpos += bitsize, xbitpos += bitsize)
2022 	  /* We need a new destination pseudo each time xbitpos is
2023 	     on a word boundary and when xbitpos == padding_correction
2024 	     (the first time through). */
2025 	  if (xbitpos % BITS_PER_WORD == 0
2026 	      || xbitpos == padding_correction)
2028 	      /* Generate an appropriate register. */
2029 	      dst = gen_reg_rtx (word_mode);
2030 	      result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2032 	      /* Clear the destination before we move anything into it. */
2033 	      emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
2036 	  /* We need a new source operand each time bitpos is on a word
2038 	  if (bitpos % BITS_PER_WORD == 0)
2039 	    src = operand_subword_force (result_val,
2040 					 bitpos / BITS_PER_WORD,
2043 	  /* Use bitpos for the source extraction (left justified) and
2044 	     xbitpos for the destination store (right justified). */
2045 	  store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2046 			   extract_bit_field (src, bitsize,
2047 					      bitpos % BITS_PER_WORD, 1,
2048 					      NULL_RTX, word_mode, word_mode));
2051       tmpmode = GET_MODE (result_rtl);
2052       if (tmpmode == BLKmode)
2054 	  /* Find the smallest integer mode large enough to hold the
2055 	     entire structure and use that mode instead of BLKmode
2056 	     on the USE insn for the return register. */
2057 	  for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2058 	       tmpmode != VOIDmode;
2059 	       tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2060 	    /* Have we found a large enough mode? */
2061 	    if (GET_MODE_SIZE (tmpmode) >= bytes)
2064 	  /* No suitable mode found. */
2065 	  if (tmpmode == VOIDmode)
2068 	  PUT_MODE (result_rtl, tmpmode);
/* Assemble the per-word pseudos into one wide result register, then
   narrow it to the final mode if needed.  */
2071       if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2072 	result_reg_mode = word_mode;
2074 	result_reg_mode = tmpmode;
2075       result_reg = gen_reg_rtx (result_reg_mode);
2077       for (i = 0; i < n_regs; i++)
2078 	emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2081       if (tmpmode != result_reg_mode)
2082 	result_reg = gen_lowpart (tmpmode, result_reg);
2084       expand_value_return (result_reg);
2086   else if (retval_rhs != 0
2087 	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
2088 	   && (REG_P (result_rtl)
2089 	       || (GET_CODE (result_rtl) == PARALLEL)))
2091       /* Calculate the return value into a temporary (usually a pseudo
2093       tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
2094       tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
2096       val = assign_temp (nt, 0, 0, 1);
2097       val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2098       val = force_not_mem (val);
2099       /* Return the calculated value. */
2100       expand_value_return (shift_return_value (val));
2104       /* No hard reg used; calculate value into hard return reg. */
2105       expand_expr (retval, const0_rtx, VOIDmode, 0);
2106       expand_value_return (result_rtl);
2110 /* Generate the RTL code for entering a binding contour.
2111 The variables are declared one by one, by calls to `expand_decl'.
2113 FLAGS is a bitwise or of the following flags:
2115 1 - Nonzero if this construct should be visible to
2118 2 - Nonzero if this contour does not require a
2119 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
2120 language-independent code should set this flag because they
2121 will not create corresponding BLOCK nodes. (There should be
2122 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
2123 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
2124 when expand_end_bindings is called.
2126 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
2127 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
2131 expand_start_bindings_and_block (int flags, tree block)
2133   struct nesting *thisblock = ALLOC_NESTING ();
/* Decode the FLAGS bits documented above.  */
2135   int exit_flag = ((flags & 1) != 0);
2136   int block_flag = ((flags & 2) == 0);
2138   /* If a BLOCK is supplied, then the caller should be requesting a
2139      NOTE_INSN_BLOCK_BEG note. */
2140   if (!block_flag && block)
2143   /* Create a note to mark the beginning of the block. */
2144   note = emit_note (NOTE_INSN_DELETED);
2146   /* Make an entry on block_stack for the block we are entering. */
2148   thisblock->desc = BLOCK_NESTING;
2149   thisblock->next = block_stack;
2150   thisblock->all = nesting_stack;
2151   thisblock->depth = ++nesting_depth;
2152   thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
2154   /* When we insert instructions after the last unconditional cleanup,
2155      we don't adjust last_insn. That means that a later add_insn will
2156      clobber the instructions we've just added. The easiest way to
2157      fix this is to just insert another instruction here, so that the
2158      instructions inserted after the last unconditional cleanup are
2159      never the last instruction. */
2160   emit_note (NOTE_INSN_DELETED);
2162   thisblock->data.block.first_insn = note;
2163   thisblock->data.block.block_start_count = ++current_block_start_count;
2164   thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
2165   block_stack = thisblock;
2166   nesting_stack = thisblock;
2168   /* Make a new level for allocating stack slots. */
2172 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
2173 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
2174 expand_expr are made. After we end the region, we know that all
2175 space for all temporaries that were created by TARGET_EXPRs will be
2176 destroyed and their space freed for reuse. */
2179 expand_start_target_temps (void)
2181   /* This is so that even if the result is preserved, the space
2182      allocated will be freed, as we know that it is no longer in use. */
2185   /* Start a new binding layer that will keep track of all cleanup
2186      actions to be performed. */
2187   expand_start_bindings (2);
/* TARGET_EXPR temporaries created from here on belong to this level.  */
2189   target_temp_slot_level = temp_slot_level;
/* Close the region opened by expand_start_target_temps.  */
2193 expand_end_target_temps (void)
2195   expand_end_bindings (NULL_TREE, 0, 0);
2197   /* This is so that even if the result is preserved, the space
2198      allocated will be freed, as we know that it is no longer in use. */
2202 /* Given a pointer to a BLOCK node return nonzero if (and only if) the node
2203 in question represents the outermost pair of curly braces (i.e. the "body
2204 block") of a function or method.
2206 For any BLOCK node representing a "body block" of a function or method, the
2207 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
2208 represents the outermost (function) scope for the function or method (i.e.
2209 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
2210 *that* node in turn will point to the relevant FUNCTION_DECL node. */
2213 is_body_block (tree stmt)
/* Some front ends do not create body blocks at all; bail out early.  */
2215   if (lang_hooks.no_body_blocks)
2218   if (TREE_CODE (stmt) == BLOCK)
2220       tree parent = BLOCK_SUPERCONTEXT (stmt);
2222       if (parent && TREE_CODE (parent) == BLOCK)
2224 	  tree grandparent = BLOCK_SUPERCONTEXT (parent);
2226 	  if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
2234 /* Return an opaque pointer to the current nesting level, so frontend code
2235 can check its own sanity. */
2238 current_nesting_level (void)
/* Outside any function (cfun == 0) there is no nesting; return 0.  */
2240   return cfun ? block_stack : 0;
2243 /* Emit code to restore vital registers at the beginning of a nonlocal goto
2246 expand_nl_goto_receiver (void)
2248   /* Clobber the FP when we get here, so we have to make sure it's
2249      marked as used by this function. */
2250   emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
2252   /* Mark the static chain as clobbered here so life information
2253      doesn't get messed up for it. */
2254   emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
2256 #ifdef HAVE_nonlocal_goto
2257   if (! HAVE_nonlocal_goto)
2259     /* First adjust our frame pointer to its actual value. It was
2260        previously set to the start of the virtual area corresponding to
2261        the stacked variables when we branched here and now needs to be
2262        adjusted to the actual hardware fp value.
2264        Assignments to virtual registers are converted by
2265        instantiate_virtual_regs into the corresponding assignment
2266        to the underlying register (fp in this case) that makes
2267        the original assignment true.
2268        So the following insn will actually be
2269        decrementing fp by STARTING_FRAME_OFFSET. */
2270     emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
2272 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2273   if (fixed_regs[ARG_POINTER_REGNUM])
2275 #ifdef ELIMINABLE_REGS
2276       /* If the argument pointer can be eliminated in favor of the
2277 	 frame pointer, we don't need to restore it. We assume here
2278 	 that if such an elimination is present, it can always be used.
2279 	 This is the case on all known machines; if we don't make this
2280 	 assumption, we do unnecessary saving on many machines. */
2281       static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
2284       for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
2285 	if (elim_regs[i].from == ARG_POINTER_REGNUM
2286 	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
/* Reached the end of elim_regs without finding the arg-pointer ->
   frame-pointer elimination, so the arg pointer must be reloaded.  */
2289       if (i == ARRAY_SIZE (elim_regs))
2292 	  /* Now restore our arg pointer from the address at which it
2293 	     was saved in our stack frame. */
2294 	  emit_move_insn (virtual_incoming_args_rtx,
2295 			  copy_to_reg (get_arg_pointer_save_area (cfun)));
2300 #ifdef HAVE_nonlocal_goto_receiver
2301   if (HAVE_nonlocal_goto_receiver)
2302     emit_insn (gen_nonlocal_goto_receiver ());
2305   /* @@@ This is a kludge. Not all machine descriptions define a blockage
2306      insn, but we must not allow the code we just generated to be reordered
2307      by scheduling. Specifically, the update of the frame pointer must
2308      happen immediately, not later. So emit an ASM_INPUT to act as blockage
2310   emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
2313 /* Warn about any unused VARS (which may contain nodes other than
2314 VAR_DECLs, but such nodes are ignored). The nodes are connected
2315 via the TREE_CHAIN field. */
2318 warn_about_unused_variables (tree vars)
2322   if (warn_unused_variable)
2323     for (decl = vars; decl; decl = TREE_CHAIN (decl))
/* Only complain about named, user-written VAR_DECLs declared outside
   system headers.  */
2324       if (TREE_CODE (decl) == VAR_DECL
2325 	  && ! TREE_USED (decl)
2326 	  && ! DECL_IN_SYSTEM_HEADER (decl)
2327 	  && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
2328 	warning ("%Junused variable '%D'", decl, decl);
2331 /* Generate RTL code to terminate a binding contour.
2333 VARS is the chain of VAR_DECL nodes for the variables bound in this
2334 contour. There may actually be other nodes in this chain, but any
2335 nodes other than VAR_DECLS are ignored.
2337 MARK_ENDS is nonzero if we should put a note at the beginning
2338 and end of this binding contour.
2340 DONT_JUMP_IN is positive if it is not valid to jump into this contour,
2341 zero if we can jump into this contour only if it does not have a saved
2342 stack level, and negative if we are not to check for invalid use of
2343 labels (because the front end does that). */
2346 expand_end_bindings (tree vars, int mark_ends ATTRIBUTE_UNUSED,
2347 		     int dont_jump_in ATTRIBUTE_UNUSED)
2349   struct nesting *thisblock = block_stack;
2351   /* If any of the variables in this scope were not used, warn the
2353   warn_about_unused_variables (vars);
/* Land any `exit_something' jumps targeted at this contour.  */
2355   if (thisblock->exit_label)
2357       do_pending_stack_adjust ();
2358       emit_label (thisblock->exit_label);
2361   /* Mark the beginning and end of the scope if requested. */
2363     /* Get rid of the beginning-mark if we don't make an end-mark. */
2364     NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
2366   /* Restore the temporary level of TARGET_EXPRs. */
2367   target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
2369   /* Restore block_stack level for containing block. */
2371   POPSTACK (block_stack);
2373   /* Pop the stack slot nesting and free any slots at this level. */
2377 /* Generate RTL for the automatic variable declaration DECL.
2378 (Other kinds of declarations are simply ignored if seen here.) */
2381 expand_decl (tree decl)
2385 type = TREE_TYPE (decl);
2387 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
2388 type in case this node is used in a reference. */
2389 if (TREE_CODE (decl) == CONST_DECL)
2391 DECL_MODE (decl) = TYPE_MODE (type);
2392 DECL_ALIGN (decl) = TYPE_ALIGN (type);
2393 DECL_SIZE (decl) = TYPE_SIZE (type);
2394 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
2398 /* Otherwise, only automatic variables need any expansion done. Static and
2399 external variables, and external functions, will be handled by
2400 `assemble_variable' (called from finish_decl). TYPE_DECL requires
2401 nothing. PARM_DECLs are handled in `assign_parms'. */
2402 if (TREE_CODE (decl) != VAR_DECL)
2405 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
2408 /* Create the RTL representation for the variable. */
2410 if (type == error_mark_node)
2411 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
2413 else if (DECL_SIZE (decl) == 0)
2414 /* Variable with incomplete type. */
2417 if (DECL_INITIAL (decl) == 0)
2418 /* Error message was already done; now avoid a crash. */
2419 x = gen_rtx_MEM (BLKmode, const0_rtx);
2421 /* An initializer is going to decide the size of this array.
2422 Until we know the size, represent its address with a reg. */
2423 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
2425 set_mem_attributes (x, decl, 1);
2426 SET_DECL_RTL (decl, x);
2428 else if (use_register_for_decl (decl))
2430 /* Automatic variable that can go in a register. */
2431 int unsignedp = TYPE_UNSIGNED (type);
2432 enum machine_mode reg_mode
2433 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
2435 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
2437 /* Note if the object is a user variable. */
2438 if (!DECL_ARTIFICIAL (decl))
2440 mark_user_reg (DECL_RTL (decl));
2442 /* Trust user variables which have a pointer type to really
2443 be pointers. Do not trust compiler generated temporaries
2444 as our type system is totally busted as it relates to
2445 pointer arithmetic which translates into lots of compiler
2446 generated objects with pointer types, but which are not really
2448 if (POINTER_TYPE_P (type))
2449 mark_reg_pointer (DECL_RTL (decl),
2450 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
2453 maybe_set_unchanging (DECL_RTL (decl), decl);
2456 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
2457 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
2458 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
2459 STACK_CHECK_MAX_VAR_SIZE)))
2461 /* Variable of fixed size that goes on the stack. */
2466 /* If we previously made RTL for this decl, it must be an array
2467 whose size was determined by the initializer.
2468 The old address was a register; set that register now
2469 to the proper address. */
2470 if (DECL_RTL_SET_P (decl))
2472 if (!MEM_P (DECL_RTL (decl))
2473 || !REG_P (XEXP (DECL_RTL (decl), 0)))
2475 oldaddr = XEXP (DECL_RTL (decl), 0);
2478 /* Set alignment we actually gave this decl. */
2479 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
2480 : GET_MODE_BITSIZE (DECL_MODE (decl)));
2481 DECL_USER_ALIGN (decl) = 0;
2483 x = assign_temp (decl, 1, 1, 1);
2484 set_mem_attributes (x, decl, 1);
2485 SET_DECL_RTL (decl, x);
2489 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
2490 if (addr != oldaddr)
2491 emit_move_insn (oldaddr, addr);
2495 /* Dynamic-size object: must push space on the stack. */
2497 rtx address, size, x;
2499 /* Record the stack pointer on entry to block, if have
2500 not already done so. */
2501 do_pending_stack_adjust ();
2503 /* Compute the variable's size, in bytes. This will expand any
2504 needed SAVE_EXPRs for the first time. */
2505 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
2508 /* Allocate space on the stack for the variable. Note that
2509 DECL_ALIGN says how the variable is to be aligned and we
2510 cannot use it to conclude anything about the alignment of
2512 address = allocate_dynamic_stack_space (size, NULL_RTX,
2513 TYPE_ALIGN (TREE_TYPE (decl)));
2515 /* Reference the variable indirect through that rtx. */
2516 x = gen_rtx_MEM (DECL_MODE (decl), address);
2517 set_mem_attributes (x, decl, 1);
2518 SET_DECL_RTL (decl, x);
2521 /* Indicate the alignment we actually gave this variable. */
2522 #ifdef STACK_BOUNDARY
2523 DECL_ALIGN (decl) = STACK_BOUNDARY;
2525 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
2527 DECL_USER_ALIGN (decl) = 0;
2531 /* Emit code to allocate T_SIZE bytes of dynamic stack space for ALLOC. */
2533 expand_stack_alloc (tree alloc, tree t_size)
2535 rtx address, dest, size;
2538 if (TREE_CODE (alloc) != ADDR_EXPR)
2540 var = TREE_OPERAND (alloc, 0);
2541 if (TREE_CODE (var) != VAR_DECL)
2544 type = TREE_TYPE (var);
2546 /* Compute the variable's size, in bytes. */
2547 size = expand_expr (t_size, NULL_RTX, VOIDmode, 0);
2550 /* Allocate space on the stack for the variable. */
2551 address = XEXP (DECL_RTL (var), 0);
2552 dest = allocate_dynamic_stack_space (size, address, TYPE_ALIGN (type));
2553 if (dest != address)
2554 emit_move_insn (address, dest);
2556 /* Indicate the alignment we actually gave this variable. */
2557 #ifdef STACK_BOUNDARY
2558 DECL_ALIGN (var) = STACK_BOUNDARY;
2560 DECL_ALIGN (var) = BIGGEST_ALIGNMENT;
2562 DECL_USER_ALIGN (var) = 0;
2565 /* Emit code to save the current value of stack. */
2567 expand_stack_save (void)
2571 do_pending_stack_adjust ();
2572 emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
2576 /* Emit code to restore the current value of stack. */
2578 expand_stack_restore (tree var)
2580 rtx sa = DECL_RTL (var);
2582 emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
2585 /* Emit code to perform the initialization of a declaration DECL. */
2588 expand_decl_init (tree decl)
2590 int was_used = TREE_USED (decl);
2592 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
2593 for static decls. */
2594 if (TREE_CODE (decl) == CONST_DECL
2595 || TREE_STATIC (decl))
2598 /* Compute and store the initial value now. */
2602 if (DECL_INITIAL (decl) == error_mark_node)
2604 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
2606 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
2607 || code == POINTER_TYPE || code == REFERENCE_TYPE)
2608 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
2611 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
2613 emit_line_note (DECL_SOURCE_LOCATION (decl));
2614 expand_assignment (decl, DECL_INITIAL (decl), 0);
2617 /* Don't let the initialization count as "using" the variable. */
2618 TREE_USED (decl) = was_used;
2620 /* Free any temporaries we made while initializing the decl. */
2621 preserve_temp_slots (NULL_RTX);
2627 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
2628 DECL_ELTS is the list of elements that belong to DECL's type.
2629 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
2632 expand_anon_union_decl (tree decl, tree cleanup ATTRIBUTE_UNUSED,
2638 /* If any of the elements are addressable, so is the entire union. */
2639 for (t = decl_elts; t; t = TREE_CHAIN (t))
2640 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
2642 TREE_ADDRESSABLE (decl) = 1;
2647 x = DECL_RTL (decl);
2649 /* Go through the elements, assigning RTL to each. */
2650 for (t = decl_elts; t; t = TREE_CHAIN (t))
2652 tree decl_elt = TREE_VALUE (t);
2653 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
2655 /* If any of the elements are addressable, so is the entire
2657 if (TREE_USED (decl_elt))
2658 TREE_USED (decl) = 1;
2660 /* Propagate the union's alignment to the elements. */
2661 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
2662 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
2664 /* If the element has BLKmode and the union doesn't, the union is
2665 aligned such that the element doesn't need to have BLKmode, so
2666 change the element's mode to the appropriate one for its size. */
2667 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
2668 DECL_MODE (decl_elt) = mode
2669 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
2671 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
2672 instead create a new MEM rtx with the proper mode. */
2675 if (mode == GET_MODE (x))
2676 SET_DECL_RTL (decl_elt, x);
2678 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
2682 if (mode == GET_MODE (x))
2683 SET_DECL_RTL (decl_elt, x);
2685 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
2692 /* Enter a case (Pascal) or switch (C) statement.
2693 Push a block onto case_stack and nesting_stack
2694 to accumulate the case-labels that are seen
2695 and to record the labels generated for the statement.
2697 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
2698 Otherwise, this construct is transparent for `exit_something'.
2700 EXPR is the index-expression to be dispatched on.
2701 TYPE is its nominal type. We could simply convert EXPR to this type,
2702 but instead we take short cuts. */
2705 expand_start_case (tree index_expr)
2707 struct nesting *thiscase = ALLOC_NESTING ();
2709 /* Make an entry on case_stack for the case we are entering. */
2711 thiscase->desc = CASE_NESTING;
2712 thiscase->next = case_stack;
2713 thiscase->all = nesting_stack;
2714 thiscase->depth = ++nesting_depth;
2715 thiscase->exit_label = 0;
2716 thiscase->data.case_stmt.case_list = 0;
2717 thiscase->data.case_stmt.index_expr = index_expr;
2718 thiscase->data.case_stmt.default_label = 0;
2719 case_stack = thiscase;
2720 nesting_stack = thiscase;
2722 do_pending_stack_adjust ();
2724 /* Make sure case_stmt.start points to something that won't
2725 need any transformation before expand_end_case. */
2726 if (!NOTE_P (get_last_insn ()))
2727 emit_note (NOTE_INSN_DELETED);
2729 thiscase->data.case_stmt.start = get_last_insn ();
2732 /* Do the insertion of a case label into
2733 case_stack->data.case_stmt.case_list. The labels are fed to us
2734 in descending order from the sorted vector of case labels used
2735 in the tree part of the middle end. So the list we construct is
2736 sorted in ascending order. */
2739 add_case_node (tree low, tree high, tree label)
2741 struct case_node *r;
2743 /* If there's no HIGH value, then this is not a case range; it's
2744 just a simple case label. But that's just a degenerate case
2746 If the bounds are equal, turn this into the one-value case. */
2747 if (!high || tree_int_cst_equal (low, high))
2750 /* Handle default labels specially. */
2753 #ifdef ENABLE_CHECKING
2754 if (case_stack->data.case_stmt.default_label != 0)
2757 case_stack->data.case_stmt.default_label = label;
2761 /* Add this label to the chain. */
2762 r = ggc_alloc (sizeof (struct case_node));
2765 r->code_label = label;
2766 r->parent = r->left = NULL;
2767 r->right = case_stack->data.case_stmt.case_list;
2768 case_stack->data.case_stmt.case_list = r;
2771 /* Maximum number of case bit tests. */
2772 #define MAX_CASE_BIT_TESTS 3
2774 /* By default, enable case bit tests on targets with ashlsi3. */
2775 #ifndef CASE_USE_BIT_TESTS
2776 #define CASE_USE_BIT_TESTS (ashl_optab->handlers[word_mode].insn_code \
2777 != CODE_FOR_nothing)
2781 /* A case_bit_test represents a set of case nodes that may be
2782 selected from using a bit-wise comparison. HI and LO hold
2783 the integer to be tested against, LABEL contains the label
2784 to jump to upon success and BITS counts the number of case
2785 nodes handled by this test, typically the number of bits
2788 struct case_bit_test
2796 /* Determine whether "1 << x" is relatively cheap in word_mode. */
2799 bool lshift_cheap_p (void)
2801 static bool init = false;
2802 static bool cheap = true;
2806 rtx reg = gen_rtx_REG (word_mode, 10000);
2807 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
2808 cheap = cost < COSTS_N_INSNS (3);
2815 /* Comparison function for qsort to order bit tests by decreasing
2816 number of case nodes, i.e. the node with the most cases gets
2820 case_bit_test_cmp (const void *p1, const void *p2)
2822 const struct case_bit_test *d1 = p1;
2823 const struct case_bit_test *d2 = p2;
2825 return d2->bits - d1->bits;
2828 /* Expand a switch statement by a short sequence of bit-wise
2829 comparisons. "switch(x)" is effectively converted into
2830 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
2833 INDEX_EXPR is the value being switched on, which is of
2834 type INDEX_TYPE. MINVAL is the lowest case value of in
2835 the case nodes, of INDEX_TYPE type, and RANGE is highest
2836 value minus MINVAL, also of type INDEX_TYPE. NODES is
2837 the set of case nodes, and DEFAULT_LABEL is the label to
2838 branch to should none of the cases match.
2840 There *MUST* be MAX_CASE_BIT_TESTS or less unique case
/* Expand a switch as at most MAX_CASE_BIT_TESTS bit-mask tests:
   effectively "if ((1 << (index - MINVAL)) & CST) goto label".  Case
   nodes sharing a target are folded into one test; values outside
   [0, RANGE] jump to DEFAULT_LABEL.  The tests are sorted so the mask
   covering the most cases is checked first.
   NOTE(review): elided listing -- `count' initialization, the
   goto/hit bookkeeping and several braces are not visible here.  */
2844 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
2845 tree range, case_node_ptr nodes, rtx default_label)
2847 struct case_bit_test test[MAX_CASE_BIT_TESTS];
2848 enum machine_mode mode;
2849 rtx expr, index, label;
2850 unsigned int i,j,lo,hi;
2851 struct case_node *n;
/* Build one test entry per distinct target label, OR-ing each case
   value (relative to MINVAL) into that entry's lo/hi bit mask.  */
2855 for (n = nodes; n; n = n->right)
2857 label = label_rtx (n->code_label);
2858 for (i = 0; i < count; i++)
2859 if (same_case_target_p (label, test[i].label))
2864 if (count >= MAX_CASE_BIT_TESTS)
2868 test[i].label = label;
2875 lo = tree_low_cst (fold (build (MINUS_EXPR, index_type,
2876 n->low, minval)), 1);
2877 hi = tree_low_cst (fold (build (MINUS_EXPR, index_type,
2878 n->high, minval)), 1);
2879 for (j = lo; j <= hi; j++)
2880 if (j >= HOST_BITS_PER_WIDE_INT)
/* FIX: the shift amount for the high word must be relative to
   HOST_BITS_PER_WIDE_INT (matching the guard above), not
   HOST_BITS_PER_INT; otherwise hosts where HOST_WIDE_INT is wider
   than int set the wrong bit in test[i].hi.  */
2881 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_WIDE_INT);
2883 test[i].lo |= (HOST_WIDE_INT) 1 << j;
/* Most-populated masks first.  */
2886 qsort (test, count, sizeof(*test), case_bit_test_cmp);
2888 index_expr = fold (build (MINUS_EXPR, index_type,
2889 convert (index_type, index_expr),
2890 convert (index_type, minval)));
2891 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
2892 do_pending_stack_adjust ();
/* Range check: anything above RANGE (unsigned compare) is default.  */
2894 mode = TYPE_MODE (index_type);
2895 expr = expand_expr (range, NULL_RTX, VOIDmode, 0);
2896 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
/* Compute 1 << index once, then AND against each mask.  */
2899 index = convert_to_mode (word_mode, index, 0);
2900 index = expand_binop (word_mode, ashl_optab, const1_rtx,
2901 index, NULL_RTX, 1, OPTAB_WIDEN);
2903 for (i = 0; i < count; i++)
2905 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
2906 expr = expand_binop (word_mode, and_optab, index, expr,
2907 NULL_RTX, 1, OPTAB_WIDEN);
2908 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
2909 word_mode, 1, test[i].label);
2912 emit_jump (default_label);
2916 #define HAVE_casesi 0
2919 #ifndef HAVE_tablejump
2920 #define HAVE_tablejump 0
2923 /* Terminate a case (Pascal) or switch (C) statement
2924 in which ORIG_INDEX is the expression to be tested.
2925 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
2926 type as given in the source before any compiler conversions.
2927 Generate the code to test it and jump to the right place. */
2930 expand_end_case_type (tree orig_index, tree orig_type)
2932 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
2933 rtx default_label = 0;
2934 struct case_node *n, *m;
2935 unsigned int count, uniq;
2941 rtx before_case, end, lab;
2942 struct nesting *thiscase = case_stack;
2943 tree index_expr, index_type;
2944 bool exit_done = false;
2947 /* Don't crash due to previous errors. */
2948 if (thiscase == NULL)
2951 index_expr = thiscase->data.case_stmt.index_expr;
2952 index_type = TREE_TYPE (index_expr);
2953 unsignedp = TYPE_UNSIGNED (index_type);
2954 if (orig_type == NULL)
2955 orig_type = TREE_TYPE (orig_index);
2957 do_pending_stack_adjust ();
2959 /* An ERROR_MARK occurs for various reasons including invalid data type. */
2960 if (index_type != error_mark_node)
2962 /* If we don't have a default-label, create one here,
2963 after the body of the switch. */
2964 if (thiscase->data.case_stmt.default_label == 0)
2966 thiscase->data.case_stmt.default_label
2967 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
2968 /* Share the exit label if possible. */
2969 if (thiscase->exit_label)
2971 SET_DECL_RTL (thiscase->data.case_stmt.default_label,
2972 thiscase->exit_label);
2975 expand_label (thiscase->data.case_stmt.default_label);
2977 default_label = label_rtx (thiscase->data.case_stmt.default_label);
2979 before_case = get_last_insn ();
2981 /* Get upper and lower bounds of case values.
2982 Also convert all the case values to the index expr's data type. */
2986 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
2988 /* Check low and high label values are integers. */
2989 if (TREE_CODE (n->low) != INTEGER_CST)
2991 if (TREE_CODE (n->high) != INTEGER_CST)
2994 n->low = convert (index_type, n->low);
2995 n->high = convert (index_type, n->high);
2997 /* Count the elements and track the largest and smallest
2998 of them (treating them as signed even if they are not). */
3006 if (INT_CST_LT (n->low, minval))
3008 if (INT_CST_LT (maxval, n->high))
3011 /* A range counts double, since it requires two compares. */
3012 if (! tree_int_cst_equal (n->low, n->high))
3015 /* Count the number of unique case node targets. */
3017 lab = label_rtx (n->code_label);
3018 for (m = thiscase->data.case_stmt.case_list; m != n; m = m->right)
3019 if (same_case_target_p (label_rtx (m->code_label), lab))
3026 /* Compute span of values. */
3028 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
3032 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
3033 emit_jump (default_label);
3036 /* Try implementing this switch statement by a short sequence of
3037 bit-wise comparisons. However, we let the binary-tree case
3038 below handle constant index expressions. */
3039 else if (CASE_USE_BIT_TESTS
3040 && ! TREE_CONSTANT (index_expr)
3041 && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
3042 && compare_tree_int (range, 0) > 0
3043 && lshift_cheap_p ()
3044 && ((uniq == 1 && count >= 3)
3045 || (uniq == 2 && count >= 5)
3046 || (uniq == 3 && count >= 6)))
3048 /* Optimize the case where all the case values fit in a
3049 word without having to subtract MINVAL. In this case,
3050 we can optimize away the subtraction. */
3051 if (compare_tree_int (minval, 0) > 0
3052 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
3054 minval = integer_zero_node;
3057 emit_case_bit_tests (index_type, index_expr, minval, range,
3058 thiscase->data.case_stmt.case_list,
3062 /* If range of values is much bigger than number of values,
3063 make a sequence of conditional branches instead of a dispatch.
3064 If the switch-index is a constant, do it this way
3065 because we can optimize it. */
3067 else if (count < case_values_threshold ()
3068 || compare_tree_int (range,
3069 (optimize_size ? 3 : 10) * count) > 0
3070 /* RANGE may be signed, and really large ranges will show up
3071 as negative numbers. */
3072 || compare_tree_int (range, 0) < 0
3073 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
3076 || TREE_CONSTANT (index_expr)
3077 /* If neither casesi or tablejump is available, we can
3078 only go this way. */
3079 || (!HAVE_casesi && !HAVE_tablejump))
3081 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
3083 /* If the index is a short or char that we do not have
3084 an insn to handle comparisons directly, convert it to
3085 a full integer now, rather than letting each comparison
3086 generate the conversion. */
3088 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
3089 && ! have_insn_for (COMPARE, GET_MODE (index)))
3091 enum machine_mode wider_mode;
3092 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
3093 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
3094 if (have_insn_for (COMPARE, wider_mode))
3096 index = convert_to_mode (wider_mode, index, unsignedp);
3101 do_pending_stack_adjust ();
3104 index = copy_to_reg (index);
3105 if (GET_CODE (index) == CONST_INT
3106 || TREE_CODE (index_expr) == INTEGER_CST)
3108 /* Make a tree node with the proper constant value
3109 if we don't already have one. */
3110 if (TREE_CODE (index_expr) != INTEGER_CST)
3113 = build_int_2 (INTVAL (index),
3114 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
3115 index_expr = convert (index_type, index_expr);
3118 /* For constant index expressions we need only
3119 issue an unconditional branch to the appropriate
3120 target code. The job of removing any unreachable
3121 code is left to the optimization phase if the
3122 "-O" option is specified. */
3123 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3124 if (! tree_int_cst_lt (index_expr, n->low)
3125 && ! tree_int_cst_lt (n->high, index_expr))
3129 emit_jump (label_rtx (n->code_label));
3131 emit_jump (default_label);
3135 /* If the index expression is not constant we generate
3136 a binary decision tree to select the appropriate
3137 target code. This is done as follows:
3139 The list of cases is rearranged into a binary tree,
3140 nearly optimal assuming equal probability for each case.
3142 The tree is transformed into RTL, eliminating
3143 redundant test conditions at the same time.
3145 If program flow could reach the end of the
3146 decision tree an unconditional jump to the
3147 default code is emitted. */
3150 = (TREE_CODE (orig_type) != ENUMERAL_TYPE
3151 && estimate_case_costs (thiscase->data.case_stmt.case_list));
3152 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
3153 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
3154 default_label, index_type);
3155 emit_jump (default_label);
3160 table_label = gen_label_rtx ();
3161 if (! try_casesi (index_type, index_expr, minval, range,
3162 table_label, default_label))
3164 index_type = integer_type_node;
3166 /* Index jumptables from zero for suitable values of
3167 minval to avoid a subtraction. */
3169 && compare_tree_int (minval, 0) > 0
3170 && compare_tree_int (minval, 3) < 0)
3172 minval = integer_zero_node;
3176 if (! try_tablejump (index_type, index_expr, minval, range,
3177 table_label, default_label))
3181 /* Get table of labels to jump to, in order of case index. */
3183 ncases = tree_low_cst (range, 0) + 1;
3184 labelvec = alloca (ncases * sizeof (rtx));
3185 memset (labelvec, 0, ncases * sizeof (rtx));
3187 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
3189 /* Compute the low and high bounds relative to the minimum
3190 value since that should fit in a HOST_WIDE_INT while the
3191 actual values may not. */
3193 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
3194 n->low, minval)), 1);
3195 HOST_WIDE_INT i_high
3196 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
3197 n->high, minval)), 1);
3200 for (i = i_low; i <= i_high; i ++)
3202 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
3205 /* Fill in the gaps with the default. */
3206 for (i = 0; i < ncases; i++)
3207 if (labelvec[i] == 0)
3208 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
3210 /* Output the table. */
3211 emit_label (table_label);
3213 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
3214 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
3215 gen_rtx_LABEL_REF (Pmode, table_label),
3216 gen_rtvec_v (ncases, labelvec),
3217 const0_rtx, const0_rtx));
3219 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
3220 gen_rtvec_v (ncases, labelvec)));
3222 /* If the case insn drops through the table,
3223 after the table we must jump to the default-label.
3224 Otherwise record no drop-through after the table. */
3225 #ifdef CASE_DROPS_THROUGH
3226 emit_jump (default_label);
3232 before_case = NEXT_INSN (before_case);
3233 end = get_last_insn ();
3234 if (squeeze_notes (&before_case, &end))
3236 reorder_insns (before_case, end,
3237 thiscase->data.case_stmt.start);
3240 if (thiscase->exit_label && !exit_done)
3241 emit_label (thiscase->exit_label);
3243 POPSTACK (case_stack);
3248 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
3251 do_jump_if_equal (rtx op1, rtx op2, rtx label, int unsignedp)
3253 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
3259 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
3260 (GET_MODE (op1) == VOIDmode
3261 ? GET_MODE (op2) : GET_MODE (op1)),
3265 /* Not all case values are encountered equally. This function
3266 uses a heuristic to weight case labels, in cases where that
3267 looks like a reasonable thing to do.
3269 Right now, all we try to guess is text, and we establish the
3272 chars above space: 16
3281 If we find any cases in the switch that are not either -1 or in the range
3282 of valid ASCII characters, or are control characters other than those
3283 commonly used with "\", don't treat this switch scanning text.
3285 Return 1 if these nodes are suitable for cost estimation, otherwise
3289 estimate_case_costs (case_node_ptr node)
3291 tree min_ascii = integer_minus_one_node;
3292 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
3296 /* If we haven't already made the cost table, make it now. Note that the
3297 lower bound of the table is -1, not zero. */
3299 if (! cost_table_initialized)
3301 cost_table_initialized = 1;
3303 for (i = 0; i < 128; i++)
3306 COST_TABLE (i) = 16;
3307 else if (ISPUNCT (i))
3309 else if (ISCNTRL (i))
3310 COST_TABLE (i) = -1;
3313 COST_TABLE (' ') = 8;
3314 COST_TABLE ('\t') = 4;
3315 COST_TABLE ('\0') = 4;
3316 COST_TABLE ('\n') = 2;
3317 COST_TABLE ('\f') = 1;
3318 COST_TABLE ('\v') = 1;
3319 COST_TABLE ('\b') = 1;
3322 /* See if all the case expressions look like text. It is text if the
3323 constant is >= -1 and the highest constant is <= 127. Do all comparisons
3324 as signed arithmetic since we don't want to ever access cost_table with a
3325 value less than -1. Also check that none of the constants in a range
3326 are strange control characters. */
3328 for (n = node; n; n = n->right)
3330 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
3333 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
3334 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
3335 if (COST_TABLE (i) < 0)
3339 /* All interesting values are within the range of interesting
3340 ASCII characters. */
3344 /* Determine whether two case labels branch to the same target.
3345 Since we now do tree optimizations, just comparing labels is
3349 same_case_target_p (rtx l1, rtx l2)
3354 /* Take an ordered list of case nodes
3355 and transform them into a near optimal binary tree,
3356 on the assumption that any target code selection value is as
3357 likely as any other.
3359 The transformation is performed by splitting the ordered
3360 list into two equal sections plus a pivot. The parts are
3361 then attached to the pivot as left and right branches. Each
3362 branch is then transformed recursively. */
3365 balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
3378 /* Count the number of entries on branch. Also count the ranges. */
3382 if (!tree_int_cst_equal (np->low, np->high))
3386 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
3390 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
3398 /* Split this list if it is long enough for that to help. */
3403 /* Find the place in the list that bisects the list's total cost,
3404 Here I gets half the total cost. */
3409 /* Skip nodes while their cost does not reach that amount. */
3410 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
3411 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
3412 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
3415 npp = &(*npp)->right;
3420 /* Leave this branch lopsided, but optimize left-hand
3421 side and fill in `parent' fields for right-hand side. */
3423 np->parent = parent;
3424 balance_case_nodes (&np->left, np);
3425 for (; np->right; np = np->right)
3426 np->right->parent = np;
3430 /* If there are just three nodes, split at the middle one. */
3432 npp = &(*npp)->right;
3435 /* Find the place in the list that bisects the list's total cost,
3436 where ranges count as 2.
3437 Here I gets half the total cost. */
3438 i = (i + ranges + 1) / 2;
3441 /* Skip nodes while their cost does not reach that amount. */
3442 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
3447 npp = &(*npp)->right;
3452 np->parent = parent;
3455 /* Optimize each of the two split parts. */
3456 balance_case_nodes (&np->left, np);
3457 balance_case_nodes (&np->right, np);
3461 /* Else leave this branch as one level,
3462 but fill in `parent' fields. */
3464 np->parent = parent;
3465 for (; np->right; np = np->right)
3466 np->right->parent = np;
3471 /* Search the parent sections of the case node tree
3472 to see if a test for the lower bound of NODE would be redundant.
3473 INDEX_TYPE is the type of the index expression.
3475 The instructions to generate the case decision tree are
3476 output in the same order as nodes are processed so it is
3477 known that if a parent node checks the range of the current
3478 node minus one that the current node is bounded at its lower
3479 span. Thus the test would be redundant. */
3482 node_has_low_bound (case_node_ptr node, tree index_type)
3485 case_node_ptr pnode;
3487 /* If the lower bound of this node is the lowest value in the index type,
3488 we need not test it. */
3490 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
3493 /* If this node has a left branch, the value at the left must be less
3494 than that at this node, so it cannot be bounded at the bottom and
3495 we need not bother testing any further. */
3500 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
3501 node->low, integer_one_node));
3503 /* If the subtraction above overflowed, we can't verify anything.
3504 Otherwise, look for a parent that tests our value - 1. */
3506 if (! tree_int_cst_lt (low_minus_one, node->low))
3509 for (pnode = node->parent; pnode; pnode = pnode->parent)
3510 if (tree_int_cst_equal (low_minus_one, pnode->high))
3516 /* Search the parent sections of the case node tree
3517 to see if a test for the upper bound of NODE would be redundant.
3518 INDEX_TYPE is the type of the index expression.
3520 The instructions to generate the case decision tree are
3521 output in the same order as nodes are processed so it is
3522 known that if a parent node checks the range of the current
3523 node plus one that the current node is bounded at its upper
3524 span. Thus the test would be redundant. */
3527 node_has_high_bound (case_node_ptr node, tree index_type)
3530 case_node_ptr pnode;
3532 /* If there is no upper bound, obviously no test is needed. */
3534 if (TYPE_MAX_VALUE (index_type) == NULL)
3537 /* If the upper bound of this node is the highest value in the type
3538 of the index expression, we need not test against it. */
3540 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
3543 /* If this node has a right branch, the value at the right must be greater
3544 than that at this node, so it cannot be bounded at the top and
3545 we need not bother testing any further. */
3550 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
3551 node->high, integer_one_node));
3553 /* If the addition above overflowed, we can't verify anything.
3554 Otherwise, look for a parent that tests our value + 1. */
3556 if (! tree_int_cst_lt (node->high, high_plus_one))
3559 for (pnode = node->parent; pnode; pnode = pnode->parent)
3560 if (tree_int_cst_equal (high_plus_one, pnode->low))
3566 /* Search the parent sections of the
3567 case node tree to see if both tests for the upper and lower
3568 bounds of NODE would be redundant. */
3571 node_is_bounded (case_node_ptr node, tree index_type)
3573 return (node_has_low_bound (node, index_type)
3574 && node_has_high_bound (node, index_type));
3577 /* Emit step-by-step code to select a case for the value of INDEX.
3578 The thus generated decision tree follows the form of the
3579 case-node binary tree NODE, whose nodes represent test conditions.
3580 INDEX_TYPE is the type of the index of the switch.
3582 Care is taken to prune redundant tests from the decision tree
3583 by detecting any boundary conditions already checked by
3584 emitted rtx. (See node_has_high_bound, node_has_low_bound
3585 and node_is_bounded, above.)
3587 Where the test conditions can be shown to be redundant we emit
3588 an unconditional jump to the target code. As a further
3589 optimization, the subordinates of a tree node are examined to
3590 check for bounded nodes. In this case conditional and/or
3591 unconditional jumps as a result of the boundary check for the
3592 current node are arranged to target the subordinates associated
3593 code for out of bound conditions on the current node.
3595 We can assume that when control reaches the code generated here,
3596 the index value has already been compared with the parents
3597 of this node, and determined to be on the same side of each parent
3598 as this node is. Thus, if this node tests for the value 51,
3599 and a parent tested for 52, we don't need to consider
3600 the possibility of a value greater than 51. If another parent
3601 tests for the value 50, then this node need not test anything. */
/* NOTE(review): this chunk is heavily elided — the `static void`
   declarator, all braces, several argument lines of the
   emit_cmp_and_jump_insns / expand_expr calls, and some branch
   targets are not visible.  The function recurses on node->left and
   node->right; only the surviving statements are annotated below.  */
3604 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
3607 /* If INDEX has an unsigned type, we must make unsigned branches. */
3608 int unsignedp = TYPE_UNSIGNED (index_type);
/* MODE is the machine mode of the index rtx; IMODE is the mode the
   front-end type maps to — case-label constants are expanded in IMODE
   and converted to MODE before comparison.  */
3609 enum machine_mode mode = GET_MODE (index);
3610 enum machine_mode imode = TYPE_MODE (index_type);
3612 /* See if our parents have already tested everything for us.
3613 If they have, emit an unconditional jump for this node. */
3614 if (node_is_bounded (node, index_type))
3615 emit_jump (label_rtx (node->code_label));
3617 else if (tree_int_cst_equal (node->low, node->high))
3619 /* Node is single valued. First see if the index expression matches
3620 this node and then check our children, if any. */
3622 do_jump_if_equal (index,
3623 convert_modes (mode, imode,
3624 expand_expr (node->low, NULL_RTX,
3627 label_rtx (node->code_label), unsignedp);
3629 if (node->right != 0 && node->left != 0)
3631 /* This node has children on both sides.
3632 Dispatch to one side or the other
3633 by comparing the index value with this node's value.
3634 If one subtree is bounded, check that one first,
3635 so we can avoid real branches in the tree. */
3637 if (node_is_bounded (node->right, index_type))
/* Bounded right subtree: jump straight to its code when
   index > node->high, then fall into the left subtree.  */
3639 emit_cmp_and_jump_insns (index,
3642 expand_expr (node->high, NULL_RTX,
3645 GT, NULL_RTX, mode, unsignedp,
3646 label_rtx (node->right->code_label));
3647 emit_case_nodes (index, node->left, default_label, index_type);
3650 else if (node_is_bounded (node->left, index_type))
/* Symmetric case: bounded left subtree, jump on index < high.  */
3652 emit_cmp_and_jump_insns (index,
3655 expand_expr (node->high, NULL_RTX,
3658 LT, NULL_RTX, mode, unsignedp,
3659 label_rtx (node->left->code_label));
3660 emit_case_nodes (index, node->right, default_label, index_type);
3663 /* If both children are single-valued cases with no
3664 children, finish up all the work. This way, we can save
3665 one ordered comparison. */
3666 else if (tree_int_cst_equal (node->right->low, node->right->high)
3667 && node->right->left == 0
3668 && node->right->right == 0
3669 && tree_int_cst_equal (node->left->low, node->left->high)
3670 && node->left->left == 0
3671 && node->left->right == 0)
3673 /* Neither node is bounded. First distinguish the two sides;
3674 then emit the code for one side at a time. */
3676 /* See if the value matches what the right hand side
3678 do_jump_if_equal (index,
3679 convert_modes (mode, imode,
3680 expand_expr (node->right->low,
3684 label_rtx (node->right->code_label),
3687 /* See if the value matches what the left hand side
3689 do_jump_if_equal (index,
3690 convert_modes (mode, imode,
3691 expand_expr (node->left->low,
3695 label_rtx (node->left->code_label),
3701 /* Neither node is bounded. First distinguish the two sides;
3702 then emit the code for one side at a time. */
/* TEST_LABEL marks the start of the right-subtree code emitted
   further down; the GT branch below targets it.  */
3704 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
3706 /* See if the value is on the right. */
3707 emit_cmp_and_jump_insns (index,
3710 expand_expr (node->high, NULL_RTX,
3713 GT, NULL_RTX, mode, unsignedp,
3714 label_rtx (test_label));
3716 /* Value must be on the left.
3717 Handle the left-hand subtree. */
3718 emit_case_nodes (index, node->left, default_label, index_type);
3719 /* If left-hand subtree does nothing,
3721 emit_jump (default_label);
3723 /* Code branches here for the right-hand subtree. */
3724 expand_label (test_label);
3725 emit_case_nodes (index, node->right, default_label, index_type);
3729 else if (node->right != 0 && node->left == 0)
3731 /* Here we have a right child but no left so we issue conditional
3732 branch to default and process the right child.
3734 Omit the conditional branch to default if we it avoid only one
3735 right child; it costs too much space to save so little time. */
3737 if (node->right->right || node->right->left
3738 || !tree_int_cst_equal (node->right->low, node->right->high))
3740 if (!node_has_low_bound (node, index_type))
/* NOTE(review): the branch target line for this comparison is
   elided — presumably default_label; confirm against stmt.c.  */
3742 emit_cmp_and_jump_insns (index,
3745 expand_expr (node->high, NULL_RTX,
3748 LT, NULL_RTX, mode, unsignedp,
3752 emit_case_nodes (index, node->right, default_label, index_type);
3755 /* We cannot process node->right normally
3756 since we haven't ruled out the numbers less than
3757 this node's value. So handle node->right explicitly. */
3758 do_jump_if_equal (index,
3761 expand_expr (node->right->low, NULL_RTX,
3764 label_rtx (node->right->code_label), unsignedp);
3767 else if (node->right == 0 && node->left != 0)
3769 /* Just one subtree, on the left. */
3770 if (node->left->left || node->left->right
3771 || !tree_int_cst_equal (node->left->low, node->left->high))
3773 if (!node_has_high_bound (node, index_type))
3775 emit_cmp_and_jump_insns (index,
3778 expand_expr (node->high, NULL_RTX,
3781 GT, NULL_RTX, mode, unsignedp,
3785 emit_case_nodes (index, node->left, default_label, index_type);
3788 /* We cannot process node->left normally
3789 since we haven't ruled out the numbers less than
3790 this node's value. So handle node->left explicitly. */
3791 do_jump_if_equal (index,
3794 expand_expr (node->left->low, NULL_RTX,
3797 label_rtx (node->left->code_label), unsignedp);
3802 /* Node is a range. These cases are very similar to those for a single
3803 value, except that we do not start by testing whether this node
3804 is the one to branch to. */
3806 if (node->right != 0 && node->left != 0)
3808 /* Node has subtrees on both sides.
3809 If the right-hand subtree is bounded,
3810 test for it first, since we can go straight there.
3811 Otherwise, we need to make a branch in the control structure,
3812 then handle the two subtrees. */
3813 tree test_label = 0;
3815 if (node_is_bounded (node->right, index_type))
3816 /* Right hand node is fully bounded so we can eliminate any
3817 testing and branch directly to the target code. */
3818 emit_cmp_and_jump_insns (index,
3821 expand_expr (node->high, NULL_RTX,
3824 GT, NULL_RTX, mode, unsignedp,
3825 label_rtx (node->right->code_label));
3828 /* Right hand node requires testing.
3829 Branch to a label where we will handle it later. */
3831 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
3832 emit_cmp_and_jump_insns (index,
3835 expand_expr (node->high, NULL_RTX,
3838 GT, NULL_RTX, mode, unsignedp,
3839 label_rtx (test_label));
3842 /* Value belongs to this node or to the left-hand subtree. */
/* index >= node->low means it is inside this node's range (values
   above node->high were dispatched right just above).  */
3844 emit_cmp_and_jump_insns (index,
3847 expand_expr (node->low, NULL_RTX,
3850 GE, NULL_RTX, mode, unsignedp,
3851 label_rtx (node->code_label));
3853 /* Handle the left-hand subtree. */
3854 emit_case_nodes (index, node->left, default_label, index_type);
3856 /* If right node had to be handled later, do that now. */
3860 /* If the left-hand subtree fell through,
3861 don't let it fall into the right-hand subtree. */
3862 emit_jump (default_label);
3864 expand_label (test_label);
3865 emit_case_nodes (index, node->right, default_label, index_type);
3869 else if (node->right != 0 && node->left == 0)
3871 /* Deal with values to the left of this node,
3872 if they are possible. */
3873 if (!node_has_low_bound (node, index_type))
/* NOTE(review): branch target line elided here — presumably
   default_label for index < node->low.  */
3875 emit_cmp_and_jump_insns (index,
3878 expand_expr (node->low, NULL_RTX,
3881 LT, NULL_RTX, mode, unsignedp,
3885 /* Value belongs to this node or to the right-hand subtree. */
3887 emit_cmp_and_jump_insns (index,
3890 expand_expr (node->high, NULL_RTX,
3893 LE, NULL_RTX, mode, unsignedp,
3894 label_rtx (node->code_label));
3896 emit_case_nodes (index, node->right, default_label, index_type);
3899 else if (node->right == 0 && node->left != 0)
3901 /* Deal with values to the right of this node,
3902 if they are possible. */
3903 if (!node_has_high_bound (node, index_type))
3905 emit_cmp_and_jump_insns (index,
3908 expand_expr (node->high, NULL_RTX,
3911 GT, NULL_RTX, mode, unsignedp,
3915 /* Value belongs to this node or to the left-hand subtree. */
3917 emit_cmp_and_jump_insns (index,
3920 expand_expr (node->low, NULL_RTX,
3923 GE, NULL_RTX, mode, unsignedp,
3924 label_rtx (node->code_label));
3926 emit_case_nodes (index, node->left, default_label, index_type);
3931 /* Node has no children so we check low and high bounds to remove
3932 redundant tests. Only one of the bounds can exist,
3933 since otherwise this node is bounded--a case tested already. */
3934 int high_bound = node_has_high_bound (node, index_type);
3935 int low_bound = node_has_low_bound (node, index_type);
3937 if (!high_bound && low_bound)
3939 emit_cmp_and_jump_insns (index,
3942 expand_expr (node->high, NULL_RTX,
3945 GT, NULL_RTX, mode, unsignedp,
3949 else if (!low_bound && high_bound)
3951 emit_cmp_and_jump_insns (index,
3954 expand_expr (node->low, NULL_RTX,
3957 LT, NULL_RTX, mode, unsignedp,
3960 else if (!low_bound && !high_bound)
3962 /* Widen LOW and HIGH to the same width as INDEX. */
3963 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
3964 tree low = build1 (CONVERT_EXPR, type, node->low);
3965 tree high = build1 (CONVERT_EXPR, type, node->high);
3966 rtx low_rtx, new_index, new_bound;
3968 /* Instead of doing two branches, emit one unsigned branch for
3969 (index-low) > (high-low). */
/* Classic range-check trick: after subtracting LOW, any value
   outside [low, high] wraps to an unsigned value greater than
   high - low, so a single unsigned GT catches both bounds.  */
3970 low_rtx = expand_expr (low, NULL_RTX, mode, 0);
3971 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
3972 NULL_RTX, unsignedp,
3974 new_bound = expand_expr (fold (build (MINUS_EXPR, type,
/* The comparison is always unsigned (the `1` argument) regardless of
   UNSIGNEDP, as the wrap-around trick requires.  */
3978 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
3979 mode, 1, default_label);
/* In range: branch to this node's case code.  */
3982 emit_jump (label_rtx (node->code_label));
3987 #include "gt-stmt.h"