1 /* Expands front end tree to back end RTL for GCC
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
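/* Illustrative sketch (not code from this file): how a front end might drive
   the expand_ entry points for a simple if-then, per the description above.
   The helper name emit_if_then and the trees `cond' and `then_body' are
   hypothetical; only the expand_start_cond/expand_end_cond/expand_expr_stmt
   interface described in this file is assumed.  */
#if 0
static void
emit_if_then (tree cond, tree then_body)
{
  expand_start_cond (cond, /* exitflag = */ 0);  /* emit the test and branch */
  expand_expr_stmt (then_body);                  /* emit the then-clause */
  expand_end_cond ();                            /* emit the join label */
}
#endif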
38 #include "coretypes.h"
47 #include "insn-config.h"
50 #include "hard-reg-set.h"
57 #include "langhooks.h"
61 /* Assume that case vectors are not pc-relative. */
62 #ifndef CASE_VECTOR_PC_RELATIVE
63 #define CASE_VECTOR_PC_RELATIVE 0
66 /* Functions and data structures for expanding case statements. */
68 /* Case label structure, used to hold info on labels within case
69 statements. We handle "range" labels; for a single-value label
70 as in C, the high and low limits are the same.
72 An AVL tree of case nodes is initially created, and later transformed
73 to a list linked via the RIGHT fields in the nodes. Nodes with
74 higher case values are later in the list.
76 Switch statements can be output in one of two forms. A branch table
77 is used if there are more than a few labels and the labels are dense
78 within the range between the smallest and largest case value. If a
79 branch table is used, no further manipulations are done with the case node chain.
82 The alternative to the use of a branch table is to generate a series
83 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
84 and PARENT fields to hold a binary tree. Initially the tree is
85 totally unbalanced, with everything on the right. We balance the tree
86 with nodes on the left having lower case values than the parent
87 and nodes on the right having higher values. We then output the tree in order. */
90 struct case_node GTY(())
92 struct case_node *left; /* Left son in binary tree */
93 struct case_node *right; /* Right son in binary tree; also node chain */
94 struct case_node *parent; /* Parent of node in binary tree */
95 tree low; /* Lowest index value for this label */
96 tree high; /* Highest index value for this label */
97 tree code_label; /* Label to jump to when node matches */
101 typedef struct case_node case_node;
102 typedef struct case_node *case_node_ptr;
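/* Illustrative sketch (standalone, not GCC code): the compare-and-jump form
   described above behaves like a binary search over the sorted case values,
   whereas the branch-table form is a single indexed jump.  The helper below
   only mimics the balanced case_node tree walk on a sorted array.  */
#if 0
#include <stddef.h>

/* Returns the position of INDEX among the sorted CASE_VALUES, or -1 to
   stand for the default label.  */
static int
dispatch_by_tree (const int *case_values, size_t n, int index)
{
  size_t lo = 0, hi = n;
  while (lo < hi)
    {
      size_t mid = lo + (hi - lo) / 2;
      if (index == case_values[mid])
	return (int) mid;
      else if (index < case_values[mid])
	hi = mid;		/* descend into the LEFT subtree */
      else
	lo = mid + 1;		/* descend into the RIGHT subtree */
    }
  return -1;			/* fall through to the default label */
}
#endif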
104 /* These are used by estimate_case_costs and balance_case_nodes. */
106 /* This must be a signed type, and non-ANSI compilers lack signed char. */
107 static short cost_table_[129];
108 static int use_cost_table;
109 static int cost_table_initialized;
111 /* Special care is needed because we allow -1, but TREE_INT_CST_LOW is unsigned. */
113 #define COST_TABLE(I) cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
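/* Worked example (illustrative, derived from the macro above): indices
   -1 .. 127 map onto cost_table_[0] .. cost_table_[128], so every slot of
   the 129-entry table is reachable without a negative subscript.  */
#if 0
  short c;
  c = COST_TABLE (-1);		/* expands to cost_table_[0]   */
  c = COST_TABLE (0);		/* expands to cost_table_[1]   */
  c = COST_TABLE (127);		/* expands to cost_table_[128] */
#endif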
115 /* Stack of control and binding constructs we are currently inside.
117 These constructs begin when you call `expand_start_WHATEVER'
118 and end when you call `expand_end_WHATEVER'. This stack records
119 info about how the construct began that tells the end-function
120 what to do. It also may provide information about the construct
121 to alter the behavior of other constructs within the body.
122 For example, they may affect the behavior of C `break' and `continue'.
124 Each construct gets one `struct nesting' object.
125 All of these objects are chained through the `all' field.
126 `nesting_stack' points to the first object (innermost construct).
127 The position of an entry on `nesting_stack' is in its `depth' field.
129 Each type of construct has its own individual stack.
130 For example, loops have `loop_stack'. Each object points to the
131 next object of the same type through the `next' field.
133 Some constructs are visible to `break' exit-statements and others
134 are not. Which constructs are visible depends on the language.
135 Therefore, the data structure allows each construct to be visible
136 or not, according to the args given when the construct is started.
137 The construct is visible if the `exit_label' field is non-null.
138 In that case, the value should be a CODE_LABEL rtx. */
140 struct nesting GTY(())
143 struct nesting *next;
154 /* For conds (if-then and if-then-else statements). */
157 /* Label for the end of the if construct.
158 There is none if EXITFLAG was not set
159 and no `else' has been seen yet. */
161 /* Label for the end of this alternative.
162 This may be the end of the if or the next else/elseif. */
164 } GTY ((tag ("COND_NESTING"))) cond;
168 /* Label at the top of the loop; place to loop back to. */
170 /* Label at the end of the whole construct. */
172 /* Label for `continue' statement to jump to;
173 this is in front of the stepper of the loop. */
175 } GTY ((tag ("LOOP_NESTING"))) loop;
176 /* For variable binding contours. */
179 /* Sequence number of this binding contour within the function,
180 in order of entry. */
181 int block_start_count;
182 /* Nonzero => value to restore stack to on exit. */
184 /* The NOTE that starts this contour.
185 Used by expand_goto to check whether the destination
186 is within each contour or not. */
188 /* Innermost containing binding contour that has a stack level. */
189 struct nesting *innermost_stack_block;
190 /* List of cleanups to be run on exit from this contour.
191 This is a list of expressions to be evaluated.
192 The TREE_PURPOSE of each link is the ..._DECL node
193 which the cleanup pertains to. */
195 /* List of cleanup-lists of blocks containing this block,
196 as they were at the locus where this block appears.
197 There is an element for each containing block,
198 ordered innermost containing block first.
199 The tail of this list can be 0,
200 if all remaining elements would be empty lists.
201 The element's TREE_VALUE is the cleanup-list of that block,
202 which may be null. */
204 /* Chain of labels defined inside this binding contour.
205 For contours that have stack levels or cleanups. */
206 struct label_chain *label_chain;
207 /* Nonzero if this is associated with an EH region. */
208 int exception_region;
209 /* The saved target_temp_slot_level from our outer block.
210 We may reset target_temp_slot_level to be the level of
211 this block; if that is done, target_temp_slot_level
212 reverts to the saved target_temp_slot_level at the very end. */
214 int block_target_temp_slot_level;
215 /* True if we are currently emitting insns in an area of
216 output code that is controlled by a conditional
217 expression. This is used by the cleanup handling code to
218 generate conditional cleanup actions. */
219 int conditional_code;
220 /* A place to move the start of the exception region for any
221 of the conditional cleanups, must be at the end or after
222 the start of the last unconditional cleanup, and before any
223 conditional branch points. */
224 rtx last_unconditional_cleanup;
225 } GTY ((tag ("BLOCK_NESTING"))) block;
226 /* For switch (C) or case (Pascal) statements,
227 and also for dummies (see `expand_start_case_dummy'). */
230 /* The insn after which the case dispatch should finally
231 be emitted. Zero for a dummy. */
233 /* A list of case labels; it is first built as an AVL tree.
234 During expand_end_case, this is converted to a list, and may be
235 rearranged into a nearly balanced binary tree. */
236 struct case_node *case_list;
237 /* Label to jump to if no case matches. */
239 /* The expression to be dispatched on. */
241 /* Type that INDEX_EXPR should be converted to. */
243 /* Name of this kind of statement, for warnings. */
244 const char *printname;
245 /* Used to save no_line_numbers till we see the first case label.
246 We set this to -1 when we see the first case label in this case statement. */
248 int line_number_status;
249 } GTY ((tag ("CASE_NESTING"))) case_stmt;
250 } GTY ((desc ("%1.desc"))) data;
253 /* Allocate and return a new `struct nesting'. */
255 #define ALLOC_NESTING() \
256 (struct nesting *) ggc_alloc (sizeof (struct nesting))
258 /* Pop the nesting stack element by element until we pop off
259 the element which is at the top of STACK.
260 Update all the other stacks, popping off elements from them
261 as we pop them from nesting_stack. */
263 #define POPSTACK(STACK) \
264 do { struct nesting *target = STACK; \
265 struct nesting *this; \
266 do { this = nesting_stack; \
267 if (loop_stack == this) \
268 loop_stack = loop_stack->next; \
269 if (cond_stack == this) \
270 cond_stack = cond_stack->next; \
271 if (block_stack == this) \
272 block_stack = block_stack->next; \
273 if (stack_block_stack == this) \
274 stack_block_stack = stack_block_stack->next; \
275 if (case_stack == this) \
276 case_stack = case_stack->next; \
277 nesting_depth = nesting_stack->depth - 1; \
278 nesting_stack = this->all; } \
279 while (this != target); } while (0)
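/* Hedged sketch (the function name is hypothetical): a typical
   `expand_end_WHATEVER' routine ends by popping its construct with POPSTACK,
   which also unwinds anything nested more deeply that was never explicitly
   ended, keeping every per-type stack consistent.  */
#if 0
static void
expand_end_example (void)
{
  /* ... emit the code that finishes the innermost case statement ... */
  POPSTACK (case_stack);	/* pops it, plus anything nested inside it,
				   keeping loop_stack, cond_stack, etc.
				   in step with nesting_stack */
}
#endif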
281 /* In some cases it is impossible to generate code for a forward goto
282 until the label definition is seen. This happens when it may be necessary
283 for the goto to reset the stack pointer: we don't yet know how to do that.
284 So expand_goto puts an entry on this fixup list.
285 Each time a binding contour that resets the stack is exited,
287 If the target label has now been defined, we can insert the proper code. */
289 struct goto_fixup GTY(())
291 /* Points to following fixup. */
292 struct goto_fixup *next;
293 /* Points to the insn before the jump insn.
294 If more code must be inserted, it goes after this insn. */
296 /* The LABEL_DECL that this jump is jumping to, or 0
297 for break, continue or return. */
299 /* The BLOCK for the place where this goto was found. */
301 /* The CODE_LABEL rtx that this is jumping to. */
303 /* Number of binding contours started in current function
304 before the label reference. */
305 int block_start_count;
306 /* The outermost stack level that should be restored for this jump.
307 Each time a binding contour that resets the stack is exited,
308 if the target label is *not* yet defined, this slot is updated. */
310 /* List of lists of cleanup expressions to be run by this goto.
311 There is one element for each block that this goto is within.
312 The tail of this list can be 0,
313 if all remaining elements would be empty.
314 The TREE_VALUE contains the cleanup list of that block as of the
315 time this goto was seen.
316 The TREE_ADDRESSABLE flag is 1 for a block that has been exited. */
317 tree cleanup_list_list;
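/* Example of user code that needs such a fixup (illustrative only; `use' is
   a hypothetical helper): the forward goto is seen before its label, and it
   leaves a contour whose stack level must be restored.  */
#if 0
extern void use (char *);

void
example (int n)
{
  {
    char buf[n];	/* this contour changes the stack level */
    if (n > 16)
      goto done;	/* forward goto: label not yet defined, so
			   expand_goto records a struct goto_fixup */
    use (buf);
  }			/* exiting the contour lets fixup_gotos record
			   the stack level the goto must restore */
 done:
  return;
}
#endif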
320 /* Within any binding contour that must restore a stack level,
321 all labels are recorded with a chain of these structures. */
323 struct label_chain GTY(())
325 /* Points to the following entry in this chain of labels. */
326 struct label_chain *next;
330 struct stmt_status GTY(())
332 /* Chain of all pending binding contours. */
333 struct nesting * x_block_stack;
335 /* If any new stacks are added here, add them to POPSTACKS too. */
337 /* Chain of all pending binding contours that restore stack levels or have cleanups. */
339 struct nesting * x_stack_block_stack;
341 /* Chain of all pending conditional statements. */
342 struct nesting * x_cond_stack;
344 /* Chain of all pending loops. */
345 struct nesting * x_loop_stack;
347 /* Chain of all pending case or switch statements. */
348 struct nesting * x_case_stack;
350 /* Separate chain including all of the above,
351 chained through the `all' field. */
352 struct nesting * x_nesting_stack;
354 /* Number of entries on nesting_stack now. */
357 /* Number of binding contours started so far in this function. */
358 int x_block_start_count;
360 /* Each time we expand an expression-statement,
361 record the expr's type and its RTL value here. */
362 tree x_last_expr_type;
363 rtx x_last_expr_value;
365 /* Nonzero if within a ({...}) grouping, in which case we must
366 always compute a value for each expr-stmt in case it is the last one. */
367 int x_expr_stmts_for_value;
369 /* Location of last line-number note, whether we actually
370 emitted it or not. */
371 location_t x_emit_locus;
373 struct goto_fixup *x_goto_fixup_chain;
376 #define block_stack (cfun->stmt->x_block_stack)
377 #define stack_block_stack (cfun->stmt->x_stack_block_stack)
378 #define cond_stack (cfun->stmt->x_cond_stack)
379 #define loop_stack (cfun->stmt->x_loop_stack)
380 #define case_stack (cfun->stmt->x_case_stack)
381 #define nesting_stack (cfun->stmt->x_nesting_stack)
382 #define nesting_depth (cfun->stmt->x_nesting_depth)
383 #define current_block_start_count (cfun->stmt->x_block_start_count)
384 #define last_expr_type (cfun->stmt->x_last_expr_type)
385 #define last_expr_value (cfun->stmt->x_last_expr_value)
386 #define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
387 #define emit_locus (cfun->stmt->x_emit_locus)
388 #define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)
390 /* Nonzero if we are using EH to handle cleanups. */
391 static int using_eh_for_cleanups_p = 0;
393 static int n_occurrences (int, const char *);
394 static bool parse_input_constraint (const char **, int, int, int, int,
395 const char * const *, bool *, bool *);
396 static bool decl_conflicts_with_clobbers_p (tree, const HARD_REG_SET);
397 static void expand_goto_internal (tree, rtx, rtx);
398 static int expand_fixup (tree, rtx, rtx);
399 static rtx expand_nl_handler_label (rtx, rtx);
400 static void expand_nl_goto_receiver (void);
401 static void expand_nl_goto_receivers (struct nesting *);
402 static void fixup_gotos (struct nesting *, rtx, tree, rtx, int);
403 static bool check_operand_nalternatives (tree, tree);
404 static bool check_unique_operand_names (tree, tree);
405 static char *resolve_operand_name_1 (char *, tree, tree);
406 static void expand_null_return_1 (rtx);
407 static enum br_predictor return_prediction (rtx);
408 static void expand_value_return (rtx);
409 static int tail_recursion_args (tree, tree);
410 static void expand_cleanups (tree, int, int);
411 static void check_seenlabel (void);
412 static void do_jump_if_equal (rtx, rtx, rtx, int);
413 static int estimate_case_costs (case_node_ptr);
414 static bool same_case_target_p (rtx, rtx);
415 static void strip_default_case_nodes (case_node_ptr *, rtx);
416 static bool lshift_cheap_p (void);
417 static int case_bit_test_cmp (const void *, const void *);
418 static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
419 static void group_case_nodes (case_node_ptr);
420 static void balance_case_nodes (case_node_ptr *, case_node_ptr);
421 static int node_has_low_bound (case_node_ptr, tree);
422 static int node_has_high_bound (case_node_ptr, tree);
423 static int node_is_bounded (case_node_ptr, tree);
424 static void emit_jump_if_reachable (rtx);
425 static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
426 static struct case_node *case_tree2list (case_node *, case_node *);
429 using_eh_for_cleanups (void)
431 using_eh_for_cleanups_p = 1;
435 init_stmt_for_function (void)
437 cfun->stmt = ((struct stmt_status *)ggc_alloc (sizeof (struct stmt_status)));
439 /* We are not currently within any block, conditional, loop or case. */
441 stack_block_stack = 0;
448 current_block_start_count = 0;
450 /* No gotos have been expanded yet. */
451 goto_fixup_chain = 0;
453 /* We are not processing a ({...}) grouping. */
454 expr_stmts_for_value = 0;
458 /* Record the current file and line. Called from emit_line_note. */
460 set_file_and_line_for_stmt (const char *file, int line)
462 /* If we're outputting an inline function, and we add a line note,
463 there may be no CFUN->STMT information. So, there's no need to
467 emit_locus.file = file;
468 emit_locus.line = line;
472 /* Emit a no-op instruction. */
479 last_insn = get_last_insn ();
481 && (GET_CODE (last_insn) == CODE_LABEL
482 || (GET_CODE (last_insn) == NOTE
483 && prev_real_insn (last_insn) == 0)))
484 emit_insn (gen_nop ());
487 /* Return the rtx-label that corresponds to a LABEL_DECL,
488 creating it if necessary. */
491 label_rtx (tree label)
493 if (TREE_CODE (label) != LABEL_DECL)
496 if (!DECL_RTL_SET_P (label))
497 SET_DECL_RTL (label, gen_label_rtx ());
499 return DECL_RTL (label);
502 /* As above, but also put it on the forced-reference list of the
503 function that contains it. */
505 force_label_rtx (tree label)
507 rtx ref = label_rtx (label);
508 tree function = decl_function_context (label);
514 if (function != current_function_decl
515 && function != inline_function_decl)
516 p = find_function_data (function);
520 p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
521 p->expr->x_forced_labels);
525 /* Add an unconditional jump to LABEL as the next sequential instruction. */
528 emit_jump (rtx label)
530 do_pending_stack_adjust ();
531 emit_jump_insn (gen_jump (label));
535 /* Emit code to jump to the address
536 specified by the pointer expression EXP. */
539 expand_computed_goto (tree exp)
541 rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
543 #ifdef POINTERS_EXTEND_UNSIGNED
544 if (GET_MODE (x) != Pmode)
545 x = convert_memory_address (Pmode, x);
550 if (! cfun->computed_goto_common_label)
552 cfun->computed_goto_common_reg = copy_to_mode_reg (Pmode, x);
553 cfun->computed_goto_common_label = gen_label_rtx ();
554 emit_label (cfun->computed_goto_common_label);
556 do_pending_stack_adjust ();
557 emit_indirect_jump (cfun->computed_goto_common_reg);
559 current_function_has_computed_jump = 1;
563 emit_move_insn (cfun->computed_goto_common_reg, x);
564 emit_jump (cfun->computed_goto_common_label);
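/* For reference (illustrative), the user-level construct that reaches
   expand_computed_goto is the GNU C "labels as values" extension:  */
#if 0
int
next_state (int s)
{
  static void *const table[] = { &&state0, &&state1 };
  goto *table[s];		/* expanded by expand_computed_goto */
 state0:
  return 1;
 state1:
  return 0;
}
#endif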
568 /* Handle goto statements and the labels that they can go to. */
570 /* Specify the location in the RTL code of a label LABEL,
571 which is a LABEL_DECL tree node.
573 This is used for the kind of label that the user can jump to with a
574 goto statement, and for alternatives of a switch or case statement.
575 RTL labels generated for loops and conditionals don't go through here;
576 they are generated directly at the RTL level, by other functions below.
578 Note that this has nothing to do with defining label *names*.
579 Languages vary in how they do that and what that even means. */
582 expand_label (tree label)
584 struct label_chain *p;
586 do_pending_stack_adjust ();
587 emit_label (label_rtx (label));
588 if (DECL_NAME (label))
589 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
591 if (stack_block_stack != 0)
593 p = (struct label_chain *) ggc_alloc (sizeof (struct label_chain));
594 p->next = stack_block_stack->data.block.label_chain;
595 stack_block_stack->data.block.label_chain = p;
600 /* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
601 from nested functions. */
604 declare_nonlocal_label (tree label)
606 rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
608 nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
609 LABEL_PRESERVE_P (label_rtx (label)) = 1;
610 if (nonlocal_goto_handler_slots == 0)
612 emit_stack_save (SAVE_NONLOCAL,
613 &nonlocal_goto_stack_level,
614 PREV_INSN (tail_recursion_reentry));
616 nonlocal_goto_handler_slots
617 = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
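/* Illustrative GNU C source that exercises this path: a label declared with
   `__label__' in the containing function and targeted by a goto inside a
   nested function becomes a nonlocal label.  */
#if 0
int
outer (int x)
{
  __label__ failed;		/* makes `failed' visible to nested code */
  void check (int v) { if (v < 0) goto failed; }	/* nonlocal goto */
  check (x);
  return 0;
 failed:
  return -1;
}
#endif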
620 /* Generate RTL code for a `goto' statement with target label LABEL.
621 LABEL should be a LABEL_DECL tree node that was or will later be
622 defined with `expand_label'. */
625 expand_goto (tree label)
629 /* Check for a nonlocal goto to a containing function. */
630 context = decl_function_context (label);
631 if (context != 0 && context != current_function_decl)
633 struct function *p = find_function_data (context);
634 rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
635 rtx handler_slot, static_chain, save_area, insn;
638 /* Find the corresponding handler slot for this label. */
639 handler_slot = p->x_nonlocal_goto_handler_slots;
640 for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
641 link = TREE_CHAIN (link))
642 handler_slot = XEXP (handler_slot, 1);
643 handler_slot = XEXP (handler_slot, 0);
645 p->has_nonlocal_label = 1;
646 current_function_has_nonlocal_goto = 1;
647 LABEL_REF_NONLOCAL_P (label_ref) = 1;
649 /* Copy the rtl for the slots so that they won't be shared in
650 case the virtual stack vars register gets instantiated differently
651 in the parent than in the child. */
653 static_chain = copy_to_reg (lookup_static_chain (label));
655 /* Get addr of containing function's current nonlocal goto handler,
656 which will do any cleanups and then jump to the label. */
657 handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
658 virtual_stack_vars_rtx,
661 /* Get addr of containing function's nonlocal save area. */
662 save_area = p->x_nonlocal_goto_stack_level;
664 save_area = replace_rtx (copy_rtx (save_area),
665 virtual_stack_vars_rtx, static_chain);
667 #if HAVE_nonlocal_goto
668 if (HAVE_nonlocal_goto)
669 emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
670 save_area, label_ref));
674 /* Restore frame pointer for containing function.
675 This sets the actual hard register used for the frame pointer
676 to the location of the function's incoming static chain info.
677 The non-local goto handler will then adjust it to contain the
678 proper value and reload the argument pointer, if needed. */
679 emit_move_insn (hard_frame_pointer_rtx, static_chain);
680 emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);
682 /* USE of hard_frame_pointer_rtx added for consistency;
683 not clear if really needed. */
684 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
685 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
686 emit_indirect_jump (handler_slot);
689 /* Search backwards to the jump insn and mark it as a
691 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
693 if (GET_CODE (insn) == JUMP_INSN)
695 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
696 const0_rtx, REG_NOTES (insn));
699 else if (GET_CODE (insn) == CALL_INSN)
704 expand_goto_internal (label, label_rtx (label), NULL_RTX);
707 /* Generate RTL code for a `goto' statement with target label BODY.
708 LABEL should be a CODE_LABEL rtx (as returned by `label_rtx').
709 LAST_INSN, if non-0, is the rtx we should consider as the last
710 insn emitted (for the purposes of cleaning up a return). */
713 expand_goto_internal (tree body, rtx label, rtx last_insn)
715 struct nesting *block;
718 if (GET_CODE (label) != CODE_LABEL)
721 /* If label has already been defined, we can tell now
722 whether and how we must alter the stack level. */
724 if (PREV_INSN (label) != 0)
726 /* Find the innermost pending block that contains the label.
727 (Check containment by comparing insn-uids.)
728 Then restore the outermost stack level within that block,
729 and do cleanups of all blocks contained in it. */
730 for (block = block_stack; block; block = block->next)
732 if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
734 if (block->data.block.stack_level != 0)
735 stack_level = block->data.block.stack_level;
736 /* Execute the cleanups for blocks we are exiting. */
737 if (block->data.block.cleanups != 0)
739 expand_cleanups (block->data.block.cleanups, 1, 1);
740 do_pending_stack_adjust ();
746 /* Ensure stack adjust isn't done by emit_jump, as this
747 would clobber the stack pointer. This one should be
748 deleted as dead by flow. */
749 clear_pending_stack_adjust ();
750 do_pending_stack_adjust ();
752 /* Don't do this adjust if it's to the end label and this function
753 is to return with a depressed stack pointer. */
754 if (label == return_label
755 && (((TREE_CODE (TREE_TYPE (current_function_decl))
757 && (TYPE_RETURNS_STACK_DEPRESSED
758 (TREE_TYPE (current_function_decl))))))
761 emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
764 if (body != 0 && DECL_TOO_LATE (body))
765 error ("jump to `%s' invalidly jumps into binding contour",
766 IDENTIFIER_POINTER (DECL_NAME (body)));
768 /* Label not yet defined: may need to put this goto
769 on the fixup list. */
770 else if (! expand_fixup (body, label, last_insn))
772 /* No fixup needed. Record that the label is the target
773 of at least one goto that has no fixup. */
775 TREE_ADDRESSABLE (body) = 1;
781 /* Generate if necessary a fixup for a goto
782 whose target label in tree structure (if any) is TREE_LABEL
783 and whose target in rtl is RTL_LABEL.
785 If LAST_INSN is nonzero, we pretend that the jump appears
786 after insn LAST_INSN instead of at the current point in the insn stream.
788 The fixup will be used later to insert insns just before the goto.
789 Those insns will restore the stack level as appropriate for the
790 target label, and will (in the case of C++) also invoke any object
791 destructors which have to be invoked when we exit the scopes which
792 are exited by the goto.
794 Value is nonzero if a fixup is made. */
797 expand_fixup (tree tree_label, rtx rtl_label, rtx last_insn)
799 struct nesting *block, *end_block;
801 /* See if we can recognize which block the label will be output in.
802 This is possible in some very common cases.
803 If we succeed, set END_BLOCK to that block.
804 Otherwise, set it to 0. */
807 && (rtl_label == cond_stack->data.cond.endif_label
808 || rtl_label == cond_stack->data.cond.next_label))
809 end_block = cond_stack;
810 /* If we are in a loop, recognize certain labels which
811 are likely targets. This reduces the number of fixups
812 we need to create. */
814 && (rtl_label == loop_stack->data.loop.start_label
815 || rtl_label == loop_stack->data.loop.end_label
816 || rtl_label == loop_stack->data.loop.continue_label))
817 end_block = loop_stack;
821 /* Now set END_BLOCK to the binding level to which we will return. */
825 struct nesting *next_block = end_block->all;
828 /* First see if the END_BLOCK is inside the innermost binding level.
829 If so, then no cleanups or stack levels are relevant. */
830 while (next_block && next_block != block)
831 next_block = next_block->all;
836 /* Otherwise, set END_BLOCK to the innermost binding level
837 which is outside the relevant control-structure nesting. */
838 next_block = block_stack->next;
839 for (block = block_stack; block != end_block; block = block->all)
840 if (block == next_block)
841 next_block = next_block->next;
842 end_block = next_block;
845 /* Does any containing block have a stack level or cleanups?
846 If not, no fixup is needed, and that is the normal case
847 (the only case, for standard C). */
848 for (block = block_stack; block != end_block; block = block->next)
849 if (block->data.block.stack_level != 0
850 || block->data.block.cleanups != 0)
853 if (block != end_block)
855 /* Ok, a fixup is needed. Add a fixup to the list of such. */
856 struct goto_fixup *fixup
857 = (struct goto_fixup *) ggc_alloc (sizeof (struct goto_fixup));
858 /* In case an old stack level is restored, make sure that comes
859 after any pending stack adjust. */
860 /* ?? If the fixup isn't to come at the present position,
861 doing the stack adjust here isn't useful. Doing it with our
862 settings at that location isn't useful either. Let's hope
865 do_pending_stack_adjust ();
866 fixup->target = tree_label;
867 fixup->target_rtl = rtl_label;
869 /* Create a BLOCK node and a corresponding matched set of
870 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
871 this point. The notes will encapsulate any and all fixup
872 code which we might later insert at this point in the insn
873 stream. Also, the BLOCK node will be the parent (i.e. the
874 `SUPERBLOCK') of any other BLOCK nodes which we might create
875 later on when we are expanding the fixup code.
877 Note that optimization passes (including expand_end_loop)
878 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
882 rtx original_before_jump
883 = last_insn ? last_insn : get_last_insn ();
888 block = make_node (BLOCK);
889 TREE_USED (block) = 1;
891 if (!cfun->x_whole_function_mode_p)
892 (*lang_hooks.decls.insert_block) (block);
896 = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
897 BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
902 start = emit_note (NOTE_INSN_BLOCK_BEG);
903 if (cfun->x_whole_function_mode_p)
904 NOTE_BLOCK (start) = block;
905 fixup->before_jump = emit_note (NOTE_INSN_DELETED);
906 end = emit_note (NOTE_INSN_BLOCK_END);
907 if (cfun->x_whole_function_mode_p)
908 NOTE_BLOCK (end) = block;
909 fixup->context = block;
911 emit_insn_after (start, original_before_jump);
914 fixup->block_start_count = current_block_start_count;
915 fixup->stack_level = 0;
916 fixup->cleanup_list_list
917 = ((block->data.block.outer_cleanups
918 || block->data.block.cleanups)
919 ? tree_cons (NULL_TREE, block->data.block.cleanups,
920 block->data.block.outer_cleanups)
922 fixup->next = goto_fixup_chain;
923 goto_fixup_chain = fixup;
929 /* Expand any needed fixups in the outermost binding level of the
930 function. FIRST_INSN is the first insn in the function. */
933 expand_fixups (rtx first_insn)
935 fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
938 /* When exiting a binding contour, process all pending gotos requiring fixups.
939 THISBLOCK is the structure that describes the block being exited.
940 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
941 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
942 FIRST_INSN is the insn that began this contour.
944 Gotos that jump out of this contour must restore the
945 stack level and do the cleanups before actually jumping.
947 DONT_JUMP_IN positive means report error if there is a jump into this
948 contour from before the beginning of the contour. This is also done if
949 STACK_LEVEL is nonzero unless DONT_JUMP_IN is negative. */
952 fixup_gotos (struct nesting *thisblock, rtx stack_level,
953 tree cleanup_list, rtx first_insn, int dont_jump_in)
955 struct goto_fixup *f, *prev;
957 /* F is the fixup we are considering; PREV is the previous one. */
958 /* We run this loop in two passes so that cleanups of exited blocks
959 are run first, and blocks that are exited are marked so
962 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
964 /* Test for a fixup that is inactive because it is already handled. */
965 if (f->before_jump == 0)
967 /* Delete inactive fixup from the chain, if that is easy to do. */
969 prev->next = f->next;
971 /* Has this fixup's target label been defined?
972 If so, we can finalize it. */
973 else if (PREV_INSN (f->target_rtl) != 0)
977 /* If this fixup jumped into this contour from before the beginning
978 of this contour, report an error. This code used to use
979 the first non-label insn after f->target_rtl, but that's
980 wrong since such insns can be added by things like put_var_into_stack
981 and have INSN_UIDs that are out of the range of the block. */
982 /* ??? Bug: this does not detect jumping in through intermediate
983 blocks that have stack levels or cleanups.
984 It detects only a problem with the innermost block
987 && (dont_jump_in > 0 || (dont_jump_in == 0 && stack_level)
989 && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
990 && INSN_UID (first_insn) > INSN_UID (f->before_jump)
991 && ! DECL_ERROR_ISSUED (f->target))
993 error_with_decl (f->target,
994 "label `%s' used before containing binding contour");
995 /* Prevent multiple errors for one label. */
996 DECL_ERROR_ISSUED (f->target) = 1;
999 /* We will expand the cleanups into a sequence of their own and
1000 then later on we will attach this new sequence to the insn
1001 stream just ahead of the actual jump insn. */
1005 /* Temporarily restore the lexical context where we will
1006 logically be inserting the fixup code. We do this for the
1007 sake of getting the debugging information right. */
1009 (*lang_hooks.decls.pushlevel) (0);
1010 (*lang_hooks.decls.set_block) (f->context);
1012 /* Expand the cleanups for blocks this jump exits. */
1013 if (f->cleanup_list_list)
1016 for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
1017 /* Marked elements correspond to blocks that have been closed.
1018 Do their cleanups. */
1019 if (TREE_ADDRESSABLE (lists)
1020 && TREE_VALUE (lists) != 0)
1022 expand_cleanups (TREE_VALUE (lists), 1, 1);
1023 /* Pop any pushes done in the cleanups,
1024 in case function is about to return. */
1025 do_pending_stack_adjust ();
1029 /* Restore stack level for the biggest contour that this
1030 jump jumps out of. */
1032 && ! (f->target_rtl == return_label
1033 && ((TREE_CODE (TREE_TYPE (current_function_decl))
1035 && (TYPE_RETURNS_STACK_DEPRESSED
1036 (TREE_TYPE (current_function_decl))))))
1037 emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
1039 /* Finish up the sequence containing the insns which implement the
1040 necessary cleanups, and then attach that whole sequence to the
1041 insn stream just ahead of the actual jump insn. Attaching it
1042 at that point ensures that any cleanups which are in fact
1043 implicit C++ object destructions (which must be executed upon
1044 leaving the block) appear (to the debugger) to be taking place
1045 in an area of the generated code where the object(s) being
1046 destructed are still "in scope". */
1048 cleanup_insns = get_insns ();
1049 (*lang_hooks.decls.poplevel) (1, 0, 0);
1052 emit_insn_after (cleanup_insns, f->before_jump);
1058 /* For any still-undefined labels, do the cleanups for this block now.
1059 We must do this now since items in the cleanup list may go out
1060 of scope when the block ends. */
1061 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
1062 if (f->before_jump != 0
1063 && PREV_INSN (f->target_rtl) == 0
1064 /* Label has still not appeared. If we are exiting a block with
1065 a stack level to restore, that started before the fixup,
1066 mark this stack level as needing restoration
1067 when the fixup is later finalized. */
1069 /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
1070 means the label is undefined. That's erroneous, but possible. */
1071 && (thisblock->data.block.block_start_count
1072 <= f->block_start_count))
1074 tree lists = f->cleanup_list_list;
1077 for (; lists; lists = TREE_CHAIN (lists))
1078 /* If the following elt. corresponds to our containing block
1079 then the elt. must be for this block. */
1080 if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
1083 (*lang_hooks.decls.pushlevel) (0);
1084 (*lang_hooks.decls.set_block) (f->context);
1085 expand_cleanups (TREE_VALUE (lists), 1, 1);
1086 do_pending_stack_adjust ();
1087 cleanup_insns = get_insns ();
1088 (*lang_hooks.decls.poplevel) (1, 0, 0);
1090 if (cleanup_insns != 0)
1092 = emit_insn_after (cleanup_insns, f->before_jump);
1094 f->cleanup_list_list = TREE_CHAIN (lists);
1098 f->stack_level = stack_level;
1102 /* Return the number of times character C occurs in string S. */
1104 n_occurrences (int c, const char *s)
1112 /* Generate RTL for an asm statement (explicit assembler code).
1113 STRING is a STRING_CST node containing the assembler code text,
1114 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
1115 insn is volatile; don't optimize it. */
1118 expand_asm (tree string, int vol)
1122 if (TREE_CODE (string) == ADDR_EXPR)
1123 string = TREE_OPERAND (string, 0);
1125 body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));
1127 MEM_VOLATILE_P (body) = vol;
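/* For reference (illustrative sketch; the helper name is hypothetical), the
   statements handled here are basic asm statements without operands:  */
#if 0
static void
basic_asm_example (void)
{
  asm ("nop");			/* STRING is the STRING_CST "nop" */
  asm volatile ("nop");		/* VOL nonzero: the insn is marked volatile
				   and is not deleted or moved away */
}
#endif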
1134 /* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
1135 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
1136 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
1137 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
1138 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
1139 constraint allows the use of a register operand. And, *IS_INOUT
1140 will be true if the operand is read-write, i.e., if it is used as
1141 an input as well as an output. If *CONSTRAINT_P is not in
1142 canonical form, it will be made canonical. (Note that `+' will be
1143 replaced with `=' as part of this process.)
1145 Returns TRUE if all went well; FALSE if an error occurred. */
1148 parse_output_constraint (const char **constraint_p, int operand_num,
1149 int ninputs, int noutputs, bool *allows_mem,
1150 bool *allows_reg, bool *is_inout)
1152 const char *constraint = *constraint_p;
1155 /* Assume the constraint doesn't allow the use of either a register
1157 *allows_mem = false;
1158 *allows_reg = false;
1160 /* Allow the `=' or `+' to not be at the beginning of the string,
1161 since it wasn't explicitly documented that way, and there is a
1162 large body of code that puts it last. Swap the character to
1163 the front, so as not to uglify any place else. */
1164 p = strchr (constraint, '=');
1166 p = strchr (constraint, '+');
1168 /* If the string doesn't contain an `=', issue an error
1172 error ("output operand constraint lacks `='");
1176 /* If the constraint begins with `+', then the operand is both read
1177 from and written to. */
1178 *is_inout = (*p == '+');
1180 /* Canonicalize the output constraint so that it begins with `='. */
1181 if (p != constraint || is_inout)
1184 size_t c_len = strlen (constraint);
1186 if (p != constraint)
1187 warning ("output constraint `%c' for operand %d is not at the beginning",
1190 /* Make a copy of the constraint. */
1191 buf = alloca (c_len + 1);
1192 strcpy (buf, constraint);
1193 /* Swap the first character and the `=' or `+'. */
1194 buf[p - constraint] = buf[0];
1195 /* Make sure the first character is an `='. (Until we do this,
1196 it might be a `+'.) */
1198 /* Replace the constraint with the canonicalized string. */
1199 *constraint_p = ggc_alloc_string (buf, c_len);
1200 constraint = *constraint_p;
1203 /* Loop through the constraint string. */
1204 for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
1209 error ("operand constraint contains incorrectly positioned '+' or '='");
1213 if (operand_num + 1 == ninputs + noutputs)
1215 error ("`%%' constraint used with last operand");
1220 case 'V': case 'm': case 'o':
1224 case '?': case '!': case '*': case '&': case '#':
1225 case 'E': case 'F': case 'G': case 'H':
1226 case 's': case 'i': case 'n':
1227 case 'I': case 'J': case 'K': case 'L': case 'M':
1228 case 'N': case 'O': case 'P': case ',':
1231 case '0': case '1': case '2': case '3': case '4':
1232 case '5': case '6': case '7': case '8': case '9':
1234 error ("matching constraint not valid in output operand");
1238 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1239 excepting those that expand_call created. So match memory
1256 if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
1258 #ifdef EXTRA_CONSTRAINT_STR
1259 else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
1261 else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
1265 /* Otherwise we can't assume anything about the nature of
1266 the constraint except that it isn't purely registers.
1267 Treat it like "g" and hope for the best. */
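/* Example (illustrative; the helper name is hypothetical) of the
   canonicalization described above: a read-write operand written with `+'
   is rewritten so the constraint begins with `=', and the operand is also
   treated as an input.  */
#if 0
static int
inout_example (int x)
{
  /* "+r" marks X as read-write: *IS_INOUT is set, the constraint is
     canonicalized to "=r", and expand_asm_operands also passes X as a
     matching input operand.  */
  asm ("" : "+r" (x));
  return x;
}
#endif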
1278 /* Similar, but for input constraints. */
1281 parse_input_constraint (const char **constraint_p, int input_num,
1282 int ninputs, int noutputs, int ninout,
1283 const char * const * constraints,
1284 bool *allows_mem, bool *allows_reg)
1286 const char *constraint = *constraint_p;
1287 const char *orig_constraint = constraint;
1288 size_t c_len = strlen (constraint);
1291 /* Assume the constraint doesn't allow the use of either
1292 a register or memory. */
1293 *allows_mem = false;
1294 *allows_reg = false;
1296 /* Make sure constraint has neither `=', `+', nor '&'. */
1298 for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
1299 switch (constraint[j])
1301 case '+': case '=': case '&':
1302 if (constraint == orig_constraint)
1304 error ("input operand constraint contains `%c'", constraint[j]);
1310 if (constraint == orig_constraint
1311 && input_num + 1 == ninputs - ninout)
1313 error ("`%%' constraint used with last operand");
1318 case 'V': case 'm': case 'o':
1323 case '?': case '!': case '*': case '#':
1324 case 'E': case 'F': case 'G': case 'H':
1325 case 's': case 'i': case 'n':
1326 case 'I': case 'J': case 'K': case 'L': case 'M':
1327 case 'N': case 'O': case 'P': case ',':
1330 /* Whether or not a numeric constraint allows a register is
1331 decided by the matching constraint, and so there is no need
1332 to do anything special with them. We must handle them in
1333 the default case, so that we don't unnecessarily force
1334 operands to memory. */
1335 case '0': case '1': case '2': case '3': case '4':
1336 case '5': case '6': case '7': case '8': case '9':
1339 unsigned long match;
1341 match = strtoul (constraint + j, &end, 10);
1342 if (match >= (unsigned long) noutputs)
1344 error ("matching constraint references invalid operand number");
1348 /* Try and find the real constraint for this dup. Only do this
1349 if the matching constraint is the only alternative. */
1351 && (j == 0 || (j == 1 && constraint[0] == '%')))
1353 constraint = constraints[match];
1354 *constraint_p = constraint;
1355 c_len = strlen (constraint);
1357 /* ??? At the end of the loop, we will skip the first part of
1358 the matched constraint. This assumes not only that the
1359 other constraint is an output constraint, but also that
1360 the '=' or '+' come first. */
1364 j = end - constraint;
1365 /* Anticipate increment at end of loop. */
1380 if (! ISALPHA (constraint[j]))
1382 error ("invalid punctuation `%c' in constraint", constraint[j]);
1385 if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
1388 #ifdef EXTRA_CONSTRAINT_STR
1389 else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
1391 else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
1395 /* Otherwise we can't assume anything about the nature of
1396 the constraint except that it isn't purely registers.
1397 Treat it like "g" and hope for the best. */
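/* Example (illustrative; x86 AT&T syntax assumed in the template, helper
   name hypothetical) of a matching digit constraint as handled above.  */
#if 0
static int
matching_example (int a, int b)
{
  /* The constraint "0" says input 1 must occupy the same place as output 0;
     parse_input_constraint picks up the real constraint ("=r") from that
     output operand.  */
  asm ("addl %2, %0" : "=r" (a) : "0" (a), "g" (b));
  return a;
}
#endif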
1408 /* Check for overlap between registers marked in CLOBBERED_REGS and
1409 anything inappropriate in DECL. Emit error and return TRUE for error,
1413 decl_conflicts_with_clobbers_p (tree decl, const HARD_REG_SET clobbered_regs)
1415 /* Conflicts between asm-declared register variables and the clobber
1416 list are not allowed. */
1417 if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
1418 && DECL_REGISTER (decl)
1419 && REG_P (DECL_RTL (decl))
1420 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
1422 rtx reg = DECL_RTL (decl);
1425 for (regno = REGNO (reg);
1426 regno < (REGNO (reg)
1427 + HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)));
1429 if (TEST_HARD_REG_BIT (clobbered_regs, regno))
1431 error ("asm-specifier for variable `%s' conflicts with asm clobber list",
1432 IDENTIFIER_POINTER (DECL_NAME (decl)));
1434 /* Reset registerness to stop multiple errors emitted for a
1436 DECL_REGISTER (decl) = 0;
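/* Example (illustrative; the register name "r0" is target-specific and the
   helper name hypothetical) of the conflict diagnosed above.  */
#if 0
static int
clobber_conflict_example (void)
{
  register int counter asm ("r0") = 0;
  /* Error: the operand lives in a hard register that the clobber list
     also names.  */
  asm ("" : "+r" (counter) : : "r0");
  return counter;
}
#endif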
1443 /* Generate RTL for an asm statement with arguments.
1444 STRING is the instruction template.
1445 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
1446 Each output or input has an expression in the TREE_VALUE and
1447 a tree list in TREE_PURPOSE which in turn contains a constraint
1448 name in TREE_VALUE (or NULL_TREE) and a constraint string
1450 CLOBBERS is a list of STRING_CST nodes each naming a hard register
1451 that is clobbered by this insn.
1453 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1454 Some elements of OUTPUTS may be replaced with trees representing temporary
1455 values. The caller should copy those temporary values to the originally
1458 VOL nonzero means the insn is volatile; don't optimize it. */
1461 expand_asm_operands (tree string, tree outputs, tree inputs,
1462 tree clobbers, int vol, const char *filename, int line)
1464 rtvec argvec, constraintvec;
1466 int ninputs = list_length (inputs);
1467 int noutputs = list_length (outputs);
1470 HARD_REG_SET clobbered_regs;
1471 int clobber_conflict_found = 0;
1475 /* Vector of RTX's of evaluated output operands. */
1476 rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1477 int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
1478 rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1479 enum machine_mode *inout_mode
1480 = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
1481 const char **constraints
1482 = (const char **) alloca ((noutputs + ninputs) * sizeof (const char *));
1483 int old_generating_concat_p = generating_concat_p;
1485 /* An ASM with no outputs needs to be treated as volatile, for now. */
1489 if (! check_operand_nalternatives (outputs, inputs))
1492 if (! check_unique_operand_names (outputs, inputs))
1495 string = resolve_asm_operand_names (string, outputs, inputs);
1497 /* Collect constraints. */
1499 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
1500 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1501 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
1502 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1504 #ifdef MD_ASM_CLOBBERS
1505 /* Sometimes we wish to automatically clobber registers across an asm.
1506 Case in point is when the i386 backend moved from cc0 to a hard reg --
1507 maintaining source-level compatibility means automatically clobbering
1508 the flags register. */
1509 MD_ASM_CLOBBERS (clobbers);
1512 /* Count the number of meaningful clobbered registers, ignoring what
1513 we would ignore later. */
1515 CLEAR_HARD_REG_SET (clobbered_regs);
1516 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1518 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1520 i = decode_reg_name (regname);
1521 if (i >= 0 || i == -4)
1524 error ("unknown register name `%s' in `asm'", regname);
1526 /* Mark clobbered registers. */
1529 /* Clobbering the PIC register is an error */
1530 if (i == (int) PIC_OFFSET_TABLE_REGNUM)
1532 error ("PIC register `%s' clobbered in `asm'", regname);
1536 SET_HARD_REG_BIT (clobbered_regs, i);
1542 /* First pass over inputs and outputs checks validity and sets
1543 mark_addressable if needed. */
1546 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1548 tree val = TREE_VALUE (tail);
1549 tree type = TREE_TYPE (val);
1550 const char *constraint;
1555 /* If there's an erroneous arg, emit no insn. */
1556 if (type == error_mark_node)
1559 /* Try to parse the output constraint. If that fails, there's
1560 no point in going further. */
1561 constraint = constraints[i];
1562 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
1563 &allows_mem, &allows_reg, &is_inout))
1570 && GET_CODE (DECL_RTL (val)) == REG
1571 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
1572 (*lang_hooks.mark_addressable) (val);
1579 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1581 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1585 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
1587 bool allows_reg, allows_mem;
1588 const char *constraint;
1590 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
1591 would get VOIDmode and that could cause a crash in reload. */
1592 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1595 constraint = constraints[i + noutputs];
1596 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1597 constraints, &allows_mem, &allows_reg))
1600 if (! allows_reg && allows_mem)
1601 (*lang_hooks.mark_addressable) (TREE_VALUE (tail));
1604 /* Second pass evaluates arguments. */
1607 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1609 tree val = TREE_VALUE (tail);
1610 tree type = TREE_TYPE (val);
1616 if (!parse_output_constraint (&constraints[i], i, ninputs,
1617 noutputs, &allows_mem, &allows_reg,
1621 /* If an output operand is not a decl or indirect ref and our constraint
1622 allows a register, make a temporary to act as an intermediate.
1623 Make the asm insn write into that, then our caller will copy it to
1624 the real output operand. Likewise for promoted variables. */
1626 generating_concat_p = 0;
1628 real_output_rtx[i] = NULL_RTX;
1629 if ((TREE_CODE (val) == INDIRECT_REF
1632 && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
1633 && ! (GET_CODE (DECL_RTL (val)) == REG
1634 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1638 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
1639 if (GET_CODE (op) == MEM)
1640 op = validize_mem (op);
1642 if (! allows_reg && GET_CODE (op) != MEM)
1643 error ("output number %d not directly addressable", i);
1644 if ((! allows_mem && GET_CODE (op) == MEM)
1645 || GET_CODE (op) == CONCAT)
1647 real_output_rtx[i] = protect_from_queue (op, 1);
1648 op = gen_reg_rtx (GET_MODE (op));
1650 emit_move_insn (op, real_output_rtx[i]);
1655 op = assign_temp (type, 0, 0, 1);
1656 op = validize_mem (op);
1657 TREE_VALUE (tail) = make_tree (type, op);
1661 generating_concat_p = old_generating_concat_p;
1665 inout_mode[ninout] = TYPE_MODE (type);
1666 inout_opnum[ninout++] = i;
1669 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1670 clobber_conflict_found = 1;
1673 /* Make vectors for the expression-rtx, constraint strings,
1674 and named operands. */
1676 argvec = rtvec_alloc (ninputs);
1677 constraintvec = rtvec_alloc (ninputs);
1679 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1680 : GET_MODE (output_rtx[0])),
1681 TREE_STRING_POINTER (string),
1682 empty_string, 0, argvec, constraintvec,
1685 MEM_VOLATILE_P (body) = vol;
1687 /* Eval the inputs and put them into ARGVEC.
1688 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1690 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
1692 bool allows_reg, allows_mem;
1693 const char *constraint;
1697 constraint = constraints[i + noutputs];
1698 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1699 constraints, &allows_mem, &allows_reg))
1702 generating_concat_p = 0;
1704 val = TREE_VALUE (tail);
1705 type = TREE_TYPE (val);
1706 op = expand_expr (val, NULL_RTX, VOIDmode,
1707 (allows_mem && !allows_reg
1708 ? EXPAND_MEMORY : EXPAND_NORMAL));
1710 /* Never pass a CONCAT to an ASM. */
1711 if (GET_CODE (op) == CONCAT)
1712 op = force_reg (GET_MODE (op), op);
1713 else if (GET_CODE (op) == MEM)
1714 op = validize_mem (op);
1716 if (asm_operand_ok (op, constraint) <= 0)
1719 op = force_reg (TYPE_MODE (type), op);
1720 else if (!allows_mem)
1721 warning ("asm operand %d probably doesn't match constraints",
1723 else if (GET_CODE (op) == MEM)
1725 /* We won't recognize either volatile memory or memory
1726 with a queued address as an acceptable memory_operand
1727 at this point. Ignore it: clearly this *is* a memory. */
1731 warning ("use of memory input without lvalue in "
1732 "asm operand %d is deprecated", i + noutputs);
1734 if (CONSTANT_P (op))
1736 op = force_const_mem (TYPE_MODE (type), op);
1737 op = validize_mem (op);
1739 else if (GET_CODE (op) == REG
1740 || GET_CODE (op) == SUBREG
1741 || GET_CODE (op) == ADDRESSOF
1742 || GET_CODE (op) == CONCAT)
1744 tree qual_type = build_qualified_type (type,
1746 | TYPE_QUAL_CONST));
1747 rtx memloc = assign_temp (qual_type, 1, 1, 1);
1748 memloc = validize_mem (memloc);
1749 emit_move_insn (memloc, op);
1755 generating_concat_p = old_generating_concat_p;
1756 ASM_OPERANDS_INPUT (body, i) = op;
1758 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1759 = gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
1761 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1762 clobber_conflict_found = 1;
1765 /* Protect all the operands from the queue now that they have all been
1768 generating_concat_p = 0;
1770 for (i = 0; i < ninputs - ninout; i++)
1771 ASM_OPERANDS_INPUT (body, i)
1772 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1774 for (i = 0; i < noutputs; i++)
1775 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1777 /* For in-out operands, copy output rtx to input rtx. */
1778 for (i = 0; i < ninout; i++)
1780 int j = inout_opnum[i];
1783 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1786 sprintf (buffer, "%d", j);
1787 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1788 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
1791 generating_concat_p = old_generating_concat_p;
1793 /* Now, for each output, construct an rtx
1794 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1795 ARGVEC CONSTRAINTS OPNAMES))
1796 If there is more than one, put them inside a PARALLEL. */
1798 if (noutputs == 1 && nclobbers == 0)
1800 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1801 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1804 else if (noutputs == 0 && nclobbers == 0)
1806 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1818 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1820 /* For each output operand, store a SET. */
1821 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1823 XVECEXP (body, 0, i)
1824 = gen_rtx_SET (VOIDmode,
1826 gen_rtx_ASM_OPERANDS
1827 (GET_MODE (output_rtx[i]),
1828 TREE_STRING_POINTER (string),
1829 constraints[i], i, argvec, constraintvec,
1832 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1835 /* If there are no outputs (but there are some clobbers)
1836 store the bare ASM_OPERANDS into the PARALLEL. */
1839 XVECEXP (body, 0, i++) = obody;
1841 /* Store (clobber REG) for each clobbered register specified. */
1843 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1845 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1846 int j = decode_reg_name (regname);
1851 if (j == -3) /* `cc', which is not a register */
1854 if (j == -4) /* `memory', don't cache memory across asm */
1856 XVECEXP (body, 0, i++)
1857 = gen_rtx_CLOBBER (VOIDmode,
1860 gen_rtx_SCRATCH (VOIDmode)));
1864 /* Ignore unknown register, error already signaled. */
1868 /* Use QImode since that's guaranteed to clobber just one reg. */
1869 clobbered_reg = gen_rtx_REG (QImode, j);
1871 /* Do sanity check for overlap between clobbers and respectively
1872 input and outputs that hasn't been handled. Such overlap
1873 should have been detected and reported above. */
1874 if (!clobber_conflict_found)
1878 /* We test the old body (obody) contents to avoid tripping
1879 over the under-construction body. */
1880 for (opno = 0; opno < noutputs; opno++)
1881 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1882 internal_error ("asm clobber conflict with output operand");
1884 for (opno = 0; opno < ninputs - ninout; opno++)
1885 if (reg_overlap_mentioned_p (clobbered_reg,
1886 ASM_OPERANDS_INPUT (obody, opno)))
1887 internal_error ("asm clobber conflict with input operand");
1890 XVECEXP (body, 0, i++)
1891 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1897 /* For any outputs that needed reloading into registers, spill them
1898 back to where they belong. */
1899 for (i = 0; i < noutputs; ++i)
1900 if (real_output_rtx[i])
1901 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1906 /* A subroutine of expand_asm_operands. Check that all operands have
1907 the same number of alternatives. Return true if so. */
1910 check_operand_nalternatives (tree outputs, tree inputs)
1912 if (outputs || inputs)
1914 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1916 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1919 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1921 error ("too many alternatives in `asm'");
1928 const char *constraint
1929 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1931 if (n_occurrences (',', constraint) != nalternatives)
1933 error ("operand constraints for `asm' differ in number of alternatives");
1937 if (TREE_CHAIN (tmp))
1938 tmp = TREE_CHAIN (tmp);
1940 tmp = next, next = 0;
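/* Example (illustrative; helper name hypothetical) of constraint
   alternatives: every operand below has two alternatives, which is what this
   check enforces.  */
#if 0
static void
alternatives_example (int *dst, int src)
{
  /* Both constraints have two comma-separated alternatives (register or
     memory).  Mixing "r,m" here with a single-alternative constraint on
     another operand would draw the error above.  */
  asm ("" : "=r,m" (*dst) : "r,m" (src));
}
#endif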
1947 /* A subroutine of expand_asm_operands. Check that all operand names
1948 are unique. Return true if so. We rely on the fact that these names
1949 are identifiers, and so have been canonicalized by get_identifier,
1950 so all we need are pointer comparisons. */
1953 check_unique_operand_names (tree outputs, tree inputs)
1957 for (i = outputs; i ; i = TREE_CHAIN (i))
1959 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1963 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1964 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1968 for (i = inputs; i ; i = TREE_CHAIN (i))
1970 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1974 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1975 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1977 for (j = outputs; j ; j = TREE_CHAIN (j))
1978 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1985 error ("duplicate asm operand name '%s'",
1986 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1990 /* A subroutine of expand_asm_operands. Resolve the names of the operands
1991 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1992 STRING and in the constraints to those numbers. */
1995 resolve_asm_operand_names (tree string, tree outputs, tree inputs)
2001 /* Substitute [<name>] in input constraint strings. There should be no
2002 named operands in output constraints. */
2003 for (t = inputs; t ; t = TREE_CHAIN (t))
2005 const char *c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2006 if (strchr (c, '[') != NULL)
2008 p = buffer = xstrdup (c);
2009 while ((p = strchr (p, '[')) != NULL)
2010 p = resolve_operand_name_1 (p, outputs, inputs);
2011 TREE_VALUE (TREE_PURPOSE (t))
2012 = build_string (strlen (buffer), buffer);
2017 if (strchr (TREE_STRING_POINTER (string), '[') == NULL)
2020 /* Assume that we will not need extra space to perform the substitution.
2021 This is because we get to remove '[' and ']', which means we cannot have
2022 a problem until we have more than 999 operands. */
2024 p = buffer = xstrdup (TREE_STRING_POINTER (string));
2025 while ((p = strchr (p, '%')) != NULL)
2029 else if (ISALPHA (p[1]) && p[2] == '[')
2037 p = resolve_operand_name_1 (p, outputs, inputs);
2040 string = build_string (strlen (buffer), buffer);
2046 /* A subroutine of resolve_operand_names. P points to the '[' for a
2047 potential named operand of the form [<name>]. In place, replace
2048 the name and brackets with a number. Return a pointer to the
2049 balance of the string after substitution. */
2052 resolve_operand_name_1 (char *p, tree outputs, tree inputs)
2059 /* Collect the operand name. */
2060 q = strchr (p, ']');
2063 error ("missing close brace for named operand");
2064 return strchr (p, '\0');
2068 /* Resolve the name to a number. */
2069 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
2071 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2074 const char *c = TREE_STRING_POINTER (name);
2075 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2079 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
2081 tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2084 const char *c = TREE_STRING_POINTER (name);
2085 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2091 error ("undefined named operand '%s'", p + 1);
2095 /* Replace the name with the number. Unfortunately, not all libraries
2096 get the return value of sprintf correct, so search for the end of the
2097 generated string by hand. */
2098 sprintf (p, "%d", op);
2099 p = strchr (p, '\0');
2101 /* Verify the no extra buffer space assumption. */
2105 /* Shift the rest of the buffer down to fill the gap. */
2106 memmove (p, q + 1, strlen (q + 1) + 1);
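/* Illustrative sketch, not part of the original source.  With named
   operands a user might write

     asm ("add %[sum], %[a], %[b]"
          : [sum] "=r" (s)
          : [a] "r" (x), [b] "r" (y));

   The routines above rewrite the %[name] references in the template
   (and any [name] matching constraints in the inputs) to operand
   numbers, so the statement is expanded exactly as if it had been

     asm ("add %0, %1, %2" : "=r" (s) : "r" (x), "r" (y));

   Operand names must be unique across outputs and inputs, which is
   what check_unique_operand_names enforces.  */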
2111 /* Generate RTL to evaluate the expression EXP
2112 and remember it in case this is the VALUE in a ({... VALUE; }) construct.
2113 Provided just for backward-compatibility. expand_expr_stmt_value()
2114 should be used for new code. */
2117 expand_expr_stmt (tree exp)
2119 expand_expr_stmt_value (exp, -1, 1);
2122 /* Generate RTL to evaluate the expression EXP. WANT_VALUE tells
2123 whether to (1) save the value of the expression, (0) discard it or
2124 (-1) use expr_stmts_for_value to tell. The use of -1 is
2125 deprecated, and retained only for backward compatibility. */
2128 expand_expr_stmt_value (tree exp, int want_value, int maybe_last)
2133 if (want_value == -1)
2134 want_value = expr_stmts_for_value != 0;
2136 /* If -Wextra, warn about statements with no side effects,
2137 except for an explicit cast to void (e.g. for assert()), and
2138 except for last statement in ({...}) where they may be useful. */
2140 && (expr_stmts_for_value == 0 || ! maybe_last)
2141 && exp != error_mark_node)
2143 if (! TREE_SIDE_EFFECTS (exp))
2145 if (warn_unused_value
2146 && !(TREE_CODE (exp) == CONVERT_EXPR
2147 && VOID_TYPE_P (TREE_TYPE (exp))))
2148 warning ("%Hstatement with no effect", &emit_locus);
2150 else if (warn_unused_value)
2151 warn_if_unused_value (exp);
2154 /* If EXP is of function type and we are expanding statements for
2155 value, convert it to pointer-to-function. */
2156 if (want_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
2157 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2159 /* The call to `expand_expr' could cause last_expr_type and
2160 last_expr_value to get reset. Therefore, we set last_expr_value
2161 and last_expr_type *after* calling expand_expr. */
2162 value = expand_expr (exp, want_value ? NULL_RTX : const0_rtx,
2164 type = TREE_TYPE (exp);
2166 /* If all we do is reference a volatile value in memory,
2167 copy it to a register to be sure it is actually touched. */
2168 if (value && GET_CODE (value) == MEM && TREE_THIS_VOLATILE (exp))
2170 if (TYPE_MODE (type) == VOIDmode)
2172 else if (TYPE_MODE (type) != BLKmode)
2173 value = copy_to_reg (value);
2176 rtx lab = gen_label_rtx ();
2178 /* Compare the value with itself to reference it. */
2179 emit_cmp_and_jump_insns (value, value, EQ,
2180 expand_expr (TYPE_SIZE (type),
2181 NULL_RTX, VOIDmode, 0),
2187 /* If this expression is part of a ({...}) and is in memory, we may have
2188 to preserve temporaries. */
2189 preserve_temp_slots (value);
2191 /* Free any temporaries used to evaluate this expression. Any temporary
2192 used as a result of this expression will already have been preserved
2198 last_expr_value = value;
2199 last_expr_type = type;
2205 /* Warn if EXP contains any computations whose results are not used.
2206 Return 1 if a warning is printed; 0 otherwise. */
2209 warn_if_unused_value (tree exp)
2211 if (TREE_USED (exp))
2214 /* Don't warn about void constructs. This includes casting to void,
2215 void function calls, and statement expressions with a final cast
2217 if (VOID_TYPE_P (TREE_TYPE (exp)))
2220 switch (TREE_CODE (exp))
2222 case PREINCREMENT_EXPR:
2223 case POSTINCREMENT_EXPR:
2224 case PREDECREMENT_EXPR:
2225 case POSTDECREMENT_EXPR:
2230 case METHOD_CALL_EXPR:
2232 case TRY_CATCH_EXPR:
2233 case WITH_CLEANUP_EXPR:
2238 /* For a binding, warn if no side effect within it. */
2239 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2242 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2244 case TRUTH_ORIF_EXPR:
2245 case TRUTH_ANDIF_EXPR:
2246 /* In && or ||, warn if 2nd operand has no side effect. */
2247 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2250 if (TREE_NO_UNUSED_WARNING (exp))
2252 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2254 /* Let people do `(foo (), 0)' without a warning. */
2255 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2257 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2261 case NON_LVALUE_EXPR:
2262 /* Don't warn about conversions not explicit in the user's program. */
2263 if (TREE_NO_UNUSED_WARNING (exp))
2265 /* Assignment to a cast usually results in a cast of a modify.
2266 Don't complain about that. There can be an arbitrary number of
2267 casts before the modify, so we must loop until we find the first
2268 non-cast expression and then test to see if that is a modify. */
2270 tree tem = TREE_OPERAND (exp, 0);
2272 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2273 tem = TREE_OPERAND (tem, 0);
2275 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2276 || TREE_CODE (tem) == CALL_EXPR)
2282 /* Don't warn about automatic dereferencing of references, since
2283 the user cannot control it. */
2284 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2285 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2289 /* Referencing a volatile value is a side effect, so don't warn. */
2291 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2292 && TREE_THIS_VOLATILE (exp))
2295 /* If this is an expression which has no operands, there is no value
2296 to be unused. There are no such language-independent codes,
2297 but front ends may define such. */
2298 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2299 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2303 /* If this is an expression with side effects, don't warn. */
2304 if (TREE_SIDE_EFFECTS (exp))
2307 warning ("%Hvalue computed is not used", &emit_locus);
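/* Illustrative sketch, not part of the original source.  With
   -Wunused-value in effect, the checks above and in the caller behave
   roughly as follows:

     x + 1;             warning: statement with no effect
     (void) (x + 1);    quiet: explicit cast to void
     (f (), x + 1);     warning: value computed is not used
     (f (), 0);         quiet: the `(foo (), 0)' idiom
     ({ f (); x + 1; }) quiet for the final expression when the
                        statement expression's value is wanted  */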
2312 /* Clear out the memory of the last expression evaluated. */
2315 clear_last_expr (void)
2317 last_expr_type = NULL_TREE;
2318 last_expr_value = NULL_RTX;
2321 /* Begin a statement-expression, i.e., a series of statements which
2322 may return a value. Return the RTL_EXPR for this statement expr.
2323 The caller must save that value and pass it to
2324 expand_end_stmt_expr. If HAS_SCOPE is nonzero, temporaries created
2325 in the statement-expression are deallocated at the end of the
2329 expand_start_stmt_expr (int has_scope)
2333 /* Make the RTL_EXPR node temporary, not momentary,
2334 so that rtl_expr_chain doesn't become garbage. */
2335 t = make_node (RTL_EXPR);
2336 do_pending_stack_adjust ();
2338 start_sequence_for_rtl_expr (t);
2342 expr_stmts_for_value++;
2346 /* Restore the previous state at the end of a statement that returns a value.
2347 Returns a tree node representing the statement's value and the
2348 insns to compute the value.
2350 The nodes of that expression have been freed by now, so we cannot use them.
2351 But we don't want to do that anyway; the expression has already been
2352 evaluated and now we just want to use the value. So generate a RTL_EXPR
2353 with the proper type and RTL value.
2355 If the last substatement was not an expression,
2356 return something with type `void'. */
2359 expand_end_stmt_expr (tree t)
2363 if (! last_expr_value || ! last_expr_type)
2365 last_expr_value = const0_rtx;
2366 last_expr_type = void_type_node;
2368 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2369 /* Remove any possible QUEUED. */
2370 last_expr_value = protect_from_queue (last_expr_value, 0);
2374 TREE_TYPE (t) = last_expr_type;
2375 RTL_EXPR_RTL (t) = last_expr_value;
2376 RTL_EXPR_SEQUENCE (t) = get_insns ();
2378 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2382 /* Don't consider deleting this expr or containing exprs at tree level. */
2383 TREE_SIDE_EFFECTS (t) = 1;
2384 /* Propagate volatility of the actual RTL expr. */
2385 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2388 expr_stmts_for_value--;
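/* Illustrative sketch, not part of the original source.  A GNU C
   statement expression such as

     int y = ({ int t = f (x); t > 0 ? t : -t; });

   is bracketed by expand_start_stmt_expr and expand_end_stmt_expr: the
   insns for the enclosed statements are collected in the sequence
   started above, and the value of the last expression (the conditional
   here) becomes the RTL_EXPR_RTL of the node returned, which the
   caller then uses as the value of the whole ({...}).  */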
2393 /* Generate RTL for the start of an if-then. COND is the expression
2394 whose truth should be tested.
2396 If EXITFLAG is nonzero, this conditional is visible to
2397 `exit_something'. */
2400 expand_start_cond (tree cond, int exitflag)
2402 struct nesting *thiscond = ALLOC_NESTING ();
2404 /* Make an entry on cond_stack for the cond we are entering. */
2406 thiscond->desc = COND_NESTING;
2407 thiscond->next = cond_stack;
2408 thiscond->all = nesting_stack;
2409 thiscond->depth = ++nesting_depth;
2410 thiscond->data.cond.next_label = gen_label_rtx ();
2411 /* Before we encounter an `else', we don't need a separate exit label
2412 unless there are supposed to be exit statements
2413 to exit this conditional. */
2414 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2415 thiscond->data.cond.endif_label = thiscond->exit_label;
2416 cond_stack = thiscond;
2417 nesting_stack = thiscond;
2419 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2422 /* Generate RTL between then-clause and the elseif-clause
2423 of an if-then-elseif-.... */
2426 expand_start_elseif (tree cond)
2428 if (cond_stack->data.cond.endif_label == 0)
2429 cond_stack->data.cond.endif_label = gen_label_rtx ();
2430 emit_jump (cond_stack->data.cond.endif_label);
2431 emit_label (cond_stack->data.cond.next_label);
2432 cond_stack->data.cond.next_label = gen_label_rtx ();
2433 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2436 /* Generate RTL between the then-clause and the else-clause
2437 of an if-then-else. */
2440 expand_start_else (void)
2442 if (cond_stack->data.cond.endif_label == 0)
2443 cond_stack->data.cond.endif_label = gen_label_rtx ();
2445 emit_jump (cond_stack->data.cond.endif_label);
2446 emit_label (cond_stack->data.cond.next_label);
2447 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2450 /* After calling expand_start_else, turn this "else" into an "else if"
2451 by providing another condition. */
2454 expand_elseif (tree cond)
2456 cond_stack->data.cond.next_label = gen_label_rtx ();
2457 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2460 /* Generate RTL for the end of an if-then.
2461 Pop the record for it off of cond_stack. */
2464 expand_end_cond (void)
2466 struct nesting *thiscond = cond_stack;
2468 do_pending_stack_adjust ();
2469 if (thiscond->data.cond.next_label)
2470 emit_label (thiscond->data.cond.next_label);
2471 if (thiscond->data.cond.endif_label)
2472 emit_label (thiscond->data.cond.endif_label);
2474 POPSTACK (cond_stack);
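/* Illustrative sketch, not part of the original source.  For the chain

     if (a) A; else if (b) B; else C;

   a front end's calls map onto the functions above roughly as:

     expand_start_cond (a, 0);    jump to next_label if a is false
       ... A ...
     expand_start_elseif (b);     jump to endif_label, emit next_label,
                                  make a fresh next_label, jump to it
                                  if b is false
       ... B ...
     expand_start_else ();        jump to endif_label, emit next_label
       ... C ...
     expand_end_cond ();          emit endif_label, pop cond_stack  */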
2478 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2479 loop should be exited by `exit_something'. This is a loop for which
2480 `expand_continue' will jump to the top of the loop.
2482 Make an entry on loop_stack to record the labels associated with
2486 expand_start_loop (int exit_flag)
2488 struct nesting *thisloop = ALLOC_NESTING ();
2490 /* Make an entry on loop_stack for the loop we are entering. */
2492 thisloop->desc = LOOP_NESTING;
2493 thisloop->next = loop_stack;
2494 thisloop->all = nesting_stack;
2495 thisloop->depth = ++nesting_depth;
2496 thisloop->data.loop.start_label = gen_label_rtx ();
2497 thisloop->data.loop.end_label = gen_label_rtx ();
2498 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2499 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2500 loop_stack = thisloop;
2501 nesting_stack = thisloop;
2503 do_pending_stack_adjust ();
2505 emit_note (NOTE_INSN_LOOP_BEG);
2506 emit_label (thisloop->data.loop.start_label);
2511 /* Like expand_start_loop but for a loop where the continuation point
2512 (for expand_continue_loop) will be specified explicitly. */
2515 expand_start_loop_continue_elsewhere (int exit_flag)
2517 struct nesting *thisloop = expand_start_loop (exit_flag);
2518 loop_stack->data.loop.continue_label = gen_label_rtx ();
2522 /* Begin a null, aka do { } while (0) "loop". But since the contents
2523 of said loop can still contain a break, we must frob the loop nest. */
2526 expand_start_null_loop (void)
2528 struct nesting *thisloop = ALLOC_NESTING ();
2530 /* Make an entry on loop_stack for the loop we are entering. */
2532 thisloop->desc = LOOP_NESTING;
2533 thisloop->next = loop_stack;
2534 thisloop->all = nesting_stack;
2535 thisloop->depth = ++nesting_depth;
2536 thisloop->data.loop.start_label = emit_note (NOTE_INSN_DELETED);
2537 thisloop->data.loop.end_label = gen_label_rtx ();
2538 thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2539 thisloop->exit_label = thisloop->data.loop.end_label;
2540 loop_stack = thisloop;
2541 nesting_stack = thisloop;
2546 /* Specify the continuation point for a loop started with
2547 expand_start_loop_continue_elsewhere.
2548 Use this at the point in the code to which a continue statement
2552 expand_loop_continue_here (void)
2554 do_pending_stack_adjust ();
2555 emit_note (NOTE_INSN_LOOP_CONT);
2556 emit_label (loop_stack->data.loop.continue_label);
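/* Illustrative sketch, not part of the original source.  A C loop

     for (init; test; step) body;

   is typically expanded along the lines of:

     ... init ...
     expand_start_loop_continue_elsewhere (1);   start_label; separate
                                                 continue_label
     expand_exit_loop_top_cond (0, test);        exit if test is false,
                                                 then the END_TOP_COND note
       ... body ...   (a `continue' jumps to continue_label)
     expand_loop_continue_here ();               emit continue_label
       ... step ...
     expand_end_loop ();                         jump back, emit end_label

   A while loop is the same except that the continue point coincides
   with the top of the loop (plain expand_start_loop).  */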
2559 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2560 Pop the block off of loop_stack. */
2563 expand_end_loop (void)
2565 rtx start_label = loop_stack->data.loop.start_label;
2567 int eh_regions, debug_blocks;
2570 /* Mark the continue-point at the top of the loop if none elsewhere. */
2571 if (start_label == loop_stack->data.loop.continue_label)
2572 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2574 do_pending_stack_adjust ();
2576 /* If the loop starts with a loop exit, roll that to the end where
2577 it will optimize together with the jump back.
2579 If the loop presently looks like this (in pseudo-C):

        start_label:
          if (test) goto end_label;
          body;
          goto start_label;
        end_label:

2589 transform it to look like:

          goto start_label;
        top_label:
          body;
        start_label:
          if (test) goto end_label;
          goto top_label;
        end_label:
2600 We rely on the presence of NOTE_INSN_LOOP_END_TOP_COND to mark
2601 the end of the entry conditional. Without this, our lexical scan
2602 can't tell the difference between an entry conditional and a
2603 body conditional that exits the loop. Mistaking the two means
2604 that we can misplace the NOTE_INSN_LOOP_CONT note, which can
2605 screw up loop unrolling.
2607 Things will be oh so much better when loop optimization is done
2608 off of a proper control flow graph... */
2610 /* Scan insns from the top of the loop looking for the END_TOP_COND note. */
2613 eh_regions = debug_blocks = 0;
2614 for (etc_note = start_label; etc_note ; etc_note = NEXT_INSN (etc_note))
2615 if (GET_CODE (etc_note) == NOTE)
2617 if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_END_TOP_COND)
2620 /* We must not walk into a nested loop. */
2621 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_BEG)
2623 etc_note = NULL_RTX;
2627 /* At the same time, scan for EH region notes, as we don't want
2628 to scrog region nesting. This shouldn't happen, but... */
2629 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_BEG)
2631 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_END)
2633 if (--eh_regions < 0)
2634 /* We've come to the end of an EH region, but never saw the
2635 beginning of that region. That means that an EH region
2636 begins before the top of the loop, and ends in the middle
2637 of it. The existence of such a situation violates a basic
2638 assumption in this code, since that would imply that even
2639 when EH_REGIONS is zero, we might move code out of an
2640 exception region. */
2644 /* Likewise for debug scopes. In this case we'll either (1) move
2645 all of the notes if they are properly nested or (2) leave the
2646 notes alone and only rotate the loop at high optimization
2647 levels when we expect to scrog debug info. */
2648 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_BEG)
2650 else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_END)
2653 else if (INSN_P (etc_note))
2660 && (debug_blocks == 0 || optimize >= 2)
2661 && NEXT_INSN (etc_note) != NULL_RTX
2662 && ! any_condjump_p (get_last_insn ()))
2664 /* We found one. Move everything from START to ETC to the end
2665 of the loop, and add a jump from the top of the loop. */
2666 rtx top_label = gen_label_rtx ();
2667 rtx start_move = start_label;
2669 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2670 then we want to move this note also. */
2671 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2672 && NOTE_LINE_NUMBER (PREV_INSN (start_move)) == NOTE_INSN_LOOP_CONT)
2673 start_move = PREV_INSN (start_move);
2675 emit_label_before (top_label, start_move);
2677 /* Actually move the insns. If the debug scopes are nested, we
2678 can move everything at once. Otherwise we have to move them
2679 one by one and squeeze out the block notes. */
2680 if (debug_blocks == 0)
2681 reorder_insns (start_move, etc_note, get_last_insn ());
2684 rtx insn, next_insn;
2685 for (insn = start_move; insn; insn = next_insn)
2687 /* Figure out which insn comes after this one. We have
2688 to do this before we move INSN. */
2689 next_insn = (insn == etc_note ? NULL : NEXT_INSN (insn));
2691 if (GET_CODE (insn) == NOTE
2692 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2693 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2696 reorder_insns (insn, insn, get_last_insn ());
2700 /* Add the jump from the top of the loop. */
2701 emit_jump_insn_before (gen_jump (start_label), top_label);
2702 emit_barrier_before (top_label);
2703 start_label = top_label;
2706 emit_jump (start_label);
2707 emit_note (NOTE_INSN_LOOP_END);
2708 emit_label (loop_stack->data.loop.end_label);
2710 POPSTACK (loop_stack);
2715 /* Finish a null loop, aka do { } while (0). */
2718 expand_end_null_loop (void)
2720 do_pending_stack_adjust ();
2721 emit_label (loop_stack->data.loop.end_label);
2723 POPSTACK (loop_stack);
2728 /* Generate a jump to the current loop's continue-point.
2729 This is usually the top of the loop, but may be specified
2730 explicitly elsewhere. If not currently inside a loop,
2731 return 0 and do nothing; caller will print an error message. */
2734 expand_continue_loop (struct nesting *whichloop)
2736 /* Emit information for branch prediction. */
2739 if (flag_guess_branch_prob)
2741 note = emit_note (NOTE_INSN_PREDICTION);
2742 NOTE_PREDICTION (note) = NOTE_PREDICT (PRED_CONTINUE, IS_TAKEN);
2746 whichloop = loop_stack;
2749 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2754 /* Generate a jump to exit the current loop. If not currently inside a loop,
2755 return 0 and do nothing; caller will print an error message. */
2758 expand_exit_loop (struct nesting *whichloop)
2762 whichloop = loop_stack;
2765 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2769 /* Generate a conditional jump to exit the current loop if COND
2770 evaluates to zero. If not currently inside a loop,
2771 return 0 and do nothing; caller will print an error message. */
2774 expand_exit_loop_if_false (struct nesting *whichloop, tree cond)
2780 whichloop = loop_stack;
2784 if (integer_nonzerop (cond))
2786 if (integer_zerop (cond))
2787 return expand_exit_loop (whichloop);
2789 /* Check if we definitely won't need a fixup. */
2790 if (whichloop == nesting_stack)
2792 jumpifnot (cond, whichloop->data.loop.end_label);
2796 /* In order to handle fixups, we actually create a conditional jump
2797 around an unconditional branch to exit the loop. If fixups are
2798 necessary, they go before the unconditional branch. */
2800 label = gen_label_rtx ();
2801 jumpif (cond, label);
2802 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2809 /* Like expand_exit_loop_if_false except also emit a note marking
2810 the end of the conditional. Should only be used immediately
2811 after expand_start_loop.
2814 expand_exit_loop_top_cond (struct nesting *whichloop, tree cond)
2816 if (! expand_exit_loop_if_false (whichloop, cond))
2819 emit_note (NOTE_INSN_LOOP_END_TOP_COND);
2823 /* Return nonzero if we should preserve sub-expressions as separate
2824 pseudos. We never do so if we aren't optimizing. We always do so
2825 if -fexpensive-optimizations is enabled.
2827 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2828 the loop may still be a small one. */
2831 preserve_subexpressions_p (void)
2835 if (flag_expensive_optimizations)
2838 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2841 insn = get_last_insn_anywhere ();
2844 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2845 < n_non_fixed_regs * 3));
2849 /* Generate a jump to exit the current loop, conditional, binding contour
2850 or case statement. Not all such constructs are visible to this function,
2851 only those started with EXIT_FLAG nonzero. Individual languages use
2852 the EXIT_FLAG parameter to control which kinds of constructs you can
2855 If not currently inside anything that can be exited,
2856 return 0 and do nothing; caller will print an error message. */
2859 expand_exit_something (void)
2863 for (n = nesting_stack; n; n = n->all)
2864 if (n->exit_label != 0)
2866 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2873 /* Generate RTL to return from the current function, with no value.
2874 (That is, we do not do anything about returning any value.) */
2877 expand_null_return (void)
2881 last_insn = get_last_insn ();
2883 /* If this function was declared to return a value, but we
2884 didn't, clobber the return registers so that they are not
2885 propagated live to the rest of the function. */
2886 clobber_return_register ();
2888 expand_null_return_1 (last_insn);
2891 /* Try to guess whether the return value indicates an error code. */
2892 static enum br_predictor
2893 return_prediction (rtx val)
2895 /* Different heuristics for pointers and scalars. */
2896 if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
2898 /* NULL is usually not returned. */
2899 if (val == const0_rtx)
2900 return PRED_NULL_RETURN;
2904 /* Negative return values are often used to indicate
2906 if (GET_CODE (val) == CONST_INT
2907 && INTVAL (val) < 0)
2908 return PRED_NEGATIVE_RETURN;
2909 /* Constant return values are also usually errors; zero and one
2910 often mean booleans, so exclude them from the heuristic.  */
2912 if (CONSTANT_P (val)
2913 && (val != const0_rtx && val != const1_rtx))
2914 return PRED_CONST_RETURN;
2916 return PRED_NO_PREDICTION;
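/* Illustrative sketch, not part of the original source.  Under the
   heuristics above:

     return NULL;     pointer result, null value      PRED_NULL_RETURN
     return -1;       negative constant               PRED_NEGATIVE_RETURN
     return 42;       constant other than 0 or 1      PRED_CONST_RETURN
     return 0;        likely a boolean or success     no prediction  */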
2919 /* Generate RTL to return from the current function, with value VAL. */
2922 expand_value_return (rtx val)
2926 enum br_predictor pred;
2928 if (flag_guess_branch_prob
2929 && (pred = return_prediction (val)) != PRED_NO_PREDICTION)
2931 /* Emit information for branch prediction. */
2934 note = emit_note (NOTE_INSN_PREDICTION);
2936 NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
2940 last_insn = get_last_insn ();
2941 return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2943 /* Copy the value to the return location
2944 unless it's already there. */
2946 if (return_reg != val)
2948 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2949 #ifdef PROMOTE_FUNCTION_RETURN
2950 int unsignedp = TREE_UNSIGNED (type);
2951 enum machine_mode old_mode
2952 = DECL_MODE (DECL_RESULT (current_function_decl));
2953 enum machine_mode mode
2954 = promote_mode (type, old_mode, &unsignedp, 1);
2956 if (mode != old_mode)
2957 val = convert_modes (mode, old_mode, val, unsignedp);
2959 if (GET_CODE (return_reg) == PARALLEL)
2960 emit_group_load (return_reg, val, int_size_in_bytes (type));
2962 emit_move_insn (return_reg, val);
2965 expand_null_return_1 (last_insn);
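/* Illustrative sketch, not part of the original source.  On a target
   defining PROMOTE_FUNCTION_RETURN, for

     short f (short a) { return a; }

   the return register (DECL_RTL of DECL_RESULT) lives in the promoted
   mode (say SImode rather than HImode), so the HImode value computed
   for `a' is widened with convert_modes above before the final move
   into the return register.  */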
2968 /* Output a return with no value. If LAST_INSN is nonzero,
2969 pretend that the return takes place after LAST_INSN. */
2972 expand_null_return_1 (rtx last_insn)
2974 rtx end_label = cleanup_label ? cleanup_label : return_label;
2976 clear_pending_stack_adjust ();
2977 do_pending_stack_adjust ();
2981 end_label = return_label = gen_label_rtx ();
2982 expand_goto_internal (NULL_TREE, end_label, last_insn);
2985 /* Generate RTL to evaluate the expression RETVAL and return it
2986 from the current function. */
2989 expand_return (tree retval)
2991 /* If there are any cleanups to be performed, then they will
2992 be inserted following LAST_INSN. It is desirable
2993 that the last_insn, for such purposes, should be the
2994 last insn before computing the return value. Otherwise, cleanups
2995 which call functions can clobber the return value. */
2996 /* ??? rms: I think that is erroneous, because in C++ it would
2997 run destructors on variables that might be used in the subsequent
2998 computation of the return value. */
3004 /* If function wants no value, give it none. */
3005 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3007 expand_expr (retval, NULL_RTX, VOIDmode, 0);
3009 expand_null_return ();
3013 if (retval == error_mark_node)
3015 /* Treat this like a return of no value from a function that
3017 expand_null_return ();
3020 else if (TREE_CODE (retval) == RESULT_DECL)
3021 retval_rhs = retval;
3022 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
3023 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3024 retval_rhs = TREE_OPERAND (retval, 1);
3025 else if (VOID_TYPE_P (TREE_TYPE (retval)))
3026 /* Recognize tail-recursive call to void function. */
3027 retval_rhs = retval;
3029 retval_rhs = NULL_TREE;
3031 last_insn = get_last_insn ();
3033 /* Distribute return down conditional expr if either of the sides
3034 may involve tail recursion (see test below). This enhances the number
3035 of tail recursions we see. Don't do this always since it can produce
3036 sub-optimal code in some cases and we distribute assignments into
3037 conditional expressions when it would help. */
3039 if (optimize && retval_rhs != 0
3040 && frame_offset == 0
3041 && TREE_CODE (retval_rhs) == COND_EXPR
3042 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
3043 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
3045 rtx label = gen_label_rtx ();
3048 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
3049 start_cleanup_deferral ();
3050 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3051 DECL_RESULT (current_function_decl),
3052 TREE_OPERAND (retval_rhs, 1));
3053 TREE_SIDE_EFFECTS (expr) = 1;
3054 expand_return (expr);
3057 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3058 DECL_RESULT (current_function_decl),
3059 TREE_OPERAND (retval_rhs, 2));
3060 TREE_SIDE_EFFECTS (expr) = 1;
3061 expand_return (expr);
3062 end_cleanup_deferral ();
3066 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3068 /* If the result is an aggregate that is being returned in one (or more)
3069 registers, load the registers here. The compiler currently can't handle
3070 copying a BLKmode value into registers. We could put this code in a
3071 more general area (for use by everyone instead of just function
3072 call/return), but until this feature is generally usable it is kept here
3073 (and in expand_call). The value must go into a pseudo in case there
3074 are cleanups that will clobber the real return register. */
3077 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3078 && GET_CODE (result_rtl) == REG)
3081 unsigned HOST_WIDE_INT bitpos, xbitpos;
3082 unsigned HOST_WIDE_INT big_endian_correction = 0;
3083 unsigned HOST_WIDE_INT bytes
3084 = int_size_in_bytes (TREE_TYPE (retval_rhs));
3085 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3086 unsigned int bitsize
3087 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
3088 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
3089 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
3090 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
3091 enum machine_mode tmpmode, result_reg_mode;
3095 expand_null_return ();
3099 /* Structures whose size is not a multiple of a word are aligned
3100 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
3101 machine, this means we must skip the empty high order bytes when
3102 calculating the bit offset. */
3103 if (BYTES_BIG_ENDIAN
3104 && bytes % UNITS_PER_WORD)
3105 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3108 /* Copy the structure BITSIZE bits at a time. */
3109 for (bitpos = 0, xbitpos = big_endian_correction;
3110 bitpos < bytes * BITS_PER_UNIT;
3111 bitpos += bitsize, xbitpos += bitsize)
3113 /* We need a new destination pseudo each time xbitpos is
3114 on a word boundary and when xbitpos == big_endian_correction
3115 (the first time through). */
3116 if (xbitpos % BITS_PER_WORD == 0
3117 || xbitpos == big_endian_correction)
3119 /* Generate an appropriate register. */
3120 dst = gen_reg_rtx (word_mode);
3121 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3123 /* Clear the destination before we move anything into it. */
3124 emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
3127 /* We need a new source operand each time bitpos is on a word
3129 if (bitpos % BITS_PER_WORD == 0)
3130 src = operand_subword_force (result_val,
3131 bitpos / BITS_PER_WORD,
3134 /* Use bitpos for the source extraction (left justified) and
3135 xbitpos for the destination store (right justified). */
3136 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3137 extract_bit_field (src, bitsize,
3138 bitpos % BITS_PER_WORD, 1,
3139 NULL_RTX, word_mode, word_mode,
3144 /* Find the smallest integer mode large enough to hold the
3145 entire structure and use that mode instead of BLKmode
3146 on the USE insn for the return register. */
3147 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3148 tmpmode != VOIDmode;
3149 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3150 /* Have we found a large enough mode? */
3151 if (GET_MODE_SIZE (tmpmode) >= bytes)
3154 /* No suitable mode found. */
3155 if (tmpmode == VOIDmode)
3158 PUT_MODE (result_rtl, tmpmode);
3160 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3161 result_reg_mode = word_mode;
3163 result_reg_mode = tmpmode;
3164 result_reg = gen_reg_rtx (result_reg_mode);
3167 for (i = 0; i < n_regs; i++)
3168 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3171 if (tmpmode != result_reg_mode)
3172 result_reg = gen_lowpart (tmpmode, result_reg);
3174 expand_value_return (result_reg);
3176 else if (retval_rhs != 0
3177 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3178 && (GET_CODE (result_rtl) == REG
3179 || (GET_CODE (result_rtl) == PARALLEL)))
3181 /* Calculate the return value into a temporary (usually a pseudo
3183 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3184 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3186 val = assign_temp (nt, 0, 0, 1);
3187 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3188 val = force_not_mem (val);
3190 /* Return the calculated value, doing cleanups first. */
3191 expand_value_return (val);
3195 /* No cleanups or no hard reg used;
3196 calculate value into hard return reg. */
3197 expand_expr (retval, const0_rtx, VOIDmode, 0);
3199 expand_value_return (result_rtl);
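/* Illustrative sketch, not part of the original source.  The
   distribution of a return over a conditional, done above when
   optimizing, turns

     return p ? f (x) : g (x);

   into the equivalent of

     if (p) return f (x); else return g (x);

   so each arm is a plain call in tail position and the tail recursion
   code gets a chance to recognize it.  The separate BLKmode path above
   handles small aggregates returned in registers, copying the value
   piece by piece into word-mode pseudos before the USE of the return
   register.  */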
3203 /* Attempt to optimize a potential tail recursion call into a goto.
3204 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3205 where to place the jump to the tail recursion label.
3207 Return TRUE if the call was optimized into a goto. */
3210 optimize_tail_recursion (tree arguments, rtx last_insn)
3212 /* Finish checking validity, and if valid emit code to set the
3213 argument variables for the new call. */
3214 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3216 if (tail_recursion_label == 0)
3218 tail_recursion_label = gen_label_rtx ();
3219 emit_label_after (tail_recursion_label,
3220 tail_recursion_reentry);
3223 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
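/* Illustrative sketch, not part of the original source.  For a
   self-recursive tail call such as

     int sum (int n, int acc)
     {
       if (n == 0)
         return acc;
       return sum (n - 1, acc + n);
     }

   the recursive call can be replaced by stores of the new argument
   values into the formals of `sum' (tail_recursion_args below) and a
   jump to tail_recursion_label near the function entry, turning the
   recursion into a loop.  */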
3230 /* Emit code to alter this function's formal parms for a tail-recursive call.
3231 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3232 FORMALS is the chain of decls of formals.
3233 Return 1 if this can be done;
3234 otherwise return 0 and do not emit any code. */
3237 tail_recursion_args (tree actuals, tree formals)
3239 tree a = actuals, f = formals;
3243 /* Check that number and types of actuals are compatible
3244 with the formals. This is not always true in valid C code.
3245 Also check that no formal needs to be addressable
3246 and that all formals are scalars. */
3248 /* Also count the args. */
3250 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3252 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3253 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3255 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3258 if (a != 0 || f != 0)
3261 /* Compute all the actuals. */
3263 argvec = (rtx *) alloca (i * sizeof (rtx));
3265 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3266 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3268 /* Find which actual values refer to current values of previous formals.
3269 Copy each of them now, before any formal is changed. */
3271 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3275 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3276 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3282 argvec[i] = copy_to_reg (argvec[i]);
3285 /* Store the values of the actuals into the formals. */
3287 for (f = formals, a = actuals, i = 0; f;
3288 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3290 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3291 emit_move_insn (DECL_RTL (f), argvec[i]);
3294 rtx tmp = argvec[i];
3295 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a)));
3296 promote_mode (TREE_TYPE (TREE_VALUE (a)), GET_MODE (tmp),
3298 if (DECL_MODE (f) != GET_MODE (DECL_RTL (f)))
3300 tmp = gen_reg_rtx (DECL_MODE (f));
3301 convert_move (tmp, argvec[i], unsignedp);
3303 convert_move (DECL_RTL (f), tmp, unsignedp);
3311 /* Generate the RTL code for entering a binding contour.
3312 The variables are declared one by one, by calls to `expand_decl'.
3314 FLAGS is a bitwise or of the following flags:
3316 1 - Nonzero if this construct should be visible to
3319 2 - Nonzero if this contour does not require a
3320 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3321 language-independent code should set this flag because they
3322 will not create corresponding BLOCK nodes. (There should be
3323 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3324 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
3325 when expand_end_bindings is called.
3327 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3328 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
3332 expand_start_bindings_and_block (int flags, tree block)
3334 struct nesting *thisblock = ALLOC_NESTING ();
3336 int exit_flag = ((flags & 1) != 0);
3337 int block_flag = ((flags & 2) == 0);
3339 /* If a BLOCK is supplied, then the caller should be requesting a
3340 NOTE_INSN_BLOCK_BEG note. */
3341 if (!block_flag && block)
3344 /* Create a note to mark the beginning of the block. */
3347 note = emit_note (NOTE_INSN_BLOCK_BEG);
3348 NOTE_BLOCK (note) = block;
3351 note = emit_note (NOTE_INSN_DELETED);
3353 /* Make an entry on block_stack for the block we are entering. */
3355 thisblock->desc = BLOCK_NESTING;
3356 thisblock->next = block_stack;
3357 thisblock->all = nesting_stack;
3358 thisblock->depth = ++nesting_depth;
3359 thisblock->data.block.stack_level = 0;
3360 thisblock->data.block.cleanups = 0;
3361 thisblock->data.block.exception_region = 0;
3362 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3364 thisblock->data.block.conditional_code = 0;
3365 thisblock->data.block.last_unconditional_cleanup = note;
3366 /* When we insert instructions after the last unconditional cleanup,
3367 we don't adjust last_insn. That means that a later add_insn will
3368 clobber the instructions we've just added. The easiest way to
3369 fix this is to just insert another instruction here, so that the
3370 instructions inserted after the last unconditional cleanup are
3371 never the last instruction. */
3372 emit_note (NOTE_INSN_DELETED);
3375 && !(block_stack->data.block.cleanups == NULL_TREE
3376 && block_stack->data.block.outer_cleanups == NULL_TREE))
3377 thisblock->data.block.outer_cleanups
3378 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3379 block_stack->data.block.outer_cleanups);
3381 thisblock->data.block.outer_cleanups = 0;
3382 thisblock->data.block.label_chain = 0;
3383 thisblock->data.block.innermost_stack_block = stack_block_stack;
3384 thisblock->data.block.first_insn = note;
3385 thisblock->data.block.block_start_count = ++current_block_start_count;
3386 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3387 block_stack = thisblock;
3388 nesting_stack = thisblock;
3390 /* Make a new level for allocating stack slots. */
3394 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3395 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3396 expand_expr are made. After we end the region, we know that all
3397 space for all temporaries that were created by TARGET_EXPRs will be
3398 destroyed and their space freed for reuse. */
3401 expand_start_target_temps (void)
3403 /* This is so that even if the result is preserved, the space
3404 allocated will be freed, as we know that it is no longer in use. */
3407 /* Start a new binding layer that will keep track of all cleanup
3408 actions to be performed. */
3409 expand_start_bindings (2);
3411 target_temp_slot_level = temp_slot_level;
3415 expand_end_target_temps (void)
3417 expand_end_bindings (NULL_TREE, 0, 0);
3419 /* This is so that even if the result is preserved, the space
3420 allocated will be freed, as we know that it is no longer in use. */
3424 /* Given a pointer to a BLOCK node return nonzero if (and only if) the node
3425 in question represents the outermost pair of curly braces (i.e. the "body
3426 block") of a function or method.
3428 For any BLOCK node representing a "body block" of a function or method, the
3429 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3430 represents the outermost (function) scope for the function or method (i.e.
3431 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
3432 *that* node in turn will point to the relevant FUNCTION_DECL node. */
3435 is_body_block (tree stmt)
3437 if (lang_hooks.no_body_blocks)
3440 if (TREE_CODE (stmt) == BLOCK)
3442 tree parent = BLOCK_SUPERCONTEXT (stmt);
3444 if (parent && TREE_CODE (parent) == BLOCK)
3446 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3448 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3456 /* True if we are currently emitting insns in an area of output code
3457 that is controlled by a conditional expression. This is used by
3458 the cleanup handling code to generate conditional cleanup actions. */
3461 conditional_context (void)
3463 return block_stack && block_stack->data.block.conditional_code;
3466 /* Return an opaque pointer to the current nesting level, so frontend code
3467 can check its own sanity. */
3470 current_nesting_level (void)
3472 return cfun ? block_stack : 0;
3475 /* Emit a handler label for a nonlocal goto handler.
3476 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3479 expand_nl_handler_label (rtx slot, rtx before_insn)
3482 rtx handler_label = gen_label_rtx ();
3484 /* Don't let cleanup_cfg delete the handler. */
3485 LABEL_PRESERVE_P (handler_label) = 1;
3488 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3489 insns = get_insns ();
3491 emit_insn_before (insns, before_insn);
3493 emit_label (handler_label);
3495 return handler_label;
3498 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3501 expand_nl_goto_receiver (void)
3503 #ifdef HAVE_nonlocal_goto
3504 if (! HAVE_nonlocal_goto)
3506 /* First adjust our frame pointer to its actual value. It was
3507 previously set to the start of the virtual area corresponding to
3508 the stacked variables when we branched here and now needs to be
3509 adjusted to the actual hardware fp value.
3511 Assignments to virtual registers are converted by
3512 instantiate_virtual_regs into the corresponding assignment
3513 to the underlying register (fp in this case) that makes
3514 the original assignment true.
3515 So the following insn will actually be
3516 decrementing fp by STARTING_FRAME_OFFSET. */
3517 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3519 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3520 if (fixed_regs[ARG_POINTER_REGNUM])
3522 #ifdef ELIMINABLE_REGS
3523 /* If the argument pointer can be eliminated in favor of the
3524 frame pointer, we don't need to restore it. We assume here
3525 that if such an elimination is present, it can always be used.
3526 This is the case on all known machines; if we don't make this
3527 assumption, we do unnecessary saving on many machines. */
3528 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
3531 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3532 if (elim_regs[i].from == ARG_POINTER_REGNUM
3533 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3536 if (i == ARRAY_SIZE (elim_regs))
3539 /* Now restore our arg pointer from the address at which it
3540 was saved in our stack frame. */
3541 emit_move_insn (virtual_incoming_args_rtx,
3542 copy_to_reg (get_arg_pointer_save_area (cfun)));
3547 #ifdef HAVE_nonlocal_goto_receiver
3548 if (HAVE_nonlocal_goto_receiver)
3549 emit_insn (gen_nonlocal_goto_receiver ());
3553 /* Make handlers for nonlocal gotos taking place in the function calls in
3557 expand_nl_goto_receivers (struct nesting *thisblock)
3560 rtx afterward = gen_label_rtx ();
3565 /* Record the handler address in the stack slot for that purpose,
3566 during this block, saving and restoring the outer value. */
3567 if (thisblock->next != 0)
3568 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3570 rtx save_receiver = gen_reg_rtx (Pmode);
3571 emit_move_insn (XEXP (slot, 0), save_receiver);
3574 emit_move_insn (save_receiver, XEXP (slot, 0));
3575 insns = get_insns ();
3577 emit_insn_before (insns, thisblock->data.block.first_insn);
3580 /* Jump around the handlers; they run only when specially invoked. */
3581 emit_jump (afterward);
3583 /* Make a separate handler for each label. */
3584 link = nonlocal_labels;
3585 slot = nonlocal_goto_handler_slots;
3586 label_list = NULL_RTX;
3587 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3588 /* Skip any labels we shouldn't be able to jump to from here,
3589 we generate one special handler for all of them below which just calls
3591 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3594 lab = expand_nl_handler_label (XEXP (slot, 0),
3595 thisblock->data.block.first_insn);
3596 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3598 expand_nl_goto_receiver ();
3600 /* Jump to the "real" nonlocal label. */
3601 expand_goto (TREE_VALUE (link));
3604 /* A second pass over all nonlocal labels; this time we handle those
3605 we should not be able to jump to at this point. */
3606 link = nonlocal_labels;
3607 slot = nonlocal_goto_handler_slots;
3609 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3610 if (DECL_TOO_LATE (TREE_VALUE (link)))
3613 lab = expand_nl_handler_label (XEXP (slot, 0),
3614 thisblock->data.block.first_insn);
3615 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3621 expand_nl_goto_receiver ();
3622 expand_builtin_trap ();
3625 nonlocal_goto_handler_labels = label_list;
3626 emit_label (afterward);
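/* Illustrative sketch, not part of the original source; `apply' below
   is only a placeholder for some callee.  The receivers above matter
   for GNU C nonlocal gotos out of nested functions:

     void outer (void)
     {
       void inner (int bad) { if (bad) goto done; }
       apply (inner);
     done:
       return;
     }

   Any call made in this block may transfer control back through one of
   the handler labels emitted above, which restore the frame and
   argument pointers before jumping to the real label.  */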
3629 /* Warn about any unused VARS (which may contain nodes other than
3630 VAR_DECLs, but such nodes are ignored). The nodes are connected
3631 via the TREE_CHAIN field. */
3634 warn_about_unused_variables (tree vars)
3638 if (warn_unused_variable)
3639 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3640 if (TREE_CODE (decl) == VAR_DECL
3641 && ! TREE_USED (decl)
3642 && ! DECL_IN_SYSTEM_HEADER (decl)
3643 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3644 warning_with_decl (decl, "unused variable `%s'");
3647 /* Generate RTL code to terminate a binding contour.
3649 VARS is the chain of VAR_DECL nodes for the variables bound in this
3650 contour. There may actually be other nodes in this chain, but any
3651 nodes other than VAR_DECLS are ignored.
3653 MARK_ENDS is nonzero if we should put a note at the beginning
3654 and end of this binding contour.
3656 DONT_JUMP_IN is positive if it is not valid to jump into this contour,
3657 zero if we can jump into this contour only if it does not have a saved
3658 stack level, and negative if we are not to check for invalid use of
3659 labels (because the front end does that). */
3662 expand_end_bindings (tree vars, int mark_ends, int dont_jump_in)
3664 struct nesting *thisblock = block_stack;
3666 /* If any of the variables in this scope were not used, warn the
3668 warn_about_unused_variables (vars);
3670 if (thisblock->exit_label)
3672 do_pending_stack_adjust ();
3673 emit_label (thisblock->exit_label);
3676 /* If necessary, make handlers for nonlocal gotos taking
3677 place in the function calls in this block. */
3678 if (function_call_count != 0 && nonlocal_labels
3679 /* Make handler for outermost block
3680 if there were any nonlocal gotos to this function. */
3681 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3682 /* Make handler for inner block if it has something
3683 special to do when you jump out of it. */
3684 : (thisblock->data.block.cleanups != 0
3685 || thisblock->data.block.stack_level != 0)))
3686 expand_nl_goto_receivers (thisblock);
3688 /* Don't allow jumping into a block that has a stack level.
3689 Cleanups are allowed, though. */
3690 if (dont_jump_in > 0
3691 || (dont_jump_in == 0 && thisblock->data.block.stack_level != 0))
3693 struct label_chain *chain;
3695 /* Any labels in this block are no longer valid to go to.
3696 Mark them to cause an error message. */
3697 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3699 DECL_TOO_LATE (chain->label) = 1;
3700 /* If any goto without a fixup came to this label,
3701 that must be an error, because gotos without fixups
3702 come from outside all saved stack-levels. */
3703 if (TREE_ADDRESSABLE (chain->label))
3704 error_with_decl (chain->label,
3705 "label `%s' used before containing binding contour");
3709 /* Restore stack level in effect before the block
3710 (only if variable-size objects allocated). */
3711 /* Perform any cleanups associated with the block. */
3713 if (thisblock->data.block.stack_level != 0
3714 || thisblock->data.block.cleanups != 0)
3719 /* Don't let cleanups affect ({...}) constructs. */
3720 int old_expr_stmts_for_value = expr_stmts_for_value;
3721 rtx old_last_expr_value = last_expr_value;
3722 tree old_last_expr_type = last_expr_type;
3723 expr_stmts_for_value = 0;
3725 /* Only clean up here if this point can actually be reached. */
3726 insn = get_last_insn ();
3727 if (GET_CODE (insn) == NOTE)
3728 insn = prev_nonnote_insn (insn);
3729 reachable = (! insn || GET_CODE (insn) != BARRIER);
3731 /* Do the cleanups. */
3732 expand_cleanups (thisblock->data.block.cleanups, 0, reachable);
3734 do_pending_stack_adjust ();
3736 expr_stmts_for_value = old_expr_stmts_for_value;
3737 last_expr_value = old_last_expr_value;
3738 last_expr_type = old_last_expr_type;
3740 /* Restore the stack level. */
3742 if (reachable && thisblock->data.block.stack_level != 0)
3744 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3745 thisblock->data.block.stack_level, NULL_RTX);
3746 if (nonlocal_goto_handler_slots != 0)
3747 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3751 /* Any gotos out of this block must also do these things.
3752 Also report any gotos with fixups that came to labels in this
3754 fixup_gotos (thisblock,
3755 thisblock->data.block.stack_level,
3756 thisblock->data.block.cleanups,
3757 thisblock->data.block.first_insn,
3761 /* Mark the beginning and end of the scope if requested.
3762 We do this now, after running cleanups on the variables
3763 just going out of scope, so they are in scope for their cleanups. */
3767 rtx note = emit_note (NOTE_INSN_BLOCK_END);
3768 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3771 /* Get rid of the beginning-mark if we don't make an end-mark. */
3772 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3774 /* Restore the temporary level of TARGET_EXPRs. */
3775 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3777 /* Restore block_stack level for containing block. */
3779 stack_block_stack = thisblock->data.block.innermost_stack_block;
3780 POPSTACK (block_stack);
3782 /* Pop the stack slot nesting and free any slots at this level. */
3786 /* Generate code to save the stack pointer at the start of the current block
3787 and set up to restore it on exit. */
3790 save_stack_pointer (void)
3792 struct nesting *thisblock = block_stack;
3794 if (thisblock->data.block.stack_level == 0)
3796 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3797 &thisblock->data.block.stack_level,
3798 thisblock->data.block.first_insn);
3799 stack_block_stack = thisblock;
3803 /* Generate RTL for the automatic variable declaration DECL.
3804 (Other kinds of declarations are simply ignored if seen here.) */
3807 expand_decl (tree decl)
3811 type = TREE_TYPE (decl);
3813 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3814 type in case this node is used in a reference. */
3815 if (TREE_CODE (decl) == CONST_DECL)
3817 DECL_MODE (decl) = TYPE_MODE (type);
3818 DECL_ALIGN (decl) = TYPE_ALIGN (type);
3819 DECL_SIZE (decl) = TYPE_SIZE (type);
3820 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3824 /* Otherwise, only automatic variables need any expansion done. Static and
3825 external variables, and external functions, will be handled by
3826 `assemble_variable' (called from finish_decl). TYPE_DECL requires
3827 nothing. PARM_DECLs are handled in `assign_parms'. */
3828 if (TREE_CODE (decl) != VAR_DECL)
3831 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3834 /* Create the RTL representation for the variable. */
3836 if (type == error_mark_node)
3837 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3839 else if (DECL_SIZE (decl) == 0)
3840 /* Variable with incomplete type. */
3843 if (DECL_INITIAL (decl) == 0)
3844 /* Error message was already done; now avoid a crash. */
3845 x = gen_rtx_MEM (BLKmode, const0_rtx);
3847 /* An initializer is going to decide the size of this array.
3848 Until we know the size, represent its address with a reg. */
3849 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3851 set_mem_attributes (x, decl, 1);
3852 SET_DECL_RTL (decl, x);
3854 else if (DECL_MODE (decl) != BLKmode
3855 /* If -ffloat-store, don't put explicit float vars
3857 && !(flag_float_store
3858 && TREE_CODE (type) == REAL_TYPE)
3859 && ! TREE_THIS_VOLATILE (decl)
3860 && ! DECL_NONLOCAL (decl)
3861 && (DECL_REGISTER (decl) || DECL_ARTIFICIAL (decl) || optimize))
3863 /* Automatic variable that can go in a register. */
3864 int unsignedp = TREE_UNSIGNED (type);
3865 enum machine_mode reg_mode
3866 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3868 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
3870 if (!DECL_ARTIFICIAL (decl))
3871 mark_user_reg (DECL_RTL (decl));
3873 if (POINTER_TYPE_P (type))
3874 mark_reg_pointer (DECL_RTL (decl),
3875 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3877 maybe_set_unchanging (DECL_RTL (decl), decl);
3879 /* If something wants our address, try to use ADDRESSOF. */
3880 if (TREE_ADDRESSABLE (decl))
3881 put_var_into_stack (decl, /*rescan=*/false);
3884 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
3885 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3886 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3887 STACK_CHECK_MAX_VAR_SIZE)))
3889 /* Variable of fixed size that goes on the stack. */
3894 /* If we previously made RTL for this decl, it must be an array
3895 whose size was determined by the initializer.
3896 The old address was a register; set that register now
3897 to the proper address. */
3898 if (DECL_RTL_SET_P (decl))
3900 if (GET_CODE (DECL_RTL (decl)) != MEM
3901 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3903 oldaddr = XEXP (DECL_RTL (decl), 0);
3906 /* Set alignment we actually gave this decl. */
3907 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3908 : GET_MODE_BITSIZE (DECL_MODE (decl)));
3909 DECL_USER_ALIGN (decl) = 0;
3911 x = assign_temp (decl, 1, 1, 1);
3912 set_mem_attributes (x, decl, 1);
3913 SET_DECL_RTL (decl, x);
3917 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
3918 if (addr != oldaddr)
3919 emit_move_insn (oldaddr, addr);
3923 /* Dynamic-size object: must push space on the stack. */
3925 rtx address, size, x;
3927 /* Record the stack pointer on entry to block, if have
3928 not already done so. */
3929 do_pending_stack_adjust ();
3930 save_stack_pointer ();
3932 /* In function-at-a-time mode, variable_size doesn't expand this,
3934 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
3935 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
3936 const0_rtx, VOIDmode, 0);
3938 /* Compute the variable's size, in bytes. */
3939 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
3942 /* Allocate space on the stack for the variable. Note that
3943 DECL_ALIGN says how the variable is to be aligned and we
3944 cannot use it to conclude anything about the alignment of
3946 address = allocate_dynamic_stack_space (size, NULL_RTX,
3947 TYPE_ALIGN (TREE_TYPE (decl)));
3949 /* Reference the variable indirect through that rtx. */
3950 x = gen_rtx_MEM (DECL_MODE (decl), address);
3951 set_mem_attributes (x, decl, 1);
3952 SET_DECL_RTL (decl, x);
3955 /* Indicate the alignment we actually gave this variable. */
3956 #ifdef STACK_BOUNDARY
3957 DECL_ALIGN (decl) = STACK_BOUNDARY;
3959 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
3961 DECL_USER_ALIGN (decl) = 0;
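/* Illustrative sketch, not part of the original source.  The cases
   above give, for automatic variables:

     int i;           when optimizing (or declared `register'), a pseudo
                      register in a possibly promoted mode
     char buf[64];    a fixed-size stack slot from assign_temp
     char vla[n];     space pushed at run time by
                      allocate_dynamic_stack_space, after saving the
                      stack pointer for the enclosing block  */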
3965 /* Emit code to perform the initialization of a declaration DECL. */
3968 expand_decl_init (tree decl)
3970 int was_used = TREE_USED (decl);
3972 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
3973 for static decls. */
3974 if (TREE_CODE (decl) == CONST_DECL
3975 || TREE_STATIC (decl))
3978 /* Compute and store the initial value now. */
3982 if (DECL_INITIAL (decl) == error_mark_node)
3984 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
3986 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
3987 || code == POINTER_TYPE || code == REFERENCE_TYPE)
3988 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
3992 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
3994 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
3995 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
3999 /* Don't let the initialization count as "using" the variable. */
4000 TREE_USED (decl) = was_used;
4002 /* Free any temporaries we made while initializing the decl. */
4003 preserve_temp_slots (NULL_RTX);
4008 /* CLEANUP is an expression to be executed at exit from this binding contour;
4009 for example, in C++, it might call the destructor for this variable.
4011 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
4012 CLEANUP multiple times, and have the correct semantics. This
4013 happens in exception handling, for gotos, returns, breaks that
4014 leave the current scope.
4016 If CLEANUP is nonzero and DECL is zero, we record a cleanup
4017 that is not associated with any particular variable. */
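/* Illustrative example: for a C++ block

       { Obj o; ... }

   the front end typically registers something equivalent to a call to
   Obj's destructor on `o' as the CLEANUP for this binding contour, so
   that it runs on normal exit from the block, on a goto/return/break
   that leaves it, and (when EH is used for cleanups) when an exception
   propagates out of it.  */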
4020 expand_decl_cleanup (tree decl, tree cleanup)
4022 struct nesting *thisblock;
4024 /* Error if we are not in any block. */
4025 if (cfun == 0 || block_stack == 0)
4028 thisblock = block_stack;
4030 /* Record the cleanup if there is one. */
4036 tree *cleanups = &thisblock->data.block.cleanups;
4037 int cond_context = conditional_context ();
4041 rtx flag = gen_reg_rtx (word_mode);
4046 emit_move_insn (flag, const0_rtx);
4047 set_flag_0 = get_insns ();
4050 thisblock->data.block.last_unconditional_cleanup
4051 = emit_insn_after (set_flag_0,
4052 thisblock->data.block.last_unconditional_cleanup);
4054 emit_move_insn (flag, const1_rtx);
4056 cond = build_decl (VAR_DECL, NULL_TREE,
4057 (*lang_hooks.types.type_for_mode) (word_mode, 1));
4058 SET_DECL_RTL (cond, flag);
4060 /* Conditionalize the cleanup. */
4061 cleanup = build (COND_EXPR, void_type_node,
4062 (*lang_hooks.truthvalue_conversion) (cond),
4063 cleanup, integer_zero_node);
4064 cleanup = fold (cleanup);
4066 cleanups = &thisblock->data.block.cleanups;
4069 cleanup = unsave_expr (cleanup);
4071 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
4074 /* If this block has a cleanup, it belongs in stack_block_stack. */
4075 stack_block_stack = thisblock;
4082 if (! using_eh_for_cleanups_p)
4083 TREE_ADDRESSABLE (t) = 1;
4085 expand_eh_region_start ();
4092 thisblock->data.block.last_unconditional_cleanup
4093 = emit_insn_after (seq,
4094 thisblock->data.block.last_unconditional_cleanup);
4098 thisblock->data.block.last_unconditional_cleanup
4100 /* When we insert instructions after the last unconditional cleanup,
4101 we don't adjust last_insn. That means that a later add_insn will
4102 clobber the instructions we've just added. The easiest way to
4103 fix this is to just insert another instruction here, so that the
4104 instructions inserted after the last unconditional cleanup are
4105 never the last instruction. */
4106 emit_note (NOTE_INSN_DELETED);
4112 /* Like expand_decl_cleanup, but maybe only run the cleanup if an exception is thrown. */
4116 expand_decl_cleanup_eh (tree decl, tree cleanup, int eh_only)
4118 int ret = expand_decl_cleanup (decl, cleanup);
4121 tree node = block_stack->data.block.cleanups;
4122 CLEANUP_EH_ONLY (node) = eh_only;
4127 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4128 DECL_ELTS is the list of elements that belong to DECL's type.
4129 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
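/* Illustrative example: for a C++ anonymous union member

       union { int i; float f; };

   DECL is typically the compiler-generated VAR_DECL for the union object
   and DECL_ELTS lists the VAR_DECLs for `i' and `f'; the loop below gives
   each element RTL that aliases the union's own storage (the same MEM or
   register, adjusted to the element's mode).  */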
4132 expand_anon_union_decl (tree decl, tree cleanup, tree decl_elts)
4134 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
4138 /* If any of the elements are addressable, so is the entire union. */
4139 for (t = decl_elts; t; t = TREE_CHAIN (t))
4140 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4142 TREE_ADDRESSABLE (decl) = 1;
4147 expand_decl_cleanup (decl, cleanup);
4148 x = DECL_RTL (decl);
4150 /* Go through the elements, assigning RTL to each. */
4151 for (t = decl_elts; t; t = TREE_CHAIN (t))
4153 tree decl_elt = TREE_VALUE (t);
4154 tree cleanup_elt = TREE_PURPOSE (t);
4155 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4157 /* If any of the elements are used, so is the entire union. */
4159 if (TREE_USED (decl_elt))
4160 TREE_USED (decl) = 1;
4162 /* Propagate the union's alignment to the elements. */
4163 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4164 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
4166 /* If the element has BLKmode and the union doesn't, the union is
4167 aligned such that the element doesn't need to have BLKmode, so
4168 change the element's mode to the appropriate one for its size. */
4169 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4170 DECL_MODE (decl_elt) = mode
4171 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
4173 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4174 instead create a new MEM rtx with the proper mode. */
4175 if (GET_CODE (x) == MEM)
4177 if (mode == GET_MODE (x))
4178 SET_DECL_RTL (decl_elt, x);
4180 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
4182 else if (GET_CODE (x) == REG)
4184 if (mode == GET_MODE (x))
4185 SET_DECL_RTL (decl_elt, x);
4187 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
4192 /* Record the cleanup if there is one. */
4195 thisblock->data.block.cleanups
4196 = tree_cons (decl_elt, cleanup_elt,
4197 thisblock->data.block.cleanups);
4201 /* Expand a list of cleanups LIST.
4202 Elements may be expressions or may be nested lists.
4204 If IN_FIXUP is nonzero, we are generating this cleanup for a fixup
4205 goto and handle protection regions specially in that case.
4207 If REACHABLE, we emit code, otherwise just inform the exception handling
4208 code about this finalization. */
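/* A minimal sketch of the structure walked below (hypothetical contents):

       LIST = ((decl_a . CLEANUP_A)
               (NULL   . ((decl_b . CLEANUP_B))))      <- a nested TREE_LIST

   The loop recurses into nested TREE_LISTs and expands each cleanup
   expression it finds, subject to REACHABLE and CLEANUP_EH_ONLY.  */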
4211 expand_cleanups (tree list, int in_fixup, int reachable)
4214 for (tail = list; tail; tail = TREE_CHAIN (tail))
4215 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4216 expand_cleanups (TREE_VALUE (tail), in_fixup, reachable);
4219 if (! in_fixup && using_eh_for_cleanups_p)
4220 expand_eh_region_end_cleanup (TREE_VALUE (tail));
4222 if (reachable && !CLEANUP_EH_ONLY (tail))
4224 /* Cleanups may be run multiple times. For example,
4225 when exiting a binding contour, we expand the
4226 cleanups associated with that contour. When a goto
4227 within that binding contour has a target outside that
4228 contour, it will expand all cleanups from its scope to
4229 the target. Though the cleanups are expanded multiple
4230 times, the control paths are non-overlapping so the
4231 cleanups will not be executed twice. */
4233 /* We may need to protect from outer cleanups. */
4234 if (in_fixup && using_eh_for_cleanups_p)
4236 expand_eh_region_start ();
4238 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4240 expand_eh_region_end_fixup (TREE_VALUE (tail));
4243 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4250 /* Mark the context we are emitting RTL for as a conditional
4251 context, so that any cleanup actions we register with
4252 expand_decl_cleanup will be properly conditionalized when those
4253 cleanup actions are later performed. Must be called before any
4254 expression (tree) is expanded that is within a conditional context. */
4257 start_cleanup_deferral (void)
4259 /* block_stack can be NULL if we are inside the parameter list. It is
4260 OK to do nothing, because cleanups aren't possible here. */
4262 ++block_stack->data.block.conditional_code;
4265 /* Mark the end of a conditional region of code. Because cleanup
4266 deferrals may be nested, we may still be in a conditional region
4267 after we end the currently deferred cleanups; only after we end all
4268 deferred cleanups are we back in unconditional code. */
4271 end_cleanup_deferral (void)
4273 /* block_stack can be NULL if we are inside the parameter list. It is
4274 OK to do nothing, because cleanups aren't possible here. */
4276 --block_stack->data.block.conditional_code;
4280 last_cleanup_this_contour (void)
4282 if (block_stack == 0)
4285 return block_stack->data.block.cleanups;
4288 /* Return 1 if there are any pending cleanups at this point.
4289 Check the current contour as well as contours that enclose
4290 the current contour. */
4293 any_pending_cleanups (void)
4295 struct nesting *block;
4297 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4300 if (block_stack->data.block.cleanups != NULL)
4303 if (block_stack->data.block.outer_cleanups == 0)
4306 for (block = block_stack->next; block; block = block->next)
4307 if (block->data.block.cleanups != 0)
4313 /* Enter a case (Pascal) or switch (C) statement.
4314 Push a block onto case_stack and nesting_stack
4315 to accumulate the case-labels that are seen
4316 and to record the labels generated for the statement.
4318 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4319 Otherwise, this construct is transparent for `exit_something'.
4321 EXPR is the index-expression to be dispatched on.
4322 TYPE is its nominal type. We could simply convert EXPR to this type,
4323 but instead we take short cuts. */
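/* A rough sketch of the calling sequence a front end uses for

       switch (x) { case 1: ...; default: ...; }

   is (argument details are illustrative, not a definitive front end):

       expand_start_case (1, x, integer_type_node, "switch statement");
       ... pushcase () for each label while expanding the body ...
       expand_end_case_type (x, NULL_TREE);
*/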
4326 expand_start_case (int exit_flag, tree expr, tree type,
4327 const char *printname)
4329 struct nesting *thiscase = ALLOC_NESTING ();
4331 /* Make an entry on case_stack for the case we are entering. */
4333 thiscase->desc = CASE_NESTING;
4334 thiscase->next = case_stack;
4335 thiscase->all = nesting_stack;
4336 thiscase->depth = ++nesting_depth;
4337 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4338 thiscase->data.case_stmt.case_list = 0;
4339 thiscase->data.case_stmt.index_expr = expr;
4340 thiscase->data.case_stmt.nominal_type = type;
4341 thiscase->data.case_stmt.default_label = 0;
4342 thiscase->data.case_stmt.printname = printname;
4343 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4344 case_stack = thiscase;
4345 nesting_stack = thiscase;
4347 do_pending_stack_adjust ();
4350 /* Make sure case_stmt.start points to something that won't
4351 need any transformation before expand_end_case. */
4352 if (GET_CODE (get_last_insn ()) != NOTE)
4353 emit_note (NOTE_INSN_DELETED);
4355 thiscase->data.case_stmt.start = get_last_insn ();
4357 start_cleanup_deferral ();
4360 /* Start a "dummy case statement" within which case labels are invalid
4361 and are not connected to any larger real case statement.
4362 This can be used if you don't want to let a case statement jump
4363 into the middle of certain kinds of constructs. */
4366 expand_start_case_dummy (void)
4368 struct nesting *thiscase = ALLOC_NESTING ();
4370 /* Make an entry on case_stack for the dummy. */
4372 thiscase->desc = CASE_NESTING;
4373 thiscase->next = case_stack;
4374 thiscase->all = nesting_stack;
4375 thiscase->depth = ++nesting_depth;
4376 thiscase->exit_label = 0;
4377 thiscase->data.case_stmt.case_list = 0;
4378 thiscase->data.case_stmt.start = 0;
4379 thiscase->data.case_stmt.nominal_type = 0;
4380 thiscase->data.case_stmt.default_label = 0;
4381 case_stack = thiscase;
4382 nesting_stack = thiscase;
4383 start_cleanup_deferral ();
4387 check_seenlabel (void)
4389 /* If this is the first label, warn if any insns have been emitted. */
4390 if (case_stack->data.case_stmt.line_number_status >= 0)
4394 restore_line_number_status
4395 (case_stack->data.case_stmt.line_number_status);
4396 case_stack->data.case_stmt.line_number_status = -1;
4398 for (insn = case_stack->data.case_stmt.start;
4400 insn = NEXT_INSN (insn))
4402 if (GET_CODE (insn) == CODE_LABEL)
4404 if (GET_CODE (insn) != NOTE
4405 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4408 insn = PREV_INSN (insn);
4409 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4411 /* If insn is zero, then there must have been a syntax error. */
4415 locus.file = NOTE_SOURCE_FILE (insn);
4416 locus.line = NOTE_LINE_NUMBER (insn);
4417 warning ("%Hunreachable code at beginning of %s", &locus,
4418 case_stack->data.case_stmt.printname);
4426 /* Accumulate one case or default label inside a case or switch statement.
4427 VALUE is the value of the case (a null pointer, for a default label).
4428 The function CONVERTER, when applied to arguments T and V,
4429 converts the value V to the type T.
4431 If not currently inside a case or switch statement, return 1 and do
4432 nothing. The caller will print a language-specific error message.
4433 If VALUE is a duplicate or overlaps, return 2 and do nothing
4434 except store the (first) duplicate node in *DUPLICATE.
4435 If VALUE is out of range, return 3 and do nothing.
4436 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4437 Return 0 on success.
4439 Extended to handle range statements. */
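/* A sketch of how a (hypothetical) caller might act on the return value,
   with CONVERT_FN standing for its own value-conversion routine:

       switch (pushcase (val, CONVERT_FN, label, &duplicate))
         {
         case 1: error ("case label not within a switch statement"); break;
         case 2: error ("duplicate case value"); break;
         case 3: error ("case value out of range"); break;
         case 5: error ("case label within scope of cleanup or variable array"); break;
         }
*/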
4442 pushcase (tree value, tree (*converter) (tree, tree), tree label,
4448 /* Fail if not inside a real case statement. */
4449 if (! (case_stack && case_stack->data.case_stmt.start))
4452 if (stack_block_stack
4453 && stack_block_stack->depth > case_stack->depth)
4456 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4457 nominal_type = case_stack->data.case_stmt.nominal_type;
4459 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4460 if (index_type == error_mark_node)
4463 /* Convert VALUE to the type in which the comparisons are nominally done. */
4465 value = (*converter) (nominal_type, value);
4469 /* Fail if this value is out of range for the actual type of the index
4470 (which may be narrower than NOMINAL_TYPE). */
4472 && (TREE_CONSTANT_OVERFLOW (value)
4473 || ! int_fits_type_p (value, index_type)))
4476 return add_case_node (value, value, label, duplicate);
4479 /* Like pushcase but this case applies to all values between VALUE1 and
4480 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4481 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4482 starts at VALUE1 and ends at the highest value of the index type.
4483 If both are NULL, this case applies to all values.
4485 The return value is the same as that of pushcase but there is one
4486 additional error code: 4 means the specified range was empty. */
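/* For example, a front end implementing the GNU C range extension
   `case 1 ... 5:' can pass VALUE1 = 1 and VALUE2 = 5 here, while an
   open-ended range would pass a null VALUE1 or VALUE2 and pick up the
   index type's minimum or maximum below.  */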
4489 pushcase_range (tree value1, tree value2, tree (*converter) (tree, tree),
4490 tree label, tree *duplicate)
4495 /* Fail if not inside a real case statement. */
4496 if (! (case_stack && case_stack->data.case_stmt.start))
4499 if (stack_block_stack
4500 && stack_block_stack->depth > case_stack->depth)
4503 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4504 nominal_type = case_stack->data.case_stmt.nominal_type;
4506 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4507 if (index_type == error_mark_node)
4512 /* Convert VALUEs to type in which the comparisons are nominally done
4513 and replace any unspecified value with the corresponding bound. */
4515 value1 = TYPE_MIN_VALUE (index_type);
4517 value2 = TYPE_MAX_VALUE (index_type);
4519 /* Fail if the range is empty. Do this before any conversion since
4520 we want to allow out-of-range empty ranges. */
4521 if (value2 != 0 && tree_int_cst_lt (value2, value1))
4524 /* If the max was unbounded, use the max of the nominal_type we are
4525 converting to. Do this after the < check above to suppress false positives. */
4528 value2 = TYPE_MAX_VALUE (nominal_type);
4530 value1 = (*converter) (nominal_type, value1);
4531 value2 = (*converter) (nominal_type, value2);
4533 /* Fail if these values are out of range. */
4534 if (TREE_CONSTANT_OVERFLOW (value1)
4535 || ! int_fits_type_p (value1, index_type))
4538 if (TREE_CONSTANT_OVERFLOW (value2)
4539 || ! int_fits_type_p (value2, index_type))
4542 return add_case_node (value1, value2, label, duplicate);
4545 /* Do the actual insertion of a case label for pushcase and pushcase_range
4546 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4547 slowdown for large switch statements. */
4550 add_case_node (tree low, tree high, tree label, tree *duplicate)
4552 struct case_node *p, **q, *r;
4554 /* If there's no HIGH value, then this is not a case range; it's
4555 just a simple case label. But that's just a degenerate case range. */
4560 /* Handle default labels specially. */
4563 if (case_stack->data.case_stmt.default_label != 0)
4565 *duplicate = case_stack->data.case_stmt.default_label;
4568 case_stack->data.case_stmt.default_label = label;
4569 expand_label (label);
4573 q = &case_stack->data.case_stmt.case_list;
4580 /* Keep going past elements distinctly greater than HIGH. */
4581 if (tree_int_cst_lt (high, p->low))
4584 /* or distinctly less than LOW. */
4585 else if (tree_int_cst_lt (p->high, low))
4590 /* We have an overlap; this is an error. */
4591 *duplicate = p->code_label;
4596 /* Add this label to the chain, and succeed. */
4598 r = (struct case_node *) ggc_alloc (sizeof (struct case_node));
4601 /* If the bounds are equal, turn this into the one-value case. */
4602 if (tree_int_cst_equal (low, high))
4607 r->code_label = label;
4608 expand_label (label);
4618 struct case_node *s;
4624 if (! (b = p->balance))
4625 /* Growth propagation from left side. */
4632 if ((p->left = s = r->right))
4641 if ((r->parent = s))
4649 case_stack->data.case_stmt.case_list = r;
4652 /* r->balance == +1 */
4657 struct case_node *t = r->right;
4659 if ((p->left = s = t->right))
4663 if ((r->right = s = t->left))
4677 if ((t->parent = s))
4685 case_stack->data.case_stmt.case_list = t;
4692 /* p->balance == +1; growth of left side balances the node. */
4702 if (! (b = p->balance))
4703 /* Growth propagation from right side. */
4711 if ((p->right = s = r->left))
4719 if ((r->parent = s))
4728 case_stack->data.case_stmt.case_list = r;
4732 /* r->balance == -1 */
4736 struct case_node *t = r->left;
4738 if ((p->right = s = t->left))
4743 if ((r->left = s = t->right))
4757 if ((t->parent = s))
4766 case_stack->data.case_stmt.case_list = t;
4772 /* p->balance == -1; growth of right side balances the node. */
4785 /* Returns the number of possible values of TYPE.
4786 Returns -1 if the number is unknown, variable, or if the number does not
4787 fit in a HOST_WIDE_INT.
4788 Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4789 do not increase monotonically (there may be duplicates);
4790 to 1 if the values increase monotonically, but not always by 1;
4791 otherwise sets it to 0. */
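/* For example (a sketch, assuming the enum's TYPE_MIN/MAX_VALUE are its
   smallest and largest enumerators):

       enum e1 { A0 = 0, A1 = 1, A2 = 2 };   returns 3,  *SPARSENESS = 0
       enum e2 { B0 = 0, B1 = 4, B2 = 9 };   returns 10, *SPARSENESS = 1
       enum e3 { C0 = 0, C1 = 5, C2 = 2 };   values not monotonic,
                                             *SPARSENESS = 2  */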
4794 all_cases_count (tree type, int *sparseness)
4797 HOST_WIDE_INT count, minval, lastval;
4801 switch (TREE_CODE (type))
4808 count = 1 << BITS_PER_UNIT;
4813 if (TYPE_MAX_VALUE (type) != 0
4814 && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
4815 TYPE_MIN_VALUE (type))))
4816 && 0 != (t = fold (build (PLUS_EXPR, type, t,
4817 convert (type, integer_zero_node))))
4818 && host_integerp (t, 1))
4819 count = tree_low_cst (t, 1);
4825 /* Don't waste time with enumeral types with huge values. */
4826 if (! host_integerp (TYPE_MIN_VALUE (type), 0)
4827 || TYPE_MAX_VALUE (type) == 0
4828 || ! host_integerp (TYPE_MAX_VALUE (type), 0))
4831 lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
4834 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4836 HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
4838 if (*sparseness == 2 || thisval <= lastval)
4840 else if (thisval != minval + count)
4851 #define BITARRAY_TEST(ARRAY, INDEX) \
4852 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4853 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4854 #define BITARRAY_SET(ARRAY, INDEX) \
4855 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4856 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
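/* For instance, with HOST_BITS_PER_CHAR == 8, BITARRAY_SET (seen, 11)
   sets bit 3 of seen[1], and BITARRAY_TEST (seen, 11) then yields a
   nonzero value; every other index still tests as zero if the array was
   cleared beforehand.  */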
4858 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4859 with the case values we have seen, assuming the case expression has the given TYPE.
4861 SPARSENESS is as determined by all_cases_count.
4863 The time needed is proportional to COUNT, unless
4864 SPARSENESS is 2, in which case quadratic time is needed. */
4867 mark_seen_cases (tree type, unsigned char *cases_seen, HOST_WIDE_INT count,
4870 tree next_node_to_try = NULL_TREE;
4871 HOST_WIDE_INT next_node_offset = 0;
4873 struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4874 tree val = make_node (INTEGER_CST);
4876 TREE_TYPE (val) = type;
4880 else if (sparseness == 2)
4883 unsigned HOST_WIDE_INT xlo;
4885 /* This less efficient loop is only needed to handle
4886 duplicate case values (multiple enum constants
4887 with the same value). */
4888 TREE_TYPE (val) = TREE_TYPE (root->low);
4889 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4890 t = TREE_CHAIN (t), xlo++)
4892 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4893 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4897 /* Keep going past elements distinctly greater than VAL. */
4898 if (tree_int_cst_lt (val, n->low))
4901 /* or distinctly less than VAL. */
4902 else if (tree_int_cst_lt (n->high, val))
4907 /* We have found a matching range. */
4908 BITARRAY_SET (cases_seen, xlo);
4918 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
4920 for (n = root; n; n = n->right)
4922 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
4923 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
4924 while (! tree_int_cst_lt (n->high, val))
4926 /* Calculate (into xlo) the "offset" of the integer (val).
4927 The element with lowest value has offset 0, the next smallest
4928 element has offset 1, etc. */
4930 unsigned HOST_WIDE_INT xlo;
4934 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
4936 /* The TYPE_VALUES will be in increasing order, so
4937 start searching where we last ended. */
4938 t = next_node_to_try;
4939 xlo = next_node_offset;
4945 t = TYPE_VALUES (type);
4948 if (tree_int_cst_equal (val, TREE_VALUE (t)))
4950 next_node_to_try = TREE_CHAIN (t);
4951 next_node_offset = xlo + 1;
4956 if (t == next_node_to_try)
4965 t = TYPE_MIN_VALUE (type);
4967 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
4971 add_double (xlo, xhi,
4972 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4976 if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
4977 BITARRAY_SET (cases_seen, xlo);
4979 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
4981 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
4987 /* Given a switch statement with an expression that is an enumeration
4988 type, warn if any of the enumeration type's literals are not
4989 covered by the case expressions of the switch. Also, warn if there
4990 are any extra switch cases that are *not* elements of the enumerated type.
4995 At one stage this function would: ``If all enumeration literals
4996 were covered by the case expressions, turn one of the expressions
4997 into the default expression since it should not be possible to fall
4998 through such a switch.''
5000 That code has since been removed as: ``This optimization is
5001 disabled because it causes valid programs to fail. ANSI C does not
5002 guarantee that an expression with enum type will have a value that
5003 is the same as one of the enumeration literals.'' */
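/* For example, given

       enum color { RED, GREEN, BLUE };
       switch (c) { case RED: ...; case GREEN: ...; }

   this function warns that `BLUE' is not handled in the switch, and a
   stray `case 42:' would draw the "not in enumerated type" warning
   emitted below.  */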
5006 check_for_full_enumeration_handling (tree type)
5008 struct case_node *n;
5011 /* True iff the selector type is a numbered set mode. */
5014 /* The number of possible selector values. */
5017 /* For each possible selector value, a one iff it has been matched
5018 by a case value alternative. */
5019 unsigned char *cases_seen;
5021 /* The allocated size of cases_seen, in chars. */
5022 HOST_WIDE_INT bytes_needed;
5024 size = all_cases_count (type, &sparseness);
5025 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5027 if (size > 0 && size < 600000
5028 /* We deliberately use calloc here, not xcalloc, so that we can suppress
5029 this optimization if we don't have enough memory rather than
5030 aborting, as xmalloc would do. */
5032 (unsigned char *) really_call_calloc (bytes_needed, 1)) != NULL)
5035 tree v = TYPE_VALUES (type);
5037 /* The time complexity of this code is normally O(N), where
5038 N is the number of members in the enumerated type.
5039 However, if type is an ENUMERAL_TYPE whose values do not
5040 increase monotonically, O(N*log(N)) time may be needed. */
5042 mark_seen_cases (type, cases_seen, size, sparseness);
5044 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5045 if (BITARRAY_TEST (cases_seen, i) == 0)
5046 warning ("enumeration value `%s' not handled in switch",
5047 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5052 /* Now we go the other way around; we warn if there are case
5053 expressions that don't correspond to enumerators. This can
5054 occur since C and C++ don't enforce type-checking of
5055 assignments to enumeration variables. */
5057 if (case_stack->data.case_stmt.case_list
5058 && case_stack->data.case_stmt.case_list->left)
5059 case_stack->data.case_stmt.case_list
5060 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5061 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5063 for (chain = TYPE_VALUES (type);
5064 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5065 chain = TREE_CHAIN (chain))
5070 if (TYPE_NAME (type) == 0)
5071 warning ("case value `%ld' not in enumerated type",
5072 (long) TREE_INT_CST_LOW (n->low));
5074 warning ("case value `%ld' not in enumerated type `%s'",
5075 (long) TREE_INT_CST_LOW (n->low),
5076 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5079 : DECL_NAME (TYPE_NAME (type))));
5081 if (!tree_int_cst_equal (n->low, n->high))
5083 for (chain = TYPE_VALUES (type);
5084 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5085 chain = TREE_CHAIN (chain))
5090 if (TYPE_NAME (type) == 0)
5091 warning ("case value `%ld' not in enumerated type",
5092 (long) TREE_INT_CST_LOW (n->high));
5094 warning ("case value `%ld' not in enumerated type `%s'",
5095 (long) TREE_INT_CST_LOW (n->high),
5096 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5099 : DECL_NAME (TYPE_NAME (type))));
5106 /* Maximum number of case bit tests. */
5107 #define MAX_CASE_BIT_TESTS 3
5109 /* By default, enable case bit tests on targets with ashlsi3. */
5110 #ifndef CASE_USE_BIT_TESTS
5111 #define CASE_USE_BIT_TESTS (ashl_optab->handlers[word_mode].insn_code \
5112 != CODE_FOR_nothing)
5116 /* A case_bit_test represents a set of case nodes that may be
5117 selected from using a bit-wise comparison. HI and LO hold
5118 the integer to be tested against, LABEL contains the label
5119 to jump to upon success and BITS counts the number of case
5120 nodes handled by this test, typically the number of bits set in HI and LO. */
5123 struct case_bit_test
5131 /* Determine whether "1 << x" is relatively cheap in word_mode. */
5133 static bool lshift_cheap_p (void)
5135 static bool init = false;
5136 static bool cheap = true;
5140 rtx reg = gen_rtx_REG (word_mode, 10000);
5141 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
5142 cheap = cost < COSTS_N_INSNS (3);
5149 /* Comparison function for qsort to order bit tests by decreasing
5150 number of case nodes, i.e. the node with the most cases gets tested first. */
5153 static int case_bit_test_cmp (const void *p1, const void *p2)
5157 const struct case_bit_test *d1 = p1;
5158 const struct case_bit_test *d2 = p2;
5160 return d2->bits - d1->bits;
5163 /* Expand a switch statement by a short sequence of bit-wise
5164 comparisons. "switch(x)" is effectively converted into
5165 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
5168 INDEX_EXPR is the value being switched on, which is of
5169 type INDEX_TYPE. MINVAL is the lowest case value in
5170 the case nodes, of INDEX_TYPE type, and RANGE is the highest
5171 value minus MINVAL, also of type INDEX_TYPE. NODES is
5172 the set of case nodes, and DEFAULT_LABEL is the label to
5173 branch to should none of the cases match.
5175 There *MUST* be MAX_CASE_BIT_TESTS or fewer unique case node targets. */
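/* A small worked example: for

       switch (x) { case 4: case 6: case 9: /* L1 */ ...
                    default: /* L2 */ ... }

   MINVAL is 4, RANGE is 5, and the single bit test built below amounts to

       if ((1 << (x - 4)) & ((1 << 0) | (1 << 2) | (1 << 5))) goto L1;
       goto L2;

   preceded by an unsigned range check that sends x - 4 > 5 to L2.  */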
5179 emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
5180 tree range, case_node_ptr nodes, rtx default_label)
5182 struct case_bit_test test[MAX_CASE_BIT_TESTS];
5183 enum machine_mode mode;
5184 rtx expr, index, label;
5185 unsigned int i, j, lo, hi;
5186 struct case_node *n;
5190 for (n = nodes; n; n = n->right)
5192 label = label_rtx (n->code_label);
5193 for (i = 0; i < count; i++)
5194 if (same_case_target_p (label, test[i].label))
5199 if (count >= MAX_CASE_BIT_TESTS)
5203 test[i].label = label;
5210 lo = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5211 n->low, minval)), 1);
5212 hi = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5213 n->high, minval)), 1);
5214 for (j = lo; j <= hi; j++)
5215 if (j >= HOST_BITS_PER_WIDE_INT)
5216 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_WIDE_INT);
5218 test[i].lo |= (HOST_WIDE_INT) 1 << j;
5221 qsort (test, count, sizeof(*test), case_bit_test_cmp);
5223 index_expr = fold (build (MINUS_EXPR, index_type,
5224 convert (index_type, index_expr),
5225 convert (index_type, minval)));
5226 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5228 index = protect_from_queue (index, 0);
5229 do_pending_stack_adjust ();
5231 mode = TYPE_MODE (index_type);
5232 expr = expand_expr (range, NULL_RTX, VOIDmode, 0);
5233 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
5236 index = convert_to_mode (word_mode, index, 0);
5237 index = expand_binop (word_mode, ashl_optab, const1_rtx,
5238 index, NULL_RTX, 1, OPTAB_WIDEN);
5240 for (i = 0; i < count; i++)
5242 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
5243 expr = expand_binop (word_mode, and_optab, index, expr,
5244 NULL_RTX, 1, OPTAB_WIDEN);
5245 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
5246 word_mode, 1, test[i].label);
5249 emit_jump (default_label);
5252 /* Terminate a case (Pascal) or switch (C) statement
5253 in which ORIG_INDEX is the expression to be tested.
5254 If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
5255 type as given in the source before any compiler conversions.
5256 Generate the code to test it and jump to the right place. */
5259 expand_end_case_type (tree orig_index, tree orig_type)
5261 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
5262 rtx default_label = 0;
5263 struct case_node *n, *m;
5264 unsigned int count, uniq;
5270 rtx before_case, end, lab;
5271 struct nesting *thiscase = case_stack;
5272 tree index_expr, index_type;
5273 bool exit_done = false;
5276 /* Don't crash due to previous errors. */
5277 if (thiscase == NULL)
5280 index_expr = thiscase->data.case_stmt.index_expr;
5281 index_type = TREE_TYPE (index_expr);
5282 unsignedp = TREE_UNSIGNED (index_type);
5283 if (orig_type == NULL)
5284 orig_type = TREE_TYPE (orig_index);
5286 do_pending_stack_adjust ();
5288 /* This might get a spurious warning in the presence of a syntax error;
5289 it could be fixed by moving the call to check_seenlabel after the
5290 check for error_mark_node, and copying the code of check_seenlabel that
5291 deals with case_stack->data.case_stmt.line_number_status /
5292 restore_line_number_status in front of the call to end_cleanup_deferral.
5293 However, this might miss some useful warnings in the presence of
5294 non-syntax errors. */
5297 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5298 if (index_type != error_mark_node)
5300 /* If the switch expression was an enumerated type, check that
5301 all enumeration literals are covered by the cases.
5302 The check is made when -Wswitch was specified and there is no
5303 default case, or when -Wswitch-enum was specified. */
5304 if (((warn_switch && !thiscase->data.case_stmt.default_label)
5305 || warn_switch_enum)
5306 && TREE_CODE (orig_type) == ENUMERAL_TYPE
5307 && TREE_CODE (index_expr) != INTEGER_CST)
5308 check_for_full_enumeration_handling (orig_type);
5310 if (warn_switch_default && !thiscase->data.case_stmt.default_label)
5311 warning ("switch missing default case");
5313 /* If we don't have a default-label, create one here,
5314 after the body of the switch. */
5315 if (thiscase->data.case_stmt.default_label == 0)
5317 thiscase->data.case_stmt.default_label
5318 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5319 /* Share the exit label if possible. */
5320 if (thiscase->exit_label)
5322 SET_DECL_RTL (thiscase->data.case_stmt.default_label,
5323 thiscase->exit_label);
5326 expand_label (thiscase->data.case_stmt.default_label);
5328 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5330 before_case = get_last_insn ();
5332 if (thiscase->data.case_stmt.case_list
5333 && thiscase->data.case_stmt.case_list->left)
5334 thiscase->data.case_stmt.case_list
5335 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5337 /* Simplify the case-list before we count it. */
5338 group_case_nodes (thiscase->data.case_stmt.case_list);
5339 strip_default_case_nodes (&thiscase->data.case_stmt.case_list,
5342 /* Get upper and lower bounds of case values.
5343 Also convert all the case values to the index expr's data type. */
5347 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5349 /* Check low and high label values are integers. */
5350 if (TREE_CODE (n->low) != INTEGER_CST)
5352 if (TREE_CODE (n->high) != INTEGER_CST)
5355 n->low = convert (index_type, n->low);
5356 n->high = convert (index_type, n->high);
5358 /* Count the elements and track the largest and smallest
5359 of them (treating them as signed even if they are not). */
5367 if (INT_CST_LT (n->low, minval))
5369 if (INT_CST_LT (maxval, n->high))
5372 /* A range counts double, since it requires two compares. */
5373 if (! tree_int_cst_equal (n->low, n->high))
5376 /* Count the number of unique case node targets. */
5378 lab = label_rtx (n->code_label);
5379 for (m = thiscase->data.case_stmt.case_list; m != n; m = m->right)
5380 if (same_case_target_p (label_rtx (m->code_label), lab))
5387 /* Compute span of values. */
5389 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5391 end_cleanup_deferral ();
5395 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5397 emit_jump (default_label);
5400 /* Try implementing this switch statement by a short sequence of
5401 bit-wise comparisons. However, we let the binary-tree case
5402 below handle constant index expressions. */
5403 else if (CASE_USE_BIT_TESTS
5404 && ! TREE_CONSTANT (index_expr)
5405 && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
5406 && compare_tree_int (range, 0) > 0
5407 && lshift_cheap_p ()
5408 && ((uniq == 1 && count >= 3)
5409 || (uniq == 2 && count >= 5)
5410 || (uniq == 3 && count >= 6)))
5412 /* If all the case values fit in a word even without
5413 subtracting MINVAL, we can omit the subtraction
5414 altogether. */
5415 if (compare_tree_int (minval, 0) > 0
5416 && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
5418 minval = integer_zero_node;
5421 emit_case_bit_tests (index_type, index_expr, minval, range,
5422 thiscase->data.case_stmt.case_list,
5426 /* If range of values is much bigger than number of values,
5427 make a sequence of conditional branches instead of a dispatch.
5428 If the switch-index is a constant, do it this way
5429 because we can optimize it. */
5431 else if (count < case_values_threshold ()
5432 || compare_tree_int (range, 10 * count) > 0
5433 /* RANGE may be signed, and really large ranges will show up
5434 as negative numbers. */
5435 || compare_tree_int (range, 0) < 0
5436 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5439 || TREE_CONSTANT (index_expr))
5441 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5443 /* If the index is a short or char for which we do not have
5444 an insn to handle comparisons directly, convert it to
5445 a full integer now, rather than letting each comparison
5446 generate the conversion. */
5448 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5449 && ! have_insn_for (COMPARE, GET_MODE (index)))
5451 enum machine_mode wider_mode;
5452 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5453 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5454 if (have_insn_for (COMPARE, wider_mode))
5456 index = convert_to_mode (wider_mode, index, unsignedp);
5462 do_pending_stack_adjust ();
5464 index = protect_from_queue (index, 0);
5465 if (GET_CODE (index) == MEM)
5466 index = copy_to_reg (index);
5467 if (GET_CODE (index) == CONST_INT
5468 || TREE_CODE (index_expr) == INTEGER_CST)
5470 /* Make a tree node with the proper constant value
5471 if we don't already have one. */
5472 if (TREE_CODE (index_expr) != INTEGER_CST)
5475 = build_int_2 (INTVAL (index),
5476 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5477 index_expr = convert (index_type, index_expr);
5480 /* For constant index expressions we need only
5481 issue an unconditional branch to the appropriate
5482 target code. The job of removing any unreachable
5483 code is left to the optimization phase if the
5484 "-O" option is specified. */
5485 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5486 if (! tree_int_cst_lt (index_expr, n->low)
5487 && ! tree_int_cst_lt (n->high, index_expr))
5491 emit_jump (label_rtx (n->code_label));
5493 emit_jump (default_label);
5497 /* If the index expression is not constant we generate
5498 a binary decision tree to select the appropriate
5499 target code. This is done as follows:
5501 The list of cases is rearranged into a binary tree,
5502 nearly optimal assuming equal probability for each case.
5504 The tree is transformed into RTL, eliminating
5505 redundant test conditions at the same time.
5507 If program flow could reach the end of the
5508 decision tree an unconditional jump to the
5509 default code is emitted. */
5512 = (TREE_CODE (orig_type) != ENUMERAL_TYPE
5513 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5514 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
5515 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5516 default_label, index_type);
5517 emit_jump_if_reachable (default_label);
5522 table_label = gen_label_rtx ();
5523 if (! try_casesi (index_type, index_expr, minval, range,
5524 table_label, default_label))
5526 index_type = thiscase->data.case_stmt.nominal_type;
5528 /* Index jumptables from zero for suitable values of
5529 minval to avoid a subtraction. */
5531 && compare_tree_int (minval, 0) > 0
5532 && compare_tree_int (minval, 3) < 0)
5534 minval = integer_zero_node;
5538 if (! try_tablejump (index_type, index_expr, minval, range,
5539 table_label, default_label))
5543 /* Get table of labels to jump to, in order of case index. */
5545 ncases = tree_low_cst (range, 0) + 1;
5546 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5547 memset ((char *) labelvec, 0, ncases * sizeof (rtx));
5549 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5551 /* Compute the low and high bounds relative to the minimum
5552 value since that should fit in a HOST_WIDE_INT while the
5553 actual values may not. */
5555 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5556 n->low, minval)), 1);
5557 HOST_WIDE_INT i_high
5558 = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5559 n->high, minval)), 1);
5562 for (i = i_low; i <= i_high; i ++)
5564 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5567 /* Fill in the gaps with the default. */
5568 for (i = 0; i < ncases; i++)
5569 if (labelvec[i] == 0)
5570 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5572 /* Output the table. */
5573 emit_label (table_label);
5575 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5576 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5577 gen_rtx_LABEL_REF (Pmode, table_label),
5578 gen_rtvec_v (ncases, labelvec),
5579 const0_rtx, const0_rtx));
5581 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5582 gen_rtvec_v (ncases, labelvec)));
5584 /* If the case insn drops through the table,
5585 after the table we must jump to the default-label.
5586 Otherwise record no drop-through after the table. */
5587 #ifdef CASE_DROPS_THROUGH
5588 emit_jump (default_label);
5594 before_case = NEXT_INSN (before_case);
5595 end = get_last_insn ();
5596 if (squeeze_notes (&before_case, &end))
5598 reorder_insns (before_case, end,
5599 thiscase->data.case_stmt.start);
5602 end_cleanup_deferral ();
5604 if (thiscase->exit_label && !exit_done)
5605 emit_label (thiscase->exit_label);
5607 POPSTACK (case_stack);
5612 /* Convert the tree NODE into a list linked by the right field, with the left
5613 field zeroed. RIGHT is used for recursion; it is a list to be placed
5614 rightmost in the resulting list. */
5616 static struct case_node *
5617 case_tree2list (struct case_node *node, struct case_node *right)
5619 struct case_node *left;
5622 right = case_tree2list (node->right, right);
5624 node->right = right;
5625 if ((left = node->left))
5628 return case_tree2list (left, node);
5634 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5637 do_jump_if_equal (rtx op1, rtx op2, rtx label, int unsignedp)
5639 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
5645 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
5646 (GET_MODE (op1) == VOIDmode
5647 ? GET_MODE (op2) : GET_MODE (op1)),
5651 /* Not all case values are encountered equally. This function
5652 uses a heuristic to weight case labels, in cases where that
5653 looks like a reasonable thing to do.
5655 Right now, all we try to guess is text, and we establish the following weights:
5658 chars above space: 16
5667 If we find any cases in the switch that are not either -1 or in the range
5668 of valid ASCII characters, or are control characters other than those
5669 commonly used with "\", don't treat this switch scanning text.
5671 Return 1 if these nodes are suitable for cost estimation, otherwise return 0. */
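/* For instance, a text-scanning switch such as

       switch (*p) { case 'a' ... 'z': ...; case ' ': ...; case '\n': ...; }

   matches this heuristic: per the table built below, the letters get
   weight 16, the space 8 and the newline 2, and those weights are what
   balance_case_nodes uses when it splits the case list by cost.  */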
5675 estimate_case_costs (case_node_ptr node)
5677 tree min_ascii = integer_minus_one_node;
5678 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5682 /* If we haven't already made the cost table, make it now. Note that the
5683 lower bound of the table is -1, not zero. */
5685 if (! cost_table_initialized)
5687 cost_table_initialized = 1;
5689 for (i = 0; i < 128; i++)
5692 COST_TABLE (i) = 16;
5693 else if (ISPUNCT (i))
5695 else if (ISCNTRL (i))
5696 COST_TABLE (i) = -1;
5699 COST_TABLE (' ') = 8;
5700 COST_TABLE ('\t') = 4;
5701 COST_TABLE ('\0') = 4;
5702 COST_TABLE ('\n') = 2;
5703 COST_TABLE ('\f') = 1;
5704 COST_TABLE ('\v') = 1;
5705 COST_TABLE ('\b') = 1;
5708 /* See if all the case expressions look like text. It is text if the
5709 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5710 as signed arithmetic since we don't want to ever access cost_table with a
5711 value less than -1. Also check that none of the constants in a range
5712 are strange control characters. */
5714 for (n = node; n; n = n->right)
5716 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5719 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
5720 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
5721 if (COST_TABLE (i) < 0)
5725 /* All interesting values are within the range of interesting
5726 ASCII characters. */
5730 /* Determine whether two case labels branch to the same target. */
5733 same_case_target_p (rtx l1, rtx l2)
5740 i1 = next_real_insn (l1);
5741 i2 = next_real_insn (l2);
5745 if (i1 && simplejump_p (i1))
5747 l1 = XEXP (SET_SRC (PATTERN (i1)), 0);
5750 if (i2 && simplejump_p (i2))
5752 l2 = XEXP (SET_SRC (PATTERN (i2)), 0);
5757 /* Delete nodes that branch to the default label from a list of
5758 case nodes. Eg. case 5: default: becomes just default: */
5761 strip_default_case_nodes (case_node_ptr *prev, rtx deflab)
5768 if (same_case_target_p (label_rtx (ptr->code_label), deflab))
5775 /* Scan an ordered list of case nodes
5776 combining those with consecutive values or ranges.
5778 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
5781 group_case_nodes (case_node_ptr head)
5783 case_node_ptr node = head;
5787 rtx lab = label_rtx (node->code_label);
5788 case_node_ptr np = node;
5790 /* Try to group the successors of NODE with NODE. */
5791 while (((np = np->right) != 0)
5792 /* Do they jump to the same place? */
5793 && same_case_target_p (label_rtx (np->code_label), lab)
5794 /* Are their ranges consecutive? */
5795 && tree_int_cst_equal (np->low,
5796 fold (build (PLUS_EXPR,
5797 TREE_TYPE (node->high),
5800 /* An overflow is not consecutive. */
5801 && tree_int_cst_lt (node->high,
5802 fold (build (PLUS_EXPR,
5803 TREE_TYPE (node->high),
5805 integer_one_node))))
5807 node->high = np->high;
5809 /* NP is the first node after NODE which can't be grouped with it.
5810 Delete the nodes in between, and move on to that node. */
5816 /* Take an ordered list of case nodes
5817 and transform them into a near optimal binary tree,
5818 on the assumption that any target code selection value is as
5819 likely as any other.
5821 The transformation is performed by splitting the ordered
5822 list into two equal sections plus a pivot. The parts are
5823 then attached to the pivot as left and right branches. Each
5824 branch is then transformed recursively. */
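/* For example, an ordered chain of seven single-valued nodes

       1 - 2 - 3 - 4 - 5 - 6 - 7

   is split at 4, which becomes the subtree root; 1-2-3 and 5-6-7 are then
   balanced recursively as its left and right branches.  (Weights from the
   cost table, when present, shift the split point.)  */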
5827 balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
5840 /* Count the number of entries on branch. Also count the ranges. */
5844 if (!tree_int_cst_equal (np->low, np->high))
5848 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
5852 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
5860 /* Split this list if it is long enough for that to help. */
5865 /* Find the place in the list that bisects the list's total cost;
5866 here I gets half the total cost. */
5871 /* Skip nodes while their cost does not reach that amount. */
5872 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5873 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
5874 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
5877 npp = &(*npp)->right;
5882 /* Leave this branch lopsided, but optimize left-hand
5883 side and fill in `parent' fields for right-hand side. */
5885 np->parent = parent;
5886 balance_case_nodes (&np->left, np);
5887 for (; np->right; np = np->right)
5888 np->right->parent = np;
5892 /* If there are just three nodes, split at the middle one. */
5894 npp = &(*npp)->right;
5897 /* Find the place in the list that bisects the list's total cost,
5898 where ranges count as 2.
5899 Here I gets half the total cost. */
5900 i = (i + ranges + 1) / 2;
5903 /* Skip nodes while their cost does not reach that amount. */
5904 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5909 npp = &(*npp)->right;
5914 np->parent = parent;
5917 /* Optimize each of the two split parts. */
5918 balance_case_nodes (&np->left, np);
5919 balance_case_nodes (&np->right, np);
5923 /* Else leave this branch as one level,
5924 but fill in `parent' fields. */
5926 np->parent = parent;
5927 for (; np->right; np = np->right)
5928 np->right->parent = np;
5933 /* Search the parent sections of the case node tree
5934 to see if a test for the lower bound of NODE would be redundant.
5935 INDEX_TYPE is the type of the index expression.
5937 The instructions to generate the case decision tree are
5938 output in the same order as nodes are processed so it is
5939 known that if a parent node checks the range of the current
5940 node minus one that the current node is bounded at its lower
5941 span. Thus the test would be redundant. */
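/* For instance, if a parent node already tested against 50 and branched
   away on the "greater" side, and this node's low bound is 51, the
   explicit lower-bound check for this node can be omitted: values below
   51 can no longer reach it.  */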
5944 node_has_low_bound (case_node_ptr node, tree index_type)
5947 case_node_ptr pnode;
5949 /* If the lower bound of this node is the lowest value in the index type,
5950 we need not test it. */
5952 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5955 /* If this node has a left branch, the value at the left must be less
5956 than that at this node, so it cannot be bounded at the bottom and
5957 we need not bother testing any further. */
5962 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5963 node->low, integer_one_node));
5965 /* If the subtraction above overflowed, we can't verify anything.
5966 Otherwise, look for a parent that tests our value - 1. */
5968 if (! tree_int_cst_lt (low_minus_one, node->low))
5971 for (pnode = node->parent; pnode; pnode = pnode->parent)
5972 if (tree_int_cst_equal (low_minus_one, pnode->high))
5978 /* Search the parent sections of the case node tree
5979 to see if a test for the upper bound of NODE would be redundant.
5980 INDEX_TYPE is the type of the index expression.
5982 The instructions to generate the case decision tree are
5983 output in the same order as nodes are processed so it is
5984 known that if a parent node checks the range of the current
5985 node plus one that the current node is bounded at its upper
5986 span. Thus the test would be redundant. */
5989 node_has_high_bound (case_node_ptr node, tree index_type)
5992 case_node_ptr pnode;
5994 /* If there is no upper bound, obviously no test is needed. */
5996 if (TYPE_MAX_VALUE (index_type) == NULL)
5999 /* If the upper bound of this node is the highest value in the type
6000 of the index expression, we need not test against it. */
6002 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
6005 /* If this node has a right branch, the value at the right must be greater
6006 than that at this node, so it cannot be bounded at the top and
6007 we need not bother testing any further. */
6012 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
6013 node->high, integer_one_node));
6015 /* If the addition above overflowed, we can't verify anything.
6016 Otherwise, look for a parent that tests our value + 1. */
6018 if (! tree_int_cst_lt (node->high, high_plus_one))
6021 for (pnode = node->parent; pnode; pnode = pnode->parent)
6022 if (tree_int_cst_equal (high_plus_one, pnode->low))
6028 /* Search the parent sections of the
6029 case node tree to see if both tests for the upper and lower
6030 bounds of NODE would be redundant. */
6033 node_is_bounded (case_node_ptr node, tree index_type)
6035 return (node_has_low_bound (node, index_type)
6036 && node_has_high_bound (node, index_type));
6039 /* Emit an unconditional jump to LABEL unless it would be dead code. */
6042 emit_jump_if_reachable (rtx label)
6044 if (GET_CODE (get_last_insn ()) != BARRIER)
6048 /* Emit step-by-step code to select a case for the value of INDEX.
6049 The thus generated decision tree follows the form of the
6050 case-node binary tree NODE, whose nodes represent test conditions.
6051 INDEX_TYPE is the type of the index of the switch.
6053 Care is taken to prune redundant tests from the decision tree
6054 by detecting any boundary conditions already checked by
6055 emitted rtx. (See node_has_high_bound, node_has_low_bound
6056 and node_is_bounded, above.)
6058 Where the test conditions can be shown to be redundant we emit
6059 an unconditional jump to the target code. As a further
6060 optimization, the subordinates of a tree node are examined to
6061 check for bounded nodes. In this case conditional and/or
6062 unconditional jumps as a result of the boundary check for the
6063 current node are arranged to target the subordinates' associated
6064 code for out of bound conditions on the current node.
6066 We can assume that when control reaches the code generated here,
6067 the index value has already been compared with the parents
6068 of this node, and determined to be on the same side of each parent
6069 as this node is. Thus, if this node tests for the value 51,
6070 and a parent tested for 52, we don't need to consider
6071 the possibility of a value greater than 51. If another parent
6072 tests for the value 50, then this node need not test anything. */
6075 emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
6078 /* If INDEX has an unsigned type, we must make unsigned branches. */
6079 int unsignedp = TREE_UNSIGNED (index_type);
6080 enum machine_mode mode = GET_MODE (index);
6081 enum machine_mode imode = TYPE_MODE (index_type);
6083 /* See if our parents have already tested everything for us.
6084 If they have, emit an unconditional jump for this node. */
6085 if (node_is_bounded (node, index_type))
6086 emit_jump (label_rtx (node->code_label));
6088 else if (tree_int_cst_equal (node->low, node->high))
6090 /* Node is single valued. First see if the index expression matches
6091 this node and then check our children, if any. */
6093 do_jump_if_equal (index,
6094 convert_modes (mode, imode,
6095 expand_expr (node->low, NULL_RTX,
6098 label_rtx (node->code_label), unsignedp);
6100 if (node->right != 0 && node->left != 0)
6102 /* This node has children on both sides.
6103 Dispatch to one side or the other
6104 by comparing the index value with this node's value.
6105 If one subtree is bounded, check that one first,
6106 so we can avoid real branches in the tree. */
6108 if (node_is_bounded (node->right, index_type))
6110 emit_cmp_and_jump_insns (index,
6113 expand_expr (node->high, NULL_RTX,
6116 GT, NULL_RTX, mode, unsignedp,
6117 label_rtx (node->right->code_label));
6118 emit_case_nodes (index, node->left, default_label, index_type);
6121 else if (node_is_bounded (node->left, index_type))
6123 emit_cmp_and_jump_insns (index,
6126 expand_expr (node->high, NULL_RTX,
6129 LT, NULL_RTX, mode, unsignedp,
6130 label_rtx (node->left->code_label));
6131 emit_case_nodes (index, node->right, default_label, index_type);
6136 /* Neither node is bounded. First distinguish the two sides;
6137 then emit the code for one side at a time. */
6139 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6141 /* See if the value is on the right. */
6142 emit_cmp_and_jump_insns (index,
6145 expand_expr (node->high, NULL_RTX,
6148 GT, NULL_RTX, mode, unsignedp,
6149 label_rtx (test_label));
6151 /* Value must be on the left.
6152 Handle the left-hand subtree. */
6153 emit_case_nodes (index, node->left, default_label, index_type);
6154 /* If left-hand subtree does nothing, go to default. */
6156 emit_jump_if_reachable (default_label);
6158 /* Code branches here for the right-hand subtree. */
6159 expand_label (test_label);
6160 emit_case_nodes (index, node->right, default_label, index_type);
6164 else if (node->right != 0 && node->left == 0)
6166 /* Here we have a right child but no left so we issue conditional
6167 branch to default and process the right child.
6169 Omit the conditional branch to default if it would avoid only one
6170 right child; it costs too much space to save so little time. */
6172 if (node->right->right || node->right->left
6173 || !tree_int_cst_equal (node->right->low, node->right->high))
6175 if (!node_has_low_bound (node, index_type))
6177 emit_cmp_and_jump_insns (index,
6180 expand_expr (node->high, NULL_RTX,
6183 LT, NULL_RTX, mode, unsignedp,
6187 emit_case_nodes (index, node->right, default_label, index_type);
6190 /* We cannot process node->right normally
6191 since we haven't ruled out the numbers less than
6192 this node's value. So handle node->right explicitly. */
6193 do_jump_if_equal (index,
6196 expand_expr (node->right->low, NULL_RTX,
6199 label_rtx (node->right->code_label), unsignedp);
6202 else if (node->right == 0 && node->left != 0)
6204 /* Just one subtree, on the left. */
6205 if (node->left->left || node->left->right
6206 || !tree_int_cst_equal (node->left->low, node->left->high))
6208 if (!node_has_high_bound (node, index_type))
6210 emit_cmp_and_jump_insns (index,
6213 expand_expr (node->high, NULL_RTX,
6216 GT, NULL_RTX, mode, unsignedp,
6220 emit_case_nodes (index, node->left, default_label, index_type);
6223 /* We cannot process node->left normally
6224 since we haven't ruled out the numbers greater than
6225 this node's value. So handle node->left explicitly. */
6226 do_jump_if_equal (index,
6229 expand_expr (node->left->low, NULL_RTX,
6232 label_rtx (node->left->code_label), unsignedp);
6237 /* Node is a range. These cases are very similar to those for a single
6238 value, except that we do not start by testing whether this node
6239 is the one to branch to. */
6241 if (node->right != 0 && node->left != 0)
6243 /* Node has subtrees on both sides.
6244 If the right-hand subtree is bounded,
6245 test for it first, since we can go straight there.
6246 Otherwise, we need to make a branch in the control structure,
6247 then handle the two subtrees. */
6248 tree test_label = 0;
6250 if (node_is_bounded (node->right, index_type))
6251 /* Right hand node is fully bounded so we can eliminate any
6252 testing and branch directly to the target code. */
6253 emit_cmp_and_jump_insns (index,
6256 expand_expr (node->high, NULL_RTX,
6259 GT, NULL_RTX, mode, unsignedp,
6260 label_rtx (node->right->code_label));
6263 /* Right hand node requires testing.
6264 Branch to a label where we will handle it later. */
6266 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6267 emit_cmp_and_jump_insns (index,
6270 expand_expr (node->high, NULL_RTX,
6273 GT, NULL_RTX, mode, unsignedp,
6274 label_rtx (test_label));
6277 /* Value belongs to this node or to the left-hand subtree. */
6279 emit_cmp_and_jump_insns (index,
6282 expand_expr (node->low, NULL_RTX,
6285 GE, NULL_RTX, mode, unsignedp,
6286 label_rtx (node->code_label));
6288 /* Handle the left-hand subtree. */
6289 emit_case_nodes (index, node->left, default_label, index_type);
6291 /* If right node had to be handled later, do that now. */
6295 /* If the left-hand subtree fell through,
6296 don't let it fall into the right-hand subtree. */
6297 emit_jump_if_reachable (default_label);
6299 expand_label (test_label);
6300 emit_case_nodes (index, node->right, default_label, index_type);
6304 else if (node->right != 0 && node->left == 0)
6306 /* Deal with values to the left of this node,
6307 if they are possible. */
6308 if (!node_has_low_bound (node, index_type))
6310 emit_cmp_and_jump_insns (index,
6313 expand_expr (node->low, NULL_RTX,
6316 LT, NULL_RTX, mode, unsignedp,
6320 /* Value belongs to this node or to the right-hand subtree. */
6322 emit_cmp_and_jump_insns (index,
6325 expand_expr (node->high, NULL_RTX,
6328 LE, NULL_RTX, mode, unsignedp,
6329 label_rtx (node->code_label));
6331 emit_case_nodes (index, node->right, default_label, index_type);
6334 else if (node->right == 0 && node->left != 0)
6336 /* Deal with values to the right of this node,
6337 if they are possible. */
6338 if (!node_has_high_bound (node, index_type))
6340 emit_cmp_and_jump_insns (index,
6343 expand_expr (node->high, NULL_RTX,
6346 GT, NULL_RTX, mode, unsignedp,
6350 /* Value belongs to this node or to the left-hand subtree. */
6352 emit_cmp_and_jump_insns (index,
6355 expand_expr (node->low, NULL_RTX,
6358 GE, NULL_RTX, mode, unsignedp,
6359 label_rtx (node->code_label));
6361 emit_case_nodes (index, node->left, default_label, index_type);
6366 /* Node has no children so we check low and high bounds to remove
6367 redundant tests. Only one of the bounds can exist,
6368 since otherwise this node is bounded--a case tested already. */
6369 int high_bound = node_has_high_bound (node, index_type);
6370 int low_bound = node_has_low_bound (node, index_type);
6372 if (!high_bound && low_bound)
6374 emit_cmp_and_jump_insns (index,
6377 expand_expr (node->high, NULL_RTX,
6380 GT, NULL_RTX, mode, unsignedp,
6384 else if (!low_bound && high_bound)
6386 emit_cmp_and_jump_insns (index,
6389 expand_expr (node->low, NULL_RTX,
6392 LT, NULL_RTX, mode, unsignedp,
6395 else if (!low_bound && !high_bound)
6397 /* Widen LOW and HIGH to the same width as INDEX. */
6398 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
6399 tree low = build1 (CONVERT_EXPR, type, node->low);
6400 tree high = build1 (CONVERT_EXPR, type, node->high);
6401 rtx low_rtx, new_index, new_bound;
6403 /* Instead of doing two branches, emit one unsigned branch for
6404 (index-low) > (high-low). */
6405 low_rtx = expand_expr (low, NULL_RTX, mode, 0);
6406 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
6407 NULL_RTX, unsignedp,
6409 new_bound = expand_expr (fold (build (MINUS_EXPR, type,
6413 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
6414 mode, 1, default_label);
6417 emit_jump (label_rtx (node->code_label));
6422 #include "gt-stmt.h"