/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
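
/* For illustration only (not part of this file): a front end might
   drive those two entry points for `if (cond) stmt;' roughly as
   follows; the exitflag value used here is an assumption.  */
#if 0
  expand_start_cond (cond, /*exitflag=*/0);
  /* ...parse and expand the then-clause...  */
  expand_end_cond ();
#endif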
#include "insn-config.h"
#include "hard-reg-set.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Functions and data structures for expanding case statements.  */
/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
struct case_node
{
  struct case_node *left;     /* Left son in binary tree */
  struct case_node *right;    /* Right son in binary tree; also node chain */
  struct case_node *parent;   /* Parent of node in binary tree */
  tree low;                   /* Lowest index value for this label */
  tree high;                  /* Highest index value for this label */
  tree code_label;            /* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
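
/* For illustration only: with the GNU C case-range extension, the
   switch below yields one case_node with LOW = 1 and HIGH = 5, and a
   second node with LOW == HIGH == 9.  */
#if 0
  switch (i)
    {
    case 1 ... 5:
      break;
    case 9:
      break;
    }
#endif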
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
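
/* A worked example of the offset above (an illustration, not code from
   this file): COST_TABLE (-1) indexes cost_table_[0], the first
   element, so the usable index range is -1 .. 127 for the 129-entry
   table.  */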
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
        {
          /* Label for the end of the if construct.
             There is none if EXITFLAG was not set
             and no `else' has been seen yet.  */
          rtx endif_label;
          /* Label for the end of this alternative.
             This may be the end of the if or the next else/elseif.  */
          rtx next_label;
        } cond;
      /* For loops.  */
      struct
        {
          /* Label at the top of the loop; place to loop back to.  */
          rtx start_label;
          /* Label at the end of the whole construct.  */
          rtx end_label;
          /* Label before a jump that branches to the end of the whole
             construct.  This is where destructors go if any.  */
          rtx alt_end_label;
          /* Label for `continue' statement to jump to;
             this is in front of the stepper of the loop.  */
          rtx continue_label;
        } loop;
      /* For variable binding contours.  */
      struct
        {
          /* Sequence number of this binding contour within the function,
             in order of entry.  */
          int block_start_count;
          /* Nonzero => value to restore stack to on exit.  */
          rtx stack_level;
          /* The NOTE that starts this contour.
             Used by expand_goto to check whether the destination
             is within each contour or not.  */
          rtx first_insn;
          /* Innermost containing binding contour that has a stack level.  */
          struct nesting *innermost_stack_block;
          /* List of cleanups to be run on exit from this contour.
             This is a list of expressions to be evaluated.
             The TREE_PURPOSE of each link is the ..._DECL node
             which the cleanup pertains to.  */
          tree cleanups;
          /* List of cleanup-lists of blocks containing this block,
             as they were at the locus where this block appears.
             There is an element for each containing block,
             ordered innermost containing block first.
             The tail of this list can be 0,
             if all remaining elements would be empty lists.
             The element's TREE_VALUE is the cleanup-list of that block,
             which may be null.  */
          tree outer_cleanups;
          /* Chain of labels defined inside this binding contour.
             For contours that have stack levels or cleanups.  */
          struct label_chain *label_chain;
          /* Number of function calls seen, as of start of this block.  */
          int n_function_calls;
          /* Nonzero if this is associated with an EH region.  */
          int exception_region;
          /* The saved target_temp_slot_level from our outer block.
             We may reset target_temp_slot_level to be the level of
             this block, if that is done, target_temp_slot_level
             reverts to the saved target_temp_slot_level at the very
             end of the block.  */
          int block_target_temp_slot_level;
          /* True if we are currently emitting insns in an area of
             output code that is controlled by a conditional
             expression.  This is used by the cleanup handling code to
             generate conditional cleanup actions.  */
          int conditional_code;
          /* A place to move the start of the exception region for any
             of the conditional cleanups, must be at the end or after
             the start of the last unconditional cleanup, and before any
             conditional branch points.  */
          rtx last_unconditional_cleanup;
          /* When in a conditional context, this is the specific
             cleanup list associated with last_unconditional_cleanup,
             where we place the conditionalized cleanups.  */
          tree *cleanup_ptr;
        } block;
      /* For switch (C) or case (Pascal) statements,
         and also for dummies (see `expand_start_case_dummy').  */
      struct
        {
          /* The insn after which the case dispatch should finally
             be emitted.  Zero for a dummy.  */
          rtx start;
          /* A list of case labels; it is first built as an AVL tree.
             During expand_end_case, this is converted to a list, and may be
             rearranged into a nearly balanced binary tree.  */
          struct case_node *case_list;
          /* Label to jump to if no case matches.  */
          tree default_label;
          /* The expression to be dispatched on.  */
          tree index_expr;
          /* Type that INDEX_EXPR should be converted to.  */
          tree nominal_type;
          /* Name of this kind of statement, for warnings.  */
          const char *printname;
          /* Used to save no_line_numbers till we see the first case label.
             We set this to -1 when we see the first case label in this
             case statement.  */
          int line_number_status;
        } case_stmt;
    } data;
};
/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))
/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)                                 \
do { struct nesting *target = STACK;                    \
     struct nesting *this;                              \
     do { this = nesting_stack;                         \
          if (loop_stack == this)                       \
            loop_stack = loop_stack->next;              \
          if (cond_stack == this)                       \
            cond_stack = cond_stack->next;              \
          if (block_stack == this)                      \
            block_stack = block_stack->next;            \
          if (stack_block_stack == this)                \
            stack_block_stack = stack_block_stack->next; \
          if (case_stack == this)                       \
            case_stack = case_stack->next;              \
          nesting_depth = nesting_stack->depth - 1;     \
          nesting_stack = this->all;                    \
          obstack_free (&stmt_obstack, this); }         \
     while (this != target); } while (0)
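
/* Illustrative only: the expand_end_* routines unwind with this
   macro; a hypothetical call popping everything up to and including
   the innermost loop would be:  */
#if 0
  POPSTACK (loop_stack);
#endif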
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};
/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};
struct stmt_status
{
  /* Chain of all pending binding contours.  */
  struct nesting *x_block_stack;

  /* If any new stacks are added here, add them to POPSTACKS too.  */

  /* Chain of all pending binding contours that restore stack levels
     or have cleanups.  */
  struct nesting *x_stack_block_stack;

  /* Chain of all pending conditional statements.  */
  struct nesting *x_cond_stack;

  /* Chain of all pending loops.  */
  struct nesting *x_loop_stack;

  /* Chain of all pending case or switch statements.  */
  struct nesting *x_case_stack;

  /* Separate chain including all of the above,
     chained through the `all' field.  */
  struct nesting *x_nesting_stack;

  /* Number of entries on nesting_stack now.  */
  int x_nesting_depth;

  /* Number of binding contours started so far in this function.  */
  int x_block_start_count;

  /* Each time we expand an expression-statement,
     record the expr's type and its RTL value here.  */
  tree x_last_expr_type;
  rtx x_last_expr_value;

  /* Nonzero if within a ({...}) grouping, in which case we must
     always compute a value for each expr-stmt in case it is the last one.  */
  int x_expr_stmts_for_value;

  /* Filename and line number of last line-number note,
     whether we actually emitted it or not.  */
  const char *x_emit_filename;
  int x_emit_lineno;

  struct goto_fixup *x_goto_fixup_chain;
};
#define block_stack (cfun->stmt->x_block_stack)
#define stack_block_stack (cfun->stmt->x_stack_block_stack)
#define cond_stack (cfun->stmt->x_cond_stack)
#define loop_stack (cfun->stmt->x_loop_stack)
#define case_stack (cfun->stmt->x_case_stack)
#define nesting_stack (cfun->stmt->x_nesting_stack)
#define nesting_depth (cfun->stmt->x_nesting_depth)
#define current_block_start_count (cfun->stmt->x_block_start_count)
#define last_expr_type (cfun->stmt->x_last_expr_type)
#define last_expr_value (cfun->stmt->x_last_expr_value)
#define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
#define emit_filename (cfun->stmt->x_emit_filename)
#define emit_lineno (cfun->stmt->x_emit_lineno)
#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)
/* Non-zero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;
static int n_occurrences                PARAMS ((int, const char *));
static void expand_goto_internal        PARAMS ((tree, rtx, rtx));
static int expand_fixup                 PARAMS ((tree, rtx, rtx));
static rtx expand_nl_handler_label      PARAMS ((rtx, rtx));
static void expand_nl_goto_receiver     PARAMS ((void));
static void expand_nl_goto_receivers    PARAMS ((struct nesting *));
static void fixup_gotos                 PARAMS ((struct nesting *, rtx, tree,
                                                 rtx, int));
static bool check_operand_nalternatives PARAMS ((tree, tree));
static bool check_unique_operand_names  PARAMS ((tree, tree));
static tree resolve_operand_names       PARAMS ((tree, tree, tree,
                                                 const char **));
static char *resolve_operand_name_1     PARAMS ((char *, tree, tree));
static void expand_null_return_1        PARAMS ((rtx));
static void expand_value_return         PARAMS ((rtx));
static int tail_recursion_args          PARAMS ((tree, tree));
static void expand_cleanups             PARAMS ((tree, tree, int, int));
static void check_seenlabel             PARAMS ((void));
static void do_jump_if_equal            PARAMS ((rtx, rtx, rtx, int));
static int estimate_case_costs          PARAMS ((case_node_ptr));
static void group_case_nodes            PARAMS ((case_node_ptr));
static void balance_case_nodes          PARAMS ((case_node_ptr *,
                                                 case_node_ptr));
static int node_has_low_bound           PARAMS ((case_node_ptr, tree));
static int node_has_high_bound          PARAMS ((case_node_ptr, tree));
static int node_is_bounded              PARAMS ((case_node_ptr, tree));
static void emit_jump_if_reachable      PARAMS ((rtx));
static void emit_case_nodes             PARAMS ((rtx, case_node_ptr, rtx, tree));
static struct case_node *case_tree2list PARAMS ((case_node *, case_node *));
static void mark_cond_nesting           PARAMS ((struct nesting *));
static void mark_loop_nesting           PARAMS ((struct nesting *));
static void mark_block_nesting          PARAMS ((struct nesting *));
static void mark_case_nesting           PARAMS ((struct nesting *));
static void mark_case_node              PARAMS ((struct case_node *));
static void mark_goto_fixup             PARAMS ((struct goto_fixup *));
static void free_case_nodes             PARAMS ((case_node_ptr));
void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}
/* Mark N (known to be a cond-nesting) for GC.  */

mark_cond_nesting (n)

      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.cond.endif_label);
      ggc_mark_rtx (n->data.cond.next_label);

/* Mark N (known to be a loop-nesting) for GC.  */

mark_loop_nesting (n)

      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.loop.start_label);
      ggc_mark_rtx (n->data.loop.end_label);
      ggc_mark_rtx (n->data.loop.alt_end_label);
      ggc_mark_rtx (n->data.loop.continue_label);

/* Mark N (known to be a block-nesting) for GC.  */

mark_block_nesting (n)

      struct label_chain *l;

      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.block.stack_level);
      ggc_mark_rtx (n->data.block.first_insn);
      ggc_mark_tree (n->data.block.cleanups);
      ggc_mark_tree (n->data.block.outer_cleanups);

      for (l = n->data.block.label_chain; l != NULL; l = l->next)
        ggc_mark_tree (l->label);

      ggc_mark_rtx (n->data.block.last_unconditional_cleanup);

      /* ??? cleanup_ptr never points outside the stack, does it?  */

/* Mark N (known to be a case-nesting) for GC.  */

mark_case_nesting (n)

      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.case_stmt.start);

      ggc_mark_tree (n->data.case_stmt.default_label);
      ggc_mark_tree (n->data.case_stmt.index_expr);
      ggc_mark_tree (n->data.case_stmt.nominal_type);

      mark_case_node (n->data.case_stmt.case_list);

      ggc_mark_tree (c->low);
      ggc_mark_tree (c->high);
      ggc_mark_tree (c->code_label);

      mark_case_node (c->right);
      mark_case_node (c->left);
      struct goto_fixup *g;

      ggc_mark_rtx (g->before_jump);
      ggc_mark_tree (g->target);
      ggc_mark_tree (g->context);
      ggc_mark_rtx (g->target_rtl);
      ggc_mark_rtx (g->stack_level);
      ggc_mark_tree (g->cleanup_list_list);
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

  /* We're about to free the function obstack.  If we hold pointers to
     things allocated there, then we'll try to mark them when we do
     GC.  So, we clear them out here explicitly.  */

void
mark_stmt_status (p)
     struct stmt_status *p;
{
  if (p == 0)
    return;

  mark_block_nesting (p->x_block_stack);
  mark_cond_nesting (p->x_cond_stack);
  mark_loop_nesting (p->x_loop_stack);
  mark_case_nesting (p->x_case_stack);

  ggc_mark_tree (p->x_last_expr_type);
  /* last_expr_value is only valid if last_expr_type is nonzero.  */
  if (p->x_last_expr_type)
    ggc_mark_rtx (p->x_last_expr_value);

  mark_goto_fixup (p->x_goto_fixup_chain);
}
void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
}

void
init_stmt_for_function ()
{
  cfun->stmt = (struct stmt_status *) xmalloc (sizeof (struct stmt_status));

  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  cond_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  current_block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;

  last_expr_type = 0;
  last_expr_value = NULL_RTX;
}
/* Return nonzero if anything is pushed on the loop, condition, or case
   stack.  */
int
in_control_zone_p ()
{
  return cond_stack || loop_stack || case_stack;
}
/* Record the current file and line.  Called from emit_line_note.  */

void
set_file_and_line_for_stmt (file, line)
     const char *file;
     int line;
{
  /* If we're outputting an inline function, and we add a line note,
     there may be no CFUN->STMT information.  So, there's no need to
     update it.  */
  if (cfun->stmt)
    {
      emit_filename = file;
      emit_lineno = line;
    }
}
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
          || (GET_CODE (last_insn) == NOTE
              && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (!DECL_RTL_SET_P (label))
    SET_DECL_RTL (label, gen_label_rtx ());

  return DECL_RTL (label);
}
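
/* Illustrative only: a front end expanding `goto l; ... l:;' might
   drive the goto/label routines roughly like this (hypothetical
   driver code, not from this file):  */
#if 0
  expand_goto (label_decl);     /* jump emitted via label_rtx */
  /* ...expand intervening statements...  */
  expand_label (label_decl);    /* CODE_LABEL defined here */
#endif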
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (x) != Pmode)
    x = convert_memory_address (Pmode, x);
#endif

  /* Be sure the function is executable.  */
  if (current_function_check_memory_usage)
    emit_library_call (chkr_check_exec_libfunc, LCT_CONST_MAKE_BLOCK,
                       VOIDmode, 1, x, ptr_mode);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);

  current_function_has_computed_jump = 1;
}
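
/* Illustrative only: this expander is behind the GNU C computed goto
   extension, e.g. (with a hypothetical index `i'):  */
#if 0
  static void *targets[] = { &&lab_a, &&lab_b };
  goto *targets[i];             /* expand_computed_goto handles this */
 lab_a: ;
 lab_b: ;
#endif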
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) ggc_alloc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}
/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
                       &nonlocal_goto_stack_level,
                       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}
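
/* Illustrative only: the GNU C nested-function extension that
   exercises this path, a nonlocal goto from `g' to a label in its
   containing function:  */
#if 0
  int
  f (int i)
  {
    __label__ out;
    void g (void) { if (i) goto out; }  /* nonlocal goto */
    g ();
    return 0;
   out:
    return 1;
  }
#endif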
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx handler_slot, static_chain, save_area, insn;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->x_nonlocal_goto_handler_slots;
      for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
           link = TREE_CHAIN (link))
        handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
         case the virtual stack vars register gets instantiated differently
         in the parent than in the child.  */

      static_chain = copy_to_reg (lookup_static_chain (label));

      /* Get addr of containing function's current nonlocal goto handler,
         which will do any cleanups and then jump to the label.  */
      handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
                                               virtual_stack_vars_rtx,
                                               static_chain));

      /* Get addr of containing function's nonlocal save area.  */
      save_area = p->x_nonlocal_goto_stack_level;
      if (save_area)
        save_area = replace_rtx (copy_rtx (save_area),
                                 virtual_stack_vars_rtx, static_chain);

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
                                      save_area, label_ref));
      else
#endif
        {
          /* Restore frame pointer for containing function.
             This sets the actual hard register used for the frame pointer
             to the location of the function's incoming static chain info.
             The non-local goto handler will then adjust it to contain the
             proper value and reload the argument pointer, if needed.  */
          emit_move_insn (hard_frame_pointer_rtx, static_chain);
          emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);

          /* USE of hard_frame_pointer_rtx added for consistency;
             not clear if really needed.  */
          emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
          emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
          emit_indirect_jump (handler_slot);
        }

      /* Search backwards to the jump insn and mark it as a
         non-local goto.  */
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
        {
          if (GET_CODE (insn) == JUMP_INSN)
            {
              REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
                                                  const0_rtx, REG_NOTES (insn));
              break;
            }
          else if (GET_CODE (insn) == CALL_INSN)
            break;
        }
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
         (Check containment by comparing insn-uids.)
         Then restore the outermost stack level within that block,
         and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
        {
          if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
            break;
          if (block->data.block.stack_level != 0)
            stack_level = block->data.block.stack_level;
          /* Execute the cleanups for blocks we are exiting.  */
          if (block->data.block.cleanups != 0)
            {
              expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
              do_pending_stack_adjust ();
            }
        }

      if (stack_level)
        {
          /* Ensure stack adjust isn't done by emit_jump, as this
             would clobber the stack pointer.  This one should be
             deleted as dead by flow.  */
          clear_pending_stack_adjust ();
          do_pending_stack_adjust ();

          /* Don't do this adjust if it's to the end label and this function
             is to return with a depressed stack pointer.  */
          if (label == return_label
              && (((TREE_CODE (TREE_TYPE (current_function_decl))
                    == FUNCTION_TYPE)
                   && (TYPE_RETURNS_STACK_DEPRESSED
                       (TREE_TYPE (current_function_decl))))))
            ;
          else
            emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
        }

      if (body != 0 && DECL_TOO_LATE (body))
        error ("jump to `%s' invalidly jumps into binding contour",
               IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
         of at least one goto that has no fixup.  */
      if (body != 0)
        TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */
static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
          || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
           && (rtl_label == loop_stack->data.loop.start_label
               || rtl_label == loop_stack->data.loop.end_label
               || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
         If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
        next_block = next_block->all;

      if (next_block)
        return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
         which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
        if (block == next_block)
          next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
        || block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
        = (struct goto_fixup *) ggc_alloc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
         after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
         doing the stack adjust here isn't useful.  Doing it with our
         settings at that location isn't useful either.  Let's hope
         someone does it!  */
      if (last_insn == 0)
        do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
         NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
         this point.  The notes will encapsulate any and all fixup
         code which we might later insert at this point in the insn
         stream.  Also, the BLOCK node will be the parent (i.e. the
         `SUPERBLOCK') of any other BLOCK nodes which we might create
         later on when we are expanding the fixup code.

         Note that optimization passes (including expand_end_loop)
         might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
         as a placeholder.  */

      {
        rtx original_before_jump
          = last_insn ? last_insn : get_last_insn ();
        rtx start;
        rtx end;
        tree block;

        block = make_node (BLOCK);
        TREE_USED (block) = 1;

        if (!cfun->x_whole_function_mode_p)
          insert_block (block);
        else
          {
            BLOCK_CHAIN (block)
              = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
            BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
              = block;
          }

        start_sequence ();
        start = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
        if (cfun->x_whole_function_mode_p)
          NOTE_BLOCK (start) = block;
        fixup->before_jump = emit_note (NULL, NOTE_INSN_DELETED);
        end = emit_note (NULL, NOTE_INSN_BLOCK_END);
        if (cfun->x_whole_function_mode_p)
          NOTE_BLOCK (end) = block;
        fixup->context = block;
        end_sequence ();
        emit_insns_after (start, original_before_jump);
      }

      fixup->block_start_count = current_block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
        = ((block->data.block.outer_cleanups
            || block->data.block.cleanups)
           ? tree_cons (NULL_TREE, block->data.block.cleanups,
                        block->data.block.outer_cleanups)
           : NULL_TREE);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != end_block;
}
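
/* Illustrative only: a forward goto out of a contour with a dynamic
   stack level, e.g. one holding a variable-length array, is the
   classic case that lands on this fixup list (`n', `bail' and `use'
   are hypothetical):  */
#if 0
  {
    int vla[n];         /* block gets a stack level */
    if (bail)
      goto out;         /* stack must be restored; label not seen yet */
    use (vla);
  }
 out: ;
#endif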
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
}
/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */
static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
        {
          /* Delete inactive fixup from the chain, if that is easy to do.  */
          if (prev != 0)
            prev->next = f->next;
        }
      /* Has this fixup's target label been defined?
         If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
        {
          rtx cleanup_insns;

          /* If this fixup jumped into this contour from before the beginning
             of this contour, report an error.   This code used to use
             the first non-label insn after f->target_rtl, but that's
             wrong since such can be added, by things like put_var_into_stack
             and have INSN_UIDs that are out of the range of the block.  */
          /* ??? Bug: this does not detect jumping in through intermediate
             blocks that have stack levels or cleanups.
             It detects only a problem with the innermost block
             around the label.  */
          if (f->target != 0
              && (dont_jump_in || stack_level || cleanup_list)
              && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
              && INSN_UID (first_insn) > INSN_UID (f->before_jump)
              && ! DECL_ERROR_ISSUED (f->target))
            {
              error_with_decl (f->target,
                               "label `%s' used before containing binding contour");
              /* Prevent multiple errors for one label.  */
              DECL_ERROR_ISSUED (f->target) = 1;
            }

          /* We will expand the cleanups into a sequence of their own and
             then later on we will attach this new sequence to the insn
             stream just ahead of the actual jump insn.  */

          start_sequence ();

          /* Temporarily restore the lexical context where we will
             logically be inserting the fixup code.  We do this for the
             sake of getting the debugging information right.  */

          pushlevel (0);
          set_block (f->context);

          /* Expand the cleanups for blocks this jump exits.  */
          if (f->cleanup_list_list)
            {
              tree lists;
              for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
                /* Marked elements correspond to blocks that have been closed.
                   Do their cleanups.  */
                if (TREE_ADDRESSABLE (lists)
                    && TREE_VALUE (lists) != 0)
                  {
                    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
                    /* Pop any pushes done in the cleanups,
                       in case function is about to return.  */
                    do_pending_stack_adjust ();
                  }
            }

          /* Restore stack level for the biggest contour that this
             jump jumps out of.  */
          if (f->stack_level
              && ! (f->target_rtl == return_label
                    && ((TREE_CODE (TREE_TYPE (current_function_decl))
                         == FUNCTION_TYPE)
                        && (TYPE_RETURNS_STACK_DEPRESSED
                            (TREE_TYPE (current_function_decl))))))
            emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

          /* Finish up the sequence containing the insns which implement the
             necessary cleanups, and then attach that whole sequence to the
             insn stream just ahead of the actual jump insn.  Attaching it
             at that point ensures that any cleanups which are in fact
             implicit C++ object destructions (which must be executed upon
             leaving the block) appear (to the debugger) to be taking place
             in an area of the generated code where the object(s) being
             destructed are still "in scope".  */

          cleanup_insns = get_insns ();
          poplevel (1, 0, 0);
          end_sequence ();

          emit_insns_after (cleanup_insns, f->before_jump);

          f->before_jump = 0;
        }
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
        && PREV_INSN (f->target_rtl) == 0
        /* Label has still not appeared.  If we are exiting a block with
           a stack level to restore, that started before the fixup,
           mark this stack level as needing restoration
           when the fixup is later finalized.  */
        && thisblock != 0
        /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
           means the label is undefined.  That's erroneous, but possible.  */
        && (thisblock->data.block.block_start_count
            <= f->block_start_count))
      {
        tree lists = f->cleanup_list_list;
        rtx cleanup_insns;

        for (; lists; lists = TREE_CHAIN (lists))
          /* If the following elt. corresponds to our containing block
             then the elt. must be for this block.  */
          if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
            {
              start_sequence ();
              pushlevel (0);
              set_block (f->context);
              expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
              do_pending_stack_adjust ();
              cleanup_insns = get_insns ();
              poplevel (1, 0, 0);
              end_sequence ();

              if (cleanup_insns != 0)
                f->before_jump
                  = emit_insns_after (cleanup_insns, f->before_jump);

              f->cleanup_list_list = TREE_CHAIN (lists);
            }

        f->stack_level = stack_level;
      }
}
/* Return the number of times character C occurs in string S.  */

static int
n_occurrences (c, s)
     int c;
     const char *s;

/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used in function where memory usage is checked");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
                                TREE_STRING_POINTER (body)));
}
/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */
static bool
parse_output_constraint (constraint_p, operand_num, ninputs, noutputs,
                         allows_mem, allows_reg, is_inout)
     const char **constraint_p;
     int operand_num;
     int ninputs;
     int noutputs;
     bool *allows_mem;
     bool *allows_reg;
     bool *is_inout;
{
  const char *constraint = *constraint_p;
  const char *p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
  if (!p)
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error
     message.  */
  if (!p)
    {
      error ("output operand constraint lacks `='");
      return false;
    }

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Canonicalize the output constraint so that it begins with `='.  */
  if (p != constraint || *is_inout)
    {
      char *buf;
      size_t c_len = strlen (constraint);

      if (p != constraint)
        warning ("output constraint `%c' for operand %d is not at the beginning",
                 *p, operand_num);

      /* Make a copy of the constraint.  */
      buf = alloca (c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'.  */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
         it might be a `+'.)  */
      buf[0] = '=';
      /* Replace the constraint with the canonicalized string.  */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;
    }

  /* Loop through the constraint string.  */
  for (p = constraint + 1; *p; ++p)
    switch (*p)
      {
      case '+':  case '=':
        error ("operand constraint contains incorrectly positioned '+' or '='");
        return false;

      case '%':
        if (operand_num + 1 == ninputs + noutputs)
          {
            error ("`%%' constraint used with last operand");
            return false;
          }
        break;

      case 'V':  case 'm':  case 'o':
        *allows_mem = true;
        break;

      case '?':  case '!':  case '*':  case '&':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
        break;

      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
        error ("matching constraint not valid in output operand");
        return false;

        /* ??? Before flow, auto inc/dec insns are not supposed to exist,
           excepting those that expand_call created.  So match memory
           and hope.  */

      default:
        if (REG_CLASS_FROM_LETTER (*p) != NO_REGS)
          *allows_reg = true;
#ifdef EXTRA_CONSTRAINT
#endif
        else
          {
            /* Otherwise we can't assume anything about the nature of
               the constraint except that it isn't purely registers.
               Treat it like "g" and hope for the best.  */
            *allows_reg = true;
            *allows_mem = true;
          }
        break;
      }

  return true;
}
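
/* Illustrative only: a hypothetical caller (ninputs/noutputs stand
   for the surrounding operand counts).  For the constraint "+rm" the
   routine above canonicalizes *CONSTRAINT_P to "=rm" and reports a
   read-write operand that allows both registers and memory.  */
#if 0
  const char *c = "+rm";
  bool mem, reg, inout;
  if (parse_output_constraint (&c, 0, ninputs, noutputs,
                               &mem, &reg, &inout))
    /* now c == "=rm", inout == true, mem and reg both true */ ;
#endif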
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a tree list in TREE_PURPOSE which in turn contains a constraint
   name in TREE_PURPOSE (or NULL_TREE) and a constraint string
   in TREE_VALUE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     const char *filename;
     int line;
{
  rtvec argvec, constraintvec;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout;
  int nclobbers;
  tree tail;
  int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
  rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  enum machine_mode *inout_mode
    = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
  const char **constraints
    = (const char **) alloca ((noutputs + ninputs) * sizeof (const char *));
  /* The insn we have emitted.  */
  rtx insn;
  int old_generating_concat_p = generating_concat_p;
  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used in function where memory usage is checked");
      return;
    }

  if (! check_operand_nalternatives (outputs, inputs))
    return;

  if (! check_unique_operand_names (outputs, inputs))
    return;

  string = resolve_operand_names (string, outputs, inputs, constraints);

#ifdef MD_ASM_CLOBBERS
  /* Sometimes we wish to automatically clobber registers across an asm.
     Case in point is when the i386 backend moved from cc0 to a hard reg --
     maintaining source-level compatibility means automatically clobbering
     the flags register.  */
  MD_ASM_CLOBBERS (clobbers);
#endif
  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));

      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
        ++nclobbers;
      else if (i == -2)
        error ("unknown register name `%s' in `asm'", regname);
    }
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      bool is_inout;
      bool allows_reg;
      bool allows_mem;

      /* If there's an erroneous arg, emit no insn.  */
      if (type == error_mark_node)
        return;

      /* Make sure constraint has `=' and does not have `+'.  Also, see
         if it allows any register.  Be liberal on the latter test, since
         the worst that happens if we get it wrong is we issue an error
         message.  */

      /* Try to parse the output constraint.  If that fails, there's
         no point in going further.  */
      if (!parse_output_constraint (&constraints[i],
                                    i,
                                    ninputs,
                                    noutputs,
                                    &allows_mem,
                                    &allows_reg,
                                    &is_inout))
        return;

      /* If an output operand is not a decl or indirect ref and our constraint
         allows a register, make a temporary to act as an intermediate.
         Make the asm insn write into that, then our caller will copy it to
         the real output operand.  Likewise for promoted variables.  */

      generating_concat_p = 0;

      real_output_rtx[i] = NULL_RTX;
      if ((TREE_CODE (val) == INDIRECT_REF
           && allows_mem)
          || (DECL_P (val)
              && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
              && ! (GET_CODE (DECL_RTL (val)) == REG
                    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
          || ! allows_reg
          || is_inout)
        {
          if (! allows_reg)
            mark_addressable (TREE_VALUE (tail));

          output_rtx[i]
            = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
                           EXPAND_MEMORY_USE_WO);

          if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
            error ("output number %d not directly addressable", i);
          if ((! allows_mem && GET_CODE (output_rtx[i]) == MEM)
              || GET_CODE (output_rtx[i]) == CONCAT)
            {
              real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
              output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
              if (is_inout)
                emit_move_insn (output_rtx[i], real_output_rtx[i]);
            }
        }
      else
        {
          output_rtx[i] = assign_temp (type, 0, 0, 1);
          TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
        }

      generating_concat_p = old_generating_concat_p;

      if (is_inout)
        {
          inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
          inout_opnum[ninout++] = i;
        }
    }
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx, constraint strings,
     and named operands.  */

  argvec = rtvec_alloc (ninputs);
  constraintvec = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
                                : GET_MODE (output_rtx[0])),
                               TREE_STRING_POINTER (string),
                               empty_string, 0, argvec, constraintvec,
                               filename, line);

  MEM_VOLATILE_P (body) = vol;
  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
    {
      int j;
      int allows_reg = 0, allows_mem = 0;
      const char *constraint, *orig_constraint;
      int c_len;
      rtx op;

      /* If there's an erroneous arg, emit no insn,
         because the ASM_INPUT would get VOIDmode
         and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
        return;

      /* ??? Can this happen, and does the error message make any sense? */
      if (TREE_PURPOSE (tail) == NULL_TREE)
        {
          error ("hard register `%s' listed as input operand to `asm'",
                 TREE_STRING_POINTER (TREE_VALUE (tail)) );
          return;
        }

      orig_constraint = constraint = constraints[i + noutputs];
      c_len = strlen (constraint);

      /* Make sure constraint has neither `=', `+', nor '&'.  */

      for (j = 0; j < c_len; j++)
        switch (constraint[j])
          {
          case '+':  case '=':  case '&':
            if (constraint == orig_constraint)
              {
                error ("input operand constraint contains `%c'",
                       constraint[j]);
                return;
              }
            break;

          case '%':
            if (constraint == orig_constraint
                && i + 1 == ninputs - ninout)
              {
                error ("`%%' constraint used with last operand");
                return;
              }
            break;

          case 'V':  case 'm':  case 'o':
            allows_mem = 1;
            break;

          case '?':  case '!':  case '*':  case '#':
          case 'E':  case 'F':  case 'G':  case 'H':
          case 's':  case 'i':  case 'n':
          case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
          case 'N':  case 'O':  case 'P':  case ',':
            break;

            /* Whether or not a numeric constraint allows a register is
               decided by the matching constraint, and so there is no need
               to do anything special with them.  We must handle them in
               the default case, so that we don't unnecessarily force
               operands to memory.  */
          case '0':  case '1':  case '2':  case '3':  case '4':
          case '5':  case '6':  case '7':  case '8':  case '9':
            {
              unsigned long match;
              char *end;

              match = strtoul (constraint + j, &end, 10);
              if (match >= (unsigned long) noutputs)
                {
                  error ("matching constraint references invalid operand number");
                  return;
                }

              /* Try and find the real constraint for this dup.  Only do
                 this if the matching constraint is the only alternative.  */
              if (*end == '\0'
                  && (j == 0 || (j == 1 && constraint[0] == '%')))
                {
                  constraint = constraints[match];
                  c_len = strlen (constraint);
                  j = 0;
                  break;
                }
              else
                j = end - constraint;
            }

          default:
            if (! ISALPHA (constraint[j]))
              {
                error ("invalid punctuation `%c' in constraint",
                       constraint[j]);
                return;
              }
            if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
              allows_reg = 1;
#ifdef EXTRA_CONSTRAINT

            /* Otherwise we can't assume anything about the nature of
               the constraint except that it isn't purely registers.
               Treat it like "g" and hope for the best.  */
            allows_reg = 1;
            allows_mem = 1;
          }

      if (! allows_reg && allows_mem)
        mark_addressable (TREE_VALUE (tail));
      op = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);

      /* Never pass a CONCAT to an ASM.  */
      generating_concat_p = 0;
      if (GET_CODE (op) == CONCAT)
        op = force_reg (GET_MODE (op), op);

      if (asm_operand_ok (op, constraint) <= 0)
        {
          if (allows_reg)
            op = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))), op);
          else if (!allows_mem)
            warning ("asm operand %d probably doesn't match constraints",
                     i + noutputs);
          else if (CONSTANT_P (op))
            op = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
                                  op);
          else if (GET_CODE (op) == REG
                   || GET_CODE (op) == SUBREG
                   || GET_CODE (op) == ADDRESSOF
                   || GET_CODE (op) == CONCAT)
            {
              tree type = TREE_TYPE (TREE_VALUE (tail));
              tree qual_type = build_qualified_type (type,
                                                     (TYPE_QUALS (type)
                                                      | TYPE_QUAL_CONST));
              rtx memloc = assign_temp (qual_type, 1, 1, 1);

              emit_move_insn (memloc, op);
              op = memloc;
            }

          else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
            /* We won't recognize volatile memory as an available
               memory_operand at this point.  Ignore it.  */
            ;
          else if (queued_subexp_p (op))
            ;
          else
            /* ??? Leave this only until we have experience with what
               happens in combine and elsewhere when constraints are
               not satisfied.  */
            warning ("asm operand %d probably doesn't match constraints",
                     i + noutputs);
        }

      generating_concat_p = old_generating_concat_p;
      ASM_OPERANDS_INPUT (body, i) = op;

      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
        = gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
                             orig_constraint);
    }

  /* Protect all the operands from the queue now that they have all been
     evaluated.  */

  generating_concat_p = 0;

  for (i = 0; i < ninputs - ninout; i++)
    ASM_OPERANDS_INPUT (body, i)
      = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);
  /* For in-out operands, copy output rtx to input rtx.  */
  for (i = 0; i < ninout; i++)
    {
      int j = inout_opnum[i];
      char buffer[16];

      ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
        = output_rtx[j];

      sprintf (buffer, "%d", j);
      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
        = gen_rtx_ASM_INPUT (inout_mode[i], ggc_alloc_string (buffer, -1));
    }
1854 /* Now, for each output, construct an rtx
1855 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1856 ARGVEC CONSTRAINTS OPNAMES))
1857 If there is more than one, put them inside a PARALLEL. */
1859 if (noutputs == 1 && nclobbers == 0)
1861 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1862 insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1865 else if (noutputs == 0 && nclobbers == 0)
1867 /* No output operands: put in a raw ASM_OPERANDS rtx. */
1868 insn = emit_insn (body);
  else
    {
      rtx obody = body;
      int num = noutputs;

      if (num == 0)
        num = 1;

      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */
      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
        {
          XVECEXP (body, 0, i)
            = gen_rtx_SET (VOIDmode,
                           output_rtx[i],
                           gen_rtx_ASM_OPERANDS
                           (GET_MODE (output_rtx[i]),
                            TREE_STRING_POINTER (string),
                            constraints[i], i, argvec, constraintvec,
                            filename, line));

          MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
        }

      /* If there are no outputs (but there are some clobbers)
         store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
        XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
        {
          const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
          int j = decode_reg_name (regname);

          if (j == -3)  /* `cc', which is not a register */
            continue;

          if (j == -4)  /* `memory', don't cache memory across asm */
            {
              XVECEXP (body, 0, i++)
                = gen_rtx_CLOBBER (VOIDmode,
                                   gen_rtx_MEM
                                   (BLKmode,
                                    gen_rtx_SCRATCH (VOIDmode)));
              continue;
            }

          /* Ignore unknown register, error already signaled.  */
          if (j < 0)
            continue;

          /* Use QImode since that's guaranteed to clobber just one reg.  */
          XVECEXP (body, 0, i++)
            = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
        }
      insn = emit_insn (body);
    }

  /* For any outputs that needed reloading into registers, spill them
     back to where they belong.  */
  for (i = 0; i < noutputs; ++i)
    if (real_output_rtx[i])
      emit_move_insn (real_output_rtx[i], output_rtx[i]);
}
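
/* Illustrative only: a C-level extended asm whose OUTPUTS, INPUTS and
   CLOBBERS lists exercise the code above (i386 mnemonics chosen
   arbitrarily).  */
#if 0
  int in = 1, out = 2;
  asm volatile ("addl %2,%0"
                : "=r" (out)                    /* OUTPUTS */
                : "0" (out), "g" (in)           /* INPUTS */
                : "cc");                        /* CLOBBERS */
#endif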
/* A subroutine of expand_asm_operands.  Check that all operands have
   the same number of alternatives.  Return true if so.  */

static bool
check_operand_nalternatives (outputs, inputs)
     tree outputs, inputs;
{
  if (outputs || inputs)
    {
      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
      int nalternatives
        = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
      tree next = inputs;

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
        {
          error ("too many alternatives in `asm'");
          return false;
        }

      tmp = outputs;
      while (tmp)
        {
          const char *constraint
            = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));

          if (n_occurrences (',', constraint) != nalternatives)
            {
              error ("operand constraints for `asm' differ in number of alternatives");
              return false;
            }

          if (TREE_CHAIN (tmp))
            tmp = TREE_CHAIN (tmp);
          else
            tmp = next, next = 0;
        }
    }

  return true;
}
/* A subroutine of expand_asm_operands.  Check that all operand names
   are unique.  Return true if so.  We rely on the fact that these names
   are identifiers, and so have been canonicalized by get_identifier,
   so all we need are pointer comparisons.  */

static bool
check_unique_operand_names (outputs, inputs)
     tree outputs, inputs;
{
  tree i, j;

  for (i = outputs; i ; i = TREE_CHAIN (i))
    {
      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
      if (! i_name)
        continue;

      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
        if (i_name == TREE_PURPOSE (TREE_PURPOSE (j)))
          goto failure;
    }

  for (i = inputs; i ; i = TREE_CHAIN (i))
    {
      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
      if (! i_name)
        continue;

      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
        if (i_name == TREE_PURPOSE (TREE_PURPOSE (j)))
          goto failure;
      for (j = outputs; j ; j = TREE_CHAIN (j))
        if (i_name == TREE_PURPOSE (TREE_PURPOSE (j)))
          goto failure;
    }

  return true;

 failure:
  error ("duplicate asm operand name '%s'",
         IDENTIFIER_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
  return false;
}
2031 /* A subroutine of expand_asm_operands. Resolve the names of the operands
2032 in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
2033 STRING and in the constraints to those numbers. */
2036 resolve_operand_names (string, outputs, inputs, pconstraints)
2038 tree outputs, inputs;
2039 const char **pconstraints;
2041 char *buffer = xstrdup (TREE_STRING_POINTER (string));
2045 /* Assume that we will not need extra space to perform the substitution.
2046 This because we get to remove '[' and ']', which means we cannot have
2047 a problem until we have more than 999 operands. */
2050 while ((p = strchr (p, '%')) != NULL)
2054 p = resolve_operand_name_1 (p, outputs, inputs);
2057 string = build_string (strlen (buffer), buffer);
2060 /* Collect output constraints here because it's convenient.
2061 There should be no named operands here; this is verified
2062 in expand_asm_operand. */
2063 for (t = outputs; t ; t = TREE_CHAIN (t), pconstraints++)
2064 *pconstraints = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2066 /* Substitute [<name>] in input constraint strings. */
2067 for (t = inputs; t ; t = TREE_CHAIN (t), pconstraints++)
2069 const char *c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2070 if (strchr (c, '[') == NULL)
2074 p = buffer = xstrdup (c);
2075 while ((p = strchr (p, '[')) != NULL)
2076 p = resolve_operand_name_1 (p, outputs, inputs);
2078 *pconstraints = ggc_alloc_string (buffer, -1);
2079 free (buffer);
2085 return string;
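/* Sketch of the substitution performed above (illustrative): with
   operands numbered 0..n-1, outputs first and then inputs, an asm like

       asm ("add %[res], %[a]" : [res] "=r" (r) : [a] "r" (x));

   has its template rewritten to "add %0, %1", and any "[res]" or "[a]"
   appearing inside an input constraint string is rewritten to the same
   decimal number. */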
2086 /* A subroutine of resolve_operand_names. P points to the '[' for a
2087 potential named operand of the form [<name>]. In place, replace
2088 the name and brackets with a number. Return a pointer to the
2089 balance of the string after substitution. */
2092 resolve_operand_name_1 (p, outputs, inputs)
2093 char *p;
2094 tree outputs, inputs;
2096 char *q;
2097 int op;
2098 tree t;
2099 size_t len;
2101 /* Collect the operand name. */
2102 q = strchr (p, ']');
2103 if (!q)
2105 error ("missing close brace for named operand");
2106 return strchr (p, '\0');
2108 len = q - p - 1;
2110 /* Resolve the name to a number. */
2111 for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
2113 const char *c = IDENTIFIER_POINTER (TREE_PURPOSE (TREE_PURPOSE (t)));
2114 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2115 goto found;
2117 for (t = inputs; t ; t = TREE_CHAIN (t), op++)
2119 const char *c = IDENTIFIER_POINTER (TREE_PURPOSE (TREE_PURPOSE (t)));
2120 if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2121 goto found;
2123 *q = '\0';
2125 error ("undefined named operand '%s'", p + 1);
2127 found:
2129 /* Replace the name with the number. Unfortunately, not all libraries
2130 get the return value of sprintf correct, so search for the end of the
2131 generated string by hand. */
2132 sprintf (p, "%d", op);
2133 p = strchr (p, '\0');
2135 /* Verify the no-extra-buffer-space assumption. */
2136 if (p > q)
2137 abort ();
2139 /* Shift the rest of the buffer down to fill the gap. */
2140 memmove (p, q + 1, strlen (q + 1) + 1);
2142 return p;
2145 /* Generate RTL to evaluate the expression EXP
2146 and remember it in case this is the VALUE in a ({... VALUE; }) construct. */
2149 expand_expr_stmt (exp)
2150 tree exp;
2152 bool want_value = last_expr_value != NULL_RTX;
2154 /* If -W, warn about statements with no side effects,
2155 except for an explicit cast to void (e.g. for assert()), and
2156 except inside a ({...}) where they may be useful. */
2157 if (expr_stmts_for_value == 0 && exp != error_mark_node)
2159 if (! TREE_SIDE_EFFECTS (exp))
2161 if ((extra_warnings || warn_unused_value)
2162 && !(TREE_CODE (exp) == CONVERT_EXPR
2163 && VOID_TYPE_P (TREE_TYPE (exp))))
2164 warning_with_file_and_line (emit_filename, emit_lineno,
2165 "statement with no effect");
2167 else if (warn_unused_value)
2168 warn_if_unused_value (exp);
2171 /* If EXP is of function type and we are expanding statements for
2172 value, convert it to pointer-to-function. */
2173 if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
2174 exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2176 /* The call to `expand_expr' could cause last_expr_type and
2177 last_expr_value to get reset. Therefore, we set last_expr_value
2178 and last_expr_type *after* calling expand_expr. */
2179 last_expr_value = expand_expr (exp,
2180 (want_value && expr_stmts_for_value
2181 ? NULL_RTX : const0_rtx),
2183 last_expr_type = TREE_TYPE (exp);
2185 /* If all we do is reference a volatile value in memory,
2186 copy it to a register to be sure it is actually touched. */
2187 if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
2188 && TREE_THIS_VOLATILE (exp))
2190 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
2192 else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
2193 last_expr_value = copy_to_reg (last_expr_value);
2194 else
2195 {
2196 rtx lab = gen_label_rtx ();
2198 /* Compare the value with itself to reference it. */
2199 emit_cmp_and_jump_insns (last_expr_value, last_expr_value, EQ,
2200 expand_expr (TYPE_SIZE (last_expr_type),
2201 NULL_RTX, VOIDmode, 0),
2202 BLKmode, 0, lab);
2203 emit_label (lab);
2204 }
2207 /* If this expression is part of a ({...}) and is in memory, we may have
2208 to preserve temporaries. */
2209 preserve_temp_slots (last_expr_value);
2211 /* Free any temporaries used to evaluate this expression. Any temporary
2212 used as a result of this expression will already have been preserved
2213 above. */
2216 if (! want_value && last_expr_value)
2218 protect_from_queue (last_expr_value, 0);
2219 last_expr_value = NULL_RTX;
2221 else if (want_value && ! last_expr_value)
2222 last_expr_value = const0_rtx;
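/* Illustrative example of the want_value logic above: for a bare
   statement such as

       f ();

   want_value is false and the call is expanded with const0_rtx as the
   target, i.e. its value is explicitly discarded. Inside a
   value-producing ({ f (); g (); }), each statement is expanded with
   NULL_RTX instead, and last_expr_value ends up holding the value of
   the final g () for expand_end_stmt_expr to package up. */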
2227 /* Warn if EXP contains any computations whose results are not used.
2228 Return 1 if a warning is printed; 0 otherwise. */
2231 warn_if_unused_value (exp)
2232 tree exp;
2234 if (TREE_USED (exp))
2235 return 0;
2237 /* Don't warn about void constructs. This includes casting to void,
2238 void function calls, and statement expressions with a final cast
2239 to void. */
2240 if (VOID_TYPE_P (TREE_TYPE (exp)))
2241 return 0;
2243 /* If this is an expression with side effects, don't warn. */
2244 if (TREE_SIDE_EFFECTS (exp))
2247 switch (TREE_CODE (exp))
2249 case PREINCREMENT_EXPR:
2250 case POSTINCREMENT_EXPR:
2251 case PREDECREMENT_EXPR:
2252 case POSTDECREMENT_EXPR:
2257 case METHOD_CALL_EXPR:
2259 case TRY_CATCH_EXPR:
2260 case WITH_CLEANUP_EXPR:
2265 /* For a binding, warn if no side effect within it. */
2266 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2269 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2271 case TRUTH_ORIF_EXPR:
2272 case TRUTH_ANDIF_EXPR:
2273 /* In && or ||, warn if 2nd operand has no side effect. */
2274 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2277 if (TREE_NO_UNUSED_WARNING (exp))
2278 return 0;
2279 if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2280 return 1;
2281 /* Let people do `(foo (), 0)' without a warning. */
2282 if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2283 return 0;
2284 return warn_if_unused_value (TREE_OPERAND (exp, 1));
2288 case NON_LVALUE_EXPR:
2289 /* Don't warn about conversions not explicit in the user's program. */
2290 if (TREE_NO_UNUSED_WARNING (exp))
2291 return 0;
2292 /* Assignment to a cast usually results in a cast of a modify.
2293 Don't complain about that. There can be an arbitrary number of
2294 casts before the modify, so we must loop until we find the first
2295 non-cast expression and then test to see if that is a modify. */
2297 tree tem = TREE_OPERAND (exp, 0);
2299 while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2300 tem = TREE_OPERAND (tem, 0);
2302 if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2303 || TREE_CODE (tem) == CALL_EXPR)
2304 return 0;
2309 /* Don't warn about automatic dereferencing of references, since
2310 the user cannot control it. */
2311 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2312 return warn_if_unused_value (TREE_OPERAND (exp, 0));
2316 /* Referencing a volatile value is a side effect, so don't warn. */
2317 if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
2318 || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2319 && TREE_THIS_VOLATILE (exp))
2320 return 0;
2322 /* If this is an expression which has no operands, there is no value
2323 to be unused. There are no such language-independent codes,
2324 but front ends may define such. */
2325 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2326 && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2327 return 0;
2330 warning_with_file_and_line (emit_filename, emit_lineno,
2331 "value computed is not used");
2336 /* Clear out the memory of the last expression evaluated. */
2344 /* Begin a statement which will return a value.
2345 Return the RTL_EXPR for this statement expr.
2346 The caller must save that value and pass it to expand_end_stmt_expr. */
2349 expand_start_stmt_expr (want_value)
2350 int want_value;
2352 tree t;
2354 /* Make the RTL_EXPR node temporary, not momentary,
2355 so that rtl_expr_chain doesn't become garbage. */
2356 t = make_node (RTL_EXPR);
2357 do_pending_stack_adjust ();
2358 start_sequence_for_rtl_expr (t);
2360 expr_stmts_for_value++;
2361 last_expr_value = want_value ? const0_rtx : NULL_RTX;
2362 return t;
2365 /* Restore the previous state at the end of a statement that returns a value.
2366 Returns a tree node representing the statement's value and the
2367 insns to compute the value.
2369 The nodes of that expression have been freed by now, so we cannot use them.
2370 But we don't want to do that anyway; the expression has already been
2371 evaluated and now we just want to use the value. So generate a RTL_EXPR
2372 with the proper type and RTL value.
2374 If the last substatement was not an expression,
2375 return something with type `void'. */
2378 expand_end_stmt_expr (t)
2379 tree t;
2383 if (last_expr_type == 0)
2385 last_expr_type = void_type_node;
2386 last_expr_value = const0_rtx;
2388 else if (last_expr_value == 0)
2389 /* There are some cases where this can happen, such as when the
2390 statement has void type. */
2391 last_expr_value = const0_rtx;
2392 else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2393 /* Remove any possible QUEUED. */
2394 last_expr_value = protect_from_queue (last_expr_value, 0);
2398 TREE_TYPE (t) = last_expr_type;
2399 RTL_EXPR_RTL (t) = last_expr_value;
2400 RTL_EXPR_SEQUENCE (t) = get_insns ();
2402 rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2406 /* Don't consider deleting this expr or containing exprs at tree level. */
2407 TREE_SIDE_EFFECTS (t) = 1;
2408 /* Propagate volatility of the actual RTL expr. */
2409 TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2412 expr_stmts_for_value--;
2414 return t;
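/* Sketch of the statement-expression protocol (illustrative):

       tree t = expand_start_stmt_expr (1);
       expand_expr_stmt (stmt1);
       expand_expr_stmt (stmt2);
       expand_expr_stmt (value_expr);
       t = expand_end_stmt_expr (t);

   The caller then treats T, an RTL_EXPR carrying the emitted insns and
   the value left in last_expr_value, as an ordinary expression. */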
2417 /* Generate RTL for the start of an if-then. COND is the expression
2418 whose truth should be tested.
2420 If EXITFLAG is nonzero, this conditional is visible to
2421 `exit_something'. */
2424 expand_start_cond (cond, exitflag)
2425 tree cond;
2426 int exitflag;
2428 struct nesting *thiscond = ALLOC_NESTING ();
2430 /* Make an entry on cond_stack for the cond we are entering. */
2432 thiscond->next = cond_stack;
2433 thiscond->all = nesting_stack;
2434 thiscond->depth = ++nesting_depth;
2435 thiscond->data.cond.next_label = gen_label_rtx ();
2436 /* Before we encounter an `else', we don't need a separate exit label
2437 unless there are supposed to be exit statements
2438 to exit this conditional. */
2439 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2440 thiscond->data.cond.endif_label = thiscond->exit_label;
2441 cond_stack = thiscond;
2442 nesting_stack = thiscond;
2444 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2447 /* Generate RTL between then-clause and the elseif-clause
2448 of an if-then-elseif-.... */
2451 expand_start_elseif (cond)
2452 tree cond;
2454 if (cond_stack->data.cond.endif_label == 0)
2455 cond_stack->data.cond.endif_label = gen_label_rtx ();
2456 emit_jump (cond_stack->data.cond.endif_label);
2457 emit_label (cond_stack->data.cond.next_label);
2458 cond_stack->data.cond.next_label = gen_label_rtx ();
2459 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2462 /* Generate RTL between the then-clause and the else-clause
2463 of an if-then-else. */
2466 expand_start_else ()
2468 if (cond_stack->data.cond.endif_label == 0)
2469 cond_stack->data.cond.endif_label = gen_label_rtx ();
2471 emit_jump (cond_stack->data.cond.endif_label);
2472 emit_label (cond_stack->data.cond.next_label);
2473 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
2476 /* After calling expand_start_else, turn this "else" into an "else if"
2477 by providing another condition. */
2480 expand_elseif (cond)
2481 tree cond;
2483 cond_stack->data.cond.next_label = gen_label_rtx ();
2484 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2487 /* Generate RTL for the end of an if-then.
2488 Pop the record for it off of cond_stack. */
2491 expand_end_cond ()
2493 struct nesting *thiscond = cond_stack;
2495 do_pending_stack_adjust ();
2496 if (thiscond->data.cond.next_label)
2497 emit_label (thiscond->data.cond.next_label);
2498 if (thiscond->data.cond.endif_label)
2499 emit_label (thiscond->data.cond.endif_label);
2501 POPSTACK (cond_stack);
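/* Sketch of a full if / else if / else expansion (illustrative):

       expand_start_cond (cond1, 0);
       ... expand statements of the first then-clause ...
       expand_start_elseif (cond2);
       ... expand statements of the second then-clause ...
       expand_start_else ();
       ... expand statements of the else-clause ...
       expand_end_cond ();  */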
2505 /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this
2506 loop should be exited by `exit_something'. This is a loop for which
2507 `expand_continue' will jump to the top of the loop.
2509 Make an entry on loop_stack to record the labels associated with
2510 this loop. */
2513 expand_start_loop (exit_flag)
2514 int exit_flag;
2516 struct nesting *thisloop = ALLOC_NESTING ();
2518 /* Make an entry on loop_stack for the loop we are entering. */
2520 thisloop->next = loop_stack;
2521 thisloop->all = nesting_stack;
2522 thisloop->depth = ++nesting_depth;
2523 thisloop->data.loop.start_label = gen_label_rtx ();
2524 thisloop->data.loop.end_label = gen_label_rtx ();
2525 thisloop->data.loop.alt_end_label = 0;
2526 thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2527 thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2528 loop_stack = thisloop;
2529 nesting_stack = thisloop;
2531 do_pending_stack_adjust ();
2533 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2534 emit_label (thisloop->data.loop.start_label);
2536 return thisloop;
2539 /* Like expand_start_loop but for a loop where the continuation point
2540 (for expand_continue_loop) will be specified explicitly. */
2543 expand_start_loop_continue_elsewhere (exit_flag)
2544 int exit_flag;
2546 struct nesting *thisloop = expand_start_loop (exit_flag);
2547 loop_stack->data.loop.continue_label = gen_label_rtx ();
2548 return thisloop;
2551 /* Begin a null, aka do { } while (0) "loop". But since the contents
2552 of said loop can still contain a break, we must frob the loop nest. */
2555 expand_start_null_loop ()
2557 struct nesting *thisloop = ALLOC_NESTING ();
2559 /* Make an entry on loop_stack for the loop we are entering. */
2561 thisloop->next = loop_stack;
2562 thisloop->all = nesting_stack;
2563 thisloop->depth = ++nesting_depth;
2564 thisloop->data.loop.start_label = emit_note (NULL, NOTE_INSN_DELETED);
2565 thisloop->data.loop.end_label = gen_label_rtx ();
2566 thisloop->data.loop.alt_end_label = NULL_RTX;
2567 thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2568 thisloop->exit_label = thisloop->data.loop.end_label;
2569 loop_stack = thisloop;
2570 nesting_stack = thisloop;
2572 return thisloop;
2575 /* Specify the continuation point for a loop started with
2576 expand_start_loop_continue_elsewhere.
2577 Use this at the point in the code to which a continue statement
2578 should jump. */
2581 expand_loop_continue_here ()
2583 do_pending_stack_adjust ();
2584 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2585 emit_label (loop_stack->data.loop.continue_label);
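/* Sketch of typical usage (illustrative): a front end can expand
   `while (cond) body' as

       expand_start_loop (1);
       expand_exit_loop_if_false (0, cond);
       ... expand body ...
       expand_end_loop ();

   and `for (; cond; incr) body', whose continue point precedes INCR
   rather than the loop top, as

       expand_start_loop_continue_elsewhere (1);
       expand_exit_loop_if_false (0, cond);
       ... expand body ...
       expand_loop_continue_here ();
       ... expand incr ...
       expand_end_loop ();  */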
2588 /* Finish a loop. Generate a jump back to the top and the loop-exit label.
2589 Pop the block off of loop_stack. */
2592 expand_end_loop ()
2594 rtx start_label = loop_stack->data.loop.start_label;
2595 rtx insn = get_last_insn ();
2596 int needs_end_jump = 1;
2598 /* Mark the continue-point at the top of the loop if none elsewhere. */
2599 if (start_label == loop_stack->data.loop.continue_label)
2600 emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2602 do_pending_stack_adjust ();
2604 /* If optimizing, perhaps reorder the loop.
2605 First, try to use a condjump near the end.
2606 expand_exit_loop_if_false ends loops with unconditional jumps,
2607 like this:
2609 if (test) goto label;
2610 optional: cleanup
2611 goto loop_stack->data.loop.end_label
2612 barrier
2613 label:
2615 If we find such a pattern, we can end the loop earlier. */
2617 if (optimize
2618 && GET_CODE (insn) == CODE_LABEL
2619 && LABEL_NAME (insn) == NULL
2620 && GET_CODE (PREV_INSN (insn)) == BARRIER)
2622 rtx label = insn;
2623 rtx jump = PREV_INSN (PREV_INSN (label));
2625 if (GET_CODE (jump) == JUMP_INSN
2626 && GET_CODE (PATTERN (jump)) == SET
2627 && SET_DEST (PATTERN (jump)) == pc_rtx
2628 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
2629 && (XEXP (SET_SRC (PATTERN (jump)), 0)
2630 == loop_stack->data.loop.end_label))
2634 /* The test might be complex and reference LABEL multiple times,
2635 like the loop in loop_iterations to set vtop. To handle this,
2636 we move LABEL. */
2637 insn = PREV_INSN (label);
2638 reorder_insns (label, label, start_label);
2640 for (prev = PREV_INSN (jump);; prev = PREV_INSN (prev))
2642 /* We ignore line number notes, but if we see any other note,
2643 in particular NOTE_INSN_BLOCK_*, NOTE_INSN_EH_REGION_*,
2644 NOTE_INSN_LOOP_*, we disable this optimization. */
2645 if (GET_CODE (prev) == NOTE)
2647 if (NOTE_LINE_NUMBER (prev) < 0)
2651 if (GET_CODE (prev) == CODE_LABEL)
2653 if (GET_CODE (prev) == JUMP_INSN)
2655 if (GET_CODE (PATTERN (prev)) == SET
2656 && SET_DEST (PATTERN (prev)) == pc_rtx
2657 && GET_CODE (SET_SRC (PATTERN (prev))) == IF_THEN_ELSE
2658 && (GET_CODE (XEXP (SET_SRC (PATTERN (prev)), 1))
2659 == LABEL_REF)
2660 && XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0) == label)
2662 XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0)
2663 = loop_stack->data.loop.end_label;
2664 emit_note_after (NOTE_INSN_LOOP_END, prev);
2665 needs_end_jump = 0;
2673 /* If the loop starts with a loop exit, roll that to the end where
2674 it will optimize together with the jump back.
2676 We look for the conditional branch to the exit, except that once
2677 we find such a branch, we don't look past 30 instructions.
2679 In more detail, if the loop presently looks like this (in pseudo-C):
2681 start_label:
2682 if (test) goto end_label;
2683 body;
2684 goto start_label;
2685 end_label:
2687 transform it to look like:
2689 goto start_label;
2690 newstart_label:
2691 body;
2692 start_label:
2693 if (test) goto end_label;
2694 goto newstart_label;
2695 end_label:
2697 Here, the `test' may actually consist of some reasonably complex
2698 code, terminating in a test. */
2701 if (optimize
2702 &&
2703 ! (GET_CODE (insn) == JUMP_INSN
2704 && GET_CODE (PATTERN (insn)) == SET
2705 && SET_DEST (PATTERN (insn)) == pc_rtx
2706 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2708 int eh_regions = 0;
2709 int num_insns = 0;
2710 rtx last_test_insn = NULL_RTX;
2712 /* Scan insns from the top of the loop looking for a qualified
2713 conditional exit. */
2714 for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2715 insn = NEXT_INSN (insn))
2717 if (GET_CODE (insn) == NOTE)
2719 if (optimize < 2
2720 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2721 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2722 /* The code that actually moves the exit test will
2723 carefully leave BLOCK notes in their original
2724 location. That means, however, that we can't debug
2725 the exit test itself. So, we refuse to move code
2726 containing BLOCK notes at low optimization levels. */
2727 break;
2729 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2730 ++eh_regions;
2731 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
2733 --eh_regions;
2734 if (eh_regions < 0)
2735 /* We've come to the end of an EH region, but
2736 never saw the beginning of that region. That
2737 means that an EH region begins before the top
2738 of the loop, and ends in the middle of it. The
2739 existence of such a situation violates a basic
2740 assumption in this code, since that would imply
2741 that even when EH_REGIONS is zero, we might
2742 move code out of an exception region. */
2743 abort ();
2746 /* We must not walk into a nested loop. */
2747 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2748 break;
2750 /* We already know this INSN is a NOTE, so there's no
2751 point in looking at it to see if it's a JUMP. */
2752 continue;
2755 if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2756 num_insns++;
2758 if (last_test_insn && num_insns > 30)
2759 break;
2762 /* We don't want to move a partial EH region. Consider:
2764 while ( ( { try {
2765 if (cond ()) 0;
2766 else { bar (); 1; }
2767 } catch (...) { 1; } } )) {
2768 body;
2769 }
2776 This isn't legal C++, but here's what it's supposed to
2777 mean: if cond() is true, stop looping. Otherwise,
2778 call bar, and keep looping. In addition, if cond
2779 throws an exception, catch it and keep looping. Such
2780 constructs are certainly legal in LISP.
2782 We should not move the `if (cond()) 0' test since then
2783 the EH-region for the try-block would be broken up.
2784 (In this case we would move the EH_BEG note for the `try'
2785 and `if cond()' but not the call to bar() or the EH_END note.)
2788 So we don't look for tests within an EH region. */
2791 if (GET_CODE (insn) == JUMP_INSN
2792 && GET_CODE (PATTERN (insn)) == SET
2793 && SET_DEST (PATTERN (insn)) == pc_rtx)
2795 /* This is indeed a jump. */
2796 rtx dest1 = NULL_RTX;
2797 rtx dest2 = NULL_RTX;
2798 rtx potential_last_test;
2799 if (GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)
2801 /* A conditional jump. */
2802 dest1 = XEXP (SET_SRC (PATTERN (insn)), 1);
2803 dest2 = XEXP (SET_SRC (PATTERN (insn)), 2);
2804 potential_last_test = insn;
2808 /* An unconditional jump. */
2809 dest1 = SET_SRC (PATTERN (insn));
2810 /* Include the BARRIER after the JUMP. */
2811 potential_last_test = NEXT_INSN (insn);
2815 if (dest1 && GET_CODE (dest1) == LABEL_REF
2816 && ((XEXP (dest1, 0)
2817 == loop_stack->data.loop.alt_end_label)
2818 || (XEXP (dest1, 0)
2819 == loop_stack->data.loop.end_label)))
2821 last_test_insn = potential_last_test;
2825 /* If this was a conditional jump, there may be
2826 another label at which we should look. */
2833 if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2835 /* We found one. Move everything from there up
2836 to the end of the loop, and add a jump into the loop
2837 to jump to there. */
2838 rtx newstart_label = gen_label_rtx ();
2839 rtx start_move = start_label;
2840 rtx next_insn;
2842 /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2843 then we want to move this note also. */
2844 if (GET_CODE (PREV_INSN (start_move)) == NOTE
2845 && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2846 == NOTE_INSN_LOOP_CONT))
2847 start_move = PREV_INSN (start_move);
2849 emit_label_after (newstart_label, PREV_INSN (start_move));
2851 /* Actually move the insns. Start at the beginning, and
2852 keep copying insns until we've copied the last_test_insn. */
2854 for (insn = start_move; insn; insn = next_insn)
2856 /* Figure out which insn comes after this one. We have
2857 to do this before we move INSN. */
2858 if (insn == last_test_insn)
2859 /* We've moved all the insns. */
2860 next_insn = NULL_RTX;
2861 else
2862 next_insn = NEXT_INSN (insn);
2864 if (GET_CODE (insn) == NOTE
2865 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2866 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2867 /* We don't want to move NOTE_INSN_BLOCK_BEGs or
2868 NOTE_INSN_BLOCK_ENDs because the correct generation
2869 of debugging information depends on these appearing
2870 in the same order in the RTL and in the tree
2871 structure, where they are represented as BLOCKs.
2872 So, we don't move block notes. Of course, moving
2873 the code inside the block is likely to make it
2874 impossible to debug the instructions in the exit
2875 test, but such is the price of optimization. */
2876 continue;
2878 /* Move the INSN. */
2879 reorder_insns (insn, insn, get_last_insn ());
2882 emit_jump_insn_after (gen_jump (start_label),
2883 PREV_INSN (newstart_label));
2884 emit_barrier_after (PREV_INSN (newstart_label));
2885 start_label = newstart_label;
2889 if (needs_end_jump)
2890 {
2891 emit_jump (start_label);
2892 emit_note (NULL, NOTE_INSN_LOOP_END);
2893 }
2894 emit_label (loop_stack->data.loop.end_label);
2896 POPSTACK (loop_stack);
2901 /* Finish a null loop, aka do { } while (0). */
2904 expand_end_null_loop ()
2906 do_pending_stack_adjust ();
2907 emit_label (loop_stack->data.loop.end_label);
2909 POPSTACK (loop_stack);
2914 /* Generate a jump to the current loop's continue-point.
2915 This is usually the top of the loop, but may be specified
2916 explicitly elsewhere. If not currently inside a loop,
2917 return 0 and do nothing; caller will print an error message. */
2920 expand_continue_loop (whichloop)
2921 struct nesting *whichloop;
2924 if (whichloop == 0)
2925 whichloop = loop_stack;
2926 if (whichloop == 0)
2927 return 0;
2928 expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2929 NULL_RTX);
2930 return 1;
2933 /* Generate a jump to exit the current loop. If not currently inside a loop,
2934 return 0 and do nothing; caller will print an error message. */
2937 expand_exit_loop (whichloop)
2938 struct nesting *whichloop;
2941 if (whichloop == 0)
2942 whichloop = loop_stack;
2943 if (whichloop == 0)
2944 return 0;
2945 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2946 return 1;
2949 /* Generate a conditional jump to exit the current loop if COND
2950 evaluates to zero. If not currently inside a loop,
2951 return 0 and do nothing; caller will print an error message. */
2954 expand_exit_loop_if_false (whichloop, cond)
2955 struct nesting *whichloop;
2958 rtx label = gen_label_rtx ();
2959 rtx last_insn;
2962 if (whichloop == 0)
2963 whichloop = loop_stack;
2964 if (whichloop == 0)
2965 return 0;
2966 /* In order to handle fixups, we actually create a conditional jump
2967 around an unconditional branch to exit the loop. If fixups are
2968 necessary, they go before the unconditional branch. */
2970 do_jump (cond, NULL_RTX, label);
2971 last_insn = get_last_insn ();
2972 if (GET_CODE (last_insn) == CODE_LABEL)
2973 whichloop->data.loop.alt_end_label = last_insn;
2974 expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2975 NULL_RTX);
2976 emit_label (label);
2978 return 1;
2981 /* Return nonzero if the loop nest is empty. Else return zero. */
2984 stmt_loop_nest_empty ()
2986 /* cfun->stmt can be NULL if we are building a call to get the
2987 EH context for a setjmp/longjmp EH target and the current
2988 function was a deferred inline function. */
2989 return (cfun->stmt == NULL || loop_stack == NULL);
2992 /* Return non-zero if we should preserve sub-expressions as separate
2993 pseudos. We never do so if we aren't optimizing. We always do so
2994 if -fexpensive-optimizations.
2996 Otherwise, we only do so if we are in the "early" part of a loop. I.e.,
2997 the loop may still be a small one. */
3000 preserve_subexpressions_p ()
3002 rtx insn;
3004 if (flag_expensive_optimizations)
3005 return 1;
3007 if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
3008 return 0;
3010 insn = get_last_insn_anywhere ();
3012 return (insn
3013 && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
3014 < n_non_fixed_regs * 3));
3018 /* Generate a jump to exit the current loop, conditional, binding contour
3019 or case statement. Not all such constructs are visible to this function,
3020 only those started with EXIT_FLAG nonzero. Individual languages use
3021 the EXIT_FLAG parameter to control which kinds of constructs you can
3022 exit this way.
3024 If not currently inside anything that can be exited,
3025 return 0 and do nothing; caller will print an error message. */
3028 expand_exit_something ()
3030 struct nesting *n;
3032 for (n = nesting_stack; n; n = n->all)
3033 if (n->exit_label != 0)
3034 {
3035 expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
3036 return 1;
3037 }
3039 return 0;
3042 /* Generate RTL to return from the current function, with no value.
3043 (That is, we do not do anything about returning any value.) */
3046 expand_null_return ()
3048 rtx last_insn = get_last_insn ();
3050 /* If this function was declared to return a value, but we
3051 didn't, clobber the return registers so that they are not
3052 propagated live to the rest of the function. */
3053 clobber_return_register ();
3055 expand_null_return_1 (last_insn);
3058 /* Generate RTL to return from the current function, with value VAL. */
3061 expand_value_return (val)
3062 rtx val;
3064 rtx last_insn = get_last_insn ();
3065 rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
3067 /* Copy the value to the return location
3068 unless it's already there. */
3070 if (return_reg != val)
3072 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
3073 #ifdef PROMOTE_FUNCTION_RETURN
3074 int unsignedp = TREE_UNSIGNED (type);
3075 enum machine_mode old_mode
3076 = DECL_MODE (DECL_RESULT (current_function_decl));
3077 enum machine_mode mode
3078 = promote_mode (type, old_mode, &unsignedp, 1);
3080 if (mode != old_mode)
3081 val = convert_modes (mode, old_mode, val, unsignedp);
3082 #endif
3083 if (GET_CODE (return_reg) == PARALLEL)
3084 emit_group_load (return_reg, val, int_size_in_bytes (type));
3085 else
3086 emit_move_insn (return_reg, val);
3089 expand_null_return_1 (last_insn);
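/* Worked example for the promotion above (illustrative): on a target
   whose PROMOTE_FUNCTION_RETURN widens sub-word return values, a
   function declared to return `short' has old_mode == HImode while
   promote_mode yields SImode; convert_modes then sign- or zero-extends
   VAL, according to TREE_UNSIGNED of the result type, before the move
   into the return register. */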
3092 /* Output a return with no value. If LAST_INSN is nonzero,
3093 pretend that the return takes place after LAST_INSN. */
3096 expand_null_return_1 (last_insn)
3097 rtx last_insn;
3099 rtx end_label = cleanup_label ? cleanup_label : return_label;
3101 clear_pending_stack_adjust ();
3102 do_pending_stack_adjust ();
3105 if (end_label == 0)
3106 end_label = return_label = gen_label_rtx ();
3107 expand_goto_internal (NULL_TREE, end_label, last_insn);
3110 /* Generate RTL to evaluate the expression RETVAL and return it
3111 from the current function. */
3114 expand_return (retval)
3115 tree retval;
3117 /* If there are any cleanups to be performed, then they will
3118 be inserted following LAST_INSN. It is desirable
3119 that the last_insn, for such purposes, should be the
3120 last insn before computing the return value. Otherwise, cleanups
3121 which call functions can clobber the return value. */
3122 /* ??? rms: I think that is erroneous, because in C++ it would
3123 run destructors on variables that might be used in the subsequent
3124 computation of the return value. */
3126 rtx last_insn = 0;
3127 rtx result_rtl;
3128 rtx val = 0;
3129 tree retval_rhs;
3130 /* If function wants no value, give it none. */
3131 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3133 expand_expr (retval, NULL_RTX, VOIDmode, 0);
3135 expand_null_return ();
3136 return;
3139 if (retval == error_mark_node)
3141 /* Treat this like a return of no value from a function that
3142 returns a value. */
3143 expand_null_return ();
3144 return;
3146 else if (TREE_CODE (retval) == RESULT_DECL)
3147 retval_rhs = retval;
3148 else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
3149 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3150 retval_rhs = TREE_OPERAND (retval, 1);
3151 else if (VOID_TYPE_P (TREE_TYPE (retval)))
3152 /* Recognize tail-recursive call to void function. */
3153 retval_rhs = retval;
3155 retval_rhs = NULL_TREE;
3157 last_insn = get_last_insn ();
3159 /* Distribute return down conditional expr if either of the sides
3160 may involve tail recursion (see test below). This enhances the number
3161 of tail recursions we see. Don't do this always since it can produce
3162 sub-optimal code in some cases and we distribute assignments into
3163 conditional expressions when it would help. */
3165 if (optimize && retval_rhs != 0
3166 && frame_offset == 0
3167 && TREE_CODE (retval_rhs) == COND_EXPR
3168 && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
3169 || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
3171 rtx label = gen_label_rtx ();
3172 tree expr;
3174 do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
3175 start_cleanup_deferral ();
3176 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3177 DECL_RESULT (current_function_decl),
3178 TREE_OPERAND (retval_rhs, 1));
3179 TREE_SIDE_EFFECTS (expr) = 1;
3180 expand_return (expr);
3182 emit_label (label);
3183 expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3184 DECL_RESULT (current_function_decl),
3185 TREE_OPERAND (retval_rhs, 2));
3186 TREE_SIDE_EFFECTS (expr) = 1;
3187 expand_return (expr);
3188 end_cleanup_deferral ();
3189 return;
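/* The distribution above rewrites (illustrative)

       return cond ? f (x) : g (x);

   as the equivalent of

       if (cond) return f (x); else return g (x);

   so that each arm becomes a direct `return <call>' that the
   tail-recursion machinery below can recognize on its own. */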
3192 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3194 /* If the result is an aggregate that is being returned in one (or more)
3195 registers, load the registers here. The compiler currently can't handle
3196 copying a BLKmode value into registers. We could put this code in a
3197 more general area (for use by everyone instead of just function
3198 call/return), but until this feature is generally usable it is kept here
3199 (and in expand_call). The value must go into a pseudo in case there
3200 are cleanups that will clobber the real return register. */
3202 if (retval_rhs != 0
3203 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3204 && GET_CODE (result_rtl) == REG)
3207 unsigned HOST_WIDE_INT bitpos, xbitpos;
3208 unsigned HOST_WIDE_INT big_endian_correction = 0;
3209 unsigned HOST_WIDE_INT bytes
3210 = int_size_in_bytes (TREE_TYPE (retval_rhs));
3211 int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3212 unsigned int bitsize
3213 = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
3214 rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
3215 rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
3216 rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
3217 enum machine_mode tmpmode, result_reg_mode;
3219 if (bytes == 0)
3220 {
3221 expand_null_return ();
3222 return;
3223 }
3225 /* Structures whose size is not a multiple of a word are aligned
3226 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
3227 machine, this means we must skip the empty high order bytes when
3228 calculating the bit offset. */
3229 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
3230 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3231 * BITS_PER_UNIT));
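/* Worked example (illustrative): for a 6-byte structure on a
   big-endian machine with UNITS_PER_WORD == 4 and BITS_PER_UNIT == 8,
   bytes % UNITS_PER_WORD == 2, so big_endian_correction is
   32 - 2*8 == 16: the first 16 bits of the destination word are
   skipped and the value lands right-justified. */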
3233 /* Copy the structure BITSIZE bits at a time. */
3234 for (bitpos = 0, xbitpos = big_endian_correction;
3235 bitpos < bytes * BITS_PER_UNIT;
3236 bitpos += bitsize, xbitpos += bitsize)
3238 /* We need a new destination pseudo each time xbitpos is
3239 on a word boundary and when xbitpos == big_endian_correction
3240 (the first time through). */
3241 if (xbitpos % BITS_PER_WORD == 0
3242 || xbitpos == big_endian_correction)
3244 /* Generate an appropriate register. */
3245 dst = gen_reg_rtx (word_mode);
3246 result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3248 /* Clobber the destination before we move anything into it. */
3249 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
3252 /* We need a new source operand each time bitpos is on a word
3253 boundary. */
3254 if (bitpos % BITS_PER_WORD == 0)
3255 src = operand_subword_force (result_val,
3256 bitpos / BITS_PER_WORD,
3257 BLKmode);
3259 /* Use bitpos for the source extraction (left justified) and
3260 xbitpos for the destination store (right justified). */
3261 store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3262 extract_bit_field (src, bitsize,
3263 bitpos % BITS_PER_WORD, 1,
3264 NULL_RTX, word_mode, word_mode,
3269 /* Find the smallest integer mode large enough to hold the
3270 entire structure and use that mode instead of BLKmode
3271 on the USE insn for the return register. */
3272 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3273 tmpmode != VOIDmode;
3274 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3275 /* Have we found a large enough mode? */
3276 if (GET_MODE_SIZE (tmpmode) >= bytes)
3279 /* No suitable mode found. */
3280 if (tmpmode == VOIDmode)
3281 abort ();
3283 PUT_MODE (result_rtl, tmpmode);
3285 if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3286 result_reg_mode = word_mode;
3288 result_reg_mode = tmpmode;
3289 result_reg = gen_reg_rtx (result_reg_mode);
3292 for (i = 0; i < n_regs; i++)
3293 emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3294 result_pseudos[i]);
3296 if (tmpmode != result_reg_mode)
3297 result_reg = gen_lowpart (tmpmode, result_reg);
3299 expand_value_return (result_reg);
3301 else if (retval_rhs != 0
3302 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3303 && (GET_CODE (result_rtl) == REG
3304 || (GET_CODE (result_rtl) == PARALLEL)))
3306 /* Calculate the return value into a temporary (usually a pseudo
3307 reg). */
3308 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3309 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3311 val = assign_temp (nt, 0, 0, 1);
3312 val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3313 val = force_not_mem (val);
3315 /* Return the calculated value, doing cleanups first. */
3316 expand_value_return (val);
3320 /* No cleanups or no hard reg used;
3321 calculate value into hard return reg. */
3322 expand_expr (retval, const0_rtx, VOIDmode, 0);
3324 expand_value_return (result_rtl);
3328 /* Return 1 if the end of the generated RTX is not a barrier.
3329 This means code already compiled can drop through. */
3332 drop_through_at_end_p ()
3334 rtx insn = get_last_insn ();
3335 while (insn && GET_CODE (insn) == NOTE)
3336 insn = PREV_INSN (insn);
3337 return insn && GET_CODE (insn) != BARRIER;
3340 /* Attempt to optimize a potential tail recursion call into a goto.
3341 ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3342 where to place the jump to the tail recursion label.
3344 Return TRUE if the call was optimized into a goto. */
3347 optimize_tail_recursion (arguments, last_insn)
3348 tree arguments;
3349 rtx last_insn;
3351 /* Finish checking validity, and if valid emit code to set the
3352 argument variables for the new call. */
3353 if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3355 if (tail_recursion_label == 0)
3357 tail_recursion_label = gen_label_rtx ();
3358 emit_label_after (tail_recursion_label,
3359 tail_recursion_reentry);
3362 expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3364 return 1;
3367 return 0;
3369 /* Emit code to alter this function's formal parms for a tail-recursive call.
3370 ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3371 FORMALS is the chain of decls of formals.
3372 Return 1 if this can be done;
3373 otherwise return 0 and do not emit any code. */
3376 tail_recursion_args (actuals, formals)
3377 tree actuals, formals;
3379 tree a = actuals, f = formals;
3383 /* Check that number and types of actuals are compatible
3384 with the formals. This is not always true in valid C code.
3385 Also check that no formal needs to be addressable
3386 and that all formals are scalars. */
3388 /* Also count the args. */
3390 for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3392 if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3393 != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3394 return 0;
3395 if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3396 return 0;
3398 if (a != 0 || f != 0)
3399 return 0;
3401 /* Compute all the actuals. */
3403 argvec = (rtx *) alloca (i * sizeof (rtx));
3405 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3406 argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3408 /* Find which actual values refer to current values of previous formals.
3409 Copy each of them now, before any formal is changed. */
3411 for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3413 int copy = 0, j;
3415 for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3416 if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3417 copy = 1;
3421 if (copy)
3422 argvec[i] = copy_to_reg (argvec[i]);
3425 /* Store the values of the actuals into the formals. */
3427 for (f = formals, a = actuals, i = 0; f;
3428 f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3430 if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3431 emit_move_insn (DECL_RTL (f), argvec[i]);
3432 else
3433 convert_move (DECL_RTL (f), argvec[i],
3434 TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3437 return 1;
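/* Worked example of the copying above (illustrative): expanding a
   tail-recursive call f (b, a) inside f (a, b), argvec[0] holds `b'
   and argvec[1] holds `a'. Since argvec[1] mentions the formal `a',
   which is overwritten first, it is copied to a fresh pseudo before
   the stores; argvec[0] needs no copy because no earlier formal
   appears in it. */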
3441 /* Generate the RTL code for entering a binding contour.
3442 The variables are declared one by one, by calls to `expand_decl'.
3444 FLAGS is a bitwise or of the following flags:
3446 1 - Nonzero if this construct should be visible to
3447 `exit_something'.
3449 2 - Nonzero if this contour does not require a
3450 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
3451 language-independent code should set this flag because they
3452 will not create corresponding BLOCK nodes. (There should be
3453 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3454 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
3455 when expand_end_bindings is called.
3457 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3458 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
3459 note. */
3462 expand_start_bindings_and_block (flags, block)
3463 int flags;
3464 tree block;
3466 struct nesting *thisblock = ALLOC_NESTING ();
3467 rtx note;
3468 int exit_flag = ((flags & 1) != 0);
3469 int block_flag = ((flags & 2) == 0);
3471 /* If a BLOCK is supplied, then the caller should be requesting a
3472 NOTE_INSN_BLOCK_BEG note. */
3473 if (!block_flag && block)
3474 abort ();
3476 /* Create a note to mark the beginning of the block. */
3477 if (block_flag)
3478 {
3479 note = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
3480 NOTE_BLOCK (note) = block;
3481 }
3482 else
3483 note = emit_note (NULL, NOTE_INSN_DELETED);
3485 /* Make an entry on block_stack for the block we are entering. */
3487 thisblock->next = block_stack;
3488 thisblock->all = nesting_stack;
3489 thisblock->depth = ++nesting_depth;
3490 thisblock->data.block.stack_level = 0;
3491 thisblock->data.block.cleanups = 0;
3492 thisblock->data.block.n_function_calls = 0;
3493 thisblock->data.block.exception_region = 0;
3494 thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3496 thisblock->data.block.conditional_code = 0;
3497 thisblock->data.block.last_unconditional_cleanup = note;
3498 /* When we insert instructions after the last unconditional cleanup,
3499 we don't adjust last_insn. That means that a later add_insn will
3500 clobber the instructions we've just added. The easiest way to
3501 fix this is to just insert another instruction here, so that the
3502 instructions inserted after the last unconditional cleanup are
3503 never the last instruction. */
3504 emit_note (NULL, NOTE_INSN_DELETED);
3505 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3507 if (block_stack
3508 && !(block_stack->data.block.cleanups == NULL_TREE
3509 && block_stack->data.block.outer_cleanups == NULL_TREE))
3510 thisblock->data.block.outer_cleanups
3511 = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3512 block_stack->data.block.outer_cleanups);
3513 else
3514 thisblock->data.block.outer_cleanups = 0;
3515 thisblock->data.block.label_chain = 0;
3516 thisblock->data.block.innermost_stack_block = stack_block_stack;
3517 thisblock->data.block.first_insn = note;
3518 thisblock->data.block.block_start_count = ++current_block_start_count;
3519 thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3520 block_stack = thisblock;
3521 nesting_stack = thisblock;
3523 /* Make a new level for allocating stack slots. */
3524 push_temp_slots ();
3527 /* Specify the scope of temporaries created by TARGET_EXPRs. Similar
3528 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3529 expand_expr are made. After we end the region, we know that all
3530 space for all temporaries that were created by TARGET_EXPRs will be
3531 destroyed and their space freed for reuse. */
3534 expand_start_target_temps ()
3536 /* This is so that even if the result is preserved, the space
3537 allocated will be freed, as we know that it is no longer in use. */
3538 push_temp_slots ();
3540 /* Start a new binding layer that will keep track of all cleanup
3541 actions to be performed. */
3542 expand_start_bindings (2);
3544 target_temp_slot_level = temp_slot_level;
3548 expand_end_target_temps ()
3550 expand_end_bindings (NULL_TREE, 0, 0);
3552 /* This is so that even if the result is preserved, the space
3553 allocated will be freed, as we know that it is no longer in use. */
3554 pop_temp_slots ();
3557 /* Given a pointer to a BLOCK node return non-zero if (and only if) the node
3558 in question represents the outermost pair of curly braces (i.e. the "body
3559 block") of a function or method.
3561 For any BLOCK node representing a "body block" of a function or method, the
3562 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3563 represents the outermost (function) scope for the function or method (i.e.
3564 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
3565 *that* node in turn will point to the relevant FUNCTION_DECL node. */
3568 is_body_block (stmt)
3569 tree stmt;
3571 if (TREE_CODE (stmt) == BLOCK)
3573 tree parent = BLOCK_SUPERCONTEXT (stmt);
3575 if (parent && TREE_CODE (parent) == BLOCK)
3577 tree grandparent = BLOCK_SUPERCONTEXT (parent);
3579 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3580 return 1;
3584 return 0;
3587 /* True if we are currently emitting insns in an area of output code
3588 that is controlled by a conditional expression. This is used by
3589 the cleanup handling code to generate conditional cleanup actions. */
3592 conditional_context ()
3594 return block_stack && block_stack->data.block.conditional_code;
3597 /* Return an opaque pointer to the current nesting level, so frontend code
3598 can check its own sanity. */
3601 current_nesting_level ()
3603 return cfun ? block_stack : 0;
3606 /* Emit a handler label for a nonlocal goto handler.
3607 Also emit code to store the handler label in SLOT before BEFORE_INSN. */
3610 expand_nl_handler_label (slot, before_insn)
3611 rtx slot, before_insn;
3613 rtx insns;
3614 rtx handler_label = gen_label_rtx ();
3616 /* Don't let cleanup_cfg delete the handler. */
3617 LABEL_PRESERVE_P (handler_label) = 1;
3619 start_sequence ();
3620 emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3621 insns = get_insns ();
3622 end_sequence ();
3623 emit_insns_before (insns, before_insn);
3625 emit_label (handler_label);
3627 return handler_label;
3630 /* Emit code to restore vital registers at the beginning of a nonlocal goto
3631 handler. */
3633 expand_nl_goto_receiver ()
3635 #ifdef HAVE_nonlocal_goto
3636 if (! HAVE_nonlocal_goto)
3638 /* First adjust our frame pointer to its actual value. It was
3639 previously set to the start of the virtual area corresponding to
3640 the stacked variables when we branched here and now needs to be
3641 adjusted to the actual hardware fp value.
3643 Assignments to virtual registers are converted by
3644 instantiate_virtual_regs into the corresponding assignment
3645 to the underlying register (fp in this case) that makes
3646 the original assignment true.
3647 So the following insn will actually be
3648 decrementing fp by STARTING_FRAME_OFFSET. */
3649 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3650 #endif
3651 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3652 if (fixed_regs[ARG_POINTER_REGNUM])
3654 #ifdef ELIMINABLE_REGS
3655 /* If the argument pointer can be eliminated in favor of the
3656 frame pointer, we don't need to restore it. We assume here
3657 that if such an elimination is present, it can always be used.
3658 This is the case on all known machines; if we don't make this
3659 assumption, we do unnecessary saving on many machines. */
3660 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
3663 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3664 if (elim_regs[i].from == ARG_POINTER_REGNUM
3665 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3668 if (i == ARRAY_SIZE (elim_regs))
3671 /* Now restore our arg pointer from the address at which it
3672 was saved in our stack frame. */
3673 emit_move_insn (virtual_incoming_args_rtx,
3674 copy_to_reg (get_arg_pointer_save_area (cfun)));
3679 #ifdef HAVE_nonlocal_goto_receiver
3680 if (HAVE_nonlocal_goto_receiver)
3681 emit_insn (gen_nonlocal_goto_receiver ());
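/* Context (illustrative, GNU C nested functions): a receiver is the
   landing point of a goto out of a nested function:

       void outer (void)
       {
         __label__ done;
         void inner (void) { goto done; }
         ...
         done:;
       }

   The goto in INNER is nonlocal; control re-enters OUTER here, so the
   frame pointer and argument pointer must be re-established before any
   of OUTER's stack variables are touched. */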
3685 /* Make handlers for nonlocal gotos taking place in the function calls in
3686 this block. */
3689 expand_nl_goto_receivers (thisblock)
3690 struct nesting *thisblock;
3693 rtx afterward = gen_label_rtx ();
3694 rtx insns, slot;
3695 rtx label_list;
3696 tree link;
3698 /* Record the handler address in the stack slot for that purpose,
3699 during this block, saving and restoring the outer value. */
3700 if (thisblock->next != 0)
3701 for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3703 rtx save_receiver = gen_reg_rtx (Pmode);
3704 emit_move_insn (XEXP (slot, 0), save_receiver);
3706 start_sequence ();
3707 emit_move_insn (save_receiver, XEXP (slot, 0));
3708 insns = get_insns ();
3709 end_sequence ();
3710 emit_insns_before (insns, thisblock->data.block.first_insn);
3713 /* Jump around the handlers; they run only when specially invoked. */
3714 emit_jump (afterward);
3716 /* Make a separate handler for each label. */
3717 link = nonlocal_labels;
3718 slot = nonlocal_goto_handler_slots;
3719 label_list = NULL_RTX;
3720 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3721 /* Skip any labels we shouldn't be able to jump to from here;
3722 we generate one special handler for all of them below, which just calls
3723 abort. */
3724 if (! DECL_TOO_LATE (TREE_VALUE (link)))
3726 rtx lab;
3727 lab = expand_nl_handler_label (XEXP (slot, 0),
3728 thisblock->data.block.first_insn);
3729 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3731 expand_nl_goto_receiver ();
3733 /* Jump to the "real" nonlocal label. */
3734 expand_goto (TREE_VALUE (link));
3737 /* A second pass over all nonlocal labels; this time we handle those
3738 we should not be able to jump to at this point. */
3739 link = nonlocal_labels;
3740 slot = nonlocal_goto_handler_slots;
3742 for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3743 if (DECL_TOO_LATE (TREE_VALUE (link)))
3745 rtx lab;
3746 lab = expand_nl_handler_label (XEXP (slot, 0),
3747 thisblock->data.block.first_insn);
3748 label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3754 expand_nl_goto_receiver ();
3755 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), LCT_NORETURN,
3756 VOIDmode, 0);
3760 nonlocal_goto_handler_labels = label_list;
3761 emit_label (afterward);
3764 /* Warn about any unused VARS (which may contain nodes other than
3765 VAR_DECLs, but such nodes are ignored). The nodes are connected
3766 via the TREE_CHAIN field. */
3769 warn_about_unused_variables (vars)
3770 tree vars;
3772 tree decl;
3774 if (warn_unused_variable)
3775 for (decl = vars; decl; decl = TREE_CHAIN (decl))
3776 if (TREE_CODE (decl) == VAR_DECL
3777 && ! TREE_USED (decl)
3778 && ! DECL_IN_SYSTEM_HEADER (decl)
3779 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3780 warning_with_decl (decl, "unused variable `%s'");
3783 /* Generate RTL code to terminate a binding contour.
3785 VARS is the chain of VAR_DECL nodes for the variables bound in this
3786 contour. There may actually be other nodes in this chain, but any
3787 nodes other than VAR_DECLs are ignored.
3789 MARK_ENDS is nonzero if we should put a note at the beginning
3790 and end of this binding contour.
3792 DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3793 (That is true automatically if the contour has a saved stack level.) */
3796 expand_end_bindings (vars, mark_ends, dont_jump_in)
3797 tree vars;
3798 int mark_ends;
3799 int dont_jump_in;
3801 struct nesting *thisblock = block_stack;
3803 /* If any of the variables in this scope were not used, warn the
3804 user. */
3805 warn_about_unused_variables (vars);
3807 if (thisblock->exit_label)
3809 do_pending_stack_adjust ();
3810 emit_label (thisblock->exit_label);
3813 /* If necessary, make handlers for nonlocal gotos taking
3814 place in the function calls in this block. */
3815 if (function_call_count != thisblock->data.block.n_function_calls
3817 /* Make handler for outermost block
3818 if there were any nonlocal gotos to this function. */
3819 && (thisblock->next == 0 ? current_function_has_nonlocal_label
3820 /* Make handler for inner block if it has something
3821 special to do when you jump out of it. */
3822 : (thisblock->data.block.cleanups != 0
3823 || thisblock->data.block.stack_level != 0)))
3824 expand_nl_goto_receivers (thisblock);
3826 /* Don't allow jumping into a block that has a stack level.
3827 Cleanups are allowed, though. */
3828 if (dont_jump_in
3829 || thisblock->data.block.stack_level != 0)
3831 struct label_chain *chain;
3833 /* Any labels in this block are no longer valid to go to.
3834 Mark them to cause an error message. */
3835 for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3837 DECL_TOO_LATE (chain->label) = 1;
3838 /* If any goto without a fixup came to this label,
3839 that must be an error, because gotos without fixups
3840 come from outside all saved stack-levels. */
3841 if (TREE_ADDRESSABLE (chain->label))
3842 error_with_decl (chain->label,
3843 "label `%s' used before containing binding contour");
3847 /* Restore stack level in effect before the block
3848 (only if variable-size objects allocated). */
3849 /* Perform any cleanups associated with the block. */
3851 if (thisblock->data.block.stack_level != 0
3852 || thisblock->data.block.cleanups != 0)
3857 /* Don't let cleanups affect ({...}) constructs. */
3858 int old_expr_stmts_for_value = expr_stmts_for_value;
3859 rtx old_last_expr_value = last_expr_value;
3860 tree old_last_expr_type = last_expr_type;
3861 expr_stmts_for_value = 0;
3863 /* Only clean up here if this point can actually be reached. */
3864 insn = get_last_insn ();
3865 if (GET_CODE (insn) == NOTE)
3866 insn = prev_nonnote_insn (insn);
3867 reachable = (! insn || GET_CODE (insn) != BARRIER);
3869 /* Do the cleanups. */
3870 expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3872 do_pending_stack_adjust ();
3874 expr_stmts_for_value = old_expr_stmts_for_value;
3875 last_expr_value = old_last_expr_value;
3876 last_expr_type = old_last_expr_type;
3878 /* Restore the stack level. */
3880 if (reachable && thisblock->data.block.stack_level != 0)
3882 emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3883 thisblock->data.block.stack_level, NULL_RTX);
3884 if (nonlocal_goto_handler_slots != 0)
3885 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3889 /* Any gotos out of this block must also do these things.
3890 Also report any gotos with fixups that came to labels in this
3891 block. */
3892 fixup_gotos (thisblock,
3893 thisblock->data.block.stack_level,
3894 thisblock->data.block.cleanups,
3895 thisblock->data.block.first_insn,
3896 dont_jump_in);
3899 /* Mark the beginning and end of the scope if requested.
3900 We do this now, after running cleanups on the variables
3901 just going out of scope, so they are in scope for their cleanups. */
3903 if (mark_ends)
3904 {
3905 rtx note = emit_note (NULL, NOTE_INSN_BLOCK_END);
3906 NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3907 }
3908 else
3909 /* Get rid of the beginning-mark if we don't make an end-mark. */
3910 NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3912 /* Restore the temporary level of TARGET_EXPRs. */
3913 target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3915 /* Restore block_stack level for containing block. */
3917 stack_block_stack = thisblock->data.block.innermost_stack_block;
3918 POPSTACK (block_stack);
3920 /* Pop the stack slot nesting and free any slots at this level. */
3921 pop_temp_slots ();
3924 /* Generate code to save the stack pointer at the start of the current block
3925 and set up to restore it on exit. */
3928 save_stack_pointer ()
3930 struct nesting *thisblock = block_stack;
3932 if (thisblock->data.block.stack_level == 0)
3934 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3935 &thisblock->data.block.stack_level,
3936 thisblock->data.block.first_insn);
3937 stack_block_stack = thisblock;
3941 /* Generate RTL for the automatic variable declaration DECL.
3942 (Other kinds of declarations are simply ignored if seen here.) */
3945 expand_decl (decl)
3946 tree decl;
3948 struct nesting *thisblock;
3949 tree type;
3951 type = TREE_TYPE (decl);
3953 /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3954 type in case this node is used in a reference. */
3955 if (TREE_CODE (decl) == CONST_DECL)
3957 DECL_MODE (decl) = TYPE_MODE (type);
3958 DECL_ALIGN (decl) = TYPE_ALIGN (type);
3959 DECL_SIZE (decl) = TYPE_SIZE (type);
3960 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3961 return;
3964 /* Otherwise, only automatic variables need any expansion done. Static and
3965 external variables, and external functions, will be handled by
3966 `assemble_variable' (called from finish_decl). TYPE_DECL requires
3967 nothing. PARM_DECLs are handled in `assign_parms'. */
3968 if (TREE_CODE (decl) != VAR_DECL)
3969 return;
3971 if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3972 return;
3974 thisblock = block_stack;
3976 /* Create the RTL representation for the variable. */
3978 if (type == error_mark_node)
3979 SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3981 else if (DECL_SIZE (decl) == 0)
3982 /* Variable with incomplete type. */
3984 rtx x;
3985 if (DECL_INITIAL (decl) == 0)
3986 /* Error message was already done; now avoid a crash. */
3987 x = gen_rtx_MEM (BLKmode, const0_rtx);
3988 else
3989 /* An initializer is going to decide the size of this array.
3990 Until we know the size, represent its address with a reg. */
3991 x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3993 set_mem_attributes (x, decl, 1);
3994 SET_DECL_RTL (decl, x);
3996 else if (DECL_MODE (decl) != BLKmode
3997 /* If -ffloat-store, don't put explicit float vars into registers. */
3999 && !(flag_float_store
4000 && TREE_CODE (type) == REAL_TYPE)
4001 && ! TREE_THIS_VOLATILE (decl)
4002 && (DECL_REGISTER (decl) || optimize)
4003 /* if -fcheck-memory-usage, check all variables. */
4004 && ! current_function_check_memory_usage)
4006 /* Automatic variable that can go in a register. */
4007 int unsignedp = TREE_UNSIGNED (type);
4008 enum machine_mode reg_mode
4009 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
4011 SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
4013 if (GET_CODE (DECL_RTL (decl)) == REG)
4014 REGNO_DECL (REGNO (DECL_RTL (decl))) = decl;
4015 else if (GET_CODE (DECL_RTL (decl)) == CONCAT)
4017 REGNO_DECL (REGNO (XEXP (DECL_RTL (decl), 0))) = decl;
4018 REGNO_DECL (REGNO (XEXP (DECL_RTL (decl), 1))) = decl;
4021 mark_user_reg (DECL_RTL (decl));
4023 if (POINTER_TYPE_P (type))
4024 mark_reg_pointer (DECL_RTL (decl),
4025 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
4027 maybe_set_unchanging (DECL_RTL (decl), decl);
4029 /* If something wants our address, try to use ADDRESSOF. */
4030 if (TREE_ADDRESSABLE (decl))
4031 put_var_into_stack (decl);
4034 else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
4035 && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
4036 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
4037 STACK_CHECK_MAX_VAR_SIZE)))
4039 /* Variable of fixed size that goes on the stack. */
4041 rtx oldaddr = 0;
4042 rtx addr;
4043 rtx x;
4044 /* If we previously made RTL for this decl, it must be an array
4045 whose size was determined by the initializer.
4046 The old address was a register; set that register now
4047 to the proper address. */
4048 if (DECL_RTL_SET_P (decl))
4050 if (GET_CODE (DECL_RTL (decl)) != MEM
4051 || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
4052 abort ();
4053 oldaddr = XEXP (DECL_RTL (decl), 0);
4056 /* Set alignment we actually gave this decl. */
4057 DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
4058 : GET_MODE_BITSIZE (DECL_MODE (decl)));
4059 DECL_USER_ALIGN (decl) = 0;
4061 x = assign_temp (TREE_TYPE (decl), 1, 1, 1);
4062 set_mem_attributes (x, decl, 1);
4063 SET_DECL_RTL (decl, x);
4067 addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
4068 if (addr != oldaddr)
4069 emit_move_insn (oldaddr, addr);
4073 /* Dynamic-size object: must push space on the stack. */
4075 rtx address, size, x;
4077 /* Record the stack pointer on entry to block, if we have
4078 not already done so. */
4079 do_pending_stack_adjust ();
4080 save_stack_pointer ();
4082 /* In function-at-a-time mode, variable_size doesn't expand this,
4083 so do it now. */
4084 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
4085 expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
4086 const0_rtx, VOIDmode, 0);
4088 /* Compute the variable's size, in bytes. */
4089 size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
4092 /* Allocate space on the stack for the variable. Note that
4093 DECL_ALIGN says how the variable is to be aligned and we
4094 cannot use it to conclude anything about the alignment of
4095 the size. */
4096 address = allocate_dynamic_stack_space (size, NULL_RTX,
4097 TYPE_ALIGN (TREE_TYPE (decl)));
4099 /* Reference the variable indirectly through that rtx. */
4100 x = gen_rtx_MEM (DECL_MODE (decl), address);
4101 set_mem_attributes (x, decl, 1);
4102 SET_DECL_RTL (decl, x);
4105 /* Indicate the alignment we actually gave this variable. */
4106 #ifdef STACK_BOUNDARY
4107 DECL_ALIGN (decl) = STACK_BOUNDARY;
4108 #else
4109 DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
4110 #endif
4111 DECL_USER_ALIGN (decl) = 0;
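/* For illustration, a sketch of how the cases above apply to a
   hypothetical C fragment compiled with -O and without -ffloat-store:

       void f (int n)
       {
         int i;            -- register candidate: promoted mode, gen_reg_rtx
         volatile int v;   -- fixed-size stack slot via assign_temp
         char buf[n];      -- dynamic: allocate_dynamic_stack_space at run time
       }

   Which path is taken depends on the tests above: the decl's mode and
   volatility, DECL_REGISTER/optimize, and whether DECL_SIZE_UNIT is a
   constant.  */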
4115 /* Emit code to perform the initialization of a declaration DECL. */
4117 void
4118 expand_decl_init (decl)
4119 tree decl;
4121 int was_used = TREE_USED (decl);
4123 /* If this is a CONST_DECL, we don't have to generate any code. Likewise
4124 for static decls. */
4125 if (TREE_CODE (decl) == CONST_DECL
4126 || TREE_STATIC (decl))
4129 /* Compute and store the initial value now. */
4131 if (DECL_INITIAL (decl) == error_mark_node)
4133 enum tree_code code = TREE_CODE (TREE_TYPE (decl));
4135 if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
4136 || code == POINTER_TYPE || code == REFERENCE_TYPE)
4137 expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
4141 else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
4143 emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
4144 expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
4148 /* Don't let the initialization count as "using" the variable. */
4149 TREE_USED (decl) = was_used;
4151 /* Free any temporaries we made while initializing the decl. */
4152 preserve_temp_slots (NULL_RTX);
4156 /* CLEANUP is an expression to be executed at exit from this binding contour;
4157 for example, in C++, it might call the destructor for this variable.
4159 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
4160 CLEANUP multiple times and have the correct semantics. This
4161 happens for exception handling, and for gotos, returns, and breaks
4162 that leave the current scope.
4164 If CLEANUP is nonzero and DECL is zero, we record a cleanup
4165 that is not associated with any particular variable. */
4167 int
4168 expand_decl_cleanup (decl, cleanup)
4169 tree decl, cleanup;
4171 struct nesting *thisblock;
4173 /* Error if we are not in any block. */
4174 if (cfun == 0 || block_stack == 0)
4177 thisblock = block_stack;
4179 /* Record the cleanup if there is one. */
4185 tree *cleanups = &thisblock->data.block.cleanups;
4186 int cond_context = conditional_context ();
4190 rtx flag = gen_reg_rtx (word_mode);
4195 emit_move_insn (flag, const0_rtx);
4196 set_flag_0 = get_insns ();
4199 thisblock->data.block.last_unconditional_cleanup
4200 = emit_insns_after (set_flag_0,
4201 thisblock->data.block.last_unconditional_cleanup);
4203 emit_move_insn (flag, const1_rtx);
4205 cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
4206 SET_DECL_RTL (cond, flag);
4208 /* Conditionalize the cleanup. */
4209 cleanup = build (COND_EXPR, void_type_node,
4210 truthvalue_conversion (cond),
4211 cleanup, integer_zero_node);
4212 cleanup = fold (cleanup);
4214 cleanups = thisblock->data.block.cleanup_ptr;
4217 cleanup = unsave_expr (cleanup);
4219 t = *cleanups = tree_cons (decl, cleanup, *cleanups);
4222 /* If this block has a cleanup, it belongs in stack_block_stack. */
4223 stack_block_stack = thisblock;
4230 if (! using_eh_for_cleanups_p)
4231 TREE_ADDRESSABLE (t) = 1;
4233 expand_eh_region_start ();
4240 thisblock->data.block.last_unconditional_cleanup
4241 = emit_insns_after (seq,
4242 thisblock->data.block.last_unconditional_cleanup);
4246 thisblock->data.block.last_unconditional_cleanup
4248 /* When we insert instructions after the last unconditional cleanup,
4249 we don't adjust last_insn. That means that a later add_insn will
4250 clobber the instructions we've just added. The easiest way to
4251 fix this is to just insert another instruction here, so that the
4252 instructions inserted after the last unconditional cleanup are
4253 never the last instruction. */
4254 emit_note (NULL, NOTE_INSN_DELETED);
4255 thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
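/* For illustration, the flag machinery above corresponds roughly to this
   source-level picture (hypothetical C++-like pseudocode, not emitted
   literally):

       flag = 0;           -- inserted at the last unconditional point
       if (cond)
         {
           T obj;          -- cleanup registered in a conditional context
           flag = 1;       -- records that obj was actually constructed
           ...
         }
       -- at scope exit the recorded cleanup behaves as:
       flag ? run obj's cleanup : do nothing

   so a cleanup registered on one arm of a conditional never runs on
   paths that skipped that arm.  */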
4261 /* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
4262 DECL_ELTS is the list of elements that belong to DECL's type.
4263 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
4266 expand_anon_union_decl (decl, cleanup, decl_elts)
4267 tree decl, cleanup, decl_elts;
4269 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
4273 /* If any of the elements are addressable, so is the entire union. */
4274 for (t = decl_elts; t; t = TREE_CHAIN (t))
4275 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4277 TREE_ADDRESSABLE (decl) = 1;
4282 expand_decl_cleanup (decl, cleanup);
4283 x = DECL_RTL (decl);
4285 /* Go through the elements, assigning RTL to each. */
4286 for (t = decl_elts; t; t = TREE_CHAIN (t))
4288 tree decl_elt = TREE_VALUE (t);
4289 tree cleanup_elt = TREE_PURPOSE (t);
4290 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4292 /* Propagate the union's alignment to the elements. */
4293 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4294 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
4296 /* If the element has BLKmode and the union doesn't, the union is
4297 aligned such that the element doesn't need to have BLKmode, so
4298 change the element's mode to the appropriate one for its size. */
4299 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4300 DECL_MODE (decl_elt) = mode
4301 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
4303 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4304 instead create a new MEM rtx with the proper mode. */
4305 if (GET_CODE (x) == MEM)
4307 if (mode == GET_MODE (x))
4308 SET_DECL_RTL (decl_elt, x);
4310 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
4312 else if (GET_CODE (x) == REG)
4314 if (mode == GET_MODE (x))
4315 SET_DECL_RTL (decl_elt, x);
4317 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
4322 /* Record the cleanup if there is one. */
4325 thisblock->data.block.cleanups
4326 = tree_cons (decl_elt, cleanup_elt,
4327 thisblock->data.block.cleanups);
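/* For illustration (hypothetical C++ source):

       union { int i; float f; };   -- anonymous union declaration

   The union decl receives a single rtx X; each member is then given X
   itself when the modes agree, an adjusted MEM, or a lowpart SUBREG of
   X, so `i' and `f' name overlapping pieces of the same storage.  */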
4331 /* Expand a list of cleanups LIST.
4332 Elements may be expressions or may be nested lists.
4334 If DONT_DO is nonnull, then any list-element
4335 whose TREE_PURPOSE matches DONT_DO is omitted.
4336 This is sometimes used to avoid a cleanup associated with
4337 a value that is being returned out of the scope.
4339 If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
4340 goto, and we handle protection regions specially in that case.
4342 If REACHABLE, we emit code, otherwise just inform the exception handling
4343 code about this finalization. */
4345 static void
4346 expand_cleanups (list, dont_do, in_fixup, reachable)
4347 tree list;
4348 tree dont_do;
4349 int in_fixup;
4350 int reachable;
4352 tree tail;
4353 for (tail = list; tail; tail = TREE_CHAIN (tail))
4354 if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4356 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4357 expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4360 if (! in_fixup && using_eh_for_cleanups_p)
4361 expand_eh_region_end_cleanup (TREE_VALUE (tail));
4365 /* Cleanups may be run multiple times. For example,
4366 when exiting a binding contour, we expand the
4367 cleanups associated with that contour. When a goto
4368 within that binding contour has a target outside that
4369 contour, it will expand all cleanups from its scope to
4370 the target. Though the cleanups are expanded multiple
4371 times, the control paths are non-overlapping so the
4372 cleanups will not be executed twice. */
4374 /* We may need to protect from outer cleanups. */
4375 if (in_fixup && using_eh_for_cleanups_p)
4377 expand_eh_region_start ();
4379 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4381 expand_eh_region_end_fixup (TREE_VALUE (tail));
4384 expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4392 /* Mark the context we are emitting RTL for as a conditional
4393 context, so that any cleanup actions we register with
4394 expand_decl_cleanup will be properly conditionalized when those
4395 cleanup actions are later performed. Must be called before any
4396 expression (tree) is expanded that is within a conditional context. */
4399 start_cleanup_deferral ()
4401 /* block_stack can be NULL if we are inside the parameter list. It is
4402 OK to do nothing, because cleanups aren't possible here. */
4403 if (block_stack)
4404 ++block_stack->data.block.conditional_code;
4407 /* Mark the end of a conditional region of code. Because cleanup
4408 deferrals may be nested, we may still be in a conditional region
4409 after we end the currently deferred cleanups; only after we end all
4410 deferred cleanups are we back in unconditional code. */
4413 end_cleanup_deferral ()
4415 /* block_stack can be NULL if we are inside the parameter list. It is
4416 OK to do nothing, because cleanups aren't possible here. */
4417 if (block_stack)
4418 --block_stack->data.block.conditional_code;
4421 /* Move all cleanups from the current block_stack
4422 to the containing block_stack, where they are assumed to
4423 have been created. If anything can cause a temporary to
4424 be created, but not expanded for more than one level of
4425 block_stacks, then this code will have to change. */
4427 void
4428 move_cleanups_up ()
4430 struct nesting *block = block_stack;
4431 struct nesting *outer = block->next;
4433 outer->data.block.cleanups
4434 = chainon (block->data.block.cleanups,
4435 outer->data.block.cleanups);
4436 block->data.block.cleanups = 0;
4439 tree
4440 last_cleanup_this_contour ()
4442 if (block_stack == 0)
4443 return 0;
4445 return block_stack->data.block.cleanups;
4448 /* Return 1 if there are any pending cleanups at this point.
4449 If THIS_CONTOUR is nonzero, check the current contour as well.
4450 Otherwise, look only at the contours that enclose this one. */
4452 int
4453 any_pending_cleanups (this_contour)
4454 int this_contour;
4456 struct nesting *block;
4458 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4459 return 0;
4461 if (this_contour && block_stack->data.block.cleanups != NULL)
4462 return 1;
4463 if (block_stack->data.block.cleanups == 0
4464 && block_stack->data.block.outer_cleanups == 0)
4465 return 0;
4467 for (block = block_stack->next; block; block = block->next)
4468 if (block->data.block.cleanups != 0)
4469 return 1;
4471 return 0;
4474 /* Enter a case (Pascal) or switch (C) statement.
4475 Push a block onto case_stack and nesting_stack
4476 to accumulate the case-labels that are seen
4477 and to record the labels generated for the statement.
4479 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4480 Otherwise, this construct is transparent for `exit_something'.
4482 EXPR is the index-expression to be dispatched on.
4483 TYPE is its nominal type. We could simply convert EXPR to this type,
4484 but instead we take short cuts. */
4486 void
4487 expand_start_case (exit_flag, expr, type, printname)
4488 int exit_flag;
4489 tree expr;
4490 tree type;
4491 const char *printname;
4493 struct nesting *thiscase = ALLOC_NESTING ();
4495 /* Make an entry on case_stack for the case we are entering. */
4497 thiscase->next = case_stack;
4498 thiscase->all = nesting_stack;
4499 thiscase->depth = ++nesting_depth;
4500 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4501 thiscase->data.case_stmt.case_list = 0;
4502 thiscase->data.case_stmt.index_expr = expr;
4503 thiscase->data.case_stmt.nominal_type = type;
4504 thiscase->data.case_stmt.default_label = 0;
4505 thiscase->data.case_stmt.printname = printname;
4506 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4507 case_stack = thiscase;
4508 nesting_stack = thiscase;
4510 do_pending_stack_adjust ();
4512 /* Make sure case_stmt.start points to something that won't
4513 need any transformation before expand_end_case. */
4514 if (GET_CODE (get_last_insn ()) != NOTE)
4515 emit_note (NULL, NOTE_INSN_DELETED);
4517 thiscase->data.case_stmt.start = get_last_insn ();
4519 start_cleanup_deferral ();
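/* For illustration, a front end lowering

       switch (x) { case 1: ...; default: ...; }

   would make roughly this sequence of calls (a sketch only; the labels
   are LABEL_DECLs built by the front end, and error handling is
   omitted):

       expand_start_case (1, x, TREE_TYPE (x), "switch statement");
       pushcase (build_int_2 (1, 0), convert, lab1, &dup);
       ... expand the statements for case 1 ...
       pushcase (NULL_TREE, 0, deflab, &dup);    -- the default label
       ... expand the default statements ...
       expand_end_case (x);  */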
4522 /* Start a "dummy case statement" within which case labels are invalid
4523 and are not connected to any larger real case statement.
4524 This can be used if you don't want to let a case statement jump
4525 into the middle of certain kinds of constructs. */
4527 void
4528 expand_start_case_dummy ()
4530 struct nesting *thiscase = ALLOC_NESTING ();
4532 /* Make an entry on case_stack for the dummy. */
4534 thiscase->next = case_stack;
4535 thiscase->all = nesting_stack;
4536 thiscase->depth = ++nesting_depth;
4537 thiscase->exit_label = 0;
4538 thiscase->data.case_stmt.case_list = 0;
4539 thiscase->data.case_stmt.start = 0;
4540 thiscase->data.case_stmt.nominal_type = 0;
4541 thiscase->data.case_stmt.default_label = 0;
4542 case_stack = thiscase;
4543 nesting_stack = thiscase;
4544 start_cleanup_deferral ();
4547 /* End a dummy case statement. */
4549 void
4550 expand_end_case_dummy ()
4552 end_cleanup_deferral ();
4553 POPSTACK (case_stack);
4556 /* Return the data type of the index-expression
4557 of the innermost case statement, or null if none. */
4559 tree
4560 case_index_expr_type ()
4562 if (case_stack)
4563 return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4564 else
4565 return 0;
4567 static void
4568 check_seenlabel ()
4570 /* If this is the first label, warn if any insns have been emitted. */
4571 if (case_stack->data.case_stmt.line_number_status >= 0)
4574 rtx insn;
4575 restore_line_number_status
4576 (case_stack->data.case_stmt.line_number_status);
4577 case_stack->data.case_stmt.line_number_status = -1;
4579 for (insn = case_stack->data.case_stmt.start;
4580 insn;
4581 insn = NEXT_INSN (insn))
4583 if (GET_CODE (insn) == CODE_LABEL)
4584 break;
4585 if (GET_CODE (insn) != NOTE
4586 && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4588 do
4589 insn = PREV_INSN (insn);
4590 while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4592 /* If insn is zero, then there must have been a syntax error. */
4593 if (insn)
4594 warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
4595 NOTE_LINE_NUMBER (insn),
4596 "unreachable code at beginning of %s",
4597 case_stack->data.case_stmt.printname);
4604 /* Accumulate one case or default label inside a case or switch statement.
4605 VALUE is the value of the case (a null pointer, for a default label).
4606 The function CONVERTER, when applied to arguments T and V,
4607 converts the value V to the type T.
4609 If not currently inside a case or switch statement, return 1 and do
4610 nothing. The caller will print a language-specific error message.
4611 If VALUE is a duplicate or overlaps, return 2 and do nothing
4612 except store the (first) duplicate node in *DUPLICATE.
4613 If VALUE is out of range, return 3 and do nothing.
4614 If we are jumping into the scope of a cleanup or var-sized array, return 5.
4615 Return 0 on success.
4617 Extended to handle range statements. */
4619 int
4620 pushcase (value, converter, label, duplicate)
4621 tree value;
4622 tree (*converter) PARAMS ((tree, tree));
4623 tree label;
4624 tree *duplicate;
4629 /* Fail if not inside a real case statement. */
4630 if (! (case_stack && case_stack->data.case_stmt.start))
4631 return 1;
4633 if (stack_block_stack
4634 && stack_block_stack->depth > case_stack->depth)
4635 return 5;
4637 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4638 nominal_type = case_stack->data.case_stmt.nominal_type;
4640 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4641 if (index_type == error_mark_node)
4642 return 0;
4644 /* Convert VALUE to the type in which the comparisons are nominally done. */
4645 if (value != 0)
4646 value = (*converter) (nominal_type, value);
4650 /* Fail if this value is out of range for the actual type of the index
4651 (which may be narrower than NOMINAL_TYPE). */
4652 if (value != 0
4653 && (TREE_CONSTANT_OVERFLOW (value)
4654 || ! int_fits_type_p (value, index_type)))
4655 return 3;
4657 return add_case_node (value, value, label, duplicate);
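/* For illustration, a caller is expected to turn the return codes above
   into language-specific diagnostics, along these lines (hypothetical
   front end code):

       int code = pushcase (value, convert, label, &duplicate);

       if (code == 1)
         error ("case label not within a switch statement");
       else if (code == 2)
         error ("duplicate case value");
       else if (code == 3)
         warning ("case value out of range");
       else if (code == 5)
         error ("case label within scope of cleanup or variable array");  */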
4660 /* Like pushcase but this case applies to all values between VALUE1 and
4661 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
4662 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
4663 starts at VALUE1 and ends at the highest value of the index type.
4664 If both are NULL, this case applies to all values.
4666 The return value is the same as that of pushcase but there is one
4667 additional error code: 4 means the specified range was empty. */
4669 int
4670 pushcase_range (value1, value2, converter, label, duplicate)
4671 tree value1, value2;
4672 tree (*converter) PARAMS ((tree, tree));
4673 tree label;
4674 tree *duplicate;
4679 /* Fail if not inside a real case statement. */
4680 if (! (case_stack && case_stack->data.case_stmt.start))
4681 return 1;
4683 if (stack_block_stack
4684 && stack_block_stack->depth > case_stack->depth)
4685 return 5;
4687 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4688 nominal_type = case_stack->data.case_stmt.nominal_type;
4690 /* If the index is erroneous, avoid more problems: pretend to succeed. */
4691 if (index_type == error_mark_node)
4692 return 0;
4696 /* Convert VALUEs to the type in which the comparisons are nominally done
4697 and replace any unspecified value with the corresponding bound. */
4698 if (value1 == 0)
4699 value1 = TYPE_MIN_VALUE (index_type);
4700 if (value2 == 0)
4701 value2 = TYPE_MAX_VALUE (index_type);
4703 /* Fail if the range is empty. Do this before any conversion since
4704 we want to allow out-of-range empty ranges. */
4705 if (value2 != 0 && tree_int_cst_lt (value2, value1))
4706 return 4;
4708 /* If the max was unbounded, use the max of the nominal_type we are
4709 converting to. Do this after the < check above to suppress false
4710 positives. */
4711 if (value2 == 0)
4712 value2 = TYPE_MAX_VALUE (nominal_type);
4714 value1 = (*converter) (nominal_type, value1);
4715 value2 = (*converter) (nominal_type, value2);
4717 /* Fail if these values are out of range. */
4718 if (TREE_CONSTANT_OVERFLOW (value1)
4719 || ! int_fits_type_p (value1, index_type))
4720 return 3;
4722 if (TREE_CONSTANT_OVERFLOW (value2)
4723 || ! int_fits_type_p (value2, index_type))
4724 return 3;
4726 return add_case_node (value1, value2, label, duplicate);
4729 /* Do the actual insertion of a case label for pushcase and pushcase_range
4730 into case_stack->data.case_stmt.case_list. Use an AVL tree to avoid
4731 slowdown for large switch statements. */
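/* A worked illustration of the balancing done below: inserting the case
   values 3, 2, 1 in that order first produces a left-left chain, which
   a single right rotation repairs:

         3              2
        /              / \
       2      ==>     1   3
      /
     1

   keeping insertion O(log N) even when the front end pushes the labels
   in sorted order.  */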
4733 static int
4734 add_case_node (low, high, label, duplicate)
4735 tree low, high;
4736 tree label;
4737 tree *duplicate;
4739 struct case_node *p, **q, *r;
4741 /* If there's no HIGH value, then this is not a case range; it's
4742 just a simple case label. But that's just a degenerate case
4743 of a case range. */
4744 if (! high)
4745 high = low;
4747 /* Handle default labels specially. */
4748 if (! high)
4750 if (case_stack->data.case_stmt.default_label != 0)
4752 *duplicate = case_stack->data.case_stmt.default_label;
4753 return 2;
4755 case_stack->data.case_stmt.default_label = label;
4756 expand_label (label);
4757 return 0;
4760 q = &case_stack->data.case_stmt.case_list;
4767 /* Keep going past elements distinctly greater than HIGH. */
4768 if (tree_int_cst_lt (high, p->low))
4771 /* or distinctly less than LOW. */
4772 else if (tree_int_cst_lt (p->high, low))
4777 /* We have an overlap; this is an error. */
4778 *duplicate = p->code_label;
4783 /* Add this label to the chain, and succeed. */
4785 r = (struct case_node *) xmalloc (sizeof (struct case_node));
4788 /* If the bounds are equal, turn this into the one-value case. */
4789 if (tree_int_cst_equal (low, high))
4794 r->code_label = label;
4795 expand_label (label);
4805 struct case_node *s;
4811 if (! (b = p->balance))
4812 /* Growth propagation from left side. */
4819 if ((p->left = s = r->right))
4828 if ((r->parent = s))
4836 case_stack->data.case_stmt.case_list = r;
4839 /* r->balance == +1 */
4844 struct case_node *t = r->right;
4846 if ((p->left = s = t->right))
4850 if ((r->right = s = t->left))
4864 if ((t->parent = s))
4872 case_stack->data.case_stmt.case_list = t;
4879 /* p->balance == +1; growth of left side balances the node. */
4889 if (! (b = p->balance))
4890 /* Growth propagation from right side. */
4898 if ((p->right = s = r->left))
4906 if ((r->parent = s))
4915 case_stack->data.case_stmt.case_list = r;
4919 /* r->balance == -1 */
4923 struct case_node *t = r->left;
4925 if ((p->right = s = t->left))
4930 if ((r->left = s = t->right))
4944 if ((t->parent = s))
4953 case_stack->data.case_stmt.case_list = t;
4959 /* p->balance == -1; growth of right side balances the node. */
4972 /* Returns the number of possible values of TYPE.
4973 Returns -1 if the number is unknown, variable, or if the number does not
4974 fit in a HOST_WIDE_INT.
4975 Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4976 do not increase monotonically (there may be duplicates);
4977 to 1 if the values increase monotonically, but not always by 1;
4978 otherwise sets it to 0. */
4980 static HOST_WIDE_INT
4981 all_cases_count (type, spareness)
4982 tree type;
4983 int *spareness;
4986 HOST_WIDE_INT count, minval, lastval;
4990 switch (TREE_CODE (type))
4997 count = 1 << BITS_PER_UNIT;
5002 if (TYPE_MAX_VALUE (type) != 0
5003 && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
5004 TYPE_MIN_VALUE (type))))
5005 && 0 != (t = fold (build (PLUS_EXPR, type, t,
5006 convert (type, integer_one_node))))
5007 && host_integerp (t, 1))
5008 count = tree_low_cst (t, 1);
5009 else
5010 return -1;
5011 break;
5013 case ENUMERAL_TYPE:
5014 /* Don't waste time with enumeral types with huge values. */
5015 if (! host_integerp (TYPE_MIN_VALUE (type), 0)
5016 || TYPE_MAX_VALUE (type) == 0
5017 || ! host_integerp (TYPE_MAX_VALUE (type), 0))
5018 return -1;
5020 lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
5023 for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
5025 HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
5027 if (*spareness == 2 || thisval < lastval)
5028 *spareness = 2;
5029 else if (thisval != minval + count)
5030 *spareness = 1;
5031 count++;
5032 lastval = thisval;
5036 return count;
5039 #define BITARRAY_TEST(ARRAY, INDEX) \
5040 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
5041 & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
5042 #define BITARRAY_SET(ARRAY, INDEX) \
5043 ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
5044 |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
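/* Usage sketch for the two macros above (assuming 8-bit chars; the
   values are hypothetical):

       unsigned char cases_seen[2] = { 0, 0 };  -- offsets 0..15
       BITARRAY_SET (cases_seen, 9);            -- sets bit 1 of byte 1
       if (BITARRAY_TEST (cases_seen, 9))       -- nonzero afterwards
         ...

   INDEX is an offset from the smallest case value, not the case value
   itself, so COUNT bits always suffice.  */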
5046 /* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
5047 with the case values we have seen, assuming the case expression
5048 is of type TYPE.
5049 SPARSENESS is as determined by all_cases_count.
5051 The time needed is proportional to COUNT, unless
5052 SPARSENESS is 2, in which case quadratic time is needed. */
5054 static void
5055 mark_seen_cases (type, cases_seen, count, sparseness)
5056 tree type;
5057 unsigned char *cases_seen;
5058 HOST_WIDE_INT count;
5059 int sparseness;
5061 tree next_node_to_try = NULL_TREE;
5062 HOST_WIDE_INT next_node_offset = 0;
5064 struct case_node *n, *root = case_stack->data.case_stmt.case_list;
5065 tree val = make_node (INTEGER_CST);
5067 TREE_TYPE (val) = type;
5068 if (! root)
5069 /* Do nothing. */ ;
5071 else if (sparseness == 2)
5074 unsigned HOST_WIDE_INT xlo;
5076 /* This less efficient loop is only needed to handle
5077 duplicate case values (multiple enum constants
5078 with the same value). */
5079 TREE_TYPE (val) = TREE_TYPE (root->low);
5080 for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
5081 t = TREE_CHAIN (t), xlo++)
5083 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
5084 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
5088 /* Keep going past elements distinctly greater than VAL. */
5089 if (tree_int_cst_lt (val, n->low))
5092 /* or distinctly less than VAL. */
5093 else if (tree_int_cst_lt (n->high, val))
5098 /* We have found a matching range. */
5099 BITARRAY_SET (cases_seen, xlo);
5109 case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
5111 for (n = root; n; n = n->right)
5113 TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
5114 TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
5115 while (! tree_int_cst_lt (n->high, val))
5117 /* Calculate (into xlo) the "offset" of the integer (val).
5118 The element with lowest value has offset 0, the next smallest
5119 element has offset 1, etc. */
5121 unsigned HOST_WIDE_INT xlo;
5125 if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5127 /* The TYPE_VALUES will be in increasing order, so
5128 start searching where we last ended. */
5129 t = next_node_to_try;
5130 xlo = next_node_offset;
5136 t = TYPE_VALUES (type);
5139 if (tree_int_cst_equal (val, TREE_VALUE (t)))
5141 next_node_to_try = TREE_CHAIN (t);
5142 next_node_offset = xlo + 1;
5147 if (t == next_node_to_try)
5156 t = TYPE_MIN_VALUE (type);
5158 neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5162 add_double (xlo, xhi,
5163 TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5167 if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
5168 BITARRAY_SET (cases_seen, xlo);
5170 add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5172 &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5178 /* Called when the index of a switch statement is an enumerated type
5179 and there is no default label.
5181 Checks that all enumeration literals are covered by the case
5182 expressions of a switch. Also, warn if there are any extra
5183 switch cases that are *not* elements of the enumerated type.
5185 If all enumeration literals were covered by the case expressions,
5186 turn one of the expressions into the default expression since it should
5187 not be possible to fall through such a switch. */
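/* For illustration, a source fragment that triggers both kinds of
   warning (hypothetical code):

       enum e { A, B, C };
       enum e x;
       switch (x)        -- no default label
         {
         case A: ...     -- `C' is never handled: the first warning
         case B: ...
         case 42: ...    -- 42 is not an enumerator: the second warning
         }  */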
5189 static void
5190 check_for_full_enumeration_handling (type)
5191 tree type;
5193 struct case_node *n;
5196 /* True iff the selector type is a numbered set mode. */
5199 /* The number of possible selector values. */
5202 /* For each possible selector value, a one iff it has been matched
5203 by a case value alternative. */
5204 unsigned char *cases_seen;
5206 /* The allocated size of cases_seen, in chars. */
5207 HOST_WIDE_INT bytes_needed;
5212 size = all_cases_count (type, &sparseness);
5213 bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5215 if (size > 0 && size < 600000
5216 /* We deliberately use calloc here, not xcalloc, so that we can suppress
5217 this optimization if we don't have enough memory rather than
5218 aborting, as xmalloc would do. */
5219 && (cases_seen =
5220 (unsigned char *) really_call_calloc (bytes_needed, 1)) != NULL)
5223 tree v = TYPE_VALUES (type);
5225 /* The time complexity of this code is normally O(N), where
5226 N is the number of members in the enumerated type.
5227 However, if type is an ENUMERAL_TYPE whose values do not
5228 increase monotonically, O(N*log(N)) time may be needed. */
5230 mark_seen_cases (type, cases_seen, size, sparseness);
5232 for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5233 if (BITARRAY_TEST (cases_seen, i) == 0)
5234 warning ("enumeration value `%s' not handled in switch",
5235 IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5240 /* Now we go the other way around; we warn if there are case
5241 expressions that don't correspond to enumerators. This can
5242 occur since C and C++ don't enforce type-checking of
5243 assignments to enumeration variables. */
5245 if (case_stack->data.case_stmt.case_list
5246 && case_stack->data.case_stmt.case_list->left)
5247 case_stack->data.case_stmt.case_list
5248 = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5250 for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5252 for (chain = TYPE_VALUES (type);
5253 chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5254 chain = TREE_CHAIN (chain))
5259 if (TYPE_NAME (type) == 0)
5260 warning ("case value `%ld' not in enumerated type",
5261 (long) TREE_INT_CST_LOW (n->low));
5263 warning ("case value `%ld' not in enumerated type `%s'",
5264 (long) TREE_INT_CST_LOW (n->low),
5265 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5266 == IDENTIFIER_NODE)
5267 ? TYPE_NAME (type)
5268 : DECL_NAME (TYPE_NAME (type))));
5270 if (!tree_int_cst_equal (n->low, n->high))
5272 for (chain = TYPE_VALUES (type);
5273 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5274 chain = TREE_CHAIN (chain))
5279 if (TYPE_NAME (type) == 0)
5280 warning ("case value `%ld' not in enumerated type",
5281 (long) TREE_INT_CST_LOW (n->high));
5283 warning ("case value `%ld' not in enumerated type `%s'",
5284 (long) TREE_INT_CST_LOW (n->high),
5285 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5286 == IDENTIFIER_NODE)
5287 ? TYPE_NAME (type)
5288 : DECL_NAME (TYPE_NAME (type))));
5294 /* Free CN, and its children. */
5296 static void
5297 free_case_nodes (cn)
5298 case_node_ptr cn;
5301 if (cn)
5302 free_case_nodes (cn->left);
5303 free_case_nodes (cn->right);
5304 free (cn);
5310 /* Terminate a case (Pascal) or switch (C) statement
5311 in which ORIG_INDEX is the expression to be tested.
5312 Generate the code to test it and jump to the right place. */
5314 void
5315 expand_end_case (orig_index)
5316 tree orig_index;
5318 tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
5319 rtx default_label = 0;
5320 struct case_node *n;
5327 rtx before_case, end;
5328 struct nesting *thiscase = case_stack;
5329 tree index_expr, index_type;
5332 /* Don't crash due to previous errors. */
5333 if (thiscase == NULL)
5334 return;
5336 table_label = gen_label_rtx ();
5337 index_expr = thiscase->data.case_stmt.index_expr;
5338 index_type = TREE_TYPE (index_expr);
5339 unsignedp = TREE_UNSIGNED (index_type);
5341 do_pending_stack_adjust ();
5343 /* This might get a spurious warning in the presence of a syntax error;
5344 it could be fixed by moving the call to check_seenlabel after the
5345 check for error_mark_node, and copying the code of check_seenlabel that
5346 deals with case_stack->data.case_stmt.line_number_status /
5347 restore_line_number_status in front of the call to end_cleanup_deferral;
5348 however, this might miss some useful warnings in the presence of
5349 non-syntax errors. */
5352 /* An ERROR_MARK occurs for various reasons including invalid data type. */
5353 if (index_type != error_mark_node)
5355 /* If switch expression was an enumerated type, check that all
5356 enumeration literals are covered by the cases.
5357 No sense trying this if there's a default case, however. */
5359 if (!thiscase->data.case_stmt.default_label
5360 && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
5361 && TREE_CODE (index_expr) != INTEGER_CST)
5362 check_for_full_enumeration_handling (TREE_TYPE (orig_index));
5364 /* If we don't have a default-label, create one here,
5365 after the body of the switch. */
5366 if (thiscase->data.case_stmt.default_label == 0)
5368 thiscase->data.case_stmt.default_label
5369 = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5370 expand_label (thiscase->data.case_stmt.default_label);
5372 default_label = label_rtx (thiscase->data.case_stmt.default_label);
5374 before_case = get_last_insn ();
5376 if (thiscase->data.case_stmt.case_list
5377 && thiscase->data.case_stmt.case_list->left)
5378 thiscase->data.case_stmt.case_list
5379 = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5381 /* Simplify the case-list before we count it. */
5382 group_case_nodes (thiscase->data.case_stmt.case_list);
5384 /* Get upper and lower bounds of case values.
5385 Also convert all the case values to the index expr's data type. */
5388 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5390 /* Check low and high label values are integers. */
5391 if (TREE_CODE (n->low) != INTEGER_CST)
5393 if (TREE_CODE (n->high) != INTEGER_CST)
5396 n->low = convert (index_type, n->low);
5397 n->high = convert (index_type, n->high);
5399 /* Count the elements and track the largest and smallest
5400 of them (treating them as signed even if they are not). */
5408 if (INT_CST_LT (n->low, minval))
5410 if (INT_CST_LT (maxval, n->high))
5413 /* A range counts double, since it requires two compares. */
5414 if (! tree_int_cst_equal (n->low, n->high))
5418 /* Compute span of values. */
5420 range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5422 end_cleanup_deferral ();
5424 if (count == 0)
5426 expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5428 emit_jump (default_label);
5431 /* If range of values is much bigger than number of values,
5432 make a sequence of conditional branches instead of a dispatch.
5433 If the switch-index is a constant, do it this way
5434 because we can optimize it. */
5436 else if (count < case_values_threshold ()
5437 || compare_tree_int (range, 10 * count) > 0
5438 /* RANGE may be signed, and really large ranges will show up
5439 as negative numbers. */
5440 || compare_tree_int (range, 0) < 0
5441 #ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5442 || flag_pic
5443 #endif
5444 || TREE_CODE (index_expr) == INTEGER_CST
5445 || (TREE_CODE (index_expr) == COMPOUND_EXPR
5446 && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5448 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5450 /* If the index is a short or char that we do not have
5451 an insn to handle comparisons directly, convert it to
5452 a full integer now, rather than letting each comparison
5453 generate the conversion. */
5455 if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5456 && ! have_insn_for (COMPARE, GET_MODE (index)))
5458 enum machine_mode wider_mode;
5459 for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5460 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5461 if (have_insn_for (COMPARE, wider_mode))
5463 index = convert_to_mode (wider_mode, index, unsignedp);
5464 break;
5469 do_pending_stack_adjust ();
5471 index = protect_from_queue (index, 0);
5472 if (GET_CODE (index) == MEM)
5473 index = copy_to_reg (index);
5474 if (GET_CODE (index) == CONST_INT
5475 || TREE_CODE (index_expr) == INTEGER_CST)
5477 /* Make a tree node with the proper constant value
5478 if we don't already have one. */
5479 if (TREE_CODE (index_expr) != INTEGER_CST)
5482 = build_int_2 (INTVAL (index),
5483 unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5484 index_expr = convert (index_type, index_expr);
5487 /* For constant index expressions we need only
5488 issue an unconditional branch to the appropriate
5489 target code. The job of removing any unreachable
5490 code is left to the optimization phase if the
5491 "-O" option is specified. */
5492 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5493 if (! tree_int_cst_lt (index_expr, n->low)
5494 && ! tree_int_cst_lt (n->high, index_expr))
5495 break;
5497 if (n)
5498 emit_jump (label_rtx (n->code_label));
5499 else
5500 emit_jump (default_label);
5504 /* If the index expression is not constant we generate
5505 a binary decision tree to select the appropriate
5506 target code. This is done as follows:
5508 The list of cases is rearranged into a binary tree,
5509 nearly optimal assuming equal probability for each case.
5511 The tree is transformed into RTL, eliminating
5512 redundant test conditions at the same time.
5514 If program flow could reach the end of the
5515 decision tree an unconditional jump to the
5516 default code is emitted. */
5519 = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
5520 && estimate_case_costs (thiscase->data.case_stmt.case_list));
5521 balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
5522 emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5523 default_label, index_type);
5524 emit_jump_if_reachable (default_label);
5529 if (! try_casesi (index_type, index_expr, minval, range,
5530 table_label, default_label))
5532 index_type = thiscase->data.case_stmt.nominal_type;
5534 /* Index jumptables from zero for suitable values of
5535 minval to avoid a subtraction. */
5536 if (! optimize_size
5537 && compare_tree_int (minval, 0) > 0
5538 && compare_tree_int (minval, 3) < 0)
5540 minval = integer_zero_node;
5541 range = maxval;
5544 if (! try_tablejump (index_type, index_expr, minval, range,
5545 table_label, default_label))
5546 abort ();
5549 /* Get table of labels to jump to, in order of case index. */
5551 ncases = tree_low_cst (range, 0) + 1;
5552 labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5553 memset ((char *) labelvec, 0, ncases * sizeof (rtx));
5555 for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5558 = tree_low_cst (n->low, 0) - tree_low_cst (minval, 0);
5563 = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5564 if (i + tree_low_cst (minval, 0)
5565 == tree_low_cst (n->high, 0))
5566 break;
5571 /* Fill in the gaps with the default. */
5572 for (i = 0; i < ncases; i++)
5573 if (labelvec[i] == 0)
5574 labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5576 /* Output the table */
5577 emit_label (table_label);
5579 if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5580 emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5581 gen_rtx_LABEL_REF (Pmode, table_label),
5582 gen_rtvec_v (ncases, labelvec),
5583 const0_rtx, const0_rtx));
5585 emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5586 gen_rtvec_v (ncases, labelvec)));
5588 /* If the case insn drops through the table,
5589 after the table we must jump to the default-label.
5590 Otherwise record no drop-through after the table. */
5591 #ifdef CASE_DROPS_THROUGH
5592 emit_jump (default_label);
5593 #else
5594 emit_barrier ();
5595 #endif
5598 before_case = NEXT_INSN (before_case);
5599 end = get_last_insn ();
5600 if (squeeze_notes (&before_case, &end))
5602 reorder_insns (before_case, end,
5603 thiscase->data.case_stmt.start);
5606 end_cleanup_deferral ();
5608 if (thiscase->exit_label)
5609 emit_label (thiscase->exit_label);
5611 free_case_nodes (case_stack->data.case_stmt.case_list);
5612 POPSTACK (case_stack);
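/* For illustration of the dispatch choice made above (the threshold is
   target-dependent; 4 is used here only as an example):

       cases 1, 2, ..., 10:  count = 10, range = 9; the range is not
         more than 10 * count, so a casesi or tablejump branch table
         is attempted;

       cases 1 and 1000:  count = 2, below the threshold, so a balanced
         tree of compare-and-jump insns is emitted instead;

       constant index:  a single unconditional jump is emitted.  */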
5617 /* Convert the tree NODE into a list linked by the right field, with the left
5618 field zeroed. RIGHT is used for recursion; it is a list to be placed
5619 rightmost in the resulting list. */
5621 static struct case_node *
5622 case_tree2list (node, right)
5623 struct case_node *node, *right;
5625 struct case_node *left;
5628 right = case_tree2list (node->right, right);
5630 node->right = right;
5631 if ((left = node->left))
5634 return case_tree2list (left, node);
5640 /* Generate code to jump to LABEL if OP1 and OP2 are equal. */
5642 static void
5643 do_jump_if_equal (op1, op2, label, unsignedp)
5644 rtx op1, op2, label;
5645 int unsignedp;
5647 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
5649 if (INTVAL (op1) == INTVAL (op2))
5650 emit_jump (label);
5652 else
5653 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
5654 (GET_MODE (op1) == VOIDmode
5655 ? GET_MODE (op2) : GET_MODE (op1)),
5656 unsignedp, label);
5659 /* Not all case values are encountered equally. This function
5660 uses a heuristic to weight case labels, in cases where that
5661 looks like a reasonable thing to do.
5663 Right now, all we try to guess is text, and we establish the
5664 following weights:
5666 chars above space: 16
5667 digits: 16
5668 default: 12
5669 space, punct: 8
5670 tab: 4
5671 newline: 2
5672 other "\" chars: 1
5673 remaining chars: 0
5675 If we find any cases in the switch that are not either -1 or in the range
5676 of valid ASCII characters, or are control characters other than those
5677 commonly used with "\", don't treat this switch scanning text.
5679 Return 1 if these nodes are suitable for cost estimation, otherwise
5680 return 0. */
5682 static int
5683 estimate_case_costs (node)
5684 case_node_ptr node;
5686 tree min_ascii = integer_minus_one_node;
5687 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5691 /* If we haven't already made the cost table, make it now. Note that the
5692 lower bound of the table is -1, not zero. */
5694 if (! cost_table_initialized)
5696 cost_table_initialized = 1;
5698 for (i = 0; i < 128; i++)
5700 if (ISALNUM (i))
5701 COST_TABLE (i) = 16;
5702 else if (ISPUNCT (i))
5703 COST_TABLE (i) = 8;
5704 else if (ISCNTRL (i))
5705 COST_TABLE (i) = -1;
5708 COST_TABLE (' ') = 8;
5709 COST_TABLE ('\t') = 4;
5710 COST_TABLE ('\0') = 4;
5711 COST_TABLE ('\n') = 2;
5712 COST_TABLE ('\f') = 1;
5713 COST_TABLE ('\v') = 1;
5714 COST_TABLE ('\b') = 1;
5717 /* See if all the case expressions look like text. It is text if the
5718 constant is >= -1 and the highest constant is <= 127. Do all comparisons
5719 as signed arithmetic since we don't want to ever access cost_table with a
5720 value less than -1. Also check that none of the constants in a range
5721 are strange control characters. */
5723 for (n = node; n; n = n->right)
5725 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5726 return 0;
5728 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
5729 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
5730 if (COST_TABLE (i) < 0)
5731 return 0;
5734 /* All interesting values are within the range of interesting
5735 ASCII characters. */
5736 return 1;
5739 /* Scan an ordered list of case nodes
5740 combining those with consecutive values or ranges.
5742 E.g., three separate entries 1: 2: 3: become one entry 1..3: */
5744 static void
5745 group_case_nodes (head)
5746 case_node_ptr head;
5748 case_node_ptr node = head;
5750 while (node)
5752 rtx lb = next_real_insn (label_rtx (node->code_label));
5753 rtx lb2;
5754 case_node_ptr np = node;
5756 /* Try to group the successors of NODE with NODE. */
5757 while (((np = np->right) != 0)
5758 /* Do they jump to the same place? */
5759 && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5760 || (lb != 0 && lb2 != 0
5761 && simplejump_p (lb)
5762 && simplejump_p (lb2)
5763 && rtx_equal_p (SET_SRC (PATTERN (lb)),
5764 SET_SRC (PATTERN (lb2)))))
5765 /* Are their ranges consecutive? */
5766 && tree_int_cst_equal (np->low,
5767 fold (build (PLUS_EXPR,
5768 TREE_TYPE (node->high),
5771 /* An overflow is not consecutive. */
5772 && tree_int_cst_lt (node->high,
5773 fold (build (PLUS_EXPR,
5774 TREE_TYPE (node->high),
5776 integer_one_node))))
5778 node->high = np->high;
5780 /* NP is the first node after NODE which can't be grouped with it.
5781 Delete the nodes in between, and move on to that node. */
5787 /* Take an ordered list of case nodes
5788 and transform them into a near optimal binary tree,
5789 on the assumption that any target code selection value is as
5790 likely as any other.
5792 The transformation is performed by splitting the ordered
5793 list into two equal sections plus a pivot. The parts are
5794 then attached to the pivot as left and right branches. Each
5795 branch is then transformed recursively. */
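/* For illustration, with five equal-cost single-value nodes the split
   below picks the middle node as the pivot and recurses on each half:

       1 -> 2 -> 3 -> 4 -> 5    becomes        3
                                              / \
                                             1   4
                                              \   \
                                               2   5

   (two-node halves are left as chains with only their parent links
   filled in).  */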
5797 static void
5798 balance_case_nodes (head, parent)
5799 case_node_ptr *head;
5800 case_node_ptr parent;
5813 /* Count the number of entries on branch. Also count the ranges. */
5817 if (!tree_int_cst_equal (np->low, np->high))
5821 cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
5825 cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
5833 /* Split this list if it is long enough for that to help. */
5838 /* Find the place in the list that bisects the list's total cost;
5839 here I gets half the total cost. */
5840 i = (cost + 1) / 2;
5844 /* Skip nodes while their cost does not reach that amount. */
5845 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5846 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
5847 i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
5850 npp = &(*npp)->right;
5855 /* Leave this branch lopsided, but optimize left-hand
5856 side and fill in `parent' fields for right-hand side. */
5858 np->parent = parent;
5859 balance_case_nodes (&np->left, np);
5860 for (; np->right; np = np->right)
5861 np->right->parent = np;
5865 /* If there are just three nodes, split at the middle one. */
5867 npp = &(*npp)->right;
5870 /* Find the place in the list that bisects the list's total cost,
5871 where ranges count as 2.
5872 Here I gets half the total cost. */
5873 i = (i + ranges + 1) / 2;
5876 /* Skip nodes while their cost does not reach that amount. */
5877 if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5882 npp = &(*npp)->right;
5887 np->parent = parent;
5890 /* Optimize each of the two split parts. */
5891 balance_case_nodes (&np->left, np);
5892 balance_case_nodes (&np->right, np);
5896 /* Else leave this branch as one level,
5897 but fill in `parent' fields. */
5899 np->parent = parent;
5900 for (; np->right; np = np->right)
5901 np->right->parent = np;
5906 /* Search the parent sections of the case node tree
5907 to see if a test for the lower bound of NODE would be redundant.
5908 INDEX_TYPE is the type of the index expression.
5910 The instructions to generate the case decision tree are
5911 output in the same order as nodes are processed so it is
5912 known that if a parent node checks the range of the current
5913 node minus one that the current node is bounded at its lower
5914 span. Thus the test would be redundant. */
5916 static int
5917 node_has_low_bound (node, index_type)
5918 case_node_ptr node;
5919 tree index_type;
5922 case_node_ptr pnode;
5924 /* If the lower bound of this node is the lowest value in the index type,
5925 we need not test it. */
5927 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5930 /* If this node has a left branch, the value at the left must be less
5931 than that at this node, so it cannot be bounded at the bottom and
5932 we need not bother testing any further. */
5937 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5938 node->low, integer_one_node));
5940 /* If the subtraction above overflowed, we can't verify anything.
5941 Otherwise, look for a parent that tests our value - 1. */
5943 if (! tree_int_cst_lt (low_minus_one, node->low))
5946 for (pnode = node->parent; pnode; pnode = pnode->parent)
5947 if (tree_int_cst_equal (low_minus_one, pnode->high))
5953 /* Search the parent sections of the case node tree
5954 to see if a test for the upper bound of NODE would be redundant.
5955 INDEX_TYPE is the type of the index expression.
5957 The instructions to generate the case decision tree are
5958 output in the same order as nodes are processed so it is
5959 known that if a parent node checks the range of the current
5960 node plus one that the current node is bounded at its upper
5961 span. Thus the test would be redundant. */
5963 static int
5964 node_has_high_bound (node, index_type)
5965 case_node_ptr node;
5966 tree index_type;
5969 case_node_ptr pnode;
5971 /* If there is no upper bound, obviously no test is needed. */
5973 if (TYPE_MAX_VALUE (index_type) == NULL)
5976 /* If the upper bound of this node is the highest value in the type
5977 of the index expression, we need not test against it. */
5979 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5982 /* If this node has a right branch, the value at the right must be greater
5983 than that at this node, so it cannot be bounded at the top and
5984 we need not bother testing any further. */
5989 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5990 node->high, integer_one_node));
5992 /* If the addition above overflowed, we can't verify anything.
5993 Otherwise, look for a parent that tests our value + 1. */
5995 if (! tree_int_cst_lt (node->high, high_plus_one))
5998 for (pnode = node->parent; pnode; pnode = pnode->parent)
5999 if (tree_int_cst_equal (high_plus_one, pnode->low))
6005 /* Search the parent sections of the
6006 case node tree to see if both tests for the upper and lower
6007 bounds of NODE would be redundant. */
6009 static int
6010 node_is_bounded (node, index_type)
6011 case_node_ptr node;
6012 tree index_type;
6014 return (node_has_low_bound (node, index_type)
6015 && node_has_high_bound (node, index_type));
6018 /* Emit an unconditional jump to LABEL unless it would be dead code. */
6020 static void
6021 emit_jump_if_reachable (label)
6022 rtx label;
6024 if (GET_CODE (get_last_insn ()) != BARRIER)
6025 emit_jump (label);
6028 /* Emit step-by-step code to select a case for the value of INDEX.
6029 The thus generated decision tree follows the form of the
6030 case-node binary tree NODE, whose nodes represent test conditions.
6031 INDEX_TYPE is the type of the index of the switch.
6033 Care is taken to prune redundant tests from the decision tree
6034 by detecting any boundary conditions already checked by
6035 emitted rtx. (See node_has_high_bound, node_has_low_bound
6036 and node_is_bounded, above.)
6038 Where the test conditions can be shown to be redundant we emit
6039 an unconditional jump to the target code. As a further
6040 optimization, the subordinates of a tree node are examined to
6041 check for bounded nodes. In this case conditional and/or
6042 unconditional jumps as a result of the boundary check for the
6043 current node are arranged to target the subordinates' associated
6044 code for out of bound conditions on the current node.
6046 We can assume that when control reaches the code generated here,
6047 the index value has already been compared with the parents
6048 of this node, and determined to be on the same side of each parent
6049 as this node is. Thus, if this node tests for the value 51,
6050 and a parent tested for 52, we don't need to consider
6051 the possibility of a value greater than 51. If another parent
6052 tests for the value 50, then this node need not test anything. */
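/* For illustration, for a balanced tree over the single values
   {1, 4, 7} with 4 at the root, the emitted tests follow this sketch:

       if (index == 4) goto L4;
       if (index > 4) goto Ltest;     -- dispatch toward right subtree
       if (index == 1) goto L1;       -- left subtree
       goto Ldefault;
     Ltest:
       if (index == 7) goto L7;       -- right subtree
       goto Ldefault;

   with any test already decided by a parent pruned away.  */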
6054 static void
6055 emit_case_nodes (index, node, default_label, index_type)
6056 rtx index;
6057 case_node_ptr node;
6058 rtx default_label;
6059 tree index_type;
6061 /* If INDEX has an unsigned type, we must make unsigned branches. */
6062 int unsignedp = TREE_UNSIGNED (index_type);
6063 enum machine_mode mode = GET_MODE (index);
6064 enum machine_mode imode = TYPE_MODE (index_type);
6066 /* See if our parents have already tested everything for us.
6067 If they have, emit an unconditional jump for this node. */
6068 if (node_is_bounded (node, index_type))
6069 emit_jump (label_rtx (node->code_label));
6071 else if (tree_int_cst_equal (node->low, node->high))
6073 /* Node is single valued. First see if the index expression matches
6074 this node and then check our children, if any. */
6076 do_jump_if_equal (index,
6077 convert_modes (mode, imode,
6078 expand_expr (node->low, NULL_RTX,
6081 label_rtx (node->code_label), unsignedp);
6083 if (node->right != 0 && node->left != 0)
6085 /* This node has children on both sides.
6086 Dispatch to one side or the other
6087 by comparing the index value with this node's value.
6088 If one subtree is bounded, check that one first,
6089 so we can avoid real branches in the tree. */
6091 if (node_is_bounded (node->right, index_type))
6093 emit_cmp_and_jump_insns (index,
6096 expand_expr (node->high, NULL_RTX,
6099 GT, NULL_RTX, mode, unsignedp,
6100 label_rtx (node->right->code_label));
6101 emit_case_nodes (index, node->left, default_label, index_type);
6104 else if (node_is_bounded (node->left, index_type))
6106 emit_cmp_and_jump_insns (index,
6109 expand_expr (node->high, NULL_RTX,
6112 LT, NULL_RTX, mode, unsignedp,
6113 label_rtx (node->left->code_label));
6114 emit_case_nodes (index, node->right, default_label, index_type);
6119 /* Neither node is bounded. First distinguish the two sides;
6120 then emit the code for one side at a time. */
6122 tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6124 /* See if the value is on the right. */
6125 emit_cmp_and_jump_insns (index,
6128 expand_expr (node->high, NULL_RTX,
6131 GT, NULL_RTX, mode, unsignedp,
6132 label_rtx (test_label));
6134 /* Value must be on the left.
6135 Handle the left-hand subtree. */
6136 emit_case_nodes (index, node->left, default_label, index_type);
6137 /* If left-hand subtree does nothing,
6138 go to default. */
6139 emit_jump_if_reachable (default_label);
6141 /* Code branches here for the right-hand subtree. */
6142 expand_label (test_label);
6143 emit_case_nodes (index, node->right, default_label, index_type);
6147 else if (node->right != 0 && node->left == 0)
6149 /* Here we have a right child but no left so we issue a conditional
6150 branch to default and process the right child.
6152 Omit the conditional branch to default if it would avoid only one
6153 right child; it costs too much space to save so little time. */
6155 if (node->right->right || node->right->left
6156 || !tree_int_cst_equal (node->right->low, node->right->high))
6158 if (!node_has_low_bound (node, index_type))
6160 emit_cmp_and_jump_insns (index,
6163 expand_expr (node->high, NULL_RTX,
6166 LT, NULL_RTX, mode, unsignedp,
6170 emit_case_nodes (index, node->right, default_label, index_type);
6173 /* We cannot process node->right normally
6174 since we haven't ruled out the numbers less than
6175 this node's value. So handle node->right explicitly. */
6176 do_jump_if_equal (index,
6179 expand_expr (node->right->low, NULL_RTX,
6182 label_rtx (node->right->code_label), unsignedp);
6185 else if (node->right == 0 && node->left != 0)
6187 /* Just one subtree, on the left. */
6188 if (node->left->left || node->left->right
6189 || !tree_int_cst_equal (node->left->low, node->left->high))
6191 if (!node_has_high_bound (node, index_type))
6193 emit_cmp_and_jump_insns (index,
6196 expand_expr (node->high, NULL_RTX,
6199 GT, NULL_RTX, mode, unsignedp,
6203 emit_case_nodes (index, node->left, default_label, index_type);
6206 /* We cannot process node->left normally
6207 since we haven't ruled out the numbers greater than
6208 this node's value. So handle node->left explicitly. */
6209 do_jump_if_equal (index,
6212 expand_expr (node->left->low, NULL_RTX,
6215 label_rtx (node->left->code_label), unsignedp);
6220 /* Node is a range. These cases are very similar to those for a single
6221 value, except that we do not start by testing whether this node
6222 is the one to branch to. */
6224 if (node->right != 0 && node->left != 0)
6226 /* Node has subtrees on both sides.
6227 If the right-hand subtree is bounded,
6228 test for it first, since we can go straight there.
6229 Otherwise, we need to make a branch in the control structure,
6230 then handle the two subtrees. */
6231 tree test_label = 0;
6233 if (node_is_bounded (node->right, index_type))
6234 /* Right hand node is fully bounded so we can eliminate any
6235 testing and branch directly to the target code. */
6236 emit_cmp_and_jump_insns (index,
6239 expand_expr (node->high, NULL_RTX,
6242 GT, NULL_RTX, mode, unsignedp,
6243 label_rtx (node->right->code_label));
6246 /* Right hand node requires testing.
6247 Branch to a label where we will handle it later. */
6249 test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6250 emit_cmp_and_jump_insns (index,
6253 expand_expr (node->high, NULL_RTX,
6256 GT, NULL_RTX, mode, unsignedp,
6257 label_rtx (test_label));
6260 /* Value belongs to this node or to the left-hand subtree. */
6262 emit_cmp_and_jump_insns (index,
6265 expand_expr (node->low, NULL_RTX,
6268 GE, NULL_RTX, mode, unsignedp,
6269 label_rtx (node->code_label));
6271 /* Handle the left-hand subtree. */
6272 emit_case_nodes (index, node->left, default_label, index_type);
6274 /* If right node had to be handled later, do that now. */
6278 /* If the left-hand subtree fell through,
6279 don't let it fall into the right-hand subtree. */
6280 emit_jump_if_reachable (default_label);
6282 expand_label (test_label);
6283 emit_case_nodes (index, node->right, default_label, index_type);
6287 else if (node->right != 0 && node->left == 0)
6289 /* Deal with values to the left of this node,
6290 if they are possible. */
6291 if (!node_has_low_bound (node, index_type))
6293 emit_cmp_and_jump_insns (index,
6296 expand_expr (node->low, NULL_RTX,
6299 LT, NULL_RTX, mode, unsignedp,
6303 /* Value belongs to this node or to the right-hand subtree. */
6305 emit_cmp_and_jump_insns (index,
6308 expand_expr (node->high, NULL_RTX,
6311 LE, NULL_RTX, mode, unsignedp,
6312 label_rtx (node->code_label));
6314 emit_case_nodes (index, node->right, default_label, index_type);
6317 else if (node->right == 0 && node->left != 0)
6319 /* Deal with values to the right of this node,
6320 if they are possible. */
6321 if (!node_has_high_bound (node, index_type))
6323 emit_cmp_and_jump_insns (index,
6326 expand_expr (node->high, NULL_RTX,
6329 GT, NULL_RTX, mode, unsignedp,
6333 /* Value belongs to this node or to the left-hand subtree. */
6335 emit_cmp_and_jump_insns (index,
6338 expand_expr (node->low, NULL_RTX,
6341 GE, NULL_RTX, mode, unsignedp,
6342 label_rtx (node->code_label));
6344 emit_case_nodes (index, node->left, default_label, index_type);
6349 /* Node has no children so we check low and high bounds to remove
6350 redundant tests. Only one of the bounds can exist,
6351 since otherwise this node is bounded--a case tested already. */
6352 int high_bound = node_has_high_bound (node, index_type);
6353 int low_bound = node_has_low_bound (node, index_type);
6355 if (!high_bound && low_bound)
6357 emit_cmp_and_jump_insns (index,
6360 expand_expr (node->high, NULL_RTX,
6363 GT, NULL_RTX, mode, unsignedp,
6367 else if (!low_bound && high_bound)
6369 emit_cmp_and_jump_insns (index,
6372 expand_expr (node->low, NULL_RTX,
6375 LT, NULL_RTX, mode, unsignedp,
6378 else if (!low_bound && !high_bound)
6380 /* Widen LOW and HIGH to the same width as INDEX. */
6381 tree type = type_for_mode (mode, unsignedp);
6382 tree low = build1 (CONVERT_EXPR, type, node->low);
6383 tree high = build1 (CONVERT_EXPR, type, node->high);
6384 rtx low_rtx, new_index, new_bound;
6386 /* Instead of doing two branches, emit one unsigned branch for
6387 (index-low) > (high-low). */
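/* Worked example of the single-comparison range test emitted below: for
   the range 3 ... 7, new_bound is 4, and with 32-bit registers

       index = 5:  5 - 3 = 2           -- 2 <= 4, stays in the case
       index = 1:  1 - 3 = 0xfffffffe  -- wraps, > 4, goes to default

   so one unsigned subtract-and-compare replaces both bound checks.  */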
6388 low_rtx = expand_expr (low, NULL_RTX, mode, 0);
6389 new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
6390 NULL_RTX, unsignedp,
6392 new_bound = expand_expr (fold (build (MINUS_EXPR, type,
6396 emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
6397 mode, 1, default_label);
6400 emit_jump (label_rtx (node->code_label));