1 /* Lower complex number operations to scalar operations.
2 Copyright (C) 2004-2016 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 3, or (at your option) any later version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
28 #include "tree-pass.h"
30 #include "fold-const.h"
31 #include "stor-layout.h"
34 #include "gimple-iterator.h"
35 #include "gimplify-me.h"
39 #include "tree-ssa-propagate.h"
40 #include "tree-hasher.h"
45 /* For each complex ssa name, a lattice value. We're interested in finding
46 out whether a complex number is degenerate in some way, having only real
47 or only imaginary parts. */
57 /* The type complex_lattice_t holds combinations of the above constants. */
59 typedef int complex_lattice_t;
61 #define PAIR(a, b) ((a) << 2 | (b))
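/* For illustration, assuming the lattice encoding above (UNINITIALIZED = 0,
   ONLY_REAL = 1, ONLY_IMAG = 2, VARYING = 3): PAIR (ONLY_REAL, ONLY_IMAG)
   yields (1 << 2) | 2 == 6, so a single switch over PAIR (al, bl) can
   dispatch on every combination of the two operand lattice values with
   distinct case labels.  */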
64 static vec<complex_lattice_t> complex_lattice_values;
66 /* For each complex variable, a pair of variables for the components exists in the hashtable. */
68 static int_tree_htab_type *complex_variable_components;
70 /* For each complex SSA_NAME, a pair of ssa names for the components. */
71 static vec<tree> complex_ssa_name_components;
73 /* Vector of PHI triplets (original complex PHI and corresponding real and
74 imag PHIs if real and/or imag PHIs contain temporarily
75 non-SSA_NAME/non-invariant args that need to be replaced by SSA_NAMEs). */
76 static vec<gphi *> phis_to_revisit;
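/* Entries are pushed in groups of three: the original complex PHI followed
   by the real-part and imag-part PHIs (either of which may be NULL); they
   are consumed three at a time by the revisit loop in tree_lower_complex.  */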
78 /* Lookup UID in the complex_variable_components hashtable and return the associated tree. */
81 cvc_lookup (unsigned int uid)
83 struct int_tree_map in;
85 return complex_variable_components->find_with_hash (in, uid).to;
88 /* Insert the pair UID, TO into the complex_variable_components hashtable. */
91 cvc_insert (unsigned int uid, tree to)
97 loc = complex_variable_components->find_slot_with_hash (h, uid, INSERT);
102 /* Return true if T is not a zero constant. In the case of real values,
103 only +0.0 is ever treated as zero. */
106 some_nonzerop (tree t)
110 /* Operations with real or imaginary part of a complex number zero
111 cannot be treated the same as operations with a real or imaginary
112 operand if we care about the signs of zeros in the result. */
113 if (TREE_CODE (t) == REAL_CST && !flag_signed_zeros)
114 zerop = real_identical (&TREE_REAL_CST (t), &dconst0);
115 else if (TREE_CODE (t) == FIXED_CST)
116 zerop = fixed_zerop (t);
117 else if (TREE_CODE (t) == INTEGER_CST)
118 zerop = integer_zerop (t);
124 /* Compute a lattice value from the components REAL and IMAG of a complex value. */
127 static complex_lattice_t
128 find_lattice_value_parts (tree real, tree imag)
131 complex_lattice_t ret;
133 r = some_nonzerop (real);
134 i = some_nonzerop (imag);
135 ret = r * ONLY_REAL + i * ONLY_IMAG;
137 /* ??? On occasion we could do better than mapping 0+0i to real, but we
138 certainly don't want to leave it UNINITIALIZED, which eventually gets
139 mapped to VARYING. */
140 if (ret == UNINITIALIZED)
147 /* Compute a lattice value from gimple_val T. */
149 static complex_lattice_t
150 find_lattice_value (tree t)
154 switch (TREE_CODE (t))
157 return complex_lattice_values[SSA_NAME_VERSION (t)];
160 real = TREE_REALPART (t);
161 imag = TREE_IMAGPART (t);
168 return find_lattice_value_parts (real, imag);
171 /* Determine if LHS is something for which we're interested in seeing
172 simulation results. */
175 is_complex_reg (tree lhs)
177 return TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE && is_gimple_reg (lhs);
180 /* Mark the incoming parameters to the function as VARYING. */
183 init_parameter_lattice_values (void)
187 for (parm = DECL_ARGUMENTS (cfun->decl); parm ; parm = DECL_CHAIN (parm))
188 if (is_complex_reg (parm)
189 && (ssa_name = ssa_default_def (cfun, parm)) != NULL_TREE)
190 complex_lattice_values[SSA_NAME_VERSION (ssa_name)] = VARYING;
193 /* Initialize simulation state for each statement. Return false if we
194 found no statements we want to simulate, and thus there's nothing
195 for the entire pass to do. */
198 init_dont_simulate_again (void)
201 bool saw_a_complex_op = false;
203 FOR_EACH_BB_FN (bb, cfun)
205 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
208 gphi *phi = gsi.phi ();
209 prop_set_simulate_again (phi,
210 is_complex_reg (gimple_phi_result (phi)));
213 for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
220 stmt = gsi_stmt (gsi);
221 op0 = op1 = NULL_TREE;
223 /* Most control-altering statements must be initially
224 simulated, else we won't cover the entire cfg. */
225 sim_again_p = stmt_ends_bb_p (stmt);
227 switch (gimple_code (stmt))
230 if (gimple_call_lhs (stmt))
231 sim_again_p = is_complex_reg (gimple_call_lhs (stmt));
235 sim_again_p = is_complex_reg (gimple_assign_lhs (stmt));
236 if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
237 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR)
238 op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
240 op0 = gimple_assign_rhs1 (stmt);
241 if (gimple_num_ops (stmt) > 2)
242 op1 = gimple_assign_rhs2 (stmt);
246 op0 = gimple_cond_lhs (stmt);
247 op1 = gimple_cond_rhs (stmt);
255 switch (gimple_expr_code (stmt))
267 if (TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE
268 || TREE_CODE (TREE_TYPE (op1)) == COMPLEX_TYPE)
269 saw_a_complex_op = true;
274 if (TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
275 saw_a_complex_op = true;
280 /* The total store transformation performed during
281 gimplification creates such uninitialized loads
282 and we need to lower the statement to be able to fix things up later. */
284 if (TREE_CODE (op0) == SSA_NAME
285 && ssa_undefined_value_p (op0))
286 saw_a_complex_op = true;
293 prop_set_simulate_again (stmt, sim_again_p);
297 return saw_a_complex_op;
301 /* Evaluate statement STMT against the complex lattice defined above. */
303 static enum ssa_prop_result
304 complex_visit_stmt (gimple *stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
307 complex_lattice_t new_l, old_l, op1_l, op2_l;
311 lhs = gimple_get_lhs (stmt);
312 /* Skip anything but GIMPLE_ASSIGN and GIMPLE_CALL with a lhs. */
314 return SSA_PROP_VARYING;
316 /* These conditions should be satisfied due to the initial filter
317 set up in init_dont_simulate_again. */
318 gcc_assert (TREE_CODE (lhs) == SSA_NAME);
319 gcc_assert (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE);
322 ver = SSA_NAME_VERSION (lhs);
323 old_l = complex_lattice_values[ver];
325 switch (gimple_expr_code (stmt))
329 new_l = find_lattice_value (gimple_assign_rhs1 (stmt));
333 new_l = find_lattice_value_parts (gimple_assign_rhs1 (stmt),
334 gimple_assign_rhs2 (stmt));
339 op1_l = find_lattice_value (gimple_assign_rhs1 (stmt));
340 op2_l = find_lattice_value (gimple_assign_rhs2 (stmt));
342 /* We've set up the lattice values such that IOR neatly models addition and subtraction. */
344 new_l = op1_l | op2_l;
353 op1_l = find_lattice_value (gimple_assign_rhs1 (stmt));
354 op2_l = find_lattice_value (gimple_assign_rhs2 (stmt));
356 /* Obviously, if either varies, so does the result. */
357 if (op1_l == VARYING || op2_l == VARYING)
359 /* Don't prematurely promote variables if we've not yet seen all of their inputs. */
361 else if (op1_l == UNINITIALIZED)
363 else if (op2_l == UNINITIALIZED)
367 /* At this point both numbers have only one component. If the
368 numbers are of opposite kind, the result is imaginary,
369 otherwise the result is real. The add/subtract translates
370 the real/imag from/to 0/1; the ^ performs the comparison. */
371 new_l = ((op1_l - ONLY_REAL) ^ (op2_l - ONLY_REAL)) + ONLY_REAL;
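/* Worked example, assuming ONLY_REAL = 1 and ONLY_IMAG = 2: for
   ONLY_REAL * ONLY_IMAG this gives ((1-1) ^ (2-1)) + 1 = 2 = ONLY_IMAG,
   and for ONLY_IMAG * ONLY_IMAG it gives ((2-1) ^ (2-1)) + 1 = 1 =
   ONLY_REAL, matching x * iy = ixy and ix * iy = -xy.  */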
373 /* Don't allow the lattice value to flip-flop indefinitely. */
380 new_l = find_lattice_value (gimple_assign_rhs1 (stmt));
388 /* If nothing changed this round, let the propagator know. */
390 return SSA_PROP_NOT_INTERESTING;
392 complex_lattice_values[ver] = new_l;
393 return new_l == VARYING ? SSA_PROP_VARYING : SSA_PROP_INTERESTING;
396 /* Evaluate a PHI node against the complex lattice defined above. */
398 static enum ssa_prop_result
399 complex_visit_phi (gphi *phi)
401 complex_lattice_t new_l, old_l;
406 lhs = gimple_phi_result (phi);
408 /* This condition should be satisfied due to the initial filter
409 set up in init_dont_simulate_again. */
410 gcc_assert (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE);
412 /* We've set up the lattice values such that IOR neatly models PHI meet. */
413 new_l = UNINITIALIZED;
414 for (i = gimple_phi_num_args (phi) - 1; i >= 0; --i)
415 new_l |= find_lattice_value (gimple_phi_arg_def (phi, i));
417 ver = SSA_NAME_VERSION (lhs);
418 old_l = complex_lattice_values[ver];
421 return SSA_PROP_NOT_INTERESTING;
423 complex_lattice_values[ver] = new_l;
424 return new_l == VARYING ? SSA_PROP_VARYING : SSA_PROP_INTERESTING;
427 /* Create one backing variable for a complex component of ORIG. */
430 create_one_component_var (tree type, tree orig, const char *prefix,
431 const char *suffix, enum tree_code code)
433 tree r = create_tmp_var (type, prefix);
435 DECL_SOURCE_LOCATION (r) = DECL_SOURCE_LOCATION (orig);
436 DECL_ARTIFICIAL (r) = 1;
438 if (DECL_NAME (orig) && !DECL_IGNORED_P (orig))
440 const char *name = IDENTIFIER_POINTER (DECL_NAME (orig));
441 name = ACONCAT ((name, suffix, NULL));
442 DECL_NAME (r) = get_identifier (name);
444 SET_DECL_DEBUG_EXPR (r, build1 (code, type, orig));
445 DECL_HAS_DEBUG_EXPR_P (r) = 1;
446 DECL_IGNORED_P (r) = 0;
447 TREE_NO_WARNING (r) = TREE_NO_WARNING (orig);
451 DECL_IGNORED_P (r) = 1;
452 TREE_NO_WARNING (r) = 1;
458 /* Retrieve a value for a complex component of VAR. */
461 get_component_var (tree var, bool imag_p)
463 size_t decl_index = DECL_UID (var) * 2 + imag_p;
464 tree ret = cvc_lookup (decl_index);
468 ret = create_one_component_var (TREE_TYPE (TREE_TYPE (var)), var,
469 imag_p ? "CI" : "CR",
470 imag_p ? "$imag" : "$real",
471 imag_p ? IMAGPART_EXPR : REALPART_EXPR);
472 cvc_insert (decl_index, ret);
478 /* Retrieve a value for a complex component of SSA_NAME. */
481 get_component_ssa_name (tree ssa_name, bool imag_p)
483 complex_lattice_t lattice = find_lattice_value (ssa_name);
484 size_t ssa_name_index;
487 if (lattice == (imag_p ? ONLY_REAL : ONLY_IMAG))
489 tree inner_type = TREE_TYPE (TREE_TYPE (ssa_name));
490 if (SCALAR_FLOAT_TYPE_P (inner_type))
491 return build_real (inner_type, dconst0);
493 return build_int_cst (inner_type, 0);
496 ssa_name_index = SSA_NAME_VERSION (ssa_name) * 2 + imag_p;
497 ret = complex_ssa_name_components[ssa_name_index];
500 if (SSA_NAME_VAR (ssa_name))
501 ret = get_component_var (SSA_NAME_VAR (ssa_name), imag_p);
503 ret = TREE_TYPE (TREE_TYPE (ssa_name));
504 ret = make_ssa_name (ret);
506 /* Copy some properties from the original. In particular, whether it
507 is used in an abnormal phi, and whether it's uninitialized. */
508 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ret)
509 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name);
510 if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
511 && TREE_CODE (SSA_NAME_VAR (ssa_name)) == VAR_DECL)
513 SSA_NAME_DEF_STMT (ret) = SSA_NAME_DEF_STMT (ssa_name);
514 set_ssa_default_def (cfun, SSA_NAME_VAR (ret), ret);
517 complex_ssa_name_components[ssa_name_index] = ret;
523 /* Set a value for a complex component of SSA_NAME, return a
524 gimple_seq of stuff that needs doing. */
527 set_component_ssa_name (tree ssa_name, bool imag_p, tree value)
529 complex_lattice_t lattice = find_lattice_value (ssa_name);
530 size_t ssa_name_index;
535 /* We know the value must be zero, else there's a bug in our lattice
536 analysis. But the value may well be a variable known to contain
537 zero. We should be safe ignoring it. */
538 if (lattice == (imag_p ? ONLY_REAL : ONLY_IMAG))
541 /* If we've already assigned an SSA_NAME to this component, then this
542 means that our walk of the basic blocks found a use before the set.
543 This is fine. Now we should create an initialization for the value
544 we created earlier. */
545 ssa_name_index = SSA_NAME_VERSION (ssa_name) * 2 + imag_p;
546 comp = complex_ssa_name_components[ssa_name_index];
550 /* If we've nothing assigned, and the value we're given is already stable,
551 then install that as the value for this SSA_NAME. This preemptively
552 copy-propagates the value, which avoids unnecessary memory allocation. */
553 else if (is_gimple_min_invariant (value)
554 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
556 complex_ssa_name_components[ssa_name_index] = value;
559 else if (TREE_CODE (value) == SSA_NAME
560 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
562 /* Replace an anonymous base value with the variable from cvc_lookup.
563 This should result in better debug info. */
564 if (SSA_NAME_VAR (ssa_name)
565 && (!SSA_NAME_VAR (value) || DECL_IGNORED_P (SSA_NAME_VAR (value)))
566 && !DECL_IGNORED_P (SSA_NAME_VAR (ssa_name)))
568 comp = get_component_var (SSA_NAME_VAR (ssa_name), imag_p);
569 replace_ssa_name_symbol (value, comp);
572 complex_ssa_name_components[ssa_name_index] = value;
576 /* Finally, we need to stabilize the result by installing the value into a new ssa name. */
579 comp = get_component_ssa_name (ssa_name, imag_p);
581 /* Do all the work to assign VALUE to COMP. */
583 value = force_gimple_operand (value, &list, false, NULL);
584 last = gimple_build_assign (comp, value);
585 gimple_seq_add_stmt (&list, last);
586 gcc_assert (SSA_NAME_DEF_STMT (comp) == last);
591 /* Extract the real or imaginary part of a complex variable or constant.
592 Make sure that it's a proper gimple_val and gimplify it if not.
593 Emit any new code before gsi. */
596 extract_component (gimple_stmt_iterator *gsi, tree t, bool imagpart_p,
597 bool gimple_p, bool phiarg_p = false)
599 switch (TREE_CODE (t))
602 return imagpart_p ? TREE_IMAGPART (t) : TREE_REALPART (t);
612 case VIEW_CONVERT_EXPR:
615 tree inner_type = TREE_TYPE (TREE_TYPE (t));
617 t = build1 ((imagpart_p ? IMAGPART_EXPR : REALPART_EXPR),
618 inner_type, unshare_expr (t));
621 t = force_gimple_operand_gsi (gsi, t, true, NULL, true,
628 t = get_component_ssa_name (t, imagpart_p);
629 if (TREE_CODE (t) == SSA_NAME && SSA_NAME_DEF_STMT (t) == NULL)
630 gcc_assert (phiarg_p);
638 /* Update the complex components of the ssa name on the lhs of STMT. */
641 update_complex_components (gimple_stmt_iterator *gsi, gimple *stmt, tree r,
647 lhs = gimple_get_lhs (stmt);
649 list = set_component_ssa_name (lhs, false, r);
651 gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);
653 list = set_component_ssa_name (lhs, true, i);
655 gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);
659 update_complex_components_on_edge (edge e, tree lhs, tree r, tree i)
663 list = set_component_ssa_name (lhs, false, r);
665 gsi_insert_seq_on_edge (e, list);
667 list = set_component_ssa_name (lhs, true, i);
669 gsi_insert_seq_on_edge (e, list);
673 /* Update an assignment to a complex variable in place. */
676 update_complex_assignment (gimple_stmt_iterator *gsi, tree r, tree i)
680 gimple_assign_set_rhs_with_ops (gsi, COMPLEX_EXPR, r, i);
681 stmt = gsi_stmt (*gsi);
683 if (maybe_clean_eh_stmt (stmt))
684 gimple_purge_dead_eh_edges (gimple_bb (stmt));
686 if (gimple_in_ssa_p (cfun))
687 update_complex_components (gsi, gsi_stmt (*gsi), r, i);
691 /* Generate code at the entry point of the function to initialize the
692 component variables for a complex parameter. */
695 update_parameter_components (void)
697 edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
700 for (parm = DECL_ARGUMENTS (cfun->decl); parm ; parm = DECL_CHAIN (parm))
702 tree type = TREE_TYPE (parm);
705 if (TREE_CODE (type) != COMPLEX_TYPE || !is_gimple_reg (parm))
708 type = TREE_TYPE (type);
709 ssa_name = ssa_default_def (cfun, parm);
713 r = build1 (REALPART_EXPR, type, ssa_name);
714 i = build1 (IMAGPART_EXPR, type, ssa_name);
715 update_complex_components_on_edge (entry_edge, ssa_name, r, i);
719 /* Generate code to set the component variables of a complex variable
720 to match the PHI statements in block BB. */
723 update_phi_components (basic_block bb)
727 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
729 gphi *phi = gsi.phi ();
731 if (is_complex_reg (gimple_phi_result (phi)))
733 gphi *p[2] = { NULL, NULL };
734 unsigned int i, j, n;
735 bool revisit_phi = false;
737 for (j = 0; j < 2; j++)
739 tree l = get_component_ssa_name (gimple_phi_result (phi), j > 0);
740 if (TREE_CODE (l) == SSA_NAME)
741 p[j] = create_phi_node (l, bb);
744 for (i = 0, n = gimple_phi_num_args (phi); i < n; ++i)
746 tree comp, arg = gimple_phi_arg_def (phi, i);
747 for (j = 0; j < 2; j++)
750 comp = extract_component (NULL, arg, j > 0, false, true);
751 if (TREE_CODE (comp) == SSA_NAME
752 && SSA_NAME_DEF_STMT (comp) == NULL)
754 /* For the benefit of any gimple simplification during
755 this pass that might walk SSA_NAME def stmts,
756 don't add SSA_NAMEs without definitions into the
757 PHI arguments, but put a decl in there instead
758 temporarily, and revisit this PHI later on. */
759 if (SSA_NAME_VAR (comp))
760 comp = SSA_NAME_VAR (comp);
762 comp = create_tmp_reg (TREE_TYPE (comp),
766 SET_PHI_ARG_DEF (p[j], i, comp);
772 phis_to_revisit.safe_push (phi);
773 phis_to_revisit.safe_push (p[0]);
774 phis_to_revisit.safe_push (p[1]);
780 /* Expand a complex move to scalars. */
783 expand_complex_move (gimple_stmt_iterator *gsi, tree type)
785 tree inner_type = TREE_TYPE (type);
787 gimple *stmt = gsi_stmt (*gsi);
789 if (is_gimple_assign (stmt))
791 lhs = gimple_assign_lhs (stmt);
792 if (gimple_num_ops (stmt) == 2)
793 rhs = gimple_assign_rhs1 (stmt);
797 else if (is_gimple_call (stmt))
799 lhs = gimple_call_lhs (stmt);
805 if (TREE_CODE (lhs) == SSA_NAME)
807 if (is_ctrl_altering_stmt (stmt))
811 /* The value is not assigned on the exception edges, so we need not
812 concern ourselves there. We do need to update on the fallthru edge. */
814 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
818 r = build1 (REALPART_EXPR, inner_type, lhs);
819 i = build1 (IMAGPART_EXPR, inner_type, lhs);
820 update_complex_components_on_edge (e, lhs, r, i);
822 else if (is_gimple_call (stmt)
823 || gimple_has_side_effects (stmt)
824 || gimple_assign_rhs_code (stmt) == PAREN_EXPR)
826 r = build1 (REALPART_EXPR, inner_type, lhs);
827 i = build1 (IMAGPART_EXPR, inner_type, lhs);
828 update_complex_components (gsi, stmt, r, i);
832 if (gimple_assign_rhs_code (stmt) != COMPLEX_EXPR)
834 r = extract_component (gsi, rhs, 0, true);
835 i = extract_component (gsi, rhs, 1, true);
839 r = gimple_assign_rhs1 (stmt);
840 i = gimple_assign_rhs2 (stmt);
842 update_complex_assignment (gsi, r, i);
845 else if (rhs && TREE_CODE (rhs) == SSA_NAME && !TREE_SIDE_EFFECTS (lhs))
851 loc = gimple_location (stmt);
852 r = extract_component (gsi, rhs, 0, false);
853 i = extract_component (gsi, rhs, 1, false);
855 x = build1 (REALPART_EXPR, inner_type, unshare_expr (lhs));
856 t = gimple_build_assign (x, r);
857 gimple_set_location (t, loc);
858 gsi_insert_before (gsi, t, GSI_SAME_STMT);
860 if (stmt == gsi_stmt (*gsi))
862 x = build1 (IMAGPART_EXPR, inner_type, unshare_expr (lhs));
863 gimple_assign_set_lhs (stmt, x);
864 gimple_assign_set_rhs1 (stmt, i);
868 x = build1 (IMAGPART_EXPR, inner_type, unshare_expr (lhs));
869 t = gimple_build_assign (x, i);
870 gimple_set_location (t, loc);
871 gsi_insert_before (gsi, t, GSI_SAME_STMT);
873 stmt = gsi_stmt (*gsi);
874 gcc_assert (gimple_code (stmt) == GIMPLE_RETURN);
875 gimple_return_set_retval (as_a <greturn *> (stmt), lhs);
882 /* Expand complex addition to scalars:
883 a + b = (ar + br) + i(ai + bi)
884 a - b = (ar - br) + i(ai - bi)
888 expand_complex_addition (gimple_stmt_iterator *gsi, tree inner_type,
889 tree ar, tree ai, tree br, tree bi,
891 complex_lattice_t al, complex_lattice_t bl)
895 switch (PAIR (al, bl))
897 case PAIR (ONLY_REAL, ONLY_REAL):
898 rr = gimplify_build2 (gsi, code, inner_type, ar, br);
902 case PAIR (ONLY_REAL, ONLY_IMAG):
904 if (code == MINUS_EXPR)
905 ri = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ai, bi);
910 case PAIR (ONLY_IMAG, ONLY_REAL):
911 if (code == MINUS_EXPR)
912 rr = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ar, br);
918 case PAIR (ONLY_IMAG, ONLY_IMAG):
920 ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
923 case PAIR (VARYING, ONLY_REAL):
924 rr = gimplify_build2 (gsi, code, inner_type, ar, br);
928 case PAIR (VARYING, ONLY_IMAG):
930 ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
933 case PAIR (ONLY_REAL, VARYING):
934 if (code == MINUS_EXPR)
936 rr = gimplify_build2 (gsi, code, inner_type, ar, br);
940 case PAIR (ONLY_IMAG, VARYING):
941 if (code == MINUS_EXPR)
944 ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
947 case PAIR (VARYING, VARYING):
949 rr = gimplify_build2 (gsi, code, inner_type, ar, br);
950 ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
957 update_complex_assignment (gsi, rr, ri);
960 /* Expand a complex multiplication or division to a libcall to the
961 C99-compliant routines. */
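/* For example, a double _Complex multiplication is expected to become a call
   to __muldc3 and a division a call to __divdc3 from libgcc; the exact entry
   point is selected from the machine mode of the result below.  */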
964 expand_complex_libcall (gimple_stmt_iterator *gsi, tree ar, tree ai,
965 tree br, tree bi, enum tree_code code)
968 enum built_in_function bcode;
973 old_stmt = gsi_stmt (*gsi);
974 lhs = gimple_assign_lhs (old_stmt);
975 type = TREE_TYPE (lhs);
977 mode = TYPE_MODE (type);
978 gcc_assert (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT);
980 if (code == MULT_EXPR)
981 bcode = ((enum built_in_function)
982 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
983 else if (code == RDIV_EXPR)
984 bcode = ((enum built_in_function)
985 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
988 fn = builtin_decl_explicit (bcode);
990 stmt = gimple_build_call (fn, 4, ar, ai, br, bi);
991 gimple_call_set_lhs (stmt, lhs);
993 gsi_replace (gsi, stmt, false);
995 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
996 gimple_purge_dead_eh_edges (gsi_bb (*gsi));
998 if (gimple_in_ssa_p (cfun))
1000 type = TREE_TYPE (type);
1001 update_complex_components (gsi, stmt,
1002 build1 (REALPART_EXPR, type, lhs),
1003 build1 (IMAGPART_EXPR, type, lhs));
1004 SSA_NAME_DEF_STMT (lhs) = stmt;
1008 /* Expand complex multiplication to scalars:
1009 a * b = (ar*br - ai*bi) + i(ar*bi + br*ai)
1013 expand_complex_multiplication (gimple_stmt_iterator *gsi, tree inner_type,
1014 tree ar, tree ai, tree br, tree bi,
1015 complex_lattice_t al, complex_lattice_t bl)
1021 complex_lattice_t tl;
1022 rr = ar, ar = br, br = rr;
1023 ri = ai, ai = bi, bi = ri;
1024 tl = al, al = bl, bl = tl;
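/* The operand swap above exploits commutativity of multiplication: it
   canonicalizes the lattice PAIR so that only one ordering of each mixed
   combination needs a case in the switch below.  */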
1027 switch (PAIR (al, bl))
1029 case PAIR (ONLY_REAL, ONLY_REAL):
1030 rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
1034 case PAIR (ONLY_IMAG, ONLY_REAL):
1036 if (TREE_CODE (ai) == REAL_CST
1037 && real_identical (&TREE_REAL_CST (ai), &dconst1))
1040 ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
1043 case PAIR (ONLY_IMAG, ONLY_IMAG):
1044 rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
1045 rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, rr);
1049 case PAIR (VARYING, ONLY_REAL):
1050 rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
1051 ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
1054 case PAIR (VARYING, ONLY_IMAG):
1055 rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
1056 rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, rr);
1057 ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi);
1060 case PAIR (VARYING, VARYING):
1061 if (flag_complex_method == 2 && SCALAR_FLOAT_TYPE_P (inner_type))
1063 expand_complex_libcall (gsi, ar, ai, br, bi, MULT_EXPR);
1068 tree t1, t2, t3, t4;
1070 t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
1071 t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
1072 t3 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi);
1074 /* Avoid expanding redundant multiplication for the common
1075 case of squaring a complex number. */
1076 if (ar == br && ai == bi)
1079 t4 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
1081 rr = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, t2);
1082 ri = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t3, t4);
1090 update_complex_assignment (gsi, rr, ri);
1093 /* Keep this algorithm in sync with fold-const.c:const_binop().
1095 Expand complex division to scalars, straightforward algorithm.
1096 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t), where t = br*br + bi*bi
1101 expand_complex_div_straight (gimple_stmt_iterator *gsi, tree inner_type,
1102 tree ar, tree ai, tree br, tree bi,
1103 enum tree_code code)
1105 tree rr, ri, div, t1, t2, t3;
1107 t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, br, br);
1108 t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, bi, bi);
1109 div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, t2);
1111 t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
1112 t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
1113 t3 = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, t2);
1114 rr = gimplify_build2 (gsi, code, inner_type, t3, div);
1116 t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
1117 t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi);
1118 t3 = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, t2);
1119 ri = gimplify_build2 (gsi, code, inner_type, t3, div);
1121 update_complex_assignment (gsi, rr, ri);
1124 /* Keep this algorithm in sync with fold-const.c:const_binop().
1126 Expand complex division to scalars, modified algorithm to minimize
1127 overflow with wide input ranges. */
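/* A sketch of the scheme implemented below (commonly attributed to Smith):
   when |br| >= |bi|, let ratio = bi/br and t = br + bi*ratio, so that
     a/b = (ar + ai*ratio)/t + i(ai - ar*ratio)/t;
   when |br| < |bi|, the symmetric form with ratio = br/bi is used.  The
   branch on the comparison of |br| and |bi| is emitted explicitly unless
   the comparison folds to a constant.  */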
1130 expand_complex_div_wide (gimple_stmt_iterator *gsi, tree inner_type,
1131 tree ar, tree ai, tree br, tree bi,
1132 enum tree_code code)
1134 tree rr, ri, ratio, div, t1, t2, tr, ti, compare;
1135 basic_block bb_cond, bb_true, bb_false, bb_join;
1138 /* Examine |br| < |bi|, and branch. */
1139 t1 = gimplify_build1 (gsi, ABS_EXPR, inner_type, br);
1140 t2 = gimplify_build1 (gsi, ABS_EXPR, inner_type, bi);
1141 compare = fold_build2_loc (gimple_location (gsi_stmt (*gsi)),
1142 LT_EXPR, boolean_type_node, t1, t2);
1143 STRIP_NOPS (compare);
1145 bb_cond = bb_true = bb_false = bb_join = NULL;
1146 rr = ri = tr = ti = NULL;
1147 if (TREE_CODE (compare) != INTEGER_CST)
1153 tmp = create_tmp_var (boolean_type_node);
1154 stmt = gimple_build_assign (tmp, compare);
1155 if (gimple_in_ssa_p (cfun))
1157 tmp = make_ssa_name (tmp, stmt);
1158 gimple_assign_set_lhs (stmt, tmp);
1161 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1163 cond = fold_build2_loc (gimple_location (stmt),
1164 EQ_EXPR, boolean_type_node, tmp, boolean_true_node);
1165 stmt = gimple_build_cond_from_tree (cond, NULL_TREE, NULL_TREE);
1166 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1168 /* Split the original block, and create the TRUE and FALSE blocks. */
1169 e = split_block (gsi_bb (*gsi), stmt);
1172 bb_true = create_empty_bb (bb_cond);
1173 bb_false = create_empty_bb (bb_true);
1175 /* Wire the blocks together. */
1176 e->flags = EDGE_TRUE_VALUE;
1177 redirect_edge_succ (e, bb_true);
1178 make_edge (bb_cond, bb_false, EDGE_FALSE_VALUE);
1179 make_edge (bb_true, bb_join, EDGE_FALLTHRU);
1180 make_edge (bb_false, bb_join, EDGE_FALLTHRU);
1181 add_bb_to_loop (bb_true, bb_cond->loop_father);
1182 add_bb_to_loop (bb_false, bb_cond->loop_father);
1184 /* Update dominance info. Note that bb_join's data was
1185 updated by split_block. */
1186 if (dom_info_available_p (CDI_DOMINATORS))
1188 set_immediate_dominator (CDI_DOMINATORS, bb_true, bb_cond);
1189 set_immediate_dominator (CDI_DOMINATORS, bb_false, bb_cond);
1192 rr = create_tmp_reg (inner_type);
1193 ri = create_tmp_reg (inner_type);
1196 /* In the TRUE branch (|br| < |bi|), we compute, with ratio = br/bi:
1198 div = (br * ratio) + bi;
1199 tr = (ar * ratio) + ai;
1200 ti = (ai * ratio) - ar;
1203 if (bb_true || integer_nonzerop (compare))
1207 *gsi = gsi_last_bb (bb_true);
1208 gsi_insert_after (gsi, gimple_build_nop (), GSI_NEW_STMT);
1211 ratio = gimplify_build2 (gsi, code, inner_type, br, bi);
1213 t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, br, ratio);
1214 div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, bi);
1216 t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, ratio);
1217 tr = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, ai);
1219 t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, ratio);
1220 ti = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, ar);
1222 tr = gimplify_build2 (gsi, code, inner_type, tr, div);
1223 ti = gimplify_build2 (gsi, code, inner_type, ti, div);
1227 stmt = gimple_build_assign (rr, tr);
1228 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1229 stmt = gimple_build_assign (ri, ti);
1230 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1231 gsi_remove (gsi, true);
1235 /* In the FALSE branch (|br| >= |bi|), we compute, with ratio = bi/br:
1237 div = (bi * ratio) + br;
1238 tr = (ai * ratio) + ar;
1239 ti = ai - (ar * ratio);
1242 if (bb_false || integer_zerop (compare))
1246 *gsi = gsi_last_bb (bb_false);
1247 gsi_insert_after (gsi, gimple_build_nop (), GSI_NEW_STMT);
1250 ratio = gimplify_build2 (gsi, code, inner_type, bi, br);
1252 t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, bi, ratio);
1253 div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, br);
1255 t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, ratio);
1256 tr = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, ar);
1258 t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, ratio);
1259 ti = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ai, t1);
1261 tr = gimplify_build2 (gsi, code, inner_type, tr, div);
1262 ti = gimplify_build2 (gsi, code, inner_type, ti, div);
1266 stmt = gimple_build_assign (rr, tr);
1267 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1268 stmt = gimple_build_assign (ri, ti);
1269 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1270 gsi_remove (gsi, true);
1275 *gsi = gsi_start_bb (bb_join);
1279 update_complex_assignment (gsi, rr, ri);
1282 /* Expand complex division to scalars. */
1285 expand_complex_division (gimple_stmt_iterator *gsi, tree inner_type,
1286 tree ar, tree ai, tree br, tree bi,
1287 enum tree_code code,
1288 complex_lattice_t al, complex_lattice_t bl)
1292 switch (PAIR (al, bl))
1294 case PAIR (ONLY_REAL, ONLY_REAL):
1295 rr = gimplify_build2 (gsi, code, inner_type, ar, br);
1299 case PAIR (ONLY_REAL, ONLY_IMAG):
1301 ri = gimplify_build2 (gsi, code, inner_type, ar, bi);
1302 ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ri);
1305 case PAIR (ONLY_IMAG, ONLY_REAL):
1307 ri = gimplify_build2 (gsi, code, inner_type, ai, br);
1310 case PAIR (ONLY_IMAG, ONLY_IMAG):
1311 rr = gimplify_build2 (gsi, code, inner_type, ai, bi);
1315 case PAIR (VARYING, ONLY_REAL):
1316 rr = gimplify_build2 (gsi, code, inner_type, ar, br);
1317 ri = gimplify_build2 (gsi, code, inner_type, ai, br);
1320 case PAIR (VARYING, ONLY_IMAG):
1321 rr = gimplify_build2 (gsi, code, inner_type, ai, bi);
1322 ri = gimplify_build2 (gsi, code, inner_type, ar, bi);
1323 ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ri);
1325 case PAIR (ONLY_REAL, VARYING):
1326 case PAIR (ONLY_IMAG, VARYING):
1327 case PAIR (VARYING, VARYING):
1328 switch (flag_complex_method)
1331 /* A straightforward implementation of complex divide is acceptable. */
1332 expand_complex_div_straight (gsi, inner_type, ar, ai, br, bi, code);
1336 if (SCALAR_FLOAT_TYPE_P (inner_type))
1338 expand_complex_libcall (gsi, ar, ai, br, bi, code);
1344 /* Wide ranges of inputs must work for complex divide. */
1345 expand_complex_div_wide (gsi, inner_type, ar, ai, br, bi, code);
1357 update_complex_assignment (gsi, rr, ri);
1360 /* Expand complex negation to scalars: -a = (-ar) + i(-ai). */
1365 expand_complex_negation (gimple_stmt_iterator *gsi, tree inner_type,
1370 rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ar);
1371 ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ai);
1373 update_complex_assignment (gsi, rr, ri);
1376 /* Expand complex conjugate to scalars: ~a = ar + i(-ai). */
1381 expand_complex_conjugate (gimple_stmt_iterator *gsi, tree inner_type,
1386 ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ai);
1388 update_complex_assignment (gsi, ar, ri);
1391 /* Expand complex comparison (EQ or NE only). */
1394 expand_complex_comparison (gimple_stmt_iterator *gsi, tree ar, tree ai,
1395 tree br, tree bi, enum tree_code code)
1397 tree cr, ci, cc, type;
1400 cr = gimplify_build2 (gsi, code, boolean_type_node, ar, br);
1401 ci = gimplify_build2 (gsi, code, boolean_type_node, ai, bi);
1402 cc = gimplify_build2 (gsi,
1403 (code == EQ_EXPR ? TRUTH_AND_EXPR : TRUTH_OR_EXPR),
1404 boolean_type_node, cr, ci);
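/* In other words, a == b reduces to (ar == br) && (ai == bi), and a != b
   to (ar != br) || (ai != bi); CC holds the combined boolean result, which
   is installed into the consuming statement below.  */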
1406 stmt = gsi_stmt (*gsi);
1408 switch (gimple_code (stmt))
1412 greturn *return_stmt = as_a <greturn *> (stmt);
1413 type = TREE_TYPE (gimple_return_retval (return_stmt));
1414 gimple_return_set_retval (return_stmt, fold_convert (type, cc));
1419 type = TREE_TYPE (gimple_assign_lhs (stmt));
1420 gimple_assign_set_rhs_from_tree (gsi, fold_convert (type, cc));
1421 stmt = gsi_stmt (*gsi);
1426 gcond *cond_stmt = as_a <gcond *> (stmt);
1427 gimple_cond_set_code (cond_stmt, EQ_EXPR);
1428 gimple_cond_set_lhs (cond_stmt, cc);
1429 gimple_cond_set_rhs (cond_stmt, boolean_true_node);
1440 /* Expand inline asm that sets some complex SSA_NAMEs. */
1443 expand_complex_asm (gimple_stmt_iterator *gsi)
1445 gasm *stmt = as_a <gasm *> (gsi_stmt (*gsi));
1448 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1450 tree link = gimple_asm_output_op (stmt, i);
1451 tree op = TREE_VALUE (link);
1452 if (TREE_CODE (op) == SSA_NAME
1453 && TREE_CODE (TREE_TYPE (op)) == COMPLEX_TYPE)
1455 tree type = TREE_TYPE (op);
1456 tree inner_type = TREE_TYPE (type);
1457 tree r = build1 (REALPART_EXPR, inner_type, op);
1458 tree i = build1 (IMAGPART_EXPR, inner_type, op);
1459 gimple_seq list = set_component_ssa_name (op, false, r);
1462 gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);
1464 list = set_component_ssa_name (op, true, i);
1466 gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);
1471 /* Process one statement. If we identify a complex operation, expand it. */
1474 expand_complex_operations_1 (gimple_stmt_iterator *gsi)
1476 gimple *stmt = gsi_stmt (*gsi);
1477 tree type, inner_type, lhs;
1478 tree ac, ar, ai, bc, br, bi;
1479 complex_lattice_t al, bl;
1480 enum tree_code code;
1482 if (gimple_code (stmt) == GIMPLE_ASM)
1484 expand_complex_asm (gsi);
1488 lhs = gimple_get_lhs (stmt);
1489 if (!lhs && gimple_code (stmt) != GIMPLE_COND)
1492 type = TREE_TYPE (gimple_op (stmt, 0));
1493 code = gimple_expr_code (stmt);
1495 /* Initial filter for operations we handle. */
1501 case TRUNC_DIV_EXPR:
1503 case FLOOR_DIV_EXPR:
1504 case ROUND_DIV_EXPR:
1508 if (TREE_CODE (type) != COMPLEX_TYPE)
1510 inner_type = TREE_TYPE (type);
1515 /* Note, both GIMPLE_ASSIGN and GIMPLE_COND may have an EQ_EXPR
1516 subcode, so we need to access the operands using gimple_op. */
1517 inner_type = TREE_TYPE (gimple_op (stmt, 1));
1518 if (TREE_CODE (inner_type) != COMPLEX_TYPE)
1526 /* GIMPLE_COND may also fallthru here, but we do not need to
1527 do anything with it. */
1528 if (gimple_code (stmt) == GIMPLE_COND)
1531 if (TREE_CODE (type) == COMPLEX_TYPE)
1532 expand_complex_move (gsi, type);
1533 else if (is_gimple_assign (stmt)
1534 && (gimple_assign_rhs_code (stmt) == REALPART_EXPR
1535 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR)
1536 && TREE_CODE (lhs) == SSA_NAME)
1538 rhs = gimple_assign_rhs1 (stmt);
1539 rhs = extract_component (gsi, TREE_OPERAND (rhs, 0),
1540 gimple_assign_rhs_code (stmt)
1543 gimple_assign_set_rhs_from_tree (gsi, rhs);
1544 stmt = gsi_stmt (*gsi);
1551 /* Extract the components of the two complex values. Make sure to
1552 handle the common case of the same value used twice specially. */
1553 if (is_gimple_assign (stmt))
1555 ac = gimple_assign_rhs1 (stmt);
1556 bc = (gimple_num_ops (stmt) > 2) ? gimple_assign_rhs2 (stmt) : NULL;
1558 /* GIMPLE_CALL cannot get here. */
1561 ac = gimple_cond_lhs (stmt);
1562 bc = gimple_cond_rhs (stmt);
1565 ar = extract_component (gsi, ac, false, true);
1566 ai = extract_component (gsi, ac, true, true);
1572 br = extract_component (gsi, bc, 0, true);
1573 bi = extract_component (gsi, bc, 1, true);
1576 br = bi = NULL_TREE;
1578 if (gimple_in_ssa_p (cfun))
1580 al = find_lattice_value (ac);
1581 if (al == UNINITIALIZED)
1584 if (TREE_CODE_CLASS (code) == tcc_unary)
1590 bl = find_lattice_value (bc);
1591 if (bl == UNINITIALIZED)
1602 expand_complex_addition (gsi, inner_type, ar, ai, br, bi, code, al, bl);
1606 expand_complex_multiplication (gsi, inner_type, ar, ai, br, bi, al, bl);
1609 case TRUNC_DIV_EXPR:
1611 case FLOOR_DIV_EXPR:
1612 case ROUND_DIV_EXPR:
1614 expand_complex_division (gsi, inner_type, ar, ai, br, bi, code, al, bl);
1618 expand_complex_negation (gsi, inner_type, ar, ai);
1622 expand_complex_conjugate (gsi, inner_type, ar, ai);
1627 expand_complex_comparison (gsi, ar, ai, br, bi, code);
1636 /* Entry point for complex operation lowering during optimization. */
1639 tree_lower_complex (void)
1641 gimple_stmt_iterator gsi;
1646 if (!init_dont_simulate_again ())
1649 complex_lattice_values.create (num_ssa_names);
1650 complex_lattice_values.safe_grow_cleared (num_ssa_names);
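/* Growing cleared zero-fills the new entries, which leaves every lattice
   value UNINITIALIZED (assuming the usual encoding in which UNINITIALIZED
   is 0) until the propagation below refines it.  */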
1652 init_parameter_lattice_values ();
1653 ssa_propagate (complex_visit_stmt, complex_visit_phi);
1655 complex_variable_components = new int_tree_htab_type (10);
1657 complex_ssa_name_components.create (2 * num_ssa_names);
1658 complex_ssa_name_components.safe_grow_cleared (2 * num_ssa_names);
1660 update_parameter_components ();
1662 rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
1663 n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
1664 for (i = 0; i < n_bbs; i++)
1666 bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
1667 update_phi_components (bb);
1668 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1669 expand_complex_operations_1 (&gsi);
1674 if (!phis_to_revisit.is_empty ())
1676 unsigned int n = phis_to_revisit.length ();
1677 for (unsigned int j = 0; j < n; j += 3)
1678 for (unsigned int k = 0; k < 2; k++)
1679 if (gphi *phi = phis_to_revisit[j + k + 1])
1681 unsigned int m = gimple_phi_num_args (phi);
1682 for (unsigned int l = 0; l < m; ++l)
1684 tree op = gimple_phi_arg_def (phi, l);
1685 if (TREE_CODE (op) == SSA_NAME
1686 || is_gimple_min_invariant (op))
1688 tree arg = gimple_phi_arg_def (phis_to_revisit[j], l);
1689 op = extract_component (NULL, arg, k > 0, false, false);
1690 SET_PHI_ARG_DEF (phi, l, op);
1693 phis_to_revisit.release ();
1696 gsi_commit_edge_inserts ();
1698 delete complex_variable_components;
1699 complex_variable_components = NULL;
1700 complex_ssa_name_components.release ();
1701 complex_lattice_values.release ();
1707 const pass_data pass_data_lower_complex =
1709 GIMPLE_PASS, /* type */
1710 "cplxlower", /* name */
1711 OPTGROUP_NONE, /* optinfo_flags */
1712 TV_NONE, /* tv_id */
1713 PROP_ssa, /* properties_required */
1714 PROP_gimple_lcx, /* properties_provided */
1715 0, /* properties_destroyed */
1716 0, /* todo_flags_start */
1717 TODO_update_ssa, /* todo_flags_finish */
1720 class pass_lower_complex : public gimple_opt_pass
1723 pass_lower_complex (gcc::context *ctxt)
1724 : gimple_opt_pass (pass_data_lower_complex, ctxt)
1727 /* opt_pass methods: */
1728 opt_pass * clone () { return new pass_lower_complex (m_ctxt); }
1729 virtual unsigned int execute (function *) { return tree_lower_complex (); }
1731 }; // class pass_lower_complex
1736 make_pass_lower_complex (gcc::context *ctxt)
1738 return new pass_lower_complex (ctxt);
1744 const pass_data pass_data_lower_complex_O0 =
1746 GIMPLE_PASS, /* type */
1747 "cplxlower0", /* name */
1748 OPTGROUP_NONE, /* optinfo_flags */
1749 TV_NONE, /* tv_id */
1750 PROP_cfg, /* properties_required */
1751 PROP_gimple_lcx, /* properties_provided */
1752 0, /* properties_destroyed */
1753 0, /* todo_flags_start */
1754 TODO_update_ssa, /* todo_flags_finish */
1757 class pass_lower_complex_O0 : public gimple_opt_pass
1760 pass_lower_complex_O0 (gcc::context *ctxt)
1761 : gimple_opt_pass (pass_data_lower_complex_O0, ctxt)
1764 /* opt_pass methods: */
1765 virtual bool gate (function *fun)
1767 /* With errors, normal optimization passes are not run. If we don't
1768 lower complex operations at all, rtl expansion will abort. */
1769 return !(fun->curr_properties & PROP_gimple_lcx);
1772 virtual unsigned int execute (function *) { return tree_lower_complex (); }
1774 }; // class pass_lower_complex_O0
1779 make_pass_lower_complex_O0 (gcc::context *ctxt)
1781 return new pass_lower_complex_O0 (ctxt);