PR rtl-optimization/32664
author kkojima <kkojima@138bc75d-0d04-0410-961f-82ee72b054a4>
Tue, 10 Jul 2007 01:01:11 +0000 (01:01 +0000)
committer kkojima <kkojima@138bc75d-0d04-0410-961f-82ee72b054a4>
Tue, 10 Jul 2007 01:01:11 +0000 (01:01 +0000)
* mode-switching.c (create_pre_exit): Skip barrier insns.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@126507 138bc75d-0d04-0410-961f-82ee72b054a4

gcc/ChangeLog
gcc/mode-switching.c

index db3a99c..0261aa5 100644 (file)
@@ -1,3 +1,8 @@
+2007-07-10  Kaz Kojima  <kkojima@gcc.gnu.org>
+
+       PR rtl-optimization/32664
+       * mode-switching.c (create_pre_exit): Skip barrier insns.
+
 2007-07-10  Zdenek Dvorak  <dvorakz@suse.cz>
 
        * tree-scalar-evolution.c (scev_const_prop): Add arguments to
index 0e4f58c..e935acd 100644 (file)
@@ -246,21 +246,37 @@ create_pre_exit (int n_entities, int *entity_map, const int *num_modes)
 
                if (INSN_P (return_copy))
                  {
-                   if (GET_CODE (PATTERN (return_copy)) == USE
-                       && GET_CODE (XEXP (PATTERN (return_copy), 0)) == REG
-                       && (FUNCTION_VALUE_REGNO_P
-                           (REGNO (XEXP (PATTERN (return_copy), 0)))))
-                     {
-                       maybe_builtin_apply = 1;
-                       last_insn = return_copy;
-                       continue;
-                     }
-                   if (GET_CODE (PATTERN (return_copy)) == ASM_INPUT
-                       && strcmp (XSTR (PATTERN (return_copy), 0), "") == 0)
+                   return_copy_pat = PATTERN (return_copy);
+                   switch (GET_CODE (return_copy_pat))
                      {
+                     case USE:
+                       /* Skip __builtin_apply pattern.  */
+                       if (GET_CODE (XEXP (return_copy_pat, 0)) == REG
+                           && (FUNCTION_VALUE_REGNO_P
+                               (REGNO (XEXP (return_copy_pat, 0)))))
+                         {
+                           maybe_builtin_apply = 1;
+                           last_insn = return_copy;
+                           continue;
+                         }
+                       break;
+
+                     case ASM_OPERANDS:
+                       /* Skip barrier insns.  */
+                       if (!MEM_VOLATILE_P (return_copy_pat))
+                         break;
+
+                       /* Fall through.  */
+
+                     case ASM_INPUT:
+                     case UNSPEC_VOLATILE:
                        last_insn = return_copy;
                        continue;
+
+                     default:
+                       break;
                      }
+
                    /* If the return register is not (in its entirety)
                       likely spilled, the return copy might be
                       partially or completely optimized away.  */