backport: ChangeLog.tuples: ChangeLog from gimple-tuples-branch.
author Richard Biener <rguenth@gcc.gnu.org>
Mon, 28 Jul 2008 14:33:56 +0000 (14:33 +0000)
committer Richard Biener <rguenth@gcc.gnu.org>
Mon, 28 Jul 2008 14:33:56 +0000 (14:33 +0000)
2008-07-28  Richard Guenther  <rguenther@suse.de>

Merge from gimple-tuples-branch.

* ChangeLog.tuples: ChangeLog from gimple-tuples-branch.
* gimple.def: New file.
* gsstruct.def: Likewise.
* gimple-iterator.c: Likewise.
* gimple-pretty-print.c: Likewise.
* tree-gimple.c: Removed.  Merged into ...
* gimple.c: ... here.  New file.
* tree-gimple.h: Removed.  Merged into ...
* gimple.h: ... here.  New file.

* Makefile.in: Add dependencies on GIMPLE_H and tree-iterator.h.
* configure.ac: Added support for ENABLE_GIMPLE_CHECKING and the
--enable-checking=gimple flag.
* config.in: Likewise.
* configure: Regenerated.

* tree-ssa-operands.h: Tuplified.
* tree-vrp.c: Likewise.
* tree-loop-linear.c: Likewise.
* tree-into-ssa.c: Likewise.
* tree-ssa-loop-im.c: Likewise.
* tree-dump.c: Likewise.
* tree-complex.c: Likewise.
* cgraphbuild.c: Likewise.
* tree-ssa-threadupdate.c: Likewise.
* tree-ssa-loop-niter.c: Likewise.
* tree-pretty-print.c: Likewise.
* tracer.c: Likewise.
* gengtype.c: Likewise.
* tree-loop-distribution.c: Likewise.
* tree-ssa-loop-unswitch.c: Likewise.
* cgraph.c: Likewise.
* cgraph.h: Likewise.
* tree-ssa-loop-manip.c: Likewise.
* value-prof.c: Likewise.
* tree-ssa-loop-ch.c: Likewise.
* tree-tailcall.c: Likewise.
* value-prof.h: Likewise.
* tree.c: Likewise.
* tree.h: Likewise.
* tree-pass.h: Likewise.
* ipa-cp.c: Likewise.
* tree-scalar-evolution.c: Likewise.
* tree-scalar-evolution.h: Likewise.
* target.h: Likewise.
* lambda-mat.c: Likewise.
* tree-phinodes.c: Likewise.
* diagnostic.h: Likewise.
* builtins.c: Likewise.
* tree-ssa-alias-warnings.c: Likewise.
* cfghooks.c: Likewise.
* fold-const.c: Likewise.
* cfghooks.h: Likewise.
* omp-low.c: Likewise.
* tree-ssa-dse.c: Likewise.
* ipa-reference.c: Likewise.
* tree-ssa-uncprop.c: Likewise.
* toplev.c: Likewise.
* tree-gimple.c: Likewise.
* tree-gimple.h: Likewise.
* tree-chrec.c: Likewise.
* tree-chrec.h: Likewise.
* tree-ssa-sccvn.c: Likewise.
* tree-ssa-sccvn.h: Likewise.
* cgraphunit.c: Likewise.
* tree-ssa-copyrename.c: Likewise.
* tree-ssa-ccp.c: Likewise.
* tree-ssa-loop-ivopts.c: Likewise.
* tree-nomudflap.c: Likewise.
* tree-call-cdce.c: Likewise.
* ipa-pure-const.c: Likewise.
* c-format.c: Likewise.
* tree-stdarg.c: Likewise.
* tree-ssa-math-opts.c: Likewise.
* tree-ssa-dom.c: Likewise.
* tree-nrv.c: Likewise.
* tree-ssa-propagate.c: Likewise.
* ipa-utils.c: Likewise.
* tree-ssa-propagate.h: Likewise.
* tree-ssa-alias.c: Likewise.
* gimple-low.c: Likewise.
* tree-ssa-sink.c: Likewise.
* ipa-inline.c: Likewise.
* c-semantics.c: Likewise.
* dwarf2out.c: Likewise.
* expr.c: Likewise.
* tree-ssa-loop-ivcanon.c: Likewise.
* predict.c: Likewise.
* tree-ssa-loop.c: Likewise.
* tree-parloops.c: Likewise.
* tree-ssa-address.c: Likewise.
* tree-ssa-ifcombine.c: Likewise.
* matrix-reorg.c: Likewise.
* c-decl.c: Likewise.
* tree-eh.c: Likewise.
* c-pretty-print.c: Likewise.
* lambda-trans.c: Likewise.
* function.c: Likewise.
* langhooks.c: Likewise.
* ebitmap.h: Likewise.
* tree-vectorizer.c: Likewise.
* function.h: Likewise.
* langhooks.h: Likewise.
* tree-vectorizer.h: Likewise.
* ipa-type-escape.c: Likewise.
* ipa-type-escape.h: Likewise.
* domwalk.c: Likewise.
* tree-if-conv.c: Likewise.
* profile.c: Likewise.
* domwalk.h: Likewise.
* tree-data-ref.c: Likewise.
* tree-data-ref.h: Likewise.
* tree-flow-inline.h: Likewise.
* tree-affine.c: Likewise.
* tree-vect-analyze.c: Likewise.
* c-typeck.c: Likewise.
* gimplify.c: Likewise.
* coretypes.h: Likewise.
* tree-ssa-phiopt.c: Likewise.
* calls.c: Likewise.
* tree-ssa-coalesce.c: Likewise.
* tree.def: Likewise.
* tree-dfa.c: Likewise.
* except.c: Likewise.
* except.h: Likewise.
* cfgexpand.c: Likewise.
* tree-cfgcleanup.c: Likewise.
* tree-ssa-pre.c: Likewise.
* tree-ssa-live.c: Likewise.
* tree-sra.c: Likewise.
* tree-ssa-live.h: Likewise.
* tree-predcom.c: Likewise.
* lambda.h: Likewise.
* tree-mudflap.c: Likewise.
* ipa-prop.c: Likewise.
* print-tree.c: Likewise.
* tree-ssa-copy.c: Likewise.
* ipa-prop.h: Likewise.
* tree-ssa-forwprop.c: Likewise.
* ggc-page.c: Likewise.
* c-omp.c: Likewise.
* tree-ssa-dce.c: Likewise.
* tree-vect-patterns.c: Likewise.
* tree-ssa-ter.c: Likewise.
* tree-nested.c: Likewise.
* tree-ssa.c: Likewise.
* lambda-code.c: Likewise.
* tree-ssa-loop-prefetch.c: Likewise.
* tree-inline.c: Likewise.
* tree-inline.h: Likewise.
* tree-iterator.c: Likewise.
* tree-optimize.c: Likewise.
* tree-ssa-phiprop.c: Likewise.
* tree-vect-transform.c: Likewise.
* tree-object-size.c: Likewise.
* tree-outof-ssa.c: Likewise.
* cfgloop.c: Likewise.
* system.h: Likewise.
* tree-profile.c: Likewise.
* cfgloop.h: Likewise.
* c-gimplify.c: Likewise.
* c-common.c: Likewise.
* tree-vect-generic.c: Likewise.
* tree-flow.h: Likewise.
* c-common.h: Likewise.
* basic-block.h: Likewise.
* tree-ssa-structalias.c: Likewise.
* tree-switch-conversion.c: Likewise.
* tree-ssa-structalias.h: Likewise.
* tree-cfg.c: Likewise.
* passes.c: Likewise.
* ipa-struct-reorg.c: Likewise.
* ipa-struct-reorg.h: Likewise.
* tree-ssa-reassoc.c: Likewise.
* cfgrtl.c: Likewise.
* varpool.c: Likewise.
* stmt.c: Likewise.
* tree-ssanames.c: Likewise.
* tree-ssa-threadedge.c: Likewise.
* langhooks-def.h: Likewise.
* tree-ssa-operands.c: Likewise.
* config/alpha/alpha.c: Likewise.
* config/frv/frv.c: Likewise.
* config/s390/s390.c: Likewise.
* config/m32c/m32c.c: Likewise.
* config/m32c/m32c-protos.h: Likewise.
* config/spu/spu.c: Likewise.
* config/sparc/sparc.c: Likewise.
* config/i386/i386.c: Likewise.
* config/sh/sh.c: Likewise.
* config/xtensa/xtensa.c: Likewise.
* config/stormy16/stormy16.c: Likewise.
* config/ia64/ia64.c: Likewise.
* config/rs6000/rs6000.c: Likewise.
* config/pa/pa.c: Likewise.
* config/mips/mips.c: Likewise.

From-SVN: r138207

251 files changed:
gcc/ChangeLog
gcc/ChangeLog.tuples [new file with mode: 0644]
gcc/Makefile.in
gcc/ada/ChangeLog
gcc/ada/Make-lang.in
gcc/ada/ada-tree.h
gcc/ada/gigi.h
gcc/ada/trans.c
gcc/ada/utils.c
gcc/basic-block.h
gcc/builtins.c
gcc/c-common.c
gcc/c-common.h
gcc/c-decl.c
gcc/c-format.c
gcc/c-gimplify.c
gcc/c-omp.c
gcc/c-pretty-print.c
gcc/c-semantics.c
gcc/c-typeck.c
gcc/calls.c
gcc/cfgexpand.c
gcc/cfghooks.c
gcc/cfghooks.h
gcc/cfgloop.c
gcc/cfgloop.h
gcc/cfgrtl.c
gcc/cgraph.c
gcc/cgraph.h
gcc/cgraphbuild.c
gcc/cgraphunit.c
gcc/config.in
gcc/config/alpha/alpha.c
gcc/config/frv/frv.c
gcc/config/i386/i386.c
gcc/config/ia64/ia64.c
gcc/config/m32c/m32c-protos.h
gcc/config/m32c/m32c.c
gcc/config/mips/mips.c
gcc/config/pa/pa.c
gcc/config/rs6000/rs6000.c
gcc/config/s390/s390.c
gcc/config/sh/sh.c
gcc/config/sparc/sparc.c
gcc/config/spu/spu.c
gcc/config/stormy16/stormy16.c
gcc/config/xtensa/xtensa.c
gcc/configure
gcc/configure.ac
gcc/coretypes.h
gcc/cp/ChangeLog
gcc/cp/Make-lang.in
gcc/cp/cp-gimplify.c
gcc/cp/cp-tree.h
gcc/cp/decl.c
gcc/cp/decl2.c
gcc/cp/except.c
gcc/cp/init.c
gcc/cp/optimize.c
gcc/cp/semantics.c
gcc/diagnostic.h
gcc/domwalk.c
gcc/domwalk.h
gcc/dwarf2out.c
gcc/ebitmap.h
gcc/except.c
gcc/except.h
gcc/expr.c
gcc/fold-const.c
gcc/fortran/ChangeLog
gcc/fortran/Make-lang.in
gcc/fortran/f95-lang.c
gcc/fortran/trans-array.c
gcc/fortran/trans-array.h
gcc/fortran/trans-decl.c
gcc/fortran/trans-expr.c
gcc/fortran/trans-intrinsic.c
gcc/fortran/trans-io.c
gcc/fortran/trans-openmp.c
gcc/fortran/trans-stmt.c
gcc/fortran/trans.c
gcc/fortran/trans.h
gcc/function.c
gcc/function.h
gcc/gengtype.c
gcc/ggc-page.c
gcc/gimple-iterator.c [new file with mode: 0644]
gcc/gimple-low.c
gcc/gimple-pretty-print.c [new file with mode: 0644]
gcc/gimple.c [new file with mode: 0644]
gcc/gimple.def [new file with mode: 0644]
gcc/gimple.h [new file with mode: 0644]
gcc/gimplify.c
gcc/gsstruct.def [new file with mode: 0644]
gcc/ipa-cp.c
gcc/ipa-inline.c
gcc/ipa-prop.c
gcc/ipa-prop.h
gcc/ipa-pure-const.c
gcc/ipa-reference.c
gcc/ipa-struct-reorg.c
gcc/ipa-struct-reorg.h
gcc/ipa-type-escape.c
gcc/ipa-type-escape.h
gcc/ipa-utils.c
gcc/java/ChangeLog
gcc/java/Make-lang.in
gcc/java/expr.c
gcc/java/java-gimplify.c
gcc/java/java-tree.h
gcc/lambda-code.c
gcc/lambda-mat.c
gcc/lambda-trans.c
gcc/lambda.h
gcc/langhooks-def.h
gcc/langhooks.c
gcc/langhooks.h
gcc/matrix-reorg.c
gcc/objc/ChangeLog
gcc/objc/Make-lang.in
gcc/objc/objc-act.c
gcc/objc/objc-act.h
gcc/objcp/Make-lang.in
gcc/omp-low.c
gcc/passes.c
gcc/predict.c
gcc/print-tree.c
gcc/profile.c
gcc/stmt.c
gcc/system.h
gcc/target.h
gcc/testsuite/ChangeLog
gcc/testsuite/g++.dg/torture/pr36826.C [new file with mode: 0644]
gcc/testsuite/gcc.c-torture/compile/20080721-1.c [new file with mode: 0644]
gcc/testsuite/gcc.dg/20080615-1.c [new file with mode: 0644]
gcc/testsuite/gcc.dg/fold-alloca-1.c
gcc/testsuite/gcc.dg/gomp/block-1.c
gcc/testsuite/gcc.dg/gomp/block-2.c
gcc/testsuite/gcc.dg/gomp/block-3.c
gcc/testsuite/gcc.dg/gomp/block-4.c
gcc/testsuite/gcc.dg/gomp/block-5.c
gcc/testsuite/gcc.dg/gomp/block-6.c
gcc/testsuite/gcc.dg/gomp/block-7.c
gcc/testsuite/gcc.dg/gomp/block-8.c
gcc/testsuite/gcc.dg/torture/20080716-1.c [new file with mode: 0644]
gcc/testsuite/gcc.dg/tree-ssa/20030728-1.c
gcc/testsuite/gcc.dg/tree-ssa/20080530.c [new file with mode: 0644]
gcc/testsuite/gcc.dg/tree-ssa/pr21658.c
gcc/testsuite/gcc.dg/tree-ssa/pr30375.c
gcc/testsuite/gcc.dg/tree-ssa/tailcall-3.c [new file with mode: 0644]
gcc/testsuite/gfortran.dg/gomp/block-1.f90
gcc/toplev.c
gcc/tracer.c
gcc/tree-affine.c
gcc/tree-call-cdce.c
gcc/tree-cfg.c
gcc/tree-cfgcleanup.c
gcc/tree-chrec.c
gcc/tree-chrec.h
gcc/tree-complex.c
gcc/tree-data-ref.c
gcc/tree-data-ref.h
gcc/tree-dfa.c
gcc/tree-dump.c
gcc/tree-eh.c
gcc/tree-flow-inline.h
gcc/tree-flow.h
gcc/tree-gimple.c [deleted file]
gcc/tree-gimple.h [deleted file]
gcc/tree-if-conv.c
gcc/tree-inline.c
gcc/tree-inline.h
gcc/tree-into-ssa.c
gcc/tree-iterator.c
gcc/tree-loop-distribution.c
gcc/tree-loop-linear.c
gcc/tree-mudflap.c
gcc/tree-nested.c
gcc/tree-nomudflap.c
gcc/tree-nrv.c
gcc/tree-object-size.c
gcc/tree-optimize.c
gcc/tree-outof-ssa.c
gcc/tree-parloops.c
gcc/tree-pass.h
gcc/tree-phinodes.c
gcc/tree-predcom.c
gcc/tree-pretty-print.c
gcc/tree-profile.c
gcc/tree-scalar-evolution.c
gcc/tree-scalar-evolution.h
gcc/tree-sra.c
gcc/tree-ssa-address.c
gcc/tree-ssa-alias-warnings.c
gcc/tree-ssa-alias.c
gcc/tree-ssa-ccp.c
gcc/tree-ssa-coalesce.c
gcc/tree-ssa-copy.c
gcc/tree-ssa-copyrename.c
gcc/tree-ssa-dce.c
gcc/tree-ssa-dom.c
gcc/tree-ssa-dse.c
gcc/tree-ssa-forwprop.c
gcc/tree-ssa-ifcombine.c
gcc/tree-ssa-live.c
gcc/tree-ssa-live.h
gcc/tree-ssa-loop-ch.c
gcc/tree-ssa-loop-im.c
gcc/tree-ssa-loop-ivcanon.c
gcc/tree-ssa-loop-ivopts.c
gcc/tree-ssa-loop-manip.c
gcc/tree-ssa-loop-niter.c
gcc/tree-ssa-loop-prefetch.c
gcc/tree-ssa-loop-unswitch.c
gcc/tree-ssa-loop.c
gcc/tree-ssa-math-opts.c
gcc/tree-ssa-operands.c
gcc/tree-ssa-operands.h
gcc/tree-ssa-phiopt.c
gcc/tree-ssa-phiprop.c
gcc/tree-ssa-pre.c
gcc/tree-ssa-propagate.c
gcc/tree-ssa-propagate.h
gcc/tree-ssa-reassoc.c
gcc/tree-ssa-sccvn.c
gcc/tree-ssa-sccvn.h
gcc/tree-ssa-sink.c
gcc/tree-ssa-structalias.c
gcc/tree-ssa-structalias.h
gcc/tree-ssa-ter.c
gcc/tree-ssa-threadedge.c
gcc/tree-ssa-threadupdate.c
gcc/tree-ssa-uncprop.c
gcc/tree-ssa.c
gcc/tree-ssanames.c
gcc/tree-stdarg.c
gcc/tree-switch-conversion.c
gcc/tree-tailcall.c
gcc/tree-vect-analyze.c
gcc/tree-vect-generic.c
gcc/tree-vect-patterns.c
gcc/tree-vect-transform.c
gcc/tree-vectorizer.c
gcc/tree-vectorizer.h
gcc/tree-vrp.c
gcc/tree.c
gcc/tree.def
gcc/tree.h
gcc/value-prof.c
gcc/value-prof.h
gcc/varpool.c

diff --git a/gcc/ChangeLog.tuples b/gcc/ChangeLog.tuples
new file mode 100644
index 0000000..d5e3383
--- /dev/null
@@ -0,0 +1,8231 @@
+2008-07-28  Richard Guenther  <rguenther@suse.de>
+
+       Merge with mainline @138201.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-07-27  Jakub Jelinek  <jakub@redhat.com>
+
+       * omp-low.c (expand_omp_atomic_fetch_op): Fix a merge glitch.
+
+2008-07-27  Andrew Pinski  <andrew_pinski@playstation.sony.com>
+
+       * config/spu/spu.c (spu_gimplify_va_arg_expr): Unshare the args
+       and skip trees.
+
+2008-07-27  Richard Guenther  <rguenther@suse.de>
+
+       * tree-eh.c (lookup_expr_eh_region): Do not allocate a tree
+       annotation.
+       * tree-dfa.c (create_tree_common_ann): Set eh region to -1.
+       * tree-flow.h (struct tree_ann_common_d): Reorder rn member
+       to pack with type.
+
+2008-07-26  Richard Guenther  <rguenther@suse.de>
+
+       Merge with mainline @138159.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-07-26  Richard Guenther  <rguenther@suse.de>
+
+       * gimple.h (CALL_STMT_CANNOT_INLINE_P): Remove.
+
+2008-07-26  Jan Hubicka  <jh@suse.cz>
+
+       * ipa-inline.c (cgraph_decide_inlining_of_small_function): Use
+       gimple_call_set_cannot_inline.
+       (cgraph_decide_inlining): Likewise.
+       (cgraph_decide_inlining_incrementally): Likewise.
+
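The entry above replaces the old CALL_STMT_CANNOT_INLINE_P tree flag with a flag carried on the GIMPLE_CALL tuple itself. A minimal sketch of the new idiom, assuming the tuples-branch gimple.h accessors; the wrapper function and its caller are illustrative, not code from ipa-inline.c:

    /* Sketch: record a "cannot inline" decision on the call tuple rather
       than on a tree flag.  Assumes gimple.h from the tuples branch.  */
    static void
    note_uninlinable_call (gimple call_stmt)
    {
      if (!is_gimple_call (call_stmt))
        return;

      gimple_call_set_cannot_inline (call_stmt, true);

      if (gimple_call_cannot_inline_p (call_stmt))
        {
          /* The decision now travels with the statement itself.  */
        }
    }
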
+2008-07-26  Richard Guenther  <rguenther@suse.de>
+
+       Merge with mainline @138092.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-07-26  Richard Guenther  <rguenther@suse.de>
+
+       Merge with mainline @138091.
+
+2008-07-25  Richard Guenther  <rguenther@suse.de>
+
+       * config/ia64/ia64.c (ia64_gimplify_va_arg): Unshare valist
+       before reusing it.
+
+2008-07-25  Andreas Krebbel  <krebbel1@de.ibm.com>
+
+       * tree-tailcall.c (process_assignment): Prevent tail call
+       optimization if the modes of the return types don't match.
+
+2008-07-24  Aldy Hernandez  <aldyh@redhat.com>
+
+       * tree-inline.c (expand_call_inline): Allow casts in assert.
+
+2008-07-24  Richard Guenther  <rguenther@suse.de>
+
+       PR middle-end/36885
+       * tree.c (walk_tree_1): Also walk CHANGE_DYNAMIC_TYPE_EXPR operands.
+       * gimple.c (gss_for_code): GIMPLE_CHANGE_DYNAMIC_TYPE is GSS_WITH_OPS.
+       * gsstruct.def (GSS_CHANGE_DYNAMIC_TYPE): Remove. 
+
+2008-07-24  Richard Guenther  <rguenther@suse.de>
+
+       * tree-sra.c (sra_walk_expr): Also handle CONVERT_EXPR.
+       (sra_walk_gimple_assign): Correctly detect assigns we can handle.
+       * expr.c (expand_expr_real_1): Pass MOVE_NONTEMPORAL to
+       expand_assignment.
+
+2008-07-23  Richard Guenther  <rguenther@suse.de>
+
+       * tree-ssa-dse.c (get_kill_of_stmt_lhs): Tuplify correctly.
+       * gimple-pretty-print.c (dump_gimple_label): Dump non-local flag.
+       * tree.h (maybe_fold_stmt_addition): Declare.
+       * tree-ssa-ccp.c (maybe_fold_stmt_addition): Export.
+       (fold_gimple_assign): Return the new rhs instead of modifying the stmt.
+       (fold_stmt): Deal with that.
+       (fold_stmt_inplace): Only replace the rhs of a statement if
+       it has enough operand space to hold the new operands.
+       * tree-ssa-forwprop.c (forward_propagate_addr_expr_1): Fix
+       POINTER_PLUS_EXPR handling.
+
+2008-07-23  Richard Guenther  <rguenther@suse.de>
+
+       * tree-eh.c (record_in_goto_queue): Fix bootstrap with
+       --disable-checking.
+
+2008-07-23  Aldy Hernandez  <aldyh@redhat.com>
+
+       Merge with mainline @138071.
+       
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+                       
+2008-07-23  Richard Guenther  <rguenther@suse.de>
+
+       * tree-eh.c (union treemple): Add tree * member.
+       (find_goto_replacement): Adjust.
+       (replace_goto_queue_cond_clause): Use the address of the
+       individual labels as unique identifier.
+       (replace_goto_queue_1): Use the statement as unique identifier
+       for GIMPLE_GOTO.
+       (record_in_goto_queue): Add checking.
+       (record_in_goto_queue_label): Adjust.
+       (maybe_record_in_goto_queue): Likewise.
+       (do_goto_redirection): Get leh_tf_state.
+       (lower_try_finally_nofallthru): Pass it.
+       (lower_try_finally_onedest): Likewise.
+       (lower_try_finally_copy): Likewise.
+       (lower_try_finally_switch): Likewise.
+
+2008-07-22  Aldy Hernandez  <aldyh@redhat.com>
+           Jakub Jelinek  <jakub@redhat.com>
+
+       * gimplify.c (gimple_do_not_emit_location_p): New.
+       (gimple_set_do_not_emit_location): New.
+       (annotate_one_with_location): Do not annotate if
+       gimple_do_not_emit_location_p.
+       (gimplify_cond_expr): Do not optimize if the COND_EXPR and
+       GOTO_EXPR have different locations.
+       Do not emit location information for some GIMPLE_COND's.
+
+2008-07-22  Richard Guenther  <rguenther@suse.de>
+
+       * tree-ssa-ccp.c (ccp_fold): Use gimple_expr_type.
+       (fold_gimple_assign): Likewise.
+       * tree-inline.c (remap_gimple_op_r): Do not set TREE_BLOCK on
+       non-statements.  Recurse to copy_tree_body_r with NULL block.
+       (copy_phis_for_bb): Likewise.
+       * tree-cfg.c (move_stmt_op): Do not set TREE_BLOCK on
+       non-statements.
+
+2008-07-22  Diego Novillo  <dnovillo@google.com>
+           Richard Guenther  <rguenther@suse.de>
+
+       * tree-ssa-dom.c (hashable_expr_equal_p): Do nothing if
+       either TYPE0 or TYPE1 is NULL.
+
+2008-07-21  Diego Novillo  <dnovillo@google.com>
+
+       * tree-ssa-alias-warnings.c (struct gimple_tree_map): New.
+       Change every use of struct tree_map to struct gimple_tree_map.
+       (gimple_tree_map_eq): New.
+       (gimple_tree_map_hash): New.
+       * tree-ssa-ccp.c (evaluate_stmt): Remove stale FIXME note.
+       * gimplify.c (gimplify_expr): Remove stale FIXME note.
+       * tree-ssa-pre.c: Remove stale references to GIMPLE_MODIFY_STMT.
+       * tree-vect-generic.c (expand_vector_operations_1): Change
+       FIXME tuples to NOTE.
+
+2008-07-21  Richard Guenther  <rguenther@suse.de>
+
+       * tree-ssa-phiprop.c (propagate_with_phi): Only look through
+       SSA_NAME copies.
+
+2008-07-21  Richard Guenther  <rguenther@suse.de>
+
+       * gimplify.c (gimplify_init_constructor): Clear TREE_SIDE_EFFECTS
+       on the remaining empty constructor.
+
+2008-07-21  Richard Guenther  <rguenther@suse.de>
+
+       * tree-ssa-ccp.c (fold_gimple_assign): Handle pointer conversions
+       like fold_stmt_r did.
+       * gimple-pretty-print.c (dump_gimple_cond): Place semicolons
+       where trunk did.
+       * tree-inline.c (copy_bb): Do not insert GIMPLE_NOPs.
+
+2008-07-21  Richard Guenther  <rguenther@suse.de>
+
+       PR tree-optimization/36826
+       * tree-ssa-pre.c (eliminate): Do not eliminate in unused
+       statements.
+
+2008-07-20  Richard Guenther  <rguenther@suse.de>
+
+       * gimple.h (gimple_expr_type): The expression type is always
+       the base type of an integral sub-type result type.
+       * tree-eh.c (replace_goto_queue_cond_clause): Copy the sequence
+       before handing it off to gsi_insert_seq_after.
+       * tree-sra.c (insert_edge_copies_seq): Make sure to not keep an
+       uninserted but marked for update sequence.
+
+2008-07-20  Richard Guenther  <rguenther@suse.de>
+
+       * gimple.c (DEFTREECODE): Add REALIGN_LOAD_EXPR.
+
+2008-07-19  Richard Guenther  <rguenther@suse.de>
+
+       * tree-ssa-sccvn.h (get_constant_value_id): Declare.
+       (vn_constant_eq_with_type): Make sure an integral type is
+       never equal to a non-integral type.
+       (vn_hash_constant_with_type): Adjust.
+       * tree-ssa-sccvn.c (get_constant_value_id): New function.
+       * tree-ssa-pre.c (get_expr_value_id): For newly created
+       constant value-ids make sure to add the expression to its
+       expression-set.
+
+2008-07-18  Jakub Jelinek  <jakub@redhat.com>
+
+       * gimple.c (gimple_regimplify_operands): Moved to...
+       * gimplify.c (gimple_regimplify_operands): ... here.  Rework using
+       lower_omp_1's regimplification code; if the regimplified LHS of a
+       GIMPLE_ASSIGN or GIMPLE_CALL requires a simpler RHS, create a temporary.
+       * omp-low.c (lower_omp_1): Use
+       gimple_regimplify_operands.
+
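gimple_regimplify_operands, now housed in gimplify.c, is the standard way to restore valid GIMPLE after a pass has patched a statement's operands in place. A hedged sketch of the usual calling pattern; the wrapper function is hypothetical:

    /* Sketch: replace an assignment's RHS and re-gimplify in place.
       If NEW_RHS is not a valid GIMPLE operand, helper statements are
       inserted before *GSI.  Assumes the tuples-branch declarations.  */
    static void
    patch_rhs_and_regimplify (gimple_stmt_iterator *gsi, tree new_rhs)
    {
      gimple stmt = gsi_stmt (*gsi);

      gimple_assign_set_rhs1 (stmt, new_rhs);
      update_stmt (stmt);
      gimple_regimplify_operands (stmt, gsi);
    }
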
+2008-07-18  Richard Guenther  <rguenther@suse.de>
+
+       * tree-ssa-reassoc.c (get_rank): For single rhs process its
+       operands.
+
+2008-07-18  Richard Guenther  <rguenther@suse.de>
+
+       * tree-ssa-loop-niter.c (expand_simple_operations): Expand
+       as much as trunk does.
+       * tree-ssa-sccvn.c (simplify_binary_expression): For comparisons
+       always expand the first operand.
+
+2008-07-18  Jakub Jelinek  <jakub@redhat.com>
+
+       * gimple-pretty-print.c (dump_gimple_cond): Print a semicolon if
+       goto or else goto has been printed.
+       (dump_gimple_goto): Print as goto instead of gimple_goto, print
+       a semicolon at the end.
+       (dump_gimple_asm): Print a semicolon at the end.
+
+       * gimplify.c (gimplify_cond_expr): If COND_EXPR has both non-trivial
+       THEN and ELSE statements and the THEN sequence can't fallthru, avoid
+       adding label_cont and jump to it.
+
+2008-07-18  Aldy Hernandez  <aldyh@redhat.com>
+
+       * Makefile.in: Remove TREE_GIMPLE_H definition.
+       Rename all TREE_GIMPLE_H uses to GIMPLE_H.
+       Depend on tree-iterator.h when necessary.
+       * tree-into-ssa.c: Include gimple.h instead of tree-gimple.h.
+       * tree-complex.c: Same.
+       * cgraphbuild.c: Same.
+       * cgraph.c: Same.
+       * builtins.c: Same.
+       * tree-ssa-sccvn.c: Same.
+       * tree-ssa-copyrename.c: Same.
+       * tree-nomudflap.c: Same.
+       * tree-call-cdce.c: Same.
+       * ipa-pure-const.c: Same.
+       * ipa-utils.c: Same.
+       * tree-ssa-alias.c: Same.
+       * tree-ssa-sink.c: Same.
+       * langhooks.c: Same.
+       * function.c: Same.
+       * ipa-type-escape.c: Same.
+       * tree-affine.c: Same.
+       * c-typeck.c: Same.
+       * tree-dfa.c: Same.
+       * tree-ssa-pre.c: Same.
+       * tree-sra.c: Same.
+       * c-omp.c: Same.
+       * tree-ssa-dce.c: Same.
+       * tree-nested.c: Same.
+       * tree-ssa.c: Same.
+       * tree-inline.c: Same.
+       * tree-iterator.c: Same.
+       * c-gimplify.c: Same.
+       * tree-vect-generic.c: Same.
+       * tree-flow.h: Same.
+       * tree-ssa-structalias.c: Same.
+       * ipa-struct-reorg.c: Same.
+       * tree-ssa-reassoc.c: Same.
+       * config/alpha/alpha.c: Same.
+       * config/s390/s390.c: Same.
+       * config/m32c/m32c.c: Same.
+       * config/spu/spu.c: Same.
+       * config/sparc/sparc.c: Same.
+       * config/i386/i386.c: Same.
+       * config/sh/sh.c: Same.
+       * config/xtensa/xtensa.c: Same.
+       * config/stormy16/stormy16.c: Same.
+       * config/ia64/ia64.c: Same.
+       * config/rs6000/rs6000.c: Same.
+       * config/mips/mips.c: Same.
+       * varpool.c: Same.
+       * cgraphunit.c: Same.  Include tree-iterator.h
+       * tree-mudflap.c: Same.
+       * gimplify.c: Same.
+       * c-decl.c: Same.
+       * omp-low.c: Same.
+       * c-semantics.c: Same.
+       * gimple-low.c: Same.
+       * tree-gimple.c: Merge contents into...
+       * gimple.c: ...here.
+       * tree-gimple.h: Merge contents into...
+       * gimple.h: ...here.
+
+2008-07-17  Jakub Jelinek  <jakub@redhat.com>
+
+       * omp-low.c (expand_omp_atomic_pipeline): Call
+       force_gimple_operand_gsi on RHS for IADDR initialization.
+
+       * gimplify.c (gimplify_cond_expr): Push statements in between
+       gimple_push_condition and gimple_pop_condition into a new
+       gimple_seq, append it after gimple_pop_condition cleanups.
+
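force_gimple_operand_gsi, used above for the IADDR initialization, gimplifies an arbitrary tree expression and emits any helper statements at an iterator. A minimal usage sketch, assuming the tuples-branch signature; the wrapper function is illustrative:

    /* Sketch: turn EXPR into a valid GIMPLE operand, inserting helper
       statements before the statement at *GSI.  */
    static tree
    materialize_operand (gimple_stmt_iterator *gsi, tree expr)
    {
      /* simple_p = true asks for a gimple value; NULL_TREE lets the
         gimplifier create a temporary; GSI_SAME_STMT keeps the iterator
         on the original statement.  */
      return force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
                                       true, GSI_SAME_STMT);
    }
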
+2008-07-17  Richard Guenther  <rguenther@suse.de>
+
+       * tree-complex.c (init_dont_simulate_again): Handle
+       {REAL,IMAG}PART_EXPR correctly.
+       * gimple-pretty-print.c (dump_unary_rhs): Dump VIEW_CONVERT_EXPR
+       and ASSERT_EXPR the same way as on trunk.
+       * tree-ssa-dom.c (initialize_hash_element): Do not record
+       the type for single rhs assigns.
+       (hashable_expr_equal_p): Deal with NULL types.
+       (eliminate_redundant_computations): Use print_gimple_expr.
+       * tree-vrp.c (stmt_interesting_for_vrp): Fix builtin call check.
+       (vrp_visit_stmt): Likewise.
+       * tree-ssa-forwprop.c (simplify_gimple_switch): Fix typo.
+
+2008-07-16  Richard Guenther  <rguenther@suse.de>
+
+       PR tree-optimization/36792
+       * tree-ssa-pre.c (get_or_alloc_expr_for): Handle unary
+       expressions inserted by SCCVN.
+       (do_SCCVN_insertion): Adjust comment.
+       (execute_pre): Allow insertion for FRE again.
+
+       * tree-ssa-sccvn.c (simplify_unary_expression): Deal with
+       the GIMPLE_SINGLE_RHS statements we get.
+       (init_scc_vn): Init VN_INFO->expr to NULL.
+
+2008-07-16  Richard Guenther  <rguenther@suse.de>
+
+       Revert
+       2008-07-16  Richard Guenther  <rguenther@suse.de>
+       * tree-ssa-pre.c (get_constant_for_value_id): Only hand out
+       constants of the correct type.
+       (fully_constant_expression): Pass the required type to
+       get_constant_for_value_id.
+
+       * tree-ssa-sccvn.h (vn_hash_constant_with_type): New function.
+       (vn_constant_eq_with_type): Likewise.
+       * tree-ssa-sccvn.c (vn_constant_eq): Use vn_constant_eq_with_type.
+       (get_or_alloc_constant_value_id): Use vn_hash_constant_with_type.
+       * tree-ssa-pre.c (pre_expr_eq): Use vn_constant_eq_with_type.
+       (pre_expr_hash): Use vn_hash_constant_with_type.
+       (get_representative_for): Use constants as their representative.
+       (fully_constant_expression): Use constant parts in expressions
+       directly.
+
+2008-07-15  Aldy Hernandez  <aldyh@redhat.com>
+
+       * tree-ssa-propagate.c (valid_gimple_expression_p): Remove.
+       * tree-ssa-propagate.h (valid_gimple_expression_p): Remove.
+       * tree-eh.c: Factor out common code in operation_could_trap_p and 
+       stmt_could_throw_1_p into...
+       (operation_could_trap_helper_p): New.
+       * gimplify.c (gimplify_expr): Rephrase fixme.
+       * tree-mudflap.c: Remove fixme.
+
+2008-07-17  Jakub Jelinek  <jakub@redhat.com>
+
+       * tree-eh.c (collect_finally_tree): Call collect_finally_tree_1
+       with region as second argument for GIMPLE_TRY_FINALLY's cleanup.
+
+       * gimplify.c (gimplify_expr): Call gimple_try_set_catch_is_cleanup
+       for TRY_CATCH_EXPR.
+
+2008-07-16  Jakub Jelinek  <jakub@redhat.com>
+
+       * tree-gimple.c (is_gimple_reg_rhs): Don't check for CALL_EXPRs
+       with side-effects.
+       * gimple.c (extract_ops_from_tree): Remove assert.
+       * gimplify.c (is_gimple_reg_or_call_rhs): New function.
+       (rhs_predicate_for): Return it instead of is_gimple_reg_rhs.
+       (gimplify_expr): Handle is_gimple_reg_or_call_rhs.
+
+       * tree-ssa-threadedge.c (record_temporary_equivalences_for_stmts,
+       simplify_control_stmt_condition): Pass stmt instead of NULL as second
+       argument to simplify callback.
+
+       * tree-vect-patterns.c (vect_recog_pow_pattern): Don't call
+       gimple_call_set_lhs with lhs of last_stmt.
+       * tree-vect-transform.c (vectorizable_call): Build a new dummy
+       assignment, replace the call with it and move over stmt_info.
+
+       * tree-ssa-loop-niter.c (infer_loop_bounds_from_array): Use
+       is_gimple_assign instead of gimple_code () == GIMPLE_ASSIGN and
+       is_gimple_call instead of gimple_code () == GIMPLE_CALL.
+       * tree-ssa-propagate.c (update_call_from_tree, substitute_and_fold):
+       Likewise.
+       * tree-ssa-sccvn.c (visit_use): Likewise.
+       * tree-eh.c (stmt_could_throw_p): Likewise.
+       * tree-optimize.c (execute_fixup_cfg): Likewise.
+       * omp-low.c (check_omp_nesting_restrictions, scan_omp_1_stmt,
+       optimize_omp_library_calls): Likewise.
+       * tree-ssa-loop-im.c (movement_possibility, stmt_cost,
+       determine_invariantness_stmt): Likewise.
+       * tree-ssa-phiprop.c (phivn_valid_p, phiprop_insert_phi,
+       propagate_with_phi): Likewise.
+       * tree-ssa-ccp.c (get_default_value, surely_varying_stmt_p,
+       ccp_fold_builtin, gimplify_and_update_call_from_tree): Likewise.
+       * ipa-struct-reorg.c (is_result_of_mult, create_general_new_stmt):
+       Likewise.
+       * tree-ssa-coalesce.c (build_ssa_conflict_graph): Likewise.
+       * tree-object-size.c (alloc_object_size, call_object_size,
+       check_for_plus_in_loops): Likewise.
+       * tree-ssa.c (verify_ssa): Likewise.
+       * predict.c (expr_expected_value_1, tree_bb_level_predictions,
+       tree_estimate_probability): Likewise.
+       * tree-cfg.c (verify_stmt): Likewise.
+       * tree-ssa-loop-ch.c (should_duplicate_loop_header_p,
+       copy_loop_headers): Likewise.
+       * tree-ssa-ter.c (is_replaceable_p): Likewise.
+       * ipa-prop.c (ipa_count_arguments, ipa_compute_jump_functions):
+       Likewise.
+       * tree-ssa-dom.c (gimple_assign_unary_useless_conversion_p,
+       record_equivalences_from_stmt, optimize_stmt,
+       get_lhs_or_phi_result): Likewise.
+       * tree-ssa-sink.c (is_hidden_global_store): Likewise.
+       * tree-nrv.c (tree_nrv, execute_return_slot_opt): Likewise.
+       * value-prof.c (gimple_divmod_fixed_value,
+       gimple_mod_pow2, gimple_mod_subtract): Likewise.
+       * tree-predcom.c (name_for_ref, find_looparound_phi,
+       replace_ref_with, remove_name_from_operation): Likewise.
+       * tree-ssa-math-opts.c (is_division_by, execute_cse_reciprocals,
+       execute_cse_sincos, execute_convert_to_rsqrt): Likewise.
+       * tree-complex.c (expand_complex_move, expand_complex_operations_1):
+       Likewise.
+       * tree-outof-ssa.c (identical_copies_p): Likewise.
+       * tree-ssa-pre.c (is_exception_related): Likewise.
+       * tree-sra.c (scalarize_use, scalarize_copy): Likewise.
+       * tree-ssa-alias.c (count_uses_and_derefs, update_alias_info_1,
+       is_escape_site): Likewise.
+       * lambda-code.c (can_put_in_inner_loop,
+       cannot_convert_bb_to_perfect_nest): Likewise.
+       * tree-tailcall.c (find_tail_calls, eliminate_tail_call): Likewise.
+       * ipa-type-escape.c (look_for_casts_stmt, is_cast_from_non_pointer):
+       Likewise.
+       * tree-vect-transform.c (vectorizable_reduction): Likewise.
+       * tree-ssa-threadedge.c (record_temporary_equivalences_from_stmts):
+       Likewise.
+       * tree-ssa-phiopt.c (nt_init_block): Likewise.
+       * tree-ssa-structalias.c (find_func_aliases): Likewise. 
+       * tree-ssa-forwprop.c (can_propagate_from,
+       forward_propagate_comparison, simplify_not_neg_expr,
+       simplify_gimple_switch, tree_ssa_forward_propagate_single_use_vars):
+       Likewise.
+       * tree-ssa-dce.c (eliminate_unnecessary_stmts): Likewise.
+       * tree-ssa-dse.c (get_kill_of_stmt_lhs, dse_possible_dead_store_p,
+       dse_optimize_stmt, execute_simple_dse): Likewise.
+       * tree-ssa-loop-ivopts.c (find_interesting_uses_op,
+       find_interesting_uses_stmt, rewrite_use_nonlinear_expr): Likewise.
+       * tree-vrp.c (stmt_overflow_infinity, vrp_stmt_computes_nonzero,
+       register_edge_assert_for_2, register_edge_assert_for,
+       find_assert_locations, check_all_array_refs,
+       remove_range_assertions, stmt_interesting_for_vrp, vrp_visit_stmt,
+       simplify_stmt_using_ranges): Likewise.
+       * tree-ssa-loop-prefetch.c (gather_memory_references): Likewise.
+       * tree-ssa-copy.c (may_propagate_copy_into_stmt,
+       propagate_tree_value_into_stmt): Likewise.
+       * tree-inline.c (copy_bb, expand_call_inline,
+       gimple_expand_calls_inline, fold_marked_statements): Likewise.
+
+       * tree-ssa-copyrename.c (rename_ssa_copies): Use
+       gimple_assign_ssa_name_copy_p.
+
+       * tree-ssa-forwprop.c (forward_propagate_addr_expr_1): Check
+       gimple_assign_rhs_code, type of rhs is TREE_TYPE (lhs), update
+       rhs_code.
+
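Most of the entry above is a mechanical switch from comparing gimple_code () against GIMPLE_ASSIGN or GIMPLE_CALL to the is_gimple_assign and is_gimple_call predicates. A hedged sketch of the resulting idiom in a basic-block walk; the counting function itself is hypothetical:

    /* Sketch: walk BB with the tuple iterators and use the predicate
       helpers instead of raw gimple_code comparisons.  */
    static int
    count_defs (basic_block bb)
    {
      gimple_stmt_iterator gsi;
      int n = 0;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);

          if (is_gimple_assign (stmt))
            n++;                                /* Always defines its LHS.  */
          else if (is_gimple_call (stmt)
                   && gimple_call_lhs (stmt) != NULL_TREE)
            n++;                                /* Call with a value result.  */
        }
      return n;
    }
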
+2008-07-16  Richard Guenther  <rguenther@suse.de>
+
+       * tree-ssa-pre.c (get_constant_for_value_id): Only hand out
+       constants of the correct type.
+       (fully_constant_expression): Pass the required type to
+       get_constant_for_value_id.
+
+2008-07-15  Aldy Hernandez  <aldyh@redhat.com>
+
+       Merge with mainline @137837.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-07-15  Jakub Jelinek  <jakub@redhat.com>
+
+       * common.opt (-fgimple-conversion=): Remove.
+
+       * tree-affine.c (aff_combination_expand): Tuplify.
+       * cfgexpand.c (gimple_assign_rhs_to_tree): Remove prototype.
+       * tree-outof-ssa.c (gimple_assign_rhs_to_tree): Likewise.
+       * tree-gimple.h (gimple_assign_rhs_to_tree): New prototype.
+
+2008-07-15  Aldy Hernandez  <aldyh@redhat.com>
+
+       * tree.h: Remove gimple_unreachable_1 prototype.
+
+2008-07-15  Aldy Hernandez  <aldyh@redhat.com>
+
+       * Makefile.in (gimple-dummy.o): Remove.
+       * gimple-dummy.c: Delete.
+       * tree-ssa-propagate.c (valid_gimple_expression_p): Change
+       gimple_unreachable to gcc_unreachable.
+       * tree-affine.c (aff_combination_expand): Same.
+       * tree-vect-transform.c (vectorizable_call): Same.
+
+2008-07-15  Richard Guenther  <rguenther@suse.de>
+
+       * gimplify.c (gimplify_expr): Gimplify an unused volatile load
+       properly.
+
+2008-07-15  Richard Guenther  <rguenther@suse.de>
+
+       * tree-ssa-pre.c: Tuplify.  Enable FRE and PRE.
+       (execute_pre): Disable SCCVN insertion even for FRE.
+       * tree-ssa-sccvn.h (copy_reference_ops_from_call): Declare.
+       * tree-ssa-sccvn.c (copy_reference_ops_from_call): Export.
+       (vn_get_expr_for): Handle more expression kinds.
+       (visit_reference_op_load): Properly set a value id for
+       inserted names.
+       (simplify_binary_expression): Use valid_gimple_rhs_p instead of
+       valid_gimple_expression_p.
+       (simplify_unary_expression): Likewise.
+       (process_scc): Clear the cached/simplified expressions at the
+       start of the iteration.
+       (free_scc_vn): Do not clear SSA_NAME_VALUE.
+       (run_scc_vn): Remove broken special case in printing VNs.
+       * tree-ssa-propagate.c (valid_gimple_rhs_p): Allow
+       gimple-min-invariants and SSA names.
+
+2008-07-14  Andreas Krebbel  <krebbel1@de.ibm.com>
+
+       * config/s390/s390.c (s390_gimplify_va_arg): Unshare the args* tree.
+
+2008-07-14  Richard Guenther  <rguenther@suse.de>
+
+       * tree-ssa-math-opts.c (execute_cse_reciprocals): Process
+       SSA_NAME defs of calls.
+       * gimple-pretty-print.c (dump_unary_rhs): Do not prefix
+       CONSTRUCTOR with [constructor].
+
+2008-07-14  Aldy Hernandez  <aldyh@redhat.com>
+
+       * config/alpha/alpha.c (va_list_skip_additions): Change
+       GIMPLE_STMT_OPERAND to TREE_OPERAND.
+
+2008-07-14  Jakub Jelinek  <jakub@redhat.com>
+
+       * tree-vect-transform.c (vect_create_data_ref_ptr): Update comment.
+       * config/s390/s390.c (s390_va_start): Build MODIFY_EXPR instead of
+       GIMPLE_MODIFY_STMT.
+       (s390_gimplify_va_arg): Use gimplify_assign.
+       * config/sh/sh.c (sh_va_start): Build MODIFY_EXPR instead of
+       GIMPLE_MODIFY_STMT.
+       (sh_gimplify_va_arg_expr): Use gimplify_assign.
+       * config/sparc/sparc.c (sparc_gimplify_va_arg): Likewise.
+       * config/spu/spu.c (spu_va_start): Build MODIFY_EXPR instead of
+       GIMPLE_MODIFY_STMT.
+       (spu_gimplify_va_arg_expr): Use gimplify_assign.
+       * config/stormy16/stormy16.c (xstormy16_expand_builtin_va_start):
+       Build MODIFY_EXPR instead of GIMPLE_MODIFY_STMT.
+       (xstormy16_gimplify_va_arg_expr): Likewise.  Use gimplify_assign.
+       * config/xtensa/xtensa.c (xtensa_va_start): Build MODIFY_EXPR
+       instead of GIMPLE_MODIFY_STMT.
+       (xtensa_gimplify_va_arg_expr): Use gimplify_assign.
+
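The target changes above drop hand-built MODIFY_EXPR/GIMPLE_MODIFY_STMT trees in favor of gimplify_assign, which creates a GIMPLE_ASSIGN tuple and appends it to a sequence in one step. A hedged sketch of the pattern a gimplify_va_arg_expr hook might now use; the helper and its variables are illustrative:

    /* Sketch: emit "ptr = ptr + size" into *PRE_P while gimplifying
       va_arg.  PTR and SIZE are assumed to be valid GIMPLE operands.  */
    static void
    emit_va_arg_advance (tree ptr, tree size, gimple_seq *pre_p)
    {
      tree incr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr),
                               ptr, fold_convert (sizetype, size));

      /* Builds the assignment tuple and appends it to *PRE_P.  */
      gimplify_assign (ptr, incr, pre_p);
    }
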
+2008-07-14  Aldy Hernandez  <aldyh@redhat.com>
+
+       * config/alpha/alpha.c (va_list_skip_additions): Rename
+       GIMPLE_MODIFY_STMT to MODIFY_EXPR.
+       (alpha_va_start): Same.
+       (alpha_gimplify_va_arg_1): Use gimplify_assign.
+       (alpha_gimplify_va_arg): Same.
+       * config/frv/frv.c (frv_expand_builtin_va_start): Rename
+       GIMPLE_MODIFY_STMT to MODIFY_EXPR.
+       * config/ia64/ia64.c (ia64_gimplify_va_arg): Use gimplify_assign.
+       * config/mips/mips.c (mips_va_start): Rename GIMPLE_MODIFY_STMT to
+       MODIFY_EXPR or use gimplify_assign when appropriate.
+       (mips_gimplify_va_arg_expr): Same.
+
+2008-07-14  Aldy Hernandez  <aldyh@redhat.com>
+
+       * config/rs6000/rs6000.c (rs6000_va_start): Change
+       GIMPLE_MODIFY_STMT to MODIFY_EXPR.
+       (rs6000_gimplify_va_arg): Use gimplify_assign.
+       Build GIMPLE_GOTO directly.
+
+2008-07-14  Aldy Hernandez  <aldyh@redhat.com>
+
+       * tree-dump.c (dequeue_and_dump): Remove GIMPLE_MODIFY_STMT case.
+       * tree-ssa-loop-niter.c (simplify_replace_tree): Remove GIMPLE_STMT_P
+       call.
+       * tree-pretty-print.c (dump_generic_node): Remove any references to
+       GIMPLE_MODIFY_STMT, GIMPLE_STMT_P, GIMPLE_TUPLE_P.
+       (op_prio): Same.
+       (op_symbol_code): Same.
+       * java/java-gimplify.c (java_gimplify_expr): Same.
+       (java_gimplify_modify_expr): Same.
+       * java/java-tree.h: Rename GENERIC_NEXT to TREE_CHAIN.
+       * tree-tailcall.c (find_tail_calls): Update comment.
+       * tree.c (tree_code_class_string): Remove "gimple_stmt".
+       (tree_node_kind): Remove "gimple statements".
+       (tree_code_size): Remove tcc_gimple_stmt.
+       (make_node_stat): Same.
+       (copy_node_stat): Remove any references to
+       GIMPLE_MODIFY_STMT, GIMPLE_STMT_P, GIMPLE_TUPLE_P, tcc_gimple_stmt,
+       TS_GIMPLE_STATEMENT, GENERIC_TREE_OPERAND, GENERIC_TREE_TYPE,
+       GIMPLE_TUPLE_HAS_LOCUS_P, GIMPLE_STMT_LOCUS, GIMPLE_STMT_BLOCK,
+       IS_GIMPLE_STMT_CODE_CLASS, GIMPLE_STMT_BLOCK.
+       (expr_align): Same.
+       (tree_node_structure): Same.
+       (build2_stat): Same.
+       (set_expr_locus): Same.
+       (walk_tree_1): Same.
+       (tree_block): Same.
+       (build_gimple_modify_stmt_stat): Remove.
+       (expr_location): Remove.
+       (set_expr_location): Remove.
+       (expr_hash_location): Remove.
+       (expr_locus): Remove.
+       (expr_filename): Remove.
+       (expr_lineno): Remove.
+       (generic_tree_operand): Remove.
+       (generic_tree_type): Remove.
+       * tree.h (tree_code_class): Remove tcc_gimple_stmt.
+       (IS_GIMPLE_STMT_CODE_CLASS): Remove.
+       (struct gimple_stmt): Remove.
+       (GIMPLE_STMT_CHECK): Remove.
+       (TREE_OPERAND_CHECK): Remove GIMPLE_TUPLE_P.
+       (TREE_CHAIN): Same.
+       (GIMPLE_STMT_OPERAND_CHECK): Remove.
+       (GIMPLE_STMT_P): Remove.
+       (GIMPLE_TUPLE_P): Remove.
+       (GIMPLE_TUPLE_HAS_LOCUS_P): Remove.
+       (GENERIC_TREE_OPERAND): Remove.
+       (GENERIC_TREE_TYPE): Remove.
+       (GENERIC_NEXT): Remove.
+       (IS_CONVERT_EXPR_CODE_P): Rename GENERIC_TREE_TYPE to TREE_TYPE.
+       (MOVE_NONTEMPORAL): Remove GIMPLE_MODIFY_STMT_CHECK.
+       Remove GIMPLE_STMT_OPERAND, GIMPLE_STMT_LOCUS, GIMPLE_STMT_BLOCK.
+       (EXPR_LOCATION, SET_EXPR_LOCATION, EXPR_HAS_LOCATION, EXPR_LOCUS,
+       EXPR_FILENAME, EXPR_LINENO): Do not call functions.
+       (CAN_HAVE_LOCATION_P): Remove GIMPLE_STMT_P.
+       (union tree_node): Remove gstmt.
+       (build_gimple_modify_stmt*): Remove.
+       (expr_location, set_expr_location, expr_has_location,
+       expr_locus, expr_filename, expr_lineno, generic_tree_operand,
+       generic_tree_type): Remove.
+       * tree-scalar-evolution.c (follow_ssa_edge): Update comment.
+       (interpret_condition_phi): Same.
+       * gimple.h (gimplify_assign): New.
+       * builtins.c, fold-const.c, omp-low.c, tree-ssa-dse.c, tree-gimple.c,
+       tree-ssa-math-opts.c, tree-nrv.c, gimple-low.c, dwarf2out.c,
+       expr.c, tree-parloops.c, matrix-reorg.c, c-decl.c, tree-eh.c,
+       c-pretty-print.c, langhooks.c, function.c, tree-affine.c,
+       gimplify.c, tree.def, cfgexpand.c, tree-predcom.c, print-tree.c,
+       tree-ssa-ter.c, tree-ssa.c, tree-inline.c, gimple.c, gimple.h,
+       tree-cfg.c, config/i386/i386.c, stmt.c, tree-ssa-operands.c:
+       Remove any references to
+       GIMPLE_MODIFY_STMT, GIMPLE_STMT_P, GIMPLE_TUPLE_P, tcc_gimple_stmt,
+       TS_GIMPLE_STATEMENT, GENERIC_TREE_OPERAND, GENERIC_TREE_TYPE,
+       GIMPLE_TUPLE_HAS_LOCUS_P, GIMPLE_STMT_LOCUS, GIMPLE_STMT_BLOCK,
+       IS_GIMPLE_STMT_CODE_CLASS, GIMPLE_STMT_BLOCK.
+       Call gimplify_assign or generate a GIMPLE_ASSIGN directly when
+       appropriate.
+
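To make the entry above concrete: before the merge an assignment was a GIMPLE_MODIFY_STMT tree whose operands were reached with GIMPLE_STMT_OPERAND; afterwards it is a gimple tuple reached through accessors. A hedged before/after sketch; the accessing function is illustrative:

    /* Before the merge (tree IL):
         if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
           lhs = GIMPLE_STMT_OPERAND (stmt, 0),
           rhs = GIMPLE_STMT_OPERAND (stmt, 1);
       After the merge (tuple IL), a hedged equivalent: */
    static tree
    assign_rhs1_or_null (gimple stmt)
    {
      if (!is_gimple_assign (stmt))
        return NULL_TREE;

      /* The LHS is gimple_assign_lhs (stmt); the RHS operation is
         gimple_assign_rhs_code (stmt).  */
      return gimple_assign_rhs1 (stmt);
    }
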
+2008-07-14  Jakub Jelinek  <jakub@redhat.com>
+
+       * cfgexpand.c (gimple_cond_pred_to_tree): New function.
+       (gimple_to_tree) <case GIMPLE_COND>: Use it.
+       (release_stmt_tree): New function.
+       (expand_gimple_cond): Call just gimple_cond_pred_to_tree
+       instead of gimple_to_tree, ggc_free pred before returning.
+       (expand_gimple_tailcall): Call release_stmt_tree.
+       (expand_gimple_basic_block): Call release_stmt_tree instead
+       of ggc_free.
+
+       * gimplify.c (internal_get_tmp_var, gimplify_decl_expr,
+       gimplify_init_ctor_eval, gimplify_target_expr): Call ggc_free
+       on the INIT_EXPR.
+
+2008-07-14  Richard Guenther  <rguenther@suse.de>
+
+       * tree-ssa-ccp.c (ccp_fold): Move ADDR_EXPR handing to
+       GIMPLE_SINGLE_RHS case.
+       * tree-ssa-ifcombine.c (ifcombine_iforif): Fix typo.
+
+2008-07-14  Jakub Jelinek  <jakub@redhat.com>
+
+       * system.h (CONST_CAST2): Avoid using a union for all GCCs <= 4.0.x.
+
+2008-07-12  Diego Novillo  <dnovillo@google.com>
+
+       * tree-loop-distribution.c: Tuplify.
+       * tree-ssa-propagate.c (substitute_and_fold): Remove
+       stale FIXME tuples note.
+
+2008-07-11  Jakub Jelinek  <jakub@redhat.com>
+
+       * tree-dump.c (dump_options): Don't set TDF_RHS_ONLY for -all.
+
+       * omp-low.c (lower_omp_1): Only use rhs_predicate_for if
+       GIMPLE_SINGLE_RHS.
+
+       * tree-vect-analyze.c (vect_determine_vectorization_factor): Handle
+       GIMPLE_CALL with non-NULL lhs.
+       (vect_build_slp_tree): Likewise.  Fix reversed check for references.
+       * tree-vectorizer.c (stmt_vec_info_htab, stmt_vec_info_eq,
+       stmt_vec_info_hash, init_stmt_vec_info_htab,
+       free_stmt_vec_info_htab): Remove.
+       (stmt_vec_info_vec): New variable.
+       (init_stmt_vec_info_vec, free_stmt_vec_info_vec): New functions.
+       (new_loop_vec_info): Clear stmt uid before calling
+       set_vinfo_for_stmt.
+       (vect_is_simple_use): Handle GIMPLE_CALL with non-NULL lhs.
+       (vectorize_loops): Call {init,free}_stmt_vec_info_vec instead of
+       {init,free}_stmt_vec_info_htab.
+       * tree-parloops.c (parallelize_loops): Likewise.
+       * tree-ssa-operands.c (get_expr_operands): Handle VEC_COND_EXPR.
+       * tree-vectorizer.h (stmt_vec_info_htab): Removed.
+       (init_stmt_vec_info_htab, free_stmt_vec_info_htab): Remove
+       prototypes.
+       (stmt_vec_info_vec): New extern decl.
+       (init_stmt_vec_info_vec, free_stmt_vec_info_vec): New prototypes.
+       (vinfo_for_stmt, set_vinfo_for_stmt): Rewritten using stmt uid
+       and vector instead of a hash table.
+       * tree-vect-patterns.c (vect_recog_pow_pattern): Request
+       a GIMPLE_CALL with non-NULL lhs instead of GIMPLE_ASSIGN.
+       (vect_pattern_recog_1): Use is_gimple_call instead of comparing
+       gimple_code with GIMPLE_CALL.
+       * gimple.c (gimple_rhs_class_table): Use GIMPLE_SINGLE_RHS for
+       DOT_PROD_EXPR and VEC_COND_EXPR.
+       * tree-vect-transform.c (vect_get_slp_vect_defs): Use gimple_get_lhs
+       instead of gimple_assign_lhs.
+       (get_initial_def_for_induction): Use build_int_cst even for
+       pointers.  Use POINTER_PLUS_EXPR for POINTER_TYPE_P (scalar_type).
+       (vect_get_vec_def_for_operand): Use is_gimple_call instead of
+       comparing gimple_code with GIMPLE_CALL.
+       (vectorizable_call): Likewise.  Use gimple_call_lhs instead of
+       gimple_assign_lhs.  Build a vector of arguments, use
+       gimple_build_call_vec.
+       (vect_get_vec_def_for_stmt_copy): Use gimple_get_lhs.
+       (vectorizable_live_operation): Handle GIMPLE_SINGLE_RHS operands.
+
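The vinfo_for_stmt/set_vinfo_for_stmt rewrite above replaces a statement-to-info hash table with a per-statement uid used as an index into a growable vector. A simplified, hedged model of the scheme, using a plain array instead of GCC's VEC machinery and a made-up payload type:

    /* Model of the uid-indexed side table; stmt_info_model and the plain
       array are illustrative, not the actual vectorizer types.  */
    struct stmt_info_model;
    static struct stmt_info_model **info_table;   /* Indexed by uid - 1.  */

    static void
    model_set_info_for_stmt (gimple stmt, struct stmt_info_model *info,
                             unsigned int next_uid)
    {
      if (gimple_uid (stmt) == 0)          /* uid 0 means "no info yet".  */
        gimple_set_uid (stmt, next_uid);
      info_table[gimple_uid (stmt) - 1] = info;
    }

    static struct stmt_info_model *
    model_info_for_stmt (gimple stmt)
    {
      unsigned int uid = gimple_uid (stmt);
      return uid == 0 ? NULL : info_table[uid - 1];
    }
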
+2008-07-11  Doug Kwan  <dougkwan@google.com>
+
+       * gimple-dummy.c (vectorizable_induction, vectorizable_type_promotion,
+       vectorizable_type_demotion, vectorizable_conversion,
+       vectorizable_operation, vectorizable_assignment,
+       vectorizable_load, vectorizable_call, vectorizable_store,
+       vectorizable_condition, vectorizable_reduction,
+       vectorizable_live_operation, vectorizable_function,
+       vect_estimate_min_profitable_iters, vect_model_simple_cost,
+       vect_model_store_cost, vect_model_load_cost, vect_transform_loop):
+       Remove DUMMY_FNs.
+       * tree-ssa-loop.c (gate_tree_vectorize): Reenable vectorizer.
+       * tree-vectorizer.c (vect_is_simple_use): Fix handling of
+       GIMPLE_NOP.
+       * tree-vectorizer.h (vectorizable_load, vectorizable_store,
+       vectorizable_operation, vectorizable_type_promotion,
+       vectorizable_type_demotion, vectorizable_conversion,
+       vectorizable_assignment, vectorizable_call, vectorizable_condition,
+       vectorizable_live_operation, vectorizable_reduction,
+       vectorizable_induction): Adjust prototypes.
+       * tree-vect-transform.c (vectorizable_load, vectorizable_store,
+       vectorizable_operation, vectorizable_type_promotion,
+       vectorizable_type_demotion, vectorizable_conversion,
+       vectorizable_assignment, vectorizable_call, vectorizable_condition,
+       vectorizable_live_operation, vectorizable_reduction,
+       vectorizable_induction, vect_transform_stmt,
+       vect_create_data_ref_ptr, vect_create_addr_base_for_vector_ref,
+       vect_get_vec_def_for_operand, vect_init_vector,
+       vect_finish_stmt_generation, vect_create_epilog_for_reduction,
+       get_initial_def_for_reduction, cost_for_stmt,
+       vect_estimate_min_profitable_iters, vect_model_reduction_cost,
+       vect_cost_strided_group_size, vect_model_load_cost, bump_vector_ptr,
+       vect_get_constant_vectors, vect_get_slp_vect_defs,
+       vect_get_slp_defs, get_initial_def_for_induction,
+       vect_get_vec_def_for_stmt_copy, vect_get_vec_defs,
+       vectorizable_function, vect_gen_widened_results_half,
+       vect_permute_store_chain, vect_setup_realignment,
+       vect_permute_load_chain, vect_transform_strided_load,
+       vect_is_simple_cond, vect_build_loop_niters,
+       vect_generate_tmps_on_preheader, vect_update_ivs_after_vectorizer,
+       vect_do_peeling_for_loop_bound, vect_gen_niters_for_prolog_loop,
+       vect_do_peeling_for_alignment, vect_create_cond_for_align_checks,
+       vect_create_cond_for_alias_checks, vect_loop_versioning,
+       vect_remove_stores, vect_schedule_slp_instance,
+       vect_transform_loop): Tuplify.
+
+2008-07-10  Richard Guenther  <rguenther@suse.de>
+
+       * gimple.h (gimple_assign_ssa_name_copy_p): Declare.
+       (gimple_has_lhs): New function.
+       * gimple.c (gimple_assign_ssa_name_copy_p): New function.
+       * tree-ssa-copy.c (propagate_tree_value_into_stmt): Remove
+       redundant gimple_set_location call.
+       * gimple-iterator.c (gsi_remove): Do not free stmt operands.
+       * tree-ssa-structalias.c (find_func_aliases): Correctly let
+       all things with pointers escape.
+       * tree-pass.h (TDF_RHS_ONLY): New flag.
+       * diagnostic.h (print_gimple_expr): Declare.
+       * gimple-pretty-print.c (print_gimple_expr): New function.
+       (dump_gimple_assign): Dump the RHS as expression if TDF_RHS_ONLY.
+       (dump_gimple_call): Likewise.
+       (dump_gimple_cond): Likewise.
+       * tree-ssa-propagate.c (fold_predicate_in): Use print_gimple_expr.
+       * tree-ssa-sccvn.c (visit_use): Use gimple_has_lhs.
+       Use print_gimple_expr.  Handle tcc_expression correctly.
+
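gimple_has_lhs and gimple_assign_ssa_name_copy_p are small new predicates on the tuple representation. A hedged sketch of how a copy-propagation-style consumer might use them; the function is illustrative:

    /* Sketch: return the copied SSA name for "x_1 = y_2" statements,
       NULL_TREE otherwise.  */
    static tree
    copied_name_or_null (gimple stmt)
    {
      if (!gimple_has_lhs (stmt))
        return NULL_TREE;                /* e.g. GIMPLE_COND, GIMPLE_GOTO.  */

      if (gimple_assign_ssa_name_copy_p (stmt))
        /* Both the LHS and the single RHS are SSA_NAMEs.  */
        return gimple_assign_rhs1 (stmt);

      return NULL_TREE;
    }
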
+2008-07-09  Jakub Jelinek  <jakub@redhat.com>
+
+       * gimplify.c (struct gimplify_ctx): Move to tree-gimple.h.
+       (push_gimplify_context): Don't allocate bind_expr_stack,
+       temp_htab nor c itself here.  Add c argument.
+       (pop_gimplify_context): Allow bind_expr_stack being NULL.  Check
+       c->temp_htab instead of optimize whether htab_delete should be called.
+       Don't free c.
+       (gimple_push_bind_expr): Create bind_expr_stack lazily.
+       (lookup_tmp_var): Create temp_htab lazily.
+       (gimplify_scan_omp_clauses, gimplify_omp_parallel, gimplify_omp_task,
+       gimplify_body, force_gimple_operand): Adjust push_gimplify_context
+       callers.
+       * omp-low.c (lower_omp_sections, lower_omp_single, lower_omp_master,
+       lower_omp_ordered, lower_omp_critical, lower_omp_for,
+       create_task_copyfn, lower_omp_taskreg, execute_lower_omp): Likewise.
+       * tree-ssa-ccp.c (gimplify_and_update_call_from_tree): Likewise.
+       * tree-sra.c (generate_element_init): Likewise.
+       * tree-mudflap.c (execute_mudflap_function_ops,
+       execute_mudflap_function_decls): Likewise.
+       * tree-inline.c (setup_one_parameter, optimize_inline_calls): Likewise.
+       * tree-gimple.h (struct gimplify_ctx): New type.
+       (push_gimplify_context): Adjust prototype.
+
+       * gimple.h (gimple_rhs_class_table): New extern decl.
+       (get_gimple_rhs_class): Change into inline.
+       * gimple.c (get_gimple_rhs_class): Removed.
+       (gimple_rhs_class_table): New variable.
+
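+       The gimplify.c entry above has callers allocate the gimplify context
+       themselves and creates its sub-structures lazily.  A minimal C sketch
+       of that pattern follows; all names and fields here are illustrative,
+       not the real GCC declarations.
+
+         #include <stdlib.h>
+         #include <string.h>
+
+         struct ctx { void *bind_stack; void *temp_tab; struct ctx *prev; };
+
+         static struct ctx *current_ctx;
+
+         static void
+         push_ctx (struct ctx *c)
+         {
+           /* The caller supplies the storage, so nothing is malloc'd here.  */
+           memset (c, 0, sizeof (*c));
+           c->prev = current_ctx;
+           current_ctx = c;
+         }
+
+         static void
+         pop_ctx (void)
+         {
+           struct ctx *c = current_ctx;
+           current_ctx = c->prev;
+           /* Sub-structures were created lazily; free only what exists.  */
+           if (c->temp_tab)
+             free (c->temp_tab);
+           if (c->bind_stack)
+             free (c->bind_stack);
+           /* C itself lives in the caller's frame and is not freed.  */
+         }
+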
+2008-07-09  Doug Kwan  <dougkwan@google.com>
+           Diego Novillo  <dnovillo@google.com>
+
+       * tree-ssa-ccp.c (fold_stmt_r): Remove gcc_unreachable
+       call from COND_EXPR handler.
+       * tree-if-conv.c: Tuplify.
+       * gimple.c (gimple_could_trap_p_1): Factor out of ...
+       (gimple_could_trap_p): ... here.
+       Call it.
+       (gimple_assign_rhs_could_trap_p): New.
+       * gimple.h (gimple_assign_rhs_could_trap_p): Declare.
+       * tree-ssa-operands.c (get_expr_operands): Handle
+       COND_EXPR.
+
+2008-07-08  Jakub Jelinek  <jakub@redhat.com>
+
+       Merge with mainline @137633.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-07-08  Jakub Jelinek  <jakub@redhat.com>
+
+       * tree-cfg.c (remove_useless_stmts_cond): Avoid calling
+       fold_binary.
+
+2008-07-07  Jakub Jelinek  <jakub@redhat.com>
+
+       * gimple-pretty-print.c (dump_gimple_return): Don't print space
+       after return if return has no argument.
+       (dump_gimple_seq): Don't print newline after last statement in
+       the sequence.
+       (dump_gimple_fmt, dump_gimple_try, dump_gimple_omp_for,
+       dump_gimple_omp_single, dump_gimple_omp_sections,
+       dump_gimple_omp_block, dump_gimple_omp_critical,
+       dump_gimple_omp_parallel, dump_gimple_omp_task): Use
+       newline_and_indent after dump_gimple_seq instead of INDENT.
+       (dump_gimple_bind): Likewise.  If there are no bind vars, don't
+       print two newlines before the sequence, just one.
+       * tree-cfg.c (dump_function_to_file): Fix dumping lowered gimple
+       before CFG is built.
+
+2008-07-07  Diego Novillo  <dnovillo@google.com>
+
+       * tree-vrp.c (ssa_name_nonnegative_p): Tuplify.
+       (ssa_name_nonzero_p): Tuplify.
+       (extract_range_from_comparison): Tidy.
+       (vrp_evaluate_conditional_warnv_with_ops): Tidy.
+       (vrp_evaluate_conditional): Change STMT argument to gimple.
+       Update all users.
+       (simplify_stmt_for_jump_threading): Change WITHIN_STMT argument
+       to gimple.
+       Update all users.
+       (identify_jump_threads): Tidy.
+       * tree-tailcall.c (find_tail_calls): Remove stale FIXME note.
+       * tree.c (tree_node_structure): Likewise.
+       * tree.h (struct tree_base): Likewise.
+       (struct gimple_stmt): Likewise.
+       * builtins.c (expand_builtin_memcpy): Likewise.
+       (expand_builtin_memset_args): Likewise.
+       * tree-chrec.h (build_polynomial_chrec): Do not initialize VAL.
+       * tree-ssa-ccp.c (fold_stmt_r): Do not handle COND_EXPR.
+       (fold_stmt): Remove #if 0 code.
+       * tree-ssa-dom.c (EXTRA_DETAILS): Remove.
+       Update all users.
+       (hashable_expr_equal_p): Remove stale FIXME note.
+       (simplify_stmt_for_jump_threading): Convert WITHIN_STMT
+       argument to gimple.  Update all users.
+       * tree-ssa-propagate.c: Include gimple.h
+       (get_rhs): Remove.
+       (set_rhs): Remove.
+       (fold_predicate_in): Tuplify.
+       * Makefile.in (tree-ssa-propagate.o): Add dependency on
+       GIMPLE_H.
+       * tree-ssa-propagate.h (get_rhs, set_rhs): Remove.
+       * tree-parloops.c (create_parallel_loop): Remove FIXME
+       tuples note.
+       * tree-eh.c (lookup_stmt_eh_region): Remove FIXME tuples
+       editorial note.
+       * cfgexpand.c (gimple_to_tree): Likewise.
+       * tree-ssa-dce.c (mark_stmt_if_obviously_necessary): Tidy.
+       Do not handle GIMPLE_BIND.
+       * tree-inline.c (remap_gimple_op_r): Remove stale FIXME notes.
+       * tree-optimize.c (execute_fixup_cfg): Likewise.
+       * tree-flow.h (struct tree_ann_common_d): Remove stale
+       FIXME tuples notes.
+       (tree_vrp_evaluate_conditional): Change last argument type to
+       gimple.  Update all users.
+       (thread_across_edge): Likewise.
+       * gimple.c (gimple_has_side_effects): If any argument has
+       TREE_SIDE_EFFECTS set, assert that S has volatile operands.
+       (gimple_rhs_has_side_effects): Likewise.
+       * gimple.h (gimple_phi_capacity): Remove stale FIXME tuples note.
+       * tree-cfg.c (group_case_labels): Remove FIXME tuples note.
+       Assert that the new size is smaller than the old size.
+       (remove_useless_stmts_warn_notreached): Remove #if 0 code.
+       (remove_useless_stmts_cond): Remove stale FIXME tuples note.
+       (remove_useless_stmts_1): Likewise.
+       (verify_types_in_gimple_assign): Likewise.
+       * passes.c (init_optimization_passes): Re-enable
+       pass_ipa_increase_alignment.
+       * tree-ssa-threadedge.c
+       (record_temporary_equivalences_from_stmts_at_dest):
+       Change SIMPLIFY to accept two arguments of type gimple.
+       Update all users.
+       (simplify_control_stmt_condition): Likewise.
+       (thread_across_edge): Likewise.
+       * tree-ssa-operands.c (add_call_clobber_ops): Re-enable
+       calls to ipa_reference_get_not_read_globals and
+       ipa_reference_get_not_written_global.
+
+2008-07-07  Aldy Hernandez  <aldyh@redhat.com>
+
+       * tree-ssa-dom.c: Remove FIXME tuples.
+
+2008-07-07  Aldy Hernandez  <aldyh@redhat.com>
+
+       * tree-cfg.c (verify_types_in_gimple_assign): Remove
+       FIXME tuples.
+
+2008-07-04  Jakub Jelinek  <jakub@redhat.com>
+
+       * cfgexpand.c (gimple_to_tree) <case GIMPLE_CALL>: Copy
+       CALL_EXPR_VA_ARG_PACK.
+
+       * gimple.c (gimple_build_bind): Set gimple_bind_block rather
+       than gimple_block.
+       (gimple_copy) <case GIMPLE_BIND>: Don't unshare gimple_bind_block.
+
+       * gimple.h (GF_ASM_INPUT, GF_ASM_VOLATILE, GF_CALL_CANNOT_INLINE,
+       GF_CALL_FROM_TRUNK, GF_CALL_RETURN_SLOT_OPT, GF_CALL_TAILCALL,
+       GF_CALL_VA_ARG_PACK, GF_OMP_PARALLEL_COMBINED, GF_OMP_RETURN_NOWAIT,
+       GF_OMP_SECTION_LAST, GF_PREDICT_TAKEN): Change from static const ints
+       into enum values.
+       (struct gimple_statement_base): Move subcode after flags to make it
+       16-bit aligned.
+
+       * tree-ssa-structalias.c (find_func_aliases): Handle ADDR_EXPR
+       as GIMPLE_SINGLE_RHS instead of GIMPLE_UNARY_RHS.
+
+       * tree-ssa-operands.c (get_expr_operands): Don't handle
+       OMP_FOR, OMP_PARALLEL, OMP_TASK, OMP_SECTIONS and PREDICT_EXPR
+       here.
+
+       * gimple.def (GIMPLE_PREDICT): New.
+       * gimple.h: Update comment above GF_* flags.
+       (GF_PREDICT_TAKEN): New.
+       (gimple_build_predict): New prototype.
+       (gimple_predict_predictor, gimple_predict_outcome,
+       gimple_predict_set_predictor, gimple_predict_set_outcome): New
+       inlines.
+       * gimple.c (gss_for_code): Handle GIMPLE_PREDICT.
+       (gimple_size, walk_gimple_op): Likewise.
+       (gimple_build_predict): New function.
+       * gimple-pretty-print.c (dump_gimple_stmt): Handle GIMPLE_PREDICT.
+       * predict.c (tree_bb_level_predictions): Likewise.
+       * cfgexpand.c (gimple_to_tree): Likewise.
+       * tree-inline.c (estimate_num_insns): Likewise.
+       * tree-ssa-dce.c (mark_stmt_if_obviously_necessary): Likewise.
+       * gimple-low.c (lower_stmt): Likewise.
+       * tree-cfg.c (verify_types_in_gimple_seq_2): Likewise.
+       (verify_types_in_gimple_stmt): Likewise.  Don't handle PREDICT_EXPR.
+       * gimplify.c (gimplify_expr): Gimplify PREDICT_EXPR into
+       GIMPLE_PREDICT.
+       * expr.c (expand_expr_real): Don't handle PREDICT_EXPR.
+
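+       The gimple.h change above turns the GF_* flags from static const ints
+       into enum values kept in the statement subcode.  A small, hedged
+       illustration of that style follows; the flag names and bit positions
+       here are made up.
+
+         enum stmt_flags
+         {
+           FLAG_ASM_VOLATILE      = 1 << 0,
+           FLAG_RETURN_NOWAIT     = 1 << 1,
+           FLAG_PARALLEL_COMBINED = 1 << 2,
+           FLAG_PREDICT_TAKEN     = 1 << 3
+         };
+
+         /* With a 16-bit subcode field on the statement, flags are tested
+            with plain bit operations.  */
+         static inline int
+         stmt_flag_p (unsigned int subcode, enum stmt_flags f)
+         {
+           return (subcode & (unsigned int) f) != 0;
+         }
+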
+2008-07-04  Aldy Hernandez  <aldyh@redhat.com>
+
+       * tree-ssa-forwprop.c (rhs_to_tree): Remove fixme.
+       (forward_propagate_into_cond): Add comment.
+       (forward_propagate_into_gimple_cond): Add comment.
+       (forward_propagate_addr_expr_1): Enable optimization.
+
+2008-07-04  David Edelsohn  <edelsohn@gnu.org>
+
+       Revert:
+       * config/rs6000/rs6000.c (rs6000_gimplify_va_arg): Unshare t.
+
+       * config/rs6000/rs6000.c (rs6000_gimplify_va_arg): Unshare reg.
+
+2008-07-03  David Edelsohn  <edelsohn@gnu.org>
+
+       * config/rs6000/rs6000.c (rs6000_gimplify_va_arg): Unshare t.
+
+2008-07-03  Doug Kwan  <dougkwan@google.com>
+
+       * gimple-dummy.c (vectorizable_function): New dummy.
+       (vect_pattern_recog): Remove dummy.
+       * tree-vectorizer.h (vect_recog_func_ptr): Adjust types for tuples.
+       * tree-vect-patterns.c (File): Re-enable all code.
+       (widened_name_p): Tuplify.
+       (vect_recog_temp_ssa_var): New.
+       (vect_recog_dot_prod_pattern): Tuplify.
+       (vect_recog_widen_mult_pattern): Same.
+       (vect_recog_pow_pattern): Same.
+       (vect_recog_widen_sum_pattern): Same.
+       (vect_pattern_recog_1): Tuplify. Factor out code to create SSA
+       temporaries to vect_recog_temp_ssa_var.  Remove code for building
+       assignment statement.
+       (vect_pattern_recog): Tuplify.
+
+2008-07-03  Janis Johnson  <janis187@us.ibm.com>
+            David Edelsohn  <edelsohn@gnu.org>
+
+       * config/rs6000/rs6000.c (rs6000_va_start): Unshare valist.
+       (rs6000_gimplify_va_arg): Unshare valist, reg, addr, ovf.
+
+2008-07-03  Jakub Jelinek  <jakub@redhat.com>
+
+       * tree-inline.c (remap_gimple_op_r): Remap TREE_BLOCK of
+       expressions.
+
+       * tree-switch-conversion.c (gen_inbound_check): Force RHS to be
+        gimple operand.  Use fold_build* instead of build*.
+       (build_arrays): Likewise.  Convert RHS to LHS's type.
+
+       * tree-switch-conversion.c (struct switch_conv_info): Change
+       arr_ref_first and arr_ref_last to gimple.
+       (check_range, check_final_bb, gather_default_values,
+       build_constructors, build_one_array, build_arrays, gen_def_assign,
+       fix_phi_nodes, gen_inbound_check, process_switch, do_switchconv):
+       Tuplify.
+       (create_temp_arrays): Formatting.
+
+       * gimple.h (gimple_try_set_kind): New inline function.
+       * tree-eh.c (same_handler_p, optimize_double_finally,
+       refactor_eh_r, refactor_eh): Tuplify.
+       * passes.c (init_optimization_passes): Reenable pass_refactor_eh.
+
+2008-07-02  Jakub Jelinek  <jakub@redhat.com>
+
+       * tree-parloops.c (struct reduction_info): Change reduc_stmt,
+       reduc_phi, keep_res and new_phi field types to gimple.
+       (struct elv_data): Add info field.
+       (reduction_phi, loop_parallel_p, take_address_of,
+       initialize_reductions, eliminate_local_variables_stmt,
+       eliminate_local_variables, expr_invariant_in_region_p,
+       separate_decls_in_region_name, separate_decls_in_region_stmt,
+       create_phi_for_local_result, create_call_for_reduction_1,
+       create_call_for_reduction, create_loads_for_reductions,
+       create_final_loads_for_reduction, create_stores_for_reduction,
+       create_loads_and_stores_for_name, separate_decls_in_region,
+       canonicalize_loop_ivs, transform_to_exit_first_loop,
+       create_parallel_loop, gen_parallel_loop,
+       loop_has_vector_phi_nodes, parallelize_loops): Tuplify.
+       * tree-cfg.c (gimple_duplicate_sese_tail): Tuplify.
+       (move_block_to_fn): Don't call gsi_next after calling
+       remove_phi_node.
+
+2008-07-02  Aldy Hernandez  <aldyh@redhat.com>
+           Richard Guenther  <rguenther@suse.de>
+
+       * tree-ssa-forwprop.c (can_propagate_from): Exclude loads
+        from decls explicitly.
+       (gate_forwprop): Enable.
+
+2008-07-02  Jakub Jelinek  <jakub@redhat.com>
+
+       * tree-vectorizer.h (vinfo_for_stmt): Use htab_find_slot_with_hash.
+       (set_vinfo_for_stmt): Likewise.  If info is NULL, delete entry from
+       hash table.
+       * tree-vectorizer.c (stmt_vec_info_eq, stmt_vec_info_hash): New
+       functions.
+       (init_stmt_vec_info_htab): Use them instead of htab_hash_pointer
+       and htab_eq_pointer.
+       (free_stmt_vec_info): Free stmt_info only after set_vinfo_for_stmt
+       call.
+
+       Merge with mainline @137346.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+       * builtins.c (gimple_rewrite_call_expr): Fix -Wc++-compat and/or
+       -Wcast-qual warnings.
+       * gimple.c (gimple_alloc_stat, gimple_build_omp_for,
+       gimple_range_check_failed, gimple_copy): Likewise.
+       * tree-mudflap.c (mx_xfn_xform_decls): Likewise.
+       * tree-nested.c (convert_nonlocal_reference_stmt,
+       convert_local_reference_stmt): Likewise.
+       * gimple-iterator.c (gsi_insert_before_without_update,
+       gsi_insert_after_without_update): Likewise.
+       * tree-ssa-loop-im.c (init_lim_data, get_lim_data,
+       clear_lim_data): Likewise.
+       * tree-ssa-sccvn.c (vn_nary_op_insert_stmt): Likewise.
+       * tree-vrp.c (check_all_array_refs): Likewise.
+       * value-prof.c (histogram_eq): Likewise.
+       * cgraphbuild.c (record_reference): Don't handle OMP_PARALLEL
+       and OMP_TASK here.
+       (build_cgraph_edges): Handle GIMPLE_OMP_{PARALLEL,TASK}.
+       * cgraph.c (cgraph_add_new_function): Call gimple_register_cfg_hooks
+       instead of tree_register_cfg_hooks.
+       * omp-low.c (finalize_task_copyfn): Ensure the new function's
+       body is a gimple_seq with just GIMPLE_BIND in it.
+       (scan_omp_1_stmt): Fix -Wc++-compat and/or -Wcast-qual warnings.
+       * tree-cfg.c (move_stmt_op, have_similar_memory_accesses_1,
+       ref_base_address_1): Likewise.
+       (move_stmt_r): Handle gimple_block updating.
+       * tree-ssa-alias.c (update_alias_info_1): Tuplify.
+       (update_alias_info): Likewise.
+       * tree-switch-conversion.c: Stub out temporarily.
+
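+       The tree-vectorizer change in the entry above keys per-statement info
+       off a hash table with custom hash and equality callbacks.  Below is a
+       hedged sketch of that arrangement using the libiberty hashtab
+       interface; only the htab_* calls are real API, the stmt_info structure
+       and helpers are invented for the example.
+
+         #include <stdlib.h>
+         #include "hashtab.h"      /* libiberty hash tables */
+
+         struct stmt_info { void *stmt; int data; };  /* illustrative payload */
+
+         static htab_t info_htab;
+
+         static hashval_t
+         info_hash (const void *p)
+         {
+           return htab_hash_pointer (((const struct stmt_info *) p)->stmt);
+         }
+
+         static int
+         info_eq (const void *p1, const void *p2)
+         {
+           return ((const struct stmt_info *) p1)->stmt
+                  == ((const struct stmt_info *) p2)->stmt;
+         }
+
+         static void
+         init_info_htab (void)
+         {
+           info_htab = htab_create (101, info_hash, info_eq, free);
+         }
+
+         static struct stmt_info *
+         lookup_info (void *stmt)
+         {
+           struct stmt_info key;
+           key.stmt = stmt;
+           return (struct stmt_info *)
+                  htab_find_with_hash (info_htab, &key, htab_hash_pointer (stmt));
+         }
+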
+2008-06-30  Aldy Hernandez  <aldyh@redhat.com>
+
+       * tree-ssa-forwprop.c: Remove obsolete comment.
+       (get_prop_source_stmt): Wrap call to gimple_assign_lhs with a
+       TREE_TYPE.
+       (forward_propagate_comparison): Use build2 instead of
+       fold_binary.
+
+2008-06-27  Diego Novillo  <dnovillo@google.com>
+
+       * cfgexpand.c (gimple_assign_rhs_to_tree): Factor out of ...
+       (gimple_to_tree): ... here.
+       Update comments referring to mainline merge.
+       * tree-ssa-ter.c: Tuplify.
+       * tree-outof-ssa.c (gimple_assign_rhs_to_tree): Declare.
+       (replace_use_variable): Call it.
+       (rewrite_trees): Tuplify.
+       (remove_ssa_form): Call it.
+       * gimple.h: Tidy some formatting.
+       * ipa-struct-reorg.c: Include gimple.h
+       * Makefile.in (ipa-struct-reorg.o): Add dependency on
+       GIMPLE_H.
+
+2008-06-27  Richard Guenther  <rguenther@suse.de>
+
+       * tree-ssa-sccvn.c (vn_get_expr_for): New function.
+       (vuses_to_vec): Tuplify.
+       (copy_vuses_from_stmt): Likewise.
+       (vdefs_to_vec): Likewise.
+       (copy_vdefs_from_stmt): Likewise.
+       (shared_vuses_from_stmt): Likewise.
+       (copy_reference_ops_from_call): New function split out from
+       copy_reference_ops_from_ref.
+       (create_reference_ops_from_call): New function.
+       (shared_reference_ops_from_call): Likewise.
+       (get_def_ref_stmt_vuses): Tuplify.
+       (vn_reference_lookup): Likewise.
+       (vn_nary_op_lookup_stmt): New function.
+       (vn_nary_op_insert_stmt): Likewise.
+       (vn_phi_lookup): Tuplify.
+       (vn_phi_insert): Likewise.
+       (defs_to_varying): Likewise.
+       (visit_unary_op): Likewise.
+       (visit_binary_op): Likewise.
+       (visit_reference_op_call): New function.
+       (visit_reference_op_load): Tuplify.
+       (visit_reference_op_store): Likewise.
+       (visit_phi): Likewise.
+       (stmt_has_constants): New function.
+       (simplify_binary_expression): Tuplify.
+       (simplify_unary_expression): Likewise.
+       (try_to_simplify): Likewise.
+       (visit_use): Likewise.
+       (compare_ops): Likewise.
+       (DFS): Likewise.
+       (run_scc_vn): Likewise.
+       * tree-ssa-sccvn.h (shared_vuses_from_stmt): Adjust prototype.
+       (copy_vuses_from_stmt): Likewise.
+       (vn_get_expr_for): Declare.
+       (vn_nary_op_lookup_stmt): Likewise.
+       (vn_nary_op_insert_stmt): Likewise.
+       * tree-dfa.c (get_single_def_stmt): Tuplify.
+       (get_single_def_stmt_from_phi): Likewise.
+       (get_single_def_stmt_with_phi): Likewise.
+       * tree-ssa-pre.c (do_SCCVN_insertion): Use vn_get_expr_for.
+       (eliminate): Likewise.
+       (execute_pre): Enable SCCVN.
+       (gate_fre): Enable.
+       * tree-flow.h (get_single_def_stmt): Adjust prototype.
+       (get_single_def_stmt_from_phi): Likewise.
+       (get_single_def_stmt_with_phi): Likewise.
+       (vn_lookup_or_add_with_stmt): Likewise.
+       (vn_lookup_with_stmt): Likewise.
+       * gimple.c (gimple_fold): Fix.
+       * tree-vn.c (vn_add): Disable call to add_to_value.
+       (vn_add_with_vuses): Likewise.
+       (vn_lookup_with_stmt): Tuplify.
+       (vn_lookup_or_add_with_stmt): Likewise.
+
+2008-06-27  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimple.c (gimple_cond_get_ops_from_tree): Handle TRUTH_NOT_EXPR.
+
+2008-06-25  Diego Novillo  <dnovillo@google.com>
+
+       * gimple.h (enum gimple_rhs_class): Move from tree-gimple.h
+       (struct gimple_statement_base): Change CODE field to 8
+       bits.
+       Change SUBCODE field to 16 bits.
+       (gimple_set_subcode): Move to gimple.c.
+       (gimple_subcode): Remove.
+       (gimple_expr_type): Handle GIMPLE_ASSIGN, GIMPLE_CALL and
+       GIMPLE_COND explicitly.
+       Move earlier in the file.
+       (gimple_expr_code): New.
+       (gimple_omp_subcode): New.
+       (gimple_omp_set_subcode): New.
+       (gimple_omp_return_nowait_p): Call gimple_omp_subcode.
+       (gimple_omp_section_last_p): Likewise.
+       (gimple_omp_parallel_combined_p): Likewise.
+       (gimple_assign_rhs_code): New.
+       (gimple_assign_set_rhs_code): New.
+       (gimple_assign_cast_p): Call gimple_assign_rhs_code.
+       Handle VIEW_CONVERT_EXPR.
+       (gimple_call_tail_p): Access subcode field directly.
+       (gimple_call_cannot_inline_p): Likewise.
+       (gimple_call_return_slot_opt_p): Likewise.
+       (gimple_call_from_thunk_p): Likewise.
+       (gimple_call_va_arg_pack_p): Likewise.
+       (gimple_call_copy_flags): Likewise.
+       (gimple_cond_code): Likewise.
+       (gimple_cond_set_code): Likewise.
+       (gimple_cond_make_false): Likewise.
+       (gimple_cond_make_true): Likewise.
+       (gimple_asm_volatile_p): Likewise.
+       (gimple_asm_input_p): Likewise.
+       (gimple_eh_filter_must_not_throw): Likewise.
+       (gimple_eh_filter_set_must_not_throw): Likewise.
+       (gimple_try_kind): Likewise.
+       (gimple_try_catch_is_cleanup): Likewise.
+       (gimple_wce_cleanup_eh_only): Likewise.
+
+       * tree-vrp.c (simplify_div_or_mod_using_ranges): Call
+       gimple_assign_set_rhs_code instead of gimple_set_subcode.
+       (simplify_abs_using_ranges): Likewise.
+       * tree-complex.c (init_dont_simulate_again): Call
+       gimple_expr_code instead of gimple_subcode.
+       (complex_visit_stmt): Likewise.
+       (expand_complex_move): Call gimple_assign_rhs_code
+       instead of gimple_subcode.
+       (expand_complex_operations_1): Likewise.
+       * value-prof.c (gimple_divmod_fixed_value): Likewise.
+       (gimple_mod_pow2): Likewise.
+       (gimple_mod_subtract): Likewise.
+       (gimple_divmod_values_to_profile): Likewise.
+       * tree-ssa-alias-warnings.c (find_alias_site_helper):
+       Call gimple_assign_cast_p.
+       (already_warned_in_frontend_p): Likewise.
+       * gimple.def: Add comments warning about code orderings.
+       * omp-low.c (expand_omp_taskreg): Call gimple_omp_set_subcode.
+       (expand_omp_atomic_fetch_op): Call gimple_assign_rhs_code.
+       * tree-gimple.c (get_gimple_rhs_class): Move to gimple.c
+       (get_gimple_rhs_num_ops): Likewise.
+       (gimple_assign_rhs_code): Move to gimple.h.
+       * tree-gimple.h (enum gimple_rhs_class): Move to gimple.h.
+       * tree-ssa-ccp.c (fold_gimple_assign): Call
+       gimple_assign_rhs_code instead of gimple_subcode.
+       * tree-stdarg.c (va_list_counter_bump): Likewise.
+       (check_all_va_list_escapes): Likewise.
+       (execute_optimize_stdarg): Likewise.
+       * tree-ssa-math-opts.c (is_division_by): Likewise.
+       (replace_reciprocal): Likewise.
+       (execute_cse_reciprocals): Likewise.
+       (execute_convert_to_rsqrt): Likewise.
+       * tree-ssa-dom.c (initialize_hash_element): Likewise.
+       (simple_iv_increment_p): Likewise.
+       (gimple_assign_unary_useless_conversion_p): Likewise.
+       * tree-ssa-alias.c (is_escape_site): Call gimple_assign_cast_p.
+       * predict.c (expr_expected_value_1): Call gimple_assign_rhs_code
+       instead of gimple_subcode.
+       * tree-eh.c (tree_could_trap_p): Call gimple_expr_code
+       instead of gimple_subcode.
+       * ipa-type-escape.c (is_array_access_through_pointer_and_index):
+       Call gimple_assign_rhs_code instead of gimple_subcode.
+       (check_assign): Likewise.
+       * gimplify.c (gimplify_omp_parallel): Call gimple_omp_set_subcode
+       instead of gimple_set_subcode.
+       * tree-mudflap.c (mf_xform_derefs): Call gimple_assign_rhs_code
+       instead of gimple_subcode.
+       * tree-ssa-forwprop.c (get_prop_source_stmt): Likewise.
+       (can_propagate_from): Likewise.
+       (remove_prop_source_from_use): Likewise.
+       (forward_propagate_addr_into_variable_array): Likewise.
+       * tree-object-size.c (plus_stmt_object_size): Likewise.
+       (collect_object_sizes_for): Likewise.
+       (check_for_plus_in_loops_1): Likewise.
+       (check_for_plus_in_loops): Likewise.
+       * gimple.c (gimple_set_subcode): Move from gimple.h
+       (gimple_check_failed): Access subcode field directly.
+       (gimple_assign_single_p): Call gimple_assign_rhs_code
+       instead of gimple_subcode.
+       (gimple_assign_unary_nop_p): Likewise.
+       (gimple_get_lhs): Change argument type to const_gimple.
+       (gimple_could_trap_p): Call gimple_assign_rhs_code
+       instead of gimple_subcode.
+       (get_gimple_rhs_class): Move from tree-gimple.c.
+       (get_gimple_rhs_num_ops): Likewise.
+       * tree-ssa-structalias.c (update_alias_info): Call
+       gimple_assign_rhs_code instead of gimple_subcode.
+       (find_func_aliases): Likewise.
+       * tree-ssa-threadedge.c (record_temporary_equivalences_from_phis):
+       Likewise.
+
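+       The gimple.h entry above narrows the statement CODE to 8 bits and the
+       SUBCODE to 16 bits.  A hedged sketch of that kind of bit-field packing
+       is below; the struct is purely illustrative and not the real
+       gimple_statement_base layout.
+
+         struct stmt_base
+         {
+           unsigned code       : 8;   /* statement code (assign, call, ...)   */
+           unsigned subcode    : 16;  /* e.g. the tree code of an assign RHS  */
+           unsigned visited    : 1;
+           unsigned no_warning : 1;
+           /* Remaining bits are left for other flags.  */
+         };
+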
+2008-06-25  Doug Kwan  <dougkwan@google.com>
+
+       * gimple-dummy.c (vect_free_slp_tree): Remove dummy.
+       (vect_analyze_loop): Same.
+       (vectorizable_induction): New dummy.
+       (vectorizable_type_promotion): Same.
+       (vectorizable_type_demotion): Same.
+       (vectorizable_conversion): Same.
+       (vectorizable_operation): Same.
+       (vectorizable_assignment): Same.
+       (vectorizable_load): Same.
+       (vectorizable_call): Same.
+       (vectorizable_store): Same.
+       (vectorizable_condition): Same.
+       (vectorizable_reduction): Same.
+       (vectorizable_live_operation): Same.
+       (vect_estimate_min_profitable_iters): Same.
+       (vect_model_simple_cost): Same.
+       (vect_model_store_cost): Same.
+       (vect_model_load_cost): Same.
+       (vect_pattern_recog): Same.
+       * tree-vectorizer.h (struct _stmt_vec_info): Change fields FIRST_DR
+       and NEXT_DR from tree to gimple type.
+       (vectorizable_load): Change type of parameter STMT to gimple.
+       (vectorizable_store): Same.
+       (vectorizable_operation): Same.
+       (vectorizable_type_promotion): Same.
+       (vectorizable_type_demotion): Same.
+       (vectorizable_conversion): Same.
+       (vectorizable_assignment): Same.
+       (vectorizable_function): Same.
+       (vectorizable_call): Same.
+       (vectorizable_condition): Same.
+       (vectorizable_live_operation): Same.
+       (vectorizable_reduction): Same.
+       (vectorizable_induction): Same.
+       * tree-vect-analyze.c (File): Re-enable all previously disabled code.
+       (vect_determine_vectorization_factor): Tuplify.
+       (vect_analyze_operations): Same.
+       (exist_non_indexing_operands_for_use_p): Same.
+       (vect_analyze_scalar_cycles_1): Same.
+       (vect_insert_into_interleaving_chain): Same.
+       (vect_same_range_drs): Same.
+       (vect_compute_data_ref_alignment): Same.
+       (vect_verify_datarefs_alignment): Same.
+       (vector_alignment_reachable_p): Same.
+       (vect_enhance_data_refs_alignment): Same.
+       (vect_analyze_group_access): Same.
+       (vect_analyze_data_ref_access): Same.
+       (vect_free_slp_tree): Same.
+       (vect_get_and_check_slp_defs): Same.
+       (vect_build_slp_tree): Same.
+       (vect_print_slp_tree): Same.
+       (vect_mark_slp_stmts): Same.
+       (vect_analyze_slp_instance): Same.
+       (vect_analyze_slp): Same.
+       (vect_detect_hybrid_slp_stmts): Same.
+       (vect_analyze_data_refs): Same.
+       (vect_mark_relevant): Same.
+       (process_use): Same.
+       (vect_mark_stmts_to_be_vectorized): Same.
+       (vect_can_advance_ivs_p): Same.
+       (vect_get_loop_niters): Same.
+       (vect_analyze_loop_form): Same.
+
+2008-06-25  Diego Novillo  <dnovillo@google.com>
+
+       * tree-vectorizer.c (hash_gimple_stmt): Remove.
+       (eq_gimple_stmt): Remove.
+       (init_stmt_vec_info_htab): Use htab_hash_pointer and
+       htab_eq_pointer for STMT_VEC_INFO_HTAB.
+
+2008-06-24  Doug Kwan  <dougkwan@google.com>
+
+       * gimple-dummy.c (vect_set_verbosity_level): Remove.
+       (vectorize_loops): Same.
+       (vect_free_slp_tree): New dummy.
+       (vect_analyze_loop): Same.
+       (vect_transform_loop): Same.
+       * tree-vectorizer.c (hashtab.h): New include.
+       (File): Re-enable all previously commented out code.
+       (stmt_vec_info_htab): New var.
+       (rename_variables_in_bb): Tuplify.
+       (slpeel_update_phis_for_duplicate_loop): Same.
+       (slpeel_update_phi_nodes_for_guard1): Same.
+       (slpeel_update_phi_nodes_for_guard2): Same.
+       (slpeel_make_loop_iterate_ntimes): Same.
+       (slpeel_tree_duplicate_loop_to_edge_cfg): Same.
+       (slpeel_add_loop_guard): Same.
+       (slpeel_can_duplicate_loop_p): Same.
+       (set_prologue_iterations): Same.
+       (find_loop_location): Same.
+       (new_stmt_vec_info): Same.
+       (hash_gimple_stmt): New function.
+       (init_stmt_vec_info_htab): New function.
+       (free_stmt_vec_info_htab): New function.
+       (free_stmt_vec_info): Replace statement info with hash table entry.
+       (new_loop_vec_info): Tuplify.
+       (destroy_loop_vec_info): Same.
+       (vect_supportable_dr_alignment): Same
+       (vect_is_simple_use): Same.
+       (supportable_widening_operation): Same.
+       (supportable_narrowing_operation): Same.
+       (report_vec_op): New function. Code factored out from ...
+       (vect_is_simple_reduction): Call it. Tuplify.
+       (vectorize_loops): Set up and tear down stmt_vec_info hash table.
+       * tree-vectorizer.h (struct _slp_tree): Change statement fields
+       from tree to gimple.
+       (struct _loop_vec_info): Same.
+       (struct _stmt_vec_info): Same.
+       (nested_in_vect_loop): Re-enable.
+       (init_stmt_vec_info_htab): New prototype.
+       (free_stmt_vec_info_htab): New prototype.
+       (vinfo_for_stmt): Use hash table instead of statement info.
+       (set_stmt_info): Remove.
+       (set_vinfo_for_stmt): New inline.
+       (is_pattern_stmt_p): Tuplify.
+       (vect_is_simple_use): Tuplify prototype.
+       (vect_is_simple_reduction): Same.
+       (supportable_widening_operation): Same.
+       (supportable_narrowing_operation): Same.
+       (new_stmt_vec_info): Same.
+       (free_stmt_vec_info): Same.
+       * Makefile.in (tree-vectorizer.o): Add HASHTAB_H dependency.
+
+2008-06-22  Andrew Pinski  <pinskia@gmail.com>
+
+       * gimple-pretty-print.c (dump_binary_rhs): Print out MIN_EXPR and
+       MAX_EXPR specially.
+
+2008-06-20  Doug Kwan  <dougkwan@google.com>
+
+       * tree-loop-linear.c (File): Re-enable all previously disabled code.
+       (gather_interchange_stats): Tuplify.
+       (linear_transform_loops): Likewise.
+       * gimple-dummy.c (get_type): Remove unused dummy.
+       (ipa_add_method): Same.
+       (ipa_callsite_callee): Same.
+       (ipa_callsite_compute_count): Same.
+       (ipa_callsite_compute_param): Same.
+       (ipa_callsite_param): Same.
+       (ipa_callsite_param_count): Same.
+       (ipa_callsite_param_count_set): Same.
+       (ipa_edges_create): Same.
+       (ipa_edges_free): Same.
+       (ipa_free): Same.
+       (ipa_jf_get_info_type): Same.
+       (ipa_method_compute_modify): Same.
+       (ipa_method_compute_tree_map): Same.
+       (ipa_method_formal_compute_count): Same.
+       (ipa_method_formal_count): Same.
+       (ipa_method_formal_count_set): Same.
+       (ipa_method_get_tree): Same.
+       (ipa_method_modify_print): Same.
+       (ipa_method_tree_print): Same.
+       (ipa_methodlist_init): Same.
+       (ipa_methodlist_not_empty): Same.
+       (ipa_node_create): Same.
+       (ipa_nodes_create): Same.
+       (ipa_nodes_free): Same.
+       (ipa_remove_method): Same.
+       (vec_set_verbosity_level): Same.
+       * tree-ssa-loop.c (tree_linear_transform): Re-enable.
+
+2008-06-19  Jan Hubicka  <jh@suse.cz>
+
+       * gimple.c (gimple_alloc): Annotate with MEM_STAT_INFO
+       (gimple_build_with_ops): Likewise.
+       (gimple_build_assign): Likewise.
+       (gimple_build_assign_with_ops): Likewise.
+       * gimple.h (gimple_build_assign, gimple_build_assign_with_ops):
+       Annotate with MEM_STAT_INFO.
+
+2008-06-17  Jan Hubicka  <jh@suse.cz>
+
+       * config/i386.c (ix86_gimplify_va_arg): Fix sharing issue.
+
+2008-06-17  Jan Hubicka  <jh@suse.cz>
+
+       * gimplify.c (gimplify_modify_expr): Unshare only returned value when
+       want_value is set instead of copying operands all the time.
+
+2008-06-17  Diego Novillo  <dnovillo@google.com>
+
+       * tree-ssa-loop-niter.c (get_val_for): Fix FIXME note.
+
+2008-06-17  Steven Bosscher  <stevenb.gcc@gmail.com>
+
+       * gimple-dummy.c (canonicalize_induction_variables): Remove dummy.
+       (remove_empty_loops): Likewise.
+       (tree_unroll_loops_completely): Likewise.
+       * tree-ssa-loop-ivcanon.c (tree_num_loop_insns): Replace bsi with gsi.
+       (create_canonical_iv): Convert to tuples.
+       (try_unroll_loop_completely): Likewise.
+       (empty_loop_p): Likewise.
+       (remove_empty_loop): Likewise.
+       * tree-ssa-loop.c (tree_ssa_loop_ivcanon): Re-enable.
+       (tree_ssa_empty_loop): Likewise.
+       (tree_complete_unroll): Likewise.
+       (tree_complete_unroll_inner): Likewise.
+
+2008-06-16  Doug Kwan  <dougkwan@google.com>
+
+       * tree-ssa-ifcombine.c (File): Re-enable all previously disabled code.
+       (bb_no_side_effects_p): Tuplify.
+       (same_phi_args_p): Likewise.
+       (get_name_for_bit_test): Likewise.
+       (operand_precision): New.
+       (integral_operand_p): New.
+       (recognize_single_bit_test): Tuplify.
+       (recognize_bits_test): Tuplify.
+       (ifcombine_ifandif): Likewise.
+       (ifcombine_iforif): Likewise.
+       (tree_ssa_ifcombine): Likewise.
+       * passes.c: Re-enable pass_tree_ifcombine.
+
+2008-06-16  Doug Kwan  <dougkwan@google.com>
+
+       * tree-ssa-loop-unswitch.c (File): Re-enable all previously disabled
+       code.
+       (tree_may_unswitch_on):  Tuplify.
+       (simplify_using_entry_checks):  Likewise.
+       (tree_unswitch_single_loop): Likewise.
+       * gimple-dummy.c (tree_ssa_unswitch_loops): Remove dummy.
+       * tree-ssa-loop.c (gate_tree_ssa_loop_unswitch):  Re-enable.
+
+2008-06-16  Steven Bosscher  <steven@gcc.gnu.org>
+
+       * tree-ssa-reassoc.c: Convert to tuples.
+
+2008-06-15  Diego Novillo  <dnovillo@google.com>
+
+       * tree-ssa-live.c (mark_all_vars_used_1): Mark as used
+       the TREE_BLOCK of any _EXPR node.
+
+2008-06-13  Doug Kwan  <dougkwan@google.com>
+
+       * tree-mudflap.c (File): Re-enable previously disabled code.
+       (mf_decl_cache_locals): Tuplify.
+       (mf_build_check_statement_for): Likewise.
+       (mf_xform_derefs_1): Re-enable.
+       (mf_xform_derefs): Tuplify.
+       (execute_mudflap_function_decls): Likewise.
+       (mx_register_decls): Tuplify.  Add a new formal parameter for
+       the location of newly generated statements.  Change function to
+       return modified gimple sequence instead of modifying in-place.
+       (mx_xfn_xform_decls): Tuplify.
+       (mf_xform_decls): Tuplify.
+       * passes.c (init_optimization_passes): Re-enable mudflap passes.
+
+2008-06-13  Jakub Jelinek  <jakub@redhat.com>
+
+       Merge with mainline @136757.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-06-13  Doug Kwan  <dougkwan@google.com>
+
+       * ipa-reference.c (scan_stmt_for_static_refs): Rename walk_subtrees
+       parameter to handled_ops_p and correct bug of using walk_tree callback
+       semantics.
+       * ipa-pure-const.c (scan_function_stmt): Likewise.
+       * tree-inline.c (mark_local_labels_stmt): Rename walk_subtrees
+       parameter to handled_ops_p.
+       (replace_locals_stmt): Likewise.
+
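+       The entry above fixes callbacks that were still using walk_tree's
+       walk_subtrees convention.  In the statement walker the parameter works
+       the other way around: the callback sets handled_ops_p when it has
+       already processed the operands, so the walker skips them.  Below is a
+       hedged, self-contained sketch of that convention; the walker and types
+       are invented for illustration.
+
+         struct stmt;   /* opaque statement type for this sketch */
+
+         typedef void (*stmt_callback) (struct stmt *, int *handled_ops_p,
+                                        void *data);
+
+         static void
+         walk_stmt_operands (struct stmt *s, void *data)
+         {
+           /* Stub: a real walker would visit each operand of S here.  */
+           (void) s;
+           (void) data;
+         }
+
+         static void
+         walk_stmts (struct stmt **stmts, int n, stmt_callback cb, void *data)
+         {
+           int i;
+           for (i = 0; i < n; i++)
+             {
+               int handled = 0;
+               cb (stmts[i], &handled, data);
+               /* Walk the operands only if the callback did not already;
+                  note this is the opposite sense of walk_subtrees.  */
+               if (!handled)
+                 walk_stmt_operands (stmts[i], data);
+             }
+         }
+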
+2008-06-10  Steven Bosscher  <steven@gcc.gnu.org>
+
+       * passes.c (init_optimization_passes): Re-enable pass_uncprop.
+       * tree-ssa-uncprop.c (associate_equivalences_with_edges): Convert
+       to tuples.
+       (uncprop_into_successor_phis): Likewise.
+
+2008-06-09  Diego Novillo  <dnovillo@google.com>
+
+       * ipa-cp.c (cgraph_gate_cp): Fix 'FIXME tuples' note.
+       * cfg.c (compact_blocks): Likewise.
+       * dominance.c (free_dominance_info): Likewise.
+       * gimple-low.c (gimple_stmt_may_fallthru): Likewise.
+       * c-gimplify.c (add_block_to_enclosing): Likewise.
+       * lambda-mat.c: Include tree-flow.h.
+       * lambda-trans.c: Likewise.
+       * Makefile.in (lambda-mat.o, lambda-trans.o): Add dependency
+       on $(TREE_FLOW_H).
+       * builtins.c (expand_builtin_setjmp_receiver): Remove #if 0 markers.
+       (fold_call_stmt): Call gimple_call_va_arg_pack_p.
+       * tree-gimple.h (gimple_bind_expr_stack): Declare.
+       * cgraphunit.c (update_call_expr): Do not try to access operand 0
+       of a FUNCTION_DECL.
+       * tree-ssa-loop-ivopts.c (stmt_invariant_in_loop_p): New.
+       * tree-flow.h (stmt_invariant_in_loop_p): Declare.
+       * gimple-dummy.c (remove_iv): Remove.
+       (gimple_stmt_may_fallthru): Likewise.
+       * ipa-type-escape.c (check_tree): Call DECL_P before testing
+       DECL_INITIAL.
+       (check_assign): Do not access TREE_OPERAND 0 on the RHS
+       operand.
+       * gimplify.c (gimple_bind_expr_stack): New.
+       * gimple.h (struct gimple_statement_bind): Add more comments
+       for field BLOCK.
+       (gimple_cond_set_condition): New.
+       * gimple.c (gimple_cond_set_condition_from_tree): Call it.
+       (gimple_copy_no_def_use): Remove.  Update all users.
+       (gimple_has_side_effects):
+       (gimple_rhs_has_side_effects):
+       * passes.c (init_optimization_passes): Enable
+       pass_release_ssa_names, pass_ipa_type_escape, pass_ipa_pta,
+       pass_ipa_struct_reorg, pass_record_bounds,
+       pass_late_warn_uninitialized and pass_rename_ssa_copies.
+
+       * lambda-code.c: Tuplify.
+       * ipa-struct-reorg.c: Tuplify.
+       * ipa-struct-reorg.h: Tuplify.
+
+2008-06-09  Jakub Jelinek  <jakub@redhat.com>
+
+       * gimplify.c (gimplify_omp_for): Call gimple_omp_for_set_incr
+       with the RHS of the GIMPLE_MODIFY_STMT instead of the
+       GIMPLE_MODIFY_STMT itself.
+       * gimple-pretty-print.c (dump_gimple_omp_for): Adjust for it.
+       * tree-nested.c (walk_gimple_omp_for): Likewise.
+       * omp-low.c (extract_omp_for_data, lower_omp_for): Likewise.
+
+2008-06-09  Aldy Hernandez  <aldyh@redhat.com>
+
+       * tree-ssa-forwprop.c (forward_propagate_into_cond): First argument is
+       a GSI.
+       (forward_propagate_addr_into_variable_array_index): Accept a GSI
+       instead of a gimple statement.  Update accordingly.
+       (forward_propagate_addr_expr_1): Same.
+       (forward_propagate_addr_expr): Pass a GSI to
+       forward_propagate_addr_expr_1.
+       (simplify_not_neg_expr): Argument is a GSI.
+       Adjust accordingly.
+       (tree_ssa_forward_propagate_single_use_vars): Pass GSI to
+       simplify_not_neg_expr and forward_propagate_into_cond.
+
+2008-06-09  Aldy Hernandez  <aldyh@redhat.com>
+
+       * tree.h (IS_CONVERT_EXPR_P): New.
+       (CONVERT_EXPR_P): Use IS_CONVERT_EXPR_P.
+       * tree-ssa-forwprop.c (get_prop_dest_stmt): Convert to tuples.
+       (get_prop_source_stmt): Same.
+       (can_propagate_from): Same.
+       (remove_prop_source_from_use): Same.
+       (combine_cond_expr_cond): Same.
+       (forward_propagate_into_cond): Same.
+       (tidy_after_forward_propagate_addr): Same.
+       (forward_propagate_addr_into_variable_array_index): Same.
+       (forward_propagate_addr_expr_1): Same.
+       (forward_propagate_addr_expr): Same.
+       (forward_propagate_comparison): Same.
+       (simplify_not_neg_expr): Same.
+       (tree_ssa_forward_propagate_single_use_vars): Same.
+       (simplify_gimple_switch): Rename from simplify_switch_expr.
+       (rhs_to_tree): New.
+       (forward_propagate_into_gimple_cond): New.
+       * Makefile.in (tree-ssa-forwprop.o): Depend on GIMPLE_H.
+       * passes.c (init_optimization_passes): Enable pass_forwprop.
+
+2008-06-09  Jakub Jelinek  <jakub@redhat.com>
+
+       * tree.def (OMP_SECTIONS_SWITCH, OMP_ATOMIC_LOAD, OMP_ATOMIC_STORE):
+       Removed.
+       * tree-gimple.c (is_gimple_stmt): Don't handle them.
+       * gimplify.c (gimplify_expr): Likewise.
+       * tree-pretty-print.c (dump_generic_node): Likewise.
+       * tree-ssa-operands.c (get_expr_operands): Likewise.
+       * expr.c (expand_expr_real_1): Likewise.
+       * omp-low.c (expand_omp_atomic_pipeline): Adjust comment.
+
+2008-06-09  Jakub Jelinek  <jakub@redhat.com>
+
+       Merge with mainline @136433.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-06-08  Diego Novillo  <dnovillo@google.com>
+
+       Merge with mainline @136432.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-06-08  Diego Novillo  <dnovillo@google.com>
+
+       * tree-call-cdce.c: Tuplify.
+
+2008-06-06  Diego Novillo  <dnovillo@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-06/msg00353.html
+
+       * gimple.def: Add a third argument to all GIMPLE codes.
+       Update all users.
+       * calls.c (gimple_alloca_call_p): Fix comment.
+       * system.h (CONST_CAST_GIMPLE): Define.
+       * gimple.c (gimple_ops_offset_): Declare.  Fill in from
+       third argument in gimple.def.
+       (gimple_set_stored_syms, gimple_set_loaded_syms): Move
+       from tree-ssa-operands.c
+       (gimple_alloc): Add argument NUM_OPS.  Update all users.
+       Compute size by adding enough space for NUM_OPS operands.
+       (gimple_alloc_ops): Remove.  Update all users.
+       (gimple_assign_set_rhs_from_tree): Change first argument
+       to gimple_stmt_iterator *.  Update all users.
+       Allocate a new statement, if there are more operands that
+       can fit in the old one.
+       (gimple_assign_set_rhs_with_ops): Likewise.
+       (gimple_copy): Fix clearing of stores/loads sets in the
+       copied statement.
+       * gimple.h (struct gimple_statement_base): Remove fields
+       UNUSED_1, UNUSED_2, UNUSED_3.
+       Add fields MODIFIED, HAS_VOLATILE_OPS, REFERENCES_MEMORY_P,
+       UID and NUM_OPS.
+       Re-organize existing fields to eliminate holes on 64 bit
+       hosts.
+       Update all users.
+       (struct gimple_statement_with_ops_base): Define.
+       Factor out struct gimple_statement_with_ops.  Include fields GSBASE,
+       ADDRESSES_TAKEN, DEF_OPS and USE_OPS.
+       (struct gimple_statement_with_ops): Include struct
+       gimple_statement_with_ops_base.
+       Remove field NUM_OPS.
+       (struct gimple_statement_with_memory_ops_base): Define.
+       Factor out of struct gimple_statement_with_memory_ops.
+       Include fields OPBASE, VDEF_OPS, VUSE_OPS, STORES and
+       LOADS.
+       Remove fields HAS_VOLATILE_OPS and REFERENCES_MEMORY_P.
+       (struct gimple_statement_with_memory_ops): Include
+       struct gimple_statement_with_memory_ops_base.
+       (struct gimple_statement_phi): Change size_t fields to
+       unsigned.  Update all users.
+       (struct gimple_statement_asm): Include struct
+       gimple_statement_with_memory_ops_base.
+       Change fields NI and NO to unsigned char.
+       Change field NC to short.
+       Update all users.
+       Add field OP.
+       (struct gimple_statement_change_dynamic_type): Remove.
+       Update all users.
+       (union gimple_statement_d): Rename field WITH_OPS to GSOPS.
+       Rename field WITH_MEM_OPS to GSMEM.
+       Update all users.
+       (gimple_addresses_taken): New.
+       (gimple_addresses_taken_ptr): New.
+       (gimple_set_addresses_taken): New.
+       (gimple_num_ops): Move earlier.
+       (gimple_ops): Use gimple_ops_offset_ to compute address
+       of the operand vector.
+       (gimple_op): Call gimple_ops.
+       (gimple_op_ptr): Likewise.
+       (gimple_set_op): Likewise.
+       (gimple_switch_num_labels): Call gimple_num_ops.
+       (gimple_switch_set_num_labels): Call gimple_set_num_ops.
+       (gimple_switch_set_label): Call gimple_num_ops.
+       (gimple_return_retval_ptr): Likewise.
+       (gimple_return_retval): Likewise.
+       (gimple_return_set_retval): Likewise.
+       (gimple_cdt_new_type): Use operand 1.
+       (gimple_cdt_new_type_ptr): Likewise.
+       (gimple_cdt_set_new_type): Likewise.
+       * tree-cfg.c (gimple_block_ends_with_call_p): Use
+       CONST_CAST_BB.
+       * tree-ssa-operands.c (gimple_set_stored_syms,
+       gimple_set_loaded_syms): Declare.
+       * value-prof.c (gimple_divmod_fixed_value_transform,
+       gimple_mod_pow2_value_transform,
+       gimple_mod_subtract_transform): Change argument to
+       gimple_stmt_iterator *.  Update all users.
+       * tree-ssa-ccp.c (fold_gimple_assign): Change parameter
+       to gimple_stmt_iterator *.  Update all users
+
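+       The gimple_alloc change above sizes each statement by adding space for
+       NUM_OPS operands to the base structure.  A hedged sketch of that
+       trailing-operand-vector allocation follows; the types and field names
+       are illustrative only.
+
+         #include <stdlib.h>
+
+         struct stmt
+         {
+           unsigned code;
+           unsigned num_ops;
+           void *op[1];   /* operand vector allocated past the struct */
+         };
+
+         static struct stmt *
+         stmt_alloc (unsigned code, unsigned num_ops)
+         {
+           struct stmt *s;
+           size_t size = sizeof (struct stmt);
+
+           if (num_ops > 0)
+             size += (num_ops - 1) * sizeof (void *);  /* op[0] already counted */
+
+           s = (struct stmt *) calloc (1, size);
+           s->code = code;
+           s->num_ops = num_ops;
+           return s;
+         }
+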
+2008-06-05  Jakub Jelinek  <jakub@redhat.com>
+
+       * gimplify.c (gimplify_modify_expr_to_memcpy): Use gimplify_arg
+       for gimplification of call arguments.
+       (gimplify_modify_expr_to_memset): Likewise.
+
+2008-06-02  Jakub Jelinek  <jakub@redhat.com>
+
+       PR tree-optimization/36389
+       * gimplify.c (gimplify_cond_expr): If one or both branches are
+       GOTO_EXPRs jumping to LABEL_DECLs, don't create unnecessary
+       extra LABEL_DECLs and jumps around.
+       * tree-cfg.c (remove_useless_stmts_cond): Set may_branch also
+       for GIMPLE_COND stmts.
+       * tree-eh.c (replace_goto_queue_cond_clause): Set label to
+       create_artificial_label () rather than LABEL_EXPR.
+
+2008-05-30  Diego Novillo  <dnovillo@google.com>
+
+       * calls.c (gimple_alloca_call_p): Fix detection of
+       alloca() calls.
+
+2008-05-30  Diego Novillo  <dnovillo@google.com>
+
+       * toplev.c: Include gimple.h.
+       (dump_memory_report): Call dump_gimple_statistics.
+       * Makefile.in (toplev.o): Add dependency on GIMPLE_H.
+       * gimple-low.c (pass_lower_cf): Restore disabled bits.
+       * tree-dfa.c (pass_referenced_vars): Likewise.
+       * cfgexpand.c (pass_expand): Likewise.
+       * tree-outof-ssa.c (pass_del_ssa): Likewise.
+       * gimple.c (gimple_alloc): Fix non-C99 declaration.
+       * gimplify.c (gimplify_function_tree): Remove calls to
+       dump_tree_statistics and dump_gimple_statistics.
+
+2008-05-30  Jakub Jelinek  <jakub@redhat.com>
+
+       * omp-low.c (diagnose_omp_structured_block_errors): Temporarily
+       switch cfun to DECL_STRUCT_FUNCTION (fndecl).
+
+       * omp-low.c (scan_sharing_clauses): Call scan_omp on
+       OMP_CLAUSE_REDUCTION_GIMPLE_{INIT,MERGE} instead of
+       scan_omp_op on OMP_CLAUSE_REDUCTION_{INIT,MERGE}.
+       (lower_rec_input_clauses): Clear
+       OMP_CLAUSE_REDUCTION_GIMPLE_{INIT,MERGE} instead of
+       OMP_CLAUSE_REDUCTION_{INIT,MERGE}.  Call lower_omp
+       on OMP_CLAUSE_REDUCTION_GIMPLE_MERGE before gimple_seq_add_seq
+       to replace all occurrences of placeholder in the seq.
+       * gimplify.c (gimplify_scan_omp_clauses): Clear
+       OMP_CLAUSE_REDUCTION_{INIT,MERGE} after gimplifying it.
+
+       * tree-nested.c (init_tmp_var_with_call, init_tmp_var, save_tmp_var):
+       Only set location if not gsi_end_p.
+       (walk_gimple_omp_for): Avoid adding an empty statement to seq.
+
+       * tree-inline.c (remap_gimple_stmt): Remap body of
+       GIMPLE_OMP_{MASTER,ORDERED,SECTION,SECTIONS,SINGLE}.
+
+2008-05-28  Andrew Pinski  <pinskia@gmail.com>
+
+       * gimple-pretty-print.c (dump_unary_rhs): Handle conversions correctly.
+       Handle PAREN_EXPR, ABS_EXPR, and NEGATE_EXPR.
+
+2008-05-29  Jakub Jelinek  <jakub@redhat.com>
+
+       * omp-low.c (lower_omp_1): Handle regimplification of GIMPLE_ASM.
+       Fix GIMPLE_ASSIGN regimplification.
+       * gimple-pretty-print.c (dump_gimple_omp_sections,
+       dump_gimple_omp_block, dump_gimple_omp_critical): New functions.
+       (dump_gimple_omp_parallel): Formatting.
+       (dump_gimple_stmt): Handle remaining GIMPLE_OMP_* statements.
+
+2008-05-29  Aldy Hernandez  <aldyh@redhat.com>
+
+       * tree-inline.c (remap_gimple_stmt): Handle GIMPLE_OMP_* cases.
+
+2008-05-29  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimple-pretty-print.c (dump_gimple_stmt): Add cases for
+       GIMPLE_OMP_MASTER and GIMPLE_OMP_ORDERED.
+
+2008-05-29  Aldy Hernandez  <aldyh@redhat.com>
+
+       * omp-low.c (expand_omp_parallel): Parse an assignment from an
+       ADDR_EXPR correctly.
+       * gimple-pretty-print.c (dump_gimple_omp_parallel): Print braces when
+       appropriate.
+
+2008-05-29  Jakub Jelinek  <jakub@redhat.com>
+
+       * omp-low.c (lower_omp_1): Fix regimplification of GIMPLE_COND and
+       GIMPLE_CALL.
+
+2008-05-28  Jakub Jelinek  <jakub@redhat.com>
+
+       * gimple.c (gimple_regimplify_operands): Don't call gimple_num_ops
+       twice.  Write regimplified operand to the correct operand slot.
+       * gimplify.c (rhs_predicate_for): No longer static.
+       * tree-gimple.h (rhs_predicate_for): New prototype.
+       * omp-low.c (lower_omp_1): Don't call gimple_regimplify_operands,
+       instead forcefully gimplify_expr each operand with the right
+       predicate.
+
+       * gimple.h (gimple_omp_atomic_store_val_ptr,
+       gimple_omp_atomic_load_lhs_ptr, gimple_omp_atomic_load_rhs_ptr): New
+       inlines.
+       * gimple.c (walk_gimple_op): Handle GIMPLE_OMP_ATOMIC_LOAD and
+       GIMPLE_OMP_ATOMIC_STORE.
+       * omp-low.c (lower_omp_1): Handle GIMPLE_OMP_ATOMIC_LOAD.
+
+       * gimple-pretty-print.c (dump_gimple_omp_for): Don't indent twice
+       before gimple_omp_body, don't emit extra newline after it.
+       (dump_gimple_omp_single): Likewise.
+
+2008-05-27  Aldy Hernandez  <aldyh@redhat.com>
+
+       * tree-nested.c (walk_omp_for): Rename to...
+       (walk_gimple_omp_for): Enable and convert to tuples.
+       (convert_nonlocal_reference_stmt): Enable call to walk_gimple_omp_for.
+       (convert_local_reference_stmt): Same.
+       * gimple.c (walk_gimple_op): Remove fixme note.
+
+2008-05-27  Diego Novillo  <dnovillo@google.com>
+
+       * tree-gimple.c (get_gimple_rhs_class): Handle
+       POLYNOMIAL_CHREC.
+       * tree-cfg.c (verify_types_in_gimple_assign): Do not
+       check every operand against the LHS type for tcc_binary
+       expressions.
+
+2008-05-26  Diego Novillo  <dnovillo@google.com>
+
+       Merge with mainline @135951.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-05-22  Aldy Hernandez  <aldyh@redhat.com>
+
+       * tree-cfg.c (make_edges): Change OMP_SECTION to GIMPLE_OMP_SECTIONS.
+       Update comments.
+       (verify_types_in_gimple_stmt): Update comments.
+       (verify_stmt): Same.
+
+2008-05-21  Andreas Tobler  <a.tobler@schweiz.org>
+
+       * ../configure: Regenerate with the correct autoconf version: 2.59.
+
+2008-05-21  Aldy Hernandez  <aldyh@redhat.com>
+
+       * omp-low.c (lower_reduction_clauses): Generate OMP_ATOMIC_* directly.
+       Concatenate atomic code correctly.
+
+2008-05-20  Aldy Hernandez  <aldyh@redhat.com>
+
+       * omp-low.c (WALK_SUBSTMTS): New.
+       (check_combined_parallel): Walk sub-statements.
+       (diagnose_sb_1): Same.
+       (diagnose_sb_2): Same.
+       Handle switch labels and return statements correctly.
+       * testsuite/gcc.dg/gomp/block-7.c: Adjust for new error message.
+       * testsuite/gcc.dg/gomp/block-2.c: Same.
+       * testsuite/gcc.dg/gomp/block-4.c: Same.
+       * testsuite/gcc.dg/gomp/block-6.c: Same.
+       * testsuite/gcc.dg/gomp/block-8.c: Same.
+       * testsuite/gcc.dg/gomp/block-1.c: Same.
+       * testsuite/gcc.dg/gomp/block-3.c: Same.
+       * testsuite/gcc.dg/gomp/block-5.c: Same.
+
+2008-05-20  Aldy Hernandez  <aldyh@redhat.com>
+
+       * omp-low.c (lower_omp_single): Append to bind variables.
+       (lower_omp_master): Same.
+       (lower_omp_ordered): Same.
+       (lower_omp_critical): Same.
+       * gimplify.c (gimplify_modify_expr_to_memcpy): Make sure we are
+       fully gimplified.
+       (gimplify_omp_for): Same.
+       * gimple.h (gimple_bind_set_vars): New.
+
+2008-05-16  Andreas Tobler  <a.tobler@schweiz.org>
+
+       * config/alpha/alpha.c (alpha_gimplify_va_arg): Adjust arguments.
+       * config/ia64/ia64.c (ia64_gimplify_va_arg): Likewise.
+       * config/pa/pa.c (hppa_gimplify_va_arg_expr): Likewise.
+       * config/m32c/m32c-protos.h (m32c_gimplify_va_arg_expr): Likewise.
+       * config/spu/spu.c (spu_gimplify_va_arg_expr): Likewise.
+       * config/stormy16/stormy16.c (xstormy16_gimplify_va_arg_expr): Likewise.
+       * config/xtensa/xtensa.c (xtensa_gimplify_va_arg_expr): Likewise.
+
+       * config/sparc/sparc.c (sparc_gimplify_va_arg): Adjust arguments.
+
+2008-05-16  Diego Novillo  <dnovillo@google.com>
+
+       * tree-into-ssa.c (rewrite_uses_p): Fix return type.
+       * tree-vectorizer.h (nested_in_vect_loop_p): Add return value.
+       * tree-ssa-pre.c (execute_pre): Add return value.
+
+2008-05-15  Aldy Hernandez  <aldyh@redhat.com>
+
+       * config/s390/s390.c (s390_gimplify_va_arg): Adjust for tuples.
+
+2008-05-15  Diego Novillo  <dnovillo@google.com>
+
+       * Makefile.in (STRICT_WARN): Revert to mainline behaviour.
+       (builtins.o-warn, expr.o-warn, dse.o-warn,
+       ebitmap.o-warn, lower-subreg.o-warn, tree-chrec.o-warn,
+       tree-ssa-structalias.o-warn, varasm.o-warn): Remove.
+       * config/arm/arm.c (arm_return_in_memory): Fix return type.
+       * config/arm/arm-protos.h (arm_return_in_memory): Likewise.
+
+2008-05-15  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimplify.c (gimplify_omp_for): Handle a MODIFY_EXPR in
+       gimple_omp_for_incr.
+       * cfgexpand.c (gimple_to_tree): Do not die on compile errors.
+
+2008-05-15  Aldy Hernandez  <aldyh@redhat.com>
+
+       * omp-low.c (check_omp_nesting_restrictions): Fetch clauses from for.
+       (expand_omp_atomic): Parse GIMPLE_OMP_ATOMIC correctly.
+       (lower_omp_single_simple): Create GIMPLE_COND with both tlabel and
+       flabel.
+       (lower_omp_for): Make sure we have a body before look inside.
+       * gimple-low.c (lower_stmt): Add case for GIMPLE_OMP_SECTIONS_SWITCH.
+       * gimple-pretty-print.c (dump_gimple_omp_single): New.
+       (dump_gimple_stmt): Add case for GIMPLE_OMP_SINGLE.
+       * gimplify.c (gimplify_omp_workshare): Remove fixme.  Enable code.
+       * gimple.c (gss_for_code): Add case for GIMPLE_OMP_{RETURN,
+       SECTIONS_SWITCH}.
+       (gimple_regimplify_operands):  Do not regimplify GIMPLE_ASM
+       operands.  Do not look inside empty operands.
+       * gimple.h (is_gimple_omp): Fix typo for GIMPLE_OMP_ATOMIC*.
+       * tree-cfg.c (make_edges): Rename OMP_SECTION to GIMPLE_OMP_SECTION.
+
+2008-05-14  Aldy Hernandez  <aldyh@redhat.com>
+
+       * omp-low.c (optimize_omp_library_calls): Make sure statement is
+       a GIMPLE_CALL before we look inside of it.
+       * tree-cfg.c (move_stmt_op): Exit gracefully.
+
+2008-05-13  Aldy Hernandez  <aldyh@redhat.com>
+
+       * omp-low.c (scan_omp_op): Remove walk_subtrees.  Call walk_tree.
+       (scan_omp_for): Scan OMP body.
+       (scan_omp_1_stmt): Set handled_ops_p.
+       (expand_omp_parallel): Parse ADDR_EXPR correctly.
+       (diagnose_sb_1): Rename walk_subtrees to handled_ops_p and set
+       appropriately.
+       (diagnose_sb_2): Same.
+       * gimple-pretty-print.c (dump_gimple_omp_for): Print braces around
+       OMP body.
+       * tree-inline.c (estimate_num_insns): GIMPLE_OMP_CONTINUE does not
+       have a body.
+       * tree-cfg.c (move_stmt_op): Parse move_stmt_d out of data correctly.
+       (move_stmt_r): Rename walk_subtrees to handled_ops_p and set
+       appropriately.
+
+2008-05-12  Diego Novillo  <dnovillo@google.com>
+
+       Merge with mainline @135126.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-05-11  Doug Kwan  <dougkwan@google.com>
+
+       * gimplify.c (gimple_pop_condition): Clear
+       conditional_cleanups field after the associated gimple sequence has
+       been freed implicitly.
+       (gimplify_cleanup_point_expr): Clear conditional_cleanups field
+       of gimplify_ctxp after resetting the conditions field.
+       * gimple.h (gimple_call_return_type): Handle REFERENCE_TYPE like
+       POINTER_TYPE. 
+
+2008-05-10  Kaz Kojima  <kkojima@gcc.gnu.org>
+
+       * config/sh/sh.c (sh_gimplify_va_arg_expr): Change pre_p and
+       post_p types to gimple_seq *.
+
+2008-05-09  Aldy Hernandez  <aldyh@redhat.com>
+
+       * omp-low.c (maybe_catch_exception): Return body when no exceptions.
+       (gate_lower_omp): Enable pass.
+       * gimple-low.c (lower_stmt): Add GIMPLE_OMP_ATOMIC_* cases.
+
+2008-05-08  Aldy Hernandez  <aldyh@redhat.com>
+
+       * omp-low.c (extract_omp_for_data): Update comment.
+       (get_ws_args_for): Same.
+       (lower_send_shared_vars): Same.
+       (expand_omp_parallel): Same.
+       (expand_omp_for_static_nochunk): Same.
+       (expand_omp_for_static_chunk): Same.
+       (expand_omp_sections): Same.
+       (expand_omp_atomic_fetch_op): Same.
+       (expand_omp_atomic_pipeline): Same.
+       (build_omp_regions_1): Same.
+       (lower_omp_for): Same.
+       (expand_omp_atomic_mutex): Change OMP_ATOMIC_STORE to
+       GIMPLE_OMP_ATOMIC_STORE.
+       * gimple-pretty-print.c (dump_gimple_omp_parallel): Print child fn
+       and data_arg.
+       * tree-cfg.c (make_edges): Enable commented out code and convert
+       to tuples.
+       (replace_by_duplicate_decl): Same.
+       (replace_ssa_name): Same.
+       (move_stmt_r): Same.
+       (move_stmt_op): New.
+       (mark_virtual_ops_in_bb): Enable and convert to tuples.
+       (mark_virtual_ops_in_region): Same.
+       (move_block_to_fn): Convert to tuples.
+       (find_outermost_region_in_block): Enable and convert to tuples.
+       (move_sese_region_to_fn): Same.
+
+2008-05-05  Aldy Hernandez  <aldyh@redhat.com>
+
+       * omp-low.c (expand_omp_parallel): Remove fixmes.
+       (expand_omp_for_static_chunk): Enable and tuplify code.
+       (expand_omp_sections): Remove fixmes.  Tuplify.
+       (lower_omp_sections): Same.
+       (diagnose_sb_0): Remove fixme.
+       * tree-ssa.c (redirect_edge_var_map_dup): Fix typo in comment.
+       * gimple.c (gimple_size): Add case for GIMPLE_OMP_SECTIONS_SWITCH.
+       (gimple_build_omp_sections_switch): New.
+       * gimple.h (gimple_build_omp_sections_switch): New prototype.
+
+2008-05-03  Aldy Hernandez  <aldyh@redhat.com>
+
+       * omp-low.c (diagnose_sb_0): Tuplify and enable.
+       (diagnose_sb_1): Same.
+       (diagnose_sb_2): Same.
+       (diagnose_omp_structured_block_errors): Tuplify.
+       * gimple-dummy.c (DUMMY_FN): Remove
+       diagnose_omp_structured_block_errors.
+       * c-decl.c (c_gimple_diagnostics_recursively): Remove fixme
+       and enable call to diagnose_omp_structured_block_errors.
+       * Makefile.in (GTFILES): Add omp-low.c again.
+
+2008-05-02  Rafael Espindola  <espindola@google.com>
+
+       * tree-gimple.c (is_gimple_condexpr): Do not allow
+       trapping comparisons.
+       * tree-eh.c (tree_could_trap_p): Fix handling of floating
+       point comparisons.
+
+2008-05-02  Doug Kwan  <dougkwan@google.com>
+
+       * value-prof.c (gimple_divmod_fixed_value): Remove formal parameters
+       OPERATION, OP1 and OP2 and derive their values from parameter STMT
+       instead.  Update prototype and caller.
+       (gimple_mod_pow2): Remove formal parameters OPERATION, OP1 and OP2
+       and derive their values from parameter STMT instead.  Update prototype
+       and caller.
+       (gimple_mod_pow2_value_transform): Remove temporaries OP, OP1 and OP2.
+       Use a new temporary LHS_TYPE to store assignment LHS type.
+       (gimple_mod_subtract): Remove formal parameters OPERATION, OP1 and OP2
+       and derive their values from parameter STMT instead.  Update prototype
+       and caller.  Fix a bug in a call to gimple_build_assign_with_ops.
+       (gimple_mod_subtract_transform): Remove temporaries OP, OP1 and OP2.
+       Use a new temporary LHS_TYPE to store assignment LHS type.
+       (gimple_ic, gimple_indirect_call_to_profile): Fix bug in tree-code
+       tests.
+       * tree-profile.c (File): Re-enable all previously disabled code.
+       (tree_gen_edge_profiler): Tuplify.
+       (prepare_instrumented_value): Ditto.
+       (tree_gen_interval_profiler): Ditto.
+       (tree_gen_pow2_profiler): Ditto.
+       (tree_gen_one_value_profiler): Ditto.
+       (tree_gen_ic_profiler): Ditto.
+       (tree_gen_ic_func_profiler): Ditto.
+       (tree_gen_const_delta_profiler): Re-format formal parameters for
+       proper alignment.
+       (tree_gen_average_profiler): Tuplify.
+       (tree_gen_ior_profiler): Ditto.
+       (do_tree_profiling): Re-enable previously disabled code.  Remove
+       FIXME.
+       (tree_profiling): Ditto.
+       * gimple.c (gimple_set_bb):  Remove assertion.
+       * tree-cfg.c (change_bb_for_stmt): Remove.  Redirect callers to
+       gimple_set_bb.
+       (gimple_merge_blocks): Call gimple_set_bb instead of
+       change_bb_for_stmt.
+       (gimple_split_block): Ditto.
+       (verify_stmts): Add code to check that label_to_block_map and labels
+       are consistent.
+
+2008-04-22  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @134843.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-05-01  Diego Novillo  <dnovillo@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-05/msg00053.html
+
+       * tree-vrp.c (vrp_visit_phi_node): Cast variable I to int
+       for printing.
+       * cgraph.c (cgraph_release_function_body): Only call
+       gimple_set_body if NODE->DECL has a struct function.
+       * tree.c (make_node_stat): Do not call gimple_set_body.
+       * cp/Make-lang.in (cp/semantics.o): Add dependency on
+       $(GIMPLE_H).
+       * cp/semantics.c: Include gimple.h.
+       (expand_or_defer_fn): Assert that FN has a gimple body.
+       * function.h (struct function): Add field GIMPLE_BODY.
+       * gimple.c (gimple_bodies_vec): Remove.
+       (gimple_bodies_map): Remove.
+       (gimple_set_body): Re-write to use GIMPLE_BODY in FN's
+       function object.
+       (gimple_body): Likewise.
+
+2008-05-01  Oleg Ryjkov  <olegr@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-05/msg00053.html
+
+       * tree-eh.c (record_in_goto_queue, record_in_goto_queue_label):
+       New functions.
+       (maybe_record_in_goto_queue): Refactored and added support for
+       recording labels embedded in GIMPLE_CONDs.
+       (lower_catch, lower_cleanup): Fix 3 typos that were introduced
+       during the conversion to tuples.
+       (lower_eh_constructs_2): Call maybe_record_in_goto_queue for
+       GIMPLE_CONDs.
+
+2008-05-01  Rafael Espindola <espindola@google.com>
+
+       * gimple-pretty-print.c (dump_gimple_try): Print like trunk does.
+       (dump_gimple_catch): Print like trunk does.
+
+2008-05-01  Rafael Espindola <espindola@google.com>
+
+       * passes.c (init_optimization_passes): Enable remaining
+       pass_rename_ssa_copies passes.
+
+2008-05-01  Rafael Espindola <espindola@google.com>
+
+       * tree-cfg.c (gimple_verify_flow_info): Handle switches with only the
+       default label.
+
+2008-04-30  Doug Kwan  <dougkwan@google.com>
+
+       * cfgexpand.c (gimple_to_tree): Change code to annotate EH region
+       numbers only if numbers are greater than zero.  Also propagate EH
+       region number to CALL_EXPRs nested in assignments.
+
+2008-04-29  Doug Kwan  <dougkwan@google.com>
+
+       * tree-eh.c (lower_try_finally_dup_block): Call
+       copy_gimple_seq_and_replace_locals instead of gimple_seq_copy.
+       (optimize_double_finally): Add a note about replacing unsave_expr_now
+       with copy_gimple_seq_and_replace_locals.
+       * tree-inline.c (mark_local_labels_stmt, replace_locals_op,
+       replace_locals_stmt, copy_gimple_seq_and_replace_locals): New.
+       * tree-inline.h (copy_gimple_seq_and_replace_locals): New prototype.
+
+2008-04-29  Rafael Espindola <espindola@google.com>
+
+       * gimple-pretty-print.c (dump_gimple_return): Add missing space.
+       * tree-ssa-threadedge.c (simplify_control_stmt_condition): Fix type
+       of variable.
+
+2008-04-29  Rafael Espindola <espindola@google.com>
+
+       * gimple-pretty-print.c (pp_cfg_jump): Add missing ";".
+
+2008-04-29  Rafael Espindola <espindola@google.com>
+
+       * gimple-pretty-print.c (dump_gimple_assign): Print ";" at the end.
+       (dump_gimple_return): Print ";" at the end.
+       (dump_gimple_call): Print ";" at the end.
+       (dump_gimple_cond): Use op_symbol_code instead of tree_code_name.
+       (pp_cfg_jump): Print ";" at the end.
+
+2008-04-29  Rafael Espindola <espindola@google.com>
+
+       * ipa-cp.c (ipcp_driver): Disable.
+       * matrix-reorg.c (matrix_reorg): Comment body.
+       (gate_matrix_reorg): Disable.
+       * passes.c (init_optimization_passes): Enable first pass_merge_phi,
+       pass_ipa_cp and pass_ipa_matrix_reorg.
+
+2008-04-29  Doug Kwan  <dougkwan@google.com>
+
+       * tree-eh.c (lower_catch): Fix bug of accessing sub-statements
+       using gimple_catch_handler.  Fix bug of mixing up GIMPLE_GOTO and
+       GIMPLE_LABEL in statement building.
+       (lower_cleanup): Fix bug of mixing up gimple_try_clean and
+       gimple_try_eval.
+       (lower_cleanup): Use gimple codes instead of tree codes in switch
+       statement.
+       * tree-cfg.c: Add code to generate EH edges for GIMPLE_ASSIGN.
+
+2008-04-28  Diego Novillo  <dnovillo@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-04/msg02051.html
+
+       * tree-ssa-phiprop.c (phiprop_insert_phi): Remove
+       OLD_ARG_CODE.  Use TREE_CODE (ARG) instead.
+       Assert that NEW_VAR is a GIMPLE register.
+       (propagate_with_phi): Fix test of is_gimple_reg_type for
+       ARG.
+       Do not set subcode of USE_STMT to NOP_EXPR.
+
+2008-04-28  Doug Kwan  <dougkwan@google.com>
+
+       * tree-inline.c (remap_gimple_op_r): Remove code to handle RESX_EXPR
+       region number remapping.
+       (remap_gimple_stmt): Add code to handle GIMPLE_RESX region number
+       remapping.
+
+2008-04-28  Rafael Espindola <espindola@google.com>
+
+       * cfgexpand.c (gimple_to_tree): Add support for switch stmts without
+       a default label.
+       * fold-const.c (tree_call_nonnegative_warnv_p): Remove the code
+       argument.
+       (tree_invalid_nonnegative_warnv_p): Update call to
+       tree_call_nonnegative_warnv_p.
+       * gimple.h (gimple_location_ptr): New.
+       * tree-flow.h (simplify_stmt_using_ranges): Change signature.
+       * tree-ssa-propagate.c (substitute_and_fold): Call
+       simplify_stmt_using_ranges.
+       * tree-vrp.c (struct assert_locus_d): Tuplify.
+       (switch_update): Tuplify.
+       (stmt_overflow_infinity): New.
+       (gimple_assign_nonnegative_warnv_p): New.
+       (gimple_call_nonnegative_warnv_p): New.
+       (gimple_stmt_nonnegative_warnv_p): New.
+       (gimple_assign_nonzero_warnv_p): New.
+       (gimple_stmt_nonzero_warnv_p): New.
+       (vrp_stmt_computes_nonzero): Tuplify.
+       (extract_range_basic): Tuplify.
+       (extract_range_from_expr): Tuplify. Rename to
+       extract_range_from_assignment.
+       (adjust_range_with_scev): Tuplify.
+       (vrp_var_may_overflow): Tuplify.
+       (build_assert_expr_for): Tuplify.
+       (fp_predicate): Tuplify.
+       (infer_value_range): Tuplify.
+       (dump_asserts_for): Tuplify.
+       (register_new_assert_for): Tuplify.
+       (register_edge_assert_for_2): Tuplify.
+       (register_edge_assert_for_1): Tuplify.
+       (register_edge_assert_for): Tuplify.
+       (find_conditional_asserts): Tuplify.
+       (find_switch_asserts): Tuplify.
+       (find_assert_locations): Tuplify.
+       (process_assert_insertions_for): Tuplify.
+       (process_assert_insertions): Tuplify.
+       (check_array_ref): Tuplify.
+       (search_for_addr_array): Tuplify.
+       (check_array_bounds): Tuplify.
+       (check_all_array_refs): Tuplify.
+       (remove_range_assertions): Tuplify.
+       (stmt_interesting_for_vrp): Tuplify.
+       (vrp_initialize): Tuplify.
+       (vrp_visit_assignment): Tuplify. Rename to vrp_visit_assignment_or_call.
+       (vrp_visit_cond_stmt): Tuplify.
+       (find_case_label_index): Tuplify.
+       (find_case_label_range): Tuplify.
+       (vrp_visit_switch_stmt): Tuplify.
+       (vrp_visit_stmt): Tuplify.
+       (vrp_visit_phi_node): Tuplify.
+       (simplify_div_or_mod_using_ranges): Tuplify.
+       (simplify_abs_using_ranges): Tuplify.
+       (simplify_cond_using_ranges): Tuplify.
+       (simplify_switch_using_ranges): Tuplify.
+       (simplify_stmt_using_ranges): Tuplify.
+       (simplify_stmt_for_jump_threading): Tuplify.
+       (identify_jump_threads): Tuplify.
+       (execute_vrp): Tuplify.
+       (gate_vrp): Enable.
+       * tree.h (tree_call_nonnegative_warnv_p): Remove the code argument.
+
+2008-04-28  Doug Kwan  <dougkwan@google.com>
+
+       * cp/cp-gimplify.c (finish_bc_block):  Tuplify.
+       (build_bc_goto): Renamed to get_bc_label.  Return a label
+       only.  Update callers.
+       (get_bc_label): New.
+       (gimplify_cp_loop): Tuplify.  Also check COND for error_mark_node
+       before gimplifying it.
+       (gimplify_for_stmt): Tuplify.
+       (gimplify_while_stmt): Tuplify.
+       (gimplify_for_stmt): Tuplify.
+       (gimplify_do_stmt): Tuplify.
+       (gimplify_switch_stmt): Tuplify.
+       (cp_gimplify_omp_switch_stmt): Add temporary code to pop block
+       label stack.
+       (cp_gimplify_expr): Pass pre_p to gimplify_while_stmt,
+       gimplify_do_stmt and gimplify_switch_stmt.  Tuplify
+       gimplification of CONTINUE_STMT and BREAK_STMT.
+
+2008-04-26  Rafael Espindola <espindola@google.com>
+
+       * gimple.c (gimple_build_assign_with_ops): Don't set SSA_NAME_DEF_STMT.
+       * gimple.h (gimple_assign_set_lhs): Set SSA_NAME_DEF_STMT.
+       (gimple_call_set_lhs): Set SSA_NAME_DEF_STMT.
+       * omp-low.c (expand_parallel_call): Don't set SSA_NAME_DEF_STMT if not
+       needed.
+       (expand_omp_for_generic): Same.
+       (expand_omp_for_static_nochunk): Same.
+       (expand_omp_for_static_chunk): Same.
+       (expand_omp_sections): Same.
+       (expand_omp_atomic_mutex): Same.
+       * predict.c (strip_builtin_expect): Same.
+       * tree-cfg.c (gimple_merge_blocks): Same.
+       * tree-inline.c (remap_ssa_name): Same.
+       (setup_one_parameter): Same.
+       * tree-predcom.c (replace_ref_with): Same.
+       (replace_ref_with): Same.
+       (initialize_root_vars_lm): Same.
+       (reassociate_to_the_same_stmt): Same.
+       * tree-scalar-evolution.c (scev_const_prop): Same.
+       * tree-ssa-loop-im.c (rewrite_reciprocal): Same.
+       * tree-ssa-loop-ivopts.c (rewrite_use_nonlinear_expr): Same.
+       * tree-ssa-loop-manip.c (create_iv): Same.
+       * tree-ssa-math-opts.c (execute_cse_sincos_1): Same.
+       * tree-ssa-phiopt.c (minmax_replacement): Same.
+       (abs_replacement): Same.
+
+2008-04-25  Diego Novillo  <dnovillo@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-04/msg01965.html
+
+       * gimplify.c (annotate_all_with_location_after): New.
+       (gimplify_expr): Call it.
+
+2008-04-25  Diego Novillo  <dnovillo@google.com>
+
+       Merge with mainline @134692.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-04-25  Doug Kwan  <dougkwan@google.com>
+
+       * tree-inline.c (expand_call_inline):  Save old call statement
+       and use it to look up correct EH region.
+
+2008-04-24  Doug Kwan  <dougkwan@google.com>
+
+       * Makefile.in (STRICT_WARN): Remove -Wno-format and
+       -Wno-missing-format-attribute.
+       * gimplify.c (gimple_pop_condition): Remove redundant and incorrect
+       gimple_seq_free.
+       (gimplify_init_ctor_eval_range): Add a fall-through label for
+       GIMPLE_COND statement.
+
+2008-04-25  Rafael Espindola <espindola@google.com>
+
+       * tree-ssa-dom.c (avail_expr_eq): Return false if the hashes don't
+       match.
+
+2008-04-24  Oleg Ryjkov  <olegr@google.com>
+
+       * gimplify.c (gimplify_expr): Fix the assertion that verifies validity
+       of parameters.
+       * tree-inline.c (estimate_num_insns): Handle
+       GIMPLE_CHANGE_DYNAMIC_TYPE.
+       * tree-cfg.c (verify_types_in_gimple_stmt): Likewise.
+
+2008-04-24  Rafael Espindola <espindola@google.com>
+
+       * tree-ssa-dom.c (initialize_hash_element): Fix the type of the code
+       variable.
+
+2008-04-23  Rafael Espindola <espindola@google.com>
+
+       * omp-low.c (gate_lower_omp): Return 0.
+       * passes.c (init_optimization_passes): Enable all passes whose
+       corresponding dump options are used in the testsuite.
+       * tree-loop-distribution.c (tree_loop_distribution): Comment body.
+       (gate_tree_loop_distribution): Return 0.
+       * tree-ssa-forwprop.c (tree_ssa_forward_propagate_single_use_vars):
+       Comment body.
+       (gate_forwprop): Return 0.
+       * tree-ssa-loop.c (gate_tree_ssa_loop_unswitch): Return 0.
+       (gate_tree_vectorize): Return 0.
+       (gate_tree_ssa_loop_ivcanon): Return 0.
+       (tree_ssa_empty_loop): Return 0.
+       (gate_tree_complete_unroll): Return 0.
+       * tree-ssa-pre.c (gate_fre): Return 0.
+       * tree-ssa-reassoc.c (execute_reassoc): Comment body.
+       (gate_tree_ssa_reassoc): Return 0.
+       * tree-stdarg.c (gate_optimize_stdarg): Return 0.
+       (execute_optimize_stdarg): Comment body.
+       * tree-vrp.c (execute_vrp): Comment body.
+       (gate_vrp): Return 0.
+
+2008-04-22  Aldy Hernandez  <aldyh@redhat.com>
+
+       * omp-low.c (lower_omp_parallel): Add new argument to
+       gimple_omp_parallel_set_combined_p.
+       (lower_omp_1): Remove debugging code.
+       * gimple.h (gimple_omp_parallel_combined_p): Add new argument.
+
+2008-04-22  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @134552.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-04-21  Doug Kwan  <dougkwan@google.com>
+
+       * tree-ssa-phiopt.c (FILE): Uncomment all previously disabled code.
+       (tree_ssa_phiopt): Remove FIXME and re-enable code.
+       (tree_ssa_cs_elim): Remove FIXME and re-enable code.
+       (tree_ssa_phiopt_worker): Tuplify.
+       (replace_phi_edge_with_variable): Tuplify.
+       (conditional_replacement): Tuplify and simplify optimization logic.
+       Construct a boolean tree and let fold and force_gimple_operand_gsi
+       do optimization and code generation.
+       (value_replacement): Tuplify.
+       (minmax_replacement): Tuplify.
+       (abs_replacement): Tuplify.
+       (nt_init_block): Tuplify.
+       (cond_store_replacement): Tuplify.
+       * gimple.h (gimple_seq_singleton_p): Fix empty sequence bug.
+       * passes.c (init_optimization_passes): Re-enable pass_cselim
+       and pass_phiopt.
+
+2008-04-21  Diego Novillo  <dnovillo@google.com>
+
+       * tree.c (make_node_stat): Clear gimple_body() for newly
+       created FUNCTION_DECLs.
+       * tree-gimple.c (rhs_predicate_for): Move to gimplify.c.
+       * tree-gimple.h (rhs_predicate_for): Remove declaration.
+       * gimple-pretty-print.c (dump_gimple_assign): Add support
+       for showing volatile operands.
+       (dump_gimple_call): Likewise.
+       Add support for showing __builtin_va_arg_pack, static
+       chains, return slot optimized and tail calls.
+       (dump_gimple_phi): Remove code to print memory symbols.
+       * gimplify.c (is_gimple_formal_tmp_or_call_rhs): New.
+       (is_gimple_mem_or_call_rhs): New.
+       (rhs_predicate_for): Call them.
+       (internal_get_tmp_var): Use is_gimple_formal_tmp_or_call_rhs as
+       the gimplification predicate.
+       Use the last statement in *PRE_P to get the temporary to
+       be updated when in SSA form.
+       (gimplify_bind_expr): Clear out *EXPR_P before returning.
+       (gimplify_call_expr): Do not build a GIMPLE_CALL if
+       WANT_VALUE is true.
+       Call gimple_build_call_from_tree if WANT_VALUE is false.
+       Remove local variable ARGS.
+       (gimplify_modify_expr): If after gimplification *FROM_P
+       is a CALL_EXPR, create a GIMPLE_CALL instead of a
+       GIMPLE_ASSIGN.
+       Document why the gimplification of the RHS should accept
+       CALL_EXPRs.
+       (gimplify_expr): Document where the generated statement
+       is stored.
+       Accept is_gimple_formal_tmp_or_call_rhs and
+       is_gimple_mem_or_call_rhs as gimplification predicates.
+       When gimplifying statements, clear out *EXPR_P before
+       returning.
+       When generating an rvalue, call is_gimple_formal_tmp_or_call_rhs
+       to test *EXPR_P.
+       * tree-dfa.c (mark_symbols_for_renaming): Remove
+       ATTRIBUTE_UNUSED.
+       * tree-flow.h (stmt_references_memory_p): Remove.
+       * gimple.c (gimple_build_call_from_tree): New.
+       * gimple.h (struct gimple_statement_with_memory_ops): Add
+       bitfield references_memory_p.
+       (gimple_build_call_from_tree): Declare.
+       (gimple_references_memory_p): Rename from
+       stmt_references_memory_p.  Move here.  Update all users.
+       (gimple_set_references_memory): New.
+       (gimple_assign_set_rhs1): When the assignment has more
+       than one operand on the RHS, assert that the operands are
+       gimple values.
+       (gimple_assign_set_rhs2): Assert that the operand is a
+       gimple value.
+       (gimple_call_set_return_slot_opt): Fix mask clearing.
+       (gimple_call_set_from_thunk): Likewise.
+       (gimple_call_set_va_arg_pack): Likewise.
+       * tree-cfg.c (dump_function_to_file): Do not indent when
+       doing a GIMPLE dump.
+       * tree-ssa-operands.c (add_virtual_operand): Call
+       gimple_set_references_memory.
+       (get_addr_dereference_operands): Likewise.
+       (get_tmr_operands): Likewise.
+       (maybe_add_call_clobbered_vops): Likewise.
+       (get_asm_expr_operands): Likewise.
+       (parse_ssa_operands): Likewise.
+       (build_ssa_operands): Likewise.
+       (stmt_references_memory_p): Remove.
+
+2008-04-21  Rafael Espindola <espindola@google.com>
+
+       Cherry pick http://gcc.gnu.org/ml/gcc-patches/2008-04/msg01213.html
+
+       * params.def (PARAM_MAX_FIELDS_FOR_FIELD_SENSITIVE): Set default
+       to zero, thus disable creation of SFTs.
+       * gcc.dg/tree-ssa/salias-1.c: Remove.
+       * gcc.dg/tree-ssa/pr26421.c: Adjust pattern.
+       * gcc.dg/tree-ssa/alias-15.c: Likewise.
+       * gcc.dg/tree-ssa/ssa-lim-3.c: Run at -O2.
+
+2008-04-20  Zdenek Dvorak  <ook@ucw.cz>
+
+       * passes.c (init_optimization_passes): Enable pass_dce_loop.
+
+2008-04-20  Zdenek Dvorak  <ook@ucw.cz>
+
+       * tree-data-ref.c (split_constant_offset_1): Use POINTER_PLUS_EXPR
+       for pointer addition.
+       (split_constant_offset): Set VAR to EXP before conversions are
+       stripped, when no offset is removed.  Handle chrec_dont_know.
+       * tree-predcom.c: Tuplified.
+       * passes.c (init_optimization_passes): Enable pass_predcom.
+
+2008-04-18  Aldy Hernandez  <aldyh@redhat.com>
+
+       * omp-low.c (lower_rec_input_clauses): Remove fixme and
+       ATTRIBUTE_UNUSED.
+       (lower_lastprivate_clauses): Same.
+       (lower_reduction_clauses): Same.
+       (lower_copyprivate_clauses): Same.
+       (lower_send_clauses): Same.
+       (lower_send_shared_vars): Same.
+       (maybe_catch_exception): Convert to tuples.
+       (lower_omp_sections): Same.
+       (lower_omp_single_simple): Same.
+       (lower_omp_single_copy): Same.
+       (lower_omp_single): Same.
+       (lower_omp_master): Same.
+       (lower_omp_ordered): Same.
+       (lower_omp_critical): Same.
+       (lower_omp_for_lastprivate): Same.
+       (lower_omp_for): Same.
+       (check_combined_parallel): Same.
+       (lower_omp_parallel): Same.
+       (lower_omp_1): Same.
+       (execute_lower_omp): Enable.
+       * gimple-dummy.c: Remove dummy functions for lower_omp_*.
+       * gimple-low.c (lower_omp_directive): Convert to tuples.
+       (lower_stmt): Remove fixme.
+       * gimple.h (gimple_seq_alloc_with_stmt): New.
+       (gimple_omp_section_set_last): New.
+       (gimple_omp_parallel_set_combined_p): New.
+       (gimple_bind_add_stmt): New.
+       (gimple_bind_add_seq): New.
+       * tree-cfg.c (verify_node_sharing): Fix typo in comment.
+
+2008-04-17  Oleg Ryjkov  <olegr@google.com>
+
+       * Reverting forwprop patch.
+
+       2008-04-16  Oleg Ryjkov  <olegr@google.com>
+
+               * tree-ssa-forwprop.c (get_prop_dest_stmt, get_prop_source_stmt,
+               can_propagate_from, remove_prop_source_from_use,
+               tidy_after_forward_propagate_addr,
+               forward_propagate_addr_into_variable_array_index,
+               forward_propagate_addr_expr_1, forward_propagate_addr_expr,
+               forward_propagate_comparison, simplify_not_neg_expr,
+               simplify_switch_expr,
+               tree_ssa_forward_propagate_single_use_variables): Tuplified.
+               (forward_propagate_into_cond): Tuplified and moved some functionality
+               into forward_propagate_into_cond_gimple.
+               (rhs_to_tree, forward_propagate_into_cond_gimple): New functions.
+               * passes.c (init_optimization_passes): Enabled pass_forwprop.
+               * tree-cfg.c (find_taken_edge_cond_expr): Fixed comment.
+
+2008-04-16  Oleg Ryjkov  <olegr@google.com>
+
+       * tree-ssa-forwprop.c (get_prop_dest_stmt, get_prop_source_stmt,
+       can_propagate_from, remove_prop_source_from_use,
+       tidy_after_forward_propagate_addr,
+       forward_propagate_addr_into_variable_array_index,
+       forward_propagate_addr_expr_1, forward_propagate_addr_expr,
+       forward_propagate_comparison, simplify_not_neg_expr,
+       simplify_switch_expr,
+       tree_ssa_forward_propagate_single_use_variables): Tuplified.
+       (forward_propagate_into_cond): Tuplified and moved some functionality
+       into forward_propagate_into_cond_gimple.
+       (rhs_to_tree, forward_propagate_into_cond_gimple): New functions.
+       * passes.c (init_optimization_passes): Enabled pass_forwprop.
+       * tree-cfg.c (find_taken_edge_cond_expr): Fixed comment.
+
+2008-04-16  Doug Kwan  <dougkwan@google.com>
+
+       * Makefile.in (STRICT_WARN): Disable -Wmissing-format-attribute
+       and -Wformat temporarily for bootstrapping.
+       * lambda-code.c (invariant_in_loop_and_outer_loops): Comment out
+       to avoid defined-and-not-used warning.
+       * tree-cfg.c (gather_blocks_in_sese_region): Comment out to avoid
+       defined-and-not-used warning.
+
+2008-04-16  Doug Kwan  <dougkwan@google.com>
+
+       * Makefile.in (GTFILES): Add back ipa-reference.h and ipa-reference.c.
+       * gimple-dummy.c (memory_identifier_string): Remove.
+       * ipa-cp.c (constant_val_insert): Tuplify.
+       * ipa-prop.c (File): Uncomment all previously disabled code.
+       (ipa_method_modify_stmt): Tuplify.
+       (ipa_method_compute_modify): Tuplify.
+       (ipa_callsite_tree): Renamed to ipa_callsite_stmt.  Update callers.
+       (ipa_callsite_stmt): New.
+       (ipa_callsite_compute_count): Tuplify.
+       (ipa_callsite_compute_param): Tuplify.
+       * ipa-reference.c (File): Uncomment all previously disabled code.
+       (get_asm_stmt_operands): Tuplify.
+       (check_call): Tuplify.  Also add code to handle assignment of
+       returned value.
+       (scan_for_static_refs): Remove.  Tuplify body and split into two
+       new functions scan_stmt_for_static_refs and scan_op_for_static_refs.
+       Update callers.
+       (scan_stmt_for_static_refs): New.  Split from scan_for_static_refs.
+       (scan_op_for_static_refs): New.  Split from scan_for_static_refs.
+       (analyze_variable): Update walk_tree call.
+       (analyze_function): Tuplify.
+       * passes.c (init_optimization_passes): Re-enable pass_ipa_reference.
+       * tree-flow.h (struct function_ann_d): Uncomment field
+       ipa_reference_vars_info.
+
+2008-04-15  Doug Kwan  <dougkwan@google.com>
+
+       * tree-eh.c (operation_could_trap_p): Fix filtering logic.
+
+2008-04-15  Bill Maddox  <maddox@google.com>
+
+       * tree-ssa-dom.c: (degenerate_phi_result, remove_stmt_or_phi,
+       get_rhs_or_phi_arg, get_lhs_or_phi_result, propagate_rhs_into_lhs,
+       eliminate_const_or_copy, eliminate_degenerate_phis_1,
+       eliminate_degenerate_phis): Convert to tuples.
+       * passes.c (init_optimization_passes): Enable pass_phi_only_cprop.
+       
+2008-04-15  Oleg Ryjkov  <olegr@google.com>
+
+       * tree-ssa-dom.c (record_const_or_copy): Move a variable declaration
+       to the top of a block.
+
+2008-04-15  Doug Kwan  <dougkwan@google.com>
+
+       * tree-ssa-sink.c (File): Uncomment all previously disabled code.
+       (find_bb_for_arg): Tuplify.
+       (all_immediate_uses_same_place): Tuplify.
+       (nearest_common_dominator_of_uses): Tuplify.
+       (statement_sink_location): Tuplify.  Remove parameter tobb and update
+       caller.
+       (sink_code_in_bb): Tuplify.
+       * passes.c (init_optimization_passes): Re-enable pass_sink_code.
+
+2008-04-14  Bill Maddox  <maddox@google.com>
+
+       * tree-ssa-threadupdate.c (remove_ctrl_stmt_and_useless_edge,
+       create_edge_and_update_destination_phis, redirection_block_p):
+       Convert to tuples.
+       * tree.c (iterative_hash_exprs_commutative): New function.
+       * tree.h (iterative_hash_exprs_commutative): Declare.
+       * tree-ssa-dom.c (enum expr_kind, struct hashable_expr,
+       struct cond_equivalence): New types.
+       (struct edge_info): Use struct cond_equivalence.
+       (avail_exprs_stack): Stack of expr_hash_elt*, not expressions.
+       (stmts_to_rescan): Use type gimple for statements.
+       (struct expr_hash_elt): Represent statement RHS with hashable_expr,
+       not a tree.  Add unique stamp to each entry for efficient and reliable
+       element removal.
+       (struct eq_expr_value): Delete unused type.
+       (initialize_hash_element): Convert to tuples.  Now applies
+       only to statements.
+       (initialize_expr_from_cond, initialize_hash_element_from_expr):
+       New functions.  Replace former functionality of initialize_hash_element
+       for conditions presented as trees.
+       (hashable_expr_equal_p, iterative_hash_hashable_expr): New functions.
+       (print_expr_hash_elt): New function.
+       (free_expr_hash_elt): New function.
+       (tree_ssa_dominator_optimize): Convert to tuples.
+       (canonicalize_comparison): Convert to tuples.
+       (remove_local_expressions_from_table): Use new avail_exprs_stack
+       representation.  Add optional extra diagnostic details.
+       (simplify_stmt_for_jump_threading, dom_thread_across_edge,
+       dom_opt_finalize_block): Convert to tuples.
+       (record_cond, build_and_record_new_cond): Convert to tuples.
+       Use new hashable_expr type.  Extra diagnostic details.
+       (record_const_or_copy_1): Extra diagnostic details.
+       (record_const_or_copy): Add assertion.
+       (simple_iv_increment_p, cprop_into_successor_phis, record_edge_info):
+       Convert to tuples.
+       (eliminate_redundant_computations): Convert to tuples.
+       Additional diagnostic details.
+       (gimple_assign_unary_useless_conversion_p): New function.
+       (record_equivalences_from_statement): Convert to tuples.
+       Additional diagnostic details.
+       (cprop_operand, cprop_into_stmt): Convert to tuples.
+       (optimize_stmt): Convert to tuples.
+       (lookup_avail_expr): Use hashable_expr.  Additional diagnostic details.
+       (avail_expr_hash, avail_expr_eq): Use hashable_expr.
+       * tree-ssa-copy.c (may_propagate_copy_into_stmt,
+       propagate_tree_value_into_stmt): New functions.
+       * tree-flow.h: (may_propagate_copy_into_stmt,
+       propagate_tree_value_into_stmt): Declare.
+       (thread_across_edge): Change declaration of callback to accept a
+       gimple statement.
+       * gimple.c (gimple_call_flags): Declare argument as a constant.
+       (gimple_has_side_effects): Declare argument as a constant.
+       Examine function arguments for side-effects.
+       (gimple_rhs_has_side_effects): New function.
+       * gimple.h (gimple_call_flags): Declare argument as a constant.
+       (gimple_has_side_effects): Declare argument as a constant.
+       (gimple_rhs_has_side_effects): Declare new function.
+       (is_gimple_assign): New inline function.
+       (gimple_switch_index_ptr): New function.
+       * passes.c (init_optimization_passes): Enable pass_dominator.
+       * tree-ssa-threadedge.c (potentially_threadable_block,
+       lhs_of_dominating_assert, record_temporary_equivalences_from_phis):
+       Convert to tuples.
+       (fold_assignment_stmt): New function.
+       (record_temporary_equivalences_from_stmts_at_dest,
+       simplify_control_stmt_condition, thread_across_edge): Convert to tuples.
+
+2008-04-14  Doug Kwan  <dougkwan@google.com>
+
+       * c-typeck.c (convert_for_assignment): Call c_common_unsigned_type.
+
+2008-04-14  Doug Kwan  <dougkwan@google.com>
+
+       * gimple-iterator.c (gsi_move_to_bb_end): Use gsi_last_bb
+       instead of calling both gsi_last and bb_seq.
+       * passes.c (init_optimization_passes): Re-enable second tail-recursion
+       pass.
+
+2008-04-14  Doug Kwan  <dougkwan@google.com>
+
+       * tree-nrv.c (dest_safe_for_nrv_p):  Uncomment.
+       (execute_return_slot_opt): Tuplify.
+       * passes.c (init_optimization_passes): Re-enable pass_return_slot.
+
+2008-04-14  Aldy Hernandez  <aldyh@redhat.com>
+
+       * tree-cfg.c (gimple_can_merge_blocks_p): Fix typo.
+       * omp-low.c (maybe_lookup_ctx): Uncomment and set ATTRIBUTE_UNUSED.
+       (lower_lastprivate_clauses): Same.
+       (lower_reduction_clauses): Same.
+       (lower_send_clauses): Same.
+       (expand_omp_for_generic): Uncomment and convert for tuples.
+       (expand_omp_for): Remove fixme.
+       (expand_omp_sections): Same.
+       (lower_omp_parallel): Partially convert for tuples.
+       (lower_omp_regimplify_p): Rename from lower_omp_2.
+       (lower_omp_1): Convert for tuples.
+       (lower_omp): Same.
+       * gimple-dummy.c: Add lower_omp_parallel, lower_omp_for,
+       lower_omp_sections, lower_omp_single, lower_omp_master,
+       lower_omp_ordered, lower_omp_critical.
+
+2008-04-13  Diego Novillo  <dnovillo@google.com>
+
+       * tree-cfg.c (need_fake_edge_p): Initialize CALL_FLAGS.
+
+2008-04-12  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @134237.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-04-13  Rafael Espindola <espindola@google.com>
+
+       * config/extensions.m4: Add. Was missing from previous merge.
+
+2008-04-12  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @133860.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-04-11  Oleg Ryjkov  <olegr@google.com>
+
+       * tree-sra.c (scalarize_use): Move a variable declaration to
+       the top of a block.
+
+2008-04-11  Oleg Ryjkov  <olegr@google.com>
+
+       * tree-sra.c (insert_edge_copies_seq): Removed unused variable.
+
+2008-04-11  Oleg Ryjkov  <olegr@google.com>
+
+       * tree-sra.c (sra_build_assignment): Tuplified.
+       (mark_all_v_defs): Removed.
+       (mark_all_v_defs_seq, mark_all_v_defs_stmt): New functions.
+       (sra_walk_expr): Tuplified.
+       (sra_walk_tree_list): Removed.
+       (sra_walk_call_expr, sra_walk_gimple_asm,
+       sra_walk_gimple_modify_stmt): Tuplified and renamed.
+       (sra_walk_gimple_call, sra_walk_gimple_asm,
+       sra_walk_gimple_assign): New names for tuplified functions.
+       (sra_walk_function, find_candidates_for_sra, scan_use, scan_copy,
+       scan_ldst, instantiate_element, decide_instantiations,
+       mark_all_v_defs_1, sra_build_assignment, sra_build_bf_assignment,
+       sra_build_elt_assignment, generate_copy_inout,
+       generate_element_copy, generate_element_zero,
+       generate_one_element_init, generate_element_init_1): Tuplified.
+       (insert_edge_copies): Removed.
+       (insert_edge_copies_seq): New function.
+       (sra_insert_before, sra_insert_after, sra_replace,
+       sra_explode_bitfield_assignment, sra_sync_for_bitfield_assignment,
+       scalarize_use, scalarize_copy, scalarize_init, mark_no_trap,
+       scalarize_ldst, scalarize_parms, scalarize_function): Tuplified.
+       (tree_sra, tree_sra_early): Enabled.
+       (sra_init_cache): Removed extra space.
+       * tree-flow.h (insert_edge_copies_seq): New declaration.
+       * gimple.h (gimple_asm_input_op_ptr, gimple_asm_output_op_ptr,
+       gimple_return_retval_ptr): New functions.
+       * passes.c (init_optimization_passes): Enabled pass_sra,
+       pass_early_sra.
+
+2008-04-11  Doug Kwan  <dougkwan@google.com>
+
+       * ipa-pure-const.c (get_asm_expr_operands): Tuplify.
+       (check_call): Tuplify. Add code to handle return value assignment.
+       (scan_function): Remove. Original code is tuplified and split into
+       two new functions scan_function_op and scan_function_stmt.
+       (scan_function_op): New function.
+       (scan_function_stmt): New function.
+       (analyze_function): Tuplify and re-enable previously disabled code. 
+       * passes.c (init_optimization_passes): Re-enable pass_ipa_pure_const.
+
+2008-04-11 Oleg Ryjkov  <olegr@google.com>
+
+       * builtins.c (fold_call_stmt): Return the proper value.
+       * tree-ssa-ccp.c (maybe_fold_stmt_addition): Modify arguments to
+       allow this function to be called on a GIMPLE_ASSIGN.
+       (fold_stmt_r): Update the call to maybe_fold_stmt_addition.
+       (fold_gimple_assign): Manually call maybe_fold_stmt_addition to
+       fold a POINTER_PLUS_EXPR.
+
+2008-04-11  Doug Kwan  <dougkwan@google.com>
+
+       * tree-ssa-phiprop.c (File): Uncomment all previously disabled code.
+       (struct phiprop_d): Change type of vop_stmt to gimple.
+       (phivn_valid_p): Tuplify.
+       (phiprop_insert_phi): Tuplify.
+       (propagate_with_phi): Tuplify.
+       (tree_ssa_phiprop_1): Tuplify.
+       (tree_ssa_phiprop): Tuplify.
+       * passes.c (init_optimization_passes): Re-enable pass_phiprop.
+
+2008-04-11  Rafael Espindola <espindola@google.com>
+
+       * tree-ssa-math-opts.c (execute_cse_sincos_1): Fix warning.
+
+2008-04-10  Diego Novillo  <dnovillo@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-04/msg00913.html
+
+       * ipa-inline.c (cgraph_clone_inlined_nodes): Change uses
+       of DECL_SAVED_TREE with gimple_body.
+       (cgraph_decide_inlining_incrementally): Likewise.
+       * gimple-iterator.c (gsi_insert_seq_before_without_update):
+       Do nothing if SEQ is NULL.
+       (gsi_insert_seq_after_without_update): Likewise.
+       * tree-ssa-live.c (mark_all_vars_used_1): Do not handle
+       EXPR trees.
+       (remove_unused_locals): Mark the gimple_block of every
+       statement as used.
+       * tree-inline.c (remap_gimple_stmt): Forward declare.
+       (remap_decls): Tidy comments.
+       (remap_gimple_seq): New.
+       (copy_gimple_bind): New.
+       (remap_gimple_stmt): Call it.
+       Handle High GIMPLE statements.
+       (copy_bb): Regimplify operands on COPY_GSI instead of
+       GSI.
+       (copy_cfg_body): Tidy.
+       (copy_generic_body): Remove unused function.
+       (clone_body): Tuplify.
+       * c-common.c (c_warn_unused_result): Remove assertion for
+       FUNCTION_TYPE.
+       * gimple.c (gimple_seq_copy): Rename from
+       gimple_seq_deep_copy.  Update all users.
+       (walk_gimple_stmt): Assert that STMT has no substatements
+       in the default case.
+       (gimple_copy_1): Merge into gimple_copy.
+       (gimple_copy): Always do deep copying.
+       Handle statements with substatements.
+       (gimple_shallow_copy): Remove unused function.
+       (gimple_deep_copy): Remove.  Update all users.
+       * gimple.h: Tidy comments and structure fields
+       everywhere.
+       (gimple_has_substatements): New.
+       (walk_stmt_fn): Change last argument to struct walk_stmt_info *.
+       Update all users.
+
+2008-04-10 Oleg Ryjkov  <olegr@google.com>
+
+       * tree.h (struct tree_base): Added a new flag default_def_flag.
+       (SSA_NAME_IS_DEFAULT_DEF): Changed to use the new flag.
+
+2008-04-08  Doug Kwan  <dougkwan@google.com>
+
+       * gimplify.c (gimple_push_cleanup): Initialize cleanup_stmts to NULL.
+
+2008-04-08  Doug Kwan  <dougkwan@google.com>
+       
+       * tree-tailcall.c (struct tailcall): Remove call_block and replace
+       call_bsi by call_gsi.
+       (independent_of_stmt_p): Change type of parameter 'at' to GIMPLE and
+       change all tree using code to GIMPLE using equivalent.
+       Replace block_stmt_iterator type parameter bsi with
+       gimple_stmt_iterator type parameter gsi.  Replace uses of bsi with
+       that of gsi in function body as appropriate.
+       (process_assignment):  Remove parameter 'ass' and change type of
+       parameter 'stmt' to GIMPLE.  Change all tree using code to GIMPLE
+       using equivalent.
+       (propagate_through_phis, eliminate_tail_call, optimize_tail_call):
+       Change all tree using code to GIMPLE using equivalent.
+       (find_tail_calls): Change all tree using code to GIMPLE using
+       equivalent.  Remove code that handles GIMPLE_MODIFY_STMT containing
+       a CALL_EXPR.
+       (add_successor_phi_arg, adjust_return_value_with_ops,
+       update_accumulator_with_ops, adjust_accumulator_values,
+       create_tailcall_accumulator): New functions from refactoring of
+       duplicated logic.
+       (adjust_accumulator_values, adjust_return_value): Refactor.
+       (tree_optimize_tail_calls_1): Refactor and change all tree using code
+       to GIMPLE using equivalent.  Remove code to reverse phi list.
+       * passes.c (init_optimization_passes):  Re-enable pass_tail_recursion
+       and pass_tail_calls.
+
+2008-04-04  Doug Kwan  <dougkwan@google.com>
+
+       * tree-ssa-math-opts.c (struct occurrence): Change field type of
+       recip_def_stmt to gimple.
+       (is_division_by): Tuplify.
+       (insert_reciprocals): Tuplify.
+       (replace_reciprocals): Tuplify.
+       (execute_cse_reciprocals_1): Tuplify.
+       (execute_cse_reciprocals): Tuplify.
+       (maybe_record_sincos): Use vector of gimples instead of vector of
+       trees.
+       (execute_cse_sincos_1): Tuplify.  When adjusting recorded old call
+       sites, generate new gimple assign statements and remove old gimple
+       call statements.
+       (execute_cse_sincos): Tuplify.
+       (execute_convert_to_rsqrt): Tuplify.
+       * passes.c (init_optimization_passes): Enable pass_cse_sincos,
+       pass_cse_reciprocals and pass_convert_to_rsqrt.
+
+2008-04-04  Aldy Hernandez  <aldyh@redhat.com>
+
+       * omp-low.c (gimple_build_cond_empty): New.
+       (expand_parallel_call): Convert COND_EXPR to GIMPLE_COND.
+       (expand_omp_for_generic): Same.
+       (expand_omp_for_static_nochunk): Same. 
+       (expand_omp_for_static_chunk): Same.
+       (expand_omp_atomic_pipeline): Same.
+
+2008-04-04  Diego Novillo  <dnovillo@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-04/msg00413.html
+
+       * tree-ssa-loop-im.c (movement_possibility): Return
+       MOVE_IMPOSSIBLE if that statement makes no assignment.
+       * tree-complex.c (get_component_ssa_name): Fix comment.
+       (set_component_ssa_name): Assert that COMP's definition
+       is LAST.
+       * cgraph.c (cgraph_update_edges_for_call_stmt): Tuplify.
+       (dump_cgraph_node): Likewise.
+       * tree.c (auto_var_in_fn_p): Fix comment.
+       * cgraphunit.c (verify_cgraph_node): Remove tuples hack
+       that avoided verifying nodes.
+       * gimple-pretty-print.c (dump_gimple_asm): Show the
+       volatile flag.
+
+       * gimple-dummy.c (optimize_inline_calls): Remove.
+       (tree_function_versioning): Remove.
+       (tree_versionable_function_p): Remove.
+       * ipa-inline.c (cgraph_clone_inlined_bodies): Re-format.
+       (cgraph_decide_inlining): Re-format.
+       (cgraph_decide_inlining_incrementally): Re-format.
+       (pass_ipa_inline): Re-format.
+       * gimplify.c (gimplify_call_expr): Carry all the
+       CALL_EXPR flags to the newly created GIMPLE_CALL.
+       (gimplify_asm_expr): Carry ASM_VOLATILE_P and ASM_INPUT_P
+       over to the new GIMPLE_ASM.
+       * tree-dfa.c (find_new_referenced_vars): Change argument
+       to gimple.  Update all callers.
+       * cfgexpand.c (set_expr_location_r): New private.
+       (gimple_to_tree): Call it.
+       Set ASM_VOLATILE_P and ASM_INPUT_P for ASM_EXPRs.
+       Carry flags from GIMPLE_CALL for CALL_EXPRs.
+       Set TREE_BLOCK on the newly created expression.
+       * tree-inline.c: Tuplify.
+       * tree-inline.h: Tuplify.
+       * tree-optimize.c (execute_fixup_cfg): Tuplify.
+       * gimple.c (gimple_build_call_vec): Change second
+       argument type to VEC(tree, heap).  Update all callers.
+       (gimple_build_assign_with_ops): If the LHS is an
+       SSA_NAME, set SSA_NAME_DEF_STMT on it.
+       (walk_gimple_seq): Change return type to gimple.  Update
+       all users.
+       If walk_gimple_stmt returned a value, store it in
+       WI->CALLBACK_RESULT.
+       (walk_gimple_op): Walk gimple_call_chain_ptr() and
+       gimple_call_fn_ptr() for GIMPLE_CALL.
+       (walk_gimple_stmt): Add new local HANDLED_OPS.
+       If callback_stmt() sets HANDLED_OPS, return its return
+       value.
+       If any call to walk_gimple_seq returns a non-NULL value,
+       return WI->CALLBACK_RESULT.
+       (gimple_copy_1): New function.
+       (gimple_deep_copy): Rename from gimple_copy.  Update all
+       users.
+       Call gimple_copy_1.
+       (gimple_shallow_copy): New.
+       (gimple_regimplify_operands): New.
+       * gimple.h (GF_ASM_INPUT): Define.
+       (GF_ASM_VOLATILE): Define.
+       (GF_CALL_CANNOT_INLINE): Define.
+       (GF_CALL_FROM_THUNK): Define.
+       (GF_CALL_RETURN_SLOT_OPT): Define.
+       (GF_CALL_VA_ARG_PACK): Define.
+       (gimple_stmt_iterator): Move earlier in the file.
+       (gimple_omp_return_nowait_p): Fix return expression.
+       (gimple_omp_section_last_p): Likewise.
+       (gimple_omp_parallel_combined_p): Likewise.
+       (gimple_call_noreturn_p): Likewise.
+       (gimple_call_nothrow_p): Likewise.
+       (gimple_asm_volatile_p): Likewise.
+       (gimple_try_catch_is_cleanup): Likewise.
+       (gimple_assign_set_rhs1): Assert that RHS is a gimple
+       operand.
+       (is_gimple_call): New.  Change all users that tested
+       gimple_code() == GIMPLE_CALL.
+       (gimple_call_fn_ptr): New.
+       (gimple_call_chain_ptr): New.
+       (gimple_call_set_chain): Accept NULL values for CHAIN.
+       (gimple_call_set_tail): Add bool argument specifying the
+       value of the flag.  Update all users.
+       (gimple_asm_set_volatile): Likewise.
+       (gimple_call_set_cannot_inline): Rename from
+       gimple_call_mark_uninlinable.  Add bool argument
+       specifying the value of the flag.  Update all users.
+       (gimple_call_set_return_slot_opt): New.
+       (gimple_call_return_slot_opt_p): New.
+       (gimple_call_set_from_thunk): New.
+       (gimple_call_from_thunk_p): New.
+       (gimple_call_va_arg_pack_p): New.
+       (gimple_call_copy_flags): New.
+       (gimple_asm_set_input): New.
+       (gimple_asm_input_p): New.
+       (gimple_asm_clear_volatile): Remove.
+       (walk_stmt_fn): Add second argument bool *.  Update all
+       users.
+       (struct walk_stmt_info): Add new field callback_result.
+       * tree-cfg.c (gimple_split_block): Tuplify.
+       (gimple_block_ends_with_condjump_p): Tuplify.
+       (need_fake_edge_p): Tuplify.
+       (gimple_flow_call_edges_add): Tuplify.
+       (gimple_purge_dead_abnormal_call_edges): Tuplify.
+       (gimple_purge_dead_eh_edges): Remove ATTRIBUTE_UNUSED.
+       (gimple_cfg_hooks): Add gimple_block_ends_with_condjump_p
+       and gimple_flow_call_edges_add.
+       * passes.c (init_optimization_passes): Enable
+       pass_cleanup_cfg, pass_inline_parameters,
+       pass_ipa_inline and pass_apply_inline.
+       (execute_todo): Re-enable check for TODO_update_ssa_any
+       if need_ssa_update_p() returns true.
+       * tree-ssa-operands.c (ssa_operands_active): Return false
+       if cfun is NULL.
+
+
+2008-04-04  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @133632.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-04-04  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @133631.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-04-04  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @133630.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-04-03  Aldy Hernandez  <aldyh@redhat.com>
+
+       * omp-low.c (expand_omp_sections): Use
+       gimple_omp_sections_control.
+       (lower_omp_sections): Same.
+       * gimplify.c (gimplify_omp_workshare): Adjust OMP clauses before
+       creating gimple tuple.
+       Add gimple tuple to sequence.
+       Set OMP_SECTIONS_CONTROL in newly created tuple.
+       * gimple.h (gimple_statement_omp_sections): Add control.
+       (gimple_omp_sections_control): New.
+       (gimple_omp_sections_control_ptr): New.
+       (gimple_omp_sections_set_control): New.
+
+2008-04-03  Oleg Ryjkov  <olegr@google.com>
+
+       * tree-nested.c (convert_nl_goto_receiver): Changed to handle gimple
+       statements instead of trees.
+       (lower_nested_functions): Pass convert_nl_goto_receiver as the first
+       parameter to walk_all_functions.
+
+2008-04-03  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @133624.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-04-03  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @133612.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-04-03  Aldy Hernandez  <aldyh@redhat.com>
+
+       * tree-pretty-print.c (dump_generic_node): Remove
+       OMP_{RETURN,CONTINUE} cases.
+       * tree.h (OMP_RETURN_NOWAIT): Remove.
+       * omp-low.c (dump_omp_region): Rename OMP_{CONTINUE,RETURN} to
+       GIMPLE_OMP_{CONTINUE,RETURN}.
+       (expand_omp_for_generic): Handle new arguments to
+       GIMPLE_OMP_CONTINUE.
+       (expand_omp_for_static_nochunk): Same.
+       (expand_omp_for_static_chunk): Same.
+       (expand_omp_sections): Same.
+       (expand_omp): Rename all OMP_* to GIMPLE_OMP_*.
+       (lower_omp_sections): Rename OMP_CONTINUE to GIMPLE_OMP_CONTINUE.
+       (lower_omp_for): Same.
+       * tree-gimple.c (is_gimple_stmt): Remove OMP_{RETURN,CONTINUE}
+       cases.
+       * gsstruct.def: Add GSS_OMP_CONTINUE.
+       * gimple-pretty-print.c (dump_gimple_omp_continue): New.
+       (dump_gimple_omp_return): New.
+       (dump_gimple_stmt): Add cases for GIMPLE_OMP_{CONTINUE,RETURN}.
+       * gimplify.c (gimplify_expr): Remove cases for
+       OMP_{CONTINUE,RETURN}.
+       * tree.def (DEFTREECODE): Remove OMP_{RETURN,CONTINUE}.
+       * tree-cfgcleanup.c (cleanup_omp_return): Rename
+       OMP_SECTIONS_SWITCH to GIMPLE_OMP_SECTIONS_SWITCH.
+       * gimple.c (gss_for_code): GIMPLE_OMP_RETURN returns GSS_BASE.
+       GIMPLE_OMP_CONTINUE returns GSS_OMP_CONTINUE.
+       (gimple_size): Adjust size of GIMPLE_OMP_{RETURN,CONTINUE}.
+       (gimple_build_omp_continue): Change arguments.
+       (walk_gimple_op): Walk GIMPLE_OMP_CONTINUE operands.
+       (walk_gimple_stmt): Remove GIMPLE_OMP_CONTINUE case.
+       * gimple.h (struct gimple_statement_omp_continue): New.
+       (union gimple_statement_d): Add gimple_omp_continue.
+       (gimple_build_omp_continue): Change argument types in prototype.
+       (gimple_omp_return_set_nowait): Rename OMP_RETURN to
+       GIMPLE_OMP_RETURN.
+       (gimple_omp_continue_control_def): New.
+       (gimple_omp_continue_control_def_ptr): New.
+       (gimple_omp_continue_set_control_def): New.
+       (gimple_omp_continue_control_use): New.
+       (gimple_omp_continue_control_use_ptr): New.
+       (gimple_omp_continue_set_control_use): New.
+       * tree-cfg.c (make_edges): Rename OMP_ to GIMPLE_OMP_.
+       (gimple_redirect_edge_and_branch): Same.
+       * tree-ssa-operands.c (get_expr_operands): Remove OMP_CONTINUE
+       case.
+
+2008-04-02  Doug Kwan  <dougkwan@google.com>
+
+       * tree-complex.c (expand_complex_comparison): Set condition code
+       correctly for the NE_EXPR case.
+       * cfgexpand.c (gimple_to_tree): Generate assignment to return value
+       if necessary.
+
+2008-04-02  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @133597.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-04-02  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @133527.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-04-02  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @133519.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-04-02  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @133453.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-04-01  Doug Kwan  <dougkwan@google.com>
+           Bill Maddox  <maddox@google.com>.
+
+       * value-prof.c (interesting_stringop_to_profile_p): Call
+       validate_gimple_arglist instead of validate_arglist.
+       * tree.h (validate_arglist): New declaration.
+       * builtins.c (expand_builtin_setjmp_receiver, expand_builtin_longjmp,
+       expand_builtin_nonlocal_goto, expand_builtin_nonlocal_goto,
+       expand_builtin_update_setjmp_buf, expand_builtin_prefetch,
+       expand_builtin_apply, expand_builtin_return, expand_builtin_mathfn,
+       expand_builtin_mathfn_2, expand_builtin_mathfn_3,
+       expand_builtin_interclass_mathfn, expand_builtin_sincos,
+       expand_builtin_cexpi, expand_builtin_int_roundingfn,
+       expand_builtin_int_roundingfn_2, expand_builtin_pow,
+       expand_builtin_powi, expand_builtin_strlen, expand_builtin_strlen,
+       expand_builtin_strstr, expand_builtin_strchr, expand_builtin_strrchr,
+       expand_builtin_memcpy, expand_builtin_memcpy,
+       expand_builtin_mempcpy_args, expand_builtin_bcopy, expand_movstr,
+       expand_builtin_stpcpy, expand_builtin_strncpy, builtin_memset_gen_str,
+       expand_builtin_bzero, expand_builtin_memchr, expand_builtin_memcmp,
+       expand_builtin_strcmp, expand_builtin_strncmp, expand_builtin_strcat,
+       expand_builtin_strncat, expand_builtin_strspn, expand_builtin_alloca,
+       expand_builtin_bswap, expand_builtin_unop, expand_builtin_fputs,
+       expand_builtin_fabs, expand_builtin_copysign,
+       expand_builtin___clear_cache, expand_builtin_init_trampoline,
+       expand_builtin_adjust_trampoline, expand_builtin_signbit,
+       expand_builtin, validate_arg, expand_builtin_object_size,
+       expand_builtin_memory_chk): Re-enable code
+       previously disabled for GIMPLE.
+       (expand_builtin_memcpy, expand_builtin_memset_args): Re-enable code
+       previously disabled for GIMPLE.  Look up tree attribute for original
+       GIMPLE statement.
+       (validate_arglist): Use old interface of tree node instead of GIMPLE
+       statement.
+       (validate_gimple_arglist):  New function.
+       * cfgexpand.c (gimple_to_tree): Set GIMPLE statement tree attribute
+       for builtin function calls.
+       * tree-flow.h (struct tree_ann_common_d): New field stmt.
+       * gimple.h (validate_gimple_arglist): New declaration.
+       (validate_arglist): Moved to tree.h.
+
+2008-03-31  Oleg Ryjkov  <olegr@google.com>
+
+       * gimplify.c (gimplify_switch_expr): Verify the return value from
+       gimplify_expr.
+
+2008-03-31  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @133452.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-03-31  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @133440.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-03-31  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @133423.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-03-30  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @133342.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-03-28  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @133341.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-03-28  Oleg Ryjkov  <olegr@google.com>
+
+       * tree-eh.c (replace_goto_queue_1): Do a deep copy of the replacement
+       sequence.
+       (maybe_record_in_goto_queue): Set is_label flag when recording
+       GIMPLE_GOTOs.
+       (do_return_redirection, do_goto_redirection): Changed to set
+       cont_stmt.
+       (lower_try_finally_onedest): Fix the code that assumes that gotos
+       themselves (instead of the labels) are recorded in the goto_queue.
+       (lower_try_finally_copy): Likewise.
+       (lower_try_finally_switch): Likewise, also fix the VEC_* operations.
+       * gimple.h (gimple_build_switch): Fixed comment.
+
+2008-03-28  Doug Kwan  <dougkwan@google.com>
+
+       * omp-low.c (expand_omp_sections): Fix build breakage due to an
+       uninitialized variable.
+
+2008-03-28  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @133315.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-03-28  Diego Novillo  <dnovillo@google.com>
+
+       * omp-low.c (expand_omp_for_static_chunk): Initialize
+       V_MAIN and V_BACK.
+       (expand_omp_for): Initialize VIN.
+
+2008-03-28  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @133313.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-03-27  Doug Kwan  <dougkwan@google.com>
+
+       * c-decl.c (merge_decls): Also copy gimple bodies of decls.
+       * gimplify.c (gimplify_call_expr): Do not exit early when
+       gimplifying __builtin_va_start().
+
+2008-03-27  Oleg Ryjkov  <olegr@google.com>
+
+       * gimple.c (walk_gimple_op): Add a check for assignments with more
+       than one operand on the LHS.
+
+2008-03-27  Aldy Hernandez  <aldyh@redhat.com>
+
+       * tree.c (empty_body_p): Remove.
+       * tree.h (empty_body_p): Remove prototype.
+       * omp-low.c (struct omp_context): Comment and convert to tuples.
+       (scan_omp_op): New.
+       (is_parallel_ctx): Convert to tuples.
+       (extract_omp_for_data): Same.
+       (workshare_safe_to_combine_p): Same.
+       (get_ws_args_for): Same.
+       (determine_parallel_type): Same.
+       (use_pointer_for_field): Same.
+       (dump_omp_region): Same.
+       (debug_all_omp_regions): Same.
+       (new_omp_region): Same.
+       (new_omp_context): Same.
+       (scan_sharing_clauses): Same.
+       (create_omp_child_function): Same.
+       (scan_omp_parallel): Same.
+       (scan_omp_for): Same.
+       (scan_omp_sections): Same.
+       (scan_omp_single): Same.
+       (check_omp_nesting_restrictions): Same.
+       (scan_omp_1_op): New.
+       (scan_omp_1_stmt): Rename from scan_omp_1.  Rewrite for tuples.
+       (scan_omp): Convert to tuples.
+       (build_omp_barrier): Same.
+       (maybe_lookup_ctx): Same.
+       (lower_rec_input_clauses): Same.
+       (lower_lastprivate_clauses): Same.
+       (lower_reduction_clauses): Same.
+       (lower_copyprivate_clauses): Same.
+       (lower_send_clauses): Same.
+       (lower_send_shared_vars): Same.
+       (expand_parallel_call): Same.
+       (remove_exit_barrier): Same.
+       (remove_exit_barriers): Same.
+       (optimize_omp_library_calls): Same.
+       (expand_omp_parallel): Same.
+       (expand_omp_for_generic): Comment out, and convert to tuples.
+       (expand_omp_for_static_nochunk): Convert to tuples.
+       (expand_omp_for_static_chunk): Same.
+       (expand_omp_for): Same.
+       (expand_omp_sections): Same.
+       (expand_omp_synch): Same.
+       (expand_omp_atomic_fetch_op): Same.
+       (expand_omp_atomic_pipeline): Same.
+       (expand_omp_atomic_mutex): Same.
+       (expand_omp_atomic): Same.
+       (expand_omp): Same.
+       (build_omp_regions_1): Same. 
+       (execute_expand_omp): Enable.
+       (execute_lower_omp): Enable and convert to tuples.
+       * gimple-dummy.c (omp_reduction_init): Remove.
+       * tree-flow.h (struct omp_region): Convert 'type' to tuples.
+       (new_omp_region): Adjust prototype for tuples.
+       * gimple.c (empty_stmt_p): New.
+       (empty_body_p): New.
+       * gimple.h (empty_body_p): New prototype.
+       (gimple_has_location): Remove fixmes.
+       * tree-cfg.c (gimple_block_label): Remove ATTRIBUTE_UNUSED.
+
+2008-03-27  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @133311.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-03-27  Rafael Espindola <espindola@google.com>
+
+       * gcc/tree-nested.c (save_tmp_var): Use gsi_insert_after_without_update
+       instead of gsi_insert_after.
+
+2008-03-26  Jakub Staszak  <kuba@et.pl>
+
+       * gimple.h (gimple_uid, gimple_set_uid): Defined.
+       (gimple_statement_base): Field UID added.
+       * tree-ssa-dse.c (execute_simple_dse): Removed #if 0 and added a
+       bitmap_empty_p condition.
+       (memory_ssa_name_same, memory_address_same, get_kill_of_stmt_lhs,
+       dse_possible_dead_store_p, dse_optimize_stmt, dse_record_phis,
+       dse_finalize_block, tree_ssa_dse): Tuplified.
+       * passes.c (init_optimization_passes): Enabled pass_simple_dse and
+       pass_dse.
+       * testsuite/gcc.dg/tree-ssa/ssa-dse-10.c: {dg-final} changed.
+       * testsuite/gcc.dg/tree-ssa/pr30375.c: {dg-final} changed.
+
+2008-03-26  Rafael Espindola <espindola@google.com>
+
+       * gcc/tree-ssa-operands.c (ssa_operands_active): Assert that cfun is
+       not NULL.
+       * gcc/tree-nested.c (init_tmp_var): Use
+       gsi_insert_before_without_update instead of gsi_insert_before.
+
+2008-03-25  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @133309.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-03-25  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @133306.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-03-25  Bill Maddox  <maddox@google.com>
+
+       * tree-ssa-dom.c (loop_depth_of_name): Tuplify.
+       * tree-ssa-copy.c (stmt_may_generate_copy,
+       copy_prop_visit_assignment, copy_prop_visit_cond_stmt,
+       copy_prop_visit_stmt, copy_prop_visit_phi_node,
+       init_copy_prop, execute_copy_prop): Tuplify.
+       * passes.c (init_optimization_passes):
+       Enable pass_copy_prop.
+
+2008-03-25  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @133257.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-03-25  Oleg Ryjkov  <olegr@google.com>
+
+       * gimple.c (gimple_build_bind): Added a new parameter, the
+       containing block.
+       * gimple.h (gimple_build_bind): Changed the header accordingly.
+       * gimplify.c (gimplify_bind_expr, gimplify_function_tree): Updated
+       the callers of gimple_build_bind.
+       (gimplify_body): Updated the call to gimple_build_bind and moved
+       the copying of block information into gimplify_bind_expr.
+
+2008-03-25  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @133255.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-03-25  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @133251.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-03-25  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @133250.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-03-24  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @133246.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-03-24  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @133222.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-03-24  Andrew Pinski  <pinskia@gmail.com>
+
+       * passes.c (init_optimization_passes): Enable pass_split_crit_edges.
+
+2008-03-24  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @133169.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-03-21  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @133168.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-03-21  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @133167.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-03-20  Oleg Ryjkov  <olegr@google.com>
+
+       * tree-eh.c (lower_try_finally): Correctly set the lowered sequence.
+
+2008-03-20  Jakub Staszak  <kuba@et.pl>
+            Oleg Ryjkov  <olegr@google.com>
+
+       * tree-ssa-sink.c (is_hidden_global_store): Tuplified.
+       * tree-ssa-dce.c (mark_stmt_necessary, mark_operand_necessary,
+       mark_stmt_if_obviously_necessary,
+       mark_control_dependent_edges_necessary,
+       find_obviously_necessary_stmts, propagate_necessity,
+       remove_dead_phis, eliminate_unnecessary_stmts, tree_dce_init,
+       tree_dce_done): Tuplified.
+       * tree-flow.h (is_hidden_global_store): Tuplified the declaration.
+       * passes.c (init_optimization_passes): Enabled pass_dce and
+       pass_cd_dce.
+
+2008-03-20  Oleg Ryjkov  <olegr@google.com>
+
+       * tree-complex.c (init_dont_simulate_again, complex_visit_stmt,
+       update_complex_components, expand_complex_operations_1): Consider
+       GIMPLE_CALLs with a lhs, not only GIMPLE_ASSIGNs.
+       * gimplify.c (get_tmp_var_for): Removed.
+       (gimplify_call_expr): Remove call to get_tmp_var_for, set
+       gimple_register on a new lhs in some cases.
+
+2008-03-20  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @133162.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-03-20  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @133161.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-03-20  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @133140.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-03-20  Rafael Espindola <espindola@google.com>
+
+       Merge with mainline @133138.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-03-18  Bill Maddox  <maddox@google.com>
+
+       * builtins.c (fold_builtin_object_size):
+       Enable call to compute_builtin_object_size, previously
+       stubbed out awaiting tuplification.
+       * tree-ssa-ccp.c (valid_gimple_rhs_p, valid_gimple_call_p,
+       move_ssa_defining_stmt_for_defs, update_call_from_tree):
+       Deleted, moved to tree-ssa-propagate.c.
+       (get_maxval_strlen): Use gimple_assign_single_p.
+       Handle assignment with unary NOP correctly.
+       * tree-ssa-propagate.c (valid_gimple_rhs_p, valid_gimple_call_p,
+       move_ssa_defining_stmt_for_defs, update_call_from_tree):
+       Moved here from tree-ssa-ccp.c.
+       * tree-ssa-propagate.h (valid_gimple_rhs_p, valid_gimple_call_p,
+       move_ssa_defining_stmt_for_defs, update_call_from_tree): Declared.
+       * gimple-dummy.c (compute_builtin_object_size): Removed dummy.
+       * tree-object-size.c (addr_object_size, alloc_object_size):
+       Tuplified.
+       (pass_through_call, compute_builtin_object_size): Tuplified.
+       (expr_object_size): Tuplified.  Some cases broken out.
+       (plus_expr_object_size): Deleted.
+       (call_object_size, unknown_object_size, plus_stmt_object_size):
+       New functions.  Handle cases broken out from expr_object_size.
+       (cond_expr_object_size): Fix comment.
+       (collect_object_sizes_for): Tuplify.
+       (check_for_plus_in_loops_1, check_for_plus_in_loops): Tuplify.
+       (compute_object_sizes): Tuplify.
+       * gimple.c (gimple_assign_single_p, gimple_assign_unary_nop_p):
+       New functions.
+       * gimple.h (gimple_assign_single_p, gimple_assign_unary_nop_p):
+       Declare.
+       * passes.c (init_optimization_passes): Enable pass_object_sizes.
+
+2008-03-18  Diego Novillo  <dnovillo@google.com>
+            Oleg Ryjkov  <olegr@google.com>
+
+       * gimplify.c (gimplify_body): Copy the block information from
+       the tree function body to the gimple function body.
+       (gimplify_function_tree): Correctly wrap the function body
+       into the try/finally block if creating one.
+       * gimple.c (gimple_seq_has_side_effects): Removed.
+       * gimple.h (gimple_seq_has_side_effects): Removed declaration.
+       * tree-cfg.c (remove_useless_stmts_tf, remove_useless_stmts_tc):
+       Modified to use gimple_seq_empty_p instead of
+       gimple_seq_has_side_effects.
+
+2008-03-18  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimple.c (walk_gimple_stmt): Collapse common code.
+       Add case for GIMPLE_WITH_CLEANUP_EXPR.
+
+2008-03-17  Zdenek Dvorak  <ook@ucw.cz>
+
+       * gimple-iterator.c (gsi_for_stmt): Use gsi_start_phis.
+       (gsi_start_phis): New function.
+       * gimple.h (gsi_start_phis): Declare.
+       * tree-into-ssa.c (initialize_flags_in_bb, rewrite_initialize_block,
+       rewrite_add_phi_arguments, rewrite_update_init_block,
+       prepare_block_for_update): Use gsi_start_phis.
+       * tree-complex.c (init_dont_simulate_again, update_phi_components):
+       Ditto.
+       * tree-ssa-loop-manip.c (find_uses_to_rename_bb,
+       verify_loop_closed_ssa, split_loop_exit_edge,
+       tree_transform_and_unroll_loop): Ditto.
+       * tree-scalar-evolution.c (loop_closed_phi_def,
+       analyze_scalar_evolution_for_all_loop_phi_nodes, scev_const_prop):
+       Ditto.
+       * tree-phinodes.c (reserve_phi_args_for_new_edge, remove_phi_args):
+       Ditto.
+       * tree-ssa-copyrename.c (rename_ssa_copies): Ditto.
+       * tree-ssa-ccp.c (ccp_initialize): Ditto.
+       * tree-ssa-loop-ivopts.c (find_bivs, mark_bivs,
+       find_interesting_uses_outside, find_interesting_uses,
+       determine_set_costs): Ditto.
+       * tree-ssa-propagate.c (simulate_block, ssa_prop_init,
+       substitute_and_fold): Ditto.
+       * tree-ssa-alias.c (dump_points_to_info, create_structure_vars): Ditto.
+       * gimple-pretty-print.c (dump_phi_nodes): Ditto.
+       * tree-data-ref.c (stmts_from_loop): Ditto.
+       * tree-ssa-coalesce.c (build_ssa_conflict_graph,
+       create_outofssa_var_map, coalesce_partitions): Ditto.
+       * tree-dfa.c (collect_dfa_stats): Ditto.
+       * tree-cfgcleanup.c (phi_alternatives_equal, remove_forwarder_block,
+       remove_forwarder_block_with_phi, merge_phi_nodes): Ditto.
+       * tree-ssa-live.c (remove_unused_locals, calculate_live_on_exit,
+       verify_live_on_entry): Ditto.
+       * tree-ssa.c (ssa_redirect_edge, flush_pending_stmts, verify_ssa,
+       execute_late_warn_uninitialized, execute_update_addresses_taken):
+       Ditto.
+       * tree-outof-ssa.c (eliminate_build, eliminate_virtual_phis,
+       rewrite_trees, remove_ssa_form, insert_backedge_copies):
+       Ditto.
+       * cfgloop.c (find_subloop_latch_edge_by_ivs): Ditto.
+       * tree-ssa-structalias.c (compute_points_to_sets, ipa_pta_execute):
+       Ditto.
+       * tree-cfg.c (remove_phi_nodes_and_edges_for_unreachable_block,
+       reinstall_phi_args, verify_stmts, gimple_make_forwarder_block,
+       add_phi_args_after_copy_edge, gimple_lv_adjust_loop_header_phi):
+       Ditto.
+
+2008-03-16  Zdenek Dvorak  <ook@ucw.cz>
+
+       * tree-ssa-loop-ivopts.c: Tuplify.
+       * gimple-dummy.c (multiplier_allowed_in_address_p, multiply_by_cost,
+       tree_ssa_iv_optimize): Removed.
+       * tree-ssa-phiopt.c (empty_block_p): Tuplify.
+       * gimple.h (gimple_cond_lhs_ptr, gimple_cond_rhs_ptr): New.
+       * passes.c (init_optimization_passes): Enable pass_iv_optimize.
+
+       * gimplify.c (gimplify_omp_workshare, gimplify_expr): Avoid using
+       uninitialized sequence.
+
+2008-03-13  Bill Maddox  <maddox@google.com>
+
+       * tree.h (fold_call_stmt, gimple_fold_builtin_snprintf_chk):
+       Declare new functions.
+       * builtins.c (fold_builtin_object_size): Disable call to
+       compute_builtin_object_size, which has not been converted.
+       (gimple_rewrite_call_expr, gimple_fold_builtin_sprintf_chk,
+       gimple_fold_builtin_snprintf_chk, gimple_fold_builtin_varargs,
+       fold_call_stmt): New functions.
+       * tree-chrec.h (build_polynomial_chrec): Silence uninitialized
+       variable warning.
+       * tree-ssa-ccp.c (likely_value): Recognize additional cases
+       of constant values.
+       (surely_varying_stmt_p): Note that return statements are not
+       interesting to CCP as they no longer contain assignments.
+       (ccp_fold): Add missing spaces.
+       (valid_gimple_call_p): New function.
+       (get_maxval_strlen): Don't trip over unary operator.
+       (ccp_fold_builtin): Use fold_call_stmt and
+       gimple_fold_builtin_snprintf_chk.  Enable disabled
+       call now that target has been converted for tuples.
+       Add missing spaces.
+       (move_ssa_defining_stmt_for_defs): New function.
+       (update_call_from_tree): New function.
+       (fold_gimple_call): Use update_call_from_tree.
+       (fold_stmt_inplace): Assert that operand folding tree
+       walk goes to completion, i.e., does not return non-null.
+       (optimize_stack_restore, optimize_stdarg_builtin):
+       Convert to tuples.
+       (convert_to_gimple_builtin): Removed.
+       (gimplify_and_update_call_from_tree): New function.
+       Replaces convert_to_gimple_builtin.
+       (execute_fold_all_builtins): Convert to tuples.
+       * tree-ssa-propagate.c (replace_uses_in): Removed
+       replaced_address argument.  Made function static.
+       (replace_vuses_in): Removed replaced_address argument.
+       (substitute_and_fold): Removed replaced_address boolean,
+       which was set but never examined.
+       * tree-ssa-propagate.h (replace_uses_in): Removed declaration.
+       * gimple-iterator.c (update_modified_stmt): Moved to
+       head of file to avoid a forward declaration.
+       (update_modified_stmts): New function.
+       (gsi_insert_seq_before_without_update,
+       gsi_insert_before_without_update,
+       gsi_insert_seq_after_without_update,
+       gsi_insert_after_without_update): New functions.
+       (gsi_insert_before, gsi_insert_seq_before,
+       gsi_insert_after, gsi_insert_seq_after): Call the
+       _without_update variants.
+       * gimplify.c (gimplify_seq_add_stmt, gimplify_seq_add_seq):
+       New functions.
+       (gimple_pop_condition, gimplify_return_expr, gimplify_loop_expr,
+       gimplify_switch_expr, gimplify_case_label_expr,
+       gimplify_self_mod_expr, gimplify_call_expr,
+       gimplify_modify_expr_to_memcpy, gimplify_modify_expr_to_memset,
+       gimplify_init_ctor_eval_range, gimplify_modify_expr_complex_part,
+       gimplify_modify_expr, gimplify_asm_expr, gimplify_cleanup_point_expr,
+       gimple_push_cleanup, gimplify_omp_parallel, gimplify_omp_atomic,
+       gimplify_expr, gimplify_body, gimplify_function_tree): When adding
+       to statement sequences in the gimplifier, do not update operands.
+       * tree-dfa.c (find_new_referenced_vars): Convert to tuples.
+       * tree-flow.h (find_new_referenced_vars): Declare with new signature.
+       * gimple.h (gimple_return_set_retval): Fix argument validation.
+       (gsi_insert_seq_before_without_update,
+       gsi_insert_before_without_update,
+       gsi_insert_seq_after_without_update,
+       gsi_insert_after_without_update): Declare new functions.
+       * gimple.c (gimple_build_return): Rely on gimple_return_set_retval
+       to perform argument validation.
+       * passes.c (init_optimization_passes): Enable pass_fold_builtins.
+
+2008-03-13  Oleg Ryjkov  <olegr@google.com>
+
+       * tree-cfg.c (gimplify_val): Removed.
+       (gimplify_build1, gimplify_build2, gimplify_build3): Use
+       force_gimple_operand_gsi instead of gimplify_val.
+       * tree-complex.c (extract_component): Use force_gimple_operand_gsi
+       instead of gimplify_val.
+       * tree-vect-generic.c (expand_vector_parallel): Ditto.
+
+2008-03-13  Diego Novillo  <dnovillo@google.com>
+           Oleg Ryjkov  <olegr@google.com>
+
+       * tree-ssa-operands.c (get_expr_operands): Handle FILTER_EXPR and
+       EXC_PTR_EXPR.
+
+2008-03-12  Diego Novillo  <dnovillo@google.com>
+           Oleg Ryjkov  <olegr@google.com>
+
+       * cfgexpand.c (gimple_to_tree): Record the correct type when
+       converting GIMPLE_CALL.
+
+2008-03-12  Zdenek Dvorak  <ook@ucw.cz>
+
+       * tree-ssa-loop-im.c (stmt_cost, rewrite_bittest,
+       determine_invariantness_stmt, gather_mem_refs_stmt):
+       Use gimple_assign_rhs_code.
+       * cfgexpand.c (gimple_to_tree): Ditto.
+       * tree-inline.c (estimate_num_insns): Ditto.
+       * tree-vect-generic.c (expand_vector_operations_1): Ditto.
+       * tree-ssa-ccp.c (likely_value, ccp_fold, evaluate_stmt,
+       fold_gimple_assign): Ditto.
+       * gimple.c (gimple_fold, gimple_assign_set_rhs_with_ops): Ditto.
+       * tree-ssa-structalias.c (handle_ptr_arith): Ditto.
+       * value-prof.c (gimple_divmod_fixed_value_transform): Ditto.
+       * tree-ssa-loop-ch.c (copy_loop_headers): Ditto.
+       * tree-ssa-propagate.c (stmt_makes_single_load,
+       substitute_and_fold): Ditto.
+       * tree-ssa-loop-niter.c (chain_of_csts_start): Exclude memory loads.
+       (get_val_for): Assert that the statement is an assignment.
+       (derive_constant_upper_bound_assign,
+       expand_simple_operations): Use gimple_assign_rhs_code.
+       * tree-ssa-loop-manip.c (create_iv, ip_normal_pos,
+       standard_iv_increment_position, determine_exit_conditions,
+       tree_transform_and_unroll_loop): Tuplify.
+       * tree-scalar-evolution.c (interpret_expr): Fail for chrecs.
+       (interpret_gimple_assign, follow_ssa_edge_in_rhs): Use
+       gimple_assign_rhs_code.
+       * tree-gimple.c (gimple_assign_rhs_code): New function.
+       * tree-gimple.h (gimple_assign_rhs_code): Declare.
+       * tree-ssa-loop-ivopts.c (single_dom_exit): Enable.
+       * gimple-dummy.c (compute_data_dependences_for_loop, dump_ddrs,
+       free_data_refs, free_dependence_relations,
+       gimple_duplicate_loop_to_header_edge, tree_ssa_prefetch_arrays,
+       estimated_loop_iterations_int): Removed.
+       * tree-ssa-loop-ivcanon.c (tree_num_loop_insns): Tuplify.
+       * predict.c, tree-data-ref.c, tree-ssa-loop-prefetch.c: Tuplify.
+       * tree-data-ref.h (struct data_reference, struct rdg_vertex): Change
+       the type of stmt to gimple.
+       (get_references_in_stmt, create_data_ref, rdg_vertex_for_stmt,
+       stores_from_loop, remove_similar_memory_refs,
+       have_similar_memory_accesses): Declaration changed.
+       * gimple-iterator.c (gsi_insert_seq_on_edge_immediate): New.
+       * gimple-pretty-print.c (dump_gimple_assign): Dump nontemporal
+       move.  Use gimple_assign_rhs_code.
+       (dump_unary_rhs, dump_binary_rhs): Use gimple_assign_rhs_code.
+       * gimplify.c (gimplify_modify_expr): Set lhs of the assignment to
+       the new SSA name.
+       * tree-ssa-coalesce.c (build_ssa_conflict_graph,
+       create_outofssa_var_map): Use gimple_assign_copy_p.
+       * tree-predcom.c (mark_virtual_ops_for_renaming): Enable.
+       * tree-inline.c (estimate_num_insns): Use gimple_assign_rhs_code.
+       * tree-flow.h (mark_virtual_ops_for_renaming): Declaration changed.
+       * gimple.h (struct gimple_statement_base): Change unused_4 flag
+       to nontemporal_move flag.
+       (gimple_assign_nontemporal_move_p, gimple_assign_set_nontemporal_move):
+       New functions.
+       (gsi_insert_seq_on_edge_immediate): Declare.
+       (gimple_assign_rhs2): Return NULL if the statement does not have two
+       operands.
+       (gimple_assign_subcode): Removed.
+       * tree-cfg.c (verify_types_in_gimple_assign): Use
+       gimple_assign_rhs_code.
+       (gimple_lv_adjust_loop_header_phi, gimple_lv_add_condition_to_bb):
+       Tuplify.
+       (gimple_cfg_hooks): Enable lv_add_condition_to_bb and
+       lv_adjust_loop_header_phi hooks.
+       * passes.c (init_optimization_passes): Enable pass_profile,
+       pass_check_data_deps and pass_loop_prefetch.
+
+2008-03-11  Diego Novillo <dnovillo@google.com>
+
+       * tree-ssa-operands.h: Added declaration of add_to_addressable_set.
+       * tree-ssa-operands.c (add_to_addressable_set): New function.
+       (gimple_add_to_addresses_taken): Moved most of the logic to
+       add_to_addressable_set.
+       * tree-ssa-structalias.c (update_alias_info): Record the variables
+       whose address is taken inside a phi node.
+
+2008-03-11  Aldy Hernandez  <aldyh@redhat.com>
+
+       * tree-phinodes.c (allocate_phi_node): Update for tuples.
+       * gimplify.c (gimplify_function_tree): Dump memory stats.
+       * gimple.c:  Declare gimple_alloc_counts, gimple_alloc_sizes,
+       and gimple_alloc_kind_names.
+       (gimple_alloc): Gather statistics for tuples.
+       (gimple_build_asm_1): Same.
+       (gimple_seq_alloc): Same.
+       (dump_gimple_statistics): New.
+       * gimple.h: Define gimple_alloc_kind.
+       (gimple_alloc_kind): New.
+       (dump_gimple_statistics): Protoize.
+       * tree-ssa-copy.c (replace_exp_1): Mark for_propagation as unused
+       (merged from mainline).
+
+2008-03-11  Diego Novillo  <dnovillo@google.com>
+
+       Merge with mainline @133081.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-03-06  Diego Novillo  <dnovillo@google.com>
+
+       Merge with mainline @132948.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-03-06  Diego Novillo  <dnovillo@google.com>
+
+       * config/rs6000/rs6000.c (rs6000_alloc_sdmode_stack_slot):
+       Call walk_gimple_op.
+       * tree-complex.c (expand_complex_div_wide): Call
+       split_block with STMT.
+
+2008-03-06  Diego Novillo  <dnovillo@google.com>
+
+       * gimple.h (struct gimple_statement_base): Rename field
+       'locus' to 'location'.  Update all users.
+       (gimple_location): Rename from gimple_locus.  Update all
+       users.
+       (gimple_set_location): Rename from gimple_set_locus.
+       Update all users.
+       (gimple_has_location): Rename from gimple_location_empty_p.
+       Change sense of return value.  Update all users.
+       * gimplify.c (tree_annotate_all_with_location): Rename
+       from tree_annotate_all_with_locus.
+       (annotate_all_with_location): Rename from
+       annotate_all_with_locus.
+
+2008-03-05  Diego Novillo  <dnovillo@google.com>
+
+       * gimple.c (gimple_set_lhs): Remove return keywords.
+
+2008-03-05  David Daney  <ddaney@avtrex.com>
+
+       * builtins.c (expand_builtin___clear_cache): Disable call to
+       validate_arglist.
+       * config/mips/mips.c (mips_gimplify_va_arg_expr): Tuplify.
+
+2008-03-05  Bill Maddox  <maddox@google.com>
+
+       * tree-ssa-ccp.c (dump_lattice_value, debug_lattice_value):
+       Re-enable functions #if'd out.
+       (test_default_value, likely_value, surely_varying_stmt_p,
+       ccp_initialize, ccp_visit_phi_node, ccp_fold, evaluate_stmt,
+       visit_assignment, visit_cond_stmt, ccp_visit_stmt):
+       Convert to tuples.
+       (fold_gimple_call):  Don't trip over call that simplifies to
+       another call, not a constant.
+       * tree-ssa-propagate.c (ssa_prop_init): Initialize in-worklist
+       flag for phi nodes as well as statements.
+       (valid_gimple_expression_p): Add fixme comment to remove this
+       function.  It currently has static uses, but asserts when called.
+       (stmt_makes_single_load, stmt_makes_single_store):
+       Convert to tuples.
+       (replace_phi_args_in): Convert to tuples.  Fix typo in comment.
+       * gimple.c (gimple_copy_no_def_use, gimple_get_lhs,
+       gimple_set_lhs): New functions.
+       * gimple.h (gimple_copy_no_def_use, gimple_get_lhs,
+       gimple_set_lhs): Declare new functions.
+       * tree-cfg.c (replace_uses_by): Add comment regarding empty
+       operands.
+       * passes.c (init_optimization_passes): Enable pass_ccp and
+       pass_store_ccp.
+
+2008-03-05  David Daney  <ddaney@avtrex.com>
+
+       * tree-phinodes.c (allocate_phi_node): Fix syntax errors from
+       previous commit.
+
+2008-03-05  Rafael Espindola  <espindola@google.com>
+
+       * tree-phinodes.c (allocate_phi_node): Free free_phinodes[bucket]
+       if empty.
+
+2008-03-05  Diego Novillo  <dnovillo@google.com>
+
+       http://gcc.gnu.org/ml/gcc/2008-03/msg00256.html
+
+       * fold-const.c (tree_binary_nonnegative_warnv_p): Fix
+       invalid C90 declaration.
+       (tree_single_nonnegative_warnv_p): Likewise.
+       * gimplify.c (gimplify_bind_expr): Likewise.
+       (gimplify_return_expr): Likewise.
+
+2008-03-04  Zdenek Dvorak  <ook@ucw.cz>
+
+       * tree-ssa-loop-niter.c, tree-scalar-evolution.c: Tuplified.
+       * tree-ssa-loop-manip.c (split_loop_exit_edge): Ditto.
+       * tree-chrec.c (chrec_fold_plus, chrec_apply, chrec_convert_1,
+       convert_affine_scev, chrec_convert_rhs, chrec_convert,
+       chrec_convert_aggressive): Pass statements as gimple.
+       * tree-scalar-evolution.h (get_loop_exit_condition, simple_iv):
+       Type changed.
+       * tree-chrec.h (chrec_convert, chrec_convert_rhs): Ditto.
+       * tree-ssa-loop-ivopts.c (abnormal_ssa_name_p,
+       idx_contains_abnormal_ssa_name_p, contains_abnormal_ssa_name_p,
+       expr_invariant_in_loop_p, tree_int_cst_sign_bit): Enabled.
+       * gimple-dummy.c (chrec_dont_know, chrec_known, chrec_not_analyzed_yet,
+       analyze_scalar_evolution, chrec_contains_symbols_defined_in_loop,
+       estimate_numbers_of_iterations, expr_invariant_in_loop_p,
+       free_numbers_of_iterations_estimates,
+       free_numbers_of_iterations_estimates_loop, get_loop_exit_condition,
+       instantiate_parameters, nowrap_type_p, scev_const_prop, scev_finalize,
+       scev_initialize, scev_probably_wraps_p, scev_reset,
+       tree_int_cst_sign_bit, number_of_iterations_exit, loop_niter_by_eval,
+       substitute_in_loop_info): Removed.
+       * tree-ssa-loop.c (tree_loop_optimizer_init): Merged into...
+       (tree_ssa_loop_init): ... here.  Enable scev_initialize call.
+       (tree_ssa_loop_done): Enable scev finalization.
+       * gimple-iterator.c (gsi_insert_seq_nodes_before): Allow inserting
+       at the end of basic block.
+       (gsi_for_stmt): Handle phi nodes.
+       * cfgloop.h (struct nb_iter_bound): Change type of stmt field to gimple.
+       * tree-flow.h (scev_probably_wraps_p, convert_affine_scev,
+       stmt_dominates_stmt_p): Types changed.
+       * Makefile.in (gt-tree-scalar-evolution.h): Enable.
+       * gimple.c (extract_ops_from_tree): Export.
+       (gimple_copy): Do not share bitmaps.
+       * gimple.h (extract_ops_from_tree): Declare.
+       (gimple_call_set_lhs): Allow lhs to be NULL.
+       * tree-cfg.c (add_phi_args_after_copy_edge,
+       add_phi_args_after_copy_bb): Tuplify.
+       * passes.c (init_optimization_passes): Enable pass_ch, pass_scev_cprop.
+
+2008-03-04  Oleg Ryjkov  <olegr@google.com>
+
+       * tree-ssa-dse.c (execute_simple_dse): Commented out.
+       * passes.c (init_optimization_passes): Disabled pass_simple_dse.
+
+2008-03-04  Bill Maddox  <maddox@google.com>
+
+       * tree-cfg.c (remove_useless_stmts_1):
+       Correctly distinguish try-catch and try-finally.
+
+2008-03-04  Oleg Ryjkov  <olegr@google.com>
+
+       * tree-ssa-dse.c (execute_simple_dse): Tuplified.
+       * gimplify.c (gimplify_return_expr): Copy the NO_WARNING flag
+       to the newly created expr from the tree.
+       * tree-cfg.c (gimplify_build1): Tuplified.
+       * passes.c (init_optimization_passes): Enabled
+       pass_warn_function_return, pass_update_address_taken,
+       pass_simple_dse and pass_build_alias passes.
+
+2008-03-04  Rafael Espindola  <espindola@google.com>
+
+       * fold-const.c (tree_simple_nonnegative_warnv_p): New.
+       (tree_unary_nonnegative_warnv_p): New.
+       (tree_binary_nonnegative_warnv_p): New.
+       (tree_single_nonnegative_warnv_p): New.
+       (tree_invalid_nonnegative_warnv_p): New.
+       (tree_expr_nonnegative_warnv_p): Redefine in term of the new functions.
+
+2008-03-04  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimple.c (walk_gimple_seq): Do not set wi->gsi.
+       (walk_gimple_stmt): Set wi->gsi.
+       * tree-cfg.c (verify_stmt): Use walk_gimple_op instead of
+       walk_gimple_stmt.
+       (verify_stmts): Same.
+
+2008-03-04  Diego Novillo  <dnovillo@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-03/msg00219.html
+
+       * expr.c (expand_expr_real): Call lookup_expr_eh_region.
+       * tree-eh.c (lookup_stmt_eh_region_fn):
+       (lookup_stmt_eh_region): Fix comment.
+       (lookup_expr_eh_region): Handle missing cfun and missing
+       EH table.
+       (record_in_finally_tree): Fix comment.
+       (collect_finally_tree_1): Remove handler for
+       GIMPLE_SWITCH.
+       (maybe_record_in_goto_queue): Remove local variable
+       NEW_IS_LABEL.
+       Record GIMPLE_GOTOs instead of their label.
+       (verify_norecord_switch_expr): Retrieve the CASE_LABEL
+       from the case label expression.
+       (do_return_redirection): Change sense of assertion.
+       (lower_try_finally_onedest): Assert that
+       TF->GOTO_QUEUE[0] contains a GIMPLE statement.
+       (lower_try_finally_copy): Assert that Q contains a GIMPLE
+       statement.
+       (lower_try_finally_switch): Build a new GIMPLE label for
+       CONT_STMT.
+       (mark_eh_edge): Tuplify.
+       (verify_eh_edges): Tuplify.
+       (tree_can_throw_external): Remove unused function.
+       (optimize_double_finally): Remove #if 0.
+       * gimple-pretty-print.c (GIMPLE_NIY): Tidy.
+       (dump_gimple_resx): Fix format string for
+       dump_gimple_fmt.
+       * gimplify.c (gimplify_cleanup_point_expr): Initialize
+       BODY_SEQUENCE.
+       * calls.c (emit_call_1): Remove ATTRIBUTE_UNUSED markers.
+       * cfgexpand.c (gimple_to_tree) <GIMPLE_NOP>: Assign new
+       expression to T.
+       <GIMPLE_RESX>: Handle.
+       Always assign the value from lookup_stmt_eh_region to
+       ANN->RN.
+       * tree-cfg.c (start_recording_case_labels, recording_case_labels_p):
+       Re-enable.
+       (get_cases_for_edge): Likewise.
+       (gimple_verify_flow_info): Re-enable call to
+       verify_eh_edges.
+       (gimple_redirect_edge_and_branch): Re-enable handling of
+       GIMPLE_SWITCH.
+       (gimple_block_ends_with_call_p): Tuplify.
+       (struct gimple_cfg_hooks): Enable block_ends_with_call_p
+       callback.
+
+2008-03-04  Rafael Espindola  <espindola@google.com>
+
+       * fold-const.c (tree_unary_nonzero_warnv_p): New.
+       (tree_binary_nonzero_warnv_p): New.
+       (tree_single_nonzero_warnv_p): New.
+       (tree_expr_nonzero_warnv_p): Redefine using the new functions.
+
+2008-03-04  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimple.c (walk_gimple_op): Update comment.
+       (walk_gimple_stmt): Same.
+
+2008-03-03  Aldy Hernandez  <aldyh@redhat.com>
+
+       * cgraphbuild.c (build_cgraph_edges): Use walk_gimple_op instead of
+       walk_gimple_stmt.
+       * tree-ssa-alias-warnings.c (find_references_in_function): Same.
+       * tree-ssa-ccp.c (fold_stmt): Change walk_gimple_stmt call to
+       walk_gimple_op.
+       * tree-nrv.c (tree_nrv): Same.
+       * tree-ssa-alias.c (count_uses_and_derefs): Same.
+       * cfgexpand.c (discover_nonconstant_array_refs_r): Same.
+       * tree-nested.c (convert_nonlocal_reference_stmt): Make first
+       argument a GSI.
+       (convert_local_reference_op): Same.
+       (convert_nl_goto_reference): Same.
+       (convert_tramp_reference_stmt): Same.
+       (convert_gimple_call): Same.
+       * tree-inline.c (inline_forbidden_p_stmt): Same.
+       * tree-ssa.c (execute_early_warn_uninitialized): Change
+       walk_gimple_stmt call to walk_gimple_op.
+       * gimple.c (walk_gimple_seq): Pass GSI to walk_gimple_stmt.
+       (walk_gimple_stmt): Move operand walking code to...
+       (walk_gimple_op): ...here.
+       (walk_gimple_stmt): First argument is now a GSI.
+       * gimple.h: Change walk_stmt_fn argument to a GSI.
+       (walk_gimple_stmt): Make first argument a GSI.
+       (walk_gimple_op): New prototype.
+       * tree-cfg.c (verify_stmt): Change argument to a GSI.  Adjust
+       accordingly.
+
+2008-02-29  Zdenek Dvorak  <ook@ucw.cz>
+
+       * tree-ssa-alias.c (is_escape_site): Detect pure/const functions
+       correctly.
+
+2008-02-28  Zdenek Dvorak  <ook@ucw.cz>
+
+       * tree-ssa-loop-im.c: Tuplify.
+       * tree-ssa-loop-manip.c (add_exit_phis_edge, find_uses_to_rename_stmt,
+       find_uses_to_rename_bb, check_loop_closed_ssa_use,
+       check_loop_closed_ssa_stmt, verify_loop_closed_ssa): Ditto.
+       * gimple-dummy.c (rewrite_into_loop_closed_ssa, tree_ssa_lim,
+       verify_loop_closed_ssa, replace_exp): Removed.
+       * tree-ssa-loop.c (tree_ssa_loop_init, tree_ssa_loop_done): Comment
+       out scev initialization and finalization.
+       * gimple-iterator.c (gsi_remove): Rename remove_eh_info to
+       remove_permanently.  Do not free operands if remove_permanently
+       is false.
+       (gimple_find_edge_insert_loc): Use gsi_last_bb.
+       * tree-eh.c (operation_could_trap_p): Factored out of ...
+       (tree_could_trap_p): ... here.
+       * tree-ssa-copy.c (replace_exp): Enable.
+       * tree-flow.h (movement_possibility): Declaration changed.
+       (operation_could_trap_p): Declare.
+       * Makefile.in (tree-ssa-loop-im.o): Add pointer-set.h dependency.
+       (gimple.o): Add FLAGS_H dependency.
+       * gimple.c: Include flags.h.
+       (gimple_could_trap_p): New function.
+       * gimple.h (gimple_could_trap_p): Declare.
+       * tree-cfg.c (replace_uses_by): Check that op is not null.
+       * passes.c (init_optimization_passes): Enable pass_lim.
+
+2008-02-28  Rafael Espindola  <espindola@google.com>
+
+       * tree-outof-ssa.c (insert_backedge_copies): Don't build
+       uninitialized assignment.
+
+2008-02-28  Rafael Espindola  <espindola@google.com>
+
+       * tree-dfa.c (dump_dfa_stats): Cast dfa_stats.max_num_phi_args to long.
+
+2008-02-26  Diego Novillo  <dnovillo@google.com>
+
+       * tree-ssa-copyrename.c (rename_ssa_copies): Do not mix
+       declarations with code.
+       * tree-ssa-coalesce.c (create_outofssa_var_map): Likewise.
+       * tree-dfa.c (dump_dfa_stats): Use %ld for
+       dfa_stats.max_num_phi_args.
+
+2008-02-26  Bill Maddox  <maddox@google.com>
+
+       * tree-ssa-ccp.c (maybe_fold_stmt_addition):
+       Reinstated this function for tuples as-is.
+       (valid_gimple_rhs_p): New function.  Mostly lifted from
+       valid_gimple_expression_p, which is likely obsolete.
+       (fold_stmt_r): Reinstated commented-out cases for
+       tuples. Replaced call to obsolete function set_rhs.
+       (get_maxval_strlen): Convert to tuples.
+       (ccp_fold_builtin): Partial conversion to tuples.
+       (fold_gimple_assign): New function.
+       (fold_gimple_cond): New function.
+       (fold_gimple_call): New function.
+       (fold_stmt): Convert to tuples.
+       (fold_stmt_inplace): Convert to tuples.
+       * tree-ssa-propagate.c (substitute_and_fold):
+       Update call to fold_stmt for revised argument signature.
+       * gimple-dummy.c (fold_stmt): Removed dummy definition.
+       * gimplify.c (gimplify_call_expr): Removed obsolete
+       manipulation of TREE_NOTHROW flag.
+       * cfgexpand.c (gimple_to_tree): Set nothrow flag
+       of call expression based on call statement flags.
+       Handle GIMPLE_NOP statement.
+       * tree-flow.h (notice_special_calls, fold_stmt):
+       Update prototypes for tuples.
+       * gimple.c (gimple_cond_set_condition_from_tree):
+       New function.
+       (gimple_seq_has_side_effects): New function.
+       * gimple.h (gimple_cond_set_condition_from_tree,
+       gimple_seq_has_side_effects): New prototypes.
+       (gimple_call_nothrow_p): New function.
+       (gsi_stmt_ptr): Add comment regarding usage of this
+       function vs. gsi_replace.
+       * tree-cfg.c (struct rus_data): Convert to tuples.
+       (remove_useless_stmts_1, remove_useless_stmts_warn_notreached,
+       remove_useless_stmts_cond, remove_useless_stmts_tf,
+       remove_useless_stmts_tc, remove_useless_stmts_goto,
+       remove_useless_stmts_label, notice_special_calls,
+       remove_useless_stmts): Convert to tuples.
+       (update_call_expr_flags): Removed.
+       * passes.c (init_optimization_passes): Enable
+       pass_remove_useless_stmts.
+
+2008-02-25  Bill Maddox  <maddox@google.com>
+
+       * tree-into-ssa.c (rewrite_initialize_block):
+       Avoid declaration following statement in block.
+       * tree-nrv.c (tree_nrv):
+       Avoid declaration following statement in block.
+       * tree-eh.c (collect_finally_tree): Fix typo in comment.
+       Avoid cast to union type.
+       (replace_goto_queue_cond_clause, replace_goto_queue_1,
+       maybe_record_in_goto_queue, verify_norecord_switch_expr,
+       lower_try_finally_fallthru_label): Avoid cast to union type.
+       * fortran/Make-lang.in: Add -Wno-missing-format-attribute
+       to fortran-warn options to avoid conflict with -Wno-format.
+       * gimplify.c (gimplify_switch_expr): Change assignment
+       to initialization.
+       (gimplify_case_label_expr): Avoid declaration following
+       statement in block.
+       * tree-ssa-coalesce.c (create_outofssa_var_map): Avoid
+       declaration following statement in block.
+       * tree-ssa.c (ssa_redirect_edge, flush_pending_stmts):
+       Avoid declaration following statements in block.
+       * Makefile.in: Add -Wno-missing-format-attribute
+       to builtins.o-warn and expr.o-warn options to avoid conflict
+       with -Wno-format.  Removed fortran/*-warn options, -Wno-format
+       and -Wno-uninitialized, which belong in fortran/Make-lang.in,
+       and are applied to all fortran files there.
+
+2008-02-25  Oleg Ryjkov  <olegr@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-02/msg01236.html
+
+       * tree-ssa-copyrename.c: Include gimple.h.  Tuplify.
+       * tree-ssa.c (warn_uninit): Pass &LOCUS to warning().
+       * passes.c (init_optimization_passes): Enable
+       pass_early_warn_uninitialized, pass_rebuild_cgraph_edges
+       and pass_early_inline.
+
+2008-02-25  Diego Novillo  <dnovillo@google.com>
+
+       * Makefile.in (tree-ssa-copyrename.o): Add dependency on
+       gimple.h.
+
+2008-02-25  Diego Novillo  <dnovillo@google.com>
+
+       * tree-cfgcleanup.c (tree_forwarder_block_p): Revert
+       previous change.
+
+2008-02-25  Diego Novillo  <dnovillo@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-02/msg01217.html
+
+       * tree-ssa-alias-warnings.c: Tuplify.
+       * cgraphunit.c (cgraph_process_new_functions): Remove
+       calls to gimple_unreachable.
+       (cgraph_expand_function): Likewise.
+       * omp-low.c (execute_expand_omp): Return 0 after call to
+       gimple_unreachable.
+       (execute_lower_omp): Likewise.
+       * tree-scalar-evolution.c (scev_const_prop): Likewise.
+       * tree-ssa-dse.c (execute_simple_dse): Likewise.
+       * tree-ssa-dom.c (tree_ssa_dominator_optimize): Likewise.
+       (eliminate_degenerate_phis): Likewise.
+       * tree-ssa-propagate.c (get_rhs): Likewise.
+       (valid_gimple_expression_p): Likewise.
+       (set_rhs): Likewise.
+       (fold_predicate_in): Likewise.
+       * tree-nrv.c (execute_return_slot_opt): Likewise.
+       * predict.c (tree_estimate_probability): Likewise.
+       * tree-parloops.c (parallelize_loops): Likewise.
+       * tree-if-conv.c (main_tree_if_conversion): Likewise.
+       * tree-ssa-phiopt.c (tree_ssa_phiopt): Likewise.
+       (tree_ssa_cs_elim): Likewise.
+       * tree-sra.c (tree_sra): Likewise.
+       (tree_sra_early): Likewise.
+       * tree-predcom.c (tree_predictive_commoning): Likewise.
+       * tree-ssa-copy.c (execute_copy_prop): Likewise.
+       * lambda-code.c (gcc_loop_to_lambda_loop): Likewise.
+       (perfect_nest_p): Likewise.
+       (can_convert_to_perfect_nest): Likewise.
+       (perfect_nestify): Likewise.
+       * tree-optimize.c (execute_fixup_cfg): Likewise.
+       * tree-object-size.c (compute_object_sizes): Likewise.
+       * tree-cfg.c (remove_useless_stmts): Likewise.
+       (gimple_duplicate_sese_tail): Likewise.
+       (move_sese_region_to_fn): Likewise.
+       (gimple_purge_dead_abnormal_call_edges): Likewise.
+       (execute_warn_function_return): Likewise.
+       * value-prof.c (gimple_histogram_value): Remove calls to
+       gimple_unreachable.
+       (gimple_stringop_fixed_value): Likewise.
+       * tree-flow-inline.h (op_iter_next_use): Likewise.
+       * tree-dfa.c (dump_variable): Likewise.
+       * tree-ssa-copy.c (may_propagate_copy): Likewise.
+       (may_propagate_copy_into_asm): Likewise.
+       (merge_alias_info): Likewise.
+       (replace_exp_1): Likewise.
+       * tree-ssa.c (delete_tree_ssa): Likewise.
+       * tree-cfg.c (make_edges): Likewise.
+       * passes.c (finish_optimization_passes): Likewise.
+       (init_optimization_passes): Likewise.
+       * tree-ssa-operands.c (add_call_clobber_ops): Likewise.
+       * tree-eh.c (lookup_expr_eh_region): New.
+       (tree_can_throw_external): Return false after call to
+       gimple_unreachable.
+       (maybe_clean_or_replace_eh_stmt): Re-enable.
+       Call stmt_could_throw_p.
+       * expr.c (expand_expr_real): Call lookup_expr_eh_region.
+       * profile.c: Tuplify.
+       * calls.c: Include tree-flow.h.
+       (emit_call_1): Call lookup_expr_eh_region.
+       (expand_call): Likewise.
+       * cfgexpand.c (gimple_to_tree): Call SET_EXPR_LOCATION on
+       generated expression tree T.
+       Set EH region number on T's annotation.
+       * common.opt (fgimple-conversion=): Add RejectNegative
+       and Joined attributes.
+       * tree-inline.c (unsave_r): Abort if *TP is a
+       STATEMENT_LIST.
+       (unsave_expr_now): Re-enable.
+       * tree-flow.h (struct tree_ann_common_d): Add field RN.
+       * Makefile.in (builtins.o-warn): Add -Wno-format.
+       (expr.o-warn): Likewise.
+       (fortran/check.o-warn): Likewise.
+       (fortran/interface.o-warn): Likewise.
+       (fortran/resolve.o-warn): Likewise.
+       (fortran/simplify.o-warn): Likewise.
+       (fortran/target-memory.o-warn): Likewise.
+       (calls.o): Add dependency on tree-flow.h.
+       * gimple.c (gimple_build_asm_1): Call ggc_alloc_string to
+       copy STRING.
+       * gimple.h (gimple_filename): New.
+       (gimple_lineno): New.
+       * passes.c (init_optimization_passes): Disable
+       pass_remove_useless_stmts pass_mudflap_1,
+       pass_warn_function_return and pass_lower_omp.
+       * config/rs6000/rs6000.c (rs6000_gimplify_va_arg): Tuplify.
+       (rs6000_alloc_sdmode_stack_slot): Likewise.
+       * tree-cfgcleanup.c (tree_forwarder_block_p): Fix test
+       for PHI_WANTED.
+
+2008-02-25  Rafael Espindola  <espindola@google.com>
+
+       * tree-cfgcleanup.c (tree_forwarder_block_p): Fix thinko.
+
+2008-02-24  Rafael Espindola  <espindola@google.com>
+
+       * gimple-iterator.c (gsi_split_seq_before): Don't abort if at the
+       beginning.
+
+2008-02-23  Diego Novillo  <dnovillo@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-02/msg01025.html
+
+       * gimplify.c (gimplify_one_sizepos): Call gimplify_expr with
+       STMT_P.
+
+2008-02-22  Zdenek Dvorak  <ook@ucw.cz>
+
+       * gimple.c (gimple_assign_copy_p): Do not consider unary operations
+       to be copies.
+
+2008-02-22  Diego Novillo  <dnovillo@google.com>
+
+       * Makefile.in (fortran/parse.o-warn,
+       fortran/simplify.o-warn): New.
+
+2008-02-22  Diego Novillo  <dnovillo@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-02/msg00961.html
+
+       Revert
+
+       2008-02-20  Zdenek Dvorak  <ook@ucw.cz>
+
+               * tree-tailcall.c: Tuplify.
+               * gimplify.c (force_gimple_operand): Remove ATTRIBUTE_UNUSED.
+               * gimple.c (gimple_assign_copy_p): Do not
+               consider unary operations to be copies.
+               (copy_or_nop_cast_stmt_rhs): New.
+               * gimple.h (copy_or_nop_cast_stmt_rhs): Declare.
+
+2008-02-21  Rafael Espindola  <espindola@google.com>
+
+       * gimple.h (gimple_cond_true_p): Define.
+       (gimple_cond_false_p): Define.
+       (gimple_cond_single_var_p): Define.
+
+2008-02-21  Rafael Espindola  <espindola@google.com>
+
+       * tree-tailcall.c (adjust_accumulator_values): Check that a phi was
+       found.
+
+2008-02-21  Rafael Espindola  <espindola@google.com>
+
+       * tree-tailcall.c (adjust_accumulator_values): Initialize phi.
+
+2008-02-20  Zdenek Dvorak  <ook@ucw.cz>
+
+       * tree-tailcall.c: Tuplify.
+       * gimplify.c (force_gimple_operand): Remove ATTRIBUTE_UNUSED.
+       * gimple.c (gimple_assign_copy_p): Do not consider unary operations
+       to be copies.
+       (copy_or_nop_cast_stmt_rhs): New.
+       * gimple.h (copy_or_nop_cast_stmt_rhs): Declare.
+
+2008-02-20  Oleg Ryjkov  <olegr@google.com>
+
+       * gimplify.c (gimplify_expr): Save locus into the right
+       sequence.
+
+2008-02-20  Rafael Espindola  <espindola@google.com>
+
+       * passes.c (init_optimization_passes): Enable pass_mark_used_blocks
+       and pass_cleanup_cfg_post_optimizing.
+       * tree-optimize.c (execute_cleanup_cfg_post_optimizing): Call
+       cleanup_tree_cfg.
+
+2008-02-20  Rafael Espindola  <espindola@google.com>
+
+       * passes.c (init_optimization_passes): Enable pass_nrv.
+       * tree-nrv.c (finalize_nrv_r): Data is now a walk_stmt_info.
+       (tree_nrv): Port to gimple.
+
+2008-02-19  Oleg Ryjkov  <olegr@google.com>
+
+       * gimple-pretty-print.c: Introduce support for TDF_RAW
+       flag.
+       (dump_gimple_fmt): New function.
+       (dump_gimple_assign, dump_gimple_return): Update to print
+       RAW gimple.
+       (dump_gimple_call_args): New function.
+       (dump_gimple_call, dump_gimple_switch, dump_gimple_cond,
+       dump_gimple_label, dump_gimple_bind, dump_gimple_try):
+       Update to print RAW gimple.
+       (dump_gimple_catch, dump_gimple_eh_filter,
+       dump_gimple_resx): New functions.
+       (dump_gimple_omp_for, dump_gimple_asm, dump_gimple_phi):
+       Update to print RAW gimple.
+       (dump_gimple_omp_parallel, dump_gimple_omp_atomic_load,
+       dump_gimple_omp_atomic_store): New, code moved from 
+       dump_gimple_stmt, then added support to print RAW gimple.
+       (dump_gimple_stmt): Some code refactored into helper
+       subroutines.
+       * tree-cfg.c (dump_function_to_file): Print RAW gimple when
+       TDF_RAW is set.
+
+2008-02-19  Diego Novillo  <dnovillo@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-02/msg00793.html
+
+       * tree-complex.c (expand_complex_div_wide): Call gsi_bb.
+       * tree.h (std_gimplify_va_arg_expr): Change gimple_seq
+       arguments to gimple_seq *.
+       Update all users.
+       (gimplify_parameters): Change return type to gimple_seq.
+       Update all users.
+       * target.h (struct gcc_target)<gimplify_va_arg_expr>:
+       Change gimple_seq arguments to gimple_seq *.
+       Update all users.
+       * tree-phinodes.c (free_phinodes): Convert to VEC.
+       Update all users.
+       * omp-low.c (lower_rec_input_clauses): Change gimple_seq
+       arguments to gimple_seq *.  Update all users.
+       (lower_reduction_clauses): Convert sub_list to
+       gimple_seq.
+       (lower_regimplify): Convert PRE to gimple_seq.  Call
+       gimple_seq_insert_before instead of tsi_link_before.
+       * tree-gimple.h (get_initialized_tmp_var,
+       get_formal_tmp_var, gimplify_expr, gimplify_type_sizes,
+       gimplify_one_sizepos, gimplify_stmt, gimplify_and_add,
+       gimplify_va_arg_expr): Change gimple_seq arguments to
+       gimple_seq *.  Update all users.
+       * gimple-iterator.c: Include value-prof.h.
+       (gsi_link_seq_before): Remove.  Update all users.
+       (gsi_link_seq_after): Remove.  Update all users.
+       (gsi_link_after): Remove.  Update all users.
+       (gsi_link_before): Remove.  Update all users.
+       (update_bb_for_stmts): New.
+       (gsi_insert_seq_nodes_before): New.
+       (gsi_insert_seq_nodes_after): New.
+       (gsi_insert_seq_before): Re-write.  Call
+       gsi_insert_seq_nodes_before.
+       (gsi_insert_seq_after): Re-write.  Call
+       gsi_insert_seq_nodes_after.
+       (gsi_replace): Re-enable EH updating.
+       (update_modified_stmt): Move earlier in the file.
+       (gsi_insert_after): Re-write.  Call
+       gsi_insert_seq_nodes_after.
+       (gsi_insert_before): Re-write.  Call
+       gsi_insert_seq_nodes_before.
+       (gsi_remove): Move from gimple.h.  Re-write.
+       * langhooks.h (struct lang_hooks): Change gimple_seq
+       arguments for gimplify_expr to gimple_seq *.
+       Update all users.
+       * coretypes.h (struct gimple_seq_d): Rename from struct
+       gimple_sequence.  Update all users.
+       (struct gimple_seq_node_d): New.
+       (gimple_seq_node): New.
+       (const_gimple_seq_node): New.
+       * tree-flow.h (force_gimple_operand): Change gimple_seq
+       argument to gimple_seq *.  Update all users.
+       * c-common.h (c_gimplify_expr): Change gimple_seq
+       argument to gimple_seq *.  Update all users.
+       * Makefile.in (build):
+       * gimple.c (gimple_seq_cache): New.
+       (gimple_seq_alloc): Take sequences from gimple_seq_cache,
+       if possible.
+       (gimple_seq_free): New.
+       (gimple_seq_add_stmt): Rename from gimple_seq_add.
+       Change gimple_seq argument to gimple_seq *.  Update all users.
+       (gimple_seq_add_seq): Rename from gimple_seq_append.
+       Update all users.
+       (gimple_remove): Remove.  Update all users.
+       (gimple_seq_reverse): Remove unused function.
+       (gimple_set_bb): Only update block-to-labels map if
+       CFUN->CFG exists.
+       * gimple.h (struct gimple_seq_node_d): New.
+       (struct gimple_seq_d): Change fields 'first' and 'last'
+       to type gimple_seq_node.  Update all users.
+       Add field 'next_free'.
+       (gimple_seq_first): Change return type to
+       gimple_seq_node.  Update all users.
+       (gimple_seq_first_stmt): New.
+       (gimple_seq_last): Change return type to gimple_seq_node.
+       Update all users.
+       (gimple_seq_last_stmt): New.
+       (gimple_seq_set_first): Change second argument to type
+       gimple_seq_node.  Update all users.
+       (gimple_seq_set_last): Change second argument to type
+       gimple_seq_node.  Update all users.
+       (gimple_seq_init): Remove.  Update all users.
+       (struct gimple_statement_base): Remove field 'next' and
+       'prev'.  Update all users.
+       (struct gimple_statement_omp): Change fields of type
+       struct gimple_sequence to type gimple_seq.  Update all
+       users.
+       (struct gimple_statement_bind): Likewise.
+       (struct gimple_statement_catch): Likewise.
+       (struct gimple_statement_eh_filter): Likewise.
+       (struct gimple_statement_try): Likewise.
+       (struct gimple_statement_wce): Likewise.
+       (struct gimple_statement_omp_for): Likewise.
+       (gimple_set_prev): Remove.  Update all users.
+       (gimple_set_next): Remove.  Update all users.
+       (gimple_next): Remove.  Update all users.
+       (gimple_prev): Remove.  Update all users.
+       (gimple_seq_bb): New.
+       (gimple_catch_handler_ptr): New.
+       (gimple_stmt_iterator): Remove field 'stmt'.
+       Add field 'ptr'.  Update all users.
+       (gsi_remove): Move to gimple-iterator.c
+       * tree-cfg.c (pass_build_cfg): Re-enable PROP_gimple_leh.
+       * Makefile.in (builtins.o-warn, expr.o-warn, dse.o-warn,
+       ebitmap.o-warn, lower-subreg.o-warn, tree-chrec.o-warn):
+       Change -Wno-error to -Wno-uninitialized.
+
+2008-02-19  Rafael Espindola  <espindola@google.com>
+
+       * tree-eh.c (collect_finally_tree): Handle GIMPLE_SWITCH.
+
+2008-02-19  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimple.h (gimple_cond_invert): Remove prototype.
+       (update_stmt_if_modified): Fix typo in comment.
+       * gimple.c (walk_gimple_asm): Fix typo in comment.
+
+2008-02-13  Bill Maddox  <maddox@google.com>
+
+       * tree-inline.c (estimate_operator_cost): 
+       Removed operators superseded by tuplified statement forms.
+       Small cosmetic reordering of a few switch cases.
+
+2008-02-13  Oleg Ryjkov  <olegr@google.com>
+
+       * tree.h (gimple_alloca_call_p): New declaration.
+       * gimple-dummy.c (tree_inlinable_function_p): Removed.
+       * ipa-inline.c (compute_inline_parameters): Removed
+       gcc_unreachable ().
+       * calls.c (gimple_alloca_call_p): New function.
+       * tree-inline.c (inline_forbidden_p_1): Split in two and removed.
+       (inline_forbidden_p_op, inline_forbidden_p_stmt): New functions.
+       (inline_forbidden_p): Tuplified.
+       (estimate_operator_cost): Added missing cases.
+       * passes.c (init_optimization_passes): Enabled pass_inline_parameters,
+       pass_ipa_function_and_variable_visibility, pass_ipa_early_inline,
+       pass_inline_parameters, pass_rebuild_cgraph_edges passes.
+
+2008-02-13  Diego Novillo  <dnovillo@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-02/msg00459.html
+
+       * gimple.def (GIMPLE_TRY): Tidy comment.
+       * gimple-pretty-print.c (dump_gimple_stmt): Handle TDF_VOPS and
+       TDF_MEMSYMS.
+
+2008-02-13  Diego Novillo  <dnovillo@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-02/msg00455.html
+
+       * tree-complex.c (update_complex_components): Handle
+       GIMPLE_CALL and GIMPLE_ASSIGN.
+       (expand_complex_libcall): Set SSA_NAME_DEF_STMT on LHS.
+       * tree-ssa-operands.c (maybe_add_call_clobbered_vops):
+       Rename from get_call_expr_operands.  Update all users.
+       Convert to handle GIMPLE_CALL instead of CALL_EXPR.
+       (get_modify_stmt_operands): Remove.  Update all users.
+       (get_expr_operands): Do not handle CALL_EXPR, COND_EXPR,
+       VEC_COND_EXPR, GIMPLE_MODIFY_STMT, BLOCK, EXC_PTR_EXPR
+       and FILTER_EXPR.
+       (parse_ssa_operands): Call maybe_add_call_clobbered_vops
+       after parsing operands if STMT is a GIMPLE_CALL.
+
+2008-02-12  Zdenek Dvorak  <ook@ucw.cz>
+           Bill Maddox  <maddox@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-02/msg00429.html
+
+       * tree-inline.h (estimate_num_insns_fn): Declare.
+       * cgraphunit.c (cgraph_process_new_functions):
+       Use estimate_num_insns_fn.
+       * ipa-inline.c (compute_inline_parameters): Ditto.
+       * gimple-dummy.c (estimate_move_cost, estimate_num_insns):
+       Removed.
+       * tree-inline.c (struct eni_data, estimate_num_insns_1):
+       Removed.
+       (estimate_num_insns): Rewritten.
+       (operation_cost, estimate_num_insns_fn): New functions.
+       * gimple.c (gimple_copy): Unshare operands.  Update
+       operand caches.
+       * gimple.h (gimple_set_no_warning): New.
+       (gimple_cond_set_true_label, gimple_cond_set_false_label):
+       Allow setting the label to NULL.
+       * tree-ssa-operands.c (copy_virtual_operands): Handle statements
+       without virtual operands.
+
+2008-02-12  Zdenek Dvorak  <ook@ucw.cz>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-02/msg00429.html
+
+       * tree-into-ssa.c (update_ssa): Remove ATTRIBUTE_UNUSED.
+       * tree-ssa-loop-ch.c: Tuplified.
+       * gimple-iterator.c (gsi_commit_one_edge_insert): Ditto.
+       * tree-cfg.c (gimple_redirect_edge_and_branch,
+       gimple_try_redirect_by_replacing_jump, gimple_merge_blocks,
+       gimple_block_label, gimple_redirect_edge_and_branch_force,
+       gimple_duplicate_bb): Ditto.
+       (make_cond_expr_edges): Remove the labels.
+       (cleanup_dead_labels): Handle conditions without labels.
+       (gimple_make_forwarder_block): Do not reverse the list
+       of phi nodes.
+       (gimple_duplicate_sese_region): Enable ssa updates.
+       (gimple_cfg_hooks): Enable edge redirection and bb duplication.
+       * gimple-pretty-print.c (dump_gimple_cond): Do not dump
+       branches if labels are null.
+       (dump_implicit_edges): Dump implicit GIMPLE_COND edges.
+
+2008-02-12  Diego Novillo  <dnovillo@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-02/msg00429.html
+
+       * tree-vrp.c (range_includes_zero_p): Partially re-enable.
+       (ssa_name_nonnegative_p): Partially re-enable.
+       (ssa_name_nonzero_p): Likewise.
+       * gimple-dummy.c (ssa_name_nonzero_p): Remove.
+       (ssa_name_nonnegative_p): Remove.
+       * tree-cfg.c (gimple_split_block): Convert to tuples.
+
+2008-02-12  Aldy Hernandez  <aldyh@redhat.com>
+
+       Merge with mainline @132177
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2008-02-08  Diego Novillo  <dnovillo@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-02/msg00273.html
+       
+       * gimplify.c (gimplify_call_expr): Remove code to set
+       TREE_SIDE_EFFECTS.
+       * gimple.c (gimple_has_side_effects): New.
+       * gimple.h (gimple_has_side_effects): Declare.
+       * tree-cfg.c (stmt_can_make_abnormal_goto): Call it.
+
+2008-02-07  Diego Novillo  <dnovillo@google.com>
+
+       * gimple.h (gimple_call_num_args): Rename from
+       gimple_call_nargs.  Update all users.
+
+2008-02-07  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimplify.c: Remove various FIXMEs that have been resolved.
+
+2008-02-06  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimple.def: Add GIMPLE_OMP_ATOMIC_{LOAD,STORE} entries.
+       * gsstruct.def: Add GSS_OMP_ATOMIC_{LOAD,STORE} entries.
+       * gimple-pretty-print.c (dump_gimple_stmt): Add case for
+       GIMPLE_OMP_ATOMIC_{LOAD,STORE}.
+       * gimplify.c (gimplify_omp_atomic): Enable and convert to tuples.
+       * tree.def: Add FIXME notes.
+       * gimple.c (gss_for_code): Add cases for
+       GIMPLE_OMP_ATOMIC_{LOAD,STORE}.
+       (gimple_size): Same.
+       (gimple_build_omp_atomic_load): New.
+       (gimple_build_omp_atomic_store): New.
+       * gimple.h (struct gimple_statement_omp_atomic_{load,store}): New.
+       (union gimple_statement_d): Add gimple_omp_atomic_{load,store}.
+       (gimple_omp_atomic_store_set_val): New.
+       (gimple_omp_atomic_store_val): New.
+       (gimple_omp_atomic_load_set_lhs): New.
+       (gimple_omp_atomic_load_lhs): New.
+       (gimple_omp_atomic_load_set_rhs): New.
+       (gimple_omp_atomic_load_rhs): New.
+       * tree-cfg.c (verify_types_in_gimple_seq_2): Add cases for
+       GIMPLE_OMP_ATOMIC_{LOAD,STORE}.
+
+2008-02-05  Diego Novillo  <dnovillo@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-02/msg00161.html
+
+       * gimple-iterator.c (gsi_split_seq_after): Change
+       argument type to gimple_stmt_iterator.  Update all users.
+       (gsi_for_stmt): Return gimple_stmt_iterator.  Update all users.
+       (gimple_find_edge_insert_loc): Change GSI argument to
+       gimple_stmt_iterator.  Update all users.
+       * gimple.c (gimple_seq_deep_copy): Change argument to gimple_seq.
+       Update all users.
+       * gimple.h (gsi_alloc): Remove.  Update all users.
+       (gsi_start): Return gimple_stmt_iterator.  Update all users.
+       (gsi_start_bb): Likewise.
+       (gsi_last): Likewise.
+       (gsi_last_bb): Likewise.
+       (gsi_end_p): Change argument to gimple_stmt_iterator.
+       Update all users.
+       (gsi_one_before_end_p): Likewise.
+       (gsi_stmt): Likewise.
+       (gsi_bb): Likewise.
+       (gsi_seq): Likewise.
+       (gsi_split_seq_after): Likewise.
+       (gsi_for_stmt): Likewise.
+       (gsi_after_labels): Return gimple_stmt_iterator.  Update all users.
+
+2008-02-05  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimple-pretty-print.c (dump_gimple_stmt): Print clauses for
+       GIMPLE_OMP_PARALLEL.
+       Handle GIMPLE_OMP_FOR.
+       (dump_gimple_omp_for): New.
+       * tree-pretty-print.c (dump_omp_clauses): Remove static.
+       * diagnostic.h (dump_omp_clauses): Add prototype.
+       * gimplify.c (gimplify_omp_for): Convert to tuples and enable.
+       (gimplify_omp_workshare): Same.
+       (gimplify_expr) [OMP_MASTER, OMP_ORDERED, OMP_CRITICAL]: Same.
+
+2008-02-04  Oleg Ryjkov  <olegr@google.com>
+
+       * tree-gimple.c (get_gimple_rhs_class): Handle
+       FILTER_EXPR.
+       * tree-eh.c (union treemple): Declare.
+       Convert to tuples.
+       * except.c:
+       * except.h (lang_protect_cleanup_actions): Change return
+       type to 'gimple'.  Update all users.
+       * tree-inline.c (count_insns_seq): New.
+       * tree-inline.h (count_insns_seq): Declare.
+       * gimple.c (gimple_seq_deep_copy): New.
+       * gimple.h (gimple_seq_deep_copy): Declare.
+       (gimple_singleton_p): New.
+       * passes.c (init_optimization_passes): Enable
+       pass_lower_eh.
+
+2008-02-02  Aldy Hernandez  <aldyh@redhat.com>
+
+       * tree.h (OMP_CLAUSE_REDUCTION_GIMPLE_INIT): New.
+       (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE): New.
+       (struct tree_omp_clause): Add gimple_reduction_{init,merge} fields.
+       * gimplify.c (gimplify_scan_omp_clauses): Gimplify reduction
+       init/merge fields into separate sequences.
+       (gimplify_and_add): Use gimplify_expr directly.
+       * omp-low.c (lower_rec_input_clauses): Extract reduction info from
+       gimple tuples in structure.
+       (lower_reduction_clauses): Same.
+
+2008-02-01  Diego Novillo  <dnovillo@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-02/msg00036.html
+
+       * tree-complex.c (expand_complex_comparison): Set the RHS
+       of the new comparison to boolean_true_node.
+       * gimplify.c (gimplify_call_expr): Fix creation of new
+       GIMPLE_CALL.  Create the vector of arguments in the same
+       order as the original expression.
+
+2008-02-01  Diego Novillo  <dnovillo@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-02/msg00032.html
+
+       * tree-pass.h (struct tree_opt_pass): Remove field
+       WORKS_WITH_TUPLES_P.  Adjust all users.
+       * opts.c (decode_options): Force -O0.
+       * cfgexpand.c (gimple_to_tree): Handle GIMPLE_RETURN,
+       GIMPLE_ASM, GIMPLE_CALL and GIMPLE_SWITCH.
+       Show unrecognized tuple when failing.
+       (expand_gimple_basic_block): Do not use
+       gimple_stmt_iterator to go through the statements in BB.
+       Handle GIMPLE_CALL instead of CALL_EXPR.
+       (gimple_expand_cfg): Rename from tree_expand_cfg.  Update
+       all users.
+       Remove PROP_gimple_leh.
+       * gimple.c (walk_gimple_seq): Guard against WI == NULL.
+       * tree-cfg.c (execute_warn_function_return): Move #if 0
+       markers.
+       * passes.c (init_optimization_passes): Adjust #if 0
+       markers to execute pass_expand.
+       Guard more passes under #if 0.
+
+2008-02-01  Rafael Espindola  <espindola@google.com>
+
+       * passes.c (init_optimization_passes): Enable pass_del_ssa.
+
+       * tree-outof-ssa.c (insert_copy_on_edge): Port to tuples.
+       (eliminate_build): Likewise.
+       (eliminate_virtual_phis): Likewise.
+       (rewrite_trees): Likewise. Remove stmt_ann_t ann.
+       (stmt_list): Changed from tree to gimple_seq.
+       (identical_copies_p): Port to tuples.
+       (identical_stmt_lists_p): Likewise.
+       (init_analyze_edges_for_bb): Likewise.
+       (fini_analyze_edges_for_bb): Likewise.
+       (process_single_block_loop_latch): Likewise.
+       (analyze_edges_for_bb): Likewise.
+       (remove_ssa_form): Likewise.
+       (insert_backedge_copies): Likewise.
+       (rewrite_out_of_ssa): Likewise.
+       (pass_del_ssa): Flip works_with_tuples_p.  Don't require PROP_alias.
+
+       * tree-ssa-coalesce.c (build_ssa_conflict_graph): Port to tuples.
+       (abnormal_corrupt): Port to tuples.
+       (fail_abnormal_edge_coalesce): Port to tuples.
+       (create_outofssa_var_map): Port to tuples.
+       (coalesce_partitions): Port to tuples.
+
+2008-02-01  Rafael Espindola  <espindola@google.com>
+
+       * tree-ssa-operands.c (free_stmt_operands): Only free
+       with_ops.addresses_taken if stmt has ops.
+
+2008-01-31  Rafael Espindola  <espindola@google.com>
+
+       * tree-optimize.c (pass_free_cfg_annotations): Flip
+       works_with_tuples_p.
+       * passes.c (init_optimization_passes): Enable
+       pass_free_cfg_annotations.
+
+2008-01-30  Diego Novillo  <dnovillo@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-01/msg01461.html
+
+       * tree-into-ssa.c (compute_global_livein): Initialize
+       PHI with call to gsi_stmt.
+       (rewrite_update_init_block): Likewise.
+       * tree-complex.c (expand_complex_comparison): Initialize
+       TYPE.
+       (expand_complex_operations_1): Initialize BR and BI.
+       * ebitmap.h (ebitmap_iter_init): Initialize OURN.
+       * Makefile.in (dse.o-warn, ebitmap.o-warn,
+       lower-subreg.o-warn, tree-chrec.o-warn, varasm.o-warn):
+       New.
+
+2008-01-30  Rafael Espindola  <espindola@google.com>
+
+       * gimplify.c (gimplify_scan_omp_clauses): Comment out
+       pop_gimplify_context calls.
+
+2008-01-30  Rafael Espindola  <espindola@google.com>
+
+       * gimple.h (gsi_after_labels): Add.
+
+2008-01-30  Rafael Espindola  <espindola@google.com>
+
+       * Makefile.in (tree-complex.o): Uncomment gt-tree-vect-generic.h.
+       (GTFILES): Add tree-vect-generic.c.
+       * passes.c (init_optimization_passes): Enable pass_lower_vector.
+       * tree-vect-generic.c: Uncomment the whole file.
+       (elem_op_func): Use gimple_stmt_iterator instead of
+       block_stmt_iterator.
+       (tree_vec_extract): Likewise. Rename bsi to gsi.
+       (do_unop): Likewise.
+       (do_binop): Likewise.
+       (do_plus_minus): Likewise.
+       (do_negate): Likewise.
+       (expand_vector_piecewise): Likewise.
+       (expand_vector_parallel): Likewise.
+       (expand_vector_addition): Likewise.
+       (expand_vector_operation): Likewise. Remove "tree rhs" param. Add
+       "gimple assign" param. Use gimple_assign_rhs1|2 instead of
+       TREE_OPERAND.
+       (expand_vector_operations_1): Use gimple_stmt_iterator instead of
+       block_stmt_iterator. Rename bsi to gsi. Use gsi_stmt instead of
+       bsi_stmt. Use gimple_assign_* instead of GIMPLE_STMT_OPERAND. Use
+       gimple_assign_set_rhs_from_tree instead of setting *p_rhs.
+       (gate_expand_vector_operations): Use gimple_stmt_iterator instead
+       of block_stmt_iterator. Use gsi_* instead of bsi_*.
+       (pass_lower_vector): Flip works_with_tuples_p.
+       (pass_lower_vector_ssa): Flip works_with_tuples_p.
+
+2008-01-29  Rafael Espindola  <espindola@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-01/msg01434.html
+
+       * gimple.c (gimple_assign_set_rhs_with_ops): Fix
+       allocation of operand vector.
+
+2008-01-29  Diego Novillo  <dnovillo@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-01/msg01434.html
+
+       * tree-into-ssa.c (prepare_block_for_update): Initialize
+       PHI.
+       * tree.c (build_gimple_modify_stmt): Fix documentation.
+
+       * tree-complex.c: Convert to tuples.
+       (pass_lower_complex_O0): Re-enable.
+
+       * ipa-cp.c (constant_val_insert): Disable.
+       * tree-gimple.c (get_gimple_rhs_class): Handle SSA_NAME.
+
+       * tree-ssa-propagate.c: Fix documentation to refer to
+       prop_simulate_again_p.
+       (add_ssa_edge): Call prop_simulate_again_p.
+       (simulate_stmt): Likewise.
+       (simulate_block): Fix loop iterating over PHI nodes.
+       * tree-ssa-propagate.h (prop_set_simulate_again): New.
+       (prop_simulate_again_p): New.
+       * gimple-iterator.c (gsi_insert_seq_after): Re-write to
+       avoid use of gimple_stmt_iterator.
+       (gimple_find_edge_insert_loc): Move from tree-cfg.c.
+       Re-enable.
+       (gsi_commit_edge_inserts): Likewise.
+       (gsi_commit_one_edge_insert): Likewise.
+       (gsi_insert_on_edge): Likewise.
+       (gsi_insert_on_edge_immediate): Likewise.
+       (gsi_insert_seq_on_edge): New.
+       * gimplify.c (gimplify_cond_expr): Call
+       gimple_cond_get_ops_from_tree.
+       * tree-optimize.c (pass_all_optimizations): Enable.
+       * gimple.c (gimple_cond_get_ops_from_tree): New.
+       (gimple_build_cond_from_tree): New.
+       * basic-block.h (struct edge_def): Replace field
+       edge_def_insns.t with gimple_seq edge_def_insns.g.
+       Update all users.
+       * gimple.h (gimple_stmt_iterator):
+       * tree-cfg.c (change_bb_for_stmt):
+       * passes.c (init_optimization_passes): Move #if 0 around
+       to disable passes in groups instead of individually.
+       (execute_function_todo): Re-enable call to update_ssa.
+
+2008-01-25  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimple-pretty-print.c (dump_gimple_stmt): Handle
+       GIMPLE_OMP_PARALLEL.
+       * gimplify.c (pop_gimplify_context): Add comment.
+       (gimplify_and_return_first): New.
+       (gimplify_scan_omp_clauses): Remove unused 'gs'.
+       Remove #if 0.
+       (gimplify_omp_parallel): Convert to tuples.
+       (gimplify_omp_workshare): Remove redundant retval.
+       (gimplify_expr): Adapt to lack of retval for gimplify_omp_variable and
+       gimplify_omp_workshare.
+       Abort on OMP_ATOMIC_{LOAD,STORE} case.
+
+2008-01-21  Diego Novillo  <dnovillo@google.com>
+
+       Merge with mainline @131695
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge
+       string.
+       * configure: Regenerate.
+
+2008-01-15  Diego Novillo  <dnovillo@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-01/msg00674.html
+
+       * tree-into-ssa.c (mark_def_sites): Force operand scan.
+       * gimple.def (GIMPLE_ASSIGN): Move into section of
+       statements with memory operands.
+       * gimple-dummy.c (remove_unused_locals): Remove.
+       * tree-flow-inline.h (noreturn_call_p,
+       mark_stmt_modified, update_stmt, update_stmt_if_modified,
+       stmt_modified_p): Remove.
+       * tree-ssa-live.c: Convert to tuples.
+       * gimple.c (gimple_set_modified): Replace
+       mark_stmt_modified.  Update all callers.
+       (gimple_remove): Call free_stmt_operands.
+       * gimple.h: Include tree-ssa-operands.h
+       Move extern function declarations earlier in the file.
+       (GIMPLE_CHECK2): Remove.
+       (GIMPLE_RANGE_CHECK): Remove.
+       (struct gimple_statement_with_ops): Fix GTY markers.
+       (struct gimple_statement_with_memory_ops): Likewise.
+       (gimple_modified_p): Replace stmt_modified_p.  Update all
+       users.
+       (update_stmt): Move from tree-flow-inline.h.  Update all
+       users.
+       (update_stmt_if_modified): Likewise.
+       (gimple_has_ops): Use range GIMPLE_COND ... GIMPLE_RETURN.
+       (gimple_has_mem_ops): Use range GIMPLE_ASSIGN ... GIMPLE_RETURN.
+       (gimple_num_ops): Call gimple_has_ops.
+       (gimple_ops): Likewise.
+       (gimple_op): Likewise.
+       (gimple_op_ptr): Likewise.
+       (gimple_set_op): Likewise.
+       (gimple_set_addresses_taken): Remove.  Update all users.
+       (gimple_add_to_addresses_taken): Likewise.
+       (gimple_call_noreturn_p): Replace noreturn_call_p.
+       Update all users.
+       (gimple_phi_arg): Assert that INDEX is smaller than
+       CAPACITY.
+       * passes.c (init_optimization_passes): Enable
+       pass_referenced_vars, pass_reset_cc_flags and pass_build_ssa.
+       * tree-ssa-operands.c (gimple_set_stored_syms): Do not
+       free the STORES field if SYMS is empty.
+       (gimple_set_loaded_syms): Likewise.
+       (finalize_ssa_stmt_operands): Only call
+       finalize_ssa_vdefs and finalize_ssa_vuses if STMT has
+       memory operands.
+       (get_expr_operands): Handle CASE_LABEL_EXPR.
+       (free_stmt_operands): Free bitmaps ADDRESSES_TAKEN,
+       STORES and LOADS.
+       (gimple_add_to_addresses_taken): Rename from
+       add_to_addressable_set.
+
+2008-01-14  Aldy Hernandez  <aldyh@redhat.com>
+
+       * tree-dfa.c (dump_dfa_stats): Add %z modifier to size_t argument.
+
+2008-01-11  Andrew MacLeod  <amacleod@redhat.com>
+           Diego Novillo  <dnovillo@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-01/msg00493.html
+
+       * tree.h (struct ssa_use_operand_d): Replace field STMT
+       with a union field LOC containing the fields STMT and
+       SSA_NAME.
+       Update all users.
+       * tree-flow.h (struct immediate_use_iterator_d): Document
+       how immediate use iteration works.
+
+2008-01-10  Diego Novillo  <dnovillo@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-01/msg00444.html
+
+       * Makefile.in (tree-complex.o): Remove gt-tree-vect-generic.h
+
+2008-01-10  Diego Novillo  <dnovillo@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-01/msg00436.html
+
+       * tree-ssa-operands.h (PHI_RESULT_PTR): Call
+       gimple_phi_result_ptr.
+       (PHI_ARG_DEF_PTR): Call gimple_phi_arg_imm_use_ptr.
+       (free_ssa_operands): Remove.
+       (copy_virtual_operands, create_ssa_artificial_load_stmt,
+       add_to_addressable_set, discard_stmt_changes,
+       FOR_EACH_PHI_OR_STMT_USE, FOR_EACH_PHI_OR_STMT_DEF): Convert
+       to use 'gimple'.
+       * tree-into-ssa.c: Convert to use 'gimple'.
+       (rewrite_uses_p): New.  Use instead of REWRITE_THIS_STMT.
+       (set_rewrite_uses): New.  Use instead of REWRITE_THIS_STMT.
+       (register_defs_p): New.  Use instead of
+       REGISTER_DEFS_IN_THIS_STMT.
+       (set_register_defs): New.  Use instead of
+       REGISTER_DEFS_IN_THIS_STMT.
+       (REWRITE_THIS_STMT): Remove.
+       (REGISTER_DEFS_IN_THIS_STMT): Remove.
+       * tree-pretty-print.c (dump_vops): Remove.  Update all
+       users.
+       (dump_generic_node): Do not handle PHI_NODE.
+       * tree.c (tree_node_kind): Remove "phi_nodes" entry.
+       (tree_code_size): Do not handle PHI_NODE.
+       (tree_size): Likewise.
+       (make_node_stat): Likewise.
+       (tree_node_structure): Likewise.
+       (phi_node_elt_check_failed): Remove.
+       * tree.h (PHI_NODE_ELT_CHECK): Remove.
+       (PHI_RESULT_TREE): Remove.
+       (PHI_ARG_DEF_TREE): Remove.
+       (PHI_CHAIN): Remove.
+       (PHI_NUM_ARGS): Remove.
+       (PHI_ARG_CAPACITY): Remove.
+       (PHI_ARG_ELT): Remove.
+       (PHI_ARG_EDGE): Remove.
+       (PHI_BB): Remove.
+       (PHI_ARG_IMM_USE_NODE): Remove.
+       (struct tree_phi_node): Remove.
+       (union tree_node): Remove field 'phi'.
+       (release_phi_node): Change argument to gimple.
+       (duplicate_ssa_name): Change second argument to gimple.
+       (swap_tree_operands): Remove.
+       (enum tree_node_kind): Remove phi_kind entry.
+       * tree-phinodes.c: Convert to use gimple.
+       * tree-gimple.c (is_gimple_stmt): Do not handle PHI_NODE.
+       * cgraphunit.c (cgraph_process_new_functions): Call
+       gimple_unreachable to mark unconverted sections.
+       (verify_cgraph_node): Do not ICE if THIS_CFUN is NULL.
+       Convert statement verification to use gimple.
+       (cgraph_expand_function): Call gimple_unreachable to mark
+       unconverted sections.
+       * tree-ssa-propagate.c: Convert to use gimple.
+       (STMT_IN_SSA_EDGE_WORKLIST): Use flag GF_PLF_1 as marker.
+       (DONT_SIMULATE_AGAIN): Remove.  Use gimple_visited.
+       * gimple-dummy.c: Remove converted functions.
+       * tree-ssa-alias.c (may_be_aliased): Re-enable.
+       * tree-flow-inline.h (gimple_phi_arg_imm_use_ptr): Rename
+       from get_phi_arg_def_ptr.
+       (bsi_start, bsi_after_labels, bsi_last, bsi_end_p,
+       bsi_next, bsi_prev, bsi_stmt, bsi_stmt_ptr): Remove.
+       * tree.def (PHI_NODE): Remove.  Update most users.
+       * tree-dfa.c: Convert to use gimple.
+       * common.opt (fgimple-conversion): Default to 0.
+       * tree-ssa.c: Convert to use gimple.
+       * tree-optimize.c (pass_early_local_passes,
+       pass_all_early_optimization, pass_init_datastructures):
+       Enable.
+       * tree-flow.h (tree_ann_type): Remove STMT_ANN.  Update
+       all users.
+       (struct stmt_ann_d): Remove.  Update all users.
+       * Makefile.in (GIMPLE_H): Add tree-ssa-operands.h.
+       (tree-ssa-copy.o, tree-phinodes.o, tree-complex.o):
+       Re-enable.
+       * gimple.h (enum plf_mask): Define.
+       (GF_NO_WARNING): Remove.
+       (bb_seq): Return NULL if BB->il.gimple is NULL.
+       (struct gimple_statement_base): Add bitfields no_warning,
+       visited, unused_1, unused_2, unused_3, unused_4 and plf.
+       (gimple_flags, gimple_set_flags, gimple_add_flag):
+       Remove.  Update all users.
+       (gimple_set_visited): New.
+       (gimple_visited_p): New.
+       (gimple_set_plf): New.
+       (gimple_plf): New.
+       (gimple_has_volatile_ops): New.
+       (gimple_set_has_volatile_ops): New.
+       (gimple_addresses_taken): New.
+       (gimple_set_addresses_taken): New.
+       (gimple_add_to_addresses_taken): New.
+       (gimple_phi_result_ptr): New.
+       (gsi_stmt_ptr): New.
+       (gsi_bb): New.
+       (gsi_seq): New.
+       * tree-cfg.c (execute_build_cfg): Do not call
+       gimple_set_body after building the CFG.
+       (pass_build_cfg): Do not require PROP_gimple_leh.
+       (gimplify_val): Convert to gimple.
+       * passes.c (init_optimization_passes): Disable all of
+       pass_all_early_optimizations.
+       * tree-ssanames.c: Convert to gimple.
+       * tree-ssa-operands.c: Convert to gimple.
+       (gimple_set_stored_syms): New.
+       (gimple_set_loaded_syms): New.
+
+2008-01-03  Diego Novillo  <dnovillo@google.com>
+
+       Merge with mainline @131303
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge
+       string.
+       * configure: Regenerate.
+
+2007-12-20  Diego Novillo  <dnovillo@google.com>
+
+       * Makefile.in (builtins.o-warn, expr.o-warn): Add
+       -Wno-error.
+
+2007-12-05  Diego Novillo  <dnovillo@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2007-12/msg00221.html
+
+       * gimple.c (walk_gimple_asm): Guard against WI being
+       NULL.
+       (walk_gimple_stmt): Likewise.
+
+2007-12-05  Diego Novillo  <dnovillo@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2007-12/msg00216.html
+
+       * tree.h (gimple_unreachable_1): Declare.
+       (gimple_unreachable): Define.
+       Replace calls to gcc_unreachable with gimple_unreachable
+       in unconverted code.
+       * gimple-dummy.c (gimple_unreachable_1): New function.
+       * common.opt (fgimple-only): Remove.
+       (fgimple-conversion): Define.
+       * Makefile.in (STRICT_WARN): Add -Wno-return-type.
+
+2007-11-30  Diego Novillo  <dnovillo@google.com>
+
+       Merge with mainline @130470
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge
+       string.
+       * configure: Regenerate.
+       * tree-inline.c (copy_decl_no_change,
+       copy_decl_for_dup_finish): Re-enable.
+
+2007-11-26  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimple.h (gimple_bind_block): Update comment.
+       
+2007-11-19  Diego Novillo  <dnovillo@google.com>
+
+       Merge with mainline @130291.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2007-11-19  Diego Novillo  <dnovillo@google.com>
+
+       * tree-flow-inline.h (get_lineno): Call IS_LOCATION_EMPTY
+       and LOCATION_LINE instead of accessing location fields
+       directly.
+       * gimple-low.c (lower_function_body): Use
+       UNKNOWN_LOCATION instead of unknown_location.
+
+2007-11-16  Diego Novillo  <dnovillo@google.com>
+
+       * cgraph.c (cgraph_create_edge): Call gimple_body
+       instead of DECL_SAVED_TREE.
+       (dump_cgraph_node): Likewise.
+       * cgraphunit.c (verify_cgraph_node): Likewise.
+       (cgraph_analyze_functions): Likewise.
+       (cgraph_mark_functions_to_output): Likewise.
+       (cgraph_optimize): Likewise.
+       * tree-eh.c (lower_eh_constructs): Add notes for future
+       changes to calls to DECL_SAVED_TREE.
+       (refactor_eh_r): Likewise.
+       * cfgexpand.c (gimple_to_tree): New function.
+       (maybe_dump_rtl_for_gimple_stmt): Rename from
+       maybe_dump_rtl_for_tree_stmt.  Convert to tuples.
+       (label_rtx_for_bb): Convert to tuples.
+       (expand_gimple_cond): Rename from
+       expand_gimple_cond_expr.  Convert to tuples.
+       (expand_gimple_tailcall): Convert to tuples.
+       (expand_gimple_basic_block): Convert to tuples.
+       (discover_nonconstant_array_refs_r): Convert to tuples.
+       (discover_nonconstant_array_refs): Convert to tuples.
+       * tree-mudflap.c (execute_mudflap_function_decls): Add
+       deprecation comment for DECL_SAVED_TREE call.
+       * tree-inline.c (copy_generic_body): Call gimple_body
+       instead of DECL_SAVED_TREE.
+       (inlinable_function_p): Likewise.
+       (clone_body): Add deprecation comment for DECL_SAVED_TREE
+       call.
+       * tree-cfg.c (remove_useless_stmts_bind): Call
+       gimple_body instead of DECL_SAVED_TREE.
+       (remove_useless_stmts): Likewise.
+
+2007-11-10  Aldy Hernandez  <aldyh@redhat.com>
+
+       * tree-cfg.c (verify_types_in_gimple_call): OBJ_TYPE_REF is allowed
+       as a function type.
+
+       [objc]
+       * objc-act.c (objc_gimplify_expr): Change pre and post to sequences.
+       * objc-act.h (objc_gimplify_expr): Change prototype accordingly.
+       
+       [testsuite]
+       * lib/objc.exp: Set -I regardless of libobjc.
+
+2007-11-10  Aldy Hernandez  <aldyh@redhat.com>
+
+       * tree-gimple.h (tree_annotate_all_with_locus): New.
+       * gimple-dummy.c: Add omp_reduction_init and
+       diagnose_omp_structured_block_errors.
+       * gimplify.c (tree_should_carry_locus_p): New.
+       (tree_annotate_one_with_locus): New.
+       (tree_annotate_all_with_locus): New.
+
+2007-11-08  Diego Novillo  <dnovillo@google.com>
+
+       Merge with mainline @129982
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2007-11-01  Diego Novillo  <dnovillo@google.com>
+
+       * cgraphbuild.c (initialize_inline_failed): Re-enable.
+       (build_cgraph_edges): Adapt to use tuples.
+       (pass_build_cgraph): Re-enable.
+       (rebuild_cgraph_edges): Adapt to use tuples.
+       * cgraph.c (cgraph_release_function_body): Also NULLify the
+       gimple body.
+       (debug_cgraph_node): New.
+       (debug_cgraph): New.
+       * cgraph.h (struct cgraph_edge): Change field CALL_STMT to
+       type gimple.  Update all users.
+       * cgraphunit.c (cgraph_analyze_functions): If DECL does not
+       have a struct function entry, discard it.
+       * gimple.h (GF_CALL_CANNOT_INLINE): New.
+       (gimple_call_lhs_ptr): New.
+       (gimple_call_mark_uninlinable): New.
+       (gimple_call_cannot_inline_p): New.
+       (struct walk_stmt_info): Add field PSET.
+
+2007-11-01  Diego Novillo  <dnovillo@google.com>
+
+       * tree-optimize.c (tree_rest_of_compilation): Call
+       gimple_set_body to remove the body if not inlining.
+       * gimple.c (walk_gimple_stmt): Update documentation.
+       Pass WI->PSET to every call to walk_tree.
+
+2007-11-01  Diego Novillo  <dnovillo@google.com>
+
+       * langhooks.h (struct lang_hooks_for_callgraph): Remove third
+       argument from function pointer ANALYZE_EXPR.  Update all
+       users.
+
+2007-10-30  Diego Novillo  <dnovillo@google.com>
+
+       * gimple.c (gimple_build_omp_return): Call
+       gimple_omp_return_set_nowait if needed.
+       * gimple.h (GIMPLE_CHECK, GIMPLE_CHECK2, GIMPLE_RANGE_CHECK):
+       Move earlier in the file.
+       (GF_NO_WARNING): Change to static const unsigned.
+       (GF_CALL_TAILCALL): New.
+       (GF_OMP_PARALLEL_COMBINED): Rename from
+       OMP_PARALLEL_COMBINED_FLAG.  Change to static const unsigned.
+       Update all users.
+       (GF_OMP_RETURN_NOWAIT): Rename from OMP_RETURN_NOWAIT_FLAG.
+       Change to static const unsigned.
+       Update all users.
+       (GF_OMP_SECTION_LAST): Rename from OMP_SECTION_LAST_FLAG.
+       Change to static const unsigned.
+       Update all users.
+       (gimple_omp_return_set_nowait): New.
+       (gimple_call_set_tail): New.
+       (gimple_call_tail_p): New.
+
+2007-10-30  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimplify.c (get_tmp_var_for): Remove reference to IS_FORMAL in
+       opening comment.
+       (gimplify_return_expr): Return GS_ERROR if we have errored.
+       (gimplify_statement_list): Handle case where voidify_wrapper returns a
+       temporary.
+       (gimplify_call_expr): Return gracefully on error.
+       (gimplify_cond_expr): Same.
+       * gimple.h (gimple_call_return_type): Do not error on methods.
+
+2007-10-29  Diego Novillo  <dnovillo@google.com>
+           Tom Tromey  <tromey@redhat.com>
+
+       * gimple.h (gimple_locus): Update comment.
+       (gimple_set_locus): Likewise.
+       (gimple_locus_empty_p): Add support for mapped locations.
+
+2007-10-29  Diego Novillo  <dnovillo@google.com>
+
+       * tree-optimize.c (execute_cleanup_cfg_pre_ipa): Re-enable.
+       * gimple.c (walk_gimple_asm): Tidy comment.
+
+2007-10-29  Aldy Hernandez  <aldyh@redhat.com>
+
+       * tree-gimple.h (build_gimple_eh_filter_tree): Remove.
+       * gimplify.c (build_gimple_eh_filter_tree): Move from here...
+       * cp/cp-gimplify.c: ...to here.
+       (cp_gimplify_init_expr): Convert to tuples.
+       (gimplify_must_not_throw_expr): Make function return a
+       gimplify_status and convert to tuples.
+       (cp_gimplify_expr): Convert MUST_NOT_THROW_EXPR, INIT_EXPR, and
+       USING_STMT to tuples.
+
+2007-10-28  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimplify.c (gimplify_body): Make work when body contains more than
+       a GIMPLE_BIND statement.
+
+2007-10-26  Diego Novillo  <dnovillo@google.com>
+
+       Merge with mainline @129659.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2007-10-26  Diego Novillo  <dnovillo@google.com>
+
+       * gimple-low.c (lower_builtin_setjmp): Pass ARG to first
+       call to gimple_build_call.
+
+2007-10-17  Aldy Hernandez  <aldyh@redhat.com>
+
+       * tree-gimple.c (get_gimple_rhs_class): Add case for EXC_PTR_EXPR.
+       * gimplify.c (gimple_conditional_context): Enable.
+       (gimplify_cleanup_point_expr): Enable.  Adjust for tuples.
+       (gimple_push_cleanup): Enable.
+       (gimplify_target_expr): Do not gimplify TARGET_EXPR_CLEANUP before
+       calling gimple_push_cleanup.
+       (gimplify_expr): Rename `try' to `try_'.
+       Enable CLEANUP_POINT_EXPR case.
+       Gimplify CATCH_EXPR and EH_FILTER_EXPR cases correctly.
+
+2007-10-17  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimple.c (gimple_size): Handle GIMPLE_WITH_CLEANUP_EXPR.
+       * gimple.h (struct gimple_statement_catch): Make handler a structure,
+       not a pointer.
+       (struct gimple_statement_eh_filter): Make failure a structure, not a
+       pointer.
+       Document the EH_FILTER_MUST_NOT_THROW subcode flag.
+       (gimple_catch_handler): Handler is now a struct.
+       (gimple_catch_set_handler): Same.
+       (gimple_eh_filter_failure): Failure is now a struct.
+       (gimple_eh_filter_set_failure): Same.
+       (gimple_eh_filter_must_not_throw): New.
+       (gimple_eh_filter_set_must_not_throw): New.
+       (gsi_next): Update comment.
+       (gsi_prev): Same.
+       * tree-cfg.c (verify_types_in_gimple_seq_2): Handle GIMPLE_EH_FILTER.
+
+2007-10-18  Diego Novillo  <dnovillo@google.com>
+
+       * gimple-iterator.c (gsi_delink): Remove.
+       * gimple.h (gsi_delink): Remove.
+
+2007-10-17  Diego Novillo  <dnovillo@google.com>
+
+       * coretypes.h (const_gimple_seq): New typedef.
+       * gimple.h (gimple_seq_first): Constify argument.
+       (gimple_seq_last): Likewise.
+       (gimple_seq_copy): Likewise.
+       (gimple_seq_empty_p): Likewise.
+       (gimple_set_prev): Rename from set_gimple_prev.
+       Update all users.
+       (gimple_set_next): Rename from set_gimple_next.
+       Update all users.
+       (gimple_set_flags): Rename from set_gimple_flags.
+       Update all users.
+       (gimple_add_flag): Rename from add_gimple_flag.
+       Update all users.
+       (gimple_set_subcode): Rename from set_gimple_subcode.  
+       Update all users.
+       (gimple_set_block): Rename from set_gimple_block.
+       Update all users.
+       (gimple_set_locus): Rename from set_gimple_locus.
+       Update all users.
+       (gimple_locus_empty_p): Constify argument.
+       (gimple_has_ops): Likewise.
+       (gimple_has_mem_ops): Likewise.
+       (gimple_def_ops): Likewise.
+       (gimple_set_def_ops): Rename from set_gimple_def_ops.
+       Update all users.
+       (gimple_use_ops): Constify argument.
+       (gimple_set_use_ops): Rename from set_gimple_use_ops.
+       Update all users.
+       (gimple_vuse_ops): Constify argument.
+       (gimple_set_vuse_ops): Rename from set_gimple_vuse_ops.
+       Update all users.
+       (gimple_vdef_ops): Constify argument.
+       (gimple_set_vdef_ops): Rename from set_gimple_vdef_ops.
+       Update all users.
+       (gimple_loaded_syms): Constify argument.
+       (gimple_stored_syms): Constify argument.
+       (gimple_modified): Constify argument.
+       (gimple_set_modified): Rename from set_gimple_modified.
+       Update all users.
+       (gimple_omp_return_nowait_p): Constify argument.
+       (gimple_omp_section_last_p): Constify argument.
+       (gimple_omp_parallel_combined_p): Constify argument.
+       (gimple_build_return): Rename from build_gimple_return.
+       Update all users.
+       (gimple_build_assign): Rename from build_gimple_assign.
+       Update all users.
+       (gimple_build_assign_with_ops): Rename from
+       build_gimple_assign_with_ops.
+       Update all users.
+       (gimple_build_call_vec): Rename from build_gimple_call_vec.
+       Update all users.
+       (gimple_build_call): Rename from build_gimple_call.
+       Update all users.
+       (gimple_build_cond): Rename from build_gimple_cond.
+       Update all users.
+       (gimple_build_label): Rename from build_gimple_label.
+       Update all users.
+       (gimple_build_goto): Rename from build_gimple_goto.
+       Update all users.
+       (gimple_build_nop): Rename from build_gimple_nop.
+       Update all users.
+       (gimple_build_asm): Rename from build_gimple_asm.
+       Update all users.
+       (gimple_build_asm_vec): Rename from build_gimple_asm_vec.
+       Update all users.
+       (gimple_build_catch): Rename from build_gimple_catch.
+       Update all users.
+       (gimple_build_eh_filter): Rename from build_gimple_eh_filter.
+       Update all users.
+       (gimple_build_try): Rename from build_gimple_try.
+       Update all users.
+       (gimple_build_wce): Rename from build_gimple_wce.
+       Update all users.
+       (gimple_build_resx): Rename from build_gimple_resx.
+       Update all users.
+       (gimple_build_switch): Rename from build_gimple_switch.
+       Update all users.
+       (gimple_build_switch_vec): Rename from
+       build_gimple_switch_vec.  Update all users.
+       (gimple_build_omp_parallel): Rename from
+       build_gimple_omp_parallel.  Update all users.
+       (gimple_build_omp_for): Rename from build_gimple_omp_for.
+       Update all users.
+       (gimple_build_omp_critical): Rename from
+       build_gimple_omp_critical.
+       Update all users.
+       (gimple_build_omp_section): Rename from
+       build_gimple_omp_section.  Update all users.
+       (gimple_build_omp_continue): Rename from
+       build_gimple_omp_continue.  Update all users.
+       (gimple_build_omp_master): Rename from
+       build_gimple_omp_master.  Update all users.
+       (gimple_build_omp_ordered): Rename from
+       build_gimple_omp_ordered.  Update all users.
+       (gimple_build_omp_sections): Rename from
+       build_gimple_omp_sections.  Update all users.
+       (gimple_build_omp_single): Rename from
+       build_gimple_omp_single.  Update all users.
+       (gimple_set_body): Rename from set_gimple_body.  Update all
+       users.
+       (gimple_set_bb): Rename from set_gimple_bb.  Update all users.
+       (is_gimple_operand): Constify argument.
+       (gimple_ops): Likewise.
+       (gimple_op_ptr): Likewise.
+       (gimple_call_lhs): Likewise.
+       (gimple_call_return_type): Likewise.
+       (gimple_call_chain): Likewise.
+       (gimple_call_arg_ptr): Likewise.
+       (gimple_cond_true_label): Likewise.
+       (gimple_bind_vars): Likewise.
+       (gimple_bind_set_body): Likewise.
+       (gimple_bind_block): Likewise.
+       (gimple_asm_ninputs): Likewise.
+       (gimple_asm_noutputs): Likewise.
+       (gimple_asm_nclobbers): Likewise.
+       (gimple_asm_input_op): Likewise.
+       (gimple_asm_output_op): Likewise.
+       (gimple_asm_clobber_op): Likewise.
+       (gimple_asm_string): Likewise.
+       (gimple_asm_volatile_p): Likewise.
+       (gimple_catch_types): Likewise.
+       (gimple_catch_handler): Likewise.
+       (gimple_eh_filter_types): Likewise.
+       (gimple_eh_filter_failure): Likewise.
+       (gimple_try_kind): Likewise.
+       (gimple_try_set_eval): Likewise.
+       (gimple_try_set_cleanup): Likewise.
+       (gimple_wce_set_cleanup): Likewise.
+       (gimple_wce_cleanup_eh_only): Likewise.
+       (gimple_phi_capacity): Likewise.
+       (gimple_phi_num_args): Likewise.
+       (gimple_phi_result): Likewise.
+       (gimple_resx_region): Likewise.
+       (gimple_switch_num_labels): Likewise.
+       (gimple_switch_label): Likewise.
+       (gimple_switch_default_label): Likewise.
+       (gimple_omp_set_body): Likewise.
+       (gimple_omp_critical_name): Likewise.
+       (gimple_omp_for_clauses): Likewise.
+       (gimple_omp_for_index): Likewise.
+       (gimple_omp_for_initial): Likewise.
+       (gimple_omp_for_final): Likewise.
+       (gimple_omp_for_incr): Likewise.
+       (gimple_omp_for_set_pre_body): Likewise.
+       (gimple_omp_parallel_clauses): Likewise.
+       (gimple_omp_parallel_child_fn): Likewise.
+       (gimple_omp_parallel_data_arg): Likewise.
+       (gimple_omp_single_clauses): Likewise.
+       (gimple_omp_sections_clauses): Likewise.
+       (gimple_omp_for_cond): Likewise.
+       (gimple_return_retval): Likewise.
+       (is_gimple_omp): Likewise.
+       (gimple_nop_p): Likewise.
+       (gimple_expr_type): Likewise.
+
+2007-10-17  Diego Novillo  <dnovillo@google.com>
+
+       * tree-ssa-loop-manip.c (gimple_duplicate_loop_to_header_edge): Rename
+       from tree_duplicate_loop_to_header_edge.
+       Update all users.
+       * value-prof.c: Convert and enable all functions in the file.
+       (gimple_divmod_fixed_value): Rename from
+       tree_divmod_fixed_value.
+       (gimple_mod_pow2): Rename from tree_mod_pow2.
+       (gimple_mod_subtract): Rename from tree_mod_subtract.
+       (gimple_divmod_fixed_value_transform): Rename from
+       tree_divmod_fixed_value_transform.
+       (gimple_mod_pow2_value_transform): Rename from
+       tree_mod_pow2_value_transform.
+       (gimple_mod_subtract_transform): Rename from
+       tree_mod_subtract_transform.
+       (gimple_stringops_transform): Rename from tree_stringops_transform.
+       (gimple_ic_transform): Rename from tree_ic_transform.
+       Update all users.
+       * value-prof.h (gimple_register_value_prof_hooks): Rename from
+       tree_register_value_prof_hooks.  Update all users.
+       * tree.h (OMP_DIRECTIVE_P): Remove.  Update all users.
+       (validate_arglist): Move to gimple.h.
+       * builtins.c: (validate_arglist): Change first argument to
+       const_gimple.  Disable most callers.
+       * gimple.def: Document all GIMPLE codes.
+       * tree-gimple.c (is_gimple_operand): New.
+       (get_gimple_rhs_class)<ADDR_EXPR>: Accept as a
+       GIMPLE_SINGLE_RHS.
+       <WITH_SIZE_EXPR>: Likewise.
+       (get_gimple_rhs_num_ops): New.
+       (is_gimple_call_addr): Tidy.
+       * tree-gimple.h (get_gimple_rhs_num_ops): Declare.
+       * gimple-dummy.c (free_histograms, stringop_block_profile):
+       Remove.
+       * gimple-low.c (lower_function_body): Tidy.
+       * predict.c (gimple_predicted_by_p): Rename from
+       tree_predicted_by_p.  Update all users.
+       (gimple_predict_edge): Rename from tree_predict_edge.  Update
+       all users.
+       * gimple-iterator.c (gsi_link_seq_after): Update documentation.
+       (gsi_link_after): Likewise.
+       * tree-eh.c (stmt_could_throw_1_p): New.
+       (stmt_could_throw_p): New.
+       * gimple-pretty-print.c (dump_unary_rhs): Print ADDR_EXPR as
+       a single operand.
+       (dump_gimple_switch): Support NULL case labels.
+       (dump_gimple_asm): Dump outputs first.
+       * gimplify.c (compare_case_labels): Make the default label
+       sort first.
+       (sort_case_labels): Do not special case the default label.
+       (gimplify_init_ctor_eval): Gimplify initializer expressions.
+       (gimplify_modify_expr): Unshare the operands before setting
+       them on the new GIMPLE_ASSIGN statement.
+       (gimplify_asm_expr): NULLify the chain on operands before
+       putting them on the input/output vectors.
+       * tree-cfgcleanup.c: Convert and enable CFG cleanup functions.
+       (cleanup_control_expr_graph): Call gimple_fold.
+       * tree-flow.h (gimple_block_label): Rename from
+       tree_block_label.  Update all users.
+       (gimple_duplicate_sese_region): Rename from
+       tree_duplicate_sese_region.  Update all users.
+       (gimple_duplicate_sese_tail): Rename from
+       tree_duplicate_sese_tail.  Update all users.
+       (gimple_purge_dead_abnormal_call_edges): Rename from
+       tree_purge_dead_abnormal_call_edges.  Update all users.
+       (gimple_purge_all_dead_eh_edges): Rename from
+       tree_purge_all_dead_eh_edges.  Update all users.
+       (stmt_could_throw_p): Declare.
+       (add_stmt_to_eh_region_fn): Move from except.h.
+       (remove_stmt_from_eh_region_fn): Likewise.
+       (lookup_stmt_eh_region_fn): Likewise.
+       (lookup_stmt_eh_region): Likewise.
+       (verify_eh_edges): Likewise.
+       * Makefile.in (GIMPLE_H): Add dependencies on GGC_H, TM_H and
+       TARGET_H.
+       (ipa-inline.o): Add dependency on TREE_FLOW_H.
+       (out_object_file): Add dependency on TREE_GIMPLE_H.
+       * gimple.c (gimple_set_code): Rename from set_gimple_code.
+       (gimple_size): New.
+       (gimple_alloc): New.
+       (gimple_alloc_ops): New.
+       (build_gimple_with_ops): Call them.
+       (build_gimple_return): Only call gimple_return_set_retval if
+       RETVAL is not NULL.
+       (build_gimple_call): Validate argument FN.
+       (extract_ops_from_tree): New.
+       (build_gimple_assign_with_ops): New.
+       (build_gimple_assign): Call them.
+       (build_gimple_nop):  Call gimple_alloc.
+       (build_gimple_bind): Likewise.
+       (build_gimple_asm_1): Tidy.
+       (build_gimple_asm_vec): Tidy.
+       (build_gimple_asm): Tidy.
+       (build_gimple_catch): Call gimple_alloc.
+       (build_gimple_eh_filter): Likewise.
+       (build_gimple_try): Likewise.
+       (build_gimple_wce): Likewise.
+       (build_gimple_phi): Remove.
+       (build_gimple_resx): Call gimple_alloc.
+       (build_gimple_switch_1): Tidy.
+       (build_gimple_switch): Tidy.
+       (build_gimple_omp_critical): Call gimple_alloc.
+       (build_gimple_omp_for): Likewise.
+       (build_gimple_omp_parallel): Likewise.
+       (build_gimple_omp_section): Likewise.
+       (build_gimple_omp_master): Likewise.
+       (build_gimple_omp_continue): Likewise.
+       (build_gimple_omp_ordered): Likewise.
+       (build_gimple_omp_return): Likewise.
+       (build_gimple_omp_sections): Likewise.
+       (build_gimple_omp_single): Likewise.
+       (gimple_check_failed): Change GS to const_gimple.  Update all
+       users.
+       (gimple_range_check_failed): Likewise.
+       (walk_gimple_seq): Change return type to tree.  Update all
+       users.
+       If the call to walk_gimple_stmt returns non-NULL,
+       return it immediately.
+       (walk_gimple_asm): Change return type to tree.  Update all
+       users.
+       If the call to walk_tree returns non-NULL, return it.
+       (walk_gimple_stmt): Likewise.
+       (gimple_fold): New.
+       (gimple_assign_set_rhs_from_tree): New.
+       (gimple_assign_set_rhs_with_ops): New.
+       (gimple_copy): New.
+       * basic-block.h (gimple_predicted_by_p): Rename from
+       tree_predicted_by_p.  Update all users.
+       (gimple_predict_edge): Rename from tree_predict_edge.  Update
+       all users.
+       * gimple.h: Add documentation to all inline functions.
+       (gimple_seq_first): Return NULL if S is NULL.
+       (gimple_seq_last): Likewise.
+       (GF_ASM_VOLATILE): Define.
+       (GF_NO_WARNING): Rename from GIMPLE_NO_WARNING.  Update all
+       users.
+       (build_gimple_assign_with_ops): Declare.
+       (build_gimple_asm): Change unsigned arguments to size_t.
+       Update all users.
+       (build_gimple_switch): Likewise.
+       (build_gimple_phi): Remove.
+       (validate_arglist): Declare.
+       (gimple_fold): Declare.
+       (gimple_assign_set_rhs_from_tree): Declare.
+       (gimple_assign_set_rhs_with_ops): Declare.
+       (gimple_copy): Declare.
+       (is_gimple_operand): Declare.
+       (gimple_num_ops): Change argument to const_gimple.
+       Return 0 if GS is not one of the statements that has tree
+       operands.
+       (gimple_ops): New.
+       (gimple_op): Change argument to const_gimple.
+       If GS is not a statement that has tree operands, return NULL.
+       (gimple_op_ptr): Likewise.
+       (gimple_assign_subcode): Change argument to const_gimple.
+       (gimple_assign_operand): Remove.  Update all users.
+       (gimple_assign_set_operand): Remove.  Update all users.
+       (gimple_assign_lhs): Change argument type to const_gimple.
+       (gimple_assign_rhs1): Likewise.
+       (gimple_assign_rhs2): Likewise.
+       (gimple_assign_set_rhs1): Assert that RHS is a valid operand.
+       (gimple_assign_set_rhs2): Likewise.
+       (gimple_call_lhs): Call gimple_op.
+       (gimple_call_set_lhs): Assert that LHS is a valid operand.
+       Call gimple_set_op.
+       (gimple_call_set_fn): New.
+       (gimple_call_fndecl): Change argument type to const_gimple.
+       Call gimple_call_fn.
+       (gimple_call_chain): Call gimple_op.
+       (gimple_call_set_chain): Assert that CHAIN is valid.
+       Call gimple_set_op.
+       (gimple_call_nargs): Change argument to const_gimple.
+       Call gimple_op.
+       (gimple_call_arg_ptr): Call gimple_op_ptr.
+       (gimple_call_set_arg): Assert that ARG is valid.
+       Call gimple_set_op.
+       (gimple_cond_code): Change argument to const_gimple.
+       (gimple_cond_lhs): Change argument to const_gimple.
+       (gimple_cond_set_lhs): Assert that the operand is valid.
+       Call gimple_set_op.
+       (gimple_cond_rhs): Change argument to const_gimple.
+       Call gimple_op.
+       (gimple_cond_true_label): Call gimple_op.
+       (gimple_cond_false_label): Likewise.
+       (gimple_label_label): Likewise.
+       (gimple_cond_set_true_label): Assert that the operand is
+       valid.
+       Call gimple_set_op.
+       (gimple_cond_set_false_label): Likewise.
+       (gimple_goto_dest): Change argument to const_gimple.
+       Call gimple_op.
+       (gimple_goto_set_dest): Assert that the operand is valid.
+       Call gimple_set_op.
+       (gimple_asm_ninputs): Change return type to size_t.  Update
+       all users.
+       (gimple_asm_noutputs): Likewise.
+       (gimple_asm_nclobbers): Rename from gimple_asm_nclobbered.
+       Change return type to size_t.
+       Update all users.
+       (gimple_asm_set_input_op): Assert that the argument is
+       valid.
+       (gimple_asm_set_output_op): Likewise.
+       (gimple_asm_set_clobber_op): Likewise.
+       (gimple_asm_volatile_p): New.
+       (gimple_asm_set_volatile): New.
+       (gimple_asm_clear_volatile): New.
+       (gimple_phi_set_capacity): Remove.
+       (gimple_phi_set_nargs): Remove.
+       (gimple_expr_type): New.
+       (struct walk_stmt_info): Remove fields want_bind_expr and
+       want_return_expr.  Update all users.
+       * tree-cfg.c: Convert all functions for CFG cleanup and
+       verification.
+       (gimple_redirect_edge_and_branch): Rename from
+       tree_redirect_edge_and_branch.
+       (gimple_try_redirect_by_replacing_jump): Rename from
+       tree_try_redirect_by_replacing_jump.
+       (gimple_verify_flow_info): Rename from tree_verify_flow_info.
+       (gimple_make_forwarder_block): Rename from
+       tree_make_forwarder_block.
+       (gimple_cfg2vcg): Rename from tree_cfg2vcg.
+       (gimple_merge_blocks): Rename from tree_merge_blocks.
+       (gimple_can_merge_blocks_p): Rename from tree_can_merge_blocks_p.
+       (gimple_can_remove_branch_p): Rename from tree_can_remove_branch_p.
+       (gimple_redirect_edge_and_branch): Rename from
+       tree_redirect_edge_and_branch.
+       (gimple_move_block_after): Rename from tree_move_block_after.
+       (gimple_predict_edge): Rename from tree_predict_edge.
+       (gimple_predicted_by_p): Rename from tree_predicted_by_p.
+       (gimple_duplicate_bb): Rename from tree_duplicate_bb.
+       (gimple_can_duplicate_bb_p): Rename from tree_can_duplicate_bb_p.
+       (gimple_split_edge): Rename from tree_split_edge.
+       (gimple_make_forwarder_block): Rename from tree_make_forwarder_block.
+       (gimple_block_ends_with_call_p): Rename from
+       tree_block_ends_with_call_p.
+       (gimple_block_ends_with_condjump_p): Rename from
+       tree_block_ends_with_condjump_p.
+       (gimple_flow_call_edges_add): Rename from
+       tree_flow_call_edges_add.
+       (gimple_execute_on_growing_pred): Rename from
+       tree_execute_on_growing_pred.
+       (gimple_execute_on_shrinking_pred): Rename from
+       tree_execute_on_shrinking_pred.
+       (gimple_duplicate_loop_to_header_edge): Rename from
+       tree_duplicate_loop_to_header_edge.
+       (gimple_lv_add_condition_to_bb): Rename from
+       tree_lv_add_condition_to_bb.
+       (gimple_lv_adjust_loop_header_phi): Rename from
+       tree_lv_adjust_loop_header_phi.
+       (struct pass_build_cfg): Enable TODO_verify_stmts and
+       TODO_cleanup_cfg.
+       * passes.c (execute_function_todo): Enable call to
+       cleanup_tree_cfg.
+
+2007-10-17  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimple.def: Add GIMPLE_WITH_CLEANUP_EXPR.
+       * gsstruct.def: Add GSS_WCE.
+       * gimple-iterator.c (gsi_delink): New.
+       (gsi_split_seq_*): Update comment.
+       * gimple.c (gss_for_code): Handle GIMPLE_WCE.  Adjust whitespace.
+       (build_gimple_wce): New.
+       * gimple.h (struct gimple_statement_wce): New.
+       (union gimple_statement_d): Add gimple_wce.
+       (build_gimple_wce): Protoize.
+       (gimple_wce_cleanup): New.
+       (gimple_wce_set_cleanup): New.
+       (gimple_wce_cleanup_eh_only): New.
+       (gimple_wce_set_cleanup_eh_only): New.
+       (gsi_delink): Protoize.
+
+2007-10-12  Aldy Hernandez  <aldyh@redhat.com>
+
+       * Makefile.in (GIMPLE_H): Depend on TARGET_H.
+       * gimple.h (gsi_alloc): New.
+       (_ALLOC_GSI): Remove.
+       Replace _ALLOC_GSI uses with gsi_alloc.
+
+2007-10-12  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimple.h (_ALLOC_GSI): New.
+       Use it throughout when allocating a new gimple_stmt_iterator.
+
+2007-10-11  Aldy Hernandez  <aldyh@redhat.com>
+
+       Merge with mainline @129233.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2007-10-11  Aldy Hernandez  <aldyh@redhat.com>
+
+       * cfg.c: Include tree-flow.h.
+       (remove_edge_raw): Call redirect_edge_var_map_clear.
+       (redirect_edge_succ_nodup): Call redirect_edge_var_map_dup.
+       * tree-flow-inline.h (redirect_edge_var_map_def): New.
+       (redirect_edge_var_map_result): New.
+       * tree-cfgcleanup.c (remove_forwarder_block_with_phi): Replace
+       PENDING_STMT use with redirect_edge_var_map_*.
+       * tree-ssa.c (edge_var_maps): New definition.
+       (redirect_edge_var_map_add): New.
+       (redirect_edge_var_map_clear): New.
+       (redirect_edge_var_map_dup): New.
+       (redirect_edge_var_map_vector): New.
+       (redirect_edge_var_map_destroy): New.
+       (ssa_redirect_edge): Replace PENDING_STMT use with
+       redirect_edge_var_map_*.
+       (flush_pending_stmts): Same.
+       (delete_tree_ssa): Destroy edge var map.
+       * tree-flow.h (struct _edge_var_map): New.
+       Define edge_var_map vector type.
+       Declare redirect_edge_var_map_* prototypes.
+       * Makefile.in (cfg.o): Depend on TREE_FLOW_H.
+       * tree-cfg.c (reinstall_phi_args): Replace
+       PENDING_STMT use with redirect_edge_var_map_*.
+
+2007-10-02  Diego Novillo  <dnovillo@google.com>
+
+       Merge with mainline @128957.
+
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2007-09-24  Diego Novillo  <dnovillo@google.com>
+
+       Merge with mainline @128708.
+       
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+
+2007-09-24  Diego Novillo  <dnovillo@google.com>
+
+       * gimple.h (gimple_code): Change argument type to
+       const_gimple.
+       (gimple_flags): Likewise.
+       (gimple_subcode): Likewise.
+       (gimple_next): Likewise.
+       (gimple_prev): Likewise.
+       (gimple_bb): Likewise.
+       (gimple_block): Likewise.
+       (gimple_locus): Likewise.
+
+2007-09-15  Diego Novillo  <dnovillo@google.com>
+
+       * gimple-iterator.c (gsi_replace): Call gsi_insert_before and
+       gsi_remove to do the replacement.
+       (gsi_insert_before): Take the basic block from the iterator.
+       (gsi_insert_after): Likewise.
+       * gimple-iterator.h: Move inside gimple.h.
+       * tree-eh.c (make_eh_edge, make_eh_edges): Enable.
+       * gimple-pretty-print.c (dump_gimple_seq, dump_gimple_cond,
+       dump_gimple_bind, dump_gimple_try): Do not handle TDF_DETAILS.
+       (dump_gimple_switch): Fix display of case labels.
+       (dump_gimple_stmt): Handle TDF_DIAGNOSTIC.
+       (pp_cfg_jump): New.
+       (dump_implicit_edges): New.
+       (gimple_dump_bb_buff): Call it.
+       * domwalk.c (walk_dominator_tree):
+       * gimplify.c (gimplify_switch_expr): Fix generation of
+       GIMPLE_SWITCH labels.
+       (gimplify_case_label_expr): Emit a label for CASE_LABEL
+       instead of the CASE_LABEL_EXPR.
+       * Makefile.in (GIMPLE_H): Add dependency on BASIC_BLOCK_H.
+       * gimple.c (walk_gimple_stmt): Reverse meaning of return value
+       of CALLBACK_STMT.  Update all users.
+       Walk sub-statements of statements with bodies.
+       * gimple.h (GCC_GIMPLE_H): Rename from GCC_GIMPLE_IR_H.
+       Include vec.h, tm.h, hard-reg-set.h and basic-block.h.
+       (bb_seq, set_bb_seq): Move from tree-flow-inline.h
+       (gimple_label_set_label): Do not allow CASE_LABEL_EXPR.
+       (gsi_start_bb): New.  Update all users that were calling
+       gsi_start (bb_seq ()).
+       (struct gimple_stmt_iterator): Add field 'bb'.
+       * tree-cfg.c (build_gimple_cfg): Enable.
+       (create_bb): Create a new sequence if the given one is NULL.
+       (make_gimple_switch_edges): Rename from make_switch_expr_edges.
+       Update all users.
+       (cleanup_dead_labels): Fix handling of GIMPLE_SWITCH.
+       (group_case_labels): Likewise.
+       (verify_types_in_gimple_stmt): Do not allow CASE_LABEL_EXPR in
+       a GIMPLE_LABEL.
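+
+       A short sketch of the traversal idiom gsi_start_bb makes possible;
+       the by-value vs. by-pointer conventions of the iterator shifted more
+       than once on the branch, so the calls below assume the eventual shape
+       of the interface, and visit_all_stmts itself is hypothetical:
+
+         static void
+         visit_all_stmts (void)
+         {
+           basic_block bb;
+
+           FOR_EACH_BB (bb)
+             {
+               gimple_stmt_iterator gsi;
+
+               for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+                 {
+                   gimple stmt = gsi_stmt (gsi);
+                   /* ... examine or transform STMT ...  */
+                   (void) stmt;
+                 }
+             }
+         }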
+
+2007-09-13  Diego Novillo  <dnovillo@google.com>
+
+       * tree-pretty-print.c (dump_symbols, dump_generic_bb,
+       dump_bb_header, dump_bb_end, dump_phi_nodes, pp_cfg_jump,
+       dump_implicit_edges, dump_generic_bb_buff): Remove.
+       * value-prof.c (histogram_eq): Enable.
+       (dump_histograms_for_stmt): Adapt to tuples.
+       * value-prof.h (struct):
+       * cfghooks.h (struct cfg_hooks)<dump_bb>: Add int argument.
+       Update all users.
+       * gimple-dummy.c (gimple_remove_stmt_histograms,
+       remove_stmt_from_eh_region): Remove.
+       * gimple-iterator.h (gsi_start): Support NULL sequences.
+       * tree-eh.c (add_stmt_to_eh_region): Enable.
+       (remove_stmt_from_eh_region): Enable.
+       (lookup_stmt_eh_region): Enable.
+       (tree_could_throw_p): Enable.
+       (stmt_can_throw_internal): Enable.
+       * gimple-pretty-print.c: Include value-prof.h
+       (dump_gimple_cond): Do not insert a newline initially.
+       (dump_gimple_bind): Likewise.
+       (dump_gimple_try): Likewise.
+       (dump_gimple_asm): Likewise.
+       (dump_symbols): Move from tree-pretty-print.c
+       (dump_gimple_phi): New.
+       (dump_gimple_stmt): Call it.

+       (dump_bb_header): Move from tree-pretty-print.c
+       (dump_bb_end): Likewise.
+       (dump_phi_nodes): Likewise.
+       (gimple_dump_bb_buff): New.
+       (gimple_dump_bb): New.
+       * Makefile.in (gimple-pretty-print.o): Add dependency
+       value-prof.h
+       * tree-cfg.c (fold_cond_expr_cond): Handle cases where
+       fold_binary returns NULL.
+       (make_cond_expr_edges): Take locus from the first statement of
+       the destination blocks.
+       (tree_dump_bb): Remove.
+       (gimple_debug_bb): Rename from debug_tree_bb.
+       Update all users.
+       (gimple_debug_bb_n): Rename from debug_tree_bb_n.
+       Update all users.
+       (gimple_debug_cfg): Rename from debug_tree_cfg.
+       Update all users.
+       (gimple_dump_cfg): Rename from dump_tree_cfg.
+       Update all users.
+       (is_ctrl_altering_stmt): Call gimple_call_flags to determine
+       whether the function is const/pure.
+       * tree-ssa-operands.c (stmt_references_memory_p): Enable.
+
+2007-09-11  Diego Novillo  <dnovillo@google.com>
+
+       * tree.h (struct tree_ssa_name): Add field def_stmt.
+       (SSA_NAME_DEF_STMT): Return it.
+       * tree-phinodes.c (create_phi_node): Return gimple.
+       Update all callers.
+       (add_phi_args): Change PHI argument to gimple.  Update all
+       callers.
+       (remove_phi_node): Call gimple_remove.
+       (phi_reverse): Remove.  Update all users.
+       * tree-ssa-alias-warnings.c: Disable.
+       * input.h (IS_LOCATION_EMPTY): Fix comparison of LOCATION_LINE.
+       * fold-const.c: Include gimple.h.
+       (fold_undefer_overflow_warnings): Change type of argument STMT to
+       gimple.  Update all users.
+       * cfghooks.h (struct cfg_hooks)<can_merge_blocks_p>: Change
+       arguments to basic_block.  Update all users.
+       * tree-gimple.c (get_gimple_rhs_class): Change argument to
+       enum tree_code.  Update all users.  Move calls to
+       is_gimple_lvalue and is_gimple_val ...
+       (is_gimple_formal_tmp_rhs): ... here.
+       * tree-ssa-ccp.c (fold_stmt_r): Enable.
+       (fold_stmt): Enable.
+       (fold_stmt_inplace): Enable.  Adapt to tuples.
+       * ipa-pure-const.c (analyze_function): Disable.
+       * tree-ssa-propagate.c (get_rhs): Adapt to tuples.  Update all
+       users.
+       * gimple-dummy.c (ssa_operands_active): Remove dummy function.
+       (remove_stmt_from_eh_region, create_phi_node, add_phi_arg,
+       substitute_in_loop_info, release_defs, push_stmt_changes,
+       pop_stmt_changes, replace_exp): Add dummy functions.
+       * predict.c: Disable.
+       * gimple-iterator.c: Include tm.h and tree-flow.h
+       (gsi_delink): Remove.  Update all users.
+       (update_modified_stmt): Move from tree-cfg.c.
+       (gsi_insert_before): New.
+       (gsi_insert_seq_before): New.
+       (gsi_insert_after): New.
+       (gsi_insert_seq_after): New.
+       (gsi_for_stmt): New.
+       (gsi_move_after): New.
+       (gsi_move_before): New.
+       (gsi_move_to_bb_end): New.
+       * gimple-iterator.h (gsi_remove): New.
+       * tree-eh.c (add_stmt_to_eh_region_fn): Add comment
+       (add_stmt_to_eh_region): Likewise.
+       (remove_stmt_from_eh_region_fn): Likewise.
+       (remove_stmt_from_eh_region): Likewise.
+       (lookup_stmt_eh_region_fn): Likewise.
+       (lookup_stmt_eh_region): Likewise.
+       (tree_could_throw_p): Likewise.
+       (stmt_can_throw_internal): Likewise.
+       (tree_can_throw_external): Likewise.
+       * gimple-pretty-print.c (op_gimple_cond): Remove.  Update all
+       users.
+       * tree-affine.c (tree_to_aff_combination_expand): Disable.
+       * tree-flow-inline.h (op_iter_init_vdef): Call gimple_code
+       instead of TREE_CODE.
+       * gimplify.c (gimplify_cond_expr): Use enum tree_code instead
+       of enum gimple_cond as the subcode for GIMPLE_COND.
+       Do not switch around the conditional when the then clause is empty.
+       (tree_to_gimple_tuple): Remove.  Update all users.
+       (gimplify_omp_for):
+       (force_gimple_operand_gsi): Rename from force_gimple_operand_bsi.
+       Update all users.
+       * tree-dfa.c (find_referenced_vars): Disable.
+       (collect_dfa_stats): Likewise.
+       (collect_dfa_stats_r): Likewise.
+       * cfgexpand.c: Disable.
+       * tree-mudflap.c: Disable.
+       * print-tree.c: Include diagnostic.h.
+       (print_node): Call print_gimple_stmt when printing SSA_NAMEs.
+       * lambda-code.c: Disable.
+       * tree-profile.c (tree_init_edge_profiler): Disable.
+       (tree_gen_edge_profiler): Disable.
+       * tree-flow.h (block_stmt_iterator): Remove.  Update all users.
+       (const_block_stmt_iterator): Likewise.
+       * Makefile.in (print-tree.o): Add dependency on $(DIAGNOSTIC_H).
+       (gimple-iterator.o): Add dependency on $(TREE_FLOW_H) and
+       value-prof.h.
+       (gimple.o): Likewise.
+       (fold-const.o): Add dependency on $(GIMPLE_H).
+       * gimple.c: Include tm.h, hard-reg-set.h, basic-block.h,
+       tree-flow.h and value-prof.h.
+       (build_gimple_with_ops): Do not allocate operands if NUM_OPS == 0.
+       (build_gimple_return): Remove argument RESULT_DECL_P.  Update
+       all users.
+       (build_gimple_cond): Use enum tree_code instead of enum
+       gimple_cond as the subcode.  Update all users.
+       (gimple_cond_invert): Remove.  Update all users.
+       (walk_gimple_seq): Add arguments CALLBACK_STMT and
+       CALLBACK_OP.  Update all users.
+       (walk_gimple_asm): Likewise.
+       (walk_gimple_stmt): Likewise.
+       (gimple_remove): New.
+       (gimple_seq_reverse): New.
+       (set_gimple_bb): New.
+       * gimple.h (gimple_statement_base): Split field 'flags' into
+       fields 'subcode' and 'flags'.  Update all users.
+       (gimple_statement_with_ops): Rename field 'base' to
+       'gsbase'.  Update all users.
+       (gimple_statement_omp): Likewise.
+       (gimple_statement_bind): Likewise.
+       (gimple_statement_catch): Likewise.
+       (gimple_statement_eh_filter): Likewise.
+       (gimple_statement_phi): Likewise.
+       (gimple_statement_resx): Likewise.
+       (gimple_statement_try): Likewise.
+       (gimple_statement_omp_parallel): Likewise.
+       (enum gimple_cond): Remove.  Update all users.
+       (GIMPLE_NO_WARNING): Define.
+       (set_gimple_flags): Assert that FLAGS fits in 8 bits.
+       (add_gimple_flag): Likewise.
+       (set_gimple_subcode): New.  Use instead of set_gimple_flags
+       everywhere.
+       (gimple_subcode): New.  Use instead of gimple_flags
+       everywhere.
+       (gimple_no_warning_p): New.
+       (gimple_cond_set_code): New.
+       (gimple_cond_make_false): New.
+       (gimple_cond_make_true): New.
+       (gimple_phi_num_args): Rename from gimple_phi_nargs.  Update
+       all users.
+       * tree-cfg.c (build_gimple_cfg): Return immediately.
+       (set_bb_for_stmt): Remove.  Move functionality to
+       set_gimple_bb.  Update all users.
+       (factor_computed_gotos):
+       (bsi_for_stmt): Remove.
+       (bsi_insert_before): Remove.
+       (bsi_insert_seq_before): Remove.
+       (bsi_insert_after): Remove.
+       (bsi_insert_seq_after): Remove.
+       (bsi_remove): Remove.
+       (bsi_move_after): Remove.
+       (bsi_move_before): Remove.
+       (bsi_move_to_bb_end): Remove.
+       (bsi_replace): Remove.
+       (tree_verify_flow_info): Adapt to tuples.
+       (tree_make_forwarder_block): Likewise.
+       (tree_try_redirect_by_replacing_jump): Likewise.
+       (tree_redirect_edge_and_branch): Likewise.
+       (tree_purge_dead_eh_edges): Likewise.
+       (gimple_cfg_hooks): Enable some hooks.
+       * tree-ssanames.c (make_ssa_name): Change type of STMT to
+       gimple.  Update all users.
+       * tree-ssa-operands.c (ssa_operands_active): Enable.
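+
+       A hedged sketch tying together SSA_NAME_DEF_STMT (now returning a
+       gimple), gsi_for_stmt and gsi_remove; remove_def_of is hypothetical,
+       and the boolean argument of gsi_remove is an assumption:
+
+         /* Delete the statement that defines NAME.  */
+         static void
+         remove_def_of (tree name)
+         {
+           gimple def_stmt = SSA_NAME_DEF_STMT (name);
+           gimple_stmt_iterator gsi = gsi_for_stmt (def_stmt);
+
+           gsi_remove (&gsi, true);
+         }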
+
+2007-08-31  Diego Novillo  <dnovillo@google.com>
+
+       * tree-gimple.c (is_gimple_addressable): Tidy.
+       * Makefile.in (tree-ssa-structalias.o): Disable dependency on
+       GTY header file.
+       (tree-ssa-propagate.o): Likewise.
+       (tree-phinodes.o): Likewise.
+       (tree-scalar-evolution.o): Likewise.
+       (tree-vect-generic.o): Likewise.
+       * gimple.h (struct gimple_statement_phi): Change type of
+       fields 'capacity' and 'nargs' to size_t.
+       Update all users.
+       * tree-cfg.c (verify_gimple_unary_expr): Remove.  Update all
+       users.
+       (verify_gimple_binary_expr): Remove.  Update all users.
+       (verify_types_in_gimple_min_lval): Rename from
+       verify_gimple_min_lval.
+       (verify_types_in_gimple_reference): Rename from
+       verify_gimple_reference.
+       (verify_gimple_tree_expr): Remove.
+       Move checks to verify_types_in_gimple_assign.
+       (verify_types_in_gimple_call): Rename from verify_gimple_call.
+       (verify_types_in_gimple_cond): Rename from verify_gimple_cond.
+       (verify_types_in_gimple_assign): Rename from verify_gimple_assign.
+       (verify_types_in_gimple_switch): Rename from verify_gimple_switch.
+       (verify_types_in_gimple_phi): New.
+       (verify_types_in_gimple_return): Rename from
+       verify_gimple_return.
+       (verify_types_in_gimple_stmt): Rename from verify_gimple_stmt.
+       (verify_types_in_gimple_seq): Rename from verify_gimple_seq.
+
+2007-08-30  Chris Matthews  <chrismatthews@google.com>
+           Diego Novillo  <dnovillo@google.com>
+
+       * gimplify.c (gimplify_body): Call verify_gimple_seq if
+       ENABLE_TYPES_CHECKING is set.
+       * gimple.h (is_gimple_omp): New.
+       * tree-cfg.c (verify_gimple_tree_expr): Rename from
+       verify_gimple_expr.
+       Verify tree nodes that should disappear after conversion to
+       GIMPLE.
+       Do not handle COND_EXPR, CALL_EXPR.
+       (verify_gimple_modify_stmt): Remove.
+       (verify_gimple_call): New.
+       (verify_gimple_cond): New.
+       (verify_gimple_assign): New.
+       (verify_gimple_return): New.
+       (verify_gimple_switch): New.
+       (verify_gimple_stmt):  Change input argument to type gimple.
+       Call new verifiers.
+       (verify_gimple_seq): Rename from verify_gimple_1.
+
+2007-08-30  Diego Novillo  <dnovillo@google.com>
+
+       * gimple-low.c (gimple_try_catch_may_fallthru): Call
+       gimple_code instead of gimple_flags.
+       * gimple.c (set_gimple_body): Use gimple_seq instead of gimple
+       when accessing vector gimple_bodies_vec.
+       (gimple_body): Likewise.
+       (gimple_assign_copy_p): New.
+       * gimple.h (enum gimple_try_kind): New.
+       (GIMPLE_TRY_CATCH): Move inside enum gimple_try_kind.
+       (GIMPLE_TRY_FINALLY): Likewise.
+       (gimple_assign_copy_p): Declare.
+       (gimple_assign_rhs_code): New.
+       Update callers that used to call gimple_flags.
+       (gimple_cond_code): New.
+       Update callers that used to call gimple_flags.
+       (gimple_try_kind): New.
+       (gimple_nop_p): Tidy comment.
+       * gimple-pretty-print.c (dump_unary_rhs): New.
+       (dump_gimple_assign): Call it.
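+
+       A sketch of how a propagation pass might consume the new predicates;
+       copy_source is hypothetical, and it assumes a plain copy carries
+       SSA_NAME as the assignment subcode:
+
+         /* If STMT is a copy "lhs = name", return the copied name,
+            otherwise NULL_TREE.  */
+         static tree
+         copy_source (gimple stmt)
+         {
+           if (gimple_code (stmt) == GIMPLE_ASSIGN
+               && gimple_assign_copy_p (stmt)
+               && gimple_assign_rhs_code (stmt) == SSA_NAME)
+             return gimple_assign_rhs1 (stmt);
+           return NULL_TREE;
+         }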
+
+2007-08-30  Diego Novillo  <dnovillo@google.com>
+
+       * gimplify.c (gimplify_cond_expr): When gimplifying a ?: call
+       gimplify_stmt with the expression just built instead of the
+       original expression.
+       Use the correct labels when calling build_gimple_cond.
+
+2007-08-30  Diego Novillo  <dnovillo@google.com>
+
+       * tree-vrp.c: Disable.
+       * tree-loop-linear.c: Disable.
+       * tree-into-ssa.c: Disable.
+       * tree-ssa-loop-im.c: Disable.
+       * tree-complex.c: Disable.
+       * cgraphbuild.c: Disable most functions.
+       * tree-ssa-threadupdate.c: Disable.
+       * tree-ssa-loop-niter.c: Disable.
+       * tree-pretty-print.c: Disable.
+       * tree-ssa-loop-unswitch.c: Disable.
+       * tree-ssa-loop-manip.c: Disable.
+       * value-prof.c: Disable.
+       * tree-tailcall.c: Disable.
+       * tree-ssa-loop-ch.c: Disable.
+       * tree-scalar-evolution.c: Disable.
+       * tree-phinodes.c: Disable.
+       * omp-low.c: Disable.
+       * tree-ssa-dse.c: Disable.
+       * ipa-reference.c: Disable.
+       * tree-ssa-uncprop.c: Disable.
+       * tree-ssa-sccvn.c: Disable.
+       * cgraphunit.c (verify_cgraph_node): Disable basic block traversal.
+       * tree-ssa-copyrename.c: Disable.
+       * tree-ssa-ccp.c: Disable most functions.
+       * tree-ssa-loop-ivopts.c: Disable.
+       * tree-stdarg.c: Disable.
+       * tree-ssa-math-opts.c: Disable.
+       * tree-ssa-dom.c: Disable most functions.
+       * tree-nrv.c: Disable.
+       * tree-ssa-propagate.c: Disable.
+       * gimple-dummy.c: New file.
+       * tree-ssa-alias.c: Disable most functions.
+       * tree-ssa-sink.c: Disable.
+       * expr.c (expand_expr_real): Disable call to
+       lookup_stmt_eh_region.
+       * tree-ssa-loop-ivcanon.c: Disable.
+       * predict.c (strip_builtin_expect): Disable.
+       (tree_predict_by_opcode): Disable.
+       (return_prediction): Disable.
+       (apply_return_prediction): Disable.
+       (tree_bb_level_predictions): Disable.
+       (tree_estimate_probability): Disable.
+       (predict_paths_leading_to): Disable.
+       * gimple-iterator.c (gsi_replace): Mark unused arguments with
+       ATTRIBUTE_UNUSED.
+       * tree-ssa-ifcombine.c: Disable.
+       * matrix-reorg.c: Disable.
+       * c-decl.c (c_gimple_diagnostics_recursively): Disable call to
+       diagnose_omp_structured_block_errors.
+       * tree-eh.c: Disable most functions.
+       * tree-vectorizer.c: Disable.
+       * tree-vectorizer.h (nested_in_vect_loop_p): Disable.
+       * ipa-type-escape.c: Disable.
+       * tree-if-conv.c: Disable.
+       * profile.c: Disable.
+       * tree-data-ref.c: Disable.
+       * tree-flow-inline.h (bsi_start): Set bsi.gsi to NULL if the
+       block is invalid.
+       (bsi_last): Likewise.
+       (op_iter_next_use): Disable call to PHI_ARG_DEF_PTR.
+       * tree-vect-analyze.c: Disable.
+       * gimplify.c (gimple_conditional_context): Disable.
+       (gimple_push_cleanup): Disable.
+       (gimplify_omp_parallel): Disable calls to
+       push_gimplify_context and pop_gimplify_context.
+       * tree-ssa-phiopt.c: Disable.
+       * calls.c (emit_call_1): Disable calls to lookup_stmt_eh_region.
+       (expand_call): Likewise.
+       (emit_library_call_value_1): Initialize low_to_save and
+       high_to_save.
+       * tree-ssa-coalesce.c: Disable.
+       * tree-dfa.c (make_rename_temp): Disable call to
+       mark_sym_for_renaming.
+       (find_new_referenced_vars_1): Likewise.
+       (collect_dfa_stats): Disable CFG iteration.
+       (collect_dfa_stats_r): Disable.
+       (mark_symbols_for_renaming): Disable.
+       * cfgexpand.c (maybe_dump_rtl_for_tree_stmt): Disable.
+       (label_rtx_for_bb): Disable.
+       (expand_gimple_cond_expr): Disable.
+       (expand_gimple_tailcall): Disable.
+       (expand_gimple_basic_block): Disable.
+       * tree-cfgcleanup.c: Disable.
+       * tree-ssa-pre.c: Disable.
+       * tree-ssa-live.c: Disable.
+       * tree-sra.c: Disable most functions.
+       * tree-predcom.c: Disable.
+       * tree-mudflap.c: Disable.
+       * ipa-prop.c: Disable.
+       * tree-ssa-copy.c (may_propagate_copy): Disable.
+       (propagate_value): Disable.
+       (execute_copy_prop): Disable.
+       (do_copy_prop): Disable.
+       (store_copy_prop): Disable.
+       * tree-ssa-forwprop.c: Disable.
+       * tree-ssa-dce.c: Disable.
+       * tree-vect-patterns.c: Disable.
+       * tree-ssa-ter.c: Disable.
+       * tree-ssa.c: Disable.
+       * lambda-code.c: Disable.
+       * tree-ssa-loop-prefetch.c: Disable.
+       * tree-inline.c: Disable most functions.
+       * tree-optimize.c (execute_fixup_cfg_pre_ipa): Disable.
+       (execute_fixup_cfg): Disable.
+       (execute_cleanup_cfg_post_optimizing): Disable.
+       (execute_fixup_cfg): Disable.
+       * tree-vect-transform.c: Disable.
+       * tree-object-size.c: Disable.
+       * tree-outof-ssa.c: Disable.
+       * cfgloop.c (find_subloop_latch_edge_by_profile): Disable.
+       (find_subloop_latch_edge_by_ivs): Disable.
+       * tree-profile.c: Disable most functions.
+       * c-gimplify.c (add_block_to_enclosing): Disable.
+       * tree-vect-generic.c: Disable.
+       * tree-flow.h (struct function_ann_d): Disable field
+       reference_vars_info.
+       * Makefile.in: Force -Werror even during stage 1.
+       (OBJS-common): Add gimple-dummy.o.
+       (GTFILES): Remove tree-scalar-evolution.c,
+       tree-ssa-propagate.c, tree-vect-generic.c,
+       tree-ssa-structalias.h, tree-ssa-structalias.c,
+       ipa-reference.h, omp-low.c, tree-phinodes.c, ipa-reference.c
+       * tree-ssa-structalias.c: Disable.
+       * tree-cfg.c: Disable most functions.
+       * passes.c (finish_optimization_passes): Disable call to
+       end_branch_prob.
+       (init_optimization_passes): Disable the registration of
+       several passes.
+       (execute_function_todo): Disable calls to update_ssa and
+       need_ssa_update_p.
+       * tree-ssa-reassoc.c: Disable.
+       * tree-ssanames.c: Disable.
+       * tree-ssa-threadedge.c: Disable.
+       * tree-ssa-operands.c: Disable.
+
+2007-08-29  Aldy Hernandez  <aldyh@redhat.com>
+
+       * tree-ssa-operands.h (pop_stmt_changes, push_stmt_changes):
+       Argument is gimple.
+       (struct ssa_operand_iterator_d): Member phi_stmt is gimple.
+       * value-prof.c (gimple_add_histogram_value): Adjust for tuples.
+       (gimple_duplicate_stmt_histograms): Same.
+       * value-prof.h (gimple_add_histogram_value): Same.
+       (gimple_duplicate_stmt_histograms): Same.
+       * tree-flow-inline.h (clear_and_done_ssa_iter): Same.
+       (op_iter_init): Same.
+       (op_iter_init_phiuse): Same.
+       * tree-flow.h (stmt_references_memory_p): Same.
+       * tree-cfg.c (tree_can_merge_blocks_p): Same.
+       (remove_bb): Same.
+       (find_taken_edge): Same.
+       (tree_cfg2vcg): Same.
+       (first_stmt): Same.
+       (last_stmt): Same.
+       (bsi_move_after): Same.
+       (bsi_move_before): Same.
+       (tree_find_edge_insert_loc): Same.
+       Remove code handling a GIMPLE_MODIFY_STMT inside a RETURN_EXPR.
+       (delete_tree_cfg_annotations): Remove code to remove annotations.
+       * tree-ssa-operands.c (struct scb_d): Stmt_p is a gimple *.
+       (push_stmt_changes): Adjust for tuples.
+       (mark_difference_for_renaming): Same.
+       (pop_stmt_changes): Same.
+       (stmt_references_memory_p): Same.
+
+2007-08-29  Aldy Hernandez  <aldyh@redhat.com>
+
+       * tree-ssa-operands.h: Remove DEF_OPS, USE_OPS, VUSE_OPS,
+       VDEF_OPS, LOADED_SYMS, STORED_SYMS.
+       * tree-pretty-print.c (dump_generic_node): Adjust accordingly.
+       (dump_vops): Use gimple_v*_ops.  Adjust accordingly.
+       * gimple.def: Add to comment.
+       * tree-ssa-alias.c (create_structure_vars): Adjust for tuples.
+       * tree-flow-inline.h (op_iter_init): Make stmt gimple.  Use
+       gimple_*_ops functions.
+       (op_iter_init_use): Make stmt gimple.
+       (op_iter_init_def): Same.
+       (op_iter_init_vdef): Same.
+       (single_ssa_tree_operand): Same.
+       (single_ssa_use_operand): Same.
+       (single_ssa_def_operand): Same.
+       (zero_ssa_operands): Same.
+       (num_ssa_operands): Same.
+       (compare_ssa_operands_equal): Make stmt[12] gimple.
+       (link_use_stmts_after): Adjust for tuples.
+       * tree-ssa-structalias.c (update_alias_info): Use
+       gimple_stored_syms and gimple_loaded_syms.
+       * gimple.h (gimple_has_mem_ops): New.
+       (gimple_def_ops): New.
+       (set_gimple_def_ops): New.
+       (gimple_use_ops): New.
+       (set_gimple_use_ops): New.
+       (gimple_vuse_ops): New.
+       (set_gimple_vuse_ops): New.
+       (gimple_vdef_ops): New.
+       (set_gimple_vdef_ops): New.
+       (gimple_loaded_syms): New.
+       (gimple_stored_syms): New.
+       * tree-ssa-operands.c (finalize_ssa_defs): Adjust for tuples.
+       (copy_virtual_operands): Same.
+       (swap_tree_operands): Same.
+
+2007-08-28  Aldy Hernandez  <aldyh@redhat.com>
+
+       * Revert the parts of Kaveh's constification patch that duplicate
+       API calls.
+
+2007-08-27  Diego Novillo  <dnovillo@google.com>
+
+       Merge with mainline @127831.
+       
+       * configure.ac (ACX_PKGVERSION): Update revision merge string.
+       * configure: Regenerate.
+       
+2007-08-25  Diego Novillo  <dnovillo@google.com>
+
+       * tree.c (build_gimple_modify_stmt_stat): Add deprecation note.
+       * omp-low.c (scan_omp_1): Disable.
+       (scan_omp): Likewise.
+       (lower_omp_for): Likewise.
+       (lower_omp_parallel): Likewise.
+       (lower_omp_1): Likewise.
+       (lower_omp): Likewise.
+       (diagnose_sb_1): Likewise.
+       (diagnose_sb_2): Likewise.
+       (diagnose_omp_structured_block_errors): Likewise.
+       * tree-gimple.h (_TREE_GIMPLE_H): Rename from _TREE_SIMPLE_H.
+       (gimplify_body): Return the new GIMPLE body.
+       (struct walk_stmt_info): Move to gimple.h.
+       * gimple-low.c (lower_function_body): Assert that the function
+       body is a single GIMPLE_BIND statement.
+       Create a new gimple sequence to lower the existing body.
+       Replace the function body with the new lowered sequence.
+       (pass_lower_cf): Enable GIMPLE lowering.
+       (lower_omp_directive): Disable.
+       (lower_stmt): Do not call lower_omp_directive.
+       (gimple_stmt_may_fallthru): Factor out of ...
+       (gimple_seq_may_fallthru): ... here.
+       * gimple-iterator.c (gsi_replace): New.
+       * gimple-iterator.h (gsi_replace): Declare.
+       * gimple-pretty-print.c: Do not include gimple-iterator.h
+       * gimplify.c (gimplify_asm_expr): Tidy.
+       Store the whole TREE_LIST node in the inputs and outputs vectors.
+       (gimple_push_cleanup): Disable completely.
+       (gimplify_body): Return a GIMPLE_BIND holding the gimplified
+       body.
+       Update all users.
+       (gimplify_function_tree): Create a GIMPLE sequence to hold
+       the gimplified body.
+       * tree-flow.h (gimple_stmt_may_fallthru): Declare.
+       * Makefile.in (GIMPLE_H): Add gimple-iterator.h.
+
+2007-08-25  Diego Novillo  <dnovillo@google.com>
+
+       * tree-nested.c: Re-implement to use GIMPLE tuples.
+       (init_tmp_var_with_call): New.
+       (init_tmp_var): Adapt to GIMPLE tuples.
+       (save_tmp_var): Likewise.
+       (convert_nl_goto_receiver): Likewise.
+       (finalize_nesting_tree_1): Likewise.
+       (gsi_gimplify_val): Likewise.
+       Rename from tsi_gimplify_val.  Update all users.
+       (walk_asm_expr): Remove.
+       (walk_stmts): Remove.
+       (walk_body): Call walk_gimple_seq.
+       Add new argument callback_op.  Update all users.
+       (walk_function): Add argument callback_op.  Update all users.
+       (convert_nonlocal_reference_op): Rename from
+       convert_nonlocal_omp_reference.  Update all users.
+       (convert_nonlocal_reference_stmt): New.  Handle GIMPLE
+       statements that used to be tree nodes.
+       (convert_local_reference_op): Rename from
+       convert_local_reference.  Update all users.
+       (convert_local_reference_stmt): New.  Handle GIMPLE statements
+       that used to be tree nodes.
+       (convert_nl_goto_reference): Convert to walk_stmt_fn callback.
+       Update all users.
+       (convert_tramp_reference_op): Rename from
+       convert_tramp_reference.  Update all users.
+       (convert_tramp_reference_stmt): New.  Handle GIMPLE statements
+       that used to be tree nodes.
+       (convert_gimple_call): Rename from convert_call_expr.  Convert
+       to be a walk_stmt_fn callback.
+       * gimple.c (gimple_seq_add): Rename from gimple_add.  Update
+       all users.
+       (walk_gimple_seq): Rename from walk_seq_ops.  Update all
+       users.
+       (walk_gimple_stmt): Rename from walk_tuple_ops.  Update all
+       users.
+       Use two callback functions one for statements and another for
+       operands.  If either is NULL do not invoke it.
+       Allow callbacks to replace operands.
+       (WALKIT): Remove.
+       (walk_gimple_asm): New.
+       * gimple.h: Include ggc.h
+       (gimple_seq_alloc): New.  Use everywhere a GIMPLE sequence is
+       allocated.
+       (gimple_op_ptr): New.
+       (gimple_call_arg_ptr): New.
+       (gimple_catch_types_ptr): New.
+       (gimple_eh_filter_types_ptr): New.
+       (gimple_omp_critical_name_ptr): New.
+       (gimple_omp_for_clauses_ptr): New.
+       (gimple_omp_for_index_ptr): New.
+       (gimple_omp_for_initial_ptr): New.
+       (gimple_omp_for_final_ptr): New.
+       (gimple_omp_for_incr_ptr): New.
+       (gimple_omp_parallel_clauses_ptr): New.
+       (gimple_omp_parallel_child_fn_ptr): New.
+       (gimple_omp_parallel_data_arg_ptr): New.
+       (gimple_omp_single_clauses_ptr): New.
+       (gimple_omp_sections_clauses_ptr): New.
+       (walk_stmt_fn): New type.
+       (struct walk_stmt_info): Move from tree-gimple.h.
+       Rename field callback to callback_op.
+       Add new field callback_stmt.
+       Replace field tsi with gsi of type gimple_stmt_iterator.
+       (walk_gimple_seq): Declare.
+       (walk_gimple_stmt): Declare.
+       * tree-cfg.c (execute_build_cfg): Do not call
+       build_gimple_cfg.
+       (pass_build_cfg): Enable.
+       Disable TODO_verify_stmts and TODO_cleanup_cfg.
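+
+       A hedged sketch of the two-callback walk described above; the
+       callback signatures are assumptions modeled on the description, and
+       count_stmts_in_seq, count_stmt_cb and count_op_cb are hypothetical:
+
+         /* Statement callback: bump the counter stored in WI->info.  */
+         static tree
+         count_stmt_cb (gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
+                        bool *handled_ops_p, struct walk_stmt_info *wi)
+         {
+           int *count = (int *) wi->info;
+
+           (*count)++;
+           *handled_ops_p = false;  /* Let the walker visit operands too.  */
+           return NULL_TREE;
+         }
+
+         /* Operand callback: nothing to do in this example.  */
+         static tree
+         count_op_cb (tree *tp ATTRIBUTE_UNUSED,
+                      int *walk_subtrees ATTRIBUTE_UNUSED,
+                      void *data ATTRIBUTE_UNUSED)
+         {
+           return NULL_TREE;
+         }
+
+         /* Count the statements in SEQ.  */
+         static int
+         count_stmts_in_seq (gimple_seq seq)
+         {
+           int count = 0;
+           struct walk_stmt_info wi;
+
+           memset (&wi, 0, sizeof (wi));
+           wi.info = &count;
+           walk_gimple_seq (seq, count_stmt_cb, count_op_cb, &wi);
+           return count;
+         }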
+
+2007-08-22  Aldy Hernandez  <aldyh@redhat.com>
+
+       * tree-phinodes.c (reserve_phi_args_for_new_edge): Comment out
+       call to phi_nodes_ptr.
+       (remove_phi_node): Same.
+       * tree-flow-inline.h (get_lineno): Work with empty statements or
+       empty locations.
+       (phi_nodes_ptr): Remove.
+       (phi_arg_index_from_use): Adjust for tuples.
+       (bsi_stmt_ptr): Add ATTRIBUTE_UNUSED.
+       (op_iter_init): Remove assertion.
+       (next_imm_use_stmt): Change NULL_TREE to NULL.
+       * tree-dfa.c (mark_symbols_for_renaming): Adjust for tuples.
+       * tree-flow.h: Remove bb_for_stmt prototype.
+       (mark_symbols_for_renaming): Make argument gimple.
+
+2007-08-22  Aldy Hernandez  <aldyh@redhat.com>
+
+       Change all instances of bb_for_stmt to gimple_bb throughout.
+
+       * gimple.h (gimple_omp_parallel_combined_p): New.
+       * tree-ssa-operands.h (update_stmt_operands): Argument is now
+       gimple.
+       * tree-ssa-threadupdate.c (redirection_block_p): Use gimple_nop_p
+       instead of IS_EMPTY_STMT.
+       * tree-ssa-loop-niter.c: Disable use of COND_EXPR_COND.
+       * tree-pretty-print.c (dump_bb_header): Adjust for tuples.
+       * cgraph.c (cgraph_add_new_function): Rename
+       tree_register_cfg_hooks to gimple_register_cfg_hooks.
+       * value-prof.c (set_histogram_value): Stmt type is now gimple.
+       (gimple_histogram_value): Same.
+       (gimple_remove_histogram_value): Same.
+       (gimple_remove_stmt_histograms): Same.
+       * tree.h (struct ssa_use_operand_d): Same.
+       * value-prof.h (struct histogram_value_t): Same.
+       Change gimple_*histogram* prototypes to use gimple instead of
+       tree.
+       * ipa-cp.c (ipcp_insert_stage): Rename tree_register_cfg_hooks to
+       gimple_register_cfg_hooks.
+       * cfghooks.c (gimple_register_cfg_hooks): Rename from
+       tree_register_cfg_hooks.  Initialize cfg_hooks to gimple_cfg_hooks.
+       (current_ir_type): Rename tree_cfg_hooks to gimple_cfg_hooks.
+       * input.h (IS_LOCATION_EMPTY): New.
+       * cfghooks.h: Rename tree_cfg_hooks to gimple_cfg_hooks.  Rename
+       tree_register_cfg_hooks to gimple_register_cfg_hooks.
+       * omp-low.c (determine_parallel_type): Adjust for tuples.
+       (expand_omp_parallel): Rename bb_stmt_list to bb_seq.
+       (expand_omp_for_generic): Call gimple_omp_return_nowait_p.
+       (expand_omp_for_static_nochunk): Same.
+       (expand_omp_sections): Call gimple_omp_section_last_p.
+       (expand_omp_single): Comment out call to find_omp_clause.
+       Call gimple_omp_return_nowait_p.
+       * cgraphunit.c: Rename tree_register_cfg_hooks to
+       gimple_register_cfg_hooks.
+       * tree-ssa-propagate.c: Comment out non-working code.
+       * matrix-reorg.c (matrix_reorg): Rename tree_register_cfg_hooks to
+       gimple_register_cfg_hooks.
+       * tree-eh.c (add_stmt_to_eh_region_fn): Change `t' type to gimple.
+       Adjust accordingly.
+       (add_stmt_to_eh_region): Same.
+       (remove_stmt_from_eh_region_fn): Same.
+       (remove_stmt_from_eh_region): Same.
+       (lookup_stmt_eh_region_fn): Same.
+       (lookup_stmt_eh_region): Same.
+       (make_eh_edges): Adjust for tuples.
+       (stmt_can_throw_internal): Rename from tree_can_throw_internal.
+       Adjust for tuples.
+       (maybe_clean_or_replace_eh_stmt): Arg types are now gimple.
+       * tree-vectorizer.c: Rename tree_register_cfg_hooks to
+       gimple_register_cfg_hooks.
+       * tree-if-conv.c (combine_blocks): Adjust for tuples.
+       * profile.c (branch_prob): Same.
+       * tree-flow-inline.h (bb_for_stmt): Remove.
+       (get_lineno): Adjust for tuples.
+       (noreturn_call_p): Same.
+       (mark_stmt_modified): Same.
+       (update_stmt): Same.
+       (update_stmt_if_modified): Same.
+       (stmt_modified_p): Same.
+       (link_imm_use_stmt): Same.
+       (relink_imm_use_stmt): Same.
+       (single_imm_use): Same.
+       (gimple_phi_arg_def): New.
+       (gimple_phi_arg_edge): New.
+       (phi_nodes): Adjust for tuples.
+       (phi_nodes_ptr): Same.
+       (set_phi_nodes): Same.
+       (bb_seq): Rename from bb_stmt_list and adjust for tuples.
+       (set_bb_seq): Rename from set_bb_stmt_list and adjust for tuples.
+       (bsi_start): Adjust for tuples.
+       (bsi_after_labels): Adjust for tuples.
+       (bsi_last): Same.
+       (bsi_next): Same.
+       (bsi_prev): Same.
+       (bsi_stmt): Same.
+       (bsi_stmt_ptr): Same.
+       (loop_containing_stmt): Same.
+       (delink_stmt_imm_use): Same.
+       (first_imm_use_stmt): Same.
+       (next_imm_use_stmt): Same.
+       * gimplify.c (force_gimple_operand_bsi): Same.
+       * coretypes.h (const_gimple): New.
+       * tree-ssa-phiopt.c (empty_block_p): Call gimple_nop_p.
+       (conditional_replacement): Comment out COND_EXPR_COND.
+       (value_replacement): Comment out COND_EXPR_COND.
+       (minmax_replacement): Same.
+       (abs_replacement): Same.
+       * except.h (*eh_region*): Change trees to gimple.
+       (struct throw_stmt_node): Change stmt type to gimple.
+       * cfgexpand.c (label_rtx_for_bb): Adjust for tuples.
+       (expand_gimple_cond_expr): Same.
+       (expand_gimple_basic_block): Same.
+       * tree-cfgcleanup.c (split_bbs_on_noreturn_calls): Same.
+       * tree-ssa-pre.c (realify_fake_stores): Comment out non-working
+       code.
+       * tree-ssa-forwprop.c (propagate_with_phi): Rename
+       tree_can_throw_internal stmt_can_throw_internal.
+       * tree-inline.c (copy_edges_for_bb): Rename
+       tree_can_throw_internal to stmt_can_throw_internal.
+       (initialize_cfun): Same.
+       (copy_cfg_body): Same.
+       (gimple_expand_calls_inline): Same.
+       (make_nonlocal_label_edges): Rename tree_can_make_abnormal_goto to
+       stmt_can_make_abnormal_goto.
+       * tree-optimize.c (tree_lowering_passes): Rename
+       tree_register_cfg_hooks to gimple_register_cfg_hooks.
+       (tree_rest_of_compilation): Same.
+       * tree-flow.h (struct gimple_df): Make modified_noreturn_calls a
+       vector of gimple types.
+       Adjust prototypes for tuples.
+       (struct block_stmt_iterator): Make iterator a gimple iterator.
+       (enum bsi_iterator_update): Remove BSI_CHAIN_START and
+       BSI_CHAIN_END.
+       * Makefile.in (tree-cfg.o): Depend on GIMPLE_H.
+       (GTFILES): Move gimple.[hc] entries before tree-flow.h.
+       * basic-block.h (struct edge_def): Make goto_locus of type
+       location_t.
+       (union basic_block_il_dependent): Adjust for tuples.
+       (gimple_bb_info): Rename from tree_bb_info.  Adjust for tuples.
+       * tree-cfg.c: Include gimple.h.
+       Adjust prototypes for tuples.
+       (build_gimple_cfg): Rename from build_tree_cfg.  Adjust for
+       tuples.
+       (execute_build_cfg): Rename build_tree_cfg call to
+       build_gimple_cfg.
+       (factor_computed_gotos): Adjust for tuples.
+       (make_blocks): Same.
+       (create_bb): Same.
+       (fold_cond_expr_cond): Same.
+       (make_edges): Same.
+       (make_cond_expr_edges): Same.
+       (make_switch_expr_edges): Same.
+       (make_goto_expr_edges): Same.
+       (cleanup_dead_labels): Same.
+       (group_case_labels): Same.
+       (tree_can_merge_blocks_p): Same.
+       (replace_uses_by): Same.
+       (tree_merge_blocks): Same.
+       (is_ctrl): Same.
+       (is_ctrl_altering_stmt): Same.
+       (computed_goto_p): Same.
+       (simple_goto_p): Same.
+       (stmt_can_make_abnormal_goto): Rename from
+       tree_can_make_abnormal_goto.
+       Adjust for tuples.
+       (stmt_starts_bb_p): Adjust for tuples.
+       (stmt_ends_bb_p): Same.
+       (first_stmt): Same.
+       (last_stmt): Same.
+       (last_and_only_stmt): Same.
+       (set_bb_for_stmt): Same.
+       (change_bb_for_stmt): Same.
+       (bsi_for_stmt): Same.
+       (update_modified_stmt): Rename from update_modified_stmts.
+       Adjust for tuples.
+       Handle only one statement.
+       (bsi_insert_before): Adjust for tuples.
+       (bsi_insert_after): Same.
+       (bsi_insert_seq_before): New.
+       (bsi_insert_seq_after): New.
+       (bsi_remove): Adjust for tuples.
+       (bsi_replace): Same.
+       (verify_stmt): Same.
+       (gimple_split_block): Rename from tree_split_block.
+       Adjust for tuples.
+       (tree_purge_dead_abnormal_call_edges): Adjust for tuples.
+       (tree_cfg_hooks): Same.
+       * tree-ssa-operands.c (update_stmt_operands): Same.
+
+2007-08-22  Aldy Hernandez  <aldyh@redhat.com>
+
+       * Makefile.in (TREE_GIMPLE_H): Depend on GIMPLE_H.
+       * gimple-iterator.h: Do not include gimple.h.
+       * gimple.h (OMP_SECTION_LAST_FLAG): New.
+       (gimple_omp_return_nowait_p): New.
+       (gimple_omp_section_last_p): New.
+       (gimple_switch_set_num_labels): New.
+       (gimple_nop_p): New.
+       
+2007-08-17  Aldy Hernandez  <aldyh@redhat.com>
+
+       Revert this change:
+
+       2007-08-14  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimple.c (gimple_call_flags): Make sure this is a GIMPLE_CALL.
+       
+2007-08-14  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimple-iterator.c (gsi_split_seq_after): New.
+       (gsi_split_seq_before): New.
+       * gimple-iterator.h: Prototype above two functions.
+       
+2007-08-14  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimple.h (gimple_has_ops): New.
+       (gimple_modified): New.
+       (set_gimple_modified): New.
+       (gimple_switch_default_label): Call gimple_switch_label.
+       (gimple_switch_set_default_label): Call gimple_switch_set_label.
+
+2007-08-14  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimple.c (gimple_call_flags): Make sure this is a GIMPLE_CALL.
+       
+2007-08-14  Diego Novillo  <dnovillo@google.com>
+
+       Merge with mainline @127480.
+
+       * version.c (VERSUFFIX): Update.
+       
+2007-08-09  Diego Novillo  <dnovillo@google.com>
+
+       * gimple-low.c: Document conversion to Low GIMPLE.
+       * Makefile.in (gimple.o): Add dependency on gt-gimple.h
+       (GTFILES): Add gimple.c.
+       * gimple.c (gimple_bodies_vec): New.
+       (gimple_bodies_map): Rename from gimple_bodies.
+       (gss_for_code): Return GSS_ASM for GIMPLE_ASM.
+       (walk_tuple_ops): Handle GSS_ASM like GSS_WITH_OPS.
+       (set_gimple_body): Push body into gimple_bodies_vec and create
+       a mapping to array index in gimple_bodies_map.
+       (gimple_body): Corresponding changes to use gimple_bodies_map
+       and gimple_bodies_vec.
+       * gimple.h: Create VEC templates for gimple_seq.
+
+2007-08-08  Aldy Hernandez  <aldy@quesejoda.com>
+
+       * gimple-low.c (struct return_statements_t): Declare.
+       (struct lower_data): Make return_statements a vector.
+       (lower_function_body): Adjust for tuples.
+       (pass_lower_cf): Add PROP_gimple_any to properties_required.
+       (lower_sequence): Rename from lower_stmt_body.
+       Adjust for tuples.
+       (lower_omp_directive): Adjust for tuples.
+       (lower_stmt): Same.
+       (lower_gimple_bind): Rename from lower_bind_expr.
+       Adjust for tuples.
+       (gimple_try_catch_may_fallthru): New.
+       (gimple_seq_may_fallthru): New.
+       (lower_gimple_return): Rename from lower_return_expr and adjust
+       for tuples.
+       (lower_builtin_setjmp): Adjust for tuples.
+       * gimple-iterator.c: New.
+       * gimple-iterator.h: Include gimple.h.
+       (enum gsi_iterator_update): Declare.
+       (gsi_link_seq_before): New prototype.
+       (gsi_link_before): Same.
+       (gsi_link_seq_after): Same.
+       (gsi_link_after): Same.
+       (gsi_delink): Same.
+       * gimplify.c (gimplify_body): Comment out verify_gimple_1 call.
+       * tree-flow.h (gimple_seq_may_fallthru): New prototype.
+       * Makefile.in (OBJS-common): Add gimple-iterator.o.
+       (gimple-iterator.o): New.
+       (gimple-pretty-print.o): Do not depend on gimple-iterator.h.
+       * gimple.c (set_gimple_prev): Move to gimple.h.
+       (set_gimple_next): Same.
+       (gimple_call_flags): New.
+       * gimple.h (struct gimple_sequence): Add GTY marker.
+       (struct gimple_statement_bind): Add block field.
+       (set_gimple_prev): New.
+       (set_gimple_next): New.
+       (gimple_call_flags): Protoize.
+       (gimple_call_fndecl): New.
+       (gimple_bind_block): New.
+       (gimple_bind_set_block): New.
+
+2007-08-08  Diego Novillo  <dnovillo@google.com>
+
+       * gimple.h (GIMPLE_CHECK, GIMPLE_CHECK2, GIMPLE_RANGE_CHECK):
+       Do not return the checked statement.  Update all users.
+       Enable on compilers other than GCC.
+
+2007-08-07  Chris Matthews  <chrismatthews@google.com>
+
+       * gimple-iterator.h (gsi_start): Changed to produce a pointer instead of
+       struct.  Updated clients.
+       (gsi_last): Same.
+       (gsi_end_p): Changed to operate on a pointer instead of struct.  Updated
+       clients.
+       (gsi_one_before_end_p): Same.
+       (gsi_next): Same.
+       (gsi_prev): Same.
+       (gsi_stmt): Same.
+       
+2007-08-07  Diego Novillo  <dnovillo@google.com>
+
+       Merge with mainline @127277.
+
+       * version.c (VERSUFFIX): Update.
+
+2007-08-07  Diego Novillo  <dnovillo@google.com>
+
+       * gimple.h (gimple_call_return): New.
+       * gimplify.c (get_tmp_var_for): Call it.
+
+2007-08-02  Chris Matthews  <chrismatthews@google.com>
+
+       * gimplify.c (gimplify_asm_expr): Created new gimple tuple.
+       * gimple-pretty-print.c (dump_gimple_asm): Added function to dump a
+       GIMPLE_ASM statement.
+       (dump_gimple_stmt): Updated to use the dump_gimple_asm function.
+       * gimple.c (gss_for_code): Made asm statements return as with_mem_ops.
+       (build_gimple_with_ops): Asm statements are added as a special case for
+       allocation because they have extra fields that are not
+       allocated correctly in the current generic op allocator.
+       (build_gimple_asm_1): Added a helper function to setup the basics of a
+       GIMPLE_ASM tuple.
+       (build_gimple_asm_vec): Create a GIMPLE_ASM tuple from vector arguments.
+       (build_gimple_asm): Changed to call the new helper function.
+
+2007-08-03  Diego Novillo  <dnovillo@google.com>
+
+       * gimple-pretty-print.c (INDENT): Tidy.
+       (dump_binary_rhs): New.
+       (dump_gimple_assign): Call it.
+       * gimplify.c (gimplify_modify_expr_complex_part): If the value
+       is not interesting, nullify *EXPR_P.
+       (gimplify_body): Do not add the sequence to the GIMPLE_BIND more
+       than once.
+
+2007-08-01  Diego Novillo  <dnovillo@google.com>
+
+       * gimplify.c (gimplify_switch_expr): Remove switch_body_seq_.
+       Change switch_body_seq to struct gimple_sequence.
+       Adjust all uses.
+       Call gimplify_stmt instead of gimplify_statement_list.
+       (gimplify_to_stmt_list): Remove.
+       Update all users.
+       * tree-mudflap.c: Include gimple.h
+       (mf_decl_cache_locals): Convert to emit GIMPLE.
+       (mf_build_check_statement_for): Add FIXME and unreachable
+       markers to convert to GIMPLE.
+       * Makefile.in (tree-mudflap.o): Depend on $(GIMPLE_H).
+       * config/i386/i386.c (ix86_gimplify_va_arg): Adapt to emit
+       GIMPLE.
+
+2007-08-01  Diego Novillo  <dnovillo@google.com>
+
+       * gimplify.c (gimplify_switch_expr): Do not call sort_case_labels
+       if there are no labels other than 'default'.
+       * gimple.h (gimple_num_ops, gimple_op, gimple_set_op): Use
+       result of GIMPLE_RANGE_CHECK call.
+
+2007-08-01  Diego Novillo  <dnovillo@google.com>
+
+       * DEV-PHASE: Revert to mainline version.
+       * version.c (VERSUFFIX): Add branch name and revision number
+       of latest mainline merge.
+
+2007-07-31  Diego Novillo  <dnovillo@google.com>
+
+       Mainline merge (@127100).
+       * DEV-PHASE: Updated.
+
+2007-07-31  Diego Novillo  <dnovillo@google.com>
+
+       * dominance.c (free_dominance_info): If there is no CFG,
+       do nothing.
+       * cfg.c (compact_blocks): Likewise.
+
+2007-07-30  Diego Novillo  <dnovillo@google.com>
+
+       * gimplify.c (get_tmp_var_for): When creating a new temporary
+       for a GIMPLE_CALL, use the type returned by the function call
+       instead of the type of the function decl.
+       * gimple.c (build_gimple_return): Accept NULL and RESULT_DECL
+       return values.
+
+2007-07-30  Diego Novillo  <dnovillo@google.com>
+
+       * gimplify.c (gimplify_init_constructor): If both sides
+       of the constructor are in GIMPLE form but the assignment
+       has not been emitted, emit it.
+
+2007-07-28  Diego Novillo  <dnovillo@google.com>
+
+       * gimplify.c (gimplify_return_expr): Do not create a MODIFY_EXPR
+       as return argument.
+       * gimple.c (build_gimple_return): Assert that the returned value
+       is a GIMPLE value.
+
+2007-07-27  Diego Novillo  <dnovillo@google.com>
+
+       * tree-gimple.c (get_gimple_rhs_class): New.
+       (is_gimple_formal_tmp_rhs): Call it.
+       * tree-gimple.h (enum gimple_rhs_class): New.
+       * gimple-iterator.h (gsi_next): Assert that there is nothing
+       beyond the end of the sequence.
+       (gsi_prev): Assert that there is nothing before the start of
+       the sequence.
+       * gimplify.c (gimplify_switch_expr): Tidy creation of default label.
+       (gimplify_expr): Fix concatenation of internal sequences to PRE_P.
+       * gimple.c (get_num_ops_for): Remove.  Update users.
+       (build_gimple_assign): Call get_gimple_rhs_class to determine
+       how many operands to allocate.
+       (gimple_add): Assert that GS does not have previous or next
+       statements.
+       (gimple_seq_append): Move from gimple.h.
+
+2007-07-27  Aldy Hernandez  <aldy@quesejoda.com>
+
+       * cgraph.c: Fix line wrap.
+       * cgraph.h: Same.
+       * tree-pass.h (struct tree_opt_pass): Add works_with_tuples_p
+       field.
+       * cgraphunit.c (cgraph_analyze_functions): Remove check for
+       DECL_SAVED_TREE.
+       (cgraph_expand_function): Comment out TREE_ASM_WRITTEN assertion.
+       * tree-optimize.c (tree_lowering_passes): Comment out call to
+       compact_blocks.
+       * passes.c (execute_one_pass): Return if pass has not been
+       converted to tuples.
+       * tree-vrp.c, regrename.c, fwprop.c, tree-into-ssa.c,
+       tree-complex.c, see.c, cgraphbuild.c, tracer.c, cgraph.c,
+       postreload-gcse.c, postreload.c, tree-ssa-loop-ch.c,
+       tree-tailcall.c, tree-pass.h, ipa-cp.c, final.c, omp-low.c,
+       tree-ssa-dse.c, ipa-reference.c, tree-ssa-uncprop.c,
+       auto-inc-dec.c, reorg.c, tree-ssa-copyrename.c, tree-ssa-ccp.c,
+       df-core.c, mode-switching.c, tree-nomudflap.c, modulo-sched.c,
+       ipa-pure-const.c, cse.c, web.c, tree-stdarg.c,
+       tree-ssa-math-opts.c, tree-ssa-dom.c, tree-nrv.c,
+       tree-ssa-alias.c, loop-init.c, gimple-low.c, tree-ssa-sink.c,
+       ipa-inline.c, global.c, jump.c, ifcvt.c, predict.c,
+       tree-ssa-loop.c, recog.c, dse.c, tree-ssa-ifcombine.c,
+       matrix-reorg.c, c-decl.c, tree-eh.c, regmove.c, local-alloc.c,
+       function.c, tree-vectorizer.c, gcse.c, ipa-type-escape.c,
+       tree-if-conv.c, init-regs.c, ipa.c, tree-ssa-phiopt.c,
+       rtl-factoring.c, lower-subreg.c, bt-load.c, tree-dfa.c except.c,
+       emit-rtl.c, cfgexpand.c, tree-cfgcleanup.c, cfgcleanup.c,
+       tree-ssa-pre.c, tree-sra.c, tree-mudflap.c, tree-ssa-copy.c,
+       cfglayout.c, tree-ssa-forwprop.c, tree-ssa-dce.c, tree-ssa.c,
+       regclass.c, integrate.c, tree-optimize.c, tree-object-size.c,
+       combine.c, tree-outof-ssa.c, bb-reorder.c, stack-ptr-mod.c,
+       var-tracking.c, tree-profile.c, tree-vect-generic.c, reg-stack.c,
+       sched-rgn.c, tree-ssa-structalias.c, tree-cfg.c, passes.c,
+       tree-ssa-reassoc.c, combine-stack-adj.c, cfgrtl.c, dce.c,
+       tree-ssanames.c: Set works_with_tuples_p field to 0.
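+
+       A hedged guess at the shape of the new guard in execute_one_pass;
+       the surrounding code is elided and the early-return detail is an
+       assumption based on the entry above:
+
+         static bool
+         execute_one_pass (struct tree_opt_pass *pass)
+         {
+           /* Skip passes that have not yet been converted to tuples.  */
+           if (!pass->works_with_tuples_p)
+             return false;
+
+           /* ... normal gate check and pass execution ...  */
+           return true;
+         }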
+
+2007-07-25  Diego Novillo  <dnovillo@google.com>
+
+       * gimple.c: Use ENABLE_GIMPLE_CHECKING instead of
+       ENABLE_TREE_CHECKING.
+       (set_gimple_code): New.
+       (set_gimple_prev): New.
+       (set_gimple_next): New.
+       * gimple.h: Use ENABLE_GIMPLE_CHECKING instead of
+       ENABLE_TREE_CHECKING.
+       (gimple_code): Inline function replacement for GIMPLE_CODE.
+       (gimple_flags): Inline function replacement for GIMPLE_SUBCODE_FLAGS.
+       (gimple_next): Inline function replacement for GIMPLE_NEXT.
+       (gimple_prev): Inline function replacement for GIMPLE_PREV.
+       (gimple_locus): Inline function replacement for GIMPLE_LOCUS.
+       (gimple_locus_empty_p): Inline function replacement for
+       GIMPLE_LOCUS_EMPTY_P.
+       (struct gimple_statement_base): Rename field 'subcode_flags'
+       to 'flags'.
+       (set_gimple_flags): New.
+       (gimple_bb): New.
+       (set_gimple_bb): New.
+       (set_gimple_block): New.
+       (set_gimple_locus): New.
+       (add_gimple_flag): Rename from gimple_add_subcode_flag.
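+
+       The flavor of the macro-to-inline conversion, as a sketch; the type
+       and field names below ("enum gimple_code", "base", "code", "flags")
+       are assumptions rather than text from the branch:
+
+         /* Return the code of statement GS (formerly GIMPLE_CODE).  */
+         static inline enum gimple_code
+         gimple_code (gimple gs)
+         {
+           return gs->base.code;
+         }
+
+         /* Set the flag word of statement GS; the field was renamed from
+            'subcode_flags' to 'flags' by this change.  */
+         static inline void
+         set_gimple_flags (gimple gs, unsigned int flags)
+         {
+           gs->base.flags = flags;
+         }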
+
+2007-07-25  Diego Novillo  <dnovillo@google.com>
+
+       * tree-gimple.h (build_gimple_eh_filter_tree): Rename from
+       gimple_build_eh_filter_tree.
+       * gimple.h (build_gimple_return): Rename from gimple_build_return.
+       (build_gimple_assign): Rename from gimple_build_assign.
+       (build_gimple_call_vec): Rename from gimple_build_call_vec.
+       (build_gimple_call): Rename from gimple_build_call.
+       (build_gimple_label): Rename from gimple_build_label.
+       (build_gimple_goto): Rename from gimple_build_goto.
+       (build_gimple_nop): Rename from gimple_build_nop.
+       (build_gimple_bind): Rename from gimple_build_bind.
+       (build_gimple_asm): Rename from gimple_build_asm.
+       (build_gimple_catch): Rename from gimple_build_catch.
+       (build_gimple_eh_filter): Rename from gimple_build_eh_filter.
+       (build_gimple_try): Rename from gimple_build_try.
+       (build_gimple_phi): Rename from gimple_build_phi.
+       (build_gimple_resx): Rename from gimple_build_resx.
+       (build_gimple_switch): Rename from gimple_build_switch.
+       (build_gimple_switch_vec): Rename from gimple_build_switch_vec.
+       (build_gimple_omp_parallel): Rename from gimple_build_omp_parallel.
+       (build_gimple_omp_for): Rename from gimple_build_omp_for.
+       (build_gimple_omp_critical): Rename from gimple_build_omp_critical.
+       (build_gimple_omp_section): Rename from gimple_build_omp_section.
+       (build_gimple_omp_continue): Rename from gimple_build_omp_continue.
+       (build_gimple_omp_master): Rename from gimple_build_omp_master.
+       (build_gimple_omp_return): Rename from gimple_build_omp_return.
+       (build_gimple_omp_ordered): Rename from gimple_build_omp_ordered.
+       (build_gimple_omp_sections): Rename from gimple_build_omp_sections.
+       (build_gimple_omp_single): Rename from gimple_build_omp_single.
+       Update all users.
+
+2007-07-24  Chris Matthews  <chrismatthews@google.com>
+
+       * configure.ac: Added support for ENABLE_GIMPLE_CHECKING and the
+       --enable-checking=gimple flag.
+       * config.in: Same.
+       * configure: Regenerated.
+       * gimplify.c (cpt_same_type): Same.
+       (gimple_push_condition): Same.
+       (gimplify_addr_expr): Same.
+       (gimplify_expr): Same.
+       (gimplify_body): Same.
+       (check_pointer_types_r): Same.
+
+2007-07-24  Diego Novillo  <dnovillo@google.com>
+
+       * gimple.def: Re-organize codes that take tree operands so
+       they are consecutive.
+       * gsstruct.def (GSS_LABEL, GSS_ASSIGN_BINARY, GSS_ASSIGN_UNARY_REG,
+       GSS_ASSIGN_UNARY_MEM, GSS_COND, GSS_GOTO, GSS_SWITCH, GSS_CALL,
+       GSS_RETURN): Remove.  Update all users.
+       * gimple.c (gss_for_code): New.
+       (gimple_statement_structure): Call it.
+       (get_num_ops_for): New.
+       (gimple_build_with_ops): New.
+       (gimple_build_return, gimple_build_call_1, gimple_build_assign,
+       gimple_build_cond, gimple_build_label, gimple_build_goto,
+       gimple_build_switch_1): Call it.
+       (gss_for_assign): Remove.  Update all users.
+       (gimple_check_failed): Do not assume that subcode is a valid tree
+       code.
+       (gimple_range_check_failed): New.
+       (walk_tuple_ops): Implement in terms of gimple_num_ops and
+       gimple_op when dealing with GSS_WITH_OPS and GSS_WITH_MEM_OPS
+       statements.
+       * gimple.h (struct gimple_statement_with_ops): Add fields 'num_ops'
+       and 'op'.
+       (struct gimple_statement_label, gimple_statement_assign_binary,
+       gimple_statement_assign_unary_reg, gimple_statement_assign_unary_mem,
+       gimple_statement_cond, gimple_statement_goto, gimple_statement_switch,
+       gimple_statement_call, gimple_statement_return): Remove.
+       Update all users.
+       (gimple_range_check_failed): Declare.
+       (GIMPLE_RANGE_CHECK): Define.
+       (gimple_num_ops): New.
+       (gimple_op): New.
+       (gimple_set_op): New.
+       (gimple_assign_rhs1): Rename from gimple_assign_binary_rhs1.
+       (gimple_assign_set_rhs1): Rename from gimple_assign_binary_set_rhs1.
+       (gimple_assign_rhs2): Rename from gimple_assign_binary_rhs2.
+       (gimple_assign_set_rhs2): Rename from gimple_assign_binary_set_rhs2.
+       (gimple_assign_unary_rhs): Remove.  Update all users.
+       (gimple_assign_unary_set_rhs): Likewise.
+       (gimple_switch_num_labels): Rename from gimple_switch_nlabels.
+       (gimple_call_fn, gimple_call_lhs, gimple_call_chain,
+       gimple_call_set_chain, gimple_call_nargs, gimple_call_arg,
+       gimple_call_set_arg, gimple_cond_lhs, gimple_cond_set_lhs,
+       gimple_cond_rhs, gimple_cond_set_rhs, gimple_cond_true_label,
+       gimple_cond_false_label, gimple_cond_set_true_label,
+       gimple_cond_set_false_label, gimple_label_label,
+       gimple_label_set_label, gimple_goto_dest,
+       gimple_goto_set_dest, gimple_asm_input_op,
+       gimple_asm_set_input_op, gimple_asm_output_op,
+       gimple_asm_set_output_op, gimple_asm_clobber_op,
+       gimple_asm_set_clobber_op, gimple_switch_num_labels,
+       gimple_switch_index, gimple_switch_set_index,
+       gimple_switch_default_label, gimple_switch_set_default_label,
+       gimple_switch_label, gimple_switch_set_label,
+       gimple_return_retval, gimple_return_set_retval): Implement
+       using the array of operands in field 'with_ops'.
+       (gimple_asm_set_ninputs, gimple_asm_set_noutputs,
+       gimple_asm_set_nclobbered, gimple_asm_set_string): Remove.
+       Update all users.
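+
+       A sketch of the uniform operand access this enables; the function
+       print_all_operands is hypothetical and the operand-count type is
+       assumed to be unsigned:
+
+         /* Dump every tree operand of STMT to FILE.  */
+         static void
+         print_all_operands (gimple stmt, FILE *file)
+         {
+           unsigned i;
+
+           for (i = 0; i < gimple_num_ops (stmt); i++)
+             {
+               tree op = gimple_op (stmt, i);
+               if (op)
+                 {
+                   print_generic_expr (file, op, 0);
+                   fprintf (file, "\n");
+                 }
+             }
+         }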
+
+2007-07-24  Aldy Hernandez  <aldyh@redhat.com>
+
+       * tree.c (get_callee_fndecl): Revert previous change.
+       * gimplify.c (gimplify_call_expr): Use result from
+       get_callee_fndecl if available.
+       * c-common.c (c_warn_unused_result): Do not use
+       get_callee_fndecl.
+       Add assertion.
+
+2007-07-24  Diego Novillo  <dnovillo@google.com>
+
+       Merge with mainline (@126872)
+
+2007-07-23  Aldy Hernandez  <aldyh@redhat.com>
+
+       * tree.c (get_callee_fndecl): Work when only the CALL_EXPR_FN has
+       been passed.
+       * c-common.c (c_warn_unused_result): Use get_callee_fndecl.
+       
+2007-07-23  Aldy Hernandez  <aldyh@redhat.com>
+
+       * c-decl.c: Include gimple.h.
+       (c_gimple_diagnostics_recursively): Call c_warn_unused_result with
+       tuplified body.
+       Exit if -fgimple-only.
+       * gimplify.c (gimplify_call_expr): Call gimple_build_call_vec with
+       the correct callee.
+       * c-gimplify.c (c_genericize): Remove exit.
+       * c-common.c: Include gimple.h.
+       (c_warn_unused_result): Tuplify.
+       * c-common.h (c_warn_unused_result): Protoize.
+       * Makefile.in (c-decl.o): Depend on GIMPLE_H.
+       (c-common.o): Same.
+       * gimple.c (gimple_build_catch): Handler is a sequence.
+       Adjust accordingly.
+       (gimple_build_eh_filter): Failure is a sequence.
+       Adjust accordingly.
+       (walk_tuple_ops): case GIMPLE_CATCH: Walk handler as a sequence.
+       case GIMPLE_EH_FILTER: Walk failure as a sequence.
+       * gimple.h (gimple_statement_catch): Make handler a sequence.
+       (gimple_statement_eh_filter): Make failure a sequence.
+       (gimple_build_catch): Make second argument a sequence.
+       (gimple_build_eh_filter): Same.
+       (gimple_catch_handler): Return a sequence.
+       (gimple_catch_set_handler): Make second argument a sequence.
+       (gimple_eh_filter_failure): Return a sequence.
+       (gimple_eh_filter_set_failure): Make second argument a sequence.
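With the handler and failure stored as sequences, the corresponding accessors
become plain sequence getters and setters.  A rough sketch, with the structure
and field names assumed for illustration:

    /* Sketch only: the catch handler is now a sequence.  */
    static inline gimple_seq
    gimple_catch_handler (gimple gs)
    {
      return gs->gimple_catch.handler;
    }

    static inline void
    gimple_catch_set_handler (gimple gs, gimple_seq handler)
    {
      gs->gimple_catch.handler = handler;
    }

gimple_eh_filter_failure and gimple_eh_filter_set_failure would follow the
same pattern for the failure sequence.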
+
+2007-07-20  Aldy Hernandez  <aldyh@redhat.com> 
+
+       * gimplify.c (gimplify_return_expr): Handle an empty ret_expr
+       gracefully.
+
+2007-07-19  Aldy Hernandez  <aldyh@redhat.com> 
+
+       * testsuite/gcc.dg/gimple/instrument.c: New.
+       * gimplify.c (gimplify_function_tree): Generate tuples for function
+       instrumentation.
+
+2007-07-18  Aldy Hernandez  <aldyh@redhat.com> 
+
+       * gimplify.c (gimple_add_tmp_var): Remove abort.  Add comment.
+       
+2007-07-18  Aldy Hernandez  <aldyh@redhat.com> 
+
+       * gimplify.c (gimplify_and_add): Remove unnecessary temporary sequence.
+       Remove fixme.  Add comment.
+       (gimplify_loop_expr): Tuplify.
+       (gimplify_bind_expr): Streamline GIMPLE_TRY_FINALLY tuple.
+       (gimplify_expr): Tuplify TRY_*_EXPR cases.
+       * gimple.c: Fix some spacing.
+       (gimple_build_try, gimple_omp_build_*): Handle empty sequences.
+       (gimple_push): Remove.
+       * gimple.h (gimple_push): Remove.
+
+2007-07-18  Aldy Hernandez  <aldyh@redhat.com> 
+
+       * tree-gimple.h (declare_vars): Update arguments.
+       * gimplify.c (pop_gimplify_context): Enable call to declare_vars.
+       (declare_vars): Convert to use tuples.
+       (gimple_add_tmp_var): Same.
+       * gimple.h (GIMPLE_BLOCK): New.
+
+2007-07-17  Chris Matthews  <chrismatthews@google.com>
+
+       * gimplify.c (gs_build_eh_filter): Renamed to gs_build_eh_filter_tree
+       to avoid name conflict.
+       * gimple-pretty-print.c: Renamed debug_gs_* to debug_gimple_*.
+       Updated all users.
+       * gimple.h: Renamed all functions with GS and gs prefixes to GIMPLE and
+       gimple, and updated users.
+       Renamed gs_seq_* functions to gimple_seq_*. Updated all users. 
+       * gimple.def: Definitions changed from GS_* to GIMPLE_*.
+
+2007-07-16  Chris Matthews  <chrismatthews@google.com>
+
+       * gimple.c (gs_build_switch): Changed nlabels to represent total number
+       of labels including the default.
+       (gs_build_switch_1): Same.
+       (walk_tuple_ops): Same.
+       * gimple-pretty-print.c (dump_gs_switch): Same.
+
+2007-07-16  Aldy Hernandez  <aldyh@redhat.com> 
+
+       * gimplify.c (gimplify_ctx): Rename current_bind_expr_seq to
+       bind_expr_stack and make it a vector.
+       (push_gimplify_context): Adjust bind_expr_stack for vectors.
+       (pop_gimplify_context): Same.
+       (gimple_push_bind_expr): Same.
+       (gimple_pop_bind_expr): Same.
+       (gimple_current_bind_expr): Same.
+       (get_tmp_var_for): Use ``code''.
+       (gimplify_bind_expr): Remove comment.
+       (gimplify_case_label_expr): Add whitespace.
+       * gimple.c (gs_pop): Remove.
+       * gimple.h: Define vectors of a gimple type.
+       (gs_pop): Remove prototype.
+       * Makefile.in (GIMPLE_H): Add vec.h.
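For illustration, roughly what "vectors of a gimple type" and the new
bind_expr_stack amount to with the vec.h macros of that era (whether the
stack lives in gimplify_ctx or in a file-static variable is glossed over
here):

    /* Sketch only: declare heap-allocated vectors of 'gimple' and use
       one as a stack of enclosing GIMPLE_BIND statements.  */
    DEF_VEC_P (gimple);
    DEF_VEC_ALLOC_P (gimple, heap);

    static VEC(gimple, heap) *bind_expr_stack;

    static void
    gimple_push_bind_expr (gimple gs)
    {
      VEC_safe_push (gimple, heap, bind_expr_stack, gs);
    }

    static void
    gimple_pop_bind_expr (void)
    {
      VEC_pop (gimple, bind_expr_stack);
    }

    static gimple
    gimple_current_bind_expr (void)
    {
      return VEC_last (gimple, bind_expr_stack);
    }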
+
+2007-07-15  Diego Novillo  <dnovillo@google.com>
+
+       * gimple.c: Rename from gimple-ir.c.
+       Update all users.
+       * gimple.h: Rename from gimple-ir.h.
+       Update all users.
+
+2007-07-15  Diego Novillo  <dnovillo@google.com>
+
+       * gimple-ir.c (gimple_statement_structure): Remove code
+       after gcc_unreachable call.
+       * gimplify.c (get_tmp_var_for): New.
+       (gimplify_call_expr): Call it.
+
+2007-07-14  Aldy Hernandez  <aldyh@redhat.com> 
+
+       * testsuite/gcc.dg/gimple/with_size_expr.c: Check for GS_TRY.
+       * Makefile.in (gimple-ir.o): Add diagnostic.h dependency.
+       * gimple-pretty-print.c (dump_gs_call): Dump LHS if available.
+       (dump_gs_try): New.
+       (dump_gimple_stmt): Add case for GS_TRY.
+       * gimple-ir.c: Include diagnostic.h.
+       (gs_build_try): Cleanup and eval are sequences.
+       Remove catch_p and finally_p arguments.  Add catch_finally argument.
+       (gs_omp_build_critical): Body is a gs_seq.
+       (gs_omp_build_parallel): Same.
+       (gs_omp_build_section): Same.
+       (gs_omp_build_master): Same.
+       (gs_omp_build_continue): Same.
+       (gs_omp_build_ordered): Same.
+       (gs_omp_build_sections): Same.
+       (gs_omp_build_single): Same.
+       (gs_omp_build_for): Body and pre_body are gs_seq's.
+       (gs_push): New.
+       (gs_pop): New.
+       (walk_tuple_ops): Walk GS_TRY tuples' eval and cleanup correctly.
+       Dump tuple before we ICE.
+       * gimple-ir.h (gs_seq_copy): New.
+       (struct gimple_statement_try): Eval and cleanups are gs_seq's.
+       (gs_bind_set_body): Use gs_seq_copy.
+       (gs_try_eval): Return address of eval.
+       (gs_try_cleanup): Return address of cleanup.
+       (gs_try_set_eval): Use gs_seq_copy.
+       (gs_try_set_cleanup): Same.
+       (gs_omp_set_body): Same.
+       (gs_omp_for_set_pre_body): Same.
+       * gimplify.c (struct gimplify_ctx): Rename current_bind_expr to
+       current_bind_expr_seq, and make it a sequence.
+       (pop_gimplify_context): Adjust for current_bind_expr_seq.
+       (gimple_push_bind_expr): Same.
+       (gimple_pop_bind_expr): Same.
+       (gimple_current_bind_expr): Same.
+       (build_stack_save_restore): Generate tuples.
+       (gimplify_bind_expr): Same.
+
+2007-07-13  Diego Novillo  <dnovillo@google.com>
+
+       * gimple-ir.c (gs_add): Swap arguments.
+       Update all users.
+       * gimple-ir.h (gs_seq_append): Likewise.
+
+2007-07-12  Diego Novillo  <dnovillo@google.com>
+
+       * tree.c (create_artificial_label): Move from gimplify.c
+       (get_name): Likewise.
+       * tree.h (create_artificial_label, get_name): Move
+       declarations earlier in the file.
+       * diagnostic.h (dump_gimple_stmt, print_gimple_stmt,
+       dump_gimple_seq): Rearrange.
+       * tree-gimple.h (gimplify_function_tree): Move from tree.h.
+       * gimple-pretty-print.c (do_niy): Tidy.
+       (maybe_init_pretty_print): Add comment.
+       (newline_and_indent): Likewise.
+       Remove "gimpleir: " prefix.
+       (debug_gimple_stmt): Add comment.
+       (dump_gs_seq): Remove.
+       (dump_gimple_seq): Add argument SPC.
+       Update all users.
+       If FLAGS contains TDF_DETAILS, emit "gimpleir:" prefix.
+       (dump_gs_cond): If FLAGS contains TDF_DETAILS, emit
+       "gimpleir:" prefix.
+       (dump_gs_bind): Likewise.
+       * function.h (struct function): Remove field 'gimplified'.
+       * gimple-ir.c (gimple_bodies): New private variable.
+       (set_gimple_body): New.
+       (gimple_body): New.
+       * gimple-ir.h: Include pointer-set.h.
+       Add comment before data structure definitions.
+       (set_gimple_body): Declare.
+       (gimple_body): Declare.
+       * gimplify.c (create_artificial_label): Move to tree.c
+       (get_name): Likewise.
+       (gimplify_function_tree): Change return type to void.
+       Call set_gimple_body after gimplification and nullify
+       DECL_SAVED_TREE.
+       Update all callers.
+       * common.opt (fgimple-only): New option.
+       * tree-optimize.c (tree_rest_of_compilation): Do not nullify
+       DECL_SAVED_TREE.
+       * c-gimplify.c (c_genericize): Restore gimplification logic to
+       mainline version.
+       If -fgimple-only was given, exit.
+       * Makefile.in (GIMPLE_IR_H): Add pointer-set.h
+       * tree-cfg.c (execute_build_cfg): Nullify GIMPLE body after
+       building the CFG.
+       (dump_function_to_file): If DECL_SAVED_TREE is NULL dump the
+       GIMPLE body of the function.
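A sketch of how the gimple_bodies mapping might work; keying a pointer map on
the FUNCTION_DECL is an assumption suggested by the new pointer-set.h
dependency, and gimple_seq is treated as a pointer type here:

    /* Sketch only: associate each FUNCTION_DECL with its GIMPLE body,
       so DECL_SAVED_TREE can be nullified after gimplification.  */
    static struct pointer_map_t *gimple_bodies;

    void
    set_gimple_body (tree fndecl, gimple_seq seq)
    {
      void **slot;

      if (gimple_bodies == NULL)
        gimple_bodies = pointer_map_create ();
      slot = pointer_map_insert (gimple_bodies, fndecl);
      *slot = seq;
    }

    gimple_seq
    gimple_body (tree fndecl)
    {
      void **slot;

      if (gimple_bodies == NULL)
        return NULL;
      slot = pointer_map_contains (gimple_bodies, fndecl);
      return slot ? (gimple_seq) *slot : NULL;
    }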
+
+2007-07-12  Diego Novillo  <dnovillo@google.com>
+
+       * omp-low.c (lower_regimplify): Use a local GIMPLE sequence to
+       hold the result from gimplification.
+       * tree-gimple.c (is_gimple_min_val): Reformat.
+       * tree-gimple.h (enum fallback_t): Document values.
+       (gimplify_expr): Remove IS_STATEMENT argument.
+       Update all users.
+       * langhooks.c (lhd_gimplify_expr): Likewise.
+       * gimplify.c (gimplify_statement_list): If a temporary
+       was returned from voidify_wrapper_expr, abort to mark the failure.
+       (gimplify_expr): Remove argument IS_STATEMENT.
+       Update all users.
+       Assert that FALLBACK is used with the appropriate GIMPLE_TEST_F.
+       Restore logic to use internal queue.
+       Do not abort if on return from gimplify_call_expr, *EXPR_P has
+       a CALL_EXPR.
+
+2007-07-12  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimple-ir.c (gs_build_switch_1): Allocate one less tree.
+       Offset labels by one.
+       (gs_switch_label): Same.
+       (gs_switch_set_label): Same.
+
+2007-07-12  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimple-ir.c (walk_seq_ops): Rename from walk_tree_seq.
+       (walk_tuple_ops): Rename from walk_tree_tuple.
+       * gimple-ir.h, gimplify.c, gimple-ir.c: Rename all calls to
+       walk_tree_seq and walk_tree_tuple accordingly.
+
+2007-07-12  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimple-ir.c (gs_build_switch_1): Fix spacing.
+       Allocate one more tree.
+       (gs_build_switch): Set labels starting at 1.
+       (walk_tree_seq): New.
+       (walk_tree_tuple): New.
+       * gimple-ir.h: Add prototypes for walk_tree_tuple and walk_tree_seq.
+       * gimplify.c (check_pointer_types_r): Uncomment.
+       (gimplify_body): Walk gimple sequence with check_pointer_types_r.
+
+2007-07-11  Chris Matthews  <chrismatthews@google.com> 
+
+       * tree-pretty-print.c (dump_generic_node): Removed space before default
+       label colon.
+       * tree.h (sort_case_labels): Moved to gimple-ir.h.
+       * gimplify.c (sort_case_labels): Changed to a vector instead of tree
+       vector.
+       (gimplify_switch_expr): Initial implementation with tuples.
+       (gimplify_expr): Changed gimplify_case_label_expr parameter.
+       (gimplify_case_label_expr): Added a gs_seq parameter, and put cases in
+       that.
+       (dump_gimple_stmt): Removed semicolon.
+       (dump_gs_label):  Refactored from dump_gimple_expr.
+       (dump_gs_switch): Added.
+       (gs_build_switch_vec): Added.
+       * gimple-ir.c (gs_build_switch_1): Added.
+       (gs_build_switch): Refactored to use gs_build_switch_1.
+       (gs_build_switch_vec): Added.
+       * gs_switch.c: New test case.
+       * gs_switch1.c: New test case.
+       * gs_switch2.c: New test case.
+       
+2007-07-05  Aldy Hernandez  <aldyh@redhat.com> 
+
+       * gimple-pretty-print.c (dump_gimple_stmt): Alphabetize cases.
+       Add case for GS_NOP.
+       * gimplify.c (gimplify_body): Handle null bodies.
+       Use GS_CODE instead of GS_SUBCODE_FLAGS.
+
+2007-07-05  Aldy Hernandez  <aldyh@redhat.com> 
+
+       * testsuite/gcc.dg/gimple/with_size_expr.c: Clean up dump.
+       * testsuite/gcc.dg/gimple/gs_bind.c: Clean up dump.
+       * gimplify.c (struct gimplify_ctx): Make current_bind_expr a tuple.
+       (pop_gimplify_context): Accept gimple.
+       Comment out call to declare_vars.
+       (gimple_current_bind_expr): Return gimple.
+       (unshare_all_trees): Remove.
+       (gimplify_self_mod_expr): Remove comment.
+       (gimplify_cleanup_point_expr): Correct typo in call to gs_seq_init.
+       (gimplify_body): Remove body local.  
+       Build GS_BIND tuples when needed.
+       Do not call unshare_all_trees.
+       Call pop_gimplify_context with appropriate argument.
+       Comment out call to walk_tree.
+       * tree-pretty-print.c (print_declaration): Remove static.
+       * diagnostic.h (print_declaration): Prototype.
+       * tree-gimple.h (pop_gimplify_context): Accept gimple tuple.
+       (gimple_current_bind_expr): Return tuple.
+       * gimple-pretty-print.c (dump_gs_seq): New.
+       (dump_gs_bind): New.
+       (dump_gimple_stmt): Add case for GS_BIND.  Print semi-colons after
+       each statement.
+
+2007-06-29  Aldy Hernandez  <aldyh@redhat.com> 
+
+       * gimplify.c (gimple_push_condition): Enable.  Call gs_seq_init with
+       address.
+       (gimplify_cond_expr): Push and pop conditions.  Use other GS_COND
+       predicates when appropriate.
+
+2007-06-28  Aldy Hernandez  <aldyh@redhat.com> 
+
+       * testsuite/gcc.dg/gimple/gs_goto.c: New.
+       * testsuite/gcc.dg/gimple/gs_cond.c: New.
+       * tree-gimple.h (gimplify_stmt): Return bool.
+       * gimple-pretty-print.c (INDENT): New.
+       (newline_and_indent): New.
+       (op_gs_cond): New.
+       (dump_gs_cond): New.
+       (dump_gimple_stmt): New.
+       * gimple-ir.c (gs_cond_invert): New.
+       * gimple-ir.h (enum gs_cond): Add comment.
+       (gs_cond_invert): Protoize.
+       * gimplify.c (gimplify_cond_expr): Rewrite for tuples.
+       (gimplify_stmt): Return true if we added a statement to the queue.
+       (gimplify_expr): Enable gimplify_cond_expr.
+       Build tuples for GOTO_EXPRs and LABEL_EXPRs.
+
+2007-06-27  Aldy Hernandez  <aldyh@redhat.com> 
+
+       * gimple-ir.h (gs_seq_last): Return last statement.
+       * testsuite/gcc.dg/gimple/compound_expr.c: Add checks.
+       * testsuite/gcc.dg/gimple/gs_call.c: Same.
+       * testsuite/gcc.dg/gimple/constructors.c: Same.
+       * testsuite/gcc.dg/gimple/gs_assign.c: Same.
+
+2007-06-27  Aldy Hernandez  <aldyh@redhat.com> 
+
+       Put this patch back in.
+
+       2007-06-22  Aldy Hernandez  <aldyh@redhat.com> 
+
+       * gimplify.c (gimplify_modify_expr): Return after a successful
+       call to gimplify_modify_expr_rhs.
+
+2007-06-26  Aldy Hernandez  <aldyh@redhat.com>
+
+       * testsuite/gcc.dg/gimple/gimple.exp: Pass -fdump-tree-gimple-details
+       to tests.
+       * testsuite/gcc.dg/gimple/compound_expr.c: Add dg-final.
+       * testsuite/gcc.dg/gimple/gs_return.c: Same.
+       * tree.h (gimplify_function_tree): Add return value.
+       * diagnostic.h (debug_c_tree): Move under tree-pretty-print.c section.
+       (dump_gimple_seq): New.
+       * gimple-pretty-print.c (dump_gimple_seq): New.
+       * gimplify.c (gimplify_function_tree): Add return value.
+       Remove debug call and exit.
+       Comment out non-working code.
+       * c-gimplify.c (c_genericize): Dump gimple IR.  Exit.
+
+2007-06-26  Diego Novillo  <dnovillo@google.com>
+
+       * gimple-ir.c (gs_build_call_1): Fix formatting.
+
+2007-06-26  Diego Novillo  <dnovillo@google.com>
+
+       * gimple-pretty-print.c (dump_gs_assign, dump_gs_return,
+       dump_gs_call): New functions.
+       (dump_gimple_stmt): Call them.
+       * gimple-ir.c (gs_build_call_1): Factor out of gs_build_call.
+       (gs_build_call): Call it.
+       (gs_build_call_vec): New function.
+       * gimple-ir.h (struct gimple_statement_call): Change type of
+       field 'nargs' to size_t.  Update all users.
+       (gs_build_call_vec): Declare.
+       (gs_call_set_fn): Remove.
+       (gs_call_set_nargs): Remove.
+       * gimplify.c: Include "vec.h".
+       (gimplify_return_expr): Fix formatting.
+       (gimplify_call_expr): Call gs_build_call_vec.
+       (gimplify_expr): Do not try to test if NULL expressions
+       are in GIMPLE form.
+       (gimplify_function_tree): Do not call debug_gimple_seq.
+       * Makefile.in (gimplify.o): Include vec.h.
+
+2007-06-25  Chris Matthews  <chrismatthews@google.com>
+
+       * gimplify.c (gimple_current_bind_expr): Changed to work with gs_seq
+       accessors.
+       (gimplify_and_add): Same.
+       (annotate_all_with_locus): Same.
+       (gimplify_self_mod_expr): Same.
+       (gimplify_cleanup_point_expr): Same.
+       (gimplify_expr): Same.
+       (gimplify_body): Same.
+       (force_gimple_operand): Same.
+       (gimplify_init_ctor_eval_range): Added GS_ prefix.
+       * gimple-iterator.h (gsi_last): Changed to gs_seq accessors.  Changed
+       gimple_stmt_iterator to use a gimple instead of gimple *.
+       (gsi_one_before_end_p): Same.
+       (gsi_start): Same.
+       * gimple-ir.h (gs_cond): Prepended GS_ to names.
+       (gs_seq_first): Replaced macro.
+       (gs_seq_last): Same.
+       (gs_seq_set_first): Same.
+       (gs_seq_set_last): Same.
+       (gs_seq_init): Same.
+       (gs_seq_empty_p): Same.
+       (gs_assign_operand): Changed opno to be a size_t to match the setter.
+       (gs_bind_body): Changed to use gs_seq.
+       (gs_bind_set_body): Changed to use gs_seq, gs_seq_set_first, and
+       gs_seq_set_last.
+       (gs_asm_ninputs): Renamed.
+       (gs_asm_noutputs): Renamed.
+       (gs_asm_nclobbered): Renamed.
+       (gs_asm_set_ninputs): Renamed.
+       (gs_asm_set_noutputs): Renamed.
+       (gs_asm_set_nclobbered): Renamed.
+       (gs_asm_set_input_op): Renamed.
+       (gs_asm_input_op): Renamed.
+       (gs_asm_set_output_op): Renamed.
+       (gs_asm_output_op): Renamed.
+       (gs_omp_body): Changed to use gs_seq.
+       (gs_omp_set_body): Changed to use gs_seq accessors.
+       (gs_omp_for_pre_body): Changed to use gs_seq.
+       (gs_omp_for_set_pre_body): Changed to use gs_seq accessors.
+       (gs_seq_append): Changed to use gs_seq accessors.
+       * gimple-ir.c (gs_add): Same.
+       (gs_build_asm): Changed argument names to match accessors, and changed
+       functions to new accessor names.
+       (gs_build_cond): Reformatted.
+       (gs_build_phi): Same.
+       (gs_build_try): Renamed args to try_p and catch_p.
+       (gs_build_omp_return): Changed to correct arguments, and added a
+       subcode flag.
+       * function.c (gimplify_parameters): Changed to gs_seq accessors.
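A sketch of the gs_seq accessors that replace the old macros, assuming gs_seq
is a (first, last) pair of statements (the layout is an assumption):

    /* Sketch only: a sequence is a pair of first/last statements and the
       accessors read or reset those fields.  */
    typedef struct gs_sequence
    {
      gimple first;
      gimple last;
    } gs_seq;

    static inline gimple
    gs_seq_first (const gs_seq *s)
    {
      return s->first;
    }

    static inline gimple
    gs_seq_last (const gs_seq *s)
    {
      return s->last;
    }

    static inline void
    gs_seq_set_first (gs_seq *s, gimple first)
    {
      s->first = first;
    }

    static inline void
    gs_seq_set_last (gs_seq *s, gimple last)
    {
      s->last = last;
    }

    static inline void
    gs_seq_init (gs_seq *s)
    {
      s->first = NULL;
      s->last = NULL;
    }

    static inline bool
    gs_seq_empty_p (const gs_seq *s)
    {
      return s->first == NULL;
    }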
+
+2007-06-22  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimplify.c (gimplify_modify_expr): Return after a successful
+       call to gimplify_modify_expr_rhs.
+
+2007-06-21  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimple-ir.h (gs_assign_binary_rhs1): Add assertion for
+       GSS_ASSIGN_BINARY.
+       (gs_assign_binary_set_rhs1): Same.
+       (gs_assign_binary_rhs2): Same.
+       (gs_assign_binary_set_rhs2): Same.
+       (gs_assign_unary_rhs): Same.
+       (gs_assign_unary_set_rhs): Same.
+
+2007-06-21  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gcc.dg/gimple/gimple.exp: New.
+       * gcc.dg/gimple/compound_expr.c: New.
+       * gcc.dg/gimple/with_size_expr.c: New.
+       * gcc.dg/gimple/gs_call.c: New.
+       * gcc.dg/gimple/constructors.c: New.
+       * gcc.dg/gimple/gs_return.c: New.
+       * gcc.dg/gimple/gs_assign.c: New.
+
+2007-06-21  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimple-ir.c (gs_build_cond): Change order of arguments.  Make labels
+       of type tree.
+       (gs_build_asm): Fix formatting.
+       * gimple-ir.h (gimple_statement_cond): Make labels of type tree.
+       (gs_build_cond): Change order and type of arguments.
+       (gs_build_asm): Fix formatting.
+       (gs_omp_build_for): Same.
+       (gs_assign_binary_rhs1): Remove assert.
+       (gs_assign_binary_set_rhs1): Same.
+       (gs_assign_binary_rhs2): Same.
+       (gs_assign_binary_set_rhs2): Same.
+       (gs_assign_unary_rhs): Same.
+       (gs_cond_true_label): Return a tree.
+       (gs_cond_set_true_label): Make label a tree.
+       (gs_cond_set_false_label): Make label a tree.
+       (gs_cond_false_label): Return a tree.
+       * gimplify.c (gimplify_init_ctor_eval_range): Build tuples.
+       (gimplify_init_ctor_eval): Same.
+       (gimplify_init_constructor): Enable.  Adjust for tuples.
+       (gimplify_modify_expr_rhs): Uncomment call to
+       gimplify_init_constructor.
+
+2007-06-21  Diego Novillo  <dnovillo@google.com>
+
+       * gimple.def: Rename from gs.def.
+       Adjust all users.
+
+2007-06-21  Diego Novillo  <dnovillo@google.com>
+
+       * tree-pretty-print.c (pred_symbol_code, do_gs_niy,
+       debug_gimple_stmt, debug_gimple_seq, print_gimple_stmt,
+       dump_gimple_stmt): Move to gimple-pretty-print.c
+       * diagnostic.h: Add comment for functions in gimple-pretty-print.c
+       * gimple-pretty-print.c: New file.
+       * gimple-ir.c (gs_build_return): Fix spacing.
+       (gs_build_assign): Likewise.
+       * gimple-ir.h: Fix spacing.
+       (gs_assign_set_operand): Change OPNO to size_t.
+       Add assertions for OPNO's value.
+       (gs_assign_lhs): Rename from gs_assign_operand_lhs.
+       (gs_assign_binary_rhs1): Rename from gs_assign_operand_rhs.
+       Assert that GS is GSS_ASSIGN_BINARY.
+       (gs_assign_binary_set_rhs1): Rename from gs_assign_set_rhs.
+       Assert that GS is GSS_ASSIGN_BINARY.
+       (gs_assign_binary_set_rhs2): Rename from gs_assign_set_rhs2.
+       Assert that GS is GSS_ASSIGN_BINARY.
+       (gs_assign_unary_rhs): New.
+       (gs_assign_unary_set_rhs): New.
+       (gs_call_fn, gs_call_lhs, gs_call_chain, gs_call_arg,
+       gs_cond_lhs, gs_cond_rhs, gs_label_label, gs_goto_dest,
+       gs_bind_vars, gs_asm_in_op, gs_asm_out_op, gs_asm_clobber_op,
+       gs_catch_types, gs_catch_handler, gs_eh_filter_types,
+       gs_eh_filter_failure, gs_try_eval, gs_try_cleanup,
+       gs_phi_result, gs_switch_index, gs_switch_default_label,
+       gs_switch_label, gs_omp_critical_name, gs_omp_for_clauses,
+       gs_omp_for_index, gs_omp_for_initial, gs_omp_for_final,
+       gs_omp_for_incr, gs_omp_parallel_clauses,
+       gs_omp_parallel_child_fn, gs_omp_parallel_data_arg,
+       gs_omp_single_clauses, gs_omp_sections_clauses,
+       gs_return_retval): Change return type to 'tree'.
+       * Makefile.in (OBJS-common): Add gimple-pretty-print.o.
+       (gimple-pretty-print.o): New rule.
+
+2007-06-20  Aldy Hernandez  <aldyh@redhat.com>
+
+       * tree-pretty-print.c (dump_gimple_stmt): Change pred_symbol_code
+       to op_symbol_code.
+
+2007-06-19  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimplify.c (gimplify_modify_expr_rhs): Enable.  Adjust for tuples.
+       (gimplify_modify_expr): Call gimplify_modify_expr_rhs.
+       (gimplify_compound_expr): Enable.  Adjust for tuples.  Remove comment
+       that no longer applies.
+       (gimplify_expr): Enable call to gimplify_compound_expr.
+
+2007-06-18  Chris Matthews  <chrismatthews@google.com>
+
+       * Makefile.in (GTFILES): Added gimple-ir.h.
+
+       * gimple-ir.c (gs_build_return, gs_build_call, gs_build_assign): 
+       Changed to use new accessors.
+       (gs_build_cond, gs_build_label, gs_build_goto,
+       gs_build_nop, gs_build_bind, gs_build_asm, gs_build_catch,
+       gs_build_eh_filter, gs_build_try, gs_build_phi,
+       gs_build_resx, gs_build_switch, gs_omp_build_critical,
+       gs_omp_build_for, gs_omp_build_parallel,
+       gs_omp_build_section, gs_omp_build_master,
+       gs_omp_build_ordered, gs_omp_continue,
+       gs_omp_build_return,
+       gs_omp_build_sections, gs_omp_build_single): New
+       functions.
+
+       * gimple-ir.h (struct gimple_statement_switch): Changed
+       default label to be in labels[0].
+
+       (struct gimple_statement_asm): Corrected the allocation
+       length.
+
+       (enum gs_cond): New enum.
+       (gs_assign_set_operand): Changed to work with new accessors.
+       (gs_assign_operand_lhs, gs_assign_operand_rhs,
+       gs_assign_operand_set_lhs, gs_assign_set_rhs,
+       gs_assign_operand_rhs2, gs_assign_set_rhs2, gs_call_fn,
+       gs_call_set_fn, gs_call_lhs, gs_call_set_lhs,
+       gs_call_chain, gs_call_set_chain, gs_call_nargs,
+       gs_call_set_nargs, gs_call_arg, gs_call_set_arg,
+       gs_cond_lhs, gs_cond_set_lhs, gs_cond_rhs,
+       gs_cond_set_rhs, gs_cond_true_label,
+       gs_cond_set_true_label, gs_cond_set_false_label,
+       gs_cond_false_label, gs_label_label, gs_label_set_label,
+       gs_goto_dest, gs_goto_set_dest, gs_bind_vars,
+       gs_bind_set_vars, gs_bind_body, gs_bind_set_body,
+       gs_asm_ni, gs_asm_set_ni, gs_asm_no, gs_asm_set_no,
+       gs_asm_nc, gs_asm_set_nc, gs_asm_in_op, gs_asm_set_in_op,
+       gs_asm_out_op, gs_asm_set_out_op, gs_asm_clobber_op,
+       gs_asm_set_clobber_op, gs_asm_string, gs_asm_set_string,
+       gs_catch_types, gs_catch_handler, gs_catch_set_types,
+       gs_catch_set_handler, gs_eh_filter_types,
+       gs_eh_filter_failure, gs_eh_filter_set_types,
+       gs_eh_filter_set_failure, gs_try_eval, gs_try_cleanup,
+       gs_try_set_eval, gs_try_set_cleanup, gs_phi_capacity,
+       gs_phi_set_capacity, gs_phi_nargs, gs_phi_set_nargs,
+       gs_phi_result, gs_phi_set_result, gs_phi_arg,
+       gs_phi_set_arg, gs_resx_region, gs_resx_set_region,
+       gs_switch_nlabels, gs_switch_set_nlabels,
+       gs_switch_index, gs_switch_set_index,
+       gs_switch_default_label, gs_switch_set_default_label,
+       gs_switch_label, gs_switch_set_label, gs_omp_body,
+       gs_omp_set_body, gs_omp_critical_name,
+       gs_omp_critical_set_name, gs_omp_for_clauses,
+       gs_omp_for_set_clauses, gs_omp_for_index,
+       gs_omp_for_set_index, gs_omp_for_initial,
+       gs_omp_for_set_initial, gs_omp_for_final,
+       gs_omp_for_set_final, gs_omp_for_incr,
+       gs_omp_for_set_incr, gs_omp_for_pre_body,
+       gs_omp_for_set_pre_body, gs_omp_parallel_clauses,
+       gs_omp_parallel_set_clauses, gs_omp_parallel_child_fn,
+       gs_omp_parallel_set_child_fn, gs_omp_parallel_data_arg,
+       gs_omp_parallel_set_data_arg, gs_omp_single_clauses,
+       gs_omp_single_set_clauses, gs_omp_sections_clauses,
+       gs_omp_sections_set_clauses, gs_assign_omp_for_cond,
+       gs_omp_for_cond, gs_return_set_retval,
+       gs_add_subcode_flag): New accessor functions.
+       (gs_return_retval): Renamed from gs_return_operand_retval to match
+       accessor conventions.
+
+2007-05-31  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimple-ir.c (gs_build_call): New.
+       * gimple-ir.h (GS_CALL_LHS): New.
+       (GS_CALL_FN): New.
+       (GS_CALL_CHAIN): New.
+       (GS_CALL_NARGS): New.
+       (GS_CALL_ARG): New.
+       (gs_call_lhs): New.
+       (gs_call_fn): New.
+       (gs_call_chain): New.
+       (gs_call_nargs): New.
+       (gs_call_arg): New.
+       * gimplify.c (gimplify_modify_expr_to_memcpy): Enable and rewrite for 
+       tuples.
+       (gimplify_modify_expr_to_memset): Same.
+       (gimplify_statement_list): Same.
+       (gimplify_expr): Enable STATEMENT_LIST case.
+
+2007-05-29  Aldy Hernandez  <aldyh@redhat.com>
+
+       Merged revisions 124007-125166 from mainline.
+
+2007-05-23  Aldy Hernandez  <aldyh@redhat.com>
+
+       * builtins.c (std_gimplify_va_arg_expr): Add argument to gimplify_expr.
+       Remove seq argument.
+       (gimplify_va_arg_expr): Same.
+       * tree-gimple.h: Same.
+       * langhooks.c (lhd_gimplify_expr): Change pre_p and post_p types to
+       sequences.
+       * langhooks-def.h (lhd_gimplify_expr): Likewise.
+       * langhooks.h (struct lang_hooks): Remove argument.
+       * gimplify.c (internal_get_tmp_var): Adjust calls to gimplify_expr
+       for new arguments.
+       (gimplify_switch_expr): Same.
+       (gimplify_var_or_parm_decl): Same.
+       (gimplify_compound_lval): Same.
+       (gimplify_self_mod_expr): Same.
+       (gimplify_arg): Same.
+       (gimplify_call_expr): Same.
+       (gimplify_init_ctor_preeval): Same.
+       (gimplify_init_constructor): Same.
+       (gimplify_modify_expr_rhs): Same.
+       (gimplify_modify_expr): Same.
+       (gimplify_save_expr): Same.
+       (gimplify_addr_expr): Same.
+       (gimplify_asm_expr): Same.
+       (gimplify_target_expr): Same.
+       (omp_check_private): Same.
+       (gimplify_scan_omp_clauses): Same.
+       (gimplify_omp_parallel): Same.
+       (gimplify_omp_for): Same.
+       (goa_stabilize_expr): Same.
+       (gimplify_omp_atomic): Same.
+       (gimplify_one_sizepos): Same.
+       (force_gimple_operand): Same.
+       (gimplify_expr): Remove seq_p argument.  Add new is_statement
+       argument.  Adjust accordingly.  Make seq_p required.
+
+2007-05-07  Aldy Hernandez  <aldyh@redhat.com>
+
+       * function.c (gimplify_parameters): Use new GS_SEQ_INIT definition.
+       * gimple-ir.h (GS_SEQ_INIT): Do not use C99 constructs.
+       * gimplify.c: Disable non working code throughout.
+       Pass additional call to gimplify_expr throughout.
+       (gimplify_ctx): Make conditional_cleanups a sequence.
+       (gimple_push_condition): Use GS_SEQ_EMPTY_P.
+       (gimple_pop_condition): Adapt for sequences.
+       (gimplify_and_add): Use gs_seq_append regardless of side effects.
+       (internal_get_tmp_var): Use sequences.
+       (get_formal_tmp_var): Same.
+       (get_initialized_tmp_var): Same.
+       (annotate_one_with_locus): Change GS_LOCUS to GS_LOCUS_EMPTY_P.
+       (gimplify_bind_expr): Use sequences.
+       Change append_to_statement_list to gimplify_and_add.
+       (gimplify_return_expr): Add gimplified code to pre_p.
+       (gimplify_decl_expr): New seq_p parameter.
+       (gimplify_loop_expr): Adapt for sequences.
+       Use gimplify_and_add instead of append_to_statement_list.
+       (gimplify_switch_expr): Same.
+       (gimplify_compound_lval): Use sequences.
+       (gimplify_self_mod_expr): Same.
+       Use gs_seq_append instead of append_to_statement_list.
+       (gimplify_arg): Use sequences.
+       (gimplify_call_expr): Same.
+       (gimplify_cond_expr): Use sequences.
+       (gimplify_init_ctor_preeval): Use sequences.
+       (gimplify_init_ctor_eval_range): Same.
+       Use gimplify_and_add instead of append_to_statement_list.
+       (gimplify_init_ctor_eval): Use sequences.
+       (gimplify_init_constructor): Same.
+       Remove one call to append_to_statement_list.
+       (gimplify_modify_expr_rhs): Use sequences.
+       (gimplify_modify_expr_complex_part): Use sequences.
+       Remove call to tree_to_gimple_tuple.
+       Build GS_ASSIGN tuple.
+       (gimplify_modify_expr): Use new argument.  Use sequences.
+       Do not call append_to_statement_list.
+       Build GS_ASSIGN tuple.
+       Do not call tree_to_gimple_tuple.
+       Set *expr_p to NULL when we do not want the value.
+       (gimplify_compound_expr): Use sequences.
+       (gimplify_save_expr): Same.
+       (gimplify_addr_expr): Same.
+       (gimplify_asm_expr): Same.
+       (gimplify_cleanup_point_expr): Same.
+       (gimple_push_cleanup): Same.
+       Build GS_ASSIGN tuples.
+       (gimplify_target_expr): Use sequences.
+       (gimplify_scan_omp_clauses): Same.
+       Add argument to gimplify_stmt calls.
+       (gimplify_omp_parallel): Same.
+       (gimplify_omp_for): Use sequences.
+       (gimplify_omp_workshare): Same.
+       (goa_stabilize_expr): Same.
+       (gimplify_omp_atomic_pipeline): Same.
+       (gimplify_omp_atomic_mutex): Same.
+       (gimplify_omp_atomic): Same.
+       (gimplify_expr): Same.
+       Call GS_SEQ_INIT with argument.
+       Use new seq_p argument.
+       Do not call tree_to_gimple_tuple.
+       Pass additional argument to gimplify_decl_expr.
+       Do not pass seq_p argument to gimplify_return_expr.
+       Call gs_seq_append instead of append_to_statement_list.
+       Check that all statements have been converted to tuples.
+       Make pre_p and seq_p sequences coexist.
+       (gimplify_type_sizes): Use sequences.
+       (gimplify_one_sizepos): Same.
+       (gimplify_body): Make parm_stmts a sequence.
+       Add argument to seq_p.
+       (gimplify_function_tree): Call debug_gimple_seq.
+       (force_gimple_operand): Use sequences.
+       (force_gimple_operand_bsi): Use sequences.
+
+2007-05-04  Aldy Hernandez  <aldyh@redhat.com>
+
+       * omp-low.c (build_omp_barrier): Adjust arguments for sequences.
+       (lower_rec_input_clauses): Disable non working code.
+       (lower_regimplify): Pass additional argument to gimplify_expr.
+       * tree-mudflap.c (mx_register_decls): Disable non working code.
+       * tree-inline.c (copy_bb): Disable non working code.
+       (setup_one_parameter): Same.
+       * tree-cfg.c (make_edges): Same.
+
+2007-05-04  Aldy Hernandez  <aldyh@redhat.com>
+
+       * tree-gimple.h (get_initialized_tmp_var): Adjust prototype
+       for sequences.
+       (get_formal_tmp_var): Same.
+       (gimplify_type_sizes): Same.
+       (gimplify_one_sizepos): Same.
+       (gimplify_stmt): Same.
+       (gimplify_and_add): Same.
+       (gimplify_va_arg_expr): Same.
+       * langhooks.h (lang_hooks): Same.
+       * function.c (gimplify_parm_type): Adjust for sequences.
+       (gimplify_parameters): Same.
+       * c-gimplify.c (gimplify_compound_literal_expr): Same.
+       (c_gimplify_expr): Same.
+       * tree-flow.h (force_gimple_operand): Same.
+       * c-common.h (c_gimplify_expr): Adjust prototype for sequences.
+       * config/i386/i386.c (ix86_gimplify_va_arg): Adjust for sequences.
+       Change call to append_to_statement_list to gimplify_and_add.
+       Add parameter to gimplify_expr.
+
+2007-05-04  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimple-ir.c (gs_code_name): Constify.
+       (gs_build_assign): New.
+       (gimple_statement_structure): Abstract code out to...
+       (gss_for_assign): ...here.
+       (gs_add): Set the last item correctly.
+       * gimple-ir.h (GS_LOCUS_EMPTY_P): New.
+       (GS_SEQ_INIT): Add a cast.
+       (gimple_statement_base): Make code a gs_code enum.
+       (gimple_statement_with_ops): Remove address_taken.
+       (GS_ASSIGN_BINARY_LHS): New.
+       (GS_ASSIGN_BINARY_RHS1): New.
+       (GS_ASSIGN_BINARY_RHS2): New.
+       (GS_ASSIGN_UNARY_REG_LHS): New.
+       (GS_ASSIGN_UNARY_REG_RHS): New.
+       (GS_ASSIGN_UNARY_MEM_LHS): New.
+       (GS_ASSIGN_UNARY_MEM_RHS): New.
+       (gs_seq_append): New.
+       Move gs_seq typedef to...
+       * coretypes.h: ...here.
+       * gimple-iterator.h (gsi_stmt_ptr): Add FIXME note.
+
+2007-05-04  Aldy Hernandez  <aldyh@redhat.com>
+
+       * tree.h (std_gimplify_va_arg_expr): Change tree * to a sequence
+       in prototype.
+       (gimplify_parameters): Return a sequence.
+       * target.h (gimplify_va_arg_expr): Change tree * to a sequence.
+       * builtins.c (std_gimplify_va_arg_expr): Same.
+       Pass additional argument to gimplify_expr.
+       (gimplify_va_arg_expr): Change tree * to a sequence.
+       Change append_to_statement_list call to gimplify_and_add.
+       Pass additional argument to gimplify_expr calls.
+
+2007-05-04  Aldy Hernandez  <aldyh@redhat.com>
+
+       * tree-pretty-print.c (do_gs_niy): New.
+       (debug_gimple_stmt): New.
+       (debug_gimple_seq): New.
+       (print_gimple_stmt): New.
+       (dump_gimple_stmt): New.
+       * diagnostic.h: Add prototypes for dump_gimple_stmt,
+       print_gimple_stmt, debug_gimple_stmt, debug_gimple_seq.
+
+2007-04-26  Aldy Hernandez  <aldyh@redhat.com>
+
+       * tree-gimple.h (annotate_all_with_locus): First argument is now a
+       sequence.
+       * gimple-ir.h (GS_LOCUS): New.
+       (gimple_statement_base): Locus is of type location_t.
+       * gimplify.c (internal_get_tmp_var): Use sequences.
+       (should_carry_locus_p): Adjust for gimple ir.
+       (annotate_one_with_locus): Same.
+       (annotate_all_with_locus): Same.
+       (gimplify_stmt): Adjust for sequences.
+       (gimplify_expr): Same.
+
+2007-04-25  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimple-ir.h (GS_SEQ_EMPTY_P): New.
+       Move gs_build_return, gs_add, and gimple_statement_structure
+       prototypes.
+       (gs_assign_operand): Fix typos in gss.
+       Include gimple-iterator.h.
+       * Makefile.in (GIMPLE_IR_H): Add gimple-iterator.h.
+       (TREE_GIMPLE_H): Same.
+
+2007-04-25  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimple-iterator.h (gsi_one_before_end_p): Use GS_SEQ_LAST.
+
+2007-04-25  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimple-iterator.h: New file.
+
+2007-04-23  Aldy Hernandez  <aldyh@redhat.com>
+
+       * gimple-ir.c: New file.
+       * gimple-ir.h: New file.
+       * gsstruct.def: New file.
+       * gs.def: New file.
+       * gengtype.c (open_base_files): Add gimple-ir.h.
+       * tree-gimple.h: Include gimple-ir.h.
+       Add sequence to gimplify_expr and gimplify_body prototypes.
+       * gimplify.c: Include gimple-ir.h.
+       (gimplify_and_add): Adjust for gimple IR.
+       (gimplify_return_expr): Same.
+       (gimplify_stmt): Add seq_p argument.
+       (gimplify_expr): Add seq_p sequence and adjust accordingly.
+       (gimplify_body): Same.
+       * coretypes.h: Add gimple_statement_d and gimple definitions.
+       * Makefile.in (GIMPLE_IR_H): New.
+       (TREE_GIMPLE_H): Add gimple-ir.h.
+       (OBJS-common): Add gimple-ir.o.
+       (gimplify.o): Add GIMPLE_IR_H.
+       (gimple-ir.o): New.
+       (build/gencheck.o): Add gs.def.
+
+Local Variables:
+mode: change-log
+End:
index f73686f..3dbaa73 100644
@@ -798,6 +798,8 @@ TREE_H = tree.h all-tree.def tree.def c-common.def $(lang_tree_files) \
 BASIC_BLOCK_H = basic-block.h $(BITMAP_H) sbitmap.h varray.h $(PARTITION_H) \
           hard-reg-set.h $(PREDICT_H) vec.h $(FUNCTION_H) \
           cfghooks.h $(OBSTACK_H)
+GIMPLE_H = gimple.h gimple.def gsstruct.def pointer-set.h vec.h \
+       $(GGC_H) $(BASIC_BLOCK_H) $(TM_H) $(TARGET_H) tree-ssa-operands.h
 GCOV_IO_H = gcov-io.h gcov-iov.h auto-host.h
 COVERAGE_H = coverage.h $(GCOV_IO_H)
 DEMANGLE_H = $(srcdir)/../include/demangle.h
@@ -844,9 +846,8 @@ SYMTAB_H = $(srcdir)/../libcpp/include/symtab.h $(OBSTACK_H)
 CPP_ID_DATA_H = $(CPPLIB_H) $(srcdir)/../libcpp/include/cpp-id-data.h
 CPP_INTERNAL_H = $(srcdir)/../libcpp/internal.h $(CPP_ID_DATA_H)
 TREE_DUMP_H = tree-dump.h $(SPLAY_TREE_H) tree-pass.h
-TREE_GIMPLE_H = tree-gimple.h tree-iterator.h
 TREE_FLOW_H = tree-flow.h tree-flow-inline.h tree-ssa-operands.h \
-               $(BITMAP_H) $(BASIC_BLOCK_H) hard-reg-set.h $(TREE_GIMPLE_H) \
+               $(BITMAP_H) $(BASIC_BLOCK_H) hard-reg-set.h $(GIMPLE_H) \
                $(HASHTAB_H) $(CGRAPH_H) $(IPA_REFERENCE_H)
 TREE_SSA_LIVE_H = tree-ssa-live.h $(PARTITION_H) vecprim.h
 PRETTY_PRINT_H = pretty-print.h $(INPUT_H) $(OBSTACK_H)
@@ -1091,7 +1092,10 @@ OBJS-common = \
        gcse.o \
        genrtl.o \
        ggc-common.o \
+       gimple.o \
+       gimple-iterator.o \
        gimple-low.o \
+       gimple-pretty-print.o \
        gimplify.o \
        global.o \
        graph.o \
@@ -1182,7 +1186,6 @@ OBJS-common = \
        tree-dfa.o \
        tree-dump.o \
        tree-eh.o \
-       tree-gimple.o \
        tree-if-conv.o \
        tree-into-ssa.o \
        tree-iterator.o \
@@ -1794,12 +1797,12 @@ c-decl.o : c-decl.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(TREE_H) \
     $(EXPR_H) debug.h $(TOPLEV_H) intl.h $(TM_P_H) $(TREE_INLINE_H) $(TIMEVAR_H) \
     opts.h $(C_PRAGMA_H) gt-c-decl.h $(CGRAPH_H) $(HASHTAB_H) libfuncs.h \
     except.h $(LANGHOOKS_DEF_H) $(TREE_DUMP_H) $(C_COMMON_H) $(CPPLIB_H) \
-    $(DIAGNOSTIC_H) $(INPUT_H) langhooks.h $(TREE_GIMPLE_H) tree-mudflap.h  \
-    pointer-set.h $(BASIC_BLOCK_H)
+    $(DIAGNOSTIC_H) $(INPUT_H) langhooks.h $(GIMPLE_H) tree-mudflap.h  \
+    pointer-set.h $(BASIC_BLOCK_H) $(GIMPLE_H) tree-iterator.h
 c-typeck.o : c-typeck.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
     $(TREE_H) $(C_TREE_H) $(TARGET_H) $(FLAGS_H) intl.h output.h $(EXPR_H) \
     $(RTL_H) $(TOPLEV_H) $(TM_P_H) langhooks.h $(GGC_H) $(TREE_FLOW_H) \
-    $(TREE_GIMPLE_H) tree-iterator.h
+    $(GIMPLE_H) tree-iterator.h
 c-lang.o : c-lang.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(TREE_H) \
     $(C_TREE_H) $(DIAGNOSTIC_H) \
     $(GGC_H) langhooks.h $(LANGHOOKS_DEF_H) $(C_COMMON_H) gtype-c.h \
@@ -1860,7 +1863,8 @@ c-common.o : c-common.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(TREE_H) \
        $(DIAGNOSTIC_H) gt-c-common.h langhooks.h $(VARRAY_H) $(RTL_H) \
        $(TARGET_H) $(C_TREE_H) tree-iterator.h langhooks.h tree-mudflap.h \
        intl.h opts.h $(REAL_H) $(CPPLIB_H) $(TREE_INLINE_H) $(HASHTAB_H) \
-       $(BUILTINS_DEF) $(CGRAPH_H) $(BASIC_BLOCK_H) $(TARGET_DEF_H)
+       $(BUILTINS_DEF) $(CGRAPH_H) $(BASIC_BLOCK_H) $(TARGET_DEF_H) \
+       $(GIMPLE_H)
 
 c-pretty-print.o : c-pretty-print.c $(C_PRETTY_PRINT_H) \
        $(C_TREE_H) $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(REAL_H) \
@@ -1893,8 +1897,8 @@ c-format.o : c-format.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(TREE_H) la
 c-semantics.o : c-semantics.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
        $(TREE_H) $(FLAGS_H) $(TOPLEV_H) output.h $(RTL_H) $(GGC_H) \
        $(PREDICT_H) $(TREE_INLINE_H) $(C_COMMON_H) except.h $(FUNCTION_H) \
-       langhooks.h $(SPLAY_TREE_H) $(TIMEVAR_H) $(TREE_GIMPLE_H) \
-       $(VARRAY_H)
+       langhooks.h $(SPLAY_TREE_H) $(TIMEVAR_H) $(GIMPLE_H) \
+       $(VARRAY_H) tree-iterator.h
 
 c-dump.o : c-dump.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(TREE_H) \
        $(C_TREE_H) $(TREE_DUMP_H)
@@ -1907,7 +1911,7 @@ c-pch.o : c-pch.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(CPPLIB_H) $(TREE_H) \
          $< $(OUTPUT_OPTION)
 
 c-omp.o : c-omp.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(TREE_H) \
-       $(FUNCTION_H) $(C_COMMON_H) $(TOPLEV_H) $(TREE_GIMPLE_H) $(BITMAP_H) \
+       $(FUNCTION_H) $(C_COMMON_H) $(TOPLEV.H) $(GIMPLE_H) $(BITMAP_H) \
        langhooks.h
 
 # Language-independent files.
@@ -2022,7 +2026,7 @@ double-int.o: double-int.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(TREE_H)
 langhooks.o : langhooks.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
    $(TREE_H) $(TOPLEV_H) $(TREE_INLINE_H) $(RTL_H) insn-config.h $(INTEGRATE_H) \
    langhooks.h $(TARGET_H) $(LANGHOOKS_DEF_H) $(FLAGS_H) $(GGC_H) $(DIAGNOSTIC_H) \
-   intl.h $(TREE_GIMPLE_H)
+   intl.h $(GIMPLE_H)
 tree.o : tree.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(TREE_H) \
    all-tree.def $(FLAGS_H) $(FUNCTION_H) $(PARAMS_H) \
    $(TOPLEV_H) $(GGC_H) $(HASHTAB_H) $(TARGET_H) output.h $(TM_P_H) langhooks.h \
@@ -2034,11 +2038,12 @@ tree-dump.o: tree-dump.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
 tree-inline.o : tree-inline.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
    $(TREE_H) $(RTL_H) $(EXPR_H) $(FLAGS_H) $(PARAMS_H) $(INPUT_H) insn-config.h \
    $(VARRAY_H) $(HASHTAB_H) $(TOPLEV_H) langhooks.h $(TREE_INLINE_H) $(CGRAPH_H) \
-   intl.h $(FUNCTION_H) $(GGC_H) $(TREE_GIMPLE_H) \
+   intl.h $(FUNCTION_H) $(GGC_H) $(GIMPLE_H) \
    debug.h $(DIAGNOSTIC_H) except.h $(TREE_FLOW_H) tree-iterator.h tree-mudflap.h \
    $(IPA_PROP_H) value-prof.h tree-pass.h $(TARGET_H) $(INTEGRATE_H)
 print-tree.o : print-tree.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(TREE_H) \
-   $(GGC_H) langhooks.h $(REAL_H) tree-iterator.h fixed-value.h $(TREE_FLOW_H)
+   $(GGC_H) langhooks.h $(REAL_H) tree-iterator.h fixed-value.h \
+   $(DIAGNOSTIC_H) $(TREE_FLOW_H)
 stor-layout.o : stor-layout.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
    $(TREE_H) $(PARAMS_H) $(FLAGS_H) $(FUNCTION_H) $(EXPR_H) output.h $(RTL_H) \
    $(GGC_H) $(TM_P_H) $(TARGET_H) langhooks.h $(REGS_H) gt-stor-layout.h \
@@ -2047,7 +2052,7 @@ tree-ssa-structalias.o: tree-ssa-structalias.c tree-ssa-structalias.h \
    $(SYSTEM_H) $(CONFIG_H) coretypes.h $(TM_H) $(GGC_H) $(OBSTACK_H) $(BITMAP_H) \
    $(FLAGS_H) $(RTL_H) $(TM_P_H) hard-reg-set.h $(BASIC_BLOCK_H) output.h errors.h \
    $(DIAGNOSTIC_H) $(TREE_H) $(C_COMMON_H) $(TREE_FLOW_H) $(TREE_INLINE_H) varray.h \
-   $(C_TREE_H) $(TREE_GIMPLE_H) $(HASHTAB_H) $(FUNCTION_H) $(CGRAPH_H) tree-pass.h \
+   $(C_TREE_H) $(GIMPLE_H) $(HASHTAB_H) $(FUNCTION_H) $(CGRAPH_H) tree-pass.h \
    $(TIMEVAR_H) alloc-pool.h $(SPLAY_TREE_H) $(PARAMS_H) gt-tree-ssa-structalias.h \
    $(CGRAPH_H) $(ALIAS_H) pointer-set.h
 tree-ssa.o : tree-ssa.c $(TREE_FLOW_H) $(CONFIG_H) $(SYSTEM_H) \
@@ -2055,13 +2060,13 @@ tree-ssa.o : tree-ssa.c $(TREE_FLOW_H) $(CONFIG_H) $(SYSTEM_H) \
    $(TOPLEV_H) $(FUNCTION_H) $(TIMEVAR_H) $(TM_H) coretypes.h \
    $(TREE_DUMP_H) langhooks.h tree-pass.h $(BASIC_BLOCK_H) $(BITMAP_H) \
    $(FLAGS_H) $(GGC_H) hard-reg-set.h $(HASHTAB_H) pointer-set.h \
-   $(TREE_GIMPLE_H) $(TREE_INLINE_H) $(VARRAY_H)
+   $(GIMPLE_H) $(TREE_INLINE_H) $(VARRAY_H)
 tree-into-ssa.o : tree-into-ssa.c $(TREE_FLOW_H) $(CONFIG_H) $(SYSTEM_H) \
    $(RTL_H) $(TREE_H) $(TM_P_H) $(EXPR_H) output.h $(DIAGNOSTIC_H) \
    $(FUNCTION_H) $(TIMEVAR_H) $(TM_H) coretypes.h $(TREE_DUMP_H) \
    langhooks.h domwalk.h tree-pass.h $(GGC_H) $(PARAMS_H) $(BASIC_BLOCK_H) \
    $(BITMAP_H) $(CFGLOOP_H) $(FLAGS_H) hard-reg-set.h $(HASHTAB_H) \
-   $(TREE_GIMPLE_H) $(TREE_INLINE_H) $(VARRAY_H) vecprim.h
+   $(GIMPLE_H) $(TREE_INLINE_H) $(VARRAY_H) vecprim.h
 tree-ssa-ter.o : tree-ssa-ter.c $(TREE_FLOW_H) $(CONFIG_H) $(SYSTEM_H) \
    $(TREE_H) $(DIAGNOSTIC_H) $(TM_H) coretypes.h $(TREE_DUMP_H) \
    $(TREE_SSA_LIVE_H) $(BITMAP_H)
@@ -2078,7 +2083,7 @@ tree-ssa-dse.o : tree-ssa-dse.c $(CONFIG_H) $(SYSTEM_H) coretypes.h \
 tree-ssa-forwprop.o : tree-ssa-forwprop.c $(CONFIG_H) $(SYSTEM_H) coretypes.h \
    $(TM_H) $(GGC_H) $(TREE_H) $(RTL_H) $(TM_P_H) $(BASIC_BLOCK_H) \
    $(TREE_FLOW_H) tree-pass.h $(TREE_DUMP_H) $(DIAGNOSTIC_H) $(TIMEVAR_H) \
-   langhooks.h $(FLAGS_H)
+   langhooks.h $(FLAGS_H) $(GIMPLE_H)
 tree-ssa-phiprop.o : tree-ssa-phiprop.c $(CONFIG_H) $(SYSTEM_H) coretypes.h \
    $(TM_H) $(GGC_H) $(TREE_H) $(RTL_H) $(TM_P_H) $(BASIC_BLOCK_H) \
    $(TREE_FLOW_H) tree-pass.h $(TREE_DUMP_H) $(DIAGNOSTIC_H) $(TIMEVAR_H) \
@@ -2103,7 +2108,7 @@ tree-ssa-propagate.o : tree-ssa-propagate.c $(TREE_FLOW_H) $(CONFIG_H) \
    $(DIAGNOSTIC_H) $(FUNCTION_H) $(TIMEVAR_H) $(TM_H) coretypes.h \
    $(TREE_DUMP_H) $(BASIC_BLOCK_H) tree-pass.h langhooks.h \
    tree-ssa-propagate.h vec.h value-prof.h gt-tree-ssa-propagate.h $(FLAGS_H) \
-   $(VARRAY_H)
+   $(VARRAY_H) $(GIMPLE_H)
 tree-ssa-dom.o : tree-ssa-dom.c $(TREE_FLOW_H) $(CONFIG_H) $(SYSTEM_H) \
    $(RTL_H) $(TREE_H) $(TM_P_H) $(EXPR_H) $(GGC_H) output.h $(DIAGNOSTIC_H) \
    $(FUNCTION_H) $(TIMEVAR_H) $(TM_H) coretypes.h $(TREE_DUMP_H) \
@@ -2127,7 +2132,7 @@ tree-ssanames.o : tree-ssanames.c $(CONFIG_H) $(SYSTEM_H) coretypes.h \
    $(TM_H) $(TREE_H) $(VARRAY_H) $(GGC_H) $(TREE_FLOW_H)
 tree-phinodes.o : tree-phinodes.c $(CONFIG_H) $(SYSTEM_H) coretypes.h \
    $(TM_H) $(TREE_H) $(VARRAY_H) $(GGC_H) $(BASIC_BLOCK_H) $(TREE_FLOW_H) \
-   gt-tree-phinodes.h $(RTL_H) $(TOPLEV_H)
+   gt-tree-phinodes.h $(RTL_H) $(TOPLEV.H)  $(GIMPLE_H)
 domwalk.o : domwalk.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
    $(TREE_H) $(BASIC_BLOCK_H) $(TREE_FLOW_H) domwalk.h $(GGC_H)
 tree-ssa-live.o : tree-ssa-live.c $(TREE_FLOW_H) $(CONFIG_H) $(SYSTEM_H) \
@@ -2136,18 +2141,18 @@ tree-ssa-live.o : tree-ssa-live.c $(TREE_FLOW_H) $(CONFIG_H) $(SYSTEM_H) \
 tree-ssa-copyrename.o : tree-ssa-copyrename.c $(TREE_FLOW_H) $(CONFIG_H) \
    $(SYSTEM_H) $(TREE_H) $(DIAGNOSTIC_H) $(FUNCTION_H) $(TIMEVAR_H) tree-pass.h \
    $(TM_H) coretypes.h $(TREE_DUMP_H) $(TREE_SSA_LIVE_H) $(BASIC_BLOCK_H) \
-   $(BITMAP_H) $(FLAGS_H) $(HASHTAB_H) langhooks.h $(TREE_GIMPLE_H) \
-   $(TREE_INLINE_H)
+   $(BITMAP_H) $(FLAGS_H) $(HASHTAB_H) langhooks.h $(GIMPLE_H) \
+   $(TREE_INLINE_H) $(GIMPLE_H)
 tree-ssa-pre.o : tree-ssa-pre.c $(TREE_FLOW_H) $(CONFIG_H) \
    $(SYSTEM_H) $(TREE_H) $(GGC_H) $(DIAGNOSTIC_H) $(TIMEVAR_H) $(FIBHEAP_H) \
    $(TM_H) coretypes.h $(TREE_DUMP_H) tree-pass.h $(FLAGS_H) langhooks.h $(CFGLOOP_H) \
-   alloc-pool.h $(BASIC_BLOCK_H) $(BITMAP_H) $(HASHTAB_H) $(TREE_GIMPLE_H) \
+   alloc-pool.h $(BASIC_BLOCK_H) $(BITMAP_H) $(HASHTAB_H) $(GIMPLE_H) \
    $(TREE_INLINE_H) tree-iterator.h tree-ssa-sccvn.h $(PARAMS_H) \
    $(DBGCNT_H)
 tree-ssa-sccvn.o : tree-ssa-sccvn.c $(TREE_FLOW_H) $(CONFIG_H) \
    $(SYSTEM_H) $(TREE_H) $(GGC_H) $(DIAGNOSTIC_H) $(TIMEVAR_H) $(FIBHEAP_H) \
    $(TM_H) coretypes.h $(TREE_DUMP_H) tree-pass.h $(FLAGS_H) $(CFGLOOP_H) \
-   alloc-pool.h $(BASIC_BLOCK_H) $(BITMAP_H) langhooks.h $(HASHTAB_H) $(TREE_GIMPLE_H) \
+   alloc-pool.h $(BASIC_BLOCK_H) $(BITMAP_H) langhooks.h $(HASHTAB_H) $(GIMPLE_H) \
    $(TREE_INLINE_H) tree-iterator.h tree-ssa-propagate.h tree-ssa-sccvn.h \
    $(PARAMS_H)
 tree-vrp.o : tree-vrp.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(TREE_H) \
@@ -2178,10 +2183,10 @@ tree-ssa-sink.o : tree-ssa-sink.c $(TREE_FLOW_H) $(CONFIG_H) \
    $(SYSTEM_H) $(TREE_H) $(GGC_H) $(DIAGNOSTIC_H) $(TIMEVAR_H) \
    $(TM_H) coretypes.h $(TREE_DUMP_H) tree-pass.h $(FLAGS_H) alloc-pool.h \
    $(BASIC_BLOCK_H) $(BITMAP_H) $(CFGLOOP_H) $(FIBHEAP_H) $(HASHTAB_H) \
-   langhooks.h $(REAL_H) $(TREE_GIMPLE_H) $(TREE_INLINE_H) tree-iterator.h
+   langhooks.h $(REAL_H) $(GIMPLE_H) $(TREE_INLINE_H) tree-iterator.h
 tree-nested.o: tree-nested.c $(CONFIG_H) $(SYSTEM_H) $(TM_H) $(TREE_H) \
    $(RTL_H) $(TM_P_H) $(FUNCTION_H) $(TREE_DUMP_H) $(TREE_INLINE_H) \
-   tree-iterator.h $(TREE_GIMPLE_H) $(CGRAPH_H) $(EXPR_H) langhooks.h \
+   tree-iterator.h $(GIMPLE_H) $(CGRAPH_H) $(EXPR_H) langhooks.h \
    $(GGC_H) gt-tree-nested.h coretypes.h $(TREE_FLOW_H) pointer-set.h
 tree-if-conv.o: tree-if-conv.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
    $(TREE_H) $(FLAGS_H) $(TIMEVAR_H) $(BASIC_BLOCK_H) $(TREE_FLOW_H) \
@@ -2189,13 +2194,13 @@ tree-if-conv.o: tree-if-conv.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
    $(SCEV_H) tree-pass.h $(DIAGNOSTIC_H) $(TARGET_H) $(TREE_DUMP_H) \
    $(VARRAY_H)
 tree-iterator.o : tree-iterator.c $(CONFIG_H) $(SYSTEM_H) $(TREE_H) \
-   coretypes.h $(GGC_H) tree-iterator.h $(TREE_GIMPLE_H) gt-tree-iterator.h
+   coretypes.h $(GGC_H) tree-iterator.h $(GIMPLE_H) gt-tree-iterator.h
 tree-dfa.o : tree-dfa.c $(TREE_FLOW_H) $(CONFIG_H) $(SYSTEM_H) \
    $(RTL_H) $(TREE_H) $(TM_P_H) $(EXPR_H) $(GGC_H) output.h $(DIAGNOSTIC_H) \
    $(TREE_INLINE_H) $(HASHTAB_H) pointer-set.h $(FLAGS_H) $(FUNCTION_H) \
    $(TIMEVAR_H) convert.h $(TM_H) coretypes.h langhooks.h $(TREE_DUMP_H) \
    tree-pass.h $(PARAMS_H) $(CGRAPH_H) $(BASIC_BLOCK_H) hard-reg-set.h \
-   $(TREE_GIMPLE_H)
+   $(GIMPLE_H)
 tree-ssa-operands.o : tree-ssa-operands.c $(TREE_FLOW_H) $(CONFIG_H) \
    $(SYSTEM_H) $(TREE_H) $(GGC_H) $(DIAGNOSTIC_H) $(TREE_INLINE_H) \
    $(FLAGS_H) $(FUNCTION_H) $(TM_H) $(TIMEVAR_H) tree-pass.h $(TOPLEV_H) \
@@ -2250,7 +2255,7 @@ tree-ssa-loop-ivopts.o : tree-ssa-loop-ivopts.c $(TREE_FLOW_H) $(CONFIG_H) \
    $(CFGLOOP_H) $(PARAMS_H) langhooks.h $(BASIC_BLOCK_H) hard-reg-set.h \
    tree-chrec.h $(VARRAY_H) tree-affine.h pointer-set.h $(TARGET_H)
 tree-affine.o : tree-affine.c tree-affine.h $(CONFIG_H) pointer-set.h \
-   $(SYSTEM_H) $(RTL_H) $(TREE_H) $(TM_P_H) hard-reg-set.h $(TREE_GIMPLE_H) \
+   $(SYSTEM_H) $(RTL_H) $(TREE_H) $(TM_P_H) hard-reg-set.h $(GIMPLE_H) \
    output.h $(DIAGNOSTIC_H) $(TM_H) coretypes.h $(TREE_DUMP_H) $(FLAGS_H)
 tree-ssa-loop-manip.o : tree-ssa-loop-manip.c $(TREE_FLOW_H) $(CONFIG_H) \
    $(SYSTEM_H) coretypes.h $(TM_H) $(TREE_H) $(RTL_H) $(TM_P_H) hard-reg-set.h \
@@ -2274,12 +2279,12 @@ tree-ssa-alias.o : tree-ssa-alias.c $(TREE_FLOW_H) $(CONFIG_H) $(SYSTEM_H) \
    $(RTL_H) $(TREE_H) $(TM_P_H) $(EXPR_H) $(GGC_H) $(TREE_INLINE_H) $(FLAGS_H) \
    $(FUNCTION_H) $(TIMEVAR_H) convert.h $(TM_H) coretypes.h langhooks.h \
    $(TREE_DUMP_H) tree-pass.h $(PARAMS_H) $(BASIC_BLOCK_H) $(DIAGNOSTIC_H) \
-   hard-reg-set.h $(TREE_GIMPLE_H) vec.h tree-ssa-structalias.h \
+   hard-reg-set.h $(GIMPLE_H) vec.h tree-ssa-structalias.h \
    $(IPA_TYPE_ESCAPE_H) vecprim.h pointer-set.h alloc-pool.h
 tree-ssa-reassoc.o : tree-ssa-reassoc.c $(TREE_FLOW_H) $(CONFIG_H) \
    $(SYSTEM_H) $(TREE_H) $(GGC_H) $(DIAGNOSTIC_H) errors.h $(TIMEVAR_H) \
    $(TM_H) coretypes.h $(TREE_DUMP_H) tree-pass.h $(FLAGS_H) tree-iterator.h\
-   $(BASIC_BLOCK_H) $(TREE_GIMPLE_H) $(TREE_INLINE_H) vec.h langhooks.h \
+   $(BASIC_BLOCK_H) $(GIMPLE_H) $(TREE_INLINE_H) vec.h langhooks.h \
    alloc-pool.h pointer-set.h $(CFGLOOP_H)
 tree-optimize.o : tree-optimize.c $(TREE_FLOW_H) $(CONFIG_H) $(SYSTEM_H) \
    $(RTL_H) $(TREE_H) $(TM_P_H) hard-reg-set.h $(EXPR_H) $(GGC_H) output.h \
@@ -2289,25 +2294,28 @@ tree-optimize.o : tree-optimize.c $(TREE_FLOW_H) $(CONFIG_H) $(SYSTEM_H) \
    $(CFGLOOP_H) except.h
 
 c-gimplify.o : c-gimplify.c $(CONFIG_H) $(SYSTEM_H) $(TREE_H) \
-   $(C_TREE_H) $(C_COMMON_H) $(DIAGNOSTIC_H) $(TREE_GIMPLE_H) $(VARRAY_H) \
+   $(C_TREE_H) $(C_COMMON_H) $(DIAGNOSTIC_H) $(GIMPLE_H) $(VARRAY_H) \
    $(FLAGS_H) langhooks.h $(TOPLEV_H) $(RTL_H) $(TREE_FLOW_H) $(LANGHOOKS_DEF_H) \
    $(TM_H) coretypes.h $(C_PRETTY_PRINT_H) $(CGRAPH_H) $(BASIC_BLOCK_H) \
    hard-reg-set.h $(TREE_DUMP_H) $(TREE_INLINE_H)
-gimplify.o : gimplify.c $(CONFIG_H) $(SYSTEM_H) $(TREE_H) \
-   $(DIAGNOSTIC_H) $(TREE_GIMPLE_H) $(TREE_INLINE_H) $(VARRAY_H) langhooks.h \
+gimplify.o : gimplify.c $(CONFIG_H) $(SYSTEM_H) $(TREE_H) $(GIMPLE_H) \
+   $(DIAGNOSTIC_H) $(GIMPLE_H) $(TREE_INLINE_H) $(VARRAY_H) langhooks.h \
    $(LANGHOOKS_DEF_H) $(TREE_FLOW_H) $(CGRAPH_H) $(TIMEVAR_H) $(TM_H) \
    coretypes.h except.h $(FLAGS_H) $(RTL_H) $(FUNCTION_H) $(EXPR_H) output.h \
    $(GGC_H) gt-gimplify.h $(HASHTAB_H) $(TARGET_H) $(TOPLEV_H) $(OPTABS_H) \
-   $(REAL_H) $(SPLAY_TREE_H)
+   $(REAL_H) $(SPLAY_TREE_H) vec.h tree-iterator.h
+gimple-iterator.o : gimple-iterator.c $(CONFIG_H) $(SYSTEM_H) coretypes.h \
+   $(TREE_H) $(GIMPLE_H) $(TREE_FLOW_H) value-prof.h
 gimple-low.o : gimple-low.c $(CONFIG_H) $(SYSTEM_H) $(TREE_H) \
-   $(DIAGNOSTIC_H) $(TREE_GIMPLE_H) $(TREE_INLINE_H) $(VARRAY_H) langhooks.h \
+   $(DIAGNOSTIC_H) $(GIMPLE_H) $(TREE_INLINE_H) $(VARRAY_H) langhooks.h \
    $(LANGHOOKS_DEF_H) $(TREE_FLOW_H) $(TIMEVAR_H) $(TM_H) coretypes.h \
    except.h $(FLAGS_H) $(RTL_H) $(FUNCTION_H) $(EXPR_H) tree-pass.h \
-   $(HASHTAB_H) $(TOPLEV_H)
+   $(HASHTAB_H) $(TOPLEV.H) tree-iterator.h
 omp-low.o : omp-low.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(TREE_H) \
-   $(RTL_H) $(TREE_GIMPLE_H) $(TREE_INLINE_H) langhooks.h $(DIAGNOSTIC_H) \
+   $(RTL_H) $(GIMPLE_H) $(TREE_INLINE_H) langhooks.h $(DIAGNOSTIC_H) \
    $(TREE_FLOW_H) $(TIMEVAR_H) $(FLAGS_H) $(EXPR_H) $(TOPLEV_H) tree-pass.h \
-   $(GGC_H) except.h $(SPLAY_TREE_H) $(OPTABS_H) $(CFGLOOP_H)
+   $(GGC_H) except.h $(SPLAY_TREE_H) $(OPTABS_H) $(CFGLOOP_H) \
+   tree-iterator.h
 tree-browser.o : tree-browser.c tree-browser.def $(CONFIG_H) $(SYSTEM_H) \
    $(TREE_H) $(TREE_INLINE_H) $(DIAGNOSTIC_H) $(HASHTAB_H) \
    $(TM_H) coretypes.h
@@ -2365,16 +2373,19 @@ tree-stdarg.o: tree-stdarg.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
 tree-object-size.o: tree-object-size.c $(CONFIG_H) $(SYSTEM_H) coretypes.h \
    $(TM_H) $(TREE_H) $(TOPLEV_H) $(DIAGNOSTIC_H) $(TREE_FLOW_H) tree-pass.h \
    tree-ssa-propagate.h
-tree-gimple.o : tree-gimple.c $(CONFIG_H) $(SYSTEM_H) $(TREE_H) $(EXPR_H) \
-   $(RTL_H) $(TREE_GIMPLE_H) $(TM_H) coretypes.h $(BITMAP_H) $(GGC_H) \
-   output.h $(TREE_FLOW_H)
+gimple.o : gimple.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TREE_H) \
+   $(GGC_H) $(GIMPLE_H) $(GIMPLE_H) $(DIAGNOSTIC_H) gt-gimple.h \
+   $(TREE_FLOW_H) value-prof.h $(FLAGS_H)
+gimple-pretty-print.o : gimple-pretty-print.c $(CONFIG_H) $(SYSTEM_H) \
+   $(TREE_H) $(DIAGNOSTIC_H) $(REAL_H) $(HASHTAB_H) $(TREE_FLOW_H) \
+   $(TM_H) coretypes.h tree-pass.h $(GIMPLE_H) value-prof.h
 tree-mudflap.o : $(CONFIG_H) $(SYSTEM_H) $(TREE_H) $(TREE_INLINE_H) \
-   $(TREE_GIMPLE_H) $(DIAGNOSTIC_H) $(HASHTAB_H) langhooks.h tree-mudflap.h \
+   $(GIMPLE_H) $(DIAGNOSTIC_H) $(HASHTAB_H) langhooks.h tree-mudflap.h \
    $(TM_H) coretypes.h $(TREE_DUMP_H) tree-pass.h $(CGRAPH_H) $(GGC_H) \
    gt-tree-mudflap.h $(BASIC_BLOCK_H) $(FLAGS_H) $(FUNCTION_H) hard-reg-set.h \
-   $(RTL_H) $(TM_P_H) $(TREE_FLOW_H) $(TOPLEV_H)
+   $(RTL_H) $(TM_P_H) $(TREE_FLOW_H) $(TOPLEV.H) $(GIMPLE_H) tree-iterator.h
 tree-nomudflap.o : $(CONFIG_H) $(SYSTEM_H) $(TREE_H) $(TREE_INLINE_H) \
-   $(C_TREE_H) $(C_COMMON_H) $(TREE_GIMPLE_H) $(DIAGNOSTIC_H) $(HASHTAB_H) \
+   $(C_TREE_H) $(C_COMMON_H) $(GIMPLE_H) $(DIAGNOSTIC_H) $(HASHTAB_H) \
    output.h $(VARRAY_H) langhooks.h tree-mudflap.h $(TM_H) coretypes.h \
    $(GGC_H) gt-tree-mudflap.h tree-pass.h $(TOPLEV_H)
 tree-pretty-print.o : tree-pretty-print.c $(CONFIG_H) $(SYSTEM_H) \
@@ -2383,7 +2394,8 @@ tree-pretty-print.o : tree-pretty-print.c $(CONFIG_H) $(SYSTEM_H) \
    value-prof.h fixed-value.h output.h
 fold-const.o : fold-const.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
    $(TREE_H) $(FLAGS_H) $(REAL_H) $(TOPLEV_H) $(HASHTAB_H) $(EXPR_H) $(RTL_H) \
-   $(GGC_H) $(TM_P_H) langhooks.h $(MD5_H) intl.h fixed-value.h $(TARGET_H)
+   $(GGC_H) $(TM_P_H) langhooks.h $(MD5_H) intl.h fixed-value.h $(TARGET_H) \
+   $(GIMPLE_H)
 diagnostic.o : diagnostic.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
    $(TREE_H) version.h $(TM_P_H) $(FLAGS_H) $(INPUT_H) $(TOPLEV_H) intl.h \
    $(DIAGNOSTIC_H) langhooks.h $(LANGHOOKS_DEF_H) diagnostic.def opts.h
@@ -2406,7 +2418,7 @@ toplev.o : toplev.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(TREE_H) \
    value-prof.h $(PARAMS_H) $(TM_P_H) reload.h dwarf2asm.h $(TARGET_H) \
    langhooks.h insn-flags.h $(CFGLAYOUT_H) $(CFGLOOP_H) hosthooks.h \
    $(CGRAPH_H) $(COVERAGE_H) alloc-pool.h $(GGC_H) $(INTEGRATE_H) \
-   opts.h params.def tree-mudflap.h $(REAL_H) tree-pass.h
+   opts.h params.def tree-mudflap.h $(REAL_H) tree-pass.h $(GIMPLE_H)
        $(CC) $(ALL_CFLAGS) $(ALL_CPPFLAGS) \
          -DTARGET_NAME=\"$(target_noncanonical)\" \
          -c $(srcdir)/toplev.c $(OUTPUT_OPTION)
@@ -2448,7 +2460,7 @@ varasm.o : varasm.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(TREE_H) \
    $(HASHTAB_H) $(TARGET_H) langhooks.h gt-varasm.h $(BASIC_BLOCK_H) \
    $(CFGLAYOUT_H) $(CGRAPH_H) targhooks.h tree-mudflap.h $(REAL_H) tree-iterator.h
 function.o : function.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
-   $(TREE_H) $(CFGLAYOUT_H) $(TREE_GIMPLE_H) $(FLAGS_H) $(FUNCTION_H) $(EXPR_H) \
+   $(TREE_H) $(CFGLAYOUT_H) $(GIMPLE_H) $(FLAGS_H) $(FUNCTION_H) $(EXPR_H) \
    $(OPTABS_H) libfuncs.h $(REGS_H) hard-reg-set.h insn-config.h $(RECOG_H) \
    output.h $(TOPLEV_H) except.h $(HASHTAB_H) $(GGC_H) $(TM_P_H) langhooks.h \
    gt-function.h $(TARGET_H) $(BASIC_BLOCK_H) $(INTEGRATE_H) $(PREDICT_H) \
@@ -2477,7 +2489,7 @@ dojump.o : dojump.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) $(TREE_
    $(FLAGS_H) $(FUNCTION_H) $(EXPR_H) $(OPTABS_H) $(INSN_ATTR_H) insn-config.h \
    langhooks.h $(GGC_H) gt-dojump.h vecprim.h
 builtins.o : builtins.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
-   $(TREE_H) $(TREE_GIMPLE_H) $(FLAGS_H) $(TARGET_H) $(FUNCTION_H) $(REGS_H) \
+   $(TREE_H) $(GIMPLE_H) $(FLAGS_H) $(TARGET_H) $(FUNCTION_H) $(REGS_H) \
    $(EXPR_H) $(OPTABS_H) insn-config.h $(RECOG_H) output.h typeclass.h \
    hard-reg-set.h $(TOPLEV_H) hard-reg-set.h except.h $(TM_P_H) $(PREDICT_H) \
    libfuncs.h $(REAL_H) langhooks.h $(BASIC_BLOCK_H) tree-mudflap.h \
@@ -2485,7 +2497,7 @@ builtins.o : builtins.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
 calls.o : calls.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
    $(TREE_H) $(FLAGS_H) $(EXPR_H) $(OPTABS_H) langhooks.h $(TARGET_H) \
    libfuncs.h $(REGS_H) $(TOPLEV_H) output.h $(FUNCTION_H) $(TIMEVAR_H) $(TM_P_H) \
-   $(CGRAPH_H) except.h sbitmap.h $(DBGCNT_H)
+   $(CGRAPH_H) except.h sbitmap.h $(DBGCNT_H) $(TREE_FLOW_H)
 expmed.o : expmed.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) $(TREE_H) \
    $(FLAGS_H) insn-config.h $(EXPR_H) $(OPTABS_H) $(RECOG_H) $(REAL_H) \
    $(TOPLEV_H) $(TM_P_H) langhooks.h $(DF_H) $(TARGET_H)
@@ -2551,16 +2563,16 @@ cgraph.o : cgraph.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(TREE_H) \
    $(TREE_INLINE_H) $(VARRAY_H) $(TREE_DUMP_H) $(TREE_FLOW_H)
 cgraphunit.o : cgraphunit.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
    $(TREE_H) langhooks.h $(TREE_INLINE_H) $(TOPLEV_H) $(FLAGS_H) $(GGC_H) \
-   $(TARGET_H) $(CGRAPH_H) intl.h pointer-set.h $(FUNCTION_H) $(TREE_GIMPLE_H) \
+   $(TARGET_H) $(CGRAPH_H) intl.h pointer-set.h $(FUNCTION_H) $(GIMPLE_H) \
    $(TREE_FLOW_H) tree-pass.h $(C_COMMON_H) debug.h $(DIAGNOSTIC_H) \
    $(FIBHEAP_H) output.h $(PARAMS_H) $(RTL_H) $(TIMEVAR_H) $(IPA_PROP_H) \
-   gt-cgraphunit.h 
+   gt-cgraphunit.h tree-iterator.h
 cgraphbuild.o : cgraphbuild.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
-   $(TREE_H) langhooks.h $(CGRAPH_H) intl.h pointer-set.h $(TREE_GIMPLE_H) \
+   $(TREE_H) langhooks.h $(CGRAPH_H) intl.h pointer-set.h $(GIMPLE_H) \
    $(TREE_FLOW_H) tree-pass.h
 varpool.o : varpool.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
    $(TREE_H) $(CGRAPH_H) langhooks.h $(DIAGNOSTIC_H) $(HASHTAB_H) \
-   $(GGC_H) $(TIMEVAR_H) debug.h $(TARGET_H) output.h $(TREE_GIMPLE_H) \
+   $(GGC_H) $(TIMEVAR_H) debug.h $(TARGET_H) output.h $(GIMPLE_H) \
    $(TREE_FLOW_H) gt-varpool.h
 ipa.o : ipa.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(CGRAPH_H) \
    tree-pass.h $(TIMEVAR_H)
@@ -2584,30 +2596,31 @@ ipa-inline.o : ipa-inline.c gt-ipa-inline.h $(CONFIG_H) $(SYSTEM_H) coretypes.h
    $(HASHTAB_H) $(COVERAGE_H) $(GGC_H) $(TREE_FLOW_H) $(RTL_H) $(IPA_PROP_H)
 ipa-utils.o : ipa-utils.c $(IPA_UTILS_H) $(CONFIG_H) $(SYSTEM_H) \
    coretypes.h $(TM_H) $(TREE_H) $(TREE_FLOW_H) $(TREE_INLINE_H) langhooks.h \
-   pointer-set.h $(GGC_H) $(C_COMMON_H) $(TREE_GIMPLE_H) \
+   pointer-set.h $(GGC_H) $(C_COMMON_H) $(GIMPLE_H) \
    $(CGRAPH_H) output.h $(FLAGS_H) tree-pass.h $(TIMEVAR_H) $(DIAGNOSTIC_H)
 ipa-reference.o : ipa-reference.c $(CONFIG_H) $(SYSTEM_H) \
    coretypes.h $(TM_H) $(TREE_H) $(TREE_FLOW_H) $(TREE_INLINE_H) langhooks.h \
    pointer-set.h $(GGC_H) $(IPA_REFERENCE_H) $(IPA_UTILS_H) $(C_COMMON_H) \
-   $(TREE_GIMPLE_H) $(CGRAPH_H) output.h $(FLAGS_H) tree-pass.h \
+   $(GIMPLE_H) $(CGRAPH_H) output.h $(FLAGS_H) tree-pass.h \
    $(TIMEVAR_H) $(DIAGNOSTIC_H) $(FUNCTION_H)
 
 ipa-pure-const.o : ipa-pure-const.c $(CONFIG_H) $(SYSTEM_H) \
    coretypes.h $(TM_H) $(TREE_H) $(TREE_FLOW_H) $(TREE_INLINE_H) langhooks.h \
    pointer-set.h $(GGC_H) $(IPA_UTILS_H) $(C_COMMON_H) $(TARGET_H) \
-   $(TREE_GIMPLE_H) $(CGRAPH_H) output.h $(FLAGS_H) tree-pass.h $(TIMEVAR_H) \
+   $(GIMPLE_H) $(CGRAPH_H) output.h $(FLAGS_H) tree-pass.h $(TIMEVAR_H) \
    $(DIAGNOSTIC_H)
 ipa-type-escape.o : ipa-type-escape.c $(CONFIG_H) $(SYSTEM_H) \
    coretypes.h $(TM_H) $(TREE_H) $(TREE_FLOW_H) $(TREE_INLINE_H) langhooks.h \
    pointer-set.h $(GGC_H) $(IPA_TYPE_ESCAPE_H) $(IPA_UTILS_H) $(C_COMMON_H) \
-   $(TREE_GIMPLE_H) $(CGRAPH_H) output.h $(FLAGS_H) tree-pass.h \
+   $(GIMPLE_H) $(CGRAPH_H) output.h $(FLAGS_H) tree-pass.h \
    $(TIMEVAR_H) $(DIAGNOSTIC_H) $(FUNCTION_H)
 ipa-struct-reorg.o: ipa-struct-reorg.c ipa-struct-reorg.h $(CONFIG_H) $(SYSTEM_H) \
-   coretypes.h $(TM_H) $(GGC_H) $(TREE_H) $(RTL_H) $(TREE_GIMPLE_H) tree-inline.h \
+   coretypes.h $(TM_H) $(GGC_H) $(TREE_H) $(RTL_H) $(GIMPLE_H) tree-inline.h \
    $(TREE_FLOW_H) langhooks.h pointer-set.h $(HASHTAB_H) $(C_TREE_H) $(TOPLEV_H) \
    $(FLAGS_H) debug.h $(TARGET_H) $(CGRAPH_H) $(DIAGNOSTIC_H) $(TIMEVAR_H) \
    $(PARAMS_H) $(FIBHEAP_H) intl.h $(FUNCTION_H) $(BASIC_BLOCK_H) tree-iterator.h \
-   tree-pass.h opts.h $(IPA_TYPE_ESCAPE_H) $(TREE_DUMP_H) $(C_COMMON_H)
+   tree-pass.h opts.h $(IPA_TYPE_ESCAPE_H) $(TREE_DUMP_H) $(C_COMMON_H) \
+   $(GIMPLE_H)
 
 coverage.o : coverage.c $(GCOV_IO_H) $(CONFIG_H) $(SYSTEM_H) coretypes.h \
    $(TM_H) $(RTL_H) $(TREE_H) $(FLAGS_H) output.h $(REGS_H) $(EXPR_H) \
@@ -2658,12 +2671,12 @@ mode-switching.o : mode-switching.c $(CONFIG_H) $(SYSTEM_H) coretypes.h \
 tree-ssa-dce.o : tree-ssa-dce.c $(CONFIG_H) $(SYSTEM_H) $(TREE_H) \
     $(RTL_H) $(TM_P_H) $(TREE_FLOW_H) $(DIAGNOSTIC_H) $(TIMEVAR_H) $(TM_H) \
     coretypes.h $(TREE_DUMP_H) tree-pass.h $(FLAGS_H) $(BASIC_BLOCK_H) \
-    $(GGC_H) hard-reg-set.h $(OBSTACK_H) $(TREE_GIMPLE_H) $(CFGLOOP_H) \
+    $(GGC_H) hard-reg-set.h $(OBSTACK_H) $(GIMPLE_H) $(CFGLOOP_H) \
     tree-scalar-evolution.h
 tree-call-cdce.o : tree-call-cdce.c $(CONFIG_H) $(SYSTEM_H) $(TREE_H) \
     $(RTL_H) $(TM_P_H) $(TREE_FLOW_H) $(DIAGNOSTIC_H) $(TIMEVAR_H) $(TM_H) \
     coretypes.h $(TREE_DUMP_H) tree-pass.h $(FLAGS_H) $(BASIC_BLOCK_H) \
-    $(GGC_H) hard-reg-set.h $(OBSTACK_H) $(TREE_GIMPLE_H)
+    $(GGC_H) hard-reg-set.h $(OBSTACK_H) $(GIMPLE_H)
 tree-ssa-ccp.o : tree-ssa-ccp.c $(TREE_FLOW_H) $(CONFIG_H) \
    $(SYSTEM_H) $(RTL_H) $(TREE_H) $(TM_P_H) $(EXPR_H) $(GGC_H) output.h \
    $(DIAGNOSTIC_H) $(FUNCTION_H) $(TIMEVAR_H) $(TM_H) coretypes.h \
@@ -2671,19 +2684,19 @@ tree-ssa-ccp.o : tree-ssa-ccp.c $(TREE_FLOW_H) $(CONFIG_H) \
    tree-ssa-propagate.h value-prof.h $(FLAGS_H) $(TARGET_H) $(TOPLEV_H)
 tree-sra.o : tree-sra.c $(CONFIG_H) $(SYSTEM_H) $(TREE_H) $(RTL_H) \
     $(TM_P_H) $(TREE_FLOW_H) $(DIAGNOSTIC_H) $(TREE_INLINE_H) \
-    $(TIMEVAR_H) $(TM_H) coretypes.h $(TREE_DUMP_H) $(TREE_GIMPLE_H) \
+    $(TIMEVAR_H) $(TM_H) coretypes.h $(TREE_DUMP_H) $(GIMPLE_H) \
     langhooks.h tree-pass.h $(FLAGS_H) $(EXPR_H) $(BASIC_BLOCK_H) \
     $(BITMAP_H) $(GGC_H) hard-reg-set.h $(OBSTACK_H) $(PARAMS_H) $(TARGET_H)
 tree-switch-conversion.o : tree-switch-conversion.c $(CONFIG_H) $(SYSTEM_H) \
     $(TREE_H) $(TM_P_H) $(TREE_FLOW_H) $(DIAGNOSTIC_H) $(TREE_INLINE_H) \
-    $(TIMEVAR_H) $(TM_H) coretypes.h $(TREE_DUMP_H) $(TREE_GIMPLE_H) \
+    $(TIMEVAR_H) $(TM_H) coretypes.h $(TREE_DUMP_H) $(GIMPLE_H) \
     tree-pass.h $(FLAGS_H) $(EXPR_H) $(BASIC_BLOCK_H) output.h \
     $(GGC_H) $(OBSTACK_H) $(PARAMS_H) $(CPPLIB_H) $(PARAMS_H)
 tree-complex.o : tree-complex.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TREE_H) \
-    $(TM_H) $(RTL_H) $(REAL_H) $(FLAGS_H) $(TREE_FLOW_H) $(TREE_GIMPLE_H) \
+    $(TM_H) $(RTL_H) $(REAL_H) $(FLAGS_H) $(TREE_FLOW_H) $(GIMPLE_H) \
     tree-iterator.h tree-pass.h tree-ssa-propagate.h $(DIAGNOSTIC_H)
 tree-vect-generic.o : tree-vect-generic.c $(CONFIG_H) $(SYSTEM_H) $(TREE_H) \
-    $(TM_H) $(TREE_FLOW_H) $(TREE_GIMPLE_H) tree-iterator.h tree-pass.h \
+    $(TM_H) $(TREE_FLOW_H) $(GIMPLE_H) tree-iterator.h tree-pass.h \
     $(FLAGS_H) $(OPTABS_H) $(RTL_H) $(MACHMODE_H) $(EXPR_H) \
     langhooks.h $(FLAGS_H) $(DIAGNOSTIC_H) gt-tree-vect-generic.h $(GGC_H) \
     coretypes.h insn-codes.h
@@ -2953,9 +2966,9 @@ ifcvt.o : ifcvt.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
    $(REAL_H) $(OPTABS_H) $(CFGLOOP_H) hard-reg-set.h $(TIMEVAR_H) tree-pass.h \
    $(DF_H) $(DBGCNT_H)
 lambda-mat.o : lambda-mat.c $(LAMBDA_H) $(GGC_H) $(SYSTEM_H) $(CONFIG_H) \
-   $(TM_H) coretypes.h $(TREE_H)
+   $(TM_H) coretypes.h $(TREE_H) $(TREE_FLOW_H)
 lambda-trans.o: lambda-trans.c $(LAMBDA_H) $(GGC_H) $(SYSTEM_H) $(CONFIG_H) \
-   $(TM_H) coretypes.h $(TARGET_H) $(TREE_H)
+   $(TM_H) coretypes.h $(TARGET_H) $(TREE_H) $(TREE_FLOW_H)
 lambda-code.o: lambda-code.c $(LAMBDA_H) $(GGC_H) $(SYSTEM_H) $(CONFIG_H) \
    $(TM_H) $(OPTABS_H) $(TREE_H) $(RTL_H) $(BASIC_BLOCK_H) \
    $(DIAGNOSTIC_H) $(TREE_FLOW_H) $(TREE_DUMP_H) $(TIMEVAR_H) $(CFGLOOP_H) \
@@ -2978,7 +2991,7 @@ $(out_object_file): $(out_file) $(CONFIG_H) coretypes.h $(TM_H) $(TREE_H) \
    $(RTL_H) $(REGS_H) hard-reg-set.h insn-config.h conditions.h \
    output.h $(INSN_ATTR_H) $(SYSTEM_H) $(TOPLEV_H) $(TARGET_H) libfuncs.h \
    $(TARGET_DEF_H) $(FUNCTION_H) $(SCHED_INT_H) $(TM_P_H) $(EXPR_H) \
-   langhooks.h $(GGC_H) $(OPTABS_H) $(REAL_H) tm-constrs.h
+   langhooks.h $(GGC_H) $(OPTABS_H) $(REAL_H) tm-constrs.h $(GIMPLE_H)
        $(CC) -c $(ALL_CFLAGS) $(ALL_CPPFLAGS) \
                $(out_file) $(OUTPUT_OPTION)
 
@@ -3203,6 +3216,7 @@ GTFILES = $(CPP_ID_DATA_H) $(srcdir)/input.h $(srcdir)/coretypes.h \
   $(srcdir)/alias.c $(srcdir)/bitmap.c $(srcdir)/cselib.c $(srcdir)/cgraph.c \
   $(srcdir)/ipa-prop.c $(srcdir)/ipa-cp.c $(srcdir)/ipa-inline.c $(srcdir)/matrix-reorg.c \
   $(srcdir)/dbxout.c $(srcdir)/ipa-struct-reorg.c $(srcdir)/dwarf2out.c $(srcdir)/dwarf2asm.c \
+  $(srcdir)/tree-vect-generic.c \
   $(srcdir)/dojump.c \
   $(srcdir)/emit-rtl.c $(srcdir)/except.c $(srcdir)/explow.c $(srcdir)/expr.c \
   $(srcdir)/function.c $(srcdir)/except.h \
@@ -3211,18 +3225,24 @@ GTFILES = $(CPP_ID_DATA_H) $(srcdir)/input.h $(srcdir)/coretypes.h \
   $(srcdir)/reg-stack.c $(srcdir)/cfglayout.c $(srcdir)/cfglayout.h \
   $(srcdir)/sdbout.c $(srcdir)/stor-layout.c \
   $(srcdir)/stringpool.c $(srcdir)/tree.c $(srcdir)/varasm.c \
-  $(srcdir)/tree-mudflap.c $(srcdir)/tree-flow.h $(srcdir)/tree-scalar-evolution.c \
+  $(srcdir)/gimple.h $(srcdir)/gimple.c \
+  $(srcdir)/tree-mudflap.c $(srcdir)/tree-flow.h \
   $(srcdir)/tree-ssanames.c $(srcdir)/tree-eh.c $(srcdir)/tree-ssa-address.c \
-  $(srcdir)/tree-phinodes.c $(srcdir)/tree-cfg.c \
-  $(srcdir)/tree-dfa.c $(srcdir)/tree-ssa-propagate.c \
+  $(srcdir)/tree-cfg.c \
+  $(srcdir)/tree-dfa.c \
   $(srcdir)/tree-iterator.c $(srcdir)/gimplify.c \
-  $(srcdir)/tree-chrec.h $(srcdir)/tree-vect-generic.c \
+  $(srcdir)/tree-chrec.h \
+  $(srcdir)/tree-scalar-evolution.c \
   $(srcdir)/tree-ssa-operands.h \
   $(srcdir)/tree-profile.c $(srcdir)/tree-nested.c \
-  $(srcdir)/ipa-reference.c $(srcdir)/tree-ssa-structalias.h \
-  $(srcdir)/tree-ssa-structalias.c $(srcdir)/tree-parloops.c \
-  $(srcdir)/omp-low.c $(srcdir)/varpool.c \
+  $(srcdir)/varpool.c \
+  $(srcdir)/tree-parloops.c \
+  $(srcdir)/omp-low.c \
   $(srcdir)/targhooks.c $(out_file) $(srcdir)/passes.c $(srcdir)/cgraphunit.c \
+  $(srcdir)/tree-ssa-propagate.c \
+  $(srcdir)/tree-phinodes.c \
+  $(srcdir)/ipa-reference.c $(srcdir)/tree-ssa-structalias.h \
+  $(srcdir)/tree-ssa-structalias.c \
   @all_gtfiles@
 
 # Compute the list of GT header files from the corresponding C sources,
@@ -3301,7 +3321,7 @@ build/genautomata.o : genautomata.c $(RTL_BASE_H) $(OBSTACK_H)            \
   $(BCONFIG_H) $(SYSTEM_H) coretypes.h $(GTM_H) errors.h vec.h         \
   $(HASHTAB_H) gensupport.h
 build/gencheck.o : gencheck.c tree.def $(BCONFIG_H) $(GTM_H)           \
-       $(SYSTEM_H) coretypes.h $(lang_tree_files)
+       $(SYSTEM_H) coretypes.h $(lang_tree_files) gimple.def
 build/genchecksum.o : genchecksum.c $(BCONFIG_H) $(SYSTEM_H) $(MD5_H)
 build/gencodes.o : gencodes.c $(RTL_BASE_H) $(BCONFIG_H) $(SYSTEM_H)   \
   coretypes.h $(GTM_H) errors.h gensupport.h
index fb57909..a4859fd 100644
@@ -1,3 +1,40 @@
+2008-07-28  Richard Guenther  <rguenther@suse.de>
+
+       Merge from gimple-tuples-branch.
+
+       2008-07-22  Olivier Hainque  <hainque@adacore.com>
+
+       * gigi.h (end_subprog_body): New ELAB_P argument, saying if
+       this is called for an elab proc to be discarded if empty.
+       * utils.c (end_subprog_body): Honor ELAB_P.
+       (build_function_stub): Adjust call to end_subprog_body.
+       * trans.c (Subprogram_Body_to_gnu): Likewise.
+       (gigi): Reorganize processing of elab procs to prevent
+       gimplifying twice, using the new end_subprog_body argument.
+
+       2008-07-19  Richard Guenther  <rguenther@suse.de>
+
+       * Make-lang.in (trans.o): Add tree-iterator.h dependency.
+       (utils.o): Likewise.
+       * trans.c: Include tree-iterator.h.
+       (gnat_gimplify_expr): Adjust prototype.  Fix typo.
+       (gnat_gimplify_stmt): Use SET_EXPR_LOCATION.
+       (set_expr_location_from_node): Likewise.
+       (gigi): Tuplify.
+       * ada-tree.h (union lang_tree_node): Use TREE_CHAIN instead
+       of GENERIC_NEXT.
+       * utils.c: Include tree-iterator.h.
+       * gigi.h (gnat_gimplify_expr): Adjust prototype.
+
+       2008-07-18  Aldy Hernandez  <aldyh@redhat.com>
+
+       * trans.c: Include gimple.h instead of tree-gimple.h.
+       * utils.c: Same.
+
+       2008-07-14  Aldy Hernandez  <aldyh@redhat.com>
+
+       * trans.c (gnat_gimplify_expr): Use gimplify_assign.
+
 2008-07-25  Jan Hubicka  <jh@suse.cz>
 
        * utils.c (end_subprog_body): Remove inline trees check.
index 4136ebe..268fa70 100644
@@ -1106,13 +1106,13 @@ ada/trans.o : ada/trans.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
    $(TREE_H) $(RTL_H) $(EXPR_H) $(FLAGS_H) $(FUNCTION_H) ada/ada.h except.h \
    ada/types.h ada/atree.h ada/nlists.h ada/elists.h ada/uintp.h ada/sinfo.h \
    ada/einfo.h ada/namet.h ada/snames.h ada/stringt.h ada/urealp.h ada/fe.h \
-   $(ADA_TREE_H) ada/gigi.h gt-ada-trans.h
+   $(ADA_TREE_H) ada/gigi.h gt-ada-trans.h tree-iterator.h
 
 ada/utils.o : ada/utils.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
    $(TREE_H) $(FLAGS_H) $(EXPR_H) convert.h defaults.h langhooks.h \
    ada/ada.h ada/types.h ada/atree.h ada/nlists.h ada/elists.h ada/sinfo.h \
    ada/einfo.h ada/namet.h ada/stringt.h ada/uintp.h ada/fe.h $(ADA_TREE_H) \
-   ada/gigi.h gt-ada-utils.h gtype-ada.h $(TARGET_H)
+   ada/gigi.h gt-ada-utils.h gtype-ada.h $(TARGET_H) tree-iterator.h
 
 ada/utils2.o : ada/utils2.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
    $(TREE_H) $(FLAGS_H) ada/ada.h ada/types.h ada/atree.h ada/nlists.h \
index 044cea8..9c31e46 100644
@@ -26,7 +26,7 @@
 /* Ada uses the lang_decl and lang_type fields to hold a tree.  */
 union lang_tree_node
   GTY((desc ("0"),
-       chain_next ("(union lang_tree_node *)GENERIC_NEXT (&%h.t)")))
+       chain_next ("(union lang_tree_node *)TREE_CHAIN (&%h.t)")))
 {
   union tree_node GTY((tag ("0"))) t;
 };
index 86ff090..aaf5e7f 100644
@@ -233,8 +233,8 @@ extern tree gnat_to_gnu (Node_Id gnat_node);
 extern void gnat_expand_stmt (tree gnu_stmt);
 
 /* ??? missing documentation */
-extern int gnat_gimplify_expr (tree *expr_p, tree *pre_p,
-                               tree *post_p ATTRIBUTE_UNUSED);
+extern int gnat_gimplify_expr (tree *expr_p, gimple_seq *pre_p,
+                               gimple_seq *post_p ATTRIBUTE_UNUSED);
 
 /* Do the processing for the declaration of a GNAT_ENTITY, a type.  If
    a separate Freeze node exists, delay the bulk of the processing.  Otherwise
@@ -668,10 +668,10 @@ extern tree create_label_decl (tree label_name);
    appearing in the subprogram.  */
 extern void begin_subprog_body (tree subprog_decl);
 
-/* Finish the definition of the current subprogram and compile it all the way
-   to assembler language output.  BODY is the tree corresponding to
-   the subprogram.  */
-extern void end_subprog_body (tree body);
+/* Finish the definition of the current subprogram BODY and compile it all the
+   way to assembler language output.  ELAB_P tells if this is called for an
+   elaboration routine, to be entirely discarded if empty.  */
+extern void end_subprog_body (tree body, bool elab_p);
 
 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
    EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
index 89b10c6..abc71f5 100644
@@ -40,7 +40,8 @@
 #include "except.h"
 #include "debug.h"
 #include "output.h"
-#include "tree-gimple.h"
+#include "tree-iterator.h"
+#include "gimple.h"
 #include "ada.h"
 #include "types.h"
 #include "atree.h"
@@ -356,7 +357,6 @@ gigi (Node_Id gnat_root, int max_gnat_node, int number_name,
   for (info = elab_info_list; info; info = info->next)
     {
       tree gnu_body = DECL_SAVED_TREE (info->elab_proc);
-      tree gnu_stmts;
 
       /* Unshare SAVE_EXPRs between subprograms.  These are not unshared by
         the gimplifier for obvious reasons, but it turns out that we need to
@@ -368,30 +368,14 @@ gigi (Node_Id gnat_root, int max_gnat_node, int number_name,
         an upstream bug for which we would not change the outcome.  */
       walk_tree_without_duplicates (&gnu_body, unshare_save_expr, NULL);
 
-      /* Set the current function to be the elaboration procedure and gimplify
-        what we have.  */
-      current_function_decl = info->elab_proc;
-      gimplify_body (&gnu_body, info->elab_proc, true);
+      /* Process the function like the others, but indicate that this is
+        an elab proc, to be discarded if empty, then propagate the status
+        up to the GNAT tree node.  */
+      begin_subprog_body (info->elab_proc);
+      end_subprog_body (gnu_body, true);
 
-      /* We should have a BIND_EXPR, but it may or may not have any statements
-        in it.  If it doesn't have any, we have nothing to do.  */
-      gnu_stmts = gnu_body;
-      if (TREE_CODE (gnu_stmts) == BIND_EXPR)
-       gnu_stmts = BIND_EXPR_BODY (gnu_stmts);
-
-      /* If there are no statements, there is no elaboration code.  */
-      if (!gnu_stmts || !STATEMENT_LIST_HEAD (gnu_stmts))
-       {
-         Set_Has_No_Elaboration_Code (info->gnat_node, 1);
-         cgraph_remove_node (cgraph_node (info->elab_proc));
-       }
-      else
-       {
-         /* Otherwise, compile the function.  Note that we'll be gimplifying
-            it twice, but that's fine for the nodes we use.  */
-         begin_subprog_body (info->elab_proc);
-         end_subprog_body (gnu_body);
-       }
+      if (empty_body_p (gimple_body (info->elab_proc)))
+       Set_Has_No_Elaboration_Code (info->gnat_node, 1);
     }
 
   /* We cannot track the location of errors past this point.  */
@@ -2003,7 +1987,7 @@ Subprogram_Body_to_gnu (Node_Id gnat_node)
       : Sloc (gnat_node)),
      &DECL_STRUCT_FUNCTION (gnu_subprog_decl)->function_end_locus);
 
-  end_subprog_body (gnu_result);
+  end_subprog_body (gnu_result, false);
 
   /* Disconnect the trees for parameters that we made variables for from the
      GNAT entities since these are unusable after we end the function.  */
@@ -5334,7 +5318,8 @@ pop_stack (tree *gnu_stack_ptr)
 /* Generate GIMPLE in place for the expression at *EXPR_P.  */
 
 int
-gnat_gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p ATTRIBUTE_UNUSED)
+gnat_gimplify_expr (tree *expr_p, gimple_seq *pre_p,
+                   gimple_seq *post_p ATTRIBUTE_UNUSED)
 {
   tree expr = *expr_p;
   tree op;
@@ -5419,14 +5404,14 @@ gnat_gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p ATTRIBUTE_UNUSED)
               && TREE_CODE_CLASS (TREE_CODE (op)) != tcc_constant)
        {
          tree new_var = create_tmp_var (TREE_TYPE (op), "A");
-         tree mod = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (op), new_var, op);
+         gimple stmt;
 
          TREE_ADDRESSABLE (new_var) = 1;
 
+         stmt = gimplify_assign (new_var, op, pre_p);
          if (EXPR_HAS_LOCATION (op))
-           SET_EXPR_LOCUS (mod, EXPR_LOCUS (op));
+           gimple_set_location (stmt, *EXPR_LOCUS (op));
 
-         gimplify_and_add (mod, pre_p);
          TREE_OPERAND (expr, 0) = new_var;
          recompute_tree_invariant_for_addr_expr (expr);
          return GS_ALL_DONE;
@@ -5494,7 +5479,7 @@ gnat_gimplify_stmt (tree *stmt_p)
          append_to_statement_list (LOOP_STMT_UPDATE (stmt), stmt_p);
 
        t = build1 (GOTO_EXPR, void_type_node, gnu_start_label);
-       set_expr_location (t, DECL_SOURCE_LOCATION (gnu_end_label));
+       SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (gnu_end_label));
        append_to_statement_list (t, stmt_p);
 
        append_to_statement_list (build1 (LABEL_EXPR, void_type_node,
@@ -6913,7 +6898,7 @@ set_expr_location_from_node (tree node, Node_Id gnat_node)
   if (!Sloc_to_locus (Sloc (gnat_node), &locus))
     return;
 
-  set_expr_location (node, locus);
+  SET_EXPR_LOCATION (node, locus);
 }
 \f
 /* Return a colon-separated list of encodings contained in encoded Ada
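
The hunk above shows the tuples idiom that replaces building a GIMPLE_MODIFY_STMT
tree and queueing it with gimplify_and_add: the assignment is emitted directly
into the pre-statement sequence with gimplify_assign, and the source location is
then set on the resulting statement.  A minimal sketch of that pattern, assuming
the gimplify_assign and gimple_set_location entry points from the new gimple.h;
the helper name is purely illustrative.

  /* Hypothetical helper, not part of the patch: emit NEW_VAR = OP into
     *PRE_P and carry OP's source location over to the new statement.  */
  static void
  emit_located_assignment (tree new_var, tree op, gimple_seq *pre_p)
  {
    gimple stmt = gimplify_assign (new_var, op, pre_p);  /* appends to *PRE_P */

    if (EXPR_HAS_LOCATION (op))
      gimple_set_location (stmt, EXPR_LOCATION (op));
  }
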
index 4668fa2..cde8d4d 100644
@@ -43,7 +43,8 @@
 #include "function.h"
 #include "cgraph.h"
 #include "tree-inline.h"
-#include "tree-gimple.h"
+#include "tree-iterator.h"
+#include "gimple.h"
 #include "tree-dump.h"
 #include "pointer-set.h"
 #include "langhooks.h"
@@ -2199,12 +2200,12 @@ gnat_genericize (tree fndecl)
   pointer_set_destroy (p_set);
 }
 
-/* Finish the definition of the current subprogram and compile it all the way
-   to assembler language output.  BODY is the tree corresponding to
-   the subprogram.  */
+/* Finish the definition of the current subprogram BODY and compile it all the
+   way to assembler language output.  ELAB_P tells if this is called for an
+   elaboration routine, to be entirely discarded if empty.  */
 
 void
-end_subprog_body (tree body)
+end_subprog_body (tree body, bool elab_p)
 {
   tree fndecl = current_function_decl;
 
@@ -2246,7 +2247,13 @@ end_subprog_body (tree body)
   if (!DECL_CONTEXT (fndecl))
     {
       gnat_gimplify_function (fndecl);
-      cgraph_finalize_function (fndecl, false);
+
+      /* If this is an empty elaboration proc, just discard the node.
+        Otherwise, compile further.  */
+      if (elab_p && empty_body_p (gimple_body (fndecl)))
+       cgraph_remove_node (cgraph_node (fndecl));
+      else
+       cgraph_finalize_function (fndecl, false);
     }
   else
     /* Register this function with cgraph just far enough to get it
@@ -3117,7 +3124,7 @@ build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
   gnat_poplevel ();
 
   allocate_struct_function (gnu_stub_decl, false);
-  end_subprog_body (gnu_body);
+  end_subprog_body (gnu_body, false);
 }
 \f
 /* Build a type to be used to represent an aliased object whose nominal
index 4aa864d..ff6d025 100644
@@ -122,7 +122,7 @@ struct edge_def GTY(())
 
   /* Instructions queued on the edge.  */
   union edge_def_insns {
-    tree GTY ((tag ("true"))) t;
+    gimple_seq GTY ((tag ("true"))) g;
     rtx GTY ((tag ("false"))) r;
   } GTY ((desc ("current_ir_type () == IR_GIMPLE"))) insns;
 
@@ -231,7 +231,7 @@ struct basic_block_def GTY((chain_next ("%h.next_bb"), chain_prev ("%h.prev_bb")
   struct basic_block_def *next_bb;
 
   union basic_block_il_dependent {
-      struct tree_bb_info * GTY ((tag ("0"))) tree;
+      struct gimple_bb_info * GTY ((tag ("0"))) gimple;
       struct rtl_bb_info * GTY ((tag ("1"))) rtl;
     } GTY ((desc ("((%1.flags & BB_RTL) != 0)"))) il;
 
@@ -266,13 +266,13 @@ struct rtl_bb_info GTY(())
   int visited;
 };
 
-struct tree_bb_info GTY(())
+struct gimple_bb_info GTY(())
 {
-  /* Pointers to the first and last trees of the block.  */
-  tree stmt_list;
+  /* Sequence of statements in this block.  */
+  gimple_seq seq;
 
-  /* Chain of PHI nodes for this block.  */
-  tree phi_nodes;
+  /* PHI nodes for this block.  */
+  gimple_seq phi_nodes;
 };
 
 typedef struct basic_block_def *basic_block;
@@ -383,7 +383,7 @@ struct control_flow_graph GTY(())
   int x_last_basic_block;
 
   /* Mapping of labels to their associated blocks.  At present
-     only used for the tree CFG.  */
+     only used for the gimple CFG.  */
   VEC(basic_block,gc) *x_label_to_block_map;
 
   enum profile_status {
@@ -831,9 +831,9 @@ extern bool maybe_hot_bb_p (const_basic_block);
 extern bool maybe_hot_edge_p (edge);
 extern bool probably_cold_bb_p (const_basic_block);
 extern bool probably_never_executed_bb_p (const_basic_block);
-extern bool tree_predicted_by_p (const_basic_block, enum br_predictor);
+extern bool gimple_predicted_by_p (const_basic_block, enum br_predictor);
 extern bool rtl_predicted_by_p (const_basic_block, enum br_predictor);
-extern void tree_predict_edge (edge, enum br_predictor, int);
+extern void gimple_predict_edge (edge, enum br_predictor, int);
 extern void rtl_predict_edge (edge, enum br_predictor, int);
 extern void predict_edge_def (edge, enum br_predictor, enum prediction);
 extern void guess_outgoing_edge_probabilities (basic_block);
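
With basic blocks now carrying a gimple_bb_info (a gimple_seq of statements plus
a gimple_seq of PHI nodes), passes walk a block through the gimple statement
iterators instead of a tree statement list.  A minimal sketch, assuming the
gsi_* iterator API and print_gimple_stmt from the new gimple headers; the
function name is hypothetical.

  /* Hypothetical example, not part of the patch: print every statement
     of basic block BB using the tuple iterators.  */
  static void
  dump_bb_stmts (basic_block bb)
  {
    gimple_stmt_iterator gsi;

    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      print_gimple_stmt (stderr, gsi_stmt (gsi), 0, 0);
  }
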
index b46cd26..288ad59 100644
@@ -27,7 +27,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "real.h"
 #include "rtl.h"
 #include "tree.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "flags.h"
 #include "regs.h"
 #include "hard-reg-set.h"
@@ -3287,6 +3287,7 @@ expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
                                            false, /*endp=*/0);
       HOST_WIDE_INT expected_size = -1;
       unsigned int expected_align = 0;
+      tree_ann_common_t ann;
 
       if (result)
        {
@@ -3308,7 +3309,10 @@ expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
       if (src_align == 0)
        return NULL_RTX;
  
-      stringop_block_profile (exp, &expected_align, &expected_size);
+      ann = tree_common_ann (exp);
+      if (ann)
+        stringop_block_profile (ann->stmt, &expected_align, &expected_size);
+
       if (expected_align < dest_align)
        expected_align = dest_align;
       dest_mem = get_memory_rtx (dest, len);
@@ -3883,6 +3887,7 @@ expand_builtin_memset_args (tree dest, tree val, tree len,
   rtx dest_mem, dest_addr, len_rtx;
   HOST_WIDE_INT expected_size = -1;
   unsigned int expected_align = 0;
+  tree_ann_common_t ann;
 
   dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
 
@@ -3890,7 +3895,10 @@ expand_builtin_memset_args (tree dest, tree val, tree len,
   if (dest_align == 0)
     return NULL_RTX;
 
-  stringop_block_profile (orig_exp, &expected_align, &expected_size);
+  ann = tree_common_ann (orig_exp);
+  if (ann)
+    stringop_block_profile (ann->stmt, &expected_align, &expected_size);
+
   if (expected_align < dest_align)
     expected_align = dest_align;
 
@@ -4755,7 +4763,8 @@ expand_builtin_va_start (tree exp)
    current (padded) address and increment by the (padded) size.  */
 
 tree
-std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
+std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
+                         gimple_seq *post_p)
 {
   tree addr, t, type_size, rounded_size, valist_tmp;
   unsigned HOST_WIDE_INT align, boundary;
@@ -4875,7 +4884,7 @@ dummy_object (tree type)
    builtin function, but a very special sort of operator.  */
 
 enum gimplify_status
-gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
+gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
 {
   tree promoted_type, have_va_type;
   tree valist = TREE_OPERAND (*expr_p, 0);
@@ -4917,7 +4926,7 @@ gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
         Call abort to encourage the user to fix the program.  */
       inform ("if this code is reached, the program will abort");
       t = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
-      append_to_statement_list (t, pre_p);
+      gimplify_and_add (t, pre_p);
 
       /* This is dead code, but go ahead and finish so that the
         mode of the result comes out right.  */
@@ -4939,13 +4948,14 @@ gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
              tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
              valist = build_fold_addr_expr_with_type (valist, p1);
            }
+
          gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
        }
       else
        gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
 
       if (!targetm.gimplify_va_arg_expr)
-       /* FIXME:Once most targets are converted we should merely
+       /* FIXME: Once most targets are converted we should merely
           assert this is non-null.  */
        return GS_ALL_DONE;
 
@@ -7292,7 +7302,7 @@ integer_valued_real_p (tree t)
     case COMPOUND_EXPR:
     case MODIFY_EXPR:
     case BIND_EXPR:
-      return integer_valued_real_p (GENERIC_TREE_OPERAND (t, 1));
+      return integer_valued_real_p (TREE_OPERAND (t, 1));
 
     case PLUS_EXPR:
     case MINUS_EXPR:
@@ -10572,7 +10582,7 @@ fold_builtin_n (tree fndecl, tree *args, int nargs, bool ignore)
     }
   if (ret)
     {
-      ret = build1 (NOP_EXPR, GENERIC_TREE_TYPE (ret), ret);
+      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
       TREE_NO_WARNING (ret) = 1;
       return ret;
     }
@@ -10833,6 +10843,61 @@ validate_arg (const_tree arg, enum tree_code code)
 /* This function validates the types of a function call argument list
    against a specified list of tree_codes.  If the last specifier is a 0,
    that represents an ellipses, otherwise the last specifier must be a
+   VOID_TYPE.
+
+   This is the GIMPLE version of validate_arglist.  Eventually we want to
+   completely convert builtins.c to work from GIMPLEs and the tree based
+   validate_arglist will then be removed.  */
+
+bool
+validate_gimple_arglist (const_gimple call, ...)
+{
+  enum tree_code code;
+  bool res = 0;
+  va_list ap;
+  const_tree arg;
+  size_t i;
+
+  va_start (ap, call);
+  i = 0;
+
+  do
+    {
+      code = va_arg (ap, enum tree_code);
+      switch (code)
+       {
+       case 0:
+         /* This signifies an ellipsis; any further arguments are all OK.  */
+         res = true;
+         goto end;
+       case VOID_TYPE:
+         /* This signifies an endlink, if no arguments remain, return
+            true, otherwise return false.  */
+         res = (i == gimple_call_num_args (call));
+         goto end;
+       default:
+         /* If no parameters remain or the parameter's code does not
+            match the specified code, return false.  Otherwise continue
+            checking any remaining arguments.  */
+         arg = gimple_call_arg (call, i++);
+         if (!validate_arg (arg, code))
+           goto end;
+         break;
+       }
+    }
+  while (1);
+
+  /* We need gotos here since we can only have one VA_CLOSE in a
+     function.  */
+ end: ;
+  va_end (ap);
+
+  return res;
+}
+
+/* This function validates the types of a function call argument list
+   against a specified list of tree_codes.  If the last specifier is a 0,
+   that represents an ellipsis; otherwise the last specifier must be a
    VOID_TYPE.  */
 
 bool
@@ -11439,6 +11504,7 @@ fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
    produced.  False otherwise.  This is done so that we don't output the error
    or warning twice or three times.  */
+
 bool
 fold_builtin_next_arg (tree exp, bool va_start_p)
 {
@@ -13113,3 +13179,303 @@ do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
   return result;
 }
 #endif
+
+/* FIXME tuples.
+   The functions below provide an alternate interface for folding
+   builtin function calls presented as GIMPLE_CALL statements rather
+   than as CALL_EXPRs.  The folded result is still expressed as a
+   tree.  There is too much code duplication in the handling of
+   varargs functions, and a more intrusive re-factoring would permit
+   better sharing of code between the tree and statement-based
+   versions of these functions.  */
+
+/* Construct a new CALL_EXPR using the tail of the argument list of STMT
+   along with N new arguments specified as the "..." parameters.  SKIP
+   is the number of arguments in STMT to be omitted.  This function is used
+   to do varargs-to-varargs transformations.  */
+
+static tree
+gimple_rewrite_call_expr (gimple stmt, int skip, tree fndecl, int n, ...)
+{
+  int oldnargs = gimple_call_num_args (stmt);
+  int nargs = oldnargs - skip + n;
+  tree fntype = TREE_TYPE (fndecl);
+  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
+  tree *buffer;
+  int i, j;
+  va_list ap;
+
+  buffer = XALLOCAVEC (tree, nargs);
+  va_start (ap, n);
+  for (i = 0; i < n; i++)
+    buffer[i] = va_arg (ap, tree);
+  va_end (ap);
+  for (j = skip; j < oldnargs; j++, i++)
+    buffer[i] = gimple_call_arg (stmt, j);
+
+  return fold (build_call_array (TREE_TYPE (fntype), fn, nargs, buffer));
+}
+
+/* Fold a call STMT to __{,v}sprintf_chk.  Return NULL_TREE if
+   a normal call should be emitted rather than expanding the function
+   inline.  FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */
+
+static tree
+gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
+{
+  tree dest, size, len, fn, fmt, flag;
+  const char *fmt_str;
+  int nargs = gimple_call_num_args (stmt);
+
+  /* Verify the required arguments in the original call.  */
+  if (nargs < 4)
+    return NULL_TREE;
+  dest = gimple_call_arg (stmt, 0);
+  if (!validate_arg (dest, POINTER_TYPE))
+    return NULL_TREE;
+  flag = gimple_call_arg (stmt, 1);
+  if (!validate_arg (flag, INTEGER_TYPE))
+    return NULL_TREE;
+  size = gimple_call_arg (stmt, 2);
+  if (!validate_arg (size, INTEGER_TYPE))
+    return NULL_TREE;
+  fmt = gimple_call_arg (stmt, 3);
+  if (!validate_arg (fmt, POINTER_TYPE))
+    return NULL_TREE;
+
+  if (! host_integerp (size, 1))
+    return NULL_TREE;
+
+  len = NULL_TREE;
+
+  if (!init_target_chars ())
+    return NULL_TREE;
+
+  /* Check whether the format is a literal string constant.  */
+  fmt_str = c_getstr (fmt);
+  if (fmt_str != NULL)
+    {
+      /* If the format doesn't contain % args or %%, we know the size.  */
+      if (strchr (fmt_str, target_percent) == 0)
+       {
+         if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
+           len = build_int_cstu (size_type_node, strlen (fmt_str));
+       }
+      /* If the format is "%s" and first ... argument is a string literal,
+        we know the size too.  */
+      else if (fcode == BUILT_IN_SPRINTF_CHK
+              && strcmp (fmt_str, target_percent_s) == 0)
+       {
+         tree arg;
+
+         if (nargs == 5)
+           {
+             arg = gimple_call_arg (stmt, 4);
+             if (validate_arg (arg, POINTER_TYPE))
+               {
+                 len = c_strlen (arg, 1);
+                 if (! len || ! host_integerp (len, 1))
+                   len = NULL_TREE;
+               }
+           }
+       }
+    }
+
+  if (! integer_all_onesp (size))
+    {
+      if (! len || ! tree_int_cst_lt (len, size))
+       return NULL_TREE;
+    }
+
+  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
+     or if format doesn't contain % chars or is "%s".  */
+  if (! integer_zerop (flag))
+    {
+      if (fmt_str == NULL)
+       return NULL_TREE;
+      if (strchr (fmt_str, target_percent) != NULL
+         && strcmp (fmt_str, target_percent_s))
+       return NULL_TREE;
+    }
+
+  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
+  fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
+                     ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];
+  if (!fn)
+    return NULL_TREE;
+
+  return gimple_rewrite_call_expr (stmt, 4, fn, 2, dest, fmt);
+}
+
+/* Fold a call STMT to {,v}snprintf.  Return NULL_TREE if
+   a normal call should be emitted rather than expanding the function
+   inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
+   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
+   passed as second argument.  */
+
+tree
+gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
+                                  enum built_in_function fcode)
+{
+  tree dest, size, len, fn, fmt, flag;
+  const char *fmt_str;
+
+  /* Verify the required arguments in the original call.  */
+  if (gimple_call_num_args (stmt) < 5)
+    return NULL_TREE;
+  dest = gimple_call_arg (stmt, 0);
+  if (!validate_arg (dest, POINTER_TYPE))
+    return NULL_TREE;
+  len = gimple_call_arg (stmt, 1);
+  if (!validate_arg (len, INTEGER_TYPE))
+    return NULL_TREE;
+  flag = gimple_call_arg (stmt, 2);
+  if (!validate_arg (flag, INTEGER_TYPE))
+    return NULL_TREE;
+  size = gimple_call_arg (stmt, 3);
+  if (!validate_arg (size, INTEGER_TYPE))
+    return NULL_TREE;
+  fmt = gimple_call_arg (stmt, 4);
+  if (!validate_arg (fmt, POINTER_TYPE))
+    return NULL_TREE;
+
+  if (! host_integerp (size, 1))
+    return NULL_TREE;
+
+  if (! integer_all_onesp (size))
+    {
+      if (! host_integerp (len, 1))
+       {
+         /* If LEN is not constant, try MAXLEN too.
+            For MAXLEN only allow optimizing into non-_ocs function
+            if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
+         if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))
+           return NULL_TREE;
+       }
+      else
+       maxlen = len;
+
+      if (tree_int_cst_lt (size, maxlen))
+       return NULL_TREE;
+    }
+
+  if (!init_target_chars ())
+    return NULL_TREE;
+
+  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
+     or if format doesn't contain % chars or is "%s".  */
+  if (! integer_zerop (flag))
+    {
+      fmt_str = c_getstr (fmt);
+      if (fmt_str == NULL)
+       return NULL_TREE;
+      if (strchr (fmt_str, target_percent) != NULL
+         && strcmp (fmt_str, target_percent_s))
+       return NULL_TREE;
+    }
+
+  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
+     available.  */
+  fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
+                     ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];
+  if (!fn)
+    return NULL_TREE;
+
+  return gimple_rewrite_call_expr (stmt, 5, fn, 3, dest, len, fmt);
+}
+
+/* Builtins with folding operations that operate on "..." arguments
+   need special handling; we need to store the arguments in a convenient
+   data structure before attempting any folding.  Fortunately there are
+   only a few builtins that fall into this category.  FNDECL is the
+   function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
+   result of the function call is ignored.  */
+
+static tree
+gimple_fold_builtin_varargs (tree fndecl, gimple stmt, bool ignore ATTRIBUTE_UNUSED)
+{
+  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
+  tree ret = NULL_TREE;
+
+  switch (fcode)
+    {
+    case BUILT_IN_SPRINTF_CHK:
+    case BUILT_IN_VSPRINTF_CHK:
+      ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
+      break;
+
+    case BUILT_IN_SNPRINTF_CHK:
+    case BUILT_IN_VSNPRINTF_CHK:
+      ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
+      break;
+
+    default:
+      break;
+    }
+  if (ret)
+    {
+      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
+      TREE_NO_WARNING (ret) = 1;
+      return ret;
+    }
+  return NULL_TREE;
+}
+
+/* A wrapper function for builtin folding that prevents warnings for
+   "statement without effect" and the like, caused by removing the
+   call node earlier than the warning is generated.  */
+
+tree
+fold_call_stmt (gimple stmt, bool ignore)
+{
+  tree ret = NULL_TREE;
+  tree fndecl = gimple_call_fndecl (stmt);
+  if (fndecl
+      && TREE_CODE (fndecl) == FUNCTION_DECL
+      && DECL_BUILT_IN (fndecl)
+      && !gimple_call_va_arg_pack_p (stmt))
+    {
+      int nargs = gimple_call_num_args (stmt);
+
+      /* FIXME: Don't use a list in this interface.  */
+      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
+        {
+          tree arglist = NULL_TREE;
+          int i;
+          for (i = nargs - 1; i >= 0; i--)
+            arglist = tree_cons (NULL_TREE, gimple_call_arg (stmt, i), arglist);
+         return targetm.fold_builtin (fndecl, arglist, ignore);
+        }
+      else
+       {
+         if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
+           {
+              tree args[MAX_ARGS_TO_FOLD_BUILTIN];
+              int i;
+              for (i = 0; i < nargs; i++)
+                args[i] = gimple_call_arg (stmt, i);
+             ret = fold_builtin_n (fndecl, args, nargs, ignore);
+           }
+         if (!ret)
+           ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
+         if (ret)
+           {
+             /* Propagate location information from original call to
+                expansion of builtin.  Otherwise things like
+                maybe_emit_chk_warning, that operate on the expansion
+                of a builtin, will use the wrong location information.  */
+             if (gimple_has_location (stmt))
+                {
+                 tree realret = ret;
+                 if (TREE_CODE (ret) == NOP_EXPR)
+                   realret = TREE_OPERAND (ret, 0);
+                 if (CAN_HAVE_LOCATION_P (realret)
+                     && !EXPR_HAS_LOCATION (realret))
+                   SET_EXPR_LOCATION (realret, gimple_location (stmt));
+                  return realret;
+                }
+             return ret;
+           }
+       }
+    }
+  return NULL_TREE;
+}
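
fold_call_stmt above is the statement-based counterpart of the tree-level
fold_call_expr: it takes a GIMPLE_CALL and hands back a GENERIC tree with the
folded result (or NULL_TREE), which the caller still has to gimplify or
substitute.  A small usage sketch; try_fold_call is a hypothetical wrapper name.

  /* Hypothetical wrapper, not part of the patch: fold STMT if it is a
     builtin call, treating the result as ignored when the call has no LHS.  */
  static tree
  try_fold_call (gimple stmt)
  {
    if (!is_gimple_call (stmt))
      return NULL_TREE;

    return fold_call_stmt (stmt, gimple_call_lhs (stmt) == NULL_TREE);
  }
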
index c9ffd9c..caac53e 100644
@@ -48,6 +48,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "real.h"
 #include "cgraph.h"
 #include "target-def.h"
+#include "gimple.h"
 #include "fixed-value.h"
 
 cpp_reader *parse_in;          /* Declared in c-pragma.h.  */
@@ -7376,71 +7377,67 @@ c_parse_error (const char *gmsgid, enum cpp_ttype token, tree value)
    inlining, so we don't have to worry about that.  */
 
 void
-c_warn_unused_result (tree *top_p)
+c_warn_unused_result (gimple_seq seq)
 {
-  tree t = *top_p;
-  tree_stmt_iterator i;
   tree fdecl, ftype;
+  gimple_stmt_iterator i;
 
-  switch (TREE_CODE (t))
+  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
     {
-    case STATEMENT_LIST:
-      for (i = tsi_start (*top_p); !tsi_end_p (i); tsi_next (&i))
-       c_warn_unused_result (tsi_stmt_ptr (i));
-      break;
-
-    case COND_EXPR:
-      c_warn_unused_result (&COND_EXPR_THEN (t));
-      c_warn_unused_result (&COND_EXPR_ELSE (t));
-      break;
-    case BIND_EXPR:
-      c_warn_unused_result (&BIND_EXPR_BODY (t));
-      break;
-    case TRY_FINALLY_EXPR:
-    case TRY_CATCH_EXPR:
-      c_warn_unused_result (&TREE_OPERAND (t, 0));
-      c_warn_unused_result (&TREE_OPERAND (t, 1));
-      break;
-    case CATCH_EXPR:
-      c_warn_unused_result (&CATCH_BODY (t));
-      break;
-    case EH_FILTER_EXPR:
-      c_warn_unused_result (&EH_FILTER_FAILURE (t));
-      break;
+      gimple g = gsi_stmt (i);
 
-    case CALL_EXPR:
-      if (TREE_USED (t))
-       break;
-
-      /* This is a naked call, as opposed to a CALL_EXPR nested inside
-        a MODIFY_EXPR.  All calls whose value is ignored should be
-        represented like this.  Look for the attribute.  */
-      fdecl = get_callee_fndecl (t);
-      if (fdecl)
-       ftype = TREE_TYPE (fdecl);
-      else
+      switch (gimple_code (g))
        {
-         ftype = TREE_TYPE (CALL_EXPR_FN (t));
-         /* Look past pointer-to-function to the function type itself.  */
-         ftype = TREE_TYPE (ftype);
-       }
+       case GIMPLE_BIND:
+         c_warn_unused_result (gimple_bind_body (g));
+         break;
+       case GIMPLE_TRY:
+         c_warn_unused_result (gimple_try_eval (g));
+         c_warn_unused_result (gimple_try_cleanup (g));
+         break;
+       case GIMPLE_CATCH:
+         c_warn_unused_result (gimple_catch_handler (g));
+         break;
+       case GIMPLE_EH_FILTER:
+         c_warn_unused_result (gimple_eh_filter_failure (g));
+         break;
 
-      if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
-       {
-         if (fdecl)
-           warning (0, "%Hignoring return value of %qD, "
-                    "declared with attribute warn_unused_result",
-                    EXPR_LOCUS (t), fdecl);
+       case GIMPLE_CALL:
+         if (gimple_call_lhs (g))
+           break;
+
+         /* This is a naked call, as opposed to a GIMPLE_CALL with an
+            LHS.  All calls whose value is ignored should be
+            represented like this.  Look for the attribute.  */
+         fdecl = gimple_call_fn (g);
+         if (TREE_CODE (fdecl) == FUNCTION_DECL)
+           ftype = TREE_TYPE (fdecl);
          else
-           warning (0, "%Hignoring return value of function "
-                    "declared with attribute warn_unused_result",
-                    EXPR_LOCUS (t));
-       }
-      break;
+           {
+             ftype = TREE_TYPE (fdecl);
+             /* Look past pointer-to-function to the function type itself.  */
+             ftype = TREE_TYPE (ftype);
+           }
 
-    default:
-      /* Not a container, not a call, or a call whose value is used.  */
-      break;
+         if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
+           {
+             location_t loc = gimple_location (g);
+
+             if (fdecl)
+               warning (0, "%Hignoring return value of %qD, "
+                        "declared with attribute warn_unused_result",
+                        &loc, fdecl);
+             else
+               warning (0, "%Hignoring return value of function "
+                        "declared with attribute warn_unused_result",
+                        &loc);
+           }
+         break;
+
+       default:
+         /* Not a container, not a call, or a call whose value is used.  */
+         break;
+       }
     }
 }
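
The rewritten c_warn_unused_result walks a gimple_seq with a
gimple_stmt_iterator and recurses into the bodies of container statements
instead of dispatching on GENERIC tree codes.  The same shape, reduced to a
hypothetical counter of calls whose value is dropped; only the gsi_* iterators
and gimple_* accessors from the new gimple.h are assumed.

  /* Hypothetical example, not part of the patch: count the GIMPLE_CALLs
     in SEQ (including nested binds and try blocks) that have no LHS.  */
  static int
  count_ignored_calls (gimple_seq seq)
  {
    gimple_stmt_iterator i;
    int n = 0;

    for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
      {
        gimple g = gsi_stmt (i);

        switch (gimple_code (g))
          {
          case GIMPLE_BIND:
            n += count_ignored_calls (gimple_bind_body (g));
            break;
          case GIMPLE_TRY:
            n += count_ignored_calls (gimple_try_eval (g));
            n += count_ignored_calls (gimple_try_cleanup (g));
            break;
          case GIMPLE_CALL:
            if (gimple_call_lhs (g) == NULL_TREE)
              n++;
            break;
          default:
            break;
          }
      }

    return n;
  }
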
 
index 9850dfa..1ff5d66 100644
@@ -897,7 +897,7 @@ extern void dump_time_statistics (void);
 
 extern bool c_dump_tree (void *, tree);
 
-extern void c_warn_unused_result (tree *);
+extern void c_warn_unused_result (gimple_seq);
 
 extern void verify_sequence_points (tree);
 
@@ -928,7 +928,7 @@ extern void warn_for_div_by_zero (tree divisor);
 
 /* In c-gimplify.c  */
 extern void c_genericize (tree);
-extern int c_gimplify_expr (tree *, tree *, tree *);
+extern int c_gimplify_expr (tree *, gimple_seq *, gimple_seq *);
 extern tree c_build_bind_expr (tree, tree);
 
 /* In c-pch.c  */
index dd5f1ea..bdb6819 100644
@@ -52,7 +52,8 @@ along with GCC; see the file COPYING3.  If not see
 #include "c-pragma.h"
 #include "langhooks.h"
 #include "tree-mudflap.h"
-#include "tree-gimple.h"
+#include "gimple.h"
+#include "tree-iterator.h"
 #include "diagnostic.h"
 #include "tree-dump.h"
 #include "cgraph.h"
@@ -61,6 +62,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "except.h"
 #include "langhooks-def.h"
 #include "pointer-set.h"
+#include "gimple.h"
 
 /* In grokdeclarator, distinguish syntactic contexts of declarators.  */
 enum decl_context
@@ -248,7 +250,7 @@ extern char C_SIZEOF_STRUCT_LANG_IDENTIFIER_isnt_accurate
 
 union lang_tree_node
   GTY((desc ("TREE_CODE (&%h.generic) == IDENTIFIER_NODE"),
-       chain_next ("TREE_CODE (&%h.generic) == INTEGER_TYPE ? (union lang_tree_node *) TYPE_NEXT_VARIANT (&%h.generic) : ((union lang_tree_node *) GENERIC_NEXT (&%h.generic))")))
+       chain_next ("TREE_CODE (&%h.generic) == INTEGER_TYPE ? (union lang_tree_node *) TYPE_NEXT_VARIANT (&%h.generic) : ((union lang_tree_node *) TREE_CHAIN (&%h.generic))")))
 {
   union tree_node GTY ((tag ("0"),
                        desc ("tree_node_structure (&%h)")))
@@ -1836,6 +1838,7 @@ merge_decls (tree newdecl, tree olddecl, tree newtype, tree oldtype)
          DECL_INITIAL (newdecl) = DECL_INITIAL (olddecl);
          DECL_STRUCT_FUNCTION (newdecl) = DECL_STRUCT_FUNCTION (olddecl);
          DECL_SAVED_TREE (newdecl) = DECL_SAVED_TREE (olddecl);
+         gimple_set_body (newdecl, gimple_body (olddecl));
          DECL_ARGUMENTS (newdecl) = DECL_ARGUMENTS (olddecl);
 
          /* Set DECL_INLINE on the declaration if we've got a body
@@ -1870,6 +1873,10 @@ merge_decls (tree newdecl, tree olddecl, tree newtype, tree oldtype)
            sizeof (struct tree_decl_common) - sizeof (struct tree_common));
     switch (TREE_CODE (olddecl))
       {
+      case FUNCTION_DECL:
+       gimple_set_body (olddecl, gimple_body (newdecl));
+       /* fall through */
+
       case FIELD_DECL:
       case VAR_DECL:
       case PARM_DECL:
@@ -1877,7 +1884,6 @@ merge_decls (tree newdecl, tree olddecl, tree newtype, tree oldtype)
       case RESULT_DECL:
       case CONST_DECL:
       case TYPE_DECL:
-      case FUNCTION_DECL:
        memcpy ((char *) olddecl + sizeof (struct tree_decl_common),
                (char *) newdecl + sizeof (struct tree_decl_common),
                tree_code_size (TREE_CODE (olddecl)) - sizeof (struct tree_decl_common));
@@ -4050,7 +4056,7 @@ grokdeclarator (const struct c_declarator *declarator,
      "signed".  */
   if (bitfield && !flag_signed_bitfields && !declspecs->explicit_signed_p
       && TREE_CODE (type) == INTEGER_TYPE)
-    type = c_common_unsigned_type (type);
+    type = unsigned_type_for (type);
 
   /* Figure out the type qualifiers for the declaration.  There are
      two ways a declaration can become qualified.  One is something
@@ -6634,9 +6640,10 @@ static void
 c_gimple_diagnostics_recursively (tree fndecl)
 {
   struct cgraph_node *cgn;
+  gimple_seq body = gimple_body (fndecl);
 
   /* Handle attribute((warn_unused_result)).  Relies on gimple input.  */
-  c_warn_unused_result (&DECL_SAVED_TREE (fndecl));
+  c_warn_unused_result (body);
 
   /* Notice when OpenMP structured block constraints are violated.  */
   if (flag_openmp)
index fa28763..9d20d12 100644
@@ -2262,7 +2262,7 @@ check_format_types (format_wanted_type *types, const char *format_start,
          && TREE_CODE (cur_type) == INTEGER_TYPE
          && (!pedantic || i == 0 || (i == 1 && char_type_flag))
          && (TYPE_UNSIGNED (wanted_type)
-             ? wanted_type == c_common_unsigned_type (cur_type)
+             ? wanted_type == unsigned_type_for (cur_type)
              : wanted_type == c_common_signed_type (cur_type)))
        continue;
       /* Likewise, "signed char", "unsigned char" and "char" are
index 12292a7..342848a 100644
@@ -31,7 +31,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "varray.h"
 #include "c-tree.h"
 #include "c-common.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "hard-reg-set.h"
 #include "basic-block.h"
 #include "tree-flow.h"
@@ -104,7 +104,6 @@ c_genericize (tree fndecl)
   /* Go ahead and gimplify for now.  */
   gimplify_function_tree (fndecl);
 
-  /* Dump the genericized tree IR.  */
   dump_function (TDI_generic, fndecl);
 
   /* Genericize all nested functions now.  We do things in this order so
@@ -118,14 +117,16 @@ c_genericize (tree fndecl)
 static void
 add_block_to_enclosing (tree block)
 {
+  unsigned i;
   tree enclosing;
+  gimple bind;
+  VEC(gimple, heap) *stack = gimple_bind_expr_stack ();
 
-  for (enclosing = gimple_current_bind_expr ();
-       enclosing; enclosing = TREE_CHAIN (enclosing))
-    if (BIND_EXPR_BLOCK (enclosing))
+  for (i = 0; VEC_iterate (gimple, stack, i, bind); i++)
+    if (gimple_bind_block (bind))
       break;
 
-  enclosing = BIND_EXPR_BLOCK (enclosing);
+  enclosing = gimple_bind_block (bind);
   BLOCK_SUBBLOCKS (enclosing) = chainon (BLOCK_SUBBLOCKS (enclosing), block);
 }
 
@@ -178,7 +179,7 @@ c_build_bind_expr (tree block, tree body)
    decl instead.  */
 
 static enum gimplify_status
-gimplify_compound_literal_expr (tree *expr_p, tree *pre_p)
+gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p)
 {
   tree decl_s = COMPOUND_LITERAL_EXPR_DECL_STMT (*expr_p);
   tree decl = DECL_EXPR_DECL (decl_s);
@@ -249,10 +250,12 @@ optimize_compound_literals_in_ctor (tree orig_ctor)
   return ctor;
 }
 
-/* Do C-specific gimplification.  Args are as for gimplify_expr.  */
+/* Do C-specific gimplification on *EXPR_P.  PRE_P and POST_P are as in
+   gimplify_expr.  */
 
 int
-c_gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p ATTRIBUTE_UNUSED)
+c_gimplify_expr (tree *expr_p, gimple_seq *pre_p,
+                gimple_seq *post_p ATTRIBUTE_UNUSED)
 {
   enum tree_code code = TREE_CODE (*expr_p);
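
c_gimplify_expr and the other front-end gimplification hooks now receive
gimple_seq queues: statements generated for a subexpression are emitted into
*PRE_P or *POST_P rather than appended to tree statement lists.  A minimal
sketch of forcing an operand into a gimple value under the new signature,
assuming gimplify_expr keeps the queue-based interface introduced by the merge;
the helper name is hypothetical.

  /* Hypothetical helper, not part of the patch: gimplify *EXPR_P to a
     gimple value, queuing any needed statements on *PRE_P.  */
  static enum gimplify_status
  gimplify_to_rvalue (tree *expr_p, gimple_seq *pre_p)
  {
    return gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
  }
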
 
index 1da71d2..7da659c 100644
@@ -29,7 +29,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "function.h"
 #include "c-common.h"
 #include "toplev.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "bitmap.h"
 #include "langhooks.h"
 
index 4386c39..67a466b 100644
@@ -1852,14 +1852,13 @@ static void
 pp_c_assignment_expression (c_pretty_printer *pp, tree e)
 {
   if (TREE_CODE (e) == MODIFY_EXPR 
-      || TREE_CODE (e) == GIMPLE_MODIFY_STMT
       || TREE_CODE (e) == INIT_EXPR)
     {
-      pp_c_unary_expression (pp, GENERIC_TREE_OPERAND (e, 0));
+      pp_c_unary_expression (pp, TREE_OPERAND (e, 0));
       pp_c_whitespace (pp);
       pp_equal (pp);
       pp_space (pp);
-      pp_c_expression (pp, GENERIC_TREE_OPERAND (e, 1));
+      pp_c_expression (pp, TREE_OPERAND (e, 1));
     }
   else
     pp_c_conditional_expression (pp, e);
@@ -2007,7 +2006,6 @@ pp_c_expression (c_pretty_printer *pp, tree e)
       break;
 
     case MODIFY_EXPR:
-    case GIMPLE_MODIFY_STMT:
     case INIT_EXPR:
       pp_assignment_expression (pp, e);
       break;
index c7933be..62faee5 100644
@@ -43,7 +43,8 @@ along with GCC; see the file COPYING3.  If not see
 #include "timevar.h"
 #include "predict.h"
 #include "tree-inline.h"
-#include "tree-gimple.h"
+#include "gimple.h"
+#include "tree-iterator.h"
 #include "langhooks.h"
 
 /* Create an empty statement tree rooted at T.  */
index 5d7036a..160229a 100644
@@ -42,7 +42,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "ggc.h"
 #include "target.h"
 #include "tree-iterator.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "tree-flow.h"
 
 /* Possible cases of implicit bad conversions.  Used to select
index a4470fa..71bdf8c 100644
@@ -25,6 +25,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "tm.h"
 #include "rtl.h"
 #include "tree.h"
+#include "gimple.h"
 #include "flags.h"
 #include "expr.h"
 #include "optabs.h"
@@ -41,6 +42,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "cgraph.h"
 #include "except.h"
 #include "dbgcnt.h"
+#include "tree-flow.h"
 
 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
@@ -380,7 +382,7 @@ emit_call_1 (rtx funexp, tree fntree, tree fndecl ATTRIBUTE_UNUSED,
     add_reg_note (call_insn, REG_EH_REGION, const0_rtx);
   else
     {
-      int rn = lookup_stmt_eh_region (fntree);
+      int rn = lookup_expr_eh_region (fntree);
 
       /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't
         throw, which we already took care of.  */
@@ -542,7 +544,26 @@ setjmp_call_p (const_tree fndecl)
   return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
 }
 
+
+/* Return true if STMT is an alloca call.  */
+
+bool
+gimple_alloca_call_p (const_gimple stmt)
+{
+  tree fndecl;
+
+  if (!is_gimple_call (stmt))
+    return false;
+
+  fndecl = gimple_call_fndecl (stmt);
+  if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
+    return true;
+
+  return false;
+}
+
 /* Return true when exp contains alloca call.  */
+
 bool
 alloca_call_p (const_tree exp)
 {
@@ -2251,7 +2272,7 @@ expand_call (tree exp, rtx target, int ignore)
   if (currently_expanding_call++ != 0
       || !flag_optimize_sibling_calls
       || args_size.var
-      || lookup_stmt_eh_region (exp) >= 0
+      || lookup_expr_eh_region (exp) >= 0
       || dbg_cnt (tail_call) == false)
     try_tail_call = 0;
 
@@ -3229,7 +3250,7 @@ emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
 #ifdef REG_PARM_STACK_SPACE
   /* Define the boundary of the register parm stack space that needs to be
      save, if any.  */
-  int low_to_save, high_to_save;
+  int low_to_save = 0, high_to_save = 0;
   rtx save_area = 0;            /* Place that it is saved.  */
 #endif
 
index 69bd69c..b4c33ab 100644 (file)
@@ -42,6 +42,352 @@ along with GCC; see the file COPYING3.  If not see
 #include "value-prof.h"
 #include "target.h"
 
+
+/* Return an expression tree corresponding to the RHS of GIMPLE
+   statement STMT.  */
+
+tree
+gimple_assign_rhs_to_tree (gimple stmt)
+{
+  tree t;
+  enum gimple_rhs_class class;
+    
+  class = get_gimple_rhs_class (gimple_expr_code (stmt));
+
+  if (class == GIMPLE_BINARY_RHS)
+    t = build2 (gimple_assign_rhs_code (stmt),
+               TREE_TYPE (gimple_assign_lhs (stmt)),
+               gimple_assign_rhs1 (stmt),
+               gimple_assign_rhs2 (stmt));
+  else if (class == GIMPLE_UNARY_RHS)
+    t = build1 (gimple_assign_rhs_code (stmt),
+               TREE_TYPE (gimple_assign_lhs (stmt)),
+               gimple_assign_rhs1 (stmt));
+  else if (class == GIMPLE_SINGLE_RHS)
+    t = gimple_assign_rhs1 (stmt);
+  else
+    gcc_unreachable ();
+
+  return t;
+}
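
As a concrete illustration (a sketch, not part of the patch): for a tuple representing a = b + c the RHS class is GIMPLE_BINARY_RHS and the PLUS_EXPR is rebuilt via build2; for a cast a = (int) b it is GIMPLE_UNARY_RHS; for a plain copy a = b it is GIMPLE_SINGLE_RHS and rhs1 is returned unchanged.  A caller typically wraps the result back into a MODIFY_EXPR, as gimple_to_tree does below:

    /* Illustrative only: rebuild "lhs = rhs" in GENERIC from an assignment tuple.  */
    tree lhs = gimple_assign_lhs (stmt);
    tree rhs = gimple_assign_rhs_to_tree (stmt);
    tree expr = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);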
+
+/* Return an expression tree corresponding to the PREDICATE of GIMPLE_COND
+   statement STMT.  */
+
+static tree
+gimple_cond_pred_to_tree (gimple stmt)
+{
+  return build2 (gimple_cond_code (stmt), boolean_type_node,
+                gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
+}
+
+/* Helper for gimple_to_tree.  Set EXPR_LOCATION for every expression
+   inside *TP.  DATA is the location to set.  */
+
+static tree
+set_expr_location_r (tree *tp, int *ws ATTRIBUTE_UNUSED, void *data)
+{
+  location_t *loc = (location_t *) data;
+  if (EXPR_P (*tp))
+    SET_EXPR_LOCATION (*tp, *loc);
+
+  return NULL_TREE;
+}
+
+
+/* RTL expansion has traditionally been done on trees, so the
+   transition to doing it on GIMPLE tuples is very invasive to the RTL
+   expander.  To facilitate the transition, this function takes a
+   GIMPLE tuple STMT and returns the same statement in the form of a
+   tree.  */
+
+static tree
+gimple_to_tree (gimple stmt)
+{
+  tree t;
+  int rn;
+  tree_ann_common_t ann;
+  location_t loc;
+
+  switch (gimple_code (stmt))
+    {
+    case GIMPLE_ASSIGN:
+      {
+       tree lhs = gimple_assign_lhs (stmt);
+
+       t = gimple_assign_rhs_to_tree (stmt);
+       t = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, t);
+       if (gimple_assign_nontemporal_move_p (stmt))
+         MOVE_NONTEMPORAL (t) = true;
+      }
+      break;
+                                        
+    case GIMPLE_COND:
+      t = gimple_cond_pred_to_tree (stmt);
+      t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
+      break;
+
+    case GIMPLE_GOTO:
+      t = build1 (GOTO_EXPR, void_type_node, gimple_goto_dest (stmt));
+      break;
+
+    case GIMPLE_LABEL:
+      t = build1 (LABEL_EXPR, void_type_node, gimple_label_label (stmt));
+      break;
+
+    case GIMPLE_RETURN:
+      {
+       tree retval = gimple_return_retval (stmt);
+
+       if (retval && retval != error_mark_node)
+         {
+           tree result = DECL_RESULT (current_function_decl);
+
+           /* If we are not returning the current function's RESULT_DECL,
+              build an assignment to it.  */
+           if (retval != result)
+             {
+               /* I believe that a function's RESULT_DECL is unique.  */
+               gcc_assert (TREE_CODE (retval) != RESULT_DECL);
+
+               retval = build2 (MODIFY_EXPR, TREE_TYPE (result),
+                                result, retval);
+             }
+         }
+       t = build1 (RETURN_EXPR, void_type_node, retval);
+      }
+      break;
+
+    case GIMPLE_ASM:
+      {
+       size_t i, n;
+       tree out, in, cl;
+       const char *s;
+
+       out = NULL_TREE;
+       n = gimple_asm_noutputs (stmt);
+       if (n > 0)
+         {
+           t = out = gimple_asm_output_op (stmt, 0);
+           for (i = 1; i < n; i++)
+             {
+               TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
+               t = gimple_asm_output_op (stmt, i);
+             }
+         }
+
+       in = NULL_TREE;
+       n = gimple_asm_ninputs (stmt);
+       if (n > 0)
+         {
+           t = in = gimple_asm_input_op (stmt, 0);
+           for (i = 1; i < n; i++)
+             {
+               TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
+               t = gimple_asm_input_op (stmt, i);
+             }
+         }
+
+       cl = NULL_TREE;
+       n = gimple_asm_nclobbers (stmt);
+       if (n > 0)
+         {
+           t = cl = gimple_asm_clobber_op (stmt, 0);
+           for (i = 1; i < n; i++)
+             {
+               TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
+               t = gimple_asm_clobber_op (stmt, i);
+             }
+         }
+
+       s = gimple_asm_string (stmt);
+       t = build4 (ASM_EXPR, void_type_node, build_string (strlen (s), s),
+                   out, in, cl);
+        ASM_VOLATILE_P (t) = gimple_asm_volatile_p (stmt);
+        ASM_INPUT_P (t) = gimple_asm_input_p (stmt);
+      }
+    break;
+
+    case GIMPLE_CALL:
+      {
+       size_t i;
+        tree fn;
+       tree_ann_common_t ann;
+        
+       t = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
+
+        fn = gimple_call_fn (stmt);
+        if (TREE_CODE (fn) == FUNCTION_DECL)
+          CALL_EXPR_FN (t) = build1 (ADDR_EXPR,
+                                     build_pointer_type (TREE_TYPE (fn)),
+                                     fn);
+        else
+          CALL_EXPR_FN (t) = fn;
+        
+        TREE_TYPE (t) = gimple_call_return_type (stmt);
+
+       CALL_EXPR_STATIC_CHAIN (t) = gimple_call_chain (stmt);
+
+       for (i = 0; i < gimple_call_num_args (stmt); i++)
+         CALL_EXPR_ARG (t, i) = gimple_call_arg (stmt, i);
+
+       if (!(gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE)))
+         TREE_SIDE_EFFECTS (t) = 1;
+
+       if (gimple_call_flags (stmt) & ECF_NOTHROW)
+         TREE_NOTHROW (t) = 1;
+
+        CALL_EXPR_TAILCALL (t) = gimple_call_tail_p (stmt);
+        CALL_EXPR_RETURN_SLOT_OPT (t) = gimple_call_return_slot_opt_p (stmt);
+        CALL_FROM_THUNK_P (t) = gimple_call_from_thunk_p (stmt);
+        CALL_CANNOT_INLINE_P (t) = gimple_call_cannot_inline_p (stmt);
+        CALL_EXPR_VA_ARG_PACK (t) = gimple_call_va_arg_pack_p (stmt);
+
+        /* If the call has a LHS then create a MODIFY_EXPR to hold it.  */
+       {
+         tree lhs = gimple_call_lhs (stmt);
+
+         if (lhs)
+           t = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, t);
+       }
+
+        /* Record the original call statement, as it may be used
+           to retrieve profile information during expansion.  */
+       if (TREE_CODE (fn) == FUNCTION_DECL && DECL_BUILT_IN (fn))
+         {
+           ann = get_tree_common_ann (t);
+           ann->stmt = stmt;
+         }
+      }
+    break;
+
+    case GIMPLE_SWITCH:
+      {
+       tree label_vec;
+       size_t i;
+       tree elt = gimple_switch_label (stmt, 0);
+
+       label_vec = make_tree_vec (gimple_switch_num_labels (stmt));
+
+       if (!CASE_LOW (elt) && !CASE_HIGH (elt))
+         {
+           for (i = 1; i < gimple_switch_num_labels (stmt); i++)
+             TREE_VEC_ELT (label_vec, i - 1) = gimple_switch_label (stmt, i);
+
+           /* The default case in a SWITCH_EXPR must be at the end of
+              the label vector.  */
+           TREE_VEC_ELT (label_vec, i - 1) = gimple_switch_label (stmt, 0);
+         }
+       else
+         {
+           for (i = 0; i < gimple_switch_num_labels (stmt); i++)
+             TREE_VEC_ELT (label_vec, i) = gimple_switch_label (stmt, i);
+         }
+
+       t = build3 (SWITCH_EXPR, void_type_node, gimple_switch_index (stmt),
+                   NULL, label_vec);
+      }
+    break;
+
+    case GIMPLE_NOP:
+    case GIMPLE_PREDICT:
+      t = build1 (NOP_EXPR, void_type_node, size_zero_node);
+      break;
+
+    case GIMPLE_RESX:
+      t = build_resx (gimple_resx_region (stmt));
+      break;
+       
+    default:
+      if (errorcount == 0)
+       {
+         error ("Unrecognized GIMPLE statement during RTL expansion");
+         print_gimple_stmt (stderr, stmt, 4, 0);
+         gcc_unreachable ();
+       }
+      else
+       {
+         /* Ignore any bad gimple codes if we're going to die anyhow,
+            so we can at least set TREE_ASM_WRITTEN and have the rest
+            of compilation advance without sudden ICE death.  */
+         t = build1 (NOP_EXPR, void_type_node, size_zero_node);
+         break;
+       }
+    }
+
+  /* If STMT is inside an exception region, record it in the generated
+     expression.  */
+  rn = lookup_stmt_eh_region (stmt);
+  if (rn >= 0)
+    {
+      tree call = get_call_expr_in (t);
+
+      ann = get_tree_common_ann (t);
+      ann->rn = rn;
+      
+      /* For a CALL_EXPR on the RHS of an assignment, calls.c looks up
+        the CALL_EXPR, not the assignment statement, for the EH region number.  */
+      if (call && call != t)
+       {
+         ann = get_tree_common_ann (call);
+         ann->rn = rn;
+       }
+    }
+
+  /* Set EXPR_LOCATION in all the embedded expressions.  */
+  loc = gimple_location (stmt);
+  walk_tree (&t, set_expr_location_r, (void *) &loc, NULL);
+
+  TREE_BLOCK (t) = gimple_block (stmt);
+
+  return t;
+}
+
+
+/* Release back to GC memory allocated by gimple_to_tree.  */
+
+static void
+release_stmt_tree (gimple stmt, tree stmt_tree)
+{
+  tree_ann_common_t ann;
+
+  switch (gimple_code (stmt))
+    {
+    case GIMPLE_ASSIGN:
+      if (get_gimple_rhs_class (gimple_expr_code (stmt)) != GIMPLE_SINGLE_RHS)
+       ggc_free (TREE_OPERAND (stmt_tree, 1));
+      break;
+    case GIMPLE_COND:
+      ggc_free (COND_EXPR_COND (stmt_tree));
+      break;
+    case GIMPLE_RETURN:
+      if (TREE_OPERAND (stmt_tree, 0)
+         && TREE_CODE (TREE_OPERAND (stmt_tree, 0)) == MODIFY_EXPR)
+       ggc_free (TREE_OPERAND (stmt_tree, 0));
+      break;
+    case GIMPLE_CALL:
+      if (gimple_call_lhs (stmt))
+       {
+         if (TREE_CODE (gimple_call_fn (stmt)) == FUNCTION_DECL)
+           ggc_free (CALL_EXPR_FN (TREE_OPERAND (stmt_tree, 1)));
+         ann = tree_common_ann (TREE_OPERAND (stmt_tree, 1));
+         if (ann)
+           ggc_free (ann);
+         ggc_free (TREE_OPERAND (stmt_tree, 1));
+       }
+      else if (TREE_CODE (gimple_call_fn (stmt)) == FUNCTION_DECL)
+       ggc_free (CALL_EXPR_FN (stmt_tree));
+      break;
+    default:
+      break;
+    }
+  ann = tree_common_ann (stmt_tree);
+  if (ann)
+    ggc_free (ann);
+  ggc_free (stmt_tree);
+}
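
The intended usage pattern, distilled from the expander changes further below (a sketch, not additional code in this patch):

    /* Illustrative only: expand one tuple through a temporary GENERIC tree,
       then hand the temporary back to the GC.  */
    tree stmt_tree = gimple_to_tree (stmt);
    expand_expr_stmt (stmt_tree);
    release_stmt_tree (stmt, stmt_tree);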
+
+
 /* Verify that there is exactly single jump instruction since last and attach
    REG_BR_PROB note specifying probability.
    ??? We really ought to pass the probability down to RTL expanders and let it
@@ -1181,12 +1527,12 @@ expand_used_vars (void)
    generated for STMT should have been appended.  */
 
 static void
-maybe_dump_rtl_for_tree_stmt (tree stmt, rtx since)
+maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
 {
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
       fprintf (dump_file, "\n;; ");
-      print_generic_expr (dump_file, stmt, TDF_SLIM);
+      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
       fprintf (dump_file, "\n");
 
       print_rtl (dump_file, since ? NEXT_INSN (since) : since);
@@ -1200,10 +1546,11 @@ static struct pointer_map_t *lab_rtx_for_bb;
 /* Returns the label_rtx expression for a label starting basic block BB.  */
 
 static rtx
-label_rtx_for_bb (basic_block bb)
+label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
 {
-  tree_stmt_iterator tsi;
-  tree lab, lab_stmt;
+  gimple_stmt_iterator gsi;
+  tree lab;
+  gimple lab_stmt;
   void **elt;
 
   if (bb->flags & BB_RTL)
@@ -1215,13 +1562,13 @@ label_rtx_for_bb (basic_block bb)
 
   /* Find the tree label if it is present.  */
      
-  for (tsi = tsi_start (bb_stmt_list (bb)); !tsi_end_p (tsi); tsi_next (&tsi))
+  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
     {
-      lab_stmt = tsi_stmt (tsi);
-      if (TREE_CODE (lab_stmt) != LABEL_EXPR)
+      lab_stmt = gsi_stmt (gsi);
+      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
        break;
 
-      lab = LABEL_EXPR_LABEL (lab_stmt);
+      lab = gimple_label_label (lab_stmt);
       if (DECL_NONLOCAL (lab))
        break;
 
@@ -1233,29 +1580,28 @@ label_rtx_for_bb (basic_block bb)
   return (rtx) *elt;
 }
 
-/* A subroutine of expand_gimple_basic_block.  Expand one COND_EXPR.
+
+/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
    Returns a new basic block if we've terminated the current basic
    block and created a new one.  */
 
 static basic_block
-expand_gimple_cond_expr (basic_block bb, tree stmt)
+expand_gimple_cond (basic_block bb, gimple stmt)
 {
   basic_block new_bb, dest;
   edge new_edge;
   edge true_edge;
   edge false_edge;
-  tree pred = COND_EXPR_COND (stmt);
+  tree pred = gimple_cond_pred_to_tree (stmt);
   rtx last2, last;
 
-  gcc_assert (COND_EXPR_THEN (stmt) == NULL_TREE);
-  gcc_assert (COND_EXPR_ELSE (stmt) == NULL_TREE);
   last2 = last = get_last_insn ();
 
   extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
-  if (EXPR_LOCUS (stmt))
+  if (gimple_has_location (stmt))
     {
-      set_curr_insn_source_location (*(EXPR_LOCUS (stmt)));
-      set_curr_insn_block (TREE_BLOCK (stmt));
+      set_curr_insn_source_location (gimple_location (stmt));
+      set_curr_insn_block (gimple_block (stmt));
     }
 
   /* These flags have no purpose in RTL land.  */
@@ -1268,20 +1614,22 @@ expand_gimple_cond_expr (basic_block bb, tree stmt)
     {
       jumpif (pred, label_rtx_for_bb (true_edge->dest));
       add_reg_br_prob_note (last, true_edge->probability);
-      maybe_dump_rtl_for_tree_stmt (stmt, last);
+      maybe_dump_rtl_for_gimple_stmt (stmt, last);
       if (true_edge->goto_locus)
        set_curr_insn_source_location (true_edge->goto_locus);
       false_edge->flags |= EDGE_FALLTHRU;
+      ggc_free (pred);
       return NULL;
     }
   if (true_edge->dest == bb->next_bb)
     {
       jumpifnot (pred, label_rtx_for_bb (false_edge->dest));
       add_reg_br_prob_note (last, false_edge->probability);
-      maybe_dump_rtl_for_tree_stmt (stmt, last);
+      maybe_dump_rtl_for_gimple_stmt (stmt, last);
       if (false_edge->goto_locus)
        set_curr_insn_source_location (false_edge->goto_locus);
       true_edge->flags |= EDGE_FALLTHRU;
+      ggc_free (pred);
       return NULL;
     }
 
@@ -1308,15 +1656,16 @@ expand_gimple_cond_expr (basic_block bb, tree stmt)
     BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
   update_bb_for_insn (new_bb);
 
-  maybe_dump_rtl_for_tree_stmt (stmt, last2);
+  maybe_dump_rtl_for_gimple_stmt (stmt, last2);
 
   if (false_edge->goto_locus)
     set_curr_insn_source_location (false_edge->goto_locus);
 
+  ggc_free (pred);
   return new_bb;
 }
 
-/* A subroutine of expand_gimple_basic_block.  Expand one CALL_EXPR
+/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_CALL
    that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
    generated a tail call (something that might be denied by the ABI
    rules governing the call; see calls.c).
@@ -1327,23 +1676,26 @@ expand_gimple_cond_expr (basic_block bb, tree stmt)
    tailcall) and the normal result happens via a sqrt instruction.  */
 
 static basic_block
-expand_gimple_tailcall (basic_block bb, tree stmt, bool *can_fallthru)
+expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
 {
   rtx last2, last;
   edge e;
   edge_iterator ei;
   int probability;
   gcov_type count;
+  tree stmt_tree = gimple_to_tree (stmt);
 
   last2 = last = get_last_insn ();
 
-  expand_expr_stmt (stmt);
+  expand_expr_stmt (stmt_tree);
+
+  release_stmt_tree (stmt, stmt_tree);
 
   for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
     if (CALL_P (last) && SIBLING_CALL_P (last))
       goto found;
 
-  maybe_dump_rtl_for_tree_stmt (stmt, last2);
+  maybe_dump_rtl_for_gimple_stmt (stmt, last2);
 
   *can_fallthru = true;
   return NULL;
@@ -1418,7 +1770,7 @@ expand_gimple_tailcall (basic_block bb, tree stmt, bool *can_fallthru)
        BB_END (bb) = PREV_INSN (last);
     }
 
-  maybe_dump_rtl_for_tree_stmt (stmt, last2);
+  maybe_dump_rtl_for_gimple_stmt (stmt, last2);
 
   return bb;
 }
@@ -1428,50 +1780,52 @@ expand_gimple_tailcall (basic_block bb, tree stmt, bool *can_fallthru)
 static basic_block
 expand_gimple_basic_block (basic_block bb)
 {
-  tree_stmt_iterator tsi;
-  tree stmts = bb_stmt_list (bb);
-  tree stmt = NULL;
+  gimple_stmt_iterator gsi;
+  gimple_seq stmts;
+  gimple stmt = NULL;
   rtx note, last;
   edge e;
   edge_iterator ei;
   void **elt;
 
   if (dump_file)
-    {
-      fprintf (dump_file,
-              "\n;; Generating RTL for tree basic block %d\n",
-              bb->index);
-    }
-
-  bb->il.tree = NULL;
+    fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
+            bb->index);
+
+  /* Note that since we are now transitioning from GIMPLE to RTL, we
+     cannot use the gsi_*_bb() routines because they expect the basic
+     block to be in GIMPLE, instead of RTL.  Therefore, we need to
+     access the BB sequence directly.  */
+  stmts = bb_seq (bb);
+  bb->il.gimple = NULL;
   init_rtl_bb_info (bb);
   bb->flags |= BB_RTL;
 
   /* Remove the RETURN_EXPR if we may fall though to the exit
      instead.  */
-  tsi = tsi_last (stmts);
-  if (!tsi_end_p (tsi)
-      && TREE_CODE (tsi_stmt (tsi)) == RETURN_EXPR)
+  gsi = gsi_last (stmts);
+  if (!gsi_end_p (gsi)
+      && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
     {
-      tree ret_stmt = tsi_stmt (tsi);
+      gimple ret_stmt = gsi_stmt (gsi);
 
       gcc_assert (single_succ_p (bb));
       gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
 
       if (bb->next_bb == EXIT_BLOCK_PTR
-         && !TREE_OPERAND (ret_stmt, 0))
+         && !gimple_return_retval (ret_stmt))
        {
-         tsi_delink (&tsi);
+         gsi_remove (&gsi, false);
          single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
        }
     }
 
-  tsi = tsi_start (stmts);
-  if (!tsi_end_p (tsi))
+  gsi = gsi_start (stmts);
+  if (!gsi_end_p (gsi))
     {
-      stmt = tsi_stmt (tsi);
-      if (TREE_CODE (stmt) != LABEL_EXPR)
-       stmt = NULL_TREE;
+      stmt = gsi_stmt (gsi);
+      if (gimple_code (stmt) != GIMPLE_LABEL)
+       stmt = NULL;
     }
 
   elt = pointer_map_contains (lab_rtx_for_bb, bb);
@@ -1482,8 +1836,10 @@ expand_gimple_basic_block (basic_block bb)
 
       if (stmt)
        {
-         expand_expr_stmt (stmt);
-         tsi_next (&tsi);
+         tree stmt_tree = gimple_to_tree (stmt);
+         expand_expr_stmt (stmt_tree);
+         release_stmt_tree (stmt, stmt_tree);
+         gsi_next (&gsi);
        }
 
       if (elt)
@@ -1496,7 +1852,7 @@ expand_gimple_basic_block (basic_block bb)
        BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
       note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
 
-      maybe_dump_rtl_for_tree_stmt (stmt, last);
+      maybe_dump_rtl_for_gimple_stmt (stmt, last);
     }
   else
     note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
@@ -1517,36 +1873,22 @@ expand_gimple_basic_block (basic_block bb)
        ei_next (&ei);
     }
 
-  for (; !tsi_end_p (tsi); tsi_next (&tsi))
+  for (; !gsi_end_p (gsi); gsi_next (&gsi))
     {
-      tree stmt = tsi_stmt (tsi);
+      gimple stmt = gsi_stmt (gsi);
       basic_block new_bb;
 
-      if (!stmt)
-       continue;
-
       /* Expand this statement, then evaluate the resulting RTL and
         fixup the CFG accordingly.  */
-      if (TREE_CODE (stmt) == COND_EXPR)
+      if (gimple_code (stmt) == GIMPLE_COND)
        {
-         new_bb = expand_gimple_cond_expr (bb, stmt);
+         new_bb = expand_gimple_cond (bb, stmt);
          if (new_bb)
            return new_bb;
        }
       else
        {
-         tree call = get_call_expr_in (stmt);
-         int region;
-         /* For the benefit of calls.c, converting all this to rtl,
-            we need to record the call expression, not just the outer
-            modify statement.  */
-         if (call && call != stmt)
-           {
-             if ((region = lookup_stmt_eh_region (stmt)) > 0)
-               add_stmt_to_eh_region (call, region);
-             gimple_duplicate_stmt_histograms (cfun, call, cfun, stmt);
-           }
-         if (call && CALL_EXPR_TAILCALL (call))
+         if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
            {
              bool can_fallthru;
              new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
@@ -1560,9 +1902,11 @@ expand_gimple_basic_block (basic_block bb)
            }
          else
            {
+             tree stmt_tree = gimple_to_tree (stmt);
              last = get_last_insn ();
-             expand_expr_stmt (stmt);
-             maybe_dump_rtl_for_tree_stmt (stmt, last);
+             expand_expr_stmt (stmt_tree);
+             maybe_dump_rtl_for_gimple_stmt (stmt, last);
+             release_stmt_tree (stmt, stmt_tree);
            }
        }
     }
@@ -1621,7 +1965,7 @@ construct_init_block (void)
      otherwise we have to jump into proper target.  */
   if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
     {
-      tree label = tree_block_label (e->dest);
+      tree label = gimple_block_label (e->dest);
 
       emit_jump (label_rtx (label));
       flags = 0;
@@ -1779,14 +2123,14 @@ static void
 discover_nonconstant_array_refs (void)
 {
   basic_block bb;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
 
   FOR_EACH_BB (bb)
-    {
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
-       walk_tree (bsi_stmt_ptr (bsi), discover_nonconstant_array_refs_r,
-                  NULL , NULL);
-    }
+    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+      {
+       gimple stmt = gsi_stmt (gsi);
+       walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
+      }
 }
 
 /* Translate the intermediate representation contained in the CFG
@@ -1799,7 +2143,7 @@ discover_nonconstant_array_refs (void)
    the expansion.  */
 
 static unsigned int
-tree_expand_cfg (void)
+gimple_expand_cfg (void)
 {
   basic_block bb, init_block;
   sbitmap blocks;
@@ -1949,7 +2293,7 @@ struct rtl_opt_pass pass_expand =
   RTL_PASS,
   "expand",                            /* name */
   NULL,                                 /* gate */
-  tree_expand_cfg,                     /* execute */
+  gimple_expand_cfg,                   /* execute */
   NULL,                                 /* sub */
   NULL,                                 /* next */
   0,                                    /* static_pass_number */
index 5815a7e..00d7151 100644 (file)
@@ -51,9 +51,9 @@ cfg_layout_rtl_register_cfg_hooks (void)
 /* Initialization of functions specific to the tree IR.  */
 
 void
-tree_register_cfg_hooks (void)
+gimple_register_cfg_hooks (void)
 {
-  cfg_hooks = &tree_cfg_hooks;
+  cfg_hooks = &gimple_cfg_hooks;
 }
 
 /* Returns current ir type.  */
@@ -61,7 +61,7 @@ tree_register_cfg_hooks (void)
 enum ir_type
 current_ir_type (void)
 {
-  if (cfg_hooks == &tree_cfg_hooks)
+  if (cfg_hooks == &gimple_cfg_hooks)
     return IR_GIMPLE;
   else if (cfg_hooks == &rtl_cfg_hooks)
     return IR_RTL_CFGRTL;
@@ -291,7 +291,7 @@ dump_bb (basic_block bb, FILE *outf, int indent)
   putc ('\n', outf);
 
   if (cfg_hooks->dump_bb)
-    cfg_hooks->dump_bb (bb, outf, indent);
+    cfg_hooks->dump_bb (bb, outf, indent, 0);
 }
 
 /* Redirect edge E to the given basic block DEST and update underlying program
index e581d9c..537c05f 100644 (file)
@@ -28,7 +28,7 @@ struct cfg_hooks
 
   /* Debugging.  */
   int (*verify_flow_info) (void);
-  void (*dump_bb) (basic_block, FILE *, int);
+  void (*dump_bb) (basic_block, FILE *, int, int);
 
   /* Basic CFG manipulation.  */
 
@@ -181,7 +181,7 @@ extern void lv_add_condition_to_bb (basic_block, basic_block, basic_block,
                                    void *);
 
 /* Hooks containers.  */
-extern struct cfg_hooks tree_cfg_hooks;
+extern struct cfg_hooks gimple_cfg_hooks;
 extern struct cfg_hooks rtl_cfg_hooks;
 extern struct cfg_hooks cfg_layout_rtl_cfg_hooks;
 
@@ -189,6 +189,6 @@ extern struct cfg_hooks cfg_layout_rtl_cfg_hooks;
 extern enum ir_type current_ir_type (void);
 extern void rtl_register_cfg_hooks (void);
 extern void cfg_layout_rtl_register_cfg_hooks (void);
-extern void tree_register_cfg_hooks (void);
+extern void gimple_register_cfg_hooks (void);
 
 #endif  /* GCC_CFGHOOKS_H */
index f565708..4c9bbf0 100644 (file)
@@ -563,11 +563,13 @@ find_subloop_latch_edge_by_profile (VEC (edge, heap) *latches)
    another edge.  */
 
 static edge
-find_subloop_latch_edge_by_ivs (struct loop *loop, VEC (edge, heap) *latches)
+find_subloop_latch_edge_by_ivs (struct loop *loop ATTRIBUTE_UNUSED, VEC (edge, heap) *latches)
 {
   edge e, latch = VEC_index (edge, latches, 0);
   unsigned i;
-  tree phi, lop;
+  gimple phi;
+  gimple_stmt_iterator psi;
+  tree lop;
   basic_block bb;
 
   /* Find the candidate for the latch edge.  */
@@ -582,15 +584,16 @@ find_subloop_latch_edge_by_ivs (struct loop *loop, VEC (edge, heap) *latches)
 
   /* Check for a phi node that would deny that this is a latch edge of
      a subloop.  */
-  for (phi = phi_nodes (loop->header); phi; phi = PHI_CHAIN (phi))
+  for (psi = gsi_start_phis (loop->header); !gsi_end_p (psi); gsi_next (&psi))
     {
+      phi = gsi_stmt (psi);
       lop = PHI_ARG_DEF_FROM_EDGE (phi, latch);
 
       /* Ignore the values that are not changed inside the subloop.  */
       if (TREE_CODE (lop) != SSA_NAME
          || SSA_NAME_DEF_STMT (lop) == phi)
        continue;
-      bb = bb_for_stmt (SSA_NAME_DEF_STMT (lop));
+      bb = gimple_bb (SSA_NAME_DEF_STMT (lop));
       if (!bb || !flow_bb_inside_loop_p (loop, bb))
        continue;
 
index 056e8f0..d21d50b 100644 (file)
@@ -49,7 +49,7 @@ struct lpt_decision GTY (())
 struct nb_iter_bound GTY ((chain_next ("%h.next")))
 {
   /* The statement STMT is executed at most ...  */
-  tree stmt;
+  gimple stmt;
 
   /* ... BOUND + 1 times (BOUND must be an unsigned constant).
      The + 1 is added for the following reasons:
index 240455b..f9e3e17 100644 (file)
@@ -76,7 +76,7 @@ static void rtl_delete_block (basic_block);
 static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
 static edge rtl_redirect_edge_and_branch (edge, basic_block);
 static basic_block rtl_split_block (basic_block, void *);
-static void rtl_dump_bb (basic_block, FILE *, int);
+static void rtl_dump_bb (basic_block, FILE *, int, int);
 static int rtl_verify_flow_info_1 (void);
 static void rtl_make_forwarder_block (edge);
 \f
@@ -1510,7 +1510,7 @@ commit_edge_insertions (void)
    at start and end).  */
 
 static void
-rtl_dump_bb (basic_block bb, FILE *outf, int indent)
+rtl_dump_bb (basic_block bb, FILE *outf, int indent, int flags ATTRIBUTE_UNUSED)
 {
   rtx insn;
   rtx last;
index b88ab68..51181cb 100644 (file)
@@ -81,7 +81,7 @@ The callgraph:
 #include "varray.h"
 #include "output.h"
 #include "intl.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "tree-dump.h"
 #include "tree-flow.h"
 
@@ -503,9 +503,12 @@ edge_eq (const void *x, const void *y)
   return ((const struct cgraph_edge *) x)->call_stmt == y;
 }
 
-/* Return callgraph edge representing CALL_EXPR statement.  */
+
+/* Return the callgraph edge representing the GIMPLE_CALL statement
+   CALL_STMT.  */
+
 struct cgraph_edge *
-cgraph_edge (struct cgraph_node *node, tree call_stmt)
+cgraph_edge (struct cgraph_node *node, gimple call_stmt)
 {
   struct cgraph_edge *e, *e2;
   int n = 0;
@@ -526,6 +529,7 @@ cgraph_edge (struct cgraph_node *node, tree call_stmt)
        break;
       n++;
     }
+
   if (n > 100)
     {
       node->call_site_hash = htab_create_ggc (120, edge_hash, edge_eq, NULL);
@@ -540,13 +544,15 @@ cgraph_edge (struct cgraph_node *node, tree call_stmt)
          *slot = e2;
        }
     }
+
   return e;
 }
 
-/* Change call_stmt of edge E to NEW_STMT.  */
+
+/* Change field call_stmt of edge E to NEW_STMT.  */
 
 void
-cgraph_set_call_stmt (struct cgraph_edge *e, tree new_stmt)
+cgraph_set_call_stmt (struct cgraph_edge *e, gimple new_stmt)
 {
   if (e->caller->call_site_hash)
     {
@@ -571,7 +577,7 @@ cgraph_set_call_stmt (struct cgraph_edge *e, tree new_stmt)
 
 struct cgraph_edge *
 cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
-                   tree call_stmt, gcov_type count, int freq, int nest)
+                   gimple call_stmt, gcov_type count, int freq, int nest)
 {
   struct cgraph_edge *edge = GGC_NEW (struct cgraph_edge);
 #ifdef ENABLE_CHECKING
@@ -581,9 +587,9 @@ cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
     gcc_assert (e->call_stmt != call_stmt);
 #endif
 
-  gcc_assert (get_call_expr_in (call_stmt));
+  gcc_assert (is_gimple_call (call_stmt));
 
-  if (!DECL_SAVED_TREE (callee->decl))
+  if (!gimple_body (callee->decl))
     edge->inline_failed = N_("function body not available");
   else if (callee->local.redefined_extern_inline)
     edge->inline_failed = N_("redefined extern inline functions are not "
@@ -691,14 +697,15 @@ cgraph_redirect_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
   e->callee = n;
 }
 
-/* Update or remove corresponding cgraph edge if a call OLD_CALL
-   in OLD_STMT changed into NEW_STMT.  */
+
+/* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
+   OLD_STMT changed into NEW_STMT.  */
 
 void
-cgraph_update_edges_for_call_stmt (tree old_stmt, tree old_call,
-                                  tree new_stmt)
+cgraph_update_edges_for_call_stmt (gimple old_stmt, gimple new_stmt)
 {
-  tree new_call = get_call_expr_in (new_stmt);
+  tree new_call = (is_gimple_call (new_stmt)) ? gimple_call_fn (new_stmt) : 0;
+  tree old_call = (is_gimple_call (old_stmt)) ? gimple_call_fn (old_stmt) : 0;
   struct cgraph_node *node = cgraph_node (cfun->decl);
 
   if (old_call != new_call)
@@ -716,7 +723,7 @@ cgraph_update_edges_for_call_stmt (tree old_stmt, tree old_call,
          cgraph_remove_edge (e);
          if (new_call)
            {
-             new_decl = get_callee_fndecl (new_call);
+             new_decl = gimple_call_fndecl (new_stmt);
              if (new_decl)
                {
                  ne = cgraph_create_edge (node, cgraph_node (new_decl),
@@ -736,6 +743,7 @@ cgraph_update_edges_for_call_stmt (tree old_stmt, tree old_call,
     }
 }
 
+
 /* Remove all callees from the node.  */
 
 void
@@ -791,6 +799,7 @@ cgraph_release_function_body (struct cgraph_node *node)
       delete_tree_ssa ();
       delete_tree_cfg_annotations ();
       cfun->eh = NULL;
+      gimple_set_body (node->decl, NULL);
       current_function_decl = old_decl;
       pop_cfun();
     }
@@ -1006,8 +1015,8 @@ dump_cgraph_node (FILE *f, struct cgraph_node *node)
     fprintf (f, " needed");
   else if (node->reachable)
     fprintf (f, " reachable");
-  if (DECL_SAVED_TREE (node->decl))
-    fprintf (f, " tree");
+  if (gimple_body (node->decl))
+    fprintf (f, " body");
   if (node->output)
     fprintf (f, " output");
   if (node->local.local)
@@ -1147,7 +1156,7 @@ cgraph_function_possibly_inlined_p (tree decl)
 /* Create clone of E in the node N represented by CALL_EXPR the callgraph.  */
 struct cgraph_edge *
 cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
-                  tree call_stmt, gcov_type count_scale, int freq_scale,
+                  gimple call_stmt, gcov_type count_scale, int freq_scale,
                   int loop_nest, bool update_original)
 {
   struct cgraph_edge *new;
@@ -1178,8 +1187,8 @@ cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
    function's profile to reflect the fact that part of execution is handled
    by node.  */
 struct cgraph_node *
-cgraph_clone_node (struct cgraph_node *n, gcov_type count, int freq, int loop_nest,
-                  bool update_original)
+cgraph_clone_node (struct cgraph_node *n, gcov_type count, int freq,
+                  int loop_nest, bool update_original)
 {
   struct cgraph_node *new = cgraph_create_node ();
   struct cgraph_edge *e;
@@ -1335,8 +1344,8 @@ cgraph_add_new_function (tree fndecl, bool lowered)
          {
            push_cfun (DECL_STRUCT_FUNCTION (fndecl));
            current_function_decl = fndecl;
-           tree_register_cfg_hooks ();
-            tree_lowering_passes (fndecl);
+           gimple_register_cfg_hooks ();
+           tree_lowering_passes (fndecl);
            bitmap_obstack_initialize (NULL);
            if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
              execute_pass_list (pass_early_local_passes.pass.sub);
@@ -1357,7 +1366,7 @@ cgraph_add_new_function (tree fndecl, bool lowered)
           to expansion.  */
        push_cfun (DECL_STRUCT_FUNCTION (fndecl));
        current_function_decl = fndecl;
-       tree_register_cfg_hooks ();
+       gimple_register_cfg_hooks ();
        if (!lowered)
           tree_lowering_passes (fndecl);
        bitmap_obstack_initialize (NULL);
index f36f6f5..7a19dd6 100644 (file)
@@ -91,7 +91,7 @@ struct cgraph_local_info GTY(())
 };
 
 /* Information about the function that needs to be computed globally
-   once compilation is finished.  Available only with -funit-at-time.  */
+   once compilation is finished.  Available only with -funit-at-a-time.  */
 
 struct cgraph_global_info GTY(())
 {
@@ -100,7 +100,8 @@ struct cgraph_global_info GTY(())
   /* Expected offset of the stack frame of inlined function.  */
   HOST_WIDE_INT stack_frame_offset;
 
-  /* For inline clones this points to the function they will be inlined into.  */
+  /* For inline clones this points to the function they will be
+     inlined into.  */
   struct cgraph_node *inlined_to;
 
   /* Estimated size of the function after inlining.  */
@@ -196,7 +197,7 @@ struct cgraph_edge GTY((chain_next ("%h.next_caller"), chain_prev ("%h.prev_call
   struct cgraph_edge *next_caller;
   struct cgraph_edge *prev_callee;
   struct cgraph_edge *next_callee;
-  tree call_stmt;
+  gimple call_stmt;
   PTR GTY ((skip (""))) aux;
   /* When NULL, inline this call.  When non-NULL, points to the explanation
      why function was not inlined.  */
@@ -306,19 +307,19 @@ void cgraph_release_function_body (struct cgraph_node *);
 void cgraph_node_remove_callees (struct cgraph_node *node);
 struct cgraph_edge *cgraph_create_edge (struct cgraph_node *,
                                        struct cgraph_node *,
-                                       tree, gcov_type, int, int);
+                                       gimple, gcov_type, int, int);
 struct cgraph_node *cgraph_node (tree);
 struct cgraph_node *cgraph_node_for_asm (tree asmname);
-struct cgraph_edge *cgraph_edge (struct cgraph_node *, tree);
-void cgraph_set_call_stmt (struct cgraph_edge *, tree);
-void cgraph_update_edges_for_call_stmt (tree, tree, tree);
+struct cgraph_edge *cgraph_edge (struct cgraph_node *, gimple);
+void cgraph_set_call_stmt (struct cgraph_edge *, gimple);
+void cgraph_update_edges_for_call_stmt (gimple, gimple);
 struct cgraph_local_info *cgraph_local_info (tree);
 struct cgraph_global_info *cgraph_global_info (tree);
 struct cgraph_rtl_info *cgraph_rtl_info (tree);
 const char * cgraph_node_name (struct cgraph_node *);
 struct cgraph_edge * cgraph_clone_edge (struct cgraph_edge *,
                                        struct cgraph_node *,
-                                       tree, gcov_type, int, int, bool);
+                                       gimple, gcov_type, int, int, bool);
 struct cgraph_node * cgraph_clone_node (struct cgraph_node *, gcov_type, int,
                                        int, bool);
 
index 60b6911..958fed7 100644 (file)
@@ -1,5 +1,6 @@
 /* Callgraph construction.
-   Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
+   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008
+   Free Software Foundation, Inc.
    Contributed by Jan Hubicka
 
 This file is part of GCC.
@@ -28,7 +29,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "pointer-set.h"
 #include "cgraph.h"
 #include "intl.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "tree-pass.h"
 
 /* Walk tree and record all calls and references to functions/variables.
@@ -60,18 +61,6 @@ record_reference (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
        cgraph_mark_needed_node (cgraph_node (decl));
       break;
 
-    case OMP_PARALLEL:
-      if (OMP_PARALLEL_FN (*tp))
-       cgraph_mark_needed_node (cgraph_node (OMP_PARALLEL_FN (*tp)));
-      break;
-
-    case OMP_TASK:
-      if (OMP_TASK_FN (*tp))
-       cgraph_mark_needed_node (cgraph_node (OMP_TASK_FN (*tp)));
-      if (OMP_TASK_COPYFN (*tp))
-       cgraph_mark_needed_node (cgraph_node (OMP_TASK_COPYFN (*tp)));
-      break;
-
     default:
       /* Save some cycles by not walking types and declaration as we
         won't find anything useful there anyway.  */
@@ -107,7 +96,7 @@ initialize_inline_failed (struct cgraph_node *node)
                           "considered for inlining");
       else if (!node->local.inlinable)
        e->inline_failed = N_("function not inlinable");
-      else if (CALL_STMT_CANNOT_INLINE_P (e->call_stmt))
+      else if (gimple_call_cannot_inline_p (e->call_stmt))
        e->inline_failed = N_("mismatched arguments");
       else
        e->inline_failed = N_("function not considered for inlining");
@@ -142,34 +131,54 @@ build_cgraph_edges (void)
   basic_block bb;
   struct cgraph_node *node = cgraph_node (current_function_decl);
   struct pointer_set_t *visited_nodes = pointer_set_create ();
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   tree step;
 
   /* Create the callgraph edges and record the nodes referenced by the function.
      body.  */
   FOR_EACH_BB (bb)
-    for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
       {
-       tree stmt = bsi_stmt (bsi);
-       tree call = get_call_expr_in (stmt);
+       gimple stmt = gsi_stmt (gsi);
        tree decl;
 
-       if (call && (decl = get_callee_fndecl (call)))
+       if (is_gimple_call (stmt) && (decl = gimple_call_fndecl (stmt)))
          {
-           int i;
-           int n = call_expr_nargs (call);
+           size_t i;
+           size_t n = gimple_call_num_args (stmt);
            cgraph_create_edge (node, cgraph_node (decl), stmt,
                                bb->count, compute_call_stmt_bb_frequency (bb),
                                bb->loop_depth);
            for (i = 0; i < n; i++)
-             walk_tree (&CALL_EXPR_ARG (call, i),
-                        record_reference, node, visited_nodes);
-           if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
-             walk_tree (&GIMPLE_STMT_OPERAND (stmt, 0),
-                        record_reference, node, visited_nodes);
+             walk_tree (gimple_call_arg_ptr (stmt, i), record_reference,
+                        node, visited_nodes);
+           if (gimple_call_lhs (stmt))
+             walk_tree (gimple_call_lhs_ptr (stmt), record_reference, node,
+                        visited_nodes);
          }
        else
-         walk_tree (bsi_stmt_ptr (bsi), record_reference, node, visited_nodes);
+         {
+           struct walk_stmt_info wi;
+           memset (&wi, 0, sizeof (wi));
+           wi.info = node;
+           wi.pset = visited_nodes;
+           walk_gimple_op (stmt, record_reference, &wi);
+           if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
+               && gimple_omp_parallel_child_fn (stmt))
+             {
+               tree fn = gimple_omp_parallel_child_fn (stmt);
+               cgraph_mark_needed_node (cgraph_node (fn));
+             }
+           if (gimple_code (stmt) == GIMPLE_OMP_TASK)
+             {
+               tree fn = gimple_omp_task_child_fn (stmt);
+               if (fn)
+                 cgraph_mark_needed_node (cgraph_node (fn));
+               fn = gimple_omp_task_copy_fn (stmt);
+               if (fn)
+                 cgraph_mark_needed_node (cgraph_node (fn));
+             }
+         }
       }
 
   /* Look for initializers of constant variables and private statics.  */
@@ -228,23 +237,23 @@ rebuild_cgraph_edges (void)
 {
   basic_block bb;
   struct cgraph_node *node = cgraph_node (current_function_decl);
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
 
   cgraph_node_remove_callees (node);
 
   node->count = ENTRY_BLOCK_PTR->count;
 
   FOR_EACH_BB (bb)
-    for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
       {
-       tree stmt = bsi_stmt (bsi);
-       tree call = get_call_expr_in (stmt);
+       gimple stmt = gsi_stmt (gsi);
        tree decl;
 
-       if (call && (decl = get_callee_fndecl (call)))
+       if (is_gimple_call (stmt) && (decl = gimple_call_fndecl (stmt)))
          cgraph_create_edge (node, cgraph_node (decl), stmt,
                              bb->count, compute_call_stmt_bb_frequency (bb),
                              bb->loop_depth);
+
       }
   initialize_inline_failed (node);
   gcc_assert (!node->global.inlined_to);
index 9f4f87c..48dd70b 100644 (file)
@@ -130,7 +130,8 @@ along with GCC; see the file COPYING3.  If not see
 #include "intl.h"
 #include "function.h"
 #include "ipa-prop.h"
-#include "tree-gimple.h"
+#include "gimple.h"
+#include "tree-iterator.h"
 #include "tree-pass.h"
 #include "output.h"
 
@@ -404,7 +405,7 @@ cgraph_process_new_functions (void)
             transformations that has been already performed on the whole
             cgraph but not on this function.  */
 
-         tree_register_cfg_hooks ();
+         gimple_register_cfg_hooks ();
          if (!node->analyzed)
            cgraph_analyze_function (node);
          push_cfun (DECL_STRUCT_FUNCTION (fndecl));
@@ -555,7 +556,7 @@ verify_cgraph_node (struct cgraph_node *node)
   struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
   struct function *saved_cfun = cfun;
   basic_block this_block;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   bool error_found = false;
 
   if (errorcount || sorrycount)
@@ -637,7 +638,8 @@ verify_cgraph_node (struct cgraph_node *node)
     }
 
   if (node->analyzed
-      && DECL_SAVED_TREE (node->decl) && !TREE_ASM_WRITTEN (node->decl)
+      && gimple_body (node->decl)
+      && !TREE_ASM_WRITTEN (node->decl)
       && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to))
     {
       if (this_cfun->cfg)
@@ -648,12 +650,13 @@ verify_cgraph_node (struct cgraph_node *node)
          /* Reach the trees by walking over the CFG, and note the
             enclosing basic-blocks in the call edges.  */
          FOR_EACH_BB_FN (this_block, this_cfun)
-           for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
+           for (gsi = gsi_start_bb (this_block);
+                 !gsi_end_p (gsi);
+                 gsi_next (&gsi))
              {
-               tree stmt = bsi_stmt (bsi);
-               tree call = get_call_expr_in (stmt);
+               gimple stmt = gsi_stmt (gsi);
                tree decl;
-               if (call && (decl = get_callee_fndecl (call)))
+               if (is_gimple_call (stmt) && (decl = gimple_call_fndecl (stmt)))
                  {
                    struct cgraph_edge *e = cgraph_edge (node, stmt);
                    if (e)
@@ -661,7 +664,7 @@ verify_cgraph_node (struct cgraph_node *node)
                        if (e->aux)
                          {
                            error ("shared call_stmt:");
-                           debug_generic_stmt (stmt);
+                           debug_gimple_stmt (stmt);
                            error_found = true;
                          }
                        if (e->callee->decl != cgraph_node (decl)->decl
@@ -677,7 +680,7 @@ verify_cgraph_node (struct cgraph_node *node)
                    else
                      {
                        error ("missing callgraph edge for call stmt:");
-                       debug_generic_stmt (stmt);
+                       debug_gimple_stmt (stmt);
                        error_found = true;
                      }
                  }
@@ -695,7 +698,7 @@ verify_cgraph_node (struct cgraph_node *node)
              error ("edge %s->%s has no corresponding call_stmt",
                     cgraph_node_name (e->caller),
                     cgraph_node_name (e->callee));
-             debug_generic_stmt (e->call_stmt);
+             debug_gimple_stmt (e->call_stmt);
              error_found = true;
            }
          e->aux = 0;
@@ -856,7 +859,7 @@ cgraph_analyze_functions (void)
     {
       fprintf (cgraph_dump_file, "Initial entry points:");
       for (node = cgraph_nodes; node != first_analyzed; node = node->next)
-       if (node->needed && DECL_SAVED_TREE (node->decl))
+       if (node->needed && gimple_body (node->decl))
          fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
       fprintf (cgraph_dump_file, "\n");
     }
@@ -878,14 +881,14 @@ cgraph_analyze_functions (void)
       /* ??? It is possible to create extern inline function and later using
         weak alias attribute to kill its body. See
         gcc.c-torture/compile/20011119-1.c  */
-      if (!DECL_SAVED_TREE (decl))
+      if (!DECL_STRUCT_FUNCTION (decl))
        {
          cgraph_reset_node (node);
          continue;
        }
 
       gcc_assert (!node->analyzed && node->reachable);
-      gcc_assert (DECL_SAVED_TREE (decl));
+      gcc_assert (gimple_body (decl));
 
       cgraph_analyze_function (node);
 
@@ -908,7 +911,7 @@ cgraph_analyze_functions (void)
     {
       fprintf (cgraph_dump_file, "Unit entry points:");
       for (node = cgraph_nodes; node != first_analyzed; node = node->next)
-       if (node->needed && DECL_SAVED_TREE (node->decl))
+       if (node->needed && gimple_body (node->decl))
          fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
       fprintf (cgraph_dump_file, "\n\nInitial ");
       dump_cgraph (cgraph_dump_file);
@@ -922,10 +925,10 @@ cgraph_analyze_functions (void)
       tree decl = node->decl;
       next = node->next;
 
-      if (node->local.finalized && !DECL_SAVED_TREE (decl))
+      if (node->local.finalized && !gimple_body (decl))
        cgraph_reset_node (node);
 
-      if (!node->reachable && DECL_SAVED_TREE (decl))
+      if (!node->reachable && gimple_body (decl))
        {
          if (cgraph_dump_file)
            fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
@@ -934,7 +937,7 @@ cgraph_analyze_functions (void)
        }
       else
        node->next_needed = NULL;
-      gcc_assert (!node->local.finalized || DECL_SAVED_TREE (decl));
+      gcc_assert (!node->local.finalized || gimple_body (decl));
       gcc_assert (node->analyzed == node->local.finalized);
     }
   if (cgraph_dump_file)
@@ -987,7 +990,7 @@ cgraph_mark_functions_to_output (void)
       /* We need to output all local functions that are used and not
         always inlined, as well as those that are reachable from
         outside the current compilation unit.  */
-      if (DECL_SAVED_TREE (decl)
+      if (gimple_body (decl)
          && !node->global.inlined_to
          && (node->needed
              || (e && node->reachable))
@@ -998,14 +1001,16 @@ cgraph_mark_functions_to_output (void)
        {
          /* We should've reclaimed all functions that are not needed.  */
 #ifdef ENABLE_CHECKING
-         if (!node->global.inlined_to && DECL_SAVED_TREE (decl)
+         if (!node->global.inlined_to
+             && gimple_body (decl)
              && !DECL_EXTERNAL (decl))
            {
              dump_cgraph_node (stderr, node);
              internal_error ("failed to reclaim unneeded function");
            }
 #endif
-         gcc_assert (node->global.inlined_to || !DECL_SAVED_TREE (decl)
+         gcc_assert (node->global.inlined_to
+                     || !gimple_body (decl)
                      || DECL_EXTERNAL (decl));
 
        }
@@ -1035,7 +1040,6 @@ cgraph_expand_function (struct cgraph_node *node)
   /* Make sure that BE didn't give up on compiling.  */
   /* ??? Can happen with nested function of extern inline.  */
   gcc_assert (TREE_ASM_WRITTEN (decl));
-
   current_function_decl = NULL;
   if (!cgraph_preserve_function_body_p (decl))
     {
@@ -1224,7 +1228,7 @@ ipa_passes (void)
 {
   set_cfun (NULL);
   current_function_decl = NULL;
-  tree_register_cfg_hooks ();
+  gimple_register_cfg_hooks ();
   bitmap_obstack_initialize (NULL);
   execute_ipa_pass_list (all_ipa_passes);
   bitmap_obstack_release (NULL);
@@ -1324,7 +1328,7 @@ cgraph_optimize (void)
       for (node = cgraph_nodes; node; node = node->next)
        if (node->analyzed
            && (node->global.inlined_to
-               || DECL_SAVED_TREE (node->decl)))
+               || gimple_body (node->decl)))
          {
            error_found = true;
            dump_cgraph_node (stderr, node);
@@ -1413,10 +1417,10 @@ update_call_expr (struct cgraph_node *new_version)
   struct cgraph_edge *e;
 
   gcc_assert (new_version);
+
+  /* Update the call expr on the edges to call the new version.  */
   for (e = new_version->callers; e; e = e->next_caller)
-    /* Update the call expr on the edges
-       to call the new version.  */
-    TREE_OPERAND (CALL_EXPR_FN (get_call_expr_in (e->call_stmt)), 0) = new_version->decl;
+    gimple_call_set_fn (e->call_stmt, new_version->decl);
 }
 
 
index ec20c1c..ca66efe 100644 (file)
 #undef ENABLE_TREE_CHECKING
 #endif
 
+/* Define if you want operations on GIMPLE (the basic data structure of
+   the high-level optimizers) to be checked for dynamic type safety at
+   runtime.  This is moderately expensive.  */
+#ifndef USED_FOR_TARGET
+#undef ENABLE_GIMPLE_CHECKING
+#endif
 
 /* Define if you want all gimple types to be verified after gimplifiation.
    This is cheap. */
index 3509942..e2835ac 100644 (file)
@@ -1,6 +1,6 @@
 /* Subroutines used for code generation on the DEC Alpha.
    Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001,
-   2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
+   2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
    Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
 
 This file is part of GCC.
@@ -51,7 +51,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "langhooks.h"
 #include <splay-tree.h>
 #include "cfglayout.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "tree-flow.h"
 #include "tree-stdarg.h"
 #include "tm-constrs.h"
@@ -5817,11 +5817,11 @@ va_list_skip_additions (tree lhs)
       if (TREE_CODE (stmt) == PHI_NODE)
        return stmt;
 
-      if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
-         || GIMPLE_STMT_OPERAND (stmt, 0) != lhs)
+      if (TREE_CODE (stmt) != MODIFY_EXPR
+         || TREE_OPERAND (stmt, 0) != lhs)
        return lhs;
 
-      rhs = GIMPLE_STMT_OPERAND (stmt, 1);
+      rhs = TREE_OPERAND (stmt, 1);
       if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
        rhs = TREE_OPERAND (rhs, 0);
 
@@ -5856,11 +5856,17 @@ va_list_skip_additions (tree lhs)
    current statement.  */
 
 static bool
-alpha_stdarg_optimize_hook (struct stdarg_info *si, const_tree lhs, const_tree rhs)
+alpha_stdarg_optimize_hook (struct stdarg_info *si, const_gimple stmt)
 {
   tree base, offset, arg1, arg2;
   int offset_arg = 1;
 
+#if 1
+  /* FIXME tuples.  */
+  (void) si;
+  (void) stmt;
+  return false;
+#else
   while (handled_component_p (rhs))
     rhs = TREE_OPERAND (rhs, 0);
   if (TREE_CODE (rhs) != INDIRECT_REF
@@ -5953,6 +5959,7 @@ alpha_stdarg_optimize_hook (struct stdarg_info *si, const_tree lhs, const_tree r
 escapes:
   si->va_list_escapes = true;
   return false;
+#endif
 }
 #endif
 
@@ -6087,7 +6094,7 @@ alpha_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
     {
       nextarg = plus_constant (nextarg, offset);
       nextarg = plus_constant (nextarg, NUM_ARGS * UNITS_PER_WORD);
-      t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (valist), valist,
+      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist,
                  make_tree (ptr_type_node, nextarg));
       TREE_SIDE_EFFECTS (t) = 1;
 
@@ -6106,20 +6113,20 @@ alpha_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
       t = make_tree (ptr_type_node, virtual_incoming_args_rtx);
       t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t,
                  size_int (offset));
-      t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (base_field), base_field, t);
+      t = build2 (MODIFY_EXPR, TREE_TYPE (base_field), base_field, t);
       TREE_SIDE_EFFECTS (t) = 1;
       expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
 
       t = build_int_cst (NULL_TREE, NUM_ARGS * UNITS_PER_WORD);
-      t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (offset_field),
-                 offset_field, t);
+      t = build2 (MODIFY_EXPR, TREE_TYPE (offset_field), offset_field, t);
       TREE_SIDE_EFFECTS (t) = 1;
       expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
     }
 }
 
 static tree
-alpha_gimplify_va_arg_1 (tree type, tree base, tree offset, tree *pre_p)
+alpha_gimplify_va_arg_1 (tree type, tree base, tree offset,
+                        gimple_seq *pre_p)
 {
   tree type_size, ptr_type, addend, t, addr, internal_post;
 
@@ -6128,9 +6135,9 @@ alpha_gimplify_va_arg_1 (tree type, tree base, tree offset, tree *pre_p)
   if (targetm.calls.must_pass_in_stack (TYPE_MODE (type), type))
     {
       t = build_int_cst (TREE_TYPE (offset), 6*8);
-      t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (offset), offset,
-                 build2 (MAX_EXPR, TREE_TYPE (offset), offset, t));
-      gimplify_and_add (t, pre_p);
+      gimplify_assign (offset,
+                      build2 (MAX_EXPR, TREE_TYPE (offset), offset, t),
+                      pre_p);
     }
 
   addend = offset;
@@ -6182,15 +6189,15 @@ alpha_gimplify_va_arg_1 (tree type, tree base, tree offset, tree *pre_p)
       t = size_binop (MULT_EXPR, t, size_int (8));
     }
   t = fold_convert (TREE_TYPE (offset), t);
-  t = build2 (GIMPLE_MODIFY_STMT, void_type_node, offset,
-             build2 (PLUS_EXPR, TREE_TYPE (offset), offset, t));
-  gimplify_and_add (t, pre_p);
+  gimplify_assign (offset, build2 (PLUS_EXPR, TREE_TYPE (offset), offset, t),
+                  pre_p);
 
   return build_va_arg_indirect_ref (addr);
 }
 
 static tree
-alpha_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
+alpha_gimplify_va_arg (tree valist, tree type, gimple_seq *pre_p,
+                      gimple_seq *post_p)
 {
   tree offset_field, base_field, offset, base, t, r;
   bool indirect;
@@ -6222,9 +6229,8 @@ alpha_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
   r = alpha_gimplify_va_arg_1 (type, base, offset, pre_p);
 
   /* Stuff the offset temporary back into its field.  */
-  t = build2 (GIMPLE_MODIFY_STMT, void_type_node, offset_field,
-             fold_convert (TREE_TYPE (offset_field), offset));
-  gimplify_and_add (t, pre_p);
+  gimplify_assign (offset_field,
+                  fold_convert (TREE_TYPE (offset_field), offset), pre_p);
 
   if (indirect)
     r = build_va_arg_indirect_ref (r);
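
Across the target files, the old three-step idiom for emitting an assignment during gimplification is replaced by gimplify_assign, now that pre_p points to a gimple_seq.  In sketch form (illustrative, not part of the patch):

    /* Old (pre-tuples) idiom:
         t = build2 (GIMPLE_MODIFY_STMT, void_type_node, lhs, rhs);
         gimplify_and_add (t, pre_p);
       Tuple idiom:  */
    gimplify_assign (lhs, rhs, pre_p);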
index 6ba924b..07e8eaa 100644 (file)
@@ -2207,7 +2207,7 @@ frv_expand_builtin_va_start (tree valist, rtx nextarg)
       debug_rtx (nextarg);
     }
 
-  t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (valist), valist,
+  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist,
              fold_convert (TREE_TYPE (valist),
                            make_tree (sizetype, nextarg)));
   TREE_SIDE_EFFECTS (t) = 1;
index 7b4c243..62d1b8d 100644 (file)
@@ -48,7 +48,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "target-def.h"
 #include "langhooks.h"
 #include "cgraph.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "dwarf2.h"
 #include "df.h"
 #include "tm-constrs.h"
@@ -6327,8 +6327,8 @@ ix86_va_start (tree valist, rtx nextarg)
   if (cfun->va_list_gpr_size)
     {
       type = TREE_TYPE (gpr);
-      t = build2 (GIMPLE_MODIFY_STMT, type, gpr,
-                 build_int_cst (type, n_gpr * 8));
+      t = build2 (MODIFY_EXPR, type,
+                 gpr, build_int_cst (type, n_gpr * 8));
       TREE_SIDE_EFFECTS (t) = 1;
       expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
     }
@@ -6336,7 +6336,7 @@ ix86_va_start (tree valist, rtx nextarg)
   if (cfun->va_list_fpr_size)
     {
       type = TREE_TYPE (fpr);
-      t = build2 (GIMPLE_MODIFY_STMT, type, fpr,
+      t = build2 (MODIFY_EXPR, type, fpr,
                  build_int_cst (type, n_fpr * 16 + 8*X86_64_REGPARM_MAX));
       TREE_SIDE_EFFECTS (t) = 1;
       expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
@@ -6348,7 +6348,7 @@ ix86_va_start (tree valist, rtx nextarg)
   if (words != 0)
     t = build2 (POINTER_PLUS_EXPR, type, t,
                size_int (words * UNITS_PER_WORD));
-  t = build2 (GIMPLE_MODIFY_STMT, type, ovf, t);
+  t = build2 (MODIFY_EXPR, type, ovf, t);
   TREE_SIDE_EFFECTS (t) = 1;
   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
 
@@ -6358,7 +6358,7 @@ ix86_va_start (tree valist, rtx nextarg)
         Prologue of the function save it right above stack frame.  */
       type = TREE_TYPE (sav);
       t = make_tree (type, frame_pointer_rtx);
-      t = build2 (GIMPLE_MODIFY_STMT, type, sav, t);
+      t = build2 (MODIFY_EXPR, type, sav, t);
       TREE_SIDE_EFFECTS (t) = 1;
       expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
     }
@@ -6367,7 +6367,8 @@ ix86_va_start (tree valist, rtx nextarg)
 /* Implement va_arg.  */
 
 static tree
-ix86_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
+ix86_gimplify_va_arg (tree valist, tree type, gimple_seq *pre_p,
+                     gimple_seq *post_p)
 {
   static const int intreg[6] = { 0, 1, 2, 3, 4, 5 };
   tree f_gpr, f_fpr, f_ovf, f_sav;
@@ -6497,16 +6498,14 @@ ix86_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
          /* int_addr = gpr + sav; */
          t = fold_convert (sizetype, gpr);
          t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav, t);
-         t = build2 (GIMPLE_MODIFY_STMT, void_type_node, int_addr, t);
-         gimplify_and_add (t, pre_p);
+         gimplify_assign (int_addr, t, pre_p);
        }
       if (needed_sseregs)
        {
          /* sse_addr = fpr + sav; */
          t = fold_convert (sizetype, fpr);
          t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav, t);
-         t = build2 (GIMPLE_MODIFY_STMT, void_type_node, sse_addr, t);
-         gimplify_and_add (t, pre_p);
+         gimplify_assign (sse_addr, t, pre_p);
        }
       if (need_temp)
        {
@@ -6515,8 +6514,7 @@ ix86_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
 
          /* addr = &temp; */
          t = build1 (ADDR_EXPR, build_pointer_type (type), temp);
-         t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
-         gimplify_and_add (t, pre_p);
+         gimplify_assign (addr, t, pre_p);
 
          for (i = 0; i < XVECLEN (container, 0); i++)
            {
@@ -6549,8 +6547,7 @@ ix86_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
                                       size_int (INTVAL (XEXP (slot, 1))));
              dest = build_va_arg_indirect_ref (dest_addr);
 
-             t = build2 (GIMPLE_MODIFY_STMT, void_type_node, dest, src);
-             gimplify_and_add (t, pre_p);
+             gimplify_assign (dest, src, pre_p);
            }
        }
 
@@ -6558,22 +6555,19 @@ ix86_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
        {
          t = build2 (PLUS_EXPR, TREE_TYPE (gpr), gpr,
                      build_int_cst (TREE_TYPE (gpr), needed_intregs * 8));
-         t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (gpr), gpr, t);
-         gimplify_and_add (t, pre_p);
+         gimplify_assign (gpr, t, pre_p);
        }
+
       if (needed_sseregs)
        {
          t = build2 (PLUS_EXPR, TREE_TYPE (fpr), fpr,
                      build_int_cst (TREE_TYPE (fpr), needed_sseregs * 16));
-         t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (fpr), fpr, t);
-         gimplify_and_add (t, pre_p);
+         gimplify_assign (fpr, t, pre_p);
        }
 
-      t = build1 (GOTO_EXPR, void_type_node, lab_over);
-      gimplify_and_add (t, pre_p);
+      gimple_seq_add_stmt (pre_p, gimple_build_goto (lab_over));
 
-      t = build1 (LABEL_EXPR, void_type_node, lab_false);
-      append_to_statement_list (t, pre_p);
+      gimple_seq_add_stmt (pre_p, gimple_build_label (lab_false));
     }
 
   /* ... otherwise out of the overflow area.  */
@@ -6601,20 +6595,14 @@ ix86_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
       t = fold_convert (TREE_TYPE (ovf), t);
     }
   gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
-
-  t2 = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
-  gimplify_and_add (t2, pre_p);
+  gimplify_assign (addr, t, pre_p);
 
   t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
              size_int (rsize * UNITS_PER_WORD));
-  t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
-  gimplify_and_add (t, pre_p);
+  gimplify_assign (unshare_expr (ovf), t, pre_p);
 
   if (container)
-    {
-      t = build1 (LABEL_EXPR, void_type_node, lab_over);
-      append_to_statement_list (t, pre_p);
-    }
+    gimple_seq_add_stmt (pre_p, gimple_build_label (lab_over));
 
   ptrtype = build_pointer_type (type);
   addr = fold_convert (ptrtype, addr);
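
Besides the gimplify_assign rewrites, the ix86_gimplify_va_arg hunks show how control flow is queued under tuples: instead of building GOTO_EXPR and LABEL_EXPR trees and pushing them through gimplify_and_add or append_to_statement_list, the hook now creates the statements directly with gimple_build_goto and gimple_build_label and appends them with gimple_seq_add_stmt. A sketch of that branch-around pattern with illustrative names:

/* Sketch only: emitting the register-path/stack-path skeleton into a
   gimple_seq the way the converted va_arg hooks do.  The labels are
   LABEL_DECLs, e.g. from create_artificial_label ().  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "gimple.h"

static void
emit_branch_around (tree lab_false, tree lab_over, gimple_seq *pre_p)
{
  /* ... statements for the register path would be queued here ...  */
  gimple_seq_add_stmt (pre_p, gimple_build_goto (lab_over));
  gimple_seq_add_stmt (pre_p, gimple_build_label (lab_false));
  /* ... statements for the stack path ...  */
  gimple_seq_add_stmt (pre_p, gimple_build_label (lab_over));
}
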
index 29a9a8d..c07b21d 100644
@@ -50,7 +50,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "hashtab.h"
 #include "langhooks.h"
 #include "cfglayout.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "intl.h"
 #include "df.h"
 #include "debug.h"
@@ -275,7 +275,7 @@ static tree ia64_handle_model_attribute (tree *, tree, tree, int, bool *);
 static tree ia64_handle_version_id_attribute (tree *, tree, tree, int, bool *);
 static void ia64_encode_section_info (tree, rtx, int);
 static rtx ia64_struct_value_rtx (tree, int);
-static tree ia64_gimplify_va_arg (tree, tree, tree *, tree *);
+static tree ia64_gimplify_va_arg (tree, tree, gimple_seq *, gimple_seq *);
 static bool ia64_scalar_mode_supported_p (enum machine_mode mode);
 static bool ia64_vector_mode_supported_p (enum machine_mode mode);
 static bool ia64_cannot_force_const_mem (rtx);
@@ -4342,7 +4342,8 @@ ia64_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
 /* Implement va_arg.  */
 
 static tree
-ia64_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
+ia64_gimplify_va_arg (tree valist, tree type, gimple_seq *pre_p,
+                     gimple_seq *post_p)
 {
   /* Variable sized types are passed by reference.  */
   if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
@@ -4365,8 +4366,7 @@ ia64_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
       t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
                  size_int (-2 * UNITS_PER_WORD));
       t = fold_convert (TREE_TYPE (valist), t);
-      t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (valist), valist, t);
-      gimplify_and_add (t, pre_p);
+      gimplify_assign (unshare_expr (valist), t, pre_p);
     }
 
   return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
index a63191c..ec98d81 100644
@@ -1,5 +1,5 @@
 /* Target Prototypes for R8C/M16C/M32C
-   Copyright (C) 2005, 2007
+   Copyright (C) 2005, 2007, 2008
    Free Software Foundation, Inc.
    Contributed by Red Hat.
 
@@ -108,7 +108,7 @@ int  m32c_split_psi_p (rtx *);
 #ifdef TREE_CODE
 
 void m32c_function_arg_advance (CUMULATIVE_ARGS *, MM, tree, int);
-tree m32c_gimplify_va_arg_expr (tree, tree, tree *, tree *);
+tree m32c_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
 void m32c_init_cumulative_args (CUMULATIVE_ARGS *, tree, rtx, tree, int);
 bool m32c_promote_function_return (const_tree);
 int  m32c_special_page_vector_p (tree);
index b0733dd..0e03c9e 100644
@@ -47,7 +47,7 @@
 #include "target-def.h"
 #include "tm_p.h"
 #include "langhooks.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "df.h"
 
 /* Prototypes */
index d1def42..913acc7 100644
@@ -56,7 +56,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "langhooks.h"
 #include "cfglayout.h"
 #include "sched-int.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "bitmap.h"
 #include "diagnostic.h"
 
@@ -4959,12 +4959,12 @@ mips_va_start (tree valist, rtx nextarg)
       if (cum->stack_words > 0)
        t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovfl), t,
                    size_int (cum->stack_words * UNITS_PER_WORD));
-      t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovfl), ovfl, t);
+      t = build2 (MODIFY_EXPR, TREE_TYPE (ovfl), ovfl, t);
       expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
 
       /* Emit code to initialize GTOP, the top of the GPR save area.  */
       t = make_tree (TREE_TYPE (gtop), virtual_incoming_args_rtx);
-      t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (gtop), gtop, t);
+      t = build2 (MODIFY_EXPR, TREE_TYPE (gtop), gtop, t);
       expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
 
       /* Emit code to initialize FTOP, the top of the FPR save area.
@@ -4976,18 +4976,18 @@ mips_va_start (tree valist, rtx nextarg)
       if (fpr_offset)
        t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ftop), t,
                    size_int (-fpr_offset));
-      t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ftop), ftop, t);
+      t = build2 (MODIFY_EXPR, TREE_TYPE (ftop), ftop, t);
       expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
 
       /* Emit code to initialize GOFF, the offset from GTOP of the
         next GPR argument.  */
-      t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (goff), goff,
+      t = build2 (MODIFY_EXPR, TREE_TYPE (goff), goff,
                  build_int_cst (TREE_TYPE (goff), gpr_save_area_size));
       expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
 
       /* Likewise emit code to initialize FOFF, the offset from FTOP
         of the next FPR argument.  */
-      t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (foff), foff,
+      t = build2 (MODIFY_EXPR, TREE_TYPE (foff), foff,
                  build_int_cst (TREE_TYPE (foff), fpr_save_area_size));
       expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
     }
@@ -5001,7 +5001,8 @@ mips_va_start (tree valist, rtx nextarg)
 /* Implement TARGET_GIMPLIFY_VA_ARG_EXPR.  */
 
 static tree
-mips_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
+mips_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
+                          gimple_seq *post_p)
 {
   tree addr;
   bool indirect_p;
@@ -5100,8 +5101,7 @@ mips_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
              /* [1] Emit code for: off &= -rsize.      */
              t = build2 (BIT_AND_EXPR, TREE_TYPE (off), off,
                          build_int_cst (NULL_TREE, -rsize));
-             t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (off), off, t);
-             gimplify_and_add (t, pre_p);
+             gimplify_assign (off, t, pre_p);
            }
          osize = rsize;
        }
@@ -5137,7 +5137,7 @@ mips_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
          u = size_int (-osize);
          t = build2 (BIT_AND_EXPR, sizetype, t, u);
          t = fold_convert (TREE_TYPE (ovfl), t);
-         align = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovfl), ovfl, t);
+         align = build2 (MODIFY_EXPR, TREE_TYPE (ovfl), ovfl, t);
        }
       else
        align = NULL;
index 238d352..76d84ba 100644
@@ -1,6 +1,6 @@
 /* Subroutines for insn-output.c for HPPA.
    Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001,
-   2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
+   2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
    Contributed by Tim Moore (moore@cs.utah.edu), based on sparc.c
 
 This file is part of GCC.
@@ -125,7 +125,7 @@ static void pa_asm_out_destructor (rtx, int);
 static void pa_init_builtins (void);
 static rtx hppa_builtin_saveregs (void);
 static void hppa_va_start (tree, rtx);
-static tree hppa_gimplify_va_arg_expr (tree, tree, tree *, tree *);
+static tree hppa_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
 static bool pa_scalar_mode_supported_p (enum machine_mode);
 static bool pa_commutative_p (const_rtx x, int outer_code);
 static void copy_fp_args (rtx) ATTRIBUTE_UNUSED;
@@ -5998,7 +5998,8 @@ hppa_va_start (tree valist, rtx nextarg)
 }
 
 static tree
-hppa_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
+hppa_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
+                          gimple_seq *post_p)
 {
   if (TARGET_64BIT)
     {
index 5e2f7ac..0e03be0 100644
@@ -52,7 +52,7 @@
 #include "reload.h"
 #include "cfglayout.h"
 #include "sched-int.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "tree-flow.h"
 #include "intl.h"
 #include "params.h"
@@ -958,7 +958,7 @@ static void rs6000_darwin_file_start (void);
 
 static tree rs6000_build_builtin_va_list (void);
 static void rs6000_va_start (tree, rtx);
-static tree rs6000_gimplify_va_arg (tree, tree, tree *, tree *);
+static tree rs6000_gimplify_va_arg (tree, tree, gimple_seq *, gimple_seq *);
 static bool rs6000_must_pass_in_stack (enum machine_mode, const_tree);
 static bool rs6000_scalar_mode_supported_p (enum machine_mode);
 static bool rs6000_vector_mode_supported_p (enum machine_mode);
@@ -6713,9 +6713,12 @@ rs6000_va_start (tree valist, rtx nextarg)
 
   valist = build_va_arg_indirect_ref (valist);
   gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
-  fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
-  ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
-  sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
+  fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), unshare_expr (valist),
+               f_fpr, NULL_TREE);
+  ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), unshare_expr (valist),
+               f_ovf, NULL_TREE);
+  sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), unshare_expr (valist),
+               f_sav, NULL_TREE);
 
   /* Count number of gp and fp argument registers used.  */
   words = crtl->args.info.words;
@@ -6731,7 +6734,7 @@ rs6000_va_start (tree valist, rtx nextarg)
 
   if (cfun->va_list_gpr_size)
     {
-      t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (gpr), gpr,
+      t = build2 (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
                  build_int_cst (NULL_TREE, n_gpr));
       TREE_SIDE_EFFECTS (t) = 1;
       expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
@@ -6739,7 +6742,7 @@ rs6000_va_start (tree valist, rtx nextarg)
 
   if (cfun->va_list_fpr_size)
     {
-      t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (fpr), fpr,
+      t = build2 (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
                  build_int_cst (NULL_TREE, n_fpr));
       TREE_SIDE_EFFECTS (t) = 1;
       expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
@@ -6750,7 +6753,7 @@ rs6000_va_start (tree valist, rtx nextarg)
   if (words != 0)
     t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t,
                size_int (words * UNITS_PER_WORD));
-  t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
+  t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
   TREE_SIDE_EFFECTS (t) = 1;
   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
 
@@ -6767,7 +6770,7 @@ rs6000_va_start (tree valist, rtx nextarg)
   if (cfun->machine->varargs_save_offset)
     t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
                size_int (cfun->machine->varargs_save_offset));
-  t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (sav), sav, t);
+  t = build2 (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
   TREE_SIDE_EFFECTS (t) = 1;
   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
 }
@@ -6775,7 +6778,8 @@ rs6000_va_start (tree valist, rtx nextarg)
 /* Implement va_arg.  */
 
 tree
-rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
+rs6000_gimplify_va_arg (tree valist, tree type, gimple_seq *pre_p,
+                       gimple_seq *post_p)
 {
   tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
   tree gpr, fpr, ovf, sav, reg, t, u;
@@ -6784,6 +6788,7 @@ rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
   int align;
   tree ptrtype = build_pointer_type (type);
   int regalign = 0;
+  gimple stmt;
 
   if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
     {
@@ -6802,14 +6807,14 @@ rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
          if (elem_size < UNITS_PER_WORD)
            {
              tree real_part, imag_part;
-             tree post = NULL_TREE;
+             gimple_seq post = NULL;
 
              real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
                                                  &post);
              /* Copy the value into a temporary, lest the formal temporary
                 be reused out from under us.  */
              real_part = get_initialized_tmp_var (real_part, pre_p, &post);
-             append_to_statement_list (post, pre_p);
+             gimple_seq_add_seq (pre_p, post);
 
              imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
                                                  post_p);
@@ -6829,9 +6834,12 @@ rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
 
   valist = build_va_arg_indirect_ref (valist);
   gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
-  fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
-  ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
-  sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
+  fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), unshare_expr (valist),
+               f_fpr, NULL_TREE);
+  ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), unshare_expr (valist),
+               f_ovf, NULL_TREE);
+  sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), unshare_expr (valist),
+               f_sav, NULL_TREE);
 
   size = int_size_in_bytes (type);
   rsize = (size + 3) / 4;
@@ -6885,18 +6893,19 @@ rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
       if (n_reg == 2 && reg == gpr)
        {
          regalign = 1;
-         u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), reg,
+         u = build2 (BIT_AND_EXPR, TREE_TYPE (reg), unshare_expr (reg),
                     build_int_cst (TREE_TYPE (reg), n_reg - 1));
-         u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, u);
+         u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg),
+                     unshare_expr (reg), u);
        }
       /* _Decimal128 is passed in even/odd fpr pairs; the stored
         reg number is 0 for f1, so we want to make it odd.  */
       else if (reg == fpr && TYPE_MODE (type) == TDmode)
        {
          regalign = 1;
-         t = build2 (BIT_IOR_EXPR, TREE_TYPE (reg), reg,
+         t = build2 (BIT_IOR_EXPR, TREE_TYPE (reg), unshare_expr (reg),
                      build_int_cst (TREE_TYPE (reg), 1));
-         u = build2 (MODIFY_EXPR, void_type_node, reg, t);
+         u = build2 (MODIFY_EXPR, void_type_node, unshare_expr (reg), t);
        }
 
       t = fold_convert (TREE_TYPE (reg), size_int (8 - n_reg + 1));
@@ -6909,7 +6918,7 @@ rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
       if (sav_ofs)
        t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
 
-      u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
+      u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), unshare_expr (reg),
                  build_int_cst (TREE_TYPE (reg), n_reg));
       u = fold_convert (sizetype, u);
       u = build2 (MULT_EXPR, sizetype, u, size_int (sav_scale));
@@ -6922,22 +6931,18 @@ rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
          && TYPE_MODE (type) == SDmode)
        t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
 
-      t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
-      gimplify_and_add (t, pre_p);
+      gimplify_assign (addr, t, pre_p);
 
-      t = build1 (GOTO_EXPR, void_type_node, lab_over);
-      gimplify_and_add (t, pre_p);
+      gimple_seq_add_stmt (pre_p, gimple_build_goto (lab_over));
 
-      t = build1 (LABEL_EXPR, void_type_node, lab_false);
-      append_to_statement_list (t, pre_p);
+      stmt = gimple_build_label (lab_false);
+      gimple_seq_add_stmt (pre_p, stmt);
 
       if ((n_reg == 2 && !regalign) || n_reg > 2)
        {
          /* Ensure that we don't find any more args in regs.
             Alignment has taken care of for special cases.  */
-         t = build_gimple_modify_stmt (reg,
-                                       build_int_cst (TREE_TYPE (reg), 8));
-         gimplify_and_add (t, pre_p);
+         gimplify_assign (reg, build_int_cst (TREE_TYPE (reg), 8), pre_p);
        }
     }
 
@@ -6955,17 +6960,15 @@ rs6000_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
     }
   gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
 
-  u = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
-  gimplify_and_add (u, pre_p);
+  gimplify_assign (unshare_expr (addr), t, pre_p);
 
   t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
-  t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
-  gimplify_and_add (t, pre_p);
+  gimplify_assign (unshare_expr (ovf), t, pre_p);
 
   if (lab_over)
     {
-      t = build1 (LABEL_EXPR, void_type_node, lab_over);
-      append_to_statement_list (t, pre_p);
+      stmt = gimple_build_label (lab_over);
+      gimple_seq_add_stmt (pre_p, stmt);
     }
 
   if (STRICT_ALIGNMENT
@@ -11321,15 +11324,14 @@ rs6000_alloc_sdmode_stack_slot (void)
 {
   tree t;
   basic_block bb;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
 
   gcc_assert (cfun->machine->sdmode_stack_slot == NULL_RTX);
 
   FOR_EACH_BB (bb)
-    for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
       {
-       tree ret = walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
-                                                rs6000_check_sdmode, NULL);
+       tree ret = walk_gimple_op (gsi_stmt (gsi), rs6000_check_sdmode, NULL);
        if (ret)
          {
            rtx stack = assign_stack_local (DDmode, GET_MODE_SIZE (DDmode), 0);
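
The rs6000_alloc_sdmode_stack_slot hunk shows the iterator side of the conversion: block_stmt_iterator and the bsi_* calls become gimple_stmt_iterator and gsi_*, and the per-statement tree walk goes through walk_gimple_op, which visits the operands of a tuple rather than a whole statement tree. A sketch of the new scan loop, assuming a callback with the usual walk_tree_fn signature (find_my_node is hypothetical; include list abbreviated):

/* Sketch only: scanning every statement operand of the current
   function with the tuples iterators.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "gimple.h"

static tree
find_my_node (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
              void *data ATTRIBUTE_UNUSED)
{
  /* Stop the walk at the first SSA name (illustrative condition).  */
  return TREE_CODE (*tp) == SSA_NAME ? *tp : NULL_TREE;
}

static tree
scan_current_function (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        tree ret = walk_gimple_op (gsi_stmt (gsi), find_my_node, NULL);
        if (ret)
          return ret;
      }
  return NULL_TREE;
}
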
index 936e0a0..3c7d92b 100644
@@ -50,7 +50,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "debug.h"
 #include "langhooks.h"
 #include "optabs.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "df.h"
 
 
@@ -8424,15 +8424,15 @@ s390_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
 
   if (cfun->va_list_gpr_size)
     {
-      t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (gpr), gpr,
-                 build_int_cst (NULL_TREE, n_gpr));
+      t = build2 (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
+                 build_int_cst (NULL_TREE, n_gpr));
       TREE_SIDE_EFFECTS (t) = 1;
       expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
     }
 
   if (cfun->va_list_fpr_size)
     {
-      t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (fpr), fpr,
+      t = build2 (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
                  build_int_cst (NULL_TREE, n_fpr));
       TREE_SIDE_EFFECTS (t) = 1;
       expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
@@ -8452,7 +8452,7 @@ s390_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
 
       t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t, size_int (off));
 
-      t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
+      t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
       TREE_SIDE_EFFECTS (t) = 1;
       expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
     }
@@ -8465,7 +8465,7 @@ s390_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
       t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
                  size_int (-RETURN_REGNUM * UNITS_PER_WORD));
   
-      t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (sav), sav, t);
+      t = build2 (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
       TREE_SIDE_EFFECTS (t) = 1;
       expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
     }
@@ -8496,8 +8496,8 @@ s390_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
    } */
 
 static tree
-s390_gimplify_va_arg (tree valist, tree type, tree *pre_p, 
-                     tree *post_p ATTRIBUTE_UNUSED)
+s390_gimplify_va_arg (tree valist, tree type, gimple_seq *pre_p, 
+                     gimple_seq *post_p ATTRIBUTE_UNUSED)
 {
   tree f_gpr, f_fpr, f_ovf, f_sav;
   tree gpr, fpr, ovf, sav, reg, t, u;
@@ -8512,9 +8512,13 @@ s390_gimplify_va_arg (tree valist, tree type, tree *pre_p,
   valist = build_va_arg_indirect_ref (valist);
   gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
   fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
-  ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
   sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
 
+  /* The tree for args* cannot be shared between gpr/fpr and ovf since
+     both appear on a lhs.  */
+  valist = unshare_expr (valist);
+  ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
+
   size = int_size_in_bytes (type);
 
   if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
@@ -8598,14 +8602,11 @@ s390_gimplify_va_arg (tree valist, tree type, tree *pre_p,
              fold_convert (TREE_TYPE (reg), size_int (sav_scale)));
   t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, fold_convert (sizetype, u));
 
-  t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
-  gimplify_and_add (t, pre_p);
+  gimplify_assign (addr, t, pre_p);
 
-  t = build1 (GOTO_EXPR, void_type_node, lab_over);
-  gimplify_and_add (t, pre_p);
+  gimple_seq_add_stmt (pre_p, gimple_build_goto (lab_over));
 
-  t = build1 (LABEL_EXPR, void_type_node, lab_false);
-  append_to_statement_list (t, pre_p);
+  gimple_seq_add_stmt (pre_p, gimple_build_label (lab_false));
 
 
   /* ... Otherwise out of the overflow area.  */
@@ -8617,16 +8618,13 @@ s390_gimplify_va_arg (tree valist, tree type, tree *pre_p,
 
   gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
 
-  u = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
-  gimplify_and_add (u, pre_p);
+  gimplify_assign (addr, t, pre_p);
 
   t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, 
              size_int (size));
-  t = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, ovf, t);
-  gimplify_and_add (t, pre_p);
+  gimplify_assign (ovf, t, pre_p);
 
-  t = build1 (LABEL_EXPR, void_type_node, lab_over);
-  append_to_statement_list (t, pre_p);
+  gimple_seq_add_stmt (pre_p, gimple_build_label (lab_over));
 
 
   /* Increment register save count.  */
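
The unshare_expr calls added in the rs6000 and s390 hunks all serve the purpose spelled out in the new s390 comment: under the tuples gimplifier, a tree that ends up on more than one left-hand side must not be shared, so the va_list COMPONENT_REFs (and the register counters built from them) are unshared before being reused. A sketch of the pattern, with illustrative field names:

/* Sketch only: reusing va_list components on several lhs positions
   requires unsharing the base expression.  F_GPR and F_OVF stand for
   the FIELD_DECLs of the backend's va_list record.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "gimple.h"

static void
init_valist_fields (tree valist, tree f_gpr, tree f_ovf, gimple_seq *pre_p)
{
  tree gpr, ovf;

  gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
  /* VALIST already feeds one lhs through GPR; unshare it before
     building the next lhs from it.  */
  ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), unshare_expr (valist),
                f_ovf, NULL_TREE);

  gimplify_assign (gpr, build_int_cst (TREE_TYPE (gpr), 0), pre_p);
  gimplify_assign (ovf, fold_convert (TREE_TYPE (ovf), null_pointer_node),
                   pre_p);
}
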
index e311362..c7b8f58 100644
@@ -51,7 +51,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "intl.h"
 #include "sched-int.h"
 #include "ggc.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "cfgloop.h"
 #include "alloc-pool.h"
 #include "tm-constrs.h"
@@ -262,7 +262,7 @@ static bool sh_pretend_outgoing_varargs_named (CUMULATIVE_ARGS *);
 static tree sh_build_builtin_va_list (void);
 static tree sh_canonical_va_list_type (tree);
 static void sh_va_start (tree, rtx);
-static tree sh_gimplify_va_arg_expr (tree, tree, tree *, tree *);
+static tree sh_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
 static bool sh_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
                                  const_tree, bool);
 static bool sh_callee_copies (CUMULATIVE_ARGS *, enum machine_mode,
@@ -7200,7 +7200,7 @@ sh_va_start (tree valist, rtx nextarg)
   /* Call __builtin_saveregs.  */
   u = make_tree (sizetype, expand_builtin_saveregs ());
   u = fold_convert (ptr_type_node, u);
-  t = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, next_fp, u);
+  t = build2 (MODIFY_EXPR, ptr_type_node, next_fp, u);
   TREE_SIDE_EFFECTS (t) = 1;
   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
 
@@ -7211,11 +7211,11 @@ sh_va_start (tree valist, rtx nextarg)
     nfp = 0;
   u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u,
                   size_int (UNITS_PER_WORD * nfp));
-  t = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, next_fp_limit, u);
+  t = build2 (MODIFY_EXPR, ptr_type_node, next_fp_limit, u);
   TREE_SIDE_EFFECTS (t) = 1;
   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
 
-  t = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, next_o, u);
+  t = build2 (MODIFY_EXPR, ptr_type_node, next_o, u);
   TREE_SIDE_EFFECTS (t) = 1;
   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
 
@@ -7226,12 +7226,12 @@ sh_va_start (tree valist, rtx nextarg)
     nint = 0;
   u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u,
                   size_int (UNITS_PER_WORD * nint));
-  t = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, next_o_limit, u);
+  t = build2 (MODIFY_EXPR, ptr_type_node, next_o_limit, u);
   TREE_SIDE_EFFECTS (t) = 1;
   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
 
   u = make_tree (ptr_type_node, nextarg);
-  t = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, next_stack, u);
+  t = build2 (MODIFY_EXPR, ptr_type_node, next_stack, u);
   TREE_SIDE_EFFECTS (t) = 1;
   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
 }
@@ -7260,8 +7260,8 @@ find_sole_member (tree type)
 /* Implement `va_arg'.  */
 
 static tree
-sh_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p,
-                        tree *post_p ATTRIBUTE_UNUSED)
+sh_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
+                        gimple_seq *post_p ATTRIBUTE_UNUSED)
 {
   HOST_WIDE_INT size, rsize;
   tree tmp, pptr_type_node;
@@ -7351,11 +7351,9 @@ sh_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p,
          bool is_double = size == 8 && TREE_CODE (eff_type) == REAL_TYPE;
 
          tmp = build1 (ADDR_EXPR, pptr_type_node, next_fp);
-         tmp = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, tmp);
-         gimplify_and_add (tmp, pre_p);
+         gimplify_assign (addr, tmp, pre_p);
 
-         tmp = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, next_fp_tmp, valist);
-         gimplify_and_add (tmp, pre_p);
+         gimplify_assign (next_fp_tmp, valist, pre_p);
          tmp = next_fp_limit;
          if (size > 4 && !is_double)
            tmp = build2 (POINTER_PLUS_EXPR, TREE_TYPE (tmp), tmp,
@@ -7375,9 +7373,7 @@ sh_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p,
                            size_int (UNITS_PER_WORD));
              tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
                            next_fp_tmp, tmp);
-             tmp = build2 (GIMPLE_MODIFY_STMT, ptr_type_node,
-                           next_fp_tmp, tmp);
-             gimplify_and_add (tmp, pre_p);
+             gimplify_assign (next_fp_tmp, tmp, pre_p);
            }
          if (is_double)
            gimplify_and_add (cmp, pre_p);
@@ -7409,13 +7405,10 @@ sh_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p,
          gimplify_and_add (tmp, pre_p);
 
          tmp = build1 (ADDR_EXPR, pptr_type_node, next_stack);
-         tmp = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, tmp);
-         gimplify_and_add (tmp, pre_p);
-         tmp = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, next_fp_tmp, valist);
-         gimplify_and_add (tmp, pre_p);
+         gimplify_assign (addr, tmp, pre_p);
+         gimplify_assign (next_fp_tmp, valist, pre_p);
 
-         tmp = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, valist, next_fp_tmp);
-         gimplify_and_add (tmp, post_p);
+         gimplify_assign (valist, next_fp_tmp, post_p);
          valist = next_fp_tmp;
        }
       else
@@ -7429,8 +7422,7 @@ sh_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p,
          gimplify_and_add (tmp, pre_p);
 
          tmp = build1 (ADDR_EXPR, pptr_type_node, next_o);
-         tmp = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, tmp);
-         gimplify_and_add (tmp, pre_p);
+         gimplify_assign (addr, tmp, pre_p);
 
          tmp = build1 (GOTO_EXPR, void_type_node, lab_over);
          gimplify_and_add (tmp, pre_p);
@@ -7439,15 +7431,10 @@ sh_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p,
          gimplify_and_add (tmp, pre_p);
 
          if (size > 4 && ! (TARGET_SH4 || TARGET_SH2A))
-           {
-             tmp = build2 (GIMPLE_MODIFY_STMT, ptr_type_node,
-                           next_o, next_o_limit);
-             gimplify_and_add (tmp, pre_p);
-           }
+           gimplify_assign (next_o, next_o_limit, pre_p);
 
          tmp = build1 (ADDR_EXPR, pptr_type_node, next_stack);
-         tmp = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, tmp);
-         gimplify_and_add (tmp, pre_p);
+         gimplify_assign (addr, tmp, pre_p);
        }
 
       if (!result)
@@ -7463,8 +7450,7 @@ sh_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p,
   tmp = std_gimplify_va_arg_expr (valist, type, pre_p, NULL);
   if (result)
     {
-      tmp = build2 (GIMPLE_MODIFY_STMT, void_type_node, result, tmp);
-      gimplify_and_add (tmp, pre_p);
+      gimplify_assign (result, tmp, pre_p);
 
       tmp = build1 (LABEL_EXPR, void_type_node, lab_over);
       gimplify_and_add (tmp, pre_p);
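
The sh hook keeps a mixed style worth noting: COND_EXPR, GOTO_EXPR and LABEL_EXPR trees are still pushed through gimplify_and_add (whose queue argument is now a gimple_seq *), while plain stores become gimplify_assign calls, including the one that copies next_fp_tmp back into valist on the post queue so the write-back happens after the argument value has been fetched. A small sketch of that pre/post split, with illustrative names:

/* Sketch only: queue the cursor copy before the access and the
   write-back after it, as sh_gimplify_va_arg_expr does.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "gimple.h"

static tree
load_through_temp (tree valist, tree tmp, gimple_seq *pre_p,
                   gimple_seq *post_p)
{
  /* Before the access: copy the cursor into a temporary.  */
  gimplify_assign (tmp, valist, pre_p);

  /* After the access: copy the advanced temporary back.  */
  gimplify_assign (unshare_expr (valist), tmp, post_p);

  return build_va_arg_indirect_ref (tmp);
}
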
index 5e6f574..adf28c0 100644
@@ -48,7 +48,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "target.h"
 #include "target-def.h"
 #include "cfglayout.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "langhooks.h"
 #include "params.h"
 #include "df.h"
@@ -410,7 +410,7 @@ static rtx sparc_struct_value_rtx (tree, int);
 static bool sparc_return_in_memory (const_tree, const_tree);
 static bool sparc_strict_argument_naming (CUMULATIVE_ARGS *);
 static void sparc_va_start (tree, rtx);
-static tree sparc_gimplify_va_arg (tree, tree, tree *, tree *);
+static tree sparc_gimplify_va_arg (tree, tree, gimple_seq *, gimple_seq *);
 static bool sparc_vector_mode_supported_p (enum machine_mode);
 static bool sparc_pass_by_reference (CUMULATIVE_ARGS *,
                                     enum machine_mode, const_tree, bool);
@@ -5709,7 +5709,8 @@ sparc_va_start (tree valist, rtx nextarg)
 /* Implement `va_arg' for stdarg.  */
 
 static tree
-sparc_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
+sparc_gimplify_va_arg (tree valist, tree type, gimple_seq *pre_p,
+                      gimple_seq *post_p)
 {
   HOST_WIDE_INT size, rsize, align;
   tree addr, incr;
@@ -5792,8 +5793,7 @@ sparc_gimplify_va_arg (tree valist, tree type, tree *pre_p, tree *post_p)
     addr = fold_convert (ptrtype, addr);
 
   incr = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, incr, size_int (rsize));
-  incr = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, valist, incr);
-  gimplify_and_add (incr, post_p);
+  gimplify_assign (valist, incr, post_p);
 
   return build_va_arg_indirect_ref (addr);
 }
index e645adb..83bd9f5 100644
@@ -1,4 +1,4 @@
-/* Copyright (C) 2006, 2007 Free Software Foundation, Inc.
+/* Copyright (C) 2006, 2007, 2008 Free Software Foundation, Inc.
 
    This file is free software; you can redistribute it and/or modify it under
    the terms of the GNU General Public License as published by the Free
@@ -50,7 +50,7 @@
 #include "assert.h"
 #include "c-common.h"
 #include "machmode.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "tm-constrs.h"
 #include "spu-builtins.h"
 #include "ddg.h"
@@ -118,8 +118,8 @@ static unsigned char spu_pass_by_reference (CUMULATIVE_ARGS *cum, enum machine_m
                                            const_tree type, unsigned char named);
 static tree spu_build_builtin_va_list (void);
 static void spu_va_start (tree, rtx);
-static tree spu_gimplify_va_arg_expr (tree valist, tree type, tree * pre_p,
-                                     tree * post_p);
+static tree spu_gimplify_va_arg_expr (tree valist, tree type,
+                                     gimple_seq * pre_p, gimple_seq * post_p);
 static int regno_aligned_for_load (int regno);
 static int store_with_one_insn_p (rtx mem);
 static int mem_is_padded_component_ref (rtx x);
@@ -3238,7 +3238,7 @@ spu_va_start (tree valist, rtx nextarg)
   if (crtl->args.pretend_args_size > 0)
     t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (args), t,
                size_int (-STACK_POINTER_OFFSET));
-  t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (args), args, t);
+  t = build2 (MODIFY_EXPR, TREE_TYPE (args), args, t);
   TREE_SIDE_EFFECTS (t) = 1;
   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
 
@@ -3247,7 +3247,7 @@ spu_va_start (tree valist, rtx nextarg)
   t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (skip), t,
              size_int (crtl->args.pretend_args_size
                         - STACK_POINTER_OFFSET));
-  t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (skip), skip, t);
+  t = build2 (MODIFY_EXPR, TREE_TYPE (skip), skip, t);
   TREE_SIDE_EFFECTS (t) = 1;
   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
 }
@@ -3270,8 +3270,8 @@ spu_va_start (tree valist, rtx nextarg)
     ret = *(TYPE *)addr;
  */
 static tree
-spu_gimplify_va_arg_expr (tree valist, tree type, tree * pre_p,
-                         tree * post_p ATTRIBUTE_UNUSED)
+spu_gimplify_va_arg_expr (tree valist, tree type, gimple_seq * pre_p,
+                         gimple_seq * post_p ATTRIBUTE_UNUSED)
 {
   tree f_args, f_skip;
   tree args, skip;
@@ -3303,22 +3303,21 @@ spu_gimplify_va_arg_expr (tree valist, tree type, tree * pre_p,
   /* build conditional expression to calculate addr. The expression
      will be gimplified later. */
   paddedsize = size_int (rsize);
-  tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node, args, paddedsize);
+  tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node, unshare_expr (args), paddedsize);
   tmp = build2 (TRUTH_AND_EXPR, boolean_type_node,
-               build2 (GT_EXPR, boolean_type_node, tmp, skip),
-               build2 (LE_EXPR, boolean_type_node, args, skip));
+               build2 (GT_EXPR, boolean_type_node, tmp, unshare_expr (skip)),
+               build2 (LE_EXPR, boolean_type_node, unshare_expr (args),
+               unshare_expr (skip)));
 
   tmp = build3 (COND_EXPR, ptr_type_node, tmp,
-               build2 (POINTER_PLUS_EXPR, ptr_type_node, skip,
-                       size_int (32)), args);
+               build2 (POINTER_PLUS_EXPR, ptr_type_node, unshare_expr (skip),
+                       size_int (32)), unshare_expr (args));
 
-  tmp = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, addr, tmp);
-  gimplify_and_add (tmp, pre_p);
+  gimplify_assign (addr, tmp, pre_p);
 
   /* update VALIST.__args */
   tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node, addr, paddedsize);
-  tmp = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (args), args, tmp);
-  gimplify_and_add (tmp, pre_p);
+  gimplify_assign (unshare_expr (args), tmp, pre_p);
 
   addr = fold_convert (build_pointer_type (type), addr);
 
index 6cbe52d..64ecec8 100644
@@ -45,7 +45,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "target-def.h"
 #include "tm_p.h"
 #include "langhooks.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "df.h"
 #include "ggc.h"
 
@@ -1350,11 +1350,11 @@ xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
   u = build_int_cst (NULL_TREE, INCOMING_FRAME_SP_OFFSET);
   u = fold_convert (TREE_TYPE (count), u);
   t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (base), t, u);
-  t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (base), base, t);
+  t = build2 (MODIFY_EXPR, TREE_TYPE (base), base, t);
   TREE_SIDE_EFFECTS (t) = 1;
   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
 
-  t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (count), count, 
+  t = build2 (MODIFY_EXPR, TREE_TYPE (count), count, 
              build_int_cst (NULL_TREE,
                             crtl->args.info * UNITS_PER_WORD));
   TREE_SIDE_EFFECTS (t) = 1;
@@ -1366,8 +1366,8 @@ xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
    Note:  This algorithm is documented in stormy-abi.  */
    
 static tree
-xstormy16_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p,
-                               tree *post_p ATTRIBUTE_UNUSED)
+xstormy16_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
+                               gimple_seq *post_p ATTRIBUTE_UNUSED)
 {
   tree f_base, f_count;
   tree base, count;
@@ -1408,8 +1408,7 @@ xstormy16_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p,
       gimplify_and_add (t, pre_p);
 
       t = build2 (POINTER_PLUS_EXPR, ptr_type_node, base, count_tmp);
-      t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
-      gimplify_and_add (t, pre_p);
+      gimplify_assign (addr, t, pre_p);
 
       t = build1 (GOTO_EXPR, void_type_node, lab_gotaddr);
       gimplify_and_add (t, pre_p);
@@ -1427,7 +1426,7 @@ xstormy16_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p,
       tree r, u;
 
       r = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
-      u = build2 (GIMPLE_MODIFY_STMT, void_type_node, count_tmp, r);
+      u = build2 (MODIFY_EXPR, TREE_TYPE (count_tmp), count_tmp, r);
 
       t = fold_convert (TREE_TYPE (count), r);
       t = build2 (GE_EXPR, boolean_type_node, count_tmp, t);
@@ -1444,16 +1443,14 @@ xstormy16_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p,
   t = fold_convert (TREE_TYPE (t), fold (t));
   t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
   t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (base), base, t);
-  t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
-  gimplify_and_add (t, pre_p);
+  gimplify_assign (addr, t, pre_p);
 
   t = build1 (LABEL_EXPR, void_type_node, lab_gotaddr);
   gimplify_and_add (t, pre_p);
 
   t = fold_convert (TREE_TYPE (count), size_tree);
   t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
-  t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (count), count, t);
-  gimplify_and_add (t, pre_p);
+  gimplify_assign (count, t, pre_p);
   
   addr = fold_convert (build_pointer_type (type), addr);
   return build_va_arg_indirect_ref (addr);
index 38a621d..000df14 100644
@@ -48,7 +48,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "target.h"
 #include "target-def.h"
 #include "langhooks.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "df.h"
 
 
@@ -142,8 +142,9 @@ static section *xtensa_select_rtx_section (enum machine_mode, rtx,
 static bool xtensa_rtx_costs (rtx, int, int, int *);
 static tree xtensa_build_builtin_va_list (void);
 static bool xtensa_return_in_memory (const_tree, const_tree);
+static tree xtensa_gimplify_va_arg_expr (tree, tree, gimple_seq *,
+                                        gimple_seq *);
 static rtx xtensa_function_value (const_tree, const_tree, bool);
-static tree xtensa_gimplify_va_arg_expr (tree, tree, tree *, tree *);
 static void xtensa_init_builtins (void);
 static tree xtensa_fold_builtin (tree, tree, bool);
 static rtx xtensa_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
@@ -2538,14 +2539,14 @@ xtensa_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
   /* Call __builtin_saveregs; save the result in __va_reg */
   u = make_tree (sizetype, expand_builtin_saveregs ());
   u = fold_convert (ptr_type_node, u);
-  t = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, reg, u);
+  t = build2 (MODIFY_EXPR, ptr_type_node, reg, u);
   TREE_SIDE_EFFECTS (t) = 1;
   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
 
   /* Set the __va_stk member to ($arg_ptr - 32).  */
   u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
   u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u, size_int (-32));
-  t = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, stk, u);
+  t = build2 (MODIFY_EXPR, ptr_type_node, stk, u);
   TREE_SIDE_EFFECTS (t) = 1;
   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
 
@@ -2554,7 +2555,7 @@ xtensa_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
      alignment offset for __va_stk.  */
   if (arg_words >= MAX_ARGS_IN_REGISTERS)
     arg_words += 2;
-  t = build2 (GIMPLE_MODIFY_STMT, integer_type_node, ndx,
+  t = build2 (MODIFY_EXPR, integer_type_node, ndx,
              build_int_cst (integer_type_node, arg_words * UNITS_PER_WORD));
   TREE_SIDE_EFFECTS (t) = 1;
   expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
@@ -2564,8 +2565,8 @@ xtensa_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
 /* Implement `va_arg'.  */
 
 static tree
-xtensa_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p,
-                            tree *post_p ATTRIBUTE_UNUSED)
+xtensa_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
+                            gimple_seq *post_p ATTRIBUTE_UNUSED)
 {
   tree f_stk, stk;
   tree f_reg, reg;
@@ -2624,8 +2625,7 @@ xtensa_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p,
                  build_int_cst (integer_type_node, align - 1));
       t = build2 (BIT_AND_EXPR, integer_type_node, t,
                  build_int_cst (integer_type_node, -align));
-      t = build2 (GIMPLE_MODIFY_STMT, integer_type_node, orig_ndx, t);
-      gimplify_and_add (t, pre_p);
+      gimplify_assign (orig_ndx, t, pre_p);
     }
 
 
@@ -2635,8 +2635,7 @@ xtensa_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p,
 
   t = fold_convert (integer_type_node, va_size);
   t = build2 (PLUS_EXPR, integer_type_node, orig_ndx, t);
-  t = build2 (GIMPLE_MODIFY_STMT, integer_type_node, ndx, t);
-  gimplify_and_add (t, pre_p);
+  gimplify_assign (ndx, t, pre_p);
 
 
   /* Check if the argument is in registers:
@@ -2661,8 +2660,7 @@ xtensa_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p,
                  NULL_TREE);
       gimplify_and_add (t, pre_p);
 
-      t = build2 (GIMPLE_MODIFY_STMT, void_type_node, array, reg);
-      gimplify_and_add (t, pre_p);
+      gimplify_assign (array, reg, pre_p);
 
       t = build1 (GOTO_EXPR, void_type_node, lab_over);
       gimplify_and_add (t, pre_p);
@@ -2694,14 +2692,12 @@ xtensa_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p,
 
   t = size_binop (PLUS_EXPR, va_size, size_int (32));
   t = fold_convert (integer_type_node, t);
-  t = build2 (GIMPLE_MODIFY_STMT, integer_type_node, ndx, t);
-  gimplify_and_add (t, pre_p);
+  gimplify_assign (ndx, t, pre_p);
 
   t = build1 (LABEL_EXPR, void_type_node, lab_false2);
   gimplify_and_add (t, pre_p);
 
-  t = build2 (GIMPLE_MODIFY_STMT, void_type_node, array, stk);
-  gimplify_and_add (t, pre_p);
+  gimplify_assign (array, stk, pre_p);
 
   if (lab_over)
     {
index c015f6e..3293be1 100755
@@ -1017,7 +1017,7 @@ Optional Features:
                          enable expensive run-time checks.  With LIST,
                          enable only specific categories of checks.
                          Categories are: yes,no,all,none,release.
-                         Flags are: assert,df,fold,gc,gcac,misc,
+                         Flags are: assert,df,fold,gc,gcac,gimple,misc,
                          rtlflag,rtl,runtime,tree,valgrind,types.
   --enable-coverage=LEVEL
                          enable compiler's code coverage collection.
@@ -7266,25 +7266,25 @@ do
        # these set all the flags to specific states
        yes)            ac_assert_checking=1 ; ac_checking=1 ; ac_df_checking= ;
                        ac_fold_checking= ; ac_gc_checking=1 ;
-                       ac_gc_always_collect= ; ac_rtl_checking= ;
+                       ac_gc_always_collect= ; ac_gimple_checking=1 ; ac_rtl_checking= ;
                        ac_rtlflag_checking=1 ; ac_runtime_checking=1 ;
                        ac_tree_checking=1 ; ac_valgrind_checking= ;
                        ac_types_checking=1 ;;
        no|none)        ac_assert_checking= ; ac_checking= ; ac_df_checking= ;
                        ac_fold_checking= ; ac_gc_checking= ;
-                       ac_gc_always_collect= ; ac_rtl_checking= ;
+                       ac_gc_always_collect= ; ac_gimple_checking= ; ac_rtl_checking= ;
                        ac_rtlflag_checking= ; ac_runtime_checking= ;
                        ac_tree_checking= ; ac_valgrind_checking= ;
                        ac_types_checking= ;;
        all)            ac_assert_checking=1 ; ac_checking=1 ; ac_df_checking=1 ;
                        ac_fold_checking=1 ; ac_gc_checking=1 ;
-                       ac_gc_always_collect=1 ; ac_rtl_checking=1 ;
+                       ac_gc_always_collect=1 ; ac_gimple_checking=1 ; ac_rtl_checking=1 ;
                        ac_rtlflag_checking=1 ; ac_runtime_checking=1 ;
                        ac_tree_checking=1 ; ac_valgrind_checking= ;
                        ac_types_checking=1 ;;
        release)        ac_assert_checking=1 ; ac_checking= ; ac_df_checking= ;
                        ac_fold_checking= ; ac_gc_checking= ;
-                       ac_gc_always_collect= ; ac_rtl_checking= ;
+                       ac_gc_always_collect= ; ac_gimple_checking= ; ac_rtl_checking= ;
                        ac_rtlflag_checking= ; ac_runtime_checking=1 ;
                        ac_tree_checking= ; ac_valgrind_checking= ;
                        ac_types_checking= ;;
@@ -7294,6 +7294,7 @@ do
        fold)           ac_fold_checking=1 ;;
        gc)             ac_gc_checking=1 ;;
        gcac)           ac_gc_always_collect=1 ;;
+       gimple)         ac_gimple_checking=1 ;;
        misc)           ac_checking=1 ;;
        rtl)            ac_rtl_checking=1 ;;
        rtlflag)        ac_rtlflag_checking=1 ;;
@@ -7332,6 +7333,13 @@ cat >>confdefs.h <<\_ACEOF
 _ACEOF
 
 fi
+if test x$ac_gimple_checking != x ; then
+
+cat >>confdefs.h <<\_ACEOF
+#define ENABLE_GIMPLE_CHECKING 1
+_ACEOF
+
+fi
 
 if test x$ac_runtime_checking != x ; then
 
@@ -14723,13 +14731,13 @@ if test "${lt_cv_nm_interface+set}" = set; then
 else
   lt_cv_nm_interface="BSD nm"
   echo "int some_variable = 0;" > conftest.$ac_ext
-  (eval echo "\"\$as_me:14726: $ac_compile\"" >&5)
+  (eval echo "\"\$as_me:14734: $ac_compile\"" >&5)
   (eval "$ac_compile" 2>conftest.err)
   cat conftest.err >&5
-  (eval echo "\"\$as_me:14729: $NM \\\"conftest.$ac_objext\\\"\"" >&5)
+  (eval echo "\"\$as_me:14737: $NM \\\"conftest.$ac_objext\\\"\"" >&5)
   (eval "$NM \"conftest.$ac_objext\"" 2>conftest.err > conftest.out)
   cat conftest.err >&5
-  (eval echo "\"\$as_me:14732: output\"" >&5)
+  (eval echo "\"\$as_me:14740: output\"" >&5)
   cat conftest.out >&5
   if $GREP 'External.*some_variable' conftest.out > /dev/null; then
     lt_cv_nm_interface="MS dumpbin"
@@ -15784,7 +15792,7 @@ ia64-*-hpux*)
   ;;
 *-*-irix6*)
   # Find out which ABI we are using.
-  echo '#line 15787 "configure"' > conftest.$ac_ext
+  echo '#line 15795 "configure"' > conftest.$ac_ext
   if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5
   (eval $ac_compile) 2>&5
   ac_status=$?
@@ -16404,11 +16412,11 @@ else
    -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
    -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
    -e 's:$: $lt_compiler_flag:'`
-   (eval echo "\"\$as_me:16407: $lt_compile\"" >&5)
+   (eval echo "\"\$as_me:16415: $lt_compile\"" >&5)
    (eval "$lt_compile" 2>conftest.err)
    ac_status=$?
    cat conftest.err >&5
-   echo "$as_me:16411: \$? = $ac_status" >&5
+   echo "$as_me:16419: \$? = $ac_status" >&5
    if (exit $ac_status) && test -s "$ac_outfile"; then
      # The compiler can only warn and ignore the option if not recognized
      # So say no if there are warnings other than the usual output.
@@ -16726,11 +16734,11 @@ else
    -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
    -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
    -e 's:$: $lt_compiler_flag:'`
-   (eval echo "\"\$as_me:16729: $lt_compile\"" >&5)
+   (eval echo "\"\$as_me:16737: $lt_compile\"" >&5)
    (eval "$lt_compile" 2>conftest.err)
    ac_status=$?
    cat conftest.err >&5
-   echo "$as_me:16733: \$? = $ac_status" >&5
+   echo "$as_me:16741: \$? = $ac_status" >&5
    if (exit $ac_status) && test -s "$ac_outfile"; then
      # The compiler can only warn and ignore the option if not recognized
      # So say no if there are warnings other than the usual output.
@@ -16831,11 +16839,11 @@ else
    -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
    -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
    -e 's:$: $lt_compiler_flag:'`
-   (eval echo "\"\$as_me:16834: $lt_compile\"" >&5)
+   (eval echo "\"\$as_me:16842: $lt_compile\"" >&5)
    (eval "$lt_compile" 2>out/conftest.err)
    ac_status=$?
    cat out/conftest.err >&5
-   echo "$as_me:16838: \$? = $ac_status" >&5
+   echo "$as_me:16846: \$? = $ac_status" >&5
    if (exit $ac_status) && test -s out/conftest2.$ac_objext
    then
      # The compiler can only warn and ignore the option if not recognized
@@ -16886,11 +16894,11 @@ else
    -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
    -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
    -e 's:$: $lt_compiler_flag:'`
-   (eval echo "\"\$as_me:16889: $lt_compile\"" >&5)
+   (eval echo "\"\$as_me:16897: $lt_compile\"" >&5)
    (eval "$lt_compile" 2>out/conftest.err)
    ac_status=$?
    cat out/conftest.err >&5
-   echo "$as_me:16893: \$? = $ac_status" >&5
+   echo "$as_me:16901: \$? = $ac_status" >&5
    if (exit $ac_status) && test -s out/conftest2.$ac_objext
    then
      # The compiler can only warn and ignore the option if not recognized
@@ -19683,7 +19691,7 @@ else
   lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
   lt_status=$lt_dlunknown
   cat > conftest.$ac_ext <<_LT_EOF
-#line 19686 "configure"
+#line 19694 "configure"
 #include "confdefs.h"
 
 #if HAVE_DLFCN_H
@@ -19783,7 +19791,7 @@ else
   lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
   lt_status=$lt_dlunknown
   cat > conftest.$ac_ext <<_LT_EOF
-#line 19786 "configure"
+#line 19794 "configure"
 #include "confdefs.h"
 
 #if HAVE_DLFCN_H
index 03c3d1c..6d266f2 100644
@@ -350,7 +350,7 @@ AC_ARG_ENABLE(checking,
                          enable expensive run-time checks.  With LIST,
                          enable only specific categories of checks.
                          Categories are: yes,no,all,none,release.
-                         Flags are: assert,df,fold,gc,gcac,misc,
+                         Flags are: assert,df,fold,gc,gcac,gimple,misc,
                          rtlflag,rtl,runtime,tree,valgrind,types.],
 [ac_checking_flags="${enableval}"],[
 # Determine the default checks.
@@ -366,25 +366,25 @@ do
        # these set all the flags to specific states
        yes)            ac_assert_checking=1 ; ac_checking=1 ; ac_df_checking= ;
                        ac_fold_checking= ; ac_gc_checking=1 ;
-                       ac_gc_always_collect= ; ac_rtl_checking= ;
+                       ac_gc_always_collect= ; ac_gimple_checking=1 ; ac_rtl_checking= ;
                        ac_rtlflag_checking=1 ; ac_runtime_checking=1 ;
                        ac_tree_checking=1 ; ac_valgrind_checking= ;
                        ac_types_checking=1 ;;
        no|none)        ac_assert_checking= ; ac_checking= ; ac_df_checking= ;
                        ac_fold_checking= ; ac_gc_checking= ;
-                       ac_gc_always_collect= ; ac_rtl_checking= ;
+                       ac_gc_always_collect= ; ac_gimple_checking= ; ac_rtl_checking= ;
                        ac_rtlflag_checking= ; ac_runtime_checking= ;
                        ac_tree_checking= ; ac_valgrind_checking= ;
                        ac_types_checking= ;;
        all)            ac_assert_checking=1 ; ac_checking=1 ; ac_df_checking=1 ;
                        ac_fold_checking=1 ; ac_gc_checking=1 ;
-                       ac_gc_always_collect=1 ; ac_rtl_checking=1 ;
+                       ac_gc_always_collect=1 ; ac_gimple_checking=1 ; ac_rtl_checking=1 ;
                        ac_rtlflag_checking=1 ; ac_runtime_checking=1 ;
                        ac_tree_checking=1 ; ac_valgrind_checking= ;
                        ac_types_checking=1 ;;
        release)        ac_assert_checking=1 ; ac_checking= ; ac_df_checking= ;
                        ac_fold_checking= ; ac_gc_checking= ;
-                       ac_gc_always_collect= ; ac_rtl_checking= ;
+                       ac_gc_always_collect= ; ac_gimple_checking= ; ac_rtl_checking= ;
                        ac_rtlflag_checking= ; ac_runtime_checking=1 ;
                        ac_tree_checking= ; ac_valgrind_checking= ;
                        ac_types_checking= ;;
@@ -394,6 +394,7 @@ do
        fold)           ac_fold_checking=1 ;;
        gc)             ac_gc_checking=1 ;;
        gcac)           ac_gc_always_collect=1 ;;
+       gimple)         ac_gimple_checking=1 ;;
        misc)           ac_checking=1 ;;
        rtl)            ac_rtl_checking=1 ;;
        rtlflag)        ac_rtlflag_checking=1 ;;
@@ -422,6 +423,12 @@ if test x$ac_assert_checking != x ; then
   AC_DEFINE(ENABLE_ASSERT_CHECKING, 1,
 [Define if you want assertions enabled.  This is a cheap check.])
 fi
+if test x$ac_gimple_checking != x ; then
+  AC_DEFINE(ENABLE_GIMPLE_CHECKING, 1,
+[Define if you want operations on GIMPLE (the basic data structure of
+the high-level optimizers) to be checked for dynamic type safety at
+runtime.  This is moderately expensive.])
+fi
 GCC_TARGET_TEMPLATE(ENABLE_RUNTIME_CHECKING)
 if test x$ac_runtime_checking != x ; then
   AC_DEFINE(ENABLE_RUNTIME_CHECKING, 1,
index f5d62ca..e1d66a5 100644
@@ -47,11 +47,20 @@ typedef struct rtvec_def *rtvec;
 typedef const struct rtvec_def *const_rtvec;
 union tree_node;
 typedef union tree_node *tree;
+union gimple_statement_d;
+typedef union gimple_statement_d *gimple;
 typedef const union tree_node *const_tree;
+typedef const union gimple_statement_d *const_gimple;
 union section;
 typedef union section section;
 struct cl_target_option;
 struct cl_optimization;
+struct gimple_seq_d;
+typedef struct gimple_seq_d *gimple_seq;
+typedef const struct gimple_seq_d *const_gimple_seq;
+struct gimple_seq_node_d;
+typedef struct gimple_seq_node_d *gimple_seq_node;
+typedef const struct gimple_seq_node_d *const_gimple_seq_node;
 
 /* The major intermediate representations of GCC.  */
 enum ir_type {
index 6a13613..ad2964c 100644
@@ -1,3 +1,143 @@
+2008-07-28  Richard Guenther  <rguenther@suse.de>
+
+       Merge from gimple-tuples-branch.
+
+       2008-07-22  Aldy Hernandez  <aldyh@redhat.com>
+
+       * cp-gimplify.c (gimplify_if_stmt): Set location on newly created
+       COND_EXPR.
+
+       2008-07-18  Jakub Jelinek  <jakub@redhat.com>
+
+       * decl.c (finish_function): Call gimple_body after cp_genericize.
+
+       2008-07-18  Aldy Hernandez  <aldyh@redhat.com>
+
+       * optimize.c: Include gimple.h instead of tree-gimple.h.
+       * Make-lang.in (cp-gimplify.o): Depend on tree-iterator.h.
+       * cp-gimplify.c: Rename tree-gimple.h to gimple.h.  Include
+       tree-iterator.h.
+
+       2008-07-16  Jakub Jelinek  <jakub@redhat.com>
+
+       * optimize.c (maybe_clone_body): Clear DECL_SAVED_TREE for the clone.
+
+       2008-07-14  Jakub Jelinek  <jakub@redhat.com>
+
+       * cp-gimplify.c (cp_gimplify_expr): Update comment.
+
+       2008-07-14  Aldy Hernandez  <aldyh@redhat.com>
+
+       * cp-tree.h (union lang_tree_node): Rename GENERIC_NEXT to
+       TREE_CHAIN.
+       * cp-gimplify.c (cxx_omp_clause_apply_fn): Rename
+       GIMPLE_MODIFY_STMT to MODIFY_EXPR.
+       (cxx_omp_clause_copy_ctor): Same.
+       (cxx_omp_clause_assign_op): Same.
+
+       2008-05-28  Jakub Jelinek  <jakub@redhat.com>
+
+       * cp-gimplify.c (cp_gimplify_omp_for): Add pre_p argument.  Tuplify.
+       (cp_gimplify_expr): Adjust caller.
+
+       2008-05-11  Doug Kwan  <dougkwan@google.com>
+
+       * init.c (build_vec_delete): Add type conversion for argument
+       0 of POINTER_PLUS_EXPR.
+
+       2008-04-29  Doug Kwan  <dougkwan@google.com>
+
+       * decl2.c: Include "gimple.h".
+       (cp_write_global_declarations): Use gimple_body instead of
+       DECL_SAVED_TREE.
+       * Make-lang.in (cp/decl2.o): Add $(GIMPLE_H).
+
+       2008-04-10  Diego Novillo  <dnovillo@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-04/msg00913.html
+
+       * optimize.c (maybe_clone_body): Re-enable call to
+       clone_body.
+       * cp-gimplify.c (cp_gimplify_omp_for): Mark disabled
+       code with call to gimple_unreachable.
+       (cp_genericize): Fix handling of clone bodies.
+
+       2008-04-04  Diego Novillo  <dnovillo@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-04/msg00413.html
+
+       * optimize.c (maybe_clone_body): Re-enable.
+
+       2008-02-19  Diego Novillo  <dnovillo@google.com>
+                   Oleg Ryjkov  <olegr@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-02/msg00804.html
+
+       * cp-gimplify.c (gimplify_for_stmt): Change gimple_seq
+       argument to gimple_seq *.  Update all users.
+       (gimplify_must_not_throw_expr): Likewise.
+
+       2008-02-04  Oleg Ryjkov  <olegr@google.com>
+
+       * except.c: Include gimple.h.
+       (cp_protect_cleanup_actions): Convert to tuples.
+       * Make-lang.in (cp/except.o): Add dependency on gimple.h.
+
+       2007-11-10  Aldy Hernandez  <aldyh@redhat.com>
+
+       * cp-gimplify.c (gimplify_cp_loop): Call tree_annotate_all_with_locus
+       instead of annotating each block manually.
+
+       2007-10-30  Aldy Hernandez  <aldyh@redhat.com>
+
+       * cp-gimplify.c (gimplify_cp_loop): Tuplify.
+       (gimplify_for_stmt): Same.
+       (gimplify_switch_stmt): Same.
+       (cp_gimplify_expr): [FOR_STMT]: Do not call gimplify_for_stmt.  Return
+       GS_OK.
+       [WHILE_STMT]: Return GS_OK.
+       [SWITCH_STMT]: Same.
+       [CONTINUE_STMT]: Same.
+       [BREAK_STMT]: Same.
+       (cp_genericize): Set gimple_body() of cloned functions when needed.
+
+       2007-10-29  Aldy Hernandez  <aldy@quesejoda.com>
+
+       * cp-gimplify.c: Move build_gimple_eh_filter_tree here.
+       (cp_gimplify_init_expr): Convert to tuples.
+       (gimplify_must_not_throw_expr): Make function return a
+       gimplify_status and convert to tuples.
+
+       2007-10-18  Aldy Hernandez  <aldy@quesejoda.com>
+
+       * cp-gimplify.c (genericize_try_block): Enable and do not call
+       gimplify_stmt.
+       (genericize_catch_block): Same.
+       (genericize_eh_spec_block): Same.
+       Rename gimple_build_eh_filter_tree to build_gimple_eh_filter_tree.
+       (cp_gimplify_expr): Enable TRY_BLOCK, HANDLER, and EH_SPEC_BLOCK.
+
+       2007-10-16  Aldy Hernandez  <aldy@quesejoda.com>
+
+       * optimize.c (maybe_clone_body): Comment out call to clone_body.
+       * decl.c (finish_function): Use gimple_body instead of
+       DECL_SAVED_TREE.
+       * cp-tree.h (cp_gimplify_expr): Last 2 arguments are sequences.
+       * cp-gimplify.c (genericize_try_block): Comment out.
+       (genericize_catch_block): Same.
+       (genericize_eh_spec_block): Same.
+       (gimplify_cp_loop): Comment out calls to gimplify_stmt.
+       (gimplify_for_stmt): Comment out.
+       (gimplify_switch_stmt): Comment out call to gimplify_stmt.
+       (cp_gimplify_omp_for): Same.
+       (gimplify_must_not_throw_expr): Argument pre_p is a sequence.
+       Comment out call to gimplify_stmt and append_to_statement_list.
+       Rename gimple_build_eh_filter_tree to build_gimple_eh_filter_tree.
+       (cp_gimplify_init_expr): Arguments pre_p and post_p are sequences.
+       (cp_gimplify_expr): Same.
+       Comment out calls to genericize_*_block.  Comment out call to
+       gimplify_for_stmt.
+
 2008-07-27  H.J. Lu  <hongjiu.lu@intel.com>
 
        PR c++/36944
index c787720..0396344 100644
@@ -239,7 +239,7 @@ cp/decl.o: cp/decl.c $(CXX_TREE_H) $(TM_H) $(FLAGS_H) cp/decl.h \
   debug.h gt-cp-decl.h $(TIMEVAR_H) $(TREE_FLOW_H) $(TARGET_H)
 cp/decl2.o: cp/decl2.c $(CXX_TREE_H) $(TM_H) $(FLAGS_H) cp/decl.h $(EXPR_H) \
   output.h except.h toplev.h $(RTL_H) $(C_COMMON_H) gt-cp-decl2.h $(CGRAPH_H) \
-  $(C_PRAGMA_H) $(TREE_DUMP_H) intl.h $(TARGET_H)
+  $(C_PRAGMA_H) $(TREE_DUMP_H) intl.h $(TARGET_H) $(GIMPLE_H)
 cp/cp-objcp-common.o : cp/cp-objcp-common.c $(CONFIG_H) $(SYSTEM_H) \
   coretypes.h $(TM_H) $(TREE_H) $(CXX_TREE_H) $(C_COMMON_H) toplev.h \
   langhooks.h $(LANGHOOKS_DEF_H) $(DIAGNOSTIC_H) debug.h \
@@ -280,17 +280,17 @@ cp/repo.o: cp/repo.c $(CXX_TREE_H) $(TM_H) toplev.h $(DIAGNOSTIC_H) \
   gt-cp-repo.h
 cp/semantics.o: cp/semantics.c $(CXX_TREE_H) $(TM_H) except.h toplev.h \
   $(FLAGS_H) debug.h output.h $(RTL_H) $(TIMEVAR_H) $(EXPR_H) \
-  $(TREE_INLINE_H) $(CGRAPH_H) $(TARGET_H) $(C_COMMON_H)
+  $(TREE_INLINE_H) $(CGRAPH_H) $(TARGET_H) $(C_COMMON_H) $(GIMPLE_H)
 cp/dump.o: cp/dump.c $(CXX_TREE_H) $(TM_H) $(TREE_DUMP_H)
 cp/optimize.o: cp/optimize.c $(CXX_TREE_H) $(TM_H) rtl.h $(INTEGRATE_H) \
-  insn-config.h input.h $(PARAMS_H) debug.h $(TREE_INLINE_H) $(TREE_GIMPLE_H) \
+  insn-config.h input.h $(PARAMS_H) debug.h $(TREE_INLINE_H) $(GIMPLE_H) \
   $(TARGET_H)
 cp/mangle.o: cp/mangle.c $(CXX_TREE_H) $(TM_H) toplev.h $(REAL_H) \
   gt-cp-mangle.h $(TARGET_H) $(TM_P_H)
 cp/parser.o: cp/parser.c $(CXX_TREE_H) $(TM_H) $(DIAGNOSTIC_H) gt-cp-parser.h \
   output.h $(TARGET_H)
 cp/cp-gimplify.o: cp/cp-gimplify.c $(CXX_TREE_H) toplev.h $(C_COMMON_H) \
-       $(TM_H) coretypes.h pointer-set.h
+       $(TM_H) coretypes.h pointer-set.h tree-iterator.h
 
 cp/name-lookup.o: cp/name-lookup.c $(CONFIG_H) $(SYSTEM_H) coretypes.h \
        $(TM_H) $(CXX_TREE_H) $(TIMEVAR_H) gt-cp-name-lookup.h toplev.h \
index c6d64df..8dda74d 100644
@@ -28,7 +28,8 @@ along with GCC; see the file COPYING3.  If not see
 #include "cp-tree.h"
 #include "c-common.h"
 #include "toplev.h"
-#include "tree-gimple.h"
+#include "tree-iterator.h"
+#include "gimple.h"
 #include "hashtab.h"
 #include "pointer-set.h"
 #include "flags.h"
@@ -62,20 +63,14 @@ begin_bc_block (enum bc_t bc)
    If we saw a break (or continue) in the scope, append a LABEL_EXPR to
    body.  Otherwise, just forget the label.  */
 
-static tree
-finish_bc_block (enum bc_t bc, tree label, tree body)
+static gimple_seq
+finish_bc_block (enum bc_t bc, tree label, gimple_seq body)
 {
   gcc_assert (label == bc_label[bc]);
 
   if (TREE_USED (label))
     {
-      tree t, sl = NULL;
-
-      t = build1 (LABEL_EXPR, void_type_node, label);
-
-      append_to_statement_list (body, &sl);
-      append_to_statement_list (t, &sl);
-      body = sl;
+      gimple_seq_add_stmt (&body, gimple_build_label (label));
     }
 
   bc_label[bc] = TREE_CHAIN (label);
@@ -83,11 +78,11 @@ finish_bc_block (enum bc_t bc, tree label, tree body)
   return body;
 }
 
-/* Build a GOTO_EXPR to represent a break or continue statement.  BC
-   indicates which.  */
+/* Get the LABEL_EXPR to represent a break or continue statement
+   in the current block scope.  BC indicates which.  */
 
 static tree
-build_bc_goto (enum bc_t bc)
+get_bc_label (enum bc_t bc)
 {
   tree label = bc_label[bc];
 
@@ -103,7 +98,7 @@ build_bc_goto (enum bc_t bc)
 
   /* Mark the label used for finish_bc_block.  */
   TREE_USED (label) = 1;
-  return build1 (GOTO_EXPR, void_type_node, label);
+  return label;
 }
 
 /* Genericize a TRY_BLOCK.  */
@@ -114,13 +109,6 @@ genericize_try_block (tree *stmt_p)
   tree body = TRY_STMTS (*stmt_p);
   tree cleanup = TRY_HANDLERS (*stmt_p);
 
-  gimplify_stmt (&body);
-
-  if (CLEANUP_P (*stmt_p))
-    /* A cleanup is an expression, so it doesn't need to be genericized.  */;
-  else
-    gimplify_stmt (&cleanup);
-
   *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
 }
 
@@ -132,12 +120,28 @@ genericize_catch_block (tree *stmt_p)
   tree type = HANDLER_TYPE (*stmt_p);
   tree body = HANDLER_BODY (*stmt_p);
 
-  gimplify_stmt (&body);
-
   /* FIXME should the caught type go in TREE_TYPE?  */
   *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
 }
 
+/* A terser interface for building a representation of an exception
+   specification.  */
+
+static tree
+build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
+{
+  tree t;
+
+  /* FIXME should the allowed types go in TREE_TYPE?  */
+  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
+  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
+
+  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
+  append_to_statement_list (body, &TREE_OPERAND (t, 0));
+
+  return t;
+}
+
 /* Genericize an EH_SPEC_BLOCK by converting it to a
    TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */
 
@@ -147,9 +151,8 @@ genericize_eh_spec_block (tree *stmt_p)
   tree body = EH_SPEC_STMTS (*stmt_p);
   tree allowed = EH_SPEC_RAISES (*stmt_p);
   tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());
-  gimplify_stmt (&body);
 
-  *stmt_p = gimple_build_eh_filter (body, allowed, failure);
+  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
 }
 
 /* Genericize an IF_STMT by turning it into a COND_EXPR.  */
@@ -158,6 +161,7 @@ static void
 gimplify_if_stmt (tree *stmt_p)
 {
   tree stmt, cond, then_, else_;
+  location_t locus = EXPR_LOCATION (*stmt_p);
 
   stmt = *stmt_p;
   cond = IF_COND (stmt);
@@ -175,6 +179,8 @@ gimplify_if_stmt (tree *stmt_p)
     stmt = else_;
   else
     stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
+  if (CAN_HAVE_LOCATION_P (stmt) && !EXPR_HAS_LOCATION (stmt))
+    SET_EXPR_LOCATION (stmt, locus);
   *stmt_p = stmt;
 }
 
@@ -185,15 +191,20 @@ gimplify_if_stmt (tree *stmt_p)
    evaluated before the loop body as in while and for loops, or after the
    loop body as in do-while loops.  */
 
-static tree
+static gimple_seq
 gimplify_cp_loop (tree cond, tree body, tree incr, bool cond_is_first)
 {
-  tree top, entry, exit, cont_block, break_block, stmt_list, t;
+  gimple top, entry, stmt;
+  gimple_seq stmt_list, body_seq, incr_seq, exit_seq;
+  tree cont_block, break_block;
   location_t stmt_locus;
 
   stmt_locus = input_location;
-  stmt_list = NULL_TREE;
-  entry = NULL_TREE;
+  stmt_list = NULL;
+  body_seq = NULL;
+  incr_seq = NULL;
+  exit_seq = NULL;
+  entry = NULL;
 
   break_block = begin_bc_block (bc_break);
   cont_block = begin_bc_block (bc_continue);
@@ -201,12 +212,12 @@ gimplify_cp_loop (tree cond, tree body, tree incr, bool cond_is_first)
   /* If condition is zero don't generate a loop construct.  */
   if (cond && integer_zerop (cond))
     {
-      top = NULL_TREE;
-      exit = NULL_TREE;
+      top = NULL;
       if (cond_is_first)
        {
-         t = build_bc_goto (bc_break);
-         append_to_statement_list (t, &stmt_list);
+         stmt = gimple_build_goto (get_bc_label (bc_break));
+         gimple_set_location (stmt, stmt_locus);
+         gimple_seq_add_stmt (&stmt_list, stmt);
        }
     }
   else
@@ -215,44 +226,55 @@ gimplify_cp_loop (tree cond, tree body, tree incr, bool cond_is_first)
         back through the main gimplifier to lower it.  Given that we
         have to gimplify the loop body NOW so that we can resolve
         break/continue stmts, seems easier to just expand to gotos.  */
-      top = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
+      top = gimple_build_label (create_artificial_label ());
 
       /* If we have an exit condition, then we build an IF with gotos either
         out of the loop, or to the top of it.  If there's no exit condition,
         then we just build a jump back to the top.  */
-      exit = build_and_jump (&LABEL_EXPR_LABEL (top));
       if (cond && !integer_nonzerop (cond))
        {
-         t = build_bc_goto (bc_break);
-         exit = fold_build3 (COND_EXPR, void_type_node, cond, exit, t);
-         gimplify_stmt (&exit);
+         if (cond != error_mark_node)
+           { 
+             gimplify_expr (&cond, &exit_seq, NULL, is_gimple_val, fb_rvalue);
+             stmt = gimple_build_cond (NE_EXPR, cond,
+                                       build_int_cst (TREE_TYPE (cond), 0),
+                                       gimple_label_label (top),
+                                       get_bc_label (bc_break));
+             gimple_seq_add_stmt (&exit_seq, stmt);
+           }
 
          if (cond_is_first)
            {
              if (incr)
                {
-                 entry = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
-                 t = build_and_jump (&LABEL_EXPR_LABEL (entry));
+                 entry = gimple_build_label (create_artificial_label ());
+                 stmt = gimple_build_goto (gimple_label_label (entry));
                }
              else
-               t = build_bc_goto (bc_continue);
-             append_to_statement_list (t, &stmt_list);
+               stmt = gimple_build_goto (get_bc_label (bc_continue));
+             gimple_set_location (stmt, stmt_locus);
+             gimple_seq_add_stmt (&stmt_list, stmt);
            }
        }
+      else
+       {
+         stmt = gimple_build_goto (gimple_label_label (top));
+         gimple_seq_add_stmt (&exit_seq, stmt);
+       }
     }
 
-  gimplify_stmt (&body);
-  gimplify_stmt (&incr);
+  gimplify_stmt (&body, &body_seq);
+  gimplify_stmt (&incr, &incr_seq);
 
-  body = finish_bc_block (bc_continue, cont_block, body);
+  body_seq = finish_bc_block (bc_continue, cont_block, body_seq);
 
-  append_to_statement_list (top, &stmt_list);
-  append_to_statement_list (body, &stmt_list);
-  append_to_statement_list (incr, &stmt_list);
-  append_to_statement_list (entry, &stmt_list);
-  append_to_statement_list (exit, &stmt_list);
+  gimple_seq_add_stmt (&stmt_list, top);
+  gimple_seq_add_seq (&stmt_list, body_seq);
+  gimple_seq_add_seq (&stmt_list, incr_seq);
+  gimple_seq_add_stmt (&stmt_list, entry);
+  gimple_seq_add_seq (&stmt_list, exit_seq);
 
-  annotate_all_with_locus (&stmt_list, stmt_locus);
+  annotate_all_with_location (stmt_list, stmt_locus);
 
   return finish_bc_block (bc_break, break_block, stmt_list);
 }
@@ -261,45 +283,52 @@ gimplify_cp_loop (tree cond, tree body, tree incr, bool cond_is_first)
    prequeue and hand off to gimplify_cp_loop.  */
 
 static void
-gimplify_for_stmt (tree *stmt_p, tree *pre_p)
+gimplify_for_stmt (tree *stmt_p, gimple_seq *pre_p)
 {
   tree stmt = *stmt_p;
 
   if (FOR_INIT_STMT (stmt))
     gimplify_and_add (FOR_INIT_STMT (stmt), pre_p);
 
-  *stmt_p = gimplify_cp_loop (FOR_COND (stmt), FOR_BODY (stmt),
-                             FOR_EXPR (stmt), 1);
+  gimple_seq_add_seq (pre_p,
+                     gimplify_cp_loop (FOR_COND (stmt), FOR_BODY (stmt),
+                                       FOR_EXPR (stmt), 1));
+  *stmt_p = NULL_TREE;
 }
 
 /* Gimplify a WHILE_STMT node.  */
 
 static void
-gimplify_while_stmt (tree *stmt_p)
+gimplify_while_stmt (tree *stmt_p, gimple_seq *pre_p)
 {
   tree stmt = *stmt_p;
-  *stmt_p = gimplify_cp_loop (WHILE_COND (stmt), WHILE_BODY (stmt),
-                             NULL_TREE, 1);
+  gimple_seq_add_seq (pre_p,
+                     gimplify_cp_loop (WHILE_COND (stmt), WHILE_BODY (stmt),
+                                       NULL_TREE, 1));
+  *stmt_p = NULL_TREE;
 }
 
 /* Gimplify a DO_STMT node.  */
 
 static void
-gimplify_do_stmt (tree *stmt_p)
+gimplify_do_stmt (tree *stmt_p, gimple_seq *pre_p)
 {
   tree stmt = *stmt_p;
-  *stmt_p = gimplify_cp_loop (DO_COND (stmt), DO_BODY (stmt),
-                             NULL_TREE, 0);
+  gimple_seq_add_seq (pre_p,
+                     gimplify_cp_loop (DO_COND (stmt), DO_BODY (stmt),
+                                       NULL_TREE, 0));
+  *stmt_p = NULL_TREE;
 }
 
 /* Genericize a SWITCH_STMT by turning it into a SWITCH_EXPR.  */
 
 static void
-gimplify_switch_stmt (tree *stmt_p)
+gimplify_switch_stmt (tree *stmt_p, gimple_seq *pre_p)
 {
   tree stmt = *stmt_p;
-  tree break_block, body;
+  tree break_block, body, t;
   location_t stmt_locus = input_location;
+  gimple_seq seq = NULL;
 
   break_block = begin_bc_block (bc_break);
 
@@ -307,12 +336,14 @@ gimplify_switch_stmt (tree *stmt_p)
   if (!body)
     body = build_empty_stmt ();
 
-  *stmt_p = build3 (SWITCH_EXPR, SWITCH_STMT_TYPE (stmt),
-                   SWITCH_STMT_COND (stmt), body, NULL_TREE);
-  SET_EXPR_LOCATION (*stmt_p, stmt_locus);
-  gimplify_stmt (stmt_p);
+  t = build3 (SWITCH_EXPR, SWITCH_STMT_TYPE (stmt),
+             SWITCH_STMT_COND (stmt), body, NULL_TREE);
+  SET_EXPR_LOCATION (t, stmt_locus);
+  gimplify_and_add (t, &seq);
 
-  *stmt_p = finish_bc_block (bc_break, break_block, *stmt_p);
+  seq = finish_bc_block (bc_break, break_block, seq);
+  gimple_seq_add_seq (pre_p, seq);
+  *stmt_p = NULL_TREE;
 }
 
 /* Hook into the middle of gimplifying an OMP_FOR node.  This is required
@@ -321,10 +352,12 @@ gimplify_switch_stmt (tree *stmt_p)
    regular gimplifier.  */
 
 static enum gimplify_status
-cp_gimplify_omp_for (tree *expr_p)
+cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
 {
   tree for_stmt = *expr_p;
   tree cont_block;
+  gimple stmt;
+  gimple_seq seq = NULL;
 
   /* Protect ourselves from recursion.  */
   if (OMP_FOR_GIMPLIFYING_P (for_stmt))
@@ -336,10 +369,15 @@ cp_gimplify_omp_for (tree *expr_p)
      statement expressions within the INIT, COND, or INCR expressions.  */
   cont_block = begin_bc_block (bc_continue);
 
-  gimplify_stmt (expr_p);
+  gimplify_and_add (for_stmt, &seq);
+  stmt = gimple_seq_last_stmt (seq);
+  if (gimple_code (stmt) == GIMPLE_OMP_FOR)
+    gimple_omp_set_body (stmt, finish_bc_block (bc_continue, cont_block,
+                                               gimple_omp_body (stmt)));
+  else
+    seq = finish_bc_block (bc_continue, cont_block, seq);
+  gimple_seq_add_seq (pre_p, seq);
 
-  OMP_FOR_BODY (for_stmt)
-    = finish_bc_block (bc_continue, cont_block, OMP_FOR_BODY (for_stmt));
   OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
 
   return GS_ALL_DONE;
@@ -383,7 +421,7 @@ gimplify_expr_stmt (tree *stmt_p)
 /* Gimplify initialization from an AGGR_INIT_EXPR.  */
 
 static void
-cp_gimplify_init_expr (tree *expr_p, tree *pre_p, tree *post_p)
+cp_gimplify_init_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
 {
   tree from = TREE_OPERAND (*expr_p, 1);
   tree to = TREE_OPERAND (*expr_p, 0);
@@ -441,31 +479,31 @@ cp_gimplify_init_expr (tree *expr_p, tree *pre_p, tree *post_p)
 
 /* Gimplify a MUST_NOT_THROW_EXPR.  */
 
-static void
-gimplify_must_not_throw_expr (tree *expr_p, tree *pre_p)
+static enum gimplify_status
+gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
 {
   tree stmt = *expr_p;
   tree temp = voidify_wrapper_expr (stmt, NULL);
   tree body = TREE_OPERAND (stmt, 0);
 
-  gimplify_stmt (&body);
-
-  stmt = gimple_build_eh_filter (body, NULL_TREE,
-                                build_call_n (terminate_node, 0));
+  stmt = build_gimple_eh_filter_tree (body, NULL_TREE,
+                                     build_call_n (terminate_node, 0));
 
+  gimplify_and_add (stmt, pre_p);
   if (temp)
     {
-      append_to_statement_list (stmt, pre_p);
       *expr_p = temp;
+      return GS_OK;
     }
-  else
-    *expr_p = stmt;
+
+  *expr_p = NULL;
+  return GS_ALL_DONE;
 }
 
 /* Do C++-specific gimplification.  Args are as for gimplify_expr.  */
 
 int
-cp_gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p)
+cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
 {
   int saved_stmts_are_full_exprs_p = 0;
   enum tree_code code = TREE_CODE (*expr_p);
@@ -498,11 +536,10 @@ cp_gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p)
       break;
 
     case MUST_NOT_THROW_EXPR:
-      gimplify_must_not_throw_expr (expr_p, pre_p);
-      ret = GS_OK;
+      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
       break;
 
-      /* We used to do this for GIMPLE_MODIFY_STMT as well, but that's unsafe; the
+      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
         LHS of an assignment might also be involved in the RHS, as in bug
         25979.  */
     case INIT_EXPR:
@@ -539,7 +576,7 @@ cp_gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p)
     case USING_STMT:
       /* Just ignore for now.  Eventually we will want to pass this on to
         the debugger.  */
-      *expr_p = build_empty_stmt ();
+      *expr_p = NULL;
       ret = GS_ALL_DONE;
       break;
 
@@ -550,35 +587,37 @@ cp_gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p)
 
     case FOR_STMT:
       gimplify_for_stmt (expr_p, pre_p);
-      ret = GS_ALL_DONE;
+      ret = GS_OK;
       break;
 
     case WHILE_STMT:
-      gimplify_while_stmt (expr_p);
-      ret = GS_ALL_DONE;
+      gimplify_while_stmt (expr_p, pre_p);
+      ret = GS_OK;
       break;
 
     case DO_STMT:
-      gimplify_do_stmt (expr_p);
-      ret = GS_ALL_DONE;
+      gimplify_do_stmt (expr_p, pre_p);
+      ret = GS_OK;
       break;
 
     case SWITCH_STMT:
-      gimplify_switch_stmt (expr_p);
-      ret = GS_ALL_DONE;
+      gimplify_switch_stmt (expr_p, pre_p);
+      ret = GS_OK;
       break;
 
     case OMP_FOR:
-      ret = cp_gimplify_omp_for (expr_p);
+      ret = cp_gimplify_omp_for (expr_p, pre_p);
       break;
 
     case CONTINUE_STMT:
-      *expr_p = build_bc_goto (bc_continue);
+      gimple_seq_add_stmt (pre_p, gimple_build_goto (get_bc_label (bc_continue)));
+      *expr_p = NULL_TREE;
       ret = GS_ALL_DONE;
       break;
 
     case BREAK_STMT:
-      *expr_p = build_bc_goto (bc_break);
+      gimple_seq_add_stmt (pre_p, gimple_build_goto (get_bc_label (bc_break)));
+      *expr_p = NULL_TREE;
       ret = GS_ALL_DONE;
       break;
 
@@ -835,13 +874,13 @@ cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
       end1 = build2 (POINTER_PLUS_EXPR, TREE_TYPE (start1), start1, end1);
 
       p1 = create_tmp_var (TREE_TYPE (start1), NULL);
-      t = build2 (GIMPLE_MODIFY_STMT, void_type_node, p1, start1);
+      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
       append_to_statement_list (t, &ret);
 
       if (arg2)
        {
          p2 = create_tmp_var (TREE_TYPE (start2), NULL);
-         t = build2 (GIMPLE_MODIFY_STMT, void_type_node, p2, start2);
+         t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
          append_to_statement_list (t, &ret);
        }
 
@@ -864,14 +903,14 @@ cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
 
       t = TYPE_SIZE_UNIT (inner_type);
       t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (p1), p1, t);
-      t = build2 (GIMPLE_MODIFY_STMT, void_type_node, p1, t);
+      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
       append_to_statement_list (t, &ret);
 
       if (arg2)
        {
          t = TYPE_SIZE_UNIT (inner_type);
          t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (p2), p2, t);
-         t = build2 (GIMPLE_MODIFY_STMT, void_type_node, p2, t);
+         t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
          append_to_statement_list (t, &ret);
        }
 
@@ -925,7 +964,7 @@ cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
   if (info)
     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
   if (ret == NULL)
-    ret = build2 (GIMPLE_MODIFY_STMT, void_type_node, dst, src);
+    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
 
   return ret;
 }
@@ -941,7 +980,7 @@ cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
   if (info)
     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
   if (ret == NULL)
-    ret = build2 (GIMPLE_MODIFY_STMT, void_type_node, dst, src);
+    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
 
   return ret;
 }
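Taken together, the cp-gimplify.c changes above replace tree statement lists with gimple_seq objects: a break or continue site now emits a GIMPLE_GOTO to a shared label, and the label itself is materialized once when the enclosing block is finished.  A compressed sketch of that sequence-building pattern, using only constructors that appear in this patch (the helper is illustrative and collapses what get_bc_label and finish_bc_block do in two separate places):

  static gimple_seq
  add_break_jump (gimple_seq body, tree label)
  {
    /* Jump emitted where the break statement was ...  */
    gimple_seq_add_stmt (&body, gimple_build_goto (label));
    /* ... and the label placed after the loop or switch body.  */
    gimple_seq_add_stmt (&body, gimple_build_label (label));
    return body;
  }
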
index 67f15e5..f9a2af8 100644
@@ -534,7 +534,7 @@ enum cp_tree_node_structure_enum {
 
 /* The resulting tree type.  */
 union lang_tree_node GTY((desc ("cp_tree_node_structure (&%h)"),
-       chain_next ("(union lang_tree_node *)GENERIC_NEXT (&%h.generic)")))
+       chain_next ("(union lang_tree_node *)TREE_CHAIN (&%h.generic)")))
 {
   union tree_node GTY ((tag ("TS_CP_GENERIC"),
                        desc ("tree_node_structure (&%h)"))) generic;
@@ -4938,7 +4938,8 @@ extern void init_shadowed_var_for_decl            (void);
 extern tree cxx_staticp                         (tree);
 
 /* in cp-gimplify.c */
-extern int cp_gimplify_expr                    (tree *, tree *, tree *);
+extern int cp_gimplify_expr                    (tree *, gimple_seq *,
+                                                gimple_seq *);
 extern void cp_genericize                      (tree);
 
 /* -- end of C++ */
index 0cc17a4..630faac 100644
@@ -12038,7 +12038,7 @@ finish_function (int flags)
       f->extern_decl_map = NULL;
 
       /* Handle attribute((warn_unused_result)).  Relies on gimple input.  */
-      c_warn_unused_result (&DECL_SAVED_TREE (fndecl));
+      c_warn_unused_result (gimple_body (fndecl));
     }
   /* Clear out the bits we don't need.  */
   local_names = NULL;
index a156e32..20b0826 100644
@@ -51,6 +51,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "c-pragma.h"
 #include "tree-dump.h"
 #include "intl.h"
+#include "gimple.h"
 
 extern cpp_reader *parse_in;
 
@@ -3439,7 +3440,7 @@ cp_write_global_declarations (void)
              reconsider = true;
            }
 
-         if (!DECL_SAVED_TREE (decl))
+         if (!gimple_body (decl))
            continue;
 
          /* We lie to the back end, pretending that some functions
index adb1b97..56a5512 100644
@@ -39,6 +39,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "tree-inline.h"
 #include "tree-iterator.h"
 #include "target.h"
+#include "gimple.h"
 
 static void push_eh_cleanup (tree);
 static tree prepare_eh_type (tree);
@@ -53,7 +54,7 @@ static tree wrap_cleanups_r (tree *, int *, void *);
 static int complete_ptr_ref_or_void_ptr_p (tree, tree);
 static bool is_admissible_throw_operand (tree);
 static int can_convert_eh (tree, tree);
-static tree cp_protect_cleanup_actions (void);
+static gimple cp_protect_cleanup_actions (void);
 
 /* Sets up all the global eh stuff that needs to be initialized at the
    start of compilation.  */
@@ -92,14 +93,14 @@ init_exception_processing (void)
 /* Returns an expression to be executed if an unhandled exception is
    propagated out of a cleanup region.  */
 
-static tree
+static gimple
 cp_protect_cleanup_actions (void)
 {
   /* [except.terminate]
 
      When the destruction of an object during stack unwinding exits
      using an exception ... void terminate(); is called.  */
-  return build_call_n (terminate_node, 0);
+  return gimple_build_call (terminate_node, 0);
 }
 
 static tree
index 3dd6c4e..3deb85d 100644
@@ -3275,6 +3275,7 @@ build_vec_delete (tree base, tree maxindex,
     {
       /* Step back one from start of vector, and read dimension.  */
       tree cookie_addr;
+      tree size_ptr_type = build_pointer_type (sizetype);
 
       if (TREE_SIDE_EFFECTS (base))
        {
@@ -3284,8 +3285,8 @@ build_vec_delete (tree base, tree maxindex,
       type = strip_array_types (TREE_TYPE (type));
       cookie_addr = fold_build1 (NEGATE_EXPR, sizetype, TYPE_SIZE_UNIT (sizetype));
       cookie_addr = build2 (POINTER_PLUS_EXPR,
-                           build_pointer_type (sizetype),
-                           base,
+                           size_ptr_type,
+                           fold_convert (size_ptr_type, base),
                            cookie_addr);
       maxindex = cp_build_indirect_ref (cookie_addr, NULL, tf_warning_or_error);
     }
index a91f8d5..ed43b43 100644
@@ -1,5 +1,5 @@
 /* Perform optimizations on tree structure.
-   Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2007
+   Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2007, 2008
    Free Software Foundation, Inc.
    Written by Mark Michell (mark@codesourcery.com).
 
@@ -40,7 +40,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "langhooks.h"
 #include "diagnostic.h"
 #include "tree-dump.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 
 /* Prototypes.  */
 
@@ -72,35 +72,40 @@ update_cloned_parm (tree parm, tree cloned_parm, bool first)
   DECL_GIMPLE_REG_P (cloned_parm) = DECL_GIMPLE_REG_P (parm);
 }
 
-/* FN is a function that has a complete body, and CLONE is a function whose
-   body is to be set to a copy of FN, mapping argument declarations according
-   to the ARG_MAP splay_tree.  */
+
+/* FN is a function in High GIMPLE form that has a complete body and no
+   CFG.  CLONE is a function whose body is to be set to a copy of FN,
+   mapping argument declarations according to the ARG_MAP splay_tree.  */
 
 static void
 clone_body (tree clone, tree fn, void *arg_map)
 {
   copy_body_data id;
+  gimple_seq new_body;
+
+  /* FN must already be in GIMPLE form.  */
+  gcc_assert (gimple_body (fn));
 
-  /* Clone the body, as if we were making an inline call.  But, remap the
-     parameters in the callee to the parameters of caller.  */
+  /* Clone the body, as if we were making an inline call.  But, remap
+     the parameters in the callee to the parameters of caller.  */
   memset (&id, 0, sizeof (id));
   id.src_fn = fn;
   id.dst_fn = clone;
   id.src_cfun = DECL_STRUCT_FUNCTION (fn);
-  id.decl_map = (struct pointer_map_t *)arg_map;
+  id.decl_map = (struct pointer_map_t *) arg_map;
 
   id.copy_decl = copy_decl_no_change;
   id.transform_call_graph_edges = CB_CGE_DUPLICATE;
   id.transform_new_cfg = true;
   id.transform_return_to_modify = false;
-  id.transform_lang_insert_block = insert_block;
+  id.transform_lang_insert_block = NULL;
 
   /* We're not inside any EH region.  */
   id.eh_region = -1;
 
   /* Actually copy the body.  */
-  append_to_statement_list_force (copy_generic_body (&id),
-                                 &DECL_SAVED_TREE (clone));
+  new_body = remap_gimple_seq (gimple_body (fn), &id);
+  gimple_set_body (clone, new_body);
 }
 
 /* FN is a function that has a complete body.  Clone the body as
@@ -228,6 +233,7 @@ maybe_clone_body (tree fn)
       /* Now, expand this function into RTL, if appropriate.  */
       finish_function (0);
       BLOCK_ABSTRACT_ORIGIN (DECL_INITIAL (clone)) = DECL_INITIAL (fn);
+      DECL_SAVED_TREE (clone) = NULL;
       expand_or_defer_fn (clone);
       first = false;
     }
index ffa6493..5ada422 100644
@@ -46,6 +46,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "tree-iterator.h"
 #include "vec.h"
 #include "target.h"
+#include "gimple.h"
 
 /* There routines provide a modular interface to perform many parsing
    operations.  They may therefore be used during actual parsing, or
@@ -3201,6 +3202,8 @@ expand_or_defer_fn (tree fn)
       return;
     }
 
+  gcc_assert (gimple_body (fn));
+
   /* Replace AGGR_INIT_EXPRs with appropriate CALL_EXPRs.  */
   cp_walk_tree_without_duplicates (&DECL_SAVED_TREE (fn),
                                   simplify_aggr_init_exprs_r,
index 1d13eee..8906cc4 100644
@@ -215,14 +215,24 @@ extern char *diagnostic_build_prefix (diagnostic_info *);
 extern char *file_name_as_prefix (const char *);
 
 /* In tree-pretty-print.c  */
+extern void print_declaration (pretty_printer *, tree, int, int);
 extern int dump_generic_node (pretty_printer *, tree, int, int, bool);
 extern void print_generic_stmt (FILE *, tree, int);
 extern void print_generic_stmt_indented (FILE *, tree, int, int);
 extern void print_generic_expr (FILE *, tree, int);
 extern void print_generic_decl (FILE *, tree, int);
+extern void debug_c_tree (tree);
+extern void dump_omp_clauses (pretty_printer *, tree, int, int);
 
+/* In gimple-pretty-print.c  */
 extern void debug_generic_expr (tree);
 extern void debug_generic_stmt (tree);
 extern void debug_tree_chain (tree);
-extern void debug_c_tree (tree);
+extern void debug_gimple_stmt (gimple);
+extern void debug_gimple_seq (gimple_seq);
+extern void print_gimple_seq (FILE *, gimple_seq, int, int);
+extern void print_gimple_stmt (FILE *, gimple, int, int);
+extern void print_gimple_expr (FILE *, gimple, int, int);
+extern void dump_gimple_stmt (pretty_printer *, gimple, int, int);
+
 #endif /* ! GCC_DIAGNOSTIC_H */
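The declarations moved into the gimple-pretty-print.c section give tuples the same debugging entry points trees already had.  Assuming the two trailing int arguments are indentation and dump flags (this patch only shows the prototypes), a typical ad-hoc or debugger call would look like this, where stmt is whatever tuple is at hand:

  /* Dump one statement, then the whole body of the current function,
     to stderr with default indentation and flags.  */
  print_gimple_stmt (stderr, stmt, 0, 0);
  debug_gimple_seq (gimple_body (current_function_decl));
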
index cec95a5..8f77922 100644
@@ -144,7 +144,7 @@ walk_dominator_tree (struct dom_walk_data *walk_data, basic_block bb)
 {
   void *bd = NULL;
   basic_block dest;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   bool is_interesting;
   basic_block *worklist = XNEWVEC (basic_block, n_basic_blocks * 2);
   int sp = 0;
@@ -168,8 +168,8 @@ walk_dominator_tree (struct dom_walk_data *walk_data, basic_block bb)
            {
              bool recycled;
 
-             /* First get some local data, reusing any local data pointer we may
-                have saved.  */
+             /* First get some local data, reusing any local data
+                pointer we may have saved.  */
              if (VEC_length (void_p, walk_data->free_block_data) > 0)
                {
                  bd = VEC_pop (void_p, walk_data->free_block_data);
@@ -199,13 +199,14 @@ walk_dominator_tree (struct dom_walk_data *walk_data, basic_block bb)
          if (is_interesting && walk_data->before_dom_children_walk_stmts)
            {
              if (walk_data->walk_stmts_backward)
-               for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
+               for (gsi = gsi_last (bb_seq (bb)); !gsi_end_p (gsi);
+                    gsi_prev (&gsi))
                  (*walk_data->before_dom_children_walk_stmts) (walk_data, bb,
-                                                               bsi);
+                                                               gsi);
              else
-               for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+               for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
                  (*walk_data->before_dom_children_walk_stmts) (walk_data, bb,
-                                                               bsi);
+                                                               gsi);
            }
 
          /* Callback for operations to execute before we have walked the
@@ -239,13 +240,14 @@ walk_dominator_tree (struct dom_walk_data *walk_data, basic_block bb)
          if (is_interesting && walk_data->after_dom_children_walk_stmts)
            {
              if (walk_data->walk_stmts_backward)
-               for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
+               for (gsi = gsi_last (bb_seq (bb)); !gsi_end_p (gsi);
+                    gsi_prev (&gsi))
                  (*walk_data->after_dom_children_walk_stmts) (walk_data, bb,
-                                                              bsi);
+                                                              gsi);
              else
-               for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+               for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
                  (*walk_data->after_dom_children_walk_stmts) (walk_data, bb,
-                                                              bsi);
+                                                              gsi);
            }
 
          /* Callback for operations to execute after we have walked the
index 3fc6810..61117ef 100644
@@ -66,7 +66,7 @@ struct dom_walk_data
   /* Function to call to walk statements before the recursive walk
      of the dominator children.  */
   void (*before_dom_children_walk_stmts) (struct dom_walk_data *,
-                                         basic_block, block_stmt_iterator);
+                                         basic_block, gimple_stmt_iterator);
 
   /* Function to call after the statement walk occurring before the
      recursive walk of the dominator children.  */
@@ -81,7 +81,7 @@ struct dom_walk_data
   /* Function to call to walk statements after the recursive walk
      of the dominator children.  */
   void (*after_dom_children_walk_stmts) (struct dom_walk_data *,
-                                        basic_block, block_stmt_iterator);
+                                        basic_block, gimple_stmt_iterator);
 
   /* Function to call after the statement walk occurring after the
      recursive walk of the dominator children. 
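With the callback type switched to gimple_stmt_iterator, a pass's statement walker receives an iterator already positioned on one tuple.  A minimal callback compatible with the new signature might look like the following; gsi_stmt is assumed from the new iterator API (it is not shown in this hunk), while debug_gimple_stmt comes from the diagnostic.h change above:

  static void
  show_stmt_cb (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
                basic_block bb ATTRIBUTE_UNUSED, gimple_stmt_iterator gsi)
  {
    /* Pull the tuple out of the iterator and dump it.  */
    debug_gimple_stmt (gsi_stmt (gsi));
  }
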
index d37a2c2..a8e06ef 100644
@@ -9636,9 +9636,8 @@ loc_descriptor_from_tree_1 (tree loc, int want_address)
     CASE_CONVERT:
     case VIEW_CONVERT_EXPR:
     case SAVE_EXPR:
-    case GIMPLE_MODIFY_STMT:
-      return loc_descriptor_from_tree_1 (GENERIC_TREE_OPERAND (loc, 0),
-                                        want_address);
+    case MODIFY_EXPR:
+      return loc_descriptor_from_tree_1 (TREE_OPERAND (loc, 0), want_address);
 
     case COMPONENT_REF:
     case BIT_FIELD_REF:
@@ -10442,7 +10441,7 @@ static tree
 reference_to_unused (tree * tp, int * walk_subtrees,
                     void * data ATTRIBUTE_UNUSED)
 {
-  if (! EXPR_P (*tp) && ! GIMPLE_STMT_P (*tp) && ! CONSTANT_CLASS_P (*tp))
+  if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
     *walk_subtrees = 0;
 
   if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
index 8efb437..2916e44 100644
@@ -121,7 +121,7 @@ ebitmap_iter_init (ebitmap_iterator *i, ebitmap bmp, unsigned int min)
 static inline bool
 ebitmap_iter_cond (ebitmap_iterator *i, unsigned int *n)
 {
-  unsigned int ourn;
+  unsigned int ourn = 0;
 
   if (i->size == 0)
     return false;
index 5118a53..962dd0b 100644
@@ -86,7 +86,7 @@ along with GCC; see the file COPYING3.  If not see
 
 /* Protect cleanup actions with must-not-throw regions, with a call
    to the given failure handler.  */
-tree (*lang_protect_cleanup_actions) (void);
+gimple (*lang_protect_cleanup_actions) (void);
 
 /* Return true if type A catches type B.  */
 int (*lang_eh_type_covers) (tree a, tree b);
index ae3264e..8f9efb2 100644
@@ -108,20 +108,13 @@ extern void dump_eh_tree (FILE *, struct function *);
 extern bool eh_region_outer_p (struct function *, int, int);
 extern int eh_region_outermost (struct function *, int, int);
 
-/* tree-eh.c */
-extern void add_stmt_to_eh_region_fn (struct function *, tree, int);
-extern bool remove_stmt_from_eh_region_fn (struct function *, tree);
-extern int lookup_stmt_eh_region_fn (struct function *, const_tree);
-extern int lookup_stmt_eh_region (const_tree);
-extern bool verify_eh_edges (tree);
-
 /* If non-NULL, this is a function that returns an expression to be
    executed if an unhandled exception is propagated out of a cleanup
    region.  For example, in C++, an exception thrown by a destructor
    during stack unwinding is required to result in a call to
    `std::terminate', so the C++ version of this function returns a
    CALL_EXPR for `std::terminate'.  */
-extern tree (*lang_protect_cleanup_actions) (void);
+extern gimple (*lang_protect_cleanup_actions) (void);
 
 /* Return true if type A catches type B.  */
 extern int (*lang_eh_type_covers) (tree a, tree b);
@@ -175,7 +168,7 @@ extern tree (*lang_eh_runtime_type) (tree);
 
 struct throw_stmt_node GTY(())
 {
-  tree stmt;
+  gimple stmt;
   int region_nr;
 };
 
index e7c24e2..4984780 100644
@@ -6539,9 +6539,6 @@ safe_from_p (const_rtx x, tree exp, int top_p)
     case tcc_type:
       /* Should never get a type here.  */
       gcc_unreachable ();
-
-    case tcc_gimple_stmt:
-      gcc_unreachable ();
     }
 
   /* If we have an rtl, find any enclosed object.  Then see if we conflict
@@ -7048,8 +7045,7 @@ expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
 
   /* Handle ERROR_MARK before anybody tries to access its type.  */
   if (TREE_CODE (exp) == ERROR_MARK
-      || TREE_CODE (exp) == PREDICT_EXPR
-      || (!GIMPLE_TUPLE_P (exp) && TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
+      || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
     {
       ret = CONST0_RTX (tmode);
       return ret ? ret : const0_rtx;
@@ -7057,7 +7053,8 @@ expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
 
   if (flag_non_call_exceptions)
     {
-      rn = lookup_stmt_eh_region (exp);
+      rn = lookup_expr_eh_region (exp);
+
       /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw.  */
       if (rn >= 0)
        last = get_last_insn ();
@@ -7131,18 +7128,9 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
                                                                  type)   \
                                 : (expr))
 
-  if (GIMPLE_STMT_P (exp))
-    {
-      type = void_type_node;
-      mode = VOIDmode;
-      unsignedp = 0;
-    }
-  else
-    {
-      type = TREE_TYPE (exp);
-      mode = TYPE_MODE (type);
-      unsignedp = TYPE_UNSIGNED (type);
-    }
+  type = TREE_TYPE (exp);
+  mode = TYPE_MODE (type);
+  unsignedp = TYPE_UNSIGNED (type);
 
   ignore = (target == const0_rtx
            || ((code == NOP_EXPR || code == CONVERT_EXPR 
@@ -9090,16 +9078,6 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
        tree lhs = TREE_OPERAND (exp, 0);
        tree rhs = TREE_OPERAND (exp, 1);
        gcc_assert (ignore);
-       expand_assignment (lhs, rhs, false);
-       return const0_rtx;
-      }
-
-    case GIMPLE_MODIFY_STMT:
-      {
-       tree lhs = GIMPLE_STMT_OPERAND (exp, 0);
-       tree rhs = GIMPLE_STMT_OPERAND (exp, 1);
-
-       gcc_assert (ignore);
 
        /* Check for |= or &= of a bitfield of size one into another bitfield
           of size 1.  In this case, (unless we need the result of the
@@ -9358,17 +9336,8 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
     case VEC_PACK_TRUNC_EXPR:
     case VEC_PACK_SAT_EXPR:
     case VEC_PACK_FIX_TRUNC_EXPR:
-      {
-       mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
-       goto binop;
-      }
-
-    case OMP_ATOMIC_LOAD:
-    case OMP_ATOMIC_STORE:
-      /* OMP expansion is not run when there were errors, so these codes
-                 can get here.  */
-      gcc_assert (errorcount != 0);
-      return NULL_RTX;
+      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
+      goto binop;
 
     default:
       return lang_hooks.expand_expr (exp, original_target, tmode,
index e1e04bb..f8085dc 100644
@@ -65,6 +65,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "hashtab.h"
 #include "langhooks.h"
 #include "md5.h"
+#include "gimple.h"
 
 /* Nonzero if we are folding constants inside an initializer; zero
    otherwise.  */
@@ -949,7 +950,7 @@ fold_defer_overflow_warnings (void)
    deferred code.  */
 
 void
-fold_undefer_overflow_warnings (bool issue, const_tree stmt, int code)
+fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
 {
   const char *warnmsg;
   location_t locus;
@@ -971,7 +972,7 @@ fold_undefer_overflow_warnings (bool issue, const_tree stmt, int code)
   if (!issue || warnmsg == NULL)
     return;
 
-  if (stmt != NULL_TREE && TREE_NO_WARNING (stmt))
+  if (gimple_no_warning_p (stmt))
     return;
 
   /* Use the smallest code level when deciding to issue the
@@ -982,10 +983,10 @@ fold_undefer_overflow_warnings (bool issue, const_tree stmt, int code)
   if (!issue_strict_overflow_warning (code))
     return;
 
-  if (stmt == NULL_TREE || !expr_has_location (stmt))
+  if (stmt == NULL)
     locus = input_location;
   else
-    locus = expr_location (stmt);
+    locus = gimple_location (stmt);
   warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
 }
 
@@ -995,7 +996,7 @@ fold_undefer_overflow_warnings (bool issue, const_tree stmt, int code)
 void
 fold_undefer_and_ignore_overflow_warnings (void)
 {
-  fold_undefer_overflow_warnings (false, NULL_TREE, 0);
+  fold_undefer_overflow_warnings (false, NULL, 0);
 }
 
 /* Whether we are deferring overflow warnings.  */
@@ -2639,7 +2640,7 @@ fold_convert (tree type, tree arg)
 
     case VOID_TYPE:
       tem = fold_ignored_result (arg);
-      if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
+      if (TREE_CODE (tem) == MODIFY_EXPR)
        return tem;
       return fold_build1 (NOP_EXPR, type, tem);
 
@@ -2682,7 +2683,6 @@ maybe_lvalue_p (const_tree x)
   case WITH_CLEANUP_EXPR:
   case COMPOUND_EXPR:
   case MODIFY_EXPR:
-  case GIMPLE_MODIFY_STMT:
   case TARGET_EXPR:
   case COND_EXPR:
   case BIND_EXPR:
@@ -7847,17 +7847,16 @@ fold_unary (enum tree_code code, tree type, tree op0)
            return fold_convert (type, fold_addr_expr (base));
         }
 
-      if ((TREE_CODE (op0) == MODIFY_EXPR
-          || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
-         && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
+      if (TREE_CODE (op0) == MODIFY_EXPR
+         && TREE_CONSTANT (TREE_OPERAND (op0, 1))
          /* Detect assigning a bitfield.  */
-         && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
+         && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
               && DECL_BIT_FIELD
-              (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
+              (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
        {
          /* Don't leave an assignment inside a conversion
             unless assigning a bitfield.  */
-         tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
+         tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
          /* First do the assignment, then return converted constant.  */
          tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
          TREE_NO_WARNING (tem) = 1;
@@ -9249,8 +9248,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
   tree t1 = NULL_TREE;
   bool strict_overflow_p;
 
-  gcc_assert ((IS_EXPR_CODE_CLASS (kind)
-              || IS_GIMPLE_STMT_CODE_CLASS (kind))
+  gcc_assert (IS_EXPR_CODE_CLASS (kind)
              && TREE_CODE_LENGTH (code) == 2
              && op0 != NULL_TREE
              && op1 != NULL_TREE);
@@ -13169,8 +13167,7 @@ fold (tree expr)
       return expr;
     }
 
-  if (IS_EXPR_CODE_CLASS (kind)
-      || IS_GIMPLE_STMT_CODE_CLASS (kind))
+  if (IS_EXPR_CODE_CLASS (kind))
     {
       tree type = TREE_TYPE (t);
       tree op0, op1, op2;
@@ -14110,7 +14107,7 @@ tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
    *STRICT_OVERFLOW_P.  */
 
 bool
-tree_call_nonnegative_warnv_p (enum tree_code code,  tree type, tree fndecl,
+tree_call_nonnegative_warnv_p (tree type, tree fndecl,
                               tree arg0, tree arg1, bool *strict_overflow_p)
 {
   if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
@@ -14231,7 +14228,7 @@ tree_call_nonnegative_warnv_p (enum tree_code code,  tree type, tree fndecl,
       default:
        break;
       }
-  return tree_simple_nonnegative_warnv_p (code,
+  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
                                          type);
 }
 
@@ -14273,10 +14270,9 @@ tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
            else
              break;
          }
-       if ((TREE_CODE (t) == MODIFY_EXPR
-            || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
-           && GENERIC_TREE_OPERAND (t, 0) == temp)
-         return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
+       if (TREE_CODE (t) == MODIFY_EXPR
+           && TREE_OPERAND (t, 0) == temp)
+         return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                                strict_overflow_p);
 
        return false;
@@ -14287,8 +14283,7 @@ tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
        tree arg0 = call_expr_nargs (t) > 0 ?  CALL_EXPR_ARG (t, 0) : NULL_TREE;
        tree arg1 = call_expr_nargs (t) > 1 ?  CALL_EXPR_ARG (t, 1) : NULL_TREE;
 
-       return tree_call_nonnegative_warnv_p (TREE_CODE (t),
-                                             TREE_TYPE (t),
+       return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
                                              get_callee_fndecl (t),
                                              arg0,
                                              arg1,
@@ -14296,8 +14291,7 @@ tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
       }
     case COMPOUND_EXPR:
     case MODIFY_EXPR:
-    case GIMPLE_MODIFY_STMT:
-      return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
+      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
                                            strict_overflow_p);
     case BIND_EXPR:
       return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
@@ -14672,9 +14666,8 @@ tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
 
     case COMPOUND_EXPR:
     case MODIFY_EXPR:
-    case GIMPLE_MODIFY_STMT:
     case BIND_EXPR:
-      return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
+      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
                                        strict_overflow_p);
 
     case SAVE_EXPR:
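Since fold_undefer_overflow_warnings now takes a const_gimple, a caller brackets a folding attempt with the defer/undefer pair and hands over the statement tuple so the warning machinery can consult gimple_location and gimple_no_warning_p.  A hedged sketch of that calling convention (t, code, type, op0, op1 and stmt are placeholders, not names from this patch):

  fold_defer_overflow_warnings ();
  t = fold_binary (code, type, op0, op1);
  /* Only warn if folding succeeded; 0 means no specific
     strict-overflow code is being requested.  */
  fold_undefer_overflow_warnings (t != NULL_TREE, stmt, 0);
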
index fd3cff5..cfcfa12 100644
@@ -1,3 +1,92 @@
+2008-07-28  Richard Guenther  <rguenther@suse.de>
+
+       Merge from gimple-tuples-branch.
+
+       2008-07-18  Aldy Hernandez  <aldyh@redhat.com>
+
+       * trans-expr.c: Include gimple.h instead of tree-gimple.h.
+       * trans-array.c: Same.
+       * trans-openmp.c: Same.
+       * trans-stmt.c: Same.
+       * f95-lang.c: Same.
+       * trans-io.c: Same.
+       * trans-decl.c: Same.
+       * trans-intrinsic.c: Same.
+       * trans.c: Same.  Include tree-iterator.h.
+       * Make-lang.in (trans.o): Depend on tree-iterator.h.
+
+       2008-07-14  Aldy Hernandez  <aldyh@redhat.com>
+
+       * trans-array.h (gfc_conv_descriptor_data_set_internal):
+       Rename to gfc_conv_descriptor_data_set.
+       (gfc_conv_descriptor_data_set_tuples): Remove.
+       * trans-array.c (gfc_conv_descriptor_data_set): Rename
+       from gfc_conv_descriptor_data_set_internal.
+       Remove last argument to gfc_add_modify.
+       (gfc_trans_allocate_array_storage): Rename gfc_add_modify_expr to
+       gfc_add_modify.
+       (gfc_trans_create_temp_array): Same.
+       (gfc_conv_array_transpose): Same.
+       (gfc_grow_array): Same.
+       (gfc_put_offset_into_var): Same.
+       (gfc_trans_array_ctor_element): Same.
+       (gfc_trans_array_constructor_subarray): Same.
+       (gfc_trans_array_constructor_value): Same.
+       (gfc_trans_scalarized_loop_end): Same.
+       (gfc_array_init_size): Same.
+       (gfc_array_allocate): Same.
+       (gfc_trans_array_bounds): Same.
+       (gfc_trans_auto_array_allocation): Same.
+       (gfc_trans_g77_array): Same.
+       (gfc_trans_dummy_array_bias): Same.
+       (gfc_conv_expr_descriptor): Same.
+       (structure_alloc_comps): Same.
+       * trans-expr.c: Same.
+       * trans-openmp.c (gfc_omp_clause_default_ctor): Same.
+       Rename gfc_conv_descriptor_data_set_tuples to
+       gfc_conv_descriptor_data_set.
+       (gfc_omp_clause_copy_ctor): Change build_gimple_modify_stmt to
+       build2_v.
+       (gfc_omp_clause_assign_op): Same.
+       (gfc_trans_omp_array_reduction): Rename gfc_add_modify_expr to
+       gfc_add_modify.
+       (gfc_trans_omp_atomic): Same.
+       (gfc_trans_omp_do): Same.  Change GIMPLE_MODIFY_STMT to MODIFY_EXPR.
+       Rename gfc_add_modify_stmt to gfc_add_modify.
+       * trans-stmt.c: Rename gfc_add_modify_expr to
+       gfc_add_modify.
+       * trans.c: Rename gfc_add_modify_expr to
+       gfc_add_modify.
+       (gfc_add_modify): Remove last argument.
+       Rename GIMPLE_MODIFY_STMT to MODIFY_EXPR.
+       * trans.h: Remove gfc_add_modify_expr, gfc_add_modify_stmt.
+       Add prototype for gfc_add_modify.
+       * f95-lang.c (union lang_tree_node): Rename GENERIC_NEXT to TREE_CHAIN.
+       * trans-decl.c: Rename gfc_add_modify_stmt to gfc_add_modify.
+       * trans-io.c: Same.
+       * trans-intrinsic.c: Same.
+
+       2008-02-25  Aldy Hernandez  <aldyh@redhat.com>
+
+       * Make-lang.in (fortran-warn): Add -Wno-format.
+
+       2008-02-19  Diego Novillo  <dnovillo@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-02/msg00804.html
+
+       * fortran/Make-lang.in (fortran-warn): Remove.
+
+       2007-11-22  Aldy Hernandez  <aldyh@redhat.com>
+
+       * trans-expr.c (gfc_trans_string_copy): Use "void *" when building a
+       memset.
+
+       2007-11-10  Aldy Hernandez  <aldyh@redhat.com>
+
+       * Make-lang.in (fortran-warn): Set to -Wno-format.
+       * trans.c (gfc_trans_code): Update comment to say GENERIC.
+       Call tree_annotate_all_with_locus instead of annotate_all_with_locus.
+
 2008-07-27  Tobias Burnus  <burnus@net-b.de>
 
        PR fortran/36132
index b0944c9..77ddb6a 100644
@@ -47,6 +47,7 @@ GFORTRAN_TARGET_INSTALL_NAME := $(target_noncanonical)-$(shell echo gfortran|sed
 
 # Use strict warnings for this front end.
 fortran-warn = $(STRICT_WARN)
+fortran-warn += -Wno-format -Wno-missing-format-attribute -Wno-uninitialized # FIXME tuples: Do not merge.
 
 # These are the groups of object files we have.  The F95_PARSER_OBJS are
 # all the front end files, the F95_OBJS are the files for the translation
@@ -311,9 +312,9 @@ fortran/f95-lang.o: $(GFORTRAN_TRANS_DEPS) fortran/mathbuiltins.def \
   $(BUILTINS_DEF) fortran/types.def
 fortran/scanner.o: toplev.h fortran/cpp.h
 fortran/convert.o: $(GFORTRAN_TRANS_DEPS)
-fortran/trans.o: $(GFORTRAN_TRANS_DEPS)
+fortran/trans.o: $(GFORTRAN_TRANS_DEPS) tree-iterator.h
 fortran/trans-decl.o: $(GFORTRAN_TRANS_DEPS) gt-fortran-trans-decl.h \
-  $(CGRAPH_H) $(TARGET_H) $(FUNCTION_H) $(FLAGS_H) $(RTL_H) $(TREE_GIMPLE_H) \
+  $(CGRAPH_H) $(TARGET_H) $(FUNCTION_H) $(FLAGS_H) $(RTL_H) $(GIMPLE_H) \
   $(TREE_DUMP_H)
 fortran/trans-types.o: $(GFORTRAN_TRANS_DEPS) gt-fortran-trans-types.h \
   $(REAL_H) toplev.h $(TARGET_H) $(FLAGS_H) dwarf2out.h
index 941f7cd..d2768b1 100644
@@ -29,7 +29,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "system.h"
 #include "coretypes.h"
 #include "tree.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "flags.h"
 #include "langhooks.h"
 #include "langhooks-def.h"
@@ -62,7 +62,7 @@ GTY(())
 
 union lang_tree_node
 GTY((desc ("TREE_CODE (&%h.generic) == IDENTIFIER_NODE"),
-     chain_next ("(union lang_tree_node *)GENERIC_NEXT (&%h.generic)")))
+     chain_next ("(union lang_tree_node *)TREE_CHAIN (&%h.generic)")))
 
 {
   union tree_node GTY((tag ("0"),
index 14bab8e..fe6b63d 100644 (file)
@@ -80,7 +80,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "system.h"
 #include "coretypes.h"
 #include "tree.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "ggc.h"
 #include "toplev.h"
 #include "real.h"
@@ -161,12 +161,10 @@ gfc_conv_descriptor_data_get (tree desc)
    
    This function gets called through the following macros:
      gfc_conv_descriptor_data_set
-     gfc_conv_descriptor_data_set_tuples.  */
+     gfc_conv_descriptor_data_set.  */
 
 void
-gfc_conv_descriptor_data_set_internal (stmtblock_t *block,
-                                      tree desc, tree value,
-                                      bool tuples_p)
+gfc_conv_descriptor_data_set (stmtblock_t *block, tree desc, tree value)
 {
   tree field, type, t;
 
@@ -177,7 +175,7 @@ gfc_conv_descriptor_data_set_internal (stmtblock_t *block,
   gcc_assert (DATA_FIELD == 0);
 
   t = fold_build3 (COMPONENT_REF, TREE_TYPE (field), desc, field, NULL_TREE);
-  gfc_add_modify (block, t, fold_convert (TREE_TYPE (field), value), tuples_p);
+  gfc_add_modify (block, t, fold_convert (TREE_TYPE (field), value));
 }
 
 
@@ -547,7 +545,7 @@ gfc_trans_allocate_array_storage (stmtblock_t * pre, stmtblock_t * post,
   /* The offset is zero because we create temporaries with a zero
      lower bound.  */
   tmp = gfc_conv_descriptor_offset (desc);
-  gfc_add_modify_expr (pre, tmp, gfc_index_zero_node);
+  gfc_add_modify (pre, tmp, gfc_index_zero_node);
 
   if (dealloc && !onstack)
     {
@@ -627,7 +625,7 @@ gfc_trans_create_temp_array (stmtblock_t * pre, stmtblock_t * post,
 
   /* Fill in the array dtype.  */
   tmp = gfc_conv_descriptor_dtype (desc);
-  gfc_add_modify_expr (pre, tmp, gfc_get_dtype (TREE_TYPE (desc)));
+  gfc_add_modify (pre, tmp, gfc_get_dtype (TREE_TYPE (desc)));
 
   /*
      Fill in the bounds and stride.  This is a packed array, so:
@@ -661,13 +659,13 @@ gfc_trans_create_temp_array (stmtblock_t * pre, stmtblock_t * post,
         
       /* Store the stride and bound components in the descriptor.  */
       tmp = gfc_conv_descriptor_stride (desc, gfc_rank_cst[n]);
-      gfc_add_modify_expr (pre, tmp, size);
+      gfc_add_modify (pre, tmp, size);
 
       tmp = gfc_conv_descriptor_lbound (desc, gfc_rank_cst[n]);
-      gfc_add_modify_expr (pre, tmp, gfc_index_zero_node);
+      gfc_add_modify (pre, tmp, gfc_index_zero_node);
 
       tmp = gfc_conv_descriptor_ubound (desc, gfc_rank_cst[n]);
-      gfc_add_modify_expr (pre, tmp, loop->to[n]);
+      gfc_add_modify (pre, tmp, loop->to[n]);
 
       tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                         loop->to[n], gfc_index_one_node);
@@ -752,7 +750,7 @@ gfc_conv_array_transpose (gfc_se * se, gfc_expr * expr)
   se->expr = dest;
 
   /* Copy across the dtype field.  */
-  gfc_add_modify_expr (&se->pre,
+  gfc_add_modify (&se->pre,
                       gfc_conv_descriptor_dtype (dest),
                       gfc_conv_descriptor_dtype (src));
 
@@ -769,15 +767,15 @@ gfc_conv_array_transpose (gfc_se * se, gfc_expr * expr)
       dest_index = gfc_rank_cst[n];
       src_index = gfc_rank_cst[1 - n];
 
-      gfc_add_modify_expr (&se->pre,
+      gfc_add_modify (&se->pre,
                           gfc_conv_descriptor_stride (dest, dest_index),
                           gfc_conv_descriptor_stride (src, src_index));
 
-      gfc_add_modify_expr (&se->pre,
+      gfc_add_modify (&se->pre,
                           gfc_conv_descriptor_lbound (dest, dest_index),
                           gfc_conv_descriptor_lbound (src, src_index));
 
-      gfc_add_modify_expr (&se->pre,
+      gfc_add_modify (&se->pre,
                           gfc_conv_descriptor_ubound (dest, dest_index),
                           gfc_conv_descriptor_ubound (src, src_index));
 
@@ -803,7 +801,7 @@ gfc_conv_array_transpose (gfc_se * se, gfc_expr * expr)
   else
     dest_info->offset = gfc_index_zero_node;
 
-  gfc_add_modify_expr (&se->pre,
+  gfc_add_modify (&se->pre,
                       gfc_conv_descriptor_offset (dest),
                       dest_info->offset);
          
@@ -847,7 +845,7 @@ gfc_grow_array (stmtblock_t * pblock, tree desc, tree extra)
 
   /* Add EXTRA to the upper bound.  */
   tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type, ubound, extra);
-  gfc_add_modify_expr (pblock, ubound, tmp);
+  gfc_add_modify (pblock, ubound, tmp);
 
   /* Get the value of the current data pointer.  */
   arg0 = gfc_conv_descriptor_data_get (desc);
@@ -957,7 +955,7 @@ gfc_put_offset_into_var (stmtblock_t * pblock, tree * poffset,
   /* We should have already created the offset variable.  We cannot
      create it here because we may be in an inner scope.  */
   gcc_assert (*offsetvar != NULL_TREE);
-  gfc_add_modify_expr (pblock, *offsetvar, *poffset);
+  gfc_add_modify (pblock, *offsetvar, *poffset);
   *poffset = *offsetvar;
   TREE_USED (*offsetvar) = 1;
 }
@@ -996,7 +994,7 @@ gfc_trans_array_ctor_element (stmtblock_t * pblock, tree desc,
        {
          /* The temporary is an array of pointers.  */
          se->expr = fold_convert (TREE_TYPE (tmp), se->expr);
-         gfc_add_modify_expr (&se->pre, tmp, se->expr);
+         gfc_add_modify (&se->pre, tmp, se->expr);
        }
       else
        {
@@ -1011,7 +1009,7 @@ gfc_trans_array_ctor_element (stmtblock_t * pblock, tree desc,
        {
          if (first_len)
            {
-             gfc_add_modify_expr (&se->pre, first_len_val,
+             gfc_add_modify (&se->pre, first_len_val,
                                   se->string_length);
              first_len = false;
            }
@@ -1033,7 +1031,7 @@ gfc_trans_array_ctor_element (stmtblock_t * pblock, tree desc,
     {
       /* TODO: Should the frontend already have done this conversion?  */
       se->expr = fold_convert (TREE_TYPE (tmp), se->expr);
-      gfc_add_modify_expr (&se->pre, tmp, se->expr);
+      gfc_add_modify (&se->pre, tmp, se->expr);
     }
 
   gfc_add_block_to_block (pblock, &se->pre);
@@ -1104,7 +1102,7 @@ gfc_trans_array_constructor_subarray (stmtblock_t * pblock,
   /* Increment the offset.  */
   tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                     *poffset, gfc_index_one_node);
-  gfc_add_modify_expr (&body, *poffset, tmp);
+  gfc_add_modify (&body, *poffset, tmp);
 
   /* Finish the loop.  */
   gfc_trans_scalarizing_loops (&loop, &body);
@@ -1243,7 +1241,7 @@ gfc_trans_array_constructor_value (stmtblock_t * pblock, tree type,
            }
          if (!INTEGER_CST_P (*poffset))
             {
-              gfc_add_modify_expr (&body, *offsetvar, *poffset);
+              gfc_add_modify (&body, *offsetvar, *poffset);
               *poffset = *offsetvar;
             }
        }
@@ -1289,13 +1287,13 @@ gfc_trans_array_constructor_value (stmtblock_t * pblock, tree type,
          /* Make a temporary, store the current value in that
             and return it, once the loop is done.  */
          tmp_loopvar = gfc_create_var (TREE_TYPE (loopvar), "loopvar");
-         gfc_add_modify_expr (pblock, tmp_loopvar, loopvar);
+         gfc_add_modify (pblock, tmp_loopvar, loopvar);
 
          /* Initialize the loop.  */
          gfc_init_se (&se, NULL);
          gfc_conv_expr_val (&se, c->iterator->start);
          gfc_add_block_to_block (pblock, &se.pre);
-         gfc_add_modify_expr (pblock, loopvar, se.expr);
+         gfc_add_modify (pblock, loopvar, se.expr);
 
          gfc_init_se (&se, NULL);
          gfc_conv_expr_val (&se, c->iterator->end);
@@ -1348,7 +1346,7 @@ gfc_trans_array_constructor_value (stmtblock_t * pblock, tree type,
 
          /* Increase loop variable by step.  */
          tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (loopvar), loopvar, step);
-         gfc_add_modify_expr (&body, loopvar, tmp);
+         gfc_add_modify (&body, loopvar, tmp);
 
          /* Finish the loop.  */
          tmp = gfc_finish_block (&body);
@@ -1360,7 +1358,7 @@ gfc_trans_array_constructor_value (stmtblock_t * pblock, tree type,
          gfc_add_expr_to_block (pblock, tmp);
 
          /* Restore the original value of the loop counter.  */
-         gfc_add_modify_expr (pblock, loopvar, tmp_loopvar);
+         gfc_add_modify (pblock, loopvar, tmp_loopvar);
        }
     }
   mpz_clear (size);
@@ -2640,7 +2638,7 @@ gfc_trans_scalarized_loop_end (gfc_loopinfo * loop, int n,
   loopbody = gfc_finish_block (pbody);
 
   /* Initialize the loopvar.  */
-  gfc_add_modify_expr (&loop->code[n], loop->loopvar[n], loop->from[n]);
+  gfc_add_modify (&loop->code[n], loop->loopvar[n], loop->from[n]);
 
   exit_label = gfc_build_label_decl (NULL_TREE);
 
@@ -2661,7 +2659,7 @@ gfc_trans_scalarized_loop_end (gfc_loopinfo * loop, int n,
   /* Increment the loopvar.  */
   tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                     loop->loopvar[n], gfc_index_one_node);
-  gfc_add_modify_expr (&block, loop->loopvar[n], tmp);
+  gfc_add_modify (&block, loop->loopvar[n], tmp);
 
   /* Build the loop.  */
   tmp = gfc_finish_block (&block);
@@ -3621,7 +3619,7 @@ gfc_array_init_size (tree descriptor, int rank, tree * poffset,
 
   /* Set the dtype.  */
   tmp = gfc_conv_descriptor_dtype (descriptor);
-  gfc_add_modify_expr (pblock, tmp, gfc_get_dtype (TREE_TYPE (descriptor)));
+  gfc_add_modify (pblock, tmp, gfc_get_dtype (TREE_TYPE (descriptor)));
 
   or_expr = NULL_TREE;
 
@@ -3652,7 +3650,7 @@ gfc_array_init_size (tree descriptor, int rank, tree * poffset,
             }
        }
       tmp = gfc_conv_descriptor_lbound (descriptor, gfc_rank_cst[n]);
-      gfc_add_modify_expr (pblock, tmp, se.expr);
+      gfc_add_modify (pblock, tmp, se.expr);
 
       /* Work out the offset for this component.  */
       tmp = fold_build2 (MULT_EXPR, gfc_array_index_type, se.expr, stride);
@@ -3669,11 +3667,11 @@ gfc_array_init_size (tree descriptor, int rank, tree * poffset,
       gfc_add_block_to_block (pblock, &se.pre);
 
       tmp = gfc_conv_descriptor_ubound (descriptor, gfc_rank_cst[n]);
-      gfc_add_modify_expr (pblock, tmp, se.expr);
+      gfc_add_modify (pblock, tmp, se.expr);
 
       /* Store the stride.  */
       tmp = gfc_conv_descriptor_stride (descriptor, gfc_rank_cst[n]);
-      gfc_add_modify_expr (pblock, tmp, stride);
+      gfc_add_modify (pblock, tmp, stride);
 
       /* Calculate the size of this dimension.  */
       size = fold_build2 (PLUS_EXPR, gfc_array_index_type, se.expr, size);
@@ -3713,11 +3711,11 @@ gfc_array_init_size (tree descriptor, int rank, tree * poffset,
 
   var = gfc_create_var (TREE_TYPE (size), "size");
   gfc_start_block (&thenblock);
-  gfc_add_modify_expr (&thenblock, var, gfc_index_zero_node);
+  gfc_add_modify (&thenblock, var, gfc_index_zero_node);
   thencase = gfc_finish_block (&thenblock);
 
   gfc_start_block (&elseblock);
-  gfc_add_modify_expr (&elseblock, var, size);
+  gfc_add_modify (&elseblock, var, size);
   elsecase = gfc_finish_block (&elseblock);
 
   tmp = gfc_evaluate_now (or_expr, pblock);
@@ -3803,7 +3801,7 @@ gfc_array_allocate (gfc_se * se, gfc_expr * expr, tree pstat)
   gfc_add_expr_to_block (&se->pre, tmp);
 
   tmp = gfc_conv_descriptor_offset (se->expr);
-  gfc_add_modify_expr (&se->pre, tmp, offset);
+  gfc_add_modify (&se->pre, tmp, offset);
 
   if (expr->ts.type == BT_DERIVED
        && expr->ts.derived->attr.alloc_comp)
@@ -4001,7 +3999,7 @@ gfc_trans_array_bounds (tree type, gfc_symbol * sym, tree * poffset,
           gfc_init_se (&se, NULL);
           gfc_conv_expr_type (&se, as->lower[dim], gfc_array_index_type);
           gfc_add_block_to_block (pblock, &se.pre);
-          gfc_add_modify_expr (pblock, lbound, se.expr);
+          gfc_add_modify (pblock, lbound, se.expr);
         }
       ubound = GFC_TYPE_ARRAY_UBOUND (type, dim);
       if (as->upper[dim] && !INTEGER_CST_P (ubound))
@@ -4009,7 +4007,7 @@ gfc_trans_array_bounds (tree type, gfc_symbol * sym, tree * poffset,
           gfc_init_se (&se, NULL);
           gfc_conv_expr_type (&se, as->upper[dim], gfc_array_index_type);
           gfc_add_block_to_block (pblock, &se.pre);
-          gfc_add_modify_expr (pblock, ubound, se.expr);
+          gfc_add_modify (pblock, ubound, se.expr);
         }
       /* The offset of this dimension.  offset = offset - lbound * stride.  */
       tmp = fold_build2 (MULT_EXPR, gfc_array_index_type, lbound, size);
@@ -4029,7 +4027,7 @@ gfc_trans_array_bounds (tree type, gfc_symbol * sym, tree * poffset,
           tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type, ubound, tmp);
           tmp = fold_build2 (MULT_EXPR, gfc_array_index_type, size, tmp);
           if (stride)
-            gfc_add_modify_expr (pblock, stride, tmp);
+            gfc_add_modify (pblock, stride, tmp);
           else
             stride = gfc_evaluate_now (tmp, pblock);
 
@@ -4039,7 +4037,7 @@ gfc_trans_array_bounds (tree type, gfc_symbol * sym, tree * poffset,
                             stride, gfc_index_zero_node);
          tmp = fold_build3 (COND_EXPR, gfc_array_index_type, tmp,
                             stride, gfc_index_zero_node);
-         gfc_add_modify_expr (pblock, stride, tmp);
+         gfc_add_modify (pblock, stride, tmp);
         }
 
       size = stride;
@@ -4112,7 +4110,7 @@ gfc_trans_auto_array_allocation (tree decl, gfc_symbol * sym, tree fnbody)
   if (sym->attr.cray_pointee)
     {
       if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
-       gfc_add_modify_expr (&block, GFC_TYPE_ARRAY_OFFSET (type), offset);
+       gfc_add_modify (&block, GFC_TYPE_ARRAY_OFFSET (type), offset);
       gfc_add_expr_to_block (&block, fnbody);
       return gfc_finish_block (&block);
     }
@@ -4125,11 +4123,11 @@ gfc_trans_auto_array_allocation (tree decl, gfc_symbol * sym, tree fnbody)
 
   /* Allocate memory to hold the data.  */
   tmp = gfc_call_malloc (&block, TREE_TYPE (decl), size);
-  gfc_add_modify_expr (&block, decl, tmp);
+  gfc_add_modify (&block, decl, tmp);
 
   /* Set offset of the array.  */
   if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
-    gfc_add_modify_expr (&block, GFC_TYPE_ARRAY_OFFSET (type), offset);
+    gfc_add_modify (&block, GFC_TYPE_ARRAY_OFFSET (type), offset);
 
 
   /* Automatic arrays should not have initializers.  */
@@ -4177,13 +4175,13 @@ gfc_trans_g77_array (gfc_symbol * sym, tree body)
 
   /* Set the offset.  */
   if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
-    gfc_add_modify_expr (&block, GFC_TYPE_ARRAY_OFFSET (type), offset);
+    gfc_add_modify (&block, GFC_TYPE_ARRAY_OFFSET (type), offset);
 
   /* Set the pointer itself if we aren't using the parameter directly.  */
   if (TREE_CODE (parm) != PARM_DECL)
     {
       tmp = convert (TREE_TYPE (parm), GFC_DECL_SAVED_DESCRIPTOR (parm));
-      gfc_add_modify_expr (&block, parm, tmp);
+      gfc_add_modify (&block, parm, tmp);
     }
   stmt = gfc_finish_block (&block);
 
@@ -4278,7 +4276,7 @@ gfc_trans_dummy_array_bias (gfc_symbol * sym, tree tmpdesc, tree body)
       TREE_USED (partial) = 1;
       tmp = gfc_conv_descriptor_stride (dumdesc, gfc_rank_cst[0]);
       tmp = fold_build2 (EQ_EXPR, boolean_type_node, tmp, gfc_index_one_node);
-      gfc_add_modify_expr (&block, partial, tmp);
+      gfc_add_modify (&block, partial, tmp);
     }
   else
     {
@@ -4298,7 +4296,7 @@ gfc_trans_dummy_array_bias (gfc_symbol * sym, tree tmpdesc, tree body)
       tmp = fold_build3 (COND_EXPR, gfc_array_index_type, tmp,
                         gfc_index_one_node, stride);
       stride = GFC_TYPE_ARRAY_STRIDE (type, 0);
-      gfc_add_modify_expr (&block, stride, tmp);
+      gfc_add_modify (&block, stride, tmp);
 
       /* Allow the user to disable array repacking.  */
       stmt_unpacked = NULL_TREE;
@@ -4332,7 +4330,7 @@ gfc_trans_dummy_array_bias (gfc_symbol * sym, tree tmpdesc, tree body)
     }
   else
     tmp = stmt_packed != NULL_TREE ? stmt_packed : stmt_unpacked;
-  gfc_add_modify_expr (&block, tmpdesc, fold_convert (type, tmp));
+  gfc_add_modify (&block, tmpdesc, fold_convert (type, tmp));
 
   offset = gfc_index_zero_node;
   size = gfc_index_one_node;
@@ -4359,7 +4357,7 @@ gfc_trans_dummy_array_bias (gfc_symbol * sym, tree tmpdesc, tree body)
           gfc_conv_expr_type (&se, sym->as->lower[n],
                               gfc_array_index_type);
           gfc_add_block_to_block (&block, &se.pre);
-          gfc_add_modify_expr (&block, lbound, se.expr);
+          gfc_add_modify (&block, lbound, se.expr);
         }
 
       ubound = GFC_TYPE_ARRAY_UBOUND (type, n);
@@ -4373,7 +4371,7 @@ gfc_trans_dummy_array_bias (gfc_symbol * sym, tree tmpdesc, tree body)
              gfc_conv_expr_type (&se, sym->as->upper[n],
                                   gfc_array_index_type);
              gfc_add_block_to_block (&block, &se.pre);
-              gfc_add_modify_expr (&block, ubound, se.expr);
+              gfc_add_modify (&block, ubound, se.expr);
             }
 
          /* Check the sizes match.  */
@@ -4400,7 +4398,7 @@ gfc_trans_dummy_array_bias (gfc_symbol * sym, tree tmpdesc, tree body)
           tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
                             dubound, dlbound);
           tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type, tmp, lbound);
-          gfc_add_modify_expr (&block, ubound, tmp);
+          gfc_add_modify (&block, ubound, tmp);
        }
       /* The offset of this dimension.  offset = offset - lbound * stride.  */
       tmp = fold_build2 (MULT_EXPR, gfc_array_index_type, lbound, stride);
@@ -4440,7 +4438,7 @@ gfc_trans_dummy_array_bias (gfc_symbol * sym, tree tmpdesc, tree body)
                                   stmt_unpacked, stmt_packed);
               else
                 tmp = (stmt_packed != NULL_TREE) ? stmt_packed : stmt_unpacked;
-              gfc_add_modify_expr (&block, stride, tmp);
+              gfc_add_modify (&block, stride, tmp);
             }
         }
       else
@@ -4456,14 +4454,14 @@ gfc_trans_dummy_array_bias (gfc_symbol * sym, tree tmpdesc, tree body)
                                 ubound, tmp);
              tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
                                 GFC_TYPE_ARRAY_STRIDE (type, n), tmp);
-             gfc_add_modify_expr (&block, stride, tmp);
+             gfc_add_modify (&block, stride, tmp);
            }
        }
     }
 
   /* Set the offset.  */
   if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
-    gfc_add_modify_expr (&block, GFC_TYPE_ARRAY_OFFSET (type), offset);
+    gfc_add_modify (&block, GFC_TYPE_ARRAY_OFFSET (type), offset);
 
   gfc_trans_vla_type_sizes (sym, &block);
 
@@ -4758,7 +4756,7 @@ gfc_conv_expr_descriptor (gfc_se * se, gfc_expr * expr, gfc_ss * ss)
          if (se->direct_byref)
            {
              /* Copy the descriptor for pointer assignments.  */
-             gfc_add_modify_expr (&se->pre, se->expr, desc);
+             gfc_add_modify (&se->pre, se->expr, desc);
 
              /* Add any offsets from subreferences.  */
              gfc_get_dataptr_offset (&se->pre, se->expr, desc, NULL_TREE,
@@ -4991,7 +4989,7 @@ gfc_conv_expr_descriptor (gfc_se * se, gfc_expr * expr, gfc_ss * ss)
 
       /* Set the dtype.  */
       tmp = gfc_conv_descriptor_dtype (parm);
-      gfc_add_modify_expr (&loop.pre, tmp, gfc_get_dtype (parmtype));
+      gfc_add_modify (&loop.pre, tmp, gfc_get_dtype (parmtype));
 
       /* Set offset for assignments to pointer only to zero if it is not
          the full array.  */
@@ -5060,11 +5058,11 @@ gfc_conv_expr_descriptor (gfc_se * se, gfc_expr * expr, gfc_ss * ss)
              from = gfc_index_one_node;
            }
          tmp = gfc_conv_descriptor_lbound (parm, gfc_rank_cst[dim]);
-         gfc_add_modify_expr (&loop.pre, tmp, from);
+         gfc_add_modify (&loop.pre, tmp, from);
 
          /* Set the new upper bound.  */
          tmp = gfc_conv_descriptor_ubound (parm, gfc_rank_cst[dim]);
-         gfc_add_modify_expr (&loop.pre, tmp, to);
+         gfc_add_modify (&loop.pre, tmp, to);
 
          /* Multiply the stride by the section stride to get the
             total stride.  */
@@ -5089,7 +5087,7 @@ gfc_conv_expr_descriptor (gfc_se * se, gfc_expr * expr, gfc_ss * ss)
 
          /* Store the new stride.  */
          tmp = gfc_conv_descriptor_stride (parm, gfc_rank_cst[dim]);
-         gfc_add_modify_expr (&loop.pre, tmp, stride);
+         gfc_add_modify (&loop.pre, tmp, stride);
 
          dim++;
        }
@@ -5106,14 +5104,14 @@ gfc_conv_expr_descriptor (gfc_se * se, gfc_expr * expr, gfc_ss * ss)
        {
          /* Set the offset.  */
          tmp = gfc_conv_descriptor_offset (parm);
-         gfc_add_modify_expr (&loop.pre, tmp, base);
+         gfc_add_modify (&loop.pre, tmp, base);
        }
       else
        {
          /* Only the callee knows what the correct offset it, so just set
             it to zero here.  */
          tmp = gfc_conv_descriptor_offset (parm);
-         gfc_add_modify_expr (&loop.pre, tmp, gfc_index_zero_node);
+         gfc_add_modify (&loop.pre, tmp, gfc_index_zero_node);
        }
       desc = parm;
     }
@@ -5588,7 +5586,7 @@ structure_alloc_comps (gfc_symbol * der_type, tree decl,
            {
              rank = c->as ? c->as->rank : 0;
              tmp = fold_convert (TREE_TYPE (dcmp), comp);
-             gfc_add_modify_expr (&fnblock, dcmp, tmp);
+             gfc_add_modify (&fnblock, dcmp, tmp);
              tmp = structure_alloc_comps (c->ts.derived, comp, dcmp,
                                           rank, purpose);
              gfc_add_expr_to_block (&fnblock, tmp);
index 145f4a8..18de51c 100644 (file)
@@ -120,11 +120,7 @@ tree gfc_conv_array_ubound (tree, int);
 
 /* Build expressions for accessing components of an array descriptor.  */
 tree gfc_conv_descriptor_data_get (tree);
-void gfc_conv_descriptor_data_set_internal (stmtblock_t *, tree, tree, bool);
-#define gfc_conv_descriptor_data_set(BLOCK, T1, T2)                    \
-  gfc_conv_descriptor_data_set_internal ((BLOCK), (T1), (T2), false)
-#define gfc_conv_descriptor_data_set_tuples(BLOCK, T1, T2)             \
-  gfc_conv_descriptor_data_set_internal ((BLOCK), (T1), (T2), true)
+void gfc_conv_descriptor_data_set (stmtblock_t *, tree, tree);
 tree gfc_conv_descriptor_data_addr (tree);
 tree gfc_conv_descriptor_offset (tree);
 tree gfc_conv_descriptor_dtype (tree);
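
With the _internal helper and its two wrapper macros gone, callers use the plain function directly; the substitution is one-for-one, as in the gfc_omp_clause_default_ctor hunk further down:

    /* Before: tuples-aware spelling routed through the _internal helper.  */
    gfc_conv_descriptor_data_set_tuples (&cond_block, decl, ptr);

    /* After: the single function declared above.  */
    gfc_conv_descriptor_data_set (&cond_block, decl, ptr);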
index d2cb6a3..1dfa05c 100644 (file)
@@ -26,7 +26,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "coretypes.h"
 #include "tree.h"
 #include "tree-dump.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "ggc.h"
 #include "toplev.h"
 #include "tm.h"
@@ -2576,7 +2576,7 @@ gfc_trans_assign_aux_var (gfc_symbol * sym, tree fnbody)
 
   /* Set the initial value to length. See the comments in
      function gfc_add_assign_aux_vars in this file.  */
-  gfc_add_modify_expr (&body, GFC_DECL_STRING_LEN (sym->backend_decl),
+  gfc_add_modify (&body, GFC_DECL_STRING_LEN (sym->backend_decl),
                       build_int_cst (NULL_TREE, -2));
 
   gfc_add_expr_to_block (&body, fnbody);
@@ -2607,7 +2607,7 @@ gfc_trans_vla_one_sizepos (tree *tp, stmtblock_t *body)
 
   var = gfc_create_var_np (TREE_TYPE (t), NULL);
   gfc_add_decl_to_function (var);
-  gfc_add_modify_expr (body, var, val);
+  gfc_add_modify (body, var, val);
   if (TREE_CODE (t) == SAVE_EXPR)
     TREE_OPERAND (t, 0) = var;
   *tp = var;
@@ -3412,7 +3412,7 @@ gfc_generate_function_code (gfc_namespace * ns)
     {
       tree alternate_return;
       alternate_return = gfc_get_fake_result_decl (sym, 0);
-      gfc_add_modify_expr (&body, alternate_return, integer_zero_node);
+      gfc_add_modify (&body, alternate_return, integer_zero_node);
     }
 
   if (ns->entries)
index e145c0c..05ee390 100644 (file)
@@ -30,7 +30,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "ggc.h"
 #include "toplev.h"
 #include "real.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "langhooks.h"
 #include "flags.h"
 #include "gfortran.h"
@@ -115,7 +115,7 @@ gfc_make_safe_expr (gfc_se * se)
 
   /* We need a temporary for this result.  */
   var = gfc_create_var (TREE_TYPE (se->expr), NULL);
-  gfc_add_modify_expr (&se->pre, var, se->expr);
+  gfc_add_modify (&se->pre, var, se->expr);
   se->expr = var;
 }
 
@@ -257,7 +257,7 @@ gfc_conv_string_length (gfc_charlen * cl, stmtblock_t * pblock)
   gfc_add_block_to_block (pblock, &se.pre);
 
   if (cl->backend_decl)
-    gfc_add_modify_expr (pblock, cl->backend_decl, se.expr);
+    gfc_add_modify (pblock, cl->backend_decl, se.expr);
   else
     cl->backend_decl = gfc_evaluate_now (se.expr, pblock);
 }
@@ -993,7 +993,7 @@ gfc_conv_string_tmp (gfc_se * se, tree type, tree len)
                             fold_build2 (MULT_EXPR, TREE_TYPE (len), len,
                                          fold_convert (TREE_TYPE (len),
                                                        TYPE_SIZE (type))));
-      gfc_add_modify_expr (&se->pre, var, tmp);
+      gfc_add_modify (&se->pre, var, tmp);
 
       /* Free the temporary afterwards.  */
       tmp = gfc_call_free (convert (pvoid_type_node, var));
@@ -1509,7 +1509,7 @@ gfc_get_interface_mapping_array (stmtblock_t * block, gfc_symbol * sym,
   type = gfc_get_nodesc_array_type (type, sym->as, packed);
 
   var = gfc_create_var (type, "ifm");
-  gfc_add_modify_expr (block, var, fold_convert (type, data));
+  gfc_add_modify (block, var, fold_convert (type, data));
 
   return var;
 }
@@ -2165,7 +2165,7 @@ gfc_conv_subref_array_arg (gfc_se * parmse, gfc_expr * expr,
 
   tmp_index = fold_build2 (MINUS_EXPR, gfc_array_index_type,
                           tmp_index, rse.loop->from[0]);
-  gfc_add_modify_expr (&rse.loop->code[0], offset, tmp_index);
+  gfc_add_modify (&rse.loop->code[0], offset, tmp_index);
 
   tmp_index = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                           rse.loop->loopvar[0], offset);
@@ -2895,9 +2895,9 @@ fill_with_spaces (tree start, tree type, tree size)
   /* Initialize variables.  */
   gfc_init_block (&block);
   i = gfc_create_var (sizetype, "i");
-  gfc_add_modify_expr (&block, i, fold_convert (sizetype, size));
+  gfc_add_modify (&block, i, fold_convert (sizetype, size));
   el = gfc_create_var (build_pointer_type (type), "el");
-  gfc_add_modify_expr (&block, el, fold_convert (TREE_TYPE (el), start));
+  gfc_add_modify (&block, el, fold_convert (TREE_TYPE (el), start));
   exit_label = gfc_build_label_decl (NULL_TREE);
   TREE_USED (exit_label) = 1;
 
@@ -2913,14 +2913,14 @@ fill_with_spaces (tree start, tree type, tree size)
   gfc_add_expr_to_block (&loop, tmp);
 
   /* Assignment.  */
-  gfc_add_modify_expr (&loop, fold_build1 (INDIRECT_REF, type, el),
+  gfc_add_modify (&loop, fold_build1 (INDIRECT_REF, type, el),
                       build_int_cst (type,
                                      lang_hooks.to_target_charset (' ')));
 
   /* Increment loop variables.  */
-  gfc_add_modify_expr (&loop, i, fold_build2 (MINUS_EXPR, sizetype, i,
+  gfc_add_modify (&loop, i, fold_build2 (MINUS_EXPR, sizetype, i,
                                              TYPE_SIZE_UNIT (type)));
-  gfc_add_modify_expr (&loop, el, fold_build2 (POINTER_PLUS_EXPR,
+  gfc_add_modify (&loop, el, fold_build2 (POINTER_PLUS_EXPR,
                                               TREE_TYPE (el), el,
                                               TYPE_SIZE_UNIT (type)));
 
@@ -2989,7 +2989,7 @@ gfc_trans_string_copy (stmtblock_t * block, tree dlength, tree dest,
   if (dsc != NULL_TREE && ssc != NULL_TREE
       && TREE_TYPE (dsc) == TREE_TYPE (ssc))
     {
-      gfc_add_modify_expr (block, dsc, ssc);
+      gfc_add_modify (block, dsc, ssc);
       return;
     }
 
@@ -3135,7 +3135,7 @@ gfc_conv_statement_function (gfc_se * se, gfc_expr * expr)
           gfc_conv_expr (&lse, args->expr);
 
           gfc_add_block_to_block (&se->pre, &lse.pre);
-          gfc_add_modify_expr (&se->pre, temp_vars[n], lse.expr);
+          gfc_add_modify (&se->pre, temp_vars[n], lse.expr);
           gfc_add_block_to_block (&se->pre, &lse.post);
         }
 
@@ -3420,7 +3420,7 @@ gfc_trans_subcomponent_assign (tree dest, gfc_component * cm, gfc_expr * expr)
          se.want_pointer = 1;
          gfc_conv_expr (&se, expr);
          gfc_add_block_to_block (&block, &se.pre);
-         gfc_add_modify_expr (&block, dest,
+         gfc_add_modify (&block, dest,
                               fold_convert (TREE_TYPE (dest), se.expr));
          gfc_add_block_to_block (&block, &se.post);
        }
@@ -3441,7 +3441,7 @@ gfc_trans_subcomponent_assign (tree dest, gfc_component * cm, gfc_expr * expr)
          gfc_add_block_to_block (&block, &se.pre);
 
          tmp = fold_convert (TREE_TYPE (dest), se.expr);
-         gfc_add_modify_expr (&block, dest, tmp);
+         gfc_add_modify (&block, dest, tmp);
 
          if (cm->ts.type == BT_DERIVED && cm->ts.derived->attr.alloc_comp)
            tmp = gfc_copy_alloc_comp (cm->ts.derived, se.expr, dest,
@@ -3459,7 +3459,7 @@ gfc_trans_subcomponent_assign (tree dest, gfc_component * cm, gfc_expr * expr)
          /* Shift the lbound and ubound of temporaries to being unity, rather
             than zero, based.  Calculate the offset for all cases.  */
          offset = gfc_conv_descriptor_offset (dest);
-         gfc_add_modify_expr (&block, offset, gfc_index_zero_node);
+         gfc_add_modify (&block, offset, gfc_index_zero_node);
          tmp2 =gfc_create_var (gfc_array_index_type, NULL);
          for (n = 0; n < expr->rank; n++)
            {
@@ -3470,21 +3470,21 @@ gfc_trans_subcomponent_assign (tree dest, gfc_component * cm, gfc_expr * expr)
                  tmp = gfc_conv_descriptor_ubound (dest, gfc_rank_cst[n]);
                  span = fold_build2 (MINUS_EXPR, gfc_array_index_type, tmp,
                            gfc_conv_descriptor_lbound (dest, gfc_rank_cst[n]));
-                 gfc_add_modify_expr (&block, tmp,
+                 gfc_add_modify (&block, tmp,
                                       fold_build2 (PLUS_EXPR,
                                                    gfc_array_index_type,
                                                    span, gfc_index_one_node));
                  tmp = gfc_conv_descriptor_lbound (dest, gfc_rank_cst[n]);
-                 gfc_add_modify_expr (&block, tmp, gfc_index_one_node);
+                 gfc_add_modify (&block, tmp, gfc_index_one_node);
                }
              tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
                                 gfc_conv_descriptor_lbound (dest,
                                                             gfc_rank_cst[n]),
                                 gfc_conv_descriptor_stride (dest,
                                                             gfc_rank_cst[n]));
-             gfc_add_modify_expr (&block, tmp2, tmp);
+             gfc_add_modify (&block, tmp2, tmp);
              tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type, offset, tmp2);
-             gfc_add_modify_expr (&block, offset, tmp);
+             gfc_add_modify (&block, offset, tmp);
            }
        }
       else
@@ -3499,7 +3499,7 @@ gfc_trans_subcomponent_assign (tree dest, gfc_component * cm, gfc_expr * expr)
        {
          gfc_init_se (&se, NULL);
          gfc_conv_expr (&se, expr);
-         gfc_add_modify_expr (&block, dest,
+         gfc_add_modify (&block, dest,
                               fold_convert (TREE_TYPE (dest), se.expr));
        }
       else
@@ -3745,7 +3745,7 @@ gfc_conv_expr_val (gfc_se * se, gfc_expr * expr)
   if (se->post.head)
     {
       val = gfc_create_var (TREE_TYPE (se->expr), NULL);
-      gfc_add_modify_expr (&se->pre, val, se->expr);
+      gfc_add_modify (&se->pre, val, se->expr);
       se->expr = val;
       gfc_add_block_to_block (&se->pre, &se->post);
     }
@@ -3791,7 +3791,7 @@ gfc_conv_expr_reference (gfc_se * se, gfc_expr * expr)
       if (se->post.head)
        {
          var = gfc_create_var (TREE_TYPE (se->expr), NULL);
-         gfc_add_modify_expr (&se->pre, var, se->expr);
+         gfc_add_modify (&se->pre, var, se->expr);
          gfc_add_block_to_block (&se->pre, &se->post);
          se->expr = var;
        }
@@ -3805,7 +3805,7 @@ gfc_conv_expr_reference (gfc_se * se, gfc_expr * expr)
       se->want_pointer = 1;
       gfc_conv_expr (se, expr);
       var = gfc_create_var (TREE_TYPE (se->expr), NULL);
-      gfc_add_modify_expr (&se->pre, var, se->expr);
+      gfc_add_modify (&se->pre, var, se->expr);
       se->expr = var;
       return;
     }
@@ -3826,7 +3826,7 @@ gfc_conv_expr_reference (gfc_se * se, gfc_expr * expr)
   else
     {
       var = gfc_create_var (TREE_TYPE (se->expr), NULL);
-      gfc_add_modify_expr (&se->pre, var, se->expr);
+      gfc_add_modify (&se->pre, var, se->expr);
     }
   gfc_add_block_to_block (&se->pre, &se->post);
 
@@ -3879,7 +3879,7 @@ gfc_trans_pointer_assignment (gfc_expr * expr1, gfc_expr * expr2)
 
       gfc_add_block_to_block (&block, &lse.pre);
       gfc_add_block_to_block (&block, &rse.pre);
-      gfc_add_modify_expr (&block, lse.expr,
+      gfc_add_modify (&block, lse.expr,
                           fold_convert (TREE_TYPE (lse.expr), rse.expr));
       gfc_add_block_to_block (&block, &rse.post);
       gfc_add_block_to_block (&block, &lse.post);
@@ -3912,7 +3912,7 @@ gfc_trans_pointer_assignment (gfc_expr * expr1, gfc_expr * expr2)
              tmp = fold_convert (gfc_array_index_type, size_in_bytes (tmp));
              if (!INTEGER_CST_P (tmp))
                gfc_add_block_to_block (&lse.post, &rse.pre);
-             gfc_add_modify_expr (&lse.post, GFC_DECL_SPAN(decl), tmp);
+             gfc_add_modify (&lse.post, GFC_DECL_SPAN(decl), tmp);
            }
 
          break;
@@ -3926,7 +3926,7 @@ gfc_trans_pointer_assignment (gfc_expr * expr1, gfc_expr * expr2)
          lse.expr = tmp;
          lse.direct_byref = 1;
          gfc_conv_expr_descriptor (&lse, expr2, rss);
-         gfc_add_modify_expr (&lse.pre, desc, tmp);
+         gfc_add_modify (&lse.pre, desc, tmp);
          break;
         }
       gfc_add_block_to_block (&block, &lse.pre);
@@ -4038,7 +4038,7 @@ gfc_trans_scalar_assign (gfc_se * lse, gfc_se * rse, gfc_typespec ts,
       gfc_add_block_to_block (&block, &rse->pre);
       gfc_add_block_to_block (&block, &lse->pre);
 
-      gfc_add_modify_expr (&block, lse->expr,
+      gfc_add_modify (&block, lse->expr,
                           fold_convert (TREE_TYPE (lse->expr), rse->expr));
 
       /* Do a deep copy if the rhs is a variable, if it is not the
@@ -4055,7 +4055,7 @@ gfc_trans_scalar_assign (gfc_se * lse, gfc_se * rse, gfc_typespec ts,
       gfc_add_block_to_block (&block, &lse->pre);
       gfc_add_block_to_block (&block, &rse->pre);
 
-      gfc_add_modify_expr (&block, lse->expr,
+      gfc_add_modify (&block, lse->expr,
                           fold_convert (TREE_TYPE (lse->expr), rse->expr));
     }
 
index 2dc2d4f..a56f4c1 100644 (file)
@@ -30,7 +30,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "ggc.h"
 #include "toplev.h"
 #include "real.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "flags.h"
 #include "gfortran.h"
 #include "arith.h"
@@ -1316,7 +1316,7 @@ gfc_conv_intrinsic_char (gfc_se * se, gfc_expr * expr)
   var = gfc_create_var (type, "char");
 
   arg[0] = fold_build1 (NOP_EXPR, type, arg[0]);
-  gfc_add_modify_expr (&se->pre, var, arg[0]);
+  gfc_add_modify (&se->pre, var, arg[0]);
   se->expr = gfc_build_addr_expr (build_pointer_type (type), var);
   se->string_length = integer_one_node;
 }
@@ -1479,7 +1479,7 @@ gfc_conv_intrinsic_minmax (gfc_se * se, gfc_expr * expr, int op)
     args[0] = gfc_evaluate_now (args[0], &se->pre);
 
   mvar = gfc_create_var (type, "M");
-  gfc_add_modify_expr (&se->pre, mvar, args[0]);
+  gfc_add_modify (&se->pre, mvar, args[0]);
   for (i = 1, argexpr = argexpr->next; i < nargs; i++)
     {
       tree cond, isnan;
@@ -1718,7 +1718,7 @@ gfc_conv_intrinsic_anyall (gfc_se * se, gfc_expr * expr, int op)
     tmp = convert (type, boolean_true_node);
   else
     tmp = convert (type, boolean_false_node);
-  gfc_add_modify_expr (&se->pre, resvar, tmp);
+  gfc_add_modify (&se->pre, resvar, tmp);
 
   /* Walk the arguments.  */
   arrayss = gfc_walk_expr (actual->expr);
@@ -1744,7 +1744,7 @@ gfc_conv_intrinsic_anyall (gfc_se * se, gfc_expr * expr, int op)
     tmp = convert (type, boolean_false_node);
   else
     tmp = convert (type, boolean_true_node);
-  gfc_add_modify_expr (&block, resvar, tmp);
+  gfc_add_modify (&block, resvar, tmp);
 
   /* And break out of the loop.  */
   tmp = build1_v (GOTO_EXPR, exit_label);
@@ -1802,7 +1802,7 @@ gfc_conv_intrinsic_count (gfc_se * se, gfc_expr * expr)
   type = gfc_typenode_for_spec (&expr->ts);
   /* Initialize the result.  */
   resvar = gfc_create_var (type, "count");
-  gfc_add_modify_expr (&se->pre, resvar, build_int_cst (type, 0));
+  gfc_add_modify (&se->pre, resvar, build_int_cst (type, 0));
 
   /* Walk the arguments.  */
   arrayss = gfc_walk_expr (actual->expr);
@@ -1875,7 +1875,7 @@ gfc_conv_intrinsic_arith (gfc_se * se, gfc_expr * expr, int op)
   else
     tmp = gfc_build_const (type, integer_one_node);
 
-  gfc_add_modify_expr (&se->pre, resvar, tmp);
+  gfc_add_modify (&se->pre, resvar, tmp);
 
   /* Walk the arguments.  */
   actual = expr->value.function.actual;
@@ -1932,7 +1932,7 @@ gfc_conv_intrinsic_arith (gfc_se * se, gfc_expr * expr, int op)
   gfc_add_block_to_block (&block, &arrayse.pre);
 
   tmp = fold_build2 (op, type, resvar, arrayse.expr);
-  gfc_add_modify_expr (&block, resvar, tmp);
+  gfc_add_modify (&block, resvar, tmp);
   gfc_add_block_to_block (&block, &arrayse.post);
 
   if (maskss)
@@ -1999,7 +1999,7 @@ gfc_conv_intrinsic_dot_product (gfc_se * se, gfc_expr * expr)
   else
     tmp = gfc_build_const (type, integer_zero_node);
 
-  gfc_add_modify_expr (&se->pre, resvar, tmp);
+  gfc_add_modify (&se->pre, resvar, tmp);
 
   /* Walk argument #1.  */
   actual = expr->value.function.actual;
@@ -2056,7 +2056,7 @@ gfc_conv_intrinsic_dot_product (gfc_se * se, gfc_expr * expr)
       tmp = fold_build2 (MULT_EXPR, type, arrayse1.expr, arrayse2.expr);
       tmp = fold_build2 (PLUS_EXPR, type, resvar, tmp);
     }
-  gfc_add_modify_expr (&block, resvar, tmp);
+  gfc_add_modify (&block, resvar, tmp);
 
   /* Finish up the loop block and the loop.  */
   tmp = gfc_finish_block (&block);
@@ -2146,7 +2146,7 @@ gfc_conv_intrinsic_minmaxloc (gfc_se * se, gfc_expr * expr, int op)
      possible value is HUGE in both cases.  */
   if (op == GT_EXPR)
     tmp = fold_build1 (NEGATE_EXPR, TREE_TYPE (tmp), tmp);
-  gfc_add_modify_expr (&se->pre, limit, tmp);
+  gfc_add_modify (&se->pre, limit, tmp);
 
   if (op == GT_EXPR && expr->ts.type == BT_INTEGER)
     tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (tmp), tmp,
@@ -2167,7 +2167,7 @@ gfc_conv_intrinsic_minmaxloc (gfc_se * se, gfc_expr * expr, int op)
   /* Initialize the position to zero, following Fortran 2003.  We are free
      to do this because Fortran 95 allows the result of an entirely false
      mask to be processor dependent.  */
-  gfc_add_modify_expr (&loop.pre, pos, gfc_index_zero_node);
+  gfc_add_modify (&loop.pre, pos, gfc_index_zero_node);
 
   gfc_mark_ss_chain_used (arrayss, 1);
   if (maskss)
@@ -2200,7 +2200,7 @@ gfc_conv_intrinsic_minmaxloc (gfc_se * se, gfc_expr * expr, int op)
   gfc_start_block (&ifblock);
 
   /* Assign the value to the limit...  */
-  gfc_add_modify_expr (&ifblock, limit, arrayse.expr);
+  gfc_add_modify (&ifblock, limit, arrayse.expr);
 
   /* Remember where we are.  An offset must be added to the loop
      counter to obtain the required position.  */
@@ -2210,11 +2210,11 @@ gfc_conv_intrinsic_minmaxloc (gfc_se * se, gfc_expr * expr, int op)
   else
     tmp = build_int_cst (gfc_array_index_type, 1);
   
-  gfc_add_modify_expr (&block, offset, tmp);
+  gfc_add_modify (&block, offset, tmp);
 
   tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (pos),
                     loop.loopvar[0], offset);
-  gfc_add_modify_expr (&ifblock, pos, tmp);
+  gfc_add_modify (&ifblock, pos, tmp);
 
   ifbody = gfc_finish_block (&ifblock);
 
@@ -2258,7 +2258,7 @@ gfc_conv_intrinsic_minmaxloc (gfc_se * se, gfc_expr * expr, int op)
         the pos variable the same way as above.  */
 
       gfc_init_block (&elseblock);
-      gfc_add_modify_expr (&elseblock, pos, gfc_index_zero_node);
+      gfc_add_modify (&elseblock, pos, gfc_index_zero_node);
       elsetmp = gfc_finish_block (&elseblock);
 
       tmp = build3_v (COND_EXPR, maskse.expr, tmp, elsetmp);
@@ -2329,7 +2329,7 @@ gfc_conv_intrinsic_minmaxval (gfc_se * se, gfc_expr * expr, int op)
     tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (tmp),
                       tmp, build_int_cst (type, 1));
 
-  gfc_add_modify_expr (&se->pre, limit, tmp);
+  gfc_add_modify (&se->pre, limit, tmp);
 
   /* Walk the arguments.  */
   actual = expr->value.function.actual;
@@ -3003,12 +3003,12 @@ gfc_conv_intrinsic_spacing (gfc_se * se, gfc_expr * expr)
   gfc_add_expr_to_block (&block, tmp);
 
   tmp = fold_build2 (MINUS_EXPR, integer_type_node, e, prec);
-  gfc_add_modify_expr (&block, e, fold_build2 (MAX_EXPR, integer_type_node,
+  gfc_add_modify (&block, e, fold_build2 (MAX_EXPR, integer_type_node,
                                               tmp, emin));
 
   tmp = build_call_expr (built_in_decls[scalbn], 2,
                         build_real_from_int_cst (type, integer_one_node), e);
-  gfc_add_modify_expr (&block, res, tmp);
+  gfc_add_modify (&block, res, tmp);
 
   /* Finish by building the IF statement.  */
   cond = fold_build2 (EQ_EXPR, boolean_type_node, arg,
@@ -3071,7 +3071,7 @@ gfc_conv_intrinsic_rrspacing (gfc_se * se, gfc_expr * expr)
 
   e = gfc_create_var (integer_type_node, NULL);
   x = gfc_create_var (type, NULL);
-  gfc_add_modify_expr (&se->pre, x,
+  gfc_add_modify (&se->pre, x,
                       build_call_expr (built_in_decls[fabs], 1, arg));
 
 
@@ -3083,7 +3083,7 @@ gfc_conv_intrinsic_rrspacing (gfc_se * se, gfc_expr * expr)
   tmp = fold_build2 (MINUS_EXPR, integer_type_node,
                     build_int_cst (NULL_TREE, prec), e);
   tmp = build_call_expr (built_in_decls[scalbn], 2, x, tmp);
-  gfc_add_modify_expr (&block, x, tmp);
+  gfc_add_modify (&block, x, tmp);
   stmt = gfc_finish_block (&block);
 
   cond = fold_build2 (NE_EXPR, boolean_type_node, x,
@@ -3300,7 +3300,7 @@ gfc_conv_intrinsic_sizeof (gfc_se *se, gfc_expr *expr)
       else
        tmp = fold_convert (gfc_array_index_type,
                            size_in_bytes (type)); 
-      gfc_add_modify_expr (&argse.pre, source_bytes, tmp);
+      gfc_add_modify (&argse.pre, source_bytes, tmp);
 
       /* Obtain the size of the array in bytes.  */
       for (n = 0; n < arg->rank; n++)
@@ -3315,7 +3315,7 @@ gfc_conv_intrinsic_sizeof (gfc_se *se, gfc_expr *expr)
                             tmp, gfc_index_one_node);
          tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
                             tmp, source_bytes);
-         gfc_add_modify_expr (&argse.pre, source_bytes, tmp);
+         gfc_add_modify (&argse.pre, source_bytes, tmp);
        }
       se->expr = source_bytes;
     }
@@ -3473,13 +3473,13 @@ gfc_conv_intrinsic_array_transfer (gfc_se * se, gfc_expr * expr)
        {
          tree idx;
          idx = gfc_rank_cst[n];
-         gfc_add_modify_expr (&argse.pre, source_bytes, tmp);
+         gfc_add_modify (&argse.pre, source_bytes, tmp);
          stride = gfc_conv_descriptor_stride (argse.expr, idx);
          lower = gfc_conv_descriptor_lbound (argse.expr, idx);
          upper = gfc_conv_descriptor_ubound (argse.expr, idx);
          tmp = fold_build2 (MINUS_EXPR, gfc_array_index_type,
                             upper, lower);
-         gfc_add_modify_expr (&argse.pre, extent, tmp);
+         gfc_add_modify (&argse.pre, extent, tmp);
          tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                             extent, gfc_index_one_node);
          tmp = fold_build2 (MULT_EXPR, gfc_array_index_type,
@@ -3487,7 +3487,7 @@ gfc_conv_intrinsic_array_transfer (gfc_se * se, gfc_expr * expr)
        }
     }
 
-  gfc_add_modify_expr (&argse.pre, source_bytes, tmp);
+  gfc_add_modify (&argse.pre, source_bytes, tmp);
   gfc_add_block_to_block (&se->pre, &argse.pre);
   gfc_add_block_to_block (&se->post, &argse.post);
 
@@ -3522,7 +3522,7 @@ gfc_conv_intrinsic_array_transfer (gfc_se * se, gfc_expr * expr)
                        size_in_bytes (mold_type)); 
  
   dest_word_len = gfc_create_var (gfc_array_index_type, NULL);
-  gfc_add_modify_expr (&se->pre, dest_word_len, tmp);
+  gfc_add_modify (&se->pre, dest_word_len, tmp);
 
   /* Finally convert SIZE, if it is present.  */
   arg = arg->next;
@@ -3551,8 +3551,8 @@ gfc_conv_intrinsic_array_transfer (gfc_se * se, gfc_expr * expr)
   else
     tmp = source_bytes;
 
-  gfc_add_modify_expr (&se->pre, size_bytes, tmp);
-  gfc_add_modify_expr (&se->pre, size_words,
+  gfc_add_modify (&se->pre, size_bytes, tmp);
+  gfc_add_modify (&se->pre, size_words,
                       fold_build2 (CEIL_DIV_EXPR, gfc_array_index_type,
                                    size_bytes, dest_word_len));
 
@@ -3569,8 +3569,8 @@ gfc_conv_intrinsic_array_transfer (gfc_se * se, gfc_expr * expr)
                         tmp, gfc_index_one_node);
       tmp = fold_build2 (MIN_EXPR, gfc_array_index_type,
                         tmp, size_words);
-      gfc_add_modify_expr (&se->pre, size_words, tmp);
-      gfc_add_modify_expr (&se->pre, size_bytes,
+      gfc_add_modify (&se->pre, size_words, tmp);
+      gfc_add_modify (&se->pre, size_bytes,
                           fold_build2 (MULT_EXPR, gfc_array_index_type,
                                        size_words, dest_word_len));
       upper = fold_build2 (PLUS_EXPR, gfc_array_index_type,
@@ -3971,7 +3971,7 @@ gfc_conv_intrinsic_repeat (gfc_se * se, gfc_expr * expr)
                      build_int_cst (size_type_node, 0));
   tmp = fold_build3 (COND_EXPR, ncopies_type, cond,
                     build_int_cst (ncopies_type, 0), ncopies);
-  gfc_add_modify_expr (&se->pre, n, tmp);
+  gfc_add_modify (&se->pre, n, tmp);
   ncopies = n;
 
   /* Check that ncopies is not too large: ncopies should be less than
@@ -4006,7 +4006,7 @@ gfc_conv_intrinsic_repeat (gfc_se * se, gfc_expr * expr)
          memmove (dest + (i * slen * size), src, slen*size);  */
   gfc_start_block (&block);
   count = gfc_create_var (ncopies_type, "count");
-  gfc_add_modify_expr (&block, count, build_int_cst (ncopies_type, 0));
+  gfc_add_modify (&block, count, build_int_cst (ncopies_type, 0));
   exit_label = gfc_build_label_decl (NULL_TREE);
 
   /* Start the loop body.  */
@@ -4037,7 +4037,7 @@ gfc_conv_intrinsic_repeat (gfc_se * se, gfc_expr * expr)
   /* Increment count.  */
   tmp = fold_build2 (PLUS_EXPR, ncopies_type,
                     count, build_int_cst (TREE_TYPE (count), 1));
-  gfc_add_modify_expr (&body, count, tmp);
+  gfc_add_modify (&body, count, tmp);
 
   /* Build the loop.  */
   tmp = build1_v (LOOP_EXPR, gfc_finish_block (&body));
@@ -4101,7 +4101,7 @@ gfc_conv_intrinsic_loc (gfc_se * se, gfc_expr * expr)
   /* Create a temporary variable for loc return value.  Without this, 
      we get an error an ICE in gcc/expr.c(expand_expr_addr_expr_1).  */
   temp_var = gfc_create_var (gfc_get_int_type (gfc_index_integer_kind), NULL);
-  gfc_add_modify_expr (&se->pre, temp_var, se->expr);
+  gfc_add_modify (&se->pre, temp_var, se->expr);
   se->expr = temp_var;
 }
 
index 3d17a4c..e304d16 100644 (file)
@@ -24,7 +24,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "system.h"
 #include "coretypes.h"
 #include "tree.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "ggc.h"
 #include "toplev.h"
 #include "real.h"
@@ -450,7 +450,7 @@ set_parameter_const (stmtblock_t *block, tree var, enum iofield type,
                       var, TYPE_FIELDS (TREE_TYPE (var)), NULL_TREE);
   tmp = fold_build3 (COMPONENT_REF, TREE_TYPE (p->field), var, p->field,
                     NULL_TREE);
-  gfc_add_modify_expr (block, tmp, build_int_cst (TREE_TYPE (p->field), val));
+  gfc_add_modify (block, tmp, build_int_cst (TREE_TYPE (p->field), val));
   return p->mask;
 }
 
@@ -505,7 +505,7 @@ set_parameter_value (stmtblock_t *block, tree var, enum iofield type,
                       var, TYPE_FIELDS (TREE_TYPE (var)), NULL_TREE);
 
   tmp = fold_build3 (COMPONENT_REF, dest_type, var, p->field, NULL_TREE);
-  gfc_add_modify_expr (block, tmp, se.expr);
+  gfc_add_modify (block, tmp, se.expr);
   return p->mask;
 }
 
@@ -535,7 +535,7 @@ set_parameter_ref (stmtblock_t *block, stmtblock_t *postblock,
       /* If this is for the iostat variable initialize the
         user variable to LIBERROR_OK which is zero.  */
       if (type == IOPARM_common_iostat)
-       gfc_add_modify_expr (block, se.expr,
+       gfc_add_modify (block, se.expr,
                             build_int_cst (TREE_TYPE (se.expr), LIBERROR_OK));
     }
   else
@@ -549,13 +549,13 @@ set_parameter_ref (stmtblock_t *block, stmtblock_t *postblock,
       /* If this is for the iostat variable, initialize the
         user variable to LIBERROR_OK which is zero.  */
       if (type == IOPARM_common_iostat)
-       gfc_add_modify_expr (block, tmpvar,
+       gfc_add_modify (block, tmpvar,
                             build_int_cst (TREE_TYPE (tmpvar), LIBERROR_OK));
 
       addr = build_fold_addr_expr (tmpvar);
        /* After the I/O operation, we set the variable from the temporary.  */
       tmp = convert (TREE_TYPE (se.expr), tmpvar);
-      gfc_add_modify_expr (postblock, se.expr, tmp);
+      gfc_add_modify (postblock, se.expr, tmp);
      }
 
   if (p->param_type == IOPARM_ptype_common)
@@ -563,7 +563,7 @@ set_parameter_ref (stmtblock_t *block, stmtblock_t *postblock,
                       var, TYPE_FIELDS (TREE_TYPE (var)), NULL_TREE);
   tmp = fold_build3 (COMPONENT_REF, TREE_TYPE (p->field),
                     var, p->field, NULL_TREE);
-  gfc_add_modify_expr (block, tmp, addr);
+  gfc_add_modify (block, tmp, addr);
   return p->mask;
 }
 
@@ -672,9 +672,9 @@ set_string (stmtblock_t * block, stmtblock_t * postblock, tree var,
                               fold_convert (long_integer_type_node, tmp));
       gfc_free (msg);
 
-      gfc_add_modify_expr (&se.pre, io,
+      gfc_add_modify (&se.pre, io,
                 fold_convert (TREE_TYPE (io), GFC_DECL_ASSIGN_ADDR (se.expr)));
-      gfc_add_modify_expr (&se.pre, len, GFC_DECL_STRING_LEN (se.expr));
+      gfc_add_modify (&se.pre, len, GFC_DECL_STRING_LEN (se.expr));
     }
   else
     {
@@ -688,8 +688,8 @@ set_string (stmtblock_t * block, stmtblock_t * postblock, tree var,
        gcc_unreachable ();
 
       gfc_conv_string_parameter (&se);
-      gfc_add_modify_expr (&se.pre, io, fold_convert (TREE_TYPE (io), se.expr));
-      gfc_add_modify_expr (&se.pre, len, se.string_length);
+      gfc_add_modify (&se.pre, io, fold_convert (TREE_TYPE (io), se.expr));
+      gfc_add_modify (&se.pre, len, se.string_length);
     }
 
   gfc_add_block_to_block (block, &se.pre);
@@ -764,10 +764,10 @@ set_internal_unit (stmtblock_t * block, stmtblock_t * post_block,
 
   /* The cast is needed for character substrings and the descriptor
      data.  */
-  gfc_add_modify_expr (&se.pre, io, fold_convert (TREE_TYPE (io), tmp));
-  gfc_add_modify_expr (&se.pre, len,
+  gfc_add_modify (&se.pre, io, fold_convert (TREE_TYPE (io), tmp));
+  gfc_add_modify (&se.pre, len,
                       fold_convert (TREE_TYPE (len), se.string_length));
-  gfc_add_modify_expr (&se.pre, desc, se.expr);
+  gfc_add_modify (&se.pre, desc, se.expr);
 
   gfc_add_block_to_block (block, &se.pre);
   gfc_add_block_to_block (post_block, &se.post);
@@ -865,7 +865,7 @@ set_error_locus (stmtblock_t * block, tree var, locus * where)
   str = gfc_build_cstring_const (f->filename);
 
   str = gfc_build_addr_expr (pchar_type_node, str);
-  gfc_add_modify_expr (block, locus_file, str);
+  gfc_add_modify (block, locus_file, str);
 
   line = LOCATION_LINE (where->lb->location);
   set_parameter_const (block, var, IOPARM_common_line, line);
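
The trans-openmp.c hunks that follow also drop the tuples-branch build_gimple_modify_stmt calls in favour of gfortran's build2_v wrapper, so the clause constructors once again return a GENERIC assignment. A minimal sketch of the changed return statements (build2_v is assumed here to be trans.h's void-typed wrapper around build2):

    /* Old: helper that existed only on the tuples branch.  */
    return build_gimple_modify_stmt (dest, src);

    /* New: plain assignment via build2_v (assumed void-typed).  */
    return build2_v (MODIFY_EXPR, dest, src);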
index 06329a9..49fd1f6 100644 (file)
@@ -23,7 +23,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "system.h"
 #include "coretypes.h"
 #include "tree.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "ggc.h"
 #include "toplev.h"
 #include "real.h"
@@ -147,7 +147,7 @@ gfc_omp_clause_default_ctor (tree clause, tree decl, tree outer)
 
   gfc_init_block (&cond_block);
 
-  gfc_add_modify_expr (&cond_block, decl, outer);
+  gfc_add_modify (&cond_block, decl, outer);
   rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
   size = gfc_conv_descriptor_ubound (decl, rank);
   size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
@@ -164,11 +164,11 @@ gfc_omp_clause_default_ctor (tree clause, tree decl, tree outer)
   ptr = gfc_allocate_array_with_status (&cond_block,
                                        build_int_cst (pvoid_type_node, 0),
                                        size, NULL);
-  gfc_conv_descriptor_data_set_tuples (&cond_block, decl, ptr);
+  gfc_conv_descriptor_data_set (&cond_block, decl, ptr);
   then_b = gfc_finish_block (&cond_block);
 
   gfc_init_block (&cond_block);
-  gfc_conv_descriptor_data_set_tuples (&cond_block, decl, null_pointer_node);
+  gfc_conv_descriptor_data_set (&cond_block, decl, null_pointer_node);
   else_b = gfc_finish_block (&cond_block);
 
   cond = fold_build2 (NE_EXPR, boolean_type_node,
@@ -191,7 +191,7 @@ gfc_omp_clause_copy_ctor (tree clause, tree dest, tree src)
 
   if (! GFC_DESCRIPTOR_TYPE_P (type)
       || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
-    return build_gimple_modify_stmt (dest, src);
+    return build2_v (MODIFY_EXPR, dest, src);
 
   gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_FIRSTPRIVATE);
 
@@ -199,7 +199,7 @@ gfc_omp_clause_copy_ctor (tree clause, tree dest, tree src)
      and copied from SRC.  */
   gfc_start_block (&block);
 
-  gfc_add_modify_expr (&block, dest, src);
+  gfc_add_modify (&block, dest, src);
   rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
   size = gfc_conv_descriptor_ubound (dest, rank);
   size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
@@ -216,7 +216,7 @@ gfc_omp_clause_copy_ctor (tree clause, tree dest, tree src)
   ptr = gfc_allocate_array_with_status (&block,
                                        build_int_cst (pvoid_type_node, 0),
                                        size, NULL);
-  gfc_conv_descriptor_data_set_tuples (&block, dest, ptr);
+  gfc_conv_descriptor_data_set (&block, dest, ptr);
   call = build_call_expr (built_in_decls[BUILT_IN_MEMCPY], 3, ptr,
                          fold_convert (pvoid_type_node,
                                        gfc_conv_descriptor_data_get (src)),
@@ -236,7 +236,7 @@ gfc_omp_clause_assign_op (tree clause ATTRIBUTE_UNUSED, tree dest, tree src)
 
   if (! GFC_DESCRIPTOR_TYPE_P (type)
       || GFC_TYPE_ARRAY_AKIND (type) != GFC_ARRAY_ALLOCATABLE)
-    return build_gimple_modify_stmt (dest, src);
+    return build2_v (MODIFY_EXPR, dest, src);
 
   /* Handle copying allocatable arrays.  */
   gfc_start_block (&block);
@@ -602,7 +602,7 @@ gfc_trans_omp_array_reduction (tree c, gfc_symbol *sym, locus where)
 
       gfc_start_block (&block);
 
-      gfc_add_modify_expr (&block, decl, outer_sym.backend_decl);
+      gfc_add_modify (&block, decl, outer_sym.backend_decl);
       rank = gfc_rank_cst[GFC_TYPE_ARRAY_RANK (type) - 1];
       size = gfc_conv_descriptor_ubound (decl, rank);
       size = fold_build2 (MINUS_EXPR, gfc_array_index_type, size,
@@ -619,7 +619,7 @@ gfc_trans_omp_array_reduction (tree c, gfc_symbol *sym, locus where)
       ptr = gfc_allocate_array_with_status (&block,
                                            build_int_cst (pvoid_type_node, 0),
                                            size, NULL);
-      gfc_conv_descriptor_data_set_tuples (&block, decl, ptr);
+      gfc_conv_descriptor_data_set (&block, decl, ptr);
       gfc_add_expr_to_block (&block, gfc_trans_assignment (e1, e2, false));
       stmt = gfc_finish_block (&block);
     }
@@ -1062,7 +1062,7 @@ gfc_trans_omp_atomic (gfc_code *code)
          tree accum = gfc_create_var (TREE_TYPE (rse.expr), NULL);
          gfc_actual_arglist *arg;
 
-         gfc_add_modify_stmt (&block, accum, rse.expr);
+         gfc_add_modify (&block, accum, rse.expr);
          for (arg = expr2->value.function.actual->next->next; arg;
               arg = arg->next)
            {
@@ -1070,7 +1070,7 @@ gfc_trans_omp_atomic (gfc_code *code)
              gfc_conv_expr (&rse, arg->expr);
              gfc_add_block_to_block (&block, &rse.pre);
              x = fold_build2 (op, TREE_TYPE (accum), accum, rse.expr);
-             gfc_add_modify_stmt (&block, accum, x);
+             gfc_add_modify (&block, accum, x);
            }
 
          rse.expr = accum;
@@ -1204,11 +1204,11 @@ gfc_trans_omp_do (gfc_code *code, stmtblock_t *pblock,
       /* Loop body.  */
       if (simple)
        {
-         TREE_VEC_ELT (init, i) = build2_v (GIMPLE_MODIFY_STMT, dovar, from);
+         TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, dovar, from);
          TREE_VEC_ELT (cond, i) = fold_build2 (simple > 0 ? LE_EXPR : GE_EXPR,
                                                boolean_type_node, dovar, to);
          TREE_VEC_ELT (incr, i) = fold_build2 (PLUS_EXPR, type, dovar, step);
-         TREE_VEC_ELT (incr, i) = fold_build2 (GIMPLE_MODIFY_STMT, type, dovar,
+         TREE_VEC_ELT (incr, i) = fold_build2 (MODIFY_EXPR, type, dovar,
                                                TREE_VEC_ELT (incr, i));
        }
       else
@@ -1225,13 +1225,13 @@ gfc_trans_omp_do (gfc_code *code, stmtblock_t *pblock,
          tmp = fold_build2 (TRUNC_DIV_EXPR, type, tmp, step);
          tmp = gfc_evaluate_now (tmp, pblock);
          count = gfc_create_var (type, "count");
-         TREE_VEC_ELT (init, i) = build2_v (GIMPLE_MODIFY_STMT, count,
+         TREE_VEC_ELT (init, i) = build2_v (MODIFY_EXPR, count,
                                             build_int_cst (type, 0));
          TREE_VEC_ELT (cond, i) = fold_build2 (LT_EXPR, boolean_type_node,
                                                count, tmp);
          TREE_VEC_ELT (incr, i) = fold_build2 (PLUS_EXPR, type, count,
                                                build_int_cst (type, 1));
-         TREE_VEC_ELT (incr, i) = fold_build2 (GIMPLE_MODIFY_STMT, type,
+         TREE_VEC_ELT (incr, i) = fold_build2 (MODIFY_EXPR, type,
                                                count, TREE_VEC_ELT (incr, i));
 
          /* Initialize DOVAR.  */
@@ -1260,7 +1260,7 @@ gfc_trans_omp_do (gfc_code *code, stmtblock_t *pblock,
                 than value after iterator increment.  */
              tmp = gfc_evaluate_now (step, pblock);
              tmp = fold_build2 (PLUS_EXPR, type, dovar, tmp);
-             tmp = fold_build2 (GIMPLE_MODIFY_STMT, type, dovar, tmp);
+             tmp = fold_build2 (MODIFY_EXPR, type, dovar, tmp);
              for (c = omp_clauses; c ; c = OMP_CLAUSE_CHAIN (c))
                if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
                    && OMP_CLAUSE_DECL (c) == dovar)
@@ -1308,7 +1308,7 @@ gfc_trans_omp_do (gfc_code *code, stmtblock_t *pblock,
   dovar_init = nreverse (dovar_init);
   while (dovar_init)
     {
-      gfc_add_modify_stmt (&body, TREE_PURPOSE (dovar_init),
+      gfc_add_modify (&body, TREE_PURPOSE (dovar_init),
                           TREE_VALUE (dovar_init));
       dovar_init = TREE_CHAIN (dovar_init);
     }
@@ -1381,7 +1381,7 @@ gfc_trans_omp_parallel (gfc_code *code)
   omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
   stmt = gfc_trans_omp_code (code->block->next, true);
-  stmt = build4_v (OMP_PARALLEL, stmt, omp_clauses, NULL, NULL);
+  stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses);
   gfc_add_expr_to_block (&block, stmt);
   return gfc_finish_block (&block);
 }
@@ -1421,7 +1421,7 @@ gfc_trans_omp_parallel_do (gfc_code *code)
     stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
   else
     poplevel (0, 0, 0);
-  stmt = build4_v (OMP_PARALLEL, stmt, omp_clauses, NULL, NULL);
+  stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses);
   OMP_PARALLEL_COMBINED (stmt) = 1;
   gfc_add_expr_to_block (&block, stmt);
   return gfc_finish_block (&block);
@@ -1446,7 +1446,7 @@ gfc_trans_omp_parallel_sections (gfc_code *code)
     stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
   else
     poplevel (0, 0, 0);
-  stmt = build4_v (OMP_PARALLEL, stmt, omp_clauses, NULL, NULL);
+  stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses);
   OMP_PARALLEL_COMBINED (stmt) = 1;
   gfc_add_expr_to_block (&block, stmt);
   return gfc_finish_block (&block);
@@ -1471,7 +1471,7 @@ gfc_trans_omp_parallel_workshare (gfc_code *code)
     stmt = build3_v (BIND_EXPR, NULL, stmt, poplevel (1, 0, 0));
   else
     poplevel (0, 0, 0);
-  stmt = build4_v (OMP_PARALLEL, stmt, omp_clauses, NULL, NULL);
+  stmt = build2 (OMP_PARALLEL, void_type_node, stmt, omp_clauses);
   OMP_PARALLEL_COMBINED (stmt) = 1;
   gfc_add_expr_to_block (&block, stmt);
   return gfc_finish_block (&block);
@@ -1503,7 +1503,7 @@ gfc_trans_omp_sections (gfc_code *code, gfc_omp_clauses *clauses)
     }
   stmt = gfc_finish_block (&body);
 
-  stmt = build3_v (OMP_SECTIONS, stmt, omp_clauses, NULL_TREE);
+  stmt = build2 (OMP_SECTIONS, void_type_node, stmt, omp_clauses);
   gfc_add_expr_to_block (&block, stmt);
 
   return gfc_finish_block (&block);
@@ -1522,16 +1522,13 @@ static tree
 gfc_trans_omp_task (gfc_code *code)
 {
   stmtblock_t block;
-  tree stmt, body_stmt, omp_clauses;
+  tree stmt, omp_clauses;
 
   gfc_start_block (&block);
   omp_clauses = gfc_trans_omp_clauses (&block, code->ext.omp_clauses,
                                       code->loc);
-  body_stmt = gfc_trans_omp_code (code->block->next, true);
-  stmt = make_node (OMP_TASK);
-  TREE_TYPE (stmt) = void_type_node;
-  OMP_TASK_CLAUSES (stmt) = omp_clauses;
-  OMP_TASK_BODY (stmt) = body_stmt;
+  stmt = gfc_trans_omp_code (code->block->next, true);
+  stmt = build2 (OMP_TASK, void_type_node, stmt, omp_clauses);
   gfc_add_expr_to_block (&block, stmt);
   return gfc_finish_block (&block);
 }
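
The OpenMP translation hunks above all follow the same conversion: with the tuples merge the Fortran front end no longer builds GIMPLE_MODIFY_STMT or four-operand OMP_PARALLEL nodes itself, but emits plain GENERIC that is tuplified later during gimplification.  A minimal before/after sketch of the idiom (DEST, SRC, BODY and CLAUSES are hypothetical operands standing in for the values computed above):

    /* Old (gimple-tuples-branch): front end builds flat statement codes.  */
    stmt = build_gimple_modify_stmt (dest, src);
    par  = build4_v (OMP_PARALLEL, body, clauses, NULL, NULL);

    /* New (after the merge): ordinary GENERIC, lowered to tuples later.  */
    stmt = build2_v (MODIFY_EXPR, dest, src);
    par  = build2 (OMP_PARALLEL, void_type_node, body, clauses);
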
index 79a1446..bf4305b 100644
@@ -25,7 +25,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "system.h"
 #include "coretypes.h"
 #include "tree.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "ggc.h"
 #include "toplev.h"
 #include "real.h"
@@ -127,8 +127,8 @@ gfc_trans_label_assign (gfc_code * code)
       label_tree = gfc_build_addr_expr (pvoid_type_node, label_tree);
     }
 
-  gfc_add_modify_expr (&se.pre, len, len_tree);
-  gfc_add_modify_expr (&se.pre, addr, label_tree);
+  gfc_add_modify (&se.pre, len, len_tree);
+  gfc_add_modify (&se.pre, addr, label_tree);
 
   return gfc_finish_block (&se.pre);
 }
@@ -271,9 +271,9 @@ gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
                                              &tmp_loop, info, tmp,
                                              false, true, false,
                                             & arg->expr->where);
-         gfc_add_modify_expr (&se->pre, size, tmp);
+         gfc_add_modify (&se->pre, size, tmp);
          tmp = fold_convert (pvoid_type_node, info->data);
-         gfc_add_modify_expr (&se->pre, data, tmp);
+         gfc_add_modify (&se->pre, data, tmp);
          gfc_merge_block_scope (&block);
 
          /* Obtain the argument descriptor for unpacking.  */
@@ -294,7 +294,7 @@ gfc_conv_elemental_dependencies (gfc_se * se, gfc_se * loopse,
                                          offset, tmp);
            }
          info->offset = gfc_create_var (gfc_array_index_type, NULL);     
-         gfc_add_modify_expr (&se->pre, info->offset, offset);
+         gfc_add_modify (&se->pre, info->offset, offset);
 
          /* Copy the result back using unpack.  */
          tmp = build_call_expr (gfor_fndecl_in_unpack, 2, parmse.expr, data);
@@ -349,7 +349,7 @@ gfc_trans_call (gfc_code * code, bool dependency_check)
          se.expr = convert (gfc_typenode_for_spec (&sym->ts), se.expr);
          if (sym->backend_decl == NULL)
            sym->backend_decl = gfc_get_symbol_decl (sym);
-         gfc_add_modify_expr (&se.pre, sym->backend_decl, se.expr);
+         gfc_add_modify (&se.pre, sym->backend_decl, se.expr);
        }
       else
        gfc_add_expr_to_block (&se.pre, se.expr);
@@ -734,7 +734,7 @@ gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
   type = TREE_TYPE (dovar);
 
   /* Initialize the DO variable: dovar = from.  */
-  gfc_add_modify_expr (pblock, dovar, from);
+  gfc_add_modify (pblock, dovar, from);
 
   /* Cycle and exit statements are implemented with gotos.  */
   cycle_label = gfc_build_label_decl (NULL_TREE);
@@ -763,7 +763,7 @@ gfc_trans_simple_do (gfc_code * code, stmtblock_t *pblock, tree dovar,
 
   /* Increment the loop variable.  */
   tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
-  gfc_add_modify_expr (&body, dovar, tmp);
+  gfc_add_modify (&body, dovar, tmp);
 
   /* The loop exit.  */
   tmp = build1_v (GOTO_EXPR, exit_label);
@@ -911,7 +911,7 @@ gfc_trans_do (gfc_code * code)
       tmp = fold_build1 (FIX_TRUNC_EXPR, utype, tmp);
     }
   countm1 = gfc_create_var (utype, "countm1");
-  gfc_add_modify_expr (&block, countm1, tmp);
+  gfc_add_modify (&block, countm1, tmp);
 
   /* Cycle and exit statements are implemented with gotos.  */
   cycle_label = gfc_build_label_decl (NULL_TREE);
@@ -919,7 +919,7 @@ gfc_trans_do (gfc_code * code)
   TREE_USED (exit_label) = 1;
 
   /* Initialize the DO variable: dovar = from.  */
-  gfc_add_modify_expr (&block, dovar, from);
+  gfc_add_modify (&block, dovar, from);
 
   /* If the loop is empty, go directly to the exit label.  */
   tmp = fold_build3 (COND_EXPR, void_type_node, empty,
@@ -949,7 +949,7 @@ gfc_trans_do (gfc_code * code)
 
   /* Increment the loop variable.  */
   tmp = fold_build2 (PLUS_EXPR, type, dovar, step);
-  gfc_add_modify_expr (&body, dovar, tmp);
+  gfc_add_modify (&body, dovar, tmp);
 
   /* End with the loop condition.  Loop until countm1 == 0.  */
   cond = fold_build2 (EQ_EXPR, boolean_type_node, countm1,
@@ -961,7 +961,7 @@ gfc_trans_do (gfc_code * code)
 
   /* Decrement the loop count.  */
   tmp = fold_build2 (MINUS_EXPR, utype, countm1, build_int_cst (utype, 1));
-  gfc_add_modify_expr (&body, countm1, tmp);
+  gfc_add_modify (&body, countm1, tmp);
 
   /* End of loop body.  */
   tmp = gfc_finish_block (&body);
@@ -1479,7 +1479,7 @@ gfc_trans_character_select (gfc_code *code)
   tmp = build_call_expr (fndecl, 4, init, build_int_cst (NULL_TREE, n),
                         se.expr, se.string_length);
   case_num = gfc_create_var (integer_type_node, "case_num");
-  gfc_add_modify_expr (&block, case_num, tmp);
+  gfc_add_modify (&block, case_num, tmp);
 
   gfc_add_block_to_block (&block, &se.post);
 
@@ -1609,7 +1609,7 @@ forall_make_variable_temp (gfc_code *c, stmtblock_t *pre, stmtblock_t *post)
        {
          /* Use the variable offset for the temporary.  */
          tmp = gfc_conv_descriptor_offset (tse.expr);
-         gfc_add_modify_expr (pre, tmp,
+         gfc_add_modify (pre, tmp,
                gfc_conv_array_offset (old_sym->backend_decl));
        }
     }
@@ -1765,7 +1765,7 @@ gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
 
   /* Initialize the mask index outside the FORALL nest.  */
   if (mask_flag && forall_tmp->mask)
-    gfc_add_modify_expr (outer, forall_tmp->maskindex, gfc_index_zero_node);
+    gfc_add_modify (outer, forall_tmp->maskindex, gfc_index_zero_node);
 
   iter = forall_tmp->this_loop;
   nvar = forall_tmp->nvar;
@@ -1798,7 +1798,7 @@ gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
 
       /* Increment the loop variable.  */
       tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), var, step);
-      gfc_add_modify_expr (&block, var, tmp);
+      gfc_add_modify (&block, var, tmp);
 
       /* Advance to the next mask element.  Only do this for the
         innermost loop.  */
@@ -1807,26 +1807,26 @@ gfc_trans_forall_loop (forall_info *forall_tmp, tree body,
          tree maskindex = forall_tmp->maskindex;
          tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                             maskindex, gfc_index_one_node);
-         gfc_add_modify_expr (&block, maskindex, tmp);
+         gfc_add_modify (&block, maskindex, tmp);
        }
 
       /* Decrement the loop counter.  */
       tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), count,
                         build_int_cst (TREE_TYPE (var), 1));
-      gfc_add_modify_expr (&block, count, tmp);
+      gfc_add_modify (&block, count, tmp);
 
       body = gfc_finish_block (&block);
 
       /* Loop var initialization.  */
       gfc_init_block (&block);
-      gfc_add_modify_expr (&block, var, start);
+      gfc_add_modify (&block, var, start);
 
 
       /* Initialize the loop counter.  */
       tmp = fold_build2 (MINUS_EXPR, TREE_TYPE (var), step, start);
       tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (var), end, tmp);
       tmp = fold_build2 (TRUNC_DIV_EXPR, TREE_TYPE (var), tmp, step);
-      gfc_add_modify_expr (&block, count, tmp);
+      gfc_add_modify (&block, count, tmp);
 
       /* The loop expression.  */
       tmp = build1_v (LOOP_EXPR, body);
@@ -1918,7 +1918,7 @@ gfc_do_allocate (tree bytesize, tree size, tree * pdata, stmtblock_t * pblock,
       *pdata = convert (pvoid_type_node, tmpvar);
 
       tmp = gfc_call_malloc (pblock, TREE_TYPE (tmpvar), bytesize);
-      gfc_add_modify_expr (pblock, tmpvar, tmp);
+      gfc_add_modify (pblock, tmpvar, tmp);
     }
   return tmpvar;
 }
@@ -1954,13 +1954,13 @@ generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
 
       /* Use the scalar assignment as is.  */
       gfc_add_block_to_block (&block, &lse.pre);
-      gfc_add_modify_expr (&block, lse.expr, tmp);
+      gfc_add_modify (&block, lse.expr, tmp);
       gfc_add_block_to_block (&block, &lse.post);
 
       /* Increment the count1.  */
       tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
                         gfc_index_one_node);
-      gfc_add_modify_expr (&block, count1, tmp);
+      gfc_add_modify (&block, count1, tmp);
 
       tmp = gfc_finish_block (&block);
     }
@@ -2016,14 +2016,14 @@ generate_loop_for_temp_to_lhs (gfc_expr *expr, tree tmp1, tree count3,
       /* Increment count1.  */
       tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                         count1, gfc_index_one_node);
-      gfc_add_modify_expr (&body, count1, tmp);
+      gfc_add_modify (&body, count1, tmp);
 
       /* Increment count3.  */
       if (count3)
        {
          tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                             count3, gfc_index_one_node);
-         gfc_add_modify_expr (&body, count3, tmp);
+         gfc_add_modify (&body, count3, tmp);
        }
 
       /* Generate the copying loops.  */
@@ -2117,21 +2117,21 @@ generate_loop_for_rhs_to_temp (gfc_expr *expr2, tree tmp1, tree count3,
       /* Increment count1.  */
       tmp = fold_build2 (PLUS_EXPR, TREE_TYPE (count1), count1,
                         gfc_index_one_node);
-      gfc_add_modify_expr (&block, count1, tmp);
+      gfc_add_modify (&block, count1, tmp);
     }
   else
     {
       /* Increment count1.  */
       tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                         count1, gfc_index_one_node);
-      gfc_add_modify_expr (&body1, count1, tmp);
+      gfc_add_modify (&body1, count1, tmp);
 
       /* Increment count3.  */
       if (count3)
        {
          tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                             count3, gfc_index_one_node);
-         gfc_add_modify_expr (&body1, count3, tmp);
+         gfc_add_modify (&body1, count3, tmp);
        }
 
       /* Generate the copying loops.  */
@@ -2257,7 +2257,7 @@ compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
 
   /* Otherwise, create a temporary variable to compute the result.  */
   number = gfc_create_var (gfc_array_index_type, "num");
-  gfc_add_modify_expr (block, number, gfc_index_zero_node);
+  gfc_add_modify (block, number, gfc_index_zero_node);
 
   gfc_start_block (&body);
   if (inner_size_body)
@@ -2267,7 +2267,7 @@ compute_overall_iter_number (forall_info *nested_forall_info, tree inner_size,
                       number, inner_size);
   else
     tmp = inner_size;
-  gfc_add_modify_expr (&body, number, tmp);
+  gfc_add_modify (&body, number, tmp);
   tmp = gfc_finish_block (&body);
 
   /* Generate loops.  */
@@ -2378,13 +2378,13 @@ gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
   if (wheremask)
     {
       count = gfc_create_var (gfc_array_index_type, "count");
-      gfc_add_modify_expr (block, count, gfc_index_zero_node);
+      gfc_add_modify (block, count, gfc_index_zero_node);
     }
   else
     count = NULL;
 
   /* Initialize count1.  */
-  gfc_add_modify_expr (block, count1, gfc_index_zero_node);
+  gfc_add_modify (block, count1, gfc_index_zero_node);
 
   /* Calculate the size of temporary needed in the assignment. Return loop, lss
      and rss which are used in function generate_loop_for_rhs_to_temp().  */
@@ -2423,11 +2423,11 @@ gfc_trans_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
   gfc_add_expr_to_block (block, tmp);
 
   /* Reset count1.  */
-  gfc_add_modify_expr (block, count1, gfc_index_zero_node);
+  gfc_add_modify (block, count1, gfc_index_zero_node);
 
   /* Reset count.  */
   if (wheremask)
-    gfc_add_modify_expr (block, count, gfc_index_zero_node);
+    gfc_add_modify (block, count, gfc_index_zero_node);
 
   /* Generate codes to copy the temporary to lhs.  */
   tmp = generate_loop_for_temp_to_lhs (expr1, tmp1, count, count1,
@@ -2469,7 +2469,7 @@ gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
   tree tmp, tmp1, ptemp1;
 
   count = gfc_create_var (gfc_array_index_type, "count");
-  gfc_add_modify_expr (block, count, gfc_index_zero_node);
+  gfc_add_modify (block, count, gfc_index_zero_node);
 
   inner_size = integer_one_node;
   lss = gfc_walk_expr (expr1);
@@ -2490,14 +2490,14 @@ gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
       rse.want_pointer = 1;
       gfc_conv_expr (&rse, expr2);
       gfc_add_block_to_block (&body, &rse.pre);
-      gfc_add_modify_expr (&body, lse.expr,
+      gfc_add_modify (&body, lse.expr,
                           fold_convert (TREE_TYPE (lse.expr), rse.expr));
       gfc_add_block_to_block (&body, &rse.post);
 
       /* Increment count.  */
       tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                         count, gfc_index_one_node);
-      gfc_add_modify_expr (&body, count, tmp);
+      gfc_add_modify (&body, count, tmp);
 
       tmp = gfc_finish_block (&body);
 
@@ -2507,7 +2507,7 @@ gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
       gfc_add_expr_to_block (block, tmp);
 
       /* Reset count.  */
-      gfc_add_modify_expr (block, count, gfc_index_zero_node);
+      gfc_add_modify (block, count, gfc_index_zero_node);
 
       gfc_start_block (&body);
       gfc_init_se (&lse, NULL);
@@ -2516,12 +2516,12 @@ gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
       lse.want_pointer = 1;
       gfc_conv_expr (&lse, expr1);
       gfc_add_block_to_block (&body, &lse.pre);
-      gfc_add_modify_expr (&body, lse.expr, rse.expr);
+      gfc_add_modify (&body, lse.expr, rse.expr);
       gfc_add_block_to_block (&body, &lse.post);
       /* Increment count.  */
       tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                         count, gfc_index_one_node);
-      gfc_add_modify_expr (&body, count, tmp);
+      gfc_add_modify (&body, count, tmp);
       tmp = gfc_finish_block (&body);
 
       /* Generate body and loops according to the information in
@@ -2566,7 +2566,7 @@ gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
       /* Increment count.  */
       tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                         count, gfc_index_one_node);
-      gfc_add_modify_expr (&body, count, tmp);
+      gfc_add_modify (&body, count, tmp);
 
       tmp = gfc_finish_block (&body);
 
@@ -2576,13 +2576,13 @@ gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
       gfc_add_expr_to_block (block, tmp);
 
       /* Reset count.  */
-      gfc_add_modify_expr (block, count, gfc_index_zero_node);
+      gfc_add_modify (block, count, gfc_index_zero_node);
 
       parm = gfc_build_array_ref (tmp1, count, NULL);
       lss = gfc_walk_expr (expr1);
       gfc_init_se (&lse, NULL);
       gfc_conv_expr_descriptor (&lse, expr1, lss);
-      gfc_add_modify_expr (&lse.pre, lse.expr, parm);
+      gfc_add_modify (&lse.pre, lse.expr, parm);
       gfc_start_block (&body);
       gfc_add_block_to_block (&body, &lse.pre);
       gfc_add_block_to_block (&body, &lse.post);
@@ -2590,7 +2590,7 @@ gfc_trans_pointer_assign_need_temp (gfc_expr * expr1, gfc_expr * expr2,
       /* Increment count.  */
       tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                         count, gfc_index_one_node);
-      gfc_add_modify_expr (&body, count, tmp);
+      gfc_add_modify (&body, count, tmp);
 
       tmp = gfc_finish_block (&body);
 
@@ -2822,7 +2822,7 @@ gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
       /* As the mask array can be very big, prefer compact boolean types.  */
       tree mask_type = gfc_get_logical_type (gfc_logical_kinds[0].kind);
 
-      gfc_add_modify_expr (&block, maskindex, gfc_index_zero_node);
+      gfc_add_modify (&block, maskindex, gfc_index_zero_node);
 
       /* Start of mask assignment loop body.  */
       gfc_start_block (&body);
@@ -2836,12 +2836,12 @@ gfc_trans_forall_1 (gfc_code * code, forall_info * nested_forall_info)
       se.expr = convert (mask_type, se.expr);
 
       tmp = gfc_build_array_ref (mask, maskindex, NULL);
-      gfc_add_modify_expr (&body, tmp, se.expr);
+      gfc_add_modify (&body, tmp, se.expr);
 
       /* Advance to the next mask element.  */
       tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                         maskindex, gfc_index_one_node);
-      gfc_add_modify_expr (&body, maskindex, tmp);
+      gfc_add_modify (&body, maskindex, tmp);
 
       /* Generate the loops.  */
       tmp = gfc_finish_block (&body);
@@ -2999,7 +2999,7 @@ gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
   /* Variable to index the temporary.  */
   count = gfc_create_var (gfc_array_index_type, "count");
   /* Initialize count.  */
-  gfc_add_modify_expr (block, count, gfc_index_zero_node);
+  gfc_add_modify (block, count, gfc_index_zero_node);
 
   gfc_start_block (&body);
 
@@ -3041,14 +3041,14 @@ gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
   gfc_add_block_to_block (&body1, &lse.pre);
   gfc_add_block_to_block (&body1, &rse.pre);
 
-  gfc_add_modify_expr (&body1, cond, fold_convert (mask_type, rse.expr));
+  gfc_add_modify (&body1, cond, fold_convert (mask_type, rse.expr));
 
   if (mask && (cmask || pmask))
     {
       tmp = gfc_build_array_ref (mask, count, NULL);
       if (invert)
        tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, tmp);
-      gfc_add_modify_expr (&body1, mtmp, tmp);
+      gfc_add_modify (&body1, mtmp, tmp);
     }
 
   if (cmask)
@@ -3057,7 +3057,7 @@ gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
       tmp = cond;
       if (mask)
        tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
-      gfc_add_modify_expr (&body1, tmp1, tmp);
+      gfc_add_modify (&body1, tmp1, tmp);
     }
 
   if (pmask)
@@ -3066,7 +3066,7 @@ gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
       tmp = fold_build1 (TRUTH_NOT_EXPR, mask_type, cond);
       if (mask)
        tmp = fold_build2 (TRUTH_AND_EXPR, mask_type, mtmp, tmp);
-      gfc_add_modify_expr (&body1, tmp1, tmp);
+      gfc_add_modify (&body1, tmp1, tmp);
     }
 
   gfc_add_block_to_block (&body1, &lse.post);
@@ -3081,7 +3081,7 @@ gfc_evaluate_where_mask (gfc_expr * me, forall_info * nested_forall_info,
       /* Increment count.  */
       tmp1 = fold_build2 (PLUS_EXPR, gfc_array_index_type, count,
                           gfc_index_one_node);
-      gfc_add_modify_expr (&body1, count, tmp1);
+      gfc_add_modify (&body1, count, tmp1);
 
       /* Generate the copying loops.  */
       gfc_trans_scalarizing_loops (&loop, &body1);
@@ -3242,7 +3242,7 @@ gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
       /* Increment count1.  */
       tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                         count1, gfc_index_one_node);
-      gfc_add_modify_expr (&body, count1, tmp);
+      gfc_add_modify (&body, count1, tmp);
 
       /* Use the scalar assignment as is.  */
       gfc_add_block_to_block (&block, &body);
@@ -3258,7 +3258,7 @@ gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
              expression.  */
           tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                             count1, gfc_index_one_node);
-          gfc_add_modify_expr (&body, count1, tmp);
+          gfc_add_modify (&body, count1, tmp);
           gfc_trans_scalarized_loop_boundary (&loop, &body);
 
           /* We need to copy the temporary to the actual lhs.  */
@@ -3292,14 +3292,14 @@ gfc_trans_where_assign (gfc_expr *expr1, gfc_expr *expr2,
           /* Increment count2.  */
           tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                             count2, gfc_index_one_node);
-          gfc_add_modify_expr (&body, count2, tmp);
+          gfc_add_modify (&body, count2, tmp);
         }
       else
         {
           /* Increment count1.  */
           tmp = fold_build2 (PLUS_EXPR, gfc_array_index_type,
                             count1, gfc_index_one_node);
-          gfc_add_modify_expr (&body, count1, tmp);
+          gfc_add_modify (&body, count1, tmp);
         }
 
       /* Generate the copying loops.  */
@@ -3492,8 +3492,8 @@ gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
                       /* Variables to control maskexpr.  */
                       count1 = gfc_create_var (gfc_array_index_type, "count1");
                       count2 = gfc_create_var (gfc_array_index_type, "count2");
-                      gfc_add_modify_expr (block, count1, gfc_index_zero_node);
-                      gfc_add_modify_expr (block, count2, gfc_index_zero_node);
+                      gfc_add_modify (block, count1, gfc_index_zero_node);
+                      gfc_add_modify (block, count2, gfc_index_zero_node);
 
                       tmp = gfc_trans_where_assign (expr1, expr2,
                                                    cmask, invert,
@@ -3510,8 +3510,8 @@ gfc_trans_where_2 (gfc_code * code, tree mask, bool invert,
                   /* Variables to control maskexpr.  */
                   count1 = gfc_create_var (gfc_array_index_type, "count1");
                   count2 = gfc_create_var (gfc_array_index_type, "count2");
-                  gfc_add_modify_expr (block, count1, gfc_index_zero_node);
-                  gfc_add_modify_expr (block, count2, gfc_index_zero_node);
+                  gfc_add_modify (block, count1, gfc_index_zero_node);
+                  gfc_add_modify (block, count2, gfc_index_zero_node);
 
                   tmp = gfc_trans_where_assign (expr1, expr2,
                                                cmask, invert,
@@ -3893,7 +3893,7 @@ gfc_trans_allocate (gfc_code * code)
       gfc_init_se (&se, NULL);
       gfc_conv_expr_lhs (&se, code->expr);
       tmp = convert (TREE_TYPE (se.expr), stat);
-      gfc_add_modify_expr (&block, se.expr, tmp);
+      gfc_add_modify (&block, se.expr, tmp);
     }
 
   return gfc_finish_block (&block);
@@ -3942,7 +3942,7 @@ gfc_trans_deallocate (gfc_code * code)
       apstat = build_fold_addr_expr (astat);
 
       /* Initialize astat to 0.  */
-      gfc_add_modify_expr (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
+      gfc_add_modify (&block, astat, build_int_cst (TREE_TYPE (astat), 0));
     }
   else
     pstat = apstat = stat = astat = NULL_TREE;
@@ -3997,7 +3997,7 @@ gfc_trans_deallocate (gfc_code * code)
       if (code->expr)
        {
          apstat = fold_build2 (PLUS_EXPR, TREE_TYPE (stat), astat, stat);
-         gfc_add_modify_expr (&se.pre, astat, apstat);
+         gfc_add_modify (&se.pre, astat, apstat);
        }
 
       tmp = gfc_finish_block (&se.pre);
@@ -4011,7 +4011,7 @@ gfc_trans_deallocate (gfc_code * code)
       gfc_init_se (&se, NULL);
       gfc_conv_expr_lhs (&se, code->expr);
       tmp = convert (TREE_TYPE (se.expr), astat);
-      gfc_add_modify_expr (&block, se.expr, tmp);
+      gfc_add_modify (&block, se.expr, tmp);
     }
 
   return gfc_finish_block (&block);
index d6aef87..911e379 100644
@@ -23,7 +23,8 @@ along with GCC; see the file COPYING3.  If not see
 #include "system.h"
 #include "coretypes.h"
 #include "tree.h"
-#include "tree-gimple.h"
+#include "gimple.h"
+#include "tree-iterator.h"
 #include "ggc.h"
 #include "toplev.h"
 #include "defaults.h"
@@ -142,19 +143,18 @@ gfc_evaluate_now (tree expr, stmtblock_t * pblock)
     return expr;
 
   var = gfc_create_var (TREE_TYPE (expr), NULL);
-  gfc_add_modify_expr (pblock, var, expr);
+  gfc_add_modify (pblock, var, expr);
 
   return var;
 }
 
 
-/* Build a MODIFY_EXPR (or GIMPLE_MODIFY_STMT) node and add it to a
-   given statement block PBLOCK.  A MODIFY_EXPR is an assignment:
+/* Build a MODIFY_EXPR node and add it to a given statement block PBLOCK.  
+   A MODIFY_EXPR is an assignment:
    LHS <- RHS.  */
 
 void
-gfc_add_modify (stmtblock_t * pblock, tree lhs, tree rhs,
-               bool tuples_p)
+gfc_add_modify (stmtblock_t * pblock, tree lhs, tree rhs)
 {
   tree tmp;
 
@@ -167,8 +167,7 @@ gfc_add_modify (stmtblock_t * pblock, tree lhs, tree rhs,
              || AGGREGATE_TYPE_P (TREE_TYPE (lhs)));
 #endif
 
-  tmp = fold_build2 (tuples_p ? GIMPLE_MODIFY_STMT : MODIFY_EXPR,
-                    void_type_node, lhs, rhs);
+  tmp = fold_build2 (MODIFY_EXPR, void_type_node, lhs, rhs);
   gfc_add_expr_to_block (pblock, tmp);
 }
 
@@ -434,7 +433,7 @@ gfc_trans_runtime_check (bool error, bool once, tree cond, stmtblock_t * pblock,
   gfc_add_expr_to_block (&block, tmp);
 
   if (once)
-    gfc_add_modify_expr (&block, tmpvar, boolean_false_node);
+    gfc_add_modify (&block, tmpvar, boolean_false_node);
 
   body = gfc_finish_block (&block);
 
@@ -495,7 +494,7 @@ gfc_call_malloc (stmtblock_t * block, tree type, tree size)
   size = fold_build2 (MAX_EXPR, size_type_node, size,
                      build_int_cst (size_type_node, 1));
 
-  gfc_add_modify_expr (&block2, res,
+  gfc_add_modify (&block2, res,
                       build_call_expr (built_in_decls[BUILT_IN_MALLOC], 1,
                       size));
   null_result = fold_build2 (EQ_EXPR, boolean_type_node, res,
@@ -593,10 +592,10 @@ gfc_allocate_with_status (stmtblock_t * block, tree size, tree status)
       stmtblock_t set_status_block;
 
       gfc_start_block (&set_status_block);
-      gfc_add_modify_expr (&set_status_block,
+      gfc_add_modify (&set_status_block,
                           fold_build1 (INDIRECT_REF, status_type, status),
                           build_int_cst (status_type, LIBERROR_ALLOCATION));
-      gfc_add_modify_expr (&set_status_block, res,
+      gfc_add_modify (&set_status_block, res,
                           build_int_cst (pvoid_type_node, 0));
 
       tmp = fold_build2 (EQ_EXPR, boolean_type_node, status,
@@ -607,7 +606,7 @@ gfc_allocate_with_status (stmtblock_t * block, tree size, tree status)
 
   /* The allocation itself.  */
   gfc_start_block (&alloc_block);
-  gfc_add_modify_expr (&alloc_block, res,
+  gfc_add_modify (&alloc_block, res,
                       build_call_expr (built_in_decls[BUILT_IN_MALLOC], 1,
                                        fold_build2 (MAX_EXPR, size_type_node,
                                                     size,
@@ -689,7 +688,7 @@ gfc_allocate_array_with_status (stmtblock_t * block, tree mem, tree size,
   /* If mem is NULL, we call gfc_allocate_with_status.  */
   gfc_start_block (&alloc_block);
   tmp = gfc_allocate_with_status (&alloc_block, size, status);
-  gfc_add_modify_expr (&alloc_block, res, fold_convert (type, tmp));
+  gfc_add_modify (&alloc_block, res, fold_convert (type, tmp));
   alloc = gfc_finish_block (&alloc_block);
 
   /* Otherwise, we issue a runtime error or set the status variable.  */
@@ -708,9 +707,9 @@ gfc_allocate_array_with_status (stmtblock_t * block, tree mem, tree size,
       gfc_add_expr_to_block (&set_status_block, tmp);
 
       tmp = gfc_allocate_with_status (&set_status_block, size, status);
-      gfc_add_modify_expr (&set_status_block, res, fold_convert (type, tmp));
+      gfc_add_modify (&set_status_block, res, fold_convert (type, tmp));
 
-      gfc_add_modify_expr (&set_status_block,
+      gfc_add_modify (&set_status_block,
                           fold_build1 (INDIRECT_REF, status_type, status),
                           build_int_cst (status_type, LIBERROR_ALLOCATION));
 
@@ -885,7 +884,7 @@ gfc_call_realloc (stmtblock_t * block, tree mem, tree size)
   /* Call realloc and check the result.  */
   tmp = build_call_expr (built_in_decls[BUILT_IN_REALLOC], 2,
                         fold_convert (pvoid_type_node, mem), size);
-  gfc_add_modify_expr (block, res, fold_convert (type, tmp));
+  gfc_add_modify (block, res, fold_convert (type, tmp));
   null_result = fold_build2 (EQ_EXPR, boolean_type_node, res,
                             build_int_cst (pvoid_type_node, 0));
   nonzero = fold_build2 (NE_EXPR, boolean_type_node, size,
@@ -985,7 +984,7 @@ gfc_trans_code (gfc_code * code)
 
   gfc_start_block (&block);
 
-  /* Translate statements one by one to GIMPLE trees until we reach
+  /* Translate statements one by one into GENERIC trees until we reach
      the end of this gfc_code branch.  */
   for (; code; code = code->next)
     {
@@ -1173,7 +1172,7 @@ gfc_trans_code (gfc_code * code)
       if (res != NULL_TREE && ! IS_EMPTY_STMT (res))
        {
          if (TREE_CODE (res) == STATEMENT_LIST)
-           annotate_all_with_locus (&res, input_location);
+           tree_annotate_all_with_location (&res, input_location);
          else
            SET_EXPR_LOCATION (res, input_location);
            
index 7704748..6e09f24 100644
@@ -348,12 +348,8 @@ void gfc_trans_vla_type_sizes (gfc_symbol *, stmtblock_t *);
 void gfc_add_expr_to_block (stmtblock_t *, tree);
 /* Add a block to the end of a block.  */
 void gfc_add_block_to_block (stmtblock_t *, stmtblock_t *);
-/* Add a MODIFY_EXPR or a GIMPLE_MODIFY_STMT to a block.  */
-void gfc_add_modify (stmtblock_t *, tree, tree, bool);
-#define gfc_add_modify_expr(BLOCK, LHS, RHS) \
-       gfc_add_modify ((BLOCK), (LHS), (RHS), false)
-#define gfc_add_modify_stmt(BLOCK, LHS, RHS) \
-       gfc_add_modify ((BLOCK), (LHS), (RHS), true)
+/* Add a MODIFY_EXPR to a block.  */
+void gfc_add_modify (stmtblock_t *, tree, tree);
 
 /* Initialize a statement block.  */
 void gfc_init_block (stmtblock_t *);
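
With GIMPLE_MODIFY_STMT gone, the tuples_p flag and the two wrapper macros are no longer needed, and every caller now uses the three-argument gfc_add_modify directly.  A hedged usage sketch, assuming the usual trans.h declarations are in scope and VAR/VAL are trees of compatible type:

    stmtblock_t block;

    gfc_init_block (&block);
    /* Formerly gfc_add_modify_expr (&block, var, val) or
       gfc_add_modify_stmt (&block, var, val); both now spell:  */
    gfc_add_modify (&block, var, val);
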
index fa2a84d..532a94f 100644
@@ -59,7 +59,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "langhooks.h"
 #include "target.h"
 #include "cfglayout.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "tree-pass.h"
 #include "predict.h"
 #include "df.h"
@@ -1547,7 +1547,7 @@ static tree
 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
 {
   tree t = *tp;
-  if (! EXPR_P (t) && ! GIMPLE_STMT_P (t))
+  if (! EXPR_P (t))
     {
       *walk_subtrees = 0;
       if (DECL_P (t) && DECL_RTL_SET_P (t))
@@ -3092,7 +3092,7 @@ gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
       else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
               && !TYPE_SIZES_GIMPLIFIED (t))
        {
-         gimplify_type_sizes (t, (tree *) data);
+         gimplify_type_sizes (t, (gimple_seq *) data);
          *walk_subtrees = 1;
        }
     }
@@ -3102,15 +3102,15 @@ gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
 
 /* Gimplify the parameter list for current_function_decl.  This involves
    evaluating SAVE_EXPRs of variable sized parameters and generating code
-   to implement callee-copies reference parameters.  Returns a list of
-   statements to add to the beginning of the function, or NULL if nothing
-   to do.  */
+   to implement callee-copies reference parameters.  Returns a sequence of
+   statements to add to the beginning of the function.  */
 
-tree
+gimple_seq
 gimplify_parameters (void)
 {
   struct assign_parm_data_all all;
-  tree fnargs, parm, stmts = NULL;
+  tree fnargs, parm;
+  gimple_seq stmts = NULL;
 
   assign_parms_initialize_all (&all);
   fnargs = assign_parms_augmented_arg_list (&all);
@@ -3170,12 +3170,11 @@ gimplify_parameters (void)
                  t = built_in_decls[BUILT_IN_ALLOCA];
                  t = build_call_expr (t, 1, DECL_SIZE_UNIT (parm));
                  t = fold_convert (ptr_type, t);
-                 t = build_gimple_modify_stmt (addr, t);
+                 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
                  gimplify_and_add (t, &stmts);
                }
 
-             t = build_gimple_modify_stmt (local, parm);
-             gimplify_and_add (t, &stmts);
+             gimplify_assign (local, parm, &stmts);
 
              SET_DECL_VALUE_EXPR (parm, local);
              DECL_HAS_VALUE_EXPR_P (parm) = 1;
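
Two related changes show up in gimplify_parameters: the function now returns a gimple_seq instead of a tree statement list, and the copy of PARM into LOCAL is emitted with gimplify_assign rather than by hand-building a GIMPLE_MODIFY_STMT and passing it to gimplify_and_add.  A minimal sketch of the new idiom (LHS and RHS are hypothetical trees):

    gimple_seq stmts = NULL;

    /* Gimplify RHS as needed and append a GIMPLE_ASSIGN to STMTS.  */
    gimplify_assign (lhs, rhs, &stmts);
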
index ece44fd..9de0181 100644
@@ -428,6 +428,10 @@ struct function GTY(())
 
   /* The control flow graph for this function.  */
   struct control_flow_graph *cfg;
+
+  /* GIMPLE body for this function.  */
+  struct gimple_seq_d *gimple_body;
+
   /* SSA and dataflow information.  */
   struct gimple_df *gimple_df;
 
index f7309b1..adc355a 100644
@@ -1537,7 +1537,7 @@ open_base_files (void)
       "hard-reg-set.h", "basic-block.h", "cselib.h", "insn-addr.h",
       "optabs.h", "libfuncs.h", "debug.h", "ggc.h", "cgraph.h",
       "tree-flow.h", "reload.h", "cpp-id-data.h", "tree-chrec.h",
-      "cfglayout.h", "except.h", "output.h", "cfgloop.h", NULL
+      "cfglayout.h", "except.h", "output.h", "gimple.h", "cfgloop.h", NULL
     };
     const char *const *ifp;
     outf_p gtype_desc_c;
index 98a6c28..0afe0d8 100644
@@ -172,7 +172,6 @@ along with GCC; see the file COPYING3.  If not see
    thing you need to do to add a new special allocation size.  */
 
 static const size_t extra_order_size_table[] = {
-  sizeof (struct stmt_ann_d),
   sizeof (struct var_ann_d),
   sizeof (struct tree_decl_non_common),
   sizeof (struct tree_field_decl),
@@ -184,9 +183,6 @@ static const size_t extra_order_size_table[] = {
   sizeof (struct basic_block_def),
   sizeof (bitmap_element),
   sizeof (bitmap_head),
-  /* PHI nodes with one to three arguments are already covered by the
-     above sizes.  */
-  sizeof (struct tree_phi_node) + sizeof (struct phi_arg_d) * 3,
   TREE_EXP_SIZE (2),
   RTL_SIZE (2),                        /* MEM, PLUS, etc.  */
   RTL_SIZE (9),                        /* INSN */
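
The new gcc/gimple-iterator.c below provides the gsi_* API that replaces the old block_stmt_iterator routines: iterators walk a gimple_seq (optionally attached to a basic block) and support insertion, removal, splitting and edge insertion.  A hedged usage sketch of the common pattern, assuming BB is a basic_block and NEW_STMT a gimple statement built elsewhere:

    gimple_stmt_iterator gsi;

    /* Walk the statements of BB and insert NEW_STMT before the first
       call statement found, leaving the iterator where it was.  */
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      if (is_gimple_call (gsi_stmt (gsi)))
        {
          gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
          break;
        }
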
diff --git a/gcc/gimple-iterator.c b/gcc/gimple-iterator.c
new file mode 100644
index 0000000..a52c830
--- /dev/null
@@ -0,0 +1,771 @@
+/* Iterator routines for GIMPLE statements.
+   Copyright (C) 2007, 2008 Free Software Foundation, Inc.
+   Contributed by Aldy Hernandez  <aldy@quesejoda.com>
+
+This file is part of GCC.
+
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 3, or (at your option) any later
+version.
+
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
+FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+for more details.
+
+You should have received a copy of the GNU General Public License
+along with GCC; see the file COPYING3.  If not see
+<http://www.gnu.org/licenses/>.  */
+
+#include "config.h"
+#include "system.h"
+#include "coretypes.h"
+#include "tm.h"
+#include "tree.h"
+#include "gimple.h"
+#include "tree-flow.h"
+#include "value-prof.h"
+
+
+/* Mark the statement STMT as modified, and update it.  */
+
+static inline void
+update_modified_stmt (gimple stmt)
+{
+  if (!ssa_operands_active ())
+    return;
+  update_stmt_if_modified (stmt);
+}
+
+
+/* Mark the statements in SEQ as modified, and update them.  */
+
+static void
+update_modified_stmts (gimple_seq seq)
+{
+  gimple_stmt_iterator gsi;
+  if (!ssa_operands_active ())
+    return;  
+  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
+    update_stmt_if_modified (gsi_stmt (gsi));
+}
+
+
+/* Set BB to be the basic block for all the statements in the list
+   starting at FIRST.  */
+
+static void
+update_bb_for_stmts (gimple_seq_node first, basic_block bb)
+{
+  gimple_seq_node n;
+  
+  for (n = first; n; n = n->next)
+    gimple_set_bb (n->stmt, bb);
+}
+
+
+/* Insert the sequence delimited by nodes FIRST and LAST before
+   iterator I.  M specifies how to update iterator I after insertion
+   (see enum gsi_iterator_update).
+
+   This routine assumes that there is a forward and backward path
+   between FIRST and LAST (i.e., they are linked in a doubly-linked
+   list).  Additionally, if FIRST == LAST, this routine will properly
+   insert a single node.  */
+
+static void
+gsi_insert_seq_nodes_before (gimple_stmt_iterator *i,
+                            gimple_seq_node first,
+                            gimple_seq_node last,
+                            enum gsi_iterator_update mode)
+{
+  basic_block bb;
+  gimple_seq_node cur = i->ptr;
+
+  if ((bb = gsi_bb (*i)) != NULL)
+    update_bb_for_stmts (first, bb);
+
+  /* Link SEQ before CUR in the sequence.  */
+  if (cur)
+    {
+      first->prev = cur->prev;
+      if (first->prev)
+       first->prev->next = first;
+      else
+       gimple_seq_set_first (i->seq, first);
+      last->next = cur;
+      cur->prev = last;
+    }
+  else
+    {
+      gimple_seq_node itlast = gimple_seq_last (i->seq);
+
+      /* If CUR is NULL, we link at the end of the sequence (this case happens
+        when gsi_after_labels is called for a basic block that contains only
+        labels, so it returns an iterator after the end of the block, and
+        we need to insert before it; it might be cleaner to add a flag to the
+        iterator saying whether we are at the start or end of the list).  */
+      first->prev = itlast;
+      if (itlast)
+       itlast->next = first;
+      else
+       gimple_seq_set_first (i->seq, first);
+      gimple_seq_set_last (i->seq, last);
+    }
+
+  /* Update the iterator, if requested.  */
+  switch (mode)
+    {
+    case GSI_NEW_STMT:
+    case GSI_CONTINUE_LINKING:
+      i->ptr = first;
+      break;
+    case GSI_SAME_STMT:
+      break;
+    default:
+      gcc_unreachable ();
+    }
+}
+
+
+/* Inserts the sequence of statements SEQ before the statement pointed
+   by iterator I.  MODE indicates what to do with the iterator after
+   insertion (see enum gsi_iterator_update).
+
+   This function does not scan for new operands.  It is provided for
+   the use of the gimplifier, which manipulates statements for which
+   def/use information has not yet been constructed.  Most callers
+   should use gsi_insert_seq_before.  */
+
+void
+gsi_insert_seq_before_without_update (gimple_stmt_iterator *i, gimple_seq seq,
+                                      enum gsi_iterator_update mode)
+{
+  gimple_seq_node first, last;
+
+  if (seq == NULL)
+    return;
+
+  /* Don't allow inserting a sequence into itself.  */
+  gcc_assert (seq != i->seq);
+
+  first = gimple_seq_first (seq);
+  last = gimple_seq_last (seq);
+
+  gimple_seq_set_first (seq, NULL);
+  gimple_seq_set_last (seq, NULL);
+  gimple_seq_free (seq);
+
+  /* Empty sequences need no work.  */
+  if (!first || !last)
+    {
+      gcc_assert (first == last);
+      return;
+    }
+
+  gsi_insert_seq_nodes_before (i, first, last, mode);
+}
+
+
+/* Inserts the sequence of statements SEQ before the statement pointed
+   by iterator I.  MODE indicates what to do with the iterator after
+   insertion (see enum gsi_iterator_update). Scan the statements in SEQ
+   for new operands.  */
+
+void
+gsi_insert_seq_before (gimple_stmt_iterator *i, gimple_seq seq,
+                      enum gsi_iterator_update mode)
+{
+  update_modified_stmts (seq);
+  gsi_insert_seq_before_without_update (i, seq, mode);
+}
+
+
+/* Insert the sequence delimited by nodes FIRST and LAST after
+   iterator I.  M specifies how to update iterator I after insertion
+   (see enum gsi_iterator_update).
+
+   This routine assumes that there is a forward and backward path
+   between FIRST and LAST (i.e., they are linked in a doubly-linked
+   list).  Additionally, if FIRST == LAST, this routine will properly
+   insert a single node.  */
+
+static void
+gsi_insert_seq_nodes_after (gimple_stmt_iterator *i,
+                           gimple_seq_node first,
+                           gimple_seq_node last,
+                           enum gsi_iterator_update m)
+{
+  basic_block bb;
+  gimple_seq_node cur = i->ptr;
+
+  /* If the iterator is inside a basic block, we need to update the
+     basic block information for all the nodes between FIRST and LAST.  */
+  if ((bb = gsi_bb (*i)) != NULL)
+    update_bb_for_stmts (first, bb);
+
+  /* Link SEQ after CUR.  */
+  if (cur)
+    {
+      last->next = cur->next;
+      if (last->next)
+       last->next->prev = last;
+      else
+       gimple_seq_set_last (i->seq, last);
+      first->prev = cur;
+      cur->next = first;
+    }
+  else
+    {
+      gcc_assert (!gimple_seq_last (i->seq));
+      gimple_seq_set_first (i->seq, first);
+      gimple_seq_set_last (i->seq, last);
+    }
+
+  /* Update the iterator, if requested.  */
+  switch (m)
+    {
+    case GSI_NEW_STMT:
+      i->ptr = first;
+      break;
+    case GSI_CONTINUE_LINKING:
+      i->ptr = last;
+      break;
+    case GSI_SAME_STMT:
+      gcc_assert (cur);
+      break;
+    default:
+      gcc_unreachable ();
+    }
+}
+
+
+/* Links sequence SEQ after the statement pointed-to by iterator I.
+   MODE is as in gsi_insert_after.
+
+   This function does not scan for new operands.  It is provided for
+   the use of the gimplifier, which manipulates statements for which
+   def/use information has not yet been constructed.  Most callers
+   should use gsi_insert_seq_after.  */
+
+void
+gsi_insert_seq_after_without_update (gimple_stmt_iterator *i, gimple_seq seq,
+                                     enum gsi_iterator_update mode)
+{
+  gimple_seq_node first, last;
+
+  if (seq == NULL)
+    return;
+
+  /* Don't allow inserting a sequence into itself.  */
+  gcc_assert (seq != i->seq);
+
+  first = gimple_seq_first (seq);
+  last = gimple_seq_last (seq);
+
+  gimple_seq_set_first (seq, NULL);
+  gimple_seq_set_last (seq, NULL);
+  gimple_seq_free (seq);
+
+  /* Empty sequences need no work.  */
+  if (!first || !last)
+    {
+      gcc_assert (first == last);
+      return;
+    }
+
+  gsi_insert_seq_nodes_after (i, first, last, mode);
+}
+
+
+/* Links sequence SEQ after the statement pointed-to by iterator I.
+   MODE is as in gsi_insert_after.  Scan the statements in SEQ
+   for new operands.  */
+
+void
+gsi_insert_seq_after (gimple_stmt_iterator *i, gimple_seq seq,
+                     enum gsi_iterator_update mode)
+{
+  update_modified_stmts (seq);
+  gsi_insert_seq_after_without_update (i, seq, mode);
+}
+
+
+/* Move all statements in the sequence after I to a new sequence.
+   Return this new sequence.  */
+
+gimple_seq
+gsi_split_seq_after (gimple_stmt_iterator i)
+{
+  gimple_seq_node cur, next;
+  gimple_seq old_seq, new_seq;
+
+  cur = i.ptr;
+
+  /* How can we possibly split after the end, or before the beginning?  */
+  gcc_assert (cur && cur->next);
+  next = cur->next;
+
+  old_seq = i.seq;
+  new_seq = gimple_seq_alloc ();
+
+  gimple_seq_set_first (new_seq, next);
+  gimple_seq_set_last (new_seq, gimple_seq_last (old_seq));
+  gimple_seq_set_last (old_seq, cur);
+  cur->next = NULL;
+  next->prev = NULL;
+
+  return new_seq;
+}
+
+
+/* Move all statements in the sequence before I to a new sequence.
+   Return this new sequence.  I is set to the head of the new list.  */
+
+gimple_seq
+gsi_split_seq_before (gimple_stmt_iterator *i)
+{
+  gimple_seq_node cur, prev;
+  gimple_seq old_seq, new_seq;
+
+  cur = i->ptr;
+
+  /* How can we possibly split after the end?  */
+  gcc_assert (cur);
+  prev = cur->prev;
+
+  old_seq = i->seq;
+  new_seq = gimple_seq_alloc ();
+  i->seq = new_seq;
+
+  /* Set the limits on NEW_SEQ.  */
+  gimple_seq_set_first (new_seq, cur);
+  gimple_seq_set_last (new_seq, gimple_seq_last (old_seq));
+
+  /* Cut OLD_SEQ before I.  */
+  gimple_seq_set_last (old_seq, prev);
+  cur->prev = NULL;
+  if (prev)
+    prev->next = NULL;
+  else
+    gimple_seq_set_first (old_seq, NULL);
+
+  return new_seq;
+}
+
+
+/* Replace the statement pointed-to by GSI with STMT.  If UPDATE_EH_INFO
+   is true, the exception handling information of the original
+   statement is moved to the new statement.  */
+
+void
+gsi_replace (gimple_stmt_iterator *gsi, gimple stmt, bool update_eh_info)
+{
+  int eh_region;
+  gimple orig_stmt = gsi_stmt (*gsi);
+
+  if (stmt == orig_stmt)
+    return;
+
+  gimple_set_location (stmt, gimple_location (orig_stmt));
+  gimple_set_bb (stmt, gsi_bb (*gsi));
+
+  /* Preserve EH region information from the original statement, if
+     requested by the caller.  */
+  if (update_eh_info)
+    {
+      eh_region = lookup_stmt_eh_region (orig_stmt);
+      if (eh_region >= 0)
+       {
+         remove_stmt_from_eh_region (orig_stmt);
+         add_stmt_to_eh_region (stmt, eh_region);
+       }
+    }
+
+  gimple_duplicate_stmt_histograms (cfun, stmt, cfun, orig_stmt);
+  gimple_remove_stmt_histograms (cfun, orig_stmt);
+  delink_stmt_imm_use (orig_stmt);
+  *gsi_stmt_ptr (gsi) = stmt;
+  gimple_set_modified (stmt, true);
+  update_modified_stmt (stmt);
+}
+
+
+/* Insert statement STMT before the statement pointed-to by iterator I.
+   M specifies how to update iterator I after insertion (see enum
+   gsi_iterator_update).
+
+   This function does not scan for new operands.  It is provided for
+   the use of the gimplifier, which manipulates statements for which
+   def/use information has not yet been constructed.  Most callers
+   should use gsi_insert_before.  */
+
+void
+gsi_insert_before_without_update (gimple_stmt_iterator *i, gimple stmt,
+                                  enum gsi_iterator_update m)
+{
+  gimple_seq_node n;
+
+  n = GGC_NEW (struct gimple_seq_node_d);
+  n->prev = n->next = NULL;
+  n->stmt = stmt;
+  gsi_insert_seq_nodes_before (i, n, n, m);
+}
+
+/* Insert statement STMT before the statement pointed-to by iterator I.
+   Update STMT's basic block and scan it for new operands.  M
+   specifies how to update iterator I after insertion (see enum
+   gsi_iterator_update).  */
+
+void
+gsi_insert_before (gimple_stmt_iterator *i, gimple stmt,
+                   enum gsi_iterator_update m)
+{
+  update_modified_stmt (stmt);
+  gsi_insert_before_without_update (i, stmt, m);
+}
+
+
+/* Insert statement STMT after the statement pointed-to by iterator I.
+   M specifies how to update iterator I after insertion (see enum
+   gsi_iterator_update).
+
+   This function does not scan for new operands.  It is provided for
+   the use of the gimplifier, which manipulates statements for which
+   def/use information has not yet been constructed.  Most callers
+   should use gsi_insert_after.  */
+
+void
+gsi_insert_after_without_update (gimple_stmt_iterator *i, gimple stmt,
+                                 enum gsi_iterator_update m)
+{
+  gimple_seq_node n;
+
+  n = GGC_NEW (struct gimple_seq_node_d);
+  n->prev = n->next = NULL;
+  n->stmt = stmt;
+  gsi_insert_seq_nodes_after (i, n, n, m);
+}
+
+
+/* Insert statement STMT after the statement pointed-to by iterator I.
+   Update STMT's basic block and scan it for new operands.  M
+   specifies how to update iterator I after insertion (see enum
+   gsi_iterator_update).  */
+
+void
+gsi_insert_after (gimple_stmt_iterator *i, gimple stmt,
+                 enum gsi_iterator_update m)
+{
+  update_modified_stmt (stmt);
+  gsi_insert_after_without_update (i, stmt, m);
+}
+
+
+/* Remove the current stmt from the sequence.  The iterator is updated
+   to point to the next statement.
+
+   REMOVE_PERMANENTLY is true when the statement is going to be removed
+   from the IL and not reinserted elsewhere.  In that case we remove the
+   statement pointed to by iterator I from the EH tables, and free its
+   operand caches.  Otherwise we do not modify this information.  */
+
+void
+gsi_remove (gimple_stmt_iterator *i, bool remove_permanently)
+{
+  gimple_seq_node cur, next, prev;
+  gimple stmt = gsi_stmt (*i);
+
+  /* Free all the data flow information for STMT.  */
+  gimple_set_bb (stmt, NULL);
+  delink_stmt_imm_use (stmt);
+  gimple_set_modified (stmt, true);
+
+  if (remove_permanently)
+    {
+      remove_stmt_from_eh_region (stmt);
+      gimple_remove_stmt_histograms (cfun, stmt);
+    }
+
+  /* Update the iterator and re-wire the links in I->SEQ.  */
+  cur = i->ptr;
+  next = cur->next;
+  prev = cur->prev;
+
+  if (prev)
+    prev->next = next;
+  else
+    gimple_seq_set_first (i->seq, next);
+
+  if (next)
+    next->prev = prev;
+  else
+    gimple_seq_set_last (i->seq, prev);
+
+  i->ptr = next;
+}
+
+
+/* Finds iterator for STMT.  */
+
+gimple_stmt_iterator
+gsi_for_stmt (gimple stmt)
+{
+  gimple_stmt_iterator i;
+  basic_block bb = gimple_bb (stmt);
+
+  if (gimple_code (stmt) == GIMPLE_PHI)
+    i = gsi_start_phis (bb);
+  else
+    i = gsi_start_bb (bb);
+
+  for (; !gsi_end_p (i); gsi_next (&i))
+    if (gsi_stmt (i) == stmt)
+      return i;
+
+  gcc_unreachable ();
+}
+
+
+/* Move the statement at FROM so it comes right after the statement at TO.  */
+
+void
+gsi_move_after (gimple_stmt_iterator *from, gimple_stmt_iterator *to)
+{
+  gimple stmt = gsi_stmt (*from);
+  gsi_remove (from, false);
+
+  /* We must have GSI_NEW_STMT here, as gsi_move_after is sometimes used to
+     move statements to an empty block.  */
+  gsi_insert_after (to, stmt, GSI_NEW_STMT);
+}
+
+
+/* Move the statement at FROM so it comes right before the statement
+   at TO.  */
+
+void
+gsi_move_before (gimple_stmt_iterator *from, gimple_stmt_iterator *to)
+{
+  gimple stmt = gsi_stmt (*from);
+  gsi_remove (from, false);
+
+  /* For consistency with gsi_move_after, it might be better to have
+     GSI_NEW_STMT here; however, that breaks several places that expect
+     that TO does not change.  */
+  gsi_insert_before (to, stmt, GSI_SAME_STMT);
+}
+
+
+/* Move the statement at FROM to the end of basic block BB.  */
+
+void
+gsi_move_to_bb_end (gimple_stmt_iterator *from, basic_block bb)
+{
+  gimple_stmt_iterator last = gsi_last_bb (bb);
+#ifdef ENABLE_CHECKING
+  gcc_assert (gsi_bb (last) == bb);
+#endif
+
+  /* Have to check gsi_end_p because it could be an empty block.  */
+  if (!gsi_end_p (last) && is_ctrl_stmt (gsi_stmt (last)))
+    gsi_move_before (from, &last);
+  else
+    gsi_move_after (from, &last);
+}
+
+
+/* Add STMT to the pending list of edge E.  No actual insertion is
+   made until a call to gsi_commit_edge_inserts () is made.  */
+
+void
+gsi_insert_on_edge (edge e, gimple stmt)
+{
+  gimple_seq_add_stmt (&PENDING_STMT (e), stmt);
+}
+
+/* Add the sequence of statements SEQ to the pending list of edge E.
+   No actual insertion is made until a call to gsi_commit_edge_inserts
+   is made.  */
+
+void
+gsi_insert_seq_on_edge (edge e, gimple_seq seq)
+{
+  gimple_seq_add_seq (&PENDING_STMT (e), seq);
+}
+
+
+/* Insert the statement pointed-to by GSI into edge E.  Every attempt
+   is made to place the statement in an existing basic block, but
+   sometimes that isn't possible.  When it isn't possible, the edge is
+   split and the statement is added to the new block.
+
+   In all cases, the returned *GSI points to the correct location.  The
+   return value is true if insertion should be done after the location,
+   or false if it should be done before the location.  If new basic block
+   has to be created, it is stored in *NEW_BB.  */
+
+static bool
+gimple_find_edge_insert_loc (edge e, gimple_stmt_iterator *gsi,
+                            basic_block *new_bb)
+{
+  basic_block dest, src;
+  gimple tmp;
+
+  dest = e->dest;
+
+  /* If the destination has one predecessor which has no PHI nodes,
+     insert there.  Except for the exit block.
+
+     The requirement for no PHI nodes could be relaxed.  Basically we
+     would have to examine the PHIs to prove that none of them used
+     the value set by the statement we want to insert on E.  That
+     hardly seems worth the effort.  */
+restart:
+  if (single_pred_p (dest)
+      && ! phi_nodes (dest)
+      && dest != EXIT_BLOCK_PTR)
+    {
+      *gsi = gsi_start_bb (dest);
+      if (gsi_end_p (*gsi))
+       return true;
+
+      /* Make sure we insert after any leading labels.  */
+      tmp = gsi_stmt (*gsi);
+      while (gimple_code (tmp) == GIMPLE_LABEL)
+       {
+         gsi_next (gsi);
+         if (gsi_end_p (*gsi))
+           break;
+         tmp = gsi_stmt (*gsi);
+       }
+
+      if (gsi_end_p (*gsi))
+       {
+         *gsi = gsi_last_bb (dest);
+         return true;
+       }
+      else
+       return false;
+    }
+
+  /* If the source has one successor, the edge is not abnormal and
+     the last statement does not end a basic block, insert there.
+     Except for the entry block.  */
+  src = e->src;
+  if ((e->flags & EDGE_ABNORMAL) == 0
+      && single_succ_p (src)
+      && src != ENTRY_BLOCK_PTR)
+    {
+      *gsi = gsi_last_bb (src);
+      if (gsi_end_p (*gsi))
+       return true;
+
+      tmp = gsi_stmt (*gsi);
+      if (!stmt_ends_bb_p (tmp))
+       return true;
+
+      if (gimple_code (tmp) == GIMPLE_RETURN)
+        {
+         gsi_prev (gsi);
+         return true;
+        }
+    }
+
+  /* Otherwise, create a new basic block, and split this edge.  */
+  dest = split_edge (e);
+  if (new_bb)
+    *new_bb = dest;
+  e = single_pred_edge (dest);
+  goto restart;
+}
+
+
+/* Similar to gsi_insert_on_edge+gsi_commit_edge_inserts.  If a new
+   block has to be created, it is returned.  */
+
+basic_block
+gsi_insert_on_edge_immediate (edge e, gimple stmt)
+{
+  gimple_stmt_iterator gsi;
+  basic_block new_bb = NULL;
+
+  gcc_assert (!PENDING_STMT (e));
+
+  if (gimple_find_edge_insert_loc (e, &gsi, &new_bb))
+    gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
+  else
+    gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
+
+  return new_bb;
+}
+
+/* Insert STMTS on edge E.  If a new block has to be created, it
+   is returned.  */
+
+basic_block
+gsi_insert_seq_on_edge_immediate (edge e, gimple_seq stmts)
+{
+  gimple_stmt_iterator gsi;
+  basic_block new_bb = NULL;
+
+  gcc_assert (!PENDING_STMT (e));
+
+  if (gimple_find_edge_insert_loc (e, &gsi, &new_bb))
+    gsi_insert_seq_after (&gsi, stmts, GSI_NEW_STMT);
+  else
+    gsi_insert_seq_before (&gsi, stmts, GSI_NEW_STMT);
+
+  return new_bb;
+}
+
+/* This routine will commit all pending edge insertions, creating any new
+   basic blocks which are necessary.  */
+
+void
+gsi_commit_edge_inserts (void)
+{
+  basic_block bb;
+  edge e;
+  edge_iterator ei;
+
+  gsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
+
+  FOR_EACH_BB (bb)
+    FOR_EACH_EDGE (e, ei, bb->succs)
+      gsi_commit_one_edge_insert (e, NULL);
+}
+
+
+/* Commit insertions pending at edge E. If a new block is created, set NEW_BB
+   to this block, otherwise set it to NULL.  */
+
+void
+gsi_commit_one_edge_insert (edge e, basic_block *new_bb)
+{
+  if (new_bb)
+    *new_bb = NULL;
+
+  if (PENDING_STMT (e))
+    {
+      gimple_stmt_iterator gsi;
+      gimple_seq seq = PENDING_STMT (e);
+
+      PENDING_STMT (e) = NULL;
+
+      if (gimple_find_edge_insert_loc (e, &gsi, new_bb))
+       gsi_insert_seq_after (&gsi, seq, GSI_NEW_STMT);
+      else
+       gsi_insert_seq_before (&gsi, seq, GSI_NEW_STMT);
+    }
+}
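A common usage pattern for this edge-insertion API, sketched with hypothetical helpers (edge_needs_fixup_p and build_fixup_stmt are placeholders, not real functions):

    basic_block bb;
    edge e;
    edge_iterator ei;

    FOR_EACH_BB (bb)
      FOR_EACH_EDGE (e, ei, bb->succs)
        if (edge_needs_fixup_p (e))
          gsi_insert_on_edge (e, build_fixup_stmt (e));

    /* Materialize all pending statements at once; edges are split only
       where no existing block can hold the new statements.  */
    gsi_commit_edge_inserts ();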
+
+/* Returns iterator at the start of the list of phi nodes of BB.  */
+
+gimple_stmt_iterator
+gsi_start_phis (basic_block bb)
+{
+  return gsi_start (phi_nodes (bb));
+}
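The PHI iterator composes with the generic iteration idiom used elsewhere in this file; a brief usage sketch:

    gimple_stmt_iterator gsi;

    for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple phi = gsi_stmt (gsi);
        /* Inspect or transform PHI here.  */
      }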
index 99175d5..cc77874 100644
@@ -1,4 +1,4 @@
-/* Tree lowering pass.  Lowers GIMPLE into unstructured form.
+/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.
 
    Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
 
@@ -25,7 +25,8 @@ along with GCC; see the file COPYING3.  If not see
 #include "tree.h"
 #include "rtl.h"
 #include "varray.h"
-#include "tree-gimple.h"
+#include "gimple.h"
+#include "tree-iterator.h"
 #include "tree-inline.h"
 #include "diagnostic.h"
 #include "langhooks.h"
@@ -40,76 +41,123 @@ along with GCC; see the file COPYING3.  If not see
 #include "toplev.h"
 #include "tree-pass.h"
 
+/* The differences between High GIMPLE and Low GIMPLE are the
+   following:
+
+   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).
+
+   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
+      flow and exception regions are built as an on-the-side region
+      hierarchy (See tree-eh.c:lower_eh_constructs).
+
+   3- Multiple identical return statements are grouped into a single
+      return statement, and the duplicates are replaced by gotos to the
+      unique return site.  */
+
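A schematic illustration of point 3, written as C-like pseudo-code rather than actual GIMPLE dump syntax:

    /* Before lowering:  */
    if (a) return x;
    ...
    return x;

    /* After lowering: a single return remains, behind an artificial label,
       and the earlier return site becomes a goto to it.  */
    if (a) goto ret_label;
    ...
    ret_label:
    return x;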
+/* Match a return statement with a label.  During lowering, we identify
+   identical return statements and replace duplicates with a jump to
+   the corresponding label.  */
+struct return_statements_t
+{
+  tree label;
+  gimple stmt;
+};
+typedef struct return_statements_t return_statements_t;
+
+DEF_VEC_O(return_statements_t);
+DEF_VEC_ALLOC_O(return_statements_t,heap);
+
 struct lower_data
 {
   /* Block the current statement belongs to.  */
   tree block;
 
-  /* A TREE_LIST of label and return statements to be moved to the end
+  /* A vector of label and return statements to be moved to the end
      of the function.  */
-  tree return_statements;
+  VEC(return_statements_t,heap) *return_statements;
 
   /* True if the function calls __builtin_setjmp.  */
   bool calls_builtin_setjmp;
 };
 
-static void lower_stmt (tree_stmt_iterator *, struct lower_data *);
-static void lower_bind_expr (tree_stmt_iterator *, struct lower_data *);
-static void lower_cond_expr (tree_stmt_iterator *, struct lower_data *);
-static void lower_return_expr (tree_stmt_iterator *, struct lower_data *);
-static void lower_builtin_setjmp (tree_stmt_iterator *);
+static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
+static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
+static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
+static void lower_builtin_setjmp (gimple_stmt_iterator *);
 
-/* Lower the body of current_function_decl.  */
+
+/* Lower the body of current_function_decl from High GIMPLE into Low
+   GIMPLE.  */
 
 static unsigned int
 lower_function_body (void)
 {
   struct lower_data data;
-  tree *body_p = &DECL_SAVED_TREE (current_function_decl);
-  tree bind = *body_p;
-  tree_stmt_iterator i;
-  tree t, x;
-
-  gcc_assert (TREE_CODE (bind) == BIND_EXPR);
+  gimple_seq body = gimple_body (current_function_decl);
+  gimple_seq lowered_body;
+  gimple_stmt_iterator i;
+  gimple bind;
+  tree t;
+  gimple x;
+
+  /* The gimplifier should've left a body of exactly one statement,
+     namely a GIMPLE_BIND.  */
+  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
+             && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);
 
   memset (&data, 0, sizeof (data));
   data.block = DECL_INITIAL (current_function_decl);
   BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
   BLOCK_CHAIN (data.block) = NULL_TREE;
   TREE_ASM_WRITTEN (data.block) = 1;
+  data.return_statements = VEC_alloc (return_statements_t, heap, 8);
+
+  bind = gimple_seq_first_stmt (body);
+  lowered_body = NULL;
+  gimple_seq_add_stmt (&lowered_body, bind);
+  i = gsi_start (lowered_body);
+  lower_gimple_bind (&i, &data);
 
-  *body_p = alloc_stmt_list ();
-  i = tsi_start (*body_p);
-  tsi_link_after (&i, bind, TSI_NEW_STMT);
-  lower_bind_expr (&i, &data);
+  /* Once the old body has been lowered, replace it with the new
+     lowered sequence.  */
+  gimple_set_body (current_function_decl, lowered_body);
 
-  i = tsi_last (*body_p);
+  i = gsi_last (lowered_body);
 
   /* If the function falls off the end, we need a null return statement.
-     If we've already got one in the return_statements list, we don't
+     If we've already got one in the return_statements vector, we don't
      need to do anything special.  Otherwise build one by hand.  */
-  if (block_may_fallthru (*body_p)
-      && (data.return_statements == NULL
-          || TREE_OPERAND (TREE_VALUE (data.return_statements), 0) != NULL))
+  if (gimple_seq_may_fallthru (lowered_body)
+      && (VEC_empty (return_statements_t, data.return_statements)
+         || gimple_return_retval (VEC_last (return_statements_t,
+                                  data.return_statements)->stmt) != NULL))
     {
-      x = build1 (RETURN_EXPR, void_type_node, NULL);
-      SET_EXPR_LOCATION (x, cfun->function_end_locus);
-      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
+      x = gimple_build_return (NULL);
+      gimple_set_location (x, cfun->function_end_locus);
+      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
     }
 
   /* If we lowered any return statements, emit the representative
      at the end of the function.  */
-  for (t = data.return_statements ; t ; t = TREE_CHAIN (t))
+  while (!VEC_empty (return_statements_t, data.return_statements))
     {
-      x = build1 (LABEL_EXPR, void_type_node, TREE_PURPOSE (t));
-      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
+      return_statements_t t;
+
+      /* Unfortunately, we can't use VEC_pop because it returns void for
+        objects.  */
+      t = *VEC_last (return_statements_t, data.return_statements);
+      VEC_truncate (return_statements_t,
+                   data.return_statements,
+                   VEC_length (return_statements_t,
+                               data.return_statements) - 1);
+
+      x = gimple_build_label (t.label);
+      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
 
       /* Remove the line number from the representative return statement.
         It now fills in for many such returns.  Failure to remove this
         will result in incorrect results for coverage analysis.  */
-      x = TREE_VALUE (t);
-      SET_EXPR_LOCATION (x, UNKNOWN_LOCATION);
-      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
+      gimple_set_location (t.stmt, UNKNOWN_LOCATION);
+      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
     }
 
   /* If the function calls __builtin_setjmp, we need to emit the computed
@@ -123,21 +171,21 @@ lower_function_body (void)
       /* This mark will create forward edges from every call site.  */
       DECL_NONLOCAL (disp_label) = 1;
       cfun->has_nonlocal_label = 1;
-      x = build1 (LABEL_EXPR, void_type_node, disp_label);
-      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
+      x = gimple_build_label (disp_label);
+      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
 
       /* Build 'DISP_VAR = __builtin_setjmp_dispatcher (DISP_LABEL);'
         and insert.  */
       disp_var = create_tmp_var (ptr_type_node, "setjmpvar");
       arg = build_addr (disp_label, current_function_decl);
       t = implicit_built_in_decls[BUILT_IN_SETJMP_DISPATCHER];
-      t = build_call_expr (t, 1, arg);
-      x = build_gimple_modify_stmt (disp_var, t);
+      x = gimple_build_call (t, 1, arg);
+      gimple_call_set_lhs (x, disp_var);
 
       /* Build 'goto DISP_VAR;' and insert.  */
-      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
-      x = build1 (GOTO_EXPR, void_type_node, disp_var);
-      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
+      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
+      x = gimple_build_goto (disp_var);
+      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
     }
 
   gcc_assert (data.block == DECL_INITIAL (current_function_decl));
@@ -145,6 +193,7 @@ lower_function_body (void)
     = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));
 
   clear_block_marks (data.block);
+  VEC_free(return_statements_t, heap, data.return_statements);
   return 0;
 }
 
@@ -168,134 +217,131 @@ struct gimple_opt_pass pass_lower_cf =
 };
 
 
-/* Lower the EXPR.  Unlike gimplification the statements are not relowered
+/* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
    when they are changed -- if this has to be done, the lowering routine must
    do it explicitly.  DATA is passed through the recursion.  */
 
 static void
-lower_stmt_body (tree expr, struct lower_data *data)
+lower_sequence (gimple_seq seq, struct lower_data *data)
 {
-  tree_stmt_iterator tsi;
+  gimple_stmt_iterator gsi;
 
-  for (tsi = tsi_start (expr); !tsi_end_p (tsi); )
-    lower_stmt (&tsi, data);
+  for (gsi = gsi_start (seq); !gsi_end_p (gsi); )
+    lower_stmt (&gsi, data);
 }
 
 
-/* Lower the OpenMP directive statement pointed by TSI.  DATA is
+/* Lower the OpenMP directive statement pointed by GSI.  DATA is
    passed through the recursion.  */
 
 static void
-lower_omp_directive (tree_stmt_iterator *tsi, struct lower_data *data)
+lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
 {
-  tree stmt;
+  gimple stmt;
   
-  stmt = tsi_stmt (*tsi);
+  stmt = gsi_stmt (*gsi);
 
-  lower_stmt_body (OMP_BODY (stmt), data);
-  tsi_link_before (tsi, stmt, TSI_SAME_STMT);
-  tsi_link_before (tsi, OMP_BODY (stmt), TSI_SAME_STMT);
-  OMP_BODY (stmt) = NULL_TREE;
-  tsi_delink (tsi);
+  lower_sequence (gimple_omp_body (stmt), data);
+  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
+  gsi_insert_seq_before (gsi, gimple_omp_body (stmt), GSI_SAME_STMT);
+  gimple_omp_set_body (stmt, NULL);
+  gsi_remove (gsi, false);
 }
 
 
-/* Lower statement TSI.  DATA is passed through the recursion.  */
+/* Lower statement GSI.  DATA is passed through the recursion.  */
 
 static void
-lower_stmt (tree_stmt_iterator *tsi, struct lower_data *data)
+lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
 {
-  tree stmt = tsi_stmt (*tsi);
+  gimple stmt = gsi_stmt (*gsi);
 
-  if (EXPR_HAS_LOCATION (stmt) && data)
-    TREE_BLOCK (stmt) = data->block;
+  gimple_set_block (stmt, data->block);
 
-  switch (TREE_CODE (stmt))
+  switch (gimple_code (stmt))
     {
-    case BIND_EXPR:
-      lower_bind_expr (tsi, data);
-      return;
-    case COND_EXPR:
-      lower_cond_expr (tsi, data);
-      return;
-    case RETURN_EXPR:
-      lower_return_expr (tsi, data);
+    case GIMPLE_BIND:
+      lower_gimple_bind (gsi, data);
       return;
 
-    case TRY_FINALLY_EXPR:
-    case TRY_CATCH_EXPR:
-      lower_stmt_body (TREE_OPERAND (stmt, 0), data);
-      lower_stmt_body (TREE_OPERAND (stmt, 1), data);
+    case GIMPLE_COND:
+      /* The gimplifier has already lowered this into gotos.  */
       break;
-    case CATCH_EXPR:
-      lower_stmt_body (CATCH_BODY (stmt), data);
+
+    case GIMPLE_RETURN:
+      lower_gimple_return (gsi, data);
+      return;
+
+    case GIMPLE_TRY:
+      lower_sequence (gimple_try_eval (stmt), data);
+      lower_sequence (gimple_try_cleanup (stmt), data);
       break;
-    case EH_FILTER_EXPR:
-      lower_stmt_body (EH_FILTER_FAILURE (stmt), data);
+
+    case GIMPLE_CATCH:
+      lower_sequence (gimple_catch_handler (stmt), data);
       break;
-      
-    case NOP_EXPR:
-    case ASM_EXPR:
-    case GOTO_EXPR:
-    case PREDICT_EXPR:
-    case LABEL_EXPR:
-    case SWITCH_EXPR:
-    case CHANGE_DYNAMIC_TYPE_EXPR:
-    case OMP_FOR:
-    case OMP_SECTIONS:
-    case OMP_SECTIONS_SWITCH:
-    case OMP_SECTION:
-    case OMP_SINGLE:
-    case OMP_MASTER:
-    case OMP_ORDERED:
-    case OMP_CRITICAL:
-    case OMP_RETURN:
-    case OMP_ATOMIC_LOAD:
-    case OMP_ATOMIC_STORE:
-    case OMP_CONTINUE:
+
+    case GIMPLE_EH_FILTER:
+      lower_sequence (gimple_eh_filter_failure (stmt), data);
       break;
 
-    case GIMPLE_MODIFY_STMT:
-      if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == CALL_EXPR)
-       stmt = GIMPLE_STMT_OPERAND (stmt, 1);
-      else
-       break;
-      /* FALLTHRU */
+    case GIMPLE_NOP:
+    case GIMPLE_ASM:
+    case GIMPLE_ASSIGN:
+    case GIMPLE_GOTO:
+    case GIMPLE_PREDICT:
+    case GIMPLE_LABEL:
+    case GIMPLE_SWITCH:
+    case GIMPLE_CHANGE_DYNAMIC_TYPE:
+    case GIMPLE_OMP_FOR:
+    case GIMPLE_OMP_SECTIONS:
+    case GIMPLE_OMP_SECTIONS_SWITCH:
+    case GIMPLE_OMP_SECTION:
+    case GIMPLE_OMP_SINGLE:
+    case GIMPLE_OMP_MASTER:
+    case GIMPLE_OMP_ORDERED:
+    case GIMPLE_OMP_CRITICAL:
+    case GIMPLE_OMP_RETURN:
+    case GIMPLE_OMP_ATOMIC_LOAD:
+    case GIMPLE_OMP_ATOMIC_STORE:
+    case GIMPLE_OMP_CONTINUE:
+      break;
 
-    case CALL_EXPR:
+    case GIMPLE_CALL:
       {
-       tree decl = get_callee_fndecl (stmt);
+       tree decl = gimple_call_fndecl (stmt);
+
        if (decl
            && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
            && DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
          {
            data->calls_builtin_setjmp = true;
-           lower_builtin_setjmp (tsi);
+           lower_builtin_setjmp (gsi);
            return;
          }
       }
       break;
 
-    case OMP_PARALLEL:
-    case OMP_TASK:
-      lower_omp_directive (tsi, data);
+    case GIMPLE_OMP_PARALLEL:
+    case GIMPLE_OMP_TASK:
+      lower_omp_directive (gsi, data);
       return;
 
     default:
       gcc_unreachable ();
     }
 
-  tsi_next (tsi);
+  gsi_next (gsi);
 }
 
 /* Lower a bind_expr TSI.  DATA is passed through the recursion.  */
 
 static void
-lower_bind_expr (tree_stmt_iterator *tsi, struct lower_data *data)
+lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
 {
   tree old_block = data->block;
-  tree stmt = tsi_stmt (*tsi);
-  tree new_block = BIND_EXPR_BLOCK (stmt);
+  gimple stmt = gsi_stmt (*gsi);
+  tree new_block = gimple_bind_block (stmt);
 
   if (new_block)
     {
@@ -325,8 +371,8 @@ lower_bind_expr (tree_stmt_iterator *tsi, struct lower_data *data)
        }
     }
 
-  record_vars (BIND_EXPR_VARS (stmt));
-  lower_stmt_body (BIND_EXPR_BODY (stmt), data);
+  record_vars (gimple_bind_vars (stmt));
+  lower_sequence (gimple_bind_body (stmt), data);
 
   if (new_block)
     {
@@ -337,9 +383,9 @@ lower_bind_expr (tree_stmt_iterator *tsi, struct lower_data *data)
       data->block = old_block;
     }
 
-  /* The BIND_EXPR no longer carries any useful information -- kill it.  */
-  tsi_link_before (tsi, BIND_EXPR_BODY (stmt), TSI_SAME_STMT);
-  tsi_delink (tsi);
+  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
+  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
+  gsi_remove (gsi, false);
 }
 
 /* Try to determine whether a TRY_CATCH expression can fall through.
@@ -390,6 +436,58 @@ try_catch_may_fallthru (const_tree stmt)
     }
 }
 
+
+/* Same as above, but for a GIMPLE_TRY_CATCH.  */
+
+static bool
+gimple_try_catch_may_fallthru (gimple stmt)
+{
+  gimple_stmt_iterator i;
+
+  /* We don't handle GIMPLE_TRY_FINALLY.  */
+  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);
+
+  /* If the TRY block can fall through, the whole TRY_CATCH can
+     fall through.  */
+  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
+    return true;
+
+  i = gsi_start (gimple_try_cleanup (stmt));
+  switch (gimple_code (gsi_stmt (i)))
+    {
+    case GIMPLE_CATCH:
+      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
+        catch expression and a body.  The whole try/catch may fall
+        through iff any of the catch bodies falls through.  */
+      for (; !gsi_end_p (i); gsi_next (&i))
+       {
+         if (gimple_seq_may_fallthru (gimple_catch_handler (gsi_stmt (i))))
+           return true;
+       }
+      return false;
+
+    case GIMPLE_EH_FILTER:
+      /* The exception filter expression only matters if there is an
+        exception.  If the exception does not match EH_FILTER_TYPES,
+        we will execute EH_FILTER_FAILURE, and we will fall through
+        if that falls through.  If the exception does match
+        EH_FILTER_TYPES, the stack unwinder will continue up the
+        stack, so we will not fall through.  We don't know whether we
+        will throw an exception which matches EH_FILTER_TYPES or not,
+        so we just ignore EH_FILTER_TYPES and assume that we might
+        throw an exception which doesn't match.  */
+      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));
+
+    default:
+      /* This case represents statements to be executed when an
+        exception occurs.  Those statements are implicitly followed
+        by a GIMPLE_RESX to resume execution after the exception.  So
+        in this case the try/catch never falls through.  */
+      return false;
+    }
+}
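A worked example of the case analysis above (schematic source, not GIMPLE dump syntax):

    /* try       { x = foo (); }
       catch (E) { y = 0; }

       The handler "y = 0;" may fall through, so the GIMPLE_CATCH case
       returns true and the whole try/catch may fall through.  Had every
       handler ended in a call to a noreturn function, no handler would
       fall through and the result would be false.  */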
+
+
 /* Try to determine if we can fall out of the bottom of BLOCK.  This guess
    need not be 100% accurate; simply be conservative and return true if we
    don't know.  This is used only to avoid stupidly generating extra code.
@@ -440,9 +538,9 @@ block_may_fallthru (const_tree block)
       return (block_may_fallthru (TREE_OPERAND (stmt, 0))
              && block_may_fallthru (TREE_OPERAND (stmt, 1)));
 
-    case GIMPLE_MODIFY_STMT:
-      if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == CALL_EXPR)
-       stmt = GIMPLE_STMT_OPERAND (stmt, 1);
+    case MODIFY_EXPR:
+      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
+       stmt = TREE_OPERAND (stmt, 1);
       else
        return true;
       /* FALLTHRU */
@@ -459,138 +557,110 @@ block_may_fallthru (const_tree block)
     }
 }
 
-/* Lower a cond_expr TSI.  DATA is passed through the recursion.  */
 
-static void
-lower_cond_expr (tree_stmt_iterator *tsi, struct lower_data *data)
+/* Try to determine if we can continue executing the statement
+   immediately following STMT.  This guess need not be 100% accurate;
+   simply be conservative and return true if we don't know.  This is
+   used only to avoid stupidly generating extra code. If we're wrong,
+   we'll just delete the extra code later.  */
+
+bool
+gimple_stmt_may_fallthru (gimple stmt)
 {
-  tree stmt = tsi_stmt (*tsi);
-  bool then_is_goto, else_is_goto;
-  tree then_branch, else_branch;
-  tree then_goto, else_goto;
-  
-  then_branch = COND_EXPR_THEN (stmt);
-  else_branch = COND_EXPR_ELSE (stmt);
+  if (!stmt)
+    return true;
 
-  lower_stmt_body (then_branch, data);
-  lower_stmt_body (else_branch, data);
+  switch (gimple_code (stmt))
+    {
+    case GIMPLE_GOTO:
+    case GIMPLE_RETURN:
+    case GIMPLE_RESX:
+      /* Easy cases.  If the last statement of the seq implies 
+        control transfer, then we can't fall through.  */
+      return false;
 
-  then_goto = expr_only (then_branch);
-  then_is_goto = then_goto && simple_goto_p (then_goto);
+    case GIMPLE_SWITCH:
+      /* Switch has already been lowered and represents a
+        branch to a selected label and hence can not fall through.  */
+      return false;
 
-  else_goto = expr_only (else_branch);
-  else_is_goto = else_goto && simple_goto_p (else_goto);
+    case GIMPLE_COND:
+      /* GIMPLE_COND's are already lowered into a two-way branch.  They
+        can't fall through.  */
+      return false;
 
-  if (!then_is_goto || !else_is_goto)
-    {
-      tree then_label, else_label, end_label, t;
-
-      then_label = NULL_TREE;
-      else_label = NULL_TREE;
-      end_label = NULL_TREE;
-      /* Replace the cond_expr with explicit gotos.  */
-      if (!then_is_goto)
-       {
-         t = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
-         if (TREE_SIDE_EFFECTS (then_branch))
-           then_label = t;
-         else
-           end_label = t;
-         then_goto = build_and_jump (&LABEL_EXPR_LABEL (t));
-       }
+    case GIMPLE_BIND:
+      return gimple_seq_may_fallthru (gimple_bind_body (stmt));
 
-      if (!else_is_goto)
-       {
-         t = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
-         if (TREE_SIDE_EFFECTS (else_branch))
-           else_label = t;
-         else
-           {
-             /* Both THEN and ELSE can be no-ops if one or both contained an
-                empty BIND_EXPR that was associated with the toplevel block
-                of an inlined function.  In that case remove_useless_stmts
-                can't have cleaned things up for us; kill the whole 
-                conditional now.  */
-             if (end_label)
-               {
-                 tsi_delink (tsi);
-                 return;
-               }
-             else
-               end_label = t;
-           }
-         else_goto = build_and_jump (&LABEL_EXPR_LABEL (t));
-       }
+    case GIMPLE_TRY:
+      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
+        return gimple_try_catch_may_fallthru (stmt);
 
-      if (then_label)
-       {
-         bool may_fallthru = block_may_fallthru (then_branch);
+      /* It must be a GIMPLE_TRY_FINALLY.  */
 
-         tsi_link_after (tsi, then_label, TSI_CONTINUE_LINKING);
-         tsi_link_after (tsi, then_branch, TSI_CONTINUE_LINKING);
-  
-         if (else_label && may_fallthru)
-           {
-             end_label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
-             t = build_and_jump (&LABEL_EXPR_LABEL (end_label));
-             tsi_link_after (tsi, t, TSI_CONTINUE_LINKING);
-           }
-       }
-  
-      if (else_label)
-       {
-         tsi_link_after (tsi, else_label, TSI_CONTINUE_LINKING);
-         tsi_link_after (tsi, else_branch, TSI_CONTINUE_LINKING);
-       }
+      /* The finally clause is always executed after the try clause,
+        so if it does not fall through, then the try-finally will not
+        fall through.  Otherwise, if the try clause does not fall
+        through, then when the finally clause falls through it will
+        resume execution wherever the try clause was going.  So the
+        whole try-finally will only fall through if both the try
+        clause and the finally clause fall through.  */
+      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
+             && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));
+
+    case GIMPLE_ASSIGN:
+      return true;
 
-      if (end_label)
-       tsi_link_after (tsi, end_label, TSI_CONTINUE_LINKING);
+    case GIMPLE_CALL:
+      /* Functions that do not return do not fall through.  */
+      return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;
+    
+    default:
+      return true;
     }
+}
+
 
-  COND_EXPR_THEN (stmt) = then_goto;
-  COND_EXPR_ELSE (stmt) = else_goto;
+/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */
 
-  tsi_next (tsi);
+bool
+gimple_seq_may_fallthru (gimple_seq seq)
+{
+  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
 }
 
-/* Lower a return_expr TSI.  DATA is passed through the recursion.  */
+
+/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */
 
 static void
-lower_return_expr (tree_stmt_iterator *tsi, struct lower_data *data)
+lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
 {
-  tree stmt = tsi_stmt (*tsi);
-  tree value, t, label;
-
-  /* Extract the value being returned.  */
-  value = TREE_OPERAND (stmt, 0);
-  if (value && TREE_CODE (value) == GIMPLE_MODIFY_STMT)
-    value = GIMPLE_STMT_OPERAND (value, 1);
+  gimple stmt = gsi_stmt (*gsi);
+  gimple t;
+  int i;
+  return_statements_t tmp_rs;
 
   /* Match this up with an existing return statement that's been created.  */
-  for (t = data->return_statements; t ; t = TREE_CHAIN (t))
+  for (i = VEC_length (return_statements_t, data->return_statements) - 1;
+       i >= 0; i--)
     {
-      tree tvalue = TREE_OPERAND (TREE_VALUE (t), 0);
-      if (tvalue && TREE_CODE (tvalue) == GIMPLE_MODIFY_STMT)
-       tvalue = GIMPLE_STMT_OPERAND (tvalue, 1);
+      tmp_rs = *VEC_index (return_statements_t, data->return_statements, i);
 
-      if (value == tvalue)
-       {
-         label = TREE_PURPOSE (t);
-         goto found;
-       }
+      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
+       goto found;
     }
 
   /* Not found.  Create a new label and record the return statement.  */
-  label = create_artificial_label ();
-  data->return_statements = tree_cons (label, stmt, data->return_statements);
+  tmp_rs.label = create_artificial_label ();
+  tmp_rs.stmt = stmt;
+  VEC_safe_push (return_statements_t, heap, data->return_statements, &tmp_rs);
 
   /* Generate a goto statement and remove the return statement.  */
  found:
-  t = build1 (GOTO_EXPR, void_type_node, label);
-  SET_EXPR_LOCUS (t, EXPR_LOCUS (stmt));
-  tsi_link_before (tsi, t, TSI_SAME_STMT);
-  tsi_delink (tsi);
+  t = gimple_build_goto (tmp_rs.label);
+  gimple_set_location (t, gimple_location (stmt));
+  gsi_insert_before (gsi, t, GSI_SAME_STMT);
+  gsi_remove (gsi, false);
 }
 
 /* Lower a __builtin_setjmp TSI.
@@ -647,71 +717,66 @@ lower_return_expr (tree_stmt_iterator *tsi, struct lower_data *data)
    to the receivers, thus keeping the complexity explosion localized.  */
 
 static void
-lower_builtin_setjmp (tree_stmt_iterator *tsi)
+lower_builtin_setjmp (gimple_stmt_iterator *gsi)
 {
-  tree stmt = tsi_stmt (*tsi);
+  gimple stmt = gsi_stmt (*gsi);
   tree cont_label = create_artificial_label ();
   tree next_label = create_artificial_label ();
   tree dest, t, arg;
+  gimple g;
 
   /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
      passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
   FORCED_LABEL (next_label) = 1;
 
-  if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
-    {
-      dest = GIMPLE_STMT_OPERAND (stmt, 0);
-      stmt = GIMPLE_STMT_OPERAND (stmt, 1);
-    }
-  else
-    dest = NULL_TREE;
+  dest = gimple_call_lhs (stmt);
 
   /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
   arg = build_addr (next_label, current_function_decl);
   t = implicit_built_in_decls[BUILT_IN_SETJMP_SETUP];
-  t = build_call_expr (t, 2, CALL_EXPR_ARG (stmt, 0), arg);
-  SET_EXPR_LOCUS (t, EXPR_LOCUS (stmt));
-  tsi_link_before (tsi, t, TSI_SAME_STMT);
+  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
+  gimple_set_location (g, gimple_location (stmt));
+  gsi_insert_before (gsi, g, GSI_SAME_STMT);
 
   /* Build 'DEST = 0' and insert.  */
   if (dest)
     {
-      t = build_gimple_modify_stmt (dest, fold_convert (TREE_TYPE (dest),
-                                                       integer_zero_node));
-      SET_EXPR_LOCUS (t, EXPR_LOCUS (stmt));
-      tsi_link_before (tsi, t, TSI_SAME_STMT);
+      g = gimple_build_assign (dest, fold_convert (TREE_TYPE (dest),
+                                                  integer_zero_node));
+      gimple_set_location (g, gimple_location (stmt));
+      gsi_insert_before (gsi, g, GSI_SAME_STMT);
     }
 
   /* Build 'goto CONT_LABEL' and insert.  */
-  t = build1 (GOTO_EXPR, void_type_node, cont_label);
-  tsi_link_before (tsi, t, TSI_SAME_STMT);
+  g = gimple_build_goto (cont_label);
+  gsi_insert_before (gsi, g, GSI_SAME_STMT);
 
   /* Build 'NEXT_LABEL:' and insert.  */
-  t = build1 (LABEL_EXPR, void_type_node, next_label);
-  tsi_link_before (tsi, t, TSI_SAME_STMT);
+  g = gimple_build_label (next_label);
+  gsi_insert_before (gsi, g, GSI_SAME_STMT);
 
   /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
   arg = build_addr (next_label, current_function_decl);
   t = implicit_built_in_decls[BUILT_IN_SETJMP_RECEIVER];
-  t = build_call_expr (t, 1, arg);
-  SET_EXPR_LOCUS (t, EXPR_LOCUS (stmt));
-  tsi_link_before (tsi, t, TSI_SAME_STMT);
+  g = gimple_build_call (t, 1, arg);
+  gimple_set_location (g, gimple_location (stmt));
+  gsi_insert_before (gsi, g, GSI_SAME_STMT);
 
   /* Build 'DEST = 1' and insert.  */
   if (dest)
     {
-      t = build_gimple_modify_stmt (dest, fold_convert (TREE_TYPE (dest),
-                                                       integer_one_node));
-      SET_EXPR_LOCUS (t, EXPR_LOCUS (stmt));
-      tsi_link_before (tsi, t, TSI_SAME_STMT);
+      g = gimple_build_assign (dest, fold_convert (TREE_TYPE (dest),
+                                                  integer_one_node));
+      gimple_set_location (g, gimple_location (stmt));
+      gsi_insert_before (gsi, g, GSI_SAME_STMT);
     }
 
   /* Build 'CONT_LABEL:' and insert.  */
-  t = build1 (LABEL_EXPR, void_type_node, cont_label);
-  tsi_link_before (tsi, t, TSI_SAME_STMT);
+  g = gimple_build_label (cont_label);
+  gsi_insert_before (gsi, g, GSI_SAME_STMT);
 
   /* Remove the call to __builtin_setjmp.  */
-  tsi_delink (tsi);
+  gsi_remove (gsi, false);
 }
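Taken together, the insertions above expand a setjmp call into the following shape (a paraphrase of the sequence this function builds, using GNU C label-address syntax rather than real GIMPLE):

    /* DEST = __builtin_setjmp (BUF);  becomes roughly:  */
    __builtin_setjmp_setup (BUF, &&next_label);
    DEST = 0;
    goto cont_label;
    next_label:
    __builtin_setjmp_receiver (&&next_label);
    DEST = 1;
    cont_label:;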
 \f
 
diff --git a/gcc/gimple-pretty-print.c b/gcc/gimple-pretty-print.c
new file mode 100644
index 0000000..d334d85
--- /dev/null
@@ -0,0 +1,1857 @@
+/* Pretty formatting of GIMPLE statements and expressions.
+   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
+   Free Software Foundation, Inc.
+   Contributed by Aldy Hernandez <aldyh@redhat.com> and
+   Diego Novillo <dnovillo@google.com>
+
+This file is part of GCC.
+
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 3, or (at your option) any later
+version.
+
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
+FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+for more details.
+
+You should have received a copy of the GNU General Public License
+along with GCC; see the file COPYING3.  If not see
+<http://www.gnu.org/licenses/>.  */
+
+#include "config.h"
+#include "system.h"
+#include "coretypes.h"
+#include "tm.h"
+#include "tree.h"
+#include "diagnostic.h"
+#include "real.h"
+#include "hashtab.h"
+#include "tree-flow.h"
+#include "tree-pass.h"
+#include "gimple.h"
+#include "value-prof.h"
+
+#define INDENT(SPACE)                                                  \
+  do { int i; for (i = 0; i < SPACE; i++) pp_space (buffer); } while (0)
+
+static pretty_printer buffer;
+static bool initialized = false;
+
+#define GIMPLE_NIY do_niy (buffer,gs)
+
+/* Try to print on BUFFER a default message for the unrecognized
+   gimple statement GS.  */
+
+static void
+do_niy (pretty_printer *buffer, gimple gs)
+{
+  pp_printf (buffer, "<<< Unknown GIMPLE statement: %s >>>\n",
+            gimple_code_name[(int) gimple_code (gs)]);
+}
+
+
+/* Initialize the pretty printer on FILE if needed.  */
+
+static void
+maybe_init_pretty_print (FILE *file)
+{
+  if (!initialized)
+    {
+      pp_construct (&buffer, NULL, 0);
+      pp_needs_newline (&buffer) = true;
+      initialized = true;
+    }
+
+  buffer.buffer->stream = file;
+}
+
+
+/* Emit a newline and SPC indentation spaces to BUFFER.  */
+
+static void
+newline_and_indent (pretty_printer *buffer, int spc)
+{
+  pp_newline (buffer);
+  INDENT (spc);
+}
+
+
+/* Print the GIMPLE statement GS on stderr.  */
+
+void
+debug_gimple_stmt (gimple gs)
+{
+  print_gimple_stmt (stderr, gs, 0, TDF_VOPS|TDF_MEMSYMS);
+  fprintf (stderr, "\n");
+}
+
+
+/* Dump GIMPLE statement G to FILE using SPC indentation spaces and
+   FLAGS as in dump_gimple_stmt.  */
+
+void
+print_gimple_stmt (FILE *file, gimple g, int spc, int flags)
+{
+  maybe_init_pretty_print (file);
+  dump_gimple_stmt (&buffer, g, spc, flags);
+  pp_flush (&buffer);
+}
+
+
+/* Dump GIMPLE statement G to FILE using SPC indentation spaces and
+   FLAGS as in dump_gimple_stmt.  Print only the right-hand side
+   of the statement.  */
+
+void
+print_gimple_expr (FILE *file, gimple g, int spc, int flags)
+{
+  flags |= TDF_RHS_ONLY;
+  maybe_init_pretty_print (file);
+  dump_gimple_stmt (&buffer, g, spc, flags);
+}
+
+
+/* Print the GIMPLE sequence SEQ on BUFFER using SPC indentation
+   spaces and FLAGS as in dump_gimple_stmt.  */
+
+static void
+dump_gimple_seq (pretty_printer *buffer, gimple_seq seq, int spc, int flags)
+{
+  gimple_stmt_iterator i;
+
+  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
+    {
+      gimple gs = gsi_stmt (i);
+      INDENT (spc);
+      dump_gimple_stmt (buffer, gs, spc, flags);
+      if (!gsi_one_before_end_p (i))
+       pp_newline (buffer);
+    }
+}
+
+
+/* Dump GIMPLE sequence SEQ to FILE using SPC indentation spaces and
+   FLAGS as in dump_gimple_stmt.  */
+
+void
+print_gimple_seq (FILE *file, gimple_seq seq, int spc, int flags)
+{
+  maybe_init_pretty_print (file);
+  dump_gimple_seq (&buffer, seq, spc, flags);
+  pp_flush (&buffer);
+}
+
+
+/* Print the GIMPLE sequence SEQ on stderr.  */
+
+void
+debug_gimple_seq (gimple_seq seq)
+{
+  print_gimple_seq (stderr, seq, 0, TDF_VOPS|TDF_MEMSYMS);
+}
+
+
+/* A simple helper to pretty-print some of the gimple tuples in the printf
+   style.  The format modifiers are preceded by '%' and are:
+     'G' - outputs a string corresponding to the code of the given gimple,
+     'S' - outputs a gimple_seq with indent of spc + 2,
+     'T' - outputs the tree t,
+     'd' - outputs an int as a decimal,
+     's' - outputs a string,
+     'n' - outputs a newline,
+     '+' - increases indent by 2 then outputs a newline,
+     '-' - decreases indent by 2 then outputs a newline.   */
+
+static void
+dump_gimple_fmt (pretty_printer *buffer, int spc, int flags,
+                 const char *fmt, ...)
+{
+  va_list args;
+  const char *c;
+  const char *tmp;
+
+  va_start (args, fmt);
+  for (c = fmt; *c; c++)
+    {
+      if (*c == '%')
+        {
+          gimple_seq seq;
+          tree t;
+          gimple g;
+          switch (*++c)
+            {
+              case 'G':
+                g = va_arg (args, gimple);
+                tmp = gimple_code_name[gimple_code (g)];
+                pp_string (buffer, tmp);
+                break;
+
+              case 'S':
+                seq = va_arg (args, gimple_seq);
+                pp_newline (buffer);
+                dump_gimple_seq (buffer, seq, spc + 2, flags);
+                newline_and_indent (buffer, spc);
+                break;
+
+              case 'T':
+                t = va_arg (args, tree);
+                if (t == NULL_TREE)
+                  pp_string (buffer, "NULL");
+                else
+                  dump_generic_node (buffer, t, spc, flags, false);
+                break;
+
+              case 'd':
+                pp_decimal_int (buffer, va_arg (args, int));
+                break;
+
+              case 's':
+                pp_string (buffer, va_arg (args, char *));
+                break;
+
+              case 'n':
+                newline_and_indent (buffer, spc);
+                break;
+
+              case '+':
+                spc += 2;
+                newline_and_indent (buffer, spc);
+                break;
+
+              case '-':
+                spc -= 2;
+                newline_and_indent (buffer, spc);
+                break;
+
+              default:
+                gcc_unreachable ();
+            }
+        } 
+      else
+        pp_character (buffer, *c);
+    }
+  va_end (args);
+}
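For instance, the callers later in this file combine these modifiers as follows (both calls appear verbatim in dump_gimple_return and dump_gimple_goto below):

    dump_gimple_fmt (buffer, spc, flags, "%G <%T>", gs, t);
    dump_gimple_fmt (buffer, spc, flags, "goto %T;", label);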
+
+
+/* Helper for dump_gimple_assign.  Print the unary RHS of the
+   assignment GS.  BUFFER, SPC and FLAGS are as in dump_gimple_stmt.  */
+
+static void
+dump_unary_rhs (pretty_printer *buffer, gimple gs, int spc, int flags)
+{
+  enum tree_code rhs_code = gimple_assign_rhs_code (gs);
+  tree lhs = gimple_assign_lhs (gs);
+  tree rhs = gimple_assign_rhs1 (gs);
+
+  switch (rhs_code)
+    {
+    case VIEW_CONVERT_EXPR:
+    case ASSERT_EXPR:
+      dump_generic_node (buffer, rhs, spc, flags, false);
+      break;
+
+    case FIXED_CONVERT_EXPR:
+    case FIX_TRUNC_EXPR:
+    case FLOAT_EXPR:
+    CASE_CONVERT:
+      pp_string (buffer, "(");
+      dump_generic_node (buffer, TREE_TYPE (lhs), spc, flags, false);
+      pp_string (buffer, ") ");
+      dump_generic_node (buffer, rhs, spc, flags, false);
+      break;
+      
+    case PAREN_EXPR:
+      pp_string (buffer, "((");
+      dump_generic_node (buffer, rhs, spc, flags, false);
+      pp_string (buffer, "))");
+      break;
+      
+    case ABS_EXPR:
+      pp_string (buffer, "ABS_EXPR <");
+      dump_generic_node (buffer, rhs, spc, flags, false);
+      pp_string (buffer, ">");
+      break;
+
+    default:
+      if (TREE_CODE_CLASS (rhs_code) == tcc_declaration
+         || TREE_CODE_CLASS (rhs_code) == tcc_constant
+         || TREE_CODE_CLASS (rhs_code) == tcc_reference
+         || rhs_code == SSA_NAME
+         || rhs_code == ADDR_EXPR
+         || rhs_code == CONSTRUCTOR)
+       ; /* do nothing.  */
+      else if (rhs_code == BIT_NOT_EXPR)
+       pp_string (buffer, "~");
+      else if (rhs_code == TRUTH_NOT_EXPR)
+       pp_string (buffer, "!");
+      else if (rhs_code == NEGATE_EXPR)
+       pp_string (buffer, "-");
+      else
+       {
+         pp_string (buffer, "[");
+         pp_string (buffer, tree_code_name [rhs_code]);
+         pp_string (buffer, "] ");
+       }
+
+      dump_generic_node (buffer, rhs, spc, flags, false);
+      break;
+    }
+}
+
+
+/* Helper for dump_gimple_assign.  Print the binary RHS of the
+   assignment GS.  BUFFER, SPC and FLAGS are as in dump_gimple_stmt.  */
+
+static void
+dump_binary_rhs (pretty_printer *buffer, gimple gs, int spc, int flags)
+{
+  switch (gimple_assign_rhs_code (gs))
+    {
+    case COMPLEX_EXPR:
+      pp_string (buffer, "COMPLEX_EXPR <");
+      dump_generic_node (buffer, gimple_assign_rhs1 (gs), spc, flags, false);
+      pp_string (buffer, ", ");
+      dump_generic_node (buffer, gimple_assign_rhs2 (gs), spc, flags, false);
+      pp_string (buffer, ">");
+      break;
+      
+    case MIN_EXPR:
+      pp_string (buffer, "MIN_EXPR <");
+      dump_generic_node (buffer, gimple_assign_rhs1 (gs), spc, flags, false);
+      pp_string (buffer, ", ");
+      dump_generic_node (buffer, gimple_assign_rhs2 (gs), spc, flags, false);
+      pp_string (buffer, ">");
+      break;
+      
+    case MAX_EXPR:
+      pp_string (buffer, "MAX_EXPR <");
+      dump_generic_node (buffer, gimple_assign_rhs1 (gs), spc, flags, false);
+      pp_string (buffer, ", ");
+      dump_generic_node (buffer, gimple_assign_rhs2 (gs), spc, flags, false);
+      pp_string (buffer, ">");
+      break;
+
+    default:
+      dump_generic_node (buffer, gimple_assign_rhs1 (gs), spc, flags, false);
+      pp_space (buffer);
+      pp_string (buffer, op_symbol_code (gimple_assign_rhs_code (gs)));
+      pp_space (buffer);
+      dump_generic_node (buffer, gimple_assign_rhs2 (gs), spc, flags, false);
+    }
+}
+
+
+/* Dump the gimple assignment GS.  BUFFER, SPC and FLAGS are as in
+   dump_gimple_stmt.  */
+
+static void
+dump_gimple_assign (pretty_printer *buffer, gimple gs, int spc, int flags)
+{
+  if (flags & TDF_RAW)
+    {
+      tree last;
+      if (gimple_num_ops (gs) == 2)
+        last = NULL_TREE;
+      else if (gimple_num_ops (gs) == 3)
+        last = gimple_assign_rhs2 (gs);
+      else
+        gcc_unreachable ();
+
+      dump_gimple_fmt (buffer, spc, flags, "%G <%s, %T, %T, %T>", gs,
+                       tree_code_name[gimple_assign_rhs_code (gs)],
+                       gimple_assign_lhs (gs), gimple_assign_rhs1 (gs), last);
+    }
+  else
+    {
+      if (!(flags & TDF_RHS_ONLY))
+       {
+         dump_generic_node (buffer, gimple_assign_lhs (gs), spc, flags, false);
+         pp_space (buffer);
+         pp_character (buffer, '=');
+
+         if (gimple_assign_nontemporal_move_p (gs))
+           pp_string (buffer, "{nt}");
+
+         if (gimple_has_volatile_ops (gs))
+           pp_string (buffer, "{v}");
+
+         pp_space (buffer);
+       }
+
+      if (gimple_num_ops (gs) == 2)
+        dump_unary_rhs (buffer, gs, spc, flags);
+      else if (gimple_num_ops (gs) == 3)
+        dump_binary_rhs (buffer, gs, spc, flags);
+      else
+        gcc_unreachable ();
+      if (!(flags & TDF_RHS_ONLY))
+       pp_semicolon(buffer);
+    }
+}
+
+
+/* Dump the return statement GS.  BUFFER, SPC and FLAGS are as in
+   dump_gimple_stmt.  */
+
+static void
+dump_gimple_return (pretty_printer *buffer, gimple gs, int spc, int flags)
+{
+  tree t;
+
+  t = gimple_return_retval (gs);
+  if (flags & TDF_RAW)
+    dump_gimple_fmt (buffer, spc, flags, "%G <%T>", gs, t);
+  else
+    {
+      pp_string (buffer, "return");
+      if (t)
+       {
+         pp_space (buffer);
+         dump_generic_node (buffer, t, spc, flags, false);
+       }
+      pp_semicolon (buffer);
+    }
+}
+
+
+/* Dump the call arguments for a gimple call. BUFFER, FLAGS are as in
+   dump_gimple_call.  */
+
+static void
+dump_gimple_call_args (pretty_printer *buffer, gimple gs, int flags)
+{
+  size_t i;
+
+  for (i = 0; i < gimple_call_num_args (gs); i++)
+    {
+      dump_generic_node (buffer, gimple_call_arg (gs, i), 0, flags, false);
+      if (i < gimple_call_num_args (gs) - 1)
+       pp_string (buffer, ", ");
+    }
+
+  if (gimple_call_va_arg_pack_p (gs))
+    {
+      if (gimple_call_num_args (gs) > 0)
+        {
+          pp_character (buffer, ',');
+          pp_space (buffer);
+        }
+
+      pp_string (buffer, "__builtin_va_arg_pack ()");
+    }
+}
+
+
+/* Dump the call statement GS.  BUFFER, SPC and FLAGS are as in
+   dump_gimple_stmt.  */
+
+static void
+dump_gimple_call (pretty_printer *buffer, gimple gs, int spc, int flags)
+{
+  tree lhs = gimple_call_lhs (gs);
+
+  if (flags & TDF_RAW)
+    {
+      dump_gimple_fmt (buffer, spc, flags, "%G <%T, %T",
+                     gs, gimple_call_fn (gs), lhs);
+      if (gimple_call_num_args (gs) > 0)
+        {
+          pp_string (buffer, ", ");
+          dump_gimple_call_args (buffer, gs, flags);
+        }
+      pp_string (buffer, ">");
+    }
+  else
+    {
+      if (lhs && !(flags & TDF_RHS_ONLY))
+        {
+          dump_generic_node (buffer, lhs, spc, flags, false);
+          pp_string (buffer, " =");
+
+         if (gimple_has_volatile_ops (gs))
+           pp_string (buffer, "{v}");
+
+         pp_space (buffer);
+        }
+      dump_generic_node (buffer, gimple_call_fn (gs), spc, flags, false);
+      pp_string (buffer, " (");
+      dump_gimple_call_args (buffer, gs, flags);
+      pp_string (buffer, ")");
+      if (!(flags & TDF_RHS_ONLY))
+       pp_semicolon (buffer);
+    }
+
+  if (gimple_call_chain (gs))
+    {
+      pp_string (buffer, " [static-chain: ");
+      dump_generic_node (buffer, gimple_call_chain (gs), spc, flags, false);
+      pp_character (buffer, ']');
+    }
+
+  if (gimple_call_return_slot_opt_p (gs))
+    pp_string (buffer, " [return slot optimization]");
+
+  if (gimple_call_tail_p (gs))
+    pp_string (buffer, " [tail call]");
+}
+
+
+/* Dump the switch statement GS.  BUFFER, SPC and FLAGS are as in
+   dump_gimple_stmt.  */
+
+static void
+dump_gimple_switch (pretty_printer *buffer, gimple gs, int spc, int flags)
+{
+  unsigned int i;
+
+  GIMPLE_CHECK (gs, GIMPLE_SWITCH);
+  if (flags & TDF_RAW)
+    dump_gimple_fmt (buffer, spc, flags, "%G <%T, ", gs,
+                   gimple_switch_index (gs));
+  else
+    {
+      pp_string (buffer, "switch (");
+      dump_generic_node (buffer, gimple_switch_index (gs), spc, flags, true);
+      pp_string (buffer, ") <");
+    }
+
+  for (i = 0; i < gimple_switch_num_labels (gs); i++)
+    {
+      tree case_label = gimple_switch_label (gs, i);
+      if (case_label == NULL_TREE)
+       continue;
+
+      dump_generic_node (buffer, case_label, spc, flags, false);
+      pp_string (buffer, " ");
+      dump_generic_node (buffer, CASE_LABEL (case_label), spc, flags, false);
+      if (i < gimple_switch_num_labels (gs) - 1)
+        pp_string (buffer, ", ");
+    }
+  pp_string (buffer, ">");
+}
+
+
+/* Dump the gimple conditional GS.  BUFFER, SPC and FLAGS are as in
+   dump_gimple_stmt.  */
+
+static void
+dump_gimple_cond (pretty_printer *buffer, gimple gs, int spc, int flags)
+{
+  if (flags & TDF_RAW)
+    dump_gimple_fmt (buffer, spc, flags, "%G <%s, %T, %T, %T, %T>", gs,
+                   tree_code_name [gimple_cond_code (gs)],
+                   gimple_cond_lhs (gs), gimple_cond_rhs (gs),
+                   gimple_cond_true_label (gs), gimple_cond_false_label (gs));
+  else
+    {
+      if (!(flags & TDF_RHS_ONLY))
+       pp_string (buffer, "if (");
+      dump_generic_node (buffer, gimple_cond_lhs (gs), spc, flags, false);
+      pp_space (buffer);
+      pp_string (buffer, op_symbol_code (gimple_cond_code (gs)));
+      pp_space (buffer);
+      dump_generic_node (buffer, gimple_cond_rhs (gs), spc, flags, false);
+      if (!(flags & TDF_RHS_ONLY))
+       {
+         pp_string (buffer, ")");
+
+         if (gimple_cond_true_label (gs))
+           {
+             pp_string (buffer, " goto ");
+             dump_generic_node (buffer, gimple_cond_true_label (gs),
+                                spc, flags, false);
+             pp_semicolon (buffer);
+           }
+         if (gimple_cond_false_label (gs))
+           {
+             pp_string (buffer, " else goto ");
+             dump_generic_node (buffer, gimple_cond_false_label (gs),
+                                spc, flags, false);
+             pp_semicolon (buffer);
+           }
+       }
+    }
+}
+
+
+/* Dump a GIMPLE_LABEL tuple on the pretty_printer BUFFER, SPC
+   spaces of indent.  FLAGS specifies details to show in the dump (see
+   TDF_* in tree-pass.h).  */
+
+static void
+dump_gimple_label (pretty_printer *buffer, gimple gs, int spc, int flags)
+{
+  tree label = gimple_label_label (gs);
+  if (flags & TDF_RAW)
+      dump_gimple_fmt (buffer, spc, flags, "%G <%T>", gs, label);
+  else
+    {
+      dump_generic_node (buffer, label, spc, flags, false);
+      pp_string (buffer, ":");
+    }
+  if (DECL_NONLOCAL (label))
+    pp_string (buffer, " [non-local]");
+}
+
+/* Dump a GIMPLE_GOTO tuple on the pretty_printer BUFFER, SPC
+   spaces of indent.  FLAGS specifies details to show in the dump (see
+   TDF_* in tree-pass.h).  */
+
+static void
+dump_gimple_goto (pretty_printer *buffer, gimple gs, int spc, int flags)
+{
+  tree label = gimple_goto_dest (gs);
+  if (flags & TDF_RAW)
+    dump_gimple_fmt (buffer, spc, flags, "%G <%T>", gs, label);
+  else
+    dump_gimple_fmt (buffer, spc, flags, "goto %T;", label);
+}
+
+
+/* Dump a GIMPLE_BIND tuple on the pretty_printer BUFFER, SPC
+   spaces of indent.  FLAGS specifies details to show in the dump (see
+   TDF_* in tree-pass.h).  */
+
+static void
+dump_gimple_bind (pretty_printer *buffer, gimple gs, int spc, int flags)
+{
+  if (flags & TDF_RAW)
+    dump_gimple_fmt (buffer, spc, flags, "%G <", gs);
+  else
+    pp_character (buffer, '{');
+  if (!(flags & TDF_SLIM))
+    {
+      tree var;
+
+      for (var = gimple_bind_vars (gs); var; var = TREE_CHAIN (var))
+       {
+          newline_and_indent (buffer, 2);
+         print_declaration (buffer, var, spc, flags);
+       }
+      if (gimple_bind_vars (gs))
+       pp_newline (buffer);
+    }
+  pp_newline (buffer);
+  dump_gimple_seq (buffer, gimple_bind_body (gs), spc + 2, flags);
+  newline_and_indent (buffer, spc);
+  if (flags & TDF_RAW)
+    pp_character (buffer, '>');
+  else
+    pp_character (buffer, '}');
+}
+
+
+/* Dump a GIMPLE_TRY tuple on the pretty_printer BUFFER, SPC spaces of
+   indent.  FLAGS specifies details to show in the dump (see TDF_* in
+   tree-pass.h).  */
+
+static void
+dump_gimple_try (pretty_printer *buffer, gimple gs, int spc, int flags)
+{
+  if (flags & TDF_RAW)
+    {
+      const char *type;
+      if (gimple_try_kind (gs) == GIMPLE_TRY_CATCH)
+        type = "GIMPLE_TRY_CATCH";
+      else if (gimple_try_kind (gs) == GIMPLE_TRY_FINALLY)
+        type = "GIMPLE_TRY_FINALLY";
+      else
+        type = "UNKNOWN GIMPLE_TRY";
+      dump_gimple_fmt (buffer, spc, flags,
+                       "%G <%s,%+EVAL <%S>%nCLEANUP <%S>%->", gs, type,
+                       gimple_try_eval (gs), gimple_try_cleanup (gs));
+    }
+  else
+    {
+      pp_string (buffer, "try");
+      newline_and_indent (buffer, spc + 2);
+      pp_string (buffer, "{");
+      pp_newline (buffer);
+
+      dump_gimple_seq (buffer, gimple_try_eval (gs), spc + 4, flags);
+      newline_and_indent (buffer, spc + 2);
+      pp_string (buffer, "}");
+
+      if (gimple_try_kind (gs) == GIMPLE_TRY_CATCH)
+       {
+         newline_and_indent (buffer, spc);
+         pp_string (buffer, "catch");
+         newline_and_indent (buffer, spc + 2);
+         pp_string (buffer, "{");
+       }
+      else if (gimple_try_kind (gs) == GIMPLE_TRY_FINALLY)
+       {
+         newline_and_indent (buffer, spc);
+         pp_string (buffer, "finally");
+         newline_and_indent (buffer, spc + 2);
+         pp_string (buffer, "{");
+       }
+      else
+       pp_string (buffer, " <UNKNOWN GIMPLE_TRY> {");
+
+      pp_newline (buffer);
+      dump_gimple_seq (buffer, gimple_try_cleanup (gs), spc + 4, flags);
+      newline_and_indent (buffer, spc + 2);
+      pp_character (buffer, '}');
+    }
+}
+
+
+/* Dump a GIMPLE_CATCH tuple on the pretty_printer BUFFER, SPC spaces of
+   indent.  FLAGS specifies details to show in the dump (see TDF_* in
+   tree-pass.h).  */
+
+static void
+dump_gimple_catch (pretty_printer *buffer, gimple gs, int spc, int flags)
+{
+  if (flags & TDF_RAW)
+      dump_gimple_fmt (buffer, spc, flags, "%G <%T, %+CATCH <%S>%->", gs,
+                       gimple_catch_types (gs), gimple_catch_handler (gs));
+  else
+      dump_gimple_fmt (buffer, spc, flags, "catch (%T)%+{%S}",
+                       gimple_catch_types (gs), gimple_catch_handler (gs));
+}
+
+
+/* Dump a GIMPLE_EH_FILTER tuple on the pretty_printer BUFFER, SPC spaces of
+   indent.  FLAGS specifies details to show in the dump (see TDF_* in
+   tree-pass.h).  */
+
+static void
+dump_gimple_eh_filter (pretty_printer *buffer, gimple gs, int spc, int flags)
+{
+  if (flags & TDF_RAW)
+    dump_gimple_fmt (buffer, spc, flags, "%G <%T, %+FAILURE <%S>%->", gs,
+                     gimple_eh_filter_types (gs),
+                     gimple_eh_filter_failure (gs));
+  else
+    dump_gimple_fmt (buffer, spc, flags, "<<<eh_filter (%T)>>>%+{%+%S%-}",
+                     gimple_eh_filter_types (gs),
+                     gimple_eh_filter_failure (gs));
+}
+
+
+/* Dump a GIMPLE_RESX tuple on the pretty_printer BUFFER, SPC spaces of
+   indent.  FLAGS specifies details to show in the dump (see TDF_* in
+   tree-pass.h).  */
+
+static void
+dump_gimple_resx (pretty_printer *buffer, gimple gs, int spc, int flags)
+{
+  if (flags & TDF_RAW)
+    dump_gimple_fmt (buffer, spc, flags, "%G <%d>", gs,
+                     gimple_resx_region (gs));
+  else
+    dump_gimple_fmt (buffer, spc, flags, "resx %d", gimple_resx_region (gs));
+}
+
+/* Dump a GIMPLE_OMP_FOR tuple on the pretty_printer BUFFER.  */
+static void
+dump_gimple_omp_for (pretty_printer *buffer, gimple gs, int spc, int flags)
+{
+  size_t i;
+
+  if (flags & TDF_RAW)
+    {
+      dump_gimple_fmt (buffer, spc, flags, "%G <%+BODY <%S>%nCLAUSES <", gs,
+                       gimple_omp_body (gs));
+      dump_omp_clauses (buffer, gimple_omp_for_clauses (gs), spc, flags);
+      dump_gimple_fmt (buffer, spc, flags, " >,");
+      for (i = 0; i < gimple_omp_for_collapse (gs); i++)
+       dump_gimple_fmt (buffer, spc, flags,
+                        "%+%T, %T, %T, %s, %T,%n",
+                        gimple_omp_for_index (gs, i),
+                        gimple_omp_for_initial (gs, i),
+                        gimple_omp_for_final (gs, i),
+                        tree_code_name[gimple_omp_for_cond (gs, i)],
+                        gimple_omp_for_incr (gs, i));
+      dump_gimple_fmt (buffer, spc, flags, "PRE_BODY <%S>%->",
+                      gimple_omp_for_pre_body (gs));
+    }
+  else
+    {
+      pp_string (buffer, "#pragma omp for");
+      dump_omp_clauses (buffer, gimple_omp_for_clauses (gs), spc, flags);
+      for (i = 0; i < gimple_omp_for_collapse (gs); i++)
+       {
+         if (i)
+           spc += 2;
+         newline_and_indent (buffer, spc);
+         pp_string (buffer, "for (");
+         dump_generic_node (buffer, gimple_omp_for_index (gs, i), spc,
+                            flags, false);
+         pp_string (buffer, " = ");
+         dump_generic_node (buffer, gimple_omp_for_initial (gs, i), spc,
+                            flags, false);
+         pp_string (buffer, "; ");
+
+         dump_generic_node (buffer, gimple_omp_for_index (gs, i), spc,
+                            flags, false);
+         pp_space (buffer);
+         switch (gimple_omp_for_cond (gs, i))
+           {
+           case LT_EXPR:
+             pp_character (buffer, '<');
+             break;
+           case GT_EXPR:
+             pp_character (buffer, '>');
+             break;
+           case LE_EXPR:
+             pp_string (buffer, "<=");
+             break;
+           case GE_EXPR:
+             pp_string (buffer, ">=");
+             break;
+           default:
+             gcc_unreachable ();
+           }
+         pp_space (buffer);
+         dump_generic_node (buffer, gimple_omp_for_final (gs, i), spc,
+                            flags, false);
+         pp_string (buffer, "; ");
+
+         dump_generic_node (buffer, gimple_omp_for_index (gs, i), spc,
+                            flags, false);
+         pp_string (buffer, " = ");
+         dump_generic_node (buffer, gimple_omp_for_incr (gs, i), spc,
+                            flags, false);
+         pp_character (buffer, ')');
+       }
+
+      if (!gimple_seq_empty_p (gimple_omp_body (gs)))
+       {
+         newline_and_indent (buffer, spc + 2);
+         pp_character (buffer, '{');
+         pp_newline (buffer);
+         dump_gimple_seq (buffer, gimple_omp_body (gs), spc + 4, flags);
+         newline_and_indent (buffer, spc + 2);
+         pp_character (buffer, '}');
+       }
+    }
+}
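+
+/* As a rough illustration (the trees shown are hypothetical), a
+   single-level GIMPLE_OMP_FOR whose index is i, initial value 0, final
+   value n, condition LT_EXPR and increment i + 1 would be printed by the
+   non-TDF_RAW branch above roughly as:
+
+     #pragma omp for
+     for (i = 0; i < n; i = i + 1)
+       {
+         ...
+       }
+
+   with the braces emitted only when the body sequence is non-empty.  */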
+
+/* Dump a GIMPLE_OMP_CONTINUE tuple on the pretty_printer BUFFER.  */
+
+static void
+dump_gimple_omp_continue (pretty_printer *buffer, gimple gs, int spc, int flags)
+{
+  if (flags & TDF_RAW)
+    {
+      dump_gimple_fmt (buffer, spc, flags, "%G <%T, %T>", gs,
+                       gimple_omp_continue_control_def (gs),
+                       gimple_omp_continue_control_use (gs));
+    }
+  else
+    {
+      pp_string (buffer, "#pragma omp continue (");
+      dump_generic_node (buffer, gimple_omp_continue_control_def (gs),
+                        spc, flags, false);
+      pp_character (buffer, ',');
+      pp_space (buffer);
+      dump_generic_node (buffer, gimple_omp_continue_control_use (gs),
+                        spc, flags, false);
+      pp_character (buffer, ')');
+    }
+}
+
+/* Dump a GIMPLE_OMP_SINGLE tuple on the pretty_printer BUFFER.  */
+
+static void
+dump_gimple_omp_single (pretty_printer *buffer, gimple gs, int spc, int flags)
+{
+  if (flags & TDF_RAW)
+    {
+      dump_gimple_fmt (buffer, spc, flags, "%G <%+BODY <%S>%nCLAUSES <", gs,
+                      gimple_omp_body (gs));
+      dump_omp_clauses (buffer, gimple_omp_single_clauses (gs), spc, flags);
+      dump_gimple_fmt (buffer, spc, flags, " >");
+    }
+  else
+    {
+      pp_string (buffer, "#pragma omp single");
+      dump_omp_clauses (buffer, gimple_omp_single_clauses (gs), spc, flags);
+      if (!gimple_seq_empty_p (gimple_omp_body (gs)))
+       {
+         newline_and_indent (buffer, spc + 2);
+         pp_character (buffer, '{');
+         pp_newline (buffer);
+         dump_gimple_seq (buffer, gimple_omp_body (gs), spc + 4, flags);
+         newline_and_indent (buffer, spc + 2);
+         pp_character (buffer, '}');
+       }
+    }
+}
+
+/* Dump a GIMPLE_OMP_SECTIONS tuple on the pretty_printer BUFFER.  */
+
+static void
+dump_gimple_omp_sections (pretty_printer *buffer, gimple gs, int spc,
+                         int flags)
+{
+  if (flags & TDF_RAW)
+    {
+      dump_gimple_fmt (buffer, spc, flags, "%G <%+BODY <%S>%nCLAUSES <", gs,
+                      gimple_omp_body (gs));
+      dump_omp_clauses (buffer, gimple_omp_sections_clauses (gs), spc, flags);
+      dump_gimple_fmt (buffer, spc, flags, " >");
+    }
+  else
+    {
+      pp_string (buffer, "#pragma omp sections");
+      if (gimple_omp_sections_control (gs))
+       {
+         pp_string (buffer, " <");
+         dump_generic_node (buffer, gimple_omp_sections_control (gs), spc,
+                            flags, false);
+         pp_character (buffer, '>');
+       }
+      dump_omp_clauses (buffer, gimple_omp_sections_clauses (gs), spc, flags);
+      if (!gimple_seq_empty_p (gimple_omp_body (gs)))
+       {
+         newline_and_indent (buffer, spc + 2);
+         pp_character (buffer, '{');
+         pp_newline (buffer);
+         dump_gimple_seq (buffer, gimple_omp_body (gs), spc + 4, flags);
+         newline_and_indent (buffer, spc + 2);
+         pp_character (buffer, '}');
+       }
+    }
+}
+
+/* Dump a GIMPLE_OMP_{MASTER,ORDERED,SECTION} tuple on the pretty_printer
+   BUFFER.  */
+
+static void
+dump_gimple_omp_block (pretty_printer *buffer, gimple gs, int spc, int flags)
+{
+  if (flags & TDF_RAW)
+    dump_gimple_fmt (buffer, spc, flags, "%G <%+BODY <%S> >", gs,
+                    gimple_omp_body (gs));
+  else
+    {
+      switch (gimple_code (gs))
+       {
+       case GIMPLE_OMP_MASTER:
+         pp_string (buffer, "#pragma omp master");
+         break;
+       case GIMPLE_OMP_ORDERED:
+         pp_string (buffer, "#pragma omp ordered");
+         break;
+       case GIMPLE_OMP_SECTION:
+         pp_string (buffer, "#pragma omp section");
+         break;
+       default:
+         gcc_unreachable ();
+       }
+      if (!gimple_seq_empty_p (gimple_omp_body (gs)))
+       {
+         newline_and_indent (buffer, spc + 2);
+         pp_character (buffer, '{');
+         pp_newline (buffer);
+         dump_gimple_seq (buffer, gimple_omp_body (gs), spc + 4, flags);
+         newline_and_indent (buffer, spc + 2);
+         pp_character (buffer, '}');
+       }
+    }
+}
+
+/* Dump a GIMPLE_OMP_CRITICAL tuple on the pretty_printer BUFFER.  */
+
+static void
+dump_gimple_omp_critical (pretty_printer *buffer, gimple gs, int spc,
+                         int flags)
+{
+  if (flags & TDF_RAW)
+    dump_gimple_fmt (buffer, spc, flags, "%G <%+BODY <%S> >", gs,
+                    gimple_omp_body (gs));
+  else
+    {
+      pp_string (buffer, "#pragma omp critical");
+      if (gimple_omp_critical_name (gs))
+       {
+         pp_string (buffer, " (");
+         dump_generic_node (buffer, gimple_omp_critical_name (gs), spc,
+                            flags, false);
+         pp_character (buffer, ')');
+       }
+      if (!gimple_seq_empty_p (gimple_omp_body (gs)))
+       {
+         newline_and_indent (buffer, spc + 2);
+         pp_character (buffer, '{');
+         pp_newline (buffer);
+         dump_gimple_seq (buffer, gimple_omp_body (gs), spc + 4, flags);
+         newline_and_indent (buffer, spc + 2);
+         pp_character (buffer, '}');
+       }
+    }
+}
+
+/* Dump a GIMPLE_OMP_RETURN tuple on the pretty_printer BUFFER.  */
+
+static void
+dump_gimple_omp_return (pretty_printer *buffer, gimple gs, int spc, int flags)
+{
+  if (flags & TDF_RAW)
+    {
+      dump_gimple_fmt (buffer, spc, flags, "%G <nowait=%d>", gs,
+                       (int) gimple_omp_return_nowait_p (gs));
+    }
+  else
+    {
+      pp_string (buffer, "#pragma omp return");
+      if (gimple_omp_return_nowait_p (gs))
+       pp_string (buffer, "(nowait)");
+    }
+}
+
+/* Dump a GIMPLE_ASM tuple on the pretty_printer BUFFER, SPC spaces of
+   indent.  FLAGS specifies details to show in the dump (see TDF_* in
+   tree-pass.h).  */
+
+static void
+dump_gimple_asm (pretty_printer *buffer, gimple gs, int spc, int flags)
+{
+  unsigned int i;
+
+  if (flags & TDF_RAW)
+    dump_gimple_fmt (buffer, spc, flags, "%G <%+STRING <%n%s%n>", gs,
+                     gimple_asm_string (gs));
+  else
+    {
+      pp_string (buffer, "__asm__");
+      if (gimple_asm_volatile_p (gs))
+       pp_string (buffer, " __volatile__");
+      pp_string (buffer, "(\"");
+      pp_string (buffer, gimple_asm_string (gs));
+      pp_string (buffer, "\"");
+    }
+
+  if (gimple_asm_ninputs (gs)
+     || gimple_asm_noutputs (gs) 
+     || gimple_asm_nclobbers (gs))
+    {
+      if (gimple_asm_noutputs (gs))
+        {
+          if (flags & TDF_RAW)
+            {
+              newline_and_indent (buffer, spc + 2);
+              pp_string (buffer, "OUTPUT: ");
+            }
+          else
+            pp_string (buffer, " : ");
+        }
+
+      for (i = 0; i < gimple_asm_noutputs (gs); i++)
+        {
+          dump_generic_node (buffer, gimple_asm_output_op (gs, i), spc, flags,
+                             false);
+          if (i < gimple_asm_noutputs (gs) - 1)
+            pp_string (buffer, ", ");
+        }
+
+      if (gimple_asm_ninputs (gs))
+        {
+          if (flags & TDF_RAW)
+            {
+              newline_and_indent (buffer, spc + 2);
+              pp_string (buffer, "INPUT: ");
+            }
+          else
+            pp_string (buffer, " : ");
+        }
+
+      for (i = 0; i < gimple_asm_ninputs (gs); i++)
+        {
+          dump_generic_node (buffer, gimple_asm_input_op (gs, i), spc, flags,
+                             false);
+          if (i < gimple_asm_ninputs (gs) - 1)
+            pp_string (buffer, ", ");
+        }
+
+      if (gimple_asm_nclobbers (gs))
+        {
+          if (flags & TDF_RAW)
+            {
+              newline_and_indent (buffer, spc + 2);
+              pp_string (buffer, "CLOBBER: ");
+            }
+          else
+            pp_string (buffer, " : ");
+        }
+
+      for (i = 0; i < gimple_asm_nclobbers (gs); i++)
+        {
+          dump_generic_node (buffer, gimple_asm_clobber_op (gs, i), spc, flags,
+                             false);
+          if (i < gimple_asm_nclobbers (gs) - 1)
+            pp_string (buffer, ", ");
+        }
+    }
+  if (flags & TDF_RAW)
+    {
+      newline_and_indent (buffer, spc);
+      pp_character (buffer, '>');
+    }
+  else
+    pp_string (buffer, ");");
+}
+
+
+/* Dump the set of decls SYMS.  BUFFER and FLAGS are as in
+   dump_generic_node.  */
+
+static void
+dump_symbols (pretty_printer *buffer, bitmap syms, int flags)
+{
+  unsigned i;
+  bitmap_iterator bi;
+
+  if (syms == NULL)
+    pp_string (buffer, "NIL");
+  else
+    {
+      pp_string (buffer, " { ");
+
+      EXECUTE_IF_SET_IN_BITMAP (syms, 0, i, bi)
+       {
+         tree sym = referenced_var_lookup (i);
+         dump_generic_node (buffer, sym, 0, flags, false);
+         pp_string (buffer, " ");
+       }
+
+      pp_string (buffer, "}");
+    }
+}
+
+
+/* Dump a PHI node PHI.  BUFFER, SPC and FLAGS are as in
+   dump_gimple_stmt.  */
+
+static void
+dump_gimple_phi (pretty_printer *buffer, gimple phi, int spc, int flags)
+{
+  size_t i;
+
+  if (flags & TDF_RAW)
+      dump_gimple_fmt (buffer, spc, flags, "%G <%T, ", phi,
+                       gimple_phi_result (phi));
+  else
+    {
+      dump_generic_node (buffer, gimple_phi_result (phi), spc, flags, false);
+      pp_string (buffer, " = PHI <");
+    }
+  for (i = 0; i < gimple_phi_num_args (phi); i++)
+    {
+      dump_generic_node (buffer, gimple_phi_arg_def (phi, i), spc, flags,
+                        false);
+      pp_string (buffer, "(");
+      pp_decimal_int (buffer, gimple_phi_arg_edge (phi, i)->src->index);
+      pp_string (buffer, ")");
+      if (i < gimple_phi_num_args (phi) - 1)
+       pp_string (buffer, ", ");
+    }
+  pp_string (buffer, ">");
+}
+
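+/* For illustration (the SSA names and block indices are hypothetical), a
+   PHI node for x_4 merging x_1 from block 2 and x_2 from block 3 would be
+   printed by the non-TDF_RAW branch above roughly as:
+
+     x_4 = PHI <x_1(2), x_2(3)>
+
+   where each parenthesized number is the index of the source block of the
+   corresponding incoming edge.  */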
+
+/* Dump a GIMPLE_OMP_PARALLEL tuple on the pretty_printer BUFFER, SPC spaces
+   of indent.  FLAGS specifies details to show in the dump (see TDF_* in
+   tree-pass.h).  */
+
+static void
+dump_gimple_omp_parallel (pretty_printer *buffer, gimple gs, int spc,
+                          int flags)
+{
+  if (flags & TDF_RAW)
+    {
+      dump_gimple_fmt (buffer, spc, flags, "%G <%+BODY <%S>%nCLAUSES <", gs,
+                       gimple_omp_body (gs));
+      dump_omp_clauses (buffer, gimple_omp_parallel_clauses (gs), spc, flags);
+      dump_gimple_fmt (buffer, spc, flags, " >, %T, %T%n>",
+                       gimple_omp_parallel_child_fn (gs),
+                       gimple_omp_parallel_data_arg (gs));
+    }
+  else
+    {
+      gimple_seq body;
+      pp_string (buffer, "#pragma omp parallel");
+      dump_omp_clauses (buffer, gimple_omp_parallel_clauses (gs), spc, flags);
+      if (gimple_omp_parallel_child_fn (gs))
+       {
+         pp_string (buffer, " [child fn: ");
+         dump_generic_node (buffer, gimple_omp_parallel_child_fn (gs),
+                            spc, flags, false);
+         pp_string (buffer, " (");
+         if (gimple_omp_parallel_data_arg (gs))
+           dump_generic_node (buffer, gimple_omp_parallel_data_arg (gs),
+                              spc, flags, false);
+         else
+           pp_string (buffer, "???");
+         pp_string (buffer, ")]");
+       }
+      body = gimple_omp_body (gs);
+      if (body && gimple_code (gimple_seq_first_stmt (body)) != GIMPLE_BIND)
+       {
+         newline_and_indent (buffer, spc + 2);
+         pp_character (buffer, '{');
+         pp_newline (buffer);
+         dump_gimple_seq (buffer, body, spc + 4, flags);
+         newline_and_indent (buffer, spc + 2);
+         pp_character (buffer, '}');
+       }
+      else if (body)
+       {
+         pp_newline (buffer);
+         dump_gimple_seq (buffer, body, spc + 2, flags);
+       }
+    }
+}
+
+
+/* Dump a GIMPLE_OMP_TASK tuple on the pretty_printer BUFFER, SPC spaces
+   of indent.  FLAGS specifies details to show in the dump (see TDF_* in
+   tree-pass.h).  */
+
+static void
+dump_gimple_omp_task (pretty_printer *buffer, gimple gs, int spc,
+                     int flags)
+{
+  if (flags & TDF_RAW)
+    {
+      dump_gimple_fmt (buffer, spc, flags, "%G <%+BODY <%S>%nCLAUSES <", gs,
+                       gimple_omp_body (gs));
+      dump_omp_clauses (buffer, gimple_omp_task_clauses (gs), spc, flags);
+      dump_gimple_fmt (buffer, spc, flags, " >, %T, %T, %T, %T, %T%n>",
+                       gimple_omp_task_child_fn (gs),
+                       gimple_omp_task_data_arg (gs),
+                      gimple_omp_task_copy_fn (gs),
+                      gimple_omp_task_arg_size (gs),
+                      gimple_omp_task_arg_align (gs));
+    }
+  else
+    {
+      gimple_seq body;
+      pp_string (buffer, "#pragma omp task");
+      dump_omp_clauses (buffer, gimple_omp_task_clauses (gs), spc, flags);
+      if (gimple_omp_task_child_fn (gs))
+       {
+         pp_string (buffer, " [child fn: ");
+         dump_generic_node (buffer, gimple_omp_task_child_fn (gs),
+                            spc, flags, false);
+         pp_string (buffer, " (");
+         if (gimple_omp_task_data_arg (gs))
+           dump_generic_node (buffer, gimple_omp_task_data_arg (gs),
+                              spc, flags, false);
+         else
+           pp_string (buffer, "???");
+         pp_string (buffer, ")]");
+       }
+      body = gimple_omp_body (gs);
+      if (body && gimple_code (gimple_seq_first_stmt (body)) != GIMPLE_BIND)
+       {
+         newline_and_indent (buffer, spc + 2);
+         pp_character (buffer, '{');
+         pp_newline (buffer);
+         dump_gimple_seq (buffer, body, spc + 4, flags);
+         newline_and_indent (buffer, spc + 2);
+         pp_character (buffer, '}');
+       }
+      else if (body)
+       {
+         pp_newline (buffer);
+         dump_gimple_seq (buffer, body, spc + 2, flags);
+       }
+    }
+}
+
+
+/* Dump a GIMPLE_OMP_ATOMIC_LOAD tuple on the pretty_printer BUFFER, SPC
+   spaces of indent.  FLAGS specifies details to show in the dump (see TDF_*
+   in tree-pass.h).  */
+
+static void
+dump_gimple_omp_atomic_load (pretty_printer *buffer, gimple gs, int spc,
+                             int flags)
+{
+  if (flags & TDF_RAW)
+    {
+      dump_gimple_fmt (buffer, spc, flags, "%G <%T, %T>", gs,
+                       gimple_omp_atomic_load_lhs (gs),
+                       gimple_omp_atomic_load_rhs (gs));
+    }
+  else
+    {
+      pp_string (buffer, "#pragma omp atomic_load");
+      newline_and_indent (buffer, spc + 2);
+      dump_generic_node (buffer, gimple_omp_atomic_load_lhs (gs),
+                        spc, flags, false);
+      pp_space (buffer);
+      pp_character (buffer, '=');
+      pp_space (buffer);
+      pp_character (buffer, '*');
+      dump_generic_node (buffer, gimple_omp_atomic_load_rhs (gs),
+                        spc, flags, false);
+    }
+}
+
+/* Dump a GIMPLE_OMP_ATOMIC_STORE tuple on the pretty_printer BUFFER, SPC
+   spaces of indent.  FLAGS specifies details to show in the dump (see TDF_*
+   in tree-pass.h).  */
+
+static void
+dump_gimple_omp_atomic_store (pretty_printer *buffer, gimple gs, int spc,
+                             int flags)
+{
+  if (flags & TDF_RAW)
+    {
+      dump_gimple_fmt (buffer, spc, flags, "%G <%T>", gs,
+                       gimple_omp_atomic_store_val (gs));
+    }
+  else
+    {
+      pp_string (buffer, "#pragma omp atomic_store (");
+      dump_generic_node (buffer, gimple_omp_atomic_store_val (gs),
+                        spc, flags, false);
+      pp_character (buffer, ')');
+    }
+}
+
+/* Dump a GIMPLE_CHANGE_DYNAMIC_TYPE statement GS.  BUFFER, SPC and
+   FLAGS are as in dump_gimple_stmt.  */
+
+static void
+dump_gimple_cdt (pretty_printer *buffer, gimple gs, int spc, int flags)
+{
+  if (flags & TDF_RAW)
+    dump_gimple_fmt (buffer, spc, flags, "%G <%T, %T>", gs,
+                     gimple_cdt_new_type (gs), gimple_cdt_location (gs));
+  else
+    {
+      pp_string (buffer, "<<<change_dynamic_type (");
+      dump_generic_node (buffer, gimple_cdt_new_type (gs), spc + 2, flags,
+                         false);
+      pp_string (buffer, ") ");
+      dump_generic_node (buffer, gimple_cdt_location (gs), spc + 2, flags,
+                         false);
+      pp_string (buffer, ")>>>");
+    }
+}
+
+
+/* Dump all the memory operands for statement GS.  BUFFER, SPC and
+   FLAGS are as in dump_gimple_stmt.  */
+
+static void
+dump_gimple_mem_ops (pretty_printer *buffer, gimple gs, int spc, int flags)
+{
+  struct voptype_d *vdefs;
+  struct voptype_d *vuses;
+  int i, n;
+
+  if (!ssa_operands_active () || !gimple_references_memory_p (gs))
+    return;
+
+  /* Even if the statement doesn't have virtual operators yet, it may
+     contain symbol information (this happens before aliases have been
+     computed).  */
+  if ((flags & TDF_MEMSYMS)
+      && gimple_vuse_ops (gs) == NULL
+      && gimple_vdef_ops (gs) == NULL)
+    {
+      if (gimple_loaded_syms (gs))
+       {
+         pp_string (buffer, "# LOADS: ");
+         dump_symbols (buffer, gimple_loaded_syms (gs), flags);
+         newline_and_indent (buffer, spc);
+       }
+
+      if (gimple_stored_syms (gs))
+       {
+         pp_string (buffer, "# STORES: ");
+         dump_symbols (buffer, gimple_stored_syms (gs), flags);
+         newline_and_indent (buffer, spc);
+       }
+
+      return;
+    }
+
+  vuses = gimple_vuse_ops (gs);
+  while (vuses)
+    {
+      pp_string (buffer, "# VUSE <");
+
+      n = VUSE_NUM (vuses);
+      for (i = 0; i < n; i++)
+       {
+         dump_generic_node (buffer, VUSE_OP (vuses, i), spc + 2, flags, false);
+         if (i < n - 1)
+           pp_string (buffer, ", ");
+       }
+
+      pp_string (buffer, ">");
+
+      if (flags & TDF_MEMSYMS)
+       dump_symbols (buffer, gimple_loaded_syms (gs), flags);
+
+      newline_and_indent (buffer, spc);
+      vuses = vuses->next;
+    }
+
+  vdefs = gimple_vdef_ops (gs);
+  while (vdefs)
+    {
+      pp_string (buffer, "# ");
+      dump_generic_node (buffer, VDEF_RESULT (vdefs), spc + 2, flags, false);
+      pp_string (buffer, " = VDEF <");
+
+      n = VDEF_NUM (vdefs);
+      for (i = 0; i < n; i++)
+       {
+         dump_generic_node (buffer, VDEF_OP (vdefs, i), spc + 2, flags, 0);
+         if (i < n - 1)
+           pp_string (buffer, ", ");
+       }
+
+      pp_string (buffer, ">");
+
+      if ((flags & TDF_MEMSYMS) && vdefs->next == NULL)
+       dump_symbols (buffer, gimple_stored_syms (gs), flags);
+
+      newline_and_indent (buffer, spc);
+      vdefs = vdefs->next;
+    }
+}
+
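+/* For illustration (the virtual operands shown are hypothetical), a
+   statement that both reads and writes memory through a symbol a would
+   typically be preceded in the dump by lines of the form:
+
+     # VUSE <a_3>
+     # a_4 = VDEF <a_3>
+
+   produced by the two loops above, one line per VUSE/VDEF operand.  */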
+
+/* Dump the gimple statement GS on the pretty printer BUFFER, SPC
+   spaces of indent.  FLAGS specifies details to show in the dump (see
+   TDF_* in tree-pass.h).  */
+
+void
+dump_gimple_stmt (pretty_printer *buffer, gimple gs, int spc, int flags)
+{
+  if (!gs)
+    return;
+
+  if (flags & TDF_STMTADDR)
+    pp_printf (buffer, "<&%p> ", (void *) gs);
+
+  if ((flags & TDF_LINENO) && gimple_has_location (gs))
+    {
+      expanded_location xloc = expand_location (gimple_location (gs));
+      pp_character (buffer, '[');
+      if (xloc.file)
+       {
+         pp_string (buffer, xloc.file);
+         pp_string (buffer, " : ");
+       }
+      pp_decimal_int (buffer, xloc.line);
+      pp_string (buffer, "] ");
+    }
+
+  if ((flags & (TDF_VOPS|TDF_MEMSYMS))
+      && gimple_has_mem_ops (gs))
+    dump_gimple_mem_ops (buffer, gs, spc, flags);
+
+  switch (gimple_code (gs))
+    {
+    case GIMPLE_ASM:
+      dump_gimple_asm (buffer, gs, spc, flags);
+      break;
+
+    case GIMPLE_ASSIGN:
+      dump_gimple_assign (buffer, gs, spc, flags);
+      break;
+
+    case GIMPLE_BIND:
+      dump_gimple_bind (buffer, gs, spc, flags);
+      break;
+
+    case GIMPLE_CALL:
+      dump_gimple_call (buffer, gs, spc, flags);
+      break;
+
+    case GIMPLE_COND:
+      dump_gimple_cond (buffer, gs, spc, flags);
+      break;
+
+    case GIMPLE_LABEL:
+      dump_gimple_label (buffer, gs, spc, flags);
+      break;
+
+    case GIMPLE_GOTO:
+      dump_gimple_goto (buffer, gs, spc, flags);
+      break;
+
+    case GIMPLE_NOP:
+      pp_string (buffer, "GIMPLE_NOP");
+      break;
+
+    case GIMPLE_RETURN:
+      dump_gimple_return (buffer, gs, spc, flags);
+      break;
+
+    case GIMPLE_SWITCH:
+      dump_gimple_switch (buffer, gs, spc, flags);
+      break;
+
+    case GIMPLE_TRY:
+      dump_gimple_try (buffer, gs, spc, flags);
+      break;
+
+    case GIMPLE_PHI:
+      dump_gimple_phi (buffer, gs, spc, flags);
+      break;
+
+    case GIMPLE_OMP_PARALLEL:
+      dump_gimple_omp_parallel (buffer, gs, spc, flags);
+      break;
+
+    case GIMPLE_OMP_TASK:
+      dump_gimple_omp_task (buffer, gs, spc, flags);
+      break;
+
+    case GIMPLE_OMP_ATOMIC_LOAD:
+      dump_gimple_omp_atomic_load (buffer, gs, spc, flags);
+      break;
+
+    case GIMPLE_OMP_ATOMIC_STORE:
+      dump_gimple_omp_atomic_store (buffer, gs, spc, flags);
+      break;
+
+    case GIMPLE_OMP_FOR:
+      dump_gimple_omp_for (buffer, gs, spc, flags);
+      break;
+
+    case GIMPLE_OMP_CONTINUE:
+      dump_gimple_omp_continue (buffer, gs, spc, flags);
+      break;
+
+    case GIMPLE_OMP_SINGLE:
+      dump_gimple_omp_single (buffer, gs, spc, flags);
+      break;
+
+    case GIMPLE_OMP_RETURN:
+      dump_gimple_omp_return (buffer, gs, spc, flags);
+      break;
+
+    case GIMPLE_OMP_SECTIONS:
+      dump_gimple_omp_sections (buffer, gs, spc, flags);
+      break;
+
+    case GIMPLE_OMP_SECTIONS_SWITCH:
+      pp_string (buffer, "GIMPLE_SECTIONS_SWITCH");
+      break;
+
+    case GIMPLE_OMP_MASTER:
+    case GIMPLE_OMP_ORDERED:
+    case GIMPLE_OMP_SECTION:
+      dump_gimple_omp_block (buffer, gs, spc, flags);
+      break;
+
+    case GIMPLE_OMP_CRITICAL:
+      dump_gimple_omp_critical (buffer, gs, spc, flags);
+      break;
+
+    case GIMPLE_CHANGE_DYNAMIC_TYPE:
+      dump_gimple_cdt (buffer, gs, spc, flags);
+      break;
+
+    case GIMPLE_CATCH:
+      dump_gimple_catch (buffer, gs, spc, flags);
+      break;
+
+    case GIMPLE_EH_FILTER:
+      dump_gimple_eh_filter (buffer, gs, spc, flags);
+      break;
+
+    case GIMPLE_RESX:
+      dump_gimple_resx (buffer, gs, spc, flags);
+      break;
+
+    case GIMPLE_PREDICT:
+      pp_string (buffer, "// predicted ");
+      if (gimple_predict_outcome (gs))
+       pp_string (buffer, "likely by ");
+      else
+       pp_string (buffer, "unlikely by ");
+      pp_string (buffer, predictor_name (gimple_predict_predictor (gs)));
+      pp_string (buffer, " predictor.");
+      break;
+
+    default:
+      GIMPLE_NIY;
+    }
+
+  /* If we're building a diagnostic, the formatted text will be
+     written into BUFFER's stream by the caller; otherwise, write it
+     now.  */
+  if (!(flags & TDF_DIAGNOSTIC))
+    pp_write_text_to_stream (buffer);
+}
+
+
+/* Dumps the header of basic block BB to buffer BUFFER, indented by INDENT
+   spaces, with details described by FLAGS.  */
+
+static void
+dump_bb_header (pretty_printer *buffer, basic_block bb, int indent, int flags)
+{
+  edge e;
+  gimple stmt;
+  edge_iterator ei;
+
+  if (flags & TDF_BLOCKS)
+    {
+      INDENT (indent);
+      pp_string (buffer, "# BLOCK ");
+      pp_decimal_int (buffer, bb->index);
+      if (bb->frequency)
+       {
+          pp_string (buffer, " freq:");
+          pp_decimal_int (buffer, bb->frequency);
+       }
+      if (bb->count)
+       {
+          pp_string (buffer, " count:");
+          pp_widest_integer (buffer, bb->count);
+       }
+
+      if (flags & TDF_LINENO)
+       {
+         gimple_stmt_iterator gsi;
+
+         for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+           if (get_lineno (gsi_stmt (gsi)) != -1)
+             {
+               pp_string (buffer, ", starting at line ");
+               pp_decimal_int (buffer, get_lineno (gsi_stmt (gsi)));
+               break;
+             }
+       }
+      newline_and_indent (buffer, indent);
+
+      pp_string (buffer, "# PRED:");
+      pp_write_text_to_stream (buffer);
+      FOR_EACH_EDGE (e, ei, bb->preds)
+       if (flags & TDF_SLIM)
+         {
+           pp_string (buffer, " ");
+           if (e->src == ENTRY_BLOCK_PTR)
+             pp_string (buffer, "ENTRY");
+           else
+             pp_decimal_int (buffer, e->src->index);
+         }
+       else
+         dump_edge_info (buffer->buffer->stream, e, 0);
+      pp_newline (buffer);
+    }
+  else
+    {
+      stmt = first_stmt (bb);
+      if (!stmt || gimple_code (stmt) != GIMPLE_LABEL)
+       {
+         INDENT (indent - 2);
+         pp_string (buffer, "<bb ");
+         pp_decimal_int (buffer, bb->index);
+         pp_string (buffer, ">:");
+         pp_newline (buffer);
+       }
+    }
+  pp_write_text_to_stream (buffer);
+  check_bb_profile (bb, buffer->buffer->stream);
+}
+
+
+/* Dumps the end of basic block BB to buffer BUFFER, indented by INDENT
+   spaces, with details described by FLAGS.  */
+
+static void
+dump_bb_end (pretty_printer *buffer, basic_block bb, int indent, int flags)
+{
+  edge e;
+  edge_iterator ei;
+
+  INDENT (indent);
+  pp_string (buffer, "# SUCC:");
+  pp_write_text_to_stream (buffer);
+  FOR_EACH_EDGE (e, ei, bb->succs)
+    if (flags & TDF_SLIM)
+      {
+       pp_string (buffer, " ");
+       if (e->dest == EXIT_BLOCK_PTR)
+         pp_string (buffer, "EXIT");
+       else
+         pp_decimal_int (buffer, e->dest->index);
+      }
+    else
+      dump_edge_info (buffer->buffer->stream, e, 1);
+  pp_newline (buffer);
+}
+
+
+/* Dump PHI nodes of basic block BB to BUFFER with details described
+   by FLAGS and indented by INDENT spaces.  */
+
+static void
+dump_phi_nodes (pretty_printer *buffer, basic_block bb, int indent, int flags)
+{
+  gimple_stmt_iterator i;
+
+  for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
+    {
+      gimple phi = gsi_stmt (i);
+      if (is_gimple_reg (gimple_phi_result (phi)) || (flags & TDF_VOPS))
+        {
+          INDENT (indent);
+          pp_string (buffer, "# ");
+          dump_gimple_phi (buffer, phi, indent, flags);
+          pp_newline (buffer);
+        }
+    }
+}
+
+
+/* Dump jump to basic block BB that is represented implicitly in the cfg
+   to BUFFER.  */
+
+static void
+pp_cfg_jump (pretty_printer *buffer, basic_block bb)
+{
+  gimple stmt;
+
+  stmt = first_stmt (bb);
+
+  pp_string (buffer, "goto <bb ");
+  pp_decimal_int (buffer, bb->index);
+  pp_string (buffer, ">");
+  if (stmt && gimple_code (stmt) == GIMPLE_LABEL)
+    {
+      pp_string (buffer, " (");
+      dump_generic_node (buffer, gimple_label_label (stmt), 0, 0, false);
+      pp_string (buffer, ")");
+      pp_semicolon (buffer);
+    }
+  else
+    pp_semicolon (buffer);
+}
+
+
+/* Dump edges represented implicitly in basic block BB to BUFFER, indented
+   by INDENT spaces, with details given by FLAGS.  */
+
+static void
+dump_implicit_edges (pretty_printer *buffer, basic_block bb, int indent,
+                    int flags)
+{
+  edge e;
+  edge_iterator ei;
+  gimple stmt;
+
+  stmt = last_stmt (bb);
+
+  if (stmt && gimple_code (stmt) == GIMPLE_COND)
+    {
+      edge true_edge, false_edge;
+
+      /* When we are emitting the code or changing CFG, it is possible that
+        the edges are not yet created.  When we are using debug_bb in such
+        a situation, we do not want it to crash.  */
+      if (EDGE_COUNT (bb->succs) != 2)
+       return;
+      extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
+
+      INDENT (indent + 2);
+      pp_cfg_jump (buffer, true_edge->dest);
+      newline_and_indent (buffer, indent);
+      pp_string (buffer, "else");
+      newline_and_indent (buffer, indent + 2);
+      pp_cfg_jump (buffer, false_edge->dest);
+      pp_newline (buffer);
+      return;
+    }
+
+  /* If there is a fallthru edge, we may need to add an artificial
+     goto to the dump.  */
+  FOR_EACH_EDGE (e, ei, bb->succs)
+    if (e->flags & EDGE_FALLTHRU)
+      break;
+
+  if (e && e->dest != bb->next_bb)
+    {
+      INDENT (indent);
+
+      if ((flags & TDF_LINENO)
+         && e->goto_locus != UNKNOWN_LOCATION
+         )
+       {
+         expanded_location goto_xloc;
+         goto_xloc = expand_location (e->goto_locus);
+         pp_character (buffer, '[');
+         if (goto_xloc.file)
+           {
+             pp_string (buffer, goto_xloc.file);
+             pp_string (buffer, " : ");
+           }
+         pp_decimal_int (buffer, goto_xloc.line);
+         pp_string (buffer, "] ");
+       }
+
+      pp_cfg_jump (buffer, e->dest);
+      pp_newline (buffer);
+    }
+}
+
+
+/* Dumps basic block BB to buffer BUFFER with details described by FLAGS and
+   indented by INDENT spaces.  */
+
+static void
+gimple_dump_bb_buff (pretty_printer *buffer, basic_block bb, int indent,
+                    int flags)
+{
+  gimple_stmt_iterator gsi;
+  gimple stmt;
+  int label_indent = indent - 2;
+
+  if (label_indent < 0)
+    label_indent = 0;
+
+  dump_bb_header (buffer, bb, indent, flags);
+  dump_phi_nodes (buffer, bb, indent, flags);
+
+  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+    {
+      int curr_indent;
+
+      stmt = gsi_stmt (gsi);
+
+      curr_indent = gimple_code (stmt) == GIMPLE_LABEL ? label_indent : indent;
+
+      INDENT (curr_indent);
+      dump_gimple_stmt (buffer, stmt, curr_indent, flags);
+      pp_newline (buffer);
+      dump_histograms_for_stmt (cfun, buffer->buffer->stream, stmt);
+    }
+
+  dump_implicit_edges (buffer, bb, indent, flags);
+
+  if (flags & TDF_BLOCKS)
+    dump_bb_end (buffer, bb, indent, flags);
+}
+
+
+/* Dumps basic block BB to FILE with details described by FLAGS and
+   indented by INDENT spaces.  */
+
+void
+gimple_dump_bb (basic_block bb, FILE *file, int indent, int flags)
+{
+  maybe_init_pretty_print (file);
+  gimple_dump_bb_buff (&buffer, bb, indent, flags);
+  pp_flush (&buffer);
+}
diff --git a/gcc/gimple.c b/gcc/gimple.c
new file mode 100644 (file)
index 0000000..69cfd4e
--- /dev/null
@@ -0,0 +1,3144 @@
+/* Gimple IR support functions.
+
+   Copyright 2007, 2008 Free Software Foundation, Inc.
+   Contributed by Aldy Hernandez <aldyh@redhat.com>
+
+This file is part of GCC.
+
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 3, or (at your option) any later
+version.
+
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
+FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+for more details.
+
+You should have received a copy of the GNU General Public License
+along with GCC; see the file COPYING3.  If not see
+<http://www.gnu.org/licenses/>.  */
+
+#include "config.h"
+#include "system.h"
+#include "coretypes.h"
+#include "tm.h"
+#include "tree.h"
+#include "ggc.h"
+#include "errors.h"
+#include "hard-reg-set.h"
+#include "basic-block.h"
+#include "gimple.h"
+#include "diagnostic.h"
+#include "tree-flow.h"
+#include "value-prof.h"
+#include "flags.h"
+
+#define DEFGSCODE(SYM, NAME, STRUCT)   NAME,
+const char *const gimple_code_name[] = {
+#include "gimple.def"
+};
+#undef DEFGSCODE
+
+/* All the tuples have their operand vector at the very bottom
+   of the structure.  Therefore, the offset required to find the
+   operands vector is the size of the structure minus the size of the
+   one-element tree array at the end (see gimple_ops).  */
+#define DEFGSCODE(SYM, NAME, STRUCT)   (sizeof (STRUCT) - sizeof (tree)),
+const size_t gimple_ops_offset_[] = {
+#include "gimple.def"
+};
+#undef DEFGSCODE
+
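+/* For illustration, this table lets the operand vector of a statement be
+   located without knowing its concrete structure type.  A sketch of the
+   kind of computation an accessor such as gimple_ops may perform
+   (assuming the structures end in a one-element tree array, as described
+   above):
+
+     tree *ops = (tree *) ((char *) gs + gimple_ops_offset_[gimple_code (gs)]);
+
+   so ops[i] is the i-th operand of GS.  */
+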
+#ifdef GATHER_STATISTICS
+/* Gimple stats.  */
+
+int gimple_alloc_counts[(int) gimple_alloc_kind_all];
+int gimple_alloc_sizes[(int) gimple_alloc_kind_all];
+
+/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
+static const char * const gimple_alloc_kind_names[] = {
+    "assignments",
+    "phi nodes",
+    "conditionals",
+    "sequences",
+    "everything else"
+};
+
+#endif /* GATHER_STATISTICS */
+
+/* A cache of gimple_seq objects.  Sequences are created and destroyed
+   fairly often during gimplification.  */
+static GTY ((deletable)) struct gimple_seq_d *gimple_seq_cache;
+
+/* Private API manipulation functions shared only with some
+   other files.  */
+extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
+extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);
+
+/* Gimple tuple constructors.
+   Note: Any constructor taking a ``gimple_seq'' as a parameter can
+   be passed a NULL to start with an empty sequence.  */
+
+/* Set the code for statement G to CODE.  */
+
+static inline void
+gimple_set_code (gimple g, enum gimple_code code)
+{
+  g->gsbase.code = code;
+}
+
+
+/* Return the GSS_* identifier for the given GIMPLE statement CODE.  */
+
+static enum gimple_statement_structure_enum
+gss_for_code (enum gimple_code code)
+{
+  switch (code)
+    {
+    case GIMPLE_ASSIGN:
+    case GIMPLE_CALL:
+    case GIMPLE_RETURN:                        return GSS_WITH_MEM_OPS;
+    case GIMPLE_COND:
+    case GIMPLE_GOTO:
+    case GIMPLE_LABEL:
+    case GIMPLE_CHANGE_DYNAMIC_TYPE:
+    case GIMPLE_SWITCH:                        return GSS_WITH_OPS;
+    case GIMPLE_ASM:                   return GSS_ASM;
+    case GIMPLE_BIND:                  return GSS_BIND;
+    case GIMPLE_CATCH:                 return GSS_CATCH;
+    case GIMPLE_EH_FILTER:             return GSS_EH_FILTER;
+    case GIMPLE_NOP:                   return GSS_BASE;
+    case GIMPLE_PHI:                   return GSS_PHI;
+    case GIMPLE_RESX:                  return GSS_RESX;
+    case GIMPLE_TRY:                   return GSS_TRY;
+    case GIMPLE_WITH_CLEANUP_EXPR:     return GSS_WCE;
+    case GIMPLE_OMP_CRITICAL:          return GSS_OMP_CRITICAL;
+    case GIMPLE_OMP_FOR:               return GSS_OMP_FOR;
+    case GIMPLE_OMP_MASTER:            
+    case GIMPLE_OMP_ORDERED:
+    case GIMPLE_OMP_SECTION:           return GSS_OMP;
+    case GIMPLE_OMP_RETURN:
+    case GIMPLE_OMP_SECTIONS_SWITCH:    return GSS_BASE;
+    case GIMPLE_OMP_CONTINUE:          return GSS_OMP_CONTINUE;
+    case GIMPLE_OMP_PARALLEL:          return GSS_OMP_PARALLEL;
+    case GIMPLE_OMP_TASK:              return GSS_OMP_TASK;
+    case GIMPLE_OMP_SECTIONS:          return GSS_OMP_SECTIONS;
+    case GIMPLE_OMP_SINGLE:            return GSS_OMP_SINGLE;
+    case GIMPLE_OMP_ATOMIC_LOAD:       return GSS_OMP_ATOMIC_LOAD;
+    case GIMPLE_OMP_ATOMIC_STORE:      return GSS_OMP_ATOMIC_STORE;
+    case GIMPLE_PREDICT:               return GSS_BASE;
+    default:                           gcc_unreachable ();
+    }
+}
+
+
+/* Return the number of bytes needed to hold a GIMPLE statement with
+   code CODE.  */
+
+static size_t
+gimple_size (enum gimple_code code)
+{
+  enum gimple_statement_structure_enum gss = gss_for_code (code);
+
+  if (gss == GSS_WITH_OPS)
+    return sizeof (struct gimple_statement_with_ops);
+  else if (gss == GSS_WITH_MEM_OPS)
+    return sizeof (struct gimple_statement_with_memory_ops);
+
+  switch (code)
+    {
+    case GIMPLE_ASM:
+      return sizeof (struct gimple_statement_asm);
+    case GIMPLE_NOP:
+      return sizeof (struct gimple_statement_base);
+    case GIMPLE_BIND:
+      return sizeof (struct gimple_statement_bind);
+    case GIMPLE_CATCH:
+      return sizeof (struct gimple_statement_catch);
+    case GIMPLE_EH_FILTER:
+      return sizeof (struct gimple_statement_eh_filter);
+    case GIMPLE_TRY:
+      return sizeof (struct gimple_statement_try);
+    case GIMPLE_RESX:
+      return sizeof (struct gimple_statement_resx);
+    case GIMPLE_OMP_CRITICAL:
+      return sizeof (struct gimple_statement_omp_critical);
+    case GIMPLE_OMP_FOR:
+      return sizeof (struct gimple_statement_omp_for);
+    case GIMPLE_OMP_PARALLEL:
+      return sizeof (struct gimple_statement_omp_parallel);
+    case GIMPLE_OMP_TASK:
+      return sizeof (struct gimple_statement_omp_task);
+    case GIMPLE_OMP_SECTION:
+    case GIMPLE_OMP_MASTER:
+    case GIMPLE_OMP_ORDERED:
+      return sizeof (struct gimple_statement_omp);
+    case GIMPLE_OMP_RETURN:
+      return sizeof (struct gimple_statement_base);
+    case GIMPLE_OMP_CONTINUE:
+      return sizeof (struct gimple_statement_omp_continue);
+    case GIMPLE_OMP_SECTIONS:
+      return sizeof (struct gimple_statement_omp_sections);
+    case GIMPLE_OMP_SECTIONS_SWITCH:
+      return sizeof (struct gimple_statement_base);
+    case GIMPLE_OMP_SINGLE:
+      return sizeof (struct gimple_statement_omp_single);
+    case GIMPLE_OMP_ATOMIC_LOAD:
+      return sizeof (struct gimple_statement_omp_atomic_load);
+    case GIMPLE_OMP_ATOMIC_STORE:
+      return sizeof (struct gimple_statement_omp_atomic_store);
+    case GIMPLE_WITH_CLEANUP_EXPR:
+      return sizeof (struct gimple_statement_wce);
+    case GIMPLE_CHANGE_DYNAMIC_TYPE:
+      return sizeof (struct gimple_statement_with_ops);
+    case GIMPLE_PREDICT:
+      return sizeof (struct gimple_statement_base);
+    default:
+      break;
+    }
+
+  gcc_unreachable ();
+}
+
+
+/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
+   operands.  */
+
+#define gimple_alloc(c, n) gimple_alloc_stat (c, n MEM_STAT_INFO)
+static gimple
+gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
+{
+  size_t size;
+  gimple stmt;
+
+  size = gimple_size (code);
+  if (num_ops > 0)
+    size += sizeof (tree) * (num_ops - 1);
+
+#ifdef GATHER_STATISTICS
+  {
+    enum gimple_alloc_kind kind = gimple_alloc_kind (code);
+    gimple_alloc_counts[(int) kind]++;
+    gimple_alloc_sizes[(int) kind] += size;
+  }
+#endif
+
+  stmt = (gimple) ggc_alloc_cleared_stat (size PASS_MEM_STAT);
+  gimple_set_code (stmt, code);
+  gimple_set_num_ops (stmt, num_ops);
+
+  /* Do not call gimple_set_modified here as it has other side
+     effects and this tuple is still not completely built.  */
+  stmt->gsbase.modified = 1;
+
+  return stmt;
+}
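+
+/* A worked example of the size computation above (assuming a binary
+   GIMPLE_ASSIGN, which uses GSS_WITH_MEM_OPS and three operands):
+
+     size = sizeof (struct gimple_statement_with_memory_ops)
+            + (3 - 1) * sizeof (tree);
+
+   i.e. the base structure already contains room for one operand, and the
+   remaining operands extend the trailing array.  */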
+
+/* Set SUBCODE to be the code of the expression computed by statement G.  */
+
+static inline void
+gimple_set_subcode (gimple g, unsigned subcode)
+{
+  /* We only have 16 bits for the RHS code.  Assert that we are not
+     overflowing it.  */
+  gcc_assert (subcode < (1 << 16));
+  g->gsbase.subcode = subcode;
+}
+
+
+
+/* Build a tuple with operands.  CODE is the statement to build (which
+   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the sub-code
+   for the new tuple.  NUM_OPS is the number of operands to allocate.  */ 
+
+#define gimple_build_with_ops(c, s, n) \
+  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)
+
+static gimple
+gimple_build_with_ops_stat (enum gimple_code code, enum tree_code subcode,
+                           unsigned num_ops MEM_STAT_DECL)
+{
+  gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
+  gimple_set_subcode (s, subcode);
+
+  return s;
+}
+
+
+/* Build a GIMPLE_RETURN statement returning RETVAL.  */
+
+gimple
+gimple_build_return (tree retval)
+{
+  gimple s = gimple_build_with_ops (GIMPLE_RETURN, 0, 1);
+  if (retval)
+    gimple_return_set_retval (s, retval);
+  return s;
+}
+
+/* Helper for gimple_build_call, gimple_build_call_vec and
+   gimple_build_call_from_tree.  Build the basic components of a
+   GIMPLE_CALL statement to function FN with NARGS arguments.  */
+
+static inline gimple
+gimple_build_call_1 (tree fn, unsigned nargs)
+{
+  gimple s = gimple_build_with_ops (GIMPLE_CALL, 0, nargs + 3);
+  gimple_set_op (s, 1, fn);
+  return s;
+}
+
+
+/* Build a GIMPLE_CALL statement to function FN with the arguments
+   specified in vector ARGS.  */
+
+gimple
+gimple_build_call_vec (tree fn, VEC(tree, heap) *args)
+{
+  unsigned i;
+  unsigned nargs = VEC_length (tree, args);
+  gimple call = gimple_build_call_1 (fn, nargs);
+
+  for (i = 0; i < nargs; i++)
+    gimple_call_set_arg (call, i, VEC_index (tree, args, i));
+
+  return call;
+}
+
+
+/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
+   arguments.  The ... are the arguments.  */
+
+gimple
+gimple_build_call (tree fn, unsigned nargs, ...)
+{
+  va_list ap;
+  gimple call;
+  unsigned i;
+
+  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
+
+  call = gimple_build_call_1 (fn, nargs);
+
+  va_start (ap, nargs);
+  for (i = 0; i < nargs; i++)
+    gimple_call_set_arg (call, i, va_arg (ap, tree));
+  va_end (ap);
+
+  return call;
+}
+
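+/* A minimal usage sketch (FNDECL, A, B and LHS are hypothetical trees;
+   gimple_call_set_lhs is assumed to be provided by gimple.h):
+
+     gimple call = gimple_build_call (fndecl, 2, a, b);
+     gimple_call_set_lhs (call, lhs);
+
+   The second statement is only needed when the call's return value is
+   used.  */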
+
+/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
+   assumed to be in GIMPLE form already.  Minimal checking is done of
+   this fact.  */
+
+gimple
+gimple_build_call_from_tree (tree t)
+{
+  unsigned i, nargs;
+  gimple call;
+  tree fndecl = get_callee_fndecl (t);
+
+  gcc_assert (TREE_CODE (t) == CALL_EXPR);
+
+  nargs = call_expr_nargs (t);
+  call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);
+
+  for (i = 0; i < nargs; i++)
+    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));
+
+  gimple_set_block (call, TREE_BLOCK (t));
+
+  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
+  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
+  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
+  gimple_call_set_cannot_inline (call, CALL_CANNOT_INLINE_P (t));
+  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
+  gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
+  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
+
+  return call;
+}
+
+
+/* Extract the operands and code for expression EXPR into *SUBCODE_P,
+   *OP1_P and *OP2_P respectively.  */
+
+void
+extract_ops_from_tree (tree expr, enum tree_code *subcode_p, tree *op1_p,
+                      tree *op2_p)
+{
+  enum gimple_rhs_class class;
+
+  *subcode_p = TREE_CODE (expr);
+  class = get_gimple_rhs_class (*subcode_p);
+
+  if (class == GIMPLE_BINARY_RHS)
+    {
+      *op1_p = TREE_OPERAND (expr, 0);
+      *op2_p = TREE_OPERAND (expr, 1);
+    }
+  else if (class == GIMPLE_UNARY_RHS)
+    {
+      *op1_p = TREE_OPERAND (expr, 0);
+      *op2_p = NULL_TREE;
+    }
+  else if (class == GIMPLE_SINGLE_RHS)
+    {
+      *op1_p = expr;
+      *op2_p = NULL_TREE;
+    }
+  else
+    gcc_unreachable ();
+}
+
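+/* For illustration, with hypothetical trees A and B:
+
+     a + b  ->  subcode PLUS_EXPR,   op1 = a, op2 = b    (GIMPLE_BINARY_RHS)
+     -a     ->  subcode NEGATE_EXPR, op1 = a, op2 = NULL (GIMPLE_UNARY_RHS)
+     a      ->  subcode TREE_CODE (a), op1 = a, op2 = NULL
+                (GIMPLE_SINGLE_RHS)
+
+   The classification itself comes from get_gimple_rhs_class.  */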
+
+/* Build a GIMPLE_ASSIGN statement.
+
+   LHS is the left-hand side of the assignment.
+   RHS is the right-hand side; it may be a unary, binary or single
+   operand expression.  */
+
+gimple
+gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
+{
+  enum tree_code subcode;
+  tree op1, op2;
+
+  extract_ops_from_tree (rhs, &subcode, &op1, &op2);
+  return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2
+                                           PASS_MEM_STAT);
+}
+
+
+/* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
+   OP1 and OP2.  If OP2 is NULL then SUBCODE must be of class
+   GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS.  */
+
+gimple
+gimple_build_assign_with_ops_stat (enum tree_code subcode, tree lhs, tree op1,
+                                   tree op2 MEM_STAT_DECL)
+{
+  unsigned num_ops;
+  gimple p;
+
+  /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
+     code).  */
+  num_ops = get_gimple_rhs_num_ops (subcode) + 1;
+  
+  p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, subcode, num_ops
+                                 PASS_MEM_STAT);
+  gimple_assign_set_lhs (p, lhs);
+  gimple_assign_set_rhs1 (p, op1);
+  if (op2)
+    {
+      gcc_assert (num_ops > 2);
+      gimple_assign_set_rhs2 (p, op2);
+    }
+
+  return p;
+}
+
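+/* A minimal usage sketch (LHS, RHS_EXPR, A and B are hypothetical GIMPLE
+   operands; gimple_build_assign and gimple_build_assign_with_ops are
+   assumed to be the MEM_STAT-wrapping macros declared in gimple.h):
+
+     gimple s1 = gimple_build_assign (lhs, rhs_expr);
+     gimple s2 = gimple_build_assign_with_ops (PLUS_EXPR, lhs, a, b);
+
+   The first form extracts the sub-code from RHS_EXPR itself; the second
+   names it explicitly.  */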
+
+/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
+
+   DST/SRC are the destination and source respectively.  You can pass
+   ungimplified trees in DST or SRC, in which case they will be
+   converted to a gimple operand if necessary.
+
+   This function returns the newly created GIMPLE_ASSIGN tuple.  */
+
+inline gimple
+gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
+{ 
+  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
+  gimplify_and_add (t, seq_p);
+  ggc_free (t);
+  return gimple_seq_last_stmt (*seq_p);
+}
+
+
+/* Build a GIMPLE_COND statement.
+
+   PRED_CODE is the comparison code used to compare LHS and RHS.
+   T_LABEL is the label to jump to if the condition is true.
+   F_LABEL is the label to jump to otherwise.  */
+
+gimple
+gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
+                  tree t_label, tree f_label)
+{
+  gimple p;
+
+  gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
+  p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
+  gimple_cond_set_lhs (p, lhs);
+  gimple_cond_set_rhs (p, rhs);
+  gimple_cond_set_true_label (p, t_label);
+  gimple_cond_set_false_label (p, f_label);
+  return p;
+}
+
+
+/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND.  */
+
+void
+gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
+                               tree *lhs_p, tree *rhs_p)
+{
+  gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
+             || TREE_CODE (cond) == TRUTH_NOT_EXPR
+             || is_gimple_min_invariant (cond)
+             || SSA_VAR_P (cond));
+
+  extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);
+
+  /* Canonicalize conditionals of the form 'if (!VAL)'.  */
+  if (*code_p == TRUTH_NOT_EXPR)
+    {
+      *code_p = EQ_EXPR;
+      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
+      *rhs_p = fold_convert (TREE_TYPE (*lhs_p), integer_zero_node);
+    }
+  /* Canonicalize conditionals of the form 'if (VAL)'  */
+  else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
+    {
+      *code_p = NE_EXPR;
+      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
+      *rhs_p = fold_convert (TREE_TYPE (*lhs_p), integer_zero_node);
+    }
+}
+
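+/* For illustration, the canonicalization above means that, for a
+   hypothetical boolean variable b:
+
+     if (!b)  becomes  code EQ_EXPR, lhs = b, rhs = 0
+     if (b)   becomes  code NE_EXPR, lhs = b, rhs = 0
+
+   where 0 is folded to the type of B.  */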
+
+/* Build a GIMPLE_COND statement from the conditional expression tree
+   COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */
+
+gimple
+gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
+{
+  enum tree_code code;
+  tree lhs, rhs;
+
+  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
+  return gimple_build_cond (code, lhs, rhs, t_label, f_label);
+}
+
+/* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
+   boolean expression tree COND.  */
+
+void
+gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
+{
+  enum tree_code code;
+  tree lhs, rhs;
+
+  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
+  gimple_cond_set_condition (stmt, code, lhs, rhs);
+}
+
+/* Build a GIMPLE_LABEL statement for LABEL.  */
+
+gimple
+gimple_build_label (tree label)
+{
+  gimple p = gimple_build_with_ops (GIMPLE_LABEL, 0, 1);
+  gimple_label_set_label (p, label);
+  return p;
+}
+
+/* Build a GIMPLE_GOTO statement to label DEST.  */
+
+gimple
+gimple_build_goto (tree dest)
+{
+  gimple p = gimple_build_with_ops (GIMPLE_GOTO, 0, 1);
+  gimple_goto_set_dest (p, dest);
+  return p;
+}
+
+
+/* Build a GIMPLE_NOP statement.  */
+
+gimple 
+gimple_build_nop (void)
+{
+  return gimple_alloc (GIMPLE_NOP, 0);
+}
+
+
+/* Build a GIMPLE_BIND statement.
+   VARS are the variables in BODY.
+   BLOCK is the containing block.  */
+
+gimple
+gimple_build_bind (tree vars, gimple_seq body, tree block)
+{
+  gimple p = gimple_alloc (GIMPLE_BIND, 0);
+  gimple_bind_set_vars (p, vars);
+  if (body)
+    gimple_bind_set_body (p, body);
+  if (block)
+    gimple_bind_set_block (p, block);
+  return p;
+}
+
+/* Helper function to set the simple fields of an asm stmt.
+
+   STRING is a pointer to a string that is the asm block's assembly code.
+   NINPUTS is the number of register inputs.
+   NOUTPUTS is the number of register outputs.
+   NCLOBBERS is the number of clobbered registers.  */
+
+static inline gimple
+gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs, 
+                    unsigned nclobbers)
+{
+  gimple p;
+  int size = strlen (string);
+
+  p = gimple_build_with_ops (GIMPLE_ASM, 0, ninputs + noutputs + nclobbers);
+
+  p->gimple_asm.ni = ninputs;
+  p->gimple_asm.no = noutputs;
+  p->gimple_asm.nc = nclobbers;
+  p->gimple_asm.string = ggc_alloc_string (string, size);
+
+#ifdef GATHER_STATISTICS
+  gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
+#endif
+  
+  return p;
+}
+
+/* Build a GIMPLE_ASM statement.
+
+   STRING is the assembly code.
+   INPUTS is a vector of the input register parameters.
+   OUTPUTS is a vector of the output register parameters.
+   CLOBBERS is a vector of the clobbered register parameters.  */
+
+gimple
+gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs, 
+                      VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers)
+{
+  gimple p;
+  unsigned i;
+
+  p = gimple_build_asm_1 (string,
+                          VEC_length (tree, inputs),
+                          VEC_length (tree, outputs), 
+                          VEC_length (tree, clobbers));
+  
+  for (i = 0; i < VEC_length (tree, inputs); i++)
+    gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));
+
+  for (i = 0; i < VEC_length (tree, outputs); i++)
+    gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i));
+
+  for (i = 0; i < VEC_length (tree, clobbers); i++)
+    gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));
+  
+  return p;
+}
+
+/* Build a GIMPLE_ASM statement.
+
+   STRING is the assembly code.
+   NINPUTS is the number of register inputs.
+   NOUTPUTS is the number of register outputs.
+   NCLOBBERS is the number of clobbered registers.
+   ... are trees for each input, output and clobbered register.  */
+
+gimple
+gimple_build_asm (const char *string, unsigned ninputs, unsigned noutputs, 
+                 unsigned nclobbers, ...)
+{
+  gimple p;
+  unsigned i;
+  va_list ap;
+  
+  p = gimple_build_asm_1 (string, ninputs, noutputs, nclobbers);
+  
+  va_start (ap, nclobbers);
+
+  for (i = 0; i < ninputs; i++)
+    gimple_asm_set_input_op (p, i, va_arg (ap, tree));
+
+  for (i = 0; i < noutputs; i++)
+    gimple_asm_set_output_op (p, i, va_arg (ap, tree));
+
+  for (i = 0; i < nclobbers; i++)
+    gimple_asm_set_clobber_op (p, i, va_arg (ap, tree));
+
+  va_end (ap);
+  
+  return p;
+}
+
+/* Build a GIMPLE_CATCH statement.
+
+  TYPES are the catch types.
+  HANDLER is the exception handler.  */
+
+gimple
+gimple_build_catch (tree types, gimple_seq handler)
+{
+  gimple p = gimple_alloc (GIMPLE_CATCH, 0);
+  gimple_catch_set_types (p, types);
+  if (handler)
+    gimple_catch_set_handler (p, handler);
+
+  return p;
+}
+
+/* Build a GIMPLE_EH_FILTER statement.
+
+   TYPES are the filter's types.
+   FAILURE is the filter's failure action.  */
+
+gimple
+gimple_build_eh_filter (tree types, gimple_seq failure)
+{
+  gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
+  gimple_eh_filter_set_types (p, types);
+  if (failure)
+    gimple_eh_filter_set_failure (p, failure);
+
+  return p;
+}
+
+/* Build a GIMPLE_TRY statement.
+
+   EVAL is the expression to evaluate.
+   CLEANUP is the cleanup expression.
+   KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
+   whether this is a try/catch or a try/finally respectively.  */
+
+gimple
+gimple_build_try (gimple_seq eval, gimple_seq cleanup,
+                 enum gimple_try_flags kind)
+{
+  gimple p;
+
+  gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
+  p = gimple_alloc (GIMPLE_TRY, 0);
+  gimple_set_subcode (p, kind);
+  if (eval)
+    gimple_try_set_eval (p, eval);
+  if (cleanup)
+    gimple_try_set_cleanup (p, cleanup);
+
+  return p;
+}
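+
+/* A minimal usage sketch for a try/finally region (BODY_SEQ and
+   CLEANUP_SEQ are hypothetical gimple_seq values):
+
+     gimple t = gimple_build_try (body_seq, cleanup_seq, GIMPLE_TRY_FINALLY);
+
+   Passing GIMPLE_TRY_CATCH instead marks the second sequence as the
+   handler of a try/catch region.  */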
+
+/* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.
+
+   CLEANUP is the cleanup expression.  */
+
+gimple
+gimple_build_wce (gimple_seq cleanup)
+{
+  gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
+  if (cleanup)
+    gimple_wce_set_cleanup (p, cleanup);
+
+  return p;
+}
+
+
+/* Build a GIMPLE_RESX statement.
+
+   REGION is the region number from which this resx causes control flow to 
+   leave.  */
+
+gimple
+gimple_build_resx (int region)
+{
+  gimple p = gimple_alloc (GIMPLE_RESX, 0);
+  gimple_resx_set_region (p, region);
+  return p;
+}
+
+
+/* The helper for constructing a gimple switch statement.
+   INDEX is the switch's index.
+   NLABELS is the number of labels in the switch excluding the default.
+   DEFAULT_LABEL is the default label for the switch statement.  */
+
+static inline gimple 
+gimple_build_switch_1 (unsigned nlabels, tree index, tree default_label)
+{
+  /* nlabels + 1 default label + 1 index.  */
+  gimple p = gimple_build_with_ops (GIMPLE_SWITCH, 0, nlabels + 1 + 1);
+  gimple_switch_set_index (p, index);
+  gimple_switch_set_default_label (p, default_label);
+  return p;
+}
+
+
+/* Build a GIMPLE_SWITCH statement.
+
+   INDEX is the switch's index.
+   NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL. 
+   ... are the labels excluding the default.  */
+
+gimple 
+gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...)
+{
+  va_list al;
+  unsigned i;
+  gimple p;
+  
+  p = gimple_build_switch_1 (nlabels, index, default_label);
+
+  /* Store the rest of the labels.  */
+  va_start (al, default_label);
+  for (i = 1; i <= nlabels; i++)
+    gimple_switch_set_label (p, i, va_arg (al, tree));
+  va_end (al);
+
+  return p;
+}
+
+
+/* Build a GIMPLE_SWITCH statement.
+
+   INDEX is the switch's index.
+   DEFAULT_LABEL is the default label.
+   ARGS is a vector of labels excluding the default.  */
+
+gimple
+gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args)
+{
+  unsigned i;
+  unsigned nlabels = VEC_length (tree, args);
+  gimple p = gimple_build_switch_1 (nlabels, index, default_label);
+
+  /* Put the case labels in labels[1 .. NLABELS]; the default label
+     is in labels[0].  */
+  for (i = 1; i <= nlabels; i++)
+    gimple_switch_set_label (p, i, VEC_index (tree, args, i - 1));
+
+  return p;
+}
+
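+/* For illustration (V is a hypothetical VEC of case labels), after the
+   call above the label numbering of the resulting switch is:
+
+     label 0              the default label
+     label 1 .. NLABELS   the elements of V, in order
+
+   where gimple_switch_label is assumed to be the corresponding accessor
+   declared in gimple.h.  */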
+
+/* Build a GIMPLE_OMP_CRITICAL statement.
+
+   BODY is the sequence of statements for which only one thread can execute.
+   NAME is an optional identifier for this critical block.  */
+
+gimple 
+gimple_build_omp_critical (gimple_seq body, tree name)
+{
+  gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
+  gimple_omp_critical_set_name (p, name);
+  if (body)
+    gimple_omp_set_body (p, body);
+
+  return p;
+}
+
+/* Build a GIMPLE_OMP_FOR statement.
+
+   BODY is the sequence of statements inside the for loop.
+   CLAUSES are any of the OMP loop construct's clauses: private, firstprivate,
+   lastprivate, reduction, ordered, schedule, and nowait.
+   COLLAPSE is the collapse count.
+   PRE_BODY is the sequence of statements that are loop invariant.  */
+
+gimple
+gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
+                     gimple_seq pre_body)
+{
+  gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
+  if (body)
+    gimple_omp_set_body (p, body);
+  gimple_omp_for_set_clauses (p, clauses);
+  p->gimple_omp_for.collapse = collapse;
+  p->gimple_omp_for.iter = GGC_CNEWVEC (struct gimple_omp_for_iter, collapse);
+  if (pre_body)
+    gimple_omp_for_set_pre_body (p, pre_body);
+
+  return p;
+}
+
+
+/* Build a GIMPLE_OMP_PARALLEL statement.
+
+   BODY is the sequence of statements which are executed in parallel.
+   CLAUSES are the OMP parallel construct's clauses.
+   CHILD_FN is the function created for the parallel threads to execute.
+   DATA_ARG is the shared data argument(s).  */
+
+gimple 
+gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn, 
+                          tree data_arg)
+{
+  gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
+  if (body)
+    gimple_omp_set_body (p, body);
+  gimple_omp_parallel_set_clauses (p, clauses);
+  gimple_omp_parallel_set_child_fn (p, child_fn);
+  gimple_omp_parallel_set_data_arg (p, data_arg);
+
+  return p;
+}
+
+
+/* Build a GIMPLE_OMP_TASK statement.
+
+   BODY is the sequence of statements which are executed by the explicit task.
+   CLAUSES are the OMP task construct's clauses.
+   CHILD_FN is the function created for the task to execute.
+   DATA_ARG is the shared data argument(s).
+   COPY_FN is the optional function for firstprivate initialization.
+   ARG_SIZE and ARG_ALIGN are size and alignment of the data block.  */
+
+gimple 
+gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
+                      tree data_arg, tree copy_fn, tree arg_size,
+                      tree arg_align)
+{
+  gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
+  if (body)
+    gimple_omp_set_body (p, body);
+  gimple_omp_task_set_clauses (p, clauses);
+  gimple_omp_task_set_child_fn (p, child_fn);
+  gimple_omp_task_set_data_arg (p, data_arg);
+  gimple_omp_task_set_copy_fn (p, copy_fn);
+  gimple_omp_task_set_arg_size (p, arg_size);
+  gimple_omp_task_set_arg_align (p, arg_align);
+
+  return p;
+}
+
+
+/* Build a GIMPLE_OMP_SECTION statement for a sections statement.
+
+   BODY is the sequence of statements in the section.  */
+
+gimple
+gimple_build_omp_section (gimple_seq body)
+{
+  gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
+  if (body)
+    gimple_omp_set_body (p, body);
+
+  return p;
+}
+
+
+/* Build a GIMPLE_OMP_MASTER statement.
+
+   BODY is the sequence of statements to be executed only by the
+   master thread.  */
+
+gimple 
+gimple_build_omp_master (gimple_seq body)
+{
+  gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
+  if (body)
+    gimple_omp_set_body (p, body);
+
+  return p;
+}
+
+
+/* Build a GIMPLE_OMP_CONTINUE statement.
+
+   CONTROL_DEF is the definition of the control variable.
+   CONTROL_USE is the use of the control variable.  */
+
+gimple 
+gimple_build_omp_continue (tree control_def, tree control_use)
+{
+  gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
+  gimple_omp_continue_set_control_def (p, control_def);
+  gimple_omp_continue_set_control_use (p, control_use);
+  return p;
+}
+
+/* Build a GIMPLE_OMP_ORDERED statement.
+
+   BODY is the sequence of statements inside a loop that will be
+   executed in sequence.  */
+
+gimple 
+gimple_build_omp_ordered (gimple_seq body)
+{
+  gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
+  if (body)
+    gimple_omp_set_body (p, body);
+
+  return p;
+}
+
+
+/* Build a GIMPLE_OMP_RETURN statement.
+   WAIT_P is true if this is a non-waiting return.  */
+
+gimple 
+gimple_build_omp_return (bool wait_p)
+{
+  gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
+  if (wait_p)
+    gimple_omp_return_set_nowait (p);
+
+  return p;
+}
+
+
+/* Build a GIMPLE_OMP_SECTIONS statement.
+
+   BODY is a sequence of section statements.
+   CLAUSES are any of the OMP sections construct's clauses: private,
+   firstprivate, lastprivate, reduction, and nowait.  */
+
+gimple 
+gimple_build_omp_sections (gimple_seq body, tree clauses)
+{
+  gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
+  if (body)
+    gimple_omp_set_body (p, body);
+  gimple_omp_sections_set_clauses (p, clauses);
+
+  return p;
+}
+
+
+/* Build a GIMPLE_OMP_SECTIONS_SWITCH.  */
+
+gimple
+gimple_build_omp_sections_switch (void)
+{
+  return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
+}
+
+
+/* Build a GIMPLE_OMP_SINGLE statement.
+
+   BODY is the sequence of statements that will be executed once.
+   CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
+   copyprivate, nowait.  */
+
+gimple 
+gimple_build_omp_single (gimple_seq body, tree clauses)
+{
+  gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
+  if (body)
+    gimple_omp_set_body (p, body);
+  gimple_omp_single_set_clauses (p, clauses);
+
+  return p;
+}
+
+
+/* Build a GIMPLE_CHANGE_DYNAMIC_TYPE statement.  TYPE is the new type
+   for the location PTR.  */
+
+gimple
+gimple_build_cdt (tree type, tree ptr)
+{
+  gimple p = gimple_build_with_ops (GIMPLE_CHANGE_DYNAMIC_TYPE, 0, 2);
+  gimple_cdt_set_new_type (p, type);
+  gimple_cdt_set_location (p, ptr);
+
+  return p;
+}
+
+
+/* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */
+
+gimple
+gimple_build_omp_atomic_load (tree lhs, tree rhs)
+{
+  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
+  gimple_omp_atomic_load_set_lhs (p, lhs);
+  gimple_omp_atomic_load_set_rhs (p, rhs);
+  return p;
+}
+
+/* Build a GIMPLE_OMP_ATOMIC_STORE statement.
+
+   VAL is the value we are storing.  */
+
+gimple
+gimple_build_omp_atomic_store (tree val)
+{
+  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
+  gimple_omp_atomic_store_set_val (p, val);
+  return p;
+}
+
+/* Build a GIMPLE_PREDICT statement.  PREDICTOR is one of the predictors from
+   predict.def, OUTCOME is NOT_TAKEN or TAKEN.  */
+
+gimple
+gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
+{
+  gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
+  /* Ensure all the predictors fit into the lower bits of the subcode.  */
+  gcc_assert (END_PREDICTORS <= GF_PREDICT_TAKEN);
+  gimple_predict_set_predictor (p, predictor);
+  gimple_predict_set_outcome (p, outcome);
+  return p;
+}
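
A minimal sketch of emitting such a hint from a pass, assuming GSI is a valid
statement iterator (PRED_CONTINUE is one of the predictors in predict.def):

   gimple hint = gimple_build_predict (PRED_CONTINUE, NOT_TAKEN);
   gsi_insert_before (&gsi, hint, GSI_SAME_STMT);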
+
+/* Return which gimple structure is used by T.  The enums here are defined
+   in gsstruct.def.  */
+
+enum gimple_statement_structure_enum
+gimple_statement_structure (gimple gs)
+{
+  return gss_for_code (gimple_code (gs));
+}
+
+#if defined ENABLE_GIMPLE_CHECKING && (GCC_VERSION >= 2007)
+/* Complain of a gimple type mismatch and die.  */
+
+void
+gimple_check_failed (const_gimple gs, const char *file, int line,
+                    const char *function, enum gimple_code code,
+                    enum tree_code subcode)
+{
+  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
+                 gimple_code_name[code],
+                 tree_code_name[subcode],
+                 gimple_code_name[gimple_code (gs)],
+                 gs->gsbase.subcode > 0
+                   ? tree_code_name[gs->gsbase.subcode]
+                   : "",
+                 function, trim_filename (file), line);
+}
+
+
+/* Similar to gimple_check_failed, except that instead of specifying a
+   dozen codes, use the knowledge that they're all sequential.  */
+
+void
+gimple_range_check_failed (const_gimple gs, const char *file, int line,
+                          const char *function, enum gimple_code c1,
+                          enum gimple_code c2)
+{
+  char *buffer;
+  unsigned length = 0;
+  enum gimple_code c;
+
+  for (c = c1; c <= c2; ++c)
+    length += 4 + strlen (gimple_code_name[c]);
+
+  length += strlen ("expected ");
+  buffer = XALLOCAVAR (char, length);
+  length = 0;
+
+  for (c = c1; c <= c2; ++c)
+    {
+      const char *prefix = length ? " or " : "expected ";
+
+      strcpy (buffer + length, prefix);
+      length += strlen (prefix);
+      strcpy (buffer + length, gimple_code_name[c]);
+      length += strlen (gimple_code_name[c]);
+    }
+
+  internal_error ("gimple check: %s, have %s in %s, at %s:%d",
+                 buffer, gimple_code_name[gimple_code (gs)],
+                 function, trim_filename (file), line);
+}
+#endif /* ENABLE_GIMPLE_CHECKING */
+
+
+/* Allocate a new GIMPLE sequence in GC memory and return it.  If
+   there are free sequences in GIMPLE_SEQ_CACHE return one of those
+   instead.  */
+
+gimple_seq
+gimple_seq_alloc (void)
+{
+  gimple_seq seq = gimple_seq_cache;
+  if (seq)
+    {
+      gimple_seq_cache = gimple_seq_cache->next_free;
+      gcc_assert (gimple_seq_cache != seq);
+      memset (seq, 0, sizeof (*seq));
+    }
+  else
+    {
+      seq = (gimple_seq) ggc_alloc_cleared (sizeof (*seq));
+#ifdef GATHER_STATISTICS
+      gimple_alloc_counts[(int) gimple_alloc_kind_seq]++;
+      gimple_alloc_sizes[(int) gimple_alloc_kind_seq] += sizeof (*seq);
+#endif
+    }
+
+  return seq;
+}
+
+/* Return SEQ to the free pool of GIMPLE sequences.  */
+
+void
+gimple_seq_free (gimple_seq seq)
+{
+  if (seq == NULL)
+    return;
+
+  gcc_assert (gimple_seq_first (seq) == NULL);
+  gcc_assert (gimple_seq_last (seq) == NULL);
+
+  /* If this triggers, it's a sign that the same list is being freed
+     twice.  */
+  gcc_assert (seq != gimple_seq_cache || gimple_seq_cache == NULL);
+  
+  /* Add SEQ to the pool of free sequences.  */
+  seq->next_free = gimple_seq_cache;
+  gimple_seq_cache = seq;
+}
+
+
+/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
+   *SEQ_P is NULL, a new sequence is allocated.  */
+
+void
+gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
+{
+  gimple_stmt_iterator si;
+
+  if (gs == NULL)
+    return;
+
+  if (*seq_p == NULL)
+    *seq_p = gimple_seq_alloc ();
+
+  si = gsi_last (*seq_p);
+  gsi_insert_after (&si, gs, GSI_NEW_STMT);
+}
+
+
+/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
+   NULL, a new sequence is allocated.  */
+
+void
+gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
+{
+  gimple_stmt_iterator si;
+
+  if (src == NULL)
+    return;
+
+  if (*dst_p == NULL)
+    *dst_p = gimple_seq_alloc ();
+
+  si = gsi_last (*dst_p);
+  gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
+}
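
These two helpers are the usual way sequences are grown; a small sketch,
assuming stmt_a, stmt_b and tail_seq already exist:

   gimple_seq seq = NULL;
   gimple_seq_add_stmt (&seq, stmt_a);   /* Allocates SEQ on demand.  */
   gimple_seq_add_stmt (&seq, stmt_b);
   gimple_seq_add_seq (&seq, tail_seq);  /* Splice another sequence on.  */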
+
+
+/* Helper function of empty_body_p.  Return true if STMT is an empty
+   statement.  */
+
+static bool
+empty_stmt_p (gimple stmt)
+{
+  if (gimple_code (stmt) == GIMPLE_NOP)
+    return true;
+  if (gimple_code (stmt) == GIMPLE_BIND)
+    return empty_body_p (gimple_bind_body (stmt));
+  return false;
+}
+
+
+/* Return true if BODY contains nothing but empty statements.  */
+
+bool
+empty_body_p (gimple_seq body)
+{
+  gimple_stmt_iterator i;
+
+
+  if (gimple_seq_empty_p (body))
+    return true;
+  for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
+    if (!empty_stmt_p (gsi_stmt (i)))
+      return false;
+
+  return true;
+}
+
+
+/* Perform a deep copy of sequence SRC and return the result.  */
+
+gimple_seq
+gimple_seq_copy (gimple_seq src)
+{
+  gimple_stmt_iterator gsi;
+  gimple_seq new = gimple_seq_alloc ();
+  gimple stmt;
+
+  for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
+    {
+      stmt = gimple_copy (gsi_stmt (gsi));
+      gimple_seq_add_stmt (&new, stmt);
+    }
+
+  return new;
+}
+
+
+/* Walk all the statements in the sequence SEQ calling walk_gimple_stmt
+   on each one.  WI is as in walk_gimple_stmt.
+   
+   If walk_gimple_stmt returns non-NULL, the walk is stopped, the
+   value is stored in WI->CALLBACK_RESULT and the statement that
+   produced the value is returned.
+
+   Otherwise, all the statements are walked and NULL returned.  */
+
+gimple
+walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
+                walk_tree_fn callback_op, struct walk_stmt_info *wi)
+{
+  gimple_stmt_iterator gsi;
+
+  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
+    {
+      tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
+      if (ret)
+       {
+         /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
+            to hold it.  */
+         gcc_assert (wi);
+         wi->callback_result = ret;
+         return gsi_stmt (gsi);
+       }
+    }
+
+  if (wi)
+    wi->callback_result = NULL_TREE;
+
+  return NULL;
+}
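
A hedged example of a statement callback compatible with this walker; it stops
the walk at the first call statement (the callback name and its use are
illustrative only, assuming the usual gimple.h environment):

   /* A walk_stmt_fn that aborts the walk at the first GIMPLE_CALL.  */
   static tree
   find_first_call (gimple_stmt_iterator *gsi, bool *handled_ops,
                    struct walk_stmt_info *wi ATTRIBUTE_UNUSED)
   {
     if (is_gimple_call (gsi_stmt (*gsi)))
       {
         *handled_ops = true;
         return error_mark_node;  /* Any non-NULL tree stops the walk.  */
       }
     return NULL_TREE;
   }

   /* Its use on a sequence SEQ:  */
   struct walk_stmt_info wi;
   memset (&wi, 0, sizeof (wi));
   gimple first_call = walk_gimple_seq (seq, find_first_call, NULL, &wi);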
+
+
+/* Helper function for walk_gimple_stmt.  Walk operands of a GIMPLE_ASM.  */
+
+static tree
+walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
+                struct walk_stmt_info *wi)
+{
+  tree ret;
+  unsigned noutputs;
+  const char **oconstraints;
+  unsigned i;
+  const char *constraint;
+  bool allows_mem, allows_reg, is_inout;
+
+  noutputs = gimple_asm_noutputs (stmt);
+  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
+
+  if (wi)
+    wi->is_lhs = true;
+
+  for (i = 0; i < noutputs; i++)
+    {
+      tree op = gimple_asm_output_op (stmt, i);
+      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
+      oconstraints[i] = constraint;
+      parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
+                              &is_inout);
+      if (wi)
+       wi->val_only = (allows_reg || !allows_mem);
+      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
+      if (ret)
+       return ret;
+    }
+
+  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
+    {
+      tree op = gimple_asm_input_op (stmt, i);
+      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
+      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
+                             oconstraints, &allows_mem, &allows_reg);
+      if (wi)
+       wi->val_only = (allows_reg || !allows_mem);
+
+      /* Although input "m" is not really a LHS, we need an lvalue.  */
+      if (wi)
+       wi->is_lhs = !wi->val_only;
+      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
+      if (ret)
+       return ret;
+    }
+
+  if (wi)
+    {
+      wi->is_lhs = false;
+      wi->val_only = true;
+    }
+
+  return NULL_TREE;
+}
+
+
+/* Helper function of WALK_GIMPLE_STMT.  Walk every tree operand in
+   STMT.  CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.
+
+   CALLBACK_OP is called on each operand of STMT via walk_tree.
+   Additional parameters to walk_tree must be stored in WI.  For each operand
+   OP, walk_tree is called as:
+
+       walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)
+
+   If CALLBACK_OP returns non-NULL for an operand, the remaining
+   operands are not scanned.
+
+   The return value is that returned by the last call to walk_tree, or
+   NULL_TREE if no CALLBACK_OP is specified.  */
+
+inline tree
+walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
+               struct walk_stmt_info *wi)
+{
+  struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
+  unsigned i;
+  tree ret = NULL_TREE;
+
+  switch (gimple_code (stmt))
+    {
+    case GIMPLE_ASSIGN:
+      /* Walk the RHS operands.  A formal temporary LHS may use a
+        COMPONENT_REF RHS.  */
+      if (wi)
+       wi->val_only = !is_gimple_formal_tmp_var (gimple_assign_lhs (stmt));
+
+      for (i = 1; i < gimple_num_ops (stmt); i++)
+       {
+         ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
+                          pset);
+         if (ret)
+           return ret;
+       }
+
+      /* Walk the LHS.  If the RHS is appropriate for a memory, we
+        may use a COMPONENT_REF on the LHS.  */
+      if (wi)
+       {
+          /* If the RHS has more than 1 operand, it is not appropriate
+             for the memory.  */
+         wi->val_only = !is_gimple_mem_rhs (gimple_assign_rhs1 (stmt))
+                         || !gimple_assign_single_p (stmt);
+         wi->is_lhs = true;
+       }
+
+      ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
+      if (ret)
+       return ret;
+
+      if (wi)
+       {
+         wi->val_only = true;
+         wi->is_lhs = false;
+       }
+      break;
+
+    case GIMPLE_CALL:
+      if (wi)
+       wi->is_lhs = false;
+
+      ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
+      if (ret)
+        return ret;
+
+      ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
+      if (ret)
+        return ret;
+
+      for (i = 0; i < gimple_call_num_args (stmt); i++)
+       {
+         ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
+                          pset);
+         if (ret)
+           return ret;
+       }
+
+      if (wi)
+       wi->is_lhs = true;
+
+      ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
+      if (ret)
+       return ret;
+
+      if (wi)
+       wi->is_lhs = false;
+      break;
+
+    case GIMPLE_CATCH:
+      ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
+                      pset);
+      if (ret)
+       return ret;
+      break;
+
+    case GIMPLE_EH_FILTER:
+      ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
+                      pset);
+      if (ret)
+       return ret;
+      break;
+
+    case GIMPLE_CHANGE_DYNAMIC_TYPE:
+      ret = walk_tree (gimple_cdt_location_ptr (stmt), callback_op, wi, pset);
+      if (ret)
+       return ret;
+
+      ret = walk_tree (gimple_cdt_new_type_ptr (stmt), callback_op, wi, pset);
+      if (ret)
+       return ret;
+      break;
+
+    case GIMPLE_ASM:
+      ret = walk_gimple_asm (stmt, callback_op, wi);
+      if (ret)
+       return ret;
+      break;
+
+    case GIMPLE_OMP_CONTINUE:
+      ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
+                      callback_op, wi, pset);
+      if (ret)
+       return ret;
+
+      ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
+                      callback_op, wi, pset);
+      if (ret)
+       return ret;
+      break;
+
+    case GIMPLE_OMP_CRITICAL:
+      ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
+                      pset);
+      if (ret)
+       return ret;
+      break;
+
+    case GIMPLE_OMP_FOR:
+      ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
+                      pset);
+      if (ret)
+       return ret;
+      for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
+       {
+         ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
+                          wi, pset);
+         if (ret)
+           return ret;
+         ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
+                          wi, pset);
+         if (ret)
+           return ret;
+         ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
+                          wi, pset);
+         if (ret)
+           return ret;
+         ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
+                          wi, pset);
+       }
+      if (ret)
+       return ret;
+      break;
+
+    case GIMPLE_OMP_PARALLEL:
+      ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
+                      wi, pset);
+      if (ret)
+       return ret;
+      ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
+                      wi, pset);
+      if (ret)
+       return ret;
+      ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
+                      wi, pset);
+      if (ret)
+       return ret;
+      break;
+
+    case GIMPLE_OMP_TASK:
+      ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
+                      wi, pset);
+      if (ret)
+       return ret;
+      ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
+                      wi, pset);
+      if (ret)
+       return ret;
+      ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
+                      wi, pset);
+      if (ret)
+       return ret;
+      ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
+                      wi, pset);
+      if (ret)
+       return ret;
+      ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
+                      wi, pset);
+      if (ret)
+       return ret;
+      ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
+                      wi, pset);
+      if (ret)
+       return ret;
+      break;
+
+    case GIMPLE_OMP_SECTIONS:
+      ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
+                      wi, pset);
+      if (ret)
+       return ret;
+
+      ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
+                      wi, pset);
+      if (ret)
+       return ret;
+
+      break;
+
+    case GIMPLE_OMP_SINGLE:
+      ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
+                      pset);
+      if (ret)
+       return ret;
+      break;
+
+    case GIMPLE_OMP_ATOMIC_LOAD:
+      ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
+                      pset);
+      if (ret)
+       return ret;
+
+      ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
+                      pset);
+      if (ret)
+       return ret;
+      break;
+
+    case GIMPLE_OMP_ATOMIC_STORE:
+      ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
+                      wi, pset);
+      if (ret)
+       return ret;
+      break;
+
+      /* Tuples that do not have operands.  */
+    case GIMPLE_NOP:
+    case GIMPLE_RESX:
+    case GIMPLE_OMP_RETURN:
+    case GIMPLE_PREDICT:
+      break;
+
+    default:
+      {
+       enum gimple_statement_structure_enum gss;
+       gss = gimple_statement_structure (stmt);
+       if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
+         for (i = 0; i < gimple_num_ops (stmt); i++)
+           {
+             ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
+             if (ret)
+               return ret;
+           }
+      }
+      break;
+    }
+
+  return NULL_TREE;
+}
+
+
+/* Walk the current statement in GSI (optionally using traversal state
+   stored in WI).  If WI is NULL, no state is kept during traversal.
+   The callback CALLBACK_STMT is called.  If CALLBACK_STMT indicates
+   that it has handled all the operands of the statement, its return
+   value is returned.  Otherwise, the return value from CALLBACK_STMT
+   is discarded and its operands are scanned.
+
+   If CALLBACK_STMT is NULL or it didn't handle the operands,
+   CALLBACK_OP is called on each operand of the statement via
+   walk_gimple_op.  If walk_gimple_op returns non-NULL for any
+   operand, the remaining operands are not scanned.  In this case, the
+   return value from CALLBACK_OP is returned.
+
+   In any other case, NULL_TREE is returned.  */
+
+tree
+walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
+                 walk_tree_fn callback_op, struct walk_stmt_info *wi)
+{
+  gimple ret;
+  tree tree_ret;
+  gimple stmt = gsi_stmt (*gsi);
+
+  if (wi)
+    wi->gsi = *gsi;
+
+  if (wi && wi->want_locations && gimple_has_location (stmt))
+    input_location = gimple_location (stmt);
+
+  ret = NULL;
+
+  /* Invoke the statement callback.  Return if the callback handled
+     all of STMT operands by itself.  */
+  if (callback_stmt)
+    {
+      bool handled_ops = false;
+      tree_ret = callback_stmt (gsi, &handled_ops, wi);
+      if (handled_ops)
+       return tree_ret;
+
+      /* If CALLBACK_STMT did not handle operands, it should not have
+        a value to return.  */
+      gcc_assert (tree_ret == NULL);
+
+      /* Re-read stmt in case the callback changed it.  */
+      stmt = gsi_stmt (*gsi);
+    }
+
+  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
+  if (callback_op)
+    {
+      tree_ret = walk_gimple_op (stmt, callback_op, wi);
+      if (tree_ret)
+       return tree_ret;
+    }
+
+  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.  */
+  switch (gimple_code (stmt))
+    {
+    case GIMPLE_BIND:
+      ret = walk_gimple_seq (gimple_bind_body (stmt), callback_stmt,
+                            callback_op, wi);
+      if (ret)
+       return wi->callback_result;
+      break;
+
+    case GIMPLE_CATCH:
+      ret = walk_gimple_seq (gimple_catch_handler (stmt), callback_stmt,
+                            callback_op, wi);
+      if (ret)
+       return wi->callback_result;
+      break;
+
+    case GIMPLE_EH_FILTER:
+      ret = walk_gimple_seq (gimple_eh_filter_failure (stmt), callback_stmt,
+                            callback_op, wi);
+      if (ret)
+       return wi->callback_result;
+      break;
+
+    case GIMPLE_TRY:
+      ret = walk_gimple_seq (gimple_try_eval (stmt), callback_stmt, callback_op,
+                            wi);
+      if (ret)
+       return wi->callback_result;
+
+      ret = walk_gimple_seq (gimple_try_cleanup (stmt), callback_stmt,
+                            callback_op, wi);
+      if (ret)
+       return wi->callback_result;
+      break;
+
+    case GIMPLE_OMP_FOR:
+      ret = walk_gimple_seq (gimple_omp_for_pre_body (stmt), callback_stmt,
+                            callback_op, wi);
+      if (ret)
+       return wi->callback_result;
+
+      /* FALL THROUGH.  */
+    case GIMPLE_OMP_CRITICAL:
+    case GIMPLE_OMP_MASTER:
+    case GIMPLE_OMP_ORDERED:
+    case GIMPLE_OMP_SECTION:
+    case GIMPLE_OMP_PARALLEL:
+    case GIMPLE_OMP_TASK:
+    case GIMPLE_OMP_SECTIONS:
+    case GIMPLE_OMP_SINGLE:
+      ret = walk_gimple_seq (gimple_omp_body (stmt), callback_stmt, callback_op,
+                            wi);
+      if (ret)
+       return wi->callback_result;
+      break;
+
+    case GIMPLE_WITH_CLEANUP_EXPR:
+      ret = walk_gimple_seq (gimple_wce_cleanup (stmt), callback_stmt,
+                            callback_op, wi);
+      if (ret)
+       return wi->callback_result;
+      break;
+
+    default:
+      gcc_assert (!gimple_has_substatements (stmt));
+      break;
+    }
+
+  return NULL;
+}
+
+
+/* Set sequence SEQ to be the GIMPLE body for function FNDECL.  */
+
+void
+gimple_set_body (tree fndecl, gimple_seq seq)
+{
+  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
+  if (fn == NULL)
+    {
+      /* If FNDECL still does not have a function structure associated
+        with it, then it does not make sense for it to receive a
+        GIMPLE body.  */
+      gcc_assert (seq == NULL);
+    }
+  else
+    fn->gimple_body = seq;
+}
+
+
+/* Return the body of GIMPLE statements for function FNDECL.  */
+
+gimple_seq
+gimple_body (tree fndecl)
+{
+  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
+  return fn ? fn->gimple_body : NULL;
+}
+
+
+/* Detect flags from a GIMPLE_CALL.  This is just like
+   call_expr_flags, but for gimple tuples.  */
+
+int
+gimple_call_flags (const_gimple stmt)
+{
+  int flags;
+  tree decl = gimple_call_fndecl (stmt);
+  tree t;
+
+  if (decl)
+    flags = flags_from_decl_or_type (decl);
+  else
+    {
+      t = TREE_TYPE (gimple_call_fn (stmt));
+      if (t && TREE_CODE (t) == POINTER_TYPE)
+       flags = flags_from_decl_or_type (TREE_TYPE (t));
+      else
+       flags = 0;
+    }
+
+  return flags;
+}
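
A short sketch of the typical query made with these flags, assuming STMT is a
GIMPLE_CALL and note_clobbering_call is a hypothetical helper:

   if (!(gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE)))
     /* The call may read and write arbitrary memory.  */
     note_clobbering_call (stmt);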
+
+
+/* Return true if GS is a copy assignment.  */
+
+bool
+gimple_assign_copy_p (gimple gs)
+{
+  return gimple_code (gs) == GIMPLE_ASSIGN
+         && get_gimple_rhs_class (gimple_assign_rhs_code (gs))
+           == GIMPLE_SINGLE_RHS
+        && is_gimple_val (gimple_op (gs, 1));
+}
+
+
+/* Return true if GS is an SSA_NAME copy assignment.  */
+
+bool
+gimple_assign_ssa_name_copy_p (gimple gs)
+{
+  return (gimple_code (gs) == GIMPLE_ASSIGN
+         && (get_gimple_rhs_class (gimple_assign_rhs_code (gs))
+             == GIMPLE_SINGLE_RHS)
+         && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
+         && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
+}
+
+
+/* Return true if GS is an assignment with a singleton RHS, i.e.,
+   there is no operator associated with the assignment itself.
+   Unlike gimple_assign_copy_p, this predicate returns true for
+   any RHS operand, including those that perform an operation
+   and do not have the semantics of a copy, such as COND_EXPR.  */
+
+bool
+gimple_assign_single_p (gimple gs)
+{
+  return (gimple_code (gs) == GIMPLE_ASSIGN
+          && get_gimple_rhs_class (gimple_assign_rhs_code (gs))
+            == GIMPLE_SINGLE_RHS);
+}
+
+/* Return true if GS is an assignment with a unary RHS, but the
+   operator has no effect on the assigned value.  The logic is adapted
+   from STRIP_NOPS.  This predicate is intended to be used in tuplifying
+   instances in which STRIP_NOPS was previously applied to the RHS of
+   an assignment.
+
+   NOTE: In the use cases that led to the creation of this function
+   and of gimple_assign_single_p, it is typical to test for either
+   condition and to proceed in the same manner.  In each case, the
+   assigned value is represented by the single RHS operand of the
+   assignment.  I suspect there may be cases where gimple_assign_copy_p,
+   gimple_assign_single_p, or equivalent logic is used where a similar
+   treatment of unary NOPs is appropriate.  */
+   
+bool
+gimple_assign_unary_nop_p (gimple gs)
+{
+  return (gimple_code (gs) == GIMPLE_ASSIGN
+          && (gimple_assign_rhs_code (gs) == NOP_EXPR
+              || gimple_assign_rhs_code (gs) == CONVERT_EXPR
+              || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
+          && gimple_assign_rhs1 (gs) != error_mark_node
+          && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
+              == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
+}
+
+/* Set BB to be the basic block holding G.  */
+
+void
+gimple_set_bb (gimple stmt, basic_block bb)
+{
+  stmt->gsbase.bb = bb;
+
+  /* If the statement is a label, add the label to block-to-labels map
+     so that we can speed up edge creation for GIMPLE_GOTOs.  */
+  if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
+    {
+      tree t;
+      int uid;
+
+      t = gimple_label_label (stmt);
+      uid = LABEL_DECL_UID (t);
+      if (uid == -1)
+       {
+         unsigned old_len = VEC_length (basic_block, label_to_block_map);
+         LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
+         if (old_len <= (unsigned) uid)
+           {
+             unsigned new_len = 3 * uid / 2;
+
+             VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
+                                    new_len);
+           }
+       }
+
+      VEC_replace (basic_block, label_to_block_map, uid, bb);
+    }
+}
+
+
+/* Fold the expression computed by STMT.  If the expression can be
+   folded, return the folded result, otherwise return NULL.  STMT is
+   not modified.  */
+
+tree
+gimple_fold (const_gimple stmt)
+{
+  switch (gimple_code (stmt))
+    {
+    case GIMPLE_COND:
+      return fold_binary (gimple_cond_code (stmt),
+                         boolean_type_node,
+                         gimple_cond_lhs (stmt),
+                         gimple_cond_rhs (stmt));
+
+    case GIMPLE_ASSIGN:
+      switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
+       {
+       case GIMPLE_UNARY_RHS:
+         return fold_unary (gimple_assign_rhs_code (stmt),
+                            TREE_TYPE (gimple_assign_lhs (stmt)),
+                            gimple_assign_rhs1 (stmt));
+       case GIMPLE_BINARY_RHS:
+         return fold_binary (gimple_assign_rhs_code (stmt),
+                             TREE_TYPE (gimple_assign_lhs (stmt)),
+                             gimple_assign_rhs1 (stmt),
+                             gimple_assign_rhs2 (stmt));
+       case GIMPLE_SINGLE_RHS:
+         return fold (gimple_assign_rhs1 (stmt));
+       default:;
+       }
+      break;
+
+    case GIMPLE_SWITCH:
+      return gimple_switch_index (stmt);
+
+    case GIMPLE_CALL:
+      return NULL_TREE;
+
+    default:
+      break;
+    }
+
+  gcc_unreachable ();
+}
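
A sketch of one common use, assuming STMT exists and record_constant is a
hypothetical helper: detect statements whose value folds to a constant.

   tree val = gimple_fold (stmt);
   if (val && is_gimple_min_invariant (val))
     record_constant (stmt, val);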
+
+
+/* Modify the RHS of the assignment pointed-to by GSI using the
+   operands in the expression tree EXPR.
+
+   NOTE: The statement pointed-to by GSI may be reallocated if it
+   did not have enough operand slots.
+
+   This function is useful to convert an existing tree expression into
+   the flat representation used for the RHS of a GIMPLE assignment.
+   It will reallocate memory as needed to expand or shrink the number
+   of operand slots needed to represent EXPR.
+
+   NOTE: If you find yourself building a tree and then calling this
+   function, you are most certainly doing it the slow way.  It is much
+   better to build a new assignment or to use the function
+   gimple_assign_set_rhs_with_ops, which does not require an
+   expression tree to be built.  */
+
+void
+gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
+{
+  enum tree_code subcode;
+  tree op1, op2;
+
+  extract_ops_from_tree (expr, &subcode, &op1, &op2);
+  gimple_assign_set_rhs_with_ops (gsi, subcode, op1, op2);
+}
+
+
+/* Set the RHS of assignment statement pointed-to by GSI to CODE with
+   operands OP1 and OP2.
+
+   NOTE: The statement pointed-to by GSI may be reallocated if it
+   did not have enough operand slots.  */
+
+void
+gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *gsi, enum tree_code code,
+                               tree op1, tree op2)
+{
+  unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
+  gimple stmt = gsi_stmt (*gsi);
+
+  /* If the new CODE needs more operands, allocate a new statement.  */
+  if (gimple_num_ops (stmt) < new_rhs_ops + 1)
+    {
+      tree lhs = gimple_assign_lhs (stmt);
+      gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
+      memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
+      gsi_replace (gsi, new_stmt, true);
+      stmt = new_stmt;
+
+      /* The LHS needs to be reset as this also changes the SSA name
+        on the LHS.  */
+      gimple_assign_set_lhs (stmt, lhs);
+    }
+
+  gimple_set_num_ops (stmt, new_rhs_ops + 1);
+  gimple_set_subcode (stmt, code);
+  gimple_assign_set_rhs1 (stmt, op1);
+  if (new_rhs_ops > 1)
+    gimple_assign_set_rhs2 (stmt, op2);
+}
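
A sketch of rewriting an assignment in place with the helper above, e.g.
replacing its RHS with a plain copy of OP (GSI and OP are assumed; this is
the pattern propagation-style passes follow):

   gimple_assign_set_rhs_with_ops (&gsi, TREE_CODE (op), op, NULL_TREE);
   update_stmt (gsi_stmt (gsi));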
+
+
+/* Return the LHS of a statement that performs an assignment,
+   either a GIMPLE_ASSIGN or a GIMPLE_CALL.  Returns NULL_TREE
+   for a call to a function that returns no value, or for a
+   statement other than an assignment or a call.  */
+
+tree
+gimple_get_lhs (const_gimple stmt)
+{
+  enum tree_code code = gimple_code (stmt);
+
+  if (code == GIMPLE_ASSIGN)
+    return gimple_assign_lhs (stmt);
+  else if (code == GIMPLE_CALL)
+    return gimple_call_lhs (stmt);
+  else
+    return NULL_TREE;
+}
+
+
+/* Set the LHS of a statement that performs an assignment,
+   either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
+
+void
+gimple_set_lhs (gimple stmt, tree lhs)
+{
+  enum tree_code code = gimple_code (stmt);
+
+  if (code == GIMPLE_ASSIGN)
+    gimple_assign_set_lhs (stmt, lhs);
+  else if (code == GIMPLE_CALL)
+    gimple_call_set_lhs (stmt, lhs);
+  else
+    gcc_unreachable ();
+}
+
+
+/* Return a deep copy of statement STMT.  All the operands from STMT
+   are reallocated and copied using unshare_expr.  The DEF, USE, VDEF
+   and VUSE operand arrays are set to empty in the new copy.  */
+
+gimple
+gimple_copy (gimple stmt)
+{
+  enum gimple_code code = gimple_code (stmt);
+  unsigned num_ops = gimple_num_ops (stmt);
+  gimple copy = gimple_alloc (code, num_ops);
+  unsigned i;
+
+  /* Shallow copy all the fields from STMT.  */
+  memcpy (copy, stmt, gimple_size (code));
+
+  /* If STMT has sub-statements, deep-copy them as well.  */
+  if (gimple_has_substatements (stmt))
+    {
+      gimple_seq new_seq;
+      tree t;
+
+      switch (gimple_code (stmt))
+       {
+       case GIMPLE_BIND:
+         new_seq = gimple_seq_copy (gimple_bind_body (stmt));
+         gimple_bind_set_body (copy, new_seq);
+         gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
+         gimple_bind_set_block (copy, gimple_bind_block (stmt));
+         break;
+
+       case GIMPLE_CATCH:
+         new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
+         gimple_catch_set_handler (copy, new_seq);
+         t = unshare_expr (gimple_catch_types (stmt));
+         gimple_catch_set_types (copy, t);
+         break;
+
+       case GIMPLE_EH_FILTER:
+         new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
+         gimple_eh_filter_set_failure (copy, new_seq);
+         t = unshare_expr (gimple_eh_filter_types (stmt));
+         gimple_eh_filter_set_types (copy, t);
+         break;
+
+       case GIMPLE_TRY:
+         new_seq = gimple_seq_copy (gimple_try_eval (stmt));
+         gimple_try_set_eval (copy, new_seq);
+         new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
+         gimple_try_set_cleanup (copy, new_seq);
+         break;
+
+       case GIMPLE_OMP_FOR:
+         new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
+         gimple_omp_for_set_pre_body (copy, new_seq);
+         t = unshare_expr (gimple_omp_for_clauses (stmt));
+         gimple_omp_for_set_clauses (copy, t);
+         copy->gimple_omp_for.iter
+           = GGC_NEWVEC (struct gimple_omp_for_iter,
+                         gimple_omp_for_collapse (stmt));
+         for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
+           {
+             gimple_omp_for_set_cond (copy, i,
+                                      gimple_omp_for_cond (stmt, i));
+             gimple_omp_for_set_index (copy, i,
+                                       gimple_omp_for_index (stmt, i));
+             t = unshare_expr (gimple_omp_for_initial (stmt, i));
+             gimple_omp_for_set_initial (copy, i, t);
+             t = unshare_expr (gimple_omp_for_final (stmt, i));
+             gimple_omp_for_set_final (copy, i, t);
+             t = unshare_expr (gimple_omp_for_incr (stmt, i));
+             gimple_omp_for_set_incr (copy, i, t);
+           }
+         goto copy_omp_body;
+
+       case GIMPLE_OMP_PARALLEL:
+         t = unshare_expr (gimple_omp_parallel_clauses (stmt));
+         gimple_omp_parallel_set_clauses (copy, t);
+         t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
+         gimple_omp_parallel_set_child_fn (copy, t);
+         t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
+         gimple_omp_parallel_set_data_arg (copy, t);
+         goto copy_omp_body;
+
+       case GIMPLE_OMP_TASK:
+         t = unshare_expr (gimple_omp_task_clauses (stmt));
+         gimple_omp_task_set_clauses (copy, t);
+         t = unshare_expr (gimple_omp_task_child_fn (stmt));
+         gimple_omp_task_set_child_fn (copy, t);
+         t = unshare_expr (gimple_omp_task_data_arg (stmt));
+         gimple_omp_task_set_data_arg (copy, t);
+         t = unshare_expr (gimple_omp_task_copy_fn (stmt));
+         gimple_omp_task_set_copy_fn (copy, t);
+         t = unshare_expr (gimple_omp_task_arg_size (stmt));
+         gimple_omp_task_set_arg_size (copy, t);
+         t = unshare_expr (gimple_omp_task_arg_align (stmt));
+         gimple_omp_task_set_arg_align (copy, t);
+         goto copy_omp_body;
+
+       case GIMPLE_OMP_CRITICAL:
+         t = unshare_expr (gimple_omp_critical_name (stmt));
+         gimple_omp_critical_set_name (copy, t);
+         goto copy_omp_body;
+
+       case GIMPLE_OMP_SECTIONS:
+         t = unshare_expr (gimple_omp_sections_clauses (stmt));
+         gimple_omp_sections_set_clauses (copy, t);
+         t = unshare_expr (gimple_omp_sections_control (stmt));
+         gimple_omp_sections_set_control (copy, t);
+         /* FALLTHRU  */
+
+       case GIMPLE_OMP_SINGLE:
+       case GIMPLE_OMP_SECTION:
+       case GIMPLE_OMP_MASTER:
+       case GIMPLE_OMP_ORDERED:
+       copy_omp_body:
+         new_seq = gimple_seq_copy (gimple_omp_body (stmt));
+         gimple_omp_set_body (copy, new_seq);
+         break;
+
+       case GIMPLE_WITH_CLEANUP_EXPR:
+         new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
+         gimple_wce_set_cleanup (copy, new_seq);
+         break;
+
+       default:
+         gcc_unreachable ();
+       }
+    }
+
+  /* Make copy of operands.  */
+  if (num_ops > 0)
+    {
+      for (i = 0; i < num_ops; i++)
+       gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));
+
+      /* Clear out SSA operand vectors on COPY.  Note that we cannot
+        call the API functions for setting addresses_taken, stores
+        and loads.  These functions free the previous values, and we
+        cannot do that on COPY as it will affect the original
+        statement.  */
+      if (gimple_has_ops (stmt))
+       {
+         gimple_set_def_ops (copy, NULL);
+         gimple_set_use_ops (copy, NULL);
+         copy->gsops.opbase.addresses_taken = NULL;
+       }
+
+      if (gimple_has_mem_ops (stmt))
+       {
+         gimple_set_vdef_ops (copy, NULL);
+         gimple_set_vuse_ops (copy, NULL);
+         copy->gsmem.membase.stores = NULL;
+         copy->gsmem.membase.loads = NULL;
+       }
+
+      update_stmt (copy);
+    }
+
+  return copy;
+}
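
The common duplicate-and-insert pattern built on gimple_copy, as a sketch
(GSI and STMT are assumed; the copy's SSA operands still need to be
recomputed by the caller):

   gimple dup = gimple_copy (stmt);
   gsi_insert_before (&gsi, dup, GSI_SAME_STMT);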
+
+
+/* Set the MODIFIED flag to MODIFIEDP, iff the gimple statement G has
+   a MODIFIED field.  */
+
+void
+gimple_set_modified (gimple s, bool modifiedp)
+{
+  if (gimple_has_ops (s))
+    {
+      s->gsbase.modified = (unsigned) modifiedp;
+
+      if (modifiedp
+         && cfun->gimple_df
+         && is_gimple_call (s)
+         && gimple_call_noreturn_p (s))
+       VEC_safe_push (gimple, gc, MODIFIED_NORETURN_CALLS (cfun), s);
+    }
+}
+
+
+/* Return true if statement S has side-effects.  We consider a
+   statement to have side effects if:
+
+   - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
+   - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS.  */
+
+bool
+gimple_has_side_effects (const_gimple s)
+{
+  unsigned i;
+
+  /* We don't have to scan the arguments to check for
+     volatile arguments, though, at present, we still
+     do a scan to check for TREE_SIDE_EFFECTS.  */
+  if (gimple_has_volatile_ops (s))
+    return true;
+
+  if (is_gimple_call (s))
+    {
+      unsigned nargs = gimple_call_num_args (s);
+
+      if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
+        return true;
+      else if (gimple_call_flags (s) & ECF_LOOPING_CONST_OR_PURE)
+       /* An infinite loop is considered a side effect.  */
+       return true;
+
+      if (gimple_call_lhs (s)
+          && TREE_SIDE_EFFECTS (gimple_call_lhs (s)))
+       {
+         gcc_assert (gimple_has_volatile_ops (s));
+         return true;
+       }
+
+      if (TREE_SIDE_EFFECTS (gimple_call_fn (s)))
+        return true;
+
+      for (i = 0; i < nargs; i++)
+        if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i)))
+         {
+           gcc_assert (gimple_has_volatile_ops (s));
+           return true;
+         }
+
+      return false;
+    }
+  else
+    {
+      for (i = 0; i < gimple_num_ops (s); i++)
+       if (TREE_SIDE_EFFECTS (gimple_op (s, i)))
+         {
+           gcc_assert (gimple_has_volatile_ops (s));
+           return true;
+         }
+    }
+
+  return false;
+}
+
+/* Return true if the RHS of statement S has side effects.
+   We may use it to determine if it is admissible to replace
+   an assignment or call with a copy of a previously-computed
+   value.  In such cases, side-effects due to the LHS are
+   preserved.  */
+
+bool
+gimple_rhs_has_side_effects (const_gimple s)
+{
+  unsigned i;
+
+  if (is_gimple_call (s))
+    {
+      unsigned nargs = gimple_call_num_args (s);
+
+      if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
+        return true;
+
+      /* We cannot use gimple_has_volatile_ops here,
+         because we must ignore a volatile LHS.  */
+      if (TREE_SIDE_EFFECTS (gimple_call_fn (s))
+          || TREE_THIS_VOLATILE (gimple_call_fn (s)))
+       {
+         gcc_assert (gimple_has_volatile_ops (s));
+         return true;
+       }
+
+      for (i = 0; i < nargs; i++)
+        if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i))
+            || TREE_THIS_VOLATILE (gimple_call_arg (s, i)))
+          return true;
+
+      return false;
+    }
+  else if (is_gimple_assign (s))
+    {
+      /* Skip the first operand, the LHS. */
+      for (i = 1; i < gimple_num_ops (s); i++)
+       if (TREE_SIDE_EFFECTS (gimple_op (s, i))
+            || TREE_THIS_VOLATILE (gimple_op (s, i)))
+         {
+           gcc_assert (gimple_has_volatile_ops (s));
+           return true;
+         }
+    }
+  else
+    {
+      /* For statements without an LHS, examine all arguments.  */
+      for (i = 0; i < gimple_num_ops (s); i++)
+       if (TREE_SIDE_EFFECTS (gimple_op (s, i))
+            || TREE_THIS_VOLATILE (gimple_op (s, i)))
+         {
+           gcc_assert (gimple_has_volatile_ops (s));
+           return true;
+         }
+    }
+
+  return false;
+}
+
+
+/* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
+   Return true if S can trap.  If INCLUDE_LHS is true and S is a
+   GIMPLE_ASSIGN, the LHS of the assignment is also checked.
+   Otherwise, only the RHS of the assignment is checked.  */
+
+static bool
+gimple_could_trap_p_1 (gimple s, bool include_lhs)
+{
+  unsigned i, start;
+  tree t, div = NULL_TREE;
+  enum tree_code op;
+
+  start = (is_gimple_assign (s) && !include_lhs) ? 1 : 0;
+
+  for (i = start; i < gimple_num_ops (s); i++)
+    if (tree_could_trap_p (gimple_op (s, i)))
+      return true;
+
+  switch (gimple_code (s))
+    {
+    case GIMPLE_ASM:
+      return gimple_asm_volatile_p (s);
+
+    case GIMPLE_CALL:
+      t = gimple_call_fndecl (s);
+      /* Assume that calls to weak functions may trap.  */
+      if (!t || !DECL_P (t) || DECL_WEAK (t))
+       return true;
+      return false;
+
+    case GIMPLE_ASSIGN:
+      t = gimple_expr_type (s);
+      op = gimple_assign_rhs_code (s);
+      if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
+       div = gimple_assign_rhs2 (s);
+      return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
+                                     (INTEGRAL_TYPE_P (t)
+                                      && TYPE_OVERFLOW_TRAPS (t)),
+                                     div));
+
+    default:
+      break;
+    }
+
+  return false;
+
+}
+
+
+/* Return true if statement S can trap.  */
+
+bool
+gimple_could_trap_p (gimple s)
+{
+  return gimple_could_trap_p_1 (s, true);
+}
+
+
+/* Return true if RHS of a GIMPLE_ASSIGN S can trap.  */
+
+bool
+gimple_assign_rhs_could_trap_p (gimple s)
+{
+  gcc_assert (is_gimple_assign (s));
+  return gimple_could_trap_p_1 (s, false);
+}
+
+
+/* Print debugging information for gimple stmts generated.  */
+
+void
+dump_gimple_statistics (void)
+{
+#ifdef GATHER_STATISTICS
+  int i, total_tuples = 0, total_bytes = 0;
+
+  fprintf (stderr, "\nGIMPLE statements\n");
+  fprintf (stderr, "Kind                   Stmts      Bytes\n");
+  fprintf (stderr, "---------------------------------------\n");
+  for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
+    {
+      fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
+         gimple_alloc_counts[i], gimple_alloc_sizes[i]);
+      total_tuples += gimple_alloc_counts[i];
+      total_bytes += gimple_alloc_sizes[i];
+    }
+  fprintf (stderr, "---------------------------------------\n");
+  fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
+  fprintf (stderr, "---------------------------------------\n");
+#else
+  fprintf (stderr, "No gimple statistics\n");
+#endif
+}
+
+
+/* Deep copy SYMS into the set of symbols stored by STMT.  If SYMS is
+   NULL or empty, the storage used is freed up.  */
+
+void
+gimple_set_stored_syms (gimple stmt, bitmap syms, bitmap_obstack *obs)
+{
+  gcc_assert (gimple_has_mem_ops (stmt));
+
+  if (syms == NULL || bitmap_empty_p (syms))
+    BITMAP_FREE (stmt->gsmem.membase.stores);
+  else
+    {
+      if (stmt->gsmem.membase.stores == NULL)
+       stmt->gsmem.membase.stores = BITMAP_ALLOC (obs);
+
+      bitmap_copy (stmt->gsmem.membase.stores, syms);
+    }
+}
+
+
+/* Deep copy SYMS into the set of symbols loaded by STMT.  If SYMS is
+   NULL or empty, the storage used is freed up.  */
+
+void
+gimple_set_loaded_syms (gimple stmt, bitmap syms, bitmap_obstack *obs)
+{
+  gcc_assert (gimple_has_mem_ops (stmt));
+
+  if (syms == NULL || bitmap_empty_p (syms))
+    BITMAP_FREE (stmt->gsmem.membase.loads);
+  else
+    {
+      if (stmt->gsmem.membase.loads == NULL)
+       stmt->gsmem.membase.loads = BITMAP_ALLOC (obs);
+
+      bitmap_copy (stmt->gsmem.membase.loads, syms);
+    }
+}
+
+
+/* Return the number of operands needed on the RHS of a GIMPLE
+   assignment for an expression with tree code CODE.  */
+
+unsigned
+get_gimple_rhs_num_ops (enum tree_code code)
+{
+  enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
+
+  if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
+    return 1;
+  else if (rhs_class == GIMPLE_BINARY_RHS)
+    return 2;
+  else
+    gcc_unreachable ();
+}
+
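
For concreteness, a few of the mappings the table below encodes, written as
hypothetical sanity checks (they would have to live inside a function to
compile):

   gcc_assert (get_gimple_rhs_class (PLUS_EXPR) == GIMPLE_BINARY_RHS);
   gcc_assert (get_gimple_rhs_class (NEGATE_EXPR) == GIMPLE_UNARY_RHS);
   gcc_assert (get_gimple_rhs_class (SSA_NAME) == GIMPLE_SINGLE_RHS);
   gcc_assert (get_gimple_rhs_num_ops (PLUS_EXPR) == 2);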
+#define DEFTREECODE(SYM, STRING, TYPE, NARGS)                              \
+  (unsigned char)                                                          \
+  ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS                                  \
+   : ((TYPE) == tcc_binary                                                 \
+      || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS                     \
+   : ((TYPE) == tcc_constant                                               \
+      || (TYPE) == tcc_declaration                                         \
+      || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS                      \
+   : ((SYM) == TRUTH_AND_EXPR                                              \
+      || (SYM) == TRUTH_OR_EXPR                                                    \
+      || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS                      \
+   : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS                                    \
+   : ((SYM) == COND_EXPR                                                   \
+      || (SYM) == CONSTRUCTOR                                              \
+      || (SYM) == OBJ_TYPE_REF                                             \
+      || (SYM) == ASSERT_EXPR                                              \
+      || (SYM) == ADDR_EXPR                                                \
+      || (SYM) == WITH_SIZE_EXPR                                           \
+      || (SYM) == EXC_PTR_EXPR                                             \
+      || (SYM) == SSA_NAME                                                 \
+      || (SYM) == FILTER_EXPR                                              \
+      || (SYM) == POLYNOMIAL_CHREC                                         \
+      || (SYM) == DOT_PROD_EXPR                                                    \
+      || (SYM) == VEC_COND_EXPR                                                    \
+      || (SYM) == REALIGN_LOAD_EXPR) ? GIMPLE_SINGLE_RHS                   \
+   : GIMPLE_INVALID_RHS),
+#define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
+
+const unsigned char gimple_rhs_class_table[] = {
+#include "all-tree.def"
+};
+
+#undef DEFTREECODE
+#undef END_OF_BASE_TREE_CODES
+
+/* For the definitive definition of GIMPLE, see doc/tree-ssa.texi.  */
+
+/* Validation of GIMPLE expressions.  */
+
+/* Return true if OP is an acceptable tree node to be used as a GIMPLE
+   operand.  */
+
+bool
+is_gimple_operand (const_tree op)
+{
+  return op && get_gimple_rhs_class (TREE_CODE (op)) == GIMPLE_SINGLE_RHS;
+}
+
+
+/* Return true if T is a GIMPLE RHS for an assignment to a temporary.  */
+
+bool
+is_gimple_formal_tmp_rhs (tree t)
+{
+  if (is_gimple_lvalue (t) || is_gimple_val (t))
+    return true;
+
+  return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
+}
+
+/* Returns true iff T is a valid RHS for an assignment to a renamed
+   user -- or front-end generated artificial -- variable.  */
+
+bool
+is_gimple_reg_rhs (tree t)
+{
+  /* If the RHS of the MODIFY_EXPR may throw or make a nonlocal goto
+     and the LHS is a user variable, then we need to introduce a formal
+     temporary.  This way the optimizers can determine that the user
+     variable is only modified if evaluation of the RHS does not throw.
+
+     Don't force a temp of a non-renamable type; the copy could be
+     arbitrarily expensive.  Instead we will generate a VDEF for
+     the assignment.  */
+
+  if (is_gimple_reg_type (TREE_TYPE (t)) && tree_could_throw_p (t))
+    return false;
+
+  return is_gimple_formal_tmp_rhs (t);
+}
+
+/* Returns true iff T is a valid RHS for an assignment to an un-renamed
+   LHS, or for a call argument.  */
+
+bool
+is_gimple_mem_rhs (tree t)
+{
+  /* If we're dealing with a renamable type, either source or dest must be
+     a renamed variable.  */
+  if (is_gimple_reg_type (TREE_TYPE (t)))
+    return is_gimple_val (t);
+  else
+    return is_gimple_formal_tmp_rhs (t);
+}
+
+/*  Return true if T is a valid LHS for a GIMPLE assignment expression.  */
+
+bool
+is_gimple_lvalue (tree t)
+{
+  return (is_gimple_addressable (t)
+         || TREE_CODE (t) == WITH_SIZE_EXPR
+         /* These are complex lvalues, but don't have addresses, so they
+            go here.  */
+         || TREE_CODE (t) == BIT_FIELD_REF);
+}
+
+/*  Return true if T is a GIMPLE condition.  */
+
+bool
+is_gimple_condexpr (tree t)
+{
+  return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
+                               && !tree_could_trap_p (t)
+                               && is_gimple_val (TREE_OPERAND (t, 0))
+                               && is_gimple_val (TREE_OPERAND (t, 1))));
+}
+
+/*  Return true if T is something whose address can be taken.  */
+
+bool
+is_gimple_addressable (tree t)
+{
+  return (is_gimple_id (t) || handled_component_p (t) || INDIRECT_REF_P (t));
+}
+
+/* Return true if T is a valid gimple constant.  */
+
+bool
+is_gimple_constant (const_tree t)
+{
+  switch (TREE_CODE (t))
+    {
+    case INTEGER_CST:
+    case REAL_CST:
+    case FIXED_CST:
+    case STRING_CST:
+    case COMPLEX_CST:
+    case VECTOR_CST:
+      return true;
+
+    /* Vector constant constructors are gimple invariant.  */
+    case CONSTRUCTOR:
+      if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
+       return TREE_CONSTANT (t);
+      else
+       return false;
+
+    default:
+      return false;
+    }
+}
+
+/* Return true if T is a gimple address.  */
+
+bool
+is_gimple_address (const_tree t)
+{
+  tree op;
+
+  if (TREE_CODE (t) != ADDR_EXPR)
+    return false;
+
+  op = TREE_OPERAND (t, 0);
+  while (handled_component_p (op))
+    {
+      if ((TREE_CODE (op) == ARRAY_REF
+          || TREE_CODE (op) == ARRAY_RANGE_REF)
+         && !is_gimple_val (TREE_OPERAND (op, 1)))
+           return false;
+
+      op = TREE_OPERAND (op, 0);
+    }
+
+  if (CONSTANT_CLASS_P (op) || INDIRECT_REF_P (op))
+    return true;
+
+  switch (TREE_CODE (op))
+    {
+    case PARM_DECL:
+    case RESULT_DECL:
+    case LABEL_DECL:
+    case FUNCTION_DECL:
+    case VAR_DECL:
+    case CONST_DECL:
+      return true;
+
+    default:
+      return false;
+    }
+}
+
+/* Return true if T is a gimple invariant address.  */
+
+bool
+is_gimple_invariant_address (const_tree t)
+{
+  tree op;
+
+  if (TREE_CODE (t) != ADDR_EXPR)
+    return false;
+
+  op = TREE_OPERAND (t, 0);
+  while (handled_component_p (op))
+    {
+      switch (TREE_CODE (op))
+       {
+       case ARRAY_REF:
+       case ARRAY_RANGE_REF:
+         if (!is_gimple_constant (TREE_OPERAND (op, 1))
+             || TREE_OPERAND (op, 2) != NULL_TREE
+             || TREE_OPERAND (op, 3) != NULL_TREE)
+           return false;
+         break;
+
+       case COMPONENT_REF:
+         if (TREE_OPERAND (op, 2) != NULL_TREE)
+           return false;
+         break;
+
+       default:;
+       }
+      op = TREE_OPERAND (op, 0);
+    }
+
+  return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
+}
+
+/* Return true if T is a GIMPLE minimal invariant.  It's a restricted
+   form of function invariant.  */
+
+bool
+is_gimple_min_invariant (const_tree t)
+{
+  if (TREE_CODE (t) == ADDR_EXPR)
+    return is_gimple_invariant_address (t);
+
+  return is_gimple_constant (t);
+}
+
+/* Return true if T looks like a valid GIMPLE statement.  */
+
+bool
+is_gimple_stmt (tree t)
+{
+  const enum tree_code code = TREE_CODE (t);
+
+  switch (code)
+    {
+    case NOP_EXPR:
+      /* The only valid NOP_EXPR is the empty statement.  */
+      return IS_EMPTY_STMT (t);
+
+    case BIND_EXPR:
+    case COND_EXPR:
+      /* These are only valid if they're void.  */
+      return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
+
+    case SWITCH_EXPR:
+    case GOTO_EXPR:
+    case RETURN_EXPR:
+    case LABEL_EXPR:
+    case CASE_LABEL_EXPR:
+    case TRY_CATCH_EXPR:
+    case TRY_FINALLY_EXPR:
+    case EH_FILTER_EXPR:
+    case CATCH_EXPR:
+    case CHANGE_DYNAMIC_TYPE_EXPR:
+    case ASM_EXPR:
+    case RESX_EXPR:
+    case STATEMENT_LIST:
+    case OMP_PARALLEL:
+    case OMP_FOR:
+    case OMP_SECTIONS:
+    case OMP_SECTION:
+    case OMP_SINGLE:
+    case OMP_MASTER:
+    case OMP_ORDERED:
+    case OMP_CRITICAL:
+    case OMP_TASK:
+      /* These are always void.  */
+      return true;
+
+    case CALL_EXPR:
+    case MODIFY_EXPR:
+    case PREDICT_EXPR:
+      /* These are valid regardless of their type.  */
+      return true;
+
+    default:
+      return false;
+    }
+}
+
+/* Return true if T is a variable.  */
+
+bool
+is_gimple_variable (tree t)
+{
+  return (TREE_CODE (t) == VAR_DECL
+         || TREE_CODE (t) == PARM_DECL
+         || TREE_CODE (t) == RESULT_DECL
+         || TREE_CODE (t) == SSA_NAME);
+}
+
+/*  Return true if T is a GIMPLE identifier (something with an address).  */
+
+bool
+is_gimple_id (tree t)
+{
+  return (is_gimple_variable (t)
+         || TREE_CODE (t) == FUNCTION_DECL
+         || TREE_CODE (t) == LABEL_DECL
+         || TREE_CODE (t) == CONST_DECL
+         /* Allow string constants, since they are addressable.  */
+         || TREE_CODE (t) == STRING_CST);
+}
+
+/* Return true if TYPE is a suitable type for a scalar register variable.  */
+
+bool
+is_gimple_reg_type (tree type)
+{
+  /* In addition to aggregate types, we also exclude complex types if not
+     optimizing because they can be subject to partial stores in GNU C by
+     means of the __real__ and __imag__ operators and we cannot promote
+     them to total stores (see gimplify_modify_expr_complex_part).  */
+  return !(AGGREGATE_TYPE_P (type)
+          || (TREE_CODE (type) == COMPLEX_TYPE && !optimize));
+}
+
+/* Return true if T is a non-aggregate register variable.  */
+
+bool
+is_gimple_reg (tree t)
+{
+  if (TREE_CODE (t) == SSA_NAME)
+    t = SSA_NAME_VAR (t);
+
+  if (MTAG_P (t))
+    return false;
+
+  if (!is_gimple_variable (t))
+    return false;
+
+  if (!is_gimple_reg_type (TREE_TYPE (t)))
+    return false;
+
+  /* A volatile decl is not acceptable because we can't reuse it as
+     needed.  We need to copy it into a temp first.  */
+  if (TREE_THIS_VOLATILE (t))
+    return false;
+
+  /* We define "registers" as things that can be renamed as needed,
+     which with our infrastructure does not apply to memory.  */
+  if (needs_to_live_in_memory (t))
+    return false;
+
+  /* Hard register variables are an interesting case.  For those that
+     are call-clobbered, we don't know where all the calls are, since
+     we don't (want to) take into account which operations will turn
+     into libcalls at the rtl level.  For those that are call-saved,
+     we don't currently model the fact that calls may in fact change
+     global hard registers, nor do we examine ASM_CLOBBERS at the tree
+     level, and so miss variable changes that they might imply.  All around,
+     it seems safest to not do too much optimization with these at the
+     tree level at all.  We'll have to rely on the rtl optimizers to
+     clean this up, as there we've got all the appropriate bits exposed.  */
+  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
+    return false;
+
+  /* Complex and vector values must have been put into SSA-like form.
+     That is, no assignments to the individual components.  */
+  if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
+      || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
+    return DECL_GIMPLE_REG_P (t);
+
+  return true;
+}
+
+
+/* Returns true if T is a GIMPLE formal temporary variable.  */
+
+bool
+is_gimple_formal_tmp_var (tree t)
+{
+  if (TREE_CODE (t) == SSA_NAME)
+    return true;
+
+  return TREE_CODE (t) == VAR_DECL && DECL_GIMPLE_FORMAL_TEMP_P (t);
+}
+
+/* Returns true if T is a GIMPLE formal temporary register variable.  */
+
+bool
+is_gimple_formal_tmp_reg (tree t)
+{
+  /* The intent of this is to get hold of a value that won't change.
+     An SSA_NAME qualifies no matter if it's of a user variable or not.  */
+  if (TREE_CODE (t) == SSA_NAME)
+    return true;
+
+  /* We don't know the lifetime characteristics of user variables.  */
+  if (!is_gimple_formal_tmp_var (t))
+    return false;
+
+  /* Finally, it must be capable of being placed in a register.  */
+  return is_gimple_reg (t);
+}
+
+/* Return true if T is a GIMPLE variable whose address is not needed.  */
+
+bool
+is_gimple_non_addressable (tree t)
+{
+  if (TREE_CODE (t) == SSA_NAME)
+    t = SSA_NAME_VAR (t);
+
+  return (is_gimple_variable (t) && ! needs_to_live_in_memory (t));
+}
+
+/* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant.  */
+
+bool
+is_gimple_val (tree t)
+{
+  /* Make loads from volatiles and memory vars explicit.  */
+  if (is_gimple_variable (t)
+      && is_gimple_reg_type (TREE_TYPE (t))
+      && !is_gimple_reg (t))
+    return false;
+
+  /* FIXME make these decls.  That can happen only when we expose the
+     entire landing-pad construct at the tree level.  */
+  if (TREE_CODE (t) == EXC_PTR_EXPR || TREE_CODE (t) == FILTER_EXPR)
+    return true;
+
+  return (is_gimple_variable (t) || is_gimple_min_invariant (t));
+}
+
+/* Similarly, but accept hard registers as inputs to asm statements.  */
+
+bool
+is_gimple_asm_val (tree t)
+{
+  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
+    return true;
+
+  return is_gimple_val (t);
+}
+
+/* Return true if T is a GIMPLE minimal lvalue.  */
+
+bool
+is_gimple_min_lval (tree t)
+{
+  return (is_gimple_id (t) || TREE_CODE (t) == INDIRECT_REF);
+}
+
+/* Return true if T is a typecast operation.  */
+
+bool
+is_gimple_cast (tree t)
+{
+  return (CONVERT_EXPR_P (t)
+          || TREE_CODE (t) == FIX_TRUNC_EXPR);
+}
+
+/* Return true if T is a valid function operand of a CALL_EXPR.  */
+
+bool
+is_gimple_call_addr (tree t)
+{
+  return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
+}
+
+/* If T makes a function call, return the corresponding CALL_EXPR operand.
+   Otherwise, return NULL_TREE.  */
+
+tree
+get_call_expr_in (tree t)
+{
+  if (TREE_CODE (t) == MODIFY_EXPR)
+    t = TREE_OPERAND (t, 1);
+  if (TREE_CODE (t) == WITH_SIZE_EXPR)
+    t = TREE_OPERAND (t, 0);
+  if (TREE_CODE (t) == CALL_EXPR)
+    return t;
+  return NULL_TREE;
+}
+
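+/* Illustrative sketch: for the trees 'x = foo (a)' and 'foo (a)',
+   get_call_expr_in returns the CALL_EXPR 'foo (a)' in both cases; for
+   a plain assignment such as 'x = y + 1' it returns NULL_TREE.  */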
+
+/* Given a memory reference expression T, return its base address.
+   The base address of a memory reference expression is the main
+   object being referenced.  For instance, the base address for
+   'array[i].fld[j]' is 'array'.  You can think of this as stripping
+   away the offset part from a memory address.
+
+   This function calls handled_component_p to strip away all the inner
+   parts of the memory reference until it reaches the base object.  */
+
+tree
+get_base_address (tree t)
+{
+  while (handled_component_p (t))
+    t = TREE_OPERAND (t, 0);
+  
+  if (SSA_VAR_P (t)
+      || TREE_CODE (t) == STRING_CST
+      || TREE_CODE (t) == CONSTRUCTOR
+      || INDIRECT_REF_P (t))
+    return t;
+  else
+    return NULL_TREE;
+}
+
+void
+recalculate_side_effects (tree t)
+{
+  enum tree_code code = TREE_CODE (t);
+  int len = TREE_OPERAND_LENGTH (t);
+  int i;
+
+  switch (TREE_CODE_CLASS (code))
+    {
+    case tcc_expression:
+      switch (code)
+       {
+       case INIT_EXPR:
+       case MODIFY_EXPR:
+       case VA_ARG_EXPR:
+       case PREDECREMENT_EXPR:
+       case PREINCREMENT_EXPR:
+       case POSTDECREMENT_EXPR:
+       case POSTINCREMENT_EXPR:
+         /* All of these have side-effects, no matter what their
+            operands are.  */
+         return;
+
+       default:
+         break;
+       }
+      /* Fall through.  */
+
+    case tcc_comparison:  /* a comparison expression */
+    case tcc_unary:       /* a unary arithmetic expression */
+    case tcc_binary:      /* a binary arithmetic expression */
+    case tcc_reference:   /* a reference */
+    case tcc_vl_exp:        /* a function call */
+      TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
+      for (i = 0; i < len; ++i)
+       {
+         tree op = TREE_OPERAND (t, i);
+         if (op && TREE_SIDE_EFFECTS (op))
+           TREE_SIDE_EFFECTS (t) = 1;
+       }
+      break;
+
+    default:
+      /* Can never be used with non-expressions.  */
+      gcc_unreachable ();
+    }
+}
+
+/* Canonicalize a tree T for use in a COND_EXPR as conditional.  Returns
+   a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if
+   we failed to create one.  */
+
+tree
+canonicalize_cond_expr_cond (tree t)
+{
+  /* For (bool)x use x != 0.  */
+  if (TREE_CODE (t) == NOP_EXPR
+      && TREE_TYPE (t) == boolean_type_node)
+    {
+      tree top0 = TREE_OPERAND (t, 0);
+      t = build2 (NE_EXPR, TREE_TYPE (t),
+                 top0, build_int_cst (TREE_TYPE (top0), 0));
+    }
+  /* For !x use x == 0.  */
+  else if (TREE_CODE (t) == TRUTH_NOT_EXPR)
+    {
+      tree top0 = TREE_OPERAND (t, 0);
+      t = build2 (EQ_EXPR, TREE_TYPE (t),
+                 top0, build_int_cst (TREE_TYPE (top0), 0));
+    }
+  /* For cmp ? 1 : 0 use cmp.  */
+  else if (TREE_CODE (t) == COND_EXPR
+          && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
+          && integer_onep (TREE_OPERAND (t, 1))
+          && integer_zerop (TREE_OPERAND (t, 2)))
+    {
+      tree top0 = TREE_OPERAND (t, 0);
+      t = build2 (TREE_CODE (top0), TREE_TYPE (t),
+                 TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
+    }
+
+  if (is_gimple_condexpr (t))
+    return t;
+
+  return NULL_TREE;
+}
+
+#include "gt-gimple.h"
diff --git a/gcc/gimple.def b/gcc/gimple.def
new file mode 100644 (file)
index 0000000..23eaae2
--- /dev/null
@@ -0,0 +1,357 @@
+/* This file contains the definitions of the GIMPLE IR tuples used in GCC.
+
+   Copyright (C) 2007, 2008 Free Software Foundation, Inc.
+   Contributed by Aldy Hernandez <aldyh@redhat.com>
+
+This file is part of GCC.
+
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 3, or (at your option) any later
+version.
+
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
+FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+for more details.
+
+You should have received a copy of the GNU General Public License
+along with GCC; see the file COPYING3.  If not see
+<http://www.gnu.org/licenses/>.  */
+
+/* The format of this file is
+   DEFGSCODE(GIMPLE_symbol, printable name, structure).
+
+   Where symbol is the enumeration name without the ``GIMPLE_''.
+   The argument STRUCTURE is used to compute offsets into each of the
+   tuple structures that contain operands.  Since vector operands
+   are at different offsets depending on the particular structure
+   used, these offsets are computed at compile time for efficient
+   lookup at runtime.  See gimple_ops().
+
+   If a code does not use operand vectors, STRUCTURE should be NULL.  */
+
+/* Error marker.  This is used in similar ways as ERROR_MARK in tree.def.  */
+DEFGSCODE(GIMPLE_ERROR_MARK, "gimple_error_mark", NULL)
+
+/* IMPORTANT.  Do not rearrange the codes between GIMPLE_COND and
+   GIMPLE_RETURN.  The ordering is exposed by gimple_has_ops calls.
+   These are all the GIMPLE statements with register operands.  */
+
+/* GIMPLE_COND <COND_CODE, OP1, OP2, TRUE_LABEL, FALSE_LABEL>
+   represents the conditional jump:
+   
+   if (OP1 COND_CODE OP2) goto TRUE_LABEL else goto FALSE_LABEL
+
+   COND_CODE is the tree code used as the comparison predicate.  It
+   must be of class tcc_comparison.
+
+   OP1 and OP2 are the operands used in the comparison.  They must be
+   accepted by is_gimple_operand.
+
+   TRUE_LABEL and FALSE_LABEL are the LABEL_DECL nodes used as the
+   jump target for the comparison.  */
+DEFGSCODE(GIMPLE_COND, "gimple_cond", struct gimple_statement_with_ops)
+
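+/* Illustrative sketch: the conditional jump
+
+     if (a_1 < b_2) goto L1; else goto L2;
+
+   is represented as GIMPLE_COND <LT_EXPR, a_1, b_2, L1, L2>, where L1
+   and L2 are LABEL_DECLs; it can be built with
+   gimple_build_cond (LT_EXPR, a_1, b_2, L1, L2) declared in gimple.h.  */
+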
+/* GIMPLE_GOTO <TARGET> represents unconditional jumps.
+   TARGET is a LABEL_DECL or an expression node for computed GOTOs.  */
+DEFGSCODE(GIMPLE_GOTO, "gimple_goto", struct gimple_statement_with_ops)
+
+/* GIMPLE_LABEL <LABEL> represents label statements.  LABEL is a
+   LABEL_DECL representing a jump target.  */
+DEFGSCODE(GIMPLE_LABEL, "gimple_label", struct gimple_statement_with_ops)
+
+/* GIMPLE_SWITCH <INDEX, DEFAULT_LAB, LAB1, ..., LABN> represents the
+   multiway branch:
+
+   switch (INDEX)
+   {
+     case LAB1: ...; break;
+     ...
+     case LABN: ...; break;
+     default: ...
+   }
+
+   INDEX is the variable evaluated to decide which label to jump to.
+
+   DEFAULT_LAB, LAB1 ... LABN are the tree nodes representing case labels.
+   They must be CASE_LABEL_EXPR nodes.  */
+DEFGSCODE(GIMPLE_SWITCH, "gimple_switch", struct gimple_statement_with_ops)
+
+/* GIMPLE_CHANGE_DYNAMIC_TYPE indicates a change in the dynamic type
+   of a memory location.  This has no value and generates no
+   executable code.  It is only used for type based alias analysis.
+   This is generated by C++ placement new and it's a direct
+   translation from CHANGE_DYNAMIC_TYPE_EXPR.  The first operand
+   (gimple_cdt_new_type) is the new type.  The second operand
+   (gimple_cdt_location) is the location (pointer) whose type is being
+   changed.  */
+DEFGSCODE(GIMPLE_CHANGE_DYNAMIC_TYPE, "gimple_change_dynamic_type",
+         struct gimple_statement_with_ops)
+
+/* IMPORTANT.
+   
+   Do not rearrange the codes between GIMPLE_ASSIGN and GIMPLE_RETURN.
+   It's exposed by GIMPLE_RANGE_CHECK calls. These are all the GIMPLE
+   statements with memory and register operands.  */
+
+/* GIMPLE_ASSIGN <SUBCODE, LHS, RHS1[, RHS2]> represents the assignment
+   statement
+
+   LHS = RHS1 SUBCODE RHS2.
+
+   SUBCODE is the tree code for the expression computed by the RHS of the
+   assignment.  It must be one of the tree codes accepted by
+   get_gimple_rhs_class.
+
+   LHS is the operand on the LHS of the assignment.  It must be a tree node
+   accepted by is_gimple_operand.
+
+   RHS1 is the first operand on the RHS of the assignment. It must be a tree
+   node accepted by is_gimple_operand.
+
+   RHS2 is the second operand on the RHS of the assignment. It must be a tree
+   node accepted by is_gimple_operand.  This argument exists only if SUBCODE is
+   of class GIMPLE_BINARY_RHS.  */
+DEFGSCODE(GIMPLE_ASSIGN, "gimple_assign",
+         struct gimple_statement_with_memory_ops)
+
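+/* Illustrative sketch: the assignment
+
+     x_1 = y_2 + 3;
+
+   is represented as GIMPLE_ASSIGN <PLUS_EXPR, x_1, y_2, 3> and can be
+   built with gimple_build_assign_with_ops (PLUS_EXPR, x_1, y_2, op3)
+   from gimple.h, where op3 is an INTEGER_CST for 3.  */
+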
+/* GIMPLE_ASM <STRING, I1, ..., IN, O1, ... OM, C1, ..., CP>
+   represents inline assembly statements.
+
+   STRING is the string containing the assembly statements.
+   I1 ... IN are the N input operands.
+   O1 ... OM are the M output operands.
+   C1 ... CP are the P clobber operands.  */
+DEFGSCODE(GIMPLE_ASM, "gimple_asm", struct gimple_statement_asm)
+
+/* GIMPLE_CALL <FN, LHS, ARG1, ..., ARGN[, CHAIN]> represents function
+   calls.
+
+   FN is the callee.  It must be accepted by is_gimple_call_addr.
+
+   LHS is the operand where the return value from FN is stored.  It may
+   be NULL.
+
+   ARG1 ... ARGN are the arguments.  They must all be accepted by
+   is_gimple_operand.
+
+    CHAIN is the optional static chain link for nested functions.  */
+DEFGSCODE(GIMPLE_CALL, "gimple_call",
+         struct gimple_statement_with_memory_ops)
+
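+/* Illustrative sketch: the call
+
+     a_1 = foo (b_2, 5);
+
+   is represented as GIMPLE_CALL <foo, a_1, b_2, 5>.  It can be built
+   with gimple_build_call (foo_decl, 2, b_2, five), where foo_decl is
+   the FUNCTION_DECL of foo and five an INTEGER_CST, followed by
+   gimple_set_lhs to store the result in a_1.  */
+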
+/* GIMPLE_RETURN <RETVAL> represents return statements.
+
+   RETVAL is the value to return or NULL.  If a value is returned it
+   must be accepted by is_gimple_operand.  */
+DEFGSCODE(GIMPLE_RETURN, "gimple_return",
+         struct gimple_statement_with_memory_ops)
+
+/* GIMPLE_BIND <VARS, BLOCK, BODY> represents a lexical scope.
+   VARS is the set of variables declared in that scope.
+   BLOCK is the symbol binding block used for debug information.  
+   BODY is the sequence of statements in the scope.  */
+DEFGSCODE(GIMPLE_BIND, "gimple_bind", NULL)
+
+/* GIMPLE_CATCH <TYPES, HANDLER> represents a typed exception handler.
+   TYPES is the type (or list of types) handled.  HANDLER is the
+   sequence of statements that handle these types.  */
+DEFGSCODE(GIMPLE_CATCH, "gimple_catch", NULL)
+
+/* GIMPLE_EH_FILTER <TYPES, FAILURE> represents an exception
+   specification.  TYPES is a list of allowed types and FAILURE is the
+   sequence of statements to execute on failure.  */
+DEFGSCODE(GIMPLE_EH_FILTER, "gimple_eh_filter", NULL)
+
+/* GIMPLE_PHI <RESULT, ARG1, ..., ARGN> represents the PHI node
+
+   RESULT = PHI <ARG1, ..., ARGN>
+
+   RESULT is the SSA name created by this PHI node.
+
+   ARG1 ... ARGN are the arguments to the PHI node.  N must be
+   exactly the same as the number of incoming edges to the basic block
+   holding the PHI node.  Every argument is either an SSA name or a
+   tree node of class tcc_constant.  */
+DEFGSCODE(GIMPLE_PHI, "gimple_phi", NULL)
+
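+/* Illustrative sketch: at the join of two incoming edges, the node
+
+     x_3 = PHI <x_1, x_2>
+
+   selects x_1 when control arrives via the first incoming edge and x_2
+   when it arrives via the second.  */
+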
+/* GIMPLE_RESX <REGION> resumes execution after an exception.
+   REGION is the region number being left.  */
+DEFGSCODE(GIMPLE_RESX, "gimple_resx", NULL)
+
+/* GIMPLE_TRY <TRY_KIND, EVAL, CLEANUP>
+   represents a try/catch or a try/finally statement.
+
+   TRY_KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY.
+
+   EVAL is the sequence of statements to execute on entry to GIMPLE_TRY.
+
+   CLEANUP is the sequence of statements to execute according to
+   TRY_KIND.  If TRY_KIND is GIMPLE_TRY_CATCH, CLEANUP is only executed
+   if an exception is thrown during execution of EVAL.  If TRY_KIND is
+   GIMPLE_TRY_FINALLY, CLEANUP is always executed after executing EVAL
+   (regardless of whether EVAL finished normally, or jumped out or an
+   exception was thrown).  */
+DEFGSCODE(GIMPLE_TRY, "gimple_try", NULL)
+
+/* GIMPLE_NOP represents the "do nothing" statement.  */
+DEFGSCODE(GIMPLE_NOP, "gimple_nop", NULL)
+
+
+/* IMPORTANT.
+   
+   Do not rearrange any of the GIMPLE_OMP_* codes.  This ordering is
+   exposed by the range check in gimple_omp_subcode().  */
+
+
+/* Tuples used for lowering of OMP_ATOMIC.  Although the form of the OMP_ATOMIC
+   expression is very simple (just in the form mem op= expr), various implicit
+   conversions may cause the expression to become more complex, so that it does
+   not fit the gimple grammar very well.  To overcome this problem, OMP_ATOMIC
+   is rewritten as a sequence of two codes in gimplification:
+
+   GIMPLE_OMP_ATOMIC_LOAD (tmp, mem)
+   val = some computations involving tmp;
+   GIMPLE_OMP_ATOMIC_STORE (val).  */
+DEFGSCODE(GIMPLE_OMP_ATOMIC_LOAD, "gimple_omp_atomic_load", NULL)
+DEFGSCODE(GIMPLE_OMP_ATOMIC_STORE, "gimple_omp_atomic_store", NULL)
+
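+/* As a concrete instance of the sketch above, '#pragma omp atomic'
+   applied to 'x = x + expr' is lowered to
+
+   GIMPLE_OMP_ATOMIC_LOAD (tmp, x)
+   val = tmp + expr;
+   GIMPLE_OMP_ATOMIC_STORE (val)  */
+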
+/* GIMPLE_OMP_CONTINUE marks the location of the loop or sections
+   iteration in partially lowered OpenMP code.  */
+DEFGSCODE(GIMPLE_OMP_CONTINUE, "gimple_omp_continue", NULL)
+
+/* GIMPLE_OMP_CRITICAL <NAME, BODY> represents
+
+   #pragma omp critical [name]
+
+   NAME is the name given to the critical section.
+   BODY is the sequence of statements that are inside the critical section.  */
+DEFGSCODE(GIMPLE_OMP_CRITICAL, "gimple_omp_critical", NULL)
+
+/* GIMPLE_OMP_FOR <BODY, CLAUSES, INDEX, INITIAL, FINAL, COND, INCR, PRE_BODY>
+   represents
+
+   PRE_BODY
+   #pragma omp for [clause1 ... clauseN]
+   for (INDEX = INITIAL; INDEX COND FINAL; INDEX {+=,-=} INCR)
+   BODY
+
+   BODY is the loop body.
+
+   CLAUSES is the list of clauses.
+
+   INDEX must be an integer or pointer variable, which is implicitly thread
+   private.  It must be accepted by is_gimple_operand.
+
+   INITIAL is the initial value given to INDEX. It must be
+   accepted by is_gimple_operand.
+
+   FINAL is the final value that INDEX should take. It must
+   be accepted by is_gimple_operand.
+
+   COND is the condition code for the controlling predicate.  It must
+   be one of { <, >, <=, >= }
+
+   INCR is the loop index increment.  It must be a tree node of class
+   tcc_constant.
+
+   PRE_BODY is a landing pad filled by the gimplifier with things from
+   INIT, COND, and INCR that are technically part of the OMP_FOR
+   structured block, but are evaluated before the loop body begins.
+
+   INITIAL, FINAL and INCR are required to be loop invariant integer
+   expressions that are evaluated without any synchronization.
+   The evaluation order, frequency of evaluation and side-effects are
+   unspecified by the standard.  */
+DEFGSCODE(GIMPLE_OMP_FOR, "gimple_omp_for", NULL)
+
+/* GIMPLE_OMP_MASTER <BODY> represents #pragma omp master.
+   BODY is the sequence of statements to execute in the master section.  */
+DEFGSCODE(GIMPLE_OMP_MASTER, "gimple_omp_master", NULL)
+
+/* GIMPLE_OMP_ORDERED <BODY> represents #pragma omp ordered.
+   BODY is the sequence of statements to execute in the ordered section.  */
+DEFGSCODE(GIMPLE_OMP_ORDERED, "gimple_omp_ordered", NULL)
+
+/* GIMPLE_OMP_PARALLEL <BODY, CLAUSES, CHILD_FN, DATA_ARG> represents
+
+   #pragma omp parallel [CLAUSES]
+   BODY
+
+   BODY is the sequence of statements to be executed by all threads.
+
+   CLAUSES is a TREE_LIST node with all the clauses.
+
+   CHILD_FN is set when outlining the body of the parallel region.
+   All the statements in BODY are moved into this newly created
+   function when converting OMP constructs into low-GIMPLE.
+
+   DATA_ARG is a local variable in the parent function containing data
+   to be shared with CHILD_FN.  This is used to implement all the data
+   sharing clauses.  */
+DEFGSCODE(GIMPLE_OMP_PARALLEL, "gimple_omp_parallel", NULL)
+
+/* GIMPLE_OMP_TASK <BODY, CLAUSES, CHILD_FN, DATA_ARG, COPY_FN,
+                   ARG_SIZE, ARG_ALIGN> represents
+
+   #pragma omp task [CLAUSES]
+   BODY
+
+   BODY is the sequence of statements to be executed by all threads.
+
+   CLAUSES is a TREE_LIST node with all the clauses.
+
+   CHILD_FN is set when outlining the body of the explicit task region.
+   All the statements in BODY are moved into this newly created
+   function when converting OMP constructs into low-GIMPLE.
+
+   DATA_ARG is a local variable in the parent function containing data
+   to be shared with CHILD_FN.  This is used to implement all the data
+   sharing clauses.
+
+   COPY_FN is set when outlining the firstprivate variable initialization.
+   All the needed statements are emitted into the newly created
+   function; when only a memcpy is needed, COPY_FN is NULL.
+
+   ARG_SIZE and ARG_ALIGN are the size and alignment of the incoming
+   data area allocated by GOMP_task and passed to CHILD_FN.  */
+DEFGSCODE(GIMPLE_OMP_TASK, "gimple_omp_task", NULL)
+
+/* GIMPLE_OMP_RETURN marks the end of an OpenMP directive.  */
+DEFGSCODE(GIMPLE_OMP_RETURN, "gimple_omp_return", NULL)
+
+/* GIMPLE_OMP_SECTION <BODY> represents #pragma omp section.
+   BODY is the sequence of statements in the section body.  */
+DEFGSCODE(GIMPLE_OMP_SECTION, "gimple_omp_section", NULL)
+
+/* GIMPLE_OMP_SECTIONS <BODY, CLAUSES, CONTROL> represents #pragma omp sections.
+
+   BODY is the sequence of statements in the sections body.
+   CLAUSES is a TREE_LIST node holding the list of associated clauses.
+   CONTROL is a VAR_DECL used for deciding which of the sections
+   to execute.  */
+DEFGSCODE(GIMPLE_OMP_SECTIONS, "gimple_omp_sections", NULL)
+
+/* GIMPLE_OMP_SECTIONS_SWITCH is a marker placed immediately after
+   GIMPLE_OMP_SECTIONS.  It represents the GIMPLE_SWITCH used to decide which
+   branch is taken.  */
+DEFGSCODE(GIMPLE_OMP_SECTIONS_SWITCH, "gimple_omp_sections_switch", NULL)
+
+/* GIMPLE_OMP_SINGLE <BODY, CLAUSES> represents #pragma omp single
+   BODY is the sequence of statements inside the single section.
+   CLAUSES is a TREE_LIST node holding the associated clauses.  */
+DEFGSCODE(GIMPLE_OMP_SINGLE, "gimple_omp_single", NULL)
+
+/* GIMPLE_PREDICT <PREDICT, OUTCOME> specifies a hint for branch prediction.
+
+   PREDICT is one of the predictors from predict.def.
+
+   OUTCOME is NOT_TAKEN or TAKEN.  */
+DEFGSCODE(GIMPLE_PREDICT, "gimple_predict", NULL)
+
+/*  This node represents a cleanup expression.  It is ONLY USED INTERNALLY
+    by the gimplifier as a placeholder for cleanups, and its uses will be
+    cleaned up by the time gimplification is done.
+    
+    This tuple should not exist outside of the gimplifier proper.  */
+DEFGSCODE(GIMPLE_WITH_CLEANUP_EXPR, "gimple_with_cleanup_expr", NULL)
diff --git a/gcc/gimple.h b/gcc/gimple.h
new file mode 100644 (file)
index 0000000..744461d
--- /dev/null
@@ -0,0 +1,4539 @@
+/* Gimple IR definitions.
+
+   Copyright 2007, 2008 Free Software Foundation, Inc.
+   Contributed by Aldy Hernandez <aldyh@redhat.com>
+
+This file is part of GCC.
+
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 3, or (at your option) any later
+version.
+
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
+FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+for more details.
+
+You should have received a copy of the GNU General Public License
+along with GCC; see the file COPYING3.  If not see
+<http://www.gnu.org/licenses/>.  */
+
+#ifndef GCC_GIMPLE_H
+#define GCC_GIMPLE_H
+
+#include "pointer-set.h"
+#include "vec.h"
+#include "ggc.h"
+#include "tm.h"
+#include "hard-reg-set.h"
+#include "basic-block.h"
+#include "tree-ssa-operands.h"
+
+DEF_VEC_P(gimple);
+DEF_VEC_ALLOC_P(gimple,heap);
+DEF_VEC_ALLOC_P(gimple,gc);
+
+DEF_VEC_P(gimple_seq);
+DEF_VEC_ALLOC_P(gimple_seq,gc);
+DEF_VEC_ALLOC_P(gimple_seq,heap);
+
+enum gimple_code {
+#define DEFGSCODE(SYM, STRING, STRUCT) SYM,
+#include "gimple.def"
+#undef DEFGSCODE
+    LAST_AND_UNUSED_GIMPLE_CODE
+};
+
+extern const char *const gimple_code_name[];
+extern const unsigned char gimple_rhs_class_table[];
+
+/* Error out if a gimple tuple is addressed incorrectly.  */
+#if defined ENABLE_GIMPLE_CHECKING
+extern void gimple_check_failed (const_gimple, const char *, int,          \
+                                 const char *, enum gimple_code,           \
+                                enum tree_code) ATTRIBUTE_NORETURN;
+extern void gimple_range_check_failed (const_gimple, const char *, int,    \
+                                       const char *, enum gimple_code,     \
+                                      enum gimple_code) ATTRIBUTE_NORETURN;
+
+#define GIMPLE_CHECK(GS, CODE)                                         \
+  do {                                                                 \
+    const_gimple __gs = (GS);                                          \
+    if (gimple_code (__gs) != (CODE))                                  \
+      gimple_check_failed (__gs, __FILE__, __LINE__, __FUNCTION__,     \
+                          (CODE), 0);                                  \
+  } while (0)
+#else  /* not ENABLE_GIMPLE_CHECKING  */
+#define GIMPLE_CHECK(GS, CODE)                 (void)0
+#endif
+
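+/* Illustrative sketch: an accessor restricted to assignments would start
+   with
+
+     GIMPLE_CHECK (gs, GIMPLE_ASSIGN);
+
+   so that, when ENABLE_GIMPLE_CHECKING is defined, passing any other
+   statement kind aborts through gimple_check_failed.  */
+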
+/* Class of GIMPLE expressions suitable for the RHS of assignments.  See
+   get_gimple_rhs_class.  */
+enum gimple_rhs_class
+{
+  GIMPLE_INVALID_RHS,  /* The expression cannot be used on the RHS.  */
+  GIMPLE_BINARY_RHS,   /* The expression is a binary operation.  */
+  GIMPLE_UNARY_RHS,    /* The expression is a unary operation.  */
+  GIMPLE_SINGLE_RHS    /* The expression is a single object (an SSA
+                          name, a _DECL, a _REF, etc.).  */
+};
+
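+/* Illustrative sketch: a PLUS_EXPR on the RHS is classified as
+   GIMPLE_BINARY_RHS, a NEGATE_EXPR as GIMPLE_UNARY_RHS, and an
+   SSA_NAME or VAR_DECL as GIMPLE_SINGLE_RHS.  */
+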
+/* Specific flags for individual GIMPLE statements.  These flags are
+   always stored in gimple_statement_base.subcode and they may only be
+   defined for statement codes that do not use sub-codes.
+
+   Values for the masks can overlap as long as the overlapping values
+   are never used in the same statement class.
+
+   The maximum mask value that can be defined is 1 << 15 (i.e., each
+   statement code can hold up to 16 bitflags).
+
+   Keep this list sorted.  */
+enum gf_mask {
+    GF_ASM_INPUT               = 1 << 0,
+    GF_ASM_VOLATILE            = 1 << 1,
+    GF_CALL_CANNOT_INLINE      = 1 << 0,
+    GF_CALL_FROM_THUNK         = 1 << 1,
+    GF_CALL_RETURN_SLOT_OPT    = 1 << 2,
+    GF_CALL_TAILCALL           = 1 << 3,
+    GF_CALL_VA_ARG_PACK                = 1 << 4,
+    GF_OMP_PARALLEL_COMBINED   = 1 << 0,
+
+    /* True on a GIMPLE_OMP_RETURN statement if the return does not require
+       a thread synchronization via some sort of barrier.  The exact barrier
+       that would otherwise be emitted is dependent on the OMP statement with
+       which this return is associated.  */
+    GF_OMP_RETURN_NOWAIT       = 1 << 0,
+
+    GF_OMP_SECTION_LAST                = 1 << 0,
+    GF_PREDICT_TAKEN           = 1 << 15
+};
+
+/* Masks for selecting a pass local flag (PLF) to work on.  These
+   masks are used by gimple_set_plf and gimple_plf.  */
+enum plf_mask {
+    GF_PLF_1   = 1 << 0,
+    GF_PLF_2   = 1 << 1
+};
+
+/* A node in a gimple_seq_d.  */
+struct gimple_seq_node_d GTY((chain_next ("%h.next"), chain_prev ("%h.prev")))
+{
+  gimple stmt;
+  struct gimple_seq_node_d *prev;
+  struct gimple_seq_node_d *next;
+};
+
+/* A double-linked sequence of gimple statements.  */
+struct gimple_seq_d GTY ((chain_next ("%h.next_free")))
+{
+  /* First and last statements in the sequence.  */
+  gimple_seq_node first;
+  gimple_seq_node last;
+
+  /* Sequences are created/destroyed frequently.  To minimize
+     allocation activity, deallocated sequences are kept in a pool of
+     available sequences.  This is the pointer to the next free
+     sequence in the pool.  */
+  gimple_seq next_free;
+};
+
+
+/* Return the first node in GIMPLE sequence S.  */
+
+static inline gimple_seq_node
+gimple_seq_first (const_gimple_seq s)
+{
+  return s ? s->first : NULL;
+}
+
+
+/* Return the first statement in GIMPLE sequence S.  */
+
+static inline gimple
+gimple_seq_first_stmt (const_gimple_seq s)
+{
+  gimple_seq_node n = gimple_seq_first (s);
+  return (n) ? n->stmt : NULL;
+}
+
+
+/* Return the last node in GIMPLE sequence S.  */
+
+static inline gimple_seq_node
+gimple_seq_last (const_gimple_seq s)
+{
+  return s ? s->last : NULL;
+}
+
+
+/* Return the last statement in GIMPLE sequence S.  */
+
+static inline gimple
+gimple_seq_last_stmt (const_gimple_seq s)
+{
+  gimple_seq_node n = gimple_seq_last (s);
+  return (n) ? n->stmt : NULL;
+}
+
+
+/* Set the last node in GIMPLE sequence S to LAST.  */
+
+static inline void
+gimple_seq_set_last (gimple_seq s, gimple_seq_node last)
+{
+  s->last = last;
+}
+
+
+/* Set the first node in GIMPLE sequence S to FIRST.  */
+
+static inline void
+gimple_seq_set_first (gimple_seq s, gimple_seq_node first)
+{
+  s->first = first;
+}
+
+
+/* Return true if GIMPLE sequence S is empty.  */
+
+static inline bool
+gimple_seq_empty_p (const_gimple_seq s)
+{
+  return s == NULL || s->first == NULL;
+}
+
+
+void gimple_seq_add_stmt (gimple_seq *, gimple);
+
+/* Allocate a new sequence and initialize its first element with STMT.  */
+
+static inline gimple_seq
+gimple_seq_alloc_with_stmt (gimple stmt)
+{
+  gimple_seq seq = NULL;
+  gimple_seq_add_stmt (&seq, stmt);
+  return seq;
+}
+
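+/* Typical usage sketch: build up a sequence statement by statement.
+
+     gimple_seq seq = gimple_seq_alloc_with_stmt (stmt1);
+     gimple_seq_add_stmt (&seq, stmt2);
+
+   As the helper above shows, gimple_seq_add_stmt also accepts a NULL
+   sequence and allocates one on demand.  */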
+
+/* Returns the sequence of statements in BB.  */
+
+static inline gimple_seq
+bb_seq (const_basic_block bb)
+{
+  return (!(bb->flags & BB_RTL) && bb->il.gimple) ? bb->il.gimple->seq : NULL;
+}
+
+
+/* Sets the sequence of statements in BB to SEQ.  */
+
+static inline void
+set_bb_seq (basic_block bb, gimple_seq seq)
+{
+  gcc_assert (!(bb->flags & BB_RTL));
+  bb->il.gimple->seq = seq;
+}
+
+/* Iterator object for GIMPLE statement sequences.  */
+
+typedef struct
+{
+  /* Sequence node holding the current statement.  */
+  gimple_seq_node ptr;
+
+  /* Sequence and basic block holding the statement.  These fields
+     are necessary to handle edge cases such as when statement is
+     added to an empty basic block or when the last statement of a
+     block/sequence is removed.  */
+  gimple_seq seq;
+  basic_block bb;
+} gimple_stmt_iterator;
+
+
+/* Data structure definitions for GIMPLE tuples.  NOTE: word markers
+   are for 64 bit hosts.  */
+
+struct gimple_statement_base GTY(())
+{
+  /* [ WORD 1 ]
+     Main identifying code for a tuple.  */
+  ENUM_BITFIELD(gimple_code) code : 8;
+
+  /* Nonzero if a warning should not be emitted on this tuple.  */
+  unsigned int no_warning      : 1;
+
+  /* Nonzero if this tuple has been visited.  Passes are responsible
+     for clearing this bit before using it.  */
+  unsigned int visited         : 1;
+
+  /* Nonzero if this tuple represents a non-temporal move.  */
+  unsigned int nontemporal_move        : 1;
+
+  /* Pass local flags.  These flags are free for any pass to use as
+     they see fit.  Passes should not assume that these flags contain
+     any useful value when the pass starts.  Any initial state that
+     the pass requires should be set on entry to the pass.  See
+     gimple_set_plf and gimple_plf for usage.  */
+  unsigned int plf             : 2;
+
+  /* Nonzero if this statement has been modified and needs to have its
+     operands rescanned.  */
+  unsigned modified            : 1;
+
+  /* Nonzero if this statement contains volatile operands.  */
+  unsigned has_volatile_ops    : 1;
+
+  /* Nonzero if this statement contains memory refernces.  */
+  unsigned references_memory_p         : 1;
+
+  /* The SUBCODE field can be used for tuple-specific flags for tuples
+     that do not require subcodes.  Note that SUBCODE should be at
+     least as wide as tree codes, as several tuples store tree codes
+     in there.  */
+  unsigned int subcode         : 16;
+
+  /* UID of this statement.  */
+  unsigned uid;
+
+  /* [ WORD 2 ]
+     Locus information for debug info.  */
+  location_t location;
+
+  /* Number of operands in this tuple.  */
+  unsigned num_ops;
+
+  /* [ WORD 3 ]
+     Basic block holding this statement.  */
+  struct basic_block_def *bb;
+
+  /* [ WORD 4 ]
+     Lexical block holding this statement.  */
+  tree block;
+};
+
+
+/* Base structure for tuples with operands.  */
+
+struct gimple_statement_with_ops_base GTY(())
+{
+  /* [ WORD  1-4 ]  */
+  struct gimple_statement_base gsbase;
+
+  /* [ WORD 5 ]
+     Symbols whose addresses are taken by this statement (i.e., they
+     appear inside ADDR_EXPR nodes).  */
+  bitmap GTY((skip (""))) addresses_taken;
+
+  /* [ WORD 6-7 ]
+     SSA operand vectors.  NOTE: It should be possible to
+     amalgamate these vectors with the operand vector OP.  However,
+     the SSA operand vectors are organized differently and contain
+     more information (like immediate use chaining).  */
+  struct def_optype_d GTY((skip (""))) *def_ops;
+  struct use_optype_d GTY((skip (""))) *use_ops;
+};
+
+
+/* Statements that take register operands.  */
+
+struct gimple_statement_with_ops GTY(())
+{
+  /* [ WORD 1-7 ]  */
+  struct gimple_statement_with_ops_base opbase;
+
+  /* [ WORD 8 ]
+     Operand vector.  NOTE!  This must always be the last field
+     of this structure.  In particular, this means that this
+     structure cannot be embedded inside another one.  */
+  tree GTY((length ("%h.opbase.gsbase.num_ops"))) op[1];
+};
+
+
+/* Base for statements that take both memory and register operands.  */
+
+struct gimple_statement_with_memory_ops_base GTY(())
+{
+  /* [ WORD 1-7 ]  */
+  struct gimple_statement_with_ops_base opbase;
+
+  /* [ WORD 8-9 ]  
+     Vectors for virtual operands.  */
+  struct voptype_d GTY((skip (""))) *vdef_ops;
+  struct voptype_d GTY((skip (""))) *vuse_ops;
+
+  /* [ WORD 9-10 ]
+     Symbols stored/loaded by this statement.  */
+  bitmap GTY((skip (""))) stores;
+  bitmap GTY((skip (""))) loads;
+};
+
+
+/* Statements that take both memory and register operands.  */
+
+struct gimple_statement_with_memory_ops GTY(())
+{
+  /* [ WORD 1-10 ]  */
+  struct gimple_statement_with_memory_ops_base membase;
+
+  /* [ WORD 11 ]
+     Operand vector.  NOTE!  This must always be the last field
+     of this structure.  In particular, this means that this
+     structure cannot be embedded inside another one.  */
+  tree GTY((length ("%h.membase.opbase.gsbase.num_ops"))) op[1];
+};
+
+
+/* OpenMP statements (#pragma omp).  */
+
+struct gimple_statement_omp GTY(())
+{
+  /* [ WORD 1-4 ]  */
+  struct gimple_statement_base gsbase;
+
+  /* [ WORD 5 ]  */
+  gimple_seq body;
+};
+
+
+/* GIMPLE_BIND */
+
+struct gimple_statement_bind GTY(())
+{
+  /* [ WORD 1-4 ]  */
+  struct gimple_statement_base gsbase;
+
+  /* [ WORD 5 ]
+     Variables declared in this scope.  */
+  tree vars;
+
+  /* [ WORD 6 ]
+     This is different than the BLOCK field in gimple_statement_base,
+     which is analogous to TREE_BLOCK (i.e., the lexical block holding
+     this statement).  This field is the equivalent of BIND_EXPR_BLOCK
+     in tree land (i.e., the lexical scope defined by this bind).  See
+     gimple-low.c.  */
+  tree block;
+
+  /* [ WORD 7 ]  */
+  gimple_seq body;
+};
+
+
+/* GIMPLE_CATCH */
+
+struct gimple_statement_catch GTY(())
+{
+  /* [ WORD 1-4 ]  */
+  struct gimple_statement_base gsbase;
+
+  /* [ WORD 5 ]  */
+  tree types;
+
+  /* [ WORD 6 ]  */
+  gimple_seq handler;
+};
+
+
+/* GIMPLE_EH_FILTER */
+
+struct gimple_statement_eh_filter GTY(())
+{
+  /* [ WORD 1-4 ]  */
+  struct gimple_statement_base gsbase;
+
+  /* Subcode: EH_FILTER_MUST_NOT_THROW.  A boolean flag analogous to
+     the tree counterpart.  */
+
+  /* [ WORD 5 ]
+     Filter types.  */
+  tree types;
+
+  /* [ WORD 6 ]
+     Failure actions.  */
+  gimple_seq failure;
+};
+
+
+/* GIMPLE_PHI */
+
+struct gimple_statement_phi GTY(())
+{
+  /* [ WORD 1-4 ]  */
+  struct gimple_statement_base gsbase;
+
+  /* [ WORD 5 ]  */
+  unsigned capacity;
+  unsigned nargs;
+
+  /* [ WORD 6 ]  */
+  tree result;
+
+  /* [ WORD 7 ]  */
+  struct phi_arg_d GTY ((length ("%h.nargs"))) args[1];
+};
+
+
+/* GIMPLE_RESX */
+
+struct gimple_statement_resx GTY(())
+{
+  /* [ WORD 1-4 ]  */
+  struct gimple_statement_base gsbase;
+
+  /* [ WORD 5 ]
+     Exception region number.  */
+  int region;
+};
+
+
+/* GIMPLE_TRY */
+
+struct gimple_statement_try GTY(())
+{
+  /* [ WORD 1-4 ]  */
+  struct gimple_statement_base gsbase;
+
+  /* [ WORD 5 ]
+     Expression to evaluate.  */
+  gimple_seq eval;
+
+  /* [ WORD 6 ]
+     Cleanup expression.  */
+  gimple_seq cleanup;
+};
+
+/* Kind of GIMPLE_TRY statements.  */
+enum gimple_try_flags
+{
+  /* A try/catch.  */
+  GIMPLE_TRY_CATCH = 1 << 0,
+
+  /* A try/finally.  */
+  GIMPLE_TRY_FINALLY = 1 << 1,
+  GIMPLE_TRY_KIND = GIMPLE_TRY_CATCH | GIMPLE_TRY_FINALLY,
+
+  /* Analogous to TRY_CATCH_IS_CLEANUP.  */
+  GIMPLE_TRY_CATCH_IS_CLEANUP = 1 << 2
+};
+
+/* GIMPLE_WITH_CLEANUP_EXPR */
+
+struct gimple_statement_wce GTY(())
+{
+  /* [ WORD 1-4 ]  */
+  struct gimple_statement_base gsbase;
+
+  /* Subcode: CLEANUP_EH_ONLY.  True if the cleanup should only be
+             executed if an exception is thrown, not on normal exit of its
+             scope.  This flag is analogous to the CLEANUP_EH_ONLY flag
+             in TARGET_EXPRs.  */
+
+  /* [ WORD 5 ]
+     Cleanup expression.  */
+  gimple_seq cleanup;
+};
+
+
+/* GIMPLE_ASM  */
+
+struct gimple_statement_asm GTY(())
+{
+  /* [ WORD 1-10 ]  */
+  struct gimple_statement_with_memory_ops_base membase;
+
+  /* [ WORD 11 ]
+     __asm__ statement.  */
+  const char *string;
+
+  /* [ WORD 12 ]
+       Number of inputs, outputs and clobbers.  */
+  unsigned char ni;
+  unsigned char no;
+  unsigned short nc;
+
+  /* [ WORD 13 ]
+     Operand vector.  NOTE!  This must always be the last field
+     of this structure.  In particular, this means that this
+     structure cannot be embedded inside another one.  */
+  tree GTY((length ("%h.membase.opbase.gsbase.num_ops"))) op[1];
+};
+
+/* GIMPLE_OMP_CRITICAL */
+
+struct gimple_statement_omp_critical GTY(())
+{
+  /* [ WORD 1-5 ]  */
+  struct gimple_statement_omp omp;
+
+  /* [ WORD 6 ]
+     Critical section name.  */
+  tree name;
+};
+
+
+struct gimple_omp_for_iter GTY(())
+{
+  /* Condition code.  */
+  enum tree_code cond;
+
+  /* Index variable.  */
+  tree index;
+    
+  /* Initial value.  */
+  tree initial;
+
+  /* Final value.  */
+  tree final;
+                                 
+  /* Increment.  */
+  tree incr;
+};
+
+/* GIMPLE_OMP_FOR */
+
+struct gimple_statement_omp_for GTY(())
+{
+  /* [ WORD 1-5 ]  */
+  struct gimple_statement_omp omp;
+
+  /* [ WORD 6 ]  */
+  tree clauses;
+
+  /* [ WORD 7 ]
+     Number of elements in iter array.  */
+  size_t collapse;
+
+  /* [ WORD 8 ]  */
+  struct gimple_omp_for_iter * GTY((length ("%h.collapse"))) iter;
+
+  /* [ WORD 9 ]
+     Pre-body evaluated before the loop body begins.  */
+  gimple_seq pre_body;
+};
+
+
+/* GIMPLE_OMP_PARALLEL */
+
+struct gimple_statement_omp_parallel GTY(())
+{
+  /* [ WORD 1-5 ]  */
+  struct gimple_statement_omp omp;
+
+  /* [ WORD 6 ]
+     Clauses.  */
+  tree clauses;
+
+  /* [ WORD 7 ]
+     Child function holding the body of the parallel region.  */
+  tree child_fn;
+
+  /* [ WORD 8 ]
+     Shared data argument.  */
+  tree data_arg;
+};
+
+
+/* GIMPLE_OMP_TASK */
+
+struct gimple_statement_omp_task GTY(())
+{
+  /* [ WORD 1-8 ]  */
+  struct gimple_statement_omp_parallel par;
+
+  /* [ WORD 9 ]
+     Child function holding firstprivate initialization if needed.  */
+  tree copy_fn;
+
+  /* [ WORD 10-11 ]
+     Size and alignment in bytes of the argument data block.  */
+  tree arg_size;
+  tree arg_align;
+};
+
+
+/* GIMPLE_OMP_SECTION */
+/* Uses struct gimple_statement_omp.  */
+
+
+/* GIMPLE_OMP_SECTIONS */
+
+struct gimple_statement_omp_sections GTY(())
+{
+  /* [ WORD 1-5 ]  */
+  struct gimple_statement_omp omp;
+
+  /* [ WORD 6 ]  */
+  tree clauses;
+
+  /* [ WORD 7 ]
+     The control variable used for deciding which of the sections to
+     execute.  */
+  tree control;
+};
+
+/* GIMPLE_OMP_CONTINUE.
+
+   Note: This does not inherit from gimple_statement_omp, because we
+         do not need the body field.  */
+
+struct gimple_statement_omp_continue GTY(())
+{
+  /* [ WORD 1-4 ]  */
+  struct gimple_statement_base gsbase;
+
+  /* [ WORD 5 ]  */
+  tree control_def;
+
+  /* [ WORD 6 ]  */
+  tree control_use;
+};
+
+/* GIMPLE_OMP_SINGLE */
+
+struct gimple_statement_omp_single GTY(())
+{
+  /* [ WORD 1-5 ]  */
+  struct gimple_statement_omp omp;
+
+  /* [ WORD 6 ]  */
+  tree clauses;
+};
+
+
+/* GIMPLE_OMP_ATOMIC_LOAD.  
+   Note: This is based on gimple_statement_base, not g_s_omp, because g_s_omp
+   contains a sequence, which we don't need here.  */
+
+struct gimple_statement_omp_atomic_load GTY(())
+{
+  /* [ WORD 1-4 ]  */
+  struct gimple_statement_base gsbase;
+
+  /* [ WORD 5-6 ]  */
+  tree rhs, lhs;
+};
+
+/* GIMPLE_OMP_ATOMIC_STORE.
+   See note on GIMPLE_OMP_ATOMIC_LOAD.  */
+
+struct gimple_statement_omp_atomic_store GTY(())
+{
+  /* [ WORD 1-4 ]  */
+  struct gimple_statement_base gsbase;
+
+  /* [ WORD 5 ]  */
+  tree val;
+};
+
+enum gimple_statement_structure_enum {
+#define DEFGSSTRUCT(SYM, STRING)       SYM,
+#include "gsstruct.def"
+#undef DEFGSSTRUCT
+    LAST_GSS_ENUM
+};
+
+
+/* Define the overall contents of a gimple tuple.  It may be any of the
+   structures declared above for various types of tuples.  */
+
+union gimple_statement_d GTY ((desc ("gimple_statement_structure (&%h)")))
+{
+  struct gimple_statement_base GTY ((tag ("GSS_BASE"))) gsbase;
+  struct gimple_statement_with_ops GTY ((tag ("GSS_WITH_OPS"))) gsops;
+  struct gimple_statement_with_memory_ops GTY ((tag ("GSS_WITH_MEM_OPS"))) gsmem;
+  struct gimple_statement_omp GTY ((tag ("GSS_OMP"))) omp;
+  struct gimple_statement_bind GTY ((tag ("GSS_BIND"))) gimple_bind;
+  struct gimple_statement_catch GTY ((tag ("GSS_CATCH"))) gimple_catch;
+  struct gimple_statement_eh_filter GTY ((tag ("GSS_EH_FILTER"))) gimple_eh_filter;
+  struct gimple_statement_phi GTY ((tag ("GSS_PHI"))) gimple_phi;
+  struct gimple_statement_resx GTY ((tag ("GSS_RESX"))) gimple_resx;
+  struct gimple_statement_try GTY ((tag ("GSS_TRY"))) gimple_try;
+  struct gimple_statement_wce GTY ((tag ("GSS_WCE"))) gimple_wce;
+  struct gimple_statement_asm GTY ((tag ("GSS_ASM"))) gimple_asm;
+  struct gimple_statement_omp_critical GTY ((tag ("GSS_OMP_CRITICAL"))) gimple_omp_critical;
+  struct gimple_statement_omp_for GTY ((tag ("GSS_OMP_FOR"))) gimple_omp_for;
+  struct gimple_statement_omp_parallel GTY ((tag ("GSS_OMP_PARALLEL"))) gimple_omp_parallel;
+  struct gimple_statement_omp_task GTY ((tag ("GSS_OMP_TASK"))) gimple_omp_task;
+  struct gimple_statement_omp_sections GTY ((tag ("GSS_OMP_SECTIONS"))) gimple_omp_sections;
+  struct gimple_statement_omp_single GTY ((tag ("GSS_OMP_SINGLE"))) gimple_omp_single;
+  struct gimple_statement_omp_continue GTY ((tag ("GSS_OMP_CONTINUE"))) gimple_omp_continue;
+  struct gimple_statement_omp_atomic_load GTY ((tag ("GSS_OMP_ATOMIC_LOAD"))) gimple_omp_atomic_load;
+  struct gimple_statement_omp_atomic_store GTY ((tag ("GSS_OMP_ATOMIC_STORE"))) gimple_omp_atomic_store;
+};
+
+/* In gimple.c.  */
+gimple gimple_build_return (tree);
+
+gimple gimple_build_assign_stat (tree, tree MEM_STAT_DECL);
+#define gimple_build_assign(l,r) gimple_build_assign_stat (l, r MEM_STAT_INFO)
+
+void extract_ops_from_tree (tree, enum tree_code *, tree *, tree *);
+
+gimple gimple_build_assign_with_ops_stat (enum tree_code, tree, tree,
+                                         tree MEM_STAT_DECL);
+#define gimple_build_assign_with_ops(c,o1,o2,o3) \
+  gimple_build_assign_with_ops_stat (c, o1, o2, o3 MEM_STAT_INFO)
+
+gimple gimple_build_call_vec (tree, VEC(tree, heap) *);
+gimple gimple_build_call (tree, unsigned, ...);
+gimple gimple_build_call_from_tree (tree);
+gimple gimplify_assign (tree, tree, gimple_seq *);
+gimple gimple_build_cond (enum tree_code, tree, tree, tree, tree);
+gimple gimple_build_label (tree label);
+gimple gimple_build_goto (tree dest);
+gimple gimple_build_nop (void);
+gimple gimple_build_bind (tree, gimple_seq, tree);
+gimple gimple_build_asm (const char *, unsigned, unsigned, unsigned, ...);
+gimple gimple_build_asm_vec (const char *, VEC(tree,gc) *, VEC(tree,gc) *,
+                             VEC(tree,gc) *);
+gimple gimple_build_catch (tree, gimple_seq);
+gimple gimple_build_eh_filter (tree, gimple_seq);
+gimple gimple_build_try (gimple_seq, gimple_seq, unsigned int);
+gimple gimple_build_wce (gimple_seq);
+gimple gimple_build_resx (int);
+gimple gimple_build_switch (unsigned, tree, tree, ...);
+gimple gimple_build_switch_vec (tree, tree, VEC(tree,heap) *);
+gimple gimple_build_omp_parallel (gimple_seq, tree, tree, tree);
+gimple gimple_build_omp_task (gimple_seq, tree, tree, tree, tree, tree, tree);
+gimple gimple_build_omp_for (gimple_seq, tree, size_t, gimple_seq);
+gimple gimple_build_omp_critical (gimple_seq, tree);
+gimple gimple_build_omp_section (gimple_seq);
+gimple gimple_build_omp_continue (tree, tree);
+gimple gimple_build_omp_master (gimple_seq);
+gimple gimple_build_omp_return (bool);
+gimple gimple_build_omp_ordered (gimple_seq);
+gimple gimple_build_omp_sections (gimple_seq, tree);
+gimple gimple_build_omp_sections_switch (void);
+gimple gimple_build_omp_single (gimple_seq, tree);
+gimple gimple_build_cdt (tree, tree);
+gimple gimple_build_omp_atomic_load (tree, tree);
+gimple gimple_build_omp_atomic_store (tree);
+gimple gimple_build_predict (enum br_predictor, enum prediction);
+enum gimple_statement_structure_enum gimple_statement_structure (gimple);
+enum gimple_statement_structure_enum gss_for_assign (enum tree_code);
+void sort_case_labels (VEC(tree,heap) *);
+void gimple_set_body (tree, gimple_seq);
+gimple_seq gimple_body (tree);
+gimple_seq gimple_seq_alloc (void);
+void gimple_seq_free (gimple_seq);
+void gimple_seq_add_seq (gimple_seq *, gimple_seq);
+gimple_seq gimple_seq_copy (gimple_seq);
+int gimple_call_flags (const_gimple);
+bool gimple_assign_copy_p (gimple);
+bool gimple_assign_ssa_name_copy_p (gimple);
+bool gimple_assign_single_p (gimple);
+bool gimple_assign_unary_nop_p (gimple);
+void gimple_set_bb (gimple, struct basic_block_def *);
+tree gimple_fold (const_gimple);
+void gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *, tree);
+void gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *, enum tree_code,
+                                    tree, tree);
+tree gimple_get_lhs (const_gimple);
+void gimple_set_lhs (gimple, tree);
+gimple gimple_copy (gimple);
+bool is_gimple_operand (const_tree);
+void gimple_set_modified (gimple, bool);
+void gimple_cond_get_ops_from_tree (tree, enum tree_code *, tree *, tree *);
+gimple gimple_build_cond_from_tree (tree, tree, tree);
+void gimple_cond_set_condition_from_tree (gimple, tree);
+bool gimple_has_side_effects (const_gimple);
+bool gimple_rhs_has_side_effects (const_gimple);
+bool gimple_could_trap_p (gimple);
+bool gimple_assign_rhs_could_trap_p (gimple);
+void gimple_regimplify_operands (gimple, gimple_stmt_iterator *);
+bool empty_body_p (gimple_seq);
+unsigned get_gimple_rhs_num_ops (enum tree_code);
+
+/* Returns true iff T is a valid GIMPLE statement.  */
+extern bool is_gimple_stmt (tree);
+
+/* Returns true iff TYPE is a valid type for a scalar register variable.  */
+extern bool is_gimple_reg_type (tree);
+/* Returns true iff T is a scalar register variable.  */
+extern bool is_gimple_reg (tree);
+/* Returns true if T is a GIMPLE temporary variable, false otherwise.  */
+extern bool is_gimple_formal_tmp_var (tree);
+/* Returns true if T is a GIMPLE temporary register variable.  */
+extern bool is_gimple_formal_tmp_reg (tree);
+/* Returns true iff T is any sort of variable.  */
+extern bool is_gimple_variable (tree);
+/* Returns true iff T is any sort of symbol.  */
+extern bool is_gimple_id (tree);
+/* Returns true iff T is a variable or an INDIRECT_REF (of a variable).  */
+extern bool is_gimple_min_lval (tree);
+/* Returns true iff T is something whose address can be taken.  */
+extern bool is_gimple_addressable (tree);
+/* Returns true iff T is any valid GIMPLE lvalue.  */
+extern bool is_gimple_lvalue (tree);
+
+/* Returns true iff T is a GIMPLE address.  */
+bool is_gimple_address (const_tree);
+/* Returns true iff T is a GIMPLE invariant address.  */
+bool is_gimple_invariant_address (const_tree);
+/* Returns true iff T is a valid GIMPLE constant.  */
+bool is_gimple_constant (const_tree);
+/* Returns true iff T is a GIMPLE restricted function invariant.  */
+extern bool is_gimple_min_invariant (const_tree);
+/* Returns true iff T is a GIMPLE rvalue.  */
+extern bool is_gimple_val (tree);
+/* Returns true iff T is a GIMPLE asm statement input.  */
+extern bool is_gimple_asm_val (tree);
+/* Returns true iff T is a valid rhs for a MODIFY_EXPR where the LHS is a
+   GIMPLE temporary, a renamed user variable, or something else,
+   respectively.  */
+extern bool is_gimple_formal_tmp_rhs (tree);
+extern bool is_gimple_reg_rhs (tree);
+extern bool is_gimple_mem_rhs (tree);
+
+/* Returns true iff T is a valid if-statement condition.  */
+extern bool is_gimple_condexpr (tree);
+
+/* Returns true iff T is a type conversion.  */
+extern bool is_gimple_cast (tree);
+/* Returns true iff T is a variable that does not need to live in memory.  */
+extern bool is_gimple_non_addressable (tree t);
+
+/* Returns true iff T is a valid call address expression.  */
+extern bool is_gimple_call_addr (tree);
+/* If T makes a function call, returns the CALL_EXPR operand.  */
+extern tree get_call_expr_in (tree t);
+
+extern void recalculate_side_effects (tree);
+
+/* In gimplify.c  */
+extern tree create_tmp_var_raw (tree, const char *);
+extern tree create_tmp_var_name (const char *);
+extern tree create_tmp_var (tree, const char *);
+extern tree get_initialized_tmp_var (tree, gimple_seq *, gimple_seq *);
+extern tree get_formal_tmp_var (tree, gimple_seq *);
+extern void declare_vars (tree, gimple, bool);
+extern void tree_annotate_all_with_location (tree *, location_t);
+extern void annotate_all_with_location (gimple_seq, location_t);
+
+/* Validation of GIMPLE expressions.  Note that these predicates only check
+   the basic form of the expression; they don't recurse to make sure that
+   underlying nodes are also of the right form.  */
+typedef bool (*gimple_predicate)(tree);
+
+
+/* FIXME we should deduce this from the predicate.  */
+typedef enum fallback_t {
+  fb_none = 0,         /* Do not generate a temporary.  */
+
+  fb_rvalue = 1,       /* Generate an rvalue to hold the result of a
+                          gimplified expression.  */
+
+  fb_lvalue = 2,       /* Generate an lvalue to hold the result of a
+                          gimplified expression.  */
+
+  fb_mayfail = 4,      /* Gimplification may fail.  Error issued
+                          afterwards.  */
+  fb_either = fb_rvalue | fb_lvalue
+} fallback_t;
+
+enum gimplify_status {
+  GS_ERROR     = -2,   /* Something Bad Seen.  */
+  GS_UNHANDLED = -1,   /* A langhook result for "I dunno".  */
+  GS_OK                = 0,    /* We did something, maybe more to do.  */
+  GS_ALL_DONE  = 1     /* The expression is fully gimplified.  */
+};
+
+struct gimplify_ctx
+{
+  struct gimplify_ctx *prev_context;
+
+  VEC(gimple,heap) *bind_expr_stack;
+  tree temps;
+  gimple_seq conditional_cleanups;
+  tree exit_label;
+  tree return_temp;
+  
+  VEC(tree,heap) *case_labels;
+  /* The formal temporary table.  Should this be persistent?  */
+  htab_t temp_htab;
+
+  int conditions;
+  bool save_stack;
+  bool into_ssa;
+  bool allow_rhs_cond_expr;
+};
+
+extern enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
+                                          bool (*) (tree), fallback_t);
+extern void gimplify_type_sizes (tree, gimple_seq *);
+extern void gimplify_one_sizepos (tree *, gimple_seq *);
+extern bool gimplify_stmt (tree *, gimple_seq *);
+extern gimple gimplify_body (tree *, tree, bool);
+extern void push_gimplify_context (struct gimplify_ctx *);
+extern void pop_gimplify_context (gimple);
+extern void gimplify_and_add (tree, gimple_seq *);
+
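+/* Typical gimplification usage sketch (BODY_BIND stands for the
+   GIMPLE_BIND holding the gimplified body, or NULL):
+
+     struct gimplify_ctx gctx;
+     push_gimplify_context (&gctx);
+     ... calls to gimplify_expr / gimplify_stmt ...
+     pop_gimplify_context (body_bind);  */
+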
+/* Miscellaneous helpers.  */
+extern void gimple_add_tmp_var (tree);
+extern gimple gimple_current_bind_expr (void);
+extern VEC(gimple, heap) *gimple_bind_expr_stack (void);
+extern tree voidify_wrapper_expr (tree, tree);
+extern tree build_and_jump (tree *);
+extern tree alloc_stmt_list (void);
+extern void free_stmt_list (tree);
+extern tree force_labels_r (tree *, int *, void *);
+extern enum gimplify_status gimplify_va_arg_expr (tree *, gimple_seq *,
+                                                 gimple_seq *);
+struct gimplify_omp_ctx;
+extern void omp_firstprivatize_variable (struct gimplify_omp_ctx *, tree);
+extern tree gimple_boolify (tree);
+extern gimple_predicate rhs_predicate_for (tree);
+extern tree canonicalize_cond_expr_cond (tree);
+
+/* In omp-low.c.  */
+extern void diagnose_omp_structured_block_errors (tree);
+extern tree omp_reduction_init (tree, tree);
+
+/* In tree-nested.c.  */
+extern void lower_nested_functions (tree);
+extern void insert_field_into_struct (tree, tree);
+
+/* In gimplify.c.  */
+extern void gimplify_function_tree (tree);
+
+/* In cfgexpand.c.  */
+extern tree gimple_assign_rhs_to_tree (gimple);
+
+/* In builtins.c  */
+extern bool validate_gimple_arglist (const_gimple, ...);
+
+/* In tree-ssa-operands.c  */
+extern void gimple_add_to_addresses_taken (gimple, tree);
+
+/* Return the code for GIMPLE statement G.  */
+
+static inline enum gimple_code
+gimple_code (const_gimple g)
+{
+  return g->gsbase.code;
+}
+
+
+/* Return true if statement G has sub-statements.  This is only true for
+   High GIMPLE statements.  */
+
+static inline bool
+gimple_has_substatements (gimple g)
+{
+  switch (gimple_code (g))
+    {
+    case GIMPLE_BIND:
+    case GIMPLE_CATCH:
+    case GIMPLE_EH_FILTER:
+    case GIMPLE_TRY:
+    case GIMPLE_OMP_FOR:
+    case GIMPLE_OMP_MASTER:
+    case GIMPLE_OMP_ORDERED:
+    case GIMPLE_OMP_SECTION:
+    case GIMPLE_OMP_PARALLEL:
+    case GIMPLE_OMP_TASK:
+    case GIMPLE_OMP_SECTIONS:
+    case GIMPLE_OMP_SINGLE:
+    case GIMPLE_WITH_CLEANUP_EXPR:
+      return true;
+
+    default:
+      return false;
+    }
+}
+         
+
+/* Return the basic block holding statement G.  */
+
+static inline struct basic_block_def *
+gimple_bb (const_gimple g)
+{
+  return g->gsbase.bb;
+}
+
+
+/* Return the lexical scope block holding statement G.  */
+
+static inline tree
+gimple_block (const_gimple g)
+{
+  return g->gsbase.block;
+}
+
+
+/* Set BLOCK to be the lexical scope block holding statement G.  */
+
+static inline void
+gimple_set_block (gimple g, tree block)
+{
+  g->gsbase.block = block;
+}
+
+
+/* Return location information for statement G.  */
+
+static inline location_t
+gimple_location (const_gimple g)
+{
+  return g->gsbase.location;
+}
+
+/* Return pointer to location information for statement G.  */
+
+static inline const location_t *
+gimple_location_ptr (const_gimple g)
+{
+  return &g->gsbase.location;
+}
+
+
+/* Set location information for statement G.  */
+
+static inline void
+gimple_set_location (gimple g, location_t location)
+{
+  g->gsbase.location = location;
+}
+
+
+/* Return true if G contains location information.  */
+
+static inline bool
+gimple_has_location (const_gimple g)
+{
+  return gimple_location (g) != UNKNOWN_LOCATION;
+}
+
+
+/* Return the file name of the location of STMT.  */
+
+static inline const char *
+gimple_filename (const_gimple stmt)
+{
+  return LOCATION_FILE (gimple_location (stmt));
+}
+
+
+/* Return the line number of the location of STMT.  */
+
+static inline int
+gimple_lineno (const_gimple stmt)
+{
+  return LOCATION_LINE (gimple_location (stmt));
+}
+
+
+/* Determine whether SEQ is a singleton. */
+
+static inline bool
+gimple_seq_singleton_p (gimple_seq seq)
+{
+  return ((gimple_seq_first (seq) != NULL)
+         && (gimple_seq_first (seq) == gimple_seq_last (seq)));
+}
+
+/* Return true if no warnings should be emitted for statement STMT.  */
+
+static inline bool
+gimple_no_warning_p (const_gimple stmt)
+{
+  return stmt->gsbase.no_warning;
+}
+
+/* Set the no_warning flag of STMT to NO_WARNING.  */
+
+static inline void
+gimple_set_no_warning (gimple stmt, bool no_warning)
+{
+  stmt->gsbase.no_warning = (unsigned) no_warning;
+}
+
+/* Set the visited status on statement STMT to VISITED_P.  */
+
+static inline void
+gimple_set_visited (gimple stmt, bool visited_p)
+{
+  stmt->gsbase.visited = (unsigned) visited_p;
+}
+
+
+/* Return the visited status for statement STMT.  */
+
+static inline bool
+gimple_visited_p (gimple stmt)
+{
+  return stmt->gsbase.visited;
+}
+
+
+/* Set pass local flag PLF on statement STMT to VAL_P.  */
+
+static inline void
+gimple_set_plf (gimple stmt, enum plf_mask plf, bool val_p)
+{
+  if (val_p)
+    stmt->gsbase.plf |= (unsigned int) plf;
+  else
+    stmt->gsbase.plf &= ~((unsigned int) plf);
+}
+
+
+/* Return the value of pass local flag PLF on statement STMT.  */
+
+static inline unsigned int
+gimple_plf (gimple stmt, enum plf_mask plf)
+{
+  return stmt->gsbase.plf & ((unsigned int) plf);
+}
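
For illustration, a hedged sketch of the pass-local-flag protocol, assuming the GF_PLF_1 value from the plf_mask enum defined earlier in this header; both helper names are invented:

  /* Mark STMT as already handled by the current pass.  */
  static void
  mark_stmt_handled (gimple stmt)
  {
    gimple_set_plf (stmt, GF_PLF_1, true);
  }

  /* Return true if STMT was marked by mark_stmt_handled.  */
  static bool
  stmt_handled_p (gimple stmt)
  {
    return gimple_plf (stmt, GF_PLF_1) != 0;
  }
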
+
+
+/* Set UID to be the unique id of statement G.  */
+
+static inline void
+gimple_set_uid (gimple g, unsigned uid)
+{
+  g->gsbase.uid = uid;
+}
+
+
+/* Return the unique id of statement G.  */
+
+static inline unsigned
+gimple_uid (const_gimple g)
+{
+  return g->gsbase.uid;
+}
+
+
+/* Return true if GIMPLE statement G has register or memory operands.  */
+
+static inline bool
+gimple_has_ops (const_gimple g)
+{
+  return gimple_code (g) >= GIMPLE_COND && gimple_code (g) <= GIMPLE_RETURN;
+}
+
+
+/* Return true if GIMPLE statement G has memory operands.  */
+
+static inline bool
+gimple_has_mem_ops (const_gimple g)
+{
+  return gimple_code (g) >= GIMPLE_ASSIGN && gimple_code (g) <= GIMPLE_RETURN;
+}
+
+/* Return the set of addresses taken by statement G.  */
+
+static inline bitmap
+gimple_addresses_taken (const_gimple g)
+{
+  if (gimple_has_ops (g))
+    return g->gsops.opbase.addresses_taken;
+  else
+    return NULL;
+}
+
+
+/* Return a pointer to the set of addresses taken by statement G.  */
+
+static inline bitmap *
+gimple_addresses_taken_ptr (gimple g)
+{
+  if (gimple_has_ops (g))
+    return &g->gsops.opbase.addresses_taken;
+  else
+    return NULL;
+}
+
+
+/* Set B to be the set of addresses taken by statement G.  The
+   previous set is freed.  */
+
+static inline void
+gimple_set_addresses_taken (gimple g, bitmap b)
+{
+  gcc_assert (gimple_has_ops (g));
+  BITMAP_FREE (g->gsops.opbase.addresses_taken);
+  g->gsops.opbase.addresses_taken = b;
+}
+
+
+/* Return the set of DEF operands for statement G.  */
+
+static inline struct def_optype_d *
+gimple_def_ops (const_gimple g)
+{
+  if (!gimple_has_ops (g))
+    return NULL;
+  return g->gsops.opbase.def_ops;
+}
+
+
+/* Set DEF to be the set of DEF operands for statement G.  */
+
+static inline void
+gimple_set_def_ops (gimple g, struct def_optype_d *def)
+{
+  gcc_assert (gimple_has_ops (g));
+  g->gsops.opbase.def_ops = def;
+}
+
+
+/* Return the set of USE operands for statement G.  */
+
+static inline struct use_optype_d *
+gimple_use_ops (const_gimple g)
+{
+  if (!gimple_has_ops (g))
+    return NULL;
+  return g->gsops.opbase.use_ops;
+}
+
+
+/* Set USE to be the set of USE operands for statement G.  */
+
+static inline void
+gimple_set_use_ops (gimple g, struct use_optype_d *use)
+{
+  gcc_assert (gimple_has_ops (g));
+  g->gsops.opbase.use_ops = use;
+}
+
+
+/* Return the set of VUSE operands for statement G.  */
+
+static inline struct voptype_d *
+gimple_vuse_ops (const_gimple g)
+{
+  if (!gimple_has_mem_ops (g))
+    return NULL;
+  return g->gsmem.membase.vuse_ops;
+}
+
+
+/* Set OPS to be the set of VUSE operands for statement G.  */
+
+static inline void
+gimple_set_vuse_ops (gimple g, struct voptype_d *ops)
+{
+  gcc_assert (gimple_has_mem_ops (g));
+  g->gsmem.membase.vuse_ops = ops;
+}
+
+
+/* Return the set of VDEF operands for statement G.  */
+
+static inline struct voptype_d *
+gimple_vdef_ops (const_gimple g)
+{
+  if (!gimple_has_mem_ops (g))
+    return NULL;
+  return g->gsmem.membase.vdef_ops;
+}
+
+
+/* Set OPS to be the set of VDEF operands for statement G.  */
+
+static inline void
+gimple_set_vdef_ops (gimple g, struct voptype_d *ops)
+{
+  gcc_assert (gimple_has_mem_ops (g));
+  g->gsmem.membase.vdef_ops = ops;
+}
+
+
+/* Return the set of symbols loaded by statement G.  Each element of the
+   set is the DECL_UID of the corresponding symbol.  */
+
+static inline bitmap
+gimple_loaded_syms (const_gimple g)
+{
+  if (!gimple_has_mem_ops (g))
+    return NULL;
+  return g->gsmem.membase.loads;
+}
+
+
+/* Return the set of symbols stored by statement G.  Each element of
+   the set is the DECL_UID of the corresponding symbol.  */
+
+static inline bitmap
+gimple_stored_syms (const_gimple g)
+{
+  if (!gimple_has_mem_ops (g))
+    return NULL;
+  return g->gsmem.membase.stores;
+}
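
As a usage sketch for the loaded/stored symbol sets, assuming bitmap_bit_p and DECL_UID from the usual GCC headers; the helper is hypothetical:

  /* Return true if statement STMT may store to symbol SYM, according to
     the stored-symbols bitmap kept for statements with memory operands.  */
  static bool
  stmt_stores_sym_p (gimple stmt, tree sym)
  {
    bitmap stores = gimple_stored_syms (stmt);
    return stores && bitmap_bit_p (stores, DECL_UID (sym));
  }
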
+
+
+/* Return true if statement G has operands and the modified field has
+   been set.  */
+
+static inline bool
+gimple_modified_p (const_gimple g)
+{
+  return (gimple_has_ops (g)) ? (bool) g->gsbase.modified : false;
+}
+
+/* Return the type of the main expression computed by STMT.  Return
+   void_type_node if the statement computes nothing.  */
+
+static inline tree
+gimple_expr_type (const_gimple stmt)
+{
+  enum gimple_code code = gimple_code (stmt);
+
+  if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
+    {
+      tree type = TREE_TYPE (gimple_get_lhs (stmt));
+      /* Integral sub-types are never the type of the expression,
+         but they still can be the type of the result as the base
+        type (in which expressions are computed) is trivially
+        convertible to one of its sub-types.  So always return
+        the base type here.  */
+      if (INTEGRAL_TYPE_P (type)
+         && TREE_TYPE (type))
+       type = TREE_TYPE (type);
+      return type;
+    }
+  else if (code == GIMPLE_COND)
+    return boolean_type_node;
+  else
+    return void_type_node;
+}
+
+
+/* Return the tree code for the expression computed by STMT.  This is
+   only valid for GIMPLE_COND, GIMPLE_CALL and GIMPLE_ASSIGN.  For
+   GIMPLE_CALL, return CALL_EXPR as the expression code for
+   consistency.  This is useful when the caller needs to deal with the
+   three kinds of computation that GIMPLE supports.  */
+
+static inline enum tree_code
+gimple_expr_code (const_gimple stmt)
+{
+  enum gimple_code code = gimple_code (stmt);
+  if (code == GIMPLE_ASSIGN || code == GIMPLE_COND)
+    return (enum tree_code) stmt->gsbase.subcode;
+  else if (code == GIMPLE_CALL)
+    return CALL_EXPR;
+  else
+    gcc_unreachable ();
+}
+
+
+/* Mark statement S as modified, and update it.  */
+
+static inline void
+update_stmt (gimple s)
+{
+  if (gimple_has_ops (s))
+    {
+      gimple_set_modified (s, true);
+      update_stmt_operands (s);
+    }
+}
+
+/* Update statement S if it has been optimized.  */
+
+static inline void
+update_stmt_if_modified (gimple s)
+{
+  if (gimple_modified_p (s))
+    update_stmt_operands (s);
+}
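
A minimal sketch of the usual update protocol as seen from a client pass; the helper name is made up, and gimple_assign_set_rhs1 is defined further down in this header:

  /* Replace the first RHS operand of assignment STMT and keep its operand
     caches consistent.  */
  static void
  replace_rhs1_and_update (gimple stmt, tree new_op)
  {
    gimple_assign_set_rhs1 (stmt, new_op);
    update_stmt (stmt);
  }
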
+
+/* Return true if statement STMT contains volatile operands.  */
+
+static inline bool
+gimple_has_volatile_ops (const_gimple stmt)
+{
+  if (gimple_has_mem_ops (stmt))
+    return stmt->gsbase.has_volatile_ops;
+  else
+    return false;
+}
+
+
+/* Set the HAS_VOLATILE_OPS flag to VOLATILEP.  */
+
+static inline void
+gimple_set_has_volatile_ops (gimple stmt, bool volatilep)
+{
+  if (gimple_has_mem_ops (stmt))
+    stmt->gsbase.has_volatile_ops = (unsigned) volatilep;
+}
+
+
+/* Return true if statement STMT may access memory.  */
+
+static inline bool
+gimple_references_memory_p (gimple stmt)
+{
+  return gimple_has_mem_ops (stmt) && stmt->gsbase.references_memory_p;
+}
+
+
+/* Set the REFERENCES_MEMORY_P flag for STMT to MEM_P.  */
+
+static inline void
+gimple_set_references_memory (gimple stmt, bool mem_p)
+{
+  if (gimple_has_mem_ops (stmt))
+    stmt->gsbase.references_memory_p = (unsigned) mem_p;
+}
+
+/* Return the subcode for OMP statement S.  */
+
+static inline unsigned
+gimple_omp_subcode (const_gimple s)
+{
+  gcc_assert (gimple_code (s) >= GIMPLE_OMP_ATOMIC_LOAD
+             && gimple_code (s) <= GIMPLE_OMP_SINGLE);
+  return s->gsbase.subcode;
+}
+
+/* Set the subcode for OMP statement S to SUBCODE.  */
+
+static inline void
+gimple_omp_set_subcode (gimple s, unsigned int subcode)
+{
+  /* We only have 16 bits for the subcode.  Assert that we are not
+     overflowing it.  */
+  gcc_assert (subcode < (1 << 16));
+  s->gsbase.subcode = subcode;
+}
+
+/* Set the nowait flag on OMP_RETURN statement S.  */
+
+static inline void
+gimple_omp_return_set_nowait (gimple s)
+{
+  GIMPLE_CHECK (s, GIMPLE_OMP_RETURN);
+  s->gsbase.subcode |= GF_OMP_RETURN_NOWAIT;
+}
+
+
+/* Return true if OMP return statement G has the GF_OMP_RETURN_NOWAIT
+   flag set.  */
+
+static inline bool
+gimple_omp_return_nowait_p (const_gimple g)
+{
+  GIMPLE_CHECK (g, GIMPLE_OMP_RETURN);
+  return (gimple_omp_subcode (g) & GF_OMP_RETURN_NOWAIT) != 0;
+}
+
+
+/* Return true if OMP section statement G has the GF_OMP_SECTION_LAST
+   flag set.  */
+
+static inline bool
+gimple_omp_section_last_p (const_gimple g)
+{
+  GIMPLE_CHECK (g, GIMPLE_OMP_SECTION);
+  return (gimple_omp_subcode (g) & GF_OMP_SECTION_LAST) != 0;
+}
+
+
+/* Set the GF_OMP_SECTION_LAST flag on G.  */
+
+static inline void
+gimple_omp_section_set_last (gimple g)
+{
+  GIMPLE_CHECK (g, GIMPLE_OMP_SECTION);
+  g->gsbase.subcode |= GF_OMP_SECTION_LAST;
+}
+
+
+/* Return true if OMP parallel statement G has the
+   GF_OMP_PARALLEL_COMBINED flag set.  */
+
+static inline bool
+gimple_omp_parallel_combined_p (const_gimple g)
+{
+  GIMPLE_CHECK (g, GIMPLE_OMP_PARALLEL);
+  return (gimple_omp_subcode (g) & GF_OMP_PARALLEL_COMBINED) != 0;
+}
+
+
+/* Set the GF_OMP_PARALLEL_COMBINED field in G depending on the boolean
+   value of COMBINED_P.  */
+
+static inline void
+gimple_omp_parallel_set_combined_p (gimple g, bool combined_p)
+{
+  GIMPLE_CHECK (g, GIMPLE_OMP_PARALLEL);
+  if (combined_p)
+    g->gsbase.subcode |= GF_OMP_PARALLEL_COMBINED;
+  else
+    g->gsbase.subcode &= ~GF_OMP_PARALLEL_COMBINED;
+}
+
+
+/* Return the number of operands for statement GS.  */
+
+static inline unsigned
+gimple_num_ops (const_gimple gs)
+{
+  return gs->gsbase.num_ops;
+}
+
+
+/* Set the number of operands for statement GS.  */
+
+static inline void
+gimple_set_num_ops (gimple gs, unsigned num_ops)
+{
+  gs->gsbase.num_ops = num_ops;
+}
+
+
+/* Return the array of operands for statement GS.  */
+
+static inline tree *
+gimple_ops (gimple gs)
+{
+  /* Offset in bytes to the location of the operand vector in every
+     tuple structure.  Defined in gimple.c  */
+  extern size_t const gimple_ops_offset_[];
+
+  if (!gimple_has_ops (gs))
+    return NULL;
+
+  /* All the tuples have their operand vector at the very bottom
+     of the structure.  */
+  return ((tree *) ((char *) gs + gimple_ops_offset_[gimple_code (gs)]));
+}
+
+
+/* Return operand I for statement GS.  */
+
+static inline tree
+gimple_op (const_gimple gs, unsigned i)
+{
+  if (gimple_has_ops (gs))
+    {
+      gcc_assert (i < gimple_num_ops (gs));
+      return gimple_ops (CONST_CAST_GIMPLE (gs))[i];
+    }
+  else
+    return NULL_TREE;
+}
+
+/* Return a pointer to operand I for statement GS.  */
+
+static inline tree *
+gimple_op_ptr (const_gimple gs, unsigned i)
+{
+  if (gimple_has_ops (gs))
+    {
+      gcc_assert (i < gimple_num_ops (gs));
+      return gimple_ops (CONST_CAST_GIMPLE (gs)) + i;
+    }
+  else
+    return NULL;
+}
+
+/* Set operand I of statement GS to OP.  */
+
+static inline void
+gimple_set_op (gimple gs, unsigned i, tree op)
+{
+  gcc_assert (gimple_has_ops (gs) && i < gimple_num_ops (gs));
+
+  /* Note.  It may be tempting to assert that OP matches
+     is_gimple_operand, but that would be wrong.  Different tuples
+     accept slightly different sets of tree operands.  Each caller
+     should perform its own validation.  */
+  gimple_ops (gs)[i] = op;
+}
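
A short sketch of a generic operand walk built on the accessors above; the helper is hypothetical, and TREE_CODE/SSA_NAME come from tree.h:

  /* Count how many operands of STMT are SSA names.  */
  static unsigned
  count_ssa_name_operands (gimple stmt)
  {
    unsigned i, n = 0;
    for (i = 0; i < gimple_num_ops (stmt); i++)
      {
        tree op = gimple_op (stmt, i);
        if (op && TREE_CODE (op) == SSA_NAME)
          n++;
      }
    return n;
  }
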
+
+/* Return true if GS is a GIMPLE_ASSIGN.  */
+
+static inline bool
+is_gimple_assign (const_gimple gs)
+{
+  return gimple_code (gs) == GIMPLE_ASSIGN;
+}
+
+/* Determine if expression CODE is one of the valid expressions that can
+   be used on the RHS of GIMPLE assignments.  */
+
+static inline enum gimple_rhs_class
+get_gimple_rhs_class (enum tree_code code)
+{
+  return (enum gimple_rhs_class) gimple_rhs_class_table[(int) code];
+}
+
+/* Return the LHS of assignment statement GS.  */
+
+static inline tree
+gimple_assign_lhs (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_ASSIGN);
+  return gimple_op (gs, 0);
+}
+
+
+/* Return a pointer to the LHS of assignment statement GS.  */
+
+static inline tree *
+gimple_assign_lhs_ptr (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_ASSIGN);
+  return gimple_op_ptr (gs, 0);
+}
+
+
+/* Set LHS to be the LHS operand of assignment statement GS.  */
+
+static inline void
+gimple_assign_set_lhs (gimple gs, tree lhs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_ASSIGN);
+  gcc_assert (is_gimple_operand (lhs));
+  gimple_set_op (gs, 0, lhs);
+
+  if (lhs && TREE_CODE (lhs) == SSA_NAME)
+    SSA_NAME_DEF_STMT (lhs) = gs;
+}
+
+
+/* Return the first operand on the RHS of assignment statement GS.  */
+
+static inline tree
+gimple_assign_rhs1 (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_ASSIGN);
+  return gimple_op (gs, 1);
+}
+
+
+/* Return a pointer to the first operand on the RHS of assignment
+   statement GS.  */
+
+static inline tree *
+gimple_assign_rhs1_ptr (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_ASSIGN);
+  return gimple_op_ptr (gs, 1);
+}
+
+/* Set RHS to be the first operand on the RHS of assignment statement GS.  */
+
+static inline void
+gimple_assign_set_rhs1 (gimple gs, tree rhs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_ASSIGN);
+
+  /* If there are 3 or more operands, the 2 operands on the RHS must be
+     GIMPLE values.  */
+  if (gimple_num_ops (gs) >= 3)
+    gcc_assert (is_gimple_val (rhs));
+  else
+    gcc_assert (is_gimple_operand (rhs));
+
+  gimple_set_op (gs, 1, rhs);
+}
+
+
+/* Return the second operand on the RHS of assignment statement GS.
+   If GS does not have two operands, NULL is returned instead.  */
+
+static inline tree
+gimple_assign_rhs2 (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_ASSIGN);
+
+  if (gimple_num_ops (gs) >= 3)
+    return gimple_op (gs, 2);
+  else
+    return NULL_TREE;
+}
+
+
+/* Return a pointer to the second operand on the RHS of assignment
+   statement GS.  */
+
+static inline tree *
+gimple_assign_rhs2_ptr (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_ASSIGN);
+  return gimple_op_ptr (gs, 2);
+}
+
+
+/* Set RHS to be the second operand on the RHS of assignment statement GS.  */
+
+static inline void
+gimple_assign_set_rhs2 (gimple gs, tree rhs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_ASSIGN);
+
+  /* The 2 operands on the RHS must be GIMPLE values.  */
+  gcc_assert (is_gimple_val (rhs));
+
+  gimple_set_op (gs, 2, rhs);
+}
+
+/* Returns true if GS is a nontemporal move.  */
+
+static inline bool
+gimple_assign_nontemporal_move_p (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_ASSIGN);
+  return gs->gsbase.nontemporal_move;
+}
+
+/* Sets nontemporal move flag of GS to NONTEMPORAL.  */
+
+static inline void
+gimple_assign_set_nontemporal_move (gimple gs, bool nontemporal)
+{
+  GIMPLE_CHECK (gs, GIMPLE_ASSIGN);
+  gs->gsbase.nontemporal_move = nontemporal;
+}
+
+
+/* Return the code of the expression computed on the rhs of assignment
+   statement GS.  In case that the RHS is a single object, returns the
+   tree code of the object.  */
+
+static inline enum tree_code
+gimple_assign_rhs_code (const_gimple gs)
+{
+  enum tree_code code;
+  GIMPLE_CHECK (gs, GIMPLE_ASSIGN);
+
+  code = gimple_expr_code (gs);
+  if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
+    code = TREE_CODE (gimple_assign_rhs1 (gs));
+
+  return code;
+}
+
+
+/* Set CODE to be the code for the expression computed on the RHS of
+   assignment S.  */
+
+static inline void
+gimple_assign_set_rhs_code (gimple s, enum tree_code code)
+{
+  GIMPLE_CHECK (s, GIMPLE_ASSIGN);
+  s->gsbase.subcode = code;
+}
+
+
+/* Return true if S is a type-cast assignment.  */
+
+static inline bool
+gimple_assign_cast_p (gimple s)
+{
+  if (is_gimple_assign (s))
+    {
+      enum tree_code sc = gimple_assign_rhs_code (s);
+      return sc == NOP_EXPR
+            || sc == CONVERT_EXPR
+            || sc == VIEW_CONVERT_EXPR
+            || sc == FIX_TRUNC_EXPR;
+    }
+
+  return false;
+}
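
To illustrate the assignment accessors, a hedged sketch of recognizing a two-operand addition; the predicate name is invented:

  /* Return true if STMT has the form "lhs = a + b".  */
  static bool
  gimple_plus_assign_p (gimple stmt)
  {
    return (is_gimple_assign (stmt)
            && gimple_assign_rhs_code (stmt) == PLUS_EXPR
            && gimple_num_ops (stmt) == 3);
  }
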
+
+
+/* Return true if GS is a GIMPLE_CALL.  */
+
+static inline bool
+is_gimple_call (const_gimple gs)
+{
+  return gimple_code (gs) == GIMPLE_CALL;
+}
+
+/* Return the LHS of call statement GS.  */
+
+static inline tree
+gimple_call_lhs (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_CALL);
+  return gimple_op (gs, 0);
+}
+
+
+/* Return a pointer to the LHS of call statement GS.  */
+
+static inline tree *
+gimple_call_lhs_ptr (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_CALL);
+  return gimple_op_ptr (gs, 0);
+}
+
+
+/* Set LHS to be the LHS operand of call statement GS.  */
+
+static inline void
+gimple_call_set_lhs (gimple gs, tree lhs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_CALL);
+  gcc_assert (!lhs || is_gimple_operand (lhs));
+  gimple_set_op (gs, 0, lhs);
+  if (lhs && TREE_CODE (lhs) == SSA_NAME)
+    SSA_NAME_DEF_STMT (lhs) = gs;
+}
+
+
+/* Return the tree node representing the function called by call
+   statement GS.  This may or may not be a FUNCTION_DECL node.  */
+
+static inline tree
+gimple_call_fn (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_CALL);
+  return gimple_op (gs, 1);
+}
+
+
+/* Return a pointer to the tree node representing the function called by call
+   statement GS.  */
+
+static inline tree *
+gimple_call_fn_ptr (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_CALL);
+  return gimple_op_ptr (gs, 1);
+}
+
+
+/* Set FN to be the function called by call statement GS.  */
+
+static inline void
+gimple_call_set_fn (gimple gs, tree fn)
+{
+  GIMPLE_CHECK (gs, GIMPLE_CALL);
+  gcc_assert (is_gimple_operand (fn));
+  gimple_set_op (gs, 1, fn);
+}
+
+
+/* If a given GIMPLE_CALL's callee is a FUNCTION_DECL, return it.
+   Otherwise return NULL.  This function is analogous to
+   get_callee_fndecl in tree land.  */
+
+static inline tree
+gimple_call_fndecl (const_gimple gs)
+{
+  tree decl = gimple_call_fn (gs);
+  return (TREE_CODE (decl) == FUNCTION_DECL) ? decl : NULL_TREE;
+}
+
+
+/* Return the type returned by call statement GS.  */
+
+static inline tree
+gimple_call_return_type (const_gimple gs)
+{
+  tree fn = gimple_call_fn (gs);
+  tree type = TREE_TYPE (fn);
+
+  /* See through pointers.  */
+  if (POINTER_TYPE_P (type))
+    type = TREE_TYPE (type);
+
+  gcc_assert (TREE_CODE (type) == FUNCTION_TYPE
+             || TREE_CODE (type) == METHOD_TYPE);
+
+  /* The type returned by a FUNCTION_DECL is the type of its
+     function type.  */
+  return TREE_TYPE (type);
+}
+
+
+/* Return the static chain for call statement GS.  */
+
+static inline tree
+gimple_call_chain (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_CALL);
+  return gimple_op (gs, 2);
+}
+
+
+/* Return a pointer to the static chain for call statement GS.  */
+
+static inline tree *
+gimple_call_chain_ptr (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_CALL);
+  return gimple_op_ptr (gs, 2);
+}
+
+/* Set CHAIN to be the static chain for call statement GS.  */
+
+static inline void
+gimple_call_set_chain (gimple gs, tree chain)
+{
+  GIMPLE_CHECK (gs, GIMPLE_CALL);
+  gcc_assert (chain == NULL
+              || TREE_CODE (chain) == ADDR_EXPR
+              || DECL_P (chain));
+  gimple_set_op (gs, 2, chain);
+}
+
+
+/* Return the number of arguments used by call statement GS.  */
+
+static inline unsigned
+gimple_call_num_args (const_gimple gs)
+{
+  unsigned num_ops;
+  GIMPLE_CHECK (gs, GIMPLE_CALL);
+  num_ops = gimple_num_ops (gs);
+  gcc_assert (num_ops >= 3);
+  return num_ops - 3;
+}
+
+
+/* Return the argument at position INDEX for call statement GS.  */
+
+static inline tree
+gimple_call_arg (const_gimple gs, unsigned index)
+{
+  GIMPLE_CHECK (gs, GIMPLE_CALL);
+  return gimple_op (gs, index + 3);
+}
+
+
+/* Return a pointer to the argument at position INDEX for call
+   statement GS.  */
+
+static inline tree *
+gimple_call_arg_ptr (const_gimple gs, unsigned index)
+{
+  GIMPLE_CHECK (gs, GIMPLE_CALL);
+  return gimple_op_ptr (gs, index + 3);
+}
+
+
+/* Set ARG to be the argument at position INDEX for call statement GS.  */
+
+static inline void
+gimple_call_set_arg (gimple gs, unsigned index, tree arg)
+{
+  GIMPLE_CHECK (gs, GIMPLE_CALL);
+  gcc_assert (is_gimple_operand (arg));
+  gimple_set_op (gs, index + 3, arg);
+}
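
As an illustration of the argument accessors, a minimal sketch; the helper is hypothetical and ADDR_EXPR comes from tree.h:

  /* Return true if some argument of call statement STMT is an ADDR_EXPR,
     i.e. the call is passed the address of an object.  */
  static bool
  call_passes_address_p (gimple stmt)
  {
    unsigned i;
    for (i = 0; i < gimple_call_num_args (stmt); i++)
      if (TREE_CODE (gimple_call_arg (stmt, i)) == ADDR_EXPR)
        return true;
    return false;
  }
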
+
+
+/* If TAIL_P is true, mark call statement S as being a tail call
+   (i.e., a call just before the exit of a function).  These calls are
+   candidate for tail call optimization.  */
+
+static inline void
+gimple_call_set_tail (gimple s, bool tail_p)
+{
+  GIMPLE_CHECK (s, GIMPLE_CALL);
+  if (tail_p)
+    s->gsbase.subcode |= GF_CALL_TAILCALL;
+  else
+    s->gsbase.subcode &= ~GF_CALL_TAILCALL;
+}
+
+
+/* Return true if GIMPLE_CALL S is marked as a tail call.  */
+
+static inline bool
+gimple_call_tail_p (gimple s)
+{
+  GIMPLE_CHECK (s, GIMPLE_CALL);
+  return (s->gsbase.subcode & GF_CALL_TAILCALL) != 0;
+}
+
+
+/* If INLINABLE_P is true, mark GIMPLE_CALL S as not inlinable (set the
+   GF_CALL_CANNOT_INLINE flag); otherwise, clear the flag.  */
+
+static inline void
+gimple_call_set_cannot_inline (gimple s, bool inlinable_p)
+{
+  GIMPLE_CHECK (s, GIMPLE_CALL);
+  if (inlinable_p)
+    s->gsbase.subcode |= GF_CALL_CANNOT_INLINE;
+  else
+    s->gsbase.subcode &= ~GF_CALL_CANNOT_INLINE;
+}
+
+
+/* Return true if GIMPLE_CALL S cannot be inlined.  */
+
+static inline bool
+gimple_call_cannot_inline_p (gimple s)
+{
+  GIMPLE_CHECK (s, GIMPLE_CALL);
+  return (s->gsbase.subcode & GF_CALL_CANNOT_INLINE) != 0;
+}
+
+
+/* If RETURN_SLOT_OPT_P is true mark GIMPLE_CALL S as valid for return
+   slot optimization.  This transformation uses the target of the call
+   expansion as the return slot for calls that return in memory.  */
+
+static inline void
+gimple_call_set_return_slot_opt (gimple s, bool return_slot_opt_p)
+{
+  GIMPLE_CHECK (s, GIMPLE_CALL);
+  if (return_slot_opt_p)
+    s->gsbase.subcode |= GF_CALL_RETURN_SLOT_OPT;
+  else
+    s->gsbase.subcode &= ~GF_CALL_RETURN_SLOT_OPT;
+}
+
+
+/* Return true if S is marked for return slot optimization.  */
+
+static inline bool
+gimple_call_return_slot_opt_p (gimple s)
+{
+  GIMPLE_CHECK (s, GIMPLE_CALL);
+  return (s->gsbase.subcode & GF_CALL_RETURN_SLOT_OPT) != 0;
+}
+
+
+/* If FROM_THUNK_P is true, mark GIMPLE_CALL S as being the jump from a
+   thunk to the thunked-to function.  */
+
+static inline void
+gimple_call_set_from_thunk (gimple s, bool from_thunk_p)
+{
+  GIMPLE_CHECK (s, GIMPLE_CALL);
+  if (from_thunk_p)
+    s->gsbase.subcode |= GF_CALL_FROM_THUNK;
+  else
+    s->gsbase.subcode &= ~GF_CALL_FROM_THUNK;
+}
+
+
+/* Return true if GIMPLE_CALL S is a jump from a thunk.  */
+
+static inline bool
+gimple_call_from_thunk_p (gimple s)
+{
+  GIMPLE_CHECK (s, GIMPLE_CALL);
+  return (s->gsbase.subcode & GF_CALL_FROM_THUNK) != 0;
+}
+
+
+/* If PASS_ARG_PACK_P is true, mark GIMPLE_CALL S as a stdarg call that
+   needs the argument pack in its argument list.  */
+
+static inline void
+gimple_call_set_va_arg_pack (gimple s, bool pass_arg_pack_p)
+{
+  GIMPLE_CHECK (s, GIMPLE_CALL);
+  if (pass_arg_pack_p)
+    s->gsbase.subcode |= GF_CALL_VA_ARG_PACK;
+  else
+    s->gsbase.subcode &= ~GF_CALL_VA_ARG_PACK;
+}
+
+
+/* Return true if GIMPLE_CALL S is a stdarg call that needs the
+   argument pack in its argument list.  */
+
+static inline bool
+gimple_call_va_arg_pack_p (gimple s)
+{
+  GIMPLE_CHECK (s, GIMPLE_CALL);
+  return (s->gsbase.subcode & GF_CALL_VA_ARG_PACK) != 0;
+}
+
+
+/* Return true if S is a noreturn call.  */
+
+static inline bool
+gimple_call_noreturn_p (gimple s)
+{
+  GIMPLE_CHECK (s, GIMPLE_CALL);
+  return (gimple_call_flags (s) & ECF_NORETURN) != 0;
+}
+
+
+/* Return true if S is a nothrow call.  */
+
+static inline bool
+gimple_call_nothrow_p (gimple s)
+{
+  GIMPLE_CHECK (s, GIMPLE_CALL);
+  return (gimple_call_flags (s) & ECF_NOTHROW) != 0;
+}
+
+
+/* Copy all the GF_CALL_* flags from ORIG_CALL to DEST_CALL.  */
+
+static inline void
+gimple_call_copy_flags (gimple dest_call, gimple orig_call)
+{
+  GIMPLE_CHECK (dest_call, GIMPLE_CALL);
+  GIMPLE_CHECK (orig_call, GIMPLE_CALL);
+  dest_call->gsbase.subcode = orig_call->gsbase.subcode;
+}
+
+
+/* Returns true if this is a GIMPLE_ASSIGN or a GIMPLE_CALL with a
+   non-NULL lhs.  */
+
+static inline bool
+gimple_has_lhs (gimple stmt)
+{
+  return (is_gimple_assign (stmt)
+         || (is_gimple_call (stmt)
+             && gimple_call_lhs (stmt) != NULL_TREE));
+}
+
+
+/* Return the code of the predicate computed by conditional statement GS.  */
+
+static inline enum tree_code
+gimple_cond_code (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_COND);
+  return gs->gsbase.subcode;
+}
+
+
+/* Set CODE to be the predicate code for the conditional statement GS.  */
+
+static inline void
+gimple_cond_set_code (gimple gs, enum tree_code code)
+{
+  GIMPLE_CHECK (gs, GIMPLE_COND);
+  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
+  gs->gsbase.subcode = code;
+}
+
+
+/* Return the LHS of the predicate computed by conditional statement GS.  */
+
+static inline tree
+gimple_cond_lhs (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_COND);
+  return gimple_op (gs, 0);
+}
+
+/* Return the pointer to the LHS of the predicate computed by conditional
+   statement GS.  */
+
+static inline tree *
+gimple_cond_lhs_ptr (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_COND);
+  return gimple_op_ptr (gs, 0);
+}
+
+/* Set LHS to be the LHS operand of the predicate computed by
+   conditional statement GS.  */
+
+static inline void
+gimple_cond_set_lhs (gimple gs, tree lhs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_COND);
+  gcc_assert (is_gimple_operand (lhs));
+  gimple_set_op (gs, 0, lhs);
+}
+
+
+/* Return the RHS operand of the predicate computed by conditional GS.  */
+
+static inline tree
+gimple_cond_rhs (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_COND);
+  return gimple_op (gs, 1);
+}
+
+/* Return the pointer to the RHS operand of the predicate computed by
+   conditional GS.  */
+
+static inline tree *
+gimple_cond_rhs_ptr (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_COND);
+  return gimple_op_ptr (gs, 1);
+}
+
+
+/* Set RHS to be the RHS operand of the predicate computed by
+   conditional statement GS.  */
+
+static inline void
+gimple_cond_set_rhs (gimple gs, tree rhs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_COND);
+  gcc_assert (is_gimple_operand (rhs));
+  gimple_set_op (gs, 1, rhs);
+}
+
+
+/* Return the label used by conditional statement GS when its
+   predicate evaluates to true.  */
+
+static inline tree
+gimple_cond_true_label (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_COND);
+  return gimple_op (gs, 2);
+}
+
+
+/* Set LABEL to be the label used by conditional statement GS when its
+   predicate evaluates to true.  */
+
+static inline void
+gimple_cond_set_true_label (gimple gs, tree label)
+{
+  GIMPLE_CHECK (gs, GIMPLE_COND);
+  gcc_assert (!label || TREE_CODE (label) == LABEL_DECL);
+  gimple_set_op (gs, 2, label);
+}
+
+
+/* Set LABEL to be the label used by conditional statement GS when its
+   predicate evaluates to false.  */
+
+static inline void
+gimple_cond_set_false_label (gimple gs, tree label)
+{
+  GIMPLE_CHECK (gs, GIMPLE_COND);
+  gcc_assert (!label || TREE_CODE (label) == LABEL_DECL);
+  gimple_set_op (gs, 3, label);
+}
+
+
+/* Return the label used by conditional statement GS when its
+   predicate evaluates to false.  */
+
+static inline tree
+gimple_cond_false_label (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_COND);
+  return gimple_op (gs, 3);
+}
+
+
+/* Set the conditional statement GS to be of the form 'if (1 == 0)'.  */
+
+static inline void
+gimple_cond_make_false (gimple gs)
+{
+  gimple_cond_set_lhs (gs, boolean_true_node);
+  gimple_cond_set_rhs (gs, boolean_false_node);
+  gs->gsbase.subcode = EQ_EXPR;
+}
+
+
+/* Set the conditional statement GS to be of the form 'if (1 == 1)'.  */
+
+static inline void
+gimple_cond_make_true (gimple gs)
+{
+  gimple_cond_set_lhs (gs, boolean_true_node);
+  gimple_cond_set_rhs (gs, boolean_true_node);
+  gs->gsbase.subcode = EQ_EXPR;
+}
+
+/* Return true if conditional statement GS is of the form 'if (1 == 1)',
+   'if (0 == 0)', 'if (1 != 0)' or 'if (0 != 1)'.  */
+
+static inline bool
+gimple_cond_true_p (const_gimple gs)
+{
+  tree lhs = gimple_cond_lhs (gs);
+  tree rhs = gimple_cond_rhs (gs);
+  enum tree_code code = gimple_cond_code (gs);
+
+  if (lhs != boolean_true_node && lhs != boolean_false_node)
+    return false;
+
+  if (rhs != boolean_true_node && rhs != boolean_false_node)
+    return false;
+
+  if (code == NE_EXPR && lhs != rhs)
+    return true;
+
+  if (code == EQ_EXPR && lhs == rhs)
+      return true;
+
+  return false;
+}
+
+/* Return true if conditional statement GS is of the form 'if (1 != 1)',
+   'if (0 != 0)', 'if (1 == 0)' or 'if (0 == 1)'.  */
+
+static inline bool
+gimple_cond_false_p (const_gimple gs)
+{
+  tree lhs = gimple_cond_lhs (gs);
+  tree rhs = gimple_cond_rhs (gs);
+  enum tree_code code = gimple_cond_code (gs);
+
+  if (lhs != boolean_true_node && lhs != boolean_false_node)
+    return false;
+
+  if (rhs != boolean_true_node && rhs != boolean_false_node)
+    return false;
+
+  if (code == NE_EXPR && lhs == rhs)
+    return true;
+
+  if (code == EQ_EXPR && lhs != rhs)
+      return true;
+
+  return false;
+}
+
+/* Return true if conditional statement GS is of the form 'if (var != 0)' or
+   'if (var == 1)'.  */
+
+static inline bool
+gimple_cond_single_var_p (gimple gs)
+{
+  if (gimple_cond_code (gs) == NE_EXPR
+      && gimple_cond_rhs (gs) == boolean_false_node)
+    return true;
+
+  if (gimple_cond_code (gs) == EQ_EXPR
+      && gimple_cond_rhs (gs) == boolean_true_node)
+    return true;
+
+  return false;
+}
+
+/* Set the code, LHS and RHS of GIMPLE_COND STMT from CODE, LHS and RHS.  */
+
+static inline void
+gimple_cond_set_condition (gimple stmt, enum tree_code code, tree lhs, tree rhs)
+{
+  gimple_cond_set_code (stmt, code);
+  gimple_cond_set_lhs (stmt, lhs);
+  gimple_cond_set_rhs (stmt, rhs);
+}
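
A brief sketch of how the predicate helpers above might be combined to canonicalize a statically known condition; the function is hypothetical:

  /* If the predicate of COND_STMT is a known constant, rewrite it into the
     canonical 'if (1 == 1)' or 'if (1 == 0)' form.  */
  static void
  canonicalize_constant_cond (gimple cond_stmt)
  {
    if (gimple_cond_true_p (cond_stmt))
      gimple_cond_make_true (cond_stmt);
    else if (gimple_cond_false_p (cond_stmt))
      gimple_cond_make_false (cond_stmt);
  }
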
+
+/* Return the LABEL_DECL node used by GIMPLE_LABEL statement GS.  */
+
+static inline tree
+gimple_label_label (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_LABEL);
+  return gimple_op (gs, 0);
+}
+
+
+/* Set LABEL to be the LABEL_DECL node used by GIMPLE_LABEL statement
+   GS.  */
+
+static inline void
+gimple_label_set_label (gimple gs, tree label)
+{
+  GIMPLE_CHECK (gs, GIMPLE_LABEL);
+  gcc_assert (TREE_CODE (label) == LABEL_DECL);
+  gimple_set_op (gs, 0, label);
+}
+
+
+/* Return the destination of the unconditional jump GS.  */
+
+static inline tree
+gimple_goto_dest (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_GOTO);
+  return gimple_op (gs, 0);
+}
+
+
+/* Set DEST to be the destination of the unconditional jump GS.  */
+
+static inline void 
+gimple_goto_set_dest (gimple gs, tree dest)
+{
+  GIMPLE_CHECK (gs, GIMPLE_GOTO);
+  gcc_assert (is_gimple_operand (dest));
+  gimple_set_op (gs, 0, dest);
+}
+
+
+/* Return the variables declared in the GIMPLE_BIND statement GS.  */
+
+static inline tree
+gimple_bind_vars (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_BIND);
+  return gs->gimple_bind.vars;
+}
+
+
+/* Set VARS to be the set of variables declared in the GIMPLE_BIND
+   statement GS.  */
+
+static inline void
+gimple_bind_set_vars (gimple gs, tree vars)
+{
+  GIMPLE_CHECK (gs, GIMPLE_BIND);
+  gs->gimple_bind.vars = vars;
+}
+
+
+/* Append VARS to the set of variables declared in the GIMPLE_BIND
+   statement GS.  */
+
+static inline void
+gimple_bind_append_vars (gimple gs, tree vars)
+{
+  GIMPLE_CHECK (gs, GIMPLE_BIND);
+  gs->gimple_bind.vars = chainon (gs->gimple_bind.vars, vars);
+}
+
+
+/* Return the GIMPLE sequence contained in the GIMPLE_BIND statement GS.  */
+
+static inline gimple_seq
+gimple_bind_body (gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_BIND);
+  return gs->gimple_bind.body;
+}
+
+
+/* Set SEQ to be the GIMPLE sequence contained in the GIMPLE_BIND
+   statement GS.  */
+
+static inline void
+gimple_bind_set_body (gimple gs, gimple_seq seq)
+{
+  GIMPLE_CHECK (gs, GIMPLE_BIND);
+  gs->gimple_bind.body = seq;
+}
+
+
+/* Append a statement to the end of a GIMPLE_BIND's body.  */
+
+static inline void
+gimple_bind_add_stmt (gimple gs, gimple stmt)
+{
+  GIMPLE_CHECK (gs, GIMPLE_BIND);
+  gimple_seq_add_stmt (&gs->gimple_bind.body, stmt);
+}
+
+
+/* Append a sequence of statements to the end of a GIMPLE_BIND's body.  */
+
+static inline void
+gimple_bind_add_seq (gimple gs, gimple_seq seq)
+{
+  GIMPLE_CHECK (gs, GIMPLE_BIND);
+  gimple_seq_add_seq (&gs->gimple_bind.body, seq);
+}
+
+
+/* Return the TREE_BLOCK node associated with GIMPLE_BIND statement
+   GS.  This is analogous to the BIND_EXPR_BLOCK field in trees.  */
+
+static inline tree
+gimple_bind_block (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_BIND);
+  return gs->gimple_bind.block;
+}
+
+
+/* Set BLOCK to be the TREE_BLOCK node associated with GIMPLE_BIND
+   statement GS.  */
+
+static inline void
+gimple_bind_set_block (gimple gs, tree block)
+{
+  GIMPLE_CHECK (gs, GIMPLE_BIND);
+  gcc_assert (TREE_CODE (block) == BLOCK);
+  gs->gimple_bind.block = block;
+}
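
A hedged sketch showing the bind accessors working together; the helper is invented and assumes the assignment's LHS is an otherwise unchained local VAR_DECL:

  /* Append assignment STMT to the body of BIND_STMT and declare its LHS
     variable in the bind's variable list.  */
  static void
  bind_add_local_assignment (gimple bind_stmt, gimple stmt)
  {
    tree lhs = gimple_assign_lhs (stmt);
    if (TREE_CODE (lhs) == VAR_DECL)
      gimple_bind_append_vars (bind_stmt, lhs);
    gimple_bind_add_stmt (bind_stmt, stmt);
  }
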
+
+
+/* Return the number of input operands for GIMPLE_ASM GS.  */
+
+static inline unsigned
+gimple_asm_ninputs (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_ASM);
+  return gs->gimple_asm.ni;
+}
+
+
+/* Return the number of output operands for GIMPLE_ASM GS.  */
+
+static inline unsigned
+gimple_asm_noutputs (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_ASM);
+  return gs->gimple_asm.no;
+}
+
+
+/* Return the number of clobber operands for GIMPLE_ASM GS.  */
+
+static inline unsigned
+gimple_asm_nclobbers (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_ASM);
+  return gs->gimple_asm.nc;
+}
+
+
+/* Return input operand INDEX of GIMPLE_ASM GS.  */
+
+static inline tree
+gimple_asm_input_op (const_gimple gs, unsigned index)
+{
+  GIMPLE_CHECK (gs, GIMPLE_ASM);
+  gcc_assert (index < gs->gimple_asm.ni);
+  return gimple_op (gs, index);
+}
+
+/* Return a pointer to input operand INDEX of GIMPLE_ASM GS.  */
+
+static inline tree *
+gimple_asm_input_op_ptr (const_gimple gs, unsigned index)
+{
+  GIMPLE_CHECK (gs, GIMPLE_ASM);
+  gcc_assert (index < gs->gimple_asm.ni);
+  return gimple_op_ptr (gs, index);
+}
+
+
+/* Set IN_OP to be input operand INDEX in GIMPLE_ASM GS.  */
+
+static inline void
+gimple_asm_set_input_op (gimple gs, unsigned index, tree in_op)
+{
+  GIMPLE_CHECK (gs, GIMPLE_ASM);
+  gcc_assert (index < gs->gimple_asm.ni);
+  gcc_assert (TREE_CODE (in_op) == TREE_LIST);
+  gimple_set_op (gs, index, in_op);
+}
+
+
+/* Return output operand INDEX of GIMPLE_ASM GS.  */
+
+static inline tree
+gimple_asm_output_op (const_gimple gs, unsigned index)
+{
+  GIMPLE_CHECK (gs, GIMPLE_ASM);
+  gcc_assert (index < gs->gimple_asm.no);
+  return gimple_op (gs, index + gs->gimple_asm.ni);
+}
+
+/* Return a pointer to output operand INDEX of GIMPLE_ASM GS.  */
+
+static inline tree *
+gimple_asm_output_op_ptr (const_gimple gs, unsigned index)
+{
+  GIMPLE_CHECK (gs, GIMPLE_ASM);
+  gcc_assert (index < gs->gimple_asm.no);
+  return gimple_op_ptr (gs, index + gs->gimple_asm.ni);
+}
+
+
+/* Set OUT_OP to be output operand INDEX in GIMPLE_ASM GS.  */
+
+static inline void
+gimple_asm_set_output_op (gimple gs, unsigned index, tree out_op)
+{
+  GIMPLE_CHECK (gs, GIMPLE_ASM);
+  gcc_assert (index < gs->gimple_asm.no);
+  gcc_assert (TREE_CODE (out_op) == TREE_LIST);
+  gimple_set_op (gs, index + gs->gimple_asm.ni, out_op);
+}
+
+
+/* Return clobber operand INDEX of GIMPLE_ASM GS.  */
+
+static inline tree
+gimple_asm_clobber_op (const_gimple gs, unsigned index)
+{
+  GIMPLE_CHECK (gs, GIMPLE_ASM);
+  gcc_assert (index < gs->gimple_asm.nc);
+  return gimple_op (gs, index + gs->gimple_asm.ni + gs->gimple_asm.no);
+}
+
+
+/* Set CLOBBER_OP to be clobber operand INDEX in GIMPLE_ASM GS.  */
+
+static inline void
+gimple_asm_set_clobber_op (gimple gs, unsigned index, tree clobber_op)
+{
+  GIMPLE_CHECK (gs, GIMPLE_ASM);
+  gcc_assert (index < gs->gimple_asm.nc);
+  gcc_assert (TREE_CODE (clobber_op) == TREE_LIST);
+  gimple_set_op (gs, index + gs->gimple_asm.ni + gs->gimple_asm.no, clobber_op);
+}
+
+
+/* Return the string representing the assembly instruction in
+   GIMPLE_ASM GS.  */
+
+static inline const char *
+gimple_asm_string (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_ASM);
+  return gs->gimple_asm.string;
+}
+
+
+/* Return true if GS is an asm statement marked volatile.  */
+
+static inline bool
+gimple_asm_volatile_p (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_ASM);
+  return (gs->gsbase.subcode & GF_ASM_VOLATILE) != 0;
+}
+
+
+/* If VOLATILE_P is true, mark asm statement GS as volatile.  */
+
+static inline void
+gimple_asm_set_volatile (gimple gs, bool volatile_p)
+{
+  GIMPLE_CHECK (gs, GIMPLE_ASM);
+  if (volatile_p)
+    gs->gsbase.subcode |= GF_ASM_VOLATILE;
+  else
+    gs->gsbase.subcode &= ~GF_ASM_VOLATILE;
+}
+
+
+/* If INPUT_P is true, mark asm GS as an ASM_INPUT.  */
+
+static inline void
+gimple_asm_set_input (gimple gs, bool input_p)
+{
+  GIMPLE_CHECK (gs, GIMPLE_ASM);
+  if (input_p)
+    gs->gsbase.subcode |= GF_ASM_INPUT;
+  else
+    gs->gsbase.subcode &= ~GF_ASM_INPUT;
+}
+
+
+/* Return true if asm GS is an ASM_INPUT.  */
+
+static inline bool
+gimple_asm_input_p (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_ASM);
+  return (gs->gsbase.subcode & GF_ASM_INPUT) != 0;
+}
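
For illustration, a sketch of scanning the clobber list; the helper is hypothetical, each clobber operand is a TREE_LIST wrapping a STRING_CST, and strcmp comes from system.h:

  /* Return true if asm statement STMT clobbers "memory".  */
  static bool
  asm_clobbers_memory_p (gimple stmt)
  {
    unsigned i;
    for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
      {
        tree clobber = TREE_VALUE (gimple_asm_clobber_op (stmt, i));
        if (strcmp (TREE_STRING_POINTER (clobber), "memory") == 0)
          return true;
      }
    return false;
  }
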
+
+
+/* Return the types handled by GIMPLE_CATCH statement GS.  */
+
+static inline tree
+gimple_catch_types (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_CATCH);
+  return gs->gimple_catch.types;
+}
+
+
+/* Return a pointer to the types handled by GIMPLE_CATCH statement GS.  */
+
+static inline tree *
+gimple_catch_types_ptr (gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_CATCH);
+  return &gs->gimple_catch.types;
+}
+
+
+/* Return the GIMPLE sequence representing the body of the handler of
+   GIMPLE_CATCH statement GS.  */
+
+static inline gimple_seq
+gimple_catch_handler (gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_CATCH);
+  return gs->gimple_catch.handler;
+}
+
+
+/* Return a pointer to the GIMPLE sequence representing the body of
+   the handler of GIMPLE_CATCH statement GS.  */
+
+static inline gimple_seq *
+gimple_catch_handler_ptr (gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_CATCH);
+  return &gs->gimple_catch.handler;
+}
+
+
+/* Set T to be the set of types handled by GIMPLE_CATCH GS.  */
+
+static inline void
+gimple_catch_set_types (gimple gs, tree t)
+{
+  GIMPLE_CHECK (gs, GIMPLE_CATCH);
+  gs->gimple_catch.types = t;
+}
+
+
+/* Set HANDLER to be the body of GIMPLE_CATCH GS.  */
+
+static inline void
+gimple_catch_set_handler (gimple gs, gimple_seq handler)
+{
+  GIMPLE_CHECK (gs, GIMPLE_CATCH);
+  gs->gimple_catch.handler = handler;
+}
+
+
+/* Return the types handled by GIMPLE_EH_FILTER statement GS.  */
+
+static inline tree
+gimple_eh_filter_types (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_EH_FILTER);
+  return gs->gimple_eh_filter.types;
+}
+
+
+/* Return a pointer to the types handled by GIMPLE_EH_FILTER statement
+   GS.  */
+
+static inline tree *
+gimple_eh_filter_types_ptr (gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_EH_FILTER);
+  return &gs->gimple_eh_filter.types;
+}
+
+
+/* Return the sequence of statements to execute when GIMPLE_EH_FILTER
+   statement GS fails.  */
+
+static inline gimple_seq
+gimple_eh_filter_failure (gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_EH_FILTER);
+  return gs->gimple_eh_filter.failure;
+}
+
+
+/* Set TYPES to be the set of types handled by GIMPLE_EH_FILTER GS.  */
+
+static inline void
+gimple_eh_filter_set_types (gimple gs, tree types)
+{
+  GIMPLE_CHECK (gs, GIMPLE_EH_FILTER);
+  gs->gimple_eh_filter.types = types;
+}
+
+
+/* Set FAILURE to be the sequence of statements to execute on failure
+   for GIMPLE_EH_FILTER GS.  */
+
+static inline void
+gimple_eh_filter_set_failure (gimple gs, gimple_seq failure)
+{
+  GIMPLE_CHECK (gs, GIMPLE_EH_FILTER);
+  gs->gimple_eh_filter.failure = failure;
+}
+
+/* Return the EH_FILTER_MUST_NOT_THROW flag.  */
+
+static inline bool
+gimple_eh_filter_must_not_throw (gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_EH_FILTER);
+  return gs->gsbase.subcode != 0;
+}
+
+/* Set the EH_FILTER_MUST_NOT_THROW flag to the value MNTP.  */
+
+static inline void
+gimple_eh_filter_set_must_not_throw (gimple gs, bool mntp)
+{
+  GIMPLE_CHECK (gs, GIMPLE_EH_FILTER);
+  gs->gsbase.subcode = (unsigned int) mntp;
+}
+
+
+/* GIMPLE_TRY accessors. */
+
+/* Return the kind of try block represented by GIMPLE_TRY GS.  This is
+   either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY.  */
+
+static inline enum gimple_try_flags
+gimple_try_kind (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_TRY);
+  return (enum gimple_try_flags) (gs->gsbase.subcode & GIMPLE_TRY_KIND);
+}
+
+
+/* Set the kind of try block represented by GIMPLE_TRY GS.  */
+
+static inline void
+gimple_try_set_kind (gimple gs, enum gimple_try_flags kind)
+{
+  GIMPLE_CHECK (gs, GIMPLE_TRY);
+  gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
+  if (gimple_try_kind (gs) != kind)
+    gs->gsbase.subcode = (unsigned int) kind;
+}
+
+
+/* Return the GIMPLE_TRY_CATCH_IS_CLEANUP flag.  */
+
+static inline bool
+gimple_try_catch_is_cleanup (const_gimple gs)
+{
+  gcc_assert (gimple_try_kind (gs) == GIMPLE_TRY_CATCH);
+  return (gs->gsbase.subcode & GIMPLE_TRY_CATCH_IS_CLEANUP) != 0;
+}
+
+
+/* Return the sequence of statements used as the body for GIMPLE_TRY GS.  */
+
+static inline gimple_seq
+gimple_try_eval (gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_TRY);
+  return gs->gimple_try.eval;
+}
+
+
+/* Return the sequence of statements used as the cleanup body for
+   GIMPLE_TRY GS.  */
+
+static inline gimple_seq
+gimple_try_cleanup (gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_TRY);
+  return gs->gimple_try.cleanup;
+}
+
+
+/* Set the GIMPLE_TRY_CATCH_IS_CLEANUP flag.  */
+
+static inline void
+gimple_try_set_catch_is_cleanup (gimple g, bool catch_is_cleanup)
+{
+  gcc_assert (gimple_try_kind (g) == GIMPLE_TRY_CATCH);
+  if (catch_is_cleanup)
+    g->gsbase.subcode |= GIMPLE_TRY_CATCH_IS_CLEANUP;
+  else
+    g->gsbase.subcode &= ~GIMPLE_TRY_CATCH_IS_CLEANUP;
+}
+
+
+/* Set EVAL to be the sequence of statements to use as the body for
+   GIMPLE_TRY GS.  */
+
+static inline void
+gimple_try_set_eval (gimple gs, gimple_seq eval)
+{
+  GIMPLE_CHECK (gs, GIMPLE_TRY);
+  gs->gimple_try.eval = eval;
+}
+
+
+/* Set CLEANUP to be the sequence of statements to use as the cleanup
+   body for GIMPLE_TRY GS.  */
+
+static inline void
+gimple_try_set_cleanup (gimple gs, gimple_seq cleanup)
+{
+  GIMPLE_CHECK (gs, GIMPLE_TRY);
+  gs->gimple_try.cleanup = cleanup;
+}
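
A minimal sketch of querying a try statement; the helper name is made up:

  /* If STMT is a try/finally, return its cleanup sequence, else NULL.  */
  static gimple_seq
  try_finally_cleanup (gimple stmt)
  {
    if (gimple_code (stmt) == GIMPLE_TRY
        && gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
      return gimple_try_cleanup (stmt);
    return NULL;
  }
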
+
+
+/* Return the cleanup sequence for cleanup statement GS.  */
+
+static inline gimple_seq
+gimple_wce_cleanup (gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_WITH_CLEANUP_EXPR);
+  return gs->gimple_wce.cleanup;
+}
+
+
+/* Set CLEANUP to be the cleanup sequence for GS.  */
+
+static inline void
+gimple_wce_set_cleanup (gimple gs, gimple_seq cleanup)
+{
+  GIMPLE_CHECK (gs, GIMPLE_WITH_CLEANUP_EXPR);
+  gs->gimple_wce.cleanup = cleanup;
+}
+
+
+/* Return the CLEANUP_EH_ONLY flag for a WCE tuple.  */
+
+static inline bool
+gimple_wce_cleanup_eh_only (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_WITH_CLEANUP_EXPR);
+  return gs->gsbase.subcode != 0;
+}
+
+
+/* Set the CLEANUP_EH_ONLY flag for a WCE tuple.  */
+
+static inline void
+gimple_wce_set_cleanup_eh_only (gimple gs, bool eh_only_p)
+{
+  GIMPLE_CHECK (gs, GIMPLE_WITH_CLEANUP_EXPR);
+  gs->gsbase.subcode = (unsigned int) eh_only_p;
+}
+
+
+/* Return the maximum number of arguments supported by GIMPLE_PHI GS.  */
+
+static inline unsigned
+gimple_phi_capacity (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_PHI);
+  return gs->gimple_phi.capacity;
+}
+
+
+/* Return the number of arguments in GIMPLE_PHI GS.  This must always
+   be exactly the number of incoming edges for the basic block holding
+   GS.  */
+
+static inline unsigned
+gimple_phi_num_args (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_PHI);
+  return gs->gimple_phi.nargs;
+}
+
+
+/* Return the SSA name created by GIMPLE_PHI GS.  */
+
+static inline tree
+gimple_phi_result (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_PHI);
+  return gs->gimple_phi.result;
+}
+
+/* Return a pointer to the SSA name created by GIMPLE_PHI GS.  */
+
+static inline tree *
+gimple_phi_result_ptr (gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_PHI);
+  return &gs->gimple_phi.result;
+}
+
+/* Set RESULT to be the SSA name created by GIMPLE_PHI GS.  */
+
+static inline void
+gimple_phi_set_result (gimple gs, tree result)
+{
+  GIMPLE_CHECK (gs, GIMPLE_PHI);
+  gs->gimple_phi.result = result;
+}
+
+
+/* Return the PHI argument corresponding to incoming edge INDEX for
+   GIMPLE_PHI GS.  */
+
+static inline struct phi_arg_d *
+gimple_phi_arg (gimple gs, unsigned index)
+{
+  GIMPLE_CHECK (gs, GIMPLE_PHI);
+  gcc_assert (index < gs->gimple_phi.capacity);
+  return &(gs->gimple_phi.args[index]);
+}
+
+/* Set PHIARG to be the argument corresponding to incoming edge INDEX
+   for GIMPLE_PHI GS.  */
+
+static inline void
+gimple_phi_set_arg (gimple gs, unsigned index, struct phi_arg_d * phiarg)
+{
+  GIMPLE_CHECK (gs, GIMPLE_PHI);
+  gcc_assert (index < gs->gimple_phi.nargs);
+  memcpy (gs->gimple_phi.args + index, phiarg, sizeof (struct phi_arg_d));
+}
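
As a usage sketch for the PHI accessors, assuming EDGE_COUNT and the preds vector from basic-block.h; the helper is hypothetical:

  /* Sanity check: a PHI node must have one argument per incoming edge of
     the basic block that holds it.  */
  static bool
  phi_args_match_edges_p (gimple phi)
  {
    return gimple_phi_num_args (phi) == EDGE_COUNT (gimple_bb (phi)->preds);
  }
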
+
+/* Return the region number for GIMPLE_RESX GS.  */
+
+static inline int
+gimple_resx_region (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_RESX);
+  return gs->gimple_resx.region;
+}
+
+/* Set REGION to be the region number for GIMPLE_RESX GS.  */
+
+static inline void
+gimple_resx_set_region (gimple gs, int region)
+{
+  GIMPLE_CHECK (gs, GIMPLE_RESX);
+  gs->gimple_resx.region = region;
+}
+
+
+/* Return the number of labels associated with the switch statement GS.  */
+
+static inline unsigned
+gimple_switch_num_labels (const_gimple gs)
+{
+  unsigned num_ops;
+  GIMPLE_CHECK (gs, GIMPLE_SWITCH);
+  num_ops = gimple_num_ops (gs);
+  gcc_assert (num_ops > 1);
+  return num_ops - 1;
+}
+
+
+/* Set NLABELS to be the number of labels for the switch statement GS.  */
+
+static inline void
+gimple_switch_set_num_labels (gimple g, unsigned nlabels)
+{
+  GIMPLE_CHECK (g, GIMPLE_SWITCH);
+  gimple_set_num_ops (g, nlabels + 1);
+}
+
+
+/* Return the index variable used by the switch statement GS.  */
+
+static inline tree
+gimple_switch_index (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_SWITCH);
+  return gimple_op (gs, 0);
+}
+
+
+/* Return a pointer to the index variable for the switch statement GS.  */
+
+static inline tree *
+gimple_switch_index_ptr (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_SWITCH);
+  return gimple_op_ptr (gs, 0);
+}
+
+
+/* Set INDEX to be the index variable for switch statement GS.  */
+
+static inline void
+gimple_switch_set_index (gimple gs, tree index)
+{
+  GIMPLE_CHECK (gs, GIMPLE_SWITCH);
+  gcc_assert (SSA_VAR_P (index) || CONSTANT_CLASS_P (index));
+  gimple_set_op (gs, 0, index);
+}
+
+
+/* Return the label numbered INDEX.  The default label is 0, followed by any
+   labels in a switch statement.  */
+
+static inline tree
+gimple_switch_label (const_gimple gs, unsigned index)
+{
+  GIMPLE_CHECK (gs, GIMPLE_SWITCH);
+  gcc_assert (gimple_num_ops (gs) > index + 1);
+  return gimple_op (gs, index + 1);
+}
+
+/* Set the label number INDEX to LABEL.  0 is always the default label.  */
+
+static inline void
+gimple_switch_set_label (gimple gs, unsigned index, tree label)
+{
+  GIMPLE_CHECK (gs, GIMPLE_SWITCH);
+  gcc_assert (gimple_num_ops (gs) > index + 1);
+  gcc_assert (label == NULL_TREE || TREE_CODE (label) == CASE_LABEL_EXPR);
+  gimple_set_op (gs, index + 1, label);
+}
+
+/* Return the default label for a switch statement.  */
+
+static inline tree
+gimple_switch_default_label (const_gimple gs)
+{
+  return gimple_switch_label (gs, 0);
+}
+
+/* Set the default label for a switch statement.  */
+
+static inline void
+gimple_switch_set_default_label (gimple gs, tree label)
+{
+  gimple_switch_set_label (gs, 0, label);
+}
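
A hedged sketch of walking the case labels of a switch; the helper is invented, CASE_LOW and tree_int_cst_equal come from tree.h, and case ranges (CASE_HIGH) are ignored for brevity:

  /* Return true if switch statement STMT has an explicit case label whose
     low value equals the integer constant VAL.  */
  static bool
  switch_has_case_for (gimple stmt, tree val)
  {
    unsigned i;
    for (i = 1; i < gimple_switch_num_labels (stmt); i++)
      {
        tree elt = gimple_switch_label (stmt, i);
        if (CASE_LOW (elt) && tree_int_cst_equal (CASE_LOW (elt), val))
          return true;
      }
    return false;
  }
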
+
+
+/* Return the body for the OMP statement GS.  */
+
+static inline gimple_seq 
+gimple_omp_body (gimple gs)
+{
+  return gs->omp.body;
+}
+
+/* Set BODY to be the body for the OMP statement GS.  */
+
+static inline void
+gimple_omp_set_body (gimple gs, gimple_seq body)
+{
+  gs->omp.body = body;
+}
+
+
+/* Return the name associated with OMP_CRITICAL statement GS.  */
+
+static inline tree
+gimple_omp_critical_name (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_CRITICAL);
+  return gs->gimple_omp_critical.name;
+}
+
+
+/* Return a pointer to the name associated with OMP critical statement GS.  */
+
+static inline tree *
+gimple_omp_critical_name_ptr (gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_CRITICAL);
+  return &gs->gimple_omp_critical.name;
+}
+
+
+/* Set NAME to be the name associated with OMP critical statement GS.  */
+
+static inline void
+gimple_omp_critical_set_name (gimple gs, tree name)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_CRITICAL);
+  gs->gimple_omp_critical.name = name;
+}
+
+
+/* Return the clauses associated with OMP_FOR GS.  */
+
+static inline tree
+gimple_omp_for_clauses (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+  return gs->gimple_omp_for.clauses;
+}
+
+
+/* Return a pointer to the clauses associated with OMP_FOR GS.  */
+
+static inline tree *
+gimple_omp_for_clauses_ptr (gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+  return &gs->gimple_omp_for.clauses;
+}
+
+
+/* Set CLAUSES to be the list of clauses associated with OMP_FOR GS.  */
+
+static inline void
+gimple_omp_for_set_clauses (gimple gs, tree clauses)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+  gs->gimple_omp_for.clauses = clauses;
+}
+
+
+/* Get the collapse count of OMP_FOR GS.  */
+
+static inline size_t
+gimple_omp_for_collapse (gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+  return gs->gimple_omp_for.collapse;
+}
+
+
+/* Return the index variable for OMP_FOR GS.  */
+
+static inline tree
+gimple_omp_for_index (const_gimple gs, size_t i)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+  gcc_assert (i < gs->gimple_omp_for.collapse);
+  return gs->gimple_omp_for.iter[i].index;
+}
+
+
+/* Return a pointer to the index variable for OMP_FOR GS.  */
+
+static inline tree *
+gimple_omp_for_index_ptr (gimple gs, size_t i)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+  gcc_assert (i < gs->gimple_omp_for.collapse);
+  return &gs->gimple_omp_for.iter[i].index;
+}
+
+
+/* Set INDEX to be the index variable for OMP_FOR GS.  */
+
+static inline void
+gimple_omp_for_set_index (gimple gs, size_t i, tree index)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+  gcc_assert (i < gs->gimple_omp_for.collapse);
+  gs->gimple_omp_for.iter[i].index = index;
+}
+
+
+/* Return the initial value for OMP_FOR GS.  */
+
+static inline tree
+gimple_omp_for_initial (const_gimple gs, size_t i)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+  gcc_assert (i < gs->gimple_omp_for.collapse);
+  return gs->gimple_omp_for.iter[i].initial;
+}
+
+
+/* Return a pointer to the initial value for OMP_FOR GS.  */
+
+static inline tree *
+gimple_omp_for_initial_ptr (gimple gs, size_t i)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+  gcc_assert (i < gs->gimple_omp_for.collapse);
+  return &gs->gimple_omp_for.iter[i].initial;
+}
+
+
+/* Set INITIAL to be the initial value for OMP_FOR GS.  */
+
+static inline void
+gimple_omp_for_set_initial (gimple gs, size_t i, tree initial)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+  gcc_assert (i < gs->gimple_omp_for.collapse);
+  gs->gimple_omp_for.iter[i].initial = initial;
+}
+
+
+/* Return the final value for OMP_FOR GS.  */
+
+static inline tree
+gimple_omp_for_final (const_gimple gs, size_t i)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+  gcc_assert (i < gs->gimple_omp_for.collapse);
+  return gs->gimple_omp_for.iter[i].final;
+}
+
+
+/* Return a pointer to the final value for OMP_FOR GS.  */
+
+static inline tree *
+gimple_omp_for_final_ptr (gimple gs, size_t i)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+  gcc_assert (i < gs->gimple_omp_for.collapse);
+  return &gs->gimple_omp_for.iter[i].final;
+}
+
+
+/* Set FINAL to be the final value for OMP_FOR GS.  */
+
+static inline void
+gimple_omp_for_set_final (gimple gs, size_t i, tree final)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+  gcc_assert (i < gs->gimple_omp_for.collapse);
+  gs->gimple_omp_for.iter[i].final = final;
+}
+
+
+/* Return the increment value for OMP_FOR GS.  */
+
+static inline tree
+gimple_omp_for_incr (const_gimple gs, size_t i)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+  gcc_assert (i < gs->gimple_omp_for.collapse);
+  return gs->gimple_omp_for.iter[i].incr;
+}
+
+
+/* Return a pointer to the increment value for OMP_FOR GS.  */
+
+static inline tree *
+gimple_omp_for_incr_ptr (gimple gs, size_t i)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+  gcc_assert (i < gs->gimple_omp_for.collapse);
+  return &gs->gimple_omp_for.iter[i].incr;
+}
+
+
+/* Set INCR to be the increment value for OMP_FOR GS.  */
+
+static inline void
+gimple_omp_for_set_incr (gimple gs, size_t i, tree incr)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+  gcc_assert (i < gs->gimple_omp_for.collapse);
+  gs->gimple_omp_for.iter[i].incr = incr;
+}
+
+
+/* Return the sequence of statements to execute before the OMP_FOR
+   statement GS starts.  */
+
+static inline gimple_seq
+gimple_omp_for_pre_body (gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+  return gs->gimple_omp_for.pre_body;
+}
+
+
+/* Set PRE_BODY to be the sequence of statements to execute before the
+   OMP_FOR statement GS starts.  */
+
+static inline void
+gimple_omp_for_set_pre_body (gimple gs, gimple_seq pre_body)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+  gs->gimple_omp_for.pre_body = pre_body;
+}
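
A minimal sketch of iterating over the collapsed loops of an OMP_FOR; the helper is hypothetical and print_generic_expr is the tree pretty-printer entry point:

  /* Dump the index, initial and final value of each collapsed loop of
     OMP_FOR statement STMT to FILE.  */
  static void
  dump_omp_for_bounds (FILE *file, gimple stmt)
  {
    size_t i;
    for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
      {
        print_generic_expr (file, gimple_omp_for_index (stmt, i), 0);
        fprintf (file, " : ");
        print_generic_expr (file, gimple_omp_for_initial (stmt, i), 0);
        fprintf (file, " -> ");
        print_generic_expr (file, gimple_omp_for_final (stmt, i), 0);
        fprintf (file, "\n");
      }
  }
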
+
+
+/* Return the clauses associated with OMP_PARALLEL GS.  */
+
+static inline tree
+gimple_omp_parallel_clauses (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_PARALLEL);
+  return gs->gimple_omp_parallel.clauses;
+}
+
+
+/* Return a pointer to the clauses associated with OMP_PARALLEL GS.  */
+
+static inline tree *
+gimple_omp_parallel_clauses_ptr (gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_PARALLEL);
+  return &gs->gimple_omp_parallel.clauses;
+}
+
+
+/* Set CLAUSES to be the list of clauses associated with OMP_PARALLEL
+   GS.  */
+
+static inline void
+gimple_omp_parallel_set_clauses (gimple gs, tree clauses)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_PARALLEL);
+  gs->gimple_omp_parallel.clauses = clauses;
+}
+
+
+/* Return the child function used to hold the body of OMP_PARALLEL GS.  */
+
+static inline tree
+gimple_omp_parallel_child_fn (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_PARALLEL);
+  return gs->gimple_omp_parallel.child_fn;
+}
+
+/* Return a pointer to the child function used to hold the body of
+   OMP_PARALLEL GS.  */
+
+static inline tree *
+gimple_omp_parallel_child_fn_ptr (gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_PARALLEL);
+  return &gs->gimple_omp_parallel.child_fn;
+}
+
+
+/* Set CHILD_FN to be the child function for OMP_PARALLEL GS.  */
+
+static inline void
+gimple_omp_parallel_set_child_fn (gimple gs, tree child_fn)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_PARALLEL);
+  gs->gimple_omp_parallel.child_fn = child_fn;
+}
+
+
+/* Return the artificial argument used to send variables and values
+   from the parent to the child threads in OMP_PARALLEL GS.  */
+
+static inline tree
+gimple_omp_parallel_data_arg (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_PARALLEL);
+  return gs->gimple_omp_parallel.data_arg;
+}
+
+
+/* Return a pointer to the data argument for OMP_PARALLEL GS.  */
+
+static inline tree *
+gimple_omp_parallel_data_arg_ptr (gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_PARALLEL);
+  return &gs->gimple_omp_parallel.data_arg;
+}
+
+
+/* Set DATA_ARG to be the data argument for OMP_PARALLEL GS.  */
+
+static inline void
+gimple_omp_parallel_set_data_arg (gimple gs, tree data_arg)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_PARALLEL);
+  gs->gimple_omp_parallel.data_arg = data_arg;
+}
+
+
+/* Return the clauses associated with OMP_TASK GS.  */
+
+static inline tree
+gimple_omp_task_clauses (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_TASK);
+  return gs->gimple_omp_parallel.clauses;
+}
+
+
+/* Return a pointer to the clauses associated with OMP_TASK GS.  */
+
+static inline tree *
+gimple_omp_task_clauses_ptr (gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_TASK);
+  return &gs->gimple_omp_parallel.clauses;
+}
+
+
+/* Set CLAUSES to be the list of clauses associated with OMP_TASK
+   GS.  */
+
+static inline void
+gimple_omp_task_set_clauses (gimple gs, tree clauses)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_TASK);
+  gs->gimple_omp_parallel.clauses = clauses;
+}
+
+
+/* Return the child function used to hold the body of OMP_TASK GS.  */
+
+static inline tree
+gimple_omp_task_child_fn (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_TASK);
+  return gs->gimple_omp_parallel.child_fn;
+}
+
+/* Return a pointer to the child function used to hold the body of
+   OMP_TASK GS.  */
+
+static inline tree *
+gimple_omp_task_child_fn_ptr (gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_TASK);
+  return &gs->gimple_omp_parallel.child_fn;
+}
+
+
+/* Set CHILD_FN to be the child function for OMP_TASK GS.  */
+
+static inline void
+gimple_omp_task_set_child_fn (gimple gs, tree child_fn)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_TASK);
+  gs->gimple_omp_parallel.child_fn = child_fn;
+}
+
+
+/* Return the artificial argument used to send variables and values
+   from the parent to the child threads in OMP_TASK GS.  */
+
+static inline tree
+gimple_omp_task_data_arg (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_TASK);
+  return gs->gimple_omp_parallel.data_arg;
+}
+
+
+/* Return a pointer to the data argument for OMP_TASK GS.  */
+
+static inline tree *
+gimple_omp_task_data_arg_ptr (gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_TASK);
+  return &gs->gimple_omp_parallel.data_arg;
+}
+
+
+/* Set DATA_ARG to be the data argument for OMP_TASK GS.  */
+
+static inline void
+gimple_omp_task_set_data_arg (gimple gs, tree data_arg)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_TASK);
+  gs->gimple_omp_parallel.data_arg = data_arg;
+}
+
+
+/* Return the clauses associated with OMP_PARALLEL or OMP_TASK GS.  */
+
+static inline tree
+gimple_omp_taskreg_clauses (const_gimple gs)
+{
+  if (gimple_code (gs) != GIMPLE_OMP_PARALLEL)
+    GIMPLE_CHECK (gs, GIMPLE_OMP_TASK);
+  return gs->gimple_omp_parallel.clauses;
+}
+
+
+/* Return a pointer to the clauses associated with OMP_PARALLEL or
+   OMP_TASK GS.  */
+
+static inline tree *
+gimple_omp_taskreg_clauses_ptr (gimple gs)
+{
+  if (gimple_code (gs) != GIMPLE_OMP_PARALLEL)
+    GIMPLE_CHECK (gs, GIMPLE_OMP_TASK);
+  return &gs->gimple_omp_parallel.clauses;
+}
+
+
+/* Set CLAUSES to be the list of clauses associated with OMP_PARALLEL or
+   OMP_TASK GS.  */
+
+static inline void
+gimple_omp_taskreg_set_clauses (gimple gs, tree clauses)
+{
+  if (gimple_code (gs) != GIMPLE_OMP_PARALLEL)
+    GIMPLE_CHECK (gs, GIMPLE_OMP_TASK);
+  gs->gimple_omp_parallel.clauses = clauses;
+}
+
+
+/* Return the child function used to hold the body of OMP_PARALLEL or
+   OMP_TASK GS.  */
+
+static inline tree
+gimple_omp_taskreg_child_fn (const_gimple gs)
+{
+  if (gimple_code (gs) != GIMPLE_OMP_PARALLEL)
+    GIMPLE_CHECK (gs, GIMPLE_OMP_TASK);
+  return gs->gimple_omp_parallel.child_fn;
+}
+
+/* Return a pointer to the child function used to hold the body of
+   OMP_PARALLEL or OMP_TASK GS.  */
+
+static inline tree *
+gimple_omp_taskreg_child_fn_ptr (gimple gs)
+{
+  if (gimple_code (gs) != GIMPLE_OMP_PARALLEL)
+    GIMPLE_CHECK (gs, GIMPLE_OMP_TASK);
+  return &gs->gimple_omp_parallel.child_fn;
+}
+
+
+/* Set CHILD_FN to be the child function for OMP_PARALLEL or OMP_TASK GS.  */
+
+static inline void
+gimple_omp_taskreg_set_child_fn (gimple gs, tree child_fn)
+{
+  if (gimple_code (gs) != GIMPLE_OMP_PARALLEL)
+    GIMPLE_CHECK (gs, GIMPLE_OMP_TASK);
+  gs->gimple_omp_parallel.child_fn = child_fn;
+}
+
+
+/* Return the artificial argument used to send variables and values
+   from the parent to the child threads in OMP_PARALLEL or OMP_TASK GS.  */
+
+static inline tree
+gimple_omp_taskreg_data_arg (const_gimple gs)
+{
+  if (gimple_code (gs) != GIMPLE_OMP_PARALLEL)
+    GIMPLE_CHECK (gs, GIMPLE_OMP_TASK);
+  return gs->gimple_omp_parallel.data_arg;
+}
+
+
+/* Return a pointer to the data argument for OMP_PARALLEL or OMP_TASK GS.  */
+
+static inline tree *
+gimple_omp_taskreg_data_arg_ptr (gimple gs)
+{
+  if (gimple_code (gs) != GIMPLE_OMP_PARALLEL)
+    GIMPLE_CHECK (gs, GIMPLE_OMP_TASK);
+  return &gs->gimple_omp_parallel.data_arg;
+}
+
+
+/* Set DATA_ARG to be the data argument for OMP_PARALLEL or OMP_TASK GS.  */
+
+static inline void
+gimple_omp_taskreg_set_data_arg (gimple gs, tree data_arg)
+{
+  if (gimple_code (gs) != GIMPLE_OMP_PARALLEL)
+    GIMPLE_CHECK (gs, GIMPLE_OMP_TASK);
+  gs->gimple_omp_parallel.data_arg = data_arg;
+}
+
+
+/* Return the copy function associated with OMP_TASK GS.  */
+
+static inline tree
+gimple_omp_task_copy_fn (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_TASK);
+  return gs->gimple_omp_task.copy_fn;
+}
+
+/* Return a pointer to the copy function associated with OMP_TASK GS.  */
+
+static inline tree *
+gimple_omp_task_copy_fn_ptr (gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_TASK);
+  return &gs->gimple_omp_task.copy_fn;
+}
+
+
+/* Set COPY_FN to be the copy function for OMP_TASK GS.  */
+
+static inline void
+gimple_omp_task_set_copy_fn (gimple gs, tree copy_fn)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_TASK);
+  gs->gimple_omp_task.copy_fn = copy_fn;
+}
+
+
+/* Return the size in bytes of the data block in OMP_TASK GS.  */
+
+static inline tree
+gimple_omp_task_arg_size (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_TASK);
+  return gs->gimple_omp_task.arg_size;
+}
+
+
+/* Return a pointer to the data block size for OMP_TASK GS.  */
+
+static inline tree *
+gimple_omp_task_arg_size_ptr (gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_TASK);
+  return &gs->gimple_omp_task.arg_size;
+}
+
+
+/* Set ARG_SIZE to be the data block size for OMP_TASK GS.  */
+
+static inline void
+gimple_omp_task_set_arg_size (gimple gs, tree arg_size)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_TASK);
+  gs->gimple_omp_task.arg_size = arg_size;
+}
+
+
+/* Return the alignment in bytes of the data block in OMP_TASK GS.  */
+
+static inline tree
+gimple_omp_task_arg_align (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_TASK);
+  return gs->gimple_omp_task.arg_align;
+}
+
+
+/* Return a pointer to the data block alignment for OMP_TASK GS.  */
+
+static inline tree *
+gimple_omp_task_arg_align_ptr (gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_TASK);
+  return &gs->gimple_omp_task.arg_align;
+}
+
+
+/* Set ARG_ALIGN to be the data block alignment for OMP_TASK GS.  */
+
+static inline void
+gimple_omp_task_set_arg_align (gimple gs, tree arg_align)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_TASK);
+  gs->gimple_omp_task.arg_align = arg_align;
+}
+
+
+/* Return the clauses associated with OMP_SINGLE GS.  */
+
+static inline tree
+gimple_omp_single_clauses (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_SINGLE);
+  return gs->gimple_omp_single.clauses;
+}
+
+
+/* Return a pointer to the clauses associated with OMP_SINGLE GS.  */
+
+static inline tree *
+gimple_omp_single_clauses_ptr (gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_SINGLE);
+  return &gs->gimple_omp_single.clauses;
+}
+
+
+/* Set CLAUSES to be the clauses associated with OMP_SINGLE GS.  */
+
+static inline void
+gimple_omp_single_set_clauses (gimple gs, tree clauses)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_SINGLE);
+  gs->gimple_omp_single.clauses = clauses;
+}
+
+
+/* Return the clauses associated with OMP_SECTIONS GS.  */
+
+static inline tree
+gimple_omp_sections_clauses (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_SECTIONS);
+  return gs->gimple_omp_sections.clauses;
+}
+
+
+/* Return a pointer to the clauses associated with OMP_SECTIONS GS.  */
+
+static inline tree *
+gimple_omp_sections_clauses_ptr (gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_SECTIONS);
+  return &gs->gimple_omp_sections.clauses;
+}
+
+
+/* Set CLAUSES to be the set of clauses associated with OMP_SECTIONS
+   GS.  */
+
+static inline void
+gimple_omp_sections_set_clauses (gimple gs, tree clauses)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_SECTIONS);
+  gs->gimple_omp_sections.clauses = clauses;
+}
+
+
+/* Return the control variable associated with the GIMPLE_OMP_SECTIONS
+   in GS.  */
+
+static inline tree
+gimple_omp_sections_control (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_SECTIONS);
+  return gs->gimple_omp_sections.control;
+}
+
+
+/* Return a pointer to the control variable associated with the
+   GIMPLE_OMP_SECTIONS in GS.  */
+
+static inline tree *
+gimple_omp_sections_control_ptr (gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_SECTIONS);
+  return &gs->gimple_omp_sections.control;
+}
+
+
+/* Set CONTROL to be the control variable for the GIMPLE_OMP_SECTIONS
+   in GS.  */
+
+static inline void
+gimple_omp_sections_set_control (gimple gs, tree control)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_SECTIONS);
+  gs->gimple_omp_sections.control = control;
+}
+
+
+/* Set COND to be the condition code for index I in the OMP_FOR
+   statement GS.  */
+
+static inline void
+gimple_omp_for_set_cond (gimple gs, size_t i, enum tree_code cond)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+  gcc_assert (TREE_CODE_CLASS (cond) == tcc_comparison);
+  gcc_assert (i < gs->gimple_omp_for.collapse);
+  gs->gimple_omp_for.iter[i].cond = cond;
+}
+
+
+/* Return the condition code for index I in the OMP_FOR statement GS.  */
+
+static inline enum tree_code
+gimple_omp_for_cond (const_gimple gs, size_t i)
+{
+  GIMPLE_CHECK (gs, GIMPLE_OMP_FOR);
+  gcc_assert (i < gs->gimple_omp_for.collapse);
+  return gs->gimple_omp_for.iter[i].cond;
+}
+
+
+/* Set the value being stored in an atomic store.  */
+
+static inline void
+gimple_omp_atomic_store_set_val (gimple g, tree val)
+{
+  GIMPLE_CHECK (g, GIMPLE_OMP_ATOMIC_STORE);
+  g->gimple_omp_atomic_store.val = val;
+}
+
+
+/* Return the value being stored in an atomic store.  */
+
+static inline tree
+gimple_omp_atomic_store_val (const_gimple g)
+{
+  GIMPLE_CHECK (g, GIMPLE_OMP_ATOMIC_STORE);
+  return g->gimple_omp_atomic_store.val;
+}
+
+
+/* Return a pointer to the value being stored in an atomic store.  */
+
+static inline tree *
+gimple_omp_atomic_store_val_ptr (gimple g)
+{
+  GIMPLE_CHECK (g, GIMPLE_OMP_ATOMIC_STORE);
+  return &g->gimple_omp_atomic_store.val;
+}
+
+
+/* Set the LHS of an atomic load.  */
+
+static inline void
+gimple_omp_atomic_load_set_lhs (gimple g, tree lhs)
+{
+  GIMPLE_CHECK (g, GIMPLE_OMP_ATOMIC_LOAD);
+  g->gimple_omp_atomic_load.lhs = lhs;
+}
+
+
+/* Get the LHS of an atomic load.  */
+
+static inline tree
+gimple_omp_atomic_load_lhs (const_gimple g)
+{
+  GIMPLE_CHECK (g, GIMPLE_OMP_ATOMIC_LOAD);
+  return g->gimple_omp_atomic_load.lhs;
+}
+
+
+/* Return a pointer to the LHS of an atomic load.  */
+
+static inline tree *
+gimple_omp_atomic_load_lhs_ptr (gimple g)
+{
+  GIMPLE_CHECK (g, GIMPLE_OMP_ATOMIC_LOAD);
+  return &g->gimple_omp_atomic_load.lhs;
+}
+
+
+/* Set the RHS of an atomic load.  */
+
+static inline void
+gimple_omp_atomic_load_set_rhs (gimple g, tree rhs)
+{
+  GIMPLE_CHECK (g, GIMPLE_OMP_ATOMIC_LOAD);
+  g->gimple_omp_atomic_load.rhs = rhs;
+}
+
+
+/* Get the RHS of an atomic load.  */
+
+static inline tree
+gimple_omp_atomic_load_rhs (const_gimple g)
+{
+  GIMPLE_CHECK (g, GIMPLE_OMP_ATOMIC_LOAD);
+  return g->gimple_omp_atomic_load.rhs;
+}
+
+
+/* Return a pointer to the RHS of an atomic load.  */
+
+static inline tree *
+gimple_omp_atomic_load_rhs_ptr (gimple g)
+{
+  GIMPLE_CHECK (g, GIMPLE_OMP_ATOMIC_LOAD);
+  return &g->gimple_omp_atomic_load.rhs;
+}
+
+
+/* Get the definition of the control variable in a GIMPLE_OMP_CONTINUE.  */
+
+static inline tree
+gimple_omp_continue_control_def (const_gimple g)
+{
+  GIMPLE_CHECK (g, GIMPLE_OMP_CONTINUE);
+  return g->gimple_omp_continue.control_def;
+}
+
+/* The same as above, but return the address.  */
+
+static inline tree *
+gimple_omp_continue_control_def_ptr (gimple g)
+{
+  GIMPLE_CHECK (g, GIMPLE_OMP_CONTINUE);
+  return &g->gimple_omp_continue.control_def;
+}
+
+/* Set the definition of the control variable in a GIMPLE_OMP_CONTINUE.  */
+
+static inline void
+gimple_omp_continue_set_control_def (gimple g, tree def)
+{
+  GIMPLE_CHECK (g, GIMPLE_OMP_CONTINUE);
+  g->gimple_omp_continue.control_def = def;
+}
+
+
+/* Get the use of the control variable in a GIMPLE_OMP_CONTINUE.  */
+
+static inline tree
+gimple_omp_continue_control_use (const_gimple g)
+{
+  GIMPLE_CHECK (g, GIMPLE_OMP_CONTINUE);
+  return g->gimple_omp_continue.control_use;
+}
+
+
+/* The same as above, but return the address.  */
+
+static inline tree *
+gimple_omp_continue_control_use_ptr (gimple g)
+{
+  GIMPLE_CHECK (g, GIMPLE_OMP_CONTINUE);
+  return &g->gimple_omp_continue.control_use;
+}
+
+
+/* Set the use of the control variable in a GIMPLE_OMP_CONTINUE.  */
+
+static inline void
+gimple_omp_continue_set_control_use (gimple g, tree use)
+{
+  GIMPLE_CHECK (g, GIMPLE_OMP_CONTINUE);
+  g->gimple_omp_continue.control_use = use;
+}
+
+
+/* Return a pointer to the return value for GIMPLE_RETURN GS.  */
+
+static inline tree *
+gimple_return_retval_ptr (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_RETURN);
+  gcc_assert (gimple_num_ops (gs) == 1);
+  return gimple_op_ptr (gs, 0);
+}
+
+/* Return the return value for GIMPLE_RETURN GS.  */
+
+static inline tree
+gimple_return_retval (const_gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_RETURN);
+  gcc_assert (gimple_num_ops (gs) == 1);
+  return gimple_op (gs, 0);
+}
+
+
+/* Set RETVAL to be the return value for GIMPLE_RETURN GS.  */
+
+static inline void
+gimple_return_set_retval (gimple gs, tree retval)
+{
+  GIMPLE_CHECK (gs, GIMPLE_RETURN);
+  gcc_assert (gimple_num_ops (gs) == 1);
+  gcc_assert (retval == NULL_TREE
+              || TREE_CODE (retval) == RESULT_DECL
+             || is_gimple_val (retval));
+  gimple_set_op (gs, 0, retval);
+}
+
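As a usage illustration, a caller would typically pair these accessors with the GIMPLE_RETURN constructor.  The helper below is a hypothetical sketch, not part of this patch; gimple_build_return and gimple_seq_add_stmt are assumed to be the constructor and sequence helper declared elsewhere in gimple.h.

static gimple
emit_return_stmt (gimple_seq *seq_p, tree val)
{
  gimple ret = gimple_build_return (val);

  /* Queue the new statement and sanity-check it through the accessor.  */
  gimple_seq_add_stmt (seq_p, ret);
  gcc_assert (gimple_return_retval (ret) == val);
  return ret;
}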
+
+/* Returns true when the gimple statement STMT is any of the OpenMP types.  */
+
+static inline bool
+is_gimple_omp (const_gimple stmt)
+{
+  return (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
+         || gimple_code (stmt) == GIMPLE_OMP_TASK
+         || gimple_code (stmt) == GIMPLE_OMP_FOR
+         || gimple_code (stmt) == GIMPLE_OMP_SECTIONS
+         || gimple_code (stmt) == GIMPLE_OMP_SECTIONS_SWITCH
+         || gimple_code (stmt) == GIMPLE_OMP_SINGLE
+         || gimple_code (stmt) == GIMPLE_OMP_SECTION
+         || gimple_code (stmt) == GIMPLE_OMP_MASTER
+         || gimple_code (stmt) == GIMPLE_OMP_ORDERED
+         || gimple_code (stmt) == GIMPLE_OMP_CRITICAL
+         || gimple_code (stmt) == GIMPLE_OMP_RETURN
+         || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
+         || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
+         || gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
+}
+
+
+/* Returns TRUE if statement G is a GIMPLE_NOP.  */
+
+static inline bool
+gimple_nop_p (const_gimple g)
+{
+  return gimple_code (g) == GIMPLE_NOP;
+}
+
+
+/* Return the new type set by GIMPLE_CHANGE_DYNAMIC_TYPE statement GS.  */
+
+static inline tree
+gimple_cdt_new_type (gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_CHANGE_DYNAMIC_TYPE);
+  return gimple_op (gs, 1);
+}
+
+/* Return a pointer to the new type set by GIMPLE_CHANGE_DYNAMIC_TYPE
+   statement GS.  */
+
+static inline tree *
+gimple_cdt_new_type_ptr (gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_CHANGE_DYNAMIC_TYPE);
+  return gimple_op_ptr (gs, 1);
+}
+
+/* Set NEW_TYPE to be the new type set by GIMPLE_CHANGE_DYNAMIC_TYPE
+   statement GS.  */
+
+static inline void
+gimple_cdt_set_new_type (gimple gs, tree new_type)
+{
+  GIMPLE_CHECK (gs, GIMPLE_CHANGE_DYNAMIC_TYPE);
+  gcc_assert (TREE_CODE_CLASS (TREE_CODE (new_type)) == tcc_type);
+  gimple_set_op (gs, 1, new_type);
+}
+
+
+/* Return the location affected by GIMPLE_CHANGE_DYNAMIC_TYPE statement GS.  */
+
+static inline tree
+gimple_cdt_location (gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_CHANGE_DYNAMIC_TYPE);
+  return gimple_op (gs, 0);
+}
+
+
+/* Return a pointer to the location affected by GIMPLE_CHANGE_DYNAMIC_TYPE
+   statement GS.  */
+
+static inline tree *
+gimple_cdt_location_ptr (gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_CHANGE_DYNAMIC_TYPE);
+  return gimple_op_ptr (gs, 0);
+}
+
+
+/* Set PTR to be the location affected by GIMPLE_CHANGE_DYNAMIC_TYPE
+   statement GS.  */
+
+static inline void
+gimple_cdt_set_location (gimple gs, tree ptr)
+{
+  GIMPLE_CHECK (gs, GIMPLE_CHANGE_DYNAMIC_TYPE);
+  gimple_set_op (gs, 0, ptr);
+}
+
+
+/* Return the predictor of GIMPLE_PREDICT statement GS.  */
+
+static inline enum br_predictor
+gimple_predict_predictor (gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_PREDICT);
+  return (enum br_predictor) (gs->gsbase.subcode & ~GF_PREDICT_TAKEN);
+}
+
+
+/* Set the predictor of GIMPLE_PREDICT statement GS to PREDICT.  */
+
+static inline void
+gimple_predict_set_predictor (gimple gs, enum br_predictor predictor)
+{
+  GIMPLE_CHECK (gs, GIMPLE_PREDICT);
+  gs->gsbase.subcode = (gs->gsbase.subcode & GF_PREDICT_TAKEN)
+                      | (unsigned) predictor;
+}
+
+
+/* Return the outcome of GIMPLE_PREDICT statement GS.  */
+
+static inline enum prediction
+gimple_predict_outcome (gimple gs)
+{
+  GIMPLE_CHECK (gs, GIMPLE_PREDICT);
+  return (gs->gsbase.subcode & GF_PREDICT_TAKEN) ? TAKEN : NOT_TAKEN;
+}
+
+
+/* Set the outcome of GIMPLE_PREDICT statement GS to OUTCOME.  */
+
+static inline void
+gimple_predict_set_outcome (gimple gs, enum prediction outcome)
+{
+  GIMPLE_CHECK (gs, GIMPLE_PREDICT);
+  if (outcome == TAKEN)
+    gs->gsbase.subcode |= GF_PREDICT_TAKEN;
+  else
+    gs->gsbase.subcode &= ~GF_PREDICT_TAKEN;
+}
+
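A minimal sketch of how the two accessor pairs above combine (the helper is hypothetical, not part of this patch): flip the recorded outcome of a GIMPLE_PREDICT statement while leaving its predictor untouched.

static void
invert_predict_outcome (gimple gs)
{
  /* Only the GF_PREDICT_TAKEN bit of the subcode changes; the
     predictor encoded in the remaining bits is preserved.  */
  if (gimple_predict_outcome (gs) == TAKEN)
    gimple_predict_set_outcome (gs, NOT_TAKEN);
  else
    gimple_predict_set_outcome (gs, TAKEN);
}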
+
+/* Return a new iterator pointing to the first statement in sequence SEQ.  */
+
+static inline gimple_stmt_iterator
+gsi_start (gimple_seq seq)
+{
+  gimple_stmt_iterator i;
+
+  i.ptr = gimple_seq_first (seq);
+  i.seq = seq;
+  i.bb = (i.ptr && i.ptr->stmt) ? gimple_bb (i.ptr->stmt) : NULL;
+
+  return i;
+}
+
+
+/* Return a new iterator pointing to the first statement in basic block BB.  */
+
+static inline gimple_stmt_iterator
+gsi_start_bb (basic_block bb)
+{
+  gimple_stmt_iterator i;
+  gimple_seq seq;
+  
+  seq = bb_seq (bb);
+  i.ptr = gimple_seq_first (seq);
+  i.seq = seq;
+  i.bb = bb;
+
+  return i;
+}
+
+
+/* Return a new iterator pointing to the last statement in sequence SEQ.  */
+
+static inline gimple_stmt_iterator
+gsi_last (gimple_seq seq)
+{
+  gimple_stmt_iterator i;
+
+  i.ptr = gimple_seq_last (seq);
+  i.seq = seq;
+  i.bb = (i.ptr && i.ptr->stmt) ? gimple_bb (i.ptr->stmt) : NULL;
+
+  return i;
+}
+
+
+/* Return a new iterator pointing to the last statement in basic block BB.  */
+
+static inline gimple_stmt_iterator
+gsi_last_bb (basic_block bb)
+{
+  gimple_stmt_iterator i;
+  gimple_seq seq;
+
+  seq = bb_seq (bb);
+  i.ptr = gimple_seq_last (seq);
+  i.seq = seq;
+  i.bb = bb;
+
+  return i;
+}
+
+
+/* Return true if I is at the end of its sequence.  */
+
+static inline bool
+gsi_end_p (gimple_stmt_iterator i)
+{
+  return i.ptr == NULL;
+}
+
+
+/* Return true if I is one statement before the end of its sequence.  */
+
+static inline bool
+gsi_one_before_end_p (gimple_stmt_iterator i)
+{
+  return i.ptr != NULL && i.ptr->next == NULL;
+}
+
+
+/* Advance the iterator to the next gimple statement.  */
+
+static inline void
+gsi_next (gimple_stmt_iterator *i)
+{
+  i->ptr = i->ptr->next;
+}
+
+/* Advance the iterator to the previous gimple statement.  */
+
+static inline void
+gsi_prev (gimple_stmt_iterator *i)
+{
+  i->ptr = i->ptr->prev;
+}
+
+/* Return the current stmt.  */
+
+static inline gimple
+gsi_stmt (gimple_stmt_iterator i)
+{
+  return i.ptr->stmt;
+}
+
+/* Return a statement iterator that points to the first non-label
+   statement in basic block BB.  */
+
+static inline gimple_stmt_iterator
+gsi_after_labels (basic_block bb)
+{
+  gimple_stmt_iterator gsi = gsi_start_bb (bb);
+
+  while (!gsi_end_p (gsi) && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
+    gsi_next (&gsi);
+
+  return gsi;
+}
+
+/* Return a pointer to the current stmt.
+   
+  NOTE: You may want to use gsi_replace on the iterator itself,
+  as this performs additional bookkeeping that will not be done
+  if you simply assign through a pointer returned by gsi_stmt_ptr.  */
+
+static inline gimple *
+gsi_stmt_ptr (gimple_stmt_iterator *i)
+{
+  return &i->ptr->stmt;
+}
+
+
+/* Return the basic block associated with this iterator.  */
+
+static inline basic_block
+gsi_bb (gimple_stmt_iterator i)
+{
+  return i.bb;
+}
+
+
+/* Return the sequence associated with this iterator.  */
+
+static inline gimple_seq
+gsi_seq (gimple_stmt_iterator i)
+{
+  return i.seq;
+}
+
+
+enum gsi_iterator_update
+{
+  GSI_NEW_STMT,                /* Only valid when a single statement is added; move
+                          the iterator to it.  */
+  GSI_SAME_STMT,       /* Leave the iterator at the same statement.  */
+  GSI_CONTINUE_LINKING /* Move iterator to whatever position is suitable
+                          for linking other statements in the same
+                          direction.  */
+};
+
+/* In gimple-iterator.c  */
+gimple_stmt_iterator gsi_start_phis (basic_block);
+gimple_seq gsi_split_seq_after (gimple_stmt_iterator);
+gimple_seq gsi_split_seq_before (gimple_stmt_iterator *);
+void gsi_replace (gimple_stmt_iterator *, gimple, bool);
+void gsi_insert_before (gimple_stmt_iterator *, gimple,
+                       enum gsi_iterator_update);
+void gsi_insert_before_without_update (gimple_stmt_iterator *, gimple,
+                                       enum gsi_iterator_update);
+void gsi_insert_seq_before (gimple_stmt_iterator *, gimple_seq,
+                            enum gsi_iterator_update);
+void gsi_insert_seq_before_without_update (gimple_stmt_iterator *, gimple_seq,
+                                           enum gsi_iterator_update);
+void gsi_insert_after (gimple_stmt_iterator *, gimple,
+                      enum gsi_iterator_update);
+void gsi_insert_after_without_update (gimple_stmt_iterator *, gimple,
+                                      enum gsi_iterator_update);
+void gsi_insert_seq_after (gimple_stmt_iterator *, gimple_seq,
+                          enum gsi_iterator_update);
+void gsi_insert_seq_after_without_update (gimple_stmt_iterator *, gimple_seq,
+                                          enum gsi_iterator_update);
+void gsi_remove (gimple_stmt_iterator *, bool);
+gimple_stmt_iterator gsi_for_stmt (gimple);
+void gsi_move_after (gimple_stmt_iterator *, gimple_stmt_iterator *);
+void gsi_move_before (gimple_stmt_iterator *, gimple_stmt_iterator *);
+void gsi_move_to_bb_end (gimple_stmt_iterator *, struct basic_block_def *);
+void gsi_insert_on_edge (edge, gimple);
+void gsi_insert_seq_on_edge (edge, gimple_seq);
+basic_block gsi_insert_on_edge_immediate (edge, gimple);
+basic_block gsi_insert_seq_on_edge_immediate (edge, gimple_seq);
+void gsi_commit_one_edge_insert (edge, basic_block *);
+void gsi_commit_edge_inserts (void);
+
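To illustrate how the iterator accessors and the gsi_iterator_update modes above fit together, here is a minimal sketch of hypothetical pass code (insert_before_return is invented for the example; NEW_STMT is assumed to be a statement built by the caller):

static void
insert_before_return (basic_block bb, gimple new_stmt)
{
  gimple_stmt_iterator gsi;

  /* Walk the statements of BB; stop at the first GIMPLE_RETURN and
     insert NEW_STMT in front of it.  GSI_SAME_STMT leaves the
     iterator pointing at the return statement.  */
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    if (gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
      {
        gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
        break;
      }
}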
+
+/* Convenience routines to walk all statements of a gimple function.
+   Note that this is useful exclusively before the code is converted
+   into SSA form.  Once the program is in SSA form, the standard
+   operand interface should be used to analyze/modify statements.  */
+struct walk_stmt_info
+{
+  /* Points to the current statement being walked.  */
+  gimple_stmt_iterator gsi;
+
+  /* Additional data that the callback functions may want to carry
+     through the recursion.  */
+  void *info;
+
+  /* Pointer map used to mark visited tree nodes when calling
+     walk_tree on each operand.  If set to NULL, duplicate tree nodes
+     will be visited more than once.  */
+  struct pointer_set_t *pset;
+
+  /* Indicates whether the operand being examined may be replaced
+     with something that matches is_gimple_val (if true) or something
+     slightly more complicated (if false).  "Something" technically
+     means the common subset of is_gimple_lvalue and is_gimple_rhs,
+     but we never try to form anything more complicated than that, so
+     we don't bother checking.
+
+     Also note that CALLBACK should update this flag while walking the
+     sub-expressions of a statement.  For instance, when walking the
+     statement 'foo (&var)', the flag VAL_ONLY will initially be set
+     to true, however, when walking &var, the operand of that
+     ADDR_EXPR does not need to be a GIMPLE value.  */
+  bool val_only;
+
+  /* True if we are currently walking the LHS of an assignment.  */
+  bool is_lhs;
+
+  /* Optional.  Set to true by the callback functions if they made any
+     changes.  */
+  bool changed;
+
+  /* True if we're interested in location information.  */
+  bool want_locations;
+
+  /* Operand returned by the callbacks.  This is set when calling
+     walk_gimple_seq.  If the walk_stmt_fn or walk_tree_fn callback
+     returns non-NULL, this field will contain the tree returned by
+     the last callback.  */
+  tree callback_result;
+};
+
+/* Callback for walk_gimple_stmt.  Called for every statement found
+   during traversal.  The first argument points to the statement to
+   walk.  The second argument is a flag that the callback sets to
+   'true' if the callback handled all the operands and
+   sub-statements of the statement (the default value of this flag is
+   'false').  The third argument is an anonymous pointer to data
+   to be used by the callback.  */
+typedef tree (*walk_stmt_fn) (gimple_stmt_iterator *, bool *,
+                             struct walk_stmt_info *);
+
+gimple walk_gimple_seq (gimple_seq, walk_stmt_fn, walk_tree_fn,
+                       struct walk_stmt_info *);
+tree walk_gimple_stmt (gimple_stmt_iterator *, walk_stmt_fn, walk_tree_fn,
+                      struct walk_stmt_info *);
+tree walk_gimple_op (gimple, walk_tree_fn, struct walk_stmt_info *);
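
A short sketch of how the walker is typically driven.  The callback and wrapper below are hypothetical and only rely on the walk_stmt_info fields and walk_gimple_seq declaration above; memset is assumed to come from the usual system headers.

static tree
count_calls_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
                  struct walk_stmt_info *wi)
{
  if (gimple_code (gsi_stmt (*gsi)) == GIMPLE_CALL)
    ++*(unsigned *) wi->info;

  *handled_ops_p = true;    /* Operands need not be visited here.  */
  return NULL_TREE;         /* Returning NULL continues the walk.  */
}

static unsigned
count_calls_in_seq (gimple_seq seq)
{
  unsigned n = 0;
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = &n;
  walk_gimple_seq (seq, count_calls_stmt, NULL, &wi);
  return n;
}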
+
+#ifdef GATHER_STATISTICS
+/* Enum and arrays used for allocation stats.  Keep in sync with
+   gimple.c:gimple_alloc_kind_names.  */
+enum gimple_alloc_kind
+{
+  gimple_alloc_kind_assign,    /* Assignments.  */
+  gimple_alloc_kind_phi,       /* PHI nodes.  */
+  gimple_alloc_kind_cond,      /* Conditionals.  */
+  gimple_alloc_kind_seq,       /* Sequences.  */
+  gimple_alloc_kind_rest,      /* Everything else.  */
+  gimple_alloc_kind_all
+};
+
+extern int gimple_alloc_counts[];
+extern int gimple_alloc_sizes[];
+
+/* Return the allocation kind for a given stmt CODE.  */
+static inline enum gimple_alloc_kind
+gimple_alloc_kind (enum gimple_code code)
+{
+  switch (code)
+    {
+      case GIMPLE_ASSIGN:
+       return gimple_alloc_kind_assign;
+      case GIMPLE_PHI:
+       return gimple_alloc_kind_phi;
+      case GIMPLE_COND:
+       return gimple_alloc_kind_cond;
+      default:
+       return gimple_alloc_kind_rest;
+    }
+}
+#endif /* GATHER_STATISTICS */
+
+extern void dump_gimple_statistics (void);
+
+#endif  /* GCC_GIMPLE_H */
index b6bc9ca..0f08f3b 100644
@@ -28,7 +28,8 @@ along with GCC; see the file COPYING3.  If not see
 #include "tree.h"
 #include "rtl.h"
 #include "varray.h"
-#include "tree-gimple.h"
+#include "gimple.h"
+#include "tree-iterator.h"
 #include "tree-inline.h"
 #include "diagnostic.h"
 #include "langhooks.h"
@@ -49,6 +50,8 @@ along with GCC; see the file COPYING3.  If not see
 #include "optabs.h"
 #include "pointer-set.h"
 #include "splay-tree.h"
+#include "vec.h"
+#include "gimple.h"
 
 
 enum gimplify_omp_var_data
@@ -67,6 +70,7 @@ enum gimplify_omp_var_data
                           | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
 };
 
+
 enum omp_region_type
 {
   ORT_WORKSHARE = 0,
@@ -89,7 +93,6 @@ static struct gimplify_ctx *gimplify_ctxp;
 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
 
 
-
 /* Formal (expression) temporary table handling: Multiple occurrences of
    the same scalar expression are evaluated into the same temporary.  */
 
@@ -100,7 +103,7 @@ typedef struct gimple_temp_hash_elt
 } elt_t;
 
 /* Forward declarations.  */
-static enum gimplify_status gimplify_compound_expr (tree *, tree *, bool);
+static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
 
 /* Mark X addressable.  Unlike the langhook we expect X to be in gimple
    form and we don't do any syntax checking.  */
@@ -146,6 +149,49 @@ gimple_tree_eq (const void *p1, const void *p2)
   return 1;
 }
 
+/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
+   *SEQ_P is NULL, a new sequence is allocated.  This function is
+   similar to gimple_seq_add_stmt, but does not scan the operands.
+   During gimplification, we need to manipulate statement sequences
+   before the def/use vectors have been constructed.  */
+
+static void
+gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
+{
+  gimple_stmt_iterator si;
+
+  if (gs == NULL)
+    return;
+
+  if (*seq_p == NULL)
+    *seq_p = gimple_seq_alloc ();
+
+  si = gsi_last (*seq_p);
+
+  gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
+}
+
+/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
+   NULL, a new sequence is allocated.   This function is
+   similar to gimple_seq_add_seq, but does not scan the operands.
+   During gimplification, we need to manipulate statement sequences
+   before the def/use vectors have been constructed.  */
+
+static void
+gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
+{
+  gimple_stmt_iterator si;
+
+  if (src == NULL)
+    return;
+
+  if (*dst_p == NULL)
+    *dst_p = gimple_seq_alloc ();
+
+  si = gsi_last (*dst_p);
+  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
+}
+
 /* Set up a context for the gimplifier.  */
 
 void
@@ -158,15 +204,18 @@ push_gimplify_context (struct gimplify_ctx *c)
 
 /* Tear down a context for the gimplifier.  If BODY is non-null, then
    put the temporaries into the outer BIND_EXPR.  Otherwise, put them
-   in the local_decls.  */
+   in the local_decls.
+
+   BODY is not a sequence, but the first tuple in a sequence.  */
 
 void
-pop_gimplify_context (tree body)
+pop_gimplify_context (gimple body)
 {
   struct gimplify_ctx *c = gimplify_ctxp;
   tree t;
 
-  gcc_assert (c && !c->current_bind_expr);
+  gcc_assert (c && (c->bind_expr_stack == NULL
+                   || VEC_empty (gimple, c->bind_expr_stack)));
   gimplify_ctxp = c->prev_context;
 
   for (t = c->temps; t ; t = TREE_CHAIN (t))
@@ -182,23 +231,31 @@ pop_gimplify_context (tree body)
 }
 
 static void
-gimple_push_bind_expr (tree bind)
+gimple_push_bind_expr (gimple gimple_bind)
 {
-  TREE_CHAIN (bind) = gimplify_ctxp->current_bind_expr;
-  gimplify_ctxp->current_bind_expr = bind;
+  if (gimplify_ctxp->bind_expr_stack == NULL)
+    gimplify_ctxp->bind_expr_stack = VEC_alloc (gimple, heap, 8);
+  VEC_safe_push (gimple, heap, gimplify_ctxp->bind_expr_stack, gimple_bind);
 }
 
 static void
 gimple_pop_bind_expr (void)
 {
-  gimplify_ctxp->current_bind_expr
-    = TREE_CHAIN (gimplify_ctxp->current_bind_expr);
+  VEC_pop (gimple, gimplify_ctxp->bind_expr_stack);
 }
 
-tree
+gimple
 gimple_current_bind_expr (void)
 {
-  return gimplify_ctxp->current_bind_expr;
+  return VEC_last (gimple, gimplify_ctxp->bind_expr_stack);
+}
+
+/* Return the stack GIMPLE_BINDs created during gimplification.  */
+
+VEC(gimple, heap) *
+gimple_bind_expr_stack (void)
+{
+  return gimplify_ctxp->bind_expr_stack;
 }
 
 /* Returns true iff there is a COND_EXPR between us and the innermost
@@ -215,9 +272,9 @@ gimple_conditional_context (void)
 static void
 gimple_push_condition (void)
 {
-#ifdef ENABLE_CHECKING
+#ifdef ENABLE_GIMPLE_CHECKING
   if (gimplify_ctxp->conditions == 0)
-    gcc_assert (!gimplify_ctxp->conditional_cleanups);
+    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
 #endif
   ++(gimplify_ctxp->conditions);
 }
@@ -226,15 +283,15 @@ gimple_push_condition (void)
    now, add any conditional cleanups we've seen to the prequeue.  */
 
 static void
-gimple_pop_condition (tree *pre_p)
+gimple_pop_condition (gimple_seq *pre_p)
 {
   int conds = --(gimplify_ctxp->conditions);
 
   gcc_assert (conds >= 0);
   if (conds == 0)
     {
-      append_to_statement_list (gimplify_ctxp->conditional_cleanups, pre_p);
-      gimplify_ctxp->conditional_cleanups = NULL_TREE;
+      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
+      gimplify_ctxp->conditional_cleanups = NULL;
     }
 }
 
@@ -324,13 +381,34 @@ append_to_statement_list_force (tree t, tree *list_p)
     append_to_statement_list_1 (t, list_p);
 }
 
-/* Both gimplify the statement T and append it to LIST_P.  */
+/* Both gimplify the statement T and append it to *SEQ_P.  This function
+   behaves exactly as gimplify_stmt, but you don't have to pass T as a
+   reference.  */
 
 void
-gimplify_and_add (tree t, tree *list_p)
+gimplify_and_add (tree t, gimple_seq *seq_p)
+{
+  gimplify_stmt (&t, seq_p);
+}
+
+/* Gimplify statement T into sequence *SEQ_P, and return the first
+   tuple in the sequence of generated tuples for this statement.
+   Return NULL if gimplifying T produced no tuples.  */
+
+static gimple
+gimplify_and_return_first (tree t, gimple_seq *seq_p)
 {
-  gimplify_stmt (&t);
-  append_to_statement_list (t, list_p);
+  gimple_stmt_iterator last = gsi_last (*seq_p);
+
+  gimplify_and_add (t, seq_p);
+
+  if (!gsi_end_p (last))
+    {
+      gsi_next (&last);
+      return gsi_stmt (last);
+    }
+  else
+    return gimple_seq_first_stmt (*seq_p);
 }
 
 /* Strip off a legitimate source ending from the input string NAME of
@@ -353,20 +431,6 @@ remove_suffix (char *name, int len)
     }
 }
 
-/* Create a nameless artificial label and put it in the current function
-   context.  Returns the newly created label.  */
-
-tree
-create_artificial_label (void)
-{
-  tree lab = build_decl (LABEL_DECL, NULL_TREE, void_type_node);
-
-  DECL_ARTIFICIAL (lab) = 1;
-  DECL_IGNORED_P (lab) = 1;
-  DECL_CONTEXT (lab) = current_function_decl;
-  return lab;
-}
-
 /* Subroutine for find_single_pointer_decl.  */
 
 static tree
@@ -494,31 +558,6 @@ create_tmp_var (tree type, const char *prefix)
   return tmp_var;
 }
 
-/*  Given a tree, try to return a useful variable name that we can use
-    to prefix a temporary that is being assigned the value of the tree.
-    I.E. given  <temp> = &A, return A.  */
-
-const char *
-get_name (const_tree t)
-{
-  const_tree stripped_decl;
-
-  stripped_decl = t;
-  STRIP_NOPS (stripped_decl);
-  if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
-    return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
-  else
-    {
-      switch (TREE_CODE (stripped_decl))
-       {
-       case ADDR_EXPR:
-         return get_name (TREE_OPERAND (stripped_decl, 0));
-       default:
-         return NULL;
-       }
-    }
-}
-
 /* Create a temporary with a name derived from VAL.  Subroutine of
    lookup_tmp_var; nobody else should call this function.  */
 
@@ -573,6 +612,62 @@ lookup_tmp_var (tree val, bool is_formal)
   return ret;
 }
 
+
+/* Return true if T is a CALL_EXPR or an expression that can be
+   assigned to a temporary.  Note that this predicate should only be
+   used during gimplification.  See the rationale for this in
+   gimplify_modify_expr.  */
+
+static bool
+is_gimple_formal_tmp_or_call_rhs (tree t)
+{
+  return TREE_CODE (t) == CALL_EXPR || is_gimple_formal_tmp_rhs (t);
+}
+
+/* Returns true iff T is a valid RHS for an assignment to a renamed
+   user -- or front-end generated artificial -- variable.  */
+
+static bool
+is_gimple_reg_or_call_rhs (tree t)
+{
+  /* If the RHS of the MODIFY_EXPR may throw or make a nonlocal goto
+     and the LHS is a user variable, then we need to introduce a formal
+     temporary.  This way the optimizers can determine that the user
+     variable is only modified if evaluation of the RHS does not throw.
+
+     Don't force a temp of a non-renamable type; the copy could be
+     arbitrarily expensive.  Instead we will generate a VDEF for
+     the assignment.  */
+
+  if (is_gimple_reg_type (TREE_TYPE (t))
+      && ((TREE_CODE (t) == CALL_EXPR && TREE_SIDE_EFFECTS (t))
+         || tree_could_throw_p (t)))
+    return false;
+
+  return is_gimple_formal_tmp_or_call_rhs (t);
+}
+
+/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
+   this predicate should only be used during gimplification.  See the
+   rationale for this in gimplify_modify_expr.  */
+
+static bool
+is_gimple_mem_or_call_rhs (tree t)
+{
+  /* If we're dealing with a renamable type, either source or dest must be
+     a renamed variable.  Also force a temporary if the type doesn't need
+     to be stored in memory, since it's cheap and prevents erroneous
+     tailcalls (PR 17526).  */
+  if (is_gimple_reg_type (TREE_TYPE (t))
+      || (TYPE_MODE (TREE_TYPE (t)) != BLKmode
+         && (TREE_CODE (t) != CALL_EXPR
+              || ! aggregate_value_p (t, t))))
+    return is_gimple_val (t);
+  else
+    return is_gimple_formal_tmp_or_call_rhs (t);
+}
+
+
 /* Returns a formal temporary variable initialized with VAL.  PRE_P is as
    in gimplify_expr.  Only use this function if:
 
@@ -586,11 +681,15 @@ lookup_tmp_var (tree val, bool is_formal)
    For other cases, use get_initialized_tmp_var instead.  */
 
 static tree
-internal_get_tmp_var (tree val, tree *pre_p, tree *post_p, bool is_formal)
+internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
+                      bool is_formal)
 {
   tree t, mod;
 
-  gimplify_expr (&val, pre_p, post_p, is_gimple_formal_tmp_rhs, fb_rvalue);
+  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
+     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
+  gimplify_expr (&val, pre_p, post_p, is_gimple_formal_tmp_or_call_rhs,
+                fb_rvalue);
 
   t = lookup_tmp_var (val, is_formal);
 
@@ -625,21 +724,25 @@ internal_get_tmp_var (tree val, tree *pre_p, tree *post_p, bool is_formal)
 
   /* gimplify_modify_expr might want to reduce this further.  */
   gimplify_and_add (mod, pre_p);
+  ggc_free (mod);
 
   /* If we're gimplifying into ssa, gimplify_modify_expr will have
-     given our temporary an ssa name.  Find and return it.  */
+     given our temporary an SSA name.  Find and return it.  */
   if (gimplify_ctxp->into_ssa)
-    t = TREE_OPERAND (mod, 0);
+    {
+      gimple last = gimple_seq_last_stmt (*pre_p);
+      t = gimple_get_lhs (last);
+    }
 
   return t;
 }
 
 /* Returns a formal temporary variable initialized with VAL.  PRE_P
-   points to a statement list where side-effects needed to compute VAL
-   should be stored.  */
+   points to a sequence where side-effects needed to compute VAL should be
+   stored.  */
 
 tree
-get_formal_tmp_var (tree val, tree *pre_p)
+get_formal_tmp_var (tree val, gimple_seq *pre_p)
 {
   return internal_get_tmp_var (val, pre_p, NULL, true);
 }
@@ -648,7 +751,7 @@ get_formal_tmp_var (tree val, tree *pre_p)
    are as in gimplify_expr.  */
 
 tree
-get_initialized_tmp_var (tree val, tree *pre_p, tree *post_p)
+get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
 {
   return internal_get_tmp_var (val, pre_p, post_p, false);
 }
@@ -657,27 +760,23 @@ get_initialized_tmp_var (tree val, tree *pre_p, tree *post_p)
    true, generate debug info for them; otherwise don't.  */
 
 void
-declare_vars (tree vars, tree scope, bool debug_info)
+declare_vars (tree vars, gimple scope, bool debug_info)
 {
   tree last = vars;
   if (last)
     {
       tree temps, block;
 
-      /* C99 mode puts the default 'return 0;' for main outside the outer
-        braces.  So drill down until we find an actual scope.  */
-      while (TREE_CODE (scope) == COMPOUND_EXPR)
-       scope = TREE_OPERAND (scope, 0);
-
-      gcc_assert (TREE_CODE (scope) == BIND_EXPR);
+      gcc_assert (gimple_code (scope) == GIMPLE_BIND);
 
       temps = nreverse (last);
 
-      block = BIND_EXPR_BLOCK (scope);
+      block = gimple_block (scope);
+      gcc_assert (!block || TREE_CODE (block) == BLOCK);
       if (!block || !debug_info)
        {
-         TREE_CHAIN (last) = BIND_EXPR_VARS (scope);
-         BIND_EXPR_VARS (scope) = temps;
+         TREE_CHAIN (last) = gimple_bind_vars (scope);
+         gimple_bind_set_vars (scope, temps);
        }
       else
        {
@@ -689,7 +788,8 @@ declare_vars (tree vars, tree scope, bool debug_info)
            BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
          else
            {
-             BIND_EXPR_VARS (scope) = chainon (BIND_EXPR_VARS (scope), temps);
+             gimple_bind_set_vars (scope,
+                                   chainon (gimple_bind_vars (scope), temps));
              BLOCK_VARS (block) = temps;
            }
        }
@@ -752,13 +852,34 @@ gimple_add_tmp_var (tree tmp)
   else if (cfun)
     record_vars (tmp);
   else
-    declare_vars (tmp, DECL_SAVED_TREE (current_function_decl), false);
+    {
+      gimple_seq body_seq;
+
+      /* This case is for nested functions.  We need to expose the locals
+        they create.  */
+      body_seq = gimple_body (current_function_decl);
+      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
+    }
+}
+
+/* Determines whether to assign a location to the statement GS.  */
+
+static bool
+should_carry_location_p (gimple gs)
+{
+  /* Don't emit a line note for a label.  We particularly don't want to
+     emit one for the break label, since it doesn't actually correspond
+     to the beginning of the loop/switch.  */
+  if (gimple_code (gs) == GIMPLE_LABEL)
+    return false;
+
+  return true;
 }
 
-/* Determines whether to assign a locus to the statement STMT.  */
+/* Same, but for a tree.  */
 
 static bool
-should_carry_locus_p (const_tree stmt)
+tree_should_carry_location_p (const_tree stmt)
 {
   /* Don't emit a line note for a label.  We particularly don't want to
      emit one for the break label, since it doesn't actually correspond
@@ -773,16 +894,87 @@ should_carry_locus_p (const_tree stmt)
   return true;
 }
 
+/* Return true if a location should not be emitted for this statement
+   by annotate_one_with_location.  */
+
+static inline bool
+gimple_do_not_emit_location_p (gimple g)
+{
+  return gimple_plf (g, GF_PLF_1);
+}
+
+/* Mark statement G so a location will not be emitted by
+   annotate_one_with_location.  */
+
+static inline void
+gimple_set_do_not_emit_location (gimple g)
+{
+  /* The PLF flags are initialized to 0 when a new tuple is created,
+     so no need to initialize it anywhere.  */
+  gimple_set_plf (g, GF_PLF_1, true);
+}
+
+/* Set the location for gimple statement GS to LOCUS.  */
+
+static void
+annotate_one_with_location (gimple gs, location_t location)
+{
+  if (!gimple_has_location (gs) 
+      && !gimple_do_not_emit_location_p (gs)
+      && should_carry_location_p (gs))
+    gimple_set_location (gs, location);
+}
+
+/* Same, but for tree T.  */
+
 static void
-annotate_one_with_locus (tree t, location_t locus)
+tree_annotate_one_with_location (tree t, location_t location)
 {
   if (CAN_HAVE_LOCATION_P (t)
-      && ! EXPR_HAS_LOCATION (t) && should_carry_locus_p (t))
-    SET_EXPR_LOCATION (t, locus);
+      && ! EXPR_HAS_LOCATION (t) && tree_should_carry_location_p (t))
+    SET_EXPR_LOCATION (t, location);
+}
+
+
+/* Set LOCATION for all the statements after iterator GSI in sequence
+   SEQ.  If GSI is pointing to the end of the sequence, start with the
+   first statement in SEQ.  */
+
+static void
+annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
+                                 location_t location)
+{
+  if (gsi_end_p (gsi))
+    gsi = gsi_start (seq);
+  else
+    gsi_next (&gsi);
+
+  for (; !gsi_end_p (gsi); gsi_next (&gsi))
+    annotate_one_with_location (gsi_stmt (gsi), location);
+}
+
+
+/* Set the location for all the statements in a sequence STMT_P to LOCUS.  */
+
+void
+annotate_all_with_location (gimple_seq stmt_p, location_t location)
+{
+  gimple_stmt_iterator i;
+
+  if (gimple_seq_empty_p (stmt_p))
+    return;
+
+  for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
+    {
+      gimple gs = gsi_stmt (i);
+      annotate_one_with_location (gs, location);
+    }
 }
 
+/* Same, but for statement or statement list in *STMT_P.  */
+
 void
-annotate_all_with_locus (tree *stmt_p, location_t locus)
+tree_annotate_all_with_location (tree *stmt_p, location_t location)
 {
   tree_stmt_iterator i;
 
@@ -798,10 +990,11 @@ annotate_all_with_locus (tree *stmt_p, location_t locus)
       gcc_assert (TREE_CODE (t) != STATEMENT_LIST
                  && TREE_CODE (t) != COMPOUND_EXPR);
 
-      annotate_one_with_locus (t, locus);
+      tree_annotate_one_with_location (t, location);
     }
 }
 
+
 /* Similar to copy_tree_r() but do not copy SAVE_EXPR or TARGET_EXPR nodes.
    These nodes model computations that should only be done once.  If we
    were to unshare something like SAVE_EXPR(i++), the gimplification
@@ -915,15 +1108,6 @@ unvisit_body (tree *body_p, tree fndecl)
       unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
 }
 
-/* Unshare T and all the trees reached from T via TREE_CHAIN.  */
-
-static void
-unshare_all_trees (tree t)
-{
-  walk_tree (&t, copy_if_shared_r, NULL, NULL);
-  walk_tree (&t, unmark_visited_r, NULL, NULL);
-}
-
 /* Unconditionally make an unshared copy of EXPR.  This is used when using
    stored expressions which span multiple functions, such as BINFO_VTABLE,
    as the normal unsharing process can't tell that they're shared.  */
@@ -934,25 +1118,6 @@ unshare_expr (tree expr)
   walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
   return expr;
 }
-
-/* A terser interface for building a representation of an exception
-   specification.  */
-
-tree
-gimple_build_eh_filter (tree body, tree allowed, tree failure)
-{
-  tree t;
-
-  /* FIXME should the allowed types go in TREE_TYPE?  */
-  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
-  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
-
-  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
-  append_to_statement_list (body, &TREE_OPERAND (t, 0));
-
-  return t;
-}
-
 \f
 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
    contain statements and have a value.  Assign its value to a temporary
@@ -1019,9 +1184,8 @@ voidify_wrapper_expr (tree wrapper, tree temp)
          /* The wrapper is on the RHS of an assignment that we're pushing
             down.  */
          gcc_assert (TREE_CODE (temp) == INIT_EXPR
-                     || TREE_CODE (temp) == GIMPLE_MODIFY_STMT
                      || TREE_CODE (temp) == MODIFY_EXPR);
-         GENERIC_TREE_OPERAND (temp, 1) = *p;
+         TREE_OPERAND (temp, 1) = *p;
          *p = temp;
        }
       else
@@ -1040,28 +1204,28 @@ voidify_wrapper_expr (tree wrapper, tree temp)
    a temporary through which they communicate.  */
 
 static void
-build_stack_save_restore (tree *save, tree *restore)
+build_stack_save_restore (gimple *save, gimple *restore)
 {
-  tree save_call, tmp_var;
+  tree tmp_var;
 
-  save_call =
-    build_call_expr (implicit_built_in_decls[BUILT_IN_STACK_SAVE], 0);
+  *save = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_SAVE], 0);
   tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
+  gimple_call_set_lhs (*save, tmp_var);
 
-  *save = build_gimple_modify_stmt (tmp_var, save_call);
-  *restore =
-    build_call_expr (implicit_built_in_decls[BUILT_IN_STACK_RESTORE],
-                    1, tmp_var);
+  *restore = gimple_build_call (implicit_built_in_decls[BUILT_IN_STACK_RESTORE],
+                           1, tmp_var);
 }
 
 /* Gimplify a BIND_EXPR.  Just voidify and recurse.  */
 
 static enum gimplify_status
-gimplify_bind_expr (tree *expr_p, tree *pre_p)
+gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
 {
   tree bind_expr = *expr_p;
   bool old_save_stack = gimplify_ctxp->save_stack;
   tree t;
+  gimple gimple_bind;
+  gimple_seq body;
 
   tree temp = voidify_wrapper_expr (bind_expr, NULL);
 
@@ -1093,70 +1257,90 @@ gimplify_bind_expr (tree *expr_p, tree *pre_p)
        DECL_GIMPLE_REG_P (t) = 1;
     }
 
-  gimple_push_bind_expr (bind_expr);
+  gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
+                                   BIND_EXPR_BLOCK (bind_expr));
+  gimple_push_bind_expr (gimple_bind);
+
   gimplify_ctxp->save_stack = false;
 
-  gimplify_to_stmt_list (&BIND_EXPR_BODY (bind_expr));
+  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
+  body = NULL;
+  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
+  gimple_bind_set_body (gimple_bind, body);
 
   if (gimplify_ctxp->save_stack)
     {
-      tree stack_save, stack_restore;
+      gimple stack_save, stack_restore, gs;
+      gimple_seq cleanup, new_body;
 
       /* Save stack on entry and restore it on exit.  Add a try_finally
         block to achieve this.  Note that mudflap depends on the
         format of the emitted code: see mx_register_decls().  */
       build_stack_save_restore (&stack_save, &stack_restore);
 
-      t = build2 (TRY_FINALLY_EXPR, void_type_node,
-                 BIND_EXPR_BODY (bind_expr), NULL_TREE);
-      append_to_statement_list (stack_restore, &TREE_OPERAND (t, 1));
+      cleanup = new_body = NULL;
+      gimplify_seq_add_stmt (&cleanup, stack_restore);
+      gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
+                            GIMPLE_TRY_FINALLY);
 
-      BIND_EXPR_BODY (bind_expr) = NULL_TREE;
-      append_to_statement_list (stack_save, &BIND_EXPR_BODY (bind_expr));
-      append_to_statement_list (t, &BIND_EXPR_BODY (bind_expr));
+      gimplify_seq_add_stmt (&new_body, stack_save);
+      gimplify_seq_add_stmt (&new_body, gs);
+      gimple_bind_set_body (gimple_bind, new_body);
     }
 
   gimplify_ctxp->save_stack = old_save_stack;
   gimple_pop_bind_expr ();
 
+  gimplify_seq_add_stmt (pre_p, gimple_bind);
+
   if (temp)
     {
       *expr_p = temp;
-      append_to_statement_list (bind_expr, pre_p);
       return GS_OK;
     }
-  else
-    return GS_ALL_DONE;
+
+  *expr_p = NULL_TREE;
+  return GS_ALL_DONE;
 }
 
 /* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
    GIMPLE value, it is assigned to a new temporary and the statement is
    re-written to return the temporary.
 
-   PRE_P points to the list where side effects that must happen before
+   PRE_P points to the sequence where side effects that must happen before
    STMT should be stored.  */
 
 static enum gimplify_status
-gimplify_return_expr (tree stmt, tree *pre_p)
+gimplify_return_expr (tree stmt, gimple_seq *pre_p)
 {
+  gimple ret;
   tree ret_expr = TREE_OPERAND (stmt, 0);
   tree result_decl, result;
 
-  if (!ret_expr || TREE_CODE (ret_expr) == RESULT_DECL
+  if (ret_expr == error_mark_node)
+    return GS_ERROR;
+
+  if (!ret_expr
+      || TREE_CODE (ret_expr) == RESULT_DECL
       || ret_expr == error_mark_node)
-    return GS_ALL_DONE;
+    {
+      gimple ret = gimple_build_return (ret_expr);
+      gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
+      gimplify_seq_add_stmt (pre_p, ret);
+      return GS_ALL_DONE;
+    }
 
   if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
     result_decl = NULL_TREE;
   else
     {
-      result_decl = GENERIC_TREE_OPERAND (ret_expr, 0);
+      result_decl = TREE_OPERAND (ret_expr, 0);
+
+      /* See through a return by reference.  */
       if (TREE_CODE (result_decl) == INDIRECT_REF)
-       /* See through a return by reference.  */
        result_decl = TREE_OPERAND (result_decl, 0);
 
       gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
-                  || TREE_CODE (ret_expr) == GIMPLE_MODIFY_STMT
                   || TREE_CODE (ret_expr) == INIT_EXPR)
                  && TREE_CODE (result_decl) == RESULT_DECL);
     }
@@ -1190,34 +1374,30 @@ gimplify_return_expr (tree stmt, tree *pre_p)
       gimplify_ctxp->return_temp = result;
     }
 
-  /* Smash the lhs of the GIMPLE_MODIFY_STMT to the temporary we plan to use.
+  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
      Then gimplify the whole thing.  */
   if (result != result_decl)
-    GENERIC_TREE_OPERAND (ret_expr, 0) = result;
+    TREE_OPERAND (ret_expr, 0) = result;
 
   gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
 
-  /* If we didn't use a temporary, then the result is just the result_decl.
-     Otherwise we need a simple copy.  This should already be gimple.  */
-  if (result == result_decl)
-    ret_expr = result;
-  else
-    ret_expr = build_gimple_modify_stmt (result_decl, result);
-  TREE_OPERAND (stmt, 0) = ret_expr;
+  ret = gimple_build_return (result);
+  gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
+  gimplify_seq_add_stmt (pre_p, ret);
 
   return GS_ALL_DONE;
 }
 
 static void
-gimplify_vla_decl (tree decl, tree *stmt_p)
+gimplify_vla_decl (tree decl, gimple_seq *seq_p)
 {
   /* This is a variable-sized decl.  Simplify its size and mark it
      for deferred expansion.  Note that mudflap depends on the format
      of the emitted code: see mx_register_decls().  */
   tree t, addr, ptr_type;
 
-  gimplify_one_sizepos (&DECL_SIZE (decl), stmt_p);
-  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), stmt_p);
+  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
+  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
 
   /* All occurrences of this decl in final gimplified code will be
      replaced by indirection.  Setting DECL_VALUE_EXPR does two
@@ -1234,20 +1414,21 @@ gimplify_vla_decl (tree decl, tree *stmt_p)
   t = built_in_decls[BUILT_IN_ALLOCA];
   t = build_call_expr (t, 1, DECL_SIZE_UNIT (decl));
   t = fold_convert (ptr_type, t);
-  t = build_gimple_modify_stmt (addr, t);
+  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
 
-  gimplify_and_add (t, stmt_p);
+  gimplify_and_add (t, seq_p);
 
   /* Indicate that we need to restore the stack level when the
      enclosing BIND_EXPR is exited.  */
   gimplify_ctxp->save_stack = true;
 }
 
+
 /* Gimplifies a DECL_EXPR node *STMT_P by making any necessary allocation
    and initialization explicit.  */
 
 static enum gimplify_status
-gimplify_decl_expr (tree *stmt_p)
+gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
 {
   tree stmt = *stmt_p;
   tree decl = DECL_EXPR_DECL (stmt);
@@ -1260,14 +1441,14 @@ gimplify_decl_expr (tree *stmt_p)
   if ((TREE_CODE (decl) == TYPE_DECL
        || TREE_CODE (decl) == VAR_DECL)
       && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
-    gimplify_type_sizes (TREE_TYPE (decl), stmt_p);
+    gimplify_type_sizes (TREE_TYPE (decl), seq_p);
 
   if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
     {
       tree init = DECL_INITIAL (decl);
 
       if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
-       gimplify_vla_decl (decl, stmt_p);
+       gimplify_vla_decl (decl, seq_p);
 
       if (init && init != error_mark_node)
        {
@@ -1275,7 +1456,8 @@ gimplify_decl_expr (tree *stmt_p)
            {
              DECL_INITIAL (decl) = NULL_TREE;
              init = build2 (INIT_EXPR, void_type_node, decl, init);
-             gimplify_and_add (init, stmt_p);
+             gimplify_and_add (init, seq_p);
+             ggc_free (init);
            }
          else
            /* We must still examine initializers for static variables
@@ -1300,27 +1482,49 @@ gimplify_decl_expr (tree *stmt_p)
    EXIT_EXPR, we need to append a label for it to jump to.  */
 
 static enum gimplify_status
-gimplify_loop_expr (tree *expr_p, tree *pre_p)
+gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
 {
   tree saved_label = gimplify_ctxp->exit_label;
-  tree start_label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
-  tree jump_stmt = build_and_jump (&LABEL_EXPR_LABEL (start_label));
+  tree start_label = create_artificial_label ();
 
-  append_to_statement_list (start_label, pre_p);
+  gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
 
   gimplify_ctxp->exit_label = NULL_TREE;
 
   gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
 
+  gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
+
   if (gimplify_ctxp->exit_label)
+    gimplify_seq_add_stmt (pre_p, gimple_build_label (gimplify_ctxp->exit_label));
+
+  gimplify_ctxp->exit_label = saved_label;
+
+  *expr_p = NULL;
+  return GS_ALL_DONE;
+}
+
+/* Gimplifies a statement list onto a sequence.  These may be created either
+   by an enlightened front-end, or by shortcut_cond_expr.  */
+
+static enum gimplify_status
+gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
+{
+  tree temp = voidify_wrapper_expr (*expr_p, NULL);
+
+  tree_stmt_iterator i = tsi_start (*expr_p);
+
+  while (!tsi_end_p (i))
     {
-      append_to_statement_list (jump_stmt, pre_p);
-      *expr_p = build1 (LABEL_EXPR, void_type_node, gimplify_ctxp->exit_label);
+      gimplify_stmt (tsi_stmt_ptr (i), pre_p);
+      tsi_delink (&i);
     }
-  else
-    *expr_p = jump_stmt;
 
-  gimplify_ctxp->exit_label = saved_label;
+  if (temp)
+    {
+      *expr_p = temp;
+      return GS_OK;
+    }
 
   return GS_ALL_DONE;
 }
@@ -1335,70 +1539,63 @@ compare_case_labels (const void *p1, const void *p2)
   const_tree const case1 = *(const_tree const*)p1;
   const_tree const case2 = *(const_tree const*)p2;
 
-  return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
+  /* The 'default' case label always goes first.  */
+  if (!CASE_LOW (case1))
+    return -1;
+  else if (!CASE_LOW (case2))
+    return 1;
+  else
+    return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
 }
 
+
 /* Sort the case labels in LABEL_VEC in place in ascending order.  */
 
 void
-sort_case_labels (tree label_vec)
+sort_case_labels (VEC(tree,heap)* label_vec)
 {
-  size_t len = TREE_VEC_LENGTH (label_vec);
-  tree default_case = TREE_VEC_ELT (label_vec, len - 1);
-
-  if (CASE_LOW (default_case))
-    {
-      size_t i;
-
-      /* The last label in the vector should be the default case
-         but it is not.  */
-      for (i = 0; i < len; ++i)
-       {
-         tree t = TREE_VEC_ELT (label_vec, i);
-         if (!CASE_LOW (t))
-           {
-             default_case = t;
-             TREE_VEC_ELT (label_vec, i) = TREE_VEC_ELT (label_vec, len - 1);
-             TREE_VEC_ELT (label_vec, len - 1) = default_case;
-             break;
-           }
-       }
-    }
-
-  qsort (&TREE_VEC_ELT (label_vec, 0), len - 1, sizeof (tree),
-        compare_case_labels);
+  size_t len = VEC_length (tree, label_vec);
+  qsort (VEC_address (tree, label_vec), len, sizeof (tree),
+         compare_case_labels);
 }
 
+
 /* Gimplify a SWITCH_EXPR, and collect a TREE_VEC of the labels it can
    branch to.  */
 
 static enum gimplify_status
-gimplify_switch_expr (tree *expr_p, tree *pre_p)
+gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
 {
   tree switch_expr = *expr_p;
+  gimple_seq switch_body_seq = NULL;
   enum gimplify_status ret;
 
-  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL,
-                      is_gimple_val, fb_rvalue);
+  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
+                       fb_rvalue);
+  if (ret == GS_ERROR || ret == GS_UNHANDLED)
+    return ret;
 
   if (SWITCH_BODY (switch_expr))
     {
-      VEC(tree,heap) *labels, *saved_labels;
-      tree label_vec, default_case = NULL_TREE;
+      VEC (tree,heap) *labels;
+      VEC (tree,heap) *saved_labels;
+      tree default_case = NULL_TREE;
       size_t i, len;
-
+      gimple gimple_switch;
+      
       /* If someone can be bothered to fill in the labels, they can
         be bothered to null out the body too.  */
       gcc_assert (!SWITCH_LABELS (switch_expr));
 
+      /* Save the old labels, get new ones from the body, then restore the
+         old labels.  Save everything from the switch body so it can be
+         appended after the GIMPLE_SWITCH.  */
       saved_labels = gimplify_ctxp->case_labels;
       gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);
 
-      gimplify_to_stmt_list (&SWITCH_BODY (switch_expr));
-
+      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
       labels = gimplify_ctxp->case_labels;
       gimplify_ctxp->case_labels = saved_labels;
-
       i = 0;
       while (i < VEC_length (tree, labels))
        {
@@ -1428,44 +1625,39 @@ gimplify_switch_expr (tree *expr_p, tree *pre_p)
        }
       len = i;
 
-      label_vec = make_tree_vec (len + 1);
-      SWITCH_LABELS (*expr_p) = label_vec;
-      append_to_statement_list (switch_expr, pre_p);
-
-      if (! default_case)
+      if (!default_case)
        {
+         gimple new_default;
+
          /* If the switch has no default label, add one, so that we jump
             around the switch body.  */
          default_case = build3 (CASE_LABEL_EXPR, void_type_node, NULL_TREE,
-                                NULL_TREE, create_artificial_label ());
-         append_to_statement_list (SWITCH_BODY (switch_expr), pre_p);
-         *expr_p = build1 (LABEL_EXPR, void_type_node,
-                           CASE_LABEL (default_case));
+                                NULL_TREE, create_artificial_label ());
+         new_default = gimple_build_label (CASE_LABEL (default_case));
+         gimplify_seq_add_stmt (&switch_body_seq, new_default);
        }
-      else
-       *expr_p = SWITCH_BODY (switch_expr);
 
-      for (i = 0; i < len; ++i)
-       TREE_VEC_ELT (label_vec, i) = VEC_index (tree, labels, i);
-      TREE_VEC_ELT (label_vec, len) = default_case;
+      if (!VEC_empty (tree, labels))
+       sort_case_labels (labels);
 
-      VEC_free (tree, heap, labels);
-
-      sort_case_labels (label_vec);
-
-      SWITCH_BODY (switch_expr) = NULL;
+      gimple_switch = gimple_build_switch_vec (SWITCH_COND (switch_expr), 
+                                               default_case, labels);
+      gimplify_seq_add_stmt (pre_p, gimple_switch);
+      gimplify_seq_add_seq (pre_p, switch_body_seq);
+      VEC_free (tree, heap, labels);
     }
   else
     gcc_assert (SWITCH_LABELS (switch_expr));
 
-  return ret;
+  return GS_ALL_DONE;
 }
 
+
 static enum gimplify_status
-gimplify_case_label_expr (tree *expr_p)
+gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
 {
-  tree expr = *expr_p;
   struct gimplify_ctx *ctxp;
+  gimple gimple_label;
 
   /* Invalid OpenMP programs can play Duff's Device type games with
      #pragma omp parallel.  At least in the C front end, we don't
@@ -1474,8 +1666,10 @@ gimplify_case_label_expr (tree *expr_p)
     if (ctxp->case_labels)
       break;
 
-  VEC_safe_push (tree, heap, ctxp->case_labels, expr);
-  *expr_p = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (expr));
+  gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
+  VEC_safe_push (tree, heap, ctxp->case_labels, *expr_p);
+  gimplify_seq_add_stmt (pre_p, gimple_label);
+
   return GS_ALL_DONE;
 }
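Taken together, gimplify_switch_expr and gimplify_case_label_expr now emit a GIMPLE_SWITCH tuple into PRE_P followed by the gimplified body, whose case labels have already been turned into GIMPLE_LABELs and collected in the context's label vector; a synthesized default label lands after the body so control jumps around it.  A rough picture (illustrative only, not actual dump syntax):

    switch (cond) <default: D, case 1: L1, case 2: L2>
    L1: ... statements for case 1 ...
    L2: ... statements for case 2 ...
    D:                            /* synthesized when the source had no default */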
 
@@ -1720,7 +1914,7 @@ gimplify_var_or_parm_decl (tree *expr_p)
 
 
 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
-   node pointed to by EXPR_P.
+   node *EXPR_P.
 
       compound_lval
              : min_lval '[' val ']'
@@ -1735,15 +1929,15 @@ gimplify_var_or_parm_decl (tree *expr_p)
    union reference must be explicit, which was not always the case when we
    were splitting up array and member refs.
 
-   PRE_P points to the list where side effects that must happen before
+   PRE_P points to the sequence where side effects that must happen before
      *EXPR_P should be stored.
 
-   POST_P points to the list where side effects that must happen after
+   POST_P points to the sequence where side effects that must happen after
      *EXPR_P should be stored.  */
 
 static enum gimplify_status
-gimplify_compound_lval (tree *expr_p, tree *pre_p,
-                       tree *post_p, fallback_t fallback)
+gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
+                       fallback_t fallback)
 {
   tree *p;
   VEC(tree,heap) *stack;
@@ -1798,14 +1992,15 @@ gimplify_compound_lval (tree *expr_p, tree *pre_p,
          /* Gimplify the low bound and element type size and put them into
             the ARRAY_REF.  If these values are set, they have already been
             gimplified.  */
-         if (!TREE_OPERAND (t, 2))
+         if (TREE_OPERAND (t, 2) == NULL_TREE)
            {
              tree low = unshare_expr (array_ref_low_bound (t));
              if (!is_gimple_min_invariant (low))
                {
-                 TREE_OPERAND (t, 2) = low;
-                 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
-                                       is_gimple_formal_tmp_reg, fb_rvalue);
+                 TREE_OPERAND (t, 2) = low;
+                 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
+                                       post_p, is_gimple_formal_tmp_reg,
+                                       fb_rvalue);
                  ret = MIN (ret, tret);
                }
            }
@@ -1822,9 +2017,10 @@ gimplify_compound_lval (tree *expr_p, tree *pre_p,
 
              if (!is_gimple_min_invariant (elmt_size))
                {
-                 TREE_OPERAND (t, 3) = elmt_size;
-                 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
-                                       is_gimple_formal_tmp_reg, fb_rvalue);
+                 TREE_OPERAND (t, 3) = elmt_size;
+                 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
+                                       post_p, is_gimple_formal_tmp_reg,
+                                       fb_rvalue);
                  ret = MIN (ret, tret);
                }
            }
@@ -1844,9 +2040,10 @@ gimplify_compound_lval (tree *expr_p, tree *pre_p,
 
              if (!is_gimple_min_invariant (offset))
                {
-                 TREE_OPERAND (t, 2) = offset;
-                 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
-                                       is_gimple_formal_tmp_reg, fb_rvalue);
+                 TREE_OPERAND (t, 2) = offset;
+                 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
+                                       post_p, is_gimple_formal_tmp_reg,
+                                       fb_rvalue);
                  ret = MIN (ret, tret);
                }
            }
@@ -1896,15 +2093,13 @@ gimplify_compound_lval (tree *expr_p, tree *pre_p,
 
       STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
 
-      /* The innermost expression P may have originally had TREE_SIDE_EFFECTS
-        set which would have caused all the outer expressions in EXPR_P
-        leading to P to also have had TREE_SIDE_EFFECTS set.  */
+      /* The innermost expression P may have originally had
+        TREE_SIDE_EFFECTS set which would have caused all the outer
+        expressions in *EXPR_P leading to P to also have had
+        TREE_SIDE_EFFECTS set.  */
       recalculate_side_effects (t);
     }
 
-  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval, fallback);
-  ret = MIN (ret, tret);
-
   /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
   if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
     {
@@ -1930,11 +2125,12 @@ gimplify_compound_lval (tree *expr_p, tree *pre_p,
        in another expression.  */
 
 static enum gimplify_status
-gimplify_self_mod_expr (tree *expr_p, tree *pre_p, tree *post_p,
+gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
                        bool want_value)
 {
   enum tree_code code;
-  tree lhs, lvalue, rhs, t1, post = NULL, *orig_post_p = post_p;
+  tree lhs, lvalue, rhs, t1;
+  gimple_seq post = NULL, *orig_post_p = post_p;
   bool postfix;
   enum tree_code arith_code;
   enum gimplify_status ret;
@@ -1991,22 +2187,22 @@ gimplify_self_mod_expr (tree *expr_p, tree *pre_p, tree *post_p,
     }
 
   t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
-  t1 = build_gimple_modify_stmt (lvalue, t1);
 
   if (postfix)
     {
-      gimplify_and_add (t1, orig_post_p);
-      append_to_statement_list (post, orig_post_p);
+      gimplify_assign (lvalue, t1, orig_post_p);
+      gimplify_seq_add_seq (orig_post_p, post);
       *expr_p = lhs;
       return GS_ALL_DONE;
     }
   else
     {
-      *expr_p = t1;
+      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
       return GS_OK;
     }
 }
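For a postfix form used for its value, the value of the expression is the pre-modification value and the store is queued on the internal post queue, so something like b = a++ comes out roughly as (illustrative; the temporary name is made up):

    a.0 = a;
    b = a.0;
    a = a.0 + 1;      /* appended from the post queue after the use */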
 
+
 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR.  */
 
 static void
@@ -2033,10 +2229,12 @@ maybe_with_size_expr (tree *expr_p)
   *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
 }
 
-/* Subroutine of gimplify_call_expr:  Gimplify a single argument.  */
+
+/* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P.
+   Store any side effects in PRE_P.  */
 
 static enum gimplify_status
-gimplify_arg (tree *expr_p, tree *pre_p)
+gimplify_arg (tree *arg_p, gimple_seq *pre_p)
 {
   bool (*test) (tree);
   fallback_t fb;
@@ -2046,31 +2244,33 @@ gimplify_arg (tree *expr_p, tree *pre_p)
      aggregates into temporaries only to copy the temporaries to
      the argument list.  Make optimizers happy by pulling out to
      temporaries those types that fit in registers.  */
-  if (is_gimple_reg_type (TREE_TYPE (*expr_p)))
+  if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
     test = is_gimple_val, fb = fb_rvalue;
   else
     test = is_gimple_lvalue, fb = fb_either;
 
   /* If this is a variable sized type, we must remember the size.  */
-  maybe_with_size_expr (expr_p);
+  maybe_with_size_expr (arg_p);
 
   /* There is a sequence point before a function call.  Side effects in
      the argument list must occur before the actual call. So, when
      gimplifying arguments, force gimplify_expr to use an internal
      post queue which is then appended to the end of PRE_P.  */
-  return gimplify_expr (expr_p, pre_p, NULL, test, fb);
+  return gimplify_expr (arg_p, pre_p, NULL, test, fb);
 }
 
-/* Gimplify the CALL_EXPR node pointed to by EXPR_P.  PRE_P points to the
-   list where side effects that must happen before *EXPR_P should be stored.
+
+/* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
    WANT_VALUE is true if the result of the call is desired.  */
 
 static enum gimplify_status
-gimplify_call_expr (tree *expr_p, tree *pre_p, bool want_value)
+gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
 {
-  tree decl, parms, p;
+  tree fndecl, parms, p;
   enum gimplify_status ret;
   int i, nargs;
+  gimple call;
+  bool builtin_va_start_p = FALSE;
 
   gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
 
@@ -2091,8 +2291,8 @@ gimplify_call_expr (tree *expr_p, tree *pre_p, bool want_value)
      we gimplify the CALL_EXPR.  At this time we do not manage to
      transform all calls in the same manner as the expanders do, but
      we do transform most of them.  */
-  decl = get_callee_fndecl (*expr_p);
-  if (decl && DECL_BUILT_IN (decl))
+  fndecl = get_callee_fndecl (*expr_p);
+  if (fndecl && DECL_BUILT_IN (fndecl))
     {
       tree new = fold_call_expr (*expr_p, !want_value);
 
@@ -2105,9 +2305,10 @@ gimplify_call_expr (tree *expr_p, tree *pre_p, bool want_value)
          return GS_OK;
        }
 
-      if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
-         && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_START)
+      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
+         && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_VA_START)
         {
+         builtin_va_start_p = TRUE;
          if (call_expr_nargs (*expr_p) < 2)
            {
              error ("too few arguments to function %<va_start%>");
@@ -2120,9 +2321,6 @@ gimplify_call_expr (tree *expr_p, tree *pre_p, bool want_value)
              *expr_p = build_empty_stmt ();
              return GS_OK;
            }
-         /* Avoid gimplifying the second argument to va_start, which needs
-            to be the plain PARM_DECL.  */
-         return gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p);
        }
     }
 
@@ -2135,19 +2333,20 @@ gimplify_call_expr (tree *expr_p, tree *pre_p, bool want_value)
   nargs = call_expr_nargs (*expr_p);
 
   /* Get argument types for verification.  */
-  decl = get_callee_fndecl (*expr_p);
+  fndecl = get_callee_fndecl (*expr_p);
   parms = NULL_TREE;
-  if (decl)
-    parms = TYPE_ARG_TYPES (TREE_TYPE (decl));
+  if (fndecl)
+    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
   else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
     parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));
 
   /* Verify if the type of the argument matches that of the function
      declaration.  If we cannot verify this or there is a mismatch,
      mark the call expression so it doesn't get inlined later.  */
-  if (decl && DECL_ARGUMENTS (decl))
+  if (fndecl && DECL_ARGUMENTS (fndecl))
     {
-      for (i = 0, p = DECL_ARGUMENTS (decl); i < nargs;
+      for (i = 0, p = DECL_ARGUMENTS (fndecl);
+          i < nargs;
           i++, p = TREE_CHAIN (p))
        {
          /* We cannot distinguish a varargs function from the case
@@ -2212,34 +2411,42 @@ gimplify_call_expr (tree *expr_p, tree *pre_p, bool want_value)
          --nargs;
          *expr_p = build_call_array (TREE_TYPE (call), CALL_EXPR_FN (call),
                                      nargs, CALL_EXPR_ARGP (call));
-         /* Copy all CALL_EXPR flags, locus and block, except
+
+         /* Copy all CALL_EXPR flags, location and block, except
             CALL_EXPR_VA_ARG_PACK flag.  */
          CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
          CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
          CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
            = CALL_EXPR_RETURN_SLOT_OPT (call);
          CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
-         CALL_CANNOT_INLINE_P (*expr_p)
-           = CALL_CANNOT_INLINE_P (call);
-         TREE_NOTHROW (*expr_p) = TREE_NOTHROW (call);
+         CALL_CANNOT_INLINE_P (*expr_p) = CALL_CANNOT_INLINE_P (call);
          SET_EXPR_LOCUS (*expr_p, EXPR_LOCUS (call));
          TREE_BLOCK (*expr_p) = TREE_BLOCK (call);
+
          /* Set CALL_EXPR_VA_ARG_PACK.  */
          CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
        }
     }
 
   /* Finally, gimplify the function arguments.  */
-  for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
-       PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
-       PUSH_ARGS_REVERSED ? i-- : i++)
+  if (nargs > 0)
     {
-      enum gimplify_status t;
+      for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
+           PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
+           PUSH_ARGS_REVERSED ? i-- : i++)
+        {
+          enum gimplify_status t;
 
-      t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p);
+          /* Avoid gimplifying the second argument to va_start, which needs to
+             be the plain PARM_DECL.  */
+          if ((i != 1) || !builtin_va_start_p)
+            {
+              t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p);
 
-      if (t == GS_ERROR)
-       ret = GS_ERROR;
+              if (t == GS_ERROR)
+                ret = GS_ERROR;
+            }
+        }
     }
 
   /* Try this again in case gimplification exposed something.  */
@@ -2256,6 +2463,11 @@ gimplify_call_expr (tree *expr_p, tree *pre_p, bool want_value)
          return GS_OK;
        }
     }
+  else
+    {
+      *expr_p = NULL_TREE;
+      return GS_ERROR;
+    }
 
   /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
      decl.  This allows us to eliminate redundant or useless
@@ -2268,6 +2480,24 @@ gimplify_call_expr (tree *expr_p, tree *pre_p, bool want_value)
          && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
        TREE_SIDE_EFFECTS (*expr_p) = 0;
     }
+
+  /* If the value is not needed by the caller, emit a new GIMPLE_CALL
+     and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
+     form and delegate the creation of a GIMPLE_CALL to
+     gimplify_modify_expr.  This is always possible because when
+     WANT_VALUE is true, the caller wants the result of this call into
+     a temporary, which means that we will emit an INIT_EXPR in
+     internal_get_tmp_var which will then be handled by
+     gimplify_modify_expr.  */
+  if (!want_value)
+    {
+      /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
+        have to do is replicate it as a GIMPLE_CALL tuple.  */
+      call = gimple_build_call_from_tree (*expr_p);
+      gimplify_seq_add_stmt (pre_p, call);
+      *expr_p = NULL_TREE;
+    }
+
   return ret;
 }
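In tuple form the two cases described in the comment above come out as follows (illustrative GENERIC-to-GIMPLE mapping, not actual dump syntax):

    foo (x);        -->  GIMPLE_CALL <foo, x>                 emitted here into PRE_P
    a = foo (x);    -->  GIMPLE_CALL <foo, x> with LHS 'a'    built later by gimplify_modify_expr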
 
@@ -2356,6 +2586,10 @@ shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p)
   return expr;
 }
 
+/* Given a conditional expression EXPR with short-circuit boolean
+   predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
+   predicate apart into the equivalent sequence of conditionals.  */
+
 static tree
 shortcut_cond_expr (tree expr)
 {
@@ -2382,6 +2616,7 @@ shortcut_cond_expr (tree expr)
          expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
        }
     }
+
   if (!then_se)
     {
       /* If there is no 'then', turn
@@ -2550,7 +2785,7 @@ gimple_boolify (tree expr)
    its operands.  New statements are inserted to PRE_P.  */
 
 static enum gimplify_status
-gimplify_pure_cond_expr (tree *expr_p, tree *pre_p)
+gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
 {
   tree expr = *expr_p, cond;
   enum gimplify_status ret, tret;
@@ -2565,8 +2800,7 @@ gimplify_pure_cond_expr (tree *expr_p, tree *pre_p)
     TREE_SET_CODE (cond, TRUTH_AND_EXPR);
   else if (code == TRUTH_ORIF_EXPR)
     TREE_SET_CODE (cond, TRUTH_OR_EXPR);
-  ret = gimplify_expr (&cond, pre_p, NULL,
-                             is_gimple_condexpr, fb_rvalue);
+  ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
   COND_EXPR_COND (*expr_p) = cond;
 
   tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
@@ -2612,17 +2846,20 @@ generic_expr_could_trap_p (tree expr)
 
     The second form is used when *EXPR_P is of type void.
 
-    TARGET is the tree for T1 above.
-
     PRE_P points to the list where side effects that must happen before
       *EXPR_P should be stored.  */
 
 static enum gimplify_status
-gimplify_cond_expr (tree *expr_p, tree *pre_p, fallback_t fallback)
+gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
 {
   tree expr = *expr_p;
-  tree tmp, tmp2, type;
+  tree tmp, type, arm1, arm2;
   enum gimplify_status ret;
+  tree label_true, label_false, label_cont;
+  bool have_then_clause_p, have_else_clause_p;
+  gimple gimple_cond;
+  enum tree_code pred_code;
+  gimple_seq seq = NULL;
 
   type = TREE_TYPE (expr);
 
@@ -2647,7 +2884,7 @@ gimplify_cond_expr (tree *expr_p, tree *pre_p, fallback_t fallback)
              && !generic_expr_could_trap_p (TREE_OPERAND (*expr_p, 2)))
            return gimplify_pure_cond_expr (expr_p, pre_p);
 
-         result = tmp2 = tmp = create_tmp_var (TREE_TYPE (expr), "iftmp");
+         result = tmp = create_tmp_var (TREE_TYPE (expr), "iftmp");
          ret = GS_ALL_DONE;
        }
       else
@@ -2662,34 +2899,33 @@ gimplify_cond_expr (tree *expr_p, tree *pre_p, fallback_t fallback)
            TREE_OPERAND (expr, 2) =
              build_fold_addr_expr (TREE_OPERAND (expr, 2));
 
-         tmp2 = tmp = create_tmp_var (type, "iftmp");
+         tmp = create_tmp_var (type, "iftmp");
 
          expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (expr, 0),
                         TREE_OPERAND (expr, 1), TREE_OPERAND (expr, 2));
 
          result = build_fold_indirect_ref (tmp);
-         ret = GS_ALL_DONE;
        }
 
       /* Build the then clause, 't1 = a;'.  But don't build an assignment
         if this branch is void; in C++ it can be, if it's a throw.  */
       if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
        TREE_OPERAND (expr, 1)
-         = build_gimple_modify_stmt (tmp, TREE_OPERAND (expr, 1));
+         = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 1));
 
       /* Build the else clause, 't1 = b;'.  */
       if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
        TREE_OPERAND (expr, 2)
-         = build_gimple_modify_stmt (tmp2, TREE_OPERAND (expr, 2));
+         = build2 (MODIFY_EXPR, TREE_TYPE (tmp), tmp, TREE_OPERAND (expr, 2));
 
       TREE_TYPE (expr) = void_type_node;
       recalculate_side_effects (expr);
 
       /* Move the COND_EXPR to the prequeue.  */
-      gimplify_and_add (expr, pre_p);
+      gimplify_stmt (&expr, pre_p);
 
       *expr_p = result;
-      return ret;
+      return GS_ALL_DONE;
     }
 
   /* Make sure the condition has BOOLEAN_TYPE.  */
@@ -2710,73 +2946,171 @@ gimplify_cond_expr (tree *expr_p, tree *pre_p, fallback_t fallback)
             wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
             set up a conditional context.  */
          gimple_push_condition ();
-         gimplify_stmt (expr_p);
+         gimplify_stmt (expr_p, &seq);
          gimple_pop_condition (pre_p);
+         gimple_seq_add_seq (pre_p, seq);
 
          return GS_ALL_DONE;
        }
     }
 
   /* Now do the normal gimplification.  */
-  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
-                      is_gimple_condexpr, fb_rvalue);
-
-  gimple_push_condition ();
-
-  gimplify_to_stmt_list (&TREE_OPERAND (expr, 1));
-  gimplify_to_stmt_list (&TREE_OPERAND (expr, 2));
-  recalculate_side_effects (expr);
-
-  gimple_pop_condition (pre_p);
 
+  /* Gimplify condition.  */
+  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
+                      fb_rvalue);
   if (ret == GS_ERROR)
-    ;
-  else if (TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)))
-    ret = GS_ALL_DONE;
-  else if (TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 2)))
-    /* Rewrite "if (a); else b" to "if (!a) b"  */
-    {
-      TREE_OPERAND (expr, 0) = invert_truthvalue (TREE_OPERAND (expr, 0));
-      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
-                          is_gimple_condexpr, fb_rvalue);
+    return GS_ERROR;
+  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
+
+  gimple_push_condition ();
 
-      tmp = TREE_OPERAND (expr, 1);
-      TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 2);
-      TREE_OPERAND (expr, 2) = tmp;
+  have_then_clause_p = have_else_clause_p = false;
+  if (TREE_OPERAND (expr, 1) != NULL
+      && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
+      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
+      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
+         == current_function_decl)
+      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
+        have different locations, otherwise we end up with incorrect
+        location information on the branches.  */
+      && (optimize
+         || !EXPR_HAS_LOCATION (expr)
+         || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
+         || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
+    {
+      label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
+      have_then_clause_p = true;
     }
   else
-    /* Both arms are empty; replace the COND_EXPR with its predicate.  */
-    expr = TREE_OPERAND (expr, 0);
+    label_true = create_artificial_label ();
+  if (TREE_OPERAND (expr, 2) != NULL
+      && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
+      && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
+      && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
+         == current_function_decl)
+      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
+        have different locations, otherwise we end up with incorrect
+        location information on the branches.  */
+      && (optimize
+         || !EXPR_HAS_LOCATION (expr)
+         || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
+         || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
+    {
+      label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
+      have_else_clause_p = true;
+    }
+  else
+    label_false = create_artificial_label ();
 
-  *expr_p = expr;
-  return ret;
-}
+  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
+                                &arm2);
 
-/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
-   a call to __builtin_memcpy.  */
+  gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
+                                   label_false);
 
-static enum gimplify_status
-gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value)
+  gimplify_seq_add_stmt (&seq, gimple_cond);
+  label_cont = NULL_TREE;
+  if (!have_then_clause_p)
+    {
+      /* For if (...) {} else { code; } put label_true after
+        the else block.  */
+      if (TREE_OPERAND (expr, 1) == NULL_TREE
+         && !have_else_clause_p
+         && TREE_OPERAND (expr, 2) != NULL_TREE)
+       label_cont = label_true;
+      else
+       {
+         gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
+         have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
+         /* For if (...) { code; } else {} or
+            if (...) { code; } else goto label; or
+            if (...) { code; return; } else { ... }
+            label_cont isn't needed.  */
+         if (!have_else_clause_p
+             && TREE_OPERAND (expr, 2) != NULL_TREE
+             && gimple_seq_may_fallthru (seq))
+           {
+             gimple g;
+             label_cont = create_artificial_label ();
+
+             g = gimple_build_goto (label_cont);
+
+             /* GIMPLE_CONDs are very low level; they have embedded
+                gotos.  This particular embedded goto should not be marked
+                with the location of the original COND_EXPR, as it would
+                correspond to the COND_EXPR's condition, not the ELSE or the
+                THEN arms.  To avoid marking it with the wrong location, flag
+                it as "no location".  */
+             gimple_set_do_not_emit_location (g);
+
+             gimplify_seq_add_stmt (&seq, g);
+           }
+       }
+    }
+  if (!have_else_clause_p)
+    {
+      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
+      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
+    }
+  if (label_cont)
+    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
+
+  gimple_pop_condition (pre_p);
+  gimple_seq_add_seq (pre_p, seq);
+
+  if (ret == GS_ERROR)
+    ; /* Do nothing.  */
+  else if (have_then_clause_p || have_else_clause_p)
+    ret = GS_ALL_DONE;
+  else
+    {
+      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
+      expr = TREE_OPERAND (expr, 0);
+      gimplify_stmt (&expr, pre_p);
+    }
+
+  *expr_p = NULL;
+  return ret;
+}
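The rewritten gimplify_cond_expr therefore produces an explicit GIMPLE_COND naming both branch targets, followed by labelled arms; a two-armed COND_EXPR comes out roughly as (illustrative sketch; labels are arbitrary):

    if (a > b) goto T; else goto F;    /* gimple_build_cond (GT_EXPR, a, b, T, F) */
    T:  ... then arm ...
        goto CONT;                     /* emitted only when an else arm follows and
                                          the then arm can fall through */
    F:  ... else arm ...
    CONT: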
+
+/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
+   a call to __builtin_memcpy.  */
+
+static enum gimplify_status
+gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
+                               gimple_seq *seq_p)
 {
   tree t, to, to_ptr, from, from_ptr;
+  gimple gs;
 
-  to = GENERIC_TREE_OPERAND (*expr_p, 0);
-  from = GENERIC_TREE_OPERAND (*expr_p, 1);
+  to = TREE_OPERAND (*expr_p, 0);
+  from = TREE_OPERAND (*expr_p, 1);
 
   from_ptr = build_fold_addr_expr (from);
+  gimplify_arg (&from_ptr, seq_p);
 
   to_ptr = build_fold_addr_expr (to);
+  gimplify_arg (&to_ptr, seq_p);
+
   t = implicit_built_in_decls[BUILT_IN_MEMCPY];
-  t = build_call_expr (t, 3, to_ptr, from_ptr, size);
+
+  gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
 
   if (want_value)
     {
-      t = build1 (NOP_EXPR, TREE_TYPE (to_ptr), t);
-      t = build1 (INDIRECT_REF, TREE_TYPE (to), t);
+      /* tmp = memcpy() */
+      t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
+      gimple_call_set_lhs (gs, t);
+      gimplify_seq_add_stmt (seq_p, gs);
+
+      *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
+      return GS_ALL_DONE;
     }
 
-  *expr_p = t;
-  return GS_OK;
+  gimplify_seq_add_stmt (seq_p, gs);
+  *expr_p = NULL;
+  return GS_ALL_DONE;
 }
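A variable-sized block copy thus becomes a real call tuple rather than a CALL_EXPR tree; schematically (illustrative; temporary names are made up, and the builtin shown is whatever implicit_built_in_decls[BUILT_IN_MEMCPY] resolves to):

    to_ptr = &to;                                  /* both addresses gimplified as call arguments */
    from_ptr = &from;
    tmp = __builtin_memcpy (to_ptr, from_ptr, size);
    /* the LHS 'tmp' is added only when the value is needed,
       in which case *tmp becomes the value of the expression */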
 
 /* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
@@ -2784,14 +3118,16 @@ gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value)
    a CONSTRUCTOR with an empty element list.  */
 
 static enum gimplify_status
-gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value)
+gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
+                               gimple_seq *seq_p)
 {
   tree t, from, to, to_ptr;
+  gimple gs;
 
   /* Assert our assumptions, to abort instead of producing wrong code
      silently if they are not met.  Beware that the RHS CONSTRUCTOR might
      not be immediately exposed.  */
-  from = GENERIC_TREE_OPERAND (*expr_p, 1);  
+  from = TREE_OPERAND (*expr_p, 1);  
   if (TREE_CODE (from) == WITH_SIZE_EXPR)
     from = TREE_OPERAND (from, 0);
 
@@ -2799,20 +3135,28 @@ gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value)
              && VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (from)));
 
   /* Now proceed.  */
-  to = GENERIC_TREE_OPERAND (*expr_p, 0);
+  to = TREE_OPERAND (*expr_p, 0);
 
   to_ptr = build_fold_addr_expr (to);
+  gimplify_arg (&to_ptr, seq_p);
   t = implicit_built_in_decls[BUILT_IN_MEMSET];
-  t = build_call_expr (t, 3, to_ptr, integer_zero_node, size);
+
+  gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
 
   if (want_value)
     {
-      t = build1 (NOP_EXPR, TREE_TYPE (to_ptr), t);
-      t = build1 (INDIRECT_REF, TREE_TYPE (to), t);
+      /* tmp = memset() */
+      t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
+      gimple_call_set_lhs (gs, t);
+      gimplify_seq_add_stmt (seq_p, gs);
+
+      *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
+      return GS_ALL_DONE;
     }
 
-  *expr_p = t;
-  return GS_OK;
+  gimplify_seq_add_stmt (seq_p, gs);
+  *expr_p = NULL;
+  return GS_ALL_DONE;
 }
 
 /* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
@@ -2868,12 +3212,12 @@ gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
   return NULL;
 }
 
-/* A subroutine of gimplify_init_constructor.  Pre-evaluate *EXPR_P,
+/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
    force values that overlap with the lhs (as described by *DATA)
    into temporaries.  */
 
 static void
-gimplify_init_ctor_preeval (tree *expr_p, tree *pre_p, tree *post_p,
+gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
                            struct gimplify_init_ctor_preeval_data *data)
 {
   enum gimplify_status one;
@@ -2900,6 +3244,7 @@ gimplify_init_ctor_preeval (tree *expr_p, tree *pre_p, tree *post_p,
 
       for (ix = 0; VEC_iterate (constructor_elt, v, ix, ce); ix++)
        gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
+
       return;
     }
 
@@ -2907,7 +3252,7 @@ gimplify_init_ctor_preeval (tree *expr_p, tree *pre_p, tree *post_p,
   maybe_with_size_expr (expr_p);
 
   /* Gimplify the constructor element to something appropriate for the rhs
-     of a MODIFY_EXPR.  Given that we know the lhs is an aggregate, we know
+     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
      the gimplifier will consider this a store to memory.  Doing this
      gimplification now means that we won't have to deal with complicated
      language-specific trees, nor trees like SAVE_EXPR that can induce
@@ -2958,29 +3303,27 @@ gimplify_init_ctor_preeval (tree *expr_p, tree *pre_p, tree *post_p,
    already been taken care of for us, in gimplify_init_ctor_preeval().  */
 
 static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *,
-                                    tree *, bool);
+                                    gimple_seq *, bool);
 
 static void
 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
                               tree value, tree array_elt_type,
-                              tree *pre_p, bool cleared)
+                              gimple_seq *pre_p, bool cleared)
 {
-  tree loop_entry_label, loop_exit_label;
+  tree loop_entry_label, loop_exit_label, fall_thru_label;
   tree var, var_type, cref, tmp;
 
   loop_entry_label = create_artificial_label ();
   loop_exit_label = create_artificial_label ();
+  fall_thru_label = create_artificial_label ();
 
   /* Create and initialize the index variable.  */
   var_type = TREE_TYPE (upper);
   var = create_tmp_var (var_type, NULL);
-  append_to_statement_list (build_gimple_modify_stmt (var, lower), pre_p);
+  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
 
   /* Add the loop entry label.  */
-  append_to_statement_list (build1 (LABEL_EXPR,
-                                   void_type_node,
-                                   loop_entry_label),
-                           pre_p);
+  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
 
   /* Build the reference.  */
   cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
@@ -2995,34 +3338,25 @@ gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
     gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
                             pre_p, cleared);
   else
-    append_to_statement_list (build_gimple_modify_stmt (cref, value), pre_p);
+    gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
 
   /* We exit the loop when the index var is equal to the upper bound.  */
-  gimplify_and_add (build3 (COND_EXPR, void_type_node,
-                           build2 (EQ_EXPR, boolean_type_node,
-                                   var, upper),
-                           build1 (GOTO_EXPR,
-                                   void_type_node,
-                                   loop_exit_label),
-                           NULL_TREE),
-                   pre_p);
+  gimplify_seq_add_stmt (pre_p,
+                        gimple_build_cond (EQ_EXPR, var, upper,
+                                           loop_exit_label, fall_thru_label));
+
+  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
 
   /* Otherwise, increment the index var...  */
   tmp = build2 (PLUS_EXPR, var_type, var,
                fold_convert (var_type, integer_one_node));
-  append_to_statement_list (build_gimple_modify_stmt (var, tmp), pre_p);
+  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
 
   /* ...and jump back to the loop entry.  */
-  append_to_statement_list (build1 (GOTO_EXPR,
-                                   void_type_node,
-                                   loop_entry_label),
-                           pre_p);
+  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
 
   /* Add the loop exit label.  */
-  append_to_statement_list (build1 (LABEL_EXPR,
-                                   void_type_node,
-                                   loop_exit_label),
-                           pre_p);
+  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
 }
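The emitted sequence for a RANGE_EXPR initializer is therefore a small explicit loop; because a GIMPLE_COND must name both of its targets, the new fall_thru_label gives the "keep going" edge somewhere to land (illustrative shape):

    var = lower;
    entry:
      object[var] = value;                 /* or a recursive constructor expansion */
      if (var == upper) goto exit; else goto fallthru;
    fallthru:
      var = var + 1;
      goto entry;
    exit: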
 
 /* Return true if FDECL is accessing a field that is zero sized.  */
@@ -3055,7 +3389,7 @@ zero_sized_type (const_tree type)
 
 static void
 gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
-                        tree *pre_p, bool cleared)
+                        gimple_seq *pre_p, bool cleared)
 {
   tree array_elt_type = NULL;
   unsigned HOST_WIDE_INT ix;
@@ -3066,7 +3400,7 @@ gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
 
   FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
     {
-      tree cref, init;
+      tree cref;
 
       /* NULL values are created above for gimplification errors.  */
       if (value == NULL)
@@ -3128,12 +3462,28 @@ gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
                                 pre_p, cleared);
       else
        {
-         init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
+         tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
          gimplify_and_add (init, pre_p);
+         ggc_free (init);
        }
     }
 }
 
+
+/* Returns the appropriate RHS predicate for this LHS.  */
+
+gimple_predicate
+rhs_predicate_for (tree lhs)
+{
+  if (is_gimple_formal_tmp_var (lhs))
+    return is_gimple_formal_tmp_or_call_rhs;
+  else if (is_gimple_reg (lhs))
+    return is_gimple_reg_or_call_rhs;
+  else
+    return is_gimple_mem_or_call_rhs;
+}
+
+
 /* A subroutine of gimplify_modify_expr.  Break out elements of a
    CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
 
@@ -3148,12 +3498,11 @@ gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
    If NOTIFY_TEMP_CREATION is false, just do the gimplification.  */
 
 static enum gimplify_status
-gimplify_init_constructor (tree *expr_p, tree *pre_p,
-                          tree *post_p, bool want_value,
-                          bool notify_temp_creation)
+gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
+                          bool want_value, bool notify_temp_creation)
 {
   tree object;
-  tree ctor = GENERIC_TREE_OPERAND (*expr_p, 1);
+  tree ctor = TREE_OPERAND (*expr_p, 1);
   tree type = TREE_TYPE (ctor);
   enum gimplify_status ret;
   VEC(constructor_elt,gc) *elts;
@@ -3163,16 +3512,16 @@ gimplify_init_constructor (tree *expr_p, tree *pre_p,
 
   if (!notify_temp_creation)
     {
-      ret = gimplify_expr (&GENERIC_TREE_OPERAND (*expr_p, 0), pre_p, post_p,
+      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                           is_gimple_lvalue, fb_lvalue);
       if (ret == GS_ERROR)
        return ret;
     }
-  object = GENERIC_TREE_OPERAND (*expr_p, 0);
 
+  object = TREE_OPERAND (*expr_p, 0);
   elts = CONSTRUCTOR_ELTS (ctor);
-
   ret = GS_ALL_DONE;
+
   switch (TREE_CODE (type))
     {
     case RECORD_TYPE:
@@ -3194,7 +3543,7 @@ gimplify_init_constructor (tree *expr_p, tree *pre_p,
              return GS_OK;
            break;
          }
-
        /* Fetch information about the constructor to direct later processing.
           We might want to make static versions of it in various cases, and
           can only do so if it known to be a valid constant initializer.  */
@@ -3305,7 +3654,7 @@ gimplify_init_constructor (tree *expr_p, tree *pre_p,
                  }
                walk_tree (&DECL_INITIAL (new), force_labels_r, NULL, NULL);
 
-               GENERIC_TREE_OPERAND (*expr_p, 1) = new;
+               TREE_OPERAND (*expr_p, 1) = new;
 
                /* This is no longer an assignment of a CONSTRUCTOR, but
                   we still may have processing to do on the LHS.  So
@@ -3327,7 +3676,7 @@ gimplify_init_constructor (tree *expr_p, tree *pre_p,
              preeval_data.lhs_base_decl = NULL;
            preeval_data.lhs_alias_set = get_alias_set (object);
 
-           gimplify_init_ctor_preeval (&GENERIC_TREE_OPERAND (*expr_p, 1),
+           gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
                                        pre_p, post_p, &preeval_data);
          }
 
@@ -3337,9 +3686,9 @@ gimplify_init_constructor (tree *expr_p, tree *pre_p,
               Note that we still have to gimplify, in order to handle the
               case of variable sized types.  Avoid shared tree structures.  */
            CONSTRUCTOR_ELTS (ctor) = NULL;
+           TREE_SIDE_EFFECTS (ctor) = 0;
            object = unshare_expr (object);
-           gimplify_stmt (expr_p);
-           append_to_statement_list (*expr_p, pre_p);
+           gimplify_stmt (expr_p, pre_p);
          }
 
        /* If we have not block cleared the object, or if there are nonzero
@@ -3383,7 +3732,9 @@ gimplify_init_constructor (tree *expr_p, tree *pre_p,
          {
            ctor = build2 (COMPLEX_EXPR, type, r, i);
            TREE_OPERAND (*expr_p, 1) = ctor;
-           ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
+           ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
+                                pre_p,
+                                post_p,
                                 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
                                 fb_rvalue);
          }
@@ -3435,13 +3786,13 @@ gimplify_init_constructor (tree *expr_p, tree *pre_p,
        for (ix = 0; VEC_iterate (constructor_elt, elts, ix, ce); ix++)
          {
            enum gimplify_status tret;
-           tret = gimplify_expr (&ce->value, pre_p, post_p,
-                                 is_gimple_val, fb_rvalue);
+           tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
+                                 fb_rvalue);
            if (tret == GS_ERROR)
              ret = GS_ERROR;
          }
-       if (!is_gimple_reg (GENERIC_TREE_OPERAND (*expr_p, 0)))
-         GENERIC_TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
+       if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
+         TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
       }
       break;
 
@@ -3454,12 +3805,24 @@ gimplify_init_constructor (tree *expr_p, tree *pre_p,
     return GS_ERROR;
   else if (want_value)
     {
-      append_to_statement_list (*expr_p, pre_p);
       *expr_p = object;
       return GS_OK;
     }
   else
-    return GS_ALL_DONE;
+    {
+      /* If we have gimplified both sides of the initializer but have
+        not emitted an assignment, do so now.  */
+      if (*expr_p)
+       {
+         tree lhs = TREE_OPERAND (*expr_p, 0);
+         tree rhs = TREE_OPERAND (*expr_p, 1);
+         gimple init = gimple_build_assign (lhs, rhs);
+         gimplify_seq_add_stmt (pre_p, init);
+         *expr_p = NULL;
+       }
+
+      return GS_ALL_DONE;
+    }
 }
 
 /* Given a pointer value OP0, return a simplified version of an
@@ -3537,8 +3900,9 @@ gimple_fold_indirect_ref_rhs (tree t)
    something changes.  */
 
 static enum gimplify_status
-gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p, tree *pre_p,
-                         tree *post_p, bool want_value)
+gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
+                         gimple_seq *pre_p, gimple_seq *post_p,
+                         bool want_value)
 {
   enum gimplify_status ret = GS_OK;
 
@@ -3735,8 +4099,8 @@ gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p, tree *pre_p,
          tree wrap = *from_p;
          tree t;
 
-         ret = gimplify_expr (to_p, pre_p, post_p,
-                              is_gimple_min_lval, fb_lvalue);
+         ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
+                              fb_lvalue);
          if (ret != GS_ERROR)
            ret = GS_OK;
 
@@ -3761,51 +4125,6 @@ gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p, tree *pre_p,
   return ret;
 }
 
-/* Destructively convert the TREE pointer in TP into a gimple tuple if
-   appropriate.  */
-
-static void
-tree_to_gimple_tuple (tree *tp)
-{
-
-  switch (TREE_CODE (*tp))
-    {
-    case GIMPLE_MODIFY_STMT:
-      return;
-    case MODIFY_EXPR:
-      {
-        struct gimple_stmt *gs;
-       tree lhs = TREE_OPERAND (*tp, 0);
-       bool def_stmt_self_p = false;
-
-       if (TREE_CODE (lhs) == SSA_NAME)
-         {
-           if (SSA_NAME_DEF_STMT (lhs) == *tp)
-             def_stmt_self_p = true;
-         }
-
-        gs = &make_node (GIMPLE_MODIFY_STMT)->gstmt;
-        gs->base = (*tp)->base;
-        /* The set to base above overwrites the CODE.  */
-        TREE_SET_CODE ((tree) gs, GIMPLE_MODIFY_STMT);
-
-       SET_EXPR_LOCUS ((tree) gs, EXPR_LOCUS (*tp));
-        gs->operands[0] = TREE_OPERAND (*tp, 0);
-        gs->operands[1] = TREE_OPERAND (*tp, 1);
-        gs->block = TREE_BLOCK (*tp);
-        *tp = (tree)gs;
-
-       /* If we re-gimplify a set to an SSA_NAME, we must change the
-          SSA name's DEF_STMT link.  */
-       if (def_stmt_self_p)
-         SSA_NAME_DEF_STMT (GIMPLE_STMT_OPERAND (*tp, 0)) = *tp;
-
-        return;
-      }
-    default:
-      break;
-    }
-}
 
 /* Promote partial stores to COMPLEX variables to total stores.  *EXPR_P is
    a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
@@ -3820,13 +4139,14 @@ tree_to_gimple_tuple (tree *tp)
    followed by a DCE pass are necessary in order to fix things up.  */
 
 static enum gimplify_status
-gimplify_modify_expr_complex_part (tree *expr_p, tree *pre_p, bool want_value)
+gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
+                                   bool want_value)
 {
   enum tree_code code, ocode;
   tree lhs, rhs, new_rhs, other, realpart, imagpart;
 
-  lhs = GENERIC_TREE_OPERAND (*expr_p, 0);
-  rhs = GENERIC_TREE_OPERAND (*expr_p, 1);
+  lhs = TREE_OPERAND (*expr_p, 0);
+  rhs = TREE_OPERAND (*expr_p, 1);
   code = TREE_CODE (lhs);
   lhs = TREE_OPERAND (lhs, 0);
 
@@ -3842,20 +4162,13 @@ gimplify_modify_expr_complex_part (tree *expr_p, tree *pre_p, bool want_value)
   else
     new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
 
-  GENERIC_TREE_OPERAND (*expr_p, 0) = lhs;
-  GENERIC_TREE_OPERAND (*expr_p, 1) = new_rhs;
-
-  if (want_value)
-    {
-      tree_to_gimple_tuple (expr_p);
-
-      append_to_statement_list (*expr_p, pre_p);
-      *expr_p = rhs;
-    }
+  gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
+  *expr_p = (want_value) ? rhs : NULL_TREE;
 
   return GS_ALL_DONE;
 }
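In other words, a partial store is rewritten as a total store that recycles the untouched half, e.g. (illustrative):

    __real__ c = x;    -->    c = COMPLEX_EXPR <x, __imag__ c>;

emitted directly as a GIMPLE_ASSIGN; when the store is used for its value, *EXPR_P becomes the original RHS.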
 
+
 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
 
       modify_expr
@@ -3872,14 +4185,15 @@ gimplify_modify_expr_complex_part (tree *expr_p, tree *pre_p, bool want_value)
        in another expression.  */
 
 static enum gimplify_status
-gimplify_modify_expr (tree *expr_p, tree *pre_p, tree *post_p, bool want_value)
+gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
+                     bool want_value)
 {
-  tree *from_p = &GENERIC_TREE_OPERAND (*expr_p, 1);
-  tree *to_p = &GENERIC_TREE_OPERAND (*expr_p, 0);
+  tree *from_p = &TREE_OPERAND (*expr_p, 1);
+  tree *to_p = &TREE_OPERAND (*expr_p, 0);
   enum gimplify_status ret = GS_UNHANDLED;
+  gimple assign;
 
   gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
-             || TREE_CODE (*expr_p) == GIMPLE_MODIFY_STMT
              || TREE_CODE (*expr_p) == INIT_EXPR);
 
   /* Insert pointer conversions required by the middle-end that are not
@@ -3905,10 +4219,8 @@ gimplify_modify_expr (tree *expr_p, tree *pre_p, tree *post_p, bool want_value)
      types properly.  */
   if (zero_sized_type (TREE_TYPE (*from_p)))
     {
-      gimplify_stmt (from_p);
-      gimplify_stmt (to_p);
-      append_to_statement_list (*from_p, pre_p);
-      append_to_statement_list (*to_p, pre_p);
+      gimplify_stmt (from_p, pre_p);
+      gimplify_stmt (to_p, pre_p);
       *expr_p = NULL_TREE;
       return GS_ALL_DONE;
     }
@@ -3918,15 +4230,27 @@ gimplify_modify_expr (tree *expr_p, tree *pre_p, tree *post_p, bool want_value)
      before gimplifying any of the operands so that we can resolve any
      PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
      the size of the expression to be copied, not of the destination, so
-     that is what we must here.  */
+     that is what we must do here.  */
   maybe_with_size_expr (from_p);
 
   ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
   if (ret == GS_ERROR)
     return ret;
 
-  ret = gimplify_expr (from_p, pre_p, post_p,
-                      rhs_predicate_for (*to_p), fb_rvalue);
+  /* As a special case, we have to temporarily allow for assignments
+     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
+     a toplevel statement, when gimplifying the GENERIC expression
+     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
+     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
+
+     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
+     prevent gimplify_expr from trying to create a new temporary for
+     foo's LHS, we tell it that it should only gimplify until it
+     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
+     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
+     and all we need to do here is set 'a' to be its LHS.  */
+  ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
+                      fb_rvalue);
   if (ret == GS_ERROR)
     return ret;
 
@@ -3945,11 +4269,13 @@ gimplify_modify_expr (tree *expr_p, tree *pre_p, tree *post_p, bool want_value)
       tree size = TREE_OPERAND (*from_p, 1);
 
       if (TREE_CODE (from) == CONSTRUCTOR)
-       return gimplify_modify_expr_to_memset (expr_p, size, want_value);
+       return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
+
       if (is_gimple_addressable (from))
        {
          *from_p = from;
-         return gimplify_modify_expr_to_memcpy (expr_p, size, want_value);
+         return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
+                                                pre_p);
        }
     }
 
@@ -3961,35 +4287,49 @@ gimplify_modify_expr (tree *expr_p, tree *pre_p, tree *post_p, bool want_value)
       && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
     return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
 
-  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
-    {
-      /* If we've somehow already got an SSA_NAME on the LHS, then
-        we're probably modified it twice.  Not good.  */
-      gcc_assert (TREE_CODE (*to_p) != SSA_NAME);
-      *to_p = make_ssa_name (*to_p, *expr_p);
-    }
-
   /* Try to alleviate the effects of the gimplification creating artificial
      temporaries (see for example is_gimple_reg_rhs) on the debug info.  */
   if (!gimplify_ctxp->into_ssa
-      && DECL_P (*from_p) && DECL_IGNORED_P (*from_p)
-      && DECL_P (*to_p) && !DECL_IGNORED_P (*to_p))
+      && DECL_P (*from_p)
+      && DECL_IGNORED_P (*from_p)
+      && DECL_P (*to_p)
+      && !DECL_IGNORED_P (*to_p))
     {
       if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
        DECL_NAME (*from_p)
          = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
       DECL_DEBUG_EXPR_IS_FROM (*from_p) = 1;
       SET_DECL_DEBUG_EXPR (*from_p, *to_p);
+    }
+
+  if (TREE_CODE (*from_p) == CALL_EXPR)
+    {
+      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
+        instead of a GIMPLE_ASSIGN.  */
+      assign = gimple_build_call_from_tree (*from_p);
+      gimple_call_set_lhs (assign, *to_p);
     }
+  else
+    assign = gimple_build_assign (*to_p, *from_p);
 
-  if (want_value)
+  gimplify_seq_add_stmt (pre_p, assign);
+
+  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
     {
-      tree_to_gimple_tuple (expr_p);
+      /* If we've somehow already got an SSA_NAME on the LHS, then
+        we've probably modified it twice.  Not good.  */
+      gcc_assert (TREE_CODE (*to_p) != SSA_NAME);
+      *to_p = make_ssa_name (*to_p, assign);
+      gimple_set_lhs (assign, *to_p);
+    }
 
-      append_to_statement_list (*expr_p, pre_p);
-      *expr_p = *to_p;
+  if (want_value)
+    {
+      *expr_p = unshare_expr (*to_p);
       return GS_OK;
     }
+  else
+    *expr_p = NULL;
 
   return GS_ALL_DONE;
 }
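Note that with the tuple emitted eagerly, an assignment used as a value now evaluates to (an unshared copy of) its LHS rather than to a GIMPLE_MODIFY_STMT tree, so, roughly:

    x = (a = b);    gimplifies to    a = b;   x = a;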
@@ -4071,12 +4411,9 @@ gimplify_boolean_expr (tree *expr_p)
        expressions in the sequence will be emitted.
 
    WANT_VALUE is true when the result of the last COMPOUND_EXPR is used.  */
-/* ??? Should rearrange to share the pre-queue with all the indirect
-   invocations of gimplify_expr.  Would probably save on creations
-   of statement_list nodes.  */
 
 static enum gimplify_status
-gimplify_compound_expr (tree *expr_p, tree *pre_p, bool want_value)
+gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
 {
   tree t = *expr_p;
 
@@ -4087,8 +4424,7 @@ gimplify_compound_expr (tree *expr_p, tree *pre_p, bool want_value)
       if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
        gimplify_compound_expr (sub_p, pre_p, false);
       else
-       gimplify_stmt (sub_p);
-      append_to_statement_list (*sub_p, pre_p);
+       gimplify_stmt (sub_p, pre_p);
 
       t = TREE_OPERAND (t, 1);
     }
@@ -4099,58 +4435,21 @@ gimplify_compound_expr (tree *expr_p, tree *pre_p, bool want_value)
     return GS_OK;
   else
     {
-      gimplify_stmt (expr_p);
+      gimplify_stmt (expr_p, pre_p);
       return GS_ALL_DONE;
     }
 }
 
-/* Gimplifies a statement list.  These may be created either by an
-   enlightened front-end, or by shortcut_cond_expr.  */
-
-static enum gimplify_status
-gimplify_statement_list (tree *expr_p, tree *pre_p)
-{
-  tree temp = voidify_wrapper_expr (*expr_p, NULL);
-
-  tree_stmt_iterator i = tsi_start (*expr_p);
-
-  while (!tsi_end_p (i))
-    {
-      tree t;
-
-      gimplify_stmt (tsi_stmt_ptr (i));
-
-      t = tsi_stmt (i);
-      if (t == NULL)
-       tsi_delink (&i);
-      else if (TREE_CODE (t) == STATEMENT_LIST)
-       {
-         tsi_link_before (&i, t, TSI_SAME_STMT);
-         tsi_delink (&i);
-       }
-      else
-       tsi_next (&i);
-    }
-
-  if (temp)
-    {
-      append_to_statement_list (*expr_p, pre_p);
-      *expr_p = temp;
-      return GS_OK;
-    }
-
-  return GS_ALL_DONE;
-}
 
-/*  Gimplify a SAVE_EXPR node.  EXPR_P points to the expression to
-    gimplify.  After gimplification, EXPR_P will point to a new temporary
-    that holds the original value of the SAVE_EXPR node.
+/* Gimplify a SAVE_EXPR node.  EXPR_P points to the expression to
+   gimplify.  After gimplification, EXPR_P will point to a new temporary
+   that holds the original value of the SAVE_EXPR node.
 
-    PRE_P points to the list where side effects that must happen before
-       *EXPR_P should be stored.  */
+   PRE_P points to the sequence where side effects that must happen before
+      *EXPR_P should be stored.  */
 
 static enum gimplify_status
-gimplify_save_expr (tree *expr_p, tree *pre_p, tree *post_p)
+gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
 {
   enum gimplify_status ret = GS_ALL_DONE;
   tree val;
@@ -4168,7 +4467,6 @@ gimplify_save_expr (tree *expr_p, tree *pre_p, tree *post_p)
        {
          ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                               is_gimple_stmt, fb_none);
-         append_to_statement_list (TREE_OPERAND (*expr_p, 0), pre_p);
          val = NULL;
        }
       else
@@ -4197,7 +4495,7 @@ gimplify_save_expr (tree *expr_p, tree *pre_p, tree *post_p)
        *EXPR_P should be stored.  */
 
 static enum gimplify_status
-gimplify_addr_expr (tree *expr_p, tree *pre_p, tree *post_p)
+gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
 {
   tree expr = *expr_p;
   tree op0 = TREE_OPERAND (expr, 0);
@@ -4280,24 +4578,39 @@ gimplify_addr_expr (tree *expr_p, tree *pre_p, tree *post_p)
    value; output operands should be a gimple lvalue.  */
 
 static enum gimplify_status
-gimplify_asm_expr (tree *expr_p, tree *pre_p, tree *post_p)
+gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
 {
-  tree expr = *expr_p;
-  int noutputs = list_length (ASM_OUTPUTS (expr));
-  const char **oconstraints
-    = (const char **) alloca ((noutputs) * sizeof (const char *));
+  tree expr;
+  int noutputs;
+  const char **oconstraints;
   int i;
   tree link;
   const char *constraint;
   bool allows_mem, allows_reg, is_inout;
   enum gimplify_status ret, tret;
+  gimple stmt;
+  VEC(tree, gc) *inputs;
+  VEC(tree, gc) *outputs;
+  VEC(tree, gc) *clobbers;
+  tree link_next;
+  
+  expr = *expr_p;
+  noutputs = list_length (ASM_OUTPUTS (expr));
+  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
+
+  inputs = outputs = clobbers = NULL;
 
   ret = GS_ALL_DONE;
-  for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = TREE_CHAIN (link))
+  link_next = NULL_TREE;
+  for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
     {
-      size_t constraint_len;
       bool ok;
-      oconstraints[i] = constraint
+      size_t constraint_len;
+
+      link_next = TREE_CHAIN (link);
+
+      oconstraints[i]
+       = constraint
        = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
       constraint_len = strlen (constraint);
       if (constraint_len == 0)
@@ -4323,6 +4636,9 @@ gimplify_asm_expr (tree *expr_p, tree *pre_p, tree *post_p)
          ret = tret;
        }
 
+      VEC_safe_push (tree, gc, outputs, link);
+      TREE_CHAIN (link) = NULL_TREE;
+
       if (is_inout)
        {
          /* An input/output operand.  To give the optimizers more
@@ -4416,10 +4732,11 @@ gimplify_asm_expr (tree *expr_p, tree *pre_p, tree *post_p)
        }
     }
 
-  for (link = ASM_INPUTS (expr); link; ++i, link = TREE_CHAIN (link))
+  link_next = NULL_TREE;
+  for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
     {
-      constraint
-       = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
+      link_next = TREE_CHAIN (link);
+      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
       parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                              oconstraints, &allows_mem, &allows_reg);
 
@@ -4455,13 +4772,27 @@ gimplify_asm_expr (tree *expr_p, tree *pre_p, tree *post_p)
          if (tret == GS_ERROR)
            ret = tret;
        }
+
+      TREE_CHAIN (link) = NULL_TREE;
+      VEC_safe_push (tree, gc, inputs, link);
     }
+  
+  for (link = ASM_CLOBBERS (expr); link; ++i, link = TREE_CHAIN (link))
+      VEC_safe_push (tree, gc, clobbers, link);
+    
+  stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
+                               inputs, outputs, clobbers);
+
+  gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
+  gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
+
+  gimplify_seq_add_stmt (pre_p, stmt);
 
   return ret;
 }
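A minimal sketch of the tuple assembled above, for something like
__asm__ __volatile__ ("" : "=r" (x) : "r" (y) : "memory"); the *_link
variables stand in for the detached TREE_LIST operands and are illustrative
only, while the VEC and gimple_* calls are the ones used above:

   VEC(tree, gc) *ins = NULL, *outs = NULL, *clobs = NULL;
   VEC_safe_push (tree, gc, outs, output_link);    /* "=r" (x)  */
   VEC_safe_push (tree, gc, ins, input_link);      /* "r" (y)   */
   VEC_safe_push (tree, gc, clobs, clobber_link);  /* "memory"  */
   gimple s = gimple_build_asm_vec ("", ins, outs, clobs);
   gimple_asm_set_volatile (s, true);
   gimplify_seq_add_stmt (pre_p, s);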
 
 /* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
-   WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
+   GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
    gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
    return to this function.
 
@@ -4475,10 +4806,10 @@ gimplify_asm_expr (tree *expr_p, tree *pre_p, tree *post_p)
    Thing.  */
 
 static enum gimplify_status
-gimplify_cleanup_point_expr (tree *expr_p, tree *pre_p)
+gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
 {
-  tree_stmt_iterator iter;
-  tree body;
+  gimple_stmt_iterator iter;
+  gimple_seq body_sequence = NULL;
 
   tree temp = voidify_wrapper_expr (*expr_p, NULL);
 
@@ -4486,74 +4817,79 @@ gimplify_cleanup_point_expr (tree *expr_p, tree *pre_p)
      CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
      any cleanups collected outside the CLEANUP_POINT_EXPR.  */
   int old_conds = gimplify_ctxp->conditions;
-  tree old_cleanups = gimplify_ctxp->conditional_cleanups;
+  gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
   gimplify_ctxp->conditions = 0;
-  gimplify_ctxp->conditional_cleanups = NULL_TREE;
+  gimplify_ctxp->conditional_cleanups = NULL;
 
-  body = TREE_OPERAND (*expr_p, 0);
-  gimplify_to_stmt_list (&body);
+  gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
 
   gimplify_ctxp->conditions = old_conds;
   gimplify_ctxp->conditional_cleanups = old_cleanups;
 
-  for (iter = tsi_start (body); !tsi_end_p (iter); )
+  for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
     {
-      tree *wce_p = tsi_stmt_ptr (iter);
-      tree wce = *wce_p;
+      gimple wce = gsi_stmt (iter);
 
-      if (TREE_CODE (wce) == WITH_CLEANUP_EXPR)
+      if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
        {
-         if (tsi_one_before_end_p (iter))
+         if (gsi_one_before_end_p (iter))
            {
-             tsi_link_before (&iter, TREE_OPERAND (wce, 0), TSI_SAME_STMT);
-             tsi_delink (&iter);
+              /* Note that gsi_insert_seq_before and gsi_remove do not
+                 scan operands, unlike some other sequence mutators.  */
+             gsi_insert_seq_before_without_update (&iter,
+                                                    gimple_wce_cleanup (wce),
+                                                    GSI_SAME_STMT);
+             gsi_remove (&iter, true);
              break;
            }
          else
            {
-             tree sl, tfe;
-             enum tree_code code;
+             gimple try;
+             gimple_seq seq;
+             enum gimple_try_flags kind;
 
-             if (CLEANUP_EH_ONLY (wce))
-               code = TRY_CATCH_EXPR;
+             if (gimple_wce_cleanup_eh_only (wce))
+               kind = GIMPLE_TRY_CATCH;
              else
-               code = TRY_FINALLY_EXPR;
-
-             sl = tsi_split_statement_list_after (&iter);
-             tfe = build2 (code, void_type_node, sl, NULL_TREE);
-             append_to_statement_list (TREE_OPERAND (wce, 0),
-                                       &TREE_OPERAND (tfe, 1));
-             *wce_p = tfe;
-             iter = tsi_start (sl);
+               kind = GIMPLE_TRY_FINALLY;
+             seq = gsi_split_seq_after (iter);
+
+             try = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
+              /* Do not use gsi_replace here, as it may scan operands.
+                 We want to do a simple structural modification only.  */
+              *gsi_stmt_ptr (&iter) = try;
+             iter = gsi_start (seq);
            }
        }
       else
-       tsi_next (&iter);
+       gsi_next (&iter);
     }
 
+  gimplify_seq_add_seq (pre_p, body_sequence);
   if (temp)
     {
       *expr_p = temp;
-      append_to_statement_list (body, pre_p);
       return GS_OK;
     }
   else
     {
-      *expr_p = body;
+      *expr_p = NULL;
       return GS_ALL_DONE;
     }
 }
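Condensed, the non-tail branch of the loop above performs this rewrite (WCE,
ITER and KIND as in the loop; a sketch only):

   /* Before:  ... WCE <cleanup>; s1; s2; ...
      After:   ... try { s1; s2; ... } finally { cleanup }  */
   gimple_seq tail = gsi_split_seq_after (iter);
   gimple t = gimple_build_try (tail, gimple_wce_cleanup (wce), kind);
   *gsi_stmt_ptr (&iter) = t;   /* structural replacement, no operand scan.  */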
 
 /* Insert a cleanup marker for gimplify_cleanup_point_expr.  CLEANUP
-   is the cleanup action required.  */
+   is the cleanup action required.  EH_ONLY is true if the cleanup should
+   only be executed if an exception is thrown, not on normal exit.  */
 
 static void
-gimple_push_cleanup (tree var, tree cleanup, bool eh_only, tree *pre_p)
+gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
 {
-  tree wce;
+  gimple wce;
+  gimple_seq cleanup_stmts = NULL;
 
   /* Errors can result in improperly nested cleanups.  Which results in
-     confusion when trying to resolve the WITH_CLEANUP_EXPR.  */
+     confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
   if (errorcount || sorrycount)
     return;
 
@@ -4579,15 +4915,17 @@ gimple_push_cleanup (tree var, tree cleanup, bool eh_only, tree *pre_p)
           }
           val
       */
-
       tree flag = create_tmp_var (boolean_type_node, "cleanup");
-      tree ffalse = build_gimple_modify_stmt (flag, boolean_false_node);
-      tree ftrue = build_gimple_modify_stmt (flag, boolean_true_node);
+      gimple ffalse = gimple_build_assign (flag, boolean_false_node);
+      gimple ftrue = gimple_build_assign (flag, boolean_true_node);
+
       cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
-      wce = build1 (WITH_CLEANUP_EXPR, void_type_node, cleanup);
-      append_to_statement_list (ffalse, &gimplify_ctxp->conditional_cleanups);
-      append_to_statement_list (wce, &gimplify_ctxp->conditional_cleanups);
-      append_to_statement_list (ftrue, pre_p);
+      gimplify_stmt (&cleanup, &cleanup_stmts);
+      wce = gimple_build_wce (cleanup_stmts);
+
+      gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
+      gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
+      gimplify_seq_add_stmt (pre_p, ftrue);
 
       /* Because of this manipulation, and the EH edges that jump
         threading cannot redirect, the temporary (VAR) will appear
@@ -4596,18 +4934,17 @@ gimple_push_cleanup (tree var, tree cleanup, bool eh_only, tree *pre_p)
     }
   else
     {
-      wce = build1 (WITH_CLEANUP_EXPR, void_type_node, cleanup);
-      CLEANUP_EH_ONLY (wce) = eh_only;
-      append_to_statement_list (wce, pre_p);
+      gimplify_stmt (&cleanup, &cleanup_stmts);
+      wce = gimple_build_wce (cleanup_stmts);
+      gimple_wce_set_cleanup_eh_only (wce, eh_only);
+      gimplify_seq_add_stmt (pre_p, wce);
     }
-
-  gimplify_stmt (&TREE_OPERAND (wce, 0));
 }
 
 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.  */
 
 static enum gimplify_status
-gimplify_target_expr (tree *expr_p, tree *pre_p, tree *post_p)
+gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
 {
   tree targ = *expr_p;
   tree temp = TARGET_EXPR_SLOT (targ);
@@ -4633,9 +4970,11 @@ gimplify_target_expr (tree *expr_p, tree *pre_p, tree *post_p)
        ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
       else
        {
-         init = build2 (INIT_EXPR, void_type_node, temp, init);
-         ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt,
-                              fb_none);
+         tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
+         init = init_expr;
+         ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
+         init = NULL;
+         ggc_free (init_expr);
        }
       if (ret == GS_ERROR)
        {
@@ -4643,15 +4982,13 @@ gimplify_target_expr (tree *expr_p, tree *pre_p, tree *post_p)
          TARGET_EXPR_INITIAL (targ) = NULL_TREE;
          return GS_ERROR;
        }
-      append_to_statement_list (init, pre_p);
+      if (init)
+       gimplify_and_add (init, pre_p);
 
       /* If needed, push the cleanup for the temp.  */
       if (TARGET_EXPR_CLEANUP (targ))
-       {
-         gimplify_stmt (&TARGET_EXPR_CLEANUP (targ));
-         gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
-                              CLEANUP_EH_ONLY (targ), pre_p);
-       }
+       gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
+                            CLEANUP_EH_ONLY (targ), pre_p);
 
       /* Only expand this once.  */
       TREE_OPERAND (targ, 3) = init;
@@ -4667,29 +5004,23 @@ gimplify_target_expr (tree *expr_p, tree *pre_p, tree *post_p)
 
 /* Gimplification of expression trees.  */
 
-/* Gimplify an expression which appears at statement context; usually, this
-   means replacing it with a suitably gimple STATEMENT_LIST.  */
+/* Gimplify an expression which appears at statement context.  The
+   corresponding GIMPLE statements are added to *SEQ_P.  If *SEQ_P is
+   NULL, a new sequence is allocated.
 
-void
-gimplify_stmt (tree *stmt_p)
+   Return true if we actually added a statement to the queue.  */
+
+bool
+gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
 {
-  gimplify_expr (stmt_p, NULL, NULL, is_gimple_stmt, fb_none);
-}
+  gimple_seq_node last;
 
-/* Similarly, but force the result to be a STATEMENT_LIST.  */
+  if (!*seq_p)
+    *seq_p = gimple_seq_alloc ();
 
-void
-gimplify_to_stmt_list (tree *stmt_p)
-{
-  gimplify_stmt (stmt_p);
-  if (!*stmt_p)
-    *stmt_p = alloc_stmt_list ();
-  else if (TREE_CODE (*stmt_p) != STATEMENT_LIST)
-    {
-      tree t = *stmt_p;
-      *stmt_p = alloc_stmt_list ();
-      append_to_statement_list (t, stmt_p);
-    }
+  last = gimple_seq_last (*seq_p);
+  gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
+  return last != gimple_seq_last (*seq_p);
 }
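A rough caller-side sketch of the new interface; lower_body_sketch is a
hypothetical helper, while gimplify_stmt, gimplify_seq_add_seq and gimple_seq
are the entities used above:

   static void
   lower_body_sketch (tree body, gimple_seq *pre_p)
   {
     gimple_seq seq = NULL;            /* allocated on demand by gimplify_stmt.  */
     if (gimplify_stmt (&body, &seq))  /* true iff statements were emitted.  */
       gimplify_seq_add_seq (pre_p, seq);
   }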
 
 
@@ -5078,7 +5409,7 @@ omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
    and previous omp contexts.  */
 
 static void
-gimplify_scan_omp_clauses (tree *list_p, tree *pre_p,
+gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
                           enum omp_region_type region_type)
 {
   struct gimplify_omp_ctx *ctx, *outer_ctx;
@@ -5090,7 +5421,6 @@ gimplify_scan_omp_clauses (tree *list_p, tree *pre_p,
 
   while ((c = *list_p) != NULL)
     {
-      enum gimplify_status gs;
       bool remove = false;
       bool notice_outer = true;
       const char *check_non_private = NULL;
@@ -5140,11 +5470,22 @@ gimplify_scan_omp_clauses (tree *list_p, tree *pre_p,
                                GOVD_LOCAL | GOVD_SEEN);
              gimplify_omp_ctxp = ctx;
              push_gimplify_context (&gctx);
-             gimplify_stmt (&OMP_CLAUSE_REDUCTION_INIT (c));
-             pop_gimplify_context (OMP_CLAUSE_REDUCTION_INIT (c));
+
+             OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = gimple_seq_alloc ();
+             OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = gimple_seq_alloc ();
+
+             gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
+                               &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
+             pop_gimplify_context
+               (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
              push_gimplify_context (&gctx);
-             gimplify_stmt (&OMP_CLAUSE_REDUCTION_MERGE (c));
-             pop_gimplify_context (OMP_CLAUSE_REDUCTION_MERGE (c));
+             gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
+                               &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
+             pop_gimplify_context 
+               (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
+             OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
+             OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
+
              gimplify_omp_ctxp = outer_ctx;
            }
          else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
@@ -5160,8 +5501,12 @@ gimplify_scan_omp_clauses (tree *list_p, tree *pre_p,
                  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
                  OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
                }
-             gimplify_stmt (&OMP_CLAUSE_LASTPRIVATE_STMT (c));
-             pop_gimplify_context (OMP_CLAUSE_LASTPRIVATE_STMT (c));
+             gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
+                               &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
+             pop_gimplify_context
+               (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
+             OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
+
              gimplify_omp_ctxp = outer_ctx;
            }
          if (notice_outer)
@@ -5196,10 +5541,9 @@ gimplify_scan_omp_clauses (tree *list_p, tree *pre_p,
 
        case OMP_CLAUSE_SCHEDULE:
        case OMP_CLAUSE_NUM_THREADS:
-         gs = gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
-                             is_gimple_val, fb_rvalue);
-         if (gs == GS_ERROR)
-           remove = true;
+         if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
+                            is_gimple_val, fb_rvalue) == GS_ERROR)
+             remove = true;
          break;
 
        case OMP_CLAUSE_NOWAIT:
@@ -5370,10 +5714,12 @@ gimplify_adjust_omp_clauses (tree *list_p)
    variables.  We need to do this scan now, because variable-sized
    decls will be decomposed during gimplification.  */
 
-static enum gimplify_status
-gimplify_omp_parallel (tree *expr_p, tree *pre_p)
+static void
+gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
 {
   tree expr = *expr_p;
+  gimple g;
+  gimple_seq body = NULL;
   struct gimplify_ctx gctx;
 
   gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
@@ -5383,16 +5729,21 @@ gimplify_omp_parallel (tree *expr_p, tree *pre_p)
 
   push_gimplify_context (&gctx);
 
-  gimplify_stmt (&OMP_PARALLEL_BODY (expr));
-
-  if (TREE_CODE (OMP_PARALLEL_BODY (expr)) == BIND_EXPR)
-    pop_gimplify_context (OMP_PARALLEL_BODY (expr));
+  g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
+  if (gimple_code (g) == GIMPLE_BIND)
+    pop_gimplify_context (g);
   else
-    pop_gimplify_context (NULL_TREE);
+    pop_gimplify_context (NULL);
 
   gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
 
-  return GS_ALL_DONE;
+  g = gimple_build_omp_parallel (body,
+                                OMP_PARALLEL_CLAUSES (expr),
+                                NULL_TREE, NULL_TREE);
+  if (OMP_PARALLEL_COMBINED (expr))
+    gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
+  gimplify_seq_add_stmt (pre_p, g);
+  *expr_p = NULL_TREE;
 }
 
 /* Gimplify the contents of an OMP_TASK statement.  This involves
@@ -5400,36 +5751,43 @@ gimplify_omp_parallel (tree *expr_p, tree *pre_p)
    variables.  We need to do this scan now, because variable-sized
    decls will be decomposed during gimplification.  */
 
-static enum gimplify_status
-gimplify_omp_task (tree *expr_p, tree *pre_p)
+static void
+gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
 {
   tree expr = *expr_p;
+  gimple g;
+  gimple_seq body = NULL;
   struct gimplify_ctx gctx;
 
   gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p, ORT_TASK);
 
   push_gimplify_context (&gctx);
 
-  gimplify_stmt (&OMP_TASK_BODY (expr));
-
-  if (TREE_CODE (OMP_TASK_BODY (expr)) == BIND_EXPR)
-    pop_gimplify_context (OMP_TASK_BODY (expr));
+  g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
+  if (gimple_code (g) == GIMPLE_BIND)
+    pop_gimplify_context (g);
   else
-    pop_gimplify_context (NULL_TREE);
+    pop_gimplify_context (NULL);
 
   gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr));
 
-  return GS_ALL_DONE;
+  g = gimple_build_omp_task (body,
+                            OMP_TASK_CLAUSES (expr),
+                            NULL_TREE, NULL_TREE,
+                            NULL_TREE, NULL_TREE, NULL_TREE);
+  gimplify_seq_add_stmt (pre_p, g);
+  *expr_p = NULL_TREE;
 }
 
 /* Gimplify the gross structure of an OMP_FOR statement.  */
 
 static enum gimplify_status
-gimplify_omp_for (tree *expr_p, tree *pre_p)
+gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
 {
-  tree for_stmt, decl, var, t, bodylist;
+  tree for_stmt, decl, var, t;
   enum gimplify_status ret = GS_OK;
-  tree body, init_decl = NULL_TREE;
+  gimple gfor;
+  gimple_seq for_body, for_pre_body;
   int i;
 
   for_stmt = *expr_p;
@@ -5437,12 +5795,12 @@ gimplify_omp_for (tree *expr_p, tree *pre_p)
   gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
                             ORT_WORKSHARE);
 
-  /* If OMP_FOR is re-gimplified, ensure all variables in pre-body
-     are noticed.  */
-  gimplify_stmt (&OMP_FOR_PRE_BODY (for_stmt));
-
-  bodylist = alloc_stmt_list ();
+  /* Handle OMP_FOR_INIT.  */
+  for_pre_body = NULL;
+  gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
+  OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
 
+  for_body = gimple_seq_alloc ();
   gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
              == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
   gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
@@ -5450,9 +5808,8 @@ gimplify_omp_for (tree *expr_p, tree *pre_p)
   for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
     {
       t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
-      gcc_assert (TREE_CODE (t) == MODIFY_EXPR
-                 || TREE_CODE (t) == GIMPLE_MODIFY_STMT);
-      decl = GENERIC_TREE_OPERAND (t, 0);
+      gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
+      decl = TREE_OPERAND (t, 0);
       gcc_assert (DECL_P (decl));
       gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
                  || POINTER_TYPE_P (TREE_TYPE (decl)));
@@ -5469,30 +5826,29 @@ gimplify_omp_for (tree *expr_p, tree *pre_p)
       if (!is_gimple_reg (decl))
        {
          var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
-         GENERIC_TREE_OPERAND (t, 0) = var;
+         TREE_OPERAND (t, 0) = var;
+                              
+         gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
 
-         init_decl = build_gimple_modify_stmt (decl, var);
          omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
        }
       else
        var = decl;
 
-      ret |= gimplify_expr (&GENERIC_TREE_OPERAND (t, 1),
-                           &OMP_FOR_PRE_BODY (for_stmt),
-                           NULL, is_gimple_val, fb_rvalue);
-
-      tree_to_gimple_tuple (&TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i));
+      ret |= gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
+                           is_gimple_val, fb_rvalue);
+      if (ret == GS_ERROR)
+       return ret;
 
+      /* Handle OMP_FOR_COND.  */
       t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
       gcc_assert (COMPARISON_CLASS_P (t));
-      gcc_assert (GENERIC_TREE_OPERAND (t, 0) == decl);
-      TREE_OPERAND (t, 0) = var;
+      gcc_assert (TREE_OPERAND (t, 0) == decl);
 
-      ret |= gimplify_expr (&GENERIC_TREE_OPERAND (t, 1),
-                           &OMP_FOR_PRE_BODY (for_stmt),
-                           NULL, is_gimple_val, fb_rvalue);
+      ret |= gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
+                           is_gimple_val, fb_rvalue);
 
-      tree_to_gimple_tuple (&TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i));
+      /* Handle OMP_FOR_INCR.  */
       t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
       switch (TREE_CODE (t))
        {
@@ -5500,7 +5856,7 @@ gimplify_omp_for (tree *expr_p, tree *pre_p)
        case POSTINCREMENT_EXPR:
          t = build_int_cst (TREE_TYPE (decl), 1);
          t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
-         t = build_gimple_modify_stmt (var, t);
+         t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
          TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
          break;
 
@@ -5508,15 +5864,15 @@ gimplify_omp_for (tree *expr_p, tree *pre_p)
        case POSTDECREMENT_EXPR:
          t = build_int_cst (TREE_TYPE (decl), -1);
          t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
-         t = build_gimple_modify_stmt (var, t);
+         t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
          TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
          break;
 
-       case GIMPLE_MODIFY_STMT:
-         gcc_assert (GIMPLE_STMT_OPERAND (t, 0) == decl);
-         GIMPLE_STMT_OPERAND (t, 0) = var;
+       case MODIFY_EXPR:
+         gcc_assert (TREE_OPERAND (t, 0) == decl);
+         TREE_OPERAND (t, 0) = var;
 
-         t = GIMPLE_STMT_OPERAND (t, 1);
+         t = TREE_OPERAND (t, 1);
          switch (TREE_CODE (t))
            {
            case PLUS_EXPR:
@@ -5537,64 +5893,84 @@ gimplify_omp_for (tree *expr_p, tree *pre_p)
              gcc_unreachable ();
            }
 
-         ret |= gimplify_expr (&TREE_OPERAND (t, 1),
-                               &OMP_FOR_PRE_BODY (for_stmt),
-                               NULL, is_gimple_val, fb_rvalue);
+         ret |= gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
+                               is_gimple_val, fb_rvalue);
          break;
 
        default:
          gcc_unreachable ();
        }
 
-      if (init_decl)
-       append_to_statement_list (init_decl, &bodylist);
-
       if (var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
        {
          tree c;
          for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
-         if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
-             && OMP_CLAUSE_DECL (c) == decl
-             && OMP_CLAUSE_LASTPRIVATE_STMT (c) == NULL)
-           {
-             t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
-             gcc_assert (TREE_CODE (t) == GIMPLE_MODIFY_STMT);
-             gcc_assert (GIMPLE_STMT_OPERAND (t, 0) == var);
-             t = GIMPLE_STMT_OPERAND (t, 1);
-             gcc_assert (TREE_CODE (t) == PLUS_EXPR
-                         || TREE_CODE (t) == MINUS_EXPR
-                         || TREE_CODE (t) == POINTER_PLUS_EXPR);
-             gcc_assert (TREE_OPERAND (t, 0) == var);
-             t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
-                         TREE_OPERAND (t, 1));
-             OMP_CLAUSE_LASTPRIVATE_STMT (c)
-               = build_gimple_modify_stmt (decl, t);
+           if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
+               && OMP_CLAUSE_DECL (c) == decl
+               && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
+             {
+               t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
+               gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
+               gcc_assert (TREE_OPERAND (t, 0) == var);
+               t = TREE_OPERAND (t, 1);
+               gcc_assert (TREE_CODE (t) == PLUS_EXPR
+                           || TREE_CODE (t) == MINUS_EXPR
+                           || TREE_CODE (t) == POINTER_PLUS_EXPR);
+               gcc_assert (TREE_OPERAND (t, 0) == var);
+               t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
+                           TREE_OPERAND (t, 1));
+               gimplify_assign (decl, t,
+                                &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
            }
        }
     }
 
-  body = OMP_FOR_BODY (for_stmt);
-  gimplify_to_stmt_list (&body);
-  append_to_statement_list (body, &bodylist);
-  OMP_FOR_BODY (for_stmt) = bodylist;
+  gimplify_and_add (OMP_FOR_BODY (for_stmt), &for_body);
+
   gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));
 
+  gfor = gimple_build_omp_for (for_body, OMP_FOR_CLAUSES (for_stmt),
+                              TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
+                              for_pre_body);
+
+  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
+    {
+      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
+      gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
+      gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
+      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
+      gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
+      gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
+      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
+      gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
+    }
+
+  gimplify_seq_add_stmt (pre_p, gfor);
   return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
 }
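For a single collapse level such as "for (i = 0; i < n; i++)", the setter loop
above records roughly the following (GFOR as built above; i_decl and n_expr
are illustrative placeholders):

   gimple_omp_for_set_index   (gfor, 0, i_decl);                       /* i      */
   gimple_omp_for_set_initial (gfor, 0, build_int_cst (TREE_TYPE (i_decl), 0));
   gimple_omp_for_set_cond    (gfor, 0, LT_EXPR);                      /* i < n  */
   gimple_omp_for_set_final   (gfor, 0, n_expr);
   gimple_omp_for_set_incr    (gfor, 0,                                /* i + 1  */
                               build2 (PLUS_EXPR, TREE_TYPE (i_decl), i_decl,
                                       build_int_cst (TREE_TYPE (i_decl), 1)));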
 
 /* Gimplify the gross structure of other OpenMP worksharing constructs.
    In particular, OMP_SECTIONS and OMP_SINGLE.  */
 
-static enum gimplify_status
-gimplify_omp_workshare (tree *expr_p, tree *pre_p)
+static void
+gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
 {
-  tree stmt = *expr_p;
+  tree expr = *expr_p;
+  gimple stmt;
+  gimple_seq body = NULL;
 
-  gimplify_scan_omp_clauses (&OMP_CLAUSES (stmt), pre_p, ORT_WORKSHARE);
-  gimplify_to_stmt_list (&OMP_BODY (stmt));
-  gimplify_adjust_omp_clauses (&OMP_CLAUSES (stmt));
+  gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ORT_WORKSHARE);
+  gimplify_and_add (OMP_BODY (expr), &body);
+  gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr));
 
-  return GS_ALL_DONE;
+  if (TREE_CODE (expr) == OMP_SECTIONS)
+    stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
+  else if (TREE_CODE (expr) == OMP_SINGLE)
+    stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
+  else
+    gcc_unreachable ();
+
+  gimplify_seq_add_stmt (pre_p, stmt);
 }
 
 /* A subroutine of gimplify_omp_atomic.  The front end is supposed to have
@@ -5644,7 +6020,8 @@ goa_lhs_expr_p (tree expr, tree addr)
    a subexpression, 0 if it did not, or -1 if an error was encountered.  */
 
 static int
-goa_stabilize_expr (tree *expr_p, tree *pre_p, tree lhs_addr, tree lhs_var)
+goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
+                   tree lhs_var)
 {
   tree expr = *expr_p;
   int saw_lhs;
@@ -5661,11 +6038,11 @@ goa_stabilize_expr (tree *expr_p, tree *pre_p, tree lhs_addr, tree lhs_var)
   switch (TREE_CODE_CLASS (TREE_CODE (expr)))
     {
     case tcc_binary:
-      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
-                                    lhs_addr, lhs_var);
+      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
+                                    lhs_var);
     case tcc_unary:
-      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
-                                    lhs_addr, lhs_var);
+      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
+                                    lhs_var);
       break;
     default:
       break;
@@ -5682,15 +6059,16 @@ goa_stabilize_expr (tree *expr_p, tree *pre_p, tree lhs_addr, tree lhs_var)
   return saw_lhs;
 }
 
+
 /* Gimplify an OMP_ATOMIC statement.  */
 
 static enum gimplify_status
-gimplify_omp_atomic (tree *expr_p, tree *pre_p)
+gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
 {
   tree addr = TREE_OPERAND (*expr_p, 0);
   tree rhs = TREE_OPERAND (*expr_p, 1);
   tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
-  tree tmp_load, load, store;
+  tree tmp_load;
 
    tmp_load = create_tmp_var (type, NULL);
    if (goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
@@ -5700,74 +6078,170 @@ gimplify_omp_atomic (tree *expr_p, tree *pre_p)
        != GS_ALL_DONE)
      return GS_ERROR;
 
-   load = build2 (OMP_ATOMIC_LOAD, void_type_node, tmp_load, addr);
-   append_to_statement_list (load, pre_p);
+   gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_load (tmp_load, addr));
    if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
        != GS_ALL_DONE)
      return GS_ERROR;
-   store = build1 (OMP_ATOMIC_STORE, void_type_node, rhs);
-   *expr_p = store;
+   gimplify_seq_add_stmt (pre_p, gimple_build_omp_atomic_store (rhs));
+   *expr_p = NULL;
 
    return GS_ALL_DONE;
-
 }
 
-/*  Gimplifies the expression tree pointed to by EXPR_P.  Return 0 if
-    gimplification failed.
-
-    PRE_P points to the list where side effects that must happen before
-       EXPR should be stored.
-
-    POST_P points to the list where side effects that must happen after
-       EXPR should be stored, or NULL if there is no suitable list.  In
-       that case, we copy the result to a temporary, emit the
-       post-effects, and then return the temporary.
-
-    GIMPLE_TEST_F points to a function that takes a tree T and
-       returns nonzero if T is in the GIMPLE form requested by the
-       caller.  The GIMPLE predicates are in tree-gimple.c.
-
-       This test is used twice.  Before gimplification, the test is
-       invoked to determine whether *EXPR_P is already gimple enough.  If
-       that fails, *EXPR_P is gimplified according to its code and
-       GIMPLE_TEST_F is called again.  If the test still fails, then a new
-       temporary variable is created and assigned the value of the
-       gimplified expression.
 
-    FALLBACK tells the function what sort of a temporary we want.  If the 1
-       bit is set, an rvalue is OK.  If the 2 bit is set, an lvalue is OK.
-       If both are set, either is OK, but an lvalue is preferable.
-
-    The return value is either GS_ERROR or GS_ALL_DONE, since this function
-    iterates until solution.  */
+/* Converts the GENERIC expression tree *EXPR_P to GIMPLE.  If the
+   expression produces a value to be used as an operand inside a GIMPLE
+   statement, the value will be stored back in *EXPR_P.  This value will
+   be a tree of class tcc_declaration, tcc_constant, tcc_reference or
+   an SSA_NAME.  The corresponding sequence of GIMPLE statements is
+   emitted in PRE_P and POST_P.
+
+   Additionally, this process may overwrite parts of the input
+   expression during gimplification.  Ideally, it should be
+   possible to do non-destructive gimplification.
+
+   EXPR_P points to the GENERIC expression to convert to GIMPLE.  If
+      the expression needs to evaluate to a value to be used as
+      an operand in a GIMPLE statement, this value will be stored in
+      *EXPR_P on exit.  This happens when the caller specifies one
+      of fb_lvalue or fb_rvalue fallback flags.
+
+   PRE_P will contain the sequence of GIMPLE statements corresponding
+       to the evaluation of EXPR and all the side-effects that must
+       be executed before the main expression.  On exit, the last
+       statement of PRE_P is the core statement being gimplified.  For
+       instance, when gimplifying 'if (++a)' the last statement in
+       PRE_P will be 'if (t.1)' where t.1 is the result of
+       pre-incrementing 'a'.
+
+   POST_P will contain the sequence of GIMPLE statements corresponding
+       to the evaluation of all the side-effects that must be executed
+       after the main expression.  If this is NULL, the post
+       side-effects are stored at the end of PRE_P.
+
+       The reason why the output is split in two is to handle post
+       side-effects explicitly.  In some cases, an expression may have
+       inner and outer post side-effects which need to be emitted in
+       an order different from the one given by the recursive
+       traversal.  For instance, for the expression (*p--)++ the post
+       side-effects of '--' must actually occur *after* the post
+       side-effects of '++'.  However, gimplification will first visit
+       the inner expression, so if a separate POST sequence was not
+       used, the resulting sequence would be:
+
+                   1   t.1 = *p
+                   2   p = p - 1
+                   3   t.2 = t.1 + 1
+                   4   *p = t.2
+
+       However, the post-decrement operation in line #2 must not be
+       evaluated until after the store to *p at line #4, so the
+       correct sequence should be:
+
+                   1   t.1 = *p
+                   2   t.2 = t.1 + 1
+                   3   *p = t.2
+                   4   p = p - 1
+
+       So, by specifying a separate post queue, it is possible
+       to emit the post side-effects in the correct order.
+       If POST_P is NULL, an internal queue will be used.  Before
+       returning to the caller, the sequence POST_P is appended to
+       the main output sequence PRE_P.
+
+   GIMPLE_TEST_F points to a function that takes a tree T and
+       returns nonzero if T is in the GIMPLE form requested by the
+       caller.  The GIMPLE predicates are in tree-gimple.c.
+
+   FALLBACK tells the function what sort of a temporary we want if
+       gimplification cannot produce an expression that complies with
+       GIMPLE_TEST_F.
+
+       fb_none means that no temporary should be generated
+       fb_rvalue means that an rvalue is OK to generate
+       fb_lvalue means that an lvalue is OK to generate
+       fb_either means that either is OK, but an lvalue is preferable.
+       fb_mayfail means that gimplification may fail (in which case
+       GS_ERROR will be returned)
+
+   The return value is either GS_ERROR or GS_ALL_DONE, since this
+   function iterates until EXPR is completely gimplified or an error
+   occurs.  */
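A typical caller-side use, mirroring the calls made elsewhere in this file
(illustrative only):

   if (gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                      is_gimple_val, fb_rvalue) == GS_ERROR)
     return GS_ERROR;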
 
 enum gimplify_status
-gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
-              bool (* gimple_test_f) (tree), fallback_t fallback)
+gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
+              bool (*gimple_test_f) (tree), fallback_t fallback)
 {
   tree tmp;
-  tree internal_pre = NULL_TREE;
-  tree internal_post = NULL_TREE;
+  gimple_seq internal_pre = NULL;
+  gimple_seq internal_post = NULL;
   tree save_expr;
-  int is_statement = (pre_p == NULL);
+  bool is_statement;
   location_t saved_location;
   enum gimplify_status ret;
+  gimple_stmt_iterator pre_last_gsi, post_last_gsi;
 
   save_expr = *expr_p;
   if (save_expr == NULL_TREE)
     return GS_ALL_DONE;
 
+  /* If we are gimplifying a top-level statement, PRE_P must be valid.  */
+  is_statement = gimple_test_f == is_gimple_stmt;
+  if (is_statement)
+    gcc_assert (pre_p);
+
+  /* Consistency checks.  */
+  if (gimple_test_f == is_gimple_reg)
+    gcc_assert (fallback & (fb_rvalue | fb_lvalue));
+  else if (gimple_test_f == is_gimple_val
+           || gimple_test_f == is_gimple_formal_tmp_rhs
+           || gimple_test_f == is_gimple_formal_tmp_or_call_rhs
+           || gimple_test_f == is_gimple_formal_tmp_reg
+           || gimple_test_f == is_gimple_formal_tmp_var
+           || gimple_test_f == is_gimple_call_addr
+           || gimple_test_f == is_gimple_condexpr
+           || gimple_test_f == is_gimple_mem_rhs
+           || gimple_test_f == is_gimple_mem_or_call_rhs
+           || gimple_test_f == is_gimple_reg_rhs
+           || gimple_test_f == is_gimple_reg_or_call_rhs
+           || gimple_test_f == is_gimple_asm_val)
+    gcc_assert (fallback & fb_rvalue);
+  else if (gimple_test_f == is_gimple_min_lval
+          || gimple_test_f == is_gimple_lvalue)
+    gcc_assert (fallback & fb_lvalue);
+  else if (gimple_test_f == is_gimple_addressable)
+    gcc_assert (fallback & fb_either);
+  else if (gimple_test_f == is_gimple_stmt)
+    gcc_assert (fallback == fb_none);
+  else
+    {
+      /* We should have recognized the GIMPLE_TEST_F predicate to
+        know what kind of fallback to use in case a temporary is
+        needed to hold the value or address of *EXPR_P.  */
+      gcc_unreachable ();
+    }
+
   /* We used to check the predicate here and return immediately if it
      succeeds.  This is wrong; the design is for gimplification to be
      idempotent, and for the predicates to only test for valid forms, not
      whether they are fully simplified.  */
-
-  /* Set up our internal queues if needed.  */
   if (pre_p == NULL)
     pre_p = &internal_pre;
+
   if (post_p == NULL)
     post_p = &internal_post;
 
+  /* Remember the last statements added to PRE_P and POST_P.  Every
+     new statement added by the gimplification helpers needs to be
+     annotated with location information.  To centralize the
+     responsibility, we remember the last statement that had been
+     added to both queues before gimplifying *EXPR_P.  If
+     gimplification produces new statements in PRE_P and POST_P, those
+     statements will be annotated with the same location information
+     as *EXPR_P.  */
+  pre_last_gsi = gsi_last (*pre_p);
+  post_last_gsi = gsi_last (*post_p);
+
   saved_location = input_location;
   if (save_expr != error_mark_node
       && EXPR_HAS_LOCATION (*expr_p))
@@ -5786,8 +6260,7 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
 
       /* Die, die, die, my darling.  */
       if (save_expr == error_mark_node
-         || (!GIMPLE_STMT_P (save_expr)
-             && TREE_TYPE (save_expr)
+         || (TREE_TYPE (save_expr)
              && TREE_TYPE (save_expr) == error_mark_node))
        {
          ret = GS_ERROR;
@@ -5831,6 +6304,7 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
 
        case COND_EXPR:
          ret = gimplify_cond_expr (expr_p, pre_p, fallback);
+
          /* C99 code may assign to an array in a structure value of a
             conditional expression, and this has undefined behavior
             only on execution, so create a temporary if an lvalue is
@@ -5844,6 +6318,7 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
 
        case CALL_EXPR:
          ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
+
          /* C99 code may assign to an array in a structure returned
             from a function, and this has undefined behavior only on
             execution, so create a temporary if an lvalue is
@@ -5863,23 +6338,9 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
          break;
 
        case MODIFY_EXPR:
-       case GIMPLE_MODIFY_STMT:
        case INIT_EXPR:
          ret = gimplify_modify_expr (expr_p, pre_p, post_p,
                                      fallback != fb_none);
-
-         if (*expr_p)
-           {
-             /* The distinction between MODIFY_EXPR and INIT_EXPR is no longer
-                useful.  */
-             if (TREE_CODE (*expr_p) == INIT_EXPR)
-               TREE_SET_CODE (*expr_p, MODIFY_EXPR);
-
-             /* Convert MODIFY_EXPR to GIMPLE_MODIFY_STMT.  */
-             if (TREE_CODE (*expr_p) == MODIFY_EXPR)
-               tree_to_gimple_tuple (expr_p);
-           }
-
          break;
 
        case TRUTH_ANDIF_EXPR:
@@ -5973,7 +6434,7 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
          break;
 
        case DECL_EXPR:
-         ret = gimplify_decl_expr (expr_p);
+         ret = gimplify_decl_expr (expr_p, pre_p);
          break;
 
        case EXC_PTR_EXPR:
@@ -6003,20 +6464,27 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
          if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
            ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
                                 NULL, is_gimple_val, fb_rvalue);
+         gimplify_seq_add_stmt (pre_p,
+                         gimple_build_goto (GOTO_DESTINATION (*expr_p)));
          break;
 
-         /* Predictions are always gimplified.  */
        case PREDICT_EXPR:
-         goto out;
+         gimplify_seq_add_stmt (pre_p,
+                       gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
+                                             PREDICT_EXPR_OUTCOME (*expr_p)));
+         ret = GS_ALL_DONE;
+         break;
 
        case LABEL_EXPR:
          ret = GS_ALL_DONE;
          gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
                      == current_function_decl);
+         gimplify_seq_add_stmt (pre_p,
+                         gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
          break;
 
        case CASE_LABEL_EXPR:
-         ret = gimplify_case_label_expr (expr_p);
+         ret = gimplify_case_label_expr (expr_p, pre_p);
          break;
 
        case RETURN_EXPR:
@@ -6068,12 +6536,12 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
          {
            enum gimplify_status r0, r1, r2;
 
-           r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
-                               is_gimple_lvalue, fb_either);
-           r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
-                               is_gimple_val, fb_rvalue);
-           r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p, post_p,
-                               is_gimple_val, fb_rvalue);
+           r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
+                               post_p, is_gimple_lvalue, fb_either);
+           r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
+                               post_p, is_gimple_val, fb_rvalue);
+           r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
+                               post_p, is_gimple_val, fb_rvalue);
            recalculate_side_effects (*expr_p);
 
            ret = MIN (r0, MIN (r1, r2));
@@ -6090,10 +6558,24 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
 
        case TRY_FINALLY_EXPR:
        case TRY_CATCH_EXPR:
-         gimplify_to_stmt_list (&TREE_OPERAND (*expr_p, 0));
-         gimplify_to_stmt_list (&TREE_OPERAND (*expr_p, 1));
-         ret = GS_ALL_DONE;
-         break;
+         {
+           gimple_seq eval, cleanup;
+           gimple try_;
+
+           eval = cleanup = NULL;
+           gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
+           gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
+           try_ = gimple_build_try (eval, cleanup,
+                                    TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
+                                    ? GIMPLE_TRY_FINALLY
+                                    : GIMPLE_TRY_CATCH);
+           if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
+             gimple_try_set_catch_is_cleanup (try_,
+                                              TRY_CATCH_IS_CLEANUP (*expr_p));
+           gimplify_seq_add_stmt (pre_p, try_);
+           ret = GS_ALL_DONE;
+           break;
+         }
 
        case CLEANUP_POINT_EXPR:
          ret = gimplify_cleanup_point_expr (expr_p, pre_p);
@@ -6104,27 +6586,50 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
          break;
 
        case CATCH_EXPR:
-         gimplify_to_stmt_list (&CATCH_BODY (*expr_p));
-         ret = GS_ALL_DONE;
-         break;
+         {
+           gimple c;
+           gimple_seq handler = NULL;
+           gimplify_and_add (CATCH_BODY (*expr_p), &handler);
+           c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
+           gimplify_seq_add_stmt (pre_p, c);
+           ret = GS_ALL_DONE;
+           break;
+         }
 
        case EH_FILTER_EXPR:
-         gimplify_to_stmt_list (&EH_FILTER_FAILURE (*expr_p));
-         ret = GS_ALL_DONE;
-         break;
+         {
+           gimple ehf;
+           gimple_seq failure = NULL;
+
+           gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
+           ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
+           gimple_eh_filter_set_must_not_throw
+             (ehf, EH_FILTER_MUST_NOT_THROW (*expr_p));
+           gimplify_seq_add_stmt (pre_p, ehf);
+           ret = GS_ALL_DONE;
+           break;
+         }
 
        case CHANGE_DYNAMIC_TYPE_EXPR:
-         ret = gimplify_expr (&CHANGE_DYNAMIC_TYPE_LOCATION (*expr_p),
-                              pre_p, post_p, is_gimple_reg, fb_lvalue);
+         {
+           gimple cdt;
+
+           ret = gimplify_expr (&CHANGE_DYNAMIC_TYPE_LOCATION (*expr_p),
+                                pre_p, post_p, is_gimple_reg, fb_lvalue);
+           cdt = gimple_build_cdt (CHANGE_DYNAMIC_TYPE_NEW_TYPE (*expr_p),
+                                   CHANGE_DYNAMIC_TYPE_LOCATION (*expr_p));
+           gimplify_seq_add_stmt (pre_p, cdt);
+           ret = GS_ALL_DONE;
+         }
          break;
 
        case OBJ_TYPE_REF:
          {
            enum gimplify_status r0, r1;
-           r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p, post_p,
-                               is_gimple_val, fb_rvalue);
-           r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p, post_p,
-                               is_gimple_val, fb_rvalue);
+           r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
+                               post_p, is_gimple_val, fb_rvalue);
+           r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
+                               post_p, is_gimple_val, fb_rvalue);
            ret = MIN (r0, r1);
          }
          break;
@@ -6169,11 +6674,13 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
          break;
 
        case OMP_PARALLEL:
-         ret = gimplify_omp_parallel (expr_p, pre_p);
+         gimplify_omp_parallel (expr_p, pre_p);
+         ret = GS_ALL_DONE;
          break;
 
        case OMP_TASK:
-         ret = gimplify_omp_task (expr_p, pre_p);
+         gimplify_omp_task (expr_p, pre_p);
+         ret = GS_ALL_DONE;
          break;
 
        case OMP_FOR:
@@ -6182,35 +6689,46 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
 
        case OMP_SECTIONS:
        case OMP_SINGLE:
-         ret = gimplify_omp_workshare (expr_p, pre_p);
+         gimplify_omp_workshare (expr_p, pre_p);
+         ret = GS_ALL_DONE;
          break;
 
        case OMP_SECTION:
        case OMP_MASTER:
        case OMP_ORDERED:
        case OMP_CRITICAL:
-         gimplify_to_stmt_list (&OMP_BODY (*expr_p));
-         break;
+         {
+           gimple_seq body = NULL;
+           gimple g;
+
+           gimplify_and_add (OMP_BODY (*expr_p), &body);
+           switch (TREE_CODE (*expr_p))
+             {
+             case OMP_SECTION:
+               g = gimple_build_omp_section (body);
+               break;
+             case OMP_MASTER:
+               g = gimple_build_omp_master (body);
+               break;
+             case OMP_ORDERED:
+               g = gimple_build_omp_ordered (body);
+               break;
+             case OMP_CRITICAL:
+               g = gimple_build_omp_critical (body,
+                                              OMP_CRITICAL_NAME (*expr_p));
+               break;
+             default:
+               gcc_unreachable ();
+             }
+           gimplify_seq_add_stmt (pre_p, g);
+           ret = GS_ALL_DONE;
+           break;
+         }
 
        case OMP_ATOMIC:
          ret = gimplify_omp_atomic (expr_p, pre_p);
          break;
 
-       case OMP_RETURN:
-       case OMP_CONTINUE:
-       case OMP_ATOMIC_STORE:
-       case OMP_SECTIONS_SWITCH:
-         ret = GS_ALL_DONE;
-         break;
-
-       case OMP_ATOMIC_LOAD:
-         if (gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, NULL,
-             is_gimple_val, fb_rvalue) != GS_ALL_DONE)
-           ret = GS_ERROR;
-         else
-           ret = GS_ALL_DONE;
-         break;
-
        case POINTER_PLUS_EXPR:
           /* Convert ((type *)A)+offset into &A->field_of_type_and_offset.
             The second is gimple immediate saving a need for extra statement.
@@ -6244,6 +6762,7 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
               break;
             }
           /* FALLTHRU */
+
        default:
          switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
            {
@@ -6257,18 +6776,17 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
                 Compare scalar mode aggregates as scalar mode values.  Using
                 memcmp for them would be very inefficient at best, and is
                 plain wrong if bitfields are involved.  */
+               {
+                 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
 
-             {
-               tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
-
-               if (!AGGREGATE_TYPE_P (type))
-                 goto expr_2;
-               else if (TYPE_MODE (type) != BLKmode)
-                 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
-               else
-                 ret = gimplify_variable_sized_compare (expr_p);
+                 if (!AGGREGATE_TYPE_P (type))
+                   goto expr_2;
+                 else if (TYPE_MODE (type) != BLKmode)
+                   ret = gimplify_scalar_mode_aggregate_compare (expr_p);
+                 else
+                   ret = gimplify_variable_sized_compare (expr_p);
 
-               break;
+                 break;
                }
 
            /* If *EXPR_P does not need to be special-cased, handle it
@@ -6284,7 +6802,7 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
                enum gimplify_status r0, r1;
 
                r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
-                                   post_p, is_gimple_val, fb_rvalue);
+                                   post_p, is_gimple_val, fb_rvalue);
                r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
                                    post_p, is_gimple_val, fb_rvalue);
 
@@ -6305,6 +6823,7 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
            }
 
          recalculate_side_effects (*expr_p);
+
        dont_recalculate:
          break;
        }
@@ -6372,6 +6891,7 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
          /* Historically, the compiler has treated a bare reference
             to a non-BLKmode volatile lvalue as forcing a load.  */
          tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
+
          /* Normally, we do not want to create a temporary for a
             TREE_ADDRESSABLE type because such a type should not be
             copied by bitwise-assignment.  However, we make an
@@ -6381,7 +6901,8 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
             given a TREE_ADDRESSABLE type.  */
          tree tmp = create_tmp_var_raw (type, "vol");
          gimple_add_tmp_var (tmp);
-         *expr_p = build_gimple_modify_stmt (tmp, *expr_p);
+         gimplify_assign (tmp, *expr_p, pre_p);
+         *expr_p = NULL;
        }
       else
        /* We can't do anything useful with a volatile reference to
@@ -6393,33 +6914,68 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
     }
 
   /* If we are gimplifying at the statement level, we're done.  Tack
-     everything together and replace the original statement with the
-     gimplified form.  */
+     everything together and return.  */
   if (fallback == fb_none || is_statement)
     {
-      if (internal_pre || internal_post)
+      /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
+         it out for GC to reclaim it.  */
+      *expr_p = NULL_TREE;
+
+      if (!gimple_seq_empty_p (internal_pre)
+         || !gimple_seq_empty_p (internal_post))
        {
-         append_to_statement_list (*expr_p, &internal_pre);
-         append_to_statement_list (internal_post, &internal_pre);
-         annotate_all_with_locus (&internal_pre, input_location);
-         *expr_p = internal_pre;
+         gimplify_seq_add_seq (&internal_pre, internal_post);
+         gimplify_seq_add_seq (pre_p, internal_pre);
        }
-      else if (!*expr_p)
-       ;
-      else if (TREE_CODE (*expr_p) == STATEMENT_LIST)
-       annotate_all_with_locus (expr_p, input_location);
-      else
-       annotate_one_with_locus (*expr_p, input_location);
+
+      /* The result of gimplifying *EXPR_P is going to be the last few
+        statements in *PRE_P and *POST_P.  Add location information
+        to all the statements that were added by the gimplification
+        helpers.  */
+      if (!gimple_seq_empty_p (*pre_p))
+       annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
+
+      if (!gimple_seq_empty_p (*post_p))
+       annotate_all_with_location_after (*post_p, post_last_gsi,
+                                         input_location);
+
       goto out;
     }
 
-  /* Otherwise we're gimplifying a subexpression, so the resulting value is
-     interesting.  */
+#ifdef ENABLE_GIMPLE_CHECKING
+  if (*expr_p)
+    {
+      enum tree_code code = TREE_CODE (*expr_p);
+      /* These expressions should already be in gimple IR form.  */
+      gcc_assert (code != MODIFY_EXPR
+                 && code != ASM_EXPR
+                 && code != BIND_EXPR
+                 && code != CATCH_EXPR
+                 && code != COND_EXPR
+                 && code != EH_FILTER_EXPR
+                 && code != GOTO_EXPR
+                 && code != LABEL_EXPR
+                 && code != LOOP_EXPR
+                 && code != RESX_EXPR
+                 && code != SWITCH_EXPR
+                 && code != TRY_FINALLY_EXPR
+                 && code != OMP_CRITICAL
+                 && code != OMP_FOR
+                 && code != OMP_MASTER
+                 && code != OMP_ORDERED
+                 && code != OMP_PARALLEL
+                 && code != OMP_SECTIONS
+                 && code != OMP_SECTION
+                 && code != OMP_SINGLE);
+    }
+#endif
 
-  /* If it's sufficiently simple already, we're done.  Unless we are
-     handling some post-effects internally; if that's the case, we need to
-     copy into a temp before adding the post-effects to the tree.  */
-  if (!internal_post && (*gimple_test_f) (*expr_p))
+  /* Otherwise we're gimplifying a subexpression, so the resulting
+     value is interesting.  If it's a valid operand that matches
+     GIMPLE_TEST_F, we're done. Unless we are handling some
+     post-effects internally; if that's the case, we need to copy into
+     a temporary before adding the post-effects to POST_P.  */
+  if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
     goto out;
 
   /* Otherwise, we need to create a new temporary for the gimplified
@@ -6429,7 +6985,8 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
      object the lvalue refers to would (probably) be modified by the
      postqueue; we need to copy the value out first, which means an
      rvalue.  */
-  if ((fallback & fb_lvalue) && !internal_post
+  if ((fallback & fb_lvalue)
+      && gimple_seq_empty_p (internal_post)
       && is_gimple_addressable (*expr_p))
     {
       /* An lvalue will do.  Take the address of the expression, store it
@@ -6439,14 +6996,15 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
       gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
       *expr_p = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (tmp)), tmp);
     }
-  else if ((fallback & fb_rvalue) && is_gimple_formal_tmp_rhs (*expr_p))
+  else if ((fallback & fb_rvalue) && is_gimple_formal_tmp_or_call_rhs (*expr_p))
     {
+      /* An rvalue will do.  Assign the gimplified expression into a
+        new temporary TMP and replace the original expression with
+        TMP.  First, make sure that the expression has a type so that
+        it can be assigned into a temporary.  */
       gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
 
-      /* An rvalue will do.  Assign the gimplified expression into a new
-        temporary TMP and replace the original expression with TMP.  */
-
-      if (internal_post || (fallback & fb_lvalue))
+      if (!gimple_seq_empty_p (internal_post) || (fallback & fb_lvalue))
        /* The postqueue might change the value of the expression between
           the initialization and use of the temporary, so we can't use a
           formal temp.  FIXME do we care?  */
@@ -6459,7 +7017,7 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
     }
   else
     {
-#ifdef ENABLE_CHECKING
+#ifdef ENABLE_GIMPLE_CHECKING
       if (!(fallback & fb_mayfail))
        {
          fprintf (stderr, "gimplification failed:\n");
@@ -6469,6 +7027,7 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
        }
 #endif
       gcc_assert (fallback & fb_mayfail);
+
       /* If this is an asm statement, and the user asked for the
         impossible, don't die.  Fail and let gimplify_asm_expr
         issue an error.  */
@@ -6479,10 +7038,10 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
   /* Make sure the temporary matches our predicate.  */
   gcc_assert ((*gimple_test_f) (*expr_p));
 
-  if (internal_post)
+  if (!gimple_seq_empty_p (internal_post))
     {
-      annotate_all_with_locus (&internal_post, input_location);
-      append_to_statement_list (internal_post, pre_p);
+      annotate_all_with_location (internal_post, input_location);
+      gimplify_seq_add_seq (pre_p, internal_post);
     }
 
  out:
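The pre/post queues handled above are now GIMPLE sequences rather than tree statement lists.  As an illustrative sketch (the helper name and its use are assumptions, not part of the patch), a caller of the tuplified gimplify_expr threads a sequence through like this:

/* Reduce EXPR to a GIMPLE value, queuing any statements needed to
   compute it on *PRE_P.  A hypothetical helper built on the
   gimple_seq-based gimplify_expr shown above.  */
static tree
force_gimple_val (tree expr, gimple_seq *pre_p)
{
  gimplify_expr (&expr, pre_p, NULL, is_gimple_val, fb_rvalue);
  return expr;
}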
@@ -6494,7 +7053,7 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
    size that we find.  Add to LIST_P any statements generated.  */
 
 void
-gimplify_type_sizes (tree type, tree *list_p)
+gimplify_type_sizes (tree type, gimple_seq *list_p)
 {
   tree field, t;
 
@@ -6577,10 +7136,10 @@ gimplify_type_sizes (tree type, tree *list_p)
 
 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
    a size or position, has had all of its SAVE_EXPRs evaluated.
-   We add any required statements to STMT_P.  */
+   We add any required statements to *STMT_P.  */
 
 void
-gimplify_one_sizepos (tree *expr_p, tree *stmt_p)
+gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
 {
   tree type, expr = *expr_p;
 
@@ -6611,28 +7170,30 @@ gimplify_one_sizepos (tree *expr_p, tree *stmt_p)
       && TYPE_IS_SIZETYPE (type))
     {
       tree tmp;
+      gimple stmt;
 
       *expr_p = create_tmp_var (type, NULL);
       tmp = build1 (NOP_EXPR, type, expr);
-      tmp = build_gimple_modify_stmt (*expr_p, tmp);
+      stmt = gimplify_assign (*expr_p, tmp, stmt_p);
       if (EXPR_HAS_LOCATION (expr))
-       SET_EXPR_LOCUS (tmp, EXPR_LOCUS (expr));
+       gimple_set_location (stmt, *EXPR_LOCUS (expr));
       else
-       SET_EXPR_LOCATION (tmp, input_location);
-
-      gimplify_and_add (tmp, stmt_p);
+       gimple_set_location (stmt, input_location);
     }
 }
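The gimplify_assign call above both appends the new assignment to *STMT_P and returns the statement, so attributes such as a location can still be set on the tuple afterwards.  A small illustrative helper (the name is an assumption, not from the patch) showing that idiom:

/* Append LHS = RHS to *SEQ_P and annotate the new tuple with LOC.  */
static void
emit_located_assign (tree lhs, tree rhs, location_t loc, gimple_seq *seq_p)
{
  gimple stmt = gimplify_assign (lhs, rhs, seq_p);  /* appends to *SEQ_P */
  gimple_set_location (stmt, loc);                  /* annotate the tuple */
}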
-\f
 
-/* Gimplify the body of statements pointed to by BODY_P.  FNDECL is the
-   function decl containing BODY.  */
 
-void
+/* Gimplify the body of statements pointed to by BODY_P and return a
+   GIMPLE_BIND containing the sequence of GIMPLE statements
+   corresponding to BODY_P.  FNDECL is the function decl containing
+   *BODY_P.  */
+
+gimple
 gimplify_body (tree *body_p, tree fndecl, bool do_parms)
 {
   location_t saved_location = input_location;
-  tree body, parm_stmts;
+  gimple_seq parm_stmts, seq;
+  gimple outer_bind;
   struct gimplify_ctx gctx;
 
   timevar_push (TV_TREE_GIMPLIFY);
@@ -6652,63 +7213,62 @@ gimplify_body (tree *body_p, tree fndecl, bool do_parms)
 
   /* Resolve callee-copies.  This has to be done before processing
      the body so that DECL_VALUE_EXPR gets processed correctly.  */
-  parm_stmts = do_parms ? gimplify_parameters () : NULL;
+  parm_stmts = (do_parms) ? gimplify_parameters () : NULL;
 
   /* Gimplify the function's body.  */
-  gimplify_stmt (body_p);
-  body = *body_p;
-
-  if (!body)
-    body = alloc_stmt_list ();
-  else if (TREE_CODE (body) == STATEMENT_LIST)
+  seq = NULL;
+  gimplify_stmt (body_p, &seq);
+  outer_bind = gimple_seq_first_stmt (seq);
+  if (!outer_bind)
     {
-      tree t = expr_only (*body_p);
-      if (t)
-       body = t;
+      outer_bind = gimple_build_nop ();
+      gimplify_seq_add_stmt (&seq, outer_bind);
     }
 
-  /* If there isn't an outer BIND_EXPR, add one.  */
-  if (TREE_CODE (body) != BIND_EXPR)
-    {
-      tree b = build3 (BIND_EXPR, void_type_node, NULL_TREE,
-                      NULL_TREE, NULL_TREE);
-      TREE_SIDE_EFFECTS (b) = 1;
-      append_to_statement_list_force (body, &BIND_EXPR_BODY (b));
-      body = b;
-    }
+  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
+     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
+  if (gimple_code (outer_bind) == GIMPLE_BIND
+      && gimple_seq_first (seq) == gimple_seq_last (seq))
+    ;
+  else
+    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
+
+  *body_p = NULL_TREE;
 
   /* If we had callee-copies statements, insert them at the beginning
      of the function.  */
-  if (parm_stmts)
+  if (!gimple_seq_empty_p (parm_stmts))
     {
-      append_to_statement_list_force (BIND_EXPR_BODY (body), &parm_stmts);
-      BIND_EXPR_BODY (body) = parm_stmts;
+      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
+      gimple_bind_set_body (outer_bind, parm_stmts);
     }
 
-  /* Unshare again, in case gimplification was sloppy.  */
-  unshare_all_trees (body);
-
-  *body_p = body;
-
-  pop_gimplify_context (body);
+  pop_gimplify_context (outer_bind);
   gcc_assert (gimplify_ctxp == NULL);
 
 #ifdef ENABLE_TYPES_CHECKING
   if (!errorcount && !sorrycount)
-    verify_gimple_1 (BIND_EXPR_BODY (*body_p));
+    verify_types_in_gimple_seq (gimple_bind_body (outer_bind));
 #endif
 
   timevar_pop (TV_TREE_GIMPLIFY);
   input_location = saved_location;
+
+  return outer_bind;
 }
 
 /* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
-   node for the function we want to gimplify.  */
+   node for the function we want to gimplify.
+
+   The sequence of GIMPLE statements corresponding to the body of
+   FNDECL is stored as its GIMPLE body via gimple_set_body.  */
 
 void
 gimplify_function_tree (tree fndecl)
 {
   tree oldfn, parm, ret;
+  gimple_seq seq;
+  gimple bind;
 
   oldfn = current_function_decl;
   current_function_decl = fndecl;
@@ -6735,7 +7295,13 @@ gimplify_function_tree (tree fndecl)
       && !needs_to_live_in_memory (ret))
     DECL_GIMPLE_REG_P (ret) = 1;
 
-  gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);
+  bind = gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);
+
+  /* The tree body of the function is no longer needed, replace it
+     with the new GIMPLE body.  */
+  seq = gimple_seq_alloc ();
+  gimple_seq_add_stmt (&seq, bind);
+  gimple_set_body (fndecl, seq);
 
   /* If we're instrumenting function entry/exit, then prepend the call to
      the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
@@ -6745,44 +7311,248 @@ gimplify_function_tree (tree fndecl)
       && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
       && !flag_instrument_functions_exclude_p (fndecl))
     {
-      tree tf, x, bind;
+      tree x;
+      gimple new_bind;
+      gimple tf;
+      gimple_seq cleanup = NULL, body = NULL;
 
-      tf = build2 (TRY_FINALLY_EXPR, void_type_node, NULL, NULL);
-      TREE_SIDE_EFFECTS (tf) = 1;
-      x = DECL_SAVED_TREE (fndecl);
-      append_to_statement_list (x, &TREE_OPERAND (tf, 0));
       x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_EXIT];
-      x = build_call_expr (x, 0);
-      append_to_statement_list (x, &TREE_OPERAND (tf, 1));
+      gimplify_seq_add_stmt (&cleanup, gimple_build_call (x, 0));
+      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
 
-      bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
-      TREE_SIDE_EFFECTS (bind) = 1;
       x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER];
-      x = build_call_expr (x, 0);
-      append_to_statement_list (x, &BIND_EXPR_BODY (bind));
-      append_to_statement_list (tf, &BIND_EXPR_BODY (bind));
+      gimplify_seq_add_stmt (&body, gimple_build_call (x, 0));
+      gimplify_seq_add_stmt (&body, tf);
+      new_bind = gimple_build_bind (NULL, body, gimple_block (bind));
+      /* Clear the block for BIND, since it is no longer directly inside
+         the function, but within a try block.  */
+      gimple_set_block (bind, NULL);
 
-      DECL_SAVED_TREE (fndecl) = bind;
+      /* Replace the current function body with the body
+         wrapped in the try/finally TF.  */
+      seq = gimple_seq_alloc ();
+      gimple_seq_add_stmt (&seq, new_bind);
+      gimple_set_body (fndecl, seq);
     }
 
+  DECL_SAVED_TREE (fndecl) = NULL_TREE;
+
   current_function_decl = oldfn;
   pop_cfun ();
 }
-\f
+
+
+/* Some transformations like inlining may invalidate the GIMPLE form
+   for operands.  This function traverses all the operands in STMT and
+   gimplifies anything that is not a valid gimple operand.  Any new
+   GIMPLE statements are inserted before *GSI_P.  */
+
+void
+gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
+{
+  size_t i, num_ops;
+  tree orig_lhs = NULL_TREE, lhs, t;
+  gimple_seq pre = NULL;
+  gimple post_stmt = NULL;
+  struct gimplify_ctx gctx;
+
+  push_gimplify_context (&gctx);
+  gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
+
+  switch (gimple_code (stmt))
+    {
+    case GIMPLE_COND:
+      gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
+                    is_gimple_val, fb_rvalue);
+      gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
+                    is_gimple_val, fb_rvalue);
+      break;
+    case GIMPLE_OMP_ATOMIC_LOAD:
+      gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
+                    is_gimple_val, fb_rvalue);
+      break;
+    case GIMPLE_ASM:
+      {
+       size_t i, noutputs = gimple_asm_noutputs (stmt);
+       const char *constraint, **oconstraints;
+       bool allows_mem, allows_reg, is_inout;
+
+       oconstraints
+         = (const char **) alloca ((noutputs) * sizeof (const char *));
+       for (i = 0; i < noutputs; i++)
+         {
+           tree op = gimple_asm_output_op (stmt, i);
+           constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
+           oconstraints[i] = constraint;
+           parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
+                                    &allows_reg, &is_inout);
+           gimplify_expr (&TREE_VALUE (op), &pre, NULL,
+                          is_inout ? is_gimple_min_lval : is_gimple_lvalue,
+                          fb_lvalue | fb_mayfail);
+         }
+       for (i = 0; i < gimple_asm_ninputs (stmt); i++)
+         {
+           tree op = gimple_asm_input_op (stmt, i);
+           constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
+           parse_input_constraint (&constraint, 0, 0, noutputs, 0,
+                                   oconstraints, &allows_mem, &allows_reg);
+           if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (op))) && allows_mem)
+             allows_reg = 0;
+           if (!allows_reg && allows_mem)
+             gimplify_expr (&TREE_VALUE (op), &pre, NULL,
+                            is_gimple_lvalue, fb_lvalue | fb_mayfail);
+           else
+             gimplify_expr (&TREE_VALUE (op), &pre, NULL,
+                            is_gimple_asm_val, fb_rvalue);
+         }
+      }
+      break;
+    default:
+      /* NOTE: We start gimplifying operands from last to first to
+        make sure that side-effects on the RHS of calls, assignments
+        and ASMs are executed before the LHS.  The ordering is not
+        important for other statements.  */
+      num_ops = gimple_num_ops (stmt);
+      orig_lhs = gimple_get_lhs (stmt);
+      for (i = num_ops; i > 0; i--)
+       {
+         tree op = gimple_op (stmt, i - 1);
+         if (op == NULL_TREE)
+           continue;
+         if (i == 1 && (is_gimple_call (stmt) || is_gimple_assign (stmt)))
+           gimplify_expr (&op, &pre, NULL, is_gimple_lvalue, fb_lvalue);
+         else if (i == 2
+                  && is_gimple_assign (stmt)
+                  && num_ops == 2
+                  && get_gimple_rhs_class (gimple_expr_code (stmt))
+                     == GIMPLE_SINGLE_RHS)
+           gimplify_expr (&op, &pre, NULL,
+                          rhs_predicate_for (gimple_assign_lhs (stmt)),
+                          fb_rvalue);
+         else if (i == 2 && is_gimple_call (stmt))
+           {
+             if (TREE_CODE (op) == FUNCTION_DECL)
+               continue;
+             gimplify_expr (&op, &pre, NULL, is_gimple_call_addr, fb_rvalue);
+           }
+         else
+           gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);
+         gimple_set_op (stmt, i - 1, op);
+       }
+
+      lhs = gimple_get_lhs (stmt);
+      /* If regimplification of the LHS changed it in a way that requires
+        a simple RHS, create temporary.  */
+      if (orig_lhs != lhs && !is_gimple_formal_tmp_var (lhs))
+       {
+         bool need_temp = false;
+
+         if (is_gimple_assign (stmt)
+             && num_ops == 2
+             && get_gimple_rhs_class (gimple_expr_code (stmt))
+                == GIMPLE_SINGLE_RHS)
+           gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
+                          rhs_predicate_for (gimple_assign_lhs (stmt)),
+                          fb_rvalue);
+         else if (is_gimple_reg (lhs))
+           {
+             if (is_gimple_reg_type (TREE_TYPE (lhs)))
+               {
+                 if (is_gimple_call (stmt))
+                   {
+                     i = gimple_call_flags (stmt);
+                     if ((i & ECF_LOOPING_CONST_OR_PURE)
+                         || !(i & (ECF_CONST | ECF_PURE)))
+                       need_temp = true;
+                   }
+                 if (stmt_can_throw_internal (stmt))
+                   need_temp = true;
+               }
+           }
+         else
+           {
+             if (is_gimple_reg_type (TREE_TYPE (lhs)))
+               need_temp = true;
+             else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
+               {
+                 if (is_gimple_call (stmt))
+                   {
+                     tree fndecl = gimple_call_fndecl (stmt);
+
+                     if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
+                         && !(fndecl && DECL_RESULT (fndecl)
+                              && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
+                       need_temp = true;
+                   }
+                 else
+                   need_temp = true;
+               }
+           }
+         if (need_temp)
+           {
+             tree temp = create_tmp_var (TREE_TYPE (lhs), NULL);
+
+             DECL_GIMPLE_FORMAL_TEMP_P (temp) = 1;
+             if (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE
+                 || TREE_CODE (TREE_TYPE (lhs)) == VECTOR_TYPE)
+               DECL_GIMPLE_REG_P (temp) = 1;
+             if (TREE_CODE (orig_lhs) == SSA_NAME)
+               orig_lhs = SSA_NAME_VAR (orig_lhs);
+             if (TREE_CODE (orig_lhs) == VAR_DECL
+                 && DECL_BASED_ON_RESTRICT_P (orig_lhs))
+               {
+                 DECL_BASED_ON_RESTRICT_P (temp) = 1;
+                 SET_DECL_RESTRICT_BASE (temp,
+                                         DECL_GET_RESTRICT_BASE (orig_lhs));
+               }
+
+             if (gimple_in_ssa_p (cfun))
+               temp = make_ssa_name (temp, NULL);
+             gimple_set_lhs (stmt, temp);
+             post_stmt = gimple_build_assign (lhs, temp);
+             if (TREE_CODE (lhs) == SSA_NAME)
+               SSA_NAME_DEF_STMT (lhs) = post_stmt;
+           }
+       }
+      break;
+    }
+
+  if (!gimple_seq_empty_p (pre))
+    {
+      if (gimple_in_ssa_p (cfun))
+       {
+         gimple_stmt_iterator i;
+
+         for (i = gsi_start (pre); !gsi_end_p (i); gsi_next (&i))
+           mark_symbols_for_renaming (gsi_stmt (i));
+       }
+      gsi_insert_seq_before (gsi_p, pre, GSI_SAME_STMT);
+    }
+  if (post_stmt)
+    gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);
+
+  if (gimple_referenced_vars (cfun))
+    for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
+      add_referenced_var (t);
+
+  pop_gimplify_context (NULL);
+}
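A typical use of gimple_regimplify_operands, shown here only as a sketch (the helper name is hypothetical): after a pass rewrites an operand in place, it asks the gimplifier to restore valid GIMPLE form, with any fix-up statements inserted before the statement.

/* Replace the first RHS operand of the assignment at *GSI and let the
   gimplifier re-validate the statement's operands.  */
static void
set_rhs_and_regimplify (gimple_stmt_iterator *gsi, tree new_rhs)
{
  gimple stmt = gsi_stmt (*gsi);

  gimple_assign_set_rhs1 (stmt, new_rhs);   /* may leave non-GIMPLE operands */
  update_stmt (stmt);
  gimple_regimplify_operands (stmt, gsi);   /* emits fix-ups before *GSI */
}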
+
+
 /* Expands EXPR to list of gimple statements STMTS.  If SIMPLE is true,
    force the result to be either ssa_name or an invariant, otherwise
    just force it to be a rhs expression.  If VAR is not NULL, make the
    base variable of the final destination be VAR if suitable.  */
 
 tree
-force_gimple_operand (tree expr, tree *stmts, bool simple, tree var)
+force_gimple_operand (tree expr, gimple_seq *stmts, bool simple, tree var)
 {
   tree t;
   enum gimplify_status ret;
   gimple_predicate gimple_test_f;
   struct gimplify_ctx gctx;
 
-  *stmts = NULL_TREE;
+  *stmts = NULL;
 
   if (is_gimple_val (expr))
     return expr;
@@ -6794,9 +7564,9 @@ force_gimple_operand (tree expr, tree *stmts, bool simple, tree var)
   gimplify_ctxp->allow_rhs_cond_expr = true;
 
   if (var)
-    expr = build_gimple_modify_stmt (var, expr);
+    expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);
 
-  if (TREE_CODE (expr) != GIMPLE_MODIFY_STMT
+  if (TREE_CODE (expr) != MODIFY_EXPR
       && TREE_TYPE (expr) == void_type_node)
     {
       gimplify_and_add (expr, stmts);
@@ -6804,16 +7574,13 @@ force_gimple_operand (tree expr, tree *stmts, bool simple, tree var)
     }
   else
     {
-      ret = gimplify_expr (&expr, stmts, NULL,
-                          gimple_test_f, fb_rvalue);
+      ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue);
       gcc_assert (ret != GS_ERROR);
     }
 
   if (gimple_referenced_vars (cfun))
-    {
-      for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
-       add_referenced_var (t);
-    }
+    for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
+      add_referenced_var (t);
 
   pop_gimplify_context (NULL);
 
@@ -6821,33 +7588,34 @@ force_gimple_operand (tree expr, tree *stmts, bool simple, tree var)
 }
 
 /* Invokes force_gimple_operand for EXPR with parameters SIMPLE_P and VAR.  If
-   some statements are produced, emits them at BSI.  If BEFORE is true.
-   the statements are appended before BSI, otherwise they are appended after
-   it.  M specifies the way BSI moves after insertion (BSI_SAME_STMT or
-   BSI_CONTINUE_LINKING are the usual values).  */
+   some statements are produced, emits them at GSI.  If BEFORE is true,
+   the statements are appended before GSI, otherwise they are appended after
+   it.  M specifies the way GSI moves after insertion (GSI_SAME_STMT or
+   GSI_CONTINUE_LINKING are the usual values).  */
 
 tree
-force_gimple_operand_bsi (block_stmt_iterator *bsi, tree expr,
+force_gimple_operand_gsi (gimple_stmt_iterator *gsi, tree expr,
                          bool simple_p, tree var, bool before,
-                         enum bsi_iterator_update m)
+                         enum gsi_iterator_update m)
 {
-  tree stmts;
+  gimple_seq stmts;
 
   expr = force_gimple_operand (expr, &stmts, simple_p, var);
-  if (stmts)
+
+  if (!gimple_seq_empty_p (stmts))
     {
       if (gimple_in_ssa_p (cfun))
        {
-         tree_stmt_iterator tsi;
+         gimple_stmt_iterator i;
 
-         for (tsi = tsi_start (stmts); !tsi_end_p (tsi); tsi_next (&tsi))
-           mark_symbols_for_renaming (tsi_stmt (tsi));
+         for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
+           mark_symbols_for_renaming (gsi_stmt (i));
        }
 
       if (before)
-       bsi_insert_before (bsi, stmts, m);
+       gsi_insert_seq_before (gsi, stmts, m);
       else
-       bsi_insert_after (bsi, stmts, m);
+       gsi_insert_seq_after (gsi, stmts, m);
     }
 
   return expr;
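For illustration, a caller that needs ADDR as a simple invariant or SSA name before the statement at GSI could use the new interface as follows (the wrapper name is an assumption):

/* Gimplify ADDR into a simple operand, inserting any helper statements
   before the statement currently at *GSI.  */
static tree
simplify_addr_before (gimple_stmt_iterator *gsi, tree addr)
{
  return force_gimple_operand_gsi (gsi, addr, true, NULL_TREE,
                                   true, GSI_SAME_STMT);
}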
diff --git a/gcc/gsstruct.def b/gcc/gsstruct.def
new file mode 100644 (file)
index 0000000..b524a37
--- /dev/null
@@ -0,0 +1,48 @@
+/* This file contains the definitions for the gimple IR structure
+   enumeration used in GCC.
+
+   Copyright (C) 2007 Free Software Foundation, Inc.
+   Contributed by Aldy Hernandez <aldyh@redhat.com>
+
+This file is part of GCC.
+
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 3, or (at your option) any later
+version.
+
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
+FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+for more details.
+
+You should have received a copy of the GNU General Public License
+along with GCC; see the file COPYING3.  If not see
+<http://www.gnu.org/licenses/>.  */
+
+/* The format of this file is
+   DEFGSSTRUCT(GSS_enumeration value, printable name).
+   Each enum value should correspond with a single member of the union
+   gimple_statement_d.  */
+
+DEFGSSTRUCT(GSS_BASE, "base")
+DEFGSSTRUCT(GSS_WITH_OPS, "with_ops")
+DEFGSSTRUCT(GSS_WITH_MEM_OPS, "with_mem_ops")
+DEFGSSTRUCT(GSS_OMP, "omp")
+DEFGSSTRUCT(GSS_BIND, "bind")
+DEFGSSTRUCT(GSS_CATCH, "catch")
+DEFGSSTRUCT(GSS_EH_FILTER, "eh_filter")
+DEFGSSTRUCT(GSS_PHI, "phi")
+DEFGSSTRUCT(GSS_RESX, "resx")
+DEFGSSTRUCT(GSS_TRY, "try")
+DEFGSSTRUCT(GSS_WCE, "with_cleanup_expression")
+DEFGSSTRUCT(GSS_ASM, "asm")
+DEFGSSTRUCT(GSS_OMP_CRITICAL, "omp_critical")
+DEFGSSTRUCT(GSS_OMP_FOR, "omp_for")
+DEFGSSTRUCT(GSS_OMP_PARALLEL, "omp_parallel")
+DEFGSSTRUCT(GSS_OMP_TASK, "omp_task")
+DEFGSSTRUCT(GSS_OMP_SECTIONS, "sections")
+DEFGSSTRUCT(GSS_OMP_SINGLE, "single")
+DEFGSSTRUCT(GSS_OMP_CONTINUE, "omp_continue")
+DEFGSSTRUCT(GSS_OMP_ATOMIC_LOAD, "omp_atomic_load")
+DEFGSSTRUCT(GSS_OMP_ATOMIC_STORE, "omp_atomic_store")
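As with other .def files in GCC, the DEFGSSTRUCT entries are meant to be expanded by their consumer.  A sketch of the expected expansion into the structure enumeration (the exact consumer is expected to live in gimple.h on the branch):

/* Build enum gimple_statement_structure_enum from gsstruct.def.  */
enum gimple_statement_structure_enum {
#define DEFGSSTRUCT(SYM, STRING)  SYM,
#include "gsstruct.def"
#undef DEFGSSTRUCT
  LAST_GSS_ENUM
};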
index 92d12c4..af1cc0f 100644 (file)
@@ -359,18 +359,15 @@ ipcp_initialize_node_lattices (struct cgraph_node *node)
 /* Create a new assignment statement and make it the first statement in the
    function.  PARM1 is the lhs of the assignment and VAL is the rhs. */
 static void
-constant_val_insert (tree parm1, tree val)
+constant_val_insert (tree parm1 ATTRIBUTE_UNUSED, tree val ATTRIBUTE_UNUSED)
 {
-  tree init_stmt = NULL;
+  gimple init_stmt = NULL;
   edge e_step;
 
-  init_stmt = build_gimple_modify_stmt (parm1, val);
-
-  if (init_stmt)
-    {
-      e_step = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun));
-      bsi_insert_on_edge_immediate (e_step, init_stmt);
-    }
+  init_stmt = gimple_build_assign (parm1, val);
+  gcc_assert (init_stmt);
+  e_step = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun));
+  gsi_insert_on_edge_immediate (e_step, init_stmt);
 }
 
 /* build INTEGER_CST tree with type TREE_TYPE and value according to LAT.
@@ -810,9 +807,7 @@ ipcp_update_callgraph (void)
            if (ipcp_need_redirect_p (cs))
              {
                cgraph_redirect_edge_callee (cs, orig_callee);
-               TREE_OPERAND (CALL_EXPR_FN (get_call_expr_in (cs->call_stmt)),
-                             0) =
-                 orig_callee->decl;
+               gimple_call_set_fn (cs->call_stmt, orig_callee->decl);
              }
          }
     }
@@ -944,7 +939,7 @@ ipcp_insert_stage (void)
       if (const_param > 0)
        {
          push_cfun (DECL_STRUCT_FUNCTION (node1->decl));
-         tree_register_cfg_hooks ();
+         gimple_register_cfg_hooks ();
          current_function_decl = node1->decl;
 
          for (i = 0; i < count; i++)
index 6d744c4..0ccaa6b 100644 (file)
@@ -195,9 +195,11 @@ cgraph_estimate_size_after_inlining (int times, struct cgraph_node *to,
    clones or re-using node originally representing out-of-line function call.
    */
 void
-cgraph_clone_inlined_nodes (struct cgraph_edge *e, bool duplicate, bool update_original)
+cgraph_clone_inlined_nodes (struct cgraph_edge *e, bool duplicate,
+                           bool update_original)
 {
   HOST_WIDE_INT peak;
+
   if (duplicate)
     {
       /* We may eliminate the need for out-of-line copy to be output.
@@ -207,7 +209,7 @@ cgraph_clone_inlined_nodes (struct cgraph_edge *e, bool duplicate, bool update_o
          && !cgraph_new_nodes)
        {
          gcc_assert (!e->callee->global.inlined_to);
-         if (DECL_SAVED_TREE (e->callee->decl))
+         if (gimple_body (e->callee->decl))
            overall_insns -= e->callee->global.insns, nfunctions_inlined++;
          duplicate = false;
        }
@@ -289,7 +291,7 @@ cgraph_mark_inline (struct cgraph_edge *edge)
   struct cgraph_node *what = edge->callee;
   struct cgraph_edge *e, *next;
 
-  gcc_assert (!CALL_STMT_CANNOT_INLINE_P (edge->call_stmt));
+  gcc_assert (!gimple_call_cannot_inline_p (edge->call_stmt));
   /* Look for all calls, mark them inline and clone recursively
      all inlined functions.  */
   for (e = what->callers; e; e = next)
@@ -971,7 +973,7 @@ cgraph_decide_inlining_of_small_functions (void)
        }
       if (!tree_can_inline_p (edge->caller->decl, edge->callee->decl))
        {
-         CALL_STMT_CANNOT_INLINE_P (edge->call_stmt) = true;
+         gimple_call_set_cannot_inline (edge->call_stmt, true);
          edge->inline_failed = N_("target specific option mismatch");
          if (dump_file)
            fprintf (dump_file, " inline_failed:%s.\n", edge->inline_failed);
@@ -992,7 +994,7 @@ cgraph_decide_inlining_of_small_functions (void)
       else
        {
          struct cgraph_node *callee;
-         if (CALL_STMT_CANNOT_INLINE_P (edge->call_stmt)
+         if (gimple_call_cannot_inline_p (edge->call_stmt)
              || !cgraph_check_inline_limits (edge->caller, edge->callee,
                                              &edge->inline_failed, true))
            {
@@ -1126,14 +1128,14 @@ cgraph_decide_inlining (void)
       for (e = node->callers; e; e = next)
        {
          next = e->next_caller;
-         if (!e->inline_failed || CALL_STMT_CANNOT_INLINE_P (e->call_stmt))
+         if (!e->inline_failed || gimple_call_cannot_inline_p (e->call_stmt))
            continue;
          if (cgraph_recursive_inlining_p (e->caller, e->callee,
                                           &e->inline_failed))
            continue;
          if (!tree_can_inline_p (e->caller->decl, e->callee->decl))
            {
-             CALL_STMT_CANNOT_INLINE_P (e->call_stmt) = true;
+             gimple_call_set_cannot_inline (e->call_stmt, true);
              continue;
            }
          cgraph_mark_inline_edge (e, true);
@@ -1172,15 +1174,18 @@ cgraph_decide_inlining (void)
        fprintf (dump_file, "\nDeciding on functions called once:\n");
 
       /* And finally decide what functions are called once.  */
-
       for (i = nnodes - 1; i >= 0; i--)
        {
          node = order[i];
 
-         if (node->callers && !node->callers->next_caller && !node->needed
-             && node->local.inlinable && node->callers->inline_failed
-             && !CALL_STMT_CANNOT_INLINE_P (node->callers->call_stmt)
-             && !DECL_EXTERNAL (node->decl) && !DECL_COMDAT (node->decl))
+         if (node->callers
+             && !node->callers->next_caller
+             && !node->needed
+             && node->local.inlinable
+             && node->callers->inline_failed
+             && !gimple_call_cannot_inline_p (node->callers->call_stmt)
+             && !DECL_EXTERNAL (node->decl)
+             && !DECL_COMDAT (node->decl))
            {
              if (dump_file)
                {
@@ -1342,7 +1347,7 @@ cgraph_decide_inlining_incrementally (struct cgraph_node *node,
       if (!e->callee->local.disregard_inline_limits
          && (mode != INLINE_ALL || !e->callee->local.inlinable))
        continue;
-      if (CALL_STMT_CANNOT_INLINE_P (e->call_stmt))
+      if (gimple_call_cannot_inline_p (e->call_stmt))
        continue;
       /* When the edge is already inlined, we just need to recurse into
         it in order to fully flatten the leaves.  */
@@ -1369,7 +1374,7 @@ cgraph_decide_inlining_incrementally (struct cgraph_node *node,
        }
       if (!tree_can_inline_p (node->decl, e->callee->decl))
        {
-         CALL_STMT_CANNOT_INLINE_P (e->call_stmt) = true;
+         gimple_call_set_cannot_inline (e->call_stmt, true);
          if (dump_file)
            {
              indent_to (dump_file, depth);
@@ -1388,7 +1393,7 @@ cgraph_decide_inlining_incrementally (struct cgraph_node *node,
            }
          continue;
        }
-      if (!DECL_SAVED_TREE (e->callee->decl) && !e->callee->inline_decl)
+      if (!gimple_body (e->callee->decl) && !e->callee->inline_decl)
        {
          if (dump_file)
            {
@@ -1402,7 +1407,8 @@ cgraph_decide_inlining_incrementally (struct cgraph_node *node,
     }
 
   /* Now do the automatic inlining.  */
-  if (!flag_really_no_inline && mode != INLINE_ALL
+  if (!flag_really_no_inline
+      && mode != INLINE_ALL
       && mode != INLINE_ALWAYS_INLINE)
     for (e = node->callees; e; e = e->next_callee)
       {
@@ -1455,7 +1461,7 @@ cgraph_decide_inlining_incrementally (struct cgraph_node *node,
          }
        if (!cgraph_check_inline_limits (node, e->callee, &e->inline_failed,
                                        false)
-           || CALL_STMT_CANNOT_INLINE_P (e->call_stmt))
+           || gimple_call_cannot_inline_p (e->call_stmt))
          {
            if (dump_file)
              {
@@ -1464,7 +1470,7 @@ cgraph_decide_inlining_incrementally (struct cgraph_node *node,
              }
            continue;
          }
-       if (!DECL_SAVED_TREE (e->callee->decl) && !e->callee->inline_decl)
+       if (!gimple_body (e->callee->decl) && !e->callee->inline_decl)
          {
            if (dump_file)
              {
@@ -1476,7 +1482,7 @@ cgraph_decide_inlining_incrementally (struct cgraph_node *node,
          }
        if (!tree_can_inline_p (node->decl, e->callee->decl))
          {
-           CALL_STMT_CANNOT_INLINE_P (e->call_stmt) = true;
+           gimple_call_set_cannot_inline (e->call_stmt, true);
            if (dump_file)
              {
                indent_to (dump_file, depth);
@@ -1585,8 +1591,8 @@ compute_inline_parameters (struct cgraph_node *node)
     = inline_summary (node)->estimated_self_stack_size;
   node->global.stack_frame_offset = 0;
   node->local.inlinable = tree_inlinable_function_p (current_function_decl);
-  inline_summary (node)->self_insns = estimate_num_insns (current_function_decl,
-                                                         &eni_inlining_weights);
+  inline_summary (node)->self_insns
+      = estimate_num_insns_fn (current_function_decl, &eni_inlining_weights);
   if (node->local.inlinable && !node->local.disregard_inline_limits)
     node->local.disregard_inline_limits
       = DECL_DISREGARD_INLINE_LIMITS (current_function_decl);
@@ -1706,6 +1712,7 @@ inline_transform (struct cgraph_node *node)
   for (e = node->callees; e; e = e->next_callee)
     if (!e->inline_failed || warn_inline)
       break;
+
   if (e)
     {
       timevar_push (TV_INTEGRATION);
index c87a577..efa1959 100644 (file)
@@ -156,16 +156,16 @@ ipa_count_formal_params (struct cgraph_node *mt)
    pointers or escaping addresses because all TREE_ADDRESSABLE parameters are
    considered modified anyway.  */
 static void
-ipa_check_stmt_modifications (struct ipa_node_params *info, tree stmt)
+ipa_check_stmt_modifications (struct ipa_node_params *info, gimple stmt)
 {
   int j;
   int index;
   tree lhs;
 
-  switch (TREE_CODE (stmt))
+  switch (gimple_code (stmt))
     {
-    case GIMPLE_MODIFY_STMT:
-      lhs = GIMPLE_STMT_OPERAND (stmt, 0);
+    case GIMPLE_ASSIGN:
+      lhs = gimple_assign_lhs (stmt);
 
       while (handled_component_p (lhs))
        lhs = TREE_OPERAND (lhs, 0);
@@ -176,7 +176,7 @@ ipa_check_stmt_modifications (struct ipa_node_params *info, tree stmt)
        info->param_flags[index].modified = true;
       break;
 
-    case ASM_EXPR:
+    case GIMPLE_ASM:
       /* Asm code could modify any of the parameters.  */
       for (j = 0; j < ipa_get_param_count (info); j++)
        info->param_flags[j].modified = true;
@@ -197,8 +197,8 @@ ipa_detect_param_modifications (struct cgraph_node *node)
   tree decl = node->decl;
   basic_block bb;
   struct function *func;
-  block_stmt_iterator bsi;
-  tree stmt;
+  gimple_stmt_iterator gsi;
+  gimple stmt;
   struct ipa_node_params *info = IPA_NODE_REF (node);
   int i, count;
 
@@ -212,9 +212,9 @@ ipa_detect_param_modifications (struct cgraph_node *node)
   func = DECL_STRUCT_FUNCTION (decl);
   FOR_EACH_BB_FN (bb, func)
     {
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
-         stmt = bsi_stmt (bsi);
+         stmt = gsi_stmt (gsi);
          ipa_check_stmt_modifications (info, stmt);
        }
     }
@@ -227,17 +227,17 @@ ipa_detect_param_modifications (struct cgraph_node *node)
   info->modification_analysis_done = 1;
 }
 
-/* Count number of arguments callsite CS has and store it in
+/* Count number of arguments callsite CS has and store it in 
    ipa_edge_args structure corresponding to this callsite.  */
 void
 ipa_count_arguments (struct cgraph_edge *cs)
 {
-  tree call_tree;
+  gimple stmt;
   int arg_num;
 
-  call_tree = get_call_expr_in (cs->call_stmt);
-  gcc_assert (TREE_CODE (call_tree) == CALL_EXPR);
-  arg_num = call_expr_nargs (call_tree);
+  stmt = cs->call_stmt;
+  gcc_assert (is_gimple_call (stmt));
+  arg_num = gimple_call_num_args (stmt);
   ipa_set_cs_argument_count (IPA_EDGE_REF (cs), arg_num);
 }
 
@@ -314,14 +314,15 @@ ipa_print_all_jump_functions (FILE *f)
 static void
 compute_scalar_jump_functions (struct ipa_node_params *info,
                               struct ipa_jump_func *functions,
-                              tree call)
+                              gimple call)
 {
-  call_expr_arg_iterator iter;
   tree arg;
-  int num = 0;
+  unsigned num = 0;
 
-  FOR_EACH_CALL_EXPR_ARG (arg, iter, call)
+  for (num = 0; num < gimple_call_num_args (call); num++)
     {
+      arg = gimple_call_arg (call, num);
+
       if (TREE_CODE (arg) == INTEGER_CST
          || TREE_CODE (arg) == REAL_CST
          || TREE_CODE (arg) == FIXED_CST)
@@ -359,8 +360,6 @@ compute_scalar_jump_functions (struct ipa_node_params *info,
              functions[num].value.formal_id = index;
            }
        }
-
-      num++;
     }
 }
 
@@ -404,15 +403,16 @@ type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
 static bool
 compute_pass_through_member_ptrs (struct ipa_node_params *info,
                                  struct ipa_jump_func *functions,
-                                 tree call)
+                                 gimple call)
 {
-  call_expr_arg_iterator iter;
   bool undecided_members = false;
-  int num = 0;
+  unsigned num;
   tree arg;
 
-  FOR_EACH_CALL_EXPR_ARG (arg, iter, call)
+  for (num = 0; num < gimple_call_num_args (call); num++)
     {
+      arg = gimple_call_arg (call, num);
+
       if (type_like_member_ptr_p (TREE_TYPE (arg), NULL, NULL))
        {
          if (TREE_CODE (arg) == PARM_DECL)
@@ -431,8 +431,6 @@ compute_pass_through_member_ptrs (struct ipa_node_params *info,
          else
            undecided_members = true;
        }
-
-      num++;
     }
 
   return undecided_members;
@@ -449,39 +447,36 @@ fill_member_ptr_cst_jump_function (struct ipa_jump_func *jfunc,
   jfunc->value.member_cst.delta = delta;
 }
 
-/* Traverse statements from CALL_STMT backwards, scanning whether the argument
-   ARG which is a member pointer is filled in with constant values.  If it is,
-   fill the jump function JFUNC in appropriately.  METHOD_FIELD and DELTA_FIELD
-   are fields of the record type of the member pointer.  To give an example, we
-   look for a pattern looking like the following:  
+/* Traverse statements from CALL backwards, scanning whether the argument ARG
+   which is a member pointer is filled in with constant values.  If it is, fill
+   the jump function JFUNC in appropriately.  METHOD_FIELD and DELTA_FIELD are
+   fields of the record type of the member pointer.  To give an example, we
+   look for a pattern looking like the following:
 
      D.2515.__pfn ={v} printStuff;
      D.2515.__delta ={v} 0;
      i_1 = doprinting (D.2515);  */
 static void
-determine_cst_member_ptr (tree call_stmt, tree arg, tree method_field,
+determine_cst_member_ptr (gimple call, tree arg, tree method_field,
                          tree delta_field, struct ipa_jump_func *jfunc)
 {
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   tree method = NULL_TREE;
   tree delta = NULL_TREE;
 
-  bsi = bsi_for_stmt (call_stmt);
+  gsi = gsi_for_stmt (call);
 
-  bsi_prev (&bsi);
-  for (; !bsi_end_p (bsi); bsi_prev (&bsi))
+  gsi_prev (&gsi);
+  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
     {
-      tree stmt = bsi_stmt (bsi);
+      gimple stmt = gsi_stmt (gsi);
       tree lhs, rhs, fld;
 
-      if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+      if (!is_gimple_assign (stmt) || gimple_num_ops (stmt) != 2)
        return;
 
-      rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-      if (TREE_CODE (rhs) == CALL_EXPR)
-       return;
-
-      lhs = GIMPLE_STMT_OPERAND (stmt, 0);
+      lhs = gimple_assign_lhs (stmt);
+      rhs = gimple_assign_rhs1 (stmt);
 
       if (TREE_CODE (lhs) != COMPONENT_REF
          || TREE_OPERAND (lhs, 0) != arg)
@@ -524,28 +519,26 @@ determine_cst_member_ptr (tree call_stmt, tree arg, tree method_field,
   return;
 }
 
-/* Go through the arguments of the call in CALL_STMT and for every member
-   pointer within tries determine whether it is a constant.  If it is, create a
-   corresponding constant jump function in FUNCTIONS which is an array of jump
-   functions associated with the call.  */
+/* Go through the arguments of the CALL and, for every member pointer among
+   them, try to determine whether it is a constant.  If it is, create a corresponding
+   constant jump function in FUNCTIONS which is an array of jump functions
+   associated with the call.  */
 static void
 compute_cst_member_ptr_arguments (struct ipa_jump_func *functions,
-                                 tree call_stmt)
+                                 gimple call)
 {
-  call_expr_arg_iterator iter;
-  int num = 0;
-  tree call = get_call_expr_in (call_stmt);
+  unsigned num;
   tree arg, method_field, delta_field;
 
-  FOR_EACH_CALL_EXPR_ARG (arg, iter, call)
+  for (num = 0; num < gimple_call_num_args (call); num++)
     {
+      arg = gimple_call_arg (call, num);
+
       if (functions[num].type == IPA_UNKNOWN
          && type_like_member_ptr_p (TREE_TYPE (arg), &method_field,
                                     &delta_field))
-       determine_cst_member_ptr (call_stmt, arg, method_field,
-                                 delta_field, &functions[num]);
-
-      num++;
+       determine_cst_member_ptr (call, arg, method_field, delta_field,
+                                 &functions[num]);
     }
 }
 
@@ -557,13 +550,15 @@ ipa_compute_jump_functions (struct cgraph_edge *cs)
 {
   struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
   struct ipa_edge_args *arguments = IPA_EDGE_REF (cs);
-  tree call;
+  gimple call;
 
   if (ipa_get_cs_argument_count (arguments) == 0 || arguments->jump_functions)
     return;
   arguments->jump_functions = XCNEWVEC (struct ipa_jump_func,
                                        ipa_get_cs_argument_count (arguments));
-  call = get_call_expr_in (cs->call_stmt);
+
+  call = cs->call_stmt;
+  gcc_assert (is_gimple_call (call));
 
   /* We will deal with constants and SSA scalars first:  */
   compute_scalar_jump_functions (info, arguments->jump_functions, call);
@@ -575,7 +570,7 @@ ipa_compute_jump_functions (struct cgraph_edge *cs)
 
   /* Finally, let's check whether we actually pass a new constant member
      pointer here...  */
-  compute_cst_member_ptr_arguments (arguments->jump_functions, cs->call_stmt);
+  compute_cst_member_ptr_arguments (arguments->jump_functions, call);
 }
 
 /* If RHS looks like a rhs of a statement loading pfn from a member pointer
@@ -604,14 +599,14 @@ ipa_get_member_ptr_load_param (tree rhs)
 /* If STMT looks like a statement loading a value from a member pointer formal
   parameter, this function returns that parameter.  */
 static tree
-ipa_get_stmt_member_ptr_load_param (tree stmt)
+ipa_get_stmt_member_ptr_load_param (gimple stmt)
 {
   tree rhs;
 
-  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+  if (!is_gimple_assign (stmt) || gimple_num_ops (stmt) != 2)
     return NULL_TREE;
 
-  rhs = GIMPLE_STMT_OPERAND (stmt, 1);
+  rhs = gimple_assign_rhs1 (stmt);
   return ipa_get_member_ptr_load_param (rhs);
 }
 
@@ -631,10 +626,10 @@ ipa_is_ssa_with_stmt_def (tree t)
    parameter.  STMT is the corresponding call statement.  */
 static void
 ipa_note_param_call (struct ipa_node_params *info, int formal_id,
-                    tree stmt)
+                    gimple stmt)
 {
   struct ipa_param_call_note *note;
-  basic_block bb = bb_for_stmt (stmt);
+  basic_block bb = gimple_bb (stmt);
 
   info->param_flags[formal_id].called = 1;
 
@@ -650,15 +645,14 @@ ipa_note_param_call (struct ipa_node_params *info, int formal_id,
   return;
 }
 
-/* Analyze the CALL (which itself must be a part of statement STMT) and examine
-   uses of formal parameters of the caller (described by INFO).  Currently it
-   checks whether the call calls a pointer that is a formal parameter and if
-   so, the parameter is marked with the called flag and a note describing the
-   call is created.  This is very simple for ordinary pointers represented in
-   SSA but not-so-nice when it comes to member pointers.  The ugly part of this
-   function does nothing more than tries to match the pattern of such a call.
-   An example of such a pattern is the gimple dump below, the call is on the
-   last line:
+/* Analyze the CALL and examine uses of formal parameters of the caller
+   (described by INFO).  Currently it checks whether the call calls a pointer
+   that is a formal parameter and if so, the parameter is marked with the
+   called flag and a note describing the call is created.  This is very simple
+   for ordinary pointers represented in SSA but not-so-nice when it comes to
+   member pointers.  The ugly part of this function does nothing more than
+   try to match the pattern of such a call.  An example of such a pattern is
+   the gimple dump below, the call is on the last line:
 
      <bb 2>:
        f$__delta_5 = f.__delta;
@@ -698,16 +692,16 @@ ipa_note_param_call (struct ipa_node_params *info, int formal_id,
 */
 
 static void
-ipa_analyze_call_uses (struct ipa_node_params *info, tree call, tree stmt)
+ipa_analyze_call_uses (struct ipa_node_params *info, gimple call)
 {
-  tree target = CALL_EXPR_FN (call);
-  tree var, def;
+  tree target = gimple_call_fn (call);
+  gimple def;
+  tree var;
   tree n1, n2;
-  tree d1, d2;
-  tree rec, rec2;
-  tree branch, cond;
+  gimple d1, d2;
+  tree rec, rec2, cond;
+  gimple branch;
   int index;
-
   basic_block bb, virt_bb, join;
 
   if (TREE_CODE (target) != SSA_NAME)
@@ -719,7 +713,7 @@ ipa_analyze_call_uses (struct ipa_node_params *info, tree call, tree stmt)
       /* assuming TREE_CODE (var) == PARM_DECL */
       index = ipa_get_param_decl_index (info, var);
       if (index >= 0)
-       ipa_note_param_call (info, index, stmt);
+       ipa_note_param_call (info, index, call);
       return;
     }
 
@@ -731,10 +725,10 @@ ipa_analyze_call_uses (struct ipa_node_params *info, tree call, tree stmt)
     return;
 
   def = SSA_NAME_DEF_STMT (target);
-  if (TREE_CODE (def) != PHI_NODE)
+  if (gimple_code (def) != GIMPLE_PHI)
     return;
 
-  if (PHI_NUM_ARGS (def) != 2)
+  if (gimple_phi_num_args (def) != 2)
     return;
 
   /* First, we need to check whether one of these is a load from a member
@@ -751,13 +745,13 @@ ipa_analyze_call_uses (struct ipa_node_params *info, tree call, tree stmt)
       if (ipa_get_stmt_member_ptr_load_param (d2))
        return;
 
-      bb = bb_for_stmt (d1);
-      virt_bb = bb_for_stmt (d2);
+      bb = gimple_bb (d1);
+      virt_bb = gimple_bb (d2);
     }
   else if ((rec = ipa_get_stmt_member_ptr_load_param (d2)))
     {
-      bb = bb_for_stmt (d2);
-      virt_bb = bb_for_stmt (d1);
+      bb = gimple_bb (d2);
+      virt_bb = gimple_bb (d1);
     }
   else
     return;
@@ -765,7 +759,7 @@ ipa_analyze_call_uses (struct ipa_node_params *info, tree call, tree stmt)
   /* Second, we need to check that the basic blocks are laid out in the way
      corresponding to the pattern. */
 
-  join = bb_for_stmt (def);
+  join = gimple_bb (def);
   if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
       || single_pred (virt_bb) != bb
       || single_succ (virt_bb) != join)
@@ -775,52 +769,45 @@ ipa_analyze_call_uses (struct ipa_node_params *info, tree call, tree stmt)
      significant bit of the pfn. */
 
   branch = last_stmt (bb);
-  if (TREE_CODE (branch) != COND_EXPR)
+  if (gimple_code (branch) != GIMPLE_COND)
     return;
 
-  cond = TREE_OPERAND (branch, 0);
-  if (TREE_CODE (cond) != NE_EXPR
-      || !integer_zerop (TREE_OPERAND (cond, 1)))
+  if (gimple_cond_code (branch) != NE_EXPR
+      || !integer_zerop (gimple_cond_rhs (branch)))
     return;
-  cond = TREE_OPERAND (cond, 0);
 
+  cond = gimple_cond_lhs (branch);
   if (!ipa_is_ssa_with_stmt_def (cond))
     return;
 
-  cond = SSA_NAME_DEF_STMT (cond);
-  if (TREE_CODE (cond) != GIMPLE_MODIFY_STMT)
+  def = SSA_NAME_DEF_STMT (cond);
+  if (!is_gimple_assign (def) || gimple_num_ops (def) != 3
+      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
+      || !integer_onep (gimple_assign_rhs2 (def)))
     return;
-  cond = GIMPLE_STMT_OPERAND (cond, 1);
-  if (TREE_CODE (cond) != BIT_AND_EXPR
-      || !integer_onep (TREE_OPERAND (cond, 1)))
-    return;
-  cond = TREE_OPERAND (cond, 0);
+
+  cond = gimple_assign_rhs1 (def);
   if (!ipa_is_ssa_with_stmt_def (cond))
     return;
 
-  cond = SSA_NAME_DEF_STMT (cond);
-  if (TREE_CODE (cond) != GIMPLE_MODIFY_STMT)
-    return;
-  cond = GIMPLE_STMT_OPERAND (cond, 1);
+  def = SSA_NAME_DEF_STMT (cond);
 
-  if (TREE_CODE (cond) == NOP_EXPR)
+  if (is_gimple_assign (def) && gimple_num_ops (def) == 2
+      && gimple_assign_rhs_code (def) == NOP_EXPR)
     {
-      cond = TREE_OPERAND (cond, 0);
+      cond = gimple_assign_rhs1 (def);
       if (!ipa_is_ssa_with_stmt_def (cond))
        return;
-      cond = SSA_NAME_DEF_STMT (cond);
-      if (TREE_CODE (cond) != GIMPLE_MODIFY_STMT)
-       return;
-      cond = GIMPLE_STMT_OPERAND (cond, 1);
+      def = SSA_NAME_DEF_STMT (cond);
     }
 
-  rec2 = ipa_get_member_ptr_load_param (cond);
+  rec2 = ipa_get_stmt_member_ptr_load_param (def);
   if (rec != rec2)
     return;
 
   index = ipa_get_param_decl_index (info, rec);
   if (index >= 0 && !ipa_is_ith_param_modified (info, index))
-    ipa_note_param_call (info, index, stmt);
+    ipa_note_param_call (info, index, call);
 
   return;
 }
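The pattern matching above repeatedly replaces TREE_CODE/GIMPLE_STMT_OPERAND tests with is_gimple_assign and gimple_assign_rhs_code checks on the defining statement.  A condensed sketch of that idiom (the helper is hypothetical, not part of the patch):

/* Return the defining statement of NAME, looking through a single
   NOP_EXPR conversion in the tuple style used above.  */
static gimple
strip_conversion_def (tree name)
{
  gimple def = SSA_NAME_DEF_STMT (name);

  if (is_gimple_assign (def)
      && gimple_assign_rhs_code (def) == NOP_EXPR
      && TREE_CODE (gimple_assign_rhs1 (def)) == SSA_NAME)
    def = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def));

  return def;
}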
@@ -829,12 +816,10 @@ ipa_analyze_call_uses (struct ipa_node_params *info, tree call, tree stmt)
    INFO) and their uses.  Currently it only checks whether formal parameters
    are called.  */
 static void
-ipa_analyze_stmt_uses (struct ipa_node_params *info, tree stmt)
+ipa_analyze_stmt_uses (struct ipa_node_params *info, gimple stmt)
 {
-  tree call = get_call_expr_in (stmt);
-
-  if (call)
-    ipa_analyze_call_uses (info, call, stmt);
+  if (is_gimple_call (stmt))
+    ipa_analyze_call_uses (info, stmt);
 }
 
 /* Scan the function body of NODE and inspect the uses of formal parameters.
@@ -846,11 +831,10 @@ ipa_analyze_params_uses (struct cgraph_node *node)
   tree decl = node->decl;
   basic_block bb;
   struct function *func;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   struct ipa_node_params *info = IPA_NODE_REF (node);
 
-  if (ipa_get_param_count (info) == 0 || info->uses_analysis_done
-      || !DECL_SAVED_TREE (decl))
+  if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
     return;
   if (!info->param_flags)
     info->param_flags = XCNEWVEC (struct ipa_param_flags,
@@ -859,9 +843,9 @@ ipa_analyze_params_uses (struct cgraph_node *node)
   func = DECL_STRUCT_FUNCTION (decl);
   FOR_EACH_BB_FN (bb, func)
     {
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
-         tree stmt = bsi_stmt (bsi);
+         gimple stmt = gsi_stmt (gsi);
          ipa_analyze_stmt_uses (info, stmt);
        }
     }
@@ -918,7 +902,7 @@ print_edge_addition_message (FILE *f, struct ipa_param_call_note *nt,
     print_node_brief(f, "", jfunc->value.constant, 0);
 
   fprintf (f, ") in %s: ", cgraph_node_name (node));
-  print_generic_stmt (f, nt->stmt, 2);
+  print_gimple_stmt (f, nt->stmt, 2, TDF_SLIM);
 }
 
 /* Update the param called notes associated with NODE when CS is being inlined,
index 7d44da1..908a97e 100644 (file)
@@ -132,7 +132,7 @@ struct ipa_param_call_note
   /* Linked list's next */
   struct ipa_param_call_note *next;
   /* Statement that contains the call to the parameter above.  */
-  tree stmt;
+  gimple stmt;
   /* Index of the parameter that is called.  */
   unsigned int formal_id;
   /* Expected number of executions: calculated in profile.c.  */
index 578c48d..7720d30 100644 (file)
@@ -44,7 +44,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "ggc.h"
 #include "ipa-utils.h"
 #include "c-common.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "cgraph.h"
 #include "output.h"
 #include "flags.h"
@@ -276,42 +276,47 @@ check_lhs_var (funct_state local, tree t)
    actual asm statement.  */
 
 static void
-get_asm_expr_operands (funct_state local, tree stmt)
+get_asm_expr_operands (funct_state local, gimple stmt)
 {
-  int noutputs = list_length (ASM_OUTPUTS (stmt));
+  size_t noutputs = gimple_asm_noutputs (stmt);
   const char **oconstraints
     = (const char **) alloca ((noutputs) * sizeof (const char *));
-  int i;
-  tree link;
+  size_t i;
+  tree op;
   const char *constraint;
   bool allows_mem, allows_reg, is_inout;
   
-  for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
+  for (i = 0; i < noutputs; i++)
     {
+      op = gimple_asm_output_op (stmt, i);
       oconstraints[i] = constraint
-       = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
+       = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
       parse_output_constraint (&constraint, i, 0, 0,
                               &allows_mem, &allows_reg, &is_inout);
       
-      check_lhs_var (local, TREE_VALUE (link));
+      check_lhs_var (local, TREE_VALUE (op));
     }
 
-  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
+  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
     {
+      op = gimple_asm_input_op (stmt, i);
       constraint
-       = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
+       = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
       parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                              oconstraints, &allows_mem, &allows_reg);
       
-      check_rhs_var (local, TREE_VALUE (link));
+      check_rhs_var (local, TREE_VALUE (op));
     }
   
-  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
-    if (simple_cst_equal(TREE_VALUE (link), memory_identifier_string) == 1) 
-      /* Abandon all hope, ye who enter here. */
-      local->pure_const_state = IPA_NEITHER;
+  for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
+    {
+      op = gimple_asm_clobber_op (stmt, i);
+      if (simple_cst_equal(TREE_VALUE (op), memory_identifier_string) == 1) 
+       /* Abandon all hope, ye who enter here. */
+       local->pure_const_state = IPA_NEITHER;
+    }
 
-  if (ASM_VOLATILE_P (stmt))
+  if (gimple_asm_volatile_p (stmt))
     local->pure_const_state = IPA_NEITHER;
 }
 
@@ -323,17 +328,20 @@ get_asm_expr_operands (funct_state local, tree stmt)
    the entire call expression.  */
 
 static void
-check_call (funct_state local, tree call_expr)
+check_call (funct_state local, gimple call)
 {
-  int flags = call_expr_flags (call_expr);
-  tree operand;
-  call_expr_arg_iterator iter;
-  tree callee_t = get_callee_fndecl (call_expr);
+  int flags = gimple_call_flags (call);
+  tree lhs, callee_t = gimple_call_fndecl (call);
   struct cgraph_node* callee;
   enum availability avail = AVAIL_NOT_AVAILABLE;
+  size_t i;
+
+  lhs = gimple_call_lhs (call);
+  if (lhs)
+    check_lhs_var (local, lhs);
 
-  FOR_EACH_CALL_EXPR_ARG (operand, iter, call_expr)
-    check_rhs_var (local, operand);
+  for (i = 0; i < gimple_call_num_args (call); i++)
+    check_rhs_var (local, gimple_call_arg (call, i));
   
   /* The const and pure flags are set by a variety of places in the
      compiler (including here).  If someone has already set the flags
@@ -405,11 +413,10 @@ check_call (funct_state local, tree call_expr)
    should be converted to use the operand scanner.  */
 
 static tree
-scan_function (tree *tp, 
-                     int *walk_subtrees, 
-                     void *data)
+scan_function_op (tree *tp, int *walk_subtrees, void *data)
 {
-  struct cgraph_node *fn = (struct cgraph_node *) data;
+  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
+  struct cgraph_node *fn = (struct cgraph_node *) wi->info;
   tree t = *tp;
   funct_state local = get_function_state (fn);
 
@@ -417,58 +424,72 @@ scan_function (tree *tp,
     {
     case VAR_DECL:
       if (DECL_INITIAL (t))
-       walk_tree (&DECL_INITIAL (t), scan_function, fn, visited_nodes);
+       walk_tree (&DECL_INITIAL (t), scan_function_op, data, visited_nodes);
       *walk_subtrees = 0;
       break;
 
-    case GIMPLE_MODIFY_STMT:
+    case ADDR_EXPR:
+      /* This case is here to find addresses on rhs of constructors in
+        decl_initial of static variables. */
+      check_rhs_var (local, t);
+      *walk_subtrees = 0;
+      break;
+
+    default:
+      break;
+    }
+  return NULL;
+}
+
+static tree
+scan_function_stmt (gimple_stmt_iterator *gsi_p,
+                   bool *handled_ops_p,
+                   struct walk_stmt_info *wi)
+{
+  struct cgraph_node *fn = (struct cgraph_node *) wi->info;
+  gimple stmt = gsi_stmt (*gsi_p);
+  funct_state local = get_function_state (fn);
+
+  switch (gimple_code (stmt))
+    {
+    case GIMPLE_ASSIGN:
       {
        /* First look on the lhs and see what variable is stored to */
-       tree lhs = GIMPLE_STMT_OPERAND (t, 0);
-       tree rhs = GIMPLE_STMT_OPERAND (t, 1);
+       tree lhs = gimple_assign_lhs (stmt);
+       tree rhs1 = gimple_assign_rhs1 (stmt);
+       tree rhs2 = gimple_assign_rhs2 (stmt);
+       enum tree_code code = gimple_assign_rhs_code (stmt);
+
        check_lhs_var (local, lhs);
 
        /* For the purposes of figuring out what the cast affects */
 
        /* Next check the operands on the rhs to see if they are ok. */
-       switch (TREE_CODE_CLASS (TREE_CODE (rhs))) 
+       switch (TREE_CODE_CLASS (code))
          {
          case tcc_binary:          
            {
-             tree op0 = TREE_OPERAND (rhs, 0);
-             tree op1 = TREE_OPERAND (rhs, 1);
-             check_rhs_var (local, op0);
-             check_rhs_var (local, op1);
+             check_rhs_var (local, rhs1);
+             check_rhs_var (local, rhs2);
            }
            break;
          case tcc_unary:
            {
-             tree op0 = TREE_OPERAND (rhs, 0);
-             check_rhs_var (local, op0);
+             check_rhs_var (local, rhs1);
            }
 
            break;
          case tcc_reference:
-           check_rhs_var (local, rhs);
+           check_rhs_var (local, rhs1);
            break;
          case tcc_declaration:
-           check_rhs_var (local, rhs);
+           check_rhs_var (local, rhs1);
            break;
          case tcc_expression:
-           switch (TREE_CODE (rhs)) 
+           switch (code)
              {
              case ADDR_EXPR:
-               check_rhs_var (local, rhs);
-               break;
-             default:
-               break;
-             }
-           break;
-         case tcc_vl_exp:
-           switch (TREE_CODE (rhs)) 
-             {
-             case CALL_EXPR:
-               check_call (local, rhs);
+               check_rhs_var (local, rhs1);
                break;
              default:
                break;
@@ -477,19 +498,12 @@ scan_function (tree *tp,
          default:
            break;
          }
-       *walk_subtrees = 0;
+       *handled_ops_p = true;
       }
       break;
 
-    case ADDR_EXPR:
-      /* This case is here to find addresses on rhs of constructors in
-        decl_initial of static variables. */
-      check_rhs_var (local, t);
-      *walk_subtrees = 0;
-      break;
-
-    case LABEL_EXPR:
-      if (DECL_NONLOCAL (TREE_OPERAND (t, 0)))
+    case GIMPLE_LABEL:
+      if (DECL_NONLOCAL (gimple_label_label (stmt)))
        /* Target of long jump. */
        {
          local->pure_const_state = IPA_NEITHER;
@@ -497,14 +511,14 @@ scan_function (tree *tp,
        }
       break;
 
-    case CALL_EXPR: 
-      check_call (local, t);
-      *walk_subtrees = 0;
+    case GIMPLE_CALL:
+      check_call (local, stmt);
+      *handled_ops_p = true;
       break;
       
-    case ASM_EXPR:
-      get_asm_expr_operands (local, t);
-      *walk_subtrees = 0;
+    case GIMPLE_ASM:
+      get_asm_expr_operands (local, stmt);
+      *handled_ops_p = true;
       break;
       
     default:
@@ -567,11 +581,18 @@ analyze_function (struct cgraph_node *fn)
       
       FOR_EACH_BB_FN (this_block, this_cfun)
        {
-         block_stmt_iterator bsi;
-         for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
+         gimple_stmt_iterator gsi;
+         struct walk_stmt_info wi;
+
+         memset (&wi, 0, sizeof(wi));
+         for (gsi = gsi_start_bb (this_block);
+              !gsi_end_p (gsi);
+              gsi_next (&gsi))
            {
-             walk_tree (bsi_stmt_ptr (bsi), scan_function, 
-                        fn, visited_nodes);
+             wi.info = fn;
+             wi.pset = visited_nodes;
+             walk_gimple_stmt (&gsi, scan_function_stmt, scan_function_op, 
+                               &wi);
              if (l->pure_const_state == IPA_NEITHER) 
                goto end;
            }
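
The hunks above replace walk_tree over statement trees with the tuples statement walker: a statement-level callback plus an operand-level callback, both driven by walk_gimple_stmt with per-pass data in a walk_stmt_info.  A minimal sketch of that pattern, assuming the gimple.h API introduced by this merge; the example_* names are illustrative, not part of the patch.

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "gimple.h"
#include "tree-flow.h"

/* Statement-level callback: decides per statement, setting *HANDLED_OPS_P
   when the operand walk should be skipped, as scan_function_stmt does.  */
static tree
example_stmt_cb (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
                 struct walk_stmt_info *wi ATTRIBUTE_UNUSED)
{
  gimple stmt = gsi_stmt (*gsi_p);

  if (is_gimple_call (stmt))
    {
      size_t i;
      /* Look at every argument, the way check_call does above.  */
      for (i = 0; i < gimple_call_num_args (stmt); i++)
        (void) gimple_call_arg (stmt, i);
      *handled_ops_p = true;    /* Do not walk the operands as trees.  */
    }
  return NULL_TREE;
}

/* Operand-level callback: sees the trees hanging off each statement.
   Per-pass data travels in wi->info, as in scan_function_op.  */
static tree
example_op_cb (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;

  if (wi->info == NULL)
    return NULL_TREE;
  if (TREE_CODE (*tp) == ADDR_EXPR)
    *walk_subtrees = 0;         /* Same pruning idiom as above.  */
  return NULL_TREE;
}

/* Driver: walk every statement of BB, carrying PASS_DATA in the
   walk_stmt_info, exactly as the converted analyze_function does.  */
static void
example_walk_bb (basic_block bb, void *pass_data)
{
  gimple_stmt_iterator gsi;
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = pass_data;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_gimple_stmt (&gsi, example_stmt_cb, example_op_cb, &wi);
}
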
index 872a4b4..c28c732 100644
@@ -45,8 +45,7 @@ along with GCC; see the file COPYING3.  If not see
    Currently must be run after inlining decisions have been made since
    otherwise, the local sets will not contain information that is
    consistent with post inlined state.  The global sets are not prone
-   to this problem since they are by definition transitive.  
-*/
+   to this problem since they are by definition transitive.  */
 
 #include "config.h"
 #include "system.h"
@@ -62,7 +61,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "ipa-utils.h"
 #include "ipa-reference.h"
 #include "c-common.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "cgraph.h"
 #include "output.h"
 #include "flags.h"
@@ -388,43 +387,48 @@ check_lhs_var (ipa_reference_local_vars_info_t local, tree t)
    function being analyzed and STMT is the actual asm statement.  */
 
 static void
-get_asm_expr_operands (ipa_reference_local_vars_info_t local, tree stmt)
+get_asm_stmt_operands (ipa_reference_local_vars_info_t local, gimple stmt)
 {
-  int noutputs = list_length (ASM_OUTPUTS (stmt));
+  size_t noutputs = gimple_asm_noutputs (stmt);
   const char **oconstraints
     = (const char **) alloca ((noutputs) * sizeof (const char *));
-  int i;
-  tree link;
+  size_t i;
+  tree op;
   const char *constraint;
   bool allows_mem, allows_reg, is_inout;
   
-  for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
+  for (i = 0; i < noutputs; i++)
     {
+      op = gimple_asm_output_op (stmt, i);
       oconstraints[i] = constraint
-       = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
+       = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
       parse_output_constraint (&constraint, i, 0, 0,
                               &allows_mem, &allows_reg, &is_inout);
       
-      check_lhs_var (local, TREE_VALUE (link));
+      check_lhs_var (local, TREE_VALUE (op));
     }
 
-  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
+  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
     {
+      op = gimple_asm_input_op (stmt, i);
       constraint
-       = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
+       = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
       parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                              oconstraints, &allows_mem, &allows_reg);
       
-      check_rhs_var (local, TREE_VALUE (link));
+      check_rhs_var (local, TREE_VALUE (op));
     }
   
-  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
-    if (simple_cst_equal(TREE_VALUE (link), memory_identifier_string) == 1) 
-      {
-       /* Abandon all hope, ye who enter here. */
-       local->calls_read_all = true;
-       local->calls_write_all = true;
-      }      
+  for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
+    {
+      op = gimple_asm_clobber_op (stmt, i);
+      if (simple_cst_equal(TREE_VALUE (op), memory_identifier_string) == 1) 
+       {
+         /* Abandon all hope, ye who enter here. */
+         local->calls_read_all = true;
+         local->calls_write_all = true;
+       }      
+    }
 }
 
 /* Check the parameters of a function call from CALLER to CALL_EXPR to
@@ -435,16 +439,19 @@ get_asm_expr_operands (ipa_reference_local_vars_info_t local, tree stmt)
    the tree node for the entire call expression.  */
 
 static void
-check_call (ipa_reference_local_vars_info_t local, tree call_expr) 
+check_call (ipa_reference_local_vars_info_t local, gimple stmt)
 {
-  int flags = call_expr_flags (call_expr);
+  int flags = gimple_call_flags (stmt);
   tree operand;
-  tree callee_t = get_callee_fndecl (call_expr);
+  tree callee_t = gimple_call_fndecl (stmt);
   enum availability avail = AVAIL_NOT_AVAILABLE;
-  call_expr_arg_iterator iter;
+  size_t i;
 
-  FOR_EACH_CALL_EXPR_ARG (operand, iter, call_expr)
-    check_rhs_var (local, operand);
+  if ((operand = gimple_call_lhs (stmt)) != NULL)
+    check_lhs_var (local, operand);
+
+  for (i = 0; i < gimple_call_num_args (stmt); i++)
+    check_rhs_var (local, gimple_call_arg (stmt, i));
 
   if (callee_t)
     {
@@ -473,73 +480,49 @@ check_call (ipa_reference_local_vars_info_t local, tree call_expr)
    should be converted to use the operand scanner.  */
 
 static tree
-scan_for_static_refs (tree *tp, 
-                     int *walk_subtrees, 
-                     void *data)
+scan_stmt_for_static_refs (gimple_stmt_iterator *gsip, bool *handled_ops_p,
+                          struct walk_stmt_info *data)
 {
-  struct cgraph_node *fn = (struct cgraph_node *) data;
-  tree t = *tp;
+  struct cgraph_node *fn = (struct cgraph_node *) data->info;
+  gimple stmt = gsi_stmt (*gsip);
   ipa_reference_local_vars_info_t local = NULL;
   if (fn)
     local = get_reference_vars_info_from_cgraph (fn)->local;
 
-  switch (TREE_CODE (t))  
+  switch (gimple_code (stmt))
     {
-    case VAR_DECL:
-      if (DECL_INITIAL (t))
-       walk_tree (&DECL_INITIAL (t), scan_for_static_refs, fn, visited_nodes);
-      *walk_subtrees = 0;
-      break;
-
-    case GIMPLE_MODIFY_STMT:
+    case GIMPLE_ASSIGN:
       {
        /* First look on the lhs and see what variable is stored to */
-       tree lhs = GIMPLE_STMT_OPERAND (t, 0);
-       tree rhs = GIMPLE_STMT_OPERAND (t, 1);
+       tree lhs = gimple_assign_lhs (stmt);
+       tree rhs1 = gimple_assign_rhs1 (stmt);
+       tree rhs2 = gimple_assign_rhs2 (stmt);
+       enum tree_code code = gimple_assign_rhs_code (stmt);
+
        check_lhs_var (local, lhs);
 
        /* For the purposes of figuring out what the cast affects */
 
        /* Next check the operands on the rhs to see if they are ok. */
-       switch (TREE_CODE_CLASS (TREE_CODE (rhs))) 
+       switch (TREE_CODE_CLASS (code))
          {
          case tcc_binary:          
          case tcc_comparison:      
-           {
-             tree op0 = TREE_OPERAND (rhs, 0);
-             tree op1 = TREE_OPERAND (rhs, 1);
-             check_rhs_var (local, op0);
-             check_rhs_var (local, op1);
-           }
+           check_rhs_var (local, rhs1);
+           check_rhs_var (local, rhs2);
            break;
-         case tcc_unary:
-           {
-             tree op0 = TREE_OPERAND (rhs, 0);
-             check_rhs_var (local, op0);
-           }
 
-           break;
+         case tcc_unary:
          case tcc_reference:
-           check_rhs_var (local, rhs);
-           break;
          case tcc_declaration:
-           check_rhs_var (local, rhs);
+           check_rhs_var (local, rhs1);
            break;
+
          case tcc_expression:
-           switch (TREE_CODE (rhs)) 
+           switch (code)
              {
              case ADDR_EXPR:
-               check_rhs_var (local, rhs);
-               break;
-             default:
-               break;
-             }
-           break;
-         case tcc_vl_exp:
-           switch (TREE_CODE (rhs))
-             {
-             case CALL_EXPR:
-               check_call (local, rhs);
+               check_rhs_var (local, rhs1);
                break;
              default:
                break;
@@ -548,19 +531,12 @@ scan_for_static_refs (tree *tp,
          default:
            break;
          }
-       *walk_subtrees = 0;
+       *handled_ops_p = true;
       }
       break;
 
-    case ADDR_EXPR:
-      /* This case is here to find addresses on rhs of constructors in
-        decl_initial of static variables. */
-      check_rhs_var (local, t);
-      *walk_subtrees = 0;
-      break;
-
-    case LABEL_EXPR:
-      if (DECL_NONLOCAL (TREE_OPERAND (t, 0)))
+    case GIMPLE_LABEL:
+      if (DECL_NONLOCAL (gimple_label_label (stmt)))
        {
          /* Target of long jump. */
          local->calls_read_all = true;
@@ -568,14 +544,14 @@ scan_for_static_refs (tree *tp,
        }
       break;
 
-    case CALL_EXPR: 
-      check_call (local, t);
-      *walk_subtrees = 0;
+    case GIMPLE_CALL:
+      check_call (local, stmt);
+      *handled_ops_p = true;
       break;
       
-    case ASM_EXPR:
-      get_asm_expr_operands (local, t);
-      *walk_subtrees = 0;
+    case GIMPLE_ASM:
+      get_asm_stmt_operands (local, stmt);
+      *handled_ops_p = true;
       break;
       
     default:
@@ -584,6 +560,42 @@ scan_for_static_refs (tree *tp,
   return NULL;
 }
 
+/* Call-back to scan GIMPLE operands for static references.  This is supposed
+   to work with scan_stmt_for_static_refs so the real call-back data is stored
+   inside a walk_stmt_info struct.  Callers using the walk_tree interface must
+   also wrap the call-back data in a walk_stmt_info struct.  */
+
+static tree
+scan_op_for_static_refs (tree *tp, int *walk_subtrees, void *data)
+{
+  struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
+  struct cgraph_node *fn = (struct cgraph_node *) wi->info;
+  tree t = *tp;
+  ipa_reference_local_vars_info_t local = NULL;
+  if (fn)
+    local = get_reference_vars_info_from_cgraph (fn)->local;
+
+  switch (TREE_CODE (t))  
+    {
+    case VAR_DECL:
+      if (DECL_INITIAL (t))
+       walk_tree (&DECL_INITIAL (t), scan_op_for_static_refs, data,
+                  wi->pset);
+      *walk_subtrees = 0;
+      break;
+
+    case ADDR_EXPR:
+      /* This case is here to find addresses on rhs of constructors in
+        decl_initial of static variables. */
+      check_rhs_var (local, t);
+      *walk_subtrees = 0;
+      break;
+
+    default:
+      break;
+    }
+  return NULL;
+}
 
 /* Lookup the tree node for the static variable that has UID.  */
 static tree
@@ -777,9 +789,13 @@ ipa_init (void)
 static void 
 analyze_variable (struct varpool_node *vnode)
 {
+  struct walk_stmt_info wi;
   tree global = vnode->decl;
-  walk_tree (&DECL_INITIAL (global), scan_for_static_refs, 
-             NULL, visited_nodes);
+
+  memset (&wi, 0, sizeof (wi));
+  wi.pset = visited_nodes;
+  walk_tree (&DECL_INITIAL (global), scan_op_for_static_refs,
+             &wi, wi.pset);
 }
 
 /* This is the main routine for finding the reference patterns for
@@ -793,6 +809,7 @@ analyze_function (struct cgraph_node *fn)
   ipa_reference_local_vars_info_t l
     = XCNEW (struct ipa_reference_local_vars_info_d);
   tree decl = fn->decl;
+  struct walk_stmt_info wi;
 
   /* Add the info to the tree's annotation.  */
   get_function_ann (fn->decl)->reference_vars_info = info;
@@ -810,14 +827,18 @@ analyze_function (struct cgraph_node *fn)
 
     FOR_EACH_BB_FN (this_block, this_cfun)
       {
-       block_stmt_iterator bsi;
-       tree phi, op;
+       gimple_stmt_iterator gsi;
+       gimple phi;
+       tree op;
        use_operand_p use;
        ssa_op_iter iter;
 
        /* Find the addresses taken in phi node arguments.  */
-       for (phi = phi_nodes (this_block); phi; phi = PHI_CHAIN (phi))
+       for (gsi = gsi_start_phis (this_block);
+            !gsi_end_p (gsi);
+            gsi_next (&gsi))
          {
+           phi = gsi_stmt (gsi);
            FOR_EACH_PHI_ARG (use, phi, iter, SSA_OP_USE)
              {
                op = USE_FROM_PTR (use);
@@ -826,9 +847,12 @@ analyze_function (struct cgraph_node *fn)
              }
          }
 
-       for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
-         walk_tree (bsi_stmt_ptr (bsi), scan_for_static_refs, 
-                    fn, visited_nodes);
+       memset (&wi, 0, sizeof (wi));
+       wi.info = fn;
+       wi.pset = visited_nodes;
+       for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
+         walk_gimple_stmt (&gsi, scan_stmt_for_static_refs,
+                           scan_op_for_static_refs, &wi);
       }
   }
 
@@ -844,8 +868,13 @@ analyze_function (struct cgraph_node *fn)
          if (TREE_CODE (var) == VAR_DECL 
              && DECL_INITIAL (var)
              && !TREE_STATIC (var))
-           walk_tree (&DECL_INITIAL (var), scan_for_static_refs, 
-                      fn, visited_nodes);
+           {
+             memset (&wi, 0, sizeof (wi));
+             wi.info = fn;
+             wi.pset = visited_nodes;
+             walk_tree (&DECL_INITIAL (var), scan_op_for_static_refs,
+                        &wi, wi.pset);
+           }
        }
     }
 }
@@ -1339,4 +1368,3 @@ struct simple_ipa_opt_pass pass_ipa_reference =
 };
 
 #include "gt-ipa-reference.h"
-
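
The get_asm_stmt_operands conversion above moves from TREE_CHAIN walks over ASM_OUTPUTS/ASM_INPUTS/ASM_CLOBBERS to indexed accessors on the GIMPLE_ASM tuple, and the PHI loop now uses gsi_start_phis instead of the PHI_CHAIN list.  A minimal sketch of the asm-operand iteration under the same assumptions; the example_* name is illustrative.

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"

/* Visit the operands of a GIMPLE_ASM statement.  Outputs are written by
   the asm, inputs are only read, and a "memory" clobber means the asm may
   touch arbitrary memory, as get_asm_stmt_operands assumes above.  */
static void
example_scan_asm (gimple stmt)
{
  size_t i;

  gcc_assert (gimple_code (stmt) == GIMPLE_ASM);

  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    {
      tree op = gimple_asm_output_op (stmt, i);   /* a TREE_LIST node */
      const char *constraint
        = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      tree value = TREE_VALUE (op);               /* the output lvalue */
      (void) constraint;
      (void) value;
    }

  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree op = gimple_asm_input_op (stmt, i);
      (void) TREE_VALUE (op);                     /* the input rvalue */
    }

  for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
    (void) TREE_VALUE (gimple_asm_clobber_op (stmt, i));
}
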
index fffb454..d6bca8a 100644
@@ -28,7 +28,7 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
 #include "ggc.h"
 #include "tree.h"
 #include "rtl.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "tree-inline.h"
 #include "tree-flow.h"
 #include "tree-flow-inline.h"
@@ -55,6 +55,7 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
 #include "ipa-type-escape.h"
 #include "tree-dump.h"
 #include "c-common.h"
+#include "gimple.h"
 
 /* This optimization implements structure peeling.
 
@@ -168,7 +169,7 @@ typedef const struct new_var_data *const_new_var;
 /* This structure represents allocation site of the structure.  */
 typedef struct alloc_site
 {
-  tree stmt;
+  gimple stmt;
   d_str str;
 } alloc_site_t;
 
@@ -235,7 +236,7 @@ get_type_of_var (tree var)
 /* Set of actions we do for each newly generated STMT.  */ 
 
 static inline void
-finalize_stmt (tree stmt)
+finalize_stmt (gimple stmt)
 {
   update_stmt (stmt);
   mark_symbols_for_renaming (stmt);
@@ -244,9 +245,9 @@ finalize_stmt (tree stmt)
 /* This function finalizes STMT and appends it to the list STMTS.  */
 
 static inline void
-finalize_stmt_and_append (tree *stmts, tree stmt)
+finalize_stmt_and_append (gimple_seq *stmts, gimple stmt)
 {
-  append_to_statement_list (stmt, stmts);
+  gimple_seq_add_stmt (stmts, stmt);
   finalize_stmt (stmt);
 }
 
@@ -307,25 +308,24 @@ find_field_in_struct (d_str str, tree field_decl)
 static bool
 is_result_of_mult (tree arg, tree *num, tree struct_size)
 {
-  tree size_def_stmt = SSA_NAME_DEF_STMT (arg);
+  gimple size_def_stmt = SSA_NAME_DEF_STMT (arg);
 
   /* If the allocation statement was of the form
      D.2229_10 = <alloc_func> (D.2228_9);
      then size_def_stmt can be D.2228_9 = num.3_8 * 8;  */
 
-  if (size_def_stmt && TREE_CODE (size_def_stmt) == GIMPLE_MODIFY_STMT)
+  if (size_def_stmt && is_gimple_assign (size_def_stmt))
     {
-      tree lhs = GIMPLE_STMT_OPERAND (size_def_stmt, 0);
-      tree rhs = GIMPLE_STMT_OPERAND (size_def_stmt, 1);
+      tree lhs = gimple_assign_lhs (size_def_stmt);
 
       /* We expect temporary here.  */
       if (!is_gimple_reg (lhs))        
        return false;
 
-      if (TREE_CODE (rhs) == MULT_EXPR)
+      if (gimple_assign_rhs_code (size_def_stmt) == MULT_EXPR)
        {
-         tree arg0 = TREE_OPERAND (rhs, 0);
-         tree arg1 = TREE_OPERAND (rhs, 1);
+         tree arg0 = gimple_assign_rhs1 (size_def_stmt);
+         tree arg1 = gimple_assign_rhs2 (size_def_stmt);
 
          if (operand_equal_p (arg0, struct_size, OEP_ONLY_CONST))
            {
@@ -356,8 +356,9 @@ static bool
 decompose_indirect_ref_acc (tree str_decl, struct field_access_site *acc)
 {
   tree ref_var;
-  tree rhs, struct_size, op0, op1;
+  tree struct_size, op0, op1;
   tree before_cast;
+  enum tree_code rhs_code;
  
   ref_var = TREE_OPERAND (acc->ref, 0);
 
@@ -366,20 +367,20 @@ decompose_indirect_ref_acc (tree str_decl, struct field_access_site *acc)
 
   acc->ref_def_stmt = SSA_NAME_DEF_STMT (ref_var);
   if (!(acc->ref_def_stmt)
-      || (TREE_CODE (acc->ref_def_stmt) != GIMPLE_MODIFY_STMT))
+      || (gimple_code (acc->ref_def_stmt) != GIMPLE_ASSIGN))
     return false;
 
-  rhs = GIMPLE_STMT_OPERAND (acc->ref_def_stmt, 1);
+  rhs_code = gimple_assign_rhs_code (acc->ref_def_stmt);
 
-  if (TREE_CODE (rhs) != PLUS_EXPR
-      && TREE_CODE (rhs)!= MINUS_EXPR
-      && TREE_CODE (rhs) != POINTER_PLUS_EXPR)
+  if (rhs_code != PLUS_EXPR
+      && rhs_code != MINUS_EXPR
+      && rhs_code != POINTER_PLUS_EXPR)
     return false;
 
-  op0 = TREE_OPERAND (rhs, 0);
-  op1 = TREE_OPERAND (rhs, 1);
+  op0 = gimple_assign_rhs1 (acc->ref_def_stmt);
+  op1 = gimple_assign_rhs2 (acc->ref_def_stmt);
 
-  if (!is_array_access_through_pointer_and_index (TREE_CODE (rhs), op0, op1, 
+  if (!is_array_access_through_pointer_and_index (rhs_code, op0, op1, 
                                                 &acc->base, &acc->offset, 
                                                 &acc->cast_stmt))
     return false;
@@ -438,7 +439,7 @@ make_field_acc_node (void)
    if it is already in hashtable of function accesses F_ACCS.  */
 
 static struct field_access_site *
-is_in_field_accs (tree stmt, htab_t f_accs)
+is_in_field_accs (gimple stmt, htab_t f_accs)
 {
   return (struct field_access_site *) 
     htab_find_with_hash (f_accs, stmt, htab_hash_pointer (stmt));
@@ -466,7 +467,7 @@ add_field_acc_to_acc_sites (struct field_access_site *acc,
    accesses ACCS, this function creates it.  */ 
 
 static void
-add_access_to_acc_sites (tree stmt, tree var, htab_t accs)
+add_access_to_acc_sites (gimple stmt, tree var, htab_t accs)
 {
    struct access_site *acc;
 
@@ -538,23 +539,6 @@ finalize_new_vars_creation (void **slot, void *data ATTRIBUTE_UNUSED)
   return 1;
 }
 
-/* This function updates statements in STMT_LIST with BB info.  */
-
-static void
-add_bb_info (basic_block bb, tree stmt_list)
-{
-  if (TREE_CODE (stmt_list) == STATEMENT_LIST)
-    {
-      tree_stmt_iterator tsi;
-      for (tsi = tsi_start (stmt_list); !tsi_end_p (tsi); tsi_next (&tsi))
-       {
-         tree stmt = tsi_stmt (tsi);
-
-         set_bb_for_stmt (stmt, bb);
-       }
-    }
-}
-
 /* This function looks for the variable of NEW_TYPE type, stored in VAR.
    It returns it, if found, and NULL_TREE otherwise.  */
 
@@ -610,12 +594,12 @@ find_new_var_of_type (tree orig_var, tree new_type)
    res = NUM * sizeof(TYPE) and returns it.
    res is filled into RES.  */
 
-static tree
+static gimple
 gen_size (tree num, tree type, tree *res)
 {
   tree struct_size = TYPE_SIZE_UNIT (type);
   HOST_WIDE_INT struct_size_int = TREE_INT_CST_LOW (struct_size);
-  tree new_stmt;
+  gimple new_stmt;
 
   *res = create_tmp_var (TREE_TYPE (num), NULL);
 
@@ -625,17 +609,13 @@ gen_size (tree num, tree type, tree *res)
   if (exact_log2 (struct_size_int) == -1)
     {
       tree size = build_int_cst (TREE_TYPE (num), struct_size_int);
-      new_stmt = build_gimple_modify_stmt (*res, build2 (MULT_EXPR,
-                                                        TREE_TYPE (num),
-                                                        num, size));
+      new_stmt = gimple_build_assign_with_ops (MULT_EXPR, *res, num, size);
     }
   else
     {
       tree C = build_int_cst (TREE_TYPE (num), exact_log2 (struct_size_int));
  
-      new_stmt = build_gimple_modify_stmt (*res, build2 (LSHIFT_EXPR, 
-                                                        TREE_TYPE (num),
-                                                        num, C));
+      new_stmt = gimple_build_assign_with_ops (LSHIFT_EXPR, *res, num, C);
     }
 
   finalize_stmt (new_stmt);
@@ -646,21 +626,18 @@ gen_size (tree num, tree type, tree *res)
    BEFORE_CAST to NEW_TYPE. The cast result variable is stored 
    into RES_P. ORIG_CAST_STMT is the original cast statement.  */
 
-static tree
-gen_cast_stmt (tree before_cast, tree new_type, tree orig_cast_stmt,
+static gimple
+gen_cast_stmt (tree before_cast, tree new_type, gimple orig_cast_stmt,
               tree *res_p)
 {
-  tree lhs, new_lhs, new_stmt;
-  gcc_assert (TREE_CODE (orig_cast_stmt) == GIMPLE_MODIFY_STMT);
-    
-  lhs = GIMPLE_STMT_OPERAND (orig_cast_stmt, 0);
+  tree lhs, new_lhs;
+  gimple new_stmt;
+
+  lhs = gimple_assign_lhs (orig_cast_stmt);
   new_lhs = find_new_var_of_type (lhs, new_type);
   gcc_assert (new_lhs);
 
-  new_stmt = build_gimple_modify_stmt (new_lhs, 
-                                      build1 (NOP_EXPR, 
-                                              TREE_TYPE (new_lhs),
-                                              before_cast));
+  new_stmt = gimple_build_assign_with_ops (NOP_EXPR, new_lhs, before_cast, 0);
   finalize_stmt (new_stmt);
   *res_p = new_lhs;
   return new_stmt;
@@ -673,12 +650,14 @@ static edge
 make_edge_and_fix_phis_of_dest (basic_block bb, edge e)
 {
   edge new_e;
-  tree phi, arg;
+  tree arg;
+  gimple_stmt_iterator si;
                      
   new_e = make_edge (bb, e->dest, e->flags);
 
-  for (phi = phi_nodes (new_e->dest); phi; phi = PHI_CHAIN (phi))
+  for (si = gsi_start_phis (new_e->dest); !gsi_end_p (si); gsi_next (&si))
     {
+      gimple phi = gsi_stmt (si);
       arg = PHI_ARG_DEF_FROM_EDGE (phi, e);
       add_phi_arg (phi, arg, new_e); 
     }
@@ -686,32 +665,46 @@ make_edge_and_fix_phis_of_dest (basic_block bb, edge e)
   return new_e;
 }
 
-/* This function inserts NEW_STMTS before STMT.  */
+/* This function inserts NEW_STMT before STMT.  */
 
 static void
-insert_before_stmt (tree stmt, tree new_stmts)
+insert_before_stmt (gimple stmt, gimple new_stmt)
 {
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator bsi;
 
-  if (!stmt || !new_stmts)
+  if (!stmt || !new_stmt)
     return;
 
-  bsi = bsi_for_stmt (stmt); 
-  bsi_insert_before (&bsi, new_stmts, BSI_SAME_STMT);   
+  bsi = gsi_for_stmt (stmt); 
+  gsi_insert_before (&bsi, new_stmt, GSI_SAME_STMT);   
 }
 
 /* Insert NEW_STMTS after STMT.  */
 
 static void
-insert_after_stmt (tree stmt, tree new_stmts)
+insert_seq_after_stmt (gimple stmt, gimple_seq new_stmts)
 {
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator bsi;
 
   if (!stmt || !new_stmts)
     return;
 
-  bsi = bsi_for_stmt (stmt); 
-  bsi_insert_after (&bsi, new_stmts, BSI_SAME_STMT);   
+  bsi = gsi_for_stmt (stmt); 
+  gsi_insert_seq_after (&bsi, new_stmts, GSI_SAME_STMT);   
+}
+
+/* Insert NEW_STMT after STMT.  */
+
+static void
+insert_after_stmt (gimple stmt, gimple new_stmt)
+{
+  gimple_stmt_iterator bsi;
+
+  if (!stmt || !new_stmt)
+    return;
+
+  bsi = gsi_for_stmt (stmt); 
+  gsi_insert_after (&bsi, new_stmt, GSI_SAME_STMT);   
 }
 
 /* This function returns vector of allocation sites
@@ -730,20 +723,20 @@ get_fallocs (tree fn_decl)
    p_8 = (struct str_t *) D.2225_7;
    which is returned by this function.  */
 
-static tree
-get_final_alloc_stmt (tree alloc_stmt)
+static gimple
+get_final_alloc_stmt (gimple alloc_stmt)
 {
-  tree final_stmt;
+  gimple final_stmt;
   use_operand_p use_p;
   tree alloc_res;
 
   if (!alloc_stmt)
     return NULL;
   
-  if (TREE_CODE (alloc_stmt) != GIMPLE_MODIFY_STMT)
+  if (!is_gimple_call (alloc_stmt))
     return NULL;
 
-  alloc_res = GIMPLE_STMT_OPERAND (alloc_stmt, 0);
+  alloc_res = gimple_get_lhs (alloc_stmt);
 
   if (TREE_CODE (alloc_res) != SSA_NAME)
     return NULL;
@@ -758,7 +751,7 @@ get_final_alloc_stmt (tree alloc_stmt)
    sites of function FN_DECL. It returns false otherwise.  */
 
 static bool
-is_part_of_malloc (tree stmt, tree fn_decl)
+is_part_of_malloc (gimple stmt, tree fn_decl)
 {
   fallocs_t fallocs = get_fallocs (fn_decl);
   
@@ -767,8 +760,7 @@ is_part_of_malloc (tree stmt, tree fn_decl)
       alloc_site_t *call;
       unsigned i;
 
-      for (i = 0;
-          VEC_iterate (alloc_site_t, fallocs->allocs, i, call); i++)
+      for (i = 0; VEC_iterate (alloc_site_t, fallocs->allocs, i, call); i++)
        if (call->stmt == stmt
            || get_final_alloc_stmt (call->stmt) == stmt)
          return true;
@@ -780,7 +772,7 @@ is_part_of_malloc (tree stmt, tree fn_decl)
 struct find_stmt_data
 {
   bool found;
-  tree stmt;
+  gimple stmt;
 };
 
 /* This function looks for DATA->stmt among 
@@ -790,9 +782,8 @@ struct find_stmt_data
 static int
 find_in_field_accs (void **slot, void *data)
 {
-  struct field_access_site *f_acc = 
-    *(struct field_access_site **) slot;
-  tree stmt = ((struct find_stmt_data *)data)->stmt;
+  struct field_access_site *f_acc = *(struct field_access_site **) slot;
+  gimple stmt = ((struct find_stmt_data *)data)->stmt;
 
   if (f_acc->stmt == stmt
       || f_acc->ref_def_stmt == stmt
@@ -810,7 +801,7 @@ find_in_field_accs (void **slot, void *data)
    and false otherwise.  */
 
 static bool
-is_part_of_field_access (tree stmt, d_str str)
+is_part_of_field_access (gimple stmt, d_str str)
 {
   int i;
 
@@ -883,7 +874,8 @@ struct ref_pos
 static tree
 find_pos_in_stmt_1 (tree *tp, int *walk_subtrees, void * data)
 {
-  struct ref_pos * r_pos = (struct ref_pos *) data;
+  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
+  struct ref_pos *r_pos = (struct ref_pos *) wi->info;
   tree ref = r_pos->ref;
   tree t = *tp;
 
@@ -893,23 +885,8 @@ find_pos_in_stmt_1 (tree *tp, int *walk_subtrees, void * data)
       return t;
     }
 
-  switch (TREE_CODE (t))
-    {
-    case GIMPLE_MODIFY_STMT:
-      {
-       tree lhs = GIMPLE_STMT_OPERAND (t, 0);
-       tree rhs = GIMPLE_STMT_OPERAND (t, 1);
-       *walk_subtrees = 1;
-       walk_tree (&lhs, find_pos_in_stmt_1, data, NULL);
-       walk_tree (&rhs, find_pos_in_stmt_1, data, NULL);
-       *walk_subtrees = 0;         
-      }
-      break;
-
-    default:
-      *walk_subtrees = 1;      
-    }
-    return NULL_TREE;
+  *walk_subtrees = 1;      
+  return NULL_TREE;
 }
 
 
@@ -917,13 +894,16 @@ find_pos_in_stmt_1 (tree *tp, int *walk_subtrees, void * data)
    It returns it, if found, and NULL otherwise.  */
 
 static tree *
-find_pos_in_stmt (tree stmt, tree ref)
+find_pos_in_stmt (gimple stmt, tree ref)
 {
   struct ref_pos r_pos;
+  struct walk_stmt_info wi;
 
   r_pos.ref = ref;
   r_pos.pos = NULL;
-  walk_tree (&stmt, find_pos_in_stmt_1, &r_pos, NULL);
+  memset (&wi, 0, sizeof (wi));
+  wi.info = &r_pos;
+  walk_gimple_op (stmt, find_pos_in_stmt_1, &wi);
 
   return r_pos.pos;
 }
@@ -1003,16 +983,15 @@ replace_field_acc (struct field_access_site *acc, tree new_type)
   new_acc = build_comp_ref (new_ref, field_id, new_type);
   VEC_free (type_wrapper_t, heap, wrapper);  
 
-  if (TREE_CODE (acc->stmt) == GIMPLE_MODIFY_STMT)
+  if (is_gimple_assign (acc->stmt))
     {      
-      lhs = GIMPLE_STMT_OPERAND (acc->stmt, 0);
-      rhs = GIMPLE_STMT_OPERAND (acc->stmt, 1);
-      
-       
+      lhs = gimple_assign_lhs (acc->stmt);
+      rhs = gimple_assign_rhs1 (acc->stmt);
+
       if (lhs == acc->comp_ref)
-       GIMPLE_STMT_OPERAND (acc->stmt, 0) = new_acc;
+       gimple_assign_set_lhs (acc->stmt, new_acc);
       else if (rhs == acc->comp_ref)
-       GIMPLE_STMT_OPERAND (acc->stmt, 1) = new_acc;
+       gimple_assign_set_rhs1 (acc->stmt, new_acc);
       else
        {
          pos = find_pos_in_stmt (acc->stmt, acc->comp_ref);
@@ -1070,18 +1049,15 @@ find_structure (tree type)
    like assignments:  p.8_7 = p; or statements with rhs of 
    tree codes PLUS_EXPR and MINUS_EXPR.  */
 
-static tree
-create_base_plus_offset (tree orig_stmt, tree new_type, 
-                        tree offset)
+static gimple
+create_base_plus_offset (gimple orig_stmt, tree new_type, tree offset)
 {
-  tree lhs, rhs;
-  tree new_lhs, new_rhs;
-  tree new_stmt;
-
-  gcc_assert (TREE_CODE (orig_stmt) == GIMPLE_MODIFY_STMT);
+  tree lhs;
+  tree new_lhs;
+  gimple new_stmt;
+  tree new_op0 = NULL_TREE, new_op1 = NULL_TREE;
 
-  lhs = GIMPLE_STMT_OPERAND (orig_stmt, 0);
-  rhs = GIMPLE_STMT_OPERAND (orig_stmt, 1);
+  lhs = gimple_assign_lhs (orig_stmt);
 
   gcc_assert (TREE_CODE (lhs) == VAR_DECL
              || TREE_CODE (lhs) == SSA_NAME);
@@ -1090,15 +1066,14 @@ create_base_plus_offset (tree orig_stmt, tree new_type,
   gcc_assert (new_lhs);
   finalize_var_creation (new_lhs);
 
-  switch (TREE_CODE (rhs))
+  switch (gimple_assign_rhs_code (orig_stmt))
     {
     case PLUS_EXPR:
     case MINUS_EXPR:
     case POINTER_PLUS_EXPR:
       {
-       tree op0 = TREE_OPERAND (rhs, 0);
-       tree op1 = TREE_OPERAND (rhs, 1);
-       tree new_op0 = NULL_TREE, new_op1 = NULL_TREE;
+       tree op0 = gimple_assign_rhs1 (orig_stmt);
+       tree op1 = gimple_assign_rhs2 (orig_stmt);
        unsigned str0, str1;
        unsigned length = VEC_length (structure, structures);
        
@@ -1116,9 +1091,6 @@ create_base_plus_offset (tree orig_stmt, tree new_type,
          new_op0 = offset;
        if (!new_op1)
          new_op1 = offset;
-
-       new_rhs = build2 (TREE_CODE (rhs), TREE_TYPE (new_op0), 
-                         new_op0, new_op1);
       }
       break;
 
@@ -1126,8 +1098,9 @@ create_base_plus_offset (tree orig_stmt, tree new_type,
       gcc_unreachable();
     }
   
-  new_stmt = build_gimple_modify_stmt (new_lhs, new_rhs);
-  finalize_stmt (new_stmt);    
+  new_stmt = gimple_build_assign_with_ops (gimple_assign_rhs_code (orig_stmt),
+                                           new_lhs, new_op0, new_op1);
+  finalize_stmt (new_stmt);
 
   return new_stmt;
 }
@@ -1140,9 +1113,10 @@ create_new_field_access (struct field_access_site *f_acc,
                         struct field_entry field)
 {
   tree new_type = field.field_mapping;
-  tree new_stmt;
+  gimple new_stmt;
   tree size_res;
-  tree mult_stmt, cast_stmt;
+  gimple mult_stmt;
+  gimple cast_stmt;
   tree cast_res = NULL;
   
   if (f_acc->num)
@@ -1182,41 +1156,37 @@ create_new_field_access (struct field_access_site *f_acc,
    variable located in the condition statement at the position POS.  */
 
 static void
-create_new_stmts_for_cond_expr_1 (tree new_var, tree cond_stmt, bool pos)
+create_new_stmts_for_cond_expr_1 (tree new_var, gimple cond_stmt, unsigned pos)
 {
-  tree new_cond;
-  tree new_stmt;
+  gimple new_stmt;
   edge true_e = NULL, false_e = NULL;
   basic_block new_bb;
-  tree stmt_list;
+  gimple_stmt_iterator si;
 
-  extract_true_false_edges_from_block (bb_for_stmt (cond_stmt),
+  extract_true_false_edges_from_block (gimple_bb (cond_stmt),
                                       &true_e, &false_e);
 
-  new_cond = unshare_expr (COND_EXPR_COND (cond_stmt));
-
-  TREE_OPERAND (new_cond, pos) = new_var;
-                                     
-  new_stmt = build3 (COND_EXPR, TREE_TYPE (cond_stmt),
-                    new_cond, NULL_TREE, NULL_TREE);
+  new_stmt = gimple_build_cond (gimple_cond_code (cond_stmt),
+                              pos == 0 ? new_var : gimple_cond_lhs (cond_stmt),
+                              pos == 1 ? new_var : gimple_cond_rhs (cond_stmt),
+                              NULL_TREE,
+                              NULL_TREE);
 
   finalize_stmt (new_stmt);
 
   /* Create new basic block after bb.  */
-  new_bb = create_empty_bb (bb_for_stmt (cond_stmt));
+  new_bb = create_empty_bb (gimple_bb (cond_stmt));
 
   /* Add new condition stmt to the new_bb.  */
-  stmt_list = bb_stmt_list (new_bb);
-  append_to_statement_list (new_stmt, &stmt_list);
-  add_bb_info (new_bb, stmt_list);
+  si = gsi_start_bb (new_bb);
+  gsi_insert_after (&si, new_stmt, GSI_NEW_STMT);
 
-                 
   /* Create false and true edges from new_bb.  */
   make_edge_and_fix_phis_of_dest (new_bb, true_e);
   make_edge_and_fix_phis_of_dest (new_bb, false_e);
                  
   /* Redirect one of original edges to point to new_bb.  */
-  if (TREE_CODE (cond_stmt) == NE_EXPR)
+  if (gimple_cond_code (cond_stmt) == NE_EXPR)
     redirect_edge_succ (true_e, new_bb);
   else
     redirect_edge_succ (false_e, new_bb);
@@ -1227,23 +1197,22 @@ create_new_stmts_for_cond_expr_1 (tree new_var, tree cond_stmt, bool pos)
    recursively redirect edges to newly generated basic blocks.  */
 
 static void
-create_new_stmts_for_cond_expr (tree stmt)
+create_new_stmts_for_cond_expr (gimple stmt)
 {
-  tree cond = COND_EXPR_COND (stmt);
   tree arg0, arg1, arg;
   unsigned str0, str1;
   bool s0, s1;
   d_str str;
   tree type;
-  bool pos;
+  unsigned pos;
   int i;
   unsigned length = VEC_length (structure, structures);
 
-  gcc_assert (TREE_CODE (cond) == EQ_EXPR
-             || TREE_CODE (cond) == NE_EXPR);
+  gcc_assert (gimple_cond_code (stmt) == EQ_EXPR
+             || gimple_cond_code (stmt) == NE_EXPR);
 
-  arg0 = TREE_OPERAND (cond, 0);
-  arg1 = TREE_OPERAND (cond, 1);
+  arg0 = gimple_cond_lhs (stmt);
+  arg1 = gimple_cond_rhs (stmt);
 
   str0 = find_structure (strip_type (get_type_of_var (arg0)));
   str1 = find_structure (strip_type (get_type_of_var (arg1)));
@@ -1273,15 +1242,14 @@ create_new_stmts_for_cond_expr (tree stmt)
 /* Create a new general access to replace original access ACC
    for structure type NEW_TYPE.  */
 
-static tree
+static gimple
 create_general_new_stmt (struct access_site *acc, tree new_type)
 {
-  tree old_stmt = acc->stmt;
+  gimple old_stmt = acc->stmt;
   tree var;
-  tree new_stmt = unshare_expr (old_stmt);
+  gimple new_stmt = gimple_copy (old_stmt);
   unsigned i;
 
-  
   for (i = 0; VEC_iterate (tree, acc->vars, i, var); i++)
     {
       tree *pos;
@@ -1291,32 +1259,30 @@ create_general_new_stmt (struct access_site *acc, tree new_type)
       gcc_assert (new_var);
       finalize_var_creation (new_var);
 
-      if (TREE_CODE (new_stmt) == GIMPLE_MODIFY_STMT)
+      if (is_gimple_assign (new_stmt))
        {
-      
-         lhs = GIMPLE_STMT_OPERAND (new_stmt, 0);
-         rhs = GIMPLE_STMT_OPERAND (new_stmt, 1);
+         lhs = gimple_assign_lhs (new_stmt);
          
          if (TREE_CODE (lhs) == SSA_NAME)
            lhs = SSA_NAME_VAR (lhs);
-         if (TREE_CODE (rhs) == SSA_NAME)
-           rhs = SSA_NAME_VAR (rhs); 
+         if (gimple_assign_rhs_code (new_stmt) == SSA_NAME)
+           rhs = SSA_NAME_VAR (gimple_assign_rhs1 (new_stmt));
 
          /* It can happen that rhs is a constructor.
           Then we have to replace it to be of new_type.  */
-         if (TREE_CODE (rhs) == CONSTRUCTOR)
+         if (gimple_assign_rhs_code (new_stmt) == CONSTRUCTOR)
            {
              /* Dealing only with empty constructors right now.  */
              gcc_assert (VEC_empty (constructor_elt, 
                                     CONSTRUCTOR_ELTS (rhs)));
              rhs = build_constructor (new_type, 0);
-             GIMPLE_STMT_OPERAND (new_stmt, 1) = rhs;
+             gimple_assign_set_rhs1 (new_stmt, rhs);
            }
          
          if (lhs == var)
-           GIMPLE_STMT_OPERAND (new_stmt, 0) = new_var;
+           gimple_assign_set_lhs (new_stmt, new_var);
          else if (rhs == var)
-           GIMPLE_STMT_OPERAND (new_stmt, 1) = new_var;
+           gimple_assign_set_rhs1 (new_stmt, new_var);
          else
            {
              pos = find_pos_in_stmt (new_stmt, var);
@@ -1343,12 +1309,12 @@ static void
 create_new_stmts_for_general_acc (struct access_site *acc, d_str str)
 {
   tree type;
-  tree stmt = acc->stmt;
+  gimple stmt = acc->stmt;
   unsigned i;
 
   for (i = 0; VEC_iterate (tree, str->new_types, i, type); i++)
     {
-      tree new_stmt;
+      gimple new_stmt;
 
       new_stmt = create_general_new_stmt (acc, type);
       insert_after_stmt (stmt, new_stmt);
@@ -1361,10 +1327,10 @@ create_new_stmts_for_general_acc (struct access_site *acc, d_str str)
 static void
 create_new_general_access (struct access_site *acc, d_str str)
 {
-  tree stmt = acc->stmt;
-  switch (TREE_CODE (stmt))
+  gimple stmt = acc->stmt;
+  switch (gimple_code (stmt))
     {
-    case COND_EXPR:
+    case GIMPLE_COND:
       create_new_stmts_for_cond_expr (stmt);
       break;
 
@@ -1391,7 +1357,7 @@ create_new_acc (void **slot, void *data)
   basic_block bb = ((struct create_acc_data *)data)->bb;
   d_str str = ((struct create_acc_data *)data)->str;
 
-  if (bb_for_stmt (acc->stmt) == bb)
+  if (gimple_bb (acc->stmt) == bb)
     create_new_general_access (acc, str);
   return 1;
 }
@@ -1407,7 +1373,7 @@ create_new_field_acc (void **slot, void *data)
   d_str str = ((struct create_acc_data *)data)->str;
   int i = ((struct create_acc_data *)data)->field_index;
 
-  if (bb_for_stmt (f_acc->stmt) == bb)
+  if (gimple_bb (f_acc->stmt) == bb)
     create_new_field_access (f_acc, str->fields[i]);
   return 1;
 }
@@ -1462,11 +1428,11 @@ dump_field_acc (void **slot, void *data ATTRIBUTE_UNUSED)
 
   fprintf(dump_file, "\n");
   if (f_acc->stmt)
-    print_generic_stmt (dump_file, f_acc->stmt, 0);
+    print_gimple_stmt (dump_file, f_acc->stmt, 0, 0);
   if (f_acc->ref_def_stmt)
-    print_generic_stmt (dump_file, f_acc->ref_def_stmt, 0);
+    print_gimple_stmt (dump_file, f_acc->ref_def_stmt, 0, 0);
   if (f_acc->cast_stmt)
-    print_generic_stmt (dump_file, f_acc->cast_stmt, 0);
+    print_gimple_stmt (dump_file, f_acc->cast_stmt, 0, 0);
   return 1;
 }
 
@@ -1697,22 +1663,20 @@ free_field_accesses (htab_t f_accs)
    The edge origin is CONTEXT function.  */
 
 static void
-update_cgraph_with_malloc_call (tree malloc_stmt, tree context)
+update_cgraph_with_malloc_call (gimple malloc_stmt, tree context)
 {
-  tree call_expr;
   struct cgraph_node *src, *dest;
   tree malloc_fn_decl;
 
   if (!malloc_stmt)
     return;
 
-  call_expr = get_call_expr_in (malloc_stmt);
-  malloc_fn_decl = get_callee_fndecl (call_expr);
+  malloc_fn_decl = gimple_call_fndecl (malloc_stmt);
     
   src = cgraph_node (context);
   dest = cgraph_node (malloc_fn_decl);
   cgraph_create_edge (src, dest, malloc_stmt, 
-                     0, 0, bb_for_stmt (malloc_stmt)->loop_depth);
+                     0, 0, gimple_bb (malloc_stmt)->loop_depth);
 }
 
 /* This function generates set of statements required 
@@ -1720,40 +1684,39 @@ update_cgraph_with_malloc_call (tree malloc_stmt, tree context)
    The statements are stored in NEW_STMTS. The statement that contain
    call to malloc is returned. MALLOC_STMT is an original call to malloc.  */
 
-static tree
-create_new_malloc (tree malloc_stmt, tree new_type, tree *new_stmts, tree num)
+static gimple
+create_new_malloc (gimple malloc_stmt, tree new_type, gimple_seq *new_stmts,
+                  tree num)
 {
   tree new_malloc_size;
-  tree call_expr, malloc_fn_decl;
-  tree new_stmt, malloc_res;
-  tree call_stmt, final_stmt;
+  tree malloc_fn_decl;
+  gimple new_stmt;
+  tree malloc_res;
+  gimple call_stmt, final_stmt;
   tree cast_res;
 
   gcc_assert (num && malloc_stmt && new_type);
-  *new_stmts = alloc_stmt_list ();
+  *new_stmts = gimple_seq_alloc ();
 
   /* Generate argument to malloc as multiplication of num 
      and size of new_type.  */
   new_stmt = gen_size (num, new_type, &new_malloc_size);
-  append_to_statement_list (new_stmt, new_stmts);
+  gimple_seq_add_stmt (new_stmts, new_stmt);
 
   /* Generate new call for malloc.  */
   malloc_res = create_tmp_var (ptr_type_node, NULL);
+  add_referenced_var (malloc_res);
 
-  if (malloc_res)
-    add_referenced_var (malloc_res);
-
-  call_expr = get_call_expr_in (malloc_stmt);
-  malloc_fn_decl = get_callee_fndecl (call_expr);
-  call_expr = build_call_expr (malloc_fn_decl, 1, new_malloc_size); 
-  call_stmt = build_gimple_modify_stmt (malloc_res, call_expr);
+  malloc_fn_decl = gimple_call_fndecl (malloc_stmt);
+  call_stmt = gimple_build_call (malloc_fn_decl, 1, new_malloc_size); 
+  gimple_call_set_lhs (call_stmt, malloc_res);
   finalize_stmt_and_append (new_stmts, call_stmt);
 
   /* Create new cast statement. */
   final_stmt = get_final_alloc_stmt (malloc_stmt);
   gcc_assert (final_stmt);
   new_stmt = gen_cast_stmt (malloc_res, new_type, final_stmt, &cast_res);
-  append_to_statement_list (new_stmt, new_stmts);
+  gimple_seq_add_stmt (new_stmts, new_stmt);
  
   return call_stmt;      
 }
@@ -1764,11 +1727,10 @@ create_new_malloc (tree malloc_stmt, tree new_type, tree *new_stmts, tree num)
    they are filled into NEW_STMTS_P.  */
 
 static tree 
-gen_num_of_structs_in_malloc (tree stmt, tree str_decl, tree *new_stmts_p)
+gen_num_of_structs_in_malloc (gimple stmt, tree str_decl,
+                             gimple_seq *new_stmts_p)
 {
-  call_expr_arg_iterator iter;
   tree arg;
-  tree call_expr;
   tree struct_size;
   HOST_WIDE_INT struct_size_int;
 
@@ -1776,11 +1738,10 @@ gen_num_of_structs_in_malloc (tree stmt, tree str_decl, tree *new_stmts_p)
     return NULL_TREE;
 
   /* Get malloc argument.  */
-  call_expr = get_call_expr_in (stmt);
-  if (!call_expr)
+  if (!is_gimple_call (stmt))
     return NULL_TREE;
 
-  arg = first_call_expr_arg (call_expr, &iter);
+  arg = gimple_call_arg (stmt, 0);
 
   if (TREE_CODE (arg) != SSA_NAME
       && !TREE_CONSTANT (arg))
@@ -1793,7 +1754,8 @@ gen_num_of_structs_in_malloc (tree stmt, tree str_decl, tree *new_stmts_p)
 
   if (TREE_CODE (arg) == SSA_NAME)
     {
-      tree num, div_stmt;
+      tree num;
+      gimple div_stmt;
 
       if (is_result_of_mult (arg, &num, struct_size))
          return num;      
@@ -1804,23 +1766,16 @@ gen_num_of_structs_in_malloc (tree stmt, tree str_decl, tree *new_stmts_p)
        add_referenced_var (num);
 
       if (exact_log2 (struct_size_int) == -1)
-       div_stmt = build_gimple_modify_stmt (num, 
-                                            build2 (TRUNC_DIV_EXPR, 
-                                                    integer_type_node,
-                                                    arg, struct_size));
+       div_stmt = gimple_build_assign_with_ops (TRUNC_DIV_EXPR, num, arg,
+                                                struct_size);
       else
        {
          tree C =  build_int_cst (integer_type_node,
                                   exact_log2 (struct_size_int)); 
 
-         div_stmt = 
-           build_gimple_modify_stmt (num, build2 (RSHIFT_EXPR, 
-                                                  integer_type_node,
-                                                  arg, C)); 
+         div_stmt = gimple_build_assign_with_ops (RSHIFT_EXPR, num, arg, C); 
        }
-      *new_stmts_p = alloc_stmt_list ();
-      append_to_statement_list (div_stmt, 
-                               new_stmts_p);
+      gimple_seq_add_stmt (new_stmts_p, div_stmt);
       finalize_stmt (div_stmt);
       return num;
     }
@@ -2049,7 +2004,7 @@ field_acc_hash (const void *x)
 static int
 field_acc_eq (const void *x, const void *y)
 {
-  return ((const struct field_access_site *)x)->stmt == (const_tree)y;
+  return ((const struct field_access_site *)x)->stmt == (const_gimple)y;
 }
 
 /* This function prints an access site, defined by SLOT.  */ 
@@ -2063,7 +2018,7 @@ dump_acc (void **slot, void *data ATTRIBUTE_UNUSED)
 
   fprintf(dump_file, "\n");
   if (acc->stmt)
-    print_generic_stmt (dump_file, acc->stmt, 0);
+    print_gimple_stmt (dump_file, acc->stmt, 0, 0);
   fprintf(dump_file, " : ");
 
   for (i = 0; VEC_iterate (tree, acc->vars, i, var); i++)
@@ -2146,35 +2101,33 @@ create_new_alloc_sites (fallocs_t m_data, tree context)
   alloc_site_t *call;
   unsigned j;
   
-  for (j = 0;
-       VEC_iterate (alloc_site_t, m_data->allocs, j, call); j++)
+  for (j = 0; VEC_iterate (alloc_site_t, m_data->allocs, j, call); j++)
     {
-      tree stmt = call->stmt;
+      gimple stmt = call->stmt;
       d_str str = call->str;
       tree num;
-      tree new_stmts = NULL_TREE;
-      tree last_stmt = get_final_alloc_stmt (stmt);
+      gimple_seq new_stmts = NULL;
+      gimple last_stmt = get_final_alloc_stmt (stmt);
       unsigned i;
       tree type;
 
       num = gen_num_of_structs_in_malloc (stmt, str->decl, &new_stmts);
       if (new_stmts)
        {
-         last_stmt = tsi_stmt (tsi_last (new_stmts));
-         insert_after_stmt (last_stmt, new_stmts);
+         last_stmt = gimple_seq_last_stmt (new_stmts);
+         insert_seq_after_stmt (last_stmt, new_stmts);
        }
       
       /* Generate an allocation sites for each new structure type.  */      
-      for (i = 0; 
-          VEC_iterate (tree, str->new_types, i, type); i++)    
+      for (i = 0; VEC_iterate (tree, str->new_types, i, type); i++)    
        {
-         tree new_malloc_stmt = NULL_TREE;
-         tree last_stmt_tmp = NULL_TREE;
+         gimple new_malloc_stmt = NULL;
+         gimple last_stmt_tmp = NULL;
 
-         new_stmts = NULL_TREE;
+         new_stmts = NULL;
          new_malloc_stmt = create_new_malloc (stmt, type, &new_stmts, num);
-         last_stmt_tmp = tsi_stmt (tsi_last (new_stmts));
-         insert_after_stmt (last_stmt, new_stmts);
+         last_stmt_tmp = gimple_seq_last_stmt (new_stmts);
+         insert_seq_after_stmt (last_stmt, new_stmts);
          update_cgraph_with_malloc_call (new_malloc_stmt, context);
          last_stmt = last_stmt_tmp;
        }
@@ -2304,7 +2257,7 @@ acc_hash (const void *x)
 static int
 acc_eq (const void *x, const void *y)
 {
-  return ((const struct access_site *)x)->stmt == (const_tree)y;
+  return ((const struct access_site *)x)->stmt == (const_gimple)y;
 }
 
 /* Given a structure declaration STRUCT_DECL, and number of fields 
@@ -2405,25 +2358,19 @@ remove_structure (unsigned i)
    COND_STMT is a condition statement to check.  */
 
 static bool
-is_safe_cond_expr (tree cond_stmt)
+is_safe_cond_expr (gimple cond_stmt)
 {
-
   tree arg0, arg1;
   unsigned str0, str1;
   bool s0, s1;
   unsigned length = VEC_length (structure, structures);
 
-  tree cond = COND_EXPR_COND (cond_stmt);
-
-  if (TREE_CODE (cond) != EQ_EXPR
-      && TREE_CODE (cond) != NE_EXPR)
+  if (gimple_cond_code (cond_stmt) != EQ_EXPR
+      && gimple_cond_code (cond_stmt) != NE_EXPR)
     return false;
   
-  if (TREE_CODE_LENGTH (TREE_CODE (cond)) != 2)
-    return false;
-
-  arg0 = TREE_OPERAND (cond, 0);
-  arg1 = TREE_OPERAND (cond, 1);
+  arg0 = gimple_cond_lhs (cond_stmt);
+  arg1 = gimple_cond_rhs (cond_stmt);
 
   str0 = find_structure (strip_type (get_type_of_var (arg0)));
   str1 = find_structure (strip_type (get_type_of_var (arg1)));
@@ -2470,7 +2417,8 @@ exclude_from_accs (void **slot, void *data)
 static tree
 get_stmt_accesses (tree *tp, int *walk_subtrees, void *data)
 {
-  tree stmt = (tree) data;
+  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
+  gimple stmt = (gimple) wi->info;
   tree t = *tp;
 
   if (!t)
@@ -2478,17 +2426,6 @@ get_stmt_accesses (tree *tp, int *walk_subtrees, void *data)
 
   switch (TREE_CODE (t))
     {
-    case GIMPLE_MODIFY_STMT:
-      {
-       tree lhs = GIMPLE_STMT_OPERAND (t, 0);
-       tree rhs = GIMPLE_STMT_OPERAND (t, 1);
-       *walk_subtrees = 1;
-       walk_tree (&lhs, get_stmt_accesses, data, NULL);
-       walk_tree (&rhs, get_stmt_accesses, data, NULL);
-       *walk_subtrees = 0;         
-      }
-      break;
-
     case BIT_FIELD_REF:
       {
        tree var = TREE_OPERAND(t, 0);
@@ -2549,7 +2486,7 @@ get_stmt_accesses (tree *tp, int *walk_subtrees, void *data)
                            print_generic_expr (dump_file, type, 0);
                            fprintf (dump_file, 
                                     " has complicate access in statement ");
-                           print_generic_stmt (dump_file, stmt, 0);
+                           print_gimple_stmt (dump_file, stmt, 0, 0);
                          }
                        
                        remove_structure (i);
@@ -2558,7 +2495,7 @@ get_stmt_accesses (tree *tp, int *walk_subtrees, void *data)
                    else
                      {
                        /* Increase count of field.  */
-                       basic_block bb = bb_for_stmt (stmt);
+                       basic_block bb = gimple_bb (stmt);
                        field->count += bb->count;
 
                        /* Add stmt to the acc_sites of field.  */
@@ -2571,18 +2508,6 @@ get_stmt_accesses (tree *tp, int *walk_subtrees, void *data)
       }
       break;
 
-    case MINUS_EXPR:
-    case PLUS_EXPR:
-      {
-       tree op0 = TREE_OPERAND (t, 0);
-       tree op1 = TREE_OPERAND (t, 1);
-       *walk_subtrees = 1;         
-       walk_tree (&op0, get_stmt_accesses, data, NULL);
-       walk_tree (&op1, get_stmt_accesses, data, NULL);        
-       *walk_subtrees = 0;         
-      }
-      break;
-
     case COND_EXPR:
       {
        tree cond = COND_EXPR_COND (t);
@@ -2618,14 +2543,6 @@ get_stmt_accesses (tree *tp, int *walk_subtrees, void *data)
       }
       break;
 
-    case CALL_EXPR:
-      {
-       /* It was checked as part of stage1 that structures 
-          to be transformed cannot be passed as parameters of functions.  */
-       *walk_subtrees = 0;         
-      }
-      break;
-
     default:
       return NULL;
     }
@@ -3019,7 +2936,7 @@ add_structure (tree type)
    allocates the structure represented by STR.  */
 
 static void
-add_alloc_site (tree fn_decl, tree stmt, d_str str)
+add_alloc_site (tree fn_decl, gimple stmt, d_str str)
 {
   fallocs_t fallocs = NULL;
   alloc_site_t m_call;
@@ -3049,7 +2966,7 @@ add_alloc_site (tree fn_decl, tree stmt, d_str str)
   if (dump_file)
     {
       fprintf (dump_file, "\nAdding stmt ");
-      print_generic_stmt (dump_file, stmt, 0);
+      print_gimple_stmt (dump_file, stmt, 0, 0);
       fprintf (dump_file, " to list of mallocs.");
     }
 }
@@ -3061,11 +2978,11 @@ add_alloc_site (tree fn_decl, tree stmt, d_str str)
    Otherwise I_P contains the length of the vector of structures.  */
 
 static bool
-is_alloc_of_struct (tree stmt, unsigned *i_p)
+is_alloc_of_struct (gimple stmt, unsigned *i_p)
 {
   tree lhs;
   tree type;
-  tree final_stmt;
+  gimple final_stmt;
 
   final_stmt = get_final_alloc_stmt (stmt);
 
@@ -3075,10 +2992,10 @@ is_alloc_of_struct (tree stmt, unsigned *i_p)
   /* final_stmt should be of the form:
      T.3 = (struct_type *) T.2; */
 
-  if (TREE_CODE (final_stmt) != GIMPLE_MODIFY_STMT)
+  if (gimple_code (final_stmt) != GIMPLE_ASSIGN)
     return false;
 
-  lhs = GIMPLE_STMT_OPERAND (final_stmt, 0);      
+  lhs = gimple_assign_lhs (final_stmt);
 
   type = get_type_of_var (lhs);
       
@@ -3128,13 +3045,13 @@ safe_cond_expr_check (void **slot, void *data)
 {
   struct access_site *acc = *(struct access_site **) slot;
 
-  if (TREE_CODE (acc->stmt) == COND_EXPR
+  if (gimple_code (acc->stmt) == GIMPLE_COND
       && !is_safe_cond_expr (acc->stmt))
     {
       if (dump_file)
        {
          fprintf (dump_file, "\nUnsafe conditional statement ");
-         print_generic_stmt (dump_file, acc->stmt, 0);
+         print_gimple_stmt (dump_file, acc->stmt, 0, 0);
        }
       *(bool *) data = false;
       return 0;
@@ -3163,21 +3080,25 @@ exclude_alloc_and_field_accs_1 (d_str str, struct cgraph_node *node)
 static void
 collect_accesses_in_bb (basic_block bb)
 {
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator bsi;
+  struct walk_stmt_info wi;
+
+  memset (&wi, 0, sizeof (wi));
 
-  for (bsi = bsi_start (bb); ! bsi_end_p (bsi); bsi_next (&bsi))
+  for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
     {
-      tree stmt = bsi_stmt (bsi);
+      gimple stmt = gsi_stmt (bsi);
 
       /* In asm stmt we cannot always track the arguments,
         so we just give up.  */
-      if (TREE_CODE (stmt) == ASM_EXPR)
+      if (gimple_code (stmt) == GIMPLE_ASM)
        {
          free_structures ();
          break;
        }
 
-      walk_tree (&stmt, get_stmt_accesses, stmt, NULL);
+      wi.info = (void *) stmt;
+      walk_gimple_op (stmt, get_stmt_accesses, &wi);
     }
 }
 
@@ -3467,7 +3388,6 @@ program_redefines_malloc_p (void)
   struct cgraph_edge *c_edge;
   tree fndecl;
   tree fndecl2;
-  tree call_expr;
   
   for (c_node = cgraph_nodes; c_node; c_node = c_node->next)
     {
@@ -3475,17 +3395,16 @@ program_redefines_malloc_p (void)
 
       for (c_edge = c_node->callees; c_edge; c_edge = c_edge->next_callee)
        {
-         call_expr = get_call_expr_in (c_edge->call_stmt);
          c_node2 = c_edge->callee;
          fndecl2 = c_node2->decl;
-         if (call_expr)
+         if (is_gimple_call (c_edge->call_stmt))
            {
              const char * fname = get_name (fndecl2);
 
-             if ((call_expr_flags (call_expr) & ECF_MALLOC) &&
-                 (DECL_FUNCTION_CODE (fndecl2) != BUILT_IN_MALLOC) &&
-                 (DECL_FUNCTION_CODE (fndecl2) != BUILT_IN_CALLOC) &&
-                 (DECL_FUNCTION_CODE (fndecl2) != BUILT_IN_ALLOCA))
+             if ((gimple_call_flags (c_edge->call_stmt) & ECF_MALLOC)
+                 && (DECL_FUNCTION_CODE (fndecl2) != BUILT_IN_MALLOC)
+                 && (DECL_FUNCTION_CODE (fndecl2) != BUILT_IN_CALLOC)
+                 && (DECL_FUNCTION_CODE (fndecl2) != BUILT_IN_ALLOCA))
                return true;
 
              /* Check that there is no __builtin_object_size,
@@ -3527,15 +3446,15 @@ collect_alloc_sites (void)
       {
        for (cs = node->callees; cs; cs = cs->next_callee)
          {
-           tree stmt = cs->call_stmt;
+           gimple stmt = cs->call_stmt;
 
            if (stmt)
              {
-               tree call = get_call_expr_in (stmt);
                tree decl;
 
-               if (call && (decl = get_callee_fndecl (call)) 
-                   && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
+               if (is_gimple_call (stmt)
+                   && (decl = gimple_call_fndecl (stmt)) 
+                   && gimple_call_lhs (stmt))
                  {
                    unsigned i;
 
@@ -3555,7 +3474,7 @@ collect_alloc_sites (void)
                              {
                                fprintf (dump_file, 
                                         "\nUnsupported allocation function ");
-                               print_generic_stmt (dump_file, stmt, 0);
+                               print_gimple_stmt (dump_file, stmt, 0, 0);
                              }
                            remove_structure (i);               
                          }
@@ -4035,8 +3954,9 @@ reorg_structs_drive (void)
 static bool
 struct_reorg_gate (void)
 {
-  return flag_ipa_struct_reorg && flag_whole_program 
-    && (optimize > 0);
+  return flag_ipa_struct_reorg
+        && flag_whole_program 
+        && (optimize > 0);
 }
 
 struct simple_ipa_opt_pass pass_ipa_struct_reorg = 
index 54cdbc9..a92d345 100644
@@ -37,15 +37,15 @@ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
 struct field_access_site
 {
   /* Statement in which the access site occurs.  */
-  tree stmt;             /* D.2169_25 = D.2168_24->b;  */
+  gimple stmt;           /* D.2169_25 = D.2168_24->b;  */
   tree comp_ref;         /* D.2168_24->b  */
   tree field_decl;       /* b */
   tree ref;              /* D.2168_24  */
   tree num;              /* i.6_20  */
   tree offset;           /* D2167_22  */
   tree base;             /* p.5_23  */
-  tree ref_def_stmt;     /* D.2168_24 = D.2167_22 + p.5_23;  */
-  tree cast_stmt;        /* D.2167_22 = (struct str_t *) D.2166_21;
+  gimple ref_def_stmt;   /* D.2168_24 = D.2167_22 + p.5_23;  */
+  gimple cast_stmt;      /* D.2167_22 = (struct str_t *) D.2166_21;
                             This statement is not always present.  */
 };
 
@@ -53,7 +53,7 @@ struct field_access_site
 struct access_site
 {
   /* A statement in which the access site occurs.  */
-  tree stmt;
+  gimple stmt;
   /* A list of structure variables in the access site.  */
   VEC (tree, heap) *vars;
 };
index 81e6ddb..48d9504 100644
@@ -47,7 +47,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "ipa-utils.h"
 #include "ipa-type-escape.h"
 #include "c-common.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "cgraph.h"
 #include "output.h"
 #include "flags.h"
@@ -136,8 +136,8 @@ static bitmap_obstack ipa_obstack;
 
 /* Static functions from this file that are used 
    before being defined.  */
-static unsigned int look_for_casts (tree lhs ATTRIBUTE_UNUSED, tree);
-static bool is_cast_from_non_pointer (tree, tree, void *);
+static unsigned int look_for_casts (tree);
+static bool is_cast_from_non_pointer (tree, gimple, void *);
 
 /* Get the name of TYPE or return the string "<UNNAMED>".  */
 static const char*
@@ -308,7 +308,7 @@ get_canon_type (tree type, bool see_thru_ptrs, bool see_thru_arrays)
     while (POINTER_TYPE_P (type))
        type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
 
-  result = splay_tree_lookup(type_to_canon_type, (splay_tree_key) type);
+  result = splay_tree_lookup (type_to_canon_type, (splay_tree_key) type);
   
   if (result == NULL)
     return discover_unique_type (type);
@@ -663,9 +663,7 @@ check_cast_type (tree to_type, tree from_type)
 static bool
 is_malloc_result (tree var)
 {
-  tree def_stmt;
-  tree rhs;
-  int flags;
+  gimple def_stmt;
 
   if (!var)
     return false;
@@ -675,20 +673,13 @@ is_malloc_result (tree var)
 
   def_stmt = SSA_NAME_DEF_STMT (var);
   
-  if (TREE_CODE (def_stmt) != GIMPLE_MODIFY_STMT)
+  if (!is_gimple_call (def_stmt))
     return false;
 
-  if (var != GIMPLE_STMT_OPERAND (def_stmt, 0))
+  if (var != gimple_call_lhs (def_stmt))
     return false;
 
-  rhs = get_call_expr_in (def_stmt);
-
-  if (!rhs)
-    return false;
-
-  flags = call_expr_flags (rhs);
-    
-  return ((flags & ECF_MALLOC) != 0);
+  return ((gimple_call_flags (def_stmt) & ECF_MALLOC) != 0);
 
 }
 
@@ -769,115 +760,98 @@ check_cast (tree to_type, tree from)
   return cast;
 }
 
-typedef struct cast 
-{
-  int type;
-  tree stmt;
-}cast_t;
-
-/* This function is a callback for walk_tree called from 
-   is_cast_from_non_pointer. The data->type is set to be:
 
-   0      - if there is no cast
-   number - the number of casts from non-pointer type
-   -1     - if there is a cast that makes the type to escape
+/* Scan assignment statement S to see if there are any casts within it.  */
 
-   If data->type = number, then data->stmt will contain the 
-   last casting stmt met in traversing.  */
-
-static tree
-is_cast_from_non_pointer_1 (tree *tp, int *walk_subtrees, void *data)
+static unsigned int
+look_for_casts_stmt (gimple s)
 {
-  tree def_stmt = *tp;
+  unsigned int cast = 0;
 
+  gcc_assert (is_gimple_assign (s));
 
-  if (pointer_set_insert (visited_stmts, def_stmt))
+  if (gimple_assign_cast_p (s))
     {
-      *walk_subtrees = 0;
-      return NULL;
+      tree castfromvar = gimple_assign_rhs1 (s);
+      cast |= check_cast (TREE_TYPE (gimple_assign_lhs (s)), castfromvar);
     }
-  
-  switch (TREE_CODE (def_stmt))
+  else
     {
-    case GIMPLE_MODIFY_STMT:
-      {
-       use_operand_p use_p; 
-       ssa_op_iter iter;
-       tree lhs = GIMPLE_STMT_OPERAND (def_stmt, 0);
-       tree rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);
-
-        unsigned int cast = look_for_casts (lhs, rhs);
-       /* Check that only one cast happened, and it's of 
-          non-pointer type.  */
-       if ((cast & CT_FROM_NON_P) == (CT_FROM_NON_P) 
-           && (cast & ~(CT_FROM_NON_P)) == 0)
-         {
-           ((cast_t *)data)->stmt = def_stmt;
-           ((cast_t *)data)->type++;
-
-           FOR_EACH_SSA_USE_OPERAND (use_p, def_stmt, iter, SSA_OP_ALL_USES)
-             {
-               walk_use_def_chains (USE_FROM_PTR (use_p), is_cast_from_non_pointer, 
-                                    data, false);
-               if (((cast_t*)data)->type == -1)
-                 return def_stmt;
-             }
-         }
-
-       /* Check that there is no cast, or cast is not harmful. */
-       else if ((cast & CT_NO_CAST) == (CT_NO_CAST)
-                || (cast & CT_DOWN) == (CT_DOWN)
-                || (cast & CT_UP) == (CT_UP)
-                || (cast & CT_USELESS) == (CT_USELESS)
-                || (cast & CT_FROM_MALLOC) == (CT_FROM_MALLOC))
-         {
-           FOR_EACH_SSA_USE_OPERAND (use_p, def_stmt, iter, SSA_OP_ALL_USES)
-             {
-               walk_use_def_chains (USE_FROM_PTR (use_p), is_cast_from_non_pointer, 
-                                    data, false);
-               if (((cast_t*)data)->type == -1)
-                 return def_stmt;
-             }     
-         }
+      size_t i;
+      for (i = 0; i < gimple_num_ops (s); i++)
+       cast |= look_for_casts (gimple_op (s, i));
+    }
 
-       /* The cast is harmful.  */
-       else
-         {
-           ((cast_t *)data)->type = -1;
-           return def_stmt;
-         }
+  if (!cast)
+    cast = CT_NO_CAST;
 
-       *walk_subtrees = 0;
-      }     
-      break;
+  return cast;
+} 
 
-    default:
-      {
-       *walk_subtrees = 0;
-       break;
-      }
-    }
 
-  return NULL;
-}
+typedef struct cast 
+{
+  int type;
+  gimple stmt;
+} cast_t;
 
 /* This function is a callback for walk_use_def_chains function called 
    from is_array_access_through_pointer_and_index.  */
 
 static bool
-is_cast_from_non_pointer (tree var, tree def_stmt, void *data)
+is_cast_from_non_pointer (tree var, gimple def_stmt, void *data)
 {
-
   if (!def_stmt || !var)
     return false;
   
-  if (TREE_CODE (def_stmt) == PHI_NODE)
+  if (gimple_code (def_stmt) == GIMPLE_PHI)
     return false;
 
   if (SSA_NAME_IS_DEFAULT_DEF (var))
       return false;
 
-  walk_tree (&def_stmt, is_cast_from_non_pointer_1, data, NULL);
+  if (is_gimple_assign (def_stmt))
+    {
+      use_operand_p use_p; 
+      ssa_op_iter iter;
+      unsigned int cast = look_for_casts_stmt (def_stmt);
+
+      /* Check that only one cast happened, and it's of non-pointer
+        type.  */
+      if ((cast & CT_FROM_NON_P) == (CT_FROM_NON_P) 
+         && (cast & ~(CT_FROM_NON_P)) == 0)
+       {
+         ((cast_t *)data)->stmt = def_stmt;
+         ((cast_t *)data)->type++;
+
+         FOR_EACH_SSA_USE_OPERAND (use_p, def_stmt, iter, SSA_OP_ALL_USES)
+           {
+             walk_use_def_chains (USE_FROM_PTR (use_p),
+                                  is_cast_from_non_pointer, data, false);
+             if (((cast_t*)data)->type == -1)
+               break;
+           }
+       }
+      /* Check that there is no cast, or cast is not harmful. */
+      else if ((cast & CT_NO_CAST) == (CT_NO_CAST)
+         || (cast & CT_DOWN) == (CT_DOWN)
+         || (cast & CT_UP) == (CT_UP)
+         || (cast & CT_USELESS) == (CT_USELESS)
+         || (cast & CT_FROM_MALLOC) == (CT_FROM_MALLOC))
+       {
+         FOR_EACH_SSA_USE_OPERAND (use_p, def_stmt, iter, SSA_OP_ALL_USES)
+           {
+             walk_use_def_chains (USE_FROM_PTR (use_p),
+                                  is_cast_from_non_pointer, data, false);
+             if (((cast_t*)data)->type == -1)
+               break;
+           }       
+       }
+       /* The cast is harmful.  */
+       else
+         ((cast_t *)data)->type = -1;
+    }     
+
   if (((cast_t*)data)->type == -1)
     return true;
   
@@ -930,9 +904,10 @@ is_cast_from_non_pointer (tree var, tree def_stmt, void *data)
 bool
 is_array_access_through_pointer_and_index (enum tree_code code, tree op0, 
                                           tree op1, tree *base, tree *offset,
-                                          tree *offset_cast_stmt)
+                                          gimple *offset_cast_stmt)
 {
-  tree before_cast, before_cast_def_stmt;
+  tree before_cast;
+  gimple before_cast_def_stmt;
   cast_t op0_cast, op1_cast;
 
   *base = NULL;
@@ -1014,26 +989,23 @@ is_array_access_through_pointer_and_index (enum tree_code code, tree op0,
   /* before_cast_def_stmt should be of the form:
      D.1605_6 = i.1_5 * 16; */
   
-  if (TREE_CODE (before_cast_def_stmt) == GIMPLE_MODIFY_STMT)
+  if (is_gimple_assign (before_cast_def_stmt))
     {
-      tree lhs = GIMPLE_STMT_OPERAND (before_cast_def_stmt,0);
-      tree rhs = GIMPLE_STMT_OPERAND (before_cast_def_stmt,1);
-
       /* We expect temporary here.  */
-      if (!is_gimple_reg (lhs))        
+      if (!is_gimple_reg (gimple_assign_lhs (before_cast_def_stmt)))
        return false;
 
-      if (TREE_CODE (rhs) == MULT_EXPR)
+      if (gimple_assign_rhs_code (before_cast_def_stmt) == MULT_EXPR)
        {
-         tree arg0 = TREE_OPERAND (rhs, 0);
-         tree arg1 = TREE_OPERAND (rhs, 1);
+         tree arg0 = gimple_assign_rhs1 (before_cast_def_stmt);
+         tree arg1 = gimple_assign_rhs2 (before_cast_def_stmt);
          tree unit_size = 
            TYPE_SIZE_UNIT (TREE_TYPE (TYPE_MAIN_VARIANT (TREE_TYPE (op0))));
 
          if (!(CONSTANT_CLASS_P (arg0) 
-             && simple_cst_equal (arg0,unit_size))
+             && simple_cst_equal (arg0, unit_size))
              && !(CONSTANT_CLASS_P (arg1) 
-             && simple_cst_equal (arg1,unit_size)))
+             && simple_cst_equal (arg1, unit_size)))
            return false;                          
        }
       else
@@ -1173,7 +1145,11 @@ check_tree (tree t)
     check_tree (TREE_OPERAND (t, 0));
 
   if (SSA_VAR_P (t) || (TREE_CODE (t) == FUNCTION_DECL))
-    check_operand (t);
+    {
+      check_operand (t);
+      if (DECL_P (t) && DECL_INITIAL (t))
+       check_tree (DECL_INITIAL (t));
+    }
 }
 
 /* Create an address_of edge FROM_TYPE.TO_TYPE.  */
@@ -1260,15 +1236,13 @@ look_for_address_of (tree t)
 }
 
 
-/* Scan tree T to see if there are any casts within it.
-   LHS Is the LHS of the expression involving the cast.  */
+/* Scan tree T to see if there are any casts within it.  */
 
 static unsigned int 
-look_for_casts (tree lhs ATTRIBUTE_UNUSED, tree t)
+look_for_casts (tree t)
 {
   unsigned int cast = 0;
 
-
   if (is_gimple_cast (t) || TREE_CODE (t) == VIEW_CONVERT_EXPR)
     {
       tree castfromvar = TREE_OPERAND (t, 0);
@@ -1302,7 +1276,7 @@ static void
 check_rhs_var (tree t)
 {
   look_for_address_of (t);
-  check_tree(t);
+  check_tree (t);
 }
 
 /* Check to see if T is an assignment to a static var we are
@@ -1311,7 +1285,7 @@ check_rhs_var (tree t)
 static void
 check_lhs_var (tree t)
 {
-  check_tree(t);
+  check_tree (t);
 }
 
 /* This is a scaled down version of get_asm_expr_operands from
@@ -1322,35 +1296,15 @@ check_lhs_var (tree t)
    analyzed and STMT is the actual asm statement.  */
 
 static void
-get_asm_expr_operands (tree stmt)
+check_asm (gimple stmt)
 {
-  int noutputs = list_length (ASM_OUTPUTS (stmt));
-  const char **oconstraints
-    = (const char **) alloca ((noutputs) * sizeof (const char *));
-  int i;
-  tree link;
-  const char *constraint;
-  bool allows_mem, allows_reg, is_inout;
-  
-  for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
-    {
-      oconstraints[i] = constraint
-       = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
-      parse_output_constraint (&constraint, i, 0, 0,
-                              &allows_mem, &allows_reg, &is_inout);
-      
-      check_lhs_var (TREE_VALUE (link));
-    }
+  size_t i;
 
-  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
-    {
-      constraint
-       = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
-      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
-                             oconstraints, &allows_mem, &allows_reg);
-      
-      check_rhs_var (TREE_VALUE (link));
-    }
+  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
+    check_lhs_var (gimple_asm_output_op (stmt, i));
+
+  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
+    check_rhs_var (gimple_asm_input_op (stmt, i));
   
   /* There is no code here to check for asm memory clobbers.  The
      casual maintainer might think that such code would be necessary,
@@ -1360,22 +1314,22 @@ get_asm_expr_operands (tree stmt)
      assumed to already escape.  So, we are protected here.  */
 }
 
-/* Check the parameters of a function call to CALL_EXPR to mark the
+
+/* Check the parameters of function call to CALL to mark the
    types that pass across the function boundary.  Also check to see if
    this is either an indirect call, a call outside the compilation
    unit.  */
 
 static void
-check_call (tree call_expr) 
+check_call (gimple call)
 {
-  tree operand;
-  tree callee_t = get_callee_fndecl (call_expr);
+  tree callee_t = gimple_call_fndecl (call);
   struct cgraph_node* callee;
   enum availability avail = AVAIL_NOT_AVAILABLE;
-  call_expr_arg_iterator iter;
+  size_t i;
 
-  FOR_EACH_CALL_EXPR_ARG (operand, iter, call_expr)
-    check_rhs_var (operand);
+  for (i = 0; i < gimple_call_num_args (call); i++)
+    check_rhs_var (gimple_call_arg (call, i));
   
   if (callee_t)
     {
@@ -1388,12 +1342,11 @@ check_call (tree call_expr)
         parameters.  */
       if (TYPE_ARG_TYPES (TREE_TYPE (callee_t)))
        {
-         for (arg_type = TYPE_ARG_TYPES (TREE_TYPE (callee_t)),
-                operand = first_call_expr_arg (call_expr, &iter);
+         for (arg_type = TYPE_ARG_TYPES (TREE_TYPE (callee_t)), i = 0;
               arg_type && TREE_VALUE (arg_type) != void_type_node;
-              arg_type = TREE_CHAIN (arg_type),
-                operand = next_call_expr_arg (&iter))
+              arg_type = TREE_CHAIN (arg_type), i++)
            {
+             tree operand = gimple_call_arg (call, i);
              if (operand)
                {
                  last_arg_type = TREE_VALUE(arg_type);
@@ -1411,15 +1364,14 @@ check_call (tree call_expr)
          /* FIXME - According to Geoff Keating, we should never
             have to do this; the front ends should always process
             the arg list from the TYPE_ARG_LIST. */
-         for (arg_type = DECL_ARGUMENTS (callee_t),
-                operand = first_call_expr_arg (call_expr, &iter);
+         for (arg_type = DECL_ARGUMENTS (callee_t), i = 0;
               arg_type;
-              arg_type = TREE_CHAIN (arg_type),
-                operand = next_call_expr_arg (&iter))
+              arg_type = TREE_CHAIN (arg_type), i++)
            {
+             tree operand = gimple_call_arg (call, i);
              if (operand)
                {
-                 last_arg_type = TREE_TYPE(arg_type);
+                 last_arg_type = TREE_TYPE (arg_type);
                  check_cast (last_arg_type, operand);
                } 
              else 
@@ -1433,10 +1385,9 @@ check_call (tree call_expr)
       /* In the case where we have a var_args function, we need to
         check the remaining parameters against the last argument.  */
       arg_type = last_arg_type;
-      for (;
-          operand != NULL_TREE;
-          operand = next_call_expr_arg (&iter))
+      for ( ; i < gimple_call_num_args (call); i++)
        {
+         tree operand = gimple_call_arg (call, i);
          if (arg_type)
            check_cast (arg_type, operand);
          else 
@@ -1457,16 +1408,16 @@ check_call (tree call_expr)
      are any bits available for the callee (such as by declaration or
      because it is builtin) and process solely on the basis of those
      bits. */
-
   if (avail == AVAIL_NOT_AVAILABLE || avail == AVAIL_OVERWRITABLE)
     {
       /* If this is a direct call to an external function, mark all of
         the parameter and return types.  */
-      FOR_EACH_CALL_EXPR_ARG (operand, iter, call_expr)
+      for (i = 0; i < gimple_call_num_args (call); i++)
        {
+         tree operand = gimple_call_arg (call, i);
          tree type = get_canon_type (TREE_TYPE (operand), false, false);
          mark_interesting_type (type, EXPOSED_PARAMETER);
-    }
+       }
          
       if (callee_t) 
        {
@@ -1494,7 +1445,8 @@ okay_pointer_operation (enum tree_code code, tree op0, tree op1)
     case PLUS_EXPR:
     case POINTER_PLUS_EXPR:
       {
-       tree base, offset, offset_cast_stmt;
+       tree base, offset;
+       gimple offset_cast_stmt;
 
        if (POINTER_TYPE_P (op0type)
            && TREE_CODE (op0) == SSA_NAME 
@@ -1528,150 +1480,124 @@ okay_pointer_operation (enum tree_code code, tree op0, tree op1)
   return false;
 }
 
-/* TP is the part of the tree currently under the microscope.
-   WALK_SUBTREES is part of the walk_tree api but is unused here.
-   DATA is cgraph_node of the function being walked.  */
 
-/* FIXME: When this is converted to run over SSA form, this code
-   should be converted to use the operand scanner.  */
 
-static tree
-scan_for_refs (tree *tp, int *walk_subtrees, void *data)
+/* Helper for scan_for_refs.  Check the operands of an assignment to
+   mark types that may escape.  */
+
+static void
+check_assign (gimple t)
 {
-  struct cgraph_node *fn = (struct cgraph_node *) data;
-  tree t = *tp;
+  /* First look on the lhs and see what variable is stored to */
+  check_lhs_var (gimple_assign_lhs (t));
+
+  /* For the purposes of figuring out what the cast affects */
 
-  switch (TREE_CODE (t))  
+  /* Next check the operands on the rhs to see if they are ok. */
+  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (t)))
     {
-    case VAR_DECL:
-      if (DECL_INITIAL (t))
-       walk_tree (&DECL_INITIAL (t), scan_for_refs, fn, visited_nodes);
-      *walk_subtrees = 0;
+    case tcc_binary:       
+      {
+       tree op0 = gimple_assign_rhs1 (t);
+       tree type0 = get_canon_type (TREE_TYPE (op0), false, false);
+       tree op1 = gimple_assign_rhs2 (t);
+       tree type1 = get_canon_type (TREE_TYPE (op1), false, false);
+
+       /* If this is pointer arithmetic of any bad sort, then
+           we need to mark the types as bad.  For binary
+           operations, no binary operator we currently support
+           is always "safe" in regard to what it would do to
+           pointers for purposes of determining which types
+           escape, except operations of the size of the type.
+           It is possible that min and max under the right set
+           of circumstances and if the moon is in the correct
+           place could be safe, but it is hard to see how this
+           is worth the effort.  */
+       if (type0 && POINTER_TYPE_P (type0)
+           && !okay_pointer_operation (gimple_assign_rhs_code (t), op0, op1))
+         mark_interesting_type (type0, FULL_ESCAPE);
+
+       if (type1 && POINTER_TYPE_P (type1)
+           && !okay_pointer_operation (gimple_assign_rhs_code (t), op1, op0))
+         mark_interesting_type (type1, FULL_ESCAPE);
+
+       look_for_casts (op0);
+       look_for_casts (op1);
+       check_rhs_var (op0);
+       check_rhs_var (op1);
+      }
       break;
 
-    case GIMPLE_MODIFY_STMT:
+    case tcc_unary:
       {
-       /* First look on the lhs and see what variable is stored to */
-       tree lhs = GIMPLE_STMT_OPERAND (t, 0);
-       tree rhs = GIMPLE_STMT_OPERAND (t, 1);
+       tree op0 = gimple_assign_rhs1 (t);
+       tree type0 = get_canon_type (TREE_TYPE (op0), false, false);
+
+       /* For unary operations, if the operation is NEGATE or ABS on
+          a pointer, this is also considered pointer arithmetic and
+          thus, bad for business.  */
+       if (type0
+           && POINTER_TYPE_P (type0)
+           && (TREE_CODE (op0) == NEGATE_EXPR
+             || TREE_CODE (op0) == ABS_EXPR))
+         mark_interesting_type (type0, FULL_ESCAPE);
+
+       check_rhs_var (op0);
+       look_for_casts (op0);
+      }
+      break;
 
-       check_lhs_var (lhs);
-       check_cast (TREE_TYPE (lhs), rhs);
+    case tcc_reference:
+      look_for_casts (gimple_assign_rhs1 (t));
+      check_rhs_var (gimple_assign_rhs1 (t));
+      break;
 
-       /* For the purposes of figuring out what the cast affects */
+    case tcc_declaration:
+      check_rhs_var (gimple_assign_rhs1 (t));
+      break;
 
-       /* Next check the operands on the rhs to see if they are ok. */
-       switch (TREE_CODE_CLASS (TREE_CODE (rhs))) 
-         {
-         case tcc_binary:          
-           {
-             tree op0 = TREE_OPERAND (rhs, 0);
-             tree type0 = get_canon_type (TREE_TYPE (op0), false, false);
-             tree op1 = TREE_OPERAND (rhs, 1);
-             tree type1 = get_canon_type (TREE_TYPE (op1), false, false);
-             /* If this is pointer arithmetic of any bad sort, then
-                we need to mark the types as bad.  For binary
-                operations, no binary operator we currently support
-                is always "safe" in regard to what it would do to
-                pointers for purposes of determining which types
-                escape, except operations of the size of the type.
-                It is possible that min and max under the right set
-                of circumstances and if the moon is in the correct
-                place could be safe, but it is hard to see how this
-                is worth the effort.  */
-             if (type0 && POINTER_TYPE_P (type0)
-                 && !okay_pointer_operation (TREE_CODE (rhs), op0, op1))
-               mark_interesting_type (type0, FULL_ESCAPE);
-             if (type1 && POINTER_TYPE_P (type1)
-                 && !okay_pointer_operation (TREE_CODE (rhs), op1, op0))
-               mark_interesting_type (type1, FULL_ESCAPE);
-             
-             look_for_casts (lhs, op0);
-             look_for_casts (lhs, op1);
-             check_rhs_var (op0);
-             check_rhs_var (op1);
-           }
-           break;
-         case tcc_unary:
-           {
-             tree op0 = TREE_OPERAND (rhs, 0);
-             tree type0 = get_canon_type (TREE_TYPE (op0), false, false);
-             /* For unary operations, if the operation is NEGATE or
-                ABS on a pointer, this is also considered pointer
-                arithmetic and thus, bad for business.  */
-             if (type0 && (TREE_CODE (op0) == NEGATE_EXPR
-                  || TREE_CODE (op0) == ABS_EXPR)
-                 && POINTER_TYPE_P (type0))
-               {
-                 mark_interesting_type (type0, FULL_ESCAPE);
-               }
-             check_rhs_var (op0);
-             look_for_casts (lhs, op0);
-             look_for_casts (lhs, rhs);
-           }
+    case tcc_expression:
+      if (gimple_assign_rhs_code (t) == ADDR_EXPR)
+       {
+         tree rhs = gimple_assign_rhs1 (t);
+         look_for_casts (TREE_OPERAND (rhs, 0));
+         check_rhs_var (rhs);
+       }
+      break;
 
-           break;
-         case tcc_reference:
-           look_for_casts (lhs, rhs);
-           check_rhs_var (rhs);
-           break;
-         case tcc_declaration:
-           check_rhs_var (rhs);
-           break;
-         case tcc_expression:
-           switch (TREE_CODE (rhs)) 
-             {
-             case ADDR_EXPR:
-               look_for_casts (lhs, TREE_OPERAND (rhs, 0));
-               check_rhs_var (rhs);
-               break;
-             default:
-               break;
-             }
-           break;
-         case tcc_vl_exp:
-           switch (TREE_CODE (rhs))
-             {
-             case CALL_EXPR:
-               /* If this is a call to malloc, squirrel away the
-                  result so we do mark the resulting cast as being
-                  bad.  */
-               check_call (rhs);
-               break;
-             default:
-               break;
-             }
-           break;
-         default:
-           break;
-         }
-       *walk_subtrees = 0;
-      }
+    default:
       break;
+    }
+}
+
 
-    case ADDR_EXPR:
-      /* This case is here to find addresses on rhs of constructors in
-        decl_initial of static variables. */
-      check_rhs_var (t);
-      *walk_subtrees = 0;
+/* Scan statement T for references to types and mark anything
+   interesting.  */
+
+static void
+scan_for_refs (gimple t)
+{
+  switch (gimple_code (t))  
+    {
+    case GIMPLE_ASSIGN:
+      check_assign (t);
       break;
 
-    case CALL_EXPR: 
+    case GIMPLE_CALL: 
+      /* If this is a call to malloc, squirrel away the result so we
+        do mark the resulting cast as being bad.  */
       check_call (t);
-      *walk_subtrees = 0;
       break;
       
-    case ASM_EXPR:
-      get_asm_expr_operands (t);
-      *walk_subtrees = 0;
+    case GIMPLE_ASM:
+      check_asm (t);
       break;
       
     default:
       break;
     }
-  return NULL;
+
+  return;
 }
 
 
@@ -1721,7 +1647,7 @@ analyze_variable (struct varpool_node *vnode)
   gcc_assert (TREE_CODE (global) == VAR_DECL);
 
   if (DECL_INITIAL (global))
-    walk_tree (&DECL_INITIAL (global), scan_for_refs, NULL, visited_nodes);
+    check_tree (DECL_INITIAL (global));
 }
 
 /* This is the main routine for finding the reference patterns for
@@ -1742,10 +1668,9 @@ analyze_function (struct cgraph_node *fn)
 
     FOR_EACH_BB_FN (this_block, this_cfun)
       {
-       block_stmt_iterator bsi;
-       for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
-         walk_tree (bsi_stmt_ptr (bsi), scan_for_refs, 
-                    fn, visited_nodes);
+       gimple_stmt_iterator gsi;
+       for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
+         scan_for_refs (gsi_stmt (gsi));
       }
   }
 
@@ -1761,8 +1686,7 @@ analyze_function (struct cgraph_node *fn)
          if (TREE_CODE (var) == VAR_DECL 
              && DECL_INITIAL (var)
              && !TREE_STATIC (var))
-           walk_tree (&DECL_INITIAL (var), scan_for_refs, 
-                      fn, visited_nodes);
+           check_tree (DECL_INITIAL (var));
          get_canon_type (TREE_TYPE (var), false, false);
        }
     }
@@ -2215,4 +2139,3 @@ struct simple_ipa_opt_pass pass_ipa_type_escape =
   0                                     /* todo_flags_finish */
  }
 };
-
index c851a27..13c3b72 100644
@@ -23,11 +23,11 @@ along with GCC; see the file COPYING3.  If not see
 #include "tree.h"
 
 bool   ipa_type_escape_type_contained_p (tree type);
-bool   ipa_type_escape_field_does_not_clobber_p (tree record_type, tree field_type);
+bool   ipa_type_escape_field_does_not_clobber_p (tree, tree);
 int    ipa_type_escape_star_count_of_interesting_type (tree type); 
 int    ipa_type_escape_star_count_of_interesting_or_array_type (tree type);
 bool   is_array_access_through_pointer_and_index (enum tree_code, tree, tree,
-                                                 tree *, tree *, tree *);
+                                                 tree *, tree *, gimple *);
 
 
 #endif  /* GCC_IPA_TYPE_ESCAPE_H  */
index 2a95629..729a84d 100644
@@ -32,7 +32,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "ipa-utils.h"
 #include "ipa-reference.h"
 #include "c-common.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "cgraph.h"
 #include "output.h"
 #include "flags.h"
index 7fb266d..a2c3cca 100644
@@ -1,3 +1,48 @@
+2008-07-28  Richard Guenther  <rguenther@suse.de>
+
+       Merge from gimple-tuples-branch.
+
+       2008-07-18  Richard Guenther  <rguenther@suse.de>
+
+       * expr.c: Include tree-iterator.h.
+       * Make-lang.in (expr.o): Add tree-iterator.h dependency.
+
+       2008-07-18  Aldy Hernandez  <aldyh@redhat.com>
+
+       * java-gimplify.c: Include gimple.h instead of tree-gimple.h.
+       * expr.c: Same.
+
+       2008-07-14  Aldy Hernandez  <aldyh@redhat.com>
+
+       * java-gimplify.c (java_gimplify_expr): Same.
+       (java_gimplify_modify_expr): Same.
+       * java-tree.h: Rename GENERIC_NEXT to TREE_CHAIN.
+
+       2008-05-02  Diego Novillo  <dnovillo@google.com>
+
+       * expr.c (build_java_throw_out_of_bounds_exception): Fix
+       mixed declarations and code.
+
+       2008-05-02  Doug Kwan  <dougkwan@google.com>
+       
+       * expr.c (build_java_throw_out_of_bounds_exception ): Wrap call to
+       _Jv_ThrowBadArrayIndex with a COMPOUND_EXPR to return 0.
+
+       2008-02-19  Diego Novillo  <dnovillo@google.com>
+
+       http://gcc.gnu.org/ml/gcc-patches/2008-02/msg00804.html
+
+       * java-gimplify.c (java_gimplify_self_mod_expr): Change
+       gimple_seq arguments to gimple_seq *.  Update all users.
+
+       2007-11-26  Aldy Hernandez  <aldyh@redhat.com>
+
+       * java-gimplify.c (java_gimplify_expr): Make pre_p and post_p
+       sequences.
+       (java_gimplify_self_mod_expr): Same.                                    
+       * java-tree.h (java_gimplify_expr): Make pre_p and post_p               
+       sequences.
+
 2008-07-24  Jan Hubicka  <jh@suse.cz>
 
        * java/decl.c: Include cgraph.h
index c5446db..f188c7d 100644
@@ -269,7 +269,8 @@ java/except.o: java/except.c $(CONFIG_H) $(JAVA_TREE_H) java/jcf.h $(REAL_H) \
 java/expr.o: java/expr.c $(CONFIG_H) $(JAVA_TREE_H) java/jcf.h $(REAL_H) \
   $(RTL_H) $(EXPR_H) java/javaop.h java/java-opcodes.h except.h \
   java/java-except.h java/java-except.h java/parse.h toplev.h \
-  $(SYSTEM_H) coretypes.h $(TM_H) $(GGC_H) gt-java-expr.h $(TARGET_H)
+  $(SYSTEM_H) coretypes.h $(TM_H) $(GGC_H) gt-java-expr.h $(TARGET_H) \
+  tree-iterator.h
 java/jcf-depend.o: java/jcf-depend.c $(CONFIG_H) $(SYSTEM_H) coretypes.h \
   $(TM_H) java/jcf.h
 java/jcf-parse.o: java/jcf-parse.c $(CONFIG_H) $(JAVA_TREE_H) $(FLAGS_H) \
@@ -298,7 +299,7 @@ java/verify-impl.o: java/verify-impl.c $(CONFIG_H) java/verify.h $(SYSTEM_H) \
 java/zextract.o: java/zextract.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
   java/zipfile.h
 java/java-gimplify.o: java/java-gimplify.c $(CONFIG_H) $(SYSTEM_H) \
-  coretypes.h $(TM_H) $(JAVA_TREE_H) $(TREE_GIMPLE_H) toplev.h
+  coretypes.h $(TM_H) $(JAVA_TREE_H) $(GIMPLE_H) toplev.h
 
 # jcf-io.o needs $(ZLIBINC) added to cflags.
 java/jcf-io.o: java/jcf-io.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
index c64f6d6..ff28bbb 100644
@@ -42,7 +42,8 @@ The Free Software Foundation is independent of Sun Microsystems, Inc.  */
 #include "toplev.h"
 #include "except.h"
 #include "ggc.h"
-#include "tree-gimple.h"
+#include "tree-iterator.h"
+#include "gimple.h"
 #include "target.h"
 
 static void flush_quick_stack (void);
@@ -814,10 +815,20 @@ encode_newarray_type (tree type)
 static tree
 build_java_throw_out_of_bounds_exception (tree index)
 {
-  tree node = build_call_nary (int_type_node,
+  tree node;
+
+  /* We need to build a COMPOUND_EXPR because _Jv_ThrowBadArrayIndex()
+     has void return type.  We cannot just set the type of the CALL_EXPR below
+     to int_type_node because we would lose it during gimplification.  */
+  gcc_assert (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (soft_badarrayindex_node))));
+  node = build_call_nary (void_type_node,
                               build_address_of (soft_badarrayindex_node),
                               1, index);
+  TREE_SIDE_EFFECTS (node) = 1;
+
+  node = build2 (COMPOUND_EXPR, int_type_node, node, integer_zero_node);
   TREE_SIDE_EFFECTS (node) = 1;        /* Allows expansion within ANDIF */
+
   return (node);
 }
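
The comment above explains why the call keeps its void type and is wrapped in a COMPOUND_EXPR whose value has int type.  As an illustrative C analogue (the helper below is hypothetical and the prototype is simplified), the generated tree behaves like a comma expression:

  extern void _Jv_ThrowBadArrayIndex (int);

  static int
  throw_bad_index_analogue (int index)
  {
    /* The call itself yields no value; the comma operator (COMPOUND_EXPR)
       supplies the integer 0 as the value of the whole expression.  */
    return (_Jv_ThrowBadArrayIndex (index), 0);
  }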
 
index 5358241..790cb13 100644
@@ -28,12 +28,13 @@ The Free Software Foundation is independent of Sun Microsystems, Inc.  */
 #include "tree.h"
 #include "java-tree.h"
 #include "tree-dump.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "toplev.h"
 
 static tree java_gimplify_block (tree);
 static enum gimplify_status java_gimplify_modify_expr (tree *);
-static enum gimplify_status java_gimplify_self_mod_expr (tree*, tree*, tree *);
+static enum gimplify_status java_gimplify_self_mod_expr (tree *, gimple_seq *,
+                                                        gimple_seq *);
 
 static void dump_java_tree (enum tree_dump_index, tree);
 
@@ -53,8 +54,7 @@ java_genericize (tree fndecl)
 /* Gimplify a Java tree.  */
 
 int
-java_gimplify_expr (tree *expr_p, tree *pre_p ATTRIBUTE_UNUSED,
-                   tree *post_p ATTRIBUTE_UNUSED)
+java_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
 {
   enum tree_code code = TREE_CODE (*expr_p);
 
@@ -68,9 +68,6 @@ java_gimplify_expr (tree *expr_p, tree *pre_p ATTRIBUTE_UNUSED,
       *expr_p = java_replace_reference (*expr_p, /* want_lvalue */ false);
       return GS_UNHANDLED;
 
-      /* We don't handle GIMPLE_MODIFY_STMT, as MODIFY_EXPRs with java
-         semantics should only be generated by the front-end, and never
-         by anything after gimplification.  */
     case MODIFY_EXPR:
       return java_gimplify_modify_expr (expr_p);
 
@@ -142,7 +139,7 @@ java_gimplify_modify_expr (tree *modify_expr_p)
     {
       tree new_lhs = java_replace_reference (lhs, /* want_lvalue */ true);
       tree new_rhs = build1 (NOP_EXPR, TREE_TYPE (new_lhs), rhs);
-      modify_expr = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (new_lhs),
+      modify_expr = build2 (MODIFY_EXPR, TREE_TYPE (new_lhs),
                            new_lhs, new_rhs);
       modify_expr = build1 (NOP_EXPR, lhs_type, modify_expr);
     }
@@ -160,8 +157,8 @@ java_gimplify_modify_expr (tree *modify_expr_p)
     between the reading and the writing.  */
 
 static enum gimplify_status
-java_gimplify_self_mod_expr (tree *expr_p, tree *pre_p ATTRIBUTE_UNUSED, 
-                            tree *post_p ATTRIBUTE_UNUSED)
+java_gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p ATTRIBUTE_UNUSED, 
+                            gimple_seq *post_p ATTRIBUTE_UNUSED)
 {
   tree lhs = TREE_OPERAND (*expr_p, 0);
 
@@ -180,7 +177,7 @@ java_gimplify_block (tree java_block)
 {
   tree decls = BLOCK_VARS (java_block);
   tree body = BLOCK_EXPR_BODY (java_block);
-  tree outer = gimple_current_bind_expr ();
+  gimple outer = gimple_current_bind_expr ();
   tree block;
 
   /* Don't bother with empty blocks.  */
@@ -199,10 +196,10 @@ java_gimplify_block (tree java_block)
      routines generate info for the variables in that block.  */
   TREE_USED (block) = 1;
 
-  if (outer != NULL_TREE)
+  if (outer != NULL)
     {
-      outer = BIND_EXPR_BLOCK (outer);
-      BLOCK_SUBBLOCKS (outer) = chainon (BLOCK_SUBBLOCKS (outer), block);
+      tree b = gimple_bind_block (outer);
+      BLOCK_SUBBLOCKS (b) = chainon (BLOCK_SUBBLOCKS (b), block);
     }
   BLOCK_EXPR_BODY (java_block) = NULL_TREE;
 
index a046a0f..7ae71d9 100644
@@ -649,7 +649,7 @@ struct lang_identifier GTY(())
 /* The resulting tree type.  */
 union lang_tree_node 
   GTY((desc ("TREE_CODE (&%h.generic) == IDENTIFIER_NODE"),
-       chain_next ("(union lang_tree_node *)GENERIC_NEXT (&%h.generic)")))
+       chain_next ("(union lang_tree_node *)TREE_CHAIN (&%h.generic)")))
 
 {
   union tree_node GTY ((tag ("0"), 
@@ -1555,7 +1555,7 @@ enum
 #undef DEBUG_JAVA_BINDING_LEVELS
 
 extern void java_genericize (tree);
-extern int java_gimplify_expr (tree *, tree *, tree *);
+extern int java_gimplify_expr (tree *, gimple_seq *, gimple_seq *);
 
 extern FILE *finput;
 
index 5ae74ff..21bc184 100644
@@ -1231,16 +1231,16 @@ gcc_loop_to_lambda_loop (struct loop *loop, int depth,
                         VEC(tree,heap) * outerinductionvars,
                         VEC(tree,heap) ** lboundvars,
                         VEC(tree,heap) ** uboundvars,
-                         VEC(int,heap) ** steps,
+                        VEC(int,heap) ** steps,
                          struct obstack * lambda_obstack)
 {
-  tree phi;
-  tree exit_cond;
+  gimple phi;
+  gimple exit_cond;
   tree access_fn, inductionvar;
   tree step;
   lambda_loop lloop = NULL;
   lambda_linear_expression lbound, ubound;
-  tree test;
+  tree test_lhs, test_rhs;
   int stepint;
   int extra = 0;
   tree lboundvar, uboundvar, uboundresult;
@@ -1257,9 +1257,7 @@ gcc_loop_to_lambda_loop (struct loop *loop, int depth,
       return NULL;
     }
 
-  test = TREE_OPERAND (exit_cond, 0);
-
-  if (SSA_NAME_DEF_STMT (inductionvar) == NULL_TREE)
+  if (SSA_NAME_DEF_STMT (inductionvar) == NULL)
     {
 
       if (dump_file && (dump_flags & TDF_DETAILS))
@@ -1270,10 +1268,10 @@ gcc_loop_to_lambda_loop (struct loop *loop, int depth,
     }
 
   phi = SSA_NAME_DEF_STMT (inductionvar);
-  if (TREE_CODE (phi) != PHI_NODE)
+  if (gimple_code (phi) != GIMPLE_PHI)
     {
-      phi = SINGLE_SSA_TREE_OPERAND (phi, SSA_OP_USE);
-      if (!phi)
+      tree op = SINGLE_SSA_TREE_OPERAND (phi, SSA_OP_USE);
+      if (!op)
        {
 
          if (dump_file && (dump_flags & TDF_DETAILS))
@@ -1283,16 +1281,14 @@ gcc_loop_to_lambda_loop (struct loop *loop, int depth,
          return NULL;
        }
 
-      phi = SSA_NAME_DEF_STMT (phi);
-      if (TREE_CODE (phi) != PHI_NODE)
+      phi = SSA_NAME_DEF_STMT (op);
+      if (gimple_code (phi) != GIMPLE_PHI)
        {
-
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file,
                     "Unable to convert loop: Cannot find PHI node for induction variable\n");
          return NULL;
        }
-
     }
 
   /* The induction variable name/version we want to put in the array is the
@@ -1331,7 +1327,7 @@ gcc_loop_to_lambda_loop (struct loop *loop, int depth,
 
   /* Only want phis for induction vars, which will have two
      arguments.  */
-  if (PHI_NUM_ARGS (phi) != 2)
+  if (gimple_phi_num_args (phi) != 2)
     {
       if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file,
@@ -1341,8 +1337,8 @@ gcc_loop_to_lambda_loop (struct loop *loop, int depth,
 
   /* Another induction variable check. One argument's source should be
      in the loop, one outside the loop.  */
-  if (flow_bb_inside_loop_p (loop, PHI_ARG_EDGE (phi, 0)->src)
-      && flow_bb_inside_loop_p (loop, PHI_ARG_EDGE (phi, 1)->src))
+  if (flow_bb_inside_loop_p (loop, gimple_phi_arg_edge (phi, 0)->src)
+      && flow_bb_inside_loop_p (loop, gimple_phi_arg_edge (phi, 1)->src))
     {
 
       if (dump_file && (dump_flags & TDF_DETAILS))
@@ -1352,7 +1348,7 @@ gcc_loop_to_lambda_loop (struct loop *loop, int depth,
       return NULL;
     }
 
-  if (flow_bb_inside_loop_p (loop, PHI_ARG_EDGE (phi, 0)->src))
+  if (flow_bb_inside_loop_p (loop, gimple_phi_arg_edge (phi, 0)->src))
     {
       lboundvar = PHI_ARG_DEF (phi, 1);
       lbound = gcc_tree_to_linear_expression (depth, lboundvar,
@@ -1378,21 +1374,23 @@ gcc_loop_to_lambda_loop (struct loop *loop, int depth,
     }
   /* One part of the test may be a loop invariant tree.  */
   VEC_reserve (tree, heap, *invariants, 1);
-  if (TREE_CODE (TREE_OPERAND (test, 1)) == SSA_NAME
-      && invariant_in_loop_and_outer_loops (loop, TREE_OPERAND (test, 1)))
-    VEC_quick_push (tree, *invariants, TREE_OPERAND (test, 1));
-  else if (TREE_CODE (TREE_OPERAND (test, 0)) == SSA_NAME
-          && invariant_in_loop_and_outer_loops (loop, TREE_OPERAND (test, 0)))
-    VEC_quick_push (tree, *invariants, TREE_OPERAND (test, 0));
+  test_lhs = gimple_cond_lhs (exit_cond);
+  test_rhs = gimple_cond_rhs (exit_cond);
+
+  if (TREE_CODE (test_rhs) == SSA_NAME
+      && invariant_in_loop_and_outer_loops (loop, test_rhs))
+    VEC_quick_push (tree, *invariants, test_rhs);
+  else if (TREE_CODE (test_lhs) == SSA_NAME
+          && invariant_in_loop_and_outer_loops (loop, test_lhs))
+    VEC_quick_push (tree, *invariants, test_lhs);
   
   /* The non-induction variable part of the test is the upper bound variable.
    */
-  if (TREE_OPERAND (test, 0) == inductionvar)
-    uboundvar = TREE_OPERAND (test, 1);
+  if (test_lhs == inductionvar)
+    uboundvar = test_rhs;
   else
-    uboundvar = TREE_OPERAND (test, 0);
+    uboundvar = test_lhs;
     
-
   /* We only size the vectors assuming we have, at max, 2 times as many
      invariants as we do loops (one for each bound).
      This is just an arbitrary number, but it has to be matched against the
@@ -1401,13 +1399,13 @@ gcc_loop_to_lambda_loop (struct loop *loop, int depth,
   
 
   /* We might have some leftover.  */
-  if (TREE_CODE (test) == LT_EXPR)
+  if (gimple_cond_code (exit_cond) == LT_EXPR)
     extra = -1 * stepint;
-  else if (TREE_CODE (test) == NE_EXPR)
+  else if (gimple_cond_code (exit_cond) == NE_EXPR)
     extra = -1 * stepint;
-  else if (TREE_CODE (test) == GT_EXPR)
+  else if (gimple_cond_code (exit_cond) == GT_EXPR)
     extra = -1 * stepint;
-  else if (TREE_CODE (test) == EQ_EXPR)
+  else if (gimple_cond_code (exit_cond) == EQ_EXPR)
     extra = 1 * stepint;
   
   ubound = gcc_tree_to_linear_expression (depth, uboundvar,
@@ -1439,24 +1437,23 @@ gcc_loop_to_lambda_loop (struct loop *loop, int depth,
 static tree
 find_induction_var_from_exit_cond (struct loop *loop)
 {
-  tree expr = get_loop_exit_condition (loop);
+  gimple expr = get_loop_exit_condition (loop);
   tree ivarop;
-  tree test;
-  if (expr == NULL_TREE)
-    return NULL_TREE;
-  if (TREE_CODE (expr) != COND_EXPR)
+  tree test_lhs, test_rhs;
+  if (expr == NULL)
     return NULL_TREE;
-  test = TREE_OPERAND (expr, 0);
-  if (!COMPARISON_CLASS_P (test))
+  if (gimple_code (expr) != GIMPLE_COND)
     return NULL_TREE;
+  test_lhs = gimple_cond_lhs (expr);
+  test_rhs = gimple_cond_rhs (expr);
 
   /* Find the side that is invariant in this loop. The ivar must be the other
      side.  */
   
-  if (expr_invariant_in_loop_p (loop, TREE_OPERAND (test, 0)))
-      ivarop = TREE_OPERAND (test, 1);
-  else if (expr_invariant_in_loop_p (loop, TREE_OPERAND (test, 1)))
-      ivarop = TREE_OPERAND (test, 0);
+  if (expr_invariant_in_loop_p (loop, test_lhs))
+      ivarop = test_rhs;
+  else if (expr_invariant_in_loop_p (loop, test_rhs))
+      ivarop = test_lhs;
   else
     return NULL_TREE;
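
With tuples the comparison is stored directly in the GIMPLE_COND statement, so there is no embedded COND_EXPR_COND tree to pick apart.  A minimal sketch of the accessor pattern used in the two hunks above (the helper name is hypothetical):

  /* For a loop exit condition such as "if (i_5 < n_2)".  */
  static bool
  get_cond_operands (gimple stmt, enum tree_code *cmp, tree *lhs, tree *rhs)
  {
    if (gimple_code (stmt) != GIMPLE_COND)
      return false;
    *cmp = gimple_cond_code (stmt);  /* LT_EXPR */
    *lhs = gimple_cond_lhs (stmt);   /* i_5 */
    *rhs = gimple_cond_rhs (stmt);   /* n_2 */
    return true;
  }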
 
@@ -1548,7 +1545,7 @@ gcc_loopnest_to_lambda_loopnest (struct loop *loop_nest,
 static tree
 lbv_to_gcc_expression (lambda_body_vector lbv, 
                       tree type, VEC(tree,heap) *induction_vars, 
-                      tree *stmts_to_insert)
+                      gimple_seq *stmts_to_insert)
 {
   int k;
   tree resvar;
@@ -1583,7 +1580,7 @@ lle_to_gcc_expression (lambda_linear_expression lle,
                       tree type,
                       VEC(tree,heap) *induction_vars,
                       VEC(tree,heap) *invariants,
-                      enum tree_code wrap, tree *stmts_to_insert)
+                      enum tree_code wrap, gimple_seq *stmts_to_insert)
 {
   int k;
   tree resvar;
@@ -1641,17 +1638,19 @@ lle_to_gcc_expression (lambda_linear_expression lle,
 /* Remove the induction variable defined at IV_STMT.  */
 
 void
-remove_iv (tree iv_stmt)
+remove_iv (gimple iv_stmt)
 {
-  if (TREE_CODE (iv_stmt) == PHI_NODE)
+  gimple_stmt_iterator si = gsi_for_stmt (iv_stmt);
+
+  if (gimple_code (iv_stmt) == GIMPLE_PHI)
     {
-      int i;
+      unsigned i;
 
-      for (i = 0; i < PHI_NUM_ARGS (iv_stmt); i++)
+      for (i = 0; i < gimple_phi_num_args (iv_stmt); i++)
        {
-         tree stmt;
+         gimple stmt;
          imm_use_iterator imm_iter;
-         tree arg = PHI_ARG_DEF (iv_stmt, i);
+         tree arg = gimple_phi_arg_def (iv_stmt, i);
          bool used = false;
 
          if (TREE_CODE (arg) != SSA_NAME)
@@ -1665,13 +1664,11 @@ remove_iv (tree iv_stmt)
            remove_iv (SSA_NAME_DEF_STMT (arg));
        }
 
-      remove_phi_node (iv_stmt, NULL_TREE, true);
+      remove_phi_node (&si, true);
     }
   else
     {
-      block_stmt_iterator bsi = bsi_for_stmt (iv_stmt);
-
-      bsi_remove (&bsi, true);
+      gsi_remove (&si, true);
       release_defs (iv_stmt); 
     }
 }
@@ -1692,18 +1689,18 @@ void
 lambda_loopnest_to_gcc_loopnest (struct loop *old_loopnest,
                                 VEC(tree,heap) *old_ivs,
                                 VEC(tree,heap) *invariants,
-                                VEC(tree,heap) **remove_ivs,
+                                VEC(gimple,heap) **remove_ivs,
                                 lambda_loopnest new_loopnest,
                                  lambda_trans_matrix transform,
                                  struct obstack * lambda_obstack)
 {
   struct loop *temp;
   size_t i = 0;
-  int j;
+  unsigned j;
   size_t depth = 0;
   VEC(tree,heap) *new_ivs = NULL;
   tree oldiv;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator bsi;
 
   transform = lambda_trans_matrix_inverse (transform);
 
@@ -1720,13 +1717,15 @@ lambda_loopnest_to_gcc_loopnest (struct loop *old_loopnest,
       lambda_loop newloop;
       basic_block bb;
       edge exit;
-      tree ivvar, ivvarinced, exitcond, stmts;
+      tree ivvar, ivvarinced;
+      gimple exitcond;
+      gimple_seq stmts;
       enum tree_code testtype;
       tree newupperbound, newlowerbound;
       lambda_linear_expression offset;
       tree type;
       bool insert_after;
-      tree inc_stmt;
+      gimple inc_stmt;
 
       oldiv = VEC_index (tree, old_ivs, i);
       type = TREE_TYPE (oldiv);
@@ -1749,6 +1748,7 @@ lambda_loopnest_to_gcc_loopnest (struct loop *old_loopnest,
            
       /* Now build the  new lower bounds, and insert the statements
          necessary to generate it on the loop preheader.  */
+      stmts = NULL;
       newlowerbound = lle_to_gcc_expression (LL_LOWER_BOUND (newloop),
                                             LL_LINEAR_OFFSET (newloop),
                                             type,
@@ -1757,11 +1757,12 @@ lambda_loopnest_to_gcc_loopnest (struct loop *old_loopnest,
 
       if (stmts)
        {
-         bsi_insert_on_edge (loop_preheader_edge (temp), stmts);
-         bsi_commit_edge_inserts ();
+         gsi_insert_seq_on_edge (loop_preheader_edge (temp), stmts);
+         gsi_commit_edge_inserts ();
        }
       /* Build the new upper bound and insert its statements in the
          basic block of the exit condition */
+      stmts = NULL;
       newupperbound = lle_to_gcc_expression (LL_UPPER_BOUND (newloop),
                                             LL_LINEAR_OFFSET (newloop),
                                             type,
@@ -1769,10 +1770,10 @@ lambda_loopnest_to_gcc_loopnest (struct loop *old_loopnest,
                                             invariants, MIN_EXPR, &stmts);
       exit = single_exit (temp);
       exitcond = get_loop_exit_condition (temp);
-      bb = bb_for_stmt (exitcond);
-      bsi = bsi_after_labels (bb);
+      bb = gimple_bb (exitcond);
+      bsi = gsi_after_labels (bb);
       if (stmts)
-       bsi_insert_before (&bsi, stmts, BSI_NEW_STMT);
+       gsi_insert_seq_before (&bsi, stmts, GSI_NEW_STMT);
 
       /* Create the new iv.  */
 
@@ -1786,13 +1787,14 @@ lambda_loopnest_to_gcc_loopnest (struct loop *old_loopnest,
         dominate the block containing the exit condition.
         So we simply create our own incremented iv to use in the new exit
         test,  and let redundancy elimination sort it out.  */
-      inc_stmt = build2 (PLUS_EXPR, type, 
-                        ivvar, build_int_cst (type, LL_STEP (newloop)));
-      inc_stmt = build_gimple_modify_stmt (SSA_NAME_VAR (ivvar), inc_stmt);
+      inc_stmt = gimple_build_assign_with_ops (PLUS_EXPR, SSA_NAME_VAR (ivvar),
+                                              ivvar,
+                                              build_int_cst (type, LL_STEP (newloop)));
+
       ivvarinced = make_ssa_name (SSA_NAME_VAR (ivvar), inc_stmt);
-      GIMPLE_STMT_OPERAND (inc_stmt, 0) = ivvarinced;
-      bsi = bsi_for_stmt (exitcond);
-      bsi_insert_before (&bsi, inc_stmt, BSI_SAME_STMT);
+      gimple_assign_set_lhs (inc_stmt, ivvarinced);
+      bsi = gsi_for_stmt (exitcond);
+      gsi_insert_before (&bsi, inc_stmt, GSI_SAME_STMT);
 
       /* Replace the exit condition with the new upper bound
          comparison.  */
@@ -1806,9 +1808,7 @@ lambda_loopnest_to_gcc_loopnest (struct loop *old_loopnest,
       if (exit->flags & EDGE_FALSE_VALUE)
        testtype = swap_tree_comparison (testtype);
 
-      COND_EXPR_COND (exitcond) = build2 (testtype,
-                                         boolean_type_node,
-                                         newupperbound, ivvarinced);
+      gimple_cond_set_condition (exitcond, testtype, newupperbound, ivvarinced);
       update_stmt (exitcond);
       VEC_replace (tree, new_ivs, i, ivvar);
 
@@ -1824,10 +1824,10 @@ lambda_loopnest_to_gcc_loopnest (struct loop *old_loopnest,
       imm_use_iterator imm_iter;
       use_operand_p use_p;
       tree oldiv_def;
-      tree oldiv_stmt = SSA_NAME_DEF_STMT (oldiv);
-      tree stmt;
+      gimple oldiv_stmt = SSA_NAME_DEF_STMT (oldiv);
+      gimple stmt;
 
-      if (TREE_CODE (oldiv_stmt) == PHI_NODE)
+      if (gimple_code (oldiv_stmt) == GIMPLE_PHI)
         oldiv_def = PHI_RESULT (oldiv_stmt);
       else
        oldiv_def = SINGLE_SSA_TREE_OPERAND (oldiv_stmt, SSA_OP_DEF);
@@ -1835,7 +1835,8 @@ lambda_loopnest_to_gcc_loopnest (struct loop *old_loopnest,
 
       FOR_EACH_IMM_USE_STMT (stmt, imm_iter, oldiv_def)
         {
-         tree newiv, stmts;
+         tree newiv;
+         gimple_seq stmts;
          lambda_body_vector lbv, newlbv;
 
          /* Compute the new expression for the induction
@@ -1847,28 +1848,29 @@ lambda_loopnest_to_gcc_loopnest (struct loop *old_loopnest,
           newlbv = lambda_body_vector_compute_new (transform, lbv,
                                                    lambda_obstack);
 
+         stmts = NULL;
          newiv = lbv_to_gcc_expression (newlbv, TREE_TYPE (oldiv),
                                         new_ivs, &stmts);
 
-         if (stmts && TREE_CODE (stmt) != PHI_NODE)
+         if (stmts && gimple_code (stmt) != GIMPLE_PHI)
            {
-             bsi = bsi_for_stmt (stmt);
-             bsi_insert_before (&bsi, stmts, BSI_SAME_STMT);
+             bsi = gsi_for_stmt (stmt);
+             gsi_insert_seq_before (&bsi, stmts, GSI_SAME_STMT);
            }
 
          FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
            propagate_value (use_p, newiv);
 
-         if (stmts && TREE_CODE (stmt) == PHI_NODE)
-           for (j = 0; j < PHI_NUM_ARGS (stmt); j++)
-             if (PHI_ARG_DEF (stmt, j) == newiv)
-               bsi_insert_on_edge (PHI_ARG_EDGE (stmt, j), stmts);
+         if (stmts && gimple_code (stmt) == GIMPLE_PHI)
+           for (j = 0; j < gimple_phi_num_args (stmt); j++)
+             if (gimple_phi_arg_def (stmt, j) == newiv)
+               gsi_insert_seq_on_edge (gimple_phi_arg_edge (stmt, j), stmts);
 
          update_stmt (stmt);
        }
 
       /* Remove the now unused induction variable.  */
-      VEC_safe_push (tree, heap, *remove_ivs, oldiv_stmt);
+      VEC_safe_push (gimple, heap, *remove_ivs, oldiv_stmt);
     }
   VEC_free (tree, heap, new_ivs);
 }
@@ -1877,13 +1879,13 @@ lambda_loopnest_to_gcc_loopnest (struct loop *old_loopnest,
    determining if we have a perfect loop nest.  */
 
 static bool
-not_interesting_stmt (tree stmt)
+not_interesting_stmt (gimple stmt)
 {
   /* Note that COND_EXPR's aren't interesting because if they were exiting the
      loop, we would have already failed the number of exits tests.  */
-  if (TREE_CODE (stmt) == LABEL_EXPR
-      || TREE_CODE (stmt) == GOTO_EXPR
-      || TREE_CODE (stmt) == COND_EXPR)
+  if (gimple_code (stmt) == GIMPLE_LABEL
+      || gimple_code (stmt) == GIMPLE_GOTO
+      || gimple_code (stmt) == GIMPLE_COND)
     return true;
   return false;
 }
@@ -1891,11 +1893,11 @@ not_interesting_stmt (tree stmt)
 /* Return TRUE if PHI uses DEF for it's in-the-loop edge for LOOP.  */
 
 static bool
-phi_loop_edge_uses_def (struct loop *loop, tree phi, tree def)
+phi_loop_edge_uses_def (struct loop *loop, gimple phi, tree def)
 {
-  int i;
-  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
-    if (flow_bb_inside_loop_p (loop, PHI_ARG_EDGE (phi, i)->src))
+  unsigned i;
+  for (i = 0; i < gimple_phi_num_args (phi); i++)
+    if (flow_bb_inside_loop_p (loop, gimple_phi_arg_edge (phi, i)->src))
       if (PHI_ARG_DEF (phi, i) == def)
        return true;
   return false;
@@ -1904,7 +1906,7 @@ phi_loop_edge_uses_def (struct loop *loop, tree phi, tree def)
 /* Return TRUE if STMT is a use of PHI_RESULT.  */
 
 static bool
-stmt_uses_phi_result (tree stmt, tree phi_result)
+stmt_uses_phi_result (gimple stmt, tree phi_result)
 {
   tree use = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_USE);
   
@@ -1920,9 +1922,9 @@ stmt_uses_phi_result (tree stmt, tree phi_result)
         i_3 = PHI (0, i_29);  */
 
 static bool
-stmt_is_bumper_for_loop (struct loop *loop, tree stmt)
+stmt_is_bumper_for_loop (struct loop *loop, gimple stmt)
 {
-  tree use;
+  gimple use;
   tree def;
   imm_use_iterator iter;
   use_operand_p use_p;
@@ -1934,7 +1936,7 @@ stmt_is_bumper_for_loop (struct loop *loop, tree stmt)
   FOR_EACH_IMM_USE_FAST (use_p, iter, def)
     {
       use = USE_STMT (use_p);
-      if (TREE_CODE (use) == PHI_NODE)
+      if (gimple_code (use) == GIMPLE_PHI)
        {
          if (phi_loop_edge_uses_def (loop, use, def))
            if (stmt_uses_phi_result (stmt, PHI_RESULT (use)))
@@ -1976,7 +1978,7 @@ perfect_nest_p (struct loop *loop)
 {
   basic_block *bbs;
   size_t i;
-  tree exit_cond;
+  gimple exit_cond;
 
   /* Loops at depth 0 are perfect nests.  */
   if (!loop->inner)
@@ -1989,13 +1991,13 @@ perfect_nest_p (struct loop *loop)
     {
       if (bbs[i]->loop_father == loop)
        {
-         block_stmt_iterator bsi;
+         gimple_stmt_iterator bsi;
 
-         for (bsi = bsi_start (bbs[i]); !bsi_end_p (bsi); bsi_next (&bsi))
+         for (bsi = gsi_start_bb (bbs[i]); !gsi_end_p (bsi); gsi_next (&bsi))
            {
-             tree stmt = bsi_stmt (bsi);
+             gimple stmt = gsi_stmt (bsi);
 
-             if (TREE_CODE (stmt) == COND_EXPR
+             if (gimple_code (stmt) == GIMPLE_COND
                  && exit_cond != stmt)
                goto non_perfectly_nested;
 
@@ -2023,10 +2025,10 @@ perfect_nest_p (struct loop *loop)
    of body basic block.  */
 
 static void
-replace_uses_equiv_to_x_with_y (struct loop *loop, tree stmt, tree x, 
+replace_uses_equiv_to_x_with_y (struct loop *loop, gimple stmt, tree x, 
                                int xstep, tree y, tree yinit,
                                htab_t replacements,
-                               block_stmt_iterator *firstbsi)
+                               gimple_stmt_iterator *firstbsi)
 {
   ssa_op_iter iter;
   use_operand_p use_p;
@@ -2035,7 +2037,8 @@ replace_uses_equiv_to_x_with_y (struct loop *loop, tree stmt, tree x,
     {
       tree use = USE_FROM_PTR (use_p);
       tree step = NULL_TREE;
-      tree scev, init, val, var, setstmt;
+      tree scev, init, val, var;
+      gimple setstmt;
       struct tree_map *h, in;
       void **loc;
 
@@ -2098,12 +2101,12 @@ replace_uses_equiv_to_x_with_y (struct loop *loop, tree stmt, tree x,
         which sets Y.  */
       var = create_tmp_var (TREE_TYPE (use), "perfecttmp");
       add_referenced_var (var);
-      val = force_gimple_operand_bsi (firstbsi, val, false, NULL,
-                                     true, BSI_SAME_STMT);
-      setstmt = build_gimple_modify_stmt (var, val);
+      val = force_gimple_operand_gsi (firstbsi, val, false, NULL,
+                                     true, GSI_SAME_STMT);
+      setstmt = gimple_build_assign (var, val);
       var = make_ssa_name (var, setstmt);
-      GIMPLE_STMT_OPERAND (setstmt, 0) = var;
-      bsi_insert_before (firstbsi, setstmt, BSI_SAME_STMT);
+      gimple_assign_set_lhs (setstmt, var);
+      gsi_insert_before (firstbsi, setstmt, GSI_SAME_STMT);
       update_stmt (setstmt);
       SET_USE (use_p, var);
       h = GGC_NEW (struct tree_map);
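
The hunk above uses the usual tuples idiom for materializing a value into a fresh SSA name ahead of an existing statement: gimplify the value, build the assignment, create the SSA name with that assignment as its defining statement, then insert and update it.  A hypothetical helper wrapping the same sequence, under the same API assumptions:

  static tree
  emit_tmp_ssa_name_before (gimple_stmt_iterator *gsi, tree val,
                            const char *prefix)
  {
    tree var = create_tmp_var (TREE_TYPE (val), prefix);
    gimple assign;
    tree name;

    add_referenced_var (var);
    val = force_gimple_operand_gsi (gsi, val, false, NULL, true,
                                    GSI_SAME_STMT);
    assign = gimple_build_assign (var, val);
    name = make_ssa_name (var, assign);
    gimple_assign_set_lhs (assign, name);
    gsi_insert_before (gsi, assign, GSI_SAME_STMT);
    update_stmt (assign);
    return name;
  }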
@@ -2119,12 +2122,11 @@ replace_uses_equiv_to_x_with_y (struct loop *loop, tree stmt, tree x,
 /* Return true if STMT is an exit PHI for LOOP */
 
 static bool
-exit_phi_for_loop_p (struct loop *loop, tree stmt)
+exit_phi_for_loop_p (struct loop *loop, gimple stmt)
 {
-  
-  if (TREE_CODE (stmt) != PHI_NODE
-      || PHI_NUM_ARGS (stmt) != 1
-      || bb_for_stmt (stmt) != single_exit (loop)->dest)
+  if (gimple_code (stmt) != GIMPLE_PHI
+      || gimple_phi_num_args (stmt) != 1
+      || gimple_bb (stmt) != single_exit (loop)->dest)
     return false;
   
   return true;
@@ -2134,21 +2136,21 @@ exit_phi_for_loop_p (struct loop *loop, tree stmt)
    copying it to the beginning of that loop and changing the uses.  */
 
 static bool
-can_put_in_inner_loop (struct loop *inner, tree stmt)
+can_put_in_inner_loop (struct loop *inner, gimple stmt)
 {
   imm_use_iterator imm_iter;
   use_operand_p use_p;
   
-  gcc_assert (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT);
+  gcc_assert (is_gimple_assign (stmt));
   if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS)
-      || !expr_invariant_in_loop_p (inner, GIMPLE_STMT_OPERAND (stmt, 1)))
+      || !stmt_invariant_in_loop_p (inner, stmt))
     return false;
   
-  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, GIMPLE_STMT_OPERAND (stmt, 0))
+  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, gimple_assign_lhs (stmt))
     {
       if (!exit_phi_for_loop_p (inner, USE_STMT (use_p)))
        {
-         basic_block immbb = bb_for_stmt (USE_STMT (use_p));
+         basic_block immbb = gimple_bb (USE_STMT (use_p));
 
          if (!flow_bb_inside_loop_p (inner, immbb))
            return false;
@@ -2158,8 +2160,9 @@ can_put_in_inner_loop (struct loop *inner, tree stmt)
 }
 
 /* Return true if STMT can be put *after* the inner loop of LOOP.  */
+
 static bool
-can_put_after_inner_loop (struct loop *loop, tree stmt)
+can_put_after_inner_loop (struct loop *loop, gimple stmt)
 {
   imm_use_iterator imm_iter;
   use_operand_p use_p;
@@ -2167,11 +2170,11 @@ can_put_after_inner_loop (struct loop *loop, tree stmt)
   if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
     return false;
   
-  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, GIMPLE_STMT_OPERAND (stmt, 0))
+  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, gimple_assign_lhs (stmt))
     {
       if (!exit_phi_for_loop_p (loop, USE_STMT (use_p)))
        {
-         basic_block immbb = bb_for_stmt (USE_STMT (use_p));
+         basic_block immbb = gimple_bb (USE_STMT (use_p));
          
          if (!dominated_by_p (CDI_DOMINATORS,
                               immbb,
@@ -2211,81 +2214,77 @@ can_duplicate_iv (tree iv, struct loop *loop)
    it has better cache behavior.  */
 
 static bool
-cannot_convert_modify_to_perfect_nest (tree stmt, struct loop *loop)
+cannot_convert_modify_to_perfect_nest (gimple stmt, struct loop *loop)
 {
-  
   use_operand_p use_a, use_b;
   imm_use_iterator imm_iter;
   ssa_op_iter op_iter, op_iter1;
-  tree op0 = GIMPLE_STMT_OPERAND (stmt, 0);
+  tree op0 = gimple_assign_lhs (stmt);
 
   /* The statement should not define a variable used in the inner
      loop.  */
   if (TREE_CODE (op0) == SSA_NAME
       && !can_duplicate_iv (op0, loop))
     FOR_EACH_IMM_USE_FAST (use_a, imm_iter, op0)
-      if (bb_for_stmt (USE_STMT (use_a))->loop_father
-         == loop->inner)
+      if (gimple_bb (USE_STMT (use_a))->loop_father == loop->inner)
        return true;
 
   FOR_EACH_SSA_USE_OPERAND (use_a, stmt, op_iter, SSA_OP_USE)
     {
-      tree node, op = USE_FROM_PTR (use_a);
+      gimple node;
+      tree op = USE_FROM_PTR (use_a);
 
       /* The variables should not be used in both loops.  */
       if (!can_duplicate_iv (op, loop))
        FOR_EACH_IMM_USE_FAST (use_b, imm_iter, op)
-         if (bb_for_stmt (USE_STMT (use_b))->loop_father
-             == loop->inner)
+         if (gimple_bb (USE_STMT (use_b))->loop_father == loop->inner)
            return true;
 
       /* The statement should not use the value of a scalar that was
         modified in the loop.  */
       node = SSA_NAME_DEF_STMT (op);
-      if (TREE_CODE (node) == PHI_NODE)
+      if (gimple_code (node) == GIMPLE_PHI)
        FOR_EACH_PHI_ARG (use_b, node, op_iter1, SSA_OP_USE)
-       {
-         tree arg = USE_FROM_PTR (use_b);
+         {
+           tree arg = USE_FROM_PTR (use_b);
 
-         if (TREE_CODE (arg) == SSA_NAME)
-           {
-             tree arg_stmt = SSA_NAME_DEF_STMT (arg);
+           if (TREE_CODE (arg) == SSA_NAME)
+             {
+               gimple arg_stmt = SSA_NAME_DEF_STMT (arg);
 
-             if (bb_for_stmt (arg_stmt)
-                 && (bb_for_stmt (arg_stmt)->loop_father
-                     == loop->inner))
-               return true;
-           }
-       }
+               if (gimple_bb (arg_stmt)
+                   && (gimple_bb (arg_stmt)->loop_father == loop->inner))
+                 return true;
+             }
+         }
     }
 
   return false;
 }
-
 /* Return true when BB contains statements that can harm the transform
    to a perfect loop nest.  */
 
 static bool
 cannot_convert_bb_to_perfect_nest (basic_block bb, struct loop *loop)
 {
-  block_stmt_iterator bsi;
-  tree exit_condition = get_loop_exit_condition (loop);
+  gimple_stmt_iterator bsi;
+  gimple exit_condition = get_loop_exit_condition (loop);
 
-  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+  for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
     { 
-      tree stmt = bsi_stmt (bsi);
+      gimple stmt = gsi_stmt (bsi);
 
       if (stmt == exit_condition
          || not_interesting_stmt (stmt)
          || stmt_is_bumper_for_loop (loop, stmt))
        continue;
 
-      if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
+      if (is_gimple_assign (stmt))
        {
          if (cannot_convert_modify_to_perfect_nest (stmt, loop))
            return true;
 
-         if (can_duplicate_iv (GIMPLE_STMT_OPERAND (stmt, 0), loop))
+         if (can_duplicate_iv (gimple_assign_lhs (stmt), loop))
            continue;
 
          if (can_put_in_inner_loop (loop->inner, stmt)
@@ -2298,7 +2297,7 @@ cannot_convert_bb_to_perfect_nest (basic_block bb, struct loop *loop)
         right now.  This test ensures that the statement comes
         completely *after* the inner loop.  */
       if (!dominated_by_p (CDI_DOMINATORS,
-                          bb_for_stmt (stmt), 
+                          gimple_bb (stmt), 
                           loop->inner->header))
        return true;
     }
@@ -2306,6 +2305,7 @@ cannot_convert_bb_to_perfect_nest (basic_block bb, struct loop *loop)
   return false;
 }
 
+
 /* Return TRUE if LOOP is an imperfect nest that we can convert to a
    perfect one.  At the moment, we only handle imperfect nests of
    depth 2, where all of the statements occur after the inner loop.  */
@@ -2314,8 +2314,8 @@ static bool
 can_convert_to_perfect_nest (struct loop *loop)
 {
   basic_block *bbs;
-  tree phi;
   size_t i;
+  gimple_stmt_iterator si;
 
   /* Can't handle triply nested+ loops yet.  */
   if (!loop->inner || loop->inner->inner)
@@ -2329,8 +2329,10 @@ can_convert_to_perfect_nest (struct loop *loop)
 
   /* We also need to make sure the loop exit only has simple copy phis in it,
      otherwise we don't know how to transform it into a perfect nest.  */
-  for (phi = phi_nodes (single_exit (loop)->dest); phi; phi = PHI_CHAIN (phi))
-    if (PHI_NUM_ARGS (phi) != 1)
+  for (si = gsi_start_phis (single_exit (loop)->dest);
+       !gsi_end_p (si);
+       gsi_next (&si))
+    if (gimple_phi_num_args (gsi_stmt (si)) != 1)
       goto fail;
   
   free (bbs);
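The hunk above replaces the old PHI_CHAIN walk with a PHI-specific statement iterator.  In isolation, the new pattern for visiting every PHI node of a basic block looks roughly like this (BB and the single-argument check are placeholders for whatever the caller needs):

    gimple_stmt_iterator si;

    for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
      {
        gimple phi = gsi_stmt (si);

        /* Tuple PHIs are inspected through the gimple_phi_* accessors.  */
        if (gimple_phi_num_args (phi) != 1)
          break;
      }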
@@ -2385,17 +2387,17 @@ perfect_nestify (struct loop *loop,
                 VEC(tree,heap) *loopivs)
 {
   basic_block *bbs;
-  tree exit_condition;
-  tree cond_stmt;
+  gimple exit_condition;
+  gimple cond_stmt;
   basic_block preheaderbb, headerbb, bodybb, latchbb, olddest;
   int i;
-  block_stmt_iterator bsi, firstbsi;
+  gimple_stmt_iterator bsi, firstbsi;
   bool insert_after;
   edge e;
   struct loop *newloop;
-  tree phi;
+  gimple phi;
   tree uboundvar;
-  tree stmt;
+  gimple stmt;
   tree oldivvar, ivvar, ivvarinced;
   VEC(tree,heap) *phis = NULL;
   htab_t replacements = NULL;
@@ -2406,8 +2408,9 @@ perfect_nestify (struct loop *loop,
   headerbb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
   
   /* Push the exit phi nodes that we are moving.  */
-  for (phi = phi_nodes (olddest); phi; phi = PHI_CHAIN (phi))
+  for (bsi = gsi_start_phis (olddest); !gsi_end_p (bsi); gsi_next (&bsi))
     {
+      phi = gsi_stmt (bsi);
       VEC_reserve (tree, heap, phis, 2);
       VEC_quick_push (tree, phis, PHI_RESULT (phi));
       VEC_quick_push (tree, phis, PHI_ARG_DEF (phi, 0));
@@ -2415,8 +2418,8 @@ perfect_nestify (struct loop *loop,
   e = redirect_edge_and_branch (single_succ_edge (preheaderbb), headerbb);
 
   /* Remove the exit phis from the old basic block.  */
-  while (phi_nodes (olddest) != NULL)
-    remove_phi_node (phi_nodes (olddest), NULL, false);
+  for (bsi = gsi_start_phis (olddest); !gsi_end_p (bsi); )
+    remove_phi_node (&bsi, false);
 
   /* and add them back to the new basic block.  */
   while (VEC_length (tree, phis) != 0)
@@ -2434,13 +2437,10 @@ perfect_nestify (struct loop *loop,
   bodybb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
   latchbb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
   make_edge (headerbb, bodybb, EDGE_FALLTHRU); 
-  cond_stmt = build3 (COND_EXPR, void_type_node,
-                     build2 (NE_EXPR, boolean_type_node, 
-                             integer_one_node, 
-                             integer_zero_node), 
-                     NULL_TREE, NULL_TREE);
-  bsi = bsi_start (bodybb);
-  bsi_insert_after (&bsi, cond_stmt, BSI_NEW_STMT);
+  cond_stmt = gimple_build_cond (NE_EXPR, integer_one_node, integer_zero_node,
+                                NULL_TREE, NULL_TREE);
+  bsi = gsi_start_bb (bodybb);
+  gsi_insert_after (&bsi, cond_stmt, GSI_NEW_STMT);
   e = make_edge (bodybb, olddest, EDGE_FALSE_VALUE);
   make_edge (bodybb, latchbb, EDGE_TRUE_VALUE);
   make_edge (latchbb, headerbb, EDGE_FALLTHRU);
@@ -2474,19 +2474,16 @@ perfect_nestify (struct loop *loop,
   exit_condition = get_loop_exit_condition (newloop);
   uboundvar = create_tmp_var (integer_type_node, "uboundvar");
   add_referenced_var (uboundvar);
-  stmt = build_gimple_modify_stmt (uboundvar, VEC_index (tree, ubounds, 0));
+  stmt = gimple_build_assign (uboundvar, VEC_index (tree, ubounds, 0));
   uboundvar = make_ssa_name (uboundvar, stmt);
-  GIMPLE_STMT_OPERAND (stmt, 0) = uboundvar;
+  gimple_assign_set_lhs (stmt, uboundvar);
 
   if (insert_after)
-    bsi_insert_after (&bsi, stmt, BSI_SAME_STMT);
+    gsi_insert_after (&bsi, stmt, GSI_SAME_STMT);
   else
-    bsi_insert_before (&bsi, stmt, BSI_SAME_STMT);
+    gsi_insert_before (&bsi, stmt, GSI_SAME_STMT);
   update_stmt (stmt);
-  COND_EXPR_COND (exit_condition) = build2 (GE_EXPR, 
-                                           boolean_type_node,
-                                           uboundvar,
-                                           ivvarinced);
+  gimple_cond_set_condition (exit_condition, GE_EXPR, uboundvar, ivvarinced);
   update_stmt (exit_condition);
   replacements = htab_create_ggc (20, tree_map_hash,
                                  tree_map_eq, NULL);
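The rewritten perfect_nestify builds its placeholder exit test with gimple_build_cond and later edits the same GIMPLE_COND in place, where the old code rebuilt a COND_EXPR tree.  A condensed sketch of that pair of calls (uboundvar and ivvarinced stand for the bound and incremented IV computed above):

    /* Build "if (1 != 0)" as a temporary condition for the new body block.  */
    gimple cond_stmt = gimple_build_cond (NE_EXPR, integer_one_node,
                                          integer_zero_node,
                                          NULL_TREE, NULL_TREE);

    /* Once the upper bound is known, rewrite the condition in place.  */
    gimple_cond_set_condition (cond_stmt, GE_EXPR, uboundvar, ivvarinced);
    update_stmt (cond_stmt);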
@@ -2494,10 +2491,10 @@ perfect_nestify (struct loop *loop,
   /* Now move the statements, and replace the induction variable in the moved
      statements with the correct loop induction variable.  */
   oldivvar = VEC_index (tree, loopivs, 0);
-  firstbsi = bsi_start (bodybb);
+  firstbsi = gsi_start_bb (bodybb);
   for (i = loop->num_nodes - 1; i >= 0 ; i--)
     {
-      block_stmt_iterator tobsi = bsi_last (bodybb);
+      gimple_stmt_iterator tobsi = gsi_last_bb (bodybb);
       if (bbs[i]->loop_father == loop)
        {
          /* If this is true, we are *before* the inner loop.
@@ -2513,22 +2510,22 @@ perfect_nestify (struct loop *loop,
 
          if (dominated_by_p (CDI_DOMINATORS, loop->inner->header, bbs[i]))
            {
-             block_stmt_iterator header_bsi 
-               = bsi_after_labels (loop->inner->header);
+             gimple_stmt_iterator header_bsi 
+               = gsi_after_labels (loop->inner->header);
 
-             for (bsi = bsi_start (bbs[i]); !bsi_end_p (bsi);)
+             for (bsi = gsi_start_bb (bbs[i]); !gsi_end_p (bsi);)
                { 
-                 tree stmt = bsi_stmt (bsi);
+                 gimple stmt = gsi_stmt (bsi);
 
                  if (stmt == exit_condition
                      || not_interesting_stmt (stmt)
                      || stmt_is_bumper_for_loop (loop, stmt))
                    {
-                     bsi_next (&bsi);
+                     gsi_next (&bsi);
                      continue;
                    }
 
-                 bsi_move_before (&bsi, &header_bsi);
+                 gsi_move_before (&bsi, &header_bsi);
                }
            }
          else
@@ -2536,16 +2533,17 @@ perfect_nestify (struct loop *loop,
              /* Note that the bsi only needs to be explicitly incremented
                 when we don't move something, since it is automatically
                 incremented when we do.  */
-             for (bsi = bsi_start (bbs[i]); !bsi_end_p (bsi);)
+             for (bsi = gsi_start_bb (bbs[i]); !gsi_end_p (bsi);)
                { 
                  ssa_op_iter i;
-                 tree n, stmt = bsi_stmt (bsi);
+                 tree n;
+                 gimple stmt = gsi_stmt (bsi);
                  
                  if (stmt == exit_condition
                      || not_interesting_stmt (stmt)
                      || stmt_is_bumper_for_loop (loop, stmt))
                    {
-                     bsi_next (&bsi);
+                     gsi_next (&bsi);
                      continue;
                    }
                  
@@ -2553,7 +2551,7 @@ perfect_nestify (struct loop *loop,
                    (loop, stmt, oldivvar, VEC_index (int, steps, 0), ivvar,
                     VEC_index (tree, lbounds, 0), replacements, &firstbsi);
 
-                 bsi_move_before (&bsi, &tobsi);
+                 gsi_move_before (&bsi, &tobsi);
                  
                  /* If the statement has any virtual operands, they may
                     need to be rewired because the original loop may
@@ -2793,7 +2791,7 @@ build_access_matrix (data_reference_p data_reference,
 {
   struct access_matrix *am = GGC_NEW (struct access_matrix);
   unsigned i, ndim = DR_NUM_DIMENSIONS (data_reference);
-  struct loop *loop = bb_for_stmt (DR_STMT (data_reference))->loop_father;
+  struct loop *loop = gimple_bb (DR_STMT (data_reference))->loop_father;
   struct loop *loop_nest = get_loop (loop_nest_num);
   unsigned nivs = loop_depth (loop) - loop_depth (loop_nest) + 1;
   unsigned lambda_nb_columns;
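Taken together, the lambda-code.c hunks apply one mechanical recipe: block_stmt_iterator becomes gimple_stmt_iterator, bsi_* calls become gsi_*, and new assignments are built as tuples rather than GIMPLE_MODIFY_STMT trees.  The recipe in isolation, with bb, var and val standing in for the caller's data:

    gimple_stmt_iterator gsi;

    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple stmt = gsi_stmt (gsi);
        gimple assign;
        tree name;

        if (!is_gimple_assign (stmt))
          continue;

        /* Build VAR = VAL, give it a fresh SSA name, insert it before STMT.  */
        assign = gimple_build_assign (var, val);
        name = make_ssa_name (var, assign);
        gimple_assign_set_lhs (assign, name);
        gsi_insert_before (&gsi, assign, GSI_SAME_STMT);
        update_stmt (assign);
      }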
index 852f812..36110a7 100644
@@ -24,6 +24,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "tm.h"
 #include "ggc.h"
 #include "tree.h"
+#include "tree-flow.h"
 #include "lambda.h"
 
 static void lambda_matrix_get_column (lambda_matrix, int, int, 
index c5872d5..e81a71c 100644
@@ -25,6 +25,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "ggc.h"
 #include "tree.h"
 #include "target.h"
+#include "tree-flow.h"
 #include "lambda.h"
 
 /* Allocate a new transformation matrix.  */
index 40e8502..66fbac7 100644
@@ -209,10 +209,10 @@ lambda_loopnest gcc_loopnest_to_lambda_loopnest (struct loop *,
                                                  struct obstack *);
 void lambda_loopnest_to_gcc_loopnest (struct loop *,
                                      VEC(tree,heap) *, VEC(tree,heap) *,
-                                     VEC(tree,heap) **,
+                                     VEC(gimple,heap) **,
                                       lambda_loopnest, lambda_trans_matrix,
                                       struct obstack *);
-void remove_iv (tree);
+void remove_iv (gimple);
 
 static inline void lambda_vector_negate (lambda_vector, lambda_vector, int);
 static inline void lambda_vector_mult_const (lambda_vector, lambda_vector, int, int);
index dd4916c..4cab70b 100644
@@ -73,7 +73,7 @@ extern tree lhd_callgraph_analyze_expr (tree *, int *);
 
 
 /* Declarations for tree gimplification hooks.  */
-extern int lhd_gimplify_expr (tree *, tree *, tree *);
+extern int lhd_gimplify_expr (tree *, gimple_seq *, gimple_seq *);
 extern enum omp_clause_default_kind lhd_omp_predetermined_sharing (tree);
 extern tree lhd_omp_assignment (tree, tree, tree);
 struct gimplify_omp_ctx;
index e246047..2285ac1 100644
@@ -27,7 +27,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "toplev.h"
 #include "tree.h"
 #include "tree-inline.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "rtl.h"
 #include "insn-config.h"
 #include "integrate.h"
@@ -303,8 +303,9 @@ lhd_expr_size (const_tree exp)
 /* lang_hooks.gimplify_expr re-writes *EXPR_P into GIMPLE form.  */
 
 int
-lhd_gimplify_expr (tree *expr_p ATTRIBUTE_UNUSED, tree *pre_p ATTRIBUTE_UNUSED,
-                  tree *post_p ATTRIBUTE_UNUSED)
+lhd_gimplify_expr (tree *expr_p ATTRIBUTE_UNUSED,
+                  gimple_seq *pre_p ATTRIBUTE_UNUSED,
+                  gimple_seq *post_p ATTRIBUTE_UNUSED)
 {
   return GS_UNHANDLED;
 }
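The default hook above still punts, but its new signature tells frontends where emitted statements must go: into the two gimple_seq queues rather than into tree statement lists.  A minimal, hypothetical override (MY_LANG_NODE is an invented tree code used only for illustration; is_gimple_val and fb_rvalue are the usual gimplifier predicate and fallback):

    static int
    my_lang_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
    {
      if (TREE_CODE (*expr_p) != MY_LANG_NODE)   /* invented code, for illustration  */
        return GS_UNHANDLED;

      /* Lower the wrapped operand; statements it needs are appended to *PRE_P.  */
      return gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                            is_gimple_val, fb_rvalue);
    }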
@@ -527,7 +528,7 @@ lhd_omp_predetermined_sharing (tree decl ATTRIBUTE_UNUSED)
 tree
 lhd_omp_assignment (tree clause ATTRIBUTE_UNUSED, tree dst, tree src)
 {
-  return build_gimple_modify_stmt (dst, src);
+  return build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
 }
 
 /* Register language specific type size variables as potentially OpenMP
index 1f64cf1..6a23a31 100644
@@ -401,7 +401,7 @@ struct lang_hooks
 
   /* Perform language-specific gimplification on the argument.  Returns an
      enum gimplify_status, though we can't see that type here.  */
-  int (*gimplify_expr) (tree *, tree *, tree *);
+  int (*gimplify_expr) (tree *, gimple_seq *, gimple_seq *);
 
   /* Fold an OBJ_TYPE_REF expression to the address of a function.
      KNOWN_TYPE carries the true type of the OBJ_TYPE_REF_OBJECT.  */
index 33bb0b4..9ebbcde 100644
@@ -107,9 +107,7 @@ along with GCC; see the file COPYING3.  If not see
 
   Both optimizations are described in the paper "Matrix flattening and 
   transposing in GCC" which was presented in GCC summit 2006. 
-  http://www.gccsummit.org/2006/2006-GCC-Summit-Proceedings.pdf
-
- */
+  http://www.gccsummit.org/2006/2006-GCC-Summit-Proceedings.pdf.  */
 
 #include "config.h"
 #include "system.h"
@@ -145,8 +143,9 @@ along with GCC; see the file COPYING3.  If not see
 #include "tree-chrec.h"
 #include "tree-scalar-evolution.h"
 
-/*
-   We need to collect a lot of data from the original malloc,
+  /* FIXME tuples.  */
+#if 0
+/* We need to collect a lot of data from the original malloc,
    particularly as the gimplifier has converted:
 
    orig_var = (struct_type *) malloc (x * sizeof (struct_type *));
@@ -430,7 +429,7 @@ may_flatten_matrices_1 (tree stmt)
   switch (TREE_CODE (stmt))
     {
     case GIMPLE_MODIFY_STMT:
-      t = GIMPLE_STMT_OPERAND (stmt, 1);
+      t = TREE_OPERAND (stmt, 1);
       while (CONVERT_EXPR_P (t))
        {
          if (TREE_TYPE (t) && POINTER_TYPE_P (TREE_TYPE (t)))
@@ -775,7 +774,7 @@ analyze_matrix_allocation_site (struct matrix_info *mi, tree stmt,
 {
   if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
     {
-      tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
+      tree rhs = TREE_OPERAND (stmt, 1);
 
       rhs = get_inner_of_cast_expr (rhs);
       if (TREE_CODE (rhs) == SSA_NAME)
@@ -910,7 +909,7 @@ analyze_transpose (void **slot, void *data ATTRIBUTE_UNUSED)
   for (i = 0; VEC_iterate (access_site_info_p, mi->access_l, i, acc_info);
        i++)
     {
-      if (TREE_CODE (GIMPLE_STMT_OPERAND (acc_info->stmt, 1)) == POINTER_PLUS_EXPR
+      if (TREE_CODE (TREE_OPERAND (acc_info->stmt, 1)) == POINTER_PLUS_EXPR
          && acc_info->level < min_escape_l)
        {
          loop = loop_containing_stmt (acc_info->stmt);
@@ -930,7 +929,7 @@ analyze_transpose (void **slot, void *data ATTRIBUTE_UNUSED)
                    {
                      acc_info->iterated_by_inner_most_loop_p = 1;
                      mi->dim_hot_level[acc_info->level] +=
-                       bb_for_stmt (acc_info->stmt)->count;
+                       gimple_bb (acc_info->stmt)->count;
                    }
 
                }
@@ -952,7 +951,7 @@ get_index_from_offset (tree offset, tree def_stmt)
 
   if (TREE_CODE (def_stmt) == PHI_NODE)
     return NULL;
-  expr = get_inner_of_cast_expr (GIMPLE_STMT_OPERAND (def_stmt, 1));
+  expr = get_inner_of_cast_expr (TREE_OPERAND (def_stmt, 1));
   if (TREE_CODE (expr) == SSA_NAME)
     return get_index_from_offset (offset, SSA_NAME_DEF_STMT (expr));
   else if (TREE_CODE (expr) == MULT_EXPR)
@@ -980,9 +979,9 @@ update_type_size (struct matrix_info *mi, tree stmt, tree ssa_var,
 
   /* Update type according to the type of the INDIRECT_REF expr.   */
   if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-      && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) == INDIRECT_REF)
+      && TREE_CODE (TREE_OPERAND (stmt, 0)) == INDIRECT_REF)
     {
-      lhs = GIMPLE_STMT_OPERAND (stmt, 0);
+      lhs = TREE_OPERAND (stmt, 0);
       gcc_assert (POINTER_TYPE_P
                  (TREE_TYPE (SSA_NAME_VAR (TREE_OPERAND (lhs, 0)))));
       type_size =
@@ -1139,8 +1138,8 @@ analyze_accesses_for_modify_stmt (struct matrix_info *mi, tree ssa_var,
                                  bool record_accesses)
 {
 
-  tree lhs = GIMPLE_STMT_OPERAND (use_stmt, 0);
-  tree rhs = GIMPLE_STMT_OPERAND (use_stmt, 1);
+  tree lhs = TREE_OPERAND (use_stmt, 0);
+  tree rhs = TREE_OPERAND (use_stmt, 1);
   struct ssa_acc_in_tree lhs_acc, rhs_acc;
 
   memset (&lhs_acc, 0, sizeof (lhs_acc));
@@ -1376,7 +1375,7 @@ check_var_notmodified_p (tree * tp, int *walk_subtrees, void *data)
        stmt = bsi_stmt (bsi);
        if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
          continue;
-       if (GIMPLE_STMT_OPERAND (stmt, 0) == t)
+       if (TREE_OPERAND (stmt, 0) == t)
          return stmt;
       }
   }
@@ -1425,7 +1424,7 @@ can_calculate_expr_before_stmt (tree expr, sbitmap visited)
        return fold_build2 (TREE_CODE (expr), TREE_TYPE (expr), op1, op2);
       return NULL_TREE;
     case GIMPLE_MODIFY_STMT:
-      return can_calculate_expr_before_stmt (GIMPLE_STMT_OPERAND (expr, 1),
+      return can_calculate_expr_before_stmt (TREE_OPERAND (expr, 1),
                                             visited);
     case PHI_NODE:
       {
@@ -1588,9 +1587,9 @@ find_sites_in_func (bool record)
       {
        stmt = bsi_stmt (bsi);
        if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-           && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) == VAR_DECL)
+           && TREE_CODE (TREE_OPERAND (stmt, 0)) == VAR_DECL)
          {
-           tmpmi.decl = GIMPLE_STMT_OPERAND (stmt, 0);
+           tmpmi.decl = TREE_OPERAND (stmt, 0);
            if ((mi = (struct matrix_info *) htab_find (matrices_to_reorg,
                                                        &tmpmi)))
              {
@@ -1599,16 +1598,16 @@ find_sites_in_func (bool record)
              }
          }
        if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-           && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) == SSA_NAME
-           && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == VAR_DECL)
+           && TREE_CODE (TREE_OPERAND (stmt, 0)) == SSA_NAME
+           && TREE_CODE (TREE_OPERAND (stmt, 1)) == VAR_DECL)
          {
-           tmpmi.decl = GIMPLE_STMT_OPERAND (stmt, 1);
+           tmpmi.decl = TREE_OPERAND (stmt, 1);
            if ((mi = (struct matrix_info *) htab_find (matrices_to_reorg,
                                                        &tmpmi)))
              {
                sbitmap_zero (visited_stmts_1);
                analyze_matrix_accesses (mi,
-                                        GIMPLE_STMT_OPERAND (stmt, 0), 0,
+                                        TREE_OPERAND (stmt, 0), 0,
                                         false, visited_stmts_1, record);
              }
          }
@@ -1642,8 +1641,8 @@ record_all_accesses_in_func (void)
       if (!ssa_var
          || TREE_CODE (SSA_NAME_DEF_STMT (ssa_var)) != GIMPLE_MODIFY_STMT)
        continue;
-      rhs = GIMPLE_STMT_OPERAND (SSA_NAME_DEF_STMT (ssa_var), 1);
-      lhs = GIMPLE_STMT_OPERAND (SSA_NAME_DEF_STMT (ssa_var), 0);
+      rhs = TREE_OPERAND (SSA_NAME_DEF_STMT (ssa_var), 1);
+      lhs = TREE_OPERAND (SSA_NAME_DEF_STMT (ssa_var), 0);
       if (TREE_CODE (rhs) != VAR_DECL && TREE_CODE (lhs) != VAR_DECL)
        continue;
 
@@ -1741,7 +1740,7 @@ transform_access_sites (void **slot, void *data ATTRIBUTE_UNUSED)
        }
       if (acc_info->is_alloc)
        {
-         if (acc_info->level >= 0 && bb_for_stmt (acc_info->stmt))
+         if (acc_info->level >= 0 && gimple_bb (acc_info->stmt))
            {
              ssa_op_iter iter;
              tree def;
@@ -1751,7 +1750,7 @@ transform_access_sites (void **slot, void *data ATTRIBUTE_UNUSED)
                mark_sym_for_renaming (SSA_NAME_VAR (def));
              bsi = bsi_for_stmt (stmt);
              gcc_assert (TREE_CODE (acc_info->stmt) == GIMPLE_MODIFY_STMT);
-             if (TREE_CODE (GIMPLE_STMT_OPERAND (acc_info->stmt, 0)) ==
+             if (TREE_CODE (TREE_OPERAND (acc_info->stmt, 0)) ==
                  SSA_NAME && acc_info->level < min_escape_l - 1)
                {
                  imm_use_iterator imm_iter;
@@ -1759,7 +1758,7 @@ transform_access_sites (void **slot, void *data ATTRIBUTE_UNUSED)
                  tree use_stmt;
 
                  FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter,
-                                        GIMPLE_STMT_OPERAND (acc_info->stmt,
+                                        TREE_OPERAND (acc_info->stmt,
                                                              0))
                    FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
                  {
@@ -1768,22 +1767,22 @@ transform_access_sites (void **slot, void *data ATTRIBUTE_UNUSED)
                    /* Emit convert statement to convert to type of use.  */
                    conv =
                      fold_build1 (CONVERT_EXPR,
-                                  TREE_TYPE (GIMPLE_STMT_OPERAND
+                                  TREE_TYPE (TREE_OPERAND
                                              (acc_info->stmt, 0)),
-                                  TREE_OPERAND (GIMPLE_STMT_OPERAND
+                                  TREE_OPERAND (TREE_OPERAND
                                                 (acc_info->stmt, 1), 0));
                    tmp =
                      create_tmp_var (TREE_TYPE
-                                     (GIMPLE_STMT_OPERAND
+                                     (TREE_OPERAND
                                       (acc_info->stmt, 0)), "new");
                    add_referenced_var (tmp);
                    stmts =
                      fold_build2 (GIMPLE_MODIFY_STMT,
-                                  TREE_TYPE (GIMPLE_STMT_OPERAND
+                                  TREE_TYPE (TREE_OPERAND
                                              (acc_info->stmt, 0)), tmp,
                                   conv);
                    tmp = make_ssa_name (tmp, stmts);
-                   GIMPLE_STMT_OPERAND (stmts, 0) = tmp;
+                   TREE_OPERAND (stmts, 0) = tmp;
                    bsi = bsi_for_stmt (acc_info->stmt);
                    bsi_insert_after (&bsi, stmts, BSI_SAME_STMT);
                    SET_USE (use_p, tmp);
@@ -1795,7 +1794,7 @@ transform_access_sites (void **slot, void *data ATTRIBUTE_UNUSED)
          free (acc_info);
          continue;
        }
-      orig = GIMPLE_STMT_OPERAND (acc_info->stmt, 1);
+      orig = TREE_OPERAND (acc_info->stmt, 1);
       type = TREE_TYPE (orig);
       if (TREE_CODE (orig) == INDIRECT_REF
          && acc_info->level < min_escape_l - 1)
@@ -1804,8 +1803,8 @@ transform_access_sites (void **slot, void *data ATTRIBUTE_UNUSED)
             from "pointer to type" to "type".  */
          orig =
            build1 (NOP_EXPR, TREE_TYPE (orig),
-                   GIMPLE_STMT_OPERAND (orig, 0));
-         GIMPLE_STMT_OPERAND (acc_info->stmt, 1) = orig;
+                   TREE_OPERAND (orig, 0));
+         TREE_OPERAND (acc_info->stmt, 1) = orig;
        }
       else if (TREE_CODE (orig) == POINTER_PLUS_EXPR
               && acc_info->level < (min_escape_l))
@@ -2095,7 +2094,7 @@ transform_allocation_sites (void **slot, void *data ATTRIBUTE_UNUSED)
                                           true, BSI_SAME_STMT);
       /* GLOBAL_HOLDING_THE_SIZE = DIM_SIZE.  */
       tmp = fold_build2 (GIMPLE_MODIFY_STMT, type, dim_var, dim_size);
-      GIMPLE_STMT_OPERAND (tmp, 0) = dim_var;
+      TREE_OPERAND (tmp, 0) = dim_var;
       mark_symbols_for_renaming (tmp);
       bsi_insert_before (&bsi, tmp, BSI_SAME_STMT);
 
@@ -2104,7 +2103,7 @@ transform_allocation_sites (void **slot, void *data ATTRIBUTE_UNUSED)
   update_ssa (TODO_update_ssa);
   /* Replace the malloc size argument in the malloc of level 0 to be
      the size of all the dimensions.  */
-  malloc_stmt = GIMPLE_STMT_OPERAND (call_stmt_0, 1);
+  malloc_stmt = TREE_OPERAND (call_stmt_0, 1);
   c_node = cgraph_node (mi->allocation_function_decl);
   old_size_0 = CALL_EXPR_ARG (malloc_stmt, 0);
   tmp = force_gimple_operand_bsi (&bsi, mi->dimension_size[0], true,
@@ -2128,7 +2127,7 @@ transform_allocation_sites (void **slot, void *data ATTRIBUTE_UNUSED)
       tree call;
 
       tree call_stmt = mi->malloc_for_level[i];
-      call = GIMPLE_STMT_OPERAND (call_stmt, 1);
+      call = TREE_OPERAND (call_stmt, 1);
       gcc_assert (TREE_CODE (call) == CALL_EXPR);
       e = cgraph_edge (c_node, call_stmt);
       gcc_assert (e);
@@ -2138,7 +2137,7 @@ transform_allocation_sites (void **slot, void *data ATTRIBUTE_UNUSED)
       bsi_remove (&bsi, true);
       /* remove the type cast stmt.  */
       FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter,
-                            GIMPLE_STMT_OPERAND (call_stmt, 0))
+                            TREE_OPERAND (call_stmt, 0))
       {
        use_stmt1 = use_stmt;
        bsi = bsi_for_stmt (use_stmt);
@@ -2146,7 +2145,7 @@ transform_allocation_sites (void **slot, void *data ATTRIBUTE_UNUSED)
       }
       /* Remove the assignment of the allocated area.  */
       FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter,
-                            GIMPLE_STMT_OPERAND (use_stmt1, 0))
+                            TREE_OPERAND (use_stmt1, 0))
       {
        bsi = bsi_for_stmt (use_stmt);
        bsi_remove (&bsi, true);
@@ -2204,12 +2203,13 @@ dump_matrix_reorg_analysis (void **slot, void *data ATTRIBUTE_UNUSED)
   return 1;
 }
 
-
+#endif
 /* Perform matrix flattening.  */
 
 static unsigned int
 matrix_reorg (void)
 {
+#if 0 /* FIXME tuples */
   struct cgraph_node *node;
 
   if (profile_info)
@@ -2233,7 +2233,7 @@ matrix_reorg (void)
        current_function_decl = node->decl;
        push_cfun (DECL_STRUCT_FUNCTION (node->decl));
        bitmap_obstack_initialize (NULL);
-       tree_register_cfg_hooks ();
+       gimple_register_cfg_hooks ();
 
        if (!gimple_in_ssa_p (cfun))
          {
@@ -2301,7 +2301,7 @@ matrix_reorg (void)
        current_function_decl = node->decl;
        push_cfun (DECL_STRUCT_FUNCTION (node->decl));
        bitmap_obstack_initialize (NULL);
-       tree_register_cfg_hooks ();
+       gimple_register_cfg_hooks ();
        record_all_accesses_in_func ();
        htab_traverse (matrices_to_reorg, transform_access_sites, NULL);
        free_dominance_info (CDI_DOMINATORS);
@@ -2316,6 +2316,9 @@ matrix_reorg (void)
   set_cfun (NULL);
   matrices_to_reorg = NULL;
   return 0;
+#else
+  gcc_unreachable ();
+#endif
 }
 
 
@@ -2323,7 +2326,12 @@ matrix_reorg (void)
 static bool
 gate_matrix_reorg (void)
 {
+  /* FIXME tuples */
+#if 0
   return flag_ipa_matrix_reorg && flag_whole_program;
+#else
+  return false;
+#endif
 }
 
 struct simple_ipa_opt_pass pass_ipa_matrix_reorg = 
@@ -2344,3 +2352,4 @@ struct simple_ipa_opt_pass pass_ipa_matrix_reorg =
   TODO_dump_cgraph | TODO_dump_func    /* todo_flags_finish */
  }
 };
+
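Rather than converting matrix-reorg.c now, the merge neutralizes it: the pre-tuples body is fenced off with #if 0, the gate always refuses, and the execute function traps if it is ever reached.  Condensed, the pattern is:

    #if 0 /* FIXME tuples: pre-tuples implementation kept for later conversion.  */
    /* ... original analysis and transformation code ... */
    #endif

    static bool
    gate_matrix_reorg (void)
    {
      return false;           /* Pass is disabled until it is tuplified.  */
    }

    static unsigned int
    matrix_reorg (void)
    {
      gcc_unreachable ();     /* Unreachable while the gate returns false.  */
    }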
index 12291d5..8fa3d3d 100644
@@ -1,3 +1,19 @@
+2008-07-28  Richard Guenther  <rguenther@suse.de>
+
+       Merge from gimple-tuples-branch.
+
+       2008-07-18  Aldy Hernandez  <aldyh@redhat.com>
+
+       * Make-lang.in (objc-lang.o): Depend on GIMPLE_H.
+       (objc-act.o): Rename TREE_GIMPLE_H to GIMPLE_H.
+       * objc-act.h: Include gimple.h instead of tree-gimple.h.
+       * ipa-reference.c: Same.
+
+       2007-11-10  Aldy Hernandez  <aldyh@redhat.com>
+
+       * objc-act.c (objc_gimplify_expr): Change pre and post to sequences.
+       * objc-act.h (objc_gimplify_expr): Change prototype accordingly.
+
 2008-07-21  Ralf Wildenhues  <Ralf.Wildenhues@gmx.de>
 
        * objc-act.c: Fix comment typos.
index 93a0042..c5de9d8 100644
@@ -72,14 +72,14 @@ objc/objc-lang.o : objc/objc-lang.c \
    $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(TREE_H) \
    $(C_TREE_H) $(DIAGNOSTIC_H) \
    $(GGC_H) langhooks.h $(LANGHOOKS_DEF_H) $(C_COMMON_H) gtype-objc.h \
-   c-objc-common.h objc/objc-act.h $(TREE_GIMPLE_H)
+   c-objc-common.h objc/objc-act.h $(GIMPLE_H)
 
 objc/objc-act.o : objc/objc-act.c \
    $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(TREE_H) $(RTL_H) $(TM_P_H) \
    $(EXPR_H) $(TARGET_H) $(C_TREE_H) $(DIAGNOSTIC_H) toplev.h $(FLAGS_H) \
    objc/objc-act.h input.h $(FUNCTION_H) output.h debug.h langhooks.h \
    $(LANGHOOKS_DEF_H) $(HASHTAB_H) $(C_PRAGMA_H) gt-objc-objc-act.h \
-   $(TREE_GIMPLE_H)
+   $(GIMPLE_H)
 
 objc.srcextra:
 
index 44c8648..5941fb6 100644
@@ -9476,7 +9476,7 @@ objc_rewrite_function_call (tree function, tree params)
    of its cousins).  */
 
 enum gimplify_status
-objc_gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p)
+objc_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
 {
   enum gimplify_status r0, r1;
   if (TREE_CODE (*expr_p) == OBJ_TYPE_REF
index e4b8a93..741401d 100644
@@ -23,7 +23,7 @@ along with GCC; see the file COPYING3.  If not see
 #define GCC_OBJC_ACT_H
 
 /* For enum gimplify_status */
-#include "tree-gimple.h"
+#include "gimple.h"
 
 /*** Language hooks ***/
 
@@ -32,7 +32,7 @@ const char *objc_printable_name (tree, int);
 tree objc_get_callee_fndecl (const_tree);
 void objc_finish_file (void);
 tree objc_fold_obj_type_ref (tree, tree);
-enum gimplify_status objc_gimplify_expr (tree *, tree *, tree *);
+enum gimplify_status objc_gimplify_expr (tree *, gimple_seq *, gimple_seq *);
 
 /* NB: The remaining public functions are prototyped in c-common.h, for the
    benefit of stub-objc.c and objc-act.c.  */
index 695df4d..fb7cffe 100644
@@ -72,12 +72,12 @@ cc1objplus$(exeext): $(OBJCXX_OBJS) cc1objplus-checksum.o $(BACKEND) $(LIBDEPS)
 objcp/objcp-lang.o : objcp/objcp-lang.c \
   $(CXX_TREE_H) $(TM_H) toplev.h debug.h langhooks.h objc/objc-act.h \
   $(LANGHOOKS_DEF_H) $(C_COMMON_H) gtype-objcp.h \
-  $(DIAGNOSTIC_H) cp/cp-objcp-common.h $(TREE_GIMPLE_H)
+  $(DIAGNOSTIC_H) cp/cp-objcp-common.h $(GIMPLE_H)
 
 objcp/objcp-decl.o : objcp/objcp-decl.c \
    $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(TREE_H) $(C_TREE_H) \
    toplev.h $(GGC_H) $(C_PRAGMA_H) input.h $(FLAGS_H) output.h \
-   objc/objc-act.h objcp/objcp-decl.h $(TREE_GIMPLE_H) $(EXPR_H) $(TARGET_H)
+   objc/objc-act.h objcp/objcp-decl.h $(GIMPLE_H) $(EXPR_H) $(TARGET_H)
 
 # The following must be an explicit rule; please keep in sync with the implicit
 # one in Makefile.in.
@@ -86,7 +86,7 @@ objcp/objcp-act.o : objc/objc-act.c \
    $(EXPR_H) $(TARGET_H) $(CXX_TREE_H) $(DIAGNOSTIC_H) toplev.h $(FLAGS_H) \
    objc/objc-act.h input.h $(FUNCTION_H) output.h debug.h langhooks.h \
    objcp/objcp-decl.h $(LANGHOOKS_DEF_H) $(HASHTAB_H) gt-objc-objc-act.h \
-   $(TREE_GIMPLE_H)
+   $(GIMPLE_H)
        $(CC) -c $(ALL_CFLAGS) $(ALL_CPPFLAGS) $(INCLUDES) $< $(OUTPUT_OPTION)
 
 po-generated:
index 8ee717a..cda05a4 100644
@@ -27,7 +27,8 @@ along with GCC; see the file COPYING3.  If not see
 #include "tm.h"
 #include "tree.h"
 #include "rtl.h"
-#include "tree-gimple.h"
+#include "gimple.h"
+#include "tree-iterator.h"
 #include "tree-inline.h"
 #include "langhooks.h"
 #include "diagnostic.h"
@@ -44,6 +45,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "optabs.h"
 #include "cfgloop.h"
 
+
 /* Lowering of OpenMP parallel and workshare constructs proceeds in two 
    phases.  The first phase scans the function looking for OMP statements
    and then for variables that must be replaced to satisfy data sharing
@@ -68,7 +70,7 @@ typedef struct omp_context
 
   /* The tree of contexts corresponding to the encountered constructs.  */
   struct omp_context *outer;
-  tree stmt;
+  gimple stmt;
 
   /* Map variables to fields in a structure that allows communication 
      between sending and receiving threads.  */
@@ -114,7 +116,8 @@ struct omp_for_data_loop
 struct omp_for_data
 {
   struct omp_for_data_loop loop;
-  tree chunk_size, for_stmt;
+  tree chunk_size;
+  gimple for_stmt;
   tree pre, iter_type;
   int collapse;
   bool have_nowait, have_ordered;
@@ -128,8 +131,33 @@ static int taskreg_nesting_level;
 struct omp_region *root_omp_region;
 static bitmap task_shared_vars;
 
-static void scan_omp (tree *, omp_context *);
-static void lower_omp (tree *, omp_context *);
+static void scan_omp (gimple_seq, omp_context *);
+static tree scan_omp_1_op (tree *, int *, void *);
+
+#define WALK_SUBSTMTS  \
+    case GIMPLE_BIND: \
+    case GIMPLE_TRY: \
+    case GIMPLE_CATCH: \
+    case GIMPLE_EH_FILTER: \
+      /* The sub-statements for these should be walked.  */ \
+      *handled_ops_p = false; \
+      break;
+
+/* Convenience function for calling scan_omp_1_op on tree operands.  */
+
+static inline tree
+scan_omp_op (tree *tp, omp_context *ctx)
+{
+  struct walk_stmt_info wi;
+
+  memset (&wi, 0, sizeof (wi));
+  wi.info = ctx;
+  wi.want_locations = true;
+
+  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
+}
+
+static void lower_omp (gimple_seq, omp_context *);
 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
 
@@ -150,7 +178,7 @@ find_omp_clause (tree clauses, enum tree_code kind)
 static inline bool
 is_parallel_ctx (omp_context *ctx)
 {
-  return TREE_CODE (ctx->stmt) == OMP_PARALLEL;
+  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
 }
 
 
@@ -159,7 +187,7 @@ is_parallel_ctx (omp_context *ctx)
 static inline bool
 is_task_ctx (omp_context *ctx)
 {
-  return TREE_CODE (ctx->stmt) == OMP_TASK;
+  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
 }
 
 
@@ -168,8 +196,8 @@ is_task_ctx (omp_context *ctx)
 static inline bool
 is_taskreg_ctx (omp_context *ctx)
 {
-  return TREE_CODE (ctx->stmt) == OMP_PARALLEL
-        || TREE_CODE (ctx->stmt) == OMP_TASK;
+  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL
+        || gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
 }
 
 
@@ -186,7 +214,7 @@ is_combined_parallel (struct omp_region *region)
    them into *FD.  */
 
 static void
-extract_omp_for_data (tree for_stmt, struct omp_for_data *fd,
+extract_omp_for_data (gimple for_stmt, struct omp_for_data *fd,
                      struct omp_for_data_loop *loops)
 {
   tree t, var, *collapse_iter, *collapse_count;
@@ -197,7 +225,7 @@ extract_omp_for_data (tree for_stmt, struct omp_for_data *fd,
 
   fd->for_stmt = for_stmt;
   fd->pre = NULL;
-  fd->collapse = TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt));
+  fd->collapse = gimple_omp_for_collapse (for_stmt);
   if (fd->collapse > 1)
     fd->loops = loops;
   else
@@ -209,7 +237,7 @@ extract_omp_for_data (tree for_stmt, struct omp_for_data *fd,
   collapse_iter = NULL;
   collapse_count = NULL;
 
-  for (t = OMP_FOR_CLAUSES (for_stmt); t ; t = OMP_CLAUSE_CHAIN (t))
+  for (t = gimple_omp_for_clauses (for_stmt); t ; t = OMP_CLAUSE_CHAIN (t))
     switch (OMP_CLAUSE_CODE (t))
       {
       case OMP_CLAUSE_NOWAIT:
@@ -264,19 +292,16 @@ extract_omp_for_data (tree for_stmt, struct omp_for_data *fd,
       else
        loop = &dummy_loop;
 
-      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
-      gcc_assert (TREE_CODE (t) == GIMPLE_MODIFY_STMT);
-      loop->v = GIMPLE_STMT_OPERAND (t, 0);
+      
+      loop->v = gimple_omp_for_index (for_stmt, i);
       gcc_assert (SSA_VAR_P (loop->v));
       gcc_assert (TREE_CODE (TREE_TYPE (loop->v)) == INTEGER_TYPE
                  || TREE_CODE (TREE_TYPE (loop->v)) == POINTER_TYPE);
       var = TREE_CODE (loop->v) == SSA_NAME ? SSA_NAME_VAR (loop->v) : loop->v;
-      loop->n1 = GIMPLE_STMT_OPERAND (t, 1);
+      loop->n1 = gimple_omp_for_initial (for_stmt, i);
 
-      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
-      loop->cond_code = TREE_CODE (t);
-      gcc_assert (TREE_OPERAND (t, 0) == var);
-      loop->n2 = TREE_OPERAND (t, 1);
+      loop->cond_code = gimple_omp_for_cond (for_stmt, i);
+      loop->n2 = gimple_omp_for_final (for_stmt, i);
       switch (loop->cond_code)
        {
        case LT_EXPR:
@@ -304,10 +329,7 @@ extract_omp_for_data (tree for_stmt, struct omp_for_data *fd,
          gcc_unreachable ();
        }
 
-      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
-      gcc_assert (TREE_CODE (t) == GIMPLE_MODIFY_STMT);
-      gcc_assert (GIMPLE_STMT_OPERAND (t, 0) == var);
-      t = GIMPLE_STMT_OPERAND (t, 1);
+      t = gimple_omp_for_incr (for_stmt, i);
       gcc_assert (TREE_OPERAND (t, 0) == var);
       switch (TREE_CODE (t))
        {
@@ -449,9 +471,9 @@ extract_omp_for_data (tree for_stmt, struct omp_for_data *fd,
 
    When expanding a combined parallel+workshare region, the call to
    the child function may need additional arguments in the case of
-   OMP_FOR regions.  In some cases, these arguments are computed out
-   of variables passed in from the parent to the child via 'struct
-   .omp_data_s'.  For instance:
+   GIMPLE_OMP_FOR regions.  In some cases, these arguments are
+   computed out of variables passed in from the parent to the child
+   via 'struct .omp_data_s'.  For instance:
 
        #pragma omp parallel for schedule (guided, i * 4)
        for (j ...)
@@ -475,7 +497,7 @@ extract_omp_for_data (tree for_stmt, struct omp_for_data *fd,
 
    To see whether the code in WS_ENTRY_BB blocks the combined
    parallel+workshare call, we collect all the variables used in the
-   OMP_FOR header check whether they appear on the LHS of any
+   GIMPLE_OMP_FOR header check whether they appear on the LHS of any
    statement in WS_ENTRY_BB.  If so, then we cannot emit the combined
    call.
 
@@ -488,15 +510,15 @@ static bool
 workshare_safe_to_combine_p (basic_block par_entry_bb, basic_block ws_entry_bb)
 {
   struct omp_for_data fd;
-  tree par_stmt, ws_stmt;
+  gimple par_stmt, ws_stmt;
 
   par_stmt = last_stmt (par_entry_bb);
   ws_stmt = last_stmt (ws_entry_bb);
 
-  if (TREE_CODE (ws_stmt) == OMP_SECTIONS)
+  if (gimple_code (ws_stmt) == GIMPLE_OMP_SECTIONS)
     return true;
 
-  gcc_assert (TREE_CODE (ws_stmt) == OMP_FOR);
+  gcc_assert (gimple_code (ws_stmt) == GIMPLE_OMP_FOR);
 
   extract_omp_for_data (ws_stmt, &fd, NULL);
 
@@ -525,11 +547,11 @@ workshare_safe_to_combine_p (basic_block par_entry_bb, basic_block ws_entry_bb)
    expanded.  */
 
 static tree
-get_ws_args_for (tree ws_stmt)
+get_ws_args_for (gimple ws_stmt)
 {
   tree t;
 
-  if (TREE_CODE (ws_stmt) == OMP_FOR)
+  if (gimple_code (ws_stmt) == GIMPLE_OMP_FOR)
     {
       struct omp_for_data fd;
       tree ws_args;
@@ -554,12 +576,12 @@ get_ws_args_for (tree ws_stmt)
 
       return ws_args;
     }
-  else if (TREE_CODE (ws_stmt) == OMP_SECTIONS)
+  else if (gimple_code (ws_stmt) == GIMPLE_OMP_SECTIONS)
     {
       /* Number of sections is equal to the number of edges from the
-        OMP_SECTIONS_SWITCH statement, except for the one to the exit
-        of the sections region.  */
-      basic_block bb = single_succ (bb_for_stmt (ws_stmt));
+        GIMPLE_OMP_SECTIONS_SWITCH statement, except for the one to
+        the exit of the sections region.  */
+      basic_block bb = single_succ (gimple_bb (ws_stmt));
       t = build_int_cst (unsigned_type_node, EDGE_COUNT (bb->succs) - 1);
       t = tree_cons (NULL, t, NULL);
       return t;
@@ -583,9 +605,9 @@ determine_parallel_type (struct omp_region *region)
     return;
 
   /* We only support parallel+for and parallel+sections.  */
-  if (region->type != OMP_PARALLEL
-      || (region->inner->type != OMP_FOR
-         && region->inner->type != OMP_SECTIONS))
+  if (region->type != GIMPLE_OMP_PARALLEL
+      || (region->inner->type != GIMPLE_OMP_FOR
+         && region->inner->type != GIMPLE_OMP_SECTIONS))
     return;
 
   /* Check for perfect nesting PAR_ENTRY_BB -> WS_ENTRY_BB and
@@ -598,13 +620,13 @@ determine_parallel_type (struct omp_region *region)
   if (single_succ (par_entry_bb) == ws_entry_bb
       && single_succ (ws_exit_bb) == par_exit_bb
       && workshare_safe_to_combine_p (par_entry_bb, ws_entry_bb)
-      && (OMP_PARALLEL_COMBINED (last_stmt (par_entry_bb))
+      && (gimple_omp_parallel_combined_p (last_stmt (par_entry_bb))
          || (last_and_only_stmt (ws_entry_bb)
              && last_and_only_stmt (par_exit_bb))))
     {
-      tree ws_stmt = last_stmt (ws_entry_bb);
+      gimple ws_stmt = last_stmt (ws_entry_bb);
 
-      if (region->inner->type == OMP_FOR)
+      if (region->inner->type == GIMPLE_OMP_FOR)
        {
          /* If this is a combined parallel loop, we need to determine
             whether or not to use the combined library calls.  There
@@ -615,7 +637,7 @@ determine_parallel_type (struct omp_region *region)
             parallel loop call would still need extra synchronization
             to implement ordered semantics, so there would not be any
             gain in using the combined call.  */
-         tree clauses = OMP_FOR_CLAUSES (ws_stmt);
+         tree clauses = gimple_omp_for_clauses (ws_stmt);
          tree c = find_omp_clause (clauses, OMP_CLAUSE_SCHEDULE);
          if (c == NULL
              || OMP_CLAUSE_SCHEDULE_KIND (c) == OMP_CLAUSE_SCHEDULE_STATIC
@@ -745,7 +767,7 @@ use_pointer_for_field (tree decl, omp_context *shared_ctx)
            {
              tree c;
 
-             for (c = OMP_TASKREG_CLAUSES (up->stmt);
+             for (c = gimple_omp_taskreg_clauses (up->stmt);
                   c; c = OMP_CLAUSE_CHAIN (c))
                if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                    && OMP_CLAUSE_DECL (c) == decl)
@@ -990,7 +1012,7 @@ fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
       && DECL_HAS_VALUE_EXPR_P (decl))
     {
       tree ve = DECL_VALUE_EXPR (decl);
-      walk_tree (&ve, copy_body_r, &ctx->cb, NULL);
+      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
       SET_DECL_VALUE_EXPR (new_decl, ve);
       DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
     }
@@ -1058,19 +1080,19 @@ void
 dump_omp_region (FILE *file, struct omp_region *region, int indent)
 {
   fprintf (file, "%*sbb %d: %s\n", indent, "", region->entry->index,
-          tree_code_name[region->type]);
+          gimple_code_name[region->type]);
 
   if (region->inner)
     dump_omp_region (file, region->inner, indent + 4);
 
   if (region->cont)
     {
-      fprintf (file, "%*sbb %d: OMP_CONTINUE\n", indent, "",
+      fprintf (file, "%*sbb %d: GIMPLE_OMP_CONTINUE\n", indent, "",
               region->cont->index);
     }
     
   if (region->exit)
-    fprintf (file, "%*sbb %d: OMP_RETURN\n", indent, "",
+    fprintf (file, "%*sbb %d: GIMPLE_OMP_RETURN\n", indent, "",
             region->exit->index);
   else
     fprintf (file, "%*s[no exit marker]\n", indent, "");
@@ -1095,7 +1117,8 @@ debug_all_omp_regions (void)
 /* Create a new parallel region starting at STMT inside region PARENT.  */
 
 struct omp_region *
-new_omp_region (basic_block bb, enum tree_code type, struct omp_region *parent)
+new_omp_region (basic_block bb, enum gimple_code type,
+               struct omp_region *parent)
 {
   struct omp_region *region = XCNEW (struct omp_region);
 
@@ -1155,7 +1178,7 @@ free_omp_regions (void)
 /* Create a new context, with OUTER_CTX being the surrounding context.  */
 
 static omp_context *
-new_omp_context (tree stmt, omp_context *outer_ctx)
+new_omp_context (gimple stmt, omp_context *outer_ctx)
 {
   omp_context *ctx = XCNEW (omp_context);
 
@@ -1188,17 +1211,19 @@ new_omp_context (tree stmt, omp_context *outer_ctx)
   return ctx;
 }
 
-static void maybe_catch_exception (tree *stmt_p);
+static gimple_seq maybe_catch_exception (gimple_seq);
 
 /* Finalize task copyfn.  */
 
 static void
-finalize_task_copyfn (tree task_stmt)
+finalize_task_copyfn (gimple task_stmt)
 {
   struct function *child_cfun;
   tree child_fn, old_fn;
+  gimple_seq seq, new_seq;
+  gimple bind;
 
-  child_fn = OMP_TASK_COPYFN (task_stmt);
+  child_fn = gimple_omp_task_copy_fn (task_stmt);
   if (child_fn == NULL_TREE)
     return;
 
@@ -1211,8 +1236,17 @@ finalize_task_copyfn (tree task_stmt)
   old_fn = current_function_decl;
   push_cfun (child_cfun);
   current_function_decl = child_fn;
-  gimplify_body (&DECL_SAVED_TREE (child_fn), child_fn, false);
-  maybe_catch_exception (&BIND_EXPR_BODY (DECL_SAVED_TREE (child_fn)));
+  bind = gimplify_body (&DECL_SAVED_TREE (child_fn), child_fn, false);
+  seq = gimple_seq_alloc ();
+  gimple_seq_add_stmt (&seq, bind);
+  new_seq = maybe_catch_exception (seq);
+  if (new_seq != seq)
+    {
+      bind = gimple_build_bind (NULL, new_seq, NULL);
+      seq = gimple_seq_alloc ();
+      gimple_seq_add_stmt (&seq, bind);
+    }
+  gimple_set_body (child_fn, seq);
   pop_cfun ();
   current_function_decl = old_fn;
 
@@ -1285,9 +1319,11 @@ fixup_child_record_type (omp_context *ctx)
          DECL_CONTEXT (new_f) = type;
          TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
          TREE_CHAIN (new_f) = new_fields;
-         walk_tree (&DECL_SIZE (new_f), copy_body_r, &ctx->cb, NULL);
-         walk_tree (&DECL_SIZE_UNIT (new_f), copy_body_r, &ctx->cb, NULL);
-         walk_tree (&DECL_FIELD_OFFSET (new_f), copy_body_r, &ctx->cb, NULL);
+         walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
+         walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
+                    &ctx->cb, NULL);
+         walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
+                    &ctx->cb, NULL);
          new_fields = new_f;
 
          /* Arrange to be able to look up the receiver field
@@ -1386,7 +1422,7 @@ scan_sharing_clauses (tree clauses, omp_context *ctx)
 
        case OMP_CLAUSE_COPYPRIVATE:
          if (ctx->outer)
-           scan_omp (&OMP_CLAUSE_DECL (c), ctx->outer);
+           scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
          /* FALLTHRU */
 
        case OMP_CLAUSE_COPYIN:
@@ -1403,7 +1439,7 @@ scan_sharing_clauses (tree clauses, omp_context *ctx)
        case OMP_CLAUSE_NUM_THREADS:
        case OMP_CLAUSE_SCHEDULE:
          if (ctx->outer)
-           scan_omp (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
+           scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
          break;
 
        case OMP_CLAUSE_NOWAIT:
@@ -1424,7 +1460,7 @@ scan_sharing_clauses (tree clauses, omp_context *ctx)
        case OMP_CLAUSE_LASTPRIVATE:
          /* Let the corresponding firstprivate clause create
             the variable.  */
-         if (OMP_CLAUSE_LASTPRIVATE_STMT (c))
+         if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
            scan_array_reductions = true;
          if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
            break;
@@ -1472,12 +1508,12 @@ scan_sharing_clauses (tree clauses, omp_context *ctx)
       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
          && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
        {
-         scan_omp (&OMP_CLAUSE_REDUCTION_INIT (c), ctx);
-         scan_omp (&OMP_CLAUSE_REDUCTION_MERGE (c), ctx);
+         scan_omp (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
+         scan_omp (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
        }
       else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
-              && OMP_CLAUSE_LASTPRIVATE_STMT (c))
-       scan_omp (&OMP_CLAUSE_LASTPRIVATE_STMT (c), ctx);
+              && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
+       scan_omp (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
 }
 
 /* Create a new name for omp child function.  Returns an identifier.  */
@@ -1526,7 +1562,7 @@ create_omp_child_function (omp_context *ctx, bool task_copy)
   if (!task_copy)
     ctx->cb.dst_fn = decl;
   else
-    OMP_TASK_COPYFN (ctx->stmt) = decl;
+    gimple_omp_task_set_copy_fn (ctx->stmt, decl);
 
   TREE_STATIC (decl) = 1;
   TREE_USED (decl) = 1;
@@ -1567,8 +1603,8 @@ create_omp_child_function (omp_context *ctx, bool task_copy)
      allocate_struct_function clobbers CFUN, so we need to restore
      it afterward.  */
   push_struct_function (decl);
-  DECL_SOURCE_LOCATION (decl) = EXPR_LOCATION (ctx->stmt);
-  cfun->function_end_locus = EXPR_LOCATION (ctx->stmt);
+  DECL_SOURCE_LOCATION (decl) = gimple_location (ctx->stmt);
+  cfun->function_end_locus = gimple_location (ctx->stmt);
   pop_cfun ();
 }
 
@@ -1576,22 +1612,24 @@ create_omp_child_function (omp_context *ctx, bool task_copy)
 /* Scan an OpenMP parallel directive.  */
 
 static void
-scan_omp_parallel (tree *stmt_p, omp_context *outer_ctx)
+scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
 {
   omp_context *ctx;
   tree name;
+  gimple stmt = gsi_stmt (*gsi);
 
   /* Ignore parallel directives with empty bodies, unless there
      are copyin clauses.  */
   if (optimize > 0
-      && empty_body_p (OMP_PARALLEL_BODY (*stmt_p))
-      && find_omp_clause (OMP_CLAUSES (*stmt_p), OMP_CLAUSE_COPYIN) == NULL)
+      && empty_body_p (gimple_omp_body (stmt))
+      && find_omp_clause (gimple_omp_parallel_clauses (stmt),
+                         OMP_CLAUSE_COPYIN) == NULL)
     {
-      *stmt_p = build_empty_stmt ();
+      gsi_replace (gsi, gimple_build_nop (), false);
       return;
     }
 
-  ctx = new_omp_context (*stmt_p, outer_ctx);
+  ctx = new_omp_context (stmt, outer_ctx);
   if (taskreg_nesting_level > 1)
     ctx->is_nested = true;
   ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
@@ -1601,10 +1639,10 @@ scan_omp_parallel (tree *stmt_p, omp_context *outer_ctx)
   name = build_decl (TYPE_DECL, name, ctx->record_type);
   TYPE_NAME (ctx->record_type) = name;
   create_omp_child_function (ctx, false);
-  OMP_PARALLEL_FN (*stmt_p) = ctx->cb.dst_fn;
+  gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
 
-  scan_sharing_clauses (OMP_PARALLEL_CLAUSES (*stmt_p), ctx);
-  scan_omp (&OMP_PARALLEL_BODY (*stmt_p), ctx);
+  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
+  scan_omp (gimple_omp_body (stmt), ctx);
 
   if (TYPE_FIELDS (ctx->record_type) == NULL)
     ctx->record_type = ctx->receiver_decl = NULL;
@@ -1618,20 +1656,21 @@ scan_omp_parallel (tree *stmt_p, omp_context *outer_ctx)
 /* Scan an OpenMP task directive.  */
 
 static void
-scan_omp_task (tree *stmt_p, omp_context *outer_ctx)
+scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
 {
   omp_context *ctx;
-  tree name;
+  tree name, t;
+  gimple stmt = gsi_stmt (*gsi);
 
   /* Ignore task directives with empty bodies.  */
   if (optimize > 0
-      && empty_body_p (OMP_TASK_BODY (*stmt_p)))
+      && empty_body_p (gimple_omp_body (stmt)))
     {
-      *stmt_p = build_empty_stmt ();
+      gsi_replace (gsi, gimple_build_nop (), false);
       return;
     }
 
-  ctx = new_omp_context (*stmt_p, outer_ctx);
+  ctx = new_omp_context (stmt, outer_ctx);
   if (taskreg_nesting_level > 1)
     ctx->is_nested = true;
   ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
@@ -1641,9 +1680,9 @@ scan_omp_task (tree *stmt_p, omp_context *outer_ctx)
   name = build_decl (TYPE_DECL, name, ctx->record_type);
   TYPE_NAME (ctx->record_type) = name;
   create_omp_child_function (ctx, false);
-  OMP_TASK_FN (*stmt_p) = ctx->cb.dst_fn;
+  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
 
-  scan_sharing_clauses (OMP_TASK_CLAUSES (*stmt_p), ctx);
+  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
 
   if (ctx->srecord_type)
     {
@@ -1653,15 +1692,15 @@ scan_omp_task (tree *stmt_p, omp_context *outer_ctx)
       create_omp_child_function (ctx, true);
     }
 
-  scan_omp (&OMP_TASK_BODY (*stmt_p), ctx);
+  scan_omp (gimple_omp_body (stmt), ctx);
 
   if (TYPE_FIELDS (ctx->record_type) == NULL)
     {
       ctx->record_type = ctx->receiver_decl = NULL;
-      OMP_TASK_ARG_SIZE (*stmt_p)
-       = build_int_cst (long_integer_type_node, 0);
-      OMP_TASK_ARG_ALIGN (*stmt_p)
-       = build_int_cst (long_integer_type_node, 1);
+      t = build_int_cst (long_integer_type_node, 0);
+      gimple_omp_task_set_arg_size (stmt, t);
+      t = build_int_cst (long_integer_type_node, 1);
+      gimple_omp_task_set_arg_align (stmt, t);
     }
   else
     {
@@ -1684,12 +1723,12 @@ scan_omp_task (tree *stmt_p, omp_context *outer_ctx)
       fixup_child_record_type (ctx);
       if (ctx->srecord_type)
        layout_type (ctx->srecord_type);
-      OMP_TASK_ARG_SIZE (*stmt_p)
-       = fold_convert (long_integer_type_node,
+      t = fold_convert (long_integer_type_node,
                        TYPE_SIZE_UNIT (ctx->record_type));
-      OMP_TASK_ARG_ALIGN (*stmt_p)
-       = build_int_cst (long_integer_type_node,
+      gimple_omp_task_set_arg_size (stmt, t);
+      t = build_int_cst (long_integer_type_node,
                         TYPE_ALIGN_UNIT (ctx->record_type));
+      gimple_omp_task_set_arg_align (stmt, t);
     }
 }
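When the capture record ends up with no fields, the task carries no shared-data block, so the size/alignment arguments degenerate to 0 and 1; otherwise they come from the laid-out record type.  A worked example, assuming a typical LP64 layout (record name and numbers are illustrative only):

   /* task body capturing `int i' and `double d':
        struct .omp_data_s { int i; double d; };     (hypothetical record)
        arg_size  = TYPE_SIZE_UNIT  (record) = 16
        arg_align = TYPE_ALIGN_UNIT (record) =  8                        */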
 
@@ -1697,47 +1736,43 @@ scan_omp_task (tree *stmt_p, omp_context *outer_ctx)
 /* Scan an OpenMP loop directive.  */
 
 static void
-scan_omp_for (tree *stmt_p, omp_context *outer_ctx)
+scan_omp_for (gimple stmt, omp_context *outer_ctx)
 {
   omp_context *ctx;
-  tree stmt;
-  int i;
+  size_t i;
 
-  stmt = *stmt_p;
   ctx = new_omp_context (stmt, outer_ctx);
 
-  scan_sharing_clauses (OMP_FOR_CLAUSES (stmt), ctx);
+  scan_sharing_clauses (gimple_omp_for_clauses (stmt), ctx);
 
-  scan_omp (&OMP_FOR_PRE_BODY (stmt), ctx);
-  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (stmt)); i++)
+  scan_omp (gimple_omp_for_pre_body (stmt), ctx);
+  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
     {
-      scan_omp (&TREE_VEC_ELT (OMP_FOR_INIT (stmt), i), ctx);
-      scan_omp (&TREE_VEC_ELT (OMP_FOR_COND (stmt), i), ctx);
-      scan_omp (&TREE_VEC_ELT (OMP_FOR_INCR (stmt), i), ctx);
+      scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
+      scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
+      scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
+      scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
     }
-  scan_omp (&OMP_FOR_BODY (stmt), ctx);
+  scan_omp (gimple_omp_body (stmt), ctx);
 }
 
 /* Scan an OpenMP sections directive.  */
 
 static void
-scan_omp_sections (tree *stmt_p, omp_context *outer_ctx)
+scan_omp_sections (gimple stmt, omp_context *outer_ctx)
 {
-  tree stmt;
   omp_context *ctx;
 
-  stmt = *stmt_p;
   ctx = new_omp_context (stmt, outer_ctx);
-  scan_sharing_clauses (OMP_SECTIONS_CLAUSES (stmt), ctx);
-  scan_omp (&OMP_SECTIONS_BODY (stmt), ctx);
+  scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
+  scan_omp (gimple_omp_body (stmt), ctx);
 }
 
 /* Scan an OpenMP single directive.  */
 
 static void
-scan_omp_single (tree *stmt_p, omp_context *outer_ctx)
+scan_omp_single (gimple stmt, omp_context *outer_ctx)
 {
-  tree stmt = *stmt_p;
   omp_context *ctx;
   tree name;
 
@@ -1748,8 +1783,8 @@ scan_omp_single (tree *stmt_p, omp_context *outer_ctx)
   name = build_decl (TYPE_DECL, name, ctx->record_type);
   TYPE_NAME (ctx->record_type) = name;
 
-  scan_sharing_clauses (OMP_SINGLE_CLAUSES (stmt), ctx);
-  scan_omp (&OMP_SINGLE_BODY (stmt), ctx);
+  scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
+  scan_omp (gimple_omp_body (stmt), ctx);
 
   if (TYPE_FIELDS (ctx->record_type) == NULL)
     ctx->record_type = NULL;
@@ -1760,24 +1795,24 @@ scan_omp_single (tree *stmt_p, omp_context *outer_ctx)
 
 /* Check OpenMP nesting restrictions.  */
 static void
-check_omp_nesting_restrictions (tree t, omp_context *ctx)
+check_omp_nesting_restrictions (gimple stmt, omp_context *ctx)
 {
-  switch (TREE_CODE (t))
+  switch (gimple_code (stmt))
     {
-    case OMP_FOR:
-    case OMP_SECTIONS:
-    case OMP_SINGLE:
-    case CALL_EXPR:
+    case GIMPLE_OMP_FOR:
+    case GIMPLE_OMP_SECTIONS:
+    case GIMPLE_OMP_SINGLE:
+    case GIMPLE_CALL:
       for (; ctx != NULL; ctx = ctx->outer)
-       switch (TREE_CODE (ctx->stmt))
+       switch (gimple_code (ctx->stmt))
          {
-         case OMP_FOR:
-         case OMP_SECTIONS:
-         case OMP_SINGLE:
-         case OMP_ORDERED:
-         case OMP_MASTER:
-         case OMP_TASK:
-           if (TREE_CODE (t) == CALL_EXPR)
+         case GIMPLE_OMP_FOR:
+         case GIMPLE_OMP_SECTIONS:
+         case GIMPLE_OMP_SINGLE:
+         case GIMPLE_OMP_ORDERED:
+         case GIMPLE_OMP_MASTER:
+         case GIMPLE_OMP_TASK:
+           if (is_gimple_call (stmt))
              {
                warning (0, "barrier region may not be closely nested inside "
                            "of work-sharing, critical, ordered, master or "
@@ -1788,54 +1823,55 @@ check_omp_nesting_restrictions (tree t, omp_context *ctx)
                        "of work-sharing, critical, ordered, master or explicit "
                        "task region");
            return;
-         case OMP_PARALLEL:
+         case GIMPLE_OMP_PARALLEL:
            return;
          default:
            break;
          }
       break;
-    case OMP_MASTER:
+    case GIMPLE_OMP_MASTER:
       for (; ctx != NULL; ctx = ctx->outer)
-       switch (TREE_CODE (ctx->stmt))
+       switch (gimple_code (ctx->stmt))
          {
-         case OMP_FOR:
-         case OMP_SECTIONS:
-         case OMP_SINGLE:
-         case OMP_TASK:
+         case GIMPLE_OMP_FOR:
+         case GIMPLE_OMP_SECTIONS:
+         case GIMPLE_OMP_SINGLE:
+         case GIMPLE_OMP_TASK:
            warning (0, "master region may not be closely nested inside "
                        "of work-sharing or explicit task region");
            return;
-         case OMP_PARALLEL:
+         case GIMPLE_OMP_PARALLEL:
            return;
          default:
            break;
          }
       break;
-    case OMP_ORDERED:
+    case GIMPLE_OMP_ORDERED:
       for (; ctx != NULL; ctx = ctx->outer)
-       switch (TREE_CODE (ctx->stmt))
+       switch (gimple_code (ctx->stmt))
          {
-         case OMP_CRITICAL:
-         case OMP_TASK:
+         case GIMPLE_OMP_CRITICAL:
+         case GIMPLE_OMP_TASK:
            warning (0, "ordered region may not be closely nested inside "
                        "of critical or explicit task region");
            return;
-         case OMP_FOR:
-           if (find_omp_clause (OMP_CLAUSES (ctx->stmt),
+         case GIMPLE_OMP_FOR:
+           if (find_omp_clause (gimple_omp_for_clauses (ctx->stmt),
                                 OMP_CLAUSE_ORDERED) == NULL)
              warning (0, "ordered region must be closely nested inside "
                          "a loop region with an ordered clause");
            return;
-         case OMP_PARALLEL:
+         case GIMPLE_OMP_PARALLEL:
            return;
          default:
            break;
          }
       break;
-    case OMP_CRITICAL:
+    case GIMPLE_OMP_CRITICAL:
       for (; ctx != NULL; ctx = ctx->outer)
-       if (TREE_CODE (ctx->stmt) == OMP_CRITICAL
-           && OMP_CRITICAL_NAME (t) == OMP_CRITICAL_NAME (ctx->stmt))
+       if (gimple_code (ctx->stmt) == GIMPLE_OMP_CRITICAL
+           && (gimple_omp_critical_name (stmt)
+               == gimple_omp_critical_name (ctx->stmt)))
          {
            warning (0, "critical region may not be nested inside a critical "
                        "region with the same name");
@@ -1848,90 +1884,117 @@ check_omp_nesting_restrictions (tree t, omp_context *ctx)
 }
 
 
-/* Callback for walk_stmts used to scan for OpenMP directives at TP.  */
+/* Helper function for scan_omp.
+
+   Callback for walk_tree or operands in walk_gimple_stmt used to
+   scan for OpenMP directives in TP.  */
 
 static tree
-scan_omp_1 (tree *tp, int *walk_subtrees, void *data)
+scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
 {
   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
   omp_context *ctx = (omp_context *) wi->info;
   tree t = *tp;
 
-  if (EXPR_HAS_LOCATION (t))
-    input_location = EXPR_LOCATION (t);
+  switch (TREE_CODE (t))
+    {
+    case VAR_DECL:
+    case PARM_DECL:
+    case LABEL_DECL:
+    case RESULT_DECL:
+      if (ctx)
+       *tp = remap_decl (t, &ctx->cb);
+      break;
+
+    default:
+      if (ctx && TYPE_P (t))
+       *tp = remap_type (t, &ctx->cb);
+      else if (!DECL_P (t))
+       *walk_subtrees = 1;
+      break;
+    }
+
+  return NULL_TREE;
+}
+
+
+/* Helper function for scan_omp.
+
+   Callback for walk_gimple_stmt used to scan for OpenMP directives in
+   the current statement in GSI.  */
+
+static tree
+scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
+                struct walk_stmt_info *wi)
+{
+  gimple stmt = gsi_stmt (*gsi);
+  omp_context *ctx = (omp_context *) wi->info;
+
+  if (gimple_has_location (stmt))
+    input_location = gimple_location (stmt);
 
   /* Check the OpenMP nesting restrictions.  */
   if (ctx != NULL)
     {
-      if (OMP_DIRECTIVE_P (t))
-       check_omp_nesting_restrictions (t, ctx);
-      else if (TREE_CODE (t) == CALL_EXPR)
+      if (is_gimple_omp (stmt))
+       check_omp_nesting_restrictions (stmt, ctx);
+      else if (is_gimple_call (stmt))
        {
-         tree fndecl = get_callee_fndecl (t);
+         tree fndecl = gimple_call_fndecl (stmt);
          if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
-           check_omp_nesting_restrictions (t, ctx);
+           check_omp_nesting_restrictions (stmt, ctx);
        }
     }
 
-  *walk_subtrees = 0;
-  switch (TREE_CODE (t))
+  *handled_ops_p = true;
+
+  switch (gimple_code (stmt))
     {
-    case OMP_PARALLEL:
+    case GIMPLE_OMP_PARALLEL:
       taskreg_nesting_level++;
-      scan_omp_parallel (tp, ctx);
+      scan_omp_parallel (gsi, ctx);
       taskreg_nesting_level--;
       break;
 
-    case OMP_TASK:
+    case GIMPLE_OMP_TASK:
       taskreg_nesting_level++;
-      scan_omp_task (tp, ctx);
+      scan_omp_task (gsi, ctx);
       taskreg_nesting_level--;
       break;
 
-    case OMP_FOR:
-      scan_omp_for (tp, ctx);
+    case GIMPLE_OMP_FOR:
+      scan_omp_for (stmt, ctx);
       break;
 
-    case OMP_SECTIONS:
-      scan_omp_sections (tp, ctx);
+    case GIMPLE_OMP_SECTIONS:
+      scan_omp_sections (stmt, ctx);
       break;
 
-    case OMP_SINGLE:
-      scan_omp_single (tp, ctx);
+    case GIMPLE_OMP_SINGLE:
+      scan_omp_single (stmt, ctx);
       break;
 
-    case OMP_SECTION:
-    case OMP_MASTER:
-    case OMP_ORDERED:
-    case OMP_CRITICAL:
-      ctx = new_omp_context (*tp, ctx);
-      scan_omp (&OMP_BODY (*tp), ctx);
+    case GIMPLE_OMP_SECTION:
+    case GIMPLE_OMP_MASTER:
+    case GIMPLE_OMP_ORDERED:
+    case GIMPLE_OMP_CRITICAL:
+      ctx = new_omp_context (stmt, ctx);
+      scan_omp (gimple_omp_body (stmt), ctx);
       break;
 
-    case BIND_EXPR:
+    case GIMPLE_BIND:
       {
        tree var;
-       *walk_subtrees = 1;
 
-       for (var = BIND_EXPR_VARS (t); var ; var = TREE_CHAIN (var))
-         insert_decl_map (&ctx->cb, var, var);
+       *handled_ops_p = false;
+       if (ctx)
+         for (var = gimple_bind_vars (stmt); var ; var = TREE_CHAIN (var))
+           insert_decl_map (&ctx->cb, var, var);
       }
       break;
-
-    case VAR_DECL:
-    case PARM_DECL:
-    case LABEL_DECL:
-    case RESULT_DECL:
-      if (ctx)
-       *tp = remap_decl (t, &ctx->cb);
-      break;
-
     default:
-      if (ctx && TYPE_P (t))
-       *tp = remap_type (t, &ctx->cb);
-      else if (!DECL_P (t))
-       *walk_subtrees = 1;
+      *handled_ops_p = false;
       break;
     }
 
@@ -1939,24 +2002,22 @@ scan_omp_1 (tree *tp, int *walk_subtrees, void *data)
 }
 
 
-/* Scan all the statements starting at STMT_P.  CTX contains context
-   information about the OpenMP directives and clauses found during
-   the scan.  */
+/* Scan all the statements in BODY.  CTX contains context
+   information about the OpenMP directives and clauses found
+   during the scan.  */
 
 static void
-scan_omp (tree *stmt_p, omp_context *ctx)
+scan_omp (gimple_seq body, omp_context *ctx)
 {
   location_t saved_location;
   struct walk_stmt_info wi;
 
   memset (&wi, 0, sizeof (wi));
-  wi.callback = scan_omp_1;
   wi.info = ctx;
-  wi.want_bind_expr = (ctx != NULL);
   wi.want_locations = true;
 
   saved_location = input_location;
-  walk_stmts (&wi, stmt_p);
+  walk_gimple_seq (body, scan_omp_1_stmt, scan_omp_1_op, &wi);
   input_location = saved_location;
 }
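The walk_stmt_info no longer carries the callback itself; walk_gimple_seq receives the statement callback and the operand callback as explicit arguments.  A rough sketch of the dispatch contract the two callbacks above rely on (an assumption about the walker, not a quote of its implementation):

   /* for each statement S in BODY:
        handled_ops = false;
        scan_omp_1_stmt (gsi_for_S, &handled_ops, &wi);
        if (!handled_ops)
          walk every operand tree of S with scan_omp_1_op;
      GIMPLE_BIND and the default case above leave handled_ops false,
      so their operands still get remapped via remap_decl/remap_type.  */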
 \f
@@ -1973,7 +2034,7 @@ build_omp_barrier (void)
 /* If a context was created for STMT when it was scanned, return it.  */
 
 static omp_context *
-maybe_lookup_ctx (tree stmt)
+maybe_lookup_ctx (gimple stmt)
 {
   splay_tree_node n;
   n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
@@ -2133,17 +2194,17 @@ omp_reduction_init (tree clause, tree type)
    to destructors go in DLIST.  */
 
 static void
-lower_rec_input_clauses (tree clauses, tree *ilist, tree *dlist,
+lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
                         omp_context *ctx)
 {
-  tree_stmt_iterator diter;
+  gimple_stmt_iterator diter;
   tree c, dtor, copyin_seq, x, ptr;
   bool copyin_by_ref = false;
   bool lastprivate_firstprivate = false;
   int pass;
 
-  *dlist = alloc_stmt_list ();
-  diter = tsi_start (*dlist);
+  *dlist = gimple_seq_alloc ();
+  diter = gsi_start (*dlist);
   copyin_seq = NULL;
 
   /* Do all the fixed sized types in the first pass, and the variable sized
@@ -2205,15 +2266,26 @@ lower_rec_input_clauses (tree clauses, tree *ilist, tree *dlist,
 
              if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
                {
+                 gimple stmt;
+                 tree tmp;
+
                  ptr = DECL_VALUE_EXPR (new_var);
                  gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
                  ptr = TREE_OPERAND (ptr, 0);
                  gcc_assert (DECL_P (ptr));
                  x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
-                 x = build_call_expr (built_in_decls[BUILT_IN_ALLOCA], 1, x);
-                 x = fold_convert (TREE_TYPE (ptr), x);
-                 x = build_gimple_modify_stmt (ptr, x);
-                 gimplify_and_add (x, ilist);
+
+                 /* void *tmp = __builtin_alloca (x);  */
+                 stmt
+                   = gimple_build_call (built_in_decls[BUILT_IN_ALLOCA], 1, x);
+                 tmp = create_tmp_var_raw (ptr_type_node, NULL);
+                 gimple_add_tmp_var (tmp);
+                 gimple_call_set_lhs (stmt, tmp);
+
+                 gimple_seq_add_stmt (ilist, stmt);
+
+                 x = fold_convert (TREE_TYPE (ptr), tmp);
+                 gimplify_assign (ptr, x, ilist);
                }
            }
          else if (is_reference (var))
@@ -2252,8 +2324,7 @@ lower_rec_input_clauses (tree clauses, tree *ilist, tree *dlist,
                  x = fold_convert (TREE_TYPE (new_var), x);
                }
 
-             x = build_gimple_modify_stmt (new_var, x);
-             gimplify_and_add (x, ilist);
+             gimplify_assign (new_var, x, ilist);
 
              new_var = build_fold_indirect_ref (new_var);
            }
@@ -2315,9 +2386,11 @@ lower_rec_input_clauses (tree clauses, tree *ilist, tree *dlist,
              x = lang_hooks.decls.omp_clause_dtor (c, new_var);
              if (x)
                {
+                 gimple_seq tseq = NULL;
+
                  dtor = x;
-                 gimplify_stmt (&dtor);
-                 tsi_link_before (&diter, dtor, TSI_SAME_STMT);
+                 gimplify_stmt (&dtor, &tseq);
+                 gsi_insert_seq_before (&diter, tseq, GSI_SAME_STMT);
                }
              break;
 
@@ -2360,16 +2433,17 @@ lower_rec_input_clauses (tree clauses, tree *ilist, tree *dlist,
                    x = build_fold_addr_expr (x);
                  SET_DECL_VALUE_EXPR (placeholder, x);
                  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
-                 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c), ilist);
-                 OMP_CLAUSE_REDUCTION_INIT (c) = NULL;
+                 lower_omp (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
+                 gimple_seq_add_seq (ilist,
+                                     OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
+                 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
                  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
                }
              else
                {
                  x = omp_reduction_init (c, TREE_TYPE (new_var));
                  gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
-                 x = build_gimple_modify_stmt (new_var, x);
-                 gimplify_and_add (x, ilist);
+                 gimplify_assign (new_var, x, ilist);
                }
              break;
 
@@ -2406,10 +2480,10 @@ lower_rec_input_clauses (tree clauses, tree *ilist, tree *dlist,
    always true.   */
 
 static void
-lower_lastprivate_clauses (tree clauses, tree predicate, tree *stmt_list,
-                          omp_context *ctx)
+lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
+                           omp_context *ctx)
 {
-  tree sub_list, x, c;
+  tree x, c, label = NULL;
   bool par_clauses = false;
 
   /* Early exit if there are no lastprivate clauses.  */
@@ -2426,14 +2500,29 @@ lower_lastprivate_clauses (tree clauses, tree predicate, tree *stmt_list,
       if (ctx == NULL || !is_parallel_ctx (ctx))
        return;
 
-      clauses = find_omp_clause (OMP_PARALLEL_CLAUSES (ctx->stmt),
+      clauses = find_omp_clause (gimple_omp_parallel_clauses (ctx->stmt),
                                 OMP_CLAUSE_LASTPRIVATE);
       if (clauses == NULL)
        return;
       par_clauses = true;
     }
 
-  sub_list = alloc_stmt_list ();
+  if (predicate)
+    {
+      gimple stmt;
+      tree label_true, arm1, arm2;
+
+      label = create_artificial_label ();
+      label_true = create_artificial_label ();
+      arm1 = TREE_OPERAND (predicate, 0);
+      arm2 = TREE_OPERAND (predicate, 1);
+      gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
+      gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
+      stmt = gimple_build_cond (TREE_CODE (predicate), arm1, arm2,
+                               label_true, label);
+      gimple_seq_add_stmt (stmt_list, stmt);
+      gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
+    }
 
   for (c = clauses; c ;)
     {
@@ -2444,15 +2533,19 @@ lower_lastprivate_clauses (tree clauses, tree predicate, tree *stmt_list,
          var = OMP_CLAUSE_DECL (c);
          new_var = lookup_decl (var, ctx);
 
-         if (OMP_CLAUSE_LASTPRIVATE_STMT (c))
-           gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c), &sub_list);
-         OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL;
+         if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
+           {
+             lower_omp (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
+             gimple_seq_add_seq (stmt_list,
+                                 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
+           }
+         OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
 
          x = build_outer_var_ref (var, ctx);
          if (is_reference (var))
            new_var = build_fold_indirect_ref (new_var);
          x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
-         append_to_statement_list (x, &sub_list);
+         gimplify_and_add (x, stmt_list);
        }
       c = OMP_CLAUSE_CHAIN (c);
       if (c == NULL && !par_clauses)
@@ -2467,27 +2560,25 @@ lower_lastprivate_clauses (tree clauses, tree predicate, tree *stmt_list,
          if (ctx == NULL || !is_parallel_ctx (ctx))
            break;
 
-         c = find_omp_clause (OMP_PARALLEL_CLAUSES (ctx->stmt),
+         c = find_omp_clause (gimple_omp_parallel_clauses (ctx->stmt),
                               OMP_CLAUSE_LASTPRIVATE);
          par_clauses = true;
        }
     }
 
-  if (predicate)
-    x = build3 (COND_EXPR, void_type_node, predicate, sub_list, NULL);
-  else
-    x = sub_list;
-
-  gimplify_and_add (x, stmt_list);
+  if (label)
+    gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
 }
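Rather than wrapping the copy-out statements in a COND_EXPR, the tuplified version branches around them explicitly.  Schematically, the sequence built above looks like this when PREDICATE is set (labels are the artificial ones created above):

   /*   if (arm1 <cond> arm2) goto label_true; else goto label;
      label_true:
        <lowered OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ, if any>
        outer_var = new_var;            ... one per lastprivate clause
      label:                                                            */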
 
 
 /* Generate code to implement the REDUCTION clauses.  */
 
 static void
-lower_reduction_clauses (tree clauses, tree *stmt_list, omp_context *ctx)
+lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
 {
-  tree sub_list = NULL, x, c;
+  gimple_seq sub_seq = NULL;
+  gimple stmt;
+  tree x, c;
   int count = 0;
 
   /* First see if there is exactly one reduction clause.  Use OMP_ATOMIC
@@ -2535,7 +2626,7 @@ lower_reduction_clauses (tree clauses, tree *stmt_list, omp_context *ctx)
          ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
          x = fold_build2 (code, TREE_TYPE (ref), ref, new_var);
          x = build2 (OMP_ATOMIC, void_type_node, addr, x);
-         gimplify_and_add (x, stmt_list);
+         gimplify_and_add (x, stmt_seqp);
          return;
        }
 
@@ -2547,33 +2638,33 @@ lower_reduction_clauses (tree clauses, tree *stmt_list, omp_context *ctx)
            ref = build_fold_addr_expr (ref);
          SET_DECL_VALUE_EXPR (placeholder, ref);
          DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
-         gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c), &sub_list);
-         OMP_CLAUSE_REDUCTION_MERGE (c) = NULL;
+         lower_omp (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
+         gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
+         OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
          OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
        }
       else
        {
          x = build2 (code, TREE_TYPE (ref), ref, new_var);
          ref = build_outer_var_ref (var, ctx);
-         x = build_gimple_modify_stmt (ref, x);
-         append_to_statement_list (x, &sub_list);
+         gimplify_assign (ref, x, &sub_seq);
        }
     }
 
-  x = build_call_expr (built_in_decls[BUILT_IN_GOMP_ATOMIC_START], 0);
-  gimplify_and_add (x, stmt_list);
+  stmt = gimple_build_call (built_in_decls[BUILT_IN_GOMP_ATOMIC_START], 0);
+  gimple_seq_add_stmt (stmt_seqp, stmt);
 
-  gimplify_and_add (sub_list, stmt_list);
+  gimple_seq_add_seq (stmt_seqp, sub_seq);
 
-  x = build_call_expr (built_in_decls[BUILT_IN_GOMP_ATOMIC_END], 0);
-  gimplify_and_add (x, stmt_list);
+  stmt = gimple_build_call (built_in_decls[BUILT_IN_GOMP_ATOMIC_END], 0);
+  gimple_seq_add_stmt (stmt_seqp, stmt);
 }
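In the general (non-atomic) case the per-clause merge statements are collected in SUB_SEQ and bracketed by the libgomp atomic entry points, so the emitted sequence has roughly this shape (sketch only):

   /*   GOMP_atomic_start ();
        outer_ref = outer_ref <op> new_var;   ... one per reduction clause,
                                                  or the clause's GIMPLE_MERGE
                                                  sequence for placeholders
        GOMP_atomic_end ();                                              */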
 
 
 /* Generate code to implement the COPYPRIVATE clauses.  */
 
 static void
-lower_copyprivate_clauses (tree clauses, tree *slist, tree *rlist,
+lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
                            omp_context *ctx)
 {
   tree c;
@@ -2592,8 +2683,7 @@ lower_copyprivate_clauses (tree clauses, tree *slist, tree *rlist,
       ref = build_sender_ref (var, ctx);
       x = lookup_decl_in_outer_ctx (var, ctx);
       x = by_ref ? build_fold_addr_expr (x) : x;
-      x = build_gimple_modify_stmt (ref, x);
-      gimplify_and_add (x, slist);
+      gimplify_assign (ref, x, slist);
 
       ref = build_receiver_ref (var, by_ref, ctx);
       if (is_reference (var))
@@ -2611,7 +2701,8 @@ lower_copyprivate_clauses (tree clauses, tree *slist, tree *rlist,
    and REDUCTION from the sender (aka parent) side.  */
 
 static void
-lower_send_clauses (tree clauses, tree *ilist, tree *olist, omp_context *ctx)
+lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
+                   omp_context *ctx)
 {
   tree c;
 
@@ -2681,8 +2772,7 @@ lower_send_clauses (tree clauses, tree *ilist, tree *olist, omp_context *ctx)
        {
          ref = build_sender_ref (val, ctx);
          x = by_ref ? build_fold_addr_expr (var) : var;
-         x = build_gimple_modify_stmt (ref, x);
-         gimplify_and_add (x, ilist);
+         gimplify_assign (ref, x, ilist);
          if (is_task_ctx (ctx))
            DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
        }
@@ -2690,18 +2780,17 @@ lower_send_clauses (tree clauses, tree *ilist, tree *olist, omp_context *ctx)
       if (do_out)
        {
          ref = build_sender_ref (val, ctx);
-         x = build_gimple_modify_stmt (var, ref);
-         gimplify_and_add (x, olist);
+         gimplify_assign (var, ref, olist);
        }
     }
 }
 
-/* Generate code to implement SHARED from the sender (aka parent) side.
-   This is trickier, since OMP_PARALLEL_CLAUSES doesn't list things that
-   got automatically shared.  */
+/* Generate code to implement SHARED from the sender (aka parent)
+   side.  This is trickier, since gimple_omp_parallel_clauses doesn't
+   list things that got automatically shared.  */
 
 static void
-lower_send_shared_vars (tree *ilist, tree *olist, omp_context *ctx)
+lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
 {
   tree var, ovar, nvar, f, x, record_type;
 
@@ -2725,25 +2814,37 @@ lower_send_shared_vars (tree *ilist, tree *olist, omp_context *ctx)
        {
          x = build_sender_ref (ovar, ctx);
          var = build_fold_addr_expr (var);
-         x = build_gimple_modify_stmt (x, var);
-         gimplify_and_add (x, ilist);
+         gimplify_assign (x, var, ilist);
        }
       else
        {
          x = build_sender_ref (ovar, ctx);
-         x = build_gimple_modify_stmt (x, var);
-         gimplify_and_add (x, ilist);
+         gimplify_assign (x, var, ilist);
 
          if (!TREE_READONLY (var))
            {
              x = build_sender_ref (ovar, ctx);
-             x = build_gimple_modify_stmt (var, x);
-             gimplify_and_add (x, olist);
+             gimplify_assign (var, x, olist);
            }
        }
     }
 }
 
+
+/* A convenience function to build an empty GIMPLE_COND with just the
+   condition.  */
+
+static gimple
+gimple_build_cond_empty (tree cond)
+{
+  enum tree_code pred_code;
+  tree lhs, rhs;
+
+  gimple_cond_get_ops_from_tree (cond, &pred_code, &lhs, &rhs);
+  return gimple_build_cond (pred_code, lhs, rhs, NULL_TREE, NULL_TREE);
+}
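gimple_cond_get_ops_from_tree splits the tree predicate into a comparison code and its two operands; the label operands stay NULL because the CFG edges supply the targets.  For instance (operands a and b are illustrative):

   /* cond = build2 (LT_EXPR, boolean_type_node, a, b);
      gimple_build_cond_empty (cond)
        => gimple_build_cond (LT_EXPR, a, b, NULL_TREE, NULL_TREE)  */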
+
+
 /* Build the function calls to GOMP_parallel_start etc to actually 
    generate the parallel operation.  REGION is the parallel region
    being expanded.  BB is the block where to insert the code.  WS_ARGS
@@ -2753,13 +2854,14 @@ lower_send_shared_vars (tree *ilist, tree *olist, omp_context *ctx)
 
 static void
 expand_parallel_call (struct omp_region *region, basic_block bb,
-                     tree entry_stmt, tree ws_args)
+                     gimple entry_stmt, tree ws_args)
 {
   tree t, t1, t2, val, cond, c, clauses;
-  block_stmt_iterator si;
+  gimple_stmt_iterator gsi;
+  gimple stmt;
   int start_ix;
 
-  clauses = OMP_PARALLEL_CLAUSES (entry_stmt);
+  clauses = gimple_omp_parallel_clauses (entry_stmt);
 
   /* Determine what flavor of GOMP_parallel_start we will be
      emitting.  */
@@ -2768,14 +2870,14 @@ expand_parallel_call (struct omp_region *region, basic_block bb,
     {
       switch (region->inner->type)
        {
-       case OMP_FOR:
+       case GIMPLE_OMP_FOR:
          gcc_assert (region->inner->sched_kind != OMP_CLAUSE_SCHEDULE_AUTO);
          start_ix = BUILT_IN_GOMP_PARALLEL_LOOP_STATIC_START
                     + (region->inner->sched_kind
                        == OMP_CLAUSE_SCHEDULE_RUNTIME
                        ? 3 : region->inner->sched_kind);
          break;
-       case OMP_SECTIONS:
+       case GIMPLE_OMP_SECTIONS:
          start_ix = BUILT_IN_GOMP_PARALLEL_SECTIONS_START;
          break;
        default:
@@ -2803,7 +2905,7 @@ expand_parallel_call (struct omp_region *region, basic_block bb,
      (cond != 0) or (cond ? val : 1u).  */
   if (cond)
     {
-      block_stmt_iterator si;
+      gimple_stmt_iterator gsi;
 
       cond = gimple_boolify (cond);
 
@@ -2814,14 +2916,14 @@ expand_parallel_call (struct omp_region *region, basic_block bb,
        {
          basic_block cond_bb, then_bb, else_bb;
          edge e, e_then, e_else;
-         tree t, tmp_then, tmp_else, tmp_join, tmp_var;
+         tree tmp_then, tmp_else, tmp_join, tmp_var;
 
          tmp_var = create_tmp_var (TREE_TYPE (val), NULL);
          if (gimple_in_ssa_p (cfun))
            {
-             tmp_then = make_ssa_name (tmp_var, NULL_TREE);
-             tmp_else = make_ssa_name (tmp_var, NULL_TREE);
-             tmp_join = make_ssa_name (tmp_var, NULL_TREE);
+             tmp_then = make_ssa_name (tmp_var, NULL);
+             tmp_else = make_ssa_name (tmp_var, NULL);
+             tmp_join = make_ssa_name (tmp_var, NULL);
            }
          else
            {
@@ -2840,24 +2942,18 @@ expand_parallel_call (struct omp_region *region, basic_block bb,
          set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
          set_immediate_dominator (CDI_DOMINATORS, else_bb, cond_bb);
 
-         t = build3 (COND_EXPR, void_type_node,
-                     cond, NULL_TREE, NULL_TREE);
+         stmt = gimple_build_cond_empty (cond);
+         gsi = gsi_start_bb (cond_bb);
+         gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
 
-         si = bsi_start (cond_bb);
-         bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
+         gsi = gsi_start_bb (then_bb);
+         stmt = gimple_build_assign (tmp_then, val);
+         gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
 
-         si = bsi_start (then_bb);
-         t = build_gimple_modify_stmt (tmp_then, val);
-         if (gimple_in_ssa_p (cfun))
-           SSA_NAME_DEF_STMT (tmp_then) = t;
-         bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
-
-         si = bsi_start (else_bb);
-         t = build_gimple_modify_stmt (tmp_else, 
-                                       build_int_cst (unsigned_type_node, 1));
-         if (gimple_in_ssa_p (cfun))
-           SSA_NAME_DEF_STMT (tmp_else) = t;
-         bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
+         gsi = gsi_start_bb (else_bb);
+         stmt = gimple_build_assign
+                  (tmp_else, build_int_cst (unsigned_type_node, 1));
+         gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
 
          make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
          make_edge (cond_bb, else_bb, EDGE_FALSE_VALUE);
@@ -2866,7 +2962,7 @@ expand_parallel_call (struct omp_region *region, basic_block bb,
 
          if (gimple_in_ssa_p (cfun))
            {
-             tree phi = create_phi_node (tmp_join, bb);
+             gimple phi = create_phi_node (tmp_join, bb);
              SSA_NAME_DEF_STMT (tmp_join) = phi;
              add_phi_arg (phi, tmp_then, e_then);
              add_phi_arg (phi, tmp_else, e_else);
@@ -2875,18 +2971,18 @@ expand_parallel_call (struct omp_region *region, basic_block bb,
          val = tmp_join;
        }
 
-      si = bsi_start (bb);
-      val = force_gimple_operand_bsi (&si, val, true, NULL_TREE,
-                                     false, BSI_CONTINUE_LINKING);
+      gsi = gsi_start_bb (bb);
+      val = force_gimple_operand_gsi (&gsi, val, true, NULL_TREE,
+                                     false, GSI_CONTINUE_LINKING);
     }
 
-  si = bsi_last (bb);
-  t = OMP_PARALLEL_DATA_ARG (entry_stmt);
+  gsi = gsi_last_bb (bb);
+  t = gimple_omp_parallel_data_arg (entry_stmt);
   if (t == NULL)
     t1 = null_pointer_node;
   else
     t1 = build_fold_addr_expr (t);
-  t2 = build_fold_addr_expr (OMP_PARALLEL_FN (entry_stmt));
+  t2 = build_fold_addr_expr (gimple_omp_parallel_child_fn (entry_stmt));
 
   if (ws_args)
     {
@@ -2898,21 +2994,21 @@ expand_parallel_call (struct omp_region *region, basic_block bb,
   else
     t = build_call_expr (built_in_decls[start_ix], 3, t2, t1, val);
 
-  force_gimple_operand_bsi (&si, t, true, NULL_TREE,
-                           false, BSI_CONTINUE_LINKING);
+  force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
+                           false, GSI_CONTINUE_LINKING);
 
-  t = OMP_PARALLEL_DATA_ARG (entry_stmt);
+  t = gimple_omp_parallel_data_arg (entry_stmt);
   if (t == NULL)
     t = null_pointer_node;
   else
     t = build_fold_addr_expr (t);
-  t = build_call_expr (OMP_PARALLEL_FN (entry_stmt), 1, t);
-  force_gimple_operand_bsi (&si, t, true, NULL_TREE,
-                           false, BSI_CONTINUE_LINKING);
+  t = build_call_expr (gimple_omp_parallel_child_fn (entry_stmt), 1, t);
+  force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
+                           false, GSI_CONTINUE_LINKING);
 
   t = build_call_expr (built_in_decls[BUILT_IN_GOMP_PARALLEL_END], 0);
-  force_gimple_operand_bsi (&si, t, true, NULL_TREE,
-                           false, BSI_CONTINUE_LINKING);
+  force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
+                           false, GSI_CONTINUE_LINKING);
 }
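The calls forced into BB therefore follow the usual libgomp launch pattern; schematically (the data-block name is illustrative, and ws_args only appear for the combined workshare entry points):

   /*   GOMP_parallel_start (&child_fn, &.omp_data_o, num_threads [, ws_args]);
        child_fn (&.omp_data_o);
        GOMP_parallel_end ();                                            */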
 
 
@@ -2920,12 +3016,12 @@ expand_parallel_call (struct omp_region *region, basic_block bb,
    generate the task operation.  BB is the block where to insert the code.  */
 
 static void
-expand_task_call (basic_block bb, tree entry_stmt)
+expand_task_call (basic_block bb, gimple entry_stmt)
 {
   tree t, t1, t2, t3, flags, cond, c, clauses;
-  block_stmt_iterator si;
+  gimple_stmt_iterator gsi;
 
-  clauses = OMP_TASK_CLAUSES (entry_stmt);
+  clauses = gimple_omp_task_clauses (entry_stmt);
 
   c = find_omp_clause (clauses, OMP_CLAUSE_IF);
   if (c)
@@ -2936,53 +3032,52 @@ expand_task_call (basic_block bb, tree entry_stmt)
   c = find_omp_clause (clauses, OMP_CLAUSE_UNTIED);
   flags = build_int_cst (unsigned_type_node, (c ? 1 : 0));
 
-  si = bsi_last (bb);
-  t = OMP_TASK_DATA_ARG (entry_stmt);
+  gsi = gsi_last_bb (bb);
+  t = gimple_omp_task_data_arg (entry_stmt);
   if (t == NULL)
     t2 = null_pointer_node;
   else
     t2 = build_fold_addr_expr (t);
-  t1 = build_fold_addr_expr (OMP_TASK_FN (entry_stmt));
-  t = OMP_TASK_COPYFN (entry_stmt);
+  t1 = build_fold_addr_expr (gimple_omp_task_child_fn (entry_stmt));
+  t = gimple_omp_task_copy_fn (entry_stmt);
   if (t == NULL)
     t3 = null_pointer_node;
   else
     t3 = build_fold_addr_expr (t);
 
   t = build_call_expr (built_in_decls[BUILT_IN_GOMP_TASK], 7, t1, t2, t3,
-                      OMP_TASK_ARG_SIZE (entry_stmt),
-                      OMP_TASK_ARG_ALIGN (entry_stmt), cond, flags);
+                      gimple_omp_task_arg_size (entry_stmt),
+                      gimple_omp_task_arg_align (entry_stmt), cond, flags);
 
-  force_gimple_operand_bsi (&si, t, true, NULL_TREE,
-                           false, BSI_CONTINUE_LINKING);
+  force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
+                           false, GSI_CONTINUE_LINKING);
 }
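The single GOMP_task call carries everything the runtime needs; its seven arguments, in the order built above, are (sketch):

   /*   GOMP_task (&child_fn, &data, &copy_fn,
                   arg_size, arg_align,     size/alignment of the captured block
                   if_cond,                 from the IF clause, else true
                   flags);                  bit 0 set when UNTIED is present  */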
 
 
-/* If exceptions are enabled, wrap *STMT_P in a MUST_NOT_THROW catch
-   handler.  This prevents programs from violating the structured
-   block semantics with throws.  */
+/* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
+   catch handler and return it.  This prevents programs from violating the
+   structured block semantics with throws.  */
 
-static void
-maybe_catch_exception (tree *stmt_p)
+static gimple_seq
+maybe_catch_exception (gimple_seq body)
 {
-  tree f, t;
+  gimple f, t;
 
   if (!flag_exceptions)
-    return;
+    return body;
 
   if (lang_protect_cleanup_actions)
     t = lang_protect_cleanup_actions ();
   else
-    t = build_call_expr (built_in_decls[BUILT_IN_TRAP], 0);
-  f = build2 (EH_FILTER_EXPR, void_type_node, NULL, NULL);
-  EH_FILTER_MUST_NOT_THROW (f) = 1;
-  gimplify_and_add (t, &EH_FILTER_FAILURE (f));
-  
-  t = build2 (TRY_CATCH_EXPR, void_type_node, *stmt_p, NULL);
-  append_to_statement_list (f, &TREE_OPERAND (t, 1));
+    t = gimple_build_call (built_in_decls[BUILT_IN_TRAP], 0);
+
+  f = gimple_build_eh_filter (NULL, gimple_seq_alloc_with_stmt (t));
+  gimple_eh_filter_set_must_not_throw (f, true);
+
+  t = gimple_build_try (body, gimple_seq_alloc_with_stmt (f),
+                       GIMPLE_TRY_CATCH);
 
-  *stmt_p = NULL;
-  append_to_statement_list (t, stmt_p);
+  return gimple_seq_alloc_with_stmt (t);
 }
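The returned sequence wraps BODY in a GIMPLE_TRY whose handler is an EH filter marked must-not-throw, so conceptually it expands to:

   /*   try
          <BODY>
        catch
          <eh_filter, must_not_throw>
            __builtin_trap ();       or lang_protect_cleanup_actions ()  */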
 
 /* Chain all the DECLs in LIST by their TREE_CHAIN fields.  */
@@ -3006,19 +3101,19 @@ list2chain (tree list)
 
 
 /* Remove barriers in REGION->EXIT's block.  Note that this is only
-   valid for OMP_PARALLEL regions.  Since the end of a parallel region
-   is an implicit barrier, any workshare inside the OMP_PARALLEL that
-   left a barrier at the end of the OMP_PARALLEL region can now be
+   valid for GIMPLE_OMP_PARALLEL regions.  Since the end of a parallel region
+   is an implicit barrier, any workshare inside the GIMPLE_OMP_PARALLEL that
+   left a barrier at the end of the GIMPLE_OMP_PARALLEL region can now be
    removed.  */
 
 static void
 remove_exit_barrier (struct omp_region *region)
 {
-  block_stmt_iterator si;
+  gimple_stmt_iterator gsi;
   basic_block exit_bb;
   edge_iterator ei;
   edge e;
-  tree t;
+  gimple stmt;
 
   exit_bb = region->exit;
 
@@ -3027,32 +3122,32 @@ remove_exit_barrier (struct omp_region *region)
   if (! exit_bb)
     return;
 
-  /* The last insn in the block will be the parallel's OMP_RETURN.  The
-     workshare's OMP_RETURN will be in a preceding block.  The kinds of
+  /* The last insn in the block will be the parallel's GIMPLE_OMP_RETURN.  The
+     workshare's GIMPLE_OMP_RETURN will be in a preceding block.  The kinds of
      statements that can appear in between are extremely limited -- no
      memory operations at all.  Here, we allow nothing at all, so the
-     only thing we allow to precede this OMP_RETURN is a label.  */
-  si = bsi_last (exit_bb);
-  gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_RETURN);
-  bsi_prev (&si);
-  if (!bsi_end_p (si) && TREE_CODE (bsi_stmt (si)) != LABEL_EXPR)
+     only thing we allow to precede this GIMPLE_OMP_RETURN is a label.  */
+  gsi = gsi_last_bb (exit_bb);
+  gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_RETURN);
+  gsi_prev (&gsi);
+  if (!gsi_end_p (gsi) && gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
     return;
 
   FOR_EACH_EDGE (e, ei, exit_bb->preds)
     {
-      si = bsi_last (e->src);
-      if (bsi_end_p (si))
+      gsi = gsi_last_bb (e->src);
+      if (gsi_end_p (gsi))
        continue;
-      t = bsi_stmt (si);
-      if (TREE_CODE (t) == OMP_RETURN)
-       OMP_RETURN_NOWAIT (t) = 1;
+      stmt = gsi_stmt (gsi);
+      if (gimple_code (stmt) == GIMPLE_OMP_RETURN)
+       gimple_omp_return_set_nowait (stmt);
     }
 }
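The nowait flag on the preceding workshare's GIMPLE_OMP_RETURN is what lets the redundant barrier disappear, since the end of the enclosing parallel already synchronizes.  A small illustrative case:

   /*   #pragma omp parallel
        {
          #pragma omp for     the loop's implicit end-of-workshare barrier ...
            ...
        }                     ... is redundant here, because the parallel end
                              is itself a barrier, so the loop's
                              GIMPLE_OMP_RETURN gets the nowait flag.  */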
 
 static void
 remove_exit_barriers (struct omp_region *region)
 {
-  if (region->type == OMP_PARALLEL)
+  if (region->type == GIMPLE_OMP_PARALLEL)
     remove_exit_barrier (region);
 
   if (region->inner)
@@ -3076,27 +3171,26 @@ remove_exit_barriers (struct omp_region *region)
    scheduling point.  */
 
 static void
-optimize_omp_library_calls (tree entry_stmt)
+optimize_omp_library_calls (gimple entry_stmt)
 {
   basic_block bb;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   tree thr_num_id
     = DECL_ASSEMBLER_NAME (built_in_decls [BUILT_IN_OMP_GET_THREAD_NUM]);
   tree num_thr_id
     = DECL_ASSEMBLER_NAME (built_in_decls [BUILT_IN_OMP_GET_NUM_THREADS]);
-  bool untied_task = (TREE_CODE (entry_stmt) == OMP_TASK
-                     && find_omp_clause (OMP_TASK_CLAUSES (entry_stmt),
+  bool untied_task = (gimple_code (entry_stmt) == GIMPLE_OMP_TASK
+                     && find_omp_clause (gimple_omp_task_clauses (entry_stmt),
                                          OMP_CLAUSE_UNTIED) != NULL);
 
   FOR_EACH_BB (bb)
-    for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
       {
-       tree stmt = bsi_stmt (bsi);
-       tree call = get_call_expr_in (stmt);
+       gimple call = gsi_stmt (gsi);
        tree decl;
 
-       if (call
-           && (decl = get_callee_fndecl (call))
+       if (is_gimple_call (call)
+           && (decl = gimple_call_fndecl (call))
            && DECL_EXTERNAL (decl)
            && TREE_PUBLIC (decl)
            && DECL_INITIAL (decl) == NULL)
@@ -3117,7 +3211,7 @@ optimize_omp_library_calls (tree entry_stmt)
              continue;
 
            if (DECL_ASSEMBLER_NAME (decl) != DECL_ASSEMBLER_NAME (built_in)
-               || call_expr_nargs (call) != 0)
+               || gimple_call_num_args (call) != 0)
              continue;
 
            if (flag_exceptions && !TREE_NOTHROW (decl))
@@ -3128,7 +3222,7 @@ optimize_omp_library_calls (tree entry_stmt)
                   != TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (built_in))))
              continue;
 
-           CALL_EXPR_FN (call) = build_fold_addr_expr (built_in);
+           gimple_call_set_fn (call, build_fold_addr_expr (built_in));
          }
       }
 }
@@ -3141,12 +3235,12 @@ expand_omp_taskreg (struct omp_region *region)
   basic_block entry_bb, exit_bb, new_bb;
   struct function *child_cfun;
   tree child_fn, block, t, ws_args, *tp;
-  block_stmt_iterator si;
-  tree entry_stmt;
+  gimple_stmt_iterator gsi;
+  gimple entry_stmt, stmt;
   edge e;
 
   entry_stmt = last_stmt (region->entry);
-  child_fn = OMP_TASKREG_FN (entry_stmt);
+  child_fn = gimple_omp_taskreg_child_fn (entry_stmt);
   child_cfun = DECL_STRUCT_FUNCTION (child_fn);
   /* If this function has been already instrumented, make sure
      the child function isn't instrumented again.  */
@@ -3166,14 +3260,14 @@ expand_omp_taskreg (struct omp_region *region)
         the region, in which case all we need to do is make the
         sub-graph unreachable and emit the parallel call.  */
       edge entry_succ_e, exit_succ_e;
-      block_stmt_iterator si;
+      gimple_stmt_iterator gsi;
 
       entry_succ_e = single_succ_edge (entry_bb);
 
-      si = bsi_last (entry_bb);
-      gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_PARALLEL
-                 || TREE_CODE (bsi_stmt (si)) == OMP_TASK);
-      bsi_remove (&si, true);
+      gsi = gsi_last_bb (entry_bb);
+      gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_PARALLEL
+                 || gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_TASK);
+      gsi_remove (&gsi, true);
 
       new_bb = entry_bb;
       if (exit_bb)
@@ -3198,41 +3292,52 @@ expand_omp_taskreg (struct omp_region *region)
         a function call that has been inlined, the original PARM_DECL
         .OMP_DATA_I may have been converted into a different local
         variable.  In which case, we need to keep the assignment.  */
-      if (OMP_TASKREG_DATA_ARG (entry_stmt))
+      if (gimple_omp_taskreg_data_arg (entry_stmt))
        {
          basic_block entry_succ_bb = single_succ (entry_bb);
-         block_stmt_iterator si;
-         tree parcopy_stmt = NULL_TREE, arg, narg;
+         gimple_stmt_iterator gsi;
+         tree arg, narg;
+         gimple parcopy_stmt = NULL;
 
-         for (si = bsi_start (entry_succ_bb); ; bsi_next (&si))
+         for (gsi = gsi_start_bb (entry_succ_bb); ; gsi_next (&gsi))
            {
-             tree stmt, arg;
+             gimple stmt;
 
-             gcc_assert (!bsi_end_p (si));
-             stmt = bsi_stmt (si);
-             if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+             gcc_assert (!gsi_end_p (gsi));
+             stmt = gsi_stmt (gsi);
+             if (gimple_code (stmt) != GIMPLE_ASSIGN)
                continue;
 
-             arg = GIMPLE_STMT_OPERAND (stmt, 1);
-             STRIP_NOPS (arg);
-             if (TREE_CODE (arg) == ADDR_EXPR
-                 && TREE_OPERAND (arg, 0)
-                    == OMP_TASKREG_DATA_ARG (entry_stmt))
+             if (gimple_num_ops (stmt) == 2)
                {
-                 parcopy_stmt = stmt;
-                 break;
+                 tree arg = gimple_assign_rhs1 (stmt);
+
+                 /* We ignore the subcode because we are
+                    effectively doing a STRIP_NOPS.  */
+
+                 if (TREE_CODE (arg) == ADDR_EXPR
+                     && TREE_OPERAND (arg, 0)
+                       == gimple_omp_taskreg_data_arg (entry_stmt))
+                   {
+                     parcopy_stmt = stmt;
+                     break;
+                   }
                }
            }
 
-         gcc_assert (parcopy_stmt != NULL_TREE);
+         gcc_assert (parcopy_stmt != NULL);
          arg = DECL_ARGUMENTS (child_fn);
 
          if (!gimple_in_ssa_p (cfun))
            {
-             if (GIMPLE_STMT_OPERAND (parcopy_stmt, 0) == arg)
-               bsi_remove (&si, true);
+             if (gimple_assign_lhs (parcopy_stmt) == arg)
+               gsi_remove (&gsi, true);
              else
-               GIMPLE_STMT_OPERAND (parcopy_stmt, 1) = arg;
+               {
+                 /* ?? Is setting the subcode really necessary ??  */
+                 gimple_omp_set_subcode (parcopy_stmt, TREE_CODE (arg));
+                 gimple_assign_set_rhs1 (parcopy_stmt, arg);
+               }
            }
          else
            {
@@ -3240,9 +3345,11 @@ expand_omp_taskreg (struct omp_region *region)
                 definition of the argument.  That should not be defined now,
                 since the argument is not used uninitialized.  */
              gcc_assert (gimple_default_def (cfun, arg) == NULL);
-             narg = make_ssa_name (arg, build_empty_stmt ());
+             narg = make_ssa_name (arg, gimple_build_nop ());
              set_default_def (arg, narg);
-             GIMPLE_STMT_OPERAND (parcopy_stmt, 1) = narg;
+             /* ?? Is setting the subcode really necessary ??  */
+             gimple_omp_set_subcode (parcopy_stmt, TREE_CODE (narg));
+             gimple_assign_set_rhs1 (parcopy_stmt, narg);
              update_stmt (parcopy_stmt);
            }
        }
@@ -3250,33 +3357,34 @@ expand_omp_taskreg (struct omp_region *region)
       /* Declare local variables needed in CHILD_CFUN.  */
       block = DECL_INITIAL (child_fn);
       BLOCK_VARS (block) = list2chain (child_cfun->local_decls);
-      DECL_SAVED_TREE (child_fn) = bb_stmt_list (single_succ (entry_bb));
+      DECL_SAVED_TREE (child_fn) = NULL;
+      gimple_set_body (child_fn, bb_seq (single_succ (entry_bb)));
       TREE_USED (block) = 1;
 
       /* Reset DECL_CONTEXT on function arguments.  */
       for (t = DECL_ARGUMENTS (child_fn); t; t = TREE_CHAIN (t))
        DECL_CONTEXT (t) = child_fn;
 
-      /* Split ENTRY_BB at OMP_PARALLEL or OMP_TASK, so that it can be
-        moved to the child function.  */
-      si = bsi_last (entry_bb);
-      t = bsi_stmt (si);
-      gcc_assert (t && (TREE_CODE (t) == OMP_PARALLEL
-                       || TREE_CODE (t) == OMP_TASK));
-      bsi_remove (&si, true);
-      e = split_block (entry_bb, t);
+      /* Split ENTRY_BB at GIMPLE_OMP_PARALLEL or GIMPLE_OMP_TASK,
+        so that it can be moved to the child function.  */
+      gsi = gsi_last_bb (entry_bb);
+      stmt = gsi_stmt (gsi);
+      gcc_assert (stmt && (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
+                          || gimple_code (stmt) == GIMPLE_OMP_TASK));
+      gsi_remove (&gsi, true);
+      e = split_block (entry_bb, stmt);
       entry_bb = e->dest;
       single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
 
-      /* Convert OMP_RETURN into a RETURN_EXPR.  */
+      /* Convert GIMPLE_OMP_RETURN into a RETURN_EXPR.  */
       if (exit_bb)
        {
-         si = bsi_last (exit_bb);
-         gcc_assert (!bsi_end_p (si)
-                     && TREE_CODE (bsi_stmt (si)) == OMP_RETURN);
-         t = build1 (RETURN_EXPR, void_type_node, NULL);
-         bsi_insert_after (&si, t, BSI_SAME_STMT);
-         bsi_remove (&si, true);
+         gsi = gsi_last_bb (exit_bb);
+         gcc_assert (!gsi_end_p (gsi)
+                     && gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_RETURN);
+         stmt = gimple_build_return (NULL);
+         gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
+         gsi_remove (&gsi, true);
        }
 
       /* Move the parallel region into CHILD_CFUN.  */
@@ -3291,7 +3399,7 @@ expand_omp_taskreg (struct omp_region *region)
          block = NULL_TREE;
        }
       else
-       block = TREE_BLOCK (entry_stmt);
+       block = gimple_block (entry_stmt);
 
       new_bb = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb, block);
       if (exit_bb)
@@ -3328,7 +3436,7 @@ expand_omp_taskreg (struct omp_region *region)
 
          current_function_decl = child_fn;
          FOR_EACH_BB (bb)
-           changed |= tree_purge_dead_eh_edges (bb);
+           changed |= gimple_purge_dead_eh_edges (bb);
          if (changed)
            cleanup_tree_cfg ();
          current_function_decl = save_current;
@@ -3337,7 +3445,7 @@ expand_omp_taskreg (struct omp_region *region)
     }
   
   /* Emit a library call to launch the children threads.  */
-  if (TREE_CODE (entry_stmt) == OMP_PARALLEL)
+  if (gimple_code (entry_stmt) == GIMPLE_OMP_PARALLEL)
     expand_parallel_call (region, new_bb, entry_stmt, ws_args);
   else
     expand_task_call (new_bb, entry_stmt);
@@ -3431,11 +3539,12 @@ expand_omp_for_generic (struct omp_region *region,
                        enum built_in_function start_fn,
                        enum built_in_function next_fn)
 {
-  tree type, istart0, iend0, iend, phi;
+  tree type, istart0, iend0, iend;
   tree t, vmain, vback, bias = NULL_TREE;
   basic_block entry_bb, cont_bb, exit_bb, l0_bb, l1_bb, collapse_bb;
   basic_block l2_bb = NULL, l3_bb = NULL;
-  block_stmt_iterator si;
+  gimple_stmt_iterator gsi;
+  gimple stmt;
   bool in_combined_parallel = is_combined_parallel (region);
   bool broken_loop = region->cont == NULL;
   edge e, ne;
@@ -3499,9 +3608,9 @@ expand_omp_for_generic (struct omp_region *region,
   l3_bb = BRANCH_EDGE (entry_bb)->dest;
   exit_bb = region->exit;
 
-  si = bsi_last (entry_bb);
+  gsi = gsi_last_bb (entry_bb);
 
-  gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_FOR);
+  gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
   if (fd->collapse > 1)
     {
       /* collapsed loops need work for expansion in SSA form.  */
@@ -3536,21 +3645,23 @@ expand_omp_for_generic (struct omp_region *region,
          else
            {
              counts[i] = create_tmp_var (type, ".count");
-             t = build_gimple_modify_stmt (counts[i], t);
-             force_gimple_operand_bsi (&si, t, true, NULL_TREE,
-                                       true, BSI_SAME_STMT);
+             t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
+                                           true, GSI_SAME_STMT);
+             stmt = gimple_build_assign (counts[i], t);
+             gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
            }
          if (SSA_VAR_P (fd->loop.n2))
            {
              if (i == 0)
-               t = build_gimple_modify_stmt (fd->loop.n2, counts[0]);
+               t = counts[0];
              else
                {
                  t = fold_build2 (MULT_EXPR, type, fd->loop.n2, counts[i]);
-                 t = build_gimple_modify_stmt (fd->loop.n2, t);
+                 t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
+                                               true, GSI_SAME_STMT);
                }
-             force_gimple_operand_bsi (&si, t, true, NULL_TREE,
-                                       true, BSI_SAME_STMT);
+             stmt = gimple_build_assign (fd->loop.n2, t);
+             gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
            }
        }
     }
@@ -3615,41 +3726,38 @@ expand_omp_for_generic (struct omp_region *region,
   if (TREE_TYPE (t) != boolean_type_node)
     t = fold_build2 (NE_EXPR, boolean_type_node,
                     t, build_int_cst (TREE_TYPE (t), 0));
-  t = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
-                               true, BSI_SAME_STMT);
-  t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
-  bsi_insert_after (&si, t, BSI_SAME_STMT);
+  t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
+                               true, GSI_SAME_STMT);
+  gsi_insert_after (&gsi, gimple_build_cond_empty (t), GSI_SAME_STMT);
 
-  /* Remove the OMP_FOR statement.  */
-  bsi_remove (&si, true);
+  /* Remove the GIMPLE_OMP_FOR statement.  */
+  gsi_remove (&gsi, true);
 
   /* Iteration setup for sequential loop goes in L0_BB.  */
-  si = bsi_start (l0_bb);
+  gsi = gsi_start_bb (l0_bb);
   if (bias)
     t = fold_convert (type, fold_build2 (MINUS_EXPR, fd->iter_type,
                                         istart0, bias));
   else
     t = fold_convert (type, istart0);
-  t = force_gimple_operand_bsi (&si, t, false, NULL_TREE,
-                               false, BSI_CONTINUE_LINKING);
-  t = build_gimple_modify_stmt (fd->loop.v, t);
-  bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
-  if (gimple_in_ssa_p (cfun))
-    SSA_NAME_DEF_STMT (fd->loop.v) = t;
+  t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
+                               false, GSI_CONTINUE_LINKING);
+  stmt = gimple_build_assign (fd->loop.v, t);
+  gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
 
   if (bias)
     t = fold_convert (type, fold_build2 (MINUS_EXPR, fd->iter_type,
                                         iend0, bias));
   else
     t = fold_convert (type, iend0);
-  iend = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
-                                  false, BSI_CONTINUE_LINKING);
+  iend = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
+                                  false, GSI_CONTINUE_LINKING);
   if (fd->collapse > 1)
     {
       tree tem = create_tmp_var (type, ".tem");
 
-      t = build_gimple_modify_stmt (tem, fd->loop.v);
-      bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
+      stmt = gimple_build_assign (tem, fd->loop.v);
+      gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
       for (i = fd->collapse - 1; i >= 0; i--)
        {
          tree vtype = TREE_TYPE (fd->loops[i].v), itype;
@@ -3664,15 +3772,17 @@ expand_omp_for_generic (struct omp_region *region,
                             fd->loops[i].n1, fold_convert (sizetype, t));
          else
            t = fold_build2 (PLUS_EXPR, itype, fd->loops[i].n1, t);
-         t = build_gimple_modify_stmt (fd->loops[i].v, t);
-         force_gimple_operand_bsi (&si, t, true, NULL_TREE,
-                                   false, BSI_CONTINUE_LINKING);
+         t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
+                                       false, GSI_CONTINUE_LINKING);
+         stmt = gimple_build_assign (fd->loops[i].v, t);
+         gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
          if (i != 0)
            {
              t = fold_build2 (TRUNC_DIV_EXPR, type, tem, counts[i]);
-             t = build_gimple_modify_stmt (tem, t);
-             force_gimple_operand_bsi (&si, t, true, NULL_TREE,
-                                       false, BSI_CONTINUE_LINKING);
+             t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
+                                           false, GSI_CONTINUE_LINKING);
+             stmt = gimple_build_assign (tem, t);
+             gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
            }
        }
     }
@@ -3681,30 +3791,28 @@ expand_omp_for_generic (struct omp_region *region,
     {
       /* Code to control the increment and predicate for the sequential
         loop goes in the CONT_BB.  */
-      si = bsi_last (cont_bb);
-      t = bsi_stmt (si);
-      gcc_assert (TREE_CODE (t) == OMP_CONTINUE);
-      vmain = TREE_OPERAND (t, 1);
-      vback = TREE_OPERAND (t, 0);
+      gsi = gsi_last_bb (cont_bb);
+      stmt = gsi_stmt (gsi);
+      gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
+      vmain = gimple_omp_continue_control_use (stmt);
+      vback = gimple_omp_continue_control_def (stmt);
 
       if (POINTER_TYPE_P (type))
        t = fold_build2 (POINTER_PLUS_EXPR, type, vmain,
                         fold_convert (sizetype, fd->loop.step));
       else
        t = fold_build2 (PLUS_EXPR, type, vmain, fd->loop.step);
-      t = force_gimple_operand_bsi (&si, t, false, NULL_TREE,
-                                   true, BSI_SAME_STMT);
-      t = build_gimple_modify_stmt (vback, t);
-      bsi_insert_before (&si, t, BSI_SAME_STMT);
-      if (gimple_in_ssa_p (cfun))
-       SSA_NAME_DEF_STMT (vback) = t;
-  
+      t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
+                                   true, GSI_SAME_STMT);
+      stmt = gimple_build_assign (vback, t);
+      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
+
       t = build2 (fd->loop.cond_code, boolean_type_node, vback, iend);
-      t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
-      bsi_insert_before (&si, t, BSI_SAME_STMT);
+      stmt = gimple_build_cond_empty (t);
+      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
 
-      /* Remove OMP_CONTINUE.  */
-      bsi_remove (&si, true);
+      /* Remove GIMPLE_OMP_CONTINUE.  */
+      gsi_remove (&gsi, true);
 
       if (fd->collapse > 1)
        {
@@ -3716,17 +3824,18 @@ expand_omp_for_generic (struct omp_region *region,
              tree vtype = TREE_TYPE (fd->loops[i].v);
 
              bb = create_empty_bb (last_bb);
-             si = bsi_start (bb);
+             gsi = gsi_start_bb (bb);
 
              if (i < fd->collapse - 1)
                {
                  e = make_edge (last_bb, bb, EDGE_FALSE_VALUE);
                  e->probability = REG_BR_PROB_BASE / 8;
 
-                 t = build_gimple_modify_stmt (fd->loops[i + 1].v,
-                                               fd->loops[i + 1].n1);
-                 force_gimple_operand_bsi (&si, t, true, NULL_TREE,
-                                           false, BSI_CONTINUE_LINKING);
+                 t = fd->loops[i + 1].n1;
+                 t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
+                                               false, GSI_CONTINUE_LINKING);
+                 stmt = gimple_build_assign (fd->loops[i + 1].v, t);
+                 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
                }
              else
                collapse_bb = bb;
@@ -3740,19 +3849,20 @@ expand_omp_for_generic (struct omp_region *region,
              else
                t = fold_build2 (PLUS_EXPR, vtype, fd->loops[i].v,
                                 fd->loops[i].step);
-             t = build_gimple_modify_stmt (fd->loops[i].v, t);
-             force_gimple_operand_bsi (&si, t, true, NULL_TREE,
-                                       false, BSI_CONTINUE_LINKING);
+             t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
+                                           false, GSI_CONTINUE_LINKING);
+             stmt = gimple_build_assign (fd->loops[i].v, t);
+             gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
 
              if (i > 0)
                {
+                 t = fd->loops[i].n2;
+                 t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
+                                               false, GSI_CONTINUE_LINKING);
                  t = fold_build2 (fd->loops[i].cond_code, boolean_type_node,
-                                  fd->loops[i].v, fd->loops[i].n2);
-                 t = force_gimple_operand_bsi (&si, t, false, NULL_TREE,
-                                               false, BSI_CONTINUE_LINKING);
-                 t = build3 (COND_EXPR, void_type_node, t,
-                             NULL_TREE, NULL_TREE);
-                 bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
+                                  fd->loops[i].v, t);
+                 stmt = gimple_build_cond_empty (t);
+                 gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
                  e = make_edge (bb, l1_bb, EDGE_TRUE_VALUE);
                  e->probability = REG_BR_PROB_BASE * 7 / 8;
                }
@@ -3763,29 +3873,29 @@ expand_omp_for_generic (struct omp_region *region,
        }
 
       /* Emit code to get the next parallel iteration in L2_BB.  */
-      si = bsi_start (l2_bb);
+      gsi = gsi_start_bb (l2_bb);
 
       t = build_call_expr (built_in_decls[next_fn], 2,
                           build_fold_addr_expr (istart0),
                           build_fold_addr_expr (iend0));
+      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
+                                   false, GSI_CONTINUE_LINKING);
       if (TREE_TYPE (t) != boolean_type_node)
        t = fold_build2 (NE_EXPR, boolean_type_node,
                         t, build_int_cst (TREE_TYPE (t), 0));
-      t = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
-                                   false, BSI_CONTINUE_LINKING);
-      t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
-      bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
+      stmt = gimple_build_cond_empty (t);
+      gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
     }
 
   /* Add the loop cleanup function.  */
-  si = bsi_last (exit_bb);
-  if (OMP_RETURN_NOWAIT (bsi_stmt (si)))
+  gsi = gsi_last_bb (exit_bb);
+  if (gimple_omp_return_nowait_p (gsi_stmt (gsi)))
     t = built_in_decls[BUILT_IN_GOMP_LOOP_END_NOWAIT];
   else
     t = built_in_decls[BUILT_IN_GOMP_LOOP_END];
-  t = build_call_expr (t, 0);
-  bsi_insert_after (&si, t, BSI_SAME_STMT);
-  bsi_remove (&si, true);
+  stmt = gimple_build_call (t, 0);
+  gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
+  gsi_remove (&gsi, true);
 
   /* Connect the new blocks.  */
   find_edge (entry_bb, l0_bb)->flags = EDGE_TRUE_VALUE;
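
gimple_build_cond_empty, used for every conditional emitted in these hunks, plays the role of the old three-operand COND_EXPR with NULL_TREE arms: it wraps a boolean tree in a GIMPLE_COND with no explicit branch labels, the true/false control flow being carried by the successor edges wired up afterwards.  Side by side, with t a placeholder condition and si/gsi the pre- and post-tuples iterators:

    /* Pre-tuples: wrap the condition in a COND_EXPR with empty arms.  */
    cond = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
    bsi_insert_after (&si, cond, BSI_CONTINUE_LINKING);

    /* Tuples: build the GIMPLE_COND directly; the EDGE_TRUE_VALUE and
       EDGE_FALSE_VALUE successor edges supply the branch targets.  */
    stmt = gimple_build_cond_empty (t);
    gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
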
@@ -3793,12 +3903,18 @@ expand_omp_for_generic (struct omp_region *region,
 
   if (!broken_loop)
     {
+      gimple_seq phis;
+
       e = find_edge (cont_bb, l3_bb);
       ne = make_edge (l2_bb, l3_bb, EDGE_FALSE_VALUE);
 
-      for (phi = phi_nodes (l3_bb); phi; phi = PHI_CHAIN (phi))
-       SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, ne),
-                PHI_ARG_DEF_FROM_EDGE (phi, e));
+      phis = phi_nodes (l3_bb);
+      for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
+       {
+         gimple phi = gsi_stmt (gsi);
+         SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, ne),
+                  PHI_ARG_DEF_FROM_EDGE (phi, e));
+       }
       remove_edge (e);
 
       make_edge (cont_bb, l2_bb, EDGE_FALSE_VALUE);
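
The tree-level PHI_CHAIN walk above becomes an iteration over the block's PHI sequence with a gimple_stmt_iterator; schematically, with bb, e and ne as placeholders:

    gimple_seq phis = phi_nodes (bb);
    gimple_stmt_iterator psi;

    for (psi = gsi_start (phis); !gsi_end_p (psi); gsi_next (&psi))
      {
        gimple phi = gsi_stmt (psi);
        /* Copy the PHI argument coming in over edge E onto edge NE.  */
        SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, ne),
                 PHI_ARG_DEF_FROM_EDGE (phi, e));
      }

Later hunks use gsi_start_phis (bb) to start the same kind of walk directly from the basic block.
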
@@ -3868,7 +3984,8 @@ expand_omp_for_static_nochunk (struct omp_region *region,
   tree type, itype, vmain, vback;
   basic_block entry_bb, exit_bb, seq_start_bb, body_bb, cont_bb;
   basic_block fin_bb;
-  block_stmt_iterator si;
+  gimple_stmt_iterator gsi;
+  gimple stmt;
 
   itype = type = TREE_TYPE (fd->loop.v);
   if (POINTER_TYPE_P (type))
@@ -3886,28 +4003,28 @@ expand_omp_for_static_nochunk (struct omp_region *region,
   exit_bb = region->exit;
 
   /* Iteration space partitioning goes in ENTRY_BB.  */
-  si = bsi_last (entry_bb);
-  gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_FOR);
+  gsi = gsi_last_bb (entry_bb);
+  gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
 
   t = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_NUM_THREADS], 0);
   t = fold_convert (itype, t);
-  nthreads = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
-                                      true, BSI_SAME_STMT);
+  nthreads = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
+                                      true, GSI_SAME_STMT);
   
   t = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM], 0);
   t = fold_convert (itype, t);
-  threadid = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
-                                      true, BSI_SAME_STMT);
+  threadid = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
+                                      true, GSI_SAME_STMT);
 
   fd->loop.n1
-    = force_gimple_operand_bsi (&si, fold_convert (type, fd->loop.n1),
-                               true, NULL_TREE, true, BSI_SAME_STMT);
+    = force_gimple_operand_gsi (&gsi, fold_convert (type, fd->loop.n1),
+                               true, NULL_TREE, true, GSI_SAME_STMT);
   fd->loop.n2
-    = force_gimple_operand_bsi (&si, fold_convert (itype, fd->loop.n2),
-                               true, NULL_TREE, true, BSI_SAME_STMT);
+    = force_gimple_operand_gsi (&gsi, fold_convert (itype, fd->loop.n2),
+                               true, NULL_TREE, true, GSI_SAME_STMT);
   fd->loop.step
-    = force_gimple_operand_bsi (&si, fold_convert (itype, fd->loop.step),
-                               true, NULL_TREE, true, BSI_SAME_STMT);
+    = force_gimple_operand_gsi (&gsi, fold_convert (itype, fd->loop.step),
+                               true, NULL_TREE, true, GSI_SAME_STMT);
 
   t = build_int_cst (itype, (fd->loop.cond_code == LT_EXPR ? -1 : 1));
   t = fold_build2 (PLUS_EXPR, itype, fd->loop.step, t);
@@ -3920,32 +4037,31 @@ expand_omp_for_static_nochunk (struct omp_region *region,
   else
     t = fold_build2 (TRUNC_DIV_EXPR, itype, t, fd->loop.step);
   t = fold_convert (itype, t);
-  n = force_gimple_operand_bsi (&si, t, true, NULL_TREE, true, BSI_SAME_STMT);
+  n = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE, true, GSI_SAME_STMT);
 
   t = fold_build2 (TRUNC_DIV_EXPR, itype, n, nthreads);
-  q = force_gimple_operand_bsi (&si, t, true, NULL_TREE, true, BSI_SAME_STMT);
+  q = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE, true, GSI_SAME_STMT);
 
   t = fold_build2 (MULT_EXPR, itype, q, nthreads);
   t = fold_build2 (NE_EXPR, itype, t, n);
   t = fold_build2 (PLUS_EXPR, itype, q, t);
-  q = force_gimple_operand_bsi (&si, t, true, NULL_TREE, true, BSI_SAME_STMT);
+  q = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE, true, GSI_SAME_STMT);
 
   t = build2 (MULT_EXPR, itype, q, threadid);
-  s0 = force_gimple_operand_bsi (&si, t, true, NULL_TREE, true, BSI_SAME_STMT);
+  s0 = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE, true, GSI_SAME_STMT);
 
   t = fold_build2 (PLUS_EXPR, itype, s0, q);
   t = fold_build2 (MIN_EXPR, itype, t, n);
-  e0 = force_gimple_operand_bsi (&si, t, true, NULL_TREE, true, BSI_SAME_STMT);
+  e0 = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE, true, GSI_SAME_STMT);
 
   t = build2 (GE_EXPR, boolean_type_node, s0, e0);
-  t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
-  bsi_insert_before (&si, t, BSI_SAME_STMT);
+  gsi_insert_before (&gsi, gimple_build_cond_empty (t), GSI_SAME_STMT);
 
-  /* Remove the OMP_FOR statement.  */
-  bsi_remove (&si, true);
+  /* Remove the GIMPLE_OMP_FOR statement.  */
+  gsi_remove (&gsi, true);
 
   /* Setup code for sequential iteration goes in SEQ_START_BB.  */
-  si = bsi_start (seq_start_bb);
+  gsi = gsi_start_bb (seq_start_bb);
 
   t = fold_convert (itype, s0);
   t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
@@ -3954,13 +4070,11 @@ expand_omp_for_static_nochunk (struct omp_region *region,
                     fold_convert (sizetype, t));
   else
     t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
-  t = force_gimple_operand_bsi (&si, t, false, NULL_TREE,
-                               false, BSI_CONTINUE_LINKING);
-  t = build_gimple_modify_stmt (fd->loop.v, t);
-  bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
-  if (gimple_in_ssa_p (cfun))
-    SSA_NAME_DEF_STMT (fd->loop.v) = t;
-
+  t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
+                               false, GSI_CONTINUE_LINKING);
+  stmt = gimple_build_assign (fd->loop.v, t);
+  gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
   t = fold_convert (itype, e0);
   t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
   if (POINTER_TYPE_P (type))
@@ -3968,41 +4082,39 @@ expand_omp_for_static_nochunk (struct omp_region *region,
                     fold_convert (sizetype, t));
   else
     t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
-  e = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
-                               false, BSI_CONTINUE_LINKING);
+  e = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
+                               false, GSI_CONTINUE_LINKING);
 
-  /* The code controlling the sequential loop replaces the OMP_CONTINUE.  */
-  si = bsi_last (cont_bb);
-  t = bsi_stmt (si);
-  gcc_assert (TREE_CODE (t) == OMP_CONTINUE);
-  vmain = TREE_OPERAND (t, 1);
-  vback = TREE_OPERAND (t, 0);
+  /* The code controlling the sequential loop replaces the
+     GIMPLE_OMP_CONTINUE.  */
+  gsi = gsi_last_bb (cont_bb);
+  stmt = gsi_stmt (gsi);
+  gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
+  vmain = gimple_omp_continue_control_use (stmt);
+  vback = gimple_omp_continue_control_def (stmt);
 
   if (POINTER_TYPE_P (type))
     t = fold_build2 (POINTER_PLUS_EXPR, type, vmain,
                     fold_convert (sizetype, fd->loop.step));
   else
     t = fold_build2 (PLUS_EXPR, type, vmain, fd->loop.step);
-  t = force_gimple_operand_bsi (&si, t, false, NULL_TREE,
-                               true, BSI_SAME_STMT);
-  t = build_gimple_modify_stmt (vback, t);
-  bsi_insert_before (&si, t, BSI_SAME_STMT);
-  if (gimple_in_ssa_p (cfun))
-    SSA_NAME_DEF_STMT (vback) = t;
+  t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
+                               true, GSI_SAME_STMT);
+  stmt = gimple_build_assign (vback, t);
+  gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
 
   t = build2 (fd->loop.cond_code, boolean_type_node, vback, e);
-  t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
-  bsi_insert_before (&si, t, BSI_SAME_STMT);
+  gsi_insert_before (&gsi, gimple_build_cond_empty (t), GSI_SAME_STMT);
 
-  /* Remove the OMP_CONTINUE statement.  */
-  bsi_remove (&si, true);
+  /* Remove the GIMPLE_OMP_CONTINUE statement.  */
+  gsi_remove (&gsi, true);
 
-  /* Replace the OMP_RETURN with a barrier, or nothing.  */
-  si = bsi_last (exit_bb);
-  if (!OMP_RETURN_NOWAIT (bsi_stmt (si)))
-    force_gimple_operand_bsi (&si, build_omp_barrier (), false, NULL_TREE,
-                             false, BSI_SAME_STMT);
-  bsi_remove (&si, true);
+  /* Replace the GIMPLE_OMP_RETURN with a barrier, or nothing.  */
+  gsi = gsi_last_bb (exit_bb);
+  if (!gimple_omp_return_nowait_p (gsi_stmt (gsi)))
+    force_gimple_operand_gsi (&gsi, build_omp_barrier (), false, NULL_TREE,
+                             false, GSI_SAME_STMT);
+  gsi_remove (&gsi, true);
 
   /* Connect all the blocks.  */
   find_edge (entry_bb, seq_start_bb)->flags = EDGE_FALSE_VALUE;
@@ -4057,16 +4169,16 @@ expand_omp_for_static_nochunk (struct omp_region *region,
 */
 
 static void
-expand_omp_for_static_chunk (struct omp_region *region,
-                            struct omp_for_data *fd)
+expand_omp_for_static_chunk (struct omp_region *region, struct omp_for_data *fd)
 {
-  tree n, s0, e0, e, t, phi, nphi, args;
+  tree n, s0, e0, e, t;
   tree trip_var, trip_init, trip_main, trip_back, nthreads, threadid;
-  tree type, itype, cont, v_main, v_back, v_extra;
+  tree type, itype, v_main, v_back, v_extra;
   basic_block entry_bb, exit_bb, body_bb, seq_start_bb, iter_part_bb;
   basic_block trip_update_bb, cont_bb, fin_bb;
-  block_stmt_iterator si;
-  edge se, re, ene;
+  gimple_stmt_iterator si;
+  gimple stmt;
+  edge se;
 
   itype = type = TREE_TYPE (fd->loop.v);
   if (POINTER_TYPE_P (type))
@@ -4089,31 +4201,31 @@ expand_omp_for_static_chunk (struct omp_region *region,
   exit_bb = region->exit;
 
   /* Trip and adjustment setup goes in ENTRY_BB.  */
-  si = bsi_last (entry_bb);
-  gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_FOR);
+  si = gsi_last_bb (entry_bb);
+  gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_FOR);
 
   t = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_NUM_THREADS], 0);
   t = fold_convert (itype, t);
-  nthreads = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
-                                      true, BSI_SAME_STMT);
+  nthreads = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
+                                      true, GSI_SAME_STMT);
   
   t = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM], 0);
   t = fold_convert (itype, t);
-  threadid = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
-                                      true, BSI_SAME_STMT);
+  threadid = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
+                                      true, GSI_SAME_STMT);
 
   fd->loop.n1
-    = force_gimple_operand_bsi (&si, fold_convert (type, fd->loop.n1),
-                               true, NULL_TREE, true, BSI_SAME_STMT);
+    = force_gimple_operand_gsi (&si, fold_convert (type, fd->loop.n1),
+                               true, NULL_TREE, true, GSI_SAME_STMT);
   fd->loop.n2
-    = force_gimple_operand_bsi (&si, fold_convert (itype, fd->loop.n2),
-                               true, NULL_TREE, true, BSI_SAME_STMT);
+    = force_gimple_operand_gsi (&si, fold_convert (itype, fd->loop.n2),
+                               true, NULL_TREE, true, GSI_SAME_STMT);
   fd->loop.step
-    = force_gimple_operand_bsi (&si, fold_convert (itype, fd->loop.step),
-                               true, NULL_TREE, true, BSI_SAME_STMT);
+    = force_gimple_operand_gsi (&si, fold_convert (itype, fd->loop.step),
+                               true, NULL_TREE, true, GSI_SAME_STMT);
   fd->chunk_size
-    = force_gimple_operand_bsi (&si, fold_convert (itype, fd->chunk_size),
-                               true, NULL_TREE, true, BSI_SAME_STMT);
+    = force_gimple_operand_gsi (&si, fold_convert (itype, fd->chunk_size),
+                               true, NULL_TREE, true, GSI_SAME_STMT);
 
   t = build_int_cst (itype, (fd->loop.cond_code == LT_EXPR ? -1 : 1));
   t = fold_build2 (PLUS_EXPR, itype, fd->loop.step, t);
@@ -4126,16 +4238,16 @@ expand_omp_for_static_chunk (struct omp_region *region,
   else
     t = fold_build2 (TRUNC_DIV_EXPR, itype, t, fd->loop.step);
   t = fold_convert (itype, t);
-  n = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
-                               true, BSI_SAME_STMT);
+  n = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
+                               true, GSI_SAME_STMT);
 
   trip_var = create_tmp_var (itype, ".trip");
   if (gimple_in_ssa_p (cfun))
     {
       add_referenced_var (trip_var);
-      trip_init = make_ssa_name (trip_var, NULL_TREE);
-      trip_main = make_ssa_name (trip_var, NULL_TREE);
-      trip_back = make_ssa_name (trip_var, NULL_TREE);
+      trip_init = make_ssa_name (trip_var, NULL);
+      trip_main = make_ssa_name (trip_var, NULL);
+      trip_back = make_ssa_name (trip_var, NULL);
     }
   else
     {
@@ -4144,10 +4256,8 @@ expand_omp_for_static_chunk (struct omp_region *region,
       trip_back = trip_var;
     }
 
-  t = build_gimple_modify_stmt (trip_init, build_int_cst (itype, 0));
-  bsi_insert_before (&si, t, BSI_SAME_STMT);
-  if (gimple_in_ssa_p (cfun))
-    SSA_NAME_DEF_STMT (trip_init) = t;
+  stmt = gimple_build_assign (trip_init, build_int_cst (itype, 0));
+  gsi_insert_before (&si, stmt, GSI_SAME_STMT);
 
   t = fold_build2 (MULT_EXPR, itype, threadid, fd->chunk_size);
   t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
@@ -4156,32 +4266,31 @@ expand_omp_for_static_chunk (struct omp_region *region,
                     fold_convert (sizetype, t));
   else
     t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
-  v_extra = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
-                                     true, BSI_SAME_STMT);
+  v_extra = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
+                                     true, GSI_SAME_STMT);
 
-  /* Remove the OMP_FOR.  */
-  bsi_remove (&si, true);
+  /* Remove the GIMPLE_OMP_FOR.  */
+  gsi_remove (&si, true);
 
   /* Iteration space partitioning goes in ITER_PART_BB.  */
-  si = bsi_last (iter_part_bb);
+  si = gsi_last_bb (iter_part_bb);
 
   t = fold_build2 (MULT_EXPR, itype, trip_main, nthreads);
   t = fold_build2 (PLUS_EXPR, itype, t, threadid);
   t = fold_build2 (MULT_EXPR, itype, t, fd->chunk_size);
-  s0 = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
-                                false, BSI_CONTINUE_LINKING);
+  s0 = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
+                                false, GSI_CONTINUE_LINKING);
 
   t = fold_build2 (PLUS_EXPR, itype, s0, fd->chunk_size);
   t = fold_build2 (MIN_EXPR, itype, t, n);
-  e0 = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
-                                false, BSI_CONTINUE_LINKING);
+  e0 = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
+                                false, GSI_CONTINUE_LINKING);
 
   t = build2 (LT_EXPR, boolean_type_node, s0, n);
-  t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
-  bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
+  gsi_insert_after (&si, gimple_build_cond_empty (t), GSI_CONTINUE_LINKING);
 
   /* Setup code for sequential iteration goes in SEQ_START_BB.  */
-  si = bsi_start (seq_start_bb);
+  si = gsi_start_bb (seq_start_bb);
 
   t = fold_convert (itype, s0);
   t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
@@ -4190,12 +4299,10 @@ expand_omp_for_static_chunk (struct omp_region *region,
                     fold_convert (sizetype, t));
   else
     t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
-  t = force_gimple_operand_bsi (&si, t, false, NULL_TREE,
-                               false, BSI_CONTINUE_LINKING);
-  t = build_gimple_modify_stmt (fd->loop.v, t);
-  bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
-  if (gimple_in_ssa_p (cfun))
-    SSA_NAME_DEF_STMT (fd->loop.v) = t;
+  t = force_gimple_operand_gsi (&si, t, false, NULL_TREE,
+                               false, GSI_CONTINUE_LINKING);
+  stmt = gimple_build_assign (fd->loop.v, t);
+  gsi_insert_after (&si, stmt, GSI_CONTINUE_LINKING);
 
   t = fold_convert (itype, e0);
   t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
@@ -4204,50 +4311,45 @@ expand_omp_for_static_chunk (struct omp_region *region,
                     fold_convert (sizetype, t));
   else
     t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
-  e = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
-                               false, BSI_CONTINUE_LINKING);
+  e = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
+                               false, GSI_CONTINUE_LINKING);
 
   /* The code controlling the sequential loop goes in CONT_BB,
-     replacing the OMP_CONTINUE.  */
-  si = bsi_last (cont_bb);
-  cont = bsi_stmt (si);
-  gcc_assert (TREE_CODE (cont) == OMP_CONTINUE);
-  v_main = TREE_OPERAND (cont, 1);
-  v_back = TREE_OPERAND (cont, 0);
+     replacing the GIMPLE_OMP_CONTINUE.  */
+  si = gsi_last_bb (cont_bb);
+  stmt = gsi_stmt (si);
+  gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
+  v_main = gimple_omp_continue_control_use (stmt);
+  v_back = gimple_omp_continue_control_def (stmt);
 
   if (POINTER_TYPE_P (type))
     t = fold_build2 (POINTER_PLUS_EXPR, type, v_main,
                     fold_convert (sizetype, fd->loop.step));
   else
-    t = build2 (PLUS_EXPR, type, v_main, fd->loop.step);
-  t = build_gimple_modify_stmt (v_back, t);
-  bsi_insert_before (&si, t, BSI_SAME_STMT);
-  if (gimple_in_ssa_p (cfun))
-    SSA_NAME_DEF_STMT (v_back) = t;
+    t = fold_build2 (PLUS_EXPR, type, v_main, fd->loop.step);
+  stmt = gimple_build_assign (v_back, t);
+  gsi_insert_before (&si, stmt, GSI_SAME_STMT);
 
   t = build2 (fd->loop.cond_code, boolean_type_node, v_back, e);
-  t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
-  bsi_insert_before (&si, t, BSI_SAME_STMT);
+  gsi_insert_before (&si, gimple_build_cond_empty (t), GSI_SAME_STMT);
   
-  /* Remove OMP_CONTINUE.  */
-  bsi_remove (&si, true);
+  /* Remove GIMPLE_OMP_CONTINUE.  */
+  gsi_remove (&si, true);
 
   /* Trip update code goes into TRIP_UPDATE_BB.  */
-  si = bsi_start (trip_update_bb);
+  si = gsi_start_bb (trip_update_bb);
 
   t = build_int_cst (itype, 1);
   t = build2 (PLUS_EXPR, itype, trip_main, t);
-  t = build_gimple_modify_stmt (trip_back, t);
-  bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
-  if (gimple_in_ssa_p (cfun))
-    SSA_NAME_DEF_STMT (trip_back) = t;
+  stmt = gimple_build_assign (trip_back, t);
+  gsi_insert_after (&si, stmt, GSI_CONTINUE_LINKING);
 
-  /* Replace the OMP_RETURN with a barrier, or nothing.  */
-  si = bsi_last (exit_bb);
-  if (!OMP_RETURN_NOWAIT (bsi_stmt (si)))
-    force_gimple_operand_bsi (&si, build_omp_barrier (), false, NULL_TREE,
-                             false, BSI_SAME_STMT);
-  bsi_remove (&si, true);
+  /* Replace the GIMPLE_OMP_RETURN with a barrier, or nothing.  */
+  si = gsi_last_bb (exit_bb);
+  if (!gimple_omp_return_nowait_p (gsi_stmt (si)))
+    force_gimple_operand_gsi (&si, build_omp_barrier (), false, NULL_TREE,
+                             false, GSI_SAME_STMT);
+  gsi_remove (&si, true);
 
   /* Connect the new blocks.  */
   find_edge (iter_part_bb, seq_start_bb)->flags = EDGE_TRUE_VALUE;
@@ -4260,21 +4362,30 @@ expand_omp_for_static_chunk (struct omp_region *region,
 
   if (gimple_in_ssa_p (cfun))
     {
+      gimple_stmt_iterator psi;
+      gimple phi;
+      edge re, ene;
+      edge_var_map_vector head;
+      edge_var_map *vm;
+      size_t i;
+
       /* When we redirect the edge from trip_update_bb to iter_part_bb, we
         remove arguments of the phi nodes in fin_bb.  We need to create
         appropriate phi nodes in iter_part_bb instead.  */
       se = single_pred_edge (fin_bb);
       re = single_succ_edge (trip_update_bb);
+      head = redirect_edge_var_map_vector (re);
       ene = single_succ_edge (entry_bb);
 
-      args = PENDING_STMT (re);
-      PENDING_STMT (re) = NULL_TREE;
-      for (phi = phi_nodes (fin_bb);
-          phi && args;
-          phi = PHI_CHAIN (phi), args = TREE_CHAIN (args))
+      psi = gsi_start_phis (fin_bb);
+      for (i = 0; !gsi_end_p (psi) && VEC_iterate (edge_var_map, head, i, vm);
+          gsi_next (&psi), ++i)
        {
-         t = PHI_RESULT (phi);
-         gcc_assert (t == TREE_PURPOSE (args));
+         gimple nphi;
+
+         phi = gsi_stmt (psi);
+         t = gimple_phi_result (phi);
+         gcc_assert (t == redirect_edge_var_map_result (vm));
          nphi = create_phi_node (t, iter_part_bb);
          SSA_NAME_DEF_STMT (t) = nphi;
 
@@ -4284,11 +4395,17 @@ expand_omp_for_static_chunk (struct omp_region *region,
          if (t == fd->loop.v)
            t = v_extra;
          add_phi_arg (nphi, t, ene);
-         add_phi_arg (nphi, TREE_VALUE (args), re);
+         add_phi_arg (nphi, redirect_edge_var_map_def (vm), re);
+       }
+      gcc_assert (!gsi_end_p (psi) && i == VEC_length (edge_var_map, head));
+      redirect_edge_var_map_clear (re);
+      while (1)
+       {
+         psi = gsi_start_phis (fin_bb);
+         if (gsi_end_p (psi))
+           break;
+         remove_phi_node (&psi, false);
        }
-      gcc_assert (!phi && !args);
-      while ((phi = phi_nodes (fin_bb)) != NULL_TREE)
-       remove_phi_node (phi, NULL_TREE, false);
 
       /* Make phi node for trip.  */
       phi = create_phi_node (trip_main, iter_part_bb);
@@ -4319,9 +4436,8 @@ expand_omp_for (struct omp_region *region)
 
   loops
     = (struct omp_for_data_loop *)
-      alloca (TREE_VEC_LENGTH (OMP_FOR_INIT (last_stmt (region->entry)))
+      alloca (gimple_omp_for_collapse (last_stmt (region->entry))
              * sizeof (struct omp_for_data_loop));
-
   extract_omp_for_data (last_stmt (region->entry), &fd, loops);
   region->sched_kind = fd.sched_kind;
 
@@ -4351,7 +4467,7 @@ expand_omp_for (struct omp_region *region)
 
       gcc_assert (fd.sched_kind != OMP_CLAUSE_SCHEDULE_AUTO);
       fn_index = (fd.sched_kind == OMP_CLAUSE_SCHEDULE_RUNTIME)
-                ? 3 : fd.sched_kind;
+                 ? 3 : fd.sched_kind;
       fn_index += fd.have_ordered * 4;
       start_ix = BUILT_IN_GOMP_LOOP_STATIC_START + fn_index;
       next_ix = BUILT_IN_GOMP_LOOP_STATIC_NEXT + fn_index;
@@ -4399,13 +4515,16 @@ expand_omp_for (struct omp_region *region)
 static void
 expand_omp_sections (struct omp_region *region)
 {
-  tree label_vec, l1, l2, t, u, sections_stmt, vin, vmain, vnext, cont;
-  unsigned i, casei, len;
+  tree t, u, vin = NULL, vmain, vnext, l1, l2;
+  VEC (tree,heap) *label_vec;
+  unsigned len;
   basic_block entry_bb, l0_bb, l1_bb, l2_bb, default_bb;
-  block_stmt_iterator si;
+  gimple_stmt_iterator si, switch_si;
+  gimple sections_stmt, stmt, cont;
   edge_iterator ei;
   edge e;
   struct omp_region *inner;
+  unsigned i, casei;
   bool exit_reachable = region->cont != NULL;
 
   gcc_assert (exit_reachable == (region->exit != NULL));
@@ -4416,50 +4535,55 @@ expand_omp_sections (struct omp_region *region)
   if (exit_reachable)
     {
       if (single_pred (l2_bb) == l0_bb)
-       l2 = tree_block_label (l2_bb);
+       l2 = gimple_block_label (l2_bb);
       else
        {
          /* This can happen if there are reductions.  */
          len = EDGE_COUNT (l0_bb->succs);
          gcc_assert (len > 0);
          e = EDGE_SUCC (l0_bb, len - 1);
-         si = bsi_last (e->dest);
+         si = gsi_last_bb (e->dest);
          l2 = NULL_TREE;
-         if (bsi_end_p (si) || TREE_CODE (bsi_stmt (si)) != OMP_SECTION)
-           l2 = tree_block_label (e->dest);
+         if (gsi_end_p (si)
+             || gimple_code (gsi_stmt (si)) != GIMPLE_OMP_SECTION)
+           l2 = gimple_block_label (e->dest);
          else
            FOR_EACH_EDGE (e, ei, l0_bb->succs)
              {
-               si = bsi_last (e->dest);
-               if (bsi_end_p (si) || TREE_CODE (bsi_stmt (si)) != OMP_SECTION)
+               si = gsi_last_bb (e->dest);
+               if (gsi_end_p (si)
+                   || gimple_code (gsi_stmt (si)) != GIMPLE_OMP_SECTION)
                  {
-                   l2 = tree_block_label (e->dest);
+                   l2 = gimple_block_label (e->dest);
                    break;
                  }
              }
        }
       default_bb = create_empty_bb (l1_bb->prev_bb);
-      l1 = tree_block_label (l1_bb);
+      l1 = gimple_block_label (l1_bb);
     }
   else
     {
       default_bb = create_empty_bb (l0_bb);
       l1 = NULL_TREE;
-      l2 = tree_block_label (default_bb);
+      l2 = gimple_block_label (default_bb);
     }
 
   /* We will build a switch() with enough cases for all the
-     OMP_SECTION regions, a '0' case to handle the end of more work
+     GIMPLE_OMP_SECTION regions, a '0' case to handle the end of more work
      and a default case to abort if something goes wrong.  */
   len = EDGE_COUNT (l0_bb->succs);
-  label_vec = make_tree_vec (len + 1);
+
+  /* Use VEC_quick_push on label_vec throughout, since we know the size
+     in advance.  */
+  label_vec = VEC_alloc (tree, heap, len);
 
   /* The call to GOMP_sections_start goes in ENTRY_BB, replacing the
-     OMP_SECTIONS statement.  */
-  si = bsi_last (entry_bb);
-  sections_stmt = bsi_stmt (si);
-  gcc_assert (TREE_CODE (sections_stmt) == OMP_SECTIONS);
-  vin = OMP_SECTIONS_CONTROL (sections_stmt);
+     GIMPLE_OMP_SECTIONS statement.  */
+  si = gsi_last_bb (entry_bb);
+  sections_stmt = gsi_stmt (si);
+  gcc_assert (gimple_code (sections_stmt) == GIMPLE_OMP_SECTIONS);
+  vin = gimple_omp_sections_control (sections_stmt);
   if (!is_combined_parallel (region))
     {
       /* If we are not inside a combined parallel+sections region,
@@ -4467,29 +4591,28 @@ expand_omp_sections (struct omp_region *region)
       t = build_int_cst (unsigned_type_node,
                         exit_reachable ? len - 1 : len);
       u = built_in_decls[BUILT_IN_GOMP_SECTIONS_START];
-      t = build_call_expr (u, 1, t);
+      stmt = gimple_build_call (u, 1, t);
     }
   else
     {
       /* Otherwise, call GOMP_sections_next.  */
       u = built_in_decls[BUILT_IN_GOMP_SECTIONS_NEXT];
-      t = build_call_expr (u, 0);
+      stmt = gimple_build_call (u, 0);
     }
-  t = build_gimple_modify_stmt (vin, t);
-  bsi_insert_after (&si, t, BSI_SAME_STMT);
-  if (gimple_in_ssa_p (cfun))
-    SSA_NAME_DEF_STMT (vin) = t;
-  bsi_remove (&si, true);
-
-  /* The switch() statement replacing OMP_SECTIONS_SWITCH goes in L0_BB.  */
-  si = bsi_last (l0_bb);
-  gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_SECTIONS_SWITCH);
+  gimple_call_set_lhs (stmt, vin);
+  gsi_insert_after (&si, stmt, GSI_SAME_STMT);
+  gsi_remove (&si, true);
+
+  /* The switch() statement replacing GIMPLE_OMP_SECTIONS_SWITCH goes in
+     L0_BB.  */
+  switch_si = gsi_last_bb (l0_bb);
+  gcc_assert (gimple_code (gsi_stmt (switch_si)) == GIMPLE_OMP_SECTIONS_SWITCH);
   if (exit_reachable)
     {
       cont = last_stmt (l1_bb);
-      gcc_assert (TREE_CODE (cont) == OMP_CONTINUE);
-      vmain = TREE_OPERAND (cont, 1);
-      vnext = TREE_OPERAND (cont, 0);
+      gcc_assert (gimple_code (cont) == GIMPLE_OMP_CONTINUE);
+      vmain = gimple_omp_continue_control_use (cont);
+      vnext = gimple_omp_continue_control_def (cont);
     }
   else
     {
@@ -4497,20 +4620,16 @@ expand_omp_sections (struct omp_region *region)
       vnext = NULL_TREE;
     }
 
-  t = build3 (SWITCH_EXPR, void_type_node, vmain, NULL, label_vec);
-  bsi_insert_after (&si, t, BSI_SAME_STMT);
-  bsi_remove (&si, true);
-
   i = 0;
   if (exit_reachable)
     {
       t = build3 (CASE_LABEL_EXPR, void_type_node,
                  build_int_cst (unsigned_type_node, 0), NULL, l2);
-      TREE_VEC_ELT (label_vec, 0) = t;
+      VEC_quick_push (tree, label_vec, t);
       i++;
     }
 
-  /* Convert each OMP_SECTION into a CASE_LABEL_EXPR.  */
+  /* Convert each GIMPLE_OMP_SECTION into a CASE_LABEL_EXPR.  */
   for (inner = region->inner, casei = 1;
        inner;
        inner = inner->next, i++, casei++)
@@ -4518,7 +4637,7 @@ expand_omp_sections (struct omp_region *region)
       basic_block s_entry_bb, s_exit_bb;
 
       /* Skip optional reduction region.  */
-      if (inner->type == OMP_ATOMIC_LOAD)
+      if (inner->type == GIMPLE_OMP_ATOMIC_LOAD)
        {
          --i;
          --casei;
@@ -4528,61 +4647,63 @@ expand_omp_sections (struct omp_region *region)
       s_entry_bb = inner->entry;
       s_exit_bb = inner->exit;
 
-      t = tree_block_label (s_entry_bb);
+      t = gimple_block_label (s_entry_bb);
       u = build_int_cst (unsigned_type_node, casei);
       u = build3 (CASE_LABEL_EXPR, void_type_node, u, NULL, t);
-      TREE_VEC_ELT (label_vec, i) = u;
+      VEC_quick_push (tree, label_vec, u);
 
-      si = bsi_last (s_entry_bb);
-      gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_SECTION);
-      gcc_assert (i < len || OMP_SECTION_LAST (bsi_stmt (si)));
-      bsi_remove (&si, true);
+      si = gsi_last_bb (s_entry_bb);
+      gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_SECTION);
+      gcc_assert (i < len || gimple_omp_section_last_p (gsi_stmt (si)));
+      gsi_remove (&si, true);
       single_succ_edge (s_entry_bb)->flags = EDGE_FALLTHRU;
 
       if (s_exit_bb == NULL)
        continue;
 
-      si = bsi_last (s_exit_bb);
-      gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_RETURN);
-      bsi_remove (&si, true);
+      si = gsi_last_bb (s_exit_bb);
+      gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_RETURN);
+      gsi_remove (&si, true);
 
       single_succ_edge (s_exit_bb)->flags = EDGE_FALLTHRU;
     }
 
   /* Error handling code goes in DEFAULT_BB.  */
-  t = tree_block_label (default_bb);
+  t = gimple_block_label (default_bb);
   u = build3 (CASE_LABEL_EXPR, void_type_node, NULL, NULL, t);
-  TREE_VEC_ELT (label_vec, len) = u;
   make_edge (l0_bb, default_bb, 0);
 
-  si = bsi_start (default_bb);
-  t = build_call_expr (built_in_decls[BUILT_IN_TRAP], 0);
-  bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
+  stmt = gimple_build_switch_vec (vmain, u, label_vec);
+  gsi_insert_after (&switch_si, stmt, GSI_SAME_STMT);
+  gsi_remove (&switch_si, true);
+  VEC_free (tree, heap, label_vec);
+
+  si = gsi_start_bb (default_bb);
+  stmt = gimple_build_call (built_in_decls[BUILT_IN_TRAP], 0);
+  gsi_insert_after (&si, stmt, GSI_CONTINUE_LINKING);
 
   if (exit_reachable)
     {
       /* Code to get the next section goes in L1_BB.  */
-      si = bsi_last (l1_bb);
-      gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_CONTINUE);
+      si = gsi_last_bb (l1_bb);
+      gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_CONTINUE);
 
-      t = build_call_expr (built_in_decls[BUILT_IN_GOMP_SECTIONS_NEXT], 0);
-      t = build_gimple_modify_stmt (vnext, t);
-      bsi_insert_after (&si, t, BSI_SAME_STMT);
-      if (gimple_in_ssa_p (cfun))
-       SSA_NAME_DEF_STMT (vnext) = t;
-      bsi_remove (&si, true);
+      stmt = gimple_build_call (built_in_decls[BUILT_IN_GOMP_SECTIONS_NEXT], 0);
+      gimple_call_set_lhs (stmt, vnext);
+      gsi_insert_after (&si, stmt, GSI_SAME_STMT);
+      gsi_remove (&si, true);
 
       single_succ_edge (l1_bb)->flags = EDGE_FALLTHRU;
 
-      /* Cleanup function replaces OMP_RETURN in EXIT_BB.  */
-      si = bsi_last (l2_bb);
-      if (OMP_RETURN_NOWAIT (bsi_stmt (si)))
+      /* Cleanup function replaces GIMPLE_OMP_RETURN in EXIT_BB.  */
+      si = gsi_last_bb (l2_bb);
+      if (gimple_omp_return_nowait_p (gsi_stmt (si)))
        t = built_in_decls[BUILT_IN_GOMP_SECTIONS_END_NOWAIT];
       else
        t = built_in_decls[BUILT_IN_GOMP_SECTIONS_END];
-      t = build_call_expr (t, 0);
-      bsi_insert_after (&si, t, BSI_SAME_STMT);
-      bsi_remove (&si, true);
+      stmt = gimple_build_call (t, 0);
+      gsi_insert_after (&si, stmt, GSI_SAME_STMT);
+      gsi_remove (&si, true);
     }
 
   set_immediate_dominator (CDI_DOMINATORS, default_bb, l0_bb);
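
The sections dispatch is now assembled as a GIMPLE_SWITCH tuple from a heap VEC of CASE_LABEL_EXPRs rather than a TREE_VEC hung off a SWITCH_EXPR.  In outline, with n, lab and default_case as placeholders:

    VEC (tree,heap) *labels = VEC_alloc (tree, heap, n);

    /* One CASE_LABEL_EXPR per section, pushed in dispatch order.  */
    VEC_quick_push (tree, labels,
                    build3 (CASE_LABEL_EXPR, void_type_node,
                            build_int_cst (unsigned_type_node, 1),
                            NULL, lab));

    /* Emit the switch on the control variable; DEFAULT_CASE catches
       out-of-range values.  The vector can be freed once the tuple
       has taken its own copy of the labels.  */
    stmt = gimple_build_switch_vec (vmain, default_case, labels);
    gsi_insert_after (&switch_si, stmt, GSI_SAME_STMT);
    VEC_free (tree, heap, labels);
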
@@ -4596,28 +4717,28 @@ static void
 expand_omp_single (struct omp_region *region)
 {
   basic_block entry_bb, exit_bb;
-  block_stmt_iterator si;
+  gimple_stmt_iterator si;
   bool need_barrier = false;
 
   entry_bb = region->entry;
   exit_bb = region->exit;
 
-  si = bsi_last (entry_bb);
+  si = gsi_last_bb (entry_bb);
   /* The terminal barrier at the end of a GOMP_single_copy sequence cannot
      be removed.  We need to ensure that the thread that entered the single
      does not exit before the data is copied out by the other threads.  */
-  if (find_omp_clause (OMP_SINGLE_CLAUSES (bsi_stmt (si)),
+  if (find_omp_clause (gimple_omp_single_clauses (gsi_stmt (si)),
                       OMP_CLAUSE_COPYPRIVATE))
     need_barrier = true;
-  gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_SINGLE);
-  bsi_remove (&si, true);
+  gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_SINGLE);
+  gsi_remove (&si, true);
   single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
 
-  si = bsi_last (exit_bb);
-  if (!OMP_RETURN_NOWAIT (bsi_stmt (si)) || need_barrier)
-    force_gimple_operand_bsi (&si, build_omp_barrier (), false, NULL_TREE,
-                             false, BSI_SAME_STMT);
-  bsi_remove (&si, true);
+  si = gsi_last_bb (exit_bb);
+  if (!gimple_omp_return_nowait_p (gsi_stmt (si)) || need_barrier)
+    force_gimple_operand_gsi (&si, build_omp_barrier (), false, NULL_TREE,
+                             false, GSI_SAME_STMT);
+  gsi_remove (&si, true);
   single_succ_edge (exit_bb)->flags = EDGE_FALLTHRU;
 }
 
@@ -4630,24 +4751,24 @@ static void
 expand_omp_synch (struct omp_region *region)
 {
   basic_block entry_bb, exit_bb;
-  block_stmt_iterator si;
+  gimple_stmt_iterator si;
 
   entry_bb = region->entry;
   exit_bb = region->exit;
 
-  si = bsi_last (entry_bb);
-  gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_SINGLE
-             || TREE_CODE (bsi_stmt (si)) == OMP_MASTER
-             || TREE_CODE (bsi_stmt (si)) == OMP_ORDERED
-             || TREE_CODE (bsi_stmt (si)) == OMP_CRITICAL);
-  bsi_remove (&si, true);
+  si = gsi_last_bb (entry_bb);
+  gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_SINGLE
+             || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_MASTER
+             || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ORDERED
+             || gimple_code (gsi_stmt (si)) == GIMPLE_OMP_CRITICAL);
+  gsi_remove (&si, true);
   single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
 
   if (exit_bb)
     {
-      si = bsi_last (exit_bb);
-      gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_RETURN);
-      bsi_remove (&si, true);
+      si = gsi_last_bb (exit_bb);
+      gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_RETURN);
+      gsi_remove (&si, true);
       single_succ_edge (exit_bb)->flags = EDGE_FALLTHRU;
     }
 }
@@ -4667,38 +4788,36 @@ expand_omp_atomic_fetch_op (basic_block load_bb,
   enum insn_code *optab;
   tree rhs;
   basic_block store_bb = single_succ (load_bb);
-  block_stmt_iterator bsi;
-  tree stmt;
+  gimple_stmt_iterator gsi;
+  gimple stmt;
 
   /* We expect to find the following sequences:
    
    load_bb:
-       OMP_ATOMIC_LOAD (tmp, mem)
+       GIMPLE_OMP_ATOMIC_LOAD (tmp, mem)
 
    store_bb:
        val = tmp OP something; (or: something OP tmp)
-       OMP_STORE (val) 
+       GIMPLE_OMP_ATOMIC_STORE (val)
 
   ???FIXME: Allow a more flexible sequence.  
   Perhaps use data flow to pick the statements.
   
   */
 
-  bsi = bsi_after_labels (store_bb);
-  stmt = bsi_stmt (bsi);
-  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+  gsi = gsi_after_labels (store_bb);
+  stmt = gsi_stmt (gsi);
+  if (!is_gimple_assign (stmt))
     return false;
-  bsi_next (&bsi);
-  if (TREE_CODE (bsi_stmt (bsi)) != OMP_ATOMIC_STORE)
+  gsi_next (&gsi);
+  if (gimple_code (gsi_stmt (gsi)) != GIMPLE_OMP_ATOMIC_STORE)
     return false;
 
-  if (!operand_equal_p (GIMPLE_STMT_OPERAND (stmt, 0), stored_val, 0))
+  if (!operand_equal_p (gimple_assign_lhs (stmt), stored_val, 0))
     return false;
 
-  rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-
   /* Check for one of the supported fetch-op operations.  */
-  switch (TREE_CODE (rhs))
+  switch (gimple_assign_rhs_code (stmt))
     {
     case PLUS_EXPR:
     case POINTER_PLUS_EXPR:
@@ -4725,11 +4844,11 @@ expand_omp_atomic_fetch_op (basic_block load_bb,
       return false;
     }
   /* Make sure the expression is of the proper form.  */
-  if (operand_equal_p (TREE_OPERAND (rhs, 0), loaded_val, 0))
-    rhs = TREE_OPERAND (rhs, 1);
-  else if (commutative_tree_code (TREE_CODE (rhs))
-          && operand_equal_p (TREE_OPERAND (rhs, 1), loaded_val, 0))
-    rhs = TREE_OPERAND (rhs, 0);
+  if (operand_equal_p (gimple_assign_rhs1 (stmt), loaded_val, 0))
+    rhs = gimple_assign_rhs2 (stmt);
+  else if (commutative_tree_code (gimple_assign_rhs_code (stmt))
+          && operand_equal_p (gimple_assign_rhs2 (stmt), loaded_val, 0))
+    rhs = gimple_assign_rhs1 (stmt);
   else
     return false;
 
@@ -4739,18 +4858,18 @@ expand_omp_atomic_fetch_op (basic_block load_bb,
   if (optab[TYPE_MODE (itype)] == CODE_FOR_nothing)
     return false;
 
-  bsi = bsi_last (load_bb);
-  gcc_assert (TREE_CODE (bsi_stmt (bsi)) == OMP_ATOMIC_LOAD);
+  gsi = gsi_last_bb (load_bb);
+  gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_ATOMIC_LOAD);
   call = build_call_expr (decl, 2, addr, fold_convert (itype, rhs));
   call = fold_convert (void_type_node, call);
-  force_gimple_operand_bsi (&bsi, call, true, NULL_TREE, true, BSI_SAME_STMT);
-  bsi_remove (&bsi, true);
+  force_gimple_operand_gsi (&gsi, call, true, NULL_TREE, true, GSI_SAME_STMT);
+  gsi_remove (&gsi, true);
 
-  bsi = bsi_last (store_bb);
-  gcc_assert (TREE_CODE (bsi_stmt (bsi)) == OMP_ATOMIC_STORE);
-  bsi_remove (&bsi, true);
-  bsi = bsi_last (store_bb);
-  bsi_remove (&bsi, true);
+  gsi = gsi_last_bb (store_bb);
+  gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_ATOMIC_STORE);
+  gsi_remove (&gsi, true);
+  gsi = gsi_last_bb (store_bb);
+  gsi_remove (&gsi, true);
 
   if (gimple_in_ssa_p (cfun))
     update_ssa (TODO_update_ssa_no_phi);
@@ -4777,9 +4896,9 @@ expand_omp_atomic_pipeline (basic_block load_bb, basic_block store_bb,
 {
   tree loadedi, storedi, initial, new_storedi, old_vali;
   tree type, itype, cmpxchg, iaddr;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator si;
   basic_block loop_header = single_succ (load_bb);
-  tree phi, x;
+  gimple phi, stmt;
   edge e;
 
   cmpxchg = built_in_decls[BUILT_IN_VAL_COMPARE_AND_SWAP_N + index + 1];
@@ -4789,19 +4908,24 @@ expand_omp_atomic_pipeline (basic_block load_bb, basic_block store_bb,
   if (sync_compare_and_swap[TYPE_MODE (itype)] == CODE_FOR_nothing)
     return false;
 
-  /* Load the initial value, replacing the OMP_ATOMIC_LOAD.  */
-  bsi = bsi_last (load_bb);
-  gcc_assert (TREE_CODE (bsi_stmt (bsi)) == OMP_ATOMIC_LOAD);
+  /* Load the initial value, replacing the GIMPLE_OMP_ATOMIC_LOAD.  */
+  si = gsi_last_bb (load_bb);
+  gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_LOAD);
+
   /* For floating-point values, we'll need to view-convert them to integers
      so that we can perform the atomic compare and swap.  Simplify the
      following code by always setting up the "i"ntegral variables.  */
   if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
     {
+      tree iaddr_val;
+
       iaddr = create_tmp_var (build_pointer_type (itype), NULL);
-      x = build_gimple_modify_stmt (iaddr,
-                                   fold_convert (TREE_TYPE (iaddr), addr));
-      force_gimple_operand_bsi (&bsi, x, true, NULL_TREE,
-                               true, BSI_SAME_STMT);
+      iaddr_val
+       = force_gimple_operand_gsi (&si,
+                                   fold_convert (TREE_TYPE (iaddr), addr),
+                                   false, NULL_TREE, true, GSI_SAME_STMT);
+      stmt = gimple_build_assign (iaddr, iaddr_val);
+      gsi_insert_before (&si, stmt, GSI_SAME_STMT);
       DECL_NO_TBAA_P (iaddr) = 1;
       DECL_POINTER_ALIAS_SET (iaddr) = 0;
       loadedi = create_tmp_var (itype, NULL);
@@ -4817,63 +4941,65 @@ expand_omp_atomic_pipeline (basic_block load_bb, basic_block store_bb,
       iaddr = addr;
       loadedi = loaded_val;
     }
-  initial = force_gimple_operand_bsi (&bsi, build_fold_indirect_ref (iaddr),
-                                     true, NULL_TREE, true, BSI_SAME_STMT);
+
+  initial = force_gimple_operand_gsi (&si, build_fold_indirect_ref (iaddr),
+                                     true, NULL_TREE, true, GSI_SAME_STMT);
 
   /* Move the value to the LOADEDI temporary.  */
   if (gimple_in_ssa_p (cfun))
     {
-      gcc_assert (phi_nodes (loop_header) == NULL_TREE);
+      gcc_assert (gimple_seq_empty_p (phi_nodes (loop_header)));
       phi = create_phi_node (loadedi, loop_header);
       SSA_NAME_DEF_STMT (loadedi) = phi;
       SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, single_succ_edge (load_bb)),
               initial);
     }
   else
-    bsi_insert_before (&bsi,
-                      build_gimple_modify_stmt (loadedi, initial),
-                      BSI_SAME_STMT);
+    gsi_insert_before (&si,
+                      gimple_build_assign (loadedi, initial),
+                      GSI_SAME_STMT);
   if (loadedi != loaded_val)
     {
-      block_stmt_iterator bsi2;
+      gimple_stmt_iterator gsi2;
+      tree x;
 
       x = build1 (VIEW_CONVERT_EXPR, type, loadedi);
-      bsi2 = bsi_start (loop_header);
+      gsi2 = gsi_start_bb (loop_header);
       if (gimple_in_ssa_p (cfun))
        {
-         x = force_gimple_operand_bsi (&bsi2, x, true, NULL_TREE,
-                                       true, BSI_SAME_STMT);
-         x = build_gimple_modify_stmt (loaded_val, x);
-         bsi_insert_before (&bsi2, x, BSI_SAME_STMT);
-         SSA_NAME_DEF_STMT (loaded_val) = x;
+         gimple stmt;
+         x = force_gimple_operand_gsi (&gsi2, x, true, NULL_TREE,
+                                       true, GSI_SAME_STMT);
+         stmt = gimple_build_assign (loaded_val, x);
+         gsi_insert_before (&gsi2, stmt, GSI_SAME_STMT);
        }
       else
        {
-         x = build_gimple_modify_stmt (loaded_val, x);
-         force_gimple_operand_bsi (&bsi2, x, true, NULL_TREE,
-                                   true, BSI_SAME_STMT);
+         x = build2 (MODIFY_EXPR, TREE_TYPE (loaded_val), loaded_val, x);
+         force_gimple_operand_gsi (&gsi2, x, true, NULL_TREE,
+                                   true, GSI_SAME_STMT);
        }
     }
-  bsi_remove (&bsi, true);
+  gsi_remove (&si, true);
 
-  bsi = bsi_last (store_bb);
-  gcc_assert (TREE_CODE (bsi_stmt (bsi)) == OMP_ATOMIC_STORE);
+  si = gsi_last_bb (store_bb);
+  gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_STORE);
 
   if (iaddr == addr)
     storedi = stored_val;
   else
     storedi =
-      force_gimple_operand_bsi (&bsi,
+      force_gimple_operand_gsi (&si,
                                build1 (VIEW_CONVERT_EXPR, itype,
                                        stored_val), true, NULL_TREE, true,
-                               BSI_SAME_STMT);
+                               GSI_SAME_STMT);
 
   /* Build the compare&swap statement.  */
   new_storedi = build_call_expr (cmpxchg, 3, iaddr, loadedi, storedi);
-  new_storedi = force_gimple_operand_bsi (&bsi,
+  new_storedi = force_gimple_operand_gsi (&si,
                                          fold_convert (itype, new_storedi),
                                          true, NULL_TREE,
-                                         true, BSI_SAME_STMT);
+                                         true, GSI_SAME_STMT);
 
   if (gimple_in_ssa_p (cfun))
     old_vali = loadedi;
@@ -4882,21 +5008,20 @@ expand_omp_atomic_pipeline (basic_block load_bb, basic_block store_bb,
       old_vali = create_tmp_var (itype, NULL);
       if (gimple_in_ssa_p (cfun))
        add_referenced_var (old_vali);
-      x = build_gimple_modify_stmt (old_vali, loadedi);
-      force_gimple_operand_bsi (&bsi, x, true, NULL_TREE,
-                               true, BSI_SAME_STMT);
+      stmt = gimple_build_assign (old_vali, loadedi);
+      gsi_insert_before (&si, stmt, GSI_SAME_STMT);
 
-      x = build_gimple_modify_stmt (loadedi, new_storedi);
-      force_gimple_operand_bsi (&bsi, x, true, NULL_TREE,
-                               true, BSI_SAME_STMT);
+      stmt = gimple_build_assign (loadedi, new_storedi);
+      gsi_insert_before (&si, stmt, GSI_SAME_STMT);
     }
 
   /* Note that we always perform the comparison as an integer, even for
      floating point.  This allows the atomic operation to properly 
      succeed even with NaNs and -0.0.  */
-  x = build2 (NE_EXPR, boolean_type_node, new_storedi, old_vali);
-  x = build3 (COND_EXPR, void_type_node, x, NULL_TREE, NULL_TREE);
-  bsi_insert_before (&bsi, x, BSI_SAME_STMT);
+  stmt = gimple_build_cond_empty
+           (build2 (NE_EXPR, boolean_type_node,
+                   new_storedi, old_vali));
+  gsi_insert_before (&si, stmt, GSI_SAME_STMT);
 
   /* Update cfg.  */
   e = single_succ_edge (store_bb);
@@ -4909,12 +5034,12 @@ expand_omp_atomic_pipeline (basic_block load_bb, basic_block store_bb,
      if we are not in SSA).  */
   if (gimple_in_ssa_p (cfun))
     {
-      phi = phi_nodes (loop_header);
+      phi = gimple_seq_first_stmt (phi_nodes (loop_header));
       SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e), new_storedi);
     }
 
-  /* Remove OMP_ATOMIC_STORE.  */
-  bsi_remove (&bsi, true);
+  /* Remove GIMPLE_OMP_ATOMIC_STORE.  */
+  gsi_remove (&si, true);
 
   if (gimple_in_ssa_p (cfun))
     update_ssa (TODO_update_ssa_no_phi);
@@ -4933,15 +5058,16 @@ expand_omp_atomic_pipeline (basic_block load_bb, basic_block store_bb,
    responses received from omp@openmp.org, appears to be within spec.
    Which makes sense, since that's how several other compilers handle
    this situation as well.  
-   LOADED_VAL and ADDR are the operands of OMP_ATOMIC_LOAD we're expanding. 
-   STORED_VAL is the operand of the matching OMP_ATOMIC_STORE.
+   LOADED_VAL and ADDR are the operands of GIMPLE_OMP_ATOMIC_LOAD we're
+   expanding.  STORED_VAL is the operand of the matching
+   GIMPLE_OMP_ATOMIC_STORE.
 
    We replace 
-   OMP_ATOMIC_LOAD (loaded_val, addr) with  
+   GIMPLE_OMP_ATOMIC_LOAD (loaded_val, addr) with  
    loaded_val = *addr;
 
    and replace
-   OMP_ATOMIC_ATORE (stored_val)  with
+   GIMPLE_OMP_ATOMIC_STORE (stored_val)  with
    *addr = stored_val;  
 */
 
@@ -4949,40 +5075,39 @@ static bool
 expand_omp_atomic_mutex (basic_block load_bb, basic_block store_bb,
                         tree addr, tree loaded_val, tree stored_val)
 {
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator si;
+  gimple stmt;
   tree t;
 
-  bsi = bsi_last (load_bb);
-  gcc_assert (TREE_CODE (bsi_stmt (bsi)) == OMP_ATOMIC_LOAD);
+  si = gsi_last_bb (load_bb);
+  gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_LOAD);
 
   t = built_in_decls[BUILT_IN_GOMP_ATOMIC_START];
   t = build_function_call_expr (t, 0);
-  force_gimple_operand_bsi (&bsi, t, true, NULL_TREE, true, BSI_SAME_STMT);
+  force_gimple_operand_gsi (&si, t, true, NULL_TREE, true, GSI_SAME_STMT);
 
-  t = build_gimple_modify_stmt (loaded_val, build_fold_indirect_ref (addr));
-  if (gimple_in_ssa_p (cfun))
-    SSA_NAME_DEF_STMT (loaded_val) = t;
-  bsi_insert_before (&bsi, t, BSI_SAME_STMT);
-  bsi_remove (&bsi, true);
+  stmt = gimple_build_assign (loaded_val, build_fold_indirect_ref (addr));
+  gsi_insert_before (&si, stmt, GSI_SAME_STMT);
+  gsi_remove (&si, true);
 
-  bsi = bsi_last (store_bb);
-  gcc_assert (TREE_CODE (bsi_stmt (bsi)) == OMP_ATOMIC_STORE);
+  si = gsi_last_bb (store_bb);
+  gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_STORE);
 
-  t = build_gimple_modify_stmt (build_fold_indirect_ref (unshare_expr (addr)),
+  stmt = gimple_build_assign (build_fold_indirect_ref (unshare_expr (addr)),
                                stored_val);
-  bsi_insert_before (&bsi, t, BSI_SAME_STMT);
+  gsi_insert_before (&si, stmt, GSI_SAME_STMT);
 
   t = built_in_decls[BUILT_IN_GOMP_ATOMIC_END];
   t = build_function_call_expr (t, 0);
-  force_gimple_operand_bsi (&bsi, t, true, NULL_TREE, true, BSI_SAME_STMT);
-  bsi_remove (&bsi, true);
+  force_gimple_operand_gsi (&si, t, true, NULL_TREE, true, GSI_SAME_STMT);
+  gsi_remove (&si, true);
 
   if (gimple_in_ssa_p (cfun))
     update_ssa (TODO_update_ssa_no_phi);
   return true;
 }
 
-/* Expand an OMP_ATOMIC statement.  We try to expand 
+/* Expand a GIMPLE_OMP_ATOMIC statement.  We try to expand
    using expand_omp_atomic_fetch_op. If it failed, we try to 
    call expand_omp_atomic_pipeline, and if it fails too, the
    ultimate fallback is wrapping the operation in a mutex
@@ -4993,10 +5118,10 @@ static void
 expand_omp_atomic (struct omp_region *region)
 {
   basic_block load_bb = region->entry, store_bb = region->exit;
-  tree load = last_stmt (load_bb), store = last_stmt (store_bb);
-  tree loaded_val = TREE_OPERAND (load, 0);
-  tree addr = TREE_OPERAND (load, 1);
-  tree stored_val = TREE_OPERAND (store, 0);
+  gimple load = last_stmt (load_bb), store = last_stmt (store_bb);
+  tree loaded_val = gimple_omp_atomic_load_lhs (load);
+  tree addr = gimple_omp_atomic_load_rhs (load);
+  tree stored_val = gimple_omp_atomic_store_val (store);
   tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
   HOST_WIDE_INT index;
 
@@ -5035,7 +5160,7 @@ expand_omp_atomic (struct omp_region *region)
 /* Expand the parallel region tree rooted at REGION.  Expansion
    proceeds in depth-first order.  Innermost regions are expanded
    first.  This way, parallel regions that require a new function to
-   be created (e.g., OMP_PARALLEL) can be expanded without having any
+   be created (e.g., GIMPLE_OMP_PARALLEL) can be expanded without having any
    internal dependencies in their body.  */
 
 static void
@@ -5047,50 +5172,47 @@ expand_omp (struct omp_region *region)
 
       /* First, determine whether this is a combined parallel+workshare
                 region.  */
-      if (region->type == OMP_PARALLEL)
+      if (region->type == GIMPLE_OMP_PARALLEL)
        determine_parallel_type (region);
 
       if (region->inner)
        expand_omp (region->inner);
 
       saved_location = input_location;
-      if (EXPR_HAS_LOCATION (last_stmt (region->entry)))
-       input_location = EXPR_LOCATION (last_stmt (region->entry));
+      if (gimple_has_location (last_stmt (region->entry)))
+       input_location = gimple_location (last_stmt (region->entry));
 
       switch (region->type)
        {
-       case OMP_PARALLEL:
+       case GIMPLE_OMP_PARALLEL:
+       case GIMPLE_OMP_TASK:
          expand_omp_taskreg (region);
          break;
 
-       case OMP_TASK:
-         expand_omp_taskreg (region);
-         break;
-
-       case OMP_FOR:
+       case GIMPLE_OMP_FOR:
          expand_omp_for (region);
          break;
 
-       case OMP_SECTIONS:
+       case GIMPLE_OMP_SECTIONS:
          expand_omp_sections (region);
          break;
 
-       case OMP_SECTION:
+       case GIMPLE_OMP_SECTION:
          /* Individual omp sections are handled together with their
-            parent OMP_SECTIONS region.  */
+            parent GIMPLE_OMP_SECTIONS region.  */
          break;
 
-       case OMP_SINGLE:
+       case GIMPLE_OMP_SINGLE:
          expand_omp_single (region);
          break;
 
-       case OMP_MASTER:
-       case OMP_ORDERED:
-       case OMP_CRITICAL:
+       case GIMPLE_OMP_MASTER:
+       case GIMPLE_OMP_ORDERED:
+       case GIMPLE_OMP_CRITICAL:
          expand_omp_synch (region);
          break;
 
-       case OMP_ATOMIC_LOAD:
+       case GIMPLE_OMP_ATOMIC_LOAD:
          expand_omp_atomic (region);
          break;
 
@@ -5113,19 +5235,19 @@ static void
 build_omp_regions_1 (basic_block bb, struct omp_region *parent,
                     bool single_tree)
 {
-  block_stmt_iterator si;
-  tree stmt;
+  gimple_stmt_iterator gsi;
+  gimple stmt;
   basic_block son;
 
-  si = bsi_last (bb);
-  if (!bsi_end_p (si) && OMP_DIRECTIVE_P (bsi_stmt (si)))
+  gsi = gsi_last_bb (bb);
+  if (!gsi_end_p (gsi) && is_gimple_omp (gsi_stmt (gsi)))
     {
       struct omp_region *region;
-      enum tree_code code;
+      enum gimple_code code;
 
-      stmt = bsi_stmt (si);
-      code = TREE_CODE (stmt);
-      if (code == OMP_RETURN)
+      stmt = gsi_stmt (gsi);
+      code = gimple_code (stmt);
+      if (code == GIMPLE_OMP_RETURN)
        {
          /* STMT is the return point out of region PARENT.  Mark it
             as the exit point and make PARENT the immediately
@@ -5135,26 +5257,28 @@ build_omp_regions_1 (basic_block bb, struct omp_region *parent,
          region->exit = bb;
          parent = parent->outer;
        }
-      else if (code == OMP_ATOMIC_STORE)
+      else if (code == GIMPLE_OMP_ATOMIC_STORE)
        {
-         /* OMP_ATOMIC_STORE is analogous to OMP_RETURN, but matches with
-            OMP_ATOMIC_LOAD.  */
+         /* GIMPLE_OMP_ATOMIC_STORE is analogous to GIMPLE_OMP_RETURN,
+            but matches with GIMPLE_OMP_ATOMIC_LOAD.  */
          gcc_assert (parent);
-         gcc_assert (parent->type == OMP_ATOMIC_LOAD);
+         gcc_assert (parent->type == GIMPLE_OMP_ATOMIC_LOAD);
          region = parent;
          region->exit = bb;
          parent = parent->outer;
        }
 
-      else if (code == OMP_CONTINUE)
+      else if (code == GIMPLE_OMP_CONTINUE)
        {
          gcc_assert (parent);
          parent->cont = bb;
        }
-      else if (code == OMP_SECTIONS_SWITCH)
+      else if (code == GIMPLE_OMP_SECTIONS_SWITCH)
        {
-         /* OMP_SECTIONS_SWITCH is part of OMP_SECTIONS, and we do nothing for
-            it.  */ ;
+         /* GIMPLE_OMP_SECTIONS_SWITCH is part of
+            GIMPLE_OMP_SECTIONS, and we do nothing for it.  */
+         ;
        }
       else
        {
@@ -5215,7 +5339,6 @@ build_omp_regions (void)
   build_omp_regions_1 (ENTRY_BLOCK_PTR, NULL, false);
 }
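
Note: build_omp_regions_1 above moves from block_stmt_iterator to the tuple
statement iterator.  For readers new to that API, here is a minimal sketch of
the idiom it uses to test whether a basic block ends in an OpenMP directive.
It is illustrative only, not part of the patch; it uses only GCC-internal
calls that appear in the hunk, and the wrapper function name is invented.

    static bool
    bb_ends_in_omp_directive_p (basic_block bb)
    {
      /* gsi_last_bb replaces bsi_last; is_gimple_omp replaces
         OMP_DIRECTIVE_P and is true for every GIMPLE_OMP_* code.  */
      gimple_stmt_iterator gsi = gsi_last_bb (bb);

      return !gsi_end_p (gsi) && is_gimple_omp (gsi_stmt (gsi));
    }
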
 
-
 /* Main entry point for expanding OMP-GIMPLE into runtime calls.  */
 
 static unsigned int
@@ -5273,107 +5396,106 @@ struct gimple_opt_pass pass_expand_omp =
 \f
 /* Routines to lower OpenMP directives into OMP-GIMPLE.  */
 
-/* Lower the OpenMP sections directive in *STMT_P.  */
+/* Lower the OpenMP sections directive in the current statement in GSI_P.
+   CTX is the enclosing OMP context for the current statement.  */
 
 static void
-lower_omp_sections (tree *stmt_p, omp_context *ctx)
+lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
 {
-  tree new_stmt, stmt, body, bind, block, ilist, olist, new_body, control;
-  tree t, dlist;
-  tree_stmt_iterator tsi;
+  tree block, control;
+  gimple_stmt_iterator tgsi;
   unsigned i, len;
+  gimple stmt, new_stmt, bind, t;
+  gimple_seq ilist, dlist, olist, new_body, body;
   struct gimplify_ctx gctx;
 
-  stmt = *stmt_p;
+  stmt = gsi_stmt (*gsi_p);
 
   push_gimplify_context (&gctx);
 
   dlist = NULL;
   ilist = NULL;
-  lower_rec_input_clauses (OMP_SECTIONS_CLAUSES (stmt), &ilist, &dlist, ctx);
+  lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
+                          &ilist, &dlist, ctx);
 
-  tsi = tsi_start (OMP_SECTIONS_BODY (stmt));
-  for (len = 0; !tsi_end_p (tsi); len++, tsi_next (&tsi))
+  tgsi = gsi_start (gimple_omp_body (stmt));
+  for (len = 0; !gsi_end_p (tgsi); len++, gsi_next (&tgsi))
     continue;
 
-  tsi = tsi_start (OMP_SECTIONS_BODY (stmt));
-  body = alloc_stmt_list ();
-  for (i = 0; i < len; i++, tsi_next (&tsi))
+  tgsi = gsi_start (gimple_omp_body (stmt));
+  body = NULL;
+  for (i = 0; i < len; i++, gsi_next (&tgsi))
     {
       omp_context *sctx;
-      tree sec_start, sec_end;
+      gimple sec_start;
 
-      sec_start = tsi_stmt (tsi);
+      sec_start = gsi_stmt (tgsi);
       sctx = maybe_lookup_ctx (sec_start);
       gcc_assert (sctx);
 
-      append_to_statement_list (sec_start, &body);
+      gimple_seq_add_stmt (&body, sec_start);
 
-      lower_omp (&OMP_SECTION_BODY (sec_start), sctx);
-      append_to_statement_list (OMP_SECTION_BODY (sec_start), &body);
-      OMP_SECTION_BODY (sec_start) = NULL;
+      lower_omp (gimple_omp_body (sec_start), sctx);
+      gimple_seq_add_seq (&body, gimple_omp_body (sec_start));
+      gimple_omp_set_body (sec_start, NULL);
 
       if (i == len - 1)
        {
-         tree l = alloc_stmt_list ();
-         lower_lastprivate_clauses (OMP_SECTIONS_CLAUSES (stmt), NULL,
+         gimple_seq l = NULL;
+         lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
                                     &l, ctx);
-         append_to_statement_list (l, &body);
-         OMP_SECTION_LAST (sec_start) = 1;
+         gimple_seq_add_seq (&body, l);
+         gimple_omp_section_set_last (sec_start);
        }
       
-      sec_end = make_node (OMP_RETURN);
-      append_to_statement_list (sec_end, &body);
+      gimple_seq_add_stmt (&body, gimple_build_omp_return (false));
     }
 
   block = make_node (BLOCK);
-  bind = build3 (BIND_EXPR, void_type_node, NULL, body, block);
+  bind = gimple_build_bind (NULL, body, block);
 
-  olist = NULL_TREE;
-  lower_reduction_clauses (OMP_SECTIONS_CLAUSES (stmt), &olist, ctx);
+  olist = NULL;
+  lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
 
   block = make_node (BLOCK);
-  new_stmt = build3 (BIND_EXPR, void_type_node, NULL, NULL, block);
-  TREE_SIDE_EFFECTS (new_stmt) = 1;
+  new_stmt = gimple_build_bind (NULL, NULL, block);
 
   pop_gimplify_context (new_stmt);
-
-  BIND_EXPR_VARS (new_stmt)
-    = chainon (BIND_EXPR_VARS (new_stmt), ctx->block_vars);
-  BLOCK_VARS (block) = BIND_EXPR_VARS (new_stmt);
+  gimple_bind_append_vars (new_stmt, ctx->block_vars);
+  BLOCK_VARS (block) = gimple_bind_vars (bind);
   if (BLOCK_VARS (block))
     TREE_USED (block) = 1;
 
-  new_body = alloc_stmt_list ();
-  append_to_statement_list (ilist, &new_body);
-  append_to_statement_list (stmt, &new_body);
-  append_to_statement_list (make_node (OMP_SECTIONS_SWITCH), &new_body);
-  append_to_statement_list (bind, &new_body);
+  new_body = NULL;
+  gimple_seq_add_seq (&new_body, ilist);
+  gimple_seq_add_stmt (&new_body, stmt);
+  gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
+  gimple_seq_add_stmt (&new_body, bind);
 
   control = create_tmp_var (unsigned_type_node, ".section");
-  t = build2 (OMP_CONTINUE, void_type_node, control, control);
-  OMP_SECTIONS_CONTROL (stmt) = control;
-  append_to_statement_list (t, &new_body);
+  t = gimple_build_omp_continue (control, control);
+  gimple_omp_sections_set_control (stmt, control);
+  gimple_seq_add_stmt (&new_body, t);
 
-  append_to_statement_list (olist, &new_body);
-  append_to_statement_list (dlist, &new_body);
+  gimple_seq_add_seq (&new_body, olist);
+  gimple_seq_add_seq (&new_body, dlist);
 
-  maybe_catch_exception (&new_body);
+  new_body = maybe_catch_exception (new_body);
 
-  t = make_node (OMP_RETURN);
-  OMP_RETURN_NOWAIT (t) = !!find_omp_clause (OMP_SECTIONS_CLAUSES (stmt),
-                                            OMP_CLAUSE_NOWAIT);
-  append_to_statement_list (t, &new_body);
+  t = gimple_build_omp_return
+        (!!find_omp_clause (gimple_omp_sections_clauses (stmt),
+                           OMP_CLAUSE_NOWAIT));
+  gimple_seq_add_stmt (&new_body, t);
 
-  BIND_EXPR_BODY (new_stmt) = new_body;
-  OMP_SECTIONS_BODY (stmt) = NULL;
+  gimple_bind_set_body (new_stmt, new_body);
+  gimple_omp_set_body (stmt, NULL);
 
-  *stmt_p = new_stmt;
+  gsi_replace (gsi_p, new_stmt, true);
 }
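
Note: the rewritten lower_omp_sections is representative of the lowering
routines that follow: tree statement lists become gimple_seq,
append_to_statement_list becomes gimple_seq_add_stmt / gimple_seq_add_seq,
and storing the result through *STMT_P becomes gsi_replace on the caller's
iterator.  A compressed sketch of that shape, illustrative only and built
solely from calls visible above (the helper name is invented):

    static void
    replace_stmt_with_bind (gimple_stmt_iterator *gsi_p, gimple stmt)
    {
      gimple_seq body = NULL;           /* NULL is the empty sequence.  */
      tree block = make_node (BLOCK);
      gimple bind;

      /* Accumulate the new body, keeping STMT itself inside it.  */
      gimple_seq_add_stmt (&body, stmt);
      gimple_seq_add_stmt (&body, gimple_build_omp_return (false));

      /* Wrap the body in a GIMPLE_BIND and splice it in where STMT was.  */
      bind = gimple_build_bind (NULL, body, block);
      gsi_replace (gsi_p, bind, true);
    }
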
 
 
 /* A subroutine of lower_omp_single.  Expand the simple form of
-   aOMP_SINGLE, without a copyprivate clause:
+   a GIMPLE_OMP_SINGLE, without a copyprivate clause:
 
        if (GOMP_single_start ())
          BODY;
@@ -5384,22 +5506,31 @@ lower_omp_sections (tree *stmt_p, omp_context *ctx)
   to a synchronization analysis pass.  */
 
 static void
-lower_omp_single_simple (tree single_stmt, tree *pre_p)
+lower_omp_single_simple (gimple single_stmt, gimple_seq *pre_p)
 {
-  tree t;
-
-  t = build_call_expr (built_in_decls[BUILT_IN_GOMP_SINGLE_START], 0);
-  if (TREE_TYPE (t) != boolean_type_node)
-    t = fold_build2 (NE_EXPR, boolean_type_node,
-                    t, build_int_cst (TREE_TYPE (t), 0));
-  t = build3 (COND_EXPR, void_type_node, t,
-             OMP_SINGLE_BODY (single_stmt), NULL);
-  gimplify_and_add (t, pre_p);
+  tree tlabel = create_artificial_label ();
+  tree flabel = create_artificial_label ();
+  gimple call, cond;
+  tree lhs, decl;
+
+  decl = built_in_decls[BUILT_IN_GOMP_SINGLE_START];
+  lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)), NULL);
+  call = gimple_build_call (decl, 0);
+  gimple_call_set_lhs (call, lhs);
+  gimple_seq_add_stmt (pre_p, call);
+
+  cond = gimple_build_cond (EQ_EXPR, lhs,
+                           fold_convert (TREE_TYPE (lhs), boolean_true_node),
+                           tlabel, flabel);
+  gimple_seq_add_stmt (pre_p, cond);
+  gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
+  gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
+  gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
 }
 
 
 /* A subroutine of lower_omp_single.  Expand the simple form of
-   aOMP_SINGLE, with a copyprivate clause:
+   a GIMPLE_OMP_SINGLE, with a copyprivate clause:
 
        #pragma omp single copyprivate (a, b, c)
 
@@ -5428,9 +5559,10 @@ lower_omp_single_simple (tree single_stmt, tree *pre_p)
   to a synchronization analysis pass.  */
 
 static void
-lower_omp_single_copy (tree single_stmt, tree *pre_p, omp_context *ctx)
+lower_omp_single_copy (gimple single_stmt, gimple_seq *pre_p, omp_context *ctx)
 {
-  tree ptr_type, t, l0, l1, l2, copyin_seq;
+  tree ptr_type, t, l0, l1, l2;
+  gimple_seq copyin_seq;
 
   ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
 
@@ -5443,8 +5575,7 @@ lower_omp_single_copy (tree single_stmt, tree *pre_p, omp_context *ctx)
 
   t = build_call_expr (built_in_decls[BUILT_IN_GOMP_SINGLE_COPY_START], 0);
   t = fold_convert (ptr_type, t);
-  t = build_gimple_modify_stmt (ctx->receiver_decl, t);
-  gimplify_and_add (t, pre_p);
+  gimplify_assign (ctx->receiver_decl, t, pre_p);
 
   t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
              build_int_cst (ptr_type, 0));
@@ -5452,13 +5583,12 @@ lower_omp_single_copy (tree single_stmt, tree *pre_p, omp_context *ctx)
              build_and_jump (&l0), build_and_jump (&l1));
   gimplify_and_add (t, pre_p);
 
-  t = build1 (LABEL_EXPR, void_type_node, l0);
-  gimplify_and_add (t, pre_p);
+  gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
 
-  append_to_statement_list (OMP_SINGLE_BODY (single_stmt), pre_p);
+  gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
 
   copyin_seq = NULL;
-  lower_copyprivate_clauses (OMP_SINGLE_CLAUSES (single_stmt), pre_p,
+  lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
                              &copyin_seq, ctx);
 
   t = build_fold_addr_expr (ctx->sender_decl);
@@ -5468,56 +5598,57 @@ lower_omp_single_copy (tree single_stmt, tree *pre_p, omp_context *ctx)
   t = build_and_jump (&l2);
   gimplify_and_add (t, pre_p);
 
-  t = build1 (LABEL_EXPR, void_type_node, l1);
-  gimplify_and_add (t, pre_p);
+  gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
 
-  append_to_statement_list (copyin_seq, pre_p);
+  gimple_seq_add_seq (pre_p, copyin_seq);
 
-  t = build1 (LABEL_EXPR, void_type_node, l2);
-  gimplify_and_add (t, pre_p);
+  gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
 }
 
 
 /* Expand code for an OpenMP single directive.  */
 
 static void
-lower_omp_single (tree *stmt_p, omp_context *ctx)
+lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
 {
-  tree t, bind, block, single_stmt = *stmt_p, dlist;
+  tree block;
+  gimple t, bind, single_stmt = gsi_stmt (*gsi_p);
+  gimple_seq bind_body, dlist;
   struct gimplify_ctx gctx;
 
   push_gimplify_context (&gctx);
 
-  block = make_node (BLOCK);
-  *stmt_p = bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, block);
-  TREE_SIDE_EFFECTS (bind) = 1;
+  bind_body = NULL;
+  lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
+                          &bind_body, &dlist, ctx);
+  lower_omp (gimple_omp_body (single_stmt), ctx);
 
-  lower_rec_input_clauses (OMP_SINGLE_CLAUSES (single_stmt),
-                          &BIND_EXPR_BODY (bind), &dlist, ctx);
-  lower_omp (&OMP_SINGLE_BODY (single_stmt), ctx);
-
-  append_to_statement_list (single_stmt, &BIND_EXPR_BODY (bind));
+  gimple_seq_add_stmt (&bind_body, single_stmt);
 
   if (ctx->record_type)
-    lower_omp_single_copy (single_stmt, &BIND_EXPR_BODY (bind), ctx);
+    lower_omp_single_copy (single_stmt, &bind_body, ctx);
   else
-    lower_omp_single_simple (single_stmt, &BIND_EXPR_BODY (bind));
+    lower_omp_single_simple (single_stmt, &bind_body);
+
+  gimple_omp_set_body (single_stmt, NULL);
 
-  OMP_SINGLE_BODY (single_stmt) = NULL;
+  gimple_seq_add_seq (&bind_body, dlist);
 
-  append_to_statement_list (dlist, &BIND_EXPR_BODY (bind));
+  bind_body = maybe_catch_exception (bind_body);
 
-  maybe_catch_exception (&BIND_EXPR_BODY (bind));
+  t = gimple_build_omp_return 
+        (!!find_omp_clause (gimple_omp_single_clauses (single_stmt),
+                           OMP_CLAUSE_NOWAIT));
+  gimple_seq_add_stmt (&bind_body, t);
 
-  t = make_node (OMP_RETURN);
-  OMP_RETURN_NOWAIT (t) = !!find_omp_clause (OMP_SINGLE_CLAUSES (single_stmt),
-                                            OMP_CLAUSE_NOWAIT);
-  append_to_statement_list (t, &BIND_EXPR_BODY (bind));
+  block = make_node (BLOCK);
+  bind = gimple_build_bind (NULL, bind_body, block);
 
   pop_gimplify_context (bind);
 
-  BIND_EXPR_VARS (bind) = chainon (BIND_EXPR_VARS (bind), ctx->block_vars);
-  BLOCK_VARS (block) = BIND_EXPR_VARS (bind);
+  gimple_bind_append_vars (bind, ctx->block_vars);
+  BLOCK_VARS (block) = ctx->block_vars;
+  gsi_replace (gsi_p, bind, true);
   if (BLOCK_VARS (block))
     TREE_USED (block) = 1;
 }
@@ -5526,82 +5657,80 @@ lower_omp_single (tree *stmt_p, omp_context *ctx)
 /* Expand code for an OpenMP master directive.  */
 
 static void
-lower_omp_master (tree *stmt_p, omp_context *ctx)
+lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
 {
-  tree bind, block, stmt = *stmt_p, lab = NULL, x;
+  tree block, lab = NULL, x;
+  gimple stmt = gsi_stmt (*gsi_p), bind;
+  gimple_seq tseq;
   struct gimplify_ctx gctx;
 
   push_gimplify_context (&gctx);
 
   block = make_node (BLOCK);
-  *stmt_p = bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, block);
-  TREE_SIDE_EFFECTS (bind) = 1;
-
-  append_to_statement_list (stmt, &BIND_EXPR_BODY (bind));
+  bind = gimple_build_bind (NULL, gimple_seq_alloc_with_stmt (stmt),
+                                block);
 
   x = build_call_expr (built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM], 0);
   x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
   x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
-  gimplify_and_add (x, &BIND_EXPR_BODY (bind));
+  tseq = NULL;
+  gimplify_and_add (x, &tseq);
+  gimple_bind_add_seq (bind, tseq);
 
-  lower_omp (&OMP_MASTER_BODY (stmt), ctx);
-  maybe_catch_exception (&OMP_MASTER_BODY (stmt));
-  append_to_statement_list (OMP_MASTER_BODY (stmt), &BIND_EXPR_BODY (bind));
-  OMP_MASTER_BODY (stmt) = NULL;
+  lower_omp (gimple_omp_body (stmt), ctx);
+  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
+  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
+  gimple_omp_set_body (stmt, NULL);
 
-  x = build1 (LABEL_EXPR, void_type_node, lab);
-  gimplify_and_add (x, &BIND_EXPR_BODY (bind));
+  gimple_bind_add_stmt (bind, gimple_build_label (lab));
 
-  x = make_node (OMP_RETURN);
-  OMP_RETURN_NOWAIT (x) = 1;
-  append_to_statement_list (x, &BIND_EXPR_BODY (bind));
+  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
 
   pop_gimplify_context (bind);
 
-  BIND_EXPR_VARS (bind) = chainon (BIND_EXPR_VARS (bind), ctx->block_vars);
-  BLOCK_VARS (block) = BIND_EXPR_VARS (bind);
+  gimple_bind_append_vars (bind, ctx->block_vars);
+  BLOCK_VARS (block) = ctx->block_vars;
+  gsi_replace (gsi_p, bind, true);
 }
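
Note: lower_omp_master, lower_omp_ordered and lower_omp_critical share one
more idiom worth calling out: expressions that still need the tree gimplifier
are gimplified into a local gimple_seq which is then attached to the
GIMPLE_BIND, because gimplify_and_add can no longer append to a BIND_EXPR
body directly.  An illustrative fragment mirroring the tseq handling above,
where X is whatever GENERIC expression is being lowered and BIND the
enclosing bind:

    gimple_seq tseq = NULL;

    gimplify_and_add (x, &tseq);        /* Gimplify X into a fresh sequence.  */
    gimple_bind_add_seq (bind, tseq);   /* Append the result to the bind.  */
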
 
 
 /* Expand code for an OpenMP ordered directive.  */
 
 static void
-lower_omp_ordered (tree *stmt_p, omp_context *ctx)
+lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
 {
-  tree bind, block, stmt = *stmt_p, x;
+  tree block;
+  gimple stmt = gsi_stmt (*gsi_p), bind, x;
   struct gimplify_ctx gctx;
 
   push_gimplify_context (&gctx);
 
   block = make_node (BLOCK);
-  *stmt_p = bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, block);
-  TREE_SIDE_EFFECTS (bind) = 1;
-
-  append_to_statement_list (stmt, &BIND_EXPR_BODY (bind));
+  bind = gimple_build_bind (NULL, gimple_seq_alloc_with_stmt (stmt),
+                                  block);
 
-  x = build_call_expr (built_in_decls[BUILT_IN_GOMP_ORDERED_START], 0);
-  gimplify_and_add (x, &BIND_EXPR_BODY (bind));
+  x = gimple_build_call (built_in_decls[BUILT_IN_GOMP_ORDERED_START], 0);
+  gimple_bind_add_stmt (bind, x);
 
-  lower_omp (&OMP_ORDERED_BODY (stmt), ctx);
-  maybe_catch_exception (&OMP_ORDERED_BODY (stmt));
-  append_to_statement_list (OMP_ORDERED_BODY (stmt), &BIND_EXPR_BODY (bind));
-  OMP_ORDERED_BODY (stmt) = NULL;
+  lower_omp (gimple_omp_body (stmt), ctx);
+  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
+  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
+  gimple_omp_set_body (stmt, NULL);
 
-  x = build_call_expr (built_in_decls[BUILT_IN_GOMP_ORDERED_END], 0);
-  gimplify_and_add (x, &BIND_EXPR_BODY (bind));
+  x = gimple_build_call (built_in_decls[BUILT_IN_GOMP_ORDERED_END], 0);
+  gimple_bind_add_stmt (bind, x);
 
-  x = make_node (OMP_RETURN);
-  OMP_RETURN_NOWAIT (x) = 1;
-  append_to_statement_list (x, &BIND_EXPR_BODY (bind));
+  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
 
   pop_gimplify_context (bind);
 
-  BIND_EXPR_VARS (bind) = chainon (BIND_EXPR_VARS (bind), ctx->block_vars);
-  BLOCK_VARS (block) = BIND_EXPR_VARS (bind);
+  gimple_bind_append_vars (bind, ctx->block_vars);
+  BLOCK_VARS (block) = gimple_bind_vars (bind);
+  gsi_replace (gsi_p, bind, true);
 }
 
 
-/* Gimplify aOMP_CRITICAL statement.  This is a relatively simple
+/* Gimplify a GIMPLE_OMP_CRITICAL statement.  This is a relatively simple
    substitution of a couple of function calls.  But in the NAMED case,
    requires that languages coordinate a symbol name.  It is therefore
    best put here in common code.  */
@@ -5610,13 +5739,15 @@ static GTY((param1_is (tree), param2_is (tree)))
   splay_tree critical_name_mutexes;
 
 static void
-lower_omp_critical (tree *stmt_p, omp_context *ctx)
+lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
 {
-  tree bind, block, stmt = *stmt_p;
-  tree t, lock, unlock, name;
+  tree block;
+  tree name, lock, unlock;
+  gimple stmt = gsi_stmt (*gsi_p), bind;
+  gimple_seq tbody;
   struct gimplify_ctx gctx;
 
-  name = OMP_CRITICAL_NAME (stmt);
+  name = gimple_omp_critical_name (stmt);
   if (name)
     {
       tree decl;
@@ -5667,27 +5798,27 @@ lower_omp_critical (tree *stmt_p, omp_context *ctx)
   push_gimplify_context (&gctx);
 
   block = make_node (BLOCK);
-  *stmt_p = bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, block);
-  TREE_SIDE_EFFECTS (bind) = 1;
-
-  append_to_statement_list (stmt, &BIND_EXPR_BODY (bind));
+  bind = gimple_build_bind (NULL, gimple_seq_alloc_with_stmt (stmt), block);
 
-  gimplify_and_add (lock, &BIND_EXPR_BODY (bind));
+  tbody = gimple_bind_body (bind);
+  gimplify_and_add (lock, &tbody);
+  gimple_bind_set_body (bind, tbody);
 
-  lower_omp (&OMP_CRITICAL_BODY (stmt), ctx);
-  maybe_catch_exception (&OMP_CRITICAL_BODY (stmt));
-  append_to_statement_list (OMP_CRITICAL_BODY (stmt), &BIND_EXPR_BODY (bind));
-  OMP_CRITICAL_BODY (stmt) = NULL;
+  lower_omp (gimple_omp_body (stmt), ctx);
+  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
+  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
+  gimple_omp_set_body (stmt, NULL);
 
-  gimplify_and_add (unlock, &BIND_EXPR_BODY (bind));
+  tbody = gimple_bind_body (bind);
+  gimplify_and_add (unlock, &tbody);
+  gimple_bind_set_body (bind, tbody);
 
-  t = make_node (OMP_RETURN);
-  OMP_RETURN_NOWAIT (t) = 1;
-  append_to_statement_list (t, &BIND_EXPR_BODY (bind));
+  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
 
   pop_gimplify_context (bind);
-  BIND_EXPR_VARS (bind) = chainon (BIND_EXPR_VARS (bind), ctx->block_vars);
-  BLOCK_VARS (block) = BIND_EXPR_VARS (bind);
+  gimple_bind_append_vars (bind, ctx->block_vars);
+  BLOCK_VARS (block) = gimple_bind_vars (bind);
+  gsi_replace (gsi_p, bind, true);
 }
 
 
@@ -5698,11 +5829,12 @@ lower_omp_critical (tree *stmt_p, omp_context *ctx)
    *BODY_P.  */
 
 static void
-lower_omp_for_lastprivate (struct omp_for_data *fd, tree *body_p,
-                          tree *dlist, struct omp_context *ctx)
+lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
+                          gimple_seq *dlist, struct omp_context *ctx)
 {
-  tree clauses, cond, stmts, vinit, t;
+  tree clauses, cond, vinit;
   enum tree_code cond_code;
+  gimple_seq stmts;
   
   cond_code = fd->loop.cond_code;
   cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
@@ -5718,12 +5850,12 @@ lower_omp_for_lastprivate (struct omp_for_data *fd, tree *body_p,
 
   cond = build2 (cond_code, boolean_type_node, fd->loop.v, fd->loop.n2);
 
-  clauses = OMP_FOR_CLAUSES (fd->for_stmt);
+  clauses = gimple_omp_for_clauses (fd->for_stmt);
   stmts = NULL;
   lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
-  if (stmts != NULL)
+  if (!gimple_seq_empty_p (stmts))
     {
-      append_to_statement_list (*dlist, &stmts);
+      gimple_seq_add_seq (&stmts, *dlist);
       *dlist = stmts;
 
       /* Optimize: v = 0; is usually cheaper than v = some_other_constant.  */
@@ -5735,8 +5867,7 @@ lower_omp_for_lastprivate (struct omp_for_data *fd, tree *body_p,
 
       /* Initialize the iterator variable, so that threads that don't execute
         any iterations don't execute the lastprivate clauses by accident.  */
-      t = build_gimple_modify_stmt (fd->loop.v, vinit);
-      gimplify_and_add (t, body_p);
+      gimplify_assign (fd->loop.v, vinit, body_p);
     }
 }
 
@@ -5744,37 +5875,39 @@ lower_omp_for_lastprivate (struct omp_for_data *fd, tree *body_p,
 /* Lower code for an OpenMP loop directive.  */
 
 static void
-lower_omp_for (tree *stmt_p, omp_context *ctx)
+lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
 {
-  tree t, stmt, ilist, dlist, new_stmt, block, *body_p, *rhs_p;
+  tree *rhs_p, block;
   struct omp_for_data fd;
-  int i;
+  gimple stmt = gsi_stmt (*gsi_p), new_stmt;
+  gimple_seq omp_for_body, body, dlist, ilist;
+  size_t i;
   struct gimplify_ctx gctx;
 
-  stmt = *stmt_p;
-
   push_gimplify_context (&gctx);
 
-  lower_omp (&OMP_FOR_PRE_BODY (stmt), ctx);
-  lower_omp (&OMP_FOR_BODY (stmt), ctx);
+  lower_omp (gimple_omp_for_pre_body (stmt), ctx);
+  lower_omp (gimple_omp_body (stmt), ctx);
 
   block = make_node (BLOCK);
-  new_stmt = build3 (BIND_EXPR, void_type_node, NULL, NULL, block);
-  TREE_SIDE_EFFECTS (new_stmt) = 1;
-  body_p = &BIND_EXPR_BODY (new_stmt);
+  new_stmt = gimple_build_bind (NULL, NULL, block);
 
   /* Move declaration of temporaries in the loop body before we make
      it go away.  */
-  if (TREE_CODE (OMP_FOR_BODY (stmt)) == BIND_EXPR)
-    BIND_EXPR_VARS (new_stmt)
-      = chainon (BIND_EXPR_VARS (new_stmt),
-                BIND_EXPR_VARS (OMP_FOR_BODY (stmt)));
+  omp_for_body = gimple_omp_body (stmt);
+  if (!gimple_seq_empty_p (omp_for_body)
+      && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
+    {
+      tree vars = gimple_bind_vars (gimple_seq_first_stmt (omp_for_body));
+      gimple_bind_append_vars (new_stmt, vars);
+    }
 
-  /* The pre-body and input clauses go before the lowered OMP_FOR.  */
+  /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR.  */
   ilist = NULL;
   dlist = NULL;
-  lower_rec_input_clauses (OMP_FOR_CLAUSES (stmt), body_p, &dlist, ctx);
-  append_to_statement_list (OMP_FOR_PRE_BODY (stmt), body_p);
+  body = NULL;
+  lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx);
+  gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
 
   /* Lower the header expressions.  At this point, we can assume that
      the header is of the form:
@@ -5783,71 +5916,72 @@ lower_omp_for (tree *stmt_p, omp_context *ctx)
 
      We just need to make sure that VAL1, VAL2 and VAL3 are lowered
      using the .omp_data_s mapping, if needed.  */
-  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (stmt)); i++)
+  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
     {
-      rhs_p = &GIMPLE_STMT_OPERAND (TREE_VEC_ELT (OMP_FOR_INIT (stmt), i), 1);
+      rhs_p = gimple_omp_for_initial_ptr (stmt, i);
       if (!is_gimple_min_invariant (*rhs_p))
-       *rhs_p = get_formal_tmp_var (*rhs_p, body_p);
+       *rhs_p = get_formal_tmp_var (*rhs_p, &body);
 
-      rhs_p = &TREE_OPERAND (TREE_VEC_ELT (OMP_FOR_COND (stmt), i), 1);
+      rhs_p = gimple_omp_for_final_ptr (stmt, i);
       if (!is_gimple_min_invariant (*rhs_p))
-       *rhs_p = get_formal_tmp_var (*rhs_p, body_p);
+       *rhs_p = get_formal_tmp_var (*rhs_p, &body);
 
-      rhs_p = &TREE_OPERAND (GIMPLE_STMT_OPERAND
-                              (TREE_VEC_ELT (OMP_FOR_INCR (stmt), i), 1), 1);
+      rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
       if (!is_gimple_min_invariant (*rhs_p))
-       *rhs_p = get_formal_tmp_var (*rhs_p, body_p);
+       *rhs_p = get_formal_tmp_var (*rhs_p, &body);
     }
 
   /* Once lowered, extract the bounds and clauses.  */
   extract_omp_for_data (stmt, &fd, NULL);
 
-  lower_omp_for_lastprivate (&fd, body_p, &dlist, ctx);
-
-  append_to_statement_list (stmt, body_p);
+  lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
 
-  append_to_statement_list (OMP_FOR_BODY (stmt), body_p);
+  gimple_seq_add_stmt (&body, stmt);
+  gimple_seq_add_seq (&body, gimple_omp_body (stmt));
 
-  t = build2 (OMP_CONTINUE, void_type_node, fd.loop.v, fd.loop.v);
-  append_to_statement_list (t, body_p);
+  gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
+                                                        fd.loop.v));
 
   /* After the loop, add exit clauses.  */
-  lower_reduction_clauses (OMP_FOR_CLAUSES (stmt), body_p, ctx);
-  append_to_statement_list (dlist, body_p);
+  lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
+  gimple_seq_add_seq (&body, dlist);
 
-  maybe_catch_exception (body_p);
+  body = maybe_catch_exception (body);
 
   /* Region exit marker goes at the end of the loop body.  */
-  t = make_node (OMP_RETURN);
-  OMP_RETURN_NOWAIT (t) = fd.have_nowait;
-  append_to_statement_list (t, body_p);
+  gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
 
   pop_gimplify_context (new_stmt);
-  BIND_EXPR_VARS (new_stmt)
-    = chainon (BIND_EXPR_VARS (new_stmt), ctx->block_vars);
-  BLOCK_VARS (block) = BIND_EXPR_VARS (new_stmt);
+
+  gimple_bind_append_vars (new_stmt, ctx->block_vars);
+  BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
   if (BLOCK_VARS (block))
     TREE_USED (block) = 1;
 
-  OMP_FOR_BODY (stmt) = NULL_TREE;
-  OMP_FOR_PRE_BODY (stmt) = NULL_TREE;
-  *stmt_p = new_stmt;
+  gimple_bind_set_body (new_stmt, body);
+  gimple_omp_set_body (stmt, NULL);
+  gimple_omp_for_set_pre_body (stmt, NULL);
+  gsi_replace (gsi_p, new_stmt, true);
 }
 
-/* Callback for walk_stmts.  Check if *TP only contains OMP_FOR
-   or OMP_PARALLEL.  */
+/* Callback for walk_gimple_seq.  Check if the current statement only
+   contains GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS.  */
 
 static tree
-check_combined_parallel (tree *tp, int *walk_subtrees, void *data)
+check_combined_parallel (gimple_stmt_iterator *gsi_p,
+                        bool *handled_ops_p,
+                        struct walk_stmt_info *wi)
 {
-  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
   int *info = (int *) wi->info;
+  gimple stmt = gsi_stmt (*gsi_p);
 
-  *walk_subtrees = 0;
-  switch (TREE_CODE (*tp))
+  *handled_ops_p = true;
+  switch (gimple_code (stmt))
     {
-    case OMP_FOR:
-    case OMP_SECTIONS:
+    WALK_SUBSTMTS;
+
+    case GIMPLE_OMP_FOR:
+    case GIMPLE_OMP_SECTIONS:
       *info = *info == 0 ? 1 : -1;
       break;
     default:
@@ -5894,9 +6028,10 @@ task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
       DECL_CONTEXT (new_f) = type;
       TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
       TREE_CHAIN (new_f) = new_fields;
-      walk_tree (&DECL_SIZE (new_f), copy_body_r, &tcctx->cb, NULL);
-      walk_tree (&DECL_SIZE_UNIT (new_f), copy_body_r, &tcctx->cb, NULL);
-      walk_tree (&DECL_FIELD_OFFSET (new_f), copy_body_r, &tcctx->cb, NULL);
+      walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
+      walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
+      walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
+                &tcctx->cb, NULL);
       new_fields = new_f;
       *pointer_map_insert (tcctx->cb.decl_map, f) = new_f;
     }
@@ -5908,7 +6043,7 @@ task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
 /* Create task copyfn.  */
 
 static void
-create_task_copyfn (tree task_stmt, omp_context *ctx)
+create_task_copyfn (gimple task_stmt, omp_context *ctx)
 {
   struct function *child_cfun;
   tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
@@ -5918,7 +6053,7 @@ create_task_copyfn (tree task_stmt, omp_context *ctx)
   struct omp_taskcopy_context tcctx;
   struct gimplify_ctx gctx;
 
-  child_fn = OMP_TASK_COPYFN (task_stmt);
+  child_fn = gimple_omp_task_copy_fn (task_stmt);
   child_cfun = DECL_STRUCT_FUNCTION (child_fn);
   gcc_assert (child_cfun->cfg == NULL);
   child_cfun->dont_save_pending_sizes_p = 1;
@@ -5936,7 +6071,7 @@ create_task_copyfn (tree task_stmt, omp_context *ctx)
   TREE_SIDE_EFFECTS (bind) = 1;
   list = NULL;
   DECL_SAVED_TREE (child_fn) = bind;
-  DECL_SOURCE_LOCATION (child_fn) = EXPR_LOCATION (task_stmt);
+  DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
 
   /* Remap src and dst argument types if needed.  */
   record_type = ctx->record_type;
@@ -5986,7 +6121,7 @@ create_task_copyfn (tree task_stmt, omp_context *ctx)
   /* First pass: initialize temporaries used in record_type and srecord_type
      sizes and field offsets.  */
   if (tcctx.cb.decl_map)
-    for (c = OMP_TASK_CLAUSES (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
+    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
        {
          tree *p;
@@ -6000,13 +6135,13 @@ create_task_copyfn (tree task_stmt, omp_context *ctx)
          sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
          src = build_fold_indirect_ref (sarg);
          src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
-         t = build_gimple_modify_stmt (*p, src);
+         t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
          append_to_statement_list (t, &list);
        }
 
   /* Second pass: copy shared var pointers and copy construct non-VLA
      firstprivate vars.  */
-  for (c = OMP_TASK_CLAUSES (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
+  for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
     switch (OMP_CLAUSE_CODE (c))
       {
       case OMP_CLAUSE_SHARED:
@@ -6025,7 +6160,7 @@ create_task_copyfn (tree task_stmt, omp_context *ctx)
        src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
        dst = build_fold_indirect_ref (arg);
        dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
-       t = build_gimple_modify_stmt (dst, src);
+       t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
        append_to_statement_list (t, &list);
        break;
       case OMP_CLAUSE_FIRSTPRIVATE:
@@ -6079,7 +6214,7 @@ create_task_copyfn (tree task_stmt, omp_context *ctx)
          src = decl;
        dst = build_fold_indirect_ref (arg);
        dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
-       t = build_gimple_modify_stmt (dst, src);
+       t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
        append_to_statement_list (t, &list);
        break;
       default:
@@ -6088,7 +6223,7 @@ create_task_copyfn (tree task_stmt, omp_context *ctx)
 
   /* Last pass: handle VLA firstprivates.  */
   if (tcctx.cb.decl_map)
-    for (c = OMP_TASK_CLAUSES (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
+    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
        {
          tree ind, ptr, df;
@@ -6122,7 +6257,8 @@ create_task_copyfn (tree task_stmt, omp_context *ctx)
          df = *(tree *) pointer_map_contains (tcctx.cb.decl_map, df);
          ptr = build_fold_indirect_ref (arg);
          ptr = build3 (COMPONENT_REF, TREE_TYPE (df), ptr, df, NULL);
-         t = build_gimple_modify_stmt (ptr, build_fold_addr_expr (dst));
+         t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
+                     build_fold_addr_expr (dst));
          append_to_statement_list (t, &list);
        }
 
@@ -6137,117 +6273,113 @@ create_task_copyfn (tree task_stmt, omp_context *ctx)
   current_function_decl = ctx->cb.src_fn;
 }
 
-/* Lower the OpenMP parallel or task directive in *STMT_P.  CTX holds context
-   information for the directive.  */
+/* Lower the OpenMP parallel or task directive in the current statement
+   in GSI_P.  CTX holds context information for the directive.  */
 
 static void
-lower_omp_taskreg (tree *stmt_p, omp_context *ctx)
+lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
 {
-  tree clauses, par_bind, par_body, new_body, bind;
-  tree olist, ilist, par_olist, par_ilist;
-  tree stmt, child_fn, t;
+  tree clauses;
+  tree child_fn, t;
+  gimple stmt = gsi_stmt (*gsi_p);
+  gimple par_bind, bind;
+  gimple_seq par_body, olist, ilist, par_olist, par_ilist, new_body;
   struct gimplify_ctx gctx;
 
-  stmt = *stmt_p;
-
-  clauses = OMP_TASKREG_CLAUSES (stmt);
-  par_bind = OMP_TASKREG_BODY (stmt);
-  par_body = BIND_EXPR_BODY (par_bind);
+  clauses = gimple_omp_taskreg_clauses (stmt);
+  par_bind = gimple_seq_first_stmt (gimple_omp_body (stmt));
+  par_body = gimple_bind_body (par_bind);
   child_fn = ctx->cb.dst_fn;
-  if (TREE_CODE (stmt) == OMP_PARALLEL && !OMP_PARALLEL_COMBINED (stmt))
+  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
+      && !gimple_omp_parallel_combined_p (stmt))
     {
       struct walk_stmt_info wi;
       int ws_num = 0;
 
       memset (&wi, 0, sizeof (wi));
-      wi.callback = check_combined_parallel;
       wi.info = &ws_num;
       wi.val_only = true;
-      walk_stmts (&wi, &par_bind);
+      walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
       if (ws_num == 1)
-       OMP_PARALLEL_COMBINED (stmt) = 1;
+       gimple_omp_parallel_set_combined_p (stmt, true);
     }
   if (ctx->srecord_type)
     create_task_copyfn (stmt, ctx);
 
   push_gimplify_context (&gctx);
 
-  par_olist = NULL_TREE;
-  par_ilist = NULL_TREE;
+  par_olist = NULL;
+  par_ilist = NULL;
   lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx);
-  lower_omp (&par_body, ctx);
-  if (TREE_CODE (stmt) == OMP_PARALLEL)
+  lower_omp (par_body, ctx);
+  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
     lower_reduction_clauses (clauses, &par_olist, ctx);
 
   /* Declare all the variables created by mapping and the variables
      declared in the scope of the parallel body.  */
   record_vars_into (ctx->block_vars, child_fn);
-  record_vars_into (BIND_EXPR_VARS (par_bind), child_fn);
+  record_vars_into (gimple_bind_vars (par_bind), child_fn);
 
   if (ctx->record_type)
     {
       ctx->sender_decl
        = create_tmp_var (ctx->srecord_type ? ctx->srecord_type
                          : ctx->record_type, ".omp_data_o");
-      OMP_TASKREG_DATA_ARG (stmt) = ctx->sender_decl;
+      gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
     }
 
-  olist = NULL_TREE;
-  ilist = NULL_TREE;
+  olist = NULL;
+  ilist = NULL;
   lower_send_clauses (clauses, &ilist, &olist, ctx);
   lower_send_shared_vars (&ilist, &olist, ctx);
 
   /* Once all the expansions are done, sequence all the different
-     fragments inside OMP_TASKREG_BODY.  */
-  bind = build3 (BIND_EXPR, void_type_node, NULL, NULL,
-                BIND_EXPR_BLOCK (par_bind));
-  TREE_SIDE_EFFECTS (bind) = 1;
+     fragments inside gimple_omp_body.  */
 
-  new_body = alloc_stmt_list ();
+  new_body = NULL;
 
   if (ctx->record_type)
     {
       t = build_fold_addr_expr (ctx->sender_decl);
       /* fixup_child_record_type might have changed receiver_decl's type.  */
       t = fold_convert (TREE_TYPE (ctx->receiver_decl), t);
-      t = build_gimple_modify_stmt (ctx->receiver_decl, t);
-      append_to_statement_list (t, &new_body);
+      gimple_seq_add_stmt (&new_body,
+                          gimple_build_assign (ctx->receiver_decl, t));
     }
 
-  append_to_statement_list (par_ilist, &new_body);
-  append_to_statement_list (par_body, &new_body);
-  append_to_statement_list (par_olist, &new_body);
-  maybe_catch_exception (&new_body);
-  t = make_node (OMP_RETURN);
-  append_to_statement_list (t, &new_body);
-  OMP_TASKREG_BODY (stmt) = new_body;
+  gimple_seq_add_seq (&new_body, par_ilist);
+  gimple_seq_add_seq (&new_body, par_body);
+  gimple_seq_add_seq (&new_body, par_olist);
+  new_body = maybe_catch_exception (new_body);
+  gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
+  gimple_omp_set_body (stmt, new_body);
 
-  append_to_statement_list (stmt, &BIND_EXPR_BODY (bind));
+  bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
+  gimple_bind_add_stmt (bind, stmt);
   if (ilist || olist)
     {
-      append_to_statement_list (bind, &ilist);
-      append_to_statement_list (olist, &ilist);
-      bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
-      TREE_SIDE_EFFECTS (bind) = 1;
-      append_to_statement_list (ilist, &BIND_EXPR_BODY (bind));
+      gimple_seq_add_stmt (&ilist, bind);
+      gimple_seq_add_seq (&ilist, olist);
+      bind = gimple_build_bind (NULL, ilist, NULL);
     }
 
-  *stmt_p = bind;
+  gsi_replace (gsi_p, bind, true);
 
-  pop_gimplify_context (NULL_TREE);
+  pop_gimplify_context (NULL);
 }
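
Note: lower_omp_taskreg also shows how statement walks are hooked up now.
walk_stmt_info no longer carries a callback pointer; the statement and
operand callbacks are passed to walk_gimple_seq directly.  An illustrative
fragment, matching the check_combined_parallel use above:

    struct walk_stmt_info wi;
    int ws_num = 0;

    memset (&wi, 0, sizeof (wi));
    wi.info = &ws_num;          /* Private data the callback can update.  */
    wi.val_only = true;

    /* Statement callback first, operand (tree) callback second.  */
    walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
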
 
 /* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
-   regimplified.  */
+   regimplified.  If DATA is non-NULL, lower_omp_1 is outside
+   of an OpenMP context, but with task_shared_vars set.  */
 
 static tree
-lower_omp_2 (tree *tp, int *walk_subtrees, void *data)
+lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
+                       void *data)
 {
   tree t = *tp;
-  omp_context *ctx = (omp_context *) data;
 
   /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
-  if (TREE_CODE (t) == VAR_DECL && ctx && DECL_HAS_VALUE_EXPR_P (t))
+  if (TREE_CODE (t) == VAR_DECL && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
     return t;
 
   if (task_shared_vars
@@ -6257,7 +6389,7 @@ lower_omp_2 (tree *tp, int *walk_subtrees, void *data)
 
   /* If a global variable has been privatized, TREE_CONSTANT on
      ADDR_EXPR might be wrong.  */
-  if (ctx && TREE_CODE (t) == ADDR_EXPR)
+  if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
     recompute_tree_invariant_for_addr_expr (t);
 
   *walk_subtrees = !TYPE_P (t) && !DECL_P (t);
@@ -6265,158 +6397,106 @@ lower_omp_2 (tree *tp, int *walk_subtrees, void *data)
 }
 
 static void
-lower_omp_1 (tree *tp, omp_context *ctx, tree_stmt_iterator *tsi)
+lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
 {
-  tree t = *tp;
+  gimple stmt = gsi_stmt (*gsi_p);
+  struct walk_stmt_info wi;
 
-  if (!t)
-    return;
+  if (gimple_has_location (stmt))
+    input_location = gimple_location (stmt);
 
-  if (EXPR_HAS_LOCATION (t))
-    input_location = EXPR_LOCATION (t);
+  if (task_shared_vars)
+    memset (&wi, '\0', sizeof (wi));
 
   /* If we have issued syntax errors, avoid doing any heavy lifting.
      Just replace the OpenMP directives with a NOP to avoid
      confusing RTL expansion.  */
-  if (errorcount && OMP_DIRECTIVE_P (t))
+  if (errorcount && is_gimple_omp (stmt))
     {
-      *tp = build_empty_stmt ();
+      gsi_replace (gsi_p, gimple_build_nop (), true);
       return;
     }
 
-  switch (TREE_CODE (t))
+  switch (gimple_code (stmt))
     {
-    case STATEMENT_LIST:
-      {
-       tree_stmt_iterator i;
-       for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
-         lower_omp_1 (tsi_stmt_ptr (i), ctx, &i);
-      }
-      break;
-
-    case COND_EXPR:
-      lower_omp_1 (&COND_EXPR_THEN (t), ctx, NULL);
-      lower_omp_1 (&COND_EXPR_ELSE (t), ctx, NULL);
+    case GIMPLE_COND:
       if ((ctx || task_shared_vars)
-         && walk_tree (&COND_EXPR_COND (t), lower_omp_2, ctx, NULL))
-       {
-         tree pre = NULL;
-         gimplify_expr (&COND_EXPR_COND (t), &pre, NULL,
-                        is_gimple_condexpr, fb_rvalue);
-         if (pre)
-           {
-             if (tsi)
-               tsi_link_before (tsi, pre, TSI_SAME_STMT);
-             else
-               {
-                 append_to_statement_list (t, &pre);
-                 *tp = pre;
-               }
-           }
-       }
+         && (walk_tree (gimple_cond_lhs_ptr (stmt), lower_omp_regimplify_p,
+                        ctx ? NULL : &wi, NULL)
+             || walk_tree (gimple_cond_rhs_ptr (stmt), lower_omp_regimplify_p,
+                           ctx ? NULL : &wi, NULL)))
+       gimple_regimplify_operands (stmt, gsi_p);
       break;
-    case CATCH_EXPR:
-      lower_omp_1 (&CATCH_BODY (t), ctx, NULL);
+    case GIMPLE_CATCH:
+      lower_omp (gimple_catch_handler (stmt), ctx);
       break;
-    case EH_FILTER_EXPR:
-      lower_omp_1 (&EH_FILTER_FAILURE (t), ctx, NULL);
+    case GIMPLE_EH_FILTER:
+      lower_omp (gimple_eh_filter_failure (stmt), ctx);
       break;
-    case TRY_CATCH_EXPR:
-    case TRY_FINALLY_EXPR:
-      lower_omp_1 (&TREE_OPERAND (t, 0), ctx, NULL);
-      lower_omp_1 (&TREE_OPERAND (t, 1), ctx, NULL);
+    case GIMPLE_TRY:
+      lower_omp (gimple_try_eval (stmt), ctx);
+      lower_omp (gimple_try_cleanup (stmt), ctx);
       break;
-    case BIND_EXPR:
-      lower_omp_1 (&BIND_EXPR_BODY (t), ctx, NULL);
+    case GIMPLE_BIND:
+      lower_omp (gimple_bind_body (stmt), ctx);
       break;
-    case RETURN_EXPR:
-      lower_omp_1 (&TREE_OPERAND (t, 0), ctx, NULL);
+    case GIMPLE_OMP_PARALLEL:
+    case GIMPLE_OMP_TASK:
+      ctx = maybe_lookup_ctx (stmt);
+      lower_omp_taskreg (gsi_p, ctx);
       break;
-
-    case OMP_PARALLEL:
-    case OMP_TASK:
-      ctx = maybe_lookup_ctx (t);
-      lower_omp_taskreg (tp, ctx);
-      break;
-    case OMP_FOR:
-      ctx = maybe_lookup_ctx (t);
+    case GIMPLE_OMP_FOR:
+      ctx = maybe_lookup_ctx (stmt);
       gcc_assert (ctx);
-      lower_omp_for (tp, ctx);
+      lower_omp_for (gsi_p, ctx);
       break;
-    case OMP_SECTIONS:
-      ctx = maybe_lookup_ctx (t);
+    case GIMPLE_OMP_SECTIONS:
+      ctx = maybe_lookup_ctx (stmt);
       gcc_assert (ctx);
-      lower_omp_sections (tp, ctx);
+      lower_omp_sections (gsi_p, ctx);
       break;
-    case OMP_SINGLE:
-      ctx = maybe_lookup_ctx (t);
+    case GIMPLE_OMP_SINGLE:
+      ctx = maybe_lookup_ctx (stmt);
       gcc_assert (ctx);
-      lower_omp_single (tp, ctx);
+      lower_omp_single (gsi_p, ctx);
       break;
-    case OMP_MASTER:
-      ctx = maybe_lookup_ctx (t);
+    case GIMPLE_OMP_MASTER:
+      ctx = maybe_lookup_ctx (stmt);
       gcc_assert (ctx);
-      lower_omp_master (tp, ctx);
+      lower_omp_master (gsi_p, ctx);
       break;
-    case OMP_ORDERED:
-      ctx = maybe_lookup_ctx (t);
+    case GIMPLE_OMP_ORDERED:
+      ctx = maybe_lookup_ctx (stmt);
       gcc_assert (ctx);
-      lower_omp_ordered (tp, ctx);
+      lower_omp_ordered (gsi_p, ctx);
       break;
-    case OMP_CRITICAL:
-      ctx = maybe_lookup_ctx (t);
+    case GIMPLE_OMP_CRITICAL:
+      ctx = maybe_lookup_ctx (stmt);
       gcc_assert (ctx);
-      lower_omp_critical (tp, ctx);
+      lower_omp_critical (gsi_p, ctx);
+      break;
+    case GIMPLE_OMP_ATOMIC_LOAD:
+      if ((ctx || task_shared_vars)
+         && walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt),
+                       lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
+       gimple_regimplify_operands (stmt, gsi_p);
       break;
-
     default:
       if ((ctx || task_shared_vars)
-         && walk_tree (tp, lower_omp_2, ctx, NULL))
-       {
-         /* The gimplifier doesn't gimplify CALL_EXPR_STATIC_CHAIN.
-            Handle that here.  */
-         tree call = get_call_expr_in (t);
-         if (call
-             && CALL_EXPR_STATIC_CHAIN (call)
-             && walk_tree (&CALL_EXPR_STATIC_CHAIN (call), lower_omp_2,
-                           ctx, NULL))
-           {
-             tree pre = NULL;
-             gimplify_expr (&CALL_EXPR_STATIC_CHAIN (call), &pre, NULL,
-                            is_gimple_val, fb_rvalue);
-             if (pre)
-               {
-                 if (tsi)
-                   tsi_link_before (tsi, pre, TSI_SAME_STMT);
-                 else
-                   {
-                     append_to_statement_list (t, &pre);
-                     lower_omp_1 (&pre, ctx, NULL);
-                     *tp = pre;
-                     return;
-                   }
-               }
-           }
-
-         if (tsi == NULL)
-           gimplify_stmt (tp);
-         else
-           {
-             tree pre = NULL;
-             gimplify_expr (tp, &pre, NULL, is_gimple_stmt, fb_none);
-             if (pre)
-               tsi_link_before (tsi, pre, TSI_SAME_STMT);
-           }
-       }
+         && walk_gimple_op (stmt, lower_omp_regimplify_p,
+                            ctx ? NULL : &wi))
+       gimple_regimplify_operands (stmt, gsi_p);
       break;
     }
 }
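
Note: the default case of lower_omp_1 is the tuple replacement for the old
walk_tree + gimplify_expr dance: walk the statement's operands with a
predicate, and if anything needs work, regimplify the statement in place at
the iterator.  An illustrative fragment, where PRED is any walk_tree-style
predicate such as lower_omp_regimplify_p above:

    if (walk_gimple_op (stmt, pred, &wi))
      /* Some operand needs re-gimplification; fix the statement up in
         place, inserting any new statements before GSI_P.  */
      gimple_regimplify_operands (stmt, gsi_p);
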
 
 static void
-lower_omp (tree *stmt_p, omp_context *ctx)
+lower_omp (gimple_seq body, omp_context *ctx)
 {
   location_t saved_location = input_location;
-  lower_omp_1 (stmt_p, ctx, NULL);
+  gimple_stmt_iterator gsi = gsi_start (body);
+  for (gsi = gsi_start (body); !gsi_end_p (gsi); gsi_next (&gsi))
+    lower_omp_1 (&gsi, ctx);
   input_location = saved_location;
 }
 \f
@@ -6425,10 +6505,13 @@ lower_omp (tree *stmt_p, omp_context *ctx)
 static unsigned int
 execute_lower_omp (void)
 {
+  gimple_seq body;
+
   all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
                                 delete_omp_context);
 
-  scan_omp (&DECL_SAVED_TREE (current_function_decl), NULL);
+  body = gimple_body (current_function_decl);
+  scan_omp (body, NULL);
   gcc_assert (taskreg_nesting_level == 0);
 
   if (all_contexts->root)
@@ -6437,7 +6520,7 @@ execute_lower_omp (void)
 
       if (task_shared_vars)
        push_gimplify_context (&gctx);
-      lower_omp (&DECL_SAVED_TREE (current_function_decl), NULL);
+      lower_omp (body, NULL);
       if (task_shared_vars)
        pop_gimplify_context (NULL);
     }
@@ -6486,13 +6569,25 @@ static splay_tree all_labels;
    true if an error is detected.  */
 
 static bool
-diagnose_sb_0 (tree *stmt_p, tree branch_ctx, tree label_ctx)
+diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
+              gimple branch_ctx, gimple label_ctx)
 {
-  bool exit_p = true;
-
-  if ((label_ctx ? TREE_VALUE (label_ctx) : NULL) == branch_ctx)
+  if (label_ctx == branch_ctx)
     return false;
 
+     
+  /* Previously we kept track of the label's entire context in
+     diagnose_sb_[12] so we could traverse it and issue a correct
+     "exit" or "enter" error message upon a structured block violation.
+
+     We built that context by chaining statements together with
+     tree_cons, but there is no easy counterpart in gimple tuples.
+     It seems like far too much work for issuing exit/enter error
+     messages.  If someone really misses the distinct error
+     message... patches welcome.  */
+
+#if 0
   /* Try to avoid confusing the user by producing an error message
      with correct "exit" or "enter" verbiage.  We prefer "exit"
      unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
@@ -6515,63 +6610,64 @@ diagnose_sb_0 (tree *stmt_p, tree branch_ctx, tree label_ctx)
     error ("invalid exit from OpenMP structured block");
   else
     error ("invalid entry to OpenMP structured block");
+#endif
 
-  *stmt_p = build_empty_stmt ();
+  /* If it's obvious we have an invalid entry, be specific about the error.  */
+  if (branch_ctx == NULL)
+    error ("invalid entry to OpenMP structured block");
+  else
+    /* Otherwise, be vague and lazy, but efficient.  */
+    error ("invalid branch to/from an OpenMP structured block");
+
+  gsi_replace (gsi_p, gimple_build_nop (), false);
   return true;
 }
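
Note: diagnose_sb_1 and diagnose_sb_2 below are walk_gimple_seq statement
callbacks.  Their signature differs from the old walk_tree callbacks: they
receive a statement iterator, set *HANDLED_OPS_P instead of *WALK_SUBTREES,
and rely on the WALK_SUBSTMTS case labels so the walker recurses into
container statements.  A minimal illustrative skeleton (the callback name is
invented):

    static tree
    my_stmt_callback (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
                      struct walk_stmt_info *wi)
    {
      gimple stmt = gsi_stmt (*gsi_p);

      *handled_ops_p = true;           /* Do not walk operands by default.  */
      switch (gimple_code (stmt))
        {
        WALK_SUBSTMTS;                 /* Containers (binds, trys, ...): recurse.  */

        case GIMPLE_OMP_PARALLEL:
          /* Walk the directive body with the same callback and info.  */
          walk_gimple_seq (gimple_omp_body (stmt), my_stmt_callback, NULL, wi);
          break;

        default:
          break;
        }

      return NULL_TREE;                /* NULL means keep walking.  */
    }
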
 
 /* Pass 1: Create a minimal tree of OpenMP structured blocks, and record
-   where in the tree each label is found.  */
+   where each label is found.  */
 
 static tree
-diagnose_sb_1 (tree *tp, int *walk_subtrees, void *data)
+diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
+              struct walk_stmt_info *wi)
 {
-  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
-  tree context = (tree) wi->info;
-  tree inner_context;
-  tree t = *tp;
-  int i;
+  gimple context = (gimple) wi->info;
+  gimple inner_context;
+  gimple stmt = gsi_stmt (*gsi_p);
 
-  *walk_subtrees = 0;
-  switch (TREE_CODE (t))
+  *handled_ops_p = true;
+
+ switch (gimple_code (stmt))
     {
-    case OMP_PARALLEL:
-    case OMP_TASK:
-    case OMP_SECTIONS:
-    case OMP_SINGLE:
-      walk_tree (&OMP_CLAUSES (t), diagnose_sb_1, wi, NULL);
-      /* FALLTHRU */
-    case OMP_SECTION:
-    case OMP_MASTER:
-    case OMP_ORDERED:
-    case OMP_CRITICAL:
-      /* The minimal context here is just a tree of statements.  */
-      inner_context = tree_cons (NULL, t, context);
+    WALK_SUBSTMTS;
+      
+    case GIMPLE_OMP_PARALLEL:
+    case GIMPLE_OMP_TASK:
+    case GIMPLE_OMP_SECTIONS:
+    case GIMPLE_OMP_SINGLE:
+    case GIMPLE_OMP_SECTION:
+    case GIMPLE_OMP_MASTER:
+    case GIMPLE_OMP_ORDERED:
+    case GIMPLE_OMP_CRITICAL:
+      /* The minimal context here is just the current OMP construct.  */
+      inner_context = stmt;
       wi->info = inner_context;
-      walk_stmts (wi, &OMP_BODY (t));
+      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
       wi->info = context;
       break;
 
-    case OMP_FOR:
-      walk_tree (&OMP_FOR_CLAUSES (t), diagnose_sb_1, wi, NULL);
-      inner_context = tree_cons (NULL, t, context);
+    case GIMPLE_OMP_FOR:
+      inner_context = stmt;
       wi->info = inner_context;
-      for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (t)); i++)
-       {
-         walk_tree (&TREE_VEC_ELT (OMP_FOR_INIT (t), i), diagnose_sb_1,
-                    wi, NULL);
-         walk_tree (&TREE_VEC_ELT (OMP_FOR_COND (t), i), diagnose_sb_1,
-                    wi, NULL);
-         walk_tree (&TREE_VEC_ELT (OMP_FOR_INCR (t), i), diagnose_sb_1,
-                    wi, NULL);
-       }
-      walk_stmts (wi, &OMP_FOR_PRE_BODY (t));
-      walk_stmts (wi, &OMP_FOR_BODY (t));
+      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
+        walk them.  */
+      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
+                      diagnose_sb_1, NULL, wi);
+      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
       wi->info = context;
       break;
 
-    case LABEL_EXPR:
-      splay_tree_insert (all_labels, (splay_tree_key) LABEL_EXPR_LABEL (t),
+    case GIMPLE_LABEL:
+      splay_tree_insert (all_labels, (splay_tree_key) gimple_label_label (stmt),
                         (splay_tree_value) context);
       break;
 
@@ -6586,76 +6682,68 @@ diagnose_sb_1 (tree *tp, int *walk_subtrees, void *data)
    the destination label's context.  */
 
 static tree
-diagnose_sb_2 (tree *tp, int *walk_subtrees, void *data)
+diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
+              struct walk_stmt_info *wi)
 {
-  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
-  tree context = (tree) wi->info;
+  gimple context = (gimple) wi->info;
   splay_tree_node n;
-  tree t = *tp;
-  int i;
+  gimple stmt = gsi_stmt (*gsi_p);
 
-  *walk_subtrees = 0;
-  switch (TREE_CODE (t))
+  *handled_ops_p = true;
+
+  switch (gimple_code (stmt))
     {
-    case OMP_PARALLEL:
-    case OMP_TASK:
-    case OMP_SECTIONS:
-    case OMP_SINGLE:
-      walk_tree (&OMP_CLAUSES (t), diagnose_sb_2, wi, NULL);
-      /* FALLTHRU */
-    case OMP_SECTION:
-    case OMP_MASTER:
-    case OMP_ORDERED:
-    case OMP_CRITICAL:
-      wi->info = t;
-      walk_stmts (wi, &OMP_BODY (t));
+    WALK_SUBSTMTS;
+
+    case GIMPLE_OMP_PARALLEL:
+    case GIMPLE_OMP_TASK:
+    case GIMPLE_OMP_SECTIONS:
+    case GIMPLE_OMP_SINGLE:
+    case GIMPLE_OMP_SECTION:
+    case GIMPLE_OMP_MASTER:
+    case GIMPLE_OMP_ORDERED:
+    case GIMPLE_OMP_CRITICAL:
+      wi->info = stmt;
+      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_2, NULL, wi);
       wi->info = context;
       break;
 
-    case OMP_FOR:
-      walk_tree (&OMP_FOR_CLAUSES (t), diagnose_sb_2, wi, NULL);
-      wi->info = t;
-      for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (t)); i++)
-       {
-         walk_tree (&TREE_VEC_ELT (OMP_FOR_INIT (t), i), diagnose_sb_2,
-                    wi, NULL);
-         walk_tree (&TREE_VEC_ELT (OMP_FOR_COND (t), i), diagnose_sb_2,
-                    wi, NULL);
-         walk_tree (&TREE_VEC_ELT (OMP_FOR_INCR (t), i), diagnose_sb_2,
-                    wi, NULL);
-       }
-      walk_stmts (wi, &OMP_FOR_PRE_BODY (t));
-      walk_stmts (wi, &OMP_FOR_BODY (t));
+    case GIMPLE_OMP_FOR:
+      wi->info = stmt;
+      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
+        walk them.  */
+      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
+                      diagnose_sb_2, NULL, wi);
+      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_2, NULL, wi);
       wi->info = context;
       break;
 
-    case GOTO_EXPR:
+    case GIMPLE_GOTO:
       {
-       tree lab = GOTO_DESTINATION (t);
+       tree lab = gimple_goto_dest (stmt);
        if (TREE_CODE (lab) != LABEL_DECL)
          break;
 
        n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
-       diagnose_sb_0 (tp, context, n ? (tree) n->value : NULL_TREE);
+       diagnose_sb_0 (gsi_p, context, n ? (gimple) n->value : NULL);
       }
       break;
 
-    case SWITCH_EXPR:
+    case GIMPLE_SWITCH:
       {
-       tree vec = SWITCH_LABELS (t);
-       int i, len = TREE_VEC_LENGTH (vec);
-       for (i = 0; i < len; ++i)
+       unsigned int i;
+       for (i = 0; i < gimple_switch_num_labels (stmt); ++i)
          {
-           tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
+           tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
            n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
-           if (diagnose_sb_0 (tp, context, (tree) n->value))
+           if (n && diagnose_sb_0 (gsi_p, context, (gimple) n->value))
              break;
          }
       }
       break;
 
-    case RETURN_EXPR:
-      diagnose_sb_0 (tp, context, NULL_TREE);
+    case GIMPLE_RETURN:
+      diagnose_sb_0 (gsi_p, context, NULL);
       break;
 
     default:
@@ -6670,24 +6758,25 @@ diagnose_omp_structured_block_errors (tree fndecl)
 {
   tree save_current = current_function_decl;
   struct walk_stmt_info wi;
+  struct function *old_cfun = cfun;
+  gimple_seq body = gimple_body (fndecl);
 
   current_function_decl = fndecl;
+  set_cfun (DECL_STRUCT_FUNCTION (fndecl));
 
   all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
 
   memset (&wi, 0, sizeof (wi));
-  wi.callback = diagnose_sb_1;
-  walk_stmts (&wi, &DECL_SAVED_TREE (fndecl));
+  walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
 
   memset (&wi, 0, sizeof (wi));
-  wi.callback = diagnose_sb_2;
   wi.want_locations = true;
-  wi.want_return_expr = true;
-  walk_stmts (&wi, &DECL_SAVED_TREE (fndecl));
+  walk_gimple_seq (body, diagnose_sb_2, NULL, &wi);
 
   splay_tree_delete (all_labels);
   all_labels = NULL;
 
+  set_cfun (old_cfun);
   current_function_decl = save_current;
 }
 
index 53d2985..d07a5ca 100644
@@ -599,7 +599,6 @@ init_optimization_passes (void)
       NEXT_PASS (pass_build_alias);
       NEXT_PASS (pass_return_slot);
       NEXT_PASS (pass_rename_ssa_copies);
-
       /* Initial scalar cleanups.  */
       NEXT_PASS (pass_complete_unrolli);
       NEXT_PASS (pass_ccp);
@@ -628,14 +627,12 @@ init_optimization_passes (void)
       NEXT_PASS (pass_sra);
       NEXT_PASS (pass_rename_ssa_copies);
       NEXT_PASS (pass_dominator);
-
       /* The only const/copy propagation opportunities left after
         DOM should be due to degenerate PHI nodes.  So rather than
         run the full propagators, run a specialized pass which
         only examines PHIs to discover const/copy propagation
         opportunities.  */
       NEXT_PASS (pass_phi_only_cprop);
-
       NEXT_PASS (pass_reassoc);
       NEXT_PASS (pass_dce);
       NEXT_PASS (pass_dse);
@@ -683,14 +680,12 @@ init_optimization_passes (void)
       NEXT_PASS (pass_reassoc);
       NEXT_PASS (pass_vrp);
       NEXT_PASS (pass_dominator);
-      
       /* The only const/copy propagation opportunities left after
         DOM should be due to degenerate PHI nodes.  So rather than
         run the full propagators, run a specialized pass which
         only examines PHIs to discover const/copy propagation
         opportunities.  */
       NEXT_PASS (pass_phi_only_cprop);
-
       NEXT_PASS (pass_cd_dce);
       NEXT_PASS (pass_tracer);
 
@@ -719,6 +714,7 @@ init_optimization_passes (void)
   NEXT_PASS (pass_warn_function_noreturn);
   NEXT_PASS (pass_free_datastructures);
   NEXT_PASS (pass_mudflap_2);
+
   NEXT_PASS (pass_free_cfg_annotations);
   NEXT_PASS (pass_expand);
   NEXT_PASS (pass_rest_of_compilation);
@@ -958,12 +954,10 @@ execute_function_todo (void *data)
   if (flags & TODO_remove_unused_locals)
     remove_unused_locals ();
 
-  if ((flags & TODO_dump_func)
-      && dump_file && current_function_decl)
+  if ((flags & TODO_dump_func) && dump_file && current_function_decl)
     {
       if (cfun->curr_properties & PROP_trees)
-        dump_function_to_file (current_function_decl,
-                               dump_file, dump_flags);
+        dump_function_to_file (current_function_decl, dump_file, dump_flags);
       else
        {
          if (dump_flags & TDF_SLIM)
@@ -974,7 +968,7 @@ execute_function_todo (void *data)
           else
            print_rtl (dump_file, get_insns ());
 
-         if (cfun->curr_properties & PROP_cfg
+         if ((cfun->curr_properties & PROP_cfg)
              && graph_dump_format != no_graph
              && (dump_flags & TDF_GRAPH))
            print_rtl_graph_with_bb (dump_file_name, get_insns ());
@@ -1043,8 +1037,7 @@ execute_todo (unsigned int flags)
       cgraph_remove_unreachable_nodes (true, dump_file);
     }
 
-  if ((flags & TODO_dump_cgraph)
-      && dump_file && !current_function_decl)
+  if ((flags & TODO_dump_cgraph) && dump_file && !current_function_decl)
     {
       gcc_assert (!cfun);
       dump_cgraph (dump_file);
@@ -1054,9 +1047,7 @@ execute_todo (unsigned int flags)
     }
 
   if (flags & TODO_ggc_collect)
-    {
-      ggc_collect ();
-    }
+    ggc_collect ();
 
   /* Now that the dumping has been done, we can get rid of the optional 
      df problems.  */
@@ -1256,6 +1247,7 @@ execute_one_pass (struct opt_pass *pass)
     }
 
   current_pass = pass;
+
   /* See if we're supposed to run this pass.  */
   if (pass->gate && !pass->gate ())
     return false;
index f85786e..6a887be 100644 (file)
@@ -203,7 +203,7 @@ static struct pointer_map_t *bb_predictions;
    PREDICTOR.  */
 
 bool
-tree_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
+gimple_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
 {
   struct edge_prediction *i;
   void **preds = pointer_map_contains (bb_predictions, bb);
@@ -305,7 +305,7 @@ rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
 
 /* Predict edge E with the given PROBABILITY.  */
 void
-tree_predict_edge (edge e, enum br_predictor predictor, int probability)
+gimple_predict_edge (edge e, enum br_predictor predictor, int probability)
 {
   gcc_assert (profile_status != PROFILE_GUESSED);
   if ((e->src != ENTRY_BLOCK_PTR && EDGE_COUNT (e->src->succs) > 1)
@@ -947,36 +947,38 @@ guess_outgoing_edge_probabilities (basic_block bb)
   combine_predictions_for_insn (BB_END (bb), bb);
 }
 \f
-/* Return constant EXPR will likely have at execution time, NULL if unknown. 
-   The function is used by builtin_expect branch predictor so the evidence
-   must come from this construct and additional possible constant folding.
-  
-   We may want to implement more involved value guess (such as value range
-   propagation based prediction), but such tricks shall go to new
-   implementation.  */
+static tree expr_expected_value (tree, bitmap);
+
+/* Helper function for expr_expected_value.  */
 
 static tree
-expr_expected_value (tree expr, bitmap visited)
+expr_expected_value_1 (tree type, tree op0, enum tree_code code, tree op1, bitmap visited)
 {
-  if (TREE_CONSTANT (expr))
-    return expr;
-  else if (TREE_CODE (expr) == SSA_NAME)
+  gimple def;
+
+  if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
     {
-      tree def = SSA_NAME_DEF_STMT (expr);
+      if (TREE_CONSTANT (op0))
+       return op0;
+
+      if (code != SSA_NAME)
+       return NULL_TREE;
+
+      def = SSA_NAME_DEF_STMT (op0);
 
       /* If we were already here, break the infinite cycle.  */
-      if (bitmap_bit_p (visited, SSA_NAME_VERSION (expr)))
+      if (bitmap_bit_p (visited, SSA_NAME_VERSION (op0)))
        return NULL;
-      bitmap_set_bit (visited, SSA_NAME_VERSION (expr));
+      bitmap_set_bit (visited, SSA_NAME_VERSION (op0));
 
-      if (TREE_CODE (def) == PHI_NODE)
+      if (gimple_code (def) == GIMPLE_PHI)
        {
          /* All the arguments of the PHI node must have the same constant
             length.  */
-         int i;
+         int i, n = gimple_phi_num_args (def);
          tree val = NULL, new_val;
 
-         for (i = 0; i < PHI_NUM_ARGS (def); i++)
+         for (i = 0; i < n; i++)
            {
              tree arg = PHI_ARG_DEF (def, i);
 
@@ -999,81 +1001,121 @@ expr_expected_value (tree expr, bitmap visited)
            }
          return val;
        }
-      if (TREE_CODE (def) != GIMPLE_MODIFY_STMT
-         || GIMPLE_STMT_OPERAND (def, 0) != expr)
-       return NULL;
-      return expr_expected_value (GIMPLE_STMT_OPERAND (def, 1), visited);
-    }
-  else if (TREE_CODE (expr) == CALL_EXPR)
-    {
-      tree decl = get_callee_fndecl (expr);
-      if (!decl)
-       return NULL;
-      if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
-         && DECL_FUNCTION_CODE (decl) == BUILT_IN_EXPECT)
+      if (is_gimple_assign (def))
        {
-         tree val;
+         if (gimple_assign_lhs (def) != op0)
+           return NULL;
 
-         if (call_expr_nargs (expr) != 2)
+         return expr_expected_value_1 (TREE_TYPE (gimple_assign_lhs (def)),
+                                       gimple_assign_rhs1 (def),
+                                       gimple_assign_rhs_code (def),
+                                       gimple_assign_rhs2 (def),
+                                       visited);
+       }
+
+      if (is_gimple_call (def))
+       {
+         tree decl = gimple_call_fndecl (def);
+         if (!decl)
            return NULL;
-         val = CALL_EXPR_ARG (expr, 0);
-         if (TREE_CONSTANT (val))
-           return val;
-         return CALL_EXPR_ARG (expr, 1);
+         if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
+             && DECL_FUNCTION_CODE (decl) == BUILT_IN_EXPECT)
+           {
+             tree val;
+
+             if (gimple_call_num_args (def) != 2)
+               return NULL;
+             val = gimple_call_arg (def, 0);
+             if (TREE_CONSTANT (val))
+               return val;
+             return gimple_call_arg (def, 1);
+           }
        }
+
+      return NULL;
     }
-  if (BINARY_CLASS_P (expr) || COMPARISON_CLASS_P (expr))
+
+  if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
     {
-      tree op0, op1, res;
-      op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
+      tree res;
+      op0 = expr_expected_value (op0, visited);
       if (!op0)
        return NULL;
-      op1 = expr_expected_value (TREE_OPERAND (expr, 1), visited);
+      op1 = expr_expected_value (op1, visited);
       if (!op1)
        return NULL;
-      res = fold_build2 (TREE_CODE (expr), TREE_TYPE (expr), op0, op1);
+      res = fold_build2 (code, type, op0, op1);
       if (TREE_CONSTANT (res))
        return res;
       return NULL;
     }
-  if (UNARY_CLASS_P (expr))
+  if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
     {
-      tree op0, res;
-      op0 = expr_expected_value (TREE_OPERAND (expr, 0), visited);
+      tree res;
+      op0 = expr_expected_value (op0, visited);
       if (!op0)
        return NULL;
-      res = fold_build1 (TREE_CODE (expr), TREE_TYPE (expr), op0);
+      res = fold_build1 (code, type, op0);
       if (TREE_CONSTANT (res))
        return res;
       return NULL;
     }
   return NULL;
 }
+
+/* Return the constant value EXPR is likely to have at execution time, or
+   NULL if unknown.  The function is used by the builtin_expect branch
+   predictor, so the evidence must come from that construct and from any
+   additional constant folding.
+
+   We may want to implement more involved value guesses (such as value range
+   propagation based prediction), but such tricks should go into a new implementation.  */
+
+static tree
+expr_expected_value (tree expr, bitmap visited)
+{
+  enum tree_code code;
+  tree op0, op1;
+
+  if (TREE_CONSTANT (expr))
+    return expr;
+
+  extract_ops_from_tree (expr, &code, &op0, &op1);
+  return expr_expected_value_1 (TREE_TYPE (expr),
+                               op0, code, op1, visited);
+}
+
 \f
 /* Get rid of all builtin_expect calls we no longer need.  */
 static void
 strip_builtin_expect (void)
 {
   basic_block bb;
+  gimple ass_stmt;
+  tree var;
+
   FOR_EACH_BB (bb)
     {
-      block_stmt_iterator bi;
-      for (bi = bsi_start (bb); !bsi_end_p (bi); bsi_next (&bi))
+      gimple_stmt_iterator bi;
+      for (bi = gsi_start_bb (bb); !gsi_end_p (bi); gsi_next (&bi))
        {
-         tree stmt = bsi_stmt (bi);
+         gimple stmt = gsi_stmt (bi);
          tree fndecl;
-         tree call;
 
-         if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-             && (call = GIMPLE_STMT_OPERAND (stmt, 1))
-             && TREE_CODE (call) == CALL_EXPR
-             && (fndecl = get_callee_fndecl (call))
+         if (gimple_code (stmt) != GIMPLE_CALL)
+           continue;
+
+         fndecl = gimple_call_fndecl (stmt);
+
+         if (fndecl
              && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
-             && call_expr_nargs (call) == 2)
+             && gimple_call_num_args (stmt) == 2)
            {
-             GIMPLE_STMT_OPERAND (stmt, 1) = CALL_EXPR_ARG (call, 0);
-             update_stmt (stmt);
+             var = gimple_call_lhs (stmt);
+             ass_stmt = gimple_build_assign (var, gimple_call_arg (stmt, 0));
+
+             gsi_replace (&bi, ass_stmt, true);
            }
        }
     }
@@ -1083,27 +1125,26 @@ strip_builtin_expect (void)
 static void
 tree_predict_by_opcode (basic_block bb)
 {
-  tree stmt = last_stmt (bb);
+  gimple stmt = last_stmt (bb);
   edge then_edge;
-  tree cond;
-  tree op0;
+  tree op0, op1;
   tree type;
   tree val;
+  enum tree_code cmp;
   bitmap visited;
   edge_iterator ei;
 
-  if (!stmt || TREE_CODE (stmt) != COND_EXPR)
+  if (!stmt || gimple_code (stmt) != GIMPLE_COND)
     return;
   FOR_EACH_EDGE (then_edge, ei, bb->succs)
     if (then_edge->flags & EDGE_TRUE_VALUE)
       break;
-  cond = TREE_OPERAND (stmt, 0);
-  if (!COMPARISON_CLASS_P (cond))
-    return;
-  op0 = TREE_OPERAND (cond, 0);
+  op0 = gimple_cond_lhs (stmt);
+  op1 = gimple_cond_rhs (stmt);
+  cmp = gimple_cond_code (stmt);
   type = TREE_TYPE (op0);
   visited = BITMAP_ALLOC (NULL);
-  val = expr_expected_value (cond, visited);
+  val = expr_expected_value_1 (boolean_type_node, op0, cmp, op1, visited);
   BITMAP_FREE (visited);
   if (val)
     {
@@ -1118,9 +1159,9 @@ tree_predict_by_opcode (basic_block bb)
      Similarly, a comparison ptr1 == ptr2 is predicted as false.  */
   if (POINTER_TYPE_P (type))
     {
-      if (TREE_CODE (cond) == EQ_EXPR)
+      if (cmp == EQ_EXPR)
        predict_edge_def (then_edge, PRED_TREE_POINTER, NOT_TAKEN);
-      else if (TREE_CODE (cond) == NE_EXPR)
+      else if (cmp == NE_EXPR)
        predict_edge_def (then_edge, PRED_TREE_POINTER, TAKEN);
     }
   else
@@ -1129,7 +1170,7 @@ tree_predict_by_opcode (basic_block bb)
      EQ tests are usually false and NE tests are usually true. Also,
      most quantities are positive, so we can make the appropriate guesses
      about signed comparisons against zero.  */
-    switch (TREE_CODE (cond))
+    switch (cmp)
       {
       case EQ_EXPR:
       case UNEQ_EXPR:
@@ -1140,8 +1181,7 @@ tree_predict_by_opcode (basic_block bb)
          ;
        /* Comparisons with 0 are often used for booleans and there is
           nothing useful to predict about them.  */
-       else if (integer_zerop (op0)
-                || integer_zerop (TREE_OPERAND (cond, 1)))
+       else if (integer_zerop (op0) || integer_zerop (op1))
          ;
        else
          predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, NOT_TAKEN);
@@ -1157,7 +1197,7 @@ tree_predict_by_opcode (basic_block bb)
        /* Comparisons with 0 are often used for booleans and there is
           nothing useful to predict about them.  */
        else if (integer_zerop (op0)
-                || integer_zerop (TREE_OPERAND (cond, 1)))
+                || integer_zerop (op1))
          ;
        else
          predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, TAKEN);
@@ -1173,23 +1213,23 @@ tree_predict_by_opcode (basic_block bb)
 
       case LE_EXPR:
       case LT_EXPR:
-       if (integer_zerop (TREE_OPERAND (cond, 1))
-           || integer_onep (TREE_OPERAND (cond, 1))
-           || integer_all_onesp (TREE_OPERAND (cond, 1))
-           || real_zerop (TREE_OPERAND (cond, 1))
-           || real_onep (TREE_OPERAND (cond, 1))
-           || real_minus_onep (TREE_OPERAND (cond, 1)))
+       if (integer_zerop (op1)
+           || integer_onep (op1)
+           || integer_all_onesp (op1)
+           || real_zerop (op1)
+           || real_onep (op1)
+           || real_minus_onep (op1))
          predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, NOT_TAKEN);
        break;
 
       case GE_EXPR:
       case GT_EXPR:
-       if (integer_zerop (TREE_OPERAND (cond, 1))
-           || integer_onep (TREE_OPERAND (cond, 1))
-           || integer_all_onesp (TREE_OPERAND (cond, 1))
-           || real_zerop (TREE_OPERAND (cond, 1))
-           || real_onep (TREE_OPERAND (cond, 1))
-           || real_minus_onep (TREE_OPERAND (cond, 1)))
+       if (integer_zerop (op1)
+           || integer_onep (op1)
+           || integer_all_onesp (op1)
+           || real_zerop (op1)
+           || real_onep (op1)
+           || real_minus_onep (op1))
          predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, TAKEN);
        break;
 
@@ -1199,6 +1239,7 @@ tree_predict_by_opcode (basic_block bb)
 }
 
 /* Try to guess whether the value of return means error code.  */
+
 static enum br_predictor
 return_prediction (tree val, enum prediction *prediction)
 {
@@ -1243,10 +1284,10 @@ return_prediction (tree val, enum prediction *prediction)
 static void
 apply_return_prediction (void)
 {
-  tree return_stmt = NULL;
+  gimple return_stmt = NULL;
   tree return_val;
   edge e;
-  tree phi;
+  gimple phi;
   int phi_num_args, i;
   enum br_predictor pred;
   enum prediction direction;
@@ -1256,26 +1297,20 @@ apply_return_prediction (void)
     {
       return_stmt = last_stmt (e->src);
       if (return_stmt
-         && TREE_CODE (return_stmt) == RETURN_EXPR)
+         && gimple_code (return_stmt) == GIMPLE_RETURN)
        break;
     }
   if (!e)
     return;
-  return_val = TREE_OPERAND (return_stmt, 0);
+  return_val = gimple_return_retval (return_stmt);
   if (!return_val)
     return;
-  if (TREE_CODE (return_val) == GIMPLE_MODIFY_STMT)
-    return_val = GIMPLE_STMT_OPERAND (return_val, 1);
   if (TREE_CODE (return_val) != SSA_NAME
       || !SSA_NAME_DEF_STMT (return_val)
-      || TREE_CODE (SSA_NAME_DEF_STMT (return_val)) != PHI_NODE)
-    return;
-  for (phi = SSA_NAME_DEF_STMT (return_val); phi; phi = PHI_CHAIN (phi))
-    if (PHI_RESULT (phi) == return_val)
-      break;
-  if (!phi)
+      || gimple_code (SSA_NAME_DEF_STMT (return_val)) != GIMPLE_PHI)
     return;
-  phi_num_args = PHI_NUM_ARGS (phi);
+  phi = SSA_NAME_DEF_STMT (return_val);
+  phi_num_args = gimple_phi_num_args (phi);
   pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction);
 
   /* Avoid the degenerate case where all return values form the function
@@ -1289,7 +1324,7 @@ apply_return_prediction (void)
       {
        pred = return_prediction (PHI_ARG_DEF (phi, i), &direction);
        if (pred != PRED_NO_PREDICTION)
-         predict_paths_leading_to (PHI_ARG_EDGE (phi, i)->src, pred,
+         predict_paths_leading_to (gimple_phi_arg_edge (phi, i)->src, pred,
                                    direction);
       }
 }
@@ -1307,46 +1342,34 @@ tree_bb_level_predictions (void)
 
   FOR_EACH_BB (bb)
     {
-      block_stmt_iterator bsi = bsi_last (bb);
+      gimple_stmt_iterator gsi;
 
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi);)
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
        {
-         tree stmt = bsi_stmt (bsi);
+         gimple stmt = gsi_stmt (gsi);
          tree decl;
-         bool next = false;
 
-         switch (TREE_CODE (stmt))
+         if (is_gimple_call (stmt))
            {
-             case GIMPLE_MODIFY_STMT:
-               if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == CALL_EXPR)
-                 {
-                   stmt = GIMPLE_STMT_OPERAND (stmt, 1);
-                   goto call_expr;
-                 }
-               break;
-             case CALL_EXPR:
-call_expr:;
-               if (call_expr_flags (stmt) & ECF_NORETURN)
-                 predict_paths_leading_to (bb, PRED_NORETURN,
-                                           NOT_TAKEN);
-               decl = get_callee_fndecl (stmt);
-               if (decl
-                   && lookup_attribute ("cold",
-                                        DECL_ATTRIBUTES (decl)))
-                 predict_paths_leading_to (bb, PRED_COLD_FUNCTION,
-                                           NOT_TAKEN);
-               break;
-             case PREDICT_EXPR:
-               predict_paths_leading_to (bb, PREDICT_EXPR_PREDICTOR (stmt),
-                                         PREDICT_EXPR_OUTCOME (stmt));
-               bsi_remove (&bsi, true);
-               next = true;
-               break;
-             default:
-               break;
+             if (gimple_call_flags (stmt) & ECF_NORETURN)
+               predict_paths_leading_to (bb, PRED_NORETURN,
+                                         NOT_TAKEN);
+             decl = gimple_call_fndecl (stmt);
+             if (decl
+                 && lookup_attribute ("cold",
+                                      DECL_ATTRIBUTES (decl)))
+               predict_paths_leading_to (bb, PRED_COLD_FUNCTION,
+                                         NOT_TAKEN);
            }
-         if (!next)
-           bsi_next (&bsi);
+         else if (gimple_code (stmt) == GIMPLE_PREDICT)
+           {
+             predict_paths_leading_to (bb, gimple_predict_predictor (stmt),
+                                       gimple_predict_outcome (stmt));
+             gsi_remove (&gsi, true);
+             continue;
+           }
+
+         gsi_next (&gsi);
        }
     }
 }
@@ -1416,7 +1439,7 @@ tree_estimate_probability (void)
              && e->dest != EXIT_BLOCK_PTR
              && single_succ_p (e->dest)
              && single_succ_edge (e->dest)->dest == EXIT_BLOCK_PTR
-             && TREE_CODE (last_stmt (e->dest)) == RETURN_EXPR)
+             && gimple_code (last_stmt (e->dest)) == GIMPLE_RETURN)
            {
              edge e1;
              edge_iterator ei1;
@@ -1442,23 +1465,20 @@ tree_estimate_probability (void)
              && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
              && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
            {
-             block_stmt_iterator bi;
+             gimple_stmt_iterator bi;
 
              /* The call heuristic claims that a guarded function call
                 is improbable.  This is because such calls are often used
                 to signal exceptional situations such as printing error
                 messages.  */
-             for (bi = bsi_start (e->dest); !bsi_end_p (bi);
-                  bsi_next (&bi))
+             for (bi = gsi_start_bb (e->dest); !gsi_end_p (bi);
+                  gsi_next (&bi))
                {
-                 tree stmt = bsi_stmt (bi);
-                 if ((TREE_CODE (stmt) == CALL_EXPR
-                      || (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-                          && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1))
-                             == CALL_EXPR))
+                 gimple stmt = gsi_stmt (bi);
+                 if (is_gimple_call (stmt)
                      /* Constant and pure calls are hardly used to signalize
                         something exceptional.  */
-                     && TREE_SIDE_EFFECTS (stmt))
+                     && gimple_has_side_effects (stmt))
                    {
                      predict_edge_def (e, PRED_CALL, NOT_TAKEN);
                      break;
@@ -1483,7 +1503,7 @@ tree_estimate_probability (void)
   remove_fake_exit_edges ();
   loop_optimizer_finalize ();
   if (dump_file && (dump_flags & TDF_DETAILS))
-    dump_tree_cfg (dump_file, dump_flags);
+    gimple_dump_cfg (dump_file, dump_flags);
   if (profile_status == PROFILE_ABSENT)
     profile_status = PROFILE_GUESSED;
   return 0;
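(A small user-level illustration, not taken from the patch, of what the reworked expr_expected_value_1/strip_builtin_expect machinery consumes.  The hint below is what the builtin_expect branch predictor acts on before the call is stripped and replaced by a plain assignment of its first argument.)

    /* Mark the error path as unlikely; __builtin_expect returns its first
       argument, so the computed condition itself is unchanged.  */
    int
    read_config (const char *path)
    {
      if (__builtin_expect (path == 0, 0))
        return -1;            /* predicted not taken  */
      return 0;
    }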
index ed31861..a310ec4 100644 (file)
@@ -29,6 +29,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "ggc.h"
 #include "langhooks.h"
 #include "tree-iterator.h"
+#include "diagnostic.h"
 #include "tree-flow.h"
 
 /* Define the hash table of nodes already seen.
@@ -281,7 +282,7 @@ print_node (FILE *file, const char *prefix, tree node, int indent)
       if (indent <= 4)
        print_node_brief (file, "type", TREE_TYPE (node), indent + 4);
     }
-  else if (!GIMPLE_TUPLE_P (node))
+  else
     {
       print_node (file, "type", TREE_TYPE (node), indent + 4);
       if (TREE_TYPE (node))
@@ -712,18 +713,6 @@ print_node (FILE *file, const char *prefix, tree node, int indent)
       print_node (file, "chain", TREE_CHAIN (node), indent + 4);
       break;
 
-    case tcc_gimple_stmt:
-      len = TREE_CODE_LENGTH (TREE_CODE (node));
-
-      for (i = 0; i < len; i++)
-       {
-         char temp[10];
-
-         sprintf (temp, "arg %d", i);
-         print_node (file, temp, GIMPLE_STMT_OPERAND (node, i), indent + 4);
-       }
-      break;
-
     case tcc_constant:
     case tcc_exceptional:
       switch (TREE_CODE (node))
@@ -896,8 +885,8 @@ print_node (FILE *file, const char *prefix, tree node, int indent)
 
        case SSA_NAME:
          print_node_brief (file, "var", SSA_NAME_VAR (node), indent + 4);
-         print_node_brief (file, "def_stmt",
-                           SSA_NAME_DEF_STMT (node), indent + 4);
+         fprintf (file, "def_stmt ");
+         print_gimple_stmt (file, SSA_NAME_DEF_STMT (node), indent + 4, 0);
 
          indent_to (file, indent + 4);
          fprintf (file, "version %u", SSA_NAME_VERSION (node));
@@ -917,12 +906,6 @@ print_node (FILE *file, const char *prefix, tree node, int indent)
            }
          break;
 
-       case PHI_NODE:
-         print_node (file, "result", PHI_RESULT (node), indent + 4);
-         for (i = 0; i < PHI_NUM_ARGS (node); i++)
-           print_node (file, "arg", PHI_ARG_DEF (node, i), indent + 4);
-         break;
-
        case OMP_CLAUSE:
            {
              int i;
index 7856822..b6cddc2 100644 (file)
@@ -95,6 +95,7 @@ struct bb_info {
 #define EDGE_INFO(e)  ((struct edge_info *) (e)->aux)
 #define BB_INFO(b)  ((struct bb_info *) (b)->aux)
 
+
 /* Counter summary from the last set of coverage counts read.  */
 
 const struct gcov_ctr_summary *profile_info;
@@ -671,7 +672,7 @@ compute_value_histograms (histogram_values values)
   for (i = 0; i < VEC_length (histogram_value, values); i++)
     {
       histogram_value hist = VEC_index (histogram_value, values, i);
-      tree stmt = hist->hvalue.stmt;
+      gimple stmt = hist->hvalue.stmt;
 
       t = (int) hist->type;
 
@@ -793,16 +794,16 @@ branch_prob (void)
 
       FOR_EACH_EDGE (e, ei, bb->succs)
        {
-         block_stmt_iterator bsi;
-         tree last = NULL;
+         gimple_stmt_iterator gsi;
+         gimple last = NULL;
 
          /* It may happen that there are compiler generated statements
             without a locus at all.  Go through the basic block from the
             last to the first statement looking for a locus.  */
-         for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
+         for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
            {
-             last = bsi_stmt (bsi);
-             if (EXPR_LOCUS (last))
+             last = gsi_stmt (gsi);
+             if (gimple_has_location (last))
                break;
            }
 
@@ -811,13 +812,14 @@ branch_prob (void)
             Don't do that when the locuses match, so 
             if (blah) goto something;
             is not computed twice.  */
-         if (last && EXPR_LOCUS (last)
-             && e->goto_locus
+         if (last
+             && gimple_has_location (last)
+             && e->goto_locus != UNKNOWN_LOCATION
              && !single_succ_p (bb)
              && (LOCATION_FILE (e->goto_locus)
-                 != LOCATION_FILE (EXPR_LOCATION  (last))
+                 != LOCATION_FILE (gimple_location (last))
                  || (LOCATION_LINE (e->goto_locus)
-                     != LOCATION_LINE (EXPR_LOCATION  (last)))))
+                     != LOCATION_LINE (gimple_location  (last)))))
            {
              basic_block new = split_edge (e);
              single_succ_edge (new)->goto_locus = e->goto_locus;
@@ -982,7 +984,7 @@ branch_prob (void)
 
       FOR_EACH_BB (bb)
        {
-         block_stmt_iterator bsi;
+         gimple_stmt_iterator gsi;
 
          offset = 0;
 
@@ -994,26 +996,18 @@ branch_prob (void)
                               &offset, bb);
            }
 
-         for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+         for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
            {
-             tree stmt = bsi_stmt (bsi);
-             if (EXPR_HAS_LOCATION (stmt))
-               output_location (EXPR_FILENAME (stmt), EXPR_LINENO (stmt),
-                                &offset, bb);
-             /* Take into account modify statements nested in return
-                produced by C++ NRV transformation.  */
-             if (TREE_CODE (stmt) == RETURN_EXPR
-                 && TREE_OPERAND (stmt, 0)
-                 && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR
-                 && EXPR_HAS_LOCATION (TREE_OPERAND (stmt, 0)))
-               output_location (EXPR_FILENAME (TREE_OPERAND (stmt, 0)),
-                                EXPR_LINENO (TREE_OPERAND (stmt, 0)),
+             gimple stmt = gsi_stmt (gsi);
+             if (gimple_has_location (stmt))
+               output_location (gimple_filename (stmt), gimple_lineno (stmt),
                                 &offset, bb);
            }
 
          /* Notice GOTO expressions we eliminated while constructing the
             CFG.  */
-         if (single_succ_p (bb) && single_succ_edge (bb)->goto_locus)
+         if (single_succ_p (bb)
+             && single_succ_edge (bb)->goto_locus != UNKNOWN_LOCATION)
            {
              location_t curr_location = single_succ_edge (bb)->goto_locus;
              /* ??? The FILE/LINE API is inconsistent for these cases.  */
@@ -1063,7 +1057,7 @@ branch_prob (void)
        instrument_values (values);
 
       /* Commit changes done by instrumentation.  */
-      bsi_commit_edge_inserts ();
+      gsi_commit_edge_inserts ();
     }
 
   free_aux_for_edges ();
@@ -1251,4 +1245,3 @@ tree_register_profile_hooks (void)
   gcc_assert (current_ir_type () == IR_GIMPLE);
   profile_hooks = &tree_profile_hooks;
 }
-
index af02f34..664c910 100644 (file)
@@ -1356,9 +1356,6 @@ expand_expr_stmt (tree exp)
   tree type;
 
   value = expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
-  if (GIMPLE_TUPLE_P (exp))
-    type = void_type_node;
-  else
   type = TREE_TYPE (exp);
 
   /* If all we do is reference a volatile value in memory,
@@ -1412,7 +1409,6 @@ warn_if_unused_value (const_tree exp, location_t locus)
     case PREDECREMENT_EXPR:
     case POSTDECREMENT_EXPR:
     case MODIFY_EXPR:
-    case GIMPLE_MODIFY_STMT:
     case INIT_EXPR:
     case TARGET_EXPR:
     case CALL_EXPR:
@@ -1581,10 +1577,10 @@ expand_return (tree retval)
       expand_null_return ();
       return;
     }
-  else if ((TREE_CODE (retval) == GIMPLE_MODIFY_STMT
+  else if ((TREE_CODE (retval) == MODIFY_EXPR
            || TREE_CODE (retval) == INIT_EXPR)
-          && TREE_CODE (GENERIC_TREE_OPERAND (retval, 0)) == RESULT_DECL)
-    retval_rhs = GENERIC_TREE_OPERAND (retval, 1);
+          && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
+    retval_rhs = TREE_OPERAND (retval, 1);
   else
     retval_rhs = retval;
 
@@ -1603,7 +1599,7 @@ expand_return (tree retval)
      (and in expand_call).  */
 
   else if (retval_rhs != 0
-          && TYPE_MODE (GENERIC_TREE_TYPE (retval_rhs)) == BLKmode
+          && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
           && REG_P (result_rtl))
     {
       int i;
index be70b02..80bfe61 100644 (file)
@@ -786,8 +786,9 @@ extern void fancy_abort (const char *, int, const char *) ATTRIBUTE_NORETURN;
    change after the fact).  Beyond these uses, most other cases of
    using this macro should be viewed with extreme caution.  */
 
-#if defined(__GNUC__) && GCC_VERSION != 4000
-/* GCC 4.0.x has a bug where it may ICE on this expression.  */
+#if defined(__GNUC__) && GCC_VERSION > 4000
+/* GCC 4.0.x has a bug where it may ICE on this expression,
+   as does GCC 3.4.x (PR17436).  */
 #define CONST_CAST2(TOTYPE,FROMTYPE,X) ((__extension__(union {FROMTYPE _q; TOTYPE _nq;})(X))._nq)
 #else
 #define CONST_CAST2(TOTYPE,FROMTYPE,X) ((TOTYPE)(FROMTYPE)(X))
@@ -796,6 +797,7 @@ extern void fancy_abort (const char *, int, const char *) ATTRIBUTE_NORETURN;
 #define CONST_CAST_TREE(X) CONST_CAST(union tree_node *, (X))
 #define CONST_CAST_RTX(X) CONST_CAST(struct rtx_def *, (X))
 #define CONST_CAST_BB(X) CONST_CAST(struct basic_block_def *, (X))
+#define CONST_CAST_GIMPLE(X) CONST_CAST(union gimple_statement_d *, (X))
 
 /* Activate certain diagnostics as warnings (not errors via the
    -Werror flag).  */
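(A sketch of the intended use of the new CONST_CAST_GIMPLE macro, by analogy with the existing CONST_CAST_TREE/CONST_CAST_RTX variants; the helper below is hypothetical and not part of the patch.)

    /* Strip the const qualifier from a const_gimple in the rare places
       that legitimately need a mutable statement pointer.  */
    static gimple
    stmt_as_mutable (const_gimple stmt)
    {
      return CONST_CAST_GIMPLE (stmt);
    }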
index a44ab0b..331cc53 100644 (file)
@@ -707,8 +707,8 @@ struct gcc_target
   void (* expand_builtin_va_start) (tree valist, rtx nextarg);
 
   /* Gimplifies a VA_ARG_EXPR.  */
-  tree (* gimplify_va_arg_expr) (tree valist, tree type, tree *pre_p,
-                                tree *post_p);
+  tree (* gimplify_va_arg_expr) (tree valist, tree type, gimple_seq *pre_p,
+                                gimple_seq *post_p);
 
   /* Validity-checking routines for PCH files, target-specific.
      get_pch_validity returns a pointer to the data to be stored,
@@ -754,10 +754,9 @@ struct gcc_target
   void (* dwarf_handle_frame_unspec) (const char *, rtx, int);
 
   /* Perform architecture specific checking of statements gimplified
-     from VA_ARG_EXPR.  LHS is left hand side of MODIFY_EXPR, RHS
-     is right hand side.  Returns true if the statements doesn't need
-     to be checked for va_list references.  */
-  bool (* stdarg_optimize_hook) (struct stdarg_info *ai, const_tree lhs, const_tree rhs);
+     from VA_ARG_EXPR.  STMT is the statement.  Returns true if the statement
+     doesn't need to be checked for va_list references.  */
+  bool (* stdarg_optimize_hook) (struct stdarg_info *ai, const_gimple stmt);
 
   /* This target hook allows the operating system to override the DECL
      that represents the external variable that contains the stack
index 66d3212..dc2bb16 100644 (file)
@@ -1,3 +1,20 @@
+2008-07-28  Richard Guenther  <rguenther@suse.de>
+
+       Merge from gimple-tuples-branch.
+
+       * gcc.c-torture/compile/20080721-1.c: New testcase.
+       * gcc.dg/torture/20080716-1.c: Likewise.
+       * gcc.dg/tree-ssa/tailcall-3.c: Likewise.
+       * gcc.dg/tree-ssa/20080530.c: Likewise.
+       * gcc.dg/20080615-1.c: Likewise.
+       * g++.dg/torture/pr36826.C: Likewise.
+       * gcc.dg/fold-alloca-1.c: Look into cleanup_cfg1 dump instead of
+       useless dump.
+       * gcc.dg/tree-ssa/pr21658.c: Update search pattern.
+       * gfortran.dg/gomp/block-1.f90: Adjust dg-error.
+       * gcc.dg/tree-ssa/20030728-1.c: Test final_cleanup instead of
+       optimized dump.
+
 2008-07-28  Simon Baldwin  <simonb@google.com>
 
        * gcc.dg/pragma-message.c: New.
diff --git a/gcc/testsuite/g++.dg/torture/pr36826.C b/gcc/testsuite/g++.dg/torture/pr36826.C
new file mode 100644 (file)
index 0000000..436220b
--- /dev/null
@@ -0,0 +1,166 @@
+template <class T> T CoinMax(register const T x1, register const T x2); 
+template <class T> T CoinMin(register const T x1, register const T x2);
+class CoinIndexedVector;
+class ClpModel {
+protected:
+    double objectiveScale_;
+    double rhsScale_;
+    int numberRows_;
+    int numberColumns_;
+    double * rowActivity_;
+    double * columnActivity_;
+    double * dual_;
+    double * reducedCost_;
+    double* rowLower_;
+    double* rowUpper_;
+    double * rowObjective_;
+    double * columnLower_;
+    double * columnUpper_;
+    double * rowScale_;
+    double * columnScale_;
+    double * inverseRowScale_;
+    double * inverseColumnScale_;
+    int problemStatus_;
+    int secondaryStatus_;
+};
+class ClpSimplex : public ClpModel {
+    void deleteRim(int getRidOfFactorizationData=2);
+    double upperOut_;
+    double dualTolerance_;
+    double primalTolerance_;
+    double * rowLowerWork_;
+    double * columnLowerWork_;
+    double * rowUpperWork_;
+    double * columnUpperWork_;
+    double * rowObjectiveWork_;
+    CoinIndexedVector * columnArray_[6];
+    double * reducedCostWork_;
+    double * rowActivityWork_;
+    double * columnActivityWork_;
+    ClpSimplex * auxiliaryModel_;
+};
+class CoinIndexedVector {
+public:
+    void clear();
+};
+void ClpSimplex::deleteRim(int getRidOfFactorizationData)
+{
+  int numberRows=numberRows_;
+  int numberColumns=numberColumns_;
+  int i;
+  int numberPrimalScaled=0;
+  int numberPrimalUnscaled=0;
+  int numberDualScaled=0;
+  int numberDualUnscaled=0;
+  double scaleC = 1.0/objectiveScale_;
+  double scaleR = 1.0/rhsScale_;
+  if (!inverseColumnScale_) {
+      for (i=0; i<numberColumns; i++)
+       {
+         double scaleFactor = columnScale_[i];
+         double valueScaled = columnActivityWork_[i];
+         double lowerScaled = columnLowerWork_[i];
+         double upperScaled = columnUpperWork_[i];
+         if (lowerScaled>-1.0e20||upperScaled<1.0e20) {
+             if (valueScaled<lowerScaled-primalTolerance_||   valueScaled>upperScaled+primalTolerance_)
+               numberPrimalScaled++;
+             else
+               upperOut_ = CoinMax(upperOut_,CoinMin(valueScaled-lowerScaled,upperScaled-valueScaled));
+         }
+         columnActivity_[i] = valueScaled*scaleFactor*scaleR;
+         double value = columnActivity_[i];
+         if (value<columnLower_[i]-primalTolerance_)
+           numberPrimalUnscaled++;
+         else if (value>columnUpper_[i]+primalTolerance_)
+           numberPrimalUnscaled++;
+         double valueScaledDual = reducedCostWork_[i];
+         if (valueScaled>columnLowerWork_[i]+primalTolerance_&&valueScaledDual>dualTolerance_)
+           numberDualScaled++;
+         if (valueScaled<columnUpperWork_[i]-primalTolerance_&&valueScaledDual<-dualTolerance_)
+           numberDualScaled++;
+         reducedCost_[i] = (valueScaledDual*scaleC)/scaleFactor;
+         double valueDual = reducedCost_[i];
+         if (value>columnLower_[i]+primalTolerance_&&valueDual>dualTolerance_)
+           numberDualUnscaled++;
+         if (value<columnUpper_[i]-primalTolerance_&&valueDual<-dualTolerance_)
+           numberDualUnscaled++;
+       }
+      for (i=0; i<numberRows; i++)
+       {
+         double scaleFactor = rowScale_[i];
+         double valueScaled = rowActivityWork_[i];
+         double lowerScaled = rowLowerWork_[i];
+         double upperScaled = rowUpperWork_[i];
+         if (lowerScaled>-1.0e20||upperScaled<1.0e20) {      if (valueScaled<lowerScaled-primalTolerance_||   valueScaled>upperScaled+primalTolerance_)        numberPrimalScaled++;      else        upperOut_ = CoinMax(upperOut_,CoinMin(valueScaled-lowerScaled,upperScaled-valueScaled));    }
+         rowActivity_[i] = (valueScaled*scaleR)/scaleFactor;
+         double value = rowActivity_[i];
+         if (value<rowLower_[i]-primalTolerance_)      numberPrimalUnscaled++;
+         else if (value>rowUpper_[i]+primalTolerance_)      numberPrimalUnscaled++;
+         double valueScaledDual = dual_[i]+rowObjectiveWork_[i];
+         ;
+         if (valueScaled>rowLowerWork_[i]+primalTolerance_&&valueScaledDual>dualTolerance_)      numberDualScaled++;
+         if (valueScaled<rowUpperWork_[i]-primalTolerance_&&valueScaledDual<-dualTolerance_)      numberDualScaled++;
+         dual_[i] *= scaleFactor*scaleC;
+         double valueDual = dual_[i];
+         if (rowObjective_)      valueDual += rowObjective_[i];
+         if (value>rowLower_[i]+primalTolerance_&&valueDual>dualTolerance_)      numberDualUnscaled++;
+         if (value<rowUpper_[i]-primalTolerance_&&valueDual<-dualTolerance_)      numberDualUnscaled++;
+       }
+  }
+  const double * inverseScale = inverseColumnScale_;
+  for (i=0; i<numberColumns; i++)
+    {
+      double scaleFactor = columnScale_[i];
+      double valueScaled = columnActivityWork_[i];
+      double lowerScaled = columnLowerWork_[i];
+      double upperScaled = columnUpperWork_[i];
+      if (lowerScaled>-1.0e20||upperScaled<1.0e20) {      if (valueScaled<lowerScaled-primalTolerance_||   valueScaled>upperScaled+primalTolerance_)        numberPrimalScaled++;      else        upperOut_ = CoinMax(upperOut_,CoinMin(valueScaled-lowerScaled,upperScaled-valueScaled));    }
+      columnActivity_[i] = valueScaled*scaleFactor*scaleR;
+      double value = columnActivity_[i];
+      if (value<columnLower_[i]-primalTolerance_)      numberPrimalUnscaled++;
+      else if (value>columnUpper_[i]+primalTolerance_)      numberPrimalUnscaled++;
+      double valueScaledDual = reducedCostWork_[i];
+      if (valueScaled>columnLowerWork_[i]+primalTolerance_&&valueScaledDual>dualTolerance_)      numberDualScaled++;
+      if (valueScaled<columnUpperWork_[i]-primalTolerance_&&valueScaledDual<-dualTolerance_)      numberDualScaled++;
+      reducedCost_[i] = (valueScaledDual*scaleC)*inverseScale[i];
+      double valueDual = reducedCost_[i];
+      if (value>columnLower_[i]+primalTolerance_&&valueDual>dualTolerance_)      numberDualUnscaled++;
+      if (value<columnUpper_[i]-primalTolerance_&&valueDual<-dualTolerance_)      numberDualUnscaled++;
+    }
+  inverseScale = inverseRowScale_;
+  for (i=0; i<numberRows; i++)
+    {
+      double scaleFactor = rowScale_[i];
+      double valueScaled = rowActivityWork_[i];
+      double lowerScaled = rowLowerWork_[i];
+      double upperScaled = rowUpperWork_[i];
+      if (lowerScaled>-1.0e20||upperScaled<1.0e20) {      if (valueScaled<lowerScaled-primalTolerance_||   valueScaled>upperScaled+primalTolerance_)        numberPrimalScaled++;      else        upperOut_ = CoinMax(upperOut_,CoinMin(valueScaled-lowerScaled,upperScaled-valueScaled));    }
+      rowActivity_[i] = (valueScaled*scaleR)*inverseScale[i];
+      double value = rowActivity_[i];
+      if (value<rowLower_[i]-primalTolerance_)      numberPrimalUnscaled++;
+      else if (value>rowUpper_[i]+primalTolerance_)      numberPrimalUnscaled++;
+      double valueScaledDual = dual_[i]+rowObjectiveWork_[i];
+      ;
+      if (valueScaled>rowLowerWork_[i]+primalTolerance_&&valueScaledDual>dualTolerance_)      numberDualScaled++;
+      if (valueScaled<rowUpperWork_[i]-primalTolerance_&&valueScaledDual<-dualTolerance_)      numberDualScaled++;
+      dual_[i] *= scaleFactor*scaleC;
+      double valueDual = dual_[i];
+      if (rowObjective_)      valueDual += rowObjective_[i];
+      if (value>rowLower_[i]+primalTolerance_&&valueDual>dualTolerance_)      numberDualUnscaled++;
+      if (value<rowUpper_[i]-primalTolerance_&&valueDual<-dualTolerance_)      numberDualUnscaled++;
+    }
+  if (numberPrimalUnscaled) {
+      if (numberDualUnscaled) 
+       secondaryStatus_=4;
+      else
+       secondaryStatus_=2;
+  }
+  if (numberDualUnscaled)
+    secondaryStatus_=3;
+  int iRow,iColumn;
+  for (iRow=0; iRow<4; iRow++)
+    ;
+  for (iColumn=0; iColumn<2; iColumn++)
+    if (columnArray_[iColumn])
+      columnArray_[iColumn]->clear();
+}
diff --git a/gcc/testsuite/gcc.c-torture/compile/20080721-1.c b/gcc/testsuite/gcc.c-torture/compile/20080721-1.c
new file mode 100644 (file)
index 0000000..35ef352
--- /dev/null
@@ -0,0 +1,15 @@
+void foo(void);
+void bar(void);
+
+int test(int b)
+{
+  void *p, **q;
+  if (b)
+    p = (void *)foo;
+  else
+    p = (void *)bar;
+  q = (void **)p;
+  if (*q == (void *)0)
+    return 1;
+  return 0;
+}
diff --git a/gcc/testsuite/gcc.dg/20080615-1.c b/gcc/testsuite/gcc.dg/20080615-1.c
new file mode 100644 (file)
index 0000000..bce9476
--- /dev/null
@@ -0,0 +1,25 @@
+/* { dg-do compile }  */
+/* { dg-options "-w -O2" } */
+
+static int *see_bb_splay_ar = ((void *) 0);
+static void
+see_merge_and_eliminate_extensions (void)
+{
+  int i = 0;
+  printf ("* Phase 2: Merge and eliminate locally redundant extensions.  *\n");
+  splay_tree_foreach (see_bb_splay_ar[i], ((void *) 0), ((void *) 0));
+}
+static void
+see_main (void)
+{
+  int i = 0;
+  see_merge_and_eliminate_extensions ();
+  printf ("Searching register properties in bb %d\n", i);
+}
+gate_handle_see (void)
+{
+}
+rest_of_handle_see (void)
+{
+  see_main ();
+}
index 735a22f..c464536 100644 (file)
@@ -1,5 +1,5 @@
 /* { dg-do compile } */
-/* { dg-options "-fdump-tree-useless" } */
+/* { dg-options "-fdump-tree-cleanup_cfg1" } */
 
 void *alloca (__SIZE_TYPE__);
 void link_error ();
@@ -10,5 +10,5 @@ int main (int argc, char *argv[]) {
          link_error ();
        return 0;
 }
-/* { dg-final { scan-tree-dump-times "link_error" 0 "useless" } } */
-/* { dg-final { cleanup-tree-dump "useless" } } */
+/* { dg-final { scan-tree-dump-times "link_error" 0 "cleanup_cfg1" } } */
+/* { dg-final { cleanup-tree-dump "cleanup_cfg1" } } */
index abc66e5..dd7fe77 100644 (file)
@@ -4,7 +4,7 @@ void foo()
 {
   bad1:
   #pragma omp parallel
-    goto bad1;                 // { dg-error "invalid exit" }
+    goto bad1;                 // { dg-error "invalid branch" }
 
   goto bad2;                   // { dg-error "invalid entry" }
   #pragma omp parallel
index 810b2da..4c56add 100644 (file)
@@ -11,7 +11,7 @@ void foo()
   bad1:
   #pragma omp for
   for (i = 0; i < 10; ++i)
-    goto bad1;                 // { dg-error "invalid exit" }
+    goto bad1;                 // { dg-error "invalid branch" }
 
   goto bad2;                   // { dg-error "invalid entry" }
   #pragma omp for
index 160047c..c72b04c 100644 (file)
@@ -9,7 +9,7 @@ void foo()
     {
       #pragma omp sections
       {
-       continue;               // { dg-error "invalid exit" }
+       continue;               // { dg-error "invalid branch" }
       }
     }
 
@@ -18,12 +18,12 @@ void foo()
     #pragma omp section
       { bad1: ; }
     #pragma omp section
-      goto bad1;               // { dg-error "invalid exit" }
+      goto bad1;               // { dg-error "invalid branch" }
     }
 
   #pragma omp sections
     {
-      goto bad2;               // { dg-error "invalid exit" }
+      goto bad2;               // { dg-error "invalid branch" }
     }
   bad2:;
 
index 815d36b..61f490c 100644 (file)
@@ -4,6 +4,6 @@ void foo()
 {
   #pragma omp critical
     {
-      return;          // { dg-error "invalid exit" }
+      return;          // { dg-error "invalid branch" }
     }
 }
index 450106f..741049f 100644 (file)
@@ -4,12 +4,12 @@ void foo()
 {
   #pragma omp master
     {
-      goto bad1;       // { dg-error "invalid exit" }
+      goto bad1;       // { dg-error "invalid branch" }
     }
 
   #pragma omp master
     {
     bad1:
-      return;          // { dg-error "invalid exit" }
+      return;          // { dg-error "invalid branch" }
     }
 }
index fa4c5ea..87e6392 100644 (file)
@@ -4,6 +4,6 @@ void foo()
 {
   #pragma omp ordered
     {
-      return;          // { dg-error "invalid exit" }
+      return;          // { dg-error "invalid branch" }
     }
 }
index 802b3b3..2bc1cdb 100644 (file)
@@ -6,15 +6,15 @@ void foo()
   for (i = 0; i < 10; ++i)
     {
       #pragma omp for
-      for (j = ({ continue; 0; });     // { dg-error "invalid exit" }
-          j < ({ continue; 10; });     // { dg-error "invalid exit" }
-          j += ({ continue; 1; }))     // { dg-error "invalid exit" }
+      for (j = ({ continue; 0; });     // { dg-error "invalid branch" }
+          j < ({ continue; 10; });     // { dg-error "invalid branch" }
+          j += ({ continue; 1; }))     // { dg-error "invalid branch" }
        continue;
 
       #pragma omp for
-      for (j = ({ break; 0; });                // { dg-error "invalid exit" }
-          j < ({ break; 10; });        // { dg-error "invalid exit" }
-          j += ({ break; 1; }))        // { dg-error "invalid exit" }
+      for (j = ({ break; 0; });                // { dg-error "invalid branch" }
+          j < ({ break; 10; });        // { dg-error "invalid branch" }
+          j += ({ break; 1; }))        // { dg-error "invalid branch" }
        break;                          // { dg-error "break" }
     }
 }
index 177acaa..3c717d9 100644 (file)
@@ -7,5 +7,5 @@ int foo()
 
   #pragma omp parallel for
   for (i = 0; i < 10; ++i)
-    return 0;                  // { dg-error "invalid exit" }
+    return 0;                  // { dg-error "invalid branch" }
 }
diff --git a/gcc/testsuite/gcc.dg/torture/20080716-1.c b/gcc/testsuite/gcc.dg/torture/20080716-1.c
new file mode 100644 (file)
index 0000000..91fcd2b
--- /dev/null
@@ -0,0 +1,58 @@
+/* { dg-do run } */
+/* { dg-require-effective-target lp64 } */
+
+typedef unsigned long size_t;
+struct tree_base
+{
+  int code;
+};
+struct tree_decl_minimal
+{
+  struct tree_base base;
+  const char *name;
+};
+typedef union tree_node {
+  struct tree_base base;
+  struct tree_decl_minimal decl_minimal;
+} *tree;
+struct tree_overload
+{
+  struct tree_base common;
+  tree function;
+};
+typedef struct VEC_tree_base { unsigned num; unsigned alloc; tree vec[1]; } VEC_tree_base;
+typedef struct VEC_tree_gc { VEC_tree_base base; } VEC_tree_gc;
+static __inline__ unsigned VEC_tree_base_length (const VEC_tree_base *vec_)
+{ return vec_ ? vec_->num : 0; }
+static __inline__ int VEC_tree_base_iterate (const VEC_tree_base *vec_, unsigned ix_, tree *ptr)
+{
+  if (vec_ && ix_ < vec_->num) { *ptr = vec_->vec[ix_]; return 1; } else { *ptr = 0; return 0; }
+} 
+extern void abort (void);
+void __attribute__((noinline)) foo (size_t x)
+{
+  if (x != 18446744073709551614UL)
+    abort ();
+}
+void
+resort_type_method_vec (VEC_tree_gc *method_vec)
+{
+  int len = (VEC_tree_base_length(((method_vec) ? &(method_vec)->base : 0)));
+  size_t slot;
+  tree fn;
+
+  for (slot = 2;
+       (VEC_tree_base_iterate(((method_vec) ? &(method_vec)->base : 0),slot,&(fn)));
+       ++slot)
+    if (!(((((((fn)->base.code) == 225) ? (((struct tree_overload*)(fn))->function) : (fn)))->decl_minimal.name)))
+      break;
+
+  if (len - slot > 1)
+    foo (len - slot);
+}
+
+int main ()
+{
+  resort_type_method_vec ((void *)0);
+  return 0;
+}
index 3b1ace8..1ad2c63 100644 (file)
@@ -1,5 +1,5 @@
 /* { dg-do compile } */
-/* { dg-options "-O2 -fdump-tree-optimized" } */
+/* { dg-options "-O2 -fdump-tree-final_cleanup" } */
     
 
 union tree_node;
@@ -42,6 +42,6 @@ objects_must_conflict_p (t1, t2)
 }
 
 /* There should be two assignments of variables to the value zero.  */
-/* { dg-final { scan-tree-dump-times " = 0" 2 "optimized"} } */
+/* { dg-final { scan-tree-dump-times " = 0" 2 "final_cleanup"} } */
  
-/* { dg-final { cleanup-tree-dump "optimized" } } */
+/* { dg-final { cleanup-tree-dump "final_cleanup" } } */
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/20080530.c b/gcc/testsuite/gcc.dg/tree-ssa/20080530.c
new file mode 100644 (file)
index 0000000..6da7cb8
--- /dev/null
@@ -0,0 +1,22 @@
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-einline" } */
+
+void bar (char *);
+int i;
+
+static void
+foo (void)
+{
+  char *p = __builtin_alloca (i);
+  bar (p);
+}
+
+int
+baz (void)
+{
+  foo ();      /* foo() should not be inlined here because it calls alloca */
+  return 6;
+}
+
+/* { dg-final { scan-tree-dump-times "Inlining foo into baz" 0 "einline2"} } */
+/* { dg-final { cleanup-tree-dump "einline2" } } */
index 577e179..d7b72f9 100644 (file)
@@ -17,5 +17,5 @@ f (void)
     link_error ();
 }
 
-/* { dg-final { scan-tree-dump-times "Folded statement: if " 1 "ccp1"} } */
+/* { dg-final { scan-tree-dump-times "Folded into: if " 1 "ccp1"} } */
 /* { dg-final { cleanup-tree-dump "ccp\[1-2\]" } } */
index 33c3f5c..50b3bfd 100644 (file)
@@ -22,5 +22,5 @@ void test_signed_msg_encoding(void)
     f();
 }
 
-/* { dg-final { scan-tree-dump-times "signInfo = {};" 1 "dse1" } } */
+/* { dg-final { scan-tree-dump-times "signInfo = {}" 1 "dse1" } } */
 /* { dg-final { cleanup-tree-dump "dse*" } } */
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/tailcall-3.c b/gcc/testsuite/gcc.dg/tree-ssa/tailcall-3.c
new file mode 100644 (file)
index 0000000..4055bc3
--- /dev/null
@@ -0,0 +1,28 @@
+/* The return value needs a type conversion which, on some targets
+   (e.g. s390), requires additional code.  So it is invalid to perform
+   tail call optimization here.  */
+
+/* { dg-do compile } */
+/* { dg-options "-O2" } */
+
+extern void abort (void);
+
+long long __attribute__((noinline))
+foo ()
+{
+  return 3;
+}
+
+int __attribute__((noinline))
+boo ()
+{
+  return foo ();
+}
+
+int
+main ()
+{
+  if (boo () != 3)
+    abort ();
+}
+
index f03602a..04c39a4 100644 (file)
@@ -2,7 +2,7 @@
 
 !$omp parallel
 !$omp critical
-       goto 10         ! { dg-error "invalid exit" }
+       goto 10         ! { dg-error "invalid (exit|branch)" }
 !$omp end critical
  10    x = 1
 !$omp end parallel
index 6ba0c83..d021eee 100644 (file)
@@ -82,6 +82,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "alloc-pool.h"
 #include "tree-mudflap.h"
 #include "tree-pass.h"
+#include "gimple.h"
 
 #if defined (DWARF2_UNWIND_INFO) || defined (DWARF2_DEBUGGING_INFO)
 #include "dwarf2out.h"
@@ -2084,6 +2085,7 @@ dump_memory_report (bool final)
   ggc_print_statistics ();
   stringpool_statistics ();
   dump_tree_statistics ();
+  dump_gimple_statistics ();
   dump_rtx_statistics ();
   dump_varray_statistics ();
   dump_alloc_pool_statistics ();
index 98f66ce..fab2f49 100644 (file)
@@ -102,13 +102,13 @@ ignore_bb_p (const_basic_block bb)
 static int
 count_insns (basic_block bb)
 {
-  block_stmt_iterator bsi;
-  tree stmt;
+  gimple_stmt_iterator gsi;
+  gimple stmt;
   int n = 0;
 
-  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
     {
-      stmt = bsi_stmt (bsi);
+      stmt = gsi_stmt (gsi);
       n += estimate_num_insns (stmt, &eni_size_weights);
     }
   return n;
index 59ac3d7..0d329d0 100644 (file)
@@ -30,7 +30,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "tree-dump.h"
 #include "pointer-set.h"
 #include "tree-affine.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "flags.h"
 
 /* Extends CST as appropriate for the affine combinations COMB.  */
@@ -567,11 +567,13 @@ struct name_expansion
    results.  */
 
 void
-aff_combination_expand (aff_tree *comb, struct pointer_map_t **cache)
+aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
+                       struct pointer_map_t **cache ATTRIBUTE_UNUSED)
 {
   unsigned i;
   aff_tree to_add, current, curre;
-  tree e, def, rhs;
+  tree e, rhs;
+  gimple def;
   double_int scale;
   void **slot;
   struct name_expansion *exp;
@@ -580,6 +582,8 @@ aff_combination_expand (aff_tree *comb, struct pointer_map_t **cache)
   for (i = 0; i < comb->n; i++)
     {
       tree type, name;
+      enum tree_code code;
+
       e = comb->elts[i].val;
       type = TREE_TYPE (e);
       name = e;
@@ -591,19 +595,19 @@ aff_combination_expand (aff_tree *comb, struct pointer_map_t **cache)
       if (TREE_CODE (name) != SSA_NAME)
        continue;
       def = SSA_NAME_DEF_STMT (name);
-      if (TREE_CODE (def) != GIMPLE_MODIFY_STMT
-         || GIMPLE_STMT_OPERAND (def, 0) != name)
+      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
        continue;
 
-      rhs = GIMPLE_STMT_OPERAND (def, 1);
-      if (TREE_CODE (rhs) != SSA_NAME
-         && !EXPR_P (rhs)
-         && !is_gimple_min_invariant (rhs))
+      code = gimple_assign_rhs_code (def);
+      if (code != SSA_NAME
+         && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
+         && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
+             || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
        continue;
 
       /* We do not know whether the reference retains its value at the
         place where the expansion is used.  */
-      if (REFERENCE_CLASS_P (rhs))
+      if (TREE_CODE_CLASS (code) == tcc_reference)
        continue;
 
       if (!*cache)
@@ -616,29 +620,27 @@ aff_combination_expand (aff_tree *comb, struct pointer_map_t **cache)
          exp = XNEW (struct name_expansion);
          exp->in_progress = 1;
          *slot = exp;
-         if (e != name)
+         /* In principle this is a generally valid folding, but
+            it is not unconditionally an optimization, so do it
+            here and not in fold_unary.  */
+         /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
+            than the type of X and overflow for the type of X is
+            undefined.  */
+         if (e != name
+             && INTEGRAL_TYPE_P (type)
+             && INTEGRAL_TYPE_P (TREE_TYPE (name))
+             && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
+             && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (name))
+             && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
+             && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
+           rhs = fold_build2 (code, type,
+                              fold_convert (type, gimple_assign_rhs1 (def)),
+                              fold_convert (type, gimple_assign_rhs2 (def)));
+         else
            {
-             /* In principle this is a generally valid folding, but
-                it is not unconditionally an optimization, so do it
-                here and not in fold_unary.  */
-             /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
-                than the type of X and overflow for the type of X is
-                undefined.  */
-             if (INTEGRAL_TYPE_P (type)
-                 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
-                 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs))
-                 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs))
-                 && (TREE_CODE (rhs) == PLUS_EXPR
-                     || TREE_CODE (rhs) == MINUS_EXPR
-                     || TREE_CODE (rhs) == MULT_EXPR)
-                 && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
-               {
-                 rhs = fold_build2 (TREE_CODE (rhs), type,
-                                    fold_convert (type, TREE_OPERAND (rhs, 0)),
-                                    fold_convert (type, TREE_OPERAND (rhs, 1)));
-               }
-             else
-               rhs = fold_convert (type, rhs);
+             rhs = gimple_assign_rhs_to_tree (def);
+             if (e != name)
+               rhs = fold_convert (type, rhs);
            }
          tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
          exp->expansion = current;
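(A worked example, for illustration only, of the fold the new code above guards: if i is a 32-bit int whose overflow is undefined and the affine combination is expanded in a wider 64-bit type, a definition i2 = i + 4 is expanded as (long) i + 4 rather than (long) (i + 4); that is the (T1)(X +- CST) to (T1)X +- (T1)CST rewrite described in the comment, now driven by gimple_assign_rhs_code/rhs1/rhs2 instead of the old GENERIC operands.)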
index 8f7041b..ce9572c 100644 (file)
@@ -35,7 +35,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "tree.h"
 #include "diagnostic.h"
 #include "tree-flow.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "tree-dump.h"
 #include "tree-pass.h"
 #include "timevar.h"
@@ -99,7 +99,7 @@ typedef struct input_domain
   bool is_ub_inclusive;
 } inp_domain;
 
-static VEC (tree, heap) *cond_dead_built_in_calls;
+static VEC (gimple, heap) *cond_dead_built_in_calls;
 
 /* A helper function to construct and return an input
    domain object.  LB is the lower bound, HAS_LB is 
@@ -174,16 +174,16 @@ check_target_format (tree arg)
 #define MAX_BASE_INT_BIT_SIZE 32
 
 static bool
-check_pow (tree pow_call)
+check_pow (gimple pow_call)
 {
   tree base, expn;
   enum tree_code bc, ec;
 
-  if (call_expr_nargs (pow_call) != 2)
+  if (gimple_call_num_args (pow_call) != 2)
     return false;
 
-  base = CALL_EXPR_ARG (pow_call, 0);
-  expn = CALL_EXPR_ARG (pow_call, 1);
+  base = gimple_call_arg (pow_call, 0);
+  expn = gimple_call_arg (pow_call, 1);
 
   if (!check_target_format (expn))
     return false;
@@ -212,20 +212,19 @@ check_pow (tree pow_call)
     }
   else if (bc == SSA_NAME)
     {
-      tree base_def, base_val, base_val0, base_var, type;
+      tree base_val0, base_var, type;
+      gimple base_def;
       int bit_sz;
 
       /* Only handles cases where base value is converted
          from integer values.  */ 
       base_def = SSA_NAME_DEF_STMT (base);
-      if (TREE_CODE (base_def) != GIMPLE_MODIFY_STMT)
+      if (gimple_code (base_def) != GIMPLE_ASSIGN)
         return false;
 
-      base_val = GIMPLE_STMT_OPERAND (base_def, 1);
-
-      if (TREE_CODE (base_val) != FLOAT_EXPR)
+      if (gimple_assign_rhs_code (base_def) != FLOAT_EXPR)
         return false;
-      base_val0 = TREE_OPERAND (base_val, 0);
+      base_val0 = gimple_assign_rhs1 (base_def);
 
       base_var = SSA_NAME_VAR (base_val0);
       if (!DECL_P  (base_var))
@@ -253,11 +252,11 @@ check_pow (tree pow_call)
    Returns true if the function call is a candidate.  */
 
 static bool
-check_builtin_call (tree bcall)
+check_builtin_call (gimple bcall)
 {
   tree arg;
 
-  arg = CALL_EXPR_ARG (bcall, 0);
+  arg = gimple_call_arg (bcall, 0);
   return check_target_format (arg);
 }
 
@@ -266,18 +265,18 @@ check_builtin_call (tree bcall)
    is a candidate.  */
 
 static bool
-is_call_dce_candidate (tree call)
+is_call_dce_candidate (gimple call)
 {
   tree fn;
   enum built_in_function fnc;
 
-  if (!flag_tree_builtin_call_dce)
+  /* Only potentially dead calls are considered.  */
+  if (gimple_call_lhs (call))
     return false;
 
-  gcc_assert (call && TREE_CODE (call) == CALL_EXPR);
-
-  fn = get_callee_fndecl (call);
-  if (!fn || !DECL_BUILT_IN (fn) 
+  fn = gimple_call_fndecl (call);
+  if (!fn
+      || !DECL_BUILT_IN (fn) 
       || (DECL_BUILT_IN_CLASS (fn) != BUILT_IN_NORMAL))
     return false;
 
@@ -331,38 +330,35 @@ static void
 gen_one_condition (tree arg, int lbub, 
                    enum tree_code tcode,
                    const char *temp_name1,
-                   const char *temp_name2,
-                   VEC (tree, heap) *conds,
+                  const char *temp_name2,
+                   VEC (gimple, heap) *conds,
                    unsigned *nconds)
 {
   tree lbub_real_cst, lbub_cst, float_type;
   tree temp, tempn, tempc, tempcn;
-  tree stmt1, stmt2, stmt3;
+  gimple stmt1, stmt2, stmt3;
 
   float_type = TREE_TYPE (arg);
   lbub_cst = build_int_cst (integer_type_node, lbub);
   lbub_real_cst = build_real_from_int_cst (float_type, lbub_cst);
 
   temp = create_tmp_var (float_type, temp_name1);
-  stmt1 = build_gimple_modify_stmt (temp, arg);
+  stmt1 = gimple_build_assign (temp, arg);
   tempn = make_ssa_name (temp, stmt1);
-  GIMPLE_STMT_OPERAND (stmt1, 0) = tempn;
+  gimple_assign_set_lhs (stmt1, tempn);
 
   tempc = create_tmp_var (boolean_type_node, temp_name2);
-  stmt2 = build_gimple_modify_stmt (tempc,
-                                    fold_build2 (tcode,
-                                                 boolean_type_node,
-                                                 tempn, lbub_real_cst));
+  stmt2 = gimple_build_assign (tempc,
+                               fold_build2 (tcode,
+                                           boolean_type_node,
+                                           tempn, lbub_real_cst));
   tempcn = make_ssa_name (tempc, stmt2);
-  GIMPLE_STMT_OPERAND (stmt2, 0) = tempcn;
-
-  /* fold_built3 not used for gimple statement here,
-     as it will hit assertion.  */
-  stmt3 = build3 (COND_EXPR, void_type_node,
-                  tempcn, NULL_TREE, NULL_TREE);
-  VEC_quick_push (tree, conds, stmt1);
-  VEC_quick_push (tree, conds, stmt2);
-  VEC_quick_push (tree, conds, stmt3);
+  gimple_assign_set_lhs (stmt2, tempcn);
+
+  stmt3 = gimple_build_cond_from_tree (tempcn, NULL_TREE, NULL_TREE);
+  VEC_quick_push (gimple, conds, stmt1);
+  VEC_quick_push (gimple, conds, stmt2);
+  VEC_quick_push (gimple, conds, stmt3);
   (*nconds)++;
 }
 
@@ -377,7 +373,7 @@ gen_one_condition (tree arg, int lbub,
 
 static void
 gen_conditions_for_domain (tree arg, inp_domain domain,
-                           VEC (tree, heap) *conds, 
+                           VEC (gimple, heap) *conds, 
                            unsigned *nconds)
 {
   if (domain.has_lb)
@@ -391,7 +387,7 @@ gen_conditions_for_domain (tree arg, inp_domain domain,
     {
       /* Now push a separator.  */
       if (domain.has_lb)
-        VEC_quick_push (tree, conds, NULL);
+        VEC_quick_push (gimple, conds, NULL);
 
       gen_one_condition (arg, domain.ub,
                          (domain.is_ub_inclusive
@@ -420,7 +416,7 @@ gen_conditions_for_domain (tree arg, inp_domain domain,
 
 static void
 gen_conditions_for_pow_cst_base (tree base, tree expn,
-                                 VEC (tree, heap) *conds,
+                                 VEC (gimple, heap) *conds,
                                  unsigned *nconds)
 {
   inp_domain exp_domain; 
@@ -456,20 +452,21 @@ gen_conditions_for_pow_cst_base (tree base, tree expn,
 
 static void
 gen_conditions_for_pow_int_base (tree base, tree expn,
-                                 VEC (tree, heap) *conds,
+                                 VEC (gimple, heap) *conds,
                                  unsigned *nconds)
 {
-  tree base_def, base_nm, base_val, base_val0;
+  gimple base_def;
+  tree base_nm, base_val0;
   tree base_var, int_type;
   tree temp, tempn;
-  tree cst0, stmt1, stmt2;
+  tree cst0;
+  gimple stmt1, stmt2;
   int bit_sz, max_exp;
   inp_domain exp_domain;
 
   base_def = SSA_NAME_DEF_STMT (base);
-  base_nm = GIMPLE_STMT_OPERAND (base_def, 0);
-  base_val = GIMPLE_STMT_OPERAND (base_def, 1);
-  base_val0 = TREE_OPERAND (base_val, 0);
+  base_nm = gimple_assign_lhs (base_def);
+  base_val0 = gimple_assign_rhs1 (base_def);
   base_var = SSA_NAME_VAR (base_val0);
   int_type = TREE_TYPE (base_var);
   bit_sz = TYPE_PRECISION (int_type);
@@ -514,19 +511,17 @@ gen_conditions_for_pow_int_base (tree base, tree expn,
      type is integer.  */
 
   /* Push a separator.  */
-  VEC_quick_push (tree, conds, NULL);
+  VEC_quick_push (gimple, conds, NULL);
 
   temp = create_tmp_var (int_type, "DCE_COND1");
   cst0 = build_int_cst (int_type, 0);
-  stmt1 = build_gimple_modify_stmt (temp, base_val0);
+  stmt1 = gimple_build_assign (temp, base_val0);
   tempn = make_ssa_name (temp, stmt1);
-  GIMPLE_STMT_OPERAND (stmt1, 0) = tempn;
-  stmt2 = build3 (COND_EXPR, void_type_node,
-                  fold_build2 (LE_EXPR, boolean_type_node, tempn, cst0),
-                  NULL_TREE, NULL_TREE);
+  gimple_assign_set_lhs (stmt1, tempn);
+  stmt2 = gimple_build_cond (LE_EXPR, tempn, cst0, NULL_TREE, NULL_TREE);
 
-  VEC_quick_push (tree, conds, stmt1);
-  VEC_quick_push (tree, conds, stmt2);
+  VEC_quick_push (gimple, conds, stmt1);
+  VEC_quick_push (gimple, conds, stmt2);
   (*nconds)++;
 }
 
@@ -548,7 +543,7 @@ gen_conditions_for_pow_int_base (tree base, tree expn,
    and *NCONDS is the number of logical conditions.  */
 
 static void
-gen_conditions_for_pow (tree pow_call, VEC (tree, heap) *conds, 
+gen_conditions_for_pow (gimple pow_call, VEC (gimple, heap) *conds, 
                         unsigned *nconds)
 {
   tree base, expn;
@@ -560,18 +555,16 @@ gen_conditions_for_pow (tree pow_call, VEC (tree, heap) *conds,
 
   *nconds = 0;
 
-  base = CALL_EXPR_ARG (pow_call, 0);
-  expn = CALL_EXPR_ARG (pow_call, 1);
+  base = gimple_call_arg (pow_call, 0);
+  expn = gimple_call_arg (pow_call, 1);
 
   bc = TREE_CODE (base);
   ec = TREE_CODE (expn);
 
   if (bc == REAL_CST)
-      gen_conditions_for_pow_cst_base (base, expn,
-                                       conds, nconds);
+      gen_conditions_for_pow_cst_base (base, expn, conds, nconds);
   else if (bc == SSA_NAME)
-      gen_conditions_for_pow_int_base (base, expn,
-                                       conds, nconds);
+      gen_conditions_for_pow_int_base (base, expn, conds, nconds);
   else
     gcc_unreachable ();
 }
@@ -689,22 +682,19 @@ get_no_error_domain (enum built_in_function fnc)
    condition are separated by NULL tree in the vector.  */
 
 static void
-gen_shrink_wrap_conditions (tree bi_call, VEC (tree, heap) *conds, 
+gen_shrink_wrap_conditions (gimple bi_call, VEC (gimple, heap) *conds, 
                             unsigned int *nconds)
 {
-  tree call, fn;
+  gimple call;
+  tree fn;
   enum built_in_function fnc;
 
   gcc_assert (nconds && conds);
-  gcc_assert (VEC_length (tree, conds) == 0);
-  gcc_assert (TREE_CODE (bi_call) == GIMPLE_MODIFY_STMT
-              || TREE_CODE (bi_call) == CALL_EXPR);
+  gcc_assert (VEC_length (gimple, conds) == 0);
+  gcc_assert (is_gimple_call (bi_call));
 
   call = bi_call;
-  if (TREE_CODE (call) == GIMPLE_MODIFY_STMT)
-    call = get_call_expr_in (bi_call);
-
-  fn = get_callee_fndecl (call);
+  fn = gimple_call_fndecl (call);
   gcc_assert (fn && DECL_BUILT_IN (fn));
   fnc = DECL_FUNCTION_CODE (fn);
   *nconds = 0;
@@ -716,7 +706,7 @@ gen_shrink_wrap_conditions (tree bi_call, VEC (tree, heap) *conds,
       tree arg;
       inp_domain domain = get_no_error_domain (fnc);
       *nconds = 0;
-      arg = CALL_EXPR_ARG (bi_call, 0);
+      arg = gimple_call_arg (bi_call, 0);
       gen_conditions_for_domain (arg, domain, conds, nconds);
     }
 
@@ -733,21 +723,21 @@ gen_shrink_wrap_conditions (tree bi_call, VEC (tree, heap) *conds,
    transformation actually happens.  */
 
 static bool 
-shrink_wrap_one_built_in_call (tree bi_call)
+shrink_wrap_one_built_in_call (gimple bi_call)
 {
-  block_stmt_iterator bi_call_bsi;
+  gimple_stmt_iterator bi_call_bsi;
   basic_block bi_call_bb, join_tgt_bb, guard_bb, guard_bb0;
   edge join_tgt_in_edge_from_call, join_tgt_in_edge_fall_thru;
   edge bi_call_in_edge0, guard_bb_in_edge;
-  VEC (tree, heap) *conds;
+  VEC (gimple, heap) *conds;
   unsigned tn_cond_stmts, nconds;
   unsigned ci;
-  tree cond_expr = NULL;
-  tree cond_expr_start;
+  gimple cond_expr = NULL;
+  gimple cond_expr_start;
   tree bi_call_label_decl;
-  tree bi_call_label;
+  gimple bi_call_label;
 
-  conds = VEC_alloc (tree, heap, 12);
+  conds = VEC_alloc (gimple, heap, 12);
   gen_shrink_wrap_conditions (bi_call, conds, &nconds);
 
   /* This can happen if the condition generator decides
@@ -757,40 +747,40 @@ shrink_wrap_one_built_in_call (tree bi_call)
   if (nconds == 0)
     return false;
 
-  bi_call_bb = bb_for_stmt (bi_call);
+  bi_call_bb = gimple_bb (bi_call);
 
   /* Now find the join target bb -- split
      bi_call_bb if needed.  */
-  bi_call_bsi = bsi_for_stmt (bi_call);
+  bi_call_bsi = gsi_for_stmt (bi_call);
 
   join_tgt_in_edge_from_call = split_block (bi_call_bb, bi_call);
-  bi_call_bsi = bsi_for_stmt (bi_call);
+  bi_call_bsi = gsi_for_stmt (bi_call);
 
   join_tgt_bb = join_tgt_in_edge_from_call->dest;
 
   /* Now it is time to insert the first conditional expression
      into bi_call_bb and split this bb so that bi_call is
      shrink-wrapped.  */
-  tn_cond_stmts = VEC_length (tree, conds);
+  tn_cond_stmts = VEC_length (gimple, conds);
   cond_expr = NULL;
-  cond_expr_start = VEC_index (tree, conds, 0);
+  cond_expr_start = VEC_index (gimple, conds, 0);
   for (ci = 0; ci < tn_cond_stmts; ci++)
     {
-      tree c = VEC_index (tree, conds, ci);
+      gimple c = VEC_index (gimple, conds, ci);
       gcc_assert (c || ci != 0);
       if (!c)
         break;
-      bsi_insert_before (&bi_call_bsi, c, BSI_SAME_STMT);
+      gsi_insert_before (&bi_call_bsi, c, GSI_SAME_STMT);
       cond_expr = c;
     }
   nconds--;
   ci++;
-  gcc_assert (cond_expr && TREE_CODE (cond_expr) == COND_EXPR);
+  gcc_assert (cond_expr && gimple_code (cond_expr) == GIMPLE_COND);
 
   /* Now the label.  */
   bi_call_label_decl = create_artificial_label ();
-  bi_call_label = build1 (LABEL_EXPR, void_type_node, bi_call_label_decl);
-  bsi_insert_before (&bi_call_bsi, bi_call_label, BSI_SAME_STMT);
+  bi_call_label = gimple_build_label (bi_call_label_decl);
+  gsi_insert_before (&bi_call_bsi, bi_call_label, GSI_SAME_STMT);
 
   bi_call_in_edge0 = split_block (bi_call_bb, cond_expr);
   bi_call_in_edge0->flags &= ~EDGE_FALLTHRU;
@@ -810,21 +800,21 @@ shrink_wrap_one_built_in_call (tree bi_call)
     {
       unsigned ci0;
       edge bi_call_in_edge;
-      block_stmt_iterator guard_bsi = bsi_for_stmt (cond_expr_start);
+      gimple_stmt_iterator guard_bsi = gsi_for_stmt (cond_expr_start);
       ci0 = ci;
-      cond_expr_start = VEC_index (tree, conds, ci0);
+      cond_expr_start = VEC_index (gimple, conds, ci0);
       for (; ci < tn_cond_stmts; ci++)
         {
-          tree c = VEC_index (tree, conds, ci);
+          gimple c = VEC_index (gimple, conds, ci);
           gcc_assert (c || ci != ci0);
           if (!c)
             break;
-          bsi_insert_before (&guard_bsi, c, BSI_SAME_STMT);
+          gsi_insert_before (&guard_bsi, c, GSI_SAME_STMT);
           cond_expr = c;
         }
       nconds--;
       ci++;
-      gcc_assert (cond_expr && TREE_CODE (cond_expr) == COND_EXPR);
+      gcc_assert (cond_expr && gimple_code (cond_expr) == GIMPLE_COND);
       guard_bb_in_edge = split_block (guard_bb, cond_expr);
       guard_bb_in_edge->flags &= ~EDGE_FALLTHRU;
       guard_bb_in_edge->flags |= EDGE_FALSE_VALUE;
@@ -836,11 +826,11 @@ shrink_wrap_one_built_in_call (tree bi_call)
           REG_BR_PROB_BASE - bi_call_in_edge->probability;
     }
 
-  VEC_free (tree, heap, conds);
+  VEC_free (gimple, heap, conds);
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
       location_t loc;
-      loc = EXPR_LOCATION (bi_call);
+      loc = gimple_location (bi_call);
       fprintf (dump_file,
                "%s:%d: note: function call is shrink-wrapped"
                " into error conditions.\n",
@@ -859,13 +849,13 @@ shrink_wrap_conditional_dead_built_in_calls (void)
   bool changed = false;
   unsigned i = 0;
 
-  unsigned n = VEC_length (tree, cond_dead_built_in_calls);
+  unsigned n = VEC_length (gimple, cond_dead_built_in_calls);
   if (n == 0) 
     return false;
 
   for (; i < n ; i++)
     {
-      tree bi_call = VEC_index (tree, cond_dead_built_in_calls, i);
+      gimple bi_call = VEC_index (gimple, cond_dead_built_in_calls, i);
       changed |= shrink_wrap_one_built_in_call (bi_call);
     }
 
@@ -878,34 +868,33 @@ static unsigned int
 tree_call_cdce (void)
 {
   basic_block bb;
-  block_stmt_iterator i;
+  gimple_stmt_iterator i;
   bool something_changed = false;
-  cond_dead_built_in_calls = VEC_alloc (tree, heap, 64);
+  cond_dead_built_in_calls = VEC_alloc (gimple, heap, 64);
 
   FOR_EACH_BB (bb)
     {
       /* Collect dead call candidates.  */
-      for (i = bsi_start (bb); ! bsi_end_p (i); bsi_next (&i))
+      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
         {
-         tree stmt = bsi_stmt (i);
-          if (TREE_CODE (stmt) == CALL_EXPR
+         gimple stmt = gsi_stmt (i);
+          if (is_gimple_call (stmt)
               && is_call_dce_candidate (stmt))
             {
               if (dump_file && (dump_flags & TDF_DETAILS))
                 {
                   fprintf (dump_file, "Found conditional dead call: ");
-                  print_generic_stmt (dump_file, stmt, TDF_SLIM);
+                  print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
                   fprintf (dump_file, "\n");
                 }
-              VEC_quick_push (tree, cond_dead_built_in_calls, stmt);
+              VEC_quick_push (gimple, cond_dead_built_in_calls, stmt);
             }
        }
     }
 
-  something_changed =
-    shrink_wrap_conditional_dead_built_in_calls ();
+  something_changed = shrink_wrap_conditional_dead_built_in_calls ();
 
-  VEC_free (tree, heap, cond_dead_built_in_calls);
+  VEC_free (gimple, heap, cond_dead_built_in_calls);
 
   if (something_changed)
     {
index 5b747e8..00979bd 100644
@@ -59,7 +59,7 @@ static const int initial_cfg_capacity = 20;
 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
    which use a particular edge.  The CASE_LABEL_EXPRs are chained together
    via their TREE_CHAIN field, which we clear after we're done with the
-   hash table to prevent problems with duplication of SWITCH_EXPRs.
+   hash table to prevent problems with duplication of GIMPLE_SWITCHes.
 
    Access to this list of CASE_LABEL_EXPRs allows us to efficiently
    update the case vector in response to edge redirections.
@@ -83,35 +83,32 @@ static struct cfg_stats_d cfg_stats;
 static bool found_computed_goto;
 
 /* Basic blocks and flowgraphs.  */
-static basic_block create_bb (void *, void *, basic_block);
-static void make_blocks (tree);
+static void make_blocks (gimple_seq);
 static void factor_computed_gotos (void);
 
 /* Edges.  */
 static void make_edges (void);
 static void make_cond_expr_edges (basic_block);
-static void make_switch_expr_edges (basic_block);
+static void make_gimple_switch_edges (basic_block);
 static void make_goto_expr_edges (basic_block);
-static edge tree_redirect_edge_and_branch (edge, basic_block);
-static edge tree_try_redirect_by_replacing_jump (edge, basic_block);
+static edge gimple_redirect_edge_and_branch (edge, basic_block);
+static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
 static unsigned int split_critical_edges (void);
 
 /* Various helpers.  */
-static inline bool stmt_starts_bb_p (const_tree, const_tree);
-static int tree_verify_flow_info (void);
-static void tree_make_forwarder_block (edge);
-static void tree_cfg2vcg (FILE *);
-static inline void change_bb_for_stmt (tree t, basic_block bb);
-static bool computed_goto_p (const_tree);
+static inline bool stmt_starts_bb_p (gimple, gimple);
+static int gimple_verify_flow_info (void);
+static void gimple_make_forwarder_block (edge);
+static void gimple_cfg2vcg (FILE *);
 
 /* Flowgraph optimization and cleanup.  */
-static void tree_merge_blocks (basic_block, basic_block);
-static bool tree_can_merge_blocks_p (basic_block, basic_block);
+static void gimple_merge_blocks (basic_block, basic_block);
+static bool gimple_can_merge_blocks_p (basic_block, basic_block);
 static void remove_bb (basic_block);
 static edge find_taken_edge_computed_goto (basic_block, tree);
 static edge find_taken_edge_cond_expr (basic_block, tree);
 static edge find_taken_edge_switch_expr (basic_block, tree);
-static tree find_case_label_for_value (tree, tree);
+static tree find_case_label_for_value (gimple, tree);
 
 void
 init_empty_tree_cfg_for_function (struct function *fn)
@@ -155,21 +152,21 @@ init_empty_tree_cfg (void)
                              Create basic blocks
 ---------------------------------------------------------------------------*/
 
-/* Entry point to the CFG builder for trees.  TP points to the list of
+/* Entry point to the CFG builder for trees.  SEQ is the sequence of
    statements to be added to the flowgraph.  */
 
 static void
-build_tree_cfg (tree *tp)
+build_gimple_cfg (gimple_seq seq)
 {
-  /* Register specific tree functions.  */
-  tree_register_cfg_hooks ();
+  /* Register specific gimple functions.  */
+  gimple_register_cfg_hooks ();
 
   memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
 
   init_empty_tree_cfg ();
 
   found_computed_goto = 0;
-  make_blocks (*tp);
+  make_blocks (seq);
 
   /* Computed gotos are hell to deal with, especially if there are
      lots of them with a large number of destinations.  So we factor
@@ -207,7 +204,7 @@ build_tree_cfg (tree *tp)
     FILE *vcg_file = dump_begin (TDI_vcg, &local_dump_flags);
     if (vcg_file)
       {
-       tree_cfg2vcg (vcg_file);
+       gimple_cfg2vcg (vcg_file);
        dump_end (TDI_vcg, vcg_file);
       }
   }
@@ -218,13 +215,13 @@ build_tree_cfg (tree *tp)
 
   /* Dump a textual representation of the flowgraph.  */
   if (dump_file)
-    dump_tree_cfg (dump_file, dump_flags);
+    gimple_dump_cfg (dump_file, dump_flags);
 }
 
 static unsigned int
 execute_build_cfg (void)
 {
-  build_tree_cfg (&DECL_SAVED_TREE (current_function_decl));
+  build_gimple_cfg (gimple_body (current_function_decl));
   return 0;
 }
 
@@ -239,7 +236,7 @@ struct gimple_opt_pass pass_build_cfg =
   NULL,                                        /* next */
   0,                                   /* static_pass_number */
   TV_TREE_CFG,                         /* tv_id */
-  PROP_gimple_leh,                     /* properties_required */
+  PROP_gimple_leh,                     /* properties_required */
   PROP_cfg,                            /* properties_provided */
   0,                                   /* properties_destroyed */
   0,                                   /* todo_flags_start */
@@ -247,6 +244,17 @@ struct gimple_opt_pass pass_build_cfg =
  }
 };
 
+
+/* Return true if T is a computed goto.  */
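+/* For example, the GNU C statement "goto *ptr;" becomes a GIMPLE_GOTO whose
+   destination is not a LABEL_DECL, which is exactly what this predicate
+   checks for.  */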
+
+static bool
+computed_goto_p (gimple t)
+{
+  return (gimple_code (t) == GIMPLE_GOTO
+         && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
+}
+
+
 /* Search the CFG for any computed gotos.  If found, factor them to a
    common computed goto site.  Also record the location of that site so
    that we can un-factor the gotos after we have converted back to
@@ -258,8 +266,8 @@ factor_computed_gotos (void)
   basic_block bb;
   tree factored_label_decl = NULL;
   tree var = NULL;
-  tree factored_computed_goto_label = NULL;
-  tree factored_computed_goto = NULL;
+  gimple factored_computed_goto_label = NULL;
+  gimple factored_computed_goto = NULL;
 
   /* We know there are one or more computed gotos in this function.
      Examine the last statement in each basic block to see if the block
@@ -267,12 +275,13 @@ factor_computed_gotos (void)
 
   FOR_EACH_BB (bb)
     {
-      block_stmt_iterator bsi = bsi_last (bb);
-      tree last;
+      gimple_stmt_iterator gsi = gsi_last_bb (bb);
+      gimple last;
 
-      if (bsi_end_p (bsi))
+      if (gsi_end_p (gsi))
        continue;
-      last = bsi_stmt (bsi);
+
+      last = gsi_stmt (gsi);
 
       /* Ignore the computed goto we create when we factor the original
         computed gotos.  */
@@ -282,15 +291,15 @@ factor_computed_gotos (void)
       /* If the last statement is a computed goto, factor it.  */
       if (computed_goto_p (last))
        {
-         tree assignment;
+         gimple assignment;
 
          /* The first time we find a computed goto we need to create
             the factored goto block and the variable each original
             computed goto will use for their goto destination.  */
-         if (! factored_computed_goto)
+         if (!factored_computed_goto)
            {
              basic_block new_bb = create_empty_bb (bb);
-             block_stmt_iterator new_bsi = bsi_start (new_bb);
+             gimple_stmt_iterator new_gsi = gsi_start_bb (new_bb);
 
              /* Create the destination of the factored goto.  Each original
                 computed goto will put its desired destination into this
@@ -302,60 +311,58 @@ factor_computed_gotos (void)
                 factored computed goto.  */
              factored_label_decl = create_artificial_label ();
              factored_computed_goto_label
-               = build1 (LABEL_EXPR, void_type_node, factored_label_decl);
-             bsi_insert_after (&new_bsi, factored_computed_goto_label,
-                               BSI_NEW_STMT);
+               = gimple_build_label (factored_label_decl);
+             gsi_insert_after (&new_gsi, factored_computed_goto_label,
+                               GSI_NEW_STMT);
 
              /* Build our new computed goto.  */
-             factored_computed_goto = build1 (GOTO_EXPR, void_type_node, var);
-             bsi_insert_after (&new_bsi, factored_computed_goto,
-                               BSI_NEW_STMT);
+             factored_computed_goto = gimple_build_goto (var);
+             gsi_insert_after (&new_gsi, factored_computed_goto, GSI_NEW_STMT);
            }
 
          /* Copy the original computed goto's destination into VAR.  */
-         assignment = build_gimple_modify_stmt (var,
-                                                GOTO_DESTINATION (last));
-         bsi_insert_before (&bsi, assignment, BSI_SAME_STMT);
+         assignment = gimple_build_assign (var, gimple_goto_dest (last));
+         gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
 
          /* And re-vector the computed goto to the new destination.  */
-         GOTO_DESTINATION (last) = factored_label_decl;
+         gimple_goto_set_dest (last, factored_label_decl);
        }
     }
 }
 
 
-/* Build a flowgraph for the statement_list STMT_LIST.  */
+/* Build a flowgraph for the sequence of stmts SEQ.  */
 
 static void
-make_blocks (tree stmt_list)
+make_blocks (gimple_seq seq)
 {
-  tree_stmt_iterator i = tsi_start (stmt_list);
-  tree stmt = NULL;
+  gimple_stmt_iterator i = gsi_start (seq);
+  gimple stmt = NULL;
   bool start_new_block = true;
-  bool first_stmt_of_list = true;
+  bool first_stmt_of_seq = true;
   basic_block bb = ENTRY_BLOCK_PTR;
 
-  while (!tsi_end_p (i))
+  while (!gsi_end_p (i))
     {
-      tree prev_stmt;
+      gimple prev_stmt;
 
       prev_stmt = stmt;
-      stmt = tsi_stmt (i);
+      stmt = gsi_stmt (i);
 
       /* If the statement starts a new basic block or if we have determined
         in a previous pass that we need to create a new block for STMT, do
         so now.  */
       if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
        {
-         if (!first_stmt_of_list)
-           stmt_list = tsi_split_statement_list_before (&i);
-         bb = create_basic_block (stmt_list, NULL, bb);
+         if (!first_stmt_of_seq)
+           seq = gsi_split_seq_before (&i);
+         bb = create_basic_block (seq, NULL, bb);
          start_new_block = false;
        }
 
       /* Now add STMT to BB and create the subgraphs for special statement
         codes.  */
-      set_bb_for_stmt (stmt, bb);
+      gimple_set_bb (stmt, bb);
 
       if (computed_goto_p (stmt))
        found_computed_goto = true;
@@ -365,8 +372,8 @@ make_blocks (tree stmt_list)
       if (stmt_ends_bb_p (stmt))
        start_new_block = true;
 
-      tsi_next (&i);
-      first_stmt_of_list = false;
+      gsi_next (&i);
+      first_stmt_of_seq = false;
     }
 }
 
@@ -387,8 +394,8 @@ create_bb (void *h, void *e, basic_block after)
 
   bb->index = last_basic_block;
   bb->flags = BB_NEW;
-  bb->il.tree = GGC_CNEW (struct tree_bb_info);
-  set_bb_stmt_list (bb, h ? (tree) h : alloc_stmt_list ());
+  bb->il.gimple = GGC_CNEW (struct gimple_bb_info);
+  set_bb_seq (bb, h ? (gimple_seq) h : gimple_seq_alloc ());
 
   /* Add the new block to the linked list of blocks.  */
   link_block (bb, after);
@@ -423,25 +430,31 @@ fold_cond_expr_cond (void)
 
   FOR_EACH_BB (bb)
     {
-      tree stmt = last_stmt (bb);
+      gimple stmt = last_stmt (bb);
 
-      if (stmt
-         && TREE_CODE (stmt) == COND_EXPR)
+      if (stmt && gimple_code (stmt) == GIMPLE_COND)
        {
          tree cond;
          bool zerop, onep;
 
          fold_defer_overflow_warnings ();
-         cond = fold (COND_EXPR_COND (stmt));
-         zerop = integer_zerop (cond);
-         onep = integer_onep (cond);
+         cond = fold_binary (gimple_cond_code (stmt), boolean_type_node,
+                             gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
+         if (cond)
+           {
+             zerop = integer_zerop (cond);
+             onep = integer_onep (cond);
+           }
+         else
+           zerop = onep = false;
+
          fold_undefer_overflow_warnings (zerop || onep,
                                          stmt,
                                          WARN_STRICT_OVERFLOW_CONDITIONAL);
          if (zerop)
-           COND_EXPR_COND (stmt) = boolean_false_node;
+           gimple_cond_make_false (stmt);
          else if (onep)
-           COND_EXPR_COND (stmt) = boolean_true_node;
+           gimple_cond_make_true (stmt);
        }
     }
 }
@@ -461,40 +474,40 @@ make_edges (void)
   /* Traverse the basic block array placing edges.  */
   FOR_EACH_BB (bb)
     {
-      tree last = last_stmt (bb);
+      gimple last = last_stmt (bb);
       bool fallthru;
 
       if (last)
        {
-         enum tree_code code = TREE_CODE (last);
+         enum gimple_code code = gimple_code (last);
          switch (code)
            {
-           case GOTO_EXPR:
+           case GIMPLE_GOTO:
              make_goto_expr_edges (bb);
              fallthru = false;
              break;
-           case RETURN_EXPR:
+           case GIMPLE_RETURN:
              make_edge (bb, EXIT_BLOCK_PTR, 0);
              fallthru = false;
              break;
-           case COND_EXPR:
+           case GIMPLE_COND:
              make_cond_expr_edges (bb);
              fallthru = false;
              break;
-           case SWITCH_EXPR:
-             make_switch_expr_edges (bb);
+           case GIMPLE_SWITCH:
+             make_gimple_switch_edges (bb);
              fallthru = false;
              break;
-           case RESX_EXPR:
+           case GIMPLE_RESX:
              make_eh_edges (last);
              fallthru = false;
              break;
 
-           case CALL_EXPR:
+           case GIMPLE_CALL:
              /* If this function receives a nonlocal goto, then we need to
                 make edges from this call site to all the nonlocal goto
                 handlers.  */
-             if (tree_can_make_abnormal_goto (last))
+             if (stmt_can_make_abnormal_goto (last))
                make_abnormal_goto_edges (bb, true);
 
              /* If this statement has reachable exception handlers, then
@@ -502,83 +515,78 @@ make_edges (void)
              make_eh_edges (last);
 
              /* Some calls are known not to return.  */
-             fallthru = !(call_expr_flags (last) & ECF_NORETURN);
+             fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
              break;
 
-           case MODIFY_EXPR:
-             gcc_unreachable ();
-
-           case GIMPLE_MODIFY_STMT:
+           case GIMPLE_ASSIGN:
+              /* A GIMPLE_ASSIGN may throw internally and thus be considered
+                 control-altering.  */
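+              /* For instance, a trapping memory reference or division may
+                 throw when the function is compiled with
+                 -fnon-call-exceptions.  */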
              if (is_ctrl_altering_stmt (last))
                {
-                 /* A GIMPLE_MODIFY_STMT may have a CALL_EXPR on its RHS and
-                    the CALL_EXPR may have an abnormal edge.  Search the RHS
-                    for this case and create any required edges.  */
-                 if (tree_can_make_abnormal_goto (last))
-                   make_abnormal_goto_edges (bb, true);  
-
                  make_eh_edges (last);
                }
              fallthru = true;
              break;
 
-           case OMP_PARALLEL:
-           case OMP_TASK:
-           case OMP_FOR:
-           case OMP_SINGLE:
-           case OMP_MASTER:
-           case OMP_ORDERED:
-           case OMP_CRITICAL:
-           case OMP_SECTION:
+           case GIMPLE_OMP_PARALLEL:
+           case GIMPLE_OMP_TASK:
+           case GIMPLE_OMP_FOR:
+           case GIMPLE_OMP_SINGLE:
+           case GIMPLE_OMP_MASTER:
+           case GIMPLE_OMP_ORDERED:
+           case GIMPLE_OMP_CRITICAL:
+           case GIMPLE_OMP_SECTION:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;
 
-           case OMP_SECTIONS:
+           case GIMPLE_OMP_SECTIONS:
              cur_region = new_omp_region (bb, code, cur_region);
              fallthru = true;
              break;
 
-           case OMP_SECTIONS_SWITCH:
+           case GIMPLE_OMP_SECTIONS_SWITCH:
              fallthru = false;
              break;
 
 
-            case OMP_ATOMIC_LOAD:
-            case OMP_ATOMIC_STORE:
+            case GIMPLE_OMP_ATOMIC_LOAD:
+            case GIMPLE_OMP_ATOMIC_STORE:
                fallthru = true;
                break;
 
 
-           case OMP_RETURN:
-             /* In the case of an OMP_SECTION, the edge will go somewhere
-                other than the next block.  This will be created later.  */
+           case GIMPLE_OMP_RETURN:
+             /* In the case of a GIMPLE_OMP_SECTION, the edge will go
+                somewhere other than the next block.  This will be
+                created later.  */
              cur_region->exit = bb;
-             fallthru = cur_region->type != OMP_SECTION;
+             fallthru = cur_region->type != GIMPLE_OMP_SECTION;
              cur_region = cur_region->outer;
              break;
 
-           case OMP_CONTINUE:
+           case GIMPLE_OMP_CONTINUE:
              cur_region->cont = bb;
              switch (cur_region->type)
                {
-               case OMP_FOR:
-                 /* Mark all OMP_FOR and OMP_CONTINUE succs edges as abnormal
-                    to prevent splitting them.  */
+               case GIMPLE_OMP_FOR:
+                 /* Mark all GIMPLE_OMP_FOR and GIMPLE_OMP_CONTINUE
+                    successor edges as abnormal to prevent splitting
+                    them.  */
                  single_succ_edge (cur_region->entry)->flags |= EDGE_ABNORMAL;
                  /* Make the loopback edge.  */
                  make_edge (bb, single_succ (cur_region->entry),
                             EDGE_ABNORMAL);
 
-                 /* Create an edge from OMP_FOR to exit, which corresponds to
-                    the case that the body of the loop is not executed at
-                    all.  */
+                 /* Create an edge from GIMPLE_OMP_FOR to exit, which
+                    corresponds to the case that the body of the loop
+                    is not executed at all.  */
                  make_edge (cur_region->entry, bb->next_bb, EDGE_ABNORMAL);
                  make_edge (bb, bb->next_bb, EDGE_FALLTHRU | EDGE_ABNORMAL);
                  fallthru = false;
                  break;
 
-               case OMP_SECTIONS:
+               case GIMPLE_OMP_SECTIONS:
                  /* Wire up the edges into and out of the nested sections.  */
                  {
                    basic_block switch_bb = single_succ (cur_region->entry);
@@ -586,13 +594,13 @@ make_edges (void)
                    struct omp_region *i;
                    for (i = cur_region->inner; i ; i = i->next)
                      {
-                       gcc_assert (i->type == OMP_SECTION);
+                       gcc_assert (i->type == GIMPLE_OMP_SECTION);
                        make_edge (switch_bb, i->entry, 0);
                        make_edge (i->exit, bb, EDGE_FALLTHRU);
                      }
 
                    /* Make the loopback edge to the block with
-                      OMP_SECTIONS_SWITCH.  */
+                      GIMPLE_OMP_SECTIONS_SWITCH.  */
                    make_edge (bb, switch_bb, 0);
 
                    /* Make the edge from the switch to exit.  */
@@ -626,35 +634,37 @@ make_edges (void)
 }
 
 
-/* Create the edges for a COND_EXPR starting at block BB.
-   At this point, both clauses must contain only simple gotos.  */
+/* Create the edges for a GIMPLE_COND starting at block BB.  */
 
 static void
 make_cond_expr_edges (basic_block bb)
 {
-  tree entry = last_stmt (bb);
+  gimple entry = last_stmt (bb);
+  gimple then_stmt, else_stmt;
   basic_block then_bb, else_bb;
   tree then_label, else_label;
   edge e;
 
   gcc_assert (entry);
-  gcc_assert (TREE_CODE (entry) == COND_EXPR);
+  gcc_assert (gimple_code (entry) == GIMPLE_COND);
 
   /* Entry basic blocks for each component.  */
-  then_label = GOTO_DESTINATION (COND_EXPR_THEN (entry));
-  else_label = GOTO_DESTINATION (COND_EXPR_ELSE (entry));
+  then_label = gimple_cond_true_label (entry);
+  else_label = gimple_cond_false_label (entry);
   then_bb = label_to_block (then_label);
   else_bb = label_to_block (else_label);
+  then_stmt = first_stmt (then_bb);
+  else_stmt = first_stmt (else_bb);
 
   e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
-  e->goto_locus = EXPR_LOCATION (COND_EXPR_THEN (entry));
+  e->goto_locus = gimple_location (then_stmt);
   e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
   if (e)
-    e->goto_locus = EXPR_LOCATION (COND_EXPR_ELSE (entry));
+    e->goto_locus = gimple_location (else_stmt);
 
-  /* We do not need the gotos anymore.  */
-  COND_EXPR_THEN (entry) = NULL_TREE;
-  COND_EXPR_ELSE (entry) = NULL_TREE;
+  /* We do not need the labels anymore.  */
+  gimple_cond_set_true_label (entry, NULL_TREE);
+  gimple_cond_set_false_label (entry, NULL_TREE);
 }
 
 
@@ -714,11 +724,10 @@ end_recording_case_labels (void)
    Otherwise return NULL.  */
 
 static tree
-get_cases_for_edge (edge e, tree t)
+get_cases_for_edge (edge e, gimple t)
 {
   void **slot;
   size_t i, n;
-  tree vec;
 
   /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
      chains available.  Return NULL so the caller can detect this case.  */
@@ -733,11 +742,10 @@ get_cases_for_edge (edge e, tree t)
      time we have been queried for information about E & T.  Add all the
      elements from T to the hash table then perform the query again.  */
 
-  vec = SWITCH_LABELS (t);
-  n = TREE_VEC_LENGTH (vec);
+  n = gimple_switch_num_labels (t);
   for (i = 0; i < n; i++)
     {
-      tree elt = TREE_VEC_ELT (vec, i);
+      tree elt = gimple_switch_label (t, i);
       tree lab = CASE_LABEL (elt);
       basic_block label_bb = label_to_block (lab);
       edge this_edge = find_edge (e->src, label_bb);
@@ -752,23 +760,19 @@ get_cases_for_edge (edge e, tree t)
   return (tree) *pointer_map_contains (edge_to_cases, e);
 }
 
-/* Create the edges for a SWITCH_EXPR starting at block BB.
-   At this point, the switch body has been lowered and the
-   SWITCH_LABELS filled in, so this is in effect a multi-way branch.  */
+/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */
 
 static void
-make_switch_expr_edges (basic_block bb)
+make_gimple_switch_edges (basic_block bb)
 {
-  tree entry = last_stmt (bb);
+  gimple entry = last_stmt (bb);
   size_t i, n;
-  tree vec;
 
-  vec = SWITCH_LABELS (entry);
-  n = TREE_VEC_LENGTH (vec);
+  n = gimple_switch_num_labels (entry);
 
   for (i = 0; i < n; ++i)
     {
-      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
+      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
       basic_block label_bb = label_to_block (lab);
       make_edge (bb, label_bb, 0);
     }
@@ -787,12 +791,11 @@ label_to_block_fn (struct function *ifun, tree dest)
      and undefined variable warnings quite right.  */
   if ((errorcount || sorrycount) && uid < 0)
     {
-      block_stmt_iterator bsi =
-       bsi_start (BASIC_BLOCK (NUM_FIXED_BLOCKS));
-      tree stmt;
+      gimple_stmt_iterator gsi = gsi_start_bb (BASIC_BLOCK (NUM_FIXED_BLOCKS));
+      gimple stmt;
 
-      stmt = build1 (LABEL_EXPR, void_type_node, dest);
-      bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
+      stmt = gimple_build_label (dest);
+      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
       uid = LABEL_DECL_UID (dest);
     }
   if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
@@ -808,17 +811,18 @@ void
 make_abnormal_goto_edges (basic_block bb, bool for_call)
 {
   basic_block target_bb;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
 
   FOR_EACH_BB (target_bb)
-    for (bsi = bsi_start (target_bb); !bsi_end_p (bsi); bsi_next (&bsi))
+    for (gsi = gsi_start_bb (target_bb); !gsi_end_p (gsi); gsi_next (&gsi))
       {
-       tree target = bsi_stmt (bsi);
+       gimple label_stmt = gsi_stmt (gsi);
+       tree target;
 
-       if (TREE_CODE (target) != LABEL_EXPR)
+       if (gimple_code (label_stmt) != GIMPLE_LABEL)
          break;
 
-       target = LABEL_EXPR_LABEL (target);
+       target = gimple_label_label (label_stmt);
 
        /* Make an edge to every label block that has been marked as a
           potential target for a computed goto or a non-local goto.  */
@@ -836,16 +840,16 @@ make_abnormal_goto_edges (basic_block bb, bool for_call)
 static void
 make_goto_expr_edges (basic_block bb)
 {
-  block_stmt_iterator last = bsi_last (bb);
-  tree goto_t = bsi_stmt (last);
+  gimple_stmt_iterator last = gsi_last_bb (bb);
+  gimple goto_t = gsi_stmt (last);
 
   /* A simple GOTO creates normal edges.  */
   if (simple_goto_p (goto_t))
     {
-      tree dest = GOTO_DESTINATION (goto_t);
+      tree dest = gimple_goto_dest (goto_t);
       edge e = make_edge (bb, label_to_block (dest), EDGE_FALLTHRU);
-      e->goto_locus = EXPR_LOCATION (goto_t);
-      bsi_remove (&last, true);
+      e->goto_locus = gimple_location (goto_t);
+      gsi_remove (&last, true);
       return;
     }
 
@@ -898,7 +902,9 @@ update_eh_label (struct eh_region *region)
     }
 }
 
+
 /* Given LABEL return the first label in the same basic block.  */
+
 static tree
 main_block_label (tree label)
 {
@@ -931,16 +937,17 @@ cleanup_dead_labels (void)
      label if there is one, or otherwise just the first label we see.  */
   FOR_EACH_BB (bb)
     {
-      block_stmt_iterator i;
+      gimple_stmt_iterator i;
 
-      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
+      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
-         tree label, stmt = bsi_stmt (i);
+         tree label;
+         gimple stmt = gsi_stmt (i);
 
-         if (TREE_CODE (stmt) != LABEL_EXPR)
+         if (gimple_code (stmt) != GIMPLE_LABEL)
            break;
 
-         label = LABEL_EXPR_LABEL (stmt);
+         label = gimple_label_label (stmt);
 
          /* If we have not yet seen a label for the current block,
             remember this one and see if there are more labels.  */
@@ -966,52 +973,45 @@ cleanup_dead_labels (void)
      First do so for each block ending in a control statement.  */
   FOR_EACH_BB (bb)
     {
-      tree stmt = last_stmt (bb);
+      gimple stmt = last_stmt (bb);
       if (!stmt)
        continue;
 
-      switch (TREE_CODE (stmt))
+      switch (gimple_code (stmt))
        {
-       case COND_EXPR:
+       case GIMPLE_COND:
          {
-           tree true_branch, false_branch;
-
-           true_branch = COND_EXPR_THEN (stmt);
-           false_branch = COND_EXPR_ELSE (stmt);
-
-           if (true_branch)
-             GOTO_DESTINATION (true_branch)
-                     = main_block_label (GOTO_DESTINATION (true_branch));
-           if (false_branch)
-             GOTO_DESTINATION (false_branch)
-                     = main_block_label (GOTO_DESTINATION (false_branch));
+           tree true_label = gimple_cond_true_label (stmt);
+           tree false_label = gimple_cond_false_label (stmt);
 
+           if (true_label)
+             gimple_cond_set_true_label (stmt, main_block_label (true_label));
+           if (false_label)
+             gimple_cond_set_false_label (stmt, main_block_label (false_label));
            break;
          }
 
-       case SWITCH_EXPR:
+       case GIMPLE_SWITCH:
          {
-           size_t i;
-           tree vec = SWITCH_LABELS (stmt);
-           size_t n = TREE_VEC_LENGTH (vec);
+           size_t i, n = gimple_switch_num_labels (stmt);
 
            /* Replace all destination labels.  */
            for (i = 0; i < n; ++i)
              {
-               tree elt = TREE_VEC_ELT (vec, i);
-               tree label = main_block_label (CASE_LABEL (elt));
-               CASE_LABEL (elt) = label;
+               tree case_label = gimple_switch_label (stmt, i);
+               tree label = main_block_label (CASE_LABEL (case_label));
+               CASE_LABEL (case_label) = label;
              }
            break;
          }
 
-       /* We have to handle GOTO_EXPRs until they're removed, and we don't
+       /* We have to handle gotos until they're removed, and we don't
           remove them until after we've created the CFG edges.  */
-       case GOTO_EXPR:
-          if (! computed_goto_p (stmt))
+       case GIMPLE_GOTO:
+          if (!computed_goto_p (stmt))
            {
-             GOTO_DESTINATION (stmt)
-               = main_block_label (GOTO_DESTINATION (stmt));
+             tree new_dest = main_block_label (gimple_goto_dest (stmt));
+             gimple_goto_set_dest (stmt, new_dest);
              break;
            }
 
@@ -1027,7 +1027,7 @@ cleanup_dead_labels (void)
      address taken are preserved.  */
   FOR_EACH_BB (bb)
     {
-      block_stmt_iterator i;
+      gimple_stmt_iterator i;
       tree label_for_this_bb = label_for_bb[bb->index].label;
 
       if (!label_for_this_bb)
@@ -1037,22 +1037,23 @@ cleanup_dead_labels (void)
       if (!label_for_bb[bb->index].used)
        label_for_this_bb = NULL;
 
-      for (i = bsi_start (bb); !bsi_end_p (i); )
+      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
-         tree label, stmt = bsi_stmt (i);
+         tree label;
+         gimple stmt = gsi_stmt (i);
 
-         if (TREE_CODE (stmt) != LABEL_EXPR)
+         if (gimple_code (stmt) != GIMPLE_LABEL)
            break;
 
-         label = LABEL_EXPR_LABEL (stmt);
+         label = gimple_label_label (stmt);
 
          if (label == label_for_this_bb
-             || ! DECL_ARTIFICIAL (label)
+             || !DECL_ARTIFICIAL (label)
              || DECL_NONLOCAL (label)
              || FORCED_LABEL (label))
-           bsi_next (&i);
+           gsi_next (&i);
          else
-           bsi_remove (&i, true);
+           gsi_remove (&i, true);
        }
     }
 
@@ -1071,32 +1072,37 @@ group_case_labels (void)
 
   FOR_EACH_BB (bb)
     {
-      tree stmt = last_stmt (bb);
-      if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
+      gimple stmt = last_stmt (bb);
+      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
        {
-         tree labels = SWITCH_LABELS (stmt);
-         int old_size = TREE_VEC_LENGTH (labels);
+         int old_size = gimple_switch_num_labels (stmt);
          int i, j, new_size = old_size;
          tree default_case = NULL_TREE;
          tree default_label = NULL_TREE;
+         bool has_default;
 
-         /* The default label is always the last case in a switch
+         /* The default label is always the first case in a switch
             statement after gimplification if it was not optimized
-            away */
-         if (!CASE_LOW (TREE_VEC_ELT (labels, old_size - 1))
-             && !CASE_HIGH (TREE_VEC_ELT (labels, old_size - 1)))
+            away.  */
+         if (!CASE_LOW (gimple_switch_default_label (stmt))
+             && !CASE_HIGH (gimple_switch_default_label (stmt)))
            {
-             default_case = TREE_VEC_ELT (labels, old_size - 1);
+             default_case = gimple_switch_default_label (stmt);
              default_label = CASE_LABEL (default_case);
-             old_size--;
+             has_default = true;
            }
+         else
+           has_default = false;
 
          /* Look for possible opportunities to merge cases.  */
-          i = 0;
+         if (has_default)
+           i = 1;
+         else
+           i = 0;
          while (i < old_size)
            {
              tree base_case, base_label, base_high;
-             base_case = TREE_VEC_ELT (labels, i);
+             base_case = gimple_switch_label (stmt, i);
 
              gcc_assert (base_case);
              base_label = CASE_LABEL (base_case);
@@ -1105,21 +1111,23 @@ group_case_labels (void)
                 default case.  */
              if (base_label == default_label)
                {
-                 TREE_VEC_ELT (labels, i) = NULL_TREE;
+                 gimple_switch_set_label (stmt, i, NULL_TREE);
                  i++;
                  new_size--;
                  continue;
                }
 
-             base_high = CASE_HIGH (base_case) ?
-               CASE_HIGH (base_case) : CASE_LOW (base_case);
+             base_high = CASE_HIGH (base_case)
+                         ? CASE_HIGH (base_case)
+                         : CASE_LOW (base_case);
              i++;
+
              /* Try to merge case labels.  Break out when we reach the end
                 of the label vector or when we cannot merge the next case
                 label with the current one.  */
              while (i < old_size)
                {
-                 tree merge_case = TREE_VEC_ELT (labels, i);
+                 tree merge_case = gimple_switch_label (stmt, i);
                  tree merge_label = CASE_LABEL (merge_case);
                  tree t = int_const_binop (PLUS_EXPR, base_high,
                                            integer_one_node, 1);
@@ -1132,7 +1140,7 @@ group_case_labels (void)
                      base_high = CASE_HIGH (merge_case) ?
                        CASE_HIGH (merge_case) : CASE_LOW (merge_case);
                      CASE_HIGH (base_case) = base_high;
-                     TREE_VEC_ELT (labels, i) = NULL_TREE;
+                     gimple_switch_set_label (stmt, i, NULL_TREE);
                      new_size--;
                      i++;
                    }
@@ -1145,11 +1153,14 @@ group_case_labels (void)
             length of the vector.  */
          for (i = 0, j = 0; i < new_size; i++)
            {
-             while (! TREE_VEC_ELT (labels, j))
+             while (! gimple_switch_label (stmt, j))
                j++;
-             TREE_VEC_ELT (labels, i) = TREE_VEC_ELT (labels, j++);
+             gimple_switch_set_label (stmt, i,
+                                      gimple_switch_label (stmt, j++));
            }
-         TREE_VEC_LENGTH (labels) = new_size;
+
+         gcc_assert (new_size <= old_size);
+         gimple_switch_set_num_labels (stmt, new_size);
        }
     }
 }
@@ -1157,11 +1168,11 @@ group_case_labels (void)
 /* Checks whether we can merge block B into block A.  */
 
 static bool
-tree_can_merge_blocks_p (basic_block a, basic_block b)
+gimple_can_merge_blocks_p (basic_block a, basic_block b)
 {
-  const_tree stmt;
-  block_stmt_iterator bsi;
-  tree phi;
+  gimple stmt;
+  gimple_stmt_iterator gsi;
+  gimple_seq phis;
 
   if (!single_succ_p (a))
     return false;
@@ -1180,40 +1191,46 @@ tree_can_merge_blocks_p (basic_block a, basic_block b)
 
   /* If A ends by a statement causing exceptions or something similar, we
      cannot merge the blocks.  */
-  /* This CONST_CAST is okay because last_stmt doesn't modify its
-     argument and the return value is assign to a const_tree.  */
-  stmt = last_stmt (CONST_CAST_BB (a));
+  stmt = last_stmt (a);
   if (stmt && stmt_ends_bb_p (stmt))
     return false;
 
   /* Do not allow a block with only a non-local label to be merged.  */
-  if (stmt && TREE_CODE (stmt) == LABEL_EXPR
-      && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
+  if (stmt
+      && gimple_code (stmt) == GIMPLE_LABEL
+      && DECL_NONLOCAL (gimple_label_label (stmt)))
     return false;
 
   /* It must be possible to eliminate all phi nodes in B.  If ssa form
      is not up-to-date, we cannot eliminate any phis; however, if only
      some symbols as whole are marked for renaming, this is not a problem,
      as phi nodes for those symbols are irrelevant in updating anyway.  */
-  phi = phi_nodes (b);
-  if (phi)
+  phis = phi_nodes (b);
+  if (!gimple_seq_empty_p (phis))
     {
+      gimple_stmt_iterator i;
+
       if (name_mappings_registered_p ())
        return false;
 
-      for (; phi; phi = PHI_CHAIN (phi))
-       if (!is_gimple_reg (PHI_RESULT (phi))
-           && !may_propagate_copy (PHI_RESULT (phi), PHI_ARG_DEF (phi, 0)))
-         return false;
+      for (i = gsi_start (phis); !gsi_end_p (i); gsi_next (&i))
+       {
+         gimple phi = gsi_stmt (i);
+
+         if (!is_gimple_reg (gimple_phi_result (phi))
+             && !may_propagate_copy (gimple_phi_result (phi),
+                                     gimple_phi_arg_def (phi, 0)))
+           return false;
+       }
     }
 
   /* Do not remove user labels.  */
-  for (bsi = bsi_start (b); !bsi_end_p (bsi); bsi_next (&bsi))
+  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
     {
-      stmt = bsi_stmt (bsi);
-      if (TREE_CODE (stmt) != LABEL_EXPR)
+      stmt = gsi_stmt (gsi);
+      if (gimple_code (stmt) != GIMPLE_LABEL)
        break;
-      if (!DECL_ARTIFICIAL (LABEL_EXPR_LABEL (stmt)))
+      if (!DECL_ARTIFICIAL (gimple_label_label (stmt)))
        return false;
     }
 
@@ -1232,21 +1249,21 @@ replace_uses_by (tree name, tree val)
 {
   imm_use_iterator imm_iter;
   use_operand_p use;
-  tree stmt;
+  gimple stmt;
   edge e;
 
   FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
     {
-      if (TREE_CODE (stmt) != PHI_NODE)
+      if (gimple_code (stmt) != GIMPLE_PHI)
        push_stmt_changes (&stmt);
 
       FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
         {
          replace_exp (use, val);
 
-         if (TREE_CODE (stmt) == PHI_NODE)
+         if (gimple_code (stmt) == GIMPLE_PHI)
            {
-             e = PHI_ARG_EDGE (stmt, PHI_ARG_INDEX_FROM_USE (use));
+             e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
              if (e->flags & EDGE_ABNORMAL)
                {
                  /* This can only occur for virtual operands, since
@@ -1258,18 +1275,24 @@ replace_uses_by (tree name, tree val)
            }
        }
 
-      if (TREE_CODE (stmt) != PHI_NODE)
+      if (gimple_code (stmt) != GIMPLE_PHI)
        {
-         tree rhs;
+         size_t i;
 
          fold_stmt_inplace (stmt);
          if (cfgcleanup_altered_bbs)
-           bitmap_set_bit (cfgcleanup_altered_bbs, bb_for_stmt (stmt)->index);
+           bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
 
          /* FIXME.  This should go in pop_stmt_changes.  */
-         rhs = get_rhs (stmt);
-         if (TREE_CODE (rhs) == ADDR_EXPR)
-           recompute_tree_invariant_for_addr_expr (rhs);
+         for (i = 0; i < gimple_num_ops (stmt); i++)
+           {
+             tree op = gimple_op (stmt, i);
+              /* Operands may be empty here.  For example, the labels
+                 of a GIMPLE_COND are nulled out following the creation
+                 of the corresponding CFG edges.  */
+             if (op && TREE_CODE (op) == ADDR_EXPR)
+               recompute_tree_invariant_for_addr_expr (op);
+           }
 
          maybe_clean_or_replace_eh_stmt (stmt, stmt);
 
@@ -1295,23 +1318,24 @@ replace_uses_by (tree name, tree val)
 /* Merge block B into block A.  */
 
 static void
-tree_merge_blocks (basic_block a, basic_block b)
+gimple_merge_blocks (basic_block a, basic_block b)
 {
-  block_stmt_iterator bsi;
-  tree_stmt_iterator last;
-  tree phi;
+  gimple_stmt_iterator last, gsi, psi;
+  gimple_seq phis = phi_nodes (b);
 
   if (dump_file)
     fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
 
   /* Remove all single-valued PHI nodes from block B of the form
      V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
-  bsi = bsi_last (a);
-  for (phi = phi_nodes (b); phi; phi = phi_nodes (b))
+  gsi = gsi_last_bb (a);
+  for (psi = gsi_start (phis); !gsi_end_p (psi); )
     {
-      tree def = PHI_RESULT (phi), use = PHI_ARG_DEF (phi, 0);
-      tree copy;
-      bool may_replace_uses = may_propagate_copy (def, use);
+      gimple phi = gsi_stmt (psi);
+      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
+      gimple copy;
+      bool may_replace_uses = !is_gimple_reg (def)
+                             || may_propagate_copy (def, use);
 
       /* In case we maintain loop closed ssa form, do not propagate arguments
         of loop exit phi nodes.  */
@@ -1330,10 +1354,9 @@ tree_merge_blocks (basic_block a, basic_block b)
             with ordering of phi nodes.  This is because A is the single
             predecessor of B, therefore results of the phi nodes cannot
             appear as arguments of the phi nodes.  */
-         copy = build_gimple_modify_stmt (def, use);
-         bsi_insert_after (&bsi, copy, BSI_NEW_STMT);
-         SSA_NAME_DEF_STMT (def) = copy;
-          remove_phi_node (phi, NULL, false);
+         copy = gimple_build_assign (def, use);
+         gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
+          remove_phi_node (&psi, false);
        }
       else
         {
@@ -1344,7 +1367,7 @@ tree_merge_blocks (basic_block a, basic_block b)
            {
              imm_use_iterator iter;
              use_operand_p use_p;
-             tree stmt;
+             gimple stmt;
 
              FOR_EACH_IMM_USE_STMT (stmt, iter, def)
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
@@ -1352,7 +1375,8 @@ tree_merge_blocks (basic_block a, basic_block b)
            }
          else
             replace_uses_by (def, use);
-          remove_phi_node (phi, NULL, true);
+
+          remove_phi_node (&psi, true);
         }
     }
 
@@ -1362,37 +1386,38 @@ tree_merge_blocks (basic_block a, basic_block b)
   gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
   gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
 
-  /* Remove labels from B and set bb_for_stmt to A for other statements.  */
-  for (bsi = bsi_start (b); !bsi_end_p (bsi);)
+  /* Remove labels from B and set gimple_bb to A for other statements.  */
+  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
     {
-      if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
+      if (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
        {
-         tree label = bsi_stmt (bsi);
+         gimple label = gsi_stmt (gsi);
+
+         gsi_remove (&gsi, false);
 
-         bsi_remove (&bsi, false);
          /* Now that we can thread computed gotos, we might have
             a situation where we have a forced label in block B
             However, the label at the start of block B might still be
             used in other ways (think about the runtime checking for
             Fortran assigned gotos).  So we can not just delete the
             label.  Instead we move the label to the start of block A.  */
-         if (FORCED_LABEL (LABEL_EXPR_LABEL (label)))
+         if (FORCED_LABEL (gimple_label_label (label)))
            {
-             block_stmt_iterator dest_bsi = bsi_start (a);
-             bsi_insert_before (&dest_bsi, label, BSI_NEW_STMT);
+             gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
+             gsi_insert_before (&dest_gsi, label, GSI_NEW_STMT);
            }
        }
       else
        {
-         change_bb_for_stmt (bsi_stmt (bsi), a);
-         bsi_next (&bsi);
+         gimple_set_bb (gsi_stmt (gsi), a);
+         gsi_next (&gsi);
        }
     }
 
-  /* Merge the chains.  */
-  last = tsi_last (bb_stmt_list (a));
-  tsi_link_after (&last, bb_stmt_list (b), TSI_NEW_STMT);
-  set_bb_stmt_list (b, NULL_TREE);
+  /* Merge the sequences.  */
+  last = gsi_last_bb (a);
+  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
+  set_bb_seq (b, NULL);
 
   if (cfgcleanup_altered_bbs)
     bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
@@ -1432,6 +1457,8 @@ single_noncomplex_succ (basic_block bb)
 
      * Some unnecessary BIND_EXPRs are removed
 
+     * GOTO_EXPRs immediately preceding their destination are removed.
+
    Clearly more work could be done.  The trick is doing the analysis
    and removal fast enough to be a net improvement in compile times.
 
@@ -1441,208 +1468,172 @@ single_noncomplex_succ (basic_block bb)
 
 struct rus_data
 {
-  tree *last_goto;
   bool repeat;
   bool may_throw;
   bool may_branch;
   bool has_label;
+  bool last_was_goto;
+  gimple_stmt_iterator last_goto_gsi;
 };
 
-static void remove_useless_stmts_1 (tree *, struct rus_data *);
+
+static void remove_useless_stmts_1 (gimple_stmt_iterator *gsi, struct rus_data *);
+
+/* Given a statement sequence, find the first executable statement with
+   location information, and warn that it is unreachable.  When searching,
+   descend into containers in execution order.  */
 
 static bool
-remove_useless_stmts_warn_notreached (tree stmt)
+remove_useless_stmts_warn_notreached (gimple_seq stmts)
 {
-  if (EXPR_HAS_LOCATION (stmt))
-    {
-      location_t loc = EXPR_LOCATION (stmt);
-      if (LOCATION_LINE (loc) > 0)
-       {
-         warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
-         return true;
-       }
-    }
+  gimple_stmt_iterator gsi;
 
-  switch (TREE_CODE (stmt))
+  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
     {
-    case STATEMENT_LIST:
-      {
-       tree_stmt_iterator i;
-       for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
-         if (remove_useless_stmts_warn_notreached (tsi_stmt (i)))
-           return true;
-      }
-      break;
+      gimple stmt = gsi_stmt (gsi);
 
-    case COND_EXPR:
-      if (remove_useless_stmts_warn_notreached (COND_EXPR_COND (stmt)))
-       return true;
-      if (remove_useless_stmts_warn_notreached (COND_EXPR_THEN (stmt)))
-       return true;
-      if (remove_useless_stmts_warn_notreached (COND_EXPR_ELSE (stmt)))
-       return true;
-      break;
+      if (gimple_has_location (stmt))
+        {
+          location_t loc = gimple_location (stmt);
+          if (LOCATION_LINE (loc) > 0)
+           {
+              warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
+              return true;
+            }
+        }
 
-    case TRY_FINALLY_EXPR:
-    case TRY_CATCH_EXPR:
-      if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 0)))
-       return true;
-      if (remove_useless_stmts_warn_notreached (TREE_OPERAND (stmt, 1)))
-       return true;
-      break;
+      switch (gimple_code (stmt))
+        {
+        /* Unfortunately, we need the CFG now to detect unreachable
+           branches in a conditional, so conditionals are not handled here.  */
 
-    case CATCH_EXPR:
-      return remove_useless_stmts_warn_notreached (CATCH_BODY (stmt));
-    case EH_FILTER_EXPR:
-      return remove_useless_stmts_warn_notreached (EH_FILTER_FAILURE (stmt));
-    case BIND_EXPR:
-      return remove_useless_stmts_warn_notreached (BIND_EXPR_BLOCK (stmt));
+        case GIMPLE_TRY:
+          if (remove_useless_stmts_warn_notreached (gimple_try_eval (stmt)))
+            return true;
+          if (remove_useless_stmts_warn_notreached (gimple_try_cleanup (stmt)))
+            return true;
+          break;
 
-    default:
-      /* Not a live container.  */
-      break;
+        case GIMPLE_CATCH:
+          return remove_useless_stmts_warn_notreached (gimple_catch_handler (stmt));
+
+        case GIMPLE_EH_FILTER:
+          return remove_useless_stmts_warn_notreached (gimple_eh_filter_failure (stmt));
+
+        case GIMPLE_BIND:
+          return remove_useless_stmts_warn_notreached (gimple_bind_body (stmt));
+
+        default:
+          break;
+        }
     }
 
   return false;
 }
 
+/* Helper for remove_useless_stmts_1.  Handle GIMPLE_COND statements.  */
+
 static void
-remove_useless_stmts_cond (tree *stmt_p, struct rus_data *data)
+remove_useless_stmts_cond (gimple_stmt_iterator *gsi, struct rus_data *data)
 {
-  tree then_clause, else_clause, cond;
-  bool save_has_label, then_has_label, else_has_label;
-
-  save_has_label = data->has_label;
-  data->has_label = false;
-  data->last_goto = NULL;
-
-  remove_useless_stmts_1 (&COND_EXPR_THEN (*stmt_p), data);
-
-  then_has_label = data->has_label;
-  data->has_label = false;
-  data->last_goto = NULL;
+  gimple stmt = gsi_stmt (*gsi);
 
-  remove_useless_stmts_1 (&COND_EXPR_ELSE (*stmt_p), data);
+  /* The folded result must still be a conditional statement.  */
+  fold_stmt_inplace (stmt);
 
-  else_has_label = data->has_label;
-  data->has_label = save_has_label | then_has_label | else_has_label;
-
-  then_clause = COND_EXPR_THEN (*stmt_p);
-  else_clause = COND_EXPR_ELSE (*stmt_p);
-  cond = fold (COND_EXPR_COND (*stmt_p));
+  data->may_branch = true;
 
-  /* If neither arm does anything at all, we can remove the whole IF.  */
-  if (!TREE_SIDE_EFFECTS (then_clause) && !TREE_SIDE_EFFECTS (else_clause))
+  /* Replace trivial conditionals with gotos.  */
+  if (gimple_cond_true_p (stmt))
     {
-      *stmt_p = build_empty_stmt ();
-      data->repeat = true;
-    }
+      /* Goto THEN label.  */
+      tree then_label = gimple_cond_true_label (stmt);
 
-  /* If there are no reachable statements in an arm, then we can
-     zap the entire conditional.  */
-  else if (integer_nonzerop (cond) && !else_has_label)
-    {
-      if (warn_notreached)
-       remove_useless_stmts_warn_notreached (else_clause);
-      *stmt_p = then_clause;
+      gsi_replace (gsi, gimple_build_goto (then_label), false);
+      data->last_goto_gsi = *gsi;
+      data->last_was_goto = true;
       data->repeat = true;
     }
-  else if (integer_zerop (cond) && !then_has_label)
+  else if (gimple_cond_false_p (stmt))
     {
-      if (warn_notreached)
-       remove_useless_stmts_warn_notreached (then_clause);
-      *stmt_p = else_clause;
+      /* Goto ELSE label.  */
+      tree else_label = gimple_cond_false_label (stmt);
+
+      gsi_replace (gsi, gimple_build_goto (else_label), false);
+      data->last_goto_gsi = *gsi;
+      data->last_was_goto = true;
       data->repeat = true;
     }
-
-  /* Check a couple of simple things on then/else with single stmts.  */
   else
     {
-      tree then_stmt = expr_only (then_clause);
-      tree else_stmt = expr_only (else_clause);
+      tree then_label = gimple_cond_true_label (stmt);
+      tree else_label = gimple_cond_false_label (stmt);
 
-      /* Notice branches to a common destination.  */
-      if (then_stmt && else_stmt
-         && TREE_CODE (then_stmt) == GOTO_EXPR
-         && TREE_CODE (else_stmt) == GOTO_EXPR
-         && (GOTO_DESTINATION (then_stmt) == GOTO_DESTINATION (else_stmt)))
-       {
-         *stmt_p = then_stmt;
+      if (then_label == else_label)
+        {
+          /* Goto common destination.  */
+          gsi_replace (gsi, gimple_build_goto (then_label), false);
+          data->last_goto_gsi = *gsi;
+          data->last_was_goto = true;
          data->repeat = true;
        }
-
-      /* If the THEN/ELSE clause merely assigns a value to a variable or
-        parameter which is already known to contain that value, then
-        remove the useless THEN/ELSE clause.  */
-      else if (TREE_CODE (cond) == VAR_DECL || TREE_CODE (cond) == PARM_DECL)
-       {
-         if (else_stmt
-             && TREE_CODE (else_stmt) == GIMPLE_MODIFY_STMT
-             && GIMPLE_STMT_OPERAND (else_stmt, 0) == cond
-             && integer_zerop (GIMPLE_STMT_OPERAND (else_stmt, 1)))
-           COND_EXPR_ELSE (*stmt_p) = alloc_stmt_list ();
-       }
-      else if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
-              && (TREE_CODE (TREE_OPERAND (cond, 0)) == VAR_DECL
-                  || TREE_CODE (TREE_OPERAND (cond, 0)) == PARM_DECL)
-              && TREE_CONSTANT (TREE_OPERAND (cond, 1)))
-       {
-         tree stmt = (TREE_CODE (cond) == EQ_EXPR
-                      ? then_stmt : else_stmt);
-         tree *location = (TREE_CODE (cond) == EQ_EXPR
-                           ? &COND_EXPR_THEN (*stmt_p)
-                           : &COND_EXPR_ELSE (*stmt_p));
-
-         if (stmt
-             && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-             && GIMPLE_STMT_OPERAND (stmt, 0) == TREE_OPERAND (cond, 0)
-             && GIMPLE_STMT_OPERAND (stmt, 1) == TREE_OPERAND (cond, 1))
-           *location = alloc_stmt_list ();
-       }
     }
 
-  /* Protect GOTOs in the arm of COND_EXPRs from being removed.  They
-     would be re-introduced during lowering.  */
-  data->last_goto = NULL;
+  gsi_next (gsi);
+
+  data->last_was_goto = false;
 }
 
+/* Helper for remove_useless_stmts_1.  Handle the try-finally case
+   for GIMPLE_TRY statements.  */
 
 static void
-remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
+remove_useless_stmts_tf (gimple_stmt_iterator *gsi, struct rus_data *data)
 {
   bool save_may_branch, save_may_throw;
   bool this_may_branch, this_may_throw;
 
+  gimple_seq eval_seq, cleanup_seq;
+  gimple_stmt_iterator eval_gsi, cleanup_gsi;
+
+  gimple stmt = gsi_stmt (*gsi);
+
   /* Collect may_branch and may_throw information for the body only.  */
   save_may_branch = data->may_branch;
   save_may_throw = data->may_throw;
   data->may_branch = false;
   data->may_throw = false;
-  data->last_goto = NULL;
+  data->last_was_goto = false;
 
-  remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
+  eval_seq = gimple_try_eval (stmt);
+  eval_gsi = gsi_start (eval_seq);
+  remove_useless_stmts_1 (&eval_gsi, data);
 
   this_may_branch = data->may_branch;
   this_may_throw = data->may_throw;
   data->may_branch |= save_may_branch;
   data->may_throw |= save_may_throw;
-  data->last_goto = NULL;
+  data->last_was_goto = false;
 
-  remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
+  cleanup_seq = gimple_try_cleanup (stmt);
+  cleanup_gsi = gsi_start (cleanup_seq);
+  remove_useless_stmts_1 (&cleanup_gsi, data);
 
   /* If the body is empty, then we can emit the FINALLY block without
      the enclosing TRY_FINALLY_EXPR.  */
-  if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 0)))
+  if (gimple_seq_empty_p (eval_seq))
     {
-      *stmt_p = TREE_OPERAND (*stmt_p, 1);
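+      /* Splice the cleanup sequence in place of the GIMPLE_TRY.  */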
+      gsi_insert_seq_before (gsi, cleanup_seq, GSI_SAME_STMT);
+      gsi_remove (gsi, false);
       data->repeat = true;
     }
 
   /* If the handler is empty, then we can emit the TRY block without
      the enclosing TRY_FINALLY_EXPR.  */
-  else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
+  else if (gimple_seq_empty_p (cleanup_seq))
     {
-      *stmt_p = TREE_OPERAND (*stmt_p, 0);
+      gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
+      gsi_remove (gsi, false);
       data->repeat = true;
     }
 
@@ -1650,37 +1641,51 @@ remove_useless_stmts_tf (tree *stmt_p, struct rus_data *data)
      string the TRY and FINALLY blocks together.  */
   else if (!this_may_branch && !this_may_throw)
     {
-      tree stmt = *stmt_p;
-      *stmt_p = TREE_OPERAND (stmt, 0);
-      append_to_statement_list (TREE_OPERAND (stmt, 1), stmt_p);
+      gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
+      gsi_insert_seq_before (gsi, cleanup_seq, GSI_SAME_STMT);
+      gsi_remove (gsi, false);
       data->repeat = true;
     }
+  else
+    gsi_next (gsi);
 }
 
+/* Helper for remove_useless_stmts_1.  Handle the try-catch case
+   for GIMPLE_TRY statements.  */
 
 static void
-remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
+remove_useless_stmts_tc (gimple_stmt_iterator *gsi, struct rus_data *data)
 {
   bool save_may_throw, this_may_throw;
-  tree_stmt_iterator i;
-  tree stmt;
+
+  gimple_seq eval_seq, cleanup_seq, handler_seq, failure_seq;
+  gimple_stmt_iterator eval_gsi, cleanup_gsi, handler_gsi, failure_gsi;
+
+  gimple stmt = gsi_stmt (*gsi);
 
   /* Collect may_throw information for the body only.  */
   save_may_throw = data->may_throw;
   data->may_throw = false;
-  data->last_goto = NULL;
+  data->last_was_goto = false;
 
-  remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 0), data);
+  eval_seq = gimple_try_eval (stmt);
+  eval_gsi = gsi_start (eval_seq);
+  remove_useless_stmts_1 (&eval_gsi, data);
 
   this_may_throw = data->may_throw;
   data->may_throw = save_may_throw;
 
+  cleanup_seq = gimple_try_cleanup (stmt);
+
   /* If the body cannot throw, then we can drop the entire TRY_CATCH_EXPR.  */
   if (!this_may_throw)
     {
       if (warn_notreached)
-       remove_useless_stmts_warn_notreached (TREE_OPERAND (*stmt_p, 1));
-      *stmt_p = TREE_OPERAND (*stmt_p, 0);
+       remove_useless_stmts_warn_notreached (cleanup_seq);
+      gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
+      gsi_remove (gsi, false);
       data->repeat = true;
       return;
     }
@@ -1689,142 +1694,164 @@ remove_useless_stmts_tc (tree *stmt_p, struct rus_data *data)
      no exceptions propagate past this point.  */
 
   this_may_throw = true;
-  i = tsi_start (TREE_OPERAND (*stmt_p, 1));
-  stmt = tsi_stmt (i);
-  data->last_goto = NULL;
+  cleanup_gsi = gsi_start (cleanup_seq);
+  stmt = gsi_stmt (cleanup_gsi);
+  data->last_was_goto = false;
 
-  switch (TREE_CODE (stmt))
+  switch (gimple_code (stmt))
     {
-    case CATCH_EXPR:
-      for (; !tsi_end_p (i); tsi_next (&i))
-       {
-         stmt = tsi_stmt (i);
+    case GIMPLE_CATCH:
+      /* If the first element is a catch, they all must be.  */
+      while (!gsi_end_p (cleanup_gsi))
+        {
+         stmt = gsi_stmt (cleanup_gsi);
          /* If we catch all exceptions, then the body does not
             propagate exceptions past this point.  */
-         if (CATCH_TYPES (stmt) == NULL)
+         if (gimple_catch_types (stmt) == NULL)
            this_may_throw = false;
-         data->last_goto = NULL;
-         remove_useless_stmts_1 (&CATCH_BODY (stmt), data);
+         data->last_was_goto = false;
+          handler_seq = gimple_catch_handler (stmt);
+          handler_gsi = gsi_start (handler_seq);
+         remove_useless_stmts_1 (&handler_gsi, data);
+          gsi_next (&cleanup_gsi);
        }
+      gsi_next (gsi);
       break;
 
-    case EH_FILTER_EXPR:
-      if (EH_FILTER_MUST_NOT_THROW (stmt))
+    case GIMPLE_EH_FILTER:
+      /* If the first element is an eh_filter, it should stand alone.  */
+      if (gimple_eh_filter_must_not_throw (stmt))
        this_may_throw = false;
-      else if (EH_FILTER_TYPES (stmt) == NULL)
+      else if (gimple_eh_filter_types (stmt) == NULL)
        this_may_throw = false;
-      remove_useless_stmts_1 (&EH_FILTER_FAILURE (stmt), data);
+      failure_seq = gimple_eh_filter_failure (stmt);
+      failure_gsi = gsi_start (failure_seq);
+      remove_useless_stmts_1 (&failure_gsi, data);
+      gsi_next (gsi);
       break;
 
     default:
-      /* Otherwise this is a cleanup.  */
-      remove_useless_stmts_1 (&TREE_OPERAND (*stmt_p, 1), data);
+      /* Otherwise this is a list of cleanup statements.  */
+      remove_useless_stmts_1 (&cleanup_gsi, data);
 
       /* If the cleanup is empty, then we can emit the TRY block without
         the enclosing TRY_CATCH_EXPR.  */
-      if (!TREE_SIDE_EFFECTS (TREE_OPERAND (*stmt_p, 1)))
+      if (gimple_seq_empty_p (cleanup_seq))
        {
-         *stmt_p = TREE_OPERAND (*stmt_p, 0);
+          gsi_insert_seq_before (gsi, eval_seq, GSI_SAME_STMT);
+          gsi_remove (gsi, false);
          data->repeat = true;
        }
+      else
+        gsi_next (gsi);
       break;
     }
+
   data->may_throw |= this_may_throw;
 }
 
+/* Helper for remove_useless_stmts_1.  Handle GIMPLE_BIND statements.  */
 
 static void
-remove_useless_stmts_bind (tree *stmt_p, struct rus_data *data)
+remove_useless_stmts_bind (gimple_stmt_iterator *gsi, struct rus_data *data)
 {
   tree block;
+  gimple_seq body_seq, fn_body_seq;
+  gimple_stmt_iterator body_gsi;
+
+  gimple stmt = gsi_stmt (*gsi);
 
-  /* First remove anything underneath the BIND_EXPR.  */
-  remove_useless_stmts_1 (&BIND_EXPR_BODY (*stmt_p), data);
+  /* First remove anything underneath the GIMPLE_BIND.  */
+  body_seq = gimple_bind_body (stmt);
+  body_gsi = gsi_start (body_seq);
+  remove_useless_stmts_1 (&body_gsi, data);
 
-  /* If the BIND_EXPR has no variables, then we can pull everything
-     up one level and remove the BIND_EXPR, unless this is the toplevel
-     BIND_EXPR for the current function or an inlined function.
+  /* If the GIMPLE_BIND has no variables, then we can pull everything
+     up one level and remove the GIMPLE_BIND, unless this is the toplevel
+     GIMPLE_BIND for the current function or an inlined function.
 
      When this situation occurs we will want to apply this
      optimization again.  */
-  block = BIND_EXPR_BLOCK (*stmt_p);
-  if (BIND_EXPR_VARS (*stmt_p) == NULL_TREE
-      && *stmt_p != DECL_SAVED_TREE (current_function_decl)
+  block = gimple_bind_block (stmt);
+  fn_body_seq = gimple_body (current_function_decl);
+  if (gimple_bind_vars (stmt) == NULL_TREE
+      && (gimple_seq_empty_p (fn_body_seq)
+          || stmt != gimple_seq_first_stmt (fn_body_seq))
       && (! block
          || ! BLOCK_ABSTRACT_ORIGIN (block)
          || (TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block))
              != FUNCTION_DECL)))
     {
-      *stmt_p = BIND_EXPR_BODY (*stmt_p);
+      gsi_insert_seq_before (gsi, body_seq, GSI_SAME_STMT);
+      gsi_remove (gsi, false);
       data->repeat = true;
     }
+  else
+    gsi_next (gsi);
 }
 
+/* Helper for remove_useless_stmts_1.  Handle GIMPLE_GOTO statements.  */
 
 static void
-remove_useless_stmts_goto (tree *stmt_p, struct rus_data *data)
+remove_useless_stmts_goto (gimple_stmt_iterator *gsi, struct rus_data *data)
 {
-  tree dest = GOTO_DESTINATION (*stmt_p);
+  gimple stmt = gsi_stmt (*gsi);
+
+  tree dest = gimple_goto_dest (stmt);
 
   data->may_branch = true;
-  data->last_goto = NULL;
+  data->last_was_goto = false;
 
-  /* Record the last goto expr, so that we can delete it if unnecessary.  */
+  /* Record iterator for last goto expr, so that we can delete it
+     if unnecessary.  */
   if (TREE_CODE (dest) == LABEL_DECL)
-    data->last_goto = stmt_p;
+    {
+      data->last_goto_gsi = *gsi;
+      data->last_was_goto = true;
+    }
+
+  gsi_next (gsi);
 }
 
+/* Helper for remove_useless_stmts_1.  Handle GIMPLE_LABEL statements.  */
 
 static void
-remove_useless_stmts_label (tree *stmt_p, struct rus_data *data)
+remove_useless_stmts_label (gimple_stmt_iterator *gsi, struct rus_data *data)
 {
-  tree label = LABEL_EXPR_LABEL (*stmt_p);
+  gimple stmt = gsi_stmt (*gsi);
+
+  tree label = gimple_label_label (stmt);
 
   data->has_label = true;
 
   /* We do want to jump across non-local label receiver code.  */
   if (DECL_NONLOCAL (label))
-    data->last_goto = NULL;
+    data->last_was_goto = false;
 
-  else if (data->last_goto && GOTO_DESTINATION (*data->last_goto) == label)
+  else if (data->last_was_goto
+           && gimple_goto_dest (gsi_stmt (data->last_goto_gsi)) == label)
     {
-      *data->last_goto = build_empty_stmt ();
+      /* Replace the preceding GIMPLE_GOTO statement with
+         a GIMPLE_NOP, which will be subsequently removed.
+         In this way, we avoid invalidating other iterators
+         active on the statement sequence.  */
+      gsi_replace (&data->last_goto_gsi, gimple_build_nop (), false);
+      data->last_was_goto = false;
       data->repeat = true;
     }
 
   /* ??? Add something here to delete unused labels.  */
-}
-
 
-/* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
-   decl.  This allows us to eliminate redundant or useless
-   calls to "const" functions.
-
-   Gimplifier already does the same operation, but we may notice functions
-   being const and pure once their calls has been gimplified, so we need
-   to update the flag.  */
-
-static void
-update_call_expr_flags (tree call)
-{
-  tree decl = get_callee_fndecl (call);
-  int flags;
-  if (!decl)
-    return;
-  flags = call_expr_flags (call);
-  if (flags & (ECF_CONST | ECF_PURE) && !(flags & ECF_LOOPING_CONST_OR_PURE))
-    TREE_SIDE_EFFECTS (call) = 0;
-  if (TREE_NOTHROW (decl))
-    TREE_NOTHROW (call) = 1;
+  gsi_next (gsi);
 }
 
 
-/* T is CALL_EXPR.  Set current_function_calls_* flags.  */
+/* CALL is a GIMPLE_CALL.  Set current_function_calls_* flags.  */
 
 void
-notice_special_calls (tree t)
+notice_special_calls (gimple call)
 {
-  int flags = call_expr_flags (t);
+  int flags = gimple_call_flags (call);
 
   if (flags & ECF_MAY_BE_ALLOCA)
     cfun->calls_alloca = true;
@@ -1843,133 +1870,144 @@ clear_special_calls (void)
   cfun->calls_setjmp = false;
 }
 
+/* Remove useless statements from a statement sequence, and perform
+   some preliminary simplifications.  */
 
 static void
-remove_useless_stmts_1 (tree *tp, struct rus_data *data)
+remove_useless_stmts_1 (gimple_stmt_iterator *gsi, struct rus_data *data)
 {
-  tree t = *tp, op;
-
-  switch (TREE_CODE (t))
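+  /* Each case below is responsible for advancing or removing the
+     statement at *GSI; the loop itself never calls gsi_next.  */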
+  while (!gsi_end_p (*gsi))
     {
-    case COND_EXPR:
-      remove_useless_stmts_cond (tp, data);
-      break;
-
-    case TRY_FINALLY_EXPR:
-      remove_useless_stmts_tf (tp, data);
-      break;
-
-    case TRY_CATCH_EXPR:
-      remove_useless_stmts_tc (tp, data);
-      break;
-
-    case BIND_EXPR:
-      remove_useless_stmts_bind (tp, data);
-      break;
-
-    case GOTO_EXPR:
-      remove_useless_stmts_goto (tp, data);
-      break;
-
-    case LABEL_EXPR:
-      remove_useless_stmts_label (tp, data);
-      break;
-
-    case RETURN_EXPR:
-      fold_stmt (tp);
-      data->last_goto = NULL;
-      data->may_branch = true;
-      break;
-
-    case CALL_EXPR:
-      fold_stmt (tp);
-      data->last_goto = NULL;
-      notice_special_calls (t);
-      update_call_expr_flags (t);
-      if (tree_could_throw_p (t))
-       data->may_throw = true;
-      break;
-
-    case MODIFY_EXPR:
-      gcc_unreachable ();
-
-    case GIMPLE_MODIFY_STMT:
-      data->last_goto = NULL;
-      fold_stmt (tp);
-      op = get_call_expr_in (t);
-      if (op)
-       {
-         update_call_expr_flags (op);
-         notice_special_calls (op);
-       }
-      if (tree_could_throw_p (t))
-       data->may_throw = true;
-      break;
+      gimple stmt = gsi_stmt (*gsi);
 
-    case STATEMENT_LIST:
-      {
-       tree_stmt_iterator i = tsi_start (t);
-       while (!tsi_end_p (i))
-         {
-           t = tsi_stmt (i);
-           if (IS_EMPTY_STMT (t))
-             {
-               tsi_delink (&i);
-               continue;
-             }
-
-           remove_useless_stmts_1 (tsi_stmt_ptr (i), data);
-
-           t = tsi_stmt (i);
-           if (TREE_CODE (t) == STATEMENT_LIST)
-             {
-               tsi_link_before (&i, t, TSI_SAME_STMT);
-               tsi_delink (&i);
-             }
-           else
-             tsi_next (&i);
-         }
-      }
-      break;
-    case ASM_EXPR:
-      fold_stmt (tp);
-      data->last_goto = NULL;
-      break;
-
-    case OMP_PARALLEL:
-    case OMP_TASK:
-      /* Make sure the outermost BIND_EXPR in OMP_BODY isn't removed
-        as useless.  */
-      remove_useless_stmts_1 (&BIND_EXPR_BODY (OMP_TASKREG_BODY (*tp)), data);
-      data->last_goto = NULL;
-      break;
-
-    case OMP_SECTIONS:
-    case OMP_SINGLE:
-    case OMP_SECTION:
-    case OMP_MASTER:
-    case OMP_ORDERED:
-    case OMP_CRITICAL:
-      remove_useless_stmts_1 (&OMP_BODY (*tp), data);
-      data->last_goto = NULL;
-      break;
-
-    case OMP_FOR:
-      remove_useless_stmts_1 (&OMP_FOR_BODY (*tp), data);
-      data->last_goto = NULL;
-      if (OMP_FOR_PRE_BODY (*tp))
-       {
-         remove_useless_stmts_1 (&OMP_FOR_PRE_BODY (*tp), data);
-         data->last_goto = NULL;
-       }
-      break;
-
-    default:
-      data->last_goto = NULL;
-      break;
+      switch (gimple_code (stmt))
+        {
+        case GIMPLE_COND:
+          remove_useless_stmts_cond (gsi, data);
+          break;
+
+        case GIMPLE_GOTO:
+          remove_useless_stmts_goto (gsi, data);
+          break;
+
+        case GIMPLE_LABEL:
+          remove_useless_stmts_label (gsi, data);
+          break;
+
+        case GIMPLE_ASSIGN:
+          fold_stmt (gsi);
+          stmt = gsi_stmt (*gsi);
+          data->last_was_goto = false;
+          if (stmt_could_throw_p (stmt))
+            data->may_throw = true;
+          gsi_next (gsi);
+          break;
+
+        case GIMPLE_ASM:
+          fold_stmt (gsi);
+          data->last_was_goto = false;
+          gsi_next (gsi);
+          break;
+
+        case GIMPLE_CALL:
+          fold_stmt (gsi);
+          stmt = gsi_stmt (*gsi);
+          data->last_was_goto = false;
+          if (is_gimple_call (stmt))
+            notice_special_calls (stmt);
+
+          /* We used to call update_gimple_call_flags here,
+             which copied side-effects and nothrow status
+             from the function decl to the call.  In the new
+             tuplified GIMPLE, the accessors for this information
+             always consult the function decl, so this copying
+             is no longer necessary.  */
+          if (stmt_could_throw_p (stmt))
+            data->may_throw = true;
+          gsi_next (gsi);
+          break;
+
+        case GIMPLE_RETURN:
+          fold_stmt (gsi);
+          data->last_was_goto = false;
+          data->may_branch = true;
+          gsi_next (gsi);
+          break;
+
+        case GIMPLE_BIND:
+          remove_useless_stmts_bind (gsi, data);
+          break;
+
+        case GIMPLE_TRY:
+          if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
+            remove_useless_stmts_tc (gsi, data);
+          else if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
+            remove_useless_stmts_tf (gsi, data);
+          else
+            gcc_unreachable ();
+          break;
+
+        case GIMPLE_CATCH:
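+          /* GIMPLE_CATCH can only appear within the cleanup sequence
+             of a GIMPLE_TRY and is handled by remove_useless_stmts_tc.  */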
+          gcc_unreachable ();
+          break;
+
+        case GIMPLE_NOP:
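+          /* Empty statements, including the GIMPLE_NOPs left behind by
+             remove_useless_stmts_label, are deleted outright.  */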
+          gsi_remove (gsi, false);
+          break;
+
+        case GIMPLE_OMP_FOR:
+          {
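+            /* Clean up the loop pre-body here; the loop body itself is
+               handled by the fall-through to the common OMP case below.  */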
+            gimple_seq pre_body_seq = gimple_omp_for_pre_body (stmt);
+            gimple_stmt_iterator pre_body_gsi = gsi_start (pre_body_seq);
+
+            remove_useless_stmts_1 (&pre_body_gsi, data);
+           data->last_was_goto = false;
+          }
+          /* FALLTHROUGH */
+        case GIMPLE_OMP_CRITICAL:
+        case GIMPLE_OMP_CONTINUE:
+        case GIMPLE_OMP_MASTER:
+        case GIMPLE_OMP_ORDERED:
+        case GIMPLE_OMP_SECTION:
+        case GIMPLE_OMP_SECTIONS:
+        case GIMPLE_OMP_SINGLE:
+          {
+            gimple_seq body_seq = gimple_omp_body (stmt);
+            gimple_stmt_iterator body_gsi = gsi_start (body_seq);
+
+            remove_useless_stmts_1 (&body_gsi, data);
+           data->last_was_goto = false;
+           gsi_next (gsi);
+          }
+          break;
+
+        case GIMPLE_OMP_PARALLEL:
+       case GIMPLE_OMP_TASK:
+          {
+           /* Make sure the outermost GIMPLE_BIND isn't removed
+              as useless.  */
+            gimple_seq body_seq = gimple_omp_body (stmt);
+            gimple bind = gimple_seq_first_stmt (body_seq);
+            gimple_seq bind_seq = gimple_bind_body (bind);
+            gimple_stmt_iterator bind_gsi = gsi_start (bind_seq);
+
+            remove_useless_stmts_1 (&bind_gsi, data);
+           data->last_was_goto = false;
+           gsi_next (gsi);
+          }
+          break;
+
+        default:
+          data->last_was_goto = false;
+          gsi_next (gsi);
+          break;
+        }
     }
 }
 
+/* Walk the function body, removing useless statements and performing
+   some preliminary simplifications.  */
+
 static unsigned int
 remove_useless_stmts (void)
 {
@@ -1979,8 +2017,11 @@ remove_useless_stmts (void)
 
   do
     {
+      gimple_stmt_iterator gsi;
+
+      gsi = gsi_start (gimple_body (current_function_decl));
       memset (&data, 0, sizeof (data));
-      remove_useless_stmts_1 (&DECL_SAVED_TREE (current_function_decl), &data);
+      remove_useless_stmts_1 (&gsi, &data);
     }
   while (data.repeat);
   return 0;
@@ -2011,17 +2052,14 @@ struct gimple_opt_pass pass_remove_useless_stmts =
 static void
 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
 {
-  tree phi;
+  gimple_stmt_iterator gsi;
 
   /* Since this block is no longer reachable, we can just delete all
      of its PHI nodes.  */
-  phi = phi_nodes (bb);
-  while (phi)
-    {
-      tree next = PHI_CHAIN (phi);
-      remove_phi_node (phi, NULL_TREE, true);
-      phi = next;
-    }
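+  /* No gsi_next here: remove_phi_node advances the iterator itself.  */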
+  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
+    remove_phi_node (&gsi, true);
+
+  set_phi_nodes (bb, NULL);
 
   /* Remove edges to BB's successors.  */
   while (EDGE_COUNT (bb->succs) > 0)
@@ -2034,7 +2072,7 @@ remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
 static void
 remove_bb (basic_block bb)
 {
-  block_stmt_iterator i;
+  gimple_stmt_iterator i;
   source_location loc = UNKNOWN_LOCATION;
 
   if (dump_file)
@@ -2059,31 +2097,31 @@ remove_bb (basic_block bb)
     }
 
   /* Remove all the instructions in the block.  */
-  if (bb_stmt_list (bb) != NULL_TREE)
+  if (bb_seq (bb) != NULL)
     {
-      for (i = bsi_start (bb); !bsi_end_p (i);)
+      for (i = gsi_start_bb (bb); !gsi_end_p (i);)
        {
-         tree stmt = bsi_stmt (i);
-         if (TREE_CODE (stmt) == LABEL_EXPR
-             && (FORCED_LABEL (LABEL_EXPR_LABEL (stmt))
-                 || DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt))))
+         gimple stmt = gsi_stmt (i);
+         if (gimple_code (stmt) == GIMPLE_LABEL
+             && (FORCED_LABEL (gimple_label_label (stmt))
+                 || DECL_NONLOCAL (gimple_label_label (stmt))))
            {
              basic_block new_bb;
-             block_stmt_iterator new_bsi;
+             gimple_stmt_iterator new_gsi;
 
              /* A non-reachable non-local label may still be referenced.
                 But it no longer needs to carry the extra semantics of
                 non-locality.  */
-             if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
+             if (DECL_NONLOCAL (gimple_label_label (stmt)))
                {
-                 DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)) = 0;
-                 FORCED_LABEL (LABEL_EXPR_LABEL (stmt)) = 1;
+                 DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
+                 FORCED_LABEL (gimple_label_label (stmt)) = 1;
                }
 
              new_bb = bb->prev_bb;
-             new_bsi = bsi_start (new_bb);
-             bsi_remove (&i, false);
-             bsi_insert_before (&new_bsi, stmt, BSI_NEW_STMT);
+             new_gsi = gsi_start_bb (new_bb);
+             gsi_remove (&i, false);
+             gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
            }
          else
            {
@@ -2094,18 +2132,17 @@ remove_bb (basic_block bb)
              if (gimple_in_ssa_p (cfun))
                release_defs (stmt);
 
-             bsi_remove (&i, true);
+             gsi_remove (&i, true);
            }
 
          /* Don't warn for removed gotos.  Gotos are often removed due to
             jump threading, thus resulting in bogus warnings.  Not great,
             since this way we lose warnings for gotos in the original
             program that are indeed unreachable.  */
-         if (TREE_CODE (stmt) != GOTO_EXPR && EXPR_HAS_LOCATION (stmt) && !loc)
-           {
-             if (EXPR_HAS_LOCATION (stmt))
-               loc = EXPR_LOCATION (stmt);
-           }
+         if (gimple_code (stmt) != GIMPLE_GOTO
+             && gimple_has_location (stmt)
+             && !loc)
+           loc = gimple_location (stmt);
        }
     }
 
@@ -2117,7 +2154,7 @@ remove_bb (basic_block bb)
     warning (OPT_Wunreachable_code, "%Hwill never be executed", &loc);
 
   remove_phi_nodes_and_edges_for_unreachable_block (bb);
-  bb->il.tree = NULL;
+  bb->il.gimple = NULL;
 }
 
 
@@ -2128,21 +2165,23 @@ remove_bb (basic_block bb)
 edge
 find_taken_edge (basic_block bb, tree val)
 {
-  tree stmt;
+  gimple stmt;
 
   stmt = last_stmt (bb);
 
   gcc_assert (stmt);
   gcc_assert (is_ctrl_stmt (stmt));
-  gcc_assert (val);
 
-  if (! is_gimple_min_invariant (val))
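+  /* VAL may be NULL, in which case the edge taken out of BB is unknown.  */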
+  if (val == NULL)
+    return NULL;
+
+  if (!is_gimple_min_invariant (val))
     return NULL;
 
-  if (TREE_CODE (stmt) == COND_EXPR)
+  if (gimple_code (stmt) == GIMPLE_COND)
     return find_taken_edge_cond_expr (bb, val);
 
-  if (TREE_CODE (stmt) == SWITCH_EXPR)
+  if (gimple_code (stmt) == GIMPLE_SWITCH)
     return find_taken_edge_switch_expr (bb, val);
 
   if (computed_goto_p (stmt))
@@ -2204,12 +2243,13 @@ find_taken_edge_cond_expr (basic_block bb, tree val)
 static edge
 find_taken_edge_switch_expr (basic_block bb, tree val)
 {
-  tree switch_expr, taken_case;
   basic_block dest_bb;
   edge e;
+  gimple switch_stmt;
+  tree taken_case;
 
-  switch_expr = last_stmt (bb);
-  taken_case = find_case_label_for_value (switch_expr, val);
+  switch_stmt = last_stmt (bb);
+  taken_case = find_case_label_for_value (switch_stmt, val);
   dest_bb = label_to_block (CASE_LABEL (taken_case));
 
   e = find_edge (bb, dest_bb);
@@ -2218,21 +2258,20 @@ find_taken_edge_switch_expr (basic_block bb, tree val)
 }
 
 
-/* Return the CASE_LABEL_EXPR that SWITCH_EXPR will take for VAL.
+/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
    We can make optimal use here of the fact that the case labels are
    sorted: We can do a binary search for a case matching VAL.  */
 
 static tree
-find_case_label_for_value (tree switch_expr, tree val)
+find_case_label_for_value (gimple switch_stmt, tree val)
 {
-  tree vec = SWITCH_LABELS (switch_expr);
-  size_t low, high, n = TREE_VEC_LENGTH (vec);
-  tree default_case = TREE_VEC_ELT (vec, n - 1);
+  size_t low, high, n = gimple_switch_num_labels (switch_stmt);
+  tree default_case = gimple_switch_default_label (switch_stmt);
 
-  for (low = -1, high = n - 1; high - low > 1; )
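+  /* The default label is at index 0, so the binary search below only
+     ever examines the sorted case labels at indices 1 to n - 1.  */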
+  for (low = 0, high = n; high - low > 1; )
     {
       size_t i = (high + low) / 2;
-      tree t = TREE_VEC_ELT (vec, i);
+      tree t = gimple_switch_label (switch_stmt, i);
       int cmp;
 
       /* Cache the result of comparing CASE_LOW and val.  */
@@ -2261,36 +2300,21 @@ find_case_label_for_value (tree switch_expr, tree val)
 }
 
 
-
-
-/*---------------------------------------------------------------------------
-                             Debugging functions
----------------------------------------------------------------------------*/
-
-/* Dump tree-specific information of block BB to file OUTF.  */
-
-void
-tree_dump_bb (basic_block bb, FILE *outf, int indent)
-{
-  dump_generic_bb (outf, bb, indent, TDF_VOPS|TDF_MEMSYMS);
-}
-
-
 /* Dump a basic block on stderr.  */
 
 void
-debug_tree_bb (basic_block bb)
+gimple_debug_bb (basic_block bb)
 {
-  dump_bb (bb, stderr, 0);
+  gimple_dump_bb (bb, stderr, 0, TDF_VOPS|TDF_MEMSYMS);
 }
 
 
 /* Dump basic block with index N on stderr.  */
 
 basic_block
-debug_tree_bb_n (int n)
+gimple_debug_bb_n (int n)
 {
-  debug_tree_bb (BASIC_BLOCK (n));
+  gimple_debug_bb (BASIC_BLOCK (n));
   return BASIC_BLOCK (n);
 }
 
@@ -2301,9 +2325,9 @@ debug_tree_bb_n (int n)
    (see TDF_* in tree-pass.h).  */
 
 void
-debug_tree_cfg (int flags)
+gimple_debug_cfg (int flags)
 {
-  dump_tree_cfg (stderr, flags);
+  gimple_dump_cfg (stderr, flags);
 }
 
 
@@ -2313,7 +2337,7 @@ debug_tree_cfg (int flags)
    tree.h).  */
 
 void
-dump_tree_cfg (FILE *file, int flags)
+gimple_dump_cfg (FILE *file, int flags)
 {
   if (flags & TDF_DETAILS)
     {
@@ -2401,7 +2425,7 @@ debug_cfg_stats (void)
 /* Dump the flowgraph to a .vcg FILE.  */
 
 static void
-tree_cfg2vcg (FILE *file)
+gimple_cfg2vcg (FILE *file)
 {
   edge e;
   edge_iterator ei;
@@ -2431,17 +2455,17 @@ tree_cfg2vcg (FILE *file)
 
   FOR_EACH_BB (bb)
     {
-      enum tree_code head_code, end_code;
+      enum gimple_code head_code, end_code;
       const char *head_name, *end_name;
       int head_line = 0;
       int end_line = 0;
-      tree first = first_stmt (bb);
-      tree last = last_stmt (bb);
+      gimple first = first_stmt (bb);
+      gimple last = last_stmt (bb);
 
       if (first)
        {
-         head_code = TREE_CODE (first);
-         head_name = tree_code_name[head_code];
+         head_code = gimple_code (first);
+         head_name = gimple_code_name[head_code];
          head_line = get_lineno (first);
        }
       else
@@ -2449,8 +2473,8 @@ tree_cfg2vcg (FILE *file)
 
       if (last)
        {
-         end_code = TREE_CODE (last);
-         end_name = tree_code_name[end_code];
+         end_code = gimple_code (last);
+         end_name = gimple_code_name[end_code];
          end_line = get_lineno (last);
        }
       else
@@ -2491,13 +2515,13 @@ tree_cfg2vcg (FILE *file)
 /* Return true if T represents a stmt that always transfers control.  */
 
 bool
-is_ctrl_stmt (const_tree t)
+is_ctrl_stmt (gimple t)
 {
-  return (TREE_CODE (t) == COND_EXPR
-         || TREE_CODE (t) == SWITCH_EXPR
-         || TREE_CODE (t) == GOTO_EXPR
-         || TREE_CODE (t) == RETURN_EXPR
-         || TREE_CODE (t) == RESX_EXPR);
+  return gimple_code (t) == GIMPLE_COND
+    || gimple_code (t) == GIMPLE_SWITCH
+    || gimple_code (t) == GIMPLE_GOTO
+    || gimple_code (t) == GIMPLE_RETURN
+    || gimple_code (t) == GIMPLE_RESX;
 }
 
 
@@ -2505,50 +2529,41 @@ is_ctrl_stmt (const_tree t)
    (e.g., a call to a non-returning function).  */
 
 bool
-is_ctrl_altering_stmt (const_tree t)
+is_ctrl_altering_stmt (gimple t)
 {
-  const_tree call;
-
   gcc_assert (t);
-  call = get_call_expr_in (CONST_CAST_TREE (t));
-  if (call)
+
+  if (is_gimple_call (t))
     {
-      /* A non-pure/const CALL_EXPR alters flow control if the current
+      int flags = gimple_call_flags (t);
+
+      /* A non-pure/const call alters flow control if the current
         function has nonlocal labels.  */
-      if (TREE_SIDE_EFFECTS (call) && cfun->has_nonlocal_label)
+      if (!(flags & (ECF_CONST | ECF_PURE))
+         && cfun->has_nonlocal_label)
        return true;
 
-      /* A CALL_EXPR also alters control flow if it does not return.  */
-      if (call_expr_flags (call) & ECF_NORETURN)
+      /* A call also alters control flow if it does not return.  */
+      if (gimple_call_flags (t) & ECF_NORETURN)
        return true;
     }
 
   /* OpenMP directives alter control flow.  */
-  if (OMP_DIRECTIVE_P (t))
+  if (is_gimple_omp (t))
     return true;
 
   /* If a statement can throw, it alters control flow.  */
-  return tree_can_throw_internal (t);
-}
-
-
-/* Return true if T is a computed goto.  */
-
-static bool
-computed_goto_p (const_tree t)
-{
-  return (TREE_CODE (t) == GOTO_EXPR
-         && TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL);
+  return stmt_can_throw_internal (t);
 }
 
 
 /* Return true if T is a simple local goto.  */
 
 bool
-simple_goto_p (const_tree t)
+simple_goto_p (gimple t)
 {
-  return (TREE_CODE (t) == GOTO_EXPR
-         && TREE_CODE (GOTO_DESTINATION (t)) == LABEL_DECL);
+  return (gimple_code (t) == GIMPLE_GOTO
+         && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
 }
 
 
@@ -2556,46 +2571,42 @@ simple_goto_p (const_tree t)
    Transfers of control flow associated with EH are excluded.  */
 
 bool
-tree_can_make_abnormal_goto (const_tree t)
+stmt_can_make_abnormal_goto (gimple t)
 {
   if (computed_goto_p (t))
     return true;
-  if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
-    t = GIMPLE_STMT_OPERAND (t, 1);
-  if (TREE_CODE (t) == WITH_SIZE_EXPR)
-    t = TREE_OPERAND (t, 0);
-  if (TREE_CODE (t) == CALL_EXPR)
-    return TREE_SIDE_EFFECTS (t) && cfun->has_nonlocal_label;
+  if (is_gimple_call (t))
+    return gimple_has_side_effects (t) && cfun->has_nonlocal_label;
   return false;
 }
 
 
-/* Return true if T should start a new basic block.  PREV_T is the
-   statement preceding T.  It is used when T is a label or a case label.
-   Labels should only start a new basic block if their previous statement
-   wasn't a label.  Otherwise, sequence of labels would generate
-   unnecessary basic blocks that only contain a single label.  */
+/* Return true if STMT should start a new basic block.  PREV_STMT is
+   the statement preceding STMT.  It is used when STMT is a label or a
+   case label.  Labels should only start a new basic block if their
+   previous statement wasn't a label.  Otherwise, sequence of labels
+   would generate unnecessary basic blocks that only contain a single
+   label.  */
 
 static inline bool
-stmt_starts_bb_p (const_tree t, const_tree prev_t)
+stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
 {
-  if (t == NULL_TREE)
+  if (stmt == NULL)
     return false;
 
-  /* LABEL_EXPRs start a new basic block only if the preceding
-     statement wasn't a label of the same type.  This prevents the
-     creation of consecutive blocks that have nothing but a single
-     label.  */
-  if (TREE_CODE (t) == LABEL_EXPR)
+  /* Labels start a new basic block only if the preceding statement
+     wasn't a label of the same type.  This prevents the creation of
+     consecutive blocks that have nothing but a single label.  */
+  if (gimple_code (stmt) == GIMPLE_LABEL)
     {
       /* Nonlocal and computed GOTO targets always start a new block.  */
-      if (DECL_NONLOCAL (LABEL_EXPR_LABEL (t))
-         || FORCED_LABEL (LABEL_EXPR_LABEL (t)))
+      if (DECL_NONLOCAL (gimple_label_label (stmt))
+         || FORCED_LABEL (gimple_label_label (stmt)))
        return true;
 
-      if (prev_t && TREE_CODE (prev_t) == LABEL_EXPR)
+      if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
        {
-         if (DECL_NONLOCAL (LABEL_EXPR_LABEL (prev_t)))
+         if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
            return true;
 
          cfg_stats.num_merged_labels++;
@@ -2612,502 +2623,97 @@ stmt_starts_bb_p (const_tree t, const_tree prev_t)
 /* Return true if T should end a basic block.  */
 
 bool
-stmt_ends_bb_p (const_tree t)
+stmt_ends_bb_p (gimple t)
 {
   return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
 }
 
-/* Remove block annotations and other datastructures.  */
+/* Remove block annotations and other data structures.  */
 
 void
 delete_tree_cfg_annotations (void)
 {
-  basic_block bb;
-  block_stmt_iterator bsi;
-
-  /* Remove annotations from every tree in the function.  */
-  FOR_EACH_BB (bb)
-    for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
-      {
-       tree stmt = bsi_stmt (bsi);
-       ggc_free (stmt->base.ann);
-       stmt->base.ann = NULL;
-      }
   label_to_block_map = NULL;
 }
 
 
 /* Return the first statement in basic block BB.  */
 
-tree
+gimple
 first_stmt (basic_block bb)
 {
-  block_stmt_iterator i = bsi_start (bb);
-  return !bsi_end_p (i) ? bsi_stmt (i) : NULL_TREE;
+  gimple_stmt_iterator i = gsi_start_bb (bb);
+  return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
 }
 
 /* Return the last statement in basic block BB.  */
 
-tree
+gimple
 last_stmt (basic_block bb)
 {
-  block_stmt_iterator b = bsi_last (bb);
-  return !bsi_end_p (b) ? bsi_stmt (b) : NULL_TREE;
+  gimple_stmt_iterator b = gsi_last_bb (bb);
+  return !gsi_end_p (b) ? gsi_stmt (b) : NULL;
 }
 
 /* Return the last statement of an otherwise empty block.  Return NULL
    if the block is totally empty, or if it contains more than one
    statement.  */
 
-tree
+gimple
 last_and_only_stmt (basic_block bb)
 {
-  block_stmt_iterator i = bsi_last (bb);
-  tree last, prev;
+  gimple_stmt_iterator i = gsi_last_bb (bb);
+  gimple last, prev;
 
-  if (bsi_end_p (i))
-    return NULL_TREE;
+  if (gsi_end_p (i))
+    return NULL;
 
-  last = bsi_stmt (i);
-  bsi_prev (&i);
-  if (bsi_end_p (i))
+  last = gsi_stmt (i);
+  gsi_prev (&i);
+  if (gsi_end_p (i))
     return last;
 
   /* Empty statements should no longer appear in the instruction stream.
      Everything that might have appeared before should be deleted by
-     remove_useless_stmts, and the optimizers should just bsi_remove
+     remove_useless_stmts, and the optimizers should just gsi_remove
      instead of smashing with build_empty_stmt.
 
      Thus the only thing that should appear here in a block containing
      one executable statement is a label.  */
-  prev = bsi_stmt (i);
-  if (TREE_CODE (prev) == LABEL_EXPR)
+  prev = gsi_stmt (i);
+  if (gimple_code (prev) == GIMPLE_LABEL)
     return last;
   else
-    return NULL_TREE;
-}
-
-
-/* Mark BB as the basic block holding statement T.  */
-
-void
-set_bb_for_stmt (tree t, basic_block bb)
-{
-  if (TREE_CODE (t) == PHI_NODE)
-    PHI_BB (t) = bb;
-  else if (TREE_CODE (t) == STATEMENT_LIST)
-    {
-      tree_stmt_iterator i;
-      for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
-       set_bb_for_stmt (tsi_stmt (i), bb);
-    }
-  else
-    {
-      stmt_ann_t ann = get_stmt_ann (t);
-      ann->bb = bb;
-
-      /* If the statement is a label, add the label to block-to-labels map
-        so that we can speed up edge creation for GOTO_EXPRs.  */
-      if (TREE_CODE (t) == LABEL_EXPR)
-       {
-         int uid;
-
-         t = LABEL_EXPR_LABEL (t);
-         uid = LABEL_DECL_UID (t);
-         if (uid == -1)
-           {
-             unsigned old_len = VEC_length (basic_block, label_to_block_map);
-             LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
-             if (old_len <= (unsigned) uid)
-               {
-                 unsigned new_len = 3 * uid / 2;
-
-                 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
-                                        new_len);
-               }
-           }
-         else
-           /* We're moving an existing label.  Make sure that we've
-               removed it from the old block.  */
-           gcc_assert (!bb
-                       || !VEC_index (basic_block, label_to_block_map, uid));
-         VEC_replace (basic_block, label_to_block_map, uid, bb);
-       }
-    }
-}
-
-/* Faster version of set_bb_for_stmt that assume that statement is being moved
-   from one basic block to another.  
-   For BB splitting we can run into quadratic case, so performance is quite
-   important and knowing that the tables are big enough, change_bb_for_stmt
-   can inline as leaf function.  */
-static inline void
-change_bb_for_stmt (tree t, basic_block bb)
-{
-  get_stmt_ann (t)->bb = bb;
-  if (TREE_CODE (t) == LABEL_EXPR)
-    VEC_replace (basic_block, label_to_block_map,
-                LABEL_DECL_UID (LABEL_EXPR_LABEL (t)), bb);
-}
-
-/* Finds iterator for STMT.  */
-
-extern block_stmt_iterator
-bsi_for_stmt (tree stmt)
-{
-  block_stmt_iterator bsi;
-
-  for (bsi = bsi_start (bb_for_stmt (stmt)); !bsi_end_p (bsi); bsi_next (&bsi))
-    if (bsi_stmt (bsi) == stmt)
-      return bsi;
-
-  gcc_unreachable ();
-}
-
-/* Mark statement T as modified, and update it.  */
-static inline void
-update_modified_stmts (tree t)
-{
-  if (!ssa_operands_active ())
-    return;
-  if (TREE_CODE (t) == STATEMENT_LIST)
-    {
-      tree_stmt_iterator i;
-      tree stmt;
-      for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
-        {
-         stmt = tsi_stmt (i);
-         update_stmt_if_modified (stmt);
-       }
-    }
-  else
-    update_stmt_if_modified (t);
-}
-
-/* Insert statement (or statement list) T before the statement
-   pointed-to by iterator I.  M specifies how to update iterator I
-   after insertion (see enum bsi_iterator_update).  */
-
-void
-bsi_insert_before (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
-{
-  set_bb_for_stmt (t, i->bb);
-  update_modified_stmts (t);
-  tsi_link_before (&i->tsi, t, m);
-}
-
-
-/* Insert statement (or statement list) T after the statement
-   pointed-to by iterator I.  M specifies how to update iterator I
-   after insertion (see enum bsi_iterator_update).  */
-
-void
-bsi_insert_after (block_stmt_iterator *i, tree t, enum bsi_iterator_update m)
-{
-  set_bb_for_stmt (t, i->bb);
-  update_modified_stmts (t);
-  tsi_link_after (&i->tsi, t, m);
-}
-
-
-/* Remove the statement pointed to by iterator I.  The iterator is updated
-   to the next statement.
-
-   When REMOVE_EH_INFO is true we remove the statement pointed to by
-   iterator I from the EH tables.  Otherwise we do not modify the EH
-   tables.
-
-   Generally, REMOVE_EH_INFO should be true when the statement is going to
-   be removed from the IL and not reinserted elsewhere.  */
-
-void
-bsi_remove (block_stmt_iterator *i, bool remove_eh_info)
-{
-  tree t = bsi_stmt (*i);
-  set_bb_for_stmt (t, NULL);
-  delink_stmt_imm_use (t);
-  tsi_delink (&i->tsi);
-  mark_stmt_modified (t);
-  if (remove_eh_info)
-    {
-      remove_stmt_from_eh_region (t);
-      gimple_remove_stmt_histograms (cfun, t);
-    }
-}
-
-
-/* Move the statement at FROM so it comes right after the statement at TO.  */
-
-void
-bsi_move_after (block_stmt_iterator *from, block_stmt_iterator *to)
-{
-  tree stmt = bsi_stmt (*from);
-  bsi_remove (from, false);
-  /* We must have BSI_NEW_STMT here, as bsi_move_after is sometimes used to
-     move statements to an empty block.  */
-  bsi_insert_after (to, stmt, BSI_NEW_STMT);
-}
-
-
-/* Move the statement at FROM so it comes right before the statement at TO.  */
-
-void
-bsi_move_before (block_stmt_iterator *from, block_stmt_iterator *to)
-{
-  tree stmt = bsi_stmt (*from);
-  bsi_remove (from, false);
-  /* For consistency with bsi_move_after, it might be better to have
-     BSI_NEW_STMT here; however, that breaks several places that expect
-     that TO does not change.  */
-  bsi_insert_before (to, stmt, BSI_SAME_STMT);
-}
-
-
-/* Move the statement at FROM to the end of basic block BB.  */
-
-void
-bsi_move_to_bb_end (block_stmt_iterator *from, basic_block bb)
-{
-  block_stmt_iterator last = bsi_last (bb);
-
-  /* Have to check bsi_end_p because it could be an empty block.  */
-  if (!bsi_end_p (last) && is_ctrl_stmt (bsi_stmt (last)))
-    bsi_move_before (from, &last);
-  else
-    bsi_move_after (from, &last);
-}
-
-
-/* Replace the contents of the statement pointed to by iterator BSI
-   with STMT.  If UPDATE_EH_INFO is true, the exception handling
-   information of the original statement is moved to the new statement.  */
-
-void
-bsi_replace (const block_stmt_iterator *bsi, tree stmt, bool update_eh_info)
-{
-  int eh_region;
-  tree orig_stmt = bsi_stmt (*bsi);
-
-  if (stmt == orig_stmt)
-    return;
-  SET_EXPR_LOCUS (stmt, EXPR_LOCUS (orig_stmt));
-  set_bb_for_stmt (stmt, bsi->bb);
-
-  /* Preserve EH region information from the original statement, if
-     requested by the caller.  */
-  if (update_eh_info)
-    {
-      eh_region = lookup_stmt_eh_region (orig_stmt);
-      if (eh_region >= 0)
-       {
-         remove_stmt_from_eh_region (orig_stmt);
-         add_stmt_to_eh_region (stmt, eh_region);
-       }
-    }
-
-  gimple_duplicate_stmt_histograms (cfun, stmt, cfun, orig_stmt);
-  gimple_remove_stmt_histograms (cfun, orig_stmt);
-  delink_stmt_imm_use (orig_stmt);
-  *bsi_stmt_ptr (*bsi) = stmt;
-  mark_stmt_modified (stmt);
-  update_modified_stmts (stmt);
-}
-
-
-/* Insert the statement pointed-to by BSI into edge E.  Every attempt
-   is made to place the statement in an existing basic block, but
-   sometimes that isn't possible.  When it isn't possible, the edge is
-   split and the statement is added to the new block.
-
-   In all cases, the returned *BSI points to the correct location.  The
-   return value is true if insertion should be done after the location,
-   or false if it should be done before the location.  If new basic block
-   has to be created, it is stored in *NEW_BB.  */
-
-static bool
-tree_find_edge_insert_loc (edge e, block_stmt_iterator *bsi,
-                          basic_block *new_bb)
-{
-  basic_block dest, src;
-  tree tmp;
-
-  dest = e->dest;
- restart:
-
-  /* If the destination has one predecessor which has no PHI nodes,
-     insert there.  Except for the exit block.
-
-     The requirement for no PHI nodes could be relaxed.  Basically we
-     would have to examine the PHIs to prove that none of them used
-     the value set by the statement we want to insert on E.  That
-     hardly seems worth the effort.  */
-  if (single_pred_p (dest)
-      && ! phi_nodes (dest)
-      && dest != EXIT_BLOCK_PTR)
-    {
-      *bsi = bsi_start (dest);
-      if (bsi_end_p (*bsi))
-       return true;
-
-      /* Make sure we insert after any leading labels.  */
-      tmp = bsi_stmt (*bsi);
-      while (TREE_CODE (tmp) == LABEL_EXPR)
-       {
-         bsi_next (bsi);
-         if (bsi_end_p (*bsi))
-           break;
-         tmp = bsi_stmt (*bsi);
-       }
-
-      if (bsi_end_p (*bsi))
-       {
-         *bsi = bsi_last (dest);
-         return true;
-       }
-      else
-       return false;
-    }
-
-  /* If the source has one successor, the edge is not abnormal and
-     the last statement does not end a basic block, insert there.
-     Except for the entry block.  */
-  src = e->src;
-  if ((e->flags & EDGE_ABNORMAL) == 0
-      && single_succ_p (src)
-      && src != ENTRY_BLOCK_PTR)
-    {
-      *bsi = bsi_last (src);
-      if (bsi_end_p (*bsi))
-       return true;
-
-      tmp = bsi_stmt (*bsi);
-      if (!stmt_ends_bb_p (tmp))
-       return true;
-
-      /* Insert code just before returning the value.  We may need to decompose
-         the return in the case it contains non-trivial operand.  */
-      if (TREE_CODE (tmp) == RETURN_EXPR)
-        {
-         tree op = TREE_OPERAND (tmp, 0);
-         if (op && !is_gimple_val (op))
-           {
-             gcc_assert (TREE_CODE (op) == GIMPLE_MODIFY_STMT);
-             bsi_insert_before (bsi, op, BSI_NEW_STMT);
-             TREE_OPERAND (tmp, 0) = GIMPLE_STMT_OPERAND (op, 0);
-           }
-         bsi_prev (bsi);
-         return true;
-        }
-    }
-
-  /* Otherwise, create a new basic block, and split this edge.  */
-  dest = split_edge (e);
-  if (new_bb)
-    *new_bb = dest;
-  e = single_pred_edge (dest);
-  goto restart;
-}
-
-
-/* This routine will commit all pending edge insertions, creating any new
-   basic blocks which are necessary.  */
-
-void
-bsi_commit_edge_inserts (void)
-{
-  basic_block bb;
-  edge e;
-  edge_iterator ei;
-
-  bsi_commit_one_edge_insert (single_succ_edge (ENTRY_BLOCK_PTR), NULL);
-
-  FOR_EACH_BB (bb)
-    FOR_EACH_EDGE (e, ei, bb->succs)
-      bsi_commit_one_edge_insert (e, NULL);
-}
-
-
-/* Commit insertions pending at edge E. If a new block is created, set NEW_BB
-   to this block, otherwise set it to NULL.  */
-
-void
-bsi_commit_one_edge_insert (edge e, basic_block *new_bb)
-{
-  if (new_bb)
-    *new_bb = NULL;
-  if (PENDING_STMT (e))
-    {
-      block_stmt_iterator bsi;
-      tree stmt = PENDING_STMT (e);
-
-      PENDING_STMT (e) = NULL_TREE;
-
-      if (tree_find_edge_insert_loc (e, &bsi, new_bb))
-       bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
-      else
-       bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
-    }
-}
-
-
-/* Add STMT to the pending list of edge E.  No actual insertion is
-   made until a call to bsi_commit_edge_inserts () is made.  */
-
-void
-bsi_insert_on_edge (edge e, tree stmt)
-{
-  append_to_statement_list (stmt, &PENDING_STMT (e));
-}
-
-/* Similar to bsi_insert_on_edge+bsi_commit_edge_inserts.  If a new
-   block has to be created, it is returned.  */
-
-basic_block
-bsi_insert_on_edge_immediate (edge e, tree stmt)
-{
-  block_stmt_iterator bsi;
-  basic_block new_bb = NULL;
-
-  gcc_assert (!PENDING_STMT (e));
-
-  if (tree_find_edge_insert_loc (e, &bsi, &new_bb))
-    bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
-  else
-    bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
-
-  return new_bb;
+    return NULL;
 }
 
-/*---------------------------------------------------------------------------
-            Tree specific functions for CFG manipulation
----------------------------------------------------------------------------*/
-
 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE.  */
 
 static void
 reinstall_phi_args (edge new_edge, edge old_edge)
 {
-  tree phi;
   edge_var_map_vector v;
   edge_var_map *vm;
   int i;
-
+  gimple_stmt_iterator phis;
+
   v = redirect_edge_var_map_vector (old_edge);
   if (!v)
     return;
-
-  for (i = 0, phi = phi_nodes (new_edge->dest);
-       VEC_iterate (edge_var_map, v, i, vm) && phi;
-       i++, phi = PHI_CHAIN (phi))
+
+  for (i = 0, phis = gsi_start_phis (new_edge->dest);
+       VEC_iterate (edge_var_map, v, i, vm) && !gsi_end_p (phis);
+       i++, gsi_next (&phis))
     {
+      gimple phi = gsi_stmt (phis);
       tree result = redirect_edge_var_map_result (vm);
       tree arg = redirect_edge_var_map_def (vm);
-
-      gcc_assert (result == PHI_RESULT (phi));
-
+      gcc_assert (result == gimple_phi_result (phi));
+
       add_phi_arg (phi, arg, new_edge);
     }
-
+
   redirect_edge_var_map_clear (old_edge);
 }
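
The loop above shows the tuple idiom for walking PHI nodes: a gimple_stmt_iterator obtained from gsi_start_phis replaces the old PHI_CHAIN walk. A minimal stand-alone sketch of that idiom, assuming the gimple.h accessors this merge introduces; the helper name dump_bb_phis is illustrative only and not part of the patch:

/* Illustrative sketch: print the result and arguments of every PHI
   node in BB using the tuple accessors.  */
static void
dump_bb_phis (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      size_t i;

      print_generic_expr (stderr, gimple_phi_result (phi), 0);
      for (i = 0; i < gimple_phi_num_args (phi); i++)
        print_generic_expr (stderr, gimple_phi_arg_def (phi, i), 0);
      fprintf (stderr, "\n");
    }
}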
 
@@ -3131,7 +2737,7 @@ split_edge_bb_loc (edge edge_in)
    Abort on abnormal edges.  */
 
 static basic_block
-tree_split_edge (edge edge_in)
+gimple_split_edge (edge edge_in)
 {
   basic_block new_bb, after_bb, dest;
   edge new_edge, e;
@@ -3194,10 +2800,7 @@ verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
       break;
 
     case MODIFY_EXPR:
-      gcc_unreachable ();
-
-    case GIMPLE_MODIFY_STMT:
-      x = GIMPLE_STMT_OPERAND (t, 0);
+      x = TREE_OPERAND (t, 0);
       if (TREE_CODE (x) == BIT_FIELD_REF
          && is_gimple_reg (TREE_OPERAND (x, 0)))
        {
@@ -3420,74 +3023,12 @@ verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
 #undef CHECK_OP
 }
 
-/* Verifies if EXPR is a valid GIMPLE unary expression.  Returns true
-   if there is an error, otherwise false.  */
-
-static bool
-verify_gimple_unary_expr (const_tree expr)
-{
-  tree op = TREE_OPERAND (expr, 0);
-  tree type = TREE_TYPE (expr);
-
-  if (!is_gimple_val (op))
-    {
-      error ("invalid operand in unary expression");
-      return true;
-    }
-
-  /* For general unary expressions we have the operations type
-     as the effective type the operation is carried out on.  So all
-     we need to require is that the operand is trivially convertible
-     to that type.  */
-  if (!useless_type_conversion_p (type, TREE_TYPE (op)))
-    {
-      error ("type mismatch in unary expression");
-      debug_generic_expr (type);
-      debug_generic_expr (TREE_TYPE (op));
-      return true;
-    }
-
-  return false;
-}
-
-/* Verifies if EXPR is a valid GIMPLE binary expression.  Returns true
-   if there is an error, otherwise false.  */
-
-static bool
-verify_gimple_binary_expr (const_tree expr)
-{
-  tree op0 = TREE_OPERAND (expr, 0);
-  tree op1 = TREE_OPERAND (expr, 1);
-  tree type = TREE_TYPE (expr);
-
-  if (!is_gimple_val (op0) || !is_gimple_val (op1))
-    {
-      error ("invalid operands in binary expression");
-      return true;
-    }
-
-  /* For general binary expressions we have the operations type
-     as the effective type the operation is carried out on.  So all
-     we need to require is that both operands are trivially convertible
-     to that type.  */
-  if (!useless_type_conversion_p (type, TREE_TYPE (op0))
-      || !useless_type_conversion_p (type, TREE_TYPE (op1)))
-    {
-      error ("type mismatch in binary expression");
-      debug_generic_stmt (type);
-      debug_generic_stmt (TREE_TYPE (op0));
-      debug_generic_stmt (TREE_TYPE (op1));
-      return true;
-    }
-
-  return false;
-}
 
 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
    Returns true if there is an error, otherwise false.  */
 
 static bool
-verify_gimple_min_lval (tree expr)
+verify_types_in_gimple_min_lval (tree expr)
 {
   tree op;
 
@@ -3525,7 +3066,7 @@ verify_gimple_min_lval (tree expr)
    if there is an error, otherwise false.  */
 
 static bool
-verify_gimple_reference (tree expr)
+verify_types_in_gimple_reference (tree expr)
 {
   while (handled_component_p (expr))
     {
@@ -3594,7 +3135,7 @@ verify_gimple_reference (tree expr)
       expr = op;
     }
 
-  return verify_gimple_min_lval (expr);
+  return verify_types_in_gimple_min_lval (expr);
 }
 
 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
@@ -3615,71 +3156,135 @@ one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
   return false;
 }
 
-/* Return true if TYPE1 is a fixed-point type and if conversions to and
-   from TYPE2 can be handled by FIXED_CONVERT_EXPR.  */
+/* Return true if TYPE1 is a fixed-point type and if conversions to and
+   from TYPE2 can be handled by FIXED_CONVERT_EXPR.  */
+
+static bool
+valid_fixed_convert_types_p (tree type1, tree type2)
+{
+  return (FIXED_POINT_TYPE_P (type1)
+         && (INTEGRAL_TYPE_P (type2)
+             || SCALAR_FLOAT_TYPE_P (type2)
+             || FIXED_POINT_TYPE_P (type2)));
+}
+
+/* Verify that OP is a valid GIMPLE operand.  Return true if there is
+   an error, false otherwise.  */
+
+static bool
+verify_types_in_gimple_op (tree op)
+{
+  if (!is_gimple_val (op) && !is_gimple_lvalue (op))
+    {
+      error ("invalid GIMPLE operand");
+      debug_generic_expr (op);
+      return true;
+    }
+
+  return false;
+}
+
+
+/* Verify the contents of a GIMPLE_CALL STMT.  Returns true when there
+   is a problem, otherwise false.  */
+
+static bool
+verify_types_in_gimple_call (gimple stmt)
+{
+  bool failed = false;
+  unsigned int i;
+  tree fn;
+
+  if (gimple_call_lhs (stmt))
+    failed |= verify_types_in_gimple_op (gimple_call_lhs (stmt));
+
+  fn = gimple_call_fn (stmt);
+  if (TREE_CODE (fn) != OBJ_TYPE_REF
+      && verify_types_in_gimple_op (fn))
+    failed = true;
+
+  if (gimple_call_chain (stmt))
+    failed |= verify_types_in_gimple_op (gimple_call_chain (stmt));
+
+  for (i = 0; i < gimple_call_num_args (stmt); i++)
+    failed |= verify_types_in_gimple_op (gimple_call_arg (stmt,i));
+
+  return failed;
+}
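
verify_types_in_gimple_call relies on the flattened call accessors that replace CALL_EXPR tree walking: gimple_call_lhs, gimple_call_fn, gimple_call_chain, and the gimple_call_num_args/gimple_call_arg pair. A small sketch of the same access pattern, assuming those accessors; count_non_val_call_args is an illustrative name only:

/* Illustrative sketch: count call arguments that are not GIMPLE values.  */
static unsigned
count_non_val_call_args (gimple stmt)
{
  unsigned i, n = 0;

  for (i = 0; i < gimple_call_num_args (stmt); i++)
    if (!is_gimple_val (gimple_call_arg (stmt, i)))
      n++;

  return n;
}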
+
+
+/* Verify the contents of a GIMPLE_COND STMT.  Returns true when there
+   is a problem, otherwise false.  */
 
 static bool
-valid_fixed_convert_types_p (tree type1, tree type2)
+verify_types_in_gimple_cond (gimple stmt)
 {
-  return (FIXED_POINT_TYPE_P (type1)
-         && (INTEGRAL_TYPE_P (type2)
-             || SCALAR_FLOAT_TYPE_P (type2)
-             || FIXED_POINT_TYPE_P (type2)));
+  bool failed = false;
+
+  failed |= verify_types_in_gimple_op (gimple_cond_lhs (stmt));
+  failed |= verify_types_in_gimple_op (gimple_cond_rhs (stmt));
+  failed |= verify_types_in_gimple_op (gimple_cond_true_label (stmt));
+  failed |= verify_types_in_gimple_op (gimple_cond_false_label (stmt));
+
+  return failed;
 }
 
-/* Verify the GIMPLE expression EXPR.  Returns true if there is an
-   error, otherwise false.  */
+
+/* Verify the contents of a GIMPLE_ASSIGN STMT.  Returns true when there
+   is a problem, otherwise false.
+
+   Verify that the types of the LHS and the RHS operands are
+   compatible.  This verification largely depends on what kind of
+   operation is done on the RHS of the assignment.  It is not always
+   the case that all the types of the operands must match (e.g., 'a =
+   (unsigned long) b' or 'ptr = ptr + 1').  */
 
 static bool
-verify_gimple_expr (tree expr)
+verify_types_in_gimple_assign (gimple stmt)
 {
-  tree type = TREE_TYPE (expr);
-
-  if (is_gimple_val (expr))
-    return false;
+  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
+  tree lhs = gimple_assign_lhs (stmt);
+  tree rhs1 = gimple_assign_rhs1 (stmt);
+  tree rhs2 = (gimple_num_ops (stmt) == 3) ? gimple_assign_rhs2 (stmt) : NULL;
+  tree lhs_type = TREE_TYPE (lhs);
+  tree rhs1_type = TREE_TYPE (rhs1);
+  tree rhs2_type = (rhs2) ? TREE_TYPE (rhs2) : NULL;
 
   /* Special codes we cannot handle via their class.  */
-  switch (TREE_CODE (expr))
+  switch (rhs_code)
     {
     CASE_CONVERT:
       {
-       tree op = TREE_OPERAND (expr, 0);
-       if (!is_gimple_val (op))
+       if (!is_gimple_val (rhs1))
          {
            error ("invalid operand in conversion");
            return true;
          }
 
-       /* Allow conversions between integral types and between
-          pointer types.  */
-        if ((INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (op)))
-           || (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (op))))
-         return false;
-
        /* Allow conversions between integral types and pointers only if
           there is no sign or zero extension involved.  */
-       if (((POINTER_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (op)))
-            || (POINTER_TYPE_P (TREE_TYPE (op)) && INTEGRAL_TYPE_P (type)))
-           && (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op))
+       if (((POINTER_TYPE_P (lhs_type) && INTEGRAL_TYPE_P (rhs1_type))
+            || (POINTER_TYPE_P (rhs1_type) && INTEGRAL_TYPE_P (lhs_type)))
+           && (TYPE_PRECISION (lhs_type) == TYPE_PRECISION (rhs1_type)
               /* For targets where the precision of sizetype doesn't
                   match that of pointers we need the following.  */
-               || type == sizetype || TREE_TYPE (op) == sizetype))
+               || lhs_type == sizetype || rhs1_type == sizetype))
          return false;
 
        /* Allow conversion from integer to offset type and vice versa.  */
-       if ((TREE_CODE (type) == OFFSET_TYPE
-            && TREE_CODE (TREE_TYPE (op)) == INTEGER_TYPE)
-           || (TREE_CODE (type) == INTEGER_TYPE
-               && TREE_CODE (TREE_TYPE (op)) == OFFSET_TYPE))
+       if ((TREE_CODE (lhs_type) == OFFSET_TYPE
+            && TREE_CODE (rhs1_type) == INTEGER_TYPE)
+           || (TREE_CODE (lhs_type) == INTEGER_TYPE
+               && TREE_CODE (rhs1_type) == OFFSET_TYPE))
          return false;
 
        /* Otherwise assert we are converting between types of the
           same kind.  */
-       if (TREE_CODE (type) != TREE_CODE (TREE_TYPE (op)))
+       if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
          {
            error ("invalid types in nop conversion");
-           debug_generic_expr (type);
-           debug_generic_expr (TREE_TYPE (op));
+           debug_generic_expr (lhs_type);
+           debug_generic_expr (rhs1_type);
            return true;
          }
 
@@ -3688,19 +3293,18 @@ verify_gimple_expr (tree expr)
 
     case FIXED_CONVERT_EXPR:
       {
-       tree op = TREE_OPERAND (expr, 0);
-       if (!is_gimple_val (op))
+       if (!is_gimple_val (rhs1))
          {
            error ("invalid operand in conversion");
            return true;
          }
 
-       if (!valid_fixed_convert_types_p (type, TREE_TYPE (op))
-           && !valid_fixed_convert_types_p (TREE_TYPE (op), type))
+       if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
+           && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
          {
            error ("invalid types in fixed-point conversion");
-           debug_generic_expr (type);
-           debug_generic_expr (TREE_TYPE (op));
+           debug_generic_expr (lhs_type);
+           debug_generic_expr (rhs1_type);
            return true;
          }
 
@@ -3709,79 +3313,70 @@ verify_gimple_expr (tree expr)
 
     case FLOAT_EXPR:
       {
-       tree op = TREE_OPERAND (expr, 0);
-       if (!is_gimple_val (op))
+       if (!is_gimple_val (rhs1))
          {
            error ("invalid operand in int to float conversion");
            return true;
          }
-       if (!INTEGRAL_TYPE_P (TREE_TYPE (op))
-           || !SCALAR_FLOAT_TYPE_P (type))
+
+       if (!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
          {
            error ("invalid types in conversion to floating point");
-           debug_generic_expr (type);
-           debug_generic_expr (TREE_TYPE (op));
+           debug_generic_expr (lhs_type);
+           debug_generic_expr (rhs1_type);
            return true;
          }
+
         return false;
       }
 
     case FIX_TRUNC_EXPR:
       {
-       tree op = TREE_OPERAND (expr, 0);
-       if (!is_gimple_val (op))
+       if (!is_gimple_val (rhs1))
          {
            error ("invalid operand in float to int conversion");
            return true;
          }
-       if (!INTEGRAL_TYPE_P (type)
-           || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (op)))
+
+       if (!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
          {
            error ("invalid types in conversion to integer");
-           debug_generic_expr (type);
-           debug_generic_expr (TREE_TYPE (op));
+           debug_generic_expr (lhs_type);
+           debug_generic_expr (rhs1_type);
            return true;
          }
+
         return false;
       }
 
     case COMPLEX_EXPR:
       {
-       tree op0 = TREE_OPERAND (expr, 0);
-       tree op1 = TREE_OPERAND (expr, 1);
-       if (!is_gimple_val (op0) || !is_gimple_val (op1))
+       if (!is_gimple_val (rhs1) || !is_gimple_val (rhs2))
          {
            error ("invalid operands in complex expression");
            return true;
          }
-       if (!TREE_CODE (type) == COMPLEX_TYPE
-           || !(TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
-                || SCALAR_FLOAT_TYPE_P (TREE_TYPE (op0)))
-           || !(TREE_CODE (TREE_TYPE (op1)) == INTEGER_TYPE
-                || SCALAR_FLOAT_TYPE_P (TREE_TYPE (op1)))
-           || !useless_type_conversion_p (TREE_TYPE (type),
-                                          TREE_TYPE (op0))
-           || !useless_type_conversion_p (TREE_TYPE (type),
-                                          TREE_TYPE (op1)))
+
+       if (TREE_CODE (lhs_type) != COMPLEX_TYPE
+           || !(TREE_CODE (rhs1_type) == INTEGER_TYPE
+                || SCALAR_FLOAT_TYPE_P (rhs1_type))
+           || !(TREE_CODE (rhs2_type) == INTEGER_TYPE
+                || SCALAR_FLOAT_TYPE_P (rhs2_type)))
          {
            error ("type mismatch in complex expression");
-           debug_generic_stmt (TREE_TYPE (expr));
-           debug_generic_stmt (TREE_TYPE (op0));
-           debug_generic_stmt (TREE_TYPE (op1));
+           debug_generic_expr (lhs_type);
+           debug_generic_expr (rhs1_type);
+           debug_generic_expr (rhs2_type);
            return true;
          }
+
        return false;
       }
 
     case CONSTRUCTOR:
       {
-       /* This is used like COMPLEX_EXPR but for vectors.  */
-       if (TREE_CODE (type) != VECTOR_TYPE)
-         {
-           error ("constructor not allowed for non-vector types");
-           debug_generic_stmt (type);
-           return true;
-         }
+       /* In this context we know that we are on the RHS of an
+          assignment, so CONSTRUCTOR operands are OK.  */
        /* FIXME: verify constructor arguments.  */
        return false;
       }
@@ -3791,113 +3386,83 @@ verify_gimple_expr (tree expr)
     case LROTATE_EXPR:
     case RROTATE_EXPR:
       {
-       tree op0 = TREE_OPERAND (expr, 0);
-       tree op1 = TREE_OPERAND (expr, 1);
-       if (!is_gimple_val (op0) || !is_gimple_val (op1))
+       if (!is_gimple_val (rhs1) || !is_gimple_val (rhs2))
          {
            error ("invalid operands in shift expression");
            return true;
          }
-       if (!TREE_CODE (TREE_TYPE (op1)) == INTEGER_TYPE
-           || !useless_type_conversion_p (type, TREE_TYPE (op0)))
+
+       if (TREE_CODE (rhs1_type) != INTEGER_TYPE
+           || !useless_type_conversion_p (lhs_type, rhs1_type))
          {
            error ("type mismatch in shift expression");
-           debug_generic_stmt (TREE_TYPE (expr));
-           debug_generic_stmt (TREE_TYPE (op0));
-           debug_generic_stmt (TREE_TYPE (op1));
+           debug_generic_expr (lhs_type);
+           debug_generic_expr (rhs1_type);
+           debug_generic_expr (rhs2_type);
            return true;
          }
+
        return false;
       }
 
     case PLUS_EXPR:
     case MINUS_EXPR:
       {
-       tree op0 = TREE_OPERAND (expr, 0);
-       tree op1 = TREE_OPERAND (expr, 1);
-       if (POINTER_TYPE_P (type)
-           || POINTER_TYPE_P (TREE_TYPE (op0))
-           || POINTER_TYPE_P (TREE_TYPE (op1)))
+       if (POINTER_TYPE_P (lhs_type)
+           || POINTER_TYPE_P (rhs1_type)
+           || POINTER_TYPE_P (rhs2_type))
          {
            error ("invalid (pointer) operands to plus/minus");
            return true;
          }
+
        /* Continue with generic binary expression handling.  */
        break;
       }
 
     case POINTER_PLUS_EXPR:
       {
-       tree op0 = TREE_OPERAND (expr, 0);
-       tree op1 = TREE_OPERAND (expr, 1);
-       if (!is_gimple_val (op0) || !is_gimple_val (op1))
+       if (!is_gimple_val (rhs1) || !is_gimple_val (rhs2))
          {
            error ("invalid operands in pointer plus expression");
            return true;
          }
-       if (!POINTER_TYPE_P (TREE_TYPE (op0))
-           || !useless_type_conversion_p (type, TREE_TYPE (op0))
-           || !useless_type_conversion_p (sizetype, TREE_TYPE (op1)))
+       if (!POINTER_TYPE_P (rhs1_type)
+           || !useless_type_conversion_p (lhs_type, rhs1_type)
+           || !useless_type_conversion_p (sizetype, rhs2_type))
          {
            error ("type mismatch in pointer plus expression");
-           debug_generic_stmt (type);
-           debug_generic_stmt (TREE_TYPE (op0));
-           debug_generic_stmt (TREE_TYPE (op1));
+           debug_generic_stmt (lhs_type);
+           debug_generic_stmt (rhs1_type);
+           debug_generic_stmt (rhs2_type);
            return true;
          }
-       return false;
-      }
 
-    case COND_EXPR:
-      {
-       tree op0 = TREE_OPERAND (expr, 0);
-       tree op1 = TREE_OPERAND (expr, 1);
-       tree op2 = TREE_OPERAND (expr, 2);
-       if ((!is_gimple_val (op1)
-            && TREE_CODE (TREE_TYPE (op1)) != VOID_TYPE)
-           || (!is_gimple_val (op2)
-               && TREE_CODE (TREE_TYPE (op2)) != VOID_TYPE))
-         {
-           error ("invalid operands in conditional expression");
-           return true;
-         }
-       if (!INTEGRAL_TYPE_P (TREE_TYPE (op0))
-           || (TREE_CODE (TREE_TYPE (op1)) != VOID_TYPE
-               && !useless_type_conversion_p (type, TREE_TYPE (op1)))
-           || (TREE_CODE (TREE_TYPE (op2)) != VOID_TYPE
-               && !useless_type_conversion_p (type, TREE_TYPE (op2))))
-         {
-           error ("type mismatch in conditional expression");
-           debug_generic_stmt (type);
-           debug_generic_stmt (TREE_TYPE (op0));
-           debug_generic_stmt (TREE_TYPE (op1));
-           debug_generic_stmt (TREE_TYPE (op2));
-           return true;
-         }
-       return verify_gimple_expr (op0);
-      }
+       return false;
+      }
 
     case ADDR_EXPR:
       {
-       tree op = TREE_OPERAND (expr, 0);
+       tree op = TREE_OPERAND (rhs1, 0);
        if (!is_gimple_addressable (op))
          {
            error ("invalid operand in unary expression");
            return true;
          }
-       if (!one_pointer_to_useless_type_conversion_p (type, TREE_TYPE (op))
+
+       if (!one_pointer_to_useless_type_conversion_p (lhs_type, TREE_TYPE (op))
            /* FIXME: a longstanding wart, &a == &a[0].  */
            && (TREE_CODE (TREE_TYPE (op)) != ARRAY_TYPE
-               || !one_pointer_to_useless_type_conversion_p (type,
+               || !one_pointer_to_useless_type_conversion_p (lhs_type,
                      TREE_TYPE (TREE_TYPE (op)))))
          {
            error ("type mismatch in address expression");
-           debug_generic_stmt (TREE_TYPE (expr));
+           debug_generic_stmt (lhs_type);
            debug_generic_stmt (TYPE_POINTER_TO (TREE_TYPE (op)));
            return true;
          }
 
-       return verify_gimple_reference (op);
+       return verify_types_in_gimple_reference (TREE_OPERAND (rhs1, 0));
       }
 
     case TRUTH_ANDIF_EXPR:
@@ -3908,24 +3473,21 @@ verify_gimple_expr (tree expr)
     case TRUTH_OR_EXPR:
     case TRUTH_XOR_EXPR:
       {
-       tree op0 = TREE_OPERAND (expr, 0);
-       tree op1 = TREE_OPERAND (expr, 1);
-
-       if (!is_gimple_val (op0) || !is_gimple_val (op1))
+       if (!is_gimple_val (rhs1) || !is_gimple_val (rhs2))
          {
            error ("invalid operands in truth expression");
            return true;
          }
 
        /* We allow any kind of integral typed argument and result.  */
-       if (!INTEGRAL_TYPE_P (TREE_TYPE (op0))
-           || !INTEGRAL_TYPE_P (TREE_TYPE (op1))
-           || !INTEGRAL_TYPE_P (type))
+       if (!INTEGRAL_TYPE_P (rhs1_type)
+           || !INTEGRAL_TYPE_P (rhs2_type)
+           || !INTEGRAL_TYPE_P (lhs_type))
          {
            error ("type mismatch in binary truth expression");
-           debug_generic_stmt (type);
-           debug_generic_stmt (TREE_TYPE (op0));
-           debug_generic_stmt (TREE_TYPE (op1));
+           debug_generic_expr (lhs_type);
+           debug_generic_expr (rhs1_type);
+           debug_generic_expr (rhs2_type);
            return true;
          }
 
@@ -3934,9 +3496,7 @@ verify_gimple_expr (tree expr)
 
     case TRUTH_NOT_EXPR:
       {
-       tree op = TREE_OPERAND (expr, 0);
-
-       if (!is_gimple_val (op))
+       if (!is_gimple_val (rhs1))
          {
            error ("invalid operand in unary not");
            return true;
@@ -3944,33 +3504,37 @@ verify_gimple_expr (tree expr)
 
        /* For TRUTH_NOT_EXPR we can have any kind of integral
           typed arguments and results.  */
-       if (!INTEGRAL_TYPE_P (TREE_TYPE (op))
-           || !INTEGRAL_TYPE_P (type))
+       if (!INTEGRAL_TYPE_P (rhs1_type)
+           || !INTEGRAL_TYPE_P (lhs_type))
          {
            error ("type mismatch in not expression");
-           debug_generic_expr (TREE_TYPE (expr));
-           debug_generic_expr (TREE_TYPE (op));
+           debug_generic_expr (lhs_type);
+           debug_generic_expr (rhs1_type);
            return true;
          }
 
        return false;
       }
 
+    /* After gimplification we should not have any of these.  */
+    case ASM_EXPR:
+    case BIND_EXPR:
     case CALL_EXPR:
-      /* FIXME.  The C frontend passes unpromoted arguments in case it
-        didn't see a function declaration before the call.  */
+    case COND_EXPR:
+    case TREE_LIST:
+    case COMPOUND_EXPR:
+    case MODIFY_EXPR:
+    case INIT_EXPR:
+    case GOTO_EXPR:
+    case LABEL_EXPR:
+    case RETURN_EXPR:
+    case TRY_FINALLY_EXPR:
+    case TRY_CATCH_EXPR:
+    case EH_FILTER_EXPR:
+    case STATEMENT_LIST:
       {
-       tree decl = CALL_EXPR_FN (expr);
-
-       if (TREE_CODE (decl) == FUNCTION_DECL 
-           && DECL_LOOPING_CONST_OR_PURE_P (decl)
-           && (!DECL_PURE_P (decl))
-           && (!TREE_READONLY (decl)))
-         {
-           error ("invalid pure const state for function");
-           return true;
-         }
-       return false;
+       error ("tree node that should already be gimple");
+       return true;
       }
 
     case OBJ_TYPE_REF:
@@ -3981,26 +3545,29 @@ verify_gimple_expr (tree expr)
     }
 
   /* Generic handling via classes.  */
-  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
+  switch (TREE_CODE_CLASS (rhs_code))
     {
     case tcc_unary:
-      return verify_gimple_unary_expr (expr);
-
-    case tcc_binary:
-      return verify_gimple_binary_expr (expr);
+      if (!useless_type_conversion_p (lhs_type, rhs1_type))
+       {
+         error ("non-trivial conversion at assignment");
+         debug_generic_expr (lhs);
+         debug_generic_expr (rhs1);
+         return true;
+       }
+      break;
 
     case tcc_reference:
-      return verify_gimple_reference (expr);
+      return verify_types_in_gimple_reference (rhs1);
 
     case tcc_comparison:
       {
-       tree op0 = TREE_OPERAND (expr, 0);
-       tree op1 = TREE_OPERAND (expr, 1);
-       if (!is_gimple_val (op0) || !is_gimple_val (op1))
+       if (!is_gimple_val (rhs1) || !is_gimple_val (rhs2))
          {
            error ("invalid operands in comparison expression");
            return true;
          }
+
        /* For comparisons we do not have the operations type as the
           effective type the comparison is carried out in.  Instead
           we require that either the first operand is trivially
@@ -4008,186 +3575,196 @@ verify_gimple_expr (tree expr)
           The resulting type of a comparison may be any integral type.
           Because we special-case pointers to void we allow
           comparisons of pointers with the same mode as well.  */
-       if ((!useless_type_conversion_p (TREE_TYPE (op0), TREE_TYPE (op1))
-            && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0))
-            && (!POINTER_TYPE_P (TREE_TYPE (op0))
-                || !POINTER_TYPE_P (TREE_TYPE (op1))
-                || TYPE_MODE (TREE_TYPE (op0)) != TYPE_MODE (TREE_TYPE (op1))))
-           || !INTEGRAL_TYPE_P (type))
+       if ((!useless_type_conversion_p (rhs1_type, rhs2_type)
+            && !useless_type_conversion_p (rhs2_type, rhs1_type)
+            && (!POINTER_TYPE_P (rhs1_type)
+                || !POINTER_TYPE_P (rhs2_type)
+                || TYPE_MODE (rhs1_type) != TYPE_MODE (rhs2_type)))
+           || !INTEGRAL_TYPE_P (lhs_type))
          {
            error ("type mismatch in comparison expression");
-           debug_generic_stmt (TREE_TYPE (expr));
-           debug_generic_stmt (TREE_TYPE (op0));
-           debug_generic_stmt (TREE_TYPE (op1));
+           debug_generic_expr (lhs_type);
+           debug_generic_expr (rhs1_type);
+           debug_generic_expr (rhs2_type);
            return true;
          }
         break;
       }
 
-    default:
-      gcc_unreachable ();
+    default:;
     }
 
   return false;
 }
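
verify_types_in_gimple_assign works from the flattened GIMPLE_ASSIGN layout: gimple_assign_rhs_code gives the operation, gimple_assign_rhs1/gimple_assign_rhs2 the operands, and gimple_num_ops distinguishes unary from binary right-hand sides. A minimal sketch of that access pattern, assuming the same accessors; dump_assign_operands is an illustrative name, not part of the patch:

/* Illustrative sketch: dump the LHS and RHS operands of a GIMPLE_ASSIGN.  */
static void
dump_assign_operands (gimple stmt)
{
  debug_generic_expr (gimple_assign_lhs (stmt));
  debug_generic_expr (gimple_assign_rhs1 (stmt));
  if (gimple_num_ops (stmt) == 3)
    debug_generic_expr (gimple_assign_rhs2 (stmt));
}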
 
-/* Verify the GIMPLE assignment statement STMT.  Returns true if there
-   is an error, otherwise false.  */
+
+/* Verify the contents of a GIMPLE_RETURN STMT.  Returns true when there
+   is a problem, otherwise false.  */
 
 static bool
-verify_gimple_modify_stmt (const_tree stmt)
+verify_types_in_gimple_return (gimple stmt)
 {
-  tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
-  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
+  tree op = gimple_return_retval (stmt);
+
+  if (op == NULL)
+    return false;
+  
+  return verify_types_in_gimple_op (op);
+}
 
-  gcc_assert (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT);
 
-  if (!useless_type_conversion_p (TREE_TYPE (lhs),
-                                 TREE_TYPE (rhs)))
+/* Verify the contents of a GIMPLE_SWITCH STMT.  Returns true when there
+   is a problem, otherwise false.  */
+
+static bool
+verify_types_in_gimple_switch (gimple stmt)
+{
+  if (!is_gimple_val (gimple_switch_index (stmt)))
     {
-      error ("non-trivial conversion at assignment");
-      debug_generic_expr (TREE_TYPE (lhs));
-      debug_generic_expr (TREE_TYPE (rhs));
+      error ("invalid operand to switch statement");
+      debug_generic_expr (gimple_switch_index (stmt));
       return true;
     }
 
-  /* Loads/stores from/to a variable are ok.  */
-  if ((is_gimple_val (lhs)
-       && is_gimple_variable (rhs))
-      || (is_gimple_val (rhs)
-         && is_gimple_variable (lhs)))
-    return false;
+  return false;
+}
 
-  /* Aggregate copies are ok.  */
-  if (!is_gimple_reg_type (TREE_TYPE (lhs))
-      && !is_gimple_reg_type (TREE_TYPE (rhs)))
-    return false;
 
-  /* We might get 'loads' from a parameter which is not a gimple value.  */
-  if (TREE_CODE (rhs) == PARM_DECL)
-    return verify_gimple_expr (lhs);
+/* Verify the contents of a GIMPLE_PHI.  Returns true if there is a problem,
+   and false otherwise.  */
 
-  if (!is_gimple_variable (lhs)
-      && verify_gimple_expr (lhs))
-    return true;
+static bool
+verify_types_in_gimple_phi (gimple stmt)
+{
+  size_t i;
 
-  if (!is_gimple_variable (rhs)
-      && verify_gimple_expr (rhs))
+  if (verify_types_in_gimple_op (gimple_phi_result (stmt)))
     return true;
 
+  for (i = 0; i < gimple_phi_num_args (stmt); i++)
+    if (verify_types_in_gimple_op (gimple_phi_arg_def (stmt, i)))
+      return true;
+
   return false;
 }
 
+
 /* Verify the GIMPLE statement STMT.  Returns true if there is an
    error, otherwise false.  */
 
 static bool
-verify_gimple_stmt (tree stmt)
+verify_types_in_gimple_stmt (gimple stmt)
 {
-  if (!is_gimple_stmt (stmt))
-    {
-      error ("is not a valid GIMPLE statement");
-      return true;
-    }
-
-  if (OMP_DIRECTIVE_P (stmt))
+  if (is_gimple_omp (stmt))
     {
       /* OpenMP directives are validated by the FE and never operated
-        on by the optimizers.  Furthermore, OMP_FOR may contain
+        on by the optimizers.  Furthermore, GIMPLE_OMP_FOR may contain
         non-gimple expressions when the main index variable has had
         its address taken.  This does not affect the loop itself
-        because the header of an OMP_FOR is merely used to determine
+        because the header of a GIMPLE_OMP_FOR is merely used to determine
         how to setup the parallel iteration.  */
       return false;
     }
 
-  switch (TREE_CODE (stmt))
+  switch (gimple_code (stmt))
     {
-    case GIMPLE_MODIFY_STMT:
-      return verify_gimple_modify_stmt (stmt);
+    case GIMPLE_ASSIGN:
+      return verify_types_in_gimple_assign (stmt);
 
-    case GOTO_EXPR:
-    case LABEL_EXPR:
-      return false;
+    case GIMPLE_LABEL:
+      return TREE_CODE (gimple_label_label (stmt)) != LABEL_DECL;
 
-    case SWITCH_EXPR:
-      if (!is_gimple_val (TREE_OPERAND (stmt, 0)))
-       {
-         error ("invalid operand to switch statement");
-         debug_generic_expr (TREE_OPERAND (stmt, 0));
-       }
-      return false;
+    case GIMPLE_CALL:
+      return verify_types_in_gimple_call (stmt);
 
-    case RETURN_EXPR:
-      {
-       tree op = TREE_OPERAND (stmt, 0);
+    case GIMPLE_COND:
+      return verify_types_in_gimple_cond (stmt);
 
-       if (TREE_CODE (TREE_TYPE (stmt)) != VOID_TYPE)
-         {
-           error ("type error in return expression");
-           return true;
-         }
+    case GIMPLE_GOTO:
+      return verify_types_in_gimple_op (gimple_goto_dest (stmt));
 
-       if (op == NULL_TREE
-           || TREE_CODE (op) == RESULT_DECL)
-         return false;
+    case GIMPLE_NOP:
+    case GIMPLE_PREDICT:
+      return false;
 
-       return verify_gimple_modify_stmt (op);
-      }
+    case GIMPLE_SWITCH:
+      return verify_types_in_gimple_switch (stmt);
 
-    case CALL_EXPR:
-    case COND_EXPR:
-      return verify_gimple_expr (stmt);
+    case GIMPLE_RETURN:
+      return verify_types_in_gimple_return (stmt);
 
-    case NOP_EXPR:
-    case CHANGE_DYNAMIC_TYPE_EXPR:
-    case ASM_EXPR:
-    case PREDICT_EXPR:
+    case GIMPLE_ASM:
       return false;
 
+    case GIMPLE_CHANGE_DYNAMIC_TYPE:
+      return verify_types_in_gimple_op (gimple_cdt_location (stmt));
+
+    case GIMPLE_PHI:
+      return verify_types_in_gimple_phi (stmt);
+
     default:
       gcc_unreachable ();
     }
 }
 
-/* Verify the GIMPLE statements inside the statement list STMTS.
-   Returns true if there were any errors.  */
+/* Verify the GIMPLE statements inside the sequence STMTS.  */
 
 static bool
-verify_gimple_2 (tree stmts)
+verify_types_in_gimple_seq_2 (gimple_seq stmts)
 {
-  tree_stmt_iterator tsi;
+  gimple_stmt_iterator ittr;
   bool err = false;
 
-  for (tsi = tsi_start (stmts); !tsi_end_p (tsi); tsi_next (&tsi))
+  for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
     {
-      tree stmt = tsi_stmt (tsi);
-
-      switch (TREE_CODE (stmt))
-       {
-       case BIND_EXPR:
-         err |= verify_gimple_2 (BIND_EXPR_BODY (stmt));
-         break;
-
-       case TRY_CATCH_EXPR:
-       case TRY_FINALLY_EXPR:
-         err |= verify_gimple_2 (TREE_OPERAND (stmt, 0));
-         err |= verify_gimple_2 (TREE_OPERAND (stmt, 1));
-         break;
-
-       case CATCH_EXPR:
-         err |= verify_gimple_2 (CATCH_BODY (stmt));
-         break;
+      gimple stmt = gsi_stmt (ittr);
 
-       case EH_FILTER_EXPR:
-         err |= verify_gimple_2 (EH_FILTER_FAILURE (stmt));
-         break;
+      switch (gimple_code (stmt))
+        {
+          case GIMPLE_BIND:
+            err |= verify_types_in_gimple_seq_2 (gimple_bind_body (stmt));
+            break;
+
+          case GIMPLE_TRY:
+            err |= verify_types_in_gimple_seq_2 (gimple_try_eval (stmt));
+            err |= verify_types_in_gimple_seq_2 (gimple_try_cleanup (stmt));
+            break;
+
+          case GIMPLE_EH_FILTER:
+            err |= verify_types_in_gimple_seq_2
+                    (gimple_eh_filter_failure (stmt));
+            break;
+
+          case GIMPLE_CATCH:
+            err |= verify_types_in_gimple_seq_2 (gimple_catch_handler (stmt));
+            break;
+
+          case GIMPLE_OMP_CRITICAL:
+          case GIMPLE_OMP_CONTINUE:
+          case GIMPLE_OMP_MASTER:
+          case GIMPLE_OMP_ORDERED:
+          case GIMPLE_OMP_SECTION:
+          case GIMPLE_OMP_FOR:
+          case GIMPLE_OMP_PARALLEL:
+          case GIMPLE_OMP_TASK:
+          case GIMPLE_OMP_SECTIONS:
+          case GIMPLE_OMP_SINGLE:
+          case GIMPLE_OMP_ATOMIC_STORE:
+          case GIMPLE_OMP_ATOMIC_LOAD:
+            break;
+
+          /* Tuples that do not have trees.  */
+          case GIMPLE_NOP:
+          case GIMPLE_RESX:
+          case GIMPLE_OMP_RETURN:
+          case GIMPLE_PREDICT:
+            break;
 
        default:
          {
-           bool err2 = verify_gimple_stmt (stmt);
+           bool err2 = verify_types_in_gimple_stmt (stmt);
            if (err2)
-             debug_generic_expr (stmt);
+             debug_gimple_stmt (stmt);
            err |= err2;
          }
        }
@@ -4200,54 +3777,58 @@ verify_gimple_2 (tree stmts)
 /* Verify the GIMPLE statements inside the statement list STMTS.  */
 
 void
-verify_gimple_1 (tree stmts)
+verify_types_in_gimple_seq (gimple_seq stmts)
 {
-  if (verify_gimple_2 (stmts))
+  if (verify_types_in_gimple_seq_2 (stmts))
     internal_error ("verify_gimple failed");
 }
 
-/* Verify the GIMPLE statements inside the current function.  */
-
-void
-verify_gimple (void)
-{
-  verify_gimple_1 (BIND_EXPR_BODY (DECL_SAVED_TREE (cfun->decl)));
-}
 
 /* Verify STMT, return true if STMT is not in GIMPLE form.
    TODO: Implement type checking.  */
 
 static bool
-verify_stmt (tree stmt, bool last_in_block)
+verify_stmt (gimple_stmt_iterator *gsi)
 {
   tree addr;
+  struct walk_stmt_info wi;
+  bool last_in_block = gsi_one_before_end_p (*gsi);
+  gimple stmt = gsi_stmt (*gsi);
 
-  if (OMP_DIRECTIVE_P (stmt))
+  if (is_gimple_omp (stmt))
     {
       /* OpenMP directives are validated by the FE and never operated
-        on by the optimizers.  Furthermore, OMP_FOR may contain
+        on by the optimizers.  Furthermore, GIMPLE_OMP_FOR may contain
         non-gimple expressions when the main index variable has had
         its address taken.  This does not affect the loop itself
-        because the header of an OMP_FOR is merely used to determine
+        because the header of a GIMPLE_OMP_FOR is merely used to determine
         how to setup the parallel iteration.  */
       return false;
     }
 
-  if (!is_gimple_stmt (stmt))
+  /* FIXME.  The C frontend passes unpromoted arguments in case it
+     didn't see a function declaration before the call.  */
+  if (is_gimple_call (stmt))
     {
-      error ("is not a valid GIMPLE statement");
-      goto fail;
+      tree decl = gimple_call_fn (stmt);
+
+      if (TREE_CODE (decl) == FUNCTION_DECL 
+         && DECL_LOOPING_CONST_OR_PURE_P (decl)
+         && (!DECL_PURE_P (decl))
+         && (!TREE_READONLY (decl)))
+       {
+         error ("invalid pure const state for function");
+         return true;
+       }
     }
 
-  addr = walk_tree (&stmt, verify_expr, NULL, NULL);
+  memset (&wi, 0, sizeof (wi));
+  addr = walk_gimple_op (gsi_stmt (*gsi), verify_expr, &wi);
   if (addr)
     {
-      debug_generic_stmt (addr);
-      if (addr != stmt)
-       {
-         inform ("in statement");
-         debug_generic_stmt (stmt);
-       }
+      debug_generic_expr (addr);
+      inform ("in statement");
+      debug_gimple_stmt (stmt);
       return true;
     }
 
@@ -4258,12 +3839,12 @@ verify_stmt (tree stmt, bool last_in_block)
      to match.  */
   if (lookup_stmt_eh_region (stmt) >= 0)
     {
-      if (!tree_could_throw_p (stmt))
+      if (!stmt_could_throw_p (stmt))
        {
          error ("statement marked for throw, but doesn%'t");
          goto fail;
        }
-      if (!last_in_block && tree_can_throw_internal (stmt))
+      if (!last_in_block && stmt_can_throw_internal (stmt))
        {
          error ("statement marked for throw in middle of block");
          goto fail;
@@ -4273,7 +3854,7 @@ verify_stmt (tree stmt, bool last_in_block)
   return false;
 
  fail:
-  debug_generic_stmt (stmt);
+  debug_gimple_stmt (stmt);
   return true;
 }
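
verify_stmt now walks statement operands with walk_gimple_op and a zero-initialized walk_stmt_info instead of walk_tree over a whole statement tree. A minimal sketch of that callback setup, assuming the walk_gimple_op interface used above; find_addr_expr and find_addr_expr_r are illustrative names only:

/* Illustrative sketch: return the first ADDR_EXPR found among the
   operands of STMT, or NULL_TREE if there is none.  */
static tree
find_addr_expr_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                  void *data ATTRIBUTE_UNUSED)
{
  return TREE_CODE (*tp) == ADDR_EXPR ? *tp : NULL_TREE;
}

static tree
find_addr_expr (gimple stmt)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  return walk_gimple_op (stmt, find_addr_expr_r, &wi);
}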
 
@@ -4307,12 +3888,13 @@ tree_node_can_be_shared (tree t)
 }
 
 
-/* Called via walk_trees.  Verify tree sharing.  */
+/* Called via walk_gimple_stmt.  Verify tree sharing.  */
 
 static tree
-verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
+verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
 {
-  struct pointer_set_t *visited = (struct pointer_set_t *) data;
+  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
+  struct pointer_set_t *visited = (struct pointer_set_t *) wi->info;
 
   if (tree_node_can_be_shared (*tp))
     {
@@ -4327,35 +3909,6 @@ verify_node_sharing (tree * tp, int *walk_subtrees, void *data)
 }
 
 
-/* Helper function for verify_gimple_tuples.  */
-
-static tree
-verify_gimple_tuples_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
-                        void *data ATTRIBUTE_UNUSED)
-{
-  switch (TREE_CODE (*tp))
-    {
-    case MODIFY_EXPR:
-      error ("unexpected non-tuple");
-      debug_tree (*tp);
-      gcc_unreachable ();
-      return NULL_TREE;
-
-    default:
-      return NULL_TREE;
-    }
-}
-
-/* Verify that there are no trees that should have been converted to
-   gimple tuples.  Return true if T contains a node that should have
-   been converted to a gimple tuple, but hasn't.  */
-
-static bool
-verify_gimple_tuples (tree t)
-{
-  return walk_tree (&t, verify_gimple_tuples_1, NULL, NULL) != NULL;
-}
-
 static bool eh_error_found;
 static int
 verify_eh_throw_stmt_node (void **slot, void *data)
@@ -4366,52 +3919,56 @@ verify_eh_throw_stmt_node (void **slot, void *data)
   if (!pointer_set_contains (visited, node->stmt))
     {
       error ("Dead STMT in EH table");
-      debug_generic_stmt (node->stmt);
+      debug_gimple_stmt (node->stmt);
       eh_error_found = true;
     }
   return 0;
 }
 
-/* Verify the GIMPLE statement chain.  */
+
+/* Verify the GIMPLE statements in every basic block.  */
 
 void
 verify_stmts (void)
 {
   basic_block bb;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   bool err = false;
   struct pointer_set_t *visited, *visited_stmts;
   tree addr;
+  struct walk_stmt_info wi;
 
   timevar_push (TV_TREE_STMT_VERIFY);
   visited = pointer_set_create ();
   visited_stmts = pointer_set_create ();
 
+  memset (&wi, 0, sizeof (wi));
+  wi.info = (void *) visited;
+
   FOR_EACH_BB (bb)
     {
-      tree phi;
-      int i;
+      gimple phi;
+      size_t i;
 
-      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
-         int phi_num_args = PHI_NUM_ARGS (phi);
-
+         phi = gsi_stmt (gsi);
          pointer_set_insert (visited_stmts, phi);
-         if (bb_for_stmt (phi) != bb)
+         if (gimple_bb (phi) != bb)
            {
-             error ("bb_for_stmt (phi) is set to a wrong basic block");
+             error ("gimple_bb (phi) is set to a wrong basic block");
              err |= true;
            }
 
-         for (i = 0; i < phi_num_args; i++)
+         for (i = 0; i < gimple_phi_num_args (phi); i++)
            {
-             tree t = PHI_ARG_DEF (phi, i);
+             tree t = gimple_phi_arg_def (phi, i);
              tree addr;
 
              if (!t)
                {
                  error ("missing PHI def");
-                 debug_generic_stmt (phi);
+                 debug_gimple_stmt (phi);
                  err |= true;
                  continue;
                }
@@ -4421,9 +3978,9 @@ verify_stmts (void)
                       && TREE_CODE (t) != FUNCTION_DECL
                       && !is_gimple_min_invariant (t))
                {
-                 error ("PHI def is not a GIMPLE value");
-                 debug_generic_stmt (phi);
-                 debug_generic_stmt (t);
+                 error ("PHI argument is not a GIMPLE value");
+                 debug_gimple_stmt (phi);
+                 debug_generic_expr (t);
                  err |= true;
                }
 
@@ -4431,38 +3988,59 @@ verify_stmts (void)
              if (addr)
                {
                  error ("incorrect sharing of tree nodes");
-                 debug_generic_stmt (phi);
-                 debug_generic_stmt (addr);
+                 debug_gimple_stmt (phi);
+                 debug_generic_expr (addr);
                  err |= true;
                }
            }
        }
 
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
        {
-         tree stmt = bsi_stmt (bsi);
+         gimple stmt = gsi_stmt (gsi);
+
+         if (gimple_code (stmt) == GIMPLE_WITH_CLEANUP_EXPR
+             || gimple_code (stmt) == GIMPLE_BIND)
+           {
+             error ("invalid GIMPLE statement");
+             debug_gimple_stmt (stmt);
+             err |= true;
+           }
 
          pointer_set_insert (visited_stmts, stmt);
-         err |= verify_gimple_tuples (stmt);
 
-         if (bb_for_stmt (stmt) != bb)
+         if (gimple_bb (stmt) != bb)
            {
-             error ("bb_for_stmt (stmt) is set to a wrong basic block");
+             error ("gimple_bb (stmt) is set to a wrong basic block");
              err |= true;
            }
 
-         bsi_next (&bsi);
-         err |= verify_stmt (stmt, bsi_end_p (bsi));
-         addr = walk_tree (&stmt, verify_node_sharing, visited, NULL);
+         if (gimple_code (stmt) == GIMPLE_LABEL)
+           {
+             tree decl = gimple_label_label (stmt);
+             int uid = LABEL_DECL_UID (decl);
+
+             if (uid == -1
+                 || VEC_index (basic_block, label_to_block_map, uid) != bb)
+               {
+                 error ("incorrect entry in label_to_block_map");
+                 err |= true;
+               }
+           }
+
+         err |= verify_stmt (&gsi);
+         addr = walk_gimple_op (gsi_stmt (gsi), verify_node_sharing, &wi);
          if (addr)
            {
              error ("incorrect sharing of tree nodes");
-             debug_generic_stmt (stmt);
-             debug_generic_stmt (addr);
+             debug_gimple_stmt (stmt);
+             debug_generic_expr (addr);
              err |= true;
            }
+         gsi_next (&gsi);
        }
     }
+
   eh_error_found = false;
   if (get_eh_throw_stmt_table (cfun))
     htab_traverse (get_eh_throw_stmt_table (cfun),
@@ -4482,22 +4060,22 @@ verify_stmts (void)
 /* Verifies that the flow information is OK.  */
 
 static int
-tree_verify_flow_info (void)
+gimple_verify_flow_info (void)
 {
   int err = 0;
   basic_block bb;
-  block_stmt_iterator bsi;
-  tree stmt;
+  gimple_stmt_iterator gsi;
+  gimple stmt;
   edge e;
   edge_iterator ei;
 
-  if (ENTRY_BLOCK_PTR->il.tree)
+  if (ENTRY_BLOCK_PTR->il.gimple)
     {
       error ("ENTRY_BLOCK has IL associated with it");
       err = 1;
     }
 
-  if (EXIT_BLOCK_PTR->il.tree)
+  if (EXIT_BLOCK_PTR->il.gimple)
     {
       error ("EXIT_BLOCK has IL associated with it");
       err = 1;
@@ -4514,41 +4092,42 @@ tree_verify_flow_info (void)
     {
       bool found_ctrl_stmt = false;
 
-      stmt = NULL_TREE;
+      stmt = NULL;
 
       /* Skip labels on the start of basic block.  */
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
-         tree prev_stmt = stmt;
+         tree label;
+         gimple prev_stmt = stmt;
 
-         stmt = bsi_stmt (bsi);
+         stmt = gsi_stmt (gsi);
 
-         if (TREE_CODE (stmt) != LABEL_EXPR)
+         if (gimple_code (stmt) != GIMPLE_LABEL)
            break;
 
-         if (prev_stmt && DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
+         label = gimple_label_label (stmt);
+         if (prev_stmt && DECL_NONLOCAL (label))
            {
              error ("nonlocal label ");
-             print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
+             print_generic_expr (stderr, label, 0);
              fprintf (stderr, " is not first in a sequence of labels in bb %d",
                       bb->index);
              err = 1;
            }
 
-         if (label_to_block (LABEL_EXPR_LABEL (stmt)) != bb)
+         if (label_to_block (label) != bb)
            {
              error ("label ");
-             print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
+             print_generic_expr (stderr, label, 0);
              fprintf (stderr, " to block does not match in bb %d",
                       bb->index);
              err = 1;
            }
 
-         if (decl_function_context (LABEL_EXPR_LABEL (stmt))
-             != current_function_decl)
+         if (decl_function_context (label) != current_function_decl)
            {
              error ("label ");
-             print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
+             print_generic_expr (stderr, label, 0);
              fprintf (stderr, " has incorrect context in bb %d",
                       bb->index);
              err = 1;
@@ -4556,9 +4135,9 @@ tree_verify_flow_info (void)
        }
 
       /* Verify that body of basic block BB is free of control flow.  */
-      for (; !bsi_end_p (bsi); bsi_next (&bsi))
+      for (; !gsi_end_p (gsi); gsi_next (&gsi))
        {
-         tree stmt = bsi_stmt (bsi);
+         gimple stmt = gsi_stmt (gsi);
 
          if (found_ctrl_stmt)
            {
@@ -4570,20 +4149,20 @@ tree_verify_flow_info (void)
          if (stmt_ends_bb_p (stmt))
            found_ctrl_stmt = true;
 
-         if (TREE_CODE (stmt) == LABEL_EXPR)
+         if (gimple_code (stmt) == GIMPLE_LABEL)
            {
              error ("label ");
-             print_generic_expr (stderr, LABEL_EXPR_LABEL (stmt), 0);
+             print_generic_expr (stderr, gimple_label_label (stmt), 0);
              fprintf (stderr, " in the middle of basic block %d", bb->index);
              err = 1;
            }
        }
 
-      bsi = bsi_last (bb);
-      if (bsi_end_p (bsi))
+      gsi = gsi_last_bb (bb);
+      if (gsi_end_p (gsi))
        continue;
 
-      stmt = bsi_stmt (bsi);
+      stmt = gsi_stmt (gsi);
 
       err |= verify_eh_edges (stmt);
 
@@ -4598,37 +4177,30 @@ tree_verify_flow_info (void)
              }
        }
 
-      if (TREE_CODE (stmt) != COND_EXPR)
+      if (gimple_code (stmt) != GIMPLE_COND)
        {
          /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
             after anything else but if statement.  */
          FOR_EACH_EDGE (e, ei, bb->succs)
            if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
              {
-               error ("true/false edge after a non-COND_EXPR in bb %d",
+               error ("true/false edge after a non-GIMPLE_COND in bb %d",
                       bb->index);
                err = 1;
              }
        }
 
-      switch (TREE_CODE (stmt))
+      switch (gimple_code (stmt))
        {
-       case COND_EXPR:
+       case GIMPLE_COND:
          {
            edge true_edge;
            edge false_edge;
   
-           if (COND_EXPR_THEN (stmt) != NULL_TREE
-               || COND_EXPR_ELSE (stmt) != NULL_TREE)
-             {
-               error ("COND_EXPR with code in branches at the end of bb %d",
-                      bb->index);
-               err = 1;
-             }
-
            extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
 
-           if (!true_edge || !false_edge
+           if (!true_edge
+               || !false_edge
                || !(true_edge->flags & EDGE_TRUE_VALUE)
                || !(false_edge->flags & EDGE_FALSE_VALUE)
                || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
@@ -4642,7 +4214,7 @@ tree_verify_flow_info (void)
          }
          break;
 
-       case GOTO_EXPR:
+       case GIMPLE_GOTO:
          if (simple_goto_p (stmt))
            {
              error ("explicit goto at end of bb %d", bb->index);
@@ -4664,7 +4236,7 @@ tree_verify_flow_info (void)
            }
          break;
 
-       case RETURN_EXPR:
+       case GIMPLE_RETURN:
          if (!single_succ_p (bb)
              || (single_succ_edge (bb)->flags
                  & (EDGE_FALLTHRU | EDGE_ABNORMAL
@@ -4681,41 +4253,37 @@ tree_verify_flow_info (void)
            }
          break;
 
-       case SWITCH_EXPR:
+       case GIMPLE_SWITCH:
          {
            tree prev;
            edge e;
            size_t i, n;
-           tree vec;
 
-           vec = SWITCH_LABELS (stmt);
-           n = TREE_VEC_LENGTH (vec);
+           n = gimple_switch_num_labels (stmt);
 
            /* Mark all the destination basic blocks.  */
            for (i = 0; i < n; ++i)
              {
-               tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
+               tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
                basic_block label_bb = label_to_block (lab);
-
                gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
                label_bb->aux = (void *)1;
              }
 
            /* Verify that the case labels are sorted.  */
-           prev = TREE_VEC_ELT (vec, 0);
+           prev = gimple_switch_label (stmt, 0);
            for (i = 1; i < n; ++i)
              {
-               tree c = TREE_VEC_ELT (vec, i);
-               if (! CASE_LOW (c))
+               tree c = gimple_switch_label (stmt, i);
+               if (!CASE_LOW (c))
                  {
-                   if (i != n - 1)
-                     {
-                       error ("found default case not at end of case vector");
-                       err = 1;
-                     }
+                   error ("found default case not at the start of "
+                          "case vector");
+                   err = 1;
                    continue;
                  }
-               if (! tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
+               if (CASE_LOW (prev)
+                   && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
                  {
                    error ("case labels not sorted: ");
                    print_generic_expr (stderr, prev, 0);
@@ -4738,6 +4306,7 @@ tree_verify_flow_info (void)
                           bb->index, e->dest->index);
                    err = 1;
                  }
+
                e->dest->aux = (void *)2;
                if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
                                 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
@@ -4751,13 +4320,12 @@ tree_verify_flow_info (void)
            /* Check that we have all of them.  */
            for (i = 0; i < n; ++i)
              {
-               tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
+               tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
                basic_block label_bb = label_to_block (lab);
 
                if (label_bb->aux != (void *)2)
                  {
-                   error ("missing edge %i->%i",
-                          bb->index, label_bb->index);
+                   error ("missing edge %i->%i", bb->index, label_bb->index);
                    err = 1;
                  }
              }
@@ -4781,12 +4349,13 @@ tree_verify_flow_info (void)
    by edge FALLTHRU.  */
 
 static void
-tree_make_forwarder_block (edge fallthru)
+gimple_make_forwarder_block (edge fallthru)
 {
   edge e;
   edge_iterator ei;
   basic_block dummy, bb;
-  tree phi, new_phi, var;
+  tree var;
+  gimple_stmt_iterator gsi;
 
   dummy = fallthru->src;
   bb = fallthru->dest;
@@ -4796,18 +4365,18 @@ tree_make_forwarder_block (edge fallthru)
 
   /* If we redirected a branch we must create new PHI nodes at the
      start of BB.  */
-  for (phi = phi_nodes (dummy); phi; phi = PHI_CHAIN (phi))
+  for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
     {
-      var = PHI_RESULT (phi);
+      gimple phi, new_phi;
+
+      phi = gsi_stmt (gsi);
+      var = gimple_phi_result (phi);
       new_phi = create_phi_node (var, bb);
       SSA_NAME_DEF_STMT (var) = new_phi;
-      SET_PHI_RESULT (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
-      add_phi_arg (new_phi, PHI_RESULT (phi), fallthru);
+      gimple_phi_set_result (phi, make_ssa_name (SSA_NAME_VAR (var), phi));
+      add_phi_arg (new_phi, gimple_phi_result (phi), fallthru);
     }
 
-  /* Ensure that the PHI node chain is in the same order.  */
-  set_phi_nodes (bb, phi_reverse (phi_nodes (bb)));
-
   /* Add the arguments we have stored on edges.  */
   FOR_EACH_EDGE (e, ei, bb->preds)
     {
@@ -4823,29 +4392,30 @@ tree_make_forwarder_block (edge fallthru)
    Create one if it doesn't exist.  */
 
 tree
-tree_block_label (basic_block bb)
+gimple_block_label (basic_block bb)
 {
-  block_stmt_iterator i, s = bsi_start (bb);
+  gimple_stmt_iterator i, s = gsi_start_bb (bb);
   bool first = true;
-  tree label, stmt;
+  tree label;
+  gimple stmt;
 
-  for (i = s; !bsi_end_p (i); first = false, bsi_next (&i))
+  for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
     {
-      stmt = bsi_stmt (i);
-      if (TREE_CODE (stmt) != LABEL_EXPR)
+      stmt = gsi_stmt (i);
+      if (gimple_code (stmt) != GIMPLE_LABEL)
        break;
-      label = LABEL_EXPR_LABEL (stmt);
+      label = gimple_label_label (stmt);
       if (!DECL_NONLOCAL (label))
        {
          if (!first)
-           bsi_move_before (&i, &s);
+           gsi_move_before (&i, &s);
          return label;
        }
     }
 
   label = create_artificial_label ();
-  stmt = build1 (LABEL_EXPR, void_type_node, label);
-  bsi_insert_before (&s, stmt, BSI_NEW_STMT);
+  stmt = gimple_build_label (label);
+  gsi_insert_before (&s, stmt, GSI_NEW_STMT);
   return label;
 }
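
gimple_block_label builds labels as statements: create_artificial_label produces the LABEL_DECL, gimple_build_label wraps it in a GIMPLE_LABEL, and gsi_insert_before places it at the start of the block. A short sketch of the same construction, assuming those interfaces; add_fresh_label is an illustrative name only:

/* Illustrative sketch: prepend a fresh artificial label to BB and
   return its LABEL_DECL.  */
static tree
add_fresh_label (basic_block bb)
{
  gimple_stmt_iterator gsi = gsi_start_bb (bb);
  tree label = create_artificial_label ();
  gimple stmt = gimple_build_label (label);

  gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
  return label;
}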
 
@@ -4857,11 +4427,11 @@ tree_block_label (basic_block bb)
    redirect_edge_and_branch.  */
 
 static edge
-tree_try_redirect_by_replacing_jump (edge e, basic_block target)
+gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
 {
   basic_block src = e->src;
-  block_stmt_iterator b;
-  tree stmt;
+  gimple_stmt_iterator i;
+  gimple stmt;
 
   /* We can replace or remove a complex jump only when we have exactly
      two edges.  */
@@ -4871,15 +4441,15 @@ tree_try_redirect_by_replacing_jump (edge e, basic_block target)
       || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
     return NULL;
 
-  b = bsi_last (src);
-  if (bsi_end_p (b))
+  i = gsi_last_bb (src);
+  if (gsi_end_p (i))
     return NULL;
-  stmt = bsi_stmt (b);
 
-  if (TREE_CODE (stmt) == COND_EXPR
-      || TREE_CODE (stmt) == SWITCH_EXPR)
+  stmt = gsi_stmt (i);
+
+  if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
     {
-      bsi_remove (&b, true);
+      gsi_remove (&i, true);
       e = ssa_redirect_edge (e, target);
       e->flags = EDGE_FALLTHRU;
       return e;
@@ -4893,41 +4463,41 @@ tree_try_redirect_by_replacing_jump (edge e, basic_block target)
    edge representing the redirected branch.  */
 
 static edge
-tree_redirect_edge_and_branch (edge e, basic_block dest)
+gimple_redirect_edge_and_branch (edge e, basic_block dest)
 {
   basic_block bb = e->src;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   edge ret;
-  tree stmt;
+  gimple stmt;
 
   if (e->flags & EDGE_ABNORMAL)
     return NULL;
 
   if (e->src != ENTRY_BLOCK_PTR
-      && (ret = tree_try_redirect_by_replacing_jump (e, dest)))
+      && (ret = gimple_try_redirect_by_replacing_jump (e, dest)))
     return ret;
 
   if (e->dest == dest)
     return NULL;
 
-  bsi = bsi_last (bb);
-  stmt = bsi_end_p (bsi) ? NULL : bsi_stmt (bsi);
+  gsi = gsi_last_bb (bb);
+  stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
 
-  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
+  switch (stmt ? gimple_code (stmt) : ERROR_MARK)
     {
-    case COND_EXPR:
+    case GIMPLE_COND:
       /* For COND_EXPR, we only need to redirect the edge.  */
       break;
 
-    case GOTO_EXPR:
+    case GIMPLE_GOTO:
       /* No non-abnormal edges should lead from a non-simple goto, and
         simple ones should be represented implicitly.  */
       gcc_unreachable ();
 
-    case SWITCH_EXPR:
+    case GIMPLE_SWITCH:
       {
+       tree label = gimple_block_label (dest);
         tree cases = get_cases_for_edge (e, stmt);
-       tree label = tree_block_label (dest);
 
        /* If we have a list of cases associated with E, then use it
           as it's a lot faster than walking the entire case vector.  */
@@ -4956,13 +4526,11 @@ tree_redirect_edge_and_branch (edge e, basic_block dest)
          }
        else
          {
-           tree vec = SWITCH_LABELS (stmt);
-           size_t i, n = TREE_VEC_LENGTH (vec);
+           size_t i, n = gimple_switch_num_labels (stmt);
 
            for (i = 0; i < n; i++)
              {
-               tree elt = TREE_VEC_ELT (vec, i);
-
+               tree elt = gimple_switch_label (stmt, i);
                if (label_to_block (CASE_LABEL (elt)) == e->dest)
                  CASE_LABEL (elt) = label;
              }
@@ -4971,15 +4539,15 @@ tree_redirect_edge_and_branch (edge e, basic_block dest)
        break;
       }
 
-    case RETURN_EXPR:
-      bsi_remove (&bsi, true);
+    case GIMPLE_RETURN:
+      gsi_remove (&gsi, true);
       e->flags |= EDGE_FALLTHRU;
       break;
 
-    case OMP_RETURN:
-    case OMP_CONTINUE:
-    case OMP_SECTIONS_SWITCH:
-    case OMP_FOR:
+    case GIMPLE_OMP_RETURN:
+    case GIMPLE_OMP_CONTINUE:
+    case GIMPLE_OMP_SECTIONS_SWITCH:
+    case GIMPLE_OMP_FOR:
       /* The edges from OMP constructs can be simply redirected.  */
       break;
 
@@ -5002,7 +4570,7 @@ tree_redirect_edge_and_branch (edge e, basic_block dest)
    it to the destination of the other edge from E->src.  */
 
 static bool
-tree_can_remove_branch_p (const_edge e)
+gimple_can_remove_branch_p (const_edge e)
 {
   if (e->flags & EDGE_ABNORMAL)
     return false;
@@ -5013,9 +4581,9 @@ tree_can_remove_branch_p (const_edge e)
 /* Simple wrapper, as we can always redirect fallthru edges.  */
 
 static basic_block
-tree_redirect_edge_and_branch_force (edge e, basic_block dest)
+gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
 {
-  e = tree_redirect_edge_and_branch (e, dest);
+  e = gimple_redirect_edge_and_branch (e, dest);
   gcc_assert (e);
 
   return NULL;
@@ -5026,11 +4594,12 @@ tree_redirect_edge_and_branch_force (edge e, basic_block dest)
    labels).  If STMT is NULL, BB is split just after the labels.  */
 
 static basic_block
-tree_split_block (basic_block bb, void *stmt)
+gimple_split_block (basic_block bb, void *stmt)
 {
-  block_stmt_iterator bsi;
-  tree_stmt_iterator tsi_tgt;
-  tree act, list;
+  gimple_stmt_iterator gsi;
+  gimple_stmt_iterator gsi_tgt;
+  gimple act;
+  gimple_seq list;
   basic_block new_bb;
   edge e;
   edge_iterator ei;
@@ -5043,14 +4612,14 @@ tree_split_block (basic_block bb, void *stmt)
   FOR_EACH_EDGE (e, ei, new_bb->succs)
     e->src = new_bb;
 
-  if (stmt && TREE_CODE ((tree) stmt) == LABEL_EXPR)
+  if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
     stmt = NULL;
 
-  /* Move everything from BSI to the new basic block.  */
-  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+  /* Move everything from GSI to the new basic block.  */
+  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
     {
-      act = bsi_stmt (bsi);
-      if (TREE_CODE (act) == LABEL_EXPR)
+      act = gsi_stmt (gsi);
+      if (gimple_code (act) == GIMPLE_LABEL)
        continue;
 
       if (!stmt)
@@ -5058,23 +4627,23 @@ tree_split_block (basic_block bb, void *stmt)
 
       if (stmt == act)
        {
-         bsi_next (&bsi);
+         gsi_next (&gsi);
          break;
        }
     }
 
-  if (bsi_end_p (bsi))
+  if (gsi_end_p (gsi))
     return new_bb;
 
   /* Split the statement list - avoid re-creating new containers as this
      brings ugly quadratic memory consumption in the inliner.  
      (We are still quadratic since we need to update stmt BB pointers,
      sadly.)  */
-  list = tsi_split_statement_list_before (&bsi.tsi);
-  set_bb_stmt_list (new_bb, list);
-  for (tsi_tgt = tsi_start (list);
-       !tsi_end_p (tsi_tgt); tsi_next (&tsi_tgt))
-    change_bb_for_stmt (tsi_stmt (tsi_tgt), new_bb);
+  list = gsi_split_seq_before (&gsi);
+  set_bb_seq (new_bb, list);
+  for (gsi_tgt = gsi_start (list);
+       !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
+    gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
 
   return new_bb;
 }
@@ -5083,7 +4652,7 @@ tree_split_block (basic_block bb, void *stmt)
 /* Moves basic block BB after block AFTER.  */
 
 static bool
-tree_move_block_after (basic_block bb, basic_block after)
+gimple_move_block_after (basic_block bb, basic_block after)
 {
   if (bb->prev_bb == after)
     return true;
@@ -5098,52 +4667,49 @@ tree_move_block_after (basic_block bb, basic_block after)
 /* Return true if basic_block can be duplicated.  */
 
 static bool
-tree_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
+gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
 {
   return true;
 }
 
-
 /* Create a duplicate of the basic block BB.  NOTE: This does not
    preserve SSA form.  */
 
 static basic_block
-tree_duplicate_bb (basic_block bb)
+gimple_duplicate_bb (basic_block bb)
 {
   basic_block new_bb;
-  block_stmt_iterator bsi, bsi_tgt;
-  tree phi;
+  gimple_stmt_iterator gsi, gsi_tgt;
+  gimple_seq phis = phi_nodes (bb);
+  gimple phi, stmt, copy;
 
   new_bb = create_empty_bb (EXIT_BLOCK_PTR->prev_bb);
 
   /* Copy the PHI nodes.  We ignore PHI node arguments here because
      the incoming edges have not been setup yet.  */
-  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+  for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
     {
-      tree copy = create_phi_node (PHI_RESULT (phi), new_bb);
-      create_new_def_for (PHI_RESULT (copy), copy, PHI_RESULT_PTR (copy));
+      phi = gsi_stmt (gsi);
+      copy = create_phi_node (gimple_phi_result (phi), new_bb);
+      create_new_def_for (gimple_phi_result (copy), copy,
+                         gimple_phi_result_ptr (copy));
     }
 
-  /* Keep the chain of PHI nodes in the same order so that they can be
-     updated by ssa_redirect_edge.  */
-  set_phi_nodes (new_bb, phi_reverse (phi_nodes (new_bb)));
-
-  bsi_tgt = bsi_start (new_bb);
-  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+  gsi_tgt = gsi_start_bb (new_bb);
+  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
     {
       def_operand_p def_p;
       ssa_op_iter op_iter;
-      tree stmt, copy;
       int region;
 
-      stmt = bsi_stmt (bsi);
-      if (TREE_CODE (stmt) == LABEL_EXPR)
+      stmt = gsi_stmt (gsi);
+      if (gimple_code (stmt) == GIMPLE_LABEL)
        continue;
 
       /* Create a new copy of STMT and duplicate STMT's virtual
         operands.  */
-      copy = unshare_expr (stmt);
-      bsi_insert_after (&bsi_tgt, copy, BSI_NEW_STMT);
+      copy = gimple_copy (stmt);
+      gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
       copy_virtual_operands (copy, stmt);
       region = lookup_stmt_eh_region (stmt);
       if (region >= 0)
@@ -5167,9 +4733,11 @@ add_phi_args_after_copy_edge (edge e_copy)
   basic_block bb, bb_copy = e_copy->src, dest;
   edge e;
   edge_iterator ei;
-  tree phi, phi_copy, phi_next, def;
+  gimple phi, phi_copy;
+  tree def;
+  gimple_stmt_iterator psi, psi_copy;
 
-  if (!phi_nodes (e_copy->dest))
+  if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
     return;
 
   bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
@@ -5195,11 +4763,13 @@ add_phi_args_after_copy_edge (edge e_copy)
       gcc_assert (e != NULL);
     }
 
-  for (phi = phi_nodes (e->dest), phi_copy = phi_nodes (e_copy->dest);
-       phi;
-       phi = phi_next, phi_copy = PHI_CHAIN (phi_copy))
+  for (psi = gsi_start_phis (e->dest),
+       psi_copy = gsi_start_phis (e_copy->dest);
+       !gsi_end_p (psi);
+       gsi_next (&psi), gsi_next (&psi_copy))
     {
-      phi_next = PHI_CHAIN (phi);
+      phi = gsi_stmt (psi);
+      phi_copy = gsi_stmt (psi_copy);
       def = PHI_ARG_DEF_FROM_EDGE (phi, e);
       add_phi_arg (phi_copy, def, e_copy);
     }
@@ -5213,8 +4783,8 @@ add_phi_args_after_copy_edge (edge e_copy)
 void
 add_phi_args_after_copy_bb (basic_block bb_copy)
 {
-  edge_iterator ei;
   edge e_copy;
+  edge_iterator ei;
 
   FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
     {
@@ -5256,7 +4826,7 @@ add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
    true otherwise.  */
 
 bool
-tree_duplicate_sese_region (edge entry, edge exit,
+gimple_duplicate_sese_region (edge entry, edge exit,
                            basic_block *region, unsigned n_region,
                            basic_block *region_copy)
 {
@@ -5422,9 +4992,9 @@ tree_duplicate_sese_region (edge entry, edge exit,
 */
 
 bool
-tree_duplicate_sese_tail (edge entry, edge exit,
-                         basic_block *region, unsigned n_region,
-                         basic_block *region_copy)
+gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
+                         basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
+                         basic_block *region_copy ATTRIBUTE_UNUSED)
 {
   unsigned i;
   bool free_region_copy = false;
@@ -5435,8 +5005,8 @@ tree_duplicate_sese_tail (edge entry, edge exit,
   int total_freq = 0, exit_freq = 0;
   gcov_type total_count = 0, exit_count = 0;
   edge exits[2], nexits[2], e;
-  block_stmt_iterator bsi;
-  tree cond;
+  gimple_stmt_iterator gsi;
+  gimple cond_stmt;
   edge sorig, snew;
 
   gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
@@ -5525,10 +5095,13 @@ tree_duplicate_sese_tail (edge entry, edge exit,
     switch_bb = split_edge (entry);
   set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
 
-  bsi = bsi_last (switch_bb);
-  cond = last_stmt (exit->src);
-  gcc_assert (TREE_CODE (cond) == COND_EXPR);
-  bsi_insert_after (&bsi, unshare_expr (cond), BSI_NEW_STMT);
+  gsi = gsi_last_bb (switch_bb);
+  cond_stmt = last_stmt (exit->src);
+  gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
+  cond_stmt = gimple_copy (cond_stmt);
+  gimple_cond_set_lhs (cond_stmt, unshare_expr (gimple_cond_lhs (cond_stmt)));
+  gimple_cond_set_rhs (cond_stmt, unshare_expr (gimple_cond_rhs (cond_stmt)));
+  gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
 
   sorig = single_succ_edge (switch_bb);
   sorig->flags = exits[1]->flags;
@@ -5543,9 +5116,9 @@ tree_duplicate_sese_tail (edge entry, edge exit,
   /* Get rid of now superfluous conditions and associated edges (and phi node
      arguments).  */
   e = redirect_edge_and_branch (exits[0], exits[1]->dest);
-  PENDING_STMT (e) = NULL_TREE;
+  PENDING_STMT (e) = NULL;
   e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
-  PENDING_STMT (e) = NULL_TREE;
+  PENDING_STMT (e) = NULL;
 
   /* Anything that is outside of the region, but was dominated by something
      inside needs to update dominance info.  */
@@ -5562,11 +5135,6 @@ tree_duplicate_sese_tail (edge entry, edge exit,
   return true;
 }
 
-/*
-DEF_VEC_P(basic_block);
-DEF_VEC_ALLOC_P(basic_block,heap);
-*/
-
 /* Add all the blocks dominated by ENTRY to the array BBS_P.  Stop
    adding blocks when the dominator traversal reaches EXIT.  This
    function silently assumes that ENTRY strictly dominates EXIT.  */
@@ -5627,6 +5195,7 @@ replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
   *tp = new_t;
 }
 
+
 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
    VARS_MAP maps old ssa names and var_decls to the new ones.  */
 
@@ -5679,44 +5248,16 @@ struct move_stmt_d
    DECL_CONTEXT of every local variable referenced in *TP.  */
 
 static tree
-move_stmt_r (tree *tp, int *walk_subtrees, void *data)
+move_stmt_op (tree *tp, int *walk_subtrees, void *data)
 {
-  struct move_stmt_d *p = (struct move_stmt_d *) data;
+  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
+  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
   tree t = *tp;
 
-  if (EXPR_P (t) || GIMPLE_STMT_P (t))
-    {
-      tree block = TREE_BLOCK (t);
-      if (p->orig_block == NULL_TREE
-         || block == p->orig_block
-         || block == NULL_TREE)
-       TREE_BLOCK (t) = p->new_block;
-#ifdef ENABLE_CHECKING
-      else if (block != p->new_block)
-       {
-         while (block && block != p->orig_block)
-           block = BLOCK_SUPERCONTEXT (block);
-         gcc_assert (block);
-       }
-#endif
-    }
-
-  if (OMP_DIRECTIVE_P (t)
-      && TREE_CODE (t) != OMP_RETURN
-      && TREE_CODE (t) != OMP_CONTINUE)
-    {
-      /* Do not remap variables inside OMP directives.  Variables
-        referenced in clauses and directive header belong to the
-        parent function and should not be moved into the child
-        function.  */
-      bool save_remap_decls_p = p->remap_decls_p;
-      p->remap_decls_p = false;
-      *walk_subtrees = 0;
+  if (EXPR_P (t))
+    /* We should never have TREE_BLOCK set on non-statements.  */
+    gcc_assert (!TREE_BLOCK (t));
 
-      walk_tree (&OMP_BODY (t), move_stmt_r, p, NULL);
-
-      p->remap_decls_p = save_remap_decls_p;
-    }
   else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
     {
       if (TREE_CODE (t) == SSA_NAME)
@@ -5764,20 +5305,67 @@ move_stmt_r (tree *tp, int *walk_subtrees, void *data)
   return NULL_TREE;
 }
 
+/* Like move_stmt_op, but for gimple statements.
+
+   Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
+   contained in the current statement in *GSI_P and change the
+   DECL_CONTEXT of every local variable referenced in the current
+   statement.  */
+
+static tree
+move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
+            struct walk_stmt_info *wi)
+{
+  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
+  gimple stmt = gsi_stmt (*gsi_p);
+  tree block = gimple_block (stmt);
+
+  if (p->orig_block == NULL_TREE
+      || block == p->orig_block
+      || block == NULL_TREE)
+    gimple_set_block (stmt, p->new_block);
+#ifdef ENABLE_CHECKING
+  else if (block != p->new_block)
+    {
+      while (block && block != p->orig_block)
+       block = BLOCK_SUPERCONTEXT (block);
+      gcc_assert (block);
+    }
+#endif
+
+  if (is_gimple_omp (stmt)
+      && gimple_code (stmt) != GIMPLE_OMP_RETURN
+      && gimple_code (stmt) != GIMPLE_OMP_CONTINUE)
+    {
+      /* Do not remap variables inside OMP directives.  Variables
+        referenced in clauses and directive header belong to the
+        parent function and should not be moved into the child
+        function.  */
+      bool save_remap_decls_p = p->remap_decls_p;
+      p->remap_decls_p = false;
+      *handled_ops_p = true;
+
+      walk_gimple_seq (gimple_omp_body (stmt), move_stmt_r, move_stmt_op, wi);
+
+      p->remap_decls_p = save_remap_decls_p;
+    }
+
+  return NULL_TREE;
+}
+
 /* Marks virtual operands of all statements in basic blocks BBS for
    renaming.  */
 
 void
 mark_virtual_ops_in_bb (basic_block bb)
 {
-  tree phi;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
 
-  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
-    mark_virtual_ops_for_renaming (phi);
+  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+    mark_virtual_ops_for_renaming (gsi_stmt (gsi));
 
-  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
-    mark_virtual_ops_for_renaming (bsi_stmt (bsi));
+  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+    mark_virtual_ops_for_renaming (gsi_stmt (gsi));
 }
 
 /* Marks virtual operands of all statements in basic blocks BBS for
@@ -5811,9 +5399,8 @@ move_block_to_fn (struct function *dest_cfun, basic_block bb,
   struct control_flow_graph *cfg;
   edge_iterator ei;
   edge e;
-  block_stmt_iterator si;
+  gimple_stmt_iterator si;
   unsigned old_len, new_len;
-  tree phi, next_phi;
 
   /* Remove BB from dominance structures.  */
   delete_from_dominance_info (CDI_DOMINATORS, bb);
@@ -5853,18 +5440,18 @@ move_block_to_fn (struct function *dest_cfun, basic_block bb,
                bb->index, bb);
 
   /* Remap the variables in phi nodes.  */
-  for (phi = phi_nodes (bb); phi; phi = next_phi)
+  for (si = gsi_start_phis (bb); !gsi_end_p (si); )
     {
+      gimple phi = gsi_stmt (si);
       use_operand_p use;
       tree op = PHI_RESULT (phi);
       ssa_op_iter oi;
 
-      next_phi = PHI_CHAIN (phi);
       if (!is_gimple_reg (op))
        {
          /* Remove the phi nodes for virtual operands (alias analysis will be
             run for the new function, anyway).  */
-          remove_phi_node (phi, NULL, true);
+          remove_phi_node (&si, true);
          continue;
        }
 
@@ -5876,18 +5463,23 @@ move_block_to_fn (struct function *dest_cfun, basic_block bb,
          if (TREE_CODE (op) == SSA_NAME)
            SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
        }
+
+      gsi_next (&si);
     }
 
-  for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
+  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
     {
-      tree stmt = bsi_stmt (si);
+      gimple stmt = gsi_stmt (si);
       int region;
+      struct walk_stmt_info wi;
 
-      walk_tree (&stmt, move_stmt_r, d, NULL);
+      memset (&wi, 0, sizeof (wi));
+      wi.info = d;
+      walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
 
-      if (TREE_CODE (stmt) == LABEL_EXPR)
+      if (gimple_code (stmt) == GIMPLE_LABEL)
        {
-         tree label = LABEL_EXPR_LABEL (stmt);
+         tree label = gimple_label_label (stmt);
          int uid = LABEL_DECL_UID (label);
 
          gcc_assert (uid > -1);
@@ -5908,11 +5500,8 @@ move_block_to_fn (struct function *dest_cfun, basic_block bb,
          if (uid >= dest_cfun->cfg->last_label_uid)
            dest_cfun->cfg->last_label_uid = uid + 1;
        }
-      else if (TREE_CODE (stmt) == RESX_EXPR && eh_offset != 0)
-       TREE_OPERAND (stmt, 0) =
-         build_int_cst (NULL_TREE,
-                        TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0))
-                        + eh_offset);
+      else if (gimple_code (stmt) == GIMPLE_RESX && eh_offset != 0)
+       gimple_resx_set_region (stmt, gimple_resx_region (stmt) + eh_offset);
 
       region = lookup_stmt_eh_region (stmt);
       if (region >= 0)
@@ -5939,15 +5528,15 @@ static int
 find_outermost_region_in_block (struct function *src_cfun,
                                basic_block bb, int region)
 {
-  block_stmt_iterator si;
+  gimple_stmt_iterator si;
 
-  for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
+  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
     {
-      tree stmt = bsi_stmt (si);
+      gimple stmt = gsi_stmt (si);
       int stmt_region;
 
-      if (TREE_CODE (stmt) == RESX_EXPR)
-       stmt_region = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
+      if (gimple_code (stmt) == GIMPLE_RESX)
+       stmt_region = gimple_resx_region (stmt);
       else
        stmt_region = lookup_stmt_eh_region_fn (src_cfun, stmt);
       if (stmt_region > 0)
@@ -6242,7 +5831,8 @@ move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
 }
 
 
-/* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree.h)  */
+/* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in tree-pass.h)
+   */
 
 void
 dump_function_to_file (tree fn, FILE *file, int flags)
@@ -6276,7 +5866,7 @@ dump_function_to_file (tree fn, FILE *file, int flags)
   if (dsf && (flags & TDF_DETAILS))
     dump_eh_tree (file, dsf);
 
-  if (flags & TDF_RAW)
+  if (flags & TDF_RAW && !gimple_body (fn))
     {
       dump_node (fn, TDF_SLIM | flags, file);
       return;
@@ -6307,7 +5897,7 @@ dump_function_to_file (tree fn, FILE *file, int flags)
 
   if (cfun && cfun->decl == fn && cfun->cfg && basic_block_info)
     {
-      /* Make a CFG based dump.  */
+      /* If the CFG has been built, emit a CFG-based dump.  */
       check_bb_profile (ENTRY_BLOCK_PTR, file);
       if (!ignore_topmost_bind)
        fprintf (file, "{\n");
@@ -6316,11 +5906,34 @@ dump_function_to_file (tree fn, FILE *file, int flags)
        fprintf (file, "\n");
 
       FOR_EACH_BB (bb)
-       dump_generic_bb (file, bb, 2, flags);
+       gimple_dump_bb (bb, file, 2, flags);
 
       fprintf (file, "}\n");
       check_bb_profile (EXIT_BLOCK_PTR, file);
     }
+  else if (DECL_SAVED_TREE (fn) == NULL)
+    {
+      /* The function is now in GIMPLE form but the CFG has not been
+        built yet.  Emit the single sequence of GIMPLE statements
+        that make up its body.  */
+      gimple_seq body = gimple_body (fn);
+
+      if (gimple_seq_first_stmt (body)
+         && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
+         && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
+       print_gimple_seq (file, body, 0, flags);
+      else
+       {
+         if (!ignore_topmost_bind)
+           fprintf (file, "{\n");
+
+         if (any_var)
+           fprintf (file, "\n");
+
+         print_gimple_seq (file, body, 2, flags);
+         fprintf (file, "}\n");
+       }
+    }
   else
     {
       int indent;
@@ -6417,7 +6030,7 @@ print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
   if (verbosity >= 3)
     {
       fprintf (file, "%s  {\n", s_indent);
-      tree_dump_bb (bb, file, indent + 4);
+      gimple_dump_bb (bb, file, indent + 4, TDF_VOPS|TDF_MEMSYMS);
       fprintf (file, "%s  }\n", s_indent);
     }
 }
@@ -6531,10 +6144,10 @@ debug_loop_num (unsigned num, int verbosity)
    otherwise.  */
 
 static bool
-tree_block_ends_with_call_p (basic_block bb)
+gimple_block_ends_with_call_p (basic_block bb)
 {
-  block_stmt_iterator bsi = bsi_last (bb);
-  return get_call_expr_in (bsi_stmt (bsi)) != NULL;
+  gimple_stmt_iterator gsi = gsi_last_bb (bb);
+  return is_gimple_call (gsi_stmt (gsi));
 }
 
 
@@ -6542,23 +6155,21 @@ tree_block_ends_with_call_p (basic_block bb)
    otherwise.  */
 
 static bool
-tree_block_ends_with_condjump_p (const_basic_block bb)
+gimple_block_ends_with_condjump_p (const_basic_block bb)
 {
-  /* This CONST_CAST is okay because last_stmt doesn't modify its
-     argument and the return value is not modified.  */
-  const_tree stmt = last_stmt (CONST_CAST_BB(bb));
-  return (stmt && TREE_CODE (stmt) == COND_EXPR);
+  gimple stmt = last_stmt (CONST_CAST_BB (bb));
+  return (stmt && gimple_code (stmt) == GIMPLE_COND);
 }
 
 
 /* Return true if we need to add fake edge to exit at statement T.
-   Helper function for tree_flow_call_edges_add.  */
+   Helper function for gimple_flow_call_edges_add.  */
 
 static bool
-need_fake_edge_p (tree t)
+need_fake_edge_p (gimple t)
 {
-  tree call, fndecl = NULL_TREE;
-  int call_flags;
+  tree fndecl = NULL_TREE;
+  int call_flags = 0;
 
   /* NORETURN and LONGJMP calls already have an edge to exit.
      CONST and PURE calls do not need one.
@@ -6567,24 +6178,26 @@ need_fake_edge_p (tree t)
      figured out from the RTL in mark_constant_function, and
      the counter incrementation code from -fprofile-arcs
      leads to different results from -fbranch-probabilities.  */
-  call = get_call_expr_in (t);
-  if (call)
+  if (is_gimple_call (t))
     {
-      fndecl = get_callee_fndecl (call);
-      call_flags = call_expr_flags (call);
+      fndecl = gimple_call_fndecl (t);
+      call_flags = gimple_call_flags (t);
     }
 
-  if (call && fndecl && DECL_BUILT_IN (fndecl)
+  if (is_gimple_call (t)
+      && fndecl
+      && DECL_BUILT_IN (fndecl)
       && (call_flags & ECF_NOTHROW)
       && !(call_flags & ECF_NORETURN)
       && !(call_flags & ECF_RETURNS_TWICE))
    return false;
 
-  if (call && !(call_flags & ECF_NORETURN))
+  if (is_gimple_call (t)
+      && !(call_flags & ECF_NORETURN))
     return true;
 
-  if (TREE_CODE (t) == ASM_EXPR
-       && (ASM_VOLATILE_P (t) || ASM_INPUT_P (t)))
+  if (gimple_code (t) == GIMPLE_ASM
+       && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
     return true;
 
   return false;
@@ -6600,7 +6213,7 @@ need_fake_edge_p (tree t)
    not imply that all subsequent instructions must be executed.  */
 
 static int
-tree_flow_call_edges_add (sbitmap blocks)
+gimple_flow_call_edges_add (sbitmap blocks)
 {
   int i;
   int blocks_split = 0;
@@ -6630,10 +6243,11 @@ tree_flow_call_edges_add (sbitmap blocks)
   if (check_last_block)
     {
       basic_block bb = EXIT_BLOCK_PTR->prev_bb;
-      block_stmt_iterator bsi = bsi_last (bb);
-      tree t = NULL_TREE;
-      if (!bsi_end_p (bsi))
-       t = bsi_stmt (bsi);
+      gimple_stmt_iterator gsi = gsi_last_bb (bb);
+      gimple t = NULL;
+
+      if (!gsi_end_p (gsi))
+       t = gsi_stmt (gsi);
 
       if (t && need_fake_edge_p (t))
        {
@@ -6642,8 +6256,8 @@ tree_flow_call_edges_add (sbitmap blocks)
          e = find_edge (bb, EXIT_BLOCK_PTR);
          if (e)
            {
-             bsi_insert_on_edge (e, build_empty_stmt ());
-             bsi_commit_edge_inserts ();
+             gsi_insert_on_edge (e, gimple_build_nop ());
+             gsi_commit_edge_inserts ();
            }
        }
     }
@@ -6654,8 +6268,8 @@ tree_flow_call_edges_add (sbitmap blocks)
   for (i = 0; i < last_bb; i++)
     {
       basic_block bb = BASIC_BLOCK (i);
-      block_stmt_iterator bsi;
-      tree stmt, last_stmt;
+      gimple_stmt_iterator gsi;
+      gimple stmt, last_stmt;
 
       if (!bb)
        continue;
@@ -6663,16 +6277,17 @@ tree_flow_call_edges_add (sbitmap blocks)
       if (blocks && !TEST_BIT (blocks, i))
        continue;
 
-      bsi = bsi_last (bb);
-      if (!bsi_end_p (bsi))
+      gsi = gsi_last_bb (bb);
+      if (!gsi_end_p (gsi))
        {
-         last_stmt = bsi_stmt (bsi);
+         last_stmt = gsi_stmt (gsi);
          do
            {
-             stmt = bsi_stmt (bsi);
+             stmt = gsi_stmt (gsi);
              if (need_fake_edge_p (stmt))
                {
                  edge e;
+
                  /* The handling above of the final block before the
                     epilogue should be enough to verify that there is
                     no edge to the exit block in CFG already.
@@ -6696,9 +6311,9 @@ tree_flow_call_edges_add (sbitmap blocks)
                    }
                  make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
                }
-             bsi_prev (&bsi);
+             gsi_prev (&gsi);
            }
-         while (!bsi_end_p (bsi));
+         while (!gsi_end_p (gsi));
        }
     }
 
@@ -6711,17 +6326,17 @@ tree_flow_call_edges_add (sbitmap blocks)
 /* Purge dead abnormal call edges from basic block BB.  */
 
 bool
-tree_purge_dead_abnormal_call_edges (basic_block bb)
+gimple_purge_dead_abnormal_call_edges (basic_block bb)
 {
-  bool changed = tree_purge_dead_eh_edges (bb);
+  bool changed = gimple_purge_dead_eh_edges (bb);
 
   if (cfun->has_nonlocal_label)
     {
-      tree stmt = last_stmt (bb);
+      gimple stmt = last_stmt (bb);
       edge_iterator ei;
       edge e;
 
-      if (!(stmt && tree_can_make_abnormal_goto (stmt)))
+      if (!(stmt && stmt_can_make_abnormal_goto (stmt)))
        for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
          {
            if (e->flags & EDGE_ABNORMAL)
@@ -6733,7 +6348,7 @@ tree_purge_dead_abnormal_call_edges (basic_block bb)
              ei_next (&ei);
          }
 
-      /* See tree_purge_dead_eh_edges below.  */
+      /* See gimple_purge_dead_eh_edges below.  */
       if (changed)
        free_dominance_info (CDI_DOMINATORS);
     }
@@ -6880,14 +6495,14 @@ remove_edge_and_dominated_blocks (edge e)
 /* Purge dead EH edges from basic block BB.  */
 
 bool
-tree_purge_dead_eh_edges (basic_block bb)
+gimple_purge_dead_eh_edges (basic_block bb)
 {
   bool changed = false;
   edge e;
   edge_iterator ei;
-  tree stmt = last_stmt (bb);
+  gimple stmt = last_stmt (bb);
 
-  if (stmt && tree_can_throw_internal (stmt))
+  if (stmt && stmt_can_throw_internal (stmt))
     return false;
 
   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
@@ -6905,7 +6520,7 @@ tree_purge_dead_eh_edges (basic_block bb)
 }
 
 bool
-tree_purge_all_dead_eh_edges (const_bitmap blocks)
+gimple_purge_all_dead_eh_edges (const_bitmap blocks)
 {
   bool changed = false;
   unsigned i;
@@ -6913,7 +6528,7 @@ tree_purge_all_dead_eh_edges (const_bitmap blocks)
 
   EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
     {
-      changed |= tree_purge_dead_eh_edges (BASIC_BLOCK (i));
+      changed |= gimple_purge_dead_eh_edges (BASIC_BLOCK (i));
     }
 
   return changed;
@@ -6923,7 +6538,7 @@ tree_purge_all_dead_eh_edges (const_bitmap blocks)
    redirected.  */
 
 static void
-tree_execute_on_growing_pred (edge e)
+gimple_execute_on_growing_pred (edge e)
 {
   basic_block bb = e->dest;
 
@@ -6935,7 +6550,7 @@ tree_execute_on_growing_pred (edge e)
    the edge vector E->dest->preds.  */
 
 static void
-tree_execute_on_shrinking_pred (edge e)
+gimple_execute_on_shrinking_pred (edge e)
 {
   if (phi_nodes (e->dest))
     remove_phi_args (e);
@@ -6951,14 +6566,15 @@ tree_execute_on_shrinking_pred (edge e)
    on the edge by split_edge(). Later, additional edge 'e' was created to
    connect 'new_head' and 'first'. Now this routine adds phi args on this
    additional edge 'e' that new_head to second edge received as part of edge
-   splitting.
-*/
+   splitting.  */
 
 static void
-tree_lv_adjust_loop_header_phi (basic_block first, basic_block second,
-                               basic_block new_head, edge e)
+gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
+                                 basic_block new_head, edge e)
 {
-  tree phi1, phi2;
+  gimple phi1, phi2;
+  gimple_stmt_iterator psi1, psi2;
+  tree def;
   edge e2 = find_edge (new_head, second);
 
   /* Because NEW_HEAD has been created by splitting SECOND's incoming
@@ -6968,35 +6584,41 @@ tree_lv_adjust_loop_header_phi (basic_block first, basic_block second,
   /* Browse all 'second' basic block phi nodes and add phi args to
      edge 'e' for 'first' head. PHI args are always in correct order.  */
 
-  for (phi2 = phi_nodes (second), phi1 = phi_nodes (first);
-       phi2 && phi1;
-       phi2 = PHI_CHAIN (phi2),  phi1 = PHI_CHAIN (phi1))
+  for (psi2 = gsi_start_phis (second),
+       psi1 = gsi_start_phis (first);
+       !gsi_end_p (psi2) && !gsi_end_p (psi1);
+       gsi_next (&psi2),  gsi_next (&psi1))
     {
-      tree def = PHI_ARG_DEF (phi2, e2->dest_idx);
+      phi1 = gsi_stmt (psi1);
+      phi2 = gsi_stmt (psi2);
+      def = PHI_ARG_DEF (phi2, e2->dest_idx);
       add_phi_arg (phi1, def, e);
     }
 }
 
+
 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
    SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
    the destination of the ELSE part.  */
+
 static void
-tree_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
-                            basic_block second_head ATTRIBUTE_UNUSED,
-                            basic_block cond_bb, void *cond_e)
+gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
+                              basic_block second_head ATTRIBUTE_UNUSED,
+                              basic_block cond_bb, void *cond_e)
 {
-  block_stmt_iterator bsi;
-  tree new_cond_expr = NULL_TREE;
+  gimple_stmt_iterator gsi;
+  gimple new_cond_expr;
   tree cond_expr = (tree) cond_e;
   edge e0;
 
   /* Build new conditional expr */
-  new_cond_expr = build3 (COND_EXPR, void_type_node, cond_expr,
-                         NULL_TREE, NULL_TREE);
+  new_cond_expr = gimple_build_cond_from_tree (cond_expr,
+                                              NULL_TREE, NULL_TREE);
 
   /* Add new cond in cond_bb.  */
-  bsi = bsi_start (cond_bb);
-  bsi_insert_after (&bsi, new_cond_expr, BSI_NEW_STMT);
+  gsi = gsi_last_bb (cond_bb);
+  gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
+
   /* Adjust edges appropriately to connect new head with first head
      as well as second head.  */
   e0 = single_succ_edge (cond_bb);
@@ -7004,34 +6626,34 @@ tree_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
   e0->flags |= EDGE_FALSE_VALUE;
 }
 
-struct cfg_hooks tree_cfg_hooks = {
-  "tree",
-  tree_verify_flow_info,
-  tree_dump_bb,                        /* dump_bb  */
+struct cfg_hooks gimple_cfg_hooks = {
+  "gimple",
+  gimple_verify_flow_info,
+  gimple_dump_bb,              /* dump_bb  */
   create_bb,                   /* create_basic_block  */
-  tree_redirect_edge_and_branch,/* redirect_edge_and_branch  */
-  tree_redirect_edge_and_branch_force,/* redirect_edge_and_branch_force  */
-  tree_can_remove_branch_p,    /* can_remove_branch_p  */
+  gimple_redirect_edge_and_branch, /* redirect_edge_and_branch  */
+  gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force  */
+  gimple_can_remove_branch_p,  /* can_remove_branch_p  */
   remove_bb,                   /* delete_basic_block  */
-  tree_split_block,            /* split_block  */
-  tree_move_block_after,       /* move_block_after  */
-  tree_can_merge_blocks_p,     /* can_merge_blocks_p  */
-  tree_merge_blocks,           /* merge_blocks  */
-  tree_predict_edge,           /* predict_edge  */
-  tree_predicted_by_p,         /* predicted_by_p  */
-  tree_can_duplicate_bb_p,     /* can_duplicate_block_p  */
-  tree_duplicate_bb,           /* duplicate_block  */
-  tree_split_edge,             /* split_edge  */
-  tree_make_forwarder_block,   /* make_forward_block  */
+  gimple_split_block,          /* split_block  */
+  gimple_move_block_after,     /* move_block_after  */
+  gimple_can_merge_blocks_p,   /* can_merge_blocks_p  */
+  gimple_merge_blocks,         /* merge_blocks  */
+  gimple_predict_edge,         /* predict_edge  */
+  gimple_predicted_by_p,               /* predicted_by_p  */
+  gimple_can_duplicate_bb_p,   /* can_duplicate_block_p  */
+  gimple_duplicate_bb,         /* duplicate_block  */
+  gimple_split_edge,           /* split_edge  */
+  gimple_make_forwarder_block, /* make_forward_block  */
   NULL,                                /* tidy_fallthru_edge  */
-  tree_block_ends_with_call_p, /* block_ends_with_call_p */
-  tree_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
-  tree_flow_call_edges_add,     /* flow_call_edges_add */
-  tree_execute_on_growing_pred,        /* execute_on_growing_pred */
-  tree_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
-  tree_duplicate_loop_to_header_edge, /* duplicate loop for trees */
-  tree_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
-  tree_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
+  gimple_block_ends_with_call_p,/* block_ends_with_call_p */
+  gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
+  gimple_flow_call_edges_add,     /* flow_call_edges_add */
+  gimple_execute_on_growing_pred,      /* execute_on_growing_pred */
+  gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
+  gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
+  gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
+  gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
   extract_true_false_edges_from_block, /* extract_cond_bb_edges */
   flush_pending_stmts          /* flush_pending_stmts */
 };
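
The renamed hook table is the single point through which the rest of the compiler reaches these routines: the generic wrappers in cfghooks.c dispatch through whatever table is currently registered.  A hedged sketch of how a CFG-manipulating pass would use it (gimple_register_cfg_hooks is assumed here to be the renamed counterpart of tree_register_cfg_hooks; it is not shown in this hunk):

    /* Select the GIMPLE hooks, then go through the generic wrappers.  */
    gimple_register_cfg_hooks ();

    /* can_remove_branch_p and redirect_edge_and_branch are the generic
       cfghooks.c wrappers; with the table above registered they dispatch
       to gimple_can_remove_branch_p and gimple_redirect_edge_and_branch.  */
    if (can_remove_branch_p (e))
      remove_branch (e);
    else
      redirect_edge_and_branch (e, dest);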
@@ -7081,39 +6703,12 @@ struct gimple_opt_pass pass_split_crit_edges =
  }
 };
 
-\f
-/* Return EXP if it is a valid GIMPLE rvalue, else gimplify it into
-   a temporary, make sure and register it to be renamed if necessary,
-   and finally return the temporary.  Put the statements to compute
-   EXP before the current statement in BSI.  */
-
-tree
-gimplify_val (block_stmt_iterator *bsi, tree type, tree exp)
-{
-  tree t, new_stmt, orig_stmt;
-
-  if (is_gimple_val (exp))
-    return exp;
-
-  t = make_rename_temp (type, NULL);
-  new_stmt = build_gimple_modify_stmt (t, exp);
-
-  orig_stmt = bsi_stmt (*bsi);
-  SET_EXPR_LOCUS (new_stmt, EXPR_LOCUS (orig_stmt));
-  TREE_BLOCK (new_stmt) = TREE_BLOCK (orig_stmt);
-
-  bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
-  if (gimple_in_ssa_p (cfun))
-    mark_symbols_for_renaming (new_stmt);
-
-  return t;
-}
 
-/* Build a ternary operation and gimplify it.  Emit code before BSI.
+/* Build a ternary operation and gimplify it.  Emit code before GSI.
    Return the gimple_val holding the result.  */
 
 tree
-gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
+gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
                 tree type, tree a, tree b, tree c)
 {
   tree ret;
@@ -7121,14 +6716,15 @@ gimplify_build3 (block_stmt_iterator *bsi, enum tree_code code,
   ret = fold_build3 (code, type, a, b, c);
   STRIP_NOPS (ret);
 
-  return gimplify_val (bsi, type, ret);
+  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
+                                   GSI_SAME_STMT);
 }
 
-/* Build a binary operation and gimplify it.  Emit code before BSI.
+/* Build a binary operation and gimplify it.  Emit code before GSI.
    Return the gimple_val holding the result.  */
 
 tree
-gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
+gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
                 tree type, tree a, tree b)
 {
   tree ret;
@@ -7136,14 +6732,15 @@ gimplify_build2 (block_stmt_iterator *bsi, enum tree_code code,
   ret = fold_build2 (code, type, a, b);
   STRIP_NOPS (ret);
 
-  return gimplify_val (bsi, type, ret);
+  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
+                                   GSI_SAME_STMT);
 }
 
-/* Build a unary operation and gimplify it.  Emit code before BSI.
+/* Build a unary operation and gimplify it.  Emit code before GSI.
    Return the gimple_val holding the result.  */
 
 tree
-gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
+gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
                 tree a)
 {
   tree ret;
@@ -7151,7 +6748,8 @@ gimplify_build1 (block_stmt_iterator *bsi, enum tree_code code, tree type,
   ret = fold_build1 (code, type, a);
   STRIP_NOPS (ret);
 
-  return gimplify_val (bsi, type, ret);
+  return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
+                                   GSI_SAME_STMT);
 }
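
After the rewrite the three helpers share one code path: fold_buildN produces a tree, and force_gimple_operand_gsi gimplifies it and inserts any statements it needs before the iterator with GSI_SAME_STMT, so the iterator keeps pointing at the original statement.  A short usage sketch (variable names are illustrative, not taken from the patch):

    /* Compute (a + b) * c in front of the statement at GSI; the results
       are GIMPLE values (SSA names/temporaries, or constants after
       folding), and any intermediate statements land before GSI.  */
    tree sum  = gimplify_build2 (&gsi, PLUS_EXPR, type, a, b);
    tree prod = gimplify_build2 (&gsi, MULT_EXPR, type, sum, c);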
 
 
@@ -7162,7 +6760,7 @@ static unsigned int
 execute_warn_function_return (void)
 {
   source_location location;
-  tree last;
+  gimple last;
   edge e;
   edge_iterator ei;
 
@@ -7174,8 +6772,8 @@ execute_warn_function_return (void)
       FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        {
          last = last_stmt (e->src);
-         if (TREE_CODE (last) == RETURN_EXPR
-             && (location = EXPR_LOCATION (last)) != UNKNOWN_LOCATION)
+         if (gimple_code (last) == GIMPLE_RETURN
+             && (location = gimple_location (last)) != UNKNOWN_LOCATION)
            break;
        }
       if (location == UNKNOWN_LOCATION)
@@ -7192,12 +6790,12 @@ execute_warn_function_return (void)
     {
       FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        {
-         tree last = last_stmt (e->src);
-         if (TREE_CODE (last) == RETURN_EXPR
-             && TREE_OPERAND (last, 0) == NULL
-             && !TREE_NO_WARNING (last))
+         gimple last = last_stmt (e->src);
+         if (gimple_code (last) == GIMPLE_RETURN
+             && gimple_return_retval (last) == NULL
+             && !gimple_no_warning_p (last))
            {
-             location = EXPR_LOCATION (last);
+             location = gimple_location (last);
              if (location == UNKNOWN_LOCATION)
                  location = cfun->function_end_locus;
              warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
index 8970a9b..433900c 100644
@@ -73,15 +73,17 @@ remove_fallthru_edge (VEC(edge,gc) *ev)
   return false;
 }
 
+
 /* Disconnect an unreachable block in the control expression starting
    at block BB.  */
 
 static bool
-cleanup_control_expr_graph (basic_block bb, block_stmt_iterator bsi)
+cleanup_control_expr_graph (basic_block bb, gimple_stmt_iterator gsi)
 {
   edge taken_edge;
   bool retval = false;
-  tree expr = bsi_stmt (bsi), val;
+  gimple stmt = gsi_stmt (gsi);
+  tree val;
 
   if (!single_succ_p (bb))
     {
@@ -90,26 +92,7 @@ cleanup_control_expr_graph (basic_block bb, block_stmt_iterator bsi)
       bool warned;
 
       fold_defer_overflow_warnings ();
-
-      switch (TREE_CODE (expr))
-       {
-       case COND_EXPR:
-         val = fold (COND_EXPR_COND (expr));
-         break;
-
-       case SWITCH_EXPR:
-         val = fold (SWITCH_COND (expr));
-         if (TREE_CODE (val) != INTEGER_CST)
-           {
-             fold_undefer_and_ignore_overflow_warnings ();
-             return false;
-           }
-         break;
-
-       default:
-         gcc_unreachable ();
-       }
-
+      val = gimple_fold (stmt);
       taken_edge = find_taken_edge (bb, val);
       if (!taken_edge)
        {
@@ -126,7 +109,7 @@ cleanup_control_expr_graph (basic_block bb, block_stmt_iterator bsi)
              if (!warned)
                {
                  fold_undefer_overflow_warnings
-                   (true, expr, WARN_STRICT_OVERFLOW_CONDITIONAL);
+                   (true, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
                  warned = true;
                }
 
@@ -147,7 +130,7 @@ cleanup_control_expr_graph (basic_block bb, block_stmt_iterator bsi)
     taken_edge = single_succ_edge (bb);
 
   bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
-  bsi_remove (&bsi, true);
+  gsi_remove (&gsi, true);
   taken_edge->flags = EDGE_FALLTHRU;
 
   return retval;
@@ -159,30 +142,30 @@ cleanup_control_expr_graph (basic_block bb, block_stmt_iterator bsi)
 static bool
 cleanup_control_flow_bb (basic_block bb)
 {
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   bool retval = false;
-  tree stmt;
+  gimple stmt;
 
   /* If the last statement of the block could throw and now cannot,
      we need to prune cfg.  */
-  retval |= tree_purge_dead_eh_edges (bb);
+  retval |= gimple_purge_dead_eh_edges (bb);
 
-  bsi = bsi_last (bb);
-  if (bsi_end_p (bsi))
+  gsi = gsi_last_bb (bb);
+  if (gsi_end_p (gsi))
     return retval;
 
-  stmt = bsi_stmt (bsi);
+  stmt = gsi_stmt (gsi);
 
-  if (TREE_CODE (stmt) == COND_EXPR
-      || TREE_CODE (stmt) == SWITCH_EXPR)
-    retval |= cleanup_control_expr_graph (bb, bsi);
-  /* If we had a computed goto which has a compile-time determinable
-     destination, then we can eliminate the goto.  */
-  else if (TREE_CODE (stmt) == GOTO_EXPR
-          && TREE_CODE (GOTO_DESTINATION (stmt)) == ADDR_EXPR
-          && (TREE_CODE (TREE_OPERAND (GOTO_DESTINATION (stmt), 0))
+  if (gimple_code (stmt) == GIMPLE_COND
+      || gimple_code (stmt) == GIMPLE_SWITCH)
+    retval |= cleanup_control_expr_graph (bb, gsi);
+  else if (gimple_code (stmt) == GIMPLE_GOTO
+          && TREE_CODE (gimple_goto_dest (stmt)) == ADDR_EXPR
+          && (TREE_CODE (TREE_OPERAND (gimple_goto_dest (stmt), 0))
               == LABEL_DECL))
     {
+      /* If we had a computed goto which has a compile-time determinable
+        destination, then we can eliminate the goto.  */
       edge e;
       tree label;
       edge_iterator ei;
@@ -191,7 +174,7 @@ cleanup_control_flow_bb (basic_block bb)
       /* First look at all the outgoing edges.  Delete any outgoing
         edges which do not go to the right block.  For the one
         edge which goes to the right block, fix up its flags.  */
-      label = TREE_OPERAND (GOTO_DESTINATION (stmt), 0);
+      label = TREE_OPERAND (gimple_goto_dest (stmt), 0);
       target_block = label_to_block (label);
       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
@@ -213,13 +196,15 @@ cleanup_control_flow_bb (basic_block bb)
 
       /* Remove the GOTO_EXPR as it is not needed.  The CFG has all the
         relevant information we need.  */
-      bsi_remove (&bsi, true);
+      gsi_remove (&gsi, true);
       retval = true;
     }
 
   /* Check for indirect calls that have been turned into
      noreturn calls.  */
-  else if (noreturn_call_p (stmt) && remove_fallthru_edge (bb->succs))
+  else if (is_gimple_call (stmt)
+           && gimple_call_noreturn_p (stmt)
+           && remove_fallthru_edge (bb->succs))
     retval = true;
 
   return retval;
@@ -235,7 +220,7 @@ cleanup_control_flow_bb (basic_block bb)
 static bool
 tree_forwarder_block_p (basic_block bb, bool phi_wanted)
 {
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   edge_iterator ei;
   edge e, succ;
   basic_block dest;
@@ -244,7 +229,7 @@ tree_forwarder_block_p (basic_block bb, bool phi_wanted)
   if (single_succ_p (bb) != 1
       /* If PHI_WANTED is false, BB must not have any PHI nodes.
         Otherwise, BB must have PHI nodes.  */
-      || (phi_nodes (bb) != NULL_TREE) != phi_wanted
+      || gimple_seq_empty_p (phi_nodes (bb)) == phi_wanted
       /* BB may not be a predecessor of EXIT_BLOCK_PTR.  */
       || single_succ (bb) == EXIT_BLOCK_PTR
       /* Nor should this be an infinite loop.  */
@@ -259,14 +244,14 @@ tree_forwarder_block_p (basic_block bb, bool phi_wanted)
 
   /* Now walk through the statements backward.  We can ignore labels,
      anything else means this is not a forwarder block.  */
-  for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
+  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
     {
-      tree stmt = bsi_stmt (bsi);
+      gimple stmt = gsi_stmt (gsi);
 
-      switch (TREE_CODE (stmt))
+      switch (gimple_code (stmt))
        {
-       case LABEL_EXPR:
-         if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt)))
+       case GIMPLE_LABEL:
+         if (DECL_NONLOCAL (gimple_label_label (stmt)))
            return false;
          break;
 
@@ -333,12 +318,13 @@ phi_alternatives_equal (basic_block dest, edge e1, edge e2)
 {
   int n1 = e1->dest_idx;
   int n2 = e2->dest_idx;
-  tree phi;
+  gimple_stmt_iterator gsi;
 
-  for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
+  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
     {
-      tree val1 = PHI_ARG_DEF (phi, n1);
-      tree val2 = PHI_ARG_DEF (phi, n2);
+      gimple phi = gsi_stmt (gsi);
+      tree val1 = gimple_phi_arg_def (phi, n1);
+      tree val2 = gimple_phi_arg_def (phi, n2);
 
       gcc_assert (val1 != NULL_TREE);
       gcc_assert (val2 != NULL_TREE);
@@ -357,10 +343,9 @@ remove_forwarder_block (basic_block bb)
 {
   edge succ = single_succ_edge (bb), e, s;
   basic_block dest = succ->dest;
-  tree label;
-  tree phi;
+  gimple label;
   edge_iterator ei;
-  block_stmt_iterator bsi, bsi_to;
+  gimple_stmt_iterator gsi, gsi_to;
   bool seen_abnormal_edge = false;
 
   /* We check for infinite loops already in tree_forwarder_block_p.
@@ -373,8 +358,8 @@ remove_forwarder_block (basic_block bb)
      it.  */
   label = first_stmt (dest);
   if (label
-      && TREE_CODE (label) == LABEL_EXPR
-      && DECL_NONLOCAL (LABEL_EXPR_LABEL (label)))
+      && gimple_code (label) == GIMPLE_LABEL
+      && DECL_NONLOCAL (gimple_label_label (label)))
     return false;
 
   /* If there is an abnormal edge to basic block BB, but not into
@@ -393,14 +378,14 @@ remove_forwarder_block (basic_block bb)
       seen_abnormal_edge = true;
 
       if (has_abnormal_incoming_edge_p (dest)
-         || phi_nodes (dest) != NULL_TREE)
+         || !gimple_seq_empty_p (phi_nodes (dest)))
        return false;
     }
 
   /* If there are phi nodes in DEST, and some of the blocks that are
      predecessors of BB are also predecessors of DEST, check that the
      phi node arguments match.  */
-  if (phi_nodes (dest))
+  if (!gimple_seq_empty_p (phi_nodes (dest)))
     {
       FOR_EACH_EDGE (e, ei, bb->preds)
        {
@@ -431,8 +416,13 @@ remove_forwarder_block (basic_block bb)
        {
          /* Create arguments for the phi nodes, since the edge was not
             here before.  */
-         for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
-           add_phi_arg (phi, PHI_ARG_DEF (phi, succ->dest_idx), s);
+         for (gsi = gsi_start_phis (dest);
+              !gsi_end_p (gsi);
+              gsi_next (&gsi))
+           {
+             gimple phi = gsi_stmt (gsi);
+             add_phi_arg (phi, gimple_phi_arg_def (phi, succ->dest_idx), s);
+           }
        }
     }
 
@@ -440,14 +430,13 @@ remove_forwarder_block (basic_block bb)
     {
       /* Move the labels to the new block, so that the redirection of
         the abnormal edges works.  */
-
-      bsi_to = bsi_start (dest);
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
+      gsi_to = gsi_start_bb (dest);
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
        {
-         label = bsi_stmt (bsi);
-         gcc_assert (TREE_CODE (label) == LABEL_EXPR);
-         bsi_remove (&bsi, false);
-         bsi_insert_before (&bsi_to, label, BSI_CONTINUE_LINKING);
+         label = gsi_stmt (gsi);
+         gcc_assert (gimple_code (label) == GIMPLE_LABEL);
+         gsi_remove (&gsi, false);
+         gsi_insert_before (&gsi_to, label, GSI_CONTINUE_LINKING);
        }
     }
 
@@ -485,18 +474,18 @@ static bool
 split_bbs_on_noreturn_calls (void)
 {
   bool changed = false;
-  tree stmt;
+  gimple stmt;
   basic_block bb;
 
   /* Detect cases where a mid-block call is now known not to return.  */
   if (cfun->gimple_df)
-    while (VEC_length (tree, MODIFIED_NORETURN_CALLS (cfun)))
+    while (VEC_length (gimple, MODIFIED_NORETURN_CALLS (cfun)))
       {
-       stmt = VEC_pop (tree, MODIFIED_NORETURN_CALLS (cfun));
-       bb = bb_for_stmt (stmt);
+       stmt = VEC_pop (gimple, MODIFIED_NORETURN_CALLS (cfun));
+       bb = gimple_bb (stmt);
        if (bb == NULL
            || last_stmt (bb) == stmt
-           || !noreturn_call_p (stmt))
+           || !gimple_call_noreturn_p (stmt))
          continue;
 
        changed = true;
@@ -507,23 +496,23 @@ split_bbs_on_noreturn_calls (void)
   return changed;
 }
 
-/* If OMP_RETURN in basic block BB is unreachable, remove it.  */
+/* If GIMPLE_OMP_RETURN in basic block BB is unreachable, remove it.  */
 
 static bool
 cleanup_omp_return (basic_block bb)
 {
-  tree stmt = last_stmt (bb);
+  gimple stmt = last_stmt (bb);
   basic_block control_bb;
 
-  if (stmt == NULL_TREE
-      || TREE_CODE (stmt) != OMP_RETURN
+  if (stmt == NULL
+      || gimple_code (stmt) != GIMPLE_OMP_RETURN
       || !single_pred_p (bb))
     return false;
 
   control_bb = single_pred (bb);
   stmt = last_stmt (control_bb);
 
-  if (TREE_CODE (stmt) != OMP_SECTIONS_SWITCH)
+  if (gimple_code (stmt) != GIMPLE_OMP_SECTIONS_SWITCH)
     return false;
 
   /* The block with the control statement normally has two entry edges -- one
@@ -553,7 +542,6 @@ cleanup_tree_cfg_bb (basic_block bb)
   /* Forwarder blocks can carry line number information which is
      useful when debugging, so we only clean them up when
      optimizing.  */
-
   if (optimize > 0
       && tree_forwarder_block_p (bb, false)
       && remove_forwarder_block (bb))
@@ -716,7 +704,7 @@ remove_forwarder_block_with_phi (basic_block bb)
 {
   edge succ = single_succ_edge (bb);
   basic_block dest = succ->dest;
-  tree label;
+  gimple label;
   basic_block dombb, domdest, dom;
 
   /* We check for infinite loops already in tree_forwarder_block_p.
@@ -729,15 +717,15 @@ remove_forwarder_block_with_phi (basic_block bb)
      merge it.  */
   label = first_stmt (dest);
   if (label
-      && TREE_CODE (label) == LABEL_EXPR
-      && DECL_NONLOCAL (LABEL_EXPR_LABEL (label)))
+      && gimple_code (label) == GIMPLE_LABEL
+      && DECL_NONLOCAL (gimple_label_label (label)))
     return;
 
   /* Redirect each incoming edge to BB to DEST.  */
   while (EDGE_COUNT (bb->preds) > 0)
     {
       edge e = EDGE_PRED (bb, 0), s;
-      tree phi;
+      gimple_stmt_iterator gsi;
 
       s = find_edge (e->src, dest);
       if (s)
@@ -765,9 +753,12 @@ remove_forwarder_block_with_phi (basic_block bb)
 
       /* Add to the PHI nodes at DEST each PHI argument removed at the
         destination of E.  */
-      for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
+      for (gsi = gsi_start_phis (dest);
+          !gsi_end_p (gsi);
+          gsi_next (&gsi))
        {
-         tree def = PHI_ARG_DEF (phi, succ->dest_idx);
+         gimple phi = gsi_stmt (gsi);
+         tree def = gimple_phi_arg_def (phi, succ->dest_idx);
 
          if (TREE_CODE (def) == SSA_NAME)
            {
@@ -879,7 +870,7 @@ merge_phi_nodes (void)
        }
       else
        {
-         tree phi;
+         gimple_stmt_iterator gsi;
          unsigned int dest_idx = single_succ_edge (bb)->dest_idx;
 
          /* BB dominates DEST.  There may be many users of the PHI
@@ -887,11 +878,13 @@ merge_phi_nodes (void)
             can handle.  If the result of every PHI in BB is used
             only by a PHI in DEST, then we can trivially merge the
             PHI nodes from BB into DEST.  */
-         for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+         for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
+              gsi_next (&gsi))
            {
-             tree result = PHI_RESULT (phi);
+             gimple phi = gsi_stmt (gsi);
+             tree result = gimple_phi_result (phi);
              use_operand_p imm_use;
-             tree use_stmt;
+             gimple use_stmt;
 
              /* If the PHI's result is never used, then we can just
                 ignore it.  */
@@ -900,15 +893,15 @@ merge_phi_nodes (void)
 
              /* Get the single use of the result of this PHI node.  */
              if (!single_imm_use (result, &imm_use, &use_stmt)
-                 || TREE_CODE (use_stmt) != PHI_NODE
-                 || bb_for_stmt (use_stmt) != dest
-                 || PHI_ARG_DEF (use_stmt, dest_idx) != result)
+                 || gimple_code (use_stmt) != GIMPLE_PHI
+                 || gimple_bb (use_stmt) != dest
+                 || gimple_phi_arg_def (use_stmt, dest_idx) != result)
                break;
            }
 
          /* If the loop above iterated through all the PHI nodes
             in BB, then we can merge the PHIs from BB into DEST.  */
-         if (!phi)
+         if (gsi_end_p (gsi))
            *current++ = bb;
        }
     }
index 295fb79..89e96fd 100644
@@ -343,9 +343,9 @@ chrec_fold_plus (tree type,
     return chrec_fold_automatically_generated_operands (op0, op1);
 
   if (integer_zerop (op0))
-    return chrec_convert (type, op1, NULL_TREE);
+    return chrec_convert (type, op1, NULL);
   if (integer_zerop (op1))
-    return chrec_convert (type, op0, NULL_TREE);
+    return chrec_convert (type, op0, NULL);
 
   if (POINTER_TYPE_P (type))
     code = POINTER_PLUS_EXPR;
@@ -577,7 +577,7 @@ chrec_apply (unsigned var,
   if (evolution_function_is_affine_p (chrec))
     {
       /* "{a, +, b} (x)"  ->  "a + b*x".  */
-      x = chrec_convert_rhs (type, x, NULL_TREE);
+      x = chrec_convert_rhs (type, x, NULL);
       res = chrec_fold_multiply (TREE_TYPE (x), CHREC_RIGHT (chrec), x);
       if (!integer_zerop (CHREC_LEFT (chrec)))
        res = chrec_fold_plus (type, CHREC_LEFT (chrec), res);
@@ -1115,7 +1115,7 @@ avoid_arithmetics_in_type_p (const_tree type)
   return false;
 }
 
-static tree chrec_convert_1 (tree, tree, tree, bool);
+static tree chrec_convert_1 (tree, tree, gimple, bool);
 
 /* Converts BASE and STEP of affine scev to TYPE.  LOOP is the loop whose iv
   the scev corresponds to.  AT_STMT is the statement at which the scev is
@@ -1127,7 +1127,7 @@ static tree chrec_convert_1 (tree, tree, tree, bool);
 
 bool
 convert_affine_scev (struct loop *loop, tree type,
-                    tree *base, tree *step, tree at_stmt,
+                    tree *base, tree *step, gimple at_stmt,
                     bool use_overflow_semantics)
 {
   tree ct = TREE_TYPE (*step);
@@ -1228,7 +1228,7 @@ convert_affine_scev (struct loop *loop, tree type,
 /* Convert CHREC for the right hand side of a CHREC.
    The increment for a pointer type is always sizetype.  */
 tree 
-chrec_convert_rhs (tree type, tree chrec, tree at_stmt)
+chrec_convert_rhs (tree type, tree chrec, gimple at_stmt)
 {
   if (POINTER_TYPE_P (type))
    type = sizetype;
@@ -1260,7 +1260,7 @@ chrec_convert_rhs (tree type, tree chrec, tree at_stmt)
 */
 
 tree 
-chrec_convert (tree type, tree chrec, tree at_stmt)
+chrec_convert (tree type, tree chrec, gimple at_stmt)
 {
   return chrec_convert_1 (type, chrec, at_stmt, true);
 }
@@ -1278,7 +1278,7 @@ chrec_convert (tree type, tree chrec, tree at_stmt)
    tests, but also to enforce that the result follows them.  */
 
 static tree 
-chrec_convert_1 (tree type, tree chrec, tree at_stmt,
+chrec_convert_1 (tree type, tree chrec, gimple at_stmt,
                 bool use_overflow_semantics)
 {
   tree ct, res;
@@ -1352,10 +1352,10 @@ chrec_convert_aggressive (tree type, tree chrec)
   right = CHREC_RIGHT (chrec);
   lc = chrec_convert_aggressive (type, left);
   if (!lc)
-    lc = chrec_convert (type, left, NULL_TREE);
+    lc = chrec_convert (type, left, NULL);
   rc = chrec_convert_aggressive (rtype, right);
   if (!rc)
-    rc = chrec_convert (rtype, right, NULL_TREE);
+    rc = chrec_convert (rtype, right, NULL);
  
   return build_polynomial_chrec (CHREC_VARIABLE (chrec), lc, rc);
 }
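
In tree-chrec.c the optional statement argument of chrec_convert,
chrec_convert_rhs and convert_affine_scev changes type from tree to gimple, so
callers that used to pass NULL_TREE now pass a null statement pointer.  A
hypothetical caller is sketched below; it is not from the patch, and the
helper name and include list are illustrative only.

    /* Sketch only: convert both halves of a polynomial chrec to TYPE.
       AT_STMT is a statement pointer now, so "no context" is NULL,
       not NULL_TREE.  */

    #include "config.h"
    #include "system.h"
    #include "coretypes.h"
    #include "tree.h"
    #include "gimple.h"
    #include "tree-chrec.h"

    static tree
    convert_polynomial_parts (unsigned loop_num, tree type, tree left,
                              tree right, gimple at_stmt)
    {
      tree l = chrec_convert (type, left, at_stmt);
      tree r = chrec_convert_rhs (type, right, at_stmt);
      return build_polynomial_chrec (loop_num, l, r);
    }

A caller without a statement context would invoke this with at_stmt == NULL,
matching the NULL_TREE -> NULL replacements in chrec_fold_plus, chrec_apply
and chrec_convert_aggressive above.
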
index 7f240c6..9000fb7 100644
@@ -57,8 +57,8 @@ tree_is_chrec (const_tree expr)
 extern tree chrec_fold_plus (tree, tree, tree);
 extern tree chrec_fold_minus (tree, tree, tree);
 extern tree chrec_fold_multiply (tree, tree, tree);
-extern tree chrec_convert (tree, tree, tree);
-extern tree chrec_convert_rhs (tree, tree, tree);
+extern tree chrec_convert (tree, tree, gimple);
+extern tree chrec_convert_rhs (tree, tree, gimple);
 extern tree chrec_convert_aggressive (tree, tree);
 
 /* Operations.  */
index 0fc1cc5..bbf4c49 100644
@@ -26,7 +26,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "real.h"
 #include "flags.h"
 #include "tree-flow.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "tree-iterator.h"
 #include "tree-pass.h"
 #include "tree-ssa-propagate.h"
@@ -104,16 +104,37 @@ some_nonzerop (tree t)
   return !zerop;
 }
 
-/* Compute a lattice value from T.  It may be a gimple_val, or, as a 
-   special exception, a COMPLEX_EXPR.  */
+
+/* Compute a lattice value from the components of a complex type REAL
+   and IMAG.  */
 
 static complex_lattice_t
-find_lattice_value (tree t)
+find_lattice_value_parts (tree real, tree imag)
 {
-  tree real, imag;
   int r, i;
   complex_lattice_t ret;
 
+  r = some_nonzerop (real);
+  i = some_nonzerop (imag);
+  ret = r * ONLY_REAL + i * ONLY_IMAG;
+
+  /* ??? On occasion we could do better than mapping 0+0i to real, but we
+     certainly don't want to leave it UNINITIALIZED, which eventually gets
+     mapped to VARYING.  */
+  if (ret == UNINITIALIZED)
+    ret = ONLY_REAL;
+
+  return ret;
+}
+
+
+/* Compute a lattice value from gimple_val T.  */
+
+static complex_lattice_t
+find_lattice_value (tree t)
+{
+  tree real, imag;
+
   switch (TREE_CODE (t))
     {
     case SSA_NAME:
@@ -125,26 +146,11 @@ find_lattice_value (tree t)
       imag = TREE_IMAGPART (t);
       break;
 
-    case COMPLEX_EXPR:
-      real = TREE_OPERAND (t, 0);
-      imag = TREE_OPERAND (t, 1);
-      break;
-
     default:
       gcc_unreachable ();
     }
 
-  r = some_nonzerop (real);
-  i = some_nonzerop (imag);
-  ret = r*ONLY_REAL + i*ONLY_IMAG;
-
-  /* ??? On occasion we could do better than mapping 0+0i to real, but we
-     certainly don't want to leave it UNINITIALIZED, which eventually gets
-     mapped to VARYING.  */
-  if (ret == UNINITIALIZED)
-    ret = ONLY_REAL;
-
-  return ret;
+  return find_lattice_value_parts (real, imag);
 }
 
 /* Determine if LHS is something for which we're interested in seeing
@@ -171,66 +177,72 @@ init_parameter_lattice_values (void)
                   SSA_NAME_VERSION (ssa_name), VARYING);
 }
 
-/* Initialize DONT_SIMULATE_AGAIN for each stmt and phi.  Return false if
-   we found no statements we want to simulate, and thus there's nothing for
-   the entire pass to do.  */
+/* Initialize simulation state for each statement.  Return false if we
+   found no statements we want to simulate, and thus there's nothing
+   for the entire pass to do.  */
 
 static bool
 init_dont_simulate_again (void)
 {
   basic_block bb;
-  block_stmt_iterator bsi;
-  tree phi;
+  gimple_stmt_iterator gsi;
+  gimple phi;
   bool saw_a_complex_op = false;
 
   FOR_EACH_BB (bb)
     {
-      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
-       DONT_SIMULATE_AGAIN (phi) = !is_complex_reg (PHI_RESULT (phi));
+      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+       {
+         phi = gsi_stmt (gsi);
+         prop_set_simulate_again (phi,
+                                  is_complex_reg (gimple_phi_result (phi)));
+       }
 
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
-         tree orig_stmt, stmt, rhs = NULL;
-         bool dsa;
+         gimple stmt;
+         tree op0, op1;
+         bool sim_again_p;
 
-         orig_stmt = stmt = bsi_stmt (bsi);
+         stmt = gsi_stmt (gsi);
+         op0 = op1 = NULL_TREE;
 
          /* Most control-altering statements must be initially 
             simulated, else we won't cover the entire cfg.  */
-         dsa = !stmt_ends_bb_p (stmt);
+         sim_again_p = stmt_ends_bb_p (stmt);
 
-         switch (TREE_CODE (stmt))
+         switch (gimple_code (stmt))
            {
-           case RETURN_EXPR:
-             /* We don't care what the lattice value of <retval> is,
-                since it's never used as an input to another computation.  */
-             dsa = true;
-             stmt = TREE_OPERAND (stmt, 0);
-             if (!stmt || TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
-               break;
-             /* FALLTHRU */
+           case GIMPLE_CALL:
+             if (gimple_call_lhs (stmt))
+               sim_again_p = is_complex_reg (gimple_call_lhs (stmt));
+             break;
 
-           case GIMPLE_MODIFY_STMT:
-             dsa = !is_complex_reg (GIMPLE_STMT_OPERAND (stmt, 0));
-             rhs = GIMPLE_STMT_OPERAND (stmt, 1);
+           case GIMPLE_ASSIGN:
+             sim_again_p = is_complex_reg (gimple_assign_lhs (stmt));
+             if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
+                 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR)
+               op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
+             else
+               op0 = gimple_assign_rhs1 (stmt);
+             if (gimple_num_ops (stmt) > 2)
+               op1 = gimple_assign_rhs2 (stmt);
              break;
 
-           case COND_EXPR:
-             rhs = TREE_OPERAND (stmt, 0);
+           case GIMPLE_COND:
+             op0 = gimple_cond_lhs (stmt);
+             op1 = gimple_cond_rhs (stmt);
              break;
 
            default:
              break;
            }
 
-         if (rhs)
-           switch (TREE_CODE (rhs))
+         if (op0 || op1)
+           switch (gimple_expr_code (stmt))
              {
              case EQ_EXPR:
              case NE_EXPR:
-               rhs = TREE_OPERAND (rhs, 0);
-               /* FALLTHRU */
-
              case PLUS_EXPR:
              case MINUS_EXPR:
              case MULT_EXPR:
@@ -239,20 +251,25 @@ init_dont_simulate_again (void)
              case FLOOR_DIV_EXPR:
              case ROUND_DIV_EXPR:
              case RDIV_EXPR:
+               if (TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE
+                   || TREE_CODE (TREE_TYPE (op1)) == COMPLEX_TYPE)
+                 saw_a_complex_op = true;
+               break;
+
              case NEGATE_EXPR:
              case CONJ_EXPR:
-               if (TREE_CODE (TREE_TYPE (rhs)) == COMPLEX_TYPE)
+               if (TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
                  saw_a_complex_op = true;
                break;
 
              case REALPART_EXPR:
              case IMAGPART_EXPR:
                /* The total store transformation performed during
-                  gimplification creates such uninitialized loads
-                  and we need to lower the statement to be able
-                  to fix things up.  */
-               if (TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
-                   && ssa_undefined_value_p (TREE_OPERAND (rhs, 0)))
+                 gimplification creates such uninitialized loads
+                 and we need to lower the statement to be able
+                 to fix things up.  */
+               if (TREE_CODE (op0) == SSA_NAME
+                   && ssa_undefined_value_p (op0))
                  saw_a_complex_op = true;
                break;
 
@@ -260,7 +277,7 @@ init_dont_simulate_again (void)
                break;
              }
 
-         DONT_SIMULATE_AGAIN (orig_stmt) = dsa;
+         prop_set_simulate_again (stmt, sim_again_p);
        }
     }
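
The init_dont_simulate_again hunks above also invert the sense of the
per-statement flag: the old code stored "don't simulate" via
DONT_SIMULATE_AGAIN, while the propagator now records "do simulate" through
prop_set_simulate_again.  A minimal sketch of the new call (not part of the
patch; the helper name is made up and the include list is approximate):

    /* Sketch only: mark STMT for the SSA propagator.  */

    #include "config.h"
    #include "system.h"
    #include "coretypes.h"
    #include "tree.h"
    #include "gimple.h"
    #include "tree-ssa-propagate.h"

    static void
    mark_for_simulation (gimple stmt, bool interesting_p)
    {
      /* The old interface stored the negation:
           DONT_SIMULATE_AGAIN (stmt) = !interesting_p;
         the new interface stores the flag directly.  */
      prop_set_simulate_again (stmt, interesting_p);
    }
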
 
@@ -271,19 +288,18 @@ init_dont_simulate_again (void)
 /* Evaluate statement STMT against the complex lattice defined above.  */
 
 static enum ssa_prop_result
-complex_visit_stmt (tree stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
+complex_visit_stmt (gimple stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
                    tree *result_p)
 {
   complex_lattice_t new_l, old_l, op1_l, op2_l;
   unsigned int ver;
-  tree lhs, rhs;
+  tree lhs;
 
-  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+  lhs = gimple_get_lhs (stmt);
+  /* Skip anything but GIMPLE_ASSIGN and GIMPLE_CALL with a lhs.  */
+  if (!lhs)
     return SSA_PROP_VARYING;
 
-  lhs = GIMPLE_STMT_OPERAND (stmt, 0);
-  rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-
   /* These conditions should be satisfied due to the initial filter
      set up in init_dont_simulate_again.  */
   gcc_assert (TREE_CODE (lhs) == SSA_NAME);
@@ -293,18 +309,22 @@ complex_visit_stmt (tree stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
   ver = SSA_NAME_VERSION (lhs);
   old_l = VEC_index (complex_lattice_t, complex_lattice_values, ver);
 
-  switch (TREE_CODE (rhs))
+  switch (gimple_expr_code (stmt))
     {
     case SSA_NAME:
-    case COMPLEX_EXPR:
     case COMPLEX_CST:
-      new_l = find_lattice_value (rhs);
+      new_l = find_lattice_value (gimple_assign_rhs1 (stmt));
+      break;
+
+    case COMPLEX_EXPR:
+      new_l = find_lattice_value_parts (gimple_assign_rhs1 (stmt),
+                                       gimple_assign_rhs2 (stmt));
       break;
 
     case PLUS_EXPR:
     case MINUS_EXPR:
-      op1_l = find_lattice_value (TREE_OPERAND (rhs, 0));
-      op2_l = find_lattice_value (TREE_OPERAND (rhs, 1));
+      op1_l = find_lattice_value (gimple_assign_rhs1 (stmt));
+      op2_l = find_lattice_value (gimple_assign_rhs2 (stmt));
 
       /* We've set up the lattice values such that IOR neatly
         models addition.  */
@@ -317,8 +337,8 @@ complex_visit_stmt (tree stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
     case CEIL_DIV_EXPR:
     case FLOOR_DIV_EXPR:
     case ROUND_DIV_EXPR:
-      op1_l = find_lattice_value (TREE_OPERAND (rhs, 0));
-      op2_l = find_lattice_value (TREE_OPERAND (rhs, 1));
+      op1_l = find_lattice_value (gimple_assign_rhs1 (stmt));
+      op2_l = find_lattice_value (gimple_assign_rhs2 (stmt));
 
       /* Obviously, if either varies, so does the result.  */
       if (op1_l == VARYING || op2_l == VARYING)
@@ -344,7 +364,7 @@ complex_visit_stmt (tree stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
 
     case NEGATE_EXPR:
     case CONJ_EXPR:
-      new_l = find_lattice_value (TREE_OPERAND (rhs, 0));
+      new_l = find_lattice_value (gimple_assign_rhs1 (stmt));
       break;
 
     default:
@@ -363,14 +383,14 @@ complex_visit_stmt (tree stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
 /* Evaluate a PHI node against the complex lattice defined above.  */
 
 static enum ssa_prop_result
-complex_visit_phi (tree phi)
+complex_visit_phi (gimple phi)
 {
   complex_lattice_t new_l, old_l;
   unsigned int ver;
   tree lhs;
   int i;
 
-  lhs = PHI_RESULT (phi);
+  lhs = gimple_phi_result (phi);
 
   /* This condition should be satisfied due to the initial filter
      set up in init_dont_simulate_again.  */
@@ -378,8 +398,8 @@ complex_visit_phi (tree phi)
 
   /* We've set up the lattice values such that IOR neatly models PHI meet.  */
   new_l = UNINITIALIZED;
-  for (i = PHI_NUM_ARGS (phi) - 1; i >= 0; --i)
-    new_l |= find_lattice_value (PHI_ARG_DEF (phi, i));
+  for (i = gimple_phi_num_args (phi) - 1; i >= 0; --i)
+    new_l |= find_lattice_value (gimple_phi_arg_def (phi, i));
 
   ver = SSA_NAME_VERSION (lhs);
   old_l = VEC_index (complex_lattice_t, complex_lattice_values, ver);
@@ -475,7 +495,7 @@ get_component_ssa_name (tree ssa_name, bool imag_p)
       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ret)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name);
       if (TREE_CODE (SSA_NAME_VAR (ssa_name)) == VAR_DECL
-         && IS_EMPTY_STMT (SSA_NAME_DEF_STMT (ssa_name)))
+         && gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
        {
          SSA_NAME_DEF_STMT (ret) = SSA_NAME_DEF_STMT (ssa_name);
          set_default_def (SSA_NAME_VAR (ret), ret);
@@ -487,15 +507,17 @@ get_component_ssa_name (tree ssa_name, bool imag_p)
   return ret;
 }
 
-/* Set a value for a complex component of SSA_NAME, return a STMT_LIST of
-   stuff that needs doing.  */
+/* Set a value for a complex component of SSA_NAME, return a
+   gimple_seq of stuff that needs doing.  */
 
-static tree
+static gimple_seq
 set_component_ssa_name (tree ssa_name, bool imag_p, tree value)
 {
   complex_lattice_t lattice = find_lattice_value (ssa_name);
   size_t ssa_name_index;
-  tree comp, list, last;
+  tree comp;
+  gimple last;
+  gimple_seq list;
 
   /* We know the value must be zero, else there's a bug in our lattice
      analysis.  But the value may well be a variable known to contain
@@ -542,22 +564,21 @@ set_component_ssa_name (tree ssa_name, bool imag_p, tree value)
     comp = get_component_ssa_name (ssa_name, imag_p);
   
   /* Do all the work to assign VALUE to COMP.  */
+  list = NULL;
   value = force_gimple_operand (value, &list, false, NULL);
-  last = build_gimple_modify_stmt (comp, value);
-  append_to_statement_list (last, &list);
-
-  gcc_assert (SSA_NAME_DEF_STMT (comp) == NULL);
-  SSA_NAME_DEF_STMT (comp) = last;
+  last = gimple_build_assign (comp, value);
+  gimple_seq_add_stmt (&list, last);
+  gcc_assert (SSA_NAME_DEF_STMT (comp) == last);
 
   return list;
 }
 
 /* Extract the real or imaginary part of a complex variable or constant.
    Make sure that it's a proper gimple_val and gimplify it if not.
-   Emit any new code before BSI.  */
+   Emit any new code before GSI.  */
 
 static tree
-extract_component (block_stmt_iterator *bsi, tree t, bool imagpart_p,
+extract_component (gimple_stmt_iterator *gsi, tree t, bool imagpart_p,
                   bool gimple_p)
 {
   switch (TREE_CODE (t))
@@ -566,7 +587,7 @@ extract_component (block_stmt_iterator *bsi, tree t, bool imagpart_p,
       return imagpart_p ? TREE_IMAGPART (t) : TREE_REALPART (t);
 
     case COMPLEX_EXPR:
-      return TREE_OPERAND (t, imagpart_p);
+      gcc_unreachable ();
 
     case VAR_DECL:
     case RESULT_DECL:
@@ -581,7 +602,8 @@ extract_component (block_stmt_iterator *bsi, tree t, bool imagpart_p,
                    inner_type, unshare_expr (t));
 
        if (gimple_p)
-         t = gimplify_val (bsi, inner_type, t);
+         t = force_gimple_operand_gsi (gsi, t, true, NULL, true,
+                                        GSI_SAME_STMT);
 
        return t;
       }
@@ -597,53 +619,53 @@ extract_component (block_stmt_iterator *bsi, tree t, bool imagpart_p,
 /* Update the complex components of the ssa name on the lhs of STMT.  */
 
 static void
-update_complex_components (block_stmt_iterator *bsi, tree stmt, tree r, tree i)
+update_complex_components (gimple_stmt_iterator *gsi, gimple stmt, tree r,
+                          tree i)
 {
-  tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
-  tree list;
+  tree lhs;
+  gimple_seq list;
+
+  lhs = gimple_get_lhs (stmt);
 
   list = set_component_ssa_name (lhs, false, r);
   if (list)
-    bsi_insert_after (bsi, list, BSI_CONTINUE_LINKING);
+    gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);
 
   list = set_component_ssa_name (lhs, true, i);
   if (list)
-    bsi_insert_after (bsi, list, BSI_CONTINUE_LINKING);
+    gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);
 }
 
 static void
 update_complex_components_on_edge (edge e, tree lhs, tree r, tree i)
 {
-  tree list;
+  gimple_seq list;
 
   list = set_component_ssa_name (lhs, false, r);
   if (list)
-    bsi_insert_on_edge (e, list);
+    gsi_insert_seq_on_edge (e, list);
 
   list = set_component_ssa_name (lhs, true, i);
   if (list)
-    bsi_insert_on_edge (e, list);
+    gsi_insert_seq_on_edge (e, list);
 }
 
+
 /* Update an assignment to a complex variable in place.  */
 
 static void
-update_complex_assignment (block_stmt_iterator *bsi, tree r, tree i)
+update_complex_assignment (gimple_stmt_iterator *gsi, tree r, tree i)
 {
-  tree stmt, mod;
-  tree type;
-
-  mod = stmt = bsi_stmt (*bsi);
-  if (TREE_CODE (stmt) == RETURN_EXPR)
-    mod = TREE_OPERAND (mod, 0);
-  else if (gimple_in_ssa_p (cfun))
-    update_complex_components (bsi, stmt, r, i);
-  
-  type = TREE_TYPE (GIMPLE_STMT_OPERAND (mod, 1));
-  GIMPLE_STMT_OPERAND (mod, 1) = build2 (COMPLEX_EXPR, type, r, i);
-  update_stmt (stmt);
+  gimple_stmt_iterator orig_si = *gsi;
+
+  if (gimple_in_ssa_p (cfun))
+    update_complex_components (gsi, gsi_stmt (*gsi), r, i);
+
+  gimple_assign_set_rhs_with_ops (&orig_si, COMPLEX_EXPR, r, i);
+  update_stmt (gsi_stmt (orig_si));
 }
 
+
 /* Generate code at the entry point of the function to initialize the
    component variables for a complex parameter.  */
 
@@ -678,49 +700,54 @@ update_parameter_components (void)
 static void
 update_phi_components (basic_block bb)
 {
-  tree phi;
+  gimple_stmt_iterator gsi;
 
-  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
-    if (is_complex_reg (PHI_RESULT (phi)))
-      {
-       tree lr, li, pr = NULL, pi = NULL;
-       unsigned int i, n;
+  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+    {
+      gimple phi = gsi_stmt (gsi);
 
-       lr = get_component_ssa_name (PHI_RESULT (phi), false);
-       if (TREE_CODE (lr) == SSA_NAME)
-         {
-           pr = create_phi_node (lr, bb);
-           SSA_NAME_DEF_STMT (lr) = pr;
-         }
+      if (is_complex_reg (gimple_phi_result (phi)))
+       {
+         tree lr, li;
+         gimple pr = NULL, pi = NULL;
+         unsigned int i, n;
 
-       li = get_component_ssa_name (PHI_RESULT (phi), true);
-       if (TREE_CODE (li) == SSA_NAME)
-         {
-           pi = create_phi_node (li, bb);
-           SSA_NAME_DEF_STMT (li) = pi;
-         }
-       
-       for (i = 0, n = PHI_NUM_ARGS (phi); i < n; ++i)
-         {
-           tree comp, arg = PHI_ARG_DEF (phi, i);
-           if (pr)
-             {
-               comp = extract_component (NULL, arg, false, false);
-               SET_PHI_ARG_DEF (pr, i, comp);
-             }
-           if (pi)
-             {
-               comp = extract_component (NULL, arg, true, false);
-               SET_PHI_ARG_DEF (pi, i, comp);
-             }
-         }
-      }
+         lr = get_component_ssa_name (gimple_phi_result (phi), false);
+         if (TREE_CODE (lr) == SSA_NAME)
+           {
+             pr = create_phi_node (lr, bb);
+             SSA_NAME_DEF_STMT (lr) = pr;
+           }
+
+         li = get_component_ssa_name (gimple_phi_result (phi), true);
+         if (TREE_CODE (li) == SSA_NAME)
+           {
+             pi = create_phi_node (li, bb);
+             SSA_NAME_DEF_STMT (li) = pi;
+           }
+
+         for (i = 0, n = gimple_phi_num_args (phi); i < n; ++i)
+           {
+             tree comp, arg = gimple_phi_arg_def (phi, i);
+             if (pr)
+               {
+                 comp = extract_component (NULL, arg, false, false);
+                 SET_PHI_ARG_DEF (pr, i, comp);
+               }
+             if (pi)
+               {
+                 comp = extract_component (NULL, arg, true, false);
+                 SET_PHI_ARG_DEF (pi, i, comp);
+               }
+           }
+       }
+    }
 }
 
 /* Mark each virtual op in STMT for ssa update.  */
 
 static void
-update_all_vops (tree stmt)
+update_all_vops (gimple stmt)
 {
   ssa_op_iter iter;
   tree sym;
@@ -733,18 +760,35 @@ update_all_vops (tree stmt)
     }
 }
 
+
 /* Expand a complex move to scalars.  */
 
 static void
-expand_complex_move (block_stmt_iterator *bsi, tree stmt, tree type,
-                    tree lhs, tree rhs)
+expand_complex_move (gimple_stmt_iterator *gsi, tree type)
 {
   tree inner_type = TREE_TYPE (type);
-  tree r, i;
+  tree r, i, lhs, rhs;
+  gimple stmt = gsi_stmt (*gsi);
+
+  if (is_gimple_assign (stmt))
+    {
+      lhs = gimple_assign_lhs (stmt);
+      if (gimple_num_ops (stmt) == 2)
+       rhs = gimple_assign_rhs1 (stmt);
+      else
+       rhs = NULL_TREE;
+    }
+  else if (is_gimple_call (stmt))
+    {
+      lhs = gimple_call_lhs (stmt);
+      rhs = NULL_TREE;
+    }
+  else
+    gcc_unreachable ();
 
   if (TREE_CODE (lhs) == SSA_NAME)
     {
-      if (is_ctrl_altering_stmt (bsi_stmt (*bsi)))
+      if (is_ctrl_altering_stmt (stmt))
        {
          edge_iterator ei;
          edge e;
@@ -752,7 +796,7 @@ expand_complex_move (block_stmt_iterator *bsi, tree stmt, tree type,
          /* The value is not assigned on the exception edges, so we need not
             concern ourselves there.  We do need to update on the fallthru
             edge.  Find it.  */
-         FOR_EACH_EDGE (e, ei, bsi->bb->succs)
+         FOR_EACH_EDGE (e, ei, gsi_bb (*gsi)->succs)
            if (e->flags & EDGE_FALLTHRU)
              goto found_fallthru;
          gcc_unreachable ();
@@ -762,47 +806,57 @@ expand_complex_move (block_stmt_iterator *bsi, tree stmt, tree type,
          i = build1 (IMAGPART_EXPR, inner_type, lhs);
          update_complex_components_on_edge (e, lhs, r, i);
        }
-      else if (TREE_CODE (rhs) == CALL_EXPR || TREE_SIDE_EFFECTS (rhs)
-              || TREE_CODE (rhs) == PAREN_EXPR)
+      else if (is_gimple_call (stmt)
+              || gimple_has_side_effects (stmt)
+              || gimple_assign_rhs_code (stmt) == PAREN_EXPR)
        {
          r = build1 (REALPART_EXPR, inner_type, lhs);
          i = build1 (IMAGPART_EXPR, inner_type, lhs);
-         update_complex_components (bsi, stmt, r, i);
+         update_complex_components (gsi, stmt, r, i);
        }
       else
        {
-         update_all_vops (bsi_stmt (*bsi));
-         r = extract_component (bsi, rhs, 0, true);
-         i = extract_component (bsi, rhs, 1, true);
-         update_complex_assignment (bsi, r, i);
+         update_all_vops (stmt);
+         if (gimple_assign_rhs_code (stmt) != COMPLEX_EXPR)
+           {
+             r = extract_component (gsi, rhs, 0, true);
+             i = extract_component (gsi, rhs, 1, true);
+           }
+         else
+           {
+             r = gimple_assign_rhs1 (stmt);
+             i = gimple_assign_rhs2 (stmt);
+           }
+         update_complex_assignment (gsi, r, i);
        }
     }
-  else if (TREE_CODE (rhs) == SSA_NAME && !TREE_SIDE_EFFECTS (lhs))
+  else if (rhs && TREE_CODE (rhs) == SSA_NAME && !TREE_SIDE_EFFECTS (lhs))
     {
       tree x;
+      gimple t;
 
-      r = extract_component (bsi, rhs, 0, false);
-      i = extract_component (bsi, rhs, 1, false);
+      r = extract_component (gsi, rhs, 0, false);
+      i = extract_component (gsi, rhs, 1, false);
 
       x = build1 (REALPART_EXPR, inner_type, unshare_expr (lhs));
-      x = build_gimple_modify_stmt (x, r);
-      bsi_insert_before (bsi, x, BSI_SAME_STMT);
+      t = gimple_build_assign (x, r);
+      gsi_insert_before (gsi, t, GSI_SAME_STMT);
 
-      if (stmt == bsi_stmt (*bsi))
+      if (stmt == gsi_stmt (*gsi))
        {
          x = build1 (IMAGPART_EXPR, inner_type, unshare_expr (lhs));
-         GIMPLE_STMT_OPERAND (stmt, 0) = x;
-         GIMPLE_STMT_OPERAND (stmt, 1) = i;
+         gimple_assign_set_lhs (stmt, x);
+         gimple_assign_set_rhs1 (stmt, i);
        }
       else
        {
          x = build1 (IMAGPART_EXPR, inner_type, unshare_expr (lhs));
-         x = build_gimple_modify_stmt (x, i);
-         bsi_insert_before (bsi, x, BSI_SAME_STMT);
+         t = gimple_build_assign (x, i);
+         gsi_insert_before (gsi, t, GSI_SAME_STMT);
 
-         stmt = bsi_stmt (*bsi);
-         gcc_assert (TREE_CODE (stmt) == RETURN_EXPR);
-         GIMPLE_STMT_OPERAND (stmt, 0) = lhs;
+         stmt = gsi_stmt (*gsi);
+         gcc_assert (gimple_code (stmt) == GIMPLE_RETURN);
+         gimple_return_set_retval (stmt, lhs);
        }
 
       update_all_vops (stmt);
@@ -816,7 +870,7 @@ expand_complex_move (block_stmt_iterator *bsi, tree stmt, tree type,
 */
 
 static void
-expand_complex_addition (block_stmt_iterator *bsi, tree inner_type,
+expand_complex_addition (gimple_stmt_iterator *gsi, tree inner_type,
                         tree ar, tree ai, tree br, tree bi,
                         enum tree_code code,
                         complex_lattice_t al, complex_lattice_t bl)
@@ -826,21 +880,21 @@ expand_complex_addition (block_stmt_iterator *bsi, tree inner_type,
   switch (PAIR (al, bl))
     {
     case PAIR (ONLY_REAL, ONLY_REAL):
-      rr = gimplify_build2 (bsi, code, inner_type, ar, br);
+      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
       ri = ai;
       break;
 
     case PAIR (ONLY_REAL, ONLY_IMAG):
       rr = ar;
       if (code == MINUS_EXPR)
-       ri = gimplify_build2 (bsi, MINUS_EXPR, inner_type, ai, bi);
+       ri = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ai, bi);
       else
        ri = bi;
       break;
 
     case PAIR (ONLY_IMAG, ONLY_REAL):
       if (code == MINUS_EXPR)
-       rr = gimplify_build2 (bsi, MINUS_EXPR, inner_type, ar, br);
+       rr = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ar, br);
       else
        rr = br;
       ri = ai;
@@ -848,23 +902,23 @@ expand_complex_addition (block_stmt_iterator *bsi, tree inner_type,
 
     case PAIR (ONLY_IMAG, ONLY_IMAG):
       rr = ar;
-      ri = gimplify_build2 (bsi, code, inner_type, ai, bi);
+      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
       break;
 
     case PAIR (VARYING, ONLY_REAL):
-      rr = gimplify_build2 (bsi, code, inner_type, ar, br);
+      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
       ri = ai;
       break;
 
     case PAIR (VARYING, ONLY_IMAG):
       rr = ar;
-      ri = gimplify_build2 (bsi, code, inner_type, ai, bi);
+      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
       break;
 
     case PAIR (ONLY_REAL, VARYING):
       if (code == MINUS_EXPR)
        goto general;
-      rr = gimplify_build2 (bsi, code, inner_type, ar, br);
+      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
       ri = bi;
       break;
 
@@ -872,38 +926,41 @@ expand_complex_addition (block_stmt_iterator *bsi, tree inner_type,
       if (code == MINUS_EXPR)
        goto general;
       rr = br;
-      ri = gimplify_build2 (bsi, code, inner_type, ai, bi);
+      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
       break;
 
     case PAIR (VARYING, VARYING):
     general:
-      rr = gimplify_build2 (bsi, code, inner_type, ar, br);
-      ri = gimplify_build2 (bsi, code, inner_type, ai, bi);
+      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
+      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
       break;
 
     default:
       gcc_unreachable ();
     }
 
-  update_complex_assignment (bsi, rr, ri);
+  update_complex_assignment (gsi, rr, ri);
 }
 
 /* Expand a complex multiplication or division to a libcall to the c99
    compliant routines.  */
 
 static void
-expand_complex_libcall (block_stmt_iterator *bsi, tree ar, tree ai,
+expand_complex_libcall (gimple_stmt_iterator *gsi, tree ar, tree ai,
                        tree br, tree bi, enum tree_code code)
 {
   enum machine_mode mode;
   enum built_in_function bcode;
-  tree fn, stmt, type;
+  tree fn, type, lhs;
+  gimple stmt;
 
-  stmt = bsi_stmt (*bsi);
-  type = TREE_TYPE (GIMPLE_STMT_OPERAND (stmt, 1));
+  stmt = gsi_stmt (*gsi);
+  lhs = gimple_assign_lhs (stmt);
+  type = TREE_TYPE (lhs);
 
   mode = TYPE_MODE (type);
   gcc_assert (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT);
+
   if (code == MULT_EXPR)
     bcode = BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT;
   else if (code == RDIV_EXPR)
@@ -912,16 +969,18 @@ expand_complex_libcall (block_stmt_iterator *bsi, tree ar, tree ai,
     gcc_unreachable ();
   fn = built_in_decls[bcode];
 
-  GIMPLE_STMT_OPERAND (stmt, 1) = build_call_expr (fn, 4, ar, ai, br, bi);
+  stmt = gimple_build_call (fn, 4, ar, ai, br, bi);
+  gimple_call_set_lhs (stmt, lhs);
   update_stmt (stmt);
+  gsi_replace (gsi, stmt, true);
 
   if (gimple_in_ssa_p (cfun))
     {
-      tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
       type = TREE_TYPE (type);
-      update_complex_components (bsi, stmt,
+      update_complex_components (gsi, stmt,
                                 build1 (REALPART_EXPR, type, lhs),
                                 build1 (IMAGPART_EXPR, type, lhs));
+      SSA_NAME_DEF_STMT (lhs) = stmt;
     }
 }
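
expand_complex_libcall can no longer stuff a CALL_EXPR into the right-hand
side of an assignment; it now builds a real GIMPLE_CALL and swaps it in with
gsi_replace.  A condensed sketch of that replacement pattern (not from the
patch; the helper name, the single-argument call and the include list are
illustrative):

    /* Sketch only: replace the assignment at *GSI with LHS = FN (ARG).  */

    #include "config.h"
    #include "system.h"
    #include "coretypes.h"
    #include "tree.h"
    #include "function.h"
    #include "gimple.h"
    #include "tree-flow.h"

    static void
    replace_with_call (gimple_stmt_iterator *gsi, tree fn, tree arg)
    {
      gimple old_stmt = gsi_stmt (*gsi);
      tree lhs = gimple_assign_lhs (old_stmt);
      gimple call = gimple_build_call (fn, 1, arg);

      gimple_call_set_lhs (call, lhs);
      gsi_replace (gsi, call, true);   /* TRUE: update EH information.  */
      update_stmt (call);

      if (gimple_in_ssa_p (cfun))
        SSA_NAME_DEF_STMT (lhs) = call;
    }
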
 
@@ -930,7 +989,7 @@ expand_complex_libcall (block_stmt_iterator *bsi, tree ar, tree ai,
 */
 
 static void
-expand_complex_multiplication (block_stmt_iterator *bsi, tree inner_type,
+expand_complex_multiplication (gimple_stmt_iterator *gsi, tree inner_type,
                               tree ar, tree ai, tree br, tree bi,
                               complex_lattice_t al, complex_lattice_t bl)
 {
@@ -947,7 +1006,7 @@ expand_complex_multiplication (block_stmt_iterator *bsi, tree inner_type,
   switch (PAIR (al, bl))
     {
     case PAIR (ONLY_REAL, ONLY_REAL):
-      rr = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, br);
+      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
       ri = ai;
       break;
 
@@ -957,49 +1016,49 @@ expand_complex_multiplication (block_stmt_iterator *bsi, tree inner_type,
          && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst1))
        ri = br;
       else
-       ri = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, br);
+       ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
       break;
 
     case PAIR (ONLY_IMAG, ONLY_IMAG):
-      rr = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, bi);
-      rr = gimplify_build1 (bsi, NEGATE_EXPR, inner_type, rr);
+      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
+      rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, rr);
       ri = ar;
       break;
 
     case PAIR (VARYING, ONLY_REAL):
-      rr = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, br);
-      ri = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, br);
+      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
+      ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
       break;
 
     case PAIR (VARYING, ONLY_IMAG):
-      rr = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, bi);
-      rr = gimplify_build1 (bsi, NEGATE_EXPR, inner_type, rr);
-      ri = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, bi);
+      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
+      rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, rr);
+      ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi);
       break;
 
     case PAIR (VARYING, VARYING):
       if (flag_complex_method == 2 && SCALAR_FLOAT_TYPE_P (inner_type))
        {
-         expand_complex_libcall (bsi, ar, ai, br, bi, MULT_EXPR);
+         expand_complex_libcall (gsi, ar, ai, br, bi, MULT_EXPR);
          return;
        }
       else
        {
          tree t1, t2, t3, t4;
 
-         t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, br);
-         t2 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, bi);
-         t3 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, bi);
+         t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
+         t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
+         t3 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi);
 
          /* Avoid expanding redundant multiplication for the common
             case of squaring a complex number.  */
          if (ar == br && ai == bi)
            t4 = t3;
          else
-           t4 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, br);
+           t4 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
 
-         rr = gimplify_build2 (bsi, MINUS_EXPR, inner_type, t1, t2);
-         ri = gimplify_build2 (bsi, PLUS_EXPR, inner_type, t3, t4);
+         rr = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, t2);
+         ri = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t3, t4);
        }
       break;
 
@@ -1007,7 +1066,7 @@ expand_complex_multiplication (block_stmt_iterator *bsi, tree inner_type,
       gcc_unreachable ();
     }
 
-  update_complex_assignment (bsi, rr, ri);
+  update_complex_assignment (gsi, rr, ri);
 }
 
 /* Expand complex division to scalars, straightforward algorithm.
@@ -1016,43 +1075,44 @@ expand_complex_multiplication (block_stmt_iterator *bsi, tree inner_type,
 */
 
 static void
-expand_complex_div_straight (block_stmt_iterator *bsi, tree inner_type,
+expand_complex_div_straight (gimple_stmt_iterator *gsi, tree inner_type,
                             tree ar, tree ai, tree br, tree bi,
                             enum tree_code code)
 {
   tree rr, ri, div, t1, t2, t3;
 
-  t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, br, br);
-  t2 = gimplify_build2 (bsi, MULT_EXPR, inner_type, bi, bi);
-  div = gimplify_build2 (bsi, PLUS_EXPR, inner_type, t1, t2);
+  t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, br, br);
+  t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, bi, bi);
+  div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, t2);
 
-  t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, br);
-  t2 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, bi);
-  t3 = gimplify_build2 (bsi, PLUS_EXPR, inner_type, t1, t2);
-  rr = gimplify_build2 (bsi, code, inner_type, t3, div);
+  t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
+  t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
+  t3 = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, t2);
+  rr = gimplify_build2 (gsi, code, inner_type, t3, div);
 
-  t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, br);
-  t2 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, bi);
-  t3 = gimplify_build2 (bsi, MINUS_EXPR, inner_type, t1, t2);
-  ri = gimplify_build2 (bsi, code, inner_type, t3, div);
+  t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
+  t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi);
+  t3 = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, t2);
+  ri = gimplify_build2 (gsi, code, inner_type, t3, div);
 
-  update_complex_assignment (bsi, rr, ri);
+  update_complex_assignment (gsi, rr, ri);
 }
 
 /* Expand complex division to scalars, modified algorithm to minimize
    overflow with wide input ranges.  */
 
 static void
-expand_complex_div_wide (block_stmt_iterator *bsi, tree inner_type,
+expand_complex_div_wide (gimple_stmt_iterator *gsi, tree inner_type,
                         tree ar, tree ai, tree br, tree bi,
                         enum tree_code code)
 {
   tree rr, ri, ratio, div, t1, t2, tr, ti, compare;
   basic_block bb_cond, bb_true, bb_false, bb_join;
+  gimple stmt;
 
   /* Examine |br| < |bi|, and branch.  */
-  t1 = gimplify_build1 (bsi, ABS_EXPR, inner_type, br);
-  t2 = gimplify_build1 (bsi, ABS_EXPR, inner_type, bi);
+  t1 = gimplify_build1 (gsi, ABS_EXPR, inner_type, br);
+  t2 = gimplify_build1 (gsi, ABS_EXPR, inner_type, bi);
   compare = fold_build2 (LT_EXPR, boolean_type_node, t1, t2);
   STRIP_NOPS (compare);
 
@@ -1061,20 +1121,25 @@ expand_complex_div_wide (block_stmt_iterator *bsi, tree inner_type,
   if (!TREE_CONSTANT (compare))
     {
       edge e;
+      gimple stmt;
       tree cond, tmp;
 
       tmp = create_tmp_var (boolean_type_node, NULL);
-      cond = build_gimple_modify_stmt (tmp, compare);
+      stmt = gimple_build_assign (tmp, compare);
       if (gimple_in_ssa_p (cfun))
-       tmp = make_ssa_name (tmp,  cond);
-      GIMPLE_STMT_OPERAND (cond, 0) = tmp;
-      bsi_insert_before (bsi, cond, BSI_SAME_STMT);
+       {
+         tmp = make_ssa_name (tmp, stmt);
+         gimple_assign_set_lhs (stmt, tmp);
+       }
+
+      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
 
-      cond = build3 (COND_EXPR, void_type_node, tmp, NULL_TREE, NULL_TREE);
-      bsi_insert_before (bsi, cond, BSI_SAME_STMT);
+      cond = fold_build2 (EQ_EXPR, boolean_type_node, tmp, boolean_true_node);
+      stmt = gimple_build_cond_from_tree (cond, NULL_TREE, NULL_TREE);
+      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
 
       /* Split the original block, and create the TRUE and FALSE blocks.  */
-      e = split_block (bsi->bb, cond);
+      e = split_block (gsi_bb (*gsi), stmt);
       bb_cond = e->src;
       bb_join = e->dest;
       bb_true = create_empty_bb (bb_cond);
@@ -1110,31 +1175,31 @@ expand_complex_div_wide (block_stmt_iterator *bsi, tree inner_type,
     {
       if (bb_true)
        {
-         *bsi = bsi_last (bb_true);
-         bsi_insert_after (bsi, build_empty_stmt (), BSI_NEW_STMT);
+         *gsi = gsi_last_bb (bb_true);
+         gsi_insert_after (gsi, gimple_build_nop (), GSI_NEW_STMT);
        }
 
-      ratio = gimplify_build2 (bsi, code, inner_type, br, bi);
+      ratio = gimplify_build2 (gsi, code, inner_type, br, bi);
 
-      t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, br, ratio);
-      div = gimplify_build2 (bsi, PLUS_EXPR, inner_type, t1, bi);
+      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, br, ratio);
+      div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, bi);
 
-      t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, ratio);
-      tr = gimplify_build2 (bsi, PLUS_EXPR, inner_type, t1, ai);
+      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, ratio);
+      tr = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, ai);
 
-      t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, ratio);
-      ti = gimplify_build2 (bsi, MINUS_EXPR, inner_type, t1, ar);
+      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, ratio);
+      ti = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, ar);
 
-      tr = gimplify_build2 (bsi, code, inner_type, tr, div);
-      ti = gimplify_build2 (bsi, code, inner_type, ti, div);
+      tr = gimplify_build2 (gsi, code, inner_type, tr, div);
+      ti = gimplify_build2 (gsi, code, inner_type, ti, div);
 
      if (bb_true)
        {
-        t1 = build_gimple_modify_stmt (rr, tr);
-        bsi_insert_before (bsi, t1, BSI_SAME_STMT);
-        t1 = build_gimple_modify_stmt (ri, ti);
-        bsi_insert_before (bsi, t1, BSI_SAME_STMT);
-        bsi_remove (bsi, true);
+        stmt = gimple_build_assign (rr, tr);
+        gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
+        stmt = gimple_build_assign (ri, ti);
+        gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
+        gsi_remove (gsi, true);
        }
     }
 
@@ -1149,46 +1214,46 @@ expand_complex_div_wide (block_stmt_iterator *bsi, tree inner_type,
     {
       if (bb_false)
        {
-         *bsi = bsi_last (bb_false);
-         bsi_insert_after (bsi, build_empty_stmt (), BSI_NEW_STMT);
+         *gsi = gsi_last_bb (bb_false);
+         gsi_insert_after (gsi, gimple_build_nop (), GSI_NEW_STMT);
        }
 
-      ratio = gimplify_build2 (bsi, code, inner_type, bi, br);
+      ratio = gimplify_build2 (gsi, code, inner_type, bi, br);
 
-      t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, bi, ratio);
-      div = gimplify_build2 (bsi, PLUS_EXPR, inner_type, t1, br);
+      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, bi, ratio);
+      div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, br);
 
-      t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ai, ratio);
-      tr = gimplify_build2 (bsi, PLUS_EXPR, inner_type, t1, ar);
+      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, ratio);
+      tr = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, ar);
 
-      t1 = gimplify_build2 (bsi, MULT_EXPR, inner_type, ar, ratio);
-      ti = gimplify_build2 (bsi, MINUS_EXPR, inner_type, ai, t1);
+      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, ratio);
+      ti = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ai, t1);
 
-      tr = gimplify_build2 (bsi, code, inner_type, tr, div);
-      ti = gimplify_build2 (bsi, code, inner_type, ti, div);
+      tr = gimplify_build2 (gsi, code, inner_type, tr, div);
+      ti = gimplify_build2 (gsi, code, inner_type, ti, div);
 
      if (bb_false)
        {
-        t1 = build_gimple_modify_stmt (rr, tr);
-        bsi_insert_before (bsi, t1, BSI_SAME_STMT);
-        t1 = build_gimple_modify_stmt (ri, ti);
-        bsi_insert_before (bsi, t1, BSI_SAME_STMT);
-        bsi_remove (bsi, true);
+        stmt = gimple_build_assign (rr, tr);
+        gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
+        stmt = gimple_build_assign (ri, ti);
+        gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
+        gsi_remove (gsi, true);
        }
     }
 
   if (bb_join)
-    *bsi = bsi_start (bb_join);
+    *gsi = gsi_start_bb (bb_join);
   else
     rr = tr, ri = ti;
 
-  update_complex_assignment (bsi, rr, ri);
+  update_complex_assignment (gsi, rr, ri);
 }
 
 /* Expand complex division to scalars.  */
 
 static void
-expand_complex_division (block_stmt_iterator *bsi, tree inner_type,
+expand_complex_division (gimple_stmt_iterator *gsi, tree inner_type,
                         tree ar, tree ai, tree br, tree bi,
                         enum tree_code code,
                         complex_lattice_t al, complex_lattice_t bl)
@@ -1198,35 +1263,35 @@ expand_complex_division (block_stmt_iterator *bsi, tree inner_type,
   switch (PAIR (al, bl))
     {
     case PAIR (ONLY_REAL, ONLY_REAL):
-      rr = gimplify_build2 (bsi, code, inner_type, ar, br);
+      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
       ri = ai;
       break;
 
     case PAIR (ONLY_REAL, ONLY_IMAG):
       rr = ai;
-      ri = gimplify_build2 (bsi, code, inner_type, ar, bi);
-      ri = gimplify_build1 (bsi, NEGATE_EXPR, inner_type, ri);
+      ri = gimplify_build2 (gsi, code, inner_type, ar, bi);
+      ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ri);
       break;
 
     case PAIR (ONLY_IMAG, ONLY_REAL):
       rr = ar;
-      ri = gimplify_build2 (bsi, code, inner_type, ai, br);
+      ri = gimplify_build2 (gsi, code, inner_type, ai, br);
       break;
 
     case PAIR (ONLY_IMAG, ONLY_IMAG):
-      rr = gimplify_build2 (bsi, code, inner_type, ai, bi);
+      rr = gimplify_build2 (gsi, code, inner_type, ai, bi);
       ri = ar;
       break;
 
     case PAIR (VARYING, ONLY_REAL):
-      rr = gimplify_build2 (bsi, code, inner_type, ar, br);
-      ri = gimplify_build2 (bsi, code, inner_type, ai, br);
+      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
+      ri = gimplify_build2 (gsi, code, inner_type, ai, br);
       break;
 
     case PAIR (VARYING, ONLY_IMAG):
-      rr = gimplify_build2 (bsi, code, inner_type, ai, bi);
-      ri = gimplify_build2 (bsi, code, inner_type, ar, bi);
-      ri = gimplify_build1 (bsi, NEGATE_EXPR, inner_type, ri);
+      rr = gimplify_build2 (gsi, code, inner_type, ai, bi);
+      ri = gimplify_build2 (gsi, code, inner_type, ar, bi);
+      ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ri);
 
     case PAIR (ONLY_REAL, VARYING):
     case PAIR (ONLY_IMAG, VARYING):
@@ -1235,20 +1300,20 @@ expand_complex_division (block_stmt_iterator *bsi, tree inner_type,
        {
        case 0:
          /* straightforward implementation of complex divide acceptable.  */
-         expand_complex_div_straight (bsi, inner_type, ar, ai, br, bi, code);
+         expand_complex_div_straight (gsi, inner_type, ar, ai, br, bi, code);
          break;
 
        case 2:
          if (SCALAR_FLOAT_TYPE_P (inner_type))
            {
-             expand_complex_libcall (bsi, ar, ai, br, bi, code);
+             expand_complex_libcall (gsi, ar, ai, br, bi, code);
              break;
            }
          /* FALLTHRU */
 
        case 1:
          /* wide ranges of inputs must work for complex divide.  */
-         expand_complex_div_wide (bsi, inner_type, ar, ai, br, bi, code);
+         expand_complex_div_wide (gsi, inner_type, ar, ai, br, bi, code);
          break;
 
        default:
@@ -1260,7 +1325,7 @@ expand_complex_division (block_stmt_iterator *bsi, tree inner_type,
       gcc_unreachable ();
     }
 
-  update_complex_assignment (bsi, rr, ri);
+  update_complex_assignment (gsi, rr, ri);
 }
 
 /* Expand complex negation to scalars:
@@ -1268,15 +1333,15 @@ expand_complex_division (block_stmt_iterator *bsi, tree inner_type,
 */
 
 static void
-expand_complex_negation (block_stmt_iterator *bsi, tree inner_type,
+expand_complex_negation (gimple_stmt_iterator *gsi, tree inner_type,
                         tree ar, tree ai)
 {
   tree rr, ri;
 
-  rr = gimplify_build1 (bsi, NEGATE_EXPR, inner_type, ar);
-  ri = gimplify_build1 (bsi, NEGATE_EXPR, inner_type, ai);
+  rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ar);
+  ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ai);
 
-  update_complex_assignment (bsi, rr, ri);
+  update_complex_assignment (gsi, rr, ri);
 }
 
 /* Expand complex conjugate to scalars:
@@ -1284,44 +1349,52 @@ expand_complex_negation (block_stmt_iterator *bsi, tree inner_type,
 */
 
 static void
-expand_complex_conjugate (block_stmt_iterator *bsi, tree inner_type,
+expand_complex_conjugate (gimple_stmt_iterator *gsi, tree inner_type,
                          tree ar, tree ai)
 {
   tree ri;
 
-  ri = gimplify_build1 (bsi, NEGATE_EXPR, inner_type, ai);
+  ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ai);
 
-  update_complex_assignment (bsi, ar, ri);
+  update_complex_assignment (gsi, ar, ri);
 }
 
 /* Expand complex comparison (EQ or NE only).  */
 
 static void
-expand_complex_comparison (block_stmt_iterator *bsi, tree ar, tree ai,
+expand_complex_comparison (gimple_stmt_iterator *gsi, tree ar, tree ai,
                           tree br, tree bi, enum tree_code code)
 {
-  tree cr, ci, cc, stmt, expr, type;
+  tree cr, ci, cc, type;
+  gimple stmt;
 
-  cr = gimplify_build2 (bsi, code, boolean_type_node, ar, br);
-  ci = gimplify_build2 (bsi, code, boolean_type_node, ai, bi);
-  cc = gimplify_build2 (bsi,
+  cr = gimplify_build2 (gsi, code, boolean_type_node, ar, br);
+  ci = gimplify_build2 (gsi, code, boolean_type_node, ai, bi);
+  cc = gimplify_build2 (gsi,
                        (code == EQ_EXPR ? TRUTH_AND_EXPR : TRUTH_OR_EXPR),
                        boolean_type_node, cr, ci);
 
-  stmt = expr = bsi_stmt (*bsi);
+  stmt = gsi_stmt (*gsi);
 
-  switch (TREE_CODE (stmt))
+  switch (gimple_code (stmt))
     {
-    case RETURN_EXPR:
-      expr = TREE_OPERAND (stmt, 0);
-      /* FALLTHRU */
-    case GIMPLE_MODIFY_STMT:
-      type = TREE_TYPE (GIMPLE_STMT_OPERAND (expr, 1));
-      GIMPLE_STMT_OPERAND (expr, 1) = fold_convert (type, cc);
+    case GIMPLE_RETURN:
+      type = TREE_TYPE (gimple_return_retval (stmt));
+      gimple_return_set_retval (stmt, fold_convert (type, cc));
       break;
-    case COND_EXPR:
-      TREE_OPERAND (stmt, 0) = cc;
+
+    case GIMPLE_ASSIGN:
+      type = TREE_TYPE (gimple_assign_lhs (stmt));
+      gimple_assign_set_rhs_from_tree (gsi, fold_convert (type, cc));
+      stmt = gsi_stmt (*gsi);
       break;
+
+    case GIMPLE_COND:
+      gimple_cond_set_code (stmt, EQ_EXPR);
+      gimple_cond_set_lhs (stmt, cc);
+      gimple_cond_set_rhs (stmt, boolean_true_node);
+      break;
+
     default:
       gcc_unreachable ();
     }
@@ -1329,41 +1402,24 @@ expand_complex_comparison (block_stmt_iterator *bsi, tree ar, tree ai,
   update_stmt (stmt);
 }
 
+
 /* Process one statement.  If we identify a complex operation, expand it.  */
 
 static void
-expand_complex_operations_1 (block_stmt_iterator *bsi)
+expand_complex_operations_1 (gimple_stmt_iterator *gsi)
 {
-  tree stmt = bsi_stmt (*bsi);
-  tree rhs, type, inner_type;
+  gimple stmt = gsi_stmt (*gsi);
+  tree type, inner_type, lhs;
   tree ac, ar, ai, bc, br, bi;
   complex_lattice_t al, bl;
   enum tree_code code;
 
-  switch (TREE_CODE (stmt))
-    {
-    case RETURN_EXPR:
-      stmt = TREE_OPERAND (stmt, 0);
-      if (!stmt)
-       return;
-      if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
-       return;
-      /* FALLTHRU */
-
-    case GIMPLE_MODIFY_STMT:
-      rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-      break;
+  lhs = gimple_get_lhs (stmt);
+  if (!lhs && gimple_code (stmt) != GIMPLE_COND)
+    return;
 
-    case COND_EXPR:
-      rhs = TREE_OPERAND (stmt, 0);
-      break;
-
-    default:
-      return;
-    }
-
-  type = TREE_TYPE (rhs);
-  code = TREE_CODE (rhs);
+  type = TREE_TYPE (gimple_op (stmt, 0));
+  code = gimple_expr_code (stmt);
 
   /* Initial filter for operations we handle.  */
   switch (code)
@@ -1385,32 +1441,36 @@ expand_complex_operations_1 (block_stmt_iterator *bsi)
 
     case EQ_EXPR:
     case NE_EXPR:
-      inner_type = TREE_TYPE (TREE_OPERAND (rhs, 1));
+      /* Note that both GIMPLE_ASSIGN and GIMPLE_COND may have an EQ_EXPR
+        subcode, so we need to access the operands using gimple_op.  */
+      inner_type = TREE_TYPE (gimple_op (stmt, 1));
       if (TREE_CODE (inner_type) != COMPLEX_TYPE)
        return;
       break;
 
     default:
       {
-       tree lhs, rhs;
+       tree rhs;
 
-       /* COND_EXPR may also fallthru here, but we do not need to do anything
-          with it.  */
-       if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+       /* GIMPLE_COND may also fallthru here, but we do not need to
+          do anything with it.  */
+       if (gimple_code (stmt) == GIMPLE_COND)
          return;
 
-       lhs = GIMPLE_STMT_OPERAND (stmt, 0);
-       rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-
        if (TREE_CODE (type) == COMPLEX_TYPE)
-         expand_complex_move (bsi, stmt, type, lhs, rhs);
-       else if ((TREE_CODE (rhs) == REALPART_EXPR
-                 || TREE_CODE (rhs) == IMAGPART_EXPR)
-                && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
+         expand_complex_move (gsi, type);
+       else if (is_gimple_assign (stmt)
+                && (gimple_assign_rhs_code (stmt) == REALPART_EXPR
+                    || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR)
+                && TREE_CODE (lhs) == SSA_NAME)
          {
-           GENERIC_TREE_OPERAND (stmt, 1)
-             = extract_component (bsi, TREE_OPERAND (rhs, 0),
-                                  TREE_CODE (rhs) == IMAGPART_EXPR, false);
+           rhs = gimple_assign_rhs1 (stmt);
+           rhs = extract_component (gsi, TREE_OPERAND (rhs, 0),
+                                    gimple_assign_rhs_code (stmt)
+                                      == IMAGPART_EXPR,
+                                    false);
+           gimple_assign_set_rhs_from_tree (gsi, rhs);
+           stmt = gsi_stmt (*gsi);
            update_stmt (stmt);
          }
       }
@@ -1419,23 +1479,30 @@ expand_complex_operations_1 (block_stmt_iterator *bsi)
 
   /* Extract the components of the two complex values.  Make sure and
      handle the common case of the same value used twice specially.  */
-  ac = TREE_OPERAND (rhs, 0);
-  ar = extract_component (bsi, ac, 0, true);
-  ai = extract_component (bsi, ac, 1, true);
-
-  if (TREE_CODE_CLASS (code) == tcc_unary)
-    bc = br = bi = NULL;
+  if (is_gimple_assign (stmt))
+    {
+      ac = gimple_assign_rhs1 (stmt);
+      bc = (gimple_num_ops (stmt) > 2) ? gimple_assign_rhs2 (stmt) : NULL;
+    }
+  /* GIMPLE_CALL cannot get here.  */
   else
     {
-      bc = TREE_OPERAND (rhs, 1);
-      if (ac == bc)
-       br = ar, bi = ai;
-      else
-       {
-         br = extract_component (bsi, bc, 0, true);
-         bi = extract_component (bsi, bc, 1, true);
-       }
+      ac = gimple_cond_lhs (stmt);
+      bc = gimple_cond_rhs (stmt);
+    }
+
+  ar = extract_component (gsi, ac, false, true);
+  ai = extract_component (gsi, ac, true, true);
+
+  if (ac == bc)
+    br = ar, bi = ai;
+  else if (bc)
+    {
+      br = extract_component (gsi, bc, 0, true);
+      bi = extract_component (gsi, bc, 1, true);
     }
+  else
+    br = bi = NULL_TREE;
 
   if (gimple_in_ssa_p (cfun))
     {
@@ -1461,11 +1528,11 @@ expand_complex_operations_1 (block_stmt_iterator *bsi)
     {
     case PLUS_EXPR:
     case MINUS_EXPR:
-      expand_complex_addition (bsi, inner_type, ar, ai, br, bi, code, al, bl);
+      expand_complex_addition (gsi, inner_type, ar, ai, br, bi, code, al, bl);
       break;
 
     case MULT_EXPR:
-      expand_complex_multiplication (bsi, inner_type, ar, ai, br, bi, al, bl);
+      expand_complex_multiplication (gsi, inner_type, ar, ai, br, bi, al, bl);
       break;
 
     case TRUNC_DIV_EXPR:
@@ -1473,20 +1540,20 @@ expand_complex_operations_1 (block_stmt_iterator *bsi)
     case FLOOR_DIV_EXPR:
     case ROUND_DIV_EXPR:
     case RDIV_EXPR:
-      expand_complex_division (bsi, inner_type, ar, ai, br, bi, code, al, bl);
+      expand_complex_division (gsi, inner_type, ar, ai, br, bi, code, al, bl);
       break;
       
     case NEGATE_EXPR:
-      expand_complex_negation (bsi, inner_type, ar, ai);
+      expand_complex_negation (gsi, inner_type, ar, ai);
       break;
 
     case CONJ_EXPR:
-      expand_complex_conjugate (bsi, inner_type, ar, ai);
+      expand_complex_conjugate (gsi, inner_type, ar, ai);
       break;
 
     case EQ_EXPR:
     case NE_EXPR:
-      expand_complex_comparison (bsi, ar, ai, br, bi, code);
+      expand_complex_comparison (gsi, ar, ai, br, bi, code);
       break;
 
     default:
@@ -1501,7 +1568,7 @@ static unsigned int
 tree_lower_complex (void)
 {
   int old_last_basic_block;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   basic_block bb;
 
   if (!init_dont_simulate_again ())
@@ -1529,12 +1596,13 @@ tree_lower_complex (void)
     {
       if (bb->index >= old_last_basic_block)
        continue;
+
       update_phi_components (bb);
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
-       expand_complex_operations_1 (&bsi);
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+       expand_complex_operations_1 (&gsi);
     }
 
-  bsi_commit_edge_inserts ();
+  gsi_commit_edge_inserts ();
 
   htab_delete (complex_variable_components);
   VEC_free (tree, heap, complex_ssa_name_components);
@@ -1571,15 +1639,16 @@ static unsigned int
 tree_lower_complex_O0 (void)
 {
   int old_last_basic_block = last_basic_block;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   basic_block bb;
 
   FOR_EACH_BB (bb)
     {
       if (bb->index >= old_last_basic_block)
        continue;
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
-       expand_complex_operations_1 (&bsi);
+
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+       expand_complex_operations_1 (&gsi);
     }
   return 0;
 }
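
Throughout tree-complex.c the STATEMENT_LIST plumbing becomes gimple_seq:
force_gimple_operand now fills a gimple_seq, assignments are built with
gimple_build_assign, and whole sequences are spliced with gsi_insert_seq_after,
as in set_component_ssa_name and update_complex_components above.  A condensed
sketch of that sequence idiom (not part of the patch; the helper name and
include list are illustrative):

    /* Sketch only: gimplify VALUE, assign it to COMP, and emit the
       resulting sequence after GSI.  */

    #include "config.h"
    #include "system.h"
    #include "coretypes.h"
    #include "tree.h"
    #include "gimple.h"
    #include "tree-flow.h"

    static void
    assign_component_after (gimple_stmt_iterator *gsi, tree comp, tree value)
    {
      gimple_seq seq = NULL;
      gimple assign;

      value = force_gimple_operand (value, &seq, false, NULL);
      assign = gimple_build_assign (comp, value);
      gimple_seq_add_stmt (&seq, assign);

      gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
    }
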
index c024b74..1b5e92b 100644
@@ -186,7 +186,7 @@ dump_data_reference (FILE *outf,
   unsigned int i;
   
   fprintf (outf, "(Data Ref: \n  stmt: ");
-  print_generic_stmt (outf, DR_STMT (dr), 0);
+  print_gimple_stmt (outf, DR_STMT (dr), 0, 0);
   fprintf (outf, "  ref: ");
   print_generic_stmt (outf, DR_REF (dr), 0);
   fprintf (outf, "  base_object: ");
@@ -500,68 +500,65 @@ dump_ddrs (FILE *file, VEC (ddr_p, heap) *ddrs)
   fprintf (file, "\n\n");
 }
 
-/* Expresses EXP as VAR + OFF, where off is a constant.  The type of OFF
-   will be ssizetype.  */
+/* Helper function for split_constant_offset.  Expresses OP0 CODE OP1
+   (the type of the result is TYPE) as VAR + OFF, where OFF is a nonzero
+   constant of type ssizetype, and returns true.  If we cannot do this
+   with OFF nonzero, OFF and VAR are set to NULL_TREE instead and false
+   is returned.  */
 
-void
-split_constant_offset (tree exp, tree *var, tree *off)
+static bool
+split_constant_offset_1 (tree type, tree op0, enum tree_code code, tree op1,
+                        tree *var, tree *off)
 {
-  tree type = TREE_TYPE (exp), otype;
   tree var0, var1;
   tree off0, off1;
-  enum tree_code code;
+  enum tree_code ocode = code;
 
-  *var = exp;
-  STRIP_NOPS (exp);
-  otype = TREE_TYPE (exp);
-  code = TREE_CODE (exp);
+  *var = NULL_TREE;
+  *off = NULL_TREE;
 
   switch (code)
     {
     case INTEGER_CST:
       *var = build_int_cst (type, 0);
-      *off = fold_convert (ssizetype, exp);
-      return;
+      *off = fold_convert (ssizetype, op0);
+      return true;
 
     case POINTER_PLUS_EXPR:
-      code = PLUS_EXPR;
+      ocode = PLUS_EXPR;
       /* FALLTHROUGH */
     case PLUS_EXPR:
     case MINUS_EXPR:
-      split_constant_offset (TREE_OPERAND (exp, 0), &var0, &off0);
-      split_constant_offset (TREE_OPERAND (exp, 1), &var1, &off1);
-      *var = fold_convert (type, fold_build2 (TREE_CODE (exp), otype, 
-                                             var0, var1));
-      *off = size_binop (code, off0, off1);
-      return;
+      split_constant_offset (op0, &var0, &off0);
+      split_constant_offset (op1, &var1, &off1);
+      *var = fold_build2 (code, type, var0, var1);
+      *off = size_binop (ocode, off0, off1);
+      return true;
 
     case MULT_EXPR:
-      off1 = TREE_OPERAND (exp, 1);
-      if (TREE_CODE (off1) != INTEGER_CST)
-       break;
+      if (TREE_CODE (op1) != INTEGER_CST)
+       return false;
 
-      split_constant_offset (TREE_OPERAND (exp, 0), &var0, &off0);
-      *var = fold_convert (type, fold_build2 (MULT_EXPR, otype,
-                                             var0, off1));
-      *off = size_binop (MULT_EXPR, off0, fold_convert (ssizetype, off1));
-      return;
+      split_constant_offset (op0, &var0, &off0);
+      *var = fold_build2 (MULT_EXPR, type, var0, op1);
+      *off = size_binop (MULT_EXPR, off0, fold_convert (ssizetype, op1));
+      return true;
 
     case ADDR_EXPR:
       {
-       tree op, base, poffset;
+       tree base, poffset;
        HOST_WIDE_INT pbitsize, pbitpos;
        enum machine_mode pmode;
        int punsignedp, pvolatilep;
 
-       op = TREE_OPERAND (exp, 0);
-       if (!handled_component_p (op))
-         break;
+       if (!handled_component_p (op0))
+         return false;
 
-       base = get_inner_reference (op, &pbitsize, &pbitpos, &poffset,
+       base = get_inner_reference (op0, &pbitsize, &pbitpos, &poffset,
                                    &pmode, &punsignedp, &pvolatilep, false);
 
        if (pbitpos % BITS_PER_UNIT != 0)
-         break;
+         return false;
        base = build_fold_addr_expr (base);
        off0 = ssize_int (pbitpos / BITS_PER_UNIT);
 
@@ -595,40 +592,57 @@ split_constant_offset (tree exp, tree *var, tree *off)
        while (POINTER_TYPE_P (type))
          type = TREE_TYPE (type);
        if (int_size_in_bytes (type) < 0)
-         break;
+         return false;
 
        *var = var0;
        *off = off0;
-       return;
+       return true;
       }
 
     case SSA_NAME:
       {
-       tree def_stmt = SSA_NAME_DEF_STMT (exp);
-       if (TREE_CODE (def_stmt) == GIMPLE_MODIFY_STMT)
-         {
-           tree def_stmt_rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);
+       gimple def_stmt = SSA_NAME_DEF_STMT (op0);
+       enum tree_code subcode;
 
-           if (!TREE_SIDE_EFFECTS (def_stmt_rhs) 
-               && EXPR_P (def_stmt_rhs)
-               && !REFERENCE_CLASS_P (def_stmt_rhs)
-               && !get_call_expr_in (def_stmt_rhs))
-             {
-               split_constant_offset (def_stmt_rhs, &var0, &off0);
-               var0 = fold_convert (type, var0);
-               *var = var0;
-               *off = off0;
-               return;
-             }
-         }
-       break;
+       if (gimple_code (def_stmt) != GIMPLE_ASSIGN)
+         return false;
+
+       var0 = gimple_assign_rhs1 (def_stmt);
+       subcode = gimple_assign_rhs_code (def_stmt);
+       var1 = gimple_assign_rhs2 (def_stmt);
+
+       return split_constant_offset_1 (type, var0, subcode, var1, var, off);
       }
 
     default:
-      break;
+      return false;
     }
+}
+
+/* Expresses EXP as VAR + OFF, where off is a constant.  The type of OFF
+   will be ssizetype.  */
+
+void
+split_constant_offset (tree exp, tree *var, tree *off)
+{
+  tree type = TREE_TYPE (exp), otype, op0, op1, e, o;
+  enum tree_code code;
 
+  *var = exp;
   *off = ssize_int (0);
+  STRIP_NOPS (exp);
+
+  if (automatically_generated_chrec_p (exp))
+    return;
+
+  otype = TREE_TYPE (exp);
+  code = TREE_CODE (exp);
+  extract_ops_from_tree (exp, &code, &op0, &op1);
+  if (split_constant_offset_1 (otype, op0, code, op1, &e, &o))
+    {
+      *var = fold_convert (type, e);
+      *off = o;
+    }
 }
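
A hedged usage sketch of the interface above (has_constant_component_p is a hypothetical caller, not from the patch): split_constant_offset still decomposes EXP into a variable part and a constant ssizetype offset, now reaching the operands through extract_ops_from_tree and, for SSA names, through the defining GIMPLE_ASSIGN inside split_constant_offset_1.

/* Return true if EXP carries a nonzero constant component.  For
   EXP == j_1 + 8 one expects VAR == j_1 and OFF == ssize_int (8);
   when nothing can be split off, VAR == EXP and OFF stays zero.  */
static bool
has_constant_component_p (tree exp)
{
  tree var, off;

  split_constant_offset (exp, &var, &off);
  return !integer_zerop (off);
}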
 
 /* Returns the address ADDR of an object in a canonical shape (without nop
@@ -658,7 +672,7 @@ canonicalize_base_object_address (tree addr)
 void
 dr_analyze_innermost (struct data_reference *dr)
 {
-  tree stmt = DR_STMT (dr);
+  gimple stmt = DR_STMT (dr);
   struct loop *loop = loop_containing_stmt (stmt);
   tree ref = DR_REF (dr);
   HOST_WIDE_INT pbitsize, pbitpos;
@@ -729,7 +743,7 @@ dr_analyze_innermost (struct data_reference *dr)
 static void
 dr_analyze_indices (struct data_reference *dr, struct loop *nest)
 {
-  tree stmt = DR_STMT (dr);
+  gimple stmt = DR_STMT (dr);
   struct loop *loop = loop_containing_stmt (stmt);
   VEC (tree, heap) *access_fns = NULL;
   tree ref = unshare_expr (DR_REF (dr)), aref = ref, op;
@@ -773,7 +787,7 @@ dr_analyze_indices (struct data_reference *dr, struct loop *nest)
 static void
 dr_analyze_alias (struct data_reference *dr)
 {
-  tree stmt = DR_STMT (dr);
+  gimple stmt = DR_STMT (dr);
   tree ref = DR_REF (dr);
   tree base = get_base_address (ref), addr, smt = NULL_TREE;
   ssa_op_iter it;
@@ -834,7 +848,7 @@ free_data_ref (data_reference_p dr)
    loop nest in that the reference should be analyzed.  */
 
 struct data_reference *
-create_data_ref (struct loop *nest, tree memref, tree stmt, bool is_read)
+create_data_ref (struct loop *nest, tree memref, gimple stmt, bool is_read)
 {
   struct data_reference *dr;
 
@@ -1537,8 +1551,8 @@ analyze_ziv_subscript (tree chrec_a,
     fprintf (dump_file, "(analyze_ziv_subscript \n");
 
   type = signed_type_for_types (TREE_TYPE (chrec_a), TREE_TYPE (chrec_b));
-  chrec_a = chrec_convert (type, chrec_a, NULL_TREE);
-  chrec_b = chrec_convert (type, chrec_b, NULL_TREE);
+  chrec_a = chrec_convert (type, chrec_a, NULL);
+  chrec_b = chrec_convert (type, chrec_b, NULL);
   difference = chrec_fold_minus (type, chrec_a, chrec_b);
   
   switch (TREE_CODE (difference))
@@ -1668,8 +1682,8 @@ analyze_siv_subscript_cst_affine (tree chrec_a,
   tree type, difference, tmp;
 
   type = signed_type_for_types (TREE_TYPE (chrec_a), TREE_TYPE (chrec_b));
-  chrec_a = chrec_convert (type, chrec_a, NULL_TREE);
-  chrec_b = chrec_convert (type, chrec_b, NULL_TREE);
+  chrec_a = chrec_convert (type, chrec_a, NULL);
+  chrec_b = chrec_convert (type, chrec_b, NULL);
   difference = chrec_fold_minus (type, initial_condition (chrec_b), chrec_a);
   
   if (!chrec_is_positive (initial_condition (difference), &value0))
@@ -1875,7 +1889,7 @@ initialize_matrix_A (lambda_matrix A, tree chrec, unsigned index, int mult)
     case NOP_EXPR:
       {
        tree op = initialize_matrix_A (A, TREE_OPERAND (chrec, 0), index, mult);
-       return chrec_convert (chrec_type (chrec), op, NULL_TREE);
+       return chrec_convert (chrec_type (chrec), op, NULL);
       }
 
     case INTEGER_CST:
@@ -2365,7 +2379,7 @@ can_use_analyze_subscript_affine_affine (tree *chrec_a, tree *chrec_b)
 
   type = chrec_type (*chrec_a);
   left_a = CHREC_LEFT (*chrec_a);
-  left_b = chrec_convert (type, CHREC_LEFT (*chrec_b), NULL_TREE);
+  left_b = chrec_convert (type, CHREC_LEFT (*chrec_b), NULL);
   diff = chrec_fold_minus (type, left_a, left_b);
 
   if (!evolution_function_is_constant_p (diff))
@@ -2376,7 +2390,7 @@ can_use_analyze_subscript_affine_affine (tree *chrec_a, tree *chrec_b)
 
   *chrec_a = build_polynomial_chrec (CHREC_VARIABLE (*chrec_a), 
                                     diff, CHREC_RIGHT (*chrec_a));
-  right_b = chrec_convert (type, CHREC_RIGHT (*chrec_b), NULL_TREE);
+  right_b = chrec_convert (type, CHREC_RIGHT (*chrec_b), NULL);
   *chrec_b = build_polynomial_chrec (CHREC_VARIABLE (*chrec_b),
                                     build_int_cst (type, 0),
                                     right_b);
@@ -2523,8 +2537,8 @@ analyze_miv_subscript (tree chrec_a,
     fprintf (dump_file, "(analyze_miv_subscript \n");
 
   type = signed_type_for_types (TREE_TYPE (chrec_a), TREE_TYPE (chrec_b));
-  chrec_a = chrec_convert (type, chrec_a, NULL_TREE);
-  chrec_b = chrec_convert (type, chrec_b, NULL_TREE);
+  chrec_a = chrec_convert (type, chrec_a, NULL);
+  chrec_b = chrec_convert (type, chrec_b, NULL);
   difference = chrec_fold_minus (type, chrec_a, chrec_b);
   
   if (eq_evolutions_p (chrec_a, chrec_b))
@@ -3474,8 +3488,8 @@ omega_setup_subscript (tree access_fun_a, tree access_fun_b,
   int eq;
   tree type = signed_type_for_types (TREE_TYPE (access_fun_a),
                                     TREE_TYPE (access_fun_b));
-  tree fun_a = chrec_convert (type, access_fun_a, NULL_TREE);
-  tree fun_b = chrec_convert (type, access_fun_b, NULL_TREE);
+  tree fun_a = chrec_convert (type, access_fun_a, NULL);
+  tree fun_b = chrec_convert (type, access_fun_b, NULL);
   tree difference = chrec_fold_minus (type, fun_a, fun_b);
 
   /* When the fun_a - fun_b is not constant, the dependence is not
@@ -3835,9 +3849,9 @@ compute_affine_dependence (struct data_dependence_relation *ddr,
     {
       fprintf (dump_file, "(compute_affine_dependence\n");
       fprintf (dump_file, "  (stmt_a = \n");
-      print_generic_expr (dump_file, DR_STMT (dra), 0);
+      print_gimple_stmt (dump_file, DR_STMT (dra), 0, 0);
       fprintf (dump_file, ")\n  (stmt_b = \n");
-      print_generic_expr (dump_file, DR_STMT (drb), 0);
+      print_gimple_stmt (dump_file, DR_STMT (drb), 0, 0);
       fprintf (dump_file, ")\n");
     }
 
@@ -3988,32 +4002,32 @@ compute_all_dependences (VEC (data_reference_p, heap) *datarefs,
    true if STMT clobbers memory, false otherwise.  */
 
 bool
-get_references_in_stmt (tree stmt, VEC (data_ref_loc, heap) **references)
+get_references_in_stmt (gimple stmt, VEC (data_ref_loc, heap) **references)
 {
   bool clobbers_memory = false;
   data_ref_loc *ref;
-  tree *op0, *op1, call;
+  tree *op0, *op1;
+  enum gimple_code stmt_code = gimple_code (stmt);
 
   *references = NULL;
 
   /* ASM_EXPR and CALL_EXPR may embed arbitrary side effects.
      Calls have side-effects, except those to const or pure
      functions.  */
-  call = get_call_expr_in (stmt);
-  if ((call
-       && !(call_expr_flags (call) & (ECF_CONST | ECF_PURE)))
-      || (TREE_CODE (stmt) == ASM_EXPR
-         && ASM_VOLATILE_P (stmt)))
+  if ((stmt_code == GIMPLE_CALL
+       && !(gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE)))
+      || (stmt_code == GIMPLE_ASM
+         && gimple_asm_volatile_p (stmt)))
     clobbers_memory = true;
 
   if (ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
     return clobbers_memory;
 
-  if (TREE_CODE (stmt) ==  GIMPLE_MODIFY_STMT)
+  if (stmt_code == GIMPLE_ASSIGN)
     {
       tree base;
-      op0 = &GIMPLE_STMT_OPERAND (stmt, 0);
-      op1 = &GIMPLE_STMT_OPERAND (stmt, 1);
+      op0 = gimple_assign_lhs_ptr (stmt);
+      op1 = gimple_assign_rhs1_ptr (stmt);
                
       if (DECL_P (*op1)
          || (REFERENCE_CLASS_P (*op1)
@@ -4033,14 +4047,13 @@ get_references_in_stmt (tree stmt, VEC (data_ref_loc, heap) **references)
          ref->is_read = false;
        }
     }
-
-  if (call)
+  else if (stmt_code == GIMPLE_CALL)
     {
-      unsigned i, n = call_expr_nargs (call);
+      unsigned i, n = gimple_call_num_args (stmt);
 
       for (i = 0; i < n; i++)
        {
-         op0 = &CALL_EXPR_ARG (call, i);
+         op0 = gimple_call_arg_ptr (stmt, i);
 
          if (DECL_P (*op0)
              || (REFERENCE_CLASS_P (*op0) && get_base_address (*op0)))
@@ -4060,7 +4073,7 @@ get_references_in_stmt (tree stmt, VEC (data_ref_loc, heap) **references)
    loop of the loop nest in that the references should be analyzed.  */
 
 static bool
-find_data_references_in_stmt (struct loop *nest, tree stmt,
+find_data_references_in_stmt (struct loop *nest, gimple stmt,
                              VEC (data_reference_p, heap) **datarefs)
 {
   unsigned i;
@@ -4110,7 +4123,7 @@ find_data_references_in_loop (struct loop *loop,
 {
   basic_block bb, *bbs;
   unsigned int i;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator bsi;
 
   bbs = get_loop_body_in_dom_order (loop);
 
@@ -4118,9 +4131,9 @@ find_data_references_in_loop (struct loop *loop,
     {
       bb = bbs[i];
 
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        {
-         tree stmt = bsi_stmt (bsi);
+         gimple stmt = gsi_stmt (bsi);
 
          if (!find_data_references_in_stmt (loop, stmt, datarefs))
            {
@@ -4443,7 +4456,7 @@ dump_rdg_vertex (FILE *file, struct graph *rdg, int i)
       fprintf (file, " %d", e->dest);
 
   fprintf (file, ") \n");
-  print_generic_stmt (file, RDGV_STMT (v), TDF_VOPS|TDF_MEMSYMS);
+  print_gimple_stmt (file, RDGV_STMT (v), 0, TDF_VOPS|TDF_MEMSYMS);
   fprintf (file, ")\n");
 }
 
@@ -4579,14 +4592,14 @@ dot_rdg (struct graph *rdg)
 
 struct rdg_vertex_info GTY(())
 {
-  tree stmt;
+  gimple stmt;
   int index;
 };
 
 /* Returns the index of STMT in RDG.  */
 
 int
-rdg_vertex_for_stmt (struct graph *rdg, tree stmt)
+rdg_vertex_for_stmt (struct graph *rdg, gimple stmt)
 {
   struct rdg_vertex_info rvi, *slot;
 
@@ -4690,12 +4703,12 @@ create_rdg_edges (struct graph *rdg, VEC (ddr_p, heap) *ddrs)
 /* Build the vertices of the reduced dependence graph RDG.  */
 
 static void
-create_rdg_vertices (struct graph *rdg, VEC (tree, heap) *stmts)
+create_rdg_vertices (struct graph *rdg, VEC (gimple, heap) *stmts)
 {
   int i, j;
-  tree stmt;
+  gimple stmt;
 
-  for (i = 0; VEC_iterate (tree, stmts, i, stmt); i++)
+  for (i = 0; VEC_iterate (gimple, stmts, i, stmt); i++)
     {
       VEC (data_ref_loc, heap) *references;
       data_ref_loc *ref;
@@ -4717,7 +4730,7 @@ create_rdg_vertices (struct graph *rdg, VEC (tree, heap) *stmts)
 
       RDG_MEM_WRITE_STMT (rdg, i) = false;
       RDG_MEM_READS_STMT (rdg, i) = false;
-      if (TREE_CODE (stmt) == PHI_NODE)
+      if (gimple_code (stmt) == GIMPLE_PHI)
        continue;
 
       get_references_in_stmt (stmt, &references);
@@ -4738,23 +4751,26 @@ create_rdg_vertices (struct graph *rdg, VEC (tree, heap) *stmts)
    identifying statements. */
 
 static void
-stmts_from_loop (struct loop *loop, VEC (tree, heap) **stmts)
+stmts_from_loop (struct loop *loop, VEC (gimple, heap) **stmts)
 {
   unsigned int i;
   basic_block *bbs = get_loop_body_in_dom_order (loop);
 
   for (i = 0; i < loop->num_nodes; i++)
     {
-      tree phi, stmt;
       basic_block bb = bbs[i];
-      block_stmt_iterator bsi;
+      gimple_stmt_iterator bsi;
+      gimple stmt;
 
-      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
-       VEC_safe_push (tree, heap, *stmts, phi);
+      for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
+       VEC_safe_push (gimple, heap, *stmts, gsi_stmt (bsi));
 
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
-       if (TREE_CODE (stmt = bsi_stmt (bsi)) != LABEL_EXPR)
-         VEC_safe_push (tree, heap, *stmts, stmt);
+      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
+       {
+         stmt = gsi_stmt (bsi);
+         if (gimple_code (stmt) != GIMPLE_LABEL)
+           VEC_safe_push (gimple, heap, *stmts, stmt);
+       }
     }
 
   free (bbs);
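
The other recurring change in this file is PHI traversal: the old phi_nodes/PHI_CHAIN list walk becomes a gsi_start_phis iterator loop, as in stmts_from_loop above.  A minimal sketch (count_phis is a hypothetical helper under the same header assumptions):

/* Count the PHI nodes of BB using the tuple PHI iterator.  */
static unsigned
count_phis (basic_block bb)
{
  gimple_stmt_iterator psi;
  unsigned n = 0;

  for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
    n++;
  return n;
}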
@@ -4782,7 +4798,7 @@ hash_stmt_vertex_info (const void *elt)
 {
   const struct rdg_vertex_info *const rvi =
     (const struct rdg_vertex_info *) elt;
-  const_tree stmt = rvi->stmt;
+  gimple stmt = rvi->stmt;
 
   return htab_hash_pointer (stmt);
 }
@@ -4817,7 +4833,7 @@ build_rdg (struct loop *loop)
   struct graph *rdg = NULL;
   VEC (ddr_p, heap) *dependence_relations;
   VEC (data_reference_p, heap) *datarefs;
-  VEC (tree, heap) *stmts = VEC_alloc (tree, heap, nb_data_refs);
+  VEC (gimple, heap) *stmts = VEC_alloc (gimple, heap, nb_data_refs);
   
   dependence_relations = VEC_alloc (ddr_p, heap, nb_data_refs * nb_data_refs) ;
   datarefs = VEC_alloc (data_reference_p, heap, nb_data_refs);
@@ -4830,7 +4846,7 @@ build_rdg (struct loop *loop)
     goto end_rdg;
 
   stmts_from_loop (loop, &stmts);
-  rdg = new_graph (VEC_length (tree, stmts));
+  rdg = new_graph (VEC_length (gimple, stmts));
 
   rdg->indices = htab_create (nb_data_refs, hash_stmt_vertex_info,
                              eq_stmt_vertex_info, hash_stmt_vertex_del);
@@ -4840,7 +4856,7 @@ build_rdg (struct loop *loop)
  end_rdg:
   free_dependence_relations (dependence_relations);
   free_data_refs (datarefs);
-  VEC_free (tree, heap, stmts);
+  VEC_free (gimple, heap, stmts);
 
   return rdg;
 }
@@ -4863,7 +4879,7 @@ free_rdg (struct graph *rdg)
    store to memory.  */
 
 void
-stores_from_loop (struct loop *loop, VEC (tree, heap) **stmts)
+stores_from_loop (struct loop *loop, VEC (gimple, heap) **stmts)
 {
   unsigned int i;
   basic_block *bbs = get_loop_body_in_dom_order (loop);
@@ -4871,11 +4887,11 @@ stores_from_loop (struct loop *loop, VEC (tree, heap) **stmts)
   for (i = 0; i < loop->num_nodes; i++)
     {
       basic_block bb = bbs[i];
-      block_stmt_iterator bsi;
+      gimple_stmt_iterator bsi;
 
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
-       if (!ZERO_SSA_OPERANDS (bsi_stmt (bsi), SSA_OP_VDEF))
-         VEC_safe_push (tree, heap, *stmts, bsi_stmt (bsi));
+      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
+       if (!ZERO_SSA_OPERANDS (gsi_stmt (bsi), SSA_OP_VDEF))
+         VEC_safe_push (gimple, heap, *stmts, gsi_stmt (bsi));
     }
 
   free (bbs);
@@ -4885,7 +4901,7 @@ stores_from_loop (struct loop *loop, VEC (tree, heap) **stmts)
    address or NULL_TREE if the base is not determined.  */
 
 static inline tree
-ref_base_address (tree stmt, data_ref_loc *ref)
+ref_base_address (gimple stmt, data_ref_loc *ref)
 {
   tree base = NULL_TREE;
   tree base_address;
@@ -4921,7 +4937,7 @@ ref_base_address (tree stmt, data_ref_loc *ref)
 bool
 rdg_defs_used_in_other_loops_p (struct graph *rdg, int v)
 {
-  tree stmt = RDG_STMT (rdg, v);
+  gimple stmt = RDG_STMT (rdg, v);
   struct loop *loop = loop_containing_stmt (stmt);
   use_operand_p imm_use_p;
   imm_use_iterator iterator;
@@ -4949,7 +4965,7 @@ rdg_defs_used_in_other_loops_p (struct graph *rdg, int v)
    ref_base_address is the same.  */
 
 bool
-have_similar_memory_accesses (tree s1, tree s2)
+have_similar_memory_accesses (gimple s1, gimple s2)
 {
   bool res = false;
   unsigned i, j;
@@ -4983,8 +4999,8 @@ have_similar_memory_accesses (tree s1, tree s2)
 static int
 have_similar_memory_accesses_1 (const void *s1, const void *s2)
 {
-  return have_similar_memory_accesses (CONST_CAST_TREE ((const_tree)s1),
-                                      CONST_CAST_TREE ((const_tree)s2));
+  return have_similar_memory_accesses (CONST_CAST_GIMPLE ((const_gimple) s1),
+                                      CONST_CAST_GIMPLE ((const_gimple) s2));
 }
 
 /* Helper function for the hashtab.  */
@@ -4992,7 +5008,7 @@ have_similar_memory_accesses_1 (const void *s1, const void *s2)
 static hashval_t
 ref_base_address_1 (const void *s)
 {
-  tree stmt = CONST_CAST_TREE((const_tree)s);
+  gimple stmt = CONST_CAST_GIMPLE ((const_gimple) s);
   unsigned i;
   VEC (data_ref_loc, heap) *refs;
   data_ref_loc *ref;
@@ -5014,21 +5030,21 @@ ref_base_address_1 (const void *s)
 /* Try to remove duplicated write data references from STMTS.  */
 
 void
-remove_similar_memory_refs (VEC (tree, heap) **stmts)
+remove_similar_memory_refs (VEC (gimple, heap) **stmts)
 {
   unsigned i;
-  tree stmt;
-  htab_t seen = htab_create (VEC_length (tree, *stmts), ref_base_address_1,
+  gimple stmt;
+  htab_t seen = htab_create (VEC_length (gimple, *stmts), ref_base_address_1,
                             have_similar_memory_accesses_1, NULL);
 
-  for (i = 0; VEC_iterate (tree, *stmts, i, stmt); )
+  for (i = 0; VEC_iterate (gimple, *stmts, i, stmt); )
     {
       void **slot;
 
       slot = htab_find_slot (seen, stmt, INSERT);
 
       if (*slot)
-       VEC_ordered_remove (tree, *stmts, i);
+       VEC_ordered_remove (gimple, *stmts, i);
       else
        {
          *slot = (void *) stmt;
index c1672eb..639a32b 100644 (file)
@@ -156,7 +156,7 @@ int access_matrix_get_index_for_parameter (tree, struct access_matrix *);
 struct data_reference
 {
   /* A pointer to the statement that contains this DR.  */
-  tree stmt;
+  gimple stmt;
   
   /* A pointer to the memory reference.  */
   tree ref;
@@ -368,7 +368,7 @@ typedef struct data_ref_loc_d
 DEF_VEC_O (data_ref_loc);
 DEF_VEC_ALLOC_O (data_ref_loc, heap);
 
-bool get_references_in_stmt (tree, VEC (data_ref_loc, heap) **);
+bool get_references_in_stmt (gimple, VEC (data_ref_loc, heap) **);
 void dr_analyze_innermost (struct data_reference *);
 extern bool compute_data_dependences_for_loop (struct loop *, bool,
                                               VEC (data_reference_p, heap) **,
@@ -392,7 +392,7 @@ extern void free_dependence_relation (struct data_dependence_relation *);
 extern void free_dependence_relations (VEC (ddr_p, heap) *);
 extern void free_data_ref (data_reference_p);
 extern void free_data_refs (VEC (data_reference_p, heap) *);
-struct data_reference *create_data_ref (struct loop *, tree, tree, bool);
+struct data_reference *create_data_ref (struct loop *, tree, gimple, bool);
 bool find_loop_nest (struct loop *, VEC (loop_p, heap) **);
 void compute_all_dependences (VEC (data_reference_p, heap) *,
                              VEC (ddr_p, heap) **, VEC (loop_p, heap) *, bool);
@@ -462,7 +462,7 @@ ddr_dependence_level (ddr_p ddr)
 typedef struct rdg_vertex
 {
   /* The statement represented by this vertex.  */
-  tree stmt;
+  gimple stmt;
 
   /* True when the statement contains a write to memory.  */
   bool has_mem_write;
@@ -485,7 +485,7 @@ void debug_rdg_component (struct graph *, int);
 void dump_rdg (FILE *, struct graph *);
 void debug_rdg (struct graph *);
 void dot_rdg (struct graph *);
-int rdg_vertex_for_stmt (struct graph *, tree);
+int rdg_vertex_for_stmt (struct graph *, gimple);
 
 /* Data dependence type.  */
 
@@ -538,10 +538,10 @@ index_in_loop_nest (int var, VEC (loop_p, heap) *loop_nest)
   return var_index;
 }
 
-void stores_from_loop (struct loop *, VEC (tree, heap) **);
-void remove_similar_memory_refs (VEC (tree, heap) **);
+void stores_from_loop (struct loop *, VEC (gimple, heap) **);
+void remove_similar_memory_refs (VEC (gimple, heap) **);
 bool rdg_defs_used_in_other_loops_p (struct graph *, int);
-bool have_similar_memory_accesses (tree, tree);
+bool have_similar_memory_accesses (gimple, gimple);
 
 /* Determines whether RDG vertices V1 and V2 access to similar memory
    locations, in which case they have to be in the same partition.  */
index 285af39..df0be2d 100644 (file)
@@ -39,7 +39,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "function.h"
 #include "diagnostic.h"
 #include "tree-dump.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "tree-flow.h"
 #include "tree-inline.h"
 #include "tree-pass.h"
@@ -52,13 +52,12 @@ along with GCC; see the file COPYING3.  If not see
 /* Counters used to display DFA and SSA statistics.  */
 struct dfa_stats_d
 {
-  long num_stmt_anns;
   long num_var_anns;
   long num_defs;
   long num_uses;
   long num_phis;
   long num_phi_args;
-  int max_num_phi_args;
+  size_t max_num_phi_args;
   long num_vdefs;
   long num_vuses;
 };
@@ -66,7 +65,6 @@ struct dfa_stats_d
 
 /* Local functions.  */
 static void collect_dfa_stats (struct dfa_stats_d *);
-static tree collect_dfa_stats_r (tree *, int *, void *);
 static tree find_vars_r (tree *, int *, void *);
 
 
@@ -85,27 +83,28 @@ static unsigned int
 find_referenced_vars (void)
 {
   basic_block bb;
-  block_stmt_iterator si;
-  tree phi;
+  gimple_stmt_iterator si;
 
   FOR_EACH_BB (bb)
     {
-      for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
+      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
        {
-         tree *stmt_p = bsi_stmt_ptr (si);
-         walk_tree (stmt_p, find_vars_r, NULL, NULL);
+         size_t i;
+         gimple stmt = gsi_stmt (si);
+         for (i = 0; i < gimple_num_ops (stmt); i++)
+           walk_tree (gimple_op_ptr (stmt, i), find_vars_r, NULL, NULL);
        }
 
-      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
        {
-         int len = PHI_NUM_ARGS (phi);
-         int i;
+         gimple phi = gsi_stmt (si);
+         size_t i, len = gimple_phi_num_args (phi);
 
-         walk_tree (&phi, find_vars_r, NULL, NULL);
+         walk_tree (gimple_phi_result_ptr (phi), find_vars_r, NULL, NULL);
 
          for (i = 0; i < len; i++)
            {
-             tree arg = PHI_ARG_DEF (phi, i);
+             tree arg = gimple_phi_arg_def (phi, i);
              walk_tree (&arg, find_vars_r, NULL, NULL);
            }
        }
@@ -176,29 +175,6 @@ create_function_ann (tree t)
   return ann;
 }
 
-/* Create a new annotation for a statement node T.  */
-
-stmt_ann_t
-create_stmt_ann (tree t)
-{
-  stmt_ann_t ann;
-
-  gcc_assert (is_gimple_stmt (t));
-  gcc_assert (!t->base.ann || t->base.ann->common.type == STMT_ANN);
-
-  ann = GGC_CNEW (struct stmt_ann_d);
-
-  ann->common.type = STMT_ANN;
-
-  /* Since we just created the annotation, mark the statement modified.  */
-  ann->modified = true;
-
-  ann->uid = inc_gimple_stmt_max_uid (cfun);
-  t->base.ann = (tree_ann_t) ann;
-
-  return ann;
-}
-
 /* Renumber all of the gimple stmt uids.  */
 
 void 
@@ -209,17 +185,11 @@ renumber_gimple_stmt_uids (void)
   set_gimple_stmt_max_uid (cfun, 0);
   FOR_ALL_BB (bb)
     {
-      block_stmt_iterator bsi;
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+      gimple_stmt_iterator bsi;
+      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        {
-         tree stmt = bsi_stmt (bsi);
-         /* If the stmt has an annotation, then overwrite it, if not,
-            the process of getting it will set the number
-            properly.  */
-         if (has_stmt_ann (stmt))
-           set_gimple_stmt_uid (stmt, inc_gimple_stmt_max_uid (cfun));
-         else
-           get_stmt_ann (stmt);
+         gimple stmt = gsi_stmt (bsi);
+         gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
        }
     }
 }
@@ -237,6 +207,7 @@ create_tree_common_ann (tree t)
   ann = GGC_CNEW (struct tree_ann_common_d);
 
   ann->type = TREE_ANN_COMMON;
+  ann->rn = -1;
   t->base.ann = (tree_ann_t) ann;
 
   return ann;
@@ -448,11 +419,6 @@ dump_dfa_stats (FILE *file)
   fprintf (file, fmt_str_1, "Referenced variables", (unsigned long)num_referenced_vars,
           SCALE (size), LABEL (size));
 
-  size = dfa_stats.num_stmt_anns * sizeof (struct stmt_ann_d);
-  total += size;
-  fprintf (file, fmt_str_1, "Statements annotated", dfa_stats.num_stmt_anns,
-          SCALE (size), LABEL (size));
-
   size = dfa_stats.num_var_anns * sizeof (struct var_ann_d);
   total += size;
   fprintf (file, fmt_str_1, "Variables annotated", dfa_stats.num_var_anns,
@@ -478,7 +444,7 @@ dump_dfa_stats (FILE *file)
   fprintf (file, fmt_str_1, "VDEF operands", dfa_stats.num_vdefs,
           SCALE (size), LABEL (size));
 
-  size = dfa_stats.num_phis * sizeof (struct tree_phi_node);
+  size = dfa_stats.num_phis * sizeof (struct gimple_statement_phi);
   total += size;
   fprintf (file, fmt_str_1, "PHI nodes", dfa_stats.num_phis,
           SCALE (size), LABEL (size));
@@ -495,9 +461,9 @@ dump_dfa_stats (FILE *file)
   fprintf (file, "\n");
 
   if (dfa_stats.num_phis)
-    fprintf (file, "Average number of arguments per PHI node: %.1f (max: %d)\n",
+    fprintf (file, "Average number of arguments per PHI node: %.1f (max: %ld)\n",
             (float) dfa_stats.num_phi_args / (float) dfa_stats.num_phis,
-            dfa_stats.max_num_phi_args);
+            (long) dfa_stats.max_num_phi_args);
 
   fprintf (file, "\n");
 }
@@ -516,75 +482,44 @@ debug_dfa_stats (void)
    DFA_STATS_P.  */
 
 static void
-collect_dfa_stats (struct dfa_stats_d *dfa_stats_p)
+collect_dfa_stats (struct dfa_stats_d *dfa_stats_p ATTRIBUTE_UNUSED)
 {
-  struct pointer_set_t *pset;
   basic_block bb;
-  block_stmt_iterator i;
+  referenced_var_iterator vi;
+  tree var;
 
   gcc_assert (dfa_stats_p);
 
   memset ((void *)dfa_stats_p, 0, sizeof (struct dfa_stats_d));
 
-  /* Walk all the trees in the function counting references.  Start at
-     basic block NUM_FIXED_BLOCKS, but don't stop at block boundaries.  */
-  pset = pointer_set_create ();
-
-  for (i = bsi_start (BASIC_BLOCK (NUM_FIXED_BLOCKS));
-       !bsi_end_p (i); bsi_next (&i))
-    walk_tree (bsi_stmt_ptr (i), collect_dfa_stats_r, (void *) dfa_stats_p,
-              pset);
-
-  pointer_set_destroy (pset);
+  /* Count all the variable annotations.  */
+  FOR_EACH_REFERENCED_VAR (var, vi)
+    if (var_ann (var))
+      dfa_stats_p->num_var_anns++;
 
+  /* Walk all the statements in the function counting references.  */
   FOR_EACH_BB (bb)
     {
-      tree phi;
-      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+      gimple_stmt_iterator si;
+
+      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
        {
+         gimple phi = gsi_stmt (si);
          dfa_stats_p->num_phis++;
-         dfa_stats_p->num_phi_args += PHI_NUM_ARGS (phi);
-         if (PHI_NUM_ARGS (phi) > dfa_stats_p->max_num_phi_args)
-           dfa_stats_p->max_num_phi_args = PHI_NUM_ARGS (phi);
+         dfa_stats_p->num_phi_args += gimple_phi_num_args (phi);
+         if (gimple_phi_num_args (phi) > dfa_stats_p->max_num_phi_args)
+           dfa_stats_p->max_num_phi_args = gimple_phi_num_args (phi);
        }
-    }
-}
 
-
-/* Callback for walk_tree to collect DFA statistics for a tree and its
-   children.  */
-
-static tree
-collect_dfa_stats_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
-                    void *data)
-{
-  tree t = *tp;
-  struct dfa_stats_d *dfa_stats_p = (struct dfa_stats_d *)data;
-
-  if (t->base.ann)
-    {
-      switch (ann_type (t->base.ann))
+      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
        {
-       case STMT_ANN:
-         {
-           dfa_stats_p->num_stmt_anns++;
-           dfa_stats_p->num_defs += NUM_SSA_OPERANDS (t, SSA_OP_DEF);
-           dfa_stats_p->num_uses += NUM_SSA_OPERANDS (t, SSA_OP_USE);
-           dfa_stats_p->num_vdefs += NUM_SSA_OPERANDS (t, SSA_OP_VDEF);
-           dfa_stats_p->num_vuses += NUM_SSA_OPERANDS (t, SSA_OP_VUSE);
-           break;
-         }
-
-       case VAR_ANN:
-         dfa_stats_p->num_var_anns++;
-         break;
-
-       default:
-         break;
+         gimple stmt = gsi_stmt (si);
+         dfa_stats_p->num_defs += NUM_SSA_OPERANDS (stmt, SSA_OP_DEF);
+         dfa_stats_p->num_uses += NUM_SSA_OPERANDS (stmt, SSA_OP_USE);
+         dfa_stats_p->num_vdefs += NUM_SSA_OPERANDS (stmt, SSA_OP_VDEF);
+         dfa_stats_p->num_vuses += NUM_SSA_OPERANDS (stmt, SSA_OP_VUSE);
        }
     }
-
-  return NULL;
 }
 
 
@@ -800,7 +735,7 @@ get_virtual_var (tree var)
    combination push_stmt_changes/pop_stmt_changes.  */
 
 void
-mark_symbols_for_renaming (tree stmt)
+mark_symbols_for_renaming (gimple stmt)
 {
   tree op;
   ssa_op_iter iter;
@@ -814,8 +749,9 @@ mark_symbols_for_renaming (tree stmt)
 }
 
 
-/* Find all variables within the gimplified statement that were not previously
-   visible to the function and add them to the referenced variables list.  */
+/* Find all variables within the gimplified statement that were not
+   previously visible to the function and add them to the referenced
+   variables list.  */
 
 static tree
 find_new_referenced_vars_1 (tree *tp, int *walk_subtrees,
@@ -835,10 +771,13 @@ find_new_referenced_vars_1 (tree *tp, int *walk_subtrees,
   return NULL;
 }
 
+
+/* Find any new referenced variables in STMT.  */
+
 void
-find_new_referenced_vars (tree *stmt_p)
+find_new_referenced_vars (gimple stmt)
 {
-  walk_tree (stmt_p, find_new_referenced_vars_1, NULL, NULL);
+  walk_gimple_op (stmt, find_new_referenced_vars_1, NULL);
 }
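
find_new_referenced_vars now takes the statement itself and visits its operands with walk_gimple_op instead of running walk_tree over a statement tree.  A hedged sketch of the same pattern (note_decl_r and note_stmt_decls are hypothetical, not part of the patch):

/* Report every named declaration occurring in an operand of STMT;
   walk_gimple_op applies the callback to each operand tree.  */
static tree
note_decl_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
             void *data ATTRIBUTE_UNUSED)
{
  if (DECL_P (*tp) && DECL_NAME (*tp))
    fprintf (stderr, "operand decl: %s\n",
             IDENTIFIER_POINTER (DECL_NAME (*tp)));
  return NULL_TREE;
}

static void
note_stmt_decls (gimple stmt)
{
  walk_gimple_op (stmt, note_decl_r, NULL);
}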
 
 
@@ -1013,7 +952,7 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
    SSA_NAME_OCCURS_IN_ABNORMAL_PHI set, otherwise false.  */
 
 bool
-stmt_references_abnormal_ssa_name (tree stmt)
+stmt_references_abnormal_ssa_name (gimple stmt)
 {
   ssa_op_iter oi;
   use_operand_p use_p;
@@ -1163,25 +1102,25 @@ refs_may_alias_p (tree ref1, tree ref2)
    a PHI node as well.  Note that if all VUSEs are default definitions
    this function will return an empty statement.  */
 
-tree
-get_single_def_stmt (tree stmt)
+gimple
+get_single_def_stmt (gimple stmt)
 {
-  tree def_stmt = NULL_TREE;
+  gimple def_stmt = NULL;
   tree use;
   ssa_op_iter iter;
 
   FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_VIRTUAL_USES)
     {
-      tree tmp = SSA_NAME_DEF_STMT (use);
+      gimple tmp = SSA_NAME_DEF_STMT (use);
 
       /* ???  This is too simplistic for multiple virtual operands
         reaching different PHI nodes of the same basic blocks or for
         reaching all default definitions.  */
       if (def_stmt
          && def_stmt != tmp
-         && !(IS_EMPTY_STMT (def_stmt)
-              && IS_EMPTY_STMT (tmp)))
-       return NULL_TREE;
+         && !(gimple_nop_p (def_stmt)
+              && gimple_nop_p (tmp)))
+       return NULL;
 
       def_stmt = tmp;
     }
@@ -1195,25 +1134,25 @@ get_single_def_stmt (tree stmt)
    from a non-backedge.  Returns NULL_TREE if such statement within
    the above conditions cannot be found.  */
 
-tree
-get_single_def_stmt_from_phi (tree ref, tree phi)
+gimple
+get_single_def_stmt_from_phi (tree ref, gimple phi)
 {
   tree def_arg = NULL_TREE;
-  int i;
+  unsigned i;
 
   /* Find the single PHI argument that is not flowing in from a
      back edge and verify that the loop-carried definitions do
      not alias the reference we look for.  */
-  for (i = 0; i < PHI_NUM_ARGS (phi); ++i)
+  for (i = 0; i < gimple_phi_num_args (phi); ++i)
     {
       tree arg = PHI_ARG_DEF (phi, i);
-      tree def_stmt;
+      gimple def_stmt;
 
-      if (!(PHI_ARG_EDGE (phi, i)->flags & EDGE_DFS_BACK))
+      if (!(gimple_phi_arg_edge (phi, i)->flags & EDGE_DFS_BACK))
        {
          /* Multiple non-back edges?  Do not try to handle this.  */
          if (def_arg)
-           return NULL_TREE;
+           return NULL;
          def_arg = arg;
          continue;
        }
@@ -1223,14 +1162,14 @@ get_single_def_stmt_from_phi (tree ref, tree phi)
       def_stmt = SSA_NAME_DEF_STMT (arg);
       do
        {
-         if (TREE_CODE (def_stmt) != GIMPLE_MODIFY_STMT
-             || refs_may_alias_p (ref, GIMPLE_STMT_OPERAND (def_stmt, 0)))
-           return NULL_TREE;
+         if (!is_gimple_assign (def_stmt)
+             || refs_may_alias_p (ref, gimple_assign_lhs (def_stmt)))
+           return NULL;
          /* ???  This will only work, reaching the PHI node again if
             there is a single virtual operand on def_stmt.  */
          def_stmt = get_single_def_stmt (def_stmt);
          if (!def_stmt)
-           return NULL_TREE;
+           return NULL;
        }
       while (def_stmt != phi);
     }
@@ -1243,8 +1182,8 @@ get_single_def_stmt_from_phi (tree ref, tree phi)
    Take into account only definitions that alias REF if following
    back-edges when looking through a loop PHI node.  */
 
-tree
-get_single_def_stmt_with_phi (tree ref, tree stmt)
+gimple
+get_single_def_stmt_with_phi (tree ref, gimple stmt)
 {
   switch (NUM_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_USES))
     {
@@ -1253,11 +1192,11 @@ get_single_def_stmt_with_phi (tree ref, tree stmt)
 
     case 1:
       {
-       tree def_stmt = SSA_NAME_DEF_STMT (SINGLE_SSA_TREE_OPERAND
+       gimple def_stmt = SSA_NAME_DEF_STMT (SINGLE_SSA_TREE_OPERAND
                                             (stmt, SSA_OP_VIRTUAL_USES));
        /* We can handle lookups over PHI nodes only for a single
           virtual operand.  */
-       if (TREE_CODE (def_stmt) == PHI_NODE)
+       if (gimple_code (def_stmt) == GIMPLE_PHI)
          return get_single_def_stmt_from_phi (ref, def_stmt);
        return def_stmt;
       }
index 7cc0285..66ea1e7 100644 (file)
@@ -598,11 +598,6 @@ dequeue_and_dump (dump_info_p di)
       dump_child ("op 1", TREE_OPERAND (t, 1));
       break;
 
-    case GIMPLE_MODIFY_STMT:
-      dump_child ("op 0", GIMPLE_STMT_OPERAND (t, 0));
-      dump_child ("op 1", GIMPLE_STMT_OPERAND (t, 1));
-      break;
-
     case COMPONENT_REF:
       dump_child ("op 0", TREE_OPERAND (t, 0));
       dump_child ("op 1", TREE_OPERAND (t, 1));
@@ -825,7 +820,8 @@ static const struct dump_option_value_info dump_options[] =
   {"memsyms", TDF_MEMSYMS},
   {"verbose", TDF_VERBOSE},
   {"all", ~(TDF_RAW | TDF_SLIM | TDF_LINENO | TDF_TREE | TDF_RTL | TDF_IPA 
-           | TDF_STMTADDR | TDF_GRAPH | TDF_DIAGNOSTIC | TDF_VERBOSE)},
+           | TDF_STMTADDR | TDF_GRAPH | TDF_DIAGNOSTIC | TDF_VERBOSE
+           | TDF_RHS_ONLY)},
   {NULL, 0}
 };
 
index 965acce..140cbc5 100644 (file)
@@ -37,9 +37,12 @@ along with GCC; see the file COPYING3.  If not see
 #include "langhooks.h"
 #include "ggc.h"
 #include "toplev.h"
-#include "pointer-set.h"
+#include "gimple.h"
+
+/* In some instances a tree and a gimple need to be stored in the same
+   table, i.e. in hash tables.  This union makes that possible.  */
+typedef union {tree *tp; tree t; gimple g;} treemple;
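
The treemple union lets the EH lowering tables key on either kind of object through a single pointer-sized slot: a LABEL_DECL tree when tracking goto destinations, or a gimple when tracking GIMPLE_TRY statements.  A hypothetical restatement of how both kinds of keys are formed (mirroring the collect_finally_tree code further down; record_label_and_try itself is not in the patch):

/* Record a label statement and an enclosing try statement in the
   finally tree, keyed by LABEL_DECL and by GIMPLE_TRY respectively.  */
static void
record_label_and_try (gimple label_stmt, gimple try_stmt, gimple region)
{
  treemple child;

  child.t = gimple_label_label (label_stmt);
  record_in_finally_tree (child, region);

  child.g = try_stmt;
  record_in_finally_tree (child, region);
}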
 
-\f
 /* Nonzero if we are using EH to handle cleanups.  */
 static int using_eh_for_cleanups_p = 0;
 
@@ -48,7 +51,7 @@ using_eh_for_cleanups (void)
 {
   using_eh_for_cleanups_p = 1;
 }
-\f
+
 /* Misc functions used in this file.  */
 
 /* Compare and hash for any structure which begins with a canonical
@@ -70,7 +73,7 @@ struct_ptr_hash (const void *a)
   return (size_t)*x >> 4;
 }
 
-\f
+
 /* Remember and lookup EH region data for arbitrary statements.
    Really this means any statement that could_throw_p.  We could
    stuff this information into the stmt_ann data structure, but:
@@ -84,7 +87,7 @@ struct_ptr_hash (const void *a)
    of space by only allocating memory for those that can throw.  */
 
 static void
-record_stmt_eh_region (struct eh_region *region, tree t)
+record_stmt_eh_region (struct eh_region *region, gimple t)
 {
   if (!region)
     return;
@@ -92,14 +95,17 @@ record_stmt_eh_region (struct eh_region *region, tree t)
   add_stmt_to_eh_region (t, get_eh_region_number (region));
 }
 
+
+/* Add statement T in function IFUN to EH region NUM.  */
+
 void
-add_stmt_to_eh_region_fn (struct function *ifun, tree t, int num)
+add_stmt_to_eh_region_fn (struct function *ifun, gimple t, int num)
 {
   struct throw_stmt_node *n;
   void **slot;
 
   gcc_assert (num >= 0);
-  gcc_assert (TREE_CODE (t) != RESX_EXPR);
+  gcc_assert (gimple_code (t) != GIMPLE_RESX);
 
   n = GGC_NEW (struct throw_stmt_node);
   n->stmt = t;
@@ -115,14 +121,21 @@ add_stmt_to_eh_region_fn (struct function *ifun, tree t, int num)
   *slot = n;
 }
 
+
+/* Add statement T in the current function (cfun) to EH region number
+   NUM.  */
+
 void
-add_stmt_to_eh_region (tree t, int num)
+add_stmt_to_eh_region (gimple t, int num)
 {
   add_stmt_to_eh_region_fn (cfun, t, num);
 }
 
+
+/* Remove statement T in function IFUN from the EH region holding it.  */
+
 bool
-remove_stmt_from_eh_region_fn (struct function *ifun, tree t)
+remove_stmt_from_eh_region_fn (struct function *ifun, gimple t)
 {
   struct throw_stmt_node dummy;
   void **slot;
@@ -142,54 +155,98 @@ remove_stmt_from_eh_region_fn (struct function *ifun, tree t)
     return false;
 }
 
+
+/* Remove statement T in the current function (cfun) from the EH
+   region holding it.  */
+
 bool
-remove_stmt_from_eh_region (tree t)
+remove_stmt_from_eh_region (gimple t)
 {
   return remove_stmt_from_eh_region_fn (cfun, t);
 }
 
+/* Determine if statement T is inside an EH region in function IFUN.
+   Return the EH region number if found, return -2 if IFUN does not
+   have an EH table and -1 if T could not be found in IFUN's EH region
+   table.  */
+
 int
-lookup_stmt_eh_region_fn (struct function *ifun, const_tree t)
+lookup_stmt_eh_region_fn (struct function *ifun, gimple t)
 {
   struct throw_stmt_node *p, n;
 
   if (!get_eh_throw_stmt_table (ifun))
     return -2;
 
-  /* The CONST_CAST is okay because we don't modify n.stmt throughout
-     its scope, or the scope of p.  */
-  n.stmt = CONST_CAST_TREE (t);
-  p = (struct throw_stmt_node *) htab_find (get_eh_throw_stmt_table (ifun),
-                                            &n);
-
+  n.stmt = t;
+  p = (struct throw_stmt_node *) htab_find (get_eh_throw_stmt_table (ifun), &n);
   return (p ? p->region_nr : -1);
 }
 
+
+/* Determine if statement T is inside an EH region in the current
+   function (cfun).  Return the EH region number if found, return -2
+   if cfun does not have an EH table and -1 if T could not be found in
+   cfun's EH region table.  */
+
 int
-lookup_stmt_eh_region (const_tree t)
+lookup_stmt_eh_region (gimple t)
 {
   /* We can get called from initialized data when -fnon-call-exceptions
      is on; prevent crash.  */
   if (!cfun)
     return -1;
+
   return lookup_stmt_eh_region_fn (cfun, t);
 }
 
-\f
-/* First pass of EH node decomposition.  Build up a tree of TRY_FINALLY_EXPR
+
+/* Determine if expression T is inside an EH region in the current
+   function (cfun).  Return the EH region number if found, return -2
+   if cfun does not have an EH table and -1 if T could not be found in
+   cfun's EH region table.  */
+
+int
+lookup_expr_eh_region (tree t)
+{
+  /* We can get called from initialized data when -fnon-call-exceptions
+     is on; prevent crash.  */
+  if (!cfun)
+    return -1;
+
+  if (!get_eh_throw_stmt_table (cfun))
+    return -2;
+
+  if (t && EXPR_P (t))
+    {
+      tree_ann_common_t ann = tree_common_ann (t);
+      if (ann)
+       return (int) ann->rn;
+    }
+
+  return -1;
+}
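
A hedged usage sketch of the lookup convention documented above (stmt_in_eh_region_p is a hypothetical caller): -2 means the function has no EH table at all, -1 means the statement is not recorded, and a non-negative value is the region number.

/* Return true if STMT is covered by some EH region in the current
   function.  -2 (no EH table) and -1 (not recorded) both count as
   not covered.  */
static bool
stmt_in_eh_region_p (gimple stmt)
{
  return lookup_stmt_eh_region (stmt) >= 0;
}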
+
+
+/* First pass of EH node decomposition.  Build up a tree of GIMPLE_TRY_FINALLY
    nodes and LABEL_DECL nodes.  We will use this during the second phase to
    determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */
 
 struct finally_tree_node
 {
-  tree child, parent;
+  /* When storing a GIMPLE_TRY, we have to record a gimple.  However
+     when deciding whether a GOTO to a certain LABEL_DECL (which is a
+     tree) leaves the TRY block, it is necessary to record a tree in
+     this field.  Thus a treemple is used.  */
+  treemple child; 
+  gimple parent;
 };
 
 /* Note that this table is *not* marked GTY.  It is short-lived.  */
 static htab_t finally_tree;
 
 static void
-record_in_finally_tree (tree child, tree parent)
+record_in_finally_tree (treemple child, gimple parent)
 {
   struct finally_tree_node *n;
   void **slot;
@@ -204,40 +261,53 @@ record_in_finally_tree (tree child, tree parent)
 }
 
 static void
-collect_finally_tree (tree t, tree region)
+collect_finally_tree (gimple stmt, gimple region);
+
+/* Go through the gimple sequence.  Works with collect_finally_tree to 
+   record all GIMPLE_LABEL and GIMPLE_TRY statements. */
+
+static void
+collect_finally_tree_1 (gimple_seq seq, gimple region)
 {
- tailrecurse:
-  switch (TREE_CODE (t))
-    {
-    case LABEL_EXPR:
-      record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
-      break;
+  gimple_stmt_iterator gsi;
 
-    case TRY_FINALLY_EXPR:
-      record_in_finally_tree (t, region);
-      collect_finally_tree (TREE_OPERAND (t, 0), t);
-      t = TREE_OPERAND (t, 1);
-      goto tailrecurse;
+  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
+    collect_finally_tree (gsi_stmt (gsi), region);
+}
 
-    case TRY_CATCH_EXPR:
-      collect_finally_tree (TREE_OPERAND (t, 0), region);
-      t = TREE_OPERAND (t, 1);
-      goto tailrecurse;
+static void
+collect_finally_tree (gimple stmt, gimple region)
+{
+  treemple temp;
+
+  switch (gimple_code (stmt))
+    {
+    case GIMPLE_LABEL:
+      temp.t = gimple_label_label (stmt);
+      record_in_finally_tree (temp, region);
+      break;
 
-    case CATCH_EXPR:
-      t = CATCH_BODY (t);
-      goto tailrecurse;
+    case GIMPLE_TRY:
+      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
+        {
+          temp.g = stmt;
+          record_in_finally_tree (temp, region);
+          collect_finally_tree_1 (gimple_try_eval (stmt), stmt);
+         collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
+        }
+      else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
+        {
+          collect_finally_tree_1 (gimple_try_eval (stmt), region);
+          collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
+        }
+      break;
 
-    case EH_FILTER_EXPR:
-      t = EH_FILTER_FAILURE (t);
-      goto tailrecurse;
+    case GIMPLE_CATCH:
+      collect_finally_tree_1 (gimple_catch_handler (stmt), region);
+      break;
 
-    case STATEMENT_LIST:
-      {
-       tree_stmt_iterator i;
-       for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
-         collect_finally_tree (tsi_stmt (i), region);
-      }
+    case GIMPLE_EH_FILTER:
+      collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);
       break;
 
     default:
@@ -247,11 +317,12 @@ collect_finally_tree (tree t, tree region)
     }
 }
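
The gsi_* iterators also walk a free-standing gimple_seq, not just the statements of a basic block; collect_finally_tree_1 above relies on that.  A minimal sketch (seq_num_stmts is a hypothetical helper):

/* Return the number of statements in SEQ, iterating it directly with
   gsi_start rather than gsi_start_bb.  */
static unsigned
seq_num_stmts (gimple_seq seq)
{
  gimple_stmt_iterator gsi;
  unsigned n = 0;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    n++;
  return n;
}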
 
+
 /* Use the finally tree to determine if a jump from START to TARGET
    would leave the try_finally node that START lives in.  */
 
 static bool
-outside_finally_tree (tree start, tree target)
+outside_finally_tree (treemple start, gimple target)
 {
   struct finally_tree_node n, *p;
 
@@ -261,15 +332,15 @@ outside_finally_tree (tree start, tree target)
       p = (struct finally_tree_node *) htab_find (finally_tree, &n);
       if (!p)
        return true;
-      start = p->parent;
+      start.g = p->parent;
     }
-  while (start != target);
+  while (start.g != target);
 
   return false;
 }
-\f
-/* Second pass of EH node decomposition.  Actually transform the TRY_FINALLY
-   and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
+
+/* Second pass of EH node decomposition.  Actually transform the GIMPLE_TRY
+   nodes into a set of gotos, magic labels, and eh regions.
    The eh region creation is straight-forward, but frobbing all the gotos
    and such into shape isn't.  */
 
@@ -291,12 +362,15 @@ struct leh_state
 
 struct leh_tf_state
 {
-  /* Pointer to the TRY_FINALLY node under discussion.  The try_finally_expr
-     is the original TRY_FINALLY_EXPR.  We need to retain this so that
-     outside_finally_tree can reliably reference the tree used in the
-     collect_finally_tree data structures.  */
-  tree try_finally_expr;
-  tree *top_p;
+  /* Pointer to the GIMPLE_TRY_FINALLY node under discussion.  The
+     try_finally_expr is the original GIMPLE_TRY_FINALLY.  We need to retain
+     this so that outside_finally_tree can reliably reference the tree used
+     in the collect_finally_tree data structures.  */
+  gimple try_finally_expr;
+  gimple top_p;
+  /* While lowering a top_p, it is usually expanded into multiple statements;
+     the following field stores them.  */
+  gimple_seq top_p_seq;
 
   /* The state outside this try_finally node.  */
   struct leh_state *outer;
@@ -304,13 +378,22 @@ struct leh_tf_state
   /* The exception region created for it.  */
   struct eh_region *region;
 
-  /* The GOTO_QUEUE is is an array of GOTO_EXPR and RETURN_EXPR statements
-     that are seen to escape this TRY_FINALLY_EXPR node.  */
+  /* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN statements
+     that are seen to escape this GIMPLE_TRY_FINALLY node.
+     The idea is to record a gimple statement for everything except for 
+     the conditionals, which get their labels recorded. Since labels are of
+     type 'tree', we need this node to store both gimple and tree objects.
+     REPL_STMT is the sequence used to replace the goto/return statement.
+     CONT_STMT is used to store the statement that allows the return/goto to
+     jump to the original destination. */
   struct goto_queue_node {
-    tree stmt;
-    tree repl_stmt;
-    tree cont_stmt;
+    treemple stmt;
+    gimple_seq repl_stmt;
+    gimple cont_stmt;
     int index;
+    /* This is used when index >= 0 to indicate that stmt is a label (as
+       opposed to a goto stmt).  */
+    int is_label;
   } *goto_queue;
   size_t goto_queue_size;
   size_t goto_queue_active;
@@ -334,7 +417,7 @@ struct leh_tf_state
      Cleared if the fallthru is converted to a goto.  */
   bool may_fallthru;
 
-  /* True if any entry in goto_queue is a RETURN_EXPR.  */
+  /* True if any entry in goto_queue is a GIMPLE_RETURN.  */
   bool may_return;
 
   /* True if the finally block can receive an exception edge.
@@ -342,16 +425,17 @@ struct leh_tf_state
   bool may_throw;
 };
 
-static void lower_eh_filter (struct leh_state *, tree *);
-static void lower_eh_constructs_1 (struct leh_state *, tree *);
+static gimple_seq lower_eh_filter (struct leh_state *, gimple);
 
 /* Search for STMT in the goto queue.  Return the replacement,
    or null if the statement isn't in the queue.  */
 
 #define LARGE_GOTO_QUEUE 20
 
-static tree
-find_goto_replacement (struct leh_tf_state *tf, tree stmt)
+static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq seq);
+
+static gimple_seq
+find_goto_replacement (struct leh_tf_state *tf, treemple stmt)
 {
   unsigned int i;
   void **slot;
@@ -359,7 +443,7 @@ find_goto_replacement (struct leh_tf_state *tf, tree stmt)
   if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
     {
       for (i = 0; i < tf->goto_queue_active; i++)
-       if (tf->goto_queue[i].stmt == stmt)
+       if (tf->goto_queue[i].stmt.g == stmt.g)
          return tf->goto_queue[i].repl_stmt;
       return NULL;
     }
@@ -372,13 +456,14 @@ find_goto_replacement (struct leh_tf_state *tf, tree stmt)
       tf->goto_queue_map = pointer_map_create ();
       for (i = 0; i < tf->goto_queue_active; i++)
        {
-         slot = pointer_map_insert (tf->goto_queue_map, tf->goto_queue[i].stmt);
+         slot = pointer_map_insert (tf->goto_queue_map,
+                                     tf->goto_queue[i].stmt.g);
           gcc_assert (*slot == NULL);
-         *slot = (void *) &tf->goto_queue[i];
+         *slot = &tf->goto_queue[i];
        }
     }
 
-  slot = pointer_map_contains (tf->goto_queue_map, stmt);
+  slot = pointer_map_contains (tf->goto_queue_map, stmt.g);
   if (slot != NULL)
     return (((struct goto_queue_node *) *slot)->repl_stmt);
 
@@ -386,91 +471,98 @@ find_goto_replacement (struct leh_tf_state *tf, tree stmt)
 }
 
 /* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
-   lowered COND_EXPR.  If, by chance, the replacement is a simple goto,
+   lowered GIMPLE_COND.  If, by chance, the replacement is a simple goto,
    then we can just splat it in, otherwise we add the new stmts immediately
-   after the COND_EXPR and redirect.  */
+   after the GIMPLE_COND and redirect.  */
 
 static void
 replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
-                               tree_stmt_iterator *tsi)
+                               gimple_stmt_iterator *gsi)
 {
-  tree new, one, label;
+  tree label;
+  gimple_seq new;
+  treemple temp;
 
-  new = find_goto_replacement (tf, *tp);
+  temp.tp = tp;
+  new = find_goto_replacement (tf, temp);
   if (!new)
     return;
 
-  one = expr_only (new);
-  if (one && TREE_CODE (one) == GOTO_EXPR)
+  if (gimple_seq_singleton_p (new)
+      && gimple_code (gimple_seq_first_stmt (new)) == GIMPLE_GOTO)
     {
-      *tp = one;
+      *tp = gimple_goto_dest (gimple_seq_first_stmt (new));
       return;
     }
 
-  label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
-  *tp = build_and_jump (&LABEL_EXPR_LABEL (label));
+  label = create_artificial_label ();
+  /* Set the new label for the GIMPLE_COND.  */
+  *tp = label;
 
-  tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
-  tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
+  gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
+  gsi_insert_seq_after (gsi, gimple_seq_copy (new), GSI_CONTINUE_LINKING);
 }
 
 /* The real work of replace_goto_queue.  Returns with TSI updated to
    point to the next statement.  */
 
-static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);
+static void replace_goto_queue_stmt_list (gimple_seq, struct leh_tf_state *);
 
 static void
-replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
+replace_goto_queue_1 (gimple stmt, struct leh_tf_state *tf,
+                     gimple_stmt_iterator *gsi)
 {
-  switch (TREE_CODE (t))
+  gimple_seq seq;
+  treemple temp;
+  temp.g = NULL;
+
+  switch (gimple_code (stmt))
     {
-    case GOTO_EXPR:
-    case RETURN_EXPR:
-      t = find_goto_replacement (tf, t);
-      if (t)
+    case GIMPLE_GOTO:
+    case GIMPLE_RETURN:
+      temp.g = stmt;
+      seq = find_goto_replacement (tf, temp);
+      if (seq)
        {
-         tsi_link_before (tsi, t, TSI_SAME_STMT);
-         tsi_delink (tsi);
+         gsi_insert_seq_before (gsi, gimple_seq_copy (seq), GSI_SAME_STMT);
+         gsi_remove (gsi, false);
          return;
        }
       break;
 
-    case COND_EXPR:
-      replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
-      replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
+    case GIMPLE_COND:
+      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
+      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);
       break;
 
-    case TRY_FINALLY_EXPR:
-    case TRY_CATCH_EXPR:
-      replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
-      replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
+    case GIMPLE_TRY:
+      replace_goto_queue_stmt_list (gimple_try_eval (stmt), tf);
+      replace_goto_queue_stmt_list (gimple_try_cleanup (stmt), tf);
       break;
-    case CATCH_EXPR:
-      replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
+    case GIMPLE_CATCH:
+      replace_goto_queue_stmt_list (gimple_catch_handler (stmt), tf);
       break;
-    case EH_FILTER_EXPR:
-      replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
+    case GIMPLE_EH_FILTER:
+      replace_goto_queue_stmt_list (gimple_eh_filter_failure (stmt), tf);
       break;
 
-    case STATEMENT_LIST:
-      gcc_unreachable ();
-
     default:
       /* These won't have gotos in them.  */
       break;
     }
 
-  tsi_next (tsi);
+  gsi_next (gsi);
 }
 
-/* A subroutine of replace_goto_queue.  Handles STATEMENT_LISTs.  */
+/* A subroutine of replace_goto_queue.  Handles GIMPLE_SEQ.  */
 
 static void
-replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
+replace_goto_queue_stmt_list (gimple_seq seq, struct leh_tf_state *tf)
 {
-  tree_stmt_iterator i = tsi_start (t);
-  while (!tsi_end_p (i))
-    replace_goto_queue_1 (tsi_stmt (i), tf, &i);
+  gimple_stmt_iterator gsi = gsi_start (seq);
+
+  while (!gsi_end_p (gsi))
+    replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi);
 }
 
 /* Replace all goto queue members.  */
@@ -480,66 +572,21 @@ replace_goto_queue (struct leh_tf_state *tf)
 {
   if (tf->goto_queue_active == 0)
     return;
-  replace_goto_queue_stmt_list (*tf->top_p, tf);
+  replace_goto_queue_stmt_list (tf->top_p_seq, tf);
 }
 
-/* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
-   node, and if so record that fact in the goto queue associated with that
-   try_finally node.  */
+/* Add a new record to the goto queue contained in TF.  NEW_STMT is the
+   data to be added; IS_LABEL indicates whether NEW_STMT is a label or
+   a gimple return.  */
 
 static void
-maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
+record_in_goto_queue (struct leh_tf_state *tf,
+                      treemple new_stmt,
+                      int index,
+                      bool is_label)
 {
-  struct leh_tf_state *tf = state->tf;
-  struct goto_queue_node *q;
   size_t active, size;
-  int index;
-
-  if (!tf)
-    return;
-
-  switch (TREE_CODE (stmt))
-    {
-    case GOTO_EXPR:
-      {
-       tree lab = GOTO_DESTINATION (stmt);
-
-       /* Computed and non-local gotos do not get processed.  Given
-          their nature we can neither tell whether we've escaped the
-          finally block nor redirect them if we knew.  */
-       if (TREE_CODE (lab) != LABEL_DECL)
-         return;
-
-       /* No need to record gotos that don't leave the try block.  */
-       if (! outside_finally_tree (lab, tf->try_finally_expr))
-         return;
-
-       if (! tf->dest_array)
-         {
-           tf->dest_array = VEC_alloc (tree, heap, 10);
-           VEC_quick_push (tree, tf->dest_array, lab);
-           index = 0;
-         }
-       else
-         {
-           int n = VEC_length (tree, tf->dest_array);
-           for (index = 0; index < n; ++index)
-             if (VEC_index (tree, tf->dest_array, index) == lab)
-               break;
-           if (index == n)
-             VEC_safe_push (tree, heap, tf->dest_array, lab);
-         }
-      }
-      break;
-
-    case RETURN_EXPR:
-      tf->may_return = true;
-      index = -1;
-      break;
-
-    default:
-      gcc_unreachable ();
-    }
+  struct goto_queue_node *q;
 
   gcc_assert (!tf->goto_queue_map);
 
@@ -557,32 +604,118 @@ maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
   tf->goto_queue_active = active + 1;
 
   memset (q, 0, sizeof (*q));
-  q->stmt = stmt;
+  q->stmt = new_stmt;
   q->index = index;
+  q->is_label = is_label;
+}
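
The queue entries now carry a treemple, a union that can hold a tree label, a pointer to a tree operand, or a whole gimple statement, which is why is_label has to record which member is live. A rough standalone illustration of that discriminated-union idea; the member names mirror the ones used in this file, but the struct layout here is only an assumption for the sketch:

#include <assert.h>
#include <stdio.h>

/* Stand-ins for the real GCC types; only pointer identity matters.  */
struct tree_node { int dummy; };
struct gimple_stmt { int dummy; };
typedef struct tree_node *tree;
typedef struct gimple_stmt *gimple;

/* Shape of the treemple union used by the goto queue: a label, a
   pointer to a label operand inside a GIMPLE_COND, or a statement.  */
typedef union
{
  tree t;
  tree *tp;
  gimple g;
} treemple;

struct goto_queue_node_sketch
{
  treemple stmt;
  int index;              /* destination index, or -1 for a return */
  unsigned char is_label; /* which union member is meaningful */
};

int main (void)
{
  struct tree_node a_label;
  struct gimple_stmt a_return;
  struct goto_queue_node_sketch q[2];

  q[0].stmt.t = &a_label;   q[0].index = 0;  q[0].is_label = 1;
  q[1].stmt.g = &a_return;  q[1].index = -1; q[1].is_label = 0;

  /* Consumers must check is_label before touching the union.  */
  assert (q[0].is_label && q[0].stmt.t == &a_label);
  assert (!q[1].is_label && q[1].stmt.g == &a_return);
  printf ("ok\n");
  return 0;
}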
+
+/* Record LABEL in the goto queue contained in TF.
+   TF must not be null.  */
+
+static void
+record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label)
+{
+  int index;
+  treemple temp, new_stmt;
+
+  if (!label)
+    return;
+
+  /* Computed and non-local gotos do not get processed.  Given
+     their nature we can neither tell whether we've escaped the
+     finally block nor redirect them if we knew.  */
+  if (TREE_CODE (label) != LABEL_DECL)
+    return;
+
+  /* No need to record gotos that don't leave the try block.  */
+  temp.t = label;
+  if (!outside_finally_tree (temp, tf->try_finally_expr))
+    return;
+
+  if (! tf->dest_array)
+    {
+      tf->dest_array = VEC_alloc (tree, heap, 10);
+      VEC_quick_push (tree, tf->dest_array, label);
+      index = 0;
+    }
+  else
+    {
+      int n = VEC_length (tree, tf->dest_array);
+      for (index = 0; index < n; ++index)
+        if (VEC_index (tree, tf->dest_array, index) == label)
+          break;
+      if (index == n)
+        VEC_safe_push (tree, heap, tf->dest_array, label);
+    }
+
+  /* In the case of a GOTO we want to record the destination label,
+     since with a GIMPLE_COND we have easy access to the then/else
+     labels.  */
+  new_stmt = stmt;
+  record_in_goto_queue (tf, new_stmt, index, true);
+
+}
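
Each distinct destination label is given a small integer index by a linear search over dest_array, appending the label only when it has not been seen before; that index is what the goto queue and the switch-based lowering key on. The same lookup-or-append logic in plain C, with dest_index as a hypothetical helper and a bare pointer array in place of the GCC VEC:

#include <stdio.h>

/* Return the index of LAB in DESTS[0..*N), appending it if absent.
   Mirrors the dest_array handling, minus the VEC machinery.  */
static int dest_index (const void **dests, int *n, const void *lab)
{
  int i;
  for (i = 0; i < *n; i++)
    if (dests[i] == lab)
      return i;
  dests[(*n)++] = lab;      /* first time we see this label */
  return i;
}

int main (void)
{
  const void *dests[10];
  int n = 0;
  int a, b;                 /* two distinct "labels" */

  printf ("%d\n", dest_index (dests, &n, &a));  /* 0 */
  printf ("%d\n", dest_index (dests, &n, &b));  /* 1 */
  printf ("%d\n", dest_index (dests, &n, &a));  /* 0 again */
  return 0;
}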
+
+/* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
+   node, and if so record that fact in the goto queue associated with that
+   try_finally node.  */
+
+static void
+maybe_record_in_goto_queue (struct leh_state *state, gimple stmt)
+{
+  struct leh_tf_state *tf = state->tf;
+  treemple new_stmt;
+
+  if (!tf)
+    return;
+
+  switch (gimple_code (stmt))
+    {
+    case GIMPLE_COND:
+      new_stmt.tp = gimple_op_ptr (stmt, 2);
+      record_in_goto_queue_label (tf, new_stmt, gimple_cond_true_label (stmt));
+      new_stmt.tp = gimple_op_ptr (stmt, 3);
+      record_in_goto_queue_label (tf, new_stmt, gimple_cond_false_label (stmt));
+      break;
+    case GIMPLE_GOTO:
+      new_stmt.g = stmt;
+      record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt));
+      break;
+
+    case GIMPLE_RETURN:
+      tf->may_return = true;
+      new_stmt.g = stmt;
+      record_in_goto_queue (tf, new_stmt, -1, false);
+      break;
+
+    default:
+      gcc_unreachable ();
+    }
 }
 
+
 #ifdef ENABLE_CHECKING
-/* We do not process SWITCH_EXPRs for now.  As long as the original source
+/* We do not process GIMPLE_SWITCHes for now.  As long as the original source
    was in fact structured, and we've not yet done jump threading, then none
-   of the labels will leave outer TRY_FINALLY_EXPRs.  Verify this.  */
+   of the labels will leave outer GIMPLE_TRY_FINALLY nodes.  Verify this.  */
 
 static void
-verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
+verify_norecord_switch_expr (struct leh_state *state, gimple switch_expr)
 {
   struct leh_tf_state *tf = state->tf;
   size_t i, n;
-  tree vec;
 
   if (!tf)
     return;
 
-  vec = SWITCH_LABELS (switch_expr);
-  n = TREE_VEC_LENGTH (vec);
+  n = gimple_switch_num_labels (switch_expr);
 
   for (i = 0; i < n; ++i)
     {
-      tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
-      gcc_assert (!outside_finally_tree (lab, tf->try_finally_expr));
+      treemple temp;
+      tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));
+      temp.t = lab;
+      gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));
     }
 }
 #else
@@ -595,14 +728,24 @@ verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
    variable to be used in manipulating the value returned from the function.  */
 
 static void
-do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
+do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
                       tree *return_value_p)
 {
-  tree ret_expr = TREE_OPERAND (q->stmt, 0);
-  tree x;
+  tree ret_expr;
+  gimple x;
+
+  /* In the case of a return, the queue node must be a gimple statement. */
+  gcc_assert (!q->is_label);
+
+  ret_expr = gimple_return_retval (q->stmt.g);
 
   if (ret_expr)
     {
+      if (!*return_value_p)
+        *return_value_p = ret_expr;
+      else
+        gcc_assert (*return_value_p == ret_expr);
+      q->cont_stmt = q->stmt.g;
       /* The nasty part about redirecting the return value is that the
         return value itself is to be computed before the FINALLY block
         is executed.  e.g.
@@ -625,78 +768,50 @@ do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
          depends, I guess, but it does make generation of the switch in
          lower_try_finally_switch easier.  */
 
-      switch (TREE_CODE (ret_expr))
+      if (TREE_CODE (ret_expr) == RESULT_DECL)
        {
-       case RESULT_DECL:
          if (!*return_value_p)
            *return_value_p = ret_expr;
          else
            gcc_assert (*return_value_p == ret_expr);
-         q->cont_stmt = q->stmt;
-         break;
-
-       case GIMPLE_MODIFY_STMT:
-         {
-           tree result = GIMPLE_STMT_OPERAND (ret_expr, 0);
-           tree new, old = GIMPLE_STMT_OPERAND (ret_expr, 1);
-
-           if (!*return_value_p)
-             {
-               if (aggregate_value_p (TREE_TYPE (result),
-                                     TREE_TYPE (current_function_decl)))
-                 /* If this function returns in memory, copy the argument
-                   into the return slot now.  Otherwise, we might need to
-                   worry about magic return semantics, so we need to use a
-                   temporary to hold the value until we're actually ready
-                   to return.  */
-                 new = result;
-               else
-                 new = create_tmp_var (TREE_TYPE (old), "rettmp");
-               *return_value_p = new;
-             }
-           else
-             new = *return_value_p;
-
-           x = build_gimple_modify_stmt (new, old);
-           append_to_statement_list (x, &q->repl_stmt);
-
-           if (new == result)
-             x = result;
-           else
-             x = build_gimple_modify_stmt (result, new);
-           q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
-         }
-
-       default:
-         gcc_unreachable ();
+         q->cont_stmt = q->stmt.g;
        }
+      else
+         gcc_unreachable ();
     }
   else
-    {
       /* If we don't return a value, all return statements are the same.  */
-      q->cont_stmt = q->stmt;
-    }
+      q->cont_stmt = q->stmt.g;
+
+  if (!q->repl_stmt)
+    q->repl_stmt = gimple_seq_alloc ();
 
   if (mod)
-    append_to_statement_list (mod, &q->repl_stmt);
+    gimple_seq_add_seq (&q->repl_stmt, mod);
 
-  x = build1 (GOTO_EXPR, void_type_node, finlab);
-  append_to_statement_list (x, &q->repl_stmt);
+  x = gimple_build_goto (finlab);
+  gimple_seq_add_stmt (&q->repl_stmt, x);
 }
 
-/* Similar, but easier, for GOTO_EXPR.  */
+/* Similar, but easier, for GIMPLE_GOTO.  */
 
 static void
-do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
+do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
+                    struct leh_tf_state *tf)
 {
-  tree x;
+  gimple x;
+
+  gcc_assert (q->is_label);
+  if (!q->repl_stmt)
+    q->repl_stmt = gimple_seq_alloc ();
+
+  q->cont_stmt = gimple_build_goto (VEC_index (tree, tf->dest_array,
+                                               q->index));
 
-  q->cont_stmt = q->stmt;
   if (mod)
-    append_to_statement_list (mod, &q->repl_stmt);
+    gimple_seq_add_seq (&q->repl_stmt, mod);
 
-  x = build1 (GOTO_EXPR, void_type_node, finlab);
-  append_to_statement_list (x, &q->repl_stmt);
+  x = gimple_build_goto (finlab);
+  gimple_seq_add_stmt (&q->repl_stmt, x);
 }
 
 /* We want to transform
@@ -704,56 +819,59 @@ do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
    to
        body; goto over; lab: stuff; over:
 
-   T is a TRY_FINALLY or TRY_CATCH node.  LAB is the label that
+   TP is a GIMPLE_TRY node.  LAB is the label that
    should be placed before the second operand, or NULL.  OVER is
    an existing label that should be put at the exit, or NULL.  */
 
-static void
-frob_into_branch_around (tree *tp, tree lab, tree over)
+static gimple_seq
+frob_into_branch_around (gimple tp, tree lab, tree over)
 {
-  tree x, op1;
+  gimple x;
+  gimple_seq cleanup, result;
 
-  op1 = TREE_OPERAND (*tp, 1);
-  *tp = TREE_OPERAND (*tp, 0);
+  cleanup = gimple_try_cleanup (tp);
+  result = gimple_try_eval (tp);
 
-  if (block_may_fallthru (*tp))
+  if (gimple_seq_may_fallthru (result))
     {
       if (!over)
        over = create_artificial_label ();
-      x = build1 (GOTO_EXPR, void_type_node, over);
-      append_to_statement_list (x, tp);
+      x = gimple_build_goto (over);
+      gimple_seq_add_stmt (&result, x);
     }
 
   if (lab)
     {
-      x = build1 (LABEL_EXPR, void_type_node, lab);
-      append_to_statement_list (x, tp);
+      x = gimple_build_label (lab);
+      gimple_seq_add_stmt (&result, x);
     }
 
-  append_to_statement_list (op1, tp);
+  gimple_seq_add_seq (&result, cleanup);
 
   if (over)
     {
-      x = build1 (LABEL_EXPR, void_type_node, over);
-      append_to_statement_list (x, tp);
+      x = gimple_build_label (over);
+      gimple_seq_add_stmt (&result, x);
     }
+  return result;
 }
 
 /* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
    Make sure to record all new labels found.  */
 
-static tree
-lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
+static gimple_seq
+lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state)
 {
-  tree region = NULL;
+  gimple region = NULL;
+  gimple_seq new_seq;
 
-  t = unsave_expr_now (t);
+  new_seq = copy_gimple_seq_and_replace_locals (seq);
 
   if (outer_state->tf)
     region = outer_state->tf->try_finally_expr;
-  collect_finally_tree (t, region);
+  collect_finally_tree_1 (new_seq, region);
 
-  return t;
+  return new_seq;
 }
 
 /* A subroutine of lower_try_finally.  Create a fallthru label for
@@ -764,12 +882,17 @@ static tree
 lower_try_finally_fallthru_label (struct leh_tf_state *tf)
 {
   tree label = tf->fallthru_label;
+  treemple temp;
+
   if (!label)
     {
       label = create_artificial_label ();
       tf->fallthru_label = label;
       if (tf->outer->tf)
-        record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
+        {
+          temp.t = label;
+          record_in_finally_tree (temp, tf->outer->tf->try_finally_expr);
+        }
     }
   return label;
 }
@@ -799,9 +922,11 @@ honor_protect_cleanup_actions (struct leh_state *outer_state,
                               struct leh_state *this_state,
                               struct leh_tf_state *tf)
 {
-  tree protect_cleanup_actions, finally, x;
-  tree_stmt_iterator i;
+  gimple protect_cleanup_actions;
+  gimple_stmt_iterator gsi;
   bool finally_may_fallthru;
+  gimple_seq finally;
+  gimple x;
 
   /* First check for nothing to do.  */
   if (lang_protect_cleanup_actions)
@@ -809,7 +934,7 @@ honor_protect_cleanup_actions (struct leh_state *outer_state,
   else
     protect_cleanup_actions = NULL;
 
-  finally = TREE_OPERAND (*tf->top_p, 1);
+  finally = gimple_try_cleanup (tf->top_p);
 
   /* If the EH case of the finally block can fall through, this may be a
      structure of the form
@@ -832,7 +957,7 @@ honor_protect_cleanup_actions (struct leh_state *outer_state,
     be used (via fallthru from the finally) we handle the eh case here,
     whether or not protect_cleanup_actions is active.  */
 
-  finally_may_fallthru = block_may_fallthru (finally);
+  finally_may_fallthru = gimple_seq_may_fallthru (finally);
   if (!finally_may_fallthru && !protect_cleanup_actions)
     return;
 
@@ -848,14 +973,15 @@ honor_protect_cleanup_actions (struct leh_state *outer_state,
      cp/decl.c).  Since it's logically at an outer level, we should call
      terminate before we get to it, so strip it away before adding the
      MUST_NOT_THROW filter.  */
-  i = tsi_start (finally);
-  x = tsi_stmt (i);
+  gsi = gsi_start (finally);
+  x = gsi_stmt (gsi);
   if (protect_cleanup_actions
-      && TREE_CODE (x) == TRY_CATCH_EXPR
-      && TRY_CATCH_IS_CLEANUP (x))
+      && gimple_code (x) == GIMPLE_TRY
+      && gimple_try_kind (x) == GIMPLE_TRY_CATCH
+      && gimple_try_catch_is_cleanup (x))
     {
-      tsi_link_before (&i, TREE_OPERAND (x, 0), TSI_SAME_STMT);
-      tsi_delink (&i);
+      gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
+      gsi_remove (&gsi, false);
     }
 
   /* Resume execution after the exception.  Adding this now lets
@@ -864,55 +990,61 @@ honor_protect_cleanup_actions (struct leh_state *outer_state,
   if (finally_may_fallthru)
     {
       tree save_eptr, save_filt;
+      tree tmp;
 
       save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
       save_filt = create_tmp_var (integer_type_node, "save_filt");
 
-      i = tsi_start (finally);
-      x = build0 (EXC_PTR_EXPR, ptr_type_node);
-      x = build_gimple_modify_stmt (save_eptr, x);
-      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);
+      gsi = gsi_start (finally);
+      tmp = build0 (EXC_PTR_EXPR, ptr_type_node);
+      x = gimple_build_assign (save_eptr, tmp);
+      gsi_insert_before (&gsi, x, GSI_CONTINUE_LINKING);
 
-      x = build0 (FILTER_EXPR, integer_type_node);
-      x = build_gimple_modify_stmt (save_filt, x);
-      tsi_link_before (&i, x, TSI_CONTINUE_LINKING);
+      tmp = build0 (FILTER_EXPR, integer_type_node);
+      x = gimple_build_assign (save_filt, tmp);
+      gsi_insert_before (&gsi, x, GSI_CONTINUE_LINKING);
 
-      i = tsi_last (finally);
-      x = build0 (EXC_PTR_EXPR, ptr_type_node);
-      x = build_gimple_modify_stmt (x, save_eptr);
-      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
+      gsi = gsi_last (finally);
+      tmp = build0 (EXC_PTR_EXPR, ptr_type_node);
+      x = gimple_build_assign (tmp, save_eptr);
+      gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);
 
-      x = build0 (FILTER_EXPR, integer_type_node);
-      x = build_gimple_modify_stmt (x, save_filt);
-      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
+      tmp = build0 (FILTER_EXPR, integer_type_node);
+      x = gimple_build_assign (tmp, save_filt);
+      gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);
 
-      x = build_resx (get_eh_region_number (tf->region));
-      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
+      x = gimple_build_resx (get_eh_region_number (tf->region));
+      gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);
     }
 
   /* Wrap the block with protect_cleanup_actions as the action.  */
   if (protect_cleanup_actions)
     {
-      x = build2 (EH_FILTER_EXPR, void_type_node, NULL, NULL);
-      append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
-      EH_FILTER_MUST_NOT_THROW (x) = 1;
-      finally = build2 (TRY_CATCH_EXPR, void_type_node, finally, x);
-      lower_eh_filter (outer_state, &finally);
+      gimple_seq seq = NULL, failure = NULL;
+
+      gimple_seq_add_stmt (&failure, protect_cleanup_actions);
+      x = gimple_build_eh_filter (NULL, failure);
+      gimple_eh_filter_set_must_not_throw (x, 1);
+
+      gimple_seq_add_stmt (&seq, x);
+      x = gimple_build_try (finally, seq, GIMPLE_TRY_CATCH);
+      finally = lower_eh_filter (outer_state, x);
     }
   else
-    lower_eh_constructs_1 (outer_state, &finally);
+    lower_eh_constructs_1 (outer_state, finally);
 
   /* Hook this up to the end of the existing try block.  If we
      previously fell through the end, we'll have to branch around.
      This means adding a new goto, and adding it to the queue.  */
 
-  i = tsi_last (TREE_OPERAND (*tf->top_p, 0));
+  gsi = gsi_last (gimple_try_eval (tf->top_p));
 
   if (tf->may_fallthru)
     {
-      x = lower_try_finally_fallthru_label (tf);
-      x = build1 (GOTO_EXPR, void_type_node, x);
-      tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
+      tree tmp;
+      tmp = lower_try_finally_fallthru_label (tf);
+      x = gimple_build_goto (tmp);
+      gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);
 
       if (this_state)
         maybe_record_in_goto_queue (this_state, x);
@@ -920,9 +1052,9 @@ honor_protect_cleanup_actions (struct leh_state *outer_state,
       tf->may_fallthru = false;
     }
 
-  x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
-  tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
-  tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);
+  x = gimple_build_label (tf->eh_label);
+  gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);
+  gsi_insert_seq_after (&gsi, finally, GSI_CONTINUE_LINKING);
 
   /* Having now been handled, EH isn't to be considered with
      the rest of the outgoing edges.  */
@@ -935,9 +1067,12 @@ honor_protect_cleanup_actions (struct leh_state *outer_state,
    try_finally node for this special case.  */
 
 static void
-lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
+lower_try_finally_nofallthru (struct leh_state *state,
+                             struct leh_tf_state *tf)
 {
-  tree x, finally, lab, return_val;
+  tree lab, return_val;
+  gimple x;
+  gimple_seq finally;
   struct goto_queue_node *q, *qe;
 
   if (tf->may_throw)
@@ -945,11 +1080,12 @@ lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
   else
     lab = create_artificial_label ();
 
-  finally = TREE_OPERAND (*tf->top_p, 1);
-  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
+  /* We expect that tf->top_p is a GIMPLE_TRY. */
+  finally = gimple_try_cleanup (tf->top_p);
+  tf->top_p_seq = gimple_try_eval (tf->top_p);
 
-  x = build1 (LABEL_EXPR, void_type_node, lab);
-  append_to_statement_list (x, tf->top_p);
+  x = gimple_build_label (lab);
+  gimple_seq_add_stmt (&tf->top_p_seq, x);
 
   return_val = NULL;
   q = tf->goto_queue;
@@ -958,12 +1094,12 @@ lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
     if (q->index < 0)
       do_return_redirection (q, lab, NULL, &return_val);
     else
-      do_goto_redirection (q, lab, NULL);
+      do_goto_redirection (q, lab, NULL, tf);
 
   replace_goto_queue (tf);
 
-  lower_eh_constructs_1 (state, &finally);
-  append_to_statement_list (finally, tf->top_p);
+  lower_eh_constructs_1 (state, finally);
+  gimple_seq_add_seq (&tf->top_p_seq, finally);
 }
 
 /* A subroutine of lower_try_finally.  We have determined that there is
@@ -974,26 +1110,28 @@ static void
 lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
 {
   struct goto_queue_node *q, *qe;
-  tree x, finally, finally_label;
+  gimple x;
+  gimple_seq finally;
+  tree finally_label;
 
-  finally = TREE_OPERAND (*tf->top_p, 1);
-  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
+  finally = gimple_try_cleanup (tf->top_p);
+  tf->top_p_seq = gimple_try_eval (tf->top_p);
 
-  lower_eh_constructs_1 (state, &finally);
+  lower_eh_constructs_1 (state, finally);
 
   if (tf->may_throw)
     {
       /* Only reachable via the exception edge.  Add the given label to
          the head of the FINALLY block.  Append a RESX at the end.  */
 
-      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
-      append_to_statement_list (x, tf->top_p);
+      x = gimple_build_label (tf->eh_label);
+      gimple_seq_add_stmt (&tf->top_p_seq, x);
 
-      append_to_statement_list (finally, tf->top_p);
+      gimple_seq_add_seq (&tf->top_p_seq, finally);
 
-      x = build_resx (get_eh_region_number (tf->region));
+      x = gimple_build_resx (get_eh_region_number (tf->region));
 
-      append_to_statement_list (x, tf->top_p);
+      gimple_seq_add_stmt (&tf->top_p_seq, x);
 
       return;
     }
@@ -1002,15 +1140,15 @@ lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
     {
       /* Only reachable via the fallthru edge.  Do nothing but let
         the two blocks run together; we'll fall out the bottom.  */
-      append_to_statement_list (finally, tf->top_p);
+      gimple_seq_add_seq (&tf->top_p_seq, finally);
       return;
     }
 
   finally_label = create_artificial_label ();
-  x = build1 (LABEL_EXPR, void_type_node, finally_label);
-  append_to_statement_list (x, tf->top_p);
+  x = gimple_build_label (finally_label);
+  gimple_seq_add_stmt (&tf->top_p_seq, x);
 
-  append_to_statement_list (finally, tf->top_p);
+  gimple_seq_add_seq (&tf->top_p_seq, finally);
 
   q = tf->goto_queue;
   qe = q + tf->goto_queue_active;
@@ -1027,7 +1165,7 @@ lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
     {
       /* Reachable by goto expressions only.  Redirect them.  */
       for (; q < qe; ++q)
-       do_goto_redirection (q, finally_label, NULL);
+       do_goto_redirection (q, finally_label, NULL, tf);
       replace_goto_queue (tf);
 
       if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
@@ -1040,11 +1178,11 @@ lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
        }
     }
 
-  /* Reset the locus of the goto since we're moving 
-     goto to a different block which might be on a different line. */
-  SET_EXPR_LOCUS (tf->goto_queue[0].cont_stmt, NULL);
-  append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
-  maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
+  /* Place the original return/goto to the original destination
+     immediately after the finally block. */
+  x = tf->goto_queue[0].cont_stmt;
+  gimple_seq_add_stmt (&tf->top_p_seq, x);
+  maybe_record_in_goto_queue (state, x);
 }
 
 /* A subroutine of lower_try_finally.  There are multiple edges incoming
@@ -1054,36 +1192,38 @@ lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
 static void
 lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
 {
-  tree finally, new_stmt;
-  tree x;
+  gimple_seq finally;
+  gimple_seq new_stmt;
+  gimple_seq seq;
+  gimple x;
+  tree tmp;
 
-  finally = TREE_OPERAND (*tf->top_p, 1);
-  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
-
-  new_stmt = NULL_TREE;
+  finally = gimple_try_cleanup (tf->top_p);
+  tf->top_p_seq = gimple_try_eval (tf->top_p);
+  new_stmt = NULL;
 
   if (tf->may_fallthru)
     {
-      x = lower_try_finally_dup_block (finally, state);
-      lower_eh_constructs_1 (state, &x);
-      append_to_statement_list (x, &new_stmt);
+      seq = lower_try_finally_dup_block (finally, state);
+      lower_eh_constructs_1 (state, seq);
+      gimple_seq_add_seq (&new_stmt, seq);
 
-      x = lower_try_finally_fallthru_label (tf);
-      x = build1 (GOTO_EXPR, void_type_node, x);
-      append_to_statement_list (x, &new_stmt);
+      tmp = lower_try_finally_fallthru_label (tf);
+      x = gimple_build_goto (tmp);
+      gimple_seq_add_stmt (&new_stmt, x);
     }
 
   if (tf->may_throw)
     {
-      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
-      append_to_statement_list (x, &new_stmt);
+      x = gimple_build_label (tf->eh_label);
+      gimple_seq_add_stmt (&new_stmt, x);
 
-      x = lower_try_finally_dup_block (finally, state);
-      lower_eh_constructs_1 (state, &x);
-      append_to_statement_list (x, &new_stmt);
+      seq = lower_try_finally_dup_block (finally, state);
+      lower_eh_constructs_1 (state, seq);
+      gimple_seq_add_seq (&new_stmt, seq);
 
-      x = build_resx (get_eh_region_number (tf->region));
-      append_to_statement_list (x, &new_stmt);
+      x = gimple_build_resx (get_eh_region_number (tf->region));
+      gimple_seq_add_stmt (&new_stmt, x);
     }
 
   if (tf->goto_queue)
@@ -1123,16 +1263,16 @@ lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
          if (index == return_index)
            do_return_redirection (q, lab, NULL, &return_val);
          else
-           do_goto_redirection (q, lab, NULL);
+           do_goto_redirection (q, lab, NULL, tf);
 
-         x = build1 (LABEL_EXPR, void_type_node, lab);
-         append_to_statement_list (x, &new_stmt);
+         x = gimple_build_label (lab);
+          gimple_seq_add_stmt (&new_stmt, x);
 
-         x = lower_try_finally_dup_block (finally, state);
-         lower_eh_constructs_1 (state, &x);
-         append_to_statement_list (x, &new_stmt);
+         seq = lower_try_finally_dup_block (finally, state);
+         lower_eh_constructs_1 (state, seq);
+          gimple_seq_add_seq (&new_stmt, seq);
 
-         append_to_statement_list (q->cont_stmt, &new_stmt);
+          gimple_seq_add_stmt (&new_stmt, q->cont_stmt);
          maybe_record_in_goto_queue (state, q->cont_stmt);
        }
 
@@ -1150,7 +1290,7 @@ lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
          if (index == return_index)
            do_return_redirection (q, lab, NULL, &return_val);
          else
-           do_goto_redirection (q, lab, NULL);
+           do_goto_redirection (q, lab, NULL, tf);
        }
        
       replace_goto_queue (tf);
@@ -1159,7 +1299,7 @@ lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
 
   /* Need to link new stmts after running replace_goto_queue due
      to not wanting to process the same goto stmts twice.  */
-  append_to_statement_list (new_stmt, tf->top_p);
+  gimple_seq_add_seq (&tf->top_p_seq, new_stmt);
 }
 
 /* A subroutine of lower_try_finally.  There are multiple edges incoming
@@ -1172,18 +1312,26 @@ lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
 {
   struct goto_queue_node *q, *qe;
   tree return_val = NULL;
-  tree finally, finally_tmp, finally_label;
+  tree finally_tmp, finally_label;
   int return_index, eh_index, fallthru_index;
   int nlabels, ndests, j, last_case_index;
-  tree case_label_vec, switch_stmt, last_case, switch_body;
-  tree x;
+  tree last_case;
+  VEC (tree,heap) *case_label_vec;
+  gimple_seq switch_body;
+  gimple x;
+  tree tmp;
+  gimple switch_stmt;
+  gimple_seq finally;
+  struct pointer_map_t *cont_map = NULL;
+
+  switch_body = gimple_seq_alloc ();
 
   /* Mash the TRY block to the head of the chain.  */
-  finally = TREE_OPERAND (*tf->top_p, 1);
-  *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
+  finally = gimple_try_cleanup (tf->top_p);
+  tf->top_p_seq = gimple_try_eval (tf->top_p);
 
   /* Lower the finally block itself.  */
-  lower_eh_constructs_1 (state, &finally);
+  lower_eh_constructs_1 (state, finally);
 
   /* Prepare for switch statement generation.  */
   nlabels = VEC_length (tree, tf->dest_array);
@@ -1195,10 +1343,10 @@ lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
   finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
   finally_label = create_artificial_label ();
 
-  case_label_vec = make_tree_vec (ndests);
-  switch_stmt = build3 (SWITCH_EXPR, integer_type_node, finally_tmp,
-                       NULL_TREE, case_label_vec);
-  switch_body = NULL;
+  /* We use VEC_quick_push on case_label_vec throughout this function,
+     since we know the size in advance and allocate precisely as much
+     space as needed.  */
+  case_label_vec = VEC_alloc (tree, heap, ndests);
   last_case = NULL;
   last_case_index = 0;
 
@@ -1208,117 +1356,137 @@ lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
 
   if (tf->may_fallthru)
     {
-      x = build_gimple_modify_stmt (finally_tmp,
-                                   build_int_cst (integer_type_node,
-                                                  fallthru_index));
-      append_to_statement_list (x, tf->top_p);
+      x = gimple_build_assign (finally_tmp, build_int_cst (integer_type_node,
+                                                          fallthru_index));
+      gimple_seq_add_stmt (&tf->top_p_seq, x);
 
       if (tf->may_throw)
        {
-         x = build1 (GOTO_EXPR, void_type_node, finally_label);
-         append_to_statement_list (x, tf->top_p);
+         x = gimple_build_goto (finally_label);
+          gimple_seq_add_stmt (&tf->top_p_seq, x);
        }
 
 
       last_case = build3 (CASE_LABEL_EXPR, void_type_node,
                          build_int_cst (NULL_TREE, fallthru_index), NULL,
                          create_artificial_label ());
-      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
+      VEC_quick_push (tree, case_label_vec, last_case);
       last_case_index++;
 
-      x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
-      append_to_statement_list (x, &switch_body);
+      x = gimple_build_label (CASE_LABEL (last_case));
+      gimple_seq_add_stmt (&switch_body, x);
 
-      x = lower_try_finally_fallthru_label (tf);
-      x = build1 (GOTO_EXPR, void_type_node, x);
-      append_to_statement_list (x, &switch_body);
+      tmp = lower_try_finally_fallthru_label (tf);
+      x = gimple_build_goto (tmp);
+      gimple_seq_add_stmt (&switch_body, x);
     }
 
   if (tf->may_throw)
     {
-      x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
-      append_to_statement_list (x, tf->top_p);
+      x = gimple_build_label (tf->eh_label);
+      gimple_seq_add_stmt (&tf->top_p_seq, x);
 
-      x = build_gimple_modify_stmt (finally_tmp,
-                                   build_int_cst (integer_type_node,
-                                                  eh_index));
-      append_to_statement_list (x, tf->top_p);
+      x = gimple_build_assign (finally_tmp, build_int_cst (integer_type_node,
+                                                           eh_index));
+      gimple_seq_add_stmt (&tf->top_p_seq, x);
 
       last_case = build3 (CASE_LABEL_EXPR, void_type_node,
                          build_int_cst (NULL_TREE, eh_index), NULL,
                          create_artificial_label ());
-      TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
+      VEC_quick_push (tree, case_label_vec, last_case);
       last_case_index++;
 
-      x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
-      append_to_statement_list (x, &switch_body);
-      x = build_resx (get_eh_region_number (tf->region));
-      append_to_statement_list (x, &switch_body);
+      x = gimple_build_label (CASE_LABEL (last_case));
+      gimple_seq_add_stmt (&switch_body, x);
+      x = gimple_build_resx (get_eh_region_number (tf->region));
+      gimple_seq_add_stmt (&switch_body, x);
     }
 
-  x = build1 (LABEL_EXPR, void_type_node, finally_label);
-  append_to_statement_list (x, tf->top_p);
+  x = gimple_build_label (finally_label);
+  gimple_seq_add_stmt (&tf->top_p_seq, x);
 
-  append_to_statement_list (finally, tf->top_p);
+  gimple_seq_add_seq (&tf->top_p_seq, finally);
 
   /* Redirect each incoming goto edge.  */
   q = tf->goto_queue;
   qe = q + tf->goto_queue_active;
   j = last_case_index + tf->may_return;
+  /* Prepare the assignments to finally_tmp that are executed on entry
+     through a particular edge.  */
   for (; q < qe; ++q)
     {
-      tree mod;
-      int switch_id, case_index;
+      gimple_seq mod;
+      int switch_id;
+      unsigned int case_index;
+
+      mod = gimple_seq_alloc ();
 
       if (q->index < 0)
        {
-         mod = build_gimple_modify_stmt (finally_tmp,
-                                         build_int_cst (integer_type_node,
-                                                        return_index));
+         x = gimple_build_assign (finally_tmp,
+                                  build_int_cst (integer_type_node,
+                                                 return_index));
+         gimple_seq_add_stmt (&mod, x);
          do_return_redirection (q, finally_label, mod, &return_val);
          switch_id = return_index;
        }
       else
        {
-         mod = build_gimple_modify_stmt (finally_tmp,
-                                         build_int_cst (integer_type_node,
-                                                        q->index));
-         do_goto_redirection (q, finally_label, mod);
+         x = gimple_build_assign (finally_tmp,
+                                  build_int_cst (integer_type_node, q->index));
+         gimple_seq_add_stmt (&mod, x);
+         do_goto_redirection (q, finally_label, mod, tf);
          switch_id = q->index;
        }
 
       case_index = j + q->index;
-      if (!TREE_VEC_ELT (case_label_vec, case_index))
-       TREE_VEC_ELT (case_label_vec, case_index)
-         = build3 (CASE_LABEL_EXPR, void_type_node,
-                   build_int_cst (NULL_TREE, switch_id), NULL,
-                   /* We store the cont_stmt in the
-                      CASE_LABEL, so that we can recover it
-                      in the loop below.  We don't create
-                      the new label while walking the
-                      goto_queue because pointers don't
-                      offer a stable order.  */
-                   q->cont_stmt);
+      if (VEC_length (tree, case_label_vec) <= case_index
+          || !VEC_index (tree, case_label_vec, case_index))
+        {
+          tree case_lab;
+          void **slot;
+          case_lab = build3 (CASE_LABEL_EXPR, void_type_node,
+                             build_int_cst (NULL_TREE, switch_id), NULL,
+                             NULL);
+          /* We store the cont_stmt in the pointer map, so that we can recover
+             it in the loop below.  We don't create the new label while
+             walking the goto_queue because pointers don't offer a stable 
+             order.  */
+          if (!cont_map)
+            cont_map = pointer_map_create ();
+          slot = pointer_map_insert (cont_map, case_lab);
+          *slot = q->cont_stmt;
+          VEC_quick_push (tree, case_label_vec, case_lab);
+        }
     }
   for (j = last_case_index; j < last_case_index + nlabels; j++)
     {
       tree label;
-      tree cont_stmt;
+      gimple cont_stmt;
+      void **slot;
 
-      last_case = TREE_VEC_ELT (case_label_vec, j);
+      last_case = VEC_index (tree, case_label_vec, j);
 
       gcc_assert (last_case);
+      gcc_assert (cont_map);
 
-      cont_stmt = CASE_LABEL (last_case);
+      slot = pointer_map_contains (cont_map, last_case);
+      /* As the comment above suggests, CASE_LABEL (last_case) was just a
+         placeholder; it does not store an actual label yet.  */
+      gcc_assert (slot);
+      cont_stmt = *(gimple *) slot;
 
       label = create_artificial_label ();
       CASE_LABEL (last_case) = label;
 
-      x = build1 (LABEL_EXPR, void_type_node, label);
-      append_to_statement_list (x, &switch_body);
-      append_to_statement_list (cont_stmt, &switch_body);
+      x = gimple_build_label (label);
+      gimple_seq_add_stmt (&switch_body, x);
+      gimple_seq_add_stmt (&switch_body, cont_stmt);
       maybe_record_in_goto_queue (state, cont_stmt);
     }
+  if (cont_map)
+    pointer_map_destroy (cont_map);
+
   replace_goto_queue (tf);
 
   /* Make sure that the last case is the default label, as one is required.
@@ -1326,10 +1494,15 @@ lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
   CASE_LOW (last_case) = NULL;
   sort_case_labels (case_label_vec);
 
-  /* Need to link switch_stmt after running replace_goto_queue due
-     to not wanting to process the same goto stmts twice.  */
-  append_to_statement_list (switch_stmt, tf->top_p);
-  append_to_statement_list (switch_body, tf->top_p);
+  /* Build the switch statement, setting last_case to be the default
+     label.  */
+  switch_stmt = gimple_build_switch_vec (finally_tmp, last_case,
+                                         case_label_vec);
+
+  /* Need to link SWITCH_STMT after running replace_goto_queue
+     due to not wanting to process the same goto stmts twice.  */
+  gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt);
+  gimple_seq_add_seq (&tf->top_p_seq, switch_body);
 }
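
The first pass over the goto queue creates placeholder CASE_LABEL_EXPRs and remembers each one's continuation statement in a pointer map; the second pass then creates the real labels and pulls the continuations back out. A tiny standalone sketch of that two-pass association, with pm_entry and pm_get as hypothetical stand-ins for GCC's pointer_map:

#include <assert.h>
#include <stdio.h>

/* Tiny stand-in for a pointer map: associate each case-label
   placeholder with its continuation statement during the first walk,
   then recover it when the real labels are created.  */
struct pm_entry { const void *key; const void *val; };

static const void *pm_get (const struct pm_entry *m, int n, const void *key)
{
  for (int i = 0; i < n; i++)
    if (m[i].key == key)
      return m[i].val;
  return NULL;
}

int main (void)
{
  int case0, case1;            /* placeholders for CASE_LABEL_EXPRs */
  int goto0, goto1;            /* placeholders for cont_stmt gotos   */
  struct pm_entry map[2] = { { &case0, &goto0 }, { &case1, &goto1 } };

  /* The first pass filled MAP; the second pass walks the cases in
     label order and pulls back the matching continuation.  */
  assert (pm_get (map, 2, &case1) == &goto1);
  assert (pm_get (map, 2, &case0) == &goto0);
  printf ("ok\n");
  return 0;
}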
 
 /* Decide whether or not we are going to duplicate the finally block.
@@ -1347,7 +1520,7 @@ lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
    the estimate of the size of the switch machinery we'd have to add.  */
 
 static bool
-decide_copy_try_finally (int ndests, tree finally)
+decide_copy_try_finally (int ndests, gimple_seq finally)
 {
   int f_estimate, sw_estimate;
 
@@ -1355,7 +1528,7 @@ decide_copy_try_finally (int ndests, tree finally)
     return false;
 
   /* Finally estimate N times, plus N gotos.  */
-  f_estimate = estimate_num_insns (finally, &eni_size_weights);
+  f_estimate = count_insns_seq (finally, &eni_size_weights);
   f_estimate = (f_estimate + 1) * ndests;
 
   /* Switch statement (cost 10), N variable assignments, N gotos.  */
@@ -1372,13 +1545,14 @@ decide_copy_try_finally (int ndests, tree finally)
     return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
 }
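
The copy-versus-switch decision reduces to two size estimates: duplicating the finally block once per destination (plus a goto each) against one switch with an assignment and a goto per destination. A standalone sketch of the comparison; prefer_copy is a hypothetical name, finally_insns stands in for count_insns_seq, and sw_estimate = 10 + 2 * ndests is reconstructed from the comment above rather than quoted from the elided code:

#include <stdbool.h>
#include <stdio.h>

/* Rough copy-vs-switch size tradeoff, mirroring the final comparison
   in decide_copy_try_finally.  FINALLY_INSNS stands in for
   count_insns_seq (finally, &eni_size_weights).  */
static bool prefer_copy (int ndests, int finally_insns)
{
  /* Finally duplicated N times, plus N gotos.  */
  int f_estimate = (finally_insns + 1) * ndests;
  /* Switch statement (cost 10), N variable assignments, N gotos
     (reconstructed from the comment; an assumption, not quoted code).  */
  int sw_estimate = 10 + 2 * ndests;

  return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}

int main (void)
{
  printf ("small finally, 3 dests: %s\n",
          prefer_copy (3, 4) ? "copy" : "switch");   /* copy */
  printf ("large finally, 5 dests: %s\n",
          prefer_copy (5, 30) ? "copy" : "switch");  /* switch */
  return 0;
}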
 
-/* A subroutine of lower_eh_constructs_1.  Lower a TRY_FINALLY_EXPR nodes
+
+/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_FINALLY nodes
    to a sequence of labels and blocks, plus the exception region trees
    that record all the magic.  This is complicated by the need to
    arrange for the FINALLY block to be executed on all exits.  */
 
-static void
-lower_try_finally (struct leh_state *state, tree *tp)
+static gimple_seq
+lower_try_finally (struct leh_state *state, gimple tp)
 {
   struct leh_tf_state this_tf;
   struct leh_state this_state;
@@ -1387,7 +1561,7 @@ lower_try_finally (struct leh_state *state, tree *tp)
   /* Process the try block.  */
 
   memset (&this_tf, 0, sizeof (this_tf));
-  this_tf.try_finally_expr = *tp;
+  this_tf.try_finally_expr = tp;
   this_tf.top_p = tp;
   this_tf.outer = state;
   if (using_eh_for_cleanups_p)
@@ -1400,10 +1574,10 @@ lower_try_finally (struct leh_state *state, tree *tp)
   this_state.prev_try = state->prev_try;
   this_state.tf = &this_tf;
 
-  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
+  lower_eh_constructs_1 (&this_state, gimple_try_eval(tp));
 
   /* Determine if the try block is escaped through the bottom.  */
-  this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
+  this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
 
   /* Determine if any exceptions are possible within the try block.  */
   if (using_eh_for_cleanups_p)
@@ -1426,19 +1600,20 @@ lower_try_finally (struct leh_state *state, tree *tp)
 
   /* If the FINALLY block is not reachable, dike it out.  */
   if (ndests == 0)
-    *tp = TREE_OPERAND (*tp, 0);
-
+    {
+      gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp));
+      gimple_try_set_cleanup (tp, NULL);
+    }
   /* If the finally block doesn't fall through, then any destination
      we might try to impose there isn't reached either.  There may be
      some minor amount of cleanup and redirection still needed.  */
-  else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
+  else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp)))
     lower_try_finally_nofallthru (state, &this_tf);
 
   /* We can easily special-case redirection to a single destination.  */
   else if (ndests == 1)
     lower_try_finally_onedest (state, &this_tf);
-
-  else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
+  else if (decide_copy_try_finally (ndests, gimple_try_cleanup (tp)))
     lower_try_finally_copy (state, &this_tf);
   else
     lower_try_finally_switch (state, &this_tf);
@@ -1447,8 +1622,9 @@ lower_try_finally (struct leh_state *state, tree *tp)
      block, do so.  */
   if (this_tf.fallthru_label)
     {
-      tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
-      append_to_statement_list (x, tp);
+      /* This must be reached only if ndests == 0. */
+      gimple x = gimple_build_label (this_tf.fallthru_label);
+      gimple_seq_add_stmt (&this_tf.top_p_seq, x);
     }
 
   VEC_free (tree, heap, this_tf.dest_array);
@@ -1456,18 +1632,20 @@ lower_try_finally (struct leh_state *state, tree *tp)
     free (this_tf.goto_queue);
   if (this_tf.goto_queue_map)
     pointer_map_destroy (this_tf.goto_queue_map);
+
+  return this_tf.top_p_seq;
 }
 
-/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with a
-   list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
-   exception region trees that record all the magic.  */
+/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_CATCH with a
+   list of GIMPLE_CATCH statements to a sequence of labels and blocks, plus
+   the exception region trees that record all the magic.  */
 
-static void
-lower_catch (struct leh_state *state, tree *tp)
+static gimple_seq
+lower_catch (struct leh_state *state, gimple tp)
 {
   struct eh_region *try_region;
   struct leh_state this_state;
-  tree_stmt_iterator i;
+  gimple_stmt_iterator gsi;
   tree out_label;
 
   try_region = gen_eh_region_try (state->cur_region);
@@ -1475,118 +1653,121 @@ lower_catch (struct leh_state *state, tree *tp)
   this_state.prev_try = try_region;
   this_state.tf = state->tf;
 
-  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
+  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));
 
   if (!get_eh_region_may_contain_throw (try_region))
     {
-      *tp = TREE_OPERAND (*tp, 0);
-      return;
+      return gimple_try_eval (tp);
     }
 
   out_label = NULL;
-  for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
+  for (gsi = gsi_start (gimple_try_cleanup (tp)); !gsi_end_p (gsi); )
     {
       struct eh_region *catch_region;
-      tree catch, x, eh_label;
+      tree eh_label;
+      gimple x, catch;
 
-      catch = tsi_stmt (i);
-      catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));
+      catch = gsi_stmt (gsi);
+      catch_region = gen_eh_region_catch (try_region,
+                                          gimple_catch_types (catch));
 
       this_state.cur_region = catch_region;
       this_state.prev_try = state->prev_try;
-      lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));
+      lower_eh_constructs_1 (&this_state, gimple_catch_handler (catch));
 
       eh_label = create_artificial_label ();
       set_eh_region_tree_label (catch_region, eh_label);
 
-      x = build1 (LABEL_EXPR, void_type_node, eh_label);
-      tsi_link_before (&i, x, TSI_SAME_STMT);
+      x = gimple_build_label (eh_label);
+      gsi_insert_before (&gsi, x, GSI_SAME_STMT);
 
-      if (block_may_fallthru (CATCH_BODY (catch)))
+      if (gimple_seq_may_fallthru (gimple_catch_handler (catch)))
        {
          if (!out_label)
            out_label = create_artificial_label ();
 
-         x = build1 (GOTO_EXPR, void_type_node, out_label);
-         append_to_statement_list (x, &CATCH_BODY (catch));
+         x = gimple_build_goto (out_label);
+         gimple_seq_add_stmt (gimple_catch_handler_ptr (catch), x);
        }
 
-      tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
-      tsi_delink (&i);
+      gsi_insert_seq_before (&gsi, gimple_catch_handler (catch),
+                            GSI_SAME_STMT);
+      gsi_remove (&gsi, false);
     }
 
-  frob_into_branch_around (tp, NULL, out_label);
+  return frob_into_branch_around (tp, NULL, out_label);
 }
 
-/* A subroutine of lower_eh_constructs_1.  Lower a TRY_CATCH_EXPR with a
-   EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
+/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with a
+   GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception
    region trees that record all the magic.  */
 
-static void
-lower_eh_filter (struct leh_state *state, tree *tp)
+static gimple_seq
+lower_eh_filter (struct leh_state *state, gimple tp)
 {
   struct leh_state this_state;
   struct eh_region *this_region;
-  tree inner = expr_first (TREE_OPERAND (*tp, 1));
+  gimple inner;
   tree eh_label;
 
-  if (EH_FILTER_MUST_NOT_THROW (inner))
+  inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
+
+  if (gimple_eh_filter_must_not_throw (inner))
     this_region = gen_eh_region_must_not_throw (state->cur_region);
   else
     this_region = gen_eh_region_allowed (state->cur_region,
-                                        EH_FILTER_TYPES (inner));
+                                        gimple_eh_filter_types (inner));
   this_state = *state;
   this_state.cur_region = this_region;
   /* For must not throw regions any cleanup regions inside it
      can't reach outer catch regions.  */
-  if (EH_FILTER_MUST_NOT_THROW (inner))
+  if (gimple_eh_filter_must_not_throw (inner))
     this_state.prev_try = NULL;
 
-  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
+  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));
 
   if (!get_eh_region_may_contain_throw (this_region))
     {
-      *tp = TREE_OPERAND (*tp, 0);
-      return;
+      return gimple_try_eval (tp);
     }
 
-  lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
-  TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);
+  lower_eh_constructs_1 (state, gimple_eh_filter_failure (inner));
+  gimple_try_set_cleanup (tp, gimple_eh_filter_failure (inner));
 
   eh_label = create_artificial_label ();
   set_eh_region_tree_label (this_region, eh_label);
 
-  frob_into_branch_around (tp, eh_label, NULL);
+  return frob_into_branch_around (tp, eh_label, NULL);
 }
 
 /* Implement a cleanup expression.  This is similar to try-finally,
    except that we only execute the cleanup block for exception edges.  */
 
-static void
-lower_cleanup (struct leh_state *state, tree *tp)
+static gimple_seq
+lower_cleanup (struct leh_state *state, gimple tp)
 {
   struct leh_state this_state;
   struct eh_region *this_region;
   struct leh_tf_state fake_tf;
+  gimple_seq result;
 
   /* If not using eh, then exception-only cleanups are no-ops.  */
   if (!flag_exceptions)
     {
-      *tp = TREE_OPERAND (*tp, 0);
-      lower_eh_constructs_1 (state, tp);
-      return;
+      result = gimple_try_eval (tp);
+      lower_eh_constructs_1 (state, result);
+      return result;
     }
 
   this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
   this_state = *state;
   this_state.cur_region = this_region;
 
-  lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
+  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));
 
   if (!get_eh_region_may_contain_throw (this_region))
     {
-      *tp = TREE_OPERAND (*tp, 0);
-      return;
+      return gimple_try_eval (tp);
     }
 
   /* Build enough of a try-finally state so that we can reuse
@@ -1595,7 +1776,7 @@ lower_cleanup (struct leh_state *state, tree *tp)
   fake_tf.top_p = tp;
   fake_tf.outer = state;
   fake_tf.region = this_region;
-  fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
+  fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
   fake_tf.may_throw = true;
 
   fake_tf.eh_label = create_artificial_label ();
@@ -1607,118 +1788,119 @@ lower_cleanup (struct leh_state *state, tree *tp)
     {
       /* In this case honor_protect_cleanup_actions had nothing to do,
         and we should process this normally.  */
-      lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
-      frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
+      lower_eh_constructs_1 (state, gimple_try_cleanup (tp));
+      result = frob_into_branch_around (tp, fake_tf.eh_label,
+                                       fake_tf.fallthru_label);
     }
   else
     {
       /* In this case honor_protect_cleanup_actions did nearly all of
         the work.  All we have left is to append the fallthru_label.  */
 
-      *tp = TREE_OPERAND (*tp, 0);
+      result = gimple_try_eval (tp);
       if (fake_tf.fallthru_label)
        {
-         tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
-         append_to_statement_list (x, tp);
+         gimple x = gimple_build_label (fake_tf.fallthru_label);
+         gimple_seq_add_stmt (&result, x);
        }
     }
+  return result;
 }
 
-/* Main loop for lowering eh constructs.  */
+
+
+/* Main loop for lowering EH constructs.  Also moves GSI to the next
+   statement.  */
 
 static void
-lower_eh_constructs_1 (struct leh_state *state, tree *tp)
+lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
 {
-  tree_stmt_iterator i;
-  tree t = *tp;
+  gimple_seq replace;
+  gimple x;
+  gimple stmt = gsi_stmt (*gsi);
 
-  switch (TREE_CODE (t))
+  switch (gimple_code (stmt))
     {
-    case COND_EXPR:
-      lower_eh_constructs_1 (state, &COND_EXPR_THEN (t));
-      lower_eh_constructs_1 (state, &COND_EXPR_ELSE (t));
-      break;
-
-    case CALL_EXPR:
-      /* Look for things that can throw exceptions, and record them.  */
-      if (state->cur_region && tree_could_throw_p (t))
-       {
-         record_stmt_eh_region (state->cur_region, t);
-         note_eh_region_may_contain_throw (state->cur_region);
-       }
-      break;
-
-    case GIMPLE_MODIFY_STMT:
+    case GIMPLE_CALL:
+    case GIMPLE_ASSIGN:
       /* Look for things that can throw exceptions, and record them.  */
-      if (state->cur_region && tree_could_throw_p (t))
+      if (state->cur_region && stmt_could_throw_p (stmt))
        {
-         record_stmt_eh_region (state->cur_region, t);
+         record_stmt_eh_region (state->cur_region, stmt);
          note_eh_region_may_contain_throw (state->cur_region);
        }
       break;
 
-    case GOTO_EXPR:
-    case RETURN_EXPR:
-      maybe_record_in_goto_queue (state, t);
-      break;
-    case SWITCH_EXPR:
-      verify_norecord_switch_expr (state, t);
+    case GIMPLE_COND:
+    case GIMPLE_GOTO:
+    case GIMPLE_RETURN:
+      maybe_record_in_goto_queue (state, stmt);
       break;
 
-    case TRY_FINALLY_EXPR:
-      lower_try_finally (state, tp);
+    case GIMPLE_SWITCH:
+      verify_norecord_switch_expr (state, stmt);
       break;
 
-    case TRY_CATCH_EXPR:
-      i = tsi_start (TREE_OPERAND (t, 1));
-      switch (TREE_CODE (tsi_stmt (i)))
-       {
-       case CATCH_EXPR:
-         lower_catch (state, tp);
-         break;
-       case EH_FILTER_EXPR:
-         lower_eh_filter (state, tp);
-         break;
-       default:
-         lower_cleanup (state, tp);
-         break;
-       }
-      break;
-
-    case STATEMENT_LIST:
-      for (i = tsi_start (t); !tsi_end_p (i); )
+    case GIMPLE_TRY:
+      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
+       replace = lower_try_finally (state, stmt);
+      else
        {
-         lower_eh_constructs_1 (state, tsi_stmt_ptr (i));
-         t = tsi_stmt (i);
-         if (TREE_CODE (t) == STATEMENT_LIST)
+         x = gimple_seq_first_stmt (gimple_try_cleanup (stmt));
+         switch (gimple_code (x))
            {
-             tsi_link_before (&i, t, TSI_SAME_STMT);
-             tsi_delink (&i);
+           case GIMPLE_CATCH:
+             replace = lower_catch (state, stmt);
+             break;
+           case GIMPLE_EH_FILTER:
+             replace = lower_eh_filter (state, stmt);
+             break;
+           default:
+             replace = lower_cleanup (state, stmt);
+             break;
            }
-         else
-           tsi_next (&i);
        }
-      break;
+
+      /* Remove the old stmt and insert the transformed sequence
+        instead. */
+      gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT);
+      gsi_remove (gsi, true);
+
+      /* Return since we don't want gsi_next ().  */
+      return;
 
     default:
       /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
       break;
     }
+
+  gsi_next (gsi);
+}
+
+/* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2. */
+
+static void
+lower_eh_constructs_1 (struct leh_state *state, gimple_seq seq)
+{
+  gimple_stmt_iterator gsi;
+  for (gsi = gsi_start (seq); !gsi_end_p (gsi);)
+    lower_eh_constructs_2 (state, &gsi);
 }
 
 static unsigned int
 lower_eh_constructs (void)
 {
   struct leh_state null_state;
-  tree *tp = &DECL_SAVED_TREE (current_function_decl);
+
+  gimple_seq bodyp = gimple_body (current_function_decl);
 
   finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
 
-  collect_finally_tree (*tp, NULL);
+  collect_finally_tree_1 (bodyp, NULL);
 
   memset (&null_state, 0, sizeof (null_state));
-  lower_eh_constructs_1 (&null_state, tp);
+  lower_eh_constructs_1 (&null_state, bodyp);
 
   htab_delete (finally_tree);
 
@@ -1751,27 +1933,28 @@ struct gimple_opt_pass pass_lower_eh =
 static void
 make_eh_edge (struct eh_region *region, void *data)
 {
-  tree stmt, lab;
+  gimple stmt;
+  tree lab;
   basic_block src, dst;
 
-  stmt = (tree) data;
+  stmt = (gimple) data;
   lab = get_eh_region_tree_label (region);
 
-  src = bb_for_stmt (stmt);
+  src = gimple_bb (stmt);
   dst = label_to_block (lab);
 
   make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
 }
 
 void
-make_eh_edges (tree stmt)
+make_eh_edges (gimple stmt)
 {
   int region_nr;
   bool is_resx;
 
-  if (TREE_CODE (stmt) == RESX_EXPR)
+  if (gimple_code (stmt) == GIMPLE_RESX)
     {
-      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
+      region_nr = gimple_resx_region (stmt);
       is_resx = true;
     }
   else
@@ -1789,17 +1972,19 @@ static bool mark_eh_edge_found_error;
 
 /* Mark edge make_eh_edge would create for given region by setting it aux
    field, output error if something goes wrong.  */
+
 static void
 mark_eh_edge (struct eh_region *region, void *data)
 {
-  tree stmt, lab;
+  gimple stmt;
+  tree lab;
   basic_block src, dst;
   edge e;
 
-  stmt = (tree) data;
+  stmt = (gimple) data;
   lab = get_eh_region_tree_label (region);
 
-  src = bb_for_stmt (stmt);
+  src = gimple_bb (stmt);
   dst = label_to_block (lab);
 
   e = find_edge (src, dst);
@@ -1823,23 +2008,24 @@ mark_eh_edge (struct eh_region *region, void *data)
     e->aux = (void *)1;
 }
 
-/* Verify that BB containing stmt as last stmt has precisely the edges
-   make_eh_edges would create.  */
+/* Verify that the BB containing STMT as its last statement has precisely
+   the edges that make_eh_edges would create.  */
+
 bool
-verify_eh_edges (tree stmt)
+verify_eh_edges (gimple stmt)
 {
   int region_nr;
   bool is_resx;
-  basic_block bb = bb_for_stmt (stmt);
+  basic_block bb = gimple_bb (stmt);
   edge_iterator ei;
   edge e;
 
   FOR_EACH_EDGE (e, ei, bb->succs)
     gcc_assert (!e->aux);
   mark_eh_edge_found_error = false;
-  if (TREE_CODE (stmt) == RESX_EXPR)
+  if (gimple_code (stmt) == GIMPLE_RESX)
     {
-      region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
+      region_nr = gimple_resx_region (stmt);
       is_resx = true;
     }
   else
@@ -1855,7 +2041,7 @@ verify_eh_edges (tree stmt)
              }
           return false;
        }
-      if (!tree_could_throw_p (stmt))
+      if (!stmt_could_throw_p (stmt))
        {
          error ("BB %i last statement has incorrectly set region", bb->index);
          return true;
@@ -1874,42 +2060,151 @@ verify_eh_edges (tree stmt)
        }
       e->aux = NULL;
     }
+
   return mark_eh_edge_found_error;
 }
 
 \f
-/* Return true if the expr can trap, as in dereferencing an invalid pointer
+/* Helper function for operation_could_trap_p and stmt_could_throw_p.  */
+
+static bool
+operation_could_trap_helper_p (enum tree_code op,
+                              bool fp_operation,
+                              bool honor_trapv,
+                              bool honor_nans,
+                              bool honor_snans,
+                              tree divisor,
+                              bool *handled)
+{
+  *handled = true;
+  switch (op)
+    {
+    case TRUNC_DIV_EXPR:
+    case CEIL_DIV_EXPR:
+    case FLOOR_DIV_EXPR:
+    case ROUND_DIV_EXPR:
+    case EXACT_DIV_EXPR:
+    case CEIL_MOD_EXPR:
+    case FLOOR_MOD_EXPR:
+    case ROUND_MOD_EXPR:
+    case TRUNC_MOD_EXPR:
+    case RDIV_EXPR:
+      if (honor_snans || honor_trapv)
+       return true;
+      if (fp_operation)
+       return flag_trapping_math;
+      if (!TREE_CONSTANT (divisor) || integer_zerop (divisor))
+        return true;
+      return false;
+
+    case LT_EXPR:
+    case LE_EXPR:
+    case GT_EXPR:
+    case GE_EXPR:
+    case LTGT_EXPR:
+      /* Some floating point comparisons may trap.  */
+      return honor_nans;
+
+    case EQ_EXPR:
+    case NE_EXPR:
+    case UNORDERED_EXPR:
+    case ORDERED_EXPR:
+    case UNLT_EXPR:
+    case UNLE_EXPR:
+    case UNGT_EXPR:
+    case UNGE_EXPR:
+    case UNEQ_EXPR:
+      return honor_snans;
+
+    case CONVERT_EXPR:
+    case FIX_TRUNC_EXPR:
+      /* Conversion of floating point might trap.  */
+      return honor_nans;
+
+    case NEGATE_EXPR:
+    case ABS_EXPR:
+    case CONJ_EXPR:
+      /* These operations don't trap with floating point.  */
+      if (honor_trapv)
+       return true;
+      return false;
+
+    case PLUS_EXPR:
+    case MINUS_EXPR:
+    case MULT_EXPR:
+      /* Any floating arithmetic may trap.  */
+      if (fp_operation && flag_trapping_math)
+       return true;
+      if (honor_trapv)
+       return true;
+      return false;
+
+    default:
+      /* Any floating arithmetic may trap.  */
+      if (fp_operation && flag_trapping_math)
+       return true;
+
+      *handled = false;
+      return false;
+    }
+}
+
+/* Return true if operation OP may trap.  FP_OPERATION is true if OP is
+   applied to floating-point values.  HONOR_TRAPV is true if OP is applied
+   to integer operands whose type traps on overflow.  If OP is a division
+   operator, DIVISOR contains the value of the divisor.  */
+
+bool
+operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv,
+                       tree divisor)
+{
+  bool honor_nans = (fp_operation && flag_trapping_math
+                    && !flag_finite_math_only);
+  bool honor_snans = fp_operation && flag_signaling_nans != 0;
+  bool handled;
+
+  if (TREE_CODE_CLASS (op) != tcc_comparison
+      && TREE_CODE_CLASS (op) != tcc_unary
+      && TREE_CODE_CLASS (op) != tcc_binary)
+    return false;
+
+  return operation_could_trap_helper_p (op, fp_operation, honor_trapv,
+                                       honor_nans, honor_snans, divisor,
+                                       &handled);
+}
+
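
/* Illustrative sketch, not part of the patch: with the trap check factored
   out of tree_could_trap_p, a caller can ask about a single operation
   without materializing a tree expression.  example_div_may_trap is a
   hypothetical helper; DIVISOR is any tree used as the second operand.  */

static bool
example_div_may_trap (tree divisor)
{
  /* Integer division traps unless the divisor is a non-zero constant;
     -ftrapv and signaling NaNs are not involved for this call.  */
  return operation_could_trap_p (TRUNC_DIV_EXPR, /*fp_operation=*/false,
				 /*honor_trapv=*/false, divisor);
}
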
+/* Return true if EXPR can trap, as in dereferencing an invalid pointer
    location or floating point arithmetic.  C.f. the rtl version, may_trap_p.
    This routine expects only GIMPLE lhs or rhs input.  */
 
 bool
 tree_could_trap_p (tree expr)
 {
-  enum tree_code code = TREE_CODE (expr);
-  bool honor_nans = false;
-  bool honor_snans = false;
+  enum tree_code code;
   bool fp_operation = false;
   bool honor_trapv = false;
-  tree t, base;
+  tree t, base, div = NULL_TREE;
 
-  if (TREE_CODE_CLASS (code) == tcc_comparison
-      || TREE_CODE_CLASS (code) == tcc_unary
-      || TREE_CODE_CLASS (code) == tcc_binary)
+  if (!expr)
+    return false;
+  code = TREE_CODE (expr);
+  t = TREE_TYPE (expr);
+
+  if (t)
     {
-      t = TREE_TYPE (expr);
       if (COMPARISON_CLASS_P (expr))
        fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
       else
        fp_operation = FLOAT_TYPE_P (t);
-      if (fp_operation)
-       {
-         honor_nans = flag_trapping_math && !flag_finite_math_only;
-         honor_snans = flag_signaling_nans != 0;
-       }
-      else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
-       honor_trapv = true;
+      honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t);
     }
 
+  if (TREE_CODE_CLASS (code) == tcc_binary)
+    div = TREE_OPERAND (expr, 1);
+  if (operation_could_trap_p (code, fp_operation, honor_trapv, div))
+    return true;
+
  restart:
   switch (code)
     {
@@ -1958,93 +2253,126 @@ tree_could_trap_p (tree expr)
     case ASM_EXPR:
       return TREE_THIS_VOLATILE (expr);
 
-    case TRUNC_DIV_EXPR:
-    case CEIL_DIV_EXPR:
-    case FLOOR_DIV_EXPR:
-    case ROUND_DIV_EXPR:
-    case EXACT_DIV_EXPR:
-    case CEIL_MOD_EXPR:
-    case FLOOR_MOD_EXPR:
-    case ROUND_MOD_EXPR:
-    case TRUNC_MOD_EXPR:
-    case RDIV_EXPR:
-      if (honor_snans || honor_trapv)
+
+    case CALL_EXPR:
+      t = get_callee_fndecl (expr);
+      /* Assume that calls to weak functions may trap.  */
+      if (!t || !DECL_P (t) || DECL_WEAK (t))
        return true;
-      if (fp_operation)
-       return flag_trapping_math;
-      t = TREE_OPERAND (expr, 1);
-      if (!TREE_CONSTANT (t) || integer_zerop (t))
-        return true;
       return false;
 
-    case LT_EXPR:
-    case LE_EXPR:
-    case GT_EXPR:
-    case GE_EXPR:
-    case LTGT_EXPR:
-      /* Some floating point comparisons may trap.  */
-      return honor_nans;
+    default:
+      return false;
+    }
+}
 
-    case EQ_EXPR:
-    case NE_EXPR:
-    case UNORDERED_EXPR:
-    case ORDERED_EXPR:
-    case UNLT_EXPR:
-    case UNLE_EXPR:
-    case UNGT_EXPR:
-    case UNGE_EXPR:
-    case UNEQ_EXPR:
-      return honor_snans;
 
-    case CONVERT_EXPR:
-    case FIX_TRUNC_EXPR:
-      /* Conversion of floating point might trap.  */
-      return honor_nans;
+/* Helper for stmt_could_throw_p.  Return true if STMT (assumed to be
+   an assignment or a conditional) may throw.  */
 
-    case NEGATE_EXPR:
-    case ABS_EXPR:
-    case CONJ_EXPR:
-      /* These operations don't trap with floating point.  */
-      if (honor_trapv)
-       return true;
-      return false;
+static bool
+stmt_could_throw_1_p (gimple stmt)
+{
+  enum tree_code code = gimple_expr_code (stmt);
+  bool honor_nans = false;
+  bool honor_snans = false;
+  bool fp_operation = false;
+  bool honor_trapv = false;
+  tree t;
+  size_t i;
+  bool handled, ret;
 
-    case PLUS_EXPR:
-    case MINUS_EXPR:
-    case MULT_EXPR:
-      /* Any floating arithmetic may trap.  */
-      if (fp_operation && flag_trapping_math)
-       return true;
-      if (honor_trapv)
-       return true;
-      return false;
+  if (TREE_CODE_CLASS (code) == tcc_comparison
+      || TREE_CODE_CLASS (code) == tcc_unary
+      || TREE_CODE_CLASS (code) == tcc_binary)
+    {
+      t = gimple_expr_type (stmt);
+      fp_operation = FLOAT_TYPE_P (t);
+      if (fp_operation)
+       {
+         honor_nans = flag_trapping_math && !flag_finite_math_only;
+         honor_snans = flag_signaling_nans != 0;
+       }
+      else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
+       honor_trapv = true;
+    }
+
+  /* Check if the main expression may trap.  */
+  t = is_gimple_assign (stmt) ? gimple_assign_rhs2 (stmt) : NULL;
+  ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv,
+                                      honor_nans, honor_snans, t,
+                                      &handled);
+  if (handled)
+    return ret;
+
+  /* If the expression does not trap, see if any of the individual operands may
+     trap.  */
+  for (i = 0; i < gimple_num_ops (stmt); i++)
+    if (tree_could_trap_p (gimple_op (stmt, i)))
+      return true;
+
+  return false;
+}
+
+
+/* Return true if statement STMT could throw an exception.  */
+
+bool
+stmt_could_throw_p (gimple stmt)
+{
+  enum gimple_code code;
+
+  if (!flag_exceptions)
+    return false;
+
+  /* The only statements that can throw an exception are assignments,
+     conditionals, calls and asms.  */
+  code = gimple_code (stmt);
+  if (code != GIMPLE_ASSIGN
+      && code != GIMPLE_COND
+      && code != GIMPLE_CALL
+      && code != GIMPLE_ASM)
+    return false;
+
+  /* If exceptions can only be thrown by function calls and STMT is not a
+     GIMPLE_CALL, the statement cannot throw.  */
+  if (!flag_non_call_exceptions && code != GIMPLE_CALL)
+    return false;
+
+  if (code == GIMPLE_ASSIGN || code == GIMPLE_COND)
+    return stmt_could_throw_1_p (stmt);
+  else if (is_gimple_call (stmt))
+    {
+      tree t = gimple_call_fndecl (stmt);
 
-    case CALL_EXPR:
-      t = get_callee_fndecl (expr);
       /* Assume that calls to weak functions may trap.  */
       if (!t || !DECL_P (t) || DECL_WEAK (t))
        return true;
-      return false;
 
-    default:
-      /* Any floating arithmetic may trap.  */
-      if (fp_operation && flag_trapping_math)
-       return true;
-      return false;
+      return (gimple_call_flags (stmt) & ECF_NOTHROW) == 0;
     }
+  else if (gimple_code (stmt) == GIMPLE_ASM)
+    return (gimple_asm_volatile_p (stmt));
+  else
+    gcc_unreachable ();
+
+  return false;
 }
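
/* Illustrative sketch, not part of the patch: a typical query over a basic
   block using the tuple statement iterators.  example_bb_may_throw is a
   hypothetical helper.  */

static bool
example_bb_may_throw (basic_block bb)
{
  gimple_stmt_iterator gsi;

  /* Only GIMPLE_ASSIGN, GIMPLE_COND, GIMPLE_CALL and GIMPLE_ASM can
     throw; stmt_could_throw_p filters out everything else.  */
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    if (stmt_could_throw_p (gsi_stmt (gsi)))
      return true;

  return false;
}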
 
+
+/* Return true if expression T could throw an exception.  */
+
 bool
 tree_could_throw_p (tree t)
 {
   if (!flag_exceptions)
     return false;
-  if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
+  if (TREE_CODE (t) == MODIFY_EXPR)
     {
       if (flag_non_call_exceptions
-         && tree_could_trap_p (GIMPLE_STMT_OPERAND (t, 0)))
+         && tree_could_trap_p (TREE_OPERAND (t, 0)))
        return true;
-      t = GIMPLE_STMT_OPERAND (t, 1);
+      t = TREE_OPERAND (t, 1);
     }
 
   if (TREE_CODE (t) == WITH_SIZE_EXPR)
@@ -2056,36 +2384,30 @@ tree_could_throw_p (tree t)
   return false;
 }
 
+
+/* Return true if STMT can throw an exception that is caught within
+   the current function (CFUN).  */
+
 bool
-tree_can_throw_internal (const_tree stmt)
+stmt_can_throw_internal (gimple stmt)
 {
   int region_nr;
   bool is_resx = false;
 
-  if (TREE_CODE (stmt) == RESX_EXPR)
-    region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
+  if (gimple_code (stmt) == GIMPLE_RESX)
+    {
+      region_nr = gimple_resx_region (stmt);
+      is_resx = true;
+    }
   else
     region_nr = lookup_stmt_eh_region (stmt);
+
   if (region_nr < 0)
     return false;
+
   return can_throw_internal_1 (region_nr, is_resx);
 }
 
-bool
-tree_can_throw_external (tree stmt)
-{
-  int region_nr;
-  bool is_resx = false;
-
-  if (TREE_CODE (stmt) == RESX_EXPR)
-    region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
-  else
-    region_nr = lookup_stmt_eh_region (stmt);
-  if (region_nr < 0)
-    return tree_could_throw_p (stmt);
-  else
-    return can_throw_external_1 (region_nr, is_resx);
-}
 
 /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
    OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
@@ -2093,13 +2415,13 @@ tree_can_throw_external (tree stmt)
    done that may require an EH edge purge.  */
 
 bool 
-maybe_clean_or_replace_eh_stmt (tree old_stmt, tree new_stmt) 
+maybe_clean_or_replace_eh_stmt (gimple old_stmt, gimple new_stmt) 
 {
   int region_nr = lookup_stmt_eh_region (old_stmt);
 
   if (region_nr >= 0)
     {
-      bool new_stmt_could_throw = tree_could_throw_p (new_stmt);
+      bool new_stmt_could_throw = stmt_could_throw_p (new_stmt);
 
       if (new_stmt == old_stmt && new_stmt_could_throw)
        return false;
@@ -2117,38 +2439,42 @@ maybe_clean_or_replace_eh_stmt (tree old_stmt, tree new_stmt)
   return false;
 }
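
/* Illustrative sketch, not part of the patch: the common fold-in-place
   pattern.  After a statement has been simplified, passing it as both
   OLD_STMT and NEW_STMT drops it from the EH table when it can no longer
   throw, and the now-dead EH edges can then be purged.  example_after_fold
   is a hypothetical helper.  */

static bool
example_after_fold (gimple stmt)
{
  if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
    return gimple_purge_dead_eh_edges (gimple_bb (stmt));

  return false;
}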
 \f
-/* Returns TRUE if oneh and twoh are exception handlers (op 1 of
-   TRY_CATCH_EXPR or TRY_FINALLY_EXPR that are similar enough to be
-   considered the same.  Currently this only handles handlers consisting of
-   a single call, as that's the important case for C++: a destructor call
-   for a particular object showing up in multiple handlers.  */
+/* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of
+   GIMPLE_TRY) that are similar enough to be considered the same.  Currently
+   this only handles handlers consisting of a single call, as that's the
+   important case for C++: a destructor call for a particular object showing
+   up in multiple handlers.  */
 
 static bool
-same_handler_p (tree oneh, tree twoh)
+same_handler_p (gimple_seq oneh, gimple_seq twoh)
 {
-  tree_stmt_iterator i;
-  tree ones, twos;
-  int ai;
+  gimple_stmt_iterator gsi;
+  gimple ones, twos;
+  unsigned int ai;
 
-  i = tsi_start (oneh);
-  if (!tsi_one_before_end_p (i))
+  gsi = gsi_start (oneh);
+  if (!gsi_one_before_end_p (gsi))
     return false;
-  ones = tsi_stmt (i);
+  ones = gsi_stmt (gsi);
 
-  i = tsi_start (twoh);
-  if (!tsi_one_before_end_p (i))
+  gsi = gsi_start (twoh);
+  if (!gsi_one_before_end_p (gsi))
     return false;
-  twos = tsi_stmt (i);
-
-  if (TREE_CODE (ones) != CALL_EXPR
-      || TREE_CODE (twos) != CALL_EXPR
-      || !operand_equal_p (CALL_EXPR_FN (ones), CALL_EXPR_FN (twos), 0)
-      || call_expr_nargs (ones) != call_expr_nargs (twos))
+  twos = gsi_stmt (gsi);
+
+  if (!is_gimple_call (ones)
+      || !is_gimple_call (twos)
+      || gimple_call_lhs (ones)
+      || gimple_call_lhs (twos)
+      || gimple_call_chain (ones)
+      || gimple_call_chain (twos)
+      || !operand_equal_p (gimple_call_fn (ones), gimple_call_fn (twos), 0)
+      || gimple_call_num_args (ones) != gimple_call_num_args (twos))
     return false;
 
-  for (ai = 0; ai < call_expr_nargs (ones); ++ai)
-    if (!operand_equal_p (CALL_EXPR_ARG (ones, ai),
-                         CALL_EXPR_ARG (twos, ai), 0))
+  for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
+    if (!operand_equal_p (gimple_call_arg (ones, ai),
+                         gimple_call_arg (twos, ai), 0))
       return false;
 
   return true;
@@ -2165,27 +2491,29 @@ same_handler_p (tree oneh, tree twoh)
    temporary used in the initializer for A.  */
 
 static void
-optimize_double_finally (tree one, tree two)
+optimize_double_finally (gimple one, gimple two)
 {
-  tree oneh;
-  tree_stmt_iterator i;
+  gimple oneh;
+  gimple_stmt_iterator gsi;
 
-  i = tsi_start (TREE_OPERAND (one, 1));
-  if (!tsi_one_before_end_p (i))
+  gsi = gsi_start (gimple_try_cleanup (one));
+  if (!gsi_one_before_end_p (gsi))
     return;
 
-  oneh = tsi_stmt (i);
-  if (TREE_CODE (oneh) != TRY_CATCH_EXPR)
+  oneh = gsi_stmt (gsi);
+  if (gimple_code (oneh) != GIMPLE_TRY
+      || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
     return;
 
-  if (same_handler_p (TREE_OPERAND (oneh, 1), TREE_OPERAND (two, 1)))
+  if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
     {
-      tree b = TREE_OPERAND (oneh, 0);
-      TREE_OPERAND (one, 1) = b;
-      TREE_SET_CODE (one, TRY_CATCH_EXPR);
+      gimple_seq seq = gimple_try_eval (oneh);
 
-      i = tsi_start (TREE_OPERAND (two, 0));
-      tsi_link_before (&i, unsave_expr_now (b), TSI_SAME_STMT);
+      gimple_try_set_cleanup (one, seq);
+      gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
+      seq = copy_gimple_seq_and_replace_locals (seq);
+      gimple_seq_add_seq (&seq, gimple_try_eval (two));
+      gimple_try_set_eval (two, seq);
     }
 }
 
@@ -2193,60 +2521,55 @@ optimize_double_finally (tree one, tree two)
    flow has been lowered but EH structures haven't.  */
 
 static void
-refactor_eh_r (tree t)
+refactor_eh_r (gimple_seq seq)
 {
- tailrecurse:
-  switch (TREE_CODE (t))
-    {
-    case TRY_FINALLY_EXPR:
-    case TRY_CATCH_EXPR:
-      refactor_eh_r (TREE_OPERAND (t, 0));
-      t = TREE_OPERAND (t, 1);
-      goto tailrecurse;
+  gimple_stmt_iterator gsi;
+  gimple one, two;
 
-    case CATCH_EXPR:
-      t = CATCH_BODY (t);
-      goto tailrecurse;
-
-    case EH_FILTER_EXPR:
-      t = EH_FILTER_FAILURE (t);
-      goto tailrecurse;
-
-    case STATEMENT_LIST:
-      {
-       tree_stmt_iterator i;
-       tree one = NULL_TREE, two = NULL_TREE;
-       /* Try to refactor double try/finally.  */
-       for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
-         {
-           one = two;
-           two = tsi_stmt (i);
-           if (one && two
-               && TREE_CODE (one) == TRY_FINALLY_EXPR
-               && TREE_CODE (two) == TRY_FINALLY_EXPR)
-             optimize_double_finally (one, two);
-           if (one)
-             refactor_eh_r (one);
-         }
-       if (two)
+  one = NULL;
+  two = NULL;
+  gsi = gsi_start (seq);
+  while (1)
+    {
+      one = two;
+      if (gsi_end_p (gsi))
+       two = NULL;
+      else
+       two = gsi_stmt (gsi);
+      if (one
+         && two
+         && gimple_code (one) == GIMPLE_TRY
+         && gimple_code (two) == GIMPLE_TRY
+         && gimple_try_kind (one) == GIMPLE_TRY_FINALLY
+         && gimple_try_kind (two) == GIMPLE_TRY_FINALLY)
+       optimize_double_finally (one, two);
+      if (one)
+       switch (gimple_code (one))
          {
-           t = two;
-           goto tailrecurse;
+         case GIMPLE_TRY:
+           refactor_eh_r (gimple_try_eval (one));
+           refactor_eh_r (gimple_try_cleanup (one));
+           break;
+         case GIMPLE_CATCH:
+           refactor_eh_r (gimple_catch_handler (one));
+           break;
+         case GIMPLE_EH_FILTER:
+           refactor_eh_r (gimple_eh_filter_failure (one));
+           break;
+         default:
+           break;
          }
-      }
-      break;
-
-    default:
-      /* A type, a decl, or some kind of statement that we're not
-        interested in.  Don't walk them.  */
-      break;
+      if (two)
+       gsi_next (&gsi);
+      else
+       break;
     }
 }
 
 static unsigned
 refactor_eh (void)
 {
-  refactor_eh_r (DECL_SAVED_TREE (current_function_decl));
+  refactor_eh_r (gimple_body (current_function_decl));
   return 0;
 }
 
index 1eb2840..eefc983 100644 (file)
@@ -235,52 +235,6 @@ get_function_ann (tree var)
   return (ann) ? ann : create_function_ann (var);
 }
 
-/* Return true if T has a statement annotation attached to it.  */
-
-static inline bool
-has_stmt_ann (tree t)
-{
-#ifdef ENABLE_CHECKING
-  gcc_assert (is_gimple_stmt (t));
-#endif
-  return t->base.ann && t->base.ann->common.type == STMT_ANN;
-}
-
-/* Return the statement annotation for T, which must be a statement
-   node.  Return NULL if the statement annotation doesn't exist.  */
-static inline stmt_ann_t
-stmt_ann (tree t)
-{
-#ifdef ENABLE_CHECKING
-  gcc_assert (is_gimple_stmt (t));
-#endif
-  gcc_assert (!t->base.ann || t->base.ann->common.type == STMT_ANN);
-  return (stmt_ann_t) t->base.ann;
-}
-
-/* Return the statement annotation for T, which must be a statement
-   node.  Create the statement annotation if it doesn't exist.  */
-static inline stmt_ann_t
-get_stmt_ann (tree stmt)
-{
-  stmt_ann_t ann = stmt_ann (stmt);
-  return (ann) ? ann : create_stmt_ann (stmt);
-}
-
-/* Set the uid of all non phi function statements.  */
-static inline void
-set_gimple_stmt_uid (tree stmt, unsigned int uid)
-{
-  get_stmt_ann (stmt)->uid = uid;
-}
-
-/* Get the uid of all non phi function statements.  */
-static inline unsigned int
-gimple_stmt_uid (tree stmt)
-{
-  return get_stmt_ann (stmt)->uid;
-}
-
 /* Get the number of the next statement uid to be allocated.  */
 static inline unsigned int
 gimple_stmt_max_uid (struct function *fn)
@@ -309,19 +263,6 @@ ann_type (tree_ann_t ann)
   return ann->common.type;
 }
 
-/* Return the basic block for statement T.  */
-static inline basic_block
-bb_for_stmt (tree t)
-{
-  stmt_ann_t ann;
-
-  if (TREE_CODE (t) == PHI_NODE)
-    return PHI_BB (t);
-
-  ann = stmt_ann (t);
-  return ann ? ann->bb : NULL;
-}
-
 /* Return the may_aliases bitmap for variable VAR, or NULL if it has
    no may aliases.  */
 static inline bitmap
@@ -333,71 +274,18 @@ may_aliases (const_tree var)
 /* Return the line number for statement STMT, or -1 if we have no line
    number information for it.  */
 static inline int
-get_lineno (const_tree expr)
+get_lineno (const_gimple stmt)
 {
-  if (expr == NULL_TREE)
-    return -1;
-
-  if (TREE_CODE (expr) == COMPOUND_EXPR)
-    expr = TREE_OPERAND (expr, 0);
+  location_t loc;
 
-  if (! EXPR_HAS_LOCATION (expr))
+  if (!stmt)
     return -1;
 
-  return EXPR_LINENO (expr);
-}
-
-/* Return true if T is a noreturn call.  */
-static inline bool
-noreturn_call_p (tree t)
-{
-  tree call = get_call_expr_in (t);
-  return call != 0 && (call_expr_flags (call) & ECF_NORETURN) != 0;
-}
-
-/* Mark statement T as modified.  */
-static inline void
-mark_stmt_modified (tree t)
-{
-  stmt_ann_t ann;
-  if (TREE_CODE (t) == PHI_NODE)
-    return;
-
-  ann = stmt_ann (t);
-  if (ann == NULL)
-    ann = create_stmt_ann (t);
-  else if (noreturn_call_p (t) && cfun->gimple_df)
-    VEC_safe_push (tree, gc, MODIFIED_NORETURN_CALLS (cfun), t);
-  ann->modified = 1;
-}
-
-/* Mark statement T as modified, and update it.  */
-static inline void
-update_stmt (tree t)
-{
-  if (TREE_CODE (t) == PHI_NODE)
-    return;
-  mark_stmt_modified (t);
-  update_stmt_operands (t);
-}
-
-static inline void
-update_stmt_if_modified (tree t)
-{
-  if (stmt_modified_p (t))
-    update_stmt_operands (t);
-}
-
-/* Return true if T is marked as modified, false otherwise.  */
-static inline bool
-stmt_modified_p (tree t)
-{
-  stmt_ann_t ann = stmt_ann (t);
+  loc = gimple_location (stmt);
+  if (loc == UNKNOWN_LOCATION)
+    return -1;
 
-  /* Note that if the statement doesn't yet have an annotation, we consider it
-     modified.  This will force the next call to update_stmt_operands to scan 
-     the statement.  */
-  return ann ? ann->modified : true;
+  return LOCATION_LINE (loc);
 }
 
 /* Delink an immediate_uses node from its chain.  */
@@ -457,13 +345,13 @@ set_ssa_use_from_ptr (use_operand_p use, tree val)
 /* Link ssa_imm_use node LINKNODE into the chain for DEF, with use occurring 
    in STMT.  */
 static inline void
-link_imm_use_stmt (ssa_use_operand_t *linknode, tree def, tree stmt)
+link_imm_use_stmt (ssa_use_operand_t *linknode, tree def, gimple stmt)
 {
   if (stmt)
     link_imm_use (linknode, def);
   else
     link_imm_use (linknode, NULL);
-  linknode->stmt = stmt;
+  linknode->loc.stmt = stmt;
 }
 
 /* Relink a new node in place of an old node in the list.  */
@@ -486,13 +374,14 @@ relink_imm_use (ssa_use_operand_t *node, ssa_use_operand_t *old)
 /* Relink ssa_imm_use node LINKNODE into the chain for OLD, with use occurring 
    in STMT.  */
 static inline void
-relink_imm_use_stmt (ssa_use_operand_t *linknode, ssa_use_operand_t *old, tree stmt)
+relink_imm_use_stmt (ssa_use_operand_t *linknode, ssa_use_operand_t *old,
+                    gimple stmt)
 {
   if (stmt)
     relink_imm_use (linknode, old);
   else
     link_imm_use (linknode, NULL);
-  linknode->stmt = stmt;
+  linknode->loc.stmt = stmt;
 }
 
 
@@ -562,17 +451,17 @@ has_single_use (const_tree var)
 /* If VAR has only a single immediate use, return true, and set USE_P and STMT
    to the use pointer and stmt of occurrence.  */
 static inline bool
-single_imm_use (const_tree var, use_operand_p *use_p, tree *stmt)
+single_imm_use (const_tree var, use_operand_p *use_p, gimple *stmt)
 {
   const ssa_use_operand_t *const ptr = &(SSA_NAME_IMM_USE_NODE (var));
   if (ptr != ptr->next && ptr == ptr->next->next)
     {
       *use_p = ptr->next;
-      *stmt = ptr->next->stmt;
+      *stmt = ptr->next->loc.stmt;
       return true;
     }
   *use_p = NULL_USE_OPERAND_P;
-  *stmt = NULL_TREE;
+  *stmt = NULL;
   return false;
 }
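
/* Illustrative sketch, not part of the patch: the out parameter is now a
   'gimple', so callers get the using statement directly.  The helper name
   is hypothetical; NAME is any SSA_NAME.  */

static bool
example_single_use_is_cond (tree name)
{
  use_operand_p use_p;
  gimple use_stmt;

  return (single_imm_use (name, &use_p, &use_stmt)
	  && gimple_code (use_stmt) == GIMPLE_COND);
}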
 
@@ -590,75 +479,76 @@ num_imm_uses (const_tree var)
   return num;
 }
 
-/* Return the tree pointeto by USE.  */ 
+/* Return the tree pointed-to by USE.  */ 
 static inline tree
 get_use_from_ptr (use_operand_p use)
 { 
   return *(use->use);
 } 
 
-/* Return the tree pointeto by DEF.  */
+/* Return the tree pointed-to by DEF.  */
 static inline tree
 get_def_from_ptr (def_operand_p def)
 {
   return *def;
 }
 
-/* Return a def_operand_p pointer for the result of PHI.  */
-static inline def_operand_p
-get_phi_result_ptr (tree phi)
+/* Return a use_operand_p pointer for argument I of PHI node GS.  */
+
+static inline use_operand_p
+gimple_phi_arg_imm_use_ptr (gimple gs, int i)
 {
-  return &(PHI_RESULT_TREE (phi));
+  return &gimple_phi_arg (gs, i)->imm_use;
 }
 
-/* Return a use_operand_p pointer for argument I of phinode PHI.  */
-static inline use_operand_p
-get_phi_arg_def_ptr (tree phi, int i)
+/* Return the tree operand for argument INDEX of PHI node GS.  */
+
+static inline tree
+gimple_phi_arg_def (gimple gs, size_t index)
 {
-  return &(PHI_ARG_IMM_USE_NODE (phi,i));
+  struct phi_arg_d *pd = gimple_phi_arg (gs, index);
+  return get_use_from_ptr (&pd->imm_use);
 }
 
+/* Return a pointer to the tree operand for argument INDEX of PHI node GS.  */
 
-/* Return the bitmap of addresses taken by STMT, or NULL if it takes
-   no addresses.  */
-static inline bitmap
-addresses_taken (tree stmt)
+static inline tree *
+gimple_phi_arg_def_ptr (gimple gs, size_t index)
+{
+  return &gimple_phi_arg (gs, index)->def;
+}
+
+/* Return the edge associated with argument I of phi node GS.  */
+
+static inline edge
+gimple_phi_arg_edge (gimple gs, size_t i)
 {
-  stmt_ann_t ann = stmt_ann (stmt);
-  return ann ? ann->addresses_taken : NULL;
+  return EDGE_PRED (gimple_bb (gs), i);
 }
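
/* Illustrative sketch, not part of the patch: walking the arguments of the
   PHI nodes in a block with the tuplified accessors.  The function name is
   hypothetical.  */

static unsigned
example_count_phi_args (basic_block bb)
{
  gimple_stmt_iterator gsi;
  unsigned n = 0;

  for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
    n += gimple_phi_num_args (gsi_stmt (gsi));

  return n;
}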
 
 /* Return the PHI nodes for basic block BB, or NULL if there are no
    PHI nodes.  */
-static inline tree
+static inline gimple_seq
 phi_nodes (const_basic_block bb)
 {
   gcc_assert (!(bb->flags & BB_RTL));
-  if (!bb->il.tree)
+  if (!bb->il.gimple)
     return NULL;
-  return bb->il.tree->phi_nodes;
+  return bb->il.gimple->phi_nodes;
 }
 
-/* Return pointer to the list of PHI nodes for basic block BB.  */
-
-static inline tree *
-phi_nodes_ptr (basic_block bb)
-{
-  gcc_assert (!(bb->flags & BB_RTL));
-  return &bb->il.tree->phi_nodes;
-}
-
-/* Set list of phi nodes of a basic block BB to L.  */
+/* Set PHI nodes of a basic block BB to SEQ.  */
 
 static inline void
-set_phi_nodes (basic_block bb, tree l)
+set_phi_nodes (basic_block bb, gimple_seq seq)
 {
-  tree phi;
+  gimple_stmt_iterator i;
 
   gcc_assert (!(bb->flags & BB_RTL));
-  bb->il.tree->phi_nodes = l;
-  for (phi = l; phi; phi = PHI_CHAIN (phi))
-    set_bb_for_stmt (phi, bb);
+  bb->il.gimple->phi_nodes = seq;
+  if (seq)
+    for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
+      gimple_set_bb (gsi_stmt (i), bb);
 }
 
 /* Return the phi argument which contains the specified use.  */
@@ -667,18 +557,18 @@ static inline int
 phi_arg_index_from_use (use_operand_p use)
 {
   struct phi_arg_d *element, *root;
-  int index;
-  tree phi;
+  size_t index;
+  gimple phi;
 
   /* Since the use is the first thing in a PHI argument element, we can
      calculate its index based on casting it to an argument, and performing
      pointer arithmetic.  */
 
   phi = USE_STMT (use);
-  gcc_assert (TREE_CODE (phi) == PHI_NODE);
+  gcc_assert (gimple_code (phi) == GIMPLE_PHI);
 
   element = (struct phi_arg_d *)use;
-  root = &(PHI_ARG_ELT (phi, 0));
+  root = gimple_phi_arg (phi, 0);
   index = element - root;
 
 #ifdef ENABLE_CHECKING
@@ -686,7 +576,7 @@ phi_arg_index_from_use (use_operand_p use)
      then imm_use is likely not the first element in phi_arg_d.  */
   gcc_assert (
          (((char *)element - (char *)root) % sizeof (struct phi_arg_d)) == 0);
-  gcc_assert (index >= 0 && index < PHI_ARG_CAPACITY (phi));
+  gcc_assert (index < gimple_phi_capacity (phi));
 #endif
  
  return index;
@@ -728,121 +618,13 @@ phi_ssa_name_p (const_tree t)
   return false;
 }
 
-/*  -----------------------------------------------------------------------  */
-
-/* Returns the list of statements in BB.  */
-
-static inline tree
-bb_stmt_list (const_basic_block bb)
-{
-  gcc_assert (!(bb->flags & BB_RTL));
-  return bb->il.tree->stmt_list;
-}
-
-/* Sets the list of statements in BB to LIST.  */
-
-static inline void
-set_bb_stmt_list (basic_block bb, tree list)
-{
-  gcc_assert (!(bb->flags & BB_RTL));
-  bb->il.tree->stmt_list = list;
-}
-
-/* Return a block_stmt_iterator that points to beginning of basic
-   block BB.  */
-static inline block_stmt_iterator
-bsi_start (basic_block bb)
-{
-  block_stmt_iterator bsi;
-  if (bb->index < NUM_FIXED_BLOCKS)
-    {
-      bsi.tsi.ptr = NULL;
-      bsi.tsi.container = NULL;
-    }
-  else
-    bsi.tsi = tsi_start (bb_stmt_list (bb));
-  bsi.bb = bb;
-  return bsi;
-}
-
-/* Return a block statement iterator that points to the first non-label
-   statement in block BB.  */
-
-static inline block_stmt_iterator
-bsi_after_labels (basic_block bb)
-{
-  block_stmt_iterator bsi = bsi_start (bb);
-
-  while (!bsi_end_p (bsi) && TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
-    bsi_next (&bsi);
-
-  return bsi;
-}
-
-/* Return a block statement iterator that points to the end of basic
-   block BB.  */
-static inline block_stmt_iterator
-bsi_last (basic_block bb)
-{
-  block_stmt_iterator bsi;
-
-  if (bb->index < NUM_FIXED_BLOCKS)
-    {
-      bsi.tsi.ptr = NULL;
-      bsi.tsi.container = NULL;
-    }
-  else
-    bsi.tsi = tsi_last (bb_stmt_list (bb));
-  bsi.bb = bb;
-  return bsi;
-}
-
-/* Return true if block statement iterator I has reached the end of
-   the basic block.  */
-static inline bool
-bsi_end_p (block_stmt_iterator i)
-{
-  return tsi_end_p (i.tsi);
-}
-
-/* Modify block statement iterator I so that it is at the next
-   statement in the basic block.  */
-static inline void
-bsi_next (block_stmt_iterator *i)
-{
-  tsi_next (&i->tsi);
-}
-
-/* Modify block statement iterator I so that it is at the previous
-   statement in the basic block.  */
-static inline void
-bsi_prev (block_stmt_iterator *i)
-{
-  tsi_prev (&i->tsi);
-}
-
-/* Return the statement that block statement iterator I is currently
-   at.  */
-static inline tree
-bsi_stmt (block_stmt_iterator i)
-{
-  return tsi_stmt (i.tsi);
-}
-
-/* Return a pointer to the statement that block statement iterator I
-   is currently at.  */
-static inline tree *
-bsi_stmt_ptr (block_stmt_iterator i)
-{
-  return tsi_stmt_ptr (i.tsi);
-}
 
 /* Returns the loop of the statement STMT.  */
 
 static inline struct loop *
-loop_containing_stmt (tree stmt)
+loop_containing_stmt (gimple stmt)
 {
-  basic_block bb = bb_for_stmt (stmt);
+  basic_block bb = gimple_bb (stmt);
   if (!bb)
     return NULL;
 
@@ -1083,7 +865,7 @@ clear_and_done_ssa_iter (ssa_op_iter *ptr)
   ptr->iter_type = ssa_op_iter_none;
   ptr->phi_i = 0;
   ptr->num_phi = 0;
-  ptr->phi_stmt = NULL_TREE;
+  ptr->phi_stmt = NULL;
   ptr->done = true;
   ptr->vuse_index = 0;
   ptr->mayuse_index = 0;
@@ -1091,22 +873,18 @@ clear_and_done_ssa_iter (ssa_op_iter *ptr)
 
 /* Initialize the iterator PTR to the virtual defs in STMT.  */
 static inline void
-op_iter_init (ssa_op_iter *ptr, tree stmt, int flags)
+op_iter_init (ssa_op_iter *ptr, gimple stmt, int flags)
 {
-#ifdef ENABLE_CHECKING
-  gcc_assert (stmt_ann (stmt));
-#endif
-
-  ptr->defs = (flags & SSA_OP_DEF) ? DEF_OPS (stmt) : NULL;
-  ptr->uses = (flags & SSA_OP_USE) ? USE_OPS (stmt) : NULL;
-  ptr->vuses = (flags & SSA_OP_VUSE) ? VUSE_OPS (stmt) : NULL;
-  ptr->vdefs = (flags & SSA_OP_VDEF) ? VDEF_OPS (stmt) : NULL;
-  ptr->mayuses = (flags & SSA_OP_VMAYUSE) ? VDEF_OPS (stmt) : NULL;
+  ptr->defs = (flags & SSA_OP_DEF) ? gimple_def_ops (stmt) : NULL;
+  ptr->uses = (flags & SSA_OP_USE) ? gimple_use_ops (stmt) : NULL;
+  ptr->vuses = (flags & SSA_OP_VUSE) ? gimple_vuse_ops (stmt) : NULL;
+  ptr->vdefs = (flags & SSA_OP_VDEF) ? gimple_vdef_ops (stmt) : NULL;
+  ptr->mayuses = (flags & SSA_OP_VMAYUSE) ? gimple_vdef_ops (stmt) : NULL;
   ptr->done = false;
 
   ptr->phi_i = 0;
   ptr->num_phi = 0;
-  ptr->phi_stmt = NULL_TREE;
+  ptr->phi_stmt = NULL;
   ptr->vuse_index = 0;
   ptr->mayuse_index = 0;
 }
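
/* Illustrative sketch, not part of the patch: the operand iterator is now
   keyed off a 'gimple' statement rather than an annotated tree.  The helper
   name is hypothetical.  */

static unsigned
example_count_ssa_uses (gimple stmt)
{
  ssa_op_iter iter;
  tree use;
  unsigned n = 0;

  /* Visit every real (non-virtual) SSA use operand of STMT.  */
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    n++;

  return n;
}
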
@@ -1114,7 +892,7 @@ op_iter_init (ssa_op_iter *ptr, tree stmt, int flags)
 /* Initialize iterator PTR to the use operands in STMT based on FLAGS. Return
    the first use.  */
 static inline use_operand_p
-op_iter_init_use (ssa_op_iter *ptr, tree stmt, int flags)
+op_iter_init_use (ssa_op_iter *ptr, gimple stmt, int flags)
 {
   gcc_assert ((flags & SSA_OP_ALL_DEFS) == 0);
   op_iter_init (ptr, stmt, flags);
@@ -1125,7 +903,7 @@ op_iter_init_use (ssa_op_iter *ptr, tree stmt, int flags)
 /* Initialize iterator PTR to the def operands in STMT based on FLAGS. Return
    the first def.  */
 static inline def_operand_p
-op_iter_init_def (ssa_op_iter *ptr, tree stmt, int flags)
+op_iter_init_def (ssa_op_iter *ptr, gimple stmt, int flags)
 {
   gcc_assert ((flags & SSA_OP_ALL_USES) == 0);
   op_iter_init (ptr, stmt, flags);
@@ -1136,7 +914,7 @@ op_iter_init_def (ssa_op_iter *ptr, tree stmt, int flags)
 /* Initialize iterator PTR to the operands in STMT based on FLAGS. Return
    the first operand as a tree.  */
 static inline tree
-op_iter_init_tree (ssa_op_iter *ptr, tree stmt, int flags)
+op_iter_init_tree (ssa_op_iter *ptr, gimple stmt, int flags)
 {
   op_iter_init (ptr, stmt, flags);
   ptr->iter_type = ssa_op_iter_tree;
@@ -1185,10 +963,10 @@ op_iter_next_mustdef (use_operand_p *use, def_operand_p *def,
 /* Initialize iterator PTR to the operands in STMT.  Return the first operands
    in USE and DEF.  */
 static inline void
-op_iter_init_vdef (ssa_op_iter *ptr, tree stmt, vuse_vec_p *use, 
+op_iter_init_vdef (ssa_op_iter *ptr, gimple stmt, vuse_vec_p *use, 
                     def_operand_p *def)
 {
-  gcc_assert (TREE_CODE (stmt) != PHI_NODE);
+  gcc_assert (gimple_code (stmt) != GIMPLE_PHI);
 
   op_iter_init (ptr, stmt, SSA_OP_VMAYUSE);
   ptr->iter_type = ssa_op_iter_vdef;
@@ -1199,7 +977,7 @@ op_iter_init_vdef (ssa_op_iter *ptr, tree stmt, vuse_vec_p *use,
 /* If there is a single operand in STMT matching FLAGS, return it.  Otherwise
    return NULL.  */
 static inline tree
-single_ssa_tree_operand (tree stmt, int flags)
+single_ssa_tree_operand (gimple stmt, int flags)
 {
   tree var;
   ssa_op_iter iter;
@@ -1217,7 +995,7 @@ single_ssa_tree_operand (tree stmt, int flags)
 /* If there is a single operand in STMT matching FLAGS, return it.  Otherwise
    return NULL.  */
 static inline use_operand_p
-single_ssa_use_operand (tree stmt, int flags)
+single_ssa_use_operand (gimple stmt, int flags)
 {
   use_operand_p var;
   ssa_op_iter iter;
@@ -1236,7 +1014,7 @@ single_ssa_use_operand (tree stmt, int flags)
 /* If there is a single operand in STMT matching FLAGS, return it.  Otherwise
    return NULL.  */
 static inline def_operand_p
-single_ssa_def_operand (tree stmt, int flags)
+single_ssa_def_operand (gimple stmt, int flags)
 {
   def_operand_p var;
   ssa_op_iter iter;
@@ -1254,7 +1032,7 @@ single_ssa_def_operand (tree stmt, int flags)
 /* Return true if there are zero operands in STMT matching the type 
    given in FLAGS.  */
 static inline bool
-zero_ssa_operands (tree stmt, int flags)
+zero_ssa_operands (gimple stmt, int flags)
 {
   ssa_op_iter iter;
 
@@ -1265,7 +1043,7 @@ zero_ssa_operands (tree stmt, int flags)
 
 /* Return the number of operands matching FLAGS in STMT.  */
 static inline int
-num_ssa_operands (tree stmt, int flags)
+num_ssa_operands (gimple stmt, int flags)
 {
   ssa_op_iter iter;
   tree t;
@@ -1279,7 +1057,7 @@ num_ssa_operands (tree stmt, int flags)
 
 /* Delink all immediate_use information for STMT.  */
 static inline void
-delink_stmt_imm_use (tree stmt)
+delink_stmt_imm_use (gimple stmt)
 {
    ssa_op_iter iter;
    use_operand_p use_p;
@@ -1293,7 +1071,7 @@ delink_stmt_imm_use (tree stmt)
 /* This routine will compare all the operands matching FLAGS in STMT1 to those
    in STMT2.  TRUE is returned if they are the same.  STMTs can be NULL.  */
 static inline bool
-compare_ssa_operands_equal (tree stmt1, tree stmt2, int flags)
+compare_ssa_operands_equal (gimple stmt1, gimple stmt2, int flags)
 {
   ssa_op_iter iter1, iter2;
   tree op1 = NULL_TREE;
@@ -1303,8 +1081,8 @@ compare_ssa_operands_equal (tree stmt1, tree stmt2, int flags)
   if (stmt1 == stmt2)
     return true;
 
-  look1 = stmt1 && stmt_ann (stmt1);
-  look2 = stmt2 && stmt_ann (stmt2);
+  look1 = stmt1 != NULL;
+  look2 = stmt2 != NULL;
 
   if (look1)
     {
@@ -1339,7 +1117,7 @@ compare_ssa_operands_equal (tree stmt1, tree stmt2, int flags)
 /* If there is a single DEF in the PHI node which matches FLAG, return it.
    Otherwise return NULL_DEF_OPERAND_P.  */
 static inline tree
-single_phi_def (tree stmt, int flags)
+single_phi_def (gimple stmt, int flags)
 {
   tree def = PHI_RESULT (stmt);
   if ((flags & SSA_OP_DEF) && is_gimple_reg (def)) 
@@ -1352,9 +1130,9 @@ single_phi_def (tree stmt, int flags)
 /* Initialize the iterator PTR for uses matching FLAGS in PHI.  FLAGS should
    be either SSA_OP_USES or SSA_OP_VIRTUAL_USES.  */
 static inline use_operand_p
-op_iter_init_phiuse (ssa_op_iter *ptr, tree phi, int flags)
+op_iter_init_phiuse (ssa_op_iter *ptr, gimple phi, int flags)
 {
-  tree phi_def = PHI_RESULT (phi);
+  tree phi_def = gimple_phi_result (phi);
   int comp;
 
   clear_and_done_ssa_iter (ptr);
@@ -1372,7 +1150,7 @@ op_iter_init_phiuse (ssa_op_iter *ptr, tree phi, int flags)
     }
 
   ptr->phi_stmt = phi;
-  ptr->num_phi = PHI_NUM_ARGS (phi);
+  ptr->num_phi = gimple_phi_num_args (phi);
   ptr->iter_type = ssa_op_iter_use;
   return op_iter_next_use (ptr);
 }
@@ -1381,7 +1159,7 @@ op_iter_init_phiuse (ssa_op_iter *ptr, tree phi, int flags)
 /* Start an iterator for a PHI definition.  */
 
 static inline def_operand_p
-op_iter_init_phidef (ssa_op_iter *ptr, tree phi, int flags)
+op_iter_init_phidef (ssa_op_iter *ptr, gimple phi, int flags)
 {
   tree phi_def = PHI_RESULT (phi);
   int comp;
@@ -1461,7 +1239,7 @@ link_use_stmts_after (use_operand_p head, imm_use_iterator *imm)
 {
   use_operand_p use_p;
   use_operand_p last_p = head;
-  tree head_stmt = USE_STMT (head);
+  gimple head_stmt = USE_STMT (head);
   tree use = USE_FROM_PTR (head);
   ssa_op_iter op_iter;
   int flag;
@@ -1469,7 +1247,7 @@ link_use_stmts_after (use_operand_p head, imm_use_iterator *imm)
   /* Only look at virtual or real uses, depending on the type of HEAD.  */
   flag = (is_gimple_reg (use) ? SSA_OP_USE : SSA_OP_VIRTUAL_USES);
 
-  if (TREE_CODE (head_stmt) == PHI_NODE)
+  if (gimple_code (head_stmt) == GIMPLE_PHI)
     {
       FOR_EACH_PHI_ARG (use_p, head_stmt, op_iter, flag)
        if (USE_FROM_PTR (use_p) == use)
@@ -1488,7 +1266,7 @@ link_use_stmts_after (use_operand_p head, imm_use_iterator *imm)
 }
 
 /* Initialize IMM to traverse over uses of VAR.  Return the first statement.  */
-static inline tree
+static inline gimple
 first_imm_use_stmt (imm_use_iterator *imm, tree var)
 {
   gcc_assert (TREE_CODE (var) == SSA_NAME);
@@ -1502,11 +1280,11 @@ first_imm_use_stmt (imm_use_iterator *imm, tree var)
      stmt and use, which indicates a marker node.  */
   imm->iter_node.prev = NULL_USE_OPERAND_P;
   imm->iter_node.next = NULL_USE_OPERAND_P;
-  imm->iter_node.stmt = NULL_TREE;
+  imm->iter_node.loc.stmt = NULL;
   imm->iter_node.use = NULL_USE_OPERAND_P;
 
   if (end_imm_use_stmt_p (imm))
-    return NULL_TREE;
+    return NULL;
 
   link_use_stmts_after (imm->imm_use, imm);
 
@@ -1515,7 +1293,7 @@ first_imm_use_stmt (imm_use_iterator *imm, tree var)
 
 /* Bump IMM to the next stmt which has a use of var.  */
 
-static inline tree
+static inline gimple
 next_imm_use_stmt (imm_use_iterator *imm)
 {
   imm->imm_use = imm->iter_node.next;
@@ -1523,7 +1301,7 @@ next_imm_use_stmt (imm_use_iterator *imm)
     {
       if (imm->iter_node.prev != NULL)
        delink_imm_use (&imm->iter_node);
-      return NULL_TREE;
+      return NULL;
     }
 
   link_use_stmts_after (imm->imm_use, imm);
@@ -1694,7 +1472,7 @@ redirect_edge_var_map_result (edge_var_map *v)
    in function cfun.  */
 
 static inline tree
-make_ssa_name (tree var, tree stmt)
+make_ssa_name (tree var, gimple stmt)
 {
   return make_ssa_name_fn (cfun, var, stmt);
 }
index a441893..96c4795 100644 (file)
@@ -26,7 +26,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "hard-reg-set.h"
 #include "basic-block.h"
 #include "hashtab.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "tree-ssa-operands.h"
 #include "cgraph.h"
 #include "ipa-reference.h"
@@ -141,12 +141,12 @@ struct gimple_df GTY(())
   /* Array of all variables referenced in the function.  */
   htab_t GTY((param_is (union tree_node))) referenced_vars;
 
-  /* A list of all the noreturn calls passed to modify_stmt.
+  /* A vector of all the noreturn calls passed to modify_stmt.
      cleanup_control_flow uses it to detect cases where a mid-block
      indirect call has been turned into a noreturn call.  When this
      happens, all the instructions after the call are no longer
      reachable and must be deleted as dead.  */
-  VEC(tree,gc) *modified_noreturn_calls;
+  VEC(gimple,gc) *modified_noreturn_calls;
 
   /* Array of all SSA_NAMEs used in the function.  */
   VEC(tree,gc) *ssa_names;
@@ -268,19 +268,27 @@ struct ptr_info_def GTY(())
 /*---------------------------------------------------------------------------
                   Tree annotations stored in tree_base.ann
 ---------------------------------------------------------------------------*/
-enum tree_ann_type { TREE_ANN_COMMON, VAR_ANN, FUNCTION_ANN, STMT_ANN };
+enum tree_ann_type { TREE_ANN_COMMON, VAR_ANN, FUNCTION_ANN };
 
 struct tree_ann_common_d GTY(())
 {
   /* Annotation type.  */
   enum tree_ann_type type;
 
- /* Auxiliary info specific to a pass.  At all times, this
-    should either point to valid data or be NULL.  */ 
+  /* Record EH region number into a statement tree created during RTL
+     expansion (see gimple_to_tree).  */
+  int rn;
+
+  /* Auxiliary info specific to a pass.  At all times, this
+     should either point to valid data or be NULL.  */ 
   PTR GTY ((skip (""))) aux; 
 
   /* The value handle for this expression.  Used by GVN-PRE.  */
   tree GTY((skip)) value_handle;
+
+  /* Pointer to original GIMPLE statement.  Used during RTL expansion
+     (see gimple_to_tree).  */
+  gimple stmt;
 };
 
 /* It is advantageous to avoid things like life analysis for variables which
@@ -417,6 +425,42 @@ struct function_ann_d GTY(())
   ipa_reference_vars_info_t GTY ((skip)) reference_vars_info;
 };
 
+
+/* Immediate use lists are used to directly access all uses for an SSA
+   name and get pointers to the statement for each use. 
+
+   The structure ssa_use_operand_d consists of PREV and NEXT pointers
+   to maintain the list, a USE pointer that points to the address
+   where the use is located, and a LOC pointer that points either to
+   the statement containing the use or, in the case of the root node,
+   to the SSA name itself.
+
+   The list is anchored by an occurrence of ssa_operand_d *in* the
+   ssa_name node itself (named 'imm_uses').  This node is uniquely
+   identified by having a NULL USE pointer and the LOC pointer
+   pointing back to the ssa_name node itself.  This node forms the
+   base for a circular list, and initially this is the only node in
+   the list.
+
+   Fast iteration allows each use to be examined, but does not allow
+   any modifications to the uses or stmts.
+
+   Normal iteration allows insertion, deletion, and modification.  The
+   iterator manages this by inserting a marker node into the list
+   immediately before the node currently being examined in the list.
+   This marker node is uniquely identified by having a NULL stmt *and*
+   a NULL use pointer.
+
+   When iterating to the next use, the iteration routines check to see
+   if the node after the marker has changed.  If it has, then the node
+   following the marker is now the next one to be visited.  If not, the
+   marker node is moved past that node in the list (visualize it as
+   bumping the marker node through the list).  This continues until
+   the marker node is moved to the original anchor position.  The
+   marker node is then removed from the list.
+
+   If iteration is halted early, the marker node must be removed from
+   the list before continuing.  */
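
/* Illustrative sketch, not part of the patch: the marker-based (normal)
   iteration described above, using the FOR_EACH_IMM_USE_STMT and
   BREAK_FROM_IMM_USE_STMT macros from this file.  The helper name is
   hypothetical; NAME is any SSA_NAME.  */

static bool
example_all_uses_are_phis (tree name)
{
  imm_use_iterator imm_iter;
  gimple use_stmt;
  bool ok = true;

  /* Each using statement is visited once; BREAK_FROM_IMM_USE_STMT
     unlinks the marker node before leaving the loop early.  */
  FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, name)
    if (gimple_code (use_stmt) != GIMPLE_PHI)
      {
	ok = false;
	BREAK_FROM_IMM_USE_STMT (imm_iter);
      }

  return ok;
}
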
 typedef struct immediate_use_iterator_d
 {
   /* This is the current use the iterator is processing.  */
@@ -476,50 +520,16 @@ typedef struct immediate_use_iterator_d
 
 
 
-struct stmt_ann_d GTY(())
-{
-  struct tree_ann_common_d common;
-
-  /* Basic block that contains this statement.  */
-  basic_block bb;
-
-  /* Operand cache for stmt.  */
-  struct stmt_operands_d GTY ((skip (""))) operands;
-
-  /* Set of variables that have had their address taken in the statement.  */
-  bitmap addresses_taken;
-
-  /* Unique identifier for this statement.  These ID's are to be
-     created by each pass on an as-needed basis in any order
-     convenient for the pass which needs statement UIDs.  This field
-     should only be accessed thru set_gimple_stmt_uid and
-     gimple_stmt_uid functions.  */
-  unsigned int uid;
-
-  /* Nonzero if the statement references memory (at least one of its
-     expressions contains a non-register operand).  */
-  unsigned references_memory : 1;
-
-  /* Nonzero if the statement has been modified (meaning that the operands
-     need to be scanned again).  */
-  unsigned modified : 1;
-
-  /* Nonzero if the statement makes references to volatile storage.  */
-  unsigned has_volatile_ops : 1;
-};
-
 union tree_ann_d GTY((desc ("ann_type ((tree_ann_t)&%h)")))
 {
   struct tree_ann_common_d GTY((tag ("TREE_ANN_COMMON"))) common;
   struct var_ann_d GTY((tag ("VAR_ANN"))) vdecl;
   struct function_ann_d GTY((tag ("FUNCTION_ANN"))) fdecl;
-  struct stmt_ann_d GTY((tag ("STMT_ANN"))) stmt;
 };
 
 typedef union tree_ann_d *tree_ann_t;
 typedef struct var_ann_d *var_ann_t;
 typedef struct function_ann_d *function_ann_t;
-typedef struct stmt_ann_d *stmt_ann_t;
 typedef struct tree_ann_common_d *tree_ann_common_t;
 
 static inline tree_ann_common_t tree_common_ann (const_tree);
@@ -528,18 +538,10 @@ static inline var_ann_t var_ann (const_tree);
 static inline var_ann_t get_var_ann (tree);
 static inline function_ann_t function_ann (const_tree);
 static inline function_ann_t get_function_ann (tree);
-static inline stmt_ann_t stmt_ann (tree);
-static inline bool has_stmt_ann (tree);
-static inline stmt_ann_t get_stmt_ann (tree);
 static inline enum tree_ann_type ann_type (tree_ann_t);
-static inline basic_block bb_for_stmt (tree);
-extern void set_bb_for_stmt (tree, basic_block);
-static inline bool noreturn_call_p (tree);
-static inline void update_stmt (tree);
-static inline bool stmt_modified_p (tree);
+static inline void update_stmt (gimple);
 static inline bitmap may_aliases (const_tree);
-static inline int get_lineno (const_tree);
-static inline bitmap addresses_taken (tree);
+static inline int get_lineno (const_gimple);
 
 /*---------------------------------------------------------------------------
                   Structure representing predictions in tree level.
@@ -553,8 +555,8 @@ struct edge_prediction GTY((chain_next ("%h.ep_next")))
 };
 
 /* Accessors for basic block annotations.  */
-static inline tree phi_nodes (const_basic_block);
-static inline void set_phi_nodes (basic_block, tree);
+static inline gimple_seq phi_nodes (const_basic_block);
+static inline void set_phi_nodes (basic_block, gimple_seq);
 
 /*---------------------------------------------------------------------------
                              Global declarations
@@ -627,48 +629,6 @@ extern bool referenced_var_check_and_insert (tree);
 #define PERCENT(x,y) ((float)(x) * 100.0 / (float)(y))
 
 /*---------------------------------------------------------------------------
-                             Block iterators
----------------------------------------------------------------------------*/
-
-typedef struct {
-  tree_stmt_iterator tsi;
-  basic_block bb;
-} block_stmt_iterator;
-
-static inline block_stmt_iterator bsi_start (basic_block);
-static inline block_stmt_iterator bsi_last (basic_block);
-static inline block_stmt_iterator bsi_after_labels (basic_block);
-block_stmt_iterator bsi_for_stmt (tree);
-static inline bool bsi_end_p (block_stmt_iterator);
-static inline void bsi_next (block_stmt_iterator *);
-static inline void bsi_prev (block_stmt_iterator *);
-static inline tree bsi_stmt (block_stmt_iterator);
-static inline tree * bsi_stmt_ptr (block_stmt_iterator);
-
-extern void bsi_remove (block_stmt_iterator *, bool);
-extern void bsi_move_before (block_stmt_iterator *, block_stmt_iterator *);
-extern void bsi_move_after (block_stmt_iterator *, block_stmt_iterator *);
-extern void bsi_move_to_bb_end (block_stmt_iterator *, basic_block);
-
-enum bsi_iterator_update
-{
-  /* Note that these are intentionally in the same order as TSI_FOO.  They
-     mean exactly the same as their TSI_* counterparts.  */
-  BSI_NEW_STMT,
-  BSI_SAME_STMT,
-  BSI_CHAIN_START,
-  BSI_CHAIN_END,
-  BSI_CONTINUE_LINKING
-};
-
-extern void bsi_insert_before (block_stmt_iterator *, tree,
-                              enum bsi_iterator_update);
-extern void bsi_insert_after (block_stmt_iterator *, tree,
-                             enum bsi_iterator_update);
-
-extern void bsi_replace (const block_stmt_iterator *, tree, bool);
-
-/*---------------------------------------------------------------------------
                              OpenMP Region Tree
 ---------------------------------------------------------------------------*/
 
@@ -702,7 +662,7 @@ struct omp_region
   tree ws_args;
 
   /* The code for the omp directive of this region.  */
-  enum tree_code type;
+  enum gimple_code type;
 
   /* Schedule kind, only used for OMP_FOR type regions.  */
   enum omp_clause_schedule_kind sched_kind;
@@ -712,7 +672,7 @@ struct omp_region
 };
 
 extern struct omp_region *root_omp_region;
-extern struct omp_region *new_omp_region (basic_block, enum tree_code,
+extern struct omp_region *new_omp_region (basic_block, enum gimple_code,
                                          struct omp_region *);
 extern void free_omp_regions (void);
 void omp_expand_local (basic_block);
@@ -725,20 +685,20 @@ tree copy_var_decl (tree, tree, tree);
 /* In tree-cfg.c  */
 
 /* Location to track pending stmt for edge insertion.  */
-#define PENDING_STMT(e)        ((e)->insns.t)
+#define PENDING_STMT(e)        ((e)->insns.g)
 
 extern void delete_tree_cfg_annotations (void);
-extern bool stmt_ends_bb_p (const_tree);
-extern bool is_ctrl_stmt (const_tree);
-extern bool is_ctrl_altering_stmt (const_tree);
-extern bool simple_goto_p (const_tree);
-extern bool tree_can_make_abnormal_goto (const_tree);
+extern bool stmt_ends_bb_p (gimple);
+extern bool is_ctrl_stmt (gimple);
+extern bool is_ctrl_altering_stmt (gimple);
+extern bool simple_goto_p (gimple);
+extern bool stmt_can_make_abnormal_goto (gimple);
 extern basic_block single_noncomplex_succ (basic_block bb);
-extern void tree_dump_bb (basic_block, FILE *, int);
-extern void debug_tree_bb (basic_block);
-extern basic_block debug_tree_bb_n (int);
-extern void dump_tree_cfg (FILE *, int);
-extern void debug_tree_cfg (int);
+extern void gimple_dump_bb (basic_block, FILE *, int, int);
+extern void gimple_debug_bb (basic_block);
+extern basic_block gimple_debug_bb_n (int);
+extern void gimple_dump_cfg (FILE *, int);
+extern void gimple_debug_cfg (int);
 extern void dump_cfg_stats (FILE *);
 extern void dot_cfg (void);
 extern void debug_cfg_stats (void);
@@ -749,40 +709,35 @@ extern void print_loops (FILE *, int);
 extern void print_loops_bb (FILE *, basic_block, int, int);
 extern void cleanup_dead_labels (void);
 extern void group_case_labels (void);
-extern tree first_stmt (basic_block);
-extern tree last_stmt (basic_block);
-extern tree last_and_only_stmt (basic_block);
+extern gimple first_stmt (basic_block);
+extern gimple last_stmt (basic_block);
+extern gimple last_and_only_stmt (basic_block);
 extern edge find_taken_edge (basic_block, tree);
 extern basic_block label_to_block_fn (struct function *, tree);
 #define label_to_block(t) (label_to_block_fn (cfun, t))
-extern void bsi_insert_on_edge (edge, tree);
-extern basic_block bsi_insert_on_edge_immediate (edge, tree);
-extern void bsi_commit_one_edge_insert (edge, basic_block *);
-extern void bsi_commit_edge_inserts (void);
-extern void notice_special_calls (tree);
+extern void notice_special_calls (gimple);
 extern void clear_special_calls (void);
 extern void verify_stmts (void);
 extern void verify_gimple (void);
-extern void verify_gimple_1 (tree);
-extern tree tree_block_label (basic_block);
+extern void verify_types_in_gimple_seq (gimple_seq);
+extern tree gimple_block_label (basic_block);
 extern void extract_true_false_edges_from_block (basic_block, edge *, edge *);
-extern bool tree_duplicate_sese_region (edge, edge, basic_block *, unsigned,
+extern bool gimple_duplicate_sese_region (edge, edge, basic_block *, unsigned,
                                        basic_block *);
-extern bool tree_duplicate_sese_tail (edge, edge, basic_block *, unsigned,
+extern bool gimple_duplicate_sese_tail (edge, edge, basic_block *, unsigned,
                                      basic_block *);
 extern void gather_blocks_in_sese_region (basic_block entry, basic_block exit,
                                          VEC(basic_block,heap) **bbs_p);
 extern void add_phi_args_after_copy_bb (basic_block);
 extern void add_phi_args_after_copy (basic_block *, unsigned, edge);
-extern bool tree_purge_dead_abnormal_call_edges (basic_block);
-extern bool tree_purge_dead_eh_edges (basic_block);
-extern bool tree_purge_all_dead_eh_edges (const_bitmap);
-extern tree gimplify_val (block_stmt_iterator *, tree, tree);
-extern tree gimplify_build1 (block_stmt_iterator *, enum tree_code,
+extern bool gimple_purge_dead_abnormal_call_edges (basic_block);
+extern bool gimple_purge_dead_eh_edges (basic_block);
+extern bool gimple_purge_all_dead_eh_edges (const_bitmap);
+extern tree gimplify_build1 (gimple_stmt_iterator *, enum tree_code,
                             tree, tree);
-extern tree gimplify_build2 (block_stmt_iterator *, enum tree_code,
+extern tree gimplify_build2 (gimple_stmt_iterator *, enum tree_code,
                             tree, tree, tree);
-extern tree gimplify_build3 (block_stmt_iterator *, enum tree_code,
+extern tree gimplify_build3 (gimple_stmt_iterator *, enum tree_code,
                             tree, tree, tree, tree);
 extern void init_empty_tree_cfg (void);
 extern void init_empty_tree_cfg_for_function (struct function *);
@@ -807,7 +762,6 @@ extern const char *op_symbol_code (enum tree_code);
 /* In tree-dfa.c  */
 extern var_ann_t create_var_ann (tree);
 extern function_ann_t create_function_ann (tree);
-extern stmt_ann_t create_stmt_ann (tree);
 extern void renumber_gimple_stmt_uids (void);
 extern tree_ann_common_t create_tree_common_ann (tree);
 extern void dump_dfa_stats (FILE *);
@@ -819,27 +773,26 @@ extern void debug_variable (tree);
 extern tree get_virtual_var (tree);
 extern void add_referenced_var (tree);
 extern void remove_referenced_var (tree);
-extern void mark_symbols_for_renaming (tree);
-extern void find_new_referenced_vars (tree *);
+extern void mark_symbols_for_renaming (gimple);
+extern void find_new_referenced_vars (gimple);
 extern tree make_rename_temp (tree, const char *);
 extern void set_default_def (tree, tree);
 extern tree gimple_default_def (struct function *, tree);
-extern bool stmt_references_abnormal_ssa_name (tree);
+extern bool stmt_references_abnormal_ssa_name (gimple);
 extern bool refs_may_alias_p (tree, tree);
-extern tree get_single_def_stmt (tree);
-extern tree get_single_def_stmt_from_phi (tree, tree);
-extern tree get_single_def_stmt_with_phi (tree, tree);
+extern gimple get_single_def_stmt (gimple);
+extern gimple get_single_def_stmt_from_phi (tree, gimple);
+extern gimple get_single_def_stmt_with_phi (tree, gimple);
 
 /* In tree-phinodes.c  */
 extern void reserve_phi_args_for_new_edge (basic_block);
-extern tree create_phi_node (tree, basic_block);
-extern void add_phi_arg (tree, tree, edge);
+extern gimple create_phi_node (tree, basic_block);
+extern void add_phi_arg (gimple, tree, edge);
 extern void remove_phi_args (edge);
-extern void remove_phi_node (tree, tree, bool);
-extern tree phi_reverse (tree);
+extern void remove_phi_node (gimple_stmt_iterator *, bool);
 extern void init_phinodes (void);
 extern void fini_phinodes (void);
-extern void release_phi_node (tree);
+extern void release_phi_node (gimple);
 #ifdef GATHER_STATISTICS
 extern void phinodes_print_statistics (void);
 #endif
@@ -848,6 +801,8 @@ extern void phinodes_print_statistics (void);
 extern void record_vars_into (tree, tree);
 extern void record_vars (tree);
 extern bool block_may_fallthru (const_tree);
+extern bool gimple_seq_may_fallthru (gimple_seq);
+extern bool gimple_stmt_may_fallthru (gimple);
 
 /* In tree-ssa-alias.c  */
 extern unsigned int compute_may_aliases (void);
@@ -864,7 +819,7 @@ extern bool may_alias_p (tree, alias_set_type, tree, alias_set_type, bool);
 extern struct ptr_info_def *get_ptr_info (tree);
 extern bool may_point_to_global_var (tree);
 extern void new_type_alias (tree, tree, tree);
-extern void count_uses_and_derefs (tree, tree, unsigned *, unsigned *,
+extern void count_uses_and_derefs (tree, gimple, unsigned *, unsigned *,
                                   unsigned *);
 static inline bool ref_contains_array_ref (const_tree);
 static inline bool array_ref_contains_indirect_ref (const_tree);
@@ -881,7 +836,7 @@ extern void debug_all_mem_sym_stats (void);
 
 /* Call-back function for walk_use_def_chains().  At each reaching
    definition, a function with this prototype is called.  */
-typedef bool (*walk_use_def_chains_fn) (tree, tree, void *);
+typedef bool (*walk_use_def_chains_fn) (tree, gimple, void *);
 
 /* In tree-ssa-alias-warnings.c  */
 extern void strict_aliasing_warning_backend (void);
@@ -918,7 +873,6 @@ extern bool types_compatible_p (tree, tree);
 extern void verify_ssa (bool);
 extern void delete_tree_ssa (void);
 extern void walk_use_def_chains (tree, walk_use_def_chains_fn, void *, bool);
-extern bool stmt_references_memory_p (tree);
 extern bool ssa_undefined_value_p (tree);
 
 
@@ -926,14 +880,13 @@ extern bool ssa_undefined_value_p (tree);
 void update_ssa (unsigned);
 void delete_update_ssa (void);
 void register_new_name_mapping (tree, tree);
-tree create_new_def_for (tree, tree, def_operand_p);
+tree create_new_def_for (tree, gimple, def_operand_p);
 bool need_ssa_update_p (void);
 bool name_mappings_registered_p (void);
 bool name_registered_for_update_p (tree);
 bitmap ssa_names_to_replace (void);
 void release_ssa_name_after_update_ssa (tree);
 void compute_global_livein (bitmap, bitmap);
-tree duplicate_ssa_name (tree, tree);
 void mark_sym_for_renaming (tree);
 void mark_set_for_renaming (bitmap);
 tree get_current_def (tree);
@@ -942,11 +895,11 @@ void set_current_def (tree, tree);
 /* In tree-ssanames.c  */
 extern void init_ssanames (struct function *, int);
 extern void fini_ssanames (void);
-extern tree make_ssa_name_fn (struct function *, tree, tree);
-extern tree duplicate_ssa_name (tree, tree);
+extern tree make_ssa_name_fn (struct function *, tree, gimple);
+extern tree duplicate_ssa_name (tree, gimple);
 extern void duplicate_ssa_name_ptr_info (tree, struct ptr_info_def *);
 extern void release_ssa_name (tree);
-extern void release_defs (tree);
+extern void release_defs (gimple);
 extern void replace_ssa_name_symbol (tree, tree);
 
 #ifdef GATHER_STATISTICS
@@ -954,14 +907,14 @@ extern void ssanames_print_statistics (void);
 #endif
 
 /* In tree-ssa-ccp.c  */
-bool fold_stmt (tree *);
-bool fold_stmt_inplace (tree);
+bool fold_stmt (gimple_stmt_iterator *);
+bool fold_stmt_inplace (gimple);
 tree get_symbol_constant_value (tree);
 tree fold_const_aggregate_ref (tree);
 
 /* In tree-vrp.c  */
-tree vrp_evaluate_conditional (enum tree_code, tree, tree, tree);
-void simplify_stmt_using_ranges (tree);
+tree vrp_evaluate_conditional (enum tree_code, tree, tree, gimple);
+void simplify_stmt_using_ranges (gimple);
 
 /* In tree-ssa-dom.c  */
 extern void dump_dominator_optimization_stats (FILE *);
@@ -972,8 +925,10 @@ int loop_depth_of_name (tree);
 extern void merge_alias_info (tree, tree);
 extern void propagate_value (use_operand_p, tree);
 extern void propagate_tree_value (tree *, tree);
+extern void propagate_tree_value_into_stmt (gimple_stmt_iterator *, tree);
 extern void replace_exp (use_operand_p, tree);
 extern bool may_propagate_copy (tree, tree);
+extern bool may_propagate_copy_into_stmt (gimple, tree);
 extern bool may_propagate_copy_into_asm (tree);
 
 /* Affine iv.  */
@@ -1046,8 +1001,8 @@ tree find_loop_niter (struct loop *, edge *);
 tree loop_niter_by_eval (struct loop *, edge);
 tree find_loop_niter_by_eval (struct loop *, edge *);
 void estimate_numbers_of_iterations (void);
-bool scev_probably_wraps_p (tree, tree, tree, struct loop *, bool);
-bool convert_affine_scev (struct loop *, tree, tree *, tree *, tree, bool);
+bool scev_probably_wraps_p (tree, tree, gimple, struct loop *, bool);
+bool convert_affine_scev (struct loop *, tree, tree *, tree *, gimple, bool);
 
 bool nowrap_type_p (tree);
 enum ev_direction {EV_DIR_GROWS, EV_DIR_DECREASES, EV_DIR_UNKNOWN};
@@ -1058,14 +1013,14 @@ void free_numbers_of_iterations_estimates_loop (struct loop *);
 void rewrite_into_loop_closed_ssa (bitmap, unsigned);
 void verify_loop_closed_ssa (void);
 bool for_each_index (tree *, bool (*) (tree, tree *, void *), void *);
-void create_iv (tree, tree, tree, struct loop *, block_stmt_iterator *, bool,
+void create_iv (tree, tree, tree, struct loop *, gimple_stmt_iterator *, bool,
                tree *, tree *);
 basic_block split_loop_exit_edge (edge);
-void standard_iv_increment_position (struct loop *, block_stmt_iterator *,
+void standard_iv_increment_position (struct loop *, gimple_stmt_iterator *,
                                     bool *);
 basic_block ip_end_pos (struct loop *);
 basic_block ip_normal_pos (struct loop *);
-bool tree_duplicate_loop_to_header_edge (struct loop *, edge,
+bool gimple_duplicate_loop_to_header_edge (struct loop *, edge,
                                         unsigned int, sbitmap,
                                         edge, VEC (edge, heap) **,
                                         int);
@@ -1085,13 +1040,13 @@ void tree_transform_and_unroll_loop (struct loop *, unsigned,
                                     edge, struct tree_niter_desc *,
                                     transform_callback, void *);
 bool contains_abnormal_ssa_name_p (tree);
-bool stmt_dominates_stmt_p (tree, tree);
-void mark_virtual_ops_for_renaming (tree);
+bool stmt_dominates_stmt_p (gimple, gimple);
+void mark_virtual_ops_for_renaming (gimple);
 
 /* In tree-ssa-threadedge.c */
 extern bool potentially_threadable_block (basic_block);
-extern void thread_across_edge (tree, edge, bool,
-                               VEC(tree, heap) **, tree (*) (tree, tree));
+extern void thread_across_edge (gimple, edge, bool,
+                               VEC(tree, heap) **, tree (*) (gimple, gimple));
 
 /* In tree-ssa-loop-im.c  */
 /* The possibilities of statement movement.  */
@@ -1103,7 +1058,7 @@ enum move_pos
                                   become executed -- memory accesses, ... */
     MOVE_POSSIBLE              /* Unlimited movement.  */
   };
-extern enum move_pos movement_possibility (tree);
+extern enum move_pos movement_possibility (gimple);
 char *get_lsm_tmp_name (tree, unsigned);
 
 /* In tree-flow-inline.h  */
@@ -1113,15 +1068,22 @@ static inline void set_is_used (tree);
 static inline bool unmodifiable_var_p (const_tree);
 
 /* In tree-eh.c  */
-extern void make_eh_edges (tree);
+extern void make_eh_edges (gimple);
 extern bool tree_could_trap_p (tree);
+extern bool operation_could_trap_p (enum tree_code, bool, bool, tree);
+extern bool stmt_could_throw_p (gimple);
 extern bool tree_could_throw_p (tree);
-extern bool tree_can_throw_internal (const_tree);
-extern bool tree_can_throw_external (tree);
-extern int lookup_stmt_eh_region (const_tree);
-extern void add_stmt_to_eh_region (tree, int);
-extern bool remove_stmt_from_eh_region (tree);
-extern bool maybe_clean_or_replace_eh_stmt (tree, tree);
+extern bool stmt_can_throw_internal (gimple);
+extern void add_stmt_to_eh_region (gimple, int);
+extern bool remove_stmt_from_eh_region (gimple);
+extern bool maybe_clean_or_replace_eh_stmt (gimple, gimple);
+extern void add_stmt_to_eh_region_fn (struct function *, gimple, int);
+extern bool remove_stmt_from_eh_region_fn (struct function *, gimple);
+extern int lookup_stmt_eh_region_fn (struct function *, gimple);
+extern int lookup_expr_eh_region (tree);
+extern int lookup_stmt_eh_region (gimple);
+extern bool verify_eh_edges (gimple);
+
 
 /* In tree-ssa-pre.c  */
 struct pre_expr_d;
@@ -1137,21 +1099,21 @@ bool expressions_equal_p (tree, tree);
 void sort_vuses (VEC (tree, gc) *);
 void sort_vuses_heap (VEC (tree, heap) *);
 tree vn_lookup_or_add (tree);
-tree vn_lookup_or_add_with_stmt (tree, tree);
+tree vn_lookup_or_add_with_stmt (tree, gimple);
 tree vn_lookup_or_add_with_vuses (tree, VEC (tree, gc) *);
 void vn_add (tree, tree);
 void vn_add_with_vuses (tree, tree, VEC (tree, gc) *);
-tree vn_lookup_with_stmt (tree, tree);
+tree vn_lookup_with_stmt (tree, gimple);
 tree vn_lookup (tree);
 tree vn_lookup_with_vuses (tree, VEC (tree, gc) *);
 
 /* In tree-ssa-sink.c  */
-bool is_hidden_global_store (tree);
+bool is_hidden_global_store (gimple);
 
 /* In tree-sra.c  */
-void insert_edge_copies (tree, basic_block);
-void sra_insert_before (block_stmt_iterator *, tree);
-void sra_insert_after (block_stmt_iterator *, tree);
+void insert_edge_copies_seq (gimple_seq, basic_block);
+void sra_insert_before (gimple_stmt_iterator *, gimple_seq);
+void sra_insert_after (gimple_stmt_iterator *, gimple_seq);
 void sra_init_cache (void);
 bool sra_type_can_be_decomposed_p (tree);
 
@@ -1163,6 +1125,7 @@ extern void tree_check_data_deps (void);
 
 /* In tree-ssa-loop-ivopts.c  */
 bool expr_invariant_in_loop_p (struct loop *, tree);
+bool stmt_invariant_in_loop_p (struct loop *, gimple);
 bool multiplier_allowed_in_address_p (HOST_WIDE_INT, enum machine_mode);
 unsigned multiply_by_cost (HOST_WIDE_INT, enum machine_mode);
 
@@ -1171,9 +1134,9 @@ extern bool thread_through_all_blocks (bool);
 extern void register_jump_thread (edge, edge);
 
 /* In gimplify.c  */
-tree force_gimple_operand (tree, tree *, bool, tree);
-tree force_gimple_operand_bsi (block_stmt_iterator *, tree, bool, tree,
-                              bool, enum bsi_iterator_update);
+tree force_gimple_operand (tree, gimple_seq *, bool, tree);
+tree force_gimple_operand_gsi (gimple_stmt_iterator *, tree, bool, tree,
+                              bool, enum gsi_iterator_update);
 tree gimple_fold_indirect_ref (tree);
 
 /* In tree-ssa-structalias.c */
@@ -1194,7 +1157,7 @@ struct mem_address
 };
 
 struct affine_tree_combination;
-tree create_mem_ref (block_stmt_iterator *, tree, 
+tree create_mem_ref (gimple_stmt_iterator *, tree, 
                     struct affine_tree_combination *);
 rtx addr_for_mem_ref (struct mem_address *, bool);
 void get_address_description (tree, struct mem_address *);
@@ -1206,7 +1169,7 @@ unsigned int execute_fixup_cfg (void);
 
 #include "tree-flow-inline.h"
 
-void swap_tree_operands (tree, tree *, tree *);
+void swap_tree_operands (gimple, tree *, tree *);
 
 int least_common_multiple (int, int);
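
The prototype changes above reflect the branch-wide switch from block_stmt_iterator/bsi_* to gimple_stmt_iterator/gsi_* and from statements represented as trees to the new gimple type. What follows is a minimal sketch of the new iteration idiom, not part of the patch: the function name is invented, the includes are the usual GCC-internal ones, and every gsi_*/gimple_* call used here also appears in the converted tree-if-conv.c hunks further down.

/* Illustrative sketch only; not part of the patch.  Walk BB with the new
   tuples iterator, dropping GIMPLE_LABELs and counting GIMPLE_ASSIGNs.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "tree-flow.h"

static unsigned
example_scan_block (basic_block bb)
{
  gimple_stmt_iterator gsi;
  unsigned n_assigns = 0;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
    {
      gimple stmt = gsi_stmt (gsi);     /* Statements are 'gimple', not 'tree'.  */

      if (gimple_code (stmt) == GIMPLE_LABEL)
        {
          gsi_remove (&gsi, true);      /* Removes the stmt; GSI now points at the next one.  */
          continue;
        }

      if (gimple_code (stmt) == GIMPLE_ASSIGN)
        n_assigns++;

      gsi_next (&gsi);
    }

  return n_assigns;
}
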
 
diff --git a/gcc/tree-gimple.c b/gcc/tree-gimple.c
deleted file mode 100644
index 8b05f93..0000000
+++ /dev/null
@@ -1,653 +0,0 @@
-/* Functions to analyze and validate GIMPLE trees.
-   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007
-   Free Software Foundation, Inc.
-   Contributed by Diego Novillo <dnovillo@redhat.com>
-   Rewritten by Jason Merrill <jason@redhat.com>
-
-This file is part of GCC.
-
-GCC is free software; you can redistribute it and/or modify
-it under the terms of the GNU General Public License as published by
-the Free Software Foundation; either version 3, or (at your option)
-any later version.
-
-GCC is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING3.  If not see
-<http://www.gnu.org/licenses/>.  */
-
-#include "config.h"
-#include "system.h"
-#include "coretypes.h"
-#include "ggc.h"
-#include "tm.h"
-#include "tree.h"
-#include "tree-gimple.h"
-#include "tree-flow.h"
-#include "output.h"
-#include "rtl.h"
-#include "expr.h"
-#include "bitmap.h"
-
-/* For the definitive definition of GIMPLE, see doc/tree-ssa.texi.  */
-
-/* Validation of GIMPLE expressions.  */
-
-/* Return true if T is a GIMPLE RHS for an assignment to a temporary.  */
-
-bool
-is_gimple_formal_tmp_rhs (tree t)
-{
-  enum tree_code code = TREE_CODE (t);
-
-  switch (TREE_CODE_CLASS (code))
-    {
-    case tcc_unary:
-    case tcc_binary:
-    case tcc_comparison:
-      return true;
-
-    default:
-      break;
-    }
-
-  switch (code)
-    {
-    case TRUTH_NOT_EXPR:
-    case TRUTH_AND_EXPR:
-    case TRUTH_OR_EXPR:
-    case TRUTH_XOR_EXPR:
-    case COND_EXPR:
-    case ADDR_EXPR:
-    case CALL_EXPR:
-    case CONSTRUCTOR:
-    case COMPLEX_EXPR:
-    case INTEGER_CST:
-    case REAL_CST:
-    case FIXED_CST:
-    case STRING_CST:
-    case COMPLEX_CST:
-    case VECTOR_CST:
-    case OBJ_TYPE_REF:
-    case ASSERT_EXPR:
-      return true;
-
-    default:
-      break;
-    }
-
-  return is_gimple_lvalue (t) || is_gimple_val (t);
-}
-
-/* Returns true iff T is a valid RHS for an assignment to a renamed
-   user -- or front-end generated artificial -- variable.  */
-
-bool
-is_gimple_reg_rhs (tree t)
-{
-  /* If the RHS of the GIMPLE_MODIFY_STMT may throw or make a nonlocal goto
-     and the LHS is a user variable, then we need to introduce a formal
-     temporary.  This way the optimizers can determine that the user
-     variable is only modified if evaluation of the RHS does not throw.
-
-     Don't force a temp of a non-renamable type; the copy could be
-     arbitrarily expensive.  Instead we will generate a VDEF for
-     the assignment.  */
-
-  if (is_gimple_reg_type (TREE_TYPE (t))
-      && ((TREE_CODE (t) == CALL_EXPR && TREE_SIDE_EFFECTS (t))
-         || tree_could_throw_p (t)))
-    return false;
-
-  return is_gimple_formal_tmp_rhs (t);
-}
-
-/* Returns true iff T is a valid RHS for an assignment to an un-renamed
-   LHS, or for a call argument.  */
-
-bool
-is_gimple_mem_rhs (tree t)
-{
-  /* If we're dealing with a renamable type, either source or dest must be
-     a renamed variable.  */
-  if (is_gimple_reg_type (TREE_TYPE (t)))
-    return is_gimple_val (t);
-  else
-    return is_gimple_formal_tmp_rhs (t);
-}
-
-/* Returns the appropriate RHS predicate for this LHS.  */
-
-gimple_predicate
-rhs_predicate_for (tree lhs)
-{
-  if (is_gimple_formal_tmp_var (lhs))
-    return is_gimple_formal_tmp_rhs;
-  else if (is_gimple_reg (lhs))
-    return is_gimple_reg_rhs;
-  else
-    return is_gimple_mem_rhs;
-}
-
-/*  Return true if T is a valid LHS for a GIMPLE assignment expression.  */
-
-bool
-is_gimple_lvalue (tree t)
-{
-  return (is_gimple_addressable (t)
-         || TREE_CODE (t) == WITH_SIZE_EXPR
-         /* These are complex lvalues, but don't have addresses, so they
-            go here.  */
-         || TREE_CODE (t) == BIT_FIELD_REF);
-}
-
-/*  Return true if T is a GIMPLE condition.  */
-
-bool
-is_gimple_condexpr (tree t)
-{
-  return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
-                               && !tree_could_trap_p (t)
-                               && is_gimple_val (TREE_OPERAND (t, 0))
-                               && is_gimple_val (TREE_OPERAND (t, 1))));
-}
-
-/*  Return true if T is something whose address can be taken.  */
-
-bool
-is_gimple_addressable (tree t)
-{
-  return (is_gimple_id (t) || handled_component_p (t)
-         || INDIRECT_REF_P (t));
-}
-
-/* Return true if T is a valid gimple constant.  */
-
-bool
-is_gimple_constant (const_tree t)
-{
-  switch (TREE_CODE (t))
-    {
-    case INTEGER_CST:
-    case REAL_CST:
-    case FIXED_CST:
-    case STRING_CST:
-    case COMPLEX_CST:
-    case VECTOR_CST:
-      return true;
-
-    /* Vector constant constructors are gimple invariant.  */
-    case CONSTRUCTOR:
-      if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
-       return TREE_CONSTANT (t);
-      else
-       return false;
-
-    default:
-      return false;
-    }
-}
-
-/* Return true if T is a gimple address.  */
-
-bool
-is_gimple_address (const_tree t)
-{
-  tree op;
-
-  if (TREE_CODE (t) != ADDR_EXPR)
-    return false;
-
-  op = TREE_OPERAND (t, 0);
-  while (handled_component_p (op))
-    {
-      if ((TREE_CODE (op) == ARRAY_REF
-          || TREE_CODE (op) == ARRAY_RANGE_REF)
-         && !is_gimple_val (TREE_OPERAND (op, 1)))
-           return false;
-
-      op = TREE_OPERAND (op, 0);
-    }
-
-  if (CONSTANT_CLASS_P (op) || INDIRECT_REF_P (op))
-    return true;
-
-  switch (TREE_CODE (op))
-    {
-    case PARM_DECL:
-    case RESULT_DECL:
-    case LABEL_DECL:
-    case FUNCTION_DECL:
-    case VAR_DECL:
-    case CONST_DECL:
-      return true;
-
-    default:
-      return false;
-    }
-}
-
-/* Return true if T is a gimple invariant address.  */
-
-bool
-is_gimple_invariant_address (const_tree t)
-{
-  tree op;
-
-  if (TREE_CODE (t) != ADDR_EXPR)
-    return false;
-
-  op = TREE_OPERAND (t, 0);
-  while (handled_component_p (op))
-    {
-      switch (TREE_CODE (op))
-       {
-       case ARRAY_REF:
-       case ARRAY_RANGE_REF:
-         if (!is_gimple_constant (TREE_OPERAND (op, 1))
-             || TREE_OPERAND (op, 2) != NULL_TREE
-             || TREE_OPERAND (op, 3) != NULL_TREE)
-           return false;
-         break;
-
-       case COMPONENT_REF:
-         if (TREE_OPERAND (op, 2) != NULL_TREE)
-           return false;
-         break;
-
-       default:;
-       }
-      op = TREE_OPERAND (op, 0);
-    }
-
-  return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
-}
-
-/* Return true if T is a GIMPLE minimal invariant.  It's a restricted
-   form of function invariant.  */
-
-bool
-is_gimple_min_invariant (const_tree t)
-{
-  if (TREE_CODE (t) == ADDR_EXPR)
-    return is_gimple_invariant_address (t);
-
-  return is_gimple_constant (t);
-}
-
-/* Return true if T looks like a valid GIMPLE statement.  */
-
-bool
-is_gimple_stmt (tree t)
-{
-  const enum tree_code code = TREE_CODE (t);
-
-  switch (code)
-    {
-    case NOP_EXPR:
-      /* The only valid NOP_EXPR is the empty statement.  */
-      return IS_EMPTY_STMT (t);
-
-    case BIND_EXPR:
-    case COND_EXPR:
-      /* These are only valid if they're void.  */
-      return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
-
-    case SWITCH_EXPR:
-    case GOTO_EXPR:
-    case RETURN_EXPR:
-    case LABEL_EXPR:
-    case CASE_LABEL_EXPR:
-    case TRY_CATCH_EXPR:
-    case TRY_FINALLY_EXPR:
-    case EH_FILTER_EXPR:
-    case CATCH_EXPR:
-    case CHANGE_DYNAMIC_TYPE_EXPR:
-    case ASM_EXPR:
-    case RESX_EXPR:
-    case PHI_NODE:
-    case STATEMENT_LIST:
-    case OMP_PARALLEL:
-    case OMP_FOR:
-    case OMP_SECTIONS:
-    case OMP_SECTIONS_SWITCH:
-    case OMP_SECTION:
-    case OMP_SINGLE:
-    case OMP_MASTER:
-    case OMP_ORDERED:
-    case OMP_CRITICAL:
-    case OMP_RETURN:
-    case OMP_CONTINUE:
-    case OMP_TASK:
-    case OMP_ATOMIC_LOAD:
-    case OMP_ATOMIC_STORE:
-      /* These are always void.  */
-      return true;
-
-    case CALL_EXPR:
-    case GIMPLE_MODIFY_STMT:
-    case PREDICT_EXPR:
-      /* These are valid regardless of their type.  */
-      return true;
-
-    default:
-      return false;
-    }
-}
-
-/* Return true if T is a variable.  */
-
-bool
-is_gimple_variable (tree t)
-{
-  return (TREE_CODE (t) == VAR_DECL
-         || TREE_CODE (t) == PARM_DECL
-         || TREE_CODE (t) == RESULT_DECL
-         || TREE_CODE (t) == SSA_NAME);
-}
-
-/*  Return true if T is a GIMPLE identifier (something with an address).  */
-
-bool
-is_gimple_id (tree t)
-{
-  return (is_gimple_variable (t)
-         || TREE_CODE (t) == FUNCTION_DECL
-         || TREE_CODE (t) == LABEL_DECL
-         || TREE_CODE (t) == CONST_DECL
-         /* Allow string constants, since they are addressable.  */
-         || TREE_CODE (t) == STRING_CST);
-}
-
-/* Return true if TYPE is a suitable type for a scalar register variable.  */
-
-bool
-is_gimple_reg_type (tree type)
-{
-  /* In addition to aggregate types, we also exclude complex types if not
-     optimizing because they can be subject to partial stores in GNU C by
-     means of the __real__ and __imag__ operators and we cannot promote
-     them to total stores (see gimplify_modify_expr_complex_part).  */
-  return !(AGGREGATE_TYPE_P (type)
-          || (TREE_CODE (type) == COMPLEX_TYPE && !optimize));
-
-}
-
-/* Return true if T is a non-aggregate register variable.  */
-
-bool
-is_gimple_reg (tree t)
-{
-  if (TREE_CODE (t) == SSA_NAME)
-    t = SSA_NAME_VAR (t);
-
-  if (MTAG_P (t))
-    return false;
-
-  if (!is_gimple_variable (t))
-    return false;
-
-  if (!is_gimple_reg_type (TREE_TYPE (t)))
-    return false;
-
-  /* A volatile decl is not acceptable because we can't reuse it as
-     needed.  We need to copy it into a temp first.  */
-  if (TREE_THIS_VOLATILE (t))
-    return false;
-
-  /* We define "registers" as things that can be renamed as needed,
-     which with our infrastructure does not apply to memory.  */
-  if (needs_to_live_in_memory (t))
-    return false;
-
-  /* Hard register variables are an interesting case.  For those that
-     are call-clobbered, we don't know where all the calls are, since
-     we don't (want to) take into account which operations will turn
-     into libcalls at the rtl level.  For those that are call-saved,
-     we don't currently model the fact that calls may in fact change
-     global hard registers, nor do we examine ASM_CLOBBERS at the tree
-     level, and so miss variable changes that might imply.  All around,
-     it seems safest to not do too much optimization with these at the
-     tree level at all.  We'll have to rely on the rtl optimizers to
-     clean this up, as there we've got all the appropriate bits exposed.  */
-  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
-    return false;
-
-  /* Complex and vector values must have been put into SSA-like form.
-     That is, no assignments to the individual components.  */
-  if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
-      || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
-    return DECL_GIMPLE_REG_P (t);
-
-  return true;
-}
-
-
-/* Returns true if T is a GIMPLE formal temporary variable.  */
-
-bool
-is_gimple_formal_tmp_var (tree t)
-{
-  if (TREE_CODE (t) == SSA_NAME)
-    return true;
-
-  return TREE_CODE (t) == VAR_DECL && DECL_GIMPLE_FORMAL_TEMP_P (t);
-}
-
-/* Returns true if T is a GIMPLE formal temporary register variable.  */
-
-bool
-is_gimple_formal_tmp_reg (tree t)
-{
-  /* The intent of this is to get hold of a value that won't change.
-     An SSA_NAME qualifies no matter if its of a user variable or not.  */
-  if (TREE_CODE (t) == SSA_NAME)
-    return true;
-
-  /* We don't know the lifetime characteristics of user variables.  */
-  if (!is_gimple_formal_tmp_var (t))
-    return false;
-
-  /* Finally, it must be capable of being placed in a register.  */
-  return is_gimple_reg (t);
-}
-
-/* Return true if T is a GIMPLE variable whose address is not needed.  */
-
-bool
-is_gimple_non_addressable (tree t)
-{
-  if (TREE_CODE (t) == SSA_NAME)
-    t = SSA_NAME_VAR (t);
-
-  return (is_gimple_variable (t) && ! needs_to_live_in_memory (t));
-}
-
-/* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant.  */
-
-bool
-is_gimple_val (tree t)
-{
-  /* Make loads from volatiles and memory vars explicit.  */
-  if (is_gimple_variable (t)
-      && is_gimple_reg_type (TREE_TYPE (t))
-      && !is_gimple_reg (t))
-    return false;
-
-  /* FIXME make these decls.  That can happen only when we expose the
-     entire landing-pad construct at the tree level.  */
-  if (TREE_CODE (t) == EXC_PTR_EXPR || TREE_CODE (t) == FILTER_EXPR)
-    return true;
-
-  return (is_gimple_variable (t) || is_gimple_min_invariant (t));
-}
-
-/* Similarly, but accept hard registers as inputs to asm statements.  */
-
-bool
-is_gimple_asm_val (tree t)
-{
-  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
-    return true;
-
-  return is_gimple_val (t);
-}
-
-/* Return true if T is a GIMPLE minimal lvalue.  */
-
-bool
-is_gimple_min_lval (tree t)
-{
-  return (is_gimple_id (t)
-         || TREE_CODE (t) == INDIRECT_REF);
-}
-
-/* Return true if T is a typecast operation.  */
-
-bool
-is_gimple_cast (tree t)
-{
-  return (CONVERT_EXPR_P (t)
-          || TREE_CODE (t) == FIX_TRUNC_EXPR);
-}
-
-/* Return true if T is a valid function operand of a CALL_EXPR.  */
-
-bool
-is_gimple_call_addr (tree t)
-{
-  return (TREE_CODE (t) == OBJ_TYPE_REF
-         || is_gimple_val (t));
-}
-
-/* If T makes a function call, return the corresponding CALL_EXPR operand.
-   Otherwise, return NULL_TREE.  */
-
-tree
-get_call_expr_in (tree t)
-{
-  /* FIXME tuples: delete the assertion below when conversion complete.  */
-  gcc_assert (TREE_CODE (t) != MODIFY_EXPR);
-  if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
-    t = GIMPLE_STMT_OPERAND (t, 1);
-  if (TREE_CODE (t) == WITH_SIZE_EXPR)
-    t = TREE_OPERAND (t, 0);
-  if (TREE_CODE (t) == CALL_EXPR)
-    return t;
-  return NULL_TREE;
-}
-
-/* Given a memory reference expression T, return its base address.
-   The base address of a memory reference expression is the main
-   object being referenced.  For instance, the base address for
-   'array[i].fld[j]' is 'array'.  You can think of this as stripping
-   away the offset part from a memory address.
-
-   This function calls handled_component_p to strip away all the inner
-   parts of the memory reference until it reaches the base object.  */
-
-tree
-get_base_address (tree t)
-{
-  while (handled_component_p (t))
-    t = TREE_OPERAND (t, 0);
-  
-  if (SSA_VAR_P (t)
-      || TREE_CODE (t) == STRING_CST
-      || TREE_CODE (t) == CONSTRUCTOR
-      || INDIRECT_REF_P (t))
-    return t;
-  else
-    return NULL_TREE;
-}
-
-void
-recalculate_side_effects (tree t)
-{
-  enum tree_code code = TREE_CODE (t);
-  int len = TREE_OPERAND_LENGTH (t);
-  int i;
-
-  switch (TREE_CODE_CLASS (code))
-    {
-    case tcc_expression:
-      switch (code)
-       {
-       case INIT_EXPR:
-       case GIMPLE_MODIFY_STMT:
-       case VA_ARG_EXPR:
-       case PREDECREMENT_EXPR:
-       case PREINCREMENT_EXPR:
-       case POSTDECREMENT_EXPR:
-       case POSTINCREMENT_EXPR:
-         /* All of these have side-effects, no matter what their
-            operands are.  */
-         return;
-
-       default:
-         break;
-       }
-      /* Fall through.  */
-
-    case tcc_comparison:  /* a comparison expression */
-    case tcc_unary:       /* a unary arithmetic expression */
-    case tcc_binary:      /* a binary arithmetic expression */
-    case tcc_reference:   /* a reference */
-    case tcc_vl_exp:        /* a function call */
-      TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
-      for (i = 0; i < len; ++i)
-       {
-         tree op = TREE_OPERAND (t, i);
-         if (op && TREE_SIDE_EFFECTS (op))
-           TREE_SIDE_EFFECTS (t) = 1;
-       }
-      break;
-
-    default:
-      /* Can never be used with non-expressions.  */
-      gcc_unreachable ();
-   }
-}
-
-/* Canonicalize a tree T for use in a COND_EXPR as conditional.  Returns
-   a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if
-   we failed to create one.  */
-
-tree
-canonicalize_cond_expr_cond (tree t)
-{
-  /* For (bool)x use x != 0.  */
-  if (TREE_CODE (t) == NOP_EXPR
-      && TREE_TYPE (t) == boolean_type_node)
-    {
-      tree top0 = TREE_OPERAND (t, 0);
-      t = build2 (NE_EXPR, TREE_TYPE (t),
-                 top0, build_int_cst (TREE_TYPE (top0), 0));
-    }
-  /* For !x use x == 0.  */
-  else if (TREE_CODE (t) == TRUTH_NOT_EXPR)
-    {
-      tree top0 = TREE_OPERAND (t, 0);
-      t = build2 (EQ_EXPR, TREE_TYPE (t),
-                 top0, build_int_cst (TREE_TYPE (top0), 0));
-    }
-  /* For cmp ? 1 : 0 use cmp.  */
-  else if (TREE_CODE (t) == COND_EXPR
-          && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
-          && integer_onep (TREE_OPERAND (t, 1))
-          && integer_zerop (TREE_OPERAND (t, 2)))
-    {
-      tree top0 = TREE_OPERAND (t, 0);
-      t = build2 (TREE_CODE (top0), TREE_TYPE (t),
-                 TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
-    }
-
-  if (is_gimple_condexpr (t))
-    return t;
-
-  return NULL_TREE;
-}
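
As a usage sketch for the canonicalization routine defined above (this file's contents move into gimple.c as part of the merge), the helper below, whose name is invented, builds !X and lets canonicalize_cond_expr_cond rewrite it into the GIMPLE-friendly X == 0; the tree-building primitives build1, TRUTH_NOT_EXPR and boolean_type_node are standard and assumed to be in scope.

/* Illustrative sketch only; not part of the patch.  Canonicalize !X into a
   form acceptable as a COND_EXPR/GIMPLE_COND condition, or return NULL_TREE
   if no valid condition could be formed.  */

static tree
example_canonical_not (tree x)
{
  tree not_x = build1 (TRUTH_NOT_EXPR, boolean_type_node, x);
  return canonicalize_cond_expr_cond (not_x);
}
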
diff --git a/gcc/tree-gimple.h b/gcc/tree-gimple.h
deleted file mode 100644
index 3691cbc..0000000
+++ /dev/null
@@ -1,235 +0,0 @@
-/* Functions to analyze and validate GIMPLE trees.
-   Copyright (C) 2002, 2003, 2005, 2007 Free Software Foundation, Inc.
-   Contributed by Diego Novillo <dnovillo@redhat.com>
-
-This file is part of GCC.
-
-GCC is free software; you can redistribute it and/or modify
-it under the terms of the GNU General Public License as published by
-the Free Software Foundation; either version 3, or (at your option)
-any later version.
-
-GCC is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING3.  If not see
-<http://www.gnu.org/licenses/>.  */
-
-#ifndef _TREE_SIMPLE_H
-#define _TREE_SIMPLE_H 1
-
-
-#include "tree-iterator.h"
-
-extern tree create_tmp_var_raw (tree, const char *);
-extern tree create_tmp_var_name (const char *);
-extern tree create_tmp_var (tree, const char *);
-extern tree get_initialized_tmp_var (tree, tree *, tree *);
-extern tree get_formal_tmp_var (tree, tree *);
-
-extern void declare_vars (tree, tree, bool);
-
-extern void annotate_all_with_locus (tree *, location_t);
-
-/* Validation of GIMPLE expressions.  Note that these predicates only check
-   the basic form of the expression, they don't recurse to make sure that
-   underlying nodes are also of the right form.  */
-
-typedef bool (*gimple_predicate)(tree);
-
-/* Returns true iff T is a valid GIMPLE statement.  */
-extern bool is_gimple_stmt (tree);
-
-/* Returns true iff TYPE is a valid type for a scalar register variable.  */
-extern bool is_gimple_reg_type (tree);
-/* Returns true iff T is a scalar register variable.  */
-extern bool is_gimple_reg (tree);
-/* Returns true if T is a GIMPLE temporary variable, false otherwise.  */
-extern bool is_gimple_formal_tmp_var (tree);
-/* Returns true if T is a GIMPLE temporary register variable.  */
-extern bool is_gimple_formal_tmp_reg (tree);
-/* Returns true iff T is any sort of variable.  */
-extern bool is_gimple_variable (tree);
-/* Returns true iff T is any sort of symbol.  */
-extern bool is_gimple_id (tree);
-/* Returns true iff T is a variable or an INDIRECT_REF (of a variable).  */
-extern bool is_gimple_min_lval (tree);
-/* Returns true iff T is something whose address can be taken.  */
-extern bool is_gimple_addressable (tree);
-/* Returns true iff T is any valid GIMPLE lvalue.  */
-extern bool is_gimple_lvalue (tree);
-
-/* Returns true iff T is a GIMPLE address.  */
-bool is_gimple_address (const_tree);
-/* Returns true iff T is a GIMPLE invariant address.  */
-bool is_gimple_invariant_address (const_tree);
-/* Returns true iff T is a valid GIMPLE constant.  */
-bool is_gimple_constant (const_tree);
-/* Returns true iff T is a GIMPLE restricted function invariant.  */
-extern bool is_gimple_min_invariant (const_tree);
-/* Returns true iff T is a GIMPLE rvalue.  */
-extern bool is_gimple_val (tree);
-/* Returns true iff T is a GIMPLE asm statement input.  */
-extern bool is_gimple_asm_val (tree);
-/* Returns true iff T is a valid rhs for a MODIFY_EXPR where the LHS is a
-   GIMPLE temporary, a renamed user variable, or something else,
-   respectively.  */
-extern bool is_gimple_formal_tmp_rhs (tree);
-extern bool is_gimple_reg_rhs (tree);
-extern bool is_gimple_mem_rhs (tree);
-/* Returns the appropriate one of the above three predicates for the LHS
-   T.  */
-extern gimple_predicate rhs_predicate_for (tree);
-
-/* Returns true iff T is a valid if-statement condition.  */
-extern bool is_gimple_condexpr (tree);
-
-/* Returns true iff T is a type conversion.  */
-extern bool is_gimple_cast (tree);
-/* Returns true iff T is a variable that does not need to live in memory.  */
-extern bool is_gimple_non_addressable (tree t);
-
-/* Returns true iff T is a valid call address expression.  */
-extern bool is_gimple_call_addr (tree);
-/* If T makes a function call, returns the CALL_EXPR operand.  */
-extern tree get_call_expr_in (tree t);
-/* Returns true iff T contains a CALL_EXPR not suitable for inlining.  */
-#define CALL_STMT_CANNOT_INLINE_P(T) \
-  CALL_CANNOT_INLINE_P (get_call_expr_in (T))
-
-extern void recalculate_side_effects (tree);
-
-/* FIXME we should deduce this from the predicate.  */
-typedef enum fallback_t {
-  fb_none = 0,
-  fb_rvalue = 1,
-  fb_lvalue = 2,
-  fb_mayfail = 4,
-  fb_either= fb_rvalue | fb_lvalue
-} fallback_t;
-
-enum gimplify_status {
-  GS_ERROR     = -2,   /* Something Bad Seen.  */
-  GS_UNHANDLED = -1,   /* A langhook result for "I dunno".  */
-  GS_OK                = 0,    /* We did something, maybe more to do.  */
-  GS_ALL_DONE  = 1     /* The expression is fully gimplified.  */
-};
-
-struct gimplify_ctx
-{
-  struct gimplify_ctx *prev_context;
-
-  tree current_bind_expr;
-  tree temps;
-  tree conditional_cleanups;
-  tree exit_label;
-  tree return_temp;
-  
-  VEC(tree,heap) *case_labels;
-  /* The formal temporary table.  Should this be persistent?  */
-  htab_t temp_htab;
-
-  int conditions;
-  bool save_stack;
-  bool into_ssa;
-  bool allow_rhs_cond_expr;
-};
-
-extern enum gimplify_status gimplify_expr (tree *, tree *, tree *,
-                                          bool (*) (tree), fallback_t);
-extern void gimplify_type_sizes (tree, tree *);
-extern void gimplify_one_sizepos (tree *, tree *);
-extern void gimplify_stmt (tree *);
-extern void gimplify_to_stmt_list (tree *);
-extern void gimplify_body (tree *, tree, bool);
-extern void push_gimplify_context (struct gimplify_ctx *);
-extern void pop_gimplify_context (tree);
-extern void gimplify_and_add (tree, tree *);
-
-/* Miscellaneous helpers.  */
-extern void gimple_add_tmp_var (tree);
-extern tree gimple_current_bind_expr (void);
-extern tree voidify_wrapper_expr (tree, tree);
-extern tree gimple_build_eh_filter (tree, tree, tree);
-extern tree build_and_jump (tree *);
-extern tree alloc_stmt_list (void);
-extern void free_stmt_list (tree);
-extern tree force_labels_r (tree *, int *, void *);
-extern enum gimplify_status gimplify_va_arg_expr (tree *, tree *, tree *);
-struct gimplify_omp_ctx;
-extern void omp_firstprivatize_variable (struct gimplify_omp_ctx *, tree);
-extern tree gimple_boolify (tree);
-extern tree canonicalize_cond_expr_cond (tree);
-
-/* In omp-low.c.  */
-extern void diagnose_omp_structured_block_errors (tree);
-extern tree omp_reduction_init (tree, tree);
-
-/* In tree-nested.c.  */
-extern void lower_nested_functions (tree);
-extern void insert_field_into_struct (tree, tree);
-
-/* Convenience routines to walk all statements of a gimple function.
-   The difference between these walkers and the generic walk_tree is
-   that walk_stmt provides context information to the callback
-   routine to know whether it is currently on the LHS or RHS of an
-   assignment (IS_LHS) or contexts where only GIMPLE values are
-   allowed (VAL_ONLY).
-   
-   This is useful in walkers that need to re-write sub-expressions
-   inside statements while making sure the result is still in GIMPLE
-   form.
-
-   Note that this is useful exclusively before the code is converted
-   into SSA form.  Once the program is in SSA form, the standard
-   operand interface should be used to analyze/modify statements.  */
-
-struct walk_stmt_info
-{
-  /* For each statement, we invoke CALLBACK via walk_tree.  The passed
-     data is a walk_stmt_info structure.  */
-  walk_tree_fn callback;
-
-  /* Points to the current statement being walked.  */
-  tree_stmt_iterator tsi;
-  
-  /* Additional data that CALLBACK may want to carry through the
-     recursion.  */
-  void *info;
-
-  /* Indicates whether the *TP being examined may be replaced 
-     with something that matches is_gimple_val (if true) or something
-     slightly more complicated (if false).  "Something" technically 
-     means the common subset of is_gimple_lvalue and is_gimple_rhs, 
-     but we never try to form anything more complicated than that, so
-     we don't bother checking.
-
-     Also note that CALLBACK should update this flag while walking the
-     sub-expressions of a statement.  For instance, when walking the
-     statement 'foo (&var)', the flag VAL_ONLY will initially be set
-     to true, however, when walking &var, the operand of that
-     ADDR_EXPR does not need to be a GIMPLE value.  */
-  bool val_only;
-
-  /* True if we are currently walking the LHS of an assignment.  */
-  bool is_lhs;
-
-  /* Optional.  Set to true by CALLBACK if it made any changes.  */
-  bool changed;
-
-  /* True if we're interested in seeing BIND_EXPRs.  */
-  bool want_bind_expr;
-
-  /* True if we're interested in seeing RETURN_EXPRs.  */
-  bool want_return_expr;
-
-  /* True if we're interested in location information.  */
-  bool want_locations;
-};
-
-void walk_stmts (struct walk_stmt_info *, tree *);
-
-#endif /* _TREE_SIMPLE_H  */
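
The gimple_predicate typedef and rhs_predicate_for declared in this header (folded into gimple.h by the merge) are how gimplification picks the right validity check for the right-hand side of an assignment, given its left-hand side. A minimal sketch of that selection follows; only the wrapper name is invented.

/* Illustrative sketch only; not part of the patch.  Ask whether RHS is
   already in valid GIMPLE form for an assignment to LHS, using the
   predicate machinery declared above.  */

static bool
example_rhs_is_gimple_for_lhs (tree lhs, tree rhs)
{
  gimple_predicate pred = rhs_predicate_for (lhs);
  return pred (rhs);
}
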
diff --git a/gcc/tree-if-conv.c b/gcc/tree-if-conv.c
index 78b29a4..d21bb9d 100644
@@ -100,31 +100,33 @@ along with GCC; see the file COPYING3.  If not see
 #include "tree-pass.h"
 #include "target.h"
 
+
 /* local function prototypes */
 static unsigned int main_tree_if_conversion (void);
-static tree tree_if_convert_stmt (struct loop *loop, tree, tree,
-                                 block_stmt_iterator *);
-static void tree_if_convert_cond_expr (struct loop *, tree, tree,
-                                      block_stmt_iterator *);
-static bool if_convertible_phi_p (struct loop *, basic_block, tree);
-static bool if_convertible_gimple_modify_stmt_p (struct loop *, basic_block,
-                                                tree);
-static bool if_convertible_stmt_p (struct loop *, basic_block, tree);
+static tree tree_if_convert_stmt (struct loop *loop, gimple, tree,
+                                 gimple_stmt_iterator *);
+static void tree_if_convert_cond_stmt (struct loop *, gimple, tree,
+                                      gimple_stmt_iterator *);
+static bool if_convertible_phi_p (struct loop *, basic_block, gimple);
+static bool if_convertible_gimple_assign_stmt_p (struct loop *, basic_block,
+                                                gimple);
+static bool if_convertible_stmt_p (struct loop *, basic_block, gimple);
 static bool if_convertible_bb_p (struct loop *, basic_block, basic_block);
 static bool if_convertible_loop_p (struct loop *, bool);
 static void add_to_predicate_list (basic_block, tree);
 static tree add_to_dst_predicate_list (struct loop * loop, edge,
                                       tree, tree,
-                                      block_stmt_iterator *);
+                                      gimple_stmt_iterator *);
 static void clean_predicate_lists (struct loop *loop);
 static basic_block find_phi_replacement_condition (struct loop *loop,
                                                   basic_block, tree *,
-                                                  block_stmt_iterator *);
-static void replace_phi_with_cond_gimple_modify_stmt (tree, tree, basic_block,
-                                               block_stmt_iterator *);
+                                                  gimple_stmt_iterator *);
+static void replace_phi_with_cond_gimple_assign_stmt (gimple, tree,
+                                                     basic_block,
+                                                     gimple_stmt_iterator *);
 static void process_phi_nodes (struct loop *);
 static void combine_blocks (struct loop *);
-static tree ifc_temp_var (tree, tree);
+static gimple ifc_temp_var (tree, tree);
 static bool pred_blocks_visited_p (basic_block, bitmap *);
 static basic_block * get_loop_body_in_if_conv_order (const struct loop *loop);
 static bool bb_with_exit_edge_p (struct loop *, basic_block);
@@ -143,7 +145,7 @@ static bool
 tree_if_conversion (struct loop *loop, bool for_vectorizer)
 {
   basic_block bb;
-  block_stmt_iterator itr;
+  gimple_stmt_iterator itr;
   unsigned int i;
 
   ifc_bbs = NULL;
@@ -176,12 +178,12 @@ tree_if_conversion (struct loop *loop, bool for_vectorizer)
       /* Process all statements in this basic block.
         Remove conditional expression, if any, and annotate
         destination basic block(s) appropriately.  */
-      for (itr = bsi_start (bb); !bsi_end_p (itr); /* empty */)
+      for (itr = gsi_start_bb (bb); !gsi_end_p (itr); /* empty */)
        {
-         tree t = bsi_stmt (itr);
+         gimple t = gsi_stmt (itr);
          cond = tree_if_convert_stmt (loop, t, cond, &itr);
-         if (!bsi_end_p (itr))
-           bsi_next (&itr);
+         if (!gsi_end_p (itr))
+           gsi_next (&itr);
        }
 
       /* If current bb has only one successor, then consider it as an
@@ -214,41 +216,41 @@ tree_if_conversion (struct loop *loop, bool for_vectorizer)
 }
 
 /* if-convert stmt T which is part of LOOP.
-   If T is a GIMPLE_MODIFY_STMT than it is converted into conditional modify
+   If T is a GIMPLE_ASSIGN then it is converted into conditional modify
    expression using COND.  For conditional expressions, add condition in the
    destination basic block's predicate list and remove conditional
    expression itself. BSI is the iterator used to traverse statements of
    loop. It is used here when it is required to delete current statement.  */
 
 static tree
-tree_if_convert_stmt (struct loop *  loop, tree t, tree cond,
-                     block_stmt_iterator *bsi)
+tree_if_convert_stmt (struct loop *  loop, gimple t, tree cond,
+                     gimple_stmt_iterator *gsi)
 {
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
       fprintf (dump_file, "------if-convert stmt\n");
-      print_generic_stmt (dump_file, t, TDF_SLIM);
+      print_gimple_stmt (dump_file, t, 0, TDF_SLIM);
       print_generic_stmt (dump_file, cond, TDF_SLIM);
     }
 
-  switch (TREE_CODE (t))
+  switch (gimple_code (t))
     {
       /* Labels are harmless here.  */
-    case LABEL_EXPR:
+    case GIMPLE_LABEL:
       break;
 
-    case GIMPLE_MODIFY_STMT:
-      /* This GIMPLE_MODIFY_STMT is killing previous value of LHS. Appropriate
+    case GIMPLE_ASSIGN:
+      /* This GIMPLE_ASSIGN is killing previous value of LHS. Appropriate
         value will be selected by PHI node based on condition. It is possible
         that before this transformation, PHI nodes was selecting default
         value and now it will use this new value. This is OK because it does 
         not change validity the program.  */
       break;
 
-    case COND_EXPR:
+    case GIMPLE_COND:
       /* Update destination blocks' predicate list and remove this
         condition expression.  */
-      tree_if_convert_cond_expr (loop, t, cond, bsi);
+      tree_if_convert_cond_stmt (loop, t, cond, gsi);
       cond = NULL_TREE;
       break;
 
@@ -258,41 +260,41 @@ tree_if_convert_stmt (struct loop *  loop, tree t, tree cond,
   return cond;
 }
 
-/* STMT is COND_EXPR. Update two destination's predicate list.
+/* STMT is a GIMPLE_COND. Update two destination's predicate list.
    Remove COND_EXPR, if it is not the loop exit condition. Otherwise
-   update loop exit condition appropriately.  BSI is the iterator
+   update loop exit condition appropriately.  GSI is the iterator
    used to traverse statement list. STMT is part of loop LOOP.  */
 
 static void
-tree_if_convert_cond_expr (struct loop *loop, tree stmt, tree cond,
-                          block_stmt_iterator *bsi)
+tree_if_convert_cond_stmt (struct loop *loop, gimple stmt, tree cond,
+                          gimple_stmt_iterator *gsi)
 {
   tree c, c2;
   edge true_edge, false_edge;
 
-  gcc_assert (TREE_CODE (stmt) == COND_EXPR);
+  gcc_assert (gimple_code (stmt) == GIMPLE_COND);
 
-  c = COND_EXPR_COND (stmt);
+  c = fold_build2 (gimple_cond_code (stmt), boolean_type_node,
+                  gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
 
-  extract_true_false_edges_from_block (bb_for_stmt (stmt),
+  extract_true_false_edges_from_block (gimple_bb (stmt),
                                       &true_edge, &false_edge);
 
   /* Add new condition into destination's predicate list.  */
 
-  /* If 'c' is true then TRUE_EDGE is taken.  */
-  add_to_dst_predicate_list (loop, true_edge, cond,
-                            unshare_expr (c), bsi);
+  /* If C is true then TRUE_EDGE is taken.  */
+  add_to_dst_predicate_list (loop, true_edge, cond, c, gsi);
 
   /* If 'c' is false then FALSE_EDGE is taken.  */
   c2 = invert_truthvalue (unshare_expr (c));
-  add_to_dst_predicate_list (loop, false_edge, cond, c2, bsi);
+  add_to_dst_predicate_list (loop, false_edge, cond, c2, gsi);
 
   /* Now this conditional statement is redundant. Remove it.
      But, do not remove exit condition! Update exit condition
      using new condition.  */
-  if (!bb_with_exit_edge_p (loop, bb_for_stmt (stmt)))
+  if (!bb_with_exit_edge_p (loop, gimple_bb (stmt)))
     {
-      bsi_remove (bsi, true);
+      gsi_remove (gsi, true);
       cond = NULL_TREE;
     }
   return;
@@ -306,22 +308,22 @@ tree_if_convert_cond_expr (struct loop *loop, tree stmt, tree cond,
    - Virtual PHI on BB other than header.  */
 
 static bool
-if_convertible_phi_p (struct loop *loop, basic_block bb, tree phi)
+if_convertible_phi_p (struct loop *loop, basic_block bb, gimple phi)
 {
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
       fprintf (dump_file, "-------------------------\n");
-      print_generic_stmt (dump_file, phi, TDF_SLIM);
+      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
     }
 
-  if (bb != loop->header && PHI_NUM_ARGS (phi) != 2)
+  if (bb != loop->header && gimple_phi_num_args (phi) != 2)
     {
       if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "More than two phi node args.\n");
       return false;
     }
 
-  if (!is_gimple_reg (SSA_NAME_VAR (PHI_RESULT (phi))))
+  if (!is_gimple_reg (SSA_NAME_VAR (gimple_phi_result (phi))))
     {
       imm_use_iterator imm_iter;
       use_operand_p use_p;
@@ -332,9 +334,9 @@ if_convertible_phi_p (struct loop *loop, basic_block bb, tree phi)
            fprintf (dump_file, "Virtual phi not on loop header.\n");
          return false;
        }
-      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, PHI_RESULT (phi))
+      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, gimple_phi_result (phi))
        {
-         if (TREE_CODE (USE_STMT (use_p)) == PHI_NODE)
+         if (gimple_code (USE_STMT (use_p)) == GIMPLE_PHI)
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "Difficult to handle this virtual phi.\n");
@@ -346,37 +348,36 @@ if_convertible_phi_p (struct loop *loop, basic_block bb, tree phi)
   return true;
 }
 
-/* Return true, if M_EXPR is if-convertible.
-   GIMPLE_MODIFY_STMT is not if-convertible if,
+/* Return true, if STMT is if-convertible.
+   GIMPLE_ASSIGN statement is not if-convertible if,
    - It is not movable.
    - It could trap.
    - LHS is not var decl.
-  GIMPLE_MODIFY_STMT is part of block BB, which is inside loop LOOP.
-*/
+  GIMPLE_ASSIGN is part of block BB, which is inside loop LOOP.  */
 
 static bool
-if_convertible_gimple_modify_stmt_p (struct loop *loop, basic_block bb,
-                                    tree m_expr)
+if_convertible_gimple_assign_stmt_p (struct loop *loop, basic_block bb,
+                                    gimple stmt)
 {
-  tree lhs, rhs;
+  tree lhs;
 
-  if (TREE_CODE (m_expr) != GIMPLE_MODIFY_STMT)
+  if (!is_gimple_assign (stmt))
     return false;
 
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
       fprintf (dump_file, "-------------------------\n");
-      print_generic_stmt (dump_file, m_expr, TDF_SLIM);
+      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
     }
 
-  lhs = GIMPLE_STMT_OPERAND (m_expr, 0);
-  rhs = GIMPLE_STMT_OPERAND (m_expr, 1);
+  lhs = gimple_assign_lhs (stmt);
 
   /* Some of these constrains might be too conservative.  */
-  if (stmt_ends_bb_p (m_expr) || stmt_ann (m_expr)->has_volatile_ops
+  if (stmt_ends_bb_p (stmt)
+      || gimple_has_volatile_ops (stmt)
       || (TREE_CODE (lhs) == SSA_NAME
           && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
-      || TREE_SIDE_EFFECTS (rhs))
+      || gimple_has_side_effects (stmt))
     {
       if (dump_file && (dump_flags & TDF_DETAILS))
         fprintf (dump_file, "stmt not suitable for ifcvt\n");
@@ -385,57 +386,49 @@ if_convertible_gimple_modify_stmt_p (struct loop *loop, basic_block bb,
 
   /* See if it needs speculative loading or not.  */
   if (bb != loop->header
-      && tree_could_trap_p (GIMPLE_STMT_OPERAND (m_expr, 1)))
+      && gimple_assign_rhs_could_trap_p (stmt))
     {
       if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "tree could trap...\n");
       return false;
     }
 
-  if (TREE_CODE (GIMPLE_STMT_OPERAND (m_expr, 1)) == CALL_EXPR)
-    {
-      if (dump_file && (dump_flags & TDF_DETAILS))
-       fprintf (dump_file, "CALL_EXPR \n");
-      return false;
-    }
-
-  if (TREE_CODE (GIMPLE_STMT_OPERAND (m_expr, 0)) != SSA_NAME
+  if (TREE_CODE (lhs) != SSA_NAME
       && bb != loop->header
       && !bb_with_exit_edge_p (loop, bb))
     {
       if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "LHS is not var\n");
-         print_generic_stmt (dump_file, m_expr, TDF_SLIM);
+         print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
        }
       return false;
     }
 
-
   return true;
 }
 
 /* Return true, iff STMT is if-convertible.
    Statement is if-convertible if,
-   - It is if-convertible GIMPLE_MODIFY_STMT
-   - IT is LABEL_EXPR or COND_EXPR.
+   - It is if-convertible GIMPLE_ASSIGN
+   - It is GIMPLE_LABEL or GIMPLE_COND.
    STMT is inside block BB, which is inside loop LOOP.  */
 
 static bool
-if_convertible_stmt_p (struct loop *loop, basic_block bb, tree stmt)
+if_convertible_stmt_p (struct loop *loop, basic_block bb, gimple stmt)
 {
-  switch (TREE_CODE (stmt))
+  switch (gimple_code (stmt))
     {
-    case LABEL_EXPR:
+    case GIMPLE_LABEL:
       break;
 
-    case GIMPLE_MODIFY_STMT:
+    case GIMPLE_ASSIGN:
 
-      if (!if_convertible_gimple_modify_stmt_p (loop, bb, stmt))
+      if (!if_convertible_gimple_assign_stmt_p (loop, bb, stmt))
        return false;
       break;
 
-    case COND_EXPR:
+    case GIMPLE_COND:
       break;
 
     default:
@@ -443,7 +436,7 @@ if_convertible_stmt_p (struct loop *loop, basic_block bb, tree stmt)
       if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "don't know what to do\n");
-         print_generic_stmt (dump_file, stmt, TDF_SLIM);
+         print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
        }
       return false;
       break;
@@ -521,9 +514,8 @@ if_convertible_bb_p (struct loop *loop, basic_block bb, basic_block exit_bb)
 static bool
 if_convertible_loop_p (struct loop *loop, bool for_vectorizer ATTRIBUTE_UNUSED)
 {
-  tree phi;
   basic_block bb;
-  block_stmt_iterator itr;
+  gimple_stmt_iterator itr;
   unsigned int i;
   edge e;
   edge_iterator ei;
@@ -584,22 +576,22 @@ if_convertible_loop_p (struct loop *loop, bool for_vectorizer ATTRIBUTE_UNUSED)
        return false;
 
       /* Check statements.  */
-      for (itr = bsi_start (bb); !bsi_end_p (itr); bsi_next (&itr))
-       if (!if_convertible_stmt_p (loop, bb, bsi_stmt (itr)))
+      for (itr = gsi_start_bb (bb); !gsi_end_p (itr); gsi_next (&itr))
+       if (!if_convertible_stmt_p (loop, bb, gsi_stmt (itr)))
          return false;
       /* ??? Check data dependency for vectorizer.  */
 
       /* What about phi nodes ? */
-      phi = phi_nodes (bb);
+      itr = gsi_start_phis (bb);
 
       /* Clear aux field of incoming edges to a bb with a phi node.  */
-      if (phi)
+      if (!gsi_end_p (itr))
        FOR_EACH_EDGE (e, ei, bb->preds)
          e->aux = NULL;
 
       /* Check statements.  */
-      for (; phi; phi = PHI_CHAIN (phi))
-       if (!if_convertible_phi_p (loop, bb, phi))
+      for (; !gsi_end_p (itr); gsi_next (&itr))
+       if (!if_convertible_phi_p (loop, bb, gsi_stmt (itr)))
          return false;
 
       if (bb_with_exit_edge_p (loop, bb))
@@ -637,7 +629,7 @@ add_to_predicate_list (basic_block bb, tree new_cond)
 static tree
 add_to_dst_predicate_list (struct loop * loop, edge e,
                           tree prev_cond, tree cond,
-                          block_stmt_iterator *bsi)
+                          gimple_stmt_iterator *gsi)
 {
   tree new_cond = NULL_TREE;
 
@@ -649,13 +641,13 @@ add_to_dst_predicate_list (struct loop * loop, edge e,
   else
     {
       tree tmp;
-      tree tmp_stmt = NULL_TREE;
+      gimple tmp_stmt = NULL;
 
-      prev_cond = force_gimple_operand_bsi (bsi, unshare_expr (prev_cond),
-                                           true, NULL, true, BSI_SAME_STMT);
+      prev_cond = force_gimple_operand_gsi (gsi, unshare_expr (prev_cond),
+                                           true, NULL, true, GSI_SAME_STMT);
 
-      cond = force_gimple_operand_bsi (bsi, unshare_expr (cond),
-                                      true, NULL, true, BSI_SAME_STMT);
+      cond = force_gimple_operand_gsi (gsi, unshare_expr (cond),
+                                      true, NULL, true, GSI_SAME_STMT);
 
       /* Add the condition to aux field of the edge.  In case edge
         destination is a PHI node, this condition will be ANDed with
@@ -666,8 +658,8 @@ add_to_dst_predicate_list (struct loop * loop, edge e,
       tmp = build2 (TRUTH_AND_EXPR, boolean_type_node,
                    unshare_expr (prev_cond), cond);
       tmp_stmt = ifc_temp_var (boolean_type_node, tmp);
-      bsi_insert_before (bsi, tmp_stmt, BSI_SAME_STMT);
-      new_cond = GIMPLE_STMT_OPERAND (tmp_stmt, 0);
+      gsi_insert_before (gsi, tmp_stmt, GSI_SAME_STMT);
+      new_cond = gimple_assign_lhs (tmp_stmt);
     }
   add_to_predicate_list (e->dest, new_cond);
   return new_cond;
@@ -703,7 +695,7 @@ clean_predicate_lists (struct loop *loop)
 static basic_block
 find_phi_replacement_condition (struct loop *loop, 
                                basic_block bb, tree *cond,
-                                block_stmt_iterator *bsi)
+                                gimple_stmt_iterator *gsi)
 {
   edge first_edge, second_edge;
   tree tmp_cond;
@@ -788,16 +780,16 @@ find_phi_replacement_condition (struct loop *loop,
      condition in vector compare operation. Using gimple value allows
      compiler to emit vector compare and select RTL without exposing
      compare's result.  */
-  *cond = force_gimple_operand_bsi (bsi, unshare_expr (*cond),
+  *cond = force_gimple_operand_gsi (gsi, unshare_expr (*cond),
                                    false, NULL_TREE,
-                                   true, BSI_SAME_STMT);
+                                   true, GSI_SAME_STMT);
   if (!is_gimple_reg (*cond) && !is_gimple_condexpr (*cond))
     {
-      tree new_stmt;
+      gimple new_stmt;
 
       new_stmt = ifc_temp_var (TREE_TYPE (*cond), unshare_expr (*cond));
-      bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
-      *cond = GIMPLE_STMT_OPERAND (new_stmt, 0);
+      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
+      *cond = gimple_assign_lhs (new_stmt);
     }
 
   gcc_assert (*cond);
@@ -817,33 +809,33 @@ find_phi_replacement_condition (struct loop *loop,
 */
 
 static void
-replace_phi_with_cond_gimple_modify_stmt (tree phi, tree cond,
+replace_phi_with_cond_gimple_assign_stmt (gimple phi, tree cond,
                                          basic_block true_bb,
-                                         block_stmt_iterator *bsi)
+                                         gimple_stmt_iterator *gsi)
 {
-  tree new_stmt;
+  gimple new_stmt;
   basic_block bb;
   tree rhs;
   tree arg_0, arg_1;
 
-  gcc_assert (TREE_CODE (phi) == PHI_NODE);
+  gcc_assert (gimple_code (phi) == GIMPLE_PHI);
   
   /* If this is not filtered earlier, then now it is too late.  */
-  gcc_assert (PHI_NUM_ARGS (phi) == 2);
+  gcc_assert (gimple_phi_num_args (phi) == 2);
 
   /* Find basic block and initialize iterator.  */
-  bb = bb_for_stmt (phi);
+  bb = gimple_bb (phi);
 
   /* Use condition that is not TRUTH_NOT_EXPR in conditional modify expr.  */
   if (EDGE_PRED (bb, 1)->src == true_bb)
     {
-      arg_0 = PHI_ARG_DEF (phi, 1);
-      arg_1 = PHI_ARG_DEF (phi, 0);
+      arg_0 = gimple_phi_arg_def (phi, 1);
+      arg_1 = gimple_phi_arg_def (phi, 0);
     }
   else
     {
-      arg_0 = PHI_ARG_DEF (phi, 0);
-      arg_1 = PHI_ARG_DEF (phi, 1);
+      arg_0 = gimple_phi_arg_def (phi, 0);
+      arg_1 = gimple_phi_arg_def (phi, 1);
     }
 
   /* Build new RHS using selected condition and arguments.  */
@@ -851,20 +843,20 @@ replace_phi_with_cond_gimple_modify_stmt (tree phi, tree cond,
                unshare_expr (cond), unshare_expr (arg_0),
                unshare_expr (arg_1));
 
-  /* Create new MODIFY expression using RHS.  */
-  new_stmt = build_gimple_modify_stmt (unshare_expr (PHI_RESULT (phi)), rhs);
+  /* Create new GIMPLE_ASSIGN statement using RHS.  */
+  new_stmt = gimple_build_assign (unshare_expr (PHI_RESULT (phi)), rhs);
 
   /* Make new statement definition of the original phi result.  */
-  SSA_NAME_DEF_STMT (PHI_RESULT (phi)) = new_stmt;
+  SSA_NAME_DEF_STMT (gimple_phi_result (phi)) = new_stmt;
 
   /* Insert using iterator.  */
-  bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
+  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
   update_stmt (new_stmt);
 
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
       fprintf (dump_file, "new phi replacement stmt\n");
-      print_generic_stmt (dump_file, new_stmt, TDF_SLIM);
+      print_gimple_stmt (dump_file, new_stmt, 0, TDF_SLIM);
     }
 }
 
@@ -881,30 +873,31 @@ process_phi_nodes (struct loop *loop)
   /* Replace phi nodes with cond. modify expr.  */
   for (i = 1; i < orig_loop_num_nodes; i++)
     {
-      tree phi, cond = NULL_TREE;
-      block_stmt_iterator bsi;
+      gimple phi;
+      tree cond = NULL_TREE;
+      gimple_stmt_iterator gsi, phi_gsi;
       basic_block true_bb = NULL;
       bb = ifc_bbs[i];
 
       if (bb == loop->header)
        continue;
 
-      phi = phi_nodes (bb);
-      bsi = bsi_after_labels (bb);
+      phi_gsi = gsi_start_phis (bb);
+      gsi = gsi_after_labels (bb);
 
       /* BB has two predecessors. Using predecessor's aux field, set
         appropriate condition for the PHI node replacement.  */
-      if (phi)
-       true_bb = find_phi_replacement_condition (loop, bb, &cond, &bsi);
+      if (!gsi_end_p (phi_gsi))
+       true_bb = find_phi_replacement_condition (loop, bb, &cond, &gsi);
 
-      while (phi)
+      while (!gsi_end_p (phi_gsi))
        {
-         tree next = PHI_CHAIN (phi);
-         replace_phi_with_cond_gimple_modify_stmt (phi, cond, true_bb, &bsi);
+         phi = gsi_stmt (phi_gsi);
+         replace_phi_with_cond_gimple_assign_stmt (phi, cond, true_bb, &gsi);
          release_phi_node (phi);
-         phi = next;
+         gsi_next (&phi_gsi);
        }
-      set_phi_nodes (bb, NULL_TREE);
+      set_phi_nodes (bb, NULL);
     }
   return;
 }
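
As a reading aid (not part of the patch): the hunk above replaces the old
PHI_CHAIN walk with the tuple-style PHI iterator.  A minimal sketch of the
new idiom, using only accessors that appear in this patch (visit_operand is
a placeholder, not a real GCC function):

    gimple_stmt_iterator psi;

    /* Walk every PHI node in BB and look at each argument.  */
    for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
      {
        gimple phi = gsi_stmt (psi);
        size_t i;

        for (i = 0; i < gimple_phi_num_args (phi); i++)
          visit_operand (gimple_phi_arg_def (phi, i));
      }
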
@@ -978,8 +971,8 @@ combine_blocks (struct loop *loop)
   merge_target_bb = loop->header;
   for (i = 1; i < orig_loop_num_nodes; i++)
     {
-      block_stmt_iterator bsi;
-      tree_stmt_iterator last;
+      gimple_stmt_iterator gsi;
+      gimple_stmt_iterator last;
 
       bb = ifc_bbs[i];
 
@@ -987,21 +980,21 @@ combine_blocks (struct loop *loop)
        continue;
 
       /* Remove labels and make stmts member of loop->header.  */
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
        {
-         if (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
-           bsi_remove (&bsi, true);
+         if (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
+           gsi_remove (&gsi, true);
          else
            {
-             set_bb_for_stmt (bsi_stmt (bsi), merge_target_bb);
-             bsi_next (&bsi);
+             gimple_set_bb (gsi_stmt (gsi), merge_target_bb);
+             gsi_next (&gsi);
            }
        }
 
       /* Update stmt list.  */
-      last = tsi_last (bb_stmt_list (merge_target_bb));
-      tsi_link_after (&last, bb_stmt_list (bb), TSI_NEW_STMT);
-      set_bb_stmt_list (bb, alloc_stmt_list());
+      last = gsi_last_bb (merge_target_bb);
+      gsi_insert_seq_after (&last, bb_seq (bb), GSI_NEW_STMT);
+      set_bb_seq (bb, NULL);
 
       delete_basic_block (bb);
     }
@@ -1015,30 +1008,29 @@ combine_blocks (struct loop *loop)
     merge_blocks (loop->header, exit_bb);
 }
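
As a reading aid (not part of the patch): combine_blocks above now splices
whole gimple sequences instead of linking tree statement lists.  A sketch of
the idiom, assuming FROM_BB's statements should be appended to TO_BB
(to_bb and from_bb are illustrative names):

    gimple_stmt_iterator last = gsi_last_bb (to_bb);

    gsi_insert_seq_after (&last, bb_seq (from_bb), GSI_NEW_STMT);
    /* The statements now belong to TO_BB; detach the sequence from
       FROM_BB before that block is deleted.  */
    set_bb_seq (from_bb, NULL);
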
 
-/* Make new  temp variable of type TYPE. Add GIMPLE_MODIFY_STMT to assign EXP
+/* Make a new temp variable of type TYPE. Add GIMPLE_ASSIGN to assign EXP
    to the new variable.  */
 
-static tree
+static gimple
 ifc_temp_var (tree type, tree exp)
 {
   const char *name = "_ifc_";
-  tree var, stmt, new_name;
-
-  if (is_gimple_reg (exp))
-    return exp;
+  tree var, new_name;
+  gimple stmt;
 
   /* Create new temporary variable.  */
   var = create_tmp_var (type, name);
   add_referenced_var (var);
 
   /* Build new statement to assign EXP to new variable.  */
-  stmt = build_gimple_modify_stmt (var, exp);
+  stmt = gimple_build_assign (var, exp);
 
   /* Get SSA name for the new variable and set make new statement
      its definition statement.  */
   new_name = make_ssa_name (var, stmt);
-  GIMPLE_STMT_OPERAND (stmt, 0) = new_name;
+  gimple_assign_set_lhs (stmt, new_name);
   SSA_NAME_DEF_STMT (new_name) = stmt;
+  update_stmt (stmt);
 
   return stmt;
 }
index 103f504..511270b 100644
@@ -56,13 +56,13 @@ along with GCC; see the file COPYING3.  If not see
 
 /* I'm not real happy about this, but we need to handle gimple and
    non-gimple trees.  */
-#include "tree-gimple.h"
+#include "gimple.h"
 
 /* Inlining, Cloning, Versioning, Parallelization
 
    Inlining: a function body is duplicated, but the PARM_DECLs are
    remapped into VAR_DECLs, and non-void RETURN_EXPRs become
-   GIMPLE_MODIFY_STMTs that store to a dedicated returned-value variable.
+   MODIFY_EXPRs that store to a dedicated returned-value variable.
    The duplicated eh_region info of the copy will later be appended
    to the info for the caller; the eh_region info in copied throwing
    statements and RESX_EXPRs is adjusted accordingly.
@@ -88,7 +88,7 @@ along with GCC; see the file COPYING3.  If not see
    updated to point into the new body.  (Note that the original
    callgraph node and edge list will not be altered.)
 
-   See the CALL_EXPR handling case in copy_body_r ().  */
+   See the CALL_EXPR handling case in copy_tree_body_r ().  */
 
 /* To Do:
 
@@ -133,6 +133,7 @@ static void add_lexical_block (tree current_block, tree new_block);
 static tree copy_decl_to_var (tree, copy_body_data *);
 static tree copy_result_decl_to_var (tree, copy_body_data *);
 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
+static gimple remap_gimple_stmt (gimple, copy_body_data *);
 
 /* Insert a tree->tree mapping for ID.  Despite the name suggests
    that the trees should be variables, it is used for more than that.  */
@@ -165,6 +166,7 @@ remap_ssa_name (tree name, copy_body_data *id)
   /* Do not set DEF_STMT yet as statement is not copied yet. We do that
      in copy_bb.  */
   new = remap_decl (SSA_NAME_VAR (name), id);
+
   /* We might've substituted constant or another SSA_NAME for
      the variable. 
 
@@ -180,36 +182,37 @@ remap_ssa_name (tree name, copy_body_data *id)
       SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new)
        = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
       TREE_TYPE (new) = TREE_TYPE (SSA_NAME_VAR (new));
-      if (IS_EMPTY_STMT (SSA_NAME_DEF_STMT (name)))
+      if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
        {
          /* By inlining function having uninitialized variable, we might
             extend the lifetime (variable might get reused).  This cause
             ICE in the case we end up extending lifetime of SSA name across
             abnormal edge, but also increase register pressure.
 
-            We simply initialize all uninitialized vars by 0 except for case
-            we are inlining to very first BB.  We can avoid this for all
-            BBs that are not withing strongly connected regions of the CFG,
-            but this is bit expensive to test.
-          */
-         if (id->entry_bb && is_gimple_reg (SSA_NAME_VAR (name))
+            We simply initialize all uninitialized vars by 0 except
+            for case we are inlining to very first BB.  We can avoid
+            this for all BBs that are not inside strongly connected
+            regions of the CFG, but this is expensive to test.  */
+         if (id->entry_bb
+             && is_gimple_reg (SSA_NAME_VAR (name))
              && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
              && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
                  || EDGE_COUNT (id->entry_bb->preds) != 1))
            {
-             block_stmt_iterator bsi = bsi_last (id->entry_bb);
-             tree init_stmt
-                 = build_gimple_modify_stmt (new,
-                                             fold_convert (TREE_TYPE (new),
+             gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
+             gimple init_stmt;
+             
+             init_stmt = gimple_build_assign (new,
+                                              fold_convert (TREE_TYPE (new),
                                                            integer_zero_node));
-             bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
-             SSA_NAME_DEF_STMT (new) = init_stmt;
+             gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
              SSA_NAME_IS_DEFAULT_DEF (new) = 0;
            }
          else
            {
-             SSA_NAME_DEF_STMT (new) = build_empty_stmt ();
-             if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name)) == name)
+             SSA_NAME_DEF_STMT (new) = gimple_build_nop ();
+             if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
+                 == name)
                set_default_def (SSA_NAME_VAR (new), new);
            }
        }
@@ -255,15 +258,15 @@ remap_decl (tree decl, copy_body_data *id)
         DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
 
       /* Remap sizes as necessary.  */
-      walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
-      walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);
+      walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
+      walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
 
       /* If fields, do likewise for offset and qualifier.  */
       if (TREE_CODE (t) == FIELD_DECL)
        {
-         walk_tree (&DECL_FIELD_OFFSET (t), copy_body_r, id, NULL);
+         walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
          if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
-           walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
+           walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
        }
 
       if (cfun && gimple_in_ssa_p (cfun)
@@ -278,7 +281,7 @@ remap_decl (tree decl, copy_body_data *id)
              /* Watch out RESULT_DECLs whose SSA names map directly
                 to them.  */
              if (TREE_CODE (map) == SSA_NAME
-                 && IS_EMPTY_STMT (SSA_NAME_DEF_STMT (map)))
+                 && gimple_nop_p (SSA_NAME_DEF_STMT (map)))
                set_default_def (t, map);
            }
          add_referenced_var (t);
@@ -350,16 +353,16 @@ remap_type_1 (tree type, copy_body_data *id)
     case BOOLEAN_TYPE:
       t = TYPE_MIN_VALUE (new);
       if (t && TREE_CODE (t) != INTEGER_CST)
-        walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);
+        walk_tree (&TYPE_MIN_VALUE (new), copy_tree_body_r, id, NULL);
 
       t = TYPE_MAX_VALUE (new);
       if (t && TREE_CODE (t) != INTEGER_CST)
-        walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
+        walk_tree (&TYPE_MAX_VALUE (new), copy_tree_body_r, id, NULL);
       return new;
 
     case FUNCTION_TYPE:
       TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
-      walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
+      walk_tree (&TYPE_ARG_TYPES (new), copy_tree_body_r, id, NULL);
       return new;
 
     case ARRAY_TYPE:
@@ -390,8 +393,8 @@ remap_type_1 (tree type, copy_body_data *id)
       gcc_unreachable ();
     }
 
-  walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
-  walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);
+  walk_tree (&TYPE_SIZE (new), copy_tree_body_r, id, NULL);
+  walk_tree (&TYPE_SIZE_UNIT (new), copy_tree_body_r, id, NULL);
 
   return new;
 }
@@ -435,9 +438,10 @@ remap_decls (tree decls, copy_body_data *id)
     {
       tree new_var;
 
-      /* We can not chain the local static declarations into the local_decls
-         as we can't duplicate them or break one decl rule.  Go ahead and link
-         them into local_decls.  */
+      /* We cannot chain the local static declarations into the local_decls
+        as we can't duplicate them or break one decl rule.  Go ahead
+        and link them into local_decls.  */
+
       if (!auto_var_in_fn_p (old_var, id->src_fn)
          && !DECL_EXTERNAL (old_var))
        {
@@ -449,7 +453,7 @@ remap_decls (tree decls, copy_body_data *id)
       /* Remap the variable.  */
       new_var = remap_decl (old_var, id);
 
-      /* If we didn't remap this variable, so we can't mess with its
+      /* If we didn't remap this variable, we can't mess with its
         TREE_CHAIN.  If we remapped this variable to the return slot, it's
         already declared somewhere else, so don't declare it here.  */
       if (!new_var || new_var == id->retvar)
@@ -545,11 +549,234 @@ copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
     BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
 }
 
-/* Called from copy_body_id via walk_tree.  DATA is really an
+
+/* Create a new gimple_seq by remapping all the statements in BODY
+   using the inlining information in ID.  */
+
+gimple_seq
+remap_gimple_seq (gimple_seq body, copy_body_data *id)
+{
+  gimple_stmt_iterator si;
+  gimple_seq new_body = NULL;
+
+  for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
+    {
+      gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
+      gimple_seq_add_stmt (&new_body, new_stmt);
+    }
+
+  return new_body;
+}
+
+
+/* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
+   block using the mapping information in ID.  */
+
+static gimple
+copy_gimple_bind (gimple stmt, copy_body_data *id)
+{
+  gimple new_bind;
+  tree new_block, new_vars;
+  gimple_seq body, new_body;
+
+  /* Copy the statement.  Note that we purposely don't use copy_stmt
+     here because we need to remap statements as we copy.  */
+  body = gimple_bind_body (stmt);
+  new_body = remap_gimple_seq (body, id);
+
+  new_block = gimple_bind_block (stmt);
+  if (new_block)
+    remap_block (&new_block, id);
+
+  /* This will remap a lot of the same decls again, but this should be
+     harmless.  */
+  new_vars = gimple_bind_vars (stmt);
+  if (new_vars)
+    new_vars = remap_decls (new_vars, id);
+
+  new_bind = gimple_build_bind (new_vars, new_body, new_block);
+
+  return new_bind;
+}
+
+
+/* Remap the GIMPLE operand pointed to by *TP.  DATA is really a
+   'struct walk_stmt_info *'.  DATA->INFO is a 'copy_body_data *'.
+   WALK_SUBTREES is used to indicate walk_gimple_op whether to keep
+   recursing into the children nodes of *TP.  */
+
+static tree
+remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
+{
+  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
+  copy_body_data *id = (copy_body_data *) wi_p->info;
+  tree fn = id->src_fn;
+
+  if (TREE_CODE (*tp) == SSA_NAME)
+    {
+      *tp = remap_ssa_name (*tp, id);
+      *walk_subtrees = 0;
+      return NULL;
+    }
+  else if (auto_var_in_fn_p (*tp, fn))
+    {
+      /* Local variables and labels need to be replaced by equivalent
+        variables.  We don't want to copy static variables; there's
+        only one of those, no matter how many times we inline the
+        containing function.  Similarly for globals from an outer
+        function.  */
+      tree new_decl;
+
+      /* Remap the declaration.  */
+      new_decl = remap_decl (*tp, id);
+      gcc_assert (new_decl);
+      /* Replace this variable with the copy.  */
+      STRIP_TYPE_NOPS (new_decl);
+      *tp = new_decl;
+      *walk_subtrees = 0;
+    }
+  else if (TREE_CODE (*tp) == STATEMENT_LIST)
+    gcc_unreachable ();
+  else if (TREE_CODE (*tp) == SAVE_EXPR)
+    gcc_unreachable ();
+  else if (TREE_CODE (*tp) == LABEL_DECL
+          && (!DECL_CONTEXT (*tp)
+              || decl_function_context (*tp) == id->src_fn))
+    /* These may need to be remapped for EH handling.  */
+    *tp = remap_decl (*tp, id);
+  else if (TYPE_P (*tp))
+    /* Types may need remapping as well.  */
+    *tp = remap_type (*tp, id);
+  else if (CONSTANT_CLASS_P (*tp))
+    {
+      /* If this is a constant, we have to copy the node iff the type
+        will be remapped.  copy_tree_r will not copy a constant.  */
+      tree new_type = remap_type (TREE_TYPE (*tp), id);
+
+      if (new_type == TREE_TYPE (*tp))
+       *walk_subtrees = 0;
+
+      else if (TREE_CODE (*tp) == INTEGER_CST)
+       *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
+                                 TREE_INT_CST_HIGH (*tp));
+      else
+       {
+         *tp = copy_node (*tp);
+         TREE_TYPE (*tp) = new_type;
+       }
+    }
+  else
+    {
+      /* Otherwise, just copy the node.  Note that copy_tree_r already
+        knows not to copy VAR_DECLs, etc., so this is safe.  */
+      if (TREE_CODE (*tp) == INDIRECT_REF)
+       {
+         /* Get rid of *& from inline substitutions that can happen when a
+            pointer argument is an ADDR_EXPR.  */
+         tree decl = TREE_OPERAND (*tp, 0);
+         tree *n;
+
+         n = (tree *) pointer_map_contains (id->decl_map, decl);
+         if (n)
+           {
+             tree type, new, old;
+
+             /* If we happen to get an ADDR_EXPR in n->value, strip
+                it manually here as we'll eventually get ADDR_EXPRs
+                which lie about their types pointed to.  In this case
+                build_fold_indirect_ref wouldn't strip the
+                INDIRECT_REF, but we absolutely rely on that.  As
+                fold_indirect_ref does other useful transformations,
+                try that first, though.  */
+             type = TREE_TYPE (TREE_TYPE (*n));
+             new = unshare_expr (*n);
+             old = *tp;
+             *tp = gimple_fold_indirect_ref (new);
+             if (!*tp)
+               {
+                 if (TREE_CODE (new) == ADDR_EXPR)
+                   {
+                     *tp = fold_indirect_ref_1 (type, new);
+                     /* ???  We should either assert here or build
+                        a VIEW_CONVERT_EXPR instead of blindly leaking
+                        incompatible types to our IL.  */
+                     if (! *tp)
+                       *tp = TREE_OPERAND (new, 0);
+                   }
+                 else
+                   {
+                     *tp = build1 (INDIRECT_REF, type, new);
+                     TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
+                   }
+               }
+             *walk_subtrees = 0;
+             return NULL;
+           }
+       }
+
+      /* Here is the "usual case".  Copy this tree node, and then
+        tweak some special cases.  */
+      copy_tree_r (tp, walk_subtrees, NULL);
+
+      /* Global variables we haven't seen yet need to go into referenced
+        vars, unless they are referenced only from types.  */
+      if (gimple_in_ssa_p (cfun)
+         && TREE_CODE (*tp) == VAR_DECL
+         && id->remapping_type_depth == 0)
+       add_referenced_var (*tp);
+
+      /* We should never have TREE_BLOCK set on non-statements.  */
+      if (EXPR_P (*tp))
+       gcc_assert (!TREE_BLOCK (*tp));
+
+      if (TREE_CODE (*tp) != OMP_CLAUSE)
+       TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
+
+      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
+       {
+         /* The copied TARGET_EXPR has never been expanded, even if the
+            original node was expanded already.  */
+         TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
+         TREE_OPERAND (*tp, 3) = NULL_TREE;
+       }
+      else if (TREE_CODE (*tp) == ADDR_EXPR)
+       {
+         /* Variable substitution need not be simple.  In particular,
+            the INDIRECT_REF substitution above.  Make sure that
+            TREE_CONSTANT and friends are up-to-date.  But make sure
+            to not improperly set TREE_BLOCK on some sub-expressions.  */
+         int invariant = is_gimple_min_invariant (*tp);
+         tree block = id->block;
+         id->block = NULL_TREE;
+         walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
+         id->block = block;
+
+         /* Handle the case where we substituted an INDIRECT_REF
+            into the operand of the ADDR_EXPR.  */
+         if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
+           *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
+         else
+           recompute_tree_invariant_for_addr_expr (*tp);
+
+         /* If this used to be invariant, but is not any longer,
+            then regimplification is probably needed.  */
+         if (invariant && !is_gimple_min_invariant (*tp))
+           id->regimplify = true;
+
+         *walk_subtrees = 0;
+       }
+    }
+
+  /* Keep iterating.  */
+  return NULL_TREE;
+}
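
As a reading aid (not part of the patch): remap_gimple_op_r is driven by the
statement-operand walker; the copy_body_data pointer travels in the info
field of struct walk_stmt_info and is recovered inside the callback.  A
sketch of the wiring, mirroring the call made in remap_gimple_stmt below:

    struct walk_stmt_info wi;

    memset (&wi, 0, sizeof (wi));
    wi.info = id;                 /* read back as wi_p->info in the callback */
    walk_gimple_op (copy, remap_gimple_op_r, &wi);
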
+
+
+/* Called from copy_body_id via walk_tree.  DATA is really a
    `copy_body_data *'.  */
 
 tree
-copy_body_r (tree *tp, int *walk_subtrees, void *data)
+copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
 {
   copy_body_data *id = (copy_body_data *) data;
   tree fn = id->src_fn;
@@ -562,7 +789,7 @@ copy_body_r (tree *tp, int *walk_subtrees, void *data)
      duplicated and/or tweaked.  */
 
   /* When requested, RETURN_EXPRs should be transformed to just the
-     contained GIMPLE_MODIFY_STMT.  The branch semantics of the return will
+     contained MODIFY_EXPR.  The branch semantics of the return will
      be handled elsewhere by manipulating the CFG rather than a statement.  */
   if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
     {
@@ -573,10 +800,10 @@ copy_body_r (tree *tp, int *walk_subtrees, void *data)
         If the "assignment" is just the result decl, the result
         decl has already been set (e.g. a recent "foo (&result_decl,
         ...)"); just toss the entire RETURN_EXPR.  */
-      if (assignment && TREE_CODE (assignment) == GIMPLE_MODIFY_STMT)
+      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
        {
          /* Replace the RETURN_EXPR with (a copy of) the
-            GIMPLE_MODIFY_STMT hanging underneath.  */
+            MODIFY_EXPR hanging underneath.  */
          *tp = copy_node (assignment);
        }
       else /* Else the RETURN_EXPR returns no value.  */
@@ -649,14 +876,14 @@ copy_body_r (tree *tp, int *walk_subtrees, void *data)
       /* Here we handle trees that are not completely rewritten.
         First we detect some inlining-induced bogosities for
         discarding.  */
-      if (TREE_CODE (*tp) == GIMPLE_MODIFY_STMT
-         && GIMPLE_STMT_OPERAND (*tp, 0) == GIMPLE_STMT_OPERAND (*tp, 1)
-         && (auto_var_in_fn_p (GIMPLE_STMT_OPERAND (*tp, 0), fn)))
+      if (TREE_CODE (*tp) == MODIFY_EXPR
+         && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
+         && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
        {
          /* Some assignments VAR = VAR; don't generate any rtl code
             and thus don't count as variable modification.  Avoid
             keeping bogosities like 0 = 0.  */
-         tree decl = GIMPLE_STMT_OPERAND (*tp, 0), value;
+         tree decl = TREE_OPERAND (*tp, 0), value;
          tree *n;
 
          n = (tree *) pointer_map_contains (id->decl_map, decl);
@@ -667,7 +894,7 @@ copy_body_r (tree *tp, int *walk_subtrees, void *data)
              if (TREE_CONSTANT (value) || TREE_READONLY (value))
                {
                  *tp = build_empty_stmt ();
-                 return copy_body_r (tp, walk_subtrees, data);
+                 return copy_tree_body_r (tp, walk_subtrees, data);
                }
            }
        }
@@ -722,14 +949,15 @@ copy_body_r (tree *tp, int *walk_subtrees, void *data)
 
       /* Global variables we haven't seen yet needs to go into referenced
         vars.  If not referenced from types only.  */
-      if (gimple_in_ssa_p (cfun) && TREE_CODE (*tp) == VAR_DECL
+      if (gimple_in_ssa_p (cfun)
+         && TREE_CODE (*tp) == VAR_DECL
          && id->remapping_type_depth == 0)
        add_referenced_var (*tp);
        
       /* If EXPR has block defined, map it to newly constructed block.
          When inlining we want EXPRs without block appear in the block
         of function call.  */
-      if (EXPR_P (*tp) || GIMPLE_STMT_P (*tp))
+      if (EXPR_P (*tp))
        {
          new_block = id->block;
          if (TREE_BLOCK (*tp))
@@ -745,11 +973,11 @@ copy_body_r (tree *tp, int *walk_subtrees, void *data)
 
       if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
        TREE_OPERAND (*tp, 0) =
-         build_int_cst
-           (NULL_TREE,
-            id->eh_region_offset + TREE_INT_CST_LOW (TREE_OPERAND (*tp, 0)));
+         build_int_cst (NULL_TREE,
+                        id->eh_region_offset
+                        + TREE_INT_CST_LOW (TREE_OPERAND (*tp, 0)));
 
-      if (!GIMPLE_TUPLE_P (*tp) && TREE_CODE (*tp) != OMP_CLAUSE)
+      if (TREE_CODE (*tp) != OMP_CLAUSE)
        TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
 
       /* The copied TARGET_EXPR has never been expanded, even if the
@@ -766,17 +994,20 @@ copy_body_r (tree *tp, int *walk_subtrees, void *data)
       else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          int invariant = is_gimple_min_invariant (*tp);
-         walk_tree (&TREE_OPERAND (*tp, 0), copy_body_r, id, NULL);
+         walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
+
          /* Handle the case where we substituted an INDIRECT_REF
             into the operand of the ADDR_EXPR.  */
          if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
            *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
          else
            recompute_tree_invariant_for_addr_expr (*tp);
+
          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
          if (invariant && !is_gimple_min_invariant (*tp))
            id->regimplify = true;
+
          *walk_subtrees = 0;
        }
     }
@@ -785,6 +1016,209 @@ copy_body_r (tree *tp, int *walk_subtrees, void *data)
   return NULL_TREE;
 }
 
+
+/* Helper for copy_bb.  Remap statement STMT using the inlining
+   information in ID.  Return the new statement copy.  */
+
+static gimple
+remap_gimple_stmt (gimple stmt, copy_body_data *id)
+{
+  gimple copy = NULL;
+  struct walk_stmt_info wi;
+  tree new_block;
+
+  /* Begin by recognizing trees that we'll completely rewrite for the
+     inlining context.  Our output for these trees is completely
+     different from our input (e.g. RETURN_EXPR is deleted, and morphs
+     into an edge).  Further down, we'll handle trees that get
+     duplicated and/or tweaked.  */
+
+  /* When requested, GIMPLE_RETURNs should be transformed to just the
+     contained GIMPLE_ASSIGN.  The branch semantics of the return will
+     be handled elsewhere by manipulating the CFG rather than the
+     statement.  */
+  if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
+    {
+      tree retval = gimple_return_retval (stmt);
+
+      /* If we're returning something, just turn that into an
+        assignment into the equivalent of the original RESULT_DECL.
+        If RETVAL is just the result decl, the result decl has
+        already been set (e.g. a recent "foo (&result_decl, ...)");
+        just toss the entire GIMPLE_RETURN.  */
+      if (retval && TREE_CODE (retval) != RESULT_DECL)
+       copy = gimple_build_assign (id->retvar, retval);
+      else
+       return gimple_build_nop ();
+    }
+  else if (gimple_has_substatements (stmt))
+    {
+      gimple_seq s1, s2;
+
+      /* When cloning bodies from the C++ front end, we will be handed bodies
+        in High GIMPLE form.  Handle here all the High GIMPLE statements that
+        have embedded statements.  */
+      switch (gimple_code (stmt))
+       {
+       case GIMPLE_BIND:
+         copy = copy_gimple_bind (stmt, id);
+         break;
+
+       case GIMPLE_CATCH:
+         s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
+         copy = gimple_build_catch (gimple_catch_types (stmt), s1);
+         break;
+
+       case GIMPLE_EH_FILTER:
+         s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
+         copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
+         break;
+
+       case GIMPLE_TRY:
+         s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
+         s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
+         copy = gimple_build_try (s1, s2, gimple_try_kind (stmt)); 
+         break;
+
+       case GIMPLE_WITH_CLEANUP_EXPR:
+         s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
+         copy = gimple_build_wce (s1);
+         break;
+
+       case GIMPLE_OMP_PARALLEL:
+         s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
+         copy = gimple_build_omp_parallel
+                  (s1,
+                   gimple_omp_parallel_clauses (stmt),
+                   gimple_omp_parallel_child_fn (stmt),
+                   gimple_omp_parallel_data_arg (stmt));
+         break;
+
+       case GIMPLE_OMP_TASK:
+         s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
+         copy = gimple_build_omp_task
+                  (s1,
+                   gimple_omp_task_clauses (stmt),
+                   gimple_omp_task_child_fn (stmt),
+                   gimple_omp_task_data_arg (stmt),
+                   gimple_omp_task_copy_fn (stmt),
+                   gimple_omp_task_arg_size (stmt),
+                   gimple_omp_task_arg_align (stmt));
+         break;
+
+       case GIMPLE_OMP_FOR:
+         s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
+         s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
+         copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
+                                      gimple_omp_for_collapse (stmt), s2);
+         {
+           size_t i;
+           for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
+             {
+               gimple_omp_for_set_index (copy, i,
+                                         gimple_omp_for_index (stmt, i));
+               gimple_omp_for_set_initial (copy, i,
+                                           gimple_omp_for_initial (stmt, i));
+               gimple_omp_for_set_final (copy, i,
+                                         gimple_omp_for_final (stmt, i));
+               gimple_omp_for_set_incr (copy, i,
+                                        gimple_omp_for_incr (stmt, i));
+               gimple_omp_for_set_cond (copy, i,
+                                        gimple_omp_for_cond (stmt, i));
+             }
+         }
+         break;
+
+       case GIMPLE_OMP_MASTER:
+         s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
+         copy = gimple_build_omp_master (s1);
+         break;
+
+       case GIMPLE_OMP_ORDERED:
+         s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
+         copy = gimple_build_omp_ordered (s1);
+         break;
+
+       case GIMPLE_OMP_SECTION:
+         s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
+         copy = gimple_build_omp_section (s1);
+         break;
+
+       case GIMPLE_OMP_SECTIONS:
+         s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
+         copy = gimple_build_omp_sections
+                  (s1, gimple_omp_sections_clauses (stmt));
+         break;
+
+       case GIMPLE_OMP_SINGLE:
+         s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
+         copy = gimple_build_omp_single
+                  (s1, gimple_omp_single_clauses (stmt));
+         break;
+
+       default:
+         gcc_unreachable ();
+       }
+    }
+  else
+    {
+      if (gimple_assign_copy_p (stmt)
+         && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
+         && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
+       {
+         /* Here we handle statements that are not completely rewritten.
+            First we detect some inlining-induced bogosities for
+            discarding.  */
+
+         /* Some assignments VAR = VAR; don't generate any rtl code
+            and thus don't count as variable modification.  Avoid
+            keeping bogosities like 0 = 0.  */
+         tree decl = gimple_assign_lhs (stmt), value;
+         tree *n;
+
+         n = (tree *) pointer_map_contains (id->decl_map, decl);
+         if (n)
+           {
+             value = *n;
+             STRIP_TYPE_NOPS (value);
+             if (TREE_CONSTANT (value) || TREE_READONLY (value))
+               return gimple_build_nop ();
+           }
+       }
+
+      /* Create a new deep copy of the statement.  */
+      copy = gimple_copy (stmt);
+    }
+
+  /* If STMT has a block defined, map it to the newly constructed
+     block.  When inlining we want statements without a block to
+     appear in the block of the function call.  */
+  new_block = id->block;
+  if (gimple_block (copy))
+    {
+      tree *n;
+      n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
+      gcc_assert (n);
+      new_block = *n;
+    }
+
+  gimple_set_block (copy, new_block);
+
+  /* Remap all the operands in COPY.  */
+  memset (&wi, 0, sizeof (wi));
+  wi.info = id;
+  walk_gimple_op (copy, remap_gimple_op_r, &wi); 
+
+  /* We have to handle EH region remapping of GIMPLE_RESX specially because
+     the region number is not an operand.  */
+  if (gimple_code (stmt) == GIMPLE_RESX && id->eh_region_offset)
+    {
+      gimple_resx_set_region (copy, gimple_resx_region (stmt) + id->eh_region_offset);
+    }
+  return copy;
+}
+
+
 /* Copy basic block, scale profile accordingly.  Edges will be taken care of
    later  */
 
@@ -792,8 +1226,9 @@ static basic_block
 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
          gcov_type count_scale)
 {
-  block_stmt_iterator bsi, copy_bsi;
+  gimple_stmt_iterator gsi, copy_gsi;
   basic_block copy_basic_block;
+  tree decl;
 
   /* create_basic_block() will append every new block to
      basic_block_info automatically.  */
@@ -801,238 +1236,227 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
                                          (basic_block) bb->prev_bb->aux);
   copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
 
-  /* We are going to rebuild frequencies from scratch.  These values have just
-     small importance to drive canonicalize_loop_headers.  */
+  /* We are going to rebuild frequencies from scratch.  These values
+     have just small importance to drive canonicalize_loop_headers.  */
   copy_basic_block->frequency = ((gcov_type)bb->frequency
-                                    * frequency_scale / REG_BR_PROB_BASE);
+                                * frequency_scale / REG_BR_PROB_BASE);
+
   if (copy_basic_block->frequency > BB_FREQ_MAX)
     copy_basic_block->frequency = BB_FREQ_MAX;
-  copy_bsi = bsi_start (copy_basic_block);
 
-  for (bsi = bsi_start (bb);
-       !bsi_end_p (bsi); bsi_next (&bsi))
+  copy_gsi = gsi_start_bb (copy_basic_block);
+
+  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
     {
-      tree stmt = bsi_stmt (bsi);
-      tree orig_stmt = stmt;
+      gimple stmt = gsi_stmt (gsi);
+      gimple orig_stmt = stmt;
 
       id->regimplify = false;
-      walk_tree (&stmt, copy_body_r, id, NULL);
-
-      /* RETURN_EXPR might be removed,
-         this is signalled by making stmt pointer NULL.  */
-      if (stmt)
+      stmt = remap_gimple_stmt (stmt, id);
+      if (gimple_nop_p (stmt))
+       continue;
+
+      gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
+
+      /* With return slot optimization we can end up with
+        non-gimple (foo *)&this->m, fix that here.  */
+      if ((is_gimple_assign (stmt)
+           && gimple_assign_rhs_code (stmt) == NOP_EXPR
+           && !is_gimple_val (gimple_assign_rhs1 (stmt)))
+         || id->regimplify)
        {
-         tree call, decl;
-
-         gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
-
-         /* With return slot optimization we can end up with
-            non-gimple (foo *)&this->m, fix that here.  */
-         if ((TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-              && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == NOP_EXPR
-              && !is_gimple_val (TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt, 1), 0)))
-             || id->regimplify)
-           gimplify_stmt (&stmt);
+         tree new_rhs;
+         new_rhs = force_gimple_operand_gsi (&copy_gsi,
+                                             gimple_assign_rhs1 (stmt),
+                                             true, NULL, true, GSI_SAME_STMT);
+         gimple_assign_set_rhs1 (stmt, new_rhs);
+       }
+      else if (id->regimplify)
+       gimple_regimplify_operands (stmt, &copy_gsi);
 
-          bsi_insert_after (&copy_bsi, stmt, BSI_NEW_STMT);
+      gsi_insert_after (&copy_gsi, stmt, GSI_NEW_STMT);
 
-         /* Process new statement.  gimplify_stmt possibly turned statement
-            into multiple statements, we need to process all of them.  */
-         while (!bsi_end_p (copy_bsi))
+      /* Process the new statement.  The call to gimple_regimplify_operands
+        possibly turned the statement into multiple statements, we
+        need to process all of them.  */
+      while (!gsi_end_p (copy_gsi))
+       {
+         if (is_gimple_call (stmt)
+             && gimple_call_va_arg_pack_p (stmt)
+             && id->gimple_call)
+           {
+             /* __builtin_va_arg_pack () should be replaced by
+                all arguments corresponding to ... in the caller.  */
+             tree p;
+             gimple new_call;
+             VEC(tree, heap) *argarray;
+             size_t nargs = gimple_call_num_args (id->gimple_call);
+             size_t n;
+
+             for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
+               nargs--;
+
+             /* Create the new array of arguments.  */
+             n = nargs + gimple_call_num_args (stmt);
+             argarray = VEC_alloc (tree, heap, n);
+             VEC_safe_grow (tree, heap, argarray, n);
+
+             /* Copy all the arguments before '...'  */
+             memcpy (VEC_address (tree, argarray),
+                     gimple_call_arg_ptr (stmt, 0),
+                     gimple_call_num_args (stmt) * sizeof (tree));
+
+             /* Append the arguments passed in '...'  */
+             memcpy (VEC_address(tree, argarray) + gimple_call_num_args (stmt),
+                     gimple_call_arg_ptr (id->gimple_call, 0)
+                       + (gimple_call_num_args (id->gimple_call) - nargs),
+                     nargs * sizeof (tree));
+
+             new_call = gimple_build_call_vec (gimple_call_fn (stmt),
+                                               argarray);
+
+             VEC_free (tree, heap, argarray);
+
+             /* Copy all GIMPLE_CALL flags, location and block, except
+                GF_CALL_VA_ARG_PACK.  */
+             gimple_call_copy_flags (new_call, stmt);
+             gimple_call_set_va_arg_pack (new_call, false);
+             gimple_set_location (new_call, gimple_location (stmt));
+             gimple_set_block (new_call, gimple_block (stmt));
+             gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
+
+             gsi_replace (&copy_gsi, new_call, false);
+             stmt = new_call;
+           }
+         else if (is_gimple_call (stmt)
+                  && id->gimple_call
+                  && (decl = gimple_call_fndecl (stmt))
+                  && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
+                  && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
            {
-             tree *stmtp = bsi_stmt_ptr (copy_bsi);
-             tree stmt = *stmtp;
-             call = get_call_expr_in (stmt);
+             /* __builtin_va_arg_pack_len () should be replaced by
+                the number of anonymous arguments.  */
+             size_t nargs = gimple_call_num_args (id->gimple_call);
+             tree count, p;
+             gimple new_stmt;
+
+             for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
+               nargs--;
+
+             count = build_int_cst (integer_type_node, nargs);
+             new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
+             gsi_replace (&copy_gsi, new_stmt, false);
+             stmt = new_stmt;
+           }
 
-             if (call && CALL_EXPR_VA_ARG_PACK (call) && id->call_expr)
-               {
-                 /* __builtin_va_arg_pack () should be replaced by
-                    all arguments corresponding to ... in the caller.  */
-                 tree p, *argarray, new_call, *call_ptr;
-                 int nargs = call_expr_nargs (id->call_expr);
-
-                 for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
-                   nargs--;
-
-                 argarray = (tree *) alloca ((nargs + call_expr_nargs (call))
-                                             * sizeof (tree));
-
-                 memcpy (argarray, CALL_EXPR_ARGP (call),
-                         call_expr_nargs (call) * sizeof (*argarray));
-                 memcpy (argarray + call_expr_nargs (call),
-                         CALL_EXPR_ARGP (id->call_expr)
-                         + (call_expr_nargs (id->call_expr) - nargs),
-                         nargs * sizeof (*argarray));
-
-                 new_call = build_call_array (TREE_TYPE (call),
-                                              CALL_EXPR_FN (call),
-                                              nargs + call_expr_nargs (call),
-                                              argarray);
-                 /* Copy all CALL_EXPR flags, locus and block, except
-                    CALL_EXPR_VA_ARG_PACK flag.  */
-                 CALL_EXPR_STATIC_CHAIN (new_call)
-                   = CALL_EXPR_STATIC_CHAIN (call);
-                 CALL_EXPR_TAILCALL (new_call) = CALL_EXPR_TAILCALL (call);
-                 CALL_EXPR_RETURN_SLOT_OPT (new_call)
-                   = CALL_EXPR_RETURN_SLOT_OPT (call);
-                 CALL_FROM_THUNK_P (new_call) = CALL_FROM_THUNK_P (call);
-                 CALL_CANNOT_INLINE_P (new_call)
-                   = CALL_CANNOT_INLINE_P (call);
-                 TREE_NOTHROW (new_call) = TREE_NOTHROW (call);
-                 SET_EXPR_LOCUS (new_call, EXPR_LOCUS (call));
-                 TREE_BLOCK (new_call) = TREE_BLOCK (call);
-
-                 call_ptr = stmtp;
-                 if (TREE_CODE (*call_ptr) == GIMPLE_MODIFY_STMT)
-                   call_ptr = &GIMPLE_STMT_OPERAND (*call_ptr, 1);
-                 if (TREE_CODE (*call_ptr) == WITH_SIZE_EXPR)
-                   call_ptr = &TREE_OPERAND (*call_ptr, 0);
-                 gcc_assert (*call_ptr == call);
-                 if (call_ptr == stmtp)
-                   {
-                     bsi_replace (&copy_bsi, new_call, true);
-                     stmtp = bsi_stmt_ptr (copy_bsi);
-                     stmt = *stmtp;
-                   }
-                 else
-                   {
-                     *call_ptr = new_call;
-                     stmt = *stmtp;
-                     update_stmt (stmt);
-                   }
-               }
-             else if (call
-                      && id->call_expr
-                      && (decl = get_callee_fndecl (call))
-                      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
-                      && DECL_FUNCTION_CODE (decl)
-                         == BUILT_IN_VA_ARG_PACK_LEN)
-               {
-                 /* __builtin_va_arg_pack_len () should be replaced by
-                    the number of anonymous arguments.  */
-                 int nargs = call_expr_nargs (id->call_expr);
-                 tree count, *call_ptr, p;
-
-                 for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
-                   nargs--;
-
-                 count = build_int_cst (integer_type_node, nargs);
-                 call_ptr = stmtp;
-                 if (TREE_CODE (*call_ptr) == GIMPLE_MODIFY_STMT)
-                   call_ptr = &GIMPLE_STMT_OPERAND (*call_ptr, 1);
-                 if (TREE_CODE (*call_ptr) == WITH_SIZE_EXPR)
-                   call_ptr = &TREE_OPERAND (*call_ptr, 0);
-                 gcc_assert (*call_ptr == call && call_ptr != stmtp);
-                 *call_ptr = count;
-                 stmt = *stmtp;
-                 update_stmt (stmt);
-                 call = NULL_TREE;
-               }
+         /* Statements produced by inlining can be unfolded, especially
+            when we constant propagated some operands.  We can't fold
+            them right now for two reasons:
+            1) folding requires SSA_NAME_DEF_STMTs to be correct
+            2) we can't change function calls to builtins.
+            So we just mark the statement for later folding.  We mark
+            all new statements, instead of just the statements that have
+            changed by some nontrivial substitution, so even statements
+            made foldable indirectly are updated.  If this turns out to
+            be expensive, copy_body can be told to watch for nontrivial
+            changes.  */
+         if (id->statements_to_fold)
+           pointer_set_insert (id->statements_to_fold, stmt);
+
+         /* We're duplicating a CALL_EXPR.  Find any corresponding
+            callgraph edges and update or duplicate them.  */
+         if (is_gimple_call (stmt))
+           {
+             struct cgraph_node *node;
+             struct cgraph_edge *edge;
 
-             /* Statements produced by inlining can be unfolded, especially
-                when we constant propagated some operands.  We can't fold
-                them right now for two reasons:
-                1) folding require SSA_NAME_DEF_STMTs to be correct
-                2) we can't change function calls to builtins.
-                So we just mark statement for later folding.  We mark
-                all new statements, instead just statements that has changed
-                by some nontrivial substitution so even statements made
-                foldable indirectly are updated.  If this turns out to be
-                expensive, copy_body can be told to watch for nontrivial
-                changes.  */
-             if (id->statements_to_fold)
-               pointer_set_insert (id->statements_to_fold, stmt);
-             /* We're duplicating a CALL_EXPR.  Find any corresponding
-                callgraph edges and update or duplicate them.  */
-             if (call)
+             switch (id->transform_call_graph_edges)
                {
-                 struct cgraph_node *node;
-                 struct cgraph_edge *edge;
-                
-                 switch (id->transform_call_graph_edges)
-                   {
-                   case CB_CGE_DUPLICATE:
-                     edge = cgraph_edge (id->src_node, orig_stmt);
-                     if (edge)
-                       cgraph_clone_edge (edge, id->dst_node, stmt,
+             case CB_CGE_DUPLICATE:
+               edge = cgraph_edge (id->src_node, orig_stmt);
+               if (edge)
+                 cgraph_clone_edge (edge, id->dst_node, stmt,
                                           REG_BR_PROB_BASE, 1,
                                           edge->frequency, true);
-                     break;
-
-                   case CB_CGE_MOVE_CLONES:
-                     for (node = id->dst_node->next_clone;
-                          node;
-                          node = node->next_clone)
-                       {
-                         edge = cgraph_edge (node, orig_stmt);
+               break;
+
+             case CB_CGE_MOVE_CLONES:
+               for (node = id->dst_node->next_clone;
+                   node;
+                   node = node->next_clone)
+                 {
+                   edge = cgraph_edge (node, orig_stmt);
                          if (edge)
                            cgraph_set_call_stmt (edge, stmt);
-                       }
-                     /* FALLTHRU */
+                 }
+               /* FALLTHRU */
 
-                   case CB_CGE_MOVE:
-                     edge = cgraph_edge (id->dst_node, orig_stmt);
-                     if (edge)
-                       cgraph_set_call_stmt (edge, stmt);
-                     break;
+             case CB_CGE_MOVE:
+               edge = cgraph_edge (id->dst_node, orig_stmt);
+               if (edge)
+                 cgraph_set_call_stmt (edge, stmt);
+               break;
 
-                   default:
-                     gcc_unreachable ();
-                   }
+             default:
+               gcc_unreachable ();
                }
-             /* If you think we can abort here, you are wrong.
-                There is no region 0 in tree land.  */
-             gcc_assert (lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt)
-                         != 0);
-
-             if (tree_could_throw_p (stmt)
-                 /* When we are cloning for inlining, we are supposed to
-                    construct a clone that calls precisely the same functions
-                    as original.  However IPA optimizers might've proved
-                    earlier some function calls as non-trapping that might
-                    render some basic blocks dead that might become
-                    unreachable.
-
-                    We can't update SSA with unreachable blocks in CFG and thus
-                    we prevent the scenario by preserving even the "dead" eh
-                    edges until the point they are later removed by
-                    fixup_cfg pass.  */
-                 || (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
-                     && lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) > 0))
-               {
-                 int region = lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt);
-                 /* Add an entry for the copied tree in the EH hashtable.
-                    When cloning or versioning, use the hashtable in
-                    cfun, and just copy the EH number.  When inlining, use the
-                    hashtable in the caller, and adjust the region number.  */
-                 if (region > 0)
-                   add_stmt_to_eh_region (stmt, region + id->eh_region_offset);
-
-                 /* If this tree doesn't have a region associated with it,
-                    and there is a "current region,"
-                    then associate this tree with the current region
-                    and add edges associated with this region.  */
-                 if ((lookup_stmt_eh_region_fn (id->src_cfun,
-                                                orig_stmt) <= 0
-                      && id->eh_region > 0)
-                     && tree_could_throw_p (stmt))
-                   add_stmt_to_eh_region (stmt, id->eh_region);
-               }
-             if (gimple_in_ssa_p (cfun))
-               {
-                  ssa_op_iter i;
-                  tree def;
+           }
 
-                  find_new_referenced_vars (bsi_stmt_ptr (copy_bsi));
-                  FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
-                   if (TREE_CODE (def) == SSA_NAME)
-                     SSA_NAME_DEF_STMT (def) = stmt;
-               }
-             bsi_next (&copy_bsi);
+         /* If you think we can abort here, you are wrong.
+            There is no region 0 in gimple.  */
+         gcc_assert (lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) != 0);
+
+         if (stmt_could_throw_p (stmt)
+             /* When we are cloning for inlining, we are supposed to
+                construct a clone that calls precisely the same functions
+                as original.  However IPA optimizers might've proved
+                earlier some function calls as non-trapping that might
+                render some basic blocks dead that might become
+                unreachable.
+
+                We can't update SSA with unreachable blocks in CFG and thus
+                we prevent the scenario by preserving even the "dead" eh
+                edges until the point they are later removed by
+                fixup_cfg pass.  */
+             || (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
+                 && lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) > 0))
+           {
+             int region = lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt);
+
+             /* Add an entry for the copied tree in the EH hashtable.
+                When cloning or versioning, use the hashtable in
+                cfun, and just copy the EH number.  When inlining, use the
+                hashtable in the caller, and adjust the region number.  */
+             if (region > 0)
+               add_stmt_to_eh_region (stmt, region + id->eh_region_offset);
+
+             /* If this tree doesn't have a region associated with it,
+                and there is a "current region,"
+                then associate this tree with the current region
+                and add edges associated with this region.  */
+             if (lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) <= 0
+                 && id->eh_region > 0
+                 && stmt_could_throw_p (stmt))
+               add_stmt_to_eh_region (stmt, id->eh_region);
            }
-         copy_bsi = bsi_last (copy_basic_block);
+
+         if (gimple_in_ssa_p (cfun))
+           {
+             ssa_op_iter i;
+             tree def;
+
+             find_new_referenced_vars (gsi_stmt (copy_gsi));
+             FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
+               if (TREE_CODE (def) == SSA_NAME)
+                 SSA_NAME_DEF_STMT (def) = stmt;
+           }
+
+         gsi_next (&copy_gsi);
        }
+
+      copy_gsi = gsi_last_bb (copy_basic_block);
     }
+
   return copy_basic_block;
 }
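
As a reading aid (not part of the patch): stripped of the va_arg_pack,
call-graph and EH bookkeeping, the per-statement loop in copy_bb above
reduces to the following skeleton (all names are taken from this patch):

    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple stmt = remap_gimple_stmt (gsi_stmt (gsi), id);

        /* A dropped GIMPLE_RETURN comes back as a nop; skip it.  */
        if (gimple_nop_p (stmt))
          continue;

        gsi_insert_after (&copy_gsi, stmt, GSI_NEW_STMT);
      }
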
 
@@ -1065,27 +1489,31 @@ update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
     if (!e->dest->aux
        || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
       {
-       tree phi;
+       gimple phi;
+       gimple_stmt_iterator si;
 
        gcc_assert (e->flags & EDGE_ABNORMAL);
+
        if (!nonlocal_goto)
          gcc_assert (e->flags & EDGE_EH);
+
        if (!can_throw)
          gcc_assert (!(e->flags & EDGE_EH));
-       for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
+
+       for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
          {
            edge re;
 
+           phi = gsi_stmt (si);
+
            /* There shouldn't be any PHI nodes in the ENTRY_BLOCK.  */
            gcc_assert (!e->dest->aux);
 
-           gcc_assert (SSA_NAME_OCCURS_IN_ABNORMAL_PHI
-                       (PHI_RESULT (phi)));
+           gcc_assert (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
 
            if (!is_gimple_reg (PHI_RESULT (phi)))
              {
-               mark_sym_for_renaming
-                 (SSA_NAME_VAR (PHI_RESULT (phi)));
+               mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
                continue;
              }
 
@@ -1100,16 +1528,18 @@ update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
       }
 }
 
+
 /* Copy edges from BB into its copy constructed earlier, scale profile
    accordingly.  Edges will be taken care of later.  Assume aux
    pointers to point to the copies of each BB.  */
+
 static void
 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
 {
   basic_block new_bb = (basic_block) bb->aux;
   edge_iterator ei;
   edge old_edge;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator si;
   int flags;
 
   /* Use the indices from the original blocks to create edges for the
@@ -1133,17 +1563,18 @@ copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
   if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
     return;
 
-  for (bsi = bsi_start (new_bb); !bsi_end_p (bsi);)
+  for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
     {
-      tree copy_stmt;
+      gimple copy_stmt;
       bool can_throw, nonlocal_goto;
 
-      copy_stmt = bsi_stmt (bsi);
+      copy_stmt = gsi_stmt (si);
       update_stmt (copy_stmt);
       if (gimple_in_ssa_p (cfun))
         mark_symbols_for_renaming (copy_stmt);
+
       /* Do this before the possible split_block.  */
-      bsi_next (&bsi);
+      gsi_next (&si);
 
       /* If this tree could throw an exception, there are two
          cases where we need to add abnormal edge(s): the
@@ -1156,13 +1587,12 @@ copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
          propagation can change an INDIRECT_REF which throws
          into a COMPONENT_REF which doesn't.  If the copy
          can throw, the original could also throw.  */
-
-      can_throw = tree_can_throw_internal (copy_stmt);
-      nonlocal_goto = tree_can_make_abnormal_goto (copy_stmt);
+      can_throw = stmt_can_throw_internal (copy_stmt);
+      nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
 
       if (can_throw || nonlocal_goto)
        {
-         if (!bsi_end_p (bsi))
+         if (!gsi_end_p (si))
            /* Note that bb's predecessor edges aren't necessarily
               right at this point; split_block doesn't care.  */
            {
@@ -1170,7 +1600,7 @@ copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
 
              new_bb = e->dest;
              new_bb->aux = e->src->aux;
-             bsi = bsi_start (new_bb);
+             si = gsi_start_bb (new_bb);
            }
        }
 
@@ -1178,11 +1608,11 @@ copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
        make_eh_edges (copy_stmt);
 
       if (nonlocal_goto)
-       make_abnormal_goto_edges (bb_for_stmt (copy_stmt), true);
+       make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
 
       if ((can_throw || nonlocal_goto)
          && gimple_in_ssa_p (cfun))
-       update_ssa_across_abnormal_edges (bb_for_stmt (copy_stmt), ret_bb,
+       update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
                                          can_throw, nonlocal_goto);
     }
 }
@@ -1197,27 +1627,33 @@ copy_phis_for_bb (basic_block bb, copy_body_data *id)
 {
   basic_block const new_bb = (basic_block) bb->aux;
   edge_iterator ei;
-  tree phi;
+  gimple phi;
+  gimple_stmt_iterator si;
 
-  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+  for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
     {
-      tree res = PHI_RESULT (phi);
-      tree new_res = res;
-      tree new_phi;
+      tree res, new_res;
+      gimple new_phi;
       edge new_edge;
 
+      phi = gsi_stmt (si);
+      res = PHI_RESULT (phi);
+      new_res = res;
       if (is_gimple_reg (res))
        {
-         walk_tree (&new_res, copy_body_r, id, NULL);
+         walk_tree (&new_res, copy_tree_body_r, id, NULL);
          SSA_NAME_DEF_STMT (new_res)
            = new_phi = create_phi_node (new_res, new_bb);
          FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
            {
-             edge const old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
+             edge const old_edge
+               = find_edge ((basic_block) new_edge->src->aux, bb);
              tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
              tree new_arg = arg;
-
-             walk_tree (&new_arg, copy_body_r, id, NULL);
+             tree block = id->block;
+             id->block = NULL_TREE;
+             walk_tree (&new_arg, copy_tree_body_r, id, NULL);
+             id->block = block;
              gcc_assert (new_arg);
              /* With return slot optimization we can end up with
                 non-gimple (foo *)&this->m, fix that here.  */
@@ -1225,10 +1661,9 @@ copy_phis_for_bb (basic_block bb, copy_body_data *id)
                  && TREE_CODE (new_arg) != FUNCTION_DECL
                  && !is_gimple_val (new_arg))
                {
-                 tree stmts = NULL_TREE;
-                 new_arg = force_gimple_operand (new_arg, &stmts,
-                                                 true, NULL);
-                 bsi_insert_on_edge_immediate (new_edge, stmts);
+                 gimple_seq stmts = NULL;
+                 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
+                 gsi_insert_seq_on_edge_immediate (new_edge, stmts);
                }
              add_phi_arg (new_phi, new_arg, new_edge);
            }
@@ -1236,7 +1671,9 @@ copy_phis_for_bb (basic_block bb, copy_body_data *id)
     }
 }
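
The PHI-argument fixup above forces a possibly non-GIMPLE argument into a valid operand and commits the generated statements on the incoming edge.  A reduced sketch of that idiom, using the same force_gimple_operand and gsi_insert_seq_on_edge_immediate calls as the hunk; the wrapper name is illustrative:

    /* Illustrative only: make EXPR a valid GIMPLE operand on edge E,
       inserting any statements generated for it on the edge itself.  */
    static tree
    gimplify_operand_on_edge (edge e, tree expr)
    {
      gimple_seq stmts = NULL;
      tree val = force_gimple_operand (expr, &stmts, true, NULL);

      if (!gimple_seq_empty_p (stmts))
        gsi_insert_seq_on_edge_immediate (e, stmts);

      return val;
    }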
 
+
 /* Wrapper for remap_decl so it can be used as a callback.  */
+
 static tree
 remap_decl_1 (tree decl, void *data)
 {
@@ -1269,7 +1706,7 @@ initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count,
     frequency_scale = count_scale;
 
   /* Register specific tree functions.  */
-  tree_register_cfg_hooks ();
+  gimple_register_cfg_hooks ();
   *new_cfun = *DECL_STRUCT_FUNCTION (callee_fndecl);
   new_cfun->funcdef_no = get_next_funcdef_no ();
   VALUE_HISTOGRAMS (new_cfun) = NULL;
@@ -1334,7 +1771,7 @@ copy_cfg_body (copy_body_data * id, gcov_type count, int frequency,
     frequency_scale = count_scale;
 
   /* Register specific tree functions.  */
-  tree_register_cfg_hooks ();
+  gimple_register_cfg_hooks ();
 
   /* Must have a CFG here at this point.  */
   gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
@@ -1342,7 +1779,6 @@ copy_cfg_body (copy_body_data * id, gcov_type count, int frequency,
 
   cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
 
-
   ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
   EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
   entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
@@ -1355,6 +1791,7 @@ copy_cfg_body (copy_body_data * id, gcov_type count, int frequency,
        = duplicate_eh_regions (cfun_to_copy, remap_decl_1, id,
                                0, id->eh_region);
     }
+
   /* Use aux pointers to map the original blocks to copy.  */
   FOR_EACH_BB_FN (bb, cfun_to_copy)
     {
@@ -1364,17 +1801,21 @@ copy_cfg_body (copy_body_data * id, gcov_type count, int frequency,
     }
 
   last = last_basic_block;
+
   /* Now that we've duplicated the blocks, duplicate their edges.  */
   FOR_ALL_BB_FN (bb, cfun_to_copy)
     copy_edges_for_bb (bb, count_scale, exit_block_map);
+
   if (gimple_in_ssa_p (cfun))
     FOR_ALL_BB_FN (bb, cfun_to_copy)
       copy_phis_for_bb (bb, id);
+
   FOR_ALL_BB_FN (bb, cfun_to_copy)
     {
       ((basic_block)bb->aux)->aux = NULL;
       bb->aux = NULL;
     }
+
   /* Zero out AUX fields of newly created block during EH edge
      insertion. */
   for (; last < last_basic_block; last++)
@@ -1385,21 +1826,6 @@ copy_cfg_body (copy_body_data * id, gcov_type count, int frequency,
   return new_fndecl;
 }
 
-/* Make a copy of the body of FN so that it can be inserted inline in
-   another function.  */
-
-tree
-copy_generic_body (copy_body_data *id)
-{
-  tree body;
-  tree fndecl = id->src_fn;
-
-  body = DECL_SAVED_TREE (fndecl);
-  walk_tree (&body, copy_body_r, id, NULL);
-
-  return body;
-}
-
 static tree
 copy_body (copy_body_data *id, gcov_type count, int frequency,
           basic_block entry_block_map, basic_block exit_block_map)
@@ -1434,7 +1860,7 @@ static void
 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
                     basic_block bb, tree *vars)
 {
-  tree init_stmt;
+  gimple init_stmt;
   tree var;
   tree rhs = value;
   tree def = (gimple_in_ssa_p (cfun)
@@ -1550,7 +1976,7 @@ setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
      the argument to the proper type in case it was promoted.  */
   if (value)
     {
-      block_stmt_iterator bsi = bsi_last (bb);
+      gimple_stmt_iterator si = gsi_last_bb (bb);
 
       if (rhs == error_mark_node)
        {
@@ -1560,21 +1986,20 @@ setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
 
       STRIP_USELESS_TYPE_CONVERSION (rhs);
 
-      /* We want to use GIMPLE_MODIFY_STMT, not INIT_EXPR here so that we
+      /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
         keep our trees in gimple form.  */
       if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
        {
          def = remap_ssa_name (def, id);
-          init_stmt = build_gimple_modify_stmt (def, rhs);
-         SSA_NAME_DEF_STMT (def) = init_stmt;
+          init_stmt = gimple_build_assign (def, rhs);
          SSA_NAME_IS_DEFAULT_DEF (def) = 0;
          set_default_def (var, NULL);
        }
       else
-        init_stmt = build_gimple_modify_stmt (var, rhs);
+        init_stmt = gimple_build_assign (var, rhs);
 
       /* If we did not create a gimple value and we did not create a gimple
-        cast of a gimple value, then we will need to gimplify INIT_STMTS
+        cast of a gimple value, then we will need to gimplify INIT_STMT
         at the end.  Note that is_gimple_cast only checks the outer
         tree code, not its operand.  Thus the explicit check that its
         operand is a gimple value.  */
@@ -1583,54 +2008,67 @@ setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
              || !is_gimple_val (TREE_OPERAND (rhs, 0))))
          || !is_gimple_reg (var))
        {
-          tree_stmt_iterator i;
+         gimple_stmt_iterator i;
+         gimple_seq seq = gimple_seq_alloc ();
           struct gimplify_ctx gctx;
 
          push_gimplify_context (&gctx);
-         gimplify_stmt (&init_stmt);
+
+         i = gsi_start (seq);
+         gimple_regimplify_operands (init_stmt, &i);
+
          if (gimple_in_ssa_p (cfun)
-              && init_stmt && TREE_CODE (init_stmt) == STATEMENT_LIST)
+              && init_stmt
+             && !gimple_seq_empty_p (seq))
            {
              /* The replacement can expose previously unreferenced
                 variables.  */
-             for (i = tsi_start (init_stmt); !tsi_end_p (i); tsi_next (&i))
-               find_new_referenced_vars (tsi_stmt_ptr (i));
+             for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
+               find_new_referenced_vars (gsi_stmt (i));
+
+             /* Insert the gimplified sequence needed for INIT_STMT
+                after SI.  INIT_STMT will be inserted after SEQ.  */
+             gsi_insert_seq_after (&si, seq, GSI_NEW_STMT);
             }
+
          pop_gimplify_context (NULL);
        }
 
       /* If VAR represents a zero-sized variable, it's possible that the
         assignment statement may result in no gimple statements.  */
       if (init_stmt)
-        bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
+        gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
+
       if (gimple_in_ssa_p (cfun))
-       for (;!bsi_end_p (bsi); bsi_next (&bsi))
-         mark_symbols_for_renaming (bsi_stmt (bsi));
+       for (;!gsi_end_p (si); gsi_next (&si))
+         mark_symbols_for_renaming (gsi_stmt (si));
     }
 }
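
Where the removed code built a GIMPLE_MODIFY_STMT tree and set its SSA def statement by hand, the tuplified code constructs the assignment tuple directly.  A minimal sketch of the constructor-and-insertion pattern used above; the helper name is illustrative:

    /* Illustrative only: emit VAR = RHS right after iterator *SI and
       leave *SI pointing at the new statement.  */
    static void
    emit_assignment_after (gimple_stmt_iterator *si, tree var, tree rhs)
    {
      gimple init_stmt = gimple_build_assign (var, rhs);
      gsi_insert_after (si, init_stmt, GSI_NEW_STMT);
    }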
 
 /* Generate code to initialize the parameters of the function at the
-   top of the stack in ID from the CALL_EXPR EXP.  */
+   top of the stack in ID from the GIMPLE_CALL STMT.  */
 
 static void
-initialize_inlined_parameters (copy_body_data *id, tree exp,
+initialize_inlined_parameters (copy_body_data *id, gimple stmt,
                               tree fn, basic_block bb)
 {
   tree parms;
-  tree a;
+  size_t i;
   tree p;
   tree vars = NULL_TREE;
-  call_expr_arg_iterator iter;
-  tree static_chain = CALL_EXPR_STATIC_CHAIN (exp);
+  tree static_chain = gimple_call_chain (stmt);
 
   /* Figure out what the parameters are.  */
   parms = DECL_ARGUMENTS (fn);
 
   /* Loop through the parameter declarations, replacing each with an
      equivalent VAR_DECL, appropriately initialized.  */
-  for (p = parms, a = first_call_expr_arg (exp, &iter); p;
-       a = next_call_expr_arg (&iter), p = TREE_CHAIN (p))
-    setup_one_parameter (id, p, a, fn, bb, &vars);
+  for (p = parms, i = 0; p; p = TREE_CHAIN (p), i++)
+    {
+      tree val;
+      val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
+      setup_one_parameter (id, p, val, fn, bb, &vars);
+    }
 
   /* Initialize the static chain.  */
   p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
@@ -1646,6 +2084,7 @@ initialize_inlined_parameters (copy_body_data *id, tree exp,
   declare_inline_vars (id->block, vars);
 }
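
The argument loop above replaces the CALL_EXPR argument iterator with indexed access on the call tuple.  A compact sketch of that access pattern; the callback parameter and wrapper name are illustrative:

    /* Illustrative only: apply VISIT to each actual argument of the
       GIMPLE_CALL statement STMT.  */
    static void
    visit_call_args (gimple stmt, void (*visit) (tree))
    {
      size_t i;

      for (i = 0; i < gimple_call_num_args (stmt); i++)
        visit (gimple_call_arg (stmt, i));
    }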
 
+
 /* Declare a return variable to replace the RESULT_DECL for the
    function we are calling.  An appropriate DECL_STMT is returned.
    The USE_STMT is filled to contain a use of the declaration to
@@ -1653,7 +2092,7 @@ initialize_inlined_parameters (copy_body_data *id, tree exp,
 
    RETURN_SLOT, if non-null, is the place where to store the result.  It
    is set only for CALL_EXPR_RETURN_SLOT_OPT.  MODIFY_DEST, if non-null,
-   was the LHS of the GIMPLE_MODIFY_STMT to which this call is the RHS.
+   was the LHS of the MODIFY_EXPR to which this call is the RHS.
 
    The return value is a (possibly null) value that is the result of the
    function as seen by the callee.  *USE_P is a (possibly null) value that
@@ -1833,37 +2272,89 @@ declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
 bool
 tree_inlinable_function_p (tree fn)
 {
-  return inlinable_function_p (fn);
+  bool ret = inlinable_function_p (fn);
+
+  if (getenv ("TUPLES_INLINE"))
+    fprintf (stderr, "Function %s is %sinlinable\n", get_name (fn),
+            ret ? "" : "not ");
+
+  return ret;
+}
+
+static const char *inline_forbidden_reason;
+
+/* A callback for walk_gimple_seq to handle tree operands.  Returns
+   NULL_TREE if a function can be inlined, otherwise sets the reason
+   why not and returns a tree representing the offending operand.  */
+
+static tree
+inline_forbidden_p_op (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
+                         void *fnp ATTRIBUTE_UNUSED)
+{
+  tree node = *nodep;
+  tree t;
+
+  if (TREE_CODE (node) == RECORD_TYPE || TREE_CODE (node) == UNION_TYPE)
+    {
+      /* We cannot inline a function of the form
+
+          void F (int i) { struct S { int ar[i]; } s; }
+
+        Attempting to do so produces a catch-22.
+        If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
+        UNION_TYPE nodes, then it goes into infinite recursion on a
+        structure containing a pointer to its own type.  If it doesn't,
+        then the type node for S doesn't get adjusted properly when
+        F is inlined. 
+
+        ??? This is likely no longer true, but it's too late in the 4.0
+        cycle to try to find out.  This should be checked for 4.1.  */
+      for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
+       if (variably_modified_type_p (TREE_TYPE (t), NULL))
+         {
+           inline_forbidden_reason
+             = G_("function %q+F can never be inlined "
+                  "because it uses variable sized variables");
+           return node;
+         }
+    }
+
+  return NULL_TREE;
 }
 
-static const char *inline_forbidden_reason;
+
+/* A callback for walk_gimple_seq to handle statements.  Returns
+   non-NULL iff a function cannot be inlined.  Also sets the reason
+   why.  */
 
 static tree
-inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
-                     void *fnp)
+inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
+                        struct walk_stmt_info *wip)
 {
-  tree node = *nodep;
-  tree fn = (tree) fnp;
+  tree fn = (tree) wip->info;
   tree t;
+  gimple stmt = gsi_stmt (*gsi);
 
-  switch (TREE_CODE (node))
+  switch (gimple_code (stmt))
     {
-    case CALL_EXPR:
+    case GIMPLE_CALL:
       /* Refuse to inline an alloca call unless the user has explicitly
         forced it, since this may change the program's memory overhead
         drastically when the function using alloca is called in a loop.
         In the GCC sources included in SPEC2000, inlining into
         schedule_block caused it to require 2GB of RAM instead of 256MB.  */
-      if (alloca_call_p (node)
+      if (gimple_alloca_call_p (stmt)
          && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
        {
          inline_forbidden_reason
            = G_("function %q+F can never be inlined because it uses "
                 "alloca (override using the always_inline attribute)");
-         return node;
+         *handled_ops_p = true;
+         return fn;
        }
-      t = get_callee_fndecl (node);
-      if (! t)
+
+      t = gimple_call_fndecl (stmt);
+      if (t == NULL_TREE)
        break;
 
       /* We cannot inline functions that call setjmp.  */
@@ -1871,7 +2362,8 @@ inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
        {
          inline_forbidden_reason
            = G_("function %q+F can never be inlined because it uses setjmp");
-         return node;
+         *handled_ops_p = true;
+         return t;
        }
 
       if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
@@ -1885,7 +2377,8 @@ inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because it "
                   "uses variable argument lists");
-           return node;
+           *handled_ops_p = true;
+           return t;
 
          case BUILT_IN_LONGJMP:
            /* We can't inline functions that call __builtin_longjmp at
@@ -1896,14 +2389,16 @@ inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because "
                   "it uses setjmp-longjmp exception handling");
-           return node;
+           *handled_ops_p = true;
+           return t;
 
          case BUILT_IN_NONLOCAL_GOTO:
            /* Similarly.  */
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because "
                   "it uses non-local goto");
-           return node;
+           *handled_ops_p = true;
+           return t;
 
          case BUILT_IN_RETURN:
          case BUILT_IN_APPLY_ARGS:
@@ -1914,15 +2409,16 @@ inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
            inline_forbidden_reason
              = G_("function %q+F can never be inlined because "
                   "it uses __builtin_return or __builtin_apply_args");
-           return node;
+           *handled_ops_p = true;
+           return t;
 
          default:
            break;
          }
       break;
 
-    case GOTO_EXPR:
-      t = TREE_OPERAND (node, 0);
+    case GIMPLE_GOTO:
+      t = gimple_goto_dest (stmt);
 
       /* We will not inline a function which uses computed goto.  The
         addresses of its local labels, which may be tucked into
@@ -1933,12 +2429,13 @@ inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
          inline_forbidden_reason
            = G_("function %q+F can never be inlined "
                 "because it contains a computed goto");
-         return node;
+         *handled_ops_p = true;
+         return t;
        }
       break;
 
-    case LABEL_EXPR:
-      t = TREE_OPERAND (node, 0);
+    case GIMPLE_LABEL:
+      t = gimple_label_label (stmt);
       if (DECL_NONLOCAL (t))
        {
          /* We cannot inline a function that receives a non-local goto
@@ -1947,41 +2444,20 @@ inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
          inline_forbidden_reason
            = G_("function %q+F can never be inlined "
                 "because it receives a non-local goto");
-         return node;
+         *handled_ops_p = true;
+         return t;
        }
       break;
 
-    case RECORD_TYPE:
-    case UNION_TYPE:
-      /* We cannot inline a function of the form
-
-          void F (int i) { struct S { int ar[i]; } s; }
-
-        Attempting to do so produces a catch-22.
-        If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
-        UNION_TYPE nodes, then it goes into infinite recursion on a
-        structure containing a pointer to its own type.  If it doesn't,
-        then the type node for S doesn't get adjusted properly when
-        F is inlined. 
-
-        ??? This is likely no longer true, but it's too late in the 4.0
-        cycle to try to find out.  This should be checked for 4.1.  */
-      for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
-       if (variably_modified_type_p (TREE_TYPE (t), NULL))
-         {
-           inline_forbidden_reason
-             = G_("function %q+F can never be inlined "
-                  "because it uses variable sized variables");
-           return node;
-         }
-
     default:
       break;
     }
 
+  *handled_ops_p = false;
   return NULL_TREE;
 }
 
+
 static tree
 inline_forbidden_p_2 (tree *nodep, int *walk_subtrees,
                      void *fnp)
@@ -2003,25 +2479,35 @@ inline_forbidden_p_2 (tree *nodep, int *walk_subtrees,
   return NULL_TREE;
 }
 
-/* Return subexpression representing possible alloca call, if any.  */
-static tree
+/* Return true if FNDECL is a function that cannot be inlined into
+   another one.  */
+
+static bool
 inline_forbidden_p (tree fndecl)
 {
   location_t saved_loc = input_location;
-  block_stmt_iterator bsi;
-  basic_block bb;
-  tree ret = NULL_TREE;
   struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
   tree step;
+  struct walk_stmt_info wi;
+  struct pointer_set_t *visited_nodes;
+  basic_block bb;
+  bool forbidden_p = false;
+
+  visited_nodes = pointer_set_create ();
+  memset (&wi, 0, sizeof (wi));
+  wi.info = (void *) fndecl;
+  wi.pset = visited_nodes;
 
   FOR_EACH_BB_FN (bb, fun)
-    for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
-      {
-       ret = walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
-                                           inline_forbidden_p_1, fndecl);
-       if (ret)
-         goto egress;
-      }
+    {
+      gimple ret;
+      gimple_seq seq = bb_seq (bb);
+      ret = walk_gimple_seq (seq, inline_forbidden_p_stmt,
+                            inline_forbidden_p_op, &wi);
+      forbidden_p = (ret != NULL);
+      if (forbidden_p)
+       goto egress;
+    }
 
   for (step = fun->local_decls; step; step = TREE_CHAIN (step))
     {
@@ -2030,15 +2516,20 @@ inline_forbidden_p (tree fndecl)
          && TREE_STATIC (decl)
          && !DECL_EXTERNAL (decl)
          && DECL_INITIAL (decl))
-       ret = walk_tree_without_duplicates (&DECL_INITIAL (decl),
-                                           inline_forbidden_p_2, fndecl);
-       if (ret)
-         goto egress;
+        {
+         tree ret;
+         ret = walk_tree_without_duplicates (&DECL_INITIAL (decl),
+                                             inline_forbidden_p_2, fndecl);
+         forbidden_p = (ret != NULL);
+         if (forbidden_p)
+           goto egress;
+        }
     }
 
 egress:
+  pointer_set_destroy (visited_nodes);
   input_location = saved_loc;
-  return ret;
+  return forbidden_p;
 }
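
inline_forbidden_p now drives a statement callback and an operand callback through walk_gimple_seq instead of calling walk_tree_without_duplicates on each statement.  A stripped-down sketch of that callback wiring, using the same walk_stmt_info and walk_gimple_seq interfaces as the hunk; the callback and wrapper names are illustrative:

    /* Illustrative only: statement callback.  Setting *HANDLED_OPS_P and
       returning non-NULL stops the walk at this statement.  */
    static tree
    find_call_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
                    struct walk_stmt_info *wi ATTRIBUTE_UNUSED)
    {
      gimple stmt = gsi_stmt (*gsi);

      if (gimple_code (stmt) == GIMPLE_CALL)
        {
          *handled_ops_p = true;
          return gimple_call_fn (stmt);
        }

      *handled_ops_p = false;
      return NULL_TREE;
    }

    /* Illustrative only: return true if SEQ contains a call statement.  */
    static bool
    seq_contains_call_p (gimple_seq seq)
    {
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      return walk_gimple_seq (seq, find_call_stmt, NULL, &wi) != NULL;
    }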
 
 /* Returns nonzero if FN is a function that does not have any
@@ -2089,7 +2580,7 @@ inlinable_function_p (tree fn)
   /* If we don't have the function body available, we can't inline it.
      However, this should not be recorded since we also get here for
      forward declared inline functions.  Therefore, return at once.  */
-  if (!DECL_SAVED_TREE (fn))
+  if (!gimple_body (fn))
     return false;
 
   else if (inline_forbidden_p (fn))
@@ -2132,151 +2623,21 @@ estimate_move_cost (tree type)
     return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
 }
 
-/* Arguments for estimate_num_insns_1.  */
-
-struct eni_data
-{
-  /* Used to return the number of insns.  */
-  int count;
-
-  /* Weights of various constructs.  */
-  eni_weights *weights;
-};
+/* Returns the cost of operation CODE, according to WEIGHTS.  */
 
-/* Used by estimate_num_insns.  Estimate number of instructions seen
-   by given statement.  */
-
-static tree
-estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
+static int
+estimate_operator_cost (enum tree_code code, eni_weights *weights)
 {
-  struct eni_data *const d = (struct eni_data *) data;
-  tree x = *tp;
-  unsigned cost;
-
-  if (IS_TYPE_OR_DECL_P (x))
+  switch (code)
     {
-      *walk_subtrees = 0;
-      return NULL;
-    }
-  /* Assume that constants and references counts nothing.  These should
-     be majorized by amount of operations among them we count later
-     and are common target of CSE and similar optimizations.  */
-  else if (CONSTANT_CLASS_P (x) || REFERENCE_CLASS_P (x))
-    return NULL;
-
-  switch (TREE_CODE (x))
-    {
-    /* Containers have no cost.  */
-    case TREE_LIST:
-    case TREE_VEC:
-    case BLOCK:
-    case COMPONENT_REF:
-    case BIT_FIELD_REF:
-    case INDIRECT_REF:
-    case ALIGN_INDIRECT_REF:
-    case MISALIGNED_INDIRECT_REF:
-    case ARRAY_REF:
-    case ARRAY_RANGE_REF:
-    case OBJ_TYPE_REF:
-    case EXC_PTR_EXPR: /* ??? */
-    case FILTER_EXPR: /* ??? */
-    case COMPOUND_EXPR:
-    case BIND_EXPR:
-    case WITH_CLEANUP_EXPR:
-    case PAREN_EXPR:
-    CASE_CONVERT:
-    case VIEW_CONVERT_EXPR:
-    case SAVE_EXPR:
-    case ADDR_EXPR:
-    case COMPLEX_EXPR:
+    /* These are "free" conversions, or their presumed cost
+       is folded into other operations.  */
     case RANGE_EXPR:
-    case CASE_LABEL_EXPR:
-    case SSA_NAME:
-    case CATCH_EXPR:
-    case EH_FILTER_EXPR:
-    case STATEMENT_LIST:
-    case ERROR_MARK:
-    case FDESC_EXPR:
-    case VA_ARG_EXPR:
-    case TRY_CATCH_EXPR:
-    case TRY_FINALLY_EXPR:
-    case LABEL_EXPR:
-    case GOTO_EXPR:
-    case RETURN_EXPR:
-    case EXIT_EXPR:
-    case LOOP_EXPR:
-    case PHI_NODE:
-    case WITH_SIZE_EXPR:
-    case OMP_CLAUSE:
-    case OMP_RETURN:
-    case OMP_CONTINUE:
-    case OMP_SECTIONS_SWITCH:
-    case OMP_ATOMIC_STORE:
-      break;
-
-    /* We don't account constants for now.  Assume that the cost is amortized
-       by operations that do use them.  We may re-consider this decision once
-       we are able to optimize the tree before estimating its size and break
-       out static initializers.  */
-    case IDENTIFIER_NODE:
-    case INTEGER_CST:
-    case REAL_CST:
-    case FIXED_CST:
-    case COMPLEX_CST:
-    case VECTOR_CST:
-    case STRING_CST:
-    case PREDICT_EXPR:
-      *walk_subtrees = 0;
-      return NULL;
-
-      /* CHANGE_DYNAMIC_TYPE_EXPR explicitly expands to nothing.  */
-    case CHANGE_DYNAMIC_TYPE_EXPR:
-      *walk_subtrees = 0;
-      return NULL;
-
-    /* Try to estimate the cost of assignments.  We have three cases to
-       deal with:
-       1) Simple assignments to registers;
-       2) Stores to things that must live in memory.  This includes
-          "normal" stores to scalars, but also assignments of large
-          structures, or constructors of big arrays;
-       3) TARGET_EXPRs.
-
-       Let us look at the first two cases, assuming we have "a = b + C":
-       <GIMPLE_MODIFY_STMT <var_decl "a">
-                                  <plus_expr <var_decl "b"> <constant C>>
-       If "a" is a GIMPLE register, the assignment to it is free on almost
-       any target, because "a" usually ends up in a real register.  Hence
-       the only cost of this expression comes from the PLUS_EXPR, and we
-       can ignore the GIMPLE_MODIFY_STMT.
-       If "a" is not a GIMPLE register, the assignment to "a" will most
-       likely be a real store, so the cost of the GIMPLE_MODIFY_STMT is the cost
-       of moving something into "a", which we compute using the function
-       estimate_move_cost.
-
-       The third case deals with TARGET_EXPRs, for which the semantics are
-       that a temporary is assigned, unless the TARGET_EXPR itself is being
-       assigned to something else.  In the latter case we do not need the
-       temporary.  E.g. in:
-                       <GIMPLE_MODIFY_STMT <var_decl "a"> <target_expr>>, the
-       GIMPLE_MODIFY_STMT is free.  */
-    case INIT_EXPR:
-    case GIMPLE_MODIFY_STMT:
-      /* Is the right and side a TARGET_EXPR?  */
-      if (TREE_CODE (GENERIC_TREE_OPERAND (x, 1)) == TARGET_EXPR)
-       break;
-      /* ... fall through ...  */
-
-    case TARGET_EXPR:
-      x = GENERIC_TREE_OPERAND (x, 0);
-      /* Is this an assignments to a register?  */
-      if (is_gimple_reg (x))
-       break;
-      /* Otherwise it's a store, so fall through to compute the move cost.  */
-
-    case CONSTRUCTOR:
-      d->count += estimate_move_cost (TREE_TYPE (x));
-      break;
+    case CONVERT_EXPR:
+    case COMPLEX_EXPR:
+    case PAREN_EXPR:
+    case NOP_EXPR:
+      return 0;
 
     /* Assign cost of 1 to usual operations.
        ??? We may consider mapping RTL costs to this.  */
@@ -2339,15 +2700,15 @@ estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
     case POSTDECREMENT_EXPR:
     case POSTINCREMENT_EXPR:
 
-    case ASM_EXPR:
-
     case REALIGN_LOAD_EXPR:
 
     case REDUC_MAX_EXPR:
     case REDUC_MIN_EXPR:
     case REDUC_PLUS_EXPR:
     case WIDEN_SUM_EXPR:
-    case DOT_PROD_EXPR: 
+    case WIDEN_MULT_EXPR:
+    case DOT_PROD_EXPR:
+
     case VEC_WIDEN_MULT_HI_EXPR:
     case VEC_WIDEN_MULT_LO_EXPR:
     case VEC_UNPACK_HI_EXPR:
@@ -2357,26 +2718,12 @@ estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
     case VEC_PACK_TRUNC_EXPR:
     case VEC_PACK_SAT_EXPR:
     case VEC_PACK_FIX_TRUNC_EXPR:
-
-    case WIDEN_MULT_EXPR:
-
     case VEC_EXTRACT_EVEN_EXPR:
     case VEC_EXTRACT_ODD_EXPR:
     case VEC_INTERLEAVE_HIGH_EXPR:
     case VEC_INTERLEAVE_LOW_EXPR:
 
-    case RESX_EXPR:
-      d->count += 1;
-      break;
-
-    case SWITCH_EXPR:
-      /* Take into account cost of the switch + guess 2 conditional jumps for
-         each case label.  
-
-        TODO: once the switch expansion logic is sufficiently separated, we can
-        do better job on estimating cost of the switch.  */
-      d->count += TREE_VEC_LENGTH (SWITCH_LABELS (x)) * 2;
-      break;
+      return 1;
 
     /* Few special cases of expensive operations.  This is useful
        to avoid inlining on functions having too many of these.  */
@@ -2390,34 +2737,115 @@ estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
     case FLOOR_MOD_EXPR:
     case ROUND_MOD_EXPR:
     case RDIV_EXPR:
-      d->count += d->weights->div_mod_cost;
+      return weights->div_mod_cost;
+
+    default:
+      /* We expect a copy assignment with no operator.  */
+      gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
+      return 0;
+    }
+}
+
+
+/* Estimate number of instructions that will be created by expanding
+   the statements in the statement sequence STMTS.
+   WEIGHTS contains weights attributed to various constructs.  */
+
+static int
+estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
+{
+  int cost;
+  gimple_stmt_iterator gsi;
+
+  cost = 0;
+  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
+    cost += estimate_num_insns (gsi_stmt (gsi), weights);
+
+  return cost;
+}
+
+
+/* Estimate number of instructions that will be created by expanding STMT.
+   WEIGHTS contains weights attributed to various constructs.  */
+
+int
+estimate_num_insns (gimple stmt, eni_weights *weights)
+{
+  unsigned cost, i;
+  enum gimple_code code = gimple_code (stmt);
+  tree lhs;
+
+  switch (code)
+    {
+    case GIMPLE_ASSIGN:
+      /* Try to estimate the cost of assignments.  We have two cases to
+        deal with:
+        1) Simple assignments to registers;
+        2) Stores to things that must live in memory.  This includes
+           "normal" stores to scalars, but also assignments of large
+           structures, or constructors of big arrays.
+
+        Let us look at both cases, assuming we have "a = b + C":
+        <GIMPLE_ASSIGN <var_decl "a">
+               <plus_expr <var_decl "b"> <constant C>>
+        If "a" is a GIMPLE register, the assignment to it is free on almost
+        any target, because "a" usually ends up in a real register.  Hence
+        the only cost of this expression comes from the PLUS_EXPR, and we
+        can ignore the GIMPLE_ASSIGN.
+        If "a" is not a GIMPLE register, the assignment to "a" will most
+        likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
+        of moving something into "a", which we compute using the function
+        estimate_move_cost.  */
+      lhs = gimple_assign_lhs (stmt);
+      if (is_gimple_reg (lhs))
+       cost = 0;
+      else
+       cost = estimate_move_cost (TREE_TYPE (lhs));
+
+      cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights);
+      break;
+
+    case GIMPLE_COND:
+      cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights);
+      break;
+
+    case GIMPLE_SWITCH:
+      /* Take into account the cost of the switch + guess 2 conditional
+         jumps for each case label.
+
+        TODO: once the switch expansion logic is sufficiently separated, we
+        can do a better job of estimating the cost of the switch.  */
+      cost = gimple_switch_num_labels (stmt) * 2;
       break;
-    case CALL_EXPR:
+
+    case GIMPLE_CALL:
       {
-       tree decl = get_callee_fndecl (x);
-       tree addr = CALL_EXPR_FN (x);
+       tree decl = gimple_call_fndecl (stmt);
+       tree addr = gimple_call_fn (stmt);
        tree funtype = TREE_TYPE (addr);
 
-       gcc_assert (POINTER_TYPE_P (funtype));
-       funtype = TREE_TYPE (funtype);
+       if (POINTER_TYPE_P (funtype))
+         funtype = TREE_TYPE (funtype);
 
        if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
-         cost = d->weights->target_builtin_call_cost;
+         cost = weights->target_builtin_call_cost;
        else
-         cost = d->weights->call_cost;
+         cost = weights->call_cost;
        
        if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (decl))
            {
            case BUILT_IN_CONSTANT_P:
-             *walk_subtrees = 0;
-             return NULL_TREE;
+             return 0;
            case BUILT_IN_EXPECT:
-             return NULL_TREE;
+             cost = 0;
+             break;
+
            /* Prefetch instruction is not expensive.  */
            case BUILT_IN_PREFETCH:
-             cost = 1;
+             cost = weights->target_builtin_call_cost;
              break;
+
            default:
              break;
            }
@@ -2425,96 +2853,117 @@ estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
        if (decl)
          funtype = TREE_TYPE (decl);
 
-       /* Our cost must be kept in sync with cgraph_estimate_size_after_inlining
-          that does use function declaration to figure out the arguments. 
-
-          When we deal with function with no body nor prototype, base estimates on
-          actual parameters of the call expression.  Otherwise use either the actual
-          arguments types or function declaration for more precise answer.  */
+       /* Our cost must be kept in sync with
+          cgraph_estimate_size_after_inlining, which uses the function
+          declaration to figure out the arguments.  */
        if (decl && DECL_ARGUMENTS (decl))
          {
            tree arg;
            for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
-             d->count += estimate_move_cost (TREE_TYPE (arg));
+             cost += estimate_move_cost (TREE_TYPE (arg));
          }
        else if (funtype && prototype_p (funtype))
          {
            tree t;
            for (t = TYPE_ARG_TYPES (funtype); t; t = TREE_CHAIN (t))
-             d->count += estimate_move_cost (TREE_VALUE (t));
+             cost += estimate_move_cost (TREE_VALUE (t));
          }
        else
          {
-           tree a;
-           call_expr_arg_iterator iter;
-           FOR_EACH_CALL_EXPR_ARG (a, iter, x)
-             d->count += estimate_move_cost (TREE_TYPE (a));
+           for (i = 0; i < gimple_call_num_args (stmt); i++)
+             {
+               tree arg = gimple_call_arg (stmt, i);
+               cost += estimate_move_cost (TREE_TYPE (arg));
+             }
          }
 
-       d->count += cost;
        break;
       }
 
-    case OMP_PARALLEL:
-    case OMP_TASK:
-    case OMP_FOR:
-    case OMP_SECTIONS:
-    case OMP_SINGLE:
-    case OMP_SECTION:
-    case OMP_MASTER:
-    case OMP_ORDERED:
-    case OMP_CRITICAL:
-    case OMP_ATOMIC:
-    case OMP_ATOMIC_LOAD:
-      /* OpenMP directives are generally very expensive.  */
-      d->count += d->weights->omp_cost;
-      break;
+    case GIMPLE_GOTO:
+    case GIMPLE_LABEL:
+    case GIMPLE_NOP:
+    case GIMPLE_PHI:
+    case GIMPLE_RETURN:
+    case GIMPLE_CHANGE_DYNAMIC_TYPE:
+    case GIMPLE_PREDICT:
+      return 0;
+
+    case GIMPLE_ASM:
+    case GIMPLE_RESX:
+      return 1;
+
+    case GIMPLE_BIND:
+      return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
+
+    case GIMPLE_EH_FILTER:
+      return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
+
+    case GIMPLE_CATCH:
+      return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
+
+    case GIMPLE_TRY:
+      return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
+              + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
+
+    /* OpenMP directives are generally very expensive.  */
+
+    case GIMPLE_OMP_RETURN:
+    case GIMPLE_OMP_SECTIONS_SWITCH:
+    case GIMPLE_OMP_ATOMIC_STORE:
+    case GIMPLE_OMP_CONTINUE:
+      /* ...except these, which are cheap.  */
+      return 0;
+
+    case GIMPLE_OMP_ATOMIC_LOAD:
+      return weights->omp_cost;
+
+    case GIMPLE_OMP_FOR:
+      return (weights->omp_cost
+              + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
+              + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
+
+    case GIMPLE_OMP_PARALLEL:
+    case GIMPLE_OMP_TASK:
+    case GIMPLE_OMP_CRITICAL:
+    case GIMPLE_OMP_MASTER:
+    case GIMPLE_OMP_ORDERED:
+    case GIMPLE_OMP_SECTION:
+    case GIMPLE_OMP_SECTIONS:
+    case GIMPLE_OMP_SINGLE:
+      return (weights->omp_cost
+              + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
 
     default:
       gcc_unreachable ();
     }
-  return NULL;
+
+  return cost;
 }
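
As a worked illustration of the per-statement accounting implemented above (the concrete numbers depend on the weights passed in, so the figures below only show the shape of the computation for some eni_weights W; the statements are illustrative GIMPLE):

    /* Illustrative only:

         a_1 = b_2 + c_3;    LHS is a GIMPLE register, so move cost is 0;
                             PLUS_EXPR is a "usual" operator   => 0 + 1
         *p_4 = x_5 / y_6;   LHS lives in memory               => move cost
                             TRUNC_DIV_EXPR                    + W.div_mod_cost
         foo (x_5, y_6);     GIMPLE_CALL                       => W.call_cost
                             plus one estimate_move_cost per argument.  */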
 
-/* Estimate number of instructions that will be created by expanding EXPR.
-   WEIGHTS contains weights attributed to various constructs.  */
+/* Estimate number of instructions that will be created by expanding
+   function FNDECL.  WEIGHTS contains weights attributed to various
+   constructs.  */
 
 int
-estimate_num_insns (tree expr, eni_weights *weights)
+estimate_num_insns_fn (tree fndecl, eni_weights *weights)
 {
-  struct pointer_set_t *visited_nodes;
+  struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
+  gimple_stmt_iterator bsi;
   basic_block bb;
-  block_stmt_iterator bsi;
-  struct function *my_function;
-  struct eni_data data;
-
-  data.count = 0;
-  data.weights = weights;
+  int n = 0;
 
-  /* If we're given an entire function, walk the CFG.  */
-  if (TREE_CODE (expr) == FUNCTION_DECL)
+  gcc_assert (my_function && my_function->cfg);
+  FOR_EACH_BB_FN (bb, my_function)
     {
-      my_function = DECL_STRUCT_FUNCTION (expr);
-      gcc_assert (my_function && my_function->cfg);
-      visited_nodes = pointer_set_create ();
-      FOR_EACH_BB_FN (bb, my_function)
-       {
-         for (bsi = bsi_start (bb);
-              !bsi_end_p (bsi);
-              bsi_next (&bsi))
-           {
-             walk_tree (bsi_stmt_ptr (bsi), estimate_num_insns_1,
-                        &data, visited_nodes);
-           }
-       }
-      pointer_set_destroy (visited_nodes);
+      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
+       n += estimate_num_insns (gsi_stmt (bsi), weights);
     }
-  else
-    walk_tree_without_duplicates (&expr, estimate_num_insns_1, &data);
 
-  return data.count;
+  return n;
 }
 
+
 /* Initializes weights used by estimate_num_insns.  */
 
 void
@@ -2540,7 +2989,22 @@ init_inline_once (void)
   eni_time_weights.omp_cost = 40;
 }
 
+/* Estimate the number of instructions in a gimple_seq.  */
+
+int
+count_insns_seq (gimple_seq seq, eni_weights *weights)
+{
+  gimple_stmt_iterator gsi;
+  int n = 0;
+  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
+    n += estimate_num_insns (gsi_stmt (gsi), weights);
+
+  return n;
+}
+
+
 /* Install new lexical TREE_BLOCK underneath 'current_block'.  */
+
 static void
 add_lexical_block (tree current_block, tree new_block)
 {
@@ -2558,7 +3022,7 @@ add_lexical_block (tree current_block, tree new_block)
 /* Fetch callee declaration from the call graph edge going from NODE and
    associated with call statement STMT.  Return NULL_TREE if not found.  */
 static tree
-get_indirect_callee_fndecl (struct cgraph_node *node, tree stmt)
+get_indirect_callee_fndecl (struct cgraph_node *node, gimple stmt)
 {
   struct cgraph_edge *cs;
 
@@ -2569,13 +3033,11 @@ get_indirect_callee_fndecl (struct cgraph_node *node, tree stmt)
   return NULL_TREE;
 }
 
-/* If *TP is a CALL_EXPR, replace it with its inline expansion.  */
+/* If STMT is a GIMPLE_CALL, replace it with its inline expansion.  */
 
 static bool
-expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
+expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
 {
-  copy_body_data *id;
-  tree t;
   tree retvar, use_retvar;
   tree fn;
   struct pointer_map_t *st;
@@ -2586,29 +3048,25 @@ expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
   const char *reason;
   basic_block return_block;
   edge e;
-  block_stmt_iterator bsi, stmt_bsi;
+  gimple_stmt_iterator gsi, stmt_gsi;
   bool successfully_inlined = FALSE;
   bool purge_dead_abnormal_edges;
   tree t_step;
   tree var;
 
-  /* See what we've got.  */
-  id = (copy_body_data *) data;
-  t = *tp;
-
   /* Set input_location here so we get the right instantiation context
      if we call instantiate_decl from inlinable_function_p.  */
   saved_location = input_location;
-  if (EXPR_HAS_LOCATION (t))
-    input_location = EXPR_LOCATION (t);
+  if (gimple_has_location (stmt))
+    input_location = gimple_location (stmt);
 
   /* From here on, we're only interested in CALL_EXPRs.  */
-  if (TREE_CODE (t) != CALL_EXPR)
+  if (gimple_code (stmt) != GIMPLE_CALL)
     goto egress;
 
   /* First, see if we can figure out what function is being called.
      If we cannot, then there is no hope of inlining the function.  */
-  fn = get_callee_fndecl (t);
+  fn = gimple_call_fndecl (stmt);
   if (!fn)
     {
       fn = get_indirect_callee_fndecl (id->dst_node, stmt);
@@ -2619,16 +3077,16 @@ expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
   /* Turn forward declarations into real ones.  */
   fn = cgraph_node (fn)->decl;
 
-  /* If fn is a declaration of a function in a nested scope that was
+  /* If FN is a declaration of a function in a nested scope that was
      globally declared inline, we don't set its DECL_INITIAL.
      However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
      C++ front-end uses it for cdtors to refer to their internal
      declarations, that are not real functions.  Fortunately those
      don't have trees to be saved, so we can tell by checking their
-     DECL_SAVED_TREE.  */
-  if (! DECL_INITIAL (fn)
+     gimple_body.  */
+  if (!DECL_INITIAL (fn)
       && DECL_ABSTRACT_ORIGIN (fn)
-      && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
+      && gimple_body (DECL_ABSTRACT_ORIGIN (fn)))
     fn = DECL_ABSTRACT_ORIGIN (fn);
 
   /* Objective C and Fortran still call tree_rest_of_compilation directly.
@@ -2701,7 +3159,7 @@ expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
   /* We will be inlining this callee.  */
   id->eh_region = lookup_stmt_eh_region (stmt);
 
-  /* Split the block holding the CALL_EXPR.  */
+  /* Split the block holding the GIMPLE_CALL.  */
   e = split_block (bb, stmt);
   bb = e->src;
   return_block = e->dest;
@@ -2710,26 +3168,26 @@ expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
   /* split_block splits after the statement; work around this by
      moving the call into the second block manually.  Not pretty,
      but seems easier than doing the CFG manipulation by hand
-     when the CALL_EXPR is in the last statement of BB.  */
-  stmt_bsi = bsi_last (bb);
-  bsi_remove (&stmt_bsi, false);
+     when the GIMPLE_CALL is in the last statement of BB.  */
+  stmt_gsi = gsi_last_bb (bb);
+  gsi_remove (&stmt_gsi, false);
 
-  /* If the CALL_EXPR was in the last statement of BB, it may have
+  /* If the GIMPLE_CALL was in the last statement of BB, it may have
      been the source of abnormal edges.  In this case, schedule
      the removal of dead abnormal edges.  */
-  bsi = bsi_start (return_block);
-  if (bsi_end_p (bsi))
+  gsi = gsi_start_bb (return_block);
+  if (gsi_end_p (gsi))
     {
-      bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
+      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
       purge_dead_abnormal_edges = true;
     }
   else
     {
-      bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
+      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
       purge_dead_abnormal_edges = false;
     }
 
-  stmt_bsi = bsi_start (return_block);
+  stmt_gsi = gsi_start_bb (return_block);
 
   /* Build a block containing code to initialize the arguments, the
      actual inline expansion of the body, and a label for the return
@@ -2738,7 +3196,7 @@ expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
   id->block = make_node (BLOCK);
   BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
   BLOCK_SOURCE_LOCATION (id->block) = input_location;
-  add_lexical_block (TREE_BLOCK (stmt), id->block);
+  add_lexical_block (gimple_block (stmt), id->block);
 
   /* Local declarations will be replaced by their equivalents in this
      map.  */
@@ -2749,27 +3207,26 @@ expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
   id->src_fn = fn;
   id->src_node = cg_edge->callee;
   id->src_cfun = DECL_STRUCT_FUNCTION (fn);
-  id->call_expr = t;
+  id->gimple_call = stmt;
 
   gcc_assert (!id->src_cfun->after_inlining);
 
   id->entry_bb = bb;
-  initialize_inlined_parameters (id, t, fn, bb);
+  initialize_inlined_parameters (id, stmt, fn, bb);
 
   if (DECL_INITIAL (fn))
     add_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
 
   /* Return statements in the function body will be replaced by jumps
      to the RET_LABEL.  */
-
   gcc_assert (DECL_INITIAL (fn));
   gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
 
-  /* Find the lhs to which the result of this call is assigned.  */
+  /* Find the LHS to which the result of this call is assigned.  */
   return_slot = NULL;
-  if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
+  if (gimple_call_lhs (stmt))
     {
-      modify_dest = GIMPLE_STMT_OPERAND (stmt, 0);
+      modify_dest = gimple_call_lhs (stmt);
 
       /* The function which we are inlining might not return a value,
         in which case we should issue a warning that the function
@@ -2779,7 +3236,8 @@ expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
         uninitialized variable.  */
       if (DECL_P (modify_dest))
        TREE_NO_WARNING (modify_dest) = 1;
-      if (CALL_EXPR_RETURN_SLOT_OPT (t))
+
+      if (gimple_call_return_slot_opt_p (stmt))
        {
          return_slot = modify_dest;
          modify_dest = NULL;
@@ -2800,8 +3258,7 @@ expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
     }
 
   /* Declare the return variable for the function.  */
-  retvar = declare_return_variable (id, return_slot,
-                                   modify_dest, &use_retvar);
+  retvar = declare_return_variable (id, return_slot, modify_dest, &use_retvar);
 
   if (DECL_IS_OPERATOR_NEW (fn))
     {
@@ -2835,56 +3292,65 @@ expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
   id->decl_map = st;
 
   /* If the inlined function returns a result that we care about,
-     clobber the CALL_EXPR with a reference to the return variable.  */
-  if (use_retvar && (TREE_CODE (bsi_stmt (stmt_bsi)) != CALL_EXPR))
+     substitute the GIMPLE_CALL with an assignment of the return
+     variable to the LHS of the call.  That is, if STMT was
+     'a = foo (...)', substitute the call with 'a = USE_RETVAR'.  */
+  if (use_retvar && gimple_call_lhs (stmt))
     {
-      *tp = use_retvar;
+      gimple old_stmt = stmt;
+      stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
+      gsi_replace (&stmt_gsi, stmt, false);
       if (gimple_in_ssa_p (cfun))
        {
           update_stmt (stmt);
           mark_symbols_for_renaming (stmt);
        }
-      maybe_clean_or_replace_eh_stmt (stmt, stmt);
+      maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
     }
   else
-    /* We're modifying a TSI owned by gimple_expand_calls_inline();
-       tsi_delink() will leave the iterator in a sane state.  */
     {
-      /* Handle case of inlining function that miss return statement so 
-         return value becomes undefined.  */
-      if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-         && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) == SSA_NAME)
+      /* Handle the case of inlining a function with no return
+        statement, which causes the return value to become undefined.  */
+      if (gimple_call_lhs (stmt)
+         && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
        {
-         tree name = GIMPLE_STMT_OPERAND (stmt, 0);
-         tree var = SSA_NAME_VAR (GIMPLE_STMT_OPERAND (stmt, 0));
+         tree name = gimple_call_lhs (stmt);
+         tree var = SSA_NAME_VAR (name);
          tree def = gimple_default_def (cfun, var);
 
-         /* If the variable is used undefined, make this name undefined via
-            move.  */
          if (def)
            {
-             GIMPLE_STMT_OPERAND (stmt, 1) = def;
+             /* If the variable is used undefined, make this name
+                undefined via a move.  */
+             stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
+             gsi_replace (&stmt_gsi, stmt, true);
              update_stmt (stmt);
            }
-         /* Otherwise make this variable undefined.  */
          else
            {
-             bsi_remove (&stmt_bsi, true);
+             /* Otherwise make this variable undefined.  */
+             gsi_remove (&stmt_gsi, true);
              set_default_def (var, name);
-             SSA_NAME_DEF_STMT (name) = build_empty_stmt ();
+             SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
            }
        }
       else
-        bsi_remove (&stmt_bsi, true);
+        gsi_remove (&stmt_gsi, true);
     }
 
   if (purge_dead_abnormal_edges)
-    tree_purge_dead_abnormal_call_edges (return_block);
+    gimple_purge_dead_abnormal_call_edges (return_block);
 
   /* If the value of the new expression is ignored, that's OK.  We
      don't warn about this for CALL_EXPRs, so we shouldn't warn about
      the equivalent inlined version either.  */
-  TREE_USED (*tp) = 1;
+  if (is_gimple_assign (stmt))
+    {
+      gcc_assert (gimple_assign_single_p (stmt)
+                 || gimple_assign_rhs_code (stmt) == NOP_EXPR
+                 || gimple_assign_rhs_code (stmt) == CONVERT_EXPR);
+      TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
+    }
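
The return-value handling above replaces the call tuple in place with an assignment, rather than patching a tree operand through a pointer.  A minimal sketch of that substitution, using the gimple_build_assign and gsi_replace calls from the hunk; it assumes the call has an LHS, and the helper name is illustrative:

    /* Illustrative only: replace the GIMPLE_CALL at *GSI, which is
       assumed to have an LHS, with 'lhs = retval'.  */
    static void
    replace_call_with_value (gimple_stmt_iterator *gsi, tree retval)
    {
      gimple call = gsi_stmt (*gsi);
      gimple assign = gimple_build_assign (gimple_call_lhs (call), retval);

      /* Passing false leaves EH information to be fixed up separately,
         as maybe_clean_or_replace_eh_stmt does above.  */
      gsi_replace (gsi, assign, false);
    }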
 
   /* Output the inlining info for this abstract function, since it has been
      inlined.  If we don't do this now, we can lose the information about the
@@ -2905,58 +3371,58 @@ expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
 
 /* Expand call statements reachable from STMT_P.
    We can only have CALL_EXPRs as the "toplevel" tree code or nested
-   in a GIMPLE_MODIFY_STMT.  See tree-gimple.c:get_call_expr_in().  We can
+   in a MODIFY_EXPR.  See tree-gimple.c:get_call_expr_in().  We can
    unfortunately not use that function here because we need a pointer
    to the CALL_EXPR, not the tree itself.  */
 
 static bool
 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
 {
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
 
-  /* Register specific tree functions.  */
-  tree_register_cfg_hooks ();
-  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
     {
-      tree *expr_p = bsi_stmt_ptr (bsi);
-      tree stmt = *expr_p;
+      gimple stmt = gsi_stmt (gsi);
 
-      if (TREE_CODE (*expr_p) == GIMPLE_MODIFY_STMT)
-       expr_p = &GIMPLE_STMT_OPERAND (*expr_p, 1);
-      if (TREE_CODE (*expr_p) == WITH_SIZE_EXPR)
-       expr_p = &TREE_OPERAND (*expr_p, 0);
-      if (TREE_CODE (*expr_p) == CALL_EXPR)
-       if (expand_call_inline (bb, stmt, expr_p, id))
-         return true;
+      if (is_gimple_call (stmt)
+         && expand_call_inline (bb, stmt, id))
+       return true;
     }
+
   return false;
 }
 
+
 /* Walk all basic blocks created after FIRST and try to fold every statement
    in the STATEMENTS pointer set.  */
+
 static void
 fold_marked_statements (int first, struct pointer_set_t *statements)
 {
-  for (;first < n_basic_blocks;first++)
+  for (; first < n_basic_blocks; first++)
     if (BASIC_BLOCK (first))
       {
-        block_stmt_iterator bsi;
-       for (bsi = bsi_start (BASIC_BLOCK (first));
-            !bsi_end_p (bsi); bsi_next (&bsi))
-         if (pointer_set_contains (statements, bsi_stmt (bsi)))
+        gimple_stmt_iterator gsi;
+
+       for (gsi = gsi_start_bb (BASIC_BLOCK (first));
+            !gsi_end_p (gsi);
+            gsi_next (&gsi))
+         if (pointer_set_contains (statements, gsi_stmt (gsi)))
            {
-             tree old_stmt = bsi_stmt (bsi);
-             tree old_call = get_call_expr_in (old_stmt);
+             gimple old_stmt = gsi_stmt (gsi);
 
-             if (fold_stmt (bsi_stmt_ptr (bsi)))
+             if (fold_stmt (&gsi))
                {
-                 update_stmt (bsi_stmt (bsi));
-                 if (old_call)
-                   cgraph_update_edges_for_call_stmt (old_stmt, old_call,
-                                                      bsi_stmt (bsi));
-                 if (maybe_clean_or_replace_eh_stmt (old_stmt,
-                                                     bsi_stmt (bsi)))
-                   tree_purge_dead_eh_edges (BASIC_BLOCK (first));
+                 /* Re-read the statement from GSI as fold_stmt() may
+                    have changed it.  */
+                 gimple new_stmt = gsi_stmt (gsi);
+                 update_stmt (new_stmt);
+
+                 if (is_gimple_call (old_stmt))
+                   cgraph_update_edges_for_call_stmt (old_stmt, new_stmt);
+
+                 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
+                   gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
                }
            }
       }
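
Because fold_stmt may substitute a different tuple at the iterator, the loop above re-reads the statement from GSI after folding before updating it.  A condensed sketch of that pattern; the wrapper name is illustrative:

    /* Illustrative only: fold the statement at *GSI and update whatever
       statement ends up there afterwards.  */
    static void
    fold_and_update (gimple_stmt_iterator *gsi)
    {
      gimple old_stmt = gsi_stmt (*gsi);

      if (fold_stmt (gsi))
        {
          gimple new_stmt = gsi_stmt (*gsi);

          update_stmt (new_stmt);
          if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
            gimple_purge_dead_eh_edges (gimple_bb (new_stmt));
        }
    }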
@@ -3020,6 +3486,9 @@ optimize_inline_calls (tree fn)
   free_dominance_info (CDI_DOMINATORS);
   free_dominance_info (CDI_POST_DOMINATORS);
 
+  /* Register specific gimple functions.  */
+  gimple_register_cfg_hooks ();
+
   /* Reach the trees by walking over the CFG, and note the
      enclosing basic-blocks in the call edges.  */
   /* We walk the blocks going forward, because inlined function bodies
@@ -3057,11 +3526,13 @@ optimize_inline_calls (tree fn)
   cgraph_node_remove_callees (id.dst_node);
 
   fold_cond_expr_cond ();
+
   /* It would be nice to check SSA/CFG/statement consistency here, but it is
      not possible yet - the IPA passes might make various functions to not
      throw and they don't care to proactively update local EH info.  This is
      done later in fixup_cfg pass that also execute the verification.  */
-  return (TODO_update_ssa | TODO_cleanup_cfg
+  return (TODO_update_ssa
+         | TODO_cleanup_cfg
          | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
          | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
 }
@@ -3076,7 +3547,6 @@ copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
 
   /* We make copies of most nodes.  */
   if (IS_EXPR_CODE_CLASS (cl)
-      || IS_GIMPLE_STMT_CODE_CLASS (cl)
       || code == TREE_LIST
       || code == TREE_VEC
       || code == TYPE_DECL
@@ -3086,8 +3556,7 @@ copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
         here.  */
       tree chain = NULL_TREE, new;
 
-      if (!GIMPLE_TUPLE_P (*tp))
-       chain = TREE_CHAIN (*tp);
+      chain = TREE_CHAIN (*tp);
 
       /* Copy the node.  */
       new = copy_node (*tp);
@@ -3247,7 +3716,7 @@ unsave_r (tree *tp, int *walk_subtrees, void *data)
     }
 
   else if (TREE_CODE (*tp) == STATEMENT_LIST)
-    copy_statement_list (tp);
+    gcc_unreachable ();
   else if (TREE_CODE (*tp) == BIND_EXPR)
     copy_bind_expr (tp, walk_subtrees, id);
   else if (TREE_CODE (*tp) == SAVE_EXPR)
@@ -3300,6 +3769,162 @@ unsave_expr_now (tree expr)
   return expr;
 }
 
+/* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
+   label, copies the declaration and enters it in the decl_map of the
+   copy_body_data passed in WI->INFO.  */
+
+static tree
+mark_local_labels_stmt (gimple_stmt_iterator *gsip,
+                       bool *handled_ops_p ATTRIBUTE_UNUSED,
+                       struct walk_stmt_info *wi)
+{
+  copy_body_data *id = (copy_body_data *) wi->info;
+  gimple stmt = gsi_stmt (*gsip);
+
+  if (gimple_code (stmt) == GIMPLE_LABEL)
+    {
+      tree decl = gimple_label_label (stmt);
+
+      /* Copy the decl and remember the copy.  */
+      insert_decl_map (id, decl, id->copy_decl (decl, id));
+    }
+
+  return NULL_TREE;
+}
+
+
+/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
+   Using the decl_map of the copy_body_data passed in WI->INFO, remaps all
+   local declarations to appropriate replacements in gimple operands.  */
+
+static tree
+replace_locals_op (tree *tp, int *walk_subtrees, void *data)
+{
+  struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
+  copy_body_data *id = (copy_body_data *) wi->info;
+  struct pointer_map_t *st = id->decl_map;
+  tree *n;
+  tree expr = *tp;
+
+  /* Only a local declaration (variable or label).  */
+  if ((TREE_CODE (expr) == VAR_DECL
+       && !TREE_STATIC (expr))
+      || TREE_CODE (expr) == LABEL_DECL)
+    {
+      /* Lookup the declaration.  */
+      n = (tree *) pointer_map_contains (st, expr);
+
+      /* If it's there, remap it.  */
+      if (n)
+       *tp = *n;
+      *walk_subtrees = 0;
+    }
+  else if (TREE_CODE (expr) == STATEMENT_LIST
+          || TREE_CODE (expr) == BIND_EXPR
+          || TREE_CODE (expr) == SAVE_EXPR)
+    gcc_unreachable ();
+  else if (TREE_CODE (expr) == TARGET_EXPR)
+    {
+      /* Don't mess with a TARGET_EXPR that hasn't been expanded.
+         It's OK for this to happen if it was part of a subtree that
+         isn't immediately expanded, such as operand 2 of another
+         TARGET_EXPR.  */
+      if (!TREE_OPERAND (expr, 1))
+       {
+         TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
+         TREE_OPERAND (expr, 3) = NULL_TREE;
+       }
+    }
+
+  /* Keep iterating.  */
+  return NULL_TREE;
+}
+
+
+/* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
+   Using the decl_map of the copy_body_data passed in WI->INFO, remaps all
+   local declarations to appropriate replacements in gimple statements.  */
+
+static tree
+replace_locals_stmt (gimple_stmt_iterator *gsip,
+                    bool *handled_ops_p ATTRIBUTE_UNUSED,
+                    struct walk_stmt_info *wi)
+{
+  copy_body_data *id = (copy_body_data *) wi->info;
+  gimple stmt = gsi_stmt (*gsip);
+
+  if (gimple_code (stmt) == GIMPLE_BIND)
+    {
+      tree block = gimple_bind_block (stmt);
+
+      if (block)
+       {
+         remap_block (&block, id);
+         gimple_bind_set_block (stmt, block);
+       }
+
+      /* This will remap a lot of the same decls again, but this should be
+        harmless.  */
+      if (gimple_bind_vars (stmt))
+       gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), id));
+    }
+
+  /* Keep iterating.  */
+  return NULL_TREE;
+}
+
+
+/* Copies everything in SEQ and replaces variables and labels local to
+   current_function_decl.  */
+
+gimple_seq
+copy_gimple_seq_and_replace_locals (gimple_seq seq)
+{
+  copy_body_data id;
+  struct walk_stmt_info wi;
+  struct pointer_set_t *visited;
+  gimple_seq copy;
+
+  /* There's nothing to do for an empty sequence.  */
+  if (seq == NULL)
+    return seq;
+
+  /* Set up ID.  */
+  memset (&id, 0, sizeof (id));
+  id.src_fn = current_function_decl;
+  id.dst_fn = current_function_decl;
+  id.decl_map = pointer_map_create ();
+
+  id.copy_decl = copy_decl_no_change;
+  id.transform_call_graph_edges = CB_CGE_DUPLICATE;
+  id.transform_new_cfg = false;
+  id.transform_return_to_modify = false;
+  id.transform_lang_insert_block = NULL;
+
+  /* Walk the tree once to find local labels.  */
+  memset (&wi, 0, sizeof (wi));
+  visited = pointer_set_create ();
+  wi.info = &id;
+  wi.pset = visited;
+  walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
+  pointer_set_destroy (visited);
+
+  copy = gimple_seq_copy (seq);
+
+  /* Walk the copy, remapping decls.  */
+  memset (&wi, 0, sizeof (wi));
+  wi.info = &id;
+  walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
+
+  /* Clean up.  */
+  pointer_map_destroy (id.decl_map);
+
+  return copy;
+}
+
+
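/* Illustrative sketch, not part of the patch: the walk_gimple_seq pattern the
   new functions above rely on.  A statement callback sees each tuple through
   its iterator, an optional operand callback sees each operand tree, and user
   data travels in walk_stmt_info.info.  The callback and function names here
   are made up for the example.  */
static tree
count_labels_stmt_sketch (gimple_stmt_iterator *gsip,
                          bool *handled_ops_p ATTRIBUTE_UNUSED,
                          struct walk_stmt_info *wi)
{
  if (gimple_code (gsi_stmt (*gsip)) == GIMPLE_LABEL)
    ++*(int *) wi->info;
  return NULL_TREE;
}

static int
count_labels_in_seq_sketch (gimple_seq seq)
{
  struct walk_stmt_info wi;
  int count = 0;

  memset (&wi, 0, sizeof (wi));
  wi.info = &count;
  walk_gimple_seq (seq, count_labels_stmt_sketch, NULL, &wi);
  return count;
}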
 /* Allow someone to determine if SEARCH is a child of TOP from gdb.  */
 
 static tree
@@ -3336,7 +3961,6 @@ declare_inline_vars (tree block, tree vars)
     BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
 }
 
-
 /* Copy NODE (which must be a DECL).  The DECL originally was in the FROM_FN,
    but now it will be in the TO_FN.  PARM_TO_VAR means enable PARM_DECL to
    VAR_DECL translation.  */
@@ -3429,7 +4053,6 @@ copy_result_decl_to_var (tree decl, copy_body_data *id)
   return copy_decl_for_dup_finish (id, decl, copy);
 }
 
-
 tree
 copy_decl_no_change (tree decl, copy_body_data *id)
 {
@@ -3591,7 +4214,8 @@ tree_function_versioning (tree old_decl, tree new_decl, varray_type tree_map,
   if (tree_map)
     for (i = 0; i < VARRAY_ACTIVE_SIZE (tree_map); i++)
       {
-       replace_info = (struct ipa_replace_map *) VARRAY_GENERIC_PTR (tree_map, i);
+       replace_info
+         = (struct ipa_replace_map *) VARRAY_GENERIC_PTR (tree_map, i);
        if (replace_info->replace_p)
          insert_decl_map (&id, replace_info->old_tree,
                           replace_info->new_tree);
index ab03f7a..e590e14 100644 (file)
@@ -32,13 +32,17 @@ typedef struct copy_body_data
   /* FUNCTION_DECL for function being inlined, or in general the
      source function providing the original trees.  */
   tree src_fn;
+
   /* FUNCTION_DECL for function being inlined into, or in general
      the destination function receiving the new trees.  */
   tree dst_fn;
+
   /* Callgraph node of the source function.  */
   struct cgraph_node *src_node;
+
   /* Callgraph node of the destination function.  */
   struct cgraph_node *dst_node;
+
   /* struct function for function being inlined.  Usually this is the same
      as DECL_STRUCT_FUNCTION (src_fn), but can be different if saved_cfg
      and saved_eh are in use.  */
@@ -46,6 +50,7 @@ typedef struct copy_body_data
 
   /* The VAR_DECL for the return value.  */
   tree retvar;
+
   /* The map from local declarations in the inlined function to
      equivalents in the function into which it is being inlined.  */
   struct pointer_map_t *decl_map;
@@ -56,12 +61,13 @@ typedef struct copy_body_data
   /* Current BLOCK.  */
   tree block;
 
-  /* CALL_EXPR if va arg parameter packs should be expanded or NULL
+  /* GIMPLE_CALL if va arg parameter packs should be expanded or NULL
     if they should not be.  */
-  tree call_expr;
+  gimple gimple_call;
 
   /* Exception region the inlined call lies in.  */
   int eh_region;
+
   /* Take region number in the function being copied, add this value and
      get eh region number of the duplicate in the function we inline into.  */
   int eh_region_offset;
@@ -137,23 +143,26 @@ extern eni_weights eni_time_weights;
 
 /* Function prototypes.  */
 
-extern tree copy_body_r (tree *, int *, void *);
+extern tree copy_tree_body_r (tree *, int *, void *);
 extern void insert_decl_map (copy_body_data *, tree, tree);
 
 unsigned int optimize_inline_calls (tree);
 bool tree_inlinable_function_p (tree);
 tree copy_tree_r (tree *, int *, void *);
-tree copy_generic_body (copy_body_data *id);
 tree copy_decl_no_change (tree decl, copy_body_data *id);
 void save_body (tree, tree *, tree *);
 int estimate_move_cost (tree type);
-int estimate_num_insns (tree expr, eni_weights *);
+int estimate_num_insns (gimple, eni_weights *);
+int estimate_num_insns_fn (tree, eni_weights *);
+int count_insns_seq (gimple_seq, eni_weights *);
 bool tree_versionable_function_p (tree);
 void tree_function_versioning (tree, tree, varray_type, bool);
 bool tree_can_inline_p (tree, tree);
 
+extern gimple_seq remap_gimple_seq (gimple_seq, copy_body_data *);
 extern tree remap_decl (tree decl, copy_body_data *id);
 extern tree remap_type (tree type, copy_body_data *id);
+extern gimple_seq copy_gimple_seq_and_replace_locals (gimple_seq seq);
 
 extern HOST_WIDE_INT estimated_stack_frame_size (void);
 
index c9e99ef..fb61731 100644 (file)
@@ -36,7 +36,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "diagnostic.h"
 #include "bitmap.h"
 #include "tree-flow.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "tree-inline.h"
 #include "varray.h"
 #include "timevar.h"
@@ -49,6 +49,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "params.h"
 #include "vecprim.h"
 
+
 /* This file builds the SSA form for a function as described in:
    R. Cytron, J. Ferrante, B. Rosen, M. Wegman, and K. Zadeck. Efficiently
    Computing Static Single Assignment Form and the Control Dependence
@@ -102,6 +103,7 @@ static htab_t def_blocks;
      associated with the current block.  */
 static VEC(tree,heap) *block_defs_stack;
 
+
 /* Set of existing SSA names being replaced by update_ssa.  */
 static sbitmap old_ssa_names;
 
@@ -110,6 +112,7 @@ static sbitmap old_ssa_names;
    the operations done on them are presence tests.  */
 static sbitmap new_ssa_names;
 
+
 /* Symbols whose SSA form needs to be updated or created for the first
    time.  */
 static bitmap syms_to_rename;
@@ -129,11 +132,11 @@ static bitmap names_to_release;
 
 /* For each block, the PHI nodes that need to be rewritten are stored into
    these vectors.  */
-typedef VEC(tree, heap) *tree_vec;
-DEF_VEC_P (tree_vec);
-DEF_VEC_ALLOC_P (tree_vec, heap);
+typedef VEC(gimple, heap) *gimple_vec;
+DEF_VEC_P (gimple_vec);
+DEF_VEC_ALLOC_P (gimple_vec, heap);
 
-static VEC(tree_vec, heap) *phis_to_rewrite;
+static VEC(gimple_vec, heap) *phis_to_rewrite;
 
 /* The bitmap of non-NULL elements of PHIS_TO_REWRITE.  */
 static bitmap blocks_with_phis_to_rewrite;
@@ -152,6 +155,7 @@ struct repl_map_d
   bitmap set;
 };
 
+
 /* NEW -> OLD_SET replacement table.  If we are replacing several
    existing SSA names O_1, O_2, ..., O_j with a new name N_i,
    then REPL_TBL[N_i] = { O_1, O_2, ..., O_j }.  */
@@ -239,19 +243,6 @@ enum rewrite_mode {
 };
 
 
-/* Use TREE_VISITED to keep track of which statements we want to
-   rename.  When renaming a subset of the variables, not all
-   statements will be processed.  This is decided in mark_def_sites.  */
-#define REWRITE_THIS_STMT(T)   TREE_VISITED (T)
-
-/* Use the unsigned flag to keep track of which statements we want to
-   visit when marking new definition sites.  This is slightly
-   different than REWRITE_THIS_STMT: it's used by update_ssa to
-   distinguish statements that need to have both uses and defs
-   processed from those that only need to have their defs processed.
-   Statements that define new SSA names only need to have their defs
-   registered, but they don't need to have their uses renamed.  */
-#define REGISTER_DEFS_IN_THIS_STMT(T)  (T)->base.unsigned_flag
 
 
 /* Prototypes for debugging functions.  */
@@ -271,6 +262,50 @@ extern void debug_defs_stack (int);
 extern void dump_currdefs (FILE *);
 extern void debug_currdefs (void);
 
+/* Return true if STMT needs to be rewritten.  When renaming a subset
+   of the variables, not all statements will be processed.  This is
+   decided in mark_def_sites.  */
+
+static inline bool
+rewrite_uses_p (gimple stmt)
+{
+  return gimple_visited_p (stmt);
+}
+
+
+/* Set the rewrite marker on STMT to the value given by REWRITE_P.  */
+
+static inline void
+set_rewrite_uses (gimple stmt, bool rewrite_p)
+{
+  gimple_set_visited (stmt, rewrite_p);
+}
+
+
+/* Return true if the DEFs created by statement STMT should be
+   registered when marking new definition sites.  This is slightly
+   different than rewrite_uses_p: it's used by update_ssa to
+   distinguish statements that need to have both uses and defs
+   processed from those that only need to have their defs processed.
+   Statements that define new SSA names only need to have their defs
+   registered, but they don't need to have their uses renamed.  */
+
+static inline bool
+register_defs_p (gimple stmt)
+{
+  return gimple_plf (stmt, GF_PLF_1) != 0;
+}
+
+
+/* If REGISTER_DEFS_P is true, mark STMT to have its DEFs registered.  */
+
+static inline void
+set_register_defs (gimple stmt, bool register_defs_p)
+{
+  gimple_set_plf (stmt, GF_PLF_1, register_defs_p);
+}
+
+
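/* Illustrative sketch, not part of the patch: the new helpers above sit on
   top of two generic per-statement bits, the "visited" flag and pass-local
   flag GF_PLF_1, which replace the old TREE_VISITED and unsigned_flag macros.
   A pass could also drive the primitives directly, as shown here; the
   function name is made up for the example.  */
static void
stmt_flags_sketch (gimple stmt)
{
  gimple_set_visited (stmt, true);        /* Equivalent to set_rewrite_uses.  */
  gimple_set_plf (stmt, GF_PLF_1, true);  /* Equivalent to set_register_defs.  */

  if (gimple_visited_p (stmt) && gimple_plf (stmt, GF_PLF_1) != 0)
    {
      /* Both the uses and the defs of STMT will be processed.  */
    }
}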
 /* Get the information associated with NAME.  */
 
 static inline ssa_name_info_p
@@ -371,7 +406,7 @@ set_current_def (tree var, tree def)
    for LIVEIN).  */
 
 void
-compute_global_livein (bitmap livein, bitmap def_blocks)
+compute_global_livein (bitmap livein ATTRIBUTE_UNUSED, bitmap def_blocks ATTRIBUTE_UNUSED)
 {
   basic_block bb, *worklist, *tos;
   unsigned i;
@@ -419,24 +454,26 @@ compute_global_livein (bitmap livein, bitmap def_blocks)
 static void
 initialize_flags_in_bb (basic_block bb)
 {
-  tree phi, stmt;
-  block_stmt_iterator bsi;
+  gimple stmt;
+  gimple_stmt_iterator gsi;
 
-  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
     {
-      REWRITE_THIS_STMT (phi) = 0;
-      REGISTER_DEFS_IN_THIS_STMT (phi) = 0;
+      gimple phi = gsi_stmt (gsi);
+      set_rewrite_uses (phi, false);
+      set_register_defs (phi, false);
     }
 
-  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
     {
-      stmt = bsi_stmt (bsi);
+      stmt = gsi_stmt (gsi);
+
       /* We are going to use the operand cache API, such as
         SET_USE, SET_DEF, and FOR_EACH_IMM_USE_FAST.  The operand
         cache for each statement should be up-to-date.  */
-      gcc_assert (!stmt_modified_p (stmt));
-      REWRITE_THIS_STMT (stmt) = 0;
-      REGISTER_DEFS_IN_THIS_STMT (stmt) = 0;
+      gcc_assert (!gimple_modified_p (stmt));
+      set_rewrite_uses (stmt, false);
+      set_register_defs (stmt, false);
     }
 }
 
@@ -724,23 +761,26 @@ add_new_name_mapping (tree new, tree old)
 
 static void
 mark_def_sites (struct dom_walk_data *walk_data, basic_block bb,
-               block_stmt_iterator bsi)
+               gimple_stmt_iterator gsi)
 {
   struct mark_def_sites_global_data *gd;
   bitmap kills;
-  tree stmt, def;
+  tree def;
+  gimple stmt;
   use_operand_p use_p;
   ssa_op_iter iter;
 
-  stmt = bsi_stmt (bsi);
-  update_stmt_if_modified (stmt);
+  /* Since this is the first time that we rewrite the program into SSA
+     form, force an operand scan on every statement.  */
+  stmt = gsi_stmt (gsi);
+  update_stmt (stmt);
 
   gd = (struct mark_def_sites_global_data *) walk_data->global_data;
   kills = gd->kills;
 
   gcc_assert (blocks_to_update == NULL);
-  REGISTER_DEFS_IN_THIS_STMT (stmt) = 0;
-  REWRITE_THIS_STMT (stmt) = 0;
+  set_register_defs (stmt, false);
+  set_rewrite_uses (stmt, false);
 
   /* If a variable is used before being set, then the variable is live
      across a block boundary, so mark it live-on-entry to BB.  */
@@ -750,7 +790,7 @@ mark_def_sites (struct dom_walk_data *walk_data, basic_block bb,
       gcc_assert (DECL_P (sym));
       if (!bitmap_bit_p (kills, DECL_UID (sym)))
        set_livein_block (sym, bb);
-      REWRITE_THIS_STMT (stmt) = 1;
+      set_rewrite_uses (stmt, true);
     }
   
   /* Now process the defs.  Mark BB as the definition block and add
@@ -760,12 +800,12 @@ mark_def_sites (struct dom_walk_data *walk_data, basic_block bb,
       gcc_assert (DECL_P (def));
       set_def_block (def, bb, false);
       bitmap_set_bit (kills, DECL_UID (def));
-      REGISTER_DEFS_IN_THIS_STMT (stmt) = 1;
+      set_register_defs (stmt, true);
     }
 
   /* If we found the statement interesting then also mark the block BB
      as interesting.  */
-  if (REWRITE_THIS_STMT (stmt) || REGISTER_DEFS_IN_THIS_STMT (stmt))
+  if (rewrite_uses_p (stmt) || register_defs_p (stmt))
     SET_BIT (gd->interesting_blocks, bb->index);
 }
 
@@ -1007,7 +1047,7 @@ get_default_def_for (tree sym)
 
   if (ddef == NULL_TREE)
     {
-      ddef = make_ssa_name (sym, build_empty_stmt ());
+      ddef = make_ssa_name (sym, gimple_build_nop ());
       set_default_def (sym, ddef);
     }
 
@@ -1018,30 +1058,30 @@ get_default_def_for (tree sym)
 /* Marks phi node PHI in basic block BB for rewrite.  */
 
 static void
-mark_phi_for_rewrite (basic_block bb, tree phi)
+mark_phi_for_rewrite (basic_block bb, gimple phi)
 {
-  tree_vec phis;
+  gimple_vec phis;
   unsigned i, idx = bb->index;
 
-  if (REWRITE_THIS_STMT (phi))
+  if (rewrite_uses_p (phi))
     return;
 
-  REWRITE_THIS_STMT (phi) = 1;
+  set_rewrite_uses (phi, true);
 
   if (!blocks_with_phis_to_rewrite)
     return;
 
   bitmap_set_bit (blocks_with_phis_to_rewrite, idx);
-  VEC_reserve (tree_vec, heap, phis_to_rewrite, last_basic_block + 1);
-  for (i = VEC_length (tree_vec, phis_to_rewrite); i <= idx; i++)
-    VEC_quick_push (tree_vec, phis_to_rewrite, NULL);
+  VEC_reserve (gimple_vec, heap, phis_to_rewrite, last_basic_block + 1);
+  for (i = VEC_length (gimple_vec, phis_to_rewrite); i <= idx; i++)
+    VEC_quick_push (gimple_vec, phis_to_rewrite, NULL);
 
-  phis = VEC_index (tree_vec, phis_to_rewrite, idx);
+  phis = VEC_index (gimple_vec, phis_to_rewrite, idx);
   if (!phis)
-    phis = VEC_alloc (tree, heap, 10);
+    phis = VEC_alloc (gimple, heap, 10);
 
-  VEC_safe_push (tree, heap, phis, phi);
-  VEC_replace (tree_vec, phis_to_rewrite, idx, phis);
+  VEC_safe_push (gimple, heap, phis, phi);
+  VEC_replace (gimple_vec, phis_to_rewrite, idx, phis);
 }
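/* Illustrative sketch, not part of the patch: the gimple_vec vectors declared
   above follow the usual VEC API; only the element type changed from tree to
   gimple.  The function name is made up for the example.  */
static void
gimple_vec_sketch (gimple stmt)
{
  gimple_vec v = VEC_alloc (gimple, heap, 10);
  gimple s;
  unsigned i;

  VEC_safe_push (gimple, heap, v, stmt);
  for (i = 0; VEC_iterate (gimple, v, i, s); i++)
    gcc_assert (s != NULL);
  VEC_free (gimple, heap, v);
}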
 
 
@@ -1060,7 +1100,7 @@ insert_phi_nodes_for (tree var, bitmap phi_insertion_points, bool update_p)
 {
   unsigned bb_index;
   edge e;
-  tree phi;
+  gimple phi;
   basic_block bb;
   bitmap_iterator bi;
   struct def_blocks_d *def_map;
@@ -1082,7 +1122,7 @@ insert_phi_nodes_for (tree var, bitmap phi_insertion_points, bool update_p)
       if (update_p)
        mark_block_for_update (bb);
 
-      phi = NULL_TREE;
+      phi = NULL;
 
       if (TREE_CODE (var) == SSA_NAME)
        {
@@ -1097,7 +1137,7 @@ insert_phi_nodes_for (tree var, bitmap phi_insertion_points, bool update_p)
          phi = create_phi_node (var, bb);
 
          new_lhs = duplicate_ssa_name (var, phi);
-         SET_PHI_RESULT (phi, new_lhs);
+         gimple_phi_set_result (phi, new_lhs);
          add_new_name_mapping (new_lhs, var);
 
          /* Add VAR to every argument slot of PHI.  We need VAR in
@@ -1116,7 +1156,7 @@ insert_phi_nodes_for (tree var, bitmap phi_insertion_points, bool update_p)
        }
 
       /* Mark this PHI node as interesting for update_ssa.  */
-      REGISTER_DEFS_IN_THIS_STMT (phi) = 1;
+      set_register_defs (phi, true);
       mark_phi_for_rewrite (bb, phi);
     }
 }
@@ -1231,7 +1271,8 @@ static void
 rewrite_initialize_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
                          basic_block bb)
 {
-  tree phi;
+  gimple phi;
+  gimple_stmt_iterator gsi;
 
   if (dump_file && (dump_flags & TDF_DETAILS))
     fprintf (dump_file, "\n\nRenaming block #%d\n\n", bb->index);
@@ -1242,9 +1283,12 @@ rewrite_initialize_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
   /* Step 1.  Register new definitions for every PHI node in the block.
      Conceptually, all the PHI nodes are executed in parallel and each PHI
      node introduces a new version for the associated variable.  */
-  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
     {
-      tree result = PHI_RESULT (phi);
+      tree result;
+
+      phi = gsi_stmt (gsi);
+      result = gimple_phi_result (phi);
       gcc_assert (is_gimple_reg (result));
       register_new_def (result, SSA_NAME_VAR (result));
     }
@@ -1283,30 +1327,30 @@ get_reaching_def (tree var)
 
 static void
 rewrite_stmt (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
-             basic_block bb ATTRIBUTE_UNUSED, block_stmt_iterator si)
+             basic_block bb ATTRIBUTE_UNUSED, gimple_stmt_iterator si)
 {
-  tree stmt;
+  gimple stmt;
   use_operand_p use_p;
   def_operand_p def_p;
   ssa_op_iter iter;
 
-  stmt = bsi_stmt (si);
+  stmt = gsi_stmt (si);
 
   /* If mark_def_sites decided that we don't need to rewrite this
      statement, ignore it.  */
   gcc_assert (blocks_to_update == NULL);
-  if (!REWRITE_THIS_STMT (stmt) && !REGISTER_DEFS_IN_THIS_STMT (stmt))
+  if (!rewrite_uses_p (stmt) && !register_defs_p (stmt))
     return;
 
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
       fprintf (dump_file, "Renaming statement ");
-      print_generic_stmt (dump_file, stmt, TDF_SLIM);
+      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
       fprintf (dump_file, "\n");
     }
 
   /* Step 1.  Rewrite USES in the statement.  */
-  if (REWRITE_THIS_STMT (stmt))
+  if (rewrite_uses_p (stmt))
     FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
       {
        tree var = USE_FROM_PTR (use_p);
@@ -1315,7 +1359,7 @@ rewrite_stmt (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
       }
 
   /* Step 2.  Register the statement's DEF operands.  */
-  if (REGISTER_DEFS_IN_THIS_STMT (stmt))
+  if (register_defs_p (stmt))
     FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, iter, SSA_OP_DEF)
       {
        tree var = DEF_FROM_PTR (def_p);
@@ -1340,12 +1384,15 @@ rewrite_add_phi_arguments (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
 
   FOR_EACH_EDGE (e, ei, bb->succs)
     {
-      tree phi;
+      gimple phi;
+      gimple_stmt_iterator gsi;
 
-      for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
+      for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi);
+          gsi_next (&gsi))
        {
          tree currdef;
-         currdef = get_reaching_def (SSA_NAME_VAR (PHI_RESULT (phi)));
+         phi = gsi_stmt (gsi);
+         currdef = get_reaching_def (SSA_NAME_VAR (gimple_phi_result (phi)));
          add_phi_arg (phi, currdef, e);
        }
     }
@@ -1721,8 +1768,8 @@ rewrite_update_init_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
 {
   edge e;
   edge_iterator ei;
-  tree phi;
   bool is_abnormal_phi;
+  gimple_stmt_iterator gsi;
 
   if (dump_file && (dump_flags & TDF_DETAILS))
     fprintf (dump_file, "\n\nRegistering new PHI nodes in block #%d\n\n",
@@ -1749,14 +1796,15 @@ rewrite_update_init_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
      register it as a new definition for its corresponding name.  Also
      register definitions for names whose underlying symbols are
      marked for renaming.  */
-  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
     {
       tree lhs, lhs_sym;
+      gimple phi = gsi_stmt (gsi);
 
-      if (!REGISTER_DEFS_IN_THIS_STMT (phi))
+      if (!register_defs_p (phi))
        continue;
       
-      lhs = PHI_RESULT (phi);
+      lhs = gimple_phi_result (phi);
       lhs_sym = SSA_NAME_VAR (lhs);
 
       if (symbol_marked_for_renaming (lhs_sym))
@@ -1834,7 +1882,7 @@ maybe_replace_use (use_operand_p use_p)
    DEF_P.  */
 
 static inline void
-maybe_register_def (def_operand_p def_p, tree stmt)
+maybe_register_def (def_operand_p def_p, gimple stmt)
 {
   tree def = DEF_FROM_PTR (def_p);
   tree sym = DECL_P (def) ? def : SSA_NAME_VAR (def);
@@ -1876,33 +1924,31 @@ maybe_register_def (def_operand_p def_p, tree stmt)
 static void
 rewrite_update_stmt (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
                     basic_block bb ATTRIBUTE_UNUSED,
-                    block_stmt_iterator si)
+                    gimple_stmt_iterator si)
 {
-  stmt_ann_t ann;
-  tree stmt;
+  gimple stmt;
   use_operand_p use_p;
   def_operand_p def_p;
   ssa_op_iter iter;
 
-  stmt = bsi_stmt (si);
-  ann = stmt_ann (stmt);
+  stmt = gsi_stmt (si);
 
   gcc_assert (bitmap_bit_p (blocks_to_update, bb->index));
 
   /* Only update marked statements.  */
-  if (!REWRITE_THIS_STMT (stmt) && !REGISTER_DEFS_IN_THIS_STMT (stmt))
+  if (!rewrite_uses_p (stmt) && !register_defs_p (stmt))
     return;
 
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
       fprintf (dump_file, "Updating SSA information for statement ");
-      print_generic_stmt (dump_file, stmt, TDF_SLIM);
+      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
       fprintf (dump_file, "\n");
     }
 
   /* Rewrite USES included in OLD_SSA_NAMES and USES whose underlying
      symbol is marked for renaming.  */
-  if (REWRITE_THIS_STMT (stmt))
+  if (rewrite_uses_p (stmt))
     {
       FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
        maybe_replace_use (use_p);
@@ -1915,7 +1961,7 @@ rewrite_update_stmt (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
   /* Register definitions of names in NEW_SSA_NAMES and OLD_SSA_NAMES.
      Also register definitions for names whose underlying symbol is
      marked for renaming.  */
-  if (REGISTER_DEFS_IN_THIS_STMT (stmt))
+  if (register_defs_p (stmt))
     {
       FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, iter, SSA_OP_DEF)
        maybe_register_def (def_p, stmt);
@@ -1942,19 +1988,19 @@ rewrite_update_phi_arguments (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
 
   FOR_EACH_EDGE (e, ei, bb->succs)
     {
-      tree phi;
-      tree_vec phis;
+      gimple phi;
+      gimple_vec phis;
 
       if (!bitmap_bit_p (blocks_with_phis_to_rewrite, e->dest->index))
        continue;
      
-      phis = VEC_index (tree_vec, phis_to_rewrite, e->dest->index);
-      for (i = 0; VEC_iterate (tree, phis, i, phi); i++)
+      phis = VEC_index (gimple_vec, phis_to_rewrite, e->dest->index);
+      for (i = 0; VEC_iterate (gimple, phis, i, phi); i++)
        {
          tree arg, lhs_sym;
          use_operand_p arg_p;
 
-         gcc_assert (REWRITE_THIS_STMT (phi));
+         gcc_assert (rewrite_uses_p (phi));
 
          arg_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
          arg = USE_FROM_PTR (arg_p);
@@ -1962,7 +2008,7 @@ rewrite_update_phi_arguments (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
          if (arg && !DECL_P (arg) && TREE_CODE (arg) != SSA_NAME)
            continue;
 
-         lhs_sym = SSA_NAME_VAR (PHI_RESULT (phi));
+         lhs_sym = SSA_NAME_VAR (gimple_phi_result (phi));
 
          if (arg == NULL_TREE)
            {
@@ -2168,7 +2214,6 @@ fini_ssa_renamer (void)
   cfun->gimple_df->in_ssa_p = true;
 }
 
-
 /* Main entry point into the SSA builder.  The renaming process
    proceeds in four main phases:
 
@@ -2264,14 +2309,14 @@ struct gimple_opt_pass pass_build_ssa =
    renamer.  BLOCKS is the set of blocks that need updating.  */
 
 static void
-mark_def_interesting (tree var, tree stmt, basic_block bb, bool insert_phi_p)
+mark_def_interesting (tree var, gimple stmt, basic_block bb, bool insert_phi_p)
 {
   gcc_assert (bitmap_bit_p (blocks_to_update, bb->index));
-  REGISTER_DEFS_IN_THIS_STMT (stmt) = 1;
+  set_register_defs (stmt, true);
 
   if (insert_phi_p)
     {
-      bool is_phi_p = TREE_CODE (stmt) == PHI_NODE;
+      bool is_phi_p = gimple_code (stmt) == GIMPLE_PHI;
 
       set_def_block (var, bb, is_phi_p);
 
@@ -2295,17 +2340,17 @@ mark_def_interesting (tree var, tree stmt, basic_block bb, bool insert_phi_p)
    nodes.  */
 
 static inline void
-mark_use_interesting (tree var, tree stmt, basic_block bb, bool insert_phi_p)
+mark_use_interesting (tree var, gimple stmt, basic_block bb, bool insert_phi_p)
 {
-  basic_block def_bb = bb_for_stmt (stmt);
+  basic_block def_bb = gimple_bb (stmt);
 
   mark_block_for_update (def_bb);
   mark_block_for_update (bb);
 
-  if (TREE_CODE (stmt) == PHI_NODE)
+  if (gimple_code (stmt) == GIMPLE_PHI)
     mark_phi_for_rewrite (def_bb, stmt);
   else
-    REWRITE_THIS_STMT (stmt) = 1;
+    set_rewrite_uses (stmt, true);
 
   /* If VAR has not been defined in BB, then it is live-on-entry
      to BB.  Note that we cannot just use the block holding VAR's
@@ -2340,8 +2385,7 @@ static void
 prepare_block_for_update (basic_block bb, bool insert_phi_p)
 {
   basic_block son;
-  block_stmt_iterator si;
-  tree phi;
+  gimple_stmt_iterator si;
   edge e;
   edge_iterator ei;
 
@@ -2349,14 +2393,16 @@ prepare_block_for_update (basic_block bb, bool insert_phi_p)
 
   /* Process PHI nodes marking interesting those that define or use
      the symbols that we are interested in.  */
-  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+  for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
     {
-      tree lhs_sym, lhs = PHI_RESULT (phi);
+      gimple phi = gsi_stmt (si);
+      tree lhs_sym, lhs = gimple_phi_result (phi);
 
       lhs_sym = DECL_P (lhs) ? lhs : SSA_NAME_VAR (lhs);
 
       if (!symbol_marked_for_renaming (lhs_sym))
        continue;
+
       mark_def_interesting (lhs_sym, phi, bb, insert_phi_p);
 
       /* Mark the uses in phi nodes as interesting.  It would be more correct
@@ -2367,20 +2413,18 @@ prepare_block_for_update (basic_block bb, bool insert_phi_p)
         block that also contains its definition, and thus insert a few more
         phi nodes for it.  */
       FOR_EACH_EDGE (e, ei, bb->preds)
-       {
-         mark_use_interesting (lhs_sym, phi, e->src, insert_phi_p);
-       }
+       mark_use_interesting (lhs_sym, phi, e->src, insert_phi_p);
     }
 
   /* Process the statements.  */
-  for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
+  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
     {
-      tree stmt;
+      gimple stmt;
       ssa_op_iter i;
       use_operand_p use_p;
       def_operand_p def_p;
       
-      stmt = bsi_stmt (si);
+      stmt = gsi_stmt (si);
 
       FOR_EACH_SSA_USE_OPERAND (use_p, stmt, i, SSA_OP_ALL_USES)
        {
@@ -2419,13 +2463,13 @@ prepare_use_sites_for (tree name, bool insert_phi_p)
 
   FOR_EACH_IMM_USE_FAST (use_p, iter, name)
     {
-      tree stmt = USE_STMT (use_p);
-      basic_block bb = bb_for_stmt (stmt);
+      gimple stmt = USE_STMT (use_p);
+      basic_block bb = gimple_bb (stmt);
 
-      if (TREE_CODE (stmt) == PHI_NODE)
+      if (gimple_code (stmt) == GIMPLE_PHI)
        {
          int ix = PHI_ARG_INDEX_FROM_USE (use_p);
-         edge e = PHI_ARG_EDGE (stmt, ix);
+         edge e = gimple_phi_arg_edge (stmt, ix);
          mark_use_interesting (name, stmt, e->src, insert_phi_p);
        }
       else
@@ -2445,14 +2489,14 @@ prepare_use_sites_for (tree name, bool insert_phi_p)
 static void
 prepare_def_site_for (tree name, bool insert_phi_p)
 {
-  tree stmt;
+  gimple stmt;
   basic_block bb;
 
   gcc_assert (names_to_release == NULL
              || !bitmap_bit_p (names_to_release, SSA_NAME_VERSION (name)));
 
   stmt = SSA_NAME_DEF_STMT (name);
-  bb = bb_for_stmt (stmt);
+  bb = gimple_bb (stmt);
   if (bb)
     {
       gcc_assert (bb->index < last_basic_block);
@@ -2659,10 +2703,10 @@ delete_update_ssa (void)
   if (blocks_with_phis_to_rewrite)
     EXECUTE_IF_SET_IN_BITMAP (blocks_with_phis_to_rewrite, 0, i, bi)
       {
-       tree_vec phis = VEC_index (tree_vec, phis_to_rewrite, i);
+       gimple_vec phis = VEC_index (gimple_vec, phis_to_rewrite, i);
 
-       VEC_free (tree, heap, phis);
-       VEC_replace (tree_vec, phis_to_rewrite, i, NULL);
+       VEC_free (gimple, heap, phis);
+       VEC_replace (gimple_vec, phis_to_rewrite, i, NULL);
       }
 
   BITMAP_FREE (blocks_with_phis_to_rewrite);
@@ -2676,17 +2720,17 @@ delete_update_ssa (void)
    update_ssa's tables.  */
 
 tree
-create_new_def_for (tree old_name, tree stmt, def_operand_p def)
+create_new_def_for (tree old_name, gimple stmt, def_operand_p def)
 {
   tree new_name = duplicate_ssa_name (old_name, stmt);
 
   SET_DEF (def, new_name);
 
-  if (TREE_CODE (stmt) == PHI_NODE)
+  if (gimple_code (stmt) == GIMPLE_PHI)
     {
       edge e;
       edge_iterator ei;
-      basic_block bb = bb_for_stmt (stmt);
+      basic_block bb = gimple_bb (stmt);
 
       /* If needed, mark NEW_NAME as occurring in an abnormal PHI node. */
       FOR_EACH_EDGE (e, ei, bb->preds)
@@ -2713,7 +2757,7 @@ create_new_def_for (tree old_name, tree stmt, def_operand_p def)
    update_ssa.  */
 
 void
-register_new_name_mapping (tree new, tree old)
+register_new_name_mapping (tree new ATTRIBUTE_UNUSED, tree old ATTRIBUTE_UNUSED)
 {
   if (need_to_initialize_update_ssa_p)
     init_update_ssa ();
@@ -2779,7 +2823,7 @@ name_mappings_registered_p (void)
 /* Return true if name N has been registered in the replacement table.  */
 
 bool
-name_registered_for_update_p (tree n)
+name_registered_for_update_p (tree n ATTRIBUTE_UNUSED)
 {
   if (!need_ssa_update_p ())
     return false;
@@ -3079,7 +3123,7 @@ update_ssa (unsigned update_flags)
 
   blocks_with_phis_to_rewrite = BITMAP_ALLOC (NULL);
   if (!phis_to_rewrite)
-    phis_to_rewrite = VEC_alloc (tree_vec, heap, last_basic_block);
+    phis_to_rewrite = VEC_alloc (gimple_vec, heap, last_basic_block);
   blocks_to_update = BITMAP_ALLOC (NULL);
 
   /* Ensure that the dominance information is up-to-date.  */
index 9816f24..d8c151a 100644 (file)
@@ -22,7 +22,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "system.h"
 #include "coretypes.h"
 #include "tree.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "tree-iterator.h"
 #include "ggc.h"
 
index d15ab80..d86391e 100644 (file)
@@ -84,22 +84,25 @@ static void
 update_phis_for_loop_copy (struct loop *orig_loop, struct loop *new_loop)
 {
   tree new_ssa_name;
-  tree phi_new, phi_orig;
+  gimple_stmt_iterator si_new, si_orig;
   edge orig_loop_latch = loop_latch_edge (orig_loop);
   edge orig_entry_e = loop_preheader_edge (orig_loop);
   edge new_loop_entry_e = loop_preheader_edge (new_loop);
 
   /* Scan the phis in the headers of the old and new loops
      (they are organized in exactly the same order).  */
-
-  for (phi_new = phi_nodes (new_loop->header),
-       phi_orig = phi_nodes (orig_loop->header);
-       phi_new && phi_orig;
-       phi_new = PHI_CHAIN (phi_new), phi_orig = PHI_CHAIN (phi_orig))
+  for (si_new = gsi_start_phis (new_loop->header),
+       si_orig = gsi_start_phis (orig_loop->header);
+       !gsi_end_p (si_new) && !gsi_end_p (si_orig);
+       gsi_next (&si_new), gsi_next (&si_orig))
     {
+      tree def;
+      gimple phi_new = gsi_stmt (si_new);
+      gimple phi_orig = gsi_stmt (si_orig);
+
       /* Add the first phi argument for the phi in NEW_LOOP (the one
         associated with the entry of NEW_LOOP)  */
-      tree def = PHI_ARG_DEF_FROM_EDGE (phi_orig, orig_entry_e);
+      def = PHI_ARG_DEF_FROM_EDGE (phi_orig, orig_entry_e);
       add_phi_arg (phi_new, def, new_loop_entry_e);
 
       /* Add the second phi argument for the phi in NEW_LOOP (the one
@@ -171,7 +174,7 @@ static bool
 generate_loops_for_partition (struct loop *loop, bitmap partition, bool copy_p)
 {
   unsigned i, x;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator bsi;
   basic_block *bbs;
 
   if (copy_p)
@@ -192,27 +195,19 @@ generate_loops_for_partition (struct loop *loop, bitmap partition, bool copy_p)
   for (x = 0, i = 0; i < loop->num_nodes; i++)
     {
       basic_block bb = bbs[i];
-      tree phi, prev = NULL_TREE, next;
 
-      for (phi = phi_nodes (bb); phi;)
+      for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi);)
        if (!bitmap_bit_p (partition, x++))
-         {
-           next = PHI_CHAIN (phi);
-           remove_phi_node (phi, prev, true);
-           phi = next;
-         }
+         remove_phi_node (&bsi, true);
        else
-         {
-           prev = phi;
-           phi = PHI_CHAIN (phi);
-         }
+         gsi_next (&bsi);
 
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi);)
-       if (TREE_CODE (bsi_stmt (bsi)) != LABEL_EXPR
+      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi);)
+       if (gimple_code (gsi_stmt (bsi)) != GIMPLE_LABEL
            && !bitmap_bit_p (partition, x++))
-         bsi_remove (&bsi, false);
+         gsi_remove (&bsi, false);
        else
-         bsi_next (&bsi);
+         gsi_next (&bsi);
 
        mark_virtual_ops_in_bb (bb);
     }
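/* Illustrative sketch, not part of the patch: the removal idiom used in the
   hunk above.  gsi_remove and the tuple form of remove_phi_node leave the
   iterator on the following statement, so gsi_next is only called when
   nothing was removed.  The predicate parameter and function name are
   assumptions made for the example.  */
static void
remove_matching_stmts_sketch (basic_block bb, bool (*dead_p) (gimple))
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
    if (dead_p (gsi_stmt (gsi)))
      gsi_remove (&gsi, false);
    else
      gsi_next (&gsi);
}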
@@ -224,22 +219,22 @@ generate_loops_for_partition (struct loop *loop, bitmap partition, bool copy_p)
 /* Generate a call to memset.  Return true when the operation succeeded.  */
 
 static bool
-generate_memset_zero (tree stmt, tree op0, tree nb_iter,
-                     block_stmt_iterator bsi)
+generate_memset_zero (gimple stmt, tree op0, tree nb_iter,
+                     gimple_stmt_iterator bsi)
 {
-  tree s, t, stmts, nb_bytes, addr_base;
+  tree t, nb_bytes, addr_base;
   bool res = false;
-  tree stmt_list = NULL_TREE;
-  tree args [3];
-  tree fn_call, mem, fndecl, fntype, fn;
-  tree_stmt_iterator i;
+  gimple_seq stmts = NULL, stmt_list = NULL;
+  gimple fn_call;
+  tree mem, fndecl, fntype, fn;
+  gimple_stmt_iterator i;
   ssa_op_iter iter;
   struct data_reference *dr = XCNEW (struct data_reference);
 
   nb_bytes = fold_build2 (MULT_EXPR, TREE_TYPE (nb_iter),
                          nb_iter, TYPE_SIZE_UNIT (TREE_TYPE (op0)));
   nb_bytes = force_gimple_operand (nb_bytes, &stmts, true, NULL);
-  append_to_statement_list_force (stmts, &stmt_list);
+  gimple_seq_add_seq (&stmt_list, stmts);
 
   DR_STMT (dr) = stmt;
   DR_REF (dr) = op0;
@@ -261,7 +256,7 @@ generate_memset_zero (tree stmt, tree op0, tree nb_iter,
       addr_base = size_binop (PLUS_EXPR, DR_OFFSET (dr), DR_INIT (dr));
       addr_base = fold_build2 (MINUS_EXPR, sizetype, addr_base, nb_bytes);
       addr_base = force_gimple_operand (addr_base, &stmts, true, NULL);
-      append_to_statement_list_force (stmts, &stmt_list);
+      gimple_seq_add_seq (&stmt_list, stmts);
 
       addr_base = fold_build2 (POINTER_PLUS_EXPR,
                               TREE_TYPE (DR_BASE_ADDRESS (dr)),
@@ -271,23 +266,18 @@ generate_memset_zero (tree stmt, tree op0, tree nb_iter,
     goto end;
 
   mem = force_gimple_operand (addr_base, &stmts, true, NULL);
-  append_to_statement_list_force (stmts, &stmt_list);
-
+  gimple_seq_add_seq (&stmt_list, stmts);
 
   fndecl = implicit_built_in_decls [BUILT_IN_MEMSET];
   fntype = TREE_TYPE (fndecl);
   fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
 
-  args[0] = mem;
-  args[1] = integer_zero_node;
-  args[2] = nb_bytes;
-
-  fn_call = build_call_array (fntype, fn, 3, args);
-  append_to_statement_list_force (fn_call, &stmt_list);
+  fn_call = gimple_build_call (fn, 3, mem, integer_zero_node, nb_bytes);
+  gimple_seq_add_stmt (&stmt_list, fn_call);
 
-  for (i = tsi_start (stmt_list); !tsi_end_p (i); tsi_next (&i))
+  for (i = gsi_start (stmt_list); !gsi_end_p (i); gsi_next (&i))
     {
-      s = tsi_stmt (i);
+      gimple s = gsi_stmt (i);
       update_stmt_if_modified (s);
 
       FOR_EACH_SSA_TREE_OPERAND (t, s, iter, SSA_OP_VIRTUAL_DEFS)
@@ -303,6 +293,7 @@ generate_memset_zero (tree stmt, tree op0, tree nb_iter,
     {
       if (TREE_CODE (t) == SSA_NAME)
        {
+         gimple s;
          imm_use_iterator imm_iter;
 
          FOR_EACH_IMM_USE_STMT (s, imm_iter, t)
@@ -313,7 +304,7 @@ generate_memset_zero (tree stmt, tree op0, tree nb_iter,
       mark_sym_for_renaming (t);
     }
 
-  bsi_insert_after (&bsi, stmt_list, BSI_CONTINUE_LINKING);
+  gsi_insert_seq_after (&bsi, stmt_list, GSI_CONTINUE_LINKING);
   res = true;
 
   if (dump_file && (dump_flags & TDF_DETAILS))
@@ -334,9 +325,9 @@ generate_builtin (struct loop *loop, bitmap partition, bool copy_p)
   bool res = false;
   unsigned i, x = 0;
   basic_block *bbs;
-  tree write = NULL_TREE;
+  gimple write = NULL;
   tree op0, op1;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator bsi;
   tree nb_iter = number_of_exit_cond_executions (loop);
 
   if (!nb_iter || nb_iter == chrec_dont_know)
@@ -347,18 +338,17 @@ generate_builtin (struct loop *loop, bitmap partition, bool copy_p)
   for (i = 0; i < loop->num_nodes; i++)
     {
       basic_block bb = bbs[i];
-      tree phi;
 
-      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+      for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        x++;
 
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        {
-         tree stmt = bsi_stmt (bsi);
+         gimple stmt = gsi_stmt (bsi);
 
          if (bitmap_bit_p (partition, x++)
-             && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-             && !is_gimple_reg (GIMPLE_STMT_OPERAND (stmt, 0)))
+             && is_gimple_assign (stmt)
+             && !is_gimple_reg (gimple_assign_lhs (stmt)))
            {
              /* Don't generate the builtins when there are more than
                 one memory write.  */
@@ -373,17 +363,18 @@ generate_builtin (struct loop *loop, bitmap partition, bool copy_p)
   if (!write)
     goto end;
 
-  op0 = GIMPLE_STMT_OPERAND (write, 0);
-  op1 = GIMPLE_STMT_OPERAND (write, 1);
+  op0 = gimple_assign_lhs (write);
+  op1 = gimple_assign_rhs1 (write);
 
   if (!(TREE_CODE (op0) == ARRAY_REF
        || TREE_CODE (op0) == INDIRECT_REF))
     goto end;
 
   /* The new statements will be placed before LOOP.  */
-  bsi = bsi_last (loop_preheader_edge (loop)->src);
+  bsi = gsi_last_bb (loop_preheader_edge (loop)->src);
 
-  if (integer_zerop (op1) || real_zerop (op1))
+  if (gimple_assign_rhs_code (write) == INTEGER_CST
+      && (integer_zerop (op1) || real_zerop (op1)))
     res = generate_memset_zero (write, op0, nb_iter, bsi);
 
   /* If this is the last partition for which we generate code, we have
@@ -557,7 +548,7 @@ rdg_flag_uses (struct graph *rdg, int u, bitmap partition, bitmap loops,
   ssa_op_iter iter;
   use_operand_p use_p;
   struct vertex *x = &(rdg->vertices[u]);
-  tree stmt = RDGV_STMT (x);
+  gimple stmt = RDGV_STMT (x);
   struct graph_edge *anti_dep = has_anti_dependence (x);
 
   /* Keep in the same partition the destination of an antidependence,
@@ -572,7 +563,7 @@ rdg_flag_uses (struct graph *rdg, int u, bitmap partition, bitmap loops,
                                       processed, part_has_writes);
     }
 
-  if (TREE_CODE (stmt) != PHI_NODE)
+  if (gimple_code (stmt) != GIMPLE_PHI)
     {
       FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_VIRTUAL_USES)
        {
@@ -580,7 +571,7 @@ rdg_flag_uses (struct graph *rdg, int u, bitmap partition, bitmap loops,
 
          if (TREE_CODE (use) == SSA_NAME)
            {
-             tree def_stmt = SSA_NAME_DEF_STMT (use);
+             gimple def_stmt = SSA_NAME_DEF_STMT (use);
              int v = rdg_vertex_for_stmt (rdg, def_stmt);
 
              if (v >= 0
@@ -591,10 +582,9 @@ rdg_flag_uses (struct graph *rdg, int u, bitmap partition, bitmap loops,
        }
     }
 
-  if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-      && has_upstream_mem_writes (u))
+  if (is_gimple_assign (stmt) && has_upstream_mem_writes (u))
     {
-      tree op0 = GIMPLE_STMT_OPERAND (stmt, 0);
+      tree op0 = gimple_assign_lhs (stmt);
 
       /* Scalar channels don't have enough space for transmitting data
         between tasks, unless we add more storage by privatizing.  */
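/* Illustrative sketch, not part of the patch: accessing an assignment's
   operands under tuples, as done in the conversions above.  The old
   GIMPLE_MODIFY_STMT operand macros become typed accessors, and the shape of
   the right-hand side is queried through its tree code.  The function name is
   made up for the example.  */
static tree
stored_zero_lhs_sketch (gimple stmt)
{
  if (is_gimple_assign (stmt)
      && gimple_assign_rhs_code (stmt) == INTEGER_CST
      && integer_zerop (gimple_assign_rhs1 (stmt)))
    return gimple_assign_lhs (stmt);

  return NULL_TREE;
}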
@@ -667,7 +657,7 @@ rdg_flag_vertex_and_dependent (struct graph *rdg, int v, bitmap partition,
    blocks of LOOP.  */
 
 static void
-collect_condition_stmts (struct loop *loop, VEC (tree, heap) **conds)
+collect_condition_stmts (struct loop *loop, VEC (gimple, heap) **conds)
 {
   unsigned i;
   edge e;
@@ -675,10 +665,10 @@ collect_condition_stmts (struct loop *loop, VEC (tree, heap) **conds)
 
   for (i = 0; VEC_iterate (edge, exits, i, e); i++)
     {
-      tree cond = last_stmt (e->src);
+      gimple cond = last_stmt (e->src);
 
       if (cond)
-       VEC_safe_push (tree, heap, *conds, cond);
+       VEC_safe_push (gimple, heap, *conds, cond);
     }
 
   VEC_free (edge, heap, exits);
@@ -694,14 +684,14 @@ rdg_flag_loop_exits (struct graph *rdg, bitmap loops, bitmap partition,
 {
   unsigned i;
   bitmap_iterator bi;
-  VEC (tree, heap) *conds = VEC_alloc (tree, heap, 3);
+  VEC (gimple, heap) *conds = VEC_alloc (gimple, heap, 3);
 
   EXECUTE_IF_SET_IN_BITMAP (loops, 0, i, bi)
     collect_condition_stmts (get_loop (i), &conds);
 
-  while (!VEC_empty (tree, conds))
+  while (!VEC_empty (gimple, conds))
     {
-      tree cond = VEC_pop (tree, conds);
+      gimple cond = VEC_pop (gimple, conds);
       int v = rdg_vertex_for_stmt (rdg, cond);
       bitmap new_loops = BITMAP_ALLOC (NULL);
 
@@ -1050,11 +1040,11 @@ ldist_gen (struct loop *loop, struct graph *rdg,
    Returns the number of distributed loops.  */
 
 static int
-distribute_loop (struct loop *loop, VEC (tree, heap) *stmts)
+distribute_loop (struct loop *loop, VEC (gimple, heap) *stmts)
 {
   bool res = false;
   struct graph *rdg;
-  tree s;
+  gimple s;
   unsigned i;
   VEC (int, heap) *vertices;
 
@@ -1085,7 +1075,7 @@ distribute_loop (struct loop *loop, VEC (tree, heap) *stmts)
   if (dump_file && (dump_flags & TDF_DETAILS))
     dump_rdg (dump_file, rdg);
 
-  for (i = 0; VEC_iterate (tree, stmts, i, s); i++)
+  for (i = 0; VEC_iterate (gimple, stmts, i, s); i++)
     {
       int v = rdg_vertex_for_stmt (rdg, s);
 
@@ -1117,7 +1107,7 @@ tree_loop_distribution (void)
 
   FOR_EACH_LOOP (li, loop, 0)
     {
-      VEC (tree, heap) *work_list = VEC_alloc (tree, heap, 3);
+      VEC (gimple, heap) *work_list = VEC_alloc (gimple, heap, 3);
 
       /* With the following working list, we're asking distribute_loop
         to separate the stores of the loop: when dependences allow,
@@ -1143,7 +1133,7 @@ tree_loop_distribution (void)
 
       verify_loop_structure ();
 
-      VEC_free (tree, heap, work_list);
+      VEC_free (gimple, heap, work_list);
     }
 
   return 0;
index f58bd11..66d25ec 100644 (file)
@@ -89,13 +89,13 @@ along with GCC; see the file COPYING3.  If not see
 */
 
 static void
-gather_interchange_stats (VEC (ddr_p, heap) *dependence_relations,
-                         VEC (data_reference_p, heap) *datarefs,
-                         struct loop *loop,
-                         struct loop *first_loop,
-                         unsigned int *dependence_steps, 
-                         unsigned int *nb_deps_not_carried_by_loop, 
-                         double_int *access_strides)
+gather_interchange_stats (VEC (ddr_p, heap) *dependence_relations ATTRIBUTE_UNUSED,
+                         VEC (data_reference_p, heap) *datarefs ATTRIBUTE_UNUSED,
+                         struct loop *loop ATTRIBUTE_UNUSED,
+                         struct loop *first_loop ATTRIBUTE_UNUSED,
+                         unsigned int *dependence_steps ATTRIBUTE_UNUSED,
+                         unsigned int *nb_deps_not_carried_by_loop ATTRIBUTE_UNUSED,
+                         double_int *access_strides ATTRIBUTE_UNUSED)
 {
   unsigned int i, j;
   struct data_dependence_relation *ddr;
@@ -135,7 +135,7 @@ gather_interchange_stats (VEC (ddr_p, heap) *dependence_relations,
     {
       unsigned int it;
       tree ref = DR_REF (dr);
-      tree stmt = DR_STMT (dr);
+      gimple stmt = DR_STMT (dr);
       struct loop *stmt_loop = loop_containing_stmt (stmt);
       struct loop *inner_loop = first_loop->inner;
 
@@ -319,9 +319,9 @@ linear_transform_loops (void)
   VEC(tree,heap) *oldivs = NULL;
   VEC(tree,heap) *invariants = NULL;
   VEC(tree,heap) *lambda_parameters = NULL;
-  VEC(tree,heap) *remove_ivs = VEC_alloc (tree, heap, 3);
+  VEC(gimple,heap) *remove_ivs = VEC_alloc (gimple, heap, 3);
   struct loop *loop_nest;
-  tree oldiv_stmt;
+  gimple oldiv_stmt;
   unsigned i;
 
   FOR_EACH_LOOP (li, loop_nest, 0)
@@ -412,12 +412,12 @@ linear_transform_loops (void)
       free_data_refs (datarefs);
     }
 
-  for (i = 0; VEC_iterate (tree, remove_ivs, i, oldiv_stmt); i++)
+  for (i = 0; VEC_iterate (gimple, remove_ivs, i, oldiv_stmt); i++)
     remove_iv (oldiv_stmt);
 
   VEC_free (tree, heap, oldivs);
   VEC_free (tree, heap, invariants);
-  VEC_free (tree, heap, remove_ivs);
+  VEC_free (gimple, heap, remove_ivs);
   scev_reset ();
 
   if (modified)
index 46650b3..7d15077 100644 (file)
@@ -33,7 +33,8 @@ along with GCC; see the file COPYING3.  If not see
 #include "flags.h"
 #include "function.h"
 #include "tree-inline.h"
-#include "tree-gimple.h"
+#include "gimple.h"
+#include "tree-iterator.h"
 #include "tree-flow.h"
 #include "tree-mudflap.h"
 #include "tree-dump.h"
@@ -45,6 +46,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "ggc.h"
 #include "cgraph.h"
 #include "toplev.h"
+#include "gimple.h"
 
 /* Internal function decls */
 
@@ -64,9 +66,10 @@ static void mf_xform_derefs (void);
 static unsigned int execute_mudflap_function_ops (void);
 
 /* Addressable variables instrumentation.  */
-static void mf_xform_decls (tree, tree);
-static tree mx_xfn_xform_decls (tree *, int *, void *);
-static void mx_register_decls (tree, tree *);
+static void mf_xform_decls (gimple_seq, tree);
+static tree mx_xfn_xform_decls (gimple_stmt_iterator *, bool *,
+                               struct walk_stmt_info *);
+static gimple_seq mx_register_decls (tree, gimple_seq, location_t);
 static unsigned int execute_mudflap_function_decls (void);
 
 
@@ -451,8 +454,8 @@ execute_mudflap_function_ops (void)
 static void
 mf_decl_cache_locals (void)
 {
-  tree t, shift_init_stmts, mask_init_stmts;
-  tree_stmt_iterator tsi;
+  gimple g;
+  gimple_seq seq = gimple_seq_alloc ();
 
   /* Build the cache vars.  */
   mf_cache_shift_decl_l
@@ -465,28 +468,17 @@ mf_decl_cache_locals (void)
 
   /* Build initialization nodes for the cache vars.  We just load the
      globals into the cache variables.  */
-  t = build_gimple_modify_stmt (mf_cache_shift_decl_l, mf_cache_shift_decl);
-  SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (current_function_decl));
-  gimplify_to_stmt_list (&t);
-  shift_init_stmts = t;
-
-  t = build_gimple_modify_stmt (mf_cache_mask_decl_l, mf_cache_mask_decl);
-  SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (current_function_decl));
-  gimplify_to_stmt_list (&t);
-  mask_init_stmts = t;
-
-  /* Anticipating multiple entry points, we insert the cache vars
-     initializers in each successor of the ENTRY_BLOCK_PTR.  */
-  for (tsi = tsi_start (shift_init_stmts);
-       ! tsi_end_p (tsi);
-       tsi_next (&tsi))
-    insert_edge_copies (tsi_stmt (tsi), ENTRY_BLOCK_PTR);
-
-  for (tsi = tsi_start (mask_init_stmts);
-       ! tsi_end_p (tsi);
-       tsi_next (&tsi))
-    insert_edge_copies (tsi_stmt (tsi), ENTRY_BLOCK_PTR);
-  bsi_commit_edge_inserts ();
+  g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
+  gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
+  gimple_seq_add_stmt (&seq, g);
+
+  g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
+  gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
+  gimple_seq_add_stmt (&seq, g);
+
+  insert_edge_copies_seq (seq, ENTRY_BLOCK_PTR);
+
+  gsi_commit_edge_inserts ();
 }
 
 
@@ -500,27 +492,28 @@ mf_decl_clear_locals (void)
 
 static void
 mf_build_check_statement_for (tree base, tree limit,
-                              block_stmt_iterator *instr_bsi,
-                              location_t *locus, tree dirflag)
+                              gimple_stmt_iterator *instr_gsi,
+                              location_t location, tree dirflag)
 {
-  tree_stmt_iterator head, tsi;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   basic_block cond_bb, then_bb, join_bb;
   edge e;
   tree cond, t, u, v;
   tree mf_base;
   tree mf_elem;
   tree mf_limit;
+  gimple g;
+  gimple_seq seq;
 
   /* We first need to split the current basic block, and start altering
      the CFG.  This allows us to insert the statements we're about to
      construct into the right basic blocks.  */
 
-  cond_bb = bb_for_stmt (bsi_stmt (*instr_bsi));
-  bsi = *instr_bsi;
-  bsi_prev (&bsi);
-  if (! bsi_end_p (bsi))
-    e = split_block (cond_bb, bsi_stmt (bsi));
+  cond_bb = gimple_bb (gsi_stmt (*instr_gsi));
+  gsi = *instr_gsi;
+  gsi_prev (&gsi);
+  if (! gsi_end_p (gsi))
+    e = split_block (cond_bb, gsi_stmt (gsi));
   else
     e = split_block_after_labels (cond_bb);
   cond_bb = e->src;
@@ -558,21 +551,19 @@ mf_build_check_statement_for (tree base, tree limit,
   mf_limit = create_tmp_var (mf_uintptr_type, "__mf_limit");
 
   /* Build: __mf_base = (uintptr_t) <base address expression>.  */
-  t = build_gimple_modify_stmt (mf_base,
-                               fold_convert (mf_uintptr_type,
-                                             unshare_expr (base)));
-  SET_EXPR_LOCUS (t, locus);
-  gimplify_to_stmt_list (&t);
-  head = tsi_start (t);
-  tsi = tsi_last (t);
+  seq = gimple_seq_alloc ();
+  t = fold_convert (mf_uintptr_type, unshare_expr (base));
+  gimplify_expr (&t, &seq, &seq, is_gimple_reg_rhs, fb_rvalue);
+  g = gimple_build_assign (mf_base, t);
+  gimple_set_location (g, location);
+  gimple_seq_add_stmt (&seq, g);
 
   /* Build: __mf_limit = (uintptr_t) <limit address expression>.  */
-  t = build_gimple_modify_stmt (mf_limit,
-                               fold_convert (mf_uintptr_type,
-                                             unshare_expr (limit)));
-  SET_EXPR_LOCUS (t, locus);
-  gimplify_to_stmt_list (&t);
-  tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);
+  t = fold_convert (mf_uintptr_type, unshare_expr (limit));
+  gimplify_expr (&t, &seq, &seq, is_gimple_reg_rhs, fb_rvalue);
+  g = gimple_build_assign (mf_limit, t);
+  gimple_set_location (g, location);
+  gimple_seq_add_stmt (&seq, g);
 
   /* Build: __mf_elem = &__mf_lookup_cache [(__mf_base >> __mf_shift)
                                             & __mf_mask].  */
@@ -586,10 +577,10 @@ mf_build_check_statement_for (tree base, tree limit,
               TREE_TYPE (TREE_TYPE (mf_cache_array_decl)),
               mf_cache_array_decl, t, NULL_TREE, NULL_TREE);
   t = build1 (ADDR_EXPR, mf_cache_structptr_type, t);
-  t = build_gimple_modify_stmt (mf_elem, t);
-  SET_EXPR_LOCUS (t, locus);
-  gimplify_to_stmt_list (&t);
-  tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);
+  gimplify_expr (&t, &seq, &seq, is_gimple_reg_rhs, fb_rvalue);
+  g = gimple_build_assign (mf_elem, t);
+  gimple_set_location (g, location);
+  gimple_seq_add_stmt (&seq, g);
 
   /* Quick validity check.
 
@@ -631,16 +622,18 @@ mf_build_check_statement_for (tree base, tree limit,
      result of the evaluation of 't' in a temporary variable which we
      can use as the condition for the conditional jump.  */
   t = build2 (TRUTH_OR_EXPR, boolean_type_node, t, u);
+  gimplify_expr (&t, &seq, &seq, is_gimple_reg_rhs, fb_rvalue);
   cond = create_tmp_var (boolean_type_node, "__mf_unlikely_cond");
-  t = build_gimple_modify_stmt (cond, t);
-  gimplify_to_stmt_list (&t);
-  tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);
+  g = gimple_build_assign (cond, t);
+  gimple_set_location (g, location);
+  gimple_seq_add_stmt (&seq, g);
 
   /* Build the conditional jump.  'cond' is just a temporary so we can
      simply build a void COND_EXPR.  We do need labels in both arms though.  */
-  t = build3 (COND_EXPR, void_type_node, cond, NULL_TREE, NULL_TREE);
-  SET_EXPR_LOCUS (t, locus);
-  tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);
+  g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, NULL_TREE,
+                        NULL_TREE);
+  gimple_set_location (g, location);
+  gimple_seq_add_stmt (&seq, g);
 
   /* At this point, after so much hard work, we have only constructed
      the conditional jump,
@@ -653,9 +646,8 @@ mf_build_check_statement_for (tree base, tree limit,
 
      We can insert this now in the current basic block, i.e. the one that
      the statement we're instrumenting was originally in.  */
-  bsi = bsi_last (cond_bb);
-  for (tsi = head; ! tsi_end_p (tsi); tsi_next (&tsi))
-    bsi_insert_after (&bsi, tsi_stmt (tsi), BSI_CONTINUE_LINKING);
+  gsi = gsi_last_bb (cond_bb);
+  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
 
   /*  Now build up the body of the cache-miss handling:
 
@@ -664,33 +656,31 @@ mf_build_check_statement_for (tree base, tree limit,
 
      This is the body of the conditional.  */
 
-  u = mf_file_function_line_tree (locus == NULL ? UNKNOWN_LOCATION : *locus);
+  seq = gimple_seq_alloc ();
+  /* u is a string, so it is already a gimple value.  */
+  u = mf_file_function_line_tree (location);
   /* NB: we pass the overall [base..limit] range to mf_check.  */
   v = fold_build2 (PLUS_EXPR, integer_type_node,
                   fold_build2 (MINUS_EXPR, mf_uintptr_type, mf_limit, mf_base),
                   integer_one_node);
-  t = build_call_expr (mf_check_fndecl, 4, mf_base, v, dirflag, u);
-  gimplify_to_stmt_list (&t);
-  head = tsi_start (t);
-  tsi = tsi_last (t);
+  gimplify_expr (&v, &seq, &seq, is_gimple_mem_rhs, fb_rvalue);
+  g = gimple_build_call (mf_check_fndecl, 4, mf_base, v, dirflag, u);
+  gimple_seq_add_stmt (&seq, g);
 
   if (! flag_mudflap_threads)
     {
-      t = build_gimple_modify_stmt (mf_cache_shift_decl_l,
-                                   mf_cache_shift_decl);
-      tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);
+      g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
+      gimple_seq_add_stmt (&seq, g);
 
-      t = build_gimple_modify_stmt (mf_cache_mask_decl_l,
-                                   mf_cache_mask_decl);
-      tsi_link_after (&tsi, t, TSI_CONTINUE_LINKING);
+      g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
+      gimple_seq_add_stmt (&seq, g);
     }
 
   /* Insert the check code in the THEN block.  */
-  bsi = bsi_start (then_bb);
-  for (tsi = head; ! tsi_end_p (tsi); tsi_next (&tsi))
-    bsi_insert_after (&bsi, tsi_stmt (tsi), BSI_CONTINUE_LINKING);
+  gsi = gsi_start_bb (then_bb);
+  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
 
-  *instr_bsi = bsi_start (join_bb);
+  *instr_gsi = gsi_start_bb (join_bb);
 }
 
 
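The hunks above all follow the same tuples idiom: instead of building a GENERIC
modify statement, tagging it with SET_EXPR_LOCUS and running it through
gimplify_to_stmt_list before linking it via a tree_stmt_iterator, the operand is
gimplified straight into a gimple_seq and an explicit GIMPLE assignment is
appended to that sequence.  A minimal sketch of the idiom, using only calls that
appear in the patch (the helper name and its signature are invented for
illustration):

    /* Gimplify VAL into *SEQ, then append the assignment LHS = VAL as an
       explicit GIMPLE statement carrying location LOC.  */
    static void
    emit_assign_to_seq (gimple_seq *seq, tree lhs, tree val, location_t loc)
    {
      gimple g;

      /* Force VAL into a form valid as a GIMPLE register RHS, accumulating
         any helper statements in *SEQ.  */
      gimplify_expr (&val, seq, seq, is_gimple_reg_rhs, fb_rvalue);

      g = gimple_build_assign (lhs, val);
      gimple_set_location (g, loc);
      gimple_seq_add_stmt (seq, g);
    }

Once the sequence is complete it is spliced into the block in one step with
gsi_insert_seq_after, as done for cond_bb and then_bb above.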
@@ -717,8 +707,8 @@ mf_decl_eligible_p (tree decl)
 
 
 static void
-mf_xform_derefs_1 (block_stmt_iterator *iter, tree *tp,
-                   location_t *locus, tree dirflag)
+mf_xform_derefs_1 (gimple_stmt_iterator *iter, tree *tp,
+                   location_t location, tree dirflag)
 {
   tree type, base, limit, addr, size, t;
 
@@ -898,44 +888,45 @@ mf_xform_derefs_1 (block_stmt_iterator *iter, tree *tp,
       return;
     }
 
-  mf_build_check_statement_for (base, limit, iter, locus, dirflag);
+  mf_build_check_statement_for (base, limit, iter, location, dirflag);
 }
 
 static void
 mf_xform_derefs (void)
 {
   basic_block bb, next;
-  block_stmt_iterator i;
+  gimple_stmt_iterator i;
   int saved_last_basic_block = last_basic_block;
+  enum gimple_rhs_class class;
 
   bb = ENTRY_BLOCK_PTR ->next_bb;
   do
     {
       next = bb->next_bb;
-      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
+      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
         {
-          tree s = bsi_stmt (i);
+          gimple s = gsi_stmt (i);
 
           /* Only a few GIMPLE statements can reference memory.  */
-          switch (TREE_CODE (s))
+          switch (gimple_code (s))
             {
-            case GIMPLE_MODIFY_STMT:
-              mf_xform_derefs_1 (&i, &GIMPLE_STMT_OPERAND (s, 0),
-                                EXPR_LOCUS (s), integer_one_node);
-              mf_xform_derefs_1 (&i, &GIMPLE_STMT_OPERAND (s, 1),
-                                EXPR_LOCUS (s), integer_zero_node);
+            case GIMPLE_ASSIGN:
+             mf_xform_derefs_1 (&i, gimple_assign_lhs_ptr (s),
+                                gimple_location (s), integer_one_node);
+             mf_xform_derefs_1 (&i, gimple_assign_rhs1_ptr (s),
+                                gimple_location (s), integer_zero_node);
+             class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
+             if (class == GIMPLE_BINARY_RHS)
+               mf_xform_derefs_1 (&i, gimple_assign_rhs2_ptr (s),
+                                  gimple_location (s), integer_zero_node);
               break;
 
-            case RETURN_EXPR:
-              if (TREE_OPERAND (s, 0) != NULL_TREE)
+            case GIMPLE_RETURN:
+              if (gimple_return_retval (s) != NULL_TREE)
                 {
-                  if (TREE_CODE (TREE_OPERAND (s, 0)) == GIMPLE_MODIFY_STMT)
-                    mf_xform_derefs_1 (&i, &GIMPLE_STMT_OPERAND
-                                            (TREE_OPERAND (s, 0), 1),
-                                       EXPR_LOCUS (s), integer_zero_node);
-                  else
-                    mf_xform_derefs_1 (&i, &TREE_OPERAND (s, 0), EXPR_LOCUS (s),
-                                       integer_zero_node);
+                  mf_xform_derefs_1 (&i, gimple_return_retval_ptr (s),
+                                    gimple_location (s),
+                                    integer_zero_node);
                 }
               break;
 
@@ -970,7 +961,7 @@ execute_mudflap_function_decls (void)
 
   push_gimplify_context (&gctx);
 
-  mf_xform_decls (DECL_SAVED_TREE (current_function_decl),
+  mf_xform_decls (gimple_body (current_function_decl),
                   DECL_ARGUMENTS (current_function_decl));
 
   pop_gimplify_context (NULL);
@@ -988,12 +979,13 @@ struct mf_xform_decls_data
 
 /* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR, for this chain of
    _DECLs if appropriate.  Arrange to call the __mf_register function
-   now, and the __mf_unregister function later for each.  */
-static void
-mx_register_decls (tree decl, tree *stmt_list)
+   now, and the __mf_unregister function later for each.  Return the
+   gimple sequence after synthesis.  */
+gimple_seq
+mx_register_decls (tree decl, gimple_seq seq, location_t location)
 {
-  tree finally_stmts = NULL_TREE;
-  tree_stmt_iterator initially_stmts = tsi_start (*stmt_list);
+  gimple_seq finally_stmts = NULL;
+  gimple_stmt_iterator initially_stmts = gsi_start (seq);
 
   while (decl != NULL_TREE)
     {
@@ -1005,46 +997,46 @@ mx_register_decls (tree decl, tree *stmt_list)
           && ! TREE_STATIC (decl))
         {
           tree size = NULL_TREE, variable_name;
-          tree unregister_fncall, unregister_fncall_param;
-          tree register_fncall, register_fncall_param;
+          gimple unregister_fncall, register_fncall;
+         tree unregister_fncall_param, register_fncall_param;
 
+         /* Variable-sized objects should have their sizes gimplified
+            by the time we get here.  */
          size = convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (decl)));
-
+         gcc_assert (is_gimple_val (size));
+       
 
           unregister_fncall_param =
-           convert (ptr_type_node,
-                    mf_mark (build1 (ADDR_EXPR,
-                                     build_pointer_type (TREE_TYPE (decl)),
-                                     decl)));
+           mf_mark (build1 (ADDR_EXPR,
+                            build_pointer_type (TREE_TYPE (decl)),
+                            decl));
           /* __mf_unregister (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK) */
-          unregister_fncall = build_call_expr (mf_unregister_fndecl, 3,
-                                              unregister_fncall_param,
-                                              size,
-                                              build_int_cst (NULL_TREE, 3));
+          unregister_fncall = gimple_build_call (mf_unregister_fndecl, 3,
+                                                unregister_fncall_param,
+                                                size,
+                                                build_int_cst (NULL_TREE, 3));
 
 
           variable_name = mf_varname_tree (decl);
           register_fncall_param =
-           convert (ptr_type_node,
-                    mf_mark (build1 (ADDR_EXPR,
-                                     build_pointer_type (TREE_TYPE (decl)),
-                                     decl)));
+           mf_mark (build1 (ADDR_EXPR,
+                            build_pointer_type (TREE_TYPE (decl)),
+                            decl));
           /* __mf_register (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK,
                            "name") */
-         register_fncall = build_call_expr (mf_register_fndecl, 4,
-                                            register_fncall_param,
-                                            size,
-                                            build_int_cst (NULL_TREE, 3),
-                                            variable_name);
-
+         register_fncall = gimple_build_call (mf_register_fndecl, 4,
+                                              register_fncall_param,
+                                              size,
+                                              build_int_cst (NULL_TREE, 3),
+                                              variable_name);
+         
 
           /* Accumulate the two calls.  */
-          /* ??? Set EXPR_LOCATION.  */
-          gimplify_stmt (&register_fncall);
-          gimplify_stmt (&unregister_fncall);
+         gimple_set_location (register_fncall, location);
+         gimple_set_location (unregister_fncall, location);
 
           /* Add the __mf_register call at the current appending point.  */
-          if (tsi_end_p (initially_stmts))
+          if (gsi_end_p (initially_stmts))
            {
              if (!DECL_ARTIFICIAL (decl))
                warning (OPT_Wmudflap,
@@ -1053,11 +1045,11 @@ mx_register_decls (tree decl, tree *stmt_list)
            }
          else
            {
-             tsi_link_before (&initially_stmts, register_fncall,
-                              TSI_SAME_STMT);
+             gsi_insert_before (&initially_stmts, register_fncall,
+                                GSI_SAME_STMT);
 
              /* Accumulate the FINALLY piece.  */
-             append_to_statement_list (unregister_fncall, &finally_stmts);
+             gimple_seq_add_stmt (&finally_stmts, unregister_fncall);
            }
           mf_mark (decl);
         }
@@ -1066,39 +1058,46 @@ mx_register_decls (tree decl, tree *stmt_list)
     }
 
   /* Actually, (initially_stmts!=NULL) <=> (finally_stmts!=NULL) */
-  if (finally_stmts != NULL_TREE)
+  if (finally_stmts != NULL)
     {
-      tree t = build2 (TRY_FINALLY_EXPR, void_type_node,
-                       *stmt_list, finally_stmts);
-      *stmt_list = NULL;
-      append_to_statement_list (t, stmt_list);
+      gimple stmt = gimple_build_try (seq, finally_stmts, GIMPLE_TRY_FINALLY);
+      gimple_seq new_seq = gimple_seq_alloc ();
+
+      gimple_seq_add_stmt (&new_seq, stmt);
+      return new_seq;
     }
+   else
+    return seq;
 }
 
 
 /* Process every variable mentioned in BIND_EXPRs.  */
 static tree
-mx_xfn_xform_decls (tree *t, int *continue_p, void *data)
+mx_xfn_xform_decls (gimple_stmt_iterator *gsi,
+                   bool *handled_operands_p ATTRIBUTE_UNUSED,
+                   struct walk_stmt_info *wi)
 {
-  struct mf_xform_decls_data* d = (struct mf_xform_decls_data*) data;
-
-  if (*t == NULL_TREE || *t == error_mark_node)
-    {
-      *continue_p = 0;
-      return NULL_TREE;
-    }
+  struct mf_xform_decls_data *d = (struct mf_xform_decls_data *) wi->info;
+  gimple stmt = gsi_stmt (*gsi);
 
-  *continue_p = 1;
-
-  switch (TREE_CODE (*t))
+  switch (gimple_code (stmt))
     {
-    case BIND_EXPR:
+    case GIMPLE_BIND:
       {
         /* Process function parameters now (but only once).  */
-        mx_register_decls (d->param_decls, &BIND_EXPR_BODY (*t));
-        d->param_decls = NULL_TREE;
+       if (d->param_decls)
+         {
+           gimple_bind_set_body (stmt,
+                                 mx_register_decls (d->param_decls,
+                                                    gimple_bind_body (stmt),
+                                                    gimple_location (stmt)));
+           d->param_decls = NULL_TREE;
+         }
 
-        mx_register_decls (BIND_EXPR_VARS (*t), &BIND_EXPR_BODY (*t));
+       gimple_bind_set_body (stmt,
+                             mx_register_decls (gimple_bind_vars (stmt),
+                                                gimple_bind_body (stmt),
+                                                gimple_location (stmt)));
       }
       break;
 
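mx_register_decls, shown a few hunks above, now returns a fresh sequence instead
of editing a statement list in place: the per-decl __mf_register calls go in at
the head of SEQ, the matching __mf_unregister calls are accumulated in
FINALLY_STMTS, and the whole body is then wrapped in a GIMPLE_TRY_FINALLY.  A
condensed sketch of that wrapping step, restricted to calls visible in the
patch:

    /* Wrap BODY in a try/finally so that CLEANUP always runs.  */
    static gimple_seq
    wrap_in_try_finally (gimple_seq body, gimple_seq cleanup)
    {
      gimple try_stmt;
      gimple_seq new_seq;

      if (cleanup == NULL)
        return body;

      try_stmt = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
      new_seq = gimple_seq_alloc ();
      gimple_seq_add_stmt (&new_seq, try_stmt);
      return new_seq;
    }

The caller then installs the result with gimple_bind_set_body, as
mx_xfn_xform_decls does for GIMPLE_BIND statements above.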
@@ -1118,11 +1117,18 @@ mx_xfn_xform_decls (tree *t, int *continue_p, void *data)
 */
 
 static void
-mf_xform_decls (tree fnbody, tree fnparams)
+mf_xform_decls (gimple_seq fnbody, tree fnparams)
 {
   struct mf_xform_decls_data d;
+  struct walk_stmt_info wi;
+  struct pointer_set_t *pset = pointer_set_create ();
+
   d.param_decls = fnparams;
-  walk_tree_without_duplicates (&fnbody, mx_xfn_xform_decls, &d);
+  memset (&wi, 0, sizeof (wi));
+  wi.info = (void*) &d;
+  wi.pset = pset;
+  walk_gimple_seq (fnbody, mx_xfn_xform_decls, NULL, &wi);
+  pointer_set_destroy (pset);
 }
 
 
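mf_xform_decls now drives the rewriting with walk_gimple_seq rather than
walk_tree_without_duplicates: a statement callback and an operand callback are
passed alongside a walk_stmt_info that carries the user data and an explicit
pointer set for duplicate avoidance.  A hedged, minimal example of the pattern
(the callback and its purpose are illustrative only, not part of the patch):

    /* Count GIMPLE_BIND statements in SEQ using the walker API above.  */

    static tree
    count_binds_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
                      struct walk_stmt_info *wi)
    {
      if (gimple_code (gsi_stmt (*gsi)) == GIMPLE_BIND)
        ++*(int *) wi->info;

      /* Leaving *HANDLED_OPS_P false tells the walker to visit this
         statement's operands itself.  */
      *handled_ops_p = false;
      return NULL_TREE;
    }

    static int
    count_binds (gimple_seq seq)
    {
      struct walk_stmt_info wi;
      int n = 0;

      memset (&wi, 0, sizeof (wi));
      wi.info = &n;
      walk_gimple_seq (seq, count_binds_stmt, NULL, &wi);
      return n;
    }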
index 2256050..8f4ab04 100644
@@ -1,4 +1,4 @@
-/* Nested function decomposition for trees.
+/* Nested function decomposition for GIMPLE.
    Copyright (C) 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
 
    This file is part of GCC.
@@ -15,7 +15,7 @@
 
    You should have received a copy of the GNU General Public License
    along with GCC; see the file COPYING3.  If not see
-<http://www.gnu.org/licenses/>.  */
+   <http://www.gnu.org/licenses/>.  */
 
 #include "config.h"
 #include "system.h"
@@ -27,7 +27,7 @@
 #include "function.h"
 #include "tree-dump.h"
 #include "tree-inline.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "tree-iterator.h"
 #include "tree-flow.h"
 #include "cgraph.h"
@@ -355,46 +355,70 @@ get_chain_field (struct nesting_info *info)
   return field;
 }
 
+/* Initialize a new temporary with the GIMPLE_CALL STMT.  */
+
+static tree
+init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
+                       gimple call)
+{
+  tree t;
+
+  t = create_tmp_var_for (info, TREE_TYPE (TREE_TYPE (gimple_call_fn (call))),
+                          NULL);
+  gimple_call_set_lhs (call, t);
+  if (! gsi_end_p (*gsi))
+    gimple_set_location (call, gimple_location (gsi_stmt (*gsi)));
+  gsi_insert_before (gsi, call, GSI_SAME_STMT);
+
+  return t;
+}
+
+  
 /* Copy EXP into a temporary.  Allocate the temporary in the context of
-   INFO and insert the initialization statement before TSI.  */
+   INFO and insert the initialization statement before GSI.  */
 
 static tree
-init_tmp_var (struct nesting_info *info, tree exp, tree_stmt_iterator *tsi)
+init_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
 {
-  tree t, stmt;
+  tree t;
+  gimple stmt;
 
   t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
-  stmt = build_gimple_modify_stmt (t, exp);
-  SET_EXPR_LOCUS (stmt, EXPR_LOCUS (tsi_stmt (*tsi)));
-  tsi_link_before (tsi, stmt, TSI_SAME_STMT);
+  stmt = gimple_build_assign (t, exp);
+  if (! gsi_end_p (*gsi))
+    gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
+  gsi_insert_before_without_update (gsi, stmt, GSI_SAME_STMT);
 
   return t;
 }
 
+
 /* Similarly, but only do so to force EXP to satisfy is_gimple_val.  */
 
 static tree
-tsi_gimplify_val (struct nesting_info *info, tree exp, tree_stmt_iterator *tsi)
+gsi_gimplify_val (struct nesting_info *info, tree exp,
+                 gimple_stmt_iterator *gsi)
 {
   if (is_gimple_val (exp))
     return exp;
   else
-    return init_tmp_var (info, exp, tsi);
+    return init_tmp_var (info, exp, gsi);
 }
 
 /* Similarly, but copy from the temporary and insert the statement
    after the iterator.  */
 
 static tree
-save_tmp_var (struct nesting_info *info, tree exp,
-             tree_stmt_iterator *tsi)
+save_tmp_var (struct nesting_info *info, tree exp, gimple_stmt_iterator *gsi)
 {
-  tree t, stmt;
+  tree t;
+  gimple stmt;
 
   t = create_tmp_var_for (info, TREE_TYPE (exp), NULL);
-  stmt = build_gimple_modify_stmt (exp, t);
-  SET_EXPR_LOCUS (stmt, EXPR_LOCUS (tsi_stmt (*tsi)));
-  tsi_link_after (tsi, stmt, TSI_SAME_STMT);
+  stmt = gimple_build_assign (exp, t);
+  if (! gsi_end_p (*gsi))
+    gimple_set_location (stmt, gimple_location (gsi_stmt (*gsi)));
+  gsi_insert_after_without_update (gsi, stmt, GSI_SAME_STMT);
 
   return t;
 }
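init_tmp_var, gsi_gimplify_val and save_tmp_var are the workhorses that keep the
rewritten code in valid GIMPLE form: whenever a substituted operand is not a
gimple value, it is spilled into a fresh temporary with an explicit assignment
inserted at the iterator.  A small usage sketch built on the helpers above (the
wrapper itself is hypothetical):

    /* Load X->FIELD into a register temporary so the result can be used
       where only gimple values are allowed.  */
    static tree
    load_field_to_tmp (struct nesting_info *info, tree x, tree field,
                       gimple_stmt_iterator *gsi)
    {
      tree ref = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
      return gsi_gimplify_val (info, ref, gsi);
    }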
@@ -512,236 +536,102 @@ get_nl_goto_field (struct nesting_info *info)
 
   return field;
 }
-\f
-/* Helper function for walk_stmts.  Walk output operands of an ASM_EXPR.  */
-
-static void
-walk_asm_expr (struct walk_stmt_info *wi, tree stmt)
-{
-  int noutputs = list_length (ASM_OUTPUTS (stmt));
-  const char **oconstraints
-    = (const char **) alloca ((noutputs) * sizeof (const char *));
-  int i;
-  tree link;
-  const char *constraint;
-  bool allows_mem, allows_reg, is_inout;
-
-  wi->is_lhs = true;
-  for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
-    {
-      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
-      oconstraints[i] = constraint;
-      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
-                              &allows_reg, &is_inout);
-
-      wi->val_only = (allows_reg || !allows_mem);
-      walk_tree (&TREE_VALUE (link), wi->callback, wi, NULL);
-    }
-
-  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
-    {
-      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
-      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
-                             oconstraints, &allows_mem, &allows_reg);
-
-      wi->val_only = (allows_reg || !allows_mem);
-      /* Although input "m" is not really a LHS, we need a lvalue.  */
-      wi->is_lhs = !wi->val_only;
-      walk_tree (&TREE_VALUE (link), wi->callback, wi, NULL);
-    }
-
-  wi->is_lhs = false;
-  wi->val_only = true;
-}
-
-/* Iterate over all sub-statements of *TP calling walk_tree with
-   WI->CALLBACK for every sub-expression in each statement found.  */
-
-void
-walk_stmts (struct walk_stmt_info *wi, tree *tp)
-{
-  tree t = *tp;
-  int walk_subtrees;
-
-  if (!t)
-    return;
 
-  if (wi->want_locations && EXPR_HAS_LOCATION (t))
-    input_location = EXPR_LOCATION (t);
-
-  switch (TREE_CODE (t))
-    {
-    case STATEMENT_LIST:
-      {
-       tree_stmt_iterator i;
-       for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
-         {
-           wi->tsi = i;
-           walk_stmts (wi, tsi_stmt_ptr (i));
-         }
-      }
-      break;
-
-    case COND_EXPR:
-      walk_tree (&COND_EXPR_COND (t), wi->callback, wi, NULL);
-      walk_stmts (wi, &COND_EXPR_THEN (t));
-      walk_stmts (wi, &COND_EXPR_ELSE (t));
-      break;
-    case CATCH_EXPR:
-      walk_stmts (wi, &CATCH_BODY (t));
-      break;
-    case EH_FILTER_EXPR:
-      walk_stmts (wi, &EH_FILTER_FAILURE (t));
-      break;
-    case TRY_CATCH_EXPR:
-    case TRY_FINALLY_EXPR:
-      walk_stmts (wi, &TREE_OPERAND (t, 0));
-      walk_stmts (wi, &TREE_OPERAND (t, 1));
-      break;
-
-    case BIND_EXPR:
-      if (wi->want_bind_expr)
-       {
-         walk_subtrees = 1;
-         wi->callback (tp, &walk_subtrees, wi);
-         if (!walk_subtrees)
-           break;
-       }
-      walk_stmts (wi, &BIND_EXPR_BODY (t));
-      break;
-
-    case RETURN_EXPR:
-      if (wi->want_return_expr)
-       {
-         walk_subtrees = 1;
-         wi->callback (tp, &walk_subtrees, wi);
-         if (!walk_subtrees)
-           break;
-       }
-      walk_stmts (wi, &TREE_OPERAND (t, 0));
-      break;
-
-    case GIMPLE_MODIFY_STMT:
-      /* A formal temporary lhs may use a COMPONENT_REF rhs.  */
-      wi->val_only = !is_gimple_formal_tmp_var (GIMPLE_STMT_OPERAND (t, 0));
-      walk_tree (&GIMPLE_STMT_OPERAND (t, 1), wi->callback, wi, NULL);
-
-      /* If the rhs is appropriate for a memory, we may use a
-        COMPONENT_REF on the lhs.  */
-      wi->val_only = !is_gimple_mem_rhs (GIMPLE_STMT_OPERAND (t, 1));
-      wi->is_lhs = true;
-      walk_tree (&GIMPLE_STMT_OPERAND (t, 0), wi->callback, wi, NULL);
-
-      wi->val_only = true;
-      wi->is_lhs = false;
-      break;
-
-    case ASM_EXPR:
-      walk_asm_expr (wi, *tp);
-      break;
-
-    default:
-      wi->val_only = true;
-      walk_tree (tp, wi->callback, wi, NULL);
-      break;
-    }
-}
-
-/* Invoke CALLBACK on all statements of *STMT_P.  */
+/* Invoke CALLBACK on all statements of GIMPLE sequence SEQ.  */
 
 static void
-walk_body (walk_tree_fn callback, struct nesting_info *info, tree *stmt_p)
+walk_body (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
+          struct nesting_info *info, gimple_seq seq)
 {
   struct walk_stmt_info wi;
 
   memset (&wi, 0, sizeof (wi));
-  wi.callback = callback;
   wi.info = info;
   wi.val_only = true;
-
-  walk_stmts (&wi, stmt_p);
+  walk_gimple_seq (seq, callback_stmt, callback_op, &wi);
 }
 
-/* Invoke CALLBACK on all statements of INFO->CONTEXT.  */
+
+/* Invoke CALLBACK_STMT/CALLBACK_OP on all statements of INFO->CONTEXT.  */
 
 static inline void
-walk_function (walk_tree_fn callback, struct nesting_info *info)
+walk_function (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
+              struct nesting_info *info)
 {
-  walk_body (callback, info, &DECL_SAVED_TREE (info->context));
+  walk_body (callback_stmt, callback_op, info, gimple_body (info->context));
 }
 
-/* Invoke CALLBACK on OMP_FOR init, cond, incr and pre-body.  */
+/* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body.  */
 
 static void
-walk_omp_for (walk_tree_fn callback, struct nesting_info *info, tree for_stmt)
+walk_gimple_omp_for (gimple for_stmt,
+                    walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
+                    struct nesting_info *info)
 {
   struct walk_stmt_info wi;
-  tree t, list = NULL, empty;
-  int i;
+  gimple_seq seq;
+  tree t;
+  size_t i;
 
-  walk_body (callback, info, &OMP_FOR_PRE_BODY (for_stmt));
+  walk_body (callback_stmt, callback_op, info, gimple_omp_for_pre_body (for_stmt));
 
-  empty = build_empty_stmt ();
-  append_to_statement_list_force (empty, &list);
+  seq = gimple_seq_alloc ();
   memset (&wi, 0, sizeof (wi));
-  wi.callback = callback;
   wi.info = info;
-  wi.tsi = tsi_last (list);
+  wi.gsi = gsi_last (seq);
 
-  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
+  for (i = 0; i < gimple_omp_for_collapse (for_stmt); i++)
     {
-      t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
-      gcc_assert (TREE_CODE (t) == GIMPLE_MODIFY_STMT);
-      SET_EXPR_LOCUS (empty, EXPR_LOCUS (t));
       wi.val_only = false;
-      walk_tree (&GIMPLE_STMT_OPERAND (t, 0), callback, &wi, NULL);
+      walk_tree (gimple_omp_for_index_ptr (for_stmt, i), callback_op,
+                &wi, NULL);
       wi.val_only = true;
       wi.is_lhs = false;
-      walk_tree (&GIMPLE_STMT_OPERAND (t, 1), callback, &wi, NULL);
+      walk_tree (gimple_omp_for_initial_ptr (for_stmt, i), callback_op,
+                &wi, NULL);
 
-      t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
-      gcc_assert (COMPARISON_CLASS_P (t));
-      SET_EXPR_LOCUS (empty, EXPR_LOCUS (t));
-      wi.val_only = false;
-      walk_tree (&TREE_OPERAND (t, 0), callback, &wi, NULL);
       wi.val_only = true;
       wi.is_lhs = false;
-      walk_tree (&TREE_OPERAND (t, 1), callback, &wi, NULL);
+      walk_tree (gimple_omp_for_final_ptr (for_stmt, i), callback_op,
+                &wi, NULL);
 
-      t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
-      gcc_assert (TREE_CODE (t) == GIMPLE_MODIFY_STMT);
-      SET_EXPR_LOCUS (empty, EXPR_LOCUS (t));
-      wi.val_only = false;
-      walk_tree (&GIMPLE_STMT_OPERAND (t, 0), callback, &wi, NULL);
-      t = GIMPLE_STMT_OPERAND (t, 1);
+      t = gimple_omp_for_incr (for_stmt, i);
       gcc_assert (BINARY_CLASS_P (t));
       wi.val_only = false;
-      walk_tree (&TREE_OPERAND (t, 0), callback, &wi, NULL);
+      walk_tree (&TREE_OPERAND (t, 0), callback_op, &wi, NULL);
       wi.val_only = true;
       wi.is_lhs = false;
-      walk_tree (&TREE_OPERAND (t, 1), callback, &wi, NULL);
+      walk_tree (&TREE_OPERAND (t, 1), callback_op, &wi, NULL);
     }
 
-  /* Remove empty statement added above from the end of statement list.  */
-  tsi_delink (&wi.tsi);
-  append_to_statement_list (list, &OMP_FOR_PRE_BODY (for_stmt));
+  if (gimple_seq_empty_p (seq))
+    gimple_seq_free (seq);
+  else
+    {
+      gimple_seq pre_body = gimple_omp_for_pre_body (for_stmt);
+      annotate_all_with_location (seq, gimple_location (for_stmt));
+      gimple_seq_add_seq (&pre_body, seq);
+      gimple_omp_for_set_pre_body (for_stmt, pre_body);
+    }
 }
 
 /* Similarly for ROOT and all functions nested underneath, depth first.  */
     
 static void
-walk_all_functions (walk_tree_fn callback, struct nesting_info *root)
+walk_all_functions (walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
+                   struct nesting_info *root)
 {
   do
     {
       if (root->inner)
-       walk_all_functions (callback, root->inner);
-      walk_function (callback, root);
+       walk_all_functions (callback_stmt, callback_op, root->inner);
+      walk_function (callback_stmt, callback_op, root);
       root = root->next;
     }
   while (root);
 }
-\f
+
+
/* We have to check for a fairly pathological case.  The operands of a
   nested function are to be interpreted in the context of the enclosing
    function.  So if any are variably-sized, they will get remapped when the
@@ -817,7 +707,7 @@ create_nesting_tree (struct cgraph_node *cgn)
 
 static tree
 get_static_chain (struct nesting_info *info, tree target_context,
-                 tree_stmt_iterator *tsi)
+                 gimple_stmt_iterator *gsi)
 {
   struct nesting_info *i;
   tree x;
@@ -836,20 +726,21 @@ get_static_chain (struct nesting_info *info, tree target_context,
 
          x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
          x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
-         x = init_tmp_var (info, x, tsi);
+         x = init_tmp_var (info, x, gsi);
        }
     }
 
   return x;
 }
 
+
 /* Return an expression referencing FIELD from TARGET_CONTEXT's non-local
    frame as seen from INFO->CONTEXT.  Insert any necessary computations
-   before TSI.  */
+   before GSI.  */
 
 static tree
 get_frame_field (struct nesting_info *info, tree target_context,
-                tree field, tree_stmt_iterator *tsi)
+                tree field, gimple_stmt_iterator *gsi)
 {
   struct nesting_info *i;
   tree x;
@@ -870,7 +761,7 @@ get_frame_field (struct nesting_info *info, tree target_context,
 
          x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
          x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
-         x = init_tmp_var (info, x, tsi);
+         x = init_tmp_var (info, x, gsi);
        }
 
       x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
@@ -880,7 +771,8 @@ get_frame_field (struct nesting_info *info, tree target_context,
   return x;
 }
 
-/* A subroutine of convert_nonlocal_reference.  Create a local variable
+
+/* A subroutine of convert_nonlocal_reference_op.  Create a local variable
    in the nested function with DECL_VALUE_EXPR set to reference the true
    variable in the parent function.  This is used both for debug info 
    and in OpenMP lowering.  */
@@ -947,7 +839,8 @@ get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
   return new_decl;
 }
 
-/* Called via walk_function+walk_tree, rewrite all references to VAR
+
+/* Callback for walk_gimple_stmt, rewrite all references to VAR
    and PARM_DECLs that belong to outer functions.
 
    The rewrite will involve some number of structure accesses back up
@@ -955,16 +848,12 @@ get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
    be CHAIN->FOO.  For two levels it'll be CHAIN->__chain->FOO.  Further
    indirections apply to decls for which use_pointer_in_frame is true.  */
 
-static bool convert_nonlocal_omp_clauses (tree *, struct walk_stmt_info *);
-
 static tree
-convert_nonlocal_reference (tree *tp, int *walk_subtrees, void *data)
+convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
 {
   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
   struct nesting_info *const info = (struct nesting_info *) wi->info;
   tree t = *tp;
-  tree save_local_var_chain;
-  bitmap save_suppress;
 
   *walk_subtrees = 0;
   switch (TREE_CODE (t))
@@ -989,10 +878,10 @@ convert_nonlocal_reference (tree *tp, int *walk_subtrees, void *data)
              for (i = info->outer; i->context != target_context; i = i->outer)
                continue;
              x = lookup_field_for_decl (i, t, INSERT);
-             x = get_frame_field (info, target_context, x, &wi->tsi);
+             x = get_frame_field (info, target_context, x, &wi->gsi);
              if (use_pointer_in_frame (t))
                {
-                 x = init_tmp_var (info, x, &wi->tsi);
+                 x = init_tmp_var (info, x, &wi->gsi);
                  x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
                }
            }
@@ -1000,25 +889,15 @@ convert_nonlocal_reference (tree *tp, int *walk_subtrees, void *data)
          if (wi->val_only)
            {
              if (wi->is_lhs)
-               x = save_tmp_var (info, x, &wi->tsi);
+               x = save_tmp_var (info, x, &wi->gsi);
              else
-               x = init_tmp_var (info, x, &wi->tsi);
+               x = init_tmp_var (info, x, &wi->gsi);
            }
 
          *tp = x;
        }
       break;
 
-    case GOTO_EXPR:
-      /* Don't walk non-local gotos for now.  */
-      if (TREE_CODE (GOTO_DESTINATION (t)) != LABEL_DECL)
-       {
-         *walk_subtrees = 1;
-         wi->val_only = true;
-         wi->is_lhs = false;
-       }
-      break;
-
     case LABEL_DECL:
       /* We're taking the address of a label from a parent function, but
         this is not itself a non-local goto.  Mark the label such that it
@@ -1035,7 +914,7 @@ convert_nonlocal_reference (tree *tp, int *walk_subtrees, void *data)
        wi->val_only = false;
        wi->is_lhs = false;
        wi->changed = false;
-       walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference, wi, NULL);
+       walk_tree (&TREE_OPERAND (t, 0), convert_nonlocal_reference_op, wi, 0);
        wi->val_only = true;
 
        if (wi->changed)
@@ -1053,8 +932,8 @@ convert_nonlocal_reference (tree *tp, int *walk_subtrees, void *data)
               where we only accept variables (and min_invariant, presumably),
               then compute the address into a temporary.  */
            if (save_val_only)
-             *tp = tsi_gimplify_val ((struct nesting_info *) wi->info,
-                                     t, &wi->tsi);
+             *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
+                                     t, &wi->gsi);
          }
       }
       break;
@@ -1073,28 +952,28 @@ convert_nonlocal_reference (tree *tp, int *walk_subtrees, void *data)
       for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
        {
          if (TREE_CODE (t) == COMPONENT_REF)
-           walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference, wi,
+           walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op, wi,
                       NULL);
          else if (TREE_CODE (t) == ARRAY_REF
                   || TREE_CODE (t) == ARRAY_RANGE_REF)
            {
-             walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference, wi,
-                        NULL);
-             walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference, wi,
-                        NULL);
-             walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference, wi,
-                        NULL);
+             walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
+                        wi, NULL);
+             walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
+                        wi, NULL);
+             walk_tree (&TREE_OPERAND (t, 3), convert_nonlocal_reference_op,
+                        wi, NULL);
            }
          else if (TREE_CODE (t) == BIT_FIELD_REF)
            {
-             walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference, wi,
-                        NULL);
-             walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference, wi,
-                        NULL);
+             walk_tree (&TREE_OPERAND (t, 1), convert_nonlocal_reference_op,
+                        wi, NULL);
+             walk_tree (&TREE_OPERAND (t, 2), convert_nonlocal_reference_op,
+                        wi, NULL);
            }
        }
       wi->val_only = false;
-      walk_tree (tp, convert_nonlocal_reference, wi, NULL);
+      walk_tree (tp, convert_nonlocal_reference_op, wi, NULL);
       break;
 
     case VIEW_CONVERT_EXPR:
@@ -1104,52 +983,6 @@ convert_nonlocal_reference (tree *tp, int *walk_subtrees, void *data)
       *walk_subtrees = 1;
       break;
 
-    case OMP_PARALLEL:
-    case OMP_TASK:
-      save_suppress = info->suppress_expansion;
-      if (convert_nonlocal_omp_clauses (&OMP_TASKREG_CLAUSES (t), wi))
-       {
-         tree c, decl;
-         decl = get_chain_decl (info);
-         c = build_omp_clause (OMP_CLAUSE_FIRSTPRIVATE);
-         OMP_CLAUSE_DECL (c) = decl;
-         OMP_CLAUSE_CHAIN (c) = OMP_TASKREG_CLAUSES (t);
-         OMP_TASKREG_CLAUSES (t) = c;
-       }
-
-      save_local_var_chain = info->new_local_var_chain;
-      info->new_local_var_chain = NULL;
-
-      walk_body (convert_nonlocal_reference, info, &OMP_TASKREG_BODY (t));
-
-      if (info->new_local_var_chain)
-       declare_vars (info->new_local_var_chain, OMP_TASKREG_BODY (t), false);
-      info->new_local_var_chain = save_local_var_chain;
-      info->suppress_expansion = save_suppress;
-      break;
-
-    case OMP_FOR:
-      save_suppress = info->suppress_expansion;
-      convert_nonlocal_omp_clauses (&OMP_FOR_CLAUSES (t), wi);
-      walk_omp_for (convert_nonlocal_reference, info, t);
-      walk_body (convert_nonlocal_reference, info, &OMP_FOR_BODY (t));
-      info->suppress_expansion = save_suppress;
-      break;
-
-    case OMP_SECTIONS:
-    case OMP_SINGLE:
-      save_suppress = info->suppress_expansion;
-      convert_nonlocal_omp_clauses (&OMP_CLAUSES (t), wi);
-      walk_body (convert_nonlocal_reference, info, &OMP_BODY (t));
-      info->suppress_expansion = save_suppress;
-      break;
-
-    case OMP_SECTION:
-    case OMP_MASTER:
-    case OMP_ORDERED:
-      walk_body (convert_nonlocal_reference, info, &OMP_BODY (t));
-      break;
-
     default:
       if (!IS_TYPE_OR_DECL_P (t))
        {
@@ -1163,6 +996,12 @@ convert_nonlocal_reference (tree *tp, int *walk_subtrees, void *data)
   return NULL_TREE;
 }
 
+static tree convert_nonlocal_reference_stmt (gimple_stmt_iterator *, bool *,
+                                            struct walk_stmt_info *);
+
+/* Helper for convert_nonlocal_references, rewrite all references to VAR
+   and PARM_DECLs that belong to outer functions.  */
+
 static bool
 convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
 {
@@ -1185,7 +1024,7 @@ convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
          goto do_decl_clause;
 
        case OMP_CLAUSE_LASTPRIVATE:
-         if (OMP_CLAUSE_LASTPRIVATE_STMT (clause))
+         if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
            need_stmts = true;
          goto do_decl_clause;
 
@@ -1214,8 +1053,8 @@ convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
        case OMP_CLAUSE_NUM_THREADS:
          wi->val_only = true;
          wi->is_lhs = false;
-         convert_nonlocal_reference (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
-                                     wi);
+         convert_nonlocal_reference_op (&OMP_CLAUSE_OPERAND (clause, 0),
+                                        &dummy, wi);
          break;
 
        case OMP_CLAUSE_NOWAIT:
@@ -1244,18 +1083,21 @@ convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
                = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
              DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
                = info->context;
-             walk_body (convert_nonlocal_reference, info,
-                        &OMP_CLAUSE_REDUCTION_INIT (clause));
-             walk_body (convert_nonlocal_reference, info,
-                        &OMP_CLAUSE_REDUCTION_MERGE (clause));
+             walk_body (convert_nonlocal_reference_stmt,
+                        convert_nonlocal_reference_op, info,
+                        OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
+             walk_body (convert_nonlocal_reference_stmt,
+                        convert_nonlocal_reference_op, info,
+                        OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
              DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
                = old_context;
            }
          break;
 
        case OMP_CLAUSE_LASTPRIVATE:
-         walk_body (convert_nonlocal_reference, info,
-                    &OMP_CLAUSE_LASTPRIVATE_STMT (clause));
+         walk_body (convert_nonlocal_reference_stmt,
+                    convert_nonlocal_reference_op, info,
+                    OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
          break;
 
        default:
@@ -1265,6 +1107,110 @@ convert_nonlocal_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
   return need_chain;
 }
 
+
+/* Callback for walk_gimple_stmt.  Rewrite all references to VAR and
+   PARM_DECLs that belong to outer functions.  This handles statements
+   that are not handled via the standard recursion done in
+   walk_gimple_stmt.  GSI points to the statement to examine, WI is as in
+   convert_nonlocal_reference_op.  Set *HANDLED_OPS_P to true if all the
+   operands of STMT have been handled by this function.  */
+
+static tree
+convert_nonlocal_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
+                                struct walk_stmt_info *wi)
+{
+  struct nesting_info *info = (struct nesting_info *) wi->info;
+  tree save_local_var_chain;
+  bitmap save_suppress;
+  gimple stmt = gsi_stmt (*gsi);
+
+  switch (gimple_code (stmt))
+    {
+    case GIMPLE_GOTO:
+      /* Don't walk non-local gotos for now.  */
+      if (TREE_CODE (gimple_goto_dest (stmt)) != LABEL_DECL)
+       {
+         wi->val_only = true;
+         wi->is_lhs = false;
+         *handled_ops_p = true;
+         return NULL_TREE;
+       }
+      break;
+
+    case GIMPLE_OMP_PARALLEL:
+    case GIMPLE_OMP_TASK:
+      save_suppress = info->suppress_expansion;
+      if (convert_nonlocal_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
+                                       wi))
+       {
+         tree c, decl;
+         decl = get_chain_decl (info);
+         c = build_omp_clause (OMP_CLAUSE_FIRSTPRIVATE);
+         OMP_CLAUSE_DECL (c) = decl;
+         OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
+         gimple_omp_taskreg_set_clauses (stmt, c);
+       }
+
+      save_local_var_chain = info->new_local_var_chain;
+      info->new_local_var_chain = NULL;
+
+      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
+                info, gimple_omp_body (stmt));
+
+      if (info->new_local_var_chain)
+       declare_vars (info->new_local_var_chain,
+                     gimple_seq_first_stmt (gimple_omp_body (stmt)),
+                     false);
+      info->new_local_var_chain = save_local_var_chain;
+      info->suppress_expansion = save_suppress;
+      break;
+
+    case GIMPLE_OMP_FOR:
+      save_suppress = info->suppress_expansion;
+      convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
+      walk_gimple_omp_for (stmt, convert_nonlocal_reference_stmt,
+                          convert_nonlocal_reference_op, info);
+      walk_body (convert_nonlocal_reference_stmt,
+                convert_nonlocal_reference_op, info, gimple_omp_body (stmt));
+      info->suppress_expansion = save_suppress;
+      break;
+
+    case GIMPLE_OMP_SECTIONS:
+      save_suppress = info->suppress_expansion;
+      convert_nonlocal_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
+      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
+                info, gimple_omp_body (stmt));
+      info->suppress_expansion = save_suppress;
+      break;
+
+    case GIMPLE_OMP_SINGLE:
+      save_suppress = info->suppress_expansion;
+      convert_nonlocal_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
+      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
+                info, gimple_omp_body (stmt));
+      info->suppress_expansion = save_suppress;
+      break;
+
+    case GIMPLE_OMP_SECTION:
+    case GIMPLE_OMP_MASTER:
+    case GIMPLE_OMP_ORDERED:
+      walk_body (convert_nonlocal_reference_stmt, convert_nonlocal_reference_op,
+                info, gimple_omp_body (stmt));
+      break;
+
+    default:
+      /* For every other statement that we are not interested in
+        handling here, let the walker traverse the operands.  */
+      *handled_ops_p = false;
+      return NULL_TREE;
+    }
+
+  /* We have handled all of STMT's operands, no need to traverse them.  */
+  *handled_ops_p = true;
+  return NULL_TREE;
+}
+
+
 /* A subroutine of convert_local_reference.  Create a local variable
    in the parent function with DECL_VALUE_EXPR set to reference the
    field in FRAME.  This is used both for debug info and in OpenMP
@@ -1309,21 +1255,20 @@ get_local_debug_decl (struct nesting_info *info, tree decl, tree field)
   return new_decl;
 }
 
-/* Called via walk_function+walk_tree, rewrite all references to VAR
+
+/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
    and PARM_DECLs that were referenced by inner nested functions.
    The rewrite will be a structure reference to the local frame variable.  */
 
 static bool convert_local_omp_clauses (tree *, struct walk_stmt_info *);
 
 static tree
-convert_local_reference (tree *tp, int *walk_subtrees, void *data)
+convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
 {
   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
   struct nesting_info *const info = (struct nesting_info *) wi->info;
   tree t = *tp, field, x;
   bool save_val_only;
-  tree save_local_var_chain;
-  bitmap save_suppress;
 
   *walk_subtrees = 0;
   switch (TREE_CODE (t))
@@ -1351,14 +1296,14 @@ convert_local_reference (tree *tp, int *walk_subtrees, void *data)
 
          x = get_local_debug_decl (info, t, field);
          if (!bitmap_bit_p (info->suppress_expansion, DECL_UID (t)))
-           x = get_frame_field (info, info->context, field, &wi->tsi);
+           x = get_frame_field (info, info->context, field, &wi->gsi);
 
          if (wi->val_only)
            {
              if (wi->is_lhs)
-               x = save_tmp_var (info, x, &wi->tsi);
+               x = save_tmp_var (info, x, &wi->gsi);
              else
-               x = init_tmp_var (info, x, &wi->tsi);
+               x = init_tmp_var (info, x, &wi->gsi);
            }
 
          *tp = x;
@@ -1370,7 +1315,7 @@ convert_local_reference (tree *tp, int *walk_subtrees, void *data)
       wi->val_only = false;
       wi->is_lhs = false;
       wi->changed = false;
-      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference, wi, NULL);
+      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op, wi, NULL);
       wi->val_only = save_val_only;
 
       /* If we converted anything ... */
@@ -1389,7 +1334,8 @@ convert_local_reference (tree *tp, int *walk_subtrees, void *data)
          /* If we are in a context where we only accept values, then
             compute the address into a temporary.  */
          if (save_val_only)
-           *tp = tsi_gimplify_val ((struct nesting_info *)wi->info, t, &wi->tsi);
+           *tp = gsi_gimplify_val ((struct nesting_info *) wi->info,
+                                   t, &wi->gsi);
        }
       break;
 
@@ -1408,28 +1354,28 @@ convert_local_reference (tree *tp, int *walk_subtrees, void *data)
       for (; handled_component_p (t); tp = &TREE_OPERAND (t, 0), t = *tp)
        {
          if (TREE_CODE (t) == COMPONENT_REF)
-           walk_tree (&TREE_OPERAND (t, 2), convert_local_reference, wi,
+           walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
                       NULL);
          else if (TREE_CODE (t) == ARRAY_REF
                   || TREE_CODE (t) == ARRAY_RANGE_REF)
            {
-             walk_tree (&TREE_OPERAND (t, 1), convert_local_reference, wi,
+             walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
                         NULL);
-             walk_tree (&TREE_OPERAND (t, 2), convert_local_reference, wi,
+             walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
                         NULL);
-             walk_tree (&TREE_OPERAND (t, 3), convert_local_reference, wi,
+             walk_tree (&TREE_OPERAND (t, 3), convert_local_reference_op, wi,
                         NULL);
            }
          else if (TREE_CODE (t) == BIT_FIELD_REF)
            {
-             walk_tree (&TREE_OPERAND (t, 1), convert_local_reference, wi,
+             walk_tree (&TREE_OPERAND (t, 1), convert_local_reference_op, wi,
                         NULL);
-             walk_tree (&TREE_OPERAND (t, 2), convert_local_reference, wi,
+             walk_tree (&TREE_OPERAND (t, 2), convert_local_reference_op, wi,
                         NULL);
            }
        }
       wi->val_only = false;
-      walk_tree (tp, convert_local_reference, wi, NULL);
+      walk_tree (tp, convert_local_reference_op, wi, NULL);
       wi->val_only = save_val_only;
       break;
 
@@ -1440,52 +1386,6 @@ convert_local_reference (tree *tp, int *walk_subtrees, void *data)
       *walk_subtrees = 1;
       break;
 
-    case OMP_PARALLEL:
-    case OMP_TASK:
-      save_suppress = info->suppress_expansion;
-      if (convert_local_omp_clauses (&OMP_TASKREG_CLAUSES (t), wi))
-       {
-         tree c;
-         (void) get_frame_type (info);
-         c = build_omp_clause (OMP_CLAUSE_SHARED);
-         OMP_CLAUSE_DECL (c) = info->frame_decl;
-         OMP_CLAUSE_CHAIN (c) = OMP_TASKREG_CLAUSES (t);
-         OMP_TASKREG_CLAUSES (t) = c;
-       }
-
-      save_local_var_chain = info->new_local_var_chain;
-      info->new_local_var_chain = NULL;
-
-      walk_body (convert_local_reference, info, &OMP_TASKREG_BODY (t));
-
-      if (info->new_local_var_chain)
-       declare_vars (info->new_local_var_chain, OMP_TASKREG_BODY (t), false);
-      info->new_local_var_chain = save_local_var_chain;
-      info->suppress_expansion = save_suppress;
-      break;
-
-    case OMP_FOR:
-      save_suppress = info->suppress_expansion;
-      convert_local_omp_clauses (&OMP_FOR_CLAUSES (t), wi);
-      walk_omp_for (convert_local_reference, info, t);
-      walk_body (convert_local_reference, info, &OMP_FOR_BODY (t));
-      info->suppress_expansion = save_suppress;
-      break;
-
-    case OMP_SECTIONS:
-    case OMP_SINGLE:
-      save_suppress = info->suppress_expansion;
-      convert_local_omp_clauses (&OMP_CLAUSES (t), wi);
-      walk_body (convert_local_reference, info, &OMP_BODY (t));
-      info->suppress_expansion = save_suppress;
-      break;
-
-    case OMP_SECTION:
-    case OMP_MASTER:
-    case OMP_ORDERED:
-      walk_body (convert_local_reference, info, &OMP_BODY (t));
-      break;
-
     default:
       if (!IS_TYPE_OR_DECL_P (t))
        {
@@ -1499,6 +1399,12 @@ convert_local_reference (tree *tp, int *walk_subtrees, void *data)
   return NULL_TREE;
 }
 
+static tree convert_local_reference_stmt (gimple_stmt_iterator *, bool *,
+                                         struct walk_stmt_info *);
+
+/* Helper for convert_local_reference.  Convert all the references in
+   the chain of clauses at *PCLAUSES.  WI is as in convert_local_reference.  */
+
 static bool
 convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
 {
@@ -1521,7 +1427,7 @@ convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
          goto do_decl_clause;
 
        case OMP_CLAUSE_LASTPRIVATE:
-         if (OMP_CLAUSE_LASTPRIVATE_STMT (clause))
+         if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause))
            need_stmts = true;
          goto do_decl_clause;
 
@@ -1556,7 +1462,8 @@ convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
        case OMP_CLAUSE_NUM_THREADS:
          wi->val_only = true;
          wi->is_lhs = false;
-         convert_local_reference (&OMP_CLAUSE_OPERAND (clause, 0), &dummy, wi);
+         convert_local_reference_op (&OMP_CLAUSE_OPERAND (clause, 0), &dummy,
+                                     wi);
          break;
 
        case OMP_CLAUSE_NOWAIT:
@@ -1585,18 +1492,21 @@ convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
                = DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause));
              DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
                = info->context;
-             walk_body (convert_local_reference, info,
-                        &OMP_CLAUSE_REDUCTION_INIT (clause));
-             walk_body (convert_local_reference, info,
-                        &OMP_CLAUSE_REDUCTION_MERGE (clause));
+             walk_body (convert_local_reference_stmt,
+                        convert_local_reference_op, info,
+                        OMP_CLAUSE_REDUCTION_GIMPLE_INIT (clause));
+             walk_body (convert_local_reference_stmt,
+                        convert_local_reference_op, info,
+                        OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (clause));
              DECL_CONTEXT (OMP_CLAUSE_REDUCTION_PLACEHOLDER (clause))
                = old_context;
            }
          break;
 
        case OMP_CLAUSE_LASTPRIVATE:
-         walk_body (convert_local_reference, info,
-                    &OMP_CLAUSE_LASTPRIVATE_STMT (clause));
+         walk_body (convert_local_reference_stmt,
+                    convert_local_reference_op, info,
+                    OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (clause));
          break;
 
        default:
@@ -1606,27 +1516,128 @@ convert_local_omp_clauses (tree *pclauses, struct walk_stmt_info *wi)
   return need_frame;
 }
 
-/* Called via walk_function+walk_tree, rewrite all GOTO_EXPRs that 
-   reference labels from outer functions.  The rewrite will be a 
+
+/* Called via walk_function+walk_gimple_stmt, rewrite all references to VAR
+   and PARM_DECLs that were referenced by inner nested functions.
+   The rewrite will be a structure reference to the local frame variable.  */
+
+static tree
+convert_local_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
+                             struct walk_stmt_info *wi)
+{
+  struct nesting_info *info = (struct nesting_info *) wi->info;
+  tree save_local_var_chain;
+  bitmap save_suppress;
+  gimple stmt = gsi_stmt (*gsi);
+
+  switch (gimple_code (stmt))
+    {
+    case GIMPLE_OMP_PARALLEL:
+    case GIMPLE_OMP_TASK:
+      save_suppress = info->suppress_expansion;
+      if (convert_local_omp_clauses (gimple_omp_taskreg_clauses_ptr (stmt),
+                                    wi))
+       {
+         tree c;
+         (void) get_frame_type (info);
+         c = build_omp_clause (OMP_CLAUSE_SHARED);
+         OMP_CLAUSE_DECL (c) = info->frame_decl;
+         OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
+         gimple_omp_taskreg_set_clauses (stmt, c);
+       }
+
+      save_local_var_chain = info->new_local_var_chain;
+      info->new_local_var_chain = NULL;
+
+      walk_body (convert_local_reference_stmt, convert_local_reference_op, info,
+                gimple_omp_body (stmt));
+
+      if (info->new_local_var_chain)
+       declare_vars (info->new_local_var_chain,
+                     gimple_seq_first_stmt (gimple_omp_body (stmt)), false);
+      info->new_local_var_chain = save_local_var_chain;
+      info->suppress_expansion = save_suppress;
+      break;
+
+    case GIMPLE_OMP_FOR:
+      save_suppress = info->suppress_expansion;
+      convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
+      walk_gimple_omp_for (stmt, convert_local_reference_stmt,
+                          convert_local_reference_op, info);
+      walk_body (convert_local_reference_stmt, convert_local_reference_op,
+                info, gimple_omp_body (stmt));
+      info->suppress_expansion = save_suppress;
+      break;
+
+    case GIMPLE_OMP_SECTIONS:
+      save_suppress = info->suppress_expansion;
+      convert_local_omp_clauses (gimple_omp_sections_clauses_ptr (stmt), wi);
+      walk_body (convert_local_reference_stmt, convert_local_reference_op,
+                info, gimple_omp_body (stmt));
+      info->suppress_expansion = save_suppress;
+      break;
+
+    case GIMPLE_OMP_SINGLE:
+      save_suppress = info->suppress_expansion;
+      convert_local_omp_clauses (gimple_omp_single_clauses_ptr (stmt), wi);
+      walk_body (convert_local_reference_stmt, convert_local_reference_op,
+                info, gimple_omp_body (stmt));
+      info->suppress_expansion = save_suppress;
+      break;
+
+    case GIMPLE_OMP_SECTION:
+    case GIMPLE_OMP_MASTER:
+    case GIMPLE_OMP_ORDERED:
+      walk_body (convert_local_reference_stmt, convert_local_reference_op,
+                info, gimple_omp_body (stmt));
+      break;
+
+    default:
+      /* For every other statement that we are not interested in
+        handling here, let the walker traverse the operands.  */
+      *handled_ops_p = false;
+      return NULL_TREE;
+    }
+
+  /* Indicate that we have handled all the operands ourselves.  */
+  *handled_ops_p = true;
+  return NULL_TREE;
+}
+
+
+/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_GOTOs
+   that reference labels from outer functions.  The rewrite will be a
    call to __builtin_nonlocal_goto.  */
 
 static tree
-convert_nl_goto_reference (tree *tp, int *walk_subtrees, void *data)
+convert_nl_goto_reference (gimple_stmt_iterator *gsi, bool *handled_ops_p,
+                          struct walk_stmt_info *wi)
 {
-  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
   struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
-  tree t = *tp, label, new_label, target_context, x, field;
+  tree label, new_label, target_context, x, field;
   void **slot;
+  gimple call;
+  gimple stmt = gsi_stmt (*gsi);
 
-  *walk_subtrees = 0;
-  if (TREE_CODE (t) != GOTO_EXPR)
-    return NULL_TREE;
-  label = GOTO_DESTINATION (t);
+  if (gimple_code (stmt) != GIMPLE_GOTO)
+    {
+      *handled_ops_p = false;
+      return NULL_TREE;
+    }
+
+  label = gimple_goto_dest (stmt);
   if (TREE_CODE (label) != LABEL_DECL)
-    return NULL_TREE;
+    {
+      *handled_ops_p = false;
+      return NULL_TREE;
+    }
+
   target_context = decl_function_context (label);
   if (target_context == info->context)
-    return NULL_TREE;
+    {
+      *handled_ops_p = false;
+      return NULL_TREE;
+    }
 
   for (i = info->outer; target_context != i->context; i = i->outer)
     continue;
@@ -1650,69 +1661,80 @@ convert_nl_goto_reference (tree *tp, int *walk_subtrees, void *data)
   
   /* Build: __builtin_nl_goto(new_label, &chain->nl_goto_field).  */
   field = get_nl_goto_field (i);
-  x = get_frame_field (info, target_context, field, &wi->tsi);
+  x = get_frame_field (info, target_context, field, &wi->gsi);
   x = build_addr (x, target_context);
-  x = tsi_gimplify_val (info, x, &wi->tsi);
-  x = build_call_expr (implicit_built_in_decls[BUILT_IN_NONLOCAL_GOTO], 2,
-                      build_addr (new_label, target_context), x);
-
-  SET_EXPR_LOCUS (x, EXPR_LOCUS (tsi_stmt (wi->tsi)));
-  *tsi_stmt_ptr (wi->tsi) = x;
+  x = gsi_gimplify_val (info, x, &wi->gsi);
+  call = gimple_build_call (implicit_built_in_decls[BUILT_IN_NONLOCAL_GOTO], 2,
+                           build_addr (new_label, target_context), x);
+  gsi_replace (&wi->gsi, call, false);
 
+  /* We have handled all of STMT's operands, no need to keep going.  */
+  *handled_ops_p = true;
   return NULL_TREE;
 }
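Where the tree-based code overwrote the statement slot behind the iterator
(*tsi_stmt_ptr (wi->tsi) = x), the tuples version replaces the GIMPLE_GOTO as a
whole: a GIMPLE_CALL to __builtin_nonlocal_goto is built and swapped in with
gsi_replace.  A compressed sketch of that last step (the wrapper and its
argument names are placeholders):

    /* Replace the GIMPLE_GOTO at *GSI with
       __builtin_nonlocal_goto (&NEW_LABEL, CHAIN_ADDR).  */
    static void
    replace_with_nl_goto (gimple_stmt_iterator *gsi, tree new_label,
                          tree chain_addr, tree target_context)
    {
      gimple call;

      call = gimple_build_call (implicit_built_in_decls[BUILT_IN_NONLOCAL_GOTO],
                                2, build_addr (new_label, target_context),
                                chain_addr);
      gsi_replace (gsi, call, false);
    }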
 
-/* Called via walk_function+walk_tree, rewrite all LABEL_EXPRs that 
+
+/* Called via walk_function+walk_tree, rewrite all GIMPLE_LABELs whose labels
    are referenced via nonlocal goto from a nested function.  The rewrite
    will involve installing a newly generated DECL_NONLOCAL label, and
-   (potentially) a branch around the rtl gunk that is assumed to be 
+   (potentially) a branch around the rtl gunk that is assumed to be
    attached to such a label.  */
 
 static tree
-convert_nl_goto_receiver (tree *tp, int *walk_subtrees, void *data)
+convert_nl_goto_receiver (gimple_stmt_iterator *gsi, bool *handled_ops_p,
+                         struct walk_stmt_info *wi)
 {
-  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
   struct nesting_info *const info = (struct nesting_info *) wi->info;
-  tree t = *tp, label, new_label, x;
-  tree_stmt_iterator tmp_tsi;
+  tree label, new_label;
+  gimple_stmt_iterator tmp_gsi;
   void **slot;
+  gimple stmt = gsi_stmt (*gsi);
 
-  *walk_subtrees = 0;
-  if (TREE_CODE (t) != LABEL_EXPR)
-    return NULL_TREE;
-  label = LABEL_EXPR_LABEL (t);
+  if (gimple_code (stmt) != GIMPLE_LABEL)
+    {
+      *handled_ops_p = false;
+      return NULL_TREE;
+    }
+
+  label = gimple_label_label (stmt);
 
   slot = pointer_map_contains (info->var_map, label);
   if (!slot)
-    return NULL_TREE;
+    {
+      *handled_ops_p = false;
+      return NULL_TREE;
+    }
 
   /* If there's any possibility that the previous statement falls through,
      then we must branch around the new non-local label.  */
-  tmp_tsi = wi->tsi;
-  tsi_prev (&tmp_tsi);
-  if (tsi_end_p (tmp_tsi) || block_may_fallthru (tsi_stmt (tmp_tsi)))
+  tmp_gsi = wi->gsi;
+  gsi_prev (&tmp_gsi);
+  if (gsi_end_p (tmp_gsi) || gimple_stmt_may_fallthru (gsi_stmt (tmp_gsi)))
     {
-      x = build1 (GOTO_EXPR, void_type_node, label);
-      tsi_link_before (&wi->tsi, x, TSI_SAME_STMT);
+      gimple stmt = gimple_build_goto (label);
+      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
     }
 
   new_label = (tree) *slot;
-  x = build1 (LABEL_EXPR, void_type_node, new_label);
-  tsi_link_before (&wi->tsi, x, TSI_SAME_STMT);
+  stmt = gimple_build_label (new_label);
+  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
 
+  *handled_ops_p = true;
   return NULL_TREE;
 }
 
-/* Called via walk_function+walk_tree, rewrite all references to addresses
+
+/* Called via walk_function+walk_gimple_stmt, rewrite all references to addresses
    of nested functions that require the use of trampolines.  The rewrite
    will involve a reference to a trampoline generated for the occasion.  */
 
 static tree
-convert_tramp_reference (tree *tp, int *walk_subtrees, void *data)
+convert_tramp_reference_op (tree *tp, int *walk_subtrees, void *data)
 {
   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
   struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
-  tree t = *tp, decl, target_context, x;
+  tree t = *tp, decl, target_context, x, builtin;
+  gimple call;
 
   *walk_subtrees = 0;
   switch (TREE_CODE (t))
@@ -1749,85 +1771,103 @@ convert_tramp_reference (tree *tp, int *walk_subtrees, void *data)
       x = lookup_tramp_for_decl (i, decl, INSERT);
 
       /* Compute the address of the field holding the trampoline.  */
-      x = get_frame_field (info, target_context, x, &wi->tsi);
+      x = get_frame_field (info, target_context, x, &wi->gsi);
       x = build_addr (x, target_context);
-      x = tsi_gimplify_val (info, x, &wi->tsi);
+      x = gsi_gimplify_val (info, x, &wi->gsi);
 
       /* Do machine-specific ugliness.  Normally this will involve
         computing extra alignment, but it can really be anything.  */
-      x = build_call_expr (implicit_built_in_decls[BUILT_IN_ADJUST_TRAMPOLINE],
-                          1, x);
-      x = init_tmp_var (info, x, &wi->tsi);
+      builtin = implicit_built_in_decls[BUILT_IN_ADJUST_TRAMPOLINE];
+      call = gimple_build_call (builtin, 1, x);
+      x = init_tmp_var_with_call (info, &wi->gsi, call);
 
       /* Cast back to the proper function type.  */
       x = build1 (NOP_EXPR, TREE_TYPE (t), x);
-      x = init_tmp_var (info, x, &wi->tsi);
+      x = init_tmp_var (info, x, &wi->gsi);
 
       *tp = x;
       break;
 
-    case CALL_EXPR:
-      /* Only walk call arguments, lest we generate trampolines for
-        direct calls.  */
+    default:
+      if (!IS_TYPE_OR_DECL_P (t))
+       *walk_subtrees = 1;
+      break;
+    }
+
+  return NULL_TREE;
+}
+
+
+/* Called via walk_function+walk_gimple_stmt, rewrite all references
+   to addresses of nested functions that require the use of
+   trampolines.  The rewrite will involve a reference to a trampoline
+   generated for the occasion.  */
+
+static tree
+convert_tramp_reference_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
+                             struct walk_stmt_info *wi)
+{
+  gimple stmt = gsi_stmt (*gsi);
+
+  switch (gimple_code (stmt))
+    {
+    case GIMPLE_CALL:
       {
-       int nargs = call_expr_nargs (t);
-       int i;
+       /* Only walk call arguments, lest we generate trampolines for
+          direct calls.  */
+       unsigned long i, nargs = gimple_call_num_args (stmt);
        for (i = 0; i < nargs; i++)
-         walk_tree (&CALL_EXPR_ARG (t, i), convert_tramp_reference, wi, NULL);
+         walk_tree (gimple_call_arg_ptr (stmt, i), convert_tramp_reference_op,
+                    wi, NULL);
+
+       *handled_ops_p = true;
+       return NULL_TREE;
       }
-      break;
 
     default:
-      if (!IS_TYPE_OR_DECL_P (t))
-       *walk_subtrees = 1;
       break;
     }
 
+  *handled_ops_p = false;
   return NULL_TREE;
 }
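
The split above is the general pattern for trampoline rewriting under tuples: a statement callback decides which operands are worth visiting, and an operand callback keeps the old walk_tree signature, recovering the walk_stmt_info through the DATA pointer.  A hedged sketch of that pairing, not part of the patch; the names are hypothetical and the helpers are the ones this hunk already uses:

static tree example_op_cb (tree *, int *, void *);

static tree
example_stmt_cb (gimple_stmt_iterator *gsi, bool *handled_ops_p,
                 struct walk_stmt_info *wi)
{
  gimple stmt = gsi_stmt (*gsi);

  if (gimple_code (stmt) == GIMPLE_CALL)
    {
      /* Visit only the call arguments so the callee itself is never
         rewritten into a trampoline reference.  */
      unsigned i;
      for (i = 0; i < gimple_call_num_args (stmt); i++)
        walk_tree (gimple_call_arg_ptr (stmt, i), example_op_cb, wi, NULL);
      *handled_ops_p = true;
    }
  else
    *handled_ops_p = false;

  return NULL_TREE;
}

static tree
example_op_cb (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi ATTRIBUTE_UNUSED
    = (struct walk_stmt_info *) data;

  if (TREE_CODE (*tp) == ADDR_EXPR)
    {
      /* Operand rewriting goes here; any new statements are inserted
         in front of the current statement through wi->gsi.  */
    }
  else if (!IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 1;

  return NULL_TREE;
}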
 
-/* Called via walk_function+walk_tree, rewrite all CALL_EXPRs that 
-   reference nested functions to make sure that the static chain is
-   set up properly for the call.  */
+
+
+/* Called via walk_function+walk_gimple_stmt, rewrite all GIMPLE_CALLs
+   that reference nested functions to make sure that the static chain
+   is set up properly for the call.  */
 
 static tree
-convert_call_expr (tree *tp, int *walk_subtrees, void *data)
+convert_gimple_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
+                     struct walk_stmt_info *wi)
 {
-  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
   struct nesting_info *const info = (struct nesting_info *) wi->info;
-  tree t = *tp, decl, target_context;
+  tree decl, target_context;
   char save_static_chain_added;
   int i;
+  gimple stmt = gsi_stmt (*gsi);
 
-  *walk_subtrees = 0;
-  switch (TREE_CODE (t))
+  switch (gimple_code (stmt))
     {
-    case CALL_EXPR:
-      decl = get_callee_fndecl (t);
-      if (!decl)
+    case GIMPLE_CALL:
+      decl = gimple_call_fn (stmt);
+      if (TREE_CODE (decl) != FUNCTION_DECL)
        break;
       target_context = decl_function_context (decl);
       if (target_context && !DECL_NO_STATIC_CHAIN (decl))
        {
-         CALL_EXPR_STATIC_CHAIN (t)
-           = get_static_chain (info, target_context, &wi->tsi);
-         info->static_chain_added
-           |= (1 << (info->context != target_context));
+         gimple_call_set_chain (stmt, get_static_chain (info, target_context,
+                                                        &wi->gsi));
+         info->static_chain_added |= (1 << (info->context != target_context));
        }
       break;
 
-    case RETURN_EXPR:
-    case GIMPLE_MODIFY_STMT:
-    case WITH_SIZE_EXPR:
-      /* Only return modify and with_size_expr may contain calls.  */
-      *walk_subtrees = 1;
-      break;
-
-    case OMP_PARALLEL:
-    case OMP_TASK:
+    case GIMPLE_OMP_PARALLEL:
+    case GIMPLE_OMP_TASK:
       save_static_chain_added = info->static_chain_added;
       info->static_chain_added = 0;
-      walk_body (convert_call_expr, info, &OMP_TASKREG_BODY (t));
+      walk_body (convert_gimple_call, NULL, info, gimple_omp_body (stmt));
       for (i = 0; i < 2; i++)
        {
          tree c, decl;
@@ -1835,7 +1875,9 @@ convert_call_expr (tree *tp, int *walk_subtrees, void *data)
            continue;
          decl = i ? get_chain_decl (info) : info->frame_decl;
          /* Don't add CHAIN.* or FRAME.* twice.  */
-         for (c = OMP_TASKREG_CLAUSES (t); c; c = OMP_CLAUSE_CHAIN (c))
+         for (c = gimple_omp_taskreg_clauses (stmt);
+              c;
+              c = OMP_CLAUSE_CHAIN (c))
            if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
                 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
                && OMP_CLAUSE_DECL (c) == decl)
@@ -1845,32 +1887,37 @@ convert_call_expr (tree *tp, int *walk_subtrees, void *data)
              c = build_omp_clause (i ? OMP_CLAUSE_FIRSTPRIVATE
                                      : OMP_CLAUSE_SHARED);
              OMP_CLAUSE_DECL (c) = decl;
-             OMP_CLAUSE_CHAIN (c) = OMP_TASKREG_CLAUSES (t);
-             OMP_TASKREG_CLAUSES (t) = c;
+             OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
+             gimple_omp_taskreg_set_clauses (stmt, c);
            }
        }
       info->static_chain_added |= save_static_chain_added;
       break;
 
-    case OMP_FOR:
-      walk_body (convert_call_expr, info, &OMP_FOR_PRE_BODY (t));
+    case GIMPLE_OMP_FOR:
+      walk_body (convert_gimple_call, NULL, info,
+                gimple_omp_for_pre_body (stmt));
       /* FALLTHRU */
-    case OMP_SECTIONS:
-    case OMP_SECTION:
-    case OMP_SINGLE:
-    case OMP_MASTER:
-    case OMP_ORDERED:
-    case OMP_CRITICAL:
-      walk_body (convert_call_expr, info, &OMP_BODY (t));
+    case GIMPLE_OMP_SECTIONS:
+    case GIMPLE_OMP_SECTION:
+    case GIMPLE_OMP_SINGLE:
+    case GIMPLE_OMP_MASTER:
+    case GIMPLE_OMP_ORDERED:
+    case GIMPLE_OMP_CRITICAL:
+      walk_body (convert_gimple_call, NULL, info, gimple_omp_body (stmt));
       break;
 
     default:
-      break;
+      /* Keep looking for other operands.  */
+      *handled_ops_p = false;
+      return NULL_TREE;
     }
 
+  *handled_ops_p = true;
   return NULL_TREE;
 }
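
Under tuples the static chain is a field of the GIMPLE_CALL itself rather than a CALL_EXPR operand.  A hedged sketch of the idiom convert_gimple_call uses above, not part of the patch; LINK stands in for whatever chain value the caller has computed, and the real pass additionally checks DECL_NO_STATIC_CHAIN:

static void
example_set_static_chain (gimple stmt, tree link)
{
  tree fn;

  if (!is_gimple_call (stmt))
    return;

  /* Direct calls carry the FUNCTION_DECL itself as the call fn.  */
  fn = gimple_call_fn (stmt);
  if (TREE_CODE (fn) == FUNCTION_DECL
      && decl_function_context (fn) != NULL_TREE)
    gimple_call_set_chain (stmt, link);
}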
 
+
 /* Walk the nesting tree starting with ROOT, depth first.  Convert all
    trampolines and call expressions.  On the way back up, determine if
    a nested function actually uses its static chain; if not, remember that.  */
@@ -1883,8 +1930,9 @@ convert_all_function_calls (struct nesting_info *root)
       if (root->inner)
        convert_all_function_calls (root->inner);
 
-      walk_function (convert_tramp_reference, root);
-      walk_function (convert_call_expr, root);
+      walk_function (convert_tramp_reference_stmt, convert_tramp_reference_op,
+                    root);
+      walk_function (convert_gimple_call, NULL, root);
 
       /* If the function does not use a static chain, then remember that.  */
       if (root->outer && !root->chain_decl && !root->chain_field)
@@ -1905,10 +1953,13 @@ convert_all_function_calls (struct nesting_info *root)
 static void
 finalize_nesting_tree_1 (struct nesting_info *root)
 {
-  tree stmt_list = NULL;
+  gimple_seq stmt_list;
+  gimple stmt;
   tree context = root->context;
   struct function *sf;
 
+  stmt_list = NULL;
+
   /* If we created a non-local frame type or decl, we need to lay them
      out at this time.  */
   if (root->frame_type)
@@ -1943,8 +1994,8 @@ finalize_nesting_tree_1 (struct nesting_info *root)
 
          y = build3 (COMPONENT_REF, TREE_TYPE (field),
                      root->frame_decl, field, NULL_TREE);
-         x = build_gimple_modify_stmt (y, x);
-         append_to_statement_list (x, &stmt_list);
+         stmt = gimple_build_assign (y, x);
+         gimple_seq_add_stmt (&stmt_list, stmt);
        }
     }
 
@@ -1954,8 +2005,8 @@ finalize_nesting_tree_1 (struct nesting_info *root)
     {
       tree x = build3 (COMPONENT_REF, TREE_TYPE (root->chain_field),
                       root->frame_decl, root->chain_field, NULL_TREE);
-      x = build_gimple_modify_stmt (x, get_chain_decl (root));
-      append_to_statement_list (x, &stmt_list);
+      stmt = gimple_build_assign (x, get_chain_decl (root));
+      gimple_seq_add_stmt (&stmt_list, stmt);
     }
 
   /* If trampolines were created, then we need to initialize them.  */
@@ -1982,19 +2033,19 @@ finalize_nesting_tree_1 (struct nesting_info *root)
          arg1 = build_addr (x, context);
 
          x = implicit_built_in_decls[BUILT_IN_INIT_TRAMPOLINE];
-         x = build_call_expr (x, 3, arg1, arg2, arg3);
-         append_to_statement_list (x, &stmt_list);
+         stmt = gimple_build_call (x, 3, arg1, arg2, arg3);
+         gimple_seq_add_stmt (&stmt_list, stmt);
        }
     }
 
   /* If we created initialization statements, insert them.  */
   if (stmt_list)
     {
-      annotate_all_with_locus (&stmt_list,
-                              DECL_SOURCE_LOCATION (context));
-      append_to_statement_list (BIND_EXPR_BODY (DECL_SAVED_TREE (context)),
-                               &stmt_list);
-      BIND_EXPR_BODY (DECL_SAVED_TREE (context)) = stmt_list;
+      gimple bind;
+      annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
+      bind = gimple_seq_first_stmt (gimple_body (context));
+      gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
+      gimple_bind_set_body (bind, stmt_list);
     }
 
   /* If a chain_decl was created, then it needs to be registered with
@@ -2014,10 +2065,12 @@ finalize_nesting_tree_1 (struct nesting_info *root)
   /* Make sure all new local variables get inserted into the
      proper BIND_EXPR.  */
   if (root->new_local_var_chain)
-    declare_vars (root->new_local_var_chain, DECL_SAVED_TREE (root->context),
+    declare_vars (root->new_local_var_chain,
+                 gimple_seq_first_stmt (gimple_body (root->context)),
                  false);
   if (root->debug_var_chain)
-    declare_vars (root->debug_var_chain, DECL_SAVED_TREE (root->context),
+    declare_vars (root->debug_var_chain,
+                 gimple_seq_first_stmt (gimple_body (root->context)),
                  true);
 
   /* Dump the translated tree function.  */
@@ -2101,10 +2154,14 @@ lower_nested_functions (tree fndecl)
 
   bitmap_obstack_initialize (&nesting_info_bitmap_obstack);
   root = create_nesting_tree (cgn);
-  walk_all_functions (convert_nonlocal_reference, root);
-  walk_all_functions (convert_local_reference, root);
-  walk_all_functions (convert_nl_goto_reference, root);
-  walk_all_functions (convert_nl_goto_receiver, root);
+  walk_all_functions (convert_nonlocal_reference_stmt,
+                      convert_nonlocal_reference_op,
+                     root);
+  walk_all_functions (convert_local_reference_stmt,
+                      convert_local_reference_op,
+                     root);
+  walk_all_functions (convert_nl_goto_reference, NULL, root);
+  walk_all_functions (convert_nl_goto_receiver, NULL, root);
   convert_all_function_calls (root);
   finalize_nesting_tree (root);
   unnest_nesting_tree (root);
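
Because DECL_SAVED_TREE no longer holds the lowered body, finalize_nesting_tree_1 above splices its initialization statements directly into the outermost GIMPLE_BIND of the function.  A hedged sketch of that splice, not part of the patch; the function and its arguments are hypothetical, the sequence and bind accessors are the ones used in the hunks above:

static void
example_prepend_init (tree context, tree dest, tree src)
{
  gimple_seq stmt_list = NULL;
  gimple stmt, bind;

  /* Build the statements that must run first.  */
  stmt = gimple_build_assign (dest, src);
  gimple_seq_add_stmt (&stmt_list, stmt);
  annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));

  /* The lowered body of CONTEXT is a single GIMPLE_BIND; put the new
     statements in front of its existing body.  */
  bind = gimple_seq_first_stmt (gimple_body (context));
  gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
  gimple_bind_set_body (bind, stmt_list);
}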
index 8d1ae75..8e618a1 100644 (file)
@@ -27,7 +27,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "tree-inline.h"
 #include "c-tree.h"
 #include "c-common.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "diagnostic.h"
 #include "hashtab.h"
 #include "output.h"
index f277be9..40e7508 100644 (file)
@@ -74,7 +74,8 @@ static tree finalize_nrv_r (tree *, int *, void *);
 static tree
 finalize_nrv_r (tree *tp, int *walk_subtrees, void *data)
 {
-  struct nrv_data *dp = (struct nrv_data *)data;
+  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
+  struct nrv_data *dp = (struct nrv_data *) wi->info;
 
   /* No need to walk into types.  */
   if (TYPE_P (*tp))
@@ -107,7 +108,7 @@ tree_nrv (void)
   tree result_type = TREE_TYPE (result);
   tree found = NULL;
   basic_block bb;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   struct nrv_data data;
 
   /* If this function does not return an aggregate type in memory, then
@@ -123,24 +124,29 @@ tree_nrv (void)
   /* Look through each block for assignments to the RESULT_DECL.  */
   FOR_EACH_BB (bb)
     {
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
-         tree stmt = bsi_stmt (bsi);
-         tree ret_expr;
+         gimple stmt = gsi_stmt (gsi);
+         tree ret_val;
 
-         if (TREE_CODE (stmt) == RETURN_EXPR)
+         if (gimple_code (stmt) == GIMPLE_RETURN)
            {
              /* In a function with an aggregate return value, the
                 gimplifier has changed all non-empty RETURN_EXPRs to
                 return the RESULT_DECL.  */
-             ret_expr = TREE_OPERAND (stmt, 0);
-             if (ret_expr)
-               gcc_assert (ret_expr == result);
+             ret_val = gimple_return_retval (stmt);
+             if (ret_val)
+               gcc_assert (ret_val == result);
            }
-         else if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-                  && GIMPLE_STMT_OPERAND (stmt, 0) == result)
+         else if (is_gimple_assign (stmt)
+                  && gimple_assign_lhs (stmt) == result)
            {
-             ret_expr = GIMPLE_STMT_OPERAND (stmt, 1);
+              tree rhs;
+
+             if (!gimple_assign_copy_p (stmt))
+               return 0;
+
+             rhs = gimple_assign_rhs1 (stmt);
 
              /* Now verify that this return statement uses the same value
                 as any previously encountered return statement.  */
@@ -149,11 +155,11 @@ tree_nrv (void)
                  /* If we found a return statement using a different variable
                     than previous return statements, then we can not perform
                     NRV optimizations.  */
-                 if (found != ret_expr)
+                 if (found != rhs)
                    return 0;
                }
              else
-               found = ret_expr;
+               found = rhs;
 
              /* The returned value must be a local automatic variable of the
                 same type and alignment as the function's result.  */
@@ -167,9 +173,9 @@ tree_nrv (void)
                                                TREE_TYPE (found)))
                return 0;
            }
-         else if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
+         else if (is_gimple_assign (stmt))
            {
-             tree addr = get_base_address (GIMPLE_STMT_OPERAND (stmt, 0));
+             tree addr = get_base_address (gimple_assign_lhs (stmt));
               /* If there's any MODIFY of component of RESULT, 
                  then bail out.  */
              if (addr && addr == result)
@@ -205,18 +211,21 @@ tree_nrv (void)
   data.result = result;
   FOR_EACH_BB (bb)
     {
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
        {
-         tree *tp = bsi_stmt_ptr (bsi);
+         gimple stmt = gsi_stmt (gsi);
          /* If this is a copy from VAR to RESULT, remove it.  */
-         if (TREE_CODE (*tp) == GIMPLE_MODIFY_STMT
-             && GIMPLE_STMT_OPERAND (*tp, 0) == result
-             && GIMPLE_STMT_OPERAND (*tp, 1) == found)
-           bsi_remove (&bsi, true);
+         if (gimple_assign_copy_p (stmt)
+             && gimple_assign_lhs (stmt) == result
+             && gimple_assign_rhs1 (stmt) == found)
+           gsi_remove (&gsi, true);
          else
            {
-             walk_tree (tp, finalize_nrv_r, &data, 0);
-             bsi_next (&bsi);
+             struct walk_stmt_info wi;
+             memset (&wi, 0, sizeof (wi));
+             wi.info = &data;
+             walk_gimple_op (stmt, finalize_nrv_r, &wi);
+             gsi_next (&gsi);
            }
        }
     }
@@ -277,7 +286,7 @@ dest_safe_for_nrv_p (tree dest)
   return true;
 }
 
-/* Walk through the function looking for GIMPLE_MODIFY_STMTs with calls that
+/* Walk through the function looking for GIMPLE_ASSIGNs with calls that
    return in memory on the RHS.  For each of these, determine whether it is
    safe to pass the address of the LHS as the return slot, and mark the
    call appropriately if so.
@@ -296,21 +305,24 @@ execute_return_slot_opt (void)
 
   FOR_EACH_BB (bb)
     {
-      block_stmt_iterator i;
-      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
+      gimple_stmt_iterator gsi;
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
-         tree stmt = bsi_stmt (i);
-         tree call;
-
-         if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-             && (call = GIMPLE_STMT_OPERAND (stmt, 1),
-                 TREE_CODE (call) == CALL_EXPR)
-             && !CALL_EXPR_RETURN_SLOT_OPT (call)
-             && aggregate_value_p (call, call))
-           /* Check if the location being assigned to is
-              call-clobbered.  */
-           CALL_EXPR_RETURN_SLOT_OPT (call) =
-             dest_safe_for_nrv_p (GIMPLE_STMT_OPERAND (stmt, 0)) ? 1 : 0;
+         gimple stmt = gsi_stmt (gsi);
+         bool slot_opt_p;
+
+         if (is_gimple_call (stmt)
+             && gimple_call_lhs (stmt)
+             && !gimple_call_return_slot_opt_p (stmt)
+             && aggregate_value_p (TREE_TYPE (gimple_call_lhs (stmt)),
+                                   gimple_call_fndecl (stmt)))
+           {
+             /* Check if the location being assigned to is
+                call-clobbered.  */
+             slot_opt_p = dest_safe_for_nrv_p (gimple_call_lhs (stmt));
+             gimple_call_set_return_slot_opt (stmt, slot_opt_p);
+           }
        }
     }
   return 0;
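
The NRV rewrite above boils down to one scan over every basic block with a gimple_stmt_iterator, removing copies from the chosen variable into the RESULT_DECL.  A hedged sketch of that scan, not part of the patch, using only predicates that appear in the hunks above; RESULT and FOUND stand for the pass's local variables:

static void
example_remove_result_copies (tree result, tree found)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
        {
          gimple stmt = gsi_stmt (gsi);

          /* A plain copy "result = found" becomes dead once RESULT and
             FOUND share storage; gsi_remove leaves GSI on the next
             statement, so gsi_next is only called when we keep STMT.  */
          if (gimple_assign_copy_p (stmt)
              && gimple_assign_lhs (stmt) == result
              && gimple_assign_rhs1 (stmt) == found)
            gsi_remove (&gsi, true);
          else
            gsi_next (&gsi);
        }
    }
}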
index c1b3b5f..22c4951 100644 (file)
@@ -43,13 +43,13 @@ static unsigned HOST_WIDE_INT unknown[4] = { -1, -1, 0, 0 };
 
 static tree compute_object_offset (const_tree, const_tree);
 static unsigned HOST_WIDE_INT addr_object_size (const_tree, int);
-static unsigned HOST_WIDE_INT alloc_object_size (const_tree, int);
-static tree pass_through_call (const_tree);
+static unsigned HOST_WIDE_INT alloc_object_size (const_gimple, int);
+static tree pass_through_call (const_gimple);
 static void collect_object_sizes_for (struct object_size_info *, tree);
 static void expr_object_size (struct object_size_info *, tree, tree);
 static bool merge_object_sizes (struct object_size_info *, tree, tree,
                                unsigned HOST_WIDE_INT);
-static bool plus_expr_object_size (struct object_size_info *, tree, tree);
+static bool plus_stmt_object_size (struct object_size_info *, tree, gimple);
 static bool cond_expr_object_size (struct object_size_info *, tree, tree);
 static unsigned int compute_object_sizes (void);
 static void init_offset_limit (void);
@@ -219,21 +219,21 @@ addr_object_size (const_tree ptr, int object_size_type)
 }
 
 
-/* Compute __builtin_object_size for CALL, which is a CALL_EXPR.
+/* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
    Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
    argument from __builtin_object_size.  If unknown, return
    unknown[object_size_type].  */
 
 static unsigned HOST_WIDE_INT
-alloc_object_size (const_tree call, int object_size_type)
+alloc_object_size (const_gimple call, int object_size_type)
 {
   tree callee, bytes = NULL_TREE;
   tree alloc_size;
   int arg1 = -1, arg2 = -1;
 
-  gcc_assert (TREE_CODE (call) == CALL_EXPR);
+  gcc_assert (is_gimple_call (call));
 
-  callee = get_callee_fndecl (call);
+  callee = gimple_call_fndecl (call);
   if (!callee)
     return unknown[object_size_type];
 
@@ -244,7 +244,7 @@ alloc_object_size (const_tree call, int object_size_type)
 
       arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
       if (TREE_CHAIN (p))
-         arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
+        arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
     }
  
   if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
@@ -260,19 +260,19 @@ alloc_object_size (const_tree call, int object_size_type)
        break;
       }
 
-  if (arg1 < 0 || arg1 >= call_expr_nargs (call)
-      || TREE_CODE (CALL_EXPR_ARG (call, arg1)) != INTEGER_CST
+  if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
+      || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
       || (arg2 >= 0 
-         && (arg2 >= call_expr_nargs (call)
-             || TREE_CODE (CALL_EXPR_ARG (call, arg2)) != INTEGER_CST)))
+         && (arg2 >= (int)gimple_call_num_args (call)
+             || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
     return unknown[object_size_type];    
 
   if (arg2 >= 0)
     bytes = size_binop (MULT_EXPR,
-       fold_convert (sizetype, CALL_EXPR_ARG (call, arg1)),
-       fold_convert (sizetype, CALL_EXPR_ARG (call, arg2)));
+       fold_convert (sizetype, gimple_call_arg (call, arg1)),
+       fold_convert (sizetype, gimple_call_arg (call, arg2)));
   else if (arg1 >= 0)
-    bytes = fold_convert (sizetype, CALL_EXPR_ARG (call, arg1));
+    bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));
 
   if (bytes && host_integerp (bytes, 1))
     return tree_low_cst (bytes, 1);
@@ -282,13 +282,13 @@ alloc_object_size (const_tree call, int object_size_type)
 
 
 /* If object size is propagated from one of function's arguments directly
-   to its return value, return that argument for CALL_EXPR CALL.
+   to its return value, return that argument for GIMPLE_CALL statement CALL.
    Otherwise return NULL.  */
 
 static tree
-pass_through_call (const_tree call)
+pass_through_call (const_gimple call)
 {
-  tree callee = get_callee_fndecl (call);
+  tree callee = gimple_call_fndecl (call);
 
   if (callee
       && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
@@ -308,8 +308,8 @@ pass_through_call (const_tree call)
       case BUILT_IN_STRNCPY_CHK:
       case BUILT_IN_STRCAT_CHK:
       case BUILT_IN_STRNCAT_CHK:
-       if (call_expr_nargs (call) >= 1)
-         return CALL_EXPR_ARG (call, 0);
+       if (gimple_call_num_args (call) >= 1)
+         return gimple_call_arg (call, 0);
        break;
       default:
        break;
@@ -332,16 +332,8 @@ compute_builtin_object_size (tree ptr, int object_size_type)
 
   if (TREE_CODE (ptr) == ADDR_EXPR)
     return addr_object_size (ptr, object_size_type);
-  else if (TREE_CODE (ptr) == CALL_EXPR)
-    {
-      tree arg = pass_through_call (ptr);
 
-      if (arg)
-       return compute_builtin_object_size (arg, object_size_type);
-      else
-       return alloc_object_size (ptr, object_size_type);
-    }
-  else if (TREE_CODE (ptr) == SSA_NAME
+  if (TREE_CODE (ptr) == SSA_NAME
           && POINTER_TYPE_P (TREE_TYPE (ptr))
           && object_sizes[object_size_type] != NULL)
     {
@@ -463,9 +455,7 @@ compute_builtin_object_size (tree ptr, int object_size_type)
   return unknown[object_size_type];
 }
 
-
-/* Compute object_sizes for PTR, defined to VALUE, which is not
-   a SSA_NAME.  */
+/* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */
 
 static void
 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
@@ -487,8 +477,6 @@ expr_object_size (struct object_size_info *osi, tree ptr, tree value)
 
   if (TREE_CODE (value) == ADDR_EXPR)
     bytes = addr_object_size (value, object_size_type);
-  else if (TREE_CODE (value) == CALL_EXPR)
-    bytes = alloc_object_size (value, object_size_type);
   else
     bytes = unknown[object_size_type];
 
@@ -505,6 +493,64 @@ expr_object_size (struct object_size_info *osi, tree ptr, tree value)
 }
 
 
+/* Compute object_sizes for PTR, defined to the result of a call.  */
+
+static void
+call_object_size (struct object_size_info *osi, tree ptr, gimple call)
+{
+  int object_size_type = osi->object_size_type;
+  unsigned int varno = SSA_NAME_VERSION (ptr);
+  unsigned HOST_WIDE_INT bytes;
+
+  gcc_assert (is_gimple_call (call));
+
+  gcc_assert (object_sizes[object_size_type][varno]
+             != unknown[object_size_type]);
+  gcc_assert (osi->pass == 0);
+
+  bytes = alloc_object_size (call, object_size_type);
+
+  if ((object_size_type & 2) == 0)
+    {
+      if (object_sizes[object_size_type][varno] < bytes)
+       object_sizes[object_size_type][varno] = bytes;
+    }
+  else
+    {
+      if (object_sizes[object_size_type][varno] > bytes)
+       object_sizes[object_size_type][varno] = bytes;
+    }
+}
+
+
+/* Compute object_sizes for PTR, defined to an unknown value.  */
+
+static void
+unknown_object_size (struct object_size_info *osi, tree ptr)
+{
+  int object_size_type = osi->object_size_type;
+  unsigned int varno = SSA_NAME_VERSION (ptr);
+  unsigned HOST_WIDE_INT bytes;
+
+  gcc_assert (object_sizes[object_size_type][varno]
+             != unknown[object_size_type]);
+  gcc_assert (osi->pass == 0);
+
+  bytes = unknown[object_size_type];
+
+  if ((object_size_type & 2) == 0)
+    {
+      if (object_sizes[object_size_type][varno] < bytes)
+       object_sizes[object_size_type][varno] = bytes;
+    }
+  else
+    {
+      if (object_sizes[object_size_type][varno] > bytes)
+       object_sizes[object_size_type][varno] = bytes;
+    }
+}
+
+
 /* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
    the object size might need reexamination later.  */
 
@@ -552,20 +598,22 @@ merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
 }
 
 
-/* Compute object_sizes for PTR, defined to VALUE, which is
-   a POINTER_PLUS_EXPR.  Return true if the object size might need reexamination
-   later.  */
+/* Compute object_sizes for VAR, defined to the result of an assignment
+   with operator POINTER_PLUS_EXPR.  Return true if the object size might
+   need reexamination later.  */
 
 static bool
-plus_expr_object_size (struct object_size_info *osi, tree var, tree value)
+plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
 {
-  tree op0 = TREE_OPERAND (value, 0);
-  tree op1 = TREE_OPERAND (value, 1);
   int object_size_type = osi->object_size_type;
   unsigned int varno = SSA_NAME_VERSION (var);
   unsigned HOST_WIDE_INT bytes;
+  tree op0, op1;
+
+  gcc_assert (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR);
 
-  gcc_assert (TREE_CODE (value) == POINTER_PLUS_EXPR);
+  op0 = gimple_assign_rhs1 (stmt);
+  op1 = gimple_assign_rhs2 (stmt);
 
   if (object_sizes[object_size_type][varno] == unknown[object_size_type])
     return false;
@@ -583,6 +631,7 @@ plus_expr_object_size (struct object_size_info *osi, tree var, tree value)
        {
          unsigned HOST_WIDE_INT off = tree_low_cst (op1, 1);
 
+          /* op0 will be ADDR_EXPR here.  */
          bytes = compute_builtin_object_size (op0, object_size_type);
          if (bytes == unknown[object_size_type])
            ;
@@ -611,7 +660,7 @@ plus_expr_object_size (struct object_size_info *osi, tree var, tree value)
 }
 
 
-/* Compute object_sizes for PTR, defined to VALUE, which is
+/* Compute object_sizes for VAR, defined to VALUE, which is
    a COND_EXPR.  Return true if the object size might need reexamination
    later.  */
 
@@ -644,12 +693,11 @@ cond_expr_object_size (struct object_size_info *osi, tree var, tree value)
   return reexamine;
 }
 
-
 /* Compute object sizes for VAR.
    For ADDR_EXPR an object size is the number of remaining bytes
    to the end of the object (where what is considered an object depends on
    OSI->object_size_type).
-   For allocation CALL_EXPR like malloc or calloc object size is the size
+   For allocation GIMPLE_CALL like malloc or calloc object size is the size
    of the allocation.
    For POINTER_PLUS_EXPR where second operand is a constant integer,
    object size is object size of the first operand minus the constant.
@@ -660,7 +708,7 @@ cond_expr_object_size (struct object_size_info *osi, tree var, tree value)
    unknown[object_size_type] for all objects bigger than half of the address
    space, and constants less than half of the address space are considered
    addition, while bigger constants subtraction.
-   For a memcpy like CALL_EXPR that always returns one of its arguments, the
+   For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
    object size is object size of that argument.
    Otherwise, object size is the maximum of object sizes of variables
    that it might be set to.  */
@@ -670,7 +718,7 @@ collect_object_sizes_for (struct object_size_info *osi, tree var)
 {
   int object_size_type = osi->object_size_type;
   unsigned int varno = SSA_NAME_VERSION (var);
-  tree stmt;
+  gimple stmt;
   bool reexamine;
 
   if (bitmap_bit_p (computed[object_size_type], varno))
@@ -709,51 +757,57 @@ collect_object_sizes_for (struct object_size_info *osi, tree var)
   stmt = SSA_NAME_DEF_STMT (var);
   reexamine = false;
 
-  switch (TREE_CODE (stmt))
+  switch (gimple_code (stmt))
     {
-    case RETURN_EXPR:
-      gcc_assert (TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT);
-      stmt = TREE_OPERAND (stmt, 0);
-      /* FALLTHRU  */
-
-    case GIMPLE_MODIFY_STMT:
+    case GIMPLE_ASSIGN:
       {
-       tree rhs = GIMPLE_STMT_OPERAND (stmt, 1), arg;
-       STRIP_NOPS (rhs);
-
-       if (TREE_CODE (rhs) == CALL_EXPR)
-         {
-           arg = pass_through_call (rhs);
-           if (arg)
-             rhs = arg;
-         }
-
-       if (TREE_CODE (rhs) == SSA_NAME
-           && POINTER_TYPE_P (TREE_TYPE (rhs)))
-         reexamine = merge_object_sizes (osi, var, rhs, 0);
-
-       else if (TREE_CODE (rhs) == POINTER_PLUS_EXPR)
-         reexamine = plus_expr_object_size (osi, var, rhs);
-
-        else if (TREE_CODE (rhs) == COND_EXPR)
-         reexamine = cond_expr_object_size (osi, var, rhs);
+        if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
+          reexamine = plus_stmt_object_size (osi, var, stmt);
+        else if (gimple_assign_single_p (stmt)
+                 || gimple_assign_unary_nop_p (stmt))
+          {
+            tree rhs = gimple_assign_rhs1 (stmt);
+
+            if (TREE_CODE (rhs) == SSA_NAME
+                && POINTER_TYPE_P (TREE_TYPE (rhs)))
+              reexamine = merge_object_sizes (osi, var, rhs, 0);
+            else if (TREE_CODE (rhs) == COND_EXPR)
+              reexamine = cond_expr_object_size (osi, var, rhs);
+            else
+              expr_object_size (osi, var, rhs);
+          }
+        else
+          unknown_object_size (osi, var);
+        break;
+      }
 
-       else
-         expr_object_size (osi, var, rhs);
+    case GIMPLE_CALL:
+      {
+        tree arg = pass_through_call (stmt);
+        if (arg)
+          {
+            if (TREE_CODE (arg) == SSA_NAME
+                && POINTER_TYPE_P (TREE_TYPE (arg)))
+              reexamine = merge_object_sizes (osi, var, arg, 0);
+            else if (TREE_CODE (arg) == COND_EXPR)
+              reexamine = cond_expr_object_size (osi, var, arg);
+            else
+              expr_object_size (osi, var, arg);
+          }
+        else
+          call_object_size (osi, var, stmt);
        break;
       }
 
-    case ASM_EXPR:
+    case GIMPLE_ASM:
       /* Pointers defined by __asm__ statements can point anywhere.  */
       object_sizes[object_size_type][varno] = unknown[object_size_type];
       break;
 
-    case NOP_EXPR:
+    case GIMPLE_NOP:
       {
        tree decl = SSA_NAME_VAR (var);
 
-       gcc_assert (IS_EMPTY_STMT (stmt));
-
        if (TREE_CODE (decl) != PARM_DECL && DECL_INITIAL (decl))
          expr_object_size (osi, var, DECL_INITIAL (decl));
        else
@@ -761,13 +815,13 @@ collect_object_sizes_for (struct object_size_info *osi, tree var)
       }
       break;
 
-    case PHI_NODE:
+    case GIMPLE_PHI:
       {
-       int i;
+       unsigned i;
 
-       for (i = 0; i < PHI_NUM_ARGS (stmt); i++)
+       for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
-           tree rhs = PHI_ARG_DEF (stmt, i);
+           tree rhs = gimple_phi_arg (stmt, i)->def;
 
            if (object_sizes[object_size_type][varno]
                == unknown[object_size_type])
@@ -780,6 +834,7 @@ collect_object_sizes_for (struct object_size_info *osi, tree var)
          }
        break;
       }
+
     default:
       gcc_unreachable ();
     }
@@ -810,7 +865,7 @@ static void
 check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
                           unsigned int depth)
 {
-  tree stmt = SSA_NAME_DEF_STMT (var);
+  gimple stmt = SSA_NAME_DEF_STMT (var);
   unsigned int varno = SSA_NAME_VERSION (var);
 
   if (osi->depths[varno])
@@ -838,57 +893,61 @@ check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
   osi->depths[varno] = depth;
   *osi->tos++ = varno;
 
-  switch (TREE_CODE (stmt))
+  switch (gimple_code (stmt))
     {
-    case RETURN_EXPR:
-      gcc_assert (TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT);
-      stmt = TREE_OPERAND (stmt, 0);
-      /* FALLTHRU  */
 
-    case GIMPLE_MODIFY_STMT:
+    case GIMPLE_ASSIGN:
       {
-       tree rhs = GIMPLE_STMT_OPERAND (stmt, 1), arg;
-       STRIP_NOPS (rhs);
-
-       if (TREE_CODE (rhs) == CALL_EXPR)
-         {
-           arg = pass_through_call (rhs);
-           if (arg)
-             rhs = arg;
-         }
-
-       if (TREE_CODE (rhs) == SSA_NAME)
-         check_for_plus_in_loops_1 (osi, rhs, depth);
-       else if (TREE_CODE (rhs) == POINTER_PLUS_EXPR)
-         {
-           tree op0 = TREE_OPERAND (rhs, 0);
-           tree op1 = TREE_OPERAND (rhs, 1);
-           tree cst, basevar;
-
-           basevar = op0;
-           cst = op1;
-           gcc_assert (TREE_CODE (cst) == INTEGER_CST);
+        if ((gimple_assign_single_p (stmt)
+             || gimple_assign_unary_nop_p (stmt))
+            && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
+          {
+            tree rhs = gimple_assign_rhs1 (stmt);
+
+            check_for_plus_in_loops_1 (osi, rhs, depth);
+          }
+        else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
+          {
+            tree basevar = gimple_assign_rhs1 (stmt);
+            tree cst = gimple_assign_rhs2 (stmt);
+
+            gcc_assert (TREE_CODE (cst) == INTEGER_CST);
+
+            check_for_plus_in_loops_1 (osi, basevar,
+                                       depth + !integer_zerop (cst));
+          }
+        else
+          gcc_unreachable ();
+        break;
+      }
 
-           check_for_plus_in_loops_1 (osi, basevar,
-                                      depth + !integer_zerop (cst));
-         }
-       else
-         gcc_unreachable ();
-       break;
+    case GIMPLE_CALL:
+      {
+        tree arg = pass_through_call (stmt);
+        if (arg)
+          {
+            if (TREE_CODE (arg) == SSA_NAME)
+              check_for_plus_in_loops_1 (osi, arg, depth);
+            else
+              gcc_unreachable ();
+          }
+        break;
       }
-    case PHI_NODE:
+
+    case GIMPLE_PHI:
       {
-       int i;
+       unsigned i;
 
-       for (i = 0; i < PHI_NUM_ARGS (stmt); i++)
+       for (i = 0; i < gimple_phi_num_args (stmt); i++)
          {
-           tree rhs = PHI_ARG_DEF (stmt, i);
+           tree rhs = gimple_phi_arg (stmt, i)->def;
 
            if (TREE_CODE (rhs) == SSA_NAME)
              check_for_plus_in_loops_1 (osi, rhs, depth);
          }
        break;
       }
+
     default:
       gcc_unreachable ();
     }
@@ -905,50 +964,29 @@ check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
 static void
 check_for_plus_in_loops (struct object_size_info *osi, tree var)
 {
-  tree stmt = SSA_NAME_DEF_STMT (var);
+  gimple stmt = SSA_NAME_DEF_STMT (var);
 
-  switch (TREE_CODE (stmt))
-    {
-    case RETURN_EXPR:
-      gcc_assert (TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT);
-      stmt = TREE_OPERAND (stmt, 0);
-      /* FALLTHRU  */
-
-    case GIMPLE_MODIFY_STMT:
-      {
-       tree rhs = GIMPLE_STMT_OPERAND (stmt, 1), arg;
-       STRIP_NOPS (rhs);
-
-       if (TREE_CODE (rhs) == CALL_EXPR)
-         {
-           arg = pass_through_call (rhs);
-           if (arg)
-             rhs = arg;
-         }
+  /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
+     and looked for a POINTER_PLUS_EXPR in the pass-through
+     argument, if any.  In GIMPLE, however, such an expression
+     is not a valid call operand.  */
 
-       if (TREE_CODE (rhs) == POINTER_PLUS_EXPR)
-         {
-           tree op0 = TREE_OPERAND (rhs, 0);
-           tree op1 = TREE_OPERAND (rhs, 1);
-           tree cst, basevar;
-
-           basevar = op0;
-           cst = op1;
-           gcc_assert (TREE_CODE (cst) == INTEGER_CST);
-
-           if (integer_zerop (cst))
-             break;
-
-           osi->depths[SSA_NAME_VERSION (basevar)] = 1;
-           *osi->tos++ = SSA_NAME_VERSION (basevar);
-           check_for_plus_in_loops_1 (osi, var, 2);
-           osi->depths[SSA_NAME_VERSION (basevar)] = 0;
-           osi->tos--;
-         }
-       break;
-      }
-    default:
-      break;
+  if (is_gimple_assign (stmt)
+      && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
+    {
+      tree basevar = gimple_assign_rhs1 (stmt);
+      tree cst = gimple_assign_rhs2 (stmt);
+
+      gcc_assert (TREE_CODE (cst) == INTEGER_CST);
+
+      if (integer_zerop (cst))
+        return;
+
+      osi->depths[SSA_NAME_VERSION (basevar)] = 1;
+      *osi->tos++ = SSA_NAME_VERSION (basevar);
+      check_for_plus_in_loops_1 (osi, var, 2);
+      osi->depths[SSA_NAME_VERSION (basevar)] = 0;
+      osi->tos--;
     }
 }
 
@@ -997,30 +1035,29 @@ compute_object_sizes (void)
   basic_block bb;
   FOR_EACH_BB (bb)
     {
-      block_stmt_iterator i;
-      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
+      gimple_stmt_iterator i;
+      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
-         tree *stmtp = bsi_stmt_ptr (i);
-         tree call = get_rhs (*stmtp);
          tree callee, result;
+         gimple call = gsi_stmt (i);
 
-         if (!call || TREE_CODE (call) != CALL_EXPR)
+          if (gimple_code (call) != GIMPLE_CALL)
            continue;
 
-         callee = get_callee_fndecl (call);
+         callee = gimple_call_fndecl (call);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
              || DECL_FUNCTION_CODE (callee) != BUILT_IN_OBJECT_SIZE)
            continue;
 
          init_object_sizes ();
-         result = fold_call_expr (call, false);
+         result = fold_call_stmt (call, false);
          if (!result)
            {
-             if (call_expr_nargs (call) == 2
-                 && POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (call, 0))))
+             if (gimple_call_num_args (call) == 2
+                 && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
                {
-                 tree ost = CALL_EXPR_ARG (call, 1);
+                 tree ost = gimple_call_arg (call, 1);
 
                  if (host_integerp (ost, 1))
                    {
@@ -1042,17 +1079,19 @@ compute_object_sizes (void)
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
-             print_generic_stmt (dump_file, *stmtp, dump_flags);
+             print_gimple_stmt (dump_file, call, 0, dump_flags);
            }
 
-         if (!set_rhs (stmtp, result))
+         if (!update_call_from_tree (&i, result))
            gcc_unreachable ();
-         update_stmt (*stmtp);
+
+          /* NOTE: In the pre-tuples code, we called update_stmt here.  This is
+             now handled by gsi_replace, called from update_call_from_tree.  */
 
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
-             print_generic_stmt (dump_file, *stmtp, dump_flags);
+             print_gimple_stmt (dump_file, call, 0, dump_flags);
              fprintf (dump_file, "\n");
            }
        }
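
compute_object_sizes above recognizes __builtin_object_size calls directly on the GIMPLE_CALL and folds them in place through the iterator.  A hedged sketch of that recognize-and-fold step, not part of the patch and restricted to the helpers already used in the hunks above:

static bool
example_fold_object_size (gimple_stmt_iterator *gsi)
{
  gimple call = gsi_stmt (*gsi);
  tree callee, result;

  if (gimple_code (call) != GIMPLE_CALL)
    return false;

  callee = gimple_call_fndecl (call);
  if (!callee
      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
      || DECL_FUNCTION_CODE (callee) != BUILT_IN_OBJECT_SIZE)
    return false;

  result = fold_call_stmt (call, false);
  if (!result)
    return false;

  /* update_call_from_tree replaces the call through the iterator and
     takes care of the bookkeeping that update_stmt used to do.  */
  return update_call_from_tree (gsi, result);
}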
index 820df4c..ba3d0fc 100644 (file)
@@ -289,7 +289,7 @@ unsigned int
 execute_fixup_cfg (void)
 {
   basic_block bb;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   int todo = gimple_in_ssa_p (cfun) ? TODO_verify_ssa : 0;
 
   cfun->after_inlining = true;
@@ -297,35 +297,36 @@ execute_fixup_cfg (void)
   if (cfun->eh)
     FOR_EACH_BB (bb)
       {
-       for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
          {
-           tree stmt = bsi_stmt (bsi);
-           tree call = get_call_expr_in (stmt);
-           tree decl = call ? get_callee_fndecl (call) : NULL;
-
-           if (decl && call_expr_flags (call) & (ECF_CONST | ECF_PURE 
-                                                 | ECF_LOOPING_CONST_OR_PURE)
-               && TREE_SIDE_EFFECTS (call))
+           gimple stmt = gsi_stmt (gsi);
+           tree decl = is_gimple_call (stmt)
+                       ? gimple_call_fndecl (stmt)
+                       : NULL;
+
+           if (decl
+               && gimple_call_flags (stmt) & (ECF_CONST
+                                              | ECF_PURE 
+                                              | ECF_LOOPING_CONST_OR_PURE))
              {
                if (gimple_in_ssa_p (cfun))
                  {
                    todo |= TODO_update_ssa | TODO_cleanup_cfg;
                    update_stmt (stmt);
                  }
-               TREE_SIDE_EFFECTS (call) = 0;
              }
-           if (decl && TREE_NOTHROW (decl))
-             TREE_NOTHROW (call) = 1;
-           if (!tree_could_throw_p (stmt) && lookup_stmt_eh_region (stmt))
+
+           if (!stmt_could_throw_p (stmt) && lookup_stmt_eh_region (stmt))
              remove_stmt_from_eh_region (stmt);
          }
-       if (tree_purge_dead_eh_edges (bb))
+
+       if (gimple_purge_dead_eh_edges (bb))
           todo |= TODO_cleanup_cfg;
       }
 
   /* Dump a textual representation of the flowgraph.  */
   if (dump_file)
-    dump_tree_cfg (dump_file, dump_flags);
+    gimple_dump_cfg (dump_file, dump_flags);
 
   return todo;
 }
@@ -367,7 +368,7 @@ tree_lowering_passes (tree fn)
 
   current_function_decl = fn;
   push_cfun (DECL_STRUCT_FUNCTION (fn));
-  tree_register_cfg_hooks ();
+  gimple_register_cfg_hooks ();
   bitmap_obstack_initialize (NULL);
   execute_pass_list (all_lowering_passes);
   if (optimize && cgraph_global_info_ready)
@@ -410,7 +411,7 @@ tree_rest_of_compilation (tree fndecl)
      not safe to try to expand expressions involving them.  */
   cfun->dont_save_pending_sizes_p = 1;
   
-  tree_register_cfg_hooks ();
+  gimple_register_cfg_hooks ();
 
   bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/
   /* Perform all tree transforms and optimizations.  */
@@ -421,7 +422,6 @@ tree_rest_of_compilation (tree fndecl)
   /* Release the default bitmap obstack.  */
   bitmap_obstack_release (NULL);
   
-  DECL_SAVED_TREE (fndecl) = NULL;
   set_cfun (NULL);
 
   /* If requested, warn about function definitions where the function will
@@ -448,7 +448,7 @@ tree_rest_of_compilation (tree fndecl)
        }
     }
 
-  DECL_SAVED_TREE (fndecl) = NULL;
+  gimple_set_body (fndecl, NULL);
   if (DECL_STRUCT_FUNCTION (fndecl) == 0
       && !cgraph_node (fndecl)->origin)
     {
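
The EH fixup in execute_fixup_cfg above reduces to one pass over each block: drop statements that can no longer throw from their EH region, then purge any EH edges that became dead.  A hedged sketch of that per-block step, not part of the patch; the wrapper itself is hypothetical, the calls are the ones shown in the hunk:

static bool
example_fixup_eh_in_bb (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      /* A statement still registered in an EH region but no longer
         able to throw must be taken out of that region.  */
      if (!stmt_could_throw_p (stmt) && lookup_stmt_eh_region (stmt))
        remove_stmt_from_eh_region (stmt);
    }

  /* True means an EH edge was removed, so CFG cleanup is due.  */
  return gimple_purge_dead_eh_edges (bb);
}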
index 21f362b..58aed88 100644 (file)
@@ -141,9 +141,9 @@ create_temp (tree t)
 static void
 insert_copy_on_edge (edge e, tree dest, tree src)
 {
-  tree copy;
+  gimple copy;
 
-  copy = build_gimple_modify_stmt (dest, src);
+  copy = gimple_build_assign (dest, src);
   set_is_used (dest);
 
   if (TREE_CODE (src) == ADDR_EXPR)
@@ -157,11 +157,11 @@ insert_copy_on_edge (edge e, tree dest, tree src)
               "Inserting a copy on edge BB%d->BB%d :",
               e->src->index,
               e->dest->index);
-      print_generic_expr (dump_file, copy, dump_flags);
+      print_gimple_stmt (dump_file, copy, 0, dump_flags);
       fprintf (dump_file, "\n");
     }
 
-  bsi_insert_on_edge (e, copy);
+  gsi_insert_on_edge (e, copy);
 }
 
 
@@ -315,15 +315,17 @@ eliminate_name (elim_graph g, tree T)
 static void
 eliminate_build (elim_graph g, basic_block B)
 {
-  tree phi;
   tree T0, Ti;
   int p0, pi;
+  gimple_stmt_iterator gsi;
 
   clear_elim_graph (g);
   
-  for (phi = phi_nodes (B); phi; phi = PHI_CHAIN (phi))
+  for (gsi = gsi_start_phis (B); !gsi_end_p (gsi); gsi_next (&gsi))
     {
-      T0 = var_to_partition_to_var (g->map, PHI_RESULT (phi));
+      gimple phi = gsi_stmt (gsi);
+
+      T0 = var_to_partition_to_var (g->map, gimple_phi_result (phi));
       
       /* Ignore results which are not in partitions.  */
       if (T0 == NULL_TREE)
@@ -551,7 +553,7 @@ assign_vars (var_map map)
    If the stmt is changed, return true.  */ 
 
 static inline bool
-replace_use_variable (var_map map, use_operand_p p, tree *expr)
+replace_use_variable (var_map map, use_operand_p p, gimple *expr)
 {
   tree new_var;
   tree var = USE_FROM_PTR (p);
@@ -562,11 +564,7 @@ replace_use_variable (var_map map, use_operand_p p, tree *expr)
       int version = SSA_NAME_VERSION (var);
       if (expr[version])
         {
-         tree new_expr = GIMPLE_STMT_OPERAND (expr[version], 1);
-         SET_USE (p, new_expr);
-
-         /* Clear the stmt's RHS, or GC might bite us.  */
-         GIMPLE_STMT_OPERAND (expr[version], 1) = NULL_TREE;
+         SET_USE (p, gimple_assign_rhs_to_tree (expr[version]));
          return true;
        }
     }
@@ -614,20 +612,20 @@ static void
 eliminate_virtual_phis (void)
 {
   basic_block bb;
-  tree phi, next;
+  gimple_stmt_iterator gsi;
 
   FOR_EACH_BB (bb)
     {
-      for (phi = phi_nodes (bb); phi; phi = next)
+      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
         {
-         next = PHI_CHAIN (phi);
-         if (!is_gimple_reg (SSA_NAME_VAR (PHI_RESULT (phi))))
+         gimple phi = gsi_stmt (gsi);
+         if (!is_gimple_reg (SSA_NAME_VAR (gimple_phi_result (phi))))
            {
 #ifdef ENABLE_CHECKING
-             int i;
+             size_t i;
              /* There should be no arguments of this PHI which are in
                 the partition list, or we get incorrect results.  */
-             for (i = 0; i < PHI_NUM_ARGS (phi); i++)
+             for (i = 0; i < gimple_phi_num_args (phi); i++)
                {
                  tree arg = PHI_ARG_DEF (phi, i);
                  if (TREE_CODE (arg) == SSA_NAME 
@@ -636,13 +634,15 @@ eliminate_virtual_phis (void)
                      fprintf (stderr, "Argument of PHI is not virtual (");
                      print_generic_expr (stderr, arg, TDF_SLIM);
                      fprintf (stderr, "), but the result is :");
-                     print_generic_stmt (stderr, phi, TDF_SLIM);
+                     print_gimple_stmt (stderr, phi, 0, TDF_SLIM);
                      internal_error ("SSA corruption");
                    }
                }
 #endif
-             remove_phi_node (phi, NULL_TREE, true);
+             remove_phi_node (&gsi, true);
            }
+          else
+            gsi_next (&gsi);
        }
     }
 }
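
Removing PHIs under tuples also goes through the iterator: remove_phi_node leaves the iterator on the following PHI, so gsi_next is only called when the node is kept.  A hedged sketch of the loop shape used by eliminate_virtual_phis above, not part of the patch:

static void
example_drop_virtual_phis (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
    {
      gimple phi = gsi_stmt (gsi);

      /* Virtual PHIs (non-register results) are no longer needed once
         we leave SSA form.  */
      if (!is_gimple_reg (SSA_NAME_VAR (gimple_phi_result (phi))))
        remove_phi_node (&gsi, true);
      else
        gsi_next (&gsi);
    }
}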
@@ -655,13 +655,13 @@ eliminate_virtual_phis (void)
    variable.  */
 
 static void
-rewrite_trees (var_map map, tree *values)
+rewrite_trees (var_map map, gimple *values)
 {
   elim_graph g;
   basic_block bb;
-  block_stmt_iterator si;
+  gimple_stmt_iterator gsi;
   edge e;
-  tree phi;
+  gimple_seq phi;
   bool changed;
  
 #ifdef ENABLE_CHECKING
@@ -670,14 +670,14 @@ rewrite_trees (var_map map, tree *values)
      create incorrect code.  */
   FOR_EACH_BB (bb)
     {
-      tree phi;
-      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
-         tree T0 = var_to_partition_to_var (map, PHI_RESULT (phi));
+         gimple phi = gsi_stmt (gsi);
+         tree T0 = var_to_partition_to_var (map, gimple_phi_result (phi));
          if (T0 == NULL_TREE)
            {
-             int i;
-             for (i = 0; i < PHI_NUM_ARGS (phi); i++)
+             size_t i;
+             for (i = 0; i < gimple_phi_num_args (phi); i++)
                {
                  tree arg = PHI_ARG_DEF (phi, i);
 
@@ -687,7 +687,7 @@ rewrite_trees (var_map map, tree *values)
                      fprintf (stderr, "Argument of PHI is in a partition :(");
                      print_generic_expr (stderr, arg, TDF_SLIM);
                      fprintf (stderr, "), but the result is not :");
-                     print_generic_stmt (stderr, phi, TDF_SLIM);
+                     print_gimple_stmt (stderr, phi, 0, TDF_SLIM);
                      internal_error ("SSA corruption");
                    }
                }
@@ -701,21 +701,18 @@ rewrite_trees (var_map map, tree *values)
   g->map = map;
   FOR_EACH_BB (bb)
     {
-      for (si = bsi_start (bb); !bsi_end_p (si); )
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
        {
-         tree stmt = bsi_stmt (si);
+         gimple stmt = gsi_stmt (gsi);
          use_operand_p use_p, copy_use_p;
          def_operand_p def_p;
          bool remove = false, is_copy = false;
          int num_uses = 0;
-         stmt_ann_t ann;
          ssa_op_iter iter;
 
-         ann = stmt_ann (stmt);
          changed = false;
 
-         if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT 
-             && (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == SSA_NAME))
+         if (gimple_assign_copy_p (stmt))
            is_copy = true;
 
          copy_use_p = NULL_USE_OPERAND_P;
@@ -759,13 +756,13 @@ rewrite_trees (var_map map, tree *values)
 
          /* Remove any stmts marked for removal.  */
          if (remove)
-           bsi_remove (&si, true);
+           gsi_remove (&gsi, true);
          else
            {
              if (changed)
                if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
-                 tree_purge_dead_eh_edges (bb);
-             bsi_next (&si);
+                 gimple_purge_dead_eh_edges (bb);
+             gsi_next (&gsi);
            }
        }
 
@@ -784,7 +781,7 @@ rewrite_trees (var_map map, tree *values)
 /* These are the local work structures used to determine the best place to 
    insert the copies that were placed on edges by the SSA->normal pass..  */
 static VEC(edge,heap) *edge_leader;
-static VEC(tree,heap) *stmt_list;
+static VEC(gimple_seq,heap) *stmt_list;
 static bitmap leader_has_match = NULL;
 static edge leader_match = NULL;
 
@@ -803,22 +800,19 @@ same_stmt_list_p (edge e)
 /* Return TRUE if S1 and S2 are equivalent copies.  */
 
 static inline bool
-identical_copies_p (const_tree s1, const_tree s2)
+identical_copies_p (const_gimple s1, const_gimple s2)
 {
 #ifdef ENABLE_CHECKING
-  gcc_assert (TREE_CODE (s1) == GIMPLE_MODIFY_STMT);
-  gcc_assert (TREE_CODE (s2) == GIMPLE_MODIFY_STMT);
-  gcc_assert (DECL_P (GIMPLE_STMT_OPERAND (s1, 0)));
-  gcc_assert (DECL_P (GIMPLE_STMT_OPERAND (s2, 0)));
+  gcc_assert (is_gimple_assign (s1));
+  gcc_assert (is_gimple_assign (s2));
+  gcc_assert (DECL_P (gimple_assign_lhs (s1)));
+  gcc_assert (DECL_P (gimple_assign_lhs (s2)));
 #endif
 
-  if (GIMPLE_STMT_OPERAND (s1, 0) != GIMPLE_STMT_OPERAND (s2, 0))
+  if (gimple_assign_lhs (s1) != gimple_assign_lhs (s2))
     return false;
 
-  s1 = GIMPLE_STMT_OPERAND (s1, 1);
-  s2 = GIMPLE_STMT_OPERAND (s2, 1);
-
-  if (s1 != s2)
+  if (gimple_assign_rhs1 (s1) != gimple_assign_rhs1 (s2))
     return false;
 
   return true;
@@ -831,22 +825,19 @@ identical_copies_p (const_tree s1, const_tree s2)
 static inline bool 
 identical_stmt_lists_p (const_edge e1, const_edge e2)
 {
-  tree t1 = PENDING_STMT (e1);
-  tree t2 = PENDING_STMT (e2);
-  tree_stmt_iterator tsi1, tsi2;
+  gimple_seq t1 = PENDING_STMT (e1);
+  gimple_seq t2 = PENDING_STMT (e2);
+  gimple_stmt_iterator gsi1, gsi2;
 
-  gcc_assert (TREE_CODE (t1) == STATEMENT_LIST);
-  gcc_assert (TREE_CODE (t2) == STATEMENT_LIST);
-
-  for (tsi1 = tsi_start (t1), tsi2 = tsi_start (t2);
-       !tsi_end_p (tsi1) && !tsi_end_p (tsi2); 
-       tsi_next (&tsi1), tsi_next (&tsi2))
+  for (gsi1 = gsi_start (t1), gsi2 = gsi_start (t2);
+       !gsi_end_p (gsi1) && !gsi_end_p (gsi2); 
+       gsi_next (&gsi1), gsi_next (&gsi2))
     {
-      if (!identical_copies_p (tsi_stmt (tsi1), tsi_stmt (tsi2)))
+      if (!identical_copies_p (gsi_stmt (gsi1), gsi_stmt (gsi2)))
         break;
     }
 
-  if (!tsi_end_p (tsi1) || ! tsi_end_p (tsi2))
+  if (!gsi_end_p (gsi1) || !gsi_end_p (gsi2))
     return false;
 
   return true;
@@ -859,7 +850,7 @@ static void
 init_analyze_edges_for_bb (void)
 {
   edge_leader = VEC_alloc (edge, heap, 25);
-  stmt_list = VEC_alloc (tree, heap, 25);
+  stmt_list = VEC_alloc (gimple_seq, heap, 25);
   leader_has_match = BITMAP_ALLOC (NULL);
 }
 
@@ -870,7 +861,7 @@ static void
 fini_analyze_edges_for_bb (void)
 {
   VEC_free (edge, heap, edge_leader);
-  VEC_free (tree, heap, stmt_list);
+  VEC_free (gimple_seq, heap, stmt_list);
   BITMAP_FREE (leader_has_match);
 }
 
@@ -902,13 +893,14 @@ contains_tree_r (tree * tp, int *walk_subtrees, void *data)
 static bool
 process_single_block_loop_latch (edge single_edge)
 {
-  tree stmts;
+  gimple_seq stmts;
   basic_block b_exit, b_pheader, b_loop = single_edge->src;
   edge_iterator ei;
   edge e;
-  block_stmt_iterator bsi, bsi_exit;
-  tree_stmt_iterator tsi;
-  tree expr, stmt;
+  gimple_stmt_iterator gsi, gsi_exit;
+  gimple_stmt_iterator tsi;
+  tree expr;
+  gimple stmt;
   unsigned int count = 0;
 
   if (single_edge == NULL || (single_edge->dest != single_edge->src)
@@ -941,29 +933,31 @@ process_single_block_loop_latch (edge single_edge)
   if (b_exit == b_pheader || b_exit == b_loop || b_pheader == b_loop)
     return false;
 
-  bsi_exit = bsi_after_labels (b_exit);
+  gsi_exit = gsi_after_labels (b_exit);
 
   /* Get the last stmt in the loop body.  */
-  bsi = bsi_last (single_edge->src);
-  stmt = bsi_stmt (bsi);
+  gsi = gsi_last_bb (single_edge->src);
+  stmt = gsi_stmt (gsi);
 
-  if (TREE_CODE (stmt) != COND_EXPR)
+  if (gimple_code (stmt) != GIMPLE_COND)
     return false;
 
-  expr = COND_EXPR_COND (stmt);
+
+  expr = build2 (gimple_cond_code (stmt), boolean_type_node,
+                 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
   /* Iterate over the insns on the latch and count them.  */
-  for (tsi = tsi_start (stmts); !tsi_end_p (tsi); tsi_next (&tsi))
+  for (tsi = gsi_start (stmts); !gsi_end_p (tsi); gsi_next (&tsi))
     {
-      tree stmt1 = tsi_stmt (tsi);
+      gimple stmt1 = gsi_stmt (tsi);
       tree var;
 
       count++;
       /* Check that the condition does not contain any new definition
          created in the latch as the stmts from the latch intended
          to precede it.  */
-      if (TREE_CODE (stmt1) != GIMPLE_MODIFY_STMT)
+      if (gimple_code (stmt1) != GIMPLE_ASSIGN)
         return false;
-      var = GIMPLE_STMT_OPERAND (stmt1, 0);
+      var = gimple_assign_lhs (stmt1);
       if (TREE_THIS_VOLATILE (var)
          || TYPE_VOLATILE (TREE_TYPE (var))
          || walk_tree (&expr, contains_tree_r, var, NULL))
@@ -999,25 +993,26 @@ process_single_block_loop_latch (edge single_edge)
      var = tmp_var;
      ... 
    */
-  for (tsi = tsi_start (stmts); !tsi_end_p (tsi); tsi_next (&tsi))
+  for (tsi = gsi_start (stmts); !gsi_end_p (tsi); gsi_next (&tsi))
     {
-      tree stmt1 = tsi_stmt (tsi);
-      tree var, tmp_var, copy;
+      gimple stmt1 = gsi_stmt (tsi);
+      tree var, tmp_var;
+      gimple copy;
 
       /* Create a new variable to load back the value of var in case
          we exit the loop.  */
-      var = GIMPLE_STMT_OPERAND (stmt1, 0);
+      var = gimple_assign_lhs (stmt1);
       tmp_var = create_temp (var);
-      copy = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (tmp_var), tmp_var, var);
+      copy = gimple_build_assign (tmp_var, var);
       set_is_used (tmp_var);
-      bsi_insert_before (&bsi, copy, BSI_SAME_STMT);
-      copy = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (tmp_var), var, tmp_var);
-      bsi_insert_before (&bsi_exit, copy, BSI_SAME_STMT);
+      gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
+      copy = gimple_build_assign (var, tmp_var);
+      gsi_insert_before (&gsi_exit, copy, GSI_SAME_STMT);
     }
 
   PENDING_STMT (single_edge) = 0;
   /* Insert the new stmts to the loop body.  */
-  bsi_insert_before (&bsi, stmts, BSI_NEW_STMT);
+  gsi_insert_seq_before (&gsi, stmts, GSI_NEW_STMT);
 
   if (dump_file)
     fprintf (dump_file,
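
The hunk above shows the recurring replacement of build2 (GIMPLE_MODIFY_STMT, ...) plus bsi_insert_before by gimple_build_assign plus gsi_insert_before.  Reduced to its core, the new pattern looks like this (illustrative sketch only, hypothetical helper name, not part of the patch):

/* Sketch: copy VAR into TMP immediately before the statement at *GSI.  */
static void
insert_copy_before (gimple_stmt_iterator *gsi, tree tmp, tree var)
{
  gimple copy = gimple_build_assign (tmp, var);

  gsi_insert_before (gsi, copy, GSI_SAME_STMT);
}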
@@ -1038,8 +1033,8 @@ analyze_edges_for_bb (basic_block bb)
   int count;
   unsigned int x;
   bool have_opportunity;
-  block_stmt_iterator bsi;
-  tree stmt;
+  gimple_stmt_iterator gsi;
+  gimple stmt;
   edge single_edge = NULL;
   bool is_label;
   edge leader;
@@ -1061,7 +1056,7 @@ analyze_edges_for_bb (basic_block bb)
     {
       FOR_EACH_EDGE (e, ei, bb->preds)
        if (PENDING_STMT (e))
-         bsi_commit_one_edge_insert (e, NULL);
+         gsi_commit_one_edge_insert (e, NULL);
       return;
     }
 
@@ -1074,18 +1069,19 @@ analyze_edges_for_bb (basic_block bb)
          gcc_assert (!(e->flags & EDGE_ABNORMAL));
          if (e->flags & EDGE_FALLTHRU)
            {
-             bsi = bsi_start (e->src);
-             if (!bsi_end_p (bsi))
+             gsi = gsi_start_bb (e->src);
+             if (!gsi_end_p (gsi))
                {
-                 stmt = bsi_stmt (bsi);
-                 bsi_next (&bsi);
-                 gcc_assert (stmt != NULL_TREE);
-                 is_label = (TREE_CODE (stmt) == LABEL_EXPR);
+                 stmt = gsi_stmt (gsi);
+                 gsi_next (&gsi);
+                 gcc_assert (stmt != NULL);
+                 is_label = (gimple_code (stmt) == GIMPLE_LABEL);
                  /* Punt if it has non-label stmts, or isn't local.  */
-                 if (!is_label || DECL_NONLOCAL (TREE_OPERAND (stmt, 0)) 
-                     || !bsi_end_p (bsi))
+                 if (!is_label
+                     || DECL_NONLOCAL (gimple_label_label (stmt)) 
+                     || !gsi_end_p (gsi))
                    {
-                     bsi_commit_one_edge_insert (e, NULL);
+                     gsi_commit_one_edge_insert (e, NULL);
                      continue;
                    }
                }
@@ -1103,7 +1099,7 @@ analyze_edges_for_bb (basic_block bb)
        /* Add stmts to the edge unless processed specially as a
           single-block loop latch edge. */
        if (!process_single_block_loop_latch (single_edge))
-         bsi_commit_one_edge_insert (single_edge, NULL);
+         gsi_commit_one_edge_insert (single_edge, NULL);
       }
       return;
     }
@@ -1111,7 +1107,7 @@ analyze_edges_for_bb (basic_block bb)
   /* Ensure that we have empty worklists.  */
 #ifdef ENABLE_CHECKING
   gcc_assert (VEC_length (edge, edge_leader) == 0);
-  gcc_assert (VEC_length (tree, stmt_list) == 0);
+  gcc_assert (VEC_length (gimple_seq, stmt_list) == 0);
   gcc_assert (bitmap_empty_p (leader_has_match));
 #endif
 
@@ -1144,7 +1140,7 @@ analyze_edges_for_bb (basic_block bb)
          if (!found)
            {
              VEC_safe_push (edge, heap, edge_leader, e);
-             VEC_safe_push (tree, heap, stmt_list, PENDING_STMT (e));
+             VEC_safe_push (gimple_seq, heap, stmt_list, PENDING_STMT (e));
            }
        }
      }
@@ -1153,9 +1149,9 @@ analyze_edges_for_bb (basic_block bb)
   if (!have_opportunity)
     {
       for (x = 0; VEC_iterate (edge, edge_leader, x, leader); x++)
-       bsi_commit_one_edge_insert (leader, NULL);
+       gsi_commit_one_edge_insert (leader, NULL);
       VEC_truncate (edge, edge_leader, 0);
-      VEC_truncate (tree, stmt_list, 0);
+      VEC_truncate (gimple_seq, stmt_list, 0);
       bitmap_clear (leader_has_match);
       return;
     }
@@ -1170,8 +1166,8 @@ analyze_edges_for_bb (basic_block bb)
     if (bitmap_bit_p (leader_has_match, x))
       {
        edge new_edge;
-       block_stmt_iterator bsi;
-       tree curr_stmt_list;
+       gimple_stmt_iterator gsi;
+       gimple_seq curr_stmt_list;
 
        leader_match = leader;
 
@@ -1181,7 +1177,7 @@ analyze_edges_for_bb (basic_block bb)
           and use the saved stmt list.  */
        PENDING_STMT (leader) = NULL;
        leader->aux = leader;
-       curr_stmt_list = VEC_index (tree, stmt_list, x);
+       curr_stmt_list = VEC_index (gimple_seq, stmt_list, x);
 
         new_edge = make_forwarder_block (leader->dest, same_stmt_list_p, 
                                         NULL);
@@ -1191,7 +1187,7 @@ analyze_edges_for_bb (basic_block bb)
            fprintf (dump_file, "Splitting BB %d for Common stmt list.  ", 
                     leader->dest->index);
            fprintf (dump_file, "Original block is now BB%d.\n", bb->index);
-           print_generic_stmt (dump_file, curr_stmt_list, TDF_VOPS);
+           print_gimple_seq (dump_file, curr_stmt_list, 0, TDF_VOPS);
          }
 
        FOR_EACH_EDGE (e, ei, new_edge->src->preds)
@@ -1202,22 +1198,22 @@ analyze_edges_for_bb (basic_block bb)
                       e->src->index, e->dest->index);
          }
 
-       bsi = bsi_last (leader->dest);
-       bsi_insert_after (&bsi, curr_stmt_list, BSI_NEW_STMT);
+       gsi = gsi_last_bb (leader->dest);
+       gsi_insert_seq_after (&gsi, curr_stmt_list, GSI_NEW_STMT);
 
        leader_match = NULL;
        /* We should never get a new block now.  */
       }
     else
       {
-       PENDING_STMT (leader) = VEC_index (tree, stmt_list, x);
-       bsi_commit_one_edge_insert (leader, NULL);
+       PENDING_STMT (leader) = VEC_index (gimple_seq, stmt_list, x);
+       gsi_commit_one_edge_insert (leader, NULL);
       }
 
    
   /* Clear the working data structures.  */
   VEC_truncate (edge, edge_leader, 0);
-  VEC_truncate (tree, stmt_list, 0);
+  VEC_truncate (gimple_seq, stmt_list, 0);
   bitmap_clear (leader_has_match);
 }
 
@@ -1297,9 +1293,9 @@ static void
 remove_ssa_form (bool perform_ter)
 {
   basic_block bb;
-  tree phi, next;
-  tree *values = NULL;
+  gimple *values = NULL;
   var_map map;
+  gimple_stmt_iterator gsi;
 
   map = coalesce_ssa_name ();
 
@@ -1336,13 +1332,8 @@ remove_ssa_form (bool perform_ter)
 
   /* Remove PHI nodes which have been translated back to real variables.  */
   FOR_EACH_BB (bb)
-    {
-      for (phi = phi_nodes (bb); phi; phi = next)
-       {
-         next = PHI_CHAIN (phi);
-         remove_phi_node (phi, NULL_TREE, true);
-       }
-    }
+    for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
+      remove_phi_node (&gsi, true);
 
   /* If any copies were inserted on edges, analyze and insert them now.  */
   perform_edge_inserts ();
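
The PHI-removal loop above also illustrates a detail of the new interface: remove_phi_node now takes the iterator and advances it itself, so the loop deliberately omits gsi_next.  A stand-alone sketch of the same idiom (hypothetical helper, not part of the patch):

/* Sketch: release every PHI node in BB.  */
static void
remove_all_phis (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
    remove_phi_node (&gsi, true);
}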
@@ -1364,25 +1355,25 @@ static void
 insert_backedge_copies (void)
 {
   basic_block bb;
+  gimple_stmt_iterator gsi;
 
   FOR_EACH_BB (bb)
     {
-      tree phi;
-
-      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
-         tree result = PHI_RESULT (phi);
+         gimple phi = gsi_stmt (gsi);
+         tree result = gimple_phi_result (phi);
          tree result_var;
-         int i;
+         size_t i;
 
          if (!is_gimple_reg (result))
            continue;
 
          result_var = SSA_NAME_VAR (result);
-         for (i = 0; i < PHI_NUM_ARGS (phi); i++)
+         for (i = 0; i < gimple_phi_num_args (phi); i++)
            {
-             tree arg = PHI_ARG_DEF (phi, i);
-             edge e = PHI_ARG_EDGE (phi, i);
+             tree arg = gimple_phi_arg_def (phi, i);
+             edge e = gimple_phi_arg_edge (phi, i);
 
              /* If the argument is not an SSA_NAME, then we will need a 
                 constant initialization.  If the argument is an SSA_NAME with
@@ -1392,12 +1383,13 @@ insert_backedge_copies (void)
                  && (TREE_CODE (arg) != SSA_NAME
                      || SSA_NAME_VAR (arg) != result_var))
                {
-                 tree stmt, name, last = NULL;
-                 block_stmt_iterator bsi;
+                 tree name;
+                 gimple stmt, last = NULL;
+                 gimple_stmt_iterator gsi2;
 
-                 bsi = bsi_last (PHI_ARG_EDGE (phi, i)->src);
-                 if (!bsi_end_p (bsi))
-                   last = bsi_stmt (bsi);
+                 gsi2 = gsi_last_bb (gimple_phi_arg_edge (phi, i)->src);
+                 if (!gsi_end_p (gsi2))
+                   last = gsi_stmt (gsi2);
 
                  /* In theory the only way we ought to get back to the
                     start of a loop should be with a COND_EXPR or GOTO_EXPR.
@@ -1418,17 +1410,17 @@ insert_backedge_copies (void)
 
                  /* Create a new instance of the underlying variable of the 
                     PHI result.  */
-                 stmt = build_gimple_modify_stmt (NULL_TREE,
-                                                  PHI_ARG_DEF (phi, i));
+                 stmt = gimple_build_assign (result_var,
+                                             gimple_phi_arg_def (phi, i));
                  name = make_ssa_name (result_var, stmt);
-                 GIMPLE_STMT_OPERAND (stmt, 0) = name;
+                 gimple_assign_set_lhs (stmt, name);
 
                  /* Insert the new statement into the block and update
                     the PHI node.  */
                  if (last && stmt_ends_bb_p (last))
-                   bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
+                   gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
                  else
-                   bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
+                   gsi_insert_after (&gsi2, stmt, GSI_NEW_STMT);
                  SET_PHI_ARG_DEF (phi, i, name);
                }
            }
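
The backedge-copy hunk above also shows how a new SSA definition is created under tuples: the assignment is built with the base variable first, the SSA name is minted against that statement, and the LHS is then replaced.  As an isolated sketch (hypothetical helper, not part of the patch):

/* Sketch: build "VAR_N = RHS" where VAR_N is a fresh SSA name of VAR.  */
static gimple
build_ssa_copy (tree var, tree rhs)
{
  gimple stmt = gimple_build_assign (var, rhs);
  tree name = make_ssa_name (var, stmt);

  gimple_assign_set_lhs (stmt, name);
  return stmt;
}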
@@ -1454,12 +1446,12 @@ rewrite_out_of_ssa (void)
   eliminate_virtual_phis ();
 
   if (dump_file && (dump_flags & TDF_DETAILS))
-    dump_tree_cfg (dump_file, dump_flags & ~TDF_DETAILS);
+    gimple_dump_cfg (dump_file, dump_flags & ~TDF_DETAILS);
 
   remove_ssa_form (flag_tree_ter && !flag_mudflap);
 
   if (dump_file && (dump_flags & TDF_DETAILS))
-    dump_tree_cfg (dump_file, dump_flags & ~TDF_DETAILS);
+    gimple_dump_cfg (dump_file, dump_flags & ~TDF_DETAILS);
 
   cfun->gimple_df->in_ssa_p = false;
   return 0;
index be0fd9c..f2b8d04 100644
@@ -41,14 +41,14 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
    The implementation is straightforward -- for each loop we test whether its
    iterations are independent, and if it is the case (and some additional
    conditions regarding profitability and correctness are satisfied), we
-   add OMP_PARALLEL and OMP_FOR codes and let omp expansion machinery do
-   its job.
+   add GIMPLE_OMP_PARALLEL and GIMPLE_OMP_FOR codes and let omp expansion
+   machinery do its job.
    
    Most of the complexity is in bringing the code into the shape expected
    by the omp expanders:
-   -- for OMP_FOR, ensuring that the loop has only one induction variable
-      and that the exit test is at the start of the loop body
-   -- for OMP_PARALLEL, replacing the references to local addressable
+   -- for GIMPLE_OMP_FOR, ensuring that the loop has only one induction
+      variable and that the exit test is at the start of the loop body
+   -- for GIMPLE_OMP_PARALLEL, replacing the references to local addressable
       variables by accesses through pointers, and breaking up ssa chains
       by storing the values incoming to the parallelized loop to a structure
       passed to the new function as an argument (something similar is done
@@ -122,11 +122,11 @@ parloop
 
   sum.27_11 = D.1827_8 + sum.27_29;
 
-  OMP_CONTINUE
+  GIMPLE_OMP_CONTINUE
 
   # Adding this reduction phi is done at create_phi_for_local_result() #
   # sum.27_56 = PHI <sum.27_11, 0>
-  OMP_RETURN
+  GIMPLE_OMP_RETURN
   
   # Creating the atomic operation is done at 
   create_call_for_reduction_1()  #
@@ -136,7 +136,7 @@ parloop
   D.1840_60 = sum.27_56 + D.1839_59;
   #pragma omp atomic_store (D.1840_60);
   
-  OMP_RETURN
+  GIMPLE_OMP_RETURN
   
  # collecting the result after the join of the threads is done at
   create_loads_for_reductions().
@@ -166,15 +166,15 @@ parloop
    reduction in the current loop.  */
 struct reduction_info
 {
-  tree reduc_stmt;             /* reduction statement.  */
-  tree reduc_phi;              /* The phi node defining the reduction.  */
-  enum tree_code reduction_code;       /* code for the reduction operation.  */
-  tree keep_res;               /* The PHI_RESULT of this phi is the resulting value 
+  gimple reduc_stmt;           /* reduction statement.  */
+  gimple reduc_phi;            /* The phi node defining the reduction.  */
+  enum tree_code reduction_code;/* code for the reduction operation.  */
+  gimple keep_res;             /* The PHI_RESULT of this phi is the resulting value 
                                   of the reduction variable when exiting the loop. */
   tree initial_value;          /* The initial value of the reduction var before entering the loop.  */
   tree field;                  /*  the name of the field in the parloop data structure intended for reduction.  */
   tree init;                   /* reduction initialization value.  */
-  tree new_phi;                        /* (helper field) Newly created phi node whose result 
+  gimple new_phi;              /* (helper field) Newly created phi node whose result 
                                   will be passed to the atomic operation.  Represents
                                   the local result each thread computed for the reduction
                                   operation.  */
@@ -200,7 +200,7 @@ reduction_info_hash (const void *aa)
 }
 
 static struct reduction_info *
-reduction_phi (htab_t reduction_list, tree phi)
+reduction_phi (htab_t reduction_list, gimple phi)
 {
   struct reduction_info tmpred, *red;
 
@@ -249,14 +249,15 @@ name_to_copy_elt_hash (const void *aa)
    reductions are found, they are inserted to the REDUCTION_LIST.  */  
 
 static bool
-loop_parallel_p (struct loop *loop, htab_t reduction_list, struct tree_niter_desc *niter)
+loop_parallel_p (struct loop *loop, htab_t reduction_list,
+                struct tree_niter_desc *niter)
 {
   edge exit = single_dom_exit (loop);
   VEC (ddr_p, heap) * dependence_relations;
-  VEC (data_reference_p, heap) * datarefs;
+  VEC (data_reference_p, heap) *datarefs;
   lambda_trans_matrix trans;
   bool ret = false;
-  tree phi;
+  gimple_stmt_iterator gsi;
   loop_vec_info simple_loop_info;
 
   /* Only consider innermost loops with just one exit.  The innermost-loop
@@ -279,9 +280,10 @@ loop_parallel_p (struct loop *loop, htab_t reduction_list, struct tree_niter_des
 
   simple_loop_info = vect_analyze_loop_form (loop);
 
-  for (phi = phi_nodes (loop->header); phi; phi = PHI_CHAIN (phi))
+  for (gsi = gsi_start_phis (loop->header); !gsi_end_p (gsi); gsi_next (&gsi))
     {
-      tree reduc_stmt = NULL, operation;
+      gimple phi = gsi_stmt (gsi);
+      gimple reduc_stmt = NULL;
 
       /* ??? TODO: Change this into a generic function that 
          recognizes reductions.  */
@@ -302,7 +304,7 @@ loop_parallel_p (struct loop *loop, htab_t reduction_list, struct tree_niter_des
            {
              fprintf (dump_file,
                       "Detected reduction. reduction stmt is: \n");
-             print_generic_stmt (dump_file, reduc_stmt, 0);
+             print_gimple_stmt (dump_file, reduc_stmt, 0, 0);
              fprintf (dump_file, "\n");
            }
 
@@ -310,8 +312,7 @@ loop_parallel_p (struct loop *loop, htab_t reduction_list, struct tree_niter_des
 
          new_reduction->reduc_stmt = reduc_stmt;
          new_reduction->reduc_phi = phi;
-         operation = GIMPLE_STMT_OPERAND (reduc_stmt, 1);
-         new_reduction->reduction_code = TREE_CODE (operation);
+         new_reduction->reduction_code = gimple_assign_rhs_code (reduc_stmt);
          slot = htab_find_slot (reduction_list, new_reduction, INSERT);
          *slot = new_reduction;
        }
@@ -320,13 +321,13 @@ loop_parallel_p (struct loop *loop, htab_t reduction_list, struct tree_niter_des
   /* Get rid of the information created by the vectorizer functions.  */
   destroy_loop_vec_info (simple_loop_info, true);
 
-  for (phi = phi_nodes (exit->dest); phi; phi = PHI_CHAIN (phi))
+  for (gsi = gsi_start_phis (exit->dest); !gsi_end_p (gsi); gsi_next (&gsi))
     {
+      gimple phi = gsi_stmt (gsi);
       struct reduction_info *red;
       imm_use_iterator imm_iter;
       use_operand_p use_p;
-      tree reduc_phi;
-
+      gimple reduc_phi;
       tree val = PHI_ARG_DEF_FROM_EDGE (phi, exit);
 
       if (is_gimple_reg (val))
@@ -334,7 +335,7 @@ loop_parallel_p (struct loop *loop, htab_t reduction_list, struct tree_niter_des
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "phi is ");
-             print_generic_expr (dump_file, phi, 0);
+             print_gimple_stmt (dump_file, phi, 0, 0);
              fprintf (dump_file, "arg of phi to exit:   value ");
              print_generic_expr (dump_file, val, 0);
              fprintf (dump_file, " used outside loop\n");
@@ -351,7 +352,7 @@ loop_parallel_p (struct loop *loop, htab_t reduction_list, struct tree_niter_des
          reduc_phi = NULL;
          FOR_EACH_IMM_USE_FAST (use_p, imm_iter, val)
          {
-           if (flow_bb_inside_loop_p (loop, bb_for_stmt (USE_STMT (use_p))))
+           if (flow_bb_inside_loop_p (loop, gimple_bb (USE_STMT (use_p))))
              {
                reduc_phi = USE_STMT (use_p);
                break;
@@ -368,9 +369,9 @@ loop_parallel_p (struct loop *loop, htab_t reduction_list, struct tree_niter_des
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "reduction phi is  ");
-             print_generic_expr (dump_file, red->reduc_phi, 0);
+             print_gimple_stmt (dump_file, red->reduc_phi, 0, 0);
              fprintf (dump_file, "reduction stmt is  ");
-             print_generic_expr (dump_file, red->reduc_stmt, 0);
+             print_gimple_stmt (dump_file, red->reduc_stmt, 0, 0);
            }
 
        }
@@ -378,8 +379,9 @@ loop_parallel_p (struct loop *loop, htab_t reduction_list, struct tree_niter_des
 
   /* The iterations of the loop may communicate only through bivs whose
      iteration space can be distributed efficiently.  */
-  for (phi = phi_nodes (loop->header); phi; phi = PHI_CHAIN (phi))
+  for (gsi = gsi_start_phis (loop->header); !gsi_end_p (gsi); gsi_next (&gsi))
     {
+      gimple phi = gsi_stmt (gsi);
       tree def = PHI_RESULT (phi);
       affine_iv iv;
 
@@ -465,7 +467,9 @@ take_address_of (tree obj, tree type, edge entry, htab_t decl_address)
   int uid;
   void **dslot;
   struct int_tree_map ielt, *nielt;
-  tree *var_p, name, bvar, stmt, addr;
+  tree *var_p, name, bvar, addr;
+  gimple stmt;
+  gimple_seq stmts;
 
   /* Since the address of OBJ is invariant, the trees may be shared.
      Avoid rewriting unrelated parts of the code.  */
@@ -483,10 +487,10 @@ take_address_of (tree obj, tree type, edge entry, htab_t decl_address)
       addr = build_addr (*var_p, current_function_decl);
       bvar = create_tmp_var (TREE_TYPE (addr), get_name (*var_p));
       add_referenced_var (bvar);
-      stmt = build_gimple_modify_stmt (bvar, addr);
+      stmt = gimple_build_assign (bvar, addr);
       name = make_ssa_name (bvar, stmt);
-      GIMPLE_STMT_OPERAND (stmt, 0) = name;
-      bsi_insert_on_edge_immediate (entry, stmt);
+      gimple_assign_set_lhs (stmt, name);
+      gsi_insert_on_edge_immediate (entry, stmt);
 
       nielt = XNEW (struct int_tree_map);
       nielt->uid = uid;
@@ -500,17 +504,17 @@ take_address_of (tree obj, tree type, edge entry, htab_t decl_address)
     {
       *var_p = build1 (INDIRECT_REF, TREE_TYPE (*var_p), name);
       name = force_gimple_operand (build_addr (obj, current_function_decl),
-                                  &stmt, true, NULL_TREE);
-      if (stmt)
-       bsi_insert_on_edge_immediate (entry, stmt);
+                                  &stmts, true, NULL_TREE);
+      if (!gimple_seq_empty_p (stmts))
+       gsi_insert_seq_on_edge_immediate (entry, stmts);
     }
 
   if (TREE_TYPE (name) != type)
     {
-      name = force_gimple_operand (fold_convert (type, name), &stmt, true,
+      name = force_gimple_operand (fold_convert (type, name), &stmts, true,
                                   NULL_TREE);
-      if (stmt)
-       bsi_insert_on_edge_immediate (entry, stmt);
+      if (!gimple_seq_empty_p (stmts))
+       gsi_insert_seq_on_edge_immediate (entry, stmts);
     }
 
   return name;
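
In take_address_of above, force_gimple_operand now returns its side statements in a gimple_seq, and whole sequences are committed to an edge at once.  The shape of that idiom, as a sketch (hypothetical helper, not part of the patch):

/* Sketch: gimplify EXPR and emit any helper statements on edge E.  */
static tree
gimplify_on_edge (edge e, tree expr)
{
  gimple_seq stmts;
  tree val = force_gimple_operand (expr, &stmts, true, NULL_TREE);

  if (!gimple_seq_empty_p (stmts))
    gsi_insert_seq_on_edge_immediate (e, stmts);
  return val;
}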
@@ -543,8 +547,7 @@ initialize_reductions (void **slot, void *data)
 
   c = build_omp_clause (OMP_CLAUSE_REDUCTION);
   OMP_CLAUSE_REDUCTION_CODE (c) = reduc->reduction_code;
-  OMP_CLAUSE_DECL (c) =
-    SSA_NAME_VAR (GIMPLE_STMT_OPERAND (reduc->reduc_stmt, 0));
+  OMP_CLAUSE_DECL (c) = SSA_NAME_VAR (gimple_assign_lhs (reduc->reduc_stmt));
 
   init = omp_reduction_init (c, TREE_TYPE (bvar));
   reduc->init = init;
@@ -569,6 +572,7 @@ initialize_reductions (void **slot, void *data)
 
 struct elv_data
 {
+  struct walk_stmt_info info;
   edge entry;
   htab_t decl_address;
   bool changed;
@@ -632,7 +636,7 @@ eliminate_local_variables_1 (tree *tp, int *walk_subtrees, void *data)
       return NULL_TREE;
     }
 
-  if (!EXPR_P (t) && !GIMPLE_STMT_P (t))
+  if (!EXPR_P (t))
     *walk_subtrees = 0;
 
   return NULL_TREE;
@@ -644,16 +648,17 @@ eliminate_local_variables_1 (tree *tp, int *walk_subtrees, void *data)
    already.  */
 
 static void
-eliminate_local_variables_stmt (edge entry, tree stmt,
+eliminate_local_variables_stmt (edge entry, gimple stmt,
                                htab_t decl_address)
 {
   struct elv_data dta;
 
+  memset (&dta.info, '\0', sizeof (dta.info));
   dta.entry = entry;
   dta.decl_address = decl_address;
   dta.changed = false;
 
-  walk_tree (&stmt, eliminate_local_variables_1, &dta, NULL);
+  walk_gimple_op (stmt, eliminate_local_variables_1, &dta.info);
 
   if (dta.changed)
     update_stmt (stmt);
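
The elv_data / walk_gimple_op change above relies on struct walk_stmt_info being the first member of the user data: walk_gimple_op hands the tree callback the walk_stmt_info pointer, and the callback casts it back to the enclosing struct.  A self-contained sketch of the idiom (hypothetical names, not part of the patch):

struct count_data
{
  struct walk_stmt_info info;  /* Must be first; its address is what the
                                  callback receives as DATA.  */
  unsigned nr_ssa_names;
};

static tree
count_ssa_names_op (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  struct count_data *d = (struct count_data *) data;

  if (TREE_CODE (*tp) == SSA_NAME)
    d->nr_ssa_names++;
  return NULL_TREE;
}

static unsigned
count_ssa_names_in_stmt (gimple stmt)
{
  struct count_data d;

  memset (&d.info, 0, sizeof (d.info));
  d.nr_ssa_names = 0;
  walk_gimple_op (stmt, count_ssa_names_op, &d.info);
  return d.nr_ssa_names;
}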
@@ -676,7 +681,7 @@ eliminate_local_variables (edge entry, edge exit)
   basic_block bb;
   VEC (basic_block, heap) *body = VEC_alloc (basic_block, heap, 3);
   unsigned i;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   htab_t decl_address = htab_create (10, int_tree_map_hash, int_tree_map_eq,
                                     free);
   basic_block entry_bb = entry->src;
@@ -686,8 +691,8 @@ eliminate_local_variables (edge entry, edge exit)
 
   for (i = 0; VEC_iterate (basic_block, body, i, bb); i++)
     if (bb != entry_bb && bb != exit_bb)
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
-       eliminate_local_variables_stmt (entry, bsi_stmt (bsi),
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+       eliminate_local_variables_stmt (entry, gsi_stmt (gsi),
                                        decl_address);
 
   htab_delete (decl_address);
@@ -703,14 +708,13 @@ expr_invariant_in_region_p (edge entry, edge exit, tree expr)
   basic_block entry_bb = entry->src;
   basic_block exit_bb = exit->dest;
   basic_block def_bb;
-  unsigned i, len;
 
   if (is_gimple_min_invariant (expr))
     return true;
 
   if (TREE_CODE (expr) == SSA_NAME)
     {
-      def_bb = bb_for_stmt (SSA_NAME_DEF_STMT (expr));
+      def_bb = gimple_bb (SSA_NAME_DEF_STMT (expr));
       if (def_bb
          && dominated_by_p (CDI_DOMINATORS, def_bb, entry_bb)
          && !dominated_by_p (CDI_DOMINATORS, def_bb, exit_bb))
@@ -719,15 +723,7 @@ expr_invariant_in_region_p (edge entry, edge exit, tree expr)
       return true;
     }
 
-  if (!EXPR_P (expr) && !GIMPLE_STMT_P (expr))
-    return false;
-
-  len = TREE_OPERAND_LENGTH (expr);
-  for (i = 0; i < len; i++)
-    if (!expr_invariant_in_region_p (entry, exit, TREE_OPERAND (expr, i)))
-      return false;
-
-  return true;
+  return false;
 }
 
 /* If COPY_NAME_P is true, creates and returns a duplicate of NAME.
@@ -788,7 +784,7 @@ separate_decls_in_region_name (tree name,
 
   if (copy_name_p)
     {
-      copy = duplicate_ssa_name (name, NULL_TREE);
+      copy = duplicate_ssa_name (name, NULL);
       nelt = XNEW (struct name_to_copy_elt);
       nelt->version = idx;
       nelt->new_name = copy;
@@ -813,7 +809,7 @@ separate_decls_in_region_name (tree name,
    replacement decls are stored in DECL_COPIES.  */
 
 static void
-separate_decls_in_region_stmt (edge entry, edge exit, tree stmt,
+separate_decls_in_region_stmt (edge entry, edge exit, gimple stmt,
                               htab_t name_copies, htab_t decl_copies)
 {
   use_operand_p use;
@@ -855,7 +851,7 @@ add_field_for_reduction (void **slot, void *data)
   
   struct reduction_info *const red = (struct reduction_info *) *slot;
   tree const type = (tree) data;
-  tree var = SSA_NAME_VAR (GIMPLE_STMT_OPERAND (red->reduc_stmt, 0));
+  tree var = SSA_NAME_VAR (gimple_assign_lhs (red->reduc_stmt));
   tree field = build_decl (FIELD_DECL, DECL_NAME (var), TREE_TYPE (var));
 
   insert_field_into_struct (type, field);
@@ -896,13 +892,13 @@ create_phi_for_local_result (void **slot, void *data)
   struct reduction_info *const reduc = (struct reduction_info *) *slot;
   const struct loop *const loop = (const struct loop *) data;
   edge e;
-  tree new_phi;
+  gimple new_phi;
   basic_block store_bb;
   tree local_res;
 
   /* STORE_BB is the block where the phi 
      should be stored.  It is the destination of the loop exit.  
-     (Find the fallthru edge from OMP_CONTINUE).  */
+     (Find the fallthru edge from GIMPLE_OMP_CONTINUE).  */
   store_bb = FALLTHRU_EDGE (loop->latch)->dest;
 
   /* STORE_BB has two predecessors.  One coming from  the loop
@@ -914,11 +910,13 @@ create_phi_for_local_result (void **slot, void *data)
     e = EDGE_PRED (store_bb, 1);
   else
     e = EDGE_PRED (store_bb, 0);
-  local_res = make_ssa_name (SSA_NAME_VAR (GIMPLE_STMT_OPERAND (reduc->reduc_stmt, 0)), NULL_TREE);
+  local_res
+    = make_ssa_name (SSA_NAME_VAR (gimple_assign_lhs (reduc->reduc_stmt)),
+                    NULL);
   new_phi = create_phi_node (local_res, store_bb);
   SSA_NAME_DEF_STMT (local_res) = new_phi;
   add_phi_arg (new_phi, reduc->init, e);
-  add_phi_arg (new_phi, GIMPLE_STMT_OPERAND (reduc->reduc_stmt, 0),
+  add_phi_arg (new_phi, gimple_assign_lhs (reduc->reduc_stmt),
               FALLTHRU_EDGE (loop->latch));
   reduc->new_phi = new_phi;
 
@@ -944,7 +942,7 @@ create_call_for_reduction_1 (void **slot, void *data)
 {
   struct reduction_info *const reduc = (struct reduction_info *) *slot;
   struct clsn_data *const clsn_data = (struct clsn_data *) data;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   tree type = TREE_TYPE (PHI_RESULT (reduc->reduc_phi));
   tree struct_type = TREE_TYPE (TREE_TYPE (clsn_data->load));
   tree load_struct;
@@ -952,7 +950,8 @@ create_call_for_reduction_1 (void **slot, void *data)
   basic_block new_bb;
   edge e;
   tree t, addr, addr_type, ref, x;
-  tree tmp_load, load, name;
+  tree tmp_load, name;
+  gimple load;
 
   load_struct = fold_build1 (INDIRECT_REF, struct_type, clsn_data->load);
   t = build3 (COMPONENT_REF, type, load_struct, reduc->field, NULL_TREE);
@@ -969,27 +968,23 @@ create_call_for_reduction_1 (void **slot, void *data)
   tmp_load = create_tmp_var (TREE_TYPE (TREE_TYPE (addr)), NULL);
   add_referenced_var (tmp_load);
   tmp_load = make_ssa_name (tmp_load, NULL);
-  load = build2 (OMP_ATOMIC_LOAD, void_type_node, tmp_load, addr);
+  load = gimple_build_omp_atomic_load (tmp_load, addr);
   SSA_NAME_DEF_STMT (tmp_load) = load;
-  bsi = bsi_start (new_bb);
-  bsi_insert_after (&bsi, load, BSI_NEW_STMT);
+  gsi = gsi_start_bb (new_bb);
+  gsi_insert_after (&gsi, load, GSI_NEW_STMT);
 
   e = split_block (new_bb, load);
   new_bb = e->dest;
-  bsi = bsi_start (new_bb);
+  gsi = gsi_start_bb (new_bb);
   ref = tmp_load;
-  x =
-    fold_build2 (reduc->reduction_code,
-                TREE_TYPE (PHI_RESULT (reduc->new_phi)), ref,
-                PHI_RESULT (reduc->new_phi));
-
-  name =
-    force_gimple_operand_bsi (&bsi, x, true, NULL_TREE, true,
-                             BSI_CONTINUE_LINKING);
+  x = fold_build2 (reduc->reduction_code,
+                  TREE_TYPE (PHI_RESULT (reduc->new_phi)), ref,
+                  PHI_RESULT (reduc->new_phi));
 
-  x = build1 (OMP_ATOMIC_STORE, void_type_node, name);
+  name = force_gimple_operand_gsi (&gsi, x, true, NULL_TREE, true,
+                                  GSI_CONTINUE_LINKING);
 
-  bsi_insert_after (&bsi, x, BSI_NEW_STMT);
+  gsi_insert_after (&gsi, gimple_build_omp_atomic_store (name), GSI_NEW_STMT);
   return 1;
 }
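
The reduction hunk above replaces the OMP_ATOMIC_LOAD/OMP_ATOMIC_STORE trees with dedicated statement builders.  Stripped of the block splitting the patch performs between the two statements, the emission pattern is roughly (illustrative sketch, hypothetical helper):

/* Sketch: emit "TMP = atomic load *ADDR" and "atomic store VAL" after *GSI.
   The block split done in the patch is omitted here.  */
static void
emit_atomic_pair (gimple_stmt_iterator *gsi, tree tmp, tree addr, tree val)
{
  gimple load = gimple_build_omp_atomic_load (tmp, addr);

  SSA_NAME_DEF_STMT (tmp) = load;
  gsi_insert_after (gsi, load, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_omp_atomic_store (val), GSI_NEW_STMT);
}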
 
@@ -1002,7 +997,7 @@ create_call_for_reduction (struct loop *loop, htab_t reduction_list,
                           struct clsn_data *ld_st_data)
 {
   htab_traverse (reduction_list, create_phi_for_local_result, loop);
-  /* Find the fallthru edge from OMP_CONTINUE.  */
+  /* Find the fallthru edge from GIMPLE_OMP_CONTINUE.  */
   ld_st_data->load_bb = FALLTHRU_EDGE (loop->latch)->dest;
   htab_traverse (reduction_list, create_call_for_reduction_1, ld_st_data);
 }
@@ -1015,30 +1010,34 @@ create_loads_for_reductions (void **slot, void *data)
 {
   struct reduction_info *const red = (struct reduction_info *) *slot;
   struct clsn_data *const clsn_data = (struct clsn_data *) data;
-  tree stmt;
-  block_stmt_iterator bsi;
-  tree type = TREE_TYPE (GIMPLE_STMT_OPERAND (red->reduc_stmt, 0));
+  gimple stmt;
+  gimple_stmt_iterator gsi;
+  tree type = TREE_TYPE (gimple_assign_lhs (red->reduc_stmt));
   tree struct_type = TREE_TYPE (TREE_TYPE (clsn_data->load));
   tree load_struct;
   tree name;
   tree x;
 
-  bsi = bsi_after_labels (clsn_data->load_bb);
+  gsi = gsi_after_labels (clsn_data->load_bb);
   load_struct = fold_build1 (INDIRECT_REF, struct_type, clsn_data->load);
   load_struct = build3 (COMPONENT_REF, type, load_struct, red->field,
                        NULL_TREE);
 
   x = load_struct;
   name = PHI_RESULT (red->keep_res);
-  stmt = build_gimple_modify_stmt (name, x);
-  GIMPLE_STMT_OPERAND (stmt, 0) = name;
+  stmt = gimple_build_assign (name, x);
   SSA_NAME_DEF_STMT (name) = stmt;
 
-  bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
+  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
 
-  remove_phi_node (red->keep_res, NULL_TREE, false);
-
-  return 1;
+  for (gsi = gsi_start_phis (gimple_bb (red->keep_res));
+       !gsi_end_p (gsi); gsi_next (&gsi))
+    if (gsi_stmt (gsi) == red->keep_res)
+      {
+       remove_phi_node (&gsi, false);
+       return 1;
+      }
+  gcc_unreachable ();
 }
 
 /* Load the reduction result that was stored in LD_ST_DATA.  
@@ -1048,18 +1047,16 @@ static void
 create_final_loads_for_reduction (htab_t reduction_list, 
                                  struct clsn_data *ld_st_data)
 {
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   tree t;
+  gimple stmt;
 
-  bsi = bsi_after_labels (ld_st_data->load_bb);
+  gsi = gsi_after_labels (ld_st_data->load_bb);
   t = build_fold_addr_expr (ld_st_data->store);
-  t =
-    build_gimple_modify_stmt (ld_st_data->load,
-                             build_fold_addr_expr (ld_st_data->store));
+  stmt = gimple_build_assign (ld_st_data->load, t);
 
-  bsi_insert_before (&bsi, t, BSI_NEW_STMT);
-  SSA_NAME_DEF_STMT (ld_st_data->load) = t;
-  GIMPLE_STMT_OPERAND (t, 0) = ld_st_data->load;
+  gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
+  SSA_NAME_DEF_STMT (ld_st_data->load) = stmt;
 
   htab_traverse (reduction_list, create_loads_for_reductions, ld_st_data);
 
@@ -1076,18 +1073,16 @@ create_stores_for_reduction (void **slot, void *data)
 {
   struct reduction_info *const red = (struct reduction_info *) *slot;
   struct clsn_data *const clsn_data = (struct clsn_data *) data;
-  tree stmt;
-  block_stmt_iterator bsi;
-  tree type = TREE_TYPE (GIMPLE_STMT_OPERAND (red->reduc_stmt, 0));
-  
-  bsi = bsi_last (clsn_data->store_bb);
-  stmt =
-    build_gimple_modify_stmt (build3
-                              (COMPONENT_REF, type, clsn_data->store,
-                               red->field, NULL_TREE),
-                               red->initial_value);
+  tree t;
+  gimple stmt;
+  gimple_stmt_iterator gsi;
+  tree type = TREE_TYPE (gimple_assign_lhs (red->reduc_stmt));
+
+  gsi = gsi_last_bb (clsn_data->store_bb);
+  t = build3 (COMPONENT_REF, type, clsn_data->store, red->field, NULL_TREE);
+  stmt = gimple_build_assign (t, red->initial_value);
   mark_virtual_ops_for_renaming (stmt);
-  bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
+  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
 
   return 1;
 }
@@ -1101,28 +1096,25 @@ create_loads_and_stores_for_name (void **slot, void *data)
 {
   struct name_to_copy_elt *const elt = (struct name_to_copy_elt *) *slot;
   struct clsn_data *const clsn_data = (struct clsn_data *) data;
-  tree stmt;
-  block_stmt_iterator bsi;
+  tree t;
+  gimple stmt;
+  gimple_stmt_iterator gsi;
   tree type = TREE_TYPE (elt->new_name);
   tree struct_type = TREE_TYPE (TREE_TYPE (clsn_data->load));
   tree load_struct;
 
-  bsi = bsi_last (clsn_data->store_bb);
-  stmt =
-    build_gimple_modify_stmt (build3
-                             (COMPONENT_REF, type, clsn_data->store,
-                              elt->field, NULL_TREE),
-                             ssa_name (elt->version));
+  gsi = gsi_last_bb (clsn_data->store_bb);
+  t = build3 (COMPONENT_REF, type, clsn_data->store, elt->field, NULL_TREE);
+  stmt = gimple_build_assign (t, ssa_name (elt->version));
   mark_virtual_ops_for_renaming (stmt);
-  bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
+  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
 
-  bsi = bsi_last (clsn_data->load_bb);
+  gsi = gsi_last_bb (clsn_data->load_bb);
   load_struct = fold_build1 (INDIRECT_REF, struct_type, clsn_data->load);
-  stmt = build_gimple_modify_stmt (elt->new_name,
-                                  build3 (COMPONENT_REF, type, load_struct,
-                                          elt->field, NULL_TREE));
+  t = build3 (COMPONENT_REF, type, load_struct, elt->field, NULL_TREE);
+  stmt = gimple_build_assign (elt->new_name, t);
   SSA_NAME_DEF_STMT (elt->new_name) = stmt;
-  bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
+  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
 
   return 1;
 }
@@ -1174,27 +1166,27 @@ separate_decls_in_region (edge entry, edge exit, htab_t reduction_list,
   htab_t decl_copies = htab_create (10, int_tree_map_hash, int_tree_map_eq,
                                    free);
   unsigned i;
-  tree phi, type, type_name, nvar;
-  block_stmt_iterator bsi;
+  tree type, type_name, nvar;
+  gimple_stmt_iterator gsi;
   struct clsn_data clsn_data;
   VEC (basic_block, heap) *body = VEC_alloc (basic_block, heap, 3);
   basic_block bb;
   basic_block entry_bb = bb1;
   basic_block exit_bb = exit->dest;
 
-  entry = single_succ_edge(entry_bb);
+  entry = single_succ_edge (entry_bb);
   gather_blocks_in_sese_region (entry_bb, exit_bb, &body);
 
   for (i = 0; VEC_iterate (basic_block, body, i, bb); i++)
     {
       if (bb != entry_bb && bb != exit_bb) 
        {
-         for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
-           separate_decls_in_region_stmt (entry, exit, phi, name_copies,
-                                          decl_copies);
-         
-         for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
-           separate_decls_in_region_stmt (entry, exit, bsi_stmt (bsi),
+         for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+           separate_decls_in_region_stmt (entry, exit, gsi_stmt (gsi),
+                                          name_copies, decl_copies);
+
+         for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+           separate_decls_in_region_stmt (entry, exit, gsi_stmt (gsi),
                                           name_copies, decl_copies);
        }
     }
@@ -1230,7 +1222,7 @@ separate_decls_in_region (edge entry, edge exit, htab_t reduction_list,
       add_referenced_var (*arg_struct);
       nvar = create_tmp_var (build_pointer_type (type), ".paral_data_load");
       add_referenced_var (nvar);
-      *new_arg_struct = make_ssa_name (nvar, NULL_TREE);
+      *new_arg_struct = make_ssa_name (nvar, NULL);
 
       ld_st_data->store = *arg_struct;
       ld_st_data->load = *new_arg_struct;
@@ -1246,7 +1238,7 @@ separate_decls_in_region (edge entry, edge exit, htab_t reduction_list,
        {
          htab_traverse (reduction_list, create_stores_for_reduction,
                         ld_st_data); 
-         clsn_data.load = make_ssa_name (nvar, NULL_TREE);
+         clsn_data.load = make_ssa_name (nvar, NULL);
          clsn_data.load_bb = exit->dest;
          clsn_data.store = ld_st_data->store;
          create_final_loads_for_reduction (reduction_list, &clsn_data);
@@ -1338,15 +1330,18 @@ static void
 canonicalize_loop_ivs (struct loop *loop, htab_t reduction_list, tree nit)
 {
   unsigned precision = TYPE_PRECISION (TREE_TYPE (nit));
-  tree phi, prev, res, type, var_before, val, atype, mtype, t, next;
-  block_stmt_iterator bsi;
+  tree res, type, var_before, val, atype, mtype;
+  gimple_stmt_iterator gsi, psi;
+  gimple phi, stmt;
   bool ok;
   affine_iv iv;
   edge exit = single_dom_exit (loop);
   struct reduction_info *red;
 
-  for (phi = phi_nodes (loop->header); phi; phi = PHI_CHAIN (phi))
+  for (psi = gsi_start_phis (loop->header);
+       !gsi_end_p (psi); gsi_next (&psi))
     {
+      phi = gsi_stmt (psi);
       res = PHI_RESULT (phi);
 
       if (is_gimple_reg (res) && TYPE_PRECISION (TREE_TYPE (res)) > precision)
@@ -1355,20 +1350,19 @@ canonicalize_loop_ivs (struct loop *loop, htab_t reduction_list, tree nit)
 
   type = lang_hooks.types.type_for_size (precision, 1);
 
-  bsi = bsi_last (loop->latch);
+  gsi = gsi_last_bb (loop->latch);
   create_iv (build_int_cst_type (type, 0), build_int_cst (type, 1), NULL_TREE,
-            loop, &bsi, true, &var_before, NULL);
+            loop, &gsi, true, &var_before, NULL);
 
-  bsi = bsi_after_labels (loop->header);
-  prev = NULL;
-  for (phi = phi_nodes (loop->header); phi; phi = next)
+  gsi = gsi_after_labels (loop->header);
+  for (psi = gsi_start_phis (loop->header); !gsi_end_p (psi); )
     {
-      next = PHI_CHAIN (phi);
+      phi = gsi_stmt (psi);
       res = PHI_RESULT (phi);
 
       if (!is_gimple_reg (res) || res == var_before)
        {
-         prev = phi;
+         gsi_next (&psi);
          continue;
        }
 
@@ -1377,12 +1371,12 @@ canonicalize_loop_ivs (struct loop *loop, htab_t reduction_list, tree nit)
       /* We preserve the reduction phi nodes.  */
       if (!ok && red)
        {
-         prev = phi;
+         gsi_next (&psi);
          continue;
        }
       else
        gcc_assert (ok);
-      remove_phi_node (phi, prev, false);
+      remove_phi_node (&psi, false);
 
       atype = TREE_TYPE (res);
       mtype = POINTER_TYPE_P (atype) ? sizetype : atype;
@@ -1391,14 +1385,14 @@ canonicalize_loop_ivs (struct loop *loop, htab_t reduction_list, tree nit)
       val = fold_build2 (POINTER_TYPE_P (atype)
                         ? POINTER_PLUS_EXPR : PLUS_EXPR,
                         atype, unshare_expr (iv.base), val);
-      val = force_gimple_operand_bsi (&bsi, val, false, NULL_TREE, true,
-                                     BSI_SAME_STMT);
-      t = build_gimple_modify_stmt (res, val);
-      bsi_insert_before (&bsi, t, BSI_SAME_STMT);
-      SSA_NAME_DEF_STMT (res) = t;
+      val = force_gimple_operand_gsi (&gsi, val, false, NULL_TREE, true,
+                                     GSI_SAME_STMT);
+      stmt = gimple_build_assign (res, val);
+      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
+      SSA_NAME_DEF_STMT (res) = stmt;
     }
 
-  t = last_stmt (exit->src);
+  stmt = last_stmt (exit->src);
   /* Make the loop exit if the control condition is not satisfied.  */
   if (exit->flags & EDGE_TRUE_VALUE)
     {
@@ -1408,7 +1402,9 @@ canonicalize_loop_ivs (struct loop *loop, htab_t reduction_list, tree nit)
       te->flags = EDGE_FALSE_VALUE;
       fe->flags = EDGE_TRUE_VALUE;
     }
-  COND_EXPR_COND (t) = build2 (LT_EXPR, boolean_type_node, var_before, nit);
+  gimple_cond_set_code (stmt, LT_EXPR);
+  gimple_cond_set_lhs (stmt, var_before);
+  gimple_cond_set_rhs (stmt, nit);
 }
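
Since a GIMPLE_COND no longer wraps a COND_EXPR tree, its comparison is updated field by field, as the end of the hunk above does.  In isolation (hypothetical helper, not part of the patch):

/* Sketch: turn COND_STMT into "if (LHS < RHS)".  */
static void
set_cond_lt (gimple cond_stmt, tree lhs, tree rhs)
{
  gimple_cond_set_code (cond_stmt, LT_EXPR);
  gimple_cond_set_lhs (cond_stmt, lhs);
  gimple_cond_set_rhs (cond_stmt, rhs);
  update_stmt (cond_stmt);
}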
 
 /* Moves the exit condition of LOOP to the beginning of its header, and
@@ -1430,22 +1426,23 @@ transform_to_exit_first_loop (struct loop *loop, htab_t reduction_list, tree nit
   unsigned n;
   bool ok;
   edge exit = single_dom_exit (loop), hpred;
-  tree phi, nphi, cond, control, control_name, res, t, cond_stmt;
-  block_stmt_iterator bsi;
+  tree control, control_name, res, t;
+  gimple phi, nphi, cond_stmt, stmt;
+  gimple_stmt_iterator gsi;
 
   split_block_after_labels (loop->header);
   orig_header = single_succ (loop->header);
   hpred = single_succ_edge (loop->header);
 
   cond_stmt = last_stmt (exit->src);
-  cond = COND_EXPR_COND (cond_stmt);
-  control = TREE_OPERAND (cond, 0);
-  gcc_assert (TREE_OPERAND (cond, 1) == nit);
+  control = gimple_cond_lhs (cond_stmt);
+  gcc_assert (gimple_cond_rhs (cond_stmt) == nit);
 
   /* Make sure that we have phi nodes on exit for all loop header phis
      (create_parallel_loop requires that).  */
-  for (phi = phi_nodes (loop->header); phi; phi = PHI_CHAIN (phi))
+  for (gsi = gsi_start_phis (loop->header); !gsi_end_p (gsi); gsi_next (&gsi))
     {
+      phi = gsi_stmt (gsi);
       res = PHI_RESULT (phi);
       t = make_ssa_name (SSA_NAME_VAR (res), phi);
       SET_PHI_RESULT (phi, t);
@@ -1456,7 +1453,7 @@ transform_to_exit_first_loop (struct loop *loop, htab_t reduction_list, tree nit
 
       if (res == control)
        {
-         TREE_OPERAND (cond, 0) = t;
+         gimple_cond_set_lhs (cond_stmt, t);
          update_stmt (cond_stmt);
          control = t;
        }
@@ -1466,22 +1463,26 @@ transform_to_exit_first_loop (struct loop *loop, htab_t reduction_list, tree nit
   for (n = 0; bbs[n] != exit->src; n++)
     continue;
   nbbs = XNEWVEC (basic_block, n);
-  ok = tree_duplicate_sese_tail (single_succ_edge (loop->header), exit,
-                                bbs + 1, n, nbbs);
+  ok = gimple_duplicate_sese_tail (single_succ_edge (loop->header), exit,
+                                  bbs + 1, n, nbbs);
   gcc_assert (ok);
   free (bbs);
   ex_bb = nbbs[0];
   free (nbbs);
 
   /* Other than reductions, the only gimple reg that should be copied 
-   out of the loop is the control variable.  */
+     out of the loop is the control variable.  */
 
   control_name = NULL_TREE;
-  for (phi = phi_nodes (ex_bb); phi; phi = PHI_CHAIN (phi))
+  for (gsi = gsi_start_phis (ex_bb); !gsi_end_p (gsi); )
     {
+      phi = gsi_stmt (gsi);
       res = PHI_RESULT (phi);
       if (!is_gimple_reg (res))
-       continue;
+       {
+         gsi_next (&gsi);
+         continue;
+       }
 
       /* Check if it is a part of reduction.  If it is,
          keep the phi at the reduction's keep_res field.  The  
@@ -1498,93 +1499,95 @@ transform_to_exit_first_loop (struct loop *loop, htab_t reduction_list, tree nit
 
          red = reduction_phi (reduction_list, SSA_NAME_DEF_STMT (val));
          if (red)
-           red->keep_res = phi;
+           {
+             red->keep_res = phi;
+             gsi_next (&gsi);
+             continue;
+           }
        }
-      else
-       gcc_assert (control_name == NULL_TREE
-                   && SSA_NAME_VAR (res) == SSA_NAME_VAR (control));
+      gcc_assert (control_name == NULL_TREE
+                 && SSA_NAME_VAR (res) == SSA_NAME_VAR (control));
       control_name = res;
+      remove_phi_node (&gsi, false);
     }
   gcc_assert (control_name != NULL_TREE);
-  phi = SSA_NAME_DEF_STMT (control_name);
-  remove_phi_node (phi, NULL_TREE, false);
 
   /* Initialize the control variable to NIT.  */
-  bsi = bsi_after_labels (ex_bb);
-  nit = force_gimple_operand_bsi (&bsi,
+  gsi = gsi_after_labels (ex_bb);
+  nit = force_gimple_operand_gsi (&gsi,
                                  fold_convert (TREE_TYPE (control_name), nit),
-                                 false, NULL_TREE, false, BSI_SAME_STMT);
-  t = build_gimple_modify_stmt (control_name, nit);
-  bsi_insert_before (&bsi, t, BSI_NEW_STMT);
-  SSA_NAME_DEF_STMT (control_name) = t;
+                                 false, NULL_TREE, false, GSI_SAME_STMT);
+  stmt = gimple_build_assign (control_name, nit);
+  gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
+  SSA_NAME_DEF_STMT (control_name) = stmt;
 }
 
 /* Create the parallel constructs for LOOP as described in gen_parallel_loop.
-   LOOP_FN and DATA are the arguments of OMP_PARALLEL.
+   LOOP_FN and DATA are the arguments of GIMPLE_OMP_PARALLEL.
    NEW_DATA is the variable that should be initialized from the argument
    of LOOP_FN.  N_THREADS is the requested number of threads.  Returns the
-   basic block containing OMP_PARALLEL tree.  */
+   basic block containing GIMPLE_OMP_PARALLEL tree.  */
 
 static basic_block
 create_parallel_loop (struct loop *loop, tree loop_fn, tree data,
                      tree new_data, unsigned n_threads)
 {
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   basic_block bb, paral_bb, for_bb, ex_bb;
-  tree t, param, res, for_stmt;
-  tree cvar, cvar_init, initvar, cvar_next, cvar_base, cond, phi, type;
+  tree t, param, res;
+  gimple stmt, for_stmt, phi, cond_stmt;
+  tree cvar, cvar_init, initvar, cvar_next, cvar_base, type;
   edge exit, nexit, guard, end, e;
 
-  /* Prepare the OMP_PARALLEL statement.  */
+  /* Prepare the GIMPLE_OMP_PARALLEL statement.  */
   bb = loop_preheader_edge (loop)->src;
   paral_bb = single_pred (bb);
-  bsi = bsi_last (paral_bb);
+  gsi = gsi_last_bb (paral_bb);
 
   t = build_omp_clause (OMP_CLAUSE_NUM_THREADS);
   OMP_CLAUSE_NUM_THREADS_EXPR (t)
     = build_int_cst (integer_type_node, n_threads);
-  t = build4 (OMP_PARALLEL, void_type_node, NULL_TREE, t, loop_fn, data);
+  stmt = gimple_build_omp_parallel (NULL, t, loop_fn, data);
 
-  bsi_insert_after (&bsi, t, BSI_NEW_STMT);
+  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
 
   /* Initialize NEW_DATA.  */
   if (data)
     {
-      bsi = bsi_after_labels (bb);
-
-      param = make_ssa_name (DECL_ARGUMENTS (loop_fn), NULL_TREE);
-      t = build_gimple_modify_stmt (param, build_fold_addr_expr (data));
-      bsi_insert_before (&bsi, t, BSI_SAME_STMT);
-      SSA_NAME_DEF_STMT (param) = t;
-
-      t = build_gimple_modify_stmt (new_data,
-                                   fold_convert (TREE_TYPE (new_data),
-                                                 param));
-      bsi_insert_before (&bsi, t, BSI_SAME_STMT);
-      SSA_NAME_DEF_STMT (new_data) = t;
+      gsi = gsi_after_labels (bb);
+
+      param = make_ssa_name (DECL_ARGUMENTS (loop_fn), NULL);
+      stmt = gimple_build_assign (param, build_fold_addr_expr (data));
+      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
+      SSA_NAME_DEF_STMT (param) = stmt;
+
+      stmt = gimple_build_assign (new_data,
+                                 fold_convert (TREE_TYPE (new_data), param));
+      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
+      SSA_NAME_DEF_STMT (new_data) = stmt;
     }
 
-  /* Emit OMP_RETURN for OMP_PARALLEL.  */
+  /* Emit GIMPLE_OMP_RETURN for GIMPLE_OMP_PARALLEL.  */
   bb = split_loop_exit_edge (single_dom_exit (loop));
-  bsi = bsi_last (bb);
-  bsi_insert_after (&bsi, make_node (OMP_RETURN), BSI_NEW_STMT);
+  gsi = gsi_last_bb (bb);
+  gsi_insert_after (&gsi, gimple_build_omp_return (false), GSI_NEW_STMT);
 
-  /* Extract data for OMP_FOR.  */
+  /* Extract data for GIMPLE_OMP_FOR.  */
   gcc_assert (loop->header == single_dom_exit (loop)->src);
-  cond = COND_EXPR_COND (last_stmt (loop->header));
+  cond_stmt = last_stmt (loop->header);
 
-  cvar = TREE_OPERAND (cond, 0);
+  cvar = gimple_cond_lhs (cond_stmt);
   cvar_base = SSA_NAME_VAR (cvar);
   phi = SSA_NAME_DEF_STMT (cvar);
   cvar_init = PHI_ARG_DEF_FROM_EDGE (phi, loop_preheader_edge (loop));
-  initvar = make_ssa_name (cvar_base, NULL_TREE);
+  initvar = make_ssa_name (cvar_base, NULL);
   SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, loop_preheader_edge (loop)),
           initvar);
   cvar_next = PHI_ARG_DEF_FROM_EDGE (phi, loop_latch_edge (loop));
 
-  bsi = bsi_last (loop->latch);
-  gcc_assert (bsi_stmt (bsi) == SSA_NAME_DEF_STMT (cvar_next));
-  bsi_remove (&bsi, true);
+  gsi = gsi_last_bb (loop->latch);
+  gcc_assert (gsi_stmt (gsi) == SSA_NAME_DEF_STMT (cvar_next));
+  gsi_remove (&gsi, true);
 
   /* Prepare cfg.  */
   for_bb = split_edge (loop_preheader_edge (loop));
@@ -1595,56 +1598,48 @@ create_parallel_loop (struct loop *loop, tree loop_fn, tree data,
   guard = make_edge (for_bb, ex_bb, 0);
   single_succ_edge (loop->latch)->flags = 0;
   end = make_edge (loop->latch, ex_bb, EDGE_FALLTHRU);
-  for (phi = phi_nodes (ex_bb); phi; phi = PHI_CHAIN (phi))
+  for (gsi = gsi_start_phis (ex_bb); !gsi_end_p (gsi); gsi_next (&gsi))
     {
+      phi = gsi_stmt (gsi);
       res = PHI_RESULT (phi);
-      gcc_assert (!is_gimple_reg (phi));
-      t = SSA_NAME_DEF_STMT (PHI_ARG_DEF_FROM_EDGE (phi, exit));
-      add_phi_arg (phi, PHI_ARG_DEF_FROM_EDGE (t, loop_preheader_edge (loop)),
+      stmt = SSA_NAME_DEF_STMT (PHI_ARG_DEF_FROM_EDGE (phi, exit));
+      add_phi_arg (phi,
+                  PHI_ARG_DEF_FROM_EDGE (stmt, loop_preheader_edge (loop)),
                   guard);
-      add_phi_arg (phi, PHI_ARG_DEF_FROM_EDGE (t, loop_latch_edge (loop)),
+      add_phi_arg (phi, PHI_ARG_DEF_FROM_EDGE (stmt, loop_latch_edge (loop)),
                   end);
     }
   e = redirect_edge_and_branch (exit, nexit->dest);
   PENDING_STMT (e) = NULL;
 
-  /* Emit OMP_FOR.  */
-  TREE_OPERAND (cond, 0) = cvar_base;
+  /* Emit GIMPLE_OMP_FOR.  */
+  gimple_cond_set_lhs (cond_stmt, cvar_base);
   type = TREE_TYPE (cvar);
   t = build_omp_clause (OMP_CLAUSE_SCHEDULE);
   OMP_CLAUSE_SCHEDULE_KIND (t) = OMP_CLAUSE_SCHEDULE_STATIC;
 
-  for_stmt = make_node (OMP_FOR);
-  TREE_TYPE (for_stmt) = void_type_node;
-  OMP_FOR_CLAUSES (for_stmt) = t;
-  OMP_FOR_INIT (for_stmt) = make_tree_vec (1);
-  TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0)
-    = build_gimple_modify_stmt (initvar, cvar_init);
-  OMP_FOR_COND (for_stmt) = make_tree_vec (1);
-  TREE_VEC_ELT (OMP_FOR_COND (for_stmt), 0) = cond;
-  OMP_FOR_INCR (for_stmt) = make_tree_vec (2);
-  TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), 0)
-    = build_gimple_modify_stmt (cvar_base,
-                               build2 (PLUS_EXPR, type, cvar_base,
-                                       build_int_cst (type, 1)));
-  OMP_FOR_BODY (for_stmt) = NULL_TREE;
-  OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
-
-  bsi = bsi_last (for_bb);
-  bsi_insert_after (&bsi, for_stmt, BSI_NEW_STMT);
+  for_stmt = gimple_build_omp_for (NULL, t, 1, NULL);
+  gimple_omp_for_set_index (for_stmt, 0, initvar);
+  gimple_omp_for_set_initial (for_stmt, 0, cvar_init);
+  gimple_omp_for_set_final (for_stmt, 0, gimple_cond_rhs (cond_stmt));
+  gimple_omp_for_set_cond (for_stmt, 0, gimple_cond_code (cond_stmt));
+  gimple_omp_for_set_incr (for_stmt, 0, build2 (PLUS_EXPR, type,
+                                               cvar_base,
+                                               build_int_cst (type, 1)));
+
+  gsi = gsi_last_bb (for_bb);
+  gsi_insert_after (&gsi, for_stmt, GSI_NEW_STMT);
   SSA_NAME_DEF_STMT (initvar) = for_stmt;
 
-  /* Emit OMP_CONTINUE.  */
-  bsi = bsi_last (loop->latch);
-  t = build2 (OMP_CONTINUE, void_type_node, cvar_next, cvar);
-  bsi_insert_after (&bsi, t, BSI_NEW_STMT);
-  SSA_NAME_DEF_STMT (cvar_next) = t;
+  /* Emit GIMPLE_OMP_CONTINUE.  */
+  gsi = gsi_last_bb (loop->latch);
+  stmt = gimple_build_omp_continue (cvar_next, cvar);
+  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
+  SSA_NAME_DEF_STMT (cvar_next) = stmt;
 
-  /* Emit OMP_RETURN for OMP_FOR.  */
-  bsi = bsi_last (ex_bb);
-  t = make_node (OMP_RETURN);
-  OMP_RETURN_NOWAIT (t) = 1;
-  bsi_insert_after (&bsi, t, BSI_NEW_STMT);
+  /* Emit GIMPLE_OMP_RETURN for GIMPLE_OMP_FOR.  */
+  gsi = gsi_last_bb (ex_bb);
+  gsi_insert_after (&gsi, gimple_build_omp_return (true), GSI_NEW_STMT);
 
   return paral_bb;
 }
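
The create_parallel_loop hunk above builds the GIMPLE_OMP_FOR with gimple_build_omp_for and then fills in its single index slot through the per-dimension setters, instead of assembling TREE_VEC operands.  A reduced sketch of that construction (hypothetical helper, not part of the patch):

/* Sketch: build a one-level "for (IV = INIT; IV < BOUND; IV += 1)".  */
static gimple
build_simple_omp_for (tree clauses, tree iv, tree init, tree bound)
{
  tree type = TREE_TYPE (iv);
  gimple for_stmt = gimple_build_omp_for (NULL, clauses, 1, NULL);

  gimple_omp_for_set_index (for_stmt, 0, iv);
  gimple_omp_for_set_initial (for_stmt, 0, init);
  gimple_omp_for_set_final (for_stmt, 0, bound);
  gimple_omp_for_set_cond (for_stmt, 0, LT_EXPR);
  gimple_omp_for_set_incr (for_stmt, 0,
                           build2 (PLUS_EXPR, type, iv,
                                   build_int_cst (type, 1)));
  return for_stmt;
}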
@@ -1660,7 +1655,8 @@ gen_parallel_loop (struct loop *loop, htab_t reduction_list,
   struct loop *nloop;
   loop_iterator li;
   tree many_iterations_cond, type, nit;
-  tree stmts, arg_struct, new_arg_struct;
+  tree arg_struct, new_arg_struct;
+  gimple_seq stmts;
   basic_block parallel_head;
   edge entry, exit;
   struct clsn_data clsn_data;
@@ -1690,14 +1686,14 @@ gen_parallel_loop (struct loop *loop, htab_t reduction_list,
 
      BODY1;
      store all local loop-invariant variables used in body of the loop to DATA.
-     OMP_PARALLEL (OMP_CLAUSE_NUM_THREADS (N_THREADS), LOOPFN, DATA);
+     GIMPLE_OMP_PARALLEL (OMP_CLAUSE_NUM_THREADS (N_THREADS), LOOPFN, DATA);
      load the variables from DATA.
-     OMP_FOR (IV = INIT; COND; IV += STEP) (OMP_CLAUSE_SCHEDULE (static))
+     GIMPLE_OMP_FOR (IV = INIT; COND; IV += STEP) (OMP_CLAUSE_SCHEDULE (static))
      BODY2;
      BODY1;
-     OMP_CONTINUE;
-     OMP_RETURN         -- OMP_FOR
-     OMP_RETURN         -- OMP_PARALLEL
+     GIMPLE_OMP_CONTINUE;
+     GIMPLE_OMP_RETURN         -- GIMPLE_OMP_FOR
+     GIMPLE_OMP_RETURN         -- GIMPLE_OMP_PARALLEL
      goto end;
 
      original:
@@ -1723,7 +1719,7 @@ gen_parallel_loop (struct loop *loop, htab_t reduction_list,
   nit = force_gimple_operand (unshare_expr (niter->niter), &stmts, true,
                              NULL_TREE);
   if (stmts)
-    bsi_insert_on_edge_immediate (loop_preheader_edge (loop), stmts);
+    gsi_insert_seq_on_edge_immediate (loop_preheader_edge (loop), stmts);
 
   many_iterations_cond =
     fold_build2 (GE_EXPR, boolean_type_node,
@@ -1735,14 +1731,14 @@ gen_parallel_loop (struct loop *loop, htab_t reduction_list,
   many_iterations_cond
     = force_gimple_operand (many_iterations_cond, &stmts, false, NULL_TREE);
   if (stmts)
-    bsi_insert_on_edge_immediate (loop_preheader_edge (loop), stmts);
+    gsi_insert_seq_on_edge_immediate (loop_preheader_edge (loop), stmts);
   if (!is_gimple_condexpr (many_iterations_cond))
     {
       many_iterations_cond
        = force_gimple_operand (many_iterations_cond, &stmts,
                                true, NULL_TREE);
       if (stmts)
-       bsi_insert_on_edge_immediate (loop_preheader_edge (loop), stmts);
+       gsi_insert_seq_on_edge_immediate (loop_preheader_edge (loop), stmts);
     }
 
   initialize_original_copy_tables ();
@@ -1803,16 +1799,16 @@ gen_parallel_loop (struct loop *loop, htab_t reduction_list,
 /* Returns true when LOOP contains vector phi nodes.  */
 
 static bool
-loop_has_vector_phi_nodes (struct loop *loop)
+loop_has_vector_phi_nodes (struct loop *loop ATTRIBUTE_UNUSED)
 {
   unsigned i;
   basic_block *bbs = get_loop_body_in_dom_order (loop);
+  gimple_stmt_iterator gsi;
   bool res = true;
-  tree phi;
 
   for (i = 0; i < loop->num_nodes; i++)
-    for (phi = phi_nodes (bbs[i]); phi; phi = PHI_CHAIN (phi))
-      if (TREE_CODE (TREE_TYPE (PHI_RESULT (phi))) == VECTOR_TYPE)
+    for (gsi = gsi_start_phis (bbs[i]); !gsi_end_p (gsi); gsi_next (&gsi))
+      if (TREE_CODE (TREE_TYPE (PHI_RESULT (gsi_stmt (gsi)))) == VECTOR_TYPE)
        goto end;
 
   res = false;
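
PHI nodes are no longer chained through PHI_CHAIN; they live in a gimple_seq that is walked with a statement iterator.  A minimal sketch of the new traversal, assuming BB is a basic block in the current function:

  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      tree result = gimple_phi_result (phi);
      /* ... inspect RESULT or the PHI arguments ... */
    }
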
@@ -1841,6 +1837,7 @@ parallelize_loops (void)
 
   reduction_list = htab_create (10, reduction_info_hash,
                                 reduction_info_eq, free);
+  init_stmt_vec_info_vec ();
 
   FOR_EACH_LOOP (li, loop, 0)
     {
@@ -1865,6 +1862,7 @@ parallelize_loops (void)
       verify_loop_closed_ssa ();
     }
 
+  free_stmt_vec_info_vec ();
   htab_delete (reduction_list);
   return changed;
 }
index 8d50aac..1e1463f 100644
@@ -72,7 +72,9 @@ enum tree_dump_index
 #define TDF_DIAGNOSTIC (1 << 15)       /* A dump to be put in a diagnostic
                                           message.  */
 #define TDF_VERBOSE     (1 << 16)       /* A dump that uses the full tree 
-                                          dumper to print stmts. */
+                                          dumper to print stmts.  */
+#define TDF_RHS_ONLY   (1 << 17)       /* A flag to print only the RHS of
+                                          a gimple stmt.  */
 
 extern char *get_dump_file_name (enum tree_dump_index);
 extern int dump_enabled_p (enum tree_dump_index);
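
The new TDF_RHS_ONLY flag is consumed by the gimple pretty-printer.  A hedged usage sketch, assuming DUMP_FILE is open and STMT is a GIMPLE_ASSIGN (print_gimple_stmt is the tuple counterpart of print_generic_expr used elsewhere in this patch):

  /* Prints just "a_1 + b_2" instead of "x_3 = a_1 + b_2".  */
  print_gimple_stmt (dump_file, stmt, 0, TDF_RHS_ONLY);
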
index 9d20b0e..511e84b 100644
@@ -28,6 +28,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "basic-block.h"
 #include "tree-flow.h"
 #include "toplev.h"
+#include "gimple.h"
 
 /* Rewriting a function into SSA form can create a huge number of PHIs
    many of which may be thrown away shortly after their creation if jumps
@@ -76,11 +77,10 @@ along with GCC; see the file COPYING3.  If not see
    the -2 on all the calculations below.  */
 
 #define NUM_BUCKETS 10
-static GTY ((deletable (""))) tree free_phinodes[NUM_BUCKETS - 2];
+static GTY ((deletable (""))) VEC(gimple,gc) *free_phinodes[NUM_BUCKETS - 2];
 static unsigned long free_phinode_count;
 
 static int ideal_phi_node_len (int);
-static void resize_phi_node (tree *, int);
 
 #ifdef GATHER_STATISTICS
 unsigned int phi_nodes_reused;
@@ -126,13 +126,13 @@ phinodes_print_statistics (void)
    happens to contain a PHI node with LEN arguments or more, return
    that one.  */
 
-static inline tree
-allocate_phi_node (int len)
+static inline gimple
+allocate_phi_node (size_t len)
 {
-  tree phi;
-  int bucket = NUM_BUCKETS - 2;
-  int size = (sizeof (struct tree_phi_node)
-             + (len - 1) * sizeof (struct phi_arg_d));
+  gimple phi;
+  size_t bucket = NUM_BUCKETS - 2;
+  size_t size = sizeof (struct gimple_statement_phi)
+               + (len - 1) * sizeof (struct phi_arg_d);
 
   if (free_phinode_count)
     for (bucket = len - 2; bucket < NUM_BUCKETS - 2; bucket++)
@@ -141,22 +141,27 @@ allocate_phi_node (int len)
 
   /* If our free list has an element, then use it.  */
   if (bucket < NUM_BUCKETS - 2
-      && PHI_ARG_CAPACITY (free_phinodes[bucket]) >= len)
+      && gimple_phi_capacity (VEC_index (gimple, free_phinodes[bucket], 0))
+        >= len)
     {
       free_phinode_count--;
-      phi = free_phinodes[bucket];
-      free_phinodes[bucket] = PHI_CHAIN (free_phinodes[bucket]);
+      phi = VEC_pop (gimple, free_phinodes[bucket]);
+      if (VEC_empty (gimple, free_phinodes[bucket]))
+       VEC_free (gimple, gc, free_phinodes[bucket]);
 #ifdef GATHER_STATISTICS
       phi_nodes_reused++;
 #endif
     }
   else
     {
-      phi = (tree) ggc_alloc (size);
+      phi = (gimple) ggc_alloc (size);
 #ifdef GATHER_STATISTICS
       phi_nodes_created++;
-      tree_node_counts[(int) phi_kind]++;
-      tree_node_sizes[(int) phi_kind] += size;
+      {
+        enum gimple_alloc_kind kind = gimple_alloc_kind (GIMPLE_PHI);
+        gimple_alloc_counts[(int) kind]++;
+        gimple_alloc_sizes[(int) kind] += size;
+      }
 #endif
     }
 
@@ -184,7 +189,8 @@ ideal_phi_node_len (int len)
     len = 2;
 
   /* Compute the number of bytes of the original request.  */
-  size = sizeof (struct tree_phi_node) + (len - 1) * sizeof (struct phi_arg_d);
+  size = sizeof (struct gimple_statement_phi)
+        + (len - 1) * sizeof (struct phi_arg_d);
 
   /* Round it up to the next power of two.  */
   log2 = ceil_log2 (size);
@@ -199,10 +205,10 @@ ideal_phi_node_len (int len)
 
 /* Return a PHI node with LEN argument slots for variable VAR.  */
 
-static tree
+static gimple
 make_phi_node (tree var, int len)
 {
-  tree phi;
+  gimple phi;
   int capacity, i;
 
   capacity = ideal_phi_node_len (len);
@@ -212,24 +218,25 @@ make_phi_node (tree var, int len)
   /* We need to clear the entire PHI node, including the argument
      portion, because we represent a "missing PHI argument" by placing
      NULL_TREE in PHI_ARG_DEF.  */
-  memset (phi, 0, (sizeof (struct tree_phi_node) - sizeof (struct phi_arg_d)
+  memset (phi, 0, (sizeof (struct gimple_statement_phi)
+                  - sizeof (struct phi_arg_d)
                   + sizeof (struct phi_arg_d) * len));
-  TREE_SET_CODE (phi, PHI_NODE);
-  PHI_NUM_ARGS (phi) = len;
-  PHI_ARG_CAPACITY (phi) = capacity;
+  phi->gsbase.code = GIMPLE_PHI;
+  phi->gimple_phi.nargs = len;
+  phi->gimple_phi.capacity = capacity;
   if (TREE_CODE (var) == SSA_NAME)
-    SET_PHI_RESULT (phi, var);
+    gimple_phi_set_result (phi, var);
   else
-    SET_PHI_RESULT (phi, make_ssa_name (var, phi));
+    gimple_phi_set_result (phi, make_ssa_name (var, phi));
 
   for (i = 0; i < capacity; i++)
     {
       use_operand_p  imm;
-      imm = &(PHI_ARG_IMM_USE_NODE (phi, i));
-      imm->use = &(PHI_ARG_DEF_TREE (phi, i));
+      imm = gimple_phi_arg_imm_use_ptr (phi, i);
+      imm->use = gimple_phi_arg_def_ptr (phi, i);
       imm->prev = NULL;
       imm->next = NULL;
-      imm->stmt = phi;
+      imm->loc.stmt = phi;
     }
 
   return phi;
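
Both allocate_phi_node and make_phi_node rely on the same sizing rule: a gimple_statement_phi already embeds one phi_arg_d, so LEN argument slots need LEN - 1 extra entries.  A hypothetical helper (not part of the patch) that mirrors that arithmetic:

/* Number of bytes needed for a GIMPLE_PHI with LEN argument slots.  */
static size_t
phi_node_size (size_t len)
{
  return sizeof (struct gimple_statement_phi)
         + (len - 1) * sizeof (struct phi_arg_d);
}
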
@@ -238,66 +245,66 @@ make_phi_node (tree var, int len)
 /* We no longer need PHI, release it so that it may be reused.  */
 
 void
-release_phi_node (tree phi)
+release_phi_node (gimple phi)
 {
-  int bucket;
-  int len = PHI_ARG_CAPACITY (phi);
-  int x;
+  size_t bucket;
+  size_t len = gimple_phi_capacity (phi);
+  size_t x;
 
-  for (x = 0; x < PHI_NUM_ARGS (phi); x++)
+  for (x = 0; x < gimple_phi_num_args (phi); x++)
     {
       use_operand_p  imm;
-      imm = &(PHI_ARG_IMM_USE_NODE (phi, x));
+      imm = gimple_phi_arg_imm_use_ptr (phi, x);
       delink_imm_use (imm);
     }
 
   bucket = len > NUM_BUCKETS - 1 ? NUM_BUCKETS - 1 : len;
   bucket -= 2;
-  PHI_CHAIN (phi) = free_phinodes[bucket];
-  free_phinodes[bucket] = phi;
+  VEC_safe_push (gimple, gc, free_phinodes[bucket], phi);
   free_phinode_count++;
 }
 
+
 /* Resize an existing PHI node.  The only way is up.  Return the
    possibly relocated phi.  */
 
 static void
-resize_phi_node (tree *phi, int len)
+resize_phi_node (gimple *phi, size_t len)
 {
-  int old_size, i;
-  tree new_phi;
+  size_t old_size, i;
+  gimple new_phi;
 
-  gcc_assert (len > PHI_ARG_CAPACITY (*phi));
+  gcc_assert (len > gimple_phi_capacity (*phi));
 
   /* The garbage collector will not look at the PHI node beyond the
      first PHI_NUM_ARGS elements.  Therefore, all we have to copy is a
      portion of the PHI node currently in use.  */
-  old_size = (sizeof (struct tree_phi_node)
-            + (PHI_NUM_ARGS (*phi) - 1) * sizeof (struct phi_arg_d));
+  old_size = sizeof (struct gimple_statement_phi)
+            + (gimple_phi_num_args (*phi) - 1) * sizeof (struct phi_arg_d);
 
   new_phi = allocate_phi_node (len);
 
   memcpy (new_phi, *phi, old_size);
 
-  for (i = 0; i < PHI_NUM_ARGS (new_phi); i++)
+  for (i = 0; i < gimple_phi_num_args (new_phi); i++)
     {
       use_operand_p imm, old_imm;
-      imm = &(PHI_ARG_IMM_USE_NODE (new_phi, i));
-      old_imm = &(PHI_ARG_IMM_USE_NODE (*phi, i));
-      imm->use = &(PHI_ARG_DEF_TREE (new_phi, i));
+      imm = gimple_phi_arg_imm_use_ptr (new_phi, i);
+      old_imm = gimple_phi_arg_imm_use_ptr (*phi, i);
+      imm->use = gimple_phi_arg_def_ptr (new_phi, i);
       relink_imm_use_stmt (imm, old_imm, new_phi);
     }
 
-  PHI_ARG_CAPACITY (new_phi) = len;
+  new_phi->gimple_phi.capacity = len;
 
-  for (i = PHI_NUM_ARGS (new_phi); i < len; i++)
+  for (i = gimple_phi_num_args (new_phi); i < len; i++)
     {
       use_operand_p imm;
-      imm = &(PHI_ARG_IMM_USE_NODE (new_phi, i));
-      imm->use = &(PHI_ARG_DEF_TREE (new_phi, i));
+      imm = gimple_phi_arg_imm_use_ptr (new_phi, i);
+      imm->use = gimple_phi_arg_def_ptr (new_phi, i);
       imm->prev = NULL;
       imm->next = NULL;
-      imm->stmt = new_phi;
+      imm->loc.stmt = new_phi;
     }
 
   *phi = new_phi;
@@ -308,22 +315,22 @@ resize_phi_node (tree *phi, int len)
 void
 reserve_phi_args_for_new_edge (basic_block bb)
 {
-  tree *loc;
-  int len = EDGE_COUNT (bb->preds);
-  int cap = ideal_phi_node_len (len + 4);
+  size_t len = EDGE_COUNT (bb->preds);
+  size_t cap = ideal_phi_node_len (len + 4);
+  gimple_stmt_iterator gsi;
 
-  for (loc = phi_nodes_ptr (bb);
-       *loc;
-       loc = &PHI_CHAIN (*loc))
+  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
     {
-      if (len > PHI_ARG_CAPACITY (*loc))
+      gimple *loc = gsi_stmt_ptr (&gsi);
+
+      if (len > gimple_phi_capacity (*loc))
        {
-         tree old_phi = *loc;
+         gimple old_phi = *loc;
 
          resize_phi_node (loc, cap);
 
-         /* The result of the phi is defined by this phi node.  */
-         SSA_NAME_DEF_STMT (PHI_RESULT (*loc)) = *loc;
+         /* The result of the PHI is defined by this PHI node.  */
+         SSA_NAME_DEF_STMT (gimple_phi_result (*loc)) = *loc;
 
          release_phi_node (old_phi);
        }
@@ -337,26 +344,28 @@ reserve_phi_args_for_new_edge (basic_block bb)
         batch.  */
       SET_PHI_ARG_DEF (*loc, len - 1, NULL_TREE);
 
-      PHI_NUM_ARGS (*loc)++;
+      (*loc)->gimple_phi.nargs++;
     }
 }
 
 
 /* Create a new PHI node for variable VAR at basic block BB.  */
 
-tree
+gimple
 create_phi_node (tree var, basic_block bb)
 {
-  tree phi;
-
-  phi = make_phi_node (var, EDGE_COUNT (bb->preds));
+  gimple_stmt_iterator gsi;
+  gimple phi = make_phi_node (var, EDGE_COUNT (bb->preds));
 
   /* Add the new PHI node to the list of PHI nodes for block BB.  */
-  PHI_CHAIN (phi) = phi_nodes (bb);
-  set_phi_nodes (bb, phi);
+  if (phi_nodes (bb) == NULL)
+    set_phi_nodes (bb, gimple_seq_alloc ());
+
+  gsi = gsi_last (phi_nodes (bb));
+  gsi_insert_after (&gsi, phi, GSI_NEW_STMT);
 
   /* Associate BB to the PHI node.  */
-  set_bb_for_stmt (phi, bb);
+  gimple_set_bb (phi, bb);
 
   return phi;
 }
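
A usage sketch of the updated API: create_phi_node now returns a gimple and add_phi_arg takes one, but the call sequence is otherwise unchanged.  ARG_FOR_EDGE below is a hypothetical helper standing in for whatever value the caller wants on each incoming edge:

  edge e;
  edge_iterator ei;
  gimple phi = create_phi_node (var, bb);

  FOR_EACH_EDGE (e, ei, bb->preds)
    add_phi_arg (phi, arg_for_edge (e), e);   /* arg_for_edge: hypothetical */
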
@@ -369,19 +378,19 @@ create_phi_node (tree var, basic_block bb)
    PHI points to the reallocated phi node when we return.  */
 
 void
-add_phi_arg (tree phi, tree def, edge e)
+add_phi_arg (gimple phi, tree def, edge e)
 {
   basic_block bb = e->dest;
 
-  gcc_assert (bb == bb_for_stmt (phi));
+  gcc_assert (bb == gimple_bb (phi));
 
   /* We resize PHI nodes upon edge creation.  We should always have
      enough room at this point.  */
-  gcc_assert (PHI_NUM_ARGS (phi) <= PHI_ARG_CAPACITY (phi));
+  gcc_assert (gimple_phi_num_args (phi) <= gimple_phi_capacity (phi));
 
   /* We resize PHI nodes upon edge creation.  We should always have
      enough room at this point.  */
-  gcc_assert (e->dest_idx < (unsigned int) PHI_NUM_ARGS (phi));
+  gcc_assert (e->dest_idx < gimple_phi_num_args (phi));
 
   /* Copy propagation needs to know what object occur in abnormal
      PHI nodes.  This is a convenient place to record such information.  */
@@ -401,22 +410,22 @@ add_phi_arg (tree phi, tree def, edge e)
    is consistent with how we remove an edge from the edge vector.  */
 
 static void
-remove_phi_arg_num (tree phi, int i)
+remove_phi_arg_num (gimple phi, int i)
 {
-  int num_elem = PHI_NUM_ARGS (phi);
+  int num_elem = gimple_phi_num_args (phi);
 
   gcc_assert (i < num_elem);
 
   /* Delink the item which is being removed.  */
-  delink_imm_use (&(PHI_ARG_IMM_USE_NODE (phi, i)));
+  delink_imm_use (gimple_phi_arg_imm_use_ptr (phi, i));
 
   /* If it is not the last element, move the last element
      to the element we want to delete, resetting all the links. */
   if (i != num_elem - 1)
     {
       use_operand_p old_p, new_p;
-      old_p = &PHI_ARG_IMM_USE_NODE (phi, num_elem - 1);
-      new_p = &PHI_ARG_IMM_USE_NODE (phi, i);
+      old_p = gimple_phi_arg_imm_use_ptr (phi, num_elem - 1);
+      new_p = gimple_phi_arg_imm_use_ptr (phi, i);
       /* Set use on new node, and link into last element's place.  */
       *(new_p->use) = *(old_p->use);
       relink_imm_use (new_p, old_p);
@@ -425,7 +434,7 @@ remove_phi_arg_num (tree phi, int i)
   /* Shrink the vector and return.  Note that we do not have to clear
      PHI_ARG_DEF because the garbage collector will not look at those
      elements beyond the first PHI_NUM_ARGS elements of the array.  */
-  PHI_NUM_ARGS (phi)--;
+  phi->gimple_phi.nargs--;
 }
 
 
@@ -434,60 +443,29 @@ remove_phi_arg_num (tree phi, int i)
 void
 remove_phi_args (edge e)
 {
-  tree phi;
+  gimple_stmt_iterator gsi;
 
-  for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
-    remove_phi_arg_num (phi, e->dest_idx);
+  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
+    remove_phi_arg_num (gsi_stmt (gsi), e->dest_idx);
 }
 
 
-/* Remove PHI node PHI from basic block BB.  If PREV is non-NULL, it is
-   used as the node immediately before PHI in the linked list.  If
-   RELEASE_LHS_P is true, the LHS of this PHI node is released into
-   the free pool of SSA names.  */
+/* Remove the PHI node pointed to by iterator GSI from its basic block.  After
+   removal, iterator GSI is updated to point to the next PHI node in the
+   sequence.  If RELEASE_LHS_P is true, the LHS of this PHI node is released
+   into the free pool of SSA names.  */
 
 void
-remove_phi_node (tree phi, tree prev, bool release_lhs_p)
+remove_phi_node (gimple_stmt_iterator *gsi, bool release_lhs_p)
 {
-  tree *loc;
-
-  if (prev)
-    {
-      loc = &PHI_CHAIN (prev);
-    }
-  else
-    {
-      for (loc = phi_nodes_ptr (bb_for_stmt (phi));
-          *loc != phi;
-          loc = &PHI_CHAIN (*loc))
-       ;
-    }
-
-  /* Remove PHI from the chain.  */
-  *loc = PHI_CHAIN (phi);
+  gimple phi = gsi_stmt (*gsi);
+  gsi_remove (gsi, false);
 
   /* If we are deleting the PHI node, then we should release the
      SSA_NAME node so that it can be reused.  */
   release_phi_node (phi);
   if (release_lhs_p)
-    release_ssa_name (PHI_RESULT (phi));
-}
-
-
-/* Reverse the order of PHI nodes in the chain PHI.
-   Return the new head of the chain (old last PHI node).  */
-
-tree
-phi_reverse (tree phi)
-{
-  tree prev = NULL_TREE, next;
-  for (; phi; phi = next)
-    {
-      next = PHI_CHAIN (phi);
-      PHI_CHAIN (phi) = prev;
-      prev = phi;
-    }
-  return prev;
+    release_ssa_name (gimple_phi_result (phi));
 }
 
 #include "gt-tree-phinodes.h"
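
Since remove_phi_node now takes the iterator itself and leaves it on the following PHI, deletion loops must advance conditionally.  A minimal sketch, with SHOULD_REMOVE_P standing in for a hypothetical predicate:

  gimple_stmt_iterator psi = gsi_start_phis (bb);

  while (!gsi_end_p (psi))
    {
      if (should_remove_p (gsi_stmt (psi)))   /* hypothetical predicate */
        remove_phi_node (&psi, true);         /* iterator already advanced */
      else
        gsi_next (&psi);
    }
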
index 32d7fbe..63911b3 100644
@@ -207,7 +207,8 @@ along with GCC; see the file COPYING3.  If not see
 
 #define MAX_DISTANCE (target_avail_regs < 16 ? 4 : 8)
    
-/* Data references.  */
+/* Data references (or phi nodes that carry data reference values across
+   loop iterations).  */
 
 typedef struct dref
 {
@@ -215,7 +216,12 @@ typedef struct dref
   struct data_reference *ref;
 
   /* The statement in that the reference appears.  */
-  tree stmt;
+  gimple stmt;
+
+  /* In case STMT is a phi node, this field is set to the SSA name
+     defined by it in replace_phis_by_defined_names (in order to avoid
+     pointing to a phi node that got reallocated in the meantime).  */
+  tree name_defined_by_phi;
 
   /* Distance of the reference from the root of the chain (in number of
      iterations of the loop).  */
@@ -349,12 +355,12 @@ dump_dref (FILE *file, dref ref)
     }
   else
     {
-      if (TREE_CODE (ref->stmt) == PHI_NODE)
+      if (gimple_code (ref->stmt) == GIMPLE_PHI)
        fprintf (file, "    looparound ref\n");
       else
        fprintf (file, "    combination ref\n");
       fprintf (file, "      in statement ");
-      print_generic_expr (file, ref->stmt, TDF_SLIM);
+      print_gimple_stmt (file, ref->stmt, 0, TDF_SLIM);
       fprintf (file, "\n");
       fprintf (file, "      distance %u\n", ref->distance);
     }
@@ -777,7 +783,7 @@ split_data_refs_to_components (struct loop *loop,
 
       dataref->always_accessed
              = dominated_by_p (CDI_DOMINATORS, last_always_executed,
-                               bb_for_stmt (dataref->stmt));
+                               gimple_bb (dataref->stmt));
       dataref->pos = VEC_length (dref, comp->refs);
       VEC_quick_push (dref, comp->refs, dataref);
     }
@@ -813,7 +819,7 @@ suitable_component_p (struct loop *loop, struct component *comp)
 
   for (i = 0; VEC_iterate (dref, comp->refs, i, a); i++)
     {
-      ba = bb_for_stmt (a->stmt);
+      ba = gimple_bb (a->stmt);
 
       if (!just_once_each_iteration_p (loop, ba))
        return false;
@@ -989,12 +995,12 @@ name_for_ref (dref ref)
 {
   tree name;
 
-  if (TREE_CODE (ref->stmt) == GIMPLE_MODIFY_STMT)
+  if (is_gimple_assign (ref->stmt))
     {
       if (!ref->ref || DR_IS_READ (ref->ref))
-       name = GIMPLE_STMT_OPERAND (ref->stmt, 0);
+       name = gimple_assign_lhs (ref->stmt);
       else
-       name = GIMPLE_STMT_OPERAND (ref->stmt, 1);
+       name = gimple_assign_rhs1 (ref->stmt);
     }
   else
     name = PHI_RESULT (ref->stmt);
@@ -1052,44 +1058,49 @@ valid_initializer_p (struct data_reference *ref,
    iteration), returns the phi node.  Otherwise, NULL_TREE is returned.  ROOT
    is the root of the current chain.  */
 
-static tree
+static gimple
 find_looparound_phi (struct loop *loop, dref ref, dref root)
 {
-  tree name, phi, init, init_stmt, init_ref;
+  tree name, init, init_ref;
+  gimple phi = NULL, init_stmt;
   edge latch = loop_latch_edge (loop);
   struct data_reference init_dr;
+  gimple_stmt_iterator psi;
 
-  if (TREE_CODE (ref->stmt) == GIMPLE_MODIFY_STMT)
+  if (is_gimple_assign (ref->stmt))
     {
       if (DR_IS_READ (ref->ref))
-       name = GIMPLE_STMT_OPERAND (ref->stmt, 0);
+       name = gimple_assign_lhs (ref->stmt);
       else
-       name = GIMPLE_STMT_OPERAND (ref->stmt, 1);
+       name = gimple_assign_rhs1 (ref->stmt);
     }
   else
     name = PHI_RESULT (ref->stmt);
   if (!name)
-    return NULL_TREE;
+    return NULL;
 
-  for (phi = phi_nodes (loop->header); phi; phi = PHI_CHAIN (phi))
-    if (PHI_ARG_DEF_FROM_EDGE (phi, latch) == name)
-      break;
+  for (psi = gsi_start_phis (loop->header); !gsi_end_p (psi); gsi_next (&psi))
+    {
+      phi = gsi_stmt (psi);
+      if (PHI_ARG_DEF_FROM_EDGE (phi, latch) == name)
+       break;
+    }
 
-  if (!phi)
-    return NULL_TREE;
+  if (gsi_end_p (psi))
+    return NULL;
 
   init = PHI_ARG_DEF_FROM_EDGE (phi, loop_preheader_edge (loop));
   if (TREE_CODE (init) != SSA_NAME)
-    return NULL_TREE;
+    return NULL;
   init_stmt = SSA_NAME_DEF_STMT (init);
-  if (TREE_CODE (init_stmt) != GIMPLE_MODIFY_STMT)
-    return NULL_TREE;
-  gcc_assert (GIMPLE_STMT_OPERAND (init_stmt, 0) == init);
+  if (gimple_code (init_stmt) != GIMPLE_ASSIGN)
+    return NULL;
+  gcc_assert (gimple_assign_lhs (init_stmt) == init);
 
-  init_ref = GIMPLE_STMT_OPERAND (init_stmt, 1);
+  init_ref = gimple_assign_rhs1 (init_stmt);
   if (!REFERENCE_CLASS_P (init_ref)
       && !DECL_P (init_ref))
-    return NULL_TREE;
+    return NULL;
 
   /* Analyze the behavior of INIT_REF with respect to LOOP (innermost
      loop enclosing PHI).  */
@@ -1099,7 +1110,7 @@ find_looparound_phi (struct loop *loop, dref ref, dref root)
   dr_analyze_innermost (&init_dr);
 
   if (!valid_initializer_p (&init_dr, ref->distance + 1, root->ref))
-    return NULL_TREE;
+    return NULL;
 
   return phi;
 }
@@ -1107,7 +1118,7 @@ find_looparound_phi (struct loop *loop, dref ref, dref root)
 /* Adds a reference for the looparound copy of REF in PHI to CHAIN.  */
 
 static void
-insert_looparound_copy (chain_p chain, dref ref, tree phi)
+insert_looparound_copy (chain_p chain, dref ref, gimple phi)
 {
   dref nw = XCNEW (struct dref), aref;
   unsigned i;
@@ -1138,7 +1149,7 @@ add_looparound_copies (struct loop *loop, chain_p chain)
 {
   unsigned i;
   dref ref, root = get_chain_root (chain);
-  tree phi;
+  gimple phi;
 
   for (i = 0; VEC_iterate (dref, chain->refs, i, ref); i++)
     {
@@ -1218,69 +1229,85 @@ determine_roots (struct loop *loop,
    is in the lhs of STMT, false if it is in rhs.  */
 
 static void
-replace_ref_with (tree stmt, tree new, bool set, bool in_lhs)
+replace_ref_with (gimple stmt, tree new, bool set, bool in_lhs)
 {
-  tree val, new_stmt;
-  block_stmt_iterator bsi;
+  tree val;
+  gimple new_stmt;
+  gimple_stmt_iterator bsi, psi;
 
-  if (TREE_CODE (stmt) == PHI_NODE)
+  if (gimple_code (stmt) == GIMPLE_PHI)
     {
       gcc_assert (!in_lhs && !set);
 
       val = PHI_RESULT (stmt);
-      bsi = bsi_after_labels (bb_for_stmt (stmt));
-      remove_phi_node (stmt, NULL_TREE, false);
+      bsi = gsi_after_labels (gimple_bb (stmt));
+      psi = gsi_for_stmt (stmt);
+      remove_phi_node (&psi, false);
 
-      /* Turn the phi node into GIMPLE_MODIFY_STMT.  */
-      new_stmt = build_gimple_modify_stmt (val, new);
-      SSA_NAME_DEF_STMT (val) = new_stmt;
-      bsi_insert_before (&bsi, new_stmt, BSI_NEW_STMT);
+      /* Turn the phi node into GIMPLE_ASSIGN.  */
+      new_stmt = gimple_build_assign (val, new);
+      gsi_insert_before (&bsi, new_stmt, GSI_NEW_STMT);
       return;
     }
       
   /* Since the reference is of gimple_reg type, it should only
      appear as lhs or rhs of modify statement.  */
-  gcc_assert (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT);
+  gcc_assert (is_gimple_assign (stmt));
+
+  bsi = gsi_for_stmt (stmt);
 
   /* If we do not need to initialize NEW, just replace the use of OLD.  */
   if (!set)
     {
       gcc_assert (!in_lhs);
-      GIMPLE_STMT_OPERAND (stmt, 1) = new;
+      gimple_assign_set_rhs_from_tree (&bsi, new);
+      stmt = gsi_stmt (bsi);
       update_stmt (stmt);
       return;
     }
 
-  bsi = bsi_for_stmt (stmt);
   if (in_lhs)
     {
-      val = GIMPLE_STMT_OPERAND (stmt, 1);
+      /* We have the statement
+        
+        OLD = VAL
 
-      /* OLD = VAL
-
-        is transformed to
+        If OLD is a memory reference, then VAL is gimple_val, and we transform
+        this to
 
         OLD = VAL
         NEW = VAL
 
-        (since the reference is of gimple_reg type, VAL is either gimple
-        invariant or ssa name).  */
+        Otherwise, we are replacing a combination chain:
+        VAL is the expression that performs the combination, and OLD is an
+        SSA name.  In this case, we transform the assignment to
+
+        OLD = VAL
+        NEW = OLD
+
+        */
+
+      val = gimple_assign_lhs (stmt);
+      if (TREE_CODE (val) != SSA_NAME)
+       {
+         gcc_assert (gimple_assign_copy_p (stmt));
+         val = gimple_assign_rhs1 (stmt);
+       }
     }
   else
     {
-      val = GIMPLE_STMT_OPERAND (stmt, 0);
-
       /* VAL = OLD
 
         is transformed to
 
         VAL = OLD
         NEW = VAL  */
+
+      val = gimple_assign_lhs (stmt);
     }
 
-  new_stmt = build_gimple_modify_stmt (new, unshare_expr (val));
-  bsi_insert_after (&bsi, new_stmt, BSI_NEW_STMT);
-  SSA_NAME_DEF_STMT (new) = new_stmt;
+  new_stmt = gimple_build_assign (new, unshare_expr (val));
+  gsi_insert_after (&bsi, new_stmt, GSI_NEW_STMT);
 }
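
Note the re-fetch after gimple_assign_set_rhs_from_tree in the branch above: replacing the RHS may need more operand slots than the tuple has, in which case the statement is reallocated and the old pointer goes stale.  A minimal sketch of the idiom, assuming STMT is a GIMPLE_ASSIGN and NEW_RHS the replacement expression:

  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  gimple_assign_set_rhs_from_tree (&gsi, new_rhs);
  stmt = gsi_stmt (gsi);   /* STMT may have been reallocated.  */
  update_stmt (stmt);
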
 
 /* Returns the reference to the address of REF in the ITER-th iteration of
@@ -1388,12 +1415,12 @@ get_init_expr (chain_p chain, unsigned index)
 /* Marks all virtual operands of statement STMT for renaming.  */
 
 void
-mark_virtual_ops_for_renaming (tree stmt)
+mark_virtual_ops_for_renaming (gimple stmt)
 {
   ssa_op_iter iter;
   tree var;
 
-  if (TREE_CODE (stmt) == PHI_NODE)
+  if (gimple_code (stmt) == GIMPLE_PHI)
     {
       var = PHI_RESULT (stmt);
       if (is_gimple_reg (var))
@@ -1418,12 +1445,12 @@ mark_virtual_ops_for_renaming (tree stmt)
 /* Calls mark_virtual_ops_for_renaming for all members of LIST.  */
 
 static void
-mark_virtual_ops_for_renaming_list (tree list)
+mark_virtual_ops_for_renaming_list (gimple_seq list)
 {
-  tree_stmt_iterator tsi;
+  gimple_stmt_iterator gsi;
 
-  for (tsi = tsi_start (list); !tsi_end_p (tsi); tsi_next (&tsi))
-    mark_virtual_ops_for_renaming (tsi_stmt (tsi));
+  for (gsi = gsi_start (list); !gsi_end_p (gsi); gsi_next (&gsi))
+    mark_virtual_ops_for_renaming (gsi_stmt (gsi));
 }
 
 /* Returns a new temporary variable used for the I-th variable carrying
@@ -1457,8 +1484,9 @@ initialize_root_vars (struct loop *loop, chain_p chain, bitmap tmp_vars)
   unsigned n = chain->length;
   dref root = get_chain_root (chain);
   bool reuse_first = !chain->has_max_use_after;
-  tree ref, init, var, next, stmts;
-  tree phi;
+  tree ref, init, var, next;
+  gimple phi;
+  gimple_seq stmts;
   edge entry = loop_preheader_edge (loop), latch = loop_latch_edge (loop);
 
   /* If N == 0, then all the references are within the single iteration.  And
@@ -1468,7 +1496,7 @@ initialize_root_vars (struct loop *loop, chain_p chain, bitmap tmp_vars)
   chain->vars = VEC_alloc (tree, heap, n + 1);
 
   if (chain->type == CT_COMBINATION)
-    ref = GIMPLE_STMT_OPERAND (root->stmt, 0);
+    ref = gimple_assign_lhs (root->stmt);
   else
     ref = DR_REF (root->ref);
 
@@ -1481,7 +1509,7 @@ initialize_root_vars (struct loop *loop, chain_p chain, bitmap tmp_vars)
     VEC_quick_push (tree, chain->vars, VEC_index (tree, chain->vars, 0));
   
   for (i = 0; VEC_iterate (tree, chain->vars, i, var); i++)
-    VEC_replace (tree, chain->vars, i, make_ssa_name (var, NULL_TREE));
+    VEC_replace (tree, chain->vars, i, make_ssa_name (var, NULL));
 
   for (i = 0; i < n; i++)
     {
@@ -1493,7 +1521,7 @@ initialize_root_vars (struct loop *loop, chain_p chain, bitmap tmp_vars)
       if (stmts)
        {
          mark_virtual_ops_for_renaming_list (stmts);
-         bsi_insert_on_edge_immediate (entry, stmts);
+         gsi_insert_seq_on_edge_immediate (entry, stmts);
        }
 
       phi = create_phi_node (var, loop->header);
@@ -1533,8 +1561,9 @@ initialize_root_vars_lm (struct loop *loop, dref root, bool written,
                         bitmap tmp_vars)
 {
   unsigned i;
-  tree ref = DR_REF (root->ref), init, var, next, stmts;
-  tree phi;
+  tree ref = DR_REF (root->ref), init, var, next;
+  gimple_seq stmts;
+  gimple phi;
   edge entry = loop_preheader_edge (loop), latch = loop_latch_edge (loop);
 
   /* Find the initializer for the variable, and check that it cannot
@@ -1548,7 +1577,7 @@ initialize_root_vars_lm (struct loop *loop, dref root, bool written,
     VEC_quick_push (tree, *vars, VEC_index (tree, *vars, 0));
   
   for (i = 0; VEC_iterate (tree, *vars, i, var); i++)
-    VEC_replace (tree, *vars, i, make_ssa_name (var, NULL_TREE));
+    VEC_replace (tree, *vars, i, make_ssa_name (var, NULL));
 
   var = VEC_index (tree, *vars, 0);
       
@@ -1556,7 +1585,7 @@ initialize_root_vars_lm (struct loop *loop, dref root, bool written,
   if (stmts)
     {
       mark_virtual_ops_for_renaming_list (stmts);
-      bsi_insert_on_edge_immediate (entry, stmts);
+      gsi_insert_seq_on_edge_immediate (entry, stmts);
     }
 
   if (written)
@@ -1569,10 +1598,9 @@ initialize_root_vars_lm (struct loop *loop, dref root, bool written,
     }
   else
     {
-      init = build_gimple_modify_stmt (var, init);
-      SSA_NAME_DEF_STMT (var) = init;
-      mark_virtual_ops_for_renaming (init);
-      bsi_insert_on_edge_immediate (entry, init);
+      gimple init_stmt = gimple_build_assign (var, init);
+      mark_virtual_ops_for_renaming (init_stmt);
+      gsi_insert_on_edge_immediate (entry, init_stmt);
     }
 }
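
The dropped SSA_NAME_DEF_STMT stores in this hunk (and earlier in replace_ref_with) reflect a property of the tuple builders: when the LHS is an SSA name, gimple_build_assign records the new statement as its definition itself.  A minimal sketch assuming that behavior, with VAR an SSA name and ENTRY the preheader edge:

  gimple init_stmt = gimple_build_assign (var, init);
  /* SSA_NAME_DEF_STMT (var) now points at init_stmt; no manual store needed.  */
  gsi_insert_on_edge_immediate (entry, init_stmt);
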
 
@@ -1613,7 +1641,7 @@ execute_load_motion (struct loop *loop, chain_p chain, bitmap tmp_vars)
          if (n_writes)
            {
              var = VEC_index (tree, vars, 0);
-             var = make_ssa_name (SSA_NAME_VAR (var), NULL_TREE);
+             var = make_ssa_name (SSA_NAME_VAR (var), NULL);
              VEC_replace (tree, vars, 0, var);
            }
          else
@@ -1629,20 +1657,20 @@ execute_load_motion (struct loop *loop, chain_p chain, bitmap tmp_vars)
 
 /* Returns the single statement in that NAME is used, excepting
    the looparound phi nodes contained in one of the chains.  If there is no
-   such statement, or more statements, NULL_TREE is returned.  */
+   such statement, or more statements, NULL is returned.  */
 
-static tree
+static gimple
 single_nonlooparound_use (tree name)
 {
   use_operand_p use;
   imm_use_iterator it;
-  tree stmt, ret = NULL_TREE;
+  gimple stmt, ret = NULL;
 
   FOR_EACH_IMM_USE_FAST (use, it, name)
     {
       stmt = USE_STMT (use);
 
-      if (TREE_CODE (stmt) == PHI_NODE)
+      if (gimple_code (stmt) == GIMPLE_PHI)
        {
          /* Ignore uses in looparound phi nodes.  Uses in other phi nodes
             could not be processed anyway, so just fail for them.  */
@@ -1650,10 +1678,10 @@ single_nonlooparound_use (tree name)
                            SSA_NAME_VERSION (PHI_RESULT (stmt))))
            continue;
 
-         return NULL_TREE;
+         return NULL;
        }
-      else if (ret != NULL_TREE)
-       return NULL_TREE;
+      else if (ret != NULL)
+       return NULL;
       else
        ret = stmt;
     }
@@ -1665,19 +1693,22 @@ single_nonlooparound_use (tree name)
    used.  */
 
 static void
-remove_stmt (tree stmt)
+remove_stmt (gimple stmt)
 {
-  tree next, name;
+  tree name;
+  gimple next;
+  gimple_stmt_iterator psi;
 
-  if (TREE_CODE (stmt) == PHI_NODE)
+  if (gimple_code (stmt) == GIMPLE_PHI)
     {
       name = PHI_RESULT (stmt);
       next = single_nonlooparound_use (name);
-      remove_phi_node (stmt, NULL_TREE, true);
+      psi = gsi_for_stmt (stmt);
+      remove_phi_node (&psi, true);
 
       if (!next
-         || TREE_CODE (next) != GIMPLE_MODIFY_STMT
-         || GIMPLE_STMT_OPERAND (next, 1) != name)
+         || !gimple_assign_copy_p (next)
+         || gimple_assign_rhs1 (next) != name)
        return;
 
       stmt = next;
@@ -1685,21 +1716,21 @@ remove_stmt (tree stmt)
 
   while (1)
     {
-      block_stmt_iterator bsi;
+      gimple_stmt_iterator bsi;
     
-      bsi = bsi_for_stmt (stmt);
+      bsi = gsi_for_stmt (stmt);
 
-      name = GIMPLE_STMT_OPERAND (stmt, 0);
+      name = gimple_assign_lhs (stmt);
       gcc_assert (TREE_CODE (name) == SSA_NAME);
 
       next = single_nonlooparound_use (name);
 
       mark_virtual_ops_for_renaming (stmt);
-      bsi_remove (&bsi, true);
+      gsi_remove (&bsi, true);
 
       if (!next
-         || TREE_CODE (next) != GIMPLE_MODIFY_STMT
-         || GIMPLE_STMT_OPERAND (next, 1) != name)
+         || !gimple_assign_copy_p (next)
+         || gimple_assign_rhs1 (next) != name)
        return;
 
       stmt = next;
@@ -1794,7 +1825,7 @@ execute_pred_commoning (struct loop *loop, VEC (chain_p, heap) *chains,
 }
 
 /* For each reference in CHAINS, if its defining statement is
-   ssa name, set it to phi node that defines it.  */
+   a phi node, record the ssa name that is defined by it.  */
 
 static void
 replace_phis_by_defined_names (VEC (chain_p, heap) *chains)
@@ -1806,14 +1837,16 @@ replace_phis_by_defined_names (VEC (chain_p, heap) *chains)
   for (i = 0; VEC_iterate (chain_p, chains, i, chain); i++)
     for (j = 0; VEC_iterate (dref, chain->refs, j, a); j++)
       {
-       gcc_assert (TREE_CODE (a->stmt) != SSA_NAME);
-       if (TREE_CODE (a->stmt) == PHI_NODE)
-         a->stmt = PHI_RESULT (a->stmt);
+       if (gimple_code (a->stmt) == GIMPLE_PHI)
+         {
+           a->name_defined_by_phi = PHI_RESULT (a->stmt);
+           a->stmt = NULL;
+         }
       }
 }
 
-/* For each reference in CHAINS, if its defining statement is
-   phi node, set it to the ssa name that is defined by it.  */
+/* For each reference in CHAINS, if name_defined_by_phi is not
+   NULL, use it to set the stmt field.  */
 
 static void
 replace_names_by_phis (VEC (chain_p, heap) *chains)
@@ -1824,10 +1857,11 @@ replace_names_by_phis (VEC (chain_p, heap) *chains)
 
   for (i = 0; VEC_iterate (chain_p, chains, i, chain); i++)
     for (j = 0; VEC_iterate (dref, chain->refs, j, a); j++)
-      if (TREE_CODE (a->stmt) == SSA_NAME)
+      if (a->stmt == NULL)
        {
-         a->stmt = SSA_NAME_DEF_STMT (a->stmt);
-         gcc_assert (TREE_CODE (a->stmt) == PHI_NODE);
+         a->stmt = SSA_NAME_DEF_STMT (a->name_defined_by_phi);
+         gcc_assert (gimple_code (a->stmt) == GIMPLE_PHI);
+         a->name_defined_by_phi = NULL_TREE;
        }
 }
 
@@ -1896,7 +1930,7 @@ should_unroll_loop_p (struct loop *loop, unsigned factor,
 static void
 base_names_in_chain_on (struct loop *loop, tree name, tree var)
 {
-  tree stmt, phi;
+  gimple stmt, phi;
   imm_use_iterator iter;
   edge e;
 
@@ -1907,8 +1941,8 @@ base_names_in_chain_on (struct loop *loop, tree name, tree var)
       phi = NULL;
       FOR_EACH_IMM_USE_STMT (stmt, iter, name)
        {
-         if (TREE_CODE (stmt) == PHI_NODE
-             && flow_bb_inside_loop_p (loop, bb_for_stmt (stmt)))
+         if (gimple_code (stmt) == GIMPLE_PHI
+             && flow_bb_inside_loop_p (loop, gimple_bb (stmt)))
            {
              phi = stmt;
              BREAK_FROM_IMM_USE_STMT (iter);
@@ -1917,10 +1951,10 @@ base_names_in_chain_on (struct loop *loop, tree name, tree var)
       if (!phi)
        return;
 
-      if (bb_for_stmt (phi) == loop->header)
+      if (gimple_bb (phi) == loop->header)
        e = loop_latch_edge (loop);
       else
-       e = single_pred_edge (bb_for_stmt (stmt));
+       e = single_pred_edge (gimple_bb (stmt));
 
       name = PHI_RESULT (phi);
       SSA_NAME_VAR (name) = var;
@@ -1936,11 +1970,14 @@ static void
 eliminate_temp_copies (struct loop *loop, bitmap tmp_vars)
 {
   edge e;
-  tree phi, name, use, var, stmt;
+  gimple phi, stmt;
+  tree name, use, var;
+  gimple_stmt_iterator psi;
 
   e = loop_latch_edge (loop);
-  for (phi = phi_nodes (loop->header); phi; phi = PHI_CHAIN (phi))
+  for (psi = gsi_start_phis (loop->header); !gsi_end_p (psi); gsi_next (&psi))
     {
+      phi = gsi_stmt (psi);
       name = PHI_RESULT (phi);
       var = SSA_NAME_VAR (name);
       if (!bitmap_bit_p (tmp_vars, DECL_UID (var)))
@@ -1950,15 +1987,15 @@ eliminate_temp_copies (struct loop *loop, bitmap tmp_vars)
 
       /* Base all the ssa names in the ud and du chain of NAME on VAR.  */
       stmt = SSA_NAME_DEF_STMT (use);
-      while (TREE_CODE (stmt) == PHI_NODE
+      while (gimple_code (stmt) == GIMPLE_PHI
             /* In case we could not unroll the loop enough to eliminate
                all copies, we may reach the loop header before the defining
                statement (in that case, some register copies will be present
                in loop latch in the final code, corresponding to the newly
                created looparound phi nodes).  */
-            && bb_for_stmt (stmt) != loop->header)
+            && gimple_bb (stmt) != loop->header)
        {
-         gcc_assert (single_pred_p (bb_for_stmt (stmt)));
+         gcc_assert (single_pred_p (gimple_bb (stmt)));
          use = PHI_ARG_DEF (stmt, 0);
          stmt = SSA_NAME_DEF_STMT (use);
        }
@@ -1980,38 +2017,40 @@ chain_can_be_combined_p (chain_p chain)
    statements, NAME is replaced with the actual name used in the returned
    statement.  */
 
-static tree
+static gimple
 find_use_stmt (tree *name)
 {
-  tree stmt, rhs, lhs;
+  gimple stmt;
+  tree rhs, lhs;
 
   /* Skip over assignments.  */
   while (1)
     {
       stmt = single_nonlooparound_use (*name);
       if (!stmt)
-       return NULL_TREE;
+       return NULL;
 
-      if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
-       return NULL_TREE;
+      if (gimple_code (stmt) != GIMPLE_ASSIGN)
+       return NULL;
 
-      lhs = GIMPLE_STMT_OPERAND (stmt, 0);
+      lhs = gimple_assign_lhs (stmt);
       if (TREE_CODE (lhs) != SSA_NAME)
-       return NULL_TREE;
+       return NULL;
 
-      rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-      if (rhs != *name)
-       break;
+      if (gimple_assign_copy_p (stmt))
+       {
+         rhs = gimple_assign_rhs1 (stmt);
+         if (rhs != *name)
+           return NULL;
 
-      *name = lhs;
+         *name = lhs;
+       }
+      else if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
+              == GIMPLE_BINARY_RHS)
+       return stmt;
+      else
+       return NULL;
     }
-
-  if (!EXPR_P (rhs)
-      || REFERENCE_CLASS_P (rhs)
-      || TREE_CODE_LENGTH (TREE_CODE (rhs)) != 2)
-    return NULL_TREE;
-
-  return stmt;
 }
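
find_use_stmt above keys off the RHS class of the assignment rather than inspecting a GENERIC operand.  A hedged sketch of that classification as this patch uses it: a copy is recognized by gimple_assign_copy_p, while a statement such as a_1 = b_2 + c_3 carries a binary RHS with two operand slots:

  if (gimple_assign_copy_p (stmt))
    rhs1 = gimple_assign_rhs1 (stmt);                 /* the copied value */
  else if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
           == GIMPLE_BINARY_RHS)
    {
      rhs1 = gimple_assign_rhs1 (stmt);
      rhs2 = gimple_assign_rhs2 (stmt);               /* second operand */
    }
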
 
 /* Returns true if we may perform reassociation for operation CODE in TYPE.  */
@@ -2031,27 +2070,26 @@ may_reassociate_p (tree type, enum tree_code code)
    tree of the same operations and returns its root.  Distance to the root
    is stored in DISTANCE.  */
 
-static tree
-find_associative_operation_root (tree stmt, unsigned *distance)
+static gimple
+find_associative_operation_root (gimple stmt, unsigned *distance)
 {
-  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1), lhs, next;
-  enum tree_code code = TREE_CODE (rhs);
+  tree lhs;
+  gimple next;
+  enum tree_code code = gimple_assign_rhs_code (stmt);
+  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
   unsigned dist = 0;
 
-  if (!may_reassociate_p (TREE_TYPE (rhs), code))
-    return NULL_TREE;
+  if (!may_reassociate_p (type, code))
+    return NULL;
 
   while (1)
     {
-      lhs = GIMPLE_STMT_OPERAND (stmt, 0);
+      lhs = gimple_assign_lhs (stmt);
       gcc_assert (TREE_CODE (lhs) == SSA_NAME);
 
       next = find_use_stmt (&lhs);
-      if (!next)
-       break;
-
-      rhs = GIMPLE_STMT_OPERAND (next, 1);
-      if (TREE_CODE (rhs) != code)
+      if (!next
+         || gimple_assign_rhs_code (next) != code)
        break;
 
       stmt = next;
@@ -2069,30 +2107,30 @@ find_associative_operation_root (tree stmt, unsigned *distance)
    tree formed by this operation instead of the statement that uses NAME1 or
    NAME2.  */
 
-static tree
+static gimple
 find_common_use_stmt (tree *name1, tree *name2)
 {
-  tree stmt1, stmt2;
+  gimple stmt1, stmt2;
 
   stmt1 = find_use_stmt (name1);
   if (!stmt1)
-    return NULL_TREE;
+    return NULL;
 
   stmt2 = find_use_stmt (name2);
   if (!stmt2)
-    return NULL_TREE;
+    return NULL;
 
   if (stmt1 == stmt2)
     return stmt1;
 
   stmt1 = find_associative_operation_root (stmt1, NULL);
   if (!stmt1)
-    return NULL_TREE;
+    return NULL;
   stmt2 = find_associative_operation_root (stmt2, NULL);
   if (!stmt2)
-    return NULL_TREE;
+    return NULL;
 
-  return (stmt1 == stmt2 ? stmt1 : NULL_TREE);
+  return (stmt1 == stmt2 ? stmt1 : NULL);
 }
 
 /* Checks whether R1 and R2 are combined together using CODE, with the result
@@ -2106,7 +2144,8 @@ combinable_refs_p (dref r1, dref r2,
   enum tree_code acode;
   bool aswap;
   tree atype;
-  tree name1, name2, stmt, rhs;
+  tree name1, name2;
+  gimple stmt;
 
   name1 = name_for_ref (r1);
   name2 = name_for_ref (r2);
@@ -2117,11 +2156,10 @@ combinable_refs_p (dref r1, dref r2,
   if (!stmt)
     return false;
 
-  rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-  acode = TREE_CODE (rhs);
+  acode = gimple_assign_rhs_code (stmt);
   aswap = (!commutative_tree_code (acode)
-          && TREE_OPERAND (rhs, 0) != name1);
-  atype = TREE_TYPE (rhs);
+          && gimple_assign_rhs1 (stmt) != name1);
+  atype = TREE_TYPE (gimple_assign_lhs (stmt));
 
   if (*code == ERROR_MARK)
     {
@@ -2140,43 +2178,49 @@ combinable_refs_p (dref r1, dref r2,
    an assignment of the remaining operand.  */
 
 static void
-remove_name_from_operation (tree stmt, tree op)
+remove_name_from_operation (gimple stmt, tree op)
 {
-  tree *rhs;
+  tree other_op;
+  gimple_stmt_iterator si;
 
-  gcc_assert (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT);
+  gcc_assert (is_gimple_assign (stmt));
 
-  rhs = &GIMPLE_STMT_OPERAND (stmt, 1);
-  if (TREE_OPERAND (*rhs, 0) == op)
-    *rhs = TREE_OPERAND (*rhs, 1);
-  else if (TREE_OPERAND (*rhs, 1) == op)
-    *rhs = TREE_OPERAND (*rhs, 0);
+  if (gimple_assign_rhs1 (stmt) == op)
+    other_op = gimple_assign_rhs2 (stmt);
   else
-    gcc_unreachable ();
+    other_op = gimple_assign_rhs1 (stmt);
+
+  si = gsi_for_stmt (stmt);
+  gimple_assign_set_rhs_from_tree (&si, other_op);
+
+  /* We should not have reallocated STMT.  */
+  gcc_assert (gsi_stmt (si) == stmt);
+
   update_stmt (stmt);
 }
 
 /* Reassociates the expression in that NAME1 and NAME2 are used so that they
    are combined in a single statement, and returns this statement.  */
 
-static tree
+static gimple
 reassociate_to_the_same_stmt (tree name1, tree name2)
 {
-  tree stmt1, stmt2, root1, root2, r1, r2, s1, s2;
-  tree new_stmt, tmp_stmt, new_name, tmp_name, var;
+  gimple stmt1, stmt2, root1, root2, s1, s2;
+  gimple new_stmt, tmp_stmt;
+  tree new_name, tmp_name, var, r1, r2;
   unsigned dist1, dist2;
   enum tree_code code;
   tree type = TREE_TYPE (name1);
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator bsi;
 
   stmt1 = find_use_stmt (&name1);
   stmt2 = find_use_stmt (&name2);
   root1 = find_associative_operation_root (stmt1, &dist1);
   root2 = find_associative_operation_root (stmt2, &dist2);
-  code = TREE_CODE (GIMPLE_STMT_OPERAND (stmt1, 1));
+  code = gimple_assign_rhs_code (stmt1);
 
   gcc_assert (root1 && root2 && root1 == root2
-             && code == TREE_CODE (GIMPLE_STMT_OPERAND (stmt2, 1)));
+             && code == gimple_assign_rhs_code (stmt2));
 
   /* Find the root of the nearest expression in that both NAME1 and NAME2
      are used.  */
@@ -2188,22 +2232,22 @@ reassociate_to_the_same_stmt (tree name1, tree name2)
   while (dist1 > dist2)
     {
       s1 = find_use_stmt (&r1);
-      r1 = GIMPLE_STMT_OPERAND (s1, 0);
+      r1 = gimple_assign_lhs (s1);
       dist1--;
     }
   while (dist2 > dist1)
     {
       s2 = find_use_stmt (&r2);
-      r2 = GIMPLE_STMT_OPERAND (s2, 0);
+      r2 = gimple_assign_lhs (s2);
       dist2--;
     }
 
   while (s1 != s2)
     {
       s1 = find_use_stmt (&r1);
-      r1 = GIMPLE_STMT_OPERAND (s1, 0);
+      r1 = gimple_assign_lhs (s1);
       s2 = find_use_stmt (&r2);
-      r2 = GIMPLE_STMT_OPERAND (s2, 0);
+      r2 = gimple_assign_lhs (s2);
     }
 
   /* Remove NAME1 and NAME2 from the statements in that they are used
@@ -2215,24 +2259,28 @@ reassociate_to_the_same_stmt (tree name1, tree name2)
      combine it with the rhs of S1.  */
   var = create_tmp_var (type, "predreastmp");
   add_referenced_var (var);
-  new_name = make_ssa_name (var, NULL_TREE);
-  new_stmt = build_gimple_modify_stmt (new_name,
-                           fold_build2 (code, type, name1, name2));
-  SSA_NAME_DEF_STMT (new_name) = new_stmt;
+  new_name = make_ssa_name (var, NULL);
+  new_stmt = gimple_build_assign_with_ops (code, new_name, name1, name2);
 
   var = create_tmp_var (type, "predreastmp");
   add_referenced_var (var);
-  tmp_name = make_ssa_name (var, NULL_TREE);
-  tmp_stmt = build_gimple_modify_stmt (tmp_name,
-                                           GIMPLE_STMT_OPERAND (s1, 1));
-  SSA_NAME_DEF_STMT (tmp_name) = tmp_stmt;
-
-  GIMPLE_STMT_OPERAND (s1, 1) = fold_build2 (code, type, new_name, tmp_name);
+  tmp_name = make_ssa_name (var, NULL);
+
+  /* The rhs of S1 may now be either a binary expression with operation
+     CODE, or a gimple_val (in case stmt1 == s1 or stmt2 == s1, so that
+     name1 or name2 was removed from it).  */
+  tmp_stmt = gimple_build_assign_with_ops (gimple_assign_rhs_code (s1),
+                                          tmp_name,
+                                          gimple_assign_rhs1 (s1),
+                                          gimple_assign_rhs2 (s1));
+
+  bsi = gsi_for_stmt (s1);
+  gimple_assign_set_rhs_with_ops (&bsi, code, new_name, tmp_name);
+  s1 = gsi_stmt (bsi);
   update_stmt (s1);
 
-  bsi = bsi_for_stmt (s1);
-  bsi_insert_before (&bsi, new_stmt, BSI_SAME_STMT);
-  bsi_insert_before (&bsi, tmp_stmt, BSI_SAME_STMT);
+  gsi_insert_before (&bsi, new_stmt, GSI_SAME_STMT);
+  gsi_insert_before (&bsi, tmp_stmt, GSI_SAME_STMT);
 
   return new_stmt;
 }
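
The reassociation above builds new statements directly with gimple_build_assign_with_ops instead of folding a GENERIC expression first.  A minimal sketch of that builder, assuming LHS, OP1 and OP2 are SSA names of a type for which PLUS_EXPR is valid and GSI points at the insertion point:

  gimple sum = gimple_build_assign_with_ops (PLUS_EXPR, lhs, op1, op2);
  gsi_insert_before (&gsi, sum, GSI_SAME_STMT);
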
@@ -2242,10 +2290,10 @@ reassociate_to_the_same_stmt (tree name1, tree name2)
    associative and commutative operation in the same expression, reassociate
    the expression so that they are used in the same statement.  */
 
-static tree
+static gimple
 stmt_combining_refs (dref r1, dref r2)
 {
-  tree stmt1, stmt2;
+  gimple stmt1, stmt2;
   tree name1 = name_for_ref (r1);
   tree name2 = name_for_ref (r2);
 
@@ -2268,7 +2316,7 @@ combine_chains (chain_p ch1, chain_p ch2)
   bool swap = false;
   chain_p new_chain;
   unsigned i;
-  tree root_stmt;
+  gimple root_stmt;
   tree rslt_type = NULL_TREE;
 
   if (ch1 == ch2)
@@ -2399,7 +2447,8 @@ prepare_initializers_chain (struct loop *loop, chain_p chain)
 {
   unsigned i, n = (chain->type == CT_INVARIANT) ? 1 : chain->length;
   struct data_reference *dr = get_chain_root (chain)->ref;
-  tree init, stmts;
+  tree init;
+  gimple_seq stmts;
   dref laref;
   edge entry = loop_preheader_edge (loop);
 
@@ -2413,7 +2462,7 @@ prepare_initializers_chain (struct loop *loop, chain_p chain)
      instead of creating our own.  */
   for (i = 0; VEC_iterate (dref, chain->refs, i, laref); i++)
     {
-      if (TREE_CODE (laref->stmt) != PHI_NODE)
+      if (gimple_code (laref->stmt) != GIMPLE_PHI)
        continue;
 
       gcc_assert (laref->distance > 0);
@@ -2437,7 +2486,7 @@ prepare_initializers_chain (struct loop *loop, chain_p chain)
       if (stmts)
        {
          mark_virtual_ops_for_renaming_list (stmts);
-         bsi_insert_on_edge_immediate (entry, stmts);
+         gsi_insert_seq_on_edge_immediate (entry, stmts);
        }
       set_alias_info (init, dr);
 
index fce766c..df055f8 100644
@@ -44,11 +44,8 @@ static void pretty_print_string (pretty_printer *, const char*);
 static void print_call_name (pretty_printer *, const_tree);
 static void newline_and_indent (pretty_printer *, int);
 static void maybe_init_pretty_print (FILE *);
-static void print_declaration (pretty_printer *, tree, int, int);
 static void print_struct_decl (pretty_printer *, const_tree, int, int);
 static void do_niy (pretty_printer *, const_tree);
-static void dump_vops (pretty_printer *, tree, int, int);
-static void dump_generic_bb_buff (pretty_printer *, basic_block, int, int);
 
 #define INDENT(SPACE) do { \
   int i; for (i = 0; i<SPACE; i++) pp_space (buffer); } while (0)
@@ -409,7 +406,7 @@ dump_omp_clause (pretty_printer *buffer, tree clause, int spc, int flags)
 /* Dump the list of OpenMP clauses.  BUFFER, SPC and FLAGS are as in
    dump_generic_node.  */
 
-static void
+void
 dump_omp_clauses (pretty_printer *buffer, tree clause, int spc, int flags)
 {
   if (clause == NULL)
@@ -427,33 +424,6 @@ dump_omp_clauses (pretty_printer *buffer, tree clause, int spc, int flags)
 }
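
dump_omp_clauses (and print_declaration further down) lose their static qualifier so the new gimple pretty-printer can reuse them.  A hypothetical call site from the gimple dumper, with gimple_omp_parallel_clauses as the tuple accessor for the clause chain (the shared declaration lives outside this hunk):

  dump_omp_clauses (buffer, gimple_omp_parallel_clauses (gs), spc, flags);
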
 
 
-/* Dump the set of decls SYMS.  BUFFER, SPC and FLAGS are as in
-   dump_generic_node.  */
-
-static void
-dump_symbols (pretty_printer *buffer, bitmap syms, int flags)
-{
-  unsigned i;
-  bitmap_iterator bi;
-
-  if (syms == NULL)
-    pp_string (buffer, "NIL");
-  else
-    {
-      pp_string (buffer, " { ");
-
-      EXECUTE_IF_SET_IN_BITMAP (syms, 0, i, bi)
-       {
-         tree sym = referenced_var_lookup (i);
-         dump_generic_node (buffer, sym, 0, flags, false);
-         pp_string (buffer, " ");
-       }
-
-      pp_string (buffer, "}");
-    }
-}
-
-
 /* Dump the node NODE on the pretty_printer BUFFER, SPC spaces of
    indent.  FLAGS specifies details to show in the dump (see TDF_* in
    tree-pass.h).  If IS_STMT is true, the object printed is considered
@@ -471,18 +441,7 @@ dump_generic_node (pretty_printer *buffer, tree node, int spc, int flags,
   if (node == NULL_TREE)
     return spc;
 
-  is_expr = EXPR_P (node) || GIMPLE_STMT_P (node);
-
-  /* We use has_stmt_ann because CALL_EXPR can be both an expression
-     and a statement, and we have no guarantee that it will have a
-     stmt_ann when it is used as an RHS expression.  stmt_ann will assert
-     if you call it on something with a non-stmt annotation attached.  */
-  if (TREE_CODE (node) != ERROR_MARK
-      && is_gimple_stmt (node)
-      && (flags & (TDF_VOPS|TDF_MEMSYMS))
-      && has_stmt_ann (node)
-      && TREE_CODE (node) != PHI_NODE)
-    dump_vops (buffer, node, spc, flags);
+  is_expr = EXPR_P (node);
 
   if (is_stmt && (flags & TDF_STMTADDR))
     pp_printf (buffer, "<&%p> ", (void *)node);
@@ -1095,24 +1054,16 @@ dump_generic_node (pretty_printer *buffer, tree node, int spc, int flags,
       break;
 
     case MODIFY_EXPR:
-    case GIMPLE_MODIFY_STMT:
     case INIT_EXPR:
-      dump_generic_node (buffer, GENERIC_TREE_OPERAND (node, 0), spc, flags,
+      dump_generic_node (buffer, TREE_OPERAND (node, 0), spc, flags,
                         false);
       pp_space (buffer);
       pp_character (buffer, '=');
-      if (TREE_CODE (node) == GIMPLE_MODIFY_STMT
+      if (TREE_CODE (node) == MODIFY_EXPR
          && MOVE_NONTEMPORAL (node))
        pp_string (buffer, "{nt}");
-      if (TREE_CODE (node) == GIMPLE_MODIFY_STMT)
-       {
-       stmt_ann_t ann;
-        if ((ann = stmt_ann (node))
-           && ann->has_volatile_ops)
-         pp_string (buffer, "{v}");
-        }
       pp_space (buffer);
-      dump_generic_node (buffer, GENERIC_TREE_OPERAND (node, 1), spc, flags,
+      dump_generic_node (buffer, TREE_OPERAND (node, 1), spc, flags,
                         false);
       break;
 
@@ -1622,9 +1573,8 @@ dump_generic_node (pretty_printer *buffer, tree node, int spc, int flags,
       if (op0)
        {
          pp_space (buffer);
-         if (TREE_CODE (op0) == MODIFY_EXPR
-             || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
-           dump_generic_node (buffer, GENERIC_TREE_OPERAND (op0, 1),
+         if (TREE_CODE (op0) == MODIFY_EXPR)
+           dump_generic_node (buffer, TREE_OPERAND (op0, 1),
                               spc, flags, false);
          else
            dump_generic_node (buffer, op0, spc, flags, false);
@@ -1730,7 +1680,7 @@ dump_generic_node (pretty_printer *buffer, tree node, int spc, int flags,
          dump_generic_node (buffer, CASE_LOW (node), spc, flags, false);
        }
       else
-       pp_string (buffer, "default ");
+       pp_string (buffer, "default");
       pp_character (buffer, ':');
       break;
 
@@ -1745,28 +1695,6 @@ dump_generic_node (pretty_printer *buffer, tree node, int spc, int flags,
       pp_character (buffer, ')');
       break;
 
-    case PHI_NODE:
-      {
-       int i;
-
-       dump_generic_node (buffer, PHI_RESULT (node), spc, flags, false);
-       pp_string (buffer, " = PHI <");
-       for (i = 0; i < PHI_NUM_ARGS (node); i++)
-         {
-           dump_generic_node (buffer, PHI_ARG_DEF (node, i), spc, flags, false);
-           pp_string (buffer, "(");
-           pp_decimal_int (buffer, PHI_ARG_EDGE (node, i)->src->index);
-           pp_string (buffer, ")");
-           if (i < PHI_NUM_ARGS (node) - 1)
-             pp_string (buffer, ", ");
-         }
-       pp_string (buffer, ">");
-
-       if (stmt_references_memory_p (node) && (flags & TDF_MEMSYMS))
-         dump_symbols (buffer, STORED_SYMS (node), flags);
-      }
-      break;
-
     case SSA_NAME:
       dump_generic_node (buffer, SSA_NAME_VAR (node), spc, flags, false);
       pp_string (buffer, "_");
@@ -1844,21 +1772,6 @@ dump_generic_node (pretty_printer *buffer, tree node, int spc, int flags,
     case OMP_PARALLEL:
       pp_string (buffer, "#pragma omp parallel");
       dump_omp_clauses (buffer, OMP_PARALLEL_CLAUSES (node), spc, flags);
-      if (OMP_PARALLEL_FN (node))
-       {
-         pp_string (buffer, " [child fn: ");
-         dump_generic_node (buffer, OMP_PARALLEL_FN (node), spc, flags, false);
-
-         pp_string (buffer, " (");
-
-         if (OMP_PARALLEL_DATA_ARG (node))
-           dump_generic_node (buffer, OMP_PARALLEL_DATA_ARG (node), spc, flags,
-                              false);
-         else
-           pp_string (buffer, "???");
-
-         pp_string (buffer, ")]");
-       }
 
     dump_omp_body:
       if (!(flags & TDF_SLIM) && OMP_BODY (node))
@@ -1876,28 +1789,6 @@ dump_generic_node (pretty_printer *buffer, tree node, int spc, int flags,
     case OMP_TASK:
       pp_string (buffer, "#pragma omp task");
       dump_omp_clauses (buffer, OMP_TASK_CLAUSES (node), spc, flags);
-      if (OMP_TASK_FN (node))
-       {
-         pp_string (buffer, " [child fn: ");
-         dump_generic_node (buffer, OMP_TASK_FN (node), spc, flags, false);
-
-         pp_string (buffer, " (");
-
-         if (OMP_TASK_DATA_ARG (node))
-           dump_generic_node (buffer, OMP_TASK_DATA_ARG (node), spc, flags,
-                              false);
-         else
-           pp_string (buffer, "???");
-
-         pp_character (buffer, ')');
-         if (OMP_TASK_COPYFN (node))
-           {
-             pp_string (buffer, ", copy fn: ");
-             dump_generic_node (buffer, OMP_TASK_COPYFN (node), spc,
-                                flags, false);
-           }
-         pp_character (buffer, ']');
-       }
       goto dump_omp_body;
 
     case OMP_FOR:
@@ -1956,21 +1847,9 @@ dump_generic_node (pretty_printer *buffer, tree node, int spc, int flags,
 
     case OMP_SECTIONS:
       pp_string (buffer, "#pragma omp sections");
-      if (OMP_SECTIONS_CONTROL (node))
-       {
-         pp_string (buffer, " <");
-         dump_generic_node (buffer, OMP_SECTIONS_CONTROL (node), spc,
-                            flags, false);
-         pp_string (buffer, ">");
-       }
       dump_omp_clauses (buffer, OMP_SECTIONS_CLAUSES (node), spc, flags);
       goto dump_omp_body;
 
-    case OMP_SECTIONS_SWITCH:
-      pp_string (buffer, "OMP_SECTIONS_SWITCH");
-      is_expr = false;
-      break;
     case OMP_SECTION:
       pp_string (buffer, "#pragma omp section");
       goto dump_omp_body;
@@ -2005,44 +1884,11 @@ dump_generic_node (pretty_printer *buffer, tree node, int spc, int flags,
       dump_generic_node (buffer, TREE_OPERAND (node, 1), spc, flags, false);
       break;
 
-    case OMP_ATOMIC_LOAD:
-      pp_string (buffer, "#pragma omp atomic_load");
-      newline_and_indent (buffer, spc + 2);
-      dump_generic_node (buffer, TREE_OPERAND (node, 0), spc, flags, false);
-      pp_space (buffer);
-      pp_character (buffer, '=');
-      pp_space (buffer);
-      pp_character (buffer, '*');
-      dump_generic_node (buffer, TREE_OPERAND (node, 1), spc, flags, false);
-      break;
-
-    case OMP_ATOMIC_STORE:
-      pp_string (buffer, "#pragma omp atomic_store (");
-      dump_generic_node (buffer, TREE_OPERAND (node, 0), spc, flags, false);
-      pp_character (buffer, ')');
-      break;
-
     case OMP_SINGLE:
       pp_string (buffer, "#pragma omp single");
       dump_omp_clauses (buffer, OMP_SINGLE_CLAUSES (node), spc, flags);
       goto dump_omp_body;
 
-    case OMP_RETURN:
-      pp_string (buffer, "OMP_RETURN");
-      if (OMP_RETURN_NOWAIT (node))
-       pp_string (buffer, " [nowait]");
-      is_expr = false;
-      break;
-
-    case OMP_CONTINUE:
-      pp_string (buffer, "OMP_CONTINUE <");
-      dump_generic_node (buffer, TREE_OPERAND (node, 0), spc, flags, false);
-      pp_string (buffer, " <- ");
-      dump_generic_node (buffer, TREE_OPERAND (node, 1), spc, flags, false);
-      pp_string (buffer, ">");
-      is_expr = false;
-      break;
-
     case OMP_CLAUSE:
       dump_omp_clause (buffer, node, spc, flags);
       is_expr = false;
@@ -2237,7 +2083,7 @@ dump_generic_node (pretty_printer *buffer, tree node, int spc, int flags,
 
 /* Print the declaration of a variable.  */
 
-static void
+void
 print_declaration (pretty_printer *buffer, tree t, int spc, int flags)
 {
   INDENT (spc);
@@ -2415,7 +2261,6 @@ op_prio (const_tree op)
       return 1;
 
     case MODIFY_EXPR:
-    case GIMPLE_MODIFY_STMT:
     case INIT_EXPR:
       return 2;
 
@@ -2549,7 +2394,6 @@ op_symbol_code (enum tree_code code)
   switch (code)
     {
     case MODIFY_EXPR:
-    case GIMPLE_MODIFY_STMT:
       return "=";
 
     case TRUTH_OR_EXPR:
@@ -2890,342 +2734,3 @@ newline_and_indent (pretty_printer *buffer, int spc)
   pp_newline (buffer);
   INDENT (spc);
 }
-
-
-static void
-dump_vops (pretty_printer *buffer, tree stmt, int spc, int flags)
-{
-  struct voptype_d *vdefs;
-  struct voptype_d *vuses;
-  int i, n;
-
-  if (!ssa_operands_active () || !stmt_references_memory_p (stmt))
-    return;
-
-  /* Even if the statement doesn't have virtual operators yet, it may
-     contain symbol information (this happens before aliases have been
-     computed).  */
-  if ((flags & TDF_MEMSYMS)
-      && VUSE_OPS (stmt) == NULL
-      && VDEF_OPS (stmt) == NULL)
-    {
-      if (LOADED_SYMS (stmt))
-       {
-         pp_string (buffer, "# LOADS: ");
-         dump_symbols (buffer, LOADED_SYMS (stmt), flags);
-         newline_and_indent (buffer, spc);
-       }
-
-      if (STORED_SYMS (stmt))
-       {
-         pp_string (buffer, "# STORES: ");
-         dump_symbols (buffer, STORED_SYMS (stmt), flags);
-         newline_and_indent (buffer, spc);
-       }
-
-      return;
-    }
-
-  vuses = VUSE_OPS (stmt);
-  while (vuses)
-    {
-      pp_string (buffer, "# VUSE <");
-
-      n = VUSE_NUM (vuses);
-      for (i = 0; i < n; i++)
-       {
-         dump_generic_node (buffer, VUSE_OP (vuses, i), spc + 2, flags, false);
-         if (i < n - 1)
-           pp_string (buffer, ", ");
-       }
-
-      pp_string (buffer, ">");
-
-      if (flags & TDF_MEMSYMS)
-       dump_symbols (buffer, LOADED_SYMS (stmt), flags);
-
-      newline_and_indent (buffer, spc);
-      vuses = vuses->next;
-    }
-
-  vdefs = VDEF_OPS (stmt);
-  while (vdefs)
-    {
-      pp_string (buffer, "# ");
-      dump_generic_node (buffer, VDEF_RESULT (vdefs), spc + 2, flags, false);
-      pp_string (buffer, " = VDEF <");
-
-      n = VDEF_NUM (vdefs);
-      for (i = 0; i < n; i++)
-       {
-         dump_generic_node (buffer, VDEF_OP (vdefs, i), spc + 2, flags, 0);
-         if (i < n - 1)
-           pp_string (buffer, ", ");
-       }
-
-      pp_string (buffer, ">");
-
-      if ((flags & TDF_MEMSYMS) && vdefs->next == NULL)
-       dump_symbols (buffer, STORED_SYMS (stmt), flags);
-
-      newline_and_indent (buffer, spc);
-      vdefs = vdefs->next;
-    }
-}
-
-
-/* Dumps basic block BB to FILE with details described by FLAGS and
-   indented by INDENT spaces.  */
-
-void
-dump_generic_bb (FILE *file, basic_block bb, int indent, int flags)
-{
-  maybe_init_pretty_print (file);
-  dump_generic_bb_buff (&buffer, bb, indent, flags);
-  pp_flush (&buffer);
-}
-
-/* Dumps header of basic block BB to buffer BUFFER indented by INDENT
-   spaces and details described by flags.  */
-
-static void
-dump_bb_header (pretty_printer *buffer, basic_block bb, int indent, int flags)
-{
-  edge e;
-  tree stmt;
-  edge_iterator ei;
-
-  if (flags & TDF_BLOCKS)
-    {
-      INDENT (indent);
-      pp_string (buffer, "# BLOCK ");
-      pp_decimal_int (buffer, bb->index);
-      if (bb->frequency)
-       {
-          pp_string (buffer, " freq:");
-          pp_decimal_int (buffer, bb->frequency);
-       }
-      if (bb->count)
-       {
-          pp_string (buffer, " count:");
-          pp_widest_integer (buffer, bb->count);
-       }
-
-      if (flags & TDF_LINENO)
-       {
-         block_stmt_iterator bsi;
-
-         for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
-           if (get_lineno (bsi_stmt (bsi)) != -1)
-             {
-               pp_string (buffer, ", starting at line ");
-               pp_decimal_int (buffer, get_lineno (bsi_stmt (bsi)));
-               break;
-             }
-       }
-      newline_and_indent (buffer, indent);
-
-      pp_string (buffer, "# PRED:");
-      pp_write_text_to_stream (buffer);
-      FOR_EACH_EDGE (e, ei, bb->preds)
-       if (flags & TDF_SLIM)
-         {
-           pp_string (buffer, " ");
-           if (e->src == ENTRY_BLOCK_PTR)
-             pp_string (buffer, "ENTRY");
-           else
-             pp_decimal_int (buffer, e->src->index);
-         }
-       else
-         dump_edge_info (buffer->buffer->stream, e, 0);
-      pp_newline (buffer);
-    }
-  else
-    {
-      stmt = first_stmt (bb);
-      if (!stmt || TREE_CODE (stmt) != LABEL_EXPR)
-       {
-         INDENT (indent - 2);
-         pp_string (buffer, "<bb ");
-         pp_decimal_int (buffer, bb->index);
-         pp_string (buffer, ">:");
-         pp_newline (buffer);
-       }
-    }
-  pp_write_text_to_stream (buffer);
-  check_bb_profile (bb, buffer->buffer->stream);
-}
-
-/* Dumps end of basic block BB to buffer BUFFER indented by INDENT
-   spaces.  */
-
-static void
-dump_bb_end (pretty_printer *buffer, basic_block bb, int indent, int flags)
-{
-  edge e;
-  edge_iterator ei;
-
-  INDENT (indent);
-  pp_string (buffer, "# SUCC:");
-  pp_write_text_to_stream (buffer);
-  FOR_EACH_EDGE (e, ei, bb->succs)
-    if (flags & TDF_SLIM)
-      {
-       pp_string (buffer, " ");
-       if (e->dest == EXIT_BLOCK_PTR)
-         pp_string (buffer, "EXIT");
-       else
-         pp_decimal_int (buffer, e->dest->index);
-      }
-    else
-      dump_edge_info (buffer->buffer->stream, e, 1);
-  pp_newline (buffer);
-}
-
-/* Dump PHI nodes of basic block BB to BUFFER with details described
-   by FLAGS and indented by INDENT spaces.  */
-
-static void
-dump_phi_nodes (pretty_printer *buffer, basic_block bb, int indent, int flags)
-{
-  tree phi = phi_nodes (bb);
-  if (!phi)
-    return;
-
-  for (; phi; phi = PHI_CHAIN (phi))
-    {
-      if (is_gimple_reg (PHI_RESULT (phi)) || (flags & TDF_VOPS))
-        {
-          INDENT (indent);
-          pp_string (buffer, "# ");
-          dump_generic_node (buffer, phi, indent, flags, false);
-          pp_newline (buffer);
-         if (flags & TDF_VERBOSE)
-           print_node (buffer->buffer->stream, "", phi, indent);
-        }
-    }
-}
-
-
-/* Dump jump to basic block BB that is represented implicitly in the cfg
-   to BUFFER.  */
-
-static void
-pp_cfg_jump (pretty_printer *buffer, basic_block bb)
-{
-  tree stmt;
-
-  stmt = first_stmt (bb);
-
-  pp_string (buffer, "goto <bb ");
-  pp_decimal_int (buffer, bb->index);
-  pp_string (buffer, ">");
-  if (stmt && TREE_CODE (stmt) == LABEL_EXPR)
-    {
-      pp_string (buffer, " (");
-      dump_generic_node (buffer, LABEL_EXPR_LABEL (stmt), 0, 0, false);
-      pp_string (buffer, ")");
-    }
-  pp_semicolon (buffer);
-}
-
-/* Dump edges represented implicitly in basic block BB to BUFFER, indented
-   by INDENT spaces, with details given by FLAGS.  */
-
-static void
-dump_implicit_edges (pretty_printer *buffer, basic_block bb, int indent,
-                    int flags)
-{
-  edge e;
-  edge_iterator ei;
-  tree stmt;
-
-  stmt = last_stmt (bb);
-  if (stmt && TREE_CODE (stmt) == COND_EXPR)
-    {
-      edge true_edge, false_edge;
-
-      /* When we are emitting the code or changing CFG, it is possible that
-        the edges are not yet created.  When we are using debug_bb in such
-        a situation, we do not want it to crash.  */
-      if (EDGE_COUNT (bb->succs) != 2)
-       return;
-      extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
-
-      INDENT (indent + 2);
-      pp_cfg_jump (buffer, true_edge->dest);
-      newline_and_indent (buffer, indent);
-      pp_string (buffer, "else");
-      newline_and_indent (buffer, indent + 2);
-      pp_cfg_jump (buffer, false_edge->dest);
-      pp_newline (buffer);
-      return;
-    }
-
-  /* If there is a fallthru edge, we may need to add an artificial goto to the
-     dump.  */
-  FOR_EACH_EDGE (e, ei, bb->succs)
-    if (e->flags & EDGE_FALLTHRU)
-      break;
-  if (e && e->dest != bb->next_bb)
-    {
-      INDENT (indent);
-
-      if ((flags & TDF_LINENO) && e->goto_locus != UNKNOWN_LOCATION)
-       {
-         expanded_location goto_xloc;
-         goto_xloc = expand_location (e->goto_locus);
-         pp_character (buffer, '[');
-         if (goto_xloc.file)
-           {
-             pp_string (buffer, goto_xloc.file);
-             pp_string (buffer, " : ");
-           }
-         pp_decimal_int (buffer, goto_xloc.line);
-         pp_string (buffer, "] ");
-       }
-
-      pp_cfg_jump (buffer, e->dest);
-      pp_newline (buffer);
-    }
-}
-
-/* Dumps basic block BB to buffer BUFFER with details described by FLAGS and
-   indented by INDENT spaces.  */
-
-static void
-dump_generic_bb_buff (pretty_printer *buffer, basic_block bb,
-                     int indent, int flags)
-{
-  block_stmt_iterator bsi;
-  tree stmt;
-  int label_indent = indent - 2;
-
-  if (label_indent < 0)
-    label_indent = 0;
-
-  dump_bb_header (buffer, bb, indent, flags);
-
-  dump_phi_nodes (buffer, bb, indent, flags);
-
-  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
-    {
-      int curr_indent;
-
-      stmt = bsi_stmt (bsi);
-
-      curr_indent = TREE_CODE (stmt) == LABEL_EXPR ? label_indent : indent;
-
-      INDENT (curr_indent);
-      dump_generic_node (buffer, stmt, curr_indent, flags, true);
-      pp_newline (buffer);
-      dump_histograms_for_stmt (cfun, buffer->buffer->stream, stmt);
-      if (flags & TDF_VERBOSE)
-       print_node (buffer->buffer->stream, "", stmt, curr_indent);
-    }
-
-  dump_implicit_edges (buffer, bb, indent, flags);
-
-  if (flags & TDF_BLOCKS)
-    dump_bb_end (buffer, bb, indent, flags);
-}
index 405f0d8..20ded1b 100644
@@ -164,12 +164,13 @@ tree_init_edge_profiler (void)
 
 /* Output instructions as GIMPLE trees to increment the edge 
    execution count, and insert them on E.  We rely on 
-   bsi_insert_on_edge to preserve the order.  */
+   gsi_insert_on_edge to preserve the order.  */
 
 static void
 tree_gen_edge_profiler (int edgeno, edge e)
 {
-  tree ref, one, stmt1, stmt2, stmt3;
+  tree ref, one;
+  gimple stmt1, stmt2, stmt3;
 
   /* We share one temporary variable declaration per function.  This
      gets re-set in tree_profiling.  */
@@ -177,26 +178,24 @@ tree_gen_edge_profiler (int edgeno, edge e)
     gcov_type_tmp_var = create_tmp_var (gcov_type_node, "PROF_edge_counter");
   ref = tree_coverage_counter_ref (GCOV_COUNTER_ARCS, edgeno);
   one = build_int_cst (gcov_type_node, 1);
-  stmt1 = build_gimple_modify_stmt (gcov_type_tmp_var, ref);
-  stmt2 = build_gimple_modify_stmt (gcov_type_tmp_var,
-                                   build2 (PLUS_EXPR, gcov_type_node,
-                                           gcov_type_tmp_var, one));
-  stmt3 = build_gimple_modify_stmt (unshare_expr (ref), gcov_type_tmp_var);
-  bsi_insert_on_edge (e, stmt1);
-  bsi_insert_on_edge (e, stmt2);
-  bsi_insert_on_edge (e, stmt3);
+  stmt1 = gimple_build_assign (gcov_type_tmp_var, ref);
+  stmt2 = gimple_build_assign_with_ops (PLUS_EXPR, gcov_type_tmp_var,
+                                       gcov_type_tmp_var, one);
+  stmt3 = gimple_build_assign (unshare_expr (ref), gcov_type_tmp_var);
+  gsi_insert_on_edge (e, stmt1);
+  gsi_insert_on_edge (e, stmt2);
+  gsi_insert_on_edge (e, stmt3);
 }
 
-/* Emits code to get VALUE to instrument at BSI, and returns the
+/* Emits code to get VALUE to instrument at GSI, and returns the
    variable containing the value.  */
 
 static tree
-prepare_instrumented_value (block_stmt_iterator *bsi,
-                           histogram_value value)
+prepare_instrumented_value (gimple_stmt_iterator *gsi, histogram_value value)
 {
   tree val = value->hvalue.value;
-  return force_gimple_operand_bsi (bsi, fold_convert (gcov_type_node, val),
-                                  true, NULL_TREE, true, BSI_SAME_STMT);
+  return force_gimple_operand_gsi (gsi, fold_convert (gcov_type_node, val),
+                                  true, NULL_TREE, true, GSI_SAME_STMT);
 }
 
 /* Output instructions as GIMPLE trees to increment the interval histogram 
@@ -206,20 +205,23 @@ prepare_instrumented_value (block_stmt_iterator *bsi,
 static void
 tree_gen_interval_profiler (histogram_value value, unsigned tag, unsigned base)
 {
-  tree stmt = value->hvalue.stmt;
-  block_stmt_iterator bsi = bsi_for_stmt (stmt);
+  gimple stmt = value->hvalue.stmt;
+  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
   tree ref = tree_coverage_counter_ref (tag, base), ref_ptr;
-  tree call, val;
-  tree start = build_int_cst_type (integer_type_node, value->hdata.intvl.int_start);
-  tree steps = build_int_cst_type (unsigned_type_node, value->hdata.intvl.steps);
+  gimple call;
+  tree val;
+  tree start = build_int_cst_type (integer_type_node,
+                                  value->hdata.intvl.int_start);
+  tree steps = build_int_cst_type (unsigned_type_node,
+                                  value->hdata.intvl.steps);
   
-  ref_ptr = force_gimple_operand_bsi (&bsi,
+  ref_ptr = force_gimple_operand_gsi (&gsi,
                                      build_addr (ref, current_function_decl),
-                                     true, NULL_TREE, true, BSI_SAME_STMT);
-  val = prepare_instrumented_value (&bsi, value);
-  call = build_call_expr (tree_interval_profiler_fn, 4,
-                         ref_ptr, val, start, steps);
-  bsi_insert_before (&bsi, call, BSI_SAME_STMT);
+                                     true, NULL_TREE, true, GSI_SAME_STMT);
+  val = prepare_instrumented_value (&gsi, value);
+  call = gimple_build_call (tree_interval_profiler_fn, 4,
+                           ref_ptr, val, start, steps);
+  gsi_insert_before (&gsi, call, GSI_SAME_STMT);
 }
 
 /* Output instructions as GIMPLE trees to increment the power of two histogram 
@@ -229,16 +231,17 @@ tree_gen_interval_profiler (histogram_value value, unsigned tag, unsigned base)
 static void
 tree_gen_pow2_profiler (histogram_value value, unsigned tag, unsigned base)
 {
-  tree stmt = value->hvalue.stmt;
-  block_stmt_iterator bsi = bsi_for_stmt (stmt);
+  gimple stmt = value->hvalue.stmt;
+  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
   tree ref_ptr = tree_coverage_counter_addr (tag, base);
-  tree call, val;
+  gimple call;
+  tree val;
   
-  ref_ptr = force_gimple_operand_bsi (&bsi, ref_ptr,
-                                     true, NULL_TREE, true, BSI_SAME_STMT);
-  val = prepare_instrumented_value (&bsi, value);
-  call = build_call_expr (tree_pow2_profiler_fn, 2, ref_ptr, val);
-  bsi_insert_before (&bsi, call, BSI_SAME_STMT);
+  ref_ptr = force_gimple_operand_gsi (&gsi, ref_ptr,
+                                     true, NULL_TREE, true, GSI_SAME_STMT);
+  val = prepare_instrumented_value (&gsi, value);
+  call = gimple_build_call (tree_pow2_profiler_fn, 2, ref_ptr, val);
+  gsi_insert_before (&gsi, call, GSI_SAME_STMT);
 }
 
 /* Output instructions as GIMPLE trees for code to find the most common value.
@@ -248,16 +251,17 @@ tree_gen_pow2_profiler (histogram_value value, unsigned tag, unsigned base)
 static void
 tree_gen_one_value_profiler (histogram_value value, unsigned tag, unsigned base)
 {
-  tree stmt = value->hvalue.stmt;
-  block_stmt_iterator bsi = bsi_for_stmt (stmt);
+  gimple stmt = value->hvalue.stmt;
+  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
   tree ref_ptr = tree_coverage_counter_addr (tag, base);
-  tree call, val;
+  gimple call;
+  tree val;
   
-  ref_ptr = force_gimple_operand_bsi (&bsi, ref_ptr,
-                                     true, NULL_TREE, true, BSI_SAME_STMT);
-  val = prepare_instrumented_value (&bsi, value);
-  call = build_call_expr (tree_one_value_profiler_fn, 2, ref_ptr, val);
-  bsi_insert_before (&bsi, call, BSI_SAME_STMT);
+  ref_ptr = force_gimple_operand_gsi (&gsi, ref_ptr,
+                                     true, NULL_TREE, true, GSI_SAME_STMT);
+  val = prepare_instrumented_value (&gsi, value);
+  call = gimple_build_call (tree_one_value_profiler_fn, 2, ref_ptr, val);
+  gsi_insert_before (&gsi, call, GSI_SAME_STMT);
 }
 
 
@@ -270,13 +274,14 @@ tree_gen_one_value_profiler (histogram_value value, unsigned tag, unsigned base)
 static void
 tree_gen_ic_profiler (histogram_value value, unsigned tag, unsigned base)
 {
-  tree tmp1, stmt1, stmt2, stmt3;
-  tree stmt = value->hvalue.stmt;
-  block_stmt_iterator bsi = bsi_for_stmt (stmt);
+  tree tmp1;
+  gimple stmt1, stmt2, stmt3;
+  gimple stmt = value->hvalue.stmt;
+  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
   tree ref_ptr = tree_coverage_counter_addr (tag, base);
 
-  ref_ptr = force_gimple_operand_bsi (&bsi, ref_ptr,
-                                     true, NULL_TREE, true, BSI_SAME_STMT);
+  ref_ptr = force_gimple_operand_gsi (&gsi, ref_ptr,
+                                     true, NULL_TREE, true, GSI_SAME_STMT);
 
   /* Insert code:
     
@@ -285,13 +290,13 @@ tree_gen_ic_profiler (histogram_value value, unsigned tag, unsigned base)
    */
 
   tmp1 = create_tmp_var (ptr_void, "PROF");
-  stmt1 = build_gimple_modify_stmt (ic_gcov_type_ptr_var, ref_ptr);
-  stmt2 = build_gimple_modify_stmt (tmp1, unshare_expr (value->hvalue.value));
-  stmt3 = build_gimple_modify_stmt (ic_void_ptr_var, tmp1);
+  stmt1 = gimple_build_assign (ic_gcov_type_ptr_var, ref_ptr);
+  stmt2 = gimple_build_assign (tmp1, unshare_expr (value->hvalue.value));
+  stmt3 = gimple_build_assign (ic_void_ptr_var, tmp1);
 
-  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
-  bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
-  bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);
+  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
+  gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
+  gsi_insert_before (&gsi, stmt3, GSI_SAME_STMT);
 }
 
 
@@ -304,11 +309,11 @@ static void
 tree_gen_ic_func_profiler (void)
 {
   struct cgraph_node * c_node = cgraph_node (current_function_decl);
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   edge e;
   basic_block bb;
   edge_iterator ei;
-  tree stmt1, stmt2;
+  gimple stmt1, stmt2;
   tree tree_uid, cur_func;
 
   if (!c_node->needed)
@@ -321,30 +326,30 @@ tree_gen_ic_func_profiler (void)
       tree void0;
 
       bb = split_edge (e);
-      bsi = bsi_start (bb);
+      gsi = gsi_start_bb (bb);
 
-      cur_func = force_gimple_operand_bsi (&bsi,
+      cur_func = force_gimple_operand_gsi (&gsi,
                                           build_addr (current_function_decl, 
                                                       current_function_decl),
                                           true, NULL_TREE,
-                                          true, BSI_SAME_STMT);
+                                          true, GSI_SAME_STMT);
       tree_uid = build_int_cst (gcov_type_node, c_node->pid);
-      stmt1 = build_call_expr (tree_indirect_call_profiler_fn, 4,
-                              ic_gcov_type_ptr_var,
-                              tree_uid,
-                              cur_func,
-                              ic_void_ptr_var);
-      bsi_insert_after (&bsi, stmt1, BSI_NEW_STMT);
+      stmt1 = gimple_build_call (tree_indirect_call_profiler_fn, 4,
+                                ic_gcov_type_ptr_var,
+                                tree_uid,
+                                cur_func,
+                                ic_void_ptr_var);
+      gsi_insert_after (&gsi, stmt1, GSI_NEW_STMT);
 
       gcc_assert (EDGE_COUNT (bb->succs) == 1);
       bb = split_edge (EDGE_I (bb->succs, 0));
-      bsi = bsi_start (bb);
+      gsi = gsi_start_bb (bb);
       /* Set __gcov_indirect_call_callee to 0,
          so that calls from other modules won't get misattributed
         to the last caller of the current callee. */
       void0 = build_int_cst (build_pointer_type (void_type_node), 0);
-      stmt2 = build_gimple_modify_stmt (ic_void_ptr_var, void0);
-      bsi_insert_after (&bsi, stmt2, BSI_NEW_STMT);
+      stmt2 = gimple_build_assign (ic_void_ptr_var, void0);
+      gsi_insert_after (&gsi, stmt2, GSI_NEW_STMT);
     }
 }
 
@@ -354,9 +359,9 @@ tree_gen_ic_func_profiler (void)
    section for counters, BASE is offset of the counter position.  */
 
 static void
-tree_gen_const_delta_profiler (histogram_value value ATTRIBUTE_UNUSED, 
-                               unsigned tag ATTRIBUTE_UNUSED,
-                               unsigned base ATTRIBUTE_UNUSED)
+tree_gen_const_delta_profiler (histogram_value value ATTRIBUTE_UNUSED,
+                              unsigned tag ATTRIBUTE_UNUSED,
+                              unsigned base ATTRIBUTE_UNUSED)
 {
   /* FIXME implement this.  */
 #ifdef ENABLE_CHECKING
@@ -372,17 +377,18 @@ tree_gen_const_delta_profiler (histogram_value value ATTRIBUTE_UNUSED,
 static void
 tree_gen_average_profiler (histogram_value value, unsigned tag, unsigned base)
 {
-  tree stmt = value->hvalue.stmt;
-  block_stmt_iterator bsi = bsi_for_stmt (stmt);
+  gimple stmt = value->hvalue.stmt;
+  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
   tree ref_ptr = tree_coverage_counter_addr (tag, base);
-  tree call, val;
+  gimple call;
+  tree val;
   
-  ref_ptr = force_gimple_operand_bsi (&bsi, ref_ptr,
+  ref_ptr = force_gimple_operand_gsi (&gsi, ref_ptr,
                                      true, NULL_TREE,
-                                     true, BSI_SAME_STMT);
-  val = prepare_instrumented_value (&bsi, value);
-  call = build_call_expr (tree_average_profiler_fn, 2, ref_ptr, val);
-  bsi_insert_before (&bsi, call, BSI_SAME_STMT);
+                                     true, GSI_SAME_STMT);
+  val = prepare_instrumented_value (&gsi, value);
+  call = gimple_build_call (tree_average_profiler_fn, 2, ref_ptr, val);
+  gsi_insert_before (&gsi, call, GSI_SAME_STMT);
 }
 
 /* Output instructions as GIMPLE trees to increment the ior histogram 
@@ -392,16 +398,17 @@ tree_gen_average_profiler (histogram_value value, unsigned tag, unsigned base)
 static void
 tree_gen_ior_profiler (histogram_value value, unsigned tag, unsigned base)
 {
-  tree stmt = value->hvalue.stmt;
-  block_stmt_iterator bsi = bsi_for_stmt (stmt);
+  gimple stmt = value->hvalue.stmt;
+  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
   tree ref_ptr = tree_coverage_counter_addr (tag, base);
-  tree call, val;
+  gimple call;
+  tree val;
   
-  ref_ptr = force_gimple_operand_bsi (&bsi, ref_ptr,
-                                     true, NULL_TREE, true, BSI_SAME_STMT);
-  val = prepare_instrumented_value (&bsi, value);
-  call = build_call_expr (tree_ior_profiler_fn, 2, ref_ptr, val);
-  bsi_insert_before (&bsi, call, BSI_SAME_STMT);
+  ref_ptr = force_gimple_operand_gsi (&gsi, ref_ptr,
+                                     true, NULL_TREE, true, GSI_SAME_STMT);
+  val = prepare_instrumented_value (&gsi, value);
+  call = gimple_build_call (tree_ior_profiler_fn, 2, ref_ptr, val);
+  gsi_insert_before (&gsi, call, GSI_SAME_STMT);
 }
 
 /* Return 1 if tree-based profiling is in effect, else 0.
@@ -414,7 +421,7 @@ do_tree_profiling (void)
   if (profile_arc_flag || flag_test_coverage || flag_branch_probabilities)
     {
       tree_register_profile_hooks ();
-      tree_register_value_prof_hooks ();
+      gimple_register_value_prof_hooks ();
       return true;
     }
   return false;
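
Illustration only, not part of the patch: each converted profiler generator above follows the same shape -- take the iterator at the instrumented statement, force the operands to valid GIMPLE, build the statement with gimple_build_assign / gimple_build_call, and insert it with gsi_insert_before (or gsi_insert_on_edge for edge instrumentation).  A condensed sketch of that pattern, with hypothetical FN, REF_PTR and VAL standing for the per-profiler operands:

static void
emit_profiler_call_sketch (gimple stmt, tree fn, tree ref_ptr, tree val)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  gimple call;

  /* Build the call as a GIMPLE tuple and insert it before STMT.  */
  call = gimple_build_call (fn, 2, ref_ptr, val);
  gsi_insert_before (&gsi, call, GSI_SAME_STMT);
}
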
index 57fe59b..67fcd08 100644
@@ -48,7 +48,7 @@ along with GCC; see the file COPYING3.  If not see
    Given a scalar variable to be analyzed, follow the SSA edge to
    its definition:
      
-   - When the definition is a GIMPLE_MODIFY_STMT: if the right hand side
+   - When the definition is a GIMPLE_ASSIGN: if the right hand side
    (RHS) of the definition cannot be statically analyzed, the answer
    of the analyzer is: "don't know".  
    Otherwise, for all the variables that are not yet analyzed in the
@@ -397,7 +397,7 @@ chrec_contains_symbols_defined_in_loop (const_tree chrec, unsigned loop_nb)
 
   if (TREE_CODE (chrec) == SSA_NAME)
     {
-      tree def = SSA_NAME_DEF_STMT (chrec);
+      gimple def = SSA_NAME_DEF_STMT (chrec);
       struct loop *def_loop = loop_containing_stmt (def);
       struct loop *loop = get_loop (loop_nb);
 
@@ -421,13 +421,13 @@ chrec_contains_symbols_defined_in_loop (const_tree chrec, unsigned loop_nb)
 /* Return true when PHI is a loop-phi-node.  */
 
 static bool
-loop_phi_node_p (tree phi)
+loop_phi_node_p (gimple phi)
 {
   /* The implementation of this function is based on the following
      property: "all the loop-phi-nodes of a loop are contained in the
      loop's header basic block".  */
 
-  return loop_containing_stmt (phi)->header == bb_for_stmt (phi);
+  return loop_containing_stmt (phi)->header == gimple_bb (phi);
 }
 
 /* Compute the scalar evolution for EVOLUTION_FN after crossing LOOP.
@@ -656,7 +656,7 @@ get_scalar_evolution (tree scalar)
 
 static tree
 add_to_evolution_1 (unsigned loop_nb, tree chrec_before, tree to_add,
-                   tree at_stmt)
+                   gimple at_stmt)
 {
   tree type, left, right;
   struct loop *loop = get_loop (loop_nb), *chloop;
@@ -853,7 +853,7 @@ add_to_evolution_1 (unsigned loop_nb, tree chrec_before, tree to_add,
 
 static tree 
 add_to_evolution (unsigned loop_nb, tree chrec_before, enum tree_code code,
-                 tree to_add, tree at_stmt)
+                 tree to_add, gimple at_stmt)
 {
   tree type = chrec_type (to_add);
   tree res = NULL_TREE;
@@ -918,47 +918,14 @@ set_nb_iterations_in_loop (struct loop *loop,
    scalar evolution analysis.  For the moment, greedily select all the
    loop nests we could analyze.  */
 
-/* Return true when it is possible to analyze the condition expression
-   EXPR.  */
-
-static bool
-analyzable_condition (const_tree expr)
-{
-  tree condition;
-  
-  if (TREE_CODE (expr) != COND_EXPR)
-    return false;
-  
-  condition = TREE_OPERAND (expr, 0);
-  
-  switch (TREE_CODE (condition))
-    {
-    case SSA_NAME:
-      return true;
-      
-    case LT_EXPR:
-    case LE_EXPR:
-    case GT_EXPR:
-    case GE_EXPR:
-    case EQ_EXPR:
-    case NE_EXPR:
-      return true;
-      
-    default:
-      return false;
-    }
-  
-  return false;
-}
-
 /* For a loop with a single exit edge, return the COND_EXPR that
    guards the exit edge.  If the expression is too difficult to
    analyze, then give up.  */
 
-tree
+gimple
 get_loop_exit_condition (const struct loop *loop)
 {
-  tree res = NULL_TREE;
+  gimple res = NULL;
   edge exit_edge = single_exit (loop);
   
   if (dump_file && (dump_flags & TDF_DETAILS))
@@ -966,16 +933,16 @@ get_loop_exit_condition (const struct loop *loop)
   
   if (exit_edge)
     {
-      tree expr;
+      gimple stmt;
       
-      expr = last_stmt (exit_edge->src);
-      if (analyzable_condition (expr))
-       res = expr;
+      stmt = last_stmt (exit_edge->src);
+      if (gimple_code (stmt) == GIMPLE_COND)
+       res = stmt;
     }
   
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
-      print_generic_expr (dump_file, res, 0);
+      print_gimple_stmt (dump_file, res, 0, 0);
       fprintf (dump_file, ")\n");
     }
   
@@ -986,7 +953,7 @@ get_loop_exit_condition (const struct loop *loop)
 
 static void 
 get_exit_conditions_rec (struct loop *loop, 
-                        VEC(tree,heap) **exit_conditions)
+                        VEC(gimple,heap) **exit_conditions)
 {
   if (!loop)
     return;
@@ -997,10 +964,10 @@ get_exit_conditions_rec (struct loop *loop,
   
   if (single_exit (loop))
     {
-      tree loop_condition = get_loop_exit_condition (loop);
+      gimple loop_condition = get_loop_exit_condition (loop);
       
       if (loop_condition)
-       VEC_safe_push (tree, heap, *exit_conditions, loop_condition);
+       VEC_safe_push (gimple, heap, *exit_conditions, loop_condition);
     }
 }
 
@@ -1008,7 +975,7 @@ get_exit_conditions_rec (struct loop *loop,
    initializes the EXIT_CONDITIONS array.  */
 
 static void
-select_loops_exit_conditions (VEC(tree,heap) **exit_conditions)
+select_loops_exit_conditions (VEC(gimple,heap) **exit_conditions)
 {
   struct loop *function_body = current_loops->tree_root;
   
@@ -1025,59 +992,23 @@ typedef enum t_bool {
 } t_bool;
 
 
-static t_bool follow_ssa_edge (struct loop *loop, tree, tree, tree *, int);
+static t_bool follow_ssa_edge (struct loop *loop, gimple, gimple, tree *, int);
 
-/* Follow the ssa edge into the right hand side RHS of an assignment.
+/* Follow the ssa edge into the binary expression RHS0 CODE RHS1.
    Return true if the strongly connected component has been found.  */
 
 static t_bool
-follow_ssa_edge_in_rhs (struct loop *loop, tree at_stmt, tree rhs, 
-                       tree halting_phi, tree *evolution_of_loop, int limit)
+follow_ssa_edge_binary (struct loop *loop, gimple at_stmt,
+                       tree type, tree rhs0, enum tree_code code, tree rhs1,
+                       gimple halting_phi, tree *evolution_of_loop, int limit)
 {
   t_bool res = t_false;
-  tree rhs0, rhs1;
-  tree type_rhs = TREE_TYPE (rhs);
   tree evol;
-  enum tree_code code;
-  
-  /* The RHS is one of the following cases:
-     - an SSA_NAME, 
-     - an INTEGER_CST,
-     - a PLUS_EXPR, 
-     - a POINTER_PLUS_EXPR, 
-     - a MINUS_EXPR,
-     - an ASSERT_EXPR,
-     - other cases are not yet handled.  */
-  code = TREE_CODE (rhs);
+
   switch (code)
     {
-    case NOP_EXPR:
-      /* This assignment is under the form "a_1 = (cast) rhs.  */
-      res = follow_ssa_edge_in_rhs (loop, at_stmt, TREE_OPERAND (rhs, 0),
-                                   halting_phi, evolution_of_loop, limit);
-      *evolution_of_loop = chrec_convert (TREE_TYPE (rhs),
-                                         *evolution_of_loop, at_stmt);
-      break;
-
-    case INTEGER_CST:
-      /* This assignment is under the form "a_1 = 7".  */
-      res = t_false;
-      break;
-      
-    case SSA_NAME:
-      /* This assignment is under the form: "a_1 = b_2".  */
-      res = follow_ssa_edge 
-       (loop, SSA_NAME_DEF_STMT (rhs), halting_phi, evolution_of_loop, limit);
-      break;
-      
     case POINTER_PLUS_EXPR:
     case PLUS_EXPR:
-      /* This case is under the form "rhs0 + rhs1".  */
-      rhs0 = TREE_OPERAND (rhs, 0);
-      rhs1 = TREE_OPERAND (rhs, 1);
-      STRIP_TYPE_NOPS (rhs0);
-      STRIP_TYPE_NOPS (rhs1);
-
       if (TREE_CODE (rhs0) == SSA_NAME)
        {
          if (TREE_CODE (rhs1) == SSA_NAME)
@@ -1092,13 +1023,12 @@ follow_ssa_edge_in_rhs (struct loop *loop, tree at_stmt, tree rhs,
 
              evol = *evolution_of_loop;
              res = follow_ssa_edge 
-               (loop, SSA_NAME_DEF_STMT (rhs0), halting_phi, 
-                &evol, limit);
+               (loop, SSA_NAME_DEF_STMT (rhs0), halting_phi, &evol, limit);
              
              if (res == t_true)
                *evolution_of_loop = add_to_evolution 
                  (loop->num, 
-                  chrec_convert (type_rhs, evol, at_stmt), 
+                  chrec_convert (type, evol, at_stmt), 
                   code, rhs1, at_stmt);
              
              else if (res == t_false)
@@ -1110,7 +1040,7 @@ follow_ssa_edge_in_rhs (struct loop *loop, tree at_stmt, tree rhs,
                  if (res == t_true)
                    *evolution_of_loop = add_to_evolution 
                      (loop->num, 
-                      chrec_convert (type_rhs, *evolution_of_loop, at_stmt), 
+                      chrec_convert (type, *evolution_of_loop, at_stmt), 
                       code, rhs0, at_stmt);
 
                  else if (res == t_dont_know)
@@ -1130,7 +1060,7 @@ follow_ssa_edge_in_rhs (struct loop *loop, tree at_stmt, tree rhs,
                 evolution_of_loop, limit);
              if (res == t_true)
                *evolution_of_loop = add_to_evolution 
-                 (loop->num, chrec_convert (type_rhs, *evolution_of_loop,
+                 (loop->num, chrec_convert (type, *evolution_of_loop,
                                             at_stmt),
                   code, rhs1, at_stmt);
 
@@ -1148,7 +1078,7 @@ follow_ssa_edge_in_rhs (struct loop *loop, tree at_stmt, tree rhs,
             evolution_of_loop, limit);
          if (res == t_true)
            *evolution_of_loop = add_to_evolution 
-             (loop->num, chrec_convert (type_rhs, *evolution_of_loop,
+             (loop->num, chrec_convert (type, *evolution_of_loop,
                                         at_stmt),
               code, rhs0, at_stmt);
 
@@ -1161,16 +1091,10 @@ follow_ssa_edge_in_rhs (struct loop *loop, tree at_stmt, tree rhs,
           "a = ... + ...".  */
        /* And there is nothing to do.  */
        res = t_false;
-      
       break;
       
     case MINUS_EXPR:
       /* This case is under the form "opnd0 = rhs0 - rhs1".  */
-      rhs0 = TREE_OPERAND (rhs, 0);
-      rhs1 = TREE_OPERAND (rhs, 1);
-      STRIP_TYPE_NOPS (rhs0);
-      STRIP_TYPE_NOPS (rhs1);
-
       if (TREE_CODE (rhs0) == SSA_NAME)
        {
          /* Match an assignment under the form: 
@@ -1186,7 +1110,7 @@ follow_ssa_edge_in_rhs (struct loop *loop, tree at_stmt, tree rhs,
                                 evolution_of_loop, limit);
          if (res == t_true)
            *evolution_of_loop = add_to_evolution 
-             (loop->num, chrec_convert (type_rhs, *evolution_of_loop, at_stmt),
+             (loop->num, chrec_convert (type, *evolution_of_loop, at_stmt),
               MINUS_EXPR, rhs1, at_stmt);
 
          else if (res == t_dont_know)
@@ -1197,14 +1121,72 @@ follow_ssa_edge_in_rhs (struct loop *loop, tree at_stmt, tree rhs,
           "a = ... - ...".  */
        /* And there is nothing to do.  */
        res = t_false;
-      
       break;
+
+    default:
+      res = t_false;
+    }
+
+  return res;
+}
     
+/* Follow the ssa edge into the expression EXPR.
+   Return true if the strongly connected component has been found.  */
+
+static t_bool
+follow_ssa_edge_expr (struct loop *loop, gimple at_stmt, tree expr, 
+                     gimple halting_phi, tree *evolution_of_loop, int limit)
+{
+  t_bool res = t_false;
+  tree rhs0, rhs1;
+  tree type = TREE_TYPE (expr);
+  enum tree_code code;
+  
+  /* The EXPR is one of the following cases:
+     - an SSA_NAME, 
+     - an INTEGER_CST,
+     - a PLUS_EXPR, 
+     - a POINTER_PLUS_EXPR, 
+     - a MINUS_EXPR,
+     - an ASSERT_EXPR,
+     - other cases are not yet handled.  */
+  code = TREE_CODE (expr);
+  switch (code)
+    {
+    case NOP_EXPR:
+      /* This assignment is under the form "a_1 = (cast) rhs.  */
+      res = follow_ssa_edge_expr (loop, at_stmt, TREE_OPERAND (expr, 0),
+                                 halting_phi, evolution_of_loop, limit);
+      *evolution_of_loop = chrec_convert (type, *evolution_of_loop, at_stmt);
+      break;
+
+    case INTEGER_CST:
+      /* This assignment is under the form "a_1 = 7".  */
+      res = t_false;
+      break;
+      
+    case SSA_NAME:
+      /* This assignment is under the form: "a_1 = b_2".  */
+      res = follow_ssa_edge 
+       (loop, SSA_NAME_DEF_STMT (expr), halting_phi, evolution_of_loop, limit);
+      break;
+      
+    case POINTER_PLUS_EXPR:
+    case PLUS_EXPR:
+    case MINUS_EXPR:
+      /* This case is under the form "rhs0 +- rhs1".  */
+      rhs0 = TREE_OPERAND (expr, 0);
+      rhs1 = TREE_OPERAND (expr, 1);
+      STRIP_TYPE_NOPS (rhs0);
+      STRIP_TYPE_NOPS (rhs1);
+      return follow_ssa_edge_binary (loop, at_stmt, type, rhs0, code, rhs1,
+                                    halting_phi, evolution_of_loop, limit);
+
     case ASSERT_EXPR:
       {
        /* This assignment is of the form: "a_1 = ASSERT_EXPR <a_2, ...>"
           It must be handled as a copy assignment of the form a_1 = a_2.  */
-       tree op0 = ASSERT_EXPR_VAR (rhs);
+       tree op0 = ASSERT_EXPR_VAR (expr);
        if (TREE_CODE (op0) == SSA_NAME)
          res = follow_ssa_edge (loop, SSA_NAME_DEF_STMT (op0),
                                 halting_phi, evolution_of_loop, limit);
@@ -1222,12 +1204,37 @@ follow_ssa_edge_in_rhs (struct loop *loop, tree at_stmt, tree rhs,
   return res;
 }
 
+/* Follow the ssa edge into the right hand side of an assignment STMT.
+   Return true if the strongly connected component has been found.  */
+
+static t_bool
+follow_ssa_edge_in_rhs (struct loop *loop, gimple stmt,
+                       gimple halting_phi, tree *evolution_of_loop, int limit)
+{
+  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
+  enum tree_code code = gimple_assign_rhs_code (stmt);
+
+  switch (get_gimple_rhs_class (code))
+    {
+    case GIMPLE_BINARY_RHS:
+      return follow_ssa_edge_binary (loop, stmt, type,
+                                    gimple_assign_rhs1 (stmt), code,
+                                    gimple_assign_rhs2 (stmt),
+                                    halting_phi, evolution_of_loop, limit);
+    case GIMPLE_SINGLE_RHS:
+      return follow_ssa_edge_expr (loop, stmt, gimple_assign_rhs1 (stmt),
+                                  halting_phi, evolution_of_loop, limit);
+    default:
+      return t_false;
+    }
+}
+
 /* Checks whether the I-th argument of a PHI comes from a backedge.  */
 
 static bool
-backedge_phi_arg_p (const_tree phi, int i)
+backedge_phi_arg_p (gimple phi, int i)
 {
-  const_edge e = PHI_ARG_EDGE (phi, i);
+  const_edge e = gimple_phi_arg_edge (phi, i);
 
   /* We would in fact like to test EDGE_DFS_BACK here, but we do not care
      about updating it anywhere, and this should work as well most of the
@@ -1245,8 +1252,8 @@ backedge_phi_arg_p (const_tree phi, int i)
 static inline t_bool
 follow_ssa_edge_in_condition_phi_branch (int i,
                                         struct loop *loop, 
-                                        tree condition_phi, 
-                                        tree halting_phi,
+                                        gimple condition_phi, 
+                                        gimple halting_phi,
                                         tree *evolution_of_branch,
                                         tree init_cond, int limit)
 {
@@ -1280,11 +1287,11 @@ follow_ssa_edge_in_condition_phi_branch (int i,
 
 static t_bool
 follow_ssa_edge_in_condition_phi (struct loop *loop,
-                                 tree condition_phi, 
-                                 tree halting_phi, 
+                                 gimple condition_phi, 
+                                 gimple halting_phi, 
                                  tree *evolution_of_loop, int limit)
 {
-  int i;
+  int i, n;
   tree init = *evolution_of_loop;
   tree evolution_of_branch;
   t_bool res = follow_ssa_edge_in_condition_phi_branch (0, loop, condition_phi,
@@ -1297,10 +1304,11 @@ follow_ssa_edge_in_condition_phi (struct loop *loop,
   *evolution_of_loop = evolution_of_branch;
 
   /* If the phi node is just a copy, do not increase the limit.  */
-  if (PHI_NUM_ARGS (condition_phi) > 1)
+  n = gimple_phi_num_args (condition_phi);
+  if (n > 1)
     limit++;
 
-  for (i = 1; i < PHI_NUM_ARGS (condition_phi); i++)
+  for (i = 1; i < n; i++)
     {
       /* Quickly give up when the evolution of one of the branches is
         not known.  */
@@ -1328,8 +1336,8 @@ follow_ssa_edge_in_condition_phi (struct loop *loop,
 
 static t_bool
 follow_ssa_edge_inner_loop_phi (struct loop *outer_loop,
-                               tree loop_phi_node, 
-                               tree halting_phi,
+                               gimple loop_phi_node, 
+                               gimple halting_phi,
                                tree *evolution_of_loop, int limit)
 {
   struct loop *loop = loop_containing_stmt (loop_phi_node);
@@ -1340,19 +1348,19 @@ follow_ssa_edge_inner_loop_phi (struct loop *outer_loop,
   if (ev == PHI_RESULT (loop_phi_node))
     {
       t_bool res = t_false;
-      int i;
+      int i, n = gimple_phi_num_args (loop_phi_node);
 
-      for (i = 0; i < PHI_NUM_ARGS (loop_phi_node); i++)
+      for (i = 0; i < n; i++)
        {
          tree arg = PHI_ARG_DEF (loop_phi_node, i);
          basic_block bb;
 
          /* Follow the edges that exit the inner loop.  */
-         bb = PHI_ARG_EDGE (loop_phi_node, i)->src;
+         bb = gimple_phi_arg_edge (loop_phi_node, i)->src;
          if (!flow_bb_inside_loop_p (loop, bb))
-           res = follow_ssa_edge_in_rhs (outer_loop, loop_phi_node,
-                                         arg, halting_phi,
-                                         evolution_of_loop, limit);
+           res = follow_ssa_edge_expr (outer_loop, loop_phi_node,
+                                       arg, halting_phi,
+                                       evolution_of_loop, limit);
          if (res == t_true)
            break;
        }
@@ -1366,20 +1374,20 @@ follow_ssa_edge_inner_loop_phi (struct loop *outer_loop,
 
   /* Otherwise, compute the overall effect of the inner loop.  */
   ev = compute_overall_effect_of_inner_loop (loop, ev);
-  return follow_ssa_edge_in_rhs (outer_loop, loop_phi_node, ev, halting_phi,
-                                evolution_of_loop, limit);
+  return follow_ssa_edge_expr (outer_loop, loop_phi_node, ev, halting_phi,
+                              evolution_of_loop, limit);
 }
 
 /* Follow an SSA edge from a loop-phi-node to itself, constructing a
    path that is analyzed on the return walk.  */
 
 static t_bool
-follow_ssa_edge (struct loop *loop, tree def, tree halting_phi,
+follow_ssa_edge (struct loop *loop, gimple def, gimple halting_phi,
                 tree *evolution_of_loop, int limit)
 {
   struct loop *def_loop;
   
-  if (TREE_CODE (def) == NOP_EXPR)
+  if (gimple_nop_p (def))
     return t_false;
   
   /* Give up if the path is longer than the MAX that we allow.  */
@@ -1388,9 +1396,9 @@ follow_ssa_edge (struct loop *loop, tree def, tree halting_phi,
   
   def_loop = loop_containing_stmt (def);
   
-  switch (TREE_CODE (def))
+  switch (gimple_code (def))
     {
-    case PHI_NODE:
+    case GIMPLE_PHI:
       if (!loop_phi_node_p (def))
        /* DEF is a condition-phi-node.  Follow the branches, and
           record their evolutions.  Finally, merge the collected
@@ -1419,15 +1427,13 @@ follow_ssa_edge (struct loop *loop, tree def, tree halting_phi,
       /* Outer loop.  */
       return t_false;
 
-    case GIMPLE_MODIFY_STMT:
-      return follow_ssa_edge_in_rhs (loop, def,
-                                    GIMPLE_STMT_OPERAND (def, 1), 
-                                    halting_phi, 
+    case GIMPLE_ASSIGN:
+      return follow_ssa_edge_in_rhs (loop, def, halting_phi, 
                                     evolution_of_loop, limit);
       
     default:
       /* At this level of abstraction, the program is just a set
-        of GIMPLE_MODIFY_STMTs and PHI_NODEs.  In principle there is no
+        of GIMPLE_ASSIGNs and PHI_NODEs.  In principle there is no
         other node to be handled.  */
       return t_false;
     }
@@ -1439,10 +1445,10 @@ follow_ssa_edge (struct loop *loop, tree def, tree halting_phi,
    function from LOOP_PHI_NODE to LOOP_PHI_NODE in the loop.  */
 
 static tree
-analyze_evolution_in_loop (tree loop_phi_node, 
+analyze_evolution_in_loop (gimple loop_phi_node, 
                           tree init_cond)
 {
-  int i;
+  int i, n = gimple_phi_num_args (loop_phi_node);
   tree evolution_function = chrec_not_analyzed_yet;
   struct loop *loop = loop_containing_stmt (loop_phi_node);
   basic_block bb;
@@ -1451,18 +1457,19 @@ analyze_evolution_in_loop (tree loop_phi_node,
     {
       fprintf (dump_file, "(analyze_evolution_in_loop \n");
       fprintf (dump_file, "  (loop_phi_node = ");
-      print_generic_expr (dump_file, loop_phi_node, 0);
+      print_gimple_stmt (dump_file, loop_phi_node, 0, 0);
       fprintf (dump_file, ")\n");
     }
   
-  for (i = 0; i < PHI_NUM_ARGS (loop_phi_node); i++)
+  for (i = 0; i < n; i++)
     {
       tree arg = PHI_ARG_DEF (loop_phi_node, i);
-      tree ssa_chain, ev_fn;
+      gimple ssa_chain;
+      tree ev_fn;
       t_bool res;
 
       /* Select the edges that enter the loop body.  */
-      bb = PHI_ARG_EDGE (loop_phi_node, i)->src;
+      bb = gimple_phi_arg_edge (loop_phi_node, i)->src;
       if (!flow_bb_inside_loop_p (loop, bb))
        continue;
       
@@ -1509,24 +1516,25 @@ analyze_evolution_in_loop (tree loop_phi_node,
    loop, and leaves this task to the on-demand tree reconstructor.  */
 
 static tree 
-analyze_initial_condition (tree loop_phi_node)
+analyze_initial_condition (gimple loop_phi_node)
 {
-  int i;
+  int i, n;
   tree init_cond = chrec_not_analyzed_yet;
-  struct loop *loop = bb_for_stmt (loop_phi_node)->loop_father;
+  struct loop *loop = loop_containing_stmt (loop_phi_node);
   
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
       fprintf (dump_file, "(analyze_initial_condition \n");
       fprintf (dump_file, "  (loop_phi_node = \n");
-      print_generic_expr (dump_file, loop_phi_node, 0);
+      print_gimple_stmt (dump_file, loop_phi_node, 0, 0);
       fprintf (dump_file, ")\n");
     }
   
-  for (i = 0; i < PHI_NUM_ARGS (loop_phi_node); i++)
+  n = gimple_phi_num_args (loop_phi_node);
+  for (i = 0; i < n; i++)
     {
       tree branch = PHI_ARG_DEF (loop_phi_node, i);
-      basic_block bb = PHI_ARG_EDGE (loop_phi_node, i)->src;
+      basic_block bb = gimple_phi_arg_edge (loop_phi_node, i)->src;
       
       /* When the branch is oriented to the loop's body, it does
         not contribute to the initial condition.  */
@@ -1565,7 +1573,7 @@ analyze_initial_condition (tree loop_phi_node)
 /* Analyze the scalar evolution for LOOP_PHI_NODE.  */
 
 static tree 
-interpret_loop_phi (struct loop *loop, tree loop_phi_node)
+interpret_loop_phi (struct loop *loop, gimple loop_phi_node)
 {
   tree res;
   struct loop *phi_loop = loop_containing_stmt (loop_phi_node);
@@ -1597,12 +1605,12 @@ interpret_loop_phi (struct loop *loop, tree loop_phi_node)
    analyzed.  */
 
 static tree
-interpret_condition_phi (struct loop *loop, tree condition_phi)
+interpret_condition_phi (struct loop *loop, gimple condition_phi)
 {
-  int i;
+  int i, n = gimple_phi_num_args (condition_phi);
   tree res = chrec_not_analyzed_yet;
   
-  for (i = 0; i < PHI_NUM_ARGS (condition_phi); i++)
+  for (i = 0; i < n; i++)
     {
       tree branch_chrec;
       
@@ -1621,88 +1629,83 @@ interpret_condition_phi (struct loop *loop, tree condition_phi)
   return res;
 }
 
-/* Interpret the right hand side of a GIMPLE_MODIFY_STMT OPND1.  If we didn't
+/* Interpret the operation RHS1 OP RHS2.  If we didn't
    analyze this node before, follow the definitions until ending
-   either on an analyzed GIMPLE_MODIFY_STMT, or on a loop-phi-node.  On the
+   either on an analyzed GIMPLE_ASSIGN, or on a loop-phi-node.  On the
    return path, this function propagates evolutions (ala constant copy
    propagation).  OPND1 is not a GIMPLE expression because we could
    analyze the effect of an inner loop: see interpret_loop_phi.  */
 
 static tree
-interpret_rhs_modify_stmt (struct loop *loop, tree at_stmt,
-                                 tree opnd1, tree type)
+interpret_rhs_expr (struct loop *loop, gimple at_stmt,
+                   tree type, tree rhs1, enum tree_code code, tree rhs2)
 {
-  tree res, opnd10, opnd11, chrec10, chrec11;
+  tree res, chrec1, chrec2;
+
+  if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
+    {
+      if (is_gimple_min_invariant (rhs1))
+       return chrec_convert (type, rhs1, at_stmt);
+
+      if (code == SSA_NAME)
+       return chrec_convert (type, analyze_scalar_evolution (loop, rhs1),
+                             at_stmt);
 
-  if (is_gimple_min_invariant (opnd1))
-    return chrec_convert (type, opnd1, at_stmt);
+      if (code == ASSERT_EXPR)
+       {
+         rhs1 = ASSERT_EXPR_VAR (rhs1);
+         return chrec_convert (type, analyze_scalar_evolution (loop, rhs1),
+                               at_stmt);
+       }
+
+      return chrec_dont_know;
+    }
 
-  switch (TREE_CODE (opnd1))
+  switch (code)
     {
     case POINTER_PLUS_EXPR:
-      opnd10 = TREE_OPERAND (opnd1, 0);
-      opnd11 = TREE_OPERAND (opnd1, 1);
-      chrec10 = analyze_scalar_evolution (loop, opnd10);
-      chrec11 = analyze_scalar_evolution (loop, opnd11);
-      chrec10 = chrec_convert (type, chrec10, at_stmt);
-      chrec11 = chrec_convert (sizetype, chrec11, at_stmt);
-      res = chrec_fold_plus (type, chrec10, chrec11);
+      chrec1 = analyze_scalar_evolution (loop, rhs1);
+      chrec2 = analyze_scalar_evolution (loop, rhs2);
+      chrec1 = chrec_convert (type, chrec1, at_stmt);
+      chrec2 = chrec_convert (sizetype, chrec2, at_stmt);
+      res = chrec_fold_plus (type, chrec1, chrec2);
       break;
 
     case PLUS_EXPR:
-      opnd10 = TREE_OPERAND (opnd1, 0);
-      opnd11 = TREE_OPERAND (opnd1, 1);
-      chrec10 = analyze_scalar_evolution (loop, opnd10);
-      chrec11 = analyze_scalar_evolution (loop, opnd11);
-      chrec10 = chrec_convert (type, chrec10, at_stmt);
-      chrec11 = chrec_convert (type, chrec11, at_stmt);
-      res = chrec_fold_plus (type, chrec10, chrec11);
+      chrec1 = analyze_scalar_evolution (loop, rhs1);
+      chrec2 = analyze_scalar_evolution (loop, rhs2);
+      chrec1 = chrec_convert (type, chrec1, at_stmt);
+      chrec2 = chrec_convert (type, chrec2, at_stmt);
+      res = chrec_fold_plus (type, chrec1, chrec2);
       break;
       
     case MINUS_EXPR:
-      opnd10 = TREE_OPERAND (opnd1, 0);
-      opnd11 = TREE_OPERAND (opnd1, 1);
-      chrec10 = analyze_scalar_evolution (loop, opnd10);
-      chrec11 = analyze_scalar_evolution (loop, opnd11);
-      chrec10 = chrec_convert (type, chrec10, at_stmt);
-      chrec11 = chrec_convert (type, chrec11, at_stmt);
-      res = chrec_fold_minus (type, chrec10, chrec11);
+      chrec1 = analyze_scalar_evolution (loop, rhs1);
+      chrec2 = analyze_scalar_evolution (loop, rhs2);
+      chrec1 = chrec_convert (type, chrec1, at_stmt);
+      chrec2 = chrec_convert (type, chrec2, at_stmt);
+      res = chrec_fold_minus (type, chrec1, chrec2);
       break;
 
     case NEGATE_EXPR:
-      opnd10 = TREE_OPERAND (opnd1, 0);
-      chrec10 = analyze_scalar_evolution (loop, opnd10);
-      chrec10 = chrec_convert (type, chrec10, at_stmt);
+      chrec1 = analyze_scalar_evolution (loop, rhs1);
+      chrec1 = chrec_convert (type, chrec1, at_stmt);
       /* TYPE may be integer, real or complex, so use fold_convert.  */
-      res = chrec_fold_multiply (type, chrec10,
+      res = chrec_fold_multiply (type, chrec1,
                                 fold_convert (type, integer_minus_one_node));
       break;
 
     case MULT_EXPR:
-      opnd10 = TREE_OPERAND (opnd1, 0);
-      opnd11 = TREE_OPERAND (opnd1, 1);
-      chrec10 = analyze_scalar_evolution (loop, opnd10);
-      chrec11 = analyze_scalar_evolution (loop, opnd11);
-      chrec10 = chrec_convert (type, chrec10, at_stmt);
-      chrec11 = chrec_convert (type, chrec11, at_stmt);
-      res = chrec_fold_multiply (type, chrec10, chrec11);
-      break;
-      
-    case SSA_NAME:
-      res = chrec_convert (type, analyze_scalar_evolution (loop, opnd1),
-                          at_stmt);
-      break;
-
-    case ASSERT_EXPR:
-      opnd10 = ASSERT_EXPR_VAR (opnd1);
-      res = chrec_convert (type, analyze_scalar_evolution (loop, opnd10),
-                          at_stmt);
+      chrec1 = analyze_scalar_evolution (loop, rhs1);
+      chrec2 = analyze_scalar_evolution (loop, rhs2);
+      chrec1 = chrec_convert (type, chrec1, at_stmt);
+      chrec2 = chrec_convert (type, chrec2, at_stmt);
+      res = chrec_fold_multiply (type, chrec1, chrec2);
       break;
       
     CASE_CONVERT:
-      opnd10 = TREE_OPERAND (opnd1, 0);
-      chrec10 = analyze_scalar_evolution (loop, opnd10);
-      res = chrec_convert (type, chrec10, at_stmt);
+      chrec1 = analyze_scalar_evolution (loop, rhs1);
+      res = chrec_convert (type, chrec1, at_stmt);
       break;
       
     default:
@@ -1713,6 +1716,39 @@ interpret_rhs_modify_stmt (struct loop *loop, tree at_stmt,
   return res;
 }
 
+/* Interpret the expression EXPR.  */
+
+static tree
+interpret_expr (struct loop *loop, gimple at_stmt, tree expr)
+{
+  enum tree_code code;
+  tree type = TREE_TYPE (expr), op0, op1;
+
+  if (automatically_generated_chrec_p (expr))
+    return expr;
+
+  if (TREE_CODE (expr) == POLYNOMIAL_CHREC)
+    return chrec_dont_know;
+
+  extract_ops_from_tree (expr, &code, &op0, &op1);
+
+  return interpret_rhs_expr (loop, at_stmt, type,
+                            op0, code, op1);
+}
+
+/* Interpret the rhs of the assignment STMT.  */
+
+static tree
+interpret_gimple_assign (struct loop *loop, gimple stmt)
+{
+  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
+  enum tree_code code = gimple_assign_rhs_code (stmt);
+
+  return interpret_rhs_expr (loop, stmt, type,
+                            gimple_assign_rhs1 (stmt), code,
+                            gimple_assign_rhs2 (stmt));
+}
+
 \f
 
 /* This section contains all the entry points: 
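
Illustration only, not part of the patch: with tuples an assignment's right-hand side is no longer a single tree, so interpret_gimple_assign above pulls the code and operands out of the statement with the gimple_assign_* accessors before dispatching to interpret_rhs_expr.  A small sketch of that decomposition for "a_1 = b_2 + 4" (the function name is hypothetical):

static void
decompose_assign_sketch (gimple stmt)
{
  tree lhs = gimple_assign_lhs (stmt);                  /* a_1 */
  enum tree_code code = gimple_assign_rhs_code (stmt);  /* PLUS_EXPR */
  tree rhs1 = gimple_assign_rhs1 (stmt);                /* b_2 */
  tree rhs2 = gimple_assign_rhs2 (stmt);                /* 4 */

  /* PLUS_EXPR carries two operands, hence a binary rhs class.  */
  gcc_assert (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS);
  (void) lhs; (void) rhs1; (void) rhs2;
}
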
@@ -1744,7 +1780,8 @@ compute_scalar_evolution_in_loop (struct loop *wrto_loop,
 static tree
 analyze_scalar_evolution_1 (struct loop *loop, tree var, tree res)
 {
-  tree def, type = TREE_TYPE (var);
+  tree type = TREE_TYPE (var);
+  gimple def;
   basic_block bb;
   struct loop *def_loop;
 
@@ -1752,10 +1789,10 @@ analyze_scalar_evolution_1 (struct loop *loop, tree var, tree res)
     return chrec_dont_know;
 
   if (TREE_CODE (var) != SSA_NAME)
-    return interpret_rhs_modify_stmt (loop, NULL_TREE, var, type);
+    return interpret_expr (loop, NULL, var);
 
   def = SSA_NAME_DEF_STMT (var);
-  bb = bb_for_stmt (def);
+  bb = gimple_bb (def);
   def_loop = bb ? bb->loop_father : NULL;
 
   if (bb == NULL
@@ -1783,14 +1820,13 @@ analyze_scalar_evolution_1 (struct loop *loop, tree var, tree res)
       goto set_and_end;
     }
 
-  switch (TREE_CODE (def))
+  switch (gimple_code (def))
     {
-    case GIMPLE_MODIFY_STMT:
-      res = interpret_rhs_modify_stmt (loop, def,
-                                      GIMPLE_STMT_OPERAND (def, 1), type);
+    case GIMPLE_ASSIGN:
+      res = interpret_gimple_assign (loop, def);
       break;
 
-    case PHI_NODE:
+    case GIMPLE_PHI:
       if (loop_phi_node_p (def))
        res = interpret_loop_phi (loop, def);
       else
@@ -1845,9 +1881,6 @@ analyze_scalar_evolution (struct loop *loop, tree var)
 
   res = analyze_scalar_evolution_1 (loop, var, get_scalar_evolution (var));
 
-  if (TREE_CODE (var) == SSA_NAME && res == chrec_dont_know)
-    res = var;
-
   if (dump_file && (dump_flags & TDF_DETAILS))
     fprintf (dump_file, ")\n");
 
@@ -1934,7 +1967,8 @@ loop_closed_phi_def (tree var)
 {
   struct loop *loop;
   edge exit;
-  tree phi;
+  gimple phi;
+  gimple_stmt_iterator psi;
 
   if (var == NULL_TREE
       || TREE_CODE (var) != SSA_NAME)
@@ -1945,9 +1979,12 @@ loop_closed_phi_def (tree var)
   if (!exit)
     return NULL_TREE;
 
-  for (phi = phi_nodes (exit->dest); phi; phi = PHI_CHAIN (phi))
-    if (PHI_ARG_DEF_FROM_EDGE (phi, exit) == var)
-      return PHI_RESULT (phi);
+  for (psi = gsi_start_phis (exit->dest); !gsi_end_p (psi); gsi_next (&psi))
+    {
+      phi = gsi_stmt (psi);
+      if (PHI_ARG_DEF_FROM_EDGE (phi, exit) == var)
+       return PHI_RESULT (phi);
+    }
 
   return NULL_TREE;
 }
@@ -1987,7 +2024,7 @@ instantiate_scev_1 (struct loop *instantiation_loop,
   switch (TREE_CODE (chrec))
     {
     case SSA_NAME:
-      def_bb = bb_for_stmt (SSA_NAME_DEF_STMT (chrec));
+      def_bb = gimple_bb (SSA_NAME_DEF_STMT (chrec));
 
       /* A parameter (or loop invariant and we do not want to include
         evolutions in outer loops), nothing to do.  */
@@ -2073,7 +2110,7 @@ instantiate_scev_1 (struct loop *instantiation_loop,
       if (CHREC_LEFT (chrec) != op0
          || CHREC_RIGHT (chrec) != op1)
        {
-         op1 = chrec_convert_rhs (chrec_type (op0), op1, NULL_TREE);
+         op1 = chrec_convert_rhs (chrec_type (op0), op1, NULL);
          chrec = build_polynomial_chrec (CHREC_VARIABLE (chrec), op0, op1);
        }
       return chrec;
@@ -2095,8 +2132,8 @@ instantiate_scev_1 (struct loop *instantiation_loop,
       if (TREE_OPERAND (chrec, 0) != op0
          || TREE_OPERAND (chrec, 1) != op1)
        {
-         op0 = chrec_convert (type, op0, NULL_TREE);
-         op1 = chrec_convert_rhs (type, op1, NULL_TREE);
+         op0 = chrec_convert (type, op0, NULL);
+         op1 = chrec_convert_rhs (type, op1, NULL);
          chrec = chrec_fold_plus (type, op0, op1);
        }
       return chrec;
@@ -2117,8 +2154,8 @@ instantiate_scev_1 (struct loop *instantiation_loop,
       if (TREE_OPERAND (chrec, 0) != op0
          || TREE_OPERAND (chrec, 1) != op1)
        {
-         op0 = chrec_convert (type, op0, NULL_TREE);
-         op1 = chrec_convert (type, op1, NULL_TREE);
+         op0 = chrec_convert (type, op0, NULL);
+         op1 = chrec_convert (type, op1, NULL);
          chrec = chrec_fold_minus (type, op0, op1);
        }
       return chrec;
@@ -2139,8 +2176,8 @@ instantiate_scev_1 (struct loop *instantiation_loop,
       if (TREE_OPERAND (chrec, 0) != op0
          || TREE_OPERAND (chrec, 1) != op1)
        {
-         op0 = chrec_convert (type, op0, NULL_TREE);
-         op1 = chrec_convert (type, op1, NULL_TREE);
+         op0 = chrec_convert (type, op0, NULL);
+         op1 = chrec_convert (type, op1, NULL);
          chrec = chrec_fold_multiply (type, op0, op1);
        }
       return chrec;
@@ -2168,7 +2205,7 @@ instantiate_scev_1 (struct loop *instantiation_loop,
       if (fold_conversions)
        return fold_convert (TREE_TYPE (chrec), op0);
 
-      return chrec_convert (TREE_TYPE (chrec), op0, NULL_TREE);
+      return chrec_convert (TREE_TYPE (chrec), op0, NULL);
 
     case SCEV_NOT_KNOWN:
       return chrec_dont_know;
@@ -2388,14 +2425,14 @@ number_of_exit_cond_executions (struct loop *loop)
    from the EXIT_CONDITIONS array.  */
 
 static void 
-number_of_iterations_for_all_loops (VEC(tree,heap) **exit_conditions)
+number_of_iterations_for_all_loops (VEC(gimple,heap) **exit_conditions)
 {
   unsigned int i;
   unsigned nb_chrec_dont_know_loops = 0;
   unsigned nb_static_loops = 0;
-  tree cond;
+  gimple cond;
   
-  for (i = 0; VEC_iterate (tree, *exit_conditions, i, cond); i++)
+  for (i = 0; VEC_iterate (gimple, *exit_conditions, i, cond); i++)
     {
       tree res = number_of_latch_executions (loop_containing_stmt (cond));
       if (chrec_contains_undetermined (res))
@@ -2540,33 +2577,37 @@ gather_chrec_stats (tree chrec, struct chrec_stats *stats)
    index.  This allows the parallelization of the loop.  */
 
 static void 
-analyze_scalar_evolution_for_all_loop_phi_nodes (VEC(tree,heap) **exit_conditions)
+analyze_scalar_evolution_for_all_loop_phi_nodes (VEC(gimple,heap) **exit_conditions)
 {
   unsigned int i;
   struct chrec_stats stats;
-  tree cond;
+  gimple cond, phi;
+  gimple_stmt_iterator psi;
   
   reset_chrecs_counters (&stats);
   
-  for (i = 0; VEC_iterate (tree, *exit_conditions, i, cond); i++)
+  for (i = 0; VEC_iterate (gimple, *exit_conditions, i, cond); i++)
     {
       struct loop *loop;
       basic_block bb;
-      tree phi, chrec;
+      tree chrec;
       
       loop = loop_containing_stmt (cond);
       bb = loop->header;
       
-      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
-       if (is_gimple_reg (PHI_RESULT (phi)))
-         {
-           chrec = instantiate_parameters
-             (loop, 
-              analyze_scalar_evolution (loop, PHI_RESULT (phi)));
+      for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
+       {
+         phi = gsi_stmt (psi);
+         if (is_gimple_reg (PHI_RESULT (phi)))
+           {
+             chrec = instantiate_parameters 
+                       (loop, 
+                        analyze_scalar_evolution (loop, PHI_RESULT (phi)));
            
-           if (dump_file && (dump_flags & TDF_STATS))
-             gather_chrec_stats (chrec, &stats);
-         }
+             if (dump_file && (dump_flags & TDF_STATS))
+               gather_chrec_stats (chrec, &stats);
+           }
+       }
     }
   
   if (dump_file && (dump_flags & TDF_STATS))
@@ -2671,10 +2712,10 @@ scev_reset (void)
    overflow (e.g.  because it is computed in signed arithmetics).  */
 
 bool
-simple_iv (struct loop *loop, tree stmt, tree op, affine_iv *iv,
+simple_iv (struct loop *loop, gimple stmt, tree op, affine_iv *iv,
           bool allow_nonconstant_step)
 {
-  basic_block bb = bb_for_stmt (stmt);
+  basic_block bb = gimple_bb (stmt);
   tree type, ev;
   bool folded_casts;
 
@@ -2730,16 +2771,16 @@ simple_iv (struct loop *loop, tree stmt, tree op, affine_iv *iv,
 void
 scev_analysis (void)
 {
-  VEC(tree,heap) *exit_conditions;
+  VEC(gimple,heap) *exit_conditions;
   
-  exit_conditions = VEC_alloc (tree, heap, 37);
+  exit_conditions = VEC_alloc (gimple, heap, 37);
   select_loops_exit_conditions (&exit_conditions);
 
   if (dump_file && (dump_flags & TDF_STATS))
     analyze_scalar_evolution_for_all_loop_phi_nodes (&exit_conditions);
   
   number_of_iterations_for_all_loops (&exit_conditions);
-  VEC_free (tree, heap, exit_conditions);
+  VEC_free (gimple, heap, exit_conditions);
 }
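
For readers following the conversion, the exit-condition vector above changes element type from tree to gimple, while the VEC iteration idiom itself is unchanged. Below is a minimal, illustrative sketch (not part of the patch) that walks such a vector with the 4.4-era VEC and scev calls shown in the hunks above; it assumes the usual GCC-internal includes (config.h, system.h, coretypes.h, tree.h, tree-flow.h, gimple.h).

    static unsigned
    count_known_niter_loops (VEC(gimple,heap) *exit_conditions)
    {
      unsigned i, known = 0;
      gimple cond;

      /* Each element is now a GIMPLE_COND statement rather than a COND_EXPR
         tree, but VEC_iterate is used exactly as before.  */
      for (i = 0; VEC_iterate (gimple, exit_conditions, i, cond); i++)
        {
          tree niter = number_of_latch_executions (loop_containing_stmt (cond));

          if (!chrec_contains_undetermined (niter))
            known++;
        }

      return known;
    }
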
 
 /* Finalize the scalar evolution analysis.  */
@@ -2765,11 +2806,13 @@ unsigned int
 scev_const_prop (void)
 {
   basic_block bb;
-  tree name, phi, next_phi, type, ev;
+  tree name, type, ev;
+  gimple phi, ass;
   struct loop *loop, *ex_loop;
   bitmap ssa_names_to_remove = NULL;
   unsigned i;
   loop_iterator li;
+  gimple_stmt_iterator psi;
 
   if (number_of_loops () <= 1)
     return 0;
@@ -2778,8 +2821,9 @@ scev_const_prop (void)
     {
       loop = bb->loop_father;
 
-      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+      for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
        {
+         phi = gsi_stmt (psi);
          name = PHI_RESULT (phi);
 
          if (!is_gimple_reg (name))
@@ -2815,11 +2859,13 @@ scev_const_prop (void)
 
       EXECUTE_IF_SET_IN_BITMAP (ssa_names_to_remove, 0, i, bi)
        {
+         gimple_stmt_iterator psi;
          name = ssa_name (i);
          phi = SSA_NAME_DEF_STMT (name);
 
-         gcc_assert (TREE_CODE (phi) == PHI_NODE);
-         remove_phi_node (phi, NULL, true);
+         gcc_assert (gimple_code (phi) == GIMPLE_PHI);
+         psi = gsi_for_stmt (phi);
+         remove_phi_node (&psi, true);
        }
 
       BITMAP_FREE (ssa_names_to_remove);
@@ -2830,8 +2876,8 @@ scev_const_prop (void)
   FOR_EACH_LOOP (li, loop, LI_FROM_INNERMOST)
     {
       edge exit;
-      tree def, rslt, ass, niter;
-      block_stmt_iterator bsi;
+      tree def, rslt, niter;
+      gimple_stmt_iterator bsi;
 
       /* If we do not know exact number of iterations of the loop, we cannot
         replace the final value.  */
@@ -2852,22 +2898,28 @@ scev_const_prop (void)
       /* Ensure that it is possible to insert new statements somewhere.  */
       if (!single_pred_p (exit->dest))
        split_loop_exit_edge (exit);
-      bsi = bsi_after_labels (exit->dest);
+      bsi = gsi_after_labels (exit->dest);
 
       ex_loop = superloop_at_depth (loop,
                                    loop_depth (exit->dest->loop_father) + 1);
 
-      for (phi = phi_nodes (exit->dest); phi; phi = next_phi)
+      for (psi = gsi_start_phis (exit->dest); !gsi_end_p (psi); )
        {
-         next_phi = PHI_CHAIN (phi);
+         phi = gsi_stmt (psi);
          rslt = PHI_RESULT (phi);
          def = PHI_ARG_DEF_FROM_EDGE (phi, exit);
          if (!is_gimple_reg (def))
-           continue;
+           {
+             gsi_next (&psi);
+             continue;
+           }
 
          if (!POINTER_TYPE_P (TREE_TYPE (def))
              && !INTEGRAL_TYPE_P (TREE_TYPE (def)))
-           continue;
+           {
+             gsi_next (&psi);
+             continue;
+           }
 
          def = analyze_scalar_evolution_in_loop (ex_loop, loop, def, NULL);
          def = compute_overall_effect_of_inner_loop (ex_loop, def);
@@ -2877,23 +2929,20 @@ scev_const_prop (void)
                 of some ssa names, which may cause problems if they appear
                 on abnormal edges.  */
              || contains_abnormal_ssa_name_p (def))
-           continue;
+           {
+             gsi_next (&psi);
+             continue;
+           }
 
          /* Eliminate the PHI node and replace it by a computation outside
             the loop.  */
          def = unshare_expr (def);
-         remove_phi_node (phi, NULL_TREE, false);
-
-         ass = build_gimple_modify_stmt (rslt, NULL_TREE);
-         SSA_NAME_DEF_STMT (rslt) = ass;
-         {
-           block_stmt_iterator dest = bsi;
-           bsi_insert_before (&dest, ass, BSI_NEW_STMT);
-           def = force_gimple_operand_bsi (&dest, def, false, NULL_TREE,
-                                           true, BSI_SAME_STMT);
-         }
-         GIMPLE_STMT_OPERAND (ass, 1) = def;
-         update_stmt (ass);
+         remove_phi_node (&psi, false);
+
+         def = force_gimple_operand_gsi (&bsi, def, false, NULL_TREE,
+                                         true, GSI_SAME_STMT);
+         ass = gimple_build_assign (rslt, def);
+         gsi_insert_before (&bsi, ass, GSI_SAME_STMT);
        }
     }
   return 0;
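
The scev_const_prop hunk above illustrates the general pattern that replaces phi_nodes ()/PHI_CHAIN () walks after tuplification: PHI nodes are visited through a gimple_stmt_iterator, and removal also goes through the iterator. A hedged, stand-alone sketch of that idiom follows; phi_is_obsolete_p is a hypothetical predicate, and callers are assumed to have already replaced all uses of each removed PHI result.

    static void
    prune_phis (basic_block bb, bool (*phi_is_obsolete_p) (gimple))
    {
      gimple_stmt_iterator psi;

      for (psi = gsi_start_phis (bb); !gsi_end_p (psi); )
        {
          gimple phi = gsi_stmt (psi);

          if (!phi_is_obsolete_p (phi))
            {
              /* Keep this PHI; advance the iterator explicitly.  */
              gsi_next (&psi);
              continue;
            }

          /* remove_phi_node advances PSI past the removed node, so there is
             no gsi_next here -- the same pattern as in scev_const_prop.  */
          remove_phi_node (&psi, false);
        }
    }
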
index 472b194..5d6d711 100644
@@ -23,7 +23,7 @@ along with GCC; see the file COPYING3.  If not see
 
 extern tree number_of_latch_executions (struct loop *);
 extern tree number_of_exit_cond_executions (struct loop *);
-extern tree get_loop_exit_condition (const struct loop *);
+extern gimple get_loop_exit_condition (const struct loop *);
 
 extern void scev_initialize (void);
 extern void scev_reset (void);
@@ -35,7 +35,7 @@ extern void gather_stats_on_scev_database (void);
 extern void scev_analysis (void);
 unsigned int scev_const_prop (void);
 
-extern bool simple_iv (struct loop *, tree, tree, affine_iv *, bool);
+extern bool simple_iv (struct loop *, gimple, tree, affine_iv *, bool);
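
With the new prototype, callers pass simple_iv the gimple statement in which OP is used; the statement's block is recovered internally via gimple_bb. A small illustrative caller, not from the patch: op_is_affine_iv_p is a hypothetical helper and assumes the affine_iv structure from tree-flow.h.

    static bool
    op_is_affine_iv_p (struct loop *loop, gimple stmt, tree op)
    {
      affine_iv iv;

      if (!simple_iv (loop, stmt, op, &iv, false))
        return false;

      /* Accept only induction variables with a constant step.  */
      return iv.step != NULL_TREE && TREE_CODE (iv.step) == INTEGER_CST;
    }
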
 
 /* Analyze all the parameters of the chrec that were left under a
    symbolic form.  LOOP is the loop in which symbolic names have to
index 21cf367..5ae1c51 100644
@@ -37,7 +37,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "langhooks.h"
 #include "tree-inline.h"
 #include "tree-flow.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "tree-dump.h"
 #include "tree-pass.h"
 #include "timevar.h"
@@ -208,8 +208,9 @@ extern void debug_sra_elt_name (struct sra_elt *);
 
 /* Forward declarations.  */
 static tree generate_element_ref (struct sra_elt *);
-static tree sra_build_assignment (tree dst, tree src);
-static void mark_all_v_defs (tree list);
+static gimple_seq sra_build_assignment (tree dst, tree src);
+static void mark_all_v_defs_seq (gimple_seq);
+static void mark_all_v_defs_stmt (gimple);
 
 \f
 /* Return true if DECL is an SRA candidate.  */
@@ -719,7 +720,7 @@ maybe_lookup_element_for_expr (tree expr)
    references, and categorize them.  */
 
 /* A set of callbacks for phases 2 and 4.  They'll be invoked for the
-   various kinds of references seen.  In all cases, *BSI is an iterator
+   various kinds of references seen.  In all cases, *GSI is an iterator
    pointing to the statement being processed.  */
 struct sra_walk_fns
 {
@@ -729,21 +730,21 @@ struct sra_walk_fns
      is a left-hand-side reference.  USE_ALL is true if we saw something we
      couldn't quite identify and had to force the use of the entire object.  */
   void (*use) (struct sra_elt *elt, tree *expr_p,
-              block_stmt_iterator *bsi, bool is_output, bool use_all);
+              gimple_stmt_iterator *gsi, bool is_output, bool use_all);
 
   /* Invoked when we have a copy between two scalarizable references.  */
   void (*copy) (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
-               block_stmt_iterator *bsi);
+               gimple_stmt_iterator *gsi);
 
   /* Invoked when ELT is initialized from a constant.  VALUE may be NULL,
      in which case it should be treated as an empty CONSTRUCTOR.  */
-  void (*init) (struct sra_elt *elt, tree value, block_stmt_iterator *bsi);
+  void (*init) (struct sra_elt *elt, tree value, gimple_stmt_iterator *gsi);
 
   /* Invoked when we have a copy between one scalarizable reference ELT
      and one non-scalarizable reference OTHER without side-effects. 
      IS_OUTPUT is true if ELT is on the left-hand side.  */
   void (*ldst) (struct sra_elt *elt, tree other,
-               block_stmt_iterator *bsi, bool is_output);
+               gimple_stmt_iterator *gsi, bool is_output);
 
   /* True during phase 2, false during phase 4.  */
   /* ??? This is a hack.  */
@@ -777,7 +778,7 @@ sra_find_candidate_decl (tree *tp, int *walk_subtrees,
    If we find one, invoke FNS->USE.  */
 
 static void
-sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output,
+sra_walk_expr (tree *expr_p, gimple_stmt_iterator *gsi, bool is_output,
               const struct sra_walk_fns *fns)
 {
   tree expr = *expr_p;
@@ -804,7 +805,7 @@ sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output,
            if (disable_scalarization)
              elt->cannot_scalarize = true;
            else
-             fns->use (elt, expr_p, bsi, is_output, use_all_p);
+             fns->use (elt, expr_p, gsi, is_output, use_all_p);
          }
        return;
 
@@ -881,6 +882,7 @@ sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output,
        goto use_all;
 
       case NOP_EXPR:
+      case CONVERT_EXPR:
        /* Similarly, a nop explicitly wants to look at an object in a
           type other than the one we've scalarized.  */
        goto use_all;
@@ -916,60 +918,62 @@ sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output,
       }
 }
 
-/* Walk a TREE_LIST of values looking for scalarizable aggregates.
+/* Walk the arguments of a GIMPLE_CALL looking for scalarizable aggregates.
    If we find one, invoke FNS->USE.  */
 
 static void
-sra_walk_tree_list (tree list, block_stmt_iterator *bsi, bool is_output,
-                   const struct sra_walk_fns *fns)
-{
-  tree op;
-  for (op = list; op ; op = TREE_CHAIN (op))
-    sra_walk_expr (&TREE_VALUE (op), bsi, is_output, fns);
-}
-
-/* Walk the arguments of a CALL_EXPR looking for scalarizable aggregates.
-   If we find one, invoke FNS->USE.  */
-
-static void
-sra_walk_call_expr (tree expr, block_stmt_iterator *bsi,
+sra_walk_gimple_call (gimple stmt, gimple_stmt_iterator *gsi,
                    const struct sra_walk_fns *fns)
 {
   int i;
-  int nargs = call_expr_nargs (expr);
+  int nargs = gimple_call_num_args (stmt);
+
   for (i = 0; i < nargs; i++)
-    sra_walk_expr (&CALL_EXPR_ARG (expr, i), bsi, false, fns);
+    sra_walk_expr (gimple_call_arg_ptr (stmt, i), gsi, false, fns);
+
+  if (gimple_call_lhs (stmt))
+    sra_walk_expr (gimple_call_lhs_ptr (stmt), gsi, true, fns);
 }
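
sra_walk_gimple_call above reflects the main representational change for calls: the arguments and the call's LHS live in a single GIMPLE_CALL tuple instead of a CALL_EXPR nested inside an assignment. A minimal sketch of the same accessors, purely for illustration; visit is a hypothetical callback.

    static void
    visit_call_operands (gimple stmt, void (*visit) (tree *op_p, bool is_lhs))
    {
      unsigned i;

      for (i = 0; i < gimple_call_num_args (stmt); i++)
        visit (gimple_call_arg_ptr (stmt, i), false);

      /* The assignment of the return value, if any, is part of the same
         tuple, so the LHS is visited here rather than through a separate
         GIMPLE_MODIFY_STMT.  */
      if (gimple_call_lhs (stmt))
        visit (gimple_call_lhs_ptr (stmt), true);
    }
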
 
-/* Walk the inputs and outputs of an ASM_EXPR looking for scalarizable
+/* Walk the inputs and outputs of a GIMPLE_ASM looking for scalarizable
    aggregates.  If we find one, invoke FNS->USE.  */
 
 static void
-sra_walk_asm_expr (tree expr, block_stmt_iterator *bsi,
+sra_walk_gimple_asm (gimple stmt, gimple_stmt_iterator *gsi,
                   const struct sra_walk_fns *fns)
 {
-  sra_walk_tree_list (ASM_INPUTS (expr), bsi, false, fns);
-  sra_walk_tree_list (ASM_OUTPUTS (expr), bsi, true, fns);
+  size_t i;
+  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
+    sra_walk_expr (&TREE_VALUE (gimple_asm_input_op (stmt, i)), gsi, false, fns);
+  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
+    sra_walk_expr (&TREE_VALUE (gimple_asm_output_op (stmt, i)), gsi, true, fns);
 }
 
-/* Walk a GIMPLE_MODIFY_STMT and categorize the assignment appropriately.  */
+/* Walk a GIMPLE_ASSIGN and categorize the assignment appropriately.  */
 
 static void
-sra_walk_gimple_modify_stmt (tree expr, block_stmt_iterator *bsi,
-                            const struct sra_walk_fns *fns)
+sra_walk_gimple_assign (gimple stmt, gimple_stmt_iterator *gsi,
+                       const struct sra_walk_fns *fns)
 {
-  struct sra_elt *lhs_elt, *rhs_elt;
+  struct sra_elt *lhs_elt = NULL, *rhs_elt = NULL;
   tree lhs, rhs;
 
-  lhs = GIMPLE_STMT_OPERAND (expr, 0);
-  rhs = GIMPLE_STMT_OPERAND (expr, 1);
+  /* If there is more than one operand on the RHS, only walk the LHS.  */
+  if (!gimple_assign_single_p (stmt))
+    {
+      sra_walk_expr (gimple_assign_lhs_ptr (stmt), gsi, true, fns);
+      return;
+    }
+
+  lhs = gimple_assign_lhs (stmt);
+  rhs = gimple_assign_rhs1 (stmt);
   lhs_elt = maybe_lookup_element_for_expr (lhs);
   rhs_elt = maybe_lookup_element_for_expr (rhs);
 
   /* If both sides are scalarizable, this is a COPY operation.  */
   if (lhs_elt && rhs_elt)
     {
-      fns->copy (lhs_elt, rhs_elt, bsi);
+      fns->copy (lhs_elt, rhs_elt, gsi);
       return;
     }
 
@@ -977,9 +981,9 @@ sra_walk_gimple_modify_stmt (tree expr, block_stmt_iterator *bsi,
   if (rhs_elt)
     {
       if (!rhs_elt->is_scalar && !TREE_SIDE_EFFECTS (lhs))
-       fns->ldst (rhs_elt, lhs, bsi, false);
+       fns->ldst (rhs_elt, lhs, gsi, false);
       else
-       fns->use (rhs_elt, &GIMPLE_STMT_OPERAND (expr, 1), bsi, false, false);
+       fns->use (rhs_elt, gimple_assign_rhs1_ptr (stmt), gsi, false, false);
     }
 
   /* If it isn't scalarizable, there may be scalarizable variables within, so
@@ -988,13 +992,7 @@ sra_walk_gimple_modify_stmt (tree expr, block_stmt_iterator *bsi,
      that the statements get inserted in the proper place, before any
      copy-out operations.  */
   else
-    {
-      tree call = get_call_expr_in (rhs);
-      if (call)
-       sra_walk_call_expr (call, bsi, fns);
-      else
-       sra_walk_expr (&GIMPLE_STMT_OPERAND (expr, 1), bsi, false, fns);
-    }
+    sra_walk_expr (gimple_assign_rhs1_ptr (stmt), gsi, false, fns);
 
   /* Likewise, handle the LHS being scalarizable.  We have cases similar
      to those above, but also want to handle RHS being constant.  */
@@ -1005,7 +1003,7 @@ sra_walk_gimple_modify_stmt (tree expr, block_stmt_iterator *bsi,
       if (TREE_CODE (rhs) == COMPLEX_EXPR
          || TREE_CODE (rhs) == COMPLEX_CST
          || TREE_CODE (rhs) == CONSTRUCTOR)
-       fns->init (lhs_elt, rhs, bsi);
+       fns->init (lhs_elt, rhs, gsi);
 
       /* If this is an assignment from read-only memory, treat this as if
         we'd been passed the constructor directly.  Invoke INIT.  */
@@ -1013,7 +1011,7 @@ sra_walk_gimple_modify_stmt (tree expr, block_stmt_iterator *bsi,
               && TREE_STATIC (rhs)
               && TREE_READONLY (rhs)
               && targetm.binds_local_p (rhs))
-       fns->init (lhs_elt, DECL_INITIAL (rhs), bsi);
+       fns->init (lhs_elt, DECL_INITIAL (rhs), gsi);
 
       /* If this is a copy from a non-scalarizable lvalue, invoke LDST.
         The lvalue requirement prevents us from trying to directly scalarize
@@ -1021,19 +1019,19 @@ sra_walk_gimple_modify_stmt (tree expr, block_stmt_iterator *bsi,
         the function multiple times, and other evil things.  */
       else if (!lhs_elt->is_scalar
               && !TREE_SIDE_EFFECTS (rhs) && is_gimple_addressable (rhs))
-       fns->ldst (lhs_elt, rhs, bsi, true);
+       fns->ldst (lhs_elt, rhs, gsi, true);
 
       /* Otherwise we're being used in some context that requires the
         aggregate to be seen as a whole.  Invoke USE.  */
       else
-       fns->use (lhs_elt, &GIMPLE_STMT_OPERAND (expr, 0), bsi, true, false);
+       fns->use (lhs_elt, gimple_assign_lhs_ptr (stmt), gsi, true, false);
     }
 
   /* Similarly to above, LHS_ELT being null only means that the LHS as a
      whole is not a scalarizable reference.  There may be occurrences of
      scalarizable variables within, which implies a USE.  */
   else
-    sra_walk_expr (&GIMPLE_STMT_OPERAND (expr, 0), bsi, true, fns);
+    sra_walk_expr (gimple_assign_lhs_ptr (stmt), gsi, true, fns);
 }
 
 /* Entry point to the walk functions.  Search the entire function,
@@ -1044,22 +1042,20 @@ static void
 sra_walk_function (const struct sra_walk_fns *fns)
 {
   basic_block bb;
-  block_stmt_iterator si, ni;
+  gimple_stmt_iterator si, ni;
 
   /* ??? Phase 4 could derive some benefit to walking the function in
      dominator tree order.  */
 
   FOR_EACH_BB (bb)
-    for (si = bsi_start (bb); !bsi_end_p (si); si = ni)
+    for (si = gsi_start_bb (bb); !gsi_end_p (si); si = ni)
       {
-       tree stmt, t;
-       stmt_ann_t ann;
+       gimple stmt;
 
-       stmt = bsi_stmt (si);
-       ann = stmt_ann (stmt);
+       stmt = gsi_stmt (si);
 
        ni = si;
-       bsi_next (&ni);
+       gsi_next (&ni);
 
        /* If the statement has no virtual operands, then it doesn't
           make any structure references that we care about.  */
@@ -1067,35 +1063,28 @@ sra_walk_function (const struct sra_walk_fns *fns)
            && ZERO_SSA_OPERANDS (stmt, (SSA_OP_VIRTUAL_DEFS | SSA_OP_VUSE)))
              continue;
 
-       switch (TREE_CODE (stmt))
+       switch (gimple_code (stmt))
          {
-         case RETURN_EXPR:
+         case GIMPLE_RETURN:
            /* If we have "return <retval>" then the return value is
               already exposed for our pleasure.  Walk it as a USE to
               force all the components back in place for the return.
-
-              If we have an embedded assignment, then <retval> is of
-              a type that gets returned in registers in this ABI, and
-              we do not wish to extend their lifetimes.  Treat this
-              as a USE of the variable on the RHS of this assignment.  */
-
-           t = TREE_OPERAND (stmt, 0);
-           if (t == NULL_TREE)
+              */
+           if (gimple_return_retval (stmt) == NULL_TREE)
              ;
-           else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
-             sra_walk_expr (&GIMPLE_STMT_OPERAND (t, 1), &si, false, fns);
            else
-             sra_walk_expr (&TREE_OPERAND (stmt, 0), &si, false, fns);
+             sra_walk_expr (gimple_return_retval_ptr (stmt), &si, false,
+                             fns);
            break;
 
-         case GIMPLE_MODIFY_STMT:
-           sra_walk_gimple_modify_stmt (stmt, &si, fns);
+         case GIMPLE_ASSIGN:
+           sra_walk_gimple_assign (stmt, &si, fns);
            break;
-         case CALL_EXPR:
-           sra_walk_call_expr (stmt, &si, fns);
+         case GIMPLE_CALL:
+           sra_walk_gimple_call (stmt, &si, fns);
            break;
-         case ASM_EXPR:
-           sra_walk_asm_expr (stmt, &si, fns);
+         case GIMPLE_ASM:
+           sra_walk_gimple_asm (stmt, &si, fns);
            break;
 
          default:
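
The dispatch above now switches on gimple_code instead of TREE_CODE, and the per-block walk uses gsi_start_bb. As a stand-alone illustration of the same iteration-plus-dispatch shape (explanatory only, not part of the patch):

    static unsigned
    count_interesting_stmts (basic_block bb)
    {
      gimple_stmt_iterator si;
      unsigned n = 0;

      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
        switch (gimple_code (gsi_stmt (si)))
          {
          case GIMPLE_ASSIGN:
          case GIMPLE_CALL:
          case GIMPLE_ASM:
          case GIMPLE_RETURN:
            /* The statement kinds sra_walk_function cares about.  */
            n++;
            break;

          default:
            break;
          }

      return n;
    }
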
@@ -1136,7 +1125,7 @@ find_candidates_for_sra (void)
 
 static void
 scan_use (struct sra_elt *elt, tree *expr_p ATTRIBUTE_UNUSED,
-         block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
+         gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
          bool is_output ATTRIBUTE_UNUSED, bool use_all ATTRIBUTE_UNUSED)
 {
   elt->n_uses += 1;
@@ -1144,7 +1133,7 @@ scan_use (struct sra_elt *elt, tree *expr_p ATTRIBUTE_UNUSED,
 
 static void
 scan_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
-          block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
+          gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED)
 {
   lhs_elt->n_copies += 1;
   rhs_elt->n_copies += 1;
@@ -1152,14 +1141,14 @@ scan_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
 
 static void
 scan_init (struct sra_elt *lhs_elt, tree rhs ATTRIBUTE_UNUSED,
-          block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
+          gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED)
 {
   lhs_elt->n_copies += 1;
 }
 
 static void
 scan_ldst (struct sra_elt *elt, tree other ATTRIBUTE_UNUSED,
-          block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
+          gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
           bool is_output ATTRIBUTE_UNUSED)
 {
   elt->n_copies += 1;
@@ -1350,10 +1339,12 @@ instantiate_element (struct sra_elt *elt)
       || (var != elt->replacement
          && TREE_CODE (elt->replacement) == BIT_FIELD_REF))
     {
-      tree init = sra_build_assignment (var, fold_convert (TREE_TYPE (var),
-                                                          integer_zero_node));
-      insert_edge_copies (init, ENTRY_BLOCK_PTR);
-      mark_all_v_defs (init);
+      gimple_seq init
+       = sra_build_assignment (var,
+                               fold_convert (TREE_TYPE (var),
+                                             integer_zero_node));
+      insert_edge_copies_seq (init, ENTRY_BLOCK_PTR);
+      mark_all_v_defs_seq (init);
     }
 
   if (dump_file)
@@ -2030,7 +2021,7 @@ decide_instantiations (void)
    non-scalar.  */
 
 static void
-mark_all_v_defs_1 (tree stmt)
+mark_all_v_defs_stmt (gimple stmt)
 {
   tree sym;
   ssa_op_iter iter;
@@ -2050,18 +2041,13 @@ mark_all_v_defs_1 (tree stmt)
    LIST for renaming.  */
 
 static void
-mark_all_v_defs (tree list)
+mark_all_v_defs_seq (gimple_seq seq)
 {
-  if (TREE_CODE (list) != STATEMENT_LIST)
-    mark_all_v_defs_1 (list);
-  else
-    {
-      tree_stmt_iterator i;
-      for (i = tsi_start (list); !tsi_end_p (i); tsi_next (&i))
-       mark_all_v_defs_1 (tsi_stmt (i));
-    }
-}
+  gimple_stmt_iterator gsi;
 
+  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
+    mark_all_v_defs_stmt (gsi_stmt (gsi));
+}
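
mark_all_v_defs_seq above also shows how a detached statement sequence is walked: gsi_start takes a gimple_seq that is not attached to any basic block, and the old TREE_CODE (list) == STATEMENT_LIST special case disappears because single statements and sequences are handled uniformly. A small sketch along the same lines; process is a hypothetical callback.

    static void
    for_each_stmt_in_seq (gimple_seq seq, void (*process) (gimple))
    {
      gimple_stmt_iterator gsi;

      /* Iterate the sequence in order, one statement at a time.  */
      for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
        process (gsi_stmt (gsi));
    }
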
 
 /* Mark every replacement under ELT with TREE_NO_WARNING.  */
 
@@ -2155,9 +2141,11 @@ scalar_bitfield_p (tree bf)
 
 /* Create an assignment statement from SRC to DST.  */
 
-static tree
+static gimple_seq
 sra_build_assignment (tree dst, tree src)
 {
+  gimple stmt;
+  gimple_seq seq = NULL;
   /* Turning BIT_FIELD_REFs into bit operations enables other passes
      to do a much better job at optimizing the code.
      From dst = BIT_FIELD_REF <var, sz, off> we produce
@@ -2172,7 +2160,6 @@ sra_build_assignment (tree dst, tree src)
     {
       tree var, shift, width;
       tree utype, stype, stmp, utmp, dtmp;
-      tree list, stmt;
       bool unsignedp = (INTEGRAL_TYPE_P (TREE_TYPE (src))
                        ? TYPE_UNSIGNED (TREE_TYPE (src)) : true);
 
@@ -2205,7 +2192,6 @@ sra_build_assignment (tree dst, tree src)
       else if (!TYPE_UNSIGNED (utype))
        utype = unsigned_type_for (utype);
 
-      list = NULL;
       stmp = make_rename_temp (stype, "SR");
 
       /* Convert the base var of the BIT_FIELD_REF to the scalar type
@@ -2213,22 +2199,19 @@ sra_build_assignment (tree dst, tree src)
       if (!useless_type_conversion_p (stype, TREE_TYPE (var)))
        {
          if (INTEGRAL_TYPE_P (TREE_TYPE (var)))
-           stmt = build_gimple_modify_stmt (stmp,
-                                            fold_convert (stype, var));
+           stmt = gimple_build_assign (stmp, fold_convert (stype, var));
          else
-           stmt = build_gimple_modify_stmt (stmp,
-                                            fold_build1 (VIEW_CONVERT_EXPR,
-                                                         stype, var));
-         append_to_statement_list (stmt, &list);
+           stmt = gimple_build_assign (stmp, fold_build1 (VIEW_CONVERT_EXPR,
+                                                          stype, var));
+         gimple_seq_add_stmt (&seq, stmt);
          var = stmp;
        }
 
       if (!integer_zerop (shift))
        {
-         stmt = build_gimple_modify_stmt (stmp,
-                                          fold_build2 (RSHIFT_EXPR, stype,
-                                                       var, shift));
-         append_to_statement_list (stmt, &list);
+         stmt = gimple_build_assign (stmp, fold_build2 (RSHIFT_EXPR, stype,
+                                                        var, shift));
+         gimple_seq_add_stmt (&seq, stmt);
          var = stmp;
        }
 
@@ -2241,10 +2224,9 @@ sra_build_assignment (tree dst, tree src)
          tree mask = int_const_binop (LSHIFT_EXPR, one, width, 0);
          mask = int_const_binop (MINUS_EXPR, mask, one, 0);
 
-         stmt = build_gimple_modify_stmt (stmp,
-                                          fold_build2 (BIT_AND_EXPR, stype,
-                                                       var, mask));
-         append_to_statement_list (stmt, &list);
+         stmt = gimple_build_assign (stmp, fold_build2 (BIT_AND_EXPR, stype,
+                                                        var, mask));
+         gimple_seq_add_stmt (&seq, stmt);
          var = stmp;
        }
 
@@ -2254,8 +2236,8 @@ sra_build_assignment (tree dst, tree src)
        {
          utmp = make_rename_temp (utype, "SR");
 
-         stmt = build_gimple_modify_stmt (utmp, fold_convert (utype, var));
-         append_to_statement_list (stmt, &list);
+         stmt = gimple_build_assign (utmp, fold_convert (utype, var));
+         gimple_seq_add_stmt (&seq, stmt);
 
          var = utmp;
        }
@@ -2269,15 +2251,13 @@ sra_build_assignment (tree dst, tree src)
                                          size_binop (MINUS_EXPR, width,
                                                      bitsize_int (1)), 0);
 
-         stmt = build_gimple_modify_stmt (utmp,
-                                          fold_build2 (BIT_XOR_EXPR, utype,
-                                                       var, signbit));
-         append_to_statement_list (stmt, &list);
+         stmt = gimple_build_assign (utmp, fold_build2 (BIT_XOR_EXPR, utype,
+                                                        var, signbit));
+         gimple_seq_add_stmt (&seq, stmt);
 
-         stmt = build_gimple_modify_stmt (utmp,
-                                          fold_build2 (MINUS_EXPR, utype,
-                                                       utmp, signbit));
-         append_to_statement_list (stmt, &list);
+         stmt = gimple_build_assign (utmp, fold_build2 (MINUS_EXPR, utype,
+                                                        utmp, signbit));
+         gimple_seq_add_stmt (&seq, stmt);
 
          var = utmp;
        }
@@ -2298,15 +2278,15 @@ sra_build_assignment (tree dst, tree src)
          if (!is_gimple_reg (dst))
            {
              dtmp = make_rename_temp (TREE_TYPE (dst), "SR");
-             stmt = build_gimple_modify_stmt (dtmp, var);
-             append_to_statement_list (stmt, &list);
+             stmt = gimple_build_assign (dtmp, var);
+             gimple_seq_add_stmt (&seq, stmt);
              var = dtmp;
            }
        }
-      stmt = build_gimple_modify_stmt (dst, var);
-      append_to_statement_list (stmt, &list);
+      stmt = gimple_build_assign (dst, var);
+      gimple_seq_add_stmt (&seq, stmt);
 
-      return list;
+      return seq;
     }
 
   /* fold_build3 (BIT_FIELD_REF, ...) sometimes returns a cast.  */
@@ -2329,7 +2309,9 @@ sra_build_assignment (tree dst, tree src)
           && !useless_type_conversion_p (TREE_TYPE (dst), TREE_TYPE (src)))
     src = fold_convert (TREE_TYPE (dst), src);
 
-  return build_gimple_modify_stmt (dst, src);
+  stmt = gimple_build_assign (dst, src);
+  gimple_seq_add_stmt (&seq, stmt);
+  return seq;
 }
 
 /* BIT_FIELD_REFs must not be shared.  sra_build_elt_assignment()
@@ -2339,11 +2321,12 @@ sra_build_assignment (tree dst, tree src)
 /* Emit an assignment from SRC to DST, but if DST is a scalarizable
    BIT_FIELD_REF, turn it into bit operations.  */
 
-static tree
+static gimple_seq
 sra_build_bf_assignment (tree dst, tree src)
 {
   tree var, type, utype, tmp, tmp2, tmp3;
-  tree list, stmt;
+  gimple_seq seq;
+  gimple stmt;
   tree cst, cst2, mask;
   tree minshift, maxshift;
 
@@ -2355,7 +2338,7 @@ sra_build_bf_assignment (tree dst, tree src)
   if (!scalar_bitfield_p (dst))
     return sra_build_assignment (REPLDUP (dst), src);
 
-  list = NULL;
+  seq = NULL;
 
   cst = fold_convert (bitsizetype, TREE_OPERAND (dst, 2));
   cst2 = size_binop (PLUS_EXPR,
@@ -2404,11 +2387,11 @@ sra_build_bf_assignment (tree dst, tree src)
       tmp2 = make_rename_temp (utype, "SR");
 
       if (INTEGRAL_TYPE_P (TREE_TYPE (var)))
-       stmt = build_gimple_modify_stmt (tmp2, fold_convert (utype, tmp));
+       stmt = gimple_build_assign (tmp2, fold_convert (utype, tmp));
       else
-       stmt = build_gimple_modify_stmt (tmp2, fold_build1 (VIEW_CONVERT_EXPR,
-                                                           utype, tmp));
-      append_to_statement_list (stmt, &list);
+       stmt = gimple_build_assign (tmp2, fold_build1 (VIEW_CONVERT_EXPR,
+                                                      utype, tmp));
+      gimple_seq_add_stmt (&seq, stmt);
     }
   else
     tmp2 = var;
@@ -2416,10 +2399,9 @@ sra_build_bf_assignment (tree dst, tree src)
   if (!integer_zerop (mask))
     {
       tmp = make_rename_temp (utype, "SR");
-      stmt = build_gimple_modify_stmt (tmp,
-                                      fold_build2 (BIT_AND_EXPR, utype,
+      stmt = gimple_build_assign (tmp, fold_build2 (BIT_AND_EXPR, utype,
                                                    tmp2, mask));
-      append_to_statement_list (stmt, &list);
+      gimple_seq_add_stmt (&seq, stmt);
     }
   else
     tmp = mask;
@@ -2428,28 +2410,31 @@ sra_build_bf_assignment (tree dst, tree src)
     tmp2 = src;
   else if (INTEGRAL_TYPE_P (TREE_TYPE (src)))
     {
+      gimple_seq tmp_seq;
       tmp2 = make_rename_temp (TREE_TYPE (src), "SR");
-      stmt = sra_build_assignment (tmp2, src);
-      append_to_statement_list (stmt, &list);
+      tmp_seq = sra_build_assignment (tmp2, src);
+      gimple_seq_add_seq (&seq, tmp_seq);
     }
   else
     {
+      gimple_seq tmp_seq;
       tmp2 = make_rename_temp
        (lang_hooks.types.type_for_size
         (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (src))),
          1), "SR");
-      stmt = sra_build_assignment (tmp2, fold_build1 (VIEW_CONVERT_EXPR,
+      tmp_seq = sra_build_assignment (tmp2, fold_build1 (VIEW_CONVERT_EXPR,
                                                      TREE_TYPE (tmp2), src));
-      append_to_statement_list (stmt, &list);
+      gimple_seq_add_seq (&seq, tmp_seq);
     }
 
   if (!TYPE_UNSIGNED (TREE_TYPE (tmp2)))
     {
+      gimple_seq tmp_seq;
       tree ut = unsigned_type_for (TREE_TYPE (tmp2));
       tmp3 = make_rename_temp (ut, "SR");
       tmp2 = fold_convert (ut, tmp2);
-      stmt = sra_build_assignment (tmp3, tmp2);
-      append_to_statement_list (stmt, &list);
+      tmp_seq = sra_build_assignment (tmp3, tmp2);
+      gimple_seq_add_seq (&seq, tmp_seq);
 
       tmp2 = fold_build1 (BIT_NOT_EXPR, utype, mask);
       tmp2 = int_const_binop (RSHIFT_EXPR, tmp2, minshift, true);
@@ -2459,8 +2444,8 @@ sra_build_bf_assignment (tree dst, tree src)
       if (tmp3 != tmp2)
        {
          tmp3 = make_rename_temp (ut, "SR");
-         stmt = sra_build_assignment (tmp3, tmp2);
-         append_to_statement_list (stmt, &list);
+         tmp_seq = sra_build_assignment (tmp3, tmp2);
+          gimple_seq_add_seq (&seq, tmp_seq);
        }
 
       tmp2 = tmp3;
@@ -2468,20 +2453,20 @@ sra_build_bf_assignment (tree dst, tree src)
 
   if (TYPE_MAIN_VARIANT (TREE_TYPE (tmp2)) != TYPE_MAIN_VARIANT (utype))
     {
+      gimple_seq tmp_seq;
       tmp3 = make_rename_temp (utype, "SR");
       tmp2 = fold_convert (utype, tmp2);
-      stmt = sra_build_assignment (tmp3, tmp2);
-      append_to_statement_list (stmt, &list);
+      tmp_seq = sra_build_assignment (tmp3, tmp2);
+      gimple_seq_add_seq (&seq, tmp_seq);
       tmp2 = tmp3;
     }
 
   if (!integer_zerop (minshift))
     {
       tmp3 = make_rename_temp (utype, "SR");
-      stmt = build_gimple_modify_stmt (tmp3,
-                                      fold_build2 (LSHIFT_EXPR, utype,
-                                                   tmp2, minshift));
-      append_to_statement_list (stmt, &list);
+      stmt = gimple_build_assign (tmp3, fold_build2 (LSHIFT_EXPR, utype,
+                                                    tmp2, minshift));
+      gimple_seq_add_stmt (&seq, stmt);
       tmp2 = tmp3;
     }
 
@@ -2489,35 +2474,34 @@ sra_build_bf_assignment (tree dst, tree src)
     tmp3 = make_rename_temp (utype, "SR");
   else
     tmp3 = var;
-  stmt = build_gimple_modify_stmt (tmp3,
-                                  fold_build2 (BIT_IOR_EXPR, utype,
-                                               tmp, tmp2));
-  append_to_statement_list (stmt, &list);
+  stmt = gimple_build_assign (tmp3, fold_build2 (BIT_IOR_EXPR, utype,
+                                                tmp, tmp2));
+  gimple_seq_add_stmt (&seq, stmt);
 
   if (tmp3 != var)
     {
       if (TREE_TYPE (var) == type)
-       stmt = build_gimple_modify_stmt (var,
-                                        fold_convert (type, tmp3));
+       stmt = gimple_build_assign (var, fold_convert (type, tmp3));
       else
-       stmt = build_gimple_modify_stmt (var,
-                                        fold_build1 (VIEW_CONVERT_EXPR,
+       stmt = gimple_build_assign (var, fold_build1 (VIEW_CONVERT_EXPR,
                                                      TREE_TYPE (var), tmp3));
-      append_to_statement_list (stmt, &list);
+      gimple_seq_add_stmt (&seq, stmt);
     }
 
-  return list;
+  return seq;
 }
 
 /* Expand an assignment of SRC to the scalarized representation of
    ELT.  If it is a field group, try to widen the assignment to cover
    the full variable.  */
 
-static tree
+static gimple_seq
 sra_build_elt_assignment (struct sra_elt *elt, tree src)
 {
   tree dst = elt->replacement;
-  tree var, tmp, cst, cst2, list, stmt;
+  tree var, tmp, cst, cst2;
+  gimple stmt;
+  gimple_seq seq;
 
   if (TREE_CODE (dst) != BIT_FIELD_REF
       || !elt->in_bitfld_block)
@@ -2553,7 +2537,8 @@ sra_build_elt_assignment (struct sra_elt *elt, tree src)
          if (TYPE_MAIN_VARIANT (TREE_TYPE (var))
              != TYPE_MAIN_VARIANT (TREE_TYPE (src)))
            {
-             list = NULL;
+              gimple_seq tmp_seq;
+             seq = NULL;
 
              if (!INTEGRAL_TYPE_P (TREE_TYPE (src)))
                src = fold_build1 (VIEW_CONVERT_EXPR,
@@ -2564,15 +2549,15 @@ sra_build_elt_assignment (struct sra_elt *elt, tree src)
              gcc_assert (TYPE_UNSIGNED (TREE_TYPE (src)));
 
              tmp = make_rename_temp (TREE_TYPE (src), "SR");
-             stmt = build_gimple_modify_stmt (tmp, src);
-             append_to_statement_list (stmt, &list);
+             stmt = gimple_build_assign (tmp, src);
+             gimple_seq_add_stmt (&seq, stmt);
 
-             stmt = sra_build_assignment (var,
-                                          fold_convert (TREE_TYPE (var),
-                                                        tmp));
-             append_to_statement_list (stmt, &list);
+             tmp_seq = sra_build_assignment (var,
+                                             fold_convert (TREE_TYPE (var),
+                                                           tmp));
+             gimple_seq_add_seq (&seq, tmp_seq);
 
-             return list;
+             return seq;
            }
 
          src = fold_convert (TREE_TYPE (var), src);
@@ -2595,9 +2580,10 @@ sra_build_elt_assignment (struct sra_elt *elt, tree src)
 
 static void
 generate_copy_inout (struct sra_elt *elt, bool copy_out, tree expr,
-                    tree *list_p)
+                    gimple_seq *seq_p)
 {
   struct sra_elt *c;
+  gimple_seq tmp_seq;
   tree t;
 
   if (!copy_out && TREE_CODE (expr) == SSA_NAME
@@ -2611,24 +2597,25 @@ generate_copy_inout (struct sra_elt *elt, bool copy_out, tree expr,
       i = c->replacement;
 
       t = build2 (COMPLEX_EXPR, elt->type, r, i);
-      t = sra_build_bf_assignment (expr, t);
-      SSA_NAME_DEF_STMT (expr) = t;
-      append_to_statement_list (t, list_p);
+      tmp_seq = sra_build_bf_assignment (expr, t);
+      gcc_assert (gimple_seq_singleton_p (tmp_seq));
+      SSA_NAME_DEF_STMT (expr) = gimple_seq_first_stmt (tmp_seq);
+      gimple_seq_add_seq (seq_p, tmp_seq);
     }
   else if (elt->replacement)
     {
       if (copy_out)
-       t = sra_build_elt_assignment (elt, expr);
+       tmp_seq = sra_build_elt_assignment (elt, expr);
       else
-       t = sra_build_bf_assignment (expr, REPLDUP (elt->replacement));
-      append_to_statement_list (t, list_p);
+       tmp_seq = sra_build_bf_assignment (expr, REPLDUP (elt->replacement));
+      gimple_seq_add_seq (seq_p, tmp_seq);
     }
   else
     {
       FOR_EACH_ACTUAL_CHILD (c, elt)
        {
          t = generate_one_element_ref (c, unshare_expr (expr));
-         generate_copy_inout (c, copy_out, t, list_p);
+         generate_copy_inout (c, copy_out, t, seq_p);
        }
     }
 }
@@ -2638,7 +2625,7 @@ generate_copy_inout (struct sra_elt *elt, bool copy_out, tree expr,
    correspondence of instantiated elements.  */
 
 static void
-generate_element_copy (struct sra_elt *dst, struct sra_elt *src, tree *list_p)
+generate_element_copy (struct sra_elt *dst, struct sra_elt *src, gimple_seq *seq_p)
 {
   struct sra_elt *dc, *sc;
 
@@ -2653,7 +2640,7 @@ generate_element_copy (struct sra_elt *dst, struct sra_elt *src, tree *list_p)
            {
              sc = lookup_element (src, dcs->element, NULL, NO_INSERT);
              gcc_assert (sc);
-             generate_element_copy (dcs, sc, list_p);
+             generate_element_copy (dcs, sc, seq_p);
            }
 
          continue;
@@ -2685,17 +2672,17 @@ generate_element_copy (struct sra_elt *dst, struct sra_elt *src, tree *list_p)
          sc = lookup_element (src, f, NULL, NO_INSERT);
        }
 
-      generate_element_copy (dc, sc, list_p);
+      generate_element_copy (dc, sc, seq_p);
     }
 
   if (dst->replacement)
     {
-      tree t;
+      gimple_seq tmp_seq;
 
       gcc_assert (src->replacement);
 
-      t = sra_build_elt_assignment (dst, REPLDUP (src->replacement));
-      append_to_statement_list (t, list_p);
+      tmp_seq = sra_build_elt_assignment (dst, REPLDUP (src->replacement));
+      gimple_seq_add_seq (seq_p, tmp_seq);
     }
 }
 
@@ -2705,7 +2692,7 @@ generate_element_copy (struct sra_elt *dst, struct sra_elt *src, tree *list_p)
    with generate_element_init.  */
 
 static void
-generate_element_zero (struct sra_elt *elt, tree *list_p)
+generate_element_zero (struct sra_elt *elt, gimple_seq *seq_p)
 {
   struct sra_elt *c;
 
@@ -2717,17 +2704,18 @@ generate_element_zero (struct sra_elt *elt, tree *list_p)
 
   if (!elt->in_bitfld_block)
     FOR_EACH_ACTUAL_CHILD (c, elt)
-      generate_element_zero (c, list_p);
+      generate_element_zero (c, seq_p);
 
   if (elt->replacement)
     {
       tree t;
+      gimple_seq tmp_seq;
 
       gcc_assert (elt->is_scalar);
       t = fold_convert (elt->type, integer_zero_node);
 
-      t = sra_build_elt_assignment (elt, t);
-      append_to_statement_list (t, list_p);
+      tmp_seq = sra_build_elt_assignment (elt, t);
+      gimple_seq_add_seq (seq_p, tmp_seq);
     }
 }
 
@@ -2735,11 +2723,10 @@ generate_element_zero (struct sra_elt *elt, tree *list_p)
    Add the result to *LIST_P.  */
 
 static void
-generate_one_element_init (struct sra_elt *elt, tree init, tree *list_p)
+generate_one_element_init (struct sra_elt *elt, tree init, gimple_seq *seq_p)
 {
-  /* The replacement can be almost arbitrarily complex.  Gimplify.  */
-  tree stmt = sra_build_elt_assignment (elt, init);
-  gimplify_and_add (stmt, list_p);
+  gimple_seq tmp_seq = sra_build_elt_assignment (elt, init);
+  gimple_seq_add_seq (seq_p, tmp_seq);
 }
 
 /* Generate a set of assignment statements in *LIST_P to set all instantiated
@@ -2749,7 +2736,7 @@ generate_one_element_init (struct sra_elt *elt, tree init, tree *list_p)
    handle.  */
 
 static bool
-generate_element_init_1 (struct sra_elt *elt, tree init, tree *list_p)
+generate_element_init_1 (struct sra_elt *elt, tree init, gimple_seq *seq_p)
 {
   bool result = true;
   enum tree_code init_code;
@@ -2767,7 +2754,7 @@ generate_element_init_1 (struct sra_elt *elt, tree init, tree *list_p)
     {
       if (elt->replacement)
        {
-         generate_one_element_init (elt, init, list_p);
+         generate_one_element_init (elt, init, seq_p);
          elt->visited = true;
        }
       return result;
@@ -2785,7 +2772,7 @@ generate_element_init_1 (struct sra_elt *elt, tree init, tree *list_p)
          else
            t = (init_code == COMPLEX_EXPR
                 ? TREE_OPERAND (init, 1) : TREE_IMAGPART (init));
-         result &= generate_element_init_1 (sub, t, list_p);
+         result &= generate_element_init_1 (sub, t, seq_p);
        }
       break;
 
@@ -2801,7 +2788,7 @@ generate_element_init_1 (struct sra_elt *elt, tree init, tree *list_p)
                {
                  sub = lookup_element (elt, lower, NULL, NO_INSERT);
                  if (sub != NULL)
-                   result &= generate_element_init_1 (sub, value, list_p);
+                   result &= generate_element_init_1 (sub, value, seq_p);
                  if (tree_int_cst_equal (lower, upper))
                    break;
                  lower = int_const_binop (PLUS_EXPR, lower,
@@ -2812,7 +2799,7 @@ generate_element_init_1 (struct sra_elt *elt, tree init, tree *list_p)
            {
              sub = lookup_element (elt, purpose, NULL, NO_INSERT);
              if (sub != NULL)
-               result &= generate_element_init_1 (sub, value, list_p);
+               result &= generate_element_init_1 (sub, value, seq_p);
            }
        }
       break;
@@ -2829,96 +2816,86 @@ generate_element_init_1 (struct sra_elt *elt, tree init, tree *list_p)
    gimplification.  */
 
 static bool
-generate_element_init (struct sra_elt *elt, tree init, tree *list_p)
+generate_element_init (struct sra_elt *elt, tree init, gimple_seq *seq_p)
 {
   bool ret;
   struct gimplify_ctx gctx;
 
   push_gimplify_context (&gctx);
-  ret = generate_element_init_1 (elt, init, list_p);
+  ret = generate_element_init_1 (elt, init, seq_p);
   pop_gimplify_context (NULL);
 
   /* The replacement can expose previously unreferenced variables.  */
-  if (ret && *list_p)
+  if (ret && *seq_p)
     {
-      tree_stmt_iterator i;
+      gimple_stmt_iterator i;
 
-      for (i = tsi_start (*list_p); !tsi_end_p (i); tsi_next (&i))
-       find_new_referenced_vars (tsi_stmt_ptr (i));
+      for (i = gsi_start (*seq_p); !gsi_end_p (i); gsi_next (&i))
+       find_new_referenced_vars (gsi_stmt (i));
     }
 
   return ret;
 }
 
-/* Insert STMT on all the outgoing edges out of BB.  Note that if BB
-   has more than one edge, STMT will be replicated for each edge.  Also,
-   abnormal edges will be ignored.  */
+/* Insert a gimple_seq SEQ on all the outgoing edges out of BB.  Note that
+   if BB has more than one edge, SEQ will be replicated for each edge.
+   Also, abnormal edges will be ignored.  */
 
 void
-insert_edge_copies (tree stmt, basic_block bb)
+insert_edge_copies_seq (gimple_seq seq, basic_block bb)
 {
   edge e;
   edge_iterator ei;
-  bool first_copy;
+  unsigned n_copies = -1;
 
-  first_copy = true;
   FOR_EACH_EDGE (e, ei, bb->succs)
-    {
-      /* We don't need to insert copies on abnormal edges.  The
-        value of the scalar replacement is not guaranteed to
-        be valid through an abnormal edge.  */
-      if (!(e->flags & EDGE_ABNORMAL))
-       {
-         if (first_copy)
-           {
-             bsi_insert_on_edge (e, stmt);
-             first_copy = false;
-           }
-         else
-           bsi_insert_on_edge (e, unsave_expr_now (stmt));
-       }
-    }
+    if (!(e->flags & EDGE_ABNORMAL)) 
+      n_copies++;
+
+  FOR_EACH_EDGE (e, ei, bb->succs)
+    if (!(e->flags & EDGE_ABNORMAL)) 
+      gsi_insert_seq_on_edge (e, n_copies-- > 0 ? gimple_seq_copy (seq) : seq);
 }
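
insert_edge_copies_seq above also illustrates the new edge-insertion API: sequences are queued on edges with gsi_insert_seq_on_edge (and committed later by gsi_commit_edge_inserts, not shown in this hunk), and every normal edge except the last receives a deep copy via gimple_seq_copy so the original sequence is consumed exactly once. An equivalent stand-alone sketch, not part of the patch:

    static void
    insert_seq_on_normal_succs (gimple_seq seq, basic_block bb)
    {
      edge e;
      edge_iterator ei;
      unsigned remaining = 0;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!(e->flags & EDGE_ABNORMAL))
          remaining++;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!(e->flags & EDGE_ABNORMAL))
          /* Copy SEQ for all but the last normal edge; the last edge gets
             the original sequence.  */
          gsi_insert_seq_on_edge (e, --remaining ? gimple_seq_copy (seq) : seq);
    }
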
 
-/* Helper function to insert LIST before BSI, and set up line number info.  */
+/* Helper function to insert LIST before GSI, and set up line number info.  */
 
 void
-sra_insert_before (block_stmt_iterator *bsi, tree list)
+sra_insert_before (gimple_stmt_iterator *gsi, gimple_seq seq)
 {
-  tree stmt = bsi_stmt (*bsi);
+  gimple stmt = gsi_stmt (*gsi);
 
-  if (EXPR_HAS_LOCATION (stmt))
-    annotate_all_with_locus (&list, EXPR_LOCATION (stmt));
-  bsi_insert_before (bsi, list, BSI_SAME_STMT);
+  if (gimple_has_location (stmt))
+    annotate_all_with_location (seq, gimple_location (stmt));
+  gsi_insert_seq_before (gsi, seq, GSI_SAME_STMT);
 }
 
-/* Similarly, but insert after BSI.  Handles insertion onto edges as well.  */
+/* Similarly, but insert after GSI.  Handles insertion onto edges as well.  */
 
 void
-sra_insert_after (block_stmt_iterator *bsi, tree list)
+sra_insert_after (gimple_stmt_iterator *gsi, gimple_seq seq)
 {
-  tree stmt = bsi_stmt (*bsi);
+  gimple stmt = gsi_stmt (*gsi);
 
-  if (EXPR_HAS_LOCATION (stmt))
-    annotate_all_with_locus (&list, EXPR_LOCATION (stmt));
+  if (gimple_has_location (stmt))
+    annotate_all_with_location (seq, gimple_location (stmt));
 
   if (stmt_ends_bb_p (stmt))
-    insert_edge_copies (list, bsi->bb);
+    insert_edge_copies_seq (seq, gsi_bb (*gsi));
   else
-    bsi_insert_after (bsi, list, BSI_SAME_STMT);
+    gsi_insert_seq_after (gsi, seq, GSI_SAME_STMT);
 }
 
-/* Similarly, but replace the statement at BSI.  */
+/* Similarly, but replace the statement at GSI.  */
 
 static void
-sra_replace (block_stmt_iterator *bsi, tree list)
+sra_replace (gimple_stmt_iterator *gsi, gimple_seq seq)
 {
-  sra_insert_before (bsi, list);
-  bsi_remove (bsi, false);
-  if (bsi_end_p (*bsi))
-    *bsi = bsi_last (bsi->bb);
+  sra_insert_before (gsi, seq);
+  gsi_remove (gsi, false);
+  if (gsi_end_p (*gsi))
+    *gsi = gsi_last (gsi_seq (*gsi));
   else
-    bsi_prev (bsi);
+    gsi_prev (gsi);
 }
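
The insertion helpers above carry statement locations over with annotate_all_with_location and then splice the sequence relative to the iterator with gsi_insert_seq_before/after. A brief illustrative use of the same calls from a hypothetical pass fragment; var is assumed to be a GIMPLE register of integral type.

    static void
    insert_zero_init_before (gimple_stmt_iterator *gsi, tree var)
    {
      gimple at = gsi_stmt (*gsi);
      gimple init = gimple_build_assign (var, build_int_cst (TREE_TYPE (var), 0));
      gimple_seq seq = NULL;

      gimple_seq_add_stmt (&seq, init);

      /* Give the new statement the location of the statement we are
         inserting before, if it has one.  */
      if (gimple_has_location (at))
        annotate_all_with_location (seq, gimple_location (at));

      gsi_insert_seq_before (gsi, seq, GSI_SAME_STMT);
    }
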
 
 /* Data structure that bitfield_overlaps_p fills in with information
@@ -3032,7 +3009,7 @@ bitfield_overlaps_p (tree blen, tree bpos, struct sra_elt *fld,
 
 static void
 sra_explode_bitfield_assignment (tree var, tree vpos, bool to_var,
-                                tree *listp, tree blen, tree bpos,
+                                gimple_seq *seq_p, tree blen, tree bpos,
                                 struct sra_elt *elt)
 {
   struct sra_elt *fld;
@@ -3050,7 +3027,8 @@ sra_explode_bitfield_assignment (tree var, tree vpos, bool to_var,
 
       if (fld->replacement)
        {
-         tree infld, invar, st, type;
+         tree infld, invar, type;
+          gimple_seq st;
 
          infld = fld->replacement;
 
@@ -3089,7 +3067,7 @@ sra_explode_bitfield_assignment (tree var, tree vpos, bool to_var,
          else
            st = sra_build_bf_assignment (infld, invar);
 
-         append_to_statement_list (st, listp);
+         gimple_seq_add_seq (seq_p, st);
        }
       else
        {
@@ -3098,7 +3076,7 @@ sra_explode_bitfield_assignment (tree var, tree vpos, bool to_var,
          if (flp.overlap_pos)
            sub = size_binop (PLUS_EXPR, sub, flp.overlap_pos);
 
-         sra_explode_bitfield_assignment (var, sub, to_var, listp,
+         sra_explode_bitfield_assignment (var, sub, to_var, seq_p,
                                           flen, fpos, fld);
        }
     }
@@ -3111,7 +3089,8 @@ sra_explode_bitfield_assignment (tree var, tree vpos, bool to_var,
    full variable back to the scalarized variables.  */
 
 static void
-sra_sync_for_bitfield_assignment (tree *listbeforep, tree *listafterp,
+sra_sync_for_bitfield_assignment (gimple_seq *seq_before_p,
+                                  gimple_seq *seq_after_p,
                                  tree blen, tree bpos,
                                  struct sra_elt *elt)
 {
@@ -3124,18 +3103,18 @@ sra_sync_for_bitfield_assignment (tree *listbeforep, tree *listafterp,
        if (fld->replacement || (!flp.overlap_len && !flp.overlap_pos))
          {
            generate_copy_inout (fld, false, generate_element_ref (fld),
-                                listbeforep);
+                                seq_before_p);
            mark_no_warning (fld);
-           if (listafterp)
+           if (seq_after_p)
              generate_copy_inout (fld, true, generate_element_ref (fld),
-                                  listafterp);
+                                  seq_after_p);
          }
        else
          {
            tree flen = flp.overlap_len ? flp.overlap_len : flp.field_len;
            tree fpos = flp.overlap_pos ? flp.overlap_pos : bitsize_int (0);
 
-           sra_sync_for_bitfield_assignment (listbeforep, listafterp,
+           sra_sync_for_bitfield_assignment (seq_before_p, seq_after_p,
                                              flen, fpos, fld);
          }
       }
@@ -3146,10 +3125,10 @@ sra_sync_for_bitfield_assignment (tree *listbeforep, tree *listafterp,
    aggregate.  IS_OUTPUT is true if ELT is being modified.  */
 
 static void
-scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
+scalarize_use (struct sra_elt *elt, tree *expr_p, gimple_stmt_iterator *gsi,
               bool is_output, bool use_all)
 {
-  tree stmt = bsi_stmt (*bsi);
+  gimple stmt = gsi_stmt (*gsi);
   tree bfexpr;
 
   if (elt->replacement)
@@ -3161,52 +3140,43 @@ scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
       if (is_output
          && TREE_CODE (elt->replacement) == BIT_FIELD_REF
          && is_gimple_reg (TREE_OPERAND (elt->replacement, 0))
-         && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-         && &GIMPLE_STMT_OPERAND (stmt, 0) == expr_p)
+         && is_gimple_assign (stmt)
+         && gimple_assign_lhs_ptr (stmt) == expr_p)
        {
-         tree newstmt = sra_build_elt_assignment
-           (elt, GIMPLE_STMT_OPERAND (stmt, 1));
-         if (TREE_CODE (newstmt) != STATEMENT_LIST)
-           {
-             tree list = NULL;
-             append_to_statement_list (newstmt, &list);
-             newstmt = list;
-           }
-         sra_replace (bsi, newstmt);
+          gimple_seq newseq;
+          /* RHS must be a single operand. */
+          gcc_assert (gimple_assign_single_p (stmt));
+         newseq = sra_build_elt_assignment (elt, gimple_assign_rhs1 (stmt));
+         sra_replace (gsi, newseq);
          return;
        }
       else if (!is_output
               && TREE_CODE (elt->replacement) == BIT_FIELD_REF
-              && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-              && &GIMPLE_STMT_OPERAND (stmt, 1) == expr_p)
+              && is_gimple_assign (stmt)
+              && gimple_assign_rhs1_ptr (stmt) == expr_p)
        {
          tree tmp = make_rename_temp
-           (TREE_TYPE (GIMPLE_STMT_OPERAND (stmt, 0)), "SR");
-         tree newstmt = sra_build_assignment (tmp, REPLDUP (elt->replacement));
+           (TREE_TYPE (gimple_assign_lhs (stmt)), "SR");
+         gimple_seq newseq = sra_build_assignment (tmp, REPLDUP (elt->replacement));
 
-         if (TREE_CODE (newstmt) != STATEMENT_LIST)
-           {
-             tree list = NULL;
-             append_to_statement_list (newstmt, &list);
-             newstmt = list;
-           }
-         sra_insert_before (bsi, newstmt);
+         sra_insert_before (gsi, newseq);
          replacement = tmp;
        }
       if (is_output)
-         mark_all_v_defs (stmt);
+         mark_all_v_defs_stmt (stmt);
       *expr_p = REPLDUP (replacement);
       update_stmt (stmt);
     }
   else if (use_all && is_output
-          && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
+          && is_gimple_assign (stmt)
           && TREE_CODE (bfexpr
-                        = GIMPLE_STMT_OPERAND (stmt, 0)) == BIT_FIELD_REF
+                        = gimple_assign_lhs (stmt)) == BIT_FIELD_REF
           && &TREE_OPERAND (bfexpr, 0) == expr_p
           && INTEGRAL_TYPE_P (TREE_TYPE (bfexpr))
           && TREE_CODE (TREE_TYPE (*expr_p)) == RECORD_TYPE)
     {
-      tree listbefore = NULL, listafter = NULL;
+      gimple_seq seq_before = NULL;
+      gimple_seq seq_after = NULL;
       tree blen = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 1));
       tree bpos = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 2));
       bool update = false;
@@ -3214,18 +3184,18 @@ scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
       if (!elt->use_block_copy)
        {
          tree type = TREE_TYPE (bfexpr);
-         tree var = make_rename_temp (type, "SR"), tmp, st, vpos;
+         tree var = make_rename_temp (type, "SR"), tmp, vpos;
+          gimple st;
 
-         GIMPLE_STMT_OPERAND (stmt, 0) = var;
+         gimple_assign_set_lhs (stmt, var);
          update = true;
 
          if (!TYPE_UNSIGNED (type))
            {
              type = unsigned_type_for (type);
              tmp = make_rename_temp (type, "SR");
-             st = build_gimple_modify_stmt (tmp,
-                                            fold_convert (type, var));
-             append_to_statement_list (st, &listafter);
+             st = gimple_build_assign (tmp, fold_convert (type, var));
+             gimple_seq_add_stmt (&seq_after, st);
              var = tmp;
            }
 
@@ -3238,35 +3208,35 @@ scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
          else
            vpos = bitsize_int (0);
          sra_explode_bitfield_assignment
-           (var, vpos, false, &listafter, blen, bpos, elt);
+           (var, vpos, false, &seq_after, blen, bpos, elt);
        }
       else
        sra_sync_for_bitfield_assignment
-         (&listbefore, &listafter, blen, bpos, elt);
+         (&seq_before, &seq_after, blen, bpos, elt);
 
-      if (listbefore)
+      if (seq_before)
        {
-         mark_all_v_defs (listbefore);
-         sra_insert_before (bsi, listbefore);
+         mark_all_v_defs_seq (seq_before);
+         sra_insert_before (gsi, seq_before);
        }
-      if (listafter)
+      if (seq_after)
        {
-         mark_all_v_defs (listafter);
-         sra_insert_after (bsi, listafter);
+         mark_all_v_defs_seq (seq_after);
+         sra_insert_after (gsi, seq_after);
        }
 
       if (update)
        update_stmt (stmt);
     }
   else if (use_all && !is_output
-          && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
+          && is_gimple_assign (stmt)
           && TREE_CODE (bfexpr
-                        = GIMPLE_STMT_OPERAND (stmt, 1)) == BIT_FIELD_REF
-          && &TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt, 1), 0) == expr_p
+                        = gimple_assign_rhs1 (stmt)) == BIT_FIELD_REF
+          && &TREE_OPERAND (gimple_assign_rhs1 (stmt), 0) == expr_p
           && INTEGRAL_TYPE_P (TREE_TYPE (bfexpr))
           && TREE_CODE (TREE_TYPE (*expr_p)) == RECORD_TYPE)
     {
-      tree list = NULL;
+      gimple_seq seq = NULL;
       tree blen = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 1));
       tree bpos = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 2));
       bool update = false;
@@ -3281,9 +3251,9 @@ scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
 
          var = make_rename_temp (type, "SR");
 
-         append_to_statement_list (build_gimple_modify_stmt
-                                   (var, build_int_cst_wide (type, 0, 0)),
-                                   &list);
+         gimple_seq_add_stmt (&seq,
+                               gimple_build_assign
+                                (var, build_int_cst_wide (type, 0, 0)));
 
          /* If VAR is wider than BLEN bits, it is padded at the
             most-significant end.  We want to set VPOS such that
@@ -3294,19 +3264,19 @@ scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
          else
            vpos = bitsize_int (0);
          sra_explode_bitfield_assignment
-           (var, vpos, true, &list, blen, bpos, elt);
+           (var, vpos, true, &seq, blen, bpos, elt);
 
-         GIMPLE_STMT_OPERAND (stmt, 1) = var;
+         gimple_assign_set_rhs1 (stmt, var);
          update = true;
        }
       else
        sra_sync_for_bitfield_assignment
-         (&list, NULL, blen, bpos, elt);
+         (&seq, NULL, blen, bpos, elt);
 
-      if (list)
+      if (seq)
        {
-         mark_all_v_defs (list);
-         sra_insert_before (bsi, list);
+         mark_all_v_defs_seq (seq);
+         sra_insert_before (gsi, seq);
        }
 
       if (update)
@@ -3314,7 +3284,7 @@ scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
     }
   else
     {
-      tree list = NULL;
+      gimple_seq seq = NULL;
 
       /* Otherwise we need some copies.  If ELT is being read, then we
         want to store all (modified) sub-elements back into the
@@ -3330,15 +3300,15 @@ scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
         This optimization would be most effective if sra_walk_function
         processed the blocks in dominator order.  */
 
-      generate_copy_inout (elt, is_output, generate_element_ref (elt), &list);
-      if (list == NULL)
+      generate_copy_inout (elt, is_output, generate_element_ref (elt), &seq);
+      if (seq == NULL)
        return;
-      mark_all_v_defs (list);
+      mark_all_v_defs_seq (seq);
       if (is_output)
-       sra_insert_after (bsi, list);
+       sra_insert_after (gsi, seq);
       else
        {
-         sra_insert_before (bsi, list);
+         sra_insert_before (gsi, seq);
          if (use_all)
            mark_no_warning (elt);
        }
@@ -3350,21 +3320,24 @@ scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
 
 static void
 scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
-               block_stmt_iterator *bsi)
+               gimple_stmt_iterator *gsi)
 {
-  tree list, stmt;
+  gimple_seq seq;
+  gimple stmt;
 
   if (lhs_elt->replacement && rhs_elt->replacement)
     {
       /* If we have two scalar operands, modify the existing statement.  */
-      stmt = bsi_stmt (*bsi);
+      stmt = gsi_stmt (*gsi);
 
       /* See the commentary in sra_walk_function concerning
         RETURN_EXPR, and why we should never see one here.  */
-      gcc_assert (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT);
+      gcc_assert (is_gimple_assign (stmt));
+      gcc_assert (gimple_assign_copy_p (stmt));
 
-      GIMPLE_STMT_OPERAND (stmt, 0) = lhs_elt->replacement;
-      GIMPLE_STMT_OPERAND (stmt, 1) = REPLDUP (rhs_elt->replacement);
+
+      gimple_assign_set_lhs (stmt, lhs_elt->replacement);
+      gimple_assign_set_rhs1 (stmt, REPLDUP (rhs_elt->replacement));
       update_stmt (stmt);
     }
   else if (lhs_elt->use_block_copy || rhs_elt->use_block_copy)
@@ -3377,22 +3350,22 @@ scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
         would at least allow those elements that are instantiated in
         both structures to be optimized well.  */
 
-      list = NULL;
+      seq = NULL;
       generate_copy_inout (rhs_elt, false,
-                          generate_element_ref (rhs_elt), &list);
-      if (list)
+                          generate_element_ref (rhs_elt), &seq);
+      if (seq)
        {
-         mark_all_v_defs (list);
-         sra_insert_before (bsi, list);
+         mark_all_v_defs_seq (seq);
+         sra_insert_before (gsi, seq);
        }
 
-      list = NULL;
+      seq = NULL;
       generate_copy_inout (lhs_elt, true,
-                          generate_element_ref (lhs_elt), &list);
-      if (list)
+                          generate_element_ref (lhs_elt), &seq);
+      if (seq)
        {
-         mark_all_v_defs (list);
-         sra_insert_after (bsi, list);
+         mark_all_v_defs_seq (seq);
+         sra_insert_after (gsi, seq);
        }
     }
   else
@@ -3401,14 +3374,14 @@ scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
         case perform pair-wise element assignments and replace the
         original block copy statement.  */
 
-      stmt = bsi_stmt (*bsi);
-      mark_all_v_defs (stmt);
+      stmt = gsi_stmt (*gsi);
+      mark_all_v_defs_stmt (stmt);
 
-      list = NULL;
-      generate_element_copy (lhs_elt, rhs_elt, &list);
-      gcc_assert (list);
-      mark_all_v_defs (list);
-      sra_replace (bsi, list);
+      seq = NULL;
+      generate_element_copy (lhs_elt, rhs_elt, &seq);
+      gcc_assert (seq);
+      mark_all_v_defs_seq (seq);
+      sra_replace (gsi, seq);
     }
 }
 
@@ -3418,23 +3391,23 @@ scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
    CONSTRUCTOR.  */
 
 static void
-scalarize_init (struct sra_elt *lhs_elt, tree rhs, block_stmt_iterator *bsi)
+scalarize_init (struct sra_elt *lhs_elt, tree rhs, gimple_stmt_iterator *gsi)
 {
   bool result = true;
-  tree list = NULL, init_list = NULL;
+  gimple_seq seq = NULL, init_seq = NULL;
 
   /* Generate initialization statements for all members extant in the RHS.  */
   if (rhs)
     {
       /* Unshare the expression just in case this is from a decl's initial.  */
       rhs = unshare_expr (rhs);
-      result = generate_element_init (lhs_elt, rhs, &init_list);
+      result = generate_element_init (lhs_elt, rhs, &init_seq);
     }
 
   /* CONSTRUCTOR is defined such that any member not mentioned is assigned
      a zero value.  Initialize the rest of the instantiated elements.  */
-  generate_element_zero (lhs_elt, &list);
-  append_to_statement_list (init_list, &list);
+  generate_element_zero (lhs_elt, &seq);
+  gimple_seq_add_seq (&seq, init_seq);
 
   if (!result)
     {
@@ -3444,11 +3417,11 @@ scalarize_init (struct sra_elt *lhs_elt, tree rhs, block_stmt_iterator *bsi)
         constants.  The easiest way to do this is to generate a complete
         copy-out, and then follow that with the constant assignments
         that we were able to build.  DCE will clean things up.  */
-      tree list0 = NULL;
+      gimple_seq seq0 = NULL;
       generate_copy_inout (lhs_elt, true, generate_element_ref (lhs_elt),
-                          &list0);
-      append_to_statement_list (list, &list0);
-      list = list0;
+                          &seq0);
+      gimple_seq_add_seq (&seq0, seq);
+      seq = seq0;
     }
 
   if (lhs_elt->use_block_copy || !result)
@@ -3456,20 +3429,20 @@ scalarize_init (struct sra_elt *lhs_elt, tree rhs, block_stmt_iterator *bsi)
       /* Since LHS is not fully instantiated, we must leave the structure
         assignment in place.  Treating this case differently from a USE
         exposes constants to later optimizations.  */
-      if (list)
+      if (seq)
        {
-         mark_all_v_defs (list);
-         sra_insert_after (bsi, list);
+         mark_all_v_defs_seq (seq);
+         sra_insert_after (gsi, seq);
        }
     }
   else
     {
       /* The LHS is fully instantiated.  The list of initializations
         replaces the original structure assignment.  */
-      gcc_assert (list);
-      mark_all_v_defs (bsi_stmt (*bsi));
-      mark_all_v_defs (list);
-      sra_replace (bsi, list);
+      gcc_assert (seq);
+      mark_all_v_defs_stmt (gsi_stmt (*gsi));
+      mark_all_v_defs_seq (seq);
+      sra_replace (gsi, seq);
     }
 }
 
@@ -3498,7 +3471,7 @@ mark_notrap (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
 
 static void
 scalarize_ldst (struct sra_elt *elt, tree other,
-               block_stmt_iterator *bsi, bool is_output)
+               gimple_stmt_iterator *gsi, bool is_output)
 {
   /* Shouldn't have gotten called for a scalar.  */
   gcc_assert (!elt->replacement);
@@ -3507,7 +3480,7 @@ scalarize_ldst (struct sra_elt *elt, tree other,
     {
       /* Since ELT is not fully instantiated, we have to leave the
         block copy in place.  Treat this as a USE.  */
-      scalarize_use (elt, NULL, bsi, is_output, false);
+      scalarize_use (elt, NULL, gsi, is_output, false);
     }
   else
     {
@@ -3515,19 +3488,21 @@ scalarize_ldst (struct sra_elt *elt, tree other,
         case we can have each element stored/loaded directly to/from the
         corresponding slot in OTHER.  This avoids a block copy.  */
 
-      tree list = NULL, stmt = bsi_stmt (*bsi);
+      gimple_seq seq = NULL;
+      gimple stmt = gsi_stmt (*gsi);
 
-      mark_all_v_defs (stmt);
-      generate_copy_inout (elt, is_output, other, &list);
-      gcc_assert (list);
-      mark_all_v_defs (list);
+      mark_all_v_defs_stmt (stmt);
+      generate_copy_inout (elt, is_output, other, &seq);
+      gcc_assert (seq);
+      mark_all_v_defs_seq (seq);
 
       /* Preserve EH semantics.  */
       if (stmt_ends_bb_p (stmt))
        {
-         tree_stmt_iterator tsi;
-         tree first, blist = NULL;
-         bool thr = tree_could_throw_p (stmt);
+         gimple_stmt_iterator si;
+         gimple first;
+          gimple_seq blist = NULL;
+         bool thr = stmt_could_throw_p (stmt);
 
          /* If the last statement of this BB created an EH edge
             before scalarization, we have to locate the first
@@ -3538,26 +3513,26 @@ scalarize_ldst (struct sra_elt *elt, tree other,
             list will be added to normal outgoing edges of the same
             BB.  If they access any memory, it's the same memory, so
             we can assume they won't throw.  */
-         tsi = tsi_start (list);
-         for (first = tsi_stmt (tsi);
-              thr && !tsi_end_p (tsi) && !tree_could_throw_p (first);
-              first = tsi_stmt (tsi))
+         si = gsi_start (seq);
+         for (first = gsi_stmt (si);
+              thr && !gsi_end_p (si) && !stmt_could_throw_p (first);
+              first = gsi_stmt (si))
            {
-             tsi_delink (&tsi);
-             append_to_statement_list (first, &blist);
+             gsi_remove (&si, false);
+             gimple_seq_add_stmt (&blist, first);
            }
 
          /* Extract the first remaining statement from LIST, this is
             the EH statement if there is one.  */
-         tsi_delink (&tsi);
+         gsi_remove (&si, false);
 
          if (blist)
-           sra_insert_before (bsi, blist);
+           sra_insert_before (gsi, blist);
 
          /* Replace the old statement with this new representative.  */
-         bsi_replace (bsi, first, true);
+         gsi_replace (gsi, first, true);
 
-         if (!tsi_end_p (tsi))
+         if (!gsi_end_p (si))
            {
              /* If any reference would trap, then they all would.  And more
                 to the point, the first would.  Therefore none of the rest
@@ -3566,16 +3541,16 @@ scalarize_ldst (struct sra_elt *elt, tree other,
                 TREE_THIS_NOTRAP in all INDIRECT_REFs.  */
              do
                {
-                 walk_tree (tsi_stmt_ptr (tsi), mark_notrap, NULL, NULL);
-                 tsi_next (&tsi);
+                 walk_gimple_stmt (&si, NULL, mark_notrap, NULL);
+                 gsi_next (&si);
                }
-             while (!tsi_end_p (tsi));
+             while (!gsi_end_p (si));
 
-             insert_edge_copies (list, bsi->bb);
+             insert_edge_copies_seq (seq, gsi_bb (*gsi));
            }
        }
       else
-       sra_replace (bsi, list);
+       sra_replace (gsi, seq);
     }
 }
 
@@ -3584,7 +3559,7 @@ scalarize_ldst (struct sra_elt *elt, tree other,
 static void
 scalarize_parms (void)
 {
-  tree list = NULL;
+  gimple_seq seq = NULL;
   unsigned i;
   bitmap_iterator bi;
 
@@ -3592,13 +3567,13 @@ scalarize_parms (void)
     {
       tree var = referenced_var (i);
       struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
-      generate_copy_inout (elt, true, var, &list);
+      generate_copy_inout (elt, true, var, &seq);
     }
 
-  if (list)
+  if (seq)
     {
-      insert_edge_copies (list, ENTRY_BLOCK_PTR);
-      mark_all_v_defs (list);
+      insert_edge_copies_seq (seq, ENTRY_BLOCK_PTR);
+      mark_all_v_defs_seq (seq);
     }
 }
 
@@ -3613,7 +3588,7 @@ scalarize_function (void)
 
   sra_walk_function (&fns);
   scalarize_parms ();
-  bsi_commit_edge_inserts ();
+  gsi_commit_edge_inserts ();
 }
 
 \f
@@ -3663,13 +3638,14 @@ debug_sra_elt_name (struct sra_elt *elt)
 void 
 sra_init_cache (void)
 {
-  if (sra_type_decomp_cache) 
+  if (sra_type_decomp_cache)
     return;
 
   sra_type_decomp_cache = BITMAP_ALLOC (NULL);
   sra_type_inst_cache = BITMAP_ALLOC (NULL);
 }
 
+
 /* Main entry point.  */
 
 static unsigned int
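
Note: the tree-sra.c hunks above all follow one conversion pattern: tree statement
lists built with append_to_statement_list/build_gimple_modify_stmt become gimple
sequences built with gimple_seq_add_stmt/gimple_build_assign, and block_stmt_iterator
insertion points become gimple_stmt_iterator ones.  A minimal sketch of that idiom,
using a hypothetical helper (emit_copy_before is not part of the patch):

    /* Hypothetical helper illustrating the gimple_seq idiom used above.  */
    static void
    emit_copy_before (gimple_stmt_iterator *gsi, tree lhs, tree rhs)
    {
      gimple_seq seq = NULL;
      gimple stmt = gimple_build_assign (lhs, rhs); /* was build_gimple_modify_stmt */

      gimple_seq_add_stmt (&seq, stmt);             /* was append_to_statement_list */
      gsi_insert_seq_before (gsi, seq, GSI_SAME_STMT);
    }
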
index 55d43a5..198adb4 100644
@@ -556,31 +556,31 @@ addr_to_parts (aff_tree *addr, struct mem_address *parts)
 /* Force the PARTS to register.  */
 
 static void
-gimplify_mem_ref_parts (block_stmt_iterator *bsi, struct mem_address *parts)
+gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
 {
   if (parts->base)
-    parts->base = force_gimple_operand_bsi (bsi, parts->base,
+    parts->base = force_gimple_operand_gsi (gsi, parts->base,
                                            true, NULL_TREE,
-                                           true, BSI_SAME_STMT);
+                                           true, GSI_SAME_STMT);
   if (parts->index)
-    parts->index = force_gimple_operand_bsi (bsi, parts->index,
+    parts->index = force_gimple_operand_gsi (gsi, parts->index,
                                             true, NULL_TREE,
-                                            true, BSI_SAME_STMT);
+                                            true, GSI_SAME_STMT);
 }
 
 /* Creates and returns a TARGET_MEM_REF for address ADDR.  If necessary
-   computations are emitted in front of BSI.  TYPE is the mode
+   computations are emitted in front of GSI.  TYPE is the mode
    of created memory reference.  */
 
 tree
-create_mem_ref (block_stmt_iterator *bsi, tree type, aff_tree *addr)
+create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr)
 {
   tree mem_ref, tmp;
   tree atype;
   struct mem_address parts;
 
   addr_to_parts (addr, &parts);
-  gimplify_mem_ref_parts (bsi, &parts);
+  gimplify_mem_ref_parts (gsi, &parts);
   mem_ref = create_mem_ref_raw (type, &parts);
   if (mem_ref)
     return mem_ref;
@@ -591,10 +591,10 @@ create_mem_ref (block_stmt_iterator *bsi, tree type, aff_tree *addr)
     {
       /* Move the multiplication to index.  */
       gcc_assert (parts.index);
-      parts.index = force_gimple_operand_bsi (bsi,
+      parts.index = force_gimple_operand_gsi (gsi,
                                fold_build2 (MULT_EXPR, sizetype,
                                             parts.index, parts.step),
-                               true, NULL_TREE, true, BSI_SAME_STMT);
+                               true, NULL_TREE, true, GSI_SAME_STMT);
       parts.step = NULL_TREE;
   
       mem_ref = create_mem_ref_raw (type, &parts);
@@ -616,11 +616,11 @@ create_mem_ref (block_stmt_iterator *bsi, tree type, aff_tree *addr)
          if (parts.index)
            {
              atype = TREE_TYPE (tmp);
-             parts.base = force_gimple_operand_bsi (bsi,
+             parts.base = force_gimple_operand_gsi (gsi,
                        fold_build2 (PLUS_EXPR, atype,
                                     fold_convert (atype, parts.base),
                                     tmp),
-                       true, NULL_TREE, true, BSI_SAME_STMT);
+                       true, NULL_TREE, true, GSI_SAME_STMT);
            }
          else
            {
@@ -643,11 +643,11 @@ create_mem_ref (block_stmt_iterator *bsi, tree type, aff_tree *addr)
       if (parts.base)
        {
          atype = TREE_TYPE (parts.base);
-         parts.base = force_gimple_operand_bsi (bsi,
+         parts.base = force_gimple_operand_gsi (gsi,
                        fold_build2 (POINTER_PLUS_EXPR, atype,
                                     parts.base,
                                     parts.index),
-                       true, NULL_TREE, true, BSI_SAME_STMT);
+                       true, NULL_TREE, true, GSI_SAME_STMT);
        }
       else
        parts.base = parts.index;
@@ -664,11 +664,11 @@ create_mem_ref (block_stmt_iterator *bsi, tree type, aff_tree *addr)
       if (parts.base)
        {
          atype = TREE_TYPE (parts.base);
-         parts.base = force_gimple_operand_bsi (bsi, 
+         parts.base = force_gimple_operand_gsi (gsi, 
                        fold_build2 (POINTER_PLUS_EXPR, atype,
                                     parts.base,
                                     fold_convert (sizetype, parts.offset)),
-                       true, NULL_TREE, true, BSI_SAME_STMT);
+                       true, NULL_TREE, true, GSI_SAME_STMT);
        }
       else
        parts.base = parts.offset;
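
Note: for tree-ssa-address.c the change is purely mechanical: every
force_gimple_operand_bsi call becomes force_gimple_operand_gsi with GSI_SAME_STMT,
so the statements needed to compute the operand are emitted in front of the current
iterator position.  A hedged sketch of the call shape (force_to_reg is an
illustrative name, not from the patch):

    /* Illustrative wrapper mirroring the calls in create_mem_ref above.  */
    static tree
    force_to_reg (gimple_stmt_iterator *gsi, tree expr)
    {
      /* Arguments: simple_p = true, var = NULL_TREE, before = true,
         and keep the iterator on the same statement.  */
      return force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
                                       true, GSI_SAME_STMT);
    }
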
index be26cb3..67d7f94 100644
@@ -244,7 +244,7 @@ struct alias_match
 {
   tree rhs;
   bool is_rhs_pointer;
-  tree site;
+  gimple site;
 };
 
 
@@ -252,15 +252,14 @@ struct alias_match
    of STMT matches DATA.  */
 
 static bool
-find_alias_site_helper (tree var ATTRIBUTE_UNUSED, tree stmt, void *data)
+find_alias_site_helper (tree var ATTRIBUTE_UNUSED, gimple stmt, void *data)
 {
   struct alias_match *match = (struct alias_match *) data;
-  tree rhs_pointer = get_rhs (stmt);
+  tree rhs_pointer = NULL_TREE;
   tree to_match = NULL_TREE;
 
-  while (CONVERT_EXPR_P (rhs_pointer)
-         || TREE_CODE (rhs_pointer) == VIEW_CONVERT_EXPR)
-    rhs_pointer = TREE_OPERAND (rhs_pointer, 0);
+  if (gimple_assign_cast_p (stmt))
+    rhs_pointer = gimple_assign_rhs1 (stmt);
 
   if (!rhs_pointer)
     /* Not a type conversion.  */
@@ -287,7 +286,7 @@ find_alias_site_helper (tree var ATTRIBUTE_UNUSED, tree stmt, void *data)
    For now, just implement the case where OBJECT1 is an SSA name defined
    by a PHI statement.  */
 
-static tree
+static gimple
 find_alias_site (tree object1, bool is_ptr1 ATTRIBUTE_UNUSED,
                  tree object2, bool is_ptr2)
 {
@@ -295,10 +294,10 @@ find_alias_site (tree object1, bool is_ptr1 ATTRIBUTE_UNUSED,
 
   match.rhs = object2;
   match.is_rhs_pointer = is_ptr2;
-  match.site = NULL_TREE;
+  match.site = NULL;
 
   if (TREE_CODE (object1) != SSA_NAME)
-    return NULL_TREE;
+    return NULL;
 
   walk_use_def_chains (object1, find_alias_site_helper, &match, false);
   return match.site;
@@ -344,29 +343,55 @@ get_ssa_base (tree expr)
    objs: <ptr, 2>
    PTR shows up twice as an object, but is dereferenced only once.
 
-   The elements of the hash tables are tree_map objects.  */
+   The elements of the hash tables are gimple_tree_map objects.  */
 struct reference_matches
 {
   htab_t ptrs;
   htab_t objs;
 };
 
+struct gimple_tree_map
+{
+  tree from;
+  gimple to;
+};
+
+/* Return true if the from trees in both gimple-tree maps are equal.
+   VA and VB are really instances of struct gimple_tree_map.  */
+
+static int
+gimple_tree_map_eq (const void *va, const void *vb)
+{
+  const struct gimple_tree_map *const a = (const struct gimple_tree_map *) va;
+  const struct gimple_tree_map *const b = (const struct gimple_tree_map *) vb;
+  return (a->from == b->from);
+}
+
+/* Hash a from tree in a gimple_tree_map.  ITEM is really an instance
+   of struct gimple_tree_map.  */
 
-/* Return the match, if any.  Otherwise, return NULL_TREE.  It will
-   return NULL_TREE even when a match was found, if the value associated
-   to KEY is NULL_TREE.  */
+static unsigned int
+gimple_tree_map_hash (const void *item)
+{
+  return htab_hash_pointer (((const struct gimple_tree_map *)item)->from);
+}
+
+/* Return the match, if any.  Otherwise, return NULL.  It will return
+   NULL even when a match was found, if the value associated to KEY is
+   NULL.  */
 
-static inline tree
+static inline gimple
 match (htab_t ref_map, tree key)
 {
-  struct tree_map *found;
+  struct gimple_tree_map *found;
   void **slot = NULL;
   slot = htab_find_slot (ref_map, &key, NO_INSERT);
 
   if (!slot)
-    return NULL_TREE;
+    return NULL;
+
+  found = (struct gimple_tree_map *) *slot;
 
-  found = (struct tree_map *) *slot;
   return found->to;
 }
 
@@ -375,9 +400,11 @@ match (htab_t ref_map, tree key)
    already exists and its value is NULL_TREE.  Otherwise, do nothing.  */
 
 static inline void
-maybe_add_match (htab_t ref_map, struct tree_map *key)
+maybe_add_match (htab_t ref_map, struct gimple_tree_map *key)
 {
-  struct tree_map *found = (struct tree_map *) htab_find (ref_map, key);
+  struct gimple_tree_map *found;
+  
+  found = (struct gimple_tree_map *) htab_find (ref_map, key);
 
   if (found && !found->to)
     found->to = key->to;
@@ -390,10 +417,12 @@ static void
 add_key (htab_t ht, tree t, alloc_pool references_pool)
 {
   void **slot;
-  struct tree_map *tp = (struct tree_map *) pool_alloc (references_pool);
+  struct gimple_tree_map *tp;
+  
+  tp = (struct gimple_tree_map *) pool_alloc (references_pool);
 
-  tp->base.from = t;
-  tp->to = NULL_TREE;
+  tp->from = t;
+  tp->to = NULL;
   slot = htab_find_slot (ht, &t, INSERT);
   *slot = (void *) tp;
 }
@@ -412,8 +441,9 @@ reference_table_alloc_pool (bool build)
   if (ref_table_alloc_pool || !build)
     return ref_table_alloc_pool;
 
-  ref_table_alloc_pool =
-    create_alloc_pool ("ref_table_alloc_pool", sizeof (struct tree_map), 20);
+  ref_table_alloc_pool = create_alloc_pool ("ref_table_alloc_pool",
+                                           sizeof (struct gimple_tree_map),
+                                           20);
 
   return ref_table_alloc_pool;
 }
@@ -430,8 +460,10 @@ build_reference_table (void)
   alloc_pool references_pool = reference_table_alloc_pool (true);
 
   ref_table = XNEW (struct reference_matches);
-  ref_table->objs = htab_create (10, tree_map_base_hash, tree_map_eq, NULL);
-  ref_table->ptrs = htab_create (10, tree_map_base_hash, tree_map_eq, NULL);
+  ref_table->objs = htab_create (10, gimple_tree_map_hash, gimple_tree_map_eq,
+                                NULL);
+  ref_table->ptrs = htab_create (10, gimple_tree_map_hash, gimple_tree_map_eq,
+                                NULL);
 
   for (i = 1; i < num_ssa_names; i++)
     {
@@ -518,8 +550,9 @@ find_references_in_tree_helper (tree *tp,
                                int *walk_subtrees ATTRIBUTE_UNUSED,
                                void *data)
 {
-  struct tree_map match;
+  struct gimple_tree_map match;
   static int parent_tree_code = ERROR_MARK;
+  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
 
   /* Do not report references just for the purpose of taking an address.
      XXX: we rely on the fact that the tree walk is in preorder
@@ -528,16 +561,16 @@ find_references_in_tree_helper (tree *tp,
   if (parent_tree_code == ADDR_EXPR)
     goto finish;
 
-  match.to = (tree) data;
+  match.to = (gimple) wi->info;
 
   if (TREE_CODE (*tp) == INDIRECT_REF)
     {
-      match.base.from = TREE_OPERAND (*tp, 0);
+      match.from = TREE_OPERAND (*tp, 0);
       maybe_add_match (reference_table (true)->ptrs, &match);
     }
   else
     {
-      match.base.from = *tp;
+      match.from = *tp;
       maybe_add_match (reference_table (true)->objs, &match);
     }
 
@@ -553,12 +586,16 @@ static void
 find_references_in_function (void)
 {
   basic_block bb;
-  block_stmt_iterator i;
+  gimple_stmt_iterator i;
 
   FOR_EACH_BB (bb)
-    for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
-      walk_tree (bsi_stmt_ptr (i), find_references_in_tree_helper,
-                (void *) *bsi_stmt_ptr (i), NULL);
+    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
+      {
+       struct walk_stmt_info wi;
+       memset (&wi, 0, sizeof (wi));
+       wi.info = (void *) gsi_stmt (i);
+       walk_gimple_op (gsi_stmt (i), find_references_in_tree_helper, &wi);
+      }
 }
 
 
@@ -567,7 +604,7 @@ find_references_in_function (void)
    XXX: only the first site is returned in the current
    implementation.  If there are no matching sites, return NULL_TREE.  */
 
-static tree
+static gimple
 reference_site (tree object, bool is_ptr)
 {
   if (is_ptr)
@@ -590,19 +627,19 @@ reference_site (tree object, bool is_ptr)
 static void
 maybe_find_missing_stmts (tree object1, bool is_ptr1,
                           tree object2, bool is_ptr2,
-                          tree *alias_site,
-                          tree *deref_site1,
-                          tree *deref_site2)
+                          gimple *alias_site,
+                          gimple *deref_site1,
+                          gimple *deref_site2)
 {
   if (object1 && object2)
     {
-      if (!*alias_site || !EXPR_HAS_LOCATION (*alias_site))
+      if (!*alias_site || !gimple_has_location (*alias_site))
        *alias_site = find_alias_site (object1, is_ptr1, object2, is_ptr2);
 
-      if (!*deref_site1 || !EXPR_HAS_LOCATION (*deref_site1))
+      if (!*deref_site1 || !gimple_has_location (*deref_site1))
        *deref_site1 = reference_site (object1, is_ptr1);
 
-      if (!*deref_site2 || !EXPR_HAS_LOCATION (*deref_site2))
+      if (!*deref_site2 || !gimple_has_location (*deref_site2))
        *deref_site2 = reference_site (object2, is_ptr2);
     }
 
@@ -683,7 +720,6 @@ get_maybe_star_prefix (tree object, bool is_ptr)
           && TREE_CODE (TREE_TYPE (object)) == POINTER_TYPE) ? "*" : "";
 }
 
-
 /* Callback for contains_node_type_p.
    Returns true if *T has tree code *(int*)DATA.  */
 
@@ -710,18 +746,13 @@ contains_node_type_p (tree t, int type)
 /* Return true if a warning was issued in the front end at STMT.  */
 
 static bool
-already_warned_in_frontend_p (tree stmt)
+already_warned_in_frontend_p (gimple stmt)
 {
-  tree rhs_pointer;
-
-  if (stmt == NULL_TREE)
+  if (stmt == NULL)
     return false;
 
-  rhs_pointer = get_rhs (stmt);
-
-  if ((CONVERT_EXPR_P (rhs_pointer)
-       || TREE_CODE (rhs_pointer) == VIEW_CONVERT_EXPR)
-      && TREE_NO_WARNING (rhs_pointer))
+  if (gimple_assign_cast_p (stmt)
+      && TREE_NO_WARNING (gimple_assign_rhs1 (stmt)))
     return true;
   else
     return false;
@@ -749,13 +780,13 @@ is_method_pointer (tree type)
    case, that is where a pointer was assigned to the address of an object.  */
 
 static bool
-strict_aliasing_warn (tree alias_site,
+strict_aliasing_warn (gimple alias_site,
                       tree object1, bool is_ptr1,
                       tree object2, bool is_ptr2,
                      bool filter_artificials)
 {
-  tree ref_site1 = NULL_TREE;
-  tree ref_site2 = NULL_TREE;
+  gimple ref_site1 = NULL;
+  gimple ref_site2 = NULL;
   const char *name1;
   const char *name2;
   location_t alias_loc;
@@ -773,18 +804,18 @@ strict_aliasing_warn (tree alias_site,
   maybe_find_missing_stmts (object1, is_ptr1, object2, is_ptr2, &alias_site,
                             &ref_site1, &ref_site2);
 
-  if (EXPR_HAS_LOCATION (alias_site))
-    alias_loc = EXPR_LOCATION (alias_site);
+  if (gimple_has_location (alias_site))
+    alias_loc = gimple_location (alias_site);
   else
     return false;
 
-  if (EXPR_HAS_LOCATION (ref_site1))
-    ref1_loc = EXPR_LOCATION (ref_site1);
+  if (gimple_has_location (ref_site1))
+    ref1_loc = gimple_location (ref_site1);
   else
     ref1_loc = alias_loc;
 
-  if (EXPR_HAS_LOCATION (ref_site2))
-    ref2_loc = EXPR_LOCATION (ref_site2);
+  if (gimple_has_location (ref_site2))
+    ref2_loc = gimple_location (ref_site2);
   else
     ref2_loc = alias_loc;
 
@@ -900,7 +931,7 @@ skip_this_pointer (tree ptr ATTRIBUTE_UNUSED, struct ptr_info_def *pi)
 /* Find aliasing to named objects for pointer PTR.  */
 
 static void
-dsa_named_for (tree ptr)
+dsa_named_for (tree ptr ATTRIBUTE_UNUSED)
 {
   struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
 
@@ -970,7 +1001,7 @@ processed_func_p (tree func)
   void **slot = NULL;
 
   if (!seen)
-    seen = htab_create (10, tree_map_base_hash, tree_map_eq, NULL);
+    seen = htab_create (10, gimple_tree_map_hash, gimple_tree_map_eq, NULL);
 
   slot = htab_find_slot (seen, &func, INSERT);
   gcc_assert (slot);
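
Note: the other recurring change in tree-ssa-alias-warnings.c is that peeling
NOP/VIEW_CONVERT wrappers off a tree RHS is replaced by a single predicate on the
assignment tuple.  Roughly (the helper name is illustrative, not from the patch):

    /* Return the converted operand if STMT is a cast, else NULL_TREE.  */
    static tree
    cast_source (gimple stmt)
    {
      if (gimple_assign_cast_p (stmt))
        return gimple_assign_rhs1 (stmt);
      return NULL_TREE;
    }
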
index 2fec08d..e1540f3 100644
@@ -1,5 +1,5 @@
 /* Alias analysis for trees.
-   Copyright (C) 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
+   Copyright (C) 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
    Contributed by Diego Novillo <dnovillo@redhat.com>
 
 This file is part of GCC.
@@ -35,7 +35,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "function.h"
 #include "diagnostic.h"
 #include "tree-dump.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "tree-flow.h"
 #include "tree-inline.h"
 #include "tree-pass.h"
@@ -752,7 +752,7 @@ static void
 count_mem_refs (long *num_vuses_p, long *num_vdefs_p,
                long *num_partitioned_p, long *num_unpartitioned_p)
 {
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   basic_block bb;
   long num_vdefs, num_vuses, num_partitioned, num_unpartitioned;
   referenced_var_iterator rvi;
@@ -762,10 +762,10 @@ count_mem_refs (long *num_vuses_p, long *num_vdefs_p,
 
   if (num_vuses_p || num_vdefs_p)
     FOR_EACH_BB (bb)
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
-         tree stmt = bsi_stmt (bsi);
-         if (stmt_references_memory_p (stmt))
+         gimple stmt = gsi_stmt (gsi);
+         if (gimple_references_memory_p (stmt))
            {
              num_vuses += NUM_SSA_OPERANDS (stmt, SSA_OP_VUSE);
              num_vdefs += NUM_SSA_OPERANDS (stmt, SSA_OP_VDEF);
@@ -1006,7 +1006,7 @@ debug_mp_info (VEC(mem_sym_stats_t,heap) *mp_info)
    recorded by this function, see compute_memory_partitions).  */
 
 void
-update_mem_sym_stats_from_stmt (tree var, tree stmt, long num_direct_reads,
+update_mem_sym_stats_from_stmt (tree var, gimple stmt, long num_direct_reads,
                                 long num_direct_writes)
 {
   mem_sym_stats_t stats;
@@ -1016,11 +1016,11 @@ update_mem_sym_stats_from_stmt (tree var, tree stmt, long num_direct_reads,
   stats = get_mem_sym_stats_for (var);
 
   stats->num_direct_reads += num_direct_reads;
-  stats->frequency_reads += ((long) bb_for_stmt (stmt)->frequency
+  stats->frequency_reads += ((long) gimple_bb (stmt)->frequency
                              * num_direct_reads);
 
   stats->num_direct_writes += num_direct_writes;
-  stats->frequency_writes += ((long) bb_for_stmt (stmt)->frequency
+  stats->frequency_writes += ((long) gimple_bb (stmt)->frequency
                               * num_direct_writes);
 }
 
@@ -1629,7 +1629,6 @@ done:
   timevar_pop (TV_MEMORY_PARTITIONING);
 }
 
-
 /* Compute may-alias information for every variable referenced in function
    FNDECL.
 
@@ -1812,11 +1811,11 @@ compute_may_aliases (void)
 
   /* Populate all virtual operands and newly promoted register operands.  */
   {
-    block_stmt_iterator bsi;
+    gimple_stmt_iterator gsi;
     basic_block bb;
     FOR_EACH_BB (bb)
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
-       update_stmt_if_modified (bsi_stmt (bsi));
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+       update_stmt_if_modified (gsi_stmt (gsi));
   }
 
   /* Debugging dumps.  */
@@ -1852,7 +1851,8 @@ compute_may_aliases (void)
 struct count_ptr_d
 {
   tree ptr;
-  unsigned count;
+  unsigned num_stores;
+  unsigned num_loads;
 };
 
 
@@ -1862,7 +1862,8 @@ struct count_ptr_d
 static tree
 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
 {
-  struct count_ptr_d *count_p = (struct count_ptr_d *) data;
+  struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
+  struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;
 
   /* Do not walk inside ADDR_EXPR nodes.  In the expression &ptr->fld,
      pointer 'ptr' is *not* dereferenced, it is simply used to compute
@@ -1874,7 +1875,12 @@ count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
     }
 
   if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
-    count_p->count++;
+    {
+      if (wi_p->is_lhs)
+       count_p->num_stores++;
+      else
+       count_p->num_loads++;
+    }
 
   return NULL_TREE;
 }
@@ -1887,7 +1893,7 @@ count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
    stored in *NUM_STORES_P and *NUM_LOADS_P.  */
 
 void
-count_uses_and_derefs (tree ptr, tree stmt, unsigned *num_uses_p,
+count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
                       unsigned *num_loads_p, unsigned *num_stores_p)
 {
   ssa_op_iter i;
@@ -1909,59 +1915,24 @@ count_uses_and_derefs (tree ptr, tree stmt, unsigned *num_uses_p,
      find all the indirect and direct uses of x_1 inside.  The only
      shortcut we can take is the fact that GIMPLE only allows
      INDIRECT_REFs inside the expressions below.  */
-  if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-      || (TREE_CODE (stmt) == RETURN_EXPR
-         && TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT)
-      || TREE_CODE (stmt) == ASM_EXPR
-      || TREE_CODE (stmt) == CALL_EXPR)
+  if (is_gimple_assign (stmt)
+      || gimple_code (stmt) == GIMPLE_RETURN
+      || gimple_code (stmt) == GIMPLE_ASM
+      || is_gimple_call (stmt))
     {
-      tree lhs, rhs;
+      struct walk_stmt_info wi;
+      struct count_ptr_d count;
 
-      if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
-       {
-         lhs = GIMPLE_STMT_OPERAND (stmt, 0);
-         rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-       }
-      else if (TREE_CODE (stmt) == RETURN_EXPR)
-       {
-         tree e = TREE_OPERAND (stmt, 0);
-         lhs = GIMPLE_STMT_OPERAND (e, 0);
-         rhs = GIMPLE_STMT_OPERAND (e, 1);
-       }
-      else if (TREE_CODE (stmt) == ASM_EXPR)
-       {
-         lhs = ASM_OUTPUTS (stmt);
-         rhs = ASM_INPUTS (stmt);
-       }
-      else
-       {
-         lhs = NULL_TREE;
-         rhs = stmt;
-       }
+      count.ptr = ptr;
+      count.num_stores = 0;
+      count.num_loads = 0;
 
-      if (lhs
-         && (TREE_CODE (lhs) == TREE_LIST
-             || EXPR_P (lhs)
-             || GIMPLE_STMT_P (lhs)))
-       {
-         struct count_ptr_d count;
-         count.ptr = ptr;
-         count.count = 0;
-         walk_tree (&lhs, count_ptr_derefs, &count, NULL);
-         *num_stores_p = count.count;
-       }
+      memset (&wi, 0, sizeof (wi));
+      wi.info = &count;
+      walk_gimple_op (stmt, count_ptr_derefs, &wi);
 
-      if (rhs
-         && (TREE_CODE (rhs) == TREE_LIST
-             || EXPR_P (rhs)
-             || GIMPLE_STMT_P (rhs)))
-       {
-         struct count_ptr_d count;
-         count.ptr = ptr;
-         count.count = 0;
-         walk_tree (&rhs, count_ptr_derefs, &count, NULL);
-         *num_loads_p = count.count;
-       }
+      *num_stores_p = count.num_stores;
+      *num_loads_p = count.num_loads;
     }
 
   gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
@@ -2503,7 +2474,7 @@ create_alias_map_for (tree var, struct alias_info *ai)
    ADDRESSABLE_VARS.  */
 
 static void
-update_alias_info_1 (tree stmt, struct alias_info *ai)
+update_alias_info_1 (gimple stmt, struct alias_info *ai)
 {
   bitmap addr_taken;
   use_operand_p use_p;
@@ -2525,7 +2496,7 @@ update_alias_info_1 (tree stmt, struct alias_info *ai)
     mem_ref_stats->num_asm_sites++;
 
   /* Mark all the variables whose address are taken by the statement.  */
-  addr_taken = addresses_taken (stmt);
+  addr_taken = gimple_addresses_taken (stmt);
   if (addr_taken)
     bitmap_ior_into (gimple_addressable_vars (cfun), addr_taken);
 
@@ -2547,7 +2518,7 @@ update_alias_info_1 (tree stmt, struct alias_info *ai)
        {
          bitmap addressable_vars = gimple_addressable_vars (cfun);
 
-         gcc_assert (TREE_CODE (stmt) == PHI_NODE);
+         gcc_assert (gimple_code (stmt) == GIMPLE_PHI);
          gcc_assert (addressable_vars);
 
          /* PHI nodes don't have annotations for pinning the set
@@ -2587,7 +2558,7 @@ update_alias_info_1 (tree stmt, struct alias_info *ai)
 
       /* If STMT is a PHI node, then it will not have pointer
         dereferences and it will not be an escape point.  */
-      if (TREE_CODE (stmt) == PHI_NODE)
+      if (gimple_code (stmt) == GIMPLE_PHI)
        continue;
 
       /* Determine whether OP is a dereferenced pointer, and if STMT
@@ -2621,13 +2592,13 @@ update_alias_info_1 (tree stmt, struct alias_info *ai)
         are not GIMPLE invariants), they can only appear on the RHS
         of an assignment and their base address is always an
         INDIRECT_REF expression.  */
-      if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-         && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == ADDR_EXPR
-         && !is_gimple_val (GIMPLE_STMT_OPERAND (stmt, 1)))
+      if (is_gimple_assign (stmt)
+         && gimple_assign_rhs_code (stmt) == ADDR_EXPR
+         && !is_gimple_val (gimple_assign_rhs1 (stmt)))
        {
          /* If the RHS if of the form &PTR->FLD and PTR == OP, then
             this represents a potential dereference of PTR.  */
-         tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
+         tree rhs = gimple_assign_rhs1 (stmt);
          tree base = get_base_address (TREE_OPERAND (rhs, 0));
          if (TREE_CODE (base) == INDIRECT_REF
              && TREE_OPERAND (base, 0) == op)
@@ -2673,7 +2644,7 @@ update_alias_info_1 (tree stmt, struct alias_info *ai)
          /* If the statement makes a function call, assume
             that pointer OP will be dereferenced in a store
             operation inside the called function.  */
-         if (get_call_expr_in (stmt)
+         if (is_gimple_call (stmt)
              || stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
            {
              pointer_set_insert (ai->dereferenced_ptrs_store, var);
@@ -2682,12 +2653,12 @@ update_alias_info_1 (tree stmt, struct alias_info *ai)
        }
     }
 
-  if (TREE_CODE (stmt) == PHI_NODE)
+  if (gimple_code (stmt) == GIMPLE_PHI)
     return;
 
   /* Mark stored variables in STMT as being written to and update the
      memory reference stats for all memory symbols referenced by STMT.  */
-  if (stmt_references_memory_p (stmt))
+  if (gimple_references_memory_p (stmt))
     {
       unsigned i;
       bitmap_iterator bi;
@@ -2716,8 +2687,8 @@ update_alias_info_1 (tree stmt, struct alias_info *ai)
         dereferences (e.g., MEMORY_VAR = *PTR) or if a call site has
         memory symbols in its argument list, but these cases do not
         occur so frequently as to constitute a serious problem.  */
-      if (STORED_SYMS (stmt))
-       EXECUTE_IF_SET_IN_BITMAP (STORED_SYMS (stmt), 0, i, bi)
+      if (gimple_stored_syms (stmt))
+       EXECUTE_IF_SET_IN_BITMAP (gimple_stored_syms (stmt), 0, i, bi)
          {
            tree sym = referenced_var (i);
            pointer_set_insert (ai->written_vars, sym);
@@ -2729,11 +2700,11 @@ update_alias_info_1 (tree stmt, struct alias_info *ai)
          }
 
       if (!stmt_dereferences_ptr_p
-         && LOADED_SYMS (stmt)
+         && gimple_loaded_syms (stmt)
          && stmt_escape_type != ESCAPE_TO_CALL
          && stmt_escape_type != ESCAPE_TO_PURE_CONST
          && stmt_escape_type != ESCAPE_TO_ASM)
-       EXECUTE_IF_SET_IN_BITMAP (LOADED_SYMS (stmt), 0, i, bi)
+       EXECUTE_IF_SET_IN_BITMAP (gimple_loaded_syms (stmt), 0, i, bi)
          update_mem_sym_stats_from_stmt (referenced_var (i), stmt, 1, 0);
     }
 }
@@ -2749,15 +2720,18 @@ update_alias_info (struct alias_info *ai)
 
   FOR_EACH_BB (bb)
     {
-      block_stmt_iterator bsi;
-      tree phi;
+      gimple_stmt_iterator gsi;
+      gimple phi;
 
-      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
-       if (is_gimple_reg (PHI_RESULT (phi)))
-         update_alias_info_1 (phi, ai);
+      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+       {
+         phi = gsi_stmt (gsi);
+         if (is_gimple_reg (PHI_RESULT (phi)))
+           update_alias_info_1 (phi, ai);
+       }
 
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
-       update_alias_info_1 (bsi_stmt (bsi), ai);
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+       update_alias_info_1 (gsi_stmt (gsi), ai);
     }
 }
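
Note: with tuples, PHI nodes are no longer chained off the basic block via
PHI_CHAIN; they live in their own statement sequence and are visited through
gsi_start_phis, as update_alias_info now does.  A sketch of the loop shape
(visit_all_phis is a hypothetical helper, not part of the patch):

    /* Hypothetical helper showing the tuple-era PHI walk.  */
    static void
    visit_all_phis (basic_block bb, void (*visit) (gimple))
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        visit (gsi_stmt (gsi));
    }
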
 
@@ -3059,11 +3033,11 @@ may_alias_p (tree ptr, alias_set_type mem_alias_set,
       
       /* The star count is -1 if the type at the end of the
         pointer_to chain is not a record or union type. */ 
-      if (!alias_set_only
-         && ipa_type_escape_star_count_of_interesting_type (var_type) >= 0)
+      if (!alias_set_only && 
+         0 /* FIXME tuples ipa_type_escape_star_count_of_interesting_type (var_type) >= 0*/)
        {
          int ptr_star_count = 0;
-         
+
          /* ipa_type_escape_star_count_of_interesting_type is a
             little too restrictive for the pointer type, need to
             allow pointers to primitive types as long as those
@@ -3185,21 +3159,20 @@ set_pt_anything (tree ptr)
    if none.  */
 
 enum escape_type
-is_escape_site (tree stmt)
+is_escape_site (gimple stmt)
 {
-  tree call = get_call_expr_in (stmt);
-  if (call != NULL_TREE)
+  if (is_gimple_call (stmt))
     {
-      if (!TREE_SIDE_EFFECTS (call))
+      if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
        return ESCAPE_TO_PURE_CONST;
 
       return ESCAPE_TO_CALL;
     }
-  else if (TREE_CODE (stmt) == ASM_EXPR)
+  else if (gimple_code (stmt) == GIMPLE_ASM)
     return ESCAPE_TO_ASM;
-  else if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
+  else if (is_gimple_assign (stmt))
     {
-      tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
+      tree lhs = gimple_assign_lhs (stmt);
 
       /* Get to the base of _REF nodes.  */
       if (TREE_CODE (lhs) != SSA_NAME)
@@ -3210,12 +3183,10 @@ is_escape_site (tree stmt)
       if (lhs == NULL_TREE)
        return ESCAPE_UNKNOWN;
 
-      if (CONVERT_EXPR_P (GIMPLE_STMT_OPERAND (stmt, 1))
-         || TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == VIEW_CONVERT_EXPR)
+      if (gimple_assign_cast_p (stmt))
        {
-         tree from
-           = TREE_TYPE (TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt, 1), 0));
-         tree to = TREE_TYPE (GIMPLE_STMT_OPERAND (stmt, 1));
+         tree from = TREE_TYPE (gimple_assign_rhs1 (stmt));
+         tree to = TREE_TYPE (lhs);
 
          /* If the RHS is a conversion between a pointer and an integer, the
             pointer escapes since we can't track the integer.  */
@@ -3245,7 +3216,7 @@ is_escape_site (tree stmt)
         Applications (OOPSLA), pp. 1-19, 1999.  */
       return ESCAPE_STORED_IN_GLOBAL;
     }
-  else if (TREE_CODE (stmt) == RETURN_EXPR)
+  else if (gimple_code (stmt) == GIMPLE_RETURN)
     return ESCAPE_TO_RETURN;
 
   return NO_ESCAPE;
@@ -3539,7 +3510,6 @@ get_ptr_info (tree t)
   return pi;
 }
 
-
 /* Dump points-to information for SSA_NAME PTR into FILE.  */
 
 void
@@ -3595,10 +3565,10 @@ debug_points_to_info_for (tree var)
    it needs to traverse the whole CFG looking for pointer SSA_NAMEs.  */
 
 void
-dump_points_to_info (FILE *file)
+dump_points_to_info (FILE *file ATTRIBUTE_UNUSED)
 {
   basic_block bb;
-  block_stmt_iterator si;
+  gimple_stmt_iterator si;
   ssa_op_iter iter;
   const char *fname =
     lang_hooks.decl_printable_name (current_function_decl, 2);
@@ -3623,18 +3593,17 @@ dump_points_to_info (FILE *file)
   /* Dump points-to information for every pointer defined in the program.  */
   FOR_EACH_BB (bb)
     {
-      tree phi;
-
-      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
        {
+         gimple phi = gsi_stmt (si);
          tree ptr = PHI_RESULT (phi);
          if (POINTER_TYPE_P (TREE_TYPE (ptr)))
            dump_points_to_info_for (file, ptr);
        }
 
-       for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
+       for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
          {
-           tree stmt = bsi_stmt (si);
+           gimple stmt = gsi_stmt (si);
            tree def;
            FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
              if (TREE_CODE (def) == SSA_NAME
@@ -3689,7 +3658,6 @@ debug_may_aliases_for (tree var)
   dump_may_aliases_for (stderr, var);
 }
 
-
 /* Return true if VAR may be aliased.  */
 
 bool
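
Note: count_uses_and_derefs illustrates the new operand-walking interface: instead
of calling walk_tree separately on an extracted LHS and RHS, walk_gimple_op visits
every operand of the tuple and hands the callback a walk_stmt_info whose info field
carries the user data and whose is_lhs flag distinguishes stores from loads.  A
condensed sketch of the caller side (the wrapper name is illustrative; it assumes
the struct count_ptr_d and count_ptr_derefs shown above):

    static void
    count_derefs (gimple stmt, tree ptr, unsigned *loads, unsigned *stores)
    {
      struct walk_stmt_info wi;
      struct count_ptr_d count;

      count.ptr = ptr;
      count.num_loads = 0;
      count.num_stores = 0;

      memset (&wi, 0, sizeof (wi));
      wi.info = &count;                       /* user data for the callback */
      walk_gimple_op (stmt, count_ptr_derefs, &wi);

      *loads = count.num_loads;
      *stores = count.num_stores;
    }
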
index 4b6fe6a..44b5523 100644
@@ -268,6 +268,7 @@ debug_lattice_value (prop_value_t val)
 }
 
 
+
 /* If SYM is a constant variable with known value, return the value.
    NULL_TREE is returned otherwise.  */
 
@@ -339,9 +340,9 @@ get_default_value (tree var)
     }
   else
     {
-      tree stmt = SSA_NAME_DEF_STMT (var);
+      gimple stmt = SSA_NAME_DEF_STMT (var);
 
-      if (IS_EMPTY_STMT (stmt))
+      if (gimple_nop_p (stmt))
        {
          /* Variables defined by an empty statement are those used
             before being initialized.  If VAR is a local variable, we
@@ -352,9 +353,13 @@ get_default_value (tree var)
          else
            val.lattice_val = VARYING;
        }
-      else if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-              || TREE_CODE (stmt) == PHI_NODE)
-       {
+      else if (is_gimple_assign (stmt)
+               /* Value-returning GIMPLE_CALL statements assign to
+                  a variable, and are treated similarly to GIMPLE_ASSIGN.  */
+               || (is_gimple_call (stmt)
+                   && gimple_call_lhs (stmt) != NULL_TREE)
+              || gimple_code (stmt) == GIMPLE_PHI)
+        {
          /* Any other variable defined by an assignment or a PHI node
             is considered UNDEFINED.  */
          val.lattice_val = UNDEFINED;
@@ -497,18 +502,24 @@ set_lattice_value (tree var, prop_value_t new_val)
    Else return VARYING.  */
 
 static ccp_lattice_t
-likely_value (tree stmt)
+likely_value (gimple stmt)
 {
   bool has_constant_operand, has_undefined_operand, all_undefined_operands;
-  stmt_ann_t ann;
   tree use;
   ssa_op_iter iter;
 
-  ann = stmt_ann (stmt);
+  enum tree_code code = gimple_code (stmt);
+
+  /* This function appears to be called only for assignments, calls,
+     conditionals, and switches, due to the logic in visit_stmt.  */
+  gcc_assert (code == GIMPLE_ASSIGN
+              || code == GIMPLE_CALL
+              || code == GIMPLE_COND
+              || code == GIMPLE_SWITCH);
 
   /* If the statement has volatile operands, it won't fold to a
      constant value.  */
-  if (ann->has_volatile_ops)
+  if (gimple_has_volatile_ops (stmt))
     return VARYING;
 
   /* If we are not doing store-ccp, statements with loads
@@ -517,23 +528,31 @@ likely_value (tree stmt)
       && !ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
     return VARYING;
 
-
-  /* A CALL_EXPR is assumed to be varying.  NOTE: This may be overly
+  /* A GIMPLE_CALL is assumed to be varying.  NOTE: This may be overly
      conservative, in the presence of const and pure calls.  */
-  if (get_call_expr_in (stmt) != NULL_TREE)
+  if (code == GIMPLE_CALL)
     return VARYING;
 
-  /* Anything other than assignments and conditional jumps are not
-     interesting for CCP.  */
-  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
-      && !(TREE_CODE (stmt) == RETURN_EXPR && get_rhs (stmt) != NULL_TREE)
-      && TREE_CODE (stmt) != COND_EXPR
-      && TREE_CODE (stmt) != SWITCH_EXPR)
-    return VARYING;
+  /* Note that only a GIMPLE_SINGLE_RHS assignment can satisfy
+     is_gimple_min_invariant, so we do not consider calls or
+     other forms of assignment.  */
+  if (code == GIMPLE_ASSIGN
+      && (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
+          == GIMPLE_SINGLE_RHS)
+      && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
+    return CONSTANT;
 
-  if (is_gimple_min_invariant (get_rhs (stmt)))
+  if (code == GIMPLE_COND
+      && is_gimple_min_invariant (gimple_cond_lhs (stmt))
+      && is_gimple_min_invariant (gimple_cond_rhs (stmt)))
     return CONSTANT;
 
+  if (code == GIMPLE_SWITCH
+      && is_gimple_min_invariant (gimple_switch_index (stmt)))
+    return CONSTANT;
+
+  /* Arrive here for more complex cases.  */
+
   has_constant_operand = false;
   has_undefined_operand = false;
   all_undefined_operands = true;
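
Note: in the rewritten likely_value, the old get_rhs/is_gimple_min_invariant test is
split per statement kind; for assignments the constant case applies only when the
right-hand side is a single operand, which the tuple API exposes through
get_gimple_rhs_class.  A hedged sketch of just that predicate (the function name is
illustrative, not part of the patch):

    /* True if STMT is an assignment whose whole RHS is one invariant operand.  */
    static bool
    assign_rhs_invariant_p (gimple stmt)
    {
      return (is_gimple_assign (stmt)
              && get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                 == GIMPLE_SINGLE_RHS
              && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)));
    }
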
@@ -553,13 +572,11 @@ likely_value (tree stmt)
   /* If the operation combines operands like COMPLEX_EXPR make sure to
      not mark the result UNDEFINED if only one part of the result is
      undefined.  */
-  if (has_undefined_operand
-      && all_undefined_operands)
+  if (has_undefined_operand && all_undefined_operands)
     return UNDEFINED;
-  else if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-          && has_undefined_operand)
+  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
     {
-      switch (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)))
+      switch (gimple_assign_rhs_code (stmt))
        {
        /* Unary operators are handled with all_undefined_operands.  */
        case PLUS_EXPR:
@@ -595,11 +612,11 @@ likely_value (tree stmt)
 /* Returns true if STMT cannot be constant.  */
 
 static bool
-surely_varying_stmt_p (tree stmt)
+surely_varying_stmt_p (gimple stmt)
 {
   /* If the statement has operands that we cannot handle, it cannot be
      constant.  */
-  if (stmt_ann (stmt)->has_volatile_ops)
+  if (gimple_has_volatile_ops (stmt))
     return true;
 
   if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
@@ -614,15 +631,14 @@ surely_varying_stmt_p (tree stmt)
     }
 
   /* If it contains a call, it is varying.  */
-  if (get_call_expr_in (stmt) != NULL_TREE)
+  if (is_gimple_call (stmt))
     return true;
 
   /* Anything other than assignments and conditional jumps are not
      interesting for CCP.  */
-  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
-      && !(TREE_CODE (stmt) == RETURN_EXPR && get_rhs (stmt) != NULL_TREE)
-      && TREE_CODE (stmt) != COND_EXPR
-      && TREE_CODE (stmt) != SWITCH_EXPR)
+  if (gimple_code (stmt) != GIMPLE_ASSIGN
+      && (gimple_code (stmt) != GIMPLE_COND)
+      && (gimple_code (stmt) != GIMPLE_SWITCH))
     return true;
 
   return false;
@@ -640,11 +656,11 @@ ccp_initialize (void)
   /* Initialize simulation flags for PHI nodes and statements.  */
   FOR_EACH_BB (bb)
     {
-      block_stmt_iterator i;
+      gimple_stmt_iterator i;
 
-      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
+      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
         {
-         tree stmt = bsi_stmt (i);
+         gimple stmt = gsi_stmt (i);
          bool is_varying = surely_varying_stmt_p (stmt);
 
          if (is_varying)
@@ -660,24 +676,25 @@ ccp_initialize (void)
                    set_value_varying (def);
                }
            }
-
-         DONT_SIMULATE_AGAIN (stmt) = is_varying;
+          prop_set_simulate_again (stmt, !is_varying);
        }
     }
 
-  /* Now process PHI nodes.  We never set DONT_SIMULATE_AGAIN on phi node,
-     since we do not know which edges are executable yet, except for
-     phi nodes for virtual operands when we do not do store ccp.  */
+  /* Now process PHI nodes.  We never clear the simulate_again flag on
+     phi nodes, since we do not know which edges are executable yet,
+     except for phi nodes for virtual operands when we do not do store ccp.  */
   FOR_EACH_BB (bb)
     {
-      tree phi;
+      gimple_stmt_iterator i;
 
-      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
-       {
-         if (!do_store_ccp && !is_gimple_reg (PHI_RESULT (phi)))
-           DONT_SIMULATE_AGAIN (phi) = true;
+      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
+        {
+          gimple phi = gsi_stmt (i);
+
+         if (!do_store_ccp && !is_gimple_reg (gimple_phi_result (phi)))
+            prop_set_simulate_again (phi, false);
          else
-           DONT_SIMULATE_AGAIN (phi) = false;
+            prop_set_simulate_again (phi, true);
        }
     }
 }
@@ -763,18 +780,18 @@ ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
    of the PHI node that are incoming via executable edges.  */
 
 static enum ssa_prop_result
-ccp_visit_phi_node (tree phi)
+ccp_visit_phi_node (gimple phi)
 {
-  int i;
+  unsigned i;
   prop_value_t *old_val, new_val;
 
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
       fprintf (dump_file, "\nVisiting PHI node: ");
-      print_generic_expr (dump_file, phi, dump_flags);
+      print_gimple_stmt (dump_file, phi, 0, dump_flags);
     }
 
-  old_val = get_value (PHI_RESULT (phi));
+  old_val = get_value (gimple_phi_result (phi));
   switch (old_val->lattice_val)
     {
     case VARYING:
@@ -794,11 +811,11 @@ ccp_visit_phi_node (tree phi)
       gcc_unreachable ();
     }
 
-  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
+  for (i = 0; i < gimple_phi_num_args (phi); i++)
     {
       /* Compute the meet operator over all the PHI arguments flowing
         through executable edges.  */
-      edge e = PHI_ARG_EDGE (phi, i);
+      edge e = gimple_phi_arg_edge (phi, i);
 
       if (dump_file && (dump_flags & TDF_DETAILS))
        {
@@ -812,7 +829,7 @@ ccp_visit_phi_node (tree phi)
         the existing value of the PHI node and the current PHI argument.  */
       if (e->flags & EDGE_EXECUTABLE)
        {
-         tree arg = PHI_ARG_DEF (phi, i);
+         tree arg = gimple_phi_arg (phi, i)->def;
          prop_value_t arg_val;
 
          if (is_gimple_min_invariant (arg))
@@ -846,7 +863,7 @@ ccp_visit_phi_node (tree phi)
     }
 
   /* Make the transition to the new value.  */
-  if (set_lattice_value (PHI_RESULT (phi), new_val))
+  if (set_lattice_value (gimple_phi_result (phi), new_val))
     {
       if (new_val.lattice_val == VARYING)
        return SSA_PROP_VARYING;
@@ -865,179 +882,200 @@ ccp_visit_phi_node (tree phi)
    operands are constants.
 
    If simplification is possible, return the simplified RHS,
-   otherwise return the original RHS.  */
+   otherwise return the original RHS or NULL_TREE.  */
 
 static tree
-ccp_fold (tree stmt)
+ccp_fold (gimple stmt)
 {
-  tree rhs = get_rhs (stmt);
-  enum tree_code code = TREE_CODE (rhs);
-  enum tree_code_class kind = TREE_CODE_CLASS (code);
-  tree retval = NULL_TREE;
-
-  if (TREE_CODE (rhs) == SSA_NAME)
+  switch (gimple_code (stmt))
     {
-      /* If the RHS is an SSA_NAME, return its known constant value,
-        if any.  */
-      return get_value (rhs)->value;
-    }
-  else if (do_store_ccp && stmt_makes_single_load (stmt))
-    {
-      /* If the RHS is a memory load, see if the VUSEs associated with
-        it are a valid constant for that memory load.  */
-      prop_value_t *val = get_value_loaded_by (stmt, const_val);
-      if (val && val->mem_ref)
-       {
-         if (operand_equal_p (val->mem_ref, rhs, 0))
-           return val->value;
-
-         /* If RHS is extracting REALPART_EXPR or IMAGPART_EXPR of a
-            complex type with a known constant value, return it.  */
-         if ((TREE_CODE (rhs) == REALPART_EXPR
-              || TREE_CODE (rhs) == IMAGPART_EXPR)
-             && operand_equal_p (val->mem_ref, TREE_OPERAND (rhs, 0), 0))
-           return fold_build1 (TREE_CODE (rhs), TREE_TYPE (rhs), val->value);
-       }
-      return NULL_TREE;
-    }
-
-  /* Unary operators.  Note that we know the single operand must
-     be a constant.  So this should almost always return a
-     simplified RHS.  */
-  if (kind == tcc_unary)
-    {
-      /* Handle unary operators which can appear in GIMPLE form.  */
-      tree op0 = TREE_OPERAND (rhs, 0);
-
-      /* Simplify the operand down to a constant.  */
-      if (TREE_CODE (op0) == SSA_NAME)
-       {
-         prop_value_t *val = get_value (op0);
-         if (val->lattice_val == CONSTANT)
-           op0 = get_value (op0)->value;
-       }
+    case GIMPLE_ASSIGN:
+      {
+        enum tree_code subcode = gimple_assign_rhs_code (stmt);
+
+        switch (get_gimple_rhs_class (subcode))
+          {
+          case GIMPLE_SINGLE_RHS:
+            {
+              tree rhs = gimple_assign_rhs1 (stmt);
+              enum tree_code_class kind = TREE_CODE_CLASS (subcode);
+
+              if (TREE_CODE (rhs) == SSA_NAME)
+                {
+                  /* If the RHS is an SSA_NAME, return its known constant value,
+                     if any.  */
+                  return get_value (rhs)->value;
+                }
+             /* Handle propagating invariant addresses into address operations.
+                The folding we do here matches that in tree-ssa-forwprop.c.  */
+             else if (TREE_CODE (rhs) == ADDR_EXPR)
+               {
+                 tree *base;
+                 base = &TREE_OPERAND (rhs, 0);
+                 while (handled_component_p (*base))
+                   base = &TREE_OPERAND (*base, 0);
+                 if (TREE_CODE (*base) == INDIRECT_REF
+                     && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
+                   {
+                     prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
+                     if (val->lattice_val == CONSTANT
+                         && TREE_CODE (val->value) == ADDR_EXPR
+                         && useless_type_conversion_p
+                         (TREE_TYPE (TREE_OPERAND (*base, 0)),
+                          TREE_TYPE (val->value))
+                         && useless_type_conversion_p
+                         (TREE_TYPE (*base),
+                          TREE_TYPE (TREE_OPERAND (val->value, 0))))
+                       {
+                         /* We need to return a new tree, not modify the IL
+                            or share parts of it.  So play some tricks to
+                            avoid manually building it.  */
+                         tree ret, save = *base;
+                         *base = TREE_OPERAND (val->value, 0);
+                         ret = unshare_expr (rhs);
+                         recompute_tree_invariant_for_addr_expr (ret);
+                         *base = save;
+                         return ret;
+                       }
+                   }
+               }
 
-      /* Conversions are useless for CCP purposes if they are
-        value-preserving.  Thus the restrictions that
-        useless_type_conversion_p places for pointer type conversions do
-        not apply here.  Substitution later will only substitute to
-        allowed places.  */
-      if ((code == NOP_EXPR || code == CONVERT_EXPR)
-         && ((POINTER_TYPE_P (TREE_TYPE (rhs))
-              && POINTER_TYPE_P (TREE_TYPE (op0)))
-             || useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (op0))))
-       return op0;
-      return fold_unary (code, TREE_TYPE (rhs), op0);
-    }
-
-  /* Binary and comparison operators.  We know one or both of the
-     operands are constants.  */
-  else if (kind == tcc_binary
-           || kind == tcc_comparison
-           || code == TRUTH_AND_EXPR
-           || code == TRUTH_OR_EXPR
-           || code == TRUTH_XOR_EXPR)
-    {
-      /* Handle binary and comparison operators that can appear in
-         GIMPLE form.  */
-      tree op0 = TREE_OPERAND (rhs, 0);
-      tree op1 = TREE_OPERAND (rhs, 1);
-
-      /* Simplify the operands down to constants when appropriate.  */
-      if (TREE_CODE (op0) == SSA_NAME)
-       {
-         prop_value_t *val = get_value (op0);
-         if (val->lattice_val == CONSTANT)
-           op0 = val->value;
-       }
+              else if (do_store_ccp && stmt_makes_single_load (stmt))
+                {
+                  /* If the RHS is a memory load, see if the VUSEs associated with
+                     it are a valid constant for that memory load.  */
+                  prop_value_t *val = get_value_loaded_by (stmt, const_val);
+                  if (val && val->mem_ref)
+                    {
+                      if (operand_equal_p (val->mem_ref, rhs, 0))
+                        return val->value;
+
+                      /* If RHS is extracting REALPART_EXPR or IMAGPART_EXPR of a
+                         complex type with a known constant value, return it.  */
+                      if ((TREE_CODE (rhs) == REALPART_EXPR
+                           || TREE_CODE (rhs) == IMAGPART_EXPR)
+                          && operand_equal_p (val->mem_ref, TREE_OPERAND (rhs, 0), 0))
+                        return fold_build1 (TREE_CODE (rhs), TREE_TYPE (rhs), val->value);
+                    }
+                }
+
+              if (kind == tcc_reference)
+                return fold_const_aggregate_ref (rhs);
+              else if (kind == tcc_declaration)
+                return get_symbol_constant_value (rhs);
+              return rhs;
+            }
+
+          case GIMPLE_UNARY_RHS:
+            {
+              /* Handle unary operators that can appear in GIMPLE form.
+                 Note that we know the single operand must be a constant,
+                 so this should almost always return a simplified RHS.  */
+              tree lhs = gimple_assign_lhs (stmt);
+              tree op0 = gimple_assign_rhs1 (stmt);
+
+              /* Simplify the operand down to a constant.  */
+              if (TREE_CODE (op0) == SSA_NAME)
+                {
+                  prop_value_t *val = get_value (op0);
+                  if (val->lattice_val == CONSTANT)
+                    op0 = get_value (op0)->value;
+                }
+
+             /* Conversions are useless for CCP purposes if they are
+                value-preserving.  Thus the restrictions that
+                useless_type_conversion_p places for pointer type conversions
+                do not apply here.  Substitution later will only substitute to
+                allowed places.  */
+              if ((subcode == NOP_EXPR || subcode == CONVERT_EXPR)
+                 && ((POINTER_TYPE_P (TREE_TYPE (lhs))
+                      && POINTER_TYPE_P (TREE_TYPE (op0)))
+                     || useless_type_conversion_p (TREE_TYPE (lhs),
+                                                   TREE_TYPE (op0))))
+                return op0;
+
+              return fold_unary (subcode, gimple_expr_type (stmt), op0);
+            }
+
+          case GIMPLE_BINARY_RHS:
+            {
+              /* Handle binary operators that can appear in GIMPLE form.  */
+              tree op0 = gimple_assign_rhs1 (stmt);
+              tree op1 = gimple_assign_rhs2 (stmt);
+
+              /* Simplify the operands down to constants when appropriate.  */
+              if (TREE_CODE (op0) == SSA_NAME)
+                {
+                  prop_value_t *val = get_value (op0);
+                  if (val->lattice_val == CONSTANT)
+                    op0 = val->value;
+                }
+
+              if (TREE_CODE (op1) == SSA_NAME)
+                {
+                  prop_value_t *val = get_value (op1);
+                  if (val->lattice_val == CONSTANT)
+                    op1 = val->value;
+                }
+
+              return fold_binary (subcode, gimple_expr_type (stmt), op0, op1);
+            }
+
+          default:
+            gcc_unreachable ();
+          }
+      }
+      break;
 
-      if (TREE_CODE (op1) == SSA_NAME)
-       {
-         prop_value_t *val = get_value (op1);
-         if (val->lattice_val == CONSTANT)
-           op1 = val->value;
-       }
+    case GIMPLE_CALL:
+      /* It may be possible to fold away calls to builtin functions if
+         their arguments are constants.  At present, such folding will not
+         be attempted, as likely_value classifies all calls as VARYING.  */
+      gcc_unreachable ();
+      break;
 
-      return fold_binary (code, TREE_TYPE (rhs), op0, op1);
-    }
+    case GIMPLE_COND:
+      {
+        /* Handle comparison operators that can appear in GIMPLE form.  */
+        tree op0 = gimple_cond_lhs (stmt);
+        tree op1 = gimple_cond_rhs (stmt);
+        enum tree_code code = gimple_cond_code (stmt);
+
+        /* Simplify the operands down to constants when appropriate.  */
+        if (TREE_CODE (op0) == SSA_NAME)
+          {
+            prop_value_t *val = get_value (op0);
+            if (val->lattice_val == CONSTANT)
+              op0 = val->value;
+          }
+
+        if (TREE_CODE (op1) == SSA_NAME)
+          {
+            prop_value_t *val = get_value (op1);
+            if (val->lattice_val == CONSTANT)
+              op1 = val->value;
+          }
+
+        return fold_binary (code, boolean_type_node, op0, op1);
+      }
 
-  else if (kind == tcc_declaration)
-    return get_symbol_constant_value (rhs);
+    case GIMPLE_SWITCH:
+      {
+        tree rhs = gimple_switch_index (stmt);
 
-  else if (kind == tcc_reference)
-    return fold_const_aggregate_ref (rhs);
+        if (TREE_CODE (rhs) == SSA_NAME)
+          {
+            /* If the RHS is an SSA_NAME, return its known constant value,
+               if any.  */
+            return get_value (rhs)->value;
+          }
 
-  /* Handle propagating invariant addresses into address operations.
-     The folding we do here matches that in tree-ssa-forwprop.c.  */
-  else if (code == ADDR_EXPR)
-    {
-      tree *base;
-      base = &TREE_OPERAND (rhs, 0);
-      while (handled_component_p (*base))
-       base = &TREE_OPERAND (*base, 0);
-      if (TREE_CODE (*base) == INDIRECT_REF
-         && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
-       {
-         prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
-         if (val->lattice_val == CONSTANT
-             && TREE_CODE (val->value) == ADDR_EXPR
-             && useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (*base, 0)),
-                                           TREE_TYPE (val->value))
-             && useless_type_conversion_p (TREE_TYPE (*base),
-                                           TREE_TYPE (TREE_OPERAND (val->value, 0))))
-           {
-             /* We need to return a new tree, not modify the IL or share
-                parts of it.  So play some tricks to avoid manually
-                building it.  */
-             tree ret, save = *base;
-             *base = TREE_OPERAND (val->value, 0);
-             ret = unshare_expr (rhs);
-             recompute_tree_invariant_for_addr_expr (ret);
-             *base = save;
-             return ret;
-           }
-       }
-    }
+        return rhs;
+      }
 
-  /* We may be able to fold away calls to builtin functions if their
-     arguments are constants.  */
-  else if (code == CALL_EXPR
-          && TREE_CODE (CALL_EXPR_FN (rhs)) == ADDR_EXPR
-          && TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (rhs), 0)) == FUNCTION_DECL
-          && DECL_BUILT_IN (TREE_OPERAND (CALL_EXPR_FN (rhs), 0)))
-    {
-      if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_USE))
-       {
-         tree *orig, var;
-         size_t i = 0;
-         ssa_op_iter iter;
-         use_operand_p var_p;
-
-         /* Preserve the original values of every operand.  */
-         orig = XNEWVEC (tree,  NUM_SSA_OPERANDS (stmt, SSA_OP_USE));
-         FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
-           orig[i++] = var;
-
-         /* Substitute operands with their values and try to fold.  */
-         replace_uses_in (stmt, NULL, const_val);
-         retval = fold_call_expr (rhs, false);
-
-         /* Restore operands to their original form.  */
-         i = 0;
-         FOR_EACH_SSA_USE_OPERAND (var_p, stmt, iter, SSA_OP_USE)
-           SET_USE (var_p, orig[i++]);
-         free (orig);
-       }
+    default:
+      gcc_unreachable ();
     }
-  else
-    return rhs;
-
-  /* If we got a simplified form, see if we need to convert its type.  */
-  if (retval)
-    return fold_convert (TREE_TYPE (rhs), retval);
-
-  /* No simplification was possible.  */
-  return rhs;
 }
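With tuples the RHS of an assignment is no longer a single expression tree: the subcode and the flat operands have to be recombined whenever a GENERIC tree is wanted, which is why ccp_fold dispatches on get_gimple_rhs_class.  A minimal sketch of that recombination, assuming GCC-internal headers (the helper name is hypothetical):

static tree
reconstruct_assign_rhs (gimple stmt)
{
  enum tree_code subcode = gimple_assign_rhs_code (stmt);

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      /* The single operand already is a full GENERIC expression.  */
      return gimple_assign_rhs1 (stmt);

    case GIMPLE_UNARY_RHS:
      return build1 (subcode, gimple_expr_type (stmt),
                     gimple_assign_rhs1 (stmt));

    case GIMPLE_BINARY_RHS:
      return build2 (subcode, gimple_expr_type (stmt),
                     gimple_assign_rhs1 (stmt),
                     gimple_assign_rhs2 (stmt));

    default:
      gcc_unreachable ();
    }
}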
 
 
@@ -1206,11 +1244,12 @@ fold_const_aggregate_ref (tree t)
 
   return NULL_TREE;
 }
-  
-/* Evaluate statement STMT.  */
+
+/* Evaluate statement STMT.
+   Valid only for assignments, calls, conditionals, and switches.  */
 
 static prop_value_t
-evaluate_stmt (tree stmt)
+evaluate_stmt (gimple stmt)
 {
   prop_value_t val;
   tree simplified = NULL_TREE;
@@ -1223,12 +1262,31 @@ evaluate_stmt (tree stmt)
 
   /* If the statement is likely to have a CONSTANT result, then try
      to fold the statement to determine the constant value.  */
+  /* FIXME.  This is the only place that we call ccp_fold.
+     Since likely_value never returns CONSTANT for calls, we will
+     not attempt to fold them, including builtins whose folding might
+     be profitable.  */
   if (likelyvalue == CONSTANT)
     simplified = ccp_fold (stmt);
   /* If the statement is likely to have a VARYING result, then do not
      bother folding the statement.  */
   else if (likelyvalue == VARYING)
-    simplified = get_rhs (stmt);
+    {
+      enum gimple_code code = gimple_code (stmt);
+      if (code == GIMPLE_ASSIGN)
+        {
+          enum tree_code subcode = gimple_assign_rhs_code (stmt);
+          
+          /* Other cases cannot satisfy is_gimple_min_invariant
+             without folding.  */
+          if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
+            simplified = gimple_assign_rhs1 (stmt);
+        }
+      else if (code == GIMPLE_SWITCH)
+        simplified = gimple_switch_index (stmt);
+      else
+        /* These cannot satisfy is_gimple_min_invariant without folding.  */
+        gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
+    }
 
   is_constant = simplified && is_gimple_min_invariant (simplified);
 
@@ -1275,45 +1333,55 @@ evaluate_stmt (tree stmt)
   return val;
 }
 
-
 /* Visit the assignment statement STMT.  Set the value of its LHS to the
    value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
    creates virtual definitions, set the value of each new name to that
-   of the RHS (if we can derive a constant out of the RHS).  */
+   of the RHS (if we can derive a constant out of the RHS).
+   Value-returning call statements also perform an assignment, and
+   are handled here.  */
 
 static enum ssa_prop_result
-visit_assignment (tree stmt, tree *output_p)
+visit_assignment (gimple stmt, tree *output_p)
 {
   prop_value_t val;
-  tree lhs, rhs;
   enum ssa_prop_result retval;
 
-  lhs = GIMPLE_STMT_OPERAND (stmt, 0);
-  rhs = GIMPLE_STMT_OPERAND (stmt, 1);
+  tree lhs = gimple_get_lhs (stmt);
 
-  if (TREE_CODE (rhs) == SSA_NAME)
-    {
-      /* For a simple copy operation, we copy the lattice values.  */
-      prop_value_t *nval = get_value (rhs);
-      val = *nval;
-    }
-  else if (do_store_ccp && stmt_makes_single_load (stmt))
+  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
+              || gimple_call_lhs (stmt) != NULL_TREE);
+
+  if (gimple_assign_copy_p (stmt))
     {
-      /* Same as above, but the RHS is not a gimple register and yet
-        has a known VUSE.  If STMT is loading from the same memory
-        location that created the SSA_NAMEs for the virtual operands,
-        we can propagate the value on the RHS.  */
-      prop_value_t *nval = get_value_loaded_by (stmt, const_val);
+      tree rhs = gimple_assign_rhs1 (stmt);
 
-      if (nval
-         && nval->mem_ref
-         && operand_equal_p (nval->mem_ref, rhs, 0))
-       val = *nval;
+      if (TREE_CODE (rhs) == SSA_NAME)
+        {
+          /* For a simple copy operation, we copy the lattice values.  */
+          prop_value_t *nval = get_value (rhs);
+          val = *nval;
+        }
+      else if (do_store_ccp && stmt_makes_single_load (stmt))
+        {
+          /* Same as above, but the RHS is not a gimple register and yet
+             has a known VUSE.  If STMT is loading from the same memory
+             location that created the SSA_NAMEs for the virtual operands,
+             we can propagate the value on the RHS.  */
+          prop_value_t *nval = get_value_loaded_by (stmt, const_val);
+
+          if (nval
+              && nval->mem_ref
+              && operand_equal_p (nval->mem_ref, rhs, 0))
+            val = *nval;
+          else
+            val = evaluate_stmt (stmt);
+        }
       else
-       val = evaluate_stmt (stmt);
+        val = evaluate_stmt (stmt);
     }
   else
-    /* Evaluate the statement.  */
+    /* Evaluate the statement, which could be
+       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
     val = evaluate_stmt (stmt);
 
   retval = SSA_PROP_NOT_INTERESTING;
@@ -1382,12 +1450,12 @@ visit_assignment (tree stmt, tree *output_p)
    SSA_PROP_VARYING.  */
 
 static enum ssa_prop_result
-visit_cond_stmt (tree stmt, edge *taken_edge_p)
+visit_cond_stmt (gimple stmt, edge *taken_edge_p)
 {
   prop_value_t val;
   basic_block block;
 
-  block = bb_for_stmt (stmt);
+  block = gimple_bb (stmt);
   val = evaluate_stmt (stmt);
 
   /* Find which edge out of the conditional block will be taken and add it
@@ -1412,7 +1480,7 @@ visit_cond_stmt (tree stmt, edge *taken_edge_p)
    value, return SSA_PROP_VARYING.  */
 
 static enum ssa_prop_result
-ccp_visit_stmt (tree stmt, edge *taken_edge_p, tree *output_p)
+ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
 {
   tree def;
   ssa_op_iter iter;
@@ -1420,21 +1488,33 @@ ccp_visit_stmt (tree stmt, edge *taken_edge_p, tree *output_p)
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
       fprintf (dump_file, "\nVisiting statement:\n");
-      print_generic_stmt (dump_file, stmt, dump_flags);
+      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
     }
 
-  if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
+  switch (gimple_code (stmt))
     {
-      /* If the statement is an assignment that produces a single
-        output value, evaluate its RHS to see if the lattice value of
-        its output has changed.  */
-      return visit_assignment (stmt, output_p);
-    }
-  else if (TREE_CODE (stmt) == COND_EXPR || TREE_CODE (stmt) == SWITCH_EXPR)
-    {
-      /* If STMT is a conditional branch, see if we can determine
-        which branch will be taken.  */
-      return visit_cond_stmt (stmt, taken_edge_p);
+      case GIMPLE_ASSIGN:
+        /* If the statement is an assignment that produces a single
+           output value, evaluate its RHS to see if the lattice value of
+           its output has changed.  */
+        return visit_assignment (stmt, output_p);
+
+      case GIMPLE_CALL:
+        /* A value-returning call also performs an assignment.  */
+        if (gimple_call_lhs (stmt) != NULL_TREE)
+          return visit_assignment (stmt, output_p);
+        break;
+
+      case GIMPLE_COND:
+      case GIMPLE_SWITCH:
+        /* If STMT is a conditional branch, see if we can determine
+           which branch will be taken.   */
+        /* FIXME.  It appears that we should be able to optimize
+           computed GOTOs here as well.  */
+        return visit_cond_stmt (stmt, taken_edge_p);
+
+      default:
+        break;
     }
 
   /* Any other kind of statement is not interesting for constant
@@ -1965,30 +2045,24 @@ maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
 }
 
 
-/* A subroutine of fold_stmt_r.  EXPR is a POINTER_PLUS_EXPR.
-
-   A quaint feature extant in our address arithmetic is that there
+/* A quaint feature extant in our address arithmetic is that there
    can be hidden type changes here.  The type of the result need
    not be the same as the type of the input pointer.
 
    What we're after here is an expression of the form
        (T *)(&array + const)
-   where the cast doesn't actually exist, but is implicit in the
+   where array is OP0, const is OP1, RES_TYPE is T and
+   the cast doesn't actually exist, but is implicit in the
    type of the POINTER_PLUS_EXPR.  We'd like to turn this into
        &array[x]
    which may be able to propagate further.  */
 
-static tree
-maybe_fold_stmt_addition (tree expr)
+tree
+maybe_fold_stmt_addition (tree res_type, tree op0, tree op1)
 {
-  tree op0 = TREE_OPERAND (expr, 0);
-  tree op1 = TREE_OPERAND (expr, 1);
-  tree ptr_type = TREE_TYPE (expr);
   tree ptd_type;
   tree t;
 
-  gcc_assert (TREE_CODE (expr) == POINTER_PLUS_EXPR);
-
   /* It had better be a constant.  */
   if (TREE_CODE (op1) != INTEGER_CST)
     return NULL_TREE;
@@ -2039,7 +2113,7 @@ maybe_fold_stmt_addition (tree expr)
       op0 = array_obj;
     }
 
-  ptd_type = TREE_TYPE (ptr_type);
+  ptd_type = TREE_TYPE (res_type);
   /* If we want a pointer to void, reconstruct the reference from the
      array element type.  A pointer to that can be trivially converted
      to void *.  This happens as we fold (void *)(ptr p+ off).  */
@@ -2053,7 +2127,7 @@ maybe_fold_stmt_addition (tree expr)
     t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
                                            ptd_type, false);
   if (t)
-    t = build1 (ADDR_EXPR, ptr_type, t);
+    t = build1 (ADDR_EXPR, res_type, t);
 
   return t;
 }
@@ -2063,7 +2137,7 @@ maybe_fold_stmt_addition (tree expr)
 
 struct fold_stmt_r_data
 {
-  tree stmt;
+  gimple stmt;
   bool *changed_p;
   bool *inside_addr_expr_p;
 };
@@ -2074,12 +2148,17 @@ struct fold_stmt_r_data
 static tree
 fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
 {
-  struct fold_stmt_r_data *fold_stmt_r_data = (struct fold_stmt_r_data *) data;
-  bool *inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p;
-  bool *changed_p = fold_stmt_r_data->changed_p;
+  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
+  struct fold_stmt_r_data *fold_stmt_r_data;
+  bool *inside_addr_expr_p;
+  bool *changed_p;
   tree expr = *expr_p, t;
   bool volatile_p = TREE_THIS_VOLATILE (expr);
 
+  fold_stmt_r_data = (struct fold_stmt_r_data *) wi->info;
+  inside_addr_expr_p = fold_stmt_r_data->inside_addr_expr_p;
+  changed_p = fold_stmt_r_data->changed_p;
+
   /* ??? It'd be nice if walk_tree had a pre-order option.  */
   switch (TREE_CODE (expr))
     {
@@ -2145,18 +2224,6 @@ fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
         recompute_tree_invariant_for_addr_expr (expr);
       return NULL_TREE;
 
-    case POINTER_PLUS_EXPR:
-      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
-      if (t)
-       return t;
-      t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
-      if (t)
-       return t;
-      *walk_subtrees = 0;
-
-      t = maybe_fold_stmt_addition (expr);
-      break;
-
     case COMPONENT_REF:
       t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
       if (t)
@@ -2182,6 +2249,20 @@ fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
       t = maybe_fold_tmr (expr);
       break;
 
+    case POINTER_PLUS_EXPR:
+      t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
+      if (t)
+        return t;
+      t = walk_tree (&TREE_OPERAND (expr, 1), fold_stmt_r, data, NULL);
+      if (t)
+        return t;
+      *walk_subtrees = 0;
+
+      t = maybe_fold_stmt_addition (TREE_TYPE (expr),
+                                    TREE_OPERAND (expr, 0),
+                                    TREE_OPERAND (expr, 1));
+      break;
+
     case COND_EXPR:
       if (COMPARISON_CLASS_P (TREE_OPERAND (expr, 0)))
         {
@@ -2193,11 +2274,15 @@ fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
          tem = fold_binary (TREE_CODE (op0), TREE_TYPE (op0),
                             TREE_OPERAND (op0, 0),
                             TREE_OPERAND (op0, 1));
-         set = tem && set_rhs (expr_p, tem);
+          /* This is actually a conditional expression, not a GIMPLE
+             conditional statement; however, the valid_gimple_rhs_p
+             test still applies.  */
+         set = tem && is_gimple_condexpr (tem) && valid_gimple_rhs_p (tem);
          fold_undefer_overflow_warnings (set, fold_stmt_r_data->stmt, 0);
          if (set)
            {
-             t = *expr_p;
+              COND_EXPR_COND (expr) = tem;
+             t = expr;
              break;
            }
         }
@@ -2218,7 +2303,6 @@ fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
   return NULL_TREE;
 }
 
-
 /* Return the string length, maximum string length or maximum value of
    ARG in LENGTH.
    If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
@@ -2231,7 +2315,8 @@ fold_stmt_r (tree *expr_p, int *walk_subtrees, void *data)
 static bool
 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
 {
-  tree var, def_stmt, val;
+  tree var, val;
+  gimple def_stmt;
   
   if (TREE_CODE (arg) != SSA_NAME)
     {
@@ -2290,74 +2375,75 @@ get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
   var = arg;
   def_stmt = SSA_NAME_DEF_STMT (var);
 
-  switch (TREE_CODE (def_stmt))
-    {
-      case GIMPLE_MODIFY_STMT:
-       {
-         tree rhs;
-
-         /* The RHS of the statement defining VAR must either have a
-            constant length or come from another SSA_NAME with a constant
-            length.  */
-         rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);
-         STRIP_NOPS (rhs);
-         return get_maxval_strlen (rhs, length, visited, type);
-       }
-
-      case PHI_NODE:
+  switch (gimple_code (def_stmt))
+    {
+      case GIMPLE_ASSIGN:
+        /* The RHS of the statement defining VAR must either have a
+           constant length or come from another SSA_NAME with a constant
+           length.  */
+        if (gimple_assign_single_p (def_stmt)
+            || gimple_assign_unary_nop_p (def_stmt))
+          {
+            tree rhs = gimple_assign_rhs1 (def_stmt);
+            return get_maxval_strlen (rhs, length, visited, type);
+          }
+        return false;
+
+      case GIMPLE_PHI:
        {
          /* All the arguments of the PHI node must have the same constant
             length.  */
-         int i;
-
-         for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
-           {
-             tree arg = PHI_ARG_DEF (def_stmt, i);
-
-             /* If this PHI has itself as an argument, we cannot
-                determine the string length of this argument.  However,
-                if we can find a constant string length for the other
-                PHI args then we can still be sure that this is a
-                constant string length.  So be optimistic and just
-                continue with the next argument.  */
-             if (arg == PHI_RESULT (def_stmt))
-               continue;
-
-             if (!get_maxval_strlen (arg, length, visited, type))
-               return false;
-           }
-
-         return true;
-       }
+         unsigned i;
+
+         for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
+          {
+            tree arg = gimple_phi_arg (def_stmt, i)->def;
+
+            /* If this PHI has itself as an argument, we cannot
+               determine the string length of this argument.  However,
+               if we can find a constant string length for the other
+               PHI args then we can still be sure that this is a
+               constant string length.  So be optimistic and just
+               continue with the next argument.  */
+            if (arg == gimple_phi_result (def_stmt))
+              continue;
+
+            if (!get_maxval_strlen (arg, length, visited, type))
+              return false;
+          }
+        }
+        return true;
 
       default:
-       break;
+        return false;
     }
-
-
-  return false;
 }
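
For a single argument the interface above is typically wrapped in a small helper; a minimal sketch assuming GCC-internal headers (the wrapper name is hypothetical, and TYPE 0 is taken, as in the existing strlen caller, to request the plain string length):

static tree
known_strlen (tree arg)
{
  bitmap visited = BITMAP_ALLOC (NULL);
  tree len = NULL_TREE;

  /* Follow ARG's use-def chain; on failure leave LEN as NULL_TREE.  */
  if (!get_maxval_strlen (arg, &len, visited, 0))
    len = NULL_TREE;

  BITMAP_FREE (visited);
  return len;
}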
 
 
-/* Fold builtin call FN in statement STMT.  If it cannot be folded into a
-   constant, return NULL_TREE.  Otherwise, return its constant value.  */
+/* Fold builtin call in statement STMT.  Returns a simplified tree.
+   We may return a non-constant expression, including another call
+   to a different function and with different arguments, e.g.,
+   substituting memcpy for strcpy when the string length is known.
+   Note that some builtins expand into inline code that may not
+   be valid in GIMPLE.  Callers must take care.  */
 
 static tree
-ccp_fold_builtin (tree stmt, tree fn)
+ccp_fold_builtin (gimple stmt)
 {
   tree result, val[3];
   tree callee, a;
   int arg_mask, i, type;
   bitmap visited;
   bool ignore;
-  call_expr_arg_iterator iter;
   int nargs;
 
-  ignore = TREE_CODE (stmt) != GIMPLE_MODIFY_STMT;
+  gcc_assert (is_gimple_call (stmt));
+
+  ignore = (gimple_call_lhs (stmt) == NULL);
 
   /* First try the generic builtin folder.  If that succeeds, return the
      result directly.  */
-  result = fold_call_expr (fn, ignore);
+  result = fold_call_stmt (stmt, ignore);
   if (result)
     {
       if (ignore)
@@ -2366,13 +2452,13 @@ ccp_fold_builtin (tree stmt, tree fn)
     }
 
   /* Ignore MD builtins.  */
-  callee = get_callee_fndecl (fn);
+  callee = gimple_call_fndecl (stmt);
   if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
     return NULL_TREE;
 
   /* If the builtin could not be folded, and it has no argument list,
      we're done.  */
-  nargs = call_expr_nargs (fn);
+  nargs = gimple_call_num_args (stmt);
   if (nargs == 0)
     return NULL_TREE;
 
@@ -2416,16 +2502,15 @@ ccp_fold_builtin (tree stmt, tree fn)
   visited = BITMAP_ALLOC (NULL);
 
   memset (val, 0, sizeof (val));
-  init_call_expr_arg_iterator (fn, &iter);
-  for (i = 0; arg_mask; i++, arg_mask >>= 1)
+  for (i = 0; i < nargs; i++)
     {
-      a = next_call_expr_arg (&iter);
-      if (arg_mask & 1)
-       {
-         bitmap_clear (visited);
-         if (!get_maxval_strlen (a, &val[i], visited, type))
-           val[i] = NULL_TREE;
-       }
+      if ((arg_mask >> i) & 1)
+        {
+          a = gimple_call_arg (stmt, i);
+          bitmap_clear (visited);
+          if (!get_maxval_strlen (a, &val[i], visited, type))
+            val[i] = NULL_TREE;
+        }
     }
 
   BITMAP_FREE (visited);
@@ -2436,7 +2521,8 @@ ccp_fold_builtin (tree stmt, tree fn)
     case BUILT_IN_STRLEN:
       if (val[0])
        {
-         tree new_val = fold_convert (TREE_TYPE (fn), val[0]);
+         tree new_val =
+              fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
 
          /* If the result is not a valid gimple value, or not a cast
             of a valid gimple value, then we can not use the result.  */
@@ -2450,32 +2536,30 @@ ccp_fold_builtin (tree stmt, tree fn)
     case BUILT_IN_STRCPY:
       if (val[1] && is_gimple_val (val[1]) && nargs == 2)
        result = fold_builtin_strcpy (callee,
-                                     CALL_EXPR_ARG (fn, 0),
-                                     CALL_EXPR_ARG (fn, 1),
+                                      gimple_call_arg (stmt, 0),
+                                      gimple_call_arg (stmt, 1),
                                      val[1]);
       break;
 
     case BUILT_IN_STRNCPY:
       if (val[1] && is_gimple_val (val[1]) && nargs == 3)
        result = fold_builtin_strncpy (callee,
-                                      CALL_EXPR_ARG (fn, 0),
-                                      CALL_EXPR_ARG (fn, 1),
-                                      CALL_EXPR_ARG (fn, 2),
+                                       gimple_call_arg (stmt, 0),
+                                       gimple_call_arg (stmt, 1),
+                                       gimple_call_arg (stmt, 2),
                                       val[1]);
       break;
 
     case BUILT_IN_FPUTS:
-      result = fold_builtin_fputs (CALL_EXPR_ARG (fn, 0),
-                                  CALL_EXPR_ARG (fn, 1),
-                                  TREE_CODE (stmt) != GIMPLE_MODIFY_STMT, 0,
-                                  val[0]);
+      result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
+                                   gimple_call_arg (stmt, 1),
+                                  ignore, false, val[0]);
       break;
 
     case BUILT_IN_FPUTS_UNLOCKED:
-      result = fold_builtin_fputs (CALL_EXPR_ARG (fn, 0),
-                                  CALL_EXPR_ARG (fn, 1),
-                                  TREE_CODE (stmt) != GIMPLE_MODIFY_STMT, 1,
-                                  val[0]);
+      result = fold_builtin_fputs (gimple_call_arg (stmt, 0),
+                                  gimple_call_arg (stmt, 1),
+                                   ignore, true, val[0]);
       break;
 
     case BUILT_IN_MEMCPY_CHK:
@@ -2484,10 +2568,10 @@ ccp_fold_builtin (tree stmt, tree fn)
     case BUILT_IN_MEMSET_CHK:
       if (val[2] && is_gimple_val (val[2]))
        result = fold_builtin_memory_chk (callee,
-                                         CALL_EXPR_ARG (fn, 0),
-                                         CALL_EXPR_ARG (fn, 1),
-                                         CALL_EXPR_ARG (fn, 2),
-                                         CALL_EXPR_ARG (fn, 3),
+                                          gimple_call_arg (stmt, 0),
+                                          gimple_call_arg (stmt, 1),
+                                          gimple_call_arg (stmt, 2),
+                                          gimple_call_arg (stmt, 3),
                                          val[2], ignore,
                                          DECL_FUNCTION_CODE (callee));
       break;
@@ -2496,27 +2580,27 @@ ccp_fold_builtin (tree stmt, tree fn)
     case BUILT_IN_STPCPY_CHK:
       if (val[1] && is_gimple_val (val[1]))
        result = fold_builtin_stxcpy_chk (callee,
-                                         CALL_EXPR_ARG (fn, 0),
-                                         CALL_EXPR_ARG (fn, 1),
-                                         CALL_EXPR_ARG (fn, 2),
+                                          gimple_call_arg (stmt, 0),
+                                          gimple_call_arg (stmt, 1),
+                                          gimple_call_arg (stmt, 2),
                                          val[1], ignore,
                                          DECL_FUNCTION_CODE (callee));
       break;
 
     case BUILT_IN_STRNCPY_CHK:
       if (val[2] && is_gimple_val (val[2]))
-       result = fold_builtin_strncpy_chk (CALL_EXPR_ARG (fn, 0),
-                                          CALL_EXPR_ARG (fn, 1),
-                                          CALL_EXPR_ARG (fn, 2),
-                                          CALL_EXPR_ARG (fn, 3),
+       result = fold_builtin_strncpy_chk (gimple_call_arg (stmt, 0),
+                                           gimple_call_arg (stmt, 1),
+                                           gimple_call_arg (stmt, 2),
+                                           gimple_call_arg (stmt, 3),
                                           val[2]);
       break;
 
     case BUILT_IN_SNPRINTF_CHK:
     case BUILT_IN_VSNPRINTF_CHK:
       if (val[1] && is_gimple_val (val[1]))
-       result = fold_builtin_snprintf_chk (fn, val[1],
-                                           DECL_FUNCTION_CODE (callee));
+       result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
+                                                   DECL_FUNCTION_CODE (callee));
       break;
 
     default:
@@ -2528,114 +2612,267 @@ ccp_fold_builtin (tree stmt, tree fn)
   return result;
 }
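
Call arguments are now reached by index instead of through call_expr_arg_iterator; a minimal sketch of the per-argument walk with the tuple accessors, assuming GCC-internal headers (the helper name is hypothetical):

static bool
all_call_args_invariant_p (gimple call)
{
  unsigned i;

  /* gimple_call_num_args and gimple_call_arg replace call_expr_nargs
     and CALL_EXPR_ARG on the tuple representation.  */
  for (i = 0; i < gimple_call_num_args (call); i++)
    if (!is_gimple_min_invariant (gimple_call_arg (call, i)))
      return false;

  return true;
}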
 
+/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
+   replacement rhs for the statement or NULL_TREE if no simplification
+   could be made.  It is assumed that the operands have been previously
+   folded.  */
+
+static tree
+fold_gimple_assign (gimple_stmt_iterator *si)
+{
+  gimple stmt = gsi_stmt (*si);
+  enum tree_code subcode = gimple_assign_rhs_code (stmt);
+
+  tree result = NULL;
+
+  switch (get_gimple_rhs_class (subcode))
+    {
+    case GIMPLE_SINGLE_RHS:
+      {
+        tree rhs = gimple_assign_rhs1 (stmt);
+        
+        /* Try to fold a conditional expression.  */
+        if (TREE_CODE (rhs) == COND_EXPR)
+          {
+            tree temp = fold (COND_EXPR_COND (rhs));
+            if (temp != COND_EXPR_COND (rhs))
+              result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
+                                    COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
+          }
+
+        /* If we couldn't fold the RHS, hand over to the generic
+           fold routines.  */
+        if (result == NULL_TREE)
+          result = fold (rhs);
+
+        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
+           that may have been added by fold, and "useless" type 
+           conversions that might now be apparent due to propagation.  */
+        STRIP_USELESS_TYPE_CONVERSION (result);
+
+        if (result != rhs && valid_gimple_rhs_p (result))
+         return result;
+        else
+          /* It is possible that fold_stmt_r simplified the RHS.
+             Make sure that the subcode of this statement still
+             reflects the principal operator of the rhs operand. */
+          return rhs;
+      }
+      break;
+
+    case GIMPLE_UNARY_RHS:
+      result = fold_unary (subcode,
+                           gimple_expr_type (stmt),
+                           gimple_assign_rhs1 (stmt));
+
+      if (result)
+        {
+          STRIP_USELESS_TYPE_CONVERSION (result);
+          if (valid_gimple_rhs_p (result))
+           return result;
+        }
+      else if ((gimple_assign_rhs_code (stmt) == NOP_EXPR
+               || gimple_assign_rhs_code (stmt) == CONVERT_EXPR)
+              && POINTER_TYPE_P (gimple_expr_type (stmt))
+              && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
+       {
+         tree type = gimple_expr_type (stmt);
+         tree t = maybe_fold_offset_to_reference (gimple_assign_rhs1 (stmt),
+                                                  integer_zero_node,
+                                                  TREE_TYPE (type));
+         if (t)
+           {
+             tree ptr_type = build_pointer_type (TREE_TYPE (t));
+             if (useless_type_conversion_p (type, ptr_type))
+               return build_fold_addr_expr_with_type (t, ptr_type);
+           }
+       }
+      break;
+
+    case GIMPLE_BINARY_RHS:
+      /* Try to fold pointer addition.  */
+      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
+        result = maybe_fold_stmt_addition (
+                   TREE_TYPE (gimple_assign_lhs (stmt)),
+                   gimple_assign_rhs1 (stmt),
+                   gimple_assign_rhs2 (stmt));
+
+      if (!result)
+        result = fold_binary (subcode,
+                              TREE_TYPE (gimple_assign_lhs (stmt)),
+                              gimple_assign_rhs1 (stmt),
+                              gimple_assign_rhs2 (stmt));
+
+      if (result)
+        {
+          STRIP_USELESS_TYPE_CONVERSION (result);
+          if (valid_gimple_rhs_p (result))
+           return result;
+        }
+      break;
+
+    case GIMPLE_INVALID_RHS:
+      gcc_unreachable ();
+    }
+
+  return NULL_TREE;
+}
+
+/* Attempt to fold a conditional statement. Return true if any changes were
+   made. We only attempt to fold the condition expression, and do not perform
+   any transformation that would require alteration of the cfg.  It is
+   assumed that the operands have been previously folded.  */
+
+static bool
+fold_gimple_cond (gimple stmt)
+{
+  tree result = fold_binary (gimple_cond_code (stmt),
+                             boolean_type_node,
+                             gimple_cond_lhs (stmt),
+                             gimple_cond_rhs (stmt));
+
+  if (result)
+    {
+      STRIP_USELESS_TYPE_CONVERSION (result);
+      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
+        {
+          gimple_cond_set_condition_from_tree (stmt, result);
+          return true;
+        }
+    }
+
+  return false;
+}
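A GIMPLE_COND carries its comparison code and the two operands in separate slots, so testing whether the condition folds to a constant needs no expression tree at all.  A minimal sketch assuming GCC-internal headers (the predicate name is hypothetical):

static bool
gimple_cond_folds_to_true_p (gimple stmt)
{
  tree t;

  if (gimple_code (stmt) != GIMPLE_COND)
    return false;

  /* Fold the comparison exactly as fold_gimple_cond does above.  */
  t = fold_binary (gimple_cond_code (stmt), boolean_type_node,
                   gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
  return t != NULL_TREE && integer_onep (t);
}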
+
+
+/* Attempt to fold a call statement referenced by the statement iterator GSI.
+   The statement may be replaced by another statement, e.g., if the call
+   simplifies to a constant value. Return true if any changes were made.
+   It is assumed that the operands have been previously folded.  */
+
+static bool
+fold_gimple_call (gimple_stmt_iterator *gsi)
+{
+  gimple stmt = gsi_stmt (*gsi);
+
+  tree callee = gimple_call_fndecl (stmt);
+
+  /* Check for builtins that CCP can handle using information not
+     available in the generic fold routines.  */
+  if (callee && DECL_BUILT_IN (callee))
+    {
+      tree result = ccp_fold_builtin (stmt);
+
+      if (result)
+        return update_call_from_tree (gsi, result);
+    }
+  else
+    {
+      /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
+         here are when we've propagated the address of a decl into the
+         object slot.  */
+      /* ??? Should perhaps do this in fold proper.  However, doing it
+         there requires that we create a new CALL_EXPR, and that requires
+         copying EH region info to the new node.  Easier to just do it
+         here where we can just smash the call operand.  */
+      /* ??? Is there a good reason not to do this in fold_stmt_inplace?  */
+      callee = gimple_call_fn (stmt);
+      if (TREE_CODE (callee) == OBJ_TYPE_REF
+          && lang_hooks.fold_obj_type_ref
+          && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
+          && DECL_P (TREE_OPERAND
+                     (OBJ_TYPE_REF_OBJECT (callee), 0)))
+        {
+          tree t;
+
+          /* ??? Caution: Broken ADDR_EXPR semantics means that
+             looking at the type of the operand of the addr_expr
+             can yield an array type.  See silly exception in
+             check_pointer_types_r.  */
+          t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
+          t = lang_hooks.fold_obj_type_ref (callee, t);
+          if (t)
+            {
+              gimple_call_set_fn (stmt, t);
+              return true;
+            }
+        }
+    }
+
+  return false;
+}
 
-/* Fold the statement pointed to by STMT_P.  In some cases, this function may
+/* Fold the statement pointed to by GSI.  In some cases, this function may
    replace the whole statement with a new one.  Returns true iff folding
    makes any changes.  */
 
 bool
-fold_stmt (tree *stmt_p)
+fold_stmt (gimple_stmt_iterator *gsi)
 {
-  tree rhs, result, stmt;
+  tree res;
   struct fold_stmt_r_data fold_stmt_r_data;
+  struct walk_stmt_info wi;
+
   bool changed = false;
   bool inside_addr_expr = false;
 
-  stmt = *stmt_p;
+  gimple stmt = gsi_stmt (*gsi);
 
   fold_stmt_r_data.stmt = stmt;
   fold_stmt_r_data.changed_p = &changed;
   fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;
 
-  /* If we replaced constants and the statement makes pointer dereferences,
-     then we may need to fold instances of *&VAR into VAR, etc.  */
-  if (walk_tree (stmt_p, fold_stmt_r, &fold_stmt_r_data, NULL))
-    {
-      *stmt_p = build_call_expr (implicit_built_in_decls[BUILT_IN_TRAP], 0);
-      return true;
-    }
+  memset (&wi, 0, sizeof (wi));
+  wi.info = &fold_stmt_r_data;
 
-  rhs = get_rhs (stmt);
-  if (!rhs)
-    return changed;
-  result = NULL_TREE;
+  /* Fold the individual operands.
+     For example, fold instances of *&VAR into VAR, etc.  */
+  res = walk_gimple_op (stmt, fold_stmt_r, &wi);
+  gcc_assert (!res);
 
-  if (TREE_CODE (rhs) == CALL_EXPR)
+  /* Fold the main computation performed by the statement.  */
+  switch (gimple_code (stmt))
     {
-      tree callee;
-
-      /* Check for builtins that CCP can handle using information not
-        available in the generic fold routines.  */
-      callee = get_callee_fndecl (rhs);
-      if (callee && DECL_BUILT_IN (callee))
-       result = ccp_fold_builtin (stmt, rhs);
-      else
-       {
-         /* Check for resolvable OBJ_TYPE_REF.  The only sorts we can resolve
-            here are when we've propagated the address of a decl into the
-            object slot.  */
-         /* ??? Should perhaps do this in fold proper.  However, doing it
-            there requires that we create a new CALL_EXPR, and that requires
-            copying EH region info to the new node.  Easier to just do it
-            here where we can just smash the call operand. Also
-            CALL_EXPR_RETURN_SLOT_OPT needs to be handled correctly and
-            copied, fold_call_expr does not have not information. */
-         callee = CALL_EXPR_FN (rhs);
-         if (TREE_CODE (callee) == OBJ_TYPE_REF
-             && lang_hooks.fold_obj_type_ref
-             && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
-             && DECL_P (TREE_OPERAND
-                        (OBJ_TYPE_REF_OBJECT (callee), 0)))
-           {
-             tree t;
-
-             /* ??? Caution: Broken ADDR_EXPR semantics means that
-                looking at the type of the operand of the addr_expr
-                can yield an array type.  See silly exception in
-                check_pointer_types_r.  */
+    case GIMPLE_ASSIGN:
+      {
+       tree new_rhs = fold_gimple_assign (gsi);
+       if (new_rhs != NULL_TREE)
+         {
+           gimple_assign_set_rhs_from_tree (gsi, new_rhs);
+           changed = true;
+         }
+       stmt = gsi_stmt (*gsi);
+       break;
+      }
+    case GIMPLE_COND:
+      changed |= fold_gimple_cond (stmt);
+      break;
+    case GIMPLE_CALL:
+      /* The entire statement may be replaced in this case.  */
+      changed |= fold_gimple_call (gsi);
+      break;
 
-             t = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (callee)));
-             t = lang_hooks.fold_obj_type_ref (callee, t);
-             if (t)
-               {
-                 CALL_EXPR_FN (rhs) = t;
-                 changed = true;
-               }
-           }
-       }
-    }
-  else if (TREE_CODE (rhs) == COND_EXPR)
-    {
-      tree temp = fold (COND_EXPR_COND (rhs));
-      if (temp != COND_EXPR_COND (rhs))
-        result = fold_build3 (COND_EXPR, TREE_TYPE (rhs), temp,
-                              COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
+    default:
+      return changed;
+      break;
     }
 
-  /* If we couldn't fold the RHS, hand over to the generic fold routines.  */
-  if (result == NULL_TREE)
-    result = fold (rhs);
-
-  /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR that
-     may have been added by fold, and "useless" type conversions that might
-     now be apparent due to propagation.  */
-  STRIP_USELESS_TYPE_CONVERSION (result);
-
-  if (result != rhs)
-    changed |= set_rhs (stmt_p, result);
-
   return changed;
 }
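
Since folding a call may replace the statement itself, fold_stmt now takes the statement iterator rather than a pointer to the statement.  A minimal sketch of the caller-side pattern, assuming GCC-internal headers (the helper name is hypothetical):

static unsigned
fold_all_stmts_in_bb (basic_block bb)
{
  gimple_stmt_iterator gsi;
  unsigned folded = 0;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    /* The iterator is passed by address because the statement at
       *GSI may be replaced wholesale, e.g. when a call is folded.  */
    if (fold_stmt (&gsi))
      folded++;

  return folded;
}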
 
 /* Perform the minimal folding on statement STMT.  Only operations like
    *&x created by constant propagation are handled.  The statement cannot
-   be replaced with a new one.  */
+   be replaced with a new one.  Return true if the statement was
+   changed, false otherwise.  */
 
 bool
-fold_stmt_inplace (tree stmt)
+fold_stmt_inplace (gimple stmt)
 {
-  tree old_stmt = stmt, rhs, new_rhs;
+  tree res;
   struct fold_stmt_r_data fold_stmt_r_data;
+  struct walk_stmt_info wi;
+  gimple_stmt_iterator si;
+
   bool changed = false;
   bool inside_addr_expr = false;
 
@@ -2643,24 +2880,50 @@ fold_stmt_inplace (tree stmt)
   fold_stmt_r_data.changed_p = &changed;
   fold_stmt_r_data.inside_addr_expr_p = &inside_addr_expr;
 
-  walk_tree (&stmt, fold_stmt_r, &fold_stmt_r_data, NULL);
-  gcc_assert (stmt == old_stmt);
+  memset (&wi, 0, sizeof (wi));
+  wi.info = &fold_stmt_r_data;
+
+  /* Fold the individual operands.
+     For example, fold instances of *&VAR into VAR, etc.
 
-  rhs = get_rhs (stmt);
-  if (!rhs || rhs == stmt)
-    return changed;
+     It appears that, at one time, maybe_fold_stmt_indirect
+     would cause the walk to return non-null in order to
+     signal that the entire statement should be replaced with
+     a call to __builtin_trap.  This functionality is currently
+     disabled, as noted in a FIXME, and cannot be supported here.  */
+  res = walk_gimple_op (stmt, fold_stmt_r, &wi);
+  gcc_assert (!res);
 
-  new_rhs = fold (rhs);
-  STRIP_USELESS_TYPE_CONVERSION (new_rhs);
-  if (new_rhs == rhs)
-    return changed;
+  /* Fold the main computation performed by the statement.  */
+  switch (gimple_code (stmt))
+    {
+    case GIMPLE_ASSIGN:
+      {
+       unsigned old_num_ops;
+       tree new_rhs;
+       old_num_ops = gimple_num_ops (stmt);
+       si = gsi_for_stmt (stmt);
+       new_rhs = fold_gimple_assign (&si);
+       if (new_rhs != NULL_TREE
+           && get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops)
+         {
+           gimple_assign_set_rhs_from_tree (&si, new_rhs);
+           changed = true;
+         }
+       gcc_assert (gsi_stmt (si) == stmt);
+       break;
+      }
+    case GIMPLE_COND:
+      changed |= fold_gimple_cond (stmt);
+      break;
 
-  changed |= set_rhs (&stmt, new_rhs);
-  gcc_assert (stmt == old_stmt);
+    default:
+      break;
+    }
 
   return changed;
 }
-\f
+
 /* Try to optimize out __builtin_stack_restore.  Optimize it out
    if there is another __builtin_stack_restore in the same basic
    block and no calls or ASM_EXPRs are in between, or if this block's
@@ -2668,28 +2931,30 @@ fold_stmt_inplace (tree stmt)
    ASM_EXPRs after this __builtin_stack_restore.  */
 
 static tree
-optimize_stack_restore (basic_block bb, tree call, block_stmt_iterator i)
+optimize_stack_restore (gimple_stmt_iterator i)
 {
-  tree stack_save, stmt, callee;
+  tree callee, rhs;
+  gimple stmt, stack_save;
+  gimple_stmt_iterator stack_save_gsi;
+
+  basic_block bb = gsi_bb (i);
+  gimple call = gsi_stmt (i);
 
-  if (TREE_CODE (call) != CALL_EXPR
-      || call_expr_nargs (call) != 1
-      || TREE_CODE (CALL_EXPR_ARG (call, 0)) != SSA_NAME
-      || !POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (call, 0))))
+  if (gimple_code (call) != GIMPLE_CALL
+      || gimple_call_num_args (call) != 1
+      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
+      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
     return NULL_TREE;
 
-  for (bsi_next (&i); !bsi_end_p (i); bsi_next (&i))
+  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
     {
-      tree call;
-
-      stmt = bsi_stmt (i);
-      if (TREE_CODE (stmt) == ASM_EXPR)
+      stmt = gsi_stmt (i);
+      if (gimple_code (stmt) == GIMPLE_ASM)
        return NULL_TREE;
-      call = get_call_expr_in (stmt);
-      if (call == NULL)
+      if (gimple_code (stmt) != GIMPLE_CALL)
        continue;
 
-      callee = get_callee_fndecl (call);
+      callee = gimple_call_fndecl (stmt);
       if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
        return NULL_TREE;
 
@@ -2697,55 +2962,54 @@ optimize_stack_restore (basic_block bb, tree call, block_stmt_iterator i)
        break;
     }
 
-  if (bsi_end_p (i)
+  if (gsi_end_p (i)
       && (! single_succ_p (bb)
          || single_succ_edge (bb)->dest != EXIT_BLOCK_PTR))
     return NULL_TREE;
 
-  stack_save = SSA_NAME_DEF_STMT (CALL_EXPR_ARG (call, 0));
-  if (TREE_CODE (stack_save) != GIMPLE_MODIFY_STMT
-      || GIMPLE_STMT_OPERAND (stack_save, 0) != CALL_EXPR_ARG (call, 0)
-      || TREE_CODE (GIMPLE_STMT_OPERAND (stack_save, 1)) != CALL_EXPR
-      || tree_could_throw_p (stack_save)
-      || !has_single_use (CALL_EXPR_ARG (call, 0)))
+  stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
+  if (gimple_code (stack_save) != GIMPLE_CALL
+      || gimple_call_lhs (stack_save) != gimple_call_arg (call, 0)
+      || stmt_could_throw_p (stack_save)
+      || !has_single_use (gimple_call_arg (call, 0)))
     return NULL_TREE;
 
-  callee = get_callee_fndecl (GIMPLE_STMT_OPERAND (stack_save, 1));
+  callee = gimple_call_fndecl (stack_save);
   if (!callee
       || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
       || DECL_FUNCTION_CODE (callee) != BUILT_IN_STACK_SAVE
-      || call_expr_nargs (GIMPLE_STMT_OPERAND (stack_save, 1)) != 0)
+      || gimple_call_num_args (stack_save) != 0)
     return NULL_TREE;
 
-  stmt = stack_save;
-  push_stmt_changes (&stmt);
-  if (!set_rhs (&stmt,
-               build_int_cst (TREE_TYPE (CALL_EXPR_ARG (call, 0)), 0)))
+  stack_save_gsi = gsi_for_stmt (stack_save);
+  push_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
+  rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
+  if (!update_call_from_tree (&stack_save_gsi, rhs))
     {
-      discard_stmt_changes (&stmt);
+      discard_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
       return NULL_TREE;
     }
-  gcc_assert (stmt == stack_save);
-  pop_stmt_changes (&stmt);
+  pop_stmt_changes (gsi_stmt_ptr (&stack_save_gsi));
 
+  /* No effect, so the statement will be deleted.  */
   return integer_zero_node;
 }
-\f
+
 /* If va_list type is a simple pointer and nothing special is needed,
    optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
    __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
    pointer assignment.  */
 
 static tree
-optimize_stdarg_builtin (tree call)
+optimize_stdarg_builtin (gimple call)
 {
   tree callee, lhs, rhs, cfun_va_list;
   bool va_list_simple_ptr;
 
-  if (TREE_CODE (call) != CALL_EXPR)
+  if (gimple_code (call) != GIMPLE_CALL)
     return NULL_TREE;
 
-  callee = get_callee_fndecl (call);
+  callee = gimple_call_fndecl (call);
 
   cfun_va_list = targetm.fn_abi_va_list (callee);
   va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
@@ -2757,21 +3021,21 @@ optimize_stdarg_builtin (tree call)
     case BUILT_IN_VA_START:
       if (!va_list_simple_ptr
          || targetm.expand_builtin_va_start != NULL
-         || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
+          || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
        return NULL_TREE;
 
-      if (call_expr_nargs (call) != 2)
+      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;
 
-      lhs = CALL_EXPR_ARG (call, 0);
+      lhs = gimple_call_arg (call, 0);
       if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;
-
+
       lhs = build_fold_indirect_ref (lhs);
       rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
-                            1, integer_zero_node);
+                             1, integer_zero_node);
       rhs = fold_convert (TREE_TYPE (lhs), rhs);
       return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
 
@@ -2779,17 +3043,17 @@ optimize_stdarg_builtin (tree call)
       if (!va_list_simple_ptr)
        return NULL_TREE;
 
-      if (call_expr_nargs (call) != 2)
+      if (gimple_call_num_args (call) != 2)
        return NULL_TREE;
 
-      lhs = CALL_EXPR_ARG (call, 0);
+      lhs = gimple_call_arg (call, 0);
       if (!POINTER_TYPE_P (TREE_TYPE (lhs))
          || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
             != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;
 
       lhs = build_fold_indirect_ref (lhs);
-      rhs = CALL_EXPR_ARG (call, 1);
+      rhs = gimple_call_arg (call, 1);
       if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
          != TYPE_MAIN_VARIANT (cfun_va_list))
        return NULL_TREE;
@@ -2798,53 +3062,73 @@ optimize_stdarg_builtin (tree call)
       return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
 
     case BUILT_IN_VA_END:
+      /* No effect, so the statement will be deleted.  */
       return integer_zero_node;
 
     default:
       gcc_unreachable ();
     }
 }
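
On targets where va_list is a plain pointer, the rewrites above correspond to the following source-level picture; a small illustrative test case, not part of the patch:

#include <stdarg.h>

int
sum (int n, ...)
{
  va_list ap, ap2;
  int i, s = 0;

  __builtin_va_start (ap, n);   /* becomes  ap = __builtin_next_arg (0)  */
  __builtin_va_copy (ap2, ap);  /* becomes  ap2 = ap                     */
  for (i = 0; i < n; i++)
    s += va_arg (ap2, int);
  __builtin_va_end (ap2);       /* folded away                           */
  __builtin_va_end (ap);        /* folded away                           */

  return s;
}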
-\f
+
 /* Convert EXPR into a GIMPLE value suitable for substitution on the
    RHS of an assignment.  Insert the necessary statements before
-   iterator *SI_P. 
-   When IGNORE is set, don't worry about the return value.  */
+   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
+   is replaced.  If the call is expected to produce a result, then it
+   is replaced by an assignment of the new RHS to the result variable.
+   If the result is to be ignored, then the call is replaced by a
+   GIMPLE_NOP.  */
 
-static tree
-convert_to_gimple_builtin (block_stmt_iterator *si_p, tree expr, bool ignore)
+static void
+gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
 {
-  tree_stmt_iterator ti;
-  tree stmt = bsi_stmt (*si_p);
-  tree tmp, stmts = NULL;
+  tree lhs;
+  tree tmp = NULL_TREE;  /* Silence warning.  */
+  gimple stmt, new_stmt;
+  gimple_stmt_iterator i;
+  gimple_seq stmts = gimple_seq_alloc ();
   struct gimplify_ctx gctx;
 
+  stmt = gsi_stmt (*si_p);
+
+  gcc_assert (is_gimple_call (stmt));
+
+  lhs = gimple_call_lhs (stmt);
+
   push_gimplify_context (&gctx);
-  if (ignore)
-    {
-      tmp = build_empty_stmt ();
-      gimplify_and_add (expr, &stmts);
-    }
-  else
+
+  if (lhs == NULL_TREE)
+    gimplify_and_add (expr, &stmts);
+  else 
     tmp = get_initialized_tmp_var (expr, &stmts, NULL);
+
   pop_gimplify_context (NULL);
 
-  if (EXPR_HAS_LOCATION (stmt))
-    annotate_all_with_locus (&stmts, EXPR_LOCATION (stmt));
+  if (gimple_has_location (stmt))
+    annotate_all_with_location (stmts, gimple_location (stmt));
 
   /* The replacement can expose previously unreferenced variables.  */
-  for (ti = tsi_start (stmts); !tsi_end_p (ti); tsi_next (&ti))
+  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
+  {
+    new_stmt = gsi_stmt (i);
+    find_new_referenced_vars (new_stmt);
+    gsi_insert_before (si_p, new_stmt, GSI_NEW_STMT);
+    mark_symbols_for_renaming (new_stmt);
+    gsi_next (si_p);
+  }
+
+  if (lhs == NULL_TREE)
+    new_stmt = gimple_build_nop ();
+  else
     {
-      tree new_stmt = tsi_stmt (ti);
-      find_new_referenced_vars (tsi_stmt_ptr (ti));
-      bsi_insert_before (si_p, new_stmt, BSI_NEW_STMT);
-      mark_symbols_for_renaming (new_stmt);
-      bsi_next (si_p);
+      new_stmt = gimple_build_assign (lhs, tmp);
+      copy_virtual_operands (new_stmt, stmt);
+      move_ssa_defining_stmt_for_defs (new_stmt, stmt);
     }
 
-  return tmp;
+  gimple_set_location (new_stmt, gimple_location (stmt));
+  gsi_replace (si_p, new_stmt, false);
 }
 
-
 /* A simple pass that attempts to fold all builtin functions.  This pass
    is run after we've propagated as many constants as we can.  */
 
@@ -2857,32 +3141,32 @@ execute_fold_all_builtins (void)
   
   FOR_EACH_BB (bb)
     {
-      block_stmt_iterator i;
-      for (i = bsi_start (bb); !bsi_end_p (i); )
+      gimple_stmt_iterator i;
+      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
-         tree *stmtp = bsi_stmt_ptr (i);
-         tree old_stmt = *stmtp;
-         tree call = get_rhs (*stmtp);
+          gimple stmt, old_stmt;
          tree callee, result;
          enum built_in_function fcode;
 
-         if (!call || TREE_CODE (call) != CALL_EXPR)
+         stmt = gsi_stmt (i);
+
+          if (gimple_code (stmt) != GIMPLE_CALL)
            {
-             bsi_next (&i);
+             gsi_next (&i);
              continue;
            }
-         callee = get_callee_fndecl (call);
+         callee = gimple_call_fndecl (stmt);
          if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            {
-             bsi_next (&i);
+             gsi_next (&i);
              continue;
            }
          fcode = DECL_FUNCTION_CODE (callee);
 
-         result = ccp_fold_builtin (*stmtp, call);
+         result = ccp_fold_builtin (stmt);
 
          if (result)
-           gimple_remove_stmt_histograms (cfun, *stmtp);
+           gimple_remove_stmt_histograms (cfun, stmt);
 
          if (!result)
            switch (DECL_FUNCTION_CODE (callee))
@@ -2891,77 +3175,71 @@ execute_fold_all_builtins (void)
                /* Resolve __builtin_constant_p.  If it hasn't been
                   folded to integer_one_node by now, it's fairly
                   certain that the value simply isn't constant.  */
-               result = integer_zero_node;
+                result = integer_zero_node;
                break;
 
              case BUILT_IN_STACK_RESTORE:
-               result = optimize_stack_restore (bb, *stmtp, i);
+               result = optimize_stack_restore (i);
                if (result)
                  break;
-               bsi_next (&i);
+               gsi_next (&i);
                continue;
 
              case BUILT_IN_VA_START:
              case BUILT_IN_VA_END:
              case BUILT_IN_VA_COPY:
                /* These shouldn't be folded before pass_stdarg.  */
-               result = optimize_stdarg_builtin (*stmtp);
+               result = optimize_stdarg_builtin (stmt);
                if (result)
                  break;
                /* FALLTHRU */
 
              default:
-               bsi_next (&i);
+               gsi_next (&i);
                continue;
              }
 
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Simplified\n  ");
-             print_generic_stmt (dump_file, *stmtp, dump_flags);
+             print_gimple_stmt (dump_file, stmt, 0, dump_flags);
            }
 
-         push_stmt_changes (stmtp);
+          old_stmt = stmt;
+         push_stmt_changes (gsi_stmt_ptr (&i));
 
-         if (!set_rhs (stmtp, result))
-           {
-             result = convert_to_gimple_builtin (&i, result,
-                                                 TREE_CODE (old_stmt)
-                                                 != GIMPLE_MODIFY_STMT);
-             if (result)
-               {
-                 bool ok = set_rhs (stmtp, result);
-                 gcc_assert (ok);
-                 todoflags |= TODO_rebuild_alias;
-               }
-           }
+          if (!update_call_from_tree (&i, result))
+            {
+              gimplify_and_update_call_from_tree (&i, result);
+              todoflags |= TODO_rebuild_alias;
+            }
 
-         pop_stmt_changes (stmtp);
+         stmt = gsi_stmt (i);
+         pop_stmt_changes (gsi_stmt_ptr (&i));
 
-         if (maybe_clean_or_replace_eh_stmt (old_stmt, *stmtp)
-             && tree_purge_dead_eh_edges (bb))
+         if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
+             && gimple_purge_dead_eh_edges (bb))
            cfg_changed = true;
 
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "to\n  ");
-             print_generic_stmt (dump_file, *stmtp, dump_flags);
+             print_gimple_stmt (dump_file, stmt, 0, dump_flags);
              fprintf (dump_file, "\n");
            }
 
          /* Retry the same statement if it changed into another
             builtin, there might be new opportunities now.  */
-         call = get_rhs (*stmtp);
-         if (!call || TREE_CODE (call) != CALL_EXPR)
+          if (gimple_code (stmt) != GIMPLE_CALL)
            {
-             bsi_next (&i);
+             gsi_next (&i);
              continue;
            }
-         callee = get_callee_fndecl (call);
+         callee = gimple_call_fndecl (stmt);
          if (!callee
-             || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
+              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
              || DECL_FUNCTION_CODE (callee) == fcode)
-           bsi_next (&i);
+           gsi_next (&i);
        }
     }
   
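
The hunks above rewrite the builtin-folding walk onto the tuple iterators: the loop advances the gimple_stmt_iterator only when the statement can no longer be a foldable call, so a call that folds into another builtin is retried in place.  The following standalone C sketch is a toy model of that retry-in-place control flow over a trivial "IR" of integers; fold_stmt_once is an invented helper, and nothing here is GCC code.

/* Toy model of the fold-all-builtins iteration pattern: visit each
   statement, try to fold it, and only advance when the result is no
   longer a foldable "call" (negative values stand for calls).  */

#include <stdio.h>

static int fold_stmt_once (int stmt, int *changed)
{
  if (stmt == -42)
    {
      *changed = 1;
      return -7;        /* folded into a different builtin call */
    }
  if (stmt < 0)
    {
      *changed = 1;
      return -stmt;     /* folded to a constant */
    }
  *changed = 0;
  return stmt;          /* not a foldable call */
}

int main (void)
{
  int bb[] = { 1, -42, 3, -5 };
  size_t n = sizeof bb / sizeof bb[0];

  for (size_t i = 0; i < n; )
    {
      int changed;
      bb[i] = fold_stmt_once (bb[i], &changed);

      /* Retry the same slot if it is still a "call"; otherwise move on,
         mirroring the conditional gsi_next calls in the hunk above.  */
      if (changed && bb[i] < 0)
        continue;
      i++;
    }

  for (size_t i = 0; i < n; i++)
    printf ("%d ", bb[i]);
  printf ("\n");
  return 0;
}
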
index a96029a..d5e5f87 100644 (file)
@@ -841,16 +841,15 @@ build_ssa_conflict_graph (tree_live_info_p liveinfo)
 
   FOR_EACH_BB (bb)
     {
-      block_stmt_iterator bsi;
-      tree phi;
+      gimple_stmt_iterator gsi;
 
       /* Start with live on exit temporaries.  */
       live_track_init (live, live_on_exit (liveinfo, bb));
 
-      for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
+      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
         {
          tree var;
-         tree stmt = bsi_stmt (bsi);
+         gimple stmt = gsi_stmt (gsi);
 
          /* A copy between 2 partitions does not introduce an interference 
             by itself.  If they did, you would never be able to coalesce 
@@ -859,12 +858,14 @@ build_ssa_conflict_graph (tree_live_info_p liveinfo)
             
             This is handled by simply removing the SRC of the copy from the 
             live list, and processing the stmt normally.  */
-         if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
+         if (is_gimple_assign (stmt))
            {
-             tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
-             tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-             if (TREE_CODE (lhs) == SSA_NAME && TREE_CODE (rhs) == SSA_NAME)
-               live_track_clear_var (live, rhs);
+             tree lhs = gimple_assign_lhs (stmt);
+             tree rhs1 = gimple_assign_rhs1 (stmt);
+             if (gimple_assign_copy_p (stmt)
+                  && TREE_CODE (lhs) == SSA_NAME
+                  && TREE_CODE (rhs1) == SSA_NAME)
+               live_track_clear_var (live, rhs1);
            }
 
          FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_DEF)
@@ -880,8 +881,9 @@ build_ssa_conflict_graph (tree_live_info_p liveinfo)
         There must be a conflict recorded between the result of the PHI and 
         any variables that are live.  Otherwise the out-of-ssa translation 
         may create incorrect code.  */
-      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
+         gimple phi = gsi_stmt (gsi);
          tree result = PHI_RESULT (phi);
          if (live_track_live_p (live, result))
            live_track_process_def (live, result, graph);
@@ -915,11 +917,11 @@ print_exprs (FILE *f, const char *str1, tree expr1, const char *str2,
    printed and compilation is then terminated.  */
 
 static inline void
-abnormal_corrupt (tree phi, int i)
+abnormal_corrupt (gimple phi, int i)
 {
-  edge e = PHI_ARG_EDGE (phi, i);
-  tree res = PHI_RESULT (phi);
-  tree arg = PHI_ARG_DEF (phi, i);
+  edge e = gimple_phi_arg_edge (phi, i);
+  tree res = gimple_phi_result (phi);
+  tree arg = gimple_phi_arg_def (phi, i);
 
   fprintf (stderr, " Corrupt SSA across abnormal edge BB%d->BB%d\n",
           e->src->index, e->dest->index);
@@ -959,10 +961,10 @@ fail_abnormal_edge_coalesce (int x, int y)
 static var_map
 create_outofssa_var_map (coalesce_list_p cl, bitmap used_in_copy)
 {
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   basic_block bb;
   tree var;
-  tree stmt;
+  gimple stmt;
   tree first;
   var_map map;
   ssa_op_iter iter;
@@ -981,24 +983,25 @@ create_outofssa_var_map (coalesce_list_p cl, bitmap used_in_copy)
 
   FOR_EACH_BB (bb)
     {
-      tree phi, arg;
+      tree arg;
 
-      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
-         int i;
+         gimple phi = gsi_stmt (gsi);
+         size_t i;
          int ver;
          tree res;
          bool saw_copy = false;
 
-         res = PHI_RESULT (phi);
+         res = gimple_phi_result (phi);
          ver = SSA_NAME_VERSION (res);
          register_ssa_partition (map, res);
 
          /* Register ssa_names and coalesces between the args and the result 
             of all PHI.  */
-         for (i = 0; i < PHI_NUM_ARGS (phi); i++)
+         for (i = 0; i < gimple_phi_num_args (phi); i++)
            {
-             edge e = PHI_ARG_EDGE (phi, i);
+             edge e = gimple_phi_arg_edge (phi, i);
              arg = PHI_ARG_DEF (phi, i);
              if (TREE_CODE (arg) == SSA_NAME)
                register_ssa_partition (map, arg);
@@ -1024,27 +1027,29 @@ create_outofssa_var_map (coalesce_list_p cl, bitmap used_in_copy)
            bitmap_set_bit (used_in_copy, ver);
        }
 
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
         {
-         stmt = bsi_stmt (bsi);
+         stmt = gsi_stmt (gsi);
 
          /* Register USE and DEF operands in each statement.  */
          FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, (SSA_OP_DEF|SSA_OP_USE))
            register_ssa_partition (map, var);
 
          /* Check for copy coalesces.  */
-         switch (TREE_CODE (stmt))
+         switch (gimple_code (stmt))
            {
-           case GIMPLE_MODIFY_STMT:
+           case GIMPLE_ASSIGN:
              {
-               tree op1 = GIMPLE_STMT_OPERAND (stmt, 0);
-               tree op2 = GIMPLE_STMT_OPERAND (stmt, 1);
-               if (TREE_CODE (op1) == SSA_NAME 
-                   && TREE_CODE (op2) == SSA_NAME
-                   && SSA_NAME_VAR (op1) == SSA_NAME_VAR (op2))
+               tree lhs = gimple_assign_lhs (stmt);
+               tree rhs1 = gimple_assign_rhs1 (stmt);
+
+               if (gimple_assign_copy_p (stmt)
+                    && TREE_CODE (lhs) == SSA_NAME
+                   && TREE_CODE (rhs1) == SSA_NAME
+                   && SSA_NAME_VAR (lhs) == SSA_NAME_VAR (rhs1))
                  {
-                   v1 = SSA_NAME_VERSION (op1);
-                   v2 = SSA_NAME_VERSION (op2);
+                   v1 = SSA_NAME_VERSION (lhs);
+                   v2 = SSA_NAME_VERSION (rhs1);
                    cost = coalesce_cost_bb (bb);
                    add_coalesce (cl, v1, v2, cost);
                    bitmap_set_bit (used_in_copy, v1);
@@ -1053,24 +1058,31 @@ create_outofssa_var_map (coalesce_list_p cl, bitmap used_in_copy)
              }
              break;
 
-           case ASM_EXPR:
+           case GIMPLE_ASM:
              {
                unsigned long noutputs, i;
+               unsigned long ninputs;
                tree *outputs, link;
-               noutputs = list_length (ASM_OUTPUTS (stmt));
+               noutputs = gimple_asm_noutputs (stmt);
+               ninputs = gimple_asm_ninputs (stmt);
                outputs = (tree *) alloca (noutputs * sizeof (tree));
-               for (i = 0, link = ASM_OUTPUTS (stmt); link;
-                    ++i, link = TREE_CHAIN (link))
+               for (i = 0; i < noutputs; ++i) {
+                 link = gimple_asm_output_op (stmt, i);
                  outputs[i] = TREE_VALUE (link);
+                }
 
-               for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
+               for (i = 0; i < ninputs; ++i)
                  {
-                   const char *constraint
-                     = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
-                   tree input = TREE_VALUE (link);
+                    const char *constraint;
+                    tree input;
                    char *end;
                    unsigned long match;
 
+                   link = gimple_asm_input_op (stmt, i);
+                   constraint
+                     = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
+                   input = TREE_VALUE (link);
+
                    if (TREE_CODE (input) != SSA_NAME)
                      continue;
 
@@ -1247,7 +1259,7 @@ coalesce_partitions (var_map map, ssa_conflicts_p graph, coalesce_list_p cl,
                     FILE *debug)
 {
   int x = 0, y = 0;
-  tree var1, var2, phi;
+  tree var1, var2;
   int cost;
   basic_block bb;
   edge e;
@@ -1262,8 +1274,11 @@ coalesce_partitions (var_map map, ssa_conflicts_p graph, coalesce_list_p cl,
       FOR_EACH_EDGE (e, ei, bb->preds)
        if (e->flags & EDGE_ABNORMAL)
          {
-           for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+           gimple_stmt_iterator gsi;
+           for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
+                gsi_next (&gsi))
              {
+               gimple phi = gsi_stmt (gsi);
                tree res = PHI_RESULT (phi);
                tree arg = PHI_ARG_DEF (phi, e->dest_idx);
                int v1 = SSA_NAME_VERSION (res);
@@ -1437,4 +1452,3 @@ coalesce_ssa_name (void)
 
   return map;
 }
-
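
After tuplification, PHI nodes are no longer walked through PHI_CHAIN; the hunks above iterate them with gsi_start_phis and read arguments through gimple_phi_num_args / gimple_phi_arg_def / gimple_phi_arg_edge.  The sketch below models that argument walk with invented toy_phi and add_coalesce names; it is illustrative only and makes no claim about the real var_map or coalesce_list_p structures.

/* Toy PHI walk: register a coalesce candidate between the PHI result
   and every SSA-name argument, weighted by the incoming edge.  */

#include <stdio.h>

#define MAX_ARGS 4

struct toy_phi
{
  int result;               /* SSA version of the PHI result */
  int nargs;
  int args[MAX_ARGS];       /* SSA versions of the arguments, -1 = constant */
  int edge_cost[MAX_ARGS];  /* coalesce cost contributed by each edge */
};

static void add_coalesce (int v1, int v2, int cost)
{
  printf ("coalesce candidate: %d <-> %d (cost %d)\n", v1, v2, cost);
}

int main (void)
{
  struct toy_phi phi = { 10, 3, { 4, -1, 7 }, { 1, 1, 1000 } };

  /* Mirrors the gimple_phi_num_args / gimple_phi_arg_def loop above.  */
  for (int i = 0; i < phi.nargs; i++)
    {
      int arg = phi.args[i];
      if (arg < 0)              /* skip non-SSA arguments */
        continue;
      add_coalesce (phi.result, arg, phi.edge_cost[i]);
    }
  return 0;
}
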
index bc8a874..c228725 100644 (file)
@@ -184,6 +184,55 @@ may_propagate_copy (tree dest, tree orig)
   return true;
 }
 
+/* Like may_propagate_copy, but use as the destination expression
+   the principal expression (typically, the RHS) contained in
+   statement DEST.  This is more efficient when working with the
+   gimple tuples representation.  */
+
+bool
+may_propagate_copy_into_stmt (gimple dest, tree orig)
+{
+  tree type_d;
+  tree type_o;
+
+  /* If the statement is a switch or a single-rhs assignment,
+     then the expression to be replaced by the propagation may
+     be an SSA_NAME.  Fortunately, there is an explicit tree
+     for the expression, so we delegate to may_propagate_copy.  */
+
+  if (gimple_assign_single_p (dest))
+    return may_propagate_copy (gimple_assign_rhs1 (dest), orig);
+  else if (gimple_code (dest) == GIMPLE_SWITCH)
+    return may_propagate_copy (gimple_switch_index (dest), orig);
+
+  /* In other cases, the expression is not materialized, so there
+     is no destination to pass to may_propagate_copy.  On the other
+     hand, the expression cannot be an SSA_NAME, so the analysis
+     is much simpler.  */
+
+  if (TREE_CODE (orig) == SSA_NAME
+      && (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (orig)
+          ||  TREE_CODE (SSA_NAME_VAR (orig)) == MEMORY_PARTITION_TAG))
+    return false;
+
+  if (is_gimple_assign (dest))
+    type_d = TREE_TYPE (gimple_assign_lhs (dest));
+  else if (gimple_code (dest) == GIMPLE_COND)
+    type_d = boolean_type_node;
+  else if (is_gimple_call (dest)
+           && gimple_call_lhs (dest) != NULL_TREE)
+    type_d = TREE_TYPE (gimple_call_lhs (dest));
+  else
+    gcc_unreachable ();
+
+  type_o = TREE_TYPE (orig);
+
+  if (!useless_type_conversion_p (type_d, type_o))
+    return false;
+
+  return true;
+}
+
 /* Similarly, but we know that we're propagating into an ASM_EXPR.  */
 
 bool
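
For most statement kinds may_propagate_copy_into_stmt, added above, has no materialized destination expression, so it dispatches on the statement code to recover the destination type and then requires a lossless conversion from the propagated value's type.  A rough standalone model of that dispatch follows; the toy_stmt type and the types_compatible helper are invented stand-ins, not the GCC API.

/* Toy dispatch: pick the destination type from the statement kind,
   then check the conversion from the propagated value's type.  */

#include <stdbool.h>
#include <stdio.h>
#include <string.h>

enum stmt_kind { ASSIGN, COND, CALL_WITH_LHS };

struct toy_stmt
{
  enum stmt_kind kind;
  const char *lhs_type;    /* type of the LHS, when there is one */
};

/* Stand-in for useless_type_conversion_p: plain name equality here.  */
static bool types_compatible (const char *dest, const char *src)
{
  return strcmp (dest, src) == 0;
}

static bool may_propagate_into (const struct toy_stmt *s, const char *val_type)
{
  const char *dest_type;

  switch (s->kind)
    {
    case ASSIGN:
    case CALL_WITH_LHS:
      dest_type = s->lhs_type;
      break;
    case COND:
      dest_type = "_Bool";   /* conditionals are compared in boolean context */
      break;
    default:
      return false;
    }
  return types_compatible (dest_type, val_type);
}

int main (void)
{
  struct toy_stmt assign = { ASSIGN, "int" };
  struct toy_stmt cond = { COND, "_Bool" };

  printf ("into assignment: %d\n", may_propagate_into (&assign, "int"));
  printf ("into condition:  %d\n", may_propagate_into (&cond, "int"));
  return 0;
}
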
@@ -303,7 +352,7 @@ merge_alias_info (tree orig_name, tree new_name)
 
 static void
 replace_exp_1 (use_operand_p op_p, tree val,
-              bool for_propagation ATTRIBUTE_UNUSED)
+              bool for_propagation ATTRIBUTE_UNUSED)
 {
   tree op = USE_FROM_PTR (op_p);
 
@@ -337,6 +386,18 @@ propagate_value (use_operand_p op_p, tree val)
   replace_exp_1 (op_p, val, true);
 }
 
+/* Replace *OP_P with value VAL (assumed to be a constant or another SSA_NAME).
+
+   Use this version when not const/copy propagating values.  For example,
+   PRE uses this version when building expressions as they would appear
+   in specific blocks taking into account actions of PHI nodes.  */
+
+void
+replace_exp (use_operand_p op_p, tree val)
+{
+  replace_exp_1 (op_p, val, false);
+}
+
 
 /* Propagate the value VAL (assumed to be a constant or another SSA_NAME)
    into the tree pointed to by OP_P.
@@ -351,13 +412,14 @@ propagate_tree_value (tree *op_p, tree val)
 {
 #if defined ENABLE_CHECKING
   gcc_assert (!(TREE_CODE (val) == SSA_NAME
+                && *op_p
                && TREE_CODE (*op_p) == SSA_NAME
                && !may_propagate_copy (*op_p, val)));
 #endif
 
   if (TREE_CODE (val) == SSA_NAME)
     {
-      if (TREE_CODE (*op_p) == SSA_NAME && POINTER_TYPE_P (TREE_TYPE (*op_p)))
+      if (*op_p && TREE_CODE (*op_p) == SSA_NAME && POINTER_TYPE_P (TREE_TYPE (*op_p)))
        merge_alias_info (*op_p, val);
       *op_p = val;
     }
@@ -366,18 +428,52 @@ propagate_tree_value (tree *op_p, tree val)
 }
 
 
-/* Replace *OP_P with value VAL (assumed to be a constant or another SSA_NAME).
-
-   Use this version when not const/copy propagating values.  For example,
-   PRE uses this version when building expressions as they would appear
-   in specific blocks taking into account actions of PHI nodes.  */
+/* Like propagate_tree_value, but use as the operand to replace
+   the principal expression (typically, the RHS) contained in the
+   statement referenced by iterator GSI.  Note that it is not
+   always possible to update the statement in-place, so a new
+   statement may be created to replace the original.  */
 
 void
-replace_exp (use_operand_p op_p, tree val)
+propagate_tree_value_into_stmt (gimple_stmt_iterator *gsi, tree val)
 {
-  replace_exp_1 (op_p, val, false);
-}
+  gimple stmt = gsi_stmt (*gsi);
 
+  if (is_gimple_assign (stmt))
+    {
+      tree expr = NULL_TREE;
+      if (gimple_assign_single_p (stmt))
+        expr = gimple_assign_rhs1 (stmt);
+      propagate_tree_value (&expr, val);
+      gimple_assign_set_rhs_from_tree (gsi, expr);
+      stmt = gsi_stmt (*gsi);
+    }
+  else if (gimple_code (stmt) == GIMPLE_COND)
+    {
+      tree lhs = NULL_TREE;
+      tree rhs = fold_convert (TREE_TYPE (val), integer_zero_node);
+      propagate_tree_value (&lhs, val);
+      gimple_cond_set_code (stmt, NE_EXPR);
+      gimple_cond_set_lhs (stmt, lhs);
+      gimple_cond_set_rhs (stmt, rhs);
+    }
+  else if (is_gimple_call (stmt)
+           && gimple_call_lhs (stmt) != NULL_TREE)
+    {
+      gimple new_stmt;
+
+      tree expr = NULL_TREE;
+      propagate_tree_value (&expr, val);
+      new_stmt  = gimple_build_assign (gimple_call_lhs (stmt), expr);
+      copy_virtual_operands (new_stmt, stmt);
+      move_ssa_defining_stmt_for_defs (new_stmt, stmt);
+      gsi_replace (gsi, new_stmt, false);
+    }
+  else if (gimple_code (stmt) == GIMPLE_SWITCH)
+    propagate_tree_value (gimple_switch_index_ptr (stmt), val);
+  else
+    gcc_unreachable ();
+}
 
 /*---------------------------------------------------------------------------
                                Copy propagation
@@ -403,24 +499,17 @@ static tree *cached_last_copy_of;
 /* Return true if this statement may generate a useful copy.  */
 
 static bool
-stmt_may_generate_copy (tree stmt)
+stmt_may_generate_copy (gimple stmt)
 {
-  tree lhs, rhs;
-  stmt_ann_t ann;
-
-  if (TREE_CODE (stmt) == PHI_NODE)
-    return !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (stmt));
+  if (gimple_code (stmt) == GIMPLE_PHI)
+    return !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_phi_result (stmt));
 
-  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+  if (gimple_code (stmt) != GIMPLE_ASSIGN)
     return false;
 
-  lhs = GIMPLE_STMT_OPERAND (stmt, 0);
-  rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-  ann = stmt_ann (stmt);
-
   /* If the statement has volatile operands, it won't generate a
      useful copy.  */
-  if (ann->has_volatile_ops)
+  if (gimple_has_volatile_ops (stmt))
     return false;
 
   /* Statements with loads and/or stores will never generate a useful copy.  */
@@ -430,8 +519,8 @@ stmt_may_generate_copy (tree stmt)
   /* Otherwise, the only statements that generate useful copies are
      assignments whose RHS is just an SSA name that doesn't flow
      through abnormal edges.  */
-  return (TREE_CODE (rhs) == SSA_NAME
-         && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs));
+  return (gimple_assign_rhs_code (stmt) == SSA_NAME
+         && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)));
 }
 
 
@@ -584,15 +673,16 @@ dump_copy_of (FILE *file, tree var)
    all, the names generated will be VUSEd in the same statements.  */
 
 static enum ssa_prop_result
-copy_prop_visit_assignment (tree stmt, tree *result_p)
+copy_prop_visit_assignment (gimple stmt, tree *result_p)
 {
   tree lhs, rhs;
   prop_value_t *rhs_val;
 
-  lhs = GIMPLE_STMT_OPERAND (stmt, 0);
-  rhs = GIMPLE_STMT_OPERAND (stmt, 1);
+  lhs = gimple_assign_lhs (stmt);
+  rhs = gimple_assign_rhs1 (stmt);
+  
 
-  gcc_assert (TREE_CODE (rhs) == SSA_NAME);
+  gcc_assert (gimple_assign_rhs_code (stmt) == SSA_NAME);
 
   rhs_val = get_copy_of_val (rhs);
 
@@ -620,45 +710,42 @@ copy_prop_visit_assignment (tree stmt, tree *result_p)
 }
 
 
-/* Visit the COND_EXPR STMT.  Return SSA_PROP_INTERESTING
+/* Visit the GIMPLE_COND STMT.  Return SSA_PROP_INTERESTING
    if it can determine which edge will be taken.  Otherwise, return
    SSA_PROP_VARYING.  */
 
 static enum ssa_prop_result
-copy_prop_visit_cond_stmt (tree stmt, edge *taken_edge_p)
+copy_prop_visit_cond_stmt (gimple stmt, edge *taken_edge_p)
 {
-  enum ssa_prop_result retval;
-  tree cond;
+  enum ssa_prop_result retval = SSA_PROP_VARYING;
 
-  cond = COND_EXPR_COND (stmt);
-  retval = SSA_PROP_VARYING;
+  tree op0 = gimple_cond_lhs (stmt);
+  tree op1 = gimple_cond_rhs (stmt);
 
   /* The only conditionals that we may be able to compute statically
      are predicates involving two SSA_NAMEs.  */
-  if (COMPARISON_CLASS_P (cond)
-      && TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME
-      && TREE_CODE (TREE_OPERAND (cond, 1)) == SSA_NAME)
+  if (TREE_CODE (op0) == SSA_NAME && TREE_CODE (op1) == SSA_NAME)
     {
-      tree op0 = get_last_copy_of (TREE_OPERAND (cond, 0));
-      tree op1 = get_last_copy_of (TREE_OPERAND (cond, 1));
+      op0 = get_last_copy_of (op0);
+      op1 = get_last_copy_of (op1);
 
       /* See if we can determine the predicate's value.  */
       if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Trying to determine truth value of ");
          fprintf (dump_file, "predicate ");
-         print_generic_stmt (dump_file, cond, 0);
+         print_gimple_stmt (dump_file, stmt, 0, 0);
        }
 
       /* We can fold COND and get a useful result only when we have
         the same SSA_NAME on both sides of a comparison operator.  */
       if (op0 == op1)
        {
-         tree folded_cond = fold_binary (TREE_CODE (cond), boolean_type_node,
-                                         op0, op1);
+         tree folded_cond = fold_binary (gimple_cond_code (stmt),
+                                          boolean_type_node, op0, op1);
          if (folded_cond)
            {
-             basic_block bb = bb_for_stmt (stmt);
+             basic_block bb = gimple_bb (stmt);
              *taken_edge_p = find_taken_edge (bb, folded_cond);
              if (*taken_edge_p)
                retval = SSA_PROP_INTERESTING;
@@ -685,26 +772,26 @@ copy_prop_visit_cond_stmt (tree stmt, edge *taken_edge_p)
    SSA_PROP_VARYING.  */
 
 static enum ssa_prop_result
-copy_prop_visit_stmt (tree stmt, edge *taken_edge_p, tree *result_p)
+copy_prop_visit_stmt (gimple stmt, edge *taken_edge_p, tree *result_p)
 {
   enum ssa_prop_result retval;
 
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
       fprintf (dump_file, "\nVisiting statement:\n");
-      print_generic_stmt (dump_file, stmt, dump_flags);
+      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
       fprintf (dump_file, "\n");
     }
 
-  if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-      && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == SSA_NAME
-      && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) == SSA_NAME)
+  if (gimple_assign_single_p (stmt)
+      && TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
+      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
     {
       /* If the statement is a copy assignment, evaluate its RHS to
         see if the lattice value of its output has changed.  */
       retval = copy_prop_visit_assignment (stmt, result_p);
     }
-  else if (TREE_CODE (stmt) == COND_EXPR)
+  else if (gimple_code (stmt) == GIMPLE_COND)
     {
       /* See if we can determine which edge goes out of a conditional
         jump.  */
@@ -738,27 +825,26 @@ copy_prop_visit_stmt (tree stmt, edge *taken_edge_p, tree *result_p)
    set it to be the value of the LHS of PHI.  */
 
 static enum ssa_prop_result
-copy_prop_visit_phi_node (tree phi)
+copy_prop_visit_phi_node (gimple phi)
 {
   enum ssa_prop_result retval;
-  int i;
-  tree lhs;
+  unsigned i;
   prop_value_t phi_val = { 0, NULL_TREE, NULL_TREE };
 
-  lhs = PHI_RESULT (phi);
+  tree lhs = gimple_phi_result (phi);
 
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
       fprintf (dump_file, "\nVisiting PHI node: ");
-      print_generic_expr (dump_file, phi, dump_flags);
+      print_gimple_stmt (dump_file, phi, 0, dump_flags);
       fprintf (dump_file, "\n\n");
     }
 
-  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
+  for (i = 0; i < gimple_phi_num_args (phi); i++)
     {
       prop_value_t *arg_val;
-      tree arg = PHI_ARG_DEF (phi, i);
-      edge e = PHI_ARG_EDGE (phi, i);
+      tree arg = gimple_phi_arg_def (phi, i);
+      edge e = gimple_phi_arg_edge (phi, i);
 
       /* We don't care about values flowing through non-executable
         edges.  */
@@ -860,14 +946,14 @@ init_copy_prop (void)
 
   FOR_EACH_BB (bb)
     {
-      block_stmt_iterator si;
-      tree phi, def;
+      gimple_stmt_iterator si;
       int depth = bb->loop_depth;
 
-      for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
+      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
        {
-         tree stmt = bsi_stmt (si);
+         gimple stmt = gsi_stmt (si);
          ssa_op_iter iter;
+          tree def;
 
          /* The only statements that we care about are those that may
             generate useful copies.  We also need to mark conditional
@@ -880,31 +966,37 @@ init_copy_prop (void)
             value was loop invariant, it will be hoisted by LICM and
             exposed for copy propagation.  */
          if (stmt_ends_bb_p (stmt))
-           DONT_SIMULATE_AGAIN (stmt) = false;
+            prop_set_simulate_again (stmt, true);
          else if (stmt_may_generate_copy (stmt)
-                  && loop_depth_of_name (GIMPLE_STMT_OPERAND (stmt, 1)) <= depth)
-           DONT_SIMULATE_AGAIN (stmt) = false;
+                   /* Since we are iterating over the statements in
+                      BB, not the phi nodes, STMT will always be an
+                      assignment.  */
+                   && loop_depth_of_name (gimple_assign_rhs1 (stmt)) <= depth)
+            prop_set_simulate_again (stmt, true);
          else
-           DONT_SIMULATE_AGAIN (stmt) = true;
+            prop_set_simulate_again (stmt, false);
 
          /* Mark all the outputs of this statement as not being
             the copy of anything.  */
          FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
-           if (DONT_SIMULATE_AGAIN (stmt))
+            if (!prop_simulate_again_p (stmt))
              set_copy_of_val (def, def);
            else
              cached_last_copy_of[SSA_NAME_VERSION (def)] = def;
        }
 
-      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
        {
-         def = PHI_RESULT (phi);
+          gimple phi = gsi_stmt (si);
+          tree def;
+
+         def = gimple_phi_result (phi);
          if (!is_gimple_reg (def))
-           DONT_SIMULATE_AGAIN (phi) = true;
+            prop_set_simulate_again (phi, false);
          else
-           DONT_SIMULATE_AGAIN (phi) = false;
+            prop_set_simulate_again (phi, true);
 
-         if (DONT_SIMULATE_AGAIN (phi))
+         if (!prop_simulate_again_p (phi))
            set_copy_of_val (def, def);
          else
            cached_last_copy_of[SSA_NAME_VERSION (def)] = def;
@@ -1084,4 +1176,3 @@ struct gimple_opt_pass pass_copy_prop =
     | TODO_update_ssa                  /* todo_flags_finish */
  }
 };
-
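
The copy-propagation pass above keeps, for every SSA name, the name it is ultimately a copy of; get_last_copy_of chases that chain, and the PHI visitor keeps a copy-of value only when all arguments agree.  The self-contained sketch below models that lattice with plain integer "versions"; it is an illustration under invented names, not GCC's prop_value_t machinery.

/* Toy copy-of lattice: copy_of[v] records which version v copies;
   get_last_copy_of follows the chain; a PHI keeps a copy-of value
   only when every argument resolves to the same final copy.  */

#include <stdio.h>

#define NVERS 8
static int copy_of[NVERS];

static int get_last_copy_of (int v)
{
  while (copy_of[v] != v)
    v = copy_of[v];
  return v;
}

static int visit_phi (int result, const int *args, int nargs)
{
  int first = get_last_copy_of (args[0]);
  for (int i = 1; i < nargs; i++)
    if (get_last_copy_of (args[i]) != first)
      return result;          /* arguments disagree: result copies itself */
  return first;
}

int main (void)
{
  for (int i = 0; i < NVERS; i++)
    copy_of[i] = i;

  copy_of[2] = 1;             /* x_2 = x_1 */
  copy_of[3] = 2;             /* x_3 = x_2, so x_3 is ultimately x_1 */

  int args[] = { 2, 3 };
  copy_of[4] = visit_phi (4, args, 2);   /* both args resolve to x_1 */

  printf ("x_4 is a copy of x_%d\n", get_last_copy_of (4));
  return 0;
}
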
index 46b3314..d30e237 100644 (file)
@@ -23,13 +23,14 @@ along with GCC; see the file COPYING3.  If not see
 #include "coretypes.h"
 #include "tm.h"
 #include "tree.h"
+#include "gimple.h"
 #include "flags.h"
 #include "basic-block.h"
 #include "function.h"
 #include "diagnostic.h"
 #include "bitmap.h"
 #include "tree-flow.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "tree-inline.h"
 #include "timevar.h"
 #include "hashtab.h"
@@ -300,8 +301,9 @@ rename_ssa_copies (void)
 {
   var_map map;
   basic_block bb;
-  block_stmt_iterator bsi;
-  tree phi, stmt, var, part_var;
+  gimple_stmt_iterator gsi;
+  tree var, part_var;
+  gimple stmt, phi;
   unsigned x;
   FILE *debug;
   bool updated = false;
@@ -316,16 +318,15 @@ rename_ssa_copies (void)
   FOR_EACH_BB (bb)
     {
       /* Scan for real copies.  */
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
-         stmt = bsi_stmt (bsi); 
-         if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
+         stmt = gsi_stmt (gsi);
+         if (gimple_assign_ssa_name_copy_p (stmt))
            {
-             tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
-             tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
+             tree lhs = gimple_assign_lhs (stmt);
+             tree rhs = gimple_assign_rhs1 (stmt);
 
-              if (TREE_CODE (lhs) == SSA_NAME && TREE_CODE (rhs) == SSA_NAME)
-               updated |= copy_rename_partition_coalesce (map, lhs, rhs, debug);
+             updated |= copy_rename_partition_coalesce (map, lhs, rhs, debug);
            }
        }
     }
@@ -333,18 +334,21 @@ rename_ssa_copies (void)
   FOR_EACH_BB (bb)
     {
       /* Treat PHI nodes as copies between the result and each argument.  */
-      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
         {
-          int i;
-         tree res = PHI_RESULT (phi);
+          size_t i;
+         tree res;
+
+         phi = gsi_stmt (gsi);
+         res = gimple_phi_result (phi);
 
          /* Do not process virtual SSA_NAMES.  */
          if (!is_gimple_reg (SSA_NAME_VAR (res)))
            continue;
 
-          for (i = 0; i < PHI_NUM_ARGS (phi); i++)
+          for (i = 0; i < gimple_phi_num_args (phi); i++)
             {
-              tree arg = PHI_ARG_DEF (phi, i);
+              tree arg = gimple_phi_arg (phi, i)->def;
               if (TREE_CODE (arg) == SSA_NAME)
                updated |= copy_rename_partition_coalesce (map, res, arg, debug);
             }
@@ -407,4 +411,3 @@ struct gimple_opt_pass pass_rename_ssa_copies =
   TODO_dump_func | TODO_verify_ssa      /* todo_flags_finish */
  }
 }; 
-
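
rename_ssa_copies, tuplified above, feeds every SSA-name copy, including each PHI result/argument pair, to copy_rename_partition_coalesce so the names can share one partition before out-of-SSA.  The union-find sketch below (invented find/coalesce helpers, not the real var_map API) shows the effect of coalescing such copies.

/* Toy partition coalescing with union-find and path halving.  */

#include <stdio.h>

#define NPART 6
static int parent[NPART];

static int find (int x)
{
  while (parent[x] != x)
    x = parent[x] = parent[parent[x]];
  return x;
}

static void coalesce (int a, int b)
{
  parent[find (a)] = find (b);
}

int main (void)
{
  for (int i = 0; i < NPART; i++)
    parent[i] = i;

  coalesce (0, 1);   /* a_1 = a_0             */
  coalesce (2, 1);   /* a_2 = PHI <a_1, ...>  */

  printf ("0 and 2 share a partition: %s\n",
          find (0) == find (2) ? "yes" : "no");
  return 0;
}
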
index 760e20d..3c75046 100644 (file)
@@ -1,5 +1,5 @@
 /* Dead code elimination pass for the GNU compiler.
-   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007
+   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008
    Free Software Foundation, Inc.
    Contributed by Ben Elliston <bje@redhat.com>
    and Andrew MacLeod <amacleod@redhat.com>
@@ -59,14 +59,14 @@ along with GCC; see the file COPYING3.  If not see
 #include "tree.h"
 #include "diagnostic.h"
 #include "tree-flow.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "tree-dump.h"
 #include "tree-pass.h"
 #include "timevar.h"
 #include "flags.h"
 #include "cfgloop.h"
 #include "tree-scalar-evolution.h"
-\f
+
 static struct stmt_stats
 {
   int total;
@@ -75,7 +75,9 @@ static struct stmt_stats
   int removed_phis;
 } stats;
 
-static VEC(tree,heap) *worklist;
+#define STMT_NECESSARY GF_PLF_1
+
+static VEC(gimple,heap) *worklist;
 
 /* Vector indicating an SSA name has already been processed and marked
    as necessary.  */
@@ -196,30 +198,26 @@ find_all_control_dependences (struct edge_list *el)
     find_control_dependence (el, i);
 }
 
-
-#define NECESSARY(stmt)                stmt->base.asm_written_flag
-
 /* If STMT is not already marked necessary, mark it, and add it to the
    worklist if ADD_TO_WORKLIST is true.  */
 static inline void
-mark_stmt_necessary (tree stmt, bool add_to_worklist)
+mark_stmt_necessary (gimple stmt, bool add_to_worklist)
 {
   gcc_assert (stmt);
-  gcc_assert (!DECL_P (stmt));
 
-  if (NECESSARY (stmt))
+  if (gimple_plf (stmt, STMT_NECESSARY))
     return;
 
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
       fprintf (dump_file, "Marking useful stmt: ");
-      print_generic_stmt (dump_file, stmt, TDF_SLIM);
+      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
       fprintf (dump_file, "\n");
     }
 
-  NECESSARY (stmt) = 1;
+  gimple_set_plf (stmt, STMT_NECESSARY, true);
   if (add_to_worklist)
-    VEC_safe_push (tree, heap, worklist, stmt);
+    VEC_safe_push (gimple, heap, worklist, stmt);
 }
 
 
@@ -228,7 +226,7 @@ mark_stmt_necessary (tree stmt, bool add_to_worklist)
 static inline void
 mark_operand_necessary (tree op)
 {
-  tree stmt;
+  gimple stmt;
   int ver;
 
   gcc_assert (op);
@@ -241,11 +239,11 @@ mark_operand_necessary (tree op)
   stmt = SSA_NAME_DEF_STMT (op);
   gcc_assert (stmt);
 
-  if (NECESSARY (stmt) || IS_EMPTY_STMT (stmt))
+  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
     return;
 
-  NECESSARY (stmt) = 1;
-  VEC_safe_push (tree, heap, worklist, stmt);
+  gimple_set_plf (stmt, STMT_NECESSARY, true);
+  VEC_safe_push (gimple, heap, worklist, stmt);
 }
 
 
@@ -256,77 +254,76 @@ mark_operand_necessary (tree op)
    necessary.  */
 
 static void
-mark_stmt_if_obviously_necessary (tree stmt, bool aggressive)
+mark_stmt_if_obviously_necessary (gimple stmt, bool aggressive)
 {
-  stmt_ann_t ann;
-  tree op;
-
+  tree lhs = NULL_TREE;
   /* With non-call exceptions, we have to assume that all statements could
      throw.  If a statement may throw, it is inherently necessary.  */
   if (flag_non_call_exceptions
-      && tree_could_throw_p (stmt))
+      && stmt_could_throw_p (stmt))
     {
       mark_stmt_necessary (stmt, true);
       return;
     }
 
-  /* Statements that are implicitly live.  Most function calls, asm and return
-     statements are required.  Labels and BIND_EXPR nodes are kept because
-     they are control flow, and we have no way of knowing whether they can be
-     removed.  DCE can eliminate all the other statements in a block, and CFG
-     can then remove the block and labels.  */
-  switch (TREE_CODE (stmt))
+  /* Statements that are implicitly live.  Most function calls, asm
+     and return statements are required.  Labels and GIMPLE_BIND nodes
+     are kept because they are control flow, and we have no way of
+     knowing whether they can be removed.  DCE can eliminate all the
+     other statements in a block, and CFG can then remove the block
+     and labels.  */
+  switch (gimple_code (stmt))
     {
-    case PREDICT_EXPR:
-    case LABEL_EXPR:
-    case CASE_LABEL_EXPR:
+    case GIMPLE_PREDICT:
+    case GIMPLE_LABEL:
       mark_stmt_necessary (stmt, false);
       return;
 
-    case ASM_EXPR:
-    case RESX_EXPR:
-    case RETURN_EXPR:
-    case CHANGE_DYNAMIC_TYPE_EXPR:
+    case GIMPLE_ASM:
+    case GIMPLE_RESX:
+    case GIMPLE_RETURN:
+    case GIMPLE_CHANGE_DYNAMIC_TYPE:
       mark_stmt_necessary (stmt, true);
       return;
 
-    case CALL_EXPR:
+    case GIMPLE_CALL:
       /* Most, but not all function calls are required.  Function calls that
         produce no result and have no side effects (i.e. const pure
         functions) are unnecessary.  */
-      if (TREE_SIDE_EFFECTS (stmt))
-       mark_stmt_necessary (stmt, true);
-      return;
-
-    case GIMPLE_MODIFY_STMT:
-      op = get_call_expr_in (stmt);
-      if (op && TREE_SIDE_EFFECTS (op))
+      if (gimple_has_side_effects (stmt))
        {
          mark_stmt_necessary (stmt, true);
          return;
        }
-
+      if (!gimple_call_lhs (stmt))
+        return;
+      lhs = gimple_call_lhs (stmt);
+      /* Fall through */
+
+    case GIMPLE_ASSIGN:
+      if (!lhs)
+        lhs = gimple_assign_lhs (stmt);
       /* These values are mildly magic bits of the EH runtime.  We can't
         see the entire lifetime of these values until landing pads are
         generated.  */
-      if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) == EXC_PTR_EXPR
-         || TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) == FILTER_EXPR)
+      if (TREE_CODE (lhs) == EXC_PTR_EXPR
+         || TREE_CODE (lhs) == FILTER_EXPR)
        {
          mark_stmt_necessary (stmt, true);
          return;
        }
       break;
 
-    case GOTO_EXPR:
+    case GIMPLE_GOTO:
       gcc_assert (!simple_goto_p (stmt));
       mark_stmt_necessary (stmt, true);
       return;
 
-    case COND_EXPR:
-      gcc_assert (EDGE_COUNT (bb_for_stmt (stmt)->succs) == 2);
+    case GIMPLE_COND:
+      gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
       /* Fall through.  */
 
-    case SWITCH_EXPR:
+    case GIMPLE_SWITCH:
       if (! aggressive)
        mark_stmt_necessary (stmt, true);
       break;
@@ -335,12 +332,10 @@ mark_stmt_if_obviously_necessary (tree stmt, bool aggressive)
       break;
     }
 
-  ann = stmt_ann (stmt);
-
   /* If the statement has volatile operands, it needs to be preserved.
      Same for statements that can alter control flow in unpredictable
      ways.  */
-  if (ann->has_volatile_ops || is_ctrl_altering_stmt (stmt))
+  if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
     {
       mark_stmt_necessary (stmt, true);
       return;
@@ -372,16 +367,16 @@ mark_control_dependent_edges_necessary (basic_block bb, struct edge_list *el)
 
   EXECUTE_IF_CONTROL_DEPENDENT (bi, bb->index, edge_number)
     {
-      tree t;
+      gimple stmt;
       basic_block cd_bb = INDEX_EDGE_PRED_BB (el, edge_number);
 
       if (TEST_BIT (last_stmt_necessary, cd_bb->index))
        continue;
       SET_BIT (last_stmt_necessary, cd_bb->index);
 
-      t = last_stmt (cd_bb);
-      if (t && is_ctrl_stmt (t))
-       mark_stmt_necessary (t, true);
+      stmt = last_stmt (cd_bb);
+      if (stmt && is_ctrl_stmt (stmt))
+       mark_stmt_necessary (stmt, true);
     }
 }
 
@@ -397,22 +392,24 @@ static void
 find_obviously_necessary_stmts (struct edge_list *el)
 {
   basic_block bb;
-  block_stmt_iterator i;
+  gimple_stmt_iterator gsi;
   edge e;
+  gimple phi, stmt;
 
   FOR_EACH_BB (bb)
     {
-      tree phi;
-
       /* PHI nodes are never inherently necessary.  */
-      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
-       NECESSARY (phi) = 0;
+      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+       {
+         phi = gsi_stmt (gsi);
+         gimple_set_plf (phi, STMT_NECESSARY, false);
+       }
 
       /* Check all statements in the block.  */
-      for (i = bsi_start (bb); ! bsi_end_p (i); bsi_next (&i))
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
-         tree stmt = bsi_stmt (i);
-         NECESSARY (stmt) = 0;
+         stmt = gsi_stmt (gsi);
+         gimple_set_plf (stmt, STMT_NECESSARY, false);
          mark_stmt_if_obviously_necessary (stmt, el != NULL);
        }
     }
@@ -442,21 +439,21 @@ find_obviously_necessary_stmts (struct edge_list *el)
 static void
 propagate_necessity (struct edge_list *el)
 {
-  tree stmt;
+  gimple stmt;
   bool aggressive = (el ? true : false); 
 
   if (dump_file && (dump_flags & TDF_DETAILS))
     fprintf (dump_file, "\nProcessing worklist:\n");
 
-  while (VEC_length (tree, worklist) > 0)
+  while (VEC_length (gimple, worklist) > 0)
     {
       /* Take STMT from worklist.  */
-      stmt = VEC_pop (tree, worklist);
+      stmt = VEC_pop (gimple, worklist);
 
       if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "processing: ");
-         print_generic_stmt (dump_file, stmt, TDF_SLIM);
+         print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
          fprintf (dump_file, "\n");
        }
 
@@ -465,7 +462,7 @@ propagate_necessity (struct edge_list *el)
          /* Mark the last statements of the basic blocks that the block
             containing STMT is control dependent on, but only if we haven't
             already done so.  */
-         basic_block bb = bb_for_stmt (stmt);
+         basic_block bb = gimple_bb (stmt);
          if (bb != ENTRY_BLOCK_PTR
              && ! TEST_BIT (visited_control_parents, bb->index))
            {
@@ -474,7 +471,7 @@ propagate_necessity (struct edge_list *el)
            }
        }
 
-      if (TREE_CODE (stmt) == PHI_NODE)
+      if (gimple_code (stmt) == GIMPLE_PHI)
        {
          /* PHI nodes are somewhat special in that each PHI alternative has
             data and control dependencies.  All the statements feeding the
@@ -482,9 +479,9 @@ propagate_necessity (struct edge_list *el)
             we also consider the control dependent edges leading to the
             predecessor block associated with each PHI alternative as
             necessary.  */
-         int k;
+         size_t k;
 
-         for (k = 0; k < PHI_NUM_ARGS (stmt); k++)
+         for (k = 0; k < gimple_phi_num_args (stmt); k++)
             {
              tree arg = PHI_ARG_DEF (stmt, k);
              if (TREE_CODE (arg) == SSA_NAME)
@@ -493,9 +490,9 @@ propagate_necessity (struct edge_list *el)
 
          if (aggressive)
            {
-             for (k = 0; k < PHI_NUM_ARGS (stmt); k++)
+             for (k = 0; k < gimple_phi_num_args (stmt); k++)
                {
-                 basic_block arg_bb = PHI_ARG_EDGE (stmt, k)->src;
+                 basic_block arg_bb = gimple_phi_arg_edge (stmt, k)->src;
                  if (arg_bb != ENTRY_BLOCK_PTR
                      && ! TEST_BIT (visited_control_parents, arg_bb->index))
                    {
@@ -529,35 +526,33 @@ propagate_necessity (struct edge_list *el)
 static bool
 remove_dead_phis (basic_block bb)
 {
-  tree prev, phi;
   bool something_changed = false;
+  gimple_seq phis;
+  gimple phi;
+  gimple_stmt_iterator gsi;
+  phis = phi_nodes (bb);
 
-  prev = NULL_TREE;
-  phi = phi_nodes (bb);
-  while (phi)
+  for (gsi = gsi_start (phis); !gsi_end_p (gsi);)
     {
       stats.total_phis++;
+      phi = gsi_stmt (gsi);
 
-      if (! NECESSARY (phi))
+      if (!gimple_plf (phi, STMT_NECESSARY))
        {
-         tree next = PHI_CHAIN (phi);
-
          something_changed = true;
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Deleting : ");
-             print_generic_stmt (dump_file, phi, TDF_SLIM);
+             print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
              fprintf (dump_file, "\n");
            }
 
-         remove_phi_node (phi, prev, true);
+         remove_phi_node (&gsi, true);
          stats.removed_phis++;
-         phi = next;
        }
       else
        {
-         prev = phi;
-         phi = PHI_CHAIN (phi);
+          gsi_next (&gsi);
        }
     }
   return something_changed;
@@ -568,14 +563,14 @@ remove_dead_phis (basic_block bb)
    containing I so that we don't have to look it up.  */
 
 static void
-remove_dead_stmt (block_stmt_iterator *i, basic_block bb)
+remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
 {
-  tree t = bsi_stmt (*i);
+  gimple stmt = gsi_stmt (*i);
 
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
       fprintf (dump_file, "Deleting : ");
-      print_generic_stmt (dump_file, t, TDF_SLIM);
+      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
       fprintf (dump_file, "\n");
     }
 
@@ -587,7 +582,7 @@ remove_dead_stmt (block_stmt_iterator *i, basic_block bb)
      immediate post-dominator.  The blocks we are circumventing will be
      removed by cleanup_tree_cfg if this change in the flow graph makes them
      unreachable.  */
-  if (is_ctrl_stmt (t))
+  if (is_ctrl_stmt (stmt))
     {
       basic_block post_dom_bb;
 
@@ -649,8 +644,8 @@ remove_dead_stmt (block_stmt_iterator *i, basic_block bb)
        }
     }
   
-  bsi_remove (i, true);  
-  release_defs (t); 
+  gsi_remove (i, true);  
+  release_defs (stmt); 
 }
 
 
@@ -662,7 +657,9 @@ eliminate_unnecessary_stmts (void)
 {
   bool something_changed = false;
   basic_block bb;
-  block_stmt_iterator i;
+  gimple_stmt_iterator gsi;
+  gimple stmt;
+  tree call;
 
   if (dump_file && (dump_flags & TDF_DETAILS))
     fprintf (dump_file, "\nEliminating unnecessary statements:\n");
@@ -677,51 +674,56 @@ eliminate_unnecessary_stmts (void)
   FOR_EACH_BB (bb)
     {
       /* Remove dead statements.  */
-      for (i = bsi_start (bb); ! bsi_end_p (i) ; )
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
        {
-         tree t = bsi_stmt (i);
+         stmt = gsi_stmt (gsi);
 
          stats.total++;
 
-         /* If `i' is not necessary then remove it.  */
-         if (! NECESSARY (t))
+         /* If the statement at GSI is not necessary then remove it.  */
+         if (!gimple_plf (stmt, STMT_NECESSARY))
            {
-             remove_dead_stmt (&i, bb);
+             remove_dead_stmt (&gsi, bb);
              something_changed = true;
            }
-         else
+         else if (is_gimple_call (stmt))
            {
-             tree call = get_call_expr_in (t);
+             call = gimple_call_fndecl (stmt);
              if (call)
                {
                  tree name;
+                 gimple g;
 
                  /* When LHS of var = call (); is dead, simplify it into
                     call (); saving one operand.  */
-                 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT
-                     && (TREE_CODE ((name = GIMPLE_STMT_OPERAND (t, 0)))
-                         == SSA_NAME)
-                     && !TEST_BIT (processed, SSA_NAME_VERSION (name)))
+                 name = gimple_call_lhs (stmt);
+                 if (name && TREE_CODE (name) == SSA_NAME
+                          && !TEST_BIT (processed, SSA_NAME_VERSION (name)))
                    {
-                     tree oldlhs = GIMPLE_STMT_OPERAND (t, 0);
                      something_changed = true;
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        {
                          fprintf (dump_file, "Deleting LHS of call: ");
-                         print_generic_stmt (dump_file, t, TDF_SLIM);
+                         print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
                          fprintf (dump_file, "\n");
                        }
-                     push_stmt_changes (bsi_stmt_ptr (i));
-                     TREE_BLOCK (call) = TREE_BLOCK (t);
-                     bsi_replace (&i, call, false);
-                     maybe_clean_or_replace_eh_stmt (t, call);
-                     mark_symbols_for_renaming (call);
-                     pop_stmt_changes (bsi_stmt_ptr (i));
-                     release_ssa_name (oldlhs);
+                     
+                     push_stmt_changes (gsi_stmt_ptr (&gsi));
+                     g = gimple_copy (stmt);
+                     gimple_call_set_lhs (g, NULL_TREE);
+                     gsi_replace (&gsi, g, false);
+                     maybe_clean_or_replace_eh_stmt (stmt, g);
+                     mark_symbols_for_renaming (g);
+                     pop_stmt_changes (gsi_stmt_ptr (&gsi));
+                     release_ssa_name (name);
                    }
-                 notice_special_calls (call);
+                 notice_special_calls (stmt);
                }
-             bsi_next (&i);
+             gsi_next (&gsi);
+           }
+         else
+           {
+             gsi_next (&gsi);
            }
        }
     }
@@ -749,7 +751,7 @@ print_stats (void)
   fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
           stats.removed_phis, stats.total_phis, (int) percg);
 }
-\f
+
 /* Initialization for this pass.  Set up the used data structures.  */
 
 static void
@@ -772,7 +774,7 @@ tree_dce_init (bool aggressive)
   processed = sbitmap_alloc (num_ssa_names + 1);
   sbitmap_zero (processed);
 
-  worklist = VEC_alloc (tree, heap, 64);
+  worklist = VEC_alloc (gimple, heap, 64);
   cfg_altered = false;
 }
 
@@ -795,9 +797,9 @@ tree_dce_done (bool aggressive)
 
   sbitmap_free (processed);
 
-  VEC_free (tree, heap, worklist);
+  VEC_free (gimple, heap, worklist);
 }
-\f
+
 /* Main routine to eliminate dead code.
 
    AGGRESSIVE controls the aggressiveness of the algorithm.
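
The DCE rewrite above keeps the classic mark-and-sweep shape: obviously necessary statements seed a worklist (now tagged with the STMT_NECESSARY pass-local flag rather than the asm_written bit), necessity propagates backwards through the defs each statement uses, and unmarked statements are removed.  A compact toy model of that scheme, built on invented data structures rather than GCC's, is sketched below.

/* Toy mark-and-sweep DCE: seed, propagate, sweep.  */

#include <stdbool.h>
#include <stdio.h>

#define NSTMT 5

struct toy_stmt
{
  bool obviously_necessary;  /* e.g. a return or a call with side effects */
  int use;                   /* index of the statement whose def it uses, -1 if none */
  bool necessary;            /* plays the role of the STMT_NECESSARY flag */
};

int main (void)
{
  /* 0: a = 1;  1: b = a;  2: c = 2; (dead)  3: d = c; (dead)  4: return b;  */
  struct toy_stmt stmts[NSTMT] = {
    { false, -1, false },
    { false,  0, false },
    { false, -1, false },
    { false,  2, false },
    { true,   1, false },
  };

  int worklist[NSTMT], top = 0;

  /* Seed: mark statements that are necessary on their own.  */
  for (int i = 0; i < NSTMT; i++)
    if (stmts[i].obviously_necessary)
      {
        stmts[i].necessary = true;
        worklist[top++] = i;
      }

  /* Propagate: anything feeding a necessary statement is necessary.  */
  while (top > 0)
    {
      int s = worklist[--top];
      int def = stmts[s].use;
      if (def >= 0 && !stmts[def].necessary)
        {
          stmts[def].necessary = true;
          worklist[top++] = def;
        }
    }

  /* Sweep: report what would be removed.  */
  for (int i = 0; i < NSTMT; i++)
    if (!stmts[i].necessary)
      printf ("stmt %d is dead\n", i);
  return 0;
}
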
index 0b15938..4e7a390 100644 (file)
@@ -46,6 +46,37 @@ along with GCC; see the file COPYING3.  If not see
 
 /* This file implements optimizations on the dominator tree.  */
 
+/* Representation of a "naked" right-hand-side expression, to be used
+   in recording available expressions in the expression hash table.  */
+
+enum expr_kind
+{
+  EXPR_SINGLE,
+  EXPR_UNARY,
+  EXPR_BINARY,
+  EXPR_CALL
+};
+
+struct hashable_expr
+{
+  tree type;
+  enum expr_kind kind;
+  union {
+    struct { tree rhs; } single;
+    struct { enum tree_code op;  tree opnd; } unary;
+    struct { enum tree_code op;  tree opnd0; tree opnd1; } binary;
+    struct { tree fn; bool pure; size_t nargs; tree *args; } call;
+  } ops;
+};
+
+/* Structure for recording known values of a conditional expression
+   at the exits from its block.  */
+
+struct cond_equivalence
+{
+  struct hashable_expr cond;
+  tree value;
+};
 
 /* Structure for recording edge equivalences as well as any pending
    edge redirections during the dominator optimizer.
@@ -72,11 +103,10 @@ struct edge_info
      are true or false.  The number of recorded conditions can vary, but
      can be determined by the condition's code.  So we have an array
      and its maximum index rather than use a varray.  */
-  tree *cond_equivalences;
+  struct cond_equivalence *cond_equivalences;
   unsigned int max_cond_equivalences;
 };
 
-
 /* Hash table with expressions made available during the renaming process.
    When an assignment of the form X_i = EXPR is found, the statement is
    stored in this table.  If the same expression EXPR is later found on the
@@ -91,7 +121,11 @@ static htab_t avail_exprs;
    (null).  When we finish processing the block, we pop off entries and
    remove the expressions from the global hash table until we hit the
    marker.  */
-static VEC(tree,heap) *avail_exprs_stack;
+typedef struct expr_hash_elt * expr_hash_elt_t;
+DEF_VEC_P(expr_hash_elt_t);
+DEF_VEC_ALLOC_P(expr_hash_elt_t,heap);
+
+static VEC(expr_hash_elt_t,heap) *avail_exprs_stack;
 
 /* Stack of statements we need to rescan during finalization for newly
    exposed variables.
@@ -100,22 +134,13 @@ static VEC(tree,heap) *avail_exprs_stack;
    expressions are removed from AVAIL_EXPRS.  Else we may change the
    hash code for an expression and be unable to find/remove it from
    AVAIL_EXPRS.  */
-typedef tree *tree_p;
-DEF_VEC_P(tree_p);
-DEF_VEC_ALLOC_P(tree_p,heap);
-
-static VEC(tree_p,heap) *stmts_to_rescan;
+typedef gimple *gimple_p;
+DEF_VEC_P(gimple_p);
+DEF_VEC_ALLOC_P(gimple_p,heap);
 
-/* Structure for entries in the expression hash table.
+static VEC(gimple_p,heap) *stmts_to_rescan;
 
-   This requires more memory for the hash table entries, but allows us
-   to avoid creating silly tree nodes and annotations for conditionals,
-   eliminates 2 global hash tables and two block local varrays.
-   
-   It also allows us to reduce the number of hash table lookups we
-   have to perform in lookup_avail_expr and finally it allows us to
-   significantly reduce the number of calls into the hashing routine
-   itself.  */
+/* Structure for entries in the expression hash table.  */
 
 struct expr_hash_elt
 {
@@ -123,13 +148,17 @@ struct expr_hash_elt
   tree lhs;
 
   /* The expression (rhs) we want to record.  */
-  tree rhs;
+  struct hashable_expr expr;
 
   /* The stmt pointer if this element corresponds to a statement.  */
-  tree stmt;
+  gimple stmt;
 
-  /* The hash value for RHS/ann.  */
+  /* The hash value for RHS.  */
   hashval_t hash;
+
+  /* A unique stamp, typically the address of the hash
+     element itself, used in removing entries from the table.  */
+  struct expr_hash_elt *stamp;
 };
 
 /* Stack of dest,src pairs that need to be restored during finalization.
@@ -157,28 +186,22 @@ struct opt_stats_d
 
 static struct opt_stats_d opt_stats;
 
-struct eq_expr_value
-{
-  tree src;
-  tree dst;
-};
-
 /* Local functions.  */
 static void optimize_stmt (struct dom_walk_data *, 
-                          basic_block bb,
-                          block_stmt_iterator);
-static tree lookup_avail_expr (tree, bool);
+                          basic_block,
+                          gimple_stmt_iterator);
+static tree lookup_avail_expr (gimple, bool);
 static hashval_t avail_expr_hash (const void *);
 static hashval_t real_avail_expr_hash (const void *);
 static int avail_expr_eq (const void *, const void *);
 static void htab_statistics (FILE *, htab_t);
-static void record_cond (tree, tree);
+static void record_cond (struct cond_equivalence *);
 static void record_const_or_copy (tree, tree);
 static void record_equality (tree, tree);
 static void record_equivalences_from_phis (basic_block);
 static void record_equivalences_from_incoming_edge (basic_block);
-static bool eliminate_redundant_computations (tree);
-static void record_equivalences_from_stmt (tree, int, stmt_ann_t);
+static bool eliminate_redundant_computations (gimple_stmt_iterator *);
+static void record_equivalences_from_stmt (gimple, int);
 static void dom_thread_across_edge (struct dom_walk_data *, edge);
 static void dom_opt_finalize_block (struct dom_walk_data *, basic_block);
 static void dom_opt_initialize_block (struct dom_walk_data *, basic_block);
@@ -188,6 +211,366 @@ static void restore_vars_to_original_value (void);
 static edge single_incoming_edge_ignoring_loop_edges (basic_block);
 
 
+/* Given a statement STMT, initialize the hash table element pointed to
+   by ELEMENT.  */
+
+static void
+initialize_hash_element (gimple stmt, tree lhs,
+                         struct expr_hash_elt *element)
+{
+  enum gimple_code code = gimple_code (stmt);
+  struct hashable_expr *expr = &element->expr;
+
+  if (code == GIMPLE_ASSIGN)
+    {
+      enum tree_code subcode = gimple_assign_rhs_code (stmt);
+
+      expr->type = NULL_TREE;
+      
+      switch (get_gimple_rhs_class (subcode))
+        {
+        case GIMPLE_SINGLE_RHS:
+          expr->kind = EXPR_SINGLE;
+          expr->ops.single.rhs = gimple_assign_rhs1 (stmt);
+          break;
+        case GIMPLE_UNARY_RHS:
+          expr->kind = EXPR_UNARY;
+         expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
+          expr->ops.unary.op = subcode;
+          expr->ops.unary.opnd = gimple_assign_rhs1 (stmt);
+          break;
+        case GIMPLE_BINARY_RHS:
+          expr->kind = EXPR_BINARY;
+         expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
+          expr->ops.binary.op = subcode;
+          expr->ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
+          expr->ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
+          break;
+        default:
+          gcc_unreachable ();
+        }
+    }
+  else if (code == GIMPLE_COND)
+    {
+      expr->type = boolean_type_node;
+      expr->kind = EXPR_BINARY;
+      expr->ops.binary.op = gimple_cond_code (stmt);
+      expr->ops.binary.opnd0 = gimple_cond_lhs (stmt);
+      expr->ops.binary.opnd1 = gimple_cond_rhs (stmt);
+    }
+  else if (code == GIMPLE_CALL)
+    {
+      size_t nargs = gimple_call_num_args (stmt);
+      size_t i;
+
+      gcc_assert (gimple_call_lhs (stmt));
+
+      expr->type = TREE_TYPE (gimple_call_lhs (stmt));
+      expr->kind = EXPR_CALL;
+      expr->ops.call.fn = gimple_call_fn (stmt);
+
+      if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
+        expr->ops.call.pure = true;
+      else 
+        expr->ops.call.pure = false;
+
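+      /* Copy the call arguments into a heap-allocated vector; it is
+         released again by free_expr_hash_elt.  */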
+      expr->ops.call.nargs = nargs;
+      expr->ops.call.args = (tree *) xcalloc (nargs, sizeof (tree));
+      for (i = 0; i < nargs; i++)
+        expr->ops.call.args[i] = gimple_call_arg (stmt, i);
+    }
+  else if (code == GIMPLE_SWITCH)
+    {
+      expr->type = TREE_TYPE (gimple_switch_index (stmt));
+      expr->kind = EXPR_SINGLE;
+      expr->ops.single.rhs = gimple_switch_index (stmt);
+    }
+  else if (code == GIMPLE_GOTO)
+    {
+      expr->type = TREE_TYPE (gimple_goto_dest (stmt));
+      expr->kind = EXPR_SINGLE;
+      expr->ops.single.rhs = gimple_goto_dest (stmt);
+    }
+  else
+    gcc_unreachable ();
+
+  element->lhs = lhs;
+  element->stmt = stmt;
+  element->hash = avail_expr_hash (element);
+  element->stamp = element;
+}
+
+/* Given a conditional expression COND as a tree, initialize
+   a hashable_expr expression EXPR.  The conditional must be a
+   comparison or logical negation.  A constant or a variable is
+   not permitted.  */
+
+static void
+initialize_expr_from_cond (tree cond, struct hashable_expr *expr)
+{
+  expr->type = boolean_type_node;
+  
+  if (COMPARISON_CLASS_P (cond))
+    {
+      expr->kind = EXPR_BINARY;
+      expr->ops.binary.op = TREE_CODE (cond);
+      expr->ops.binary.opnd0 = TREE_OPERAND (cond, 0);
+      expr->ops.binary.opnd1 = TREE_OPERAND (cond, 1);
+    }
+  else if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
+    {
+      expr->kind = EXPR_UNARY;
+      expr->ops.unary.op = TRUTH_NOT_EXPR;
+      expr->ops.unary.opnd = TREE_OPERAND (cond, 0);
+    }
+  else
+    gcc_unreachable ();
+}
+
+/* Given a hashable_expr expression EXPR and an LHS,
+   initialize the hash table element pointed to by ELEMENT.  */
+
+static void
+initialize_hash_element_from_expr (struct hashable_expr *expr,
+                                   tree lhs,
+                                   struct expr_hash_elt *element)
+{
+  element->expr = *expr;
+  element->lhs = lhs;
+  element->stmt = NULL;
+  element->hash = avail_expr_hash (element);
+  element->stamp = element;
+}
+
+/* Compare two hashable_expr structures for equivalence.
+   They are considered equivalent when the expressions they
+   denote must necessarily be equal.  The logic is intended
+   to follow that of operand_equal_p in fold-const.c.  */
+
+static bool
+hashable_expr_equal_p (const struct hashable_expr *expr0,
+                        const struct hashable_expr *expr1)
+{
+  tree type0 = expr0->type;
+  tree type1 = expr1->type;
+
+  /* If exactly one of the types is NULL, the expressions cannot
+     be equal.  */
+  if ((type0 == NULL_TREE) ^ (type1 == NULL_TREE))
+    return false;
+
+  /* If the types don't have the same signedness, precision, and mode,
+     then we can't consider them equal.  */
+  if (type0 != type1
+      && (TREE_CODE (type0) == ERROR_MARK
+         || TREE_CODE (type1) == ERROR_MARK
+         || TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
+         || TYPE_PRECISION (type0) != TYPE_PRECISION (type1)
+         || TYPE_MODE (type0) != TYPE_MODE (type1)))
+    return false;
+
+  if (expr0->kind != expr1->kind)
+    return false;
+
+  switch (expr0->kind)
+    {
+    case EXPR_SINGLE:
+      return operand_equal_p (expr0->ops.single.rhs,
+                              expr1->ops.single.rhs, 0);
+
+    case EXPR_UNARY:
+      if (expr0->ops.unary.op != expr1->ops.unary.op)
+        return false;
+
+      if ((expr0->ops.unary.op == NOP_EXPR
+           || expr0->ops.unary.op == CONVERT_EXPR
+           || expr0->ops.unary.op == NON_LVALUE_EXPR)
+          && TYPE_UNSIGNED (expr0->type) != TYPE_UNSIGNED (expr1->type))
+        return false;
+
+      return operand_equal_p (expr0->ops.unary.opnd,
+                              expr1->ops.unary.opnd, 0);
+
+    case EXPR_BINARY:
+      {
+        if (expr0->ops.binary.op != expr1->ops.binary.op)
+          return false;
+
+        if (operand_equal_p (expr0->ops.binary.opnd0,
+                             expr1->ops.binary.opnd0, 0)
+            && operand_equal_p (expr0->ops.binary.opnd1,
+                                expr1->ops.binary.opnd1, 0))
+          return true;
+
+        /* For commutative ops, allow the other order.  */
+        return (commutative_tree_code (expr0->ops.binary.op)
+                && operand_equal_p (expr0->ops.binary.opnd0,
+                                    expr1->ops.binary.opnd1, 0)
+                && operand_equal_p (expr0->ops.binary.opnd1,
+                                    expr1->ops.binary.opnd0, 0));
+      }
+
+    case EXPR_CALL:
+      {
+        size_t i;
+
+        /* If the calls are to different functions, then they
+           clearly cannot be equal.  */
+        if (! operand_equal_p (expr0->ops.call.fn,
+                               expr1->ops.call.fn, 0))
+          return false;
+
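+        /* Calls with side effects (neither const nor pure) can never
+           be considered equal.  */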
+        if (! expr0->ops.call.pure)
+          return false;
+
+        if (expr0->ops.call.nargs != expr1->ops.call.nargs)
+          return false;
+
+        for (i = 0; i < expr0->ops.call.nargs; i++)
+          if (! operand_equal_p (expr0->ops.call.args[i],
+                                 expr1->ops.call.args[i], 0))
+            return false;
+
+        return true;
+      }
+     
+    default:
+      gcc_unreachable ();
+    }
+}
+
+/* Compute a hash value for a hashable_expr value EXPR and a
+   previously accumulated hash value VAL.  If two hashable_expr
+   values compare equal with hashable_expr_equal_p, they must
+   hash to the same value, given an identical value of VAL.
+   The logic is intended to follow iterative_hash_expr in tree.c.  */
+
+static hashval_t
+iterative_hash_hashable_expr (const struct hashable_expr *expr, hashval_t val)
+{
+  switch (expr->kind)
+    {
+    case EXPR_SINGLE:
+      val = iterative_hash_expr (expr->ops.single.rhs, val);
+      break;
+
+    case EXPR_UNARY:
+      val = iterative_hash_object (expr->ops.unary.op, val);
+
+      /* Make sure to include signedness in the hash computation.
+         Don't hash the type itself; that can lead to nodes which
+         compare equal according to operand_equal_p, but which
+         have different hash codes.  */
+      if (expr->ops.unary.op == NOP_EXPR
+          || expr->ops.unary.op == CONVERT_EXPR
+          || expr->ops.unary.op == NON_LVALUE_EXPR)
+        val += TYPE_UNSIGNED (expr->type);
+
+      val = iterative_hash_expr (expr->ops.unary.opnd, val);
+      break;
+
+    case EXPR_BINARY:
+      val = iterative_hash_object (expr->ops.binary.op, val);
+      if (commutative_tree_code (expr->ops.binary.op))
+          val = iterative_hash_exprs_commutative (expr->ops.binary.opnd0,
+                                                  expr->ops.binary.opnd1, val);
+      else
+        {
+          val = iterative_hash_expr (expr->ops.binary.opnd0, val);
+          val = iterative_hash_expr (expr->ops.binary.opnd1, val);
+        }
+      break;
+
+    case EXPR_CALL:
+      {
+        size_t i;
+        enum tree_code code = CALL_EXPR;
+
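+        /* Fold the CALL_EXPR code, the callee and each argument into
+           the hash.  */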
+        val = iterative_hash_object (code, val);
+        val = iterative_hash_expr (expr->ops.call.fn, val);
+        for (i = 0; i < expr->ops.call.nargs; i++)
+          val = iterative_hash_expr (expr->ops.call.args[i], val);
+      }
+      break;
+     
+    default:
+      gcc_unreachable ();
+    }
+
+  return val;
+}
+
+/* Print a diagnostic dump of an expression hash table entry.  */
+
+static void
+print_expr_hash_elt (FILE * stream, const struct expr_hash_elt *element)
+{
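+  /* Entries created from statements carry a stmt pointer; pure
+     condition equivalences do not.  */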
+  if (element->stmt)
+    fprintf (stream, "STMT ");
+  else
+    fprintf (stream, "COND ");
+
+  if (element->lhs)
+    {
+      print_generic_expr (stream, element->lhs, 0);
+      fprintf (stream, " = ");
+    }
+  
+  switch (element->expr.kind)
+    {
+      case EXPR_SINGLE:
+        print_generic_expr (stream, element->expr.ops.single.rhs, 0);
+        break;
+
+      case EXPR_UNARY:
+        fprintf (stream, "%s ", tree_code_name[element->expr.ops.unary.op]);
+        print_generic_expr (stream, element->expr.ops.unary.opnd, 0);
+        break;
+
+      case EXPR_BINARY:
+        print_generic_expr (stream, element->expr.ops.binary.opnd0, 0);
+        fprintf (stream, " %s ", tree_code_name[element->expr.ops.binary.op]);
+        print_generic_expr (stream, element->expr.ops.binary.opnd1, 0);
+        break;
+
+      case EXPR_CALL:
+        {
+          size_t i;
+          size_t nargs = element->expr.ops.call.nargs;
+
+          print_generic_expr (stream, element->expr.ops.call.fn, 0);
+          fprintf (stream, " (");
+          for (i = 0; i < nargs; i++)
+            {
+              print_generic_expr (stream, element->expr.ops.call.args[i], 0);
+              if (i + 1 < nargs)
+                fprintf (stream, ", ");
+            }
+          fprintf (stream, ")");
+        }
+        break;
+    }
+  fprintf (stream, "\n");
+
+  if (element->stmt)
+    {
+      fprintf (stream, "          ");
+      print_gimple_stmt (stream, element->stmt, 0, 0);
+    }
+}
+
+/* Delete an expr_hash_elt and reclaim its storage.  */
+
+static void
+free_expr_hash_elt (void *elt)
+{
+  struct expr_hash_elt *element = ((struct expr_hash_elt *)elt);
+
+  if (element->expr.kind == EXPR_CALL)
+    free (element->expr.ops.call.args);
+
+  free (element);
+}
+
 /* Allocate an EDGE_INFO for edge E and attach it to E.
    Return the new EDGE_INFO structure.  */
 
@@ -247,10 +630,10 @@ tree_ssa_dominator_optimize (void)
   memset (&opt_stats, 0, sizeof (opt_stats));
 
   /* Create our hash tables.  */
-  avail_exprs = htab_create (1024, real_avail_expr_hash, avail_expr_eq, free);
-  avail_exprs_stack = VEC_alloc (tree, heap, 20);
+  avail_exprs = htab_create (1024, real_avail_expr_hash, avail_expr_eq, free_expr_hash_elt);
+  avail_exprs_stack = VEC_alloc (expr_hash_elt_t, heap, 20);
   const_and_copies_stack = VEC_alloc (tree, heap, 20);
-  stmts_to_rescan = VEC_alloc (tree_p, heap, 20);
+  stmts_to_rescan = VEC_alloc (gimple_p, heap, 20);
   need_eh_cleanup = BITMAP_ALLOC (NULL);
 
   /* Setup callbacks for the generic dominator tree walker.  */
@@ -291,12 +674,11 @@ tree_ssa_dominator_optimize (void)
   walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
 
   {
-    block_stmt_iterator bsi;
+    gimple_stmt_iterator gsi;
     basic_block bb;
     FOR_EACH_BB (bb)
-      {
-       for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
-         update_stmt_if_modified (bsi_stmt (bsi));
+      {
+       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+         update_stmt_if_modified (gsi_stmt (gsi));
       }
   }
 
@@ -336,7 +718,7 @@ tree_ssa_dominator_optimize (void)
            }
        }
 
-      tree_purge_all_dead_eh_edges (need_eh_cleanup);
+      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
       bitmap_zero (need_eh_cleanup);
     }
 
@@ -379,9 +761,10 @@ tree_ssa_dominator_optimize (void)
   /* Free asserted bitmaps and stacks.  */
   BITMAP_FREE (need_eh_cleanup);
   
-  VEC_free (tree, heap, avail_exprs_stack);
+  VEC_free (expr_hash_elt_t, heap, avail_exprs_stack);
   VEC_free (tree, heap, const_and_copies_stack);
-  VEC_free (tree_p, heap, stmts_to_rescan);
+  VEC_free (gimple_p, heap, stmts_to_rescan);
+  
   return 0;
 }
 
@@ -414,22 +797,22 @@ struct gimple_opt_pass pass_dominator =
 };
 
 
-/* Given a stmt CONDSTMT containing a COND_EXPR, canonicalize the
-   COND_EXPR into a canonical form.  */
+/* Given a conditional statement CONDSTMT, convert the
+   condition to a canonical form.  */
 
 static void
-canonicalize_comparison (tree condstmt)
+canonicalize_comparison (gimple condstmt)
 {
-  tree cond = COND_EXPR_COND (condstmt);
   tree op0;
   tree op1;
-  enum tree_code code = TREE_CODE (cond);
+  enum tree_code code;
 
-  if (!COMPARISON_CLASS_P (cond))
-    return;
+  gcc_assert (gimple_code (condstmt) == GIMPLE_COND);
 
-  op0 = TREE_OPERAND (cond, 0);
-  op1 = TREE_OPERAND (cond, 1);
+  op0 = gimple_cond_lhs (condstmt);
+  op1 = gimple_cond_rhs (condstmt);
+
+  code = gimple_cond_code (condstmt);
 
   /* If it would be profitable to swap the operands, then do so to
      canonicalize the statement, enabling better optimization.
@@ -446,17 +829,13 @@ canonicalize_comparison (tree condstmt)
          || code == LE_EXPR
          || code == GE_EXPR)
        {
-         TREE_SET_CODE (cond, swap_tree_comparison (code));
-         swap_tree_operands (condstmt,
-                             &TREE_OPERAND (cond, 0),
-                             &TREE_OPERAND (cond, 1));
-         /* If one operand was in the operand cache, but the other is
-            not, because it is a constant, this is a case that the
-            internal updating code of swap_tree_operands can't handle
-            properly.  */
-         if (TREE_CODE_CLASS (TREE_CODE (op0)) 
-             != TREE_CODE_CLASS (TREE_CODE (op1)))
-           update_stmt (condstmt);
+          code = swap_tree_comparison (code);
+
+          gimple_cond_set_code (condstmt, code);
+          gimple_cond_set_lhs (condstmt, op1);
+          gimple_cond_set_rhs (condstmt, op0);
+
+          update_stmt (condstmt);
        }
     }
 }
@@ -474,7 +853,7 @@ dom_opt_initialize_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
 
   /* Push a marker on the stacks of local information so that we know how
      far to unwind when we finalize this block.  */
-  VEC_safe_push (tree, heap, avail_exprs_stack, NULL_TREE);
+  VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, NULL);
   VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);
 
   record_equivalences_from_incoming_edge (bb);
@@ -483,52 +862,6 @@ dom_opt_initialize_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
   record_equivalences_from_phis (bb);
 }
 
-/* Given an expression EXPR (a relational expression or a statement), 
-   initialize the hash table element pointed to by ELEMENT.  */
-
-static void
-initialize_hash_element (tree expr, tree lhs, struct expr_hash_elt *element)
-{
-  /* Hash table elements may be based on conditional expressions or statements.
-
-     For the former case, we have no annotation and we want to hash the
-     conditional expression.  In the latter case we have an annotation and
-     we want to record the expression the statement evaluates.  */
-  if (COMPARISON_CLASS_P (expr) || TREE_CODE (expr) == TRUTH_NOT_EXPR)
-    {
-      element->stmt = NULL;
-      element->rhs = expr;
-    }
-  else if (TREE_CODE (expr) == COND_EXPR)
-    {
-      element->stmt = expr;
-      element->rhs = COND_EXPR_COND (expr);
-    }
-  else if (TREE_CODE (expr) == SWITCH_EXPR)
-    {
-      element->stmt = expr;
-      element->rhs = SWITCH_COND (expr);
-    }
-  else if (TREE_CODE (expr) == RETURN_EXPR && TREE_OPERAND (expr, 0))
-    {
-      element->stmt = expr;
-      element->rhs = GIMPLE_STMT_OPERAND (TREE_OPERAND (expr, 0), 1);
-    }
-  else if (TREE_CODE (expr) == GOTO_EXPR)
-    {
-      element->stmt = expr;
-      element->rhs = GOTO_DESTINATION (expr);
-    }
-  else
-    {
-      element->stmt = expr;
-      element->rhs = GENERIC_TREE_OPERAND (expr, 1);
-    }
-
-  element->lhs = lhs;
-  element->hash = avail_expr_hash (element);
-}
-
 /* Remove all the expressions in LOCALS from TABLE, stopping when there are
    LIMIT entries left in LOCALs.  */
 
@@ -536,15 +869,25 @@ static void
 remove_local_expressions_from_table (void)
 {
   /* Remove all the expressions made available in this block.  */
-  while (VEC_length (tree, avail_exprs_stack) > 0)
+  while (VEC_length (expr_hash_elt_t, avail_exprs_stack) > 0)
     {
       struct expr_hash_elt element;
-      tree expr = VEC_pop (tree, avail_exprs_stack);
+      expr_hash_elt_t victim = VEC_pop (expr_hash_elt_t, avail_exprs_stack);
 
-      if (expr == NULL_TREE)
+      if (victim == NULL)
        break;
 
-      initialize_hash_element (expr, NULL, &element);
+      element = *victim;
+
+      /* This must precede the actual removal from the hash table,
+         as ELEMENT and the table entry may share a call argument
+         vector which will be freed during removal.  */
+      if (dump_file && (dump_flags & TDF_DETAILS))
+        {
+          fprintf (dump_file, "<<<< ");
+          print_expr_hash_elt (dump_file, &element);
+        }
+
       htab_remove_elt_with_hash (avail_exprs, &element, element.hash);
     }
 }
@@ -565,6 +908,15 @@ restore_vars_to_original_value (void)
       if (dest == NULL)
        break;
 
+      if (dump_file && (dump_flags & TDF_DETAILS))
+       {
+         fprintf (dump_file, "<<<< COPY ");
+         print_generic_expr (dump_file, dest, 0);
+         fprintf (dump_file, " = ");
+         print_generic_expr (dump_file, SSA_NAME_VALUE (dest), 0);
+         fprintf (dump_file, "\n");
+       }
+
       prev_value = VEC_pop (tree, const_and_copies_stack);
       SSA_NAME_VALUE (dest) =  prev_value;
     }
@@ -573,7 +925,8 @@ restore_vars_to_original_value (void)
 /* A trivial wrapper so that we can present the generic jump
    threading code with a simple API for simplifying statements.  */
 static tree
-simplify_stmt_for_jump_threading (tree stmt, tree within_stmt ATTRIBUTE_UNUSED)
+simplify_stmt_for_jump_threading (gimple stmt,
+                                 gimple within_stmt ATTRIBUTE_UNUSED)
 {
   return lookup_avail_expr (stmt, false);
 }
@@ -585,16 +938,16 @@ simplify_stmt_for_jump_threading (tree stmt, tree within_stmt ATTRIBUTE_UNUSED)
 static void
 dom_thread_across_edge (struct dom_walk_data *walk_data, edge e)
 {
-  /* If we don't already have a dummy condition, build it now.  */
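+  /* Build the dummy condition used for jump threading once and cache
+     it in the walker's global data.  */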
   if (! walk_data->global_data)
-    {
-      tree dummy_cond = build2 (NE_EXPR, boolean_type_node,
-                               integer_zero_node, integer_zero_node);
-      dummy_cond = build3 (COND_EXPR, void_type_node, dummy_cond, NULL, NULL);
-      walk_data->global_data = dummy_cond;
-    }
+    {
+      gimple dummy_cond =
+       gimple_build_cond (NE_EXPR,
+                          integer_zero_node, integer_zero_node,
+                          NULL, NULL);
+      walk_data->global_data = dummy_cond;
+    }
 
-  thread_across_edge ((tree) walk_data->global_data, e, false,
+  thread_across_edge ((gimple) walk_data->global_data, e, false,
                      &const_and_copies_stack,
                      simplify_stmt_for_jump_threading);
 }
@@ -606,8 +959,7 @@ dom_thread_across_edge (struct dom_walk_data *walk_data, edge e)
 static void
 dom_opt_finalize_block (struct dom_walk_data *walk_data, basic_block bb)
 {
-  tree last;
-
+  gimple last;
 
   /* If we have an outgoing edge to a block with multiple incoming and
      outgoing edges, then we may be able to thread the edge, i.e., we
@@ -620,9 +972,7 @@ dom_opt_finalize_block (struct dom_walk_data *walk_data, basic_block bb)
       dom_thread_across_edge (walk_data, single_succ_edge (bb));
     }
   else if ((last = last_stmt (bb))
-          && TREE_CODE (last) == COND_EXPR
-          && (COMPARISON_CLASS_P (COND_EXPR_COND (last))
-              || TREE_CODE (COND_EXPR_COND (last)) == SSA_NAME)
+          && gimple_code (last) == GIMPLE_COND
           && EDGE_COUNT (bb->succs) == 2
           && (EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL) == 0
           && (EDGE_SUCC (bb, 1)->flags & EDGE_ABNORMAL) == 0)
@@ -641,33 +991,28 @@ dom_opt_finalize_block (struct dom_walk_data *walk_data, basic_block bb)
          /* Push a marker onto the available expression stack so that we
             unwind any expressions related to the TRUE arm before processing
             the false arm below.  */
-         VEC_safe_push (tree, heap, avail_exprs_stack, NULL_TREE);
+          VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, NULL);
          VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);
 
          edge_info = (struct edge_info *) true_edge->aux;
 
          /* If we have info associated with this edge, record it into
-            our equivalency tables.  */
+            our equivalence tables.  */
          if (edge_info)
            {
-             tree *cond_equivalences = edge_info->cond_equivalences;
+             struct cond_equivalence *cond_equivalences = edge_info->cond_equivalences;
              tree lhs = edge_info->lhs;
              tree rhs = edge_info->rhs;
 
-             /* If we have a simple NAME = VALUE equivalency record it.  */
+             /* If we have a simple NAME = VALUE equivalence, record it.  */
              if (lhs && TREE_CODE (lhs) == SSA_NAME)
                record_const_or_copy (lhs, rhs);
 
              /* If we have 0 = COND or 1 = COND equivalences, record them
                 into our expression hash tables.  */
              if (cond_equivalences)
-               for (i = 0; i < edge_info->max_cond_equivalences; i += 2)
-                 {
-                   tree expr = cond_equivalences[i];
-                   tree value = cond_equivalences[i + 1];
-
-                   record_cond (expr, value);
-                 }
+               for (i = 0; i < edge_info->max_cond_equivalences; i++)
+                  record_cond (&cond_equivalences[i]);
            }
 
          dom_thread_across_edge (walk_data, true_edge);
@@ -687,27 +1032,22 @@ dom_opt_finalize_block (struct dom_walk_data *walk_data, basic_block bb)
          edge_info = (struct edge_info *) false_edge->aux;
 
          /* If we have info associated with this edge, record it into
-            our equivalency tables.  */
+            our equivalence tables.  */
          if (edge_info)
            {
-             tree *cond_equivalences = edge_info->cond_equivalences;
+             struct cond_equivalence *cond_equivalences = edge_info->cond_equivalences;
              tree lhs = edge_info->lhs;
              tree rhs = edge_info->rhs;
 
-             /* If we have a simple NAME = VALUE equivalency record it.  */
+             /* If we have a simple NAME = VALUE equivalence, record it.  */
              if (lhs && TREE_CODE (lhs) == SSA_NAME)
                record_const_or_copy (lhs, rhs);
 
              /* If we have 0 = COND or 1 = COND equivalences, record them
                 into our expression hash tables.  */
              if (cond_equivalences)
-               for (i = 0; i < edge_info->max_cond_equivalences; i += 2)
-                 {
-                   tree expr = cond_equivalences[i];
-                   tree value = cond_equivalences[i + 1];
-
-                   record_cond (expr, value);
-                 }
+               for (i = 0; i < edge_info->max_cond_equivalences; i++)
+                  record_cond (&cond_equivalences[i]);
            }
 
          /* Now thread the edge.  */
@@ -724,16 +1064,16 @@ dom_opt_finalize_block (struct dom_walk_data *walk_data, basic_block bb)
 
   /* If we queued any statements to rescan in this block, then
      go ahead and rescan them now.  */
-  while (VEC_length (tree_p, stmts_to_rescan) > 0)
+  while (VEC_length (gimple_p, stmts_to_rescan) > 0)
     {
-      tree *stmt_p = VEC_last (tree_p, stmts_to_rescan);
-      tree stmt = *stmt_p;
-      basic_block stmt_bb = bb_for_stmt (stmt);
+      gimple *stmt_p = VEC_last (gimple_p, stmts_to_rescan);
+      gimple stmt = *stmt_p;
+      basic_block stmt_bb = gimple_bb (stmt);
 
       if (stmt_bb != bb)
        break;
 
-      VEC_pop (tree_p, stmts_to_rescan);
+      VEC_pop (gimple_p, stmts_to_rescan);
       pop_stmt_changes (stmt_p);
     }
 }
@@ -747,17 +1087,19 @@ dom_opt_finalize_block (struct dom_walk_data *walk_data, basic_block bb)
 static void
 record_equivalences_from_phis (basic_block bb)
 {
-  tree phi;
-
-  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+  gimple_stmt_iterator gsi;
+  
+  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
     {
-      tree lhs = PHI_RESULT (phi);
+      gimple phi = gsi_stmt (gsi);
+
+      tree lhs = gimple_phi_result (phi);
       tree rhs = NULL;
-      int i;
+      size_t i;
 
-      for (i = 0; i < PHI_NUM_ARGS (phi); i++)
+      for (i = 0; i < gimple_phi_num_args (phi); i++)
        {
-         tree t = PHI_ARG_DEF (phi, i);
+         tree t = gimple_phi_arg_def (phi, i);
 
          /* Ignore alternatives which are the same as our LHS.  Since
             LHS is a PHI_RESULT, it is known to be a SSA_NAME, so we
@@ -787,8 +1129,7 @@ record_equivalences_from_phis (basic_block bb)
         this, since this is a true assignment and not an equivalence
         inferred from a comparison.  All uses of this ssa name are dominated
         by this assignment, so unwinding just costs time and space.  */
-      if (i == PHI_NUM_ARGS (phi)
-         && may_propagate_copy (lhs, rhs))
+      if (i == gimple_phi_num_args (phi) && may_propagate_copy (lhs, rhs))
        SSA_NAME_VALUE (lhs) = rhs;
     }
 }
@@ -851,21 +1192,14 @@ record_equivalences_from_incoming_edge (basic_block bb)
        {
          tree lhs = edge_info->lhs;
          tree rhs = edge_info->rhs;
-         tree *cond_equivalences = edge_info->cond_equivalences;
+         struct cond_equivalence *cond_equivalences = edge_info->cond_equivalences;
 
          if (lhs)
            record_equality (lhs, rhs);
 
          if (cond_equivalences)
-           {
-             for (i = 0; i < edge_info->max_cond_equivalences; i += 2)
-               {
-                 tree expr = cond_equivalences[i];
-                 tree value = cond_equivalences[i + 1];
-
-                 record_cond (expr, value);
-               }
-           }
+            for (i = 0; i < edge_info->max_cond_equivalences; i++)
+              record_cond (&cond_equivalences[i]);
        }
     }
 }
@@ -907,38 +1241,56 @@ htab_statistics (FILE *file, htab_t htab)
           htab_collisions (htab));
 }
 
-/* Enter a statement into the true/false expression hash table indicating
-   that the condition COND has the value VALUE.  */
+
+/* Enter a condition equivalence into the expression hash table.
+   This indicates that a conditional expression has a known
+   boolean value.  */
 
 static void
-record_cond (tree cond, tree value)
+record_cond (struct cond_equivalence *p)
 {
   struct expr_hash_elt *element = XCNEW (struct expr_hash_elt);
   void **slot;
 
-  initialize_hash_element (cond, value, element);
+  initialize_hash_element_from_expr (&p->cond, p->value, element);
 
   slot = htab_find_slot_with_hash (avail_exprs, (void *)element,
                                   element->hash, INSERT);
   if (*slot == NULL)
     {
       *slot = (void *) element;
-      VEC_safe_push (tree, heap, avail_exprs_stack, cond);
+
+      if (dump_file && (dump_flags & TDF_DETAILS))
+        {
+          fprintf (dump_file, "1>>> ");
+          print_expr_hash_elt (dump_file, element);
+        }
+
+      VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, element);
     }
   else
     free (element);
 }
 
-/* Build a new conditional using NEW_CODE, OP0 and OP1 and store
-   the new conditional into *p, then store a boolean_true_node
-   into *(p + 1).  */
+/* Build a cond_equivalence record indicating that the comparison
+   CODE holds between operands OP0 and OP1.  */
    
 static void
-build_and_record_new_cond (enum tree_code new_code, tree op0, tree op1, tree *p)
+build_and_record_new_cond (enum tree_code code,
+                           tree op0, tree op1,
+                           struct cond_equivalence *p)
 {
-  *p = build2 (new_code, boolean_type_node, op0, op1);
-  p++;
-  *p = boolean_true_node;
+  struct hashable_expr *cond = &p->cond;
+
+  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
+
+  cond->type = boolean_type_node;
+  cond->kind = EXPR_BINARY;
+  cond->ops.binary.op = code;
+  cond->ops.binary.opnd0 = op0;
+  cond->ops.binary.opnd1 = op1;
+
+  p->value = boolean_true_node;
 }
 
 /* Record that COND is true and INVERTED is false into the edge information
@@ -964,119 +1316,125 @@ record_conditions (struct edge_info *edge_info, tree cond, tree inverted)
     case GT_EXPR:
       if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
-         edge_info->max_cond_equivalences = 12;
-         edge_info->cond_equivalences = XNEWVEC (tree, 12);
+         edge_info->max_cond_equivalences = 6;
+         edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 6);
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
-                                    &edge_info->cond_equivalences[8]);
+                                    &edge_info->cond_equivalences[4]);
          build_and_record_new_cond (LTGT_EXPR, op0, op1,
-                                    &edge_info->cond_equivalences[10]);
+                                    &edge_info->cond_equivalences[5]);
        }
       else
-       {
-         edge_info->max_cond_equivalences = 8;
-         edge_info->cond_equivalences = XNEWVEC (tree, 8);
+        {
+          edge_info->max_cond_equivalences = 4;
+         edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
        }
 
       build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
                                  ? LE_EXPR : GE_EXPR),
-                                op0, op1, &edge_info->cond_equivalences[4]);
+                                op0, op1, &edge_info->cond_equivalences[2]);
       build_and_record_new_cond (NE_EXPR, op0, op1,
-                                &edge_info->cond_equivalences[6]);
+                                &edge_info->cond_equivalences[3]);
       break;
 
     case GE_EXPR:
     case LE_EXPR:
       if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
-         edge_info->max_cond_equivalences = 6;
-         edge_info->cond_equivalences = XNEWVEC (tree, 6);
+         edge_info->max_cond_equivalences = 3;
+         edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 3);
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
-                                    &edge_info->cond_equivalences[4]);
+                                    &edge_info->cond_equivalences[2]);
        }
       else
        {
-         edge_info->max_cond_equivalences = 4;
-         edge_info->cond_equivalences = XNEWVEC (tree, 4);
+         edge_info->max_cond_equivalences = 2;
+         edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 2);
        }
       break;
 
     case EQ_EXPR:
       if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
-         edge_info->max_cond_equivalences = 10;
-         edge_info->cond_equivalences = XNEWVEC (tree, 10);
+         edge_info->max_cond_equivalences = 5;
+         edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 5);
          build_and_record_new_cond (ORDERED_EXPR, op0, op1,
-                                    &edge_info->cond_equivalences[8]);
+                                    &edge_info->cond_equivalences[4]);
        }
       else
        {
-         edge_info->max_cond_equivalences = 8;
-         edge_info->cond_equivalences = XNEWVEC (tree, 8);
+         edge_info->max_cond_equivalences = 4;
+         edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
        }
       build_and_record_new_cond (LE_EXPR, op0, op1,
-                                &edge_info->cond_equivalences[4]);
+                                &edge_info->cond_equivalences[2]);
       build_and_record_new_cond (GE_EXPR, op0, op1,
-                                &edge_info->cond_equivalences[6]);
+                                &edge_info->cond_equivalences[3]);
       break;
 
     case UNORDERED_EXPR:
-      edge_info->max_cond_equivalences = 16;
-      edge_info->cond_equivalences = XNEWVEC (tree, 16);
+      edge_info->max_cond_equivalences = 8;
+      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 8);
       build_and_record_new_cond (NE_EXPR, op0, op1,
-                                &edge_info->cond_equivalences[4]);
+                                &edge_info->cond_equivalences[2]);
       build_and_record_new_cond (UNLE_EXPR, op0, op1,
-                                &edge_info->cond_equivalences[6]);
+                                &edge_info->cond_equivalences[3]);
       build_and_record_new_cond (UNGE_EXPR, op0, op1,
-                                &edge_info->cond_equivalences[8]);
+                                &edge_info->cond_equivalences[4]);
       build_and_record_new_cond (UNEQ_EXPR, op0, op1,
-                                &edge_info->cond_equivalences[10]);
+                                &edge_info->cond_equivalences[5]);
       build_and_record_new_cond (UNLT_EXPR, op0, op1,
-                                &edge_info->cond_equivalences[12]);
+                                &edge_info->cond_equivalences[6]);
       build_and_record_new_cond (UNGT_EXPR, op0, op1,
-                                &edge_info->cond_equivalences[14]);
+                                &edge_info->cond_equivalences[7]);
       break;
 
     case UNLT_EXPR:
     case UNGT_EXPR:
-      edge_info->max_cond_equivalences = 8;
-      edge_info->cond_equivalences = XNEWVEC (tree, 8);
+      edge_info->max_cond_equivalences = 4;
+      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
       build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
                                  ? UNLE_EXPR : UNGE_EXPR),
-                                op0, op1, &edge_info->cond_equivalences[4]);
+                                op0, op1, &edge_info->cond_equivalences[2]);
       build_and_record_new_cond (NE_EXPR, op0, op1,
-                                &edge_info->cond_equivalences[6]);
+                                &edge_info->cond_equivalences[3]);
       break;
 
     case UNEQ_EXPR:
-      edge_info->max_cond_equivalences = 8;
-      edge_info->cond_equivalences = XNEWVEC (tree, 8);
+      edge_info->max_cond_equivalences = 4;
+      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
       build_and_record_new_cond (UNLE_EXPR, op0, op1,
-                                &edge_info->cond_equivalences[4]);
+                                &edge_info->cond_equivalences[2]);
       build_and_record_new_cond (UNGE_EXPR, op0, op1,
-                                &edge_info->cond_equivalences[6]);
+                                &edge_info->cond_equivalences[3]);
       break;
 
     case LTGT_EXPR:
-      edge_info->max_cond_equivalences = 8;
-      edge_info->cond_equivalences = XNEWVEC (tree, 8);
+      edge_info->max_cond_equivalences = 4;
+      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 4);
       build_and_record_new_cond (NE_EXPR, op0, op1,
-                                &edge_info->cond_equivalences[4]);
+                                &edge_info->cond_equivalences[2]);
       build_and_record_new_cond (ORDERED_EXPR, op0, op1,
-                                &edge_info->cond_equivalences[6]);
+                                &edge_info->cond_equivalences[3]);
       break;
 
     default:
-      edge_info->max_cond_equivalences = 4;
-      edge_info->cond_equivalences = XNEWVEC (tree, 4);
+      edge_info->max_cond_equivalences = 2;
+      edge_info->cond_equivalences = XNEWVEC (struct cond_equivalence, 2);
       break;
     }
 
   /* Now store the original true and false conditions into the first
      two slots.  */
-  edge_info->cond_equivalences[0] = cond;
-  edge_info->cond_equivalences[1] = boolean_true_node;
-  edge_info->cond_equivalences[2] = inverted;
-  edge_info->cond_equivalences[3] = boolean_false_node;
+  initialize_expr_from_cond (cond, &edge_info->cond_equivalences[0].cond);
+  edge_info->cond_equivalences[0].value = boolean_true_node;
+
+  /* It is possible for INVERTED to be the negation of a comparison,
+     and not a valid RHS or GIMPLE_COND condition.  This happens because
+     invert_truthvalue may return such an expression when asked to invert
+     a floating-point comparison.  These comparisons are not assumed to
+     obey the trichotomy law.  */
+  initialize_expr_from_cond (inverted, &edge_info->cond_equivalences[1].cond);
+  edge_info->cond_equivalences[1].value = boolean_false_node;
 }
 
 /* A helper function for record_const_or_copy and record_equality.
@@ -1087,12 +1445,20 @@ record_const_or_copy_1 (tree x, tree y, tree prev_x)
 {
   SSA_NAME_VALUE (x) = y;
 
+  if (dump_file && (dump_flags & TDF_DETAILS))
+    {
+      fprintf (dump_file, "0>>> COPY ");
+      print_generic_expr (dump_file, x, 0);
+      fprintf (dump_file, " = ");
+      print_generic_expr (dump_file, y, 0);
+      fprintf (dump_file, "\n");
+    }
+
   VEC_reserve (tree, heap, const_and_copies_stack, 2);
   VEC_quick_push (tree, const_and_copies_stack, prev_x);
   VEC_quick_push (tree, const_and_copies_stack, x);
 }
 
-
 /* Return the loop depth of the basic block of the defining statement of X.
    This number should not be treated as absolutely correct because the loop
    information may not be completely up-to-date when dom runs.  However, it
@@ -1102,7 +1468,7 @@ record_const_or_copy_1 (tree x, tree y, tree prev_x)
 int
 loop_depth_of_name (tree x)
 {
-  tree defstmt;
+  gimple defstmt;
   basic_block defbb;
 
   /* If it's not an SSA_NAME, we have no clue where the definition is.  */
@@ -1113,14 +1479,13 @@ loop_depth_of_name (tree x)
      Note that there may not actually be a bb for this statement, if the
      ssa_name is live on entry.  */
   defstmt = SSA_NAME_DEF_STMT (x);
-  defbb = bb_for_stmt (defstmt);
+  defbb = gimple_bb (defstmt);
   if (!defbb)
     return 0;
 
   return defbb->loop_depth;
 }
 
-
 /* Record that X is equal to Y in const_and_copies.  Record undo
    information in the block-local vector.  */
 
@@ -1129,6 +1494,8 @@ record_const_or_copy (tree x, tree y)
 {
   tree prev_x = SSA_NAME_VALUE (x);
 
+  gcc_assert (TREE_CODE (x) == SSA_NAME);
+
   if (TREE_CODE (y) == SSA_NAME)
     {
       tree tmp = SSA_NAME_VALUE (y);
@@ -1189,34 +1556,34 @@ record_equality (tree x, tree y)
    i_2 = i_1 +/- ...  */
 
 static bool
-simple_iv_increment_p (tree stmt)
+simple_iv_increment_p (gimple stmt)
 {
-  tree lhs, rhs, preinc, phi;
-  unsigned i;
+  tree lhs, preinc;
+  gimple phi;
+  size_t i;
 
-  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+  if (gimple_code (stmt) != GIMPLE_ASSIGN)
     return false;
 
-  lhs = GIMPLE_STMT_OPERAND (stmt, 0);
+  lhs = gimple_assign_lhs (stmt);
   if (TREE_CODE (lhs) != SSA_NAME)
     return false;
 
-  rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-
-  if (TREE_CODE (rhs) != PLUS_EXPR
-      && TREE_CODE (rhs) != MINUS_EXPR)
+  if (gimple_assign_rhs_code (stmt) != PLUS_EXPR
+      && gimple_assign_rhs_code (stmt) != MINUS_EXPR)
     return false;
 
-  preinc = TREE_OPERAND (rhs, 0);
+  preinc = gimple_assign_rhs1 (stmt);
+
   if (TREE_CODE (preinc) != SSA_NAME)
     return false;
 
   phi = SSA_NAME_DEF_STMT (preinc);
-  if (TREE_CODE (phi) != PHI_NODE)
+  if (gimple_code (phi) != GIMPLE_PHI)
     return false;
 
-  for (i = 0; i < (unsigned) PHI_NUM_ARGS (phi); i++)
-    if (PHI_ARG_DEF (phi, i) == lhs)
+  for (i = 0; i < gimple_phi_num_args (phi); i++)
+    if (gimple_phi_arg_def (phi, i) == lhs)
       return true;
 
   return false;
@@ -1236,29 +1603,30 @@ cprop_into_successor_phis (basic_block bb)
 
   FOR_EACH_EDGE (e, ei, bb->succs)
     {
-      tree phi;
       int indx;
+      gimple_stmt_iterator gsi;
 
       /* If this is an abnormal edge, then we do not want to copy propagate
         into the PHI alternative associated with this edge.  */
       if (e->flags & EDGE_ABNORMAL)
        continue;
 
-      phi = phi_nodes (e->dest);
-      if (! phi)
+      gsi = gsi_start_phis (e->dest);
+      if (gsi_end_p (gsi))
        continue;
 
       indx = e->dest_idx;
-      for ( ; phi; phi = PHI_CHAIN (phi))
+      for ( ; !gsi_end_p (gsi); gsi_next (&gsi))
        {
          tree new_val;
          use_operand_p orig_p;
          tree orig_val;
+          gimple phi = gsi_stmt (gsi);
 
          /* The alternative may be associated with a constant, so verify
             it is an SSA_NAME before doing anything with it.  */
-         orig_p = PHI_ARG_DEF_PTR (phi, indx);
-         orig_val = USE_FROM_PTR (orig_p);
+         orig_p = gimple_phi_arg_imm_use_ptr (phi, indx);
+         orig_val = get_use_from_ptr (orig_p);
          if (TREE_CODE (orig_val) != SSA_NAME)
            continue;
 
@@ -1281,30 +1649,29 @@ cprop_into_successor_phis (basic_block bb)
 static void
 record_edge_info (basic_block bb)
 {
-  block_stmt_iterator bsi = bsi_last (bb);
+  gimple_stmt_iterator gsi = gsi_last_bb (bb);
   struct edge_info *edge_info;
 
-  if (! bsi_end_p (bsi))
+  if (! gsi_end_p (gsi))
     {
-      tree stmt = bsi_stmt (bsi);
+      gimple stmt = gsi_stmt (gsi);
 
-      if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
+      if (gimple_code (stmt) == GIMPLE_SWITCH)
        {
-         tree cond = SWITCH_COND (stmt);
+         tree index = gimple_switch_index (stmt);
 
-         if (TREE_CODE (cond) == SSA_NAME)
+         if (TREE_CODE (index) == SSA_NAME)
            {
-             tree labels = SWITCH_LABELS (stmt);
-             int i, n_labels = TREE_VEC_LENGTH (labels);
+             int i;
+              int n_labels = gimple_switch_num_labels (stmt);
              tree *info = XCNEWVEC (tree, last_basic_block);
              edge e;
              edge_iterator ei;
 
              for (i = 0; i < n_labels; i++)
                {
-                 tree label = TREE_VEC_ELT (labels, i);
+                 tree label = gimple_switch_label (stmt, i);
                  basic_block target_bb = label_to_block (CASE_LABEL (label));
-
                  if (CASE_HIGH (label)
                      || !CASE_LOW (label)
                      || info[target_bb->index])
@@ -1316,13 +1683,13 @@ record_edge_info (basic_block bb)
              FOR_EACH_EDGE (e, ei, bb->succs)
                {
                  basic_block target_bb = e->dest;
-                 tree node = info[target_bb->index];
+                 tree label = info[target_bb->index];
 
-                 if (node != NULL && node != error_mark_node)
+                 if (label != NULL && label != error_mark_node)
                    {
-                     tree x = fold_convert (TREE_TYPE (cond), CASE_LOW (node));
+                     tree x = fold_convert (TREE_TYPE (index), CASE_LOW (label));
                      edge_info = allocate_edge_info (e);
-                     edge_info->lhs = cond;
+                     edge_info->lhs = index;
                      edge_info->rhs = x;
                    }
                }
@@ -1331,133 +1698,116 @@ record_edge_info (basic_block bb)
        }
 
       /* A COND_EXPR may create equivalences too.  */
-      if (stmt && TREE_CODE (stmt) == COND_EXPR)
+      if (gimple_code (stmt) == GIMPLE_COND)
        {
-         tree cond = COND_EXPR_COND (stmt);
          edge true_edge;
          edge false_edge;
 
-         extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
-
-         /* If the conditional is a single variable 'X', record 'X = 1'
-            for the true edge and 'X = 0' on the false edge.  */
-         if (SSA_VAR_P (cond))
-           {
-             struct edge_info *edge_info;
-
-             edge_info = allocate_edge_info (true_edge);
-             edge_info->lhs = cond;
-             edge_info->rhs = constant_boolean_node (1, TREE_TYPE (cond));
-
-             edge_info = allocate_edge_info (false_edge);
-             edge_info->lhs = cond;
-             edge_info->rhs = constant_boolean_node (0, TREE_TYPE (cond));
-           }
-         /* Equality tests may create one or two equivalences.  */
-         else if (COMPARISON_CLASS_P (cond))
-           {
-             tree op0 = TREE_OPERAND (cond, 0);
-             tree op1 = TREE_OPERAND (cond, 1);
-
-             /* Special case comparing booleans against a constant as we
-                know the value of OP0 on both arms of the branch, i.e., we
-                can record an equivalence for OP0 rather than COND.  */
-             if ((TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
-                 && TREE_CODE (op0) == SSA_NAME
-                 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
-                 && is_gimple_min_invariant (op1))
-               {
-                 if (TREE_CODE (cond) == EQ_EXPR)
-                   {
-                     edge_info = allocate_edge_info (true_edge);
-                     edge_info->lhs = op0;
-                     edge_info->rhs = (integer_zerop (op1)
-                                           ? boolean_false_node
-                                           : boolean_true_node);
-
-                     edge_info = allocate_edge_info (false_edge);
-                     edge_info->lhs = op0;
-                     edge_info->rhs = (integer_zerop (op1)
-                                           ? boolean_true_node
-                                           : boolean_false_node);
-                   }
-                 else
-                   {
-                     edge_info = allocate_edge_info (true_edge);
-                     edge_info->lhs = op0;
-                     edge_info->rhs = (integer_zerop (op1)
-                                           ? boolean_true_node
-                                           : boolean_false_node);
-
-                     edge_info = allocate_edge_info (false_edge);
-                     edge_info->lhs = op0;
-                     edge_info->rhs = (integer_zerop (op1)
-                                           ? boolean_false_node
-                                           : boolean_true_node);
-                   }
-               }
-
-             else if (is_gimple_min_invariant (op0)
-                      && (TREE_CODE (op1) == SSA_NAME
-                          || is_gimple_min_invariant (op1)))
-               {
-                 tree inverted = invert_truthvalue (cond);
-                 struct edge_info *edge_info;
-
-                 edge_info = allocate_edge_info (true_edge);
-                 record_conditions (edge_info, cond, inverted);
-
-                 if (TREE_CODE (cond) == EQ_EXPR)
-                   {
-                     edge_info->lhs = op1;
-                     edge_info->rhs = op0;
-                   }
-
-                 edge_info = allocate_edge_info (false_edge);
-                 record_conditions (edge_info, inverted, cond);
-
-                 if (TREE_CODE (cond) == NE_EXPR)
-                   {
-                     edge_info->lhs = op1;
-                     edge_info->rhs = op0;
-                   }
-               }
-
-             else if (TREE_CODE (op0) == SSA_NAME
-                      && (is_gimple_min_invariant (op1)
-                          || TREE_CODE (op1) == SSA_NAME))
-               {
-                 tree inverted = invert_truthvalue (cond);
-                 struct edge_info *edge_info;
-
-                 edge_info = allocate_edge_info (true_edge);
-                 record_conditions (edge_info, cond, inverted);
-
-                 if (TREE_CODE (cond) == EQ_EXPR)
-                   {
-                     edge_info->lhs = op0;
-                     edge_info->rhs = op1;
-                   }
-
-                 edge_info = allocate_edge_info (false_edge);
-                 record_conditions (edge_info, inverted, cond);
+          tree op0 = gimple_cond_lhs (stmt);
+          tree op1 = gimple_cond_rhs (stmt);
+          enum tree_code code = gimple_cond_code (stmt);
 
-                 if (TREE_CODE (cond) == NE_EXPR)
-                   {
-                     edge_info->lhs = op0;
-                     edge_info->rhs = op1;
-                   }
-               }
-           }
+         extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
 
-         /* ??? TRUTH_NOT_EXPR can create an equivalence too.  */
-       }
+          /* Special case comparing booleans against a constant, as we
+             know the value of OP0 on both arms of the branch, i.e., we
+             can record an equivalence for OP0 rather than the
+             condition itself.  */
+          if ((code == EQ_EXPR || code == NE_EXPR)
+              && TREE_CODE (op0) == SSA_NAME
+              && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
+              && is_gimple_min_invariant (op1))
+            {
+              if (code == EQ_EXPR)
+                {
+                  edge_info = allocate_edge_info (true_edge);
+                  edge_info->lhs = op0;
+                  edge_info->rhs = (integer_zerop (op1)
+                                    ? boolean_false_node
+                                    : boolean_true_node);
+
+                  edge_info = allocate_edge_info (false_edge);
+                  edge_info->lhs = op0;
+                  edge_info->rhs = (integer_zerop (op1)
+                                    ? boolean_true_node
+                                    : boolean_false_node);
+                }
+              else
+                {
+                  edge_info = allocate_edge_info (true_edge);
+                  edge_info->lhs = op0;
+                  edge_info->rhs = (integer_zerop (op1)
+                                    ? boolean_true_node
+                                    : boolean_false_node);
+
+                  edge_info = allocate_edge_info (false_edge);
+                  edge_info->lhs = op0;
+                  edge_info->rhs = (integer_zerop (op1)
+                                    ? boolean_false_node
+                                    : boolean_true_node);
+                }
+            }
+          else if (is_gimple_min_invariant (op0)
+                   && (TREE_CODE (op1) == SSA_NAME
+                       || is_gimple_min_invariant (op1)))
+            {
+              tree cond = build2 (code, boolean_type_node, op0, op1);
+              tree inverted = invert_truthvalue (cond);
+              struct edge_info *edge_info;
+
+              edge_info = allocate_edge_info (true_edge);
+              record_conditions (edge_info, cond, inverted);
+
+              if (code == EQ_EXPR)
+                {
+                  edge_info->lhs = op1;
+                  edge_info->rhs = op0;
+                }
+
+              edge_info = allocate_edge_info (false_edge);
+              record_conditions (edge_info, inverted, cond);
+
+              if (code == NE_EXPR)
+                {
+                  edge_info->lhs = op1;
+                  edge_info->rhs = op0;
+                }
+            }
+
+          else if (TREE_CODE (op0) == SSA_NAME
+                   && (is_gimple_min_invariant (op1)
+                       || TREE_CODE (op1) == SSA_NAME))
+            {
+              tree cond = build2 (code, boolean_type_node, op0, op1);
+              tree inverted = invert_truthvalue (cond);
+              struct edge_info *edge_info;
+
+              edge_info = allocate_edge_info (true_edge);
+              record_conditions (edge_info, cond, inverted);
+
+              if (code == EQ_EXPR)
+                {
+                  edge_info->lhs = op0;
+                  edge_info->rhs = op1;
+                }
+
+              edge_info = allocate_edge_info (false_edge);
+              record_conditions (edge_info, inverted, cond);
+
+              if (code == NE_EXPR)
+                {
+                  edge_info->lhs = op0;
+                  edge_info->rhs = op1;
+                }
+            }
+        }
+
+      /* ??? TRUTH_NOT_EXPR can create an equivalence too.  */
     }
 }
 
 /* Propagate information from BB to its outgoing edges.
 
-   This can include equivalency information implied by control statements
+   This can include equivalence information implied by control statements
    at the end of BB and const/copy propagation into PHIs in BB's
    successor blocks.  */
 
@@ -1476,16 +1826,17 @@ propagate_to_outgoing_edges (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
    table.  */
 
 static bool
-eliminate_redundant_computations (tree stmt)
+eliminate_redundant_computations (gimple_stmt_iterator* gsi)
 {
-  tree *expr_p, def = NULL_TREE;
-  bool insert = true;
+  tree expr_type;
   tree cached_lhs;
+  bool insert = true;
   bool retval = false;
-  bool modify_expr_p = false;
+  bool assigns_var_p = false;
 
-  if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
-    def = GIMPLE_STMT_OPERAND (stmt, 0);
+  gimple stmt = gsi_stmt (*gsi);
+
+  tree def = gimple_get_lhs (stmt);
 
   /* Certain expressions on the RHS can be optimized away, but can not
      themselves be entered into the hash tables.  */
@@ -1503,80 +1854,117 @@ eliminate_redundant_computations (tree stmt)
 
   opt_stats.num_exprs_considered++;
 
-  /* Get a pointer to the expression we are trying to optimize.  */
-  if (TREE_CODE (stmt) == COND_EXPR)
-    expr_p = &COND_EXPR_COND (stmt);
-  else if (TREE_CODE (stmt) == SWITCH_EXPR)
-    expr_p = &SWITCH_COND (stmt);
-  else if (TREE_CODE (stmt) == RETURN_EXPR && TREE_OPERAND (stmt, 0))
+  /* Get the type of the expression we are trying to optimize.  */
+  if (is_gimple_assign (stmt))
     {
-      expr_p = &GIMPLE_STMT_OPERAND (TREE_OPERAND (stmt, 0), 1);
-      modify_expr_p = true;
+      expr_type = TREE_TYPE (gimple_assign_lhs (stmt));
+      assigns_var_p = true;
     }
-  else
+  else if (gimple_code (stmt) == GIMPLE_COND)
+    expr_type = boolean_type_node;
+  else if (is_gimple_call (stmt))
     {
-      expr_p = &GENERIC_TREE_OPERAND (stmt, 1);
-      modify_expr_p = true;
+      gcc_assert (gimple_call_lhs (stmt));
+      expr_type = TREE_TYPE (gimple_call_lhs (stmt));
+      assigns_var_p = true;
     }
+  else if (gimple_code (stmt) == GIMPLE_SWITCH)
+    expr_type = TREE_TYPE (gimple_switch_index (stmt));
+  else
+    gcc_unreachable ();
+
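+  /* Nothing to eliminate if the expression was not computed before.  */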
+  if (!cached_lhs)
+    return false;
 
   /* It is safe to ignore types here since we have already done
      type checking in the hashing and equality routines.  In fact
      type checking here merely gets in the way of constant
      propagation.  Also, make sure that it is safe to propagate
-     CACHED_LHS into *EXPR_P.  */
-  if (cached_lhs
-      && ((TREE_CODE (cached_lhs) != SSA_NAME
-          && (modify_expr_p
-              || useless_type_conversion_p (TREE_TYPE (*expr_p),
-                                           TREE_TYPE (cached_lhs))))
-         || may_propagate_copy (*expr_p, cached_lhs)))
-    {
+     CACHED_LHS into the expression in STMT.  */
+  if ((TREE_CODE (cached_lhs) != SSA_NAME
+       && (assigns_var_p
+           || useless_type_conversion_p (expr_type, TREE_TYPE (cached_lhs))))
+      || may_propagate_copy_into_stmt (stmt, cached_lhs))
+    {
+#if defined ENABLE_CHECKING
+      gcc_assert (TREE_CODE (cached_lhs) == SSA_NAME
+                 || is_gimple_min_invariant (cached_lhs));
+#endif
+
       if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "  Replaced redundant expr '");
-         print_generic_expr (dump_file, *expr_p, dump_flags);
+         print_gimple_expr (dump_file, stmt, 0, dump_flags);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, cached_lhs, dump_flags);
-          fprintf (dump_file, "'\n");
+          fprintf (dump_file, "'\n");
        }
 
       opt_stats.num_re++;
 
-#if defined ENABLE_CHECKING
-      gcc_assert (TREE_CODE (cached_lhs) == SSA_NAME
-                 || is_gimple_min_invariant (cached_lhs));
-#endif
-
       if (TREE_CODE (cached_lhs) == ADDR_EXPR
-         || (POINTER_TYPE_P (TREE_TYPE (*expr_p))
+         || (POINTER_TYPE_P (expr_type)
              && is_gimple_min_invariant (cached_lhs)))
        retval = true;
       
-      if (modify_expr_p
-         && !useless_type_conversion_p (TREE_TYPE (*expr_p),
-                                       TREE_TYPE (cached_lhs)))
-       cached_lhs = fold_convert (TREE_TYPE (*expr_p), cached_lhs);
+      if (assigns_var_p
+         && !useless_type_conversion_p (expr_type, TREE_TYPE (cached_lhs)))
+       cached_lhs = fold_convert (expr_type, cached_lhs);
 
-      propagate_tree_value (expr_p, cached_lhs);
-      mark_stmt_modified (stmt);
-    }
+      propagate_tree_value_into_stmt (gsi, cached_lhs);
+
+      /* Since it is always necessary to mark the result as modified,
+         perhaps we should move this into propagate_tree_value_into_stmt
+         itself.  */
+      gimple_set_modified (gsi_stmt (*gsi), true);
+  }
   return retval;
 }
 
-/* STMT, a GIMPLE_MODIFY_STMT, may create certain equivalences, in either
+/* Return true if statement GS is an assignment that performs a useless
+   type conversion.  It is intended to be a tuples analog of function
+   tree_ssa_useless_type_conversion.  */
+
+static bool
+gimple_assign_unary_useless_conversion_p (gimple gs)
+{
+  if (is_gimple_assign (gs)
+      && (gimple_assign_rhs_code (gs) == NOP_EXPR
+          || gimple_assign_rhs_code (gs) == CONVERT_EXPR
+          || gimple_assign_rhs_code (gs) == VIEW_CONVERT_EXPR
+          || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR))
+    {
+      tree lhs_type = TREE_TYPE (gimple_assign_lhs (gs));
+      tree rhs_type = TREE_TYPE (gimple_assign_rhs1 (gs));
+      return useless_type_conversion_p (lhs_type, rhs_type);
+    }
+  
+  return false;
+}
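For comparison, the pre-tuples spelling of the same check, using the old accessors that this patch is removing, would have looked roughly like the sketch below (illustrative only, not part of the patch):

    /* Pre-tuples equivalent of gimple_assign_unary_useless_conversion_p,
       assuming STMT is a GIMPLE_MODIFY_STMT tree node.  */
    if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
        && tree_ssa_useless_type_conversion (GIMPLE_STMT_OPERAND (stmt, 1)))
      ...  /* The RHS conversion can be treated as a plain copy.  */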
+
+/* STMT, a GIMPLE_ASSIGN, may create certain equivalences, in either
    the available expressions table or the const_and_copies table.
    Detect and record those equivalences.  */
+/* We handle only very simple copy equivalences here.  The heavy
+   lifting is done by eliminate_redundant_computations.  */
 
 static void
-record_equivalences_from_stmt (tree stmt, int may_optimize_p, stmt_ann_t ann)
+record_equivalences_from_stmt (gimple stmt, int may_optimize_p)
 {
-  tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
-  enum tree_code lhs_code = TREE_CODE (lhs);
+  tree lhs;
+  enum tree_code lhs_code;
 
-  if (lhs_code == SSA_NAME)
-    {
-      tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
+  gcc_assert (is_gimple_assign (stmt));
+
+  lhs = gimple_assign_lhs (stmt);
+  lhs_code = TREE_CODE (lhs);
 
+  if (lhs_code == SSA_NAME
+      && (gimple_assign_single_p (stmt)
+          || gimple_assign_unary_useless_conversion_p (stmt)))
+    {
+      tree rhs = gimple_assign_rhs1 (stmt);
+               
       /* Strip away any useless type conversions.  */
       STRIP_USELESS_TYPE_CONVERSION (rhs);
 
@@ -1589,24 +1977,53 @@ record_equivalences_from_stmt (tree stmt, int may_optimize_p, stmt_ann_t ann)
       if (may_optimize_p
          && (TREE_CODE (rhs) == SSA_NAME
              || is_gimple_min_invariant (rhs)))
+      {
+       if (dump_file && (dump_flags & TDF_DETAILS))
+         {
+           fprintf (dump_file, "==== ASGN ");
+           print_generic_expr (dump_file, lhs, 0);
+           fprintf (dump_file, " = ");
+           print_generic_expr (dump_file, rhs, 0);
+           fprintf (dump_file, "\n");
+         }
+
        SSA_NAME_VALUE (lhs) = rhs;
+      }
     }
 
   /* A memory store, even an aliased store, creates a useful
      equivalence.  By exchanging the LHS and RHS, creating suitable
      vops and recording the result in the available expression table,
      we may be able to expose more redundant loads.  */
-  if (!ann->has_volatile_ops
-      && stmt_references_memory_p (stmt)
-      && (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == SSA_NAME
-         || is_gimple_min_invariant (GIMPLE_STMT_OPERAND (stmt, 1)))
+  if (!gimple_has_volatile_ops (stmt)
+      && gimple_references_memory_p (stmt)
+      && gimple_assign_single_p (stmt)
+      && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
+         || is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
       && !is_gimple_reg (lhs))
     {
-      tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-      tree new_stmt;
+      tree rhs = gimple_assign_rhs1 (stmt);
+      gimple new_stmt;
 
       /* Build a new statement with the RHS and LHS exchanged.  */
-      new_stmt = build_gimple_modify_stmt (rhs, lhs);
+      if (TREE_CODE (rhs) == SSA_NAME)
+        {
+          /* NOTE tuples.  The call to gimple_build_assign below replaced
+             a call to build_gimple_modify_stmt, which did not set the
+             SSA_NAME_DEF_STMT on the LHS of the assignment.  Doing so
+             may cause an SSA validation failure, as the LHS may be a
+             default-initialized name and should have no definition.  I'm
+             a bit dubious of this, as the artificial statement that we
+             generate here may in fact be ill-formed, but it is simply
+             used as an internal device in this pass, and never becomes
+             part of the CFG.  */
+          gimple defstmt = SSA_NAME_DEF_STMT (rhs);
+          new_stmt = gimple_build_assign (rhs, lhs);
+          SSA_NAME_DEF_STMT (rhs) = defstmt;
+        }
+      else
+        new_stmt = gimple_build_assign (rhs, lhs);
+
       create_ssa_artificial_load_stmt (new_stmt, stmt, true);
 
       /* Finally enter the statement into the available expression
@@ -1619,7 +2036,7 @@ record_equivalences_from_stmt (tree stmt, int may_optimize_p, stmt_ann_t ann)
    CONST_AND_COPIES.  */
 
 static bool
-cprop_operand (tree stmt, use_operand_p op_p)
+cprop_operand (gimple stmt, use_operand_p op_p)
 {
   bool may_have_exposed_new_symbols = false;
   tree val;
@@ -1645,7 +2062,7 @@ cprop_operand (tree stmt, use_operand_p op_p)
        return false;
 
       /* Do not replace hard register operands in asm statements.  */
-      if (TREE_CODE (stmt) == ASM_EXPR
+      if (gimple_code (stmt) == GIMPLE_ASM
          && !may_propagate_copy_into_asm (op))
        return false;
 
@@ -1717,7 +2134,7 @@ cprop_operand (tree stmt, use_operand_p op_p)
       /* And note that we modified this statement.  This is now
         safe, even if we changed virtual operands since we will
         rescan the statement and rewrite its operands again.  */
-      mark_stmt_modified (stmt);
+      gimple_set_modified (stmt, true);
     }
   return may_have_exposed_new_symbols;
 }
@@ -1729,7 +2146,7 @@ cprop_operand (tree stmt, use_operand_p op_p)
    vdef_ops of STMT.  */
 
 static bool
-cprop_into_stmt (tree stmt)
+cprop_into_stmt (gimple stmt)
 {
   bool may_have_exposed_new_symbols = false;
   use_operand_p op_p;
@@ -1744,7 +2161,6 @@ cprop_into_stmt (tree stmt)
   return may_have_exposed_new_symbols;
 }
 
-
 /* Optimize the statement pointed to by iterator SI.
    
    We try to perform some simplistic global redundancy elimination and
@@ -1762,28 +2178,26 @@ cprop_into_stmt (tree stmt)
 
 static void
 optimize_stmt (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
-              basic_block bb, block_stmt_iterator si)
+              basic_block bb, gimple_stmt_iterator si)
 {
-  stmt_ann_t ann;
-  tree stmt, old_stmt;
+  gimple stmt, old_stmt;
   bool may_optimize_p;
   bool may_have_exposed_new_symbols = false;
 
-  old_stmt = stmt = bsi_stmt (si);
+  old_stmt = stmt = gsi_stmt (si);
   
-  if (TREE_CODE (stmt) == COND_EXPR)
+  if (gimple_code (stmt) == GIMPLE_COND)
     canonicalize_comparison (stmt);
   
   update_stmt_if_modified (stmt);
-  ann = stmt_ann (stmt);
   opt_stats.num_stmts++;
   may_have_exposed_new_symbols = false;
-  push_stmt_changes (bsi_stmt_ptr (si));
+  push_stmt_changes (gsi_stmt_ptr (&si));
 
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
       fprintf (dump_file, "Optimizing statement ");
-      print_generic_stmt (dump_file, stmt, TDF_SLIM);
+      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
     }
 
   /* Const/copy propagate into USES, VUSES and the RHS of VDEFs.  */
@@ -1791,27 +2205,34 @@ optimize_stmt (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
 
   /* If the statement has been modified with constant replacements,
      fold its RHS before checking for redundant computations.  */
-  if (ann->modified)
+  if (gimple_modified_p (stmt))
     {
-      tree rhs;
+      tree rhs = NULL;
 
       /* Try to fold the statement making sure that STMT is kept
         up to date.  */
-      if (fold_stmt (bsi_stmt_ptr (si)))
+      if (fold_stmt (&si))
        {
-         stmt = bsi_stmt (si);
-         ann = stmt_ann (stmt);
+         stmt = gsi_stmt (si);
 
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "  Folded to: ");
-             print_generic_stmt (dump_file, stmt, TDF_SLIM);
+             print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
            }
        }
 
-      rhs = get_rhs (stmt);
+      /* We only need to consider cases that can yield a gimple operand.  */
+      if (gimple_assign_single_p (stmt))
+        rhs = gimple_assign_rhs1 (stmt);
+      else if (gimple_code (stmt) == GIMPLE_GOTO)
+        rhs = gimple_goto_dest (stmt);
+      else if (gimple_code (stmt) == GIMPLE_SWITCH)
+        /* This should never be an ADDR_EXPR.  */
+        rhs = gimple_switch_index (stmt);
+
       if (rhs && TREE_CODE (rhs) == ADDR_EXPR)
-       recompute_tree_invariant_for_addr_expr (rhs);
+        recompute_tree_invariant_for_addr_expr (rhs);
 
       /* Constant/copy propagation above may change the set of 
         virtual operands associated with this statement.  Folding
@@ -1823,26 +2244,24 @@ optimize_stmt (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
 
   /* Check for redundant computations.  Do this optimization only
      for assignments that have no volatile ops and conditionals.  */
-  may_optimize_p = (!ann->has_volatile_ops
-                   && ((TREE_CODE (stmt) == RETURN_EXPR
-                        && TREE_OPERAND (stmt, 0)
-                        && TREE_CODE (TREE_OPERAND (stmt, 0))
-                           == GIMPLE_MODIFY_STMT
-                        && ! (TREE_SIDE_EFFECTS
-                              (GIMPLE_STMT_OPERAND
-                               (TREE_OPERAND (stmt, 0), 1))))
-                       || (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-                           && ! TREE_SIDE_EFFECTS (GIMPLE_STMT_OPERAND (stmt,
-                                                                        1)))
-                       || TREE_CODE (stmt) == COND_EXPR
-                       || TREE_CODE (stmt) == SWITCH_EXPR));
+  may_optimize_p = (!gimple_has_volatile_ops (stmt)
+                    && ((is_gimple_assign (stmt)
+                         && !gimple_rhs_has_side_effects (stmt))
+                        || (is_gimple_call (stmt)
+                            && gimple_call_lhs (stmt) != NULL_TREE
+                            && !gimple_rhs_has_side_effects (stmt))
+                        || gimple_code (stmt) == GIMPLE_COND
+                        || gimple_code (stmt) == GIMPLE_SWITCH));
 
   if (may_optimize_p)
-    may_have_exposed_new_symbols |= eliminate_redundant_computations (stmt);
+    {
+      may_have_exposed_new_symbols |= eliminate_redundant_computations (&si);
+      stmt = gsi_stmt (si);
+    }
 
   /* Record any additional equivalences created by this statement.  */
-  if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
-    record_equivalences_from_stmt (stmt, may_optimize_p, ann);
+  if (is_gimple_assign (stmt))
+    record_equivalences_from_stmt (stmt, may_optimize_p);
 
   /* If STMT is a COND_EXPR and it was modified, then we may know
      where it goes.  If that is the case, then mark the CFG as altered.
@@ -1869,14 +2288,15 @@ optimize_stmt (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
 
      Ultimately I suspect we're going to need to change the interface
      into the SSA_NAME manager.  */
-  if (ann->modified)
+  if (gimple_modified_p (stmt))
     {
       tree val = NULL;
 
-      if (TREE_CODE (stmt) == COND_EXPR)
-       val = COND_EXPR_COND (stmt);
-      else if (TREE_CODE (stmt) == SWITCH_EXPR)
-       val = SWITCH_COND (stmt);
+      if (gimple_code (stmt) == GIMPLE_COND)
+        val = fold_binary (gimple_cond_code (stmt), boolean_type_node,
+                           gimple_cond_lhs (stmt),  gimple_cond_rhs (stmt));
+      else if (gimple_code (stmt) == GIMPLE_SWITCH)
+       val = gimple_switch_index (stmt);
 
       if (val && TREE_CODE (val) == INTEGER_CST && find_taken_edge (bb, val))
        cfg_altered = true;
@@ -1897,47 +2317,50 @@ optimize_stmt (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
         AVAIL_EXPRS have been processed.  The change buffer stack for
         all the pushed statements will be processed when this queue
         is emptied.  */
-      VEC_safe_push (tree_p, heap, stmts_to_rescan, bsi_stmt_ptr (si));
+      VEC_safe_push (gimple_p, heap, stmts_to_rescan, gsi_stmt_ptr (&si));
     }
   else
     {
       /* Otherwise, just discard the recently pushed change buffer.  If
         not, the STMTS_TO_RESCAN queue will get out of synch with the
         change buffer stack.  */
-      discard_stmt_changes (bsi_stmt_ptr (si));
+      discard_stmt_changes (gsi_stmt_ptr (&si));
     }
 }
 
-/* Search for an existing instance of STMT in the AVAIL_EXPRS table.  If
-   found, return its LHS. Otherwise insert STMT in the table and return
-   NULL_TREE.
+/* Search for an existing instance of STMT in the AVAIL_EXPRS table.
+   If found, return its LHS. Otherwise insert STMT in the table and
+   return NULL_TREE.
 
-   Also, when an expression is first inserted in the AVAIL_EXPRS table, it
-   is also added to the stack pointed to by BLOCK_AVAIL_EXPRS_P, so that they
-   can be removed when we finish processing this block and its children.
-
-   NOTE: This function assumes that STMT is a GIMPLE_MODIFY_STMT node that
-   contains no CALL_EXPR on its RHS and makes no volatile nor
-   aliased references.  */
+   Also, when an expression is first inserted in the table, it is
+   also added to AVAIL_EXPRS_STACK, so that it can be removed when
+   we finish processing this block and its children.  */
 
 static tree
-lookup_avail_expr (tree stmt, bool insert)
+lookup_avail_expr (gimple stmt, bool insert)
 {
   void **slot;
   tree lhs;
   tree temp;
   struct expr_hash_elt *element = XNEW (struct expr_hash_elt);
 
-  lhs = TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-                           ? GIMPLE_STMT_OPERAND (stmt, 0) : NULL;
+  /* Get LHS of assignment or call, else NULL_TREE.  */
+  lhs = gimple_get_lhs (stmt);
 
   initialize_hash_element (stmt, lhs, element);
 
+  if (dump_file && (dump_flags & TDF_DETAILS))
+    {
+      fprintf (dump_file, "LKUP ");
+      print_expr_hash_elt (dump_file, element);
+    }
+
   /* Don't bother remembering constant assignments and copy operations.
      Constants and copy operations are handled by the constant/copy propagator
      in optimize_stmt.  */
-  if (TREE_CODE (element->rhs) == SSA_NAME
-      || is_gimple_min_invariant (element->rhs))
+  if (element->expr.kind == EXPR_SINGLE
+      && (TREE_CODE (element->expr.ops.single.rhs) == SSA_NAME
+          || is_gimple_min_invariant (element->expr.ops.single.rhs)))
     {
       free (element);
       return NULL_TREE;
@@ -1949,14 +2372,20 @@ lookup_avail_expr (tree stmt, bool insert)
   if (slot == NULL)
     {
       free (element);
-      return NULL_TREE;
+      return NULL_TREE;  
     }
 
   if (*slot == NULL)
     {
       *slot = (void *) element;
-      VEC_safe_push (tree, heap, avail_exprs_stack,
-                    stmt ? stmt : element->rhs);
+
+      if (dump_file && (dump_flags & TDF_DETAILS))
+        {
+          fprintf (dump_file, "2>>> ");
+          print_expr_hash_elt (dump_file, element);
+        }
+
+      VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, element);
       return NULL_TREE;
     }
 
@@ -1974,31 +2403,36 @@ lookup_avail_expr (tree stmt, bool insert)
     }
 
   free (element);
+
+  if (dump_file && (dump_flags & TDF_DETAILS))
+    {
+      fprintf (dump_file, "FIND: ");
+      print_generic_expr (dump_file, lhs, 0);
+      fprintf (dump_file, "\n");
+    }
+
   return lhs;
 }
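A caller in this pass consumes the return convention roughly as follows (a sketch, not part of the patch; see eliminate_redundant_computations above for the real use):

    tree cached_lhs = lookup_avail_expr (stmt, true /* insert */);
    if (cached_lhs)
      {
        /* STMT recomputes a value already available in CACHED_LHS;
           propagate CACHED_LHS into STMT.  */
      }
    else
      {
        /* First occurrence: the expression is now in AVAIL_EXPRS and on
           avail_exprs_stack, and will be popped when this block and its
           dominated children have been processed.  */
      }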
 
-/* Hashing and equality functions for AVAIL_EXPRS.  The table stores
-   GIMPLE_MODIFY_STMT statements.  We compute a value number for expressions
-   using the code of the expression and the SSA numbers of its operands.  */
+/* Hashing and equality functions for AVAIL_EXPRS.  We compute a value number
+   for expressions using the code of the expression and the SSA numbers of
+   its operands.  */
 
 static hashval_t
 avail_expr_hash (const void *p)
 {
-  tree stmt = ((const struct expr_hash_elt *)p)->stmt;
-  tree rhs = ((const struct expr_hash_elt *)p)->rhs;
+  gimple stmt = ((const struct expr_hash_elt *)p)->stmt;
+  const struct hashable_expr *expr = &((const struct expr_hash_elt *)p)->expr;
   tree vuse;
   ssa_op_iter iter;
   hashval_t val = 0;
 
-  /* iterative_hash_expr knows how to deal with any expression and
-     deals with commutative operators as well, so just use it instead
-     of duplicating such complexities here.  */
-  val = iterative_hash_expr (rhs, val);
+  val = iterative_hash_hashable_expr (expr, val);
 
   /* If the hash table entry is not associated with a statement, then we
      can just hash the expression and not worry about virtual operands
      and such.  */
-  if (!stmt || !stmt_ann (stmt))
+  if (!stmt)
     return val;
 
   /* Add the SSA version numbers of every vuse operand.  This is important
@@ -2020,27 +2454,34 @@ real_avail_expr_hash (const void *p)
 static int
 avail_expr_eq (const void *p1, const void *p2)
 {
-  tree stmt1 = ((const struct expr_hash_elt *)p1)->stmt;
-  tree rhs1 = ((const struct expr_hash_elt *)p1)->rhs;
-  tree stmt2 = ((const struct expr_hash_elt *)p2)->stmt;
-  tree rhs2 = ((const struct expr_hash_elt *)p2)->rhs;
-
-  /* If they are the same physical expression, return true.  */
-  if (rhs1 == rhs2 && stmt1 == stmt2)
+  gimple stmt1 = ((const struct expr_hash_elt *)p1)->stmt;
+  const struct hashable_expr *expr1 = &((const struct expr_hash_elt *)p1)->expr;
+  const struct expr_hash_elt *stamp1 = ((const struct expr_hash_elt *)p1)->stamp;
+  gimple stmt2 = ((const struct expr_hash_elt *)p2)->stmt;
+  const struct hashable_expr *expr2 = &((const struct expr_hash_elt *)p2)->expr;
+  const struct expr_hash_elt *stamp2 = ((const struct expr_hash_elt *)p2)->stamp;
+
+  /* This case should apply only when removing entries from the table.  */
+  if (stamp1 == stamp2)
     return true;
 
-  /* If their codes are not equal, then quit now.  */
-  if (TREE_CODE (rhs1) != TREE_CODE (rhs2))
+  /* FIXME tuples:
+     We add stmts to a hash table and then modify them.  To detect the case
+     that we modify a stmt and then search for it, we assume that the hash
+     is always modified by that change.
+     We have to fully check why this doesn't happen on trunk or rewrite
+     this in a more reliable (and easier to understand) way.  */
+  if (((const struct expr_hash_elt *)p1)->hash
+      != ((const struct expr_hash_elt *)p2)->hash)
     return false;
 
   /* In case of a collision, both RHS have to be identical and have the
      same VUSE operands.  */
-  if (types_compatible_p (TREE_TYPE (rhs1), TREE_TYPE (rhs2))
-      && operand_equal_p (rhs1, rhs2, OEP_PURE_SAME))
+  if (hashable_expr_equal_p (expr1, expr2)
+      && types_compatible_p (expr1->type, expr2->type))
     {
+      /* Note that STMT1 and/or STMT2 may be NULL.  */
       bool ret = compare_ssa_operands_equal (stmt1, stmt2, SSA_OP_VUSE);
-      gcc_assert (!ret || ((const struct expr_hash_elt *)p1)->hash
-                 == ((const struct expr_hash_elt *)p2)->hash);
       return ret;
     }
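Taken together, the value number for a table entry is the hash of the flattened hashable_expr folded with the SSA versions of the statement's VUSEs; schematically (a sketch of the scheme only; the exact loop form is an assumption, with VUSE and ITER declared as in avail_expr_hash above):

    hashval_t val = iterative_hash_hashable_expr (&element->expr, 0);
    FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VUSE)
      val = iterative_hash_expr (vuse, val);   /* Mix in each VUSE version.  */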
 
@@ -2054,18 +2495,18 @@ avail_expr_eq (const void *p1, const void *p2)
    NULL.  */
 
 static tree
-degenerate_phi_result (tree phi)
+degenerate_phi_result (gimple phi)
 {
-  tree lhs = PHI_RESULT (phi);
+  tree lhs = gimple_phi_result (phi);
   tree val = NULL;
-  int i;
+  size_t i;
 
   /* Ignoring arguments which are the same as LHS, if all the remaining
      arguments are the same, then the PHI is a degenerate and has the
      value of that common argument.  */
-  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
+  for (i = 0; i < gimple_phi_num_args (phi); i++)
     {
-      tree arg = PHI_ARG_DEF (phi, i);
+      tree arg = gimple_phi_arg_def (phi, i);
 
       if (arg == lhs)
        continue;
@@ -2074,51 +2515,54 @@ degenerate_phi_result (tree phi)
       else if (!operand_equal_p (arg, val, 0))
        break;
     }
-  return (i == PHI_NUM_ARGS (phi) ? val : NULL);
+  return (i == gimple_phi_num_args (phi) ? val : NULL);
 }
 
-/* Given a tree node T, which is either a PHI_NODE or GIMPLE_MODIFY_STMT,
+/* Given a statement STMT, which is either a PHI node or an assignment,
    remove it from the IL.  */
 
 static void
-remove_stmt_or_phi (tree t)
+remove_stmt_or_phi (gimple stmt)
 {
-  if (TREE_CODE (t) == PHI_NODE)
-    remove_phi_node (t, NULL, true);
+  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
+
+  if (gimple_code (stmt) == GIMPLE_PHI)
+    remove_phi_node (&gsi, true);
   else
     {
-      block_stmt_iterator bsi = bsi_for_stmt (t);
-      bsi_remove (&bsi, true);
-      release_defs (t);
+      gsi_remove (&gsi, true);
+      release_defs (stmt);
     }
 }
 
-/* Given a tree node T, which is either a PHI_NODE or GIMPLE_MODIFY_STMT,
+/* Given a statement STMT, which is either a PHI node or an assignment,
    return the "rhs" of the node, in the case of a non-degenerate
-   PHI, NULL is returned.  */
+   phi, NULL is returned.  */
 
 static tree
-get_rhs_or_phi_arg (tree t)
+get_rhs_or_phi_arg (gimple stmt)
 {
-  if (TREE_CODE (t) == PHI_NODE)
-    return degenerate_phi_result (t);
-  else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
-    return GIMPLE_STMT_OPERAND (t, 1);
-  gcc_unreachable ();
+  if (gimple_code (stmt) == GIMPLE_PHI)
+    return degenerate_phi_result (stmt);
+  else if (gimple_assign_single_p (stmt))
+    return gimple_assign_rhs1 (stmt);
+  else
+    gcc_unreachable ();
 }
 
 
-/* Given a tree node T, which is either a PHI_NODE or a GIMPLE_MODIFY_STMT,
+/* Given a statement STMT, which is either a PHI node or an assignment,
    return the "lhs" of the node.  */
 
 static tree
-get_lhs_or_phi_result (tree t)
+get_lhs_or_phi_result (gimple stmt)
 {
-  if (TREE_CODE (t) == PHI_NODE)
-    return PHI_RESULT (t);
-  else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
-    return GIMPLE_STMT_OPERAND (t, 0);
-  gcc_unreachable ();
+  if (gimple_code (stmt) == GIMPLE_PHI)
+    return gimple_phi_result (stmt);
+  else if (is_gimple_assign (stmt))
+    return gimple_assign_lhs (stmt);
+  else
+    gcc_unreachable ();
 }
 
 /* Propagate RHS into all uses of LHS (when possible).
@@ -2133,7 +2577,7 @@ get_lhs_or_phi_result (tree t)
    opportunities.  */
 
 static void 
-propagate_rhs_into_lhs (tree stmt, tree lhs, tree rhs, bitmap interesting_names)
+propagate_rhs_into_lhs (gimple stmt, tree lhs, tree rhs, bitmap interesting_names)
 {
   /* First verify that propagation is valid and isn't going to move a
      loop variant variable outside its loop.  */
@@ -2145,7 +2589,7 @@ propagate_rhs_into_lhs (tree stmt, tree lhs, tree rhs, bitmap interesting_names)
     {
       use_operand_p use_p;
       imm_use_iterator iter;
-      tree use_stmt;
+      gimple use_stmt;
       bool all = true;
 
       /* Dump details.  */
@@ -2166,8 +2610,8 @@ propagate_rhs_into_lhs (tree stmt, tree lhs, tree rhs, bitmap interesting_names)
        {
        
          /* It's not always safe to propagate into an ASM_EXPR.  */
-         if (TREE_CODE (use_stmt) == ASM_EXPR
-             && ! may_propagate_copy_into_asm (lhs))
+         if (gimple_code (use_stmt) == GIMPLE_ASM
+              && ! may_propagate_copy_into_asm (lhs))
            {
              all = false;
              continue;
@@ -2177,8 +2621,7 @@ propagate_rhs_into_lhs (tree stmt, tree lhs, tree rhs, bitmap interesting_names)
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "    Original statement:");
-             print_generic_expr (dump_file, use_stmt, dump_flags);
-             fprintf (dump_file, "\n");
+             print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
            }
 
          push_stmt_changes (&use_stmt);
@@ -2197,7 +2640,7 @@ propagate_rhs_into_lhs (tree stmt, tree lhs, tree rhs, bitmap interesting_names)
             Second, if we're propagating a virtual operand and the
             propagation does not change the underlying _DECL node for
             the virtual operand, then no further actions are necessary.  */
-         if (TREE_CODE (use_stmt) == PHI_NODE
+         if (gimple_code (use_stmt) == GIMPLE_PHI
              || (! is_gimple_reg (lhs)
                  && TREE_CODE (rhs) == SSA_NAME
                  && SSA_NAME_VAR (lhs) == SSA_NAME_VAR (rhs)))
@@ -2206,13 +2649,12 @@ propagate_rhs_into_lhs (tree stmt, tree lhs, tree rhs, bitmap interesting_names)
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "    Updated statement:");
-                 print_generic_expr (dump_file, use_stmt, dump_flags);
-                 fprintf (dump_file, "\n");
+                 print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
                }
 
              /* Propagation into a PHI may expose new degenerate PHIs,
                 so mark the result of the PHI as interesting.  */
-             if (TREE_CODE (use_stmt) == PHI_NODE)
+             if (gimple_code (use_stmt) == GIMPLE_PHI)
                {
                  tree result = get_lhs_or_phi_result (use_stmt);
                  bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
@@ -2226,6 +2668,12 @@ propagate_rhs_into_lhs (tree stmt, tree lhs, tree rhs, bitmap interesting_names)
             real statement.  Folding may (or may not) be possible,
             we may expose new operands, expose dead EH edges,
             etc.  */
+          /* NOTE tuples. In the tuples world, fold_stmt_inplace
+             cannot fold a call that simplifies to a constant,
+             because the GIMPLE_CALL must be replaced by a
+             GIMPLE_ASSIGN, and there is no way to effect such a
+             transformation in-place.  We might want to consider
+             using the more general fold_stmt here.  */
          fold_stmt_inplace (use_stmt);
 
          /* Sometimes propagation can expose new operands to the
@@ -2237,34 +2685,32 @@ propagate_rhs_into_lhs (tree stmt, tree lhs, tree rhs, bitmap interesting_names)
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "    Updated statement:");
-             print_generic_expr (dump_file, use_stmt, dump_flags);
-             fprintf (dump_file, "\n");
+             print_gimple_stmt (dump_file, use_stmt, 0, dump_flags);
            }
 
          /* If we replaced a variable index with a constant, then
             we would need to update the invariant flag for ADDR_EXPRs.  */
-         if (TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
-             && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == ADDR_EXPR)
+          if (gimple_assign_single_p (use_stmt)
+              && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == ADDR_EXPR)
            recompute_tree_invariant_for_addr_expr
-             (GIMPLE_STMT_OPERAND (use_stmt, 1));
+                (gimple_assign_rhs1 (use_stmt));
 
          /* If we cleaned up EH information from the statement,
             mark its containing block as needing EH cleanups.  */
          if (maybe_clean_or_replace_eh_stmt (use_stmt, use_stmt))
            {
-             bitmap_set_bit (need_eh_cleanup, bb_for_stmt (use_stmt)->index);
+             bitmap_set_bit (need_eh_cleanup, gimple_bb (use_stmt)->index);
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "  Flagged to clear EH edges.\n");
            }
 
          /* Propagation may expose new trivial copy/constant propagation
             opportunities.  */
-         if (TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
-             && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 0)) == SSA_NAME
-             && (TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == SSA_NAME
-                 || is_gimple_min_invariant (GIMPLE_STMT_OPERAND (use_stmt,
-                                                                  1))))
-           {
+          if (gimple_assign_single_p (use_stmt)
+              && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
+              && (TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
+                  || is_gimple_min_invariant (gimple_assign_rhs1 (use_stmt))))
+            {
              tree result = get_lhs_or_phi_result (use_stmt);
              bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
            }
@@ -2273,41 +2719,44 @@ propagate_rhs_into_lhs (tree stmt, tree lhs, tree rhs, bitmap interesting_names)
             the CFG unexecutable.  We want to identify them as PHI nodes
             at the destination of those unexecutable edges may become
             degenerates.  */
-         else if (TREE_CODE (use_stmt) == COND_EXPR
-                  || TREE_CODE (use_stmt) == SWITCH_EXPR
-                  || TREE_CODE (use_stmt) == GOTO_EXPR)
-           {
+         else if (gimple_code (use_stmt) == GIMPLE_COND
+                  || gimple_code (use_stmt) == GIMPLE_SWITCH
+                  || gimple_code (use_stmt) == GIMPLE_GOTO)
+            {
              tree val;
 
-             if (TREE_CODE (use_stmt) == COND_EXPR)
-               val = COND_EXPR_COND (use_stmt);
-             else if (TREE_CODE (use_stmt) == SWITCH_EXPR)
-               val = SWITCH_COND (use_stmt);
+             if (gimple_code (use_stmt) == GIMPLE_COND)
+                val = fold_binary (gimple_cond_code (use_stmt),
+                                   boolean_type_node,
+                                   gimple_cond_lhs (use_stmt),
+                                   gimple_cond_rhs (use_stmt));
+              else if (gimple_code (use_stmt) == GIMPLE_SWITCH)
+               val = gimple_switch_index (use_stmt);
              else
-               val = GOTO_DESTINATION  (use_stmt);
+               val = gimple_goto_dest  (use_stmt);
 
-             if (is_gimple_min_invariant (val))
+             if (val && is_gimple_min_invariant (val))
                {
-                 basic_block bb = bb_for_stmt (use_stmt);
+                 basic_block bb = gimple_bb (use_stmt);
                  edge te = find_taken_edge (bb, val);
                  edge_iterator ei;
                  edge e;
-                 block_stmt_iterator bsi;
+                 gimple_stmt_iterator gsi, psi;
 
                  /* Remove all outgoing edges except TE.  */
                  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei));)
                    {
                      if (e != te)
                        {
-                         tree phi;
-
                          /* Mark all the PHI nodes at the destination of
                             the unexecutable edge as interesting.  */
-                         for (phi = phi_nodes (e->dest);
-                              phi;
-                              phi = PHI_CHAIN (phi))
-                           {
-                             tree result = PHI_RESULT (phi);
+                          for (psi = gsi_start_phis (e->dest);
+                               !gsi_end_p (psi);
+                               gsi_next (&psi))
+                            {
+                              gimple phi = gsi_stmt (psi);
+
+                             tree result = gimple_phi_result (phi);
                              int version = SSA_NAME_VERSION (result);
 
                              bitmap_set_bit (interesting_names, version);
@@ -2323,8 +2772,8 @@ propagate_rhs_into_lhs (tree stmt, tree lhs, tree rhs, bitmap interesting_names)
                        ei_next (&ei);
                    }
 
-                 bsi = bsi_last (bb_for_stmt (use_stmt));
-                 bsi_remove (&bsi, true);
+                 gsi = gsi_last_bb (gimple_bb (use_stmt));
+                 gsi_remove (&gsi, true);
 
                  /* And fixup the flags on the single remaining edge.  */
                  te->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
@@ -2346,7 +2795,7 @@ propagate_rhs_into_lhs (tree stmt, tree lhs, tree rhs, bitmap interesting_names)
     }
 }
 
-/* T is either a PHI node (potentially a degenerate PHI node) or
+/* STMT is either a PHI node (potentially a degenerate PHI node) or
    a statement that is a trivial copy or constant initialization.
 
    Attempt to eliminate T by propagating its RHS into all uses of
@@ -2354,12 +2803,12 @@ propagate_rhs_into_lhs (tree stmt, tree lhs, tree rhs, bitmap interesting_names)
    for nodes we want to revisit later.
 
    All exit paths should clear INTERESTING_NAMES for the result
-   of T.  */
+   of STMT.  */
 
 static void
-eliminate_const_or_copy (tree t, bitmap interesting_names)
+eliminate_const_or_copy (gimple stmt, bitmap interesting_names)
 {
-  tree lhs = get_lhs_or_phi_result (t);
+  tree lhs = get_lhs_or_phi_result (stmt);
   tree rhs;
   int version = SSA_NAME_VERSION (lhs);
 
@@ -2371,22 +2820,22 @@ eliminate_const_or_copy (tree t, bitmap interesting_names)
   if (has_zero_uses (lhs))
     {
       bitmap_clear_bit (interesting_names, version);
-      remove_stmt_or_phi (t);
+      remove_stmt_or_phi (stmt);
       return;
     }
 
   /* Get the RHS of the assignment or PHI node if the PHI is a
      degenerate.  */
-  rhs = get_rhs_or_phi_arg (t);
+  rhs = get_rhs_or_phi_arg (stmt);
   if (!rhs)
     {
       bitmap_clear_bit (interesting_names, version);
       return;
     }
 
-  propagate_rhs_into_lhs (t, lhs, rhs, interesting_names);
+  propagate_rhs_into_lhs (stmt, lhs, rhs, interesting_names);
 
-  /* Note that T may well have been deleted by now, so do
+  /* Note that STMT may well have been deleted by now, so do
      not access it, instead use the saved version # to clear
      T's entry in the worklist.  */
   bitmap_clear_bit (interesting_names, version);
@@ -2400,12 +2849,13 @@ eliminate_const_or_copy (tree t, bitmap interesting_names)
 static void
 eliminate_degenerate_phis_1 (basic_block bb, bitmap interesting_names)
 {
-  tree phi, next;
+  gimple_stmt_iterator gsi;
   basic_block son;
 
-  for (phi = phi_nodes (bb); phi; phi = next)
+  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
     {
-      next = PHI_CHAIN (phi);
+      gimple phi = gsi_stmt (gsi);
+
       eliminate_const_or_copy (phi, interesting_names);
     }
 
@@ -2512,7 +2962,7 @@ eliminate_degenerate_phis (void)
      such edges from the CFG as needed.  */
   if (!bitmap_empty_p (need_eh_cleanup))
     {
-      tree_purge_all_dead_eh_edges (need_eh_cleanup);
+      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
       BITMAP_FREE (need_eh_cleanup);
     }
 
index 2f7e923..b4be514 100644 (file)
@@ -63,7 +63,7 @@ along with GCC; see the file COPYING3.  If not see
    relationship between dead store and redundant load elimination.  In
    fact, they are the same transformation applied to different views of
    the CFG.  */
-   
+
 
 struct dse_global_data
 {
@@ -97,7 +97,7 @@ static void dse_initialize_block_local_data (struct dom_walk_data *,
                                             bool);
 static void dse_optimize_stmt (struct dom_walk_data *,
                               basic_block,
-                              block_stmt_iterator);
+                              gimple_stmt_iterator);
 static void dse_record_phis (struct dom_walk_data *, basic_block);
 static void dse_finalize_block (struct dom_walk_data *, basic_block);
 static void record_voperand_set (bitmap, bitmap *, unsigned int);
@@ -105,12 +105,13 @@ static void record_voperand_set (bitmap, bitmap *, unsigned int);
 /* Returns uid of statement STMT.  */
 
 static unsigned
-get_stmt_uid (tree stmt)
+get_stmt_uid (gimple stmt)
 {
-  if (TREE_CODE (stmt) == PHI_NODE)
-    return SSA_NAME_VERSION (PHI_RESULT (stmt)) + gimple_stmt_max_uid (cfun);
+  if (gimple_code (stmt) == GIMPLE_PHI)
+    return SSA_NAME_VERSION (gimple_phi_result (stmt))
+           + gimple_stmt_max_uid (cfun);
 
-  return gimple_stmt_uid (stmt);
+  return gimple_uid (stmt);
 }
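The PHI and non-PHI uid ranges are disjoint by construction, assuming ordinary statement uids are assigned below gimple_stmt_max_uid; a sketch of the invariant (illustrative only):

    /* PHI uids start at gimple_stmt_max_uid (cfun), so the shared STORES
       bitmap cannot conflate a PHI with an ordinary statement, and two
       PHIs differ because their results carry distinct SSA versions.  */
    gcc_assert (gimple_code (stmt) != GIMPLE_PHI
                || get_stmt_uid (stmt) >= gimple_stmt_max_uid (cfun));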
 
 /* Set bit UID in bitmaps GLOBAL and *LOCAL, creating *LOCAL as needed.  */
@@ -162,7 +163,7 @@ memory_ssa_name_same (tree *expr_p, int *walk_subtrees ATTRIBUTE_UNUSED,
 {
   struct address_walk_data *walk_data = (struct address_walk_data *) data;
   tree expr = *expr_p;
-  tree def_stmt;
+  gimple def_stmt;
   basic_block def_bb;
 
   if (TREE_CODE (expr) != SSA_NAME)
@@ -174,7 +175,7 @@ memory_ssa_name_same (tree *expr_p, int *walk_subtrees ATTRIBUTE_UNUSED,
     return NULL_TREE;
 
   def_stmt = SSA_NAME_DEF_STMT (expr);
-  def_bb = bb_for_stmt (def_stmt);
+  def_bb = gimple_bb (def_stmt);
 
   /* DEF_STMT must dominate both stores.  So if it is in the same
      basic block as one, it does not post-dominate that store.  */
@@ -185,7 +186,7 @@ memory_ssa_name_same (tree *expr_p, int *walk_subtrees ATTRIBUTE_UNUSED,
          || !dominated_by_p (CDI_POST_DOMINATORS, walk_data->store2_bb,
                              def_bb))
        /* Return non-NULL to stop the walk.  */
-       return def_stmt;
+       return *expr_p;
     }
 
   return NULL_TREE;
@@ -195,14 +196,14 @@ memory_ssa_name_same (tree *expr_p, int *walk_subtrees ATTRIBUTE_UNUSED,
    might be modified after STORE1, before control reaches STORE2.  */
 
 static bool
-memory_address_same (tree store1, tree store2)
+memory_address_same (gimple store1, gimple store2)
 {
   struct address_walk_data walk_data;
 
-  walk_data.store1_bb = bb_for_stmt (store1);
-  walk_data.store2_bb = bb_for_stmt (store2);
+  walk_data.store1_bb = gimple_bb (store1);
+  walk_data.store2_bb = gimple_bb (store2);
 
-  return (walk_tree (&GIMPLE_STMT_OPERAND (store1, 0), memory_ssa_name_same,
+  return (walk_tree (gimple_assign_lhs_ptr (store1), memory_ssa_name_same,
                     &walk_data, NULL)
          == NULL);
 }
@@ -214,15 +215,15 @@ memory_address_same (tree store1, tree store2)
    STMT.  *USE_P is set to the vop killed by *USE_STMT.  */
 
 static bool
-get_kill_of_stmt_lhs (tree stmt,
+get_kill_of_stmt_lhs (gimple stmt,
                      use_operand_p * first_use_p,
-                     use_operand_p * use_p, tree * use_stmt)
+                     use_operand_p * use_p, gimple * use_stmt)
 {
   tree lhs;
 
-  gcc_assert (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT);
+  gcc_assert (is_gimple_assign (stmt));
 
-  lhs = GIMPLE_STMT_OPERAND (stmt, 0);
+  lhs = gimple_assign_lhs (stmt);
 
   /* We now walk the chain of single uses of the single VDEFs.
      We succeeded finding a kill if the lhs of the use stmt is
@@ -231,7 +232,7 @@ get_kill_of_stmt_lhs (tree stmt,
      the stmt.  */
   do
     {
-      tree use_lhs, use_rhs;
+      tree use_lhs;
       def_operand_p def_p;
 
       /* The stmt must have a single VDEF.  */
@@ -245,17 +246,14 @@ get_kill_of_stmt_lhs (tree stmt,
       first_use_p = use_p;
 
       /* If there are possible hidden uses, give up.  */
-      if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
-       return false;
-      use_rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-      if (TREE_CODE (use_rhs) == CALL_EXPR
-         || (!is_gimple_min_invariant (use_rhs)
-             && TREE_CODE (use_rhs) != SSA_NAME))
+      if (!gimple_assign_single_p (stmt)
+         || (TREE_CODE (gimple_assign_rhs1 (stmt)) != SSA_NAME
+             && !is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
        return false;
 
       /* If the use stmts lhs matches the original lhs we have
         found the kill, otherwise continue walking.  */
-      use_lhs = GIMPLE_STMT_OPERAND (stmt, 0);
+      use_lhs = gimple_assign_lhs (stmt);
       if (operand_equal_p (use_lhs, lhs, 0))
        {
          *use_stmt = stmt;
@@ -266,16 +264,16 @@ get_kill_of_stmt_lhs (tree stmt,
 }
 
 /* A helper of dse_optimize_stmt.
-   Given a GIMPLE_MODIFY_STMT in STMT, check that each VDEF has one
+   Given a GIMPLE_ASSIGN in STMT, check that each VDEF has one
    use, and that one use is another VDEF clobbering the first one.
 
    Return TRUE if the above conditions are met, otherwise FALSE.  */
 
 static bool
-dse_possible_dead_store_p (tree stmt,
+dse_possible_dead_store_p (gimple stmt,
                           use_operand_p *first_use_p,
                           use_operand_p *use_p,
-                          tree *use_stmt,
+                          gimple *use_stmt,
                           struct dse_global_data *dse_gd,
                           struct dse_block_local_data *bd)
 {
@@ -283,8 +281,9 @@ dse_possible_dead_store_p (tree stmt,
   bool fail = false;
   def_operand_p var1;
   vuse_vec_p vv;
-  tree defvar = NULL_TREE, temp;
+  tree defvar = NULL_TREE;
   tree prev_defvar = NULL_TREE;
+  gimple temp;
 
   /* We want to verify that each virtual definition in STMT has
      precisely one use and that all the virtual definitions are
@@ -309,9 +308,9 @@ dse_possible_dead_store_p (tree stmt,
       gcc_assert (*use_p != NULL_USE_OPERAND_P);
       *first_use_p = *use_p;
 
-      /* ???  If we hit a PHI_NODE we could skip to the PHI_RESULT uses.
+      /* ???  If we hit a GIMPLE_PHI we could skip to the PHI_RESULT uses.
         Don't bother to do that for now.  */
-      if (TREE_CODE (temp) == PHI_NODE)
+      if (gimple_code (temp) == GIMPLE_PHI)
        {
          fail = true;
          break;
@@ -326,10 +325,10 @@ dse_possible_dead_store_p (tree stmt,
 
           So we must make sure we're talking about the same LHS.
       */
-      if (TREE_CODE (temp) == GIMPLE_MODIFY_STMT)
+      if (is_gimple_assign (temp))
        {
-         tree base1 = get_base_address (GIMPLE_STMT_OPERAND (stmt, 0));
-         tree base2 =  get_base_address (GIMPLE_STMT_OPERAND (temp, 0));
+         tree base1 = get_base_address (gimple_assign_lhs (stmt));
+         tree base2 = get_base_address (gimple_assign_lhs (temp));
 
          while (base1 && INDIRECT_REF_P (base1))
            base1 = TREE_OPERAND (base1, 0);
@@ -360,7 +359,7 @@ dse_possible_dead_store_p (tree stmt,
 
   if (fail)
     {
-      record_voperand_set (dse_gd->stores, &bd->stores, gimple_stmt_uid (stmt));
+      record_voperand_set (dse_gd->stores, &bd->stores, gimple_uid (stmt));
       return false;
     }
 
@@ -382,36 +381,35 @@ dse_possible_dead_store_p (tree stmt,
 static void
 dse_optimize_stmt (struct dom_walk_data *walk_data,
                   basic_block bb ATTRIBUTE_UNUSED,
-                  block_stmt_iterator bsi)
+                  gimple_stmt_iterator gsi)
 {
   struct dse_block_local_data *bd
     = (struct dse_block_local_data *)
        VEC_last (void_p, walk_data->block_data_stack);
   struct dse_global_data *dse_gd
     = (struct dse_global_data *) walk_data->global_data;
-  tree stmt = bsi_stmt (bsi);
-  stmt_ann_t ann = stmt_ann (stmt);
+  gimple stmt = gsi_stmt (gsi);
 
   /* If this statement has no virtual defs, then there is nothing
      to do.  */
   if (ZERO_SSA_OPERANDS (stmt, SSA_OP_VDEF))
     return;
 
-  /* We know we have virtual definitions.  If this is a GIMPLE_MODIFY_STMT
+  /* We know we have virtual definitions.  If this is a GIMPLE_ASSIGN
      that's not also a function call, then record it into our table.  */
-  if (get_call_expr_in (stmt))
+  if (is_gimple_call (stmt) && gimple_call_fndecl (stmt))
     return;
 
-  if (ann->has_volatile_ops)
+  if (gimple_has_volatile_ops (stmt))
     return;
 
-  if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
+  if (is_gimple_assign (stmt))
     {
       use_operand_p first_use_p = NULL_USE_OPERAND_P;
       use_operand_p use_p = NULL;
-      tree use_stmt;
+      gimple use_stmt;
 
-      if (!dse_possible_dead_store_p (stmt, &first_use_p, &use_p, &use_stmt,
+      if (!dse_possible_dead_store_p (stmt, &first_use_p, &use_p, &use_stmt, 
                                      dse_gd, bd))
        return;
 
@@ -421,8 +419,8 @@ dse_optimize_stmt (struct dom_walk_data *walk_data,
         SSA-form variables in the address will have the same values.  */
       if (use_p != NULL_USE_OPERAND_P
           && bitmap_bit_p (dse_gd->stores, get_stmt_uid (use_stmt))
-          && !operand_equal_p (GIMPLE_STMT_OPERAND (stmt, 0),
-                               GIMPLE_STMT_OPERAND (use_stmt, 0), 0)
+          && !operand_equal_p (gimple_assign_lhs (stmt),
+                               gimple_assign_lhs (use_stmt), 0)
           && memory_address_same (stmt, use_stmt))
         {
           /* If we have precisely one immediate use at this point, but
@@ -431,7 +429,8 @@ dse_optimize_stmt (struct dom_walk_data *walk_data,
              memory location.  */
           if (!get_kill_of_stmt_lhs (stmt, &first_use_p, &use_p, &use_stmt))
             {
-              record_voperand_set (dse_gd->stores, &bd->stores, gimple_stmt_uid (stmt));
+              record_voperand_set (dse_gd->stores, &bd->stores, 
+                                  gimple_uid (stmt));
               return;
             }
         }
@@ -442,8 +441,8 @@ dse_optimize_stmt (struct dom_walk_data *walk_data,
         memory location, then we may have found redundant store.  */
       if (use_p != NULL_USE_OPERAND_P
          && bitmap_bit_p (dse_gd->stores, get_stmt_uid (use_stmt))
-         && operand_equal_p (GIMPLE_STMT_OPERAND (stmt, 0),
-                             GIMPLE_STMT_OPERAND (use_stmt, 0), 0)
+         && operand_equal_p (gimple_assign_lhs (stmt),
+                             gimple_assign_lhs (use_stmt), 0)
          && memory_address_same (stmt, use_stmt))
        {
          ssa_op_iter op_iter;
@@ -462,18 +461,19 @@ dse_optimize_stmt (struct dom_walk_data *walk_data,
             *p = *u; *p = *v; where p might be v, then USE_STMT
             acts as a use as well as definition, so store in STMT
             is not dead.  */
-         if (LOADED_SYMS (use_stmt)
-             && bitmap_intersect_p (LOADED_SYMS (use_stmt),
-                                    STORED_SYMS (use_stmt)))
+         if (gimple_loaded_syms (use_stmt)
+             && bitmap_intersect_p (gimple_loaded_syms (use_stmt),
+                                    gimple_stored_syms (use_stmt)))
            {
-             record_voperand_set (dse_gd->stores, &bd->stores, ann->uid);
+              record_voperand_set (dse_gd->stores, &bd->stores, 
+                                  gimple_uid (stmt));
              return;
            }
 
          if (dump_file && (dump_flags & TDF_DETAILS))
             {
               fprintf (dump_file, "  Deleted dead store '");
-              print_generic_expr (dump_file, bsi_stmt (bsi), dump_flags);
+              print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, dump_flags);
               fprintf (dump_file, "'\n");
             }
 
@@ -481,7 +481,8 @@ dse_optimize_stmt (struct dom_walk_data *walk_data,
          stmt_lhs = USE_FROM_PTR (first_use_p);
          FOR_EACH_SSA_VDEF_OPERAND (var1, vv, stmt, op_iter)
            {
-             tree usevar, temp;
+             tree usevar;
+             gimple temp;
 
              single_imm_use (DEF_FROM_PTR (var1), &use_p, &temp);
              gcc_assert (VUSE_VECT_NUM_ELEM (*vv) == 1);
@@ -494,14 +495,14 @@ dse_optimize_stmt (struct dom_walk_data *walk_data,
            }
 
          /* Remove the dead store.  */
-         bsi_remove (&bsi, true);
+         gsi_remove (&gsi, true);
 
          /* And release any SSA_NAMEs set in this statement back to the
             SSA_NAME manager.  */
          release_defs (stmt);
        }
 
-      record_voperand_set (dse_gd->stores, &bd->stores, gimple_stmt_uid (stmt));
+      record_voperand_set (dse_gd->stores, &bd->stores, gimple_uid (stmt));
     }
 }
 
@@ -515,13 +516,15 @@ dse_record_phis (struct dom_walk_data *walk_data, basic_block bb)
        VEC_last (void_p, walk_data->block_data_stack);
   struct dse_global_data *dse_gd
     = (struct dse_global_data *) walk_data->global_data;
-  tree phi;
+  gimple phi;
+  gimple_stmt_iterator gsi;
 
-  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
-    if (!is_gimple_reg (PHI_RESULT (phi)))
-      record_voperand_set (dse_gd->stores,
-                          &bd->stores,
-                          get_stmt_uid (phi));
+  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+    {
+      phi = gsi_stmt (gsi);
+      if (!is_gimple_reg (gimple_phi_result (phi)))
+       record_voperand_set (dse_gd->stores, &bd->stores, get_stmt_uid (phi));
+    }
 }
 
 static void
@@ -633,7 +636,7 @@ struct gimple_opt_pass pass_dse =
 static unsigned int
 execute_simple_dse (void)
 {
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   basic_block bb;
   bitmap variables_loaded = BITMAP_ALLOC (NULL);
   unsigned int todo = 0;
@@ -641,24 +644,29 @@ execute_simple_dse (void)
   /* Collect into VARIABLES LOADED all variables that are read in function
      body.  */
   FOR_EACH_BB (bb)
-    for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
-      if (LOADED_SYMS (bsi_stmt (bsi)))
+    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+
+      if (gimple_loaded_syms (gsi_stmt (gsi)))
        bitmap_ior_into (variables_loaded,
-                        LOADED_SYMS (bsi_stmt (bsi)));
+                        gimple_loaded_syms (gsi_stmt (gsi)));
 
   /* Look for statements writing into the write only variables.
      And try to remove them.  */
 
   FOR_EACH_BB (bb)
-    for (bsi = bsi_start (bb); !bsi_end_p (bsi);)
+    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
       {
-       tree stmt = bsi_stmt (bsi), op;
+       gimple stmt = gsi_stmt (gsi);
+        tree op;
        bool removed = false;
         ssa_op_iter iter;
 
-       if (STORED_SYMS (stmt) && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-           && TREE_CODE (stmt) != RETURN_EXPR
-           && !bitmap_intersect_p (STORED_SYMS (stmt), variables_loaded))
+       if (gimple_stored_syms (stmt)
+           && !bitmap_empty_p (gimple_stored_syms (stmt))
+            && (is_gimple_assign (stmt)
+               || (is_gimple_call (stmt)
+                    && gimple_call_lhs (stmt)))
+           && !bitmap_intersect_p (gimple_stored_syms (stmt), variables_loaded))
          {
            unsigned int i;
            bitmap_iterator bi;
@@ -673,7 +681,7 @@ execute_simple_dse (void)
               from removing them as dead.  The flag thus has no use for us
               and we need to look into all operands.  */
              
-           EXECUTE_IF_SET_IN_BITMAP (STORED_SYMS (stmt), 0, i, bi)
+           EXECUTE_IF_SET_IN_BITMAP (gimple_stored_syms (stmt), 0, i, bi)
              {
                tree var = referenced_var_lookup (i);
                if (TREE_ADDRESSABLE (var)
@@ -682,8 +690,8 @@ execute_simple_dse (void)
                  dead = false;
              }
 
-           if (dead && LOADED_SYMS (stmt))
-             EXECUTE_IF_SET_IN_BITMAP (LOADED_SYMS (stmt), 0, i, bi)
+           if (dead && gimple_loaded_syms (stmt))
+             EXECUTE_IF_SET_IN_BITMAP (gimple_loaded_syms (stmt), 0, i, bi)
                if (TREE_THIS_VOLATILE (referenced_var_lookup (i)))
                  dead = false;
 
@@ -695,49 +703,44 @@ execute_simple_dse (void)
            /* Look for possible occurrence var = indirect_ref (...) where
               indirect_ref itself is volatile.  */
 
-           if (dead && TREE_THIS_VOLATILE (GIMPLE_STMT_OPERAND (stmt, 1)))
+           if (dead && is_gimple_assign (stmt)
+               && TREE_THIS_VOLATILE (gimple_assign_rhs1 (stmt)))
              dead = false;
 
            if (dead)
              {
-               tree call = get_call_expr_in (stmt);
-
                /* When LHS of var = call (); is dead, simplify it into
                   call (); saving one operand.  */
-               if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-                   && call
-                   && TREE_SIDE_EFFECTS (call))
+                if (is_gimple_call (stmt)
+                    && gimple_has_side_effects (stmt))
                  {
                    if (dump_file && (dump_flags & TDF_DETAILS))
                      {
                        fprintf (dump_file, "Deleted LHS of call: ");
-                       print_generic_stmt (dump_file, stmt, TDF_SLIM);
+                       print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
                        fprintf (dump_file, "\n");
                      }
-                   push_stmt_changes (bsi_stmt_ptr (bsi));
-                   TREE_BLOCK (call) = TREE_BLOCK (stmt);
-                   bsi_replace (&bsi, call, false);
-                   maybe_clean_or_replace_eh_stmt (stmt, call);
-                   mark_symbols_for_renaming (call);
-                   pop_stmt_changes (bsi_stmt_ptr (bsi));
+                   push_stmt_changes (gsi_stmt_ptr (&gsi));
+                    gimple_call_set_lhs (stmt, NULL);
+                   pop_stmt_changes (gsi_stmt_ptr (&gsi));
                  }
                else
                  {
                    if (dump_file && (dump_flags & TDF_DETAILS))
                      {
                        fprintf (dump_file, "  Deleted dead store '");
-                       print_generic_expr (dump_file, stmt, dump_flags);
+                       print_gimple_stmt (dump_file, stmt, 0, dump_flags);
                        fprintf (dump_file, "'\n");
                      }
                    removed = true;
-                   bsi_remove (&bsi, true);
+                   gsi_remove (&gsi, true);
                    todo |= TODO_cleanup_cfg;
                  }
                todo |= TODO_remove_unused_locals | TODO_ggc_collect;
              }
          }
        if (!removed)
-         bsi_next (&bsi);
+         gsi_next (&gsi);
       }
   BITMAP_FREE (variables_loaded);
   return todo;
index c49f0a4..6c5c6ca 100644 (file)
@@ -33,17 +33,13 @@ along with GCC; see the file COPYING3.  If not see
 #include "tree-dump.h"
 #include "langhooks.h"
 #include "flags.h"
+#include "gimple.h"
 
 /* This pass propagates the RHS of assignment statements into use
    sites of the LHS of the assignment.  It's basically a specialized
    form of tree combination.   It is hoped all of this can disappear
    when we have a generalized tree combiner.
 
-   Note carefully that after propagation the resulting statement
-   must still be a proper gimple statement.  Right now we simply
-   only perform propagations we know will result in valid gimple
-   code.  One day we'll want to generalize this code.
-
    One class of common cases we handle is forward propagating a single use
    variable into a COND_EXPR.  
 
@@ -162,6 +158,7 @@ static bool forward_propagate_addr_expr (tree name, tree rhs);
 /* Set to true if we delete EH edges during the optimization.  */
 static bool cfg_changed;
 
+static tree rhs_to_tree (tree type, gimple stmt);
 
 /* Get the next statement we can propagate NAME's value into skipping
    trivial copies.  Returns the statement that is suitable as a
@@ -169,25 +166,25 @@ static bool cfg_changed;
    This only returns destinations in a single-use chain.  FINAL_NAME_P
    if non-NULL is written to the ssa name that represents the use.  */
 
-static tree
+static gimple
 get_prop_dest_stmt (tree name, tree *final_name_p)
 {
   use_operand_p use;
-  tree use_stmt;
+  gimple use_stmt;
 
   do {
     /* If name has multiple uses, bail out.  */
     if (!single_imm_use (name, &use, &use_stmt))
-      return NULL_TREE;
+      return NULL;
 
     /* If this is not a trivial copy, we found it.  */
-    if (TREE_CODE (use_stmt) != GIMPLE_MODIFY_STMT
-       || TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 0)) != SSA_NAME
-       || GIMPLE_STMT_OPERAND (use_stmt, 1) != name)
+    if (!gimple_assign_copy_p (use_stmt)
+       || TREE_CODE (gimple_assign_lhs (use_stmt)) != SSA_NAME
+       || gimple_assign_rhs1 (use_stmt) != name)
       break;
 
     /* Continue searching uses of the copy destination.  */
-    name = GIMPLE_STMT_OPERAND (use_stmt, 0);
+    name = gimple_assign_lhs (use_stmt);
   } while (1);
 
   if (final_name_p)
@@ -204,27 +201,28 @@ get_prop_dest_stmt (tree name, tree *final_name_p)
    it is set to whether the chain to NAME is a single use chain
    or not.  SINGLE_USE_P is not written to if SINGLE_USE_ONLY is set.  */
 
-static tree
+static gimple
 get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
 {
   bool single_use = true;
 
   do {
-    tree def_stmt = SSA_NAME_DEF_STMT (name);
+    gimple def_stmt = SSA_NAME_DEF_STMT (name);
 
     if (!has_single_use (name))
       {
        single_use = false;
        if (single_use_only)
-         return NULL_TREE;
+         return NULL;
       }
 
     /* If name is defined by a PHI node or is the default def, bail out.  */
-    if (TREE_CODE (def_stmt) != GIMPLE_MODIFY_STMT)
-      return NULL_TREE;
+    if (gimple_code (def_stmt) != GIMPLE_ASSIGN)
+      return NULL;
 
     /* If name is not a simple copy destination, we found it.  */
-    if (TREE_CODE (GIMPLE_STMT_OPERAND (def_stmt, 1)) != SSA_NAME)
+    if (!gimple_assign_copy_p (def_stmt)
+        || TREE_CODE (gimple_assign_rhs1 (def_stmt)) != SSA_NAME)
       {
        tree rhs;
 
@@ -233,19 +231,19 @@ get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
 
        /* We can look through pointer conversions in the search
           for a useful stmt for the comparison folding.  */
-       rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);
-       if (CONVERT_EXPR_P (rhs)
-           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
-           && POINTER_TYPE_P (TREE_TYPE (rhs))
-           && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (rhs, 0))))
-         name = TREE_OPERAND (rhs, 0);
+       rhs = gimple_assign_rhs1 (def_stmt);
+       if (IS_CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
+           && TREE_CODE (rhs) == SSA_NAME
+           && POINTER_TYPE_P (TREE_TYPE (gimple_assign_lhs (def_stmt)))
+           && POINTER_TYPE_P (TREE_TYPE (rhs)))
+         name = rhs;
        else
          return def_stmt;
       }
     else
       {
        /* Continue searching the def of the copy source name.  */
-       name = GIMPLE_STMT_OPERAND (def_stmt, 1);
+       name = gimple_assign_rhs1 (def_stmt);
       }
   } while (1);
 }
@@ -254,41 +252,43 @@ get_prop_source_stmt (tree name, bool single_use_only, bool *single_use_p)
    propagation source.  Returns true if so, otherwise false.  */
 
 static bool
-can_propagate_from (tree def_stmt)
+can_propagate_from (gimple def_stmt)
 {
-  tree rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);
   use_operand_p use_p;
   ssa_op_iter iter;
 
+  gcc_assert (is_gimple_assign (def_stmt));
   /* If the rhs has side-effects we cannot propagate from it.  */
-  if (TREE_SIDE_EFFECTS (rhs))
+  if (gimple_has_volatile_ops (def_stmt))
     return false;
 
   /* If the rhs is a load we cannot propagate from it.  */
-  if (REFERENCE_CLASS_P (rhs)
-      || DECL_P (rhs))
+  if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_reference
+      || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) == tcc_declaration)
     return false;
 
   /* Constants can be always propagated.  */
-  if (is_gimple_min_invariant (rhs))
+  if (is_gimple_min_invariant 
+      (rhs_to_tree (TREE_TYPE (gimple_assign_lhs (def_stmt)), def_stmt)))
     return true;
 
-  /* If any of the SSA operands occurs in abnormal PHIs we cannot
-     propagate from this stmt.  */
+  /* We cannot propagate ssa names that occur in abnormal phi nodes.  */
   FOR_EACH_SSA_USE_OPERAND (use_p, def_stmt, iter, SSA_OP_USE)
     if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
       return false;
 
   /* If the definition is a conversion of a pointer to a function type,
-     then we can not apply optimizations as some targets require function
-     pointers to be canonicalized and in this case this optimization could
-     eliminate a necessary canonicalization.  */
-  if (CONVERT_EXPR_P (rhs)
-      && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (rhs, 0)))
-      && TREE_CODE (TREE_TYPE (TREE_TYPE
-                               (TREE_OPERAND (rhs, 0)))) == FUNCTION_TYPE)
-    return false;
-
+     then we can not apply optimizations as some targets require
+     function pointers to be canonicalized and in this case this
+     optimization could eliminate a necessary canonicalization.  */
+  if (is_gimple_assign (def_stmt)
+      && (IS_CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))))
+    {
+      tree rhs = gimple_assign_rhs1 (def_stmt);
+      if (POINTER_TYPE_P (TREE_TYPE (rhs))
+          && TREE_CODE (TREE_TYPE (TREE_TYPE (rhs))) == FUNCTION_TYPE)
+        return false;
+    }
   return true;
 }
 
@@ -299,10 +299,10 @@ can_propagate_from (tree def_stmt)
    as well, otherwise false.  */
 
 static bool
-remove_prop_source_from_use (tree name, tree up_to_stmt)
+remove_prop_source_from_use (tree name, gimple up_to_stmt)
 {
-  block_stmt_iterator bsi;
-  tree stmt;
+  gimple_stmt_iterator gsi;
+  gimple stmt;
 
   do {
     if (!has_zero_uses (name))
@@ -312,16 +312,38 @@ remove_prop_source_from_use (tree name, tree up_to_stmt)
     if (stmt == up_to_stmt)
       return true;
 
-    bsi = bsi_for_stmt (stmt);
+    gsi = gsi_for_stmt (stmt);
     release_defs (stmt);
-    bsi_remove (&bsi, true);
+    gsi_remove (&gsi, true);
 
-    name = GIMPLE_STMT_OPERAND (stmt, 1);
-  } while (TREE_CODE (name) == SSA_NAME);
+    name = (gimple_assign_copy_p (stmt)) ? gimple_assign_rhs1 (stmt) : NULL;
+  } while (name && TREE_CODE (name) == SSA_NAME);
 
   return false;
 }
 
+/* Return the rhs of a gimple_assign STMT in a form of a single tree,
+   converted to type TYPE.
+   
+   This should disappear, but is needed so we can combine expressions and use
+   the fold() interfaces. Long term, we need to develop folding and combine
+   routines that deal with gimple exclusively.  */
+
+static tree
+rhs_to_tree (tree type, gimple stmt)
+{
+  enum tree_code code = gimple_assign_rhs_code (stmt);
+  if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
+    return fold_convert (type, build2 (code, type, gimple_assign_rhs1 (stmt),
+                         gimple_assign_rhs2 (stmt)));
+  else if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
+    return fold_convert (type, build1 (code, type, gimple_assign_rhs1 (stmt)));
+  else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
+    return gimple_assign_rhs1 (stmt);
+  else
+    gcc_unreachable ();
+}
+
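A hedged usage sketch for rhs_to_tree, assuming the gimple accessors
introduced by this merge; the wrapper name try_fold_with_def is hypothetical.
It flattens the defining statement's RHS back into a tree and hands it to the
fold-based combiner, which is essentially what the callers below do.

    /* Hypothetical helper: rebuild DEF_STMT's RHS as a tree of OP1's type
       and try to fold "rhs0 CODE op1" into a simpler condition.  */
    static tree
    try_fold_with_def (enum tree_code code, tree op1, gimple def_stmt)
    {
      tree rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
      return combine_cond_expr_cond (code, boolean_type_node, rhs0, op1, false);
    }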
 /* Combine OP0 CODE OP1 in the context of a COND_EXPR.  Returns
    the folded result in a form suitable for COND_EXPR_COND or
    NULL_TREE, if there is no suitable simplified form.  If
@@ -347,28 +369,124 @@ combine_cond_expr_cond (enum tree_code code, tree type,
   t = canonicalize_cond_expr_cond (t);
 
   /* Bail out if we required an invariant but didn't get one.  */
-  if (!t
-      || (invariant_only
-         && !is_gimple_min_invariant (t)))
+  if (!t || (invariant_only && !is_gimple_min_invariant (t)))
     return NULL_TREE;
 
   return t;
 }
 
 /* Propagate from the ssa name definition statements of COND_EXPR
-   in statement STMT into the conditional if that simplifies it.
+   in GIMPLE_COND statement STMT into the conditional if that simplifies it.
+   Returns zero if no statement was changed, one if there were
+   changes and two if cfg_cleanup needs to run.
+   
+   This must be kept in sync with forward_propagate_into_cond.  */
+
+static int
+forward_propagate_into_gimple_cond (gimple stmt)
+{
+  int did_something = 0;
+
+  do {
+    tree tmp = NULL_TREE;
+    tree name, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
+    gimple def_stmt;
+    bool single_use0_p = false, single_use1_p = false;
+    enum tree_code code = gimple_cond_code (stmt);
+
+    /* We can do tree combining on SSA_NAME and comparison expressions.  */
+    if (TREE_CODE_CLASS (gimple_cond_code (stmt)) == tcc_comparison
+        && TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME)
+      {
+       /* For comparisons use the first operand, that is likely to
+          simplify comparisons against constants.  */
+       name = gimple_cond_lhs (stmt);
+       def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
+       if (def_stmt && can_propagate_from (def_stmt))
+         {
+           tree op1 = gimple_cond_rhs (stmt);
+           rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
+           tmp = combine_cond_expr_cond (code, boolean_type_node, rhs0,
+                                         op1, !single_use0_p);
+         }
+       /* If that wasn't successful, try the second operand.  */
+       if (tmp == NULL_TREE
+           && TREE_CODE (gimple_cond_rhs (stmt)) == SSA_NAME)
+         {
+           tree op0 = gimple_cond_lhs (stmt);
+           name = gimple_cond_rhs (stmt);
+           def_stmt = get_prop_source_stmt (name, false, &single_use1_p);
+           if (!def_stmt || !can_propagate_from (def_stmt))
+             return did_something;
+
+           rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
+           tmp = combine_cond_expr_cond (code, boolean_type_node, op0, rhs1,
+                                         !single_use1_p);
+         }
+       /* If that wasn't successful either, try both operands.  */
+       if (tmp == NULL_TREE
+           && rhs0 != NULL_TREE
+           && rhs1 != NULL_TREE)
+         tmp = combine_cond_expr_cond (code, boolean_type_node, rhs0,
+                                       fold_convert (TREE_TYPE (rhs0), rhs1),
+                                       !(single_use0_p && single_use1_p));
+      }
+
+    if (tmp)
+      {
+       if (dump_file && tmp)
+         {
+            tree cond = build2 (gimple_cond_code (stmt),
+                               boolean_type_node,
+                               gimple_cond_lhs (stmt),
+                               gimple_cond_rhs (stmt));
+           fprintf (dump_file, "  Replaced '");
+           print_generic_expr (dump_file, cond, 0);
+           fprintf (dump_file, "' with '");
+           print_generic_expr (dump_file, tmp, 0);
+           fprintf (dump_file, "'\n");
+         }
+
+        gimple_cond_set_condition_from_tree (stmt, unshare_expr (tmp));
+       update_stmt (stmt);
+
+       /* Remove defining statements.  */
+       remove_prop_source_from_use (name, NULL);
+
+       if (is_gimple_min_invariant (tmp))
+         did_something = 2;
+       else if (did_something == 0)
+         did_something = 1;
+
+       /* Continue combining.  */
+       continue;
+      }
+
+    break;
+  } while (1);
+
+  return did_something;
+}
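The reason two sibling routines are kept in sync: after tuplification the
same condition can live in two shapes.  A hedged sketch, with the hypothetical
helper dump_cond_operands using only accessors that appear in this patch:

    /* A branch "if (a_1 < b_2)" is a GIMPLE_COND and carries its operands
       in the statement itself; "x_3 = a_1 < b_2 ? c_4 : d_5" is a
       GIMPLE_ASSIGN whose rhs1 is the whole COND_EXPR tree.  */
    static void
    dump_cond_operands (gimple stmt)
    {
      if (gimple_code (stmt) == GIMPLE_COND)
        {
          print_generic_expr (stderr, gimple_cond_lhs (stmt), 0);
          print_generic_expr (stderr, gimple_cond_rhs (stmt), 0);
        }
      else if (is_gimple_assign (stmt)
               && gimple_assign_rhs_code (stmt) == COND_EXPR)
        print_generic_expr (stderr, COND_EXPR_COND (gimple_assign_rhs1 (stmt)), 0);
    }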
+
+
+/* Propagate from the ssa name definition statements of COND_EXPR
+   in the rhs of statement STMT into the conditional if that simplifies it.
    Returns zero if no statement was changed, one if there were
-   changes and two if cfg_cleanup needs to run.  */
+   changes and two if cfg_cleanup needs to run.
+
+   This must be kept in sync with forward_propagate_into_gimple_cond.  */
 
 static int
-forward_propagate_into_cond (tree cond_expr, tree stmt)
+forward_propagate_into_cond (gimple_stmt_iterator *gsi_p)
 {
+  gimple stmt = gsi_stmt (*gsi_p);
   int did_something = 0;
 
   do {
     tree tmp = NULL_TREE;
-    tree cond = COND_EXPR_COND (cond_expr);
-    tree name, def_stmt, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
+    tree cond = gimple_assign_rhs1 (stmt);
+    tree name, rhs0 = NULL_TREE, rhs1 = NULL_TREE;
+    gimple def_stmt;
     bool single_use0_p = false, single_use1_p = false;
 
     /* We can do tree combining on SSA_NAME and comparison expressions.  */
@@ -379,14 +497,12 @@ forward_propagate_into_cond (tree cond_expr, tree stmt)
           simplify comparisons against constants.  */
        name = TREE_OPERAND (cond, 0);
        def_stmt = get_prop_source_stmt (name, false, &single_use0_p);
-       if (def_stmt != NULL_TREE
-           && can_propagate_from (def_stmt))
+       if (def_stmt && can_propagate_from (def_stmt))
          {
            tree op1 = TREE_OPERAND (cond, 1);
-           rhs0 = GIMPLE_STMT_OPERAND (def_stmt, 1);
+           rhs0 = rhs_to_tree (TREE_TYPE (op1), def_stmt);
            tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
-                                         fold_convert (TREE_TYPE (op1), rhs0),
-                                         op1, !single_use0_p);
+                                         rhs0, op1, !single_use0_p);
          }
        /* If that wasn't successful, try the second operand.  */
        if (tmp == NULL_TREE
@@ -395,34 +511,30 @@ forward_propagate_into_cond (tree cond_expr, tree stmt)
            tree op0 = TREE_OPERAND (cond, 0);
            name = TREE_OPERAND (cond, 1);
            def_stmt = get_prop_source_stmt (name, false, &single_use1_p);
-           if (def_stmt == NULL_TREE
-               || !can_propagate_from (def_stmt))
+           if (!def_stmt || !can_propagate_from (def_stmt))
              return did_something;
 
-           rhs1 = GIMPLE_STMT_OPERAND (def_stmt, 1);
+           rhs1 = rhs_to_tree (TREE_TYPE (op0), def_stmt);
            tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
-                                         op0,
-                                         fold_convert (TREE_TYPE (op0), rhs1),
-                                         !single_use1_p);
+                                         op0, rhs1, !single_use1_p);
          }
        /* If that wasn't successful either, try both operands.  */
        if (tmp == NULL_TREE
            && rhs0 != NULL_TREE
            && rhs1 != NULL_TREE)
          tmp = combine_cond_expr_cond (TREE_CODE (cond), boolean_type_node,
-                                       rhs0,
-                                       fold_convert (TREE_TYPE (rhs0), rhs1),
+                                       rhs0, fold_convert (TREE_TYPE (rhs0),
+                                                           rhs1),
                                        !(single_use0_p && single_use1_p));
       }
     else if (TREE_CODE (cond) == SSA_NAME)
       {
        name = cond;
        def_stmt = get_prop_source_stmt (name, true, NULL);
-       if (def_stmt == NULL_TREE
-           || !can_propagate_from (def_stmt))
+       if (!def_stmt || !can_propagate_from (def_stmt))
          return did_something;
 
-       rhs0 = GIMPLE_STMT_OPERAND (def_stmt, 1);
+       rhs0 = gimple_assign_rhs1 (def_stmt);
        tmp = combine_cond_expr_cond (NE_EXPR, boolean_type_node, rhs0,
                                      build_int_cst (TREE_TYPE (rhs0), 0),
                                      false);
@@ -439,7 +551,8 @@ forward_propagate_into_cond (tree cond_expr, tree stmt)
            fprintf (dump_file, "'\n");
          }
 
-       COND_EXPR_COND (cond_expr) = unshare_expr (tmp);
+       gimple_assign_set_rhs_from_tree (gsi_p, unshare_expr (tmp));
+       stmt = gsi_stmt (*gsi_p);
        update_stmt (stmt);
 
        /* Remove defining statements.  */
@@ -464,20 +577,20 @@ forward_propagate_into_cond (tree cond_expr, tree stmt)
    relevant data structures to match.  */
 
 static void
-tidy_after_forward_propagate_addr (tree stmt)
+tidy_after_forward_propagate_addr (gimple stmt)
 {
   /* We may have turned a trapping insn into a non-trapping insn.  */
   if (maybe_clean_or_replace_eh_stmt (stmt, stmt)
-      && tree_purge_dead_eh_edges (bb_for_stmt (stmt)))
+      && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
     cfg_changed = true;
 
-  if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == ADDR_EXPR)
-     recompute_tree_invariant_for_addr_expr (GIMPLE_STMT_OPERAND (stmt, 1));
+  if (TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
+     recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
 
   mark_symbols_for_renaming (stmt);
 }
 
-/* DEF_RHS contains the address of the 0th element in an array. 
+/* DEF_RHS contains the address of the 0th element in an array.
    USE_STMT uses type of DEF_RHS to compute the address of an
    arbitrary element within the array.  The (variable) byte offset
    of the element is contained in OFFSET.
@@ -494,9 +607,11 @@ tidy_after_forward_propagate_addr (tree stmt)
 
 static bool
 forward_propagate_addr_into_variable_array_index (tree offset,
-                                                 tree def_rhs, tree use_stmt)
+                                                 tree def_rhs,
+                                                 gimple_stmt_iterator *use_stmt_gsi)
 {
   tree index;
+  gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
 
   /* Try to find an expression for a proper index.  This is either
      a multiplication expression by the element size or just the
@@ -506,31 +621,32 @@ forward_propagate_addr_into_variable_array_index (tree offset,
   else
     {
       /* Get the offset's defining statement.  */
-      offset = SSA_NAME_DEF_STMT (offset);
+      offset_def = SSA_NAME_DEF_STMT (offset);
 
       /* The statement which defines OFFSET before type conversion
-         must be a simple GIMPLE_MODIFY_STMT.  */
-      if (TREE_CODE (offset) != GIMPLE_MODIFY_STMT)
+         must be a simple GIMPLE_ASSIGN.  */
+      if (gimple_code (offset_def) != GIMPLE_ASSIGN)
        return false;
 
       /* The RHS of the statement which defines OFFSET must be a
         multiplication of an object by the size of the array elements. 
         This implicitly verifies that the size of the array elements
         is constant.  */
-     offset = GIMPLE_STMT_OPERAND (offset, 1);
-      if (TREE_CODE (offset) != MULT_EXPR
-         || TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
-         || !simple_cst_equal (TREE_OPERAND (offset, 1),
-                               TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))
+     offset = gimple_assign_rhs1 (offset_def);
+     if (gimple_assign_rhs_code (offset_def) != MULT_EXPR
+        || TREE_CODE (gimple_assign_rhs2 (offset_def)) != INTEGER_CST
+        || !simple_cst_equal (gimple_assign_rhs2 (offset_def),
+                              TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)))))
        return false;
 
       /* The first operand to the MULT_EXPR is the desired index.  */
-      index = TREE_OPERAND (offset, 0);
+      index = offset;
     }
 
   /* Replace the pointer addition with array indexing.  */
-  GIMPLE_STMT_OPERAND (use_stmt, 1) = unshare_expr (def_rhs);
-  TREE_OPERAND (TREE_OPERAND (GIMPLE_STMT_OPERAND (use_stmt, 1), 0), 1)
+  gimple_assign_set_rhs_from_tree (use_stmt_gsi, unshare_expr (def_rhs));
+  use_stmt = gsi_stmt (*use_stmt_gsi);
+  TREE_OPERAND (TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0), 1)
     = index;
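A hedged C-level illustration of this rewrite, not from the patch and assuming
4-byte int elements: the variable-offset pointer arithmetic is turned back
into array indexing once the multiplication by the element size is visible.

    /* Illustrative only: GIMPLE computes the address as p_1 p+ i_2 * 4;
       with p_1 = &a[0] visible, the load is rewritten to a[i_2].  */
    int
    read_elem (unsigned int i)
    {
      static int a[16];
      int *p = &a[0];
      return p[i];
    }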
 
   /* That should have created gimple, so there is no need to
@@ -546,21 +662,25 @@ forward_propagate_addr_into_variable_array_index (tree offset,
    Try to forward propagate the ADDR_EXPR into the use USE_STMT.
    Often this will allow for removal of an ADDR_EXPR and INDIRECT_REF
    node or for recovery of array indexing from pointer arithmetic.
-   
+
    Return true if the propagation was successful (the propagation can
    be not totally successful, yet things may have been changed).  */
 
 static bool
-forward_propagate_addr_expr_1 (tree name, tree def_rhs, tree use_stmt,
+forward_propagate_addr_expr_1 (tree name, tree def_rhs,
+                              gimple_stmt_iterator *use_stmt_gsi,
                               bool single_use_p)
 {
-  tree lhs, rhs, array_ref;
+  tree lhs, rhs, rhs2, array_ref;
   tree *rhsp, *lhsp;
+  gimple use_stmt = gsi_stmt (*use_stmt_gsi);
+  enum tree_code rhs_code;
 
   gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
 
-  lhs = GIMPLE_STMT_OPERAND (use_stmt, 0);
-  rhs = GIMPLE_STMT_OPERAND (use_stmt, 1);
+  lhs = gimple_assign_lhs (use_stmt);
+  rhs_code = gimple_assign_rhs_code (use_stmt);
+  rhs = gimple_assign_rhs1 (use_stmt);
 
   /* Trivial cases.  The use statement could be a trivial copy or a
      useless conversion.  Recurse to the uses of the lhs as copyprop does
@@ -568,21 +688,22 @@ forward_propagate_addr_expr_1 (tree name, tree def_rhs, tree use_stmt,
      all useless conversions.  Treat the case of a single-use name and
      a conversion to def_rhs type separate, though.  */
   if (TREE_CODE (lhs) == SSA_NAME
-      && (rhs == name
-         || CONVERT_EXPR_P (rhs))
-      && useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (def_rhs)))
+      && ((rhs_code == SSA_NAME && rhs == name)
+         || IS_CONVERT_EXPR_CODE_P (rhs_code))
+      && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (def_rhs)))
     {
       /* Only recurse if we don't deal with a single use.  */
       if (!single_use_p)
        return forward_propagate_addr_expr (lhs, def_rhs);
 
-      GIMPLE_STMT_OPERAND (use_stmt, 1) = unshare_expr (def_rhs);
+      gimple_assign_set_rhs1 (use_stmt, unshare_expr (def_rhs));
+      gimple_assign_set_rhs_code (use_stmt, TREE_CODE (def_rhs));
       return true;
     }
 
   /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS. 
      ADDR_EXPR will not appear on the LHS.  */
-  lhsp = &GIMPLE_STMT_OPERAND (use_stmt, 0);
+  lhsp = gimple_assign_lhs_ptr (use_stmt);
   while (handled_component_p (*lhsp))
     lhsp = &TREE_OPERAND (*lhsp, 0);
   lhs = *lhsp;
@@ -609,13 +730,13 @@ forward_propagate_addr_expr_1 (tree name, tree def_rhs, tree use_stmt,
 
   /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
      nodes from the RHS.  */
-  rhsp = &GIMPLE_STMT_OPERAND (use_stmt, 1);
+  rhsp = gimple_assign_rhs1_ptr (use_stmt);
   while (handled_component_p (*rhsp)
         || TREE_CODE (*rhsp) == ADDR_EXPR)
     rhsp = &TREE_OPERAND (*rhsp, 0);
   rhs = *rhsp;
 
-  /* Now see if the RHS node is an INDIRECT_REF using NAME.  If so, 
+  /* Now see if the RHS node is an INDIRECT_REF using NAME.  If so,
      propagate the ADDR_EXPR into the use of NAME and fold the result.  */
   if (TREE_CODE (rhs) == INDIRECT_REF
       && TREE_OPERAND (rhs, 0) == name
@@ -654,8 +775,8 @@ forward_propagate_addr_expr_1 (tree name, tree def_rhs, tree use_stmt,
       /* If we have folded the VCE, then we have to create a new statement.  */
       if (TREE_CODE (new_rhs) != VIEW_CONVERT_EXPR)
        {
-         block_stmt_iterator bsi = bsi_for_stmt (use_stmt);
-         new_rhs = force_gimple_operand_bsi (&bsi, new_rhs, true, NULL, true, BSI_SAME_STMT);
+         gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
+         new_rhs = force_gimple_operand_gsi (&gsi, new_rhs, true, NULL, true, GSI_SAME_STMT);
          /* As we change the dereference to a SSA_NAME, we need to return false to make sure that
             the statement does not get removed.  */
          res = false;
@@ -668,8 +789,8 @@ forward_propagate_addr_expr_1 (tree name, tree def_rhs, tree use_stmt,
 
   /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
      is nothing to do. */
-  if (TREE_CODE (rhs) != POINTER_PLUS_EXPR
-      || TREE_OPERAND (rhs, 0) != name)
+  if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
+      || gimple_assign_rhs1 (use_stmt) != name)
     return false;
 
   /* The remaining cases are all for turning pointer arithmetic into
@@ -682,44 +803,33 @@ forward_propagate_addr_expr_1 (tree name, tree def_rhs, tree use_stmt,
       || !integer_zerop (TREE_OPERAND (array_ref, 1)))
     return false;
 
+  rhs2 = gimple_assign_rhs2 (use_stmt);
   /* Try to optimize &x[0] p+ C where C is a multiple of the size
      of the elements in X into &x[C/element size].  */
-  if (TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
+  if (TREE_CODE (rhs2) == INTEGER_CST)
     {
-      tree orig = unshare_expr (rhs);
-      TREE_OPERAND (rhs, 0) = unshare_expr (def_rhs);
-
-      /* If folding succeeds, then we have just exposed new variables
-        in USE_STMT which will need to be renamed.  If folding fails,
-        then we need to put everything back the way it was.  */
-      if (fold_stmt_inplace (use_stmt))
+      tree new_rhs = maybe_fold_stmt_addition (gimple_expr_type (use_stmt),
+                                              array_ref, rhs2);
+      if (new_rhs)
        {
+         gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
+         use_stmt = gsi_stmt (*use_stmt_gsi);
+         update_stmt (use_stmt);
          tidy_after_forward_propagate_addr (use_stmt);
          return true;
        }
-      else
-       {
-         GIMPLE_STMT_OPERAND (use_stmt, 1) = orig;
-         update_stmt (use_stmt);
-         return false;
-       }
     }
 
   /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
      converting a multiplication of an index by the size of the
      array elements, then the result is converted into the proper
      type for the arithmetic.  */
-  if (TREE_CODE (TREE_OPERAND (rhs, 1)) == SSA_NAME
+  if (TREE_CODE (rhs2) == SSA_NAME
       /* Avoid problems with IVopts creating PLUS_EXPRs with a
         different type than their operands.  */
-      && useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (name)))
-    {
-      bool res;
-      
-      res = forward_propagate_addr_into_variable_array_index (TREE_OPERAND (rhs, 1),
-                                                             def_rhs, use_stmt);
-      return res;
-    }
+      && useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (name)))
+    return forward_propagate_addr_into_variable_array_index (rhs2, def_rhs,
+                                                            use_stmt_gsi);
   return false;
 }
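A hedged illustration of the constant-offset branch above, again assuming
4-byte int elements and not taken from the patch:

    /* Illustrative only: "&a[0] p+ 8" in GIMPLE is folded back to "&a[2]"
       by maybe_fold_stmt_addition, and the now-unused pointer copy is
       cleaned up afterwards.  */
    int *
    second_elem (void)
    {
      static int a[4];
      return &a[0] + 2;
    }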
 
@@ -733,9 +843,9 @@ forward_propagate_addr_expr_1 (tree name, tree def_rhs, tree use_stmt,
 static bool
 forward_propagate_addr_expr (tree name, tree rhs)
 {
-  int stmt_loop_depth = bb_for_stmt (SSA_NAME_DEF_STMT (name))->loop_depth;
+  int stmt_loop_depth = gimple_bb (SSA_NAME_DEF_STMT (name))->loop_depth;
   imm_use_iterator iter;
-  tree use_stmt;
+  gimple use_stmt;
   bool all = true;
   bool single_use_p = has_single_use (name);
 
@@ -746,16 +856,16 @@ forward_propagate_addr_expr (tree name, tree rhs)
 
       /* If the use is not in a simple assignment statement, then
         there is nothing we can do.  */
-      if (TREE_CODE (use_stmt) != GIMPLE_MODIFY_STMT)
+      if (gimple_code (use_stmt) != GIMPLE_ASSIGN)
        {
          all = false;
          continue;
        }
 
       /* If the use is in a deeper loop nest, then we do not want
-       to propagate the ADDR_EXPR into the loop as that is likely
-       adding expression evaluations into the loop.  */
-      if (bb_for_stmt (use_stmt)->loop_depth > stmt_loop_depth)
+        to propagate the ADDR_EXPR into the loop as that is likely
+        adding expression evaluations into the loop.  */
+      if (gimple_bb (use_stmt)->loop_depth > stmt_loop_depth)
        {
          all = false;
          continue;
@@ -763,30 +873,34 @@ forward_propagate_addr_expr (tree name, tree rhs)
 
       push_stmt_changes (&use_stmt);
 
-      result = forward_propagate_addr_expr_1 (name, rhs, use_stmt,
-                                             single_use_p);
+      {
+       gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
+       result = forward_propagate_addr_expr_1 (name, rhs, &gsi,
+                                               single_use_p);
+       use_stmt = gsi_stmt (gsi);
+      }
       all &= result;
 
       pop_stmt_changes (&use_stmt);
 
       /* Remove intermediate now unused copy and conversion chains.  */
-      use_rhs = GIMPLE_STMT_OPERAND (use_stmt, 1);
+      use_rhs = gimple_assign_rhs1 (use_stmt);
       if (result
-         && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 0)) == SSA_NAME
+         && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
          && (TREE_CODE (use_rhs) == SSA_NAME
              || (CONVERT_EXPR_P (use_rhs)
                  && TREE_CODE (TREE_OPERAND (use_rhs, 0)) == SSA_NAME)))
        {
-         block_stmt_iterator bsi = bsi_for_stmt (use_stmt);
+         gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
          release_defs (use_stmt);
-         bsi_remove (&bsi, true);
+         gsi_remove (&gsi, true);
        }
     }
 
   return all;
 }
 
-/* Forward propagate the comparison COND defined in STMT like
+/* Forward propagate the comparison defined in STMT like
    cond_1 = x CMP y to uses of the form
      a_1 = (T')cond_1
      a_1 = !cond_1
@@ -794,83 +908,95 @@ forward_propagate_addr_expr (tree name, tree rhs)
    Returns true if stmt is now unused.  */
 
 static bool
-forward_propagate_comparison (tree cond, tree stmt)
+forward_propagate_comparison (gimple stmt)
 {
-  tree name = GIMPLE_STMT_OPERAND (stmt, 0);
-  tree use_stmt, tmp = NULL_TREE;
+  tree name = gimple_assign_lhs (stmt);
+  gimple use_stmt;
+  tree tmp = NULL_TREE;
 
   /* Don't propagate ssa names that occur in abnormal phis.  */
-  if ((TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME
-       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (cond, 0)))
-      || (TREE_CODE (TREE_OPERAND (cond, 1)) == SSA_NAME
-         && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (cond, 1))))
+  if ((TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
+       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt)))
+      || (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME
+        && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs2 (stmt))))
     return false;
 
   /* Do not un-cse comparisons.  But propagate through copies.  */
   use_stmt = get_prop_dest_stmt (name, &name);
-  if (use_stmt == NULL_TREE)
+  if (!use_stmt)
     return false;
 
   /* Conversion of the condition result to another integral type.  */
-  if (TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
-      && (CONVERT_EXPR_P (GIMPLE_STMT_OPERAND (use_stmt, 1))
-          || COMPARISON_CLASS_P (GIMPLE_STMT_OPERAND (use_stmt, 1))
-          || TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == TRUTH_NOT_EXPR)
-      && INTEGRAL_TYPE_P (TREE_TYPE (GIMPLE_STMT_OPERAND (use_stmt, 0))))
+  if (is_gimple_assign (use_stmt)
+      && (IS_CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt))
+         || TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
+            == tcc_comparison
+          || gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
+      && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (use_stmt))))
     {
-      tree lhs = GIMPLE_STMT_OPERAND (use_stmt, 0);
-      tree rhs = GIMPLE_STMT_OPERAND (use_stmt, 1);
+      tree lhs = gimple_assign_lhs (use_stmt);
 
       /* We can propagate the condition into a conversion.  */
-      if (CONVERT_EXPR_P (rhs))
+      if (IS_CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt)))
        {
          /* Avoid using fold here as that may create a COND_EXPR with
             non-boolean condition as canonical form.  */
-         tmp = build2 (TREE_CODE (cond), TREE_TYPE (lhs),
-                       TREE_OPERAND (cond, 0), TREE_OPERAND (cond, 1));
+         tmp = build2 (gimple_assign_rhs_code (stmt), TREE_TYPE (lhs),
+                        gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt));
        }
       /* We can propagate the condition into X op CST where op
         is EQ_EXPR or NE_EXPR and CST is either one or zero.  */
-      else if (COMPARISON_CLASS_P (rhs)
-              && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
-              && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
-       {
-         enum tree_code code = TREE_CODE (rhs);
-         tree cst = TREE_OPERAND (rhs, 1);
-
-         tmp = combine_cond_expr_cond (code, TREE_TYPE (lhs),
-                                       fold_convert (TREE_TYPE (cst), cond),
-                                       cst, false);
-         if (tmp == NULL_TREE)
-           return false;
-       }
+      else if (TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
+              == tcc_comparison
+             && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == SSA_NAME
+             && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
+      {
+        enum tree_code code = gimple_assign_rhs_code (use_stmt);
+        tree cst = gimple_assign_rhs2 (use_stmt);
+       tree cond;
+
+       cond = build2 (gimple_assign_rhs_code (stmt),
+                      TREE_TYPE (cst),
+                      gimple_assign_rhs1 (stmt),
+                      gimple_assign_rhs2 (stmt));
+
+        tmp = combine_cond_expr_cond (code, TREE_TYPE (lhs), cond, cst, false);
+        if (tmp == NULL_TREE)
+          return false;
+      }
       /* We can propagate the condition into a statement that
         computes the logical negation of the comparison result.  */
-      else if (TREE_CODE (rhs) == TRUTH_NOT_EXPR)
+      else if (gimple_assign_rhs_code (use_stmt) == TRUTH_NOT_EXPR)
        {
-         tree type = TREE_TYPE (TREE_OPERAND (cond, 0));
+         tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
          bool nans = HONOR_NANS (TYPE_MODE (type));
          enum tree_code code;
-         code = invert_tree_comparison (TREE_CODE (cond), nans);
+         code = invert_tree_comparison (gimple_assign_rhs_code (stmt), nans);
          if (code == ERROR_MARK)
            return false;
 
-         tmp = build2 (code, TREE_TYPE (lhs), TREE_OPERAND (cond, 0),
-                       TREE_OPERAND (cond, 1));
+         tmp = build2 (code, TREE_TYPE (lhs), gimple_assign_rhs1 (stmt),
+                        gimple_assign_rhs2 (stmt));
        }
       else
        return false;
 
-      GIMPLE_STMT_OPERAND (use_stmt, 1) = unshare_expr (tmp);
-      update_stmt (use_stmt);
+      {
+       gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
+       gimple_assign_set_rhs_from_tree (&gsi, unshare_expr (tmp));
+       use_stmt = gsi_stmt (gsi);
+       update_stmt (use_stmt);
+      }
 
       /* Remove defining statements.  */
       remove_prop_source_from_use (name, stmt);
 
       if (dump_file && (dump_flags & TDF_DETAILS))
        {
+         tree old_rhs = rhs_to_tree (TREE_TYPE (gimple_assign_lhs (stmt)),
+                                      stmt);
          fprintf (dump_file, "  Replaced '");
-         print_generic_expr (dump_file, rhs, dump_flags);
+         print_generic_expr (dump_file, old_rhs, dump_flags);
          fprintf (dump_file, "' with '");
          print_generic_expr (dump_file, tmp, dump_flags);
          fprintf (dump_file, "'\n");
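A hedged source-level illustration of the TRUTH_NOT_EXPR case handled above,
not part of the patch: the comparison result is propagated into the negation
and the comparison code is inverted.

    /* Illustrative only: with integer operands invert_tree_comparison
       turns "!(a < b)" into "a >= b", so d is computed directly.  */
    int
    not_less (int a, int b)
    {
      int c = a < b;
      int d = !c;
      return d;
    }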
@@ -897,23 +1023,24 @@ forward_propagate_comparison (tree cond, tree stmt)
    than one forward link.  */
 
 static void
-simplify_not_neg_expr (tree stmt)
+simplify_not_neg_expr (gimple_stmt_iterator *gsi_p)
 {
-  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-  tree rhs_def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
+  gimple stmt = gsi_stmt (*gsi_p);
+  tree rhs = gimple_assign_rhs1 (stmt);
+  gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
 
   /* See if the RHS_DEF_STMT has the same form as our statement.  */
-  if (TREE_CODE (rhs_def_stmt) == GIMPLE_MODIFY_STMT
-      && TREE_CODE (GIMPLE_STMT_OPERAND (rhs_def_stmt, 1)) == TREE_CODE (rhs))
+  if (is_gimple_assign (rhs_def_stmt)
+      && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
     {
-      tree rhs_def_operand =
-       TREE_OPERAND (GIMPLE_STMT_OPERAND (rhs_def_stmt, 1), 0);
+      tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);
 
       /* Verify that RHS_DEF_OPERAND is a suitable SSA_NAME.  */
       if (TREE_CODE (rhs_def_operand) == SSA_NAME
          && ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs_def_operand))
        {
-         GIMPLE_STMT_OPERAND (stmt, 1) = rhs_def_operand;
+         gimple_assign_set_rhs_from_tree (gsi_p, rhs_def_operand);
+         stmt = gsi_stmt (*gsi_p);
          update_stmt (stmt);
        }
     }
@@ -923,26 +1050,26 @@ simplify_not_neg_expr (tree stmt)
    the condition which we may be able to optimize better.  */
 
 static void
-simplify_switch_expr (tree stmt)
+simplify_gimple_switch (gimple stmt)
 {
-  tree cond = SWITCH_COND (stmt);
+  tree cond = gimple_switch_index (stmt);
   tree def, to, ti;
+  gimple def_stmt;
 
   /* The optimization that we really care about is removing unnecessary
      casts.  That will let us do much better in propagating the inferred
      constant at the switch target.  */
   if (TREE_CODE (cond) == SSA_NAME)
     {
-      def = SSA_NAME_DEF_STMT (cond);
-      if (TREE_CODE (def) == GIMPLE_MODIFY_STMT)
+      def_stmt = SSA_NAME_DEF_STMT (cond);
+      if (is_gimple_assign (def_stmt))
        {
-         def = GIMPLE_STMT_OPERAND (def, 1);
-         if (TREE_CODE (def) == NOP_EXPR)
+         if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
            {
              int need_precision;
              bool fail;
 
-             def = TREE_OPERAND (def, 0);
+             def = gimple_assign_rhs1 (def_stmt);
 
 #ifdef ENABLE_CHECKING
              /* ??? Why was Jeff testing this?  We are gimple...  */
@@ -968,7 +1095,7 @@ simplify_switch_expr (tree stmt)
 
              if (!fail)
                {
-                 SWITCH_COND (stmt) = def;
+                 gimple_switch_set_index (stmt, def);
                  update_stmt (stmt);
                }
            }
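A hedged source-level illustration for simplify_gimple_switch, not from the
patch: the widening cast on the switch index can be dropped because every case
label already fits the narrower type.

    /* Illustrative only: the NOP_EXPR from unsigned char to int is removed
       and the GIMPLE switch indexes on c directly.  */
    int
    classify (unsigned char c)
    {
      switch ((int) c)
        {
        case 0:   return 1;
        case 255: return 2;
        default:  return 0;
        }
    }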
@@ -988,33 +1115,32 @@ tree_ssa_forward_propagate_single_use_vars (void)
 
   FOR_EACH_BB (bb)
     {
-      block_stmt_iterator bsi;
+      gimple_stmt_iterator gsi;
 
-      /* Note we update BSI within the loop as necessary.  */
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
+      /* Note we update GSI within the loop as necessary.  */
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
        {
-         tree stmt = bsi_stmt (bsi);
+         gimple stmt = gsi_stmt (gsi);
 
          /* If this statement sets an SSA_NAME to an address,
             try to propagate the address into the uses of the SSA_NAME.  */
-         if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
+         if (is_gimple_assign (stmt))
            {
-             tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
-             tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-
+             tree lhs = gimple_assign_lhs (stmt);
+             tree rhs = gimple_assign_rhs1 (stmt);
 
              if (TREE_CODE (lhs) != SSA_NAME)
                {
-                 bsi_next (&bsi);
+                 gsi_next (&gsi);
                  continue;
                }
 
-             if (TREE_CODE (rhs) == ADDR_EXPR
+             if (gimple_assign_rhs_code (stmt) == ADDR_EXPR
                  /* Handle pointer conversions on invariant addresses
                     as well, as this is valid gimple.  */
-                 || (CONVERT_EXPR_P (rhs)
-                     && TREE_CODE (TREE_OPERAND (rhs, 0)) == ADDR_EXPR
-                     && POINTER_TYPE_P (TREE_TYPE (rhs))))
+                 || (IS_CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
+                     && TREE_CODE (rhs) == ADDR_EXPR
+                     && POINTER_TYPE_P (TREE_TYPE (lhs))))
                {
                  STRIP_NOPS (rhs);
                  if (!stmt_references_abnormal_ssa_name (stmt)
@@ -1022,61 +1148,64 @@ tree_ssa_forward_propagate_single_use_vars (void)
                    {
                      release_defs (stmt);
                      todoflags |= TODO_remove_unused_locals;
-                     bsi_remove (&bsi, true);
+                     gsi_remove (&gsi, true);
                    }
                  else
-                   bsi_next (&bsi);
+                   gsi_next (&gsi);
                }
-             else if ((TREE_CODE (rhs) == BIT_NOT_EXPR
-                       || TREE_CODE (rhs) == NEGATE_EXPR)
-                      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
+             else if ((gimple_assign_rhs_code (stmt) == BIT_NOT_EXPR
+                       || gimple_assign_rhs_code (stmt) == NEGATE_EXPR)
+                      && TREE_CODE (rhs) == SSA_NAME)
                {
-                 simplify_not_neg_expr (stmt);
-                 bsi_next (&bsi);
+                 simplify_not_neg_expr (&gsi);
+                 gsi_next (&gsi);
                }
-              else if (TREE_CODE (rhs) == COND_EXPR)
+             else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
                 {
+                  /* In this case the entire COND_EXPR is in rhs1. */
                  int did_something;
                  fold_defer_overflow_warnings ();
-                  did_something = forward_propagate_into_cond (rhs, stmt);
+                  did_something = forward_propagate_into_cond (&gsi);
+                 stmt = gsi_stmt (gsi);
                  if (did_something == 2)
                    cfg_changed = true;
                  fold_undefer_overflow_warnings (!TREE_NO_WARNING (rhs)
                    && did_something, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
-                 bsi_next (&bsi);
+                 gsi_next (&gsi);
                 }
-             else if (COMPARISON_CLASS_P (rhs))
+             else if (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
+                                       == tcc_comparison)
                {
-                 if (forward_propagate_comparison (rhs, stmt))
+                 if (forward_propagate_comparison (stmt))
                    {
                      release_defs (stmt);
                      todoflags |= TODO_remove_unused_locals;
-                     bsi_remove (&bsi, true);
+                     gsi_remove (&gsi, true);
                    }
                  else
-                   bsi_next (&bsi);
+                   gsi_next (&gsi);
                }
              else
-               bsi_next (&bsi);
+               gsi_next (&gsi);
            }
-         else if (TREE_CODE (stmt) == SWITCH_EXPR)
+         else if (gimple_code (stmt) == GIMPLE_SWITCH)
            {
-             simplify_switch_expr (stmt);
-             bsi_next (&bsi);
+             simplify_gimple_switch (stmt);
+             gsi_next (&gsi);
            }
-         else if (TREE_CODE (stmt) == COND_EXPR)
+         else if (gimple_code (stmt) == GIMPLE_COND)
            {
              int did_something;
              fold_defer_overflow_warnings ();
-             did_something = forward_propagate_into_cond (stmt, stmt);
+             did_something = forward_propagate_into_gimple_cond (stmt);
              if (did_something == 2)
                cfg_changed = true;
              fold_undefer_overflow_warnings (did_something, stmt,
                                              WARN_STRICT_OVERFLOW_CONDITIONAL);
-             bsi_next (&bsi);
+             gsi_next (&gsi);
            }
          else
-           bsi_next (&bsi);
+           gsi_next (&gsi);
        }
     }
 
index 93e7810..143608e 100644
@@ -101,14 +101,13 @@ recognize_if_then_else (basic_block cond_bb,
 static bool
 bb_no_side_effects_p (basic_block bb)
 {
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
 
-  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
     {
-      tree stmt = bsi_stmt (bsi);
-      stmt_ann_t ann = stmt_ann (stmt);
+      gimple stmt = gsi_stmt (gsi);
 
-      if (ann->has_volatile_ops
+      if (gimple_has_volatile_ops (stmt)
          || !ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
        return false;
     }
@@ -125,12 +124,16 @@ same_phi_args_p (basic_block bb1, basic_block bb2, basic_block dest)
 {
   edge e1 = find_edge (bb1, dest);
   edge e2 = find_edge (bb2, dest);
-  tree phi;
+  gimple_stmt_iterator gsi;
+  gimple phi;
 
-  for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
-    if (!operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi, e1),
-                         PHI_ARG_DEF_FROM_EDGE (phi, e2), 0))
-      return false;
+  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
+    {
+      phi = gsi_stmt (gsi);
+      if (!operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi, e1),
+                           PHI_ARG_DEF_FROM_EDGE (phi, e2), 0))
+        return false;
+    }
 
   return true;
 }
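PHI nodes are likewise no longer chained off the block; a minimal hedged
sketch of the gsi_start_phis idiom used above (the helper count_phis is
hypothetical):

    /* Count the PHI nodes of BB with the tuple PHI iterator.  */
    static int
    count_phis (basic_block bb)
    {
      gimple_stmt_iterator gsi;
      int n = 0;
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        n++;
      return n;
    }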
@@ -146,71 +149,86 @@ get_name_for_bit_test (tree candidate)
   if (TREE_CODE (candidate) == SSA_NAME
       && has_single_use (candidate))
     {
-      tree def_stmt = SSA_NAME_DEF_STMT (candidate);
-      if (TREE_CODE (def_stmt) == GIMPLE_MODIFY_STMT
-         && CONVERT_EXPR_P (GIMPLE_STMT_OPERAND (def_stmt, 1)))
+      gimple def_stmt = SSA_NAME_DEF_STMT (candidate);
+      if (is_gimple_assign (def_stmt)
+         && gimple_assign_cast_p (def_stmt))
        {
-         tree rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);
-         if (TYPE_PRECISION (TREE_TYPE (rhs))
-             <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (rhs, 0))))
-           return TREE_OPERAND (rhs, 0);
+         if (TYPE_PRECISION (TREE_TYPE (candidate))
+             <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt))))
+           return gimple_assign_rhs1 (def_stmt);
        }
     }
 
   return candidate;
 }
 
-/* Recognize a single bit test pattern in COND_EXPR and its defining
+/* Helpers for recognize_single_bit_test defined mainly for source code
+   formatting.  */
+
+static int
+operand_precision (tree t)
+{
+  return TYPE_PRECISION (TREE_TYPE (t));
+}
+
+static bool
+integral_operand_p (tree t)
+{
+  return INTEGRAL_TYPE_P (TREE_TYPE (t));
+}
+
+/* Recognize a single bit test pattern in GIMPLE_COND and its defining
    statements.  Store the name being tested in *NAME and the bit
-   in *BIT.  The COND_EXPR computes *NAME & (1 << *BIT).
+   in *BIT.  The GIMPLE_COND computes *NAME & (1 << *BIT).
    Returns true if the pattern matched, false otherwise.  */
 
 static bool
-recognize_single_bit_test (tree cond_expr, tree *name, tree *bit)
+recognize_single_bit_test (gimple cond, tree *name, tree *bit)
 {
-  tree t;
+  gimple stmt;
 
   /* Get at the definition of the result of the bit test.  */
-  t = TREE_OPERAND (cond_expr, 0);
-  if (TREE_CODE (t) == NE_EXPR
-      && integer_zerop (TREE_OPERAND (t, 1)))
-    t = TREE_OPERAND (t, 0);
-  if (TREE_CODE (t) != SSA_NAME)
+  if (gimple_cond_code (cond) != NE_EXPR
+      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
+      || !integer_zerop (gimple_cond_rhs (cond)))
     return false;
-  t = SSA_NAME_DEF_STMT (t);
-  if (TREE_CODE (t) != GIMPLE_MODIFY_STMT)
+  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
+  if (!is_gimple_assign (stmt))
     return false;
-  t = GIMPLE_STMT_OPERAND (t, 1);
 
   /* Look at which bit is tested.  One form to recognize is
      D.1985_5 = state_3(D) >> control1_4(D);
      D.1986_6 = (int) D.1985_5;
      D.1987_7 = op0 & 1;
      if (D.1987_7 != 0)  */
-  if (TREE_CODE (t) == BIT_AND_EXPR
-      && integer_onep (TREE_OPERAND (t, 1))
-      && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME)
+  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
+      && integer_onep (gimple_assign_rhs2 (stmt))
+      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
     {
-      tree orig_name = TREE_OPERAND (t, 0);
+      tree orig_name = gimple_assign_rhs1 (stmt);
 
       /* Look through copies and conversions to eventually
         find the stmt that computes the shift.  */
-      t = orig_name;
-      do {
-       t = SSA_NAME_DEF_STMT (t);
-       if (TREE_CODE (t) != GIMPLE_MODIFY_STMT)
-         break;
-       t = GIMPLE_STMT_OPERAND (t, 1);
-       if (CONVERT_EXPR_P (t))
-         t = TREE_OPERAND (t, 0);
-      } while (TREE_CODE (t) == SSA_NAME);
+      stmt = SSA_NAME_DEF_STMT (orig_name);
+
+      while (is_gimple_assign (stmt)
+            && (gimple_assign_copy_p (stmt)
+                || (gimple_assign_cast_p (stmt)
+                    && integral_operand_p (gimple_assign_lhs (stmt))
+                    && integral_operand_p (gimple_assign_rhs1 (stmt))
+                    && (operand_precision (gimple_assign_lhs (stmt))
+                        <= operand_precision (gimple_assign_rhs1 (stmt))))))
+       {
+         stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
+       }
 
       /* If we found such, decompose it.  */
-      if (TREE_CODE (t) == RSHIFT_EXPR)
+      if (is_gimple_assign (stmt)
+         && gimple_assign_rhs_code (stmt) == RSHIFT_EXPR)
        {
          /* op0 & (1 << op1) */
-         *bit = TREE_OPERAND (t, 1);
-         *name = TREE_OPERAND (t, 0);
+         *bit = gimple_assign_rhs2 (stmt);
+         *name = gimple_assign_rhs1 (stmt);
        }
       else
        {
@@ -225,13 +243,13 @@ recognize_single_bit_test (tree cond_expr, tree *name, tree *bit)
   /* Another form is
      D.1987_7 = op0 & (1 << CST)
      if (D.1987_7 != 0)  */
-  if (TREE_CODE (t) == BIT_AND_EXPR
-      && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
-      && integer_pow2p (TREE_OPERAND (t, 1)))
+  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
+      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
+      && integer_pow2p (gimple_assign_rhs2 (stmt)))
     {
-      *name = TREE_OPERAND (t, 0);
+      *name = gimple_assign_rhs1 (stmt);
       *bit = build_int_cst (integer_type_node,
-                           tree_log2 (TREE_OPERAND (t, 1)));
+                           tree_log2 (gimple_assign_rhs2 (stmt)));
       return true;
     }
 
@@ -239,31 +257,31 @@ recognize_single_bit_test (tree cond_expr, tree *name, tree *bit)
      D.1986_6 = 1 << control1_4(D)
      D.1987_7 = op0 & D.1986_6
      if (D.1987_7 != 0)  */
-  if (TREE_CODE (t) == BIT_AND_EXPR
-      && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
-      && TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME)
+  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
+      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
+      && TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME)
     {
-      tree tmp;
+      gimple tmp;
 
       /* Both arguments of the BIT_AND_EXPR can be the single-bit
         specifying expression.  */
-      tmp = SSA_NAME_DEF_STMT (TREE_OPERAND (t, 0));
-      if (TREE_CODE (tmp) == GIMPLE_MODIFY_STMT
-         && TREE_CODE (GIMPLE_STMT_OPERAND (tmp, 1)) == LSHIFT_EXPR
-         && integer_onep (TREE_OPERAND (GIMPLE_STMT_OPERAND (tmp, 1), 0)))
+      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
+      if (is_gimple_assign (tmp)
+         && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
+         && integer_onep (gimple_assign_rhs1 (tmp)))
        {
-         *name = TREE_OPERAND (t, 1);
-         *bit = TREE_OPERAND (GIMPLE_STMT_OPERAND (tmp, 1), 1);
+         *name = gimple_assign_rhs2 (stmt);
+         *bit = gimple_assign_rhs2 (tmp);
          return true;
        }
 
-      tmp = SSA_NAME_DEF_STMT (TREE_OPERAND (t, 1));
-      if (TREE_CODE (tmp) == GIMPLE_MODIFY_STMT
-         && TREE_CODE (GIMPLE_STMT_OPERAND (tmp, 1)) == LSHIFT_EXPR
-         && integer_onep (TREE_OPERAND (GIMPLE_STMT_OPERAND (tmp, 1), 0)))
+      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
+      if (is_gimple_assign (tmp)
+         && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
+         && integer_onep (gimple_assign_rhs1 (tmp)))
        {
-         *name = TREE_OPERAND (t, 0);
-         *bit = TREE_OPERAND (GIMPLE_STMT_OPERAND (tmp, 1), 1);
+         *name = gimple_assign_rhs1 (stmt);
+         *bit = gimple_assign_rhs2 (tmp);
          return true;
        }
     }
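Hedged C sources matching the three condition shapes recognize_single_bit_test
accepts; the function names are made up, and the recognizer itself only
matters once ifcombine finds two such tests to merge.

    /* Variable shift of the tested name, constant power-of-two mask,
       and a mask built as 1 << n, respectively.  */
    int f1 (unsigned int x, int n) { if ((x >> n) & 1) return 1; return 0; }
    int f2 (unsigned int x)        { if (x & 8) return 1; return 0; }
    int f3 (unsigned int x, int n) { if (x & (1u << n)) return 1; return 0; }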
@@ -271,33 +289,28 @@ recognize_single_bit_test (tree cond_expr, tree *name, tree *bit)
   return false;
 }
 
-/* Recognize a bit test pattern in COND_EXPR and its defining
+/* Recognize a bit test pattern in a GIMPLE_COND and its defining
    statements.  Store the name being tested in *NAME and the bits
   in *BITS.  The GIMPLE_COND computes *NAME & *BITS.
    Returns true if the pattern matched, false otherwise.  */
 
 static bool
-recognize_bits_test (tree cond_expr, tree *name, tree *bits)
+recognize_bits_test (gimple cond, tree *name, tree *bits)
 {
-  tree t;
+  gimple stmt;
 
   /* Get at the definition of the result of the bit test.  */
-  t = TREE_OPERAND (cond_expr, 0);
-  if (TREE_CODE (t) == NE_EXPR
-      && integer_zerop (TREE_OPERAND (t, 1)))
-    t = TREE_OPERAND (t, 0);
-  if (TREE_CODE (t) != SSA_NAME)
-    return false;
-  t = SSA_NAME_DEF_STMT (t);
-  if (TREE_CODE (t) != GIMPLE_MODIFY_STMT)
+  if (gimple_cond_code (cond) != NE_EXPR
+      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
+      || !integer_zerop (gimple_cond_rhs (cond)))
     return false;
-  t = GIMPLE_STMT_OPERAND (t, 1);
-
-  if (TREE_CODE (t) != BIT_AND_EXPR)
+  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
+  if (!is_gimple_assign (stmt)
+      || gimple_assign_rhs_code (stmt) != BIT_AND_EXPR)
     return false;
 
-  *name = get_name_for_bit_test (TREE_OPERAND (t, 0));
-  *bits = TREE_OPERAND (t, 1);
+  *name = get_name_for_bit_test (gimple_assign_rhs1 (stmt));
+  *bits = gimple_assign_rhs2 (stmt);
 
   return true;
 }
@@ -309,18 +322,18 @@ recognize_bits_test (tree cond_expr, tree *name, tree *bits)
 static bool
 ifcombine_ifandif (basic_block inner_cond_bb, basic_block outer_cond_bb)
 {
-  block_stmt_iterator bsi;
-  tree inner_cond, outer_cond;
+  gimple_stmt_iterator gsi;
+  gimple inner_cond, outer_cond;
   tree name1, name2, bit1, bit2;
 
   inner_cond = last_stmt (inner_cond_bb);
   if (!inner_cond
-      || TREE_CODE (inner_cond) != COND_EXPR)
+      || gimple_code (inner_cond) != GIMPLE_COND)
     return false;
 
   outer_cond = last_stmt (outer_cond_bb);
   if (!outer_cond
-      || TREE_CODE (outer_cond) != COND_EXPR)
+      || gimple_code (outer_cond) != GIMPLE_COND)
     return false;
 
   /* See if we test a single bit of the same name in both tests.  In
@@ -334,23 +347,23 @@ ifcombine_ifandif (basic_block inner_cond_bb, basic_block outer_cond_bb)
       tree t, t2;
 
       /* Do it.  */
-      bsi = bsi_for_stmt (inner_cond);
+      gsi = gsi_for_stmt (inner_cond);
       t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
                       build_int_cst (TREE_TYPE (name1), 1), bit1);
       t2 = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
                        build_int_cst (TREE_TYPE (name1), 1), bit2);
       t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), t, t2);
-      t = force_gimple_operand_bsi (&bsi, t, true, NULL_TREE,
-                                   true, BSI_SAME_STMT);
+      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
+                                   true, GSI_SAME_STMT);
       t2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
-      t2 = force_gimple_operand_bsi (&bsi, t2, true, NULL_TREE,
-                                    true, BSI_SAME_STMT);
-      COND_EXPR_COND (inner_cond) = fold_build2 (EQ_EXPR, boolean_type_node,
-                                                t2, t);
+      t2 = force_gimple_operand_gsi (&gsi, t2, true, NULL_TREE,
+                                    true, GSI_SAME_STMT);
+      t = fold_build2 (EQ_EXPR, boolean_type_node, t2, t);
+      gimple_cond_set_condition_from_tree (inner_cond, t);
       update_stmt (inner_cond);
 
       /* Leave CFG optimization to cfg_cleanup.  */
-      COND_EXPR_COND (outer_cond) = boolean_true_node;
+      gimple_cond_set_condition_from_tree (outer_cond, boolean_true_node);
       update_stmt (outer_cond);
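A hedged source-level illustration of the merge performed just above, not from
the patch: two single-bit tests of the same name collapse into one test that
both bits are set.

    /* Illustrative only: the inner condition becomes
       (flags & ((1 << a) | (1 << b))) == ((1 << a) | (1 << b)),
       and the outer condition is set to true for cfg_cleanup.  */
    int
    both_bits_set (unsigned int flags, int a, int b)
    {
      if ((flags >> a) & 1)
        if ((flags >> b) & 1)
          return 1;
      return 0;
    }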
 
       if (dump_file)
@@ -378,17 +391,17 @@ ifcombine_ifandif (basic_block inner_cond_bb, basic_block outer_cond_bb)
 static bool
 ifcombine_iforif (basic_block inner_cond_bb, basic_block outer_cond_bb)
 {
-  tree inner_cond, outer_cond;
+  gimple inner_cond, outer_cond;
   tree name1, name2, bits1, bits2;
 
   inner_cond = last_stmt (inner_cond_bb);
   if (!inner_cond
-      || TREE_CODE (inner_cond) != COND_EXPR)
+      || gimple_code (inner_cond) != GIMPLE_COND)
     return false;
 
   outer_cond = last_stmt (outer_cond_bb);
   if (!outer_cond
-      || TREE_CODE (outer_cond) != COND_EXPR)
+      || gimple_code (outer_cond) != GIMPLE_COND)
     return false;
 
   /* See if we have two bit tests of the same name in both tests.
@@ -397,7 +410,7 @@ ifcombine_iforif (basic_block inner_cond_bb, basic_block outer_cond_bb)
   if (recognize_bits_test (inner_cond, &name1, &bits1)
       && recognize_bits_test (outer_cond, &name2, &bits2))
     {
-      block_stmt_iterator bsi;
+      gimple_stmt_iterator gsi;
       tree t;
 
       /* Find the common name which is bit-tested.  */
@@ -428,19 +441,20 @@ ifcombine_iforif (basic_block inner_cond_bb, basic_block outer_cond_bb)
        return false;
 
       /* Do it.  */
-      bsi = bsi_for_stmt (inner_cond);
+      gsi = gsi_for_stmt (inner_cond);
       t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), bits1, bits2);
-      t = force_gimple_operand_bsi (&bsi, t, true, NULL_TREE,
-                                   true, BSI_SAME_STMT);
+      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
+                                   true, GSI_SAME_STMT);
       t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
-      t = force_gimple_operand_bsi (&bsi, t, true, NULL_TREE,
-                                   true, BSI_SAME_STMT);
-      COND_EXPR_COND (inner_cond) = fold_build2 (NE_EXPR, boolean_type_node, t,
-                                                build_int_cst (TREE_TYPE (t), 0));
+      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
+                                   true, GSI_SAME_STMT);
+      t = fold_build2 (NE_EXPR, boolean_type_node, t,
+                      build_int_cst (TREE_TYPE (t), 0));
+      gimple_cond_set_condition_from_tree (inner_cond, t);
       update_stmt (inner_cond);
 
       /* Leave CFG optimization to cfg_cleanup.  */
-      COND_EXPR_COND (outer_cond) = boolean_false_node;
+      gimple_cond_set_condition_from_tree (outer_cond, boolean_false_node);
       update_stmt (outer_cond);
 
       if (dump_file)
@@ -460,17 +474,15 @@ ifcombine_iforif (basic_block inner_cond_bb, basic_block outer_cond_bb)
   /* See if we have two comparisons that we can merge into one.
      This happens for C++ operator overloading where for example
      GE_EXPR is implemented as GT_EXPR || EQ_EXPR.  */
-  else if (COMPARISON_CLASS_P (COND_EXPR_COND (inner_cond))
-          && COMPARISON_CLASS_P (COND_EXPR_COND (outer_cond))
-          && operand_equal_p (TREE_OPERAND (COND_EXPR_COND (inner_cond), 0),
-                              TREE_OPERAND (COND_EXPR_COND (outer_cond), 0), 0)
-          && operand_equal_p (TREE_OPERAND (COND_EXPR_COND (inner_cond), 1),
-                              TREE_OPERAND (COND_EXPR_COND (outer_cond), 1), 0))
+  else if (TREE_CODE_CLASS (gimple_cond_code (inner_cond)) == tcc_comparison
+          && TREE_CODE_CLASS (gimple_cond_code (outer_cond)) == tcc_comparison
+          && operand_equal_p (gimple_cond_lhs (inner_cond),
+                              gimple_cond_lhs (outer_cond), 0)
+          && operand_equal_p (gimple_cond_rhs (inner_cond),
+                              gimple_cond_rhs (outer_cond), 0))
     {
-      tree ccond1 = COND_EXPR_COND (inner_cond);
-      tree ccond2 = COND_EXPR_COND (outer_cond);
-      enum tree_code code1 = TREE_CODE (ccond1);
-      enum tree_code code2 = TREE_CODE (ccond2);
+      enum tree_code code1 = gimple_cond_code (inner_cond);
+      enum tree_code code2 = gimple_cond_code (outer_cond);
       enum tree_code code;
       tree t;
 
@@ -487,7 +499,7 @@ ifcombine_iforif (basic_block inner_cond_bb, basic_block outer_cond_bb)
        code = LE_EXPR;
       else if (CHK (GT, GE))
        code = GE_EXPR;
-      else if (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (ccond1, 0)))
+      else if (INTEGRAL_TYPE_P (TREE_TYPE (gimple_cond_lhs (inner_cond)))
               || flag_unsafe_math_optimizations)
        {
          if (CHK (LT, GT))
@@ -505,16 +517,16 @@ ifcombine_iforif (basic_block inner_cond_bb, basic_block outer_cond_bb)
 #undef CHK
 
       /* Do it.  */
-      t = fold_build2 (code, boolean_type_node,
-                      TREE_OPERAND (ccond2, 0), TREE_OPERAND (ccond2, 1));
+      t = fold_build2 (code, boolean_type_node, gimple_cond_lhs (outer_cond),
+                      gimple_cond_rhs (outer_cond));
       t = canonicalize_cond_expr_cond (t);
       if (!t)
        return false;
-      COND_EXPR_COND (inner_cond) = t;
+      gimple_cond_set_condition_from_tree (inner_cond, t);
       update_stmt (inner_cond);
 
       /* Leave CFG optimization to cfg_cleanup.  */
-      COND_EXPR_COND (outer_cond) = boolean_false_node;
+      gimple_cond_set_condition_from_tree (outer_cond, boolean_false_node);
       update_stmt (outer_cond);
 
       if (dump_file)
@@ -611,10 +623,10 @@ tree_ssa_ifcombine (void)
   for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; ++i)
     {
       basic_block bb = bbs[i];
-      tree stmt = last_stmt (bb);
+      gimple stmt = last_stmt (bb);
 
       if (stmt
-         && TREE_CODE (stmt) == COND_EXPR)
+         && gimple_code (stmt) == GIMPLE_COND)
        cfg_changed |= tree_ssa_ifcombine_bb (bb);
     }
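
The ifcombine hunks above replace direct COND_EXPR_COND surgery with the GIMPLE_COND accessors. As a reading aid only (not part of the patch), a minimal sketch of that pattern, assuming the gimple.h API used in the hunks; the function name is illustrative:

/* Illustrative only: fold a block-terminating condition to a constant,
   the way ifcombine retargets outer_cond above.  */
static void
fold_cond_to_constant (basic_block bb, bool value)
{
  gimple cond = last_stmt (bb);

  /* The tuples API replaces TREE_CODE (stmt) == COND_EXPR with a
     statement-code check.  */
  if (!cond || gimple_code (cond) != GIMPLE_COND)
    return;

  /* COND_EXPR_COND (stmt) = boolean_true_node becomes an explicit setter
     that decomposes the tree condition into code/lhs/rhs.  */
  gimple_cond_set_condition_from_tree (cond,
                                       value ? boolean_true_node
                                             : boolean_false_node);
  update_stmt (cond);
}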
 
index a5f7596..fa3834d 100644
@@ -403,8 +403,7 @@ static inline void mark_all_vars_used (tree *, void *data);
 /* Helper function for mark_all_vars_used, called via walk_tree.  */
 
 static tree
-mark_all_vars_used_1 (tree *tp, int *walk_subtrees,
-                     void *data)
+mark_all_vars_used_1 (tree *tp, int *walk_subtrees, void *data)
 {
   tree t = *tp;
   enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
@@ -412,8 +411,8 @@ mark_all_vars_used_1 (tree *tp, int *walk_subtrees,
 
   if (TREE_CODE (t) == SSA_NAME)
     t = SSA_NAME_VAR (t);
-  if ((IS_EXPR_CODE_CLASS (c)
-       || IS_GIMPLE_STMT_CODE_CLASS (c))
+
+  if (IS_EXPR_CODE_CLASS (c)
       && (b = TREE_BLOCK (t)) != NULL)
     TREE_USED (b) = true;
 
@@ -584,6 +583,7 @@ remove_unused_locals (void)
 
   if (optimize)
     mark_scope_block_unused (DECL_INITIAL (current_function_decl));
+
   /* Assume all locals are unused.  */
   FOR_EACH_REFERENCED_VAR (t, rvi)
     var_ann (t)->used = false;
@@ -591,23 +591,34 @@ remove_unused_locals (void)
   /* Walk the CFG marking all referenced symbols.  */
   FOR_EACH_BB (bb)
     {
-      block_stmt_iterator bsi;
-      tree phi, def;
+      gimple_stmt_iterator gsi;
+      size_t i;
 
       /* Walk the statements.  */
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
-       mark_all_vars_used (bsi_stmt_ptr (bsi), NULL);
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+       {
+         gimple stmt = gsi_stmt (gsi);
+         tree b = gimple_block (stmt);
+
+         if (b)
+           TREE_USED (b) = true;
 
-      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+         for (i = 0; i < gimple_num_ops (stmt); i++)
+           mark_all_vars_used (gimple_op_ptr (gsi_stmt (gsi), i), NULL);
+       }
+
+      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
         {
           use_operand_p arg_p;
           ssa_op_iter i;
+         tree def;
+         gimple phi = gsi_stmt (gsi);
 
          /* No point processing globals.  */
-         if (is_global_var (SSA_NAME_VAR (PHI_RESULT (phi))))
+         if (is_global_var (SSA_NAME_VAR (gimple_phi_result (phi))))
            continue;
 
-          def = PHI_RESULT (phi);
+         def = gimple_phi_result (phi);
          mark_all_vars_used (&def, NULL);
 
           FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_ALL_USES)
@@ -822,7 +833,7 @@ static void
 set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
 {
   int p;
-  tree stmt;
+  gimple stmt;
   use_operand_p use;
   basic_block def_bb = NULL;
   imm_use_iterator imm_iter;
@@ -835,7 +846,7 @@ set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
   stmt = SSA_NAME_DEF_STMT (ssa_name);
   if (stmt)
     {
-      def_bb = bb_for_stmt (stmt);
+      def_bb = gimple_bb (stmt);
       /* Mark defs in liveout bitmap temporarily.  */
       if (def_bb)
        bitmap_set_bit (live->liveout[def_bb->index], p);
@@ -847,16 +858,16 @@ set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
      add it to the list of live on entry blocks.  */
   FOR_EACH_IMM_USE_FAST (use, imm_iter, ssa_name)
     {
-      tree use_stmt = USE_STMT (use);
+      gimple use_stmt = USE_STMT (use);
       basic_block add_block = NULL;
 
-      if (TREE_CODE (use_stmt) == PHI_NODE)
+      if (gimple_code (use_stmt) == GIMPLE_PHI)
         {
          /* Uses in PHI's are considered to be live at exit of the SRC block
             as this is where a copy would be inserted.  Check to see if it is
             defined in that block, or whether its live on entry.  */
          int index = PHI_ARG_INDEX_FROM_USE (use);
-         edge e = PHI_ARG_EDGE (use_stmt, index);
+         edge e = gimple_phi_arg_edge (use_stmt, index);
          if (e->src != ENTRY_BLOCK_PTR)
            {
              if (e->src != def_bb)
@@ -866,7 +877,7 @@ set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
       else
         {
	  /* If it's not defined in this block, it's live on entry.  */
-         basic_block use_bb = bb_for_stmt (use_stmt);
+         basic_block use_bb = gimple_bb (use_stmt);
          if (use_bb != def_bb)
            add_block = use_bb;
        }  
@@ -891,9 +902,6 @@ set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
 void
 calculate_live_on_exit (tree_live_info_p liveinfo)
 {
-  unsigned i;
-  int p;
-  tree t, phi;
   basic_block bb;
   edge e;
   edge_iterator ei;
@@ -905,20 +913,29 @@ calculate_live_on_exit (tree_live_info_p liveinfo)
   /* Set all the live-on-exit bits for uses in PHIs.  */
   FOR_EACH_BB (bb)
     {
+      gimple_stmt_iterator gsi;
+      size_t i;
+
       /* Mark the PHI arguments which are live on exit to the pred block.  */
-      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
-       for (i = 0; i < (unsigned)PHI_NUM_ARGS (phi); i++)
-         { 
-           t = PHI_ARG_DEF (phi, i);
-           if (TREE_CODE (t) != SSA_NAME)
-             continue;
-           p = var_to_partition (liveinfo->map, t);
-           if (p == NO_PARTITION)
-             continue;
-           e = PHI_ARG_EDGE (phi, i);
-           if (e->src != ENTRY_BLOCK_PTR)
-             bitmap_set_bit (liveinfo->liveout[e->src->index], p);
-         }
+      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+       {
+         gimple phi = gsi_stmt (gsi);
+         for (i = 0; i < gimple_phi_num_args (phi); i++)
+           { 
+             tree t = PHI_ARG_DEF (phi, i);
+             int p;
+
+             if (TREE_CODE (t) != SSA_NAME)
+               continue;
+
+             p = var_to_partition (liveinfo->map, t);
+             if (p == NO_PARTITION)
+               continue;
+             e = gimple_phi_arg_edge (phi, i);
+             if (e->src != ENTRY_BLOCK_PTR)
+               bitmap_set_bit (liveinfo->liveout[e->src->index], p);
+           }
+       }
 
       /* Add each successor's live on entry to this block's live on exit.  */
       FOR_EACH_EDGE (e, ei, bb->succs)
@@ -1067,7 +1084,7 @@ verify_live_on_entry (tree_live_info_p live)
 {
   unsigned i;
   tree var;
-  tree phi, stmt;
+  gimple stmt;
   basic_block bb;
   edge e;
   int num;
@@ -1091,13 +1108,13 @@ verify_live_on_entry (tree_live_info_p live)
          bitmap loe;
          var = partition_to_var (map, i);
          stmt = SSA_NAME_DEF_STMT (var);
-         tmp = bb_for_stmt (stmt);
+         tmp = gimple_bb (stmt);
          d = gimple_default_def (cfun, SSA_NAME_VAR (var));
 
          loe = live_on_entry (live, e->dest);
          if (loe && bitmap_bit_p (loe, i))
            {
-             if (!IS_EMPTY_STMT (stmt))
+             if (!gimple_nop_p (stmt))
                {
                  num++;
                  print_generic_expr (stderr, var, TDF_SLIM);
@@ -1105,7 +1122,7 @@ verify_live_on_entry (tree_live_info_p live)
                  if (tmp)
                    fprintf (stderr, " in BB%d, ", tmp->index);
                  fprintf (stderr, "by:\n");
-                 print_generic_expr (stderr, stmt, TDF_SLIM);
+                 print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
                  fprintf (stderr, "\nIt is also live-on-entry to entry BB %d", 
                           entry_block);
                  fprintf (stderr, " So it appears to have multiple defs.\n");
@@ -1116,7 +1133,8 @@ verify_live_on_entry (tree_live_info_p live)
                    {
                      num++;
                      print_generic_expr (stderr, var, TDF_SLIM);
-                     fprintf (stderr, " is live-on-entry to BB%d ",entry_block);
+                     fprintf (stderr, " is live-on-entry to BB%d ",
+                              entry_block);
                      if (d)
                        {
                          fprintf (stderr, " but is not the default def of ");
@@ -1133,15 +1151,18 @@ verify_live_on_entry (tree_live_info_p live)
              {
                /* The only way this var shouldn't be marked live on entry is 
                   if it occurs in a PHI argument of the block.  */
-               int z, ok = 0;
-               for (phi = phi_nodes (e->dest); 
-                    phi && !ok; 
-                    phi = PHI_CHAIN (phi))
+               size_t z;
+               bool ok = false;
+               gimple_stmt_iterator gsi;
+               for (gsi = gsi_start_phis (e->dest);
+                    !gsi_end_p (gsi) && !ok;
+                    gsi_next (&gsi))
                  {
-                   for (z = 0; z < PHI_NUM_ARGS (phi); z++)
-                     if (var == PHI_ARG_DEF (phi, z))
+                   gimple phi = gsi_stmt (gsi);
+                   for (z = 0; z < gimple_phi_num_args (phi); z++)
+                     if (var == gimple_phi_arg_def (phi, z))
                        {
-                         ok = 1;
+                         ok = true;
                          break;
                        }
                  }
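
Throughout tree-ssa-live.c the PHI_CHAIN walk is replaced by a dedicated PHI iterator. A minimal sketch of that idiom, pieced together from the hunks above and not part of the patch (the helper name is illustrative):

/* Illustrative only: return true if VAR appears as a PHI argument in BB,
   using the gimple PHI iterator instead of the old PHI_CHAIN walk.  */
static bool
var_is_phi_arg_in_bb (tree var, basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      size_t i;

      for (i = 0; i < gimple_phi_num_args (phi); i++)
	if (var == gimple_phi_arg_def (phi, i))
	  return true;
    }

  return false;
}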
index 4f02160..de47262 100644
@@ -361,8 +361,8 @@ extern var_map coalesce_ssa_name (void);
 
 
 /* From tree-ssa-ter.c  */
-extern tree *find_replaceable_exprs (var_map);
-extern void dump_replaceable_exprs (FILE *, tree *);
+extern gimple *find_replaceable_exprs (var_map);
+extern void dump_replaceable_exprs (FILE *, gimple *);
 
 
 #endif /* _TREE_SSA_LIVE_H  */
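
The TER interface above now traffics in gimple statements. A hedged sketch of a consumer, assuming the returned table is still owned and freed by the caller as in the pre-tuples out-of-SSA code; the helper name is illustrative and not part of the patch:

/* Illustrative only: dump the replaceable expressions for MAP using the
   retuplified TER interface.  */
static void
dump_ter_table (FILE *file, var_map map)
{
  gimple *replacements = find_replaceable_exprs (map);

  if (replacements)
    {
      dump_replaceable_exprs (file, replacements);
      /* Assumption: the caller still frees the table, as before the merge.  */
      free (replacements);
    }
}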
index 47b93f8..9a0dca7 100644
@@ -50,8 +50,8 @@ static bool
 should_duplicate_loop_header_p (basic_block header, struct loop *loop,
                                int *limit)
 {
-  block_stmt_iterator bsi;
-  tree last;
+  gimple_stmt_iterator bsi;
+  gimple last;
 
   /* Do not copy one block more than once (we do not really want to do
      loop peeling here).  */
@@ -71,19 +71,19 @@ should_duplicate_loop_header_p (basic_block header, struct loop *loop,
     return false;
 
   last = last_stmt (header);
-  if (TREE_CODE (last) != COND_EXPR)
+  if (gimple_code (last) != GIMPLE_COND)
     return false;
 
   /* Approximately copy the conditions that used to be used in jump.c --
      at most 20 insns and no calls.  */
-  for (bsi = bsi_start (header); !bsi_end_p (bsi); bsi_next (&bsi))
+  for (bsi = gsi_start_bb (header); !gsi_end_p (bsi); gsi_next (&bsi))
     {
-      last = bsi_stmt (bsi);
+      last = gsi_stmt (bsi);
 
-      if (TREE_CODE (last) == LABEL_EXPR)
+      if (gimple_code (last) == GIMPLE_LABEL)
        continue;
 
-      if (get_call_expr_in (last))
+      if (is_gimple_call (last))
        return false;
 
       *limit -= estimate_num_insns (last, &eni_size_weights);
@@ -99,17 +99,17 @@ should_duplicate_loop_header_p (basic_block header, struct loop *loop,
 static bool
 do_while_loop_p (struct loop *loop)
 {
-  tree stmt = last_stmt (loop->latch);
+  gimple stmt = last_stmt (loop->latch);
 
   /* If the latch of the loop is not empty, it is not a do-while loop.  */
   if (stmt
-      && TREE_CODE (stmt) != LABEL_EXPR)
+      && gimple_code (stmt) != GIMPLE_LABEL)
     return false;
 
   /* If the header contains just a condition, it is not a do-while loop.  */
   stmt = last_and_only_stmt (loop->header);
   if (stmt
-      && TREE_CODE (stmt) == COND_EXPR)
+      && gimple_code (stmt) == GIMPLE_COND)
     return false;
 
   return true;
@@ -196,7 +196,7 @@ copy_loop_headers (void)
 
       entry = loop_preheader_edge (loop);
 
-      if (!tree_duplicate_sese_region (entry, exit, bbs, n_bbs, copied_bbs))
+      if (!gimple_duplicate_sese_region (entry, exit, bbs, n_bbs, copied_bbs))
        {
          fprintf (dump_file, "Duplication failed.\n");
          continue;
@@ -208,27 +208,27 @@ copy_loop_headers (void)
         we assume that "j < j + 10" is true.  We don't want to warn
         about that case for -Wstrict-overflow, because in general we
         don't warn about overflow involving loops.  Prevent the
-        warning by setting TREE_NO_WARNING.  */
+        warning by setting the no_warning flag in the condition.  */
       if (warn_strict_overflow > 0)
        {
          unsigned int i;
 
          for (i = 0; i < n_bbs; ++i)
            {
-             block_stmt_iterator bsi;
+             gimple_stmt_iterator bsi;
 
-             for (bsi = bsi_start (copied_bbs[i]);
-                  !bsi_end_p (bsi);
-                  bsi_next (&bsi))
+             for (bsi = gsi_start_bb (copied_bbs[i]);
+                  !gsi_end_p (bsi);
+                  gsi_next (&bsi))
                {
-                 tree stmt = bsi_stmt (bsi);
-                 if (TREE_CODE (stmt) == COND_EXPR)
-                   TREE_NO_WARNING (stmt) = 1;
-                 else if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
+                 gimple stmt = gsi_stmt (bsi);
+                 if (gimple_code (stmt) == GIMPLE_COND)
+                   gimple_set_no_warning (stmt, true);
+                 else if (is_gimple_assign (stmt))
                    {
-                     tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-                     if (COMPARISON_CLASS_P (rhs))
-                       TREE_NO_WARNING (stmt) = 1;
+                     enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
+                     if (TREE_CODE_CLASS (rhs_code) == tcc_comparison)
+                       gimple_set_no_warning (stmt, true);
                    }
                }
            }
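
With tuples, the -Wstrict-overflow suppression above keys off the statement code and the assign RHS code rather than TREE_CODE of an expression. A small sketch of that predicate, not from the patch; the helper name is illustrative:

/* Illustrative only: suppress strict-overflow warnings for one statement,
   mirroring the loop over copied blocks above.  */
static void
suppress_overflow_warning (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_COND)
    gimple_set_no_warning (stmt, true);
  else if (is_gimple_assign (stmt)
	   && TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
    gimple_set_no_warning (stmt, true);
}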
index 899eb8a..4c85c87 100644
@@ -70,7 +70,7 @@ along with GCC; see the file COPYING3.  If not see
 
 struct depend
 {
-  tree stmt;
+  gimple stmt;
   struct depend *next;
 };
 
@@ -99,16 +99,16 @@ struct lim_aux_data
                                   MAX_LOOP loop.  */
 };
 
-#define LIM_DATA(STMT) (TREE_CODE (STMT) == PHI_NODE \
-                       ? NULL \
-                       : (struct lim_aux_data *) (stmt_ann (STMT)->common.aux))
+/* Maps statements to their lim_aux_data.  */
+
+static struct pointer_map_t *lim_aux_data_map;
 
 /* Description of a memory reference location.  */
 
 typedef struct mem_ref_loc
 {
   tree *ref;                   /* The reference itself.  */
-  tree stmt;                   /* The statement in that it occurs.  */
+  gimple stmt;                 /* The statement in that it occurs.  */
 } *mem_ref_loc_p;
 
 DEF_VEC_P(mem_ref_loc_p);
@@ -203,6 +203,51 @@ static bool ref_indep_loop_p (struct loop *, mem_ref_p);
    block will be executed.  */
 #define ALWAYS_EXECUTED_IN(BB) ((struct loop *) (BB)->aux)
 
+static struct lim_aux_data *
+init_lim_data (gimple stmt)
+{
+  void **p = pointer_map_insert (lim_aux_data_map, stmt);
+
+  *p = XCNEW (struct lim_aux_data);
+  return (struct lim_aux_data *) *p;
+}
+
+static struct lim_aux_data *
+get_lim_data (gimple stmt)
+{
+  void **p = pointer_map_contains (lim_aux_data_map, stmt);
+  if (!p)
+    return NULL;
+
+  return (struct lim_aux_data *) *p;
+}
+
+/* Releases the memory occupied by DATA.  */
+
+static void
+free_lim_aux_data (struct lim_aux_data *data)
+{
+  struct depend *dep, *next;
+
+  for (dep = data->depends; dep; dep = next)
+    {
+      next = dep->next;
+      free (dep);
+    }
+  free (data);
+}
+
+static void
+clear_lim_data (gimple stmt)
+{
+  void **p = pointer_map_contains (lim_aux_data_map, stmt);
+  if (!p)
+    return;
+
+  free_lim_aux_data ((struct lim_aux_data *) *p);
+  *p = NULL;
+}
+
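
These helpers replace the old LIM_DATA statement-annotation macro with a pointer_map keyed by the gimple statement. A sketch of the intended lifecycle, pieced together from the hunks here and in tree_ssa_lim_initialize/finalize below; the wrapper function is illustrative only and not part of the patch:

/* Illustrative only: per-statement aux data now lives in a pointer_map
   keyed by the gimple statement, instead of stmt_ann ()->common.aux.  */
static void
lim_aux_data_example (gimple stmt)
{
  struct lim_aux_data *data;

  lim_aux_data_map = pointer_map_create ();	/* tree_ssa_lim_initialize */

  data = init_lim_data (stmt);			/* allocate and register */
  data->cost = 1;

  data = get_lim_data (stmt);			/* NULL if never registered */
  if (data)
    clear_lim_data (stmt);			/* free and unregister */

  pointer_map_destroy (lim_aux_data_map);	/* tree_ssa_lim_finalize */
}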
 /* Calls CBCK for each index in memory reference ADDR_P.  There are two
    kinds of situations handled; in each of these cases, the memory reference
    and DATA are passed to the callback:
@@ -301,46 +346,32 @@ for_each_index (tree *addr_p, bool (*cbck) (tree, tree *, void *), void *data)
    Otherwise return MOVE_IMPOSSIBLE.  */
 
 enum move_pos
-movement_possibility (tree stmt)
+movement_possibility (gimple stmt)
 {
-  tree lhs, rhs;
+  tree lhs;
+  enum move_pos ret = MOVE_POSSIBLE;
 
   if (flag_unswitch_loops
-      && TREE_CODE (stmt) == COND_EXPR)
+      && gimple_code (stmt) == GIMPLE_COND)
     {
       /* If we perform unswitching, force the operands of the invariant
         condition to be moved out of the loop.  */
       return MOVE_POSSIBLE;
     }
 
-  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+  if (gimple_get_lhs (stmt) == NULL_TREE)
     return MOVE_IMPOSSIBLE;
 
   if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_DEFS))
     return MOVE_IMPOSSIBLE;
 
-  if (stmt_ends_bb_p (stmt))
-    return MOVE_IMPOSSIBLE;
-
-  if (stmt_ann (stmt)->has_volatile_ops)
-    return MOVE_IMPOSSIBLE;
-
-  lhs = GIMPLE_STMT_OPERAND (stmt, 0);
-  if (TREE_CODE (lhs) == SSA_NAME
-      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
+  if (stmt_ends_bb_p (stmt)
+      || gimple_has_volatile_ops (stmt)
+      || gimple_has_side_effects (stmt)
+      || stmt_could_throw_p (stmt))
     return MOVE_IMPOSSIBLE;
 
-  rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-
-  if (TREE_SIDE_EFFECTS (rhs)
-      || tree_could_throw_p (rhs))
-    return MOVE_IMPOSSIBLE;
-
-  if (TREE_CODE (lhs) != SSA_NAME
-      || tree_could_trap_p (rhs))
-    return MOVE_PRESERVE_EXECUTION;
-
-  if (get_call_expr_in (stmt))
+  if (is_gimple_call (stmt))
     {
       /* While pure or const call is guaranteed to have no side effects, we
         cannot move it arbitrarily.  Consider code like
@@ -360,9 +391,23 @@ movement_possibility (tree stmt)
         invalid arguments, moving out a function call that is not executed
         may cause performance regressions in case the call is costly and
         not executed at all.  */
-      return MOVE_PRESERVE_EXECUTION;
+      ret = MOVE_PRESERVE_EXECUTION;
+      lhs = gimple_call_lhs (stmt);
     }
-  return MOVE_POSSIBLE;
+  else if (is_gimple_assign (stmt))
+    lhs = gimple_assign_lhs (stmt);
+  else
+    return MOVE_IMPOSSIBLE;
+
+  if (TREE_CODE (lhs) == SSA_NAME
+      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
+    return MOVE_IMPOSSIBLE;
+
+  if (TREE_CODE (lhs) != SSA_NAME
+      || gimple_could_trap_p (stmt))
+    return MOVE_PRESERVE_EXECUTION;
+
+  return ret;
 }
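
movement_possibility now reads the lhs uniformly through gimple_get_lhs and distinguishes calls from assignments with predicates instead of tree codes. A hedged sketch of that classification, not part of the patch; the helper name is illustrative:

/* Illustrative only: return the SSA name defined by STMT if it is a
   register-defining assignment or call, else NULL_TREE.  */
static tree
defined_ssa_name (gimple stmt)
{
  tree lhs = gimple_get_lhs (stmt);

  if (lhs == NULL_TREE
      || TREE_CODE (lhs) != SSA_NAME
      || (!is_gimple_assign (stmt) && !is_gimple_call (stmt)))
    return NULL_TREE;

  return lhs;
}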
 
 /* Suppose that operand DEF is used inside the LOOP.  Returns the outermost
@@ -373,23 +418,31 @@ movement_possibility (tree stmt)
 static struct loop *
 outermost_invariant_loop (tree def, struct loop *loop)
 {
-  tree def_stmt;
+  gimple def_stmt;
   basic_block def_bb;
   struct loop *max_loop;
+  struct lim_aux_data *lim_data;
 
-  if (TREE_CODE (def) != SSA_NAME)
+  if (!def)
     return superloop_at_depth (loop, 1);
 
+  if (TREE_CODE (def) != SSA_NAME)
+    {
+      gcc_assert (is_gimple_min_invariant (def));
+      return superloop_at_depth (loop, 1);
+    }
+
   def_stmt = SSA_NAME_DEF_STMT (def);
-  def_bb = bb_for_stmt (def_stmt);
+  def_bb = gimple_bb (def_stmt);
   if (!def_bb)
     return superloop_at_depth (loop, 1);
 
   max_loop = find_common_loop (loop, def_bb->loop_father);
 
-  if (LIM_DATA (def_stmt) && LIM_DATA (def_stmt)->max_loop)
+  lim_data = get_lim_data (def_stmt);
+  if (lim_data != NULL && lim_data->max_loop != NULL)
     max_loop = find_common_loop (max_loop,
-                                loop_outer (LIM_DATA (def_stmt)->max_loop));
+                                loop_outer (lim_data->max_loop));
   if (max_loop == loop)
     return NULL;
   max_loop = superloop_at_depth (loop, loop_depth (max_loop) + 1);
@@ -397,42 +450,6 @@ outermost_invariant_loop (tree def, struct loop *loop)
   return max_loop;
 }
 
-/* Returns the outermost superloop of LOOP in that the expression EXPR is
-   invariant.  */
-
-static struct loop *
-outermost_invariant_loop_expr (tree expr, struct loop *loop)
-{
-  enum tree_code_class codeclass = TREE_CODE_CLASS (TREE_CODE (expr));
-  unsigned i, nops;
-  struct loop *max_loop = superloop_at_depth (loop, 1), *aloop;
-
-  if (TREE_CODE (expr) == SSA_NAME
-      || TREE_CODE (expr) == INTEGER_CST
-      || is_gimple_min_invariant (expr))
-    return outermost_invariant_loop (expr, loop);
-
-  if (codeclass != tcc_unary
-      && codeclass != tcc_binary
-      && codeclass != tcc_expression
-      && codeclass != tcc_vl_exp
-      && codeclass != tcc_comparison)
-    return NULL;
-
-  nops = TREE_OPERAND_LENGTH (expr);
-  for (i = 0; i < nops; i++)
-    {
-      aloop = outermost_invariant_loop_expr (TREE_OPERAND (expr, i), loop);
-      if (!aloop)
-       return NULL;
-
-      if (flow_loop_nested_p (max_loop, aloop))
-       max_loop = aloop;
-    }
-
-  return max_loop;
-}
-
 /* DATA is a structure containing information associated with a statement
    inside LOOP.  DEF is one of the operands of this statement.
    
@@ -449,10 +466,11 @@ static bool
 add_dependency (tree def, struct lim_aux_data *data, struct loop *loop,
                bool add_cost)
 {
-  tree def_stmt = SSA_NAME_DEF_STMT (def);
-  basic_block def_bb = bb_for_stmt (def_stmt);
+  gimple def_stmt = SSA_NAME_DEF_STMT (def);
+  basic_block def_bb = gimple_bb (def_stmt);
   struct loop *max_loop;
   struct depend *dep;
+  struct lim_aux_data *def_data;
 
   if (!def_bb)
     return true;
@@ -464,7 +482,8 @@ add_dependency (tree def, struct lim_aux_data *data, struct loop *loop,
   if (flow_loop_nested_p (data->max_loop, max_loop))
     data->max_loop = max_loop;
 
-  if (!LIM_DATA (def_stmt))
+  def_data = get_lim_data (def_stmt);
+  if (!def_data)
     return true;
 
   if (add_cost
@@ -473,7 +492,7 @@ add_dependency (tree def, struct lim_aux_data *data, struct loop *loop,
         on it, we will be able to avoid creating a new register for
         it (since it will be only used in these dependent invariants).  */
       && def_bb->loop_father == loop)
-    data->cost += LIM_DATA (def_stmt)->cost;
+    data->cost += def_data->cost;
 
   dep = XNEW (struct depend);
   dep->stmt = def_stmt;
@@ -488,36 +507,39 @@ add_dependency (tree def, struct lim_aux_data *data, struct loop *loop,
    values.  */
 
 static unsigned
-stmt_cost (tree stmt)
+stmt_cost (gimple stmt)
 {
-  tree rhs;
+  tree fndecl;
   unsigned cost = 1;
 
   /* Always try to create possibilities for unswitching.  */
-  if (TREE_CODE (stmt) == COND_EXPR)
+  if (gimple_code (stmt) == GIMPLE_COND)
     return LIM_EXPENSIVE;
 
-  rhs = GENERIC_TREE_OPERAND (stmt, 1);
-
   /* Hoisting memory references out should almost surely be a win.  */
-  if (stmt_references_memory_p (stmt))
+  if (gimple_references_memory_p (stmt))
     cost += 20;
 
-  switch (TREE_CODE (rhs))
+  if (is_gimple_call (stmt))
     {
-    case CALL_EXPR:
       /* We should be hoisting calls if possible.  */
 
       /* Unless the call is a builtin_constant_p; this always folds to a
         constant, so moving it is useless.  */
-      rhs = get_callee_fndecl (rhs);
-      if (DECL_BUILT_IN_CLASS (rhs) == BUILT_IN_NORMAL
-         && DECL_FUNCTION_CODE (rhs) == BUILT_IN_CONSTANT_P)
+      fndecl = gimple_call_fndecl (stmt);
+      if (fndecl
+         && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
+         && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CONSTANT_P)
        return 0;
 
-      cost += 20;
-      break;
+      return cost + 20;
+    }
+
+  if (gimple_code (stmt) != GIMPLE_ASSIGN)
+    return cost;
 
+  switch (gimple_assign_rhs_code (stmt))
+    {
     case MULT_EXPR:
     case TRUNC_DIV_EXPR:
     case CEIL_DIV_EXPR:
@@ -575,27 +597,31 @@ outermost_indep_loop (struct loop *outer, struct loop *loop, mem_ref_p ref)
    it is a store or load.  Otherwise, returns NULL.  */
 
 static tree *
-simple_mem_ref_in_stmt (tree stmt, bool *is_store)
+simple_mem_ref_in_stmt (gimple stmt, bool *is_store)
 {
-  tree *lhs, *rhs;
+  tree *lhs;
+  enum tree_code code;
 
   /* Recognize MEM = (SSA_NAME | invariant) and SSA_NAME = MEM patterns.  */
-  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+  if (gimple_code (stmt) != GIMPLE_ASSIGN)
     return NULL;
 
-  lhs = &GIMPLE_STMT_OPERAND (stmt, 0);
-  rhs = &GIMPLE_STMT_OPERAND (stmt, 1);
+  code = gimple_assign_rhs_code (stmt);
+
+  lhs = gimple_assign_lhs_ptr (stmt);
 
   if (TREE_CODE (*lhs) == SSA_NAME)
     {
-      if (!is_gimple_addressable (*rhs))
+      if (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
+         || !is_gimple_addressable (gimple_assign_rhs1 (stmt)))
        return NULL;
 
       *is_store = false;
-      return rhs;
+      return gimple_assign_rhs1_ptr (stmt);
     }
-  else if (TREE_CODE (*rhs) == SSA_NAME
-          || is_gimple_min_invariant (*rhs))
+  else if (code == SSA_NAME
+          || (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS
+              && is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
     {
       *is_store = true;
       return lhs;
@@ -607,7 +633,7 @@ simple_mem_ref_in_stmt (tree stmt, bool *is_store)
 /* Returns the memory reference contained in STMT.  */
 
 static mem_ref_p
-mem_ref_in_stmt (tree stmt)
+mem_ref_in_stmt (gimple stmt)
 {
   bool store;
   tree *mem = simple_mem_ref_in_stmt (stmt, &store);
@@ -636,12 +662,12 @@ mem_ref_in_stmt (tree stmt)
    is defined in, and true otherwise.  */
 
 static bool
-determine_max_movement (tree stmt, bool must_preserve_exec)
+determine_max_movement (gimple stmt, bool must_preserve_exec)
 {
-  basic_block bb = bb_for_stmt (stmt);
+  basic_block bb = gimple_bb (stmt);
   struct loop *loop = bb->loop_father;
   struct loop *level;
-  struct lim_aux_data *lim_data = LIM_DATA (stmt);
+  struct lim_aux_data *lim_data = get_lim_data (stmt);
   tree val;
   ssa_op_iter iter;
   
@@ -687,24 +713,25 @@ determine_max_movement (tree stmt, bool must_preserve_exec)
    operands) is hoisted at least out of the loop LEVEL.  */
 
 static void
-set_level (tree stmt, struct loop *orig_loop, struct loop *level)
+set_level (gimple stmt, struct loop *orig_loop, struct loop *level)
 {
-  struct loop *stmt_loop = bb_for_stmt (stmt)->loop_father;
+  struct loop *stmt_loop = gimple_bb (stmt)->loop_father;
   struct depend *dep;
+  struct lim_aux_data *lim_data;
 
   stmt_loop = find_common_loop (orig_loop, stmt_loop);
-  if (LIM_DATA (stmt) && LIM_DATA (stmt)->tgt_loop)
+  lim_data = get_lim_data (stmt);
+  if (lim_data != NULL && lim_data->tgt_loop != NULL)
     stmt_loop = find_common_loop (stmt_loop,
-                                 loop_outer (LIM_DATA (stmt)->tgt_loop));
+                                 loop_outer (lim_data->tgt_loop));
   if (flow_loop_nested_p (stmt_loop, level))
     return;
 
-  gcc_assert (LIM_DATA (stmt));
-  gcc_assert (level == LIM_DATA (stmt)->max_loop
-             || flow_loop_nested_p (LIM_DATA (stmt)->max_loop, level));
+  gcc_assert (level == lim_data->max_loop
+             || flow_loop_nested_p (lim_data->max_loop, level));
 
-  LIM_DATA (stmt)->tgt_loop = level;
-  for (dep = LIM_DATA (stmt)->depends; dep; dep = dep->next)
+  lim_data->tgt_loop = level;
+  for (dep = lim_data->depends; dep; dep = dep->next)
     set_level (dep->stmt, orig_loop, level);
 }
 
@@ -713,70 +740,50 @@ set_level (tree stmt, struct loop *orig_loop, struct loop *level)
    information to set it more sanely.  */
 
 static void
-set_profitable_level (tree stmt)
+set_profitable_level (gimple stmt)
 {
-  set_level (stmt, bb_for_stmt (stmt)->loop_father, LIM_DATA (stmt)->max_loop);
+  set_level (stmt, gimple_bb (stmt)->loop_father, get_lim_data (stmt)->max_loop);
 }
 
-/* Returns true if STMT is not a pure call.  */
+/* Returns true if STMT is a call that has side effects.  */
 
 static bool
-nonpure_call_p (tree stmt)
+nonpure_call_p (gimple stmt)
 {
-  tree call = get_call_expr_in (stmt);
-
-  if (!call)
+  if (gimple_code (stmt) != GIMPLE_CALL)
     return false;
 
-  return TREE_SIDE_EFFECTS (call) != 0;
-}
-
-/* Releases the memory occupied by DATA.  */
-
-static void
-free_lim_aux_data (struct lim_aux_data *data)
-{
-  struct depend *dep, *next;
-
-  for (dep = data->depends; dep; dep = next)
-    {
-      next = dep->next;
-      free (dep);
-    }
-  free (data);
+  return gimple_has_side_effects (stmt);
 }
 
 /* Rewrite a/b to a*(1/b).  Return the invariant stmt to process.  */
 
-static tree
-rewrite_reciprocal (block_stmt_iterator *bsi)
+static gimple
+rewrite_reciprocal (gimple_stmt_iterator *bsi)
 {
-  tree stmt, lhs, rhs, stmt1, stmt2, var, name, tmp;
+  gimple stmt, stmt1, stmt2;
+  tree var, name, lhs, type;
 
-  stmt = bsi_stmt (*bsi);
-  lhs = GENERIC_TREE_OPERAND (stmt, 0);
-  rhs = GENERIC_TREE_OPERAND (stmt, 1);
+  stmt = gsi_stmt (*bsi);
+  lhs = gimple_assign_lhs (stmt);
+  type = TREE_TYPE (lhs);
 
-  /* stmt must be GIMPLE_MODIFY_STMT.  */
-  var = create_tmp_var (TREE_TYPE (rhs), "reciptmp");
+  var = create_tmp_var (type, "reciptmp");
   add_referenced_var (var);
 
-  tmp = build2 (RDIV_EXPR, TREE_TYPE (rhs),
-               build_real (TREE_TYPE (rhs), dconst1),
-               TREE_OPERAND (rhs, 1));
-  stmt1 = build_gimple_modify_stmt (var, tmp);
+  stmt1 = gimple_build_assign_with_ops (RDIV_EXPR,
+               var, build_real (type, dconst1), gimple_assign_rhs2 (stmt));
   name = make_ssa_name (var, stmt1);
-  GIMPLE_STMT_OPERAND (stmt1, 0) = name;
-  tmp = build2 (MULT_EXPR, TREE_TYPE (rhs),
-               name, TREE_OPERAND (rhs, 0));
-  stmt2 = build_gimple_modify_stmt (lhs, tmp);
+  gimple_assign_set_lhs (stmt1, name);
+
+  stmt2 = gimple_build_assign_with_ops (MULT_EXPR, lhs, name,
+                                       gimple_assign_rhs1 (stmt));
 
   /* Replace division stmt with reciprocal and multiply stmts.
      The multiply stmt is not invariant, so update iterator
      and avoid rescanning.  */
-  bsi_replace (bsi, stmt1, true);
-  bsi_insert_after (bsi, stmt2, BSI_NEW_STMT);
-  SSA_NAME_DEF_STMT (lhs) = stmt2;
+  gsi_replace (bsi, stmt1, true);
+  gsi_insert_after (bsi, stmt2, GSI_NEW_STMT);
 
   /* Continue processing with invariant reciprocal statement.  */
   return stmt1;
@@ -785,82 +792,79 @@ rewrite_reciprocal (block_stmt_iterator *bsi)
 /* Check if the pattern at *BSI is a bittest of the form
    (A >> B) & 1 != 0 and in this case rewrite it to A & (1 << B) != 0.  */
 
-static tree
-rewrite_bittest (block_stmt_iterator *bsi)
+static gimple
+rewrite_bittest (gimple_stmt_iterator *bsi)
 {
-  tree stmt, lhs, rhs, var, name, use_stmt, stmt1, stmt2, t;
+  gimple stmt, use_stmt, stmt1, stmt2;
+  tree lhs, var, name, t, a, b;
   use_operand_p use;
 
-  stmt = bsi_stmt (*bsi);
-  lhs = GENERIC_TREE_OPERAND (stmt, 0);
-  rhs = GENERIC_TREE_OPERAND (stmt, 1);
+  stmt = gsi_stmt (*bsi);
+  lhs = gimple_assign_lhs (stmt);
 
   /* Verify that the single use of lhs is a comparison against zero.  */
   if (TREE_CODE (lhs) != SSA_NAME
       || !single_imm_use (lhs, &use, &use_stmt)
-      || TREE_CODE (use_stmt) != COND_EXPR)
+      || gimple_code (use_stmt) != GIMPLE_COND)
     return stmt;
-  t = COND_EXPR_COND (use_stmt);
-  if (TREE_OPERAND (t, 0) != lhs
-      || (TREE_CODE (t) != NE_EXPR
-         && TREE_CODE (t) != EQ_EXPR)
-      || !integer_zerop (TREE_OPERAND (t, 1)))
+  if (gimple_cond_lhs (use_stmt) != lhs
+      || (gimple_cond_code (use_stmt) != NE_EXPR
+         && gimple_cond_code (use_stmt) != EQ_EXPR)
+      || !integer_zerop (gimple_cond_rhs (use_stmt)))
     return stmt;
 
   /* Get at the operands of the shift.  The rhs is TMP1 & 1.  */
-  stmt1 = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
-  if (TREE_CODE (stmt1) != GIMPLE_MODIFY_STMT)
+  stmt1 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
+  if (gimple_code (stmt1) != GIMPLE_ASSIGN)
     return stmt;
 
   /* There is a conversion in between possibly inserted by fold.  */
-  t = GIMPLE_STMT_OPERAND (stmt1, 1);
-  if (CONVERT_EXPR_P (t))
+  if (gimple_assign_rhs_code (stmt1) == NOP_EXPR
+      || gimple_assign_rhs_code (stmt1) == CONVERT_EXPR)
     {
-      t = TREE_OPERAND (t, 0);
+      t = gimple_assign_rhs1 (stmt1);
       if (TREE_CODE (t) != SSA_NAME
          || !has_single_use (t))
        return stmt;
       stmt1 = SSA_NAME_DEF_STMT (t);
-      if (TREE_CODE (stmt1) != GIMPLE_MODIFY_STMT)
+      if (gimple_code (stmt1) != GIMPLE_ASSIGN)
        return stmt;
-      t = GIMPLE_STMT_OPERAND (stmt1, 1);
     }
 
   /* Verify that B is loop invariant but A is not.  Verify that with
      all the stmt walking we are still in the same loop.  */
-  if (TREE_CODE (t) == RSHIFT_EXPR
-      && loop_containing_stmt (stmt1) == loop_containing_stmt (stmt)
-      && outermost_invariant_loop_expr (TREE_OPERAND (t, 1),
-                                        loop_containing_stmt (stmt1)) != NULL
-      && outermost_invariant_loop_expr (TREE_OPERAND (t, 0),
-                                        loop_containing_stmt (stmt1)) == NULL)
-    {
-      tree a = TREE_OPERAND (t, 0);
-      tree b = TREE_OPERAND (t, 1);
+  if (gimple_assign_rhs_code (stmt1) != RSHIFT_EXPR
+      || loop_containing_stmt (stmt1) != loop_containing_stmt (stmt))
+    return stmt;
 
+  a = gimple_assign_rhs1 (stmt1);
+  b = gimple_assign_rhs2 (stmt1);
+
+  if (outermost_invariant_loop (b, loop_containing_stmt (stmt1)) != NULL
+      && outermost_invariant_loop (a, loop_containing_stmt (stmt1)) == NULL)
+    {
       /* 1 << B */
       var = create_tmp_var (TREE_TYPE (a), "shifttmp");
       add_referenced_var (var);
       t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (a),
                       build_int_cst (TREE_TYPE (a), 1), b);
-      stmt1 = build_gimple_modify_stmt (var, t);
+      stmt1 = gimple_build_assign (var, t);
       name = make_ssa_name (var, stmt1);
-      GIMPLE_STMT_OPERAND (stmt1, 0) = name;
+      gimple_assign_set_lhs (stmt1, name);
 
       /* A & (1 << B) */
       t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (a), a, name);
-      stmt2 = build_gimple_modify_stmt (var, t);
+      stmt2 = gimple_build_assign (var, t);
       name = make_ssa_name (var, stmt2);
-      GIMPLE_STMT_OPERAND (stmt2, 0) = name;
+      gimple_assign_set_lhs (stmt2, name);
 
       /* Replace the SSA_NAME we compare against zero.  Adjust
         the type of zero accordingly.  */
       SET_USE (use, name);
-      TREE_OPERAND (COND_EXPR_COND (use_stmt), 1)
-       = build_int_cst_type (TREE_TYPE (name), 0);
+      gimple_cond_set_rhs (use_stmt, build_int_cst_type (TREE_TYPE (name), 0));
 
-      bsi_insert_before (bsi, stmt1, BSI_SAME_STMT);
-      bsi_replace (bsi, stmt2, true);
+      gsi_insert_before (bsi, stmt1, GSI_SAME_STMT);
+      gsi_replace (bsi, stmt2, true);
 
       return stmt1;
     }
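
rewrite_reciprocal and rewrite_bittest now build statements directly as tuples instead of GENERIC modify-stmts. A minimal sketch of that idiom, assuming the gimple_build_assign_with_ops/make_ssa_name API used above; the helper and the "masktmp" name are illustrative, not from the patch:

/* Illustrative only: emit  name = a & b  in front of GSI and return the
   new SSA name.  */
static tree
emit_bit_and_before (gimple_stmt_iterator *gsi, tree a, tree b)
{
  tree var, name;
  gimple stmt;

  var = create_tmp_var (TREE_TYPE (a), "masktmp");
  add_referenced_var (var);

  /* Tuples carry the code and operands directly, so no GENERIC tree for
     the RHS is built first.  */
  stmt = gimple_build_assign_with_ops (BIT_AND_EXPR, var, a, b);
  name = make_ssa_name (var, stmt);
  gimple_assign_set_lhs (stmt, name);

  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
  return name;
}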
@@ -878,10 +882,11 @@ determine_invariantness_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED,
                              basic_block bb)
 {
   enum move_pos pos;
-  block_stmt_iterator bsi;
-  tree stmt, rhs;
+  gimple_stmt_iterator bsi;
+  gimple stmt;
   bool maybe_never = ALWAYS_EXECUTED_IN (bb) == NULL;
   struct loop *outermost = ALWAYS_EXECUTED_IN (bb);
+  struct lim_aux_data *lim_data;
 
   if (!loop_outer (bb->loop_father))
     return;
@@ -890,9 +895,9 @@ determine_invariantness_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED,
     fprintf (dump_file, "Basic block %d (loop %d -- depth %d):\n\n",
             bb->index, bb->loop_father->num, loop_depth (bb->loop_father));
 
-  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+  for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
     {
-      stmt = bsi_stmt (bsi);
+      stmt = gsi_stmt (bsi);
 
       pos = movement_possibility (stmt);
       if (pos == MOVE_IMPOSSIBLE)
@@ -906,61 +911,63 @@ determine_invariantness_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED,
             store-motion work.  */
          else if (stmt_makes_single_store (stmt))
            {
-             stmt_ann (stmt)->common.aux
-               = xcalloc (1, sizeof (struct lim_aux_data));
-             LIM_DATA (stmt)->always_executed_in = outermost;
+             struct lim_aux_data *lim_data = init_lim_data (stmt);
+             lim_data->always_executed_in = outermost;
            }
          continue;
        }
 
-      if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
+      if (is_gimple_assign (stmt)
+         && (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
+             == GIMPLE_BINARY_RHS))
        {
-         rhs = GIMPLE_STMT_OPERAND (stmt, 1);
+         tree op0 = gimple_assign_rhs1 (stmt);
+         tree op1 = gimple_assign_rhs2 (stmt);
+         struct loop *ol1 = outermost_invariant_loop (op1,
+                                       loop_containing_stmt (stmt));
 
          /* If divisor is invariant, convert a/b to a*(1/b), allowing reciprocal
             to be hoisted out of loop, saving expensive divide.  */
          if (pos == MOVE_POSSIBLE
-             && TREE_CODE (rhs) == RDIV_EXPR
+             && gimple_assign_rhs_code (stmt) == RDIV_EXPR
              && flag_unsafe_math_optimizations
              && !flag_trapping_math
-             && outermost_invariant_loop_expr (TREE_OPERAND (rhs, 1),
-                                               loop_containing_stmt (stmt)) != NULL
-             && outermost_invariant_loop_expr (rhs,
-                                               loop_containing_stmt (stmt)) == NULL)
+             && ol1 != NULL
+             && outermost_invariant_loop (op0, ol1) == NULL)
            stmt = rewrite_reciprocal (&bsi);
 
          /* If the shift count is invariant, convert (A >> B) & 1 to
             A & (1 << B) allowing the bit mask to be hoisted out of the loop
             saving an expensive shift.  */
          if (pos == MOVE_POSSIBLE
-             && TREE_CODE (rhs) == BIT_AND_EXPR
-             && integer_onep (TREE_OPERAND (rhs, 1))
-             && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
-             && has_single_use (TREE_OPERAND (rhs, 0)))
+             && gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
+             && integer_onep (op1)
+             && TREE_CODE (op0) == SSA_NAME
+             && has_single_use (op0))
            stmt = rewrite_bittest (&bsi);
        }
 
-      stmt_ann (stmt)->common.aux = xcalloc (1, sizeof (struct lim_aux_data));
-      LIM_DATA (stmt)->always_executed_in = outermost;
+      lim_data = init_lim_data (stmt);
+      lim_data->always_executed_in = outermost;
 
       if (maybe_never && pos == MOVE_PRESERVE_EXECUTION)
        continue;
 
       if (!determine_max_movement (stmt, pos == MOVE_PRESERVE_EXECUTION))
        {
-         LIM_DATA (stmt)->max_loop = NULL;
+         lim_data->max_loop = NULL;
          continue;
        }
 
       if (dump_file && (dump_flags & TDF_DETAILS))
        {
-         print_generic_stmt_indented (dump_file, stmt, 0, 2);
+         print_gimple_stmt (dump_file, stmt, 2, 0);
          fprintf (dump_file, "  invariant up to level %d, cost %d.\n\n",
-                  loop_depth (LIM_DATA (stmt)->max_loop),
-                  LIM_DATA (stmt)->cost);
+                  loop_depth (lim_data->max_loop),
+                  lim_data->cost);
        }
 
-      if (LIM_DATA (stmt)->cost >= LIM_EXPENSIVE)
+      if (lim_data->cost >= LIM_EXPENSIVE)
        set_profitable_level (stmt);
     }
 }
@@ -993,50 +1000,51 @@ move_computations_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED,
                        basic_block bb)
 {
   struct loop *level;
-  block_stmt_iterator bsi;
-  tree stmt;
+  gimple_stmt_iterator bsi;
+  gimple stmt;
   unsigned cost = 0;
+  struct lim_aux_data *lim_data;
 
   if (!loop_outer (bb->loop_father))
     return;
 
-  for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
+  for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); )
     {
-      stmt = bsi_stmt (bsi);
+      stmt = gsi_stmt (bsi);
 
-      if (!LIM_DATA (stmt))
+      lim_data = get_lim_data (stmt);
+      if (lim_data == NULL)
        {
-         bsi_next (&bsi);
+         gsi_next (&bsi);
          continue;
        }
 
-      cost = LIM_DATA (stmt)->cost;
-      level = LIM_DATA (stmt)->tgt_loop;
-      free_lim_aux_data (LIM_DATA (stmt));
-      stmt_ann (stmt)->common.aux = NULL;
+      cost = lim_data->cost;
+      level = lim_data->tgt_loop;
+      clear_lim_data (stmt);
 
       if (!level)
        {
-         bsi_next (&bsi);
+         gsi_next (&bsi);
          continue;
        }
 
       /* We do not really want to move conditionals out of the loop; we just
         placed it here to force its operands to be moved if necessary.  */
-      if (TREE_CODE (stmt) == COND_EXPR)
+      if (gimple_code (stmt) == GIMPLE_COND)
        continue;
 
       if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Moving statement\n");
-         print_generic_stmt (dump_file, stmt, 0);
+         print_gimple_stmt (dump_file, stmt, 0, 0);
          fprintf (dump_file, "(cost %u) out of loop %d.\n\n",
                   cost, level->num);
        }
 
       mark_virtual_ops_for_renaming (stmt);
-      bsi_insert_on_edge (loop_preheader_edge (level), stmt);
-      bsi_remove (&bsi, false);
+      gsi_insert_on_edge (loop_preheader_edge (level), stmt);
+      gsi_remove (&bsi, false);
     }
 }
 
@@ -1056,7 +1064,7 @@ move_computations (void)
   walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
   fini_walk_dominator_tree (&walk_data);
 
-  bsi_commit_edge_inserts ();
+  gsi_commit_edge_inserts ();
   if (need_ssa_update_p ())
     rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
 }
@@ -1067,20 +1075,20 @@ move_computations (void)
 static bool
 may_move_till (tree ref, tree *index, void *data)
 {
-  struct loop *loop = (struct loop*) data, *max_loop;
+  struct loop *loop = (struct loop *) data, *max_loop;
 
   /* If REF is an array reference, check also that the step and the lower
      bound is invariant in LOOP.  */
   if (TREE_CODE (ref) == ARRAY_REF)
     {
-      tree step = array_ref_element_size (ref);
-      tree lbound = array_ref_low_bound (ref);
+      tree step = TREE_OPERAND (ref, 3);
+      tree lbound = TREE_OPERAND (ref, 2);
 
-      max_loop = outermost_invariant_loop_expr (step, loop);
+      max_loop = outermost_invariant_loop (step, loop);
       if (!max_loop)
        return false;
 
-      max_loop = outermost_invariant_loop_expr (lbound, loop);
+      max_loop = outermost_invariant_loop (lbound, loop);
       if (!max_loop)
        return false;
     }
@@ -1092,35 +1100,25 @@ may_move_till (tree ref, tree *index, void *data)
   return true;
 }
 
-/* Forces statements defining (invariant) SSA names in expression EXPR to be
+/* If OP is SSA NAME, force the statement that defines it to be
    moved out of the LOOP.  ORIG_LOOP is the loop in that EXPR is used.  */
 
 static void
-force_move_till_expr (tree expr, struct loop *orig_loop, struct loop *loop)
+force_move_till_op (tree op, struct loop *orig_loop, struct loop *loop)
 {
-  enum tree_code_class codeclass = TREE_CODE_CLASS (TREE_CODE (expr));
-  unsigned i, nops;
-
-  if (TREE_CODE (expr) == SSA_NAME)
-    {
-      tree stmt = SSA_NAME_DEF_STMT (expr);
-      if (IS_EMPTY_STMT (stmt))
-       return;
+  gimple stmt;
 
-      set_level (stmt, orig_loop, loop);
-      return;
-    }
+  if (!op
+      || is_gimple_min_invariant (op))
+    return;
 
-  if (codeclass != tcc_unary
-      && codeclass != tcc_binary
-      && codeclass != tcc_expression
-      && codeclass != tcc_vl_exp
-      && codeclass != tcc_comparison)
+  gcc_assert (TREE_CODE (op) == SSA_NAME);
+      
+  stmt = SSA_NAME_DEF_STMT (op);
+  if (gimple_nop_p (stmt))
     return;
 
-  nops = TREE_OPERAND_LENGTH (expr);
-  for (i = 0; i < nops; i++)
-    force_move_till_expr (TREE_OPERAND (expr, i), orig_loop, loop);
+  set_level (stmt, orig_loop, loop);
 }
 
 /* Forces statement defining invariants in REF (and *INDEX) to be moved out of
@@ -1136,26 +1134,18 @@ struct fmt_data
 static bool
 force_move_till (tree ref, tree *index, void *data)
 {
-  tree stmt;
   struct fmt_data *fmt_data = (struct fmt_data *) data;
 
   if (TREE_CODE (ref) == ARRAY_REF)
     {
-      tree step = array_ref_element_size (ref);
-      tree lbound = array_ref_low_bound (ref);
+      tree step = TREE_OPERAND (ref, 3);
+      tree lbound = TREE_OPERAND (ref, 2);
 
-      force_move_till_expr (step, fmt_data->orig_loop, fmt_data->loop);
-      force_move_till_expr (lbound, fmt_data->orig_loop, fmt_data->loop);
+      force_move_till_op (step, fmt_data->orig_loop, fmt_data->loop);
+      force_move_till_op (lbound, fmt_data->orig_loop, fmt_data->loop);
     }
 
-  if (TREE_CODE (*index) != SSA_NAME)
-    return true;
-
-  stmt = SSA_NAME_DEF_STMT (*index);
-  if (IS_EMPTY_STMT (stmt))
-    return true;
-
-  set_level (stmt, fmt_data->orig_loop, fmt_data->loop);
+  force_move_till_op (*index, fmt_data->orig_loop, fmt_data->loop);
 
   return true;
 }
@@ -1256,7 +1246,7 @@ mem_ref_locs_alloc (void)
    description REF.  The reference occurs in statement STMT.  */
 
 static void
-record_mem_ref_loc (mem_ref_p ref, struct loop *loop, tree stmt, tree *loc)
+record_mem_ref_loc (mem_ref_p ref, struct loop *loop, gimple stmt, tree *loc)
 {
   mem_ref_loc_p aref = XNEW (struct mem_ref_loc);
   mem_ref_locs_p accs;
@@ -1298,7 +1288,7 @@ mark_ref_stored (mem_ref_p ref, struct loop *loop)
    well.  */
 
 static void
-gather_mem_refs_stmt (struct loop *loop, tree stmt)
+gather_mem_refs_stmt (struct loop *loop, gimple stmt)
 {
   tree *mem = NULL;
   hashval_t hash;
@@ -1358,7 +1348,7 @@ fail:
 static void
 gather_mem_refs_in_loops (void)
 {
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator bsi;
   basic_block bb;
   struct loop *loop;
   loop_iterator li;
@@ -1371,8 +1361,8 @@ gather_mem_refs_in_loops (void)
       if (loop == current_loops->tree_root)
        continue;
 
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
-       gather_mem_refs_stmt (loop, bsi_stmt (bsi));
+      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
+       gather_mem_refs_stmt (loop, gsi_stmt (bsi));
     }
 
   /* Propagate the information about clobbered vops and accessed memory
@@ -1826,9 +1816,10 @@ execute_sm (struct loop *loop, VEC (edge, heap) *exits, mem_ref_p ref)
 {
   tree tmp_var;
   unsigned i;
-  tree load, store;
+  gimple load, store;
   struct fmt_data fmt_data;
   edge ex;
+  struct lim_aux_data *lim_data;
 
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
@@ -1847,19 +1838,19 @@ execute_sm (struct loop *loop, VEC (edge, heap) *exits, mem_ref_p ref)
   rewrite_mem_refs (loop, ref, tmp_var);
 
   /* Emit the load & stores.  */
-  load = build_gimple_modify_stmt (tmp_var, unshare_expr (ref->mem));
-  get_stmt_ann (load)->common.aux = xcalloc (1, sizeof (struct lim_aux_data));
-  LIM_DATA (load)->max_loop = loop;
-  LIM_DATA (load)->tgt_loop = loop;
+  load = gimple_build_assign (tmp_var, unshare_expr (ref->mem));
+  lim_data = init_lim_data (load);
+  lim_data->max_loop = loop;
+  lim_data->tgt_loop = loop;
 
   /* Put this into the latch, so that we are sure it will be processed after
      all dependencies.  */
-  bsi_insert_on_edge (loop_latch_edge (loop), load);
+  gsi_insert_on_edge (loop_latch_edge (loop), load);
 
   for (i = 0; VEC_iterate (edge, exits, i, ex); i++)
     {
-      store = build_gimple_modify_stmt (unshare_expr (ref->mem), tmp_var);
-      bsi_insert_on_edge (ex, store);
+      store = gimple_build_assign (unshare_expr (ref->mem), tmp_var);
+      gsi_insert_on_edge (ex, store);
     }
 }
 
@@ -1895,10 +1886,10 @@ ref_always_accessed_p (struct loop *loop, mem_ref_p ref)
   get_all_locs_in_loop (loop, ref, &locs);
   for (i = 0; VEC_iterate (mem_ref_loc_p, locs, i, loc); i++)
     {
-      if (!LIM_DATA (loc->stmt))
+      if (!get_lim_data (loc->stmt))
        continue;
 
-      must_exec = LIM_DATA (loc->stmt)->always_executed_in;
+      must_exec = get_lim_data (loc->stmt)->always_executed_in;
       if (!must_exec)
        continue;
 
@@ -2135,7 +2126,7 @@ store_motion (void)
     store_motion_loop (loop, sm_executed);
 
   BITMAP_FREE (sm_executed);
-  bsi_commit_edge_inserts ();
+  gsi_commit_edge_inserts ();
 }
 
 /* Fills ALWAYS_EXECUTED_IN information for basic blocks of LOOP, i.e.
@@ -2212,20 +2203,20 @@ static void
 tree_ssa_lim_initialize (void)
 {
   sbitmap contains_call = sbitmap_alloc (last_basic_block);
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator bsi;
   struct loop *loop;
   basic_block bb;
 
   sbitmap_zero (contains_call);
   FOR_EACH_BB (bb)
     {
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        {
-         if (nonpure_call_p (bsi_stmt (bsi)))
+         if (nonpure_call_p (gsi_stmt (bsi)))
            break;
        }
 
-      if (!bsi_end_p (bsi))
+      if (!gsi_end_p (bsi))
        SET_BIT (contains_call, bb->index);
     }
 
@@ -2233,6 +2224,8 @@ tree_ssa_lim_initialize (void)
     fill_always_executed_in (loop, contains_call);
 
   sbitmap_free (contains_call);
+
+  lim_aux_data_map = pointer_map_create ();
 }
 
 /* Cleans up after the invariant motion pass.  */
@@ -2250,6 +2243,8 @@ tree_ssa_lim_finalize (void)
       bb->aux = NULL;
     }
 
+  pointer_map_destroy (lim_aux_data_map);
+
   VEC_free (mem_ref_p, heap, memory_accesses.refs_list);
   htab_delete (memory_accesses.refs);
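
move_computations and store_motion above queue statements on edges and commit them in one pass. A sketch of that pattern, not part of the patch (the helper name is illustrative; bookkeeping such as mark_virtual_ops_for_renaming is omitted):

/* Illustrative only: hoist the statement at *GSI out of LOOP by queueing
   it on the preheader edge; nothing is emitted yet.  */
static void
queue_hoist (struct loop *loop, gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  gsi_insert_on_edge (loop_preheader_edge (loop), stmt);
  gsi_remove (gsi, false);
}

/* After all blocks have been processed:
     gsi_commit_edge_inserts ();  */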
 
index 67af0b3..dc863f8 100644
@@ -1,5 +1,5 @@
 /* Induction variable canonicalization.
-   Copyright (C) 2004, 2005, 2007 Free Software Foundation, Inc.
+   Copyright (C) 2004, 2005, 2007, 2008 Free Software Foundation, Inc.
    
 This file is part of GCC.
    
@@ -72,8 +72,9 @@ static void
 create_canonical_iv (struct loop *loop, edge exit, tree niter)
 {
   edge in;
-  tree cond, type, var;
-  block_stmt_iterator incr_at;
+  tree type, var;
+  gimple cond;
+  gimple_stmt_iterator incr_at;
   enum tree_code cmp;
 
   if (dump_file && (dump_flags & TDF_DETAILS))
@@ -97,16 +98,16 @@ create_canonical_iv (struct loop *loop, edge exit, tree niter)
   niter = fold_build2 (PLUS_EXPR, type,
                       niter,
                       build_int_cst (type, 1));
-  incr_at = bsi_last (in->src);
+  incr_at = gsi_last_bb (in->src);
   create_iv (niter,
             build_int_cst (type, -1),
             NULL_TREE, loop,
             &incr_at, false, NULL, &var);
 
   cmp = (exit->flags & EDGE_TRUE_VALUE) ? EQ_EXPR : NE_EXPR;
-  COND_EXPR_COND (cond) = build2 (cmp, boolean_type_node,
-                                 var,
-                                 build_int_cst (type, 0));
+  gimple_cond_set_code (cond, cmp);
+  gimple_cond_set_lhs (cond, var);
+  gimple_cond_set_rhs (cond, build_int_cst (type, 0));
   update_stmt (cond);
 }
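
The ivcanon hunks rewrite exit conditions in place, either field by field or to a constant outcome. A compact sketch of both forms, not from the patch; the helper name and the NE_EXPR choice are illustrative:

/* Illustrative only: retarget an existing GIMPLE_COND.  */
static void
retarget_exit_cond (gimple cond, tree var, tree bound, bool force_taken)
{
  if (force_taken)
    /* COND_EXPR_COND = boolean_true_node becomes a dedicated helper.  */
    gimple_cond_make_true (cond);
  else
    {
      /* The condition is stored as code/lhs/rhs in the tuple itself.  */
      gimple_cond_set_code (cond, NE_EXPR);
      gimple_cond_set_lhs (cond, var);
      gimple_cond_set_rhs (cond, bound);
    }
  update_stmt (cond);
}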
 
@@ -116,12 +117,12 @@ unsigned
 tree_num_loop_insns (struct loop *loop, eni_weights *weights)
 {
   basic_block *body = get_loop_body (loop);
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   unsigned size = 1, i;
 
   for (i = 0; i < loop->num_nodes; i++)
-    for (bsi = bsi_start (body[i]); !bsi_end_p (bsi); bsi_next (&bsi))
-      size += estimate_num_insns (bsi_stmt (bsi), weights);
+    for (gsi = gsi_start_bb (body[i]); !gsi_end_p (gsi); gsi_next (&gsi))
+      size += estimate_num_insns (gsi_stmt (gsi), weights);
   free (body);
 
   return size;
@@ -163,7 +164,7 @@ try_unroll_loop_completely (struct loop *loop,
                            enum unroll_level ul)
 {
   unsigned HOST_WIDE_INT n_unroll, ninsns, max_unroll, unr_insns;
-  tree cond;
+  gimple cond;
 
   if (loop->inner)
     return false;
@@ -216,11 +217,11 @@ try_unroll_loop_completely (struct loop *loop,
       sbitmap_ones (wont_exit);
       RESET_BIT (wont_exit, 0);
 
-      if (!tree_duplicate_loop_to_header_edge (loop, loop_preheader_edge (loop),
-                                              n_unroll, wont_exit,
-                                              exit, &to_remove,
-                                              DLTHE_FLAG_UPDATE_FREQ
-                                              | DLTHE_FLAG_COMPLETTE_PEEL))
+      if (!gimple_duplicate_loop_to_header_edge (loop, loop_preheader_edge (loop),
+                                                n_unroll, wont_exit,
+                                                exit, &to_remove,
+                                                DLTHE_FLAG_UPDATE_FREQ
+                                                | DLTHE_FLAG_COMPLETTE_PEEL))
        {
           free_original_copy_tables ();
          free (wont_exit);
@@ -239,8 +240,10 @@ try_unroll_loop_completely (struct loop *loop,
     }
 
   cond = last_stmt (exit->src);
-  COND_EXPR_COND (cond) = (exit->flags & EDGE_TRUE_VALUE) ? boolean_true_node
-    : boolean_false_node;
+  if (exit->flags & EDGE_TRUE_VALUE)
+    gimple_cond_make_true (cond);
+  else
+    gimple_cond_make_false (cond);
   update_stmt (cond);
   update_ssa (TODO_update_ssa);
 
@@ -386,11 +389,9 @@ empty_loop_p (struct loop *loop)
 {
   edge exit;
   struct tree_niter_desc niter;
-  tree phi, def;
   basic_block *body;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   unsigned i;
-  tree stmt;
 
   /* If the loop has multiple exits, it is too hard for us to handle.
      Similarly, if the exit is not dominating, we cannot determine
@@ -404,8 +405,11 @@ empty_loop_p (struct loop *loop)
     return false;
 
   /* Values of all loop exit phi nodes must be invariants.  */
-  for (phi = phi_nodes (exit->dest); phi; phi = PHI_CHAIN (phi))
+  for (gsi = gsi_start(phi_nodes (exit->dest)); !gsi_end_p (gsi); gsi_next (&gsi))
     {
+      gimple phi = gsi_stmt (gsi);
+      tree def;
+
       if (!is_gimple_reg (PHI_RESULT (phi)))
        continue;
 
@@ -427,11 +431,12 @@ empty_loop_p (struct loop *loop)
          return false;
        }
        
-      for (bsi = bsi_start (body[i]); !bsi_end_p (bsi); bsi_next (&bsi))
+      for (gsi = gsi_start_bb (body[i]); !gsi_end_p (gsi); gsi_next (&gsi))
        {
-         stmt = bsi_stmt (bsi);
+         gimple stmt = gsi_stmt (gsi);
+
          if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_DEFS)
-             || stmt_ann (stmt)->has_volatile_ops)
+             || gimple_has_volatile_ops (stmt))
            {
              free (body);
              return false;
@@ -439,25 +444,19 @@ empty_loop_p (struct loop *loop)
 
          /* Also, asm statements and calls may have side effects and we
             cannot change the number of times they are executed.  */
-         switch (TREE_CODE (stmt))
+         switch (gimple_code (stmt))
            {
-           case RETURN_EXPR:
-           case GIMPLE_MODIFY_STMT:
-             stmt = get_call_expr_in (stmt);
-             if (!stmt)
-               break;
-
-           case CALL_EXPR:
-             if (TREE_SIDE_EFFECTS (stmt))
+           case GIMPLE_CALL:
+             if (gimple_has_side_effects (stmt))
                {
                  free (body);
                  return false;
                }
              break;
 
-           case ASM_EXPR:
+           case GIMPLE_ASM:
              /* We cannot remove volatile assembler.  */
-             if (ASM_VOLATILE_P (stmt))
+             if (gimple_asm_volatile_p (stmt))
                {
                  free (body);
                  return false;
@@ -480,8 +479,7 @@ static void
 remove_empty_loop (struct loop *loop)
 {
   edge exit = single_dom_exit (loop), non_exit;
-  tree cond_stmt = last_stmt (exit->src);
-  tree do_exit;
+  gimple cond_stmt = last_stmt (exit->src);
   basic_block *body;
   unsigned n_before, freq_in, freq_h;
   gcov_type exit_count = exit->count;
@@ -494,11 +492,9 @@ remove_empty_loop (struct loop *loop)
     non_exit = EDGE_SUCC (exit->src, 1);
 
   if (exit->flags & EDGE_TRUE_VALUE)
-    do_exit = boolean_true_node;
+    gimple_cond_make_true (cond_stmt);
   else
-    do_exit = boolean_false_node;
-
-  COND_EXPR_COND (cond_stmt) = do_exit;
+    gimple_cond_make_false (cond_stmt);
   update_stmt (cond_stmt);
 
   /* Let us set the probabilities of the edges coming from the exit block.  */
@@ -569,3 +565,4 @@ remove_empty_loops (void)
     }
   return 0;
 }
+
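
For reference, the conversion pattern in the hunks above, rewriting the exit GIMPLE_COND in place instead of assigning a new tree to COND_EXPR_COND, and walking a block with a gimple_stmt_iterator, looks roughly like this as free-standing code.  This is a sketch only, not part of the change; the helper name is invented and it assumes the usual GCC internal headers (tree-flow.h, gimple.h) and that EXIT->src ends in a GIMPLE_COND, as it does above.

/* Sketch only: force the condition controlling EXIT so the loop is
   always left, and report whether EXIT->src contains a statement with
   volatile operands.  */

static bool
force_exit_cond_and_check (edge exit)
{
  gimple cond = last_stmt (exit->src);
  gimple_stmt_iterator gsi;

  /* The condition now lives in the GIMPLE_COND itself as <code, lhs,
     rhs>, so it is rewritten in place.  */
  if (exit->flags & EDGE_TRUE_VALUE)
    gimple_cond_make_true (cond);
  else
    gimple_cond_make_false (cond);
  update_stmt (cond);

  for (gsi = gsi_start_bb (exit->src); !gsi_end_p (gsi); gsi_next (&gsi))
    if (gimple_has_volatile_ops (gsi_stmt (gsi)))
      return true;

  return false;
}
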
index ce5c05c..c314da4 100644
@@ -163,7 +163,7 @@ struct iv_use
   unsigned id;         /* The id of the use.  */
   enum use_type type;  /* Type of the use.  */
   struct iv *iv;       /* The induction variable it is based on.  */
-  tree stmt;           /* Statement in that it occurs.  */
+  gimple stmt;         /* Statement in that it occurs.  */
   tree *op_p;          /* The place where it occurs.  */
   bitmap related_cands;        /* The set of "related" iv candidates, plus the common
                           important ones.  */
@@ -191,7 +191,7 @@ struct iv_cand
   bool important;      /* Whether this is an "important" candidate, i.e. such
                           that it should be considered by all uses.  */
   enum iv_position pos;        /* Where it is computed.  */
-  tree incremented_at; /* For original biv, the statement where it is
+  gimple incremented_at;/* For original biv, the statement where it is
                           incremented.  */
   tree var_before;     /* The variable used for it before increment.  */
   tree var_after;      /* The variable used for it after increment.  */
@@ -448,7 +448,7 @@ dump_use (FILE *file, struct iv_use *use)
     }
 
   fprintf (file, "  in statement ");
-  print_generic_expr (file, use->stmt, TDF_SLIM);
+  print_gimple_stmt (file, use->stmt, 0, 0);
   fprintf (file, "\n");
 
   fprintf (file, "  at position ");
@@ -544,9 +544,9 @@ name_info (struct ivopts_data *data, tree name)
    emitted in LOOP.  */
 
 static bool
-stmt_after_ip_normal_pos (struct loop *loop, tree stmt)
+stmt_after_ip_normal_pos (struct loop *loop, gimple stmt)
 {
-  basic_block bb = ip_normal_pos (loop), sbb = bb_for_stmt (stmt);
+  basic_block bb = ip_normal_pos (loop), sbb = gimple_bb (stmt);
 
   gcc_assert (bb);
 
@@ -563,11 +563,11 @@ stmt_after_ip_normal_pos (struct loop *loop, tree stmt)
    variable CAND is incremented.  */
 
 static bool
-stmt_after_ip_original_pos (struct iv_cand *cand, tree stmt)
+stmt_after_ip_original_pos (struct iv_cand *cand, gimple stmt)
 {
-  basic_block cand_bb = bb_for_stmt (cand->incremented_at);
-  basic_block stmt_bb = bb_for_stmt (stmt);
-  block_stmt_iterator bsi;
+  basic_block cand_bb = gimple_bb (cand->incremented_at);
+  basic_block stmt_bb = gimple_bb (stmt);
+  gimple_stmt_iterator bsi;
 
   if (!dominated_by_p (CDI_DOMINATORS, stmt_bb, cand_bb))
     return false;
@@ -577,11 +577,11 @@ stmt_after_ip_original_pos (struct iv_cand *cand, tree stmt)
 
   /* Scan the block from the end, since the original ivs are usually
      incremented at the end of the loop body.  */
-  for (bsi = bsi_last (stmt_bb); ; bsi_prev (&bsi))
+  for (bsi = gsi_last_bb (stmt_bb); ; gsi_prev (&bsi))
     {
-      if (bsi_stmt (bsi) == cand->incremented_at)
+      if (gsi_stmt (bsi) == cand->incremented_at)
        return false;
-      if (bsi_stmt (bsi) == stmt)
+      if (gsi_stmt (bsi) == stmt)
        return true;
     }
 }
@@ -590,7 +590,7 @@ stmt_after_ip_original_pos (struct iv_cand *cand, tree stmt)
    CAND is incremented in LOOP.  */
 
 static bool
-stmt_after_increment (struct loop *loop, struct iv_cand *cand, tree stmt)
+stmt_after_increment (struct loop *loop, struct iv_cand *cand, gimple stmt)
 {
   switch (cand->pos)
     {
@@ -858,7 +858,7 @@ get_iv (struct ivopts_data *data, tree var)
 
   if (!name_info (data, var)->iv)
     {
-      bb = bb_for_stmt (SSA_NAME_DEF_STMT (var));
+      bb = gimple_bb (SSA_NAME_DEF_STMT (var));
 
       if (!bb
          || !flow_bb_inside_loop_p (data->current_loop, bb))
@@ -872,9 +872,9 @@ get_iv (struct ivopts_data *data, tree var)
    not define a simple affine biv with nonzero step.  */
 
 static tree
-determine_biv_step (tree phi)
+determine_biv_step (gimple phi)
 {
-  struct loop *loop = bb_for_stmt (phi)->loop_father;
+  struct loop *loop = gimple_bb (phi)->loop_father;
   tree name = PHI_RESULT (phi);
   affine_iv iv;
 
@@ -892,12 +892,16 @@ determine_biv_step (tree phi)
 static bool
 find_bivs (struct ivopts_data *data)
 {
-  tree phi, step, type, base;
+  gimple phi;
+  tree step, type, base;
   bool found = false;
   struct loop *loop = data->current_loop;
+  gimple_stmt_iterator psi;
 
-  for (phi = phi_nodes (loop->header); phi; phi = PHI_CHAIN (phi))
+  for (psi = gsi_start_phis (loop->header); !gsi_end_p (psi); gsi_next (&psi))
     {
+      phi = gsi_stmt (psi);
+
       if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
        continue;
 
@@ -933,13 +937,17 @@ find_bivs (struct ivopts_data *data)
 static void
 mark_bivs (struct ivopts_data *data)
 {
-  tree phi, var;
+  gimple phi;
+  tree var;
   struct iv *iv, *incr_iv;
   struct loop *loop = data->current_loop;
   basic_block incr_bb;
+  gimple_stmt_iterator psi;
 
-  for (phi = phi_nodes (loop->header); phi; phi = PHI_CHAIN (phi))
+  for (psi = gsi_start_phis (loop->header); !gsi_end_p (psi); gsi_next (&psi))
     {
+      phi = gsi_stmt (psi);
+
       iv = get_iv (data, PHI_RESULT (phi));
       if (!iv)
        continue;
@@ -950,7 +958,7 @@ mark_bivs (struct ivopts_data *data)
        continue;
 
       /* If the increment is in the subloop, ignore it.  */
-      incr_bb = bb_for_stmt (SSA_NAME_DEF_STMT (var));
+      incr_bb = gimple_bb (SSA_NAME_DEF_STMT (var));
       if (incr_bb->loop_father != data->current_loop
          || (incr_bb->flags & BB_IRREDUCIBLE_LOOP))
        continue;
@@ -964,7 +972,7 @@ mark_bivs (struct ivopts_data *data)
    parameters to IV.  */
 
 static bool
-find_givs_in_stmt_scev (struct ivopts_data *data, tree stmt, affine_iv *iv)
+find_givs_in_stmt_scev (struct ivopts_data *data, gimple stmt, affine_iv *iv)
 {
   tree lhs;
   struct loop *loop = data->current_loop;
@@ -972,14 +980,14 @@ find_givs_in_stmt_scev (struct ivopts_data *data, tree stmt, affine_iv *iv)
   iv->base = NULL_TREE;
   iv->step = NULL_TREE;
 
-  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+  if (gimple_code (stmt) != GIMPLE_ASSIGN)
     return false;
 
-  lhs = GIMPLE_STMT_OPERAND (stmt, 0);
+  lhs = gimple_assign_lhs (stmt);
   if (TREE_CODE (lhs) != SSA_NAME)
     return false;
 
-  if (!simple_iv (loop, stmt, GIMPLE_STMT_OPERAND (stmt, 1), iv, true))
+  if (!simple_iv (loop, stmt, lhs, iv, true))
     return false;
   iv->base = expand_simple_operations (iv->base);
 
@@ -993,14 +1001,14 @@ find_givs_in_stmt_scev (struct ivopts_data *data, tree stmt, affine_iv *iv)
 /* Finds general ivs in statement STMT.  */
 
 static void
-find_givs_in_stmt (struct ivopts_data *data, tree stmt)
+find_givs_in_stmt (struct ivopts_data *data, gimple stmt)
 {
   affine_iv iv;
 
   if (!find_givs_in_stmt_scev (data, stmt, &iv))
     return;
 
-  set_iv (data, GIMPLE_STMT_OPERAND (stmt, 0), iv.base, iv.step);
+  set_iv (data, gimple_assign_lhs (stmt), iv.base, iv.step);
 }
 
 /* Finds general ivs in basic block BB.  */
@@ -1008,10 +1016,10 @@ find_givs_in_stmt (struct ivopts_data *data, tree stmt)
 static void
 find_givs_in_bb (struct ivopts_data *data, basic_block bb)
 {
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator bsi;
 
-  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
-    find_givs_in_stmt (data, bsi_stmt (bsi));
+  for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
+    find_givs_in_stmt (data, gsi_stmt (bsi));
 }
 
 /* Finds general ivs.  */
@@ -1070,7 +1078,7 @@ find_induction_variables (struct ivopts_data *data)
 
 static struct iv_use *
 record_use (struct ivopts_data *data, tree *use_p, struct iv *iv,
-           tree stmt, enum use_type use_type)
+           gimple stmt, enum use_type use_type)
 {
   struct iv_use *use = XCNEW (struct iv_use);
 
@@ -1107,7 +1115,7 @@ record_invariant (struct ivopts_data *data, tree op, bool nonlinear_use)
       || !is_gimple_reg (op))
     return;
 
-  bb = bb_for_stmt (SSA_NAME_DEF_STMT (op));
+  bb = gimple_bb (SSA_NAME_DEF_STMT (op));
   if (bb
       && flow_bb_inside_loop_p (data->current_loop, bb))
     return;
@@ -1127,7 +1135,7 @@ find_interesting_uses_op (struct ivopts_data *data, tree op)
 {
   struct iv *iv;
   struct iv *civ;
-  tree stmt;
+  gimple stmt;
   struct iv_use *use;
 
   if (TREE_CODE (op) != SSA_NAME)
@@ -1156,8 +1164,8 @@ find_interesting_uses_op (struct ivopts_data *data, tree op)
   *civ = *iv;
 
   stmt = SSA_NAME_DEF_STMT (op);
-  gcc_assert (TREE_CODE (stmt) == PHI_NODE
-             || TREE_CODE (stmt) == GIMPLE_MODIFY_STMT);
+  gcc_assert (gimple_code (stmt) == GIMPLE_PHI
+             || is_gimple_assign (stmt));
 
   use = record_use (data, NULL, civ, stmt, USE_NONLINEAR_EXPR);
   iv->use_id = use->id;
@@ -1165,47 +1173,40 @@ find_interesting_uses_op (struct ivopts_data *data, tree op)
   return use;
 }
 
-/* Given a condition *COND_P, checks whether it is a compare of an induction
-   variable and an invariant.  If this is the case, CONTROL_VAR is set
-   to location of the iv, BOUND to the location of the invariant,
-   IV_VAR and IV_BOUND are set to the corresponding induction variable
-   descriptions, and true is returned.  If this is not the case,
-   CONTROL_VAR and BOUND are set to the arguments of the condition and
-   false is returned.  */
+/* Given a condition in statement STMT, checks whether it is a compare
+   of an induction variable and an invariant.  If this is the case,
+   CONTROL_VAR is set to location of the iv, BOUND to the location of
+   the invariant, IV_VAR and IV_BOUND are set to the corresponding
+   induction variable descriptions, and true is returned.  If this is not
+   the case, CONTROL_VAR and BOUND are set to the arguments of the
+   condition and false is returned.  */
 
 static bool
-extract_cond_operands (struct ivopts_data *data, tree *cond_p,
+extract_cond_operands (struct ivopts_data *data, gimple stmt,
                       tree **control_var, tree **bound,
                       struct iv **iv_var, struct iv **iv_bound)
 {
-  /* The nodes returned when COND has just one operand.  Note that you should
-     not modify anything in BOUND or IV_BOUND because of this.  */
+  /* The objects returned when COND has constant operands.  */
   static struct iv const_iv;
   static tree zero;
-  tree cond = *cond_p;
   tree *op0 = &zero, *op1 = &zero, *tmp_op;
   struct iv *iv0 = &const_iv, *iv1 = &const_iv, *tmp_iv;
   bool ret = false;
 
-  zero = integer_zero_node;
-  const_iv.step = integer_zero_node;
-
-  if (TREE_CODE (cond) == SSA_NAME)
+  if (gimple_code (stmt) == GIMPLE_COND)
     {
-      op0 = cond_p;
-      iv0 = get_iv (data, cond);
-      ret = (iv0 && !integer_zerop (iv0->step));
-      goto end;
+      op0 = gimple_cond_lhs_ptr (stmt);
+      op1 = gimple_cond_rhs_ptr (stmt);
     }
-
-  if (!COMPARISON_CLASS_P (cond))
+  else
     {
-      op0 = cond_p;
-      goto end;
+      op0 = gimple_assign_rhs1_ptr (stmt);
+      op1 = gimple_assign_rhs2_ptr (stmt);
     }
 
-  op0 = &TREE_OPERAND (cond, 0);
-  op1 = &TREE_OPERAND (cond, 1);
+  zero = integer_zero_node;
+  const_iv.step = integer_zero_node;
+
   if (TREE_CODE (*op0) == SSA_NAME)
     iv0 = get_iv (data, *op0);
   if (TREE_CODE (*op1) == SSA_NAME)
@@ -1237,16 +1238,16 @@ end:
   return ret;
 }
 
-/* Checks whether the condition *COND_P in STMT is interesting
-   and if so, records it.  */
+/* Checks whether the condition in STMT is interesting and if so,
+   records it.  */
 
 static void
-find_interesting_uses_cond (struct ivopts_data *data, tree stmt, tree *cond_p)
+find_interesting_uses_cond (struct ivopts_data *data, gimple stmt)
 {
   tree *var_p, *bound_p;
   struct iv *var_iv, *civ;
 
-  if (!extract_cond_operands (data, cond_p, &var_p, &bound_p, &var_iv, NULL))
+  if (!extract_cond_operands (data, stmt, &var_p, &bound_p, &var_iv, NULL))
     {
       find_interesting_uses_op (data, *var_p);
       find_interesting_uses_op (data, *bound_p);
@@ -1255,7 +1256,7 @@ find_interesting_uses_cond (struct ivopts_data *data, tree stmt, tree *cond_p)
 
   civ = XNEW (struct iv);
   *civ = *var_iv;
-  record_use (data, cond_p, civ, stmt, USE_COMPARE);
+  record_use (data, NULL, civ, stmt, USE_COMPARE);
 }
 
 /* Returns true if expression EXPR is obviously invariant in LOOP,
@@ -1275,7 +1276,7 @@ expr_invariant_in_loop_p (struct loop *loop, tree expr)
 
   if (TREE_CODE (expr) == SSA_NAME)
     {
-      def_bb = bb_for_stmt (SSA_NAME_DEF_STMT (expr));
+      def_bb = gimple_bb (SSA_NAME_DEF_STMT (expr));
       if (def_bb
          && flow_bb_inside_loop_p (loop, def_bb))
        return false;
@@ -1283,7 +1284,7 @@ expr_invariant_in_loop_p (struct loop *loop, tree expr)
       return true;
     }
 
-  if (!EXPR_P (expr) && !GIMPLE_STMT_P (expr))
+  if (!EXPR_P (expr))
     return false;
 
   len = TREE_OPERAND_LENGTH (expr);
@@ -1294,6 +1295,29 @@ expr_invariant_in_loop_p (struct loop *loop, tree expr)
   return true;
 }
 
+/* Returns true if statement STMT is obviously invariant in LOOP,
+   i.e. if all its operands on the RHS are defined outside of the LOOP.
+   LOOP should not be the function body.  */
+
+bool
+stmt_invariant_in_loop_p (struct loop *loop, gimple stmt)
+{
+  unsigned i;
+  tree lhs;
+
+  gcc_assert (loop_depth (loop) > 0);
+
+  lhs = gimple_get_lhs (stmt);
+  for (i = 0; i < gimple_num_ops (stmt); i++)
+    {
+      tree op = gimple_op (stmt, i);
+      if (op != lhs && !expr_invariant_in_loop_p (loop, op))
+       return false;
+    }
+
+  return true;
+}
+
 /* Cumulates the steps of indices into DATA and replaces their values with the
    initial ones.  Returns false when the value of the index cannot be determined.
    Callback for for_each_index.  */
@@ -1301,7 +1325,7 @@ expr_invariant_in_loop_p (struct loop *loop, tree expr)
 struct ifs_ivopts_data
 {
   struct ivopts_data *ivopts_data;
-  tree stmt;
+  gimple stmt;
   tree step;
 };
 
@@ -1553,7 +1577,7 @@ may_be_nonaddressable_p (tree expr)
 /* Finds addresses in *OP_P inside STMT.  */
 
 static void
-find_interesting_uses_address (struct ivopts_data *data, tree stmt, tree *op_p)
+find_interesting_uses_address (struct ivopts_data *data, gimple stmt, tree *op_p)
 {
   tree base = *op_p, step = build_int_cst (sizetype, 0);
   struct iv *civ;
@@ -1561,7 +1585,7 @@ find_interesting_uses_address (struct ivopts_data *data, tree stmt, tree *op_p)
 
   /* Do not play with volatile memory references.  A bit too conservative,
      perhaps, but safe.  */
-  if (stmt_ann (stmt)->has_volatile_ops)
+  if (gimple_has_volatile_ops (stmt))
     goto fail;
 
   /* Ignore bitfields for now.  Not really something terribly complicated
@@ -1657,7 +1681,7 @@ fail:
 /* Finds and records invariants used in STMT.  */
 
 static void
-find_invariants_stmt (struct ivopts_data *data, tree stmt)
+find_invariants_stmt (struct ivopts_data *data, gimple stmt)
 {
   ssa_op_iter iter;
   use_operand_p use_p;
@@ -1673,61 +1697,55 @@ find_invariants_stmt (struct ivopts_data *data, tree stmt)
 /* Finds interesting uses of induction variables in the statement STMT.  */
 
 static void
-find_interesting_uses_stmt (struct ivopts_data *data, tree stmt)
+find_interesting_uses_stmt (struct ivopts_data *data, gimple stmt)
 {
   struct iv *iv;
-  tree op, lhs, rhs;
+  tree op, *lhs, *rhs;
   ssa_op_iter iter;
   use_operand_p use_p;
+  enum tree_code code;
 
   find_invariants_stmt (data, stmt);
 
-  if (TREE_CODE (stmt) == COND_EXPR)
+  if (gimple_code (stmt) == GIMPLE_COND)
     {
-      find_interesting_uses_cond (data, stmt, &COND_EXPR_COND (stmt));
+      find_interesting_uses_cond (data, stmt);
       return;
     }
 
-  if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
+  if (is_gimple_assign (stmt))
     {
-      lhs = GIMPLE_STMT_OPERAND (stmt, 0);
-      rhs = GIMPLE_STMT_OPERAND (stmt, 1);
+      lhs = gimple_assign_lhs_ptr (stmt);
+      rhs = gimple_assign_rhs1_ptr (stmt);
 
-      if (TREE_CODE (lhs) == SSA_NAME)
+      if (TREE_CODE (*lhs) == SSA_NAME)
        {
          /* If the statement defines an induction variable, the uses are not
             interesting by themselves.  */
 
-         iv = get_iv (data, lhs);
+         iv = get_iv (data, *lhs);
 
          if (iv && !integer_zerop (iv->step))
            return;
        }
 
-      switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
+      code = gimple_assign_rhs_code (stmt);
+      if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS
+         && (REFERENCE_CLASS_P (*rhs)
+             || is_gimple_val (*rhs)))
        {
-       case tcc_comparison:
-         find_interesting_uses_cond (data, stmt,
-                                     &GIMPLE_STMT_OPERAND (stmt, 1));
-         return;
+         if (REFERENCE_CLASS_P (*rhs))
+           find_interesting_uses_address (data, stmt, rhs);
+         else
+           find_interesting_uses_op (data, *rhs);
 
-       case tcc_reference:
-         find_interesting_uses_address (data, stmt,
-                                        &GIMPLE_STMT_OPERAND (stmt, 1));
-         if (REFERENCE_CLASS_P (lhs))
-           find_interesting_uses_address (data, stmt,
-                                          &GIMPLE_STMT_OPERAND (stmt, 0));
+         if (REFERENCE_CLASS_P (*lhs))
+           find_interesting_uses_address (data, stmt, lhs);
          return;
-
-       default: ;
        }
-
-      if (REFERENCE_CLASS_P (lhs)
-         && is_gimple_val (rhs))
+      else if (TREE_CODE_CLASS (code) == tcc_comparison)
        {
-         find_interesting_uses_address (data, stmt,
-                                        &GIMPLE_STMT_OPERAND (stmt, 0));
-         find_interesting_uses_op (data, rhs);
+         find_interesting_uses_cond (data, stmt);
          return;
        }
 
@@ -1740,11 +1758,10 @@ find_interesting_uses_stmt (struct ivopts_data *data, tree stmt)
         call (memory).  */
     }
 
-  if (TREE_CODE (stmt) == PHI_NODE
-      && bb_for_stmt (stmt) == data->current_loop->header)
+  if (gimple_code (stmt) == GIMPLE_PHI
+      && gimple_bb (stmt) == data->current_loop->header)
     {
-      lhs = PHI_RESULT (stmt);
-      iv = get_iv (data, lhs);
+      iv = get_iv (data, PHI_RESULT (stmt));
 
       if (iv && !integer_zerop (iv->step))
        return;
@@ -1771,10 +1788,13 @@ find_interesting_uses_stmt (struct ivopts_data *data, tree stmt)
 static void
 find_interesting_uses_outside (struct ivopts_data *data, edge exit)
 {
-  tree phi, def;
+  gimple phi;
+  gimple_stmt_iterator psi;
+  tree def;
 
-  for (phi = phi_nodes (exit->dest); phi; phi = PHI_CHAIN (phi))
+  for (psi = gsi_start_phis (exit->dest); !gsi_end_p (psi); gsi_next (&psi))
     {
+      phi = gsi_stmt (psi);
       def = PHI_ARG_DEF_FROM_EDGE (phi, exit);
       if (is_gimple_reg (def))
        find_interesting_uses_op (data, def);
@@ -1787,8 +1807,7 @@ static void
 find_interesting_uses (struct ivopts_data *data)
 {
   basic_block bb;
-  block_stmt_iterator bsi;
-  tree phi;
+  gimple_stmt_iterator bsi;
   basic_block *body = get_loop_body (data->current_loop);
   unsigned i;
   struct version_info *info;
@@ -1807,10 +1826,10 @@ find_interesting_uses (struct ivopts_data *data)
            && !flow_bb_inside_loop_p (data->current_loop, e->dest))
          find_interesting_uses_outside (data, e);
 
-      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
-       find_interesting_uses_stmt (data, phi);
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
-       find_interesting_uses_stmt (data, bsi_stmt (bsi));
+      for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
+       find_interesting_uses_stmt (data, gsi_stmt (bsi));
+      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
+       find_interesting_uses_stmt (data, gsi_stmt (bsi));
     }
 
   if (dump_file && (dump_flags & TDF_DETAILS))
@@ -2033,7 +2052,7 @@ find_depends (tree *expr_p, int *ws ATTRIBUTE_UNUSED, void *data)
 static struct iv_cand *
 add_candidate_1 (struct ivopts_data *data,
                 tree base, tree step, bool important, enum iv_position pos,
-                struct iv_use *use, tree incremented_at)
+                struct iv_use *use, gimple incremented_at)
 {
   unsigned i;
   struct iv_cand *cand = NULL;
@@ -2157,10 +2176,10 @@ add_candidate (struct ivopts_data *data,
               tree base, tree step, bool important, struct iv_use *use)
 {
   if (ip_normal_pos (data->current_loop))
-    add_candidate_1 (data, base, step, important, IP_NORMAL, use, NULL_TREE);
+    add_candidate_1 (data, base, step, important, IP_NORMAL, use, NULL);
   if (ip_end_pos (data->current_loop)
       && allow_ip_end_pos_p (data->current_loop))
-    add_candidate_1 (data, base, step, important, IP_END, use, NULL_TREE);
+    add_candidate_1 (data, base, step, important, IP_END, use, NULL);
 }
 
 /* Add a standard "0 + 1 * iteration" iv candidate for a
@@ -2193,7 +2212,8 @@ add_standard_iv_candidates (struct ivopts_data *data)
 static void
 add_old_iv_candidates (struct ivopts_data *data, struct iv *iv)
 {
-  tree phi, def;
+  gimple phi;
+  tree def;
   struct iv_cand *cand;
 
   add_candidate (data, iv->base, iv->step, true, NULL);
@@ -2204,7 +2224,7 @@ add_old_iv_candidates (struct ivopts_data *data, struct iv *iv)
                 iv->step, true, NULL);
 
   phi = SSA_NAME_DEF_STMT (iv->ssa_name);
-  if (TREE_CODE (phi) == PHI_NODE)
+  if (gimple_code (phi) == GIMPLE_PHI)
     {
       /* Additionally record the possibility of leaving the original iv
         untouched.  */
@@ -2643,7 +2663,7 @@ computation_cost (tree expr)
 /* Returns variable containing the value of candidate CAND at statement AT.  */
 
 static tree
-var_at_stmt (struct loop *loop, struct iv_cand *cand, tree stmt)
+var_at_stmt (struct loop *loop, struct iv_cand *cand, gimple stmt)
 {
   if (stmt_after_increment (loop, cand, stmt))
     return cand->var_after;
@@ -2713,7 +2733,7 @@ determine_common_wider_type (tree *a, tree *b)
 
 static bool
 get_computation_aff (struct loop *loop,
-                    struct iv_use *use, struct iv_cand *cand, tree at,
+                    struct iv_use *use, struct iv_cand *cand, gimple at,
                     struct affine_tree_combination *aff)
 {
   tree ubase = use->iv->base;
@@ -2788,7 +2808,7 @@ get_computation_aff (struct loop *loop,
 
 static tree
 get_computation_at (struct loop *loop,
-                   struct iv_use *use, struct iv_cand *cand, tree at)
+                   struct iv_use *use, struct iv_cand *cand, gimple at)
 {
   aff_tree aff;
   tree type = TREE_TYPE (use->iv->base);
@@ -3458,7 +3478,7 @@ difference_cost (struct ivopts_data *data,
 static comp_cost
 get_computation_cost_at (struct ivopts_data *data,
                         struct iv_use *use, struct iv_cand *cand,
-                        bool address_p, bitmap *depends_on, tree at)
+                        bool address_p, bitmap *depends_on, gimple at)
 {
   tree ubase = use->iv->base, ustep = use->iv->step;
   tree cbase, cstep;
@@ -3672,7 +3692,7 @@ determine_use_iv_cost_address (struct ivopts_data *data,
    stores it to VAL.  */
 
 static void
-cand_value_at (struct loop *loop, struct iv_cand *cand, tree at, tree niter,
+cand_value_at (struct loop *loop, struct iv_cand *cand, gimple at, tree niter,
               aff_tree *val)
 {
   aff_tree step, delta, nit;
@@ -3725,7 +3745,7 @@ iv_elimination_compare (struct ivopts_data *data, struct iv_use *use)
   basic_block ex_bb;
   edge exit;
 
-  ex_bb = bb_for_stmt (use->stmt);
+  ex_bb = gimple_bb (use->stmt);
   exit = EDGE_SUCC (ex_bb, 0);
   if (flow_bb_inside_loop_p (loop, exit->dest))
     exit = EDGE_SUCC (ex_bb, 1);
@@ -3751,11 +3771,10 @@ may_eliminate_iv (struct ivopts_data *data,
 
   /* For now works only for exits that dominate the loop latch.
      TODO: extend to other conditions inside loop body.  */
-  ex_bb = bb_for_stmt (use->stmt);
+  ex_bb = gimple_bb (use->stmt);
   if (use->stmt != last_stmt (ex_bb)
-      || TREE_CODE (use->stmt) != COND_EXPR)
-    return false;
-  if (!dominated_by_p (CDI_DOMINATORS, loop->latch, ex_bb))
+      || gimple_code (use->stmt) != GIMPLE_COND
+      || !dominated_by_p (CDI_DOMINATORS, loop->latch, ex_bb))
     return false;
 
   exit = EDGE_SUCC (ex_bb, 0);
@@ -3834,7 +3853,7 @@ determine_use_iv_cost_condition (struct ivopts_data *data,
 
   /* Try expressing the original giv.  If it is compared with an invariant,
      note that we cannot get rid of it.  */
-  ok = extract_cond_operands (data, use->op_p, NULL, NULL, NULL, &cmp_iv);
+  ok = extract_cond_operands (data, use->stmt, NULL, NULL, NULL, &cmp_iv);
   gcc_assert (ok);
 
   express_cost = get_computation_cost (data, use, cand, false,
@@ -4050,7 +4069,9 @@ static void
 determine_set_costs (struct ivopts_data *data)
 {
   unsigned j, n;
-  tree phi, op;
+  gimple phi;
+  gimple_stmt_iterator psi;
+  tree op;
   struct loop *loop = data->current_loop;
   bitmap_iterator bi;
 
@@ -4083,8 +4104,9 @@ determine_set_costs (struct ivopts_data *data)
     }
 
   n = 0;
-  for (phi = phi_nodes (loop->header); phi; phi = PHI_CHAIN (phi))
+  for (psi = gsi_start_phis (loop->header); !gsi_end_p (psi); gsi_next (&psi))
     {
+      phi = gsi_stmt (psi);
       op = PHI_RESULT (phi);
 
       if (!is_gimple_reg (op))
@@ -4925,7 +4947,7 @@ find_optimal_iv_set (struct ivopts_data *data)
 static void
 create_new_iv (struct ivopts_data *data, struct iv_cand *cand)
 {
-  block_stmt_iterator incr_pos;
+  gimple_stmt_iterator incr_pos;
   tree base;
   bool after = false;
 
@@ -4935,11 +4957,11 @@ create_new_iv (struct ivopts_data *data, struct iv_cand *cand)
   switch (cand->pos)
     {
     case IP_NORMAL:
-      incr_pos = bsi_last (ip_normal_pos (data->current_loop));
+      incr_pos = gsi_last_bb (ip_normal_pos (data->current_loop));
       break;
 
     case IP_END:
-      incr_pos = bsi_last (ip_end_pos (data->current_loop));
+      incr_pos = gsi_last_bb (ip_end_pos (data->current_loop));
       after = true;
       break;
 
@@ -4984,17 +5006,15 @@ create_new_ivs (struct ivopts_data *data, struct iv_ca *set)
    is true, remove also the ssa name defined by the statement.  */
 
 static void
-remove_statement (tree stmt, bool including_defined_name)
+remove_statement (gimple stmt, bool including_defined_name)
 {
-  if (TREE_CODE (stmt) == PHI_NODE)
-    {
-      remove_phi_node (stmt, NULL_TREE, including_defined_name);
-    }
+  gimple_stmt_iterator bsi = gsi_for_stmt (stmt);
+
+  if (gimple_code (stmt) == GIMPLE_PHI)
+    remove_phi_node (&bsi, including_defined_name);
   else
     {
-      block_stmt_iterator bsi = bsi_for_stmt (stmt);
-
-      bsi_remove (&bsi, true);
+      gsi_remove (&bsi, true);
       release_defs (stmt); 
     }
 }
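
Both the removal above and the PHI rewriting in the hunks that follow go through explicit statement iterators.  A minimal sketch of that combined pattern, with an invented helper name and using only calls that appear in the surrounding hunks:

/* Sketch only: replace PHI by an explicit assignment of VAL to its
   result, inserted after the labels of PHI's block, then remove the
   PHI through an iterator.  */

static void
replace_phi_by_assignment (gimple phi, tree val)
{
  tree res = PHI_RESULT (phi);
  gimple_stmt_iterator gsi = gsi_after_labels (gimple_bb (phi));
  gimple ass = gimple_build_assign (res, val);

  gsi_insert_before (&gsi, ass, GSI_SAME_STMT);

  /* PHIs are now removed via an iterator as well.  */
  gsi = gsi_for_stmt (phi);
  remove_phi_node (&gsi, false);
}
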
@@ -5007,8 +5027,9 @@ rewrite_use_nonlinear_expr (struct ivopts_data *data,
                            struct iv_use *use, struct iv_cand *cand)
 {
   tree comp;
-  tree op, tgt, ass;
-  block_stmt_iterator bsi;
+  tree op, tgt;
+  gimple ass;
+  gimple_stmt_iterator bsi;
 
   /* An important special case -- if we are asked to express value of
      the original iv by itself, just exit; there is no need to
@@ -5018,10 +5039,10 @@ rewrite_use_nonlinear_expr (struct ivopts_data *data,
       && cand->incremented_at == use->stmt)
     {
       tree step, ctype, utype;
-      enum tree_code incr_code = PLUS_EXPR;
+      enum tree_code incr_code = PLUS_EXPR, old_code;
 
-      gcc_assert (TREE_CODE (use->stmt) == GIMPLE_MODIFY_STMT);
-      gcc_assert (GIMPLE_STMT_OPERAND (use->stmt, 0) == cand->var_after);
+      gcc_assert (is_gimple_assign (use->stmt));
+      gcc_assert (gimple_assign_lhs (use->stmt) == cand->var_after);
 
       step = cand->iv->step;
       ctype = TREE_TYPE (step);
@@ -5037,16 +5058,16 @@ rewrite_use_nonlinear_expr (struct ivopts_data *data,
         computations in the loop -- otherwise, the computation
         we rely upon may be removed in remove_unused_ivs,
         thus leading to ICE.  */
-      op = GIMPLE_STMT_OPERAND (use->stmt, 1);
-      if (TREE_CODE (op) == PLUS_EXPR
-         || TREE_CODE (op) == MINUS_EXPR
-         || TREE_CODE (op) == POINTER_PLUS_EXPR)
+      old_code = gimple_assign_rhs_code (use->stmt);
+      if (old_code == PLUS_EXPR
+         || old_code == MINUS_EXPR
+         || old_code == POINTER_PLUS_EXPR)
        {
-         if (TREE_OPERAND (op, 0) == cand->var_before)
-           op = TREE_OPERAND (op, 1);
-         else if (TREE_CODE (op) != MINUS_EXPR
-                  && TREE_OPERAND (op, 1) == cand->var_before)
-           op = TREE_OPERAND (op, 0);
+         if (gimple_assign_rhs1 (use->stmt) == cand->var_before)
+           op = gimple_assign_rhs2 (use->stmt);
+         else if (old_code != MINUS_EXPR
+                  && gimple_assign_rhs2 (use->stmt) == cand->var_before)
+           op = gimple_assign_rhs1 (use->stmt);
          else
            op = NULL_TREE;
        }
@@ -5071,39 +5092,41 @@ rewrite_use_nonlinear_expr (struct ivopts_data *data,
       gcc_assert (comp != NULL_TREE);
     }
 
-  switch (TREE_CODE (use->stmt))
+  switch (gimple_code (use->stmt))
     {
-    case PHI_NODE:
+    case GIMPLE_PHI:
       tgt = PHI_RESULT (use->stmt);
 
       /* If we should keep the biv, do not replace it.  */
       if (name_info (data, tgt)->preserve_biv)
        return;
 
-      bsi = bsi_after_labels (bb_for_stmt (use->stmt));
+      bsi = gsi_after_labels (gimple_bb (use->stmt));
       break;
 
-    case GIMPLE_MODIFY_STMT:
-      tgt = GIMPLE_STMT_OPERAND (use->stmt, 0);
-      bsi = bsi_for_stmt (use->stmt);
+    case GIMPLE_ASSIGN:
+      tgt = gimple_assign_lhs (use->stmt);
+      bsi = gsi_for_stmt (use->stmt);
       break;
 
     default:
       gcc_unreachable ();
     }
 
-  op = force_gimple_operand_bsi (&bsi, comp, false, SSA_NAME_VAR (tgt),
-                                true, BSI_SAME_STMT);
+  op = force_gimple_operand_gsi (&bsi, comp, false, SSA_NAME_VAR (tgt),
+                                true, GSI_SAME_STMT);
 
-  if (TREE_CODE (use->stmt) == PHI_NODE)
+  if (gimple_code (use->stmt) == GIMPLE_PHI)
     {
-      ass = build_gimple_modify_stmt (tgt, op);
-      bsi_insert_before (&bsi, ass, BSI_SAME_STMT);
+      ass = gimple_build_assign (tgt, op);
+      gsi_insert_before (&bsi, ass, GSI_SAME_STMT);
       remove_statement (use->stmt, false);
-      SSA_NAME_DEF_STMT (tgt) = ass;
     }
   else
-    GIMPLE_STMT_OPERAND (use->stmt, 1) = op;
+    {
+      gimple_assign_set_rhs_from_tree (&bsi, op);
+      use->stmt = gsi_stmt (bsi);
+    }
 }
 
 /* Replaces ssa name in index IDX by its basic variable.  Callback for
@@ -5222,7 +5245,7 @@ rewrite_use_address (struct ivopts_data *data,
                     struct iv_use *use, struct iv_cand *cand)
 {
   aff_tree aff;
-  block_stmt_iterator bsi = bsi_for_stmt (use->stmt);
+  gimple_stmt_iterator bsi = gsi_for_stmt (use->stmt);
   tree ref;
   bool ok;
 
@@ -5243,7 +5266,7 @@ rewrite_use_compare (struct ivopts_data *data,
                     struct iv_use *use, struct iv_cand *cand)
 {
   tree comp, *var_p, op, bound;
-  block_stmt_iterator bsi = bsi_for_stmt (use->stmt);
+  gimple_stmt_iterator bsi = gsi_for_stmt (use->stmt);
   enum tree_code compare;
   struct cost_pair *cp = get_use_iv_cost (data, use, cand);
   bool ok;
@@ -5256,10 +5279,12 @@ rewrite_use_compare (struct ivopts_data *data,
 
       compare = iv_elimination_compare (data, use);
       bound = unshare_expr (fold_convert (var_type, bound));
-      op = force_gimple_operand_bsi (&bsi, bound, true, NULL_TREE,
-                                    true, BSI_SAME_STMT);
+      op = force_gimple_operand_gsi (&bsi, bound, true, NULL_TREE,
+                                    true, GSI_SAME_STMT);
 
-      *use->op_p = build2 (compare, boolean_type_node, var, op);
+      gimple_cond_set_lhs (use->stmt, var);
+      gimple_cond_set_code (use->stmt, compare);
+      gimple_cond_set_rhs (use->stmt, op);
       return;
     }
 
@@ -5268,11 +5293,11 @@ rewrite_use_compare (struct ivopts_data *data,
   comp = get_computation (data->current_loop, use, cand);
   gcc_assert (comp != NULL_TREE);
 
-  ok = extract_cond_operands (data, use->op_p, &var_p, NULL, NULL, NULL);
+  ok = extract_cond_operands (data, use->stmt, &var_p, NULL, NULL, NULL);
   gcc_assert (ok);
 
-  *var_p = force_gimple_operand_bsi (&bsi, comp, true, SSA_NAME_VAR (*var_p),
-                                    true, BSI_SAME_STMT);
+  *var_p = force_gimple_operand_gsi (&bsi, comp, true, SSA_NAME_VAR (*var_p),
+                                    true, GSI_SAME_STMT);
 }
 
 /* Rewrites USE using candidate CAND.  */
@@ -5452,7 +5477,7 @@ tree_ssa_iv_optimize_loop (struct ivopts_data *data, struct loop *loop)
        {
          fprintf (dump_file, "  single exit %d -> %d, exit condition ",
                   exit->src->index, exit->dest->index);
-         print_generic_expr (dump_file, last_stmt (exit->src), TDF_SLIM);
+         print_gimple_stmt (dump_file, last_stmt (exit->src), 0, TDF_SLIM);
          fprintf (dump_file, "\n");
        }
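
The recurring idioms in the induction-variable hunks above, walking PHI nodes with gsi_start_phis and inspecting assignments through the gimple_assign accessors, can be summarized by a small free-standing sketch; the helper name and the counting logic are illustrative only, not part of this change:

/* Sketch only: count loop-header PHIs that define SSA registers and
   header statements whose right-hand side is a comparison.  */

static unsigned
count_reg_phis_and_compares (struct loop *loop)
{
  gimple_stmt_iterator psi, bsi;
  unsigned n = 0;

  /* PHI nodes are no longer a chained list hanging off the block;
     they are walked with the same iterator type as other statements.  */
  for (psi = gsi_start_phis (loop->header); !gsi_end_p (psi); gsi_next (&psi))
    if (is_gimple_reg (PHI_RESULT (gsi_stmt (psi))))
      n++;

  for (bsi = gsi_start_bb (loop->header); !gsi_end_p (bsi); gsi_next (&bsi))
    {
      gimple stmt = gsi_stmt (bsi);

      if (is_gimple_assign (stmt)
          && TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
        n++;
    }

  return n;
}
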
 
index 1ff6254..05e87d2 100644
@@ -49,10 +49,12 @@ along with GCC; see the file COPYING3.  If not see
 
 void
 create_iv (tree base, tree step, tree var, struct loop *loop,
-          block_stmt_iterator *incr_pos, bool after,
+          gimple_stmt_iterator *incr_pos, bool after,
           tree *var_before, tree *var_after)
 {
-  tree stmt, initial, step1, stmts;
+  gimple stmt;
+  tree initial, step1;
+  gimple_seq stmts;
   tree vb, va;
   enum tree_code incr_op = PLUS_EXPR;
   edge pe = loop_preheader_edge (loop);
@@ -63,10 +65,10 @@ create_iv (tree base, tree step, tree var, struct loop *loop,
       add_referenced_var (var);
     }
 
-  vb = make_ssa_name (var, NULL_TREE);
+  vb = make_ssa_name (var, NULL);
   if (var_before)
     *var_before = vb;
-  va = make_ssa_name (var, NULL_TREE);
+  va = make_ssa_name (var, NULL);
   if (var_after)
     *var_after = va;
 
@@ -106,20 +108,17 @@ create_iv (tree base, tree step, tree var, struct loop *loop,
      loop (i.e. the step should be loop invariant).  */
   step = force_gimple_operand (step, &stmts, true, NULL_TREE);
   if (stmts)
-    bsi_insert_on_edge_immediate (pe, stmts);
+    gsi_insert_seq_on_edge_immediate (pe, stmts);
 
-  stmt = build_gimple_modify_stmt (va,
-                                  build2 (incr_op, TREE_TYPE (base),
-                                          vb, step));
-  SSA_NAME_DEF_STMT (va) = stmt;
+  stmt = gimple_build_assign_with_ops (incr_op, va, vb, step);
   if (after)
-    bsi_insert_after (incr_pos, stmt, BSI_NEW_STMT);
+    gsi_insert_after (incr_pos, stmt, GSI_NEW_STMT);
   else
-    bsi_insert_before (incr_pos, stmt, BSI_NEW_STMT);
+    gsi_insert_before (incr_pos, stmt, GSI_NEW_STMT);
 
   initial = force_gimple_operand (base, &stmts, true, var);
   if (stmts)
-    bsi_insert_on_edge_immediate (pe, stmts);
+    gsi_insert_seq_on_edge_immediate (pe, stmts);
 
   stmt = create_phi_node (vb, loop->header);
   SSA_NAME_DEF_STMT (vb) = stmt;
@@ -132,8 +131,8 @@ create_iv (tree base, tree step, tree var, struct loop *loop,
 static void
 add_exit_phis_edge (basic_block exit, tree use)
 {
-  tree phi, def_stmt = SSA_NAME_DEF_STMT (use);
-  basic_block def_bb = bb_for_stmt (def_stmt);
+  gimple phi, def_stmt = SSA_NAME_DEF_STMT (use);
+  basic_block def_bb = gimple_bb (def_stmt);
   struct loop *def_loop;
   edge e;
   edge_iterator ei;
@@ -151,7 +150,8 @@ add_exit_phis_edge (basic_block exit, tree use)
     return;
 
   phi = create_phi_node (use, exit);
-  create_new_def_for (PHI_RESULT (phi), phi, PHI_RESULT_PTR (phi));
+  create_new_def_for (gimple_phi_result (phi), phi,
+                     gimple_phi_result_ptr (phi));
   FOR_EACH_EDGE (e, ei, exit->preds)
     add_phi_arg (phi, use, e);
 }
@@ -164,7 +164,7 @@ add_exit_phis_var (tree var, bitmap livein, bitmap exits)
 {
   bitmap def;
   unsigned index;
-  basic_block def_bb = bb_for_stmt (SSA_NAME_DEF_STMT (var));
+  basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (var));
   bitmap_iterator bi;
 
   if (is_gimple_reg (var))
@@ -243,7 +243,7 @@ find_uses_to_rename_use (basic_block bb, tree use, bitmap *use_blocks,
     return;
 
   ver = SSA_NAME_VERSION (use);
-  def_bb = bb_for_stmt (SSA_NAME_DEF_STMT (use));
+  def_bb = gimple_bb (SSA_NAME_DEF_STMT (use));
   if (!def_bb)
     return;
   def_loop = def_bb->loop_father;
@@ -270,11 +270,11 @@ find_uses_to_rename_use (basic_block bb, tree use, bitmap *use_blocks,
    NEED_PHIS.  */
 
 static void
-find_uses_to_rename_stmt (tree stmt, bitmap *use_blocks, bitmap need_phis)
+find_uses_to_rename_stmt (gimple stmt, bitmap *use_blocks, bitmap need_phis)
 {
   ssa_op_iter iter;
   tree var;
-  basic_block bb = bb_for_stmt (stmt);
+  basic_block bb = gimple_bb (stmt);
 
   FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_ALL_USES)
     find_uses_to_rename_use (bb, var, use_blocks, need_phis);
@@ -288,18 +288,17 @@ find_uses_to_rename_stmt (tree stmt, bitmap *use_blocks, bitmap need_phis)
 static void
 find_uses_to_rename_bb (basic_block bb, bitmap *use_blocks, bitmap need_phis)
 {
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator bsi;
   edge e;
   edge_iterator ei;
-  tree phi;
 
   FOR_EACH_EDGE (e, ei, bb->succs)
-    for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
-      find_uses_to_rename_use (bb, PHI_ARG_DEF_FROM_EDGE (phi, e),
+    for (bsi = gsi_start_phis (e->dest); !gsi_end_p (bsi); gsi_next (&bsi))
+      find_uses_to_rename_use (bb, PHI_ARG_DEF_FROM_EDGE (gsi_stmt (bsi), e),
                               use_blocks, need_phis);
  
-  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
-    find_uses_to_rename_stmt (bsi_stmt (bsi), use_blocks, need_phis);
+  for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
+    find_uses_to_rename_stmt (gsi_stmt (bsi), use_blocks, need_phis);
 }
      
 /* Marks names that are used outside of the loop they are defined in
@@ -407,14 +406,14 @@ rewrite_into_loop_closed_ssa (bitmap changed_bbs, unsigned update_flag)
 static void
 check_loop_closed_ssa_use (basic_block bb, tree use)
 {
-  tree def;
+  gimple def;
   basic_block def_bb;
   
   if (TREE_CODE (use) != SSA_NAME || !is_gimple_reg (use))
     return;
 
   def = SSA_NAME_DEF_STMT (use);
-  def_bb = bb_for_stmt (def);
+  def_bb = gimple_bb (def);
   gcc_assert (!def_bb
              || flow_bb_inside_loop_p (def_bb->loop_father, bb));
 }
@@ -422,7 +421,7 @@ check_loop_closed_ssa_use (basic_block bb, tree use)
 /* Checks invariants of loop closed ssa form in statement STMT in BB.  */
 
 static void
-check_loop_closed_ssa_stmt (basic_block bb, tree stmt)
+check_loop_closed_ssa_stmt (basic_block bb, gimple stmt)
 {
   ssa_op_iter iter;
   tree var;
@@ -437,9 +436,10 @@ void
 verify_loop_closed_ssa (void)
 {
   basic_block bb;
-  block_stmt_iterator bsi;
-  tree phi;
-  unsigned i;
+  gimple_stmt_iterator bsi;
+  gimple phi;
+  edge e;
+  edge_iterator ei;
 
   if (number_of_loops () <= 1)
     return;
@@ -448,13 +448,16 @@ verify_loop_closed_ssa (void)
 
   FOR_EACH_BB (bb)
     {
-      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
-       for (i = 0; i < (unsigned) PHI_NUM_ARGS (phi); i++)
-         check_loop_closed_ssa_use (PHI_ARG_EDGE (phi, i)->src,
-                                    PHI_ARG_DEF (phi, i));
+      for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
+       {
+         phi = gsi_stmt (bsi);
+         FOR_EACH_EDGE (e, ei, bb->preds)
+           check_loop_closed_ssa_use (e->src,
+                                      PHI_ARG_DEF_FROM_EDGE (phi, e));
+       }
 
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
-       check_loop_closed_ssa_stmt (bb, bsi_stmt (bsi));
+      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
+       check_loop_closed_ssa_stmt (bb, gsi_stmt (bsi));
     }
 }
 
@@ -466,11 +469,14 @@ split_loop_exit_edge (edge exit)
 {
   basic_block dest = exit->dest;
   basic_block bb = split_edge (exit);
-  tree phi, new_phi, new_name, name;
+  gimple phi, new_phi;
+  tree new_name, name;
   use_operand_p op_p;
+  gimple_stmt_iterator psi;
 
-  for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
+  for (psi = gsi_start_phis (dest); !gsi_end_p (psi); gsi_next (&psi))
     {
+      phi = gsi_stmt (psi);
       op_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, single_succ_edge (bb));
 
       name = USE_FROM_PTR (op_p);
@@ -507,7 +513,7 @@ ip_end_pos (struct loop *loop)
 basic_block
 ip_normal_pos (struct loop *loop)
 {
-  tree last;
+  gimple last;
   basic_block bb;
   edge exit;
 
@@ -517,7 +523,7 @@ ip_normal_pos (struct loop *loop)
   bb = single_pred (loop->latch);
   last = last_stmt (bb);
   if (!last
-      || TREE_CODE (last) != COND_EXPR)
+      || gimple_code (last) != GIMPLE_COND)
     return NULL;
 
   exit = EDGE_SUCC (bb, 0);
@@ -536,21 +542,21 @@ ip_normal_pos (struct loop *loop)
    the increment should be inserted after *BSI.  */
 
 void
-standard_iv_increment_position (struct loop *loop, block_stmt_iterator *bsi,
+standard_iv_increment_position (struct loop *loop, gimple_stmt_iterator *bsi,
                                bool *insert_after)
 {
   basic_block bb = ip_normal_pos (loop), latch = ip_end_pos (loop);
-  tree last = last_stmt (latch);
+  gimple last = last_stmt (latch);
 
   if (!bb
-      || (last && TREE_CODE (last) != LABEL_EXPR))
+      || (last && gimple_code (last) != GIMPLE_LABEL))
     {
-      *bsi = bsi_last (latch);
+      *bsi = gsi_last_bb (latch);
       *insert_after = true;
     }
   else
     {
-      *bsi = bsi_last (bb);
+      *bsi = gsi_last_bb (bb);
       *insert_after = false;
     }
 }
@@ -584,7 +590,7 @@ copy_phi_node_args (unsigned first_new_block)
    after the loop has been duplicated.  */
 
 bool
-tree_duplicate_loop_to_header_edge (struct loop *loop, edge e,
+gimple_duplicate_loop_to_header_edge (struct loop *loop, edge e,
                                    unsigned int ndupl, sbitmap wont_exit,
                                    edge orig, VEC (edge, heap) **to_remove,
                                    int flags)
@@ -673,7 +679,7 @@ determine_exit_conditions (struct loop *loop, struct tree_niter_desc *desc,
                           tree *exit_base, tree *exit_step,
                           enum tree_code *exit_cmp, tree *exit_bound)
 {
-  tree stmts;
+  gimple_seq stmts;
   tree base = desc->control.base;
   tree step = desc->control.step;
   tree bound = desc->bound;
@@ -748,7 +754,7 @@ determine_exit_conditions (struct loop *loop, struct tree_niter_desc *desc,
 
   cond = force_gimple_operand (unshare_expr (cond), &stmts, false, NULL_TREE);
   if (stmts)
-    bsi_insert_on_edge_immediate (loop_preheader_edge (loop), stmts);
+    gsi_insert_seq_on_edge_immediate (loop_preheader_edge (loop), stmts);
   /* cond now may be a gimple comparison, which would be OK, but also any
      other gimple rhs (say a && b).  In this case we need to force it to
      operand.  */
@@ -756,16 +762,16 @@ determine_exit_conditions (struct loop *loop, struct tree_niter_desc *desc,
     {
       cond = force_gimple_operand (cond, &stmts, true, NULL_TREE);
       if (stmts)
-       bsi_insert_on_edge_immediate (loop_preheader_edge (loop), stmts);
+       gsi_insert_seq_on_edge_immediate (loop_preheader_edge (loop), stmts);
     }
   *enter_cond = cond;
 
   base = force_gimple_operand (unshare_expr (base), &stmts, true, NULL_TREE);
   if (stmts)
-    bsi_insert_on_edge_immediate (loop_preheader_edge (loop), stmts);
+    gsi_insert_seq_on_edge_immediate (loop_preheader_edge (loop), stmts);
   bound = force_gimple_operand (unshare_expr (bound), &stmts, true, NULL_TREE);
   if (stmts)
-    bsi_insert_on_edge_immediate (loop_preheader_edge (loop), stmts);
+    gsi_insert_seq_on_edge_immediate (loop_preheader_edge (loop), stmts);
 
   *exit_base = base;
   *exit_step = bigstep;
@@ -859,15 +865,18 @@ tree_transform_and_unroll_loop (struct loop *loop, unsigned factor,
                                transform_callback transform,
                                void *data)
 {
-  tree  exit_if, ctr_before, ctr_after;
+  gimple exit_if;
+  tree ctr_before, ctr_after;
   tree enter_main_cond, exit_base, exit_step, exit_bound;
   enum tree_code exit_cmp;
-  tree phi_old_loop, phi_new_loop, phi_rest, init, next, new_init, var;
+  gimple phi_old_loop, phi_new_loop, phi_rest;
+  gimple_stmt_iterator psi_old_loop, psi_new_loop;
+  tree init, next, new_init, var;
   struct loop *new_loop;
   basic_block rest, exit_bb;
   edge old_entry, new_entry, old_latch, precond_edge, new_exit;
   edge new_nonexit, e;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator bsi;
   use_operand_p op;
   bool ok;
   unsigned est_niter, prob_entry, scale_unrolled, scale_rest, freq_e, freq_h;
@@ -937,11 +946,12 @@ tree_transform_and_unroll_loop (struct loop *loop, unsigned factor,
                                  REG_BR_PROB_BASE,
                                  REG_BR_PROB_BASE - exit->probability);
 
-  bsi = bsi_last (exit_bb);
-  exit_if = build3 (COND_EXPR, void_type_node, boolean_true_node,
-                   NULL_TREE, NULL_TREE);
+  bsi = gsi_last_bb (exit_bb);
+  exit_if = gimple_build_cond (EQ_EXPR, integer_zero_node,
+                              integer_zero_node,
+                              NULL_TREE, NULL_TREE);
 
-  bsi_insert_after (&bsi, exit_if, BSI_NEW_STMT);
+  gsi_insert_after (&bsi, exit_if, GSI_NEW_STMT);
   new_exit = make_edge (exit_bb, rest, EDGE_FALSE_VALUE | irr);
   rescan_loop_exit (new_exit, true, false);
 
@@ -962,12 +972,14 @@ tree_transform_and_unroll_loop (struct loop *loop, unsigned factor,
   old_entry = loop_preheader_edge (loop);
   new_entry = loop_preheader_edge (new_loop);
   old_latch = loop_latch_edge (loop);
-  for (phi_old_loop = phi_nodes (loop->header),
-       phi_new_loop = phi_nodes (new_loop->header);
-       phi_old_loop;
-       phi_old_loop = PHI_CHAIN (phi_old_loop),
-       phi_new_loop = PHI_CHAIN (phi_new_loop))
+  for (psi_old_loop = gsi_start_phis (loop->header),
+       psi_new_loop = gsi_start_phis (new_loop->header);
+       !gsi_end_p (psi_old_loop);
+       gsi_next (&psi_old_loop), gsi_next (&psi_new_loop))
     {
+      phi_old_loop = gsi_stmt (psi_old_loop);
+      phi_new_loop = gsi_stmt (psi_new_loop);
+
       init = PHI_ARG_DEF_FROM_EDGE (phi_old_loop, old_entry);
       op = PHI_ARG_DEF_PTR_FROM_EDGE (phi_new_loop, new_entry);
       gcc_assert (operand_equal_for_phi_arg_p (init, USE_FROM_PTR (op)));
@@ -986,7 +998,7 @@ tree_transform_and_unroll_loop (struct loop *loop, unsigned factor,
          add_referenced_var (var);
        }
 
-      new_init = make_ssa_name (var, NULL_TREE);
+      new_init = make_ssa_name (var, NULL);
       phi_rest = create_phi_node (new_init, rest);
       SSA_NAME_DEF_STMT (new_init) = phi_rest;
 
@@ -1007,7 +1019,7 @@ tree_transform_and_unroll_loop (struct loop *loop, unsigned factor,
   sbitmap_ones (wont_exit);
   RESET_BIT (wont_exit, factor - 1);
 
-  ok = tree_duplicate_loop_to_header_edge
+  ok = gimple_duplicate_loop_to_header_edge
          (loop, loop_latch_edge (loop), factor - 1,
           wont_exit, new_exit, &to_remove, DLTHE_FLAG_UPDATE_FREQ);
   free (wont_exit);
@@ -1049,12 +1061,13 @@ tree_transform_and_unroll_loop (struct loop *loop, unsigned factor,
 
   /* Finally create the new counter for number of iterations and add the new
      exit instruction.  */
-  bsi = bsi_last (exit_bb);
-  exit_if = bsi_stmt (bsi);
+  bsi = gsi_last_bb (exit_bb);
+  exit_if = gsi_stmt (bsi);
   create_iv (exit_base, exit_step, NULL_TREE, loop,
             &bsi, false, &ctr_before, &ctr_after);
-  COND_EXPR_COND (exit_if) = build2 (exit_cmp, boolean_type_node, ctr_after,
-                                    exit_bound);
+  gimple_cond_set_code (exit_if, exit_cmp);
+  gimple_cond_set_lhs (exit_if, ctr_after);
+  gimple_cond_set_rhs (exit_if, exit_bound);
   update_stmt (exit_if);
 
 #ifdef ENABLE_CHECKING
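
The unrolling path above first emits a placeholder GIMPLE_COND and only later installs the real exit test through the gimple_cond setters.  A rough sketch of that pattern in isolation, with an invented helper name:

/* Sketch only: append a placeholder always-true exit test to BB, and
   later turn it into CTR <CMP> BOUND once the bound is known.  */

static gimple
emit_and_retarget_exit_cond (basic_block bb, enum tree_code cmp,
                             tree ctr, tree bound)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple exit_if;

  /* A GIMPLE_COND carries its own code and both operands, so the
     placeholder 0 == 0 is built directly, without a COND_EXPR tree.  */
  exit_if = gimple_build_cond (EQ_EXPR, integer_zero_node,
                               integer_zero_node, NULL_TREE, NULL_TREE);
  gsi_insert_after (&gsi, exit_if, GSI_NEW_STMT);

  /* ... transform the loop here ...  */

  gimple_cond_set_code (exit_if, cmp);
  gimple_cond_set_lhs (exit_if, ctr);
  gimple_cond_set_rhs (exit_if, bound);
  update_stmt (exit_if);

  return exit_if;
}
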
index 80b45c2..83baae7 100644
@@ -368,7 +368,8 @@ bound_difference (struct loop *loop, tree x, tree y, bounds *bnds)
   int cnt = 0;
   edge e;
   basic_block bb;
-  tree cond, c0, c1;
+  tree c0, c1;
+  gimple cond;
   enum tree_code cmp;
 
   /* Get rid of unnecessary casts, but preserve the value of
@@ -427,12 +428,10 @@ bound_difference (struct loop *loop, tree x, tree y, bounds *bnds)
       if (!(e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
        continue;
 
-      cond = COND_EXPR_COND (last_stmt (e->src));
-      if (!COMPARISON_CLASS_P (cond))
-       continue;
-      c0 = TREE_OPERAND (cond, 0);
-      cmp = TREE_CODE (cond);
-      c1 = TREE_OPERAND (cond, 1);
+      cond = last_stmt (e->src);
+      c0 = gimple_cond_lhs (cond);
+      cmp = gimple_cond_code (cond);
+      c1 = gimple_cond_rhs (cond);
 
       if (e->flags & EDGE_FALSE_VALUE)
        cmp = invert_tree_comparison (cmp, false);
@@ -1349,7 +1348,7 @@ simplify_replace_tree (tree expr, tree old, tree new_tree)
       || operand_equal_p (expr, old, 0))
     return unshare_expr (new_tree);
 
-  if (!EXPR_P (expr) && !GIMPLE_STMT_P (expr))
+  if (!EXPR_P (expr))
     return expr;
 
   n = TREE_OPERAND_LENGTH (expr);
@@ -1376,8 +1375,9 @@ tree
 expand_simple_operations (tree expr)
 {
   unsigned i, n;
-  tree ret = NULL_TREE, e, ee, stmt;
+  tree ret = NULL_TREE, e, ee, e1;
   enum tree_code code;
+  gimple stmt;
 
   if (expr == NULL_TREE)
     return expr;
@@ -1415,17 +1415,17 @@ expand_simple_operations (tree expr)
     return expr;
 
   stmt = SSA_NAME_DEF_STMT (expr);
-  if (TREE_CODE (stmt) == PHI_NODE)
+  if (gimple_code (stmt) == GIMPLE_PHI)
     {
       basic_block src, dest;
 
-      if (PHI_NUM_ARGS (stmt) != 1)
+      if (gimple_phi_num_args (stmt) != 1)
        return expr;
       e = PHI_ARG_DEF (stmt, 0);
 
       /* Avoid propagating through loop exit phi nodes, which
         could break loop-closed SSA form restrictions.  */
-      dest = bb_for_stmt (stmt);
+      dest = gimple_bb (stmt);
       src = single_pred (dest);
       if (TREE_CODE (e) == SSA_NAME
          && src->loop_father != dest->loop_father)
@@ -1433,24 +1433,44 @@ expand_simple_operations (tree expr)
 
       return expand_simple_operations (e);
     }
-  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+  if (gimple_code (stmt) != GIMPLE_ASSIGN)
     return expr;
 
-  e = GIMPLE_STMT_OPERAND (stmt, 1);
-  if (/* Casts are simple.  */
-      !CONVERT_EXPR_P (e)
-      /* Copies are simple.  */
-      && TREE_CODE (e) != SSA_NAME
-      /* Assignments of invariants are simple.  */
-      && !is_gimple_min_invariant (e)
+  e = gimple_assign_rhs1 (stmt);
+  code = gimple_assign_rhs_code (stmt);
+  if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
+    {
+      if (is_gimple_min_invariant (e))
+       return e;
+
+      if (code == SSA_NAME)
+       return expand_simple_operations (e);
+
+      return expr;
+    }
+
+  switch (code)
+    {
+    case NOP_EXPR:
+    case CONVERT_EXPR:
+      /* Casts are simple.  */
+      ee = expand_simple_operations (e);
+      return fold_build1 (code, TREE_TYPE (expr), ee);
+
+    case PLUS_EXPR:
+    case MINUS_EXPR:
+    case POINTER_PLUS_EXPR:
       /* And increments and decrements by a constant are simple.  */
-      && !((TREE_CODE (e) == PLUS_EXPR
-           || TREE_CODE (e) == MINUS_EXPR
-           || TREE_CODE (e) == POINTER_PLUS_EXPR)
-          && is_gimple_min_invariant (TREE_OPERAND (e, 1))))
-    return expr;
+      e1 = gimple_assign_rhs2 (stmt);
+      if (!is_gimple_min_invariant (e1))
+       return expr;
+
+      ee = expand_simple_operations (e);
+      return fold_build2 (code, TREE_TYPE (expr), ee, e1);
 
-  return expand_simple_operations (e);
+    default:
+      return expr;
+    }
 }
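
The rewritten expander dispatches on the rhs class of the defining assignment.  As a rough sketch, assuming the rhs classes GIMPLE_SINGLE_RHS, GIMPLE_UNARY_RHS and GIMPLE_BINARY_RHS from the tuples gimple.h (only the first appears literally in these hunks), the fold-back to a tree expression looks like:

/* Sketch only: rebuild a tree expression equivalent to the right-hand
   side of the GIMPLE_ASSIGN STMT.  */

static tree
assign_rhs_to_tree (gimple stmt)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));

  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_SINGLE_RHS:
      return gimple_assign_rhs1 (stmt);

    case GIMPLE_UNARY_RHS:
      return fold_build1 (code, type, gimple_assign_rhs1 (stmt));

    case GIMPLE_BINARY_RHS:
      return fold_build2 (code, type, gimple_assign_rhs1 (stmt),
                          gimple_assign_rhs2 (stmt));

    default:
      return NULL_TREE;
    }
}
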
 
 /* Tries to simplify EXPR using the condition COND.  Returns the simplified
@@ -1585,6 +1605,7 @@ simplify_using_initial_conditions (struct loop *loop, tree expr)
 {
   edge e;
   basic_block bb;
+  gimple stmt;
   tree cond;
   int cnt = 0;
 
@@ -1605,7 +1626,11 @@ simplify_using_initial_conditions (struct loop *loop, tree expr)
       if (!(e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
        continue;
 
-      cond = COND_EXPR_COND (last_stmt (e->src));
+      stmt = last_stmt (e->src);
+      cond = fold_build2 (gimple_cond_code (stmt),
+                         boolean_type_node,
+                         gimple_cond_lhs (stmt),
+                         gimple_cond_rhs (stmt));
       if (e->flags & EDGE_FALSE_VALUE)
        cond = invert_truthvalue (cond);
       expr = tree_simplify_using_condition (cond, expr);
@@ -1676,9 +1701,9 @@ bool
 loop_only_exit_p (const struct loop *loop, const_edge exit)
 {
   basic_block *body;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator bsi;
   unsigned i;
-  tree call;
+  gimple call;
 
   if (exit != single_exit (loop))
     return false;
@@ -1686,10 +1711,13 @@ loop_only_exit_p (const struct loop *loop, const_edge exit)
   body = get_loop_body (loop);
   for (i = 0; i < loop->num_nodes; i++)
     {
-      for (bsi = bsi_start (body[0]); !bsi_end_p (bsi); bsi_next (&bsi))
+      for (bsi = gsi_start_bb (body[i]); !gsi_end_p (bsi); gsi_next (&bsi))
        {
-         call = get_call_expr_in (bsi_stmt (bsi));
-         if (call && TREE_SIDE_EFFECTS (call))
+         call = gsi_stmt (bsi);
+         if (gimple_code (call) != GIMPLE_CALL)
+           continue;
+
+         if (gimple_has_side_effects (call))
            {
              free (body);
              return false;
@@ -1714,7 +1742,8 @@ number_of_iterations_exit (struct loop *loop, edge exit,
                           struct tree_niter_desc *niter,
                           bool warn)
 {
-  tree stmt, cond, type;
+  gimple stmt;
+  tree type;
   tree op0, op1;
   enum tree_code code;
   affine_iv iv0, iv1;
@@ -1724,15 +1753,14 @@ number_of_iterations_exit (struct loop *loop, edge exit,
 
   niter->assumptions = boolean_false_node;
   stmt = last_stmt (exit->src);
-  if (!stmt || TREE_CODE (stmt) != COND_EXPR)
+  if (!stmt || gimple_code (stmt) != GIMPLE_COND)
     return false;
 
   /* We want the condition for staying inside loop.  */
-  cond = COND_EXPR_COND (stmt);
+  code = gimple_cond_code (stmt);
   if (exit->flags & EDGE_TRUE_VALUE)
-    cond = invert_truthvalue (cond);
+    code = invert_tree_comparison (code, false);
 
-  code = TREE_CODE (cond);
   switch (code)
     {
     case GT_EXPR:
@@ -1746,8 +1774,8 @@ number_of_iterations_exit (struct loop *loop, edge exit,
       return false;
     }
   
-  op0 = TREE_OPERAND (cond, 0);
-  op1 = TREE_OPERAND (cond, 1);
+  op0 = gimple_cond_lhs (stmt);
+  op1 = gimple_cond_rhs (stmt);
   type = TREE_TYPE (op0);
 
   if (TREE_CODE (type) != INTEGER_TYPE
@@ -1805,7 +1833,7 @@ number_of_iterations_exit (struct loop *loop, edge exit,
   if (warn)
     {
       const char *wording;
-      location_t loc = EXPR_LOCATION (stmt);
+      location_t loc = gimple_location (stmt);
   
       /* We can provide a more specific warning if one of the operator is
         constant and the other advances by +1 or -1.  */
@@ -1915,36 +1943,43 @@ find_loop_niter (struct loop *loop, edge *exit)
    result by a chain of operations such that all but exactly one of their
    operands are constants.  */
 
-static tree
+static gimple
 chain_of_csts_start (struct loop *loop, tree x)
 {
-  tree stmt = SSA_NAME_DEF_STMT (x);
+  gimple stmt = SSA_NAME_DEF_STMT (x);
   tree use;
-  basic_block bb = bb_for_stmt (stmt);
+  basic_block bb = gimple_bb (stmt);
+  enum tree_code code;
 
   if (!bb
       || !flow_bb_inside_loop_p (loop, bb))
-    return NULL_TREE;
+    return NULL;
   
-  if (TREE_CODE (stmt) == PHI_NODE)
+  if (gimple_code (stmt) == GIMPLE_PHI)
     {
       if (bb == loop->header)
        return stmt;
 
-      return NULL_TREE;
+      return NULL;
     }
 
-  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
-    return NULL_TREE;
+  if (gimple_code (stmt) != GIMPLE_ASSIGN)
+    return NULL;
 
-  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
-    return NULL_TREE;
-  if (SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF) == NULL_DEF_OPERAND_P)
-    return NULL_TREE;
+  code = gimple_assign_rhs_code (stmt);
+  if (gimple_references_memory_p (stmt)
+      /* Before alias information is computed, operand scanning marks
+        statements that write memory volatile.  However, the statements
+        that only read memory are not marked, thus gimple_references_memory_p
+        returns false for them.  */
+      || TREE_CODE_CLASS (code) == tcc_reference
+      || TREE_CODE_CLASS (code) == tcc_declaration
+      || SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF) == NULL_DEF_OPERAND_P)
+    return NULL;
 
   use = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_USE);
   if (use == NULL_USE_OPERAND_P)
-    return NULL_TREE;
+    return NULL;
 
   return chain_of_csts_start (loop, use);
 }
@@ -1957,32 +1992,32 @@ chain_of_csts_start (struct loop *loop, tree x)
    * the value of the phi node in the next iteration can be derived from the
      value in the current iteration by a chain of operations with constants.
    
-   If such phi node exists, it is returned.  If X is a constant, X is returned
-   unchanged.  Otherwise NULL_TREE is returned.  */
+   If such phi node exists, it is returned, otherwise NULL is returned.  */
 
-static tree
+static gimple
 get_base_for (struct loop *loop, tree x)
 {
-  tree phi, init, next;
+  gimple phi;
+  tree init, next;
 
   if (is_gimple_min_invariant (x))
-    return x;
+    return NULL;
 
   phi = chain_of_csts_start (loop, x);
   if (!phi)
-    return NULL_TREE;
+    return NULL;
 
   init = PHI_ARG_DEF_FROM_EDGE (phi, loop_preheader_edge (loop));
   next = PHI_ARG_DEF_FROM_EDGE (phi, loop_latch_edge (loop));
 
   if (TREE_CODE (next) != SSA_NAME)
-    return NULL_TREE;
+    return NULL;
 
   if (!is_gimple_min_invariant (init))
-    return NULL_TREE;
+    return NULL;
 
   if (chain_of_csts_start (loop, next) != phi)
-    return NULL_TREE;
+    return NULL;
 
   return phi;
 }
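
The pattern above recurs throughout the branch: SSA_NAME_DEF_STMT now yields a gimple, PHI nodes are recognized with gimple_code () == GIMPLE_PHI instead of TREE_CODE () == PHI_NODE, and failure is signalled with NULL rather than NULL_TREE. An illustrative sketch of that dispatch (hypothetical helper, same header assumptions as the file):

   /* Sketch: return the defining PHI or assignment of SSA name X if it
      lies inside LOOP, or NULL otherwise.  */
   static gimple
   defining_phi_or_assign (struct loop *loop, tree x)
   {
     gimple stmt = SSA_NAME_DEF_STMT (x);  /* Now a gimple, not a tree.  */
     basic_block bb = gimple_bb (stmt);

     if (!bb || !flow_bb_inside_loop_p (loop, bb))
       return NULL;                        /* NULL replaces NULL_TREE.  */

     if (gimple_code (stmt) == GIMPLE_PHI || is_gimple_assign (stmt))
       return stmt;

     return NULL;
   }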
@@ -1998,9 +2033,8 @@ get_base_for (struct loop *loop, tree x)
 static tree
 get_val_for (tree x, tree base)
 {
-  tree stmt, nx, val;
-  use_operand_p op;
-  ssa_op_iter iter;
+  gimple stmt;
+  tree nx, val, retval, rhs1, rhs2;
 
   gcc_assert (is_gimple_min_invariant (base));
 
@@ -2008,24 +2042,44 @@ get_val_for (tree x, tree base)
     return base;
 
   stmt = SSA_NAME_DEF_STMT (x);
-  if (TREE_CODE (stmt) == PHI_NODE)
+  if (gimple_code (stmt) == GIMPLE_PHI)
     return base;
 
-  FOR_EACH_SSA_USE_OPERAND (op, stmt, iter, SSA_OP_USE)
-    {
-      nx = USE_FROM_PTR (op);
-      val = get_val_for (nx, base);
-      SET_USE (op, val);
-      val = fold (GIMPLE_STMT_OPERAND (stmt, 1));
-      SET_USE (op, nx);
-      /* only iterate loop once.  */
-      return val;
+  gcc_assert (is_gimple_assign (stmt));
+
+  /* STMT must be either an assignment of a single SSA name or an
+     expression involving an SSA name and a constant.  Try to fold that
+     expression using the value for the SSA name.  */
+  rhs1 = gimple_assign_rhs1 (stmt);
+  rhs2 = gimple_assign_rhs2 (stmt);
+  if (TREE_CODE (rhs1) == SSA_NAME)
+    nx = rhs1;
+  else if (rhs2 && TREE_CODE (rhs2) == SSA_NAME)
+    nx = rhs2;
+  else
+    gcc_unreachable ();
+
+  /* NX is now the SSA name for which we want to discover the base value.  */
+  val = get_val_for (nx, base);
+  if (rhs2)
+    {
+      /* If this is a binary expression, fold it.  If folding is
+        not possible, return a tree expression with the RHS of STMT.  */
+      rhs1 = (nx == rhs1) ? val : rhs1;
+      rhs2 = (nx == rhs2) ? val : rhs2;
+      retval = fold_binary (gimple_assign_rhs_code (stmt),
+                           gimple_expr_type (stmt), rhs1, rhs2);
+      if (retval == NULL_TREE)
+       retval = build2 (gimple_assign_rhs_code (stmt),
+                       gimple_expr_type (stmt), rhs1, rhs2);
     }
-
-  /* Should never reach here.  */
-  gcc_unreachable ();
+  else
+    retval = val;
+
+  return retval;
 }
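
Since an assignment's RHS is no longer a single tree that can be refolded in place, get_val_for now rebuilds the expression from the statement's operand slots. A sketch of that evaluation step, under the same assumption as the code above (the RHS is a single SSA name or an SSA name combined with a constant); the helper name is hypothetical:

   /* Sketch: re-fold the RHS of GIMPLE_ASSIGN STMT with operand OLD
      replaced by VAL.  */
   static tree
   refold_assign_rhs (gimple stmt, tree old, tree val)
   {
     tree rhs1 = gimple_assign_rhs1 (stmt);
     tree rhs2 = gimple_assign_rhs2 (stmt);
     tree type = gimple_expr_type (stmt);
     tree res;

     rhs1 = (rhs1 == old) ? val : rhs1;
     if (rhs2 == NULL_TREE)
       return rhs1;

     rhs2 = (rhs2 == old) ? val : rhs2;
     res = fold_binary (gimple_assign_rhs_code (stmt), type, rhs1, rhs2);
     if (res == NULL_TREE)
       /* fold_binary may fail; fall back to an unfolded tree.  */
       res = build2 (gimple_assign_rhs_code (stmt), type, rhs1, rhs2);
     return res;
   }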
 
+
 /* Tries to count the number of iterations of LOOP till it exits by EXIT
    by brute force -- i.e. by determining the value of the operands of the
    condition at EXIT in first few iterations of the loop (assuming that
@@ -2036,20 +2090,20 @@ get_val_for (tree x, tree base)
 tree
 loop_niter_by_eval (struct loop *loop, edge exit)
 {
-  tree cond, cnd, acnd;
-  tree op[2], val[2], next[2], aval[2], phi[2];
+  tree acnd;
+  tree op[2], val[2], next[2], aval[2];
+  gimple phi, cond;
   unsigned i, j;
   enum tree_code cmp;
 
   cond = last_stmt (exit->src);
-  if (!cond || TREE_CODE (cond) != COND_EXPR)
+  if (!cond || gimple_code (cond) != GIMPLE_COND)
     return chrec_dont_know;
 
-  cnd = COND_EXPR_COND (cond);
+  cmp = gimple_cond_code (cond);
   if (exit->flags & EDGE_TRUE_VALUE)
-    cnd = invert_truthvalue (cnd);
+    cmp = invert_tree_comparison (cmp, false);
 
-  cmp = TREE_CODE (cnd);
   switch (cmp)
     {
     case EQ_EXPR:
@@ -2058,8 +2112,8 @@ loop_niter_by_eval (struct loop *loop, edge exit)
     case GE_EXPR:
     case LT_EXPR:
     case LE_EXPR:
-      for (j = 0; j < 2; j++)
-       op[j] = TREE_OPERAND (cnd, j);
+      op[0] = gimple_cond_lhs (cond);
+      op[1] = gimple_cond_rhs (cond);
       break;
 
     default:
@@ -2068,23 +2122,19 @@ loop_niter_by_eval (struct loop *loop, edge exit)
 
   for (j = 0; j < 2; j++)
     {
-      phi[j] = get_base_for (loop, op[j]);
-      if (!phi[j])
-       return chrec_dont_know;
-    }
-
-  for (j = 0; j < 2; j++)
-    {
-      if (TREE_CODE (phi[j]) == PHI_NODE)
+      if (is_gimple_min_invariant (op[j]))
        {
-         val[j] = PHI_ARG_DEF_FROM_EDGE (phi[j], loop_preheader_edge (loop));
-         next[j] = PHI_ARG_DEF_FROM_EDGE (phi[j], loop_latch_edge (loop));
+         val[j] = op[j];
+         next[j] = NULL_TREE;
+         op[j] = NULL_TREE;
        }
       else
        {
-         val[j] = phi[j];
-         next[j] = NULL_TREE;
-         op[j] = NULL_TREE;
+         phi = get_base_for (loop, op[j]);
+         if (!phi)
+           return chrec_dont_know;
+         val[j] = PHI_ARG_DEF_FROM_EDGE (phi, loop_preheader_edge (loop));
+         next[j] = PHI_ARG_DEF_FROM_EDGE (phi, loop_latch_edge (loop));
        }
     }
 
@@ -2166,17 +2216,48 @@ find_loop_niter_by_eval (struct loop *loop, edge *exit)
 
 */
 
+static double_int derive_constant_upper_bound_ops (tree, tree,
+                                                  enum tree_code, tree);
+
+/* Returns a constant upper bound on the value of the right-hand side of
+   an assignment statement STMT.  */
+
+static double_int
+derive_constant_upper_bound_assign (gimple stmt)
+{
+  enum tree_code code = gimple_assign_rhs_code (stmt);
+  tree op0 = gimple_assign_rhs1 (stmt);
+  tree op1 = gimple_assign_rhs2 (stmt);
+
+  return derive_constant_upper_bound_ops (TREE_TYPE (gimple_assign_lhs (stmt)),
+                                         op0, code, op1);
+}
+
 /* Returns a constant upper bound on the value of expression VAL.  VAL
    is considered to be unsigned.  If its type is signed, its value must
    be nonnegative.  */
  
 static double_int
-derive_constant_upper_bound (const_tree val)
+derive_constant_upper_bound (tree val)
+{
+  enum tree_code code;
+  tree op0, op1;
+
+  extract_ops_from_tree (val, &code, &op0, &op1);
+  return derive_constant_upper_bound_ops (TREE_TYPE (val), op0, code, op1);
+}
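
Splitting the worker out as derive_constant_upper_bound_ops lets the SSA_NAME case below recurse through a defining statement's operand slots without rebuilding a GENERIC expression first. A hedged sketch of that use, assuming an integral-typed name (hypothetical helper):

   /* Sketch: bound SSA name NAME through its defining assignment, falling
      back to the type maximum otherwise.  */
   static double_int
   upper_bound_of_ssa_name (tree name)
   {
     gimple def = SSA_NAME_DEF_STMT (name);

     if (gimple_code (def) != GIMPLE_ASSIGN
         || gimple_assign_lhs (def) != name)
       return tree_to_double_int (TYPE_MAX_VALUE (TREE_TYPE (name)));

     return derive_constant_upper_bound_assign (def);
   }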
+
+/* Returns a constant upper bound on the value of expression OP0 CODE OP1,
+   whose type is TYPE.  The expression is considered to be unsigned.  If
+   its type is signed, its value must be nonnegative.  */
+static double_int
+derive_constant_upper_bound_ops (tree type, tree op0,
+                                enum tree_code code, tree op1)
 {
-  tree type = TREE_TYPE (val);
-  tree op0, op1, subtype, maxt;
+  tree subtype, maxt;
   double_int bnd, max, mmax, cst;
-  tree stmt;
+  gimple stmt;
 
   if (INTEGRAL_TYPE_P (type))
     maxt = TYPE_MAX_VALUE (type);
@@ -2185,13 +2266,12 @@ derive_constant_upper_bound (const_tree val)
 
   max = tree_to_double_int (maxt);
 
-  switch (TREE_CODE (val))
+  switch (code)
     {
     case INTEGER_CST:
-      return tree_to_double_int (val);
+      return tree_to_double_int (op0);
 
     CASE_CONVERT:
-      op0 = TREE_OPERAND (val, 0);
       subtype = TREE_TYPE (op0);
       if (!TYPE_UNSIGNED (subtype)
          /* If TYPE is also signed, the fact that VAL is nonnegative implies
@@ -2219,9 +2299,6 @@ derive_constant_upper_bound (const_tree val)
     case PLUS_EXPR:
     case POINTER_PLUS_EXPR:
     case MINUS_EXPR:
-      op0 = TREE_OPERAND (val, 0);
-      op1 = TREE_OPERAND (val, 1);
-
       if (TREE_CODE (op1) != INTEGER_CST
          || !tree_expr_nonnegative_p (op0))
        return max;
@@ -2231,7 +2308,7 @@ derive_constant_upper_bound (const_tree val)
         of the signedness of the type.  */
       cst = tree_to_double_int (op1);
       cst = double_int_sext (cst, TYPE_PRECISION (type));
-      if (TREE_CODE (val) == PLUS_EXPR)
+      if (code != MINUS_EXPR)
        cst = double_int_neg (cst);
 
       bnd = derive_constant_upper_bound (op0);
@@ -2285,8 +2362,6 @@ derive_constant_upper_bound (const_tree val)
 
     case FLOOR_DIV_EXPR:
     case EXACT_DIV_EXPR:
-      op0 = TREE_OPERAND (val, 0);
-      op1 = TREE_OPERAND (val, 1);
       if (TREE_CODE (op1) != INTEGER_CST
          || tree_int_cst_sign_bit (op1))
        return max;
@@ -2295,18 +2370,17 @@ derive_constant_upper_bound (const_tree val)
       return double_int_udiv (bnd, tree_to_double_int (op1), FLOOR_DIV_EXPR);
 
     case BIT_AND_EXPR:
-      op1 = TREE_OPERAND (val, 1);
       if (TREE_CODE (op1) != INTEGER_CST
          || tree_int_cst_sign_bit (op1))
        return max;
       return tree_to_double_int (op1);
 
     case SSA_NAME:
-      stmt = SSA_NAME_DEF_STMT (val);
-      if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
-         || GIMPLE_STMT_OPERAND (stmt, 0) != val)
+      stmt = SSA_NAME_DEF_STMT (op0);
+      if (gimple_code (stmt) != GIMPLE_ASSIGN
+         || gimple_assign_lhs (stmt) != op0)
        return max;
-      return derive_constant_upper_bound (GIMPLE_STMT_OPERAND (stmt, 1));
+      return derive_constant_upper_bound_assign (stmt);
 
     default: 
       return max;
@@ -2349,7 +2423,7 @@ record_niter_bound (struct loop *loop, double_int i_bound, bool realistic,
 
 static void
 record_estimate (struct loop *loop, tree bound, double_int i_bound,
-                tree at_stmt, bool is_exit, bool realistic, bool upper)
+                gimple at_stmt, bool is_exit, bool realistic, bool upper)
 {
   double_int delta;
   edge exit;
@@ -2357,7 +2431,7 @@ record_estimate (struct loop *loop, tree bound, double_int i_bound,
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
       fprintf (dump_file, "Statement %s", is_exit ? "(exit)" : "");
-      print_generic_expr (dump_file, at_stmt, TDF_SLIM);
+      print_gimple_stmt (dump_file, at_stmt, 0, TDF_SLIM);
       fprintf (dump_file, " is %sexecuted at most ",
               upper ? "" : "probably ");
       print_generic_expr (dump_file, bound, TDF_SLIM);
@@ -2395,7 +2469,7 @@ record_estimate (struct loop *loop, tree bound, double_int i_bound,
   if (is_exit
       || (exit != NULL
          && dominated_by_p (CDI_DOMINATORS,
-                            exit->src, bb_for_stmt (at_stmt))))
+                            exit->src, gimple_bb (at_stmt))))
     delta = double_int_one;
   else
     delta = double_int_two;
@@ -2415,7 +2489,7 @@ record_estimate (struct loop *loop, tree bound, double_int i_bound,
    UPPER is true if we are sure the induction variable does not wrap.  */
 
 static void
-record_nonwrapping_iv (struct loop *loop, tree base, tree step, tree stmt,
+record_nonwrapping_iv (struct loop *loop, tree base, tree step, gimple stmt,
                       tree low, tree high, bool realistic, bool upper)
 {
   tree niter_bound, extreme, delta;
@@ -2434,7 +2508,7 @@ record_nonwrapping_iv (struct loop *loop, tree base, tree step, tree stmt,
       fprintf (dump_file, " + ");
       print_generic_expr (dump_file, step, TDF_SLIM);
       fprintf (dump_file, " * iteration does not wrap in statement ");
-      print_generic_expr (dump_file, stmt, TDF_SLIM);
+      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
       fprintf (dump_file, " in loop %d.\n", loop->num);
     }
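
Dump output now distinguishes the two worlds: operands and bounds are still trees and go through print_generic_expr, while whole statements go through print_gimple_stmt with an explicit indentation argument. A small sketch of the combined idiom used above (hypothetical helper):

   /* Sketch: dump STMT and the tree BOUND recorded for it.  */
   static void
   dump_bound_stmt (FILE *file, gimple stmt, tree bound)
   {
     fprintf (file, "Statement ");
     print_gimple_stmt (file, stmt, 0, TDF_SLIM);
     fprintf (file, " bounded by ");
     print_generic_expr (file, bound, TDF_SLIM);
     fprintf (file, "\n");
   }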
 
@@ -2515,7 +2589,7 @@ array_at_struct_end_p (tree ref)
 struct ilb_data
 {
   struct loop *loop;
-  tree stmt;
+  gimple stmt;
   bool reliable;
 };
 
@@ -2602,7 +2676,7 @@ idx_infer_loop_bounds (tree base, tree *idx, void *dta)
    STMT is guaranteed to be executed in every iteration of LOOP.*/
 
 static void
-infer_loop_bounds_from_ref (struct loop *loop, tree stmt, tree ref,
+infer_loop_bounds_from_ref (struct loop *loop, gimple stmt, tree ref,
                            bool reliable)
 {
   struct ilb_data data;
@@ -2618,14 +2692,12 @@ infer_loop_bounds_from_ref (struct loop *loop, tree stmt, tree ref,
    executed in every iteration of LOOP.  */
 
 static void
-infer_loop_bounds_from_array (struct loop *loop, tree stmt, bool reliable)
+infer_loop_bounds_from_array (struct loop *loop, gimple stmt, bool reliable)
 {
-  tree call;
-
-  if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
+  if (is_gimple_assign (stmt))
     {
-      tree op0 = GIMPLE_STMT_OPERAND (stmt, 0);
-      tree op1 = GIMPLE_STMT_OPERAND (stmt, 1);
+      tree op0 = gimple_assign_lhs (stmt);
+      tree op1 = gimple_assign_rhs1 (stmt);
 
       /* For each memory access, analyze its access function
         and record a bound on the loop iteration domain.  */
@@ -2635,17 +2707,21 @@ infer_loop_bounds_from_array (struct loop *loop, tree stmt, bool reliable)
       if (REFERENCE_CLASS_P (op1))
        infer_loop_bounds_from_ref (loop, stmt, op1, reliable);
     }
-  
-  
-  call = get_call_expr_in (stmt);
-  if (call)
+  else if (is_gimple_call (stmt))
     {
-      tree arg;
-      call_expr_arg_iterator iter;
+      tree arg, lhs;
+      unsigned i, n = gimple_call_num_args (stmt);
 
-      FOR_EACH_CALL_EXPR_ARG (arg, iter, call)
-       if (REFERENCE_CLASS_P (arg))
-         infer_loop_bounds_from_ref (loop, stmt, arg, reliable);
+      lhs = gimple_call_lhs (stmt);
+      if (lhs && REFERENCE_CLASS_P (lhs))
+       infer_loop_bounds_from_ref (loop, stmt, lhs, reliable);
+
+      for (i = 0; i < n; i++)
+       {
+         arg = gimple_call_arg (stmt, i);
+         if (REFERENCE_CLASS_P (arg))
+           infer_loop_bounds_from_ref (loop, stmt, arg, reliable);
+       }
     }
 }
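
The FOR_EACH_CALL_EXPR_ARG walk over an embedded CALL_EXPR is replaced by indexed access on the call statement itself, with the LHS reached separately through gimple_call_lhs. A compact sketch of the new argument walk (hypothetical helper, kept side-effect free for clarity):

   /* Sketch: count the memory references appearing in CALL's LHS and
      arguments.  */
   static unsigned
   count_call_memory_refs (gimple call)
   {
     unsigned i, n = gimple_call_num_args (call), count = 0;
     tree lhs = gimple_call_lhs (call);

     if (lhs && REFERENCE_CLASS_P (lhs))
       count++;
     for (i = 0; i < n; i++)
       if (REFERENCE_CLASS_P (gimple_call_arg (call, i)))
         count++;
     return count;
   }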
 
@@ -2653,14 +2729,14 @@ infer_loop_bounds_from_array (struct loop *loop, tree stmt, bool reliable)
    that signed arithmetics in STMT does not overflow.  */
 
 static void
-infer_loop_bounds_from_signedness (struct loop *loop, tree stmt)
+infer_loop_bounds_from_signedness (struct loop *loop, gimple stmt)
 {
   tree def, base, step, scev, type, low, high;
 
-  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+  if (gimple_code (stmt) != GIMPLE_ASSIGN)
     return;
 
-  def = GIMPLE_STMT_OPERAND (stmt, 0);
+  def = gimple_assign_lhs (stmt);
 
   if (TREE_CODE (def) != SSA_NAME)
     return;
@@ -2703,7 +2779,7 @@ infer_loop_bounds_from_undefined (struct loop *loop)
 {
   unsigned i;
   basic_block *bbs;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator bsi;
   basic_block bb;
   bool reliable;
   
@@ -2718,9 +2794,9 @@ infer_loop_bounds_from_undefined (struct loop *loop)
         # of iterations of the loop.  However, we can use it as a guess.  */
       reliable = dominated_by_p (CDI_DOMINATORS, loop->latch, bb);
 
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        {
-         tree stmt = bsi_stmt (bsi);
+         gimple stmt = gsi_stmt (bsi);
 
          infer_loop_bounds_from_array (loop, stmt, reliable);
 
@@ -2830,9 +2906,9 @@ estimate_numbers_of_iterations (void)
 /* Returns true if statement S1 dominates statement S2.  */
 
 bool
-stmt_dominates_stmt_p (tree s1, tree s2)
+stmt_dominates_stmt_p (gimple s1, gimple s2)
 {
-  basic_block bb1 = bb_for_stmt (s1), bb2 = bb_for_stmt (s2);
+  basic_block bb1 = gimple_bb (s1), bb2 = gimple_bb (s2);
 
   if (!bb1
       || s1 == s2)
@@ -2840,10 +2916,10 @@ stmt_dominates_stmt_p (tree s1, tree s2)
 
   if (bb1 == bb2)
     {
-      block_stmt_iterator bsi;
+      gimple_stmt_iterator bsi;
 
-      for (bsi = bsi_start (bb1); bsi_stmt (bsi) != s2; bsi_next (&bsi))
-       if (bsi_stmt (bsi) == s1)
+      for (bsi = gsi_start_bb (bb1); gsi_stmt (bsi) != s2; gsi_next (&bsi))
+       if (gsi_stmt (bsi) == s1)
          return true;
 
       return false;
@@ -2859,7 +2935,7 @@ stmt_dominates_stmt_p (tree s1, tree s2)
    statements in the loop.  */
 
 static bool
-n_of_executions_at_most (tree stmt,
+n_of_executions_at_most (gimple stmt,
                         struct nb_iter_bound *niter_bound, 
                         tree niter)
 {
@@ -2900,7 +2976,7 @@ n_of_executions_at_most (tree stmt,
   else
     {
       if (!stmt
-         || (bb_for_stmt (stmt) != bb_for_stmt (niter_bound->stmt)
+         || (gimple_bb (stmt) != gimple_bb (niter_bound->stmt)
              && !stmt_dominates_stmt_p (niter_bound->stmt, stmt)))
        {
          bound = double_int_add (bound, double_int_one);
@@ -2943,7 +3019,7 @@ nowrap_type_p (tree type)
 
 bool
 scev_probably_wraps_p (tree base, tree step, 
-                      tree at_stmt, struct loop *loop,
+                      gimple at_stmt, struct loop *loop,
                       bool use_overflow_semantics)
 {
   struct nb_iter_bound *bound;
index 14044c4..02b4d73 100644
@@ -201,7 +201,7 @@ struct mem_ref_group
 
 struct mem_ref
 {
-  tree stmt;                   /* Statement in that the reference appears.  */
+  gimple stmt;                 /* Statement in which the reference appears.  */
   tree mem;                    /* The reference.  */
   HOST_WIDE_INT delta;         /* Constant offset of the reference.  */
   struct mem_ref_group *group; /* The group of references it belongs to.  */
@@ -278,7 +278,7 @@ find_or_create_group (struct mem_ref_group **groups, tree base,
    WRITE_P.  The reference occurs in statement STMT.  */
 
 static void
-record_ref (struct mem_ref_group *group, tree stmt, tree mem,
+record_ref (struct mem_ref_group *group, gimple stmt, tree mem,
            HOST_WIDE_INT delta, bool write_p)
 {
   struct mem_ref **aref;
@@ -344,7 +344,7 @@ release_mem_refs (struct mem_ref_group *groups)
 struct ar_data
 {
   struct loop *loop;                   /* Loop of the reference.  */
-  tree stmt;                           /* Statement of the reference.  */
+  gimple stmt;                         /* Statement of the reference.  */
   HOST_WIDE_INT *step;                 /* Step of the memory reference.  */
   HOST_WIDE_INT *delta;                        /* Offset of the memory reference.  */
 };
@@ -411,7 +411,7 @@ idx_analyze_ref (tree base, tree *index, void *data)
 static bool
 analyze_ref (struct loop *loop, tree *ref_p, tree *base,
             HOST_WIDE_INT *step, HOST_WIDE_INT *delta,
-            tree stmt)
+            gimple stmt)
 {
   struct ar_data ar_data;
   tree off;
@@ -451,7 +451,7 @@ analyze_ref (struct loop *loop, tree *ref_p, tree *base,
 
 static bool
 gather_memory_references_ref (struct loop *loop, struct mem_ref_group **refs,
-                             tree ref, bool write_p, tree stmt)
+                             tree ref, bool write_p, gimple stmt)
 {
   tree base;
   HOST_WIDE_INT step, delta;
@@ -480,8 +480,9 @@ gather_memory_references (struct loop *loop, bool *no_other_refs)
   basic_block *body = get_loop_body_in_dom_order (loop);
   basic_block bb;
   unsigned i;
-  block_stmt_iterator bsi;
-  tree stmt, lhs, rhs, call;
+  gimple_stmt_iterator bsi;
+  gimple stmt;
+  tree lhs, rhs;
   struct mem_ref_group *refs = NULL;
 
   *no_other_refs = true;
@@ -494,22 +495,21 @@ gather_memory_references (struct loop *loop, bool *no_other_refs)
       if (bb->loop_father != loop)
        continue;
 
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        {
-         stmt = bsi_stmt (bsi);
-         call = get_call_expr_in (stmt);
-         if (call && !(call_expr_flags (call) & ECF_CONST))
-           *no_other_refs = false;
+         stmt = gsi_stmt (bsi);
 
-         if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+         if (gimple_code (stmt) != GIMPLE_ASSIGN)
            {
-             if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
+             if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS)
+                 || (is_gimple_call (stmt)
+                     && !(gimple_call_flags (stmt) & ECF_CONST)))
                *no_other_refs = false;
              continue;
            }
 
-         lhs = GIMPLE_STMT_OPERAND (stmt, 0);
-         rhs = GIMPLE_STMT_OPERAND (stmt, 1);
+         lhs = gimple_assign_lhs (stmt);
+         rhs = gimple_assign_rhs1 (stmt);
 
          if (REFERENCE_CLASS_P (rhs))
            *no_other_refs &= gather_memory_references_ref (loop, &refs,
@@ -869,8 +869,9 @@ static void
 issue_prefetch_ref (struct mem_ref *ref, unsigned unroll_factor, unsigned ahead)
 {
   HOST_WIDE_INT delta;
-  tree addr, addr_base, prefetch, write_p, local;
-  block_stmt_iterator bsi;
+  tree addr, addr_base, write_p, local;
+  gimple prefetch;
+  gimple_stmt_iterator bsi;
   unsigned n_prefetches, ap;
   bool nontemporal = ref->reuse_distance >= L2_CACHE_SIZE_BYTES;
 
@@ -879,13 +880,13 @@ issue_prefetch_ref (struct mem_ref *ref, unsigned unroll_factor, unsigned ahead)
             nontemporal ? " nontemporal" : "",
             (void *) ref);
 
-  bsi = bsi_for_stmt (ref->stmt);
+  bsi = gsi_for_stmt (ref->stmt);
 
   n_prefetches = ((unroll_factor + ref->prefetch_mod - 1)
                  / ref->prefetch_mod);
   addr_base = build_fold_addr_expr_with_type (ref->mem, ptr_type_node);
-  addr_base = force_gimple_operand_bsi (&bsi, unshare_expr (addr_base),
-                                       true, NULL, true, BSI_SAME_STMT);
+  addr_base = force_gimple_operand_gsi (&bsi, unshare_expr (addr_base),
+                                       true, NULL, true, GSI_SAME_STMT);
   write_p = ref->write_p ? integer_one_node : integer_zero_node;
   local = build_int_cst (integer_type_node, nontemporal ? 0 : 3);
 
@@ -895,13 +896,13 @@ issue_prefetch_ref (struct mem_ref *ref, unsigned unroll_factor, unsigned ahead)
       delta = (ahead + ap * ref->prefetch_mod) * ref->group->step;
       addr = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
                          addr_base, size_int (delta));
-      addr = force_gimple_operand_bsi (&bsi, unshare_expr (addr), true, NULL,
-                                      true, BSI_SAME_STMT);
+      addr = force_gimple_operand_gsi (&bsi, unshare_expr (addr), true, NULL,
+                                      true, GSI_SAME_STMT);
 
       /* Create the prefetch instruction.  */
-      prefetch = build_call_expr (built_in_decls[BUILT_IN_PREFETCH],
-                                 3, addr, write_p, local);
-      bsi_insert_before (&bsi, prefetch, BSI_SAME_STMT);
+      prefetch = gimple_build_call (built_in_decls[BUILT_IN_PREFETCH],
+                                   3, addr, write_p, local);
+      gsi_insert_before (&bsi, prefetch, GSI_SAME_STMT);
     }
 }
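
Prefetch emission now builds a call tuple directly instead of a CALL_EXPR wrapped in a statement, and all insertion goes through the gsi_* API. An illustrative sketch of the core of the loop above, reduced to a single prefetch (hypothetical helper, locality hard-wired to 3 as in the non-temporal-free case):

   /* Sketch: emit one __builtin_prefetch for ADDR before STMT.  */
   static void
   emit_prefetch_before (gimple stmt, tree addr, bool write_p)
   {
     gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
     tree w = write_p ? integer_one_node : integer_zero_node;
     tree locality = build_int_cst (integer_type_node, 3);
     gimple call;

     addr = force_gimple_operand_gsi (&gsi, unshare_expr (addr), true, NULL,
                                      true, GSI_SAME_STMT);
     call = gimple_build_call (built_in_decls[BUILT_IN_PREFETCH],
                               3, addr, w, locality);
     gsi_insert_before (&gsi, call, GSI_SAME_STMT);
   }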
 
@@ -960,7 +961,7 @@ mark_nontemporal_store (struct mem_ref *ref)
     fprintf (dump_file, "Marked reference %p as a nontemporal store.\n",
             (void *) ref);
 
-  MOVE_NONTEMPORAL (ref->stmt) = true;
+  gimple_assign_set_nontemporal_move (ref->stmt, true);
   ref->storent_p = true;
 
   return true;
@@ -973,22 +974,22 @@ emit_mfence_after_loop (struct loop *loop)
 {
   VEC (edge, heap) *exits = get_loop_exit_edges (loop);
   edge exit;
-  tree call;
-  block_stmt_iterator bsi;
+  gimple call;
+  gimple_stmt_iterator bsi;
   unsigned i;
 
   for (i = 0; VEC_iterate (edge, exits, i, exit); i++)
     {
-      call = build_function_call_expr (FENCE_FOLLOWING_MOVNT, NULL_TREE);
+      call = gimple_build_call (FENCE_FOLLOWING_MOVNT, 0);
 
       if (!single_pred_p (exit->dest)
          /* If possible, we prefer not to insert the fence on other paths
             in cfg.  */
          && !(exit->flags & EDGE_ABNORMAL))
        split_loop_exit_edge (exit);
-      bsi = bsi_after_labels (exit->dest);
+      bsi = gsi_after_labels (exit->dest);
 
-      bsi_insert_before (&bsi, call, BSI_NEW_STMT);
+      gsi_insert_before (&bsi, call, GSI_NEW_STMT);
       mark_virtual_ops_for_renaming (call);
     }
 
index b63c209..8ece4ac 100644
@@ -103,26 +103,29 @@ tree_ssa_unswitch_loops (void)
 static tree
 tree_may_unswitch_on (basic_block bb, struct loop *loop)
 {
-  tree stmt, def, cond, use;
+  gimple stmt, def;
+  tree cond, use;
   basic_block def_bb;
   ssa_op_iter iter;
 
   /* BB must end in a simple conditional jump.  */
   stmt = last_stmt (bb);
-  if (!stmt || TREE_CODE (stmt) != COND_EXPR)
+  if (!stmt || gimple_code (stmt) != GIMPLE_COND)
     return NULL_TREE;
 
   /* Condition must be invariant.  */
   FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
     {
       def = SSA_NAME_DEF_STMT (use);
-      def_bb = bb_for_stmt (def);
+      def_bb = gimple_bb (def);
       if (def_bb
          && flow_bb_inside_loop_p (loop, def_bb))
        return NULL_TREE;
     }
 
-  cond = COND_EXPR_COND (stmt);
+  cond = fold_build2 (gimple_cond_code (stmt), boolean_type_node,
+                     gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
+
   /* To keep the things simple, we do not directly remove the conditions,
      but just replace tests with 0/1.  Prevent the infinite loop where we
      would unswitch again on such a condition.  */
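
Because a GIMPLE_COND carries no COND_EXPR_COND tree, the unswitching code rebuilds one with fold_build2 when it needs a tree (as above), and compares piecewise when it already has both forms, as in simplify_using_entry_checks below. Two hypothetical helpers sketching those idioms:

   /* Sketch: materialize STMT's condition as a boolean tree.  */
   static tree
   cond_as_tree (gimple stmt)
   {
     return fold_build2 (gimple_cond_code (stmt), boolean_type_node,
                         gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
   }

   /* Sketch: does GIMPLE_COND STMT test the same comparison as COND?  */
   static bool
   gimple_cond_matches_tree_p (gimple stmt, tree cond)
   {
     return (gimple_cond_code (stmt) == TREE_CODE (cond)
             && operand_equal_p (gimple_cond_lhs (stmt),
                                 TREE_OPERAND (cond, 0), 0)
             && operand_equal_p (gimple_cond_rhs (stmt),
                                 TREE_OPERAND (cond, 1), 0));
   }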
@@ -140,14 +143,18 @@ static tree
 simplify_using_entry_checks (struct loop *loop, tree cond)
 {
   edge e = loop_preheader_edge (loop);
-  tree stmt;
+  gimple stmt;
 
   while (1)
     {
       stmt = last_stmt (e->src);
       if (stmt
-         && TREE_CODE (stmt) == COND_EXPR
-         && operand_equal_p (COND_EXPR_COND (stmt), cond, 0))
+         && gimple_code (stmt) == GIMPLE_COND
+         && gimple_cond_code (stmt) == TREE_CODE (cond)
+         && operand_equal_p (gimple_cond_lhs (stmt),
+                             TREE_OPERAND (cond, 0), 0)
+         && operand_equal_p (gimple_cond_rhs (stmt),
+                             TREE_OPERAND (cond, 1), 0))
        return (e->flags & EDGE_TRUE_VALUE
                ? boolean_true_node
                : boolean_false_node);
@@ -171,7 +178,8 @@ tree_unswitch_single_loop (struct loop *loop, int num)
   basic_block *bbs;
   struct loop *nloop;
   unsigned i;
-  tree cond = NULL_TREE, stmt;
+  tree cond = NULL_TREE;
+  gimple stmt;
   bool changed = false;
 
   /* Do not unswitch too much.  */
@@ -220,13 +228,13 @@ tree_unswitch_single_loop (struct loop *loop, int num)
       if (integer_nonzerop (cond))
        {
          /* Remove false path.  */
-         COND_EXPR_COND (stmt) = boolean_true_node;
+         gimple_cond_set_condition_from_tree (stmt, boolean_true_node);
          changed = true;
        }
       else if (integer_zerop (cond))
        {
          /* Remove true path.  */
-         COND_EXPR_COND (stmt) = boolean_false_node;
+         gimple_cond_set_condition_from_tree (stmt, boolean_false_node);
          changed = true;
        }
       else
index 52f5a7f..ec3782a 100644
@@ -37,16 +37,6 @@ along with GCC; see the file COPYING3.  If not see
 #include "tree-inline.h"
 #include "tree-scalar-evolution.h"
 
-/* Initializes the loop structures.  */
-
-static void
-tree_loop_optimizer_init (void)
-{
-  loop_optimizer_init (LOOPS_NORMAL
-                      | LOOPS_HAVE_RECORDED_EXITS);
-  rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
-}
-
 /* The loop superpass.  */
 
 static bool
@@ -79,7 +69,10 @@ struct gimple_opt_pass pass_tree_loop =
 static unsigned int
 tree_ssa_loop_init (void)
 {
-  tree_loop_optimizer_init ();
+  loop_optimizer_init (LOOPS_NORMAL
+                      | LOOPS_HAVE_RECORDED_EXITS);
+  rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
+
   if (number_of_loops () <= 1)
     return 0;
 
index 49fd170..dfc00bc 100644
@@ -110,9 +110,9 @@ struct occurrence {
      inserted in BB.  */
   tree recip_def;
 
-  /* If non-NULL, the GIMPLE_MODIFY_STMT for a reciprocal computation that
+  /* If non-NULL, the GIMPLE_ASSIGN for a reciprocal computation that
      was inserted in BB.  */
-  tree recip_def_stmt;
+  gimple recip_def_stmt;
 
   /* Pointer to a list of "struct occurrence"s for blocks dominated
      by BB.  */
@@ -271,15 +271,15 @@ compute_merit (struct occurrence *occ)
 
 /* Return whether USE_STMT is a floating-point division by DEF.  */
 static inline bool
-is_division_by (tree use_stmt, tree def)
+is_division_by (gimple use_stmt, tree def)
 {
-  return TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
-        && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == RDIV_EXPR
-        && TREE_OPERAND (GIMPLE_STMT_OPERAND (use_stmt, 1), 1) == def
+  return is_gimple_assign (use_stmt)
+        && gimple_assign_rhs_code (use_stmt) == RDIV_EXPR
+        && gimple_assign_rhs2 (use_stmt) == def
         /* Do not recognize x / x as valid division, as we are getting
            confused later by replacing all immediate uses x in such
            a stmt.  */
-        && TREE_OPERAND (GIMPLE_STMT_OPERAND (use_stmt, 1), 0) != def;
+        && gimple_assign_rhs1 (use_stmt) != def;
 }
 
 /* Walk the subset of the dominator tree rooted at OCC, setting the
@@ -292,11 +292,12 @@ is_division_by (tree use_stmt, tree def)
    be used.  */
 
 static void
-insert_reciprocals (block_stmt_iterator *def_bsi, struct occurrence *occ,
+insert_reciprocals (gimple_stmt_iterator *def_gsi, struct occurrence *occ,
                    tree def, tree recip_def, int threshold)
 {
-  tree type, new_stmt;
-  block_stmt_iterator bsi;
+  tree type;
+  gimple new_stmt;
+  gimple_stmt_iterator gsi;
   struct occurrence *occ_child;
 
   if (!recip_def
@@ -306,34 +307,31 @@ insert_reciprocals (block_stmt_iterator *def_bsi, struct occurrence *occ,
       /* Make a variable with the replacement and substitute it.  */
       type = TREE_TYPE (def);
       recip_def = make_rename_temp (type, "reciptmp");
-      new_stmt = build_gimple_modify_stmt (recip_def,
-                                          fold_build2 (RDIV_EXPR, type,
-                                                       build_one_cst (type),
-                                                       def));
-  
+      new_stmt = gimple_build_assign_with_ops (RDIV_EXPR, recip_def,
+                                              build_one_cst (type), def);
   
       if (occ->bb_has_division)
         {
           /* Case 1: insert before an existing division.  */
-          bsi = bsi_after_labels (occ->bb);
-          while (!bsi_end_p (bsi) && !is_division_by (bsi_stmt (bsi), def))
-           bsi_next (&bsi);
+          gsi = gsi_after_labels (occ->bb);
+          while (!gsi_end_p (gsi) && !is_division_by (gsi_stmt (gsi), def))
+           gsi_next (&gsi);
 
-          bsi_insert_before (&bsi, new_stmt, BSI_SAME_STMT);
+          gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
         }
-      else if (def_bsi && occ->bb == def_bsi->bb)
+      else if (def_gsi && occ->bb == def_gsi->bb)
         {
           /* Case 2: insert right after the definition.  Note that this will
             never happen if the definition statement can throw, because in
             that case the sole successor of the statement's basic block will
             dominate all the uses as well.  */
-          bsi_insert_after (def_bsi, new_stmt, BSI_NEW_STMT);
+          gsi_insert_after (def_gsi, new_stmt, GSI_NEW_STMT);
         }
       else
         {
           /* Case 3: insert in a basic block not containing defs/uses.  */
-          bsi = bsi_after_labels (occ->bb);
-          bsi_insert_before (&bsi, new_stmt, BSI_SAME_STMT);
+          gsi = gsi_after_labels (occ->bb);
+          gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
         }
 
       occ->recip_def_stmt = new_stmt;
@@ -341,7 +339,7 @@ insert_reciprocals (block_stmt_iterator *def_bsi, struct occurrence *occ,
 
   occ->recip_def = recip_def;
   for (occ_child = occ->children; occ_child; occ_child = occ_child->next)
-    insert_reciprocals (def_bsi, occ_child, def, recip_def, threshold);
+    insert_reciprocals (def_gsi, occ_child, def, recip_def, threshold);
 }
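
The reciprocal temporary is no longer created by hanging a folded RDIV_EXPR off a GIMPLE_MODIFY_STMT; the code and operands are handed to gimple_build_assign_with_ops and the tuple is inserted with a gsi iterator. A condensed sketch of that construction (hypothetical helper):

   /* Sketch: build and insert "reciptmp = 1.0 / DEF" before WHERE.  */
   static gimple
   build_recip_stmt (tree def, gimple_stmt_iterator *where)
   {
     tree type = TREE_TYPE (def);
     tree recip = make_rename_temp (type, "reciptmp");
     gimple stmt = gimple_build_assign_with_ops (RDIV_EXPR, recip,
                                                 build_one_cst (type), def);

     gsi_insert_before (where, stmt, GSI_SAME_STMT);
     return stmt;
   }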
 
 
@@ -351,13 +349,13 @@ insert_reciprocals (block_stmt_iterator *def_bsi, struct occurrence *occ,
 static inline void
 replace_reciprocal (use_operand_p use_p)
 {
-  tree use_stmt = USE_STMT (use_p);
-  basic_block bb = bb_for_stmt (use_stmt);
+  gimple use_stmt = USE_STMT (use_p);
+  basic_block bb = gimple_bb (use_stmt);
   struct occurrence *occ = (struct occurrence *) bb->aux;
 
   if (occ->recip_def && use_stmt != occ->recip_def_stmt)
     {
-      TREE_SET_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1), MULT_EXPR);
+      gimple_assign_set_rhs_code (use_stmt, MULT_EXPR);
       SET_USE (use_p, occ->recip_def);
       fold_stmt_inplace (use_stmt);
       update_stmt (use_stmt);
@@ -398,7 +396,7 @@ free_bb (struct occurrence *occ)
    DEF must be a GIMPLE register of a floating-point type.  */
 
 static void
-execute_cse_reciprocals_1 (block_stmt_iterator *def_bsi, tree def)
+execute_cse_reciprocals_1 (gimple_stmt_iterator *def_gsi, tree def)
 {
   use_operand_p use_p;
   imm_use_iterator use_iter;
@@ -409,10 +407,10 @@ execute_cse_reciprocals_1 (block_stmt_iterator *def_bsi, tree def)
 
   FOR_EACH_IMM_USE_FAST (use_p, use_iter, def)
     {
-      tree use_stmt = USE_STMT (use_p);
+      gimple use_stmt = USE_STMT (use_p);
       if (is_division_by (use_stmt, def))
        {
-         register_division_in (bb_for_stmt (use_stmt));
+         register_division_in (gimple_bb (use_stmt));
          count++;
        }
     }
@@ -421,11 +419,11 @@ execute_cse_reciprocals_1 (block_stmt_iterator *def_bsi, tree def)
   threshold = targetm.min_divisions_for_recip_mul (TYPE_MODE (TREE_TYPE (def)));
   if (count >= threshold)
     {
-      tree use_stmt;
+      gimple use_stmt;
       for (occ = occ_head; occ; occ = occ->next)
        {
          compute_merit (occ);
-         insert_reciprocals (def_bsi, occ, def, NULL, threshold);
+         insert_reciprocals (def_gsi, occ, def, NULL, threshold);
        }
 
       FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, def)
@@ -478,56 +476,55 @@ execute_cse_reciprocals (void)
 
   FOR_EACH_BB (bb)
     {
-      block_stmt_iterator bsi;
-      tree phi, def;
+      gimple_stmt_iterator gsi;
+      gimple phi;
+      tree def;
 
-      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
+         phi = gsi_stmt (gsi);
          def = PHI_RESULT (phi);
          if (FLOAT_TYPE_P (TREE_TYPE (def))
              && is_gimple_reg (def))
            execute_cse_reciprocals_1 (NULL, def);
        }
 
-      for (bsi = bsi_after_labels (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
         {
-         tree stmt = bsi_stmt (bsi);
+         gimple stmt = gsi_stmt (gsi);
 
-         if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
+         if (gimple_has_lhs (stmt)
              && (def = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_DEF)) != NULL
              && FLOAT_TYPE_P (TREE_TYPE (def))
              && TREE_CODE (def) == SSA_NAME)
-           execute_cse_reciprocals_1 (&bsi, def);
+           execute_cse_reciprocals_1 (&gsi, def);
        }
 
       /* Scan for a/func(b) and convert it to reciprocal a*rfunc(b).  */
-      for (bsi = bsi_after_labels (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
         {
-         tree stmt = bsi_stmt (bsi);
+         gimple stmt = gsi_stmt (gsi);
          tree fndecl;
 
-         if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-             && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == RDIV_EXPR)
+         if (is_gimple_assign (stmt)
+             && gimple_assign_rhs_code (stmt) == RDIV_EXPR)
            {
-             tree arg1 = TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt, 1), 1);
-             tree stmt1;
+             tree arg1 = gimple_assign_rhs2 (stmt);
+             gimple stmt1;
 
              if (TREE_CODE (arg1) != SSA_NAME)
                continue;
 
              stmt1 = SSA_NAME_DEF_STMT (arg1);
 
-             if (TREE_CODE (stmt1) == GIMPLE_MODIFY_STMT
-                 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt1, 1)) == CALL_EXPR
-                 && (fndecl
-                     = get_callee_fndecl (GIMPLE_STMT_OPERAND (stmt1, 1)))
+             if (is_gimple_call (stmt1)
+                 && gimple_call_lhs (stmt1)
+                 && (fndecl = gimple_call_fndecl (stmt1))
                  && (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
                      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD))
                {
                  enum built_in_function code;
                  bool md_code;
-                 tree arg10;
-                 tree tmp;
 
                  code = DECL_FUNCTION_CODE (fndecl);
                  md_code = DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD;
@@ -536,12 +533,10 @@ execute_cse_reciprocals (void)
                  if (!fndecl)
                    continue;
 
-                 arg10 = CALL_EXPR_ARG (GIMPLE_STMT_OPERAND (stmt1, 1), 0);
-                 tmp = build_call_expr (fndecl, 1, arg10);
-                 GIMPLE_STMT_OPERAND (stmt1, 1) = tmp;
+                 gimple_call_set_fn (stmt1, fndecl);
                  update_stmt (stmt1);
 
-                 TREE_SET_CODE (GIMPLE_STMT_OPERAND (stmt, 1), MULT_EXPR);
+                 gimple_assign_set_rhs_code (stmt, MULT_EXPR);
                  fold_stmt_inplace (stmt);
                  update_stmt (stmt);
                }
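
The a/func(b) to a*rfunc(b) conversion now rewrites both statements in place through tuple setters: the call keeps its argument and LHS and only has its callee swapped with gimple_call_set_fn, and the division becomes a multiplication by changing the RHS code. A sketch of those two rewrites together (hypothetical helper):

   /* Sketch: turn DIV_STMT (a / x) and CALL_STMT (x = func (b)) into
      a * rfunc (b), given the reciprocal builtin RECIP_FNDECL.  */
   static void
   rewrite_div_by_call (gimple div_stmt, gimple call_stmt, tree recip_fndecl)
   {
     gimple_call_set_fn (call_stmt, recip_fndecl);
     update_stmt (call_stmt);

     gimple_assign_set_rhs_code (div_stmt, MULT_EXPR);
     fold_stmt_inplace (div_stmt);
     update_stmt (div_stmt);
   }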
@@ -582,18 +577,18 @@ struct gimple_opt_pass pass_cse_reciprocals =
    statements in the vector.  */
 
 static bool
-maybe_record_sincos (VEC(tree, heap) **stmts,
-                    basic_block *top_bb, tree use_stmt)
+maybe_record_sincos (VEC(gimple, heap) **stmts,
+                    basic_block *top_bb, gimple use_stmt)
 {
-  basic_block use_bb = bb_for_stmt (use_stmt);
+  basic_block use_bb = gimple_bb (use_stmt);
   if (*top_bb
       && (*top_bb == use_bb
          || dominated_by_p (CDI_DOMINATORS, use_bb, *top_bb)))
-    VEC_safe_push (tree, heap, *stmts, use_stmt);
+    VEC_safe_push (gimple, heap, *stmts, use_stmt);
   else if (!*top_bb
           || dominated_by_p (CDI_DOMINATORS, *top_bb, use_bb))
     {
-      VEC_safe_push (tree, heap, *stmts, use_stmt);
+      VEC_safe_push (gimple, heap, *stmts, use_stmt);
       *top_bb = use_bb;
     }
   else
@@ -613,20 +608,21 @@ maybe_record_sincos (VEC(tree, heap) **stmts,
 static void
 execute_cse_sincos_1 (tree name)
 {
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   imm_use_iterator use_iter;
-  tree def_stmt, use_stmt, fndecl, res, call, stmt, type;
+  tree fndecl, res, type;
+  gimple def_stmt, use_stmt, stmt;
   int seen_cos = 0, seen_sin = 0, seen_cexpi = 0;
-  VEC(tree, heap) *stmts = NULL;
+  VEC(gimple, heap) *stmts = NULL;
   basic_block top_bb = NULL;
   int i;
 
   type = TREE_TYPE (name);
   FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, name)
     {
-      if (TREE_CODE (use_stmt) != GIMPLE_MODIFY_STMT
-         || TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) != CALL_EXPR
-         || !(fndecl = get_callee_fndecl (GIMPLE_STMT_OPERAND (use_stmt, 1)))
+      if (gimple_code (use_stmt) != GIMPLE_CALL
+         || !gimple_call_lhs (use_stmt)
+         || !(fndecl = gimple_call_fndecl (use_stmt))
          || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
        continue;
 
@@ -650,7 +646,7 @@ execute_cse_sincos_1 (tree name)
 
   if (seen_cos + seen_sin + seen_cexpi <= 1)
     {
-      VEC_free(tree, heap, stmts);
+      VEC_free(gimple, heap, stmts);
       return;
     }
 
@@ -660,51 +656,57 @@ execute_cse_sincos_1 (tree name)
   if (!fndecl)
     return;
   res = make_rename_temp (TREE_TYPE (TREE_TYPE (fndecl)), "sincostmp");
-  call = build_call_expr (fndecl, 1, name);
-  stmt = build_gimple_modify_stmt (res, call);
+  stmt = gimple_build_call (fndecl, 1, name);
+  gimple_call_set_lhs (stmt, res);
+
   def_stmt = SSA_NAME_DEF_STMT (name);
   if (!SSA_NAME_IS_DEFAULT_DEF (name)
-      && TREE_CODE (def_stmt) != PHI_NODE
-      && bb_for_stmt (def_stmt) == top_bb)
+      && gimple_code (def_stmt) != GIMPLE_PHI
+      && gimple_bb (def_stmt) == top_bb)
     {
-      bsi = bsi_for_stmt (def_stmt);
-      bsi_insert_after (&bsi, stmt, BSI_SAME_STMT);
+      gsi = gsi_for_stmt (def_stmt);
+      gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
     }
   else
     {
-      bsi = bsi_after_labels (top_bb);
-      bsi_insert_before (&bsi, stmt, BSI_SAME_STMT);
+      gsi = gsi_after_labels (top_bb);
+      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
     }
   update_stmt (stmt);
 
   /* And adjust the recorded old call sites.  */
-  for (i = 0; VEC_iterate(tree, stmts, i, use_stmt); ++i)
+  for (i = 0; VEC_iterate(gimple, stmts, i, use_stmt); ++i)
     {
-      fndecl = get_callee_fndecl (GIMPLE_STMT_OPERAND (use_stmt, 1));
+      tree rhs = NULL;
+      fndecl = gimple_call_fndecl (use_stmt);
+
       switch (DECL_FUNCTION_CODE (fndecl))
        {
        CASE_FLT_FN (BUILT_IN_COS):
-         GIMPLE_STMT_OPERAND (use_stmt, 1) = fold_build1 (REALPART_EXPR,
-                                                          type, res);
+         rhs = fold_build1 (REALPART_EXPR, type, res);
          break;
 
        CASE_FLT_FN (BUILT_IN_SIN):
-         GIMPLE_STMT_OPERAND (use_stmt, 1) = fold_build1 (IMAGPART_EXPR,
-                                                          type, res);
+         rhs = fold_build1 (IMAGPART_EXPR, type, res);
          break;
 
        CASE_FLT_FN (BUILT_IN_CEXPI):
-         GIMPLE_STMT_OPERAND (use_stmt, 1) = res;
+         rhs = res;
          break;
 
        default:;
          gcc_unreachable ();
        }
 
-       update_stmt (use_stmt);
+       /* Replace call with a copy.  */
+       stmt = gimple_build_assign (gimple_call_lhs (use_stmt), rhs);
+
+       gsi = gsi_for_stmt (use_stmt);
+       gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
+       gsi_remove (&gsi, true); 
     }
 
-  VEC_free(tree, heap, stmts);
+  VEC_free(gimple, heap, stmts);
 }
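
A tuple cannot change kind in place, so replacing a recorded sin/cos/cexpi call with a copy of the shared sincos result means inserting a fresh GIMPLE_ASSIGN and removing the call, exactly as at the end of the loop above. A minimal sketch of that replacement step (hypothetical helper):

   /* Sketch: replace CALL with "lhs = RHS" at the same position.  */
   static void
   replace_call_with_copy (gimple call, tree rhs)
   {
     gimple_stmt_iterator gsi = gsi_for_stmt (call);
     gimple copy = gimple_build_assign (gimple_call_lhs (call), rhs);

     gsi_insert_after (&gsi, copy, GSI_SAME_STMT);
     gsi_remove (&gsi, true);
   }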
 
 /* Go through all calls to sin, cos and cexpi and call execute_cse_sincos_1
@@ -719,16 +721,16 @@ execute_cse_sincos (void)
 
   FOR_EACH_BB (bb)
     {
-      block_stmt_iterator bsi;
+      gimple_stmt_iterator gsi;
 
-      for (bsi = bsi_after_labels (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
         {
-         tree stmt = bsi_stmt (bsi);
+         gimple stmt = gsi_stmt (gsi);
          tree fndecl;
 
-         if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-             && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == CALL_EXPR
-             && (fndecl = get_callee_fndecl (GIMPLE_STMT_OPERAND (stmt, 1)))
+         if (is_gimple_call (stmt)
+             && gimple_call_lhs (stmt)
+             && (fndecl = gimple_call_fndecl (stmt))
              && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
            {
              tree arg;
@@ -738,8 +740,7 @@ execute_cse_sincos (void)
                CASE_FLT_FN (BUILT_IN_COS):
                CASE_FLT_FN (BUILT_IN_SIN):
                CASE_FLT_FN (BUILT_IN_CEXPI):
-                 arg = GIMPLE_STMT_OPERAND (stmt, 1);
-                 arg = CALL_EXPR_ARG (arg, 0);
+                 arg = gimple_call_arg (stmt, 0);
                  if (TREE_CODE (arg) == SSA_NAME)
                    execute_cse_sincos_1 (arg);
                  break;
@@ -793,23 +794,23 @@ execute_convert_to_rsqrt (void)
 
   FOR_EACH_BB (bb)
     {
-      block_stmt_iterator bsi;
+      gimple_stmt_iterator gsi;
 
-      for (bsi = bsi_after_labels (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
         {
-         tree stmt = bsi_stmt (bsi);
+         gimple stmt = gsi_stmt (gsi);
          tree fndecl;
 
-         if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-             && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == CALL_EXPR
-             && (fndecl = get_callee_fndecl (GIMPLE_STMT_OPERAND (stmt, 1)))
+         if (is_gimple_call (stmt)
+             && gimple_call_lhs (stmt)
+             && (fndecl = gimple_call_fndecl (stmt))
              && (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
                  || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD))
            {
              enum built_in_function code;
              bool md_code;
              tree arg1;
-             tree stmt1;
+             gimple stmt1;
 
              code = DECL_FUNCTION_CODE (fndecl);
              md_code = DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD;
@@ -818,30 +819,28 @@ execute_convert_to_rsqrt (void)
              if (!fndecl)
                continue;
 
-             arg1 = CALL_EXPR_ARG (GIMPLE_STMT_OPERAND (stmt, 1), 0);
+             arg1 = gimple_call_arg (stmt, 0);
 
              if (TREE_CODE (arg1) != SSA_NAME)
                continue;
 
              stmt1 = SSA_NAME_DEF_STMT (arg1);
 
-             if (TREE_CODE (stmt1) == GIMPLE_MODIFY_STMT
-                 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt1, 1)) == RDIV_EXPR)
+             if (is_gimple_assign (stmt1)
+                 && gimple_assign_rhs_code (stmt1) == RDIV_EXPR)
                {
                  tree arg10, arg11;
-                 tree tmp;
 
-                 arg10 = TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt1, 1), 0);
-                 arg11 = TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt1, 1), 1);
+                 arg10 = gimple_assign_rhs1 (stmt1);
+                 arg11 = gimple_assign_rhs2 (stmt1);
 
                  /* Swap operands of RDIV_EXPR.  */
-                 TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt1, 1), 0) = arg11;
-                 TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt1, 1), 1) = arg10;
+                 gimple_assign_set_rhs1 (stmt1, arg11);
+                 gimple_assign_set_rhs2 (stmt1, arg10);
                  fold_stmt_inplace (stmt1);
                  update_stmt (stmt1);
 
-                 tmp = build_call_expr (fndecl, 1, arg1);
-                 GIMPLE_STMT_OPERAND (stmt, 1) = tmp;
+                 gimple_call_set_fn (stmt, fndecl);
                  update_stmt (stmt);
                }
            }
index 0aeea9f..304df53 100644
@@ -1,5 +1,6 @@
 /* SSA operands management for trees.
-   Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
+   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008
+   Free Software Foundation, Inc.
 
 This file is part of GCC.
 
@@ -74,6 +75,10 @@ along with GCC; see the file COPYING3.  If not see
    operand vector for VUSE, then the new vector will also be modified
    such that it contains 'a_5' rather than 'a'.  */
 
+/* Helper functions from gimple.c.  These are GIMPLE manipulation
+   routines that only the operand scanner should need.  */
+void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
+void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);
 
 /* Structure storing statistics on how many call clobbers we have, and
    how many where avoided.  */
@@ -122,7 +127,7 @@ static struct
 #define opf_no_vops    (1 << 1)
 
 /* Operand is an implicit reference.  This is used to distinguish
-   explicit assignments in the form of GIMPLE_MODIFY_STMT from
+   explicit assignments in the form of MODIFY_EXPR from
    clobbering sites like function calls or ASM_EXPRs.  */
 #define opf_implicit   (1 << 2)
 
@@ -148,7 +153,7 @@ static bitmap build_loads;
 /* Set for building all the stored symbols.  */
 static bitmap build_stores;
 
-static void get_expr_operands (tree, tree *, int);
+static void get_expr_operands (gimple, tree *, int);
 
 /* Number of functions with initialized ssa_operands.  */
 static int n_initialized = 0;
@@ -178,7 +183,7 @@ static int n_initialized = 0;
 struct scb_d
 {
   /* Pointer to the statement being modified.  */
-  tree *stmt_p;
+  gimple *stmt_p;
 
   /* If the statement references memory these are the sets of symbols
      loaded and stored by the statement.  */
@@ -256,12 +261,18 @@ operand_build_sort_virtual (VEC(tree,heap) *list)
         operand_build_cmp);
 }
 
-
 /*  Return true if the SSA operands cache is active.  */
 
 bool
 ssa_operands_active (void)
 {
+  /* This function may be invoked from contexts where CFUN is NULL
+     (IPA passes), return false for now.  FIXME: operands may be
+     active in each individual function, maybe this function should
+     take CFUN as a parameter.  */
+  if (cfun == NULL)
+    return false;
+
   return cfun->gimple_df && gimple_ssa_operands (cfun)->ops_active;
 }
 
@@ -430,6 +441,7 @@ fini_ssa_operands (void)
 
   if (!n_initialized)
     bitmap_obstack_release (&operands_bitmap_obstack);
+
   if (dump_file && (dump_flags & TDF_STATS))
     {
       fprintf (dump_file, "Original clobbered vars:           %d\n",
@@ -571,11 +583,11 @@ alloc_vop (int num)
    sure the stmt pointer is set to the current stmt.  */
 
 static inline void
-set_virtual_use_link (use_operand_p ptr, tree stmt)
+set_virtual_use_link (use_operand_p ptr, gimple stmt)
 {
   /*  fold_stmt may have changed the stmt pointers.  */
-  if (ptr->stmt != stmt)
-    ptr->stmt = stmt;
+  if (ptr->loc.stmt != stmt)
+    ptr->loc.stmt = stmt;
 
   /* If this use isn't in a list, add it to the correct list.  */
   if (!ptr->prev)
@@ -601,7 +613,7 @@ add_def_op (tree *op, def_optype_p last)
 /* Adds OP to the list of uses of statement STMT after LAST.  */
 
 static inline use_optype_p
-add_use_op (tree stmt, tree *op, use_optype_p last)
+add_use_op (gimple stmt, tree *op, use_optype_p last)
 {
   use_optype_p new_use;
 
@@ -619,7 +631,7 @@ add_use_op (tree stmt, tree *op, use_optype_p last)
    The new vop is appended after PREV.  */
 
 static inline voptype_p
-add_vop (tree stmt, tree op, int num, voptype_p prev)
+add_vop (gimple stmt, tree op, int num, voptype_p prev)
 {
   voptype_p new_vop;
   int x;
@@ -645,7 +657,7 @@ add_vop (tree stmt, tree op, int num, voptype_p prev)
    LAST to the new element.  */
 
 static inline voptype_p
-add_vuse_op (tree stmt, tree op, int num, voptype_p last)
+add_vuse_op (gimple stmt, tree op, int num, voptype_p last)
 {
   voptype_p new_vop = add_vop (stmt, op, num, last);
   VDEF_RESULT (new_vop) = NULL_TREE;
@@ -657,7 +669,7 @@ add_vuse_op (tree stmt, tree op, int num, voptype_p last)
    LAST to the new element.  */
 
 static inline voptype_p
-add_vdef_op (tree stmt, tree op, int num, voptype_p last)
+add_vdef_op (gimple stmt, tree op, int num, voptype_p last)
 {
   voptype_p new_vop = add_vop (stmt, op, num, last);
   VDEF_RESULT (new_vop) = op;
@@ -669,7 +681,7 @@ add_vdef_op (tree stmt, tree op, int num, voptype_p last)
    TODO -- Make build_defs VEC of tree *.  */
 
 static inline void
-finalize_ssa_defs (tree stmt)
+finalize_ssa_defs (gimple stmt)
 {
   unsigned new_i;
   struct def_optype_d new_list;
@@ -677,12 +689,12 @@ finalize_ssa_defs (tree stmt)
   unsigned int num = VEC_length (tree, build_defs);
 
   /* There should only be a single real definition per assignment.  */
-  gcc_assert ((stmt && TREE_CODE (stmt) != GIMPLE_MODIFY_STMT) || num <= 1);
+  gcc_assert ((stmt && gimple_code (stmt) != GIMPLE_ASSIGN) || num <= 1);
 
   new_list.next = NULL;
   last = &new_list;
 
-  old_ops = DEF_OPS (stmt);
+  old_ops = gimple_def_ops (stmt);
 
   new_i = 0;
 
@@ -703,13 +715,13 @@ finalize_ssa_defs (tree stmt)
     last = add_def_op ((tree *) VEC_index (tree, build_defs, new_i), last);
 
   /* Now set the stmt's operands.  */
-  DEF_OPS (stmt) = new_list.next;
+  gimple_set_def_ops (stmt, new_list.next);
 
 #ifdef ENABLE_CHECKING
   {
     def_optype_p ptr;
     unsigned x = 0;
-    for (ptr = DEF_OPS (stmt); ptr; ptr = ptr->next)
+    for (ptr = gimple_def_ops (stmt); ptr; ptr = ptr->next)
       x++;
 
     gcc_assert (x == num);
@@ -722,30 +734,16 @@ finalize_ssa_defs (tree stmt)
    TODO -- Make build_uses VEC of tree *.  */
 
 static inline void
-finalize_ssa_uses (tree stmt)
+finalize_ssa_uses (gimple stmt)
 {
   unsigned new_i;
   struct use_optype_d new_list;
   use_optype_p old_ops, ptr, last;
 
-#ifdef ENABLE_CHECKING
-  {
-    unsigned x;
-    unsigned num = VEC_length (tree, build_uses);
-
-    /* If the pointer to the operand is the statement itself, something is
-       wrong.  It means that we are pointing to a local variable (the 
-       initial call to update_stmt_operands does not pass a pointer to a 
-       statement).  */
-    for (x = 0; x < num; x++)
-      gcc_assert (*((tree *)VEC_index (tree, build_uses, x)) != stmt);
-  }
-#endif
-
   new_list.next = NULL;
   last = &new_list;
 
-  old_ops = USE_OPS (stmt);
+  old_ops = gimple_use_ops (stmt);
 
   /* If there is anything in the old list, free it.  */
   if (old_ops)
@@ -763,12 +761,12 @@ finalize_ssa_uses (tree stmt)
                       last);
 
   /* Now set the stmt's operands.  */
-  USE_OPS (stmt) = new_list.next;
+  gimple_set_use_ops (stmt, new_list.next);
 
 #ifdef ENABLE_CHECKING
   {
     unsigned x = 0;
-    for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
+    for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
       x++;
 
     gcc_assert (x == VEC_length (tree, build_uses));
@@ -778,27 +776,17 @@ finalize_ssa_uses (tree stmt)
 
 
 /* Takes elements from BUILD_VDEFS and turns them into vdef operands of
-   STMT.  FIXME, for now VDEF operators should have a single operand
-   in their RHS.  */
+   STMT.  */
 
 static inline void
-finalize_ssa_vdefs (tree stmt)
+finalize_ssa_vdefs (gimple stmt)
 {
   unsigned new_i;
   struct voptype_d new_list;
   voptype_p old_ops, ptr, last;
-  stmt_ann_t ann = stmt_ann (stmt);
 
   /* Set the symbols referenced by STMT.  */
-  if (!bitmap_empty_p (build_stores))
-    {
-      if (ann->operands.stores == NULL)
-       ann->operands.stores = BITMAP_ALLOC (&operands_bitmap_obstack);
-
-      bitmap_copy (ann->operands.stores, build_stores);
-    }
-  else
-    BITMAP_FREE (ann->operands.stores);
+  gimple_set_stored_syms (stmt, build_stores, &operands_bitmap_obstack);
 
   /* If aliases have not been computed, do not instantiate a virtual
      operator on STMT.  Initially, we only compute the SSA form on
@@ -813,7 +801,7 @@ finalize_ssa_vdefs (tree stmt)
   new_list.next = NULL;
   last = &new_list;
 
-  old_ops = VDEF_OPS (stmt);
+  old_ops = gimple_vdef_ops (stmt);
   new_i = 0;
   while (old_ops && new_i < VEC_length (tree, build_vdefs))
     {
@@ -868,12 +856,12 @@ finalize_ssa_vdefs (tree stmt)
     }
 
   /* Now set STMT's operands.  */
-  VDEF_OPS (stmt) = new_list.next;
+  gimple_set_vdef_ops (stmt, new_list.next);
 
 #ifdef ENABLE_CHECKING
   {
     unsigned x = 0;
-    for (ptr = VDEF_OPS (stmt); ptr; ptr = ptr->next)
+    for (ptr = gimple_vdef_ops (stmt); ptr; ptr = ptr->next)
       x++;
 
     gcc_assert (x == VEC_length (tree, build_vdefs));
@@ -886,24 +874,14 @@ finalize_ssa_vdefs (tree stmt)
    STMT.  */
 
 static inline void
-finalize_ssa_vuse_ops (tree stmt)
+finalize_ssa_vuse_ops (gimple stmt)
 {
   unsigned new_i, old_i;
   voptype_p old_ops, last;
   VEC(tree,heap) *new_ops;
-  stmt_ann_t ann;
 
   /* Set the symbols referenced by STMT.  */
-  ann = stmt_ann (stmt);
-  if (!bitmap_empty_p (build_loads))
-    {
-      if (ann->operands.loads == NULL)
-       ann->operands.loads = BITMAP_ALLOC (&operands_bitmap_obstack);
-
-      bitmap_copy (ann->operands.loads, build_loads);
-    }
-  else
-    BITMAP_FREE (ann->operands.loads);
+  gimple_set_loaded_syms (stmt, build_loads, &operands_bitmap_obstack);
 
   /* If aliases have not been computed, do not instantiate a virtual
      operator on STMT.  Initially, we only compute the SSA form on
@@ -916,7 +894,7 @@ finalize_ssa_vuse_ops (tree stmt)
     return;
 
   /* STMT should have at most one VUSE operator.  */
-  old_ops = VUSE_OPS (stmt);
+  old_ops = gimple_vuse_ops (stmt);
   gcc_assert (old_ops == NULL || old_ops->next == NULL);
 
   new_ops = NULL;
@@ -961,7 +939,7 @@ finalize_ssa_vuse_ops (tree stmt)
       for (old_i = 0; old_i < VUSE_NUM (old_ops); old_i++)
        delink_imm_use (VUSE_OP_PTR (old_ops, old_i));
       add_vop_to_freelist (old_ops);
-      VUSE_OPS (stmt) = NULL;
+      gimple_set_vuse_ops (stmt, NULL);
     }
 
   /* If there are any operands, instantiate a VUSE operator for STMT.  */
@@ -975,7 +953,7 @@ finalize_ssa_vuse_ops (tree stmt)
       for (i = 0; VEC_iterate (tree, new_ops, i, op); i++)
        SET_USE (VUSE_OP_PTR (last, (int) i), op);
 
-      VUSE_OPS (stmt) = last;
+      gimple_set_vuse_ops (stmt, last);
       VEC_free (tree, heap, new_ops);
     }
 
@@ -983,10 +961,10 @@ finalize_ssa_vuse_ops (tree stmt)
   {
     unsigned x;
     
-    if (VUSE_OPS (stmt))
+    if (gimple_vuse_ops (stmt))
       {
-       gcc_assert (VUSE_OPS (stmt)->next == NULL);
-       x = VUSE_NUM (VUSE_OPS (stmt));
+       gcc_assert (gimple_vuse_ops (stmt)->next == NULL);
+       x = VUSE_NUM (gimple_vuse_ops (stmt));
       }
     else
       x = 0;
@@ -999,7 +977,7 @@ finalize_ssa_vuse_ops (tree stmt)
 /* Return a new VUSE operand vector for STMT.  */
                                                                               
 static void
-finalize_ssa_vuses (tree stmt)
+finalize_ssa_vuses (gimple stmt)
 {
   unsigned num, num_vdefs;
   unsigned vuse_index;
@@ -1069,12 +1047,15 @@ cleanup_build_arrays (void)
 /* Finalize all the build vectors, fill the new ones into INFO.  */
                                                                               
 static inline void
-finalize_ssa_stmt_operands (tree stmt)
+finalize_ssa_stmt_operands (gimple stmt)
 {
   finalize_ssa_defs (stmt);
   finalize_ssa_uses (stmt);
-  finalize_ssa_vdefs (stmt);
-  finalize_ssa_vuses (stmt);
+  if (gimple_has_mem_ops (stmt))
+    {
+      finalize_ssa_vdefs (stmt);
+      finalize_ssa_vuses (stmt);
+    }
   cleanup_build_arrays ();
 }
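
   For context, a minimal sketch (not part of the patch; the helper name is
   invented) of how the per-statement operand chains finalized above are
   typically walked with the new accessors.  It mirrors the ENABLE_CHECKING
   loops in the finalizers and assumes the use_optype_p layout shown there.

static unsigned
count_use_operands (gimple stmt)
{
  unsigned n = 0;
  use_optype_p ptr;

  /* Walk the USE operand chain hanging off the tuple statement.  */
  for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
    n++;

  return n;
}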
 
@@ -1323,7 +1304,7 @@ access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
   return true;
 }
 
-/* Add VAR to the virtual operands array.  FLAGS is as in
+/* Add VAR to the virtual operands for STMT.  FLAGS is as in
    get_expr_operands.  FULL_REF is a tree that contains the entire
    pointer dereference expression, if available, or NULL otherwise.
    OFFSET and SIZE come from the memory access expression that
@@ -1331,7 +1312,7 @@ access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
    affected statement is a call site.  */
 
 static void
-add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
+add_virtual_operand (tree var, gimple stmt, int flags,
                     tree full_ref, HOST_WIDE_INT offset,
                     HOST_WIDE_INT size, bool is_call_site)
 {
@@ -1343,7 +1324,7 @@ add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
   v_ann = var_ann (sym);
   
   /* Mark the statement as having memory operands.  */
-  s_ann->references_memory = true;
+  gimple_set_references_memory (stmt, true);
 
   /* If the variable cannot be modified and this is a VDEF change
      it into a VUSE.  This happens when read-only variables are marked
@@ -1351,7 +1332,7 @@ add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
      check that this only happens on non-specific stores.
 
      Note that if this is a specific store, i.e. associated with a
-     GIMPLE_MODIFY_STMT, then we can't suppress the VDEF, lest we run
+     MODIFY_EXPR, then we can't suppress the VDEF, lest we run
      into validation problems.
 
      This can happen when programs cast away const, leaving us with a
@@ -1373,9 +1354,8 @@ add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
 
   if (aliases == NULL)
     {
-      if (!gimple_aliases_computed_p (cfun)
-         && (flags & opf_def))
-        s_ann->has_volatile_ops = true;
+      if (!gimple_aliases_computed_p (cfun) && (flags & opf_def))
+       gimple_set_has_volatile_ops (stmt, true);
 
       /* The variable is not aliased or it is an alias tag.  */
       if (flags & opf_def)
@@ -1449,18 +1429,18 @@ add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
 }
 
 
-/* Add *VAR_P to the appropriate operand array for S_ANN.  FLAGS is as in
-   get_expr_operands.  If *VAR_P is a GIMPLE register, it will be added to
-   the statement's real operands, otherwise it is added to virtual
-   operands.  */
+/* Add *VAR_P to the appropriate operand array for statement STMT.
+   FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
+   it will be added to the statement's real operands, otherwise it is
+   added to virtual operands.  */
 
 static void
-add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
+add_stmt_operand (tree *var_p, gimple stmt, int flags)
 {
   tree var, sym;
   var_ann_t v_ann;
 
-  gcc_assert (SSA_VAR_P (*var_p) && s_ann);
+  gcc_assert (SSA_VAR_P (*var_p));
 
   var = *var_p;
   sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
@@ -1468,7 +1448,7 @@ add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
 
   /* Mark statements with volatile operands.  */
   if (TREE_THIS_VOLATILE (sym))
-    s_ann->has_volatile_ops = true;
+    gimple_set_has_volatile_ops (stmt, true);
 
   if (is_gimple_reg (sym))
     {
@@ -1479,7 +1459,7 @@ add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
        append_use (var_p);
     }
   else
-    add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
+    add_virtual_operand (var, stmt, flags, NULL_TREE, 0, -1, false);
 }
 
 /* Subroutine of get_indirect_ref_operands.  ADDR is the address
@@ -1487,14 +1467,14 @@ add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
    is the same as in get_indirect_ref_operands.  */
 
 static void
-get_addr_dereference_operands (tree stmt, tree *addr, int flags, tree full_ref,
-                              HOST_WIDE_INT offset, HOST_WIDE_INT size,
-                              bool recurse_on_base)
+get_addr_dereference_operands (gimple stmt, tree *addr, int flags,
+                              tree full_ref, HOST_WIDE_INT offset,
+                              HOST_WIDE_INT size, bool recurse_on_base)
 {
   tree ptr = *addr;
-  stmt_ann_t s_ann = stmt_ann (stmt);
 
-  s_ann->references_memory = true;
+  /* Mark the statement as having memory operands.  */
+  gimple_set_references_memory (stmt, true);
 
   if (SSA_VAR_P (ptr))
     {
@@ -1506,7 +1486,7 @@ get_addr_dereference_operands (tree stmt, tree *addr, int flags, tree full_ref,
          && pi->name_mem_tag)
        {
          /* PTR has its own memory tag.  Use it.  */
-         add_virtual_operand (pi->name_mem_tag, s_ann, flags,
+         add_virtual_operand (pi->name_mem_tag, stmt, flags,
                               full_ref, offset, size, false);
        }
       else
@@ -1530,7 +1510,7 @@ get_addr_dereference_operands (tree stmt, tree *addr, int flags, tree full_ref,
                  "NOTE: no flow-sensitive alias info for ");
              print_generic_expr (dump_file, ptr, dump_flags);
              fprintf (dump_file, " in ");
-             print_generic_stmt (dump_file, stmt, 0);
+             print_gimple_stmt (dump_file, stmt, 0, 0);
            }
 
          if (TREE_CODE (ptr) == SSA_NAME)
@@ -1542,7 +1522,7 @@ get_addr_dereference_operands (tree stmt, tree *addr, int flags, tree full_ref,
             and size.  */
          if (v_ann->symbol_mem_tag)
            {
-             add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
+             add_virtual_operand (v_ann->symbol_mem_tag, stmt, flags,
                                   full_ref, 0, -1, false);
              /* Make sure we add the SMT itself.  */
              if (!(flags & opf_no_vops))
@@ -1558,7 +1538,7 @@ get_addr_dereference_operands (tree stmt, tree *addr, int flags, tree full_ref,
             volatile so we won't optimize it out too actively.  */
           else if (!gimple_aliases_computed_p (cfun)
                    && (flags & opf_def))
-            s_ann->has_volatile_ops = true;
+           gimple_set_has_volatile_ops (stmt, true);
        }
     }
   else if (TREE_CODE (ptr) == INTEGER_CST)
@@ -1566,7 +1546,7 @@ get_addr_dereference_operands (tree stmt, tree *addr, int flags, tree full_ref,
       /* If a constant is used as a pointer, we can't generate a real
         operand for it but we mark the statement volatile to prevent
         optimizations from messing things up.  */
-      s_ann->has_volatile_ops = true;
+      gimple_set_has_volatile_ops (stmt, true);
       return;
     }
   else
@@ -1600,15 +1580,14 @@ get_addr_dereference_operands (tree stmt, tree *addr, int flags, tree full_ref,
       something else will do it for us.  */
 
 static void
-get_indirect_ref_operands (tree stmt, tree expr, int flags, tree full_ref,
+get_indirect_ref_operands (gimple stmt, tree expr, int flags, tree full_ref,
                           HOST_WIDE_INT offset, HOST_WIDE_INT size,
                           bool recurse_on_base)
 {
   tree *pptr = &TREE_OPERAND (expr, 0);
-  stmt_ann_t s_ann = stmt_ann (stmt);
 
   if (TREE_THIS_VOLATILE (expr))
-    s_ann->has_volatile_ops = true; 
+    gimple_set_has_volatile_ops (stmt, true);
 
   get_addr_dereference_operands (stmt, pptr, flags, full_ref, offset, size,
                                 recurse_on_base);
@@ -1618,26 +1597,25 @@ get_indirect_ref_operands (tree stmt, tree expr, int flags, tree full_ref,
 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */
 
 static void
-get_tmr_operands (tree stmt, tree expr, int flags)
+get_tmr_operands (gimple stmt, tree expr, int flags)
 {
   tree tag;
-  stmt_ann_t s_ann = stmt_ann (stmt);
 
-  /* This statement references memory.  */
-  s_ann->references_memory = 1;
+  /* Mark the statement as having memory operands.  */
+  gimple_set_references_memory (stmt, true);
 
   /* First record the real operands.  */
   get_expr_operands (stmt, &TMR_BASE (expr), opf_use);
   get_expr_operands (stmt, &TMR_INDEX (expr), opf_use);
 
   if (TMR_SYMBOL (expr))
-    add_to_addressable_set (TMR_SYMBOL (expr), &s_ann->addresses_taken);
+    gimple_add_to_addresses_taken (stmt, TMR_SYMBOL (expr));
 
   tag = TMR_TAG (expr);
   if (!tag)
     {
       /* Something weird, so ensure that we will be careful.  */
-      s_ann->has_volatile_ops = true;
+      gimple_set_has_volatile_ops (stmt, true);
       return;
     }
   if (!MTAG_P (tag))
@@ -1646,7 +1624,7 @@ get_tmr_operands (tree stmt, tree expr, int flags)
       return;
     }
 
-  add_virtual_operand (tag, s_ann, flags, expr, 0, -1, false);
+  add_virtual_operand (tag, stmt, flags, expr, 0, -1, false);
 }
 
 
@@ -1654,21 +1632,19 @@ get_tmr_operands (tree stmt, tree expr, int flags)
    clobbered variables in the function.  */
 
 static void
-add_call_clobber_ops (tree stmt, tree callee)
+add_call_clobber_ops (gimple stmt, tree callee ATTRIBUTE_UNUSED)
 {
   unsigned u;
   bitmap_iterator bi;
-  stmt_ann_t s_ann = stmt_ann (stmt);
   bitmap not_read_b, not_written_b;
-  tree call = get_call_expr_in (stmt);
 
-  gcc_assert (!(call_expr_flags (call) & (ECF_PURE | ECF_CONST)));
+  gcc_assert (!(gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)));
 
   /* If we created .GLOBAL_VAR earlier, just use it.  */
   if (gimple_global_var (cfun))
     {
       tree var = gimple_global_var (cfun);
-      add_virtual_operand (var, s_ann, opf_def, NULL, 0, -1, true);
+      add_virtual_operand (var, stmt, opf_def, NULL, 0, -1, true);
       return;
     }
 
@@ -1676,7 +1652,8 @@ add_call_clobber_ops (tree stmt, tree callee)
      set for each static if the call being processed does not read
      or write that variable.  */
   not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL; 
-  not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL; 
+  not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
+
   /* Add a VDEF operand for every call clobbered variable.  */
   EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, u, bi)
     {
@@ -1702,12 +1679,12 @@ add_call_clobber_ops (tree stmt, tree callee)
        {
          clobber_stats.static_write_clobbers_avoided++;
          if (!not_read)
-           add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
+           add_virtual_operand (var, stmt, opf_use, NULL, 0, -1, true);
          else
            clobber_stats.static_read_clobbers_avoided++;
        }
       else
-       add_virtual_operand (var, s_ann, opf_def, NULL, 0, -1, true);
+       add_virtual_operand (var, stmt, opf_def, NULL, 0, -1, true);
     }
 }
 
@@ -1716,22 +1693,20 @@ add_call_clobber_ops (tree stmt, tree callee)
    function.  */
 
 static void
-add_call_read_ops (tree stmt, tree callee)
+add_call_read_ops (gimple stmt, tree callee ATTRIBUTE_UNUSED)
 {
   unsigned u;
   bitmap_iterator bi;
-  stmt_ann_t s_ann = stmt_ann (stmt);
   bitmap not_read_b;
-  tree call = get_call_expr_in (stmt);
 
   /* Const functions do not reference memory.  */
-  if (call_expr_flags (call) & ECF_CONST)
+  if (gimple_call_flags (stmt) & ECF_CONST)
     return;
 
   not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
 
   /* For pure functions we compute non-escaped uses separately.  */
-  if (call_expr_flags (call) & ECF_PURE)
+  if (gimple_call_flags (stmt) & ECF_PURE)
     EXECUTE_IF_SET_IN_BITMAP (gimple_call_used_vars (cfun), 0, u, bi)
       {
        tree var = referenced_var_lookup (u);
@@ -1749,7 +1724,7 @@ add_call_read_ops (tree stmt, tree callee)
 
        /* See if this variable is really used by this function.  */
        if (!not_read)
-         add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
+         add_virtual_operand (var, stmt, opf_use, NULL, 0, -1, true);
        else
          clobber_stats.static_readonly_clobbers_avoided++;
       }
@@ -1760,7 +1735,7 @@ add_call_read_ops (tree stmt, tree callee)
   if (gimple_global_var (cfun))
     {
       tree var = gimple_global_var (cfun);
-      add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
+      add_virtual_operand (var, stmt, opf_use, NULL, 0, -1, true);
       return;
     }
 
@@ -1782,67 +1757,55 @@ add_call_read_ops (tree stmt, tree callee)
          continue;
        }
             
-      add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
+      add_virtual_operand (var, stmt, opf_use, NULL, 0, -1, true);
     }
 }
 
 
-/* A subroutine of get_expr_operands to handle CALL_EXPR.  */
+/* If STMT is a call that may clobber globals and other symbols that
+   escape, add them to the VDEF/VUSE lists for it.  */
 
 static void
-get_call_expr_operands (tree stmt, tree expr)
+maybe_add_call_clobbered_vops (gimple stmt)
 {
-  int call_flags = call_expr_flags (expr);
-  int i, nargs;
-  stmt_ann_t ann = stmt_ann (stmt);
+  int call_flags = gimple_call_flags (stmt);
 
-  ann->references_memory = true;
+  /* Mark the statement as having memory operands.  */
+  gimple_set_references_memory (stmt, true);
 
   /* If aliases have been computed already, add VDEF or VUSE
      operands for all the symbols that have been found to be
      call-clobbered.  */
-  if (gimple_aliases_computed_p (cfun)
-      && !(call_flags & ECF_NOVOPS))
+  if (gimple_aliases_computed_p (cfun) && !(call_flags & ECF_NOVOPS))
     {
       /* A 'pure' or a 'const' function never call-clobbers anything. 
         A 'noreturn' function might, but since we don't return anyway 
         there is no point in recording that.  */ 
-      if (TREE_SIDE_EFFECTS (expr)
-         && !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
-       add_call_clobber_ops (stmt, get_callee_fndecl (expr));
+      if (!(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
+       add_call_clobber_ops (stmt, gimple_call_fndecl (stmt));
       else if (!(call_flags & ECF_CONST))
-       add_call_read_ops (stmt, get_callee_fndecl (expr));
+       add_call_read_ops (stmt, gimple_call_fndecl (stmt));
     }
-
-  /* Find uses in the called function.  */
-  get_expr_operands (stmt, &CALL_EXPR_FN (expr), opf_use);
-  nargs = call_expr_nargs (expr);
-  for (i = 0; i < nargs; i++)
-    get_expr_operands (stmt, &CALL_EXPR_ARG (expr, i), opf_use);
-
-  get_expr_operands (stmt, &CALL_EXPR_STATIC_CHAIN (expr), opf_use);
 }
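
   As an illustration only (the helper name is made up, not from the patch),
   the flag tests used above can be factored out like this; gimple_call_flags
   returns the ECF_* bits for a GIMPLE_CALL.

/* Return true if CALL may clobber call-clobbered symbols, i.e. it is
   neither const, pure, nor marked as taking no virtual operands.  */
static bool
call_may_clobber_globals_p (gimple call)
{
  int flags = gimple_call_flags (call);
  return (flags & (ECF_CONST | ECF_PURE | ECF_NOVOPS)) == 0;
}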
 
 
 /* Scan operands in the GIMPLE_ASM statement STMT.  */
 
 static void
-get_asm_expr_operands (tree stmt)
+get_asm_expr_operands (gimple stmt)
 {
-  stmt_ann_t s_ann;
-  int i, noutputs;
+  size_t i, noutputs;
   const char **oconstraints;
   const char *constraint;
   bool allows_mem, allows_reg, is_inout;
-  tree link;
 
-  s_ann = stmt_ann (stmt);
-  noutputs = list_length (ASM_OUTPUTS (stmt));
+  noutputs = gimple_asm_noutputs (stmt);
   oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
 
   /* Gather all output operands.  */
-  for (i = 0, link = ASM_OUTPUTS (stmt); link; i++, link = TREE_CHAIN (link))
+  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
     {
+      tree link = gimple_asm_output_op (stmt, i);
       constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
       oconstraints[i] = constraint;
       parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
@@ -1856,16 +1819,17 @@ get_asm_expr_operands (tree stmt)
       if (!allows_reg && allows_mem)
        {
          tree t = get_base_address (TREE_VALUE (link));
-         if (t && DECL_P (t) && s_ann)
-           add_to_addressable_set (t, &s_ann->addresses_taken);
+         if (t && DECL_P (t))
+           gimple_add_to_addresses_taken (stmt, t);
        }
 
       get_expr_operands (stmt, &TREE_VALUE (link), opf_def);
     }
 
   /* Gather all input operands.  */
-  for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
+  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
     {
+      tree link = gimple_asm_input_op (stmt, i);
       constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
       parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
                              &allows_mem, &allows_reg);
@@ -1875,57 +1839,39 @@ get_asm_expr_operands (tree stmt)
       if (!allows_reg && allows_mem)
        {
          tree t = get_base_address (TREE_VALUE (link));
-         if (t && DECL_P (t) && s_ann)
-           add_to_addressable_set (t, &s_ann->addresses_taken);
+         if (t && DECL_P (t))
+           gimple_add_to_addresses_taken (stmt, t);
        }
 
       get_expr_operands (stmt, &TREE_VALUE (link), 0);
     }
 
   /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
-  for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
-    if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
-      {
-       unsigned i;
-       bitmap_iterator bi;
-
-       s_ann->references_memory = true;
-
-       EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
-         {
-           tree var = referenced_var (i);
-           add_stmt_operand (&var, s_ann, opf_def | opf_implicit);
-         }
-
-       EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi)
-         {
-           tree var = referenced_var (i);
-           add_stmt_operand (&var, s_ann, opf_def | opf_implicit);
-         }
-       break;
-      }
-}
-
-
-/* Scan operands for the assignment expression EXPR in statement STMT.  */
-
-static void
-get_modify_stmt_operands (tree stmt, tree expr)
-{
-  /* First get operands from the RHS.  */
-  get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 1), opf_use);
+  for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
+    {
+      tree link = gimple_asm_clobber_op (stmt, i);
+      if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
+       {
+         unsigned i;
+         bitmap_iterator bi;
 
-  /* For the LHS, use a regular definition (opf_def) for GIMPLE
-     registers.  If the LHS is a store to memory, we will need
-     a preserving definition (VDEF).
+         /* Mark the statement as having memory operands.  */
+         gimple_set_references_memory (stmt, true);
 
-     Preserving definitions are those that modify a part of an
-     aggregate object. Stores through a pointer are also represented
-     with VDEF operators.
+         EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
+           {
+             tree var = referenced_var (i);
+             add_stmt_operand (&var, stmt, opf_def | opf_implicit);
+           }
 
-     We used to distinguish between preserving and killing definitions.
-     We always emit preserving definitions now.  */
-  get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (expr, 0), opf_def);
+         EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi)
+           {
+             tree var = referenced_var (i);
+             add_stmt_operand (&var, stmt, opf_def | opf_implicit);
+           }
+         break;
+       }
+    }
 }
 
 
@@ -1934,12 +1880,11 @@ get_modify_stmt_operands (tree stmt, tree expr)
    interpret the operands found.  */
 
 static void
-get_expr_operands (tree stmt, tree *expr_p, int flags)
+get_expr_operands (gimple stmt, tree *expr_p, int flags)
 {
   enum tree_code code;
   enum tree_code_class codeclass;
   tree expr = *expr_p;
-  stmt_ann_t s_ann = stmt_ann (stmt);
 
   if (expr == NULL)
     return;
@@ -1954,7 +1899,7 @@ get_expr_operands (tree stmt, tree *expr_p, int flags)
         reference to it, but the fact that the statement takes its
         address will be of interest to some passes (e.g. alias
         resolution).  */
-      add_to_addressable_set (TREE_OPERAND (expr, 0), &s_ann->addresses_taken);
+      gimple_add_to_addresses_taken (stmt, TREE_OPERAND (expr, 0));
 
       /* If the address is invariant, there may be no interesting
         variable references inside.  */
@@ -1973,13 +1918,13 @@ get_expr_operands (tree stmt, tree *expr_p, int flags)
     case SSA_NAME:
     case SYMBOL_MEMORY_TAG:
     case NAME_MEMORY_TAG:
-     add_stmt_operand (expr_p, s_ann, flags);
+     add_stmt_operand (expr_p, stmt, flags);
      return;
 
     case VAR_DECL:
     case PARM_DECL:
     case RESULT_DECL:
-      add_stmt_operand (expr_p, s_ann, flags);
+      add_stmt_operand (expr_p, stmt, flags);
       return;
 
     case MISALIGNED_INDIRECT_REF:
@@ -2005,7 +1950,7 @@ get_expr_operands (tree stmt, tree *expr_p, int flags)
        HOST_WIDE_INT offset, size, maxsize;
 
        if (TREE_THIS_VOLATILE (expr))
-         s_ann->has_volatile_ops = true;
+         gimple_set_has_volatile_ops (stmt, true);
 
        ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
        if (TREE_CODE (ref) == INDIRECT_REF)
@@ -2020,7 +1965,7 @@ get_expr_operands (tree stmt, tree *expr_p, int flags)
        if (code == COMPONENT_REF)
          {
            if (TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
-             s_ann->has_volatile_ops = true; 
+             gimple_set_has_volatile_ops (stmt, true);
            get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
          }
        else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
@@ -2040,10 +1985,6 @@ get_expr_operands (tree stmt, tree *expr_p, int flags)
       get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
       return;
 
-    case CALL_EXPR:
-      get_call_expr_operands (stmt, expr);
-      return;
-
     case COND_EXPR:
     case VEC_COND_EXPR:
       get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);
@@ -2051,10 +1992,6 @@ get_expr_operands (tree stmt, tree *expr_p, int flags)
       get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
       return;
 
-    case GIMPLE_MODIFY_STMT:
-      get_modify_stmt_operands (stmt, expr);
-      return;
-
     case CONSTRUCTOR:
       {
        /* General aggregate CONSTRUCTORs have been decomposed, but they
@@ -2100,101 +2037,14 @@ get_expr_operands (tree stmt, tree *expr_p, int flags)
       }
 
     case CHANGE_DYNAMIC_TYPE_EXPR:
-      get_expr_operands (stmt, &CHANGE_DYNAMIC_TYPE_LOCATION (expr), opf_use);
-      return;
-
-    case OMP_FOR:
-      {
-       tree c, clauses = OMP_FOR_CLAUSES (stmt);
-       int i;
-
-       for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (expr)); i++)
-         {
-           tree init = TREE_VEC_ELT (OMP_FOR_INIT (expr), i);
-           tree cond = TREE_VEC_ELT (OMP_FOR_COND (expr), i);
-           tree incr = TREE_VEC_ELT (OMP_FOR_INCR (expr), i);
-
-           get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (init, 0), opf_def);
-           get_expr_operands (stmt, &GIMPLE_STMT_OPERAND (init, 1), opf_use);
-           get_expr_operands (stmt, &TREE_OPERAND (cond, 1), opf_use);
-           get_expr_operands (stmt,
-                              &TREE_OPERAND (GIMPLE_STMT_OPERAND (incr, 1),
-                                             1), opf_use);
-         }
-
-       c = find_omp_clause (clauses, OMP_CLAUSE_SCHEDULE);
-       if (c)
-         get_expr_operands (stmt, &OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c),
-                            opf_use);
-       return;
-      }
-
-    case OMP_CONTINUE:
-      {
-       get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_def);
-       get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
-       return;
-      }
-
-    case OMP_PARALLEL:
-      {
-       tree c, clauses = OMP_PARALLEL_CLAUSES (stmt);
-
-       if (OMP_PARALLEL_DATA_ARG (stmt))
-         {
-           get_expr_operands (stmt, &OMP_PARALLEL_DATA_ARG (stmt), opf_use);
-           add_to_addressable_set (OMP_PARALLEL_DATA_ARG (stmt),
-                                   &s_ann->addresses_taken);
-         }
-
-       c = find_omp_clause (clauses, OMP_CLAUSE_IF);
-       if (c)
-         get_expr_operands (stmt, &OMP_CLAUSE_IF_EXPR (c), opf_use);
-       c = find_omp_clause (clauses, OMP_CLAUSE_NUM_THREADS);
-       if (c)
-         get_expr_operands (stmt, &OMP_CLAUSE_NUM_THREADS_EXPR (c), opf_use);
-       return;
-      }
-
-    case OMP_SECTIONS:
-      {
-       get_expr_operands (stmt, &OMP_SECTIONS_CONTROL (expr), opf_def);
-       return;
-      }
-
-    case OMP_ATOMIC_LOAD:
-      {
-       tree *addr = &TREE_OPERAND (expr, 1);
-       get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_def);
-
-       if (TREE_CODE (*addr) == ADDR_EXPR)
-         get_expr_operands (stmt, &TREE_OPERAND (*addr, 0), opf_def);
-       else
-         get_addr_dereference_operands (stmt, addr, opf_def,
-                                        NULL_TREE, 0, -1, true);
-       return;
-      }
-
-    case OMP_ATOMIC_STORE:
-      {
-       get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);
-       return;
-      }
+      gcc_unreachable ();
 
-    case BLOCK:
     case FUNCTION_DECL:
-    case EXC_PTR_EXPR:
-    case FILTER_EXPR:
     case LABEL_DECL:
     case CONST_DECL:
-    case OMP_SINGLE:
-    case OMP_MASTER:
-    case OMP_ORDERED:
-    case OMP_CRITICAL:
-    case OMP_RETURN:
-    case OMP_SECTION:
-    case OMP_SECTIONS_SWITCH:
-    case PREDICT_EXPR:
+    case CASE_LABEL_EXPR:
+    case FILTER_EXPR:
+    case EXC_PTR_EXPR:
       /* Expressions that make no memory references.  */
       return;
 
@@ -2221,59 +2071,28 @@ get_expr_operands (tree stmt, tree *expr_p, int flags)
    build_* operand vectors will have potential operands in them.  */
 
 static void
-parse_ssa_operands (tree stmt)
+parse_ssa_operands (gimple stmt)
 {
-  enum tree_code code;
+  enum gimple_code code = gimple_code (stmt);
 
-  code = TREE_CODE (stmt);
-  switch (code)
+  if (code == GIMPLE_ASM)
+    get_asm_expr_operands (stmt);
+  else
     {
-    case GIMPLE_MODIFY_STMT:
-      get_modify_stmt_operands (stmt, stmt);
-      break;
-
-    case COND_EXPR:
-      get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_use);
-      break;
-
-    case SWITCH_EXPR:
-      get_expr_operands (stmt, &SWITCH_COND (stmt), opf_use);
-      break;
-
-    case ASM_EXPR:
-      get_asm_expr_operands (stmt);
-      break;
-
-    case RETURN_EXPR:
-      get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_use);
-      break;
-
-    case GOTO_EXPR:
-      get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_use);
-      break;
+      size_t i, start = 0;
 
-    case LABEL_EXPR:
-      get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_use);
-      break;
+      if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
+       {
+         get_expr_operands (stmt, gimple_op_ptr (stmt, 0), opf_def);
+         start = 1;
+       }
 
-    case BIND_EXPR:
-    case CASE_LABEL_EXPR:
-    case TRY_CATCH_EXPR:
-    case TRY_FINALLY_EXPR:
-    case EH_FILTER_EXPR:
-    case CATCH_EXPR:
-    case RESX_EXPR:
-      /* These nodes contain no variable references.  */
-     break;
+      for (i = start; i < gimple_num_ops (stmt); i++)
+       get_expr_operands (stmt, gimple_op_ptr (stmt, i), opf_use);
 
-    default:
-      /* Notice that if get_expr_operands tries to use &STMT as the
-        operand pointer (which may only happen for USE operands), we
-        will fail in add_stmt_operand.  This default will handle
-        statements like empty statements, or CALL_EXPRs that may
-        appear on the RHS of a statement or as statements themselves.  */
-      get_expr_operands (stmt, &stmt, opf_use);
-      break;
+      /* Add call-clobbered operands, if needed.  */
+      if (code == GIMPLE_CALL)
+       maybe_add_call_clobbered_vops (stmt);
     }
 }
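
   A hedged sketch of what the generic scan above sees (the dump helper is
   hypothetical): for a GIMPLE_ASSIGN such as a_1 = b_2 + c_3, gimple_num_ops
   is 3, operand 0 is scanned with opf_def and operands 1..2 with opf_use.

/* Print the real (tree) operands of STMT in the order the scanner
   visits them.  */
static void
dump_scanned_operands (FILE *file, gimple stmt)
{
  size_t i;

  for (i = 0; i < gimple_num_ops (stmt); i++)
    if (gimple_op (stmt, i))
      {
	fprintf (file, "op %u: ", (unsigned) i);
	print_generic_expr (file, gimple_op (stmt, i), 0);
	fprintf (file, "\n");
      }
}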
 
@@ -2281,17 +2100,16 @@ parse_ssa_operands (tree stmt)
 /* Create an operands cache for STMT.  */
 
 static void
-build_ssa_operands (tree stmt)
+build_ssa_operands (gimple stmt)
 {
-  stmt_ann_t ann = get_stmt_ann (stmt);
-  
   /* Initially assume that the statement has no volatile operands and
      makes no memory references.  */
-  ann->has_volatile_ops = false;
-  ann->references_memory = false;
+  gimple_set_has_volatile_ops (stmt, false);
+  gimple_set_references_memory (stmt, false);
+
   /* Just clear the bitmap so we don't end up reallocating it over and over.  */
-  if (ann->addresses_taken)
-    bitmap_clear (ann->addresses_taken);
+  if (gimple_addresses_taken (stmt))
+    bitmap_clear (gimple_addresses_taken (stmt));
 
   start_ssa_stmt_operands ();
   parse_ssa_operands (stmt);
@@ -2299,13 +2117,10 @@ build_ssa_operands (tree stmt)
   operand_build_sort_virtual (build_vdefs);
   finalize_ssa_stmt_operands (stmt);
 
-  if (ann->addresses_taken && bitmap_empty_p (ann->addresses_taken))
-    ann->addresses_taken = NULL;
-
   /* For added safety, assume that statements with volatile operands
      also reference memory.  */
-  if (ann->has_volatile_ops)
-    ann->references_memory = true;
+  if (gimple_has_volatile_ops (stmt))
+    gimple_set_references_memory (stmt, true);
 }
 
 
@@ -2313,12 +2128,12 @@ build_ssa_operands (tree stmt)
    the stmt operand lists.  */
 
 void
-free_stmt_operands (tree stmt)
+free_stmt_operands (gimple stmt)
 {
-  def_optype_p defs = DEF_OPS (stmt), last_def;
-  use_optype_p uses = USE_OPS (stmt), last_use;
-  voptype_p vuses = VUSE_OPS (stmt);
-  voptype_p vdefs = VDEF_OPS (stmt), vdef, next_vdef;
+  def_optype_p defs = gimple_def_ops (stmt), last_def;
+  use_optype_p uses = gimple_use_ops (stmt), last_use;
+  voptype_p vuses = gimple_vuse_ops (stmt);
+  voptype_p vdefs = gimple_vdef_ops (stmt), vdef, next_vdef;
   unsigned i;
 
   if (defs)
@@ -2327,7 +2142,7 @@ free_stmt_operands (tree stmt)
        continue;
       last_def->next = gimple_ssa_operands (cfun)->free_defs;
       gimple_ssa_operands (cfun)->free_defs = defs;
-      DEF_OPS (stmt) = NULL;
+      gimple_set_def_ops (stmt, NULL);
     }
 
   if (uses)
@@ -2337,7 +2152,7 @@ free_stmt_operands (tree stmt)
       delink_imm_use (USE_OP_PTR (last_use));
       last_use->next = gimple_ssa_operands (cfun)->free_uses;
       gimple_ssa_operands (cfun)->free_uses = uses;
-      USE_OPS (stmt) = NULL;
+      gimple_set_use_ops (stmt, NULL);
     }
 
   if (vuses)
@@ -2345,7 +2160,7 @@ free_stmt_operands (tree stmt)
       for (i = 0; i < VUSE_NUM (vuses); i++)
        delink_imm_use (VUSE_OP_PTR (vuses, i));
       add_vop_to_freelist (vuses);
-      VUSE_OPS (stmt) = NULL;
+      gimple_set_vuse_ops (stmt, NULL);
     }
 
   if (vdefs)
@@ -2356,46 +2171,35 @@ free_stmt_operands (tree stmt)
          delink_imm_use (VDEF_OP_PTR (vdef, 0));
          add_vop_to_freelist (vdef);
        }
-      VDEF_OPS (stmt) = NULL;
+      gimple_set_vdef_ops (stmt, NULL);
     }
-}
-
 
-/* Free any operands vectors in OPS.  */
+  if (gimple_has_ops (stmt))
+    gimple_set_addresses_taken (stmt, NULL);
 
-void 
-free_ssa_operands (stmt_operands_p ops)
-{
-  ops->def_ops = NULL;
-  ops->use_ops = NULL;
-  ops->vdef_ops = NULL;
-  ops->vuse_ops = NULL;
-  BITMAP_FREE (ops->loads);
-  BITMAP_FREE (ops->stores);
+  if (gimple_has_mem_ops (stmt))
+    {
+      gimple_set_stored_syms (stmt, NULL, &operands_bitmap_obstack);
+      gimple_set_loaded_syms (stmt, NULL, &operands_bitmap_obstack);
+    }
 }
 
 
 /* Get the operands of statement STMT.  */
 
 void
-update_stmt_operands (tree stmt)
+update_stmt_operands (gimple stmt)
 {
-  stmt_ann_t ann = get_stmt_ann (stmt);
-
   /* If update_stmt_operands is called before SSA is initialized, do
      nothing.  */
   if (!ssa_operands_active ())
     return;
 
-  /* The optimizers cannot handle statements that are nothing but a
-     _DECL.  This indicates a bug in the gimplifier.  */
-  gcc_assert (!SSA_VAR_P (stmt));
-
   timevar_push (TV_TREE_OPS);
 
-  gcc_assert (ann->modified);
+  gcc_assert (gimple_modified_p (stmt));
   build_ssa_operands (stmt);
-  ann->modified = 0;
+  gimple_set_modified (stmt, false);
 
   timevar_pop (TV_TREE_OPS);
 }
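
   A sketch of the caller-side idiom this assert expects (names such as
   NEW_RHS are examples only, not from the patch): edit the statement in
   place, flag it as modified, then rebuild its operand caches.

  gimple_assign_set_rhs1 (stmt, new_rhs);	/* any in-place edit */
  gimple_set_modified (stmt, true);
  update_stmt_operands (stmt);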
@@ -2404,50 +2208,45 @@ update_stmt_operands (tree stmt)
 /* Copies virtual operands from SRC to DST.  */
 
 void
-copy_virtual_operands (tree dest, tree src)
+copy_virtual_operands (gimple dest, gimple src)
 {
   unsigned int i, n;
   voptype_p src_vuses, dest_vuses;
   voptype_p src_vdefs, dest_vdefs;
   struct voptype_d vuse;
   struct voptype_d vdef;
-  stmt_ann_t dest_ann;
 
-  VDEF_OPS (dest) = NULL;
-  VUSE_OPS (dest) = NULL;
+  if (!gimple_has_mem_ops (src))
+    return;
 
-  dest_ann = get_stmt_ann (dest);
-  BITMAP_FREE (dest_ann->operands.loads);
-  BITMAP_FREE (dest_ann->operands.stores);
+  gimple_set_vdef_ops (dest, NULL);
+  gimple_set_vuse_ops (dest, NULL);
 
-  if (LOADED_SYMS (src))
-    {
-      dest_ann->operands.loads = BITMAP_ALLOC (&operands_bitmap_obstack);
-      bitmap_copy (dest_ann->operands.loads, LOADED_SYMS (src));
-    }
-
-  if (STORED_SYMS (src))
-    {
-      dest_ann->operands.stores = BITMAP_ALLOC (&operands_bitmap_obstack);
-      bitmap_copy (dest_ann->operands.stores, STORED_SYMS (src));
-    }
+  gimple_set_stored_syms (dest, gimple_stored_syms (src),
+                         &operands_bitmap_obstack);
+  gimple_set_loaded_syms (dest, gimple_loaded_syms (src),
+                         &operands_bitmap_obstack);
 
   /* Copy all the VUSE operators and corresponding operands.  */
   dest_vuses = &vuse;
-  for (src_vuses = VUSE_OPS (src); src_vuses; src_vuses = src_vuses->next)
+  for (src_vuses = gimple_vuse_ops (src);
+       src_vuses;
+       src_vuses = src_vuses->next)
     {
       n = VUSE_NUM (src_vuses);
       dest_vuses = add_vuse_op (dest, NULL_TREE, n, dest_vuses);
       for (i = 0; i < n; i++)
        SET_USE (VUSE_OP_PTR (dest_vuses, i), VUSE_OP (src_vuses, i));
 
-      if (VUSE_OPS (dest) == NULL)
-       VUSE_OPS (dest) = vuse.next;
+      if (gimple_vuse_ops (dest) == NULL)
+       gimple_set_vuse_ops (dest, vuse.next);
     }
 
   /* Copy all the VDEF operators and corresponding operands.  */
   dest_vdefs = &vdef;
-  for (src_vdefs = VDEF_OPS (src); src_vdefs; src_vdefs = src_vdefs->next)
+  for (src_vdefs = gimple_vdef_ops (src);
+       src_vdefs;
+       src_vdefs = src_vdefs->next)
     {
       n = VUSE_NUM (src_vdefs);
       dest_vdefs = add_vdef_op (dest, NULL_TREE, n, dest_vdefs);
@@ -2455,8 +2254,8 @@ copy_virtual_operands (tree dest, tree src)
       for (i = 0; i < n; i++)
        SET_USE (VUSE_OP_PTR (dest_vdefs, i), VUSE_OP (src_vdefs, i));
 
-      if (VDEF_OPS (dest) == NULL)
-       VDEF_OPS (dest) = vdef.next;
+      if (gimple_vdef_ops (dest) == NULL)
+       gimple_set_vdef_ops (dest, vdef.next);
     }
 }
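
   Illustrative sketch only (the helper is hypothetical): a pass that
   duplicates a memory-touching statement can reuse its virtual operands
   instead of re-running the operand scanner on the copy.

static gimple
copy_stmt_with_vops (gimple stmt)
{
  gimple new_stmt = gimple_copy (stmt);
  copy_virtual_operands (new_stmt, stmt);
  return new_stmt;
}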
 
@@ -2469,19 +2268,15 @@ copy_virtual_operands (tree dest, tree src)
    uses of this stmt will be de-linked.  */
 
 void
-create_ssa_artificial_load_stmt (tree new_stmt, tree old_stmt,
+create_ssa_artificial_load_stmt (gimple new_stmt, gimple old_stmt,
                                 bool delink_imm_uses_p)
 {
   tree op;
   ssa_op_iter iter;
   use_operand_p use_p;
   unsigned i;
-  stmt_ann_t ann;
 
-  /* Create the stmt annotation but make sure to not mark the stmt
-     as modified as we will build operands ourselves.  */
-  ann = get_stmt_ann (new_stmt);
-  ann->modified = 0;
+  gimple_set_modified (new_stmt, false);
 
   /* Process NEW_STMT looking for operands.  */
   start_ssa_stmt_operands ();
@@ -2521,7 +2316,7 @@ create_ssa_artificial_load_stmt (tree new_stmt, tree old_stmt,
    to test the validity of the swap operation.  */
 
 void
-swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
+swap_tree_operands (gimple stmt, tree *exp0, tree *exp1)
 {
   tree op0, op1;
   op0 = *exp0;
@@ -2536,14 +2331,14 @@ swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
       use0 = use1 = NULL;
 
       /* Find the 2 operands in the cache, if they are there.  */
-      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
+      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp0)
          {
            use0 = ptr;
            break;
          }
 
-      for (ptr = USE_OPS (stmt); ptr; ptr = ptr->next)
+      for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
        if (USE_OP_PTR (ptr)->use == exp1)
          {
            use1 = ptr;
@@ -2565,19 +2360,13 @@ swap_tree_operands (tree stmt, tree *exp0, tree *exp1)
   *exp1 = op0;
 }
 
-
-/* Add the base address of REF to the set *ADDRESSES_TAKEN.  If
-   *ADDRESSES_TAKEN is NULL, a new set is created.  REF may be
-   a single variable whose address has been taken or any other valid
-   GIMPLE memory reference (structure reference, array, etc).  */
+/* Add the base address of REF to SET.  */
 
 void
-add_to_addressable_set (tree ref, bitmap *addresses_taken)
+add_to_addressable_set (tree ref, bitmap *set)
 {
   tree var;
 
-  gcc_assert (addresses_taken);
-
   /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
      as the only thing we take the address of.  If VAR is a structure,
      taking the address of a field means that the whole structure may
@@ -2586,14 +2375,29 @@ add_to_addressable_set (tree ref, bitmap *addresses_taken)
   var = get_base_address (ref);
   if (var && SSA_VAR_P (var))
     {
-      if (*addresses_taken == NULL)
-       *addresses_taken = BITMAP_GGC_ALLOC ();      
-      bitmap_set_bit (*addresses_taken, DECL_UID (var));
+      if (*set == NULL)
+       *set = BITMAP_ALLOC (&operands_bitmap_obstack);
+
+      bitmap_set_bit (*set, DECL_UID (var));
       TREE_ADDRESSABLE (var) = 1;
     }
 }
 
 
+/* Add the base address of REF to the set of addresses taken by STMT.
+   REF may be a single variable whose address has been taken or any
+   other valid GIMPLE memory reference (structure reference, array,
+   etc).  If the base address of REF is a decl that has sub-variables,
+   also add all of its sub-variables.  */
+
+void
+gimple_add_to_addresses_taken (gimple stmt, tree ref)
+{
+  gcc_assert (gimple_has_ops (stmt));
+  add_to_addressable_set (ref, gimple_addresses_taken_ptr (stmt));
+}
+
+
 /* Scan the immediate_use list for VAR making sure it is linked properly.
    Return TRUE if there is a problem and emit an error message to F.  */
 
@@ -2653,10 +2457,10 @@ verify_imm_links (FILE *f, tree var)
   return false;
 
  error:
-  if (ptr->stmt && stmt_modified_p (ptr->stmt))
+  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
     {
-      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->stmt);
-      print_generic_stmt (f, ptr->stmt, TDF_SLIM);
+      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
+      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
     }
   fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr, 
           (void *)ptr->use);
@@ -2688,13 +2492,13 @@ dump_immediate_uses_for (FILE *file, tree var)
 
   FOR_EACH_IMM_USE_FAST (use_p, iter, var)
     {
-      if (use_p->stmt == NULL && use_p->use == NULL)
+      if (use_p->loc.stmt == NULL && use_p->use == NULL)
         fprintf (file, "***end of stmt iterator marker***\n");
       else
        if (!is_gimple_reg (USE_FROM_PTR (use_p)))
-         print_generic_stmt (file, USE_STMT (use_p), TDF_VOPS|TDF_MEMSYMS);
+         print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS);
        else
-         print_generic_stmt (file, USE_STMT (use_p), TDF_SLIM);
+         print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM);
     }
   fprintf(file, "\n");
 }
@@ -2745,15 +2549,15 @@ debug_immediate_uses_for (tree var)
    needed to keep the SSA form up to date.  */
 
 void
-push_stmt_changes (tree *stmt_p)
+push_stmt_changes (gimple *stmt_p)
 {
-  tree stmt;
+  gimple stmt;
   scb_t buf;
-  
+
   stmt = *stmt_p;
 
   /* It makes no sense to keep track of PHI nodes.  */
-  if (TREE_CODE (stmt) == PHI_NODE)
+  if (gimple_code (stmt) == GIMPLE_PHI)
     return;
 
   buf = XNEW (struct scb_d);
@@ -2761,7 +2565,7 @@ push_stmt_changes (tree *stmt_p)
 
   buf->stmt_p = stmt_p;
 
-  if (stmt_references_memory_p (stmt))
+  if (gimple_references_memory_p (stmt))
     {
       tree op;
       ssa_op_iter i;
@@ -2815,9 +2619,10 @@ mark_difference_for_renaming (bitmap s1, bitmap s2)
    the statement.  */
 
 void
-pop_stmt_changes (tree *stmt_p)
+pop_stmt_changes (gimple *stmt_p)
 {
-  tree op, stmt;
+  tree op;
+  gimple stmt;
   ssa_op_iter iter;
   bitmap loads, stores;
   scb_t buf;
@@ -2825,7 +2630,7 @@ pop_stmt_changes (tree *stmt_p)
   stmt = *stmt_p;
 
   /* It makes no sense to keep track of PHI nodes.  */
-  if (TREE_CODE (stmt) == PHI_NODE)
+  if (gimple_code (stmt) == GIMPLE_PHI)
     return;
 
   buf = VEC_pop (scb_t, scb_stack);
@@ -2843,7 +2648,7 @@ pop_stmt_changes (tree *stmt_p)
      memory anymore, but we still need to act on the differences in
      the sets of symbols.  */
   loads = stores = NULL;
-  if (stmt_references_memory_p (stmt))
+  if (gimple_references_memory_p (stmt))
     {
       tree op;
       ssa_op_iter i;
@@ -2906,14 +2711,14 @@ pop_stmt_changes (tree *stmt_p)
    statement.  It avoids the expensive operand re-scan.  */
 
 void
-discard_stmt_changes (tree *stmt_p)
+discard_stmt_changes (gimple *stmt_p)
 {
   scb_t buf;
-  tree stmt;
+  gimple stmt;
   
   /* It makes no sense to keep track of PHI nodes.  */
   stmt = *stmt_p;
-  if (TREE_CODE (stmt) == PHI_NODE)
+  if (gimple_code (stmt) == GIMPLE_PHI)
     return;
 
   buf = VEC_pop (scb_t, scb_stack);
@@ -2925,15 +2730,3 @@ discard_stmt_changes (tree *stmt_p)
   buf->stmt_p = NULL;
   free (buf);
 }
-
-
-/* Returns true if statement STMT may access memory.  */
-
-bool
-stmt_references_memory_p (tree stmt)
-{
-  if (!gimple_ssa_operands (cfun)->ops_active || TREE_CODE (stmt) == PHI_NODE)
-    return false;
-
-  return stmt_ann (stmt)->references_memory;
-}
index ba9793d..cdbc050 100644 (file)
@@ -157,15 +157,7 @@ typedef struct stmt_operands_d *stmt_operands_p;
 #define SET_USE(USE, V)                set_ssa_use_from_ptr (USE, V)
 #define SET_DEF(DEF, V)                ((*(DEF)) = (V))
 
-#define USE_STMT(USE)          (USE)->stmt
-
-#define DEF_OPS(STMT)          (stmt_ann (STMT)->operands.def_ops)
-#define USE_OPS(STMT)          (stmt_ann (STMT)->operands.use_ops)
-#define VUSE_OPS(STMT)         (stmt_ann (STMT)->operands.vuse_ops)
-#define VDEF_OPS(STMT)         (stmt_ann (STMT)->operands.vdef_ops)
-
-#define LOADED_SYMS(STMT)      (stmt_ann (STMT)->operands.loads)
-#define STORED_SYMS(STMT)      (stmt_ann (STMT)->operands.stores)
+#define USE_STMT(USE)          (USE)->loc.stmt
 
 #define USE_OP_PTR(OP)         (&((OP)->use_ptr))
 #define USE_OP(OP)             (USE_FROM_PTR (USE_OP_PTR (OP)))
@@ -187,11 +179,11 @@ typedef struct stmt_operands_d *stmt_operands_p;
 #define VDEF_NUM(OP)           VUSE_VECT_NUM_ELEM ((OP)->usev)
 #define VDEF_VECT(OP)          &((OP)->usev)
 
-#define PHI_RESULT_PTR(PHI)    get_phi_result_ptr (PHI)
+#define PHI_RESULT_PTR(PHI)    gimple_phi_result_ptr (PHI)
 #define PHI_RESULT(PHI)                DEF_FROM_PTR (PHI_RESULT_PTR (PHI))
 #define SET_PHI_RESULT(PHI, V) SET_DEF (PHI_RESULT_PTR (PHI), (V))
 
-#define PHI_ARG_DEF_PTR(PHI, I)        get_phi_arg_def_ptr ((PHI), (I))
+#define PHI_ARG_DEF_PTR(PHI, I)        gimple_phi_arg_imm_use_ptr ((PHI), (I))
 #define PHI_ARG_DEF(PHI, I)    USE_FROM_PTR (PHI_ARG_DEF_PTR ((PHI), (I)))
 #define SET_PHI_ARG_DEF(PHI, I, V)                                     \
                                SET_USE (PHI_ARG_DEF_PTR ((PHI), (I)), (V))
@@ -204,14 +196,13 @@ typedef struct stmt_operands_d *stmt_operands_p;
 
 extern void init_ssa_operands (void);
 extern void fini_ssa_operands (void);
-extern void free_ssa_operands (stmt_operands_p);
-extern void update_stmt_operands (tree);
-extern void free_stmt_operands (tree);
+extern void update_stmt_operands (gimple);
+extern void free_stmt_operands (gimple);
 extern bool verify_imm_links (FILE *f, tree var);
 
-extern void copy_virtual_operands (tree, tree);
+extern void copy_virtual_operands (gimple, gimple);
 extern int operand_build_cmp (const void *, const void *);
-extern void create_ssa_artificial_load_stmt (tree, tree, bool);
+extern void create_ssa_artificial_load_stmt (gimple, gimple, bool);
 
 extern void dump_immediate_uses (FILE *file);
 extern void dump_immediate_uses_for (FILE *file, tree var);
@@ -222,10 +213,10 @@ extern void debug_decl_set (bitmap);
 
 extern bool ssa_operands_active (void);
 
-extern void add_to_addressable_set (tree, bitmap *);
-extern void push_stmt_changes (tree *);
-extern void pop_stmt_changes (tree *);
-extern void discard_stmt_changes (tree *);
+extern void push_stmt_changes (gimple *);
+extern void pop_stmt_changes (gimple *);
+extern void discard_stmt_changes (gimple *);
+void add_to_addressable_set (tree, bitmap *);
 
 enum ssa_op_iter_type {
   ssa_op_iter_none = 0,
@@ -250,7 +241,7 @@ typedef struct ssa_operand_iterator_d
   enum ssa_op_iter_type iter_type;
   int phi_i;
   int num_phi;
-  tree phi_stmt;
+  gimple phi_stmt;
   bool done;
   unsigned int vuse_index;
   unsigned int mayuse_index;
@@ -316,7 +307,7 @@ typedef struct ssa_operand_iterator_d
 /* This macro will execute a loop over a stmt, regardless of whether it is
    a real stmt or a PHI node, looking at the USE nodes matching FLAGS.  */
 #define FOR_EACH_PHI_OR_STMT_USE(USEVAR, STMT, ITER, FLAGS)    \
-  for ((USEVAR) = (TREE_CODE (STMT) == PHI_NODE                \
+  for ((USEVAR) = (gimple_code (STMT) == GIMPLE_PHI            \
                   ? op_iter_init_phiuse (&(ITER), STMT, FLAGS) \
                   : op_iter_init_use (&(ITER), STMT, FLAGS));  \
        !op_iter_done (&(ITER));                                        \
@@ -325,7 +316,7 @@ typedef struct ssa_operand_iterator_d
 /* This macro will execute a loop over a stmt, regardless of whether it is
    a real stmt or a PHI node, looking at the DEF nodes matching FLAGS.  */
 #define FOR_EACH_PHI_OR_STMT_DEF(DEFVAR, STMT, ITER, FLAGS)    \
-  for ((DEFVAR) = (TREE_CODE (STMT) == PHI_NODE                \
+  for ((DEFVAR) = (gimple_code (STMT) == GIMPLE_PHI            \
                   ? op_iter_init_phidef (&(ITER), STMT, FLAGS) \
                   : op_iter_init_def (&(ITER), STMT, FLAGS));  \
        !op_iter_done (&(ITER));                                        \
index 80917a8..72ba04a 100644 (file)
@@ -37,19 +37,20 @@ along with GCC; see the file COPYING3.  If not see
 #include "pointer-set.h"
 #include "domwalk.h"
 
+static unsigned int tree_ssa_phiopt (void);
 static unsigned int tree_ssa_phiopt_worker (bool);
 static bool conditional_replacement (basic_block, basic_block,
-                                    edge, edge, tree, tree, tree);
+                                    edge, edge, gimple, tree, tree);
 static bool value_replacement (basic_block, basic_block,
-                              edge, edge, tree, tree, tree);
+                              edge, edge, gimple, tree, tree);
 static bool minmax_replacement (basic_block, basic_block,
-                               edge, edge, tree, tree, tree);
+                               edge, edge, gimple, tree, tree);
 static bool abs_replacement (basic_block, basic_block,
-                            edge, edge, tree, tree, tree);
+                            edge, edge, gimple, tree, tree);
 static bool cond_store_replacement (basic_block, basic_block, edge, edge,
                                    struct pointer_set_t *);
 static struct pointer_set_t * get_non_trapping (void);
-static void replace_phi_edge_with_variable (basic_block, edge, tree, tree);
+static void replace_phi_edge_with_variable (basic_block, edge, gimple, tree);
 
 /* This pass tries to replace an if-then-else block with an
    assignment.  We have four kinds of transformations.  Some of these
@@ -208,18 +209,17 @@ tree_ssa_phiopt_worker (bool do_store_elim)
 
   for (i = 0; i < n; i++) 
     {
-      tree cond_expr;
-      tree phi;
+      gimple cond_stmt, phi;
       basic_block bb1, bb2;
       edge e1, e2;
       tree arg0, arg1;
 
       bb = bb_order[i];
 
-      cond_expr = last_stmt (bb);
-      /* Check to see if the last statement is a COND_EXPR.  */
-      if (!cond_expr
-          || TREE_CODE (cond_expr) != COND_EXPR)
+      cond_stmt = last_stmt (bb);
+      /* Check to see if the last statement is a GIMPLE_COND.  */
+      if (!cond_stmt
+          || gimple_code (cond_stmt) != GIMPLE_COND)
         continue;
 
       e1 = EDGE_SUCC (bb, 0);
@@ -278,16 +278,17 @@ tree_ssa_phiopt_worker (bool do_store_elim)
        }
       else
        {
-         phi = phi_nodes (bb2);
+         gimple_seq phis = phi_nodes (bb2);
 
          /* Check to make sure that there is only one PHI node.
             TODO: we could do it with more than one iff the other PHI nodes
             have the same elements for these two edges.  */
-         if (!phi || PHI_CHAIN (phi) != NULL)
+         if (! gimple_seq_singleton_p (phis))
            continue;
 
-         arg0 = PHI_ARG_DEF_TREE (phi, e1->dest_idx);
-         arg1 = PHI_ARG_DEF_TREE (phi, e2->dest_idx);
+         phi = gsi_stmt (gsi_start (phis));
+         arg0 = gimple_phi_arg_def (phi, e1->dest_idx);
+         arg1 = gimple_phi_arg_def (phi, e2->dest_idx);
 
          /* Something is wrong if we cannot find the arguments in the PHI
             node.  */
@@ -314,7 +315,7 @@ tree_ssa_phiopt_worker (bool do_store_elim)
     {
       /* In cond-store replacement we have added some loads on edges
          and new VOPS (as we moved the store, and created a load).  */
-      bsi_commit_edge_inserts ();
+      gsi_commit_edge_inserts ();
       return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
     }
   else if (cfgchanged)
@@ -382,19 +383,8 @@ blocks_in_phiopt_order (void)
 bool
 empty_block_p (basic_block bb)
 {
-  block_stmt_iterator bsi;
-
   /* BB must have no executable statements.  */
-  bsi = bsi_start (bb);
-  while (!bsi_end_p (bsi)
-         && (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR
-             || IS_EMPTY_STMT (bsi_stmt (bsi))))
-    bsi_next (&bsi);
-
-  if (!bsi_end_p (bsi))
-    return false;
-
-  return true;
+  return gsi_end_p (gsi_after_labels (bb));
 }
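
   For reference, a sketch (not part of the patch; the helper name is
   invented) of the iterator idiom that replaces the old block_stmt_iterator
   loop: gsi_after_labels positions the iterator past any leading
   GIMPLE_LABELs, so an "empty" block is one where that iterator is already
   at the end.

static unsigned
count_executable_stmts (basic_block bb)
{
  unsigned n = 0;
  gimple_stmt_iterator gsi;

  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    n++;

  return n;
}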
 
 /* Replace PHI node element whose edge is E in block BB with variable NEW.
@@ -403,11 +393,11 @@ empty_block_p (basic_block bb)
 
 static void
 replace_phi_edge_with_variable (basic_block cond_block,
-                               edge e, tree phi, tree new_tree)
+                               edge e, gimple phi, tree new_tree)
 {
-  basic_block bb = bb_for_stmt (phi);
+  basic_block bb = gimple_bb (phi);
   basic_block block_to_remove;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
 
   /* Change the PHI argument to new.  */
   SET_USE (PHI_ARG_DEF_PTR (phi, e->dest_idx), new_tree);
@@ -435,8 +425,8 @@ replace_phi_edge_with_variable (basic_block cond_block,
   delete_basic_block (block_to_remove);
 
   /* Eliminate the COND_EXPR at the end of COND_BLOCK.  */
-  bsi = bsi_last (cond_block);
-  bsi_remove (&bsi, true);
+  gsi = gsi_last_bb (cond_block);
+  gsi_remove (&gsi, true);
 
   if (dump_file && (dump_flags & TDF_DETAILS))
     fprintf (dump_file,
@@ -453,16 +443,15 @@ replace_phi_edge_with_variable (basic_block cond_block,
 
 static bool
 conditional_replacement (basic_block cond_bb, basic_block middle_bb,
-                        edge e0, edge e1, tree phi,
+                        edge e0, edge e1, gimple phi,
                         tree arg0, tree arg1)
 {
   tree result;
-  tree old_result = NULL;
-  tree new_stmt, cond;
-  block_stmt_iterator bsi;
+  gimple stmt, new_stmt;
+  tree cond;
+  gimple_stmt_iterator gsi;
   edge true_edge, false_edge;
-  tree new_var = NULL;
-  tree new_var1;
+  tree new_var, new_var2;
 
   /* FIXME: Gimplification of complex type is too hard for now.  */
   if (TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
@@ -480,61 +469,7 @@ conditional_replacement (basic_block cond_bb, basic_block middle_bb,
   if (!empty_block_p (middle_bb))
     return false;
 
-  /* If the condition is not a naked SSA_NAME and its type does not
-     match the type of the result, then we have to create a new
-     variable to optimize this case as it would likely create
-     non-gimple code when the condition was converted to the
-     result's type.  */
-  cond = COND_EXPR_COND (last_stmt (cond_bb));
-  result = PHI_RESULT (phi);
-  if (TREE_CODE (cond) != SSA_NAME
-      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (cond)))
-    {
-      tree tmp;
-
-      if (!COMPARISON_CLASS_P (cond))
-       return false;
-
-      tmp = create_tmp_var (TREE_TYPE (cond), NULL);
-      add_referenced_var (tmp);
-      new_var = make_ssa_name (tmp, NULL);
-      old_result = cond;
-      cond = new_var;
-    }
-
-  /* If the condition was a naked SSA_NAME and the type is not the
-     same as the type of the result, then convert the type of the
-     condition.  */
-  if (!useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (cond)))
-    cond = fold_convert (TREE_TYPE (result), cond);
-
-  /* We need to know which is the true edge and which is the false
-     edge so that we know when to invert the condition below.  */
-  extract_true_false_edges_from_block (cond_bb, &true_edge, &false_edge);
-
-  /* Insert our new statement at the end of conditional block before the
-     COND_EXPR.  */
-  bsi = bsi_last (cond_bb);
-  bsi_insert_before (&bsi, build_empty_stmt (), BSI_NEW_STMT);
-
-  if (old_result)
-    {
-      tree new1;
-
-      new1 = build2 (TREE_CODE (old_result), TREE_TYPE (old_result),
-                    TREE_OPERAND (old_result, 0),
-                    TREE_OPERAND (old_result, 1));
-
-      new1 = build_gimple_modify_stmt (new_var, new1);
-      SSA_NAME_DEF_STMT (new_var) = new1;
-
-      bsi_insert_after (&bsi, new1, BSI_NEW_STMT);
-    }
-
-  new_var1 = duplicate_ssa_name (PHI_RESULT (phi), NULL);
-
-
-  /* At this point we know we have a COND_EXPR with two successors.
+  /* At this point we know we have a GIMPLE_COND with two successors.
      One successor is BB, the other successor is an empty block which
      falls through into BB.
 
@@ -549,71 +484,46 @@ conditional_replacement (basic_block cond_bb, basic_block middle_bb,
      We use the condition as-is if the argument associated with the
      true edge has the value one or the argument associated with the
      false edge has the value zero.  Note that those conditions are not
-     the same since only one of the outgoing edges from the COND_EXPR
+     the same since only one of the outgoing edges from the GIMPLE_COND
      will directly reach BB and thus be associated with an argument.  */
-  if ((e0 == true_edge && integer_onep (arg0))
-      || (e0 == false_edge && integer_zerop (arg0))
-      || (e1 == true_edge && integer_onep (arg1))
-      || (e1 == false_edge && integer_zerop (arg1)))
-    {
-      new_stmt = build_gimple_modify_stmt (new_var1, cond);
-    }
-  else
-    {
-      tree cond1 = invert_truthvalue (cond);
-
-      cond = cond1;
-
-      /* If what we get back is a conditional expression, there is no
-         way that it can be gimple.  */
-      if (TREE_CODE (cond) == COND_EXPR)
-       {
-         release_ssa_name (new_var1);
-         return false;
-       }
 
-      /* If COND is not something we can expect to be reducible to a GIMPLE
-        condition, return early.  */
-      if (is_gimple_cast (cond))
-       cond1 = TREE_OPERAND (cond, 0);
-      if (TREE_CODE (cond1) == TRUTH_NOT_EXPR
-         && !is_gimple_val (TREE_OPERAND (cond1, 0)))
-       {
-         release_ssa_name (new_var1);
-         return false;
-       }
+  stmt = last_stmt (cond_bb);
+  result = PHI_RESULT (phi);
 
-      /* If what we get back is not gimple try to create it as gimple by
-        using a temporary variable.  */
-      if (is_gimple_cast (cond)
-         && !is_gimple_val (TREE_OPERAND (cond, 0)))
-       {
-         tree op0, tmp, cond_tmp;
-
-         /* Only "real" casts are OK here, not everything that is
-            acceptable to is_gimple_cast.  Make sure we don't do
-            anything stupid here.  */
-         gcc_assert (CONVERT_EXPR_P (cond));
-
-         op0 = TREE_OPERAND (cond, 0);
-         tmp = create_tmp_var (TREE_TYPE (op0), NULL);
-         add_referenced_var (tmp);
-         cond_tmp = make_ssa_name (tmp, NULL);
-         new_stmt = build_gimple_modify_stmt (cond_tmp, op0);
-         SSA_NAME_DEF_STMT (cond_tmp) = new_stmt;
-
-         bsi_insert_after (&bsi, new_stmt, BSI_NEW_STMT);
-         cond = fold_convert (TREE_TYPE (result), cond_tmp);
-       }
+  /* To handle special cases like floating point comparison, it is easier and
+     less error-prone to build a tree and gimplify it on the fly, though it is
+     less efficient.  */
+  cond = fold_build2 (gimple_cond_code (stmt), boolean_type_node,
+                     gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
 
-      new_stmt = build_gimple_modify_stmt (new_var1, cond);
+  /* We need to know which is the true edge and which is the false
+     edge so that we know when to invert the condition below.  */
+  extract_true_false_edges_from_block (cond_bb, &true_edge, &false_edge);
+  if ((e0 == true_edge && integer_zerop (arg0))
+      || (e0 == false_edge && integer_onep (arg0))
+      || (e1 == true_edge && integer_zerop (arg1))
+      || (e1 == false_edge && integer_onep (arg1)))
+    cond = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (cond), cond);
+
+  /* Insert our new statements at the end of the conditional block before the
+     COND_STMT.  */
+  gsi = gsi_for_stmt (stmt);
+  new_var = force_gimple_operand_gsi (&gsi, cond, true, NULL, true,
+                                     GSI_SAME_STMT);
+
+  if (!useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (new_var)))
+    {
+      new_var2 = create_tmp_var (TREE_TYPE (result), NULL);
+      add_referenced_var (new_var2);
+      new_stmt = gimple_build_assign_with_ops (CONVERT_EXPR, new_var2,
+                                              new_var, NULL);
+      new_var2 = make_ssa_name (new_var2, new_stmt);
+      gimple_assign_set_lhs (new_stmt, new_var2);
+      gsi_insert_before (&gsi, new_stmt, GSI_SAME_STMT);
+      new_var = new_var2;
     }
 
-  bsi_insert_after (&bsi, new_stmt, BSI_NEW_STMT);
-
-  SSA_NAME_DEF_STMT (new_var1) = new_stmt;
-
-  replace_phi_edge_with_variable (cond_bb, e1, phi, new_var1);
+  replace_phi_edge_with_variable (cond_bb, e1, phi, new_var);
 
   /* Note that we optimized this PHI.  */
   return true;
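
The rewritten conditional_replacement above no longer builds the replacement
assignment by hand: it folds the GIMPLE_COND back into a boolean tree and lets
force_gimple_operand_gsi re-gimplify it in front of the condition.  A condensed
sketch of that idiom, assuming the tuples API this merge introduces (a fragment
only; the surrounding pass context is omitted):

  gimple cond_stmt = last_stmt (cond_bb);
  gimple_stmt_iterator gsi = gsi_for_stmt (cond_stmt);
  /* Rebuild the comparison as a tree ...  */
  tree cond = fold_build2 (gimple_cond_code (cond_stmt), boolean_type_node,
                           gimple_cond_lhs (cond_stmt),
                           gimple_cond_rhs (cond_stmt));
  /* ... and gimplify it in place; any statements needed to make COND a
     valid operand are inserted before the GIMPLE_COND, and the resulting
     value is returned.  */
  tree val = force_gimple_operand_gsi (&gsi, cond, true, NULL, true,
                                       GSI_SAME_STMT);
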
@@ -627,11 +537,12 @@ conditional_replacement (basic_block cond_bb, basic_block middle_bb,
 
 static bool
 value_replacement (basic_block cond_bb, basic_block middle_bb,
-                  edge e0, edge e1, tree phi,
+                  edge e0, edge e1, gimple phi,
                   tree arg0, tree arg1)
 {
-  tree cond;
+  gimple cond;
   edge true_edge, false_edge;
+  enum tree_code code;
 
   /* If the type says honor signed zeros we cannot do this
      optimization.  */
@@ -641,10 +552,11 @@ value_replacement (basic_block cond_bb, basic_block middle_bb,
   if (!empty_block_p (middle_bb))
     return false;
 
-  cond = COND_EXPR_COND (last_stmt (cond_bb));
+  cond = last_stmt (cond_bb);
+  code = gimple_cond_code (cond);
 
   /* This transformation is only valid for equality comparisons.  */
-  if (TREE_CODE (cond) != NE_EXPR && TREE_CODE (cond) != EQ_EXPR)
+  if (code != NE_EXPR && code != EQ_EXPR)
     return false;
 
   /* We need to know which is the true edge and which is the false
@@ -662,10 +574,10 @@ value_replacement (basic_block cond_bb, basic_block middle_bb,
      We now need to verify that the two arguments in the PHI node match
      the two arguments to the equality comparison.  */
 
-  if ((operand_equal_for_phi_arg_p (arg0, TREE_OPERAND (cond, 0))
-       && operand_equal_for_phi_arg_p (arg1, TREE_OPERAND (cond, 1)))
-      || (operand_equal_for_phi_arg_p (arg1, TREE_OPERAND (cond, 0))
-         && operand_equal_for_phi_arg_p (arg0, TREE_OPERAND (cond, 1))))
+  if ((operand_equal_for_phi_arg_p (arg0, gimple_cond_lhs (cond))
+       && operand_equal_for_phi_arg_p (arg1, gimple_cond_rhs (cond)))
+      || (operand_equal_for_phi_arg_p (arg1, gimple_cond_lhs (cond))
+         && operand_equal_for_phi_arg_p (arg0, gimple_cond_rhs (cond))))
     {
       edge e;
       tree arg;
@@ -673,7 +585,7 @@ value_replacement (basic_block cond_bb, basic_block middle_bb,
       /* For NE_EXPR, we want to build an assignment result = arg where
         arg is the PHI argument associated with the true edge.  For
         EQ_EXPR we want the PHI argument associated with the false edge.  */
-      e = (TREE_CODE (cond) == NE_EXPR ? true_edge : false_edge);
+      e = (code == NE_EXPR ? true_edge : false_edge);
 
       /* Unfortunately, E may not reach BB (it may instead have gone to
         OTHER_BLOCK).  If that is the case, then we want the single outgoing
@@ -705,15 +617,15 @@ value_replacement (basic_block cond_bb, basic_block middle_bb,
 
 static bool
 minmax_replacement (basic_block cond_bb, basic_block middle_bb,
-                   edge e0, edge e1, tree phi,
+                   edge e0, edge e1, gimple phi,
                    tree arg0, tree arg1)
 {
   tree result, type;
-  tree cond, new_stmt;
+  gimple cond, new_stmt;
   edge true_edge, false_edge;
   enum tree_code cmp, minmax, ass_code;
   tree smaller, larger, arg_true, arg_false;
-  block_stmt_iterator bsi, bsi_from;
+  gimple_stmt_iterator gsi, gsi_from;
 
   type = TREE_TYPE (PHI_RESULT (phi));
 
@@ -721,21 +633,21 @@ minmax_replacement (basic_block cond_bb, basic_block middle_bb,
   if (HONOR_NANS (TYPE_MODE (type)))
     return false;
 
-  cond = COND_EXPR_COND (last_stmt (cond_bb));
-  cmp = TREE_CODE (cond);
+  cond = last_stmt (cond_bb);
+  cmp = gimple_cond_code (cond);
   result = PHI_RESULT (phi);
 
   /* This transformation is only valid for order comparisons.  Record which
      operand is smaller/larger if the result of the comparison is true.  */
   if (cmp == LT_EXPR || cmp == LE_EXPR)
     {
-      smaller = TREE_OPERAND (cond, 0);
-      larger = TREE_OPERAND (cond, 1);
+      smaller = gimple_cond_lhs (cond);
+      larger = gimple_cond_rhs (cond);
     }
   else if (cmp == GT_EXPR || cmp == GE_EXPR)
     {
-      smaller = TREE_OPERAND (cond, 1);
-      larger = TREE_OPERAND (cond, 0);
+      smaller = gimple_cond_rhs (cond);
+      larger = gimple_cond_lhs (cond);
     }
   else
     return false;
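
Where the old code pulled operands out of COND_EXPR_COND with TREE_OPERAND, the
tuplified code reads them through the GIMPLE_COND accessors.  A small
illustrative helper in the same style (the function name is made up for this
sketch):

static bool
order_cmp_operands (gimple cond, tree *smaller, tree *larger)
{
  switch (gimple_cond_code (cond))
    {
    case LT_EXPR:
    case LE_EXPR:
      *smaller = gimple_cond_lhs (cond);
      *larger = gimple_cond_rhs (cond);
      return true;
    case GT_EXPR:
    case GE_EXPR:
      *smaller = gimple_cond_rhs (cond);
      *larger = gimple_cond_lhs (cond);
      return true;
    default:
      return false;
    }
}
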
@@ -796,20 +708,19 @@ minmax_replacement (basic_block cond_bb, basic_block middle_bb,
         b = MAX (a, d);
         x = MIN (b, u);  */
 
-      tree assign = last_and_only_stmt (middle_bb);
-      tree lhs, rhs, op0, op1, bound;
+      gimple assign = last_and_only_stmt (middle_bb);
+      tree lhs, op0, op1, bound;
 
       if (!assign
-         || TREE_CODE (assign) != GIMPLE_MODIFY_STMT)
+         || gimple_code (assign) != GIMPLE_ASSIGN)
        return false;
 
-      lhs = GIMPLE_STMT_OPERAND (assign, 0);
-      rhs = GIMPLE_STMT_OPERAND (assign, 1);
-      ass_code = TREE_CODE (rhs);
+      lhs = gimple_assign_lhs (assign);
+      ass_code = gimple_assign_rhs_code (assign);
       if (ass_code != MAX_EXPR && ass_code != MIN_EXPR)
        return false;
-      op0 = TREE_OPERAND (rhs, 0);
-      op1 = TREE_OPERAND (rhs, 1);
+      op0 = gimple_assign_rhs1 (assign);
+      op1 = gimple_assign_rhs2 (assign);
 
       if (true_edge->src == middle_bb)
        {
@@ -931,17 +842,16 @@ minmax_replacement (basic_block cond_bb, basic_block middle_bb,
        }
 
       /* Move the statement from the middle block.  */
-      bsi = bsi_last (cond_bb);
-      bsi_from = bsi_last (middle_bb);
-      bsi_move_before (&bsi_from, &bsi);
+      gsi = gsi_last_bb (cond_bb);
+      gsi_from = gsi_last_bb (middle_bb);
+      gsi_move_before (&gsi_from, &gsi);
     }
 
   /* Emit the statement to compute min/max.  */
   result = duplicate_ssa_name (PHI_RESULT (phi), NULL);
-  new_stmt = build_gimple_modify_stmt (result, build2 (minmax, type, arg0, arg1));
-  SSA_NAME_DEF_STMT (result) = new_stmt;
-  bsi = bsi_last (cond_bb);
-  bsi_insert_before (&bsi, new_stmt, BSI_NEW_STMT);
+  new_stmt = gimple_build_assign_with_ops (minmax, result, arg0, arg1);
+  gsi = gsi_last_bb (cond_bb);
+  gsi_insert_before (&gsi, new_stmt, GSI_NEW_STMT);
 
   replace_phi_edge_with_variable (cond_bb, e1, phi, result);
   return true;
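
gimple_build_assign_with_ops replaces the build_gimple_modify_stmt plus
SSA_NAME_DEF_STMT pair, and the iterator-update policy passed to the insert
routines determines where the iterator ends up afterwards, which is why the
abs_replacement hunk further down can chain a gsi_insert_after onto the same
iterator.  A sketch (illustrative, not the exact code above):

  gimple_stmt_iterator gsi = gsi_last_bb (cond_bb);
  gimple s1 = gimple_build_assign_with_ops (MIN_EXPR, result, arg0, arg1);

  /* GSI_NEW_STMT leaves GSI pointing at the statement just inserted;
     GSI_SAME_STMT would leave it where it was.  */
  gsi_insert_before (&gsi, s1, GSI_NEW_STMT);

  /* A follow-up statement lands right after S1 without recomputing GSI:
     gsi_insert_after (&gsi, s2, GSI_NEW_STMT);  */
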
@@ -956,13 +866,13 @@ minmax_replacement (basic_block cond_bb, basic_block middle_bb,
 static bool
 abs_replacement (basic_block cond_bb, basic_block middle_bb,
                 edge e0 ATTRIBUTE_UNUSED, edge e1,
-                tree phi, tree arg0, tree arg1)
+                gimple phi, tree arg0, tree arg1)
 {
   tree result;
-  tree new_stmt, cond;
-  block_stmt_iterator bsi;
+  gimple new_stmt, cond;
+  gimple_stmt_iterator gsi;
   edge true_edge, false_edge;
-  tree assign;
+  gimple assign;
   edge e;
   tree rhs, lhs;
   bool negate;
@@ -985,38 +895,37 @@ abs_replacement (basic_block cond_bb, basic_block middle_bb,
   /* If we got here, then we have found the only executable statement
      in OTHER_BLOCK.  If it is anything other than arg = -arg1 or
      arg1 = -arg0, then we can not optimize.  */
-  if (TREE_CODE (assign) != GIMPLE_MODIFY_STMT)
+  if (gimple_code (assign) != GIMPLE_ASSIGN)
     return false;
 
-  lhs = GIMPLE_STMT_OPERAND (assign, 0);
-  rhs = GIMPLE_STMT_OPERAND (assign, 1);
+  lhs = gimple_assign_lhs (assign);
 
-  if (TREE_CODE (rhs) != NEGATE_EXPR)
+  if (gimple_assign_rhs_code (assign) != NEGATE_EXPR)
     return false;
 
-  rhs = TREE_OPERAND (rhs, 0);
+  rhs = gimple_assign_rhs1 (assign);
               
   /* The assignment has to be arg0 = -arg1 or arg1 = -arg0.  */
   if (!(lhs == arg0 && rhs == arg1)
       && !(lhs == arg1 && rhs == arg0))
     return false;
 
-  cond = COND_EXPR_COND (last_stmt (cond_bb));
+  cond = last_stmt (cond_bb);
   result = PHI_RESULT (phi);
 
   /* Only relationals comparing arg[01] against zero are interesting.  */
-  cond_code = TREE_CODE (cond);
+  cond_code = gimple_cond_code (cond);
   if (cond_code != GT_EXPR && cond_code != GE_EXPR
       && cond_code != LT_EXPR && cond_code != LE_EXPR)
     return false;
 
   /* Make sure the conditional is arg[01] OP y.  */
-  if (TREE_OPERAND (cond, 0) != rhs)
+  if (gimple_cond_lhs (cond) != rhs)
     return false;
 
-  if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (cond, 1)))
-              ? real_zerop (TREE_OPERAND (cond, 1))
-              : integer_zerop (TREE_OPERAND (cond, 1)))
+  if (FLOAT_TYPE_P (TREE_TYPE (gimple_cond_rhs (cond)))
+              ? real_zerop (gimple_cond_rhs (cond))
+              : integer_zerop (gimple_cond_rhs (cond)))
     ;
   else
     return false;
@@ -1050,24 +959,19 @@ abs_replacement (basic_block cond_bb, basic_block middle_bb,
     lhs = result;
 
   /* Build the modify expression with abs expression.  */
-  new_stmt = build_gimple_modify_stmt (lhs,
-                                      build1 (ABS_EXPR, TREE_TYPE (lhs), rhs));
-  SSA_NAME_DEF_STMT (lhs) = new_stmt;
+  new_stmt = gimple_build_assign_with_ops (ABS_EXPR, lhs, rhs, NULL);
 
-  bsi = bsi_last (cond_bb);
-  bsi_insert_before (&bsi, new_stmt, BSI_NEW_STMT);
+  gsi = gsi_last_bb (cond_bb);
+  gsi_insert_before (&gsi, new_stmt, GSI_NEW_STMT);
 
   if (negate)
     {
-      /* Get the right BSI.  We want to insert after the recently
+      /* Get the right GSI.  We want to insert after the recently
         added ABS_EXPR statement (which we know is the first statement
          in the block).  */
-      new_stmt = build_gimple_modify_stmt (result,
-                                          build1 (NEGATE_EXPR, TREE_TYPE (lhs),
-                                                  lhs));
-      SSA_NAME_DEF_STMT (result) = new_stmt;
+      new_stmt = gimple_build_assign_with_ops (NEGATE_EXPR, result, lhs, NULL);
 
-      bsi_insert_after (&bsi, new_stmt, BSI_NEW_STMT);
+      gsi_insert_after (&gsi, new_stmt, GSI_NEW_STMT);
     }
 
   replace_phi_edge_with_variable (cond_bb, e1, phi, result);
@@ -1188,21 +1092,22 @@ add_or_mark_expr (basic_block bb, tree exp,
 static void
 nt_init_block (struct dom_walk_data *data ATTRIBUTE_UNUSED, basic_block bb)
 {
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   /* Mark this BB as being on the path to dominator root.  */
   bb->aux = (void*)1;
 
   /* And walk the statements in order.  */
-  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
     {
-      tree stmt = bsi_stmt (bsi);
+      gimple stmt = gsi_stmt (gsi);
 
-      if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
+      if (is_gimple_assign (stmt))
        {
-         tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
-         tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-         add_or_mark_expr (bb, rhs, nontrap_set, false);
-         add_or_mark_expr (bb, lhs, nontrap_set, true);
+         add_or_mark_expr (bb, gimple_assign_lhs (stmt), nontrap_set, true);
+         add_or_mark_expr (bb, gimple_assign_rhs1 (stmt), nontrap_set, false);
+         if (get_gimple_rhs_num_ops (gimple_assign_rhs_code (stmt)) > 1)
+           add_or_mark_expr (bb, gimple_assign_rhs2 (stmt), nontrap_set,
+                             false);
        }
     }
 }
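
With tuples an assignment no longer carries a single RHS tree; its operands are
stored directly in the statement, so the hunk above asks get_gimple_rhs_num_ops
how many RHS operands exist before touching rhs2.  An equivalent generic walk
using gimple_num_ops/gimple_op (a sketch, not what the patch installs):

  if (is_gimple_assign (stmt))
    {
      unsigned i;

      /* Operand 0 is the LHS; slots 1 .. gimple_num_ops - 1 hold the RHS
         operands, however many the rhs code requires.  */
      for (i = 1; i < gimple_num_ops (stmt); i++)
        add_or_mark_expr (bb, gimple_op (stmt, i), nontrap_set, false);
      add_or_mark_expr (bb, gimple_op (stmt, 0), nontrap_set, true);
    }
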
@@ -1274,21 +1179,26 @@ static bool
 cond_store_replacement (basic_block middle_bb, basic_block join_bb,
                        edge e0, edge e1, struct pointer_set_t *nontrap)
 {
-  tree assign = last_and_only_stmt (middle_bb);
-  tree lhs, rhs, newexpr, name;
-  tree newphi;
-  block_stmt_iterator bsi;
+  gimple assign = last_and_only_stmt (middle_bb);
+  tree lhs, rhs, name;
+  gimple newphi, new_stmt;
+  gimple_stmt_iterator gsi;
+  enum tree_code code;
 
   /* Check if middle_bb contains only one store.  */
   if (!assign
-      || TREE_CODE (assign) != GIMPLE_MODIFY_STMT)
+      || gimple_code (assign) != GIMPLE_ASSIGN)
     return false;
 
-  lhs = GIMPLE_STMT_OPERAND (assign, 0);
+  lhs = gimple_assign_lhs (assign);
+  rhs = gimple_assign_rhs1 (assign);
   if (!INDIRECT_REF_P (lhs))
     return false;
-  rhs = GIMPLE_STMT_OPERAND (assign, 1);
-  if (TREE_CODE (rhs) != SSA_NAME && !is_gimple_min_invariant (rhs))
+
+  /* RHS is either a single SSA_NAME or a constant.  */
+  code = gimple_assign_rhs_code (assign);
+  if (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
+      || (code != SSA_NAME && !is_gimple_min_invariant (rhs)))
     return false;
   /* Prove that we can move the store down.  We could also check
      TREE_THIS_NOTRAP here, but in that case we also could move stores,
@@ -1299,8 +1209,8 @@ cond_store_replacement (basic_block middle_bb, basic_block join_bb,
   /* Now we've checked the constraints, so do the transformation:
      1) Remove the single store.  */
   mark_symbols_for_renaming (assign);
-  bsi = bsi_for_stmt (assign);
-  bsi_remove (&bsi, true);
+  gsi = gsi_for_stmt (assign);
+  gsi_remove (&gsi, true);
 
   /* 2) Create a temporary where we can store the old content
         of the memory touched by the store, if we need to.  */
@@ -1317,11 +1227,11 @@ cond_store_replacement (basic_block middle_bb, basic_block join_bb,
   /* 3) Insert a load from the memory of the store to the temporary
         on the edge which did not contain the store.  */
   lhs = unshare_expr (lhs);
-  newexpr = build_gimple_modify_stmt (condstoretemp, lhs);
-  name = make_ssa_name (condstoretemp, newexpr);
-  GIMPLE_STMT_OPERAND (newexpr, 0) = name;
-  mark_symbols_for_renaming (newexpr);
-  bsi_insert_on_edge (e1, newexpr);
+  new_stmt = gimple_build_assign (condstoretemp, lhs);
+  name = make_ssa_name (condstoretemp, new_stmt);
+  gimple_assign_set_lhs (new_stmt, name);
+  mark_symbols_for_renaming (new_stmt);
+  gsi_insert_on_edge (e1, new_stmt);
 
   /* 4) Create a PHI node at the join block, with one argument
         holding the old RHS, and the other holding the temporary
@@ -1331,20 +1241,18 @@ cond_store_replacement (basic_block middle_bb, basic_block join_bb,
   add_phi_arg (newphi, name, e1);
 
   lhs = unshare_expr (lhs);
-  newexpr = build_gimple_modify_stmt (lhs, PHI_RESULT (newphi));
-  mark_symbols_for_renaming (newexpr);
+  new_stmt = gimple_build_assign (lhs, PHI_RESULT (newphi));
+  mark_symbols_for_renaming (new_stmt);
 
   /* 5) Insert that PHI node.  */
-  bsi = bsi_start (join_bb);
-  while (!bsi_end_p (bsi) && TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR)
-    bsi_next (&bsi);
-  if (bsi_end_p (bsi))
+  gsi = gsi_after_labels (join_bb);
+  if (gsi_end_p (gsi))
     {
-      bsi = bsi_last (join_bb);
-      bsi_insert_after (&bsi, newexpr, BSI_NEW_STMT);
+      gsi = gsi_last_bb (join_bb);
+      gsi_insert_after (&gsi, new_stmt, GSI_NEW_STMT);
     }
   else
-    bsi_insert_before (&bsi, newexpr, BSI_NEW_STMT);
+    gsi_insert_before (&gsi, new_stmt, GSI_NEW_STMT);
 
   return true;
 }
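
The GIMPLE_SINGLE_RHS test above is how a tuplified pass recognizes a plain
"lhs = operand" store: get_gimple_rhs_class maps an rhs code to
GIMPLE_SINGLE_RHS, GIMPLE_UNARY_RHS, GIMPLE_BINARY_RHS or GIMPLE_INVALID_RHS.
A sketch of the check in isolation:

  enum tree_code code = gimple_assign_rhs_code (assign);

  /* A single-operand RHS whose operand is a register or constant, i.e.
     the store writes a simple value rather than computing one.  */
  if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS
      && (code == SSA_NAME
          || is_gimple_min_invariant (gimple_assign_rhs1 (assign))))
    {
      /* ... the store is a candidate for cond_store_replacement ...  */
    }
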
index f408939..ff3ee4a 100644
@@ -95,7 +95,7 @@ along with GCC; see the file COPYING3.  If not see
 struct phiprop_d
 {
   tree value;
-  tree vop_stmt;
+  gimple vop_stmt;
 };
 
 /* Verify if the value recorded for NAME in PHIVN is still valid at
@@ -104,7 +104,7 @@ struct phiprop_d
 static bool
 phivn_valid_p (struct phiprop_d *phivn, tree name, basic_block bb)
 {
-  tree vop_stmt = phivn[SSA_NAME_VERSION (name)].vop_stmt;
+  gimple vop_stmt = phivn[SSA_NAME_VERSION (name)].vop_stmt;
   ssa_op_iter ui;
   tree vuse;
 
@@ -112,17 +112,17 @@ phivn_valid_p (struct phiprop_d *phivn, tree name, basic_block bb)
      by bb.  */
   FOR_EACH_SSA_TREE_OPERAND (vuse, vop_stmt, ui, SSA_OP_VUSE)
     {
-      tree use_stmt;
+      gimple use_stmt;
       imm_use_iterator ui2;
       bool ok = true;
 
       FOR_EACH_IMM_USE_STMT (use_stmt, ui2, vuse)
        {
          /* If BB does not dominate a VDEF, the value is invalid.  */
-         if (((TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
+         if (((is_gimple_assign (use_stmt)
                && !ZERO_SSA_OPERANDS (use_stmt, SSA_OP_VDEF))
-              || TREE_CODE (use_stmt) == PHI_NODE)
-             && !dominated_by_p (CDI_DOMINATORS, bb_for_stmt (use_stmt), bb))
+              || gimple_code (use_stmt) == GIMPLE_PHI)
+             && !dominated_by_p (CDI_DOMINATORS, gimple_bb (use_stmt), bb))
            {
              ok = false;
              BREAK_FROM_IMM_USE_STMT (ui2);
@@ -139,31 +139,36 @@ phivn_valid_p (struct phiprop_d *phivn, tree name, basic_block bb)
    BB with the virtual operands from USE_STMT.  */
 
 static tree
-phiprop_insert_phi (basic_block bb, tree phi, tree use_stmt,
+phiprop_insert_phi (basic_block bb, gimple phi, gimple use_stmt,
                    struct phiprop_d *phivn, size_t n)
 {
-  tree res, new_phi;
+  tree res;
+  gimple new_phi;
   edge_iterator ei;
   edge e;
 
+  gcc_assert (is_gimple_assign (use_stmt)
+             && gimple_assign_rhs_code (use_stmt) == INDIRECT_REF);
+
   /* Build a new PHI node to replace the definition of
      the indirect reference lhs.  */
-  res = GIMPLE_STMT_OPERAND (use_stmt, 0);
+  res = gimple_assign_lhs (use_stmt);
   SSA_NAME_DEF_STMT (res) = new_phi = create_phi_node (res, bb);
 
   /* Add PHI arguments for each edge inserting loads of the
      addressable operands.  */
   FOR_EACH_EDGE (e, ei, bb->preds)
     {
-      tree old_arg, new_var, tmp;
+      tree old_arg, new_var;
+      gimple tmp;
 
       old_arg = PHI_ARG_DEF_FROM_EDGE (phi, e);
       while (TREE_CODE (old_arg) == SSA_NAME
             && (SSA_NAME_VERSION (old_arg) >= n
                 || phivn[SSA_NAME_VERSION (old_arg)].value == NULL_TREE))
        {
-         tree def_stmt = SSA_NAME_DEF_STMT (old_arg);
-         old_arg = GIMPLE_STMT_OPERAND (def_stmt, 1);
+         gimple def_stmt = SSA_NAME_DEF_STMT (old_arg);
+         old_arg = gimple_assign_rhs1 (def_stmt);
        }
 
       if (TREE_CODE (old_arg) == SSA_NAME)
@@ -171,18 +176,19 @@ phiprop_insert_phi (basic_block bb, tree phi, tree use_stmt,
        new_var = phivn[SSA_NAME_VERSION (old_arg)].value;
       else
        {
+         gcc_assert (TREE_CODE (old_arg) == ADDR_EXPR);
          old_arg = TREE_OPERAND (old_arg, 0);
          new_var = create_tmp_var (TREE_TYPE (old_arg), NULL);
-         tmp = build2 (GIMPLE_MODIFY_STMT, void_type_node,
-                       NULL_TREE, unshare_expr (old_arg));
+         tmp = gimple_build_assign (new_var, unshare_expr (old_arg));
          if (TREE_CODE (TREE_TYPE (old_arg)) == COMPLEX_TYPE
              || TREE_CODE (TREE_TYPE (old_arg)) == VECTOR_TYPE)
            DECL_GIMPLE_REG_P (new_var) = 1;
+         gcc_assert (is_gimple_reg (new_var));
          add_referenced_var (new_var);
          new_var = make_ssa_name (new_var, tmp);
-         GIMPLE_STMT_OPERAND (tmp, 0) = new_var;
+         gimple_assign_set_lhs (tmp, new_var);
 
-         bsi_insert_on_edge (e, tmp);
+         gsi_insert_on_edge (e, tmp);
 
          update_stmt (tmp);
          mark_symbols_for_renaming (tmp);
@@ -211,11 +217,13 @@ phiprop_insert_phi (basic_block bb, tree phi, tree use_stmt,
    with aliasing issues as we are moving memory reads.  */
 
 static bool
-propagate_with_phi (basic_block bb, tree phi, struct phiprop_d *phivn, size_t n)
+propagate_with_phi (basic_block bb, gimple phi, struct phiprop_d *phivn,
+                   size_t n)
 {
   tree ptr = PHI_RESULT (phi);
-  tree use_stmt, res = NULL_TREE;
-  block_stmt_iterator bsi;
+  gimple use_stmt;
+  tree res = NULL_TREE;
+  gimple_stmt_iterator gsi;
   imm_use_iterator ui;
   use_operand_p arg_p, use;
   ssa_op_iter i;
@@ -238,10 +246,10 @@ propagate_with_phi (basic_block bb, tree phi, struct phiprop_d *phivn, size_t n)
             && (SSA_NAME_VERSION (arg) >= n
                 || phivn[SSA_NAME_VERSION (arg)].value == NULL_TREE))
        {
-         tree def_stmt = SSA_NAME_DEF_STMT (arg);
-         if (TREE_CODE (def_stmt) != GIMPLE_MODIFY_STMT)
+         gimple def_stmt = SSA_NAME_DEF_STMT (arg);
+         if (gimple_code (def_stmt) != GIMPLE_ASSIGN)
            return false;
-         arg = GIMPLE_STMT_OPERAND (def_stmt, 1);
+         arg = gimple_assign_rhs1 (def_stmt);
        }
       if ((TREE_CODE (arg) != ADDR_EXPR
           /* Avoid to have to decay *&a to a[0] later.  */
@@ -255,10 +263,8 @@ propagate_with_phi (basic_block bb, tree phi, struct phiprop_d *phivn, size_t n)
   /* Find a dereferencing use.  First follow (single use) ssa
      copy chains for ptr.  */
   while (single_imm_use (ptr, &use, &use_stmt)
-        && TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
-        && GIMPLE_STMT_OPERAND (use_stmt, 1) == ptr
-        && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 0)) == SSA_NAME)
-    ptr = GIMPLE_STMT_OPERAND (use_stmt, 0);
+        && gimple_assign_ssa_name_copy_p (use_stmt))
+    ptr = gimple_assign_lhs (use_stmt);
 
   /* Replace the first dereference of *ptr if there is one and if we
      can move the loads to the place of the ptr phi node.  */
@@ -269,23 +275,23 @@ propagate_with_phi (basic_block bb, tree phi, struct phiprop_d *phivn, size_t n)
       tree vuse;
 
       /* Check whether this is a load of *ptr.  */
-      if (!(TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
-           && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 0)) == SSA_NAME 
-           && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == INDIRECT_REF
-           && TREE_OPERAND (GIMPLE_STMT_OPERAND (use_stmt, 1), 0) == ptr
+      if (!(is_gimple_assign (use_stmt)
+           && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME 
+           && gimple_assign_rhs_code (use_stmt) == INDIRECT_REF
+           && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == ptr
            /* We cannot replace a load that may throw or is volatile.  */
-           && !tree_can_throw_internal (use_stmt)))
+           && !stmt_can_throw_internal (use_stmt)))
        continue;
 
       /* Check if we can move the loads.  The def stmts of all virtual uses
         need to be post-dominated by bb.  */
       FOR_EACH_SSA_TREE_OPERAND (vuse, use_stmt, ui2, SSA_OP_VUSE)
        {
-         tree def_stmt = SSA_NAME_DEF_STMT (vuse);
+         gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
          if (!SSA_NAME_IS_DEFAULT_DEF (vuse)
-             && (bb_for_stmt (def_stmt) == bb
+             && (gimple_bb (def_stmt) == bb
                  || !dominated_by_p (CDI_DOMINATORS,
-                                     bb, bb_for_stmt (def_stmt))))
+                                     bb, gimple_bb (def_stmt))))
            goto next;
        }
 
@@ -302,8 +308,8 @@ propagate_with_phi (basic_block bb, tree phi, struct phiprop_d *phivn, size_t n)
          /* Remove old stmt.  The phi is taken care of by DCE; if we
             want to delete it here we also have to delete all intermediate
             copies.  */
-         bsi = bsi_for_stmt (use_stmt);
-         bsi_remove (&bsi, 0);
+         gsi = gsi_for_stmt (use_stmt);
+         gsi_remove (&gsi, false);
 
          phi_inserted = true;
        }
@@ -311,7 +317,7 @@ propagate_with_phi (basic_block bb, tree phi, struct phiprop_d *phivn, size_t n)
        {
          /* Further replacements are easy, just make a copy out of the
             load.  */
-         GIMPLE_STMT_OPERAND (use_stmt, 1) = res;
+         gimple_assign_set_rhs1 (use_stmt, res);
          update_stmt (use_stmt);
        }
 
@@ -330,10 +336,10 @@ tree_ssa_phiprop_1 (basic_block bb, struct phiprop_d *phivn, size_t n)
 {
   bool did_something = false; 
   basic_block son;
-  tree phi;
+  gimple_stmt_iterator gsi;
 
-  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
-    did_something |= propagate_with_phi (bb, phi, phivn, n);
+  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+    did_something |= propagate_with_phi (bb, gsi_stmt (gsi), phivn, n);
 
   for (son = first_dom_son (CDI_DOMINATORS, bb);
        son;
@@ -355,7 +361,7 @@ tree_ssa_phiprop (void)
   phivn = XCNEWVEC (struct phiprop_d, num_ssa_names);
 
   if (tree_ssa_phiprop_1 (ENTRY_BLOCK_PTR, phivn, num_ssa_names))
-    bsi_commit_edge_inserts ();
+    gsi_commit_edge_inserts ();
 
   free (phivn);
 
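
Two recurring changes in the phiprop conversion above: PHI nodes are now walked
with an iterator over the block's PHI sequence instead of chasing PHI_CHAIN,
and statements queued on edges with gsi_insert_on_edge only materialize once
gsi_commit_edge_inserts runs (which may split the edges).  A sketch of both,
assuming the tuples API (fragment, free variables come from the pass):

  gimple_stmt_iterator psi;

  for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
    {
      gimple phi = gsi_stmt (psi);
      /* gimple_phi_result / PHI_ARG_DEF_FROM_EDGE work on PHI as before.  */
      did_something |= propagate_with_phi (bb, phi, phivn, n);
    }

  /* Any statements queued on edges during the walk are emitted here, once.  */
  if (did_something)
    gsi_commit_edge_inserts ();
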
index ed337a3..a8a3907 100644
@@ -30,7 +30,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "diagnostic.h"
 #include "tree-inline.h"
 #include "tree-flow.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "tree-dump.h"
 #include "timevar.h"
 #include "fibheap.h"
@@ -60,7 +60,7 @@ along with GCC; see the file COPYING3.  If not see
 */
 
 /* For ease of terminology, "expression node" in the below refers to
-   every expression node but GIMPLE_MODIFY_STMT, because GIMPLE_MODIFY_STMT's
+   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
    represent the actual statement containing the expressions we care about,
    and we cache the value number by putting it in the expression.  */
 
@@ -193,13 +193,8 @@ pre_expr_eq (const void *p1, const void *p2)
   switch (e1->kind)
     {
     case CONSTANT:
-      {
-       tree const0 = PRE_EXPR_CONSTANT (e1);
-       tree const1 = PRE_EXPR_CONSTANT (e2);
-       return TREE_TYPE (const1) == TREE_TYPE (const0)
-         && expressions_equal_p (const0, const1);
-      }
-      break;
+      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
+                                      PRE_EXPR_CONSTANT (e2));
     case NAME:
       return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
     case NARY:
@@ -219,7 +214,7 @@ pre_expr_hash (const void *p1)
   switch (e->kind)
     {
     case CONSTANT:
-      return iterative_hash_expr (PRE_EXPR_CONSTANT (e), 0);
+      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
     case NAME:
       return iterative_hash_expr (PRE_EXPR_NAME (e), 0);
     case NARY:
@@ -321,9 +316,8 @@ get_or_alloc_expr_for_name (tree name)
   result_id = lookup_expression_id (result);
   if (result_id != 0)
     {
-      pre_expr newresult = expression_for_id (result_id);
-      pool_free (pre_expr_pool, result); 
-      result = newresult;
+      pool_free (pre_expr_pool, result);
+      result = expression_for_id (result_id);
       return result;
     }
   get_or_alloc_expression_id (result);
@@ -428,7 +422,7 @@ static struct
 } pre_stats;
 
 static bool do_partial_partial;
-static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int , tree);
+static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int, gimple);
 static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
 static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
 static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
@@ -436,9 +430,10 @@ static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
 static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
 static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr, bool);
 static bitmap_set_t bitmap_set_new (void);
-static tree create_expression_by_pieces (basic_block, pre_expr, tree, tree,
-                                        tree);
-static tree find_or_generate_expression (basic_block, pre_expr, tree, tree);
+static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
+                                        gimple, tree);
+static tree find_or_generate_expression (basic_block, pre_expr, gimple_seq *,
+                                        gimple);
 
 /* We can add and remove elements and entries to and from sets
    and hash tables, so we use alloc pools for them.  */
@@ -1009,9 +1004,8 @@ get_or_alloc_expr_for_constant (tree constant)
   result_id = lookup_expression_id (newexpr);
   if (result_id != 0)
     {
-      pre_expr newresult = expression_for_id (result_id);
-      pool_free (pre_expr_pool, newexpr); 
-      newexpr = newresult;
+      pool_free (pre_expr_pool, newexpr);
+      newexpr = expression_for_id (result_id);
       return newexpr;
     }
   value_id = get_or_alloc_constant_value_id (constant);
@@ -1025,7 +1019,7 @@ get_or_alloc_expr_for_constant (tree constant)
    a constant.  */
 
 static tree
-get_constant_for_value_id (unsigned int v, tree type)
+get_constant_for_value_id (unsigned int v)
 {
   if (value_id_constant_p (v))
     {
@@ -1036,8 +1030,7 @@ get_constant_for_value_id (unsigned int v, tree type)
       FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
        {
          pre_expr expr = expression_for_id (i);
-         if (expr->kind == CONSTANT
-             && TREE_TYPE (PRE_EXPR_CONSTANT (expr)) == type)
+         if (expr->kind == CONSTANT)
            return PRE_EXPR_CONSTANT (expr);
        }
     }
@@ -1053,6 +1046,30 @@ get_or_alloc_expr_for (tree t)
     return get_or_alloc_expr_for_name (t);
   else if (is_gimple_min_invariant (t))
     return get_or_alloc_expr_for_constant (t);
+  else
+    {
+      /* More complex expressions can result from SCCVN expression
+        simplification that inserts values for them.  As they all
+        do not have VOPs, they get handled by the nary ops struct.  */
+      vn_nary_op_t result;
+      unsigned int result_id;
+      vn_nary_op_lookup (t, &result);
+      if (result != NULL)
+       {
+         pre_expr e = (pre_expr) pool_alloc (pre_expr_pool);
+         e->kind = NARY;
+         PRE_EXPR_NARY (e) = result;
+         result_id = lookup_expression_id (e);
+         if (result_id != 0)
+           {
+             pool_free (pre_expr_pool, e);
+             e = expression_for_id (result_id);
+             return e;
+           }
+         alloc_expression_id (e);
+         return e;
+       }
+    }
   return NULL;
 }
 
@@ -1077,15 +1094,24 @@ fully_constant_expression (pre_expr e)
                 constants.  */
              tree naryop0 = nary->op[0];
              tree naryop1 = nary->op[1];
-             pre_expr rep0 = get_or_alloc_expr_for (naryop0);
-             pre_expr rep1 = get_or_alloc_expr_for (naryop1);
-             unsigned int vrep0 = get_expr_value_id (rep0);
-             unsigned int vrep1 = get_expr_value_id (rep1);
-             tree const0 = get_constant_for_value_id (vrep0,
-                                                      TREE_TYPE (nary->op[0]));
-             tree const1 = get_constant_for_value_id (vrep1,
-                                                      TREE_TYPE (nary->op[1]));
-             tree result = NULL;
+             tree const0, const1, result;
+             if (is_gimple_min_invariant (naryop0))
+               const0 = naryop0;
+             else
+               {
+                 pre_expr rep0 = get_or_alloc_expr_for (naryop0);
+                 unsigned int vrep0 = get_expr_value_id (rep0);
+                 const0 = get_constant_for_value_id (vrep0);
+               }
+             if (is_gimple_min_invariant (naryop1))
+               const1 = naryop1;
+             else
+               {
+                 pre_expr rep1 = get_or_alloc_expr_for (naryop1);
+                 unsigned int vrep1 = get_expr_value_id (rep1);
+                 const1 = get_constant_for_value_id (vrep1);
+               }
+             result = NULL;
              if (const0 && const1)
                {
                  tree type1 = TREE_TYPE (nary->op[0]);
@@ -1104,11 +1130,16 @@ fully_constant_expression (pre_expr e)
            /* We have to go from trees to pre exprs to value ids to
               constants.  */
              tree naryop0 = nary->op[0];
-             pre_expr rep0 = get_or_alloc_expr_for (naryop0);
-             unsigned int vrep0 = get_expr_value_id (rep0);
-             tree const0 = get_constant_for_value_id (vrep0,
-                                                      TREE_TYPE (nary->op[0]));
-             tree result = NULL;
+             tree const0, result;
+             if (is_gimple_min_invariant (naryop0))
+               const0 = naryop0;
+             else
+               {
+                 pre_expr rep0 = get_or_alloc_expr_for (naryop0);
+                 unsigned int vrep0 = get_expr_value_id (rep0);
+                 const0 = get_constant_for_value_id (vrep0);
+               }
+             result = NULL;
              if (const0)
                {
                  tree type1 = TREE_TYPE (nary->op[0]);
@@ -1145,11 +1176,11 @@ translate_vuses_through_block (VEC (tree, gc) *vuses,
 
   for (i = 0; VEC_iterate (tree, vuses, i, oldvuse); i++)
     {
-      tree phi = SSA_NAME_DEF_STMT (oldvuse);
-      if (TREE_CODE (phi) == PHI_NODE
-         && bb_for_stmt (phi) == phiblock)
+      gimple phi = SSA_NAME_DEF_STMT (oldvuse);
+      if (gimple_code (phi) == GIMPLE_PHI
+         && gimple_bb (phi) == phiblock)
        {
-         edge e = find_edge (block, bb_for_stmt (phi));
+         edge e = find_edge (block, gimple_bb (phi));
          if (e)
            {
              tree def = PHI_ARG_DEF (phi, e->dest_idx);
@@ -1183,9 +1214,9 @@ find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2)
 {
   pre_expr result;
 
-  result = bitmap_find_leader (set1, val, NULL_TREE);
+  result = bitmap_find_leader (set1, val, NULL);
   if (!result && set2)
-    result = bitmap_find_leader (set2, val, NULL_TREE);
+    result = bitmap_find_leader (set2, val, NULL);
   return result;
 }
 
@@ -1244,6 +1275,7 @@ get_representative_for (const pre_expr e)
     case NAME:
       return PRE_EXPR_NAME (e);
     case CONSTANT:
+      return PRE_EXPR_CONSTANT (e);
     case NARY:
     case REFERENCE:
       {
@@ -1284,7 +1316,7 @@ get_representative_for (const pre_expr e)
       get_var_ann (pretemp);
     }
 
-  name = make_ssa_name (pretemp, build_empty_stmt ());
+  name = make_ssa_name (pretemp, gimple_build_nop ());
   VN_INFO_GET (name)->value_id = value_id;
   if (e->kind == CONSTANT)
     VN_INFO (name)->valnum = PRE_EXPR_CONSTANT (e);
@@ -1572,19 +1604,19 @@ phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
       break;
     case NAME:
       {
-       tree phi = NULL;
+       gimple phi = NULL;
        edge e;
-       tree def_stmt;
+       gimple def_stmt;
        tree name = PRE_EXPR_NAME (expr);
 
        def_stmt = SSA_NAME_DEF_STMT (name);
-       if (TREE_CODE (def_stmt) == PHI_NODE
-           && bb_for_stmt (def_stmt) == phiblock)
+       if (gimple_code (def_stmt) == GIMPLE_PHI
+           && gimple_bb (def_stmt) == phiblock)
          phi = def_stmt;
        else
          return expr;
 
-       e = find_edge (pred, bb_for_stmt (phi));
+       e = find_edge (pred, gimple_bb (phi));
        if (e)
          {
            tree def = PHI_ARG_DEF (phi, e->dest_idx);
@@ -1663,7 +1695,7 @@ phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
    Return NULL if no leader is found.  */
 
 static pre_expr
-bitmap_find_leader (bitmap_set_t set, unsigned int val, tree stmt)
+bitmap_find_leader (bitmap_set_t set, unsigned int val, gimple stmt)
 {
   if (value_id_constant_p (val))
     {
@@ -1703,10 +1735,10 @@ bitmap_find_leader (bitmap_set_t set, unsigned int val, tree stmt)
             be an SSA_NAME first in the list of expressions.  */
          if (stmt)
            {
-             tree def_stmt = SSA_NAME_DEF_STMT (PRE_EXPR_NAME (val));
-             if (TREE_CODE (def_stmt) != PHI_NODE
-                 && bb_for_stmt (def_stmt) == bb_for_stmt (stmt)
-                 && stmt_ann (def_stmt)->uid >= stmt_ann (stmt)->uid)
+             gimple def_stmt = SSA_NAME_DEF_STMT (PRE_EXPR_NAME (val));
+             if (gimple_code (def_stmt) != GIMPLE_PHI
+                 && gimple_bb (def_stmt) == gimple_bb (stmt)
+                 && gimple_uid (def_stmt) >= gimple_uid (stmt))
                continue;
            }
          return val;
@@ -1734,11 +1766,11 @@ value_dies_in_block_x (pre_expr expr, basic_block block)
      rather than stores.  */
   for (i = 0; VEC_iterate (tree, vuses, i, vuse); i++)
     {
-      tree def = SSA_NAME_DEF_STMT (vuse);
+      gimple def = SSA_NAME_DEF_STMT (vuse);
 
-      if (bb_for_stmt (def) != block)
+      if (gimple_bb (def) != block)
        continue;
-      if (TREE_CODE (def) == PHI_NODE)
+      if (gimple_code (def) == GIMPLE_PHI)
        continue;
       return true;
     }
@@ -2343,11 +2375,9 @@ compute_antic (void)
    if we have a pure or constant call.  */
 
 static bool
-can_value_number_call (tree stmt)
+can_value_number_call (gimple stmt)
 {
-  tree call = get_call_expr_in (stmt);
-
-  if (call_expr_flags (call) & (ECF_PURE | ECF_CONST))
+  if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
     return true;
   return false;
 }
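
Since calls are now statements of their own, the old get_call_expr_in dance
disappears and the ECF flags come straight off the GIMPLE_CALL.  A sketch of
the same test as a standalone predicate (the helper name is made up):

static bool
call_is_pure_or_const (gimple stmt)
{
  return (is_gimple_call (stmt)
          && (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)) != 0);
}
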
@@ -2356,9 +2386,11 @@ can_value_number_call (tree stmt)
    FILTER_EXPR or EXC_PTR_EXPR.  */
 
 static bool
-is_exception_related (tree op)
+is_exception_related (gimple stmt)
 {
-  return TREE_CODE (op) == FILTER_EXPR || TREE_CODE (op) == EXC_PTR_EXPR;
+  return (is_gimple_assign (stmt)
+         && (gimple_assign_rhs_code (stmt) == FILTER_EXPR
+             || gimple_assign_rhs_code (stmt) == EXC_PTR_EXPR));
 }
 
 /* Return true if OP is a tree which we can perform PRE on
@@ -2382,12 +2414,12 @@ can_PRE_operation (tree op)
 /* Inserted expressions are placed onto this worklist, which is used
    for performing quick dead code elimination of insertions we made
    that didn't turn out to be necessary.   */
-static VEC(tree,heap) *inserted_exprs;
+static VEC(gimple,heap) *inserted_exprs;
 
 /* Pool allocated fake store expressions are placed onto this
    worklist, which, after performing dead code elimination, is walked
    to see which expressions need to be put into GC'able memory  */
-static VEC(tree, heap) *need_creation;
+static VEC(gimple, heap) *need_creation;
 
 /* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
    COMPONENT_REF or INDIRECT_REF or ARRAY_REF portion, because we'd end up with
@@ -2405,8 +2437,8 @@ static VEC(tree, heap) *need_creation;
 static tree
 create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
                                unsigned int operand,
-                               tree stmts,
-                               tree domstmt,
+                               gimple_seq *stmts,
+                               gimple domstmt,
                                bool in_call)
 {
   vn_reference_op_t currop = VEC_index (vn_reference_op_s, ref->operands,
@@ -2429,7 +2461,11 @@ create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
                                                      operand + 2 + i, stmts,
                                                      domstmt, true);
          }
-       folded = build_call_array (currop->type, declop->op0, nargs, args);
+       folded = build_call_array (currop->type,
+                                  TREE_CODE (declop->op0) == FUNCTION_DECL
+                                  ? build_fold_addr_expr (declop->op0)
+                                  : declop->op0,
+                                  nargs, args);
        free (args);
        return folded;
       }
@@ -2616,17 +2652,12 @@ create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
    on failure.  */
 
 static tree
-find_or_generate_expression (basic_block block, pre_expr expr, tree stmts,
-                            tree domstmt)
+find_or_generate_expression (basic_block block, pre_expr expr,
+                            gimple_seq *stmts, gimple domstmt)
 {
-  pre_expr leader;
+  pre_expr leader = bitmap_find_leader (AVAIL_OUT (block),
+                                       get_expr_value_id (expr), domstmt);
   tree genop = NULL;
-
-  if (expr->kind == CONSTANT)
-    return PRE_EXPR_CONSTANT (expr);
-  
-  leader = bitmap_find_leader (AVAIL_OUT (block),
-                              get_expr_value_id (expr), domstmt);
   if (leader)
     {
       if (leader->kind == NAME)
@@ -2666,7 +2697,7 @@ find_or_generate_expression (basic_block block, pre_expr expr, tree stmts,
   return genop;
 }
 
-#define NECESSARY(stmt)                stmt->base.asm_written_flag
+#define NECESSARY GF_PLF_1
 
 /* Create an expression in pieces, so that we can handle very complex
    expressions that may be ANTIC, but not necessarily GIMPLE.
@@ -2687,16 +2718,17 @@ find_or_generate_expression (basic_block block, pre_expr expr, tree stmts,
    can return NULL_TREE to signal failure.  */
 
 static tree
-create_expression_by_pieces (basic_block block, pre_expr expr, tree stmts,
-                            tree domstmt,
-                            tree type)
+create_expression_by_pieces (basic_block block, pre_expr expr,
+                            gimple_seq *stmts, gimple domstmt, tree type)
 {
   tree temp, name;
-  tree folded, forced_stmts, newexpr;
+  tree folded, newexpr;
+  gimple_seq forced_stmts;
   unsigned int value_id;
-  tree_stmt_iterator tsi;
+  gimple_stmt_iterator gsi;
   tree exprtype = type ? type : get_expr_type (expr);
   pre_expr nameexpr;
+  gimple newstmt;
 
   switch (expr->kind)
     {
@@ -2730,10 +2762,14 @@ create_expression_by_pieces (basic_block block, pre_expr expr, tree stmts,
                                                         stmts, domstmt);
              if (!genop1 || !genop2)
                return NULL_TREE;
-
              genop1 = fold_convert (TREE_TYPE (nary->op[0]),
                                     genop1);
-             genop2 = fold_convert (TREE_TYPE (nary->op[1]), genop2);
+             /* Ensure op2 is a sizetype for POINTER_PLUS_EXPR.  It
+                may be a constant with the wrong type.  */
+             if (nary->opcode == POINTER_PLUS_EXPR)
+               genop2 = fold_convert (sizetype, genop2);
+             else
+               genop2 = fold_convert (TREE_TYPE (nary->op[1]), genop2);
              
              folded = fold_build2 (nary->opcode, nary->type,
                                    genop1, genop2);
@@ -2772,14 +2808,14 @@ create_expression_by_pieces (basic_block block, pre_expr expr, tree stmts,
      to the value sets and chain them in the instruction stream.  */
   if (forced_stmts)
     {
-      tsi = tsi_start (forced_stmts);
-      for (; !tsi_end_p (tsi); tsi_next (&tsi))
+      gsi = gsi_start (forced_stmts);
+      for (; !gsi_end_p (gsi); gsi_next (&gsi))
        {
-         tree stmt = tsi_stmt (tsi);
-         tree forcedname = GIMPLE_STMT_OPERAND (stmt, 0);
+         gimple stmt = gsi_stmt (gsi);
+         tree forcedname = gimple_get_lhs (stmt);
          pre_expr nameexpr;
 
-         VEC_safe_push (tree, heap, inserted_exprs, stmt);
+         VEC_safe_push (gimple, heap, inserted_exprs, stmt);
          if (TREE_CODE (forcedname) == SSA_NAME)
            {
              VN_INFO_GET (forcedname)->valnum = forcedname;
@@ -2791,8 +2827,7 @@ create_expression_by_pieces (basic_block block, pre_expr expr, tree stmts,
            }
          mark_symbols_for_renaming (stmt);
        }
-      tsi = tsi_last (stmts);
-      tsi_link_after (&tsi, forced_stmts, TSI_CONTINUE_LINKING);
+      gimple_seq_add_seq (stmts, forced_stmts);
     }
 
   /* Build and insert the assignment of the end result to the temporary
@@ -2810,17 +2845,16 @@ create_expression_by_pieces (basic_block block, pre_expr expr, tree stmts,
       || TREE_CODE (exprtype) == VECTOR_TYPE)
     DECL_GIMPLE_REG_P (temp) = 1;
 
-  newexpr = build_gimple_modify_stmt (temp, newexpr);
-  name = make_ssa_name (temp, newexpr);
-  GIMPLE_STMT_OPERAND (newexpr, 0) = name;
-  NECESSARY (newexpr) = 0;
+  newstmt = gimple_build_assign (temp, newexpr);
+  name = make_ssa_name (temp, newstmt);
+  gimple_assign_set_lhs (newstmt, name);
+  gimple_set_plf (newstmt, NECESSARY, false);
 
-  tsi = tsi_last (stmts);
-  tsi_link_after (&tsi, newexpr, TSI_CONTINUE_LINKING);
-  VEC_safe_push (tree, heap, inserted_exprs, newexpr);
+  gimple_seq_add_stmt (stmts, newstmt);
+  VEC_safe_push (gimple, heap, inserted_exprs, newstmt);
 
   /* All the symbols in NEWEXPR should be put into SSA form.  */
-  mark_symbols_for_renaming (newexpr);
+  mark_symbols_for_renaming (newstmt);
 
   /* Add a value number to the temporary.
      The value may already exist in either NEW_SETS, or AVAIL_OUT, because
@@ -2840,7 +2874,7 @@ create_expression_by_pieces (basic_block block, pre_expr expr, tree stmts,
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
       fprintf (dump_file, "Inserted ");
-      print_generic_expr (dump_file, newexpr, 0);
+      print_gimple_stmt (dump_file, newstmt, 0, 0);
       fprintf (dump_file, " in predecessor %d\n", block->index);
     }
 
@@ -2868,6 +2902,7 @@ insert_into_preds_of_block (basic_block block, unsigned int exprnum,
   edge_iterator ei;
   tree type = get_expr_type (expr);
   tree temp;
+  gimple phi;
 
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
@@ -2899,7 +2934,7 @@ insert_into_preds_of_block (basic_block block, unsigned int exprnum,
   /* Make the necessary insertions.  */
   FOR_EACH_EDGE (pred, ei, block->preds)
     {
-      tree stmts = alloc_stmt_list ();
+      gimple_seq stmts = NULL;
       tree builtexpr;
       bprime = pred->src;
       eprime = avail[bprime->index];
@@ -2908,10 +2943,10 @@ insert_into_preds_of_block (basic_block block, unsigned int exprnum,
        {
          builtexpr = create_expression_by_pieces (bprime,
                                                   eprime,
-                                                  stmts, NULL_TREE,
+                                                  &stmts, NULL,
                                                   type);
          gcc_assert (!(pred->flags & EDGE_ABNORMAL));
-         bsi_insert_on_edge (pred, stmts);
+         gsi_insert_seq_on_edge (pred, stmts);
          avail[bprime->index] = get_or_alloc_expr_for_name (builtexpr);
          insertions = true;
        }
@@ -2946,18 +2981,18 @@ insert_into_preds_of_block (basic_block block, unsigned int exprnum,
                        }
                      if (stmts)
                        {
-                         tree_stmt_iterator tsi;
-                         tsi = tsi_start (stmts);
-                         for (; !tsi_end_p (tsi); tsi_next (&tsi))
+                         gimple_stmt_iterator gsi;
+                         gsi = gsi_start (stmts);
+                         for (; !gsi_end_p (gsi); gsi_next (&gsi))
                            {
-                             tree stmt = tsi_stmt (tsi);
-                             tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
-                             VEC_safe_push (tree, heap, inserted_exprs, stmt);
-                             NECESSARY (lhs) = 0;
+                             gimple stmt = gsi_stmt (gsi);
+                             VEC_safe_push (gimple, heap, inserted_exprs, stmt);
+                             gimple_set_plf (stmt, NECESSARY, false);
                            }
-                         bsi_insert_on_edge (pred, stmts);
+                         gsi_insert_seq_on_edge (pred, stmts);
                        }
-                     NECESSARY (forcedexpr) = 0;
+                     /* FIXME tuples
+                     gimple_set_plf (forcedexpr, NECESSARY, false); */
                      avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
                    }
                }
@@ -2987,18 +3022,18 @@ insert_into_preds_of_block (basic_block block, unsigned int exprnum,
 
              if (stmts)
                {
-                 tree_stmt_iterator tsi;
-                 tsi = tsi_start (stmts);
-                 for (; !tsi_end_p (tsi); tsi_next (&tsi))
+                 gimple_stmt_iterator gsi;
+                 gsi = gsi_start (stmts);
+                 for (; !gsi_end_p (gsi); gsi_next (&gsi))
                    {
-                     tree stmt = tsi_stmt (tsi);
-                     tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
-                     VEC_safe_push (tree, heap, inserted_exprs, stmt);
-                     NECESSARY (lhs) = 0;
+                     gimple stmt = gsi_stmt (gsi);
+                     VEC_safe_push (gimple, heap, inserted_exprs, stmt);
+                     gimple_set_plf (stmt, NECESSARY, false);
                    }
-                 bsi_insert_on_edge (pred, stmts);
+                 gsi_insert_seq_on_edge (pred, stmts);
                }
-             NECESSARY (forcedexpr) = 0;
+             /* FIXME tuples
+             gimple_set_plf (forcedexpr, NECESSARY, false); */
              avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
            }
        }
@@ -3025,24 +3060,24 @@ insert_into_preds_of_block (basic_block block, unsigned int exprnum,
   if (TREE_CODE (type) == COMPLEX_TYPE
       || TREE_CODE (type) == VECTOR_TYPE)
     DECL_GIMPLE_REG_P (temp) = 1;
-  temp = create_phi_node (temp, block);
+  phi = create_phi_node (temp, block);
 
-  NECESSARY (temp) = 0;
-  VN_INFO_GET (PHI_RESULT (temp))->valnum = PHI_RESULT (temp);
-  VN_INFO (PHI_RESULT (temp))->value_id = val;
-  VEC_safe_push (tree, heap, inserted_exprs, temp);
+  gimple_set_plf (phi, NECESSARY, false);
+  VN_INFO_GET (gimple_phi_result (phi))->valnum = gimple_phi_result (phi);
+  VN_INFO (gimple_phi_result (phi))->value_id = val;
+  VEC_safe_push (gimple, heap, inserted_exprs, phi);
   FOR_EACH_EDGE (pred, ei, block->preds)
     {
       pre_expr ae = avail[pred->src->index];
       gcc_assert (get_expr_type (ae) == type
                  || useless_type_conversion_p (type, get_expr_type (ae)));
       if (ae->kind == CONSTANT)
-       add_phi_arg (temp, PRE_EXPR_CONSTANT (ae), pred);
+       add_phi_arg (phi, PRE_EXPR_CONSTANT (ae), pred);
       else
-       add_phi_arg (temp, PRE_EXPR_NAME (avail[pred->src->index]), pred);
+       add_phi_arg (phi, PRE_EXPR_NAME (avail[pred->src->index]), pred);
     }
 
-  newphi = get_or_alloc_expr_for_name (PHI_RESULT (temp));
+  newphi = get_or_alloc_expr_for_name (gimple_phi_result (phi));
   add_to_value (val, newphi);
 
   /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
@@ -3068,7 +3103,7 @@ insert_into_preds_of_block (basic_block block, unsigned int exprnum,
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
       fprintf (dump_file, "Created phi ");
-      print_generic_expr (dump_file, temp, 0);
+      print_gimple_stmt (dump_file, phi, 0, 0);
       fprintf (dump_file, " in block %d\n", block->index);
     }
   pre_stats.phis++;
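
Creating the PHI now yields a gimple, its result is read with
gimple_phi_result, and constant arguments can be added directly because
get_representative_for returns the constant for CONSTANT pre_exprs.  A trimmed
sketch of the PHI construction above (fragment):

  edge pred;
  edge_iterator ei;
  gimple phi = create_phi_node (temp, block);

  FOR_EACH_EDGE (pred, ei, block->preds)
    {
      pre_expr ae = avail[pred->src->index];
      add_phi_arg (phi, ae->kind == CONSTANT
                        ? PRE_EXPR_CONSTANT (ae)
                        : PRE_EXPR_NAME (ae), pred);
    }

  VN_INFO_GET (gimple_phi_result (phi))->valnum = gimple_phi_result (phi);
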
@@ -3162,16 +3197,9 @@ do_regular_insertion (basic_block block, basic_block dom)
                }
 
              eprime = fully_constant_expression (eprime);
-             if (eprime->kind == CONSTANT)
-               {
-                 edoubleprime = eprime;
-               }
-             else
-               {
-                 vprime = get_expr_value_id (eprime);
-                 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
-                                                    vprime, NULL_TREE);
-               }
+             vprime = get_expr_value_id (eprime);
+             edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
+                                                vprime, NULL);
              if (edoubleprime == NULL)
                {
                  avail[bprime->index] = eprime;
@@ -3303,17 +3331,9 @@ do_partial_partial_insertion (basic_block block, basic_block dom)
                }
 
              eprime = fully_constant_expression (eprime);
-             if (eprime->kind == CONSTANT)
-               {
-                 edoubleprime = eprime;
-               }
-             else
-               {
-                 vprime = get_expr_value_id (eprime);
-                 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
-                                                    vprime, NULL_TREE);
-               }
-             
+             vprime = get_expr_value_id (eprime);
+             edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
+                                                vprime, NULL);
              if (edoubleprime == NULL)
                {
                  by_all = false;
@@ -3427,11 +3447,13 @@ add_to_exp_gen (basic_block block, tree op)
       result = get_or_alloc_expr_for_name (op);
       bitmap_value_insert_into_set (EXP_GEN (block), result);
       if (TREE_CODE (op) != SSA_NAME
-         || TREE_CODE (SSA_NAME_DEF_STMT (op)) != PHI_NODE)
+         || gimple_code (SSA_NAME_DEF_STMT (op)) != GIMPLE_PHI)
        bitmap_value_insert_into_set (maximal_set, result);
     }
 }
 
+/* FIXME tuples */
+#if 0
 /* For each real store operation of the form
    *a = <value> that we see, create a corresponding fake store of the
    form storetmp_<version> = *a.
@@ -3451,10 +3473,10 @@ insert_fake_stores (void)
 
   FOR_ALL_BB (block)
     {
-      block_stmt_iterator bsi;
-      for (bsi = bsi_start (block); !bsi_end_p (bsi); bsi_next (&bsi))
+      gimple_stmt_iterator gsi;
+      for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
        {
-         tree stmt = bsi_stmt (bsi);
+         gimple stmt = gsi_stmt (gsi);
 
          /* We can't generate SSA names for stores that are complex
             or aggregate.  We also want to ignore things whose
@@ -3499,9 +3521,9 @@ insert_fake_stores (void)
              GIMPLE_STMT_OPERAND (new_tree, 0) = new_lhs;
              create_ssa_artificial_load_stmt (new_tree, stmt, false);
 
-             NECESSARY (new_tree) = 0;
-             VEC_safe_push (tree, heap, inserted_exprs, new_tree);
-             VEC_safe_push (tree, heap, need_creation, new_tree);
+             gimple_set_plf (new_tree, NECESSARY, false);
+             VEC_safe_push (gimple, heap, inserted_exprs, new_tree);
+             VEC_safe_push (gimple, heap, need_creation, new_tree);
              bsi_insert_after (&bsi, new_tree, BSI_NEW_STMT);
            }
        }
@@ -3518,9 +3540,9 @@ realify_fake_stores (void)
   unsigned int i;
   tree stmt;
 
-  for (i = 0; VEC_iterate (tree, need_creation, i, stmt); i++)
+  for (i = 0; VEC_iterate (gimple, need_creation, i, stmt); i++)
     {
-      if (NECESSARY (stmt))
+      if (gimple_plf (stmt, NECESSARY))
        {
          block_stmt_iterator bsi, bsi2;
          tree rhs;
@@ -3542,13 +3564,15 @@ realify_fake_stores (void)
        release_defs (stmt);
     }
 }
+#endif
 
 /* Create value ids for PHI in BLOCK.  */
 
 static void
-make_values_for_phi (tree phi, basic_block block)
+make_values_for_phi (gimple phi, basic_block block)
 {
-  tree result = PHI_RESULT (phi);
+  tree result = gimple_phi_result (phi);
+
   /* We have no need for virtual phis, as they don't represent
      actual computations.  */
   if (is_gimple_reg (result))
@@ -3632,8 +3656,8 @@ compute_avail (void)
   /* Loop until the worklist is empty.  */
   while (sp)
     {
-      block_stmt_iterator bsi;
-      tree stmt, phi;
+      gimple_stmt_iterator gsi;
+      gimple stmt;
       basic_block dom;
       unsigned int stmt_uid = 1;
 
@@ -3647,21 +3671,18 @@ compute_avail (void)
        bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
 
       /* Generate values for PHI nodes.  */
-      for (phi = phi_nodes (block); phi; phi = PHI_CHAIN (phi))
-       make_values_for_phi (phi, block);
+      for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
+       make_values_for_phi (gsi_stmt (gsi), block);
 
       /* Now compute value numbers and populate value sets with all
         the expressions computed in BLOCK.  */
-      for (bsi = bsi_start (block); !bsi_end_p (bsi); bsi_next (&bsi))
+      for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
        {
-         stmt_ann_t ann;
          ssa_op_iter iter;
          tree op;
 
-         stmt = bsi_stmt (bsi);
-         ann = stmt_ann (stmt);
-
-         set_gimple_stmt_uid (stmt, stmt_uid++);
+         stmt = gsi_stmt (gsi);
+         gimple_set_uid (stmt, stmt_uid++);
 
          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
            {
@@ -3676,106 +3697,147 @@ compute_avail (void)
              bitmap_value_insert_into_set (AVAIL_OUT (block), e);
            }
 
-         switch (TREE_CODE (stmt))
+         if (gimple_has_volatile_ops (stmt)
+             || stmt_could_throw_p (stmt))
+           continue;
+
+         switch (gimple_code (stmt))
            {
-           case RETURN_EXPR:
-             if (!ann->has_volatile_ops)
-               FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
-                 add_to_exp_gen (block, op);
+           case GIMPLE_RETURN:
+             FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
+               add_to_exp_gen (block, op);
              continue;
-           case GIMPLE_MODIFY_STMT:
+
+           case GIMPLE_CALL:
              {
-               tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-               if (!ann->has_volatile_ops
-                   && !tree_could_throw_p (stmt))
-                 {
-                   pre_expr result = NULL;
-                   switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
-                     {
-                     case tcc_unary:
-                       if (is_exception_related (rhs))
-                         continue;
-                     case tcc_binary:
-                       {
-                         vn_nary_op_t nary;
-                         unsigned int i;
+               vn_reference_t ref;
+               unsigned int i;
+               vn_reference_op_t vro;
+               pre_expr result = NULL;
+               VEC(vn_reference_op_s, heap) *ops = NULL;
 
-                         vn_nary_op_lookup (rhs, &nary);
+               if (!can_value_number_call (stmt))
+                 continue;
 
-                         if (!nary)
-                           continue;
+               copy_reference_ops_from_call (stmt, &ops);
+               vn_reference_lookup_pieces (shared_vuses_from_stmt (stmt),
+                                           ops, &ref);
+               VEC_free (vn_reference_op_s, heap, ops);
+               if (!ref)
+                 continue;
 
-                         for (i = 0; i < nary->length; i++)
-                           if (TREE_CODE (nary->op[i]) == SSA_NAME)
-                             add_to_exp_gen (block, nary->op[i]);
+               for (i = 0; VEC_iterate (vn_reference_op_s,
+                                        ref->operands, i,
+                                        vro); i++)
+                 {
+                   if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
+                     add_to_exp_gen (block, vro->op0);
+                   if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
+                     add_to_exp_gen (block, vro->op1);
+                 }
+               result = (pre_expr) pool_alloc (pre_expr_pool);
+               result->kind = REFERENCE;
+               result->id = 0;
+               PRE_EXPR_REFERENCE (result) = ref;
+
+               get_or_alloc_expression_id (result);
+               add_to_value (get_expr_value_id (result), result);
+               if (!in_fre)
+                 {
+                   bitmap_value_insert_into_set (EXP_GEN (block),
+                                                 result);
+                   bitmap_value_insert_into_set (maximal_set, result);
+                 }
+               continue;
+             }
 
-                         result = (pre_expr) pool_alloc (pre_expr_pool);
-                         result->kind = NARY;
-                         result->id = 0;
-                         PRE_EXPR_NARY (result) = nary;
-                       }
-                       break;
-                     case tcc_vl_exp:
-                       if (!can_value_number_call (rhs))
-                         continue;
+           case GIMPLE_ASSIGN:
+             {
+               pre_expr result = NULL;
+               switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
+                 {
+                 case tcc_unary:
+                   if (is_exception_related (stmt))
+                     continue;
+                 case tcc_binary:
+                   {
+                     vn_nary_op_t nary;
+                     unsigned int i;
+
+                     vn_nary_op_lookup_pieces (gimple_num_ops (stmt) - 1,
+                                               gimple_assign_rhs_code (stmt),
+                                               gimple_expr_type (stmt),
+                                               gimple_assign_rhs1 (stmt),
+                                               gimple_assign_rhs2 (stmt),
+                                               NULL_TREE, NULL_TREE, &nary);
+
+                     if (!nary)
+                       continue;
+
+                     for (i = 0; i < nary->length; i++)
+                       if (TREE_CODE (nary->op[i]) == SSA_NAME)
+                         add_to_exp_gen (block, nary->op[i]);
+
+                     result = (pre_expr) pool_alloc (pre_expr_pool);
+                     result->kind = NARY;
+                     result->id = 0;
+                     PRE_EXPR_NARY (result) = nary;
+                     break;
+                   }
 
-                     case tcc_declaration:
-                     case tcc_reference:
-                       {
-                         vn_reference_t ref;
-                         unsigned int i;
-                         vn_reference_op_t vro;
-
-                         vn_reference_lookup (rhs,
-                                              shared_vuses_from_stmt (stmt),
-                                              true, &ref);
-                         if (!ref)
-                           continue;
-
-                         for (i = 0; VEC_iterate (vn_reference_op_s,
-                                                  ref->operands, i,
-                                                  vro); i++)
-                           {
-                             if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
-                               add_to_exp_gen (block, vro->op0);
-                             if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
-                               add_to_exp_gen (block, vro->op1);
-                           }
-                         result = (pre_expr) pool_alloc (pre_expr_pool);
-                         result->kind = REFERENCE;
-                         result->id = 0;
-                         PRE_EXPR_REFERENCE (result) = ref;
-                       }
-                       break;
-                     default:
+                 case tcc_declaration:
+                 case tcc_reference:
+                   {
+                     vn_reference_t ref;
+                     unsigned int i;
+                     vn_reference_op_t vro;
+
+                     vn_reference_lookup (gimple_assign_rhs1 (stmt),
+                                          shared_vuses_from_stmt (stmt),
+                                          false, &ref);
+                     if (!ref)
+                       continue;
+
+                     for (i = 0; VEC_iterate (vn_reference_op_s,
+                                              ref->operands, i,
+                                              vro); i++)
                        {
-                         /* For any other statement that we don't
-                            recognize, simply add all referenced
-                            SSA_NAMEs to EXP_GEN.  */
-                         FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
-                           add_to_exp_gen (block, op);
-                         continue;
+                         if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
+                           add_to_exp_gen (block, vro->op0);
+                         if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
+                           add_to_exp_gen (block, vro->op1);
                        }
-                     }
-                   get_or_alloc_expression_id (result);
-                   add_to_value (get_expr_value_id (result), result);
-                   if (!in_fre)
-                     {
-                       bitmap_value_insert_into_set (EXP_GEN (block),
-                                                     result);
-                       bitmap_value_insert_into_set (maximal_set, result);
-                     }
+                     result = (pre_expr) pool_alloc (pre_expr_pool);
+                     result->kind = REFERENCE;
+                     result->id = 0;
+                     PRE_EXPR_REFERENCE (result) = ref;
+                     break;
+                   }
 
+                 default:
+                   /* For any other statement that we don't
+                      recognize, simply add all referenced
+                      SSA_NAMEs to EXP_GEN.  */
+                   FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
+                     add_to_exp_gen (block, op);
+                   continue;
                  }
+
+               get_or_alloc_expression_id (result);
+               add_to_value (get_expr_value_id (result), result);
+               if (!in_fre)
+                 {
+                   bitmap_value_insert_into_set (EXP_GEN (block), result);
+                   bitmap_value_insert_into_set (maximal_set, result);
+                 }
+
                continue;
              }
            default:
              break;
-
            }
-
-
        }
+
       /* Put the dominator children of BLOCK on the worklist of blocks
         to compute available sets for.  */
       for (son = first_dom_son (CDI_DOMINATORS, block);
@@ -3793,30 +3855,27 @@ compute_avail (void)
    be used for replacement.  */
 
 static tree
-do_SCCVN_insertion (tree stmt, tree ssa_vn)
+do_SCCVN_insertion (gimple stmt, tree ssa_vn)
 {
-  basic_block bb = bb_for_stmt (stmt);
-  block_stmt_iterator bsi;
-  tree expr, stmts;
+  basic_block bb = gimple_bb (stmt);
+  gimple_stmt_iterator gsi;
+  gimple_seq stmts = NULL;
+  tree expr;
   pre_expr e;
 
   /* First create a value expression from the expression we want
      to insert and associate it with the value handle for SSA_VN.  */
-
-  /* TODO: Handle complex expressions.  */
-  e = get_or_alloc_expr_for (VN_INFO (ssa_vn)->expr);
+  e = get_or_alloc_expr_for (vn_get_expr_for (ssa_vn));
   if (e == NULL)
     return NULL_TREE;
 
-/* Then use create_expression_by_pieces to generate a valid
+  /* Then use create_expression_by_pieces to generate a valid
      expression to insert at this point of the IL stream.  */
-  stmts = alloc_stmt_list ();
-  expr = create_expression_by_pieces (bb, e, stmts, stmt,
-                                     NULL);
+  expr = create_expression_by_pieces (bb, e, &stmts, stmt, NULL);
   if (expr == NULL_TREE)
     return NULL_TREE;
-  bsi = bsi_for_stmt (stmt);
-  bsi_insert_before (&bsi, stmts, BSI_SAME_STMT);
+  gsi = gsi_for_stmt (stmt);
+  gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);
 
   return expr;
 }
@@ -3831,30 +3890,35 @@ eliminate (void)
 
   FOR_EACH_BB (b)
     {
-      block_stmt_iterator i;
+      gimple_stmt_iterator i;
 
-      for (i = bsi_start (b); !bsi_end_p (i); bsi_next (&i))
+      for (i = gsi_start_bb (b); !gsi_end_p (i); gsi_next (&i))
        {
-         tree stmt = bsi_stmt (i);
+         gimple stmt = gsi_stmt (i);
 
          /* Lookup the RHS of the expression, see if we have an
             available computation for it.  If so, replace the RHS with
             the available computation.  */
-         if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-             && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) == SSA_NAME
-             && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) != SSA_NAME
-             && !is_gimple_min_invariant (GIMPLE_STMT_OPERAND (stmt, 1))
-             && !stmt_ann (stmt)->has_volatile_ops)
+         if (gimple_has_lhs (stmt)
+             && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME
+             && !gimple_assign_ssa_name_copy_p (stmt)
+             && (!gimple_assign_single_p (stmt)
+                 || !is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
+             && !gimple_has_volatile_ops  (stmt)
+             && !has_zero_uses (gimple_get_lhs (stmt)))
            {
-             tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
-             tree *rhs_p = &GIMPLE_STMT_OPERAND (stmt, 1);
+             tree lhs = gimple_get_lhs (stmt);
+             tree rhs = NULL_TREE;
              tree sprime = NULL;
              pre_expr lhsexpr = get_or_alloc_expr_for_name (lhs);
              pre_expr sprimeexpr;
 
+             if (gimple_assign_single_p (stmt))
+               rhs = gimple_assign_rhs1 (stmt);
+
              sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
                                               get_expr_value_id (lhsexpr),
-                                              NULL_TREE);
+                                              NULL);
 
              if (sprimeexpr)
                {
@@ -3876,14 +3940,15 @@ eliminate (void)
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Replaced ");
-                     print_generic_expr (dump_file, *rhs_p, 0);
+                     print_gimple_expr (dump_file, stmt, 0, 0);
                      fprintf (dump_file, " with ");
                      print_generic_expr (dump_file, sprime, 0);
                      fprintf (dump_file, " in ");
-                     print_generic_stmt (dump_file, stmt, 0);
+                     print_gimple_stmt (dump_file, stmt, 0, 0);
                    }
                  pre_stats.eliminations++;
-                 propagate_tree_value (rhs_p, sprime);
+                 propagate_tree_value_into_stmt (&i, sprime);
+                 stmt = gsi_stmt (i);
                  update_stmt (stmt);
                  continue;
                }
@@ -3897,38 +3962,41 @@ eliminate (void)
                  if (val != VN_TOP
                      && TREE_CODE (val) == SSA_NAME
                      && VN_INFO (val)->needs_insertion
-                     && can_PRE_operation (VN_INFO (val)->expr))
+                     && can_PRE_operation (vn_get_expr_for (val)))
                    sprime = do_SCCVN_insertion (stmt, val);
                }
              if (sprime
                  && sprime != lhs
-                 && (TREE_CODE (*rhs_p) != SSA_NAME
-                     || may_propagate_copy (*rhs_p, sprime)))
+                 && (rhs == NULL_TREE
+                     || TREE_CODE (rhs) != SSA_NAME
+                     || may_propagate_copy (rhs, sprime)))
                {
-                 gcc_assert (sprime != *rhs_p);
+                 gcc_assert (sprime != rhs);
 
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Replaced ");
-                     print_generic_expr (dump_file, *rhs_p, 0);
+                     print_gimple_expr (dump_file, stmt, 0, 0);
                      fprintf (dump_file, " with ");
                      print_generic_expr (dump_file, sprime, 0);
                      fprintf (dump_file, " in ");
-                     print_generic_stmt (dump_file, stmt, 0);
+                     print_gimple_stmt (dump_file, stmt, 0, 0);
                    }
 
                  if (TREE_CODE (sprime) == SSA_NAME)
-                   NECESSARY (SSA_NAME_DEF_STMT (sprime)) = 1;
+                   gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
+                                   NECESSARY, true);
                  /* We need to make sure the new and old types actually match,
                     which may require adding a simple cast, which fold_convert
                     will do for us.  */
-                 if (TREE_CODE (*rhs_p) != SSA_NAME
-                     && !useless_type_conversion_p (TREE_TYPE (*rhs_p),
-                                                   TREE_TYPE (sprime)))
-                   sprime = fold_convert (TREE_TYPE (*rhs_p), sprime);
+                 if ((!rhs || TREE_CODE (rhs) != SSA_NAME)
+                     && !useless_type_conversion_p (gimple_expr_type (stmt),
+                                                    TREE_TYPE (sprime)))
+                   sprime = fold_convert (gimple_expr_type (stmt), sprime);
 
                  pre_stats.eliminations++;
-                 propagate_tree_value (rhs_p, sprime);
+                 propagate_tree_value_into_stmt (&i, sprime);
+                 stmt = gsi_stmt (i);
                  update_stmt (stmt);
 
                  /* If we removed EH side effects from the statement, clean
@@ -3936,7 +4004,7 @@ eliminate (void)
                  if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
                    {
                      bitmap_set_bit (need_eh_cleanup,
-                                     bb_for_stmt (stmt)->index);
+                                     gimple_bb (stmt)->index);
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        fprintf (dump_file, "  Removed EH side effects.\n");
                    }
@@ -3944,36 +4012,24 @@ eliminate (void)
            }
          /* Visit COND_EXPRs and fold the comparison with the
             available value-numbers.  */
-         else if (TREE_CODE (stmt) == COND_EXPR
-                  && COMPARISON_CLASS_P (COND_EXPR_COND (stmt)))
+         else if (gimple_code (stmt) == GIMPLE_COND)
            {
-             tree cond = COND_EXPR_COND (stmt);
-             tree op0 = TREE_OPERAND (cond, 0);
-             tree op1 = TREE_OPERAND (cond, 1);
+             tree op0 = gimple_cond_lhs (stmt);
+             tree op1 = gimple_cond_rhs (stmt);
              tree result;
 
              if (TREE_CODE (op0) == SSA_NAME)
                op0 = VN_INFO (op0)->valnum;
              if (TREE_CODE (op1) == SSA_NAME)
                op1 = VN_INFO (op1)->valnum;
-             result = fold_binary (TREE_CODE (cond), TREE_TYPE (cond),
+             result = fold_binary (gimple_cond_code (stmt), boolean_type_node,
                                    op0, op1);
              if (result && TREE_CODE (result) == INTEGER_CST)
                {
-                 COND_EXPR_COND (stmt) = result;
-                 update_stmt (stmt);
-                 todo = TODO_cleanup_cfg;
-               }
-           }
-         else if (TREE_CODE (stmt) == COND_EXPR
-                  && TREE_CODE (COND_EXPR_COND (stmt)) == SSA_NAME)
-           {
-             tree op = COND_EXPR_COND (stmt);
-             op = VN_INFO (op)->valnum;
-             if (TREE_CODE (op) == INTEGER_CST)
-               {
-                 COND_EXPR_COND (stmt) = integer_zerop (op)
-                   ? boolean_false_node : boolean_true_node;
+                 if (integer_zerop (result))
+                   gimple_cond_make_false (stmt);
+                 else
+                   gimple_cond_make_true (stmt);
                  update_stmt (stmt);
                  todo = TODO_cleanup_cfg;
                }
@@ -3992,10 +4048,10 @@ eliminate (void)
    mark that statement necessary. Return the stmt, if it is newly
    necessary.  */
 
-static inline tree
+static inline gimple
 mark_operand_necessary (tree op)
 {
-  tree stmt;
+  gimple stmt;
 
   gcc_assert (op);
 
@@ -4005,11 +4061,11 @@ mark_operand_necessary (tree op)
   stmt = SSA_NAME_DEF_STMT (op);
   gcc_assert (stmt);
 
-  if (NECESSARY (stmt)
-      || IS_EMPTY_STMT (stmt))
+  if (gimple_plf (stmt, NECESSARY)
+      || gimple_nop_p (stmt))
     return NULL;
 
-  NECESSARY (stmt) = 1;
+  gimple_set_plf (stmt, NECESSARY, true);
   return stmt;
 }
 
@@ -4021,36 +4077,36 @@ mark_operand_necessary (tree op)
 static void
 remove_dead_inserted_code (void)
 {
-  VEC(tree,heap) *worklist = NULL;
+  VEC(gimple,heap) *worklist = NULL;
   int i;
-  tree t;
+  gimple t;
 
-  worklist = VEC_alloc (tree, heap, VEC_length (tree, inserted_exprs));
-  for (i = 0; VEC_iterate (tree, inserted_exprs, i, t); i++)
+  worklist = VEC_alloc (gimple, heap, VEC_length (gimple, inserted_exprs));
+  for (i = 0; VEC_iterate (gimple, inserted_exprs, i, t); i++)
     {
-      if (NECESSARY (t))
-       VEC_quick_push (tree, worklist, t);
+      if (gimple_plf (t, NECESSARY))
+       VEC_quick_push (gimple, worklist, t);
     }
-  while (VEC_length (tree, worklist) > 0)
+  while (VEC_length (gimple, worklist) > 0)
     {
-      t = VEC_pop (tree, worklist);
+      t = VEC_pop (gimple, worklist);
 
       /* PHI nodes are somewhat special in that each PHI alternative has
         data and control dependencies.  All the statements feeding the
         PHI node's arguments are always necessary. */
-      if (TREE_CODE (t) == PHI_NODE)
+      if (gimple_code (t) == GIMPLE_PHI)
        {
-         int k;
+         unsigned k;
 
-         VEC_reserve (tree, heap, worklist, PHI_NUM_ARGS (t));
-         for (k = 0; k < PHI_NUM_ARGS (t); k++)
+         VEC_reserve (gimple, heap, worklist, gimple_phi_num_args (t));
+         for (k = 0; k < gimple_phi_num_args (t); k++)
            {
              tree arg = PHI_ARG_DEF (t, k);
              if (TREE_CODE (arg) == SSA_NAME)
                {
-                 arg = mark_operand_necessary (arg);
-                 if (arg)
-                   VEC_quick_push (tree, worklist, arg);
+                 gimple n = mark_operand_necessary (arg);
+                 if (n)
+                   VEC_quick_push (gimple, worklist, n);
                }
            }
        }
@@ -4069,38 +4125,34 @@ remove_dead_inserted_code (void)
 
          FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
            {
-             tree n = mark_operand_necessary (use);
+             gimple n = mark_operand_necessary (use);
              if (n)
-               VEC_safe_push (tree, heap, worklist, n);
+               VEC_safe_push (gimple, heap, worklist, n);
            }
        }
     }
 
-  for (i = 0; VEC_iterate (tree, inserted_exprs, i, t); i++)
+  for (i = 0; VEC_iterate (gimple, inserted_exprs, i, t); i++)
     {
-      if (!NECESSARY (t))
+      if (!gimple_plf (t, NECESSARY))
        {
-         block_stmt_iterator bsi;
+         gimple_stmt_iterator gsi;
 
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Removing unnecessary insertion:");
-             print_generic_stmt (dump_file, t, 0);
+             print_gimple_stmt (dump_file, t, 0, 0);
            }
 
-         if (TREE_CODE (t) == PHI_NODE)
-           {
-             remove_phi_node (t, NULL_TREE, true);
-           }
+         gsi = gsi_for_stmt (t);
+         if (gimple_code (t) == GIMPLE_PHI)
+           remove_phi_node (&gsi, true);
          else
-           {
-             bsi = bsi_for_stmt (t);
-             bsi_remove (&bsi, true);
-             release_defs (t);
-           }
+           gsi_remove (&gsi, true);
+         release_defs (t);
        }
     }
-  VEC_free (tree, heap, worklist);
+  VEC_free (gimple, heap, worklist);
 }
 
 /* Initialize data structures used by PRE.  */
@@ -4171,8 +4223,8 @@ fini_pre (void)
 
   free (postorder);
   VEC_free (bitmap_set_t, heap, value_expressions);
-  VEC_free (tree, heap, inserted_exprs);
-  VEC_free (tree, heap, need_creation);
+  VEC_free (gimple, heap, inserted_exprs);
+  VEC_free (gimple, heap, need_creation);
   bitmap_obstack_release (&grand_bitmap_obstack);
   free_alloc_pool (bitmap_set_pool);
   free_alloc_pool (pre_expr_pool);
@@ -4190,7 +4242,7 @@ fini_pre (void)
 
   if (!bitmap_empty_p (need_eh_cleanup))
     {
-      tree_purge_all_dead_eh_edges (need_eh_cleanup);
+      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
       cleanup_tree_cfg ();
     }
 
@@ -4204,7 +4256,7 @@ fini_pre (void)
    only wants to do full redundancy elimination.  */
 
 static unsigned int
-execute_pre (bool do_fre)
+execute_pre (bool do_fre ATTRIBUTE_UNUSED)
 {
   unsigned int todo = 0;
 
@@ -4214,8 +4266,11 @@ execute_pre (bool do_fre)
      loop_optimizer_init may create new phis, etc.  */
   if (!do_fre)
     loop_optimizer_init (LOOPS_NORMAL);
+  /* FIXME tuples */
+#if 0
   if (0 && !do_fre)
     insert_fake_stores ();
+#endif
 
   if (!run_scc_vn (do_fre))
     {
@@ -4266,15 +4321,18 @@ execute_pre (bool do_fre)
   statistics_counter_event (cfun, "New PHIs", pre_stats.phis);
   statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations);
   statistics_counter_event (cfun, "Constified", pre_stats.constified);
-  bsi_commit_edge_inserts ();
+  gsi_commit_edge_inserts ();
 
   clear_expression_ids ();
   free_scc_vn ();
   if (!do_fre)
     {
       remove_dead_inserted_code ();
+  /* FIXME tuples */
+#if 0
       if (0)
        realify_fake_stores ();
+#endif
     }
 
   fini_pre ();
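
For reference, the tree-ssa-pre.c changes above all follow one conversion idiom: block_stmt_iterator/bsi_* walks and the phi_nodes () chain become gimple_stmt_iterator walks over the statement and PHI sequences.  A minimal sketch of that idiom — not part of the patch, the helper name walk_block_stmts is hypothetical, and the usual GCC internal includes are assumed — looks like:

    /* Illustrative sketch of the tuples iteration idiom.
       Assumes the standard GCC internal includes (config.h, system.h,
       coretypes.h, tree.h, gimple.h).  */

    static void
    walk_block_stmts (basic_block bb)
    {
      gimple_stmt_iterator gsi;

      /* PHI nodes live in their own sequence and get their own iterator,
         replacing the old phi_nodes ()/PHI_CHAIN walk.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple phi = gsi_stmt (gsi);
          /* ... visit PHI, as make_values_for_phi does above ...  */
        }

      /* Ordinary statements replace bsi_start/bsi_end_p/bsi_next.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          /* ... visit STMT, as compute_avail does above ...  */
        }
    }
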
index b037180..611f2b2 100644
@@ -41,6 +41,7 @@
 #include "varray.h"
 #include "vec.h"
 #include "value-prof.h"
+#include "gimple.h"
 
 /* This file implements a generic value propagation engine based on
    the same propagation used by the SSA-CCP algorithm [1].
@@ -96,7 +97,7 @@
    5- Simulation terminates when all three work lists are drained.
 
    Before calling ssa_propagate, it is important to clear
-   DONT_SIMULATE_AGAIN for all the statements in the program that
+   prop_simulate_again_p for all the statements in the program that
    should be simulated.  This initialization allows an implementation
    to specify which statements should never be simulated.
 
 static ssa_prop_visit_stmt_fn ssa_prop_visit_stmt;
 static ssa_prop_visit_phi_fn ssa_prop_visit_phi;
 
-/* Use the deprecated flag to mark statements that have been
-   added to one of the SSA edges worklists.  This flag is used to
-   avoid visiting statements unnecessarily when draining an SSA edge
-   worklist.  If while simulating a basic block, we find a statement with
+/* Keep track of statements that have been added to one of the SSA
+   edges worklists.  This flag is used to avoid visiting statements
+   unnecessarily when draining an SSA edge worklist.  If while
+   simulating a basic block, we find a statement with
    STMT_IN_SSA_EDGE_WORKLIST set, we clear it to prevent SSA edge
-   processing from visiting it again.  */
-#define STMT_IN_SSA_EDGE_WORKLIST(T) ((T)->base.deprecated_flag)
+   processing from visiting it again.
+
+   NOTE: users of the propagation engine are not allowed to use
+   the GF_PLF_1 flag.  */
+#define STMT_IN_SSA_EDGE_WORKLIST      GF_PLF_1
 
 /* A bitmap to keep track of executable blocks in the CFG.  */
 static sbitmap executable_blocks;
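
A small clarifying sketch, not taken from the patch: GF_PLF_1 and GF_PLF_2 are per-statement pass-local flag bits, so a pass names the bit it owns and drives it through gimple_set_plf/gimple_plf, exactly as the worklist code in the following hunks does.  The helper name maybe_queue_stmt and the edge_worklist vector are illustrative only:

    /* Illustrative sketch of the pass-local flag idiom.  A statement is
       queued at most once: the flag is set when pushing and cleared by
       the consumer when popping.  */
    #define STMT_IN_SSA_EDGE_WORKLIST GF_PLF_1

    static GTY(()) VEC(gimple,gc) *edge_worklist;

    static void
    maybe_queue_stmt (gimple use_stmt)
    {
      if (gimple_plf (use_stmt, STMT_IN_SSA_EDGE_WORKLIST))
        return;  /* Already on the worklist; nothing to do.  */

      gimple_set_plf (use_stmt, STMT_IN_SSA_EDGE_WORKLIST, true);
      VEC_safe_push (gimple, gc, edge_worklist, use_stmt);
    }
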
@@ -142,7 +146,7 @@ static sbitmap bb_in_list;
    definition has changed.  SSA edges are def-use edges in the SSA
    web.  For each D-U edge, we store the target statement or PHI node
    U.  */
-static GTY(()) VEC(tree,gc) *interesting_ssa_edges;
+static GTY(()) VEC(gimple,gc) *interesting_ssa_edges;
 
 /* Identical to INTERESTING_SSA_EDGES.  For performance reasons, the
    list of SSA edges is split into two.  One contains all SSA edges
@@ -158,7 +162,7 @@ static GTY(()) VEC(tree,gc) *interesting_ssa_edges;
    don't use a separate worklist for VARYING edges, we end up with
    situations where lattice values move from
    UNDEFINED->INTERESTING->VARYING instead of UNDEFINED->VARYING.  */
-static GTY(()) VEC(tree,gc) *varying_ssa_edges;
+static GTY(()) VEC(gimple,gc) *varying_ssa_edges;
 
 
 /* Return true if the block worklist empty.  */
@@ -257,16 +261,16 @@ add_ssa_edge (tree var, bool is_varying)
 
   FOR_EACH_IMM_USE_FAST (use_p, iter, var)
     {
-      tree use_stmt = USE_STMT (use_p);
+      gimple use_stmt = USE_STMT (use_p);
 
-      if (!DONT_SIMULATE_AGAIN (use_stmt)
-         && !STMT_IN_SSA_EDGE_WORKLIST (use_stmt))
+      if (prop_simulate_again_p (use_stmt)
+         && !gimple_plf (use_stmt, STMT_IN_SSA_EDGE_WORKLIST))
        {
-         STMT_IN_SSA_EDGE_WORKLIST (use_stmt) = 1;
+         gimple_set_plf (use_stmt, STMT_IN_SSA_EDGE_WORKLIST, true);
          if (is_varying)
-           VEC_safe_push (tree, gc, varying_ssa_edges, use_stmt);
+           VEC_safe_push (gimple, gc, varying_ssa_edges, use_stmt);
          else
-           VEC_safe_push (tree, gc, interesting_ssa_edges, use_stmt);
+           VEC_safe_push (gimple, gc, interesting_ssa_edges, use_stmt);
        }
     }
 }
@@ -302,7 +306,7 @@ add_control_edge (edge e)
 /* Simulate the execution of STMT and update the work lists accordingly.  */
 
 static void
-simulate_stmt (tree stmt)
+simulate_stmt (gimple stmt)
 {
   enum ssa_prop_result val = SSA_PROP_NOT_INTERESTING;
   edge taken_edge = NULL;
@@ -310,20 +314,20 @@ simulate_stmt (tree stmt)
 
   /* Don't bother visiting statements that are already
      considered varying by the propagator.  */
-  if (DONT_SIMULATE_AGAIN (stmt))
+  if (!prop_simulate_again_p (stmt))
     return;
 
-  if (TREE_CODE (stmt) == PHI_NODE)
+  if (gimple_code (stmt) == GIMPLE_PHI)
     {
       val = ssa_prop_visit_phi (stmt);
-      output_name = PHI_RESULT (stmt);
+      output_name = gimple_phi_result (stmt);
     }
   else
     val = ssa_prop_visit_stmt (stmt, &taken_edge, &output_name);
 
   if (val == SSA_PROP_VARYING)
     {
-      DONT_SIMULATE_AGAIN (stmt) = 1;
+      prop_set_simulate_again (stmt, false);
 
       /* If the statement produced a new varying value, add the SSA
         edges coming out of OUTPUT_NAME.  */
@@ -336,7 +340,7 @@ simulate_stmt (tree stmt)
        {
          edge e;
          edge_iterator ei;
-         basic_block bb = bb_for_stmt (stmt);
+         basic_block bb = gimple_bb (stmt);
          FOR_EACH_EDGE (e, ei, bb->succs)
            add_control_edge (e);
        }
@@ -362,36 +366,36 @@ simulate_stmt (tree stmt)
    SSA edge is added to it in simulate_stmt.  */
 
 static void
-process_ssa_edge_worklist (VEC(tree,gc) **worklist)
+process_ssa_edge_worklist (VEC(gimple,gc) **worklist)
 {
   /* Drain the entire worklist.  */
-  while (VEC_length (tree, *worklist) > 0)
+  while (VEC_length (gimple, *worklist) > 0)
     {
       basic_block bb;
 
       /* Pull the statement to simulate off the worklist.  */
-      tree stmt = VEC_pop (tree, *worklist);
+      gimple stmt = VEC_pop (gimple, *worklist);
 
       /* If this statement was already visited by simulate_block, then
         we don't need to visit it again here.  */
-      if (!STMT_IN_SSA_EDGE_WORKLIST (stmt))
+      if (!gimple_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST))
        continue;
 
       /* STMT is no longer in a worklist.  */
-      STMT_IN_SSA_EDGE_WORKLIST (stmt) = 0;
+      gimple_set_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST, false);
 
       if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "\nSimulating statement (from ssa_edges): ");
-         print_generic_stmt (dump_file, stmt, dump_flags);
+         print_gimple_stmt (dump_file, stmt, 0, dump_flags);
        }
 
-      bb = bb_for_stmt (stmt);
+      bb = gimple_bb (stmt);
 
       /* PHI nodes are always visited, regardless of whether or not
         the destination block is executable.  Otherwise, visit the
         statement only if its block is marked executable.  */
-      if (TREE_CODE (stmt) == PHI_NODE
+      if (gimple_code (stmt) == GIMPLE_PHI
          || TEST_BIT (executable_blocks, bb->index))
        simulate_stmt (stmt);
     }
@@ -404,7 +408,7 @@ process_ssa_edge_worklist (VEC(tree,gc) **worklist)
 static void
 simulate_block (basic_block block)
 {
-  tree phi;
+  gimple_stmt_iterator gsi;
 
   /* There is nothing to do for the exit block.  */
   if (block == EXIT_BLOCK_PTR)
@@ -415,14 +419,14 @@ simulate_block (basic_block block)
 
   /* Always simulate PHI nodes, even if we have simulated this block
      before.  */
-  for (phi = phi_nodes (block); phi; phi = PHI_CHAIN (phi))
-    simulate_stmt (phi);
+  for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
+    simulate_stmt (gsi_stmt (gsi));
 
   /* If this is the first time we've simulated this block, then we
      must simulate each of its statements.  */
   if (!TEST_BIT (executable_blocks, block->index))
     {
-      block_stmt_iterator j;
+      gimple_stmt_iterator j;
       unsigned int normal_edge_count;
       edge e, normal_edge;
       edge_iterator ei;
@@ -430,17 +434,17 @@ simulate_block (basic_block block)
       /* Note that we have simulated this block.  */
       SET_BIT (executable_blocks, block->index);
 
-      for (j = bsi_start (block); !bsi_end_p (j); bsi_next (&j))
+      for (j = gsi_start_bb (block); !gsi_end_p (j); gsi_next (&j))
        {
-         tree stmt = bsi_stmt (j);
+         gimple stmt = gsi_stmt (j);
 
          /* If this statement is already in the worklist then
             "cancel" it.  The reevaluation implied by the worklist
             entry will produce the same value we generate here and
             thus reevaluating it again from the worklist is
             pointless.  */
-         if (STMT_IN_SSA_EDGE_WORKLIST (stmt))
-           STMT_IN_SSA_EDGE_WORKLIST (stmt) = 0;
+         if (gimple_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST))
+           gimple_set_plf (stmt, STMT_IN_SSA_EDGE_WORKLIST, false);
 
          simulate_stmt (stmt);
        }
@@ -482,8 +486,8 @@ ssa_prop_init (void)
   size_t i;
 
   /* Worklists of SSA edges.  */
-  interesting_ssa_edges = VEC_alloc (tree, gc, 20);
-  varying_ssa_edges = VEC_alloc (tree, gc, 20);
+  interesting_ssa_edges = VEC_alloc (gimple, gc, 20);
+  varying_ssa_edges = VEC_alloc (gimple, gc, 20);
 
   executable_blocks = sbitmap_alloc (last_basic_block);
   sbitmap_zero (executable_blocks);
@@ -506,10 +510,13 @@ ssa_prop_init (void)
      (including the edges coming out of ENTRY_BLOCK_PTR).  */
   FOR_ALL_BB (bb)
     {
-      block_stmt_iterator si;
+      gimple_stmt_iterator si;
 
-      for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
-       STMT_IN_SSA_EDGE_WORKLIST (bsi_stmt (si)) = 0;
+      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
+       gimple_set_plf (gsi_stmt (si), STMT_IN_SSA_EDGE_WORKLIST, false);
+    
+      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
+       gimple_set_plf (gsi_stmt (si), STMT_IN_SSA_EDGE_WORKLIST, false);
 
       FOR_EACH_EDGE (e, ei, bb->succs)
        e->flags &= ~EDGE_EXECUTABLE;
@@ -527,8 +534,8 @@ ssa_prop_init (void)
 static void
 ssa_prop_fini (void)
 {
-  VEC_free (tree, gc, interesting_ssa_edges);
-  VEC_free (tree, gc, varying_ssa_edges);
+  VEC_free (gimple, gc, interesting_ssa_edges);
+  VEC_free (gimple, gc, varying_ssa_edges);
   VEC_free (basic_block, heap, cfg_blocks);
   cfg_blocks = NULL;
   sbitmap_free (bb_in_list);
@@ -536,47 +543,20 @@ ssa_prop_fini (void)
 }
 
 
-/* Get the main expression from statement STMT.  */
-
-tree
-get_rhs (tree stmt)
-{
-  enum tree_code code = TREE_CODE (stmt);
-
-  switch (code)
-    {
-    case RETURN_EXPR:
-      stmt = TREE_OPERAND (stmt, 0);
-      if (!stmt || TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
-       return stmt;
-      /* FALLTHRU */
-
-    case GIMPLE_MODIFY_STMT:
-      stmt = GENERIC_TREE_OPERAND (stmt, 1);
-      if (TREE_CODE (stmt) == WITH_SIZE_EXPR)
-       return TREE_OPERAND (stmt, 0);
-      else
-       return stmt;
-
-    case COND_EXPR:
-      return COND_EXPR_COND (stmt);
-    case SWITCH_EXPR:
-      return SWITCH_COND (stmt);
-    case GOTO_EXPR:
-      return GOTO_DESTINATION (stmt);
-    case LABEL_EXPR:
-      return LABEL_EXPR_LABEL (stmt);
-
-    default:
-      return stmt;
-    }
-}
-
-
-/* Return true if EXPR is a valid GIMPLE expression.  */
+/* Return true if EXPR is an acceptable right-hand-side for a
+   GIMPLE assignment.  We validate the entire tree, not just
+   the root node, thus catching expressions that embed complex
+   operands that are not permitted in GIMPLE.  This function
+   is needed because the folding routines in fold-const.c
+   may return such expressions in some cases, e.g., an array
+   access with an embedded index addition.  It may make more
+   sense to have folding routines that are sensitive to the
+   constraints on GIMPLE operands, rather than abandoning any
+   attempt to fold if the usual folding turns out to be too
+   aggressive.  */
 
 bool
-valid_gimple_expression_p (tree expr)
+valid_gimple_rhs_p (tree expr)
 {
   enum tree_code code = TREE_CODE (expr);
 
@@ -588,6 +568,7 @@ valid_gimple_expression_p (tree expr)
       break;
 
     case tcc_constant:
+      /* All constants are ok.  */
       break;
 
     case tcc_binary:
@@ -604,23 +585,26 @@ valid_gimple_expression_p (tree expr)
 
     case tcc_expression:
       switch (code)
-       {
-       case ADDR_EXPR:
-         {
-           tree t = TREE_OPERAND (expr, 0);
-           while (handled_component_p (t))
-             {
-               /* ??? More checks needed, see the GIMPLE verifier.  */
-               if ((TREE_CODE (t) == ARRAY_REF
-                    || TREE_CODE (t) == ARRAY_RANGE_REF)
-                   && !is_gimple_val (TREE_OPERAND (t, 1)))
-                 return false;
-               t = TREE_OPERAND (t, 0);
-             }
-           if (!is_gimple_id (t))
-             return false;
-           break;
-         }
+        {
+        case ADDR_EXPR:
+          {
+           tree t;
+           if (is_gimple_min_invariant (expr))
+             return true;
+            t = TREE_OPERAND (expr, 0);
+            while (handled_component_p (t))
+              {
+                /* ??? More checks needed, see the GIMPLE verifier.  */
+                if ((TREE_CODE (t) == ARRAY_REF
+                     || TREE_CODE (t) == ARRAY_RANGE_REF)
+                    && !is_gimple_val (TREE_OPERAND (t, 1)))
+                  return false;
+                t = TREE_OPERAND (t, 0);
+              }
+            if (!is_gimple_id (t))
+              return false;
+          }
+          break;
 
        case TRUTH_NOT_EXPR:
          if (!is_gimple_val (TREE_OPERAND (expr, 0)))
@@ -645,24 +629,11 @@ valid_gimple_expression_p (tree expr)
       break;
 
     case tcc_vl_exp:
-      switch (code)
-       {
-       case CALL_EXPR:
-         break;
-       default:
-         return false;
-       }
-      break;
+      return false;
 
     case tcc_exceptional:
-      switch (code)
-       {
-       case SSA_NAME:
-         break;
-
-       default:
-         return false;
-       }
+      if (code != SSA_NAME)
+        return false;
       break;
 
     default:
@@ -673,101 +644,144 @@ valid_gimple_expression_p (tree expr)
 }
 
 
-/* Set the main expression of *STMT_P to EXPR.  If EXPR is not a valid
-   GIMPLE expression no changes are done and the function returns
-   false.  */
+/* Return true if EXPR is a CALL_EXPR suitable for representation
+   as a single GIMPLE_CALL statement.  If the arguments require
+   further gimplification, return false.  */
 
 bool
-set_rhs (tree *stmt_p, tree expr)
+valid_gimple_call_p (tree expr)
 {
-  tree stmt = *stmt_p, op;
-  tree new_stmt;
-  tree var;
-  ssa_op_iter iter;
-  int eh_region;
+  unsigned i, nargs;
 
-  if (!valid_gimple_expression_p (expr))
+  if (TREE_CODE (expr) != CALL_EXPR)
     return false;
 
-  if (EXPR_HAS_LOCATION (stmt)
-      && (EXPR_P (expr)
-         || GIMPLE_STMT_P (expr))
-      && ! EXPR_HAS_LOCATION (expr)
-      && TREE_SIDE_EFFECTS (expr)
-      && TREE_CODE (expr) != LABEL_EXPR)
-    SET_EXPR_LOCATION (expr, EXPR_LOCATION (stmt));
+  nargs = call_expr_nargs (expr);
+  for (i = 0; i < nargs; i++)
+    if (! is_gimple_operand (CALL_EXPR_ARG (expr, i)))
+      return false;
 
-  switch (TREE_CODE (stmt))
-    {
-    case RETURN_EXPR:
-      op = TREE_OPERAND (stmt, 0);
-      if (TREE_CODE (op) != GIMPLE_MODIFY_STMT)
-       {
-         GIMPLE_STMT_OPERAND (stmt, 0) = expr;
-         break;
-       }
-      stmt = op;
-      /* FALLTHRU */
+  return true;
+}
 
-    case GIMPLE_MODIFY_STMT:
-      op = GIMPLE_STMT_OPERAND (stmt, 1);
-      if (TREE_CODE (op) == WITH_SIZE_EXPR)
-       TREE_OPERAND (op, 0) = expr;
-      else
-       GIMPLE_STMT_OPERAND (stmt, 1) = expr;
-      break;
 
-    case COND_EXPR:
-      if (!is_gimple_condexpr (expr))
-        return false;
-      COND_EXPR_COND (stmt) = expr;
-      break;
-    case SWITCH_EXPR:
-      SWITCH_COND (stmt) = expr;
-      break;
-    case GOTO_EXPR:
-      GOTO_DESTINATION (stmt) = expr;
-      break;
-    case LABEL_EXPR:
-      LABEL_EXPR_LABEL (stmt) = expr;
-      break;
+/* Make SSA names defined by OLD_STMT point to NEW_STMT
+   as their defining statement.  */
 
-    default:
-      /* Replace the whole statement with EXPR.  If EXPR has no side
-        effects, then replace *STMT_P with an empty statement.  */
-      new_stmt = TREE_SIDE_EFFECTS (expr) ? expr : build_empty_stmt ();
-      *stmt_p = new_stmt;
-
-      /* Preserve the annotation, the histograms and the EH region information
-         associated with the original statement. The EH information
-        needs to be preserved only if the new statement still can throw.  */
-      new_stmt->base.ann = (tree_ann_t) stmt_ann (stmt);
-      gimple_move_stmt_histograms (cfun, new_stmt, stmt);
-      if (tree_could_throw_p (new_stmt))
-       {
-         eh_region = lookup_stmt_eh_region (stmt);
-         /* We couldn't possibly turn a nothrow into a throw statement.  */
-         gcc_assert (eh_region >= 0);
-         remove_stmt_from_eh_region (stmt);
-         add_stmt_to_eh_region (new_stmt, eh_region);
-       }
+void
+move_ssa_defining_stmt_for_defs (gimple new_stmt, gimple old_stmt)
+{
+  tree var;
+  ssa_op_iter iter;
 
-      if (gimple_in_ssa_p (cfun)
-         && TREE_SIDE_EFFECTS (expr))
-       {
-         /* Fix all the SSA_NAMEs created by *STMT_P to point to its new
-            replacement.  */
-         FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_ALL_DEFS)
-           {
-             if (TREE_CODE (var) == SSA_NAME)
-               SSA_NAME_DEF_STMT (var) = *stmt_p;
-           }
-       }
-      stmt->base.ann = NULL;
-      break;
+  if (gimple_in_ssa_p (cfun))
+    {
+      /* Make defined SSA_NAMEs point to the new
+         statement as their definition.  */
+      FOR_EACH_SSA_TREE_OPERAND (var, old_stmt, iter, SSA_OP_ALL_DEFS)
+        {
+          if (TREE_CODE (var) == SSA_NAME)
+            SSA_NAME_DEF_STMT (var) = new_stmt;
+        }
     }
+}
 
-  return true;
+
+/* Update a GIMPLE_CALL statement at iterator *SI_P to reflect the
+   value of EXPR, which is expected to be the result of folding the
+   call.  This can only be done if EXPR is a CALL_EXPR with valid
+   GIMPLE operands as arguments, or if it is a suitable RHS expression
+   for a GIMPLE_ASSIGN.  More complex expressions will require
+   gimplification, which will introduce additional statements.  In this
+   event, no update is performed, and the function returns false.
+   Note that we cannot mutate a GIMPLE_CALL in-place, so we always
+   replace the statement at *SI_P with an entirely new statement.
+   The new statement need not be a call, e.g., if the original call
+   folded to a constant.  */
+
+bool
+update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
+{
+  tree lhs;
+
+  gimple stmt = gsi_stmt (*si_p);
+
+  gcc_assert (is_gimple_call (stmt));
+
+  lhs = gimple_call_lhs (stmt);
+
+  if (valid_gimple_call_p (expr))
+    {
+      /* The call has simplified to another call.  */
+      tree fn = CALL_EXPR_FN (expr);
+      unsigned i;
+      unsigned nargs = call_expr_nargs (expr);
+      VEC(tree, heap) *args = NULL;
+      gimple new_stmt;
+
+      if (nargs > 0)
+        {
+          args = VEC_alloc (tree, heap, nargs);
+          VEC_safe_grow (tree, heap, args, nargs);
+      
+          for (i = 0; i < nargs; i++)
+            VEC_replace (tree, args, i, CALL_EXPR_ARG (expr, i));
+        }
+
+      new_stmt = gimple_build_call_vec (fn, args);
+      gimple_call_set_lhs (new_stmt, lhs);
+      copy_virtual_operands (new_stmt, stmt);
+      move_ssa_defining_stmt_for_defs (new_stmt, stmt);
+      gimple_set_location (new_stmt, gimple_location (stmt));
+      gsi_replace (si_p, new_stmt, false);
+      VEC_free (tree, heap, args);
+
+      return true;
+    }
+  else if (valid_gimple_rhs_p (expr))
+    {
+      gimple new_stmt;
+
+      /* The call has simplified to an expression
+         that cannot be represented as a GIMPLE_CALL. */
+      if (lhs)
+        {
+          /* A value is expected.
+             Introduce a new GIMPLE_ASSIGN statement.  */
+          STRIP_USELESS_TYPE_CONVERSION (expr);
+          new_stmt = gimple_build_assign (lhs, expr);
+          copy_virtual_operands (new_stmt, stmt);
+          move_ssa_defining_stmt_for_defs (new_stmt, stmt);
+        }
+      else if (!TREE_SIDE_EFFECTS (expr))
+        {
+          /* No value is expected, and EXPR has no effect.
+             Replace it with an empty statement.  */
+          new_stmt = gimple_build_nop ();
+        }
+      else
+        {
+          /* No value is expected, but EXPR has an effect,
+             e.g., it could be a reference to a volatile
+             variable.  Create an assignment statement
+             with a dummy (unused) lhs variable.  */
+          STRIP_USELESS_TYPE_CONVERSION (expr);
+          lhs = create_tmp_var (TREE_TYPE (expr), NULL);
+          new_stmt = gimple_build_assign (lhs, expr);
+          add_referenced_var (lhs);
+          lhs = make_ssa_name (lhs, new_stmt);
+          gimple_assign_set_lhs (new_stmt, lhs);
+          copy_virtual_operands (new_stmt, stmt);
+          move_ssa_defining_stmt_for_defs (new_stmt, stmt);
+        }
+      gimple_set_location (new_stmt, gimple_location (stmt));
+      gsi_replace (si_p, new_stmt, false);
+      return true;
+    }
+  else
+    /* The call simplified to an expression that is
+       not a valid GIMPLE RHS.  */
+    return false;
 }
 
 
@@ -787,8 +801,8 @@ ssa_propagate (ssa_prop_visit_stmt_fn visit_stmt,
 
   /* Iterate until the worklists are empty.  */
   while (!cfg_blocks_empty_p () 
-        || VEC_length (tree, interesting_ssa_edges) > 0
-        || VEC_length (tree, varying_ssa_edges) > 0)
+        || VEC_length (gimple, interesting_ssa_edges) > 0
+        || VEC_length (gimple, varying_ssa_edges) > 0)
     {
       if (!cfg_blocks_empty_p ())
        {
@@ -812,7 +826,7 @@ ssa_propagate (ssa_prop_visit_stmt_fn visit_stmt,
 /* Return the first VDEF operand for STMT.  */
 
 tree
-first_vdef (tree stmt)
+first_vdef (gimple stmt)
 {
   ssa_op_iter iter;
   tree op;
@@ -831,18 +845,23 @@ first_vdef (tree stmt)
    because they are not interesting for the optimizers.  */
 
 bool
-stmt_makes_single_load (tree stmt)
+stmt_makes_single_load (gimple stmt)
 {
   tree rhs;
 
-  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+  if (gimple_code (stmt) != GIMPLE_ASSIGN)
+    return false;
+
+  /* Only a GIMPLE_SINGLE_RHS assignment may have a
+     declaration or reference as its RHS.  */
+  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
+      != GIMPLE_SINGLE_RHS)
     return false;
 
   if (ZERO_SSA_OPERANDS (stmt, SSA_OP_VDEF|SSA_OP_VUSE))
     return false;
 
-  rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-  STRIP_NOPS (rhs);
+  rhs = gimple_assign_rhs1 (stmt);
 
   return (!TREE_THIS_VOLATILE (rhs)
          && (DECL_P (rhs)
@@ -856,18 +875,22 @@ stmt_makes_single_load (tree stmt)
    because they are not interesting for the optimizers.  */
 
 bool
-stmt_makes_single_store (tree stmt)
+stmt_makes_single_store (gimple stmt)
 {
   tree lhs;
 
-  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+  if (gimple_code (stmt) != GIMPLE_ASSIGN
+      && gimple_code (stmt) != GIMPLE_CALL)
     return false;
 
   if (ZERO_SSA_OPERANDS (stmt, SSA_OP_VDEF))
     return false;
 
-  lhs = GIMPLE_STMT_OPERAND (stmt, 0);
-  STRIP_NOPS (lhs);
+  lhs = gimple_get_lhs (stmt);
+
+  /* A call statement may have a null LHS.  */
+  if (!lhs)
+    return false;
 
   return (!TREE_THIS_VOLATILE (lhs)
           && (DECL_P (lhs)
@@ -880,7 +903,7 @@ stmt_makes_single_store (tree stmt)
    NULL.  */
 
 prop_value_t *
-get_value_loaded_by (tree stmt, prop_value_t *values)
+get_value_loaded_by (gimple stmt, prop_value_t *values)
 {
   ssa_op_iter i;
   tree vuse;
@@ -911,13 +934,10 @@ struct prop_stats_d
 static struct prop_stats_d prop_stats;
 
 /* Replace USE references in statement STMT with the values stored in
-   PROP_VALUE. Return true if at least one reference was replaced.  If
-   REPLACED_ADDRESSES_P is given, it will be set to true if an address
-   constant was replaced.  */
+   PROP_VALUE. Return true if at least one reference was replaced.  */
 
-bool
-replace_uses_in (tree stmt, bool *replaced_addresses_p,
-                prop_value_t *prop_value)
+static bool
+replace_uses_in (gimple stmt, prop_value_t *prop_value)
 {
   bool replaced = false;
   use_operand_p use;
@@ -931,7 +951,7 @@ replace_uses_in (tree stmt, bool *replaced_addresses_p,
       if (val == tuse || val == NULL_TREE)
        continue;
 
-      if (TREE_CODE (stmt) == ASM_EXPR
+      if (gimple_code (stmt) == GIMPLE_ASM
          && !may_propagate_copy_into_asm (tuse))
        continue;
 
@@ -946,8 +966,6 @@ replace_uses_in (tree stmt, bool *replaced_addresses_p,
       propagate_value (use, val);
 
       replaced = true;
-      if (POINTER_TYPE_P (TREE_TYPE (tuse)) && replaced_addresses_p)
-       *replaced_addresses_p = true;
     }
 
   return replaced;
@@ -955,9 +973,7 @@ replace_uses_in (tree stmt, bool *replaced_addresses_p,
 
 
 /* Replace the VUSE references in statement STMT with the values
-   stored in PROP_VALUE.  Return true if a reference was replaced.  If
-   REPLACED_ADDRESSES_P is given, it will be set to true if an address
-   constant was replaced.
+   stored in PROP_VALUE.  Return true if a reference was replaced.
 
    Replacing VUSE operands is slightly more complex than replacing
    regular USEs.  We are only interested in two types of replacements
@@ -1016,8 +1032,7 @@ replace_uses_in (tree stmt, bool *replaced_addresses_p,
       replace_uses_in.  */
 
 static bool
-replace_vuses_in (tree stmt, bool *replaced_addresses_p,
-                  prop_value_t *prop_value)
+replace_vuses_in (gimple stmt, prop_value_t *prop_value)
 {
   bool replaced = false;
   ssa_op_iter iter;
@@ -1029,29 +1044,21 @@ replace_vuses_in (tree stmt, bool *replaced_addresses_p,
         see if we are trying to propagate a constant or a GIMPLE
         register (case #1 above).  */
       prop_value_t *val = get_value_loaded_by (stmt, prop_value);
-      tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
+      tree rhs = gimple_assign_rhs1 (stmt);
 
       if (val
          && val->value
          && (is_gimple_reg (val->value)
              || is_gimple_min_invariant (val->value))
          && simple_cst_equal (rhs, val->mem_ref) == 1)
-
        {
-         /* If we are replacing a constant address, inform our
-            caller.  */
-         if (TREE_CODE (val->value) != SSA_NAME
-             && POINTER_TYPE_P (TREE_TYPE (GIMPLE_STMT_OPERAND (stmt, 1)))
-             && replaced_addresses_p)
-           *replaced_addresses_p = true;
-
          /* We can only perform the substitution if the load is done
             from the same memory location as the original store.
             Since we already know that there are no intervening
             stores between DEF_STMT and STMT, we only need to check
             that the RHS of STMT is the same as the memory reference
             propagated together with the value.  */
-         GIMPLE_STMT_OPERAND (stmt, 1) = val->value;
+         gimple_assign_set_rhs1 (stmt, val->value);
 
          if (TREE_CODE (val->value) != SSA_NAME)
            prop_stats.num_const_prop++;
@@ -1094,18 +1101,20 @@ replace_vuses_in (tree stmt, bool *replaced_addresses_p,
    values from PROP_VALUE.  */
 
 static void
-replace_phi_args_in (tree phi, prop_value_t *prop_value)
+replace_phi_args_in (gimple phi, prop_value_t *prop_value)
 {
-  int i;
+  size_t i;
   bool replaced = false;
-  tree prev_phi = NULL;
 
   if (dump_file && (dump_flags & TDF_DETAILS))
-    prev_phi = unshare_expr (phi);
+    {
+      fprintf (dump_file, "Folding PHI node: ");
+      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
+    }
 
-  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
+  for (i = 0; i < gimple_phi_num_args (phi); i++)
     {
-      tree arg = PHI_ARG_DEF (phi, i);
+      tree arg = gimple_phi_arg_def (phi, i);
 
       if (TREE_CODE (arg) == SSA_NAME)
        {
@@ -1125,72 +1134,84 @@ replace_phi_args_in (tree phi, prop_value_t *prop_value)
                 through an abnormal edge, update the replacement
                 accordingly.  */
              if (TREE_CODE (val) == SSA_NAME
-                 && PHI_ARG_EDGE (phi, i)->flags & EDGE_ABNORMAL)
+                 && gimple_phi_arg_edge (phi, i)->flags & EDGE_ABNORMAL)
                SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
            }
        }
     }
   
-  if (replaced && dump_file && (dump_flags & TDF_DETAILS))
+  if (dump_file && (dump_flags & TDF_DETAILS))
     {
-      fprintf (dump_file, "Folded PHI node: ");
-      print_generic_stmt (dump_file, prev_phi, TDF_SLIM);
-      fprintf (dump_file, "           into: ");
-      print_generic_stmt (dump_file, phi, TDF_SLIM);
-      fprintf (dump_file, "\n");
+      if (!replaced)
+       fprintf (dump_file, "No folding possible\n");
+      else
+       {
+         fprintf (dump_file, "Folded into: ");
+         print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
+         fprintf (dump_file, "\n");
+       }
     }
 }
 
 
-/* If STMT has a predicate whose value can be computed using the value
-   range information computed by VRP, compute its value and return true.
-   Otherwise, return false.  */
+/* If the statement pointed by SI has a predicate whose value can be
+   computed using the value range information computed by VRP, compute
+   its value and return true.  Otherwise, return false.  */
 
 static bool
-fold_predicate_in (tree stmt)
+fold_predicate_in (gimple_stmt_iterator *si)
 {
-  tree *pred_p = NULL;
-  bool modify_stmt_p = false;
+  bool assignment_p = false;
   tree val;
+  gimple stmt = gsi_stmt (*si);
 
-  if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-      && COMPARISON_CLASS_P (GIMPLE_STMT_OPERAND (stmt, 1)))
+  if (is_gimple_assign (stmt)
+      && TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
     {
-      modify_stmt_p = true;
-      pred_p = &GIMPLE_STMT_OPERAND (stmt, 1);
+      assignment_p = true;
+      val = vrp_evaluate_conditional (gimple_assign_rhs_code (stmt),
+                                     gimple_assign_rhs1 (stmt),
+                                     gimple_assign_rhs2 (stmt),
+                                     stmt);
     }
-  else if (TREE_CODE (stmt) == COND_EXPR)
-    pred_p = &COND_EXPR_COND (stmt);
+  else if (gimple_code (stmt) == GIMPLE_COND)
+    val = vrp_evaluate_conditional (gimple_cond_code (stmt),
+                                   gimple_cond_lhs (stmt),
+                                   gimple_cond_rhs (stmt),
+                                   stmt);
   else
     return false;
 
-  if (TREE_CODE (*pred_p) == SSA_NAME)
-    val = vrp_evaluate_conditional (EQ_EXPR,
-                                   *pred_p,
-                                   boolean_true_node,
-                                   stmt);
-  else
-    val = vrp_evaluate_conditional (TREE_CODE (*pred_p),
-                                   TREE_OPERAND (*pred_p, 0),
-                                   TREE_OPERAND (*pred_p, 1),
-                                   stmt);
 
   if (val)
     {
-      if (modify_stmt_p)
-        val = fold_convert (TREE_TYPE (*pred_p), val);
+      if (assignment_p)
+        val = fold_convert (gimple_expr_type (stmt), val);
       
       if (dump_file)
        {
          fprintf (dump_file, "Folding predicate ");
-         print_generic_expr (dump_file, *pred_p, 0);
+         print_gimple_expr (dump_file, stmt, 0, 0);
          fprintf (dump_file, " to ");
          print_generic_expr (dump_file, val, 0);
          fprintf (dump_file, "\n");
        }
 
       prop_stats.num_pred_folded++;
-      *pred_p = val;
+
+      if (is_gimple_assign (stmt))
+       gimple_assign_set_rhs_from_tree (si, val);
+      else
+       {
+         gcc_assert (gimple_code (stmt) == GIMPLE_COND);
+         if (integer_zerop (val))
+           gimple_cond_make_false (stmt);
+         else if (integer_onep (val))
+           gimple_cond_make_true (stmt);
+         else
+           gcc_unreachable ();
+       }
+
       return true;
     }
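With tuples there is no separate predicate tree to overwrite in place; the statement itself is rewritten, as the two arms above show.  A minimal sketch of just the GIMPLE_COND arm, assuming GCC's internal headers; the helper name fold_cond_to_constant is invented for illustration and is not part of this patch:

    /* Illustrative sketch: rewrite COND_STMT, a GIMPLE_COND, once its
       predicate is known to evaluate to the constant VAL.  */
    static bool
    fold_cond_to_constant (gimple cond_stmt, tree val)
    {
      if (integer_onep (val))
        gimple_cond_make_true (cond_stmt);   /* condition is always true  */
      else if (integer_zerop (val))
        gimple_cond_make_false (cond_stmt);  /* condition is always false */
      else
        return false;                        /* VAL is not a known constant */
      update_stmt (cond_stmt);
      return true;
    }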
 
@@ -1222,78 +1243,83 @@ substitute_and_fold (prop_value_t *prop_value, bool use_ranges_p)
     return false;
 
   if (dump_file && (dump_flags & TDF_DETAILS))
-    fprintf (dump_file, "\nSubstituing values and folding statements\n\n");
+    fprintf (dump_file, "\nSubstituting values and folding statements\n\n");
 
   memset (&prop_stats, 0, sizeof (prop_stats));
 
   /* Substitute values in every statement of every basic block.  */
   FOR_EACH_BB (bb)
     {
-      block_stmt_iterator i;
-      tree phi;
+      gimple_stmt_iterator i;
 
       /* Propagate known values into PHI nodes.  */
       if (prop_value)
-       for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
-         replace_phi_args_in (phi, prop_value);
+       for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
+         replace_phi_args_in (gsi_stmt (i), prop_value);
 
       /* Propagate known values into stmts.  Do a backward walk to expose
         more trivially deletable stmts.  */
-      for (i = bsi_last (bb); !bsi_end_p (i);)
+      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
        {
-          bool replaced_address, did_replace;
-         tree call, prev_stmt = NULL;
-         tree stmt = bsi_stmt (i);
+          bool did_replace;
+         gimple stmt = gsi_stmt (i);
+         enum gimple_code code = gimple_code (stmt);
 
          /* Ignore ASSERT_EXPRs.  They are used by VRP to generate
             range information for names and they are discarded
             afterwards.  */
-         if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-             && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == ASSERT_EXPR)
+
+         if (code == GIMPLE_ASSIGN
+             && TREE_CODE (gimple_assign_rhs1 (stmt)) == ASSERT_EXPR)
            {
-             bsi_prev (&i);
+             gsi_prev (&i);
              continue;
            }
 
          /* No point propagating into a stmt whose result is not used,
             but instead we might be able to remove a trivially dead stmt.  */
-         if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-             && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) == SSA_NAME
-             && !stmt_ann (stmt)->has_volatile_ops
-             && has_zero_uses (GIMPLE_STMT_OPERAND (stmt, 0))
-             && !tree_could_throw_p (stmt)
-             && (!(call = get_call_expr_in (stmt))
-                 || !TREE_SIDE_EFFECTS (call)))
+         if (gimple_get_lhs (stmt)
+             && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME
+             && has_zero_uses (gimple_get_lhs (stmt))
+             && !stmt_could_throw_p (stmt)
+             && !gimple_has_side_effects (stmt))
            {
-             block_stmt_iterator i2;
+             gimple_stmt_iterator i2;
+
              if (dump_file && dump_flags & TDF_DETAILS)
                {
                  fprintf (dump_file, "Removing dead stmt ");
-                 print_generic_expr (dump_file, stmt, 0);
+                 print_gimple_stmt (dump_file, stmt, 0, 0);
                  fprintf (dump_file, "\n");
                }
              prop_stats.num_dce++;
-             bsi_prev (&i);
-             i2 = bsi_for_stmt (stmt);
-             bsi_remove (&i2, true);
+             gsi_prev (&i);
+             i2 = gsi_for_stmt (stmt);
+             gsi_remove (&i2, true);
              release_defs (stmt);
              continue;
            }
 
          /* Record the state of the statement before replacements.  */
-         push_stmt_changes (bsi_stmt_ptr (i));
+         push_stmt_changes (gsi_stmt_ptr (&i));
 
          /* Replace the statement with its folded version and mark it
             folded.  */
          did_replace = false;
-         replaced_address = false;
          if (dump_file && (dump_flags & TDF_DETAILS))
-           prev_stmt = unshare_expr (stmt);
+           {
+             fprintf (dump_file, "Folding statement: ");
+             print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
+           }
 
          /* If we have range information, see if we can fold
             predicate expressions.  */
          if (use_ranges_p)
-           did_replace = fold_predicate_in (stmt);
+           {
+             did_replace = fold_predicate_in (&i);
+             /* fold_predicate_in should not have reallocated STMT.  */
+             gcc_assert (gsi_stmt (i) == stmt);
+           }
 
          if (prop_value)
            {
@@ -1302,48 +1328,54 @@ substitute_and_fold (prop_value_t *prop_value, bool use_ranges_p)
                 information is not collected on virtuals, so we only
                 need to check this for real uses).  */
              if (!did_replace)
-               did_replace |= replace_uses_in (stmt, &replaced_address,
-                                               prop_value);
+               did_replace |= replace_uses_in (stmt, prop_value);
 
-             did_replace |= replace_vuses_in (stmt, &replaced_address,
-                                              prop_value);
+             did_replace |= replace_vuses_in (stmt, prop_value);
            }
 
          /* If we made a replacement, fold and cleanup the statement.  */
          if (did_replace)
            {
-             tree old_stmt = stmt;
-             tree rhs;
+             gimple old_stmt = stmt;
 
-             fold_stmt (bsi_stmt_ptr (i));
-             stmt = bsi_stmt (i);
+             fold_stmt (&i);
+             stmt = gsi_stmt (i);
 
               /* If we cleaned up EH information from the statement,
                  remove EH edges.  */
              if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
-               tree_purge_dead_eh_edges (bb);
-
-             rhs = get_rhs (stmt);
-             if (TREE_CODE (rhs) == ADDR_EXPR)
-               recompute_tree_invariant_for_addr_expr (rhs);
-
-             if (dump_file && (dump_flags & TDF_DETAILS))
-               {
-                 fprintf (dump_file, "Folded statement: ");
-                 print_generic_stmt (dump_file, prev_stmt, TDF_SLIM);
-                 fprintf (dump_file, "            into: ");
-                 print_generic_stmt (dump_file, stmt, TDF_SLIM);
-                 fprintf (dump_file, "\n");
-               }
+               gimple_purge_dead_eh_edges (bb);
+
+              if (is_gimple_assign (stmt)
+                  && (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
+                      == GIMPLE_SINGLE_RHS))
+              {
+                tree rhs = gimple_assign_rhs1 (stmt);
+                
+                if (TREE_CODE (rhs) == ADDR_EXPR)
+                  recompute_tree_invariant_for_addr_expr (rhs);
+              }
 
              /* Determine what needs to be done to update the SSA form.  */
-             pop_stmt_changes (bsi_stmt_ptr (i));
+             pop_stmt_changes (gsi_stmt_ptr (&i));
              something_changed = true;
            }
          else
            {
              /* The statement was not modified, discard the change buffer.  */
-             discard_stmt_changes (bsi_stmt_ptr (i));
+             discard_stmt_changes (gsi_stmt_ptr (&i));
+           }
+
+         if (dump_file && (dump_flags & TDF_DETAILS))
+           {
+             if (did_replace)
+               {
+                 fprintf (dump_file, "Folded into: ");
+                 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
+                 fprintf (dump_file, "\n");
+               }
+             else
+               fprintf (dump_file, "Not folded\n");
            }
 
          /* Some statements may be simplified using ranges.  For
@@ -1355,7 +1387,7 @@ substitute_and_fold (prop_value_t *prop_value, bool use_ranges_p)
          if (use_ranges_p)
            simplify_stmt_using_ranges (stmt);
 
-         bsi_prev (&i);
+         gsi_prev (&i);
        }
     }
 
index 1813378..e472bac 100644
@@ -22,9 +22,21 @@ along with GCC; see the file COPYING3.  If not see
 #ifndef _TREE_SSA_PROPAGATE_H
 #define _TREE_SSA_PROPAGATE_H 1
 
-/* Use the TREE_VISITED bitflag to mark statements and PHI nodes that
-   have been deemed varying and should not be simulated again.  */
-#define DONT_SIMULATE_AGAIN(T) TREE_VISITED (T)
+/* If VISIT_P is true, statement S will be simulated again.  */
+
+static inline void
+prop_set_simulate_again (gimple s, bool visit_p)
+{
+  gimple_set_visited (s, visit_p);
+}
+
+/* Return true if statement S should be simulated again.  */
+
+static inline bool
+prop_simulate_again_p (gimple s)
+{
+  return gimple_visited_p (s);
+}
 
 /* Lattice values used for propagation purposes.  Specific instances
    of a propagation engine must return these values from the statement
@@ -106,20 +118,20 @@ typedef struct value_range_d value_range_t;
 
 
 /* Call-back functions used by the value propagation engine.  */
-typedef enum ssa_prop_result (*ssa_prop_visit_stmt_fn) (tree, edge *, tree *);
-typedef enum ssa_prop_result (*ssa_prop_visit_phi_fn) (tree);
+typedef enum ssa_prop_result (*ssa_prop_visit_stmt_fn) (gimple, edge *, tree *);
+typedef enum ssa_prop_result (*ssa_prop_visit_phi_fn) (gimple);
 
 
 /* In tree-ssa-propagate.c  */
 void ssa_propagate (ssa_prop_visit_stmt_fn, ssa_prop_visit_phi_fn);
-tree get_rhs (tree);
-bool valid_gimple_expression_p (tree expr);
-bool set_rhs (tree *, tree);
-tree first_vdef (tree);
-bool stmt_makes_single_load (tree);
-bool stmt_makes_single_store (tree);
-prop_value_t *get_value_loaded_by (tree, prop_value_t *);
-bool replace_uses_in (tree, bool *, prop_value_t *);
+bool valid_gimple_rhs_p (tree);
+bool valid_gimple_call_p (tree);
+void move_ssa_defining_stmt_for_defs (gimple, gimple);
+bool update_call_from_tree (gimple_stmt_iterator *, tree);
+tree first_vdef (gimple);
+bool stmt_makes_single_load (gimple);
+bool stmt_makes_single_store (gimple);
+prop_value_t *get_value_loaded_by (gimple, prop_value_t *);
 bool substitute_and_fold (prop_value_t *, bool);
 
 #endif /* _TREE_SSA_PROPAGATE_H  */
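The engine's callbacks now receive gimple statements rather than trees.  A minimal sketch of a client registering with ssa_propagate under the new signatures, assuming GCC's internal headers; the my_* names are invented, and SSA_PROP_VARYING is assumed to keep the spelling this header used before the merge:

    /* Illustrative sketch: a do-nothing client of the propagation engine
       built on the gimple-based callback types declared above.  */

    static enum ssa_prop_result
    my_visit_stmt (gimple stmt ATTRIBUTE_UNUSED,
                   edge *taken_edge_p ATTRIBUTE_UNUSED,
                   tree *output_p ATTRIBUTE_UNUSED)
    {
      /* Record nothing; consider every output varying.  */
      return SSA_PROP_VARYING;
    }

    static enum ssa_prop_result
    my_visit_phi (gimple phi ATTRIBUTE_UNUSED)
    {
      return SSA_PROP_VARYING;
    }

    static void
    run_my_propagation (void)
    {
      ssa_propagate (my_visit_stmt, my_visit_phi);
    }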
index 5fcaa7b..a3facd8 100644
@@ -29,7 +29,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "diagnostic.h"
 #include "tree-inline.h"
 #include "tree-flow.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "tree-dump.h"
 #include "timevar.h"
 #include "tree-iterator.h"
@@ -230,23 +230,21 @@ get_rank (tree e)
 
   if (TREE_CODE (e) == SSA_NAME)
     {
-      tree stmt;
-      tree rhs;
+      gimple stmt;
       long rank, maxrank;
-      int i;
-      int n;
+      int i, n;
 
       if (TREE_CODE (SSA_NAME_VAR (e)) == PARM_DECL
          && SSA_NAME_IS_DEFAULT_DEF (e))
        return find_operand_rank (e);
 
       stmt = SSA_NAME_DEF_STMT (e);
-      if (bb_for_stmt (stmt) == NULL)
+      if (gimple_bb (stmt) == NULL)
        return 0;
 
-      if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
+      if (!is_gimple_assign (stmt)
          || !ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_DEFS))
-       return bb_rank[bb_for_stmt (stmt)->index];
+       return bb_rank[gimple_bb (stmt)->index];
 
       /* If we already have a rank for this expression, use that.  */
       rank = find_operand_rank (e);
@@ -256,19 +254,28 @@ get_rank (tree e)
       /* Otherwise, find the maximum rank for the operands, or the bb
         rank, whichever is less.   */
       rank = 0;
-      maxrank = bb_rank[bb_for_stmt(stmt)->index];
-      rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-      n = TREE_OPERAND_LENGTH (rhs);
-      if (n == 0)
-       rank = MAX (rank, get_rank (rhs));
+      maxrank = bb_rank[gimple_bb(stmt)->index];
+      if (gimple_assign_single_p (stmt))
+       {
+         tree rhs = gimple_assign_rhs1 (stmt);
+         n = TREE_OPERAND_LENGTH (rhs);
+         if (n == 0)
+           rank = MAX (rank, get_rank (rhs));
+         else
+           {
+             for (i = 0;
+                  i < n && TREE_OPERAND (rhs, i) && rank != maxrank; i++)
+               rank = MAX(rank, get_rank (TREE_OPERAND (rhs, i)));
+           }
+       }
       else
        {
-         for (i = 0;
-              i < n
-                && TREE_OPERAND (rhs, i)
-                && rank != maxrank;
-              i++)
-           rank = MAX(rank, get_rank (TREE_OPERAND (rhs, i)));
+         n = gimple_num_ops (stmt);
+         for (i = 1; i < n && rank != maxrank; i++)
+           {
+             gcc_assert (gimple_op (stmt, i));
+             rank = MAX(rank, get_rank (gimple_op (stmt, i)));
+           }
        }
 
       if (dump_file && (dump_flags & TDF_DETAILS))
@@ -349,21 +356,21 @@ add_to_ops_vec (VEC(operand_entry_t, heap) **ops, tree op)
    operation with tree code CODE, and is inside LOOP.  */
 
 static bool
-is_reassociable_op (tree stmt, enum tree_code code, struct loop *loop)
+is_reassociable_op (gimple stmt, enum tree_code code, struct loop *loop)
 {
-  basic_block bb;
+  basic_block bb = gimple_bb (stmt);
 
-  if (IS_EMPTY_STMT (stmt))
+  if (gimple_bb (stmt) == NULL)
     return false;
 
-  bb = bb_for_stmt (stmt);
   if (!flow_bb_inside_loop_p (loop, bb))
     return false;
 
-  if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-      && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == code
-      && has_single_use (GIMPLE_STMT_OPERAND (stmt, 0)))
+  if (is_gimple_assign (stmt)
+      && gimple_assign_rhs_code (stmt) == code
+      && has_single_use (gimple_assign_lhs (stmt)))
     return true;
+
   return false;
 }
 
@@ -374,15 +381,13 @@ is_reassociable_op (tree stmt, enum tree_code code, struct loop *loop)
 static tree
 get_unary_op (tree name, enum tree_code opcode)
 {
-  tree stmt = SSA_NAME_DEF_STMT (name);
-  tree rhs;
+  gimple stmt = SSA_NAME_DEF_STMT (name);
 
-  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+  if (!is_gimple_assign (stmt))
     return NULL_TREE;
 
-  rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-  if (TREE_CODE (rhs) == opcode)
-    return TREE_OPERAND (rhs, 0);
+  if (gimple_assign_rhs_code (stmt) == opcode)
+    return gimple_assign_rhs1 (stmt);
   return NULL_TREE;
 }
 
@@ -806,18 +811,20 @@ optimize_ops_list (enum tree_code opcode,
    update" operation.  */
 
 static bool
-is_phi_for_stmt (tree stmt, tree operand)
+is_phi_for_stmt (gimple stmt, tree operand)
 {
-  tree def_stmt;
-  tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
+  gimple def_stmt;
+  tree lhs;
   use_operand_p arg_p;
   ssa_op_iter i;
 
   if (TREE_CODE (operand) != SSA_NAME)
     return false;
 
+  lhs = gimple_assign_lhs (stmt);
+
   def_stmt = SSA_NAME_DEF_STMT (operand);
-  if (TREE_CODE (def_stmt) != PHI_NODE)
+  if (gimple_code (def_stmt) != GIMPLE_PHI)
     return false;
 
   FOR_EACH_PHI_ARG (arg_p, def_stmt, i, SSA_OP_USE)
@@ -831,10 +838,11 @@ is_phi_for_stmt (tree stmt, tree operand)
    order.  */
 
 static void
-rewrite_expr_tree (tree stmt, unsigned int opindex,
+rewrite_expr_tree (gimple stmt, unsigned int opindex,
                   VEC(operand_entry_t, heap) * ops)
 {
-  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
+  tree rhs1 = gimple_assign_rhs1 (stmt);
+  tree rhs2 = gimple_assign_rhs2 (stmt);
   operand_entry_t oe;
 
   /* If we have three operands left, then we want to make sure the one
@@ -897,24 +905,22 @@ rewrite_expr_tree (tree stmt, unsigned int opindex,
       oe1 = VEC_index (operand_entry_t, ops, opindex);
       oe2 = VEC_index (operand_entry_t, ops, opindex + 1);
 
-      if (TREE_OPERAND (rhs, 0) != oe1->op
-         || TREE_OPERAND (rhs, 1) != oe2->op)
+      if (rhs1 != oe1->op || rhs2 != oe2->op)
        {
-
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Transforming ");
-             print_generic_expr (dump_file, rhs, 0);
+             print_gimple_stmt (dump_file, stmt, 0, 0);
            }
 
-         TREE_OPERAND (rhs, 0) = oe1->op;
-         TREE_OPERAND (rhs, 1) = oe2->op;
+         gimple_assign_set_rhs1 (stmt, oe1->op);
+         gimple_assign_set_rhs2 (stmt, oe2->op);
          update_stmt (stmt);
 
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, " into ");
-             print_generic_stmt (dump_file, rhs, 0);
+             print_gimple_stmt (dump_file, stmt, 0, 0);
            }
 
        }
@@ -927,28 +933,27 @@ rewrite_expr_tree (tree stmt, unsigned int opindex,
   /* Rewrite the next operator.  */
   oe = VEC_index (operand_entry_t, ops, opindex);
 
-  if (oe->op != TREE_OPERAND (rhs, 1))
+  if (oe->op != rhs2)
     {
 
       if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Transforming ");
-         print_generic_expr (dump_file, rhs, 0);
+         print_gimple_stmt (dump_file, stmt, 0, 0);
        }
 
-      TREE_OPERAND (rhs, 1) = oe->op;
+      gimple_assign_set_rhs2 (stmt, oe->op);
       update_stmt (stmt);
 
       if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, " into ");
-         print_generic_stmt (dump_file, rhs, 0);
+         print_gimple_stmt (dump_file, stmt, 0, 0);
        }
     }
   /* Recurse on the LHS of the binary operator, which is guaranteed to
      be the non-leaf side.  */
-  rewrite_expr_tree (SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0)),
-                    opindex + 1, ops);
+  rewrite_expr_tree (SSA_NAME_DEF_STMT (rhs1), opindex + 1, ops);
 }
 
 /* Transform STMT, which is really (A + B) + (C + D) into the left
@@ -956,114 +961,114 @@ rewrite_expr_tree (tree stmt, unsigned int opindex,
    Recurse on D if necessary.  */
 
 static void
-linearize_expr (tree stmt)
+linearize_expr (gimple stmt)
 {
-  block_stmt_iterator bsinow, bsirhs;
-  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-  enum tree_code rhscode = TREE_CODE (rhs);
-  tree binrhs = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 1));
-  tree binlhs = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
-  tree newbinrhs = NULL_TREE;
+  gimple_stmt_iterator gsinow, gsirhs;
+  gimple binlhs = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
+  gimple binrhs = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
+  enum tree_code rhscode = gimple_assign_rhs_code (stmt);
+  gimple newbinrhs = NULL;
   struct loop *loop = loop_containing_stmt (stmt);
 
-  gcc_assert (is_reassociable_op (binlhs, TREE_CODE (rhs), loop)
-             && is_reassociable_op (binrhs, TREE_CODE (rhs), loop));
+  gcc_assert (is_reassociable_op (binlhs, rhscode, loop)
+             && is_reassociable_op (binrhs, rhscode, loop));
+
+  gsinow = gsi_for_stmt (stmt);
+  gsirhs = gsi_for_stmt (binrhs);
+  gsi_move_before (&gsirhs, &gsinow);
 
-  bsinow = bsi_for_stmt (stmt);
-  bsirhs = bsi_for_stmt (binrhs);
-  bsi_move_before (&bsirhs, &bsinow);
+  gimple_assign_set_rhs2 (stmt, gimple_assign_rhs1 (binrhs));
+  gimple_assign_set_rhs1 (binrhs, gimple_assign_lhs (binlhs));
+  gimple_assign_set_rhs1 (stmt, gimple_assign_lhs (binrhs));
 
-  TREE_OPERAND (rhs, 1) = TREE_OPERAND (GIMPLE_STMT_OPERAND (binrhs, 1), 0);
-  if (TREE_CODE (TREE_OPERAND (rhs, 1)) == SSA_NAME)
-    newbinrhs = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 1));
-  TREE_OPERAND (GIMPLE_STMT_OPERAND (binrhs, 1), 0)
-    = GIMPLE_STMT_OPERAND (binlhs, 0);
-  TREE_OPERAND (rhs, 0) = GIMPLE_STMT_OPERAND (binrhs, 0);
+  if (TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME)
+    newbinrhs = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
 
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
       fprintf (dump_file, "Linearized: ");
-      print_generic_stmt (dump_file, rhs, 0);
+      print_gimple_stmt (dump_file, stmt, 0, 0);
     }
 
   reassociate_stats.linearized++;
   update_stmt (binrhs);
   update_stmt (binlhs);
   update_stmt (stmt);
-  TREE_VISITED (binrhs) = 1;
-  TREE_VISITED (binlhs) = 1;
-  TREE_VISITED (stmt) = 1;
+
+  gimple_set_visited (stmt, true);
+  gimple_set_visited (binlhs, true);
+  gimple_set_visited (binrhs, true);
 
   /* Tail recurse on the new rhs if it still needs reassociation.  */
   if (newbinrhs && is_reassociable_op (newbinrhs, rhscode, loop))
+    /* ??? This should probably be linearize_expr (newbinrhs) but I don't
+          want to change the algorithm while converting to tuples.  */
     linearize_expr (stmt);
 }
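Concretely, with invented SSA names, the three set_rhs calls above plus the gsi_move_before turn a balanced (A + B) + (C + D) into a left-linear chain:

    before:                     after:
      x_1 = a_2 + b_3;            x_1 = a_2 + b_3;
      y_4 = c_5 + d_6;            y_4 = x_1 + d_6;
      z_7 = x_1 + y_4;            z_7 = y_4 + c_5;

i.e. ((a_2 + b_3) + d_6) + c_5; the tail call then re-examines the statement in case c_5's definition is itself a reassociable operation of the same code.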
 
-/* If LHS has a single immediate use that is a GIMPLE_MODIFY_STMT, return
+/* If LHS has a single immediate use that is a GIMPLE_ASSIGN statement, return
    it.  Otherwise, return NULL.  */
 
-static tree
+static gimple
 get_single_immediate_use (tree lhs)
 {
   use_operand_p immuse;
-  tree immusestmt;
+  gimple immusestmt;
 
   if (TREE_CODE (lhs) == SSA_NAME
-      && single_imm_use (lhs, &immuse, &immusestmt))
-    {
-      if (TREE_CODE (immusestmt) == RETURN_EXPR)
-       immusestmt = TREE_OPERAND (immusestmt, 0);
-      if (TREE_CODE (immusestmt) == GIMPLE_MODIFY_STMT)
-       return immusestmt;
-    }
-  return NULL_TREE;
+      && single_imm_use (lhs, &immuse, &immusestmt)
+      && is_gimple_assign (immusestmt))
+    return immusestmt;
+
+  return NULL;
 }
-static VEC(tree, heap) *broken_up_subtracts;
 
+static VEC(tree, heap) *broken_up_subtracts;
 
 /* Recursively negate the value of TONEGATE, and return the SSA_NAME
    representing the negated value.  Insertions of any necessary
-   instructions go before BSI.
+   instructions go before GSI.
    This function is recursive in that, if you hand it "a_5" as the
    value to negate, and a_5 is defined by "a_5 = b_3 + b_4", it will
    transform b_3 + b_4 into a_5 = -b_3 + -b_4.  */
 
 static tree
-negate_value (tree tonegate, block_stmt_iterator *bsi)
+negate_value (tree tonegate, gimple_stmt_iterator *gsi)
 {
-  tree negatedef = tonegate;
+  gimple negatedefstmt = NULL;
   tree resultofnegate;
 
-  if (TREE_CODE (tonegate) == SSA_NAME)
-    negatedef = SSA_NAME_DEF_STMT (tonegate);
-
   /* If we are trying to negate a name, defined by an add, negate the
      add operands instead.  */
+  if (TREE_CODE (tonegate) == SSA_NAME)
+    negatedefstmt = SSA_NAME_DEF_STMT (tonegate);
   if (TREE_CODE (tonegate) == SSA_NAME
-      && TREE_CODE (negatedef) == GIMPLE_MODIFY_STMT
-      && TREE_CODE (GIMPLE_STMT_OPERAND (negatedef, 0)) == SSA_NAME
-      && has_single_use (GIMPLE_STMT_OPERAND (negatedef, 0))
-      && TREE_CODE (GIMPLE_STMT_OPERAND (negatedef, 1)) == PLUS_EXPR)
+      && is_gimple_assign (negatedefstmt)
+      && TREE_CODE (gimple_assign_lhs (negatedefstmt)) == SSA_NAME
+      && has_single_use (gimple_assign_lhs (negatedefstmt))
+      && gimple_assign_rhs_code (negatedefstmt) == PLUS_EXPR)
     {
-      block_stmt_iterator bsi;
-      tree binop = GIMPLE_STMT_OPERAND (negatedef, 1);
-
-      bsi = bsi_for_stmt (negatedef);
-      TREE_OPERAND (binop, 0) = negate_value (TREE_OPERAND (binop, 0),
-                                             &bsi);
-      bsi = bsi_for_stmt (negatedef);
-      TREE_OPERAND (binop, 1) = negate_value (TREE_OPERAND (binop, 1),
-                                             &bsi);
-      update_stmt (negatedef);
-      return GIMPLE_STMT_OPERAND (negatedef, 0);
+      gimple_stmt_iterator gsi;
+      tree rhs1 = gimple_assign_rhs1 (negatedefstmt);
+      tree rhs2 = gimple_assign_rhs2 (negatedefstmt);
+
+      gsi = gsi_for_stmt (negatedefstmt);
+      rhs1 = negate_value (rhs1, &gsi);
+      gimple_assign_set_rhs1 (negatedefstmt, rhs1);
+
+      gsi = gsi_for_stmt (negatedefstmt);
+      rhs2 = negate_value (rhs2, &gsi);
+      gimple_assign_set_rhs2 (negatedefstmt, rhs2);
+
+      update_stmt (negatedefstmt);
+      return gimple_assign_lhs (negatedefstmt);
     }
 
   tonegate = fold_build1 (NEGATE_EXPR, TREE_TYPE (tonegate), tonegate);
-  resultofnegate = force_gimple_operand_bsi (bsi, tonegate, true,
-                                            NULL_TREE, true, BSI_SAME_STMT);
+  resultofnegate = force_gimple_operand_gsi (gsi, tonegate, true,
+                                            NULL_TREE, true, GSI_SAME_STMT);
   VEC_safe_push (tree, heap, broken_up_subtracts, resultofnegate);
   return resultofnegate;
-
 }
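A hand-worked illustration with invented SSA names: when the subtract below is broken up (see break_up_subtract further down), a_5 is handed to negate_value, and because a_5 is a single-use PLUS_EXPR the negation is pushed into its operands instead of materialising -a_5:

    before:                     after:
      a_5 = b_3 + b_4;            t_10 = -b_3;
      x_9 = c_2 - a_5;            t_11 = -b_4;
                                  a_5  = t_10 + t_11;
                                  x_9  = c_2 + a_5;

The two NEGATE_EXPRs are forced to temporaries by force_gimple_operand_gsi just before a_5's definition and recorded in broken_up_subtracts; the value returned for a_5 is a_5 itself, so the original add is reused.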
 
 /* Return true if we should break up the subtract in STMT into an add
@@ -1073,14 +1078,12 @@ negate_value (tree tonegate, block_stmt_iterator *bsi)
    exposes the adds to reassociation.  */
 
 static bool
-should_break_up_subtract (tree stmt)
+should_break_up_subtract (gimple stmt)
 {
-
-  tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
-  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-  tree binlhs = TREE_OPERAND (rhs, 0);
-  tree binrhs = TREE_OPERAND (rhs, 1);
-  tree immusestmt;
+  tree lhs = gimple_assign_lhs (stmt);
+  tree binlhs = gimple_assign_rhs1 (stmt);
+  tree binrhs = gimple_assign_rhs2 (stmt);
+  gimple immusestmt;
   struct loop *loop = loop_containing_stmt (stmt);
 
   if (TREE_CODE (binlhs) == SSA_NAME
@@ -1093,28 +1096,28 @@ should_break_up_subtract (tree stmt)
 
   if (TREE_CODE (lhs) == SSA_NAME
       && (immusestmt = get_single_immediate_use (lhs))
-      && TREE_CODE (GIMPLE_STMT_OPERAND (immusestmt, 1)) == PLUS_EXPR)
+      && is_gimple_assign (immusestmt)
+      && gimple_assign_rhs_code (immusestmt) == PLUS_EXPR)
     return true;
   return false;
-
 }
 
 /* Transform STMT from A - B into A + -B.  */
 
 static void
-break_up_subtract (tree stmt, block_stmt_iterator *bsi)
+break_up_subtract (gimple stmt, gimple_stmt_iterator *gsip)
 {
-  tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
+  tree rhs1 = gimple_assign_rhs1 (stmt);
+  tree rhs2 = gimple_assign_rhs2 (stmt);
 
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
       fprintf (dump_file, "Breaking up subtract ");
-      print_generic_stmt (dump_file, stmt, 0);
+      print_gimple_stmt (dump_file, stmt, 0, 0);
     }
 
-  TREE_SET_CODE (GIMPLE_STMT_OPERAND (stmt, 1), PLUS_EXPR);
-  TREE_OPERAND (rhs, 1) = negate_value (TREE_OPERAND (rhs, 1), bsi);
-
+  rhs2 = negate_value (rhs2, gsip);
+  gimple_assign_set_rhs_with_ops (gsip, PLUS_EXPR, rhs1, rhs2);
   update_stmt (stmt);
 }
 
@@ -1122,19 +1125,18 @@ break_up_subtract (tree stmt, block_stmt_iterator *bsi)
    Place the operands of the expression tree in the vector named OPS.  */
 
 static void
-linearize_expr_tree (VEC(operand_entry_t, heap) **ops, tree stmt)
+linearize_expr_tree (VEC(operand_entry_t, heap) **ops, gimple stmt)
 {
-  block_stmt_iterator bsinow, bsilhs;
-  tree rhs = GENERIC_TREE_OPERAND (stmt, 1);
-  tree binrhs = TREE_OPERAND (rhs, 1);
-  tree binlhs = TREE_OPERAND (rhs, 0);
-  tree binlhsdef, binrhsdef;
+  gimple_stmt_iterator gsinow, gsilhs;
+  tree binlhs = gimple_assign_rhs1 (stmt);
+  tree binrhs = gimple_assign_rhs2 (stmt);
+  gimple binlhsdef, binrhsdef;
   bool binlhsisreassoc = false;
   bool binrhsisreassoc = false;
-  enum tree_code rhscode = TREE_CODE (rhs);
+  enum tree_code rhscode = gimple_assign_rhs_code (stmt);
   struct loop *loop = loop_containing_stmt (stmt);
 
-  TREE_VISITED (stmt) = 1;
+  gimple_set_visited (stmt, true);
 
   if (TREE_CODE (binlhs) == SSA_NAME)
     {
@@ -1168,17 +1170,18 @@ linearize_expr_tree (VEC(operand_entry_t, heap) **ops, tree stmt)
       if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "swapping operands of ");
-         print_generic_expr (dump_file, stmt, 0);
+         print_gimple_stmt (dump_file, stmt, 0, 0);
        }
 
-      swap_tree_operands (stmt, &TREE_OPERAND (rhs, 0),
-                         &TREE_OPERAND (rhs, 1));
+      swap_tree_operands (stmt,
+                         gimple_assign_rhs1_ptr (stmt),
+                         gimple_assign_rhs2_ptr (stmt));
       update_stmt (stmt);
 
       if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, " is now ");
-         print_generic_stmt (dump_file, stmt, 0);
+         print_gimple_stmt (dump_file, stmt, 0, 0);
        }
 
       /* We want to make it so the lhs is always the reassociative op,
@@ -1190,17 +1193,16 @@ linearize_expr_tree (VEC(operand_entry_t, heap) **ops, tree stmt)
   else if (binrhsisreassoc)
     {
       linearize_expr (stmt);
-      gcc_assert (rhs == GIMPLE_STMT_OPERAND (stmt, 1));
-      binlhs = TREE_OPERAND (rhs, 0);
-      binrhs = TREE_OPERAND (rhs, 1);
+      binlhs = gimple_assign_rhs1 (stmt);
+      binrhs = gimple_assign_rhs2 (stmt);
     }
 
   gcc_assert (TREE_CODE (binrhs) != SSA_NAME
              || !is_reassociable_op (SSA_NAME_DEF_STMT (binrhs),
                                      rhscode, loop));
-  bsinow = bsi_for_stmt (stmt);
-  bsilhs = bsi_for_stmt (SSA_NAME_DEF_STMT (binlhs));
-  bsi_move_before (&bsilhs, &bsinow);
+  gsinow = gsi_for_stmt (stmt);
+  gsilhs = gsi_for_stmt (SSA_NAME_DEF_STMT (binlhs));
+  gsi_move_before (&gsilhs, &gsinow);
   linearize_expr_tree (ops, SSA_NAME_DEF_STMT (binlhs));
   add_to_ops_vec (ops, binrhs);
 }
@@ -1216,7 +1218,7 @@ repropagate_negates (void)
 
   for (i = 0; VEC_iterate (tree, broken_up_subtracts, i, negate); i++)
     {
-      tree user = get_single_immediate_use (negate);
+      gimple user = get_single_immediate_use (negate);
 
       /* The negate operand can be either operand of a PLUS_EXPR
         (it can be the LHS if the RHS is a constant for example).
@@ -1224,27 +1226,27 @@ repropagate_negates (void)
         Force the negate operand to the RHS of the PLUS_EXPR, then
         transform the PLUS_EXPR into a MINUS_EXPR.  */
       if (user
-         && TREE_CODE (user) == GIMPLE_MODIFY_STMT
-         && TREE_CODE (GIMPLE_STMT_OPERAND (user, 1)) == PLUS_EXPR)
+         && is_gimple_assign (user)
+         && gimple_assign_rhs_code (user) == PLUS_EXPR)
        {
-         tree rhs = GIMPLE_STMT_OPERAND (user, 1);
-
          /* If the negated operand appears on the LHS of the
             PLUS_EXPR, exchange the operands of the PLUS_EXPR
             to force the negated operand to the RHS of the PLUS_EXPR.  */
-         if (TREE_OPERAND (GIMPLE_STMT_OPERAND (user, 1), 0) == negate)
+         if (gimple_assign_rhs1 (user) == negate)
            {
-             tree temp = TREE_OPERAND (rhs, 0);
-             TREE_OPERAND (rhs, 0) = TREE_OPERAND (rhs, 1);
-             TREE_OPERAND (rhs, 1) = temp;
+             swap_tree_operands (user,
+                                 gimple_assign_rhs1_ptr (user),
+                                 gimple_assign_rhs2_ptr (user));
            }
 
          /* Now transform the PLUS_EXPR into a MINUS_EXPR and replace
             the RHS of the PLUS_EXPR with the operand of the NEGATE_EXPR.  */
-         if (TREE_OPERAND (GIMPLE_STMT_OPERAND (user, 1), 1) == negate)
+         if (gimple_assign_rhs2 (user) == negate)
            {
-             TREE_SET_CODE (rhs, MINUS_EXPR);
-             TREE_OPERAND (rhs, 1) = get_unary_op (negate, NEGATE_EXPR);
+             tree rhs1 = gimple_assign_rhs1 (user);
+             tree rhs2 = get_unary_op (negate, NEGATE_EXPR);
+             gimple_stmt_iterator gsi = gsi_for_stmt (user);
+             gimple_assign_set_rhs_with_ops (&gsi, MINUS_EXPR, rhs1, rhs2);
              update_stmt (user);
            }
        }
@@ -1264,43 +1266,50 @@ repropagate_negates (void)
    k = t - q
    
    we want to break up k = t - q, but we won't until we've transformed q
-   = b - r, which won't be broken up until we transform b = c - d.  */
+   = b - r, which won't be broken up until we transform b = c - d.
+
+   En passant, clear the GIMPLE visited flag on every statement.  */
 
 static void
 break_up_subtract_bb (basic_block bb)
 {
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   basic_block son;
 
-  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
     {
-      tree stmt = bsi_stmt (bsi);
+      gimple stmt = gsi_stmt (gsi);
+      gimple_set_visited (stmt, false);
 
-      if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
+      /* Look for simple gimple subtract operations.  */
+      if (is_gimple_assign (stmt)
+         && gimple_assign_rhs_code (stmt) == MINUS_EXPR)
        {
-         tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
-         tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
+         tree lhs = gimple_assign_lhs (stmt);
+         tree rhs1 = gimple_assign_rhs1 (stmt);
+         tree rhs2 = gimple_assign_rhs2 (stmt);
 
-         TREE_VISITED (stmt) = 0;
          /* If associative-math we can do reassociation for
             non-integral types.  Or, we can do reassociation for
             non-saturating fixed-point types.  */
          if ((!INTEGRAL_TYPE_P (TREE_TYPE (lhs))
-              || !INTEGRAL_TYPE_P (TREE_TYPE (rhs)))
-             && (!SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs))
-                 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE(lhs))
+              || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
+              || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2)))
+             && (!SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs))
+                 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE(rhs1))
+                 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE(rhs2))
                  || !flag_associative_math)
-             && (!NON_SAT_FIXED_POINT_TYPE_P (TREE_TYPE (rhs))
-                 || !NON_SAT_FIXED_POINT_TYPE_P (TREE_TYPE(lhs))))
+             && (!NON_SAT_FIXED_POINT_TYPE_P (TREE_TYPE (lhs))
+                 || !NON_SAT_FIXED_POINT_TYPE_P (TREE_TYPE(rhs1))
+                 || !NON_SAT_FIXED_POINT_TYPE_P (TREE_TYPE(rhs2))))
            continue;
 
          /* Check for a subtract used only in an addition.  If this
             is the case, transform it into add of a negate for better
             reassociation.  IE transform C = A-B into C = A + -B if C
             is only used in an addition.  */
-         if (TREE_CODE (rhs) == MINUS_EXPR)
-           if (should_break_up_subtract (stmt))
-             break_up_subtract (stmt, &bsi);
+         if (should_break_up_subtract (stmt))
+           break_up_subtract (stmt, &gsi);
        }
     }
   for (son = first_dom_son (CDI_DOMINATORS, bb);
@@ -1315,36 +1324,48 @@ break_up_subtract_bb (basic_block bb)
 static void
 reassociate_bb (basic_block bb)
 {
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   basic_block son;
 
-  for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
+  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
     {
-      tree stmt = bsi_stmt (bsi);
+      gimple stmt = gsi_stmt (gsi);
 
-      if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
+      if (is_gimple_assign (stmt))
        {
-         tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
-         tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
+         tree lhs, rhs1, rhs2;
+         enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
 
-         /* If this was part of an already processed tree, we don't
-            need to touch it again. */
-         if (TREE_VISITED (stmt))
+         /* If this is not a gimple binary expression, there is
+            nothing for us to do with it.  */
+         if (get_gimple_rhs_class (rhs_code) != GIMPLE_BINARY_RHS)
            continue;
 
+         /* If this was part of an already processed statement,
+            we don't need to touch it again. */
+         if (gimple_visited_p (stmt))
+           continue;
+
+         lhs = gimple_assign_lhs (stmt);
+         rhs1 = gimple_assign_rhs1 (stmt);
+         rhs2 = gimple_assign_rhs2 (stmt);
+
          /* If associative-math we can do reassociation for
             non-integral types.  Or, we can do reassociation for
             non-saturating fixed-point types.  */
          if ((!INTEGRAL_TYPE_P (TREE_TYPE (lhs))
-              || !INTEGRAL_TYPE_P (TREE_TYPE (rhs)))
-             && (!SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs))
-                 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE(lhs))
+              || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
+              || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2)))
+             && (!SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs))
+                 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE(rhs1))
+                 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE(rhs2))
                  || !flag_associative_math)
-             && (!NON_SAT_FIXED_POINT_TYPE_P (TREE_TYPE (rhs))
-                 || !NON_SAT_FIXED_POINT_TYPE_P (TREE_TYPE(lhs))))
+             && (!NON_SAT_FIXED_POINT_TYPE_P (TREE_TYPE (lhs))
+                 || !NON_SAT_FIXED_POINT_TYPE_P (TREE_TYPE(rhs1))
+                 || !NON_SAT_FIXED_POINT_TYPE_P (TREE_TYPE(rhs2))))
            continue;
 
-         if (associative_tree_code (TREE_CODE (rhs)))
+         if (associative_tree_code (rhs_code))
            {
              VEC(operand_entry_t, heap) *ops = NULL;
 
@@ -1353,30 +1374,31 @@ reassociate_bb (basic_block bb)
              if (TREE_CODE (lhs) == SSA_NAME && has_zero_uses (lhs))
                continue;
 
-             TREE_VISITED (stmt) = 1;
+             gimple_set_visited (stmt, true);
              linearize_expr_tree (&ops, stmt);
              qsort (VEC_address (operand_entry_t, ops),
                     VEC_length (operand_entry_t, ops),
                     sizeof (operand_entry_t),
                     sort_by_operand_rank);
-             optimize_ops_list (TREE_CODE (rhs), &ops);
+             optimize_ops_list (rhs_code, &ops);
 
              if (VEC_length (operand_entry_t, ops) == 1)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Transforming ");
-                     print_generic_expr (dump_file, rhs, 0);
+                     print_gimple_stmt (dump_file, stmt, 0, 0);
                    }
-                 GIMPLE_STMT_OPERAND (stmt, 1) 
-                   = VEC_last (operand_entry_t, ops)->op;
+                 
+                 gimple_assign_set_rhs_from_tree (&gsi,
+                                                  VEC_last (operand_entry_t,
+                                                            ops)->op);
                  update_stmt (stmt);
 
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, " into ");
-                     print_generic_stmt (dump_file,
-                                         GIMPLE_STMT_OPERAND (stmt, 1), 0);
+                     print_gimple_stmt (dump_file, stmt, 0, 0);
                    }
                }
              else
@@ -1408,7 +1430,7 @@ dump_ops_vector (FILE *file, VEC (operand_entry_t, heap) *ops)
   for (i = 0; VEC_iterate (operand_entry_t, ops, i, oe); i++)
     {
       fprintf (file, "Op %d -> rank: %d, tree: ", i, oe->rank);
-      print_generic_stmt (file, oe->op, 0);
+      print_generic_expr (file, oe->op, 0);
     }
 }
 
@@ -1542,3 +1564,4 @@ struct gimple_opt_pass pass_reassoc =
   TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */
  }
 };
+
index 85ceb7e..42d394f 100644
@@ -29,7 +29,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "diagnostic.h"
 #include "tree-inline.h"
 #include "tree-flow.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "tree-dump.h"
 #include "timevar.h"
 #include "fibheap.h"
@@ -210,6 +210,86 @@ VN_INFO_GET (tree name)
 }
 
 
+/* Get the representative expression for the SSA_NAME NAME.  Returns
+   the representative SSA_NAME if there is no expression associated with it.  */
+
+tree
+vn_get_expr_for (tree name)
+{
+  vn_ssa_aux_t vn = VN_INFO (name);
+  gimple def_stmt;
+  tree expr = NULL_TREE;
+
+  if (vn->valnum == VN_TOP)
+    return name;
+
+  /* If the value-number is a constant it is the representative
+     expression.  */
+  if (TREE_CODE (vn->valnum) != SSA_NAME)
+    return vn->valnum;
+
+  /* Get to the information of the value of this SSA_NAME.  */
+  vn = VN_INFO (vn->valnum);
+
+  /* If the value-number is a constant it is the representative
+     expression.  */
+  if (TREE_CODE (vn->valnum) != SSA_NAME)
+    return vn->valnum;
+
+  /* Else if we have an expression, return it.  */
+  if (vn->expr != NULL_TREE)
+    return vn->expr;
+
+  /* Otherwise use the defining statement to build the expression.  */
+  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);
+
+  /* If the value number is a default-definition or a PHI result
+     use it directly.  */
+  if (gimple_nop_p (def_stmt)
+      || gimple_code (def_stmt) == GIMPLE_PHI)
+    return vn->valnum;
+
+  if (!is_gimple_assign (def_stmt))
+    return vn->valnum;
+
+  /* FIXME tuples.  This is incomplete and likely will miss some
+     simplifications.  */
+  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
+    {
+    case tcc_reference:
+      if (gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
+         || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
+         || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
+       expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
+                           gimple_expr_type (def_stmt),
+                           TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
+      break;
+
+    case tcc_unary:
+      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
+                         gimple_expr_type (def_stmt),
+                         gimple_assign_rhs1 (def_stmt));
+      break;
+
+    case tcc_binary:
+      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
+                         gimple_expr_type (def_stmt),
+                         gimple_assign_rhs1 (def_stmt),
+                         gimple_assign_rhs2 (def_stmt));
+      break;
+
+    default:;
+    }
+  if (expr == NULL_TREE)
+    return vn->valnum;
+
+  /* Cache the expression.  */
+  vn->expr = expr;
+
+  return expr;
+}
+
+
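A minimal sketch of how a simplifier might consume the representative expression, assuming GCC's internal headers; simplify_negate_via_vn is an invented helper, not part of this patch:

    /* Illustrative only: build a folded negation of NAME's representative
       expression.  vn_get_expr_for hands back NAME itself (for VN_TOP), a
       constant value number, or a tree rebuilt from the value number's
       defining statement.  */
    static tree
    simplify_negate_via_vn (tree name)
    {
      tree expr = vn_get_expr_for (name);
      return fold_build1 (NEGATE_EXPR, TREE_TYPE (expr), expr);
    }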
 /* Free a phi operation structure VP.  */
 
 static void
@@ -236,7 +316,7 @@ vn_constant_eq (const void *p1, const void *p2)
   const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
   const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;
 
-  return expressions_equal_p (vc1->constant, vc2->constant);
+  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
 }
 
 /* Hash table hash function for vn_constant_t.  */
@@ -256,8 +336,8 @@ get_constant_value_id (tree constant)
 {
   void **slot;
   struct vn_constant_s vc;
-  
-  vc.hashcode = iterative_hash_expr (constant, 0);
+
+  vc.hashcode = vn_hash_constant_with_type (constant);
   vc.constant = constant;
   slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
@@ -275,7 +355,7 @@ get_or_alloc_constant_value_id (tree constant)
   void **slot;
   vn_constant_t vc = XNEW (struct vn_constant_s);
   
-  vc->hashcode = iterative_hash_expr (constant, 0);
+  vc->hashcode = vn_hash_constant_with_type (constant);
   vc->constant = constant;
   slot = htab_find_slot_with_hash (constant_to_value_id, vc,
                                   vc->hashcode, INSERT);  
@@ -399,7 +479,7 @@ vn_reference_eq (const void *p1, const void *p2)
 /* Place the vuses from STMT into *result.  */
 
 static inline void
-vuses_to_vec (tree stmt, VEC (tree, gc) **result)
+vuses_to_vec (gimple stmt, VEC (tree, gc) **result)
 {
   ssa_op_iter iter;
   tree vuse;
@@ -419,7 +499,7 @@ vuses_to_vec (tree stmt, VEC (tree, gc) **result)
    the vector.  */
 
 VEC (tree, gc) *
-copy_vuses_from_stmt (tree stmt)
+copy_vuses_from_stmt (gimple stmt)
 {
   VEC (tree, gc) *vuses = NULL;
 
@@ -431,7 +511,7 @@ copy_vuses_from_stmt (tree stmt)
 /* Place the vdefs from STMT into *result.  */
 
 static inline void
-vdefs_to_vec (tree stmt, VEC (tree, gc) **result)
+vdefs_to_vec (gimple stmt, VEC (tree, gc) **result)
 {
   ssa_op_iter iter;
   tree vdef;
@@ -449,7 +529,7 @@ vdefs_to_vec (tree stmt, VEC (tree, gc) **result)
    the vector.  */
 
 static VEC (tree, gc) *
-copy_vdefs_from_stmt (tree stmt)
+copy_vdefs_from_stmt (gimple stmt)
 {
   VEC (tree, gc) *vdefs = NULL;
 
@@ -466,7 +546,7 @@ static VEC (tree, gc) *shared_lookup_vops;
    variable.  */
 
 VEC (tree, gc) *
-shared_vuses_from_stmt (tree stmt)
+shared_vuses_from_stmt (gimple stmt)
 {
   VEC_truncate (tree, shared_lookup_vops, 0);
   vuses_to_vec (stmt, &shared_lookup_vops);
@@ -474,54 +554,12 @@ shared_vuses_from_stmt (tree stmt)
   return shared_lookup_vops;
 }
 
-/* Copy the operations present in load/store/call REF into RESULT, a vector of
+/* Copy the operations present in load/store REF into RESULT, a vector of
    vn_reference_op_s's.  */
 
 static void
 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
 {
-  /* Calls are different from all other reference operations.  */
-  if (TREE_CODE (ref) == CALL_EXPR)
-    {
-      vn_reference_op_s temp;
-      tree callfn;
-      call_expr_arg_iterator iter;
-      tree callarg;
-
-      /* Copy the call_expr opcode, type, function being called, and
-        arguments.  */
-      memset (&temp, 0, sizeof (temp));
-      temp.type = TREE_TYPE (ref);
-      temp.opcode = CALL_EXPR;
-      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
-      
-      /* We make no attempt to simplify the called function because
-      the typical &FUNCTION_DECL form is also used in function pointer
-      cases that become constant.  If we simplify the original to
-      FUNCTION_DECL but not the function pointer case (which can
-      happen because we have no fold functions that operate on
-      vn_reference_t), we will claim they are not equivalent.
-
-      An example of this behavior can be see if CALL_EXPR_FN below is
-      replaced with get_callee_fndecl and gcc.dg/tree-ssa/ssa-pre-13.c
-      is compiled.  */
-      callfn = CALL_EXPR_FN (ref);
-      temp.type = TREE_TYPE (callfn);
-      temp.opcode = TREE_CODE (callfn);
-      temp.op0 = callfn;
-      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
-
-      FOR_EACH_CALL_EXPR_ARG (callarg, iter, ref)
-       {
-         memset (&temp, 0, sizeof (temp));
-         temp.type = TREE_TYPE (callarg);
-         temp.opcode = TREE_CODE (callarg);
-         temp.op0 = callarg;
-         VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
-       }
-      return;
-    }
-
   if (TREE_CODE (ref) == TARGET_MEM_REF)
     {
       vn_reference_op_s temp;
@@ -631,6 +669,53 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
     }
 }
 
+/* Copy the operations present in load/store/call REF into RESULT, a vector of
+   vn_reference_op_s's.  */
+
+void
+copy_reference_ops_from_call (gimple call,
+                             VEC(vn_reference_op_s, heap) **result)
+{
+  vn_reference_op_s temp;
+  tree callfn;
+  unsigned i;
+
+  /* Copy the call_expr opcode, type, function being called, and
+     arguments.  */
+  memset (&temp, 0, sizeof (temp));
+  temp.type = gimple_call_return_type (call);
+  temp.opcode = CALL_EXPR;
+  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
+
+  /* FIXME tuples
+     We make no attempt to simplify the called function because
+     the typical &FUNCTION_DECL form is also used in function pointer
+     cases that become constant.  If we simplify the original to
+     FUNCTION_DECL but not the function pointer case (which can
+     happen because we have no fold functions that operate on
+     vn_reference_t), we will claim they are not equivalent.
+
+     An example of this behavior can be seen if CALL_EXPR_FN below is
+     replaced with get_callee_fndecl and gcc.dg/tree-ssa/ssa-pre-13.c
+     is compiled.  */
+  callfn = gimple_call_fn (call);
+  temp.type = TREE_TYPE (callfn);
+  temp.opcode = TREE_CODE (callfn);
+  temp.op0 = callfn;
+  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
+
+  for (i = 0; i < gimple_call_num_args (call); ++i)
+    {
+      tree callarg = gimple_call_arg (call, i);
+      memset (&temp, 0, sizeof (temp));
+      temp.type = TREE_TYPE (callarg);
+      temp.opcode = TREE_CODE (callarg);
+      temp.op0 = callarg;
+      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
+    }
+  return;
+}
+
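To make the flattening concrete: for a call x_1 = foo (a_2, 7) the vector gets one entry for the CALL_EXPR header (typed with the call's return type), one for the function expression (&foo for a direct call), and one per argument.  A small sketch that only counts those entries, assuming GCC's internal headers; count_call_reference_ops is an invented name:

    /* Illustrative only: flatten CALL's operands and report how many
       vn_reference_op_s entries were produced (header + callee + args).  */
    static unsigned
    count_call_reference_ops (gimple call)
    {
      VEC (vn_reference_op_s, heap) *ops = NULL;
      unsigned n;

      copy_reference_ops_from_call (call, &ops);
      n = VEC_length (vn_reference_op_s, ops);
      VEC_free (vn_reference_op_s, heap, ops);
      return n;
    }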
 /* Create a vector of vn_reference_op_s structures from REF, a
    REFERENCE_CLASS_P tree.  The vector is not shared. */
 
@@ -643,6 +728,18 @@ create_reference_ops_from_ref (tree ref)
   return result;
 }
 
+/* Create a vector of vn_reference_op_s structures from CALL, a
+   call statement.  The vector is not shared.  */
+
+static VEC(vn_reference_op_s, heap) *
+create_reference_ops_from_call (gimple call)
+{
+  VEC (vn_reference_op_s, heap) *result = NULL;
+
+  copy_reference_ops_from_call (call, &result);
+  return result;
+}
+
 static VEC(vn_reference_op_s, heap) *shared_lookup_references;
 
 /* Create a vector of vn_reference_op_s structures from REF, a
@@ -659,6 +756,20 @@ shared_reference_ops_from_ref (tree ref)
   return shared_lookup_references;
 }
 
+/* Create a vector of vn_reference_op_s structures from CALL, a
+   call statement.  The vector is shared among all callers of
+   this function.  */
+
+static VEC(vn_reference_op_s, heap) *
+shared_reference_ops_from_call (gimple call)
+{
+  if (!call)
+    return NULL;
+  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
+  copy_reference_ops_from_call (call, &shared_lookup_references);
+  return shared_lookup_references;
+}
+
 
 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
    structures into their value numbers.  This is done in-place, and
@@ -720,16 +831,17 @@ valueize_vuses (VEC (tree, gc) *orig)
    Take into account only definitions that alias REF if following
    back-edges.  */
 
-static tree
+static gimple
 get_def_ref_stmt_vuses (tree ref, VEC (tree, gc) *vuses)
 {
-  tree def_stmt, vuse;
+  gimple def_stmt;
+  tree vuse;
   unsigned int i;
 
   gcc_assert (VEC_length (tree, vuses) >= 1);
 
   def_stmt = SSA_NAME_DEF_STMT (VEC_index (tree, vuses, 0));
-  if (TREE_CODE (def_stmt) == PHI_NODE)
+  if (gimple_code (def_stmt) == GIMPLE_PHI)
     {
       /* We can only handle lookups over PHI nodes for a single
         virtual operand.  */
@@ -739,23 +851,22 @@ get_def_ref_stmt_vuses (tree ref, VEC (tree, gc) *vuses)
          goto cont;
        }
       else
-       return NULL_TREE;
+       return NULL;
     }
 
   /* Verify each VUSE reaches the same defining stmt.  */
   for (i = 1; VEC_iterate (tree, vuses, i, vuse); ++i)
     {
-      tree tmp = SSA_NAME_DEF_STMT (vuse);
+      gimple tmp = SSA_NAME_DEF_STMT (vuse);
       if (tmp != def_stmt)
-       return NULL_TREE;
+       return NULL;
     }
 
   /* Now see if the definition aliases ref, and loop until it does.  */
 cont:
   while (def_stmt
-        && TREE_CODE (def_stmt) == GIMPLE_MODIFY_STMT
-        && !get_call_expr_in (def_stmt)
-        && !refs_may_alias_p (ref, GIMPLE_STMT_OPERAND (def_stmt, 0)))
+        && is_gimple_assign (def_stmt)
+        && !refs_may_alias_p (ref, gimple_get_lhs (def_stmt)))
     def_stmt = get_single_def_stmt_with_phi (ref, def_stmt);
 
   return def_stmt;
@@ -823,7 +934,8 @@ vn_reference_lookup (tree op, VEC (tree, gc) *vuses, bool maywalk,
                     vn_reference_t *vnresult)
 {
   struct vn_reference_s vr1;
-  tree result, def_stmt;
+  tree result;
+  gimple def_stmt;
   if (vnresult)
     *vnresult = NULL;
 
@@ -838,12 +950,8 @@ vn_reference_lookup (tree op, VEC (tree, gc) *vuses, bool maywalk,
       && maywalk
       && vr1.vuses
       && VEC_length (tree, vr1.vuses) >= 1
-      && !get_call_expr_in (op)
       && (def_stmt = get_def_ref_stmt_vuses (op, vr1.vuses))
-      && TREE_CODE (def_stmt) == GIMPLE_MODIFY_STMT
-      /* If there is a call involved, op must be assumed to
-        be clobbered.  */
-      && !get_call_expr_in (def_stmt))
+      && is_gimple_assign (def_stmt))
     {
       /* We are now at an aliasing definition for the vuses we want to
         look up.  Re-do the lookup with the vdefs for this stmt.  */
@@ -1056,6 +1164,38 @@ vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
   return ((vn_nary_op_t)*slot)->result;
 }
 
+/* Lookup the rhs of STMT in the current hash table, and return the resulting
+   value number if it exists in the hash table.  Return NULL_TREE if
+   it does not exist in the hash table.  VNRESULT will contain the
+   vn_nary_op_t from the hashtable if it exists.  */
+
+tree
+vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
+{
+  void **slot;
+  struct vn_nary_op_s vno1;
+  unsigned i;
+
+  if (vnresult)
+    *vnresult = NULL;
+  vno1.opcode = gimple_assign_rhs_code (stmt);
+  vno1.length = gimple_num_ops (stmt) - 1;
+  vno1.type = TREE_TYPE (gimple_assign_lhs (stmt));
+  for (i = 0; i < vno1.length; ++i)
+    vno1.op[i] = gimple_op (stmt, i + 1);
+  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
+  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
+                                  NO_INSERT);
+  if (!slot && current_info == optimistic_info)
+    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
+                                    NO_INSERT);
+  if (!slot)
+    return NULL_TREE;
+  if (vnresult)
+    *vnresult = (vn_nary_op_t)*slot;
+  return ((vn_nary_op_t)*slot)->result;
+}
+
 /* Insert an n-ary operation into the current hash table using its
    pieces.  Return the vn_nary_op_t structure we created and put in
    the hashtable.  */
@@ -1127,6 +1267,36 @@ vn_nary_op_insert (tree op, tree result)
   return vno1;
 }
 
+/* Insert the rhs of STMT into the current hash table with a value number of
+   RESULT.  */
+
+vn_nary_op_t
+vn_nary_op_insert_stmt (gimple stmt, tree result)
+{
+  unsigned length = gimple_num_ops (stmt) - 1;
+  void **slot;
+  vn_nary_op_t vno1;
+  unsigned i;
+
+  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
+                                      (sizeof (struct vn_nary_op_s)
+                                       - sizeof (tree) * (4 - length)));
+  vno1->value_id = VN_INFO (result)->value_id;
+  vno1->opcode = gimple_assign_rhs_code (stmt);
+  vno1->length = length;
+  vno1->type = TREE_TYPE (gimple_assign_lhs (stmt));
+  for (i = 0; i < vno1->length; ++i)
+    vno1->op[i] = gimple_op (stmt, i + 1);
+  vno1->result = result;
+  vno1->hashcode = vn_nary_op_compute_hash (vno1);
+  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
+                                  INSERT);
+  gcc_assert (!*slot);
+
+  *slot = vno1;
+  return vno1;
+}
+
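An assignment such as a_1 = b_2 + c_3 is keyed here by its rhs code (PLUS_EXPR), its gimple_num_ops (stmt) - 1 operands and the type of its lhs, so a later occurrence of the same operation with equal operands hashes to the same slot.  A minimal sketch of the lookup-then-insert idiom the visit_*_op routines below follow, assuming GCC's internal headers; lookup_or_record_rhs is an invented name:

    /* Illustrative only: value-number the rhs of STMT, a GIMPLE_ASSIGN,
       recording it with its own lhs as the value if it has not been seen.  */
    static tree
    lookup_or_record_rhs (gimple stmt)
    {
      tree lhs = gimple_assign_lhs (stmt);
      tree result = vn_nary_op_lookup_stmt (stmt, NULL);

      if (result)
        return result;                     /* operation already has a value */

      vn_nary_op_insert_stmt (stmt, lhs);  /* record it, valued as its lhs */
      return lhs;
    }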
 /* Compute a hashcode for PHI operation VP1 and return it.  */
 
 static inline hashval_t
@@ -1192,23 +1362,23 @@ static VEC(tree, heap) *shared_lookup_phiargs;
    it does not exist in the hash table. */
 
 static tree
-vn_phi_lookup (tree phi)
+vn_phi_lookup (gimple phi)
 {
   void **slot;
   struct vn_phi_s vp1;
-  int i;
+  unsigned i;
 
   VEC_truncate (tree, shared_lookup_phiargs, 0);
 
   /* Canonicalize the SSA_NAME's to their value number.  */
-  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
+  for (i = 0; i < gimple_phi_num_args (phi); i++)
     {
       tree def = PHI_ARG_DEF (phi, i);
       def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
       VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
     }
   vp1.phiargs = shared_lookup_phiargs;
-  vp1.block = bb_for_stmt (phi);
+  vp1.block = gimple_bb (phi);
   vp1.hashcode = vn_phi_compute_hash (&vp1);
   slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
                                   NO_INSERT);
@@ -1224,15 +1394,15 @@ vn_phi_lookup (tree phi)
    RESULT.  */
 
 static vn_phi_t
-vn_phi_insert (tree phi, tree result)
+vn_phi_insert (gimple phi, tree result)
 {
   void **slot;
   vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
-  int i;
+  unsigned i;
   VEC (tree, heap) *args = NULL;
 
   /* Canonicalize the SSA_NAME's to their value number.  */
-  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
+  for (i = 0; i < gimple_phi_num_args (phi); i++)
     {
       tree def = PHI_ARG_DEF (phi, i);
       def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
@@ -1240,7 +1410,7 @@ vn_phi_insert (tree phi, tree result)
     }
   vp1->value_id = VN_INFO (result)->value_id;
   vp1->phiargs = args;
-  vp1->block = bb_for_stmt (phi);
+  vp1->block = gimple_bb (phi);
   vp1->result = result;
   vp1->hashcode = vn_phi_compute_hash (vp1);
 
@@ -1314,7 +1484,7 @@ set_ssa_val_to (tree from, tree to)
    Return true if a value number changed. */
 
 static bool
-defs_to_varying (tree stmt)
+defs_to_varying (gimple stmt)
 {
   bool changed = false;
   ssa_op_iter iter;
@@ -1331,7 +1501,7 @@ defs_to_varying (tree stmt)
 }
 
 static bool expr_has_constants (tree expr);
-static tree try_to_simplify (tree stmt, tree rhs);
+static tree try_to_simplify (gimple stmt);
 
 /* Visit a copy between LHS and RHS, return true if the value number
    changed.  */
@@ -1339,7 +1509,6 @@ static tree try_to_simplify (tree stmt, tree rhs);
 static bool
 visit_copy (tree lhs, tree rhs)
 {
-
   /* Follow chains of copies to their destination.  */
   while (SSA_VAL (rhs) != rhs && TREE_CODE (SSA_VAL (rhs)) == SSA_NAME)
     rhs = SSA_VAL (rhs);
@@ -1356,10 +1525,10 @@ visit_copy (tree lhs, tree rhs)
    value number of LHS has changed as a result.  */
 
 static bool
-visit_unary_op (tree lhs, tree op)
+visit_unary_op (tree lhs, gimple stmt)
 {
   bool changed = false;
-  tree result = vn_nary_op_lookup (op, NULL);
+  tree result = vn_nary_op_lookup_stmt (stmt, NULL);
 
   if (result)
     {
@@ -1368,7 +1537,7 @@ visit_unary_op (tree lhs, tree op)
   else
     {
       changed = set_ssa_val_to (lhs, lhs);
-      vn_nary_op_insert (op, lhs);
+      vn_nary_op_insert_stmt (stmt, lhs);
     }
 
   return changed;
@@ -1378,19 +1547,60 @@ visit_unary_op (tree lhs, tree op)
    value number of LHS has changed as a result.  */
 
 static bool
-visit_binary_op (tree lhs, tree op)
+visit_binary_op (tree lhs, gimple stmt)
+{
+  bool changed = false;
+  tree result = vn_nary_op_lookup_stmt (stmt, NULL);
+
+  if (result)
+    {
+      changed = set_ssa_val_to (lhs, result);
+    }
+  else
+    {
+      changed = set_ssa_val_to (lhs, lhs);
+      vn_nary_op_insert_stmt (stmt, lhs);
+    }
+
+  return changed;
+}
+
+/* Visit a call STMT storing into LHS.  Return true if the value number
+   of the LHS has changed as a result.  */
+
+static bool
+visit_reference_op_call (tree lhs, gimple stmt)
 {
   bool changed = false;
-  tree result = vn_nary_op_lookup (op, NULL);
+  struct vn_reference_s vr1;
+  tree result;
 
+  vr1.vuses = valueize_vuses (shared_vuses_from_stmt (stmt));
+  vr1.operands = valueize_refs (shared_reference_ops_from_call (stmt));
+  vr1.hashcode = vn_reference_compute_hash (&vr1);
+  result = vn_reference_lookup_1 (&vr1, NULL);
   if (result)
     {
       changed = set_ssa_val_to (lhs, result);
+      if (TREE_CODE (result) == SSA_NAME
+         && VN_INFO (result)->has_constants)
+       VN_INFO (lhs)->has_constants = true;
     }
   else
     {
+      void **slot;
+      vn_reference_t vr2;
       changed = set_ssa_val_to (lhs, lhs);
-      vn_nary_op_insert (op, lhs);
+      vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
+      vr2->vuses = valueize_vuses (copy_vuses_from_stmt (stmt));
+      vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
+      vr2->hashcode = vr1.hashcode;
+      vr2->result = lhs;
+      slot = htab_find_slot_with_hash (current_info->references,
+                                      vr2, vr2->hashcode, INSERT);
+      if (*slot)
+       free_reference (*slot);
+      *slot = vr2;
     }
 
   return changed;
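
A hypothetical source-level example of what the new call handling enables for pure calls (function names made up):

/* With f pure, both calls below have the same arguments and vuses, so
   the second lookup in visit_reference_op_call can reuse the value
   number of the first; downstream passes may then fold t1 - t2 to 0.  */
extern int f (int) __attribute__ ((pure));

int
example (int a)
{
  int t1 = f (a);
  int t2 = f (a);
  return t1 - t2;
}
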
@@ -1400,7 +1610,7 @@ visit_binary_op (tree lhs, tree op)
    and return true if the value number of the LHS has changed as a result.  */
 
 static bool
-visit_reference_op_load (tree lhs, tree op, tree stmt)
+visit_reference_op_load (tree lhs, tree op, gimple stmt)
 {
   bool changed = false;
   tree result = vn_reference_lookup (op, shared_vuses_from_stmt (stmt), true,
@@ -1421,7 +1631,7 @@ visit_reference_op_load (tree lhs, tree op, tree stmt)
          && !is_gimple_min_invariant (val)
          && TREE_CODE (val) != SSA_NAME)
         {
-         tree tem = try_to_simplify (stmt, val);
+         tree tem = try_to_simplify (stmt);
          if (tem)
            val = tem;
        }
@@ -1433,9 +1643,10 @@ visit_reference_op_load (tree lhs, tree op, tree stmt)
         a new SSA_NAME we create.  */
       if (!result && may_insert)
         {
-         result = make_ssa_name (SSA_NAME_VAR (lhs), NULL_TREE);
+         result = make_ssa_name (SSA_NAME_VAR (lhs), NULL);
          /* Initialize value-number information properly.  */
          VN_INFO_GET (result)->valnum = result;
+         VN_INFO (result)->value_id = get_next_value_id ();
          VN_INFO (result)->expr = val;
          VN_INFO (result)->has_constants = expr_has_constants (val);
          VN_INFO (result)->needs_insertion = true;
@@ -1489,7 +1700,7 @@ visit_reference_op_load (tree lhs, tree op, tree stmt)
    and return true if the value number of the LHS has changed as a result.  */
 
 static bool
-visit_reference_op_store (tree lhs, tree op, tree stmt)
+visit_reference_op_store (tree lhs, tree op, gimple stmt)
 {
   bool changed = false;
   tree result;
@@ -1588,13 +1799,13 @@ visit_reference_op_store (tree lhs, tree op, tree stmt)
    changed.  */
 
 static bool
-visit_phi (tree phi)
+visit_phi (gimple phi)
 {
   bool changed = false;
   tree result;
   tree sameval = VN_TOP;
   bool allsame = true;
-  int i;
+  unsigned i;
 
   /* TODO: We could check for this in init_sccvn, and replace this
      with a gcc_assert.  */
@@ -1603,7 +1814,7 @@ visit_phi (tree phi)
 
   /* See if all non-TOP arguments have the same value.  TOP is
      equivalent to everything, so we can ignore it.  */
-  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
+  for (i = 0; i < gimple_phi_num_args (phi); i++)
     {
       tree def = PHI_ARG_DEF (phi, i);
 
@@ -1690,6 +1901,32 @@ expr_has_constants (tree expr)
   return false;
 }
 
+/* Return true if STMT contains constants.  */
+
+static bool
+stmt_has_constants (gimple stmt)
+{
+  if (gimple_code (stmt) != GIMPLE_ASSIGN)
+    return false;
+
+  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
+    {
+    case GIMPLE_UNARY_RHS:
+      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
+
+    case GIMPLE_BINARY_RHS:
+      return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
+             || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
+    case GIMPLE_SINGLE_RHS:
+      /* Constants inside reference ops are rarely interesting, but
+        it can take a lot of looking to find them.  */
+      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
+    default:
+      gcc_unreachable ();
+    }
+  return false;
+}
+
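
A worked illustration of the three RHS classes distinguished above (hypothetical source; the annotations show what stmt_has_constants would return for each resulting assignment):

int
example (int y)
{
  int a = -y;      /* GIMPLE_UNARY_RHS, no constant operand -> false */
  int b = y + 7;   /* GIMPLE_BINARY_RHS, constant rhs2      -> true  */
  int c = 5;       /* GIMPLE_SINGLE_RHS, constant           -> true  */
  return a + b + c;
}
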
 /* Replace SSA_NAMES in expr with their value numbers, and return the
    result.
    This is performed in place. */
@@ -1722,11 +1959,11 @@ valueize_expr (tree expr)
    simplified. */
 
 static tree
-simplify_binary_expression (tree stmt, tree rhs)
+simplify_binary_expression (gimple stmt)
 {
   tree result = NULL_TREE;
-  tree op0 = TREE_OPERAND (rhs, 0);
-  tree op1 = TREE_OPERAND (rhs, 1);
+  tree op0 = gimple_assign_rhs1 (stmt);
+  tree op1 = gimple_assign_rhs2 (stmt);
 
   /* This will not catch every single case we could combine, but will
      catch those with constants.  The goal here is to simultaneously
@@ -1734,8 +1971,9 @@ simplify_binary_expression (tree stmt, tree rhs)
      expansion of expressions during simplification.  */
   if (TREE_CODE (op0) == SSA_NAME)
     {
-      if (VN_INFO (op0)->has_constants)
-       op0 = valueize_expr (VN_INFO (op0)->expr);
+      if (VN_INFO (op0)->has_constants
+         || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
+       op0 = valueize_expr (vn_get_expr_for (op0));
       else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
        op0 = SSA_VAL (op0);
     }
@@ -1743,28 +1981,29 @@ simplify_binary_expression (tree stmt, tree rhs)
   if (TREE_CODE (op1) == SSA_NAME)
     {
       if (VN_INFO (op1)->has_constants)
-       op1 = valueize_expr (VN_INFO (op1)->expr);
+       op1 = valueize_expr (vn_get_expr_for (op1));
       else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
        op1 = SSA_VAL (op1);
     }
 
   /* Avoid folding if nothing changed.  */
-  if (op0 == TREE_OPERAND (rhs, 0)
-      && op1 == TREE_OPERAND (rhs, 1))
+  if (op0 == gimple_assign_rhs1 (stmt)
+      && op1 == gimple_assign_rhs2 (stmt))
     return NULL_TREE;
 
   fold_defer_overflow_warnings ();
 
-  result = fold_binary (TREE_CODE (rhs), TREE_TYPE (rhs), op0, op1);
+  result = fold_binary (gimple_assign_rhs_code (stmt),
+                       TREE_TYPE (gimple_get_lhs (stmt)), op0, op1);
 
-  fold_undefer_overflow_warnings (result && valid_gimple_expression_p (result),
+  fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
                                  stmt, 0);
 
   /* Make sure result is not a complex expression consisting
      of operators of operators (IE (a + b) + (a + c))
      Otherwise, we will end up with unbounded expressions if
      fold does anything at all.  */
-  if (result && valid_gimple_expression_p (result))
+  if (result && valid_gimple_rhs_p (result))
     return result;
 
   return NULL_TREE;
@@ -1774,24 +2013,32 @@ simplify_binary_expression (tree stmt, tree rhs)
    simplified. */
 
 static tree
-simplify_unary_expression (tree rhs)
+simplify_unary_expression (gimple stmt)
 {
   tree result = NULL_TREE;
-  tree op0 = TREE_OPERAND (rhs, 0);
+  tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
+
+  /* We handle some tcc_reference codes here that are all
+     GIMPLE_ASSIGN_SINGLE codes.  */
+  if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
+      || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
+      || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
+    op0 = TREE_OPERAND (op0, 0);
 
   if (TREE_CODE (op0) != SSA_NAME)
     return NULL_TREE;
 
+  orig_op0 = op0;
   if (VN_INFO (op0)->has_constants)
-    op0 = valueize_expr (VN_INFO (op0)->expr);
-  else if (CONVERT_EXPR_P (rhs)
-          || TREE_CODE (rhs) == REALPART_EXPR
-          || TREE_CODE (rhs) == IMAGPART_EXPR
-          || TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
+    op0 = valueize_expr (vn_get_expr_for (op0));
+  else if (gimple_assign_cast_p (stmt)
+          || gimple_assign_rhs_code (stmt) == REALPART_EXPR
+          || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
+          || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
     {
       /* We want to do tree-combining on conversion-like expressions.
          Make sure we feed only SSA_NAMEs or constants to fold though.  */
-      tree tem = valueize_expr (VN_INFO (op0)->expr);
+      tree tem = valueize_expr (vn_get_expr_for (op0));
       if (UNARY_CLASS_P (tem)
          || BINARY_CLASS_P (tem)
          || TREE_CODE (tem) == VIEW_CONVERT_EXPR
@@ -1801,36 +2048,38 @@ simplify_unary_expression (tree rhs)
     }
 
   /* Avoid folding if nothing changed, but remember the expression.  */
-  if (op0 == TREE_OPERAND (rhs, 0))
-    return rhs;
+  if (op0 == orig_op0)
+    return NULL_TREE;
 
-  result = fold_unary (TREE_CODE (rhs), TREE_TYPE (rhs), op0);
+  result = fold_unary (gimple_assign_rhs_code (stmt),
+                      gimple_expr_type (stmt), op0);
   if (result)
     {
       STRIP_USELESS_TYPE_CONVERSION (result);
-      if (valid_gimple_expression_p (result))
+      if (valid_gimple_rhs_p (result))
         return result;
     }
 
-  return rhs;
+  return NULL_TREE;
 }
 
 /* Try to simplify RHS using equivalences and constant folding.  */
 
 static tree
-try_to_simplify (tree stmt, tree rhs)
+try_to_simplify (gimple stmt)
 {
   tree tem;
 
   /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
      in this case, there is no point in doing extra work.  */
-  if (TREE_CODE (rhs) == SSA_NAME)
-    return rhs;
+  if (gimple_assign_copy_p (stmt)
+      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
+    return NULL_TREE;
 
-  switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
+  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
     {
     case tcc_declaration:
-      tem = get_symbol_constant_value (rhs);
+      tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
       if (tem)
        return tem;
       break;
@@ -1838,29 +2087,29 @@ try_to_simplify (tree stmt, tree rhs)
     case tcc_reference:
       /* Do not do full-blown reference lookup here, but simplify
         reads from constant aggregates.  */
-      tem = fold_const_aggregate_ref (rhs);
+      tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
       if (tem)
        return tem;
 
       /* Fallthrough for some codes that can operate on registers.  */
-      if (!(TREE_CODE (rhs) == REALPART_EXPR
-           || TREE_CODE (rhs) == IMAGPART_EXPR
-           || TREE_CODE (rhs) == VIEW_CONVERT_EXPR))
+      if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
+           || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
+           || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
        break;
       /* We could do a little more with unary ops, if they expand
         into binary ops, but it's debatable whether it is worth it. */
     case tcc_unary:
-      return simplify_unary_expression (rhs);
+      return simplify_unary_expression (stmt);
       break;
     case tcc_comparison:
     case tcc_binary:
-      return simplify_binary_expression (stmt, rhs);
+      return simplify_binary_expression (stmt);
       break;
     default:
       break;
     }
 
-  return rhs;
+  return NULL_TREE;
 }
 
 /* Visit and value number USE, return true if the value number
@@ -1870,67 +2119,52 @@ static bool
 visit_use (tree use)
 {
   bool changed = false;
-  tree stmt = SSA_NAME_DEF_STMT (use);
-  stmt_ann_t ann;
+  gimple stmt = SSA_NAME_DEF_STMT (use);
 
   VN_INFO (use)->use_processed = true;
 
   gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
   if (dump_file && (dump_flags & TDF_DETAILS)
-      && !IS_EMPTY_STMT (stmt))
+      && !SSA_NAME_IS_DEFAULT_DEF (use))
     {
       fprintf (dump_file, "Value numbering ");
       print_generic_expr (dump_file, use, 0);
       fprintf (dump_file, " stmt = ");
-      print_generic_stmt (dump_file, stmt, 0);
+      print_gimple_stmt (dump_file, stmt, 0, 0);
     }
 
-  /* RETURN_EXPR may have an embedded MODIFY_STMT.  */
-  if (TREE_CODE (stmt) == RETURN_EXPR
-      && TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT)
-    stmt = TREE_OPERAND (stmt, 0);
-
-  ann = stmt_ann (stmt);
-
   /* Handle uninitialized uses.  */
-  if (IS_EMPTY_STMT (stmt))
-    {
-      changed = set_ssa_val_to (use, use);
-    }
+  if (SSA_NAME_IS_DEFAULT_DEF (use))
+    changed = set_ssa_val_to (use, use);
   else
     {
-      if (TREE_CODE (stmt) == PHI_NODE)
-       {
-         changed = visit_phi (stmt);
-       }
-      else if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
-              || (ann && ann->has_volatile_ops)
-              || tree_could_throw_p (stmt))
+      if (gimple_code (stmt) == GIMPLE_PHI)
+       changed = visit_phi (stmt);
+      else if (!gimple_has_lhs (stmt)
+              || gimple_has_volatile_ops (stmt)
+              || stmt_could_throw_p (stmt))
+       changed = defs_to_varying (stmt);
+      else if (is_gimple_assign (stmt))
        {
-         changed = defs_to_varying (stmt);
-       }
-      else
-       {
-         tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
-         tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
+         tree lhs = gimple_assign_lhs (stmt);
          tree simplified;
 
-         STRIP_USELESS_TYPE_CONVERSION (rhs);
-
          /* Shortcut for copies. Simplifying copies is pointless,
             since we copy the expression and value they represent.  */
-         if (TREE_CODE (rhs) == SSA_NAME && TREE_CODE (lhs) == SSA_NAME)
+         if (gimple_assign_copy_p (stmt)
+             && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
+             && TREE_CODE (lhs) == SSA_NAME)
            {
-             changed = visit_copy (lhs, rhs);
+             changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
              goto done;
            }
-         simplified = try_to_simplify (stmt, rhs);
-         if (simplified && simplified != rhs)
+         simplified = try_to_simplify (stmt);
+         if (simplified)
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "RHS ");
-                 print_generic_expr (dump_file, rhs, 0);
+                 print_gimple_expr (dump_file, stmt, 0, 0);
                  fprintf (dump_file, " simplified to ");
                  print_generic_expr (dump_file, simplified, 0);
                  if (TREE_CODE (lhs) == SSA_NAME)
@@ -1944,16 +2178,17 @@ visit_use (tree use)
             screw up phi congruence because constants are not
             uniquely associated with a single ssa name that can be
             looked up.  */
-         if (simplified && is_gimple_min_invariant (simplified)
-             && TREE_CODE (lhs) == SSA_NAME
-             && simplified != rhs)
+         if (simplified
+             && is_gimple_min_invariant (simplified)
+             && TREE_CODE (lhs) == SSA_NAME)
            {
              VN_INFO (lhs)->expr = simplified;
              VN_INFO (lhs)->has_constants = true;
              changed = set_ssa_val_to (lhs, simplified);
              goto done;
            }
-         else if (simplified && TREE_CODE (simplified) == SSA_NAME
+         else if (simplified
+                  && TREE_CODE (simplified) == SSA_NAME
                   && TREE_CODE (lhs) == SSA_NAME)
            {
              changed = visit_copy (lhs, simplified);
@@ -1968,13 +2203,10 @@ visit_use (tree use)
                     valuizing may change the IL stream.  */
                  VN_INFO (lhs)->expr = unshare_expr (simplified);
                }
-             rhs = simplified;
-           }
-         else if (expr_has_constants (rhs) && TREE_CODE (lhs) == SSA_NAME)
-           {
-             VN_INFO (lhs)->has_constants = true;
-             VN_INFO (lhs)->expr = unshare_expr (rhs);
            }
+         else if (stmt_has_constants (stmt)
+                  && TREE_CODE (lhs) == SSA_NAME)
+           VN_INFO (lhs)->has_constants = true;
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              /* We reset expr and constantness here because we may
@@ -1983,56 +2215,64 @@ visit_use (tree use)
                 even if they were optimistically constant. */
 
              VN_INFO (lhs)->has_constants = false;
-             VN_INFO (lhs)->expr = lhs;
+             VN_INFO (lhs)->expr = NULL_TREE;
            }
 
          if (TREE_CODE (lhs) == SSA_NAME
              /* We can substitute SSA_NAMEs that are live over
                 abnormal edges with their constant value.  */
-             && !is_gimple_min_invariant (rhs)
+             && !(gimple_assign_copy_p (stmt)
+                  && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
+             && !(simplified
+                  && is_gimple_min_invariant (simplified))
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
            changed = defs_to_varying (stmt);
          else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
            {
-             changed = visit_reference_op_store (lhs, rhs, stmt);
+             changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
            }
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
-             if (is_gimple_min_invariant (rhs))
+             if ((gimple_assign_copy_p (stmt)
+                  && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
+                 || (simplified
+                     && is_gimple_min_invariant (simplified)))
                {
                  VN_INFO (lhs)->has_constants = true;
-                 VN_INFO (lhs)->expr = rhs;
-                 changed = set_ssa_val_to (lhs, rhs);
+                 if (simplified)
+                   changed = set_ssa_val_to (lhs, simplified);
+                 else
+                   changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
                }
              else
                {
-                 switch (TREE_CODE_CLASS (TREE_CODE (rhs)))
+                 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
                    {
-                   case tcc_unary:
-                     changed = visit_unary_op (lhs, rhs);
-                     break;
-                   case tcc_binary:
-                     changed = visit_binary_op (lhs, rhs);
+                   case GIMPLE_UNARY_RHS:
+                     changed = visit_unary_op (lhs, stmt);
                      break;
-                     /* If tcc_vl_expr ever encompasses more than
-                        CALL_EXPR, this will need to be changed.  */
-                   case tcc_vl_exp:
-                     if (call_expr_flags (rhs)  & (ECF_PURE | ECF_CONST))
-                       changed = visit_reference_op_load (lhs, rhs, stmt);
-                     else
-                       changed = defs_to_varying (stmt);
+                   case GIMPLE_BINARY_RHS:
+                     changed = visit_binary_op (lhs, stmt);
                      break;
-                   case tcc_declaration:
-                   case tcc_reference:
-                     changed = visit_reference_op_load (lhs, rhs, stmt);
-                     break;
-                   case tcc_expression:
-                     if (TREE_CODE (rhs) == ADDR_EXPR)
+                   case GIMPLE_SINGLE_RHS:
+                     switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
                        {
-                         changed = visit_unary_op (lhs, rhs);
-                         goto done;
+                       case tcc_declaration:
+                       case tcc_reference:
+                         changed = visit_reference_op_load
+                             (lhs, gimple_assign_rhs1 (stmt), stmt);
+                         break;
+                       case tcc_expression:
+                         if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
+                           {
+                             changed = visit_unary_op (lhs, stmt);
+                             break;
+                           }
+                         /* Fallthrough.  */
+                       default:
+                         changed = defs_to_varying (stmt);
                        }
-                     /* Fallthrough.  */
+                     break;
                    default:
                      changed = defs_to_varying (stmt);
                      break;
@@ -2042,6 +2282,39 @@ visit_use (tree use)
          else
            changed = defs_to_varying (stmt);
        }
+      else if (is_gimple_call (stmt))
+       {
+         tree lhs = gimple_call_lhs (stmt);
+
+         /* ???  We could try to simplify calls.  */
+
+         if (stmt_has_constants (stmt)
+             && TREE_CODE (lhs) == SSA_NAME)
+           VN_INFO (lhs)->has_constants = true;
+         else if (TREE_CODE (lhs) == SSA_NAME)
+           {
+             /* We reset expr and constantness here because we may
+                have been value numbering optimistically, and
+                iterating. They may become non-constant in this case,
+                even if they were optimistically constant. */
+             VN_INFO (lhs)->has_constants = false;
+             VN_INFO (lhs)->expr = NULL_TREE;
+           }
+
+         if (TREE_CODE (lhs) == SSA_NAME
+             && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
+           changed = defs_to_varying (stmt);
+         /* ???  We should handle stores from calls.  */
+         else if (TREE_CODE (lhs) == SSA_NAME)
+           {
+             if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
+               changed = visit_reference_op_call (lhs, stmt);
+             else
+               changed = defs_to_varying (stmt);
+           }
+         else
+           changed = defs_to_varying (stmt);
+       }
     }
  done:
   return changed;
@@ -2054,20 +2327,20 @@ compare_ops (const void *pa, const void *pb)
 {
   const tree opa = *((const tree *)pa);
   const tree opb = *((const tree *)pb);
-  tree opstmta = SSA_NAME_DEF_STMT (opa);
-  tree opstmtb = SSA_NAME_DEF_STMT (opb);
+  gimple opstmta = SSA_NAME_DEF_STMT (opa);
+  gimple opstmtb = SSA_NAME_DEF_STMT (opb);
   basic_block bba;
   basic_block bbb;
 
-  if (IS_EMPTY_STMT (opstmta) && IS_EMPTY_STMT (opstmtb))
+  if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
     return 0;
-  else if (IS_EMPTY_STMT (opstmta))
+  else if (gimple_nop_p (opstmta))
     return -1;
-  else if (IS_EMPTY_STMT (opstmtb))
+  else if (gimple_nop_p (opstmtb))
     return 1;
 
-  bba = bb_for_stmt (opstmta);
-  bbb = bb_for_stmt (opstmtb);
+  bba = gimple_bb (opstmta);
+  bbb = gimple_bb (opstmtb);
 
   if (!bba && !bbb)
     return 0;
@@ -2078,13 +2351,14 @@ compare_ops (const void *pa, const void *pb)
 
   if (bba == bbb)
     {
-      if (TREE_CODE (opstmta) == PHI_NODE && TREE_CODE (opstmtb) == PHI_NODE)
+      if (gimple_code (opstmta) == GIMPLE_PHI
+         && gimple_code (opstmtb) == GIMPLE_PHI)
        return 0;
-      else if (TREE_CODE (opstmta) == PHI_NODE)
+      else if (gimple_code (opstmta) == GIMPLE_PHI)
        return -1;
-      else if (TREE_CODE (opstmtb) == PHI_NODE)
+      else if (gimple_code (opstmtb) == GIMPLE_PHI)
        return 1;
-      return gimple_stmt_uid (opstmta) - gimple_stmt_uid (opstmtb);
+      return gimple_uid (opstmta) - gimple_uid (opstmtb);
     }
   return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
 }
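
The comparator is presumably handed to qsort to order the members of an SCC; a minimal sketch of such a caller:

static void
sort_scc (VEC (tree, heap) *scc)
{
  /* Order SCC members so that defining statements come in RPO order
     of their blocks, PHIs first, then by statement uid, as implemented
     by compare_ops above.  */
  qsort (VEC_address (tree, scc), VEC_length (tree, scc),
         sizeof (tree), compare_ops);
}
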
@@ -2130,6 +2404,9 @@ process_scc (VEC (tree, heap) *scc)
        {
          changed = false;
          iterations++;
+         /* As we are value-numbering optimistically we have to
+            clear the expression tables and the simplified expressions
+            in each iteration until we converge.  */
          htab_empty (optimistic_info->nary);
          htab_empty (optimistic_info->phis);
          htab_empty (optimistic_info->references);
@@ -2138,6 +2415,8 @@ process_scc (VEC (tree, heap) *scc)
          empty_alloc_pool (optimistic_info->phis_pool);
          empty_alloc_pool (optimistic_info->references_pool);
          for (i = 0; VEC_iterate (tree, scc, i, var); i++)
+           VN_INFO (var)->expr = NULL_TREE;
+         for (i = 0; VEC_iterate (tree, scc, i, var); i++)
            changed |= visit_use (var);
        }
 
@@ -2210,7 +2489,8 @@ DFS (tree name)
   VEC(ssa_op_iter, heap) *itervec = NULL;
   VEC(tree, heap) *namevec = NULL;
   use_operand_p usep = NULL;
-  tree defstmt, use;
+  gimple defstmt;
+  tree use;
   ssa_op_iter iter;
 
 start_over:
@@ -2224,10 +2504,10 @@ start_over:
   defstmt = SSA_NAME_DEF_STMT (name);
 
   /* Recursively DFS on our operands, looking for SCC's.  */
-  if (!IS_EMPTY_STMT (defstmt))
+  if (!gimple_nop_p (defstmt))
     {
       /* Push a new iterator.  */
-      if (TREE_CODE (defstmt) == PHI_NODE)
+      if (gimple_code (defstmt) == GIMPLE_PHI)
        usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
       else
        usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
@@ -2378,7 +2658,7 @@ init_scc_vn (void)
       if (name)
        {
          VN_INFO_GET (name)->valnum = VN_TOP;
-         VN_INFO (name)->expr = name;
+         VN_INFO (name)->expr = NULL_TREE;
          VN_INFO (name)->value_id = 0;
        }
     }
@@ -2601,10 +2881,6 @@ expressions_equal_p (tree e1, tree e2)
   if (!e1 || !e2)
     return false;
 
-  /* Likewise if they are not of the same type.  */
-  if (TREE_TYPE (e1) != TREE_TYPE (e2))
-    return false;
-
   /* Recurse on elements of lists.  */
   if (TREE_CODE (e1) == TREE_LIST && TREE_CODE (e2) == TREE_LIST)
     {
index 314cf8f..923be19 100644
@@ -104,7 +104,30 @@ typedef struct vn_constant_s
   hashval_t hashcode;
   tree constant;
 } *vn_constant_t;
-  
+
+/* Hash the constant CONSTANT, distinguishing constants of incompatible
+   types in the types_compatible_p sense.  */
+
+static inline hashval_t
+vn_hash_constant_with_type (tree constant)
+{
+  tree type = TREE_TYPE (constant);
+  return (iterative_hash_expr (constant, 0)
+         + INTEGRAL_TYPE_P (type)
+         + (INTEGRAL_TYPE_P (type)
+            ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
+}
+
+/* Compare the constants C1 and C2, distinguishing constants of
+   incompatible types in the types_compatible_p sense.  */
+
+static inline bool
+vn_constant_eq_with_type (tree c1, tree c2)
+{
+  return (expressions_equal_p (c1, c2)
+         && types_compatible_p (TREE_TYPE (c1), TREE_TYPE (c2)));
+}
+
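
A minimal sketch of the distinction these helpers draw, using build_int_cst (the function name is made up):

static void
example_constant_ids (void)
{
  tree one_int = build_int_cst (integer_type_node, 1);
  tree one_uint = build_int_cst (unsigned_type_node, 1);

  /* The precision/signedness terms separate the two hashes, and the
     equality test rejects the pair because int and unsigned int are
     not types_compatible_p, so 1 and 1u get distinct value ids.  */
  gcc_assert (vn_hash_constant_with_type (one_int)
              != vn_hash_constant_with_type (one_uint));
  gcc_assert (!vn_constant_eq_with_type (one_int, one_uint));
}
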
 typedef struct vn_ssa_aux
 {
   /* Value number. This may be an SSA name or a constant.  */
@@ -138,16 +161,20 @@ typedef struct vn_ssa_aux
 /* Return the value numbering info for an SSA_NAME.  */
 extern vn_ssa_aux_t VN_INFO (tree);
 extern vn_ssa_aux_t VN_INFO_GET (tree);
+tree vn_get_expr_for (tree);
 bool run_scc_vn (bool);
 void free_scc_vn (void);
 tree vn_nary_op_lookup (tree, vn_nary_op_t *);
+tree vn_nary_op_lookup_stmt (gimple, vn_nary_op_t *);
 tree vn_nary_op_lookup_pieces (unsigned int, enum tree_code,
                               tree, tree, tree, tree, tree,
                               vn_nary_op_t *);
 vn_nary_op_t vn_nary_op_insert (tree, tree);
+vn_nary_op_t vn_nary_op_insert_stmt (gimple, tree);
 vn_nary_op_t vn_nary_op_insert_pieces (unsigned int, enum tree_code,
                                       tree, tree, tree, tree,
                                       tree, tree, unsigned int);
+void copy_reference_ops_from_call (gimple, VEC(vn_reference_op_s, heap) **);
 tree vn_reference_lookup_pieces (VEC (tree, gc) *,
                                 VEC (vn_reference_op_s, heap) *,
                                 vn_reference_t *);
@@ -166,6 +193,6 @@ unsigned int get_next_value_id (void);
 unsigned int get_constant_value_id (tree);
 unsigned int get_or_alloc_constant_value_id (tree);
 bool value_id_constant_p (unsigned int);
-VEC (tree, gc) *shared_vuses_from_stmt (tree);
-VEC (tree, gc) *copy_vuses_from_stmt (tree);
+VEC (tree, gc) *shared_vuses_from_stmt (gimple);
+VEC (tree, gc) *copy_vuses_from_stmt (gimple);
 #endif /* TREE_SSA_SCCVN_H  */
index ebf54e2..e56cce0 100644
@@ -28,7 +28,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "diagnostic.h"
 #include "tree-inline.h"
 #include "tree-flow.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "tree-dump.h"
 #include "timevar.h"
 #include "fibheap.h"
@@ -82,18 +82,18 @@ static struct
    we return NULL.  */
 
 static basic_block
-find_bb_for_arg (tree phi, tree def)
+find_bb_for_arg (gimple phi, tree def)
 {
-  int i;
+  size_t i;
   bool foundone = false;
   basic_block result = NULL;
-  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
+  for (i = 0; i < gimple_phi_num_args (phi); i++)
     if (PHI_ARG_DEF (phi, i) == def)
       {
        if (foundone)
          return NULL;
        foundone = true;
-       result = PHI_ARG_EDGE (phi, i)->src;
+       result = gimple_phi_arg_edge (phi, i)->src;
       }
   return result;
 }
@@ -107,9 +107,9 @@ find_bb_for_arg (tree phi, tree def)
    used in, so that you only have one place you can sink it to.  */
 
 static bool
-all_immediate_uses_same_place (tree stmt)
+all_immediate_uses_same_place (gimple stmt)
 {
-  tree firstuse = NULL_TREE;
+  gimple firstuse = NULL;
   ssa_op_iter op_iter;
   imm_use_iterator imm_iter;
   use_operand_p use_p;
@@ -119,7 +119,7 @@ all_immediate_uses_same_place (tree stmt)
     {
       FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
         {
-         if (firstuse == NULL_TREE)
+         if (firstuse == NULL)
            firstuse = USE_STMT (use_p);
          else
            if (firstuse != USE_STMT (use_p))
@@ -134,16 +134,16 @@ all_immediate_uses_same_place (tree stmt)
    but we still must avoid moving them around.  */
 
 bool
-is_hidden_global_store (tree stmt)
+is_hidden_global_store (gimple stmt)
 {
   /* Check virtual definitions.  If we get here, the only virtual
-     definitions we should see are those generated by assignment
+     definitions we should see are those generated by assignment or call
      statements.  */
   if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_DEFS))
     {
       tree lhs;
 
-      gcc_assert (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT);
+      gcc_assert (is_gimple_assign (stmt) || is_gimple_call (stmt));
 
       /* Note that we must not check the individual virtual operands
         here.  In particular, if this is an aliased store, we could
@@ -170,7 +170,8 @@ is_hidden_global_store (tree stmt)
         address is a pointer, we check if its name tag or symbol tag is
         a global variable.  Otherwise, we check if the base variable
         is a global.  */
-      lhs = GIMPLE_STMT_OPERAND (stmt, 0);
+      lhs = gimple_get_lhs (stmt);
+
       if (REFERENCE_CLASS_P (lhs))
        lhs = get_base_address (lhs);
 
@@ -200,7 +201,7 @@ is_hidden_global_store (tree stmt)
 /* Find the nearest common dominator of all of the immediate uses in IMM.  */
 
 static basic_block
-nearest_common_dominator_of_uses (tree stmt)
+nearest_common_dominator_of_uses (gimple stmt)
 {  
   bitmap blocks = BITMAP_ALLOC (NULL);
   basic_block commondom;
@@ -216,18 +217,18 @@ nearest_common_dominator_of_uses (tree stmt)
     {
       FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
         {
-         tree usestmt = USE_STMT (use_p);
+         gimple usestmt = USE_STMT (use_p);
          basic_block useblock;
 
-         if (TREE_CODE (usestmt) == PHI_NODE)
+         if (gimple_code (usestmt) == GIMPLE_PHI)
            {
              int idx = PHI_ARG_INDEX_FROM_USE (use_p);
 
-             useblock = PHI_ARG_EDGE (usestmt, idx)->src;
+             useblock = gimple_phi_arg_edge (usestmt, idx)->src;
            }
          else
            {
-             useblock = bb_for_stmt (usestmt);
+             useblock = gimple_bb (usestmt);
            }
 
          /* Short circuit. Nothing dominates the entry block.  */
@@ -249,23 +250,22 @@ nearest_common_dominator_of_uses (tree stmt)
 
 /* Given a statement (STMT) and the basic block it is currently in (FROMBB), 
    determine the location to sink the statement to, if any.
-   Returns true if there is such location; in that case, TOBB is set to the
-   basic block of the location, and TOBSI points to the statement before
-   that STMT should be moved.  */
+   Returns true if there is such a location; in that case, TOGSI points to
+   the statement before which STMT should be moved.  */
 
 static bool
-statement_sink_location (tree stmt, basic_block frombb, basic_block *tobb,
-                        block_stmt_iterator *tobsi)
+statement_sink_location (gimple stmt, basic_block frombb,
+                        gimple_stmt_iterator *togsi)
 {
-  tree use, def;
+  gimple use;
+  tree def;
   use_operand_p one_use = NULL_USE_OPERAND_P;
   basic_block sinkbb;
   use_operand_p use_p;
   def_operand_p def_p;
   ssa_op_iter iter;
-  stmt_ann_t ann;
-  tree rhs;
   imm_use_iterator imm_iter;
+  enum tree_code code;
 
   FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
     {
@@ -281,9 +281,8 @@ statement_sink_location (tree stmt, basic_block frombb, basic_block *tobb,
   if (one_use == NULL_USE_OPERAND_P)
     return false;
 
-  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+  if (gimple_code (stmt) != GIMPLE_ASSIGN)
     return false;
-  rhs = GIMPLE_STMT_OPERAND (stmt, 1);
 
   /* There are a few classes of things we can't or don't move, some because we
      don't have code to handle it, some because it's not profitable and some
@@ -305,13 +304,13 @@ statement_sink_location (tree stmt, basic_block frombb, basic_block *tobb,
      sunk.  
 
   */
-  ann = stmt_ann (stmt);
+  code = gimple_assign_rhs_code (stmt);
   if (stmt_ends_bb_p (stmt)
-      || TREE_SIDE_EFFECTS (rhs)
-      || TREE_CODE (rhs) == EXC_PTR_EXPR
-      || TREE_CODE (rhs) == FILTER_EXPR
+      || gimple_has_side_effects (stmt)
+      || code == EXC_PTR_EXPR
+      || code == FILTER_EXPR
       || is_hidden_global_store (stmt)
-      || ann->has_volatile_ops
+      || gimple_has_volatile_ops (stmt)
       || !ZERO_SSA_OPERANDS (stmt, SSA_OP_VUSE))
     return false;
   
@@ -365,20 +364,19 @@ statement_sink_location (tree stmt, basic_block frombb, basic_block *tobb,
          fprintf (dump_file, "Common dominator of all uses is %d\n",
                   commondom->index);
        }
-      *tobb = commondom;
-      *tobsi = bsi_after_labels (commondom);
+      *togsi = gsi_after_labels (commondom);
       return true;
     }
 
   use = USE_STMT (one_use);
-  if (TREE_CODE (use) != PHI_NODE)
+  if (gimple_code (use) != GIMPLE_PHI)
     {
-      sinkbb = bb_for_stmt (use);
+      sinkbb = gimple_bb (use);
       if (sinkbb == frombb || sinkbb->loop_depth > frombb->loop_depth
          || sinkbb->loop_father != frombb->loop_father)
        return false;
-      *tobb = sinkbb;
-      *tobsi = bsi_for_stmt (use);
+
+      *togsi = gsi_for_stmt (use);
       return true;
     }
 
@@ -399,14 +397,13 @@ statement_sink_location (tree stmt, basic_block frombb, basic_block *tobb,
      If the use is a phi, and is in the same bb as the def, 
      we can't sink it.  */
 
-  if (bb_for_stmt (use) == frombb)
+  if (gimple_bb (use) == frombb)
     return false;
   if (sinkbb == frombb || sinkbb->loop_depth > frombb->loop_depth
       || sinkbb->loop_father != frombb->loop_father)
     return false;
 
-  *tobb = sinkbb;
-  *tobsi = bsi_after_labels (sinkbb);
+  *togsi = gsi_after_labels (sinkbb);
 
   return true;
 }
@@ -417,7 +414,7 @@ static void
 sink_code_in_bb (basic_block bb)
 {
   basic_block son;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   edge_iterator ei;
   edge e;
   bool last = true;
@@ -432,50 +429,49 @@ sink_code_in_bb (basic_block bb)
     if (e->flags & EDGE_ABNORMAL)
       goto earlyout;
 
-  for (bsi = bsi_last (bb); !bsi_end_p (bsi);)
+  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
     {
-      tree stmt = bsi_stmt (bsi);      
-      block_stmt_iterator tobsi;
-      basic_block tobb;
+      gimple stmt = gsi_stmt (gsi);    
+      gimple_stmt_iterator togsi;
 
-      if (!statement_sink_location (stmt, bb, &tobb, &tobsi))
+      if (!statement_sink_location (stmt, bb, &togsi))
        {
-         if (!bsi_end_p (bsi))
-           bsi_prev (&bsi);
+         if (!gsi_end_p (gsi))
+           gsi_prev (&gsi);
          last = false;
          continue;
        }      
       if (dump_file)
        {
          fprintf (dump_file, "Sinking ");
-         print_generic_expr (dump_file, stmt, TDF_VOPS);
+         print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS);
          fprintf (dump_file, " from bb %d to bb %d\n",
-                  bb->index, tobb->index);
+                  bb->index, (gsi_bb (togsi))->index);
        }
       
       /* If this is the end of the basic block, we need to insert at the end
          of the basic block.  */
-      if (bsi_end_p (tobsi))
-       bsi_move_to_bb_end (&bsi, tobb);
+      if (gsi_end_p (togsi))
+       gsi_move_to_bb_end (&gsi, gsi_bb (togsi));
       else
-       bsi_move_before (&bsi, &tobsi);
+       gsi_move_before (&gsi, &togsi);
 
       sink_stats.sunk++;
 
       /* If we've just removed the last statement of the BB, the
-        bsi_end_p() test below would fail, but bsi_prev() would have
+        gsi_end_p() test below would fail, but gsi_prev() would have
         succeeded, and we want it to succeed.  So we keep track of
         whether we're at the last statement and pick up the new last
         statement.  */
       if (last)
        {
-         bsi = bsi_last (bb);
+         gsi = gsi_last_bb (bb);
          continue;
        }
 
       last = false;
-      if (!bsi_end_p (bsi))
-       bsi_prev (&bsi);
+      if (!gsi_end_p (gsi))
+       gsi_prev (&gsi);
       
     }
  earlyout:
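
For orientation, the transformation this pass performs looks roughly like this at the source level (hypothetical example):

int
example (int a, int b, int cond)
{
  int t = a * b;   /* before sinking: computed on every path        */
  if (cond)
    return t;      /* only use; sinking moves a * b into this block */
  return 0;
}
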
index 7c4cd3d..3b9ce02 100644
@@ -39,7 +39,7 @@
 #include "tree-inline.h"
 #include "varray.h"
 #include "c-tree.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "hashtab.h"
 #include "function.h"
 #include "cgraph.h"
@@ -3087,7 +3087,6 @@ get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p)
   switch (TREE_CODE_CLASS (TREE_CODE (t)))
     {
     case tcc_expression:
-    case tcc_vl_exp:
       {
        switch (TREE_CODE (t))
          {
@@ -3109,37 +3108,6 @@ get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p)
              return;
            }
            break;
-         case CALL_EXPR:
-           /* XXX: In interprocedural mode, if we didn't have the
-              body, we would need to do *each pointer argument =
-              &ANYTHING added.  */
-           if (call_expr_flags (t) & (ECF_MALLOC | ECF_MAY_BE_ALLOCA))
-             {
-               varinfo_t vi;
-               tree heapvar = heapvar_lookup (t);
-
-               if (heapvar == NULL)
-                 {
-                   heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
-                   DECL_EXTERNAL (heapvar) = 1;
-                   get_var_ann (heapvar)->is_heapvar = 1;
-                   if (gimple_referenced_vars (cfun))
-                     add_referenced_var (heapvar);
-                   heapvar_insert (t, heapvar);
-                 }
-
-               temp.var = create_variable_info_for (heapvar,
-                                                    alias_get_name (heapvar));
-
-               vi = get_varinfo (temp.var);
-               vi->is_artificial_var = 1;
-               vi->is_heap_var = 1;
-               temp.type = ADDRESSOF;
-               temp.offset = 0;
-               VEC_safe_push (ce_s, heap, *results, &temp);
-               return;
-             }
-           break;
          default:;
          }
        break;
@@ -3165,6 +3133,8 @@ get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p)
       }
     case tcc_unary:
       {
+       /* FIXME tuples: this won't trigger, instead get_constraint_for
+          needs to be fed with piecewise trees.  */
        switch (TREE_CODE (t))
          {
          CASE_CONVERT:
@@ -3186,25 +3156,10 @@ get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p)
          }
        break;
       }
-    case tcc_binary:
-      {
-       if (TREE_CODE (t) == POINTER_PLUS_EXPR)
-         {
-           get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
-                                          TREE_OPERAND (t, 1), results);
-           return;
-         }
-       break;
-      }
     case tcc_exceptional:
       {
        switch (TREE_CODE (t))
          {
-         case PHI_NODE:
-           {
-             get_constraint_for_1 (PHI_RESULT (t), results, address_p);
-             return;
-           }
          case SSA_NAME:
            {
              get_constraint_for_ssa_var (t, results, address_p);
@@ -3544,20 +3499,23 @@ make_escape_constraint (tree op)
    RHS.  */
 
 static void
-handle_rhs_call  (tree rhs)
+handle_rhs_call (gimple stmt)
 {
-  tree arg;
-  call_expr_arg_iterator iter;
+  unsigned i;
 
-  FOR_EACH_CALL_EXPR_ARG (arg, iter, rhs)
-    /* Find those pointers being passed, and make sure they end up
-       pointing to anything.  */
-    if (could_have_pointers (arg))
-      make_escape_constraint (arg);
+  for (i = 0; i < gimple_call_num_args (stmt); ++i)
+    {
+      tree arg = gimple_call_arg (stmt, i);
+
+      /* Find those pointers being passed, and make sure they end up
+        pointing to anything.  */
+      if (could_have_pointers (arg))
+       make_escape_constraint (arg);
+    }
 
   /* The static chain escapes as well.  */
-  if (CALL_EXPR_STATIC_CHAIN (rhs))
-    make_escape_constraint (CALL_EXPR_STATIC_CHAIN (rhs));
+  if (gimple_call_chain (stmt))
+    make_escape_constraint (gimple_call_chain (stmt));
 }
 
 /* For non-IPA mode, generate constraints necessary for a call
@@ -3612,22 +3570,20 @@ handle_lhs_call (tree lhs, int flags)
    const function that returns a pointer in the statement STMT.  */
 
 static void
-handle_const_call (tree stmt)
+handle_const_call (gimple stmt)
 {
-  tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
-  tree call = get_call_expr_in (stmt);
+  tree lhs = gimple_call_lhs (stmt);
   VEC(ce_s, heap) *lhsc = NULL;
   struct constraint_expr rhsc;
-  unsigned int j;
+  unsigned int j, k;
   struct constraint_expr *lhsp;
-  tree arg, tmpvar;
-  call_expr_arg_iterator iter;
+  tree tmpvar;
   struct constraint_expr tmpc;
 
   get_constraint_for (lhs, &lhsc);
 
   /* If this is a nested function then it can return anything.  */
-  if (CALL_EXPR_STATIC_CHAIN (call))
+  if (gimple_call_chain (stmt))
     {
       rhsc.var = anything_id;
       rhsc.offset = 0;
@@ -3652,18 +3608,22 @@ handle_const_call (tree stmt)
   process_constraint (new_constraint (tmpc, rhsc));
 
   /* May return arguments.  */
-  FOR_EACH_CALL_EXPR_ARG (arg, iter, call)
-    if (could_have_pointers (arg))
-      {
-       VEC(ce_s, heap) *argc = NULL;
-       struct constraint_expr *argp;
-       int i;
-
-       get_constraint_for (arg, &argc);
-       for (i = 0; VEC_iterate (ce_s, argc, i, argp); i++)
-         process_constraint (new_constraint (tmpc, *argp));
-       VEC_free (ce_s, heap, argc);
-      }
+  for (k = 0; k < gimple_call_num_args (stmt); ++k)
+    {
+      tree arg = gimple_call_arg (stmt, k);
+
+      if (could_have_pointers (arg))
+       {
+         VEC(ce_s, heap) *argc = NULL;
+         struct constraint_expr *argp;
+         int i;
+
+         get_constraint_for (arg, &argc);
+         for (i = 0; VEC_iterate (ce_s, argc, i, argp); i++)
+           process_constraint (new_constraint (tmpc, *argp));
+         VEC_free (ce_s, heap, argc);
+       }
+    }
 
   for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
     process_constraint (new_constraint (*lhsp, tmpc));
@@ -3675,28 +3635,30 @@ handle_const_call (tree stmt)
    pure function in statement STMT.  */
 
 static void
-handle_pure_call (tree stmt)
+handle_pure_call (gimple stmt)
 {
-  tree call = get_call_expr_in (stmt);
-  tree arg;
-  call_expr_arg_iterator iter;
+  unsigned i;
 
   /* Memory reached from pointer arguments is call-used.  */
-  FOR_EACH_CALL_EXPR_ARG (arg, iter, call)
-    if (could_have_pointers (arg))
-      make_constraint_to (callused_id, arg);
+  for (i = 0; i < gimple_call_num_args (stmt); ++i)
+    {
+      tree arg = gimple_call_arg (stmt, i);
+
+      if (could_have_pointers (arg))
+       make_constraint_to (callused_id, arg);
+    }
 
   /* The static chain is used as well.  */
-  if (CALL_EXPR_STATIC_CHAIN (call))
-    make_constraint_to (callused_id, CALL_EXPR_STATIC_CHAIN (call));
+  if (gimple_call_chain (stmt))
+    make_constraint_to (callused_id, gimple_call_chain (stmt));
 
   /* If the call returns a pointer it may point to reachable memory
      from the arguments.  Not so for malloc functions though.  */
-  if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-      && could_have_pointers (GIMPLE_STMT_OPERAND (stmt, 0))
-      && !(call_expr_flags (call) & ECF_MALLOC))
+  if (gimple_call_lhs (stmt)
+      && could_have_pointers (gimple_call_lhs (stmt))
+      && !(gimple_call_flags (stmt) & ECF_MALLOC))
     {
-      tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
+      tree lhs = gimple_call_lhs (stmt);
       VEC(ce_s, heap) *lhsc = NULL;
       struct constraint_expr rhsc;
       struct constraint_expr *lhsp;
@@ -3705,7 +3667,7 @@ handle_pure_call (tree stmt)
       get_constraint_for (lhs, &lhsc);
 
       /* If this is a nested function then it can return anything.  */
-      if (CALL_EXPR_STATIC_CHAIN (call))
+      if (gimple_call_chain (stmt))
        {
          rhsc.var = anything_id;
          rhsc.offset = 0;
@@ -3733,40 +3695,37 @@ handle_pure_call (tree stmt)
    when building alias sets and computing alias grouping heuristics.  */
 
 static void
-find_func_aliases (tree origt)
+find_func_aliases (gimple origt)
 {
-  tree call, t = origt;
+  gimple t = origt;
   VEC(ce_s, heap) *lhsc = NULL;
   VEC(ce_s, heap) *rhsc = NULL;
   struct constraint_expr *c;
   enum escape_type stmt_escape_type;
 
-  if (TREE_CODE (t) == RETURN_EXPR && TREE_OPERAND (t, 0))
-    t = TREE_OPERAND (t, 0);
-
   /* Now build constraints expressions.  */
-  if (TREE_CODE (t) == PHI_NODE)
+  if (gimple_code (t) == GIMPLE_PHI)
     {
-      gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (PHI_RESULT (t))));
+      gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (gimple_phi_result (t))));
 
       /* Only care about pointers and structures containing
         pointers.  */
-      if (could_have_pointers (PHI_RESULT (t)))
+      if (could_have_pointers (gimple_phi_result (t)))
        {
-         int i;
+         size_t i;
          unsigned int j;
 
          /* For a phi node, assign all the arguments to
             the result.  */
-         get_constraint_for (PHI_RESULT (t), &lhsc);
-         for (i = 0; i < PHI_NUM_ARGS (t); i++)
+         get_constraint_for (gimple_phi_result (t), &lhsc);
+         for (i = 0; i < gimple_phi_num_args (t); i++)
            {
              tree rhstype;
              tree strippedrhs = PHI_ARG_DEF (t, i);
 
              STRIP_NOPS (strippedrhs);
              rhstype = TREE_TYPE (strippedrhs);
-             get_constraint_for (PHI_ARG_DEF (t, i), &rhsc);
+             get_constraint_for (gimple_phi_arg_def (t, i), &rhsc);
 
              for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
                {
@@ -3782,87 +3741,73 @@ find_func_aliases (tree origt)
        }
     }
   /* In IPA mode, we need to generate constraints to pass call
-     arguments through their calls.   There are two cases, either a
-     GIMPLE_MODIFY_STMT when we are returning a value, or just a plain
-     CALL_EXPR when we are not.
+     arguments through their calls.  There are two cases, either a
+     GIMPLE_CALL returning a value, or a plain GIMPLE_CALL when it does
+     not.
 
      In non-ipa mode, we need to generate constraints for each
      pointer passed by address.  */
-  else if ((call = get_call_expr_in (t)) != NULL_TREE)
+  else if (is_gimple_call (t))
     {
-      int flags = call_expr_flags (call);
       if (!in_ipa_mode)
        {
+         int flags = gimple_call_flags (t);
+
          /* Const functions can return their arguments and addresses
             of global memory but not of escaped memory.  */
          if (flags & ECF_CONST)
            {
-             if (TREE_CODE (t) == GIMPLE_MODIFY_STMT
-                 && could_have_pointers (GIMPLE_STMT_OPERAND (t, 1)))
+             if (gimple_call_lhs (t)
+                 && could_have_pointers (gimple_call_lhs (t)))
                handle_const_call (t);
            }
+         /* Pure functions can return addresses in and of memory
+            reachable from their arguments, but they are not an escape
+            point for reachable memory of their arguments.  */
          else if (flags & ECF_PURE)
            {
              handle_pure_call (t);
-             if (TREE_CODE (t) == GIMPLE_MODIFY_STMT
-                 && could_have_pointers (GIMPLE_STMT_OPERAND (t, 1)))
-               handle_lhs_call (GIMPLE_STMT_OPERAND (t, 0), flags);
+             if (gimple_call_lhs (t)
+                 && could_have_pointers (gimple_call_lhs (t)))
+               handle_lhs_call (gimple_call_lhs (t), flags);
            }
-         /* Pure functions can return addresses in and of memory
-            reachable from their arguments, but they are not an escape
-            point for reachable memory of their arguments.  But as we
-            do not compute call-used memory separately we cannot do
-            something special here.  */
-         else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
+         else
            {
-             handle_rhs_call (GIMPLE_STMT_OPERAND (t, 1));
-             if (could_have_pointers (GIMPLE_STMT_OPERAND (t, 1)))
-               handle_lhs_call (GIMPLE_STMT_OPERAND (t, 0), flags);
+             handle_rhs_call (t);
+             if (gimple_call_lhs (t)
+                 && could_have_pointers (gimple_call_lhs (t)))
+               handle_lhs_call (gimple_call_lhs (t), flags);
            }
-         else
-           handle_rhs_call (t);
        }
       else
        {
          tree lhsop;
-         tree rhsop;
-         tree arg;
-         call_expr_arg_iterator iter;
          varinfo_t fi;
          int i = 1;
+         size_t j;
          tree decl;
-         if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
-           {
-             lhsop = GIMPLE_STMT_OPERAND (t, 0);
-             rhsop = GIMPLE_STMT_OPERAND (t, 1);
-           }
-         else
-           {
-             lhsop = NULL;
-             rhsop = t;
-           }
-         decl = get_callee_fndecl (rhsop);
+
+         lhsop = gimple_call_lhs (t);
+         decl = gimple_call_fndecl (t);
 
          /* If we can directly resolve the function being called, do so.
             Otherwise, it must be some sort of indirect expression that
             we should still be able to handle.  */
          if (decl)
-           {
-             fi = get_vi_for_tree (decl);
-           }
+           fi = get_vi_for_tree (decl);
          else
            {
-             decl = CALL_EXPR_FN (rhsop);
+             decl = gimple_call_fn (t);
              fi = get_vi_for_tree (decl);
            }
 
          /* Assign all the passed arguments to the appropriate incoming
             parameters of the function.  */
-
-         FOR_EACH_CALL_EXPR_ARG (arg, iter, rhsop)
+         for (j = 0; j < gimple_call_num_args (t); j++)
            {
              struct constraint_expr lhs ;
              struct constraint_expr *rhsp;
+             tree arg = gimple_call_arg (t, j);
 
              get_constraint_for (arg, &rhsc);
              if (TREE_CODE (decl) != FUNCTION_DECL)
@@ -3914,19 +3859,33 @@ find_func_aliases (tree origt)
   /* Otherwise, just a regular assignment statement.  Only care about
      operations with pointer result, others are dealt with as escape
      points if they have pointer operands.  */
-  else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT
-          && could_have_pointers (GIMPLE_STMT_OPERAND (t, 0)))
+  else if (is_gimple_assign (t)
+          && could_have_pointers (gimple_assign_lhs (t)))
     {
-      tree lhsop = GIMPLE_STMT_OPERAND (t, 0);
-      tree rhsop = GIMPLE_STMT_OPERAND (t, 1);
+      /* Otherwise, just a regular assignment statement.  */
+      tree lhsop = gimple_assign_lhs (t);
+      tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;
 
-      if (AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
+      if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
        do_structure_copy (lhsop, rhsop);
       else
        {
          unsigned int j;
+         struct constraint_expr temp;
          get_constraint_for (lhsop, &lhsc);
-         get_constraint_for (rhsop, &rhsc);
+
+         if (gimple_assign_rhs_code (t) == POINTER_PLUS_EXPR)
+           get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
+                                          gimple_assign_rhs2 (t), &rhsc);
+         else if (rhsop)
+           get_constraint_for (rhsop, &rhsc);
+         else
+           {
+             temp.type = ADDRESSOF;
+             temp.var = anything_id;
+             temp.offset = 0;
+             VEC_safe_push (ce_s, heap, rhsc, &temp);
+           }
          for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
            {
              struct constraint_expr *c2;
@@ -3937,11 +3896,11 @@ find_func_aliases (tree origt)
            }
        }
     }
-  else if (TREE_CODE (t) == CHANGE_DYNAMIC_TYPE_EXPR)
+  else if (gimple_code (t) == GIMPLE_CHANGE_DYNAMIC_TYPE)
     {
       unsigned int j;
 
-      get_constraint_for (CHANGE_DYNAMIC_TYPE_LOCATION (t), &lhsc);
+      get_constraint_for (gimple_cdt_location (t), &lhsc);
       for (j = 0; VEC_iterate (ce_s, lhsc, j, c); ++j)
        get_varinfo (c->var)->no_tbaa_pruning = true;
     }
@@ -3949,48 +3908,47 @@ find_func_aliases (tree origt)
   stmt_escape_type = is_escape_site (t);
   if (stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
     {
-      tree rhs;
-      gcc_assert (TREE_CODE (t) == GIMPLE_MODIFY_STMT);
-      rhs = GIMPLE_STMT_OPERAND (t, 1);
-      if (TREE_CODE (rhs) == ADDR_EXPR)
+      gcc_assert (is_gimple_assign (t));
+      if (gimple_assign_rhs_code (t) == ADDR_EXPR)
        {
+         tree rhs = gimple_assign_rhs1 (t);
          tree base = get_base_address (TREE_OPERAND (rhs, 0));
          if (base
              && (!DECL_P (base)
                  || !is_global_var (base)))
            make_escape_constraint (rhs);
        }
-      else if (TREE_CODE (rhs) == SSA_NAME
-              && POINTER_TYPE_P (TREE_TYPE (rhs)))
-       make_escape_constraint (rhs);
-      else if (could_have_pointers (rhs))
-       make_escape_constraint (rhs);
+      else if (get_gimple_rhs_class (gimple_assign_rhs_code (t))
+              == GIMPLE_SINGLE_RHS)
+       {
+         if (could_have_pointers (gimple_assign_rhs1 (t)))
+           make_escape_constraint (gimple_assign_rhs1 (t));
+       }
+      /* FIXME tuples
+      else
+       gcc_unreachable ();  */
     }
   else if (stmt_escape_type == ESCAPE_BAD_CAST)
     {
-      tree rhs;
-      gcc_assert (TREE_CODE (t) == GIMPLE_MODIFY_STMT);
-      rhs = GIMPLE_STMT_OPERAND (t, 1);
-      gcc_assert (CONVERT_EXPR_P (rhs)
-                 || TREE_CODE (rhs) == VIEW_CONVERT_EXPR);
-      rhs = TREE_OPERAND (rhs, 0);
-      make_escape_constraint (rhs);
+      gcc_assert (is_gimple_assign (t));
+      gcc_assert (IS_CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (t))
+                 || gimple_assign_rhs_code (t) == VIEW_CONVERT_EXPR);
+      make_escape_constraint (gimple_assign_rhs1 (t));
     }
   else if (stmt_escape_type == ESCAPE_TO_ASM)
     {
-      tree link;
-      int i;
-      for (i = 0, link = ASM_OUTPUTS (t); link; i++, link = TREE_CHAIN (link))
+      unsigned i;
+      for (i = 0; i < gimple_asm_noutputs (t); ++i)
        {
-         tree op = TREE_VALUE (link);
+         tree op = TREE_VALUE (gimple_asm_output_op (t, i));
          if (op && could_have_pointers (op))
            /* Strictly we'd only need the constraints from ESCAPED and
               NONLOCAL.  */
            make_escape_constraint (op);
        }
-      for (i = 0, link = ASM_INPUTS (t); link; i++, link = TREE_CHAIN (link))
+      for (i = 0; i < gimple_asm_ninputs (t); ++i)
        {
-         tree op = TREE_VALUE (link);
+         tree op = TREE_VALUE (gimple_asm_input_op (t, i));
          if (op && could_have_pointers (op))
            /* Strictly we'd only need the constraint to ESCAPED.  */
            make_escape_constraint (op);
@@ -4002,7 +3960,7 @@ find_func_aliases (tree origt)
      number of statements re-scanned.  It's not really necessary to
      re-scan *all* statements.  */
   if (!in_ipa_mode)
-    mark_stmt_modified (origt);
+    gimple_set_modified (origt, true);
   VEC_free (ce_s, heap, rhsc);
   VEC_free (ce_s, heap, lhsc);
 }
@@ -5435,25 +5393,28 @@ compute_points_to_sets (void)
   /* Now walk all statements and derive aliases.  */
   FOR_EACH_BB (bb)
     {
-      block_stmt_iterator bsi;
-      tree phi;
+      gimple_stmt_iterator gsi;
+
+      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+       {
+         gimple phi = gsi_stmt (gsi);
 
-      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
-       if (is_gimple_reg (PHI_RESULT (phi)))
-         find_func_aliases (phi);
+         if (is_gimple_reg (gimple_phi_result (phi)))
+           find_func_aliases (phi);
+       }
 
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
        {
-         tree stmt = bsi_stmt (bsi);
+         gimple stmt = gsi_stmt (gsi);
 
          find_func_aliases (stmt);
 
-         /* The information in CHANGE_DYNAMIC_TYPE_EXPR nodes has now
-            been captured, and we can remove them.  */
-         if (TREE_CODE (stmt) == CHANGE_DYNAMIC_TYPE_EXPR)
-           bsi_remove (&bsi, true);
+         /* The information in GIMPLE_CHANGE_DYNAMIC_TYPE statements
+            has now been captured, and we can remove them.  */
+         if (gimple_code (stmt) == GIMPLE_CHANGE_DYNAMIC_TYPE)
+           gsi_remove (&gsi, true);
          else
-           bsi_next (&bsi);
+           gsi_next (&gsi);
        }
     }
 
@@ -5607,22 +5568,19 @@ ipa_pta_execute (void)
 
          FOR_EACH_BB_FN (bb, func)
            {
-             block_stmt_iterator bsi;
-             tree phi;
+             gimple_stmt_iterator gsi;
 
-             for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+             for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
+                  gsi_next (&gsi))
                {
-                 if (is_gimple_reg (PHI_RESULT (phi)))
-                   {
-                     find_func_aliases (phi);
-                   }
-               }
+                 gimple phi = gsi_stmt (gsi);
 
-             for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
-               {
-                 tree stmt = bsi_stmt (bsi);
-                 find_func_aliases (stmt);
+                 if (is_gimple_reg (gimple_phi_result (phi)))
+                   find_func_aliases (phi);
                }
+
+             for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+               find_func_aliases (gsi_stmt (gsi));
            }
          current_function_decl = old_func_decl;
          pop_cfun ();
@@ -5708,5 +5666,4 @@ delete_alias_heapvars (void)
   heapvar_for_stmt = NULL;
 }
 
-
 #include "gt-tree-ssa-structalias.h"
index 0d0d6bd..e684f5d 100644
@@ -25,8 +25,8 @@ struct constraint;
 typedef struct constraint *constraint_t;
 
 /* In tree-ssa-alias.c.  */
-enum escape_type is_escape_site (tree);
-void update_mem_sym_stats_from_stmt (tree, tree, long, long);
+enum escape_type is_escape_site (gimple);
+void update_mem_sym_stats_from_stmt (tree, gimple, long, long);
 
 /* In tree-ssa-structalias.c.  */
 extern void compute_points_to_sets (void);
index e028675..099c197 100644
@@ -89,7 +89,7 @@ along with GCC; see the file COPYING3.  If not see
    TER implements this but stepping through the instructions in a block and
    tracking potential expressions for replacement, and the partitions they are
    dependent on.  Expressions are represented by the SSA_NAME_VERSION of the
-   DEF on the LHS of a GIMPLE_MODIFY_STMT and the expression is the RHS.
+   DEF on the LHS of a GIMPLE_ASSIGN and the expression is the RHS.
 
    When a stmt is determined to be a possible replacement expression, the
    following steps are taken:
@@ -159,7 +159,7 @@ typedef struct temp_expr_table_d
 {
   var_map map;
   bitmap *partition_dependencies;      /* Partitions expr is dependent on.  */
-  tree *replaceable_expressions;       /* Replacement expression table.  */
+  gimple *replaceable_expressions;     /* Replacement expression table.  */
   bitmap *expr_decl_uids;              /* Base uids of exprs.  */
   bitmap *kill_list;                   /* Expr's killed by a partition.  */
   int virtual_partition;               /* Pseudo partition for virtual ops.  */
@@ -216,10 +216,10 @@ new_temp_expr_table (var_map map)
 /* Free TER table T.  If there are valid replacements, return the expression 
    vector.  */
 
-static tree *
+static gimple *
 free_temp_expr_table (temp_expr_table_p t)
 {
-  tree *ret = NULL;
+  gimple *ret = NULL;
   unsigned i;
 
 #ifdef ENABLE_CHECKING
@@ -255,7 +255,7 @@ version_to_be_replaced_p (temp_expr_table_p tab, int version)
 {
   if (!tab->replaceable_expressions)
     return false;
-  return tab->replaceable_expressions[version] != NULL_TREE;
+  return tab->replaceable_expressions[version] != NULL;
 }
 
 
@@ -360,20 +360,20 @@ add_dependence (temp_expr_table_p tab, int version, tree var)
 /* Return TRUE if expression STMT is suitable for replacement.  */
 
 static inline bool
-is_replaceable_p (tree stmt)
+is_replaceable_p (gimple stmt)
 {
-  tree call_expr;
   use_operand_p use_p;
-  tree def, use_stmt;
+  tree def;
+  gimple use_stmt;
   location_t locus1, locus2;
   tree block1, block2;
 
   /* Only consider modify stmts.  */
-  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+  if (!is_gimple_assign (stmt))
     return false;
 
   /* If the statement may throw an exception, it cannot be replaced.  */
-  if (tree_could_throw_p (stmt))
+  if (stmt_could_throw_p (stmt))
     return false;
 
   /* Punt if there is more than 1 def.  */
@@ -386,33 +386,21 @@ is_replaceable_p (tree stmt)
     return false;
 
   /* If the use isn't in this block, it won't be replaced either.  */
-  if (bb_for_stmt (use_stmt) != bb_for_stmt (stmt))
+  if (gimple_bb (use_stmt) != gimple_bb (stmt))
     return false;
 
-  if (GIMPLE_STMT_P (stmt))
-    {
-      locus1 = GIMPLE_STMT_LOCUS (stmt);
-      block1 = GIMPLE_STMT_BLOCK (stmt);
-    }
-  else
-    {
-      locus1 = *EXPR_LOCUS (stmt);
-      block1 = TREE_BLOCK (stmt);
-    }
-  if (GIMPLE_STMT_P (use_stmt))
-    {
-      locus2 = GIMPLE_STMT_LOCUS (use_stmt);
-      block2 = GIMPLE_STMT_BLOCK (use_stmt);
-    }
-  if (TREE_CODE (use_stmt) == PHI_NODE)
+  locus1 = gimple_location (stmt);
+  block1 = gimple_block (stmt);
+
+  if (gimple_code (use_stmt) == GIMPLE_PHI)
     {
       locus2 = 0;
       block2 = NULL_TREE;
     }
   else
     {
-      locus2 = *EXPR_LOCUS (use_stmt);
-      block2 = TREE_BLOCK (use_stmt);
+      locus2 = gimple_location (use_stmt);
+      block2 = gimple_block (use_stmt);
     }
 
   if (!optimize
@@ -420,7 +408,7 @@ is_replaceable_p (tree stmt)
     return false;
 
   /* Used in this block, but at the TOP of the block, not the end.  */
-  if (TREE_CODE (use_stmt) == PHI_NODE)
+  if (gimple_code (use_stmt) == GIMPLE_PHI)
     return false;
 
   /* There must be no VDEFs.  */
@@ -428,26 +416,26 @@ is_replaceable_p (tree stmt)
     return false;
 
   /* Without alias info we can't move around loads.  */
-  if (stmt_ann (stmt)->references_memory && !optimize)
+  if (gimple_references_memory_p (stmt) && !optimize)
     return false;
 
   /* Float expressions must go through memory if float-store is on.  */
   if (flag_float_store 
-      && FLOAT_TYPE_P (TREE_TYPE (GENERIC_TREE_OPERAND (stmt, 1))))
+      && FLOAT_TYPE_P (gimple_expr_type (stmt)))
     return false;
 
   /* An assignment with a register variable on the RHS is not
      replaceable.  */
-  if (TREE_CODE (GENERIC_TREE_OPERAND (stmt, 1)) == VAR_DECL
-      && DECL_HARD_REGISTER (GENERIC_TREE_OPERAND (stmt, 1)))
+  if (gimple_assign_rhs_code (stmt) == VAR_DECL
+      && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt)))
     return false;
 
   /* No function calls can be replaced.  */
-  if ((call_expr = get_call_expr_in (stmt)) != NULL_TREE)
+  if (is_gimple_call (stmt))
     return false;
 
   /* Leave any stmt with volatile operands alone as well.  */
-  if (stmt_ann (stmt)->has_volatile_ops)
+  if (gimple_has_volatile_ops (stmt))
     return false;
 
   return true;
@@ -480,7 +468,7 @@ finished_with_expr (temp_expr_table_p tab, int version, bool free_expr)
 /* Create an expression entry for a replaceable expression.  */
 
 static void 
-process_replaceable (temp_expr_table_p tab, tree stmt)
+process_replaceable (temp_expr_table_p tab, gimple stmt)
 {
   tree var, def, basevar;
   int version;
@@ -574,7 +562,7 @@ mark_replaceable (temp_expr_table_p tab, tree var, bool more_replacing)
 
   /* Set the replaceable expression.  */
   if (!tab->replaceable_expressions)
-    tab->replaceable_expressions = XCNEWVEC (tree, num_ssa_names + 1);
+    tab->replaceable_expressions = XCNEWVEC (gimple, num_ssa_names + 1);
   tab->replaceable_expressions[version] = SSA_NAME_DEF_STMT (var);
 }
 
@@ -585,20 +573,20 @@ mark_replaceable (temp_expr_table_p tab, tree var, bool more_replacing)
 static void
 find_replaceable_in_bb (temp_expr_table_p tab, basic_block bb)
 {
-  block_stmt_iterator bsi;
-  tree stmt, def, use;
-  stmt_ann_t ann;
+  gimple_stmt_iterator bsi;
+  gimple stmt;
+  tree def, use;
   int partition;
   var_map map = tab->map;
   ssa_op_iter iter;
   bool stmt_replaceable;
 
-  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+  for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
     {
-      stmt = bsi_stmt (bsi);
-      ann = stmt_ann (stmt);
+      stmt = gsi_stmt (bsi);
 
       stmt_replaceable = is_replaceable_p (stmt);
+
       /* Determine if this stmt finishes an existing expression.  */
       FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
        {
@@ -627,7 +615,7 @@ find_replaceable_in_bb (temp_expr_table_p tab, basic_block bb)
              /* Mark expression as replaceable unless stmt is volatile or the 
                 def variable has the same root variable as something in the 
                 substitution list.  */
-             if (ann->has_volatile_ops || same_root_var)
+             if (gimple_has_volatile_ops (stmt) || same_root_var)
                finished_with_expr (tab, ver, true);
              else
                mark_replaceable (tab, use, stmt_replaceable);
@@ -665,12 +653,12 @@ find_replaceable_in_bb (temp_expr_table_p tab, basic_block bb)
    NULL is returned by the function, otherwise an expression vector indexed
    by SSA_NAME version numbers.  */
 
-extern tree *
+extern gimple *
 find_replaceable_exprs (var_map map)
 {
   basic_block bb;
   temp_expr_table_p table;
-  tree *ret;
+  gimple *ret;
 
   table = new_temp_expr_table (map);
   FOR_EACH_BB (bb)
@@ -700,23 +688,20 @@ find_replaceable_exprs (var_map map)
 
 /* Dump TER expression table EXPR to file F.  */
 
-extern void
-dump_replaceable_exprs (FILE *f, tree *expr)
+void
+dump_replaceable_exprs (FILE *f, gimple *expr)
 {
-  tree stmt, var;
-  int x;
+  tree var;
+  unsigned x;
 
   fprintf (f, "\nReplacing Expressions\n");
-  for (x = 0; x < (int)num_ssa_names; x++)
+  for (x = 0; x < num_ssa_names; x++)
     if (expr[x])
       {
-        stmt = expr[x];
-       var = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_DEF);
-       gcc_assert (var != NULL_TREE);
+       var = ssa_name (x);
        print_generic_expr (f, var, TDF_SLIM);
        fprintf (f, " replace with --> ");
-       print_generic_expr (f, GENERIC_TREE_OPERAND (stmt, 1),
-                           TDF_SLIM);
+       print_gimple_stmt (f, expr[x], 0, TDF_SLIM);
        fprintf (f, "\n");
       }
   fprintf (f, "\n");
@@ -728,7 +713,7 @@ dump_replaceable_exprs (FILE *f, tree *expr)
    exclusively to debug TER.  F is the place to send debug info and T is the
    table being debugged.  */
 
-extern void
+void
 debug_ter (FILE *f, temp_expr_table_p t)
 {
   unsigned x, y;
index 462da5d..6a82161 100644
@@ -55,7 +55,7 @@ static int stmt_count;
 bool
 potentially_threadable_block (basic_block bb)
 {
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
 
   /* If BB has a single successor or a single predecessor, then
      there is no threading opportunity.  */
@@ -64,12 +64,12 @@ potentially_threadable_block (basic_block bb)
 
   /* If BB does not end with a conditional, switch or computed goto,
      then there is no threading opportunity.  */
-  bsi = bsi_last (bb);
-  if (bsi_end_p (bsi)
-      || ! bsi_stmt (bsi)
-      || (TREE_CODE (bsi_stmt (bsi)) != COND_EXPR
-         && TREE_CODE (bsi_stmt (bsi)) != GOTO_EXPR
-         && TREE_CODE (bsi_stmt (bsi)) != SWITCH_EXPR))
+  gsi = gsi_last_bb (bb);
+  if (gsi_end_p (gsi)
+      || ! gsi_stmt (gsi)
+      || (gimple_code (gsi_stmt (gsi)) != GIMPLE_COND
+         && gimple_code (gsi_stmt (gsi)) != GIMPLE_GOTO
+         && gimple_code (gsi_stmt (gsi)) != GIMPLE_SWITCH))
     return false;
 
   return true;
@@ -80,28 +80,27 @@ potentially_threadable_block (basic_block bb)
    BB.  If no such ASSERT_EXPR is found, return OP.  */
 
 static tree
-lhs_of_dominating_assert (tree op, basic_block bb, tree stmt)
+lhs_of_dominating_assert (tree op, basic_block bb, gimple stmt)
 {
   imm_use_iterator imm_iter;
-  tree use_stmt;
+  gimple use_stmt;
   use_operand_p use_p;
 
   FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
     {
       use_stmt = USE_STMT (use_p);
       if (use_stmt != stmt
-          && TREE_CODE (use_stmt) == GIMPLE_MODIFY_STMT
-          && TREE_CODE (GIMPLE_STMT_OPERAND (use_stmt, 1)) == ASSERT_EXPR
-          && TREE_OPERAND (GIMPLE_STMT_OPERAND (use_stmt, 1), 0) == op
-         && dominated_by_p (CDI_DOMINATORS, bb, bb_for_stmt (use_stmt)))
+          && gimple_assign_single_p (use_stmt)
+          && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == ASSERT_EXPR
+          && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == op
+         && dominated_by_p (CDI_DOMINATORS, bb, gimple_bb (use_stmt)))
        {
-         return GIMPLE_STMT_OPERAND (use_stmt, 0);
+         return gimple_assign_lhs (use_stmt);
        }
     }
   return op;
 }
 
-
 /* We record temporary equivalences created by PHI nodes or
    statements within the target block.  Doing so allows us to
    identify more jump threading opportunities, even in blocks
@@ -161,23 +160,24 @@ record_temporary_equivalence (tree x, tree y, VEC(tree, heap) **stack)
 static bool
 record_temporary_equivalences_from_phis (edge e, VEC(tree, heap) **stack)
 {
-  tree phi;
+  gimple_stmt_iterator gsi;
 
   /* Each PHI creates a temporary equivalence, record them.
      These are context sensitive equivalences and will be removed
      later.  */
-  for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
+  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
     {
+      gimple phi = gsi_stmt (gsi);
       tree src = PHI_ARG_DEF_FROM_EDGE (phi, e);
-      tree dst = PHI_RESULT (phi);
+      tree dst = gimple_phi_result (phi);
 
       /* If the desired argument is not the same as this PHI's result 
         and it is set by a PHI in E->dest, then we can not thread
         through E->dest.  */
       if (src != dst
          && TREE_CODE (src) == SSA_NAME
-         && TREE_CODE (SSA_NAME_DEF_STMT (src)) == PHI_NODE
-         && bb_for_stmt (SSA_NAME_DEF_STMT (src)) == e->dest)
+         && gimple_code (SSA_NAME_DEF_STMT (src)) == GIMPLE_PHI
+         && gimple_bb (SSA_NAME_DEF_STMT (src)) == e->dest)
        return false;
 
       /* We consider any non-virtual PHI as a statement since it
@@ -190,6 +190,56 @@ record_temporary_equivalences_from_phis (edge e, VEC(tree, heap) **stack)
   return true;
 }
 
+/* Fold the RHS of an assignment statement and return it as a tree.
+   May return NULL_TREE if no simplification is possible.  */
+
+static tree
+fold_assignment_stmt (gimple stmt)
+{
+  enum tree_code subcode = gimple_assign_rhs_code (stmt);
+
+  switch (get_gimple_rhs_class (subcode))
+    {
+    case GIMPLE_SINGLE_RHS:
+      {
+        tree rhs = gimple_assign_rhs1 (stmt);
+
+        if (TREE_CODE (rhs) == COND_EXPR)
+          {
+            /* Sadly, we have to handle conditional assignments specially
+               here, because fold expects all the operands of an expression
+               to be folded before the expression itself is folded, but we
+               can't just substitute the folded condition here.  */
+            tree cond = fold (COND_EXPR_COND (rhs));
+            if (cond == boolean_true_node)
+              rhs = COND_EXPR_THEN (rhs);
+            else if (cond == boolean_false_node)
+              rhs = COND_EXPR_ELSE (rhs);
+          }
+
+        return fold (rhs);
+      }
+      break;
+    case GIMPLE_UNARY_RHS:
+      {
+        tree lhs = gimple_assign_lhs (stmt);
+        tree op0 = gimple_assign_rhs1 (stmt);
+        return fold_unary (subcode, TREE_TYPE (lhs), op0);
+      }
+      break;
+    case GIMPLE_BINARY_RHS:
+      {
+        tree lhs = gimple_assign_lhs (stmt);
+        tree op0 = gimple_assign_rhs1 (stmt);
+        tree op1 = gimple_assign_rhs2 (stmt);
+        return fold_binary (subcode, TREE_TYPE (lhs), op0, op1);
+      }
+      break;
+    default:
+      gcc_unreachable ();
+    }
+}
+
 /* Try to simplify each statement in E->dest, ultimately leading to
    a simplification of the COND_EXPR at the end of E->dest.
 
@@ -204,17 +254,17 @@ record_temporary_equivalences_from_phis (edge e, VEC(tree, heap) **stack)
 
    If we are able to simplify a statement into the form
    SSA_NAME = (SSA_NAME | gimple invariant), then we can record
-   a context sensitive equivalency which may help us simplify
+   a context sensitive equivalence which may help us simplify
    later statements in E->dest.  */
 
-static tree
+static gimple
 record_temporary_equivalences_from_stmts_at_dest (edge e,
                                                  VEC(tree, heap) **stack,
-                                                 tree (*simplify) (tree,
-                                                                   tree))
+                                                 tree (*simplify) (gimple,
+                                                                   gimple))
 {
-  block_stmt_iterator bsi;
-  tree stmt = NULL;
+  gimple stmt = NULL;
+  gimple_stmt_iterator gsi;
   int max_stmt_count;
 
   max_stmt_count = PARAM_VALUE (PARAM_MAX_JUMP_THREAD_DUPLICATION_STMTS);
@@ -223,21 +273,20 @@ record_temporary_equivalences_from_stmts_at_dest (edge e,
      we discover.  Note any equivalences we discover are context
      sensitive (ie, are dependent on traversing E) and must be unwound
      when we're finished processing E.  */
-  for (bsi = bsi_start (e->dest); ! bsi_end_p (bsi); bsi_next (&bsi))
+  for (gsi = gsi_start_bb (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
     {
       tree cached_lhs = NULL;
-      tree rhs;
 
-      stmt = bsi_stmt (bsi);
+      stmt = gsi_stmt (gsi);
 
       /* Ignore empty statements and labels.  */
-      if (IS_EMPTY_STMT (stmt) || TREE_CODE (stmt) == LABEL_EXPR)
+      if (gimple_code (stmt) == GIMPLE_NOP || gimple_code (stmt) == GIMPLE_LABEL)
        continue;
 
       /* If the statement has volatile operands, then we assume we
         can not thread through this block.  This is overly
         conservative in some ways.  */
-      if (TREE_CODE (stmt) == ASM_EXPR && ASM_VOLATILE_P (stmt))
+      if (gimple_code (stmt) == GIMPLE_ASM && gimple_asm_volatile_p (stmt))
        return NULL;
 
       /* If duplicating this block is going to cause too much code
@@ -246,15 +295,16 @@ record_temporary_equivalences_from_stmts_at_dest (edge e,
       if (stmt_count > max_stmt_count)
        return NULL;
 
-      /* If this is not a GIMPLE_MODIFY_STMT which sets an SSA_NAME to a new
+      /* If this is not a statement that sets an SSA_NAME to a new
         value, then do not try to simplify this statement as it will
         not simplify in any way that is helpful for jump threading.  */
-      if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
-         || TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) != SSA_NAME)
+      if ((gimple_code (stmt) != GIMPLE_ASSIGN
+           || TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
+          && (gimple_code (stmt) != GIMPLE_CALL
+              || gimple_call_lhs (stmt) == NULL_TREE
+              || TREE_CODE (gimple_call_lhs (stmt)) != SSA_NAME))
        continue;
 
-      rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-
       /* The result of __builtin_object_size depends on all the arguments
         of a phi node. Temporarily using only one edge produces invalid
         results. For example
@@ -272,9 +322,9 @@ record_temporary_equivalences_from_stmts_at_dest (edge e,
         remaining bytes. If we use only one edge on the phi, the result will
         change to be the remaining bytes for the corresponding phi argument. */
 
-      if (TREE_CODE (rhs) == CALL_EXPR)
+      if (is_gimple_call (stmt))
        {
-         tree fndecl = get_callee_fndecl (rhs);
+         tree fndecl = gimple_call_fndecl (stmt);
          if (fndecl && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
            continue;
        }
@@ -286,16 +336,18 @@ record_temporary_equivalences_from_stmts_at_dest (edge e,
 
         Handle simple copy operations as well as implied copies from
         ASSERT_EXPRs.  */
-      if (TREE_CODE (rhs) == SSA_NAME)
-       cached_lhs = rhs;
-      else if (TREE_CODE (rhs) == ASSERT_EXPR)
-       cached_lhs = TREE_OPERAND (rhs, 0);
+      if (gimple_assign_single_p (stmt)
+          && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
+       cached_lhs = gimple_assign_rhs1 (stmt);
+      else if (gimple_assign_single_p (stmt)
+               && TREE_CODE (gimple_assign_rhs1 (stmt)) == ASSERT_EXPR)
+       cached_lhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
       else
        {
          /* A statement that is not a trivial copy or ASSERT_EXPR.
             We're going to temporarily copy propagate the operands
             and see if that allows us to simplify this statement.  */
-         tree *copy, pre_fold_expr;
+         tree *copy;
          ssa_op_iter iter;
          use_operand_p use_p;
          unsigned int num, i = 0;
@@ -318,33 +370,17 @@ record_temporary_equivalences_from_stmts_at_dest (edge e,
            }
 
          /* Try to fold/lookup the new expression.  Inserting the
-            expression into the hash table is unlikely to help
-            Sadly, we have to handle conditional assignments specially
-            here, because fold expects all the operands of an expression
-            to be folded before the expression itself is folded, but we
-            can't just substitute the folded condition here.  */
-         if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == COND_EXPR)
-           {
-             tree cond = COND_EXPR_COND (GIMPLE_STMT_OPERAND (stmt, 1));
-             cond = fold (cond);
-             if (cond == boolean_true_node)
-               pre_fold_expr = COND_EXPR_THEN (GIMPLE_STMT_OPERAND (stmt, 1));
-             else if (cond == boolean_false_node)
-               pre_fold_expr = COND_EXPR_ELSE (GIMPLE_STMT_OPERAND (stmt, 1));
-             else
-               pre_fold_expr = GIMPLE_STMT_OPERAND (stmt, 1);
-           }
+            expression into the hash table is unlikely to help.  */
+          if (is_gimple_call (stmt))
+            cached_lhs = fold_call_stmt (stmt, false);
          else
-           pre_fold_expr = GIMPLE_STMT_OPERAND (stmt, 1);
-
-         if (pre_fold_expr)
-           {
-             cached_lhs = fold (pre_fold_expr);
-             if (TREE_CODE (cached_lhs) != SSA_NAME
-                 && !is_gimple_min_invariant (cached_lhs))
-               cached_lhs = (*simplify) (stmt, stmt);
-           }
+            cached_lhs = fold_assignment_stmt (stmt);
 
+          if (!cached_lhs
+              || (TREE_CODE (cached_lhs) != SSA_NAME
+                  && !is_gimple_min_invariant (cached_lhs)))
+            cached_lhs = (*simplify) (stmt, stmt);
+          
          /* Restore the statement's original uses/defs.  */
          i = 0;
          FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
@@ -358,16 +394,14 @@ record_temporary_equivalences_from_stmts_at_dest (edge e,
       if (cached_lhs
          && (TREE_CODE (cached_lhs) == SSA_NAME
              || is_gimple_min_invariant (cached_lhs)))
-       record_temporary_equivalence (GIMPLE_STMT_OPERAND (stmt, 0),
-                                     cached_lhs,
-                                     stack);
+       record_temporary_equivalence (gimple_get_lhs (stmt), cached_lhs, stack);
     }
   return stmt;
 }
 
 /* Simplify the control statement at the end of the block E->dest.
 
-   To avoid allocating memory unnecessarily, a scratch COND_EXPR
+   To avoid allocating memory unnecessarily, a scratch GIMPLE_COND
    is available to use/clobber in DUMMY_COND.
 
    Use SIMPLIFY (a pointer to a callback function) to further simplify
@@ -378,30 +412,24 @@ record_temporary_equivalences_from_stmts_at_dest (edge e,
 
 static tree
 simplify_control_stmt_condition (edge e,
-                                tree stmt,
-                                tree dummy_cond,
-                                tree (*simplify) (tree, tree),
+                                gimple stmt,
+                                gimple dummy_cond,
+                                tree (*simplify) (gimple, gimple),
                                 bool handle_dominating_asserts)
 {
   tree cond, cached_lhs;
-
-  if (TREE_CODE (stmt) == COND_EXPR)
-    cond = COND_EXPR_COND (stmt);
-  else if (TREE_CODE (stmt) == GOTO_EXPR)
-    cond = GOTO_DESTINATION (stmt);
-  else
-    cond = SWITCH_COND (stmt);
+  enum gimple_code code = gimple_code (stmt);
 
   /* For comparisons, we have to update both operands, then try
      to simplify the comparison.  */
-  if (COMPARISON_CLASS_P (cond))
+  if (code == GIMPLE_COND)
     {
       tree op0, op1;
       enum tree_code cond_code;
 
-      op0 = TREE_OPERAND (cond, 0);
-      op1 = TREE_OPERAND (cond, 1);
-      cond_code = TREE_CODE (cond);
+      op0 = gimple_cond_lhs (stmt);
+      op1 = gimple_cond_rhs (stmt);
+      cond_code = gimple_cond_code (stmt);
 
       /* Get the current value of both operands.  */
       if (TREE_CODE (op0) == SSA_NAME)
@@ -434,11 +462,10 @@ simplify_control_stmt_condition (edge e,
         example, op0 might be a constant while op1 is an
         SSA_NAME.  Failure to canonicalize will cause us to
         miss threading opportunities.  */
-      if (cond_code != SSA_NAME
-         && tree_swap_operands_p (op0, op1, false))
+      if (tree_swap_operands_p (op0, op1, false))
        {
          tree tmp;
-         cond_code = swap_tree_comparison (TREE_CODE (cond));
+         cond_code = swap_tree_comparison (cond_code);
          tmp = op0;
          op0 = op1;
          op1 = tmp;
@@ -446,34 +473,47 @@ simplify_control_stmt_condition (edge e,
 
       /* Stuff the operator and operands into our dummy conditional
         expression.  */
-      TREE_SET_CODE (COND_EXPR_COND (dummy_cond), cond_code);
-      TREE_OPERAND (COND_EXPR_COND (dummy_cond), 0) = op0;
-      TREE_OPERAND (COND_EXPR_COND (dummy_cond), 1) = op1;
+      gimple_cond_set_code (dummy_cond, cond_code);
+      gimple_cond_set_lhs (dummy_cond, op0);
+      gimple_cond_set_rhs (dummy_cond, op1);
 
       /* We absolutely do not care about any type conversions;
          we only care about a zero/nonzero value.  */
       fold_defer_overflow_warnings ();
 
-      cached_lhs = fold (COND_EXPR_COND (dummy_cond));
-      while (CONVERT_EXPR_P (cached_lhs))
-       cached_lhs = TREE_OPERAND (cached_lhs, 0);
+      cached_lhs = fold_binary (cond_code, boolean_type_node, op0, op1);
+      if (cached_lhs)
+        while (TREE_CODE (cached_lhs) == NOP_EXPR
+               || TREE_CODE (cached_lhs) == CONVERT_EXPR)
+          cached_lhs = TREE_OPERAND (cached_lhs, 0);
 
-      fold_undefer_overflow_warnings (is_gimple_min_invariant (cached_lhs),
+      fold_undefer_overflow_warnings ((cached_lhs
+                                       && is_gimple_min_invariant (cached_lhs)),
                                      stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
 
       /* If we have not simplified the condition down to an invariant,
         then use the pass specific callback to simplify the condition.  */
-      if (! is_gimple_min_invariant (cached_lhs))
-       cached_lhs = (*simplify) (dummy_cond, stmt);
+      if (!cached_lhs
+          || !is_gimple_min_invariant (cached_lhs))
+        cached_lhs = (*simplify) (dummy_cond, stmt);
+
+      return cached_lhs;
     }
 
+  if (code == GIMPLE_SWITCH)
+    cond = gimple_switch_index (stmt);
+  else if (code == GIMPLE_GOTO)
+    cond = gimple_goto_dest (stmt);
+  else
+    gcc_unreachable ();
+
   /* We can have conditionals which just test the state of a variable
      rather than use a relational operator.  These are simpler to handle.  */
-  else if (TREE_CODE (cond) == SSA_NAME)
+  if (TREE_CODE (cond) == SSA_NAME)
     {
       cached_lhs = cond;
 
-      /* Get the variable's current value from the equivalency chains.
+      /* Get the variable's current value from the equivalence chains.
 
         It is possible to get loops in the SSA_NAME_VALUE chains
         (consider threading the backedge of a loop where we have
@@ -527,13 +567,13 @@ simplify_control_stmt_condition (edge e,
    SIMPLIFY is a pass-specific function used to simplify statements.  */
 
 void
-thread_across_edge (tree dummy_cond,
+thread_across_edge (gimple dummy_cond,
                    edge e,
                    bool handle_dominating_asserts,
                    VEC(tree, heap) **stack,
-                   tree (*simplify) (tree, tree))
+                   tree (*simplify) (gimple, gimple))
 {
-  tree stmt;
+  gimple stmt;
 
   /* If E is a backedge, then we want to verify that the COND_EXPR,
      SWITCH_EXPR or GOTO_EXPR at the end of e->dest is not affected
@@ -543,15 +583,15 @@ thread_across_edge (tree dummy_cond,
     {
       ssa_op_iter iter;
       use_operand_p use_p;
-      tree last = bsi_stmt (bsi_last (e->dest));
+      gimple last = gsi_stmt (gsi_last_bb (e->dest));
 
       FOR_EACH_SSA_USE_OPERAND (use_p, last, iter, SSA_OP_USE | SSA_OP_VUSE)
        {
          tree use = USE_FROM_PTR (use_p);
 
           if (TREE_CODE (use) == SSA_NAME
-             && TREE_CODE (SSA_NAME_DEF_STMT (use)) != PHI_NODE
-             && bb_for_stmt (SSA_NAME_DEF_STMT (use)) == e->dest)
+             && gimple_code (SSA_NAME_DEF_STMT (use)) != GIMPLE_PHI
+             && gimple_bb (SSA_NAME_DEF_STMT (use)) == e->dest)
            goto fail;
        }
     }
@@ -570,9 +610,9 @@ thread_across_edge (tree dummy_cond,
 
   /* If we stopped at a COND_EXPR or SWITCH_EXPR, see if we know which arm
      will be taken.  */
-  if (TREE_CODE (stmt) == COND_EXPR
-      || TREE_CODE (stmt) == GOTO_EXPR
-      || TREE_CODE (stmt) == SWITCH_EXPR)
+  if (gimple_code (stmt) == GIMPLE_COND
+      || gimple_code (stmt) == GIMPLE_GOTO
+      || gimple_code (stmt) == GIMPLE_SWITCH)
     {
       tree cond;
 
index 54f87af..dedd00e 100644
@@ -169,23 +169,23 @@ struct thread_stats_d thread_stats;
 static void
 remove_ctrl_stmt_and_useless_edges (basic_block bb, basic_block dest_bb)
 {
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   edge e;
   edge_iterator ei;
 
-  bsi = bsi_last (bb);
+  gsi = gsi_last_bb (bb);
 
   /* If the duplicate ends with a control statement, then remove it.
 
      Note that if we are duplicating the template block rather than the
      original basic block, then the duplicate might not have any real
      statements in it.  */
-  if (!bsi_end_p (bsi)
-      && bsi_stmt (bsi)
-      && (TREE_CODE (bsi_stmt (bsi)) == COND_EXPR
-         || TREE_CODE (bsi_stmt (bsi)) == GOTO_EXPR
-         || TREE_CODE (bsi_stmt (bsi)) == SWITCH_EXPR))
-    bsi_remove (&bsi, true);
+  if (!gsi_end_p (gsi)
+      && gsi_stmt (gsi)
+      && (gimple_code (gsi_stmt (gsi)) == GIMPLE_COND
+         || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
+         || gimple_code (gsi_stmt (gsi)) == GIMPLE_SWITCH))
+    gsi_remove (&gsi, true);
 
   for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
     {
@@ -311,7 +311,7 @@ static void
 create_edge_and_update_destination_phis (struct redirection_data *rd)
 {
   edge e = make_edge (rd->dup_block, rd->outgoing_edge->dest, EDGE_FALLTHRU);
-  tree phi;
+  gimple_stmt_iterator gsi;
 
   rescan_loop_exit (e, true, false);
   e->probability = REG_BR_PROB_BASE;
@@ -322,10 +322,12 @@ create_edge_and_update_destination_phis (struct redirection_data *rd)
      from the duplicate block, then we will need to add a new argument
      to them.  The argument should have the same value as the argument
      associated with the outgoing edge stored in RD.  */
-  for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
+  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
     {
+      gimple phi = gsi_stmt (gsi);
+
       int indx = rd->outgoing_edge->dest_idx;
-      add_phi_arg (phi, PHI_ARG_DEF (phi, indx), e);
+      add_phi_arg (phi, gimple_phi_arg_def (phi, indx), e);
     }
 }
 
@@ -468,24 +470,24 @@ redirect_edges (void **slot, void *data)
 static bool
 redirection_block_p (basic_block bb)
 {
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
 
   /* Advance to the first executable statement.  */
-  bsi = bsi_start (bb);
-  while (!bsi_end_p (bsi)
-          && (TREE_CODE (bsi_stmt (bsi)) == LABEL_EXPR
-              || IS_EMPTY_STMT (bsi_stmt (bsi))))
-    bsi_next (&bsi);
-
+  gsi = gsi_start_bb (bb);
+  while (!gsi_end_p (gsi)
+         && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
+             || gimple_nop_p (gsi_stmt (gsi))))
+    gsi_next (&gsi);
+  
   /* Check if this is an empty block.  */
-  if (bsi_end_p (bsi))
+  if (gsi_end_p (gsi))
     return true;
 
   /* Test that we've reached the terminating control statement.  */
-  return bsi_stmt (bsi)
-        && (TREE_CODE (bsi_stmt (bsi)) == COND_EXPR
-            || TREE_CODE (bsi_stmt (bsi)) == GOTO_EXPR
-            || TREE_CODE (bsi_stmt (bsi)) == SWITCH_EXPR);
+  return gsi_stmt (gsi)
+         && (gimple_code (gsi_stmt (gsi)) == GIMPLE_COND
+             || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
+             || gimple_code (gsi_stmt (gsi)) == GIMPLE_SWITCH);
 }
 
 /* BB is a block which ends with a COND_EXPR or SWITCH_EXPR and when BB
index 0d19c2d..335d7ae 100644
@@ -1,5 +1,5 @@
 /* Routines for discovering and unpropagating edge equivalences.
-   Copyright (C) 2005, 2007 Free Software Foundation, Inc.
+   Copyright (C) 2005, 2007, 2008 Free Software Foundation, Inc.
 
 This file is part of GCC.
 
@@ -65,50 +65,35 @@ associate_equivalences_with_edges (void)
      then it might create a useful equivalence.  */
   FOR_EACH_BB (bb)
     {
-      block_stmt_iterator bsi = bsi_last (bb);
-      tree stmt;
+      gimple_stmt_iterator gsi = gsi_last_bb (bb);
+      gimple stmt;
 
       /* If the block does not end with a COND_EXPR or SWITCH_EXPR
         then there is nothing to do.  */
-      if (bsi_end_p (bsi))
+      if (gsi_end_p (gsi))
        continue;
 
-      stmt = bsi_stmt (bsi);
+      stmt = gsi_stmt (gsi);
 
       if (!stmt)
        continue;
 
       /* A COND_EXPR may create an equivalency in a variety of different
         ways.  */
-      if (TREE_CODE (stmt) == COND_EXPR)
+      if (gimple_code (stmt) == GIMPLE_COND)
        {
-         tree cond = COND_EXPR_COND (stmt);
          edge true_edge;
          edge false_edge;
          struct edge_equivalency *equivalency;
+         enum tree_code code = gimple_cond_code (stmt);
 
          extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
 
-         /* If the conditional is a single variable 'X', record 'X = 1'
-            for the true edge and 'X = 0' on the false edge.  */
-         if (TREE_CODE (cond) == SSA_NAME
-             && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (cond))
-           {
-             equivalency = XNEW (struct edge_equivalency);
-             equivalency->rhs = constant_boolean_node (1, TREE_TYPE (cond));
-             equivalency->lhs = cond;
-             true_edge->aux = equivalency;
-
-             equivalency = XNEW (struct edge_equivalency);
-             equivalency->rhs = constant_boolean_node (0, TREE_TYPE (cond));
-             equivalency->lhs = cond;
-             false_edge->aux = equivalency;
-           }
          /* Equality tests may create one or two equivalences.  */
-         else if (TREE_CODE (cond) == EQ_EXPR || TREE_CODE (cond) == NE_EXPR)
+         if (code == EQ_EXPR || code == NE_EXPR)
            {
-             tree op0 = TREE_OPERAND (cond, 0);
-             tree op1 = TREE_OPERAND (cond, 1);
+             tree op0 = gimple_cond_lhs (stmt);
+             tree op1 = gimple_cond_rhs (stmt);
 
              /* Special case comparing booleans against a constant as we
                 know the value of OP0 on both arms of the branch.  i.e., we
@@ -118,7 +103,7 @@ associate_equivalences_with_edges (void)
                  && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
                  && is_gimple_min_invariant (op1))
                {
-                 if (TREE_CODE (cond) == EQ_EXPR)
+                 if (code == EQ_EXPR)
                    {
                      equivalency = XNEW (struct edge_equivalency);
                      equivalency->lhs = op0;
@@ -170,7 +155,7 @@ associate_equivalences_with_edges (void)
                  equivalency = XNEW (struct edge_equivalency);
                  equivalency->lhs = op0;
                  equivalency->rhs = op1;
-                 if (TREE_CODE (cond) == EQ_EXPR)
+                 if (code == EQ_EXPR)
                    true_edge->aux = equivalency;
                  else 
                    false_edge->aux = equivalency;
@@ -184,15 +169,14 @@ associate_equivalences_with_edges (void)
       /* For a SWITCH_EXPR, a case label which represents a single
         value and which is the only case label which reaches the
         target block creates an equivalence.  */
-      if (TREE_CODE (stmt) == SWITCH_EXPR)
+      else if (gimple_code (stmt) == GIMPLE_SWITCH)
        {
-         tree cond = SWITCH_COND (stmt);
+         tree cond = gimple_switch_index (stmt);
 
          if (TREE_CODE (cond) == SSA_NAME
              && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (cond))
            {
-             tree labels = SWITCH_LABELS (stmt);
-             int i, n_labels = TREE_VEC_LENGTH (labels);
+             int i, n_labels = gimple_switch_num_labels (stmt);
              tree *info = XCNEWVEC (tree, n_basic_blocks);
 
              /* Walk over the case label vector.  Record blocks
@@ -200,10 +184,9 @@ associate_equivalences_with_edges (void)
                 a single value.  */
              for (i = 0; i < n_labels; i++)
                {
-                 tree label = TREE_VEC_ELT (labels, i);
+                 tree label = gimple_switch_label (stmt, i);
                  basic_block bb = label_to_block (CASE_LABEL (label));
 
-
                  if (CASE_HIGH (label)
                      || !CASE_LOW (label)
                      || info[bb->index])
@@ -475,11 +458,12 @@ uncprop_into_successor_phis (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
      destination of the edge.  Then remove the temporary equivalence.  */
   FOR_EACH_EDGE (e, ei, bb->succs)
     {
-      tree phi = phi_nodes (e->dest);
+      gimple_seq phis = phi_nodes (e->dest);
+      gimple_stmt_iterator gsi;
 
       /* If there are no PHI nodes in this destination, then there is
         no sense in recording any equivalences.  */
-      if (!phi)
+      if (!phis)
        continue;
 
       /* Record any equivalency associated with E.  */
@@ -490,9 +474,10 @@ uncprop_into_successor_phis (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
        }
 
       /* Walk over the PHI nodes, unpropagating values.  */
-      for ( ; phi; phi = PHI_CHAIN (phi))
+      for (gsi = gsi_start (phis) ; !gsi_end_p (gsi); gsi_next (&gsi))
        {
          /* Sigh.  We'll have more efficient access to this one day.  */
+         gimple phi = gsi_stmt (gsi);
          tree arg = PHI_ARG_DEF (phi, e->dest_idx);
          struct equiv_hash_elt equiv_hash_elt;
          void **slot;
@@ -624,3 +609,4 @@ struct gimple_opt_pass pass_uncprop =
   TODO_dump_func | TODO_verify_ssa     /* todo_flags_finish */
  }
 };
+
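
The uncprop hunk above replaces the SWITCH_LABELS tree vector with the gimple_switch_num_labels/gimple_switch_label accessors.  A minimal, illustrative sketch of that access pattern (visit_switch_labels is an invented name, not part of the patch):

static void
visit_switch_labels (gimple stmt, void (*visit) (tree))
{
  unsigned i, n = gimple_switch_num_labels (stmt);

  /* Entry 0 is the default case; the rest are CASE_LABEL_EXPRs whose
     CASE_LOW/CASE_HIGH give the matched values.  */
  for (i = 0; i < n; i++)
    visit (gimple_switch_label (stmt, i));
}
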
index 2ac1c11..fbfcbf2 100644
@@ -37,7 +37,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "bitmap.h"
 #include "pointer-set.h"
 #include "tree-flow.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "tree-inline.h"
 #include "varray.h"
 #include "timevar.h"
@@ -171,19 +171,23 @@ redirect_edge_var_map_destroy (void)
 edge
 ssa_redirect_edge (edge e, basic_block dest)
 {
-  tree phi;
+  gimple_stmt_iterator gsi;
+  gimple phi;
 
   redirect_edge_var_map_clear (e);
 
   /* Remove the appropriate PHI arguments in E's destination block.  */
-  for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
+  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
     {
-      tree def = PHI_ARG_DEF (phi, e->dest_idx);
+      tree def;
+
+      phi = gsi_stmt (gsi);
+      def = gimple_phi_arg_def (phi, e->dest_idx);
 
       if (def == NULL_TREE)
        continue;
 
-      redirect_edge_var_map_add (e, PHI_RESULT (phi), def);
+      redirect_edge_var_map_add (e, gimple_phi_result (phi), def);
     }
 
   e = redirect_edge_succ_nodup (e, dest);
@@ -191,26 +195,31 @@ ssa_redirect_edge (edge e, basic_block dest)
   return e;
 }
 
+
 /* Add PHI arguments queued in PENDING_STMT list on edge E to edge
    E->dest.  */
 
 void
 flush_pending_stmts (edge e)
 {
-  tree phi;
+  gimple phi;
   edge_var_map_vector v;
   edge_var_map *vm;
   int i;
+  gimple_stmt_iterator gsi;
 
   v = redirect_edge_var_map_vector (e);
   if (!v)
     return;
 
-  for (phi = phi_nodes (e->dest), i = 0;
-       phi && VEC_iterate (edge_var_map, v, i, vm);
-       phi = PHI_CHAIN (phi), i++)
+  for (gsi = gsi_start_phis (e->dest), i = 0;
+       !gsi_end_p (gsi) && VEC_iterate (edge_var_map, v, i, vm);
+       gsi_next (&gsi), i++)
     {
-      tree def = redirect_edge_var_map_def (vm);
+      tree def;
+
+      phi = gsi_stmt (gsi);
+      def = redirect_edge_var_map_def (vm);
       add_phi_arg (phi, def, e);
     }
 
@@ -256,7 +265,7 @@ verify_ssa_name (tree ssa_name, bool is_virtual)
     }
 
   if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
-      && !IS_EMPTY_STMT (SSA_NAME_DEF_STMT (ssa_name)))
+      && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
     {
       error ("found a default name with a non-empty defining statement");
       return true;
@@ -279,7 +288,7 @@ verify_ssa_name (tree ssa_name, bool is_virtual)
 
 static bool
 verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
-           tree stmt, bool is_virtual)
+           gimple stmt, bool is_virtual)
 {
   if (verify_ssa_name (ssa_name, is_virtual))
     goto err;
@@ -297,9 +306,9 @@ verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
     {
       error ("SSA_NAME_DEF_STMT is wrong");
       fprintf (stderr, "Expected definition statement:\n");
-      print_generic_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), TDF_VOPS);
+      print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
       fprintf (stderr, "\nActual definition statement:\n");
-      print_generic_stmt (stderr, stmt, TDF_VOPS);
+      print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
       goto err;
     }
 
@@ -309,7 +318,7 @@ err:
   fprintf (stderr, "while verifying SSA_NAME ");
   print_generic_expr (stderr, ssa_name, 0);
   fprintf (stderr, " in statement\n");
-  print_generic_stmt (stderr, stmt, TDF_VOPS);
+  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
 
   return true;
 }
@@ -331,7 +340,7 @@ err:
 
 static bool
 verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
-           tree stmt, bool check_abnormal, bitmap names_defined_in_bb)
+           gimple stmt, bool check_abnormal, bitmap names_defined_in_bb)
 {
   bool err = false;
   tree ssa_name = USE_FROM_PTR (use_p);
@@ -342,7 +351,7 @@ verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
 
   TREE_VISITED (ssa_name) = 1;
 
-  if (IS_EMPTY_STMT (SSA_NAME_DEF_STMT (ssa_name))
+  if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
       && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
     ; /* Default definitions have empty statements.  Nothing to do.  */
   else if (!def_bb)
@@ -381,9 +390,9 @@ verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
     }
   else
     {
-      tree listvar ;
+      tree listvar;
       if (use_p->prev->use == NULL)
-       listvar = use_p->prev->stmt;
+       listvar = use_p->prev->loc.ssa_name;
       else
        listvar = USE_FROM_PTR (use_p->prev);
       if (listvar != ssa_name)
@@ -398,7 +407,7 @@ verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
       fprintf (stderr, "for SSA_NAME: ");
       print_generic_expr (stderr, ssa_name, TDF_VOPS);
       fprintf (stderr, " in statement:\n");
-      print_generic_stmt (stderr, stmt, TDF_VOPS);
+      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
     }
 
   return err;
@@ -414,11 +423,11 @@ verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
       definition of SSA_NAME.  */
 
 static bool
-verify_phi_args (tree phi, basic_block bb, basic_block *definition_block)
+verify_phi_args (gimple phi, basic_block bb, basic_block *definition_block)
 {
   edge e;
   bool err = false;
-  unsigned i, phi_num_args = PHI_NUM_ARGS (phi);
+  size_t i, phi_num_args = gimple_phi_num_args (phi);
 
   if (EDGE_COUNT (bb->preds) != phi_num_args)
     {
@@ -429,7 +438,7 @@ verify_phi_args (tree phi, basic_block bb, basic_block *definition_block)
 
   for (i = 0; i < phi_num_args; i++)
     {
-      use_operand_p op_p = PHI_ARG_DEF_PTR (phi, i);
+      use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
       tree op = USE_FROM_PTR (op_p);
 
       e = EDGE_PRED (bb, i);
@@ -451,7 +460,7 @@ verify_phi_args (tree phi, basic_block bb, basic_block *definition_block)
 
       if (TREE_CODE (op) == SSA_NAME)
        {
-         err = verify_ssa_name (op, !is_gimple_reg (PHI_RESULT (phi)));
+         err = verify_ssa_name (op, !is_gimple_reg (gimple_phi_result (phi)));
          err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
                             op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
        }
@@ -475,7 +484,7 @@ error:
   if (err)
     {
       fprintf (stderr, "for PHI node\n");
-      print_generic_stmt (stderr, phi, TDF_VOPS|TDF_MEMSYMS);
+      print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
     }
 
 
@@ -735,13 +744,13 @@ verify_ssa (bool check_modified_stmt)
       tree name = ssa_name (i);
       if (name)
        {
-         tree stmt;
+         gimple stmt;
          TREE_VISITED (name) = 0;
 
          stmt = SSA_NAME_DEF_STMT (name);
-         if (!IS_EMPTY_STMT (stmt))
+         if (!gimple_nop_p (stmt))
            {
-             basic_block bb = bb_for_stmt (stmt);
+             basic_block bb = gimple_bb (stmt);
              verify_def (bb, definition_block,
                          name, stmt, !is_gimple_reg (name));
 
@@ -756,9 +765,9 @@ verify_ssa (bool check_modified_stmt)
   FOR_EACH_BB (bb)
     {
       edge e;
-      tree phi;
+      gimple phi;
       edge_iterator ei;
-      block_stmt_iterator bsi;
+      gimple_stmt_iterator gsi;
 
       /* Make sure that all edges have a clear 'aux' field.  */
       FOR_EACH_EDGE (e, ei, bb->preds)
@@ -772,45 +781,46 @@ verify_ssa (bool check_modified_stmt)
        }
 
       /* Verify the arguments for every PHI node in the block.  */
-      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
+         phi = gsi_stmt (gsi);
          if (verify_phi_args (phi, bb, definition_block))
            goto err;
 
          bitmap_set_bit (names_defined_in_bb,
-                         SSA_NAME_VERSION (PHI_RESULT (phi)));
+                         SSA_NAME_VERSION (gimple_phi_result (phi)));
        }
 
       /* Now verify all the uses and vuses in every statement of the block.  */
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
-         tree stmt = bsi_stmt (bsi);
+         gimple stmt = gsi_stmt (gsi);
          use_operand_p use_p;
 
-         if (check_modified_stmt && stmt_modified_p (stmt))
+         if (check_modified_stmt && gimple_modified_p (stmt))
            {
              error ("stmt (%p) marked modified after optimization pass: ",
                     (void *)stmt);
-             print_generic_stmt (stderr, stmt, TDF_VOPS);
+             print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
              goto err;
            }
 
-         if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-             && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) != SSA_NAME)
+         if (is_gimple_assign (stmt)
+             && TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
            {
              tree lhs, base_address;
 
-             lhs = GIMPLE_STMT_OPERAND (stmt, 0);
+             lhs = gimple_assign_lhs (stmt);
              base_address = get_base_address (lhs);
 
              if (base_address
                  && gimple_aliases_computed_p (cfun)
                  && SSA_VAR_P (base_address)
-                 && !stmt_ann (stmt)->has_volatile_ops
+                 && !gimple_has_volatile_ops (stmt)
                  && ZERO_SSA_OPERANDS (stmt, SSA_OP_VDEF))
                {
                  error ("statement makes a memory store, but has no VDEFS");
-                 print_generic_stmt (stderr, stmt, TDF_VOPS);
+                 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
                  goto err;
                }
            }
@@ -820,7 +830,7 @@ verify_ssa (bool check_modified_stmt)
              if (verify_ssa_name (op, true))
                {
                  error ("in statement");
-                 print_generic_stmt (stderr, stmt, TDF_VOPS|TDF_MEMSYMS);
+                 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
                  goto err;
                }
            }
@@ -830,7 +840,7 @@ verify_ssa (bool check_modified_stmt)
              if (verify_ssa_name (op, false))
                {
                  error ("in statement");
-                 print_generic_stmt (stderr, stmt, TDF_VOPS|TDF_MEMSYMS);
+                 print_gimple_stmt (stderr, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
                  goto err;
                }
            }
@@ -951,7 +961,7 @@ delete_tree_ssa (void)
 {
   size_t i;
   basic_block bb;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   referenced_var_iterator rvi;
   tree var;
 
@@ -967,17 +977,32 @@ delete_tree_ssa (void)
       release_ssa_name (var);
     }
 
-  /* Remove annotations from every tree in the function.  */
+  /* FIXME.  This may not be necessary.  We will release all this
+     memory en masse in free_ssa_operands.  This clearing used to be
+     necessary to avoid problems with the inliner, but it may not be
+     needed anymore.  */
   FOR_EACH_BB (bb)
     {
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
-         tree stmt = bsi_stmt (bsi);
-         stmt_ann_t ann = get_stmt_ann (stmt);
+         gimple stmt = gsi_stmt (gsi);
+
+         if (gimple_has_ops (stmt))
+           {
+             gimple_set_def_ops (stmt, NULL);
+             gimple_set_use_ops (stmt, NULL);
+             gimple_set_addresses_taken (stmt, NULL);
+           }
+
+         if (gimple_has_mem_ops (stmt))
+           {
+             gimple_set_vdef_ops (stmt, NULL);
+             gimple_set_vuse_ops (stmt, NULL);
+             BITMAP_FREE (stmt->gsmem.membase.stores);
+             BITMAP_FREE (stmt->gsmem.membase.loads);
+           }
 
-         free_ssa_operands (&ann->operands);
-         ann->addresses_taken = 0;
-         mark_stmt_modified (stmt);
+         gimple_set_modified (stmt, true);
        }
       set_phi_nodes (bb, NULL);
     }
@@ -1001,7 +1026,8 @@ delete_tree_ssa (void)
 
   fini_ssanames ();
   fini_phinodes ();
-  /* we no longer maintain the SSA operand cache at this point.  */
+
+  /* We no longer maintain the SSA operand cache at this point.  */
   if (ssa_operands_active ())
     fini_ssa_operands ();
 
@@ -1209,12 +1235,9 @@ tree_ssa_useless_type_conversion (tree expr)
   if (CONVERT_EXPR_P (expr)
       || TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == NON_LVALUE_EXPR)
-    /* FIXME: Use of GENERIC_TREE_TYPE here is a temporary measure to work
-       around known bugs with GIMPLE_MODIFY_STMTs appearing in places
-       they shouldn't.  See PR 30391.  */
     return useless_type_conversion_p
       (TREE_TYPE (expr),
-       GENERIC_TREE_TYPE (TREE_OPERAND (expr, 0)));
+       TREE_TYPE (TREE_OPERAND (expr, 0)));
 
   return false;
 }
@@ -1238,33 +1261,33 @@ static bool
 walk_use_def_chains_1 (tree var, walk_use_def_chains_fn fn, void *data,
                       struct pointer_set_t *visited, bool is_dfs)
 {
-  tree def_stmt;
+  gimple def_stmt;
 
   if (pointer_set_insert (visited, var))
     return false;
 
   def_stmt = SSA_NAME_DEF_STMT (var);
 
-  if (TREE_CODE (def_stmt) != PHI_NODE)
+  if (gimple_code (def_stmt) != GIMPLE_PHI)
     {
       /* If we reached the end of the use-def chain, call FN.  */
       return fn (var, def_stmt, data);
     }
   else
     {
-      int i;
+      size_t i;
 
       /* When doing a breadth-first search, call FN before following the
         use-def links for each argument.  */
       if (!is_dfs)
-       for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
-         if (fn (PHI_ARG_DEF (def_stmt, i), def_stmt, data))
+       for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
+         if (fn (gimple_phi_arg_def (def_stmt, i), def_stmt, data))
            return true;
 
       /* Follow use-def links out of each PHI argument.  */
-      for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
+      for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
        {
-         tree arg = PHI_ARG_DEF (def_stmt, i);
+         tree arg = gimple_phi_arg_def (def_stmt, i);
 
          /* ARG may be NULL for newly introduced PHI nodes.  */
          if (arg
@@ -1276,8 +1299,8 @@ walk_use_def_chains_1 (tree var, walk_use_def_chains_fn fn, void *data,
       /* When doing a depth-first search, call FN after following the
         use-def links for each argument.  */
       if (is_dfs)
-       for (i = 0; i < PHI_NUM_ARGS (def_stmt); i++)
-         if (fn (PHI_ARG_DEF (def_stmt, i), def_stmt, data))
+       for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
+         if (fn (gimple_phi_arg_def (def_stmt, i), def_stmt, data))
            return true;
     }
   
@@ -1310,7 +1333,7 @@ void
 walk_use_def_chains (tree var, walk_use_def_chains_fn fn, void *data,
                      bool is_dfs)
 {
-  tree def_stmt;
+  gimple def_stmt;
 
   gcc_assert (TREE_CODE (var) == SSA_NAME);
 
@@ -1318,7 +1341,7 @@ walk_use_def_chains (tree var, walk_use_def_chains_fn fn, void *data,
 
   /* We only need to recurse if the reaching definition comes from a PHI
      node.  */
-  if (TREE_CODE (def_stmt) != PHI_NODE)
+  if (gimple_code (def_stmt) != GIMPLE_PHI)
     (*fn) (var, def_stmt, data);
   else
     {
@@ -1345,7 +1368,7 @@ ssa_undefined_value_p (tree t)
     return false;
 
   /* The value is undefined iff its definition statement is empty.  */
-  return IS_EMPTY_STMT (SSA_NAME_DEF_STMT (t));
+  return gimple_nop_p (SSA_NAME_DEF_STMT (t));
 }
 
 /* Emit warnings for uninitialized variables.  This is done in two passes.
@@ -1370,8 +1393,8 @@ static void
 warn_uninit (tree t, const char *gmsgid, void *data)
 {
   tree var = SSA_NAME_VAR (t);
-  tree context = (tree) data;
-  location_t *locus;
+  gimple context = (gimple) data;
+  location_t location;
   expanded_location xloc, floc;
 
   if (!ssa_undefined_value_p (t))
@@ -1382,11 +1405,11 @@ warn_uninit (tree t, const char *gmsgid, void *data)
   if (TREE_NO_WARNING (var))
     return;
 
-  locus = (context != NULL && EXPR_HAS_LOCATION (context)
-          ? EXPR_LOCUS (context)
-          : &DECL_SOURCE_LOCATION (var));
-  warning_at (*locus, OPT_Wuninitialized, gmsgid, var);
-  xloc = expand_location (*locus);
+  location = (context != NULL && gimple_has_location (context))
+            ? gimple_location (context)
+            : DECL_SOURCE_LOCATION (var);
+  warning_at (location, OPT_Wuninitialized, gmsgid, var);
+  xloc = expand_location (location);
   floc = expand_location (DECL_SOURCE_LOCATION (cfun->decl));
   if (xloc.file != floc.file
       || xloc.line < floc.line
@@ -1397,7 +1420,7 @@ warn_uninit (tree t, const char *gmsgid, void *data)
 }
 
 struct walk_data {
-  tree stmt;
+  gimple stmt;
   bool always_executed;
 };
 
@@ -1407,7 +1430,8 @@ struct walk_data {
 static tree
 warn_uninitialized_var (tree *tp, int *walk_subtrees, void *data_)
 {
-  struct walk_data *data = (struct walk_data *)data_;
+  struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
+  struct walk_data *data = (struct walk_data *) wi->info;
   tree t = *tp;
 
   switch (TREE_CODE (t))
@@ -1446,17 +1470,17 @@ warn_uninitialized_var (tree *tp, int *walk_subtrees, void *data_)
    and warn about them.  */
 
 static void
-warn_uninitialized_phi (tree phi)
+warn_uninitialized_phi (gimple phi)
 {
-  int i, n = PHI_NUM_ARGS (phi);
+  size_t i, n = gimple_phi_num_args (phi);
 
   /* Don't look at memory tags.  */
-  if (!is_gimple_reg (PHI_RESULT (phi)))
+  if (!is_gimple_reg (gimple_phi_result (phi)))
     return;
 
   for (i = 0; i < n; ++i)
     {
-      tree op = PHI_ARG_DEF (phi, i);
+      tree op = gimple_phi_arg_def (phi, i);
       if (TREE_CODE (op) == SSA_NAME)
        warn_uninit (op, "%qD may be used uninitialized in this function",
                     NULL);
@@ -1466,7 +1490,7 @@ warn_uninitialized_phi (tree phi)
 static unsigned int
 execute_early_warn_uninitialized (void)
 {
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   basic_block bb;
   struct walk_data data;
 
@@ -1476,12 +1500,14 @@ execute_early_warn_uninitialized (void)
     {
       data.always_executed = dominated_by_p (CDI_POST_DOMINATORS,
                                             single_succ (ENTRY_BLOCK_PTR), bb);
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
-        {
-         data.stmt = bsi_stmt (bsi);
-         walk_tree (bsi_stmt_ptr (bsi), warn_uninitialized_var,
-                    &data, NULL);
-        }
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+       {
+         struct walk_stmt_info wi;
+         data.stmt = gsi_stmt (gsi);
+         memset (&wi, 0, sizeof (wi));
+         wi.info = &data;
+         walk_gimple_op (gsi_stmt (gsi), warn_uninitialized_var, &wi);
+       }
     }
   return 0;
 }
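
The early-uninit walk above now goes through walk_gimple_op, passing pass-local data via struct walk_stmt_info; a minimal sketch of that plumbing, assuming the tuples API from gimple.h on this branch (the callback and counter are made up for illustration):

/* Callback: walk_gimple_op hands over the walk_stmt_info, not the
   user data directly; unwrap wi->info to reach it.  */
static tree
count_ssa_uses (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  unsigned *counter = (unsigned *) wi->info;

  if (TREE_CODE (*tp) == SSA_NAME)
    (*counter)++;
  return NULL_TREE;   /* Keep walking.  */
}

static unsigned
count_ssa_uses_in_bb (basic_block bb)
{
  gimple_stmt_iterator gsi;
  unsigned count = 0;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      struct walk_stmt_info wi;
      memset (&wi, 0, sizeof (wi));
      wi.info = &count;
      walk_gimple_op (gsi_stmt (gsi), count_ssa_uses, &wi);
    }
  return count;
}
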
@@ -1490,7 +1516,7 @@ static unsigned int
 execute_late_warn_uninitialized (void)
 {
   basic_block bb;
-  tree phi;
+  gimple_stmt_iterator gsi;
 
   /* Re-do the plain uninitialized variable check, as optimization may have
      straightened control flow.  Do this first so that we don't accidentally
@@ -1498,8 +1524,9 @@ execute_late_warn_uninitialized (void)
   execute_early_warn_uninitialized ();
 
   FOR_EACH_BB (bb)
-    for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
-      warn_uninitialized_phi (phi);
+    for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+      warn_uninitialized_phi (gsi_stmt (gsi));
+
   return 0;
 }
 
@@ -1554,32 +1581,33 @@ execute_update_addresses_taken (void)
 {
   tree var;
   referenced_var_iterator rvi;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   basic_block bb;
   bitmap addresses_taken = BITMAP_ALLOC (NULL);
   bitmap vars_updated = BITMAP_ALLOC (NULL);
   bool update_vops = false;
-  tree phi;
 
   /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
      the function body.  */
   FOR_EACH_BB (bb)
     {
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
-         stmt_ann_t s_ann = stmt_ann (bsi_stmt (bsi));
-
-         if (s_ann->addresses_taken)
-           bitmap_ior_into (addresses_taken, s_ann->addresses_taken);
+         bitmap taken = gimple_addresses_taken (gsi_stmt (gsi));
+         if (taken)
+           bitmap_ior_into (addresses_taken, taken);
        }
-      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+
+      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
-         unsigned i, phi_num_args = PHI_NUM_ARGS (phi);
-         for (i = 0; i < phi_num_args; i++)
+         size_t i;
+         gimple phi = gsi_stmt (gsi);
+
+         for (i = 0; i < gimple_phi_num_args (phi); i++)
            {
              tree op = PHI_ARG_DEF (phi, i), var;
              if (TREE_CODE (op) == ADDR_EXPR
-                 && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL_TREE
+                 && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
                  && DECL_P (var))
                bitmap_set_bit (addresses_taken, DECL_UID (var));
            }
@@ -1611,14 +1639,14 @@ execute_update_addresses_taken (void)
      variables.  */
   if (update_vops)
     FOR_EACH_BB (bb)
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
-         tree stmt = bsi_stmt (bsi);
+         gimple stmt = gsi_stmt (gsi);
 
-         if ((LOADED_SYMS (stmt)
-              && bitmap_intersect_p (LOADED_SYMS (stmt), vars_updated))
-             || (STORED_SYMS (stmt)
-                 && bitmap_intersect_p (STORED_SYMS (stmt), vars_updated)))
+         if ((gimple_loaded_syms (stmt)
+              && bitmap_intersect_p (gimple_loaded_syms (stmt), vars_updated))
+             || (gimple_stored_syms (stmt)
+                 && bitmap_intersect_p (gimple_stored_syms (stmt), vars_updated)))
            update_stmt (stmt);
        }
   BITMAP_FREE (addresses_taken);
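
On the tuples branch PHI nodes live in their own sequence, so the address-taken scan above iterates them with gsi_start_phis; a condensed sketch of that loop, assuming GCC's internal headers (the helper name is invented, the bitmap is the caller's as in the hunk):

static void
collect_phi_taken_addresses (basic_block bb, bitmap addresses_taken)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      size_t i;

      for (i = 0; i < gimple_phi_num_args (phi); i++)
        {
          tree op = gimple_phi_arg_def (phi, i), base;

          /* An &VAR argument takes VAR's address.  */
          if (TREE_CODE (op) == ADDR_EXPR
              && (base = get_base_address (TREE_OPERAND (op, 0))) != NULL
              && DECL_P (base))
            bitmap_set_bit (addresses_taken, DECL_UID (base));
        }
    }
}
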
index 8d675b4..f3101d2 100644
@@ -115,17 +115,12 @@ ssanames_print_statistics (void)
    used without a preceding definition).  */
 
 tree
-make_ssa_name_fn (struct function *fn, tree var, tree stmt)
+make_ssa_name_fn (struct function *fn, tree var, gimple stmt)
 {
   tree t;
   use_operand_p imm;
 
-  gcc_assert (DECL_P (var)
-             || TREE_CODE (var) == INDIRECT_REF);
-
-  gcc_assert (!stmt
-             || EXPR_P (stmt) || GIMPLE_STMT_P (stmt)
-             || TREE_CODE (stmt) == PHI_NODE);
+  gcc_assert (DECL_P (var));
 
   /* If our free list has an element, then use it.  */
   if (FREE_SSANAMES (fn))
@@ -161,7 +156,7 @@ make_ssa_name_fn (struct function *fn, tree var, tree stmt)
   imm->use = NULL;
   imm->prev = imm;
   imm->next = imm;
-  imm->stmt = t;
+  imm->loc.ssa_name = t;
 
   return t;
 }
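
make_ssa_name_fn now takes a gimple defining statement instead of a tree; a small sketch of the usual create-and-define sequence under the new signature, assuming VAR is a registered VAR_DECL and VAL a valid GIMPLE operand (the helper is illustrative, not part of the patch; callers would normally update_stmt or mark symbols for renaming as the hunks above do):

static tree
emit_ssa_copy_before (gimple_stmt_iterator *gsi, tree var, tree val)
{
  gimple stmt = gimple_build_assign (var, val);
  tree name = make_ssa_name (var, stmt);   /* Defining stmt is a gimple now.  */

  gimple_assign_set_lhs (stmt, name);
  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
  return name;
}
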
@@ -219,7 +214,8 @@ release_ssa_name (tree var)
 
       imm->prev = imm;
       imm->next = imm;
-      imm->stmt = var;
+      imm->loc.ssa_name = var;
+
       /* First put back the right tree node so that the tree checking
         macros do not complain.  */
       TREE_SET_CODE (var, SSA_NAME);
@@ -243,7 +239,7 @@ release_ssa_name (tree var)
 /* Creates a duplicate of a ssa name NAME defined in statement STMT.  */
 
 tree
-duplicate_ssa_name (tree name, tree stmt)
+duplicate_ssa_name (tree name, gimple stmt)
 {
   tree new_name = make_ssa_name (SSA_NAME_VAR (name), stmt);
   struct ptr_info_def *old_ptr_info = SSA_NAME_PTR_INFO (name);
@@ -285,7 +281,7 @@ duplicate_ssa_name_ptr_info (tree name, struct ptr_info_def *ptr_info)
 /* Release all the SSA_NAMEs created by STMT.  */
 
 void
-release_defs (tree stmt)
+release_defs (gimple stmt)
 {
   tree def;
   ssa_op_iter iter;
index f922887..96a72fa 100644
@@ -112,7 +112,8 @@ static unsigned HOST_WIDE_INT
 va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
                      bool gpr_p)
 {
-  tree stmt, lhs, orig_lhs;
+  tree lhs, orig_lhs;
+  gimple stmt;
   unsigned HOST_WIDE_INT ret = 0, val, counter_val;
   unsigned int max_size;
 
@@ -130,6 +131,8 @@ va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
   orig_lhs = lhs = rhs;
   while (lhs)
     {
+      enum tree_code rhs_code;
+
       if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        {
          if (counter_val >= max_size)
@@ -144,38 +147,32 @@ va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
 
       stmt = SSA_NAME_DEF_STMT (lhs);
 
-      if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT
-         || GIMPLE_STMT_OPERAND (stmt, 0) != lhs)
+      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
        return (unsigned HOST_WIDE_INT) -1;
 
-      rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-      if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
-       rhs = TREE_OPERAND (rhs, 0);
-
-      if (TREE_CODE (rhs) == SSA_NAME)
+      rhs_code = gimple_assign_rhs_code (stmt);
+      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
+          || gimple_assign_cast_p (stmt))
+         && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
        {
-         lhs = rhs;
+         lhs = gimple_assign_rhs1 (stmt);
          continue;
        }
 
-      if (CONVERT_EXPR_P (rhs)
-         && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
+      if ((rhs_code == POINTER_PLUS_EXPR
+          || rhs_code == PLUS_EXPR)
+         && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
+         && host_integerp (gimple_assign_rhs2 (stmt), 1))
        {
-         lhs = TREE_OPERAND (rhs, 0);
+         ret += tree_low_cst (gimple_assign_rhs2 (stmt), 1);
+         lhs = gimple_assign_rhs1 (stmt);
          continue;
        }
 
-      if ((TREE_CODE (rhs) == POINTER_PLUS_EXPR
-          || TREE_CODE (rhs) == PLUS_EXPR)
-         && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
-         && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST
-         && host_integerp (TREE_OPERAND (rhs, 1), 1))
-       {
-         ret += tree_low_cst (TREE_OPERAND (rhs, 1), 1);
-         lhs = TREE_OPERAND (rhs, 0);
-         continue;
-       }
+      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
+       return (unsigned HOST_WIDE_INT) -1;
 
+      rhs = gimple_assign_rhs1 (stmt);
       if (TREE_CODE (counter) != TREE_CODE (rhs))
        return (unsigned HOST_WIDE_INT) -1;
 
@@ -196,6 +193,8 @@ va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
   val = ret + counter_val;
   while (lhs)
     {
+      enum tree_code rhs_code;
+
       if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
        break;
 
@@ -206,31 +205,22 @@ va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
 
       stmt = SSA_NAME_DEF_STMT (lhs);
 
-      rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-      if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
-       rhs = TREE_OPERAND (rhs, 0);
-
-      if (TREE_CODE (rhs) == SSA_NAME)
+      rhs_code = gimple_assign_rhs_code (stmt);
+      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
+          || gimple_assign_cast_p (stmt))
+         && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
        {
-         lhs = rhs;
+         lhs = gimple_assign_rhs1 (stmt);
          continue;
        }
 
-      if (CONVERT_EXPR_P (rhs)
-         && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
+      if ((rhs_code == POINTER_PLUS_EXPR
+          || rhs_code == PLUS_EXPR)
+         && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
+         && host_integerp (gimple_assign_rhs2 (stmt), 1))
        {
-         lhs = TREE_OPERAND (rhs, 0);
-         continue;
-       }
-
-      if ((TREE_CODE (rhs) == POINTER_PLUS_EXPR
-          || TREE_CODE (rhs) == PLUS_EXPR)
-         && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
-         && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST
-         && host_integerp (TREE_OPERAND (rhs, 1), 1))
-       {
-         val -= tree_low_cst (TREE_OPERAND (rhs, 1), 1);
-         lhs = TREE_OPERAND (rhs, 0);
+         val -= tree_low_cst (gimple_assign_rhs2 (stmt), 1);
+         lhs = gimple_assign_rhs1 (stmt);
          continue;
        }
 
@@ -247,7 +237,7 @@ static tree
 find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                        void *data)
 {
-  bitmap va_list_vars = (bitmap) data;
+  bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
   tree var = *tp;
 
   if (TREE_CODE (var) == SSA_NAME)
@@ -442,12 +432,6 @@ check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
   if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
     return;
 
- if (((TREE_CODE (rhs) == POINTER_PLUS_EXPR
-       || TREE_CODE (rhs) == PLUS_EXPR)
-      && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
-     || CONVERT_EXPR_P (rhs))
-    rhs = TREE_OPERAND (rhs, 0);
-
   if (TREE_CODE (rhs) != SSA_NAME
       || ! bitmap_bit_p (si->va_list_escape_vars,
                         DECL_UID (SSA_NAME_VAR (rhs))))
@@ -504,11 +488,12 @@ check_all_va_list_escapes (struct stdarg_info *si)
 
   FOR_EACH_BB (bb)
     {
-      block_stmt_iterator i;
+      gimple_stmt_iterator i;
 
-      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
+      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
-         tree stmt = bsi_stmt (i), use;
+         gimple stmt = gsi_stmt (i);
+         tree use;
          ssa_op_iter iter;
 
          FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
@@ -517,16 +502,13 @@ check_all_va_list_escapes (struct stdarg_info *si)
                                  DECL_UID (SSA_NAME_VAR (use))))
                continue;
 
-             if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
+             if (is_gimple_assign (stmt))
                {
-                 tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
-                 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-
-                 if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
-                   rhs = TREE_OPERAND (rhs, 0);
+                 tree rhs = gimple_assign_rhs1 (stmt);
+                 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
 
                  /* x = *ap_temp;  */
-                 if (TREE_CODE (rhs) == INDIRECT_REF
+                 if (gimple_assign_rhs_code (stmt) == INDIRECT_REF
                      && TREE_OPERAND (rhs, 0) == use
                      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
                      && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (rhs)), 1)
@@ -550,13 +532,16 @@ check_all_va_list_escapes (struct stdarg_info *si)
                     other_ap_temp = (some_type *) ap_temp;
                     ap = ap_temp;
                     statements.  */
-                 if ((TREE_CODE (rhs) == POINTER_PLUS_EXPR
-                      && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
-                     || CONVERT_EXPR_P (rhs))
-                   rhs = TREE_OPERAND (rhs, 0);
-
-                 if (rhs == use)
+                 if (rhs == use
+                     && ((rhs_code == POINTER_PLUS_EXPR
+                          && (TREE_CODE (gimple_assign_rhs2 (stmt))
+                              == INTEGER_CST))
+                         || gimple_assign_cast_p (stmt)
+                         || (get_gimple_rhs_class (rhs_code)
+                             == GIMPLE_SINGLE_RHS)))
                    {
+                     tree lhs = gimple_assign_lhs (stmt);
+
                      if (TREE_CODE (lhs) == SSA_NAME
                          && bitmap_bit_p (si->va_list_escape_vars,
                                           DECL_UID (SSA_NAME_VAR (lhs))))
@@ -572,7 +557,7 @@ check_all_va_list_escapes (struct stdarg_info *si)
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
-                 print_generic_expr (dump_file, stmt, dump_flags);
+                 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
                  fputc ('\n', dump_file);
                }
              return true;
@@ -604,6 +589,7 @@ execute_optimize_stdarg (void)
   bool va_list_escapes = false;
   bool va_list_simple_ptr;
   struct stdarg_info si;
+  struct walk_stmt_info wi;
   const char *funcname = NULL;
   tree cfun_va_list;
 
@@ -624,18 +610,17 @@ execute_optimize_stdarg (void)
 
   FOR_EACH_BB (bb)
     {
-      block_stmt_iterator i;
+      gimple_stmt_iterator i;
 
-      for (i = bsi_start (bb); !bsi_end_p (i); bsi_next (&i))
+      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
-         tree stmt = bsi_stmt (i);
-         tree call = get_call_expr_in (stmt), callee;
-         tree ap;
+         gimple stmt = gsi_stmt (i);
+         tree callee, ap;
 
-         if (!call)
+         if (!is_gimple_call (stmt))
            continue;
 
-         callee = get_callee_fndecl (call);
+         callee = gimple_call_fndecl (stmt);
          if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
            continue;
@@ -655,7 +640,7 @@ execute_optimize_stdarg (void)
            }
 
          si.va_start_count++;
-         ap = CALL_EXPR_ARG (call, 0);
+         ap = gimple_call_arg (stmt, 0);
 
          if (TREE_CODE (ap) != ADDR_EXPR)
            {
@@ -731,10 +716,12 @@ execute_optimize_stdarg (void)
     cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
 
   calculate_dominance_info (CDI_DOMINATORS);
+  memset (&wi, 0, sizeof (wi));
+  wi.info = si.va_list_vars;
 
   FOR_EACH_BB (bb)
     {
-      block_stmt_iterator i;
+      gimple_stmt_iterator i;
 
       si.compute_sizes = -1;
       si.bb = bb;
@@ -745,12 +732,13 @@ execute_optimize_stdarg (void)
         any real data movement.  */
       if (va_list_simple_ptr)
        {
-         tree phi, lhs, rhs;
+         tree lhs, rhs;
          use_operand_p uop;
          ssa_op_iter soi;
 
-         for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+         for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
            {
+             gimple phi = gsi_stmt (i);
              lhs = PHI_RESULT (phi);
 
              if (!is_gimple_reg (lhs))
@@ -766,14 +754,12 @@ execute_optimize_stdarg (void)
                  else
                    check_va_list_escapes (&si, lhs, rhs);
 
-                 if (si.va_list_escapes
-                     || walk_tree (&phi, find_va_list_reference,
-                                   si.va_list_vars, NULL))
+                 if (si.va_list_escapes)
                    {
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        {
                          fputs ("va_list escapes in ", dump_file);
-                         print_generic_expr (dump_file, phi, dump_flags);
+                         print_gimple_stmt (dump_file, phi, 0, dump_flags);
                          fputc ('\n', dump_file);
                        }
                      va_list_escapes = true;
@@ -782,18 +768,16 @@ execute_optimize_stdarg (void)
            }
        }
 
-      for (i = bsi_start (bb);
-          !bsi_end_p (i) && !va_list_escapes;
-          bsi_next (&i))
+      for (i = gsi_start_bb (bb);
+          !gsi_end_p (i) && !va_list_escapes;
+          gsi_next (&i))
        {
-         tree stmt = bsi_stmt (i);
-         tree call;
+         gimple stmt = gsi_stmt (i);
 
          /* Don't look at __builtin_va_{start,end}, they are ok.  */
-         call = get_call_expr_in (stmt);
-         if (call)
+         if (is_gimple_call (stmt))
            {
-             tree callee = get_callee_fndecl (call);
+             tree callee = gimple_call_fndecl (stmt);
 
              if (callee
                  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
@@ -802,44 +786,54 @@ execute_optimize_stdarg (void)
                continue;
            }
 
-         if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
+         if (is_gimple_assign (stmt))
            {
-             tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
-             tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-
-             if (TREE_CODE (rhs) == WITH_SIZE_EXPR)
-               rhs = TREE_OPERAND (rhs, 0);
+             tree lhs = gimple_assign_lhs (stmt);
+             tree rhs = gimple_assign_rhs1 (stmt);
 
              if (va_list_simple_ptr)
                {
-                 /* Check for tem = ap.  */
-                 if (va_list_ptr_read (&si, rhs, lhs))
-                   continue;
+                 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
+                     == GIMPLE_SINGLE_RHS)
+                   {
+                     /* Check for tem = ap.  */
+                     if (va_list_ptr_read (&si, rhs, lhs))
+                       continue;
 
-                 /* Check for the last insn in:
-                    tem1 = ap;
-                    tem2 = tem1 + CST;
-                    ap = tem2;
-                    sequence.  */
-                 else if (va_list_ptr_write (&si, lhs, rhs))
-                   continue;
+                     /* Check for the last insn in:
+                        tem1 = ap;
+                        tem2 = tem1 + CST;
+                        ap = tem2;
+                        sequence.  */
+                     else if (va_list_ptr_write (&si, lhs, rhs))
+                       continue;
+                   }
 
-                 else
+                 if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
+                      && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
+                     || IS_CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
+                     || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
+                         == GIMPLE_SINGLE_RHS))
                    check_va_list_escapes (&si, lhs, rhs);
                }
              else
                {
-                 /* Check for ap[0].field = temp.  */
-                 if (va_list_counter_struct_op (&si, lhs, rhs, true))
-                   continue;
+                 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
+                     == GIMPLE_SINGLE_RHS)
+                   {
+                     /* Check for ap[0].field = temp.  */
+                     if (va_list_counter_struct_op (&si, lhs, rhs, true))
+                       continue;
 
-                 /* Check for temp = ap[0].field.  */
-                 else if (va_list_counter_struct_op (&si, rhs, lhs, false))
-                   continue;
+                     /* Check for temp = ap[0].field.  */
+                     else if (va_list_counter_struct_op (&si, rhs, lhs,
+                                                         false))
+                       continue;
+                   }
 
                  /* Do any architecture specific checking.  */
-                 else if (targetm.stdarg_optimize_hook
-                          && targetm.stdarg_optimize_hook (&si, lhs, rhs))
+                 if (targetm.stdarg_optimize_hook
+                     && targetm.stdarg_optimize_hook (&si, stmt))
                    continue;
                }
            }
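
The stdarg pass now recognizes calls directly on the statement with is_gimple_call and gimple_call_fndecl instead of digging a CALL_EXPR out of a MODIFY_STMT; a minimal sketch of that test, assuming the tuples call accessors (the helper name is invented):

/* Sketch: if STMT is a call to __builtin_va_start, return its first
   argument (the ap expression), otherwise NULL_TREE.  */
static tree
va_start_ap_argument (gimple stmt)
{
  tree callee;

  if (!is_gimple_call (stmt) || gimple_call_num_args (stmt) < 1)
    return NULL_TREE;

  callee = gimple_call_fndecl (stmt);
  if (callee == NULL_TREE
      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
      || DECL_FUNCTION_CODE (callee) != BUILT_IN_VA_START)
    return NULL_TREE;

  return gimple_call_arg (stmt, 0);
}
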
@@ -851,13 +845,12 @@ execute_optimize_stdarg (void)
             fully), or some unexpected use of va_list.  None of these should
             happen in a gimplified VA_ARG_EXPR.  */
          if (si.va_list_escapes
-             || walk_tree (&stmt, find_va_list_reference,
-                           si.va_list_vars, NULL))
+             || walk_gimple_op (stmt, find_va_list_reference, &wi))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fputs ("va_list escapes in ", dump_file);
-                 print_generic_expr (dump_file, stmt, dump_flags);
+                 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
                  fputc ('\n', dump_file);
                }
              va_list_escapes = true;
index 2205c1a..8bcfcfd 100644
@@ -147,10 +147,10 @@ struct switch_conv_info
 
   /* The first load statement that loads a temporary from a new static array.
    */
-  tree arr_ref_first;
+  gimple arr_ref_first;
 
   /* The last load statement that loads a temporary from a new static array.  */
-  tree arr_ref_last;
+  gimple arr_ref_last;
 
   /* String reason why the case wasn't a good candidate that is written to the
      dump file, if there is one.  */
@@ -166,22 +166,21 @@ static struct switch_conv_info info;
    satisfies the size of the new array.  */
 
 static bool
-check_range (tree swtch)
+check_range (gimple swtch)
 {
   tree min_case, max_case;
-  tree cases = SWITCH_LABELS (swtch);
-  unsigned int branch_num = TREE_VEC_LENGTH (cases);
+  unsigned int branch_num = gimple_switch_num_labels (swtch);
   tree range_max;
 
   /* The gimplifier has already sorted the cases by CASE_LOW and ensured there
      is a default label which is the last in the vector.  */
 
-  min_case = TREE_VEC_ELT (cases, 0);
+  min_case = gimple_switch_label (swtch, 1);
   info.range_min = CASE_LOW (min_case);
 
   gcc_assert (branch_num > 1);
-  gcc_assert (CASE_LOW (TREE_VEC_ELT (cases, branch_num - 1)) == NULL_TREE);
-  max_case = TREE_VEC_ELT (cases, branch_num - 2);
+  gcc_assert (CASE_LOW (gimple_switch_label (swtch, 0)) == NULL_TREE);
+  max_case = gimple_switch_label (swtch, branch_num - 1);
   if (CASE_HIGH (max_case) != NULL_TREE)
     range_max = CASE_HIGH (max_case);
   else
@@ -283,25 +282,26 @@ check_process_case (tree cs)
 static bool
 check_final_bb (void)
 {
-  tree phi;
+  gimple_stmt_iterator gsi;
 
   info.phi_count = 0;
-  for (phi = phi_nodes (info.final_bb); phi; phi = PHI_CHAIN (phi))
+  for (gsi = gsi_start_phis (info.final_bb); !gsi_end_p (gsi); gsi_next (&gsi))
     {
-      int i;
+      gimple phi = gsi_stmt (gsi);
+      unsigned int i;
 
       info.phi_count++;
 
-      for (i = 0; i < PHI_NUM_ARGS (phi); i++)
+      for (i = 0; i < gimple_phi_num_args (phi); i++)
        {
-         basic_block bb = PHI_ARG_EDGE (phi, i)->src;
+         basic_block bb = gimple_phi_arg_edge (phi, i)->src;
 
          if ((bb == info.switch_bb
               || (single_pred_p (bb) && single_pred (bb) == info.switch_bb))
-             && !is_gimple_min_invariant (PHI_ARG_ELT (phi, i).def))
+             && !is_gimple_min_invariant (gimple_phi_arg_def (phi, i)))
            {
              info.reason = "   Non-invariant value from a case\n";
-             return false; /* non invariant argument */
+             return false; /* Non-invariant argument.  */
            }
        }
     }
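
check_final_bb above shows the tuplified PHI accessors, with gimple_phi_arg_edge pairing each argument with its incoming edge; a small sketch of the same pattern, assuming GCC's internal headers (the predicate is illustrative, not part of the patch):

/* Sketch: true if every PHI argument flowing into BB from PRED is a
   gimple minimal invariant.  */
static bool
all_phi_args_invariant_from_p (basic_block bb, basic_block pred)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      size_t i;

      for (i = 0; i < gimple_phi_num_args (phi); i++)
        if (gimple_phi_arg_edge (phi, i)->src == pred
            && !is_gimple_min_invariant (gimple_phi_arg_def (phi, i)))
          return false;
    }
  return true;
}
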
@@ -326,10 +326,8 @@ create_temp_arrays (void)
                                                 sizeof (tree));
 
   for (i = 0; i < info.phi_count; i++)
-    {
-      info.constructors[i] = VEC_alloc (constructor_elt, gc,
-                                  tree_low_cst (info.range_size, 1) + 1);
-    }
+    info.constructors[i]
+      = VEC_alloc (constructor_elt, gc, tree_low_cst (info.range_size, 1) + 1);
 }
 
 /* Free the arrays created by create_temp_arrays().  The vectors that are
@@ -351,10 +349,10 @@ free_temp_arrays (void)
 static void
 gather_default_values (tree default_case)
 {
-  tree phi;
+  gimple_stmt_iterator gsi;
   basic_block bb = label_to_block (CASE_LABEL (default_case));
   edge e;
-  int i;
+  int i = 0;
 
   gcc_assert (CASE_LOW (default_case) == NULL_TREE);
 
@@ -363,11 +361,12 @@ gather_default_values (tree default_case)
   else
     e = single_succ_edge (bb);
 
-  for (phi = phi_nodes (info.final_bb), i = 0; phi; phi = PHI_CHAIN (phi), i++)
+  for (gsi = gsi_start_phis (info.final_bb); !gsi_end_p (gsi); gsi_next (&gsi))
     {
+      gimple phi = gsi_stmt (gsi);
       tree val = PHI_ARG_DEF_FROM_EDGE (phi, e);
       gcc_assert (val);
-      info.default_values[i] = val;
+      info.default_values[i++] = val;
     }
 }
 
@@ -376,18 +375,18 @@ gather_default_values (tree default_case)
    order of phi nodes.  SWTCH is the switch statement being converted.  */
 
 static void
-build_constructors (tree swtch)
+build_constructors (gimple swtch)
 {
-  int i;
-  tree cases = SWITCH_LABELS (swtch);
+  unsigned i, branch_num = gimple_switch_num_labels (swtch);
   tree pos = info.range_min;
 
-  for (i = 0; i < TREE_VEC_LENGTH (cases) - 1; i++)
+  for (i = 1; i < branch_num; i++)
     {
-      tree cs = TREE_VEC_ELT (cases, i);
+      tree cs = gimple_switch_label (swtch, i);
       basic_block bb = label_to_block (CASE_LABEL (cs));
       edge e;
-      tree phi, high;
+      tree high;
+      gimple_stmt_iterator gsi;
       int j;
 
       if (bb == info.final_bb)
@@ -405,7 +404,8 @@ build_constructors (tree swtch)
 
              elt = VEC_quick_push (constructor_elt,
                                    info.constructors[k], NULL);
-             elt->index = int_const_binop (MINUS_EXPR, pos, info.range_min, 0);
+             elt->index = int_const_binop (MINUS_EXPR, pos,
+                                           info.range_min, 0);
              elt->value = info.default_values[k];
            }
 
@@ -418,8 +418,10 @@ build_constructors (tree swtch)
        high = CASE_HIGH (cs);
       else
        high = CASE_LOW (cs);
-      for (phi = phi_nodes (info.final_bb); phi; phi = PHI_CHAIN (phi))
+      for (gsi = gsi_start_phis (info.final_bb);
+          !gsi_end_p (gsi); gsi_next (&gsi))
        {
+         gimple phi = gsi_stmt (gsi);
          tree val = PHI_ARG_DEF_FROM_EDGE (phi, e);
          pos = CASE_LOW (cs);
 
@@ -449,15 +451,12 @@ build_constructors (tree swtch)
    is a temporary variable holding the index for loads from the new array.  */
 
 static void
-build_one_array (tree swtch, int num, tree arr_index_type, tree phi, tree tidx)
+build_one_array (gimple swtch, int num, tree arr_index_type, gimple phi,
+                tree tidx)
 {
-  tree array_type;
-  tree ctor;
-  tree decl;
-  tree value_type;
-  tree name;
-  tree fetch, load;
-  block_stmt_iterator bsi;
+  tree array_type, ctor, decl, value_type, name, fetch;
+  gimple load;
+  gimple_stmt_iterator gsi;
 
   gcc_assert (info.default_values[num]);
   value_type = TREE_TYPE (info.default_values[num]);
@@ -478,21 +477,19 @@ build_one_array (tree swtch, int num, tree arr_index_type, tree phi, tree tidx)
   varpool_finalize_decl (decl);
   mark_sym_for_renaming (decl);
 
-  name = make_ssa_name (SSA_NAME_VAR (PHI_RESULT (phi)), NULL_TREE);
+  name = make_ssa_name (SSA_NAME_VAR (PHI_RESULT (phi)), NULL);
   info.target_inbound_names[num] = name;
 
   fetch = build4 (ARRAY_REF, value_type, decl, tidx, NULL_TREE,
                  NULL_TREE);
-  load = build_gimple_modify_stmt (name, fetch);
+  load = gimple_build_assign (name, fetch);
   SSA_NAME_DEF_STMT (name) = load;
 
-  bsi = bsi_for_stmt (swtch);
-  bsi_insert_before (&bsi, load, BSI_SAME_STMT);
+  gsi = gsi_for_stmt (swtch);
+  gsi_insert_before (&gsi, load, GSI_SAME_STMT);
   mark_symbols_for_renaming (load);
 
   info.arr_ref_last = load;
-
-  return;
 }
 
 /* Builds and initializes static arrays initialized with values gathered from
@@ -500,54 +497,53 @@ build_one_array (tree swtch, int num, tree arr_index_type, tree phi, tree tidx)
    them.  */
 
 static void
-build_arrays (tree swtch)
+build_arrays (gimple swtch)
 {
   tree arr_index_type;
   tree tidx, sub;
-  block_stmt_iterator bsi;
-  tree phi = phi_nodes (info.final_bb);
+  gimple stmt;
+  gimple_stmt_iterator gsi;
   int i;
 
-  bsi = bsi_for_stmt (swtch);
+  gsi = gsi_for_stmt (swtch);
 
   arr_index_type = build_index_type (info.range_size);
   tidx = make_rename_temp (arr_index_type, "csti");
   sub = fold_build2 (MINUS_EXPR, TREE_TYPE (info.index_expr), info.index_expr,
                     fold_convert (TREE_TYPE (info.index_expr),
                                   info.range_min));
-  sub = force_gimple_operand_bsi (&bsi, fold_convert (arr_index_type, sub),
-                                 false, NULL, true, BSI_SAME_STMT);
-  sub = build_gimple_modify_stmt (tidx, sub);
-
-  bsi_insert_before (&bsi, sub, BSI_SAME_STMT);
-  mark_symbols_for_renaming (sub);
-  info.arr_ref_first = sub;
+  sub = force_gimple_operand_gsi (&gsi, fold_convert (arr_index_type, sub),
+                                 false, NULL, true, GSI_SAME_STMT);
+  stmt = gimple_build_assign (tidx, sub);
 
-  for (phi = phi_nodes (info.final_bb), i = 0; phi; phi = PHI_CHAIN (phi), i++)
-    build_one_array (swtch, i, arr_index_type, phi, tidx);
+  gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
+  mark_symbols_for_renaming (stmt);
+  info.arr_ref_first = stmt;
 
-  return;
+  for (gsi = gsi_start_phis (info.final_bb), i = 0;
+       !gsi_end_p (gsi); gsi_next (&gsi), i++)
+    build_one_array (swtch, i, arr_index_type, gsi_stmt (gsi), tidx);
 }
 
 /* Generates and appropriately inserts loads of default values at the position
    given by BSI.  Returns the last inserted statement.  */
 
-static tree
-gen_def_assigns (block_stmt_iterator *bsi)
+static gimple
+gen_def_assigns (gimple_stmt_iterator *gsi)
 {
   int i;
-  tree assign = NULL_TREE;
+  gimple assign = NULL;
 
   for (i = 0; i < info.phi_count; i++)
     {
-      tree name = make_ssa_name (SSA_NAME_VAR (info.target_inbound_names[i]),
-                                NULL_TREE);
+      tree name
+       = make_ssa_name (SSA_NAME_VAR (info.target_inbound_names[i]), NULL);
 
       info.target_outbound_names[i] = name;
-      assign = build_gimple_modify_stmt (name, info.default_values[i]);
+      assign = gimple_build_assign (name, info.default_values[i]);
       SSA_NAME_DEF_STMT (name) = assign;
-      bsi_insert_before (bsi, assign, BSI_SAME_STMT);
-      find_new_referenced_vars (&assign);
+      gsi_insert_before (gsi, assign, GSI_SAME_STMT);
+      find_new_referenced_vars (assign);
       mark_symbols_for_renaming (assign);
     }
   return assign;
@@ -583,11 +579,13 @@ prune_bbs (basic_block bbd, basic_block final)
 static void
 fix_phi_nodes (edge e1f, edge e2f, basic_block bbf)
 {
-  tree phi;
+  gimple_stmt_iterator gsi;
   int i;
 
-  for (phi = phi_nodes (bbf), i = 0; phi; phi = PHI_CHAIN (phi), i++)
+  for (gsi = gsi_start_phis (bbf), i = 0;
+       !gsi_end_p (gsi); gsi_next (&gsi), i++)
     {
+      gimple phi = gsi_stmt (gsi);
       add_phi_arg (phi, info.target_inbound_names[i], e1f);
       add_phi_arg (phi, info.target_outbound_names[i], e2f);
     }
@@ -616,28 +614,29 @@ fix_phi_nodes (edge e1f, edge e2f, basic_block bbf)
 */
 
 static void
-gen_inbound_check (tree swtch)
+gen_inbound_check (gimple swtch)
 {
   tree label_decl1 = create_artificial_label ();
   tree label_decl2 = create_artificial_label ();
   tree label_decl3 = create_artificial_label ();
-  tree label1, label2, label3;
+  gimple label1, label2, label3;
 
   tree utype;
   tree tmp_u;
-  tree cast, cast_assign;
-  tree ulb, minus, minus_assign;
+  tree cast;
+  gimple cast_assign, minus_assign;
+  tree ulb, minus;
   tree bound;
 
-  tree if_expr;
+  gimple cond_stmt;
 
-  tree last_assign;
-  block_stmt_iterator bsi;
+  gimple last_assign;
+  gimple_stmt_iterator gsi;
   basic_block bb0, bb1, bb2, bbf, bbd;
   edge e01, e02, e21, e1d, e1f, e2f;
 
   gcc_assert (info.default_values);
-  bb0 = bb_for_stmt (swtch);
+  bb0 = gimple_bb (swtch);
 
   /* Make sure we do not generate arithmetics in a subrange.  */
   if (TREE_TYPE (TREE_TYPE (info.index_expr)))
@@ -646,52 +645,50 @@ gen_inbound_check (tree swtch)
     utype = unsigned_type_for (TREE_TYPE (info.index_expr));
 
   /* (end of) block 0 */
-  bsi = bsi_for_stmt (info.arr_ref_first);
+  gsi = gsi_for_stmt (info.arr_ref_first);
   tmp_u = make_rename_temp (utype, "csui");
 
   cast = fold_convert (utype, info.index_expr);
-  cast_assign = build_gimple_modify_stmt (tmp_u, cast);
-  find_new_referenced_vars (&cast_assign);
-  bsi_insert_before (&bsi, cast_assign, BSI_SAME_STMT);
+  cast_assign = gimple_build_assign (tmp_u, cast);
+  find_new_referenced_vars (cast_assign);
+  gsi_insert_before (&gsi, cast_assign, GSI_SAME_STMT);
   mark_symbols_for_renaming (cast_assign);
 
   ulb = fold_convert (utype, info.range_min);
   minus = fold_build2 (MINUS_EXPR, utype, tmp_u, ulb);
-  minus = force_gimple_operand_bsi (&bsi, minus, false, NULL, true,
-                                   BSI_SAME_STMT);
-  minus_assign = build_gimple_modify_stmt (tmp_u, minus);
-  find_new_referenced_vars (&minus_assign);
-  bsi_insert_before (&bsi, minus_assign, BSI_SAME_STMT);
+  minus = force_gimple_operand_gsi (&gsi, minus, false, NULL, true,
+                                   GSI_SAME_STMT);
+  minus_assign = gimple_build_assign (tmp_u, minus);
+  find_new_referenced_vars (minus_assign);
+  gsi_insert_before (&gsi, minus_assign, GSI_SAME_STMT);
   mark_symbols_for_renaming (minus_assign);
 
   bound = fold_convert (utype, info.range_size);
 
-  if_expr = build3 (COND_EXPR, void_type_node,
-                   build2 (LE_EXPR, boolean_type_node, tmp_u, bound),
-                   NULL_TREE, NULL_TREE);
+  cond_stmt = gimple_build_cond (LE_EXPR, tmp_u, bound, NULL_TREE, NULL_TREE);
 
-  find_new_referenced_vars (&if_expr);
-  bsi_insert_before (&bsi, if_expr, BSI_SAME_STMT);
-  mark_symbols_for_renaming (if_expr);
+  find_new_referenced_vars (cond_stmt);
+  gsi_insert_before (&gsi, cond_stmt, GSI_SAME_STMT);
+  mark_symbols_for_renaming (cond_stmt);
 
   /* block 2 */
-  bsi = bsi_for_stmt (info.arr_ref_first);
-  label2 = build1 (LABEL_EXPR, void_type_node, label_decl2);
-  bsi_insert_before (&bsi, label2, BSI_SAME_STMT);
-  last_assign = gen_def_assigns (&bsi);
+  gsi = gsi_for_stmt (info.arr_ref_first);
+  label2 = gimple_build_label (label_decl2);
+  gsi_insert_before (&gsi, label2, GSI_SAME_STMT);
+  last_assign = gen_def_assigns (&gsi);
 
   /* block 1 */
-  bsi = bsi_for_stmt (info.arr_ref_first);
-  label1 = build1 (LABEL_EXPR, void_type_node, label_decl1);
-  bsi_insert_before (&bsi, label1, BSI_SAME_STMT);
+  gsi = gsi_for_stmt (info.arr_ref_first);
+  label1 = gimple_build_label (label_decl1);
+  gsi_insert_before (&gsi, label1, GSI_SAME_STMT);
 
   /* block F */
-  bsi = bsi_start (info.final_bb);
-  label3 = build1 (LABEL_EXPR, void_type_node, label_decl3);
-  bsi_insert_before (&bsi, label3, BSI_SAME_STMT);
+  gsi = gsi_start_bb (info.final_bb);
+  label3 = gimple_build_label (label_decl3);
+  gsi_insert_before (&gsi, label3, GSI_SAME_STMT);
 
   /* cfg fix */
-  e02 = split_block (bb0, if_expr);
+  e02 = split_block (bb0, cond_stmt);
   bb2 = e02->dest;
 
   e21 = split_block (bb2, last_assign);
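
gen_inbound_check now emits the bounds test as a GIMPLE_COND instead of a COND_EXPR tree; a minimal sketch of building such a comparison before an existing statement, assuming the builders used in the hunk (operand names are illustrative):

/* Sketch: insert "if (val <= bound)" immediately before WHERE and
   return the new condition statement.  Callers still have to split
   the block and wire up the outgoing edges, as gen_inbound_check
   does with split_block.  */
static gimple
insert_le_check_before (gimple where, tree val, tree bound)
{
  gimple_stmt_iterator gsi = gsi_for_stmt (where);
  gimple cond_stmt = gimple_build_cond (LE_EXPR, val, bound,
                                        NULL_TREE, NULL_TREE);

  gsi_insert_before (&gsi, cond_stmt, GSI_SAME_STMT);
  return cond_stmt;
}
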
@@ -728,8 +725,8 @@ gen_inbound_check (tree swtch)
   bb2->frequency = EDGE_FREQUENCY (e02);
   bbf->frequency = EDGE_FREQUENCY (e1f) + EDGE_FREQUENCY (e2f);
 
-  prune_bbs (bbd, info.final_bb); /* to keep calc_dfs_tree() in dominance.c
-                                    happy */
+  prune_bbs (bbd, info.final_bb); /* To keep calc_dfs_tree() in dominance.c
+                                    happy */
 
   fix_phi_nodes (e1f, e2f, bbf);
 
@@ -742,31 +739,24 @@ gen_inbound_check (tree swtch)
    one after another until one fails or the conversion is completed.  */
 
 static bool
-process_switch (tree swtch)
+process_switch (gimple swtch)
 {
-  int i;
-  tree cases;
+  unsigned int i, branch_num = gimple_switch_num_labels (swtch);
   tree index_type;
 
   /* Operand 2 is either NULL_TREE or a vector of cases (stmt.c).  */
-  if (TREE_OPERAND (swtch, 2) == NULL_TREE)
+  if (branch_num < 2)
     {
-      info.reason = "swtch has no labels\n";
+      info.reason = "switch has no labels\n";
       return false;
     }
 
-  /* Comment from stmt.c:
-     The switch body is lowered in gimplify.c, we should never have switches
-     with a non-NULL SWITCH_BODY here.  */
-  gcc_assert (!SWITCH_BODY (swtch));
-
-  cases = SWITCH_LABELS (swtch);
   info.final_bb = NULL;
-  info.switch_bb = bb_for_stmt (swtch);
-  info.index_expr = SWITCH_COND (swtch);
+  info.switch_bb = gimple_bb (swtch);
+  info.index_expr = gimple_switch_index (swtch);
   index_type = TREE_TYPE (info.index_expr);
-  info.arr_ref_first = NULL_TREE;
-  info.arr_ref_last = NULL_TREE;
+  info.arr_ref_first = NULL;
+  info.arr_ref_last = NULL;
   info.default_prob = 0;
   info.default_count = 0;
   info.other_count = 0;
@@ -785,16 +775,13 @@ process_switch (tree swtch)
 
   /* For all the cases, see whether they are empty, the assignments they
      represent constant and so on...  */
-  for (i = 0; i < TREE_VEC_LENGTH (cases); i++)
-    {
-      tree part_case = TREE_VEC_ELT (cases, i);
-      if (!check_process_case (part_case))
-       {
-         if (dump_file)
-           fprintf (dump_file, "Processing of case %i failed\n", i);
-         return false;
-       }
-    }
+  for (i = 0; i < branch_num; i++)
+    if (!check_process_case (gimple_switch_label (swtch, i)))
+      {
+       if (dump_file)
+         fprintf (dump_file, "Processing of case %i failed\n", i);
+       return false;
+      }
 
   if (!check_final_bb ())
     return false;
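
With tuples the switch carries its labels directly: gimple_switch_label (swtch, 0) is the default label and the remaining labels are sorted by CASE_LOW, which is why the hunks above start indexing the cases at 1. A small sketch of iterating the labels, assuming the accessors shown above (the dump helper is invented):

static void
dump_switch_cases (FILE *file, gimple swtch)
{
  unsigned int i, n = gimple_switch_num_labels (swtch);

  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (swtch, i);

      if (CASE_LOW (elt) == NULL_TREE)
        fprintf (file, "label %u: default\n", i);   /* Index 0 in tuples.  */
      else
        {
          fprintf (file, "label %u: case ", i);
          print_generic_expr (file, CASE_LOW (elt), 0);
          fprintf (file, "\n");
        }
    }
}
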
@@ -803,7 +790,7 @@ process_switch (tree swtch)
      transformation.  */
 
   create_temp_arrays ();
-  gather_default_values (TREE_VEC_ELT (cases, TREE_VEC_LENGTH (cases) - 1));
+  gather_default_values (gimple_switch_label (swtch, 0));
   build_constructors (swtch);
 
   build_arrays (swtch); /* Build the static arrays and assignments.   */
@@ -824,17 +811,17 @@ do_switchconv (void)
 
   FOR_EACH_BB (bb)
   {
-    tree stmt = last_stmt (bb);
-    if (stmt && TREE_CODE (stmt) == SWITCH_EXPR)
+    gimple stmt = last_stmt (bb);
+    if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
       {
-       expanded_location loc = expand_location (EXPR_LOCATION (stmt));
-
        if (dump_file)
          {
+           expanded_location loc = expand_location (gimple_location (stmt));
+
            fprintf (dump_file, "beginning to process the following "
                     "SWITCH statement (%s:%d) : ------- \n",
                     loc.file, loc.line);
-           print_generic_stmt (dump_file, stmt, 2);
+           print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
            fprintf (dump_file, "\n");
          }
 
index 92127b4..78bf155 100644
@@ -100,11 +100,8 @@ along with GCC; see the file COPYING3.  If not see
 
 struct tailcall
 {
-  /* The block in that the call occur.  */
-  basic_block call_block;
-
   /* The iterator pointing to the call statement.  */
-  block_stmt_iterator call_bsi;
+  gimple_stmt_iterator call_gsi;
 
   /* True if it is a call to the current function.  */
   bool tail_recursion;
@@ -191,13 +188,13 @@ suitable_for_tail_call_opt_p (void)
 }
 
 /* Checks whether the expression EXPR in stmt AT is independent of the
-   statement pointed to by BSI (in a sense that we already know EXPR's value
-   at BSI).  We use the fact that we are only called from the chain of
+   statement pointed to by GSI (in a sense that we already know EXPR's value
+   at GSI).  We use the fact that we are only called from the chain of
    basic blocks that have only single successor.  Returns the expression
-   containing the value of EXPR at BSI.  */
+   containing the value of EXPR at GSI.  */
 
 static tree
-independent_of_stmt_p (tree expr, tree at, block_stmt_iterator bsi)
+independent_of_stmt_p (tree expr, gimple at, gimple_stmt_iterator gsi)
 {
   basic_block bb, call_bb, at_bb;
   edge e;
@@ -210,8 +207,8 @@ independent_of_stmt_p (tree expr, tree at, block_stmt_iterator bsi)
     return NULL_TREE;
 
   /* Mark the blocks in the chain leading to the end.  */
-  at_bb = bb_for_stmt (at);
-  call_bb = bb_for_stmt (bsi_stmt (bsi));
+  at_bb = gimple_bb (at);
+  call_bb = gimple_bb (gsi_stmt (gsi));
   for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
     bb->aux = &bb->aux;
   bb->aux = &bb->aux;
@@ -219,7 +216,7 @@ independent_of_stmt_p (tree expr, tree at, block_stmt_iterator bsi)
   while (1)
     { 
       at = SSA_NAME_DEF_STMT (expr);
-      bb = bb_for_stmt (at);
+      bb = gimple_bb (at);
 
       /* The default definition or defined before the chain.  */
       if (!bb || !bb->aux)
@@ -227,16 +224,16 @@ independent_of_stmt_p (tree expr, tree at, block_stmt_iterator bsi)
 
       if (bb == call_bb)
        {
-         for (; !bsi_end_p (bsi); bsi_next (&bsi))
-           if (bsi_stmt (bsi) == at)
+         for (; !gsi_end_p (gsi); gsi_next (&gsi))
+           if (gsi_stmt (gsi) == at)
              break;
 
-         if (!bsi_end_p (bsi))
+         if (!gsi_end_p (gsi))
            expr = NULL_TREE;
          break;
        }
 
-      if (TREE_CODE (at) != PHI_NODE)
+      if (gimple_code (at) != GIMPLE_PHI)
        {
          expr = NULL_TREE;
          break;
@@ -263,27 +260,33 @@ independent_of_stmt_p (tree expr, tree at, block_stmt_iterator bsi)
   return expr;
 }
 
-/* Simulates the effect of an assignment of ASS in STMT on the return value
-   of the tail recursive CALL passed in ASS_VAR.  M and A are the
-   multiplicative and the additive factor for the real return value.  */
+/* Simulates the effect of an assignment STMT on the return value of the tail
+   recursive CALL passed in ASS_VAR.  M and A are the multiplicative and the
+   additive factor for the real return value.  */
 
 static bool
-process_assignment (tree ass, tree stmt, block_stmt_iterator call, tree *m,
+process_assignment (gimple stmt, gimple_stmt_iterator call, tree *m,
                    tree *a, tree *ass_var)
 {
   tree op0, op1, non_ass_var;
-  tree dest = GIMPLE_STMT_OPERAND (ass, 0);
-  tree src = GIMPLE_STMT_OPERAND (ass, 1);
-  enum tree_code code = TREE_CODE (src);
-  tree src_var = src;
-
+  tree dest = gimple_assign_lhs (stmt);
+  enum tree_code code = gimple_assign_rhs_code (stmt);
+  enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
+  tree src_var = gimple_assign_rhs1 (stmt);
+  
   /* See if this is a simple copy operation of an SSA name to the function
      result.  In that case we may have a simple tail call.  Ignore type
      conversions that can never produce extra code between the function
      call and the function return.  */
-  STRIP_NOPS (src_var);
-  if (TREE_CODE (src_var) == SSA_NAME)
+  if ((rhs_class == GIMPLE_SINGLE_RHS || gimple_assign_cast_p (stmt))
+      && (TREE_CODE (src_var) == SSA_NAME))
     {
+      /* Reject a tailcall if the type conversion might need
+        additional code.  */
+      if (IS_CONVERT_EXPR_CODE_P (code)
+         && TYPE_MODE (TREE_TYPE (dest)) != TYPE_MODE (TREE_TYPE (src_var)))
+       return false;
+
       if (src_var != *ass_var)
        return false;
 
@@ -291,7 +294,7 @@ process_assignment (tree ass, tree stmt, block_stmt_iterator call, tree *m,
       return true;
     }
 
-  if (TREE_CODE_CLASS (code) != tcc_binary)
+  if (rhs_class != GIMPLE_BINARY_RHS)
     return false;
 
   /* Accumulator optimizations will reverse the order of operations.
@@ -311,8 +314,8 @@ process_assignment (tree ass, tree stmt, block_stmt_iterator call, tree *m,
      TODO -- Extend it for cases where the linear transformation of the output
      is expressed in a more complicated way.  */
 
-  op0 = TREE_OPERAND (src, 0);
-  op1 = TREE_OPERAND (src, 1);
+  op0 = gimple_assign_rhs1 (stmt);
+  op1 = gimple_assign_rhs2 (stmt);
 
   if (op0 == *ass_var
       && (non_ass_var = independent_of_stmt_p (op1, stmt, call)))
@@ -346,7 +349,8 @@ process_assignment (tree ass, tree stmt, block_stmt_iterator call, tree *m,
       *ass_var = dest;
       return true;
 
-      /* TODO -- Handle other codes (NEGATE_EXPR, MINUS_EXPR, POINTER_PLUS_EXPR).  */
+      /* TODO -- Handle other codes (NEGATE_EXPR, MINUS_EXPR,
+        POINTER_PLUS_EXPR).  */
 
     default:
       return false;
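
process_assignment now keys off get_gimple_rhs_class: a GIMPLE_SINGLE_RHS or cast of an SSA name is a plain copy of the call result, a GIMPLE_BINARY_RHS is a candidate accumulation. A compact sketch of that classification, assuming the accessors used above (the enum and helper are illustrative, not part of the patch):

enum assign_kind { ASSIGN_COPY, ASSIGN_BINARY, ASSIGN_OTHER };

/* Sketch: classify a GIMPLE_ASSIGN the way the tail-call pass does.  */
static enum assign_kind
classify_assign (gimple stmt)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);

  if ((rhs_class == GIMPLE_SINGLE_RHS || gimple_assign_cast_p (stmt))
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    {
      /* A cast only stays free if it does not change the machine mode.  */
      if (IS_CONVERT_EXPR_CODE_P (code)
          && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (stmt)))
              != TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (stmt)))))
        return ASSIGN_OTHER;
      return ASSIGN_COPY;
    }

  if (rhs_class == GIMPLE_BINARY_RHS)
    return ASSIGN_BINARY;

  return ASSIGN_OTHER;
}
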
@@ -359,12 +363,14 @@ static tree
 propagate_through_phis (tree var, edge e)
 {
   basic_block dest = e->dest;
-  tree phi;
-
-  for (phi = phi_nodes (dest); phi; phi = PHI_CHAIN (phi))
-    if (PHI_ARG_DEF_FROM_EDGE (phi, e) == var)
-      return PHI_RESULT (phi);
-
+  gimple_stmt_iterator gsi;
+  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
+    {
+      gimple phi = gsi_stmt (gsi);
+      if (PHI_ARG_DEF_FROM_EDGE (phi, e) == var)
+        return PHI_RESULT (phi);
+    }
   return var;
 }
 
@@ -374,52 +380,44 @@ propagate_through_phis (tree var, edge e)
 static void
 find_tail_calls (basic_block bb, struct tailcall **ret)
 {
-  tree ass_var, ret_var, stmt, func, param, call = NULL_TREE;
-  block_stmt_iterator bsi, absi;
+  tree ass_var = NULL_TREE, ret_var, func, param;
+  gimple stmt, call = NULL;
+  gimple_stmt_iterator gsi, agsi;
   bool tail_recursion;
   struct tailcall *nw;
   edge e;
   tree m, a;
   basic_block abb;
-  stmt_ann_t ann;
+  size_t idx;
 
   if (!single_succ_p (bb))
     return;
 
-  for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
+  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
     {
-      stmt = bsi_stmt (bsi);
+      stmt = gsi_stmt (gsi);
 
       /* Ignore labels.  */
-      if (TREE_CODE (stmt) == LABEL_EXPR)
+      if (gimple_code (stmt) == GIMPLE_LABEL)
        continue;
 
       /* Check for a call.  */
-      if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
-       {
-         ass_var = GIMPLE_STMT_OPERAND (stmt, 0);
-         call = GIMPLE_STMT_OPERAND (stmt, 1);
-         if (TREE_CODE (call) == WITH_SIZE_EXPR)
-           call = TREE_OPERAND (call, 0);
-       }
-      else
+      if (is_gimple_call (stmt))
        {
-         ass_var = NULL_TREE;
          call = stmt;
+         ass_var = gimple_call_lhs (stmt);
+         break;
        }
 
-      if (TREE_CODE (call) == CALL_EXPR)
-       break;
-
       /* If the statement has virtual or volatile operands, fail.  */
-      ann = stmt_ann (stmt);
       if (!ZERO_SSA_OPERANDS (stmt, (SSA_OP_VUSE | SSA_OP_VIRTUAL_DEFS))
-         || ann->has_volatile_ops
-         || (!gimple_aliases_computed_p (cfun) && ann->references_memory))
+         || gimple_has_volatile_ops (stmt)
+         || (!gimple_aliases_computed_p (cfun)
+             && gimple_references_memory_p (stmt)))
        return;
     }
 
-  if (bsi_end_p (bsi))
+  if (gsi_end_p (gsi))
     {
       edge_iterator ei;
       /* Recurse to the predecessors.  */
@@ -445,16 +443,15 @@ find_tail_calls (basic_block bb, struct tailcall **ret)
 
   /* We found the call, check whether it is suitable.  */
   tail_recursion = false;
-  func = get_callee_fndecl (call);
+  func = gimple_call_fndecl (call);
   if (func == current_function_decl)
     {
-      call_expr_arg_iterator iter;
       tree arg;
-      for (param = DECL_ARGUMENTS (func),
-            arg = first_call_expr_arg (call, &iter);
-          param && arg;
-          param = TREE_CHAIN (param), arg = next_call_expr_arg (&iter))
+      for (param = DECL_ARGUMENTS (func), idx = 0;
+          param && idx < gimple_call_num_args (call);
+          param = TREE_CHAIN (param), idx ++)
        {
+         arg = gimple_call_arg (call, idx);
          if (param != arg)
            {
              /* Make sure there are no problems with copying.  The parameter
@@ -463,7 +460,7 @@ find_tail_calls (basic_block bb, struct tailcall **ret)
                 we emitted a suitable type conversion statement.  */
              if (!is_gimple_reg_type (TREE_TYPE (param))
                  || !useless_type_conversion_p (TREE_TYPE (param),
-                                               TREE_TYPE (arg)))
+                                                TREE_TYPE (arg)))
                break;
 
              /* The parameter should be a real operand, so that phi node
@@ -471,13 +468,13 @@ find_tail_calls (basic_block bb, struct tailcall **ret)
                 of copying the value.  This test implies is_gimple_reg_type
                 from the previous condition, however this one could be
                 relaxed by being more careful with copying the new value
-                of the parameter (emitting appropriate GIMPLE_MODIFY_STMT and
+                of the parameter (emitting appropriate GIMPLE_ASSIGN and
                 updating the virtual operands).  */
              if (!is_gimple_reg (param))
                break;
            }
        }
-      if (!arg && !param)
+      if (idx == gimple_call_num_args (call) && !param)
        tail_recursion = true;
     }
 
@@ -489,48 +486,36 @@ find_tail_calls (basic_block bb, struct tailcall **ret)
   a = NULL_TREE;
 
   abb = bb;
-  absi = bsi;
+  agsi = gsi;
   while (1)
     {
-      bsi_next (&absi);
+      gsi_next (&agsi);
 
-      while (bsi_end_p (absi))
+      while (gsi_end_p (agsi))
        {
          ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
          abb = single_succ (abb);
-         absi = bsi_start (abb);
+         agsi = gsi_start_bb (abb);
        }
 
-      stmt = bsi_stmt (absi);
+      stmt = gsi_stmt (agsi);
 
-      if (TREE_CODE (stmt) == LABEL_EXPR)
+      if (gimple_code (stmt) == GIMPLE_LABEL)
        continue;
 
-      if (TREE_CODE (stmt) == RETURN_EXPR)
+      if (gimple_code (stmt) == GIMPLE_RETURN)
        break;
 
-      if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+      if (gimple_code (stmt) != GIMPLE_ASSIGN)
        return;
 
-      if (!process_assignment (stmt, stmt, bsi, &m, &a, &ass_var))
+      /* This is a gimple assign. */
+      if (! process_assignment (stmt, gsi, &m, &a, &ass_var))
        return;
     }
 
   /* See if this is a tail call we can handle.  */
-  ret_var = TREE_OPERAND (stmt, 0);
-  if (ret_var
-      && TREE_CODE (ret_var) == GIMPLE_MODIFY_STMT)
-    {
-      tree ret_op = GIMPLE_STMT_OPERAND (ret_var, 1);
-      STRIP_NOPS (ret_op);
-      if (!tail_recursion
-         && TREE_CODE (ret_op) != SSA_NAME)
-       return;
-
-      if (!process_assignment (ret_var, stmt, bsi, &m, &a, &ass_var))
-       return;
-      ret_var = GIMPLE_STMT_OPERAND (ret_var, 0);
-    }
+  ret_var = gimple_return_retval (stmt);
 
   /* We may proceed if there either is no return value, or the return value
      is identical to the call's return.  */
@@ -545,8 +530,7 @@ find_tail_calls (basic_block bb, struct tailcall **ret)
 
   nw = XNEW (struct tailcall);
 
-  nw->call_block = bb;
-  nw->call_bsi = bsi;
+  nw->call_gsi = gsi;
 
   nw->tail_recursion = tail_recursion;
 
@@ -557,15 +541,70 @@ find_tail_calls (basic_block bb, struct tailcall **ret)
   *ret = nw;
 }
 
-/* Adjust the accumulator values according to A and M after BSI, and update
-   the phi nodes on edge BACK.  */
+/* Helper to insert PHI_ARGH to the phi of VAR in the destination of edge E.  */
 
 static void
-adjust_accumulator_values (block_stmt_iterator bsi, tree m, tree a, edge back)
+add_successor_phi_arg (edge e, tree var, tree phi_arg)
+{
+  gimple_stmt_iterator gsi;
+
+  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
+    if (PHI_RESULT (gsi_stmt (gsi)) == var)
+      break;
+
+  gcc_assert (!gsi_end_p (gsi));
+  add_phi_arg (gsi_stmt (gsi), phi_arg, e);
+}
+
+/* Creates a GIMPLE statement which computes the operation specified by
+   CODE, OP0 and OP1 to a new variable with name LABEL and inserts the
+   statement in the position specified by GSI and UPDATE.  Returns the
+   tree node of the statement's result.  */
+
+static tree
+adjust_return_value_with_ops (enum tree_code code, const char *label, 
+                             tree op0, tree op1, gimple_stmt_iterator gsi,
+                             enum gsi_iterator_update update)
 {
-  tree stmt, var, phi, tmp;
+
   tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
-  tree a_acc_arg = a_acc, m_acc_arg = m_acc;
+  tree tmp = create_tmp_var (ret_type, label);
+  gimple stmt = gimple_build_assign_with_ops (code, tmp, op0, op1);
+  tree result;
+
+  add_referenced_var (tmp);
+  result = make_ssa_name (tmp, stmt);
+  gimple_assign_set_lhs (stmt, result);
+  update_stmt (stmt);
+  gsi_insert_before (&gsi, stmt, update);
+  return result;
+}
+
+/* Creates a new GIMPLE statement that adjusts the value of accumulator ACC by 
+   the computation specified by CODE and OP1 and insert the statement
+   at the position specified by GSI as a new statement.  Returns new SSA name
+   of updated accumulator.  */
+
+static tree
+update_accumulator_with_ops (enum tree_code code, tree acc, tree op1,
+                            gimple_stmt_iterator gsi)
+{
+  gimple stmt = gimple_build_assign_with_ops (code, SSA_NAME_VAR (acc), acc,
+                                             op1);
+  tree var = make_ssa_name (SSA_NAME_VAR (acc), stmt);
+  gimple_assign_set_lhs (stmt, var);
+  update_stmt (stmt);
+  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
+  return var;
+}
+
+/* Adjust the accumulator values according to A and M after GSI, and update
+   the phi nodes on edge BACK.  */
+
+static void
+adjust_accumulator_values (gimple_stmt_iterator gsi, tree m, tree a, edge back)
+{
+  tree var, a_acc_arg = a_acc, m_acc_arg = m_acc;
 
   if (a)
     {
@@ -574,58 +613,23 @@ adjust_accumulator_values (block_stmt_iterator bsi, tree m, tree a, edge back)
          if (integer_onep (a))
            var = m_acc;
          else
-           {
-             stmt = build_gimple_modify_stmt (NULL_TREE,
-                                              build2 (MULT_EXPR, ret_type,
-                                                      m_acc, a));
-
-             tmp = create_tmp_var (ret_type, "acc_tmp");
-             add_referenced_var (tmp);
-
-             var = make_ssa_name (tmp, stmt);
-             GIMPLE_STMT_OPERAND (stmt, 0) = var;
-             bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
-           }
+           var = adjust_return_value_with_ops (MULT_EXPR, "acc_tmp", m_acc,
+                                               a, gsi, GSI_NEW_STMT);
        }
       else
        var = a;
 
-      stmt = build_gimple_modify_stmt (NULL_TREE, build2 (PLUS_EXPR, ret_type,
-                                                         a_acc, var));
-      var = make_ssa_name (SSA_NAME_VAR (a_acc), stmt);
-      GIMPLE_STMT_OPERAND (stmt, 0) = var;
-      bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
-      a_acc_arg = var;
+      a_acc_arg = update_accumulator_with_ops (PLUS_EXPR, a_acc, var, gsi);
     }
 
   if (m)
-    {
-      stmt = build_gimple_modify_stmt (NULL_TREE,
-                                      build2 (MULT_EXPR, ret_type,
-                                              m_acc, m));
-      var = make_ssa_name (SSA_NAME_VAR (m_acc), stmt);
-      GIMPLE_STMT_OPERAND (stmt, 0) = var;
-      bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
-      m_acc_arg = var;
-    }
+    m_acc_arg = update_accumulator_with_ops (MULT_EXPR, m_acc, m, gsi);
 
   if (a_acc)
-    {
-      for (phi = phi_nodes (back->dest); phi; phi = PHI_CHAIN (phi))
-       if (PHI_RESULT (phi) == a_acc)
-         break;
-
-      add_phi_arg (phi, a_acc_arg, back);
-    }
+    add_successor_phi_arg (back, a_acc, a_acc_arg);
 
   if (m_acc)
-    {
-      for (phi = phi_nodes (back->dest); phi; phi = PHI_CHAIN (phi))
-       if (PHI_RESULT (phi) == m_acc)
-         break;
-
-      add_phi_arg (phi, m_acc_arg, back);
-    }
+    add_successor_phi_arg (back, m_acc, m_acc_arg);
 }
 
 /* Adjust value of the return at the end of BB according to M and A
@@ -634,56 +638,23 @@ adjust_accumulator_values (block_stmt_iterator bsi, tree m, tree a, edge back)
 static void
 adjust_return_value (basic_block bb, tree m, tree a)
 {
-  tree ret_stmt = last_stmt (bb), ret_var, var, stmt, tmp;
-  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
-  tree *ret_op;
-  block_stmt_iterator bsi = bsi_last (bb);
+  tree retval;
+  gimple ret_stmt = gimple_seq_last_stmt (bb_seq (bb));
+  gimple_stmt_iterator gsi = gsi_last_bb (bb);
 
-  gcc_assert (TREE_CODE (ret_stmt) == RETURN_EXPR);
+  gcc_assert (gimple_code (ret_stmt) == GIMPLE_RETURN);
 
-  ret_var = TREE_OPERAND (ret_stmt, 0);
-  if (!ret_var)
+  retval = gimple_return_retval (ret_stmt);
+  if (!retval || retval == error_mark_node)
     return;
 
-  if (TREE_CODE (ret_var) == GIMPLE_MODIFY_STMT)
-    {
-      ret_op = &GIMPLE_STMT_OPERAND (ret_var, 1);
-      ret_var = *ret_op;
-    }
-  else
-    ret_op = &TREE_OPERAND (ret_stmt, 0);
-
   if (m)
-    {
-      stmt = build_gimple_modify_stmt (NULL_TREE,
-                                      build2 (MULT_EXPR, ret_type,
-                                              m_acc, ret_var));
-
-      tmp = create_tmp_var (ret_type, "acc_tmp");
-      add_referenced_var (tmp);
-
-      var = make_ssa_name (tmp, stmt);
-      GIMPLE_STMT_OPERAND (stmt, 0) = var;
-      bsi_insert_before (&bsi, stmt, BSI_SAME_STMT);
-    }
-  else
-    var = ret_var;
-
+    retval = adjust_return_value_with_ops (MULT_EXPR, "mul_tmp", m_acc, retval,
+                                          gsi, GSI_SAME_STMT);
   if (a)
-    {
-      stmt = build_gimple_modify_stmt (NULL_TREE,
-                                      build2 (PLUS_EXPR, ret_type,
-                                              a_acc, var));
-
-      tmp = create_tmp_var (ret_type, "acc_tmp");
-      add_referenced_var (tmp);
-
-      var = make_ssa_name (tmp, stmt);
-      GIMPLE_STMT_OPERAND (stmt, 0) = var;
-      bsi_insert_before (&bsi, stmt, BSI_SAME_STMT);
-    }
-
-  *ret_op = var;
+    retval = adjust_return_value_with_ops (PLUS_EXPR, "acc_tmp", a_acc, retval,
+                                          gsi, GSI_SAME_STMT);
+  gimple_return_set_retval (ret_stmt, retval);
   update_stmt (ret_stmt);
 }
 
@@ -735,90 +706,92 @@ arg_needs_copy_p (tree param)
 static void
 eliminate_tail_call (struct tailcall *t)
 {
-  tree param, stmt, rslt, call;
+  tree param, rslt;
+  gimple stmt, call;
   tree arg;
-  call_expr_arg_iterator iter;
+  size_t idx;
   basic_block bb, first;
   edge e;
-  tree phi;
-  block_stmt_iterator bsi;
-  tree orig_stmt;
+  gimple phi;
+  gimple_stmt_iterator gsi;
+  gimple orig_stmt;
 
-  stmt = orig_stmt = bsi_stmt (t->call_bsi);
-  bb = t->call_block;
+  stmt = orig_stmt = gsi_stmt (t->call_gsi);
+  bb = gsi_bb (t->call_gsi);
 
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
       fprintf (dump_file, "Eliminated tail recursion in bb %d : ",
               bb->index);
-      print_generic_stmt (dump_file, stmt, TDF_SLIM);
+      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
       fprintf (dump_file, "\n");
     }
 
-  if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
-    stmt = GIMPLE_STMT_OPERAND (stmt, 1);
+  gcc_assert (is_gimple_call (stmt));
 
   first = single_succ (ENTRY_BLOCK_PTR);
 
-  /* Remove the code after call_bsi that will become unreachable.  The
+  /* Remove the code after call_gsi that will become unreachable.  The
      possibly unreachable code in other blocks is removed later in
      cfg cleanup.  */
-  bsi = t->call_bsi;
-  bsi_next (&bsi);
-  while (!bsi_end_p (bsi))
+  gsi = t->call_gsi;
+  gsi_next (&gsi);
+  while (!gsi_end_p (gsi))
     {
-      tree t = bsi_stmt (bsi);
+      gimple t = gsi_stmt (gsi);
       /* Do not remove the return statement, so that redirect_edge_and_branch
         sees how the block ends.  */
-      if (TREE_CODE (t) == RETURN_EXPR)
+      if (gimple_code (t) == GIMPLE_RETURN)
        break;
 
-      bsi_remove (&bsi, true);
+      gsi_remove (&gsi, true);
       release_defs (t);
     }
 
   /* Number of executions of function has reduced by the tailcall.  */
-  e = single_succ_edge (t->call_block);
+  e = single_succ_edge (gsi_bb (t->call_gsi));
   decrease_profile (EXIT_BLOCK_PTR, e->count, EDGE_FREQUENCY (e));
   decrease_profile (ENTRY_BLOCK_PTR, e->count, EDGE_FREQUENCY (e));
   if (e->dest != EXIT_BLOCK_PTR)
     decrease_profile (e->dest, e->count, EDGE_FREQUENCY (e));
 
   /* Replace the call by a jump to the start of function.  */
-  e = redirect_edge_and_branch (single_succ_edge (t->call_block), first);
+  e = redirect_edge_and_branch (single_succ_edge (gsi_bb (t->call_gsi)),
+                               first);
   gcc_assert (e);
-  PENDING_STMT (e) = NULL_TREE;
+  PENDING_STMT (e) = NULL;
 
   /* Add phi node entries for arguments.  The ordering of the phi nodes should
      be the same as the ordering of the arguments.  */
   for (param = DECL_ARGUMENTS (current_function_decl),
-        arg = first_call_expr_arg (stmt, &iter),
-        phi = phi_nodes (first);
+        idx = 0, gsi = gsi_start_phis (first);
        param;
-       param = TREE_CHAIN (param), arg = next_call_expr_arg (&iter))
+       param = TREE_CHAIN (param), idx++)
     {
       if (!arg_needs_copy_p (param))
        continue;
+
+      arg = gimple_call_arg (stmt, idx);
+      phi = gsi_stmt (gsi);
       gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi)));
 
       add_phi_arg (phi, arg, e);
-      phi = PHI_CHAIN (phi);
+      gsi_next (&gsi);
     }
 
   /* Update the values of accumulators.  */
-  adjust_accumulator_values (t->call_bsi, t->mult, t->add, e);
+  adjust_accumulator_values (t->call_gsi, t->mult, t->add, e);
 
-  call = bsi_stmt (t->call_bsi);
-  if (TREE_CODE (call) == GIMPLE_MODIFY_STMT)
+  call = gsi_stmt (t->call_gsi);
+  rslt = gimple_call_lhs (call);
+  if (rslt != NULL_TREE)
     {
-      rslt = GIMPLE_STMT_OPERAND (call, 0);
-
       /* Result of the call will no longer be defined.  So adjust the
         SSA_NAME_DEF_STMT accordingly.  */
-      SSA_NAME_DEF_STMT (rslt) = build_empty_stmt ();
+      SSA_NAME_DEF_STMT (rslt) = gimple_build_nop ();
     }
 
-  bsi_remove (&t->call_bsi, true);
+  gsi_remove (&t->call_gsi, true);
   release_defs (call);
 }
 
@@ -866,21 +839,40 @@ optimize_tail_call (struct tailcall *t, bool opt_tailcalls)
 
   if (opt_tailcalls)
     {
-      tree stmt = bsi_stmt (t->call_bsi);
+      gimple stmt = gsi_stmt (t->call_gsi);
 
-      stmt = get_call_expr_in (stmt);
-      CALL_EXPR_TAILCALL (stmt) = 1;
+      gimple_call_set_tail (stmt, true);
       if (dump_file && (dump_flags & TDF_DETAILS))
         {
          fprintf (dump_file, "Found tail call ");
-         print_generic_expr (dump_file, stmt, dump_flags);
-         fprintf (dump_file, " in bb %i\n", t->call_block->index);
+         print_gimple_stmt (dump_file, stmt, 0, dump_flags);
+         fprintf (dump_file, " in bb %i\n", (gsi_bb (t->call_gsi))->index);
        }
     }
 
   return false;
 }
 
+/* Creates a tail-call accumulator of the same type as the return type of the
+   current function.  LABEL is the name used when creating the temporary
+   variable for the accumulator.  The accumulator is inserted as a new PHI
+   node in basic block BB, which must have a single predecessor, with an
+   initial value of INIT converted to the current function's return type.  */
+
+static tree
+create_tailcall_accumulator (const char *label, basic_block bb, tree init)
+{
+  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
+  tree tmp = create_tmp_var (ret_type, label);
+  gimple phi;
+
+  add_referenced_var (tmp);
+  phi = create_phi_node (tmp, bb);
+  /* RET_TYPE can be a float when -ffast-math is enabled.  */
+  add_phi_arg (phi, fold_convert (ret_type, init), single_pred_edge (bb));
+  return PHI_RESULT (phi);
+}
+
 /* Optimizes tail calls in the function, turning the tail recursion
    into iteration.  */
 
@@ -892,7 +884,8 @@ tree_optimize_tail_calls_1 (bool opt_tailcalls)
   struct tailcall *tailcalls = NULL, *act, *next;
   bool changed = false;
   basic_block first = single_succ (ENTRY_BLOCK_PTR);
-  tree stmt, param, ret_type, tmp, phi;
+  tree param;
+  gimple stmt;
   edge_iterator ei;
 
   if (!suitable_for_tail_opt_p ())
@@ -907,7 +900,7 @@ tree_optimize_tail_calls_1 (bool opt_tailcalls)
       stmt = last_stmt (e->src);
 
       if (stmt
-         && TREE_CODE (stmt) == RETURN_EXPR)
+         && gimple_code (stmt) == GIMPLE_RETURN)
        find_tail_calls (e->src, &tailcalls);
     }
 
@@ -932,7 +925,7 @@ tree_optimize_tail_calls_1 (bool opt_tailcalls)
              {
                tree name = gimple_default_def (cfun, param);
                tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));
-               tree phi;
+               gimple phi;
 
                set_default_def (param, new_name);
                phi = create_phi_node (name, first);
@@ -943,44 +936,12 @@ tree_optimize_tail_calls_1 (bool opt_tailcalls)
        }
 
       if (act->add && !a_acc)
-       {
-         ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
-
-         tmp = create_tmp_var (ret_type, "add_acc");
-         add_referenced_var (tmp);
-
-         phi = create_phi_node (tmp, first);
-         add_phi_arg (phi,
-                      /* RET_TYPE can be a float when -ffast-maths is
-                         enabled.  */
-                      fold_convert (ret_type, integer_zero_node),
-                      single_pred_edge (first));
-         a_acc = PHI_RESULT (phi);
-       }
+       a_acc = create_tailcall_accumulator ("add_acc", first,
+                                            integer_zero_node);
 
       if (act->mult && !m_acc)
-       {
-         ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
-
-         tmp = create_tmp_var (ret_type, "mult_acc");
-         add_referenced_var (tmp);
-
-         phi = create_phi_node (tmp, first);
-         add_phi_arg (phi,
-                      /* RET_TYPE can be a float when -ffast-maths is
-                         enabled.  */
-                      fold_convert (ret_type, integer_one_node),
-                      single_pred_edge (first));
-         m_acc = PHI_RESULT (phi);
-       }
-    }
-
-
-  if (phis_constructed)
-    {
-      /* Reverse the order of the phi nodes, so that it matches the order
-        of operands of the function, as assumed by eliminate_tail_call.  */
-      set_phi_nodes (first, phi_reverse (phi_nodes (first)));
+       m_acc = create_tailcall_accumulator ("mult_acc", first,
+                                            integer_one_node);
     }
 
   for (; tailcalls; tailcalls = next)
@@ -998,7 +959,7 @@ tree_optimize_tail_calls_1 (bool opt_tailcalls)
          stmt = last_stmt (e->src);
 
          if (stmt
-             && TREE_CODE (stmt) == RETURN_EXPR)
+             && gimple_code (stmt) == GIMPLE_RETURN)
            adjust_return_value (e->src, m_acc, a_acc);
        }
     }
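
(Illustrative sketch, not part of the patch: the two functions below are
hypothetical C and only show the source-level effect of the accumulator
machinery that find_tail_calls, create_tailcall_accumulator,
update_accumulator_with_ops and adjust_return_value implement above.)

    /* Before the pass: the recursive call is followed only by an addition,
       which process_assignment folds into the add accumulator A.  */
    int
    sum (int n)
    {
      if (n == 0)
        return 0;
      return n + sum (n - 1);
    }

    /* Conceptually after the pass: the entry block gains an "add_acc" PHI
       (create_tailcall_accumulator, initial value integer_zero_node), each
       eliminated call updates it via PLUS_EXPR (update_accumulator_with_ops),
       and the remaining return adds it back in (adjust_return_value).  */
    int
    sum_as_loop (int n)
    {
      int add_acc = 0;
      while (n != 0)
        {
          add_acc = add_acc + n;
          n = n - 1;
        }
      return 0 + add_acc;
    }

The multiplication accumulator ("mult_acc", seeded from integer_one_node)
plays the same role for recursions whose result is scaled rather than
incremented.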
index 818ae7c..c9753a0 100644
@@ -75,10 +75,10 @@ vect_determine_vectorization_factor (loop_vec_info loop_vinfo)
   struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
   basic_block *bbs = LOOP_VINFO_BBS (loop_vinfo);
   int nbbs = loop->num_nodes;
-  block_stmt_iterator si;
+  gimple_stmt_iterator si;
   unsigned int vectorization_factor = 0;
   tree scalar_type;
-  tree phi;
+  gimple phi;
   tree vectype;
   unsigned int nunits;
   stmt_vec_info stmt_info;
@@ -91,13 +91,14 @@ vect_determine_vectorization_factor (loop_vec_info loop_vinfo)
     {
       basic_block bb = bbs[i];
 
-      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
        {
+         phi = gsi_stmt (si);
          stmt_info = vinfo_for_stmt (phi);
          if (vect_print_dump_info (REPORT_DETAILS))
            {
              fprintf (vect_dump, "==> examining phi: ");
-             print_generic_expr (vect_dump, phi, TDF_SLIM);
+             print_gimple_stmt (vect_dump, phi, 0, TDF_SLIM);
            }
 
          gcc_assert (stmt_info);
@@ -142,15 +143,15 @@ vect_determine_vectorization_factor (loop_vec_info loop_vinfo)
            }
        }
 
-      for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
+      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
         {
-         tree stmt = bsi_stmt (si);
+         gimple stmt = gsi_stmt (si);
          stmt_info = vinfo_for_stmt (stmt);
 
          if (vect_print_dump_info (REPORT_DETAILS))
            {
              fprintf (vect_dump, "==> examining statement: ");
-             print_generic_expr (vect_dump, stmt, TDF_SLIM);
+             print_gimple_stmt (vect_dump, stmt, 0, TDF_SLIM);
            }
 
          gcc_assert (stmt_info);
@@ -164,23 +165,22 @@ vect_determine_vectorization_factor (loop_vec_info loop_vinfo)
              continue;
            }
 
-         if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+         if (gimple_get_lhs (stmt) == NULL_TREE)
            {
              if (vect_print_dump_info (REPORT_UNVECTORIZED_LOOPS))
                {
                  fprintf (vect_dump, "not vectorized: irregular stmt.");
-                 print_generic_expr (vect_dump, stmt, TDF_SLIM);
+                 print_gimple_stmt (vect_dump, stmt, 0, TDF_SLIM);
                }
              return false;
            }
 
-         if (!GIMPLE_STMT_P (stmt)
-             && VECTOR_MODE_P (TYPE_MODE (TREE_TYPE (stmt))))
+         if (VECTOR_MODE_P (TYPE_MODE (gimple_expr_type (stmt))))
            {
              if (vect_print_dump_info (REPORT_UNVECTORIZED_LOOPS))
                {
                  fprintf (vect_dump, "not vectorized: vector stmt in loop:");
-                 print_generic_expr (vect_dump, stmt, TDF_SLIM);
+                 print_gimple_stmt (vect_dump, stmt, 0, TDF_SLIM);
                }
              return false;
            }
@@ -196,7 +196,6 @@ vect_determine_vectorization_factor (loop_vec_info loop_vinfo)
            }
          else
            {
-             tree operation;
 
              gcc_assert (! STMT_VINFO_DATA_REF (stmt_info)
                          && !is_pattern_stmt_p (stmt_info));
@@ -216,16 +215,16 @@ vect_determine_vectorization_factor (loop_vec_info loop_vinfo)
                 cannot rely on invariant motion to always take invariants out
                 of the loop, and so in the case of promotion we also have to 
                 check the rhs.  */
-             scalar_type = TREE_TYPE (GIMPLE_STMT_OPERAND (stmt, 0));
+             scalar_type = gimple_expr_type (stmt);
 
-             operation = GIMPLE_STMT_OPERAND (stmt, 1);
-             if (CONVERT_EXPR_P (operation)
-                 || TREE_CODE (operation) == WIDEN_MULT_EXPR
-                 || TREE_CODE (operation) == FLOAT_EXPR)
+             if (is_gimple_assign (stmt)
+                 && (gimple_assign_cast_p (stmt)
+                     || gimple_assign_rhs_code (stmt) == WIDEN_MULT_EXPR
+                     || gimple_assign_rhs_code (stmt) == FLOAT_EXPR))
                {
-                 tree rhs_type = TREE_TYPE (TREE_OPERAND (operation, 0));
-                 if (TREE_INT_CST_LOW (TYPE_SIZE_UNIT (rhs_type)) < 
-                     TREE_INT_CST_LOW (TYPE_SIZE_UNIT (scalar_type)))
+                 tree rhs_type = TREE_TYPE (gimple_assign_rhs1 (stmt));
+                 if (TREE_INT_CST_LOW (TYPE_SIZE_UNIT (rhs_type))
+                     < TREE_INT_CST_LOW (TYPE_SIZE_UNIT (scalar_type)))
                    scalar_type = rhs_type;
                }
 
@@ -315,11 +314,11 @@ vect_analyze_operations (loop_vec_info loop_vinfo)
   struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
   basic_block *bbs = LOOP_VINFO_BBS (loop_vinfo);
   int nbbs = loop->num_nodes;
-  block_stmt_iterator si;
+  gimple_stmt_iterator si;
   unsigned int vectorization_factor = 0;
   int i;
   bool ok;
-  tree phi;
+  gimple phi;
   stmt_vec_info stmt_info;
   bool need_to_vectorize = false;
   int min_profitable_iters;
@@ -337,15 +336,16 @@ vect_analyze_operations (loop_vec_info loop_vinfo)
     {
       basic_block bb = bbs[i];
 
-      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
         {
+         phi = gsi_stmt (si);
          ok = true;
 
          stmt_info = vinfo_for_stmt (phi);
          if (vect_print_dump_info (REPORT_DETAILS))
            {
              fprintf (vect_dump, "examining phi: ");
-             print_generic_expr (vect_dump, phi, TDF_SLIM);
+             print_gimple_stmt (vect_dump, phi, 0, TDF_SLIM);
            }
 
          if (! is_loop_header_bb_p (bb))
@@ -398,22 +398,22 @@ vect_analyze_operations (loop_vec_info loop_vinfo)
                {
                  fprintf (vect_dump,
                           "not vectorized: relevant phi not supported: ");
-                 print_generic_expr (vect_dump, phi, TDF_SLIM);
+                 print_gimple_stmt (vect_dump, phi, 0, TDF_SLIM);
                }
              return false;
            }
        }
 
-      for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
+      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
        {
-         tree stmt = bsi_stmt (si);
+         gimple stmt = gsi_stmt (si);
          stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
          enum vect_def_type relevance = STMT_VINFO_RELEVANT (stmt_info);
 
          if (vect_print_dump_info (REPORT_DETAILS))
            {
              fprintf (vect_dump, "==> examining statement: ");
-             print_generic_expr (vect_dump, stmt, TDF_SLIM);
+             print_gimple_stmt (vect_dump, stmt, 0, TDF_SLIM);
            }
 
          gcc_assert (stmt_info);
@@ -454,8 +454,7 @@ vect_analyze_operations (loop_vec_info loop_vinfo)
 
          if (STMT_VINFO_RELEVANT_P (stmt_info))
            {
-             gcc_assert (GIMPLE_STMT_P (stmt)
-                         || !VECTOR_MODE_P (TYPE_MODE (TREE_TYPE (stmt))));
+             gcc_assert (!VECTOR_MODE_P (TYPE_MODE (gimple_expr_type (stmt))));
              gcc_assert (STMT_VINFO_VECTYPE (stmt_info));
              need_to_vectorize = true;
            }
@@ -480,7 +479,7 @@ vect_analyze_operations (loop_vec_info loop_vinfo)
                {
                  fprintf (vect_dump, "not vectorized: relevant stmt not ");
                  fprintf (vect_dump, "supported: ");
-                 print_generic_expr (vect_dump, stmt, TDF_SLIM);
+                 print_gimple_stmt (vect_dump, stmt, 0, TDF_SLIM);
                }
              return false;
            }
@@ -497,7 +496,7 @@ vect_analyze_operations (loop_vec_info loop_vinfo)
                {
                  fprintf (vect_dump, "not vectorized: live stmt not ");
                  fprintf (vect_dump, "supported: ");
-                 print_generic_expr (vect_dump, stmt, TDF_SLIM);
+                 print_gimple_stmt (vect_dump, stmt, 0, TDF_SLIM);
                }
              return false;
            }   
@@ -520,7 +519,7 @@ vect_analyze_operations (loop_vec_info loop_vinfo)
                    {
                      fprintf (vect_dump, "not vectorized: the size of group "
                               "of strided accesses is not a power of 2");
-                     print_generic_expr (vect_dump, stmt, TDF_SLIM);
+                     print_gimple_stmt (vect_dump, stmt, 0, TDF_SLIM);
                    }
                  return false;
                }
@@ -649,7 +648,7 @@ vect_analyze_operations (loop_vec_info loop_vinfo)
    used in STMT for anything other than indexing an array.  */
 
 static bool
-exist_non_indexing_operands_for_use_p (tree use, tree stmt)
+exist_non_indexing_operands_for_use_p (tree use, gimple stmt)
 {
   tree operand;
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
@@ -673,10 +672,12 @@ exist_non_indexing_operands_for_use_p (tree use, tree stmt)
      Therefore, all we need to check is if STMT falls into the
      first case, and whether var corresponds to USE.  */
  
-  if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) == SSA_NAME)
+  if (TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME)
     return false;
 
-  operand = GIMPLE_STMT_OPERAND (stmt, 1);
+  if (!gimple_assign_copy_p (stmt))
+    return false;
+  operand = gimple_assign_rhs1 (stmt);
 
   if (TREE_CODE (operand) != SSA_NAME)
     return false;
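
(Illustrative sketch, not part of the patch: dump_block_stmts below is a
hypothetical helper, assuming GCC's internal headers; it only demonstrates
the gimple_stmt_iterator idiom that the surrounding hunks substitute for the
old phi_nodes ()/PHI_CHAIN () and bsi_* walks.)

    /* Walk all PHI nodes and then all other statements of BB, dumping
       each one with the tuple iterators introduced by this merge.  */
    static void
    dump_block_stmts (basic_block bb)
    {
      gimple_stmt_iterator gsi;

      /* PHI nodes are now reached through gsi_start_phis instead of
         phi_nodes ()/PHI_CHAIN ().  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        print_gimple_stmt (stderr, gsi_stmt (gsi), 0, TDF_SLIM);

      /* Ordinary statements use gsi_start_bb instead of bsi_start.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        print_gimple_stmt (stderr, gsi_stmt (gsi), 0, TDF_SLIM);
    }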
@@ -698,17 +699,18 @@ exist_non_indexing_operands_for_use_p (tree use, tree stmt)
 static void
 vect_analyze_scalar_cycles_1 (loop_vec_info loop_vinfo, struct loop *loop)
 {
-  tree phi;
   basic_block bb = loop->header;
   tree dumy;
-  VEC(tree,heap) *worklist = VEC_alloc (tree, heap, 64);
+  VEC(gimple,heap) *worklist = VEC_alloc (gimple, heap, 64);
+  gimple_stmt_iterator gsi;
 
   if (vect_print_dump_info (REPORT_DETAILS))
     fprintf (vect_dump, "=== vect_analyze_scalar_cycles ===");
 
   /* First - identify all inductions.  */
-  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
     {
+      gimple phi = gsi_stmt (gsi);
       tree access_fn = NULL;
       tree def = PHI_RESULT (phi);
       stmt_vec_info stmt_vinfo = vinfo_for_stmt (phi);
@@ -716,7 +718,7 @@ vect_analyze_scalar_cycles_1 (loop_vec_info loop_vinfo, struct loop *loop)
       if (vect_print_dump_info (REPORT_DETAILS))
        {
          fprintf (vect_dump, "Analyze phi: ");
-         print_generic_expr (vect_dump, phi, TDF_SLIM);
+         print_gimple_stmt (vect_dump, phi, 0, TDF_SLIM);
        }
 
       /* Skip virtual phi's. The data dependences that are associated with
@@ -737,7 +739,7 @@ vect_analyze_scalar_cycles_1 (loop_vec_info loop_vinfo, struct loop *loop)
       if (!access_fn
          || !vect_is_simple_iv_evolution (loop->num, access_fn, &dumy, &dumy)) 
        {
-         VEC_safe_push (tree, heap, worklist, phi);      
+         VEC_safe_push (gimple, heap, worklist, phi);    
          continue;
        }
 
@@ -748,17 +750,17 @@ vect_analyze_scalar_cycles_1 (loop_vec_info loop_vinfo, struct loop *loop)
 
 
   /* Second - identify all reductions.  */
-  while (VEC_length (tree, worklist) > 0)
+  while (VEC_length (gimple, worklist) > 0)
     {
-      tree phi = VEC_pop (tree, worklist);
+      gimple phi = VEC_pop (gimple, worklist);
       tree def = PHI_RESULT (phi);
       stmt_vec_info stmt_vinfo = vinfo_for_stmt (phi);
-      tree reduc_stmt;
+      gimple reduc_stmt;
 
       if (vect_print_dump_info (REPORT_DETAILS))
         { 
           fprintf (vect_dump, "Analyze phi: ");
-          print_generic_expr (vect_dump, phi, TDF_SLIM);
+          print_gimple_stmt (vect_dump, phi, 0, TDF_SLIM);
         }
 
       gcc_assert (is_gimple_reg (SSA_NAME_VAR (def)));
@@ -778,7 +780,7 @@ vect_analyze_scalar_cycles_1 (loop_vec_info loop_vinfo, struct loop *loop)
           fprintf (vect_dump, "Unknown def-use cycle pattern.");
     }
 
-  VEC_free (tree, heap, worklist);
+  VEC_free (gimple, heap, worklist);
   return;
 }
 
@@ -833,7 +835,8 @@ static void
 vect_insert_into_interleaving_chain (struct data_reference *dra,
                                     struct data_reference *drb)
 {
-  tree prev, next, next_init;
+  gimple prev, next;
+  tree next_init;
   stmt_vec_info stmtinfo_a = vinfo_for_stmt (DR_STMT (dra)); 
   stmt_vec_info stmtinfo_b = vinfo_for_stmt (DR_STMT (drb));
 
@@ -855,7 +858,7 @@ vect_insert_into_interleaving_chain (struct data_reference *dra,
 
   /* We got to the end of the list. Insert here.  */
   DR_GROUP_NEXT_DR (vinfo_for_stmt (prev)) = DR_STMT (dra);
-  DR_GROUP_NEXT_DR (stmtinfo_a) = NULL_TREE;
+  DR_GROUP_NEXT_DR (stmtinfo_a) = NULL;
 }
 
 
@@ -888,8 +891,10 @@ vect_update_interleaving_chain (struct data_reference *drb,
 {
   stmt_vec_info stmtinfo_a = vinfo_for_stmt (DR_STMT (dra)); 
   stmt_vec_info stmtinfo_b = vinfo_for_stmt (DR_STMT (drb));
-  tree next_init, init_dra_chain, init_drb_chain, first_a, first_b;
-  tree node, prev, next, node_init, first_stmt;
+  tree next_init, init_dra_chain, init_drb_chain;
+  gimple first_a, first_b;
+  tree node_init;
+  gimple node, prev, next, first_stmt;
 
   /* 1. New stmts - both DRA and DRB are not a part of any chain.   */
   if (!DR_GROUP_FIRST_DR (stmtinfo_a) && !DR_GROUP_FIRST_DR (stmtinfo_b))
@@ -912,10 +917,10 @@ vect_update_interleaving_chain (struct data_reference *drb,
   /* 3. DRA is a part of a chain and DRB is not.  */  
   if (DR_GROUP_FIRST_DR (stmtinfo_a) && !DR_GROUP_FIRST_DR (stmtinfo_b))
     {
-      tree old_first_stmt = DR_GROUP_FIRST_DR (stmtinfo_a);
+      gimple old_first_stmt = DR_GROUP_FIRST_DR (stmtinfo_a);
       tree init_old = DR_INIT (STMT_VINFO_DATA_REF (vinfo_for_stmt (
                                                              old_first_stmt)));
-      tree tmp;
+      gimple tmp;
 
       if (tree_int_cst_compare (init_old, DR_INIT (drb)) > 0)
        {
@@ -991,7 +996,7 @@ vect_update_interleaving_chain (struct data_reference *drb,
        {
          /* We got to the end of the list. Insert here.  */
          DR_GROUP_NEXT_DR (vinfo_for_stmt (prev)) = node;
-         DR_GROUP_NEXT_DR (vinfo_for_stmt (node)) = NULL_TREE;
+         DR_GROUP_NEXT_DR (vinfo_for_stmt (node)) = NULL;
          prev = node;
        }                       
       DR_GROUP_FIRST_DR (vinfo_for_stmt (node)) = first_stmt;
@@ -1122,8 +1127,8 @@ vect_check_interleaving (struct data_reference *dra,
 static bool
 vect_same_range_drs (data_reference_p dr_i, data_reference_p dr_j)
 {
-  tree stmt_i = DR_STMT (dr_i);
-  tree stmt_j = DR_STMT (dr_j);
+  gimple stmt_i = DR_STMT (dr_i);
+  gimple stmt_j = DR_STMT (dr_j);
 
   if (operand_equal_p (DR_REF (dr_i), DR_REF (dr_j), 0)
       || (DR_GROUP_FIRST_DR (vinfo_for_stmt (stmt_i))
@@ -1351,7 +1356,7 @@ vect_analyze_data_ref_dependences (loop_vec_info loop_vinfo)
 static bool
 vect_compute_data_ref_alignment (struct data_reference *dr)
 {
-  tree stmt = DR_STMT (dr);
+  gimple stmt = DR_STMT (dr);
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt);  
   loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
   struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
@@ -1570,7 +1575,7 @@ vect_verify_datarefs_alignment (loop_vec_info loop_vinfo)
 
   for (i = 0; VEC_iterate (data_reference_p, datarefs, i, dr); i++)
     {
-      tree stmt = DR_STMT (dr);
+      gimple stmt = DR_STMT (dr);
       stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
 
       /* For interleaving, only the alignment of the first access matters.  */
@@ -1608,7 +1613,7 @@ vect_verify_datarefs_alignment (loop_vec_info loop_vinfo)
 static bool
 vector_alignment_reachable_p (struct data_reference *dr)
 {
-  tree stmt = DR_STMT (dr);
+  gimple stmt = DR_STMT (dr);
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
   tree vectype = STMT_VINFO_VECTYPE (stmt_info);
 
@@ -1773,7 +1778,7 @@ vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
   bool do_peeling = false;
   bool do_versioning = false;
   bool stat;
-  tree stmt;
+  gimple stmt;
   stmt_vec_info stmt_info;
   int vect_versioning_for_alias_required;
 
@@ -1857,7 +1862,7 @@ vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
     {
       int mis;
       int npeel = 0;
-      tree stmt = DR_STMT (dr0);
+      gimple stmt = DR_STMT (dr0);
       stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
       tree vectype = STMT_VINFO_VECTYPE (stmt_info);
       int nelements = TYPE_VECTOR_SUBPARTS (vectype);
@@ -1973,12 +1978,12 @@ vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
 
           if (!supportable_dr_alignment)
             {
-              tree stmt;
+              gimple stmt;
               int mask;
               tree vectype;
 
               if (known_alignment_for_access_p (dr)
-                  || VEC_length (tree,
+                  || VEC_length (gimple,
                                  LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo))
                      >= (unsigned) PARAM_VALUE (PARAM_VECT_MAX_VERSION_FOR_ALIGNMENT_CHECKS))
                 {
@@ -2004,29 +2009,29 @@ vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
               gcc_assert (!LOOP_VINFO_PTR_MASK (loop_vinfo)
                           || LOOP_VINFO_PTR_MASK (loop_vinfo) == mask);
               LOOP_VINFO_PTR_MASK (loop_vinfo) = mask;
-              VEC_safe_push (tree, heap,
+              VEC_safe_push (gimple, heap,
                              LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo),
                              DR_STMT (dr));
             }
         }
       
       /* Versioning requires at least one misaligned data reference.  */
-      if (VEC_length (tree, LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo)) == 0)
+      if (VEC_length (gimple, LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo)) == 0)
         do_versioning = false;
       else if (!do_versioning)
-        VEC_truncate (tree, LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo), 0);
+        VEC_truncate (gimple, LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo), 0);
     }
 
   if (do_versioning)
     {
-      VEC(tree,heap) *may_misalign_stmts
+      VEC(gimple,heap) *may_misalign_stmts
         = LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo);
-      tree stmt;
+      gimple stmt;
 
       /* It can now be assumed that the data references in the statements
          in LOOP_VINFO_MAY_MISALIGN_STMTS will be aligned in the version
          of the loop being vectorized.  */
-      for (i = 0; VEC_iterate (tree, may_misalign_stmts, i, stmt); i++)
+      for (i = 0; VEC_iterate (gimple, may_misalign_stmts, i, stmt); i++)
         {
           stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
           dr = STMT_VINFO_DATA_REF (stmt_info);
@@ -2088,7 +2093,7 @@ vect_analyze_group_access (struct data_reference *dr)
   tree step = DR_STEP (dr);
   tree scalar_type = TREE_TYPE (DR_REF (dr));
   HOST_WIDE_INT type_size = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (scalar_type));
-  tree stmt = DR_STMT (dr);
+  gimple stmt = DR_STMT (dr);
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
   loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
   HOST_WIDE_INT dr_step = TREE_INT_CST_LOW (step);
@@ -2132,12 +2137,12 @@ vect_analyze_group_access (struct data_reference *dr)
   if (DR_GROUP_FIRST_DR (vinfo_for_stmt (stmt)) == stmt)
     {
       /* First stmt in the interleaving chain. Check the chain.  */
-      tree next = DR_GROUP_NEXT_DR (vinfo_for_stmt (stmt));
+      gimple next = DR_GROUP_NEXT_DR (vinfo_for_stmt (stmt));
       struct data_reference *data_ref = dr;
       unsigned int count = 1;
       tree next_step;
       tree prev_init = DR_INIT (data_ref);
-      tree prev = stmt;
+      gimple prev = stmt;
       HOST_WIDE_INT diff, count_in_bytes;
 
       while (next)
@@ -2280,7 +2285,7 @@ vect_analyze_group_access (struct data_reference *dr)
       /* SLP: create an SLP data structure for every interleaving group of 
         stores for further analysis in vect_analyse_slp.  */
       if (!DR_IS_READ (dr) && !slp_impossible)
-       VEC_safe_push (tree, heap, LOOP_VINFO_STRIDED_STORES (loop_vinfo), stmt);
+       VEC_safe_push (gimple, heap, LOOP_VINFO_STRIDED_STORES (loop_vinfo), stmt);
     }
 
   return true;
@@ -2296,7 +2301,7 @@ vect_analyze_data_ref_access (struct data_reference *dr)
 {
   tree step = DR_STEP (dr);
   tree scalar_type = TREE_TYPE (DR_REF (dr));
-  tree stmt = DR_STMT (dr);
+  gimple stmt = DR_STMT (dr);
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
   loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
   struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
@@ -2317,7 +2322,7 @@ vect_analyze_data_ref_access (struct data_reference *dr)
     {
       /* Interleaved accesses are not yet supported within outer-loop
         vectorization for references in the inner-loop.  */
-      DR_GROUP_FIRST_DR (vinfo_for_stmt (stmt)) = NULL_TREE;
+      DR_GROUP_FIRST_DR (vinfo_for_stmt (stmt)) = NULL;
 
       /* For the rest of the analysis we use the outer-loop step.  */
       step = STMT_VINFO_DR_STEP (stmt_info);
@@ -2338,7 +2343,7 @@ vect_analyze_data_ref_access (struct data_reference *dr)
   if (!tree_int_cst_compare (step, TYPE_SIZE_UNIT (scalar_type)))
     {
       /* Mark that it is not interleaving.  */
-      DR_GROUP_FIRST_DR (vinfo_for_stmt (stmt)) = NULL_TREE;
+      DR_GROUP_FIRST_DR (vinfo_for_stmt (stmt)) = NULL;
       return true;
     }
 
@@ -2470,23 +2475,23 @@ vect_free_slp_tree (slp_tree node)
   if (SLP_TREE_RIGHT (node))
     vect_free_slp_tree (SLP_TREE_RIGHT (node));
    
-  VEC_free (tree, heap, SLP_TREE_SCALAR_STMTS (node));
+  VEC_free (gimple, heap, SLP_TREE_SCALAR_STMTS (node));
   
   if (SLP_TREE_VEC_STMTS (node))
-    VEC_free (tree, heap, SLP_TREE_VEC_STMTS (node));
+    VEC_free (gimple, heap, SLP_TREE_VEC_STMTS (node));
 
   free (node);
 }
 
 
-/* Get the defs for the RHS (collect them in DEF_STMTS0/1), check that they are 
-   of a legal type and that they match the defs of the first stmt of the SLP 
-   group (stored in FIRST_STMT_...).  */
+/* Get the defs for the rhs of STMT (collect them in DEF_STMTS0/1), check that
+   they are of a legal type and that they match the defs of the first stmt of
+   the SLP group (stored in FIRST_STMT_...).  */
 
 static bool
 vect_get_and_check_slp_defs (loop_vec_info loop_vinfo, slp_tree slp_node,
-                            tree rhs, VEC (tree, heap) **def_stmts0,
-                            VEC (tree, heap) **def_stmts1,
+                            gimple stmt, VEC (gimple, heap) **def_stmts0,
+                            VEC (gimple, heap) **def_stmts1,
                             enum vect_def_type *first_stmt_dt0,
                             enum vect_def_type *first_stmt_dt1,
                             tree *first_stmt_def0_type, 
@@ -2495,25 +2500,20 @@ vect_get_and_check_slp_defs (loop_vec_info loop_vinfo, slp_tree slp_node,
                             int ncopies_for_cost)
 {
   tree oprnd;
-  enum operation_type op_type = TREE_OPERAND_LENGTH (rhs);
-  unsigned int i, number_of_oprnds = op_type;
-  tree def, def_stmt;
+  unsigned int i, number_of_oprnds;
+  tree def;
+  gimple def_stmt;
   enum vect_def_type dt[2] = {vect_unknown_def_type, vect_unknown_def_type};
   stmt_vec_info stmt_info = 
-    vinfo_for_stmt (VEC_index (tree, SLP_TREE_SCALAR_STMTS (slp_node), 0));
+    vinfo_for_stmt (VEC_index (gimple, SLP_TREE_SCALAR_STMTS (slp_node), 0));
+  enum gimple_rhs_class rhs_class;
 
-  /* Store.  */
-  if (!op_type)
-    number_of_oprnds = 1;
-  else
-    gcc_assert (op_type == unary_op || op_type == binary_op);
+  rhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (stmt));
+  number_of_oprnds = gimple_num_ops (stmt) - 1;        /* RHS only.  */
 
   for (i = 0; i < number_of_oprnds; i++)
     {
-      if (op_type)
-       oprnd = TREE_OPERAND (rhs, i);
-      else
-       oprnd = rhs;
+      oprnd = gimple_op (stmt, i + 1);
 
       if (!vect_is_simple_use (oprnd, loop_vinfo, &def_stmt, &def, &dt[i])
          || (!def_stmt && dt[i] != vect_constant_def))
@@ -2537,7 +2537,7 @@ vect_get_and_check_slp_defs (loop_vec_info loop_vinfo, slp_tree slp_node,
            *first_stmt_const_oprnd = oprnd;
 
          /* Analyze costs (for the first stmt of the group only).  */
-         if (op_type)
+         if (rhs_class != GIMPLE_SINGLE_RHS)
            /* Not memory operation (we don't call this functions for loads).  */
            vect_model_simple_cost (stmt_info, ncopies_for_cost, dt, slp_node);
          else
@@ -2600,9 +2600,9 @@ vect_get_and_check_slp_defs (loop_vec_info loop_vinfo, slp_tree slp_node,
          
        case vect_loop_def:
          if (i == 0)
-           VEC_safe_push (tree, heap, *def_stmts0, def_stmt);
+           VEC_safe_push (gimple, heap, *def_stmts0, def_stmt);
          else
-           VEC_safe_push (tree, heap, *def_stmts1, def_stmt);
+           VEC_safe_push (gimple, heap, *def_stmts1, def_stmt);
          break;
 
        default:
@@ -2634,15 +2634,16 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, slp_tree *node,
                     int *inside_cost, int *outside_cost,
                     int ncopies_for_cost)
 {
-  VEC (tree, heap) *def_stmts0 = VEC_alloc (tree, heap, group_size);
-  VEC (tree, heap) *def_stmts1 =  VEC_alloc (tree, heap, group_size);
+  VEC (gimple, heap) *def_stmts0 = VEC_alloc (gimple, heap, group_size);
+  VEC (gimple, heap) *def_stmts1 =  VEC_alloc (gimple, heap, group_size);
   unsigned int i;
-  VEC (tree, heap) *stmts = SLP_TREE_SCALAR_STMTS (*node);
-  tree stmt = VEC_index (tree, stmts, 0);
+  VEC (gimple, heap) *stmts = SLP_TREE_SCALAR_STMTS (*node);
+  gimple stmt = VEC_index (gimple, stmts, 0);
   enum vect_def_type first_stmt_dt0 = 0, first_stmt_dt1 = 0;
-  enum tree_code first_stmt_code = 0;
+  enum tree_code first_stmt_code = 0, rhs_code;
   tree first_stmt_def1_type = NULL_TREE, first_stmt_def0_type = NULL_TREE;
-  tree lhs, rhs, prev_stmt = NULL_TREE;
+  tree lhs;
+  gimple prev_stmt = NULL;
   bool stop_recursion = false, need_same_oprnds = false;
   tree vectype, scalar_type, first_op1 = NULL_TREE;
   unsigned int vectorization_factor = 0, ncopies;
@@ -2654,26 +2655,28 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, slp_tree *node,
   struct data_reference *first_dr;
 
   /* For every stmt in NODE find its def stmt/s.  */
-  for (i = 0; VEC_iterate (tree, stmts, i, stmt); i++)
+  for (i = 0; VEC_iterate (gimple, stmts, i, stmt); i++)
     {
       if (vect_print_dump_info (REPORT_SLP)) 
        {
          fprintf (vect_dump, "Build SLP for ");
-         print_generic_expr (vect_dump, stmt, TDF_SLIM);
+         print_gimple_stmt (vect_dump, stmt, 0, TDF_SLIM);
        }
 
-      if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+      lhs = gimple_get_lhs (stmt);
+      if (lhs == NULL_TREE)
        {
          if (vect_print_dump_info (REPORT_SLP)) 
            {
-             fprintf (vect_dump, "Build SLP failed: not MODIFY_STMT ");
-             print_generic_expr (vect_dump, stmt, TDF_SLIM);
+             fprintf (vect_dump,
+                      "Build SLP failed: not GIMPLE_ASSIGN nor GIMPLE_CALL");
+             print_gimple_stmt (vect_dump, stmt, 0, TDF_SLIM);
            }
          
          return false;
        }
 
-      scalar_type = TREE_TYPE (GIMPLE_STMT_OPERAND (stmt, 0));
+      scalar_type = TREE_TYPE (lhs);
       vectype = get_vectype_for_scalar_type (scalar_type);
       if (!vectype)
         {
@@ -2698,24 +2701,26 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, slp_tree *node,
          return false;
        }
 
-      lhs = GIMPLE_STMT_OPERAND (stmt, 0);
-      rhs = GIMPLE_STMT_OPERAND (stmt, 1);
+      if (is_gimple_call (stmt))
+       rhs_code = CALL_EXPR;
+      else
+       rhs_code = gimple_assign_rhs_code (stmt);
 
       /* Check the operation.  */
       if (i == 0)
        {
-         first_stmt_code = TREE_CODE (rhs);
+         first_stmt_code = rhs_code;
 
          /* Shift arguments should be equal in all the packed stmts for a 
             vector shift with scalar shift operand.  */
-         if (TREE_CODE (rhs) == LSHIFT_EXPR || TREE_CODE (rhs) == RSHIFT_EXPR
-             || TREE_CODE (rhs) == LROTATE_EXPR
-             || TREE_CODE (rhs) == RROTATE_EXPR)
+         if (rhs_code == LSHIFT_EXPR || rhs_code == RSHIFT_EXPR
+             || rhs_code == LROTATE_EXPR
+             || rhs_code == RROTATE_EXPR)
            {
              vec_mode = TYPE_MODE (vectype);
 
              /* First see if we have a vector/vector shift.  */
-             optab = optab_for_tree_code (TREE_CODE (rhs), vectype,
+             optab = optab_for_tree_code (rhs_code, vectype,
                                           optab_vector);
 
              if (!optab
@@ -2723,7 +2728,7 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, slp_tree *node,
                      == CODE_FOR_nothing))
                {
                  /* No vector/vector shift, try for a vector/scalar shift.  */
-                 optab = optab_for_tree_code (TREE_CODE (rhs), vectype,
+                 optab = optab_for_tree_code (rhs_code, vectype,
                                               optab_scalar);
 
                  if (!optab)
@@ -2744,35 +2749,37 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, slp_tree *node,
                  if (!VECTOR_MODE_P (optab_op2_mode))
                    {
                      need_same_oprnds = true;
-                     first_op1 = TREE_OPERAND (rhs, 1);
+                     first_op1 = gimple_assign_rhs2 (stmt);
                    }
                }
            }
        }
       else
        {
-         if ((first_stmt_code != TREE_CODE (rhs))
-             && ((first_stmt_code != IMAGPART_EXPR) || (TREE_CODE (rhs) != REALPART_EXPR))
-             && ((first_stmt_code != REALPART_EXPR) || (TREE_CODE (rhs) != IMAGPART_EXPR)))
+         if (first_stmt_code != rhs_code
+             && (first_stmt_code != IMAGPART_EXPR
+                 || rhs_code != REALPART_EXPR)
+             && (first_stmt_code != REALPART_EXPR
+                 || rhs_code != IMAGPART_EXPR))
            {
              if (vect_print_dump_info (REPORT_SLP)) 
                {
                  fprintf (vect_dump, 
                           "Build SLP failed: different operation in stmt ");
-                 print_generic_expr (vect_dump, stmt, TDF_SLIM);
+                 print_gimple_stmt (vect_dump, stmt, 0, TDF_SLIM);
                }
              
              return false;
            }
          
          if (need_same_oprnds 
-             && !operand_equal_p (first_op1, TREE_OPERAND (rhs, 1), 0))
+             && !operand_equal_p (first_op1, gimple_assign_rhs2 (stmt), 0))
            {
              if (vect_print_dump_info (REPORT_SLP)) 
                {
                  fprintf (vect_dump, 
                           "Build SLP failed: different shift arguments in ");
-                 print_generic_expr (vect_dump, stmt, TDF_SLIM);
+                 print_gimple_stmt (vect_dump, stmt, 0, TDF_SLIM);
                }
              
              return false;
@@ -2785,7 +2792,7 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, slp_tree *node,
          if (REFERENCE_CLASS_P (lhs))
            {
              /* Store.  */
-             if (!vect_get_and_check_slp_defs (loop_vinfo, *node, rhs, 
+             if (!vect_get_and_check_slp_defs (loop_vinfo, *node, stmt,
                                                &def_stmts0, &def_stmts1, 
                                                &first_stmt_dt0, 
                                                &first_stmt_dt1, 
@@ -2812,7 +2819,7 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, slp_tree *node,
                          {
                            fprintf (vect_dump, "Build SLP failed: strided "
                                     " loads need permutation or have gaps ");
-                           print_generic_expr (vect_dump, stmt, TDF_SLIM);
+                           print_gimple_stmt (vect_dump, stmt, 0, TDF_SLIM);
                          }
 
                        return false;
@@ -2826,7 +2833,7 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, slp_tree *node,
                          {
                            fprintf (vect_dump, "Build SLP failed: unsupported "
                                     " unaligned load ");
-                           print_generic_expr (vect_dump, stmt, TDF_SLIM);
+                           print_gimple_stmt (vect_dump, stmt, 0, TDF_SLIM);
                          }
 
                        return false;
@@ -2849,7 +2856,7 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, slp_tree *node,
                          {
                            fprintf (vect_dump, "Build SLP failed: strided "
                                     " loads need permutation or have gaps ");
-                           print_generic_expr (vect_dump, stmt, TDF_SLIM);
+                           print_gimple_stmt (vect_dump, stmt, 0, TDF_SLIM);
                          }
                        return false;
                      }
@@ -2864,13 +2871,13 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, slp_tree *node,
        } /* Strided access.  */
       else
        {
-         if (REFERENCE_CLASS_P (rhs))
+         if (TREE_CODE_CLASS (rhs_code) == tcc_reference)
            {
              /* Not strided load. */
              if (vect_print_dump_info (REPORT_SLP)) 
                {
                  fprintf (vect_dump, "Build SLP failed: not strided load ");
-                 print_generic_expr (vect_dump, stmt, TDF_SLIM);
+                 print_gimple_stmt (vect_dump, stmt, 0, TDF_SLIM);
                }
 
              /* FORNOW: Not strided loads are not supported.  */
@@ -2878,22 +2885,23 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, slp_tree *node,
            }
 
          /* Not memory operation.  */
-         if (!BINARY_CLASS_P (rhs) && !UNARY_CLASS_P (rhs))
+         if (TREE_CODE_CLASS (rhs_code) != tcc_binary
+             && TREE_CODE_CLASS (rhs_code) != tcc_unary)
            {
              if (vect_print_dump_info (REPORT_SLP)) 
                {
                  fprintf (vect_dump, "Build SLP failed: operation");
                  fprintf (vect_dump, " unsupported ");
-                 print_generic_expr (vect_dump, stmt, TDF_SLIM);
+                 print_gimple_stmt (vect_dump, stmt, 0, TDF_SLIM);
                }
 
              return false;
            }
 
          /* Find the def-stmts.  */ 
-         if (!vect_get_and_check_slp_defs (loop_vinfo, *node, rhs, &def_stmts0, 
-                                           &def_stmts1, &first_stmt_dt0, 
-                                           &first_stmt_dt1, 
+         if (!vect_get_and_check_slp_defs (loop_vinfo, *node, stmt,
+                                           &def_stmts0, &def_stmts1,
+                                           &first_stmt_dt0, &first_stmt_dt1, 
                                            &first_stmt_def0_type, 
                                            &first_stmt_def1_type,
                                            &first_stmt_const_oprnd,
@@ -2953,16 +2961,16 @@ static void
 vect_print_slp_tree (slp_tree node)
 {
   int i;
-  tree stmt;
+  gimple stmt;
 
   if (!node)
     return;
 
   fprintf (vect_dump, "node ");
-  for (i = 0; VEC_iterate (tree, SLP_TREE_SCALAR_STMTS (node), i, stmt); i++)
+  for (i = 0; VEC_iterate (gimple, SLP_TREE_SCALAR_STMTS (node), i, stmt); i++)
     {
       fprintf (vect_dump, "\n\tstmt %d ", i);
-      print_generic_expr (vect_dump, stmt, TDF_SLIM);  
+      print_gimple_stmt (vect_dump, stmt, 0, TDF_SLIM);  
     }
   fprintf (vect_dump, "\n");
 
@@ -2980,12 +2988,12 @@ static void
 vect_mark_slp_stmts (slp_tree node, enum slp_vect_type mark, int j)
 {
   int i;
-  tree stmt;
+  gimple stmt;
 
   if (!node)
     return;
 
-  for (i = 0; VEC_iterate (tree, SLP_TREE_SCALAR_STMTS (node), i, stmt); i++)
+  for (i = 0; VEC_iterate (gimple, SLP_TREE_SCALAR_STMTS (node), i, stmt); i++)
     if (j < 0 || i == j)
       STMT_SLP_TYPE (vinfo_for_stmt (stmt)) = mark;
 
@@ -2999,13 +3007,14 @@ vect_mark_slp_stmts (slp_tree node, enum slp_vect_type mark, int j)
    Return FALSE if it's impossible to SLP any stmt in the loop.  */
 
 static bool
-vect_analyze_slp_instance (loop_vec_info loop_vinfo, tree stmt)
+vect_analyze_slp_instance (loop_vec_info loop_vinfo, gimple stmt)
 {
   slp_instance new_instance;
   slp_tree node = XNEW (struct _slp_tree);
   unsigned int group_size = DR_GROUP_SIZE (vinfo_for_stmt (stmt));
   unsigned int unrolling_factor = 1, nunits;
-  tree vectype, scalar_type, next;
+  tree vectype, scalar_type;
+  gimple next;
   unsigned int vectorization_factor = 0, ncopies;
   bool slp_impossible = false; 
   int inside_cost = 0, outside_cost = 0, ncopies_for_cost;
@@ -3035,12 +3044,12 @@ vect_analyze_slp_instance (loop_vec_info loop_vinfo, tree stmt)
     }
 
   /* Create a node (a root of the SLP tree) for the packed strided stores.  */ 
-  SLP_TREE_SCALAR_STMTS (node) = VEC_alloc (tree, heap, group_size);
+  SLP_TREE_SCALAR_STMTS (node) = VEC_alloc (gimple, heap, group_size);
   next = stmt;
   /* Collect the stores and store them in SLP_TREE_SCALAR_STMTS.  */
   while (next)
     {
-      VEC_safe_push (tree, heap, SLP_TREE_SCALAR_STMTS (node), next);
+      VEC_safe_push (gimple, heap, SLP_TREE_SCALAR_STMTS (node), next);
       next = DR_GROUP_NEXT_DR (vinfo_for_stmt (next));
     }
 
@@ -3098,13 +3107,13 @@ static bool
 vect_analyze_slp (loop_vec_info loop_vinfo)
 {
   unsigned int i;
-  VEC (tree, heap) *strided_stores = LOOP_VINFO_STRIDED_STORES (loop_vinfo);
-  tree store;
+  VEC (gimple, heap) *strided_stores = LOOP_VINFO_STRIDED_STORES (loop_vinfo);
+  gimple store;
 
   if (vect_print_dump_info (REPORT_SLP))
     fprintf (vect_dump, "=== vect_analyze_slp ===");
 
-  for (i = 0; VEC_iterate (tree, strided_stores, i, store); i++)
+  for (i = 0; VEC_iterate (gimple, strided_stores, i, store); i++)
     if (!vect_analyze_slp_instance (loop_vinfo, store))
       {
        /* SLP failed. No instance can be SLPed in the loop.  */
@@ -3160,17 +3169,17 @@ static void
 vect_detect_hybrid_slp_stmts (slp_tree node)
 {
   int i;
-  tree stmt;
+  gimple stmt;
   imm_use_iterator imm_iter;
-  tree use_stmt;
+  gimple use_stmt;
 
   if (!node)
     return;
 
-  for (i = 0; VEC_iterate (tree, SLP_TREE_SCALAR_STMTS (node), i, stmt); i++)
+  for (i = 0; VEC_iterate (gimple, SLP_TREE_SCALAR_STMTS (node), i, stmt); i++)
     if (PURE_SLP_STMT (vinfo_for_stmt (stmt))
-       && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) == SSA_NAME)
-      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, GIMPLE_STMT_OPERAND (stmt, 0))
+       && TREE_CODE (gimple_op (stmt, 0)) == SSA_NAME)
+      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, gimple_op (stmt, 0))
        if (vinfo_for_stmt (use_stmt)
            && !STMT_SLP_TYPE (vinfo_for_stmt (use_stmt)))
          vect_mark_slp_stmts (node, hybrid, i);
@@ -3233,7 +3242,7 @@ vect_analyze_data_refs (loop_vec_info loop_vinfo)
 
   for (i = 0; VEC_iterate (data_reference_p, datarefs, i, dr); i++)
     {
-      tree stmt;
+      gimple stmt;
       stmt_vec_info stmt_info;
       basic_block bb;
       tree base, offset, init; 
@@ -3255,7 +3264,7 @@ vect_analyze_data_refs (loop_vec_info loop_vinfo)
           if (vect_print_dump_info (REPORT_UNVECTORIZED_LOOPS))
             {
               fprintf (vect_dump, "not vectorized: data ref analysis failed ");
-              print_generic_expr (vect_dump, stmt, TDF_SLIM);
+              print_gimple_stmt (vect_dump, stmt, 0, TDF_SLIM);
             }
           return false;
         }
@@ -3283,7 +3292,7 @@ vect_analyze_data_refs (loop_vec_info loop_vinfo)
       init = unshare_expr (DR_INIT (dr));
        
       /* Update DR field in stmt_vec_info struct.  */
-      bb = bb_for_stmt (stmt);
+      bb = gimple_bb (stmt);
 
       /* If the dataref is in an inner-loop of the loop that is considered for
         for vectorization, we also want to analyze the access relative to
@@ -3394,7 +3403,7 @@ vect_analyze_data_refs (loop_vec_info loop_vinfo)
             {
               fprintf (vect_dump,
                        "not vectorized: more than one data ref in stmt: ");
-              print_generic_expr (vect_dump, stmt, TDF_SLIM);
+              print_gimple_stmt (vect_dump, stmt, 0, TDF_SLIM);
             }
           return false;
         }
@@ -3410,7 +3419,7 @@ vect_analyze_data_refs (loop_vec_info loop_vinfo)
             {
               fprintf (vect_dump,
                        "not vectorized: no vectype for stmt: ");
-              print_generic_expr (vect_dump, stmt, TDF_SLIM);
+              print_gimple_stmt (vect_dump, stmt, 0, TDF_SLIM);
               fprintf (vect_dump, " scalar_type: ");
               print_generic_expr (vect_dump, scalar_type, TDF_DETAILS);
             }
@@ -3429,7 +3438,7 @@ vect_analyze_data_refs (loop_vec_info loop_vinfo)
    Mark STMT as "relevant for vectorization" and add it to WORKLIST.  */
 
 static void
-vect_mark_relevant (VEC(tree,heap) **worklist, tree stmt,
+vect_mark_relevant (VEC(gimple,heap) **worklist, gimple stmt,
                    enum vect_relevant relevant, bool live_p)
 {
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
@@ -3441,7 +3450,7 @@ vect_mark_relevant (VEC(tree,heap) **worklist, tree stmt,
 
   if (STMT_VINFO_IN_PATTERN_P (stmt_info))
     {
-      tree pattern_stmt;
+      gimple pattern_stmt;
 
       /* This is the last stmt in a sequence that was detected as a 
          pattern that can potentially be vectorized.  Don't mark the stmt
@@ -3471,7 +3480,7 @@ vect_mark_relevant (VEC(tree,heap) **worklist, tree stmt,
       return;
     }
 
-  VEC_safe_push (tree, heap, *worklist, stmt);
+  VEC_safe_push (gimple, heap, *worklist, stmt);
 }
 
 
@@ -3488,7 +3497,7 @@ vect_mark_relevant (VEC(tree,heap) **worklist, tree stmt,
    CHECKME: what other side effects would the vectorizer allow?  */
 
 static bool
-vect_stmt_relevant_p (tree stmt, loop_vec_info loop_vinfo,
+vect_stmt_relevant_p (gimple stmt, loop_vec_info loop_vinfo,
                      enum vect_relevant *relevant, bool *live_p)
 {
   struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
@@ -3506,7 +3515,7 @@ vect_stmt_relevant_p (tree stmt, loop_vec_info loop_vinfo,
     *relevant = vect_used_in_loop;
 
   /* changing memory.  */
-  if (TREE_CODE (stmt) != PHI_NODE)
+  if (gimple_code (stmt) != GIMPLE_PHI)
     if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_DEFS))
       {
        if (vect_print_dump_info (REPORT_DETAILS))
@@ -3519,7 +3528,7 @@ vect_stmt_relevant_p (tree stmt, loop_vec_info loop_vinfo,
     {
       FOR_EACH_IMM_USE_FAST (use_p, imm_iter, DEF_FROM_PTR (def_p))
        {
-         basic_block bb = bb_for_stmt (USE_STMT (use_p));
+         basic_block bb = gimple_bb (USE_STMT (use_p));
          if (!flow_bb_inside_loop_p (loop, bb))
            {
              if (vect_print_dump_info (REPORT_DETAILS))
@@ -3527,7 +3536,7 @@ vect_stmt_relevant_p (tree stmt, loop_vec_info loop_vinfo,
 
              /* We expect all such uses to be in the loop exit phis
                 (because of loop closed form)   */
-             gcc_assert (TREE_CODE (USE_STMT (use_p)) == PHI_NODE);
+             gcc_assert (gimple_code (USE_STMT (use_p)) == GIMPLE_PHI);
              gcc_assert (bb == single_exit (loop)->dest);
 
               *live_p = true;
@@ -3565,14 +3574,15 @@ vect_stmt_relevant_p (tree stmt, loop_vec_info loop_vinfo,
    Return true if everything is as expected. Return false otherwise.  */
 
 static bool
-process_use (tree stmt, tree use, loop_vec_info loop_vinfo, bool live_p, 
-            enum vect_relevant relevant, VEC(tree,heap) **worklist)
+process_use (gimple stmt, tree use, loop_vec_info loop_vinfo, bool live_p, 
+            enum vect_relevant relevant, VEC(gimple,heap) **worklist)
 {
   struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
   stmt_vec_info stmt_vinfo = vinfo_for_stmt (stmt);
   stmt_vec_info dstmt_vinfo;
   basic_block bb, def_bb;
-  tree def, def_stmt;
+  tree def;
+  gimple def_stmt;
   enum vect_def_type dt;
 
   /* case 1: we are only interested in uses that need to be vectorized.  Uses 
@@ -3587,10 +3597,10 @@ process_use (tree stmt, tree use, loop_vec_info loop_vinfo, bool live_p,
       return false;
     }
 
-  if (!def_stmt || IS_EMPTY_STMT (def_stmt))
+  if (!def_stmt || gimple_nop_p (def_stmt))
     return true;
 
-  def_bb = bb_for_stmt (def_stmt);
+  def_bb = gimple_bb (def_stmt);
   if (!flow_bb_inside_loop_p (loop, def_bb))
     {
       if (vect_print_dump_info (REPORT_DETAILS))
@@ -3604,10 +3614,10 @@ process_use (tree stmt, tree use, loop_vec_info loop_vinfo, bool live_p,
      as there should be no other uses for DEF_STMT in the loop.  So we just 
      check that everything is as expected, and we are done.  */
   dstmt_vinfo = vinfo_for_stmt (def_stmt);
-  bb = bb_for_stmt (stmt);
-  if (TREE_CODE (stmt) == PHI_NODE
+  bb = gimple_bb (stmt);
+  if (gimple_code (stmt) == GIMPLE_PHI
       && STMT_VINFO_DEF_TYPE (stmt_vinfo) == vect_reduction_def
-      && TREE_CODE (def_stmt) != PHI_NODE
+      && gimple_code (def_stmt) != GIMPLE_PHI
       && STMT_VINFO_DEF_TYPE (dstmt_vinfo) == vect_reduction_def
       && bb->loop_father == def_bb->loop_father)
     {
@@ -3712,47 +3722,47 @@ process_use (tree stmt, tree use, loop_vec_info loop_vinfo, bool live_p,
 static bool
 vect_mark_stmts_to_be_vectorized (loop_vec_info loop_vinfo)
 {
-  VEC(tree,heap) *worklist;
+  VEC(gimple,heap) *worklist;
   struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
   basic_block *bbs = LOOP_VINFO_BBS (loop_vinfo);
   unsigned int nbbs = loop->num_nodes;
-  block_stmt_iterator si;
-  tree stmt;
-  stmt_ann_t ann;
+  gimple_stmt_iterator si;
+  gimple stmt;
   unsigned int i;
   stmt_vec_info stmt_vinfo;
   basic_block bb;
-  tree phi;
+  gimple phi;
   bool live_p;
   enum vect_relevant relevant;
 
   if (vect_print_dump_info (REPORT_DETAILS))
     fprintf (vect_dump, "=== vect_mark_stmts_to_be_vectorized ===");
 
-  worklist = VEC_alloc (tree, heap, 64);
+  worklist = VEC_alloc (gimple, heap, 64);
 
   /* 1. Init worklist.  */
   for (i = 0; i < nbbs; i++)
     {
       bb = bbs[i];
-      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
        { 
+         phi = gsi_stmt (si);
          if (vect_print_dump_info (REPORT_DETAILS))
            {
              fprintf (vect_dump, "init: phi relevant? ");
-             print_generic_expr (vect_dump, phi, TDF_SLIM);
+             print_gimple_stmt (vect_dump, phi, 0, TDF_SLIM);
            }
 
          if (vect_stmt_relevant_p (phi, loop_vinfo, &relevant, &live_p))
            vect_mark_relevant (&worklist, phi, relevant, live_p);
        }
-      for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
+      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
        {
-         stmt = bsi_stmt (si);
+         stmt = gsi_stmt (si);
          if (vect_print_dump_info (REPORT_DETAILS))
            {
              fprintf (vect_dump, "init: stmt relevant? ");
-             print_generic_expr (vect_dump, stmt, TDF_SLIM);
+             print_gimple_stmt (vect_dump, stmt, 0, TDF_SLIM);
            } 
 
          if (vect_stmt_relevant_p (stmt, loop_vinfo, &relevant, &live_p))
@@ -3761,22 +3771,21 @@ vect_mark_stmts_to_be_vectorized (loop_vec_info loop_vinfo)
     }
 
   /* 2. Process_worklist */
-  while (VEC_length (tree, worklist) > 0)
+  while (VEC_length (gimple, worklist) > 0)
     {
       use_operand_p use_p;
       ssa_op_iter iter;
 
-      stmt = VEC_pop (tree, worklist);
+      stmt = VEC_pop (gimple, worklist);
       if (vect_print_dump_info (REPORT_DETAILS))
        {
           fprintf (vect_dump, "worklist: examine stmt: ");
-          print_generic_expr (vect_dump, stmt, TDF_SLIM);
+          print_gimple_stmt (vect_dump, stmt, 0, TDF_SLIM);
        }
 
       /* Examine the USEs of STMT. For each USE, mark the stmt that defines it 
         (DEF_STMT) as relevant/irrelevant and live/dead according to the 
         liveness and relevance properties of STMT.  */
-      ann = stmt_ann (stmt);
       stmt_vinfo = vinfo_for_stmt (stmt);
       relevant = STMT_VINFO_RELEVANT (stmt_vinfo);
       live_p = STMT_VINFO_LIVE_P (stmt_vinfo);
@@ -3814,25 +3823,25 @@ vect_mark_stmts_to_be_vectorized (loop_vec_info loop_vinfo)
          switch (tmp_relevant)
            {
            case vect_unused_in_loop:
-             gcc_assert (TREE_CODE (stmt) != PHI_NODE);
+             gcc_assert (gimple_code (stmt) != GIMPLE_PHI);
              relevant = vect_used_by_reduction;
              break;
 
            case vect_used_in_outer_by_reduction:
            case vect_used_in_outer:
-             gcc_assert (TREE_CODE (stmt) != WIDEN_SUM_EXPR
-                         && TREE_CODE (stmt) != DOT_PROD_EXPR);
+             gcc_assert (gimple_code (stmt) != WIDEN_SUM_EXPR
+                         && gimple_code (stmt) != DOT_PROD_EXPR);
              break;
 
            case vect_used_by_reduction:
-             if (TREE_CODE (stmt) == PHI_NODE)
+             if (gimple_code (stmt) == GIMPLE_PHI)
                break;
              /* fall through */
            case vect_used_in_loop:
            default:
              if (vect_print_dump_info (REPORT_DETAILS))
                fprintf (vect_dump, "unsupported use of reduction.");
-             VEC_free (tree, heap, worklist);
+             VEC_free (gimple, heap, worklist);
              return false;
            }
          live_p = false;       
@@ -3843,13 +3852,13 @@ vect_mark_stmts_to_be_vectorized (loop_vec_info loop_vinfo)
          tree op = USE_FROM_PTR (use_p);
          if (!process_use (stmt, op, loop_vinfo, live_p, relevant, &worklist))
            {
-             VEC_free (tree, heap, worklist);
+             VEC_free (gimple, heap, worklist);
              return false;
            }
        }
     } /* while worklist */
 
-  VEC_free (tree, heap, worklist);
+  VEC_free (gimple, heap, worklist);
   return true;
 }
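The initialization loops in this function show the general traversal change: the phi_nodes/PHI_CHAIN walk and the block_stmt_iterator walk are both replaced by gimple_stmt_iterator. A minimal sketch of the tuple-era traversal, using the gsi_* calls exactly as they appear above (FOR_EACH_BB as elsewhere in the middle end):

    basic_block bb;
    gimple_stmt_iterator gsi;

    FOR_EACH_BB (bb)
      {
        /* PHI nodes now sit in their own statement sequence.  */
        for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
          {
            gimple phi = gsi_stmt (gsi);
            /* ... examine PHI ...  */
          }

        /* The ordinary statements of the block.  */
        for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
          {
            gimple stmt = gsi_stmt (gsi);
            /* ... examine STMT ...  */
          }
      }
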
 
@@ -3868,22 +3877,24 @@ vect_can_advance_ivs_p (loop_vec_info loop_vinfo)
 {
   struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
   basic_block bb = loop->header;
-  tree phi;
+  gimple phi;
+  gimple_stmt_iterator gsi;
 
   /* Analyze phi functions of the loop header.  */
 
   if (vect_print_dump_info (REPORT_DETAILS))
     fprintf (vect_dump, "vect_can_advance_ivs_p:");
 
-  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
     {
       tree access_fn = NULL;
       tree evolution_part;
 
+      phi = gsi_stmt (gsi);
       if (vect_print_dump_info (REPORT_DETAILS))
        {
           fprintf (vect_dump, "Analyze phi: ");
-          print_generic_expr (vect_dump, phi, TDF_SLIM);
+          print_gimple_stmt (vect_dump, phi, 0, TDF_SLIM);
        }
 
       /* Skip virtual phi's. The data dependences that are associated with
@@ -3950,7 +3961,7 @@ vect_can_advance_ivs_p (loop_vec_info loop_vinfo)
    can be constructed, place it in NUMBER_OF_ITERATIONS.
    Return the loop exit condition.  */
 
-static tree
+static gimple
 vect_get_loop_niters (struct loop *loop, tree *number_of_iterations)
 {
   tree niters;
@@ -4018,7 +4029,7 @@ loop_vec_info
 vect_analyze_loop_form (struct loop *loop)
 {
   loop_vec_info loop_vinfo;
-  tree loop_cond;
+  gimple loop_cond;
   tree number_of_iterations = NULL;
   loop_vec_info inner_loop_vinfo = NULL;
 
index 8313e54..9919389 100644

@@ -30,7 +30,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "machmode.h"
 #include "langhooks.h"
 #include "tree-flow.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "tree-iterator.h"
 #include "tree-pass.h"
 #include "flags.h"
@@ -96,35 +96,35 @@ build_word_mode_vector_type (int nunits)
   return vector_last_type;
 }
 
-typedef tree (*elem_op_func) (block_stmt_iterator *,
+typedef tree (*elem_op_func) (gimple_stmt_iterator *,
                              tree, tree, tree, tree, tree, enum tree_code);
 
 static inline tree
-tree_vec_extract (block_stmt_iterator *bsi, tree type,
+tree_vec_extract (gimple_stmt_iterator *gsi, tree type,
                  tree t, tree bitsize, tree bitpos)
 {
   if (bitpos)
-    return gimplify_build3 (bsi, BIT_FIELD_REF, type, t, bitsize, bitpos);
+    return gimplify_build3 (gsi, BIT_FIELD_REF, type, t, bitsize, bitpos);
   else
-    return gimplify_build1 (bsi, VIEW_CONVERT_EXPR, type, t);
+    return gimplify_build1 (gsi, VIEW_CONVERT_EXPR, type, t);
 }
 
 static tree
-do_unop (block_stmt_iterator *bsi, tree inner_type, tree a,
+do_unop (gimple_stmt_iterator *gsi, tree inner_type, tree a,
         tree b ATTRIBUTE_UNUSED, tree bitpos, tree bitsize,
         enum tree_code code)
 {
-  a = tree_vec_extract (bsi, inner_type, a, bitsize, bitpos);
-  return gimplify_build1 (bsi, code, inner_type, a);
+  a = tree_vec_extract (gsi, inner_type, a, bitsize, bitpos);
+  return gimplify_build1 (gsi, code, inner_type, a);
 }
 
 static tree
-do_binop (block_stmt_iterator *bsi, tree inner_type, tree a, tree b,
+do_binop (gimple_stmt_iterator *gsi, tree inner_type, tree a, tree b,
          tree bitpos, tree bitsize, enum tree_code code)
 {
-  a = tree_vec_extract (bsi, inner_type, a, bitsize, bitpos);
-  b = tree_vec_extract (bsi, inner_type, b, bitsize, bitpos);
-  return gimplify_build2 (bsi, code, inner_type, a, b);
+  a = tree_vec_extract (gsi, inner_type, a, bitsize, bitpos);
+  b = tree_vec_extract (gsi, inner_type, b, bitsize, bitpos);
+  return gimplify_build2 (gsi, code, inner_type, a, b);
 }
 
 /* Expand vector addition to scalars.  This does bit twiddling
@@ -141,7 +141,7 @@ do_binop (block_stmt_iterator *bsi, tree inner_type, tree a, tree b,
    This optimization should be done only if 4 vector items or more
    fit into a word.  */
 static tree
-do_plus_minus (block_stmt_iterator *bsi, tree word_type, tree a, tree b,
+do_plus_minus (gimple_stmt_iterator *gsi, tree word_type, tree a, tree b,
               tree bitpos ATTRIBUTE_UNUSED, tree bitsize ATTRIBUTE_UNUSED,
               enum tree_code code)
 {
@@ -153,26 +153,26 @@ do_plus_minus (block_stmt_iterator *bsi, tree word_type, tree a, tree b,
   low_bits = build_replicated_const (word_type, inner_type, max >> 1);
   high_bits = build_replicated_const (word_type, inner_type, max & ~(max >> 1));
 
-  a = tree_vec_extract (bsi, word_type, a, bitsize, bitpos);
-  b = tree_vec_extract (bsi, word_type, b, bitsize, bitpos);
+  a = tree_vec_extract (gsi, word_type, a, bitsize, bitpos);
+  b = tree_vec_extract (gsi, word_type, b, bitsize, bitpos);
 
-  signs = gimplify_build2 (bsi, BIT_XOR_EXPR, word_type, a, b);
-  b_low = gimplify_build2 (bsi, BIT_AND_EXPR, word_type, b, low_bits);
+  signs = gimplify_build2 (gsi, BIT_XOR_EXPR, word_type, a, b);
+  b_low = gimplify_build2 (gsi, BIT_AND_EXPR, word_type, b, low_bits);
   if (code == PLUS_EXPR)
-    a_low = gimplify_build2 (bsi, BIT_AND_EXPR, word_type, a, low_bits);
+    a_low = gimplify_build2 (gsi, BIT_AND_EXPR, word_type, a, low_bits);
   else
     {
-      a_low = gimplify_build2 (bsi, BIT_IOR_EXPR, word_type, a, high_bits);
-      signs = gimplify_build1 (bsi, BIT_NOT_EXPR, word_type, signs);
+      a_low = gimplify_build2 (gsi, BIT_IOR_EXPR, word_type, a, high_bits);
+      signs = gimplify_build1 (gsi, BIT_NOT_EXPR, word_type, signs);
     }
 
-  signs = gimplify_build2 (bsi, BIT_AND_EXPR, word_type, signs, high_bits);
-  result_low = gimplify_build2 (bsi, code, word_type, a_low, b_low);
-  return gimplify_build2 (bsi, BIT_XOR_EXPR, word_type, result_low, signs);
+  signs = gimplify_build2 (gsi, BIT_AND_EXPR, word_type, signs, high_bits);
+  result_low = gimplify_build2 (gsi, code, word_type, a_low, b_low);
+  return gimplify_build2 (gsi, BIT_XOR_EXPR, word_type, result_low, signs);
 }
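The body above is the word-parallel addition promised by the comment before do_plus_minus: mask off each element's sign bit so carries cannot cross element boundaries, add the low parts with one ordinary word addition, then XOR the correct sign bits back in. A small standalone illustration of the same trick on four 8-bit lanes packed into a uint32_t; the constants 0x7f7f7f7f and 0x80808080 play the role of low_bits and high_bits:

    #include <stdint.h>
    #include <stdio.h>

    /* SWAR addition of four 8-bit lanes in one 32-bit word, mirroring
       the low_bits/high_bits dance in do_plus_minus (illustration only).  */
    static uint32_t
    swar_add8 (uint32_t a, uint32_t b)
    {
      const uint32_t low_bits  = 0x7f7f7f7f;  /* every bit but each lane's MSB */
      const uint32_t high_bits = 0x80808080;  /* only each lane's MSB */
      uint32_t signs = a ^ b;                 /* carry-less sum of the MSBs */
      uint32_t a_low = a & low_bits;
      uint32_t b_low = b & low_bits;
      uint32_t result_low = a_low + b_low;    /* carries stay inside each lane */
      signs &= high_bits;
      return result_low ^ signs;              /* fold the lane MSBs back in */
    }

    int
    main (void)
    {
      /* Lanes: 0x05+0x03, 0x80+0x80, 0xff+0x01, 0x7f+0x01.  */
      printf ("%08x\n", swar_add8 (0x0580ff7f, 0x03800101)); /* 08000080 */
      return 0;
    }
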
 
 static tree
-do_negate (block_stmt_iterator *bsi, tree word_type, tree b,
+do_negate (gimple_stmt_iterator *gsi, tree word_type, tree b,
           tree unused ATTRIBUTE_UNUSED, tree bitpos ATTRIBUTE_UNUSED,
           tree bitsize ATTRIBUTE_UNUSED,
           enum tree_code code ATTRIBUTE_UNUSED)
@@ -185,19 +185,19 @@ do_negate (block_stmt_iterator *bsi, tree word_type, tree b,
   low_bits = build_replicated_const (word_type, inner_type, max >> 1);
   high_bits = build_replicated_const (word_type, inner_type, max & ~(max >> 1));
 
-  b = tree_vec_extract (bsi, word_type, b, bitsize, bitpos);
+  b = tree_vec_extract (gsi, word_type, b, bitsize, bitpos);
 
-  b_low = gimplify_build2 (bsi, BIT_AND_EXPR, word_type, b, low_bits);
-  signs = gimplify_build1 (bsi, BIT_NOT_EXPR, word_type, b);
-  signs = gimplify_build2 (bsi, BIT_AND_EXPR, word_type, signs, high_bits);
-  result_low = gimplify_build2 (bsi, MINUS_EXPR, word_type, high_bits, b_low);
-  return gimplify_build2 (bsi, BIT_XOR_EXPR, word_type, result_low, signs);
+  b_low = gimplify_build2 (gsi, BIT_AND_EXPR, word_type, b, low_bits);
+  signs = gimplify_build1 (gsi, BIT_NOT_EXPR, word_type, b);
+  signs = gimplify_build2 (gsi, BIT_AND_EXPR, word_type, signs, high_bits);
+  result_low = gimplify_build2 (gsi, MINUS_EXPR, word_type, high_bits, b_low);
+  return gimplify_build2 (gsi, BIT_XOR_EXPR, word_type, result_low, signs);
 }
 
 /* Expand a vector operation to scalars, by using many operations
    whose type is the vector type's inner type.  */
 static tree
-expand_vector_piecewise (block_stmt_iterator *bsi, elem_op_func f,
+expand_vector_piecewise (gimple_stmt_iterator *gsi, elem_op_func f,
                         tree type, tree inner_type,
                         tree a, tree b, enum tree_code code)
 {
@@ -213,7 +213,7 @@ expand_vector_piecewise (block_stmt_iterator *bsi, elem_op_func f,
   for (i = 0; i < nunits;
        i += delta, index = int_const_binop (PLUS_EXPR, index, part_width, 0))
     {
-      tree result = f (bsi, inner_type, a, b, index, part_width, code);
+      tree result = f (gsi, inner_type, a, b, index, part_width, code);
       constructor_elt *ce = VEC_quick_push (constructor_elt, v, NULL);
       ce->index = NULL_TREE;
       ce->value = result;
@@ -226,7 +226,7 @@ expand_vector_piecewise (block_stmt_iterator *bsi, elem_op_func f,
    a scalar integer type, or to use a different size for the items
    in the vector type.  */
 static tree
-expand_vector_parallel (block_stmt_iterator *bsi, elem_op_func f, tree type,
+expand_vector_parallel (gimple_stmt_iterator *gsi, elem_op_func f, tree type,
                        tree a, tree b,
                        enum tree_code code)
 {
@@ -239,23 +239,24 @@ expand_vector_parallel (block_stmt_iterator *bsi, elem_op_func f, tree type,
      one word, do it a word at a time; finally, if the vector is smaller
      than one word, do it as a scalar.  */
   if (TYPE_MODE (TREE_TYPE (type)) == word_mode)
-     return expand_vector_piecewise (bsi, f,
+     return expand_vector_piecewise (gsi, f,
                                     type, TREE_TYPE (type),
                                     a, b, code);
   else if (n_words > 1)
     {
       tree word_type = build_word_mode_vector_type (n_words);
-      result = expand_vector_piecewise (bsi, f,
+      result = expand_vector_piecewise (gsi, f,
                                        word_type, TREE_TYPE (word_type),
                                        a, b, code);
-      result = gimplify_val (bsi, word_type, result);
+      result = force_gimple_operand_gsi (gsi, result, true, NULL, true,
+                                         GSI_SAME_STMT);
     }
   else
     {
       /* Use a single scalar operation with a mode no wider than word_mode.  */
       mode = mode_for_size (tree_low_cst (TYPE_SIZE (type), 1), MODE_INT, 0);
       compute_type = lang_hooks.types.type_for_mode (mode, 1);
-      result = f (bsi, compute_type, a, b, NULL_TREE, NULL_TREE, code);
+      result = f (gsi, compute_type, a, b, NULL_TREE, NULL_TREE, code);
     }
 
   return result;
@@ -267,7 +268,7 @@ expand_vector_parallel (block_stmt_iterator *bsi, elem_op_func f, tree type,
    they can process at least four items, that is, only if the vector
    holds at least four items and if a word can hold four items.  */
 static tree
-expand_vector_addition (block_stmt_iterator *bsi,
+expand_vector_addition (gimple_stmt_iterator *gsi,
                        elem_op_func f, elem_op_func f_parallel,
                        tree type, tree a, tree b, enum tree_code code)
 {
@@ -277,17 +278,17 @@ expand_vector_addition (block_stmt_iterator *bsi,
   if (INTEGRAL_TYPE_P (TREE_TYPE (type))
       && parts_per_word >= 4
       && TYPE_VECTOR_SUBPARTS (type) >= 4)
-    return expand_vector_parallel (bsi, f_parallel,
+    return expand_vector_parallel (gsi, f_parallel,
                                   type, a, b, code);
   else
-    return expand_vector_piecewise (bsi, f,
+    return expand_vector_piecewise (gsi, f,
                                    type, TREE_TYPE (type),
                                    a, b, code);
 }
 
 static tree
-expand_vector_operation (block_stmt_iterator *bsi, tree type, tree compute_type,
-                        tree rhs, enum tree_code code)
+expand_vector_operation (gimple_stmt_iterator *gsi, tree type, tree compute_type,
+                        gimple assign, enum tree_code code)
 {
   enum machine_mode compute_mode = TYPE_MODE (compute_type);
 
@@ -305,28 +306,28 @@ expand_vector_operation (block_stmt_iterator *bsi, tree type, tree compute_type,
       case PLUS_EXPR:
       case MINUS_EXPR:
         if (!TYPE_OVERFLOW_TRAPS (type))
-          return expand_vector_addition (bsi, do_binop, do_plus_minus, type,
-                                        TREE_OPERAND (rhs, 0),
-                                        TREE_OPERAND (rhs, 1), code);
+          return expand_vector_addition (gsi, do_binop, do_plus_minus, type,
+                                        gimple_assign_rhs1 (assign),
+                                        gimple_assign_rhs2 (assign), code);
        break;
 
       case NEGATE_EXPR:
         if (!TYPE_OVERFLOW_TRAPS (type))
-          return expand_vector_addition (bsi, do_unop, do_negate, type,
-                                        TREE_OPERAND (rhs, 0),
+          return expand_vector_addition (gsi, do_unop, do_negate, type,
+                                        gimple_assign_rhs1 (assign),
                                         NULL_TREE, code);
        break;
 
       case BIT_AND_EXPR:
       case BIT_IOR_EXPR:
       case BIT_XOR_EXPR:
-        return expand_vector_parallel (bsi, do_binop, type,
-                                      TREE_OPERAND (rhs, 0),
-                                      TREE_OPERAND (rhs, 1), code);
+        return expand_vector_parallel (gsi, do_binop, type,
+                                      gimple_assign_rhs1 (assign),
+                                      gimple_assign_rhs2 (assign), code);
 
       case BIT_NOT_EXPR:
-        return expand_vector_parallel (bsi, do_unop, type,
-                                      TREE_OPERAND (rhs, 0),
+        return expand_vector_parallel (gsi, do_unop, type,
+                                      gimple_assign_rhs1 (assign),
                                       NULL_TREE, code);
 
       default:
@@ -334,13 +335,13 @@ expand_vector_operation (block_stmt_iterator *bsi, tree type, tree compute_type,
       }
 
   if (TREE_CODE_CLASS (code) == tcc_unary)
-    return expand_vector_piecewise (bsi, do_unop, type, compute_type,
-                                   TREE_OPERAND (rhs, 0),
+    return expand_vector_piecewise (gsi, do_unop, type, compute_type,
+                                   gimple_assign_rhs1 (assign),
                                    NULL_TREE, code);
   else
-    return expand_vector_piecewise (bsi, do_binop, type, compute_type,
-                                   TREE_OPERAND (rhs, 0),
-                                   TREE_OPERAND (rhs, 1), code);
+    return expand_vector_piecewise (gsi, do_binop, type, compute_type,
+                                   gimple_assign_rhs1 (assign),
+                                   gimple_assign_rhs2 (assign), code);
 }
 \f
 /* Return a type for the widest vector mode whose components are of mode
@@ -387,43 +388,34 @@ type_for_widest_vector_mode (enum machine_mode inner_mode, optab op, int satp)
 /* Process one statement.  If we identify a vector operation, expand it.  */
 
 static void
-expand_vector_operations_1 (block_stmt_iterator *bsi)
+expand_vector_operations_1 (gimple_stmt_iterator *gsi)
 {
-  tree stmt = bsi_stmt (*bsi);
-  tree *p_lhs, *p_rhs, lhs, rhs, type, compute_type;
+  gimple stmt = gsi_stmt (*gsi);
+  tree lhs, rhs1, rhs2 = NULL, type, compute_type;
   enum tree_code code;
   enum machine_mode compute_mode;
   optab op;
+  enum gimple_rhs_class rhs_class;
+  tree new_rhs;
 
-  switch (TREE_CODE (stmt))
-    {
-    case RETURN_EXPR:
-      stmt = TREE_OPERAND (stmt, 0);
-      if (!stmt || TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
-       return;
+  if (gimple_code (stmt) != GIMPLE_ASSIGN)
+    return;
 
-      /* FALLTHRU */
+  code = gimple_assign_rhs_code (stmt);
+  rhs_class = get_gimple_rhs_class (code);
 
-    case GIMPLE_MODIFY_STMT:
-      p_lhs = &GIMPLE_STMT_OPERAND (stmt, 0);
-      p_rhs = &GIMPLE_STMT_OPERAND (stmt, 1);
-      lhs = *p_lhs;
-      rhs = *p_rhs;
-      break;
+  if (rhs_class != GIMPLE_UNARY_RHS && rhs_class != GIMPLE_BINARY_RHS)
+    return;
 
-    default:
-      return;
-    }
+  lhs = gimple_assign_lhs (stmt);
+  rhs1 = gimple_assign_rhs1 (stmt);
+  type = gimple_expr_type (stmt);
+  if (rhs_class == GIMPLE_BINARY_RHS)
+    rhs2 = gimple_assign_rhs2 (stmt);
 
-  type = TREE_TYPE (rhs);
   if (TREE_CODE (type) != VECTOR_TYPE)
     return;
 
-  code = TREE_CODE (rhs);
-  if (TREE_CODE_CLASS (code) != tcc_unary
-      && TREE_CODE_CLASS (code) != tcc_binary)
-    return;
-
   if (code == NOP_EXPR 
       || code == FLOAT_EXPR
       || code == FIX_TRUNC_EXPR
@@ -435,17 +427,18 @@ expand_vector_operations_1 (block_stmt_iterator *bsi)
   /* The signedness is determined from input argument.  */
   if (code == VEC_UNPACK_FLOAT_HI_EXPR
       || code == VEC_UNPACK_FLOAT_LO_EXPR)
-    type = TREE_TYPE (TREE_OPERAND (rhs, 0));
+    type = TREE_TYPE (rhs1);
 
   /* Choose between vector shift/rotate by vector and vector shift/rotate by
      scalar */
-  if (code == LSHIFT_EXPR || code == RSHIFT_EXPR || code == LROTATE_EXPR
+  if (code == LSHIFT_EXPR 
+      || code == RSHIFT_EXPR 
+      || code == LROTATE_EXPR
       || code == RROTATE_EXPR)
     {
       /* If the 2nd argument is vector, we need a vector/vector shift */
-      if (VECTOR_MODE_P (TYPE_MODE (TREE_TYPE (TREE_OPERAND (rhs, 1)))))
+      if (VECTOR_MODE_P (TYPE_MODE (TREE_TYPE (rhs2))))
        op = optab_for_tree_code (code, type, optab_vector);
-
       else
        {
          /* Try for a vector/scalar shift, and if we don't have one, see if we
@@ -471,7 +464,7 @@ expand_vector_operations_1 (block_stmt_iterator *bsi)
       || code == VEC_PACK_TRUNC_EXPR
       || code == VEC_PACK_SAT_EXPR
       || code == VEC_PACK_FIX_TRUNC_EXPR)
-    type = TREE_TYPE (TREE_OPERAND (rhs, 0));
+    type = TREE_TYPE (rhs1);
 
   /* Optabs will try converting a negation into a subtraction, so
      look for it as well.  TODO: negation of floating-point vectors
@@ -513,13 +506,17 @@ expand_vector_operations_1 (block_stmt_iterator *bsi)
     }
 
   gcc_assert (code != VEC_LSHIFT_EXPR && code != VEC_RSHIFT_EXPR);
-  rhs = expand_vector_operation (bsi, type, compute_type, rhs, code);
-  if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
-    *p_rhs = rhs;
-  else
-    *p_rhs = gimplify_build1 (bsi, VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
+  new_rhs = expand_vector_operation (gsi, type, compute_type, stmt, code);
+  if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_rhs)))
+    new_rhs = gimplify_build1 (gsi, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
+                               new_rhs);
+
+  /* NOTE:  We should avoid using gimple_assign_set_rhs_from_tree. One
+     way to do it is change expand_vector_operation and its callees to
+     return a tree_code, RHS1 and RHS2 instead of a tree. */
+  gimple_assign_set_rhs_from_tree (gsi, new_rhs);
 
-  mark_stmt_modified (bsi_stmt (*bsi));
+  gimple_set_modified (gsi_stmt (*gsi), true);
 }
 \f
 /* Use this to lower vector operations introduced by the vectorizer,
@@ -534,15 +531,15 @@ gate_expand_vector_operations (void)
 static unsigned int
 expand_vector_operations (void)
 {
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   basic_block bb;
 
   FOR_EACH_BB (bb)
     {
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
-         expand_vector_operations_1 (&bsi);
-         update_stmt_if_modified (bsi_stmt (bsi));
+         expand_vector_operations_1 (&gsi);
+         update_stmt_if_modified (gsi_stmt (gsi));
        }
     }
   return 0;
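The rewrite of expand_vector_operations_1 above also shows the dispatch style used throughout the merge: instead of switching on TREE_CODE of a GIMPLE_MODIFY_STMT and reaching into TREE_OPERAND, the code asks for the assignment's rhs code, classifies it with get_gimple_rhs_class, and reads operands through gimple_assign_lhs/rhs1/rhs2. A minimal sketch of that shape, with a hypothetical helper name; the accessors are the ones used above:

    static void
    examine_assignment (gimple stmt)
    {
      enum tree_code code;
      enum gimple_rhs_class rhs_class;
      tree lhs, rhs1, rhs2 = NULL_TREE;

      if (gimple_code (stmt) != GIMPLE_ASSIGN)
        return;

      code = gimple_assign_rhs_code (stmt);
      rhs_class = get_gimple_rhs_class (code);
      if (rhs_class != GIMPLE_UNARY_RHS && rhs_class != GIMPLE_BINARY_RHS)
        return;

      lhs = gimple_assign_lhs (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if (rhs_class == GIMPLE_BINARY_RHS)
        rhs2 = gimple_assign_rhs2 (stmt);

      /* ... act on CODE, LHS, RHS1 and RHS2 ...  */
    }
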
index b9a302d..e53780f 100644
@@ -1,5 +1,5 @@
 /* Analysis Utilities for Loop Vectorization.
-   Copyright (C) 2006, 2007 Free Software Foundation, Inc.
+   Copyright (C) 2006, 2007, 2008 Free Software Foundation, Inc.
    Contributed by Dorit Nuzman <dorit@il.ibm.com>
 
 This file is part of GCC.
@@ -42,14 +42,14 @@ along with GCC; see the file COPYING3.  If not see
 
 /* Function prototypes */
 static void vect_pattern_recog_1 
-  (tree (* ) (tree, tree *, tree *), block_stmt_iterator);
-static bool widened_name_p (tree, tree, tree *, tree *);
+  (gimple (* ) (gimple, tree *, tree *), gimple_stmt_iterator);
+static bool widened_name_p (tree, gimple, tree *, gimple *);
 
 /* Pattern recognition functions  */
-static tree vect_recog_widen_sum_pattern (tree, tree *, tree *);
-static tree vect_recog_widen_mult_pattern (tree, tree *, tree *);
-static tree vect_recog_dot_prod_pattern (tree, tree *, tree *);
-static tree vect_recog_pow_pattern (tree, tree *, tree *);
+static gimple vect_recog_widen_sum_pattern (gimple, tree *, tree *);
+static gimple vect_recog_widen_mult_pattern (gimple, tree *, tree *);
+static gimple vect_recog_dot_prod_pattern (gimple, tree *, tree *);
+static gimple vect_recog_pow_pattern (gimple, tree *, tree *);
 static vect_recog_func_ptr vect_vect_recog_func_ptrs[NUM_PATTERNS] = {
        vect_recog_widen_mult_pattern,
        vect_recog_widen_sum_pattern,
@@ -66,12 +66,12 @@ static vect_recog_func_ptr vect_vect_recog_func_ptrs[NUM_PATTERNS] = {
 */
 
 static bool
-widened_name_p (tree name, tree use_stmt, tree *half_type, tree *def_stmt)
+widened_name_p (tree name, gimple use_stmt, tree *half_type, gimple *def_stmt)
 {
   tree dummy;
+  gimple dummy_gimple;
   loop_vec_info loop_vinfo;
   stmt_vec_info stmt_vinfo;
-  tree expr;
   tree type = TREE_TYPE (name);
   tree oprnd0;
   enum vect_def_type dt;
@@ -90,14 +90,13 @@ widened_name_p (tree name, tree use_stmt, tree *half_type, tree *def_stmt)
   if (! *def_stmt)
     return false;
 
-  if (TREE_CODE (*def_stmt) != GIMPLE_MODIFY_STMT)
+  if (!is_gimple_assign (*def_stmt))
     return false;
 
-  expr = GIMPLE_STMT_OPERAND (*def_stmt, 1);
-  if (TREE_CODE (expr) != NOP_EXPR)
+  if (gimple_assign_rhs_code (*def_stmt) != NOP_EXPR)
     return false;
 
-  oprnd0 = TREE_OPERAND (expr, 0);
+  oprnd0 = gimple_assign_rhs1 (*def_stmt);
 
   *half_type = TREE_TYPE (oprnd0);
   if (!INTEGRAL_TYPE_P (type) || !INTEGRAL_TYPE_P (*half_type)
@@ -105,12 +104,24 @@ widened_name_p (tree name, tree use_stmt, tree *half_type, tree *def_stmt)
       || (TYPE_PRECISION (type) < (TYPE_PRECISION (*half_type) * 2)))
     return false;
 
-  if (!vect_is_simple_use (oprnd0, loop_vinfo, &dummy, &dummy, &dt))
+  if (!vect_is_simple_use (oprnd0, loop_vinfo, &dummy_gimple, &dummy, &dt))
     return false;
 
   return true;
 }
 
+/* Helper to return a new temporary for pattern of TYPE for STMT.  If STMT
+   is NULL, the caller must set SSA_NAME_DEF_STMT for the returned SSA var. */
+
+static tree
+vect_recog_temp_ssa_var (tree type, gimple stmt)
+{
+  tree var = create_tmp_var (type, "patt");
+
+  add_referenced_var (var);
+  var = make_ssa_name (var, stmt);
+  return var;
+}
 
 /* Function vect_recog_dot_prod_pattern
 
@@ -157,24 +168,24 @@ widened_name_p (tree name, tree use_stmt, tree *half_type, tree *def_stmt)
          the correct order (as is the case when this computation is in an
          inner-loop nested in an outer-loop that us being vectorized).  */
 
-static tree
-vect_recog_dot_prod_pattern (tree last_stmt, tree *type_in, tree *type_out)
+static gimple
+vect_recog_dot_prod_pattern (gimple last_stmt, tree *type_in, tree *type_out)
 {
-  tree stmt, expr;
+  gimple stmt;
   tree oprnd0, oprnd1;
   tree oprnd00, oprnd01;
   stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
   tree type, half_type;
-  tree pattern_expr;
+  gimple pattern_stmt;
   tree prod_type;
   loop_vec_info loop_info = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
   struct loop *loop = LOOP_VINFO_LOOP (loop_info);
+  tree var, rhs;
 
-  if (TREE_CODE (last_stmt) != GIMPLE_MODIFY_STMT)
+  if (!is_gimple_assign (last_stmt))
     return NULL;
 
-  expr = GIMPLE_STMT_OPERAND (last_stmt, 1);
-  type = TREE_TYPE (expr);
+  type = gimple_expr_type (last_stmt);
 
   /* Look for the following pattern 
           DX = (TYPE1) X;
@@ -200,7 +211,7 @@ vect_recog_dot_prod_pattern (tree last_stmt, tree *type_in, tree *type_out)
   /* Starting from LAST_STMT, follow the defs of its uses in search
      of the above pattern.  */
 
-  if (TREE_CODE (expr) != PLUS_EXPR)
+  if (gimple_assign_rhs_code (last_stmt) != PLUS_EXPR)
     return NULL;
 
   if (STMT_VINFO_IN_PATTERN_P (stmt_vinfo))
@@ -208,22 +219,21 @@ vect_recog_dot_prod_pattern (tree last_stmt, tree *type_in, tree *type_out)
       /* Has been detected as widening-summation?  */
 
       stmt = STMT_VINFO_RELATED_STMT (stmt_vinfo);
-      expr = GIMPLE_STMT_OPERAND (stmt, 1);
-      type = TREE_TYPE (expr);
-      if (TREE_CODE (expr) != WIDEN_SUM_EXPR)
+      type = gimple_expr_type (stmt);
+      if (gimple_assign_rhs_code (stmt) != WIDEN_SUM_EXPR)
         return NULL;
-      oprnd0 = TREE_OPERAND (expr, 0);
-      oprnd1 = TREE_OPERAND (expr, 1);
+      oprnd0 = gimple_assign_rhs1 (stmt);
+      oprnd1 = gimple_assign_rhs2 (stmt);
       half_type = TREE_TYPE (oprnd0);
     }
   else
     {
-      tree def_stmt;
+      gimple def_stmt;
 
       if (STMT_VINFO_DEF_TYPE (stmt_vinfo) != vect_reduction_def)
         return NULL;
-      oprnd0 = TREE_OPERAND (expr, 0);
-      oprnd1 = TREE_OPERAND (expr, 1);
+      oprnd0 = gimple_assign_rhs1 (last_stmt);
+      oprnd1 = gimple_assign_rhs2 (last_stmt);
       if (TYPE_MAIN_VARIANT (TREE_TYPE (oprnd0)) != TYPE_MAIN_VARIANT (type)
           || TYPE_MAIN_VARIANT (TREE_TYPE (oprnd1)) != TYPE_MAIN_VARIANT (type))
         return NULL;
@@ -232,8 +242,7 @@ vect_recog_dot_prod_pattern (tree last_stmt, tree *type_in, tree *type_out)
       if (widened_name_p (oprnd0, stmt, &half_type, &def_stmt))
         {
           stmt = def_stmt;
-          expr = GIMPLE_STMT_OPERAND (stmt, 1);
-          oprnd0 = TREE_OPERAND (expr, 0);
+          oprnd0 = gimple_assign_rhs1 (stmt);
         }
       else
         half_type = type;
@@ -248,37 +257,35 @@ vect_recog_dot_prod_pattern (tree last_stmt, tree *type_in, tree *type_out)
   stmt = SSA_NAME_DEF_STMT (oprnd0);
   /* FORNOW.  Can continue analyzing the def-use chain when this stmt in a phi 
      inside the loop (in case we are analyzing an outer-loop).  */
-  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+  if (!is_gimple_assign (stmt))
     return NULL; 
   stmt_vinfo = vinfo_for_stmt (stmt);
   gcc_assert (stmt_vinfo);
   if (STMT_VINFO_DEF_TYPE (stmt_vinfo) != vect_loop_def)
     return NULL;
-  expr = GIMPLE_STMT_OPERAND (stmt, 1);
-  if (TREE_CODE (expr) != MULT_EXPR)
+  if (gimple_assign_rhs_code (stmt) != MULT_EXPR)
     return NULL;
   if (STMT_VINFO_IN_PATTERN_P (stmt_vinfo))
     {
       /* Has been detected as a widening multiplication?  */
 
       stmt = STMT_VINFO_RELATED_STMT (stmt_vinfo);
-      expr = GIMPLE_STMT_OPERAND (stmt, 1);
-      if (TREE_CODE (expr) != WIDEN_MULT_EXPR)
+      if (gimple_assign_rhs_code (stmt) != WIDEN_MULT_EXPR)
         return NULL;
       stmt_vinfo = vinfo_for_stmt (stmt);
       gcc_assert (stmt_vinfo);
       gcc_assert (STMT_VINFO_DEF_TYPE (stmt_vinfo) == vect_loop_def);
-      oprnd00 = TREE_OPERAND (expr, 0);
-      oprnd01 = TREE_OPERAND (expr, 1);
+      oprnd00 = gimple_assign_rhs1 (stmt);
+      oprnd01 = gimple_assign_rhs2 (stmt);
     }
   else
     {
       tree half_type0, half_type1;
-      tree def_stmt;
+      gimple def_stmt;
       tree oprnd0, oprnd1;
 
-      oprnd0 = TREE_OPERAND (expr, 0);
-      oprnd1 = TREE_OPERAND (expr, 1);
+      oprnd0 = gimple_assign_rhs1 (stmt);
+      oprnd1 = gimple_assign_rhs2 (stmt);
       if (TYPE_MAIN_VARIANT (TREE_TYPE (oprnd0)) 
                                != TYPE_MAIN_VARIANT (prod_type)
           || TYPE_MAIN_VARIANT (TREE_TYPE (oprnd1)) 
@@ -286,10 +293,10 @@ vect_recog_dot_prod_pattern (tree last_stmt, tree *type_in, tree *type_out)
         return NULL;
       if (!widened_name_p (oprnd0, stmt, &half_type0, &def_stmt))
         return NULL;
-      oprnd00 = TREE_OPERAND (GIMPLE_STMT_OPERAND (def_stmt, 1), 0);
+      oprnd00 = gimple_assign_rhs1 (def_stmt);
       if (!widened_name_p (oprnd1, stmt, &half_type1, &def_stmt))
         return NULL;
-      oprnd01 = TREE_OPERAND (GIMPLE_STMT_OPERAND (def_stmt, 1), 0);
+      oprnd01 = gimple_assign_rhs1 (def_stmt);
       if (TYPE_MAIN_VARIANT (half_type0) != TYPE_MAIN_VARIANT (half_type1))
         return NULL;
       if (TYPE_PRECISION (prod_type) != TYPE_PRECISION (half_type0) * 2)
@@ -301,11 +308,14 @@ vect_recog_dot_prod_pattern (tree last_stmt, tree *type_in, tree *type_out)
   *type_out = type;
   
   /* Pattern detected. Create a stmt to be used to replace the pattern: */
-  pattern_expr = build3 (DOT_PROD_EXPR, type, oprnd00, oprnd01, oprnd1);
+  var = vect_recog_temp_ssa_var (type, NULL);
+  rhs =        build3 (DOT_PROD_EXPR, type, oprnd00, oprnd01, oprnd1),
+  pattern_stmt = gimple_build_assign (var, rhs);
+                                     
   if (vect_print_dump_info (REPORT_DETAILS))
     {
       fprintf (vect_dump, "vect_recog_dot_prod_pattern: detected: ");
-      print_generic_expr (vect_dump, pattern_expr, TDF_SLIM);
+      print_gimple_stmt (vect_dump, pattern_stmt, 0, TDF_SLIM);
     }
 
   /* We don't allow changing the order of the computation in the inner-loop
@@ -317,10 +327,9 @@ vect_recog_dot_prod_pattern (tree last_stmt, tree *type_in, tree *type_out)
       return NULL;
     }
 
-  return pattern_expr;
+  return pattern_stmt;
 }
-
-
 /* Function vect_recog_widen_mult_pattern
 
    Try to find the following pattern:
@@ -352,34 +361,33 @@ vect_recog_dot_prod_pattern (tree last_stmt, tree *type_in, tree *type_out)
         WIDEN_MULT <a_t, b_t>
 */
 
-static tree
-vect_recog_widen_mult_pattern (tree last_stmt, 
+static gimple
+vect_recog_widen_mult_pattern (gimple last_stmt, 
                               tree *type_in, 
                               tree *type_out)
 {
-  tree expr;
-  tree def_stmt0, def_stmt1;
+  gimple def_stmt0, def_stmt1;
   tree oprnd0, oprnd1;
   tree type, half_type0, half_type1;
-  tree pattern_expr;
+  gimple pattern_stmt;
   tree vectype;
   tree dummy;
+  tree var;
   enum tree_code dummy_code;
 
-  if (TREE_CODE (last_stmt) != GIMPLE_MODIFY_STMT)
+  if (!is_gimple_assign (last_stmt))
     return NULL;
 
-  expr = GIMPLE_STMT_OPERAND (last_stmt, 1);
-  type = TREE_TYPE (expr);
+  type = gimple_expr_type (last_stmt);
 
   /* Starting from LAST_STMT, follow the defs of its uses in search
      of the above pattern.  */
 
-  if (TREE_CODE (expr) != MULT_EXPR)
+  if (gimple_assign_rhs_code (last_stmt) != MULT_EXPR)
     return NULL;
 
-  oprnd0 = TREE_OPERAND (expr, 0);
-  oprnd1 = TREE_OPERAND (expr, 1);
+  oprnd0 = gimple_assign_rhs1 (last_stmt);
+  oprnd1 = gimple_assign_rhs2 (last_stmt);
   if (TYPE_MAIN_VARIANT (TREE_TYPE (oprnd0)) != TYPE_MAIN_VARIANT (type)
       || TYPE_MAIN_VARIANT (TREE_TYPE (oprnd1)) != TYPE_MAIN_VARIANT (type))
     return NULL;
@@ -387,12 +395,12 @@ vect_recog_widen_mult_pattern (tree last_stmt,
   /* Check argument 0 */
   if (!widened_name_p (oprnd0, last_stmt, &half_type0, &def_stmt0))
     return NULL;
-  oprnd0 = TREE_OPERAND (GIMPLE_STMT_OPERAND (def_stmt0, 1), 0);
+  oprnd0 = gimple_assign_rhs1 (def_stmt0);
 
   /* Check argument 1 */
   if (!widened_name_p (oprnd1, last_stmt, &half_type1, &def_stmt1))
     return NULL;
-  oprnd1 = TREE_OPERAND (GIMPLE_STMT_OPERAND (def_stmt1, 1), 0);
+  oprnd1 = gimple_assign_rhs1 (def_stmt1);
 
   if (TYPE_MAIN_VARIANT (half_type0) != TYPE_MAIN_VARIANT (half_type1))
     return NULL;
@@ -405,18 +413,23 @@ vect_recog_widen_mult_pattern (tree last_stmt,
   vectype = get_vectype_for_scalar_type (half_type0);
   if (!vectype
       || !supportable_widening_operation (WIDEN_MULT_EXPR, last_stmt, vectype,
-                                       &dummy, &dummy, &dummy_code,
-                                       &dummy_code))
+                                         &dummy, &dummy, &dummy_code,
+                                         &dummy_code))
     return NULL;
 
   *type_in = vectype;
   *type_out = NULL_TREE;
 
   /* Pattern supported. Create a stmt to be used to replace the pattern: */
-  pattern_expr = build2 (WIDEN_MULT_EXPR, type, oprnd0, oprnd1);
+  var = vect_recog_temp_ssa_var (type, NULL);
+  pattern_stmt = gimple_build_assign_with_ops (WIDEN_MULT_EXPR, var, oprnd0,
+                                              oprnd1);
+  SSA_NAME_DEF_STMT (var) = pattern_stmt;
+
   if (vect_print_dump_info (REPORT_DETAILS))
-    print_generic_expr (vect_dump, pattern_expr, TDF_SLIM);
-  return pattern_expr;
+    print_gimple_stmt (vect_dump, pattern_stmt, 0, TDF_SLIM);
+
+  return pattern_stmt;
 }
 
 
@@ -441,43 +454,40 @@ vect_recog_widen_mult_pattern (tree last_stmt,
 
    * Return value: A new stmt that will be used to replace the sequence of
    stmts that constitute the pattern. In this case it will be:
-        x * x
+        x = x * x
    or
-       sqrt (x)
+       x = sqrt (x)
 */
 
-static tree
-vect_recog_pow_pattern (tree last_stmt, tree *type_in, tree *type_out)
+static gimple
+vect_recog_pow_pattern (gimple last_stmt, tree *type_in, tree *type_out)
 {
-  tree expr;
   tree type;
-  tree fn, base, exp;
+  tree fn, base, exp = NULL;
+  gimple stmt;
+  tree var;
 
-  if (TREE_CODE (last_stmt) != GIMPLE_MODIFY_STMT)
+  if (!is_gimple_call (last_stmt) || gimple_call_lhs (last_stmt) == NULL)
     return NULL;
 
-  expr = GIMPLE_STMT_OPERAND (last_stmt, 1);
-  type = TREE_TYPE (expr);
-
-  if (TREE_CODE (expr) != CALL_EXPR)
-    return NULL_TREE;
+  type = gimple_expr_type (last_stmt);
 
-  fn = get_callee_fndecl (expr);
+  fn = gimple_call_fndecl (last_stmt);
   switch (DECL_FUNCTION_CODE (fn))
     {
     case BUILT_IN_POWIF:
     case BUILT_IN_POWI:
     case BUILT_IN_POWF:
     case BUILT_IN_POW:
-      base = CALL_EXPR_ARG (expr, 0);
-      exp = CALL_EXPR_ARG (expr, 1);
+      base = gimple_call_arg (last_stmt, 0);
+      exp = gimple_call_arg (last_stmt, 1);
       if (TREE_CODE (exp) != REAL_CST
          && TREE_CODE (exp) != INTEGER_CST)
-        return NULL_TREE;
+        return NULL;
       break;
 
-    default:;
-      return NULL_TREE;
+    default:
+      return NULL;
     }
 
   /* We now have a pow or powi builtin function call with a constant
@@ -492,7 +502,11 @@ vect_recog_pow_pattern (tree last_stmt, tree *type_in, tree *type_out)
           && REAL_VALUES_EQUAL (TREE_REAL_CST (exp), dconst2)))
     {
       *type_in = TREE_TYPE (base);
-      return build2 (MULT_EXPR, TREE_TYPE (base), base, base);
+
+      var = vect_recog_temp_ssa_var (TREE_TYPE (base), NULL);
+      stmt = gimple_build_assign_with_ops (MULT_EXPR, var, base, base);
+      SSA_NAME_DEF_STMT (var) = stmt;
+      return stmt;
     }
 
   /* Catch square root.  */
@@ -503,13 +517,18 @@ vect_recog_pow_pattern (tree last_stmt, tree *type_in, tree *type_out)
       *type_in = get_vectype_for_scalar_type (TREE_TYPE (base));
       if (*type_in)
        {
-         newfn = build_call_expr (newfn, 1, base);
-         if (vectorizable_function (newfn, *type_in, *type_in) != NULL_TREE)
-           return newfn;
+         gimple stmt = gimple_build_call (newfn, 1, base);
+         if (vectorizable_function (stmt, *type_in, *type_in)
+             != NULL_TREE)
+           {
+             var = vect_recog_temp_ssa_var (TREE_TYPE (base), stmt);
+             gimple_call_set_lhs (stmt, var); 
+             return stmt;
+           }
        }
     }
 
-  return NULL_TREE;
+  return NULL;
 }
 
 
@@ -552,22 +571,22 @@ vect_recog_pow_pattern (tree last_stmt, tree *type_in, tree *type_out)
         the correct order (as is the case when this computation is in an 
         inner-loop nested in an outer-loop that us being vectorized).  */
 
-static tree
-vect_recog_widen_sum_pattern (tree last_stmt, tree *type_in, tree *type_out)
+static gimple
+vect_recog_widen_sum_pattern (gimple last_stmt, tree *type_in, tree *type_out)
 {
-  tree stmt, expr;
+  gimple stmt;
   tree oprnd0, oprnd1;
   stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
   tree type, half_type;
-  tree pattern_expr;
+  gimple pattern_stmt;
   loop_vec_info loop_info = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
   struct loop *loop = LOOP_VINFO_LOOP (loop_info);
+  tree var;
 
-  if (TREE_CODE (last_stmt) != GIMPLE_MODIFY_STMT)
+  if (!is_gimple_assign (last_stmt))
     return NULL;
 
-  expr = GIMPLE_STMT_OPERAND (last_stmt, 1);
-  type = TREE_TYPE (expr);
+  type = gimple_expr_type (last_stmt);
 
   /* Look for the following pattern
           DX = (TYPE) X;
@@ -579,14 +598,14 @@ vect_recog_widen_sum_pattern (tree last_stmt, tree *type_in, tree *type_out)
   /* Starting from LAST_STMT, follow the defs of its uses in search
      of the above pattern.  */
 
-  if (TREE_CODE (expr) != PLUS_EXPR)
+  if (gimple_assign_rhs_code (last_stmt) != PLUS_EXPR)
     return NULL;
 
   if (STMT_VINFO_DEF_TYPE (stmt_vinfo) != vect_reduction_def)
     return NULL;
 
-  oprnd0 = TREE_OPERAND (expr, 0);
-  oprnd1 = TREE_OPERAND (expr, 1);
+  oprnd0 = gimple_assign_rhs1 (last_stmt);
+  oprnd1 = gimple_assign_rhs2 (last_stmt);
   if (TYPE_MAIN_VARIANT (TREE_TYPE (oprnd0)) != TYPE_MAIN_VARIANT (type)
       || TYPE_MAIN_VARIANT (TREE_TYPE (oprnd1)) != TYPE_MAIN_VARIANT (type))
     return NULL;
@@ -600,16 +619,20 @@ vect_recog_widen_sum_pattern (tree last_stmt, tree *type_in, tree *type_out)
   if (!widened_name_p (oprnd0, last_stmt, &half_type, &stmt))
     return NULL;
 
-  oprnd0 = TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt, 1), 0);
+  oprnd0 = gimple_assign_rhs1 (stmt);
   *type_in = half_type;
   *type_out = type;
 
   /* Pattern detected. Create a stmt to be used to replace the pattern: */
-  pattern_expr = build2 (WIDEN_SUM_EXPR, type, oprnd0, oprnd1);
+  var = vect_recog_temp_ssa_var (type, NULL);
+  pattern_stmt = gimple_build_assign_with_ops (WIDEN_SUM_EXPR, var,
+                                              oprnd0, oprnd1);
+  SSA_NAME_DEF_STMT (var) = pattern_stmt;
+
   if (vect_print_dump_info (REPORT_DETAILS))
     {
       fprintf (vect_dump, "vect_recog_widen_sum_pattern: detected: ");
-      print_generic_expr (vect_dump, pattern_expr, TDF_SLIM);
+      print_gimple_stmt (vect_dump, pattern_stmt, 0, TDF_SLIM);
     }
 
   /* We don't allow changing the order of the computation in the inner-loop
@@ -621,7 +644,7 @@ vect_recog_widen_sum_pattern (tree last_stmt, tree *type_in, tree *type_out)
       return NULL;
     }
 
-  return pattern_expr;
+  return pattern_stmt;
 }
 
 
@@ -649,23 +672,19 @@ vect_recog_widen_sum_pattern (tree last_stmt, tree *type_in, tree *type_out)
 
 static void
 vect_pattern_recog_1 (
-       tree (* vect_recog_func) (tree, tree *, tree *),
-       block_stmt_iterator si)
+       gimple (* vect_recog_func) (gimple, tree *, tree *),
+       gimple_stmt_iterator si)
 {
-  tree stmt = bsi_stmt (si);
+  gimple stmt = gsi_stmt (si), pattern_stmt;
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
   stmt_vec_info pattern_stmt_info;
   loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
-  tree pattern_expr;
   tree pattern_vectype;
   tree type_in, type_out;
-  tree pattern_type;
   enum tree_code code;
-  tree var, var_name;
-  stmt_ann_t ann;
 
-  pattern_expr = (* vect_recog_func) (stmt, &type_in, &type_out);
-  if (!pattern_expr) 
+  pattern_stmt = (* vect_recog_func) (stmt, &type_in, &type_out);
+  if (!pattern_stmt)
     return; 
  
   if (VECTOR_MODE_P (TYPE_MODE (type_in))) 
@@ -685,8 +704,15 @@ vect_pattern_recog_1 (
       if (!pattern_vectype)
         return;
 
-      optab = optab_for_tree_code (TREE_CODE (pattern_expr), pattern_vectype,
-                                  optab_default);
+      if (is_gimple_assign (pattern_stmt))
+       code = gimple_assign_rhs_code (pattern_stmt);
+      else
+        {
+         gcc_assert (is_gimple_call (pattern_stmt));
+         code = CALL_EXPR;
+       }
+
+      optab = optab_for_tree_code (code, pattern_vectype, optab_default);
       vec_mode = TYPE_MODE (pattern_vectype);
       if (!optab
           || (icode = optab_handler (optab, vec_mode)->insn_code) ==
@@ -702,28 +728,20 @@ vect_pattern_recog_1 (
   if (vect_print_dump_info (REPORT_DETAILS))
     {
       fprintf (vect_dump, "pattern recognized: "); 
-      print_generic_expr (vect_dump, pattern_expr, TDF_SLIM);
+      print_gimple_stmt (vect_dump, pattern_stmt, 0, TDF_SLIM);
     }
   
-  /* Mark the stmts that are involved in the pattern,
-     create a new stmt to express the pattern and insert it.  */
-  code = TREE_CODE (pattern_expr);
-  pattern_type = TREE_TYPE (pattern_expr);
-  var = create_tmp_var (pattern_type, "patt");
-  add_referenced_var (var);
-  var_name = make_ssa_name (var, NULL_TREE);
-  pattern_expr = build_gimple_modify_stmt (var_name, pattern_expr);
-  SSA_NAME_DEF_STMT (var_name) = pattern_expr;
-  bsi_insert_before (&si, pattern_expr, BSI_SAME_STMT);
-  ann = stmt_ann (pattern_expr);
-  set_stmt_info (ann, new_stmt_vec_info (pattern_expr, loop_vinfo));
-  pattern_stmt_info = vinfo_for_stmt (pattern_expr);
+  /* Mark the stmts that are involved in the pattern. */
+  gsi_insert_before (&si, pattern_stmt, GSI_SAME_STMT);
+  set_vinfo_for_stmt (pattern_stmt,
+                     new_stmt_vec_info (pattern_stmt, loop_vinfo));
+  pattern_stmt_info = vinfo_for_stmt (pattern_stmt);
   
   STMT_VINFO_RELATED_STMT (pattern_stmt_info) = stmt;
   STMT_VINFO_DEF_TYPE (pattern_stmt_info) = STMT_VINFO_DEF_TYPE (stmt_info);
   STMT_VINFO_VECTYPE (pattern_stmt_info) = pattern_vectype;
   STMT_VINFO_IN_PATTERN_P (stmt_info) = true;
-  STMT_VINFO_RELATED_STMT (stmt_info) = pattern_expr;
+  STMT_VINFO_RELATED_STMT (stmt_info) = pattern_stmt;
 
   return;
 }
@@ -804,10 +822,10 @@ vect_pattern_recog (loop_vec_info loop_vinfo)
   struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
   basic_block *bbs = LOOP_VINFO_BBS (loop_vinfo);
   unsigned int nbbs = loop->num_nodes;
-  block_stmt_iterator si;
-  tree stmt;
+  gimple_stmt_iterator si;
+  gimple stmt;
   unsigned int i, j;
-  tree (* vect_recog_func_ptr) (tree, tree *, tree *);
+  gimple (* vect_recog_func_ptr) (gimple, tree *, tree *);
 
   if (vect_print_dump_info (REPORT_DETAILS))
     fprintf (vect_dump, "=== vect_pattern_recog ===");
@@ -817,9 +835,9 @@ vect_pattern_recog (loop_vec_info loop_vinfo)
   for (i = 0; i < nbbs; i++)
     {
       basic_block bb = bbs[i];
-      for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
+      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
         {
-          stmt = bsi_stmt (si);
+          stmt = gsi_stmt (si);
 
           /* Scan over all generic vect_recog_xxx_pattern functions.  */
           for (j = 0; j < NUM_PATTERNS; j++)
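The recognizers in this file now all build their replacement as a real assignment with a fresh SSA name on the left-hand side rather than returning a bare expression. A minimal sketch of that recipe, combining vect_recog_temp_ssa_var, gimple_build_assign_with_ops and the insertion done in vect_pattern_recog_1; type, oprnd0, oprnd1, si and loop_vinfo are assumed to be in scope as in the functions above:

    /* Build "patt_N = WIDEN_MULT_EXPR <oprnd0, oprnd1>" and put it right
       before the statement it stands in for.  */
    tree var = vect_recog_temp_ssa_var (type, NULL);
    gimple pattern_stmt
      = gimple_build_assign_with_ops (WIDEN_MULT_EXPR, var, oprnd0, oprnd1);
    SSA_NAME_DEF_STMT (var) = pattern_stmt;

    gsi_insert_before (&si, pattern_stmt, GSI_SAME_STMT);
    set_vinfo_for_stmt (pattern_stmt,
                        new_stmt_vec_info (pattern_stmt, loop_vinfo));
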
index 1fa786d..362d37c 100644
@@ -46,20 +46,22 @@ along with GCC; see the file COPYING3.  If not see
 #include "real.h"
 
 /* Utility functions for the code transformation.  */
-static bool vect_transform_stmt (tree, block_stmt_iterator *, bool *, slp_tree);
+static bool vect_transform_stmt (gimple, gimple_stmt_iterator *, bool *,
+                                slp_tree);
 static tree vect_create_destination_var (tree, tree);
 static tree vect_create_data_ref_ptr 
-  (tree, struct loop*, tree, tree *, tree *, bool, bool *); 
+  (gimple, struct loop*, tree, tree *, gimple *, bool, bool *);
 static tree vect_create_addr_base_for_vector_ref 
-  (tree, tree *, tree, struct loop *);
+  (gimple, gimple_seq *, tree, struct loop *);
 static tree vect_get_new_vect_var (tree, enum vect_var_kind, const char *);
-static tree vect_get_vec_def_for_operand (tree, tree, tree *);
-static tree vect_init_vector (tree, tree, tree, block_stmt_iterator *);
+static tree vect_get_vec_def_for_operand (tree, gimple, tree *);
+static tree vect_init_vector (gimple, tree, tree, gimple_stmt_iterator *);
 static void vect_finish_stmt_generation 
-  (tree stmt, tree vec_stmt, block_stmt_iterator *);
+  (gimple stmt, gimple vec_stmt, gimple_stmt_iterator *);
 static bool vect_is_simple_cond (tree, loop_vec_info); 
-static void vect_create_epilog_for_reduction (tree, tree, enum tree_code, tree);
-static tree get_initial_def_for_reduction (tree, tree, tree *);
+static void vect_create_epilog_for_reduction (tree, gimple, enum tree_code,
+                                             gimple);
+static tree get_initial_def_for_reduction (gimple, tree, tree *);
 
 /* Utility function dealing with loop peeling (not peeling itself).  */
 static void vect_generate_tmps_on_preheader 
@@ -73,7 +75,7 @@ static int vect_min_worthwhile_factor (enum tree_code);
 
 
 static int
-cost_for_stmt (tree stmt)
+cost_for_stmt (gimple stmt)
 {
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
 
@@ -148,11 +150,11 @@ vect_estimate_min_profitable_iters (loop_vec_info loop_vinfo)
 
   /* Requires loop versioning tests to handle misalignment.  */
 
-  if (VEC_length (tree, LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo)))
+  if (VEC_length (gimple, LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo)))
     {
       /*  FIXME: Make cost depend on complexity of individual check.  */
       vec_outside_cost +=
-        VEC_length (tree, LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo));
+       VEC_length (gimple, LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo));
       if (vect_print_dump_info (REPORT_COST))
         fprintf (vect_dump, "cost model: Adding cost of checks for loop "
                  "versioning to treat misalignment.\n");
@@ -168,7 +170,7 @@ vect_estimate_min_profitable_iters (loop_vec_info loop_vinfo)
                  "versioning aliasing.\n");
     }
 
-  if (VEC_length (tree, LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo))
+  if (VEC_length (gimple, LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo))
       || VEC_length (ddr_p, LOOP_VINFO_MAY_ALIAS_DDRS (loop_vinfo)))
     {
       vec_outside_cost += TARG_COND_TAKEN_BRANCH_COST;
@@ -188,7 +190,7 @@ vect_estimate_min_profitable_iters (loop_vec_info loop_vinfo)
 
   for (i = 0; i < nbbs; i++)
     {
-      block_stmt_iterator si;
+      gimple_stmt_iterator si;
       basic_block bb = bbs[i];
 
       if (bb->loop_father == loop->inner)
@@ -196,9 +198,9 @@ vect_estimate_min_profitable_iters (loop_vec_info loop_vinfo)
       else
        factor = 1;
 
-      for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
+      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
        {
-         tree stmt = bsi_stmt (si);
+         gimple stmt = gsi_stmt (si);
          stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
          /* Skip stmts that are not vectorized inside the loop.  */
          if (!STMT_VINFO_RELEVANT_P (stmt_info)
@@ -338,7 +340,7 @@ vect_estimate_min_profitable_iters (loop_vec_info loop_vinfo)
   if (runtime_test)
     {
       /* Cost model check occurs at versioning.  */
-      if (VEC_length (tree, LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo))
+      if (VEC_length (gimple, LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo))
          || VEC_length (ddr_p, LOOP_VINFO_MAY_ALIAS_DDRS (loop_vinfo)))
        scalar_outside_cost += TARG_COND_NOT_TAKEN_BRANCH_COST;
       else
@@ -451,18 +453,34 @@ vect_model_reduction_cost (stmt_vec_info stmt_info, enum tree_code reduc_code,
   enum tree_code code;
   optab optab;
   tree vectype;
-  tree orig_stmt;
+  gimple stmt, orig_stmt;
   tree reduction_op;
   enum machine_mode mode;
-  tree operation = GIMPLE_STMT_OPERAND (STMT_VINFO_STMT (stmt_info), 1);
-  int op_type = TREE_CODE_LENGTH (TREE_CODE (operation));
   loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
   struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
 
+
   /* Cost of reduction op inside loop.  */
   STMT_VINFO_INSIDE_OF_LOOP_COST (stmt_info) += ncopies * TARG_VEC_STMT_COST;
 
-  reduction_op = TREE_OPERAND (operation, op_type-1);
+  stmt = STMT_VINFO_STMT (stmt_info);
+
+  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
+    {
+    case GIMPLE_SINGLE_RHS:
+      gcc_assert (TREE_OPERAND_LENGTH (gimple_assign_rhs1 (stmt)) == ternary_op);
+      reduction_op = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
+      break;
+    case GIMPLE_UNARY_RHS:
+      reduction_op = gimple_assign_rhs1 (stmt);
+      break;
+    case GIMPLE_BINARY_RHS:
+      reduction_op = gimple_assign_rhs2 (stmt);
+      break;
+    default:
+      gcc_unreachable ();
+    }
+
   vectype = get_vectype_for_scalar_type (TREE_TYPE (reduction_op));
   if (!vectype)
     {
@@ -480,7 +498,7 @@ vect_model_reduction_cost (stmt_vec_info stmt_info, enum tree_code reduc_code,
   if (!orig_stmt) 
     orig_stmt = STMT_VINFO_STMT (stmt_info);
 
-  code = TREE_CODE (GIMPLE_STMT_OPERAND (orig_stmt, 1));
+  code = gimple_assign_rhs_code (orig_stmt);
 
   /* Add in cost for initial definition.  */
   outer_cost += TARG_SCALAR_TO_VEC_COST;
@@ -498,7 +516,7 @@ vect_model_reduction_cost (stmt_vec_info stmt_info, enum tree_code reduc_code,
        {
          int vec_size_in_bits = tree_low_cst (TYPE_SIZE (vectype), 1);
          tree bitsize =
-           TYPE_SIZE (TREE_TYPE ( GIMPLE_STMT_OPERAND (orig_stmt, 0)));
+           TYPE_SIZE (TREE_TYPE (gimple_assign_lhs (orig_stmt)));
          int element_bitsize = tree_low_cst (bitsize, 1);
          int nelements = vec_size_in_bits / element_bitsize;
 
@@ -590,7 +608,7 @@ vect_model_simple_cost (stmt_vec_info stmt_info, int ncopies,
 static int
 vect_cost_strided_group_size (stmt_vec_info stmt_info)
 {
-  tree first_stmt = DR_GROUP_FIRST_DR (stmt_info);
+  gimple first_stmt = DR_GROUP_FIRST_DR (stmt_info);
 
   if (first_stmt == STMT_VINFO_STMT (stmt_info))
     return DR_GROUP_SIZE (stmt_info);
@@ -661,7 +679,7 @@ vect_model_load_cost (stmt_vec_info stmt_info, int ncopies, slp_tree slp_node)
 {
   int group_size;
   int alignment_support_cheme;
-  tree first_stmt;
+  gimple first_stmt;
   struct data_reference *dr = STMT_VINFO_DATA_REF (stmt_info), *first_dr;
   int inside_cost = 0, outside_cost = 0;
 
@@ -846,21 +864,21 @@ vect_get_new_vect_var (tree type, enum vect_var_kind var_kind, const char *name)
    FORNOW: We are only handling array accesses with step 1.  */
 
 static tree
-vect_create_addr_base_for_vector_ref (tree stmt,
-                                      tree *new_stmt_list,
+vect_create_addr_base_for_vector_ref (gimple stmt,
+                                     gimple_seq *new_stmt_list,
                                      tree offset,
                                      struct loop *loop)
 {
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
   struct data_reference *dr = STMT_VINFO_DATA_REF (stmt_info);
-  struct loop *containing_loop = (bb_for_stmt (stmt))->loop_father;
+  struct loop *containing_loop = (gimple_bb (stmt))->loop_father;
   tree data_ref_base = unshare_expr (DR_BASE_ADDRESS (dr));
   tree base_name;
   tree data_ref_base_var;
-  tree new_base_stmt;
   tree vec_stmt;
   tree addr_base, addr_expr;
-  tree dest, new_stmt;
+  tree dest;
+  gimple_seq seq = NULL;
   tree base_offset = unshare_expr (DR_OFFSET (dr));
   tree init = unshare_expr (DR_INIT (dr));
   tree vect_ptr_type, addr_expr2;
@@ -883,17 +901,17 @@ vect_create_addr_base_for_vector_ref (tree stmt,
   base_name = build_fold_indirect_ref (data_ref_base);
   data_ref_base_var = create_tmp_var (TREE_TYPE (data_ref_base), "batmp");
   add_referenced_var (data_ref_base_var);
-  data_ref_base = force_gimple_operand (data_ref_base, &new_base_stmt,
-                                       true, data_ref_base_var);
-  append_to_statement_list_force(new_base_stmt, new_stmt_list);
+  data_ref_base = force_gimple_operand (data_ref_base, &seq, true,
+                                       data_ref_base_var);
+  gimple_seq_add_seq (new_stmt_list, seq);
 
   /* Create base_offset */
   base_offset = size_binop (PLUS_EXPR, base_offset, init);
   base_offset = fold_convert (sizetype, base_offset);
   dest = create_tmp_var (TREE_TYPE (base_offset), "base_off");
   add_referenced_var (dest);
-  base_offset = force_gimple_operand (base_offset, &new_stmt, true, dest); 
-  append_to_statement_list_force (new_stmt, new_stmt_list);
+  base_offset = force_gimple_operand (base_offset, &seq, true, dest);
+  gimple_seq_add_seq (new_stmt_list, seq);
 
   if (offset)
     {
@@ -903,8 +921,8 @@ vect_create_addr_base_for_vector_ref (tree stmt,
       offset = fold_build2 (MULT_EXPR, TREE_TYPE (offset), offset, step);
       base_offset = fold_build2 (PLUS_EXPR, TREE_TYPE (base_offset),
                                 base_offset, offset);
-      base_offset = force_gimple_operand (base_offset, &new_stmt, false, tmp);
-      append_to_statement_list_force (new_stmt, new_stmt_list);
+      base_offset = force_gimple_operand (base_offset, &seq, false, tmp);
+      gimple_seq_add_seq (new_stmt_list, seq);
     }
   
   /* base + base_offset */
@@ -921,8 +939,8 @@ vect_create_addr_base_for_vector_ref (tree stmt,
   addr_expr2 = vect_get_new_vect_var (vect_ptr_type, vect_pointer_var,
                                      get_name (base_name));
   add_referenced_var (addr_expr2);
-  vec_stmt = force_gimple_operand (vec_stmt, &new_stmt, false, addr_expr2);
-  append_to_statement_list_force (new_stmt, new_stmt_list);
+  vec_stmt = force_gimple_operand (vec_stmt, &seq, false, addr_expr2);
+  gimple_seq_add_seq (new_stmt_list, seq);
 
   if (vect_print_dump_info (REPORT_DETAILS))
     {
@@ -944,8 +962,8 @@ vect_create_addr_base_for_vector_ref (tree stmt,
 
    Input:
    1. STMT: a stmt that references memory. Expected to be of the form
-         GIMPLE_MODIFY_STMT <name, data-ref> or
-        GIMPLE_MODIFY_STMT <data-ref, name>.
+         GIMPLE_ASSIGN <name, data-ref> or
+        GIMPLE_ASSIGN <data-ref, name>.
    2. AT_LOOP: the loop where the vector memref is to be created.
    3. OFFSET (optional): an offset to be added to the initial address accessed
         by the data-ref in STMT.
@@ -978,8 +996,8 @@ vect_create_addr_base_for_vector_ref (tree stmt,
    4. Return the pointer.  */
 
 static tree
-vect_create_data_ref_ptr (tree stmt, struct loop *at_loop,
-                         tree offset, tree *initial_address, tree *ptr_incr,
+vect_create_data_ref_ptr (gimple stmt, struct loop *at_loop,
+                         tree offset, tree *initial_address, gimple *ptr_incr,
                          bool only_init, bool *inv_p)
 {
   tree base_name;
@@ -987,23 +1005,23 @@ vect_create_data_ref_ptr (tree stmt, struct loop *at_loop,
   loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
   struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
   bool nested_in_vect_loop = nested_in_vect_loop_p (loop, stmt);
-  struct loop *containing_loop = (bb_for_stmt (stmt))->loop_father;
+  struct loop *containing_loop = (gimple_bb (stmt))->loop_father;
   tree vectype = STMT_VINFO_VECTYPE (stmt_info);
   tree vect_ptr_type;
   tree vect_ptr;
   tree tag;
   tree new_temp;
-  tree vec_stmt;
-  tree new_stmt_list = NULL_TREE;
+  gimple vec_stmt;
+  gimple_seq new_stmt_list = NULL;
   edge pe;
   basic_block new_bb;
   tree vect_ptr_init;
   struct data_reference *dr = STMT_VINFO_DATA_REF (stmt_info);
   tree vptr;
-  block_stmt_iterator incr_bsi;
+  gimple_stmt_iterator incr_gsi;
   bool insert_after;
   tree indx_before_incr, indx_after_incr;
-  tree incr;
+  gimple incr;
   tree step;
 
   /* Check the step (evolution) of the load in LOOP, and record
@@ -1020,7 +1038,7 @@ vect_create_data_ref_ptr (tree stmt, struct loop *at_loop,
 
   /* Create an expression for the first address accessed by this load
      in LOOP.  */ 
-  base_name =  build_fold_indirect_ref (unshare_expr (DR_BASE_ADDRESS (dr)));
+  base_name = build_fold_indirect_ref (unshare_expr (DR_BASE_ADDRESS (dr)));
 
   if (vect_print_dump_info (REPORT_DETAILS))
     {
@@ -1099,18 +1117,18 @@ vect_create_data_ref_ptr (tree stmt, struct loop *at_loop,
   pe = loop_preheader_edge (loop);
   if (new_stmt_list)
     {
-      new_bb = bsi_insert_on_edge_immediate (pe, new_stmt_list);
+      new_bb = gsi_insert_seq_on_edge_immediate (pe, new_stmt_list);
       gcc_assert (!new_bb);
     }
 
   *initial_address = new_temp;
 
   /* Create: p = (vectype *) initial_base  */
-  vec_stmt = fold_convert (vect_ptr_type, new_temp);
-  vec_stmt = build_gimple_modify_stmt (vect_ptr, vec_stmt);
+  vec_stmt = gimple_build_assign (vect_ptr,
+                                 fold_convert (vect_ptr_type, new_temp));
   vect_ptr_init = make_ssa_name (vect_ptr, vec_stmt);
-  GIMPLE_STMT_OPERAND (vec_stmt, 0) = vect_ptr_init;
-  new_bb = bsi_insert_on_edge_immediate (pe, vec_stmt);
+  gimple_assign_set_lhs (vec_stmt, vect_ptr_init);
+  new_bb = gsi_insert_on_edge_immediate (pe, vec_stmt);
   gcc_assert (!new_bb);
 
 
@@ -1135,15 +1153,14 @@ vect_create_data_ref_ptr (tree stmt, struct loop *at_loop,
       if (*inv_p)
        step = size_zero_node;
 
-      standard_iv_increment_position (loop, &incr_bsi, &insert_after);
+      standard_iv_increment_position (loop, &incr_gsi, &insert_after);
 
       create_iv (vect_ptr_init,
                 fold_convert (vect_ptr_type, step),
-                NULL_TREE, loop, &incr_bsi, insert_after,
+                NULL_TREE, loop, &incr_gsi, insert_after,
                 &indx_before_incr, &indx_after_incr);
-      incr = bsi_stmt (incr_bsi);
-      set_stmt_info (stmt_ann (incr),
-                    new_stmt_vec_info (incr, loop_vinfo));
+      incr = gsi_stmt (incr_gsi);
+      set_vinfo_for_stmt (incr, new_stmt_vec_info (incr, loop_vinfo));
 
       /* Copy the points-to information if it exists. */
       if (DR_PTR_INFO (dr))
@@ -1169,13 +1186,13 @@ vect_create_data_ref_ptr (tree stmt, struct loop *at_loop,
   gcc_assert (nested_in_vect_loop);
   if (!only_init)
     {
-      standard_iv_increment_position (containing_loop, &incr_bsi, 
+      standard_iv_increment_position (containing_loop, &incr_gsi,
                                      &insert_after);
       create_iv (vptr, fold_convert (vect_ptr_type, DR_STEP (dr)), NULL_TREE, 
-                containing_loop, &incr_bsi, insert_after, &indx_before_incr, 
+                containing_loop, &incr_gsi, insert_after, &indx_before_incr,
                 &indx_after_incr);
-      incr = bsi_stmt (incr_bsi);
-      set_stmt_info (stmt_ann (incr), new_stmt_vec_info (incr, loop_vinfo));
+      incr = gsi_stmt (incr_gsi);
+      set_vinfo_for_stmt (incr, new_stmt_vec_info (incr, loop_vinfo));
 
       /* Copy the points-to information if it exists. */
       if (DR_PTR_INFO (dr))
@@ -1230,16 +1247,15 @@ vect_create_data_ref_ptr (tree stmt, struct loop *at_loop,
 */
 
 static tree
-bump_vector_ptr (tree dataref_ptr, tree ptr_incr, block_stmt_iterator *bsi,
-                 tree stmt, tree bump)
+bump_vector_ptr (tree dataref_ptr, gimple ptr_incr, gimple_stmt_iterator *gsi,
+                gimple stmt, tree bump)
 {
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
   struct data_reference *dr = STMT_VINFO_DATA_REF (stmt_info);
   tree vectype = STMT_VINFO_VECTYPE (stmt_info);
-  tree vptr_type = TREE_TYPE (dataref_ptr);
   tree ptr_var = SSA_NAME_VAR (dataref_ptr);
   tree update = TYPE_SIZE_UNIT (vectype);
-  tree incr_stmt;
+  gimple incr_stmt;
   ssa_op_iter iter;
   use_operand_p use_p;
   tree new_dataref_ptr;
@@ -1247,12 +1263,11 @@ bump_vector_ptr (tree dataref_ptr, tree ptr_incr, block_stmt_iterator *bsi,
   if (bump)
     update = bump;
     
-  incr_stmt = build_gimple_modify_stmt (ptr_var,
-                                       build2 (POINTER_PLUS_EXPR, vptr_type,
-                                               dataref_ptr, update));
+  incr_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, ptr_var,
+                                           dataref_ptr, update);
   new_dataref_ptr = make_ssa_name (ptr_var, incr_stmt);
-  GIMPLE_STMT_OPERAND (incr_stmt, 0) = new_dataref_ptr;
-  vect_finish_stmt_generation (stmt, incr_stmt, bsi);
+  gimple_assign_set_lhs (incr_stmt, new_dataref_ptr);
+  vect_finish_stmt_generation (stmt, incr_stmt, gsi);
 
   /* Copy the points-to information if it exists. */
   if (DR_PTR_INFO (dr))
@@ -1313,12 +1328,12 @@ vect_create_destination_var (tree scalar_dest, tree vectype)
    It will be used in the vectorization of STMT.  */
 
 static tree
-vect_init_vector (tree stmt, tree vector_var, tree vector_type,
-                 block_stmt_iterator *bsi)
+vect_init_vector (gimple stmt, tree vector_var, tree vector_type,
+                 gimple_stmt_iterator *gsi)
 {
   stmt_vec_info stmt_vinfo = vinfo_for_stmt (stmt);
   tree new_var;
-  tree init_stmt;
+  gimple init_stmt;
   tree vec_oprnd;
   edge pe;
   tree new_temp;
@@ -1326,12 +1341,12 @@ vect_init_vector (tree stmt, tree vector_var, tree vector_type,
  
   new_var = vect_get_new_vect_var (vector_type, vect_simple_var, "cst_");
   add_referenced_var (new_var); 
-  init_stmt = build_gimple_modify_stmt (new_var, vector_var);
+  init_stmt = gimple_build_assign (new_var, vector_var);
   new_temp = make_ssa_name (new_var, init_stmt);
-  GIMPLE_STMT_OPERAND (init_stmt, 0) = new_temp;
+  gimple_assign_set_lhs (init_stmt, new_temp);
 
-  if (bsi)
-    vect_finish_stmt_generation (stmt, init_stmt, bsi);
+  if (gsi)
+    vect_finish_stmt_generation (stmt, init_stmt, gsi);
   else
     {
       loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
@@ -1340,17 +1355,17 @@ vect_init_vector (tree stmt, tree vector_var, tree vector_type,
       if (nested_in_vect_loop_p (loop, stmt))
         loop = loop->inner;
       pe = loop_preheader_edge (loop);
-      new_bb = bsi_insert_on_edge_immediate (pe, init_stmt);
+      new_bb = gsi_insert_on_edge_immediate (pe, init_stmt);
       gcc_assert (!new_bb);
     }
 
   if (vect_print_dump_info (REPORT_DETAILS))
     {
       fprintf (vect_dump, "created new init_stmt: ");
-      print_generic_expr (vect_dump, init_stmt, TDF_SLIM);
+      print_gimple_stmt (vect_dump, init_stmt, 0, TDF_SLIM);
     }
 
-  vec_oprnd = GIMPLE_STMT_OPERAND (init_stmt, 0);
+  vec_oprnd = gimple_assign_lhs (init_stmt);
   return vec_oprnd;
 }
 
@@ -1364,8 +1379,8 @@ static void
 vect_get_constant_vectors (slp_tree slp_node, VEC(tree,heap) **vec_oprnds,
                           unsigned int op_num)
 {
-  VEC (tree, heap) *stmts = SLP_TREE_SCALAR_STMTS (slp_node);
-  tree stmt = VEC_index (tree, stmts, 0);
+  VEC (gimple, heap) *stmts = SLP_TREE_SCALAR_STMTS (slp_node);
+  gimple stmt = VEC_index (gimple, stmts, 0);
   stmt_vec_info stmt_vinfo = vinfo_for_stmt (stmt);
   tree vectype = STMT_VINFO_VECTYPE (stmt_vinfo);
   int nunits = TYPE_VECTOR_SUBPARTS (vectype);
@@ -1373,8 +1388,8 @@ vect_get_constant_vectors (slp_tree slp_node, VEC(tree,heap) **vec_oprnds,
   tree t = NULL_TREE;
   int j, number_of_places_left_in_vector;
   tree vector_type;
-  tree op, vop, operation;
-  int group_size = VEC_length (tree, stmts);
+  tree op, vop;
+  int group_size = VEC_length (gimple, stmts);
   unsigned int vec_num, i;
   int number_of_copies = 1;
   bool is_store = false;
@@ -1407,13 +1422,12 @@ vect_get_constant_vectors (slp_tree slp_node, VEC(tree,heap) **vec_oprnds,
   constant_p = true;
   for (j = 0; j < number_of_copies; j++)
     {
-      for (i = group_size - 1; VEC_iterate (tree, stmts, i, stmt); i--)
+      for (i = group_size - 1; VEC_iterate (gimple, stmts, i, stmt); i--)
         {
-          operation = GIMPLE_STMT_OPERAND (stmt, 1);
          if (is_store)
-           op = operation;
+           op = gimple_assign_rhs1 (stmt);
          else
-           op = TREE_OPERAND (operation, op_num);
+           op = gimple_op (stmt, op_num + 1);
          if (!CONSTANT_CLASS_P (op))
            constant_p = false;
 
@@ -1471,17 +1485,17 @@ static void
 vect_get_slp_vect_defs (slp_tree slp_node, VEC (tree,heap) **vec_oprnds)
 {
   tree vec_oprnd;
-  tree vec_def_stmt;
+  gimple vec_def_stmt;
   unsigned int i;
 
   gcc_assert (SLP_TREE_VEC_STMTS (slp_node));
 
   for (i = 0; 
-       VEC_iterate (tree, SLP_TREE_VEC_STMTS (slp_node), i, vec_def_stmt); 
+       VEC_iterate (gimple, SLP_TREE_VEC_STMTS (slp_node), i, vec_def_stmt);
        i++)
     {
       gcc_assert (vec_def_stmt);
-      vec_oprnd = GIMPLE_STMT_OPERAND (vec_def_stmt, 0);
+      vec_oprnd = gimple_get_lhs (vec_def_stmt);
       VEC_quick_push (tree, *vec_oprnds, vec_oprnd);
     }
 }
@@ -1500,7 +1514,8 @@ static void
 vect_get_slp_defs (slp_tree slp_node, VEC (tree,heap) **vec_oprnds0,
                    VEC (tree,heap) **vec_oprnds1)
 {
-  tree operation, first_stmt;
+  gimple first_stmt;
+  enum tree_code code;
 
   /* Allocate memory for vectorized defs.  */
   *vec_oprnds0 = VEC_alloc (tree, heap, 
@@ -1515,14 +1530,14 @@ vect_get_slp_defs (slp_tree slp_node, VEC (tree,heap) **vec_oprnds0,
     /* Build vectors from scalar defs.  */
     vect_get_constant_vectors (slp_node, vec_oprnds0, 0);
 
-  first_stmt = VEC_index (tree, SLP_TREE_SCALAR_STMTS (slp_node), 0);
+  first_stmt = VEC_index (gimple, SLP_TREE_SCALAR_STMTS (slp_node), 0);
   if (STMT_VINFO_DATA_REF (vinfo_for_stmt (first_stmt)))
     /* Since we don't call this function with loads, this is a group of 
        stores.  */
     return;
 
-  operation = GIMPLE_STMT_OPERAND (first_stmt, 1);
-  if (TREE_OPERAND_LENGTH (operation) == unary_op || !vec_oprnds1)
+  code = gimple_assign_rhs_code (first_stmt);
+  if (get_gimple_rhs_class (code) != GIMPLE_BINARY_RHS || !vec_oprnds1)
     return;
 
   *vec_oprnds1 = VEC_alloc (tree, heap, 
@@ -1550,12 +1565,12 @@ vect_get_slp_defs (slp_tree slp_node, VEC (tree,heap) **vec_oprnds0,
    [X, X + S, X + 2*S, X + 3*S].  */
 
 static tree
-get_initial_def_for_induction (tree iv_phi)
+get_initial_def_for_induction (gimple iv_phi)
 {
   stmt_vec_info stmt_vinfo = vinfo_for_stmt (iv_phi);
   loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
   struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
-  tree scalar_type = TREE_TYPE (PHI_RESULT_TREE (iv_phi));
+  tree scalar_type = TREE_TYPE (gimple_phi_result (iv_phi));
   tree vectype; 
   int nunits;
   edge pe = loop_preheader_edge (loop);
@@ -1565,8 +1580,8 @@ get_initial_def_for_induction (tree iv_phi)
   tree access_fn;
   tree new_var;
   tree new_name;
-  tree init_stmt;
-  tree induction_phi, induc_def, new_stmt, vec_def, vec_dest;
+  gimple init_stmt, induction_phi, new_stmt;
+  tree induc_def, vec_def, vec_dest;
   tree init_expr, step_expr;
   int vf = LOOP_VINFO_VECT_FACTOR (loop_vinfo);
   int i;
@@ -1575,14 +1590,14 @@ get_initial_def_for_induction (tree iv_phi)
   tree expr;
   stmt_vec_info phi_info = vinfo_for_stmt (iv_phi);
   bool nested_in_vect_loop = false;
-  tree stmts;
+  gimple_seq stmts = NULL;
   imm_use_iterator imm_iter;
   use_operand_p use_p;
-  tree exit_phi;
+  gimple exit_phi;
   edge latch_e;
   tree loop_arg;
-  block_stmt_iterator si;
-  basic_block bb = bb_for_stmt (iv_phi);
+  gimple_stmt_iterator si;
+  basic_block bb = gimple_bb (iv_phi);
 
   vectype = get_vectype_for_scalar_type (scalar_type);
   gcc_assert (vectype);
@@ -1593,9 +1608,9 @@ get_initial_def_for_induction (tree iv_phi)
   gcc_assert (ncopies >= 1);
 
   /* Find the first insertion point in the BB.  */
-  si = bsi_after_labels (bb);
+  si = gsi_after_labels (bb);
 
-  if (INTEGRAL_TYPE_P (scalar_type))
+  if (INTEGRAL_TYPE_P (scalar_type) || POINTER_TYPE_P (scalar_type))
     step_expr = build_int_cst (scalar_type, 0);
   else
     step_expr = build_real (scalar_type, dconst0);
@@ -1608,7 +1623,7 @@ get_initial_def_for_induction (tree iv_phi)
     }
   else
     iv_loop = loop;
-  gcc_assert (iv_loop == (bb_for_stmt (iv_phi))->loop_father);
+  gcc_assert (iv_loop == (gimple_bb (iv_phi))->loop_father);
 
   latch_e = loop_latch_edge (iv_loop);
   loop_arg = PHI_ARG_DEF_FROM_EDGE (iv_phi, latch_e);
@@ -1639,7 +1654,7 @@ get_initial_def_for_induction (tree iv_phi)
       new_name = force_gimple_operand (init_expr, &stmts, false, new_var);
       if (stmts)
        {
-         new_bb = bsi_insert_on_edge_immediate (pe, stmts);
+         new_bb = gsi_insert_seq_on_edge_immediate (pe, stmts);
          gcc_assert (!new_bb);
        }
 
@@ -1647,21 +1662,21 @@ get_initial_def_for_induction (tree iv_phi)
       t = tree_cons (NULL_TREE, init_expr, t);
       for (i = 1; i < nunits; i++)
        {
-         tree tmp;
-
          /* Create: new_name_i = new_name + step_expr  */
-         tmp = fold_build2 (PLUS_EXPR, scalar_type, new_name, step_expr);
-         init_stmt = build_gimple_modify_stmt (new_var, tmp);
+         enum tree_code code = POINTER_TYPE_P (scalar_type)
+                               ? POINTER_PLUS_EXPR : PLUS_EXPR;
+         init_stmt = gimple_build_assign_with_ops (code, new_var,
+                                                   new_name, step_expr);
          new_name = make_ssa_name (new_var, init_stmt);
-         GIMPLE_STMT_OPERAND (init_stmt, 0) = new_name;
+         gimple_assign_set_lhs (init_stmt, new_name);
 
-         new_bb = bsi_insert_on_edge_immediate (pe, init_stmt);
+         new_bb = gsi_insert_on_edge_immediate (pe, init_stmt);
          gcc_assert (!new_bb);
 
          if (vect_print_dump_info (REPORT_DETAILS))
            {
              fprintf (vect_dump, "created new init_stmt: ");
-             print_generic_expr (vect_dump, init_stmt, TDF_SLIM);
+             print_gimple_stmt (vect_dump, init_stmt, 0, TDF_SLIM);
            }
          t = tree_cons (NULL_TREE, new_name, t);
        }
@@ -1707,19 +1722,17 @@ get_initial_def_for_induction (tree iv_phi)
   vec_dest = vect_get_new_vect_var (vectype, vect_simple_var, "vec_iv_");
   add_referenced_var (vec_dest);
   induction_phi = create_phi_node (vec_dest, iv_loop->header);
-  set_stmt_info (get_stmt_ann (induction_phi),
-                 new_stmt_vec_info (induction_phi, loop_vinfo));
+  set_vinfo_for_stmt (induction_phi,
+                     new_stmt_vec_info (induction_phi, loop_vinfo));
   induc_def = PHI_RESULT (induction_phi);
 
   /* Create the iv update inside the loop  */
-  new_stmt = build_gimple_modify_stmt (NULL_TREE,
-                                      build2 (PLUS_EXPR, vectype,
-                                              induc_def, vec_step));
+  new_stmt = gimple_build_assign_with_ops (PLUS_EXPR, vec_dest,
+                                          induc_def, vec_step);
   vec_def = make_ssa_name (vec_dest, new_stmt);
-  GIMPLE_STMT_OPERAND (new_stmt, 0) = vec_def;
-  bsi_insert_before (&si, new_stmt, BSI_SAME_STMT);
-  set_stmt_info (get_stmt_ann (new_stmt),
-                new_stmt_vec_info (new_stmt, loop_vinfo));
+  gimple_assign_set_lhs (new_stmt, vec_def);
+  gsi_insert_before (&si, new_stmt, GSI_SAME_STMT);
+  set_vinfo_for_stmt (new_stmt, new_stmt_vec_info (new_stmt, loop_vinfo));
 
   /* Set the arguments of the phi node:  */
   add_phi_arg (induction_phi, vec_init, pe);
@@ -1752,16 +1765,15 @@ get_initial_def_for_induction (tree iv_phi)
       prev_stmt_vinfo = vinfo_for_stmt (induction_phi);
       for (i = 1; i < ncopies; i++)
        {
-         tree tmp;
-
          /* vec_i = vec_prev + vec_step  */
-         tmp = build2 (PLUS_EXPR, vectype, vec_def, vec_step);
-         new_stmt = build_gimple_modify_stmt (NULL_TREE, tmp);
+         new_stmt = gimple_build_assign_with_ops (PLUS_EXPR, vec_dest,
+                                                  vec_def, vec_step);
          vec_def = make_ssa_name (vec_dest, new_stmt);
-         GIMPLE_STMT_OPERAND (new_stmt, 0) = vec_def;
-         bsi_insert_before (&si, new_stmt, BSI_SAME_STMT);
-         set_stmt_info (get_stmt_ann (new_stmt),
-                        new_stmt_vec_info (new_stmt, loop_vinfo));
+         gimple_assign_set_lhs (new_stmt, vec_def);
+
+         gsi_insert_before (&si, new_stmt, GSI_SAME_STMT);
+         set_vinfo_for_stmt (new_stmt,
+                             new_stmt_vec_info (new_stmt, loop_vinfo));
          STMT_VINFO_RELATED_STMT (prev_stmt_vinfo) = new_stmt;
          prev_stmt_vinfo = vinfo_for_stmt (new_stmt); 
        }
@@ -1774,7 +1786,7 @@ get_initial_def_for_induction (tree iv_phi)
       exit_phi = NULL;
       FOR_EACH_IMM_USE_FAST (use_p, imm_iter, loop_arg)
         {
-         if (!flow_bb_inside_loop_p (iv_loop, bb_for_stmt (USE_STMT (use_p))))
+         if (!flow_bb_inside_loop_p (iv_loop, gimple_bb (USE_STMT (use_p))))
            {
              exit_phi = USE_STMT (use_p);
              break;
@@ -1792,7 +1804,7 @@ get_initial_def_for_induction (tree iv_phi)
          if (vect_print_dump_info (REPORT_DETAILS))
            {
              fprintf (vect_dump, "vector of inductions after inner-loop:");
-             print_generic_expr (vect_dump, new_stmt, TDF_SLIM);
+             print_gimple_stmt (vect_dump, new_stmt, 0, TDF_SLIM);
            }
        }
     }
@@ -1801,9 +1813,9 @@ get_initial_def_for_induction (tree iv_phi)
   if (vect_print_dump_info (REPORT_DETAILS))
     {
       fprintf (vect_dump, "transform induction: created def-use cycle:");
-      print_generic_expr (vect_dump, induction_phi, TDF_SLIM);
+      print_gimple_stmt (vect_dump, induction_phi, 0, TDF_SLIM);
       fprintf (vect_dump, "\n");
-      print_generic_expr (vect_dump, SSA_NAME_DEF_STMT (vec_def), TDF_SLIM);
+      print_gimple_stmt (vect_dump, SSA_NAME_DEF_STMT (vec_def), 0, TDF_SLIM);
     }
 
   STMT_VINFO_VEC_STMT (phi_info) = induction_phi;
@@ -1823,11 +1835,11 @@ get_initial_def_for_induction (tree iv_phi)
    needs to be introduced.  */
 
 static tree
-vect_get_vec_def_for_operand (tree op, tree stmt, tree *scalar_def)
+vect_get_vec_def_for_operand (tree op, gimple stmt, tree *scalar_def)
 {
   tree vec_oprnd;
-  tree vec_stmt;
-  tree def_stmt;
+  gimple vec_stmt;
+  gimple def_stmt;
   stmt_vec_info def_stmt_info = NULL;
   stmt_vec_info stmt_vinfo = vinfo_for_stmt (stmt);
   tree vectype = STMT_VINFO_VECTYPE (stmt_vinfo);
@@ -1860,7 +1872,7 @@ vect_get_vec_def_for_operand (tree op, tree stmt, tree *scalar_def)
       if (def_stmt)
         {
           fprintf (vect_dump, "  def_stmt =  ");
-          print_generic_expr (vect_dump, def_stmt, TDF_SLIM);
+         print_gimple_stmt (vect_dump, def_stmt, 0, TDF_SLIM);
         }
     }
 
@@ -1913,16 +1925,18 @@ vect_get_vec_def_for_operand (tree op, tree stmt, tree *scalar_def)
     case vect_loop_def:
       {
        if (scalar_def) 
-         *scalar_def = def_stmt;
+         *scalar_def = NULL /* FIXME tuples: def_stmt */;
 
         /* Get the def from the vectorized stmt.  */
         def_stmt_info = vinfo_for_stmt (def_stmt);
         vec_stmt = STMT_VINFO_VEC_STMT (def_stmt_info);
         gcc_assert (vec_stmt);
-       if (TREE_CODE (vec_stmt) == PHI_NODE)
+       if (gimple_code (vec_stmt) == GIMPLE_PHI)
          vec_oprnd = PHI_RESULT (vec_stmt);
+       else if (is_gimple_call (vec_stmt))
+         vec_oprnd = gimple_call_lhs (vec_stmt);
        else
-         vec_oprnd = GIMPLE_STMT_OPERAND (vec_stmt, 0);
+         vec_oprnd = gimple_assign_lhs (vec_stmt);
         return vec_oprnd;
       }
 
@@ -1931,8 +1945,8 @@ vect_get_vec_def_for_operand (tree op, tree stmt, tree *scalar_def)
       {
        struct loop *loop;
 
-        gcc_assert (TREE_CODE (def_stmt) == PHI_NODE);
-       loop = (bb_for_stmt (def_stmt))->loop_father; 
+       gcc_assert (gimple_code (def_stmt) == GIMPLE_PHI);
+       loop = (gimple_bb (def_stmt))->loop_father; 
 
         /* Get the def before the loop  */
         op = PHI_ARG_DEF_FROM_EDGE (def_stmt, loop_preheader_edge (loop));
@@ -1942,12 +1956,12 @@ vect_get_vec_def_for_operand (tree op, tree stmt, tree *scalar_def)
     /* Case 5: operand is defined by loop-header phi - induction.  */
     case vect_induction_def:
       {
-       gcc_assert (TREE_CODE (def_stmt) == PHI_NODE);
+       gcc_assert (gimple_code (def_stmt) == GIMPLE_PHI);
 
         /* Get the def from the vectorized stmt.  */
         def_stmt_info = vinfo_for_stmt (def_stmt);
         vec_stmt = STMT_VINFO_VEC_STMT (def_stmt_info);
-        gcc_assert (vec_stmt && (TREE_CODE (vec_stmt) == PHI_NODE));
+       gcc_assert (vec_stmt && gimple_code (vec_stmt) == GIMPLE_PHI);
         vec_oprnd = PHI_RESULT (vec_stmt);
         return vec_oprnd;
       }
@@ -2017,7 +2031,7 @@ vect_get_vec_def_for_operand (tree op, tree stmt, tree *scalar_def)
 static tree
 vect_get_vec_def_for_stmt_copy (enum vect_def_type dt, tree vec_oprnd)
 {
-  tree vec_stmt_for_operand;
+  gimple vec_stmt_for_operand;
   stmt_vec_info def_stmt_info;
 
   /* Do nothing; can reuse same def.  */
@@ -2029,7 +2043,7 @@ vect_get_vec_def_for_stmt_copy (enum vect_def_type dt, tree vec_oprnd)
   gcc_assert (def_stmt_info);
   vec_stmt_for_operand = STMT_VINFO_RELATED_STMT (def_stmt_info);
   gcc_assert (vec_stmt_for_operand);
-  vec_oprnd = GIMPLE_STMT_OPERAND (vec_stmt_for_operand, 0);
+  vec_oprnd = gimple_get_lhs (vec_stmt_for_operand);
   return vec_oprnd;
 }
 
@@ -2059,8 +2073,9 @@ vect_get_vec_defs_for_stmt_copy (enum vect_def_type *dt,
 /* Get vectorized definitions for OP0 and OP1, or SLP_NODE if it is not NULL.  */
 
 static void
-vect_get_vec_defs (tree op0, tree op1, tree stmt, VEC(tree,heap) **vec_oprnds0, 
-                  VEC(tree,heap) **vec_oprnds1, slp_tree slp_node)
+vect_get_vec_defs (tree op0, tree op1, gimple stmt,
+                  VEC(tree,heap) **vec_oprnds0, VEC(tree,heap) **vec_oprnds1,
+                  slp_tree slp_node)
 {
   if (slp_node)
     vect_get_slp_defs (slp_node, vec_oprnds0, vec_oprnds1);
@@ -2087,30 +2102,29 @@ vect_get_vec_defs (tree op0, tree op1, tree stmt, VEC(tree,heap) **vec_oprnds0,
    Insert a new stmt.  */
 
 static void
-vect_finish_stmt_generation (tree stmt, tree vec_stmt, 
-                            block_stmt_iterator *bsi)
+vect_finish_stmt_generation (gimple stmt, gimple vec_stmt,
+                            gimple_stmt_iterator *gsi)
 {
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
   loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
 
-  gcc_assert (stmt == bsi_stmt (*bsi));
-  gcc_assert (TREE_CODE (stmt) != LABEL_EXPR);
+  gcc_assert (stmt == gsi_stmt (*gsi));
+  gcc_assert (gimple_code (stmt) != GIMPLE_LABEL);
 
-  bsi_insert_before (bsi, vec_stmt, BSI_SAME_STMT);
+  gsi_insert_before (gsi, vec_stmt, GSI_SAME_STMT);
 
-  set_stmt_info (get_stmt_ann (vec_stmt), 
-                new_stmt_vec_info (vec_stmt, loop_vinfo)); 
+  set_vinfo_for_stmt (vec_stmt, new_stmt_vec_info (vec_stmt, loop_vinfo));
 
   if (vect_print_dump_info (REPORT_DETAILS))
     {
       fprintf (vect_dump, "add new stmt: ");
-      print_generic_expr (vect_dump, vec_stmt, TDF_SLIM);
+      print_gimple_stmt (vect_dump, vec_stmt, 0, TDF_SLIM);
     }
 
-  /* Make sure bsi points to the stmt that is being vectorized.  */
-  gcc_assert (stmt == bsi_stmt (*bsi));
+  /* Make sure gsi points to the stmt that is being vectorized.  */
+  gcc_assert (stmt == gsi_stmt (*gsi));
 
-  SET_EXPR_LOCATION (vec_stmt, EXPR_LOCATION (stmt));
+  gimple_set_location (vec_stmt, gimple_location (stmt));
 }
 
 
@@ -2158,14 +2172,14 @@ vect_finish_stmt_generation (tree stmt, tree vec_stmt,
    A cost model should help decide between these two schemes.  */
 
 static tree
-get_initial_def_for_reduction (tree stmt, tree init_val, tree *adjustment_def)
+get_initial_def_for_reduction (gimple stmt, tree init_val, tree *adjustment_def)
 {
   stmt_vec_info stmt_vinfo = vinfo_for_stmt (stmt);
   loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_vinfo);
   struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
   tree vectype = STMT_VINFO_VECTYPE (stmt_vinfo);
   int nunits =  TYPE_VECTOR_SUBPARTS (vectype);
-  enum tree_code code = TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1));
+  enum tree_code code = gimple_assign_rhs_code (stmt);
   tree type = TREE_TYPE (init_val);
   tree vecdef;
   tree def_for_init;
@@ -2179,7 +2193,7 @@ get_initial_def_for_reduction (tree stmt, tree init_val, tree *adjustment_def)
   if (nested_in_vect_loop_p (loop, stmt))
     nested_in_vect_loop = true;
   else
-    gcc_assert (loop == (bb_for_stmt (stmt))->loop_father);
+    gcc_assert (loop == (gimple_bb (stmt))->loop_father);
 
   vecdef = vect_get_vec_def_for_operand (init_val, stmt, NULL);
 
@@ -2267,8 +2281,9 @@ get_initial_def_for_reduction (tree stmt, tree init_val, tree *adjustment_def)
 */
 
 static void
-vect_create_epilog_for_reduction (tree vect_def, tree stmt,
-                                  enum tree_code reduc_code, tree reduction_phi)
+vect_create_epilog_for_reduction (tree vect_def, gimple stmt,
+                                 enum tree_code reduc_code,
+                                 gimple reduction_phi)
 {
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
   tree vectype;
@@ -2278,15 +2293,16 @@ vect_create_epilog_for_reduction (tree vect_def, tree stmt,
   basic_block exit_bb;
   tree scalar_dest;
   tree scalar_type;
-  tree new_phi;
-  block_stmt_iterator exit_bsi;
+  gimple new_phi;
+  gimple_stmt_iterator exit_gsi;
   tree vec_dest;
   tree new_temp = NULL_TREE;
   tree new_name;
-  tree epilog_stmt = NULL_TREE;
-  tree new_scalar_dest, exit_phi, new_dest;
+  gimple epilog_stmt = NULL;
+  tree new_scalar_dest, new_dest;
+  gimple exit_phi;
   tree bitsize, bitpos, bytesize; 
-  enum tree_code code = TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1));
+  enum tree_code code = gimple_assign_rhs_code (stmt);
   tree adjustment_def;
   tree vec_initial_def;
   tree orig_name;
@@ -2294,12 +2310,10 @@ vect_create_epilog_for_reduction (tree vect_def, tree stmt,
   use_operand_p use_p;
   bool extract_scalar_result = false;
   tree reduction_op, expr;
-  tree orig_stmt;
-  tree use_stmt;
-  tree operation = GIMPLE_STMT_OPERAND (stmt, 1);
+  gimple orig_stmt;
+  gimple use_stmt;
   bool nested_in_vect_loop = false;
-  int op_type;
-  VEC(tree,heap) *phis = NULL;
+  VEC(gimple,heap) *phis = NULL;
   int i;
   
   if (nested_in_vect_loop_p (loop, stmt))
@@ -2308,8 +2322,22 @@ vect_create_epilog_for_reduction (tree vect_def, tree stmt,
       nested_in_vect_loop = true;
     }
   
-  op_type = TREE_OPERAND_LENGTH (operation);
-  reduction_op = TREE_OPERAND (operation, op_type-1);
+  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
+    {
+    case GIMPLE_SINGLE_RHS:
+      gcc_assert (TREE_OPERAND_LENGTH (gimple_assign_rhs1 (stmt)) == ternary_op);
+      reduction_op = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
+      break;
+    case GIMPLE_UNARY_RHS:
+      reduction_op = gimple_assign_rhs1 (stmt);
+      break;
+    case GIMPLE_BINARY_RHS:
+      reduction_op = gimple_assign_rhs2 (stmt);
+      break;
+    default:
+      gcc_unreachable ();
+    }
+
   vectype = get_vectype_for_scalar_type (TREE_TYPE (reduction_op));
   gcc_assert (vectype);
   mode = TYPE_MODE (vectype);
@@ -2330,9 +2358,9 @@ vect_create_epilog_for_reduction (tree vect_def, tree stmt,
   if (vect_print_dump_info (REPORT_DETAILS))
     {
       fprintf (vect_dump, "transform reduction: created def-use cycle:");
-      print_generic_expr (vect_dump, reduction_phi, TDF_SLIM);
+      print_gimple_stmt (vect_dump, reduction_phi, 0, TDF_SLIM);
       fprintf (vect_dump, "\n");
-      print_generic_expr (vect_dump, SSA_NAME_DEF_STMT (vect_def), TDF_SLIM);
+      print_gimple_stmt (vect_dump, SSA_NAME_DEF_STMT (vect_def), 0, TDF_SLIM);
     }
 
 
@@ -2369,7 +2397,7 @@ vect_create_epilog_for_reduction (tree vect_def, tree stmt,
   exit_bb = single_exit (loop)->dest;
   new_phi = create_phi_node (SSA_NAME_VAR (vect_def), exit_bb);
   SET_PHI_ARG_DEF (new_phi, single_exit (loop)->dest_idx, vect_def);
-  exit_bsi = bsi_after_labels (exit_bb);
+  exit_gsi = gsi_after_labels (exit_bb);
 
   /* 2.2 Get the relevant tree-code to use in the epilog for schemes 2,3 
          (i.e. when reduc_code is not available) and in the final adjustment
@@ -2393,8 +2421,8 @@ vect_create_epilog_for_reduction (tree vect_def, tree stmt,
       gcc_assert (STMT_VINFO_IN_PATTERN_P (stmt_vinfo));
       gcc_assert (STMT_VINFO_RELATED_STMT (stmt_vinfo) == stmt);
     }
-  code = TREE_CODE (GIMPLE_STMT_OPERAND (orig_stmt, 1));
-  scalar_dest = GIMPLE_STMT_OPERAND (orig_stmt, 0);
+  code = gimple_assign_rhs_code (orig_stmt);
+  scalar_dest = gimple_assign_lhs (orig_stmt);
   scalar_type = TREE_TYPE (scalar_dest);
   new_scalar_dest = vect_create_destination_var (scalar_dest, NULL);
   bitsize = TYPE_SIZE (scalar_type);
@@ -2424,10 +2452,10 @@ vect_create_epilog_for_reduction (tree vect_def, tree stmt,
 
       vec_dest = vect_create_destination_var (scalar_dest, vectype);
       tmp = build1 (reduc_code, vectype,  PHI_RESULT (new_phi));
-      epilog_stmt = build_gimple_modify_stmt (vec_dest, tmp);
+      epilog_stmt = gimple_build_assign (vec_dest, tmp);
       new_temp = make_ssa_name (vec_dest, epilog_stmt);
-      GIMPLE_STMT_OPERAND (epilog_stmt, 0) = new_temp;
-      bsi_insert_before (&exit_bsi, epilog_stmt, BSI_SAME_STMT);
+      gimple_assign_set_lhs (epilog_stmt, new_temp);
+      gsi_insert_before (&exit_gsi, epilog_stmt, GSI_SAME_STMT);
 
       extract_scalar_result = true;
     }
@@ -2480,17 +2508,17 @@ vect_create_epilog_for_reduction (tree vect_def, tree stmt,
               bit_offset /= 2)
            {
              tree bitpos = size_int (bit_offset);
-             tree tmp = build2 (shift_code, vectype, new_temp, bitpos);
-             epilog_stmt = build_gimple_modify_stmt (vec_dest, tmp);
+             epilog_stmt = gimple_build_assign_with_ops (shift_code, vec_dest,
+                                                         new_temp, bitpos);
              new_name = make_ssa_name (vec_dest, epilog_stmt);
-             GIMPLE_STMT_OPERAND (epilog_stmt, 0) = new_name;
-             bsi_insert_before (&exit_bsi, epilog_stmt, BSI_SAME_STMT);
+             gimple_assign_set_lhs (epilog_stmt, new_name);
+             gsi_insert_before (&exit_gsi, epilog_stmt, GSI_SAME_STMT);
 
-             tmp = build2 (code, vectype, new_name, new_temp);
-             epilog_stmt = build_gimple_modify_stmt (vec_dest, tmp);
+             epilog_stmt = gimple_build_assign_with_ops (code, vec_dest,
+                                                         new_name, new_temp);
              new_temp = make_ssa_name (vec_dest, epilog_stmt);
-             GIMPLE_STMT_OPERAND (epilog_stmt, 0) = new_temp;
-             bsi_insert_before (&exit_bsi, epilog_stmt, BSI_SAME_STMT);
+             gimple_assign_set_lhs (epilog_stmt, new_temp);
+             gsi_insert_before (&exit_gsi, epilog_stmt, GSI_SAME_STMT);
            }
 
          extract_scalar_result = true;
@@ -2516,30 +2544,30 @@ vect_create_epilog_for_reduction (tree vect_def, tree stmt,
          vec_size_in_bits = tree_low_cst (TYPE_SIZE (vectype), 1);
          rhs = build3 (BIT_FIELD_REF, scalar_type, vec_temp, bitsize,
                         bitsize_zero_node);
-         epilog_stmt = build_gimple_modify_stmt (new_scalar_dest, rhs);
+         epilog_stmt = gimple_build_assign (new_scalar_dest, rhs);
          new_temp = make_ssa_name (new_scalar_dest, epilog_stmt);
-         GIMPLE_STMT_OPERAND (epilog_stmt, 0) = new_temp;
-         bsi_insert_before (&exit_bsi, epilog_stmt, BSI_SAME_STMT);
+         gimple_assign_set_lhs (epilog_stmt, new_temp);
+         gsi_insert_before (&exit_gsi, epilog_stmt, GSI_SAME_STMT);
              
          for (bit_offset = element_bitsize;
               bit_offset < vec_size_in_bits;
               bit_offset += element_bitsize)
            { 
-             tree tmp;
              tree bitpos = bitsize_int (bit_offset);
              tree rhs = build3 (BIT_FIELD_REF, scalar_type, vec_temp, bitsize,
                                 bitpos);
                
-             epilog_stmt = build_gimple_modify_stmt (new_scalar_dest, rhs);
+             epilog_stmt = gimple_build_assign (new_scalar_dest, rhs);
              new_name = make_ssa_name (new_scalar_dest, epilog_stmt);
-             GIMPLE_STMT_OPERAND (epilog_stmt, 0) = new_name;
-             bsi_insert_before (&exit_bsi, epilog_stmt, BSI_SAME_STMT);
+             gimple_assign_set_lhs (epilog_stmt, new_name);
+             gsi_insert_before (&exit_gsi, epilog_stmt, GSI_SAME_STMT);
 
-             tmp = build2 (code, scalar_type, new_name, new_temp);
-             epilog_stmt = build_gimple_modify_stmt (new_scalar_dest, tmp);
+             epilog_stmt = gimple_build_assign_with_ops (code,
+                                                         new_scalar_dest,
+                                                         new_name, new_temp);
              new_temp = make_ssa_name (new_scalar_dest, epilog_stmt);
-             GIMPLE_STMT_OPERAND (epilog_stmt, 0) = new_temp;
-             bsi_insert_before (&exit_bsi, epilog_stmt, BSI_SAME_STMT);
+             gimple_assign_set_lhs (epilog_stmt, new_temp);
+             gsi_insert_before (&exit_gsi, epilog_stmt, GSI_SAME_STMT);
            }
 
          extract_scalar_result = false;
@@ -2565,10 +2593,10 @@ vect_create_epilog_for_reduction (tree vect_def, tree stmt,
        bitpos = bitsize_zero_node;
 
       rhs = build3 (BIT_FIELD_REF, scalar_type, new_temp, bitsize, bitpos);
-      epilog_stmt = build_gimple_modify_stmt (new_scalar_dest, rhs);
+      epilog_stmt = gimple_build_assign (new_scalar_dest, rhs);
       new_temp = make_ssa_name (new_scalar_dest, epilog_stmt);
-      GIMPLE_STMT_OPERAND (epilog_stmt, 0) = new_temp; 
-      bsi_insert_before (&exit_bsi, epilog_stmt, BSI_SAME_STMT);
+      gimple_assign_set_lhs (epilog_stmt, new_temp);
+      gsi_insert_before (&exit_gsi, epilog_stmt, GSI_SAME_STMT);
     }
 
 vect_finalize_reduction:
@@ -2592,10 +2620,11 @@ vect_finalize_reduction:
          expr = build2 (code, scalar_type, new_temp, adjustment_def);
          new_dest = vect_create_destination_var (scalar_dest, scalar_type);
        }
-      epilog_stmt = build_gimple_modify_stmt (new_dest, expr);
+      epilog_stmt = gimple_build_assign (new_dest, expr);
       new_temp = make_ssa_name (new_dest, epilog_stmt);
-      GIMPLE_STMT_OPERAND (epilog_stmt, 0) = new_temp;
-      bsi_insert_before (&exit_bsi, epilog_stmt, BSI_SAME_STMT);
+      gimple_assign_set_lhs (epilog_stmt, new_temp);
+      SSA_NAME_DEF_STMT (new_temp) = epilog_stmt;
+      gsi_insert_before (&exit_gsi, epilog_stmt, GSI_SAME_STMT);
     }
 
 
@@ -2605,19 +2634,19 @@ vect_finalize_reduction:
      Find the loop-closed-use at the loop exit of the original scalar result.
      (The reduction result is expected to have two immediate uses - one at the 
      latch block, and one at the loop exit).  */
-  phis = VEC_alloc (tree, heap, 10);
+  phis = VEC_alloc (gimple, heap, 10);
   FOR_EACH_IMM_USE_FAST (use_p, imm_iter, scalar_dest)
     {
-      if (!flow_bb_inside_loop_p (loop, bb_for_stmt (USE_STMT (use_p))))
+      if (!flow_bb_inside_loop_p (loop, gimple_bb (USE_STMT (use_p))))
        {
          exit_phi = USE_STMT (use_p);
-         VEC_quick_push (tree, phis, exit_phi);
+         VEC_quick_push (gimple, phis, exit_phi);
        }
     }
   /* We expect to have found an exit_phi because of loop-closed-ssa form.  */
-  gcc_assert (!VEC_empty (tree, phis));
+  gcc_assert (!VEC_empty (gimple, phis));
 
-  for (i = 0; VEC_iterate (tree, phis, i, exit_phi); i++)
+  for (i = 0; VEC_iterate (gimple, phis, i, exit_phi); i++)
     {
       if (nested_in_vect_loop)
        {
@@ -2630,8 +2659,8 @@ vect_finalize_reduction:
 
          epilog_stmt = adjustment_def ? epilog_stmt :  new_phi;
          STMT_VINFO_VEC_STMT (stmt_vinfo) = epilog_stmt;
-         set_stmt_info (get_stmt_ann (epilog_stmt),
-         new_stmt_vec_info (epilog_stmt, loop_vinfo));
+         set_vinfo_for_stmt (epilog_stmt,
+                             new_stmt_vec_info (epilog_stmt, loop_vinfo));
          continue;
        }
 
@@ -2641,7 +2670,7 @@ vect_finalize_reduction:
        FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
          SET_USE (use_p, new_temp);
     }
-  VEC_free (tree, heap, phis);
+  VEC_free (gimple, heap, phis);
 } 
 
 
@@ -2686,28 +2715,28 @@ vect_finalize_reduction:
    does *NOT* necessarily hold for reduction patterns.  */
 
 bool
-vectorizable_reduction (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
+vectorizable_reduction (gimple stmt, gimple_stmt_iterator *gsi,
+                       gimple *vec_stmt)
 {
   tree vec_dest;
   tree scalar_dest;
-  tree op;
   tree loop_vec_def0 = NULL_TREE, loop_vec_def1 = NULL_TREE;
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
   tree vectype = STMT_VINFO_VECTYPE (stmt_info);
   loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
   struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
-  tree operation;
   enum tree_code code, orig_code, epilog_reduc_code = 0;
   enum machine_mode vec_mode;
   int op_type;
   optab optab, reduc_optab;
   tree new_temp = NULL_TREE;
-  tree def, def_stmt;
+  tree def;
+  gimple def_stmt;
   enum vect_def_type dt;
-  tree new_phi;
+  gimple new_phi;
   tree scalar_type;
   bool is_simple_use;
-  tree orig_stmt;
+  gimple orig_stmt;
   stmt_vec_info orig_stmt_info;
   tree expr = NULL_TREE;
   int i;
@@ -2715,8 +2744,9 @@ vectorizable_reduction (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
   int ncopies = LOOP_VINFO_VECT_FACTOR (loop_vinfo) / nunits;
   stmt_vec_info prev_stmt_info;
   tree reduc_def;
-  tree new_stmt = NULL_TREE;
+  gimple new_stmt = NULL;
   int j;
+  tree ops[3];
 
   if (nested_in_vect_loop_p (loop, stmt))
     {
@@ -2772,14 +2802,41 @@ vectorizable_reduction (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
         inside the loop body. The last operand is the reduction variable,
         which is defined by the loop-header-phi.  */
 
-  gcc_assert (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT);
+  gcc_assert (is_gimple_assign (stmt));
 
-  operation = GIMPLE_STMT_OPERAND (stmt, 1);
-  code = TREE_CODE (operation);
-  op_type = TREE_OPERAND_LENGTH (operation);
-  if (op_type != binary_op && op_type != ternary_op)
-    return false;
-  scalar_dest = GIMPLE_STMT_OPERAND (stmt, 0);
+  /* Flatten RHS.  */
+  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
+    {
+    case GIMPLE_SINGLE_RHS:
+      op_type = TREE_OPERAND_LENGTH (gimple_assign_rhs1 (stmt));
+      if (op_type == ternary_op)
+       {
+         tree rhs = gimple_assign_rhs1 (stmt);
+         ops[0] = TREE_OPERAND (rhs, 0);
+         ops[1] = TREE_OPERAND (rhs, 1);
+         ops[2] = TREE_OPERAND (rhs, 2);
+         code = TREE_CODE (rhs);
+       }
+      else
+       return false;
+      break;
+
+    case GIMPLE_BINARY_RHS:
+      code = gimple_assign_rhs_code (stmt);
+      op_type = TREE_CODE_LENGTH (code);
+      gcc_assert (op_type == binary_op);
+      ops[0] = gimple_assign_rhs1 (stmt);
+      ops[1] = gimple_assign_rhs2 (stmt);
+      break;
+
+    case GIMPLE_UNARY_RHS:
+      return false;
+
+    default:
+      gcc_unreachable ();
+    }
+
+  scalar_dest = gimple_assign_lhs (stmt);
   scalar_type = TREE_TYPE (scalar_dest);
   if (!POINTER_TYPE_P (scalar_type) && !INTEGRAL_TYPE_P (scalar_type) 
       && !SCALAR_FLOAT_TYPE_P (scalar_type))
@@ -2789,8 +2846,8 @@ vectorizable_reduction (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
      The last use is the reduction variable.  */
   for (i = 0; i < op_type-1; i++)
     {
-      op = TREE_OPERAND (operation, i);
-      is_simple_use = vect_is_simple_use (op, loop_vinfo, &def_stmt, &def, &dt);
+      is_simple_use = vect_is_simple_use (ops[i], loop_vinfo, &def_stmt,
+                                         &def, &dt);
       gcc_assert (is_simple_use);
       if (dt != vect_loop_def
          && dt != vect_invariant_def
@@ -2799,11 +2856,10 @@ vectorizable_reduction (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
        return false;
     }
 
-  op = TREE_OPERAND (operation, i);
-  is_simple_use = vect_is_simple_use (op, loop_vinfo, &def_stmt, &def, &dt);
+  is_simple_use = vect_is_simple_use (ops[i], loop_vinfo, &def_stmt, &def, &dt);
   gcc_assert (is_simple_use);
   gcc_assert (dt == vect_reduction_def);
-  gcc_assert (TREE_CODE (def_stmt) == PHI_NODE);
+  gcc_assert (gimple_code (def_stmt) == GIMPLE_PHI);
   if (orig_stmt) 
     gcc_assert (orig_stmt == vect_is_simple_reduction (loop_vinfo, def_stmt));
   else
@@ -2883,7 +2939,7 @@ vectorizable_reduction (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
     {
       /* This is a reduction pattern: get the vectype from the type of the
          reduction variable, and get the tree-code from orig_stmt.  */
-      orig_code = TREE_CODE (GIMPLE_STMT_OPERAND (orig_stmt, 1));
+      orig_code = gimple_assign_rhs_code (orig_stmt);
       vectype = get_vectype_for_scalar_type (TREE_TYPE (def));
       if (!vectype)
        {
@@ -2951,12 +3007,10 @@ vectorizable_reduction (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
       /* Handle uses.  */
       if (j == 0)
         {
-          op = TREE_OPERAND (operation, 0);
-          loop_vec_def0 = vect_get_vec_def_for_operand (op, stmt, NULL);
+         loop_vec_def0 = vect_get_vec_def_for_operand (ops[0], stmt, NULL);
           if (op_type == ternary_op)
             {
-              op = TREE_OPERAND (operation, 1);
-              loop_vec_def1 = vect_get_vec_def_for_operand (op, stmt, NULL);
+             loop_vec_def1 = vect_get_vec_def_for_operand (ops[1], stmt, NULL);
             }
 
           /* Get the vector def for the reduction variable from the phi node */
@@ -2971,7 +3025,7 @@ vectorizable_reduction (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
 
           /* Get the vector def for the reduction variable from the vectorized
              reduction operation generated in the previous iteration (j-1)  */
-          reduc_def = GIMPLE_STMT_OPERAND (new_stmt ,0);
+         reduc_def = gimple_assign_lhs (new_stmt);
         }
 
       /* Arguments are ready. create the new vector stmt.  */
@@ -2980,10 +3034,10 @@ vectorizable_reduction (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
       else
         expr = build3 (code, vectype, loop_vec_def0, loop_vec_def1, 
                       reduc_def);
-      new_stmt = build_gimple_modify_stmt (vec_dest, expr);
+      new_stmt = gimple_build_assign (vec_dest, expr);
       new_temp = make_ssa_name (vec_dest, new_stmt);
-      GIMPLE_STMT_OPERAND (new_stmt, 0) = new_temp;
-      vect_finish_stmt_generation (stmt, new_stmt, bsi);
+      gimple_assign_set_lhs (new_stmt, new_temp);
+      vect_finish_stmt_generation (stmt, new_stmt, gsi);
 
       if (j == 0)
        STMT_VINFO_VEC_STMT (stmt_info) = *vec_stmt = new_stmt;
@@ -3003,14 +3057,14 @@ vectorizable_reduction (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
    of the function, or NULL_TREE if the function cannot be vectorized.  */
 
 tree
-vectorizable_function (tree call, tree vectype_out, tree vectype_in)
+vectorizable_function (gimple call, tree vectype_out, tree vectype_in)
 {
-  tree fndecl = get_callee_fndecl (call);
+  tree fndecl = gimple_call_fndecl (call);
   enum built_in_function code;
 
   /* We only handle functions that do not read or clobber memory -- i.e.
      const or novops ones.  */
-  if (!(call_expr_flags (call) & (ECF_CONST | ECF_NOVOPS)))
+  if (!(gimple_call_flags (call) & (ECF_CONST | ECF_NOVOPS)))
     return NULL_TREE;
 
   if (!fndecl
@@ -3031,11 +3085,10 @@ vectorizable_function (tree call, tree vectype_out, tree vectype_in)
    Return FALSE if not a vectorizable STMT, TRUE otherwise.  */
 
 bool
-vectorizable_call (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
+vectorizable_call (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt)
 {
   tree vec_dest;
   tree scalar_dest;
-  tree operation;
   tree op, type;
   tree vec_oprnd0 = NULL_TREE, vec_oprnd1 = NULL_TREE;
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt), prev_stmt_info;
@@ -3044,13 +3097,14 @@ vectorizable_call (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
   int nunits_out;
   loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
   struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
-  tree fndecl, rhs, new_temp, def, def_stmt, rhs_type, lhs_type;
+  tree fndecl, new_temp, def, rhs_type, lhs_type;
+  gimple def_stmt;
   enum vect_def_type dt[2] = {vect_unknown_def_type, vect_unknown_def_type};
-  tree new_stmt;
-  int ncopies, j, nargs;
-  call_expr_arg_iterator iter;
-  tree vargs;
+  gimple new_stmt;
+  int ncopies, j;
+  VEC(tree, heap) *vargs = NULL;
   enum { NARROW, NONE, WIDEN } modifier;
+  size_t i, nargs;
 
   if (!STMT_VINFO_RELEVANT_P (stmt_info))
     return false;
@@ -3063,25 +3117,24 @@ vectorizable_call (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
     return false;
 
   /* Is STMT a vectorizable call?   */
-  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+  if (!is_gimple_call (stmt))
     return false;
 
-  if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) != SSA_NAME)
-    return false;
-
-  operation = GIMPLE_STMT_OPERAND (stmt, 1);
-  if (TREE_CODE (operation) != CALL_EXPR)
+  if (TREE_CODE (gimple_call_lhs (stmt)) != SSA_NAME)
     return false;
 
   /* Process function arguments.  */
   rhs_type = NULL_TREE;
-  nargs = 0;
-  FOR_EACH_CALL_EXPR_ARG (op, iter, operation)
+  nargs = gimple_call_num_args (stmt);
+
+  for (i = 0; i < nargs; i++)
     {
+      op = gimple_call_arg (stmt, i);
+
       /* Bail out if the function has more than two arguments, we
         do not have interesting builtin functions to vectorize with
         more than two arguments.  */
-      if (nargs >= 2)
+      if (i >= 2)
        return false;
 
       /* We can only handle calls with arguments of the same type.  */
@@ -3100,8 +3153,6 @@ vectorizable_call (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
            fprintf (vect_dump, "use not simple.");
          return false;
        }
-
-      ++nargs;
     }
 
   /* No arguments is also not good.  */
@@ -3113,7 +3164,7 @@ vectorizable_call (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
     return false;
   nunits_in = TYPE_VECTOR_SUBPARTS (vectype_in);
 
-  lhs_type = TREE_TYPE (GIMPLE_STMT_OPERAND (stmt, 0));
+  lhs_type = TREE_TYPE (gimple_call_lhs (stmt));
   vectype_out = get_vectype_for_scalar_type (lhs_type);
   if (!vectype_out)
     return false;
@@ -3133,7 +3184,7 @@ vectorizable_call (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
      is available.  TODO -- in some cases, it might be profitable to
      insert the calls for pieces of the vector, in order to be able
      to vectorize other operations in the loop.  */
-  fndecl = vectorizable_function (operation, vectype_out, vectype_in);
+  fndecl = vectorizable_function (stmt, vectype_out, vectype_in);
   if (fndecl == NULL_TREE)
     {
       if (vect_print_dump_info (REPORT_DETAILS))
@@ -3184,7 +3235,7 @@ vectorizable_call (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
     }
 
   /* Handle def.  */
-  scalar_dest = GIMPLE_STMT_OPERAND (stmt, 0);
+  scalar_dest = gimple_call_lhs (stmt);
   vec_dest = vect_create_destination_var (scalar_dest, vectype_out);
 
   prev_stmt_info = NULL;
@@ -3194,12 +3245,14 @@ vectorizable_call (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
       for (j = 0; j < ncopies; ++j)
        {
          /* Build argument list for the vectorized call.  */
-         /* FIXME: Rewrite this so that it doesn't
-            construct a temporary list.  */
-         vargs = NULL_TREE;
-         nargs = 0;
-         FOR_EACH_CALL_EXPR_ARG (op, iter, operation)
+         if (j == 0)
+           vargs = VEC_alloc (tree, heap, nargs);
+         else
+           VEC_truncate (tree, vargs, 0);
+
+         for (i = 0; i < nargs; i++)
            {
+             op = gimple_call_arg (stmt, i);
              if (j == 0)
                vec_oprnd0
                  = vect_get_vec_def_for_operand (op, stmt, NULL);
@@ -3207,18 +3260,14 @@ vectorizable_call (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
                vec_oprnd0
                  = vect_get_vec_def_for_stmt_copy (dt[nargs], vec_oprnd0);
 
-             vargs = tree_cons (NULL_TREE, vec_oprnd0, vargs);
-
-             ++nargs;
+             VEC_quick_push (tree, vargs, vec_oprnd0);
            }
-         vargs = nreverse (vargs);
 
-         rhs = build_function_call_expr (fndecl, vargs);
-         new_stmt = build_gimple_modify_stmt (vec_dest, rhs);
+         new_stmt = gimple_build_call_vec (fndecl, vargs);
          new_temp = make_ssa_name (vec_dest, new_stmt);
-         GIMPLE_STMT_OPERAND (new_stmt, 0) = new_temp;
+         gimple_call_set_lhs (new_stmt, new_temp);
 
-         vect_finish_stmt_generation (stmt, new_stmt, bsi);
+         vect_finish_stmt_generation (stmt, new_stmt, gsi);
 
          if (j == 0)
            STMT_VINFO_VEC_STMT (stmt_info) = *vec_stmt = new_stmt;
@@ -3234,12 +3283,14 @@ vectorizable_call (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
       for (j = 0; j < ncopies; ++j)
        {
          /* Build argument list for the vectorized call.  */
-         /* FIXME: Rewrite this so that it doesn't
-            construct a temporary list.  */
-         vargs = NULL_TREE;
-         nargs = 0;
-         FOR_EACH_CALL_EXPR_ARG (op, iter, operation)
+         if (j == 0)
+           vargs = VEC_alloc (tree, heap, nargs * 2);
+         else
+           VEC_truncate (tree, vargs, 0);
+
+         for (i = 0; i < nargs; i++)
            {
+             op = gimple_call_arg (stmt, i);
              if (j == 0)
                {
                  vec_oprnd0
@@ -3255,19 +3306,15 @@ vectorizable_call (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
                    = vect_get_vec_def_for_stmt_copy (dt[nargs], vec_oprnd0);
                }
 
-             vargs = tree_cons (NULL_TREE, vec_oprnd0, vargs);
-             vargs = tree_cons (NULL_TREE, vec_oprnd1, vargs);
-
-             ++nargs;
+             VEC_quick_push (tree, vargs, vec_oprnd0);
+             VEC_quick_push (tree, vargs, vec_oprnd1);
            }
-         vargs = nreverse (vargs);
 
-         rhs = build_function_call_expr (fndecl, vargs);
-         new_stmt = build_gimple_modify_stmt (vec_dest, rhs);
+         new_stmt = gimple_build_call_vec (fndecl, vargs);
          new_temp = make_ssa_name (vec_dest, new_stmt);
-         GIMPLE_STMT_OPERAND (new_stmt, 0) = new_temp;
+         gimple_call_set_lhs (new_stmt, new_temp);
 
-         vect_finish_stmt_generation (stmt, new_stmt, bsi);
+         vect_finish_stmt_generation (stmt, new_stmt, gsi);
 
          if (j == 0)
            STMT_VINFO_VEC_STMT (stmt_info) = new_stmt;
@@ -3286,13 +3333,21 @@ vectorizable_call (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
       return false;
     }
 
+  VEC_free (tree, heap, vargs);
+
   /* The call in STMT might prevent it from being removed in dce.
      We however cannot remove it here, due to the way the ssa name
      it defines is mapped to the new definition.  So just replace
      rhs of the statement with something harmless.  */
+
   type = TREE_TYPE (scalar_dest);
-  GIMPLE_STMT_OPERAND (stmt, 1) = fold_convert (type, integer_zero_node);
-  update_stmt (stmt);
+  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
+                                 fold_convert (type, integer_zero_node));
+  set_vinfo_for_stmt (new_stmt, stmt_info);
+  set_vinfo_for_stmt (stmt, NULL);
+  STMT_VINFO_STMT (stmt_info) = new_stmt;
+  gsi_replace (gsi, new_stmt, false);
+  SSA_NAME_DEF_STMT (gimple_assign_lhs (new_stmt)) = new_stmt;
 
   return true;
 }
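
A second point worth spelling out from this function: with GIMPLE_MODIFY_STMT the scalar call could be neutralized by overwriting operand 1 with a zero constant, but a GIMPLE_CALL tuple has no assignment RHS to overwrite. The tuplified code therefore builds a fresh GIMPLE_ASSIGN, transfers the vectorizer's stmt_vec_info to it, and swaps it in at the iterator. Roughly, under the same assumptions as the patch:

    /* Replace the scalar call STMT by `lhs = (type) 0' so later DCE can drop
       it, while keeping the vectorizer's stmt_info mapping intact.  */
    gimple repl = gimple_build_assign (gimple_call_lhs (stmt),
                                       fold_convert (type, integer_zero_node));
    set_vinfo_for_stmt (repl, stmt_info);       /* stmt_info now describes REPL   */
    set_vinfo_for_stmt (stmt, NULL);            /* detach it from the old call    */
    STMT_VINFO_STMT (stmt_info) = repl;
    gsi_replace (gsi, repl, false);             /* swap the statements in place   */
    SSA_NAME_DEF_STMT (gimple_assign_lhs (repl)) = repl;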
@@ -3307,14 +3362,15 @@ vectorizable_call (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
    needs to be created (DECL is a function-decl of a target-builtin).
    STMT is the original scalar stmt that we are vectorizing.  */
 
-static tree
-vect_gen_widened_results_half (enum tree_code code, tree vectype, tree decl,
+static gimple
+vect_gen_widened_results_half (enum tree_code code,
+                              tree vectype ATTRIBUTE_UNUSED,
+                              tree decl,
                                tree vec_oprnd0, tree vec_oprnd1, int op_type,
-                               tree vec_dest, block_stmt_iterator *bsi,
-                              tree stmt)
+                              tree vec_dest, gimple_stmt_iterator *gsi,
+                              gimple stmt)
 { 
-  tree expr; 
-  tree new_stmt; 
+  gimple new_stmt;
   tree new_temp; 
   tree sym; 
   ssa_op_iter iter;
@@ -3324,23 +3380,24 @@ vect_gen_widened_results_half (enum tree_code code, tree vectype, tree decl,
     {  
       /* Target specific support  */ 
       if (op_type == binary_op)
-       expr = build_call_expr (decl, 2, vec_oprnd0, vec_oprnd1);
+       new_stmt = gimple_build_call (decl, 2, vec_oprnd0, vec_oprnd1);
       else
-       expr = build_call_expr (decl, 1, vec_oprnd0);
+       new_stmt = gimple_build_call (decl, 1, vec_oprnd0);
+      new_temp = make_ssa_name (vec_dest, new_stmt);
+      gimple_call_set_lhs (new_stmt, new_temp);
     } 
   else 
-    { 
+    {
       /* Generic support */ 
       gcc_assert (op_type == TREE_CODE_LENGTH (code)); 
-      if (op_type == binary_op) 
-        expr = build2 (code, vectype, vec_oprnd0, vec_oprnd1); 
-      else  
-        expr = build1 (code, vectype, vec_oprnd0); 
+      if (op_type != binary_op)
+       vec_oprnd1 = NULL;
+      new_stmt = gimple_build_assign_with_ops (code, vec_dest, vec_oprnd0,
+                                              vec_oprnd1);
+      new_temp = make_ssa_name (vec_dest, new_stmt);
+      gimple_assign_set_lhs (new_stmt, new_temp);
     } 
-  new_stmt = build_gimple_modify_stmt (vec_dest, expr);
-  new_temp = make_ssa_name (vec_dest, new_stmt); 
-  GIMPLE_STMT_OPERAND (new_stmt, 0) = new_temp; 
-  vect_finish_stmt_generation (stmt, new_stmt, bsi); 
+  vect_finish_stmt_generation (stmt, new_stmt, gsi);
 
   if (code == CALL_EXPR)
     {
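
The helper above now returns a gimple statement and builds it in one of two shapes: a GIMPLE_CALL when the target supplies a builtin, or a GIMPLE_ASSIGN that carries the tree code and operands directly, with no build1/build2 expression tree in between. Condensed, with comments added here for orientation:

    if (decl)   /* target-specific builtin available */
      {
        new_stmt = (op_type == binary_op)
                   ? gimple_build_call (decl, 2, vec_oprnd0, vec_oprnd1)
                   : gimple_build_call (decl, 1, vec_oprnd0);
        new_temp = make_ssa_name (vec_dest, new_stmt);
        gimple_call_set_lhs (new_stmt, new_temp);
      }
    else        /* generic tree code; the second operand is simply NULL for unary ops */
      {
        new_stmt = gimple_build_assign_with_ops (code, vec_dest, vec_oprnd0,
                                                 op_type == binary_op
                                                 ? vec_oprnd1 : NULL_TREE);
        new_temp = make_ssa_name (vec_dest, new_stmt);
        gimple_assign_set_lhs (new_stmt, new_temp);
      }
    vect_finish_stmt_generation (stmt, new_stmt, gsi);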
@@ -3362,12 +3419,11 @@ vect_gen_widened_results_half (enum tree_code code, tree vectype, tree decl,
    Return FALSE if not a vectorizable STMT, TRUE otherwise.  */
 
 bool
-vectorizable_conversion (tree stmt, block_stmt_iterator *bsi,
-                        tree *vec_stmt, slp_tree slp_node)
+vectorizable_conversion (gimple stmt, gimple_stmt_iterator *gsi,
+                        gimple *vec_stmt, slp_tree slp_node)
 {
   tree vec_dest;
   tree scalar_dest;
-  tree operation;
   tree op0;
   tree vec_oprnd0 = NULL_TREE, vec_oprnd1 = NULL_TREE;
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
@@ -3376,9 +3432,10 @@ vectorizable_conversion (tree stmt, block_stmt_iterator *bsi,
   enum tree_code code, code1 = ERROR_MARK, code2 = ERROR_MARK;
   tree decl1 = NULL_TREE, decl2 = NULL_TREE;
   tree new_temp;
-  tree def, def_stmt;
+  tree def;
+  gimple def_stmt;
   enum vect_def_type dt[2] = {vect_unknown_def_type, vect_unknown_def_type};
-  tree new_stmt = NULL_TREE;
+  gimple new_stmt = NULL;
   stmt_vec_info prev_stmt_info;
   int nunits_in;
   int nunits_out;
@@ -3400,26 +3457,25 @@ vectorizable_conversion (tree stmt, block_stmt_iterator *bsi,
   if (STMT_VINFO_DEF_TYPE (stmt_info) != vect_loop_def)
     return false;
 
-  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+  if (!is_gimple_assign (stmt))
     return false;
 
-  if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) != SSA_NAME)
+  if (TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
     return false;
 
-  operation = GIMPLE_STMT_OPERAND (stmt, 1);
-  code = TREE_CODE (operation);
+  code = gimple_assign_rhs_code (stmt);
   if (code != FIX_TRUNC_EXPR && code != FLOAT_EXPR)
     return false;
 
   /* Check types of lhs and rhs.  */
-  op0 = TREE_OPERAND (operation, 0);
+  op0 = gimple_assign_rhs1 (stmt);
   rhs_type = TREE_TYPE (op0);
   vectype_in = get_vectype_for_scalar_type (rhs_type);
   if (!vectype_in)
     return false;
   nunits_in = TYPE_VECTOR_SUBPARTS (vectype_in);
 
-  scalar_dest = GIMPLE_STMT_OPERAND (stmt, 0);
+  scalar_dest = gimple_assign_lhs (stmt);
   lhs_type = TREE_TYPE (scalar_dest);
   vectype_out = get_vectype_for_scalar_type (lhs_type);
   if (!vectype_out)
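
The recognition prologue in this hunk follows a pattern repeated for every vectorizable_* routine in the file: the statement is queried directly instead of being taken apart as a GIMPLE_MODIFY_STMT tree. Side by side, the old and new accessors look like this (a fragment for orientation, not a complete function):

    if (!is_gimple_assign (stmt))            /* was: TREE_CODE (stmt) != GIMPLE_MODIFY_STMT */
      return false;
    scalar_dest = gimple_assign_lhs (stmt);  /* was: GIMPLE_STMT_OPERAND (stmt, 0)          */
    if (TREE_CODE (scalar_dest) != SSA_NAME)
      return false;
    code = gimple_assign_rhs_code (stmt);    /* was: TREE_CODE of the RHS expression        */
    op0  = gimple_assign_rhs1 (stmt);        /* was: TREE_OPERAND (operation, 0)            */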
@@ -3532,13 +3588,11 @@ vectorizable_conversion (tree stmt, block_stmt_iterator *bsi,
            targetm.vectorize.builtin_conversion (code, vectype_in);
          for (i = 0; VEC_iterate (tree, vec_oprnds0, i, vop0); i++)
            { 
-             new_stmt = build_call_expr (builtin_decl, 1, vop0);
-
              /* Arguments are ready. create the new vector stmt.  */
-             new_stmt = build_gimple_modify_stmt (vec_dest, new_stmt);
+             new_stmt = gimple_build_call (builtin_decl, 1, vop0);
              new_temp = make_ssa_name (vec_dest, new_stmt);
-             GIMPLE_STMT_OPERAND (new_stmt, 0) = new_temp;
-             vect_finish_stmt_generation (stmt, new_stmt, bsi);
+             gimple_call_set_lhs (new_stmt, new_temp);
+             vect_finish_stmt_generation (stmt, new_stmt, gsi);
              FOR_EACH_SSA_TREE_OPERAND (sym, new_stmt, iter, 
                                         SSA_OP_ALL_VIRTUALS)
                {
@@ -3547,7 +3601,7 @@ vectorizable_conversion (tree stmt, block_stmt_iterator *bsi,
                  mark_sym_for_renaming (sym);
                }
              if (slp_node)
-               VEC_quick_push (tree, SLP_TREE_VEC_STMTS (slp_node), new_stmt);
+               VEC_quick_push (gimple, SLP_TREE_VEC_STMTS (slp_node), new_stmt);
            }
 
          if (j == 0)
@@ -3576,7 +3630,7 @@ vectorizable_conversion (tree stmt, block_stmt_iterator *bsi,
          new_stmt
            = vect_gen_widened_results_half (code1, vectype_out, decl1, 
                                             vec_oprnd0, vec_oprnd1,
-                                            unary_op, vec_dest, bsi, stmt);
+                                            unary_op, vec_dest, gsi, stmt);
          if (j == 0)
            STMT_VINFO_VEC_STMT (stmt_info) = new_stmt;
          else
@@ -3587,7 +3641,7 @@ vectorizable_conversion (tree stmt, block_stmt_iterator *bsi,
          new_stmt
            = vect_gen_widened_results_half (code2, vectype_out, decl2,
                                             vec_oprnd0, vec_oprnd1,
-                                            unary_op, vec_dest, bsi, stmt);
+                                            unary_op, vec_dest, gsi, stmt);
          STMT_VINFO_RELATED_STMT (prev_stmt_info) = new_stmt;
          prev_stmt_info = vinfo_for_stmt (new_stmt);
        }
@@ -3614,10 +3668,11 @@ vectorizable_conversion (tree stmt, block_stmt_iterator *bsi,
 
          /* Arguments are ready. Create the new vector stmt.  */
          expr = build2 (code1, vectype_out, vec_oprnd0, vec_oprnd1);
-         new_stmt = build_gimple_modify_stmt (vec_dest, expr);
+         new_stmt = gimple_build_assign_with_ops (code1, vec_dest, vec_oprnd0,
+                                                  vec_oprnd1);
          new_temp = make_ssa_name (vec_dest, new_stmt);
-         GIMPLE_STMT_OPERAND (new_stmt, 0) = new_temp;
-         vect_finish_stmt_generation (stmt, new_stmt, bsi);
+         gimple_assign_set_lhs (new_stmt, new_temp);
+         vect_finish_stmt_generation (stmt, new_stmt, gsi);
 
          if (j == 0)
            STMT_VINFO_VEC_STMT (stmt_info) = new_stmt;
@@ -3645,8 +3700,8 @@ vectorizable_conversion (tree stmt, block_stmt_iterator *bsi,
    Return FALSE if not a vectorizable STMT, TRUE otherwise.  */
 
 bool
-vectorizable_assignment (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt, 
-                        slp_tree slp_node)
+vectorizable_assignment (gimple stmt, gimple_stmt_iterator *gsi,
+                        gimple *vec_stmt, slp_tree slp_node)
 {
   tree vec_dest;
   tree scalar_dest;
@@ -3655,7 +3710,8 @@ vectorizable_assignment (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt,
   tree vectype = STMT_VINFO_VECTYPE (stmt_info);
   loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
   tree new_temp;
-  tree def, def_stmt;
+  tree def;
+  gimple def_stmt;
   enum vect_def_type dt[2] = {vect_unknown_def_type, vect_unknown_def_type};
   int nunits = TYPE_VECTOR_SUBPARTS (vectype);
   int ncopies = LOOP_VINFO_VECT_FACTOR (loop_vinfo) / nunits;
@@ -3679,14 +3735,19 @@ vectorizable_assignment (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt,
     return false;
 
   /* Is vectorizable assignment?  */
-  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+  if (!is_gimple_assign (stmt))
     return false;
 
-  scalar_dest = GIMPLE_STMT_OPERAND (stmt, 0);
+  scalar_dest = gimple_assign_lhs (stmt);
   if (TREE_CODE (scalar_dest) != SSA_NAME)
     return false;
 
-  op = GIMPLE_STMT_OPERAND (stmt, 1);
+  if (gimple_assign_single_p (stmt)
+      || gimple_assign_rhs_code (stmt) == PAREN_EXPR)
+    op = gimple_assign_rhs1 (stmt);
+  else
+    return false;
+
   if (!vect_is_simple_use (op, loop_vinfo, &def_stmt, &def, &dt[0]))
     {
       if (vect_print_dump_info (REPORT_DETAILS))
@@ -3716,14 +3777,14 @@ vectorizable_assignment (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt,
   /* Arguments are ready. create the new vector stmt.  */
   for (i = 0; VEC_iterate (tree, vec_oprnds, i, vop); i++)
     {
-      *vec_stmt = build_gimple_modify_stmt (vec_dest, vop);
+      *vec_stmt = gimple_build_assign (vec_dest, vop);
       new_temp = make_ssa_name (vec_dest, *vec_stmt);
-      GIMPLE_STMT_OPERAND (*vec_stmt, 0) = new_temp;
-      vect_finish_stmt_generation (stmt, *vec_stmt, bsi);
+      gimple_assign_set_lhs (*vec_stmt, new_temp);
+      vect_finish_stmt_generation (stmt, *vec_stmt, gsi);
       STMT_VINFO_VEC_STMT (stmt_info) = *vec_stmt;
 
       if (slp_node)
-       VEC_quick_push (tree, SLP_TREE_VEC_STMTS (slp_node), *vec_stmt);
+       VEC_quick_push (gimple, SLP_TREE_VEC_STMTS (slp_node), *vec_stmt);
    }
   
   VEC_free (tree, heap, vec_oprnds);       
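
The generation loop just above shows two more recurring changes: each vectorized copy is a plain two-operand GIMPLE_ASSIGN, and SLP nodes now collect gimple statements rather than trees. Reduced to its essentials (variable names as in the patch, the stmt name is illustrative):

    for (i = 0; VEC_iterate (tree, vec_oprnds, i, vop); i++)
      {
        gimple copy_stmt = gimple_build_assign (vec_dest, vop);  /* vec_dest = vop */
        new_temp = make_ssa_name (vec_dest, copy_stmt);
        gimple_assign_set_lhs (copy_stmt, new_temp);
        vect_finish_stmt_generation (stmt, copy_stmt, gsi);
        if (slp_node)   /* SLP trees hold gimple stmts after the merge */
          VEC_quick_push (gimple, SLP_TREE_VEC_STMTS (slp_node), copy_stmt);
      }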
@@ -3766,8 +3827,8 @@ vect_min_worthwhile_factor (enum tree_code code)
    Return FALSE if not a vectorizable STMT, TRUE otherwise.  */
 
 bool
-vectorizable_induction (tree phi, block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
-                        tree *vec_stmt)
+vectorizable_induction (gimple phi, gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
+                       gimple *vec_stmt)
 {
   stmt_vec_info stmt_info = vinfo_for_stmt (phi);
   tree vectype = STMT_VINFO_VECTYPE (stmt_info);
@@ -3787,7 +3848,7 @@ vectorizable_induction (tree phi, block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
 
   gcc_assert (STMT_VINFO_DEF_TYPE (stmt_info) == vect_induction_def);
 
-  if (TREE_CODE (phi) != PHI_NODE)
+  if (gimple_code (phi) != GIMPLE_PHI)
     return false;
 
   if (!vec_stmt) /* transformation not required.  */
@@ -3818,12 +3879,11 @@ vectorizable_induction (tree phi, block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
    Return FALSE if not a vectorizable STMT, TRUE otherwise.  */
 
 bool
-vectorizable_operation (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt, 
-                       slp_tree slp_node)
+vectorizable_operation (gimple stmt, gimple_stmt_iterator *gsi,
+                       gimple *vec_stmt, slp_tree slp_node)
 {
   tree vec_dest;
   tree scalar_dest;
-  tree operation;
   tree op0, op1 = NULL;
   tree vec_oprnd1 = NULL_TREE;
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
@@ -3837,9 +3897,10 @@ vectorizable_operation (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt,
   optab optab;
   int icode;
   enum machine_mode optab_op2_mode;
-  tree def, def_stmt;
+  tree def;
+  gimple def_stmt;
   enum vect_def_type dt[2] = {vect_unknown_def_type, vect_unknown_def_type};
-  tree new_stmt = NULL_TREE;
+  gimple new_stmt = NULL;
   stmt_vec_info prev_stmt_info;
   int nunits_in = TYPE_VECTOR_SUBPARTS (vectype);
   int nunits_out;
@@ -3872,13 +3933,13 @@ vectorizable_operation (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt,
     return false;
 
   /* Is STMT a vectorizable binary/unary operation?   */
-  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+  if (!is_gimple_assign (stmt))
     return false;
 
-  if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) != SSA_NAME)
+  if (TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
     return false;
 
-  scalar_dest = GIMPLE_STMT_OPERAND (stmt, 0);
+  scalar_dest = gimple_assign_lhs (stmt);
   vectype_out = get_vectype_for_scalar_type (TREE_TYPE (scalar_dest));
   if (!vectype_out)
     return false;
@@ -3886,8 +3947,7 @@ vectorizable_operation (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt,
   if (nunits_out != nunits_in)
     return false;
 
-  operation = GIMPLE_STMT_OPERAND (stmt, 1);
-  code = TREE_CODE (operation);
+  code = gimple_assign_rhs_code (stmt);
 
   /* For pointer addition, we should use the normal plus for
      the vector addition.  */
@@ -3895,7 +3955,7 @@ vectorizable_operation (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt,
     code = PLUS_EXPR;
 
   /* Support only unary or binary operations.  */
-  op_type = TREE_OPERAND_LENGTH (operation);
+  op_type = TREE_CODE_LENGTH (code);
   if (op_type != unary_op && op_type != binary_op)
     {
       if (vect_print_dump_info (REPORT_DETAILS))
@@ -3903,7 +3963,7 @@ vectorizable_operation (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt,
       return false;
     }
 
-  op0 = TREE_OPERAND (operation, 0);
+  op0 = gimple_assign_rhs1 (stmt);
   if (!vect_is_simple_use (op0, loop_vinfo, &def_stmt, &def, &dt[0]))
     {
       if (vect_print_dump_info (REPORT_DETAILS))
@@ -3913,7 +3973,7 @@ vectorizable_operation (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt,
 
   if (op_type == binary_op)
     {
-      op1 = TREE_OPERAND (operation, 1);
+      op1 = gimple_assign_rhs2 (stmt);
       if (!vect_is_simple_use (op1, loop_vinfo, &def_stmt, &def, &dt[1]))
        {
          if (vect_print_dump_info (REPORT_DETAILS))
@@ -4140,21 +4200,14 @@ vectorizable_operation (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt,
       /* Arguments are ready. Create the new vector stmt.  */
       for (i = 0; VEC_iterate (tree, vec_oprnds0, i, vop0); i++)
         {
-          if (op_type == binary_op)
-            {
-              vop1 = VEC_index (tree, vec_oprnds1, i);
-              new_stmt = build_gimple_modify_stmt (vec_dest,
-                                        build2 (code, vectype, vop0, vop1));
-            }
-         else
-           new_stmt = build_gimple_modify_stmt (vec_dest,
-                                   build1 (code, vectype, vop0));
-
+         vop1 = ((op_type == binary_op)
+                 ? VEC_index (tree, vec_oprnds1, i) : NULL);
+         new_stmt = gimple_build_assign_with_ops (code, vec_dest, vop0, vop1);
          new_temp = make_ssa_name (vec_dest, new_stmt);
-         GIMPLE_STMT_OPERAND (new_stmt, 0) = new_temp;
-         vect_finish_stmt_generation (stmt, new_stmt, bsi);
+         gimple_assign_set_lhs (new_stmt, new_temp);
+         vect_finish_stmt_generation (stmt, new_stmt, gsi);
           if (slp_node)
-            VEC_quick_push (tree, SLP_TREE_VEC_STMTS (slp_node), new_stmt);
+           VEC_quick_push (gimple, SLP_TREE_VEC_STMTS (slp_node), new_stmt);
         }
 
       if (j == 0)
@@ -4181,12 +4234,11 @@ vectorizable_operation (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt,
    Return FALSE if not a vectorizable STMT, TRUE otherwise.  */
 
 bool
-vectorizable_type_demotion (tree stmt, block_stmt_iterator *bsi,
-                           tree *vec_stmt)
+vectorizable_type_demotion (gimple stmt, gimple_stmt_iterator *gsi,
+                           gimple *vec_stmt)
 {
   tree vec_dest;
   tree scalar_dest;
-  tree operation;
   tree op0;
   tree vec_oprnd0=NULL, vec_oprnd1=NULL;
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
@@ -4194,16 +4246,16 @@ vectorizable_type_demotion (tree stmt, block_stmt_iterator *bsi,
   struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
   enum tree_code code, code1 = ERROR_MARK;
   tree new_temp;
-  tree def, def_stmt;
+  tree def;
+  gimple def_stmt;
   enum vect_def_type dt[2] = {vect_unknown_def_type, vect_unknown_def_type};
-  tree new_stmt;
+  gimple new_stmt;
   stmt_vec_info prev_stmt_info;
   int nunits_in;
   int nunits_out;
   tree vectype_out;
   int ncopies;
   int j;
-  tree expr;
   tree vectype_in;
 
   if (!STMT_VINFO_RELEVANT_P (stmt_info))
@@ -4213,24 +4265,23 @@ vectorizable_type_demotion (tree stmt, block_stmt_iterator *bsi,
     return false;
 
   /* Is STMT a vectorizable type-demotion operation?  */
-  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+  if (!is_gimple_assign (stmt))
     return false;
 
-  if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) != SSA_NAME)
+  if (TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
     return false;
 
-  operation = GIMPLE_STMT_OPERAND (stmt, 1);
-  code = TREE_CODE (operation);
+  code = gimple_assign_rhs_code (stmt);
   if (code != NOP_EXPR && code != CONVERT_EXPR)
     return false;
 
-  op0 = TREE_OPERAND (operation, 0);
+  op0 = gimple_assign_rhs1 (stmt);
   vectype_in = get_vectype_for_scalar_type (TREE_TYPE (op0));
   if (!vectype_in)
     return false;
   nunits_in = TYPE_VECTOR_SUBPARTS (vectype_in);
 
-  scalar_dest = GIMPLE_STMT_OPERAND (stmt, 0);
+  scalar_dest = gimple_assign_lhs (stmt);
   vectype_out = get_vectype_for_scalar_type (TREE_TYPE (scalar_dest));
   if (!vectype_out)
     return false;
@@ -4306,11 +4357,11 @@ vectorizable_type_demotion (tree stmt, block_stmt_iterator *bsi,
        }
 
       /* Arguments are ready. Create the new vector stmt.  */
-      expr = build2 (code1, vectype_out, vec_oprnd0, vec_oprnd1);
-      new_stmt = build_gimple_modify_stmt (vec_dest, expr);
+      new_stmt = gimple_build_assign_with_ops (code1, vec_dest, vec_oprnd0,
+                                              vec_oprnd1);
       new_temp = make_ssa_name (vec_dest, new_stmt);
-      GIMPLE_STMT_OPERAND (new_stmt, 0) = new_temp;
-      vect_finish_stmt_generation (stmt, new_stmt, bsi);
+      gimple_assign_set_lhs (new_stmt, new_temp);
+      vect_finish_stmt_generation (stmt, new_stmt, gsi);
 
       if (j == 0)
        STMT_VINFO_VEC_STMT (stmt_info) = new_stmt;
@@ -4334,12 +4385,11 @@ vectorizable_type_demotion (tree stmt, block_stmt_iterator *bsi,
    Return FALSE if not a vectorizable STMT, TRUE otherwise.  */
 
 bool
-vectorizable_type_promotion (tree stmt, block_stmt_iterator *bsi,
-                             tree *vec_stmt)
+vectorizable_type_promotion (gimple stmt, gimple_stmt_iterator *gsi,
+                            gimple *vec_stmt)
 {
   tree vec_dest;
   tree scalar_dest;
-  tree operation;
   tree op0, op1 = NULL;
   tree vec_oprnd0=NULL, vec_oprnd1=NULL;
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
@@ -4348,9 +4398,10 @@ vectorizable_type_promotion (tree stmt, block_stmt_iterator *bsi,
   enum tree_code code, code1 = ERROR_MARK, code2 = ERROR_MARK;
   tree decl1 = NULL_TREE, decl2 = NULL_TREE;
   int op_type; 
-  tree def, def_stmt;
+  tree def;
+  gimple def_stmt;
   enum vect_def_type dt[2] = {vect_unknown_def_type, vect_unknown_def_type};
-  tree new_stmt;
+  gimple new_stmt;
   stmt_vec_info prev_stmt_info;
   int nunits_in;
   int nunits_out;
@@ -4366,25 +4417,24 @@ vectorizable_type_promotion (tree stmt, block_stmt_iterator *bsi,
     return false;
 
   /* Is STMT a vectorizable type-promotion operation?  */
-  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+  if (!is_gimple_assign (stmt))
     return false;
 
-  if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) != SSA_NAME)
+  if (TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
     return false;
 
-  operation = GIMPLE_STMT_OPERAND (stmt, 1);
-  code = TREE_CODE (operation);
+  code = gimple_assign_rhs_code (stmt);
   if (code != NOP_EXPR && code != CONVERT_EXPR
       && code != WIDEN_MULT_EXPR)
     return false;
 
-  op0 = TREE_OPERAND (operation, 0);
+  op0 = gimple_assign_rhs1 (stmt);
   vectype_in = get_vectype_for_scalar_type (TREE_TYPE (op0));
   if (!vectype_in)
     return false;
   nunits_in = TYPE_VECTOR_SUBPARTS (vectype_in);
 
-  scalar_dest = GIMPLE_STMT_OPERAND (stmt, 0);
+  scalar_dest = gimple_assign_lhs (stmt);
   vectype_out = get_vectype_for_scalar_type (TREE_TYPE (scalar_dest));
   if (!vectype_out)
     return false;
@@ -4420,7 +4470,7 @@ vectorizable_type_promotion (tree stmt, block_stmt_iterator *bsi,
   op_type = TREE_CODE_LENGTH (code);
   if (op_type == binary_op)
     {
-      op1 = TREE_OPERAND (operation, 1);
+      op1 = gimple_assign_rhs2 (stmt);
       if (!vect_is_simple_use (op1, loop_vinfo, &def_stmt, &def, &dt[1]))
         {
          if (vect_print_dump_info (REPORT_DETAILS))
@@ -4482,7 +4532,7 @@ vectorizable_type_promotion (tree stmt, block_stmt_iterator *bsi,
          or using a tree-code.  */
       /* Generate first half of the widened result:  */
       new_stmt = vect_gen_widened_results_half (code1, vectype_out, decl1, 
-                        vec_oprnd0, vec_oprnd1, op_type, vec_dest, bsi, stmt);
+                       vec_oprnd0, vec_oprnd1, op_type, vec_dest, gsi, stmt);
       if (j == 0)
         STMT_VINFO_VEC_STMT (stmt_info) = new_stmt;
       else
@@ -4491,7 +4541,7 @@ vectorizable_type_promotion (tree stmt, block_stmt_iterator *bsi,
 
       /* Generate second half of the widened result:  */
       new_stmt = vect_gen_widened_results_half (code2, vectype_out, decl2,
-                        vec_oprnd0, vec_oprnd1, op_type, vec_dest, bsi, stmt);
+                       vec_oprnd0, vec_oprnd1, op_type, vec_dest, gsi, stmt);
       STMT_VINFO_RELATED_STMT (prev_stmt_info) = new_stmt;
       prev_stmt_info = vinfo_for_stmt (new_stmt);
 
@@ -4605,17 +4655,19 @@ vect_strided_store_supported (tree vectype)
 static bool
 vect_permute_store_chain (VEC(tree,heap) *dr_chain, 
                          unsigned int length, 
-                         tree stmt, 
-                         block_stmt_iterator *bsi,
+                         gimple stmt,
+                         gimple_stmt_iterator *gsi,
                          VEC(tree,heap) **result_chain)
 {
-  tree perm_dest, perm_stmt, vect1, vect2, high, low;
+  tree perm_dest, vect1, vect2, high, low;
+  gimple perm_stmt;
   tree vectype = STMT_VINFO_VECTYPE (vinfo_for_stmt (stmt));
-  tree scalar_dest, tmp;
+  tree scalar_dest;
   int i;
   unsigned int j;
+  enum tree_code high_code, low_code;
   
-  scalar_dest = GIMPLE_STMT_OPERAND (stmt, 0);
+  scalar_dest = gimple_assign_lhs (stmt);
 
   /* Check that the operation is supported.  */
   if (!vect_strided_store_supported (vectype))
@@ -4639,13 +4691,20 @@ vect_permute_store_chain (VEC(tree,heap) *dr_chain,
          DECL_GIMPLE_REG_P (perm_dest) = 1;
          add_referenced_var (perm_dest);
           if (BYTES_BIG_ENDIAN)
-           tmp = build2 (VEC_INTERLEAVE_HIGH_EXPR, vectype, vect1, vect2); 
+           {
+             high_code = VEC_INTERLEAVE_HIGH_EXPR;
+             low_code = VEC_INTERLEAVE_LOW_EXPR;
+           }
          else
-           tmp = build2 (VEC_INTERLEAVE_LOW_EXPR, vectype, vect1, vect2);
-         perm_stmt = build_gimple_modify_stmt (perm_dest, tmp);
+           {
+             low_code = VEC_INTERLEAVE_HIGH_EXPR;
+             high_code = VEC_INTERLEAVE_LOW_EXPR;
+           }
+         perm_stmt = gimple_build_assign_with_ops (high_code, perm_dest,
+                                                   vect1, vect2);
          high = make_ssa_name (perm_dest, perm_stmt);
-         GIMPLE_STMT_OPERAND (perm_stmt, 0) = high;
-         vect_finish_stmt_generation (stmt, perm_stmt, bsi);
+         gimple_assign_set_lhs (perm_stmt, high);
+         vect_finish_stmt_generation (stmt, perm_stmt, gsi);
          VEC_replace (tree, *result_chain, 2*j, high);
 
          /* Create interleaving stmt:
@@ -4656,14 +4715,11 @@ vect_permute_store_chain (VEC(tree,heap) *dr_chain,
          perm_dest = create_tmp_var (vectype, "vect_inter_low");
          DECL_GIMPLE_REG_P (perm_dest) = 1;
          add_referenced_var (perm_dest);
-         if (BYTES_BIG_ENDIAN)
-           tmp = build2 (VEC_INTERLEAVE_LOW_EXPR, vectype, vect1, vect2);
-         else
-           tmp = build2 (VEC_INTERLEAVE_HIGH_EXPR, vectype, vect1, vect2);
-         perm_stmt = build_gimple_modify_stmt (perm_dest, tmp);
+         perm_stmt = gimple_build_assign_with_ops (low_code, perm_dest,
+                                                   vect1, vect2);
          low = make_ssa_name (perm_dest, perm_stmt);
-         GIMPLE_STMT_OPERAND (perm_stmt, 0) = low;
-         vect_finish_stmt_generation (stmt, perm_stmt, bsi);
+         gimple_assign_set_lhs (perm_stmt, low);
+         vect_finish_stmt_generation (stmt, perm_stmt, gsi);
          VEC_replace (tree, *result_chain, 2*j+1, low);
        }
       dr_chain = VEC_copy (tree, heap, *result_chain);
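
The store-permutation rewrite above also hoists the endianness decision out of statement construction: rather than building one of two interleave trees at each site, it selects the tree codes once and feeds them to gimple_build_assign_with_ops. In outline (perm_dest creation abbreviated):

    enum tree_code high_code, low_code;
    if (BYTES_BIG_ENDIAN)
      {
        high_code = VEC_INTERLEAVE_HIGH_EXPR;
        low_code  = VEC_INTERLEAVE_LOW_EXPR;
      }
    else
      {
        /* On little-endian targets the two roles are swapped.  */
        high_code = VEC_INTERLEAVE_LOW_EXPR;
        low_code  = VEC_INTERLEAVE_HIGH_EXPR;
      }
    perm_stmt = gimple_build_assign_with_ops (high_code, perm_dest, vect1, vect2);
    high = make_ssa_name (perm_dest, perm_stmt);
    gimple_assign_set_lhs (perm_stmt, high);
    vect_finish_stmt_generation (stmt, perm_stmt, gsi);
    /* ... and likewise with low_code for the second half of the pair.  */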
@@ -4681,7 +4737,7 @@ vect_permute_store_chain (VEC(tree,heap) *dr_chain,
    Return FALSE if not a vectorizable STMT, TRUE otherwise.  */
 
 bool
-vectorizable_store (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt,
+vectorizable_store (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
                    slp_tree slp_node)
 {
   tree scalar_dest;
@@ -4696,14 +4752,15 @@ vectorizable_store (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt,
   enum machine_mode vec_mode;
   tree dummy;
   enum dr_alignment_support alignment_support_scheme;
-  tree def, def_stmt;
+  tree def;
+  gimple def_stmt;
   enum vect_def_type dt;
   stmt_vec_info prev_stmt_info = NULL;
   tree dataref_ptr = NULL_TREE;
   int nunits = TYPE_VECTOR_SUBPARTS (vectype);
   int ncopies = LOOP_VINFO_VECT_FACTOR (loop_vinfo) / nunits;
   int j;
-  tree next_stmt, first_stmt = NULL_TREE;
+  gimple next_stmt, first_stmt = NULL;
   bool strided_store = false;
   unsigned int group_size, i;
   VEC(tree,heap) *dr_chain = NULL, *oprnds = NULL, *result_chain = NULL;
@@ -4736,16 +4793,17 @@ vectorizable_store (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt,
 
   /* Is vectorizable store? */
 
-  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+  if (!is_gimple_assign (stmt))
     return false;
 
-  scalar_dest = GIMPLE_STMT_OPERAND (stmt, 0);
+  scalar_dest = gimple_assign_lhs (stmt);
   if (TREE_CODE (scalar_dest) != ARRAY_REF
       && TREE_CODE (scalar_dest) != INDIRECT_REF
       && !STMT_VINFO_STRIDED_ACCESS (stmt_info))
     return false;
 
-  op = GIMPLE_STMT_OPERAND (stmt, 1);
+  gcc_assert (gimple_assign_single_p (stmt));
+  op = gimple_assign_rhs1 (stmt);
   if (!vect_is_simple_use (op, loop_vinfo, &def_stmt, &def, &dt))
     {
       if (vect_print_dump_info (REPORT_DETAILS))
@@ -4795,7 +4853,8 @@ vectorizable_store (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt,
           next_stmt = DR_GROUP_NEXT_DR (stmt_info);
           while (next_stmt)
             {
-              op = GIMPLE_STMT_OPERAND (next_stmt, 1);
+             gcc_assert (gimple_assign_single_p (next_stmt));
+             op = gimple_assign_rhs1 (next_stmt);
               if (!vect_is_simple_use (op, loop_vinfo, &def_stmt, &def, &dt))
                 {
                   if (vect_print_dump_info (REPORT_DETAILS))
@@ -4833,7 +4892,7 @@ vectorizable_store (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt,
          < DR_GROUP_SIZE (vinfo_for_stmt (first_stmt))
          && !slp)
        {
-         *vec_stmt = NULL_TREE;
+         *vec_stmt = NULL;
          return true;
        }
 
@@ -4906,8 +4965,8 @@ vectorizable_store (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt,
   prev_stmt_info = NULL;
   for (j = 0; j < ncopies; j++)
     {
-      tree new_stmt;
-      tree ptr_incr;
+      gimple new_stmt;
+      gimple ptr_incr;
 
       if (j == 0)
        {
@@ -4936,7 +4995,8 @@ vectorizable_store (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt,
                     there is no interleaving, GROUP_SIZE is 1, and only one 
                     iteration of the loop will be executed.  */
                  gcc_assert (next_stmt);
-                 op = GIMPLE_STMT_OPERAND (next_stmt, 1);
+                 gcc_assert (gimple_assign_single_p (next_stmt));
+                 op = gimple_assign_rhs1 (next_stmt);
 
                  vec_oprnd = vect_get_vec_def_for_operand (op, next_stmt, 
                                                            NULL);
@@ -4972,14 +5032,14 @@ vectorizable_store (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt,
              VEC_replace(tree, oprnds, i, vec_oprnd);
            }
          dataref_ptr = 
-               bump_vector_ptr (dataref_ptr, ptr_incr, bsi, stmt, NULL_TREE);
+               bump_vector_ptr (dataref_ptr, ptr_incr, gsi, stmt, NULL_TREE);
        }
 
       if (strided_store)
        {
          result_chain = VEC_alloc (tree, heap, group_size);     
          /* Permute.  */
-         if (!vect_permute_store_chain (dr_chain, group_size, stmt, bsi, 
+         if (!vect_permute_store_chain (dr_chain, group_size, stmt, gsi,
                                         &result_chain))
            return false;
        }
@@ -4989,7 +5049,7 @@ vectorizable_store (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt,
        {
          if (i > 0)
            /* Bump the vector pointer.  */
-           dataref_ptr = bump_vector_ptr (dataref_ptr, ptr_incr, bsi, stmt,
+           dataref_ptr = bump_vector_ptr (dataref_ptr, ptr_incr, gsi, stmt,
                                           NULL_TREE);
 
          if (slp)
@@ -5001,8 +5061,8 @@ vectorizable_store (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt,
 
          data_ref = build_fold_indirect_ref (dataref_ptr);
          /* Arguments are ready. Create the new vector stmt.  */
-         new_stmt = build_gimple_modify_stmt (data_ref, vec_oprnd);
-         vect_finish_stmt_generation (stmt, new_stmt, bsi);
+         new_stmt = gimple_build_assign (data_ref, vec_oprnd);
+         vect_finish_stmt_generation (stmt, new_stmt, gsi);
          mark_symbols_for_renaming (new_stmt);
          
           if (j == 0)
@@ -5078,7 +5138,7 @@ vectorizable_store (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt,
    Return value - the result of the loop-header phi node.  */
 
 static tree
-vect_setup_realignment (tree stmt, block_stmt_iterator *bsi,
+vect_setup_realignment (gimple stmt, gimple_stmt_iterator *gsi,
                         tree *realignment_token,
                        enum dr_alignment_support alignment_support_scheme,
                        tree init_addr,
@@ -5089,22 +5149,22 @@ vect_setup_realignment (tree stmt, block_stmt_iterator *bsi,
   loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
   struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
   edge pe;
-  tree scalar_dest = GIMPLE_STMT_OPERAND (stmt, 0);
+  tree scalar_dest = gimple_assign_lhs (stmt);
   tree vec_dest;
-  tree inc;
+  gimple inc;
   tree ptr;
   tree data_ref;
-  tree new_stmt;
+  gimple new_stmt;
   basic_block new_bb;
   tree msq_init = NULL_TREE;
   tree new_temp;
-  tree phi_stmt;
+  gimple phi_stmt;
   tree msq = NULL_TREE;
-  tree stmts = NULL_TREE;
+  gimple_seq stmts = NULL;
   bool inv_p;
   bool compute_in_loop = false;
   bool nested_in_vect_loop = nested_in_vect_loop_p (loop, stmt);
-  struct loop *containing_loop = (bb_for_stmt (stmt))->loop_father;
+  struct loop *containing_loop = (gimple_bb (stmt))->loop_father;
   struct loop *loop_for_initial_load;
 
   gcc_assert (alignment_support_scheme == dr_explicit_realign
@@ -5188,13 +5248,13 @@ vect_setup_realignment (tree stmt, block_stmt_iterator *bsi,
       ptr = vect_create_data_ref_ptr (stmt, loop_for_initial_load, NULL_TREE,
                                      &init_addr, &inc, true, &inv_p);
       data_ref = build1 (ALIGN_INDIRECT_REF, vectype, ptr);
-      new_stmt = build_gimple_modify_stmt (vec_dest, data_ref);
+      new_stmt = gimple_build_assign (vec_dest, data_ref);
       new_temp = make_ssa_name (vec_dest, new_stmt);
-      GIMPLE_STMT_OPERAND (new_stmt, 0) = new_temp;
+      gimple_assign_set_lhs (new_stmt, new_temp);
       mark_symbols_for_renaming (new_stmt);
-      new_bb = bsi_insert_on_edge_immediate (pe, new_stmt);
+      new_bb = gsi_insert_on_edge_immediate (pe, new_stmt);
       gcc_assert (!new_bb);
-      msq_init = GIMPLE_STMT_OPERAND (new_stmt, 0);
+      msq_init = gimple_assign_lhs (new_stmt);
     }
 
   /* 4. Create realignment token using a target builtin, if available.
@@ -5214,29 +5274,29 @@ vect_setup_realignment (tree stmt, block_stmt_iterator *bsi,
          init_addr = vect_create_addr_base_for_vector_ref (stmt, &stmts,
                                                        NULL_TREE, loop);
          pe = loop_preheader_edge (loop);
-         new_bb = bsi_insert_on_edge_immediate (pe, stmts);
+         new_bb = gsi_insert_seq_on_edge_immediate (pe, stmts);
          gcc_assert (!new_bb);
        }
 
       builtin_decl = targetm.vectorize.builtin_mask_for_load ();
-      new_stmt = build_call_expr (builtin_decl, 1, init_addr);
-      vec_dest = vect_create_destination_var (scalar_dest, 
-                                             TREE_TYPE (new_stmt));
-      new_stmt = build_gimple_modify_stmt (vec_dest, new_stmt);
+      new_stmt = gimple_build_call (builtin_decl, 1, init_addr);
+      vec_dest =
+       vect_create_destination_var (scalar_dest,
+                                    gimple_call_return_type (new_stmt));
       new_temp = make_ssa_name (vec_dest, new_stmt);
-      GIMPLE_STMT_OPERAND (new_stmt, 0) = new_temp;
+      gimple_call_set_lhs (new_stmt, new_temp);
 
       if (compute_in_loop)
-       bsi_insert_before (bsi, new_stmt, BSI_SAME_STMT);
+       gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
       else
        {
          /* Generate the misalignment computation outside LOOP.  */
          pe = loop_preheader_edge (loop);
-         new_bb = bsi_insert_on_edge_immediate (pe, new_stmt);
+         new_bb = gsi_insert_on_edge_immediate (pe, new_stmt);
          gcc_assert (!new_bb);
        }
 
-      *realignment_token = GIMPLE_STMT_OPERAND (new_stmt, 0);
+      *realignment_token = gimple_call_lhs (new_stmt);
 
       /* The result of the CALL_EXPR to this builtin is determined from
          the value of the parameter and no global variables are touched
@@ -5257,7 +5317,7 @@ vect_setup_realignment (tree stmt, block_stmt_iterator *bsi,
 
   pe = loop_preheader_edge (containing_loop);
   vec_dest = vect_create_destination_var (scalar_dest, vectype);
-  msq = make_ssa_name (vec_dest, NULL_TREE);
+  msq = make_ssa_name (vec_dest, NULL);
   phi_stmt = create_phi_node (msq, containing_loop->header);
   SSA_NAME_DEF_STMT (msq) = phi_stmt;
   add_phi_arg (phi_stmt, msq_init, pe);
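
Two tuples idioms from the realignment setup deserve a note: statements produced while gimplifying the address computation now come back as a gimple_seq that is inserted on the preheader edge in one call, and the loop-header phi is created from an SSA name whose defining statement is the phi itself. Compressed, with edge and variable names as in the surrounding code (the stmts guard is added here for clarity only):

    gimple_seq stmts = NULL;
    init_addr = vect_create_addr_base_for_vector_ref (stmt, &stmts, NULL_TREE, loop);
    if (stmts)
      gsi_insert_seq_on_edge_immediate (loop_preheader_edge (loop), stmts);

    msq = make_ssa_name (vec_dest, NULL);          /* no defining statement yet    */
    phi_stmt = create_phi_node (msq, containing_loop->header);
    SSA_NAME_DEF_STMT (msq) = phi_stmt;            /* the phi defines MSQ          */
    add_phi_arg (phi_stmt, msq_init, pe);          /* value on the preheader edge  */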
@@ -5393,13 +5453,13 @@ vect_strided_load_supported (tree vectype)
 static bool
 vect_permute_load_chain (VEC(tree,heap) *dr_chain, 
                         unsigned int length, 
-                        tree stmt, 
-                        block_stmt_iterator *bsi,
+                        gimple stmt,
+                        gimple_stmt_iterator *gsi,
                         VEC(tree,heap) **result_chain)
 {
-  tree perm_dest, perm_stmt, data_ref, first_vect, second_vect;
+  tree perm_dest, data_ref, first_vect, second_vect;
+  gimple perm_stmt;
   tree vectype = STMT_VINFO_VECTYPE (vinfo_for_stmt (stmt));
-  tree tmp;
   int i;
   unsigned int j;
 
@@ -5420,13 +5480,13 @@ vect_permute_load_chain (VEC(tree,heap) *dr_chain,
          DECL_GIMPLE_REG_P (perm_dest) = 1;
          add_referenced_var (perm_dest);
 
-         tmp = build2 (VEC_EXTRACT_EVEN_EXPR, vectype,
-                       first_vect, second_vect);
-         perm_stmt = build_gimple_modify_stmt (perm_dest, tmp);
+         perm_stmt = gimple_build_assign_with_ops (VEC_EXTRACT_EVEN_EXPR,
+                                                   perm_dest, first_vect,
+                                                   second_vect);
 
          data_ref = make_ssa_name (perm_dest, perm_stmt);
-         GIMPLE_STMT_OPERAND (perm_stmt, 0) = data_ref;
-         vect_finish_stmt_generation (stmt, perm_stmt, bsi);
+         gimple_assign_set_lhs (perm_stmt, data_ref);
+         vect_finish_stmt_generation (stmt, perm_stmt, gsi);
          mark_symbols_for_renaming (perm_stmt);
 
          VEC_replace (tree, *result_chain, j/2, data_ref);           
@@ -5436,12 +5496,12 @@ vect_permute_load_chain (VEC(tree,heap) *dr_chain,
          DECL_GIMPLE_REG_P (perm_dest) = 1;
          add_referenced_var (perm_dest);
 
-         tmp = build2 (VEC_EXTRACT_ODD_EXPR, vectype, 
-                       first_vect, second_vect);
-         perm_stmt = build_gimple_modify_stmt (perm_dest, tmp);
+         perm_stmt = gimple_build_assign_with_ops (VEC_EXTRACT_ODD_EXPR,
+                                                   perm_dest, first_vect,
+                                                   second_vect);
          data_ref = make_ssa_name (perm_dest, perm_stmt);
-         GIMPLE_STMT_OPERAND (perm_stmt, 0) = data_ref;
-         vect_finish_stmt_generation (stmt, perm_stmt, bsi);
+         gimple_assign_set_lhs (perm_stmt, data_ref);
+         vect_finish_stmt_generation (stmt, perm_stmt, gsi);
          mark_symbols_for_renaming (perm_stmt);
 
          VEC_replace (tree, *result_chain, j/2+length/2, data_ref);
@@ -5460,12 +5520,12 @@ vect_permute_load_chain (VEC(tree,heap) *dr_chain,
 */
 
 static bool
-vect_transform_strided_load (tree stmt, VEC(tree,heap) *dr_chain, int size,
-                            block_stmt_iterator *bsi)
+vect_transform_strided_load (gimple stmt, VEC(tree,heap) *dr_chain, int size,
+                            gimple_stmt_iterator *gsi)
 {
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
-  tree first_stmt = DR_GROUP_FIRST_DR (stmt_info);
-  tree next_stmt, new_stmt;
+  gimple first_stmt = DR_GROUP_FIRST_DR (stmt_info);
+  gimple next_stmt, new_stmt;
   VEC(tree,heap) *result_chain = NULL;
   unsigned int i, gap_count;
   tree tmp_data_ref;
@@ -5475,7 +5535,7 @@ vect_transform_strided_load (tree stmt, VEC(tree,heap) *dr_chain, int size,
      vectors, that are ready for vector computation.  */
   result_chain = VEC_alloc (tree, heap, size);
   /* Permute.  */
-  if (!vect_permute_load_chain (dr_chain, size, stmt, bsi, &result_chain))
+  if (!vect_permute_load_chain (dr_chain, size, stmt, gsi, &result_chain))
     return false;
 
   /* Put a permuted data-ref in the VECTORIZED_STMT field.  
@@ -5512,9 +5572,10 @@ vect_transform_strided_load (tree stmt, VEC(tree,heap) *dr_chain, int size,
            STMT_VINFO_VEC_STMT (vinfo_for_stmt (next_stmt)) = new_stmt;
          else
             {
-             tree prev_stmt = STMT_VINFO_VEC_STMT (vinfo_for_stmt (next_stmt));
-             tree rel_stmt = STMT_VINFO_RELATED_STMT (
-                                                      vinfo_for_stmt (prev_stmt));
+             gimple prev_stmt =
+               STMT_VINFO_VEC_STMT (vinfo_for_stmt (next_stmt));
+             gimple rel_stmt =
+               STMT_VINFO_RELATED_STMT (vinfo_for_stmt (prev_stmt));
              while (rel_stmt)
                {
                  prev_stmt = rel_stmt;
@@ -5546,44 +5607,44 @@ vect_transform_strided_load (tree stmt, VEC(tree,heap) *dr_chain, int size,
    Return FALSE if not a vectorizable STMT, TRUE otherwise.  */
 
 bool
-vectorizable_load (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt, 
+vectorizable_load (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
                   slp_tree slp_node)
 {
   tree scalar_dest;
   tree vec_dest = NULL;
   tree data_ref = NULL;
-  tree op;
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
   stmt_vec_info prev_stmt_info; 
   loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
   struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
-  struct loop *containing_loop = (bb_for_stmt (stmt))->loop_father;
+  struct loop *containing_loop = (gimple_bb (stmt))->loop_father;
   bool nested_in_vect_loop = nested_in_vect_loop_p (loop, stmt);
   struct data_reference *dr = STMT_VINFO_DATA_REF (stmt_info), *first_dr;
   tree vectype = STMT_VINFO_VECTYPE (stmt_info);
   tree new_temp;
   int mode;
-  tree new_stmt = NULL_TREE;
+  gimple new_stmt = NULL;
   tree dummy;
   enum dr_alignment_support alignment_support_scheme;
   tree dataref_ptr = NULL_TREE;
-  tree ptr_incr;
+  gimple ptr_incr;
   int nunits = TYPE_VECTOR_SUBPARTS (vectype);
   int ncopies = LOOP_VINFO_VECT_FACTOR (loop_vinfo) / nunits;
   int i, j, group_size;
   tree msq = NULL_TREE, lsq;
   tree offset = NULL_TREE;
   tree realignment_token = NULL_TREE;
-  tree phi = NULL_TREE;
+  gimple phi = NULL;
   VEC(tree,heap) *dr_chain = NULL;
   bool strided_load = false;
-  tree first_stmt;
+  gimple first_stmt;
   tree scalar_type;
   bool inv_p;
   bool compute_in_loop = false;
   struct loop *at_loop;
   int vec_num;
   bool slp = (slp_node != NULL);
+  enum tree_code code;
 
   /* FORNOW: SLP with multiple types is not supported. The SLP analysis verifies
       this, so we can safely override NCOPIES with 1 here.  */
@@ -5607,16 +5668,16 @@ vectorizable_load (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt,
     return false;
 
   /* Is vectorizable load? */
-  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+  if (!is_gimple_assign (stmt))
     return false;
 
-  scalar_dest = GIMPLE_STMT_OPERAND (stmt, 0);
+  scalar_dest = gimple_assign_lhs (stmt);
   if (TREE_CODE (scalar_dest) != SSA_NAME)
     return false;
 
-  op = GIMPLE_STMT_OPERAND (stmt, 1);
-  if (TREE_CODE (op) != ARRAY_REF 
-      && TREE_CODE (op) != INDIRECT_REF
+  code = gimple_assign_rhs_code (stmt);
+  if (code != ARRAY_REF
+      && code != INDIRECT_REF
       && !STMT_VINFO_STRIDED_ACCESS (stmt_info))
     return false;
 
@@ -5818,7 +5879,7 @@ vectorizable_load (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt,
        || alignment_support_scheme == dr_explicit_realign)
       && !compute_in_loop)
     {
-      msq = vect_setup_realignment (first_stmt, bsi, &realignment_token,
+      msq = vect_setup_realignment (first_stmt, gsi, &realignment_token,
                                    alignment_support_scheme, NULL_TREE,
                                    &at_loop);
       if (alignment_support_scheme == dr_explicit_realign_optimized)
@@ -5841,12 +5902,12 @@ vectorizable_load (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt,
                                                &inv_p);
       else
         dataref_ptr = 
-               bump_vector_ptr (dataref_ptr, ptr_incr, bsi, stmt, NULL_TREE);
+               bump_vector_ptr (dataref_ptr, ptr_incr, gsi, stmt, NULL_TREE);
 
       for (i = 0; i < vec_num; i++)
        {
          if (i > 0)
-           dataref_ptr = bump_vector_ptr (dataref_ptr, ptr_incr, bsi, stmt,
+           dataref_ptr = bump_vector_ptr (dataref_ptr, ptr_incr, gsi, stmt,
                                           NULL_TREE);
 
          /* 2. Create the vector-load in the loop.  */
@@ -5872,24 +5933,24 @@ vectorizable_load (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt,
                tree vs_minus_1 = size_int (TYPE_VECTOR_SUBPARTS (vectype) - 1);
 
                if (compute_in_loop)
-                 msq = vect_setup_realignment (first_stmt, bsi, 
+                 msq = vect_setup_realignment (first_stmt, gsi,
                                                &realignment_token,
                                                dr_explicit_realign, 
                                                dataref_ptr, NULL);
 
                data_ref = build1 (ALIGN_INDIRECT_REF, vectype, dataref_ptr);
                vec_dest = vect_create_destination_var (scalar_dest, vectype);
-               new_stmt = build_gimple_modify_stmt (vec_dest, data_ref);
+               new_stmt = gimple_build_assign (vec_dest, data_ref);
                new_temp = make_ssa_name (vec_dest, new_stmt);
-               GIMPLE_STMT_OPERAND (new_stmt, 0) = new_temp;
-               vect_finish_stmt_generation (stmt, new_stmt, bsi);
+               gimple_assign_set_lhs (new_stmt, new_temp);
+               vect_finish_stmt_generation (stmt, new_stmt, gsi);
                copy_virtual_operands (new_stmt, stmt);
                mark_symbols_for_renaming (new_stmt);
                msq = new_temp;
 
                bump = size_binop (MULT_EXPR, vs_minus_1,
                                   TYPE_SIZE_UNIT (scalar_type));
-               ptr = bump_vector_ptr (dataref_ptr, NULL_TREE, bsi, stmt, bump);
+               ptr = bump_vector_ptr (dataref_ptr, NULL, gsi, stmt, bump);
                data_ref = build1 (ALIGN_INDIRECT_REF, vectype, ptr);
                break;
              }
@@ -5900,10 +5961,10 @@ vectorizable_load (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt,
              gcc_unreachable ();
            }
          vec_dest = vect_create_destination_var (scalar_dest, vectype);
-         new_stmt = build_gimple_modify_stmt (vec_dest, data_ref);
+         new_stmt = gimple_build_assign (vec_dest, data_ref);
          new_temp = make_ssa_name (vec_dest, new_stmt);
-         GIMPLE_STMT_OPERAND (new_stmt, 0) = new_temp;
-         vect_finish_stmt_generation (stmt, new_stmt, bsi);
+         gimple_assign_set_lhs (new_stmt, new_temp);
+         vect_finish_stmt_generation (stmt, new_stmt, gsi);
          mark_symbols_for_renaming (new_stmt);
 
          /* 3. Handle explicit realignment if necessary/supported. Create in
@@ -5911,19 +5972,22 @@ vectorizable_load (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt,
          if (alignment_support_scheme == dr_explicit_realign_optimized
              || alignment_support_scheme == dr_explicit_realign)
            {
-             lsq = GIMPLE_STMT_OPERAND (new_stmt, 0);
+             tree tmp;
+
+             lsq = gimple_assign_lhs (new_stmt);
              if (!realignment_token)
                realignment_token = dataref_ptr;
              vec_dest = vect_create_destination_var (scalar_dest, vectype);
-             new_stmt = build3 (REALIGN_LOAD_EXPR, vectype, msq, lsq, 
-                                realignment_token);
-             new_stmt = build_gimple_modify_stmt (vec_dest, new_stmt);
+             tmp = build3 (REALIGN_LOAD_EXPR, vectype, msq, lsq,
+                           realignment_token);
+             new_stmt = gimple_build_assign (vec_dest, tmp);
              new_temp = make_ssa_name (vec_dest, new_stmt);
-             GIMPLE_STMT_OPERAND (new_stmt, 0) = new_temp;
-             vect_finish_stmt_generation (stmt, new_stmt, bsi);
+             gimple_assign_set_lhs (new_stmt, new_temp);
+             vect_finish_stmt_generation (stmt, new_stmt, gsi);
 
              if (alignment_support_scheme == dr_explicit_realign_optimized)
                {
+                 gcc_assert (phi);
                  if (i == vec_num - 1 && j == ncopies - 1)
                    add_phi_arg (phi, lsq, loop_latch_edge (containing_loop));
                  msq = lsq;
@@ -5944,19 +6008,19 @@ vectorizable_load (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt,
                  /* CHECKME: bitpos depends on endianness?  */
                  bitpos = bitsize_zero_node;
                  vec_inv = build3 (BIT_FIELD_REF, scalar_type, new_temp, 
-                                                           bitsize, bitpos);
+                                   bitsize, bitpos);
                  vec_dest = 
                        vect_create_destination_var (scalar_dest, NULL_TREE);
-                 new_stmt = build_gimple_modify_stmt (vec_dest, vec_inv);
+                 new_stmt = gimple_build_assign (vec_dest, vec_inv);
                   new_temp = make_ssa_name (vec_dest, new_stmt);
-                  GIMPLE_STMT_OPERAND (new_stmt, 0) = new_temp;
-                  vect_finish_stmt_generation (stmt, new_stmt, bsi);
+                 gimple_assign_set_lhs (new_stmt, new_temp);
+                 vect_finish_stmt_generation (stmt, new_stmt, gsi);
 
                  for (k = nunits - 1; k >= 0; --k)
                    t = tree_cons (NULL_TREE, new_temp, t);
                  /* FIXME: use build_constructor directly.  */
                  vec_inv = build_constructor_from_list (vectype, t);
-                 new_temp = vect_init_vector (stmt, vec_inv, vectype, bsi);
+                 new_temp = vect_init_vector (stmt, vec_inv, vectype, gsi);
                  new_stmt = SSA_NAME_DEF_STMT (new_temp);
                }
              else
@@ -5970,7 +6034,7 @@ vectorizable_load (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt,
 
          /* Store vector loads in the corresponding SLP_NODE.  */
          if (slp)
-           VEC_quick_push (tree, SLP_TREE_VEC_STMTS (slp_node), new_stmt);
+           VEC_quick_push (gimple, SLP_TREE_VEC_STMTS (slp_node), new_stmt);
        }
 
       /* FORNOW: SLP with multiple types is unsupported.  */
@@ -5979,7 +6043,7 @@ vectorizable_load (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt,
 
       if (strided_load)
        {
-         if (!vect_transform_strided_load (stmt, dr_chain, group_size, bsi))
+         if (!vect_transform_strided_load (stmt, dr_chain, group_size, gsi))
            return false;         
          *vec_stmt = STMT_VINFO_VEC_STMT (stmt_info);
           VEC_free (tree, heap, dr_chain);
@@ -6008,37 +6072,42 @@ vectorizable_load (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt,
    it can be supported.  */
 
 bool
-vectorizable_live_operation (tree stmt,
-                             block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
-                             tree *vec_stmt ATTRIBUTE_UNUSED)
+vectorizable_live_operation (gimple stmt,
+                            gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
+                            gimple *vec_stmt ATTRIBUTE_UNUSED)
 {
-  tree operation;
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
   loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
   struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
   int i;
   int op_type;
   tree op;
-  tree def, def_stmt;
+  tree def;
+  gimple def_stmt;
   enum vect_def_type dt; 
+  enum tree_code code;
+  enum gimple_rhs_class rhs_class;
 
   gcc_assert (STMT_VINFO_LIVE_P (stmt_info));
 
   if (STMT_VINFO_DEF_TYPE (stmt_info) == vect_reduction_def)
     return false;
 
-  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+  if (!is_gimple_assign (stmt))
     return false;
 
-  if (TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) != SSA_NAME)
+  if (TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
     return false;
 
   /* FORNOW. CHECKME. */
   if (nested_in_vect_loop_p (loop, stmt))
     return false;
 
-  operation = GIMPLE_STMT_OPERAND (stmt, 1);
-  op_type = TREE_OPERAND_LENGTH (operation);
+  code = gimple_assign_rhs_code (stmt);
+  op_type = TREE_CODE_LENGTH (code);
+  rhs_class = get_gimple_rhs_class (code);
+  gcc_assert (rhs_class != GIMPLE_UNARY_RHS || op_type == unary_op);
+  gcc_assert (rhs_class != GIMPLE_BINARY_RHS || op_type == binary_op);
 
   /* FORNOW: support only if all uses are invariant. This means
      that the scalar operations can remain in place, unvectorized.
@@ -6046,7 +6115,10 @@ vectorizable_live_operation (tree stmt,
 
   for (i = 0; i < op_type; i++)
     {
-      op = TREE_OPERAND (operation, i);
+      if (rhs_class == GIMPLE_SINGLE_RHS)
+       op = TREE_OPERAND (gimple_op (stmt, 1), i);
+      else
+       op = gimple_op (stmt, i + 1);
       if (op && !vect_is_simple_use (op, loop_vinfo, &def_stmt, &def, &dt))
         {
           if (vect_print_dump_info (REPORT_DETAILS))
@@ -6087,7 +6159,7 @@ vect_is_simple_cond (tree cond, loop_vec_info loop_vinfo)
 
   if (TREE_CODE (lhs) == SSA_NAME)
     {
-      tree lhs_def_stmt = SSA_NAME_DEF_STMT (lhs);
+      gimple lhs_def_stmt = SSA_NAME_DEF_STMT (lhs);
       if (!vect_is_simple_use (lhs, loop_vinfo, &lhs_def_stmt, &def, &dt))
        return false;
     }
@@ -6097,7 +6169,7 @@ vect_is_simple_cond (tree cond, loop_vec_info loop_vinfo)
 
   if (TREE_CODE (rhs) == SSA_NAME)
     {
-      tree rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
+      gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
       if (!vect_is_simple_use (rhs, loop_vinfo, &rhs_def_stmt, &def, &dt))
        return false;
     }
@@ -6118,7 +6190,8 @@ vect_is_simple_cond (tree cond, loop_vec_info loop_vinfo)
    Return FALSE if not a vectorizable STMT, TRUE otherwise.  */
 
 bool
-vectorizable_condition (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
+vectorizable_condition (gimple stmt, gimple_stmt_iterator *gsi,
+                       gimple *vec_stmt)
 {
   tree scalar_dest = NULL_TREE;
   tree vec_dest = NULL_TREE;
@@ -6135,6 +6208,7 @@ vectorizable_condition (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
   enum vect_def_type dt;
   int nunits = TYPE_VECTOR_SUBPARTS (vectype);
   int ncopies = LOOP_VINFO_VECT_FACTOR (loop_vinfo) / nunits;
+  enum tree_code code;
 
   gcc_assert (ncopies >= 1);
   if (ncopies > 1)
@@ -6159,14 +6233,16 @@ vectorizable_condition (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
     }
 
   /* Is vectorizable conditional operation?  */
-  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
+  if (!is_gimple_assign (stmt))
     return false;
 
-  op = GIMPLE_STMT_OPERAND (stmt, 1);
+  code = gimple_assign_rhs_code (stmt);
 
-  if (TREE_CODE (op) != COND_EXPR)
+  if (code != COND_EXPR)
     return false;
 
+  gcc_assert (gimple_assign_single_p (stmt));
+  op = gimple_assign_rhs1 (stmt);
   cond_expr = TREE_OPERAND (op, 0);
   then_clause = TREE_OPERAND (op, 1);
   else_clause = TREE_OPERAND (op, 2);
@@ -6181,7 +6257,7 @@ vectorizable_condition (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
 
   if (TREE_CODE (then_clause) == SSA_NAME)
     {
-      tree then_def_stmt = SSA_NAME_DEF_STMT (then_clause);
+      gimple then_def_stmt = SSA_NAME_DEF_STMT (then_clause);
       if (!vect_is_simple_use (then_clause, loop_vinfo, 
                               &then_def_stmt, &def, &dt))
        return false;
@@ -6193,7 +6269,7 @@ vectorizable_condition (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
 
   if (TREE_CODE (else_clause) == SSA_NAME)
     {
-      tree else_def_stmt = SSA_NAME_DEF_STMT (else_clause);
+      gimple else_def_stmt = SSA_NAME_DEF_STMT (else_clause);
       if (!vect_is_simple_use (else_clause, loop_vinfo, 
                               &else_def_stmt, &def, &dt))
        return false;
@@ -6215,7 +6291,7 @@ vectorizable_condition (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
   /* Transform */
 
   /* Handle def.  */
-  scalar_dest = GIMPLE_STMT_OPERAND (stmt, 0);
+  scalar_dest = gimple_assign_lhs (stmt);
   vec_dest = vect_create_destination_var (scalar_dest, vectype);
 
   /* Handle cond expr.  */
@@ -6232,10 +6308,10 @@ vectorizable_condition (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
   vec_cond_expr = build3 (VEC_COND_EXPR, vectype, 
                          vec_compare, vec_then_clause, vec_else_clause);
 
-  *vec_stmt = build_gimple_modify_stmt (vec_dest, vec_cond_expr);
+  *vec_stmt = gimple_build_assign (vec_dest, vec_cond_expr);
   new_temp = make_ssa_name (vec_dest, *vec_stmt);
-  GIMPLE_STMT_OPERAND (*vec_stmt, 0) = new_temp;
-  vect_finish_stmt_generation (stmt, *vec_stmt, bsi);
+  gimple_assign_set_lhs (*vec_stmt, new_temp);
+  vect_finish_stmt_generation (stmt, *vec_stmt, gsi);
   
   return true;
 }
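
As the tail of vectorizable_condition shows, not every right-hand side is decomposed into a code-plus-operands tuple: the ternary VEC_COND_EXPR is still built as a tree and handed whole to gimple_build_assign, which keeps it as a single RHS operand. In miniature (operands as computed earlier in the function):

    tree vec_cond_expr = build3 (VEC_COND_EXPR, vectype,
                                 vec_compare, vec_then_clause, vec_else_clause);
    *vec_stmt = gimple_build_assign (vec_dest, vec_cond_expr);  /* RHS kept as one tree */
    new_temp = make_ssa_name (vec_dest, *vec_stmt);
    gimple_assign_set_lhs (*vec_stmt, new_temp);
    vect_finish_stmt_generation (stmt, *vec_stmt, gsi);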
@@ -6246,57 +6322,57 @@ vectorizable_condition (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
    Create a vectorized stmt to replace STMT, and insert it at BSI.  */
 
 static bool
-vect_transform_stmt (tree stmt, block_stmt_iterator *bsi, bool *strided_store, 
-                    slp_tree slp_node)
+vect_transform_stmt (gimple stmt, gimple_stmt_iterator *gsi,
+                    bool *strided_store, slp_tree slp_node)
 {
   bool is_store = false;
-  tree vec_stmt = NULL_TREE;
+  gimple vec_stmt = NULL;
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
-  tree orig_stmt_in_pattern;
+  gimple orig_stmt_in_pattern;
   bool done;
 
   switch (STMT_VINFO_TYPE (stmt_info))
     {
     case type_demotion_vec_info_type:
       gcc_assert (!slp_node);
-      done = vectorizable_type_demotion (stmt, bsi, &vec_stmt);
+      done = vectorizable_type_demotion (stmt, gsi, &vec_stmt);
       gcc_assert (done);
       break;
 
     case type_promotion_vec_info_type:
       gcc_assert (!slp_node);
-      done = vectorizable_type_promotion (stmt, bsi, &vec_stmt);
+      done = vectorizable_type_promotion (stmt, gsi, &vec_stmt);
       gcc_assert (done);
       break;
 
     case type_conversion_vec_info_type:
-      done = vectorizable_conversion (stmt, bsi, &vec_stmt, slp_node);
+      done = vectorizable_conversion (stmt, gsi, &vec_stmt, slp_node);
       gcc_assert (done);
       break;
 
     case induc_vec_info_type:
       gcc_assert (!slp_node);
-      done = vectorizable_induction (stmt, bsi, &vec_stmt);
+      done = vectorizable_induction (stmt, gsi, &vec_stmt);
       gcc_assert (done);
       break;
 
     case op_vec_info_type:
-      done = vectorizable_operation (stmt, bsi, &vec_stmt, slp_node);
+      done = vectorizable_operation (stmt, gsi, &vec_stmt, slp_node);
       gcc_assert (done);
       break;
 
     case assignment_vec_info_type:
-      done = vectorizable_assignment (stmt, bsi, &vec_stmt, slp_node);
+      done = vectorizable_assignment (stmt, gsi, &vec_stmt, slp_node);
       gcc_assert (done);
       break;
 
     case load_vec_info_type:
-      done = vectorizable_load (stmt, bsi, &vec_stmt, slp_node);
+      done = vectorizable_load (stmt, gsi, &vec_stmt, slp_node);
       gcc_assert (done);
       break;
 
     case store_vec_info_type:
-      done = vectorizable_store (stmt, bsi, &vec_stmt, slp_node);
+      done = vectorizable_store (stmt, gsi, &vec_stmt, slp_node);
       gcc_assert (done);
       if (STMT_VINFO_STRIDED_ACCESS (stmt_info))
        {
@@ -6314,18 +6390,18 @@ vect_transform_stmt (tree stmt, block_stmt_iterator *bsi, bool *strided_store,
 
     case condition_vec_info_type:
       gcc_assert (!slp_node);
-      done = vectorizable_condition (stmt, bsi, &vec_stmt);
+      done = vectorizable_condition (stmt, gsi, &vec_stmt);
       gcc_assert (done);
       break;
 
     case call_vec_info_type:
       gcc_assert (!slp_node);
-      done = vectorizable_call (stmt, bsi, &vec_stmt);
+      done = vectorizable_call (stmt, gsi, &vec_stmt);
       break;
 
     case reduc_vec_info_type:
       gcc_assert (!slp_node);
-      done = vectorizable_reduction (stmt, bsi, &vec_stmt);
+      done = vectorizable_reduction (stmt, gsi, &vec_stmt);
       gcc_assert (done);
       break;
 
@@ -6341,7 +6417,7 @@ vect_transform_stmt (tree stmt, block_stmt_iterator *bsi, bool *strided_store,
   if (STMT_VINFO_LIVE_P (stmt_info)
       && STMT_VINFO_TYPE (stmt_info) != reduc_vec_info_type)
     {
-      done = vectorizable_live_operation (stmt, bsi, &vec_stmt);
+      done = vectorizable_live_operation (stmt, gsi, &vec_stmt);
       gcc_assert (done);
     }
 
@@ -6375,19 +6451,20 @@ vect_transform_stmt (tree stmt, block_stmt_iterator *bsi, bool *strided_store,
 static tree
 vect_build_loop_niters (loop_vec_info loop_vinfo)
 {
-  tree ni_name, stmt, var;
+  tree ni_name, var;
+  gimple_seq stmts = NULL;
   edge pe;
   struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
   tree ni = unshare_expr (LOOP_VINFO_NITERS (loop_vinfo));
 
   var = create_tmp_var (TREE_TYPE (ni), "niters");
   add_referenced_var (var);
-  ni_name = force_gimple_operand (ni, &stmt, false, var);
+  ni_name = force_gimple_operand (ni, &stmts, false, var);
 
   pe = loop_preheader_edge (loop);
-  if (stmt)
+  if (stmts)
     {
-      basic_block new_bb = bsi_insert_on_edge_immediate (pe, stmt);
+      basic_block new_bb = gsi_insert_seq_on_edge_immediate (pe, stmts);
       gcc_assert (!new_bb);
     }
       
@@ -6412,7 +6489,8 @@ vect_generate_tmps_on_preheader (loop_vec_info loop_vinfo,
 
   edge pe;
   basic_block new_bb;
-  tree stmt, ni_name;
+  gimple_seq stmts;
+  tree ni_name;
   tree var;
   tree ratio_name;
   tree ratio_mult_vf_name;
@@ -6437,9 +6515,10 @@ vect_generate_tmps_on_preheader (loop_vec_info loop_vinfo,
       var = create_tmp_var (TREE_TYPE (ni), "bnd");
       add_referenced_var (var);
 
-      ratio_name = force_gimple_operand (ratio_name, &stmt, true, var);
+      stmts = NULL;
+      ratio_name = force_gimple_operand (ratio_name, &stmts, true, var);
       pe = loop_preheader_edge (loop);
-      new_bb = bsi_insert_on_edge_immediate (pe, stmt);
+      new_bb = gsi_insert_seq_on_edge_immediate (pe, stmts);
       gcc_assert (!new_bb);
     }
        
@@ -6452,10 +6531,11 @@ vect_generate_tmps_on_preheader (loop_vec_info loop_vinfo,
       var = create_tmp_var (TREE_TYPE (ni), "ratio_mult_vf");
       add_referenced_var (var);
 
-      ratio_mult_vf_name = force_gimple_operand (ratio_mult_vf_name, &stmt,
+      stmts = NULL;
+      ratio_mult_vf_name = force_gimple_operand (ratio_mult_vf_name, &stmts,
                                                 true, var);
       pe = loop_preheader_edge (loop);
-      new_bb = bsi_insert_on_edge_immediate (pe, stmt);
+      new_bb = gsi_insert_seq_on_edge_immediate (pe, stmts);
       gcc_assert (!new_bb);
     }
 
@@ -6514,7 +6594,8 @@ vect_update_ivs_after_vectorizer (loop_vec_info loop_vinfo, tree niters,
 {
   struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
   basic_block exit_bb = single_exit (loop)->dest;
-  tree phi, phi1;
+  gimple phi, phi1;
+  gimple_stmt_iterator gsi, gsi1;
   basic_block update_bb = update_e->dest;
 
   /* gcc_assert (vect_can_advance_ivs_p (loop_vinfo)); */
@@ -6522,21 +6603,23 @@ vect_update_ivs_after_vectorizer (loop_vec_info loop_vinfo, tree niters,
   /* Make sure there exists a single-predecessor exit bb:  */
   gcc_assert (single_pred_p (exit_bb));
 
-  for (phi = phi_nodes (loop->header), phi1 = phi_nodes (update_bb); 
-       phi && phi1; 
-       phi = PHI_CHAIN (phi), phi1 = PHI_CHAIN (phi1))
+  for (gsi = gsi_start_phis (loop->header), gsi1 = gsi_start_phis (update_bb);
+       !gsi_end_p (gsi) && !gsi_end_p (gsi1);
+       gsi_next (&gsi), gsi_next (&gsi1))
     {
       tree access_fn = NULL;
       tree evolution_part;
       tree init_expr;
       tree step_expr;
       tree var, ni, ni_name;
-      block_stmt_iterator last_bsi;
+      gimple_stmt_iterator last_gsi;
 
+      phi = gsi_stmt (gsi);
+      phi1 = gsi_stmt (gsi1);
       if (vect_print_dump_info (REPORT_DETAILS))
         {
           fprintf (vect_dump, "vect_update_ivs_after_vectorizer: phi: ");
-          print_generic_expr (vect_dump, phi, TDF_SLIM);
+         print_gimple_stmt (vect_dump, phi, 0, TDF_SLIM);
         }
 
       /* Skip virtual PHIs.  */
@@ -6588,9 +6671,9 @@ vect_update_ivs_after_vectorizer (loop_vec_info loop_vinfo, tree niters,
       var = create_tmp_var (TREE_TYPE (init_expr), "tmp");
       add_referenced_var (var);
 
-      last_bsi = bsi_last (exit_bb);
-      ni_name = force_gimple_operand_bsi (&last_bsi, ni, false, var,
-                                         true, BSI_SAME_STMT);
+      last_gsi = gsi_last_bb (exit_bb);
+      ni_name = force_gimple_operand_gsi (&last_gsi, ni, false, var,
+                                         true, GSI_SAME_STMT);
       
       /* Fix phi expressions in the successor bb.  */
       SET_PHI_ARG_DEF (phi1, update_e->dest_idx, ni_name);
@@ -6665,7 +6748,7 @@ vect_do_peeling_for_loop_bound (loop_vec_info loop_vinfo, tree *ratio)
 
   /* If cost model check not done during versioning and 
      peeling for alignment.  */
-  if (!VEC_length (tree, LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo))
+  if (!VEC_length (gimple, LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo))
       && !VEC_length (ddr_p, LOOP_VINFO_MAY_ALIAS_DDRS (loop_vinfo))
       && !LOOP_PEELING_FOR_ALIGNMENT (loop_vinfo))
     {
@@ -6745,11 +6828,12 @@ vect_gen_niters_for_prolog_loop (loop_vec_info loop_vinfo, tree loop_niters)
 {
   struct data_reference *dr = LOOP_VINFO_UNALIGNED_DR (loop_vinfo);
   struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
-  tree var, stmt;
+  tree var;
+  gimple_seq stmts;
   tree iters, iters_name;
   edge pe;
   basic_block new_bb;
-  tree dr_stmt = DR_STMT (dr);
+  gimple dr_stmt = DR_STMT (dr);
   stmt_vec_info stmt_info = vinfo_for_stmt (dr_stmt);
   tree vectype = STMT_VINFO_VECTYPE (stmt_info);
   int vectype_align = TYPE_ALIGN (vectype) / BITS_PER_UNIT;
@@ -6776,7 +6860,7 @@ vect_gen_niters_for_prolog_loop (loop_vec_info loop_vinfo, tree loop_niters)
     }
   else
     {
-      tree new_stmts = NULL_TREE;
+      gimple_seq new_stmts = NULL;
       tree start_addr = vect_create_addr_base_for_vector_ref (dr_stmt, 
                                                &new_stmts, NULL_TREE, loop);
       tree ptr_type = TREE_TYPE (start_addr);
@@ -6790,7 +6874,7 @@ vect_gen_niters_for_prolog_loop (loop_vec_info loop_vinfo, tree loop_niters)
       tree byte_misalign;
       tree elem_misalign;
 
-      new_bb = bsi_insert_on_edge_immediate (pe, new_stmts);
+      new_bb = gsi_insert_seq_on_edge_immediate (pe, new_stmts);
       gcc_assert (!new_bb);
   
       /* Create:  byte_misalign = addr & (vectype_size - 1)  */
@@ -6822,12 +6906,13 @@ vect_gen_niters_for_prolog_loop (loop_vec_info loop_vinfo, tree loop_niters)
 
   var = create_tmp_var (niters_type, "prolog_loop_niters");
   add_referenced_var (var);
-  iters_name = force_gimple_operand (iters, &stmt, false, var);
+  stmts = NULL;
+  iters_name = force_gimple_operand (iters, &stmts, false, var);
 
   /* Insert stmt on loop preheader edge.  */
-  if (stmt)
+  if (stmts)
     {
-      basic_block new_bb = bsi_insert_on_edge_immediate (pe, stmt);
+      basic_block new_bb = gsi_insert_seq_on_edge_immediate (pe, stmts);
       gcc_assert (!new_bb);
     }
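force_gimple_operand now returns any statements it creates in a gimple_seq rather than a tree statement list, and the whole sequence is inserted at once.  A minimal usage sketch under the same assumptions as the hunks above (pe is the preheader edge; expr and var stand for any expression and temporary):

  gimple_seq stmts = NULL;                      /* must be reset before each call  */
  tree val = force_gimple_operand (expr, &stmts, false, var);
  if (stmts)                                    /* insert everything on the edge  */
    {
      basic_block new_bb = gsi_insert_seq_on_edge_immediate (pe, stmts);
      gcc_assert (!new_bb);                     /* no new block is expected here  */
    }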
 
@@ -6905,7 +6990,7 @@ vect_do_peeling_for_alignment (loop_vec_info loop_vinfo)
   
 
   /* If cost model check not done during versioning.  */
-  if (!VEC_length (tree, LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo))
+  if (!VEC_length (gimple, LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo))
       && !VEC_length (ddr_p, LOOP_VINFO_MAY_ALIAS_DDRS (loop_vinfo)))
     {
       check_profitability = true;
@@ -6971,12 +7056,12 @@ vect_do_peeling_for_alignment (loop_vec_info loop_vinfo)
 static void
 vect_create_cond_for_align_checks (loop_vec_info loop_vinfo,
                                    tree *cond_expr,
-                                   tree *cond_expr_stmt_list)
+                                  gimple_seq *cond_expr_stmt_list)
 {
   struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
-  VEC(tree,heap) *may_misalign_stmts
+  VEC(gimple,heap) *may_misalign_stmts
     = LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo);
-  tree ref_stmt, tmp;
+  gimple ref_stmt;
   int mask = LOOP_VINFO_PTR_MASK (loop_vinfo);
   tree mask_cst;
   unsigned int i;
@@ -6984,7 +7069,8 @@ vect_create_cond_for_align_checks (loop_vec_info loop_vinfo,
   tree int_ptrsize_type;
   char tmp_name[20];
   tree or_tmp_name = NULL_TREE;
-  tree and_tmp, and_tmp_name, and_stmt;
+  tree and_tmp, and_tmp_name;
+  gimple and_stmt;
   tree ptrsize_zero;
   tree part_cond_expr;
 
@@ -7001,28 +7087,28 @@ vect_create_cond_for_align_checks (loop_vec_info loop_vinfo,
   /* Create expression (mask & (dr_1 || ... || dr_n)) where dr_i is the address
      of the first vector of the i'th data reference. */
 
-  for (i = 0; VEC_iterate (tree, may_misalign_stmts, i, ref_stmt); i++)
+  for (i = 0; VEC_iterate (gimple, may_misalign_stmts, i, ref_stmt); i++)
     {
-      tree new_stmt_list = NULL_TREE;   
+      gimple_seq new_stmt_list = NULL;
       tree addr_base;
-      tree addr_tmp, addr_tmp_name, addr_stmt;
-      tree or_tmp, new_or_tmp_name, or_stmt;
+      tree addr_tmp, addr_tmp_name;
+      tree or_tmp, new_or_tmp_name;
+      gimple addr_stmt, or_stmt;
 
       /* create: addr_tmp = (int)(address_of_first_vector) */
-      addr_base = vect_create_addr_base_for_vector_ref (ref_stmt, 
-                                       &new_stmt_list, NULL_TREE, loop);
-
-      if (new_stmt_list != NULL_TREE)
-        append_to_statement_list_force (new_stmt_list, cond_expr_stmt_list);
+      addr_base =
+       vect_create_addr_base_for_vector_ref (ref_stmt, &new_stmt_list,
+                                             NULL_TREE, loop);
+      if (new_stmt_list != NULL)
+       gimple_seq_add_seq (cond_expr_stmt_list, new_stmt_list);
 
       sprintf (tmp_name, "%s%d", "addr2int", i);
       addr_tmp = create_tmp_var (int_ptrsize_type, tmp_name);
       add_referenced_var (addr_tmp);
-      addr_tmp_name = make_ssa_name (addr_tmp, NULL_TREE);
-      addr_stmt = fold_convert (int_ptrsize_type, addr_base);
-      addr_stmt = build_gimple_modify_stmt (addr_tmp_name, addr_stmt);
+      addr_tmp_name = make_ssa_name (addr_tmp, NULL);
+      addr_stmt = gimple_build_assign (addr_tmp_name, addr_base);
       SSA_NAME_DEF_STMT (addr_tmp_name) = addr_stmt;
-      append_to_statement_list_force (addr_stmt, cond_expr_stmt_list);
+      gimple_seq_add_stmt (cond_expr_stmt_list, addr_stmt);
 
       /* The addresses are ORed together.  */
 
@@ -7032,12 +7118,12 @@ vect_create_cond_for_align_checks (loop_vec_info loop_vinfo,
           sprintf (tmp_name, "%s%d", "orptrs", i);
           or_tmp = create_tmp_var (int_ptrsize_type, tmp_name);
           add_referenced_var (or_tmp);
-          new_or_tmp_name = make_ssa_name (or_tmp, NULL_TREE);
-         tmp = build2 (BIT_IOR_EXPR, int_ptrsize_type,
-                       or_tmp_name, addr_tmp_name);
-          or_stmt = build_gimple_modify_stmt (new_or_tmp_name, tmp);
+         new_or_tmp_name = make_ssa_name (or_tmp, NULL);
+         or_stmt = gimple_build_assign_with_ops (BIT_IOR_EXPR,
+                                                 new_or_tmp_name,
+                                                 or_tmp_name, addr_tmp_name);
           SSA_NAME_DEF_STMT (new_or_tmp_name) = or_stmt;
-          append_to_statement_list_force (or_stmt, cond_expr_stmt_list);
+         gimple_seq_add_stmt (cond_expr_stmt_list, or_stmt);
           or_tmp_name = new_or_tmp_name;
         }
       else
@@ -7050,12 +7136,12 @@ vect_create_cond_for_align_checks (loop_vec_info loop_vinfo,
   /* create: and_tmp = or_tmp & mask  */
   and_tmp = create_tmp_var (int_ptrsize_type, "andmask" );
   add_referenced_var (and_tmp);
-  and_tmp_name = make_ssa_name (and_tmp, NULL_TREE);
+  and_tmp_name = make_ssa_name (and_tmp, NULL);
 
-  tmp = build2 (BIT_AND_EXPR, int_ptrsize_type, or_tmp_name, mask_cst);
-  and_stmt = build_gimple_modify_stmt (and_tmp_name, tmp);
+  and_stmt = gimple_build_assign_with_ops (BIT_AND_EXPR, and_tmp_name,
+                                          or_tmp_name, mask_cst);
   SSA_NAME_DEF_STMT (and_tmp_name) = and_stmt;
-  append_to_statement_list_force (and_stmt, cond_expr_stmt_list);
+  gimple_seq_add_stmt (cond_expr_stmt_list, and_stmt);
 
   /* Make and_tmp the left operand of the conditional test against zero.
      If and_tmp has a nonzero bit then some address is unaligned.  */
@@ -7124,7 +7210,7 @@ vect_vfa_segment_size (struct data_reference *dr, tree vect_factor)
 static void
 vect_create_cond_for_alias_checks (loop_vec_info loop_vinfo,
                                   tree * cond_expr,
-                                  tree * cond_expr_stmt_list)
+                                  gimple_seq * cond_expr_stmt_list)
 {
   struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
   VEC (ddr_p, heap) * may_alias_ddrs =
@@ -7151,10 +7237,10 @@ vect_create_cond_for_alias_checks (loop_vec_info loop_vinfo,
   for (i = 0; VEC_iterate (ddr_p, may_alias_ddrs, i, ddr); i++)
     {
       struct data_reference *dr_a, *dr_b;
-      tree dr_group_first_a, dr_group_first_b;
+      gimple dr_group_first_a, dr_group_first_b;
       tree addr_base_a, addr_base_b;
       tree segment_length_a, segment_length_b;
-      tree stmt_a, stmt_b;
+      gimple stmt_a, stmt_b;
 
       dr_a = DDR_A (ddr);
       stmt_a = DR_STMT (DDR_A (ddr));
@@ -7209,7 +7295,7 @@ vect_create_cond_for_alias_checks (loop_vec_info loop_vinfo,
       
       if (*cond_expr)
        *cond_expr = fold_build2 (TRUTH_AND_EXPR, boolean_type_node,
-                                 *cond_expr, part_cond_expr);
+                                 *cond_expr, part_cond_expr);
       else
        *cond_expr = part_cond_expr;
     }
@@ -7241,15 +7327,16 @@ vect_loop_versioning (loop_vec_info loop_vinfo)
   struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
   struct loop *nloop;
   tree cond_expr = NULL_TREE;
-  tree cond_expr_stmt_list = NULL_TREE;
+  gimple_seq cond_expr_stmt_list = NULL;
   basic_block condition_bb;
-  block_stmt_iterator cond_exp_bsi;
+  gimple_stmt_iterator gsi, cond_exp_gsi;
   basic_block merge_bb;
   basic_block new_exit_bb;
   edge new_exit_e, e;
-  tree orig_phi, new_phi, arg;
+  gimple orig_phi, new_phi;
+  tree arg;
   unsigned prob = 4 * REG_BR_PROB_BASE / 5;
-  tree gimplify_stmt_list;
+  gimple_seq gimplify_stmt_list = NULL;
   tree scalar_loop_iters = LOOP_VINFO_NITERS (loop_vinfo);
   int min_profitable_iters = 0;
   unsigned int th;
@@ -7267,7 +7354,7 @@ vect_loop_versioning (loop_vec_info loop_vinfo)
   cond_expr = force_gimple_operand (cond_expr, &cond_expr_stmt_list,
                                    false, NULL_TREE);
 
-  if (VEC_length (tree, LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo)))
+  if (VEC_length (gimple, LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo)))
       vect_create_cond_for_align_checks (loop_vinfo, &cond_expr,
                                         &cond_expr_stmt_list);
 
@@ -7278,9 +7365,8 @@ vect_loop_versioning (loop_vec_info loop_vinfo)
   cond_expr =
     fold_build2 (NE_EXPR, boolean_type_node, cond_expr, integer_zero_node);
   cond_expr =
-    force_gimple_operand (cond_expr, &gimplify_stmt_list, true,
-                         NULL_TREE);
-  append_to_statement_list (gimplify_stmt_list, &cond_expr_stmt_list);
+    force_gimple_operand (cond_expr, &gimplify_stmt_list, true, NULL_TREE);
+  gimple_seq_add_seq (&cond_expr_stmt_list, gimplify_stmt_list);
 
   initialize_original_copy_tables ();
   nloop = loop_version (loop, cond_expr, &condition_bb,
@@ -7301,9 +7387,9 @@ vect_loop_versioning (loop_vec_info loop_vinfo)
   new_exit_e = single_exit (loop);
   e = EDGE_SUCC (new_exit_bb, 0);
 
-  for (orig_phi = phi_nodes (merge_bb); orig_phi; 
-       orig_phi = PHI_CHAIN (orig_phi))
+  for (gsi = gsi_start_phis (merge_bb); !gsi_end_p (gsi); gsi_next (&gsi))
     {
+      orig_phi = gsi_stmt (gsi);
       new_phi = create_phi_node (SSA_NAME_VAR (PHI_RESULT (orig_phi)),
                                  new_exit_bb);
       arg = PHI_ARG_DEF_FROM_EDGE (orig_phi, e);
@@ -7316,8 +7402,8 @@ vect_loop_versioning (loop_vec_info loop_vinfo)
   update_ssa (TODO_update_ssa);
   if (cond_expr_stmt_list)
     {
-      cond_exp_bsi = bsi_last (condition_bb);
-      bsi_insert_before (&cond_exp_bsi, cond_expr_stmt_list, BSI_SAME_STMT);
+      cond_exp_gsi = gsi_last_bb (condition_bb);
+      gsi_insert_seq_before (&cond_exp_gsi, cond_expr_stmt_list, GSI_SAME_STMT);
     }
 }
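The PHI walks rewritten throughout this patch all follow the same pattern: the old phi_nodes/PHI_CHAIN chain is replaced by a PHI statement iterator.  Schematically (a sketch only, with bb standing for any basic block):

  gimple_stmt_iterator gsi;
  gimple phi;

  /* Old:  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))  */
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      phi = gsi_stmt (gsi);      /* the PHI node is now a gimple stmt  */
      /* ... PHI_RESULT (phi), PHI_ARG_DEF_FROM_EDGE (phi, e), etc. still apply.  */
    }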
 
@@ -7325,17 +7411,17 @@ vect_loop_versioning (loop_vec_info loop_vinfo)
    stmt_vec_info.  */
 
 static void
-vect_remove_stores (tree first_stmt)
+vect_remove_stores (gimple first_stmt)
 {
-  tree next = first_stmt;
-  tree tmp;
-  block_stmt_iterator next_si;
+  gimple next = first_stmt;
+  gimple tmp;
+  gimple_stmt_iterator next_si;
 
   while (next)
     {
       /* Free the attached stmt_vec_info and remove the stmt.  */
-      next_si = bsi_for_stmt (next);
-      bsi_remove (&next_si, true);
+      next_si = gsi_for_stmt (next);
+      gsi_remove (&next_si, true);
       tmp = DR_GROUP_NEXT_DR (vinfo_for_stmt (next));
       free_stmt_vec_info (next);
       next = tmp;
@@ -7348,9 +7434,9 @@ vect_remove_stores (tree first_stmt)
 static bool
 vect_schedule_slp_instance (slp_tree node, unsigned int vec_stmts_size)
 {
-  tree stmt;
+  gimple stmt;
   bool strided_store, is_store;
-  block_stmt_iterator si;
+  gimple_stmt_iterator si;
   stmt_vec_info stmt_info;
 
   if (!node)
@@ -7359,18 +7445,18 @@ vect_schedule_slp_instance (slp_tree node, unsigned int vec_stmts_size)
   vect_schedule_slp_instance (SLP_TREE_LEFT (node), vec_stmts_size);
   vect_schedule_slp_instance (SLP_TREE_RIGHT (node), vec_stmts_size);
   
-  stmt = VEC_index(tree, SLP_TREE_SCALAR_STMTS (node), 0);
+  stmt = VEC_index(gimple, SLP_TREE_SCALAR_STMTS (node), 0);
   stmt_info = vinfo_for_stmt (stmt);
-  SLP_TREE_VEC_STMTS (node) = VEC_alloc (tree, heap, vec_stmts_size);
+  SLP_TREE_VEC_STMTS (node) = VEC_alloc (gimple, heap, vec_stmts_size);
   SLP_TREE_NUMBER_OF_VEC_STMTS (node) = vec_stmts_size;
 
   if (vect_print_dump_info (REPORT_DETAILS))
     {
       fprintf (vect_dump, "------>vectorizing SLP node starting from: ");
-      print_generic_expr (vect_dump, stmt, TDF_SLIM);
+      print_gimple_stmt (vect_dump, stmt, 0, TDF_SLIM);
     }  
 
-  si = bsi_for_stmt (stmt);
+  si = gsi_for_stmt (stmt);
   is_store = vect_transform_stmt (stmt, &si, &strided_store, node);
   if (is_store)
     {
@@ -7436,7 +7522,7 @@ vect_transform_loop (loop_vec_info loop_vinfo)
   struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
   basic_block *bbs = LOOP_VINFO_BBS (loop_vinfo);
   int nbbs = loop->num_nodes;
-  block_stmt_iterator si;
+  gimple_stmt_iterator si;
   int i;
   tree ratio = NULL;
   int vectorization_factor = LOOP_VINFO_VECT_FACTOR (loop_vinfo);
@@ -7447,7 +7533,7 @@ vect_transform_loop (loop_vec_info loop_vinfo)
   if (vect_print_dump_info (REPORT_DETAILS))
     fprintf (vect_dump, "=== vec_transform_loop ===");
 
-  if (VEC_length (tree, LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo))
+  if (VEC_length (gimple, LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo))
       || VEC_length (ddr_p, LOOP_VINFO_MAY_ALIAS_DDRS (loop_vinfo)))
     vect_loop_versioning (loop_vinfo);
 
@@ -7493,14 +7579,15 @@ vect_transform_loop (loop_vec_info loop_vinfo)
     {
       basic_block bb = bbs[i];
       stmt_vec_info stmt_info;
-      tree phi;
+      gimple phi;
 
-      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
         {
+         phi = gsi_stmt (si);
          if (vect_print_dump_info (REPORT_DETAILS))
            {
              fprintf (vect_dump, "------>vectorizing phi: ");
-             print_generic_expr (vect_dump, phi, TDF_SLIM);
+             print_gimple_stmt (vect_dump, phi, 0, TDF_SLIM);
            }
          stmt_info = vinfo_for_stmt (phi);
          if (!stmt_info)
@@ -7523,15 +7610,15 @@ vect_transform_loop (loop_vec_info loop_vinfo)
            }
        }
 
-      for (si = bsi_start (bb); !bsi_end_p (si);)
+      for (si = gsi_start_bb (bb); !gsi_end_p (si);)
        {
-         tree stmt = bsi_stmt (si);
+         gimple stmt = gsi_stmt (si);
          bool is_store;
 
          if (vect_print_dump_info (REPORT_DETAILS))
            {
              fprintf (vect_dump, "------>vectorizing statement: ");
-             print_generic_expr (vect_dump, stmt, TDF_SLIM);
+             print_gimple_stmt (vect_dump, stmt, 0, TDF_SLIM);
            }   
 
          stmt_info = vinfo_for_stmt (stmt);
@@ -7541,14 +7628,14 @@ vect_transform_loop (loop_vec_info loop_vinfo)
             need to be vectorized.  */
          if (!stmt_info)
            {
-             bsi_next (&si);
+             gsi_next (&si);
              continue;
            }
 
          if (!STMT_VINFO_RELEVANT_P (stmt_info)
              && !STMT_VINFO_LIVE_P (stmt_info))
            {
-             bsi_next (&si);
+             gsi_next (&si);
              continue;
            }
 
@@ -7580,7 +7667,7 @@ vect_transform_loop (loop_vec_info loop_vinfo)
                     vect_schedule_slp_instance and their vinfo is destroyed. */
                  if (is_store)
                    {
-                     bsi_next (&si);
+                     gsi_next (&si);
                      continue;
                    }
                }
@@ -7588,7 +7675,7 @@ vect_transform_loop (loop_vec_info loop_vinfo)
              /* Hybrid SLP stmts must be vectorized in addition to SLP.  */
              if (PURE_SLP_STMT (stmt_info))
                {
-                 bsi_next (&si);
+                 gsi_next (&si);
                  continue;
                }
            }
@@ -7607,18 +7694,18 @@ vect_transform_loop (loop_vec_info loop_vinfo)
                     interleaving chain was completed - free all the stores in
                     the chain.  */
                  vect_remove_stores (DR_GROUP_FIRST_DR (stmt_info));
-                 bsi_remove (&si, true);
+                 gsi_remove (&si, true);
                  continue;
                }
              else
                {
                  /* Free the attached stmt_vec_info and remove the stmt.  */
                  free_stmt_vec_info (stmt);
-                 bsi_remove (&si, true);
+                 gsi_remove (&si, true);
                  continue;
                }
            }
-         bsi_next (&si);
+         gsi_next (&si);
        }                       /* stmts in BB */
     }                          /* BBs in loop */
 
index 5bfa30b..3526de4 100644
@@ -144,6 +144,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "tree-data-ref.h"
 #include "tree-scalar-evolution.h"
 #include "input.h"
+#include "hashtab.h"
 #include "tree-vectorizer.h"
 #include "tree-pass.h"
 
@@ -163,6 +164,10 @@ static LOC vect_loop_location;
 
 /* Bitmap of virtual variables to be renamed.  */
 bitmap vect_memsyms_to_rename;
+
+/* Vector mapping GIMPLE stmt to stmt_vec_info. */
+VEC(vec_void_p,heap) *stmt_vec_info_vec;
+
 \f
 /*************************************************************************
   Simple Loop Peeling Utilities
@@ -198,18 +203,17 @@ rename_use_op (use_operand_p op_p)
 static void
 rename_variables_in_bb (basic_block bb)
 {
-  tree phi;
-  block_stmt_iterator bsi;
-  tree stmt;
+  gimple_stmt_iterator gsi;
+  gimple stmt;
   use_operand_p use_p;
   ssa_op_iter iter;
   edge e;
   edge_iterator ei;
   struct loop *loop = bb->loop_father;
 
-  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
     {
-      stmt = bsi_stmt (bsi);
+      stmt = gsi_stmt (gsi);
       FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_ALL_USES)
        rename_use_op (use_p);
     }
@@ -218,8 +222,8 @@ rename_variables_in_bb (basic_block bb)
     {
       if (!flow_bb_inside_loop_p (loop, e->dest))
        continue;
-      for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
-        rename_use_op (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e));
+      for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
+        rename_use_op (PHI_ARG_DEF_PTR_FROM_EDGE (gsi_stmt (gsi), e));
     }
 }
 
@@ -253,13 +257,14 @@ slpeel_update_phis_for_duplicate_loop (struct loop *orig_loop,
                                       struct loop *new_loop, bool after)
 {
   tree new_ssa_name;
-  tree phi_new, phi_orig;
+  gimple phi_new, phi_orig;
   tree def;
   edge orig_loop_latch = loop_latch_edge (orig_loop);
   edge orig_entry_e = loop_preheader_edge (orig_loop);
   edge new_loop_exit_e = single_exit (new_loop);
   edge new_loop_entry_e = loop_preheader_edge (new_loop);
   edge entry_arg_e = (after ? orig_loop_latch : orig_entry_e);
+  gimple_stmt_iterator gsi_new, gsi_orig;
 
   /*
      step 1. For each loop-header-phi:
@@ -290,11 +295,14 @@ slpeel_update_phis_for_duplicate_loop (struct loop *orig_loop,
   /* Scan the phis in the headers of the old and new loops
      (they are organized in exactly the same order).  */
 
-  for (phi_new = phi_nodes (new_loop->header),
-       phi_orig = phi_nodes (orig_loop->header);
-       phi_new && phi_orig;
-       phi_new = PHI_CHAIN (phi_new), phi_orig = PHI_CHAIN (phi_orig))
+  for (gsi_new = gsi_start_phis (new_loop->header),
+       gsi_orig = gsi_start_phis (orig_loop->header);
+       !gsi_end_p (gsi_new) && !gsi_end_p (gsi_orig);
+       gsi_next (&gsi_new), gsi_next (&gsi_orig))
     {
+      phi_new = gsi_stmt (gsi_new);
+      phi_orig = gsi_stmt (gsi_orig);
+
       /* step 1.  */
       def = PHI_ARG_DEF_FROM_EDGE (phi_orig, entry_arg_e);
       add_phi_arg (phi_new, def, new_loop_entry_e);
@@ -485,8 +493,8 @@ slpeel_update_phi_nodes_for_guard1 (edge guard_edge, struct loop *loop,
                                     bool is_new_loop, basic_block *new_exit_bb,
                                     bitmap *defs)
 {
-  tree orig_phi, new_phi;
-  tree update_phi, update_phi2;
+  gimple orig_phi, new_phi;
+  gimple update_phi, update_phi2;
   tree guard_arg, loop_arg;
   basic_block new_merge_bb = guard_edge->dest;
   edge e = EDGE_SUCC (new_merge_bb, 0);
@@ -495,16 +503,21 @@ slpeel_update_phi_nodes_for_guard1 (edge guard_edge, struct loop *loop,
   edge new_exit_e;
   tree current_new_name;
   tree name;
+  gimple_stmt_iterator gsi_orig, gsi_update;
 
   /* Create new bb between loop and new_merge_bb.  */
   *new_exit_bb = split_edge (single_exit (loop));
 
   new_exit_e = EDGE_SUCC (*new_exit_bb, 0);
 
-  for (orig_phi = phi_nodes (orig_bb), update_phi = phi_nodes (update_bb);
-       orig_phi && update_phi;
-       orig_phi = PHI_CHAIN (orig_phi), update_phi = PHI_CHAIN (update_phi))
+  for (gsi_orig = gsi_start_phis (orig_bb),
+       gsi_update = gsi_start_phis (update_bb);
+       !gsi_end_p (gsi_orig) && !gsi_end_p (gsi_update);
+       gsi_next (&gsi_orig), gsi_next (&gsi_update))
     {
+      orig_phi = gsi_stmt (gsi_orig);
+      update_phi = gsi_stmt (gsi_update);
+
       /* Virtual phi; Mark it for renaming. We actually want to call
         mark_sym_for_renaming, but since all ssa renaming datastructures
         are going to be freed before we get to call ssa_update, we just
@@ -578,8 +591,6 @@ slpeel_update_phi_nodes_for_guard1 (edge guard_edge, struct loop *loop,
       set_current_def (current_new_name, PHI_RESULT (new_phi));
       bitmap_set_bit (*defs, SSA_NAME_VERSION (current_new_name));
     }
-
-  set_phi_nodes (new_merge_bb, phi_reverse (phi_nodes (new_merge_bb)));
 }
 
 
@@ -613,8 +624,8 @@ static void
 slpeel_update_phi_nodes_for_guard2 (edge guard_edge, struct loop *loop,
                                     bool is_new_loop, basic_block *new_exit_bb)
 {
-  tree orig_phi, new_phi;
-  tree update_phi, update_phi2;
+  gimple orig_phi, new_phi;
+  gimple update_phi, update_phi2;
   tree guard_arg, loop_arg;
   basic_block new_merge_bb = guard_edge->dest;
   edge e = EDGE_SUCC (new_merge_bb, 0);
@@ -623,15 +634,16 @@ slpeel_update_phi_nodes_for_guard2 (edge guard_edge, struct loop *loop,
   tree orig_def, orig_def_new_name;
   tree new_name, new_name2;
   tree arg;
+  gimple_stmt_iterator gsi;
 
   /* Create new bb between loop and new_merge_bb.  */
   *new_exit_bb = split_edge (single_exit (loop));
 
   new_exit_e = EDGE_SUCC (*new_exit_bb, 0);
 
-  for (update_phi = phi_nodes (update_bb); update_phi; 
-       update_phi = PHI_CHAIN (update_phi))
+  for (gsi = gsi_start_phis (update_bb); !gsi_end_p (gsi); gsi_next (&gsi))
     {
+      update_phi = gsi_stmt (gsi);
       orig_phi = update_phi;
       orig_def = PHI_ARG_DEF_FROM_EDGE (orig_phi, e);
       /* This loop-closed-phi actually doesn't represent a use
@@ -732,8 +744,6 @@ slpeel_update_phi_nodes_for_guard2 (edge guard_edge, struct loop *loop,
                                                                 == guard_arg);
       SET_PHI_ARG_DEF (update_phi2, guard_edge->dest_idx, PHI_RESULT (new_phi));
     }
-
-  set_phi_nodes (new_merge_bb, phi_reverse (phi_nodes (new_merge_bb)));
 }
 
 
@@ -745,35 +755,40 @@ slpeel_update_phi_nodes_for_guard2 (edge guard_edge, struct loop *loop,
 void
 slpeel_make_loop_iterate_ntimes (struct loop *loop, tree niters)
 {
-  tree indx_before_incr, indx_after_incr, cond_stmt, cond;
-  tree orig_cond;
+  tree indx_before_incr, indx_after_incr;
+  gimple cond_stmt;
+  gimple orig_cond;
   edge exit_edge = single_exit (loop);
-  block_stmt_iterator loop_cond_bsi;
-  block_stmt_iterator incr_bsi;
+  gimple_stmt_iterator loop_cond_gsi;
+  gimple_stmt_iterator incr_gsi;
   bool insert_after;
   tree init = build_int_cst (TREE_TYPE (niters), 0);
   tree step = build_int_cst (TREE_TYPE (niters), 1);
   LOC loop_loc;
+  enum tree_code code;
 
   orig_cond = get_loop_exit_condition (loop);
   gcc_assert (orig_cond);
-  loop_cond_bsi = bsi_for_stmt (orig_cond);
+  loop_cond_gsi = gsi_for_stmt (orig_cond);
 
-  standard_iv_increment_position (loop, &incr_bsi, &insert_after);
+  standard_iv_increment_position (loop, &incr_gsi, &insert_after);
   create_iv (init, step, NULL_TREE, loop,
-             &incr_bsi, insert_after, &indx_before_incr, &indx_after_incr);
+             &incr_gsi, insert_after, &indx_before_incr, &indx_after_incr);
+
+  indx_after_incr = force_gimple_operand_gsi (&loop_cond_gsi, indx_after_incr,
+                                             true, NULL_TREE, true,
+                                             GSI_SAME_STMT);
+  niters = force_gimple_operand_gsi (&loop_cond_gsi, niters, true, NULL_TREE,
+                                    true, GSI_SAME_STMT);
 
-  if (exit_edge->flags & EDGE_TRUE_VALUE) /* 'then' edge exits the loop.  */
-    cond = build2 (GE_EXPR, boolean_type_node, indx_after_incr, niters);
-  else /* 'then' edge loops back.  */
-    cond = build2 (LT_EXPR, boolean_type_node, indx_after_incr, niters);
+  code = (exit_edge->flags & EDGE_TRUE_VALUE) ? GE_EXPR : LT_EXPR;
+  cond_stmt = gimple_build_cond (code, indx_after_incr, niters, NULL_TREE,
+                                NULL_TREE);
 
-  cond_stmt = build3 (COND_EXPR, TREE_TYPE (orig_cond), cond,
-                     NULL_TREE, NULL_TREE);
-  bsi_insert_before (&loop_cond_bsi, cond_stmt, BSI_SAME_STMT);
+  gsi_insert_before (&loop_cond_gsi, cond_stmt, GSI_SAME_STMT);
 
   /* Remove old loop exit test:  */
-  bsi_remove (&loop_cond_bsi, true);
+  gsi_remove (&loop_cond_gsi, true);
 
   loop_loc = find_loop_location (loop);
   if (dump_file && (dump_flags & TDF_DETAILS))
@@ -781,7 +796,7 @@ slpeel_make_loop_iterate_ntimes (struct loop *loop, tree niters)
       if (loop_loc != UNKNOWN_LOC)
         fprintf (dump_file, "\nloop at %s:%d: ",
                  LOC_FILE (loop_loc), LOC_LINE (loop_loc));
-      print_generic_expr (dump_file, cond_stmt, TDF_SLIM);
+      print_gimple_stmt (dump_file, cond_stmt, 0, TDF_SLIM);
     }
 
   loop->nb_iterations = niters;
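The exit-test rewrite above also shows how conditionals change: instead of wrapping a COND_EXPR tree built with build2/build3, the comparison code and both operands go directly into a GIMPLE_COND.  A sketch using the names from the hunk:

  /* Old:  cond = build2 (GE_EXPR, boolean_type_node, indx_after_incr, niters);
           cond_stmt = build3 (COND_EXPR, TREE_TYPE (orig_cond), cond,
                               NULL_TREE, NULL_TREE);  */
  gimple cond_stmt = gimple_build_cond (GE_EXPR, indx_after_incr, niters,
                                        NULL_TREE, NULL_TREE);
  gsi_insert_before (&loop_cond_gsi, cond_stmt, GSI_SAME_STMT);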
@@ -799,8 +814,10 @@ slpeel_tree_duplicate_loop_to_edge_cfg (struct loop *loop, edge e)
   bool at_exit;
   bool was_imm_dom;
   basic_block exit_dest; 
-  tree phi, phi_arg;
+  gimple phi;
+  tree phi_arg;
   edge exit, new_exit;
+  gimple_stmt_iterator gsi;
 
   at_exit = (e == single_exit (loop)); 
   if (!at_exit && e != loop_preheader_edge (loop))
@@ -837,8 +854,9 @@ slpeel_tree_duplicate_loop_to_edge_cfg (struct loop *loop, edge e)
 
   /* Duplicating phi args at exit bbs as coming 
      also from exit of duplicated loop.  */
-  for (phi = phi_nodes (exit_dest); phi; phi = PHI_CHAIN (phi))
+  for (gsi = gsi_start_phis (exit_dest); !gsi_end_p (gsi); gsi_next (&gsi))
     {
+      phi = gsi_stmt (gsi);
       phi_arg = PHI_ARG_DEF_FROM_EDGE (phi, single_exit (loop));
       if (phi_arg)
        {
@@ -880,8 +898,11 @@ slpeel_tree_duplicate_loop_to_edge_cfg (struct loop *loop, edge e)
 
       /* We have to add phi args to the loop->header here as coming 
         from new_exit_e edge.  */
-      for (phi = phi_nodes (loop->header); phi; phi = PHI_CHAIN (phi))
+      for (gsi = gsi_start_phis (loop->header);
+           !gsi_end_p (gsi);
+           gsi_next (&gsi))
        {
+         phi = gsi_stmt (gsi);
          phi_arg = PHI_ARG_DEF_FROM_EDGE (phi, entry_e);
          if (phi_arg)
            add_phi_arg (phi, phi_arg, new_exit_e);     
@@ -908,26 +929,26 @@ static edge
 slpeel_add_loop_guard (basic_block guard_bb, tree cond, basic_block exit_bb,
                       basic_block dom_bb)
 {
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   edge new_e, enter_e;
-  tree cond_stmt;
-  tree gimplify_stmt_list;
+  gimple cond_stmt;
+  gimple_seq gimplify_stmt_list = NULL;
 
   enter_e = EDGE_SUCC (guard_bb, 0);
   enter_e->flags &= ~EDGE_FALLTHRU;
   enter_e->flags |= EDGE_FALSE_VALUE;
-  bsi = bsi_last (guard_bb);
+  gsi = gsi_last_bb (guard_bb);
 
   cond =
     force_gimple_operand (cond, &gimplify_stmt_list, true,
                          NULL_TREE);
-  cond_stmt = build3 (COND_EXPR, void_type_node, cond,
-                     NULL_TREE, NULL_TREE);
+  cond_stmt = gimple_build_cond (NE_EXPR, cond, integer_zero_node,
+                                NULL_TREE, NULL_TREE);
   if (gimplify_stmt_list)
-    bsi_insert_after (&bsi, gimplify_stmt_list, BSI_NEW_STMT);
+    gsi_insert_seq_after (&gsi, gimplify_stmt_list, GSI_NEW_STMT);
 
-  bsi = bsi_last (guard_bb);
-  bsi_insert_after (&bsi, cond_stmt, BSI_NEW_STMT);
+  gsi = gsi_last_bb (guard_bb);
+  gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
 
   /* Add new edge to connect guard block to the merge/loop-exit block.  */
   new_e = make_edge (guard_bb, exit_bb, EDGE_TRUE_VALUE);
@@ -949,8 +970,8 @@ slpeel_can_duplicate_loop_p (const struct loop *loop, const_edge e)
 {
   edge exit_e = single_exit (loop);
   edge entry_e = loop_preheader_edge (loop);
-  tree orig_cond = get_loop_exit_condition (loop);
-  block_stmt_iterator loop_exit_bsi = bsi_last (exit_e->src);
+  gimple orig_cond = get_loop_exit_condition (loop);
+  gimple_stmt_iterator loop_exit_gsi = gsi_last_bb (exit_e->src);
 
   if (need_ssa_update_p ())
     return false;
@@ -963,7 +984,7 @@ slpeel_can_duplicate_loop_p (const struct loop *loop, const_edge e)
       || !empty_block_p (loop->latch)
       || !single_exit (loop)
       /* Verify that new loop exit condition can be trivially modified.  */
-      || (!orig_cond || orig_cond != bsi_stmt (loop_exit_bsi))
+      || (!orig_cond || orig_cond != gsi_stmt (loop_exit_gsi))
       || (e != exit_e && e != entry_e))
     return false;
 
@@ -1017,12 +1038,12 @@ set_prologue_iterations (basic_block bb_before_first_loop,
 {
   edge e;
   basic_block cond_bb, then_bb;
-  tree var, prologue_after_cost_adjust_name, stmt;
-  block_stmt_iterator bsi;
-  tree newphi;
+  tree var, prologue_after_cost_adjust_name;
+  gimple_stmt_iterator gsi;
+  gimple newphi;
   edge e_true, e_false, e_fallthru;
-  tree cond_stmt;
-  tree gimplify_stmt_list;
+  gimple cond_stmt;
+  gimple_seq gimplify_stmt_list = NULL, stmts = NULL;
   tree cost_pre_condition = NULL_TREE;
   tree scalar_loop_iters = 
     unshare_expr (LOOP_VINFO_NITERS_UNCHANGED (loop_vec_info_for_loop (loop)));
@@ -1050,25 +1071,25 @@ set_prologue_iterations (basic_block bb_before_first_loop,
   cost_pre_condition =
     force_gimple_operand (cost_pre_condition, &gimplify_stmt_list,
                          true, NULL_TREE);
-  cond_stmt = build3 (COND_EXPR, void_type_node, cost_pre_condition,
-                     NULL_TREE, NULL_TREE);
+  cond_stmt = gimple_build_cond (NE_EXPR, cost_pre_condition,
+                                integer_zero_node, NULL_TREE, NULL_TREE);
 
-  bsi = bsi_last (cond_bb);
+  gsi = gsi_last_bb (cond_bb);
   if (gimplify_stmt_list)
-    bsi_insert_after (&bsi, gimplify_stmt_list, BSI_NEW_STMT);
+    gsi_insert_seq_after (&gsi, gimplify_stmt_list, GSI_NEW_STMT);
 
-  bsi = bsi_last (cond_bb);
-  bsi_insert_after (&bsi, cond_stmt, BSI_NEW_STMT);
+  gsi = gsi_last_bb (cond_bb);
+  gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
                                          
   var = create_tmp_var (TREE_TYPE (scalar_loop_iters),
                        "prologue_after_cost_adjust");
   add_referenced_var (var);
   prologue_after_cost_adjust_name = 
-    force_gimple_operand (scalar_loop_iters, &stmt, false, var);
+    force_gimple_operand (scalar_loop_iters, &stmts, false, var);
 
-  bsi = bsi_last (then_bb);
-  if (stmt)
-    bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
+  gsi = gsi_last_bb (then_bb);
+  if (stmts)
+    gsi_insert_seq_after (&gsi, stmts, GSI_NEW_STMT);
 
   newphi = create_phi_node (var, bb_before_first_loop);
   add_phi_arg (newphi, prologue_after_cost_adjust_name, e_fallthru);
@@ -1150,7 +1171,7 @@ slpeel_tree_peel_loop_to_edge (struct loop *loop,
    cfg_hooks->split_edge, the function tree_split_edge 
    is actually called and, when calling cfg_hooks->duplicate_block,
    the function tree_duplicate_bb is called.  */
-  tree_register_cfg_hooks ();
+  gimple_register_cfg_hooks ();
 
 
   /* 1. Generate a copy of LOOP and put it on E (E is the entry/exit of LOOP).
@@ -1381,18 +1402,17 @@ slpeel_tree_peel_loop_to_edge (struct loop *loop,
 LOC
 find_loop_location (struct loop *loop)
 {
-  tree node = NULL_TREE;
+  gimple stmt = NULL;
   basic_block bb;
-  block_stmt_iterator si;
+  gimple_stmt_iterator si;
 
   if (!loop)
     return UNKNOWN_LOC;
 
-  node = get_loop_exit_condition (loop);
+  stmt = get_loop_exit_condition (loop);
 
-  if (node && CAN_HAVE_LOCATION_P (node) && EXPR_HAS_LOCATION (node)
-      && EXPR_FILENAME (node) && EXPR_LINENO (node))
-    return EXPR_LOC (node);
+  if (stmt && gimple_location (stmt) != UNKNOWN_LOC)
+    return gimple_location (stmt);
 
   /* If we got here the loop is probably not "well formed",
      try to estimate the loop location */
@@ -1402,11 +1422,11 @@ find_loop_location (struct loop *loop)
 
   bb = loop->header;
 
-  for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
+  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
     {
-      node = bsi_stmt (si);
-      if (node && CAN_HAVE_LOCATION_P (node) && EXPR_HAS_LOCATION (node))
-        return EXPR_LOC (node);
+      stmt = gsi_stmt (si);
+      if (gimple_location (stmt) != UNKNOWN_LOC)
+        return gimple_location (stmt);
     }
 
   return UNKNOWN_LOC;
@@ -1504,7 +1524,7 @@ vect_print_dump_info (enum verbosity_levels vl)
    Create and initialize a new stmt_vec_info struct for STMT.  */
 
 stmt_vec_info
-new_stmt_vec_info (tree stmt, loop_vec_info loop_vinfo)
+new_stmt_vec_info (gimple stmt, loop_vec_info loop_vinfo)
 {
   stmt_vec_info res;
   res = (stmt_vec_info) xcalloc (1, sizeof (struct _stmt_vec_info));
@@ -1526,7 +1546,8 @@ new_stmt_vec_info (tree stmt, loop_vec_info loop_vinfo)
   STMT_VINFO_DR_STEP (res) = NULL;
   STMT_VINFO_DR_ALIGNED_TO (res) = NULL;
 
-  if (TREE_CODE (stmt) == PHI_NODE && is_loop_header_bb_p (bb_for_stmt (stmt)))
+  if (gimple_code (stmt) == GIMPLE_PHI
+      && is_loop_header_bb_p (gimple_bb (stmt)))
     STMT_VINFO_DEF_TYPE (res) = vect_unknown_def_type;
   else
     STMT_VINFO_DEF_TYPE (res) = vect_loop_def;
@@ -1534,22 +1555,39 @@ new_stmt_vec_info (tree stmt, loop_vec_info loop_vinfo)
   STMT_VINFO_INSIDE_OF_LOOP_COST (res) = 0;
   STMT_VINFO_OUTSIDE_OF_LOOP_COST (res) = 0;
   STMT_SLP_TYPE (res) = 0;
-  DR_GROUP_FIRST_DR (res) = NULL_TREE;
-  DR_GROUP_NEXT_DR (res) = NULL_TREE;
+  DR_GROUP_FIRST_DR (res) = NULL;
+  DR_GROUP_NEXT_DR (res) = NULL;
   DR_GROUP_SIZE (res) = 0;
   DR_GROUP_STORE_COUNT (res) = 0;
   DR_GROUP_GAP (res) = 0;
-  DR_GROUP_SAME_DR_STMT (res) = NULL_TREE;
+  DR_GROUP_SAME_DR_STMT (res) = NULL;
   DR_GROUP_READ_WRITE_DEPENDENCE (res) = false;
 
   return res;
 }
 
+/* Create the vector mapping GIMPLE stmts to stmt_vec_info. */
+
+void
+init_stmt_vec_info_vec (void)
+{
+  gcc_assert (!stmt_vec_info_vec);
+  stmt_vec_info_vec = VEC_alloc (vec_void_p, heap, 50);
+}
+
+/* Free the vector mapping GIMPLE stmts to stmt_vec_info. */
+
+void
+free_stmt_vec_info_vec (void)
+{
+  gcc_assert (stmt_vec_info_vec);
+  VEC_free (vec_void_p, heap, stmt_vec_info_vec);
+}
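The new global stmt_vec_info_vec replaces the per-stmt annotation that used to hang off stmt_ann.  A sketch of the intended lifecycle, using only routines added or converted in this patch (stmt and loop_vinfo stand for the statement and loop being analyzed; the actual driver code is elsewhere):

  init_stmt_vec_info_vec ();                       /* allocate the global VEC once  */

  set_vinfo_for_stmt (stmt, new_stmt_vec_info (stmt, loop_vinfo));
  /* ... later ...  */
  stmt_vec_info info = vinfo_for_stmt (stmt);      /* look the info up again  */

  free_stmt_vec_info (stmt);                       /* frees the info and clears the mapping  */
  free_stmt_vec_info_vec ();                       /* release the global VEC  */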
 
 /* Free stmt vectorization related info.  */
 
 void
-free_stmt_vec_info (tree stmt)
+free_stmt_vec_info (gimple stmt)
 {
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
 
@@ -1557,8 +1595,8 @@ free_stmt_vec_info (tree stmt)
     return;
 
   VEC_free (dr_p, heap, STMT_VINFO_SAME_ALIGN_REFS (stmt_info));
+  set_vinfo_for_stmt (stmt, NULL);
   free (stmt_info);
-  set_stmt_info (stmt_ann (stmt), NULL);
 }
 
 
@@ -1586,7 +1624,7 @@ new_loop_vec_info (struct loop *loop)
 {
   loop_vec_info res;
   basic_block *bbs;
-  block_stmt_iterator si;
+  gimple_stmt_iterator si;
   unsigned int i, nbbs;
 
   res = (loop_vec_info) xcalloc (1, sizeof (struct _loop_vec_info));
@@ -1598,7 +1636,6 @@ new_loop_vec_info (struct loop *loop)
   for (i = 0; i < loop->num_nodes; i++)
     {
       basic_block bb = bbs[i];
-      tree phi;
 
       /* BBs in a nested inner-loop will have been already processed (because 
         we will have called vect_analyze_loop_form for any nested inner-loop).
@@ -1611,18 +1648,21 @@ new_loop_vec_info (struct loop *loop)
        {
          /* Inner-loop bb.  */
          gcc_assert (loop->inner && bb->loop_father == loop->inner);
-         for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+         for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
            {
+             gimple phi = gsi_stmt (si);
              stmt_vec_info stmt_info = vinfo_for_stmt (phi);
-             loop_vec_info inner_loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
+             loop_vec_info inner_loop_vinfo =
+               STMT_VINFO_LOOP_VINFO (stmt_info);
              gcc_assert (loop->inner == LOOP_VINFO_LOOP (inner_loop_vinfo));
              STMT_VINFO_LOOP_VINFO (stmt_info) = res;
            }
-         for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
+         for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
           {
-             tree stmt = bsi_stmt (si);
+             gimple stmt = gsi_stmt (si);
              stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
-             loop_vec_info inner_loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
+             loop_vec_info inner_loop_vinfo =
+                STMT_VINFO_LOOP_VINFO (stmt_info);
              gcc_assert (loop->inner == LOOP_VINFO_LOOP (inner_loop_vinfo));
              STMT_VINFO_LOOP_VINFO (stmt_info) = res;
           }
@@ -1630,17 +1670,18 @@ new_loop_vec_info (struct loop *loop)
       else
        {
          /* bb in current nest.  */
-         for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+         for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
            {
-             stmt_ann_t ann = get_stmt_ann (phi);
-             set_stmt_info (ann, new_stmt_vec_info (phi, res));
+             gimple phi = gsi_stmt (si);
+             gimple_set_uid (phi, 0);
+             set_vinfo_for_stmt (phi, new_stmt_vec_info (phi, res));
            }
 
-         for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
+         for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
            {
-             tree stmt = bsi_stmt (si);
-             stmt_ann_t ann = stmt_ann (stmt);
-             set_stmt_info (ann, new_stmt_vec_info (stmt, res));
+             gimple stmt = gsi_stmt (si);
+             gimple_set_uid (stmt, 0);
+             set_vinfo_for_stmt (stmt, new_stmt_vec_info (stmt, res));
            }
        }
     }
@@ -1667,10 +1708,12 @@ new_loop_vec_info (struct loop *loop)
   LOOP_VINFO_DDRS (res) = VEC_alloc (ddr_p, heap, 10 * 10);
   LOOP_VINFO_UNALIGNED_DR (res) = NULL;
   LOOP_VINFO_MAY_MISALIGN_STMTS (res) =
-    VEC_alloc (tree, heap, PARAM_VALUE (PARAM_VECT_MAX_VERSION_FOR_ALIGNMENT_CHECKS));
+    VEC_alloc (gimple, heap,
+              PARAM_VALUE (PARAM_VECT_MAX_VERSION_FOR_ALIGNMENT_CHECKS));
   LOOP_VINFO_MAY_ALIAS_DDRS (res) =
-    VEC_alloc (ddr_p, heap, PARAM_VALUE (PARAM_VECT_MAX_VERSION_FOR_ALIAS_CHECKS));
-  LOOP_VINFO_STRIDED_STORES (res) = VEC_alloc (tree, heap, 10);
+    VEC_alloc (ddr_p, heap,
+              PARAM_VALUE (PARAM_VECT_MAX_VERSION_FOR_ALIAS_CHECKS));
+  LOOP_VINFO_STRIDED_STORES (res) = VEC_alloc (gimple, heap, 10);
   LOOP_VINFO_SLP_INSTANCES (res) = VEC_alloc (slp_instance, heap, 10);
   LOOP_VINFO_SLP_UNROLLING_FACTOR (res) = 1;
 
@@ -1689,7 +1732,7 @@ destroy_loop_vec_info (loop_vec_info loop_vinfo, bool clean_stmts)
   struct loop *loop;
   basic_block *bbs;
   int nbbs;
-  block_stmt_iterator si;
+  gimple_stmt_iterator si;
   int j;
   VEC (slp_instance, heap) *slp_instances;
   slp_instance instance;
@@ -1707,7 +1750,7 @@ destroy_loop_vec_info (loop_vec_info loop_vinfo, bool clean_stmts)
       free (LOOP_VINFO_BBS (loop_vinfo));
       free_data_refs (LOOP_VINFO_DATAREFS (loop_vinfo));
       free_dependence_relations (LOOP_VINFO_DDRS (loop_vinfo));
-      VEC_free (tree, heap, LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo));
+      VEC_free (gimple, heap, LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo));
 
       free (loop_vinfo);
       loop->aux = NULL;
@@ -1717,14 +1760,13 @@ destroy_loop_vec_info (loop_vec_info loop_vinfo, bool clean_stmts)
   for (j = 0; j < nbbs; j++)
     {
       basic_block bb = bbs[j];
-      tree phi;
 
-      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
-        free_stmt_vec_info (phi);
+      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
+        free_stmt_vec_info (gsi_stmt (si));
 
-      for (si = bsi_start (bb); !bsi_end_p (si); )
+      for (si = gsi_start_bb (bb); !gsi_end_p (si); )
        {
-         tree stmt = bsi_stmt (si);
+         gimple stmt = gsi_stmt (si);
          stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
 
          if (stmt_info)
@@ -1732,7 +1774,7 @@ destroy_loop_vec_info (loop_vec_info loop_vinfo, bool clean_stmts)
              /* Check if this is a "pattern stmt" (introduced by the 
                 vectorizer during the pattern recognition pass).  */
              bool remove_stmt_p = false;
-             tree orig_stmt = STMT_VINFO_RELATED_STMT (stmt_info);
+             gimple orig_stmt = STMT_VINFO_RELATED_STMT (stmt_info);
              if (orig_stmt)
                {
                  stmt_vec_info orig_stmt_info = vinfo_for_stmt (orig_stmt);
@@ -1746,22 +1788,22 @@ destroy_loop_vec_info (loop_vec_info loop_vinfo, bool clean_stmts)
 
              /* Remove dead "pattern stmts".  */
              if (remove_stmt_p)
-               bsi_remove (&si, true);
+               gsi_remove (&si, true);
            }
-         bsi_next (&si);
+         gsi_next (&si);
        }
     }
 
   free (LOOP_VINFO_BBS (loop_vinfo));
   free_data_refs (LOOP_VINFO_DATAREFS (loop_vinfo));
   free_dependence_relations (LOOP_VINFO_DDRS (loop_vinfo));
-  VEC_free (tree, heap, LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo));
+  VEC_free (gimple, heap, LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo));
   VEC_free (ddr_p, heap, LOOP_VINFO_MAY_ALIAS_DDRS (loop_vinfo));
   slp_instances = LOOP_VINFO_SLP_INSTANCES (loop_vinfo);
   for (j = 0; VEC_iterate (slp_instance, slp_instances, j, instance); j++)
     vect_free_slp_tree (SLP_INSTANCE_TREE (instance));
   VEC_free (slp_instance, heap, LOOP_VINFO_SLP_INSTANCES (loop_vinfo));
-  VEC_free (tree, heap, LOOP_VINFO_STRIDED_STORES (loop_vinfo));
+  VEC_free (gimple, heap, LOOP_VINFO_STRIDED_STORES (loop_vinfo));
 
   free (loop_vinfo);
   loop->aux = NULL;
@@ -1850,7 +1892,7 @@ get_vectype_for_scalar_type (tree scalar_type)
 enum dr_alignment_support
 vect_supportable_dr_alignment (struct data_reference *dr)
 {
-  tree stmt = DR_STMT (dr);
+  gimple stmt = DR_STMT (dr);
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
   tree vectype = STMT_VINFO_VECTYPE (stmt_info);
   enum machine_mode mode = (int) TYPE_MODE (vectype);
@@ -1972,14 +2014,14 @@ vect_supportable_dr_alignment (struct data_reference *dr)
    in reduction/induction computations).  */
 
 bool
-vect_is_simple_use (tree operand, loop_vec_info loop_vinfo, tree *def_stmt,
+vect_is_simple_use (tree operand, loop_vec_info loop_vinfo, gimple *def_stmt,
                    tree *def, enum vect_def_type *dt)
 { 
   basic_block bb;
   stmt_vec_info stmt_vinfo;
   struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
 
-  *def_stmt = NULL_TREE;
+  *def_stmt = NULL;
   *def = NULL_TREE;
   
   if (vect_print_dump_info (REPORT_DETAILS))
@@ -2014,7 +2056,7 @@ vect_is_simple_use (tree operand, loop_vec_info loop_vinfo, tree *def_stmt,
     }
     
   *def_stmt = SSA_NAME_DEF_STMT (operand);
-  if (*def_stmt == NULL_TREE )
+  if (*def_stmt == NULL)
     {
       if (vect_print_dump_info (REPORT_DETAILS))
         fprintf (vect_dump, "no def_stmt.");
@@ -2024,27 +2066,19 @@ vect_is_simple_use (tree operand, loop_vec_info loop_vinfo, tree *def_stmt,
   if (vect_print_dump_info (REPORT_DETAILS))
     {
       fprintf (vect_dump, "def_stmt: ");
-      print_generic_expr (vect_dump, *def_stmt, TDF_SLIM);
+      print_gimple_stmt (vect_dump, *def_stmt, 0, TDF_SLIM);
     }
 
   /* empty stmt is expected only in case of a function argument.
-     (Otherwise - we expect a phi_node or a GIMPLE_MODIFY_STMT).  */
-  if (IS_EMPTY_STMT (*def_stmt))
+     (Otherwise - we expect a phi_node or a GIMPLE_ASSIGN).  */
+  if (gimple_nop_p (*def_stmt))
     {
-      tree arg = TREE_OPERAND (*def_stmt, 0);
-      if (is_gimple_min_invariant (arg))
-        {
-          *def = operand;
-          *dt = vect_invariant_def;
-          return true;
-        }
-
-      if (vect_print_dump_info (REPORT_DETAILS))
-        fprintf (vect_dump, "Unexpected empty stmt.");
-      return false;
+      *def = operand;
+      *dt = vect_invariant_def;
+      return true;
     }
 
-  bb = bb_for_stmt (*def_stmt);
+  bb = gimple_bb (*def_stmt);
   if (!flow_bb_inside_loop_p (loop, bb))
     *dt = vect_invariant_def;
   else
@@ -2063,16 +2097,21 @@ vect_is_simple_use (tree operand, loop_vec_info loop_vinfo, tree *def_stmt,
   if (vect_print_dump_info (REPORT_DETAILS))
     fprintf (vect_dump, "type of def: %d.",*dt);
 
-  switch (TREE_CODE (*def_stmt))
+  switch (gimple_code (*def_stmt))
     {
-    case PHI_NODE:
-      *def = PHI_RESULT (*def_stmt);
+    case GIMPLE_PHI:
+      *def = gimple_phi_result (*def_stmt);
       break;
 
-    case GIMPLE_MODIFY_STMT:
-      *def = GIMPLE_STMT_OPERAND (*def_stmt, 0);
+    case GIMPLE_ASSIGN:
+      *def = gimple_assign_lhs (*def_stmt);
       break;
 
+    case GIMPLE_CALL:
+      *def = gimple_call_lhs (*def_stmt);
+      if (*def != NULL)
+       break;
+      /* FALLTHRU */
     default:
       if (vect_print_dump_info (REPORT_DETAILS))
         fprintf (vect_dump, "unsupported defining stmt: ");
@@ -2102,7 +2141,7 @@ vect_is_simple_use (tree operand, loop_vec_info loop_vinfo, tree *def_stmt,
    CODE1 and CODE2 are CALL_EXPR.  */
 
 bool
-supportable_widening_operation (enum tree_code code, tree stmt, tree vectype,
+supportable_widening_operation (enum tree_code code, gimple stmt, tree vectype,
                                 tree *decl1, tree *decl2,
                                 enum tree_code *code1, enum tree_code *code2)
 {
@@ -2113,8 +2152,7 @@ supportable_widening_operation (enum tree_code code, tree stmt, tree vectype,
   enum machine_mode vec_mode;
   enum insn_code icode1, icode2;
   optab optab1, optab2;
-  tree expr = GIMPLE_STMT_OPERAND (stmt, 1);
-  tree type = TREE_TYPE (expr);
+  tree type = gimple_expr_type (stmt);
   tree wide_vectype = get_vectype_for_scalar_type (type);
   enum tree_code c1, c2;
 
@@ -2256,14 +2294,13 @@ supportable_widening_operation (enum tree_code code, tree stmt, tree vectype,
 
 bool
 supportable_narrowing_operation (enum tree_code code,
-                                const_tree stmt, const_tree vectype,
+                                const_gimple stmt, const_tree vectype,
                                 enum tree_code *code1)
 {
   enum machine_mode vec_mode;
   enum insn_code icode1;
   optab optab1;
-  tree expr = GIMPLE_STMT_OPERAND (stmt, 1);
-  tree type = TREE_TYPE (expr);
+  tree type = gimple_expr_type (stmt);
   tree narrow_vectype = get_vectype_for_scalar_type (type);
   enum tree_code c1;
 
@@ -2340,6 +2377,15 @@ reduction_code_for_scalar_code (enum tree_code code,
   }
 }
 
+/* Error reporting helper for vect_is_simple_reduction below. GIMPLE statement
+   STMT is printed with a message MSG. */
+
+static void
+report_vect_op (gimple stmt, const char *msg)
+{
+  fprintf (vect_dump, "%s", msg);
+  print_gimple_stmt (vect_dump, stmt, 0, TDF_SLIM);
+}
 
 /* Function vect_is_simple_reduction
 
@@ -2360,17 +2406,16 @@ reduction_code_for_scalar_code (enum tree_code code,
    Condition 1 is tested here.
    Conditions 2,3 are tested in vect_mark_stmts_to_be_vectorized.  */
 
-tree
-vect_is_simple_reduction (loop_vec_info loop_info, tree phi)
+gimple
+vect_is_simple_reduction (loop_vec_info loop_info, gimple phi)
 {
-  struct loop *loop = (bb_for_stmt (phi))->loop_father;
+  struct loop *loop = (gimple_bb (phi))->loop_father;
   struct loop *vect_loop = LOOP_VINFO_LOOP (loop_info);
   edge latch_e = loop_latch_edge (loop);
   tree loop_arg = PHI_ARG_DEF_FROM_EDGE (phi, latch_e);
-  tree def_stmt, def1, def2;
+  gimple def_stmt, def1, def2;
   enum tree_code code;
-  int op_type;
-  tree operation, op1, op2;
+  tree op1, op2;
   tree type;
   int nloop_uses;
   tree name;
@@ -2383,8 +2428,8 @@ vect_is_simple_reduction (loop_vec_info loop_info, tree phi)
   nloop_uses = 0;
   FOR_EACH_IMM_USE_FAST (use_p, imm_iter, name)
     {
-      tree use_stmt = USE_STMT (use_p);
-      if (flow_bb_inside_loop_p (loop, bb_for_stmt (use_stmt))
+      gimple use_stmt = USE_STMT (use_p);
+      if (flow_bb_inside_loop_p (loop, gimple_bb (use_stmt))
          && vinfo_for_stmt (use_stmt)
          && !is_pattern_stmt_p (vinfo_for_stmt (use_stmt)))
         nloop_uses++;
@@ -2392,7 +2437,7 @@ vect_is_simple_reduction (loop_vec_info loop_info, tree phi)
         {
           if (vect_print_dump_info (REPORT_DETAILS))
             fprintf (vect_dump, "reduction used in loop.");
-          return NULL_TREE;
+          return NULL;
         }
     }
 
@@ -2403,7 +2448,7 @@ vect_is_simple_reduction (loop_vec_info loop_info, tree phi)
          fprintf (vect_dump, "reduction: not ssa_name: ");
          print_generic_expr (vect_dump, loop_arg, TDF_SLIM);
        }
-      return NULL_TREE;
+      return NULL;
     }
 
   def_stmt = SSA_NAME_DEF_STMT (loop_arg);
@@ -2411,22 +2456,22 @@ vect_is_simple_reduction (loop_vec_info loop_info, tree phi)
     {
       if (vect_print_dump_info (REPORT_DETAILS))
        fprintf (vect_dump, "reduction: no def_stmt.");
-      return NULL_TREE;
+      return NULL;
     }
 
-  if (TREE_CODE (def_stmt) != GIMPLE_MODIFY_STMT)
+  if (!is_gimple_assign (def_stmt))
     {
       if (vect_print_dump_info (REPORT_DETAILS))
-        print_generic_expr (vect_dump, def_stmt, TDF_SLIM);
-      return NULL_TREE;
+        print_gimple_stmt (vect_dump, def_stmt, 0, TDF_SLIM);
+      return NULL;
     }
 
-  name = GIMPLE_STMT_OPERAND (def_stmt, 0);
+  name = gimple_assign_lhs (def_stmt);
   nloop_uses = 0;
   FOR_EACH_IMM_USE_FAST (use_p, imm_iter, name)
     {
-      tree use_stmt = USE_STMT (use_p);
-      if (flow_bb_inside_loop_p (loop, bb_for_stmt (use_stmt))
+      gimple use_stmt = USE_STMT (use_p);
+      if (flow_bb_inside_loop_p (loop, gimple_bb (use_stmt))
          && vinfo_for_stmt (use_stmt)
          && !is_pattern_stmt_p (vinfo_for_stmt (use_stmt)))
        nloop_uses++;
@@ -2434,47 +2479,37 @@ vect_is_simple_reduction (loop_vec_info loop_info, tree phi)
        {
          if (vect_print_dump_info (REPORT_DETAILS))
            fprintf (vect_dump, "reduction used in loop.");
-         return NULL_TREE;
+         return NULL;
        }
     }
 
-  operation = GIMPLE_STMT_OPERAND (def_stmt, 1);
-  code = TREE_CODE (operation);
+  code = gimple_assign_rhs_code (def_stmt);
+
   if (!commutative_tree_code (code) || !associative_tree_code (code))
     {
       if (vect_print_dump_info (REPORT_DETAILS))
-        {
-          fprintf (vect_dump, "reduction: not commutative/associative: ");
-          print_generic_expr (vect_dump, operation, TDF_SLIM);
-        }
-      return NULL_TREE;
+        report_vect_op (def_stmt, "reduction: not commutative/associative: ");
+      return NULL;
     }
 
-  op_type = TREE_OPERAND_LENGTH (operation);
-  if (op_type != binary_op)
+  if (get_gimple_rhs_class (code) != GIMPLE_BINARY_RHS)
     {
       if (vect_print_dump_info (REPORT_DETAILS))
-        {
-          fprintf (vect_dump, "reduction: not binary operation: ");
-          print_generic_expr (vect_dump, operation, TDF_SLIM);
-        }
-      return NULL_TREE;
+       report_vect_op (def_stmt, "reduction: not binary operation: ");
+      return NULL;
     }
 
-  op1 = TREE_OPERAND (operation, 0);
-  op2 = TREE_OPERAND (operation, 1);
+  op1 = gimple_assign_rhs1 (def_stmt);
+  op2 = gimple_assign_rhs2 (def_stmt);
   if (TREE_CODE (op1) != SSA_NAME || TREE_CODE (op2) != SSA_NAME)
     {
       if (vect_print_dump_info (REPORT_DETAILS))
-        {
-          fprintf (vect_dump, "reduction: uses not ssa_names: ");
-          print_generic_expr (vect_dump, operation, TDF_SLIM);
-        }
-      return NULL_TREE;
+       report_vect_op (def_stmt, "reduction: uses not ssa_names: ");
+      return NULL;
     }
 
   /* Check that it's ok to change the order of the computation.  */
-  type = TREE_TYPE (operation);
+  type = TREE_TYPE (gimple_assign_lhs (def_stmt));
   if (TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (TREE_TYPE (op1))
       || TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (TREE_TYPE (op2)))
     {
@@ -2487,7 +2522,7 @@ vect_is_simple_reduction (loop_vec_info loop_info, tree phi)
           fprintf (vect_dump, ",");
           print_generic_expr (vect_dump, TREE_TYPE (op2), TDF_SLIM);
         }
-      return NULL_TREE;
+      return NULL;
     }
 
   /* Generally, when vectorizing a reduction we change the order of the
@@ -2503,32 +2538,24 @@ vect_is_simple_reduction (loop_vec_info loop_info, tree phi)
     {
       /* Changing the order of operations changes the semantics.  */
       if (vect_print_dump_info (REPORT_DETAILS))
-        {
-          fprintf (vect_dump, "reduction: unsafe fp math optimization: ");
-          print_generic_expr (vect_dump, operation, TDF_SLIM);
-        }
-      return NULL_TREE;
+       report_vect_op (def_stmt, "reduction: unsafe fp math optimization: ");
+      return NULL;
     }
   else if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_TRAPS (type)
           && !nested_in_vect_loop_p (vect_loop, def_stmt))
     {
       /* Changing the order of operations changes the semantics.  */
       if (vect_print_dump_info (REPORT_DETAILS))
-        {
-          fprintf (vect_dump, "reduction: unsafe int math optimization: ");
-          print_generic_expr (vect_dump, operation, TDF_SLIM);
-        }
-      return NULL_TREE;
+       report_vect_op (def_stmt, "reduction: unsafe int math optimization: ");
+      return NULL;
     }
   else if (SAT_FIXED_POINT_TYPE_P (type))
     {
       /* Changing the order of operations changes the semantics.  */
       if (vect_print_dump_info (REPORT_DETAILS))
-        {
-          fprintf (vect_dump, "reduction: unsafe fixed-point math optimization: ");
-          print_generic_expr (vect_dump, operation, TDF_SLIM);
-        }
-      return NULL_TREE;
+       report_vect_op (def_stmt,
+                       "reduction: unsafe fixed-point math optimization: ");
+      return NULL;
     }
 
   /* reduction is safe. we're dealing with one of the following:
@@ -2537,14 +2564,11 @@ vect_is_simple_reduction (loop_vec_info loop_info, tree phi)
    */
   def1 = SSA_NAME_DEF_STMT (op1);
   def2 = SSA_NAME_DEF_STMT (op2);
-  if (!def1 || !def2 || IS_EMPTY_STMT (def1) || IS_EMPTY_STMT (def2))
+  if (!def1 || !def2 || gimple_nop_p (def1) || gimple_nop_p (def2))
     {
       if (vect_print_dump_info (REPORT_DETAILS))
-        {
-          fprintf (vect_dump, "reduction: no defs for operands: ");
-          print_generic_expr (vect_dump, operation, TDF_SLIM);
-        }
-      return NULL_TREE;
+       report_vect_op (def_stmt, "reduction: no defs for operands: ");
+      return NULL;
     }
 
 
@@ -2553,48 +2577,40 @@ vect_is_simple_reduction (loop_vec_info loop_info, tree phi)
      or it's an induction (defined by a loop-header phi-node).  */
 
   if (def2 == phi
-      && flow_bb_inside_loop_p (loop, bb_for_stmt (def1))
-      && (TREE_CODE (def1) == GIMPLE_MODIFY_STMT 
+      && flow_bb_inside_loop_p (loop, gimple_bb (def1))
+      && (is_gimple_assign (def1)
          || STMT_VINFO_DEF_TYPE (vinfo_for_stmt (def1)) == vect_induction_def
-         || (TREE_CODE (def1) == PHI_NODE 
+         || (gimple_code (def1) == GIMPLE_PHI
              && STMT_VINFO_DEF_TYPE (vinfo_for_stmt (def1)) == vect_loop_def
-             && !is_loop_header_bb_p (bb_for_stmt (def1)))))
+             && !is_loop_header_bb_p (gimple_bb (def1)))))
     {
       if (vect_print_dump_info (REPORT_DETAILS))
-        {
-          fprintf (vect_dump, "detected reduction:");
-          print_generic_expr (vect_dump, operation, TDF_SLIM);
-        }
+       report_vect_op (def_stmt, "detected reduction:");
       return def_stmt;
     }
   else if (def1 == phi
-          && flow_bb_inside_loop_p (loop, bb_for_stmt (def2))
-          && (TREE_CODE (def2) == GIMPLE_MODIFY_STMT 
+          && flow_bb_inside_loop_p (loop, gimple_bb (def2))
+          && (is_gimple_assign (def2)
               || STMT_VINFO_DEF_TYPE (vinfo_for_stmt (def2)) == vect_induction_def
-              || (TREE_CODE (def2) == PHI_NODE
+              || (gimple_code (def2) == GIMPLE_PHI
                   && STMT_VINFO_DEF_TYPE (vinfo_for_stmt (def2)) == vect_loop_def
-                  && !is_loop_header_bb_p (bb_for_stmt (def2)))))
+                  && !is_loop_header_bb_p (gimple_bb (def2)))))
     {
       /* Swap operands (just for simplicity - so that the rest of the code
         can assume that the reduction variable is always the last (second)
         argument).  */
       if (vect_print_dump_info (REPORT_DETAILS))
-        {
-          fprintf (vect_dump, "detected reduction: need to swap operands:");
-          print_generic_expr (vect_dump, operation, TDF_SLIM);
-        }
-      swap_tree_operands (def_stmt, &TREE_OPERAND (operation, 0), 
-                                   &TREE_OPERAND (operation, 1));
+       report_vect_op (def_stmt,
+                       "detected reduction: need to swap operands:");
+      swap_tree_operands (def_stmt, gimple_assign_rhs1_ptr (def_stmt),
+                         gimple_assign_rhs2_ptr (def_stmt));
       return def_stmt;
     }
   else
     {
       if (vect_print_dump_info (REPORT_DETAILS))
-        {
-          fprintf (vect_dump, "reduction: unknown pattern.");
-          print_generic_expr (vect_dump, operation, TDF_SLIM);
-        }
-      return NULL_TREE;
+       report_vect_op (def_stmt, "reduction: unknown pattern.");
+      return NULL;
     }
 }
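
(A minimal sketch of the statement shape vect_is_simple_reduction now tests
for, assuming the gimple_assign_* accessors used above; is_binary_ssa_op is a
hypothetical helper introduced only for illustration.)

static bool
is_binary_ssa_op (gimple stmt)
{
  if (!is_gimple_assign (stmt))
    return false;
  /* The operation code lives on the statement itself; the rhs class tells
     us how many operand slots are populated.  */
  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
      != GIMPLE_BINARY_RHS)
    return false;
  return (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
          && TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME);
}

Operand swapping likewise goes through gimple_assign_rhs1_ptr and
gimple_assign_rhs2_ptr rather than taking the address of TREE_OPERANDs of an
embedded expression.
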
 
@@ -2673,6 +2689,8 @@ vectorize_loops (void)
      need to be renamed.  */
   vect_memsyms_to_rename = BITMAP_ALLOC (NULL);
 
+  init_stmt_vec_info_vec ();
+
   /*  ----------- Analyze loops. -----------  */
 
   /* If some loop was duplicated, it gets bigger number 
@@ -2717,6 +2735,8 @@ vectorize_loops (void)
       loop->aux = NULL;
     }
 
+  free_stmt_vec_info_vec ();
+
   return num_vectorized_loops > 0 ? TODO_cleanup_cfg : 0;
 }
 
index 1c082f8..ea80a8d 100644
@@ -89,9 +89,9 @@ typedef struct _slp_tree {
   struct _slp_tree *left;
   struct _slp_tree *right;
   /* A group of scalar stmts to be vectorized together.  */
-  VEC (tree, heap) *stmts;
+  VEC (gimple, heap) *stmts;
   /* Vectorized stmt/s.  */
-  VEC (tree, heap) *vec_stmts;
+  VEC (gimple, heap) *vec_stmts;
   /* Number of vector stmts that are created to replace the group of scalar 
      stmts. It is calculated during the transformation phase as the number of 
      scalar elements in one scalar iteration (GROUP_SIZE) multiplied by VF 
@@ -200,14 +200,14 @@ typedef struct _loop_vec_info {
 
   /* Statements in the loop that have data references that are candidates for a
      runtime (loop versioning) misalignment check.  */
-  VEC(tree,heap) *may_misalign_stmts;
+  VEC(gimple,heap) *may_misalign_stmts;
 
   /* The loop location in the source.  */
   LOC loop_line_number;
 
   /* All interleaving chains of stores in the loop, represented by the first
      stmt in the chain.  */
-  VEC(tree, heap) *strided_stores;
+  VEC(gimple, heap) *strided_stores;
 
   /* All SLP instances in the loop. This is a subset of the set of STRIDED_STORES
      of the loop.  */
@@ -255,10 +255,10 @@ loop_vec_info_for_loop (struct loop *loop)
 }
 
 static inline bool
-nested_in_vect_loop_p (struct loop *loop, tree stmt)
+nested_in_vect_loop_p (struct loop *loop, gimple stmt)
 {
   return (loop->inner 
-          && (loop->inner == (bb_for_stmt (stmt))->loop_father));
+          && (loop->inner == (gimple_bb (stmt))->loop_father));
 }
 
 /*-----------------------------------------------------------------*/
@@ -329,7 +329,7 @@ typedef struct _stmt_vec_info {
   enum stmt_vec_info_type type;
 
   /* The stmt to which this info struct refers to.  */
-  tree stmt;
+  gimple stmt;
 
   /* The loop_vec_info with respect to which STMT is vectorized.  */
   loop_vec_info loop_vinfo;
@@ -347,7 +347,7 @@ typedef struct _stmt_vec_info {
   tree vectype;
 
   /* The vectorized version of the stmt.  */
-  tree vectorized_stmt;
+  gimple vectorized_stmt;
 
 
   /** The following is relevant only for stmts that contain a non-scalar
@@ -378,7 +378,7 @@ typedef struct _stmt_vec_info {
         related_stmt of the "pattern stmt" points back to this stmt (which is 
         the last stmt in the original sequence of stmts that constitutes the 
         pattern).  */
-  tree related_stmt;
+  gimple related_stmt;
 
   /* List of datarefs that are known to have the same alignment as the dataref
      of this stmt.  */
@@ -389,9 +389,9 @@ typedef struct _stmt_vec_info {
 
   /* Interleaving info.  */
   /* First data-ref in the interleaving group.  */
-  tree first_dr;
+  gimple first_dr;
   /* Pointer to the next data-ref in the group.  */
-  tree next_dr;
+  gimple next_dr;
   /* The size of the interleaving group.  */
   unsigned int size;
   /* For stores, number of stores from this group seen. We vectorize the last
@@ -402,7 +402,7 @@ typedef struct _stmt_vec_info {
   unsigned int gap;
   /* In case that two or more stmts share data-ref, this is the pointer to the
      previously detected stmt with the same dr.  */
-  tree same_dr_stmt;
+  gimple same_dr_stmt;
   /* For loads only, if there is a store with the same location, this field is
      TRUE.  */
   bool read_write_dep;
@@ -522,27 +522,46 @@ typedef struct _stmt_vec_info {
 #define TARG_VEC_STORE_COST          1
 #endif
 
-static inline void set_stmt_info (stmt_ann_t ann, stmt_vec_info stmt_info);
-static inline stmt_vec_info vinfo_for_stmt (tree stmt);
+/* Avoid GTY(()) on stmt_vec_info.  */
+typedef void *vec_void_p;
+DEF_VEC_P (vec_void_p);
+DEF_VEC_ALLOC_P (vec_void_p, heap);
 
-static inline void
-set_stmt_info (stmt_ann_t ann, stmt_vec_info stmt_info)
+extern VEC(vec_void_p,heap) *stmt_vec_info_vec;
+
+void init_stmt_vec_info_vec (void);
+void free_stmt_vec_info_vec (void);
+
+static inline stmt_vec_info
+vinfo_for_stmt (gimple stmt)
 {
-  if (ann)
-    ann->common.aux = (char *) stmt_info;
+  unsigned int uid = gimple_uid (stmt);
+  if (uid == 0)
+    return NULL;
+
+  gcc_assert (uid <= VEC_length (vec_void_p, stmt_vec_info_vec));
+  return (stmt_vec_info) VEC_index (vec_void_p, stmt_vec_info_vec, uid - 1);
 }
 
-static inline stmt_vec_info
-vinfo_for_stmt (tree stmt)
+static inline void
+set_vinfo_for_stmt (gimple stmt, stmt_vec_info info)
 {
-  stmt_ann_t ann = stmt_ann (stmt);
-  return ann ? (stmt_vec_info) ann->common.aux : NULL;
+  unsigned int uid = gimple_uid (stmt);
+  if (uid == 0)
+    {
+      gcc_assert (info);
+      uid = VEC_length (vec_void_p, stmt_vec_info_vec) + 1;
+      gimple_set_uid (stmt, uid);
+      VEC_safe_push (vec_void_p, heap, stmt_vec_info_vec, (vec_void_p) info);
+    }
+  else
+    VEC_replace (vec_void_p, stmt_vec_info_vec, uid - 1, (vec_void_p) info);
 }
 
 static inline bool
 is_pattern_stmt_p (stmt_vec_info stmt_info)
 {
-  tree related_stmt;
+  gimple related_stmt;
   stmt_vec_info related_stmt_info;
 
   related_stmt = STMT_VINFO_RELATED_STMT (stmt_info);
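
(The stmt_vec_info no longer hangs off a statement annotation; it lives in the
stmt_vec_info_vec side table and is keyed by gimple_uid, with uid 0 reserved
for "no info".  A minimal usage sketch under that assumption; attach_stmt_info
is a hypothetical wrapper, not part of the interface above.)

static void
attach_stmt_info (gimple stmt, loop_vec_info loop_vinfo)
{
  stmt_vec_info info = new_stmt_vec_info (stmt, loop_vinfo);

  /* The first attachment pushes INFO and records its 1-based index as the
     statement's uid; later calls overwrite the existing slot.  */
  set_vinfo_for_stmt (stmt, info);
  gcc_assert (vinfo_for_stmt (stmt) == info);
}

This is also why vectorize_loops now brackets the pass with
init_stmt_vec_info_vec and free_stmt_vec_info_vec.
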
@@ -643,24 +662,24 @@ extern void slpeel_verify_cfg_after_peeling (struct loop *, struct loop *);
  *************************************************************************/
 /** In tree-vectorizer.c **/
 extern tree get_vectype_for_scalar_type (tree);
-extern bool vect_is_simple_use (tree, loop_vec_info, tree *, tree *,
+extern bool vect_is_simple_use (tree, loop_vec_info, gimple *, tree *,
                                enum vect_def_type *);
 extern bool vect_is_simple_iv_evolution (unsigned, tree, tree *, tree *);
-extern tree vect_is_simple_reduction (loop_vec_info, tree);
+extern gimple vect_is_simple_reduction (loop_vec_info, gimple);
 extern bool vect_can_force_dr_alignment_p (const_tree, unsigned int);
 extern enum dr_alignment_support vect_supportable_dr_alignment
   (struct data_reference *);
 extern bool reduction_code_for_scalar_code (enum tree_code, enum tree_code *);
-extern bool supportable_widening_operation (enum tree_code, tree, tree,
+extern bool supportable_widening_operation (enum tree_code, gimple, tree,
   tree *, tree *, enum tree_code *, enum tree_code *);
-extern bool supportable_narrowing_operation (enum tree_code, const_tree,
+extern bool supportable_narrowing_operation (enum tree_code, const_gimple,
                                             const_tree, enum tree_code *);
 
 /* Creation and deletion of loop and stmt info structs.  */
 extern loop_vec_info new_loop_vec_info (struct loop *loop);
 extern void destroy_loop_vec_info (loop_vec_info, bool);
-extern stmt_vec_info new_stmt_vec_info (tree stmt, loop_vec_info);
-extern void free_stmt_vec_info (tree stmt);
+extern stmt_vec_info new_stmt_vec_info (gimple stmt, loop_vec_info);
+extern void free_stmt_vec_info (gimple stmt);
 
 
 /** In tree-vect-analyze.c  **/
@@ -673,28 +692,33 @@ extern loop_vec_info vect_analyze_loop_form (struct loop *);
 /* Pattern recognition functions.
    Additional pattern recognition functions can (and will) be added
    in the future.  */
-typedef tree (* vect_recog_func_ptr) (tree, tree *, tree *);
+typedef gimple (* vect_recog_func_ptr) (gimple, tree *, tree *);
 #define NUM_PATTERNS 4
 void vect_pattern_recog (loop_vec_info);
 
 
 /** In tree-vect-transform.c  **/
-extern bool vectorizable_load (tree, block_stmt_iterator *, tree *, slp_tree);
-extern bool vectorizable_store (tree, block_stmt_iterator *, tree *, slp_tree);
-extern bool vectorizable_operation (tree, block_stmt_iterator *, tree *, 
+extern bool vectorizable_load (gimple, gimple_stmt_iterator *, gimple *,
+                              slp_tree);
+extern bool vectorizable_store (gimple, gimple_stmt_iterator *, gimple *,
+                               slp_tree);
+extern bool vectorizable_operation (gimple, gimple_stmt_iterator *, gimple *,
                                    slp_tree);
-extern bool vectorizable_type_promotion (tree, block_stmt_iterator *, tree *);
-extern bool vectorizable_type_demotion (tree, block_stmt_iterator *, tree *);
-extern bool vectorizable_conversion (tree, block_stmt_iterator *, 
-                                    tree *, slp_tree);
-extern bool vectorizable_assignment (tree, block_stmt_iterator *, tree *, 
+extern bool vectorizable_type_promotion (gimple, gimple_stmt_iterator *,
+                                        gimple *);
+extern bool vectorizable_type_demotion (gimple, gimple_stmt_iterator *,
+                                       gimple *);
+extern bool vectorizable_conversion (gimple, gimple_stmt_iterator *, gimple *,
+                                    slp_tree);
+extern bool vectorizable_assignment (gimple, gimple_stmt_iterator *, gimple *,
                                     slp_tree);
-extern tree vectorizable_function (tree, tree, tree);
-extern bool vectorizable_call (tree, block_stmt_iterator *, tree *);
-extern bool vectorizable_condition (tree, block_stmt_iterator *, tree *);
-extern bool vectorizable_live_operation (tree, block_stmt_iterator *, tree *);
-extern bool vectorizable_reduction (tree, block_stmt_iterator *, tree *);
-extern bool vectorizable_induction (tree, block_stmt_iterator *, tree *);
+extern tree vectorizable_function (gimple, tree, tree);
+extern bool vectorizable_call (gimple, gimple_stmt_iterator *, gimple *);
+extern bool vectorizable_condition (gimple, gimple_stmt_iterator *, gimple *);
+extern bool vectorizable_live_operation (gimple, gimple_stmt_iterator *,
+                                        gimple *);
+extern bool vectorizable_reduction (gimple, gimple_stmt_iterator *, gimple *);
+extern bool vectorizable_induction (gimple, gimple_stmt_iterator *, gimple *);
 extern int  vect_estimate_min_profitable_iters (loop_vec_info);
 extern void vect_model_simple_cost (stmt_vec_info, int, enum vect_def_type *, 
                                    slp_tree);
index 62f3147..40bec2e 100644
@@ -38,6 +38,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "tree-ssa-propagate.h"
 #include "tree-chrec.h"
 
+
 /* Set of SSA names found during the dominator traversal of a
    sub-graph in find_assert_locations.  */
 static sbitmap found_in_subgraph;
@@ -64,7 +65,7 @@ struct assert_locus_d
   edge e;
 
   /* Pointer to the statement that generated this assertion.  */
-  block_stmt_iterator si;
+  gimple_stmt_iterator si;
 
   /* Predicate code for the ASSERT_EXPR.  Must be COMPARISON_CLASS_P.  */
   enum tree_code comp_code;
@@ -104,7 +105,7 @@ static value_range_t **vr_value;
 static int *vr_phi_edge_counts;
 
 typedef struct {
-  tree stmt;
+  gimple stmt;
   tree vec;
 } switch_update;
 
@@ -276,6 +277,18 @@ is_overflow_infinity (const_tree val)
          && (vrp_val_is_min (val) || vrp_val_is_max (val)));
 }
 
+/* Return whether STMT has a constant rhs that is an overflow infinity.  */
+
+static inline bool
+stmt_overflow_infinity (gimple stmt)
+{
+  if (is_gimple_assign (stmt)
+      && get_gimple_rhs_class (gimple_assign_rhs_code (stmt)) ==
+      GIMPLE_SINGLE_RHS)
+    return is_overflow_infinity (gimple_assign_rhs1 (stmt));
+  return false;
+}
+
 /* If VAL is now an overflow infinity, return VAL.  Otherwise, return
    the same value with TREE_OVERFLOW clear.  This can be used to avoid
    confusing a regular value with an overflow value.  */
@@ -777,21 +790,143 @@ vrp_expr_computes_nonnegative (tree expr, bool *strict_overflow_p)
              && ssa_name_nonnegative_p (expr)));
 }
 
+/* Return true if the result of assignment STMT is known to be non-negative.
+   If the return value is based on the assumption that signed overflow is
+   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
+   *STRICT_OVERFLOW_P.  */
+
+static bool
+gimple_assign_nonnegative_warnv_p (gimple stmt, bool *strict_overflow_p)
+{
+  enum tree_code code = gimple_assign_rhs_code (stmt);
+  switch (get_gimple_rhs_class (code))
+    {
+    case GIMPLE_UNARY_RHS:
+      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
+                                            gimple_expr_type (stmt),
+                                            gimple_assign_rhs1 (stmt),
+                                            strict_overflow_p);
+    case GIMPLE_BINARY_RHS:
+      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
+                                             gimple_expr_type (stmt),
+                                             gimple_assign_rhs1 (stmt),
+                                             gimple_assign_rhs2 (stmt),
+                                             strict_overflow_p);
+    case GIMPLE_SINGLE_RHS:
+      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
+                                             strict_overflow_p);
+    case GIMPLE_INVALID_RHS:
+      gcc_unreachable ();
+    default:
+      gcc_unreachable ();
+    }
+}
+
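
(The dispatch above relies on the rhs-class trichotomy of tuple assignments:
a GIMPLE_SINGLE_RHS statement keeps a whole GENERIC operand in rhs1, while
unary and binary statements keep only their operands and carry the operation
in the statement code.  A sketch of that mapping, assuming only gimple.h and
the GENERIC builders; rhs_as_tree is hypothetical and for illustration only.)

static tree
rhs_as_tree (gimple stmt)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree type = gimple_expr_type (stmt);

  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_SINGLE_RHS:
      return gimple_assign_rhs1 (stmt);
    case GIMPLE_UNARY_RHS:
      return build1 (code, type, gimple_assign_rhs1 (stmt));
    case GIMPLE_BINARY_RHS:
      return build2 (code, type, gimple_assign_rhs1 (stmt),
                     gimple_assign_rhs2 (stmt));
    default:
      gcc_unreachable ();
    }
}
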
+/* Return true if the return value of call STMT is known to be non-negative.
+   If the return value is based on the assumption that signed overflow is
+   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
+   *STRICT_OVERFLOW_P.  */
+
+static bool
+gimple_call_nonnegative_warnv_p (gimple stmt, bool *strict_overflow_p)
+{
+  tree arg0 = gimple_call_num_args (stmt) > 0 ?
+    gimple_call_arg (stmt, 0) : NULL_TREE;
+  tree arg1 = gimple_call_num_args (stmt) > 1 ?
+    gimple_call_arg (stmt, 1) : NULL_TREE;
+
+  return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
+                                       gimple_call_fndecl (stmt),
+                                       arg0,
+                                       arg1,
+                                       strict_overflow_p);
+}
+
+/* Return true if STMT is known to compute a non-negative value.
+   If the return value is based on the assumption that signed overflow is
+   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
+   *STRICT_OVERFLOW_P.  */
+
+static bool
+gimple_stmt_nonnegative_warnv_p (gimple stmt, bool *strict_overflow_p)
+{
+  switch (gimple_code (stmt))
+    {
+    case GIMPLE_ASSIGN:
+      return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p);
+    case GIMPLE_CALL:
+      return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p);
+    default:
+      gcc_unreachable ();
+    }
+}
+
+/* Return true if the result of assignment STMT is known to be non-zero.
+   If the return value is based on the assumption that signed overflow is
+   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
+   *STRICT_OVERFLOW_P.  */
+
+static bool
+gimple_assign_nonzero_warnv_p (gimple stmt, bool *strict_overflow_p)
+{
+  enum tree_code code = gimple_assign_rhs_code (stmt);
+  switch (get_gimple_rhs_class (code))
+    {
+    case GIMPLE_UNARY_RHS:
+      return tree_unary_nonzero_warnv_p (gimple_assign_rhs_code (stmt),
+                                        gimple_expr_type (stmt),
+                                        gimple_assign_rhs1 (stmt),
+                                        strict_overflow_p);
+    case GIMPLE_BINARY_RHS:
+      return tree_binary_nonzero_warnv_p (gimple_assign_rhs_code (stmt),
+                                         gimple_expr_type (stmt),
+                                         gimple_assign_rhs1 (stmt),
+                                         gimple_assign_rhs2 (stmt),
+                                         strict_overflow_p);
+    case GIMPLE_SINGLE_RHS:
+      return tree_single_nonzero_warnv_p (gimple_assign_rhs1 (stmt),
+                                         strict_overflow_p);
+    case GIMPLE_INVALID_RHS:
+      gcc_unreachable ();
+    default:
+      gcc_unreachable ();
+    }
+}
+
+/* Return true if STMT is known to compute a non-zero value.
+   If the return value is based on the assumption that signed overflow is
+   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
+   *STRICT_OVERFLOW_P.  */
+
+static bool
+gimple_stmt_nonzero_warnv_p (gimple stmt, bool *strict_overflow_p)
+{
+  switch (gimple_code (stmt))
+    {
+    case GIMPLE_ASSIGN:
+      return gimple_assign_nonzero_warnv_p (stmt, strict_overflow_p);
+    case GIMPLE_CALL:
+      return gimple_alloca_call_p (stmt);
+    default:
+      gcc_unreachable ();
+    }
+}
+
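
(Both dispatchers above abort on anything other than an assignment or a call,
so a caller starting from an SSA name is expected to check the defining
statement's code first.  A usage sketch under that assumption;
ssa_name_known_nonnegative_p is a hypothetical helper.)

static bool
ssa_name_known_nonnegative_p (tree name, bool *strict_overflow_p)
{
  gimple def = SSA_NAME_DEF_STMT (name);

  if (gimple_code (def) != GIMPLE_ASSIGN
      && gimple_code (def) != GIMPLE_CALL)
    return false;

  return gimple_stmt_nonnegative_warnv_p (def, strict_overflow_p);
}
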
 /* Like tree_expr_nonzero_warnv_p, but this function uses value ranges
    obtained so far.  */
 
 static bool
-vrp_expr_computes_nonzero (tree expr, bool *strict_overflow_p)
+vrp_stmt_computes_nonzero (gimple stmt, bool *strict_overflow_p)
 {
-  if (tree_expr_nonzero_warnv_p (expr, strict_overflow_p)
-      || (TREE_CODE (expr) == SSA_NAME
-         && ssa_name_nonzero_p (expr)))
+  if (gimple_stmt_nonzero_warnv_p (stmt, strict_overflow_p))
     return true;
 
   /* If we have an expression of the form &X->a, then the expression
      is nonnull if X is nonnull.  */
-  if (TREE_CODE (expr) == ADDR_EXPR)
+  if (is_gimple_assign (stmt)
+      && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
     {
+      tree expr = gimple_assign_rhs1 (stmt);
       tree base = get_base_address (TREE_OPERAND (expr, 0));
 
       if (base != NULL_TREE
@@ -2709,10 +2844,9 @@ extract_range_from_comparison (value_range_t *vr, enum tree_code code,
                               tree type, tree op0, tree op1)
 {
   bool sop = false;
-  tree val = vrp_evaluate_conditional_warnv_with_ops (code,
-                                                     op0,
-                                                     op1,
-                                                     false, &sop);
+  tree val;
+
+  val = vrp_evaluate_conditional_warnv_with_ops (code, op0, op1, false, &sop);
 
   /* A disadvantage of using a special infinity as an overflow
      representation is that we lose the ability to record overflow
@@ -2735,56 +2869,67 @@ extract_range_from_comparison (value_range_t *vr, enum tree_code code,
     set_value_range_to_truthvalue (vr, type);
 }
 
+/* Try to derive a nonnegative or nonzero range out of STMT relying
+   primarily on generic routines in fold in conjunction with range data.
+   Store the result in *VR.  */
 
-/* Try to compute a useful range out of expression EXPR and store it
+static void
+extract_range_basic (value_range_t *vr, gimple stmt)
+{
+  bool sop = false;
+  tree type = gimple_expr_type (stmt);
+
+  if (INTEGRAL_TYPE_P (type)
+      && gimple_stmt_nonnegative_warnv_p (stmt, &sop))
+    set_value_range_to_nonnegative (vr, type,
+                                   sop || stmt_overflow_infinity (stmt));
+  else if (vrp_stmt_computes_nonzero (stmt, &sop)
+          && !sop)
+    set_value_range_to_nonnull (vr, type);
+  else
+    set_value_range_to_varying (vr);
+}
+
+
+/* Try to compute a useful range out of assignment STMT and store it
    in *VR.  */
 
 static void
-extract_range_from_expr (value_range_t *vr, tree expr)
+extract_range_from_assignment (value_range_t *vr, gimple stmt)
 {
-  enum tree_code code = TREE_CODE (expr);
+  enum tree_code code = gimple_assign_rhs_code (stmt);
 
   if (code == ASSERT_EXPR)
-    extract_range_from_assert (vr, expr);
+    extract_range_from_assert (vr, gimple_assign_rhs1 (stmt));
   else if (code == SSA_NAME)
-    extract_range_from_ssa_name (vr, expr);
+    extract_range_from_ssa_name (vr, gimple_assign_rhs1 (stmt));
   else if (TREE_CODE_CLASS (code) == tcc_binary
           || code == TRUTH_AND_EXPR
           || code == TRUTH_OR_EXPR
           || code == TRUTH_XOR_EXPR)
-    extract_range_from_binary_expr (vr, TREE_CODE (expr), TREE_TYPE (expr),
-                                   TREE_OPERAND (expr, 0),
-                                   TREE_OPERAND (expr, 1));
+    extract_range_from_binary_expr (vr, gimple_assign_rhs_code (stmt),
+                                   gimple_expr_type (stmt),
+                                   gimple_assign_rhs1 (stmt),
+                                   gimple_assign_rhs2 (stmt));
   else if (TREE_CODE_CLASS (code) == tcc_unary)
-    extract_range_from_unary_expr (vr, TREE_CODE (expr), TREE_TYPE (expr),
-                                  TREE_OPERAND (expr, 0));
+    extract_range_from_unary_expr (vr, gimple_assign_rhs_code (stmt),
+                                  gimple_expr_type (stmt),
+                                  gimple_assign_rhs1 (stmt));
   else if (code == COND_EXPR)
-    extract_range_from_cond_expr (vr, expr);
+    extract_range_from_cond_expr (vr, gimple_assign_rhs1 (stmt));
   else if (TREE_CODE_CLASS (code) == tcc_comparison)
-    extract_range_from_comparison (vr, TREE_CODE (expr), TREE_TYPE (expr),
-                                  TREE_OPERAND (expr, 0),
-                                  TREE_OPERAND (expr, 1));
-  else if (is_gimple_min_invariant (expr))
-    set_value_range_to_value (vr, expr, NULL);
+    extract_range_from_comparison (vr, gimple_assign_rhs_code (stmt),
+                                  gimple_expr_type (stmt),
+                                  gimple_assign_rhs1 (stmt),
+                                  gimple_assign_rhs2 (stmt));
+  else if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS
+          && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
+    set_value_range_to_value (vr, gimple_assign_rhs1 (stmt), NULL);
   else
     set_value_range_to_varying (vr);
 
-  /* If we got a varying range from the tests above, try a final
-     time to derive a nonnegative or nonzero range.  This time
-     relying primarily on generic routines in fold in conjunction
-     with range data.  */
   if (vr->type == VR_VARYING)
-    {
-      bool sop = false;
-
-      if (INTEGRAL_TYPE_P (TREE_TYPE (expr))
-         && vrp_expr_computes_nonnegative (expr, &sop))
-       set_value_range_to_nonnegative (vr, TREE_TYPE (expr),
-                                       sop || is_overflow_infinity (expr));
-      else if (vrp_expr_computes_nonzero (expr, &sop)
-              && !sop)
-        set_value_range_to_nonnull (vr, TREE_TYPE (expr));
-    }
+    extract_range_basic (vr, stmt);
 }
 
 /* Given a range VR, a LOOP and a variable VAR, determine whether it
@@ -2792,8 +2937,8 @@ extract_range_from_expr (value_range_t *vr, tree expr)
    for VAR.  If so, update VR with the new limits.  */
 
 static void
-adjust_range_with_scev (value_range_t *vr, struct loop *loop, tree stmt,
-                       tree var)
+adjust_range_with_scev (value_range_t *vr, struct loop *loop,
+                       gimple stmt, tree var)
 {
   tree init, step, chrec, tmin, tmax, min, max, type;
   enum ev_direction dir;
@@ -2926,7 +3071,7 @@ adjust_range_with_scev (value_range_t *vr, struct loop *loop, tree stmt,
    overflow.  */
 
 static bool
-vrp_var_may_overflow (tree var, tree stmt)
+vrp_var_may_overflow (tree var, gimple stmt)
 {
   struct loop *l;
   tree chrec, init, step;
@@ -3374,31 +3519,32 @@ debug_all_value_ranges (void)
    create a new SSA name N and return the assertion assignment
    'V = ASSERT_EXPR <V, V OP W>'.  */
 
-static tree
+static gimple
 build_assert_expr_for (tree cond, tree v)
 {
-  tree n, assertion;
+  tree n;
+  gimple assertion;
 
   gcc_assert (TREE_CODE (v) == SSA_NAME);
-  n = duplicate_ssa_name (v, NULL_TREE);
+  n = duplicate_ssa_name (v, NULL);
 
   if (COMPARISON_CLASS_P (cond))
     {
       tree a = build2 (ASSERT_EXPR, TREE_TYPE (v), v, cond); 
-      assertion = build_gimple_modify_stmt (n, a);
+      assertion = gimple_build_assign (n, a);
     }
   else if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
     {
       /* Given !V, build the assignment N = false.  */
       tree op0 = TREE_OPERAND (cond, 0);
       gcc_assert (op0 == v);
-      assertion = build_gimple_modify_stmt (n, boolean_false_node);
+      assertion = gimple_build_assign (n, boolean_false_node);
     }
   else if (TREE_CODE (cond) == SSA_NAME)
     {
       /* Given V, build the assignment N = true.  */
       gcc_assert (v == cond);
-      assertion = build_gimple_modify_stmt (n, boolean_true_node);
+      assertion = gimple_build_assign (n, boolean_true_node);
     }
   else
     gcc_unreachable ();
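
(Under tuples the assertion is a real assignment statement rather than a
GENERIC MODIFY_EXPR, so it is built with gimple_build_assign and later
inserted through a gimple_stmt_iterator.  A minimal sketch of the same
construction in isolation; assert_nonzero_for is a hypothetical name, and the
caller remains responsible for inserting the statement and for the remaining
SSA bookkeeping.)

static gimple
assert_nonzero_for (tree v)
{
  tree n = duplicate_ssa_name (v, NULL);
  tree cond = build2 (NE_EXPR, boolean_type_node, v,
                      build_int_cst (TREE_TYPE (v), 0));
  tree a = build2 (ASSERT_EXPR, TREE_TYPE (v), v, cond);

  /* N = ASSERT_EXPR <V, V != 0>  */
  return gimple_build_assign (n, a);
}
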
@@ -3419,10 +3565,11 @@ build_assert_expr_for (tree cond, tree v)
    point values.  */
 
 static inline bool
-fp_predicate (const_tree expr)
+fp_predicate (gimple stmt)
 {
-  return (COMPARISON_CLASS_P (expr)
-         && FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0))));
+  GIMPLE_CHECK (stmt, GIMPLE_COND);
+
+  return FLOAT_TYPE_P (TREE_TYPE (gimple_cond_lhs (stmt)));
 }
 
 
@@ -3432,7 +3579,7 @@ fp_predicate (const_tree expr)
    inferred.  */
 
 static bool
-infer_value_range (tree stmt, tree op, enum tree_code *comp_code_p, tree *val_p)
+infer_value_range (gimple stmt, tree op, enum tree_code *comp_code_p, tree *val_p)
 {
   *val_p = NULL_TREE;
   *comp_code_p = ERROR_MARK;
@@ -3444,21 +3591,21 @@ infer_value_range (tree stmt, tree op, enum tree_code *comp_code_p, tree *val_p)
 
   /* Similarly, don't infer anything from statements that may throw
      exceptions.  */
-  if (tree_could_throw_p (stmt))
+  if (stmt_could_throw_p (stmt))
     return false;
 
   /* If STMT is the last statement of a basic block with no
      successors, there is no point inferring anything about any of its
      operands.  We would not be able to find a proper insertion point
      for the assertion, anyway.  */
-  if (stmt_ends_bb_p (stmt) && EDGE_COUNT (bb_for_stmt (stmt)->succs) == 0)
+  if (stmt_ends_bb_p (stmt) && EDGE_COUNT (gimple_bb (stmt)->succs) == 0)
     return false;
 
   /* We can only assume that a pointer dereference will yield
      non-NULL if -fdelete-null-pointer-checks is enabled.  */
   if (flag_delete_null_pointer_checks
       && POINTER_TYPE_P (TREE_TYPE (op))
-      && TREE_CODE (stmt) != ASM_EXPR)
+      && gimple_code (stmt) != GIMPLE_ASM)
     {
       unsigned num_uses, num_loads, num_stores;
 
@@ -3495,7 +3642,7 @@ dump_asserts_for (FILE *file, tree name)
   while (loc)
     {
       fprintf (file, "\t");
-      print_generic_expr (file, bsi_stmt (loc->si), 0);
+      print_gimple_stmt (file, gsi_stmt (loc->si), 0, 0);
       fprintf (file, "\n\tBB #%d", loc->bb->index);
       if (loc->e)
        {
@@ -3565,7 +3712,7 @@ register_new_assert_for (tree name, tree expr,
                         tree val,
                         basic_block bb,
                         edge e,
-                        block_stmt_iterator si)
+                        gimple_stmt_iterator si)
 {
   assert_locus_t n, loc, last_loc;
   bool found;
@@ -3575,8 +3722,8 @@ register_new_assert_for (tree name, tree expr,
   gcc_assert (bb == NULL || e == NULL);
 
   if (e == NULL)
-    gcc_assert (TREE_CODE (bsi_stmt (si)) != COND_EXPR
-               && TREE_CODE (bsi_stmt (si)) != SWITCH_EXPR);
+    gcc_assert (gimple_code (gsi_stmt (si)) != GIMPLE_COND
+               && gimple_code (gsi_stmt (si)) != GIMPLE_SWITCH);
 #endif
 
   /* The new assertion A will be inserted at BB or E.  We need to
@@ -3748,7 +3895,7 @@ extract_code_and_val_from_cond_with_ops (tree name, enum tree_code cond_code,
    Return true if an assertion for NAME could be registered.  */
 
 static bool
-register_edge_assert_for_2 (tree name, edge e, block_stmt_iterator bsi,
+register_edge_assert_for_2 (tree name, edge e, gimple_stmt_iterator bsi,
                            enum tree_code cond_code,
                            tree cond_op0, tree cond_op1, bool invert)
 {
@@ -3780,30 +3927,29 @@ register_edge_assert_for_2 (tree name, edge e, block_stmt_iterator bsi,
       && TREE_CODE (val) == INTEGER_CST
       && TYPE_UNSIGNED (TREE_TYPE (val)))
     {
-      tree def_stmt = SSA_NAME_DEF_STMT (name);
+      gimple def_stmt = SSA_NAME_DEF_STMT (name);
       tree cst2 = NULL_TREE, name2 = NULL_TREE, name3 = NULL_TREE;
 
       /* Extract CST2 from the (optional) addition.  */
-      if (TREE_CODE (def_stmt) == GIMPLE_MODIFY_STMT
-         && TREE_CODE (GIMPLE_STMT_OPERAND (def_stmt, 1)) == PLUS_EXPR)
+      if (is_gimple_assign (def_stmt)
+         && gimple_assign_rhs_code (def_stmt) == PLUS_EXPR)
        {
-         name2 = TREE_OPERAND (GIMPLE_STMT_OPERAND (def_stmt, 1), 0);
-         cst2 = TREE_OPERAND (GIMPLE_STMT_OPERAND (def_stmt, 1), 1);
+         name2 = gimple_assign_rhs1 (def_stmt);
+         cst2 = gimple_assign_rhs2 (def_stmt);
          if (TREE_CODE (name2) == SSA_NAME
              && TREE_CODE (cst2) == INTEGER_CST)
            def_stmt = SSA_NAME_DEF_STMT (name2);
        }
 
       /* Extract NAME2 from the (optional) sign-changing cast.  */
-      if (TREE_CODE (def_stmt) == GIMPLE_MODIFY_STMT
-          && CONVERT_EXPR_P (GIMPLE_STMT_OPERAND (def_stmt, 1)))
+      if (gimple_assign_cast_p (def_stmt))
        {
-         tree rhs = GIMPLE_STMT_OPERAND (def_stmt, 1);
-         if (CONVERT_EXPR_P (rhs)
-             && ! TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (rhs, 0)))
-             && (TYPE_PRECISION (TREE_TYPE (rhs))
-                 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (rhs, 0)))))
-           name3 = TREE_OPERAND (GIMPLE_STMT_OPERAND (def_stmt, 1), 0);
+         if ((gimple_assign_rhs_code (def_stmt) == NOP_EXPR
+              || gimple_assign_rhs_code (def_stmt) == CONVERT_EXPR)
+             && ! TYPE_UNSIGNED (TREE_TYPE (gimple_assign_rhs1 (def_stmt)))
+             && (TYPE_PRECISION (gimple_expr_type (def_stmt))
+                 == TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt)))))
+           name3 = gimple_assign_rhs1 (def_stmt);
        }
 
       /* If name3 is used later, create an ASSERT_EXPR for it.  */
@@ -3880,10 +4026,11 @@ register_edge_assert_for_2 (tree name, edge e, block_stmt_iterator bsi,
 
 static bool
 register_edge_assert_for_1 (tree op, enum tree_code code,
-                           edge e, block_stmt_iterator bsi)
+                           edge e, gimple_stmt_iterator bsi)
 {
   bool retval = false;
-  tree op_def, rhs, val;
+  gimple op_def;
+  tree val;
   enum tree_code rhs_code;
 
   /* We only care about SSA_NAMEs.  */
@@ -3907,17 +4054,16 @@ register_edge_assert_for_1 (tree op, enum tree_code code,
      a truth operation or some bit operations, then we may be able
      to register information about the operands of that assignment.  */
   op_def = SSA_NAME_DEF_STMT (op);
-  if (TREE_CODE (op_def) != GIMPLE_MODIFY_STMT)
+  if (gimple_code (op_def) != GIMPLE_ASSIGN)
     return retval;
 
-  rhs = GIMPLE_STMT_OPERAND (op_def, 1);
-  rhs_code = TREE_CODE (rhs);
+  rhs_code = gimple_assign_rhs_code (op_def);
 
-  if (COMPARISON_CLASS_P (rhs))
+  if (TREE_CODE_CLASS (rhs_code) == tcc_comparison)
     {
       bool invert = (code == EQ_EXPR ? true : false);
-      tree op0 = TREE_OPERAND (rhs, 0);
-      tree op1 = TREE_OPERAND (rhs, 1);
+      tree op0 = gimple_assign_rhs1 (op_def);
+      tree op1 = gimple_assign_rhs2 (op_def);
 
       if (TREE_CODE (op0) == SSA_NAME)
         retval |= register_edge_assert_for_2 (op0, e, bsi, rhs_code, op0, op1,
@@ -3927,34 +4073,36 @@ register_edge_assert_for_1 (tree op, enum tree_code code,
                                              invert);
     }
   else if ((code == NE_EXPR
-           && (TREE_CODE (rhs) == TRUTH_AND_EXPR
-               || TREE_CODE (rhs) == BIT_AND_EXPR))
+           && (gimple_assign_rhs_code (op_def) == TRUTH_AND_EXPR
+               || gimple_assign_rhs_code (op_def) == BIT_AND_EXPR))
           || (code == EQ_EXPR
-              && (TREE_CODE (rhs) == TRUTH_OR_EXPR
-                  || TREE_CODE (rhs) == BIT_IOR_EXPR)))
+              && (gimple_assign_rhs_code (op_def) == TRUTH_OR_EXPR
+                  || gimple_assign_rhs_code (op_def) == BIT_IOR_EXPR)))
     {
       /* Recurse on each operand.  */
-      retval |= register_edge_assert_for_1 (TREE_OPERAND (rhs, 0),
+      retval |= register_edge_assert_for_1 (gimple_assign_rhs1 (op_def),
                                            code, e, bsi);
-      retval |= register_edge_assert_for_1 (TREE_OPERAND (rhs, 1),
+      retval |= register_edge_assert_for_1 (gimple_assign_rhs2 (op_def),
                                            code, e, bsi);
     }
-  else if (TREE_CODE (rhs) == TRUTH_NOT_EXPR)
+  else if (gimple_assign_rhs_code (op_def) == TRUTH_NOT_EXPR)
     {
       /* Recurse, flipping CODE.  */
       code = invert_tree_comparison (code, false);
-      retval |= register_edge_assert_for_1 (TREE_OPERAND (rhs, 0),
+      retval |= register_edge_assert_for_1 (gimple_assign_rhs1 (op_def),
                                            code, e, bsi);
     }
-  else if (TREE_CODE (rhs) == SSA_NAME)
+  else if (gimple_assign_rhs_code (op_def) == SSA_NAME)
     {
       /* Recurse through the copy.  */
-      retval |= register_edge_assert_for_1 (rhs, code, e, bsi);
+      retval |= register_edge_assert_for_1 (gimple_assign_rhs1 (op_def),
+                                           code, e, bsi);
     }
-  else if (CONVERT_EXPR_P (rhs))
+  else if (gimple_assign_rhs_code (op_def) == NOP_EXPR
+          || gimple_assign_rhs_code (op_def) == CONVERT_EXPR)
     { 
       /* Recurse through the type conversion.  */
-      retval |= register_edge_assert_for_1 (TREE_OPERAND (rhs, 0),
+      retval |= register_edge_assert_for_1 (gimple_assign_rhs1 (op_def),
                                            code, e, bsi);
     }
 
@@ -3966,7 +4114,7 @@ register_edge_assert_for_1 (tree op, enum tree_code code,
    Return true if an assertion for NAME could be registered.  */
 
 static bool
-register_edge_assert_for (tree name, edge e, block_stmt_iterator si,
+register_edge_assert_for (tree name, edge e, gimple_stmt_iterator si,
                          enum tree_code cond_code, tree cond_op0,
                          tree cond_op1)
 {
@@ -4001,14 +4149,14 @@ register_edge_assert_for (tree name, edge e, block_stmt_iterator si,
   if (((comp_code == EQ_EXPR && integer_onep (val))
        || (comp_code == NE_EXPR && integer_zerop (val))))
     {
-      tree def_stmt = SSA_NAME_DEF_STMT (name);
+      gimple def_stmt = SSA_NAME_DEF_STMT (name);
 
-      if (TREE_CODE (def_stmt) == GIMPLE_MODIFY_STMT
-         && (TREE_CODE (GIMPLE_STMT_OPERAND (def_stmt, 1)) == TRUTH_AND_EXPR
-             || TREE_CODE (GIMPLE_STMT_OPERAND (def_stmt, 1)) == BIT_AND_EXPR))
+      if (is_gimple_assign (def_stmt)
+         && (gimple_assign_rhs_code (def_stmt) == TRUTH_AND_EXPR
+             || gimple_assign_rhs_code (def_stmt) == BIT_AND_EXPR))
        {
-         tree op0 = TREE_OPERAND (GIMPLE_STMT_OPERAND (def_stmt, 1), 0);
-         tree op1 = TREE_OPERAND (GIMPLE_STMT_OPERAND (def_stmt, 1), 1);
+         tree op0 = gimple_assign_rhs1 (def_stmt);
+         tree op1 = gimple_assign_rhs2 (def_stmt);
          retval |= register_edge_assert_for_1 (op0, NE_EXPR, e, si);
          retval |= register_edge_assert_for_1 (op1, NE_EXPR, e, si);
        }
@@ -4020,18 +4168,17 @@ register_edge_assert_for (tree name, edge e, block_stmt_iterator si,
   if (((comp_code == EQ_EXPR && integer_zerop (val))
        || (comp_code == NE_EXPR && integer_onep (val))))
     {
-      tree def_stmt = SSA_NAME_DEF_STMT (name);
+      gimple def_stmt = SSA_NAME_DEF_STMT (name);
 
-      if (TREE_CODE (def_stmt) == GIMPLE_MODIFY_STMT
-         && (TREE_CODE (GIMPLE_STMT_OPERAND (def_stmt, 1)) == TRUTH_OR_EXPR
+      if (is_gimple_assign (def_stmt)
+         && (gimple_assign_rhs_code (def_stmt) == TRUTH_OR_EXPR
              /* For BIT_IOR_EXPR only if NAME == 0 both operands have
                 necessarily zero value.  */
              || (comp_code == EQ_EXPR
-                 && (TREE_CODE (GIMPLE_STMT_OPERAND (def_stmt, 1))
-                       == BIT_IOR_EXPR))))
+                 && (gimple_assign_rhs_code (def_stmt) == BIT_IOR_EXPR))))
        {
-         tree op0 = TREE_OPERAND (GIMPLE_STMT_OPERAND (def_stmt, 1), 0);
-         tree op1 = TREE_OPERAND (GIMPLE_STMT_OPERAND (def_stmt, 1), 1);
+         tree op0 = gimple_assign_rhs1 (def_stmt);
+         tree op1 = gimple_assign_rhs2 (def_stmt);
          retval |= register_edge_assert_for_1 (op0, EQ_EXPR, e, si);
          retval |= register_edge_assert_for_1 (op1, EQ_EXPR, e, si);
        }
@@ -4052,17 +4199,17 @@ static bool find_assert_locations (basic_block bb);
    list of assertions for the corresponding operands.  */
 
 static bool
-find_conditional_asserts (basic_block bb, tree last)
+find_conditional_asserts (basic_block bb, gimple last)
 {
   bool need_assert;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator bsi;
   tree op;
   edge_iterator ei;
   edge e;
   ssa_op_iter iter;
 
   need_assert = false;
-  bsi = bsi_for_stmt (last);
+  bsi = gsi_for_stmt (last);
 
   /* Look for uses of the operands in each of the sub-graphs
      rooted at BB.  We need to check each of the outgoing edges
@@ -4106,15 +4253,10 @@ find_conditional_asserts (basic_block bb, tree last)
         conditional predicate.  */
       FOR_EACH_SSA_TREE_OPERAND (op, last, iter, SSA_OP_USE)
        {
-         tree cond = COND_EXPR_COND (last);
-         if (op != cond)
-           need_assert |= register_edge_assert_for (op, e, bsi,
-                                                    TREE_CODE (cond),
-                                                    TREE_OPERAND (cond, 0),
-                                                    TREE_OPERAND (cond, 1));
-         else
-           need_assert |= register_edge_assert_for (op, e, bsi, EQ_EXPR, op,
-                                                    boolean_true_node);
+         need_assert |= register_edge_assert_for (op, e, bsi,
+                                                  gimple_cond_code (last),
+                                                  gimple_cond_lhs (last),
+                                                  gimple_cond_rhs (last));
        }
     }
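
(A GIMPLE_COND carries its predicate as three separate fields, the comparison
code plus its two operands, instead of a COND_EXPR_COND tree, which is why the
loop above can hand the pieces straight to register_edge_assert_for.  A small
sketch of reading them back; dump_cond_stmt is a hypothetical helper.)

static void
dump_cond_stmt (FILE *file, gimple cond_stmt)
{
  GIMPLE_CHECK (cond_stmt, GIMPLE_COND);

  fprintf (file, "%s: ", tree_code_name[(int) gimple_cond_code (cond_stmt)]);
  print_generic_expr (file, gimple_cond_lhs (cond_stmt), 0);
  fprintf (file, ", ");
  print_generic_expr (file, gimple_cond_rhs (cond_stmt), 0);
  fprintf (file, "\n");
}
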
 
@@ -4162,26 +4304,26 @@ compare_case_labels (const void *p1, const void *p2)
    list of assertions for the corresponding operands.  */
 
 static bool
-find_switch_asserts (basic_block bb, tree last)
+find_switch_asserts (basic_block bb, gimple last)
 {
   bool need_assert;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator bsi;
   tree op;
   edge e;
-  tree vec = SWITCH_LABELS (last), vec2;
-  size_t n = TREE_VEC_LENGTH (vec);
+  tree vec2;
+  size_t n = gimple_switch_num_labels (last);
   unsigned int idx;
 
   need_assert = false;
-  bsi = bsi_for_stmt (last);
-  op = TREE_OPERAND (last, 0);
+  bsi = gsi_for_stmt (last);
+  op = gimple_switch_index (last);
   if (TREE_CODE (op) != SSA_NAME)
     return false;
 
   /* Build a vector of case labels sorted by destination label.  */
   vec2 = make_tree_vec (n);
   for (idx = 0; idx < n; ++idx)
-    TREE_VEC_ELT (vec2, idx) = TREE_VEC_ELT (vec, idx);
+    TREE_VEC_ELT (vec2, idx) = gimple_switch_label (last, idx);
   qsort (&TREE_VEC_ELT (vec2, 0), n, sizeof (tree), compare_case_labels);
 
   for (idx = 0; idx < n; ++idx)
@@ -4321,8 +4463,9 @@ find_switch_asserts (basic_block bb, tree last)
 static bool
 find_assert_locations (basic_block bb)
 {
-  block_stmt_iterator si;
-  tree last, phi;
+  gimple_stmt_iterator si;
+  gimple last;
+  gimple phi;
   bool need_assert;
   basic_block son;
 
@@ -4334,10 +4477,11 @@ find_assert_locations (basic_block bb)
   need_assert = false;
 
   /* Traverse all PHI nodes in BB marking used operands.  */
-  for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+  for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
     {
       use_operand_p arg_p;
       ssa_op_iter i;
+      phi = gsi_stmt (si);
 
       FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_USE)
        {
@@ -4352,13 +4496,14 @@ find_assert_locations (basic_block bb)
 
   /* Traverse all the statements in BB marking used names and looking
      for statements that may infer assertions for their used operands.  */
-  last = NULL_TREE;
-  for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
+  last = NULL;
+  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
     {
-      tree stmt, op;
+      gimple stmt;
+      tree op;
       ssa_op_iter i;
 
-      stmt = bsi_stmt (si);
+      stmt = gsi_stmt (si);
 
       /* See if we can derive an assertion for any of STMT's operands.  */
       FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE)
@@ -4387,20 +4532,16 @@ find_assert_locations (basic_block bb)
              if (comp_code == NE_EXPR && integer_zerop (value))
                {
                  tree t = op;
-                 tree def_stmt = SSA_NAME_DEF_STMT (t);
+                 gimple def_stmt = SSA_NAME_DEF_STMT (t);
        
-                 while (TREE_CODE (def_stmt) == GIMPLE_MODIFY_STMT
+                 while (is_gimple_assign (def_stmt)
+                        && gimple_assign_rhs_code (def_stmt) == NOP_EXPR
                         && TREE_CODE
-                            (GIMPLE_STMT_OPERAND (def_stmt, 1)) == NOP_EXPR
-                        && TREE_CODE
-                            (TREE_OPERAND (GIMPLE_STMT_OPERAND (def_stmt, 1),
-                                           0)) == SSA_NAME
+                            (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
                         && POINTER_TYPE_P
-                            (TREE_TYPE (TREE_OPERAND
-                                         (GIMPLE_STMT_OPERAND (def_stmt,
-                                                               1), 0))))
+                            (TREE_TYPE (gimple_assign_rhs1 (def_stmt))))
                    {
-                     t = TREE_OPERAND (GIMPLE_STMT_OPERAND (def_stmt, 1), 0);
+                     t = gimple_assign_rhs1 (def_stmt);
                      def_stmt = SSA_NAME_DEF_STMT (t);
 
                      /* Note we want to register the assert for the
@@ -4435,13 +4576,13 @@ find_assert_locations (basic_block bb)
      involving integer operands, recurse into each of the sub-graphs
      rooted at BB to determine if we need to add ASSERT_EXPRs.  */
   if (last
-      && TREE_CODE (last) == COND_EXPR
-      && !fp_predicate (COND_EXPR_COND (last))
+      && gimple_code (last) == GIMPLE_COND
+      && !fp_predicate (last)
       && !ZERO_SSA_OPERANDS (last, SSA_OP_USE))
     need_assert |= find_conditional_asserts (bb, last);
 
   if (last
-      && TREE_CODE (last) == SWITCH_EXPR
+      && gimple_code (last) == GIMPLE_SWITCH
       && !ZERO_SSA_OPERANDS (last, SSA_OP_USE))
     need_assert |= find_switch_asserts (bb, last);
 
@@ -4462,32 +4603,33 @@ static bool
 process_assert_insertions_for (tree name, assert_locus_t loc)
 {
   /* Build the comparison expression NAME_i COMP_CODE VAL.  */
-  tree stmt, cond, assert_expr;
+  gimple stmt;
+  tree cond;
+  gimple assert_stmt;
   edge_iterator ei;
   edge e;
 
   cond = build2 (loc->comp_code, boolean_type_node, loc->expr, loc->val);
-  assert_expr = build_assert_expr_for (cond, name);
-
+  assert_stmt = build_assert_expr_for (cond, name);
   if (loc->e)
     {
       /* We have been asked to insert the assertion on an edge.  This
         is used only by COND_EXPR and SWITCH_EXPR assertions.  */
 #if defined ENABLE_CHECKING
-      gcc_assert (TREE_CODE (bsi_stmt (loc->si)) == COND_EXPR
-         || TREE_CODE (bsi_stmt (loc->si)) == SWITCH_EXPR);
+      gcc_assert (gimple_code (gsi_stmt (loc->si)) == GIMPLE_COND
+         || gimple_code (gsi_stmt (loc->si)) == GIMPLE_SWITCH);
 #endif
 
-      bsi_insert_on_edge (loc->e, assert_expr);
+      gsi_insert_on_edge (loc->e, assert_stmt);
       return true;
     }
 
   /* Otherwise, we can insert right after LOC->SI iff the
      statement must not be the last statement in the block.  */
-  stmt = bsi_stmt (loc->si);
+  stmt = gsi_stmt (loc->si);
   if (!stmt_ends_bb_p (stmt))
     {
-      bsi_insert_after (&loc->si, assert_expr, BSI_SAME_STMT);
+      gsi_insert_after (&loc->si, assert_stmt, GSI_SAME_STMT);
       return false;
     }
 
@@ -4498,7 +4640,7 @@ process_assert_insertions_for (tree name, assert_locus_t loc)
   FOR_EACH_EDGE (e, ei, loc->bb->succs)
     if (!(e->flags & EDGE_ABNORMAL))
       {
-       bsi_insert_on_edge (e, assert_expr);
+       gsi_insert_on_edge (e, assert_stmt);
        return true;
       }
 
@@ -4537,7 +4679,7 @@ process_assert_insertions (void)
     }
 
   if (update_edges_p)
-    bsi_commit_edge_inserts ();
+    gsi_commit_edge_inserts ();
 
   statistics_counter_event (cfun, "Number of ASSERT_EXPR expressions inserted",
                            num_asserts);
@@ -4624,7 +4766,7 @@ insert_range_assertions (void)
    IGNORE_OFF_BY_ONE is true if the ARRAY_REF is inside a ADDR_EXPR.  */
 
 static void
-check_array_ref (tree ref, location_t* locus, bool ignore_off_by_one)
+check_array_ref (tree ref, const location_t *location, bool ignore_off_by_one)
 {
   value_range_t* vr = NULL;
   tree low_sub, up_sub;
@@ -4664,7 +4806,7 @@ check_array_ref (tree ref, location_t* locus, bool ignore_off_by_one)
           && tree_int_cst_lt (low_sub, low_bound))
         {
           warning (OPT_Warray_bounds,
-                   "%Harray subscript is outside array bounds", locus);
+                   "%Harray subscript is outside array bounds", location);
           TREE_NO_WARNING (ref) = 1;
         }
     }
@@ -4679,14 +4821,14 @@ check_array_ref (tree ref, location_t* locus, bool ignore_off_by_one)
                                        up_sub)))
     {
       warning (OPT_Warray_bounds, "%Harray subscript is above array bounds",
-               locus);
+               location);
       TREE_NO_WARNING (ref) = 1;
     }
   else if (TREE_CODE (low_sub) == INTEGER_CST
            && tree_int_cst_lt (low_sub, low_bound))
     {
       warning (OPT_Warray_bounds, "%Harray subscript is below array bounds",
-               locus);
+               location);
       TREE_NO_WARNING (ref) = 1;
     }
 }
@@ -4695,14 +4837,20 @@ check_array_ref (tree ref, location_t* locus, bool ignore_off_by_one)
    address of an ARRAY_REF, and call check_array_ref on it.  */
 
 static void
-search_for_addr_array(tree t, location_t* location)
+search_for_addr_array(tree t, const location_t *location)
 {
   while (TREE_CODE (t) == SSA_NAME)
     {
-      t = SSA_NAME_DEF_STMT (t);
-      if (TREE_CODE (t) != GIMPLE_MODIFY_STMT)
+      gimple g = SSA_NAME_DEF_STMT (t);
+
+      if (gimple_code (g) != GIMPLE_ASSIGN)
        return;
-      t = GIMPLE_STMT_OPERAND (t, 1);
+
+      if (get_gimple_rhs_class (gimple_assign_rhs_code (g)) !=
+         GIMPLE_SINGLE_RHS)
+       return;
+
+      t = gimple_assign_rhs1 (g);
     }
 
 
@@ -4731,14 +4879,8 @@ static tree
 check_array_bounds (tree *tp, int *walk_subtree, void *data)
 {
   tree t = *tp;
-  tree stmt = (tree)data;
-  location_t *location = EXPR_LOCUS (stmt);
-
-  if (!EXPR_HAS_LOCATION (stmt))
-    {
-      *walk_subtree = FALSE;
-      return NULL_TREE;
-    }
+  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
+  const location_t *location = (const location_t *) wi->info;
 
   *walk_subtree = TRUE;
 
@@ -4748,14 +4890,6 @@ check_array_bounds (tree *tp, int *walk_subtree, void *data)
   if (TREE_CODE (t) == INDIRECT_REF
       || (TREE_CODE (t) == RETURN_EXPR && TREE_OPERAND (t, 0)))
     search_for_addr_array (TREE_OPERAND (t, 0), location);
-  else if (TREE_CODE (t) == CALL_EXPR)
-    {
-      tree arg;
-      call_expr_arg_iterator iter;
-
-      FOR_EACH_CALL_EXPR_ARG (arg, iter, t) 
-       search_for_addr_array (arg, location);
-    }
 
   if (TREE_CODE (t) == ADDR_EXPR)
     *walk_subtree = FALSE;
@@ -4770,7 +4904,7 @@ static void
 check_all_array_refs (void)
 {
   basic_block bb;
-  block_stmt_iterator si;
+  gimple_stmt_iterator si;
 
   FOR_EACH_BB (bb)
     {
@@ -4778,21 +4912,46 @@ check_all_array_refs (void)
       if (single_pred_p (bb))
       {
        basic_block pred_bb = EDGE_PRED (bb, 0)->src;
-       tree ls = NULL_TREE;
+       gimple ls = NULL;
 
-       if (!bsi_end_p (bsi_last (pred_bb)))
-         ls = bsi_stmt (bsi_last (pred_bb));
+       if (!gsi_end_p (gsi_last_bb (pred_bb)))
+         ls = gsi_stmt (gsi_last_bb (pred_bb));
 
-       if (ls && TREE_CODE (ls) == COND_EXPR
-           && ((COND_EXPR_COND (ls) == boolean_false_node
+       if (ls && gimple_code (ls) == GIMPLE_COND
+           && ((gimple_cond_false_p (ls)
                 && (EDGE_PRED (bb, 0)->flags & EDGE_TRUE_VALUE))
-               || (COND_EXPR_COND (ls) == boolean_true_node
+               || (gimple_cond_true_p (ls)
                    && (EDGE_PRED (bb, 0)->flags & EDGE_FALSE_VALUE))))
          continue;
       }
-      for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
-       walk_tree (bsi_stmt_ptr (si), check_array_bounds,
-                  bsi_stmt (si), NULL);
+      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
+       {
+         gimple stmt = gsi_stmt (si);
+         const location_t *location = gimple_location_ptr (stmt);
+         struct walk_stmt_info wi;
+         if (!gimple_has_location (stmt))
+           continue;
+
+         if (is_gimple_call (stmt))
+           {
+             size_t i;
+             size_t n = gimple_call_num_args (stmt);
+             for (i = 0; i < n; i++)
+               {
+                 tree arg = gimple_call_arg (stmt, i);
+                 search_for_addr_array (arg, location);
+               }
+           }
+         else
+           {
+             memset (&wi, 0, sizeof (wi));
+             wi.info = CONST_CAST (void *, (const void *) location);
+
+             walk_gimple_op (gsi_stmt (si),
+                             check_array_bounds,
+                             &wi);
+           }
+       }
     }
 }
 
@@ -4824,21 +4983,22 @@ static void
 remove_range_assertions (void)
 {
   basic_block bb;
-  block_stmt_iterator si;
+  gimple_stmt_iterator si;
 
   /* Note that the BSI iterator bump happens at the bottom of the
      loop and no bump is necessary if we're removing the statement
      referenced by the current BSI.  */
   FOR_EACH_BB (bb)
-    for (si = bsi_start (bb); !bsi_end_p (si);)
+    for (si = gsi_start_bb (bb); !gsi_end_p (si);)
       {
-       tree stmt = bsi_stmt (si);
-       tree use_stmt;
+       gimple stmt = gsi_stmt (si);
+       gimple use_stmt;
 
-       if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
-           && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == ASSERT_EXPR)
+       if (is_gimple_assign (stmt)
+           && gimple_assign_rhs_code (stmt) == ASSERT_EXPR)
          {
-           tree rhs = GIMPLE_STMT_OPERAND (stmt, 1), var;
+           tree rhs = gimple_assign_rhs1 (stmt);
+           tree var;
            tree cond = fold (ASSERT_EXPR_COND (rhs));
            use_operand_p use_p;
            imm_use_iterator iter;
@@ -4848,7 +5008,7 @@ remove_range_assertions (void)
            /* Propagate the RHS into every use of the LHS.  */
            var = ASSERT_EXPR_VAR (rhs);
            FOR_EACH_IMM_USE_STMT (use_stmt, iter,
-                                  GIMPLE_STMT_OPERAND (stmt, 0))
+                                  gimple_assign_lhs (stmt))
              FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                {
                  SET_USE (use_p, var);
@@ -4856,11 +5016,11 @@ remove_range_assertions (void)
                }
 
            /* And finally, remove the copy, it is not needed.  */
-           bsi_remove (&si, true);
+           gsi_remove (&si, true);
            release_defs (stmt); 
          }
        else
-         bsi_next (&si);
+         gsi_next (&si);
       }
 
   sbitmap_free (blocks_visited);
@@ -4870,32 +5030,31 @@ remove_range_assertions (void)
 /* Return true if STMT is interesting for VRP.  */
 
 static bool
-stmt_interesting_for_vrp (tree stmt)
+stmt_interesting_for_vrp (gimple stmt)
 {
-  if (TREE_CODE (stmt) == PHI_NODE
-      && is_gimple_reg (PHI_RESULT (stmt))
-      && (INTEGRAL_TYPE_P (TREE_TYPE (PHI_RESULT (stmt)))
-         || POINTER_TYPE_P (TREE_TYPE (PHI_RESULT (stmt)))))
+  if (gimple_code (stmt) == GIMPLE_PHI
+      && is_gimple_reg (gimple_phi_result (stmt))
+      && (INTEGRAL_TYPE_P (TREE_TYPE (gimple_phi_result (stmt)))
+         || POINTER_TYPE_P (TREE_TYPE (gimple_phi_result (stmt)))))
     return true;
-  else if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
+  else if (is_gimple_assign (stmt) || is_gimple_call (stmt))
     {
-      tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);
-      tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
+      tree lhs = gimple_get_lhs (stmt);
 
       /* In general, assignments with virtual operands are not useful
         for deriving ranges, with the obvious exception of calls to
         builtin functions.  */
-      if (TREE_CODE (lhs) == SSA_NAME
+      if (lhs && TREE_CODE (lhs) == SSA_NAME
          && (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
              || POINTER_TYPE_P (TREE_TYPE (lhs)))
-         && ((TREE_CODE (rhs) == CALL_EXPR
-              && TREE_CODE (CALL_EXPR_FN (rhs)) == ADDR_EXPR
-              && DECL_P (TREE_OPERAND (CALL_EXPR_FN (rhs), 0))
-              && DECL_IS_BUILTIN (TREE_OPERAND (CALL_EXPR_FN (rhs), 0)))
+         && ((is_gimple_call (stmt)
+              && gimple_call_fndecl (stmt) != NULL_TREE
+              && DECL_IS_BUILTIN (gimple_call_fndecl (stmt)))
              || ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS)))
        return true;
     }
-  else if (TREE_CODE (stmt) == COND_EXPR || TREE_CODE (stmt) == SWITCH_EXPR)
+  else if (gimple_code (stmt) == GIMPLE_COND
+          || gimple_code (stmt) == GIMPLE_SWITCH)
     return true;
 
   return false;
@@ -4914,24 +5073,24 @@ vrp_initialize (void)
 
   FOR_EACH_BB (bb)
     {
-      block_stmt_iterator si;
-      tree phi;
+      gimple_stmt_iterator si;
 
-      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+      for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
        {
+         gimple phi = gsi_stmt (si);
          if (!stmt_interesting_for_vrp (phi))
            {
              tree lhs = PHI_RESULT (phi);
              set_value_range_to_varying (get_value_range (lhs));
-             DONT_SIMULATE_AGAIN (phi) = true;
+             prop_set_simulate_again (phi, false);
            }
          else
-           DONT_SIMULATE_AGAIN (phi) = false;
+           prop_set_simulate_again (phi, true);
        }
 
-      for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
+      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
         {
-         tree stmt = bsi_stmt (si);
+         gimple stmt = gsi_stmt (si);
 
          if (!stmt_interesting_for_vrp (stmt))
            {
@@ -4939,11 +5098,11 @@ vrp_initialize (void)
              tree def;
              FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
                set_value_range_to_varying (get_value_range (def));
-             DONT_SIMULATE_AGAIN (stmt) = true;
+             prop_set_simulate_again (stmt, false);
            }
          else
            {
-             DONT_SIMULATE_AGAIN (stmt) = false;
+             prop_set_simulate_again (stmt, true);
            }
        }
     }
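
As a rough illustration of the iteration idiom the hunks above convert to, here is a minimal sketch (not taken from the patch; it only uses the gsi_*/gimple_* accessors that appear in the hunks, and assumes the usual GCC-internal headers such as tree.h, gimple.h, basic-block.h and tree-flow.h):

/* Walk every PHI node and statement of the current function with the
   tuple iterators.  Statements are 'gimple' objects inspected through
   gimple_code ()/is_gimple_* predicates and typed accessors rather
   than TREE_CODE () and TREE_OPERAND ().  */

static void
walk_function_stmts (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (bb)
    {
      /* PHI nodes get their own iterator; the phi_nodes ()/PHI_CHAIN
         chain walk is gone.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple phi = gsi_stmt (gsi);
          tree lhs = gimple_phi_result (phi);
          (void) lhs;
        }

      /* bsi_start/bsi_end_p/bsi_next map one-to-one onto gsi_*.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);

          if (is_gimple_assign (stmt))
            {
              tree lhs = gimple_assign_lhs (stmt);
              tree rhs1 = gimple_assign_rhs1 (stmt);
              (void) lhs;
              (void) rhs1;
            }
        }
    }
}
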
@@ -4954,13 +5113,12 @@ vrp_initialize (void)
    the SSA name in *OUTPUT_P.  */
 
 static enum ssa_prop_result
-vrp_visit_assignment (tree stmt, tree *output_p)
+vrp_visit_assignment_or_call (gimple stmt, tree *output_p)
 {
-  tree lhs, rhs, def;
+  tree def, lhs;
   ssa_op_iter iter;
-
-  lhs = GIMPLE_STMT_OPERAND (stmt, 0);
-  rhs = GIMPLE_STMT_OPERAND (stmt, 1);
+  enum gimple_code code = gimple_code (stmt);
+  lhs = gimple_get_lhs (stmt);
 
   /* We only keep track of ranges in integral and pointer types.  */
   if (TREE_CODE (lhs) == SSA_NAME
@@ -4974,7 +5132,10 @@ vrp_visit_assignment (tree stmt, tree *output_p)
       struct loop *l;
       value_range_t new_vr = { VR_UNDEFINED, NULL_TREE, NULL_TREE, NULL };
 
-      extract_range_from_expr (&new_vr, rhs);
+      if (code == GIMPLE_CALL)
+       extract_range_basic (&new_vr, stmt);
+      else
+       extract_range_from_assignment (&new_vr, stmt);
 
       /* If STMT is inside a loop, we may be able to know something
         else about the range of LHS by examining scalar evolution
@@ -5235,15 +5396,12 @@ vrp_evaluate_conditional_warnv_with_ops (enum tree_code code, tree op0,
   if (use_equiv_p)
     {
       if (TREE_CODE (op0) == SSA_NAME && TREE_CODE (op1) == SSA_NAME)
-       return compare_names (code, op0, op1,
-                             strict_overflow_p);
+       return compare_names (code, op0, op1, strict_overflow_p);
       else if (TREE_CODE (op0) == SSA_NAME)
-       return compare_name_with_value (code, op0, op1,
-                                       strict_overflow_p);
+       return compare_name_with_value (code, op0, op1, strict_overflow_p);
       else if (TREE_CODE (op1) == SSA_NAME)
        return (compare_name_with_value
-               (swap_tree_comparison (code), op1, op0,
-                strict_overflow_p));
+               (swap_tree_comparison (code), op1, op0, strict_overflow_p));
     }
   else
     {
@@ -5253,15 +5411,12 @@ vrp_evaluate_conditional_warnv_with_ops (enum tree_code code, tree op0,
       vr1 = (TREE_CODE (op1) == SSA_NAME) ? get_value_range (op1) : NULL;
 
       if (vr0 && vr1)
-       return compare_ranges (code, vr0, vr1,
-                              strict_overflow_p);
+       return compare_ranges (code, vr0, vr1, strict_overflow_p);
       else if (vr0 && vr1 == NULL)
-       return compare_range_with_value (code, vr0, op1,
-                                        strict_overflow_p);
+       return compare_range_with_value (code, vr0, op1, strict_overflow_p);
       else if (vr0 == NULL && vr1)
        return (compare_range_with_value
-               (swap_tree_comparison (code), vr1, op0,
-                strict_overflow_p));
+               (swap_tree_comparison (code), vr1, op0, strict_overflow_p));
     }
   return NULL_TREE;
 }
@@ -5274,17 +5429,13 @@ vrp_evaluate_conditional_warnv_with_ops (enum tree_code code, tree op0,
    appropriate.  */
 
 tree
-vrp_evaluate_conditional (enum tree_code code, tree op0, tree op1, tree stmt)
+vrp_evaluate_conditional (enum tree_code code, tree op0, tree op1, gimple stmt)
 {
   bool sop;
   tree ret;
 
   sop = false;
-  ret = vrp_evaluate_conditional_warnv_with_ops (code,
-                                                op0,
-                                                op1,
-                                                true,
-                                                &sop);
+  ret = vrp_evaluate_conditional_warnv_with_ops (code, op0, op1, true, &sop);
 
   if (ret && sop)
     {
@@ -5306,13 +5457,13 @@ vrp_evaluate_conditional (enum tree_code code, tree op0, tree op1, tree stmt)
 
       if (issue_strict_overflow_warning (wc))
        {
-         location_t locus;
+         location_t location;
 
-         if (!EXPR_HAS_LOCATION (stmt))
-           locus = input_location;
+         if (!gimple_has_location (stmt))
+           location = input_location;
          else
-           locus = EXPR_LOCATION (stmt);
-         warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
+           location = gimple_location (stmt);
+         warning (OPT_Wstrict_overflow, "%H%s", &location, warnmsg);
        }
     }
 
@@ -5346,14 +5497,14 @@ vrp_evaluate_conditional (enum tree_code code, tree op0, tree op1, tree stmt)
 
       if (warnmsg)
        {
-         location_t locus;
+         location_t location;
 
-         if (!EXPR_HAS_LOCATION (stmt))
-           locus = input_location;
+         if (!gimple_has_location (stmt))
+           location = input_location;
          else
-           locus = EXPR_LOCATION (stmt);
+           location = gimple_location (stmt);
 
-         warning (OPT_Wtype_limits, "%H%s", &locus, warnmsg);
+         warning (OPT_Wtype_limits, "%H%s", &location, warnmsg);
        }
     }
 
@@ -5367,13 +5518,12 @@ vrp_evaluate_conditional (enum tree_code code, tree op0, tree op1, tree stmt)
    SSA_PROP_VARYING.  */
 
 static enum ssa_prop_result
-vrp_visit_cond_stmt (tree stmt, edge *taken_edge_p)
+vrp_visit_cond_stmt (gimple stmt, edge *taken_edge_p)
 {
-  tree cond, val;
+  tree val;
   bool sop;
 
   *taken_edge_p = NULL;
-  cond = COND_EXPR_COND (stmt);
 
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
@@ -5381,7 +5531,7 @@ vrp_visit_cond_stmt (tree stmt, edge *taken_edge_p)
       ssa_op_iter i;
 
       fprintf (dump_file, "\nVisiting conditional with predicate: ");
-      print_generic_expr (dump_file, cond, 0);
+      print_gimple_stmt (dump_file, stmt, 0, 0);
       fprintf (dump_file, "\nWith known ranges\n");
       
       FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
@@ -5439,22 +5589,14 @@ vrp_visit_cond_stmt (tree stmt, edge *taken_edge_p)
      4 more predicates folded in SPEC.  */
   sop = false;
 
-  if (TREE_CODE (cond) == SSA_NAME)
-    val = vrp_evaluate_conditional_warnv_with_ops (EQ_EXPR,
-                                                  cond,
-                                                  boolean_true_node,
-                                                  false,
-                                                  &sop);
-  else
-    val = vrp_evaluate_conditional_warnv_with_ops (TREE_CODE (cond),
-                                                  TREE_OPERAND (cond, 0),
-                                                  TREE_OPERAND (cond, 1),
-                                                  false,
-                                                  &sop);
+  val = vrp_evaluate_conditional_warnv_with_ops (gimple_cond_code (stmt),
+                                                gimple_cond_lhs (stmt),
+                                                gimple_cond_rhs (stmt),
+                                                false, &sop);
   if (val)
     {
       if (!sop)
-       *taken_edge_p = find_taken_edge (bb_for_stmt (stmt), val);
+       *taken_edge_p = find_taken_edge (gimple_bb (stmt), val);
       else
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
@@ -5479,7 +5621,7 @@ vrp_visit_cond_stmt (tree stmt, edge *taken_edge_p)
 
 /* Searches the case label vector VEC for the index *IDX of the CASE_LABEL
    that includes the value VAL.  The search is restricted to the range
-   [START_IDX, n - 2] where n is the size of VEC (n - 1 is the default label).
+   [START_IDX, n - 1] where n is the size of VEC.
 
    If there is a CASE_LABEL for VAL, its index is placed in IDX and true is
    returned.
@@ -5487,25 +5629,25 @@ vrp_visit_cond_stmt (tree stmt, edge *taken_edge_p)
    If there is no CASE_LABEL for VAL and there is one that is larger than VAL,
    it is placed in IDX and false is returned.
 
-   If VAL is larger than any CASE_LABEL, n - 1 is placed on IDX and false is
+   If VAL is larger than any CASE_LABEL, n is placed on IDX and false is
    returned. */
 
 static bool
-find_case_label_index (tree vec, size_t start_idx, tree val, size_t *idx)
+find_case_label_index (gimple stmt, size_t start_idx, tree val, size_t *idx)
 {
-  size_t n = TREE_VEC_LENGTH (vec);
+  size_t n = gimple_switch_num_labels (stmt);
   size_t low, high;
 
   /* Find case label for minimum of the value range or the next one.
      At each iteration we are searching in [low, high - 1]. */
 
-  for (low = start_idx, high = n - 1; high != low; )
+  for (low = start_idx, high = n; high != low; )
     {
       tree t;
       int cmp;
-      /* Note that i != high, so we never ask for n - 1. */
+      /* Note that i != high, so we never ask for n. */
       size_t i = (high + low) / 2;
-      t = TREE_VEC_ELT (vec, i);
+      t = gimple_switch_label (stmt, i);
 
       /* Cache the result of comparing CASE_LOW and val.  */
       cmp = tree_int_cst_compare (CASE_LOW (t), val);
@@ -5541,11 +5683,12 @@ find_case_label_index (tree vec, size_t start_idx, tree val, size_t *idx)
    Returns true if the default label is not needed. */
 
 static bool
-find_case_label_range (tree vec, tree min, tree max, size_t *min_idx, size_t *max_idx)
+find_case_label_range (gimple stmt, tree min, tree max, size_t *min_idx,
+                      size_t *max_idx)
 {
   size_t i, j;
-  bool min_take_default = !find_case_label_index (vec, 0, min, &i);
-  bool max_take_default = !find_case_label_index (vec, i, max, &j);
+  bool min_take_default = !find_case_label_index (stmt, 1, min, &i);
+  bool max_take_default = !find_case_label_index (stmt, i, max, &j);
 
   if (i == j
       && min_take_default
@@ -5568,20 +5711,20 @@ find_case_label_range (tree vec, tree min, tree max, size_t *min_idx, size_t *ma
 
       /* If the case label range is continuous, we do not need
         the default case label.  Verify that.  */
-      high = CASE_LOW (TREE_VEC_ELT (vec, i));
-      if (CASE_HIGH (TREE_VEC_ELT (vec, i)))
-       high = CASE_HIGH (TREE_VEC_ELT (vec, i));
+      high = CASE_LOW (gimple_switch_label (stmt, i));
+      if (CASE_HIGH (gimple_switch_label (stmt, i)))
+       high = CASE_HIGH (gimple_switch_label (stmt, i));
       for (k = i + 1; k <= j; ++k)
        {
-         low = CASE_LOW (TREE_VEC_ELT (vec, k));
+         low = CASE_LOW (gimple_switch_label (stmt, k));
          if (!integer_onep (int_const_binop (MINUS_EXPR, low, high, 0)))
            {
              take_default = true;
              break;
            }
          high = low;
-         if (CASE_HIGH (TREE_VEC_ELT (vec, k)))
-           high = CASE_HIGH (TREE_VEC_ELT (vec, k));
+         if (CASE_HIGH (gimple_switch_label (stmt, k)))
+           high = CASE_HIGH (gimple_switch_label (stmt, k));
        }
 
       *min_idx = i;
@@ -5596,16 +5739,15 @@ find_case_label_range (tree vec, tree min, tree max, size_t *min_idx, size_t *ma
    SSA_PROP_VARYING.  */
 
 static enum ssa_prop_result
-vrp_visit_switch_stmt (tree stmt, edge *taken_edge_p)
+vrp_visit_switch_stmt (gimple stmt, edge *taken_edge_p)
 {
   tree op, val;
   value_range_t *vr;
   size_t i = 0, j = 0, n;
-  tree vec;
   bool take_default;
 
   *taken_edge_p = NULL;
-  op = TREE_OPERAND (stmt, 0);
+  op = gimple_switch_index (stmt);
   if (TREE_CODE (op) != SSA_NAME)
     return SSA_PROP_VARYING;
 
@@ -5624,26 +5766,26 @@ vrp_visit_switch_stmt (tree stmt, edge *taken_edge_p)
     return SSA_PROP_VARYING;
 
   /* Find the single edge that is taken from the switch expression.  */
-  vec = SWITCH_LABELS (stmt);
-  n = TREE_VEC_LENGTH (vec);
+  n = gimple_switch_num_labels (stmt);
 
-  take_default = !find_case_label_range (vec, vr->min, vr->max, &i, &j);
+  take_default = !find_case_label_range (stmt, vr->min, vr->max, &i, &j);
 
   /* Check if the range spans no CASE_LABEL. If so, we only reach the default
      label */
   if (j < i)
     {
       gcc_assert (take_default);
-      val = TREE_VEC_ELT (vec, n - 1);
+      val = gimple_switch_default_label (stmt);
     }
   else
     {
       /* Check if labels with index i to j and maybe the default label
         are all reaching the same label.  */
 
-      val = TREE_VEC_ELT (vec, i);
+      val = gimple_switch_label (stmt, i);
       if (take_default
-         && CASE_LABEL (TREE_VEC_ELT (vec, n - 1)) != CASE_LABEL (val))
+         && CASE_LABEL (gimple_switch_default_label (stmt))
+         != CASE_LABEL (val))
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "  not a single destination for this "
@@ -5652,7 +5794,7 @@ vrp_visit_switch_stmt (tree stmt, edge *taken_edge_p)
        }
       for (++i; i <= j; ++i)
         {
-          if (CASE_LABEL (TREE_VEC_ELT (vec, i)) != CASE_LABEL (val))
+          if (CASE_LABEL (gimple_switch_label (stmt, i)) != CASE_LABEL (val))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "  not a single destination for this "
@@ -5662,7 +5804,7 @@ vrp_visit_switch_stmt (tree stmt, edge *taken_edge_p)
         }
     }
 
-  *taken_edge_p = find_edge (bb_for_stmt (stmt),
+  *taken_edge_p = find_edge (gimple_bb (stmt),
                             label_to_block (CASE_LABEL (val)));
 
   if (dump_file && (dump_flags & TDF_DETAILS))
@@ -5685,37 +5827,33 @@ vrp_visit_switch_stmt (tree stmt, edge *taken_edge_p)
    If STMT produces a varying value, return SSA_PROP_VARYING.  */
 
 static enum ssa_prop_result
-vrp_visit_stmt (tree stmt, edge *taken_edge_p, tree *output_p)
+vrp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
 {
   tree def;
   ssa_op_iter iter;
-  stmt_ann_t ann;
 
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
       fprintf (dump_file, "\nVisiting statement:\n");
-      print_generic_stmt (dump_file, stmt, dump_flags);
+      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
       fprintf (dump_file, "\n");
     }
 
-  ann = stmt_ann (stmt);
-  if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
+  if (is_gimple_assign (stmt) || is_gimple_call (stmt))
     {
-      tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-
       /* In general, assignments with virtual operands are not useful
         for deriving ranges, with the obvious exception of calls to
         builtin functions.  */
-      if ((TREE_CODE (rhs) == CALL_EXPR
-          && TREE_CODE (CALL_EXPR_FN (rhs)) == ADDR_EXPR
-          && DECL_P (TREE_OPERAND (CALL_EXPR_FN (rhs), 0))
-          && DECL_IS_BUILTIN (TREE_OPERAND (CALL_EXPR_FN (rhs), 0)))
+
+      if ((is_gimple_call (stmt)
+          && gimple_call_fndecl (stmt) != NULL_TREE
+          && DECL_IS_BUILTIN (gimple_call_fndecl (stmt)))
          || ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
-       return vrp_visit_assignment (stmt, output_p);
+       return vrp_visit_assignment_or_call (stmt, output_p);
     }
-  else if (TREE_CODE (stmt) == COND_EXPR)
+  else if (gimple_code (stmt) == GIMPLE_COND)
     return vrp_visit_cond_stmt (stmt, taken_edge_p);
-  else if (TREE_CODE (stmt) == SWITCH_EXPR)
+  else if (gimple_code (stmt) == GIMPLE_SWITCH)
     return vrp_visit_switch_stmt (stmt, taken_edge_p);
 
   /* All other statements produce nothing of interest for VRP, so mark
@@ -5878,9 +6016,9 @@ give_up:
    value ranges, set a new range for the LHS of PHI.  */
 
 static enum ssa_prop_result
-vrp_visit_phi_node (tree phi)
+vrp_visit_phi_node (gimple phi)
 {
-  int i;
+  size_t i;
   tree lhs = PHI_RESULT (phi);
   value_range_t *lhs_vr = get_value_range (lhs);
   value_range_t vr_result = { VR_UNDEFINED, NULL_TREE, NULL_TREE, NULL };
@@ -5891,19 +6029,19 @@ vrp_visit_phi_node (tree phi)
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
       fprintf (dump_file, "\nVisiting PHI node: ");
-      print_generic_expr (dump_file, phi, dump_flags);
+      print_gimple_stmt (dump_file, phi, 0, dump_flags);
     }
 
   edges = 0;
-  for (i = 0; i < PHI_NUM_ARGS (phi); i++)
+  for (i = 0; i < gimple_phi_num_args (phi); i++)
     {
-      edge e = PHI_ARG_EDGE (phi, i);
+      edge e = gimple_phi_arg_edge (phi, i);
 
       if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file,
              "\n    Argument #%d (%d -> %d %sexecutable)\n",
-             i, e->src->index, e->dest->index,
+             (int) i, e->src->index, e->dest->index,
              (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
        }
 
@@ -6029,13 +6167,15 @@ varying:
    than zero and the second operand is an exact power of two.  */
 
 static void
-simplify_div_or_mod_using_ranges (tree stmt, tree rhs, enum tree_code rhs_code)
+simplify_div_or_mod_using_ranges (gimple stmt)
 {
+  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
   tree val = NULL;
-  tree op = TREE_OPERAND (rhs, 0);
-  value_range_t *vr = get_value_range (TREE_OPERAND (rhs, 0));
+  tree op0 = gimple_assign_rhs1 (stmt);
+  tree op1 = gimple_assign_rhs2 (stmt);
+  value_range_t *vr = get_value_range (gimple_assign_rhs1 (stmt));
 
-  if (TYPE_UNSIGNED (TREE_TYPE (op)))
+  if (TYPE_UNSIGNED (TREE_TYPE (op0)))
     {
       val = integer_one_node;
     }
@@ -6050,39 +6190,41 @@ simplify_div_or_mod_using_ranges (tree stmt, tree rhs, enum tree_code rhs_code)
          && integer_onep (val)
          && issue_strict_overflow_warning (WARN_STRICT_OVERFLOW_MISC))
        {
-         location_t locus;
+         location_t location;
 
-         if (!EXPR_HAS_LOCATION (stmt))
-           locus = input_location;
+         if (!gimple_has_location (stmt))
+           location = input_location;
          else
-           locus = EXPR_LOCATION (stmt);
+           location = gimple_location (stmt);
          warning (OPT_Wstrict_overflow,
                   ("%Hassuming signed overflow does not occur when "
                    "simplifying / or %% to >> or &"),
-                  &locus);
+                  &location);
        }
     }
 
   if (val && integer_onep (val))
     {
       tree t;
-      tree op0 = TREE_OPERAND (rhs, 0);
-      tree op1 = TREE_OPERAND (rhs, 1);
 
       if (rhs_code == TRUNC_DIV_EXPR)
        {
          t = build_int_cst (NULL_TREE, tree_log2 (op1));
-         t = build2 (RSHIFT_EXPR, TREE_TYPE (op0), op0, t);
+         gimple_assign_set_rhs_code (stmt, RSHIFT_EXPR);
+         gimple_assign_set_rhs1 (stmt, op0);
+         gimple_assign_set_rhs2 (stmt, t);
        }
       else
        {
          t = build_int_cst (TREE_TYPE (op1), 1);
          t = int_const_binop (MINUS_EXPR, op1, t, 0);
          t = fold_convert (TREE_TYPE (op0), t);
-         t = build2 (BIT_AND_EXPR, TREE_TYPE (op0), op0, t);
+
+         gimple_assign_set_rhs_code (stmt, BIT_AND_EXPR);
+         gimple_assign_set_rhs1 (stmt, op0);
+         gimple_assign_set_rhs2 (stmt, t);
        }
 
-      GIMPLE_STMT_OPERAND (stmt, 1) = t;
       update_stmt (stmt);
     }
 }
@@ -6092,12 +6234,12 @@ simplify_div_or_mod_using_ranges (tree stmt, tree rhs, enum tree_code rhs_code)
    ABS_EXPR into a NEGATE_EXPR.  */
 
 static void
-simplify_abs_using_ranges (tree stmt, tree rhs)
+simplify_abs_using_ranges (gimple stmt)
 {
   tree val = NULL;
-  tree op = TREE_OPERAND (rhs, 0);
+  tree op = gimple_assign_rhs1 (stmt);
   tree type = TREE_TYPE (op);
-  value_range_t *vr = get_value_range (TREE_OPERAND (rhs, 0));
+  value_range_t *vr = get_value_range (op);
 
   if (TYPE_UNSIGNED (type))
     {
@@ -6126,28 +6268,25 @@ simplify_abs_using_ranges (tree stmt, tree rhs)
       if (val
          && (integer_onep (val) || integer_zerop (val)))
        {
-         tree t;
-
          if (sop && issue_strict_overflow_warning (WARN_STRICT_OVERFLOW_MISC))
            {
-             location_t locus;
+             location_t location;
 
-             if (!EXPR_HAS_LOCATION (stmt))
-               locus = input_location;
+             if (!gimple_has_location (stmt))
+               location = input_location;
              else
-               locus = EXPR_LOCATION (stmt);
+               location = gimple_location (stmt);
              warning (OPT_Wstrict_overflow,
                       ("%Hassuming signed overflow does not occur when "
                        "simplifying abs (X) to X or -X"),
-                      &locus);
+                      &location);
            }
 
+         gimple_assign_set_rhs1 (stmt, op);
          if (integer_onep (val))
-           t = build1 (NEGATE_EXPR, TREE_TYPE (op), op);
+           gimple_assign_set_rhs_code (stmt, NEGATE_EXPR);
          else
-           t = op;
-
-         GIMPLE_STMT_OPERAND (stmt, 1) = t;
+           gimple_assign_set_rhs_code (stmt, SSA_NAME);
          update_stmt (stmt);
        }
     }
@@ -6226,12 +6365,11 @@ test_for_singularity (enum tree_code cond_code, tree op0,
    the original conditional.  */
 
 static void
-simplify_cond_using_ranges (tree stmt)
+simplify_cond_using_ranges (gimple stmt)
 {
-  tree cond = COND_EXPR_COND (stmt);
-  tree op0 = TREE_OPERAND (cond, 0);
-  tree op1 = TREE_OPERAND (cond, 1);
-  enum tree_code cond_code = TREE_CODE (cond);
+  tree op0 = gimple_cond_lhs (stmt);
+  tree op1 = gimple_cond_rhs (stmt);
+  enum tree_code cond_code = gimple_cond_code (stmt);
 
   if (cond_code != NE_EXPR
       && cond_code != EQ_EXPR
@@ -6252,17 +6390,19 @@ simplify_cond_using_ranges (tree stmt)
              if (dump_file)
                {
                  fprintf (dump_file, "Simplified relational ");
-                 print_generic_expr (dump_file, cond, 0);
+                 print_gimple_stmt (dump_file, stmt, 0, 0);
                  fprintf (dump_file, " into ");
                }
 
-             COND_EXPR_COND (stmt)
-               = build2 (EQ_EXPR, boolean_type_node, op0, new);
+             gimple_cond_set_code (stmt, EQ_EXPR);
+             gimple_cond_set_lhs (stmt, op0);
+             gimple_cond_set_rhs (stmt, new);
+
              update_stmt (stmt);
 
              if (dump_file)
                {
-                 print_generic_expr (dump_file, COND_EXPR_COND (stmt), 0);
+                 print_gimple_stmt (dump_file, stmt, 0, 0);
                  fprintf (dump_file, "\n");
                }
              return;
@@ -6280,17 +6420,19 @@ simplify_cond_using_ranges (tree stmt)
              if (dump_file)
                {
                  fprintf (dump_file, "Simplified relational ");
-                 print_generic_expr (dump_file, cond, 0);
+                 print_gimple_stmt (dump_file, stmt, 0, 0);
                  fprintf (dump_file, " into ");
                }
 
-             COND_EXPR_COND (stmt)
-               = build2 (NE_EXPR, boolean_type_node, op0, new);
+             gimple_cond_set_code (stmt, NE_EXPR);
+             gimple_cond_set_lhs (stmt, op0);
+             gimple_cond_set_rhs (stmt, new);
+
              update_stmt (stmt);
 
              if (dump_file)
                {
-                 print_generic_expr (dump_file, COND_EXPR_COND (stmt), 0);
+                 print_gimple_stmt (dump_file, stmt, 0, 0);
                  fprintf (dump_file, "\n");
                }
              return;
@@ -6304,15 +6446,15 @@ simplify_cond_using_ranges (tree stmt)
    argument.  */
 
 static void
-simplify_switch_using_ranges (tree stmt)
+simplify_switch_using_ranges (gimple stmt)
 {
-  tree op = TREE_OPERAND (stmt, 0);
+  tree op = gimple_switch_index (stmt);
   value_range_t *vr;
   bool take_default;
   edge e;
   edge_iterator ei;
   size_t i = 0, j = 0, n, n2;
-  tree vec, vec2;
+  tree vec2;
   switch_update su;
 
   if (TREE_CODE (op) != SSA_NAME)
@@ -6326,35 +6468,36 @@ simplify_switch_using_ranges (tree stmt)
     return;
 
   /* Find case label for min/max of the value range.  */
-  vec = SWITCH_LABELS (stmt);
-  n = TREE_VEC_LENGTH (vec);
-  take_default = !find_case_label_range (vec, vr->min, vr->max, &i, &j);
+  n = gimple_switch_num_labels (stmt);
+  take_default = !find_case_label_range (stmt, vr->min, vr->max, &i, &j);
 
   /* Bail out if this is just all edges taken.  */
-  if (i == 0
-      && j == n - 2
+  if (i == 1
+      && j == n - 1
       && take_default)
     return;
 
   /* Build a new vector of taken case labels.  */
   vec2 = make_tree_vec (j - i + 1 + (int)take_default);
-  for (n2 = 0; i <= j; ++i, ++n2)
-    TREE_VEC_ELT (vec2, n2) = TREE_VEC_ELT (vec, i);
+  n2 = 0;
 
   /* Add the default edge, if necessary.  */
   if (take_default)
-    TREE_VEC_ELT (vec2, n2++) = TREE_VEC_ELT (vec, n - 1);
+    TREE_VEC_ELT (vec2, n2++) = gimple_switch_default_label (stmt);
+
+  for (; i <= j; ++i, ++n2)
+    TREE_VEC_ELT (vec2, n2) = gimple_switch_label (stmt, i);
 
   /* Mark needed edges.  */
   for (i = 0; i < n2; ++i)
     {
-      e = find_edge (bb_for_stmt (stmt),
+      e = find_edge (gimple_bb (stmt),
                     label_to_block (CASE_LABEL (TREE_VEC_ELT (vec2, i))));
       e->aux = (void *)-1;
     }
 
   /* Queue not needed edges for later removal.  */
-  FOR_EACH_EDGE (e, ei, bb_for_stmt (stmt)->succs)
+  FOR_EACH_EDGE (e, ei, gimple_bb (stmt)->succs)
     {
       if (e->aux == (void *)-1)
        {
@@ -6378,31 +6521,29 @@ simplify_switch_using_ranges (tree stmt)
 /* Simplify STMT using ranges if possible.  */
 
 void
-simplify_stmt_using_ranges (tree stmt)
+simplify_stmt_using_ranges (gimple stmt)
 {
-  if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
+  if (is_gimple_assign (stmt))
     {
-      tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
-      enum tree_code rhs_code = TREE_CODE (rhs);
+      enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
 
       /* Transform TRUNC_DIV_EXPR and TRUNC_MOD_EXPR into RSHIFT_EXPR
         and BIT_AND_EXPR respectively if the first operand is greater
         than zero and the second operand is an exact power of two.  */
       if ((rhs_code == TRUNC_DIV_EXPR || rhs_code == TRUNC_MOD_EXPR)
-         && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (rhs, 0)))
-         && integer_pow2p (TREE_OPERAND (rhs, 1)))
-       simplify_div_or_mod_using_ranges (stmt, rhs, rhs_code);
+         && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt)))
+         && integer_pow2p (gimple_assign_rhs2 (stmt)))
+       simplify_div_or_mod_using_ranges (stmt);
 
       /* Transform ABS (X) into X or -X as appropriate.  */
       if (rhs_code == ABS_EXPR
-         && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
-         && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (rhs, 0))))
-       simplify_abs_using_ranges (stmt, rhs);
+         && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
+         && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
+       simplify_abs_using_ranges (stmt);
     }
-  else if (TREE_CODE (stmt) == COND_EXPR
-          && COMPARISON_CLASS_P (COND_EXPR_COND (stmt)))
+  else if (gimple_code (stmt) == GIMPLE_COND)
     simplify_cond_using_ranges (stmt);
-  else if (TREE_CODE (stmt) == SWITCH_EXPR)
+  else if (gimple_code (stmt) == GIMPLE_SWITCH)
     simplify_switch_using_ranges (stmt);
 }
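
The simplify_* hunks above share one rewriting pattern: where the old code built a fresh RHS tree and stored it back into the statement, the tuple is now modified in place through its setters and then update_stmt is called.  A minimal sketch of that idiom, using only the setters shown above (the helper name is hypothetical):

/* Turn X = a / 4 into X = a >> 2 in place on a GIMPLE_ASSIGN,
   assuming the divisor is already known to be a power of two.  */

static void
divide_to_shift (gimple stmt)
{
  tree op0 = gimple_assign_rhs1 (stmt);
  tree op1 = gimple_assign_rhs2 (stmt);
  tree shift = build_int_cst (NULL_TREE, tree_log2 (op1));

  gimple_assign_set_rhs_code (stmt, RSHIFT_EXPR);
  gimple_assign_set_rhs1 (stmt, op0);
  gimple_assign_set_rhs2 (stmt, shift);
  update_stmt (stmt);
}
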
 
@@ -6419,26 +6560,17 @@ static VEC(tree,heap) *stack;
    for any overflow warnings.  */
 
 static tree
-simplify_stmt_for_jump_threading (tree stmt, tree within_stmt)
+simplify_stmt_for_jump_threading (gimple stmt, gimple within_stmt)
 {
-  tree conditional;
   /* We only use VRP information to simplify conditionals.  This is
      overly conservative, but it's unclear if doing more would be
      worth the compile time cost.  */
-  if (TREE_CODE (stmt) != COND_EXPR)
+  if (gimple_code (stmt) != GIMPLE_COND)
     return NULL;
 
-  conditional = COND_EXPR_COND (stmt);
-  if (TREE_CODE (conditional) == SSA_NAME)
-    return vrp_evaluate_conditional (EQ_EXPR,
-                                    conditional,
-                                    boolean_true_node,
-                                    within_stmt);
-  else
-    return vrp_evaluate_conditional (TREE_CODE (conditional),
-                                    TREE_OPERAND (conditional, 0),
-                                    TREE_OPERAND (conditional, 1),
-                                    within_stmt);
+  return vrp_evaluate_conditional (gimple_cond_code (stmt),
+                                  gimple_cond_lhs (stmt),
+                                  gimple_cond_rhs (stmt), within_stmt);
 }
 
 /* Blocks which have more than one predecessor and more than
@@ -6465,7 +6597,7 @@ static void
 identify_jump_threads (void)
 {
   basic_block bb;
-  tree dummy;
+  gimple dummy;
   int i;
   edge e;
 
@@ -6493,8 +6625,9 @@ identify_jump_threads (void)
   /* To avoid lots of silly node creation, we create a single
      conditional and just modify it in-place when attempting to
      thread jumps.  */
-  dummy = build2 (EQ_EXPR, boolean_type_node, NULL, NULL);
-  dummy = build3 (COND_EXPR, void_type_node, dummy, NULL, NULL);
+  dummy = gimple_build_cond (EQ_EXPR,
+                            integer_zero_node, integer_zero_node,
+                            NULL, NULL);
 
   /* Walk through all the blocks finding those which present a
      potential jump threading opportunity.  We could set this up
@@ -6504,7 +6637,7 @@ identify_jump_threads (void)
      point in compilation.  */
   FOR_EACH_BB (bb)
     {
-      tree last, cond;
+      gimple last;
 
       /* If the generic jump threading code does not find this block
         interesting, then there is nothing to do.  */
@@ -6514,21 +6647,17 @@ identify_jump_threads (void)
       /* We only care about blocks ending in a COND_EXPR.  While there
         may be some value in handling SWITCH_EXPR here, I doubt it's
         terribly important.  */
-      last = bsi_stmt (bsi_last (bb));
-      if (TREE_CODE (last) != COND_EXPR)
+      last = gsi_stmt (gsi_last_bb (bb));
+      if (gimple_code (last) != GIMPLE_COND)
        continue;
 
       /* We're basically looking for any kind of conditional with
         integral type arguments.  */
-      cond = COND_EXPR_COND (last);
-      if ((TREE_CODE (cond) == SSA_NAME
-          && INTEGRAL_TYPE_P (TREE_TYPE (cond)))
-         || (COMPARISON_CLASS_P (cond)
-             && TREE_CODE (TREE_OPERAND (cond, 0)) == SSA_NAME
-             && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (cond, 0)))
-             && (TREE_CODE (TREE_OPERAND (cond, 1)) == SSA_NAME
-                 || is_gimple_min_invariant (TREE_OPERAND (cond, 1)))
-             && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (cond, 1)))))
+      if (TREE_CODE (gimple_cond_lhs (last)) == SSA_NAME
+         && INTEGRAL_TYPE_P (TREE_TYPE (gimple_cond_lhs (last)))
+         && (TREE_CODE (gimple_cond_rhs (last)) == SSA_NAME
+             || is_gimple_min_invariant (gimple_cond_rhs (last)))
+         && INTEGRAL_TYPE_P (TREE_TYPE (gimple_cond_rhs (last))))
        {
          edge_iterator ei;
 
@@ -6543,8 +6672,7 @@ identify_jump_threads (void)
              if (e->flags & (EDGE_DFS_BACK | EDGE_COMPLEX))
                continue;
 
-             thread_across_edge (dummy, e, true,
-                                 &stack,
+             thread_across_edge (dummy, e, true, &stack,
                                  simplify_stmt_for_jump_threading);
            }
        }
@@ -6720,7 +6848,13 @@ execute_vrp (void)
     remove_edge (e);
   /* Update SWITCH_EXPR case label vector.  */
   for (i = 0; VEC_iterate (switch_update, to_update_switch_stmts, i, su); ++i)
-    SWITCH_LABELS (su->stmt) = su->vec;
+    {
+      size_t j;
+      size_t n = TREE_VEC_LENGTH (su->vec);
+      gimple_switch_set_num_labels (su->stmt, n);
+      for (j = 0; j < n; j++)
+       gimple_switch_set_label (su->stmt, j, TREE_VEC_ELT (su->vec, j));
+    }
 
   if (VEC_length (edge, to_remove_edges) > 0)
     free_dominance_info (CDI_DOMINATORS);
@@ -6730,7 +6864,6 @@ execute_vrp (void)
 
   scev_finalize ();
   loop_optimizer_finalize ();
-
   return 0;
 }
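
One detail behind the index changes in find_case_label_index and find_case_label_range above (the start index moving from 0 to 1, the bounds from n - 2 to n - 1) is the label layout of the tuple form.  The sketch below assumes GIMPLE_SWITCH keeps its default label at index 0 with the sorted case labels at indices 1 .. n - 1, whereas the old SWITCH_LABELS vector kept the default label last:

/* Visit the non-default case labels of a GIMPLE_SWITCH.  */

static void
visit_case_labels (gimple stmt)
{
  size_t i, n = gimple_switch_num_labels (stmt);
  tree default_case = gimple_switch_default_label (stmt);  /* label 0.  */

  for (i = 1; i < n; i++)
    {
      tree cs = gimple_switch_label (stmt, i);
      /* Real case labels have CASE_LOW set; only the default does not.  */
      gcc_assert (CASE_LOW (cs) != NULL_TREE);
    }
  (void) default_case;
}
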
 
diff --git a/gcc/tree.c b/gcc/tree.c
index 0af1189..cdecd70 100644
@@ -105,8 +105,7 @@ const char *const tree_code_class_strings[] =
   "binary",
   "statement",
   "vl_exp",
-  "expression",
-  "gimple_stmt"
+  "expression"
 };
 
 /* obstack.[ch] explicitly declined to prototype this.  */
@@ -132,14 +131,12 @@ static const char * const tree_node_kind_names[] = {
   "temp_tree_lists",
   "vecs",
   "binfos",
-  "phi_nodes",
   "ssa names",
   "constructors",
   "random kinds",
   "lang_decl kinds",
   "lang_type kinds",
   "omp clauses",
-  "gimple statements"
 };
 #endif /* GATHER_STATISTICS */
 
@@ -440,7 +437,7 @@ decl_assembler_name_hash (const_tree asmname)
 
 /* Compute the number of bytes occupied by a tree with code CODE.
    This function cannot be used for nodes that have variable sizes,
-   including TREE_VEC, PHI_NODE, STRING_CST, and CALL_EXPR.  */
+   including TREE_VEC, STRING_CST, and CALL_EXPR.  */
 size_t
 tree_code_size (enum tree_code code)
 {
@@ -488,10 +485,6 @@ tree_code_size (enum tree_code code)
       return (sizeof (struct tree_exp)
              + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
 
-    case tcc_gimple_stmt:
-      return (sizeof (struct gimple_stmt)
-             + (TREE_CODE_LENGTH (code) - 1) * sizeof (char *));
-
     case tcc_constant:  /* a constant */
       switch (code)
        {
@@ -515,8 +508,7 @@ tree_code_size (enum tree_code code)
        case PLACEHOLDER_EXPR:  return sizeof (struct tree_common);
 
        case TREE_VEC:
-       case OMP_CLAUSE:
-       case PHI_NODE:          gcc_unreachable ();
+       case OMP_CLAUSE:        gcc_unreachable ();
 
        case SSA_NAME:          return sizeof (struct tree_ssa_name);
 
@@ -543,10 +535,6 @@ tree_size (const_tree node)
   const enum tree_code code = TREE_CODE (node);
   switch (code)
     {
-    case PHI_NODE:
-      return (sizeof (struct tree_phi_node)
-             + (PHI_ARG_CAPACITY (node) - 1) * sizeof (struct phi_arg_d));
-
     case TREE_BINFO:
       return (offsetof (struct tree_binfo, base_binfos)
              + VEC_embedded_size (tree, BINFO_N_BASE_BINFOS (node)));
@@ -574,9 +562,8 @@ tree_size (const_tree node)
 
 /* Return a newly allocated node of code CODE.  For decl and type
    nodes, some other fields are initialized.  The rest of the node is
-   initialized to zero.  This function cannot be used for PHI_NODE,
-   TREE_VEC or OMP_CLAUSE nodes, which is enforced by asserts in
-   tree_code_size.
+   initialized to zero.  This function cannot be used for TREE_VEC or
+   OMP_CLAUSE nodes, which is enforced by asserts in tree_code_size.
 
    Achoo!  I got a code in the node.  */
 
@@ -618,10 +605,6 @@ make_node_stat (enum tree_code code MEM_STAT_DECL)
       kind = c_kind;
       break;
 
-    case tcc_gimple_stmt:
-      kind = gimple_stmt_kind;
-      break;
-
     case tcc_exceptional:  /* something random, like an identifier.  */
       switch (code)
        {
@@ -637,10 +620,6 @@ make_node_stat (enum tree_code code MEM_STAT_DECL)
          kind = binfo_kind;
          break;
 
-       case PHI_NODE:
-         kind = phi_kind;
-         break;
-
        case SSA_NAME:
          kind = ssa_name_kind;
          break;
@@ -739,17 +718,6 @@ make_node_stat (enum tree_code code MEM_STAT_DECL)
        }
       break;
 
-    case tcc_gimple_stmt:
-      switch (code)
-       {
-      case GIMPLE_MODIFY_STMT:
-       TREE_SIDE_EFFECTS (t) = 1;
-       break;
-
-      default:
-       break;
-       }
-
     default:
       /* Other classes need no special treatment.  */
       break;
@@ -774,8 +742,7 @@ copy_node_stat (tree node MEM_STAT_DECL)
   t = (tree) ggc_alloc_zone_pass_stat (length, &tree_zone);
   memcpy (t, node, length);
 
-  if (!GIMPLE_TUPLE_P (node))
-    TREE_CHAIN (t) = 0;
+  TREE_CHAIN (t) = 0;
   TREE_ASM_WRITTEN (t) = 0;
   TREE_VISITED (t) = 0;
   t->base.ann = 0;
@@ -2043,10 +2010,6 @@ expr_align (const_tree t)
       align1 = TYPE_ALIGN (TREE_TYPE (t));
       return MAX (align0, align1);
 
-    case GIMPLE_MODIFY_STMT:
-      /* We should never ask for the alignment of a gimple statement.  */
-      gcc_unreachable ();
-
     case SAVE_EXPR:         case COMPOUND_EXPR:       case MODIFY_EXPR:
     case INIT_EXPR:         case TARGET_EXPR:         case WITH_CLEANUP_EXPR:
     case CLEANUP_POINT_EXPR:
@@ -2418,8 +2381,6 @@ tree_node_structure (const_tree t)
     case tcc_statement:
     case tcc_vl_exp:
       return TS_EXP;
-    case tcc_gimple_stmt:
-      return TS_GIMPLE_STATEMENT;
     default:  /* tcc_constant and tcc_exceptional */
       break;
     }
@@ -2433,13 +2394,10 @@ tree_node_structure (const_tree t)
     case VECTOR_CST:           return TS_VECTOR;
     case STRING_CST:           return TS_STRING;
       /* tcc_exceptional cases.  */
-    /* FIXME tuples: eventually this should be TS_BASE.  For now, nothing
-       returns TS_BASE.  */
     case ERROR_MARK:           return TS_COMMON;
     case IDENTIFIER_NODE:      return TS_IDENTIFIER;
     case TREE_LIST:            return TS_LIST;
     case TREE_VEC:             return TS_VEC;
-    case PHI_NODE:             return TS_PHI_NODE;
     case SSA_NAME:             return TS_SSA_NAME;
     case PLACEHOLDER_EXPR:     return TS_COMMON;
     case STATEMENT_LIST:       return TS_STATEMENT_LIST;
@@ -3289,15 +3247,6 @@ build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
 
   gcc_assert (TREE_CODE_LENGTH (code) == 2);
 
-#if 1
-  /* FIXME tuples: Statement's aren't expressions!  */
-  if (code == GIMPLE_MODIFY_STMT)
-    return build_gimple_modify_stmt_stat (arg0, arg1 PASS_MEM_STAT);
-#else
-  /* Must use build_gimple_modify_stmt to construct GIMPLE_MODIFY_STMTs.  */
-  gcc_assert (code != GIMPLE_MODIFY_STMT);
-#endif
-
   if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
       && arg0 && arg1 && tt && POINTER_TYPE_P (tt))
     gcc_assert (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST);
@@ -3336,21 +3285,6 @@ build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
 }
 
 
-/* Build a GIMPLE_MODIFY_STMT node.  This tree code doesn't have a
-   type, so we can't use build2 (a.k.a. build2_stat).  */
-
-tree
-build_gimple_modify_stmt_stat (tree arg0, tree arg1 MEM_STAT_DECL)
-{
-  tree t;
-
-  t = make_node_stat (GIMPLE_MODIFY_STMT PASS_MEM_STAT);
-  /* ?? We don't care about setting flags for tuples...  */
-  GIMPLE_STMT_OPERAND (t, 0) = arg0;
-  GIMPLE_STMT_OPERAND (t, 1) = arg1;
-  return t;
-}
-
 tree
 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
             tree arg2 MEM_STAT_DECL)
@@ -3601,79 +3535,14 @@ expand_location (source_location loc)
 /* Source location accessor functions.  */
 
 
-/* The source location of this expression.  Non-tree_exp nodes such as
-   decls and constants can be shared among multiple locations, so
-   return nothing.  */
-location_t
-expr_location (const_tree node)
-{
-  if (GIMPLE_STMT_P (node))
-    return GIMPLE_STMT_LOCUS (node);
-  return EXPR_P (node) ? node->exp.locus : UNKNOWN_LOCATION;
-}
-
-void
-set_expr_location (tree node, location_t locus)
-{
-  if (GIMPLE_STMT_P (node))
-    GIMPLE_STMT_LOCUS (node) = locus;
-  else
-    EXPR_CHECK (node)->exp.locus = locus;
-}
-
-bool
-expr_has_location (const_tree node)
-{
-  return expr_location (node) != UNKNOWN_LOCATION;
-}
-
-source_location *
-expr_locus (const_tree node)
-{
-  if (GIMPLE_STMT_P (node))
-    return CONST_CAST (source_location *, &GIMPLE_STMT_LOCUS (node));
-  return (EXPR_P (node)
-         ? CONST_CAST (source_location *, &node->exp.locus)
-         : (source_location *) NULL);
-}
-
 void
 set_expr_locus (tree node, source_location *loc)
 {
   if (loc == NULL)
-    {
-      if (GIMPLE_STMT_P (node))
-       GIMPLE_STMT_LOCUS (node) = UNKNOWN_LOCATION;
-      else
-       EXPR_CHECK (node)->exp.locus = UNKNOWN_LOCATION;
-    }
+    EXPR_CHECK (node)->exp.locus = UNKNOWN_LOCATION;
   else
-    {
-      if (GIMPLE_STMT_P (node))
-       GIMPLE_STMT_LOCUS (node) = *loc;
-      else
-       EXPR_CHECK (node)->exp.locus = *loc;
-    }
-}
-
-/* Return the file name of the location of NODE.  */
-const char *
-expr_filename (const_tree node)
-{
-  if (GIMPLE_STMT_P (node))
-    return LOCATION_FILE (GIMPLE_STMT_LOCUS (node));
-  return LOCATION_FILE (EXPR_CHECK (node)->exp.locus);
+    EXPR_CHECK (node)->exp.locus = *loc;
 }
-
-/* Return the line number of the location of NODE.  */
-int
-expr_lineno (const_tree node)
-{
-  if (GIMPLE_STMT_P (node))
-    return LOCATION_LINE (GIMPLE_STMT_LOCUS (node));
-  return LOCATION_LINE (EXPR_CHECK (node)->exp.locus);
-}
-
 \f
 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
    is ATTRIBUTE.  */
@@ -5374,7 +5243,7 @@ commutative_tree_code (enum tree_code code)
 }
 
 /* Generate a hash value for an expression.  This can be used iteratively
-   by passing a previous result as the "val" argument.
+   by passing a previous result as the VAL argument.
 
    This function is intended to produce the same hash for expressions which
    would compare equal using operand_equal_p.  */
@@ -5502,6 +5371,29 @@ iterative_hash_expr (const_tree t, hashval_t val)
       break;
     }
 }
+
+/* Generate a hash value for a pair of expressions.  This can be used
+   iteratively by passing a previous result as the VAL argument.
+
+   The same hash value is always returned for a given pair of expressions,
+   regardless of the order in which they are presented.  This is useful in
+   hashing the operands of commutative functions.  */
+
+hashval_t
+iterative_hash_exprs_commutative (const_tree t1,
+                                  const_tree t2, hashval_t val)
+{
+  hashval_t one = iterative_hash_expr (t1, 0);
+  hashval_t two = iterative_hash_expr (t2, 0);
+  hashval_t t;
+
+  if (one > two)
+    t = one, one = two, two = t;
+  val = iterative_hash_hashval_t (one, val);
+  val = iterative_hash_hashval_t (two, val);
+
+  return val;
+}
 \f
 /* Constructors for pointer, array and function types.
    (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
@@ -6489,8 +6381,7 @@ get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
     }
 }
 
-/* auto_var_in_fn_p is called to determine whether VAR is an automatic
-   variable defined in function FN.  */
+/* Return true if VAR is an automatic variable defined in function FN.  */
 
 bool
 auto_var_in_fn_p (const_tree var, const_tree fn)
@@ -7128,18 +7019,6 @@ tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
      idx + 1, len, function, trim_filename (file), line);
 }
 
-/* Similar to above, except that the check is for the bounds of a PHI_NODE's
-   (dynamically sized) vector.  */
-
-void
-phi_node_elt_check_failed (int idx, int len, const char *file, int line,
-                           const char *function)
-{
-  internal_error
-    ("tree check: accessed elt %d of phi_node with %d elts in %s, at %s:%d",
-     idx + 1, len, function, trim_filename (file), line);
-}
-
 /* Similar to above, except that the check is for the bounds of the operand
    vector of an expression node EXP.  */
 
@@ -8690,6 +8569,10 @@ walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
        WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
       }
 
+    case CHANGE_DYNAMIC_TYPE_EXPR:
+      WALK_SUBTREE (CHANGE_DYNAMIC_TYPE_NEW_TYPE (*tp));
+      WALK_SUBTREE_TAIL (CHANGE_DYNAMIC_TYPE_LOCATION (*tp));
+
     case DECL_EXPR:
       /* If this is a TYPE_DECL, walk into the fields of the type that it's
         defining.  We only want to walk into these fields of a type in this
@@ -8761,8 +8644,7 @@ walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
       /* FALLTHRU */
 
     default:
-      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
-         || IS_GIMPLE_STMT_CODE_CLASS (TREE_CODE_CLASS (code)))
+      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
        {
          int i, len;
 
@@ -8774,8 +8656,8 @@ walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
          if (len)
            {
              for (i = 0; i < len - 1; ++i)
-               WALK_SUBTREE (GENERIC_TREE_OPERAND (*tp, i));
-             WALK_SUBTREE_TAIL (GENERIC_TREE_OPERAND (*tp, len - 1));
+               WALK_SUBTREE (TREE_OPERAND (*tp, i));
+             WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
            }
        }
       /* If this is a type, walk the needed fields in the type.  */
@@ -8807,31 +8689,6 @@ walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
 }
 
 
-/* Return true if STMT is an empty statement or contains nothing but
-   empty statements.  */
-
-bool
-empty_body_p (tree stmt)
-{
-  tree_stmt_iterator i;
-  tree body;
-
-  if (IS_EMPTY_STMT (stmt))
-    return true;
-  else if (TREE_CODE (stmt) == BIND_EXPR)
-    body = BIND_EXPR_BODY (stmt);
-  else if (TREE_CODE (stmt) == STATEMENT_LIST)
-    body = stmt;
-  else
-    return false;
-
-  for (i = tsi_start (body); !tsi_end_p (i); tsi_next (&i))
-    if (!empty_body_p (tsi_stmt (i)))
-      return false;
-
-  return true;
-}
-
 tree *
 tree_block (tree t)
 {
@@ -8839,28 +8696,10 @@ tree_block (tree t)
 
   if (IS_EXPR_CODE_CLASS (c))
     return &t->exp.block;
-  else if (IS_GIMPLE_STMT_CODE_CLASS (c))
-    return &GIMPLE_STMT_BLOCK (t);
   gcc_unreachable ();
   return NULL;
 }
 
-tree *
-generic_tree_operand (tree node, int i)
-{
-  if (GIMPLE_STMT_P (node))
-    return &GIMPLE_STMT_OPERAND (node, i);
-  return &TREE_OPERAND (node, i);
-}
-
-tree *
-generic_tree_type (tree node)
-{
-  if (GIMPLE_STMT_P (node))
-    return &void_type_node;
-  return &TREE_TYPE (node);
-}
-
 /* Build and return a TREE_LIST of arguments in the CALL_EXPR exp.
    FIXME: don't use this function.  It exists for compatibility with
    the old representation of CALL_EXPRs where a list was used to hold the
@@ -8876,6 +8715,46 @@ call_expr_arglist (tree exp)
   return arglist;
 }
 
+
+/* Create a nameless artificial label and put it in the current function
+   context.  Returns the newly created label.  */
+
+tree
+create_artificial_label (void)
+{
+  tree lab = build_decl (LABEL_DECL, NULL_TREE, void_type_node);
+
+  DECL_ARTIFICIAL (lab) = 1;
+  DECL_IGNORED_P (lab) = 1;
+  DECL_CONTEXT (lab) = current_function_decl;
+  return lab;
+}
+
+/*  Given a tree, try to return a useful variable name that we can use
+    to prefix a temporary that is being assigned the value of the tree.
+    I.E. given  <temp> = &A, return A.  */
+
+const char *
+get_name (tree t)
+{
+  tree stripped_decl;
+
+  stripped_decl = t;
+  STRIP_NOPS (stripped_decl);
+  if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
+    return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
+  else
+    {
+      switch (TREE_CODE (stripped_decl))
+       {
+       case ADDR_EXPR:
+         return get_name (TREE_OPERAND (stripped_decl, 0));
+       default:
+         return NULL;
+       }
+    }
+}
+
 /* Return true if TYPE has a variable argument list.  */
 
 bool
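
A short usage note for the iterative_hash_exprs_commutative helper added above: since the two operand hashes are ordered before being mixed in, both operand orders of a commutative operation hash to the same value.  A minimal sketch (the wrapper name is hypothetical):

/* Hash a commutative expression such as a + b so that the operand
   order does not matter, e.g. when keying a value-numbering table.  */

static hashval_t
hash_commutative_pair (enum tree_code code, tree op0, tree op1)
{
  hashval_t val = iterative_hash_hashval_t ((hashval_t) code, 0);
  return iterative_hash_exprs_commutative (op0, op1, val);
}
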
diff --git a/gcc/tree.def b/gcc/tree.def
index 26e72c4..e9c8917 100644
@@ -882,14 +882,6 @@ DEFTREECODE (ASM_EXPR, "asm_expr", tcc_statement, 4)
    nodes to implement SSA versioning.  */
 DEFTREECODE (SSA_NAME, "ssa_name", tcc_exceptional, 0)
 
-/* SSA PHI operator.  PHI_RESULT is the new SSA_NAME node created by
-   the PHI node.  PHI_ARG_LENGTH is the number of arguments.
-   PHI_ARG_ELT returns the Ith tuple <ssa_name, edge> from the
-   argument list.  Each tuple contains the incoming reaching
-   definition (SSA_NAME node) and the edge via which that definition
-   is coming through.  */
-DEFTREECODE (PHI_NODE, "phi_node", tcc_exceptional, 0)
-
 /* Used to represent a typed exception handler.  CATCH_TYPES is the type (or
    list of types) handled, and CATCH_BODY is the code for the handler.  */
 DEFTREECODE (CATCH_EXPR, "catch_expr", tcc_statement, 2)
@@ -985,32 +977,15 @@ DEFTREECODE (TARGET_MEM_REF, "target_mem_ref", tcc_reference, 7)
    exposed to TREE_RANGE_CHECK.  */
 /* OpenMP - #pragma omp parallel [clause1 ... clauseN]
    Operand 0: OMP_PARALLEL_BODY: Code to be executed by all threads.
-   Operand 1: OMP_PARALLEL_CLAUSES: List of clauses.
-   Operand 2: OMP_PARALLEL_FN: FUNCTION_DECL used when outlining the
-             body of the parallel region.  Only valid after
-             pass_lower_omp.
-   Operand 3: OMP_PARALLEL_DATA_ARG: Local variable in the parent
-             function containing data to be shared with the child
-             function.  */
+   Operand 1: OMP_PARALLEL_CLAUSES: List of clauses.  */
 
-DEFTREECODE (OMP_PARALLEL, "omp_parallel", tcc_statement, 4)
+DEFTREECODE (OMP_PARALLEL, "omp_parallel", tcc_statement, 2)
 
 /* OpenMP - #pragma omp task [clause1 ... clauseN]
    Operand 0: OMP_TASK_BODY: Code to be executed by all threads.
-   Operand 1: OMP_TASK_CLAUSES: List of clauses.
-   Operand 2: OMP_TASK_FN: FUNCTION_DECL used when outlining the
-             body of the task region.  Only valid after
-             pass_lower_omp.
-   Operand 3: OMP_TASK_DATA_ARG: Local variable in the parent
-             function containing data to be shared with the child
-             function.
-   Operand 4: OMP_TASK_COPYFN: FUNCTION_DECL used for constructing
-             firstprivate variables.
-   Operand 5: OMP_TASK_ARG_SIZE: Length of the task argument block.
-   Operand 6: OMP_TASK_ARG_ALIGN: Required alignment of the task
-             argument block.  */
-
-DEFTREECODE (OMP_TASK, "omp_task", tcc_statement, 7)
+   Operand 1: OMP_TASK_CLAUSES: List of clauses.  */
+
+DEFTREECODE (OMP_TASK, "omp_task", tcc_statement, 2)
 
 /* OpenMP - #pragma omp for [clause1 ... clauseN]
    Operand 0: OMP_FOR_BODY: Loop body.
@@ -1026,7 +1001,7 @@ DEFTREECODE (OMP_TASK, "omp_task", tcc_statement, 7)
        OMP_FOR structured block, but are evaluated before the loop
        body begins.
 
-   VAR must be a signed integer variable, which is implicitly thread
+   VAR must be an integer or pointer variable, which is implicitly thread
    private.  N1, N2 and INCR are required to be loop invariant integer
    expressions that are evaluated without any synchronization.
    The evaluation order, frequency of evaluation and side-effects are
@@ -1035,14 +1010,8 @@ DEFTREECODE (OMP_FOR, "omp_for", tcc_statement, 6)
 
 /* OpenMP - #pragma omp sections [clause1 ... clauseN]
    Operand 0: OMP_SECTIONS_BODY: Sections body.
-   Operand 1: OMP_SECTIONS_CLAUSES: List of clauses.
-   Operand 2: OMP_SECTIONS_CONTROL: The control variable used for deciding
-             which of the sections to execute.  */
-DEFTREECODE (OMP_SECTIONS, "omp_sections", tcc_statement, 3)
-
-/* This tree immediately follows OMP_SECTIONS, and represents the switch
-   used to decide which branch is taken.  */
-DEFTREECODE (OMP_SECTIONS_SWITCH, "omp_sections_switch", tcc_statement, 0)
+   Operand 1: OMP_SECTIONS_CLAUSES: List of clauses.  */
+DEFTREECODE (OMP_SECTIONS, "omp_sections", tcc_statement, 2)
 
 /* OpenMP - #pragma omp single
    Operand 0: OMP_SINGLE_BODY: Single section body.
@@ -1066,14 +1035,6 @@ DEFTREECODE (OMP_ORDERED, "omp_ordered", tcc_statement, 1)
    Operand 1: OMP_CRITICAL_NAME: Identifier for critical section.  */
 DEFTREECODE (OMP_CRITICAL, "omp_critical", tcc_statement, 2)
 
-/* Return from an OpenMP directive.  */
-DEFTREECODE (OMP_RETURN, "omp_return", tcc_statement, 0)
-
-/* OpenMP - An intermediate tree code to mark the location of the
-   loop or sections iteration in the partially lowered code.
-   The arguments are definition and use of the control variable.  */
-DEFTREECODE (OMP_CONTINUE, "omp_continue", tcc_statement, 2)
-
 /* OpenMP - #pragma omp atomic
    Operand 0: The address at which the atomic operation is to be performed.
        This address should be stabilized with save_expr.
@@ -1082,18 +1043,6 @@ DEFTREECODE (OMP_CONTINUE, "omp_continue", tcc_statement, 2)
        build_fold_indirect_ref of the address.  */
 DEFTREECODE (OMP_ATOMIC, "omp_atomic", tcc_statement, 2)
 
-/* Codes used for lowering of OMP_ATOMIC.  Although the form of the OMP_ATOMIC
-   statement is very simple (just in form mem op= expr), various implicit
-   conversions may cause the expression become more complex, so that it does
-   not fit the gimple grammar very well.  To overcome this problem, OMP_ATOMIC
-   is rewritten as a sequence of two codes in gimplification:
-
-   OMP_LOAD (tmp, mem)
-   val = some computations involving tmp;
-   OMP_STORE (val)  */
-DEFTREECODE (OMP_ATOMIC_LOAD, "omp_atomic_load", tcc_statement, 2)
-DEFTREECODE (OMP_ATOMIC_STORE, "omp_atomic_store", tcc_statement, 1)
-
 /* OpenMP clauses.  */
 DEFTREECODE (OMP_CLAUSE, "omp_clause", tcc_exceptional, 0)
 
@@ -1140,11 +1089,6 @@ DEFTREECODE (WIDEN_MULT_EXPR, "widen_mult_expr", tcc_binary, 2)
 DEFTREECODE (VEC_LSHIFT_EXPR, "vec_lshift_expr", tcc_binary, 2)
 DEFTREECODE (VEC_RSHIFT_EXPR, "vec_rshift_expr", tcc_binary, 2)
 \f
-/* GIMPLE tree codes.  */
-
-/* Assignment expression.  Operand 0 is the what to set; 1, the new value.  */
-DEFTREECODE (GIMPLE_MODIFY_STMT, "gimple_modify_stmt", tcc_gimple_stmt, 2)
-
 /* Widening vector multiplication.
    The two operands are vectors with N elements of size S. Multiplying the
    elements of the two vectors will result in N products of size 2*S.
index 0ffd91b..5f5b37d 100644
@@ -69,8 +69,7 @@ enum tree_code_class {
                      but usually no interesting value.  */
   tcc_vl_exp,      /* A function call or other expression with a
                      variable-length operand vector.  */
-  tcc_expression,  /* Any other expression.  */
-  tcc_gimple_stmt  /* A GIMPLE statement.  */
+  tcc_expression   /* Any other expression.  */
 };
 
 /* Each tree code class has an associated string representation.
@@ -174,32 +173,10 @@ extern const enum tree_code_class tree_code_type[];
 #define IS_EXPR_CODE_CLASS(CLASS)\
        ((CLASS) >= tcc_reference && (CLASS) <= tcc_expression)
 
-/* Returns nonzero iff CLASS is a GIMPLE statement.  */
-
-#define IS_GIMPLE_STMT_CODE_CLASS(CLASS) ((CLASS) == tcc_gimple_stmt)
-
 /* Returns nonzero iff NODE is an expression of some kind.  */
 
 #define EXPR_P(NODE) IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (NODE)))
 
-/* Returns nonzero iff NODE is an OpenMP directive.  */
-
-#define OMP_DIRECTIVE_P(NODE)                          \
-    (TREE_CODE (NODE) == OMP_PARALLEL                  \
-     || TREE_CODE (NODE) == OMP_TASK                   \
-     || TREE_CODE (NODE) == OMP_FOR                    \
-     || TREE_CODE (NODE) == OMP_SECTIONS               \
-     || TREE_CODE (NODE) == OMP_SECTIONS_SWITCH                \
-     || TREE_CODE (NODE) == OMP_SINGLE                 \
-     || TREE_CODE (NODE) == OMP_SECTION                        \
-     || TREE_CODE (NODE) == OMP_MASTER                 \
-     || TREE_CODE (NODE) == OMP_ORDERED                        \
-     || TREE_CODE (NODE) == OMP_CRITICAL               \
-     || TREE_CODE (NODE) == OMP_RETURN                 \
-     || TREE_CODE (NODE) == OMP_ATOMIC_LOAD                            \
-     || TREE_CODE (NODE) == OMP_ATOMIC_STORE                           \
-     || TREE_CODE (NODE) == OMP_CONTINUE)
-
 /* Number of argument-words in each kind of tree-node.  */
 
 extern const unsigned char tree_code_length[];
@@ -407,8 +384,6 @@ struct tree_base GTY(())
 
   unsigned spare : 23;
 
-  /* FIXME tuples: Eventually, we need to move this somewhere external to
-     the trees.  */
   union tree_ann_d *ann;
 };
 
@@ -419,16 +394,6 @@ struct tree_common GTY(())
   tree type;
 };
 
-/* GIMPLE_MODIFY_STMT */
-struct gimple_stmt GTY(())
-{
-  struct tree_base base;
-  location_t locus;
-  tree block;
-  /* FIXME tuples: Eventually this should be of type ``struct gimple_expr''.  */
-  tree GTY ((length ("TREE_CODE_LENGTH (TREE_CODE (&%h))"))) operands[1];
-};
-
 /* The following table lists the uses of each of the above flags and
    for which types of nodes they are defined.
 
@@ -477,7 +442,7 @@ struct gimple_stmt GTY(())
            POINTER_TYPE, REFERENCE_TYPE
 
        MOVE_NONTEMPORAL in
-           GIMPLE_MODIFY_STMT
+           MODIFY_EXPR
 
        CASE_HIGH_SEEN in
            CASE_LABEL_EXPR
@@ -523,9 +488,6 @@ struct gimple_stmt GTY(())
        DECL_BY_REFERENCE in
            PARM_DECL, RESULT_DECL
 
-       OMP_RETURN_NOWAIT in
-           OMP_RETURN
-
        OMP_SECTION_LAST in
            OMP_SECTION
 
@@ -815,14 +777,6 @@ enum tree_node_structure_enum {
                               __FUNCTION__);                           \
     __t; })
 
-#define GIMPLE_STMT_CHECK(T) __extension__                             \
-({  __typeof (T) const __t = (T);                                      \
-    char const __c = TREE_CODE_CLASS (TREE_CODE (__t));                        \
-    if (!IS_GIMPLE_STMT_CODE_CLASS (__c))                              \
-      tree_class_check_failed (__t, tcc_gimple_stmt, __FILE__, __LINE__,\
-                              __FUNCTION__);                           \
-    __t; })
-
 /* These checks have to be special cased.  */
 #define NON_TYPE_CHECK(T) __extension__                                        \
 ({  __typeof (T) const __t = (T);                                      \
@@ -842,17 +796,6 @@ enum tree_node_structure_enum {
                                 __FILE__, __LINE__, __FUNCTION__);     \
     &__t->vec.a[__i]; }))
 
-#define PHI_NODE_ELT_CHECK(T, I) __extension__                         \
-(*({__typeof (T) const __t = (T);                                      \
-    const int __i = (I);                                               \
-    if (TREE_CODE (__t) != PHI_NODE)                                   \
-      tree_check_failed (__t, __FILE__, __LINE__, __FUNCTION__,        \
-                        PHI_NODE, 0);                                  \
-    if (__i < 0 || __i >= __t->phi.capacity)                           \
-      phi_node_elt_check_failed (__i, __t->phi.num_args,               \
-                                __FILE__, __LINE__, __FUNCTION__);     \
-    &__t->phi.a[__i]; }))
-
 #define OMP_CLAUSE_ELT_CHECK(T, I) __extension__                       \
 (*({__typeof (T) const __t = (T);                                      \
     const int __i = (I);                                               \
@@ -868,8 +811,6 @@ enum tree_node_structure_enum {
 #define TREE_OPERAND_CHECK(T, I) __extension__                         \
 (*({__typeof (T) const __t = EXPR_CHECK (T);                           \
     const int __i = (I);                                               \
-    if (GIMPLE_TUPLE_P (__t))                                          \
-      gcc_unreachable ();                                              \
     if (__i < 0 || __i >= TREE_OPERAND_LENGTH (__t))                   \
       tree_operand_check_failed (__i, __t,                             \
                                 __FILE__, __LINE__, __FUNCTION__);     \
@@ -885,15 +826,6 @@ enum tree_node_structure_enum {
                                 __FILE__, __LINE__, __FUNCTION__);     \
     &__t->exp.operands[__i]; }))
 
-/* Special checks for GIMPLE_STMT_OPERANDs.  */
-#define GIMPLE_STMT_OPERAND_CHECK(T, I) __extension__                  \
-(*({__typeof (T) const __t = GIMPLE_STMT_CHECK (T);                    \
-    const int __i = (I);                                               \
-    if (__i < 0 || __i >= TREE_OPERAND_LENGTH (__t))                   \
-      tree_operand_check_failed (__i, __t,                             \
-                                __FILE__, __LINE__, __FUNCTION__);     \
-    &__t->gstmt.operands[__i]; }))
-
 #define TREE_RTL_OPERAND_CHECK(T, CODE, I) __extension__               \
 (*(rtx *)                                                              \
  ({__typeof (T) const __t = (T);                                       \
@@ -916,8 +848,6 @@ enum tree_node_structure_enum {
 
 #define TREE_CHAIN(NODE) __extension__ \
 (*({__typeof (NODE) const __t = (NODE);                                \
-    if (GIMPLE_TUPLE_P (__t))                                  \
-      gcc_unreachable ();                                      \
     &__t->common.chain; }))
 
 /* In all nodes that are expressions, this is the data type of the expression.
@@ -926,8 +856,6 @@ enum tree_node_structure_enum {
    In VECTOR_TYPE nodes, this is the type of the elements.  */
 #define TREE_TYPE(NODE) __extension__ \
 (*({__typeof (NODE) const __t = (NODE);                                        \
-    if (GIMPLE_TUPLE_P (__t))                                  \
-      gcc_unreachable ();                                      \
     &__t->common.type; }))
 
 extern void tree_contains_struct_check_failed (const_tree,
@@ -985,14 +913,11 @@ extern void omp_clause_range_check_failed (const_tree, const char *, int,
 #define TREE_CLASS_CHECK(T, CODE)              (T)
 #define TREE_RANGE_CHECK(T, CODE1, CODE2)      (T)
 #define EXPR_CHECK(T)                          (T)
-#define GIMPLE_STMT_CHECK(T)                   (T)
 #define NON_TYPE_CHECK(T)                      (T)
 #define TREE_VEC_ELT_CHECK(T, I)               ((T)->vec.a[I])
 #define TREE_OPERAND_CHECK(T, I)               ((T)->exp.operands[I])
 #define TREE_OPERAND_CHECK_CODE(T, CODE, I)    ((T)->exp.operands[I])
-#define GIMPLE_STMT_OPERAND_CHECK(T, I)                ((T)->gstmt.operands[I])
 #define TREE_RTL_OPERAND_CHECK(T, CODE, I)  (*(rtx *) &((T)->exp.operands[I]))
-#define PHI_NODE_ELT_CHECK(T, i)       ((T)->phi.a[i])
 #define OMP_CLAUSE_ELT_CHECK(T, i)             ((T)->omp_clause.ops[i])
 #define OMP_CLAUSE_RANGE_CHECK(T, CODE1, CODE2)        (T)
 #define OMP_CLAUSE_SUBCODE_CHECK(T, CODE)      (T)
@@ -1028,27 +953,6 @@ extern void omp_clause_range_check_failed (const_tree, const char *, int,
   TREE_CHECK5 (T, INTEGER_TYPE, ENUMERAL_TYPE, BOOLEAN_TYPE, REAL_TYPE,        \
               FIXED_POINT_TYPE)
 
-/* Nonzero if NODE is a GIMPLE statement.  */
-#define GIMPLE_STMT_P(NODE) \
-  (TREE_CODE_CLASS (TREE_CODE ((NODE))) == tcc_gimple_stmt)
-
-/* Nonzero if NODE is a GIMPLE tuple.  */
-#define GIMPLE_TUPLE_P(NODE) (GIMPLE_STMT_P (NODE) || TREE_CODE (NODE) == PHI_NODE)
-
-/* A GIMPLE tuple that has a ``locus'' field.  */
-#define GIMPLE_TUPLE_HAS_LOCUS_P(NODE) GIMPLE_STMT_P ((NODE))
-
-/* Like TREE_OPERAND but works with GIMPLE stmt tuples as well.
-
-   If you know the NODE is a GIMPLE statement, use GIMPLE_STMT_OPERAND.  If the
-   NODE code is unknown at compile time, use this macro.  */
-#define GENERIC_TREE_OPERAND(NODE, I) *(generic_tree_operand ((NODE), (I)))
-
-/* Like TREE_TYPE but returns void_type_node for gimple tuples that have
-   no type.  */
-
-#define GENERIC_TREE_TYPE(NODE) *(generic_tree_type ((NODE)))
-
 /* Here is how primitive or already-canonicalized types' hash codes
    are made.  */
 #define TYPE_HASH(TYPE) (TYPE_UID (TYPE))
@@ -1057,16 +961,12 @@ extern void omp_clause_range_check_failed (const_tree, const char *, int,
    used in hash tables which are saved to a PCH.  */
 #define TREE_HASH(NODE) ((size_t) (NODE) & 0777777)
 
-/* The TREE_CHAIN but it is able to handle tuples.  */
-#define GENERIC_NEXT(NODE)                                     \
-  (TREE_CODE (NODE) == PHI_NODE ? PHI_CHAIN (NODE) :           \
-     GIMPLE_STMT_P (NODE) ? NULL_TREE : TREE_CHAIN (NODE))
-
-/* Tests if expression is conversion expr (NOP_EXPRs or CONVERT_EXPRs).  */
+/* Tests if CODE is a conversion expr (NOP_EXPR or CONVERT_EXPR).  */
+#define IS_CONVERT_EXPR_CODE_P(CODE)                           \
+  ((CODE) == NOP_EXPR || (CODE) == CONVERT_EXPR)
 
-#define CONVERT_EXPR_P(EXP)                                    \
-  (TREE_CODE (EXP) == NOP_EXPR                                 \
-   || TREE_CODE (EXP) == CONVERT_EXPR)
+/* Similarly, but accept an expression instead of a tree code.  */
+#define CONVERT_EXPR_P(EXP)    IS_CONVERT_EXPR_CODE_P (TREE_CODE (EXP))
 
 /* Generate case for NOP_EXPR, CONVERT_EXPR.  */
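
A minimal usage sketch (not part of the patch) of the two forms added above; exp here is a hypothetical GENERIC tree:

    if (CONVERT_EXPR_P (exp))                         /* test a tree node */
      exp = TREE_OPERAND (exp, 0);

    if (IS_CONVERT_EXPR_CODE_P (TREE_CODE (exp)))     /* same test, given a bare tree code */
      exp = TREE_OPERAND (exp, 0);
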
 
@@ -1082,7 +982,7 @@ extern void omp_clause_range_check_failed (const_tree, const char *, int,
          || TREE_CODE (EXP) == NON_LVALUE_EXPR)                \
         && TREE_OPERAND (EXP, 0) != error_mark_node            \
         && (TYPE_MODE (TREE_TYPE (EXP))                        \
-            == TYPE_MODE (GENERIC_TREE_TYPE (TREE_OPERAND (EXP, 0))))) \
+            == TYPE_MODE (TREE_TYPE (TREE_OPERAND (EXP, 0))))) \
     (EXP) = TREE_OPERAND (EXP, 0)
 
 /* Like STRIP_NOPS, but don't let the signedness change either.  */
@@ -1279,7 +1179,7 @@ extern void omp_clause_range_check_failed (const_tree, const char *, int,
 
 /* In a MODIFY_EXPR, means that the store in the expression is nontemporal.  */
 #define MOVE_NONTEMPORAL(NODE) \
-  (GIMPLE_MODIFY_STMT_CHECK (NODE)->base.static_flag)
+  (EXPR_CHECK (NODE)->base.static_flag)
 
 /* In an INTEGER_CST, REAL_CST, COMPLEX_CST, or VECTOR_CST, this means
    there was an overflow in folding.  */
@@ -1658,28 +1558,25 @@ struct tree_constructor GTY(())
 #define VL_EXP_OPERAND_LENGTH(NODE) \
   ((int)TREE_INT_CST_LOW (VL_EXP_CHECK (NODE)->exp.operands[0]))
 
-/* In gimple statements.  */
-#define GIMPLE_STMT_OPERAND(NODE, I) GIMPLE_STMT_OPERAND_CHECK (NODE, I)
-#define GIMPLE_STMT_LOCUS(NODE) (GIMPLE_STMT_CHECK (NODE)->gstmt.locus)
-#define GIMPLE_STMT_BLOCK(NODE) (GIMPLE_STMT_CHECK (NODE)->gstmt.block)
-
 /* In a LOOP_EXPR node.  */
 #define LOOP_EXPR_BODY(NODE) TREE_OPERAND_CHECK_CODE (NODE, LOOP_EXPR, 0)
 
 /* The source location of this expression.  Non-tree_exp nodes such as
    decls and constants can be shared among multiple locations, so
    return nothing.  */
-#define EXPR_LOCATION(NODE) expr_location ((NODE))
-#define SET_EXPR_LOCATION(NODE, FROM) set_expr_location ((NODE), (FROM))
-#define EXPR_HAS_LOCATION(NODE) expr_has_location ((NODE))
-#define EXPR_LOCUS(NODE) expr_locus ((NODE))
+#define EXPR_LOCATION(NODE) (EXPR_P ((NODE)) ? (NODE)->exp.locus : UNKNOWN_LOCATION)
+#define SET_EXPR_LOCATION(NODE, LOCUS) EXPR_CHECK ((NODE))->exp.locus = (LOCUS)
+#define EXPR_HAS_LOCATION(NODE) (EXPR_LOCATION (NODE) != UNKNOWN_LOCATION)
+#define EXPR_LOCUS(NODE) (EXPR_P (NODE) \
+                         ? CONST_CAST (source_location *, &(NODE)->exp.locus) \
+                         : (source_location *) NULL)
 #define SET_EXPR_LOCUS(NODE, FROM) set_expr_locus ((NODE), (FROM))
-#define EXPR_FILENAME(NODE) (expr_filename ((NODE)))
-#define EXPR_LINENO(NODE) (expr_lineno ((NODE)))
+#define EXPR_FILENAME(NODE) LOCATION_FILE (EXPR_CHECK ((NODE))->exp.locus)
+#define EXPR_LINENO(NODE) LOCATION_LINE (EXPR_CHECK (NODE)->exp.locus)
 
 /* True if a tree is an expression or statement that can have a
    location.  */
-#define CAN_HAVE_LOCATION_P(NODE) (EXPR_P (NODE) || GIMPLE_STMT_P (NODE))
+#define CAN_HAVE_LOCATION_P(NODE) (EXPR_P (NODE))
 
 /* In a TARGET_EXPR node.  */
 #define TARGET_EXPR_SLOT(NODE) TREE_OPERAND_CHECK_CODE (NODE, TARGET_EXPR, 0)
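
For illustration (not part of the patch), with EXPR_LOCATION and friends now reading exp.locus directly, a typical guarded update looks like the following; input_location is the global current parser location:

    if (CAN_HAVE_LOCATION_P (t) && !EXPR_HAS_LOCATION (t))
      SET_EXPR_LOCATION (t, input_location);
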
@@ -1805,22 +1702,13 @@ struct tree_constructor GTY(())
 
 #define OMP_PARALLEL_BODY(NODE)    TREE_OPERAND (OMP_PARALLEL_CHECK (NODE), 0)
 #define OMP_PARALLEL_CLAUSES(NODE) TREE_OPERAND (OMP_PARALLEL_CHECK (NODE), 1)
-#define OMP_PARALLEL_FN(NODE) TREE_OPERAND (OMP_PARALLEL_CHECK (NODE), 2)
-#define OMP_PARALLEL_DATA_ARG(NODE) TREE_OPERAND (OMP_PARALLEL_CHECK (NODE), 3)
 
 #define OMP_TASK_BODY(NODE)       TREE_OPERAND (OMP_TASK_CHECK (NODE), 0)
 #define OMP_TASK_CLAUSES(NODE)    TREE_OPERAND (OMP_TASK_CHECK (NODE), 1)
-#define OMP_TASK_FN(NODE)         TREE_OPERAND (OMP_TASK_CHECK (NODE), 2)
-#define OMP_TASK_DATA_ARG(NODE)           TREE_OPERAND (OMP_TASK_CHECK (NODE), 3)
-#define OMP_TASK_COPYFN(NODE)     TREE_OPERAND (OMP_TASK_CHECK (NODE), 4)
-#define OMP_TASK_ARG_SIZE(NODE)           TREE_OPERAND (OMP_TASK_CHECK (NODE), 5)
-#define OMP_TASK_ARG_ALIGN(NODE)   TREE_OPERAND (OMP_TASK_CHECK (NODE), 6)
 
 #define OMP_TASKREG_CHECK(NODE)          TREE_RANGE_CHECK (NODE, OMP_PARALLEL, OMP_TASK)
 #define OMP_TASKREG_BODY(NODE)    TREE_OPERAND (OMP_TASKREG_CHECK (NODE), 0)
 #define OMP_TASKREG_CLAUSES(NODE) TREE_OPERAND (OMP_TASKREG_CHECK (NODE), 1)
-#define OMP_TASKREG_FN(NODE) TREE_OPERAND (OMP_TASKREG_CHECK (NODE), 2)
-#define OMP_TASKREG_DATA_ARG(NODE) TREE_OPERAND (OMP_TASKREG_CHECK (NODE), 3)
 
 #define OMP_FOR_BODY(NODE)        TREE_OPERAND (OMP_FOR_CHECK (NODE), 0)
 #define OMP_FOR_CLAUSES(NODE)     TREE_OPERAND (OMP_FOR_CHECK (NODE), 1)
@@ -1831,7 +1719,6 @@ struct tree_constructor GTY(())
 
 #define OMP_SECTIONS_BODY(NODE)    TREE_OPERAND (OMP_SECTIONS_CHECK (NODE), 0)
 #define OMP_SECTIONS_CLAUSES(NODE) TREE_OPERAND (OMP_SECTIONS_CHECK (NODE), 1)
-#define OMP_SECTIONS_CONTROL(NODE) TREE_OPERAND (OMP_SECTIONS_CHECK (NODE), 2)
 
 #define OMP_SECTION_BODY(NODE)    TREE_OPERAND (OMP_SECTION_CHECK (NODE), 0)
 
@@ -1856,13 +1743,6 @@ struct tree_constructor GTY(())
 #define OMP_SECTION_LAST(NODE) \
   (OMP_SECTION_CHECK (NODE)->base.private_flag)
 
-/* True on an OMP_RETURN statement if the return does not require a
-   thread synchronization via some sort of barrier.  The exact barrier
-   that would otherwise be emitted is dependent on the OMP statement
-   with which this return is associated.  */
-#define OMP_RETURN_NOWAIT(NODE) \
-  (OMP_RETURN_CHECK (NODE)->base.private_flag)
-
 /* True on an OMP_PARALLEL statement if it represents an explicit
    combined parallel work-sharing constructs.  */
 #define OMP_PARALLEL_COMBINED(NODE) \
@@ -1887,6 +1767,8 @@ struct tree_constructor GTY(())
   OMP_CLAUSE_OPERAND (OMP_CLAUSE_SUBCODE_CHECK (NODE,                  \
                                                OMP_CLAUSE_LASTPRIVATE),\
                      1)
+#define OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ(NODE) \
+  (OMP_CLAUSE_CHECK (NODE))->omp_clause.gimple_reduction_init
 
 #define OMP_CLAUSE_IF_EXPR(NODE) \
   OMP_CLAUSE_OPERAND (OMP_CLAUSE_SUBCODE_CHECK (NODE, OMP_CLAUSE_IF), 0)
@@ -1908,6 +1790,10 @@ struct tree_constructor GTY(())
   OMP_CLAUSE_OPERAND (OMP_CLAUSE_SUBCODE_CHECK (NODE, OMP_CLAUSE_REDUCTION), 1)
 #define OMP_CLAUSE_REDUCTION_MERGE(NODE) \
   OMP_CLAUSE_OPERAND (OMP_CLAUSE_SUBCODE_CHECK (NODE, OMP_CLAUSE_REDUCTION), 2)
+#define OMP_CLAUSE_REDUCTION_GIMPLE_INIT(NODE) \
+  (OMP_CLAUSE_CHECK (NODE))->omp_clause.gimple_reduction_init
+#define OMP_CLAUSE_REDUCTION_GIMPLE_MERGE(NODE) \
+  (OMP_CLAUSE_CHECK (NODE))->omp_clause.gimple_reduction_merge
 #define OMP_CLAUSE_REDUCTION_PLACEHOLDER(NODE) \
   OMP_CLAUSE_OPERAND (OMP_CLAUSE_SUBCODE_CHECK (NODE, OMP_CLAUSE_REDUCTION), 3)
 
@@ -1951,10 +1837,8 @@ struct tree_exp GTY(())
    only field that can be relied upon.  */
 #define SSA_NAME_VAR(NODE)     SSA_NAME_CHECK (NODE)->ssa_name.var
 
-/* Returns the statement which defines this reference.   Note that
-   we use the same field when chaining SSA_NAME nodes together on
-   the SSA_NAME freelist.  */
-#define SSA_NAME_DEF_STMT(NODE)        SSA_NAME_CHECK (NODE)->common.chain
+/* Returns the statement which defines this SSA name.  */
+#define SSA_NAME_DEF_STMT(NODE)        SSA_NAME_CHECK (NODE)->ssa_name.def_stmt
 
 /* Returns the SSA version number of this SSA name.  Note that in
    tree SSA, version numbers are not per variable and may be recycled.  */
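
A minimal sketch (not part of the patch) of the tuplified accessor; name is assumed to be an existing SSA_NAME:

    tree rhs = NULL_TREE;
    gimple def = SSA_NAME_DEF_STMT (name);   /* now a gimple statement, not a tree */
    if (is_gimple_assign (def))
      rhs = gimple_assign_rhs1 (def);        /* e.g. inspect the defining assignment */
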
@@ -2000,7 +1884,12 @@ typedef struct ssa_use_operand_d GTY(())
 {
   struct ssa_use_operand_d* GTY((skip(""))) prev;
   struct ssa_use_operand_d* GTY((skip(""))) next;
-  tree GTY((skip(""))) stmt;
+  /* Immediate uses for a given SSA name are maintained as a cyclic
+     list.  To recognize the root of this list, the location field
+     needs to point to the original SSA name.  Since statements and
+     SSA names are of different data types, we need this union.  See
+     the explanation in struct immediate_use_iterator_d.  */
+  union { gimple stmt; tree ssa_name; } GTY((skip(""))) loc;
   tree *GTY((skip(""))) use;
 } ssa_use_operand_t;
 
@@ -2014,6 +1903,9 @@ struct tree_ssa_name GTY(())
   /* _DECL wrapped by this SSA name.  */
   tree var;
 
+  /* Statement that defines this SSA name.  */
+  gimple def_stmt;
+
   /* SSA version number.  */
   unsigned int version;
 
@@ -2031,29 +1923,6 @@ struct tree_ssa_name GTY(())
   struct ssa_use_operand_d imm_uses;
 };
 \f
-/* In a PHI_NODE node.  */
-
-/* These 2 macros should be considered off limits for use by developers.  If
-   you wish to access the use or def fields of a PHI_NODE in the SSA
-   optimizers, use the accessor macros found in tree-ssa-operands.h.
-   These two macros are to be used only by those accessor macros, and other
-   select places where we *absolutely* must take the address of the tree.  */
-
-#define PHI_RESULT_TREE(NODE)          PHI_NODE_CHECK (NODE)->phi.result
-#define PHI_ARG_DEF_TREE(NODE, I)      PHI_NODE_ELT_CHECK (NODE, I).def
-
-/* PHI_NODEs for each basic block are chained together in a single linked
-   list.  The head of the list is linked from the block annotation, and
-   the link to the next PHI is in PHI_CHAIN.  */
-#define PHI_CHAIN(NODE)                        PHI_NODE_CHECK (NODE)->phi.chain
-
-#define PHI_NUM_ARGS(NODE)             PHI_NODE_CHECK (NODE)->phi.num_args
-#define PHI_ARG_CAPACITY(NODE)         PHI_NODE_CHECK (NODE)->phi.capacity
-#define PHI_ARG_ELT(NODE, I)           PHI_NODE_ELT_CHECK (NODE, I)
-#define PHI_ARG_EDGE(NODE, I)          (EDGE_PRED (PHI_BB ((NODE)), (I)))
-#define PHI_BB(NODE)                   PHI_NODE_CHECK (NODE)->phi.bb
-#define PHI_ARG_IMM_USE_NODE(NODE, I)  PHI_NODE_ELT_CHECK (NODE, I).imm_use
-
 struct phi_arg_d GTY(())
 {
   /* imm_use MUST be the first element in struct because we do some
@@ -2062,22 +1931,6 @@ struct phi_arg_d GTY(())
   tree def;
 };
 
-struct tree_phi_node GTY(())
-{
-  struct tree_base common;
-  tree chain;
-  tree result;
-  int num_args;
-  int capacity;
-
-  /* Basic block holding this PHI node.  */
-  struct basic_block_def *bb;
-
-  /* Arguments of the PHI node.  These are maintained in the same
-     order as predecessor edge vector BB->PREDS.  */
-  struct phi_arg_d GTY ((length ("((tree)&%h)->phi.num_args"))) a[1];
-};
-
 \f
 #define OMP_CLAUSE_CODE(NODE)                                  \
        (OMP_CLAUSE_CHECK (NODE))->omp_clause.code
@@ -2100,6 +1953,12 @@ struct tree_omp_clause GTY(())
     enum omp_clause_schedule_kind schedule_kind;
     enum tree_code                reduction_code;
   } GTY ((skip)) subcode;
+
+  /* The gimplification of OMP_CLAUSE_REDUCTION_{INIT,MERGE} for omp-low's
+     usage.  */
+  gimple_seq gimple_reduction_init;
+  gimple_seq gimple_reduction_merge;
+
   tree GTY ((length ("omp_clause_num_ops[OMP_CLAUSE_CODE ((tree)&%h)]"))) ops[1];
 };
 \f
@@ -3574,11 +3433,9 @@ union tree_node GTY ((ptr_alias (union lang_tree_node),
   struct tree_vec GTY ((tag ("TS_VEC"))) vec;
   struct tree_exp GTY ((tag ("TS_EXP"))) exp;
   struct tree_ssa_name GTY ((tag ("TS_SSA_NAME"))) ssa_name;
-  struct tree_phi_node GTY ((tag ("TS_PHI_NODE"))) phi;
   struct tree_block GTY ((tag ("TS_BLOCK"))) block;
   struct tree_binfo GTY ((tag ("TS_BINFO"))) binfo;
   struct tree_statement_list GTY ((tag ("TS_STATEMENT_LIST"))) stmt_list;
-  struct gimple_stmt GTY ((tag ("TS_GIMPLE_STATEMENT"))) gstmt;
   struct tree_constructor GTY ((tag ("TS_CONSTRUCTOR"))) constructor;
   struct tree_memory_tag GTY ((tag ("TS_MEMORY_TAG"))) mtag;
   struct tree_omp_clause GTY ((tag ("TS_OMP_CLAUSE"))) omp_clause;
@@ -4006,8 +3863,8 @@ extern hashval_t decl_assembler_name_hash (const_tree asmname);
 extern size_t tree_size (const_tree);
 
 /* Compute the number of bytes occupied by a tree with code CODE.  This
-   function cannot be used for TREE_VEC or PHI_NODE codes, which are of
-   variable length.  */
+   function cannot be used for TREE_VEC codes, which are of variable
+   length.  */
 extern size_t tree_code_size (enum tree_code);
 
 /* Lowest level primitive for allocating a node.
@@ -4083,10 +3940,6 @@ extern tree build7_stat (enum tree_code, tree, tree, tree, tree, tree,
 #define build7(c,t1,t2,t3,t4,t5,t6,t7,t8) \
   build7_stat (c,t1,t2,t3,t4,t5,t6,t7,t8 MEM_STAT_INFO)
 
-extern tree build_gimple_modify_stmt_stat (tree, tree MEM_STAT_DECL);
-#define build_gimple_modify_stmt(t1,t2) \
-  build_gimple_modify_stmt_stat (t1,t2 MEM_STAT_INFO)
-
 extern tree build_int_cst (tree, HOST_WIDE_INT);
 extern tree build_int_cst_type (tree, HOST_WIDE_INT);
 extern tree build_int_cstu (tree, unsigned HOST_WIDE_INT);
@@ -4821,15 +4674,19 @@ extern bool commutative_tree_code (enum tree_code);
 extern tree upper_bound_in_type (tree, tree);
 extern tree lower_bound_in_type (tree, tree);
 extern int operand_equal_for_phi_arg_p (const_tree, const_tree);
-extern bool empty_body_p (tree);
 extern tree call_expr_arg (tree, int);
 extern tree *call_expr_argp (tree, int);
 extern tree call_expr_arglist (tree);
+extern tree create_artificial_label (void);
+extern const char *get_name (tree);
 extern bool stdarg_p (tree);
 extern bool prototype_p (tree);
 extern int function_args_count (tree);
 extern bool auto_var_in_fn_p (const_tree, const_tree);
 \f
+/* In gimplify.c */
+extern tree unshare_expr (tree);
+\f
 /* In stmt.c */
 
 extern void expand_expr_stmt (tree);
@@ -4883,10 +4740,11 @@ extern tree fold_ignored_result (tree);
 extern tree fold_abs_const (tree, tree);
 extern tree fold_indirect_ref_1 (tree, tree);
 extern void fold_defer_overflow_warnings (void);
-extern void fold_undefer_overflow_warnings (bool, const_tree, int);
+extern void fold_undefer_overflow_warnings (bool, const_gimple, int);
 extern void fold_undefer_and_ignore_overflow_warnings (void);
 extern bool fold_deferring_overflow_warnings_p (void);
 extern tree maybe_fold_offset_to_reference (tree, tree, tree);
+extern tree maybe_fold_stmt_addition (tree, tree, tree);
 
 extern tree force_fit_type_double (tree, unsigned HOST_WIDE_INT, HOST_WIDE_INT,
                                   int, bool);
@@ -4952,7 +4810,6 @@ extern tree constant_boolean_node (int, tree);
 extern tree build_low_bits_mask (tree, unsigned);
 
 extern bool tree_swap_operands_p (const_tree, const_tree, bool);
-extern void swap_tree_operands (tree, tree *, tree *);
 extern enum tree_code swap_tree_comparison (enum tree_code);
 
 extern bool ptr_difference_const (tree, tree, HOST_WIDE_INT *);
@@ -4969,8 +4826,7 @@ extern bool tree_binary_nonnegative_warnv_p (enum tree_code, tree, tree, tree,
                                              bool *);
 extern bool tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p);
 extern bool tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p);
-extern bool tree_call_nonnegative_warnv_p (enum tree_code code, tree, tree,
-                                           tree, tree, bool *);
+extern bool tree_call_nonnegative_warnv_p (tree, tree, tree, tree, bool *);
 
 extern bool tree_expr_nonzero_warnv_p (tree, bool *);
 
@@ -4996,12 +4852,14 @@ extern tree build_call_expr (tree, int, ...);
 extern tree mathfn_built_in (tree, enum built_in_function fn);
 extern tree strip_float_extensions (tree);
 extern tree c_strlen (tree, int);
-extern tree std_gimplify_va_arg_expr (tree, tree, tree *, tree *);
+extern tree std_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
 extern tree build_va_arg_indirect_ref (tree);
 extern tree build_string_literal (int, const char *);
 extern bool validate_arglist (const_tree, ...);
 extern rtx builtin_memset_read_str (void *, HOST_WIDE_INT, enum machine_mode);
 extern int get_pointer_alignment (tree, unsigned int);
+extern tree fold_call_stmt (gimple, bool);
+extern tree gimple_fold_builtin_snprintf_chk (gimple, tree, enum built_in_function);
 
 /* In convert.c */
 extern tree strip_float_extensions (tree);
@@ -5018,6 +4876,8 @@ extern int tree_log2 (const_tree);
 extern int tree_floor_log2 (const_tree);
 extern int simple_cst_equal (const_tree, const_tree);
 extern hashval_t iterative_hash_expr (const_tree, hashval_t);
+extern hashval_t iterative_hash_exprs_commutative (const_tree,
+                                                   const_tree, hashval_t);
 extern hashval_t iterative_hash_hashval_t (hashval_t, hashval_t);
 extern int compare_tree_int (const_tree, unsigned HOST_WIDE_INT);
 extern int type_list_equal (const_tree, const_tree);
@@ -5049,18 +4909,9 @@ extern tree build_addr (tree, tree);
 extern bool fields_compatible_p (const_tree, const_tree);
 extern tree find_compatible_field (tree, tree);
 
-extern location_t expr_location (const_tree);
-extern void set_expr_location (tree, location_t);
-extern bool expr_has_location (const_tree);
-
-extern location_t *expr_locus (const_tree);
 extern void set_expr_locus (tree, source_location *);
-extern const char *expr_filename (const_tree);
-extern int expr_lineno (const_tree);
 
 extern tree *tree_block (tree);
-extern tree *generic_tree_operand (tree, int);
-extern tree *generic_tree_type (tree);
 extern location_t *block_nonartificial_location (tree);
 
 /* In function.c */
@@ -5081,7 +4932,7 @@ extern void preserve_temp_slots (rtx);
 extern int aggregate_value_p (const_tree, const_tree);
 extern void push_function_context (void);
 extern void pop_function_context (void);
-extern tree gimplify_parameters (void);
+extern gimple_seq gimplify_parameters (void);
 
 /* In print-rtl.c */
 #ifdef BUFSIZ
@@ -5139,6 +4990,7 @@ extern int flags_from_decl_or_type (const_tree);
 extern int call_expr_flags (const_tree);
 
 extern int setjmp_call_p (const_tree);
+extern bool gimple_alloca_call_p (const_gimple);
 extern bool alloca_call_p (const_tree);
 extern bool must_pass_in_stack_var_size (enum machine_mode, const_tree);
 extern bool must_pass_in_stack_var_size_or_pad (enum machine_mode, const_tree);
@@ -5197,12 +5049,6 @@ extern void expand_anon_union_decl (tree, tree, tree);
 extern tree tree_overlaps_hard_reg_set (tree, HARD_REG_SET *);
 #endif
 
-/* In gimplify.c.  */
-extern tree create_artificial_label (void);
-extern void gimplify_function_tree (tree);
-extern const char *get_name (const_tree);
-extern tree unshare_expr (tree);
-extern void sort_case_labels (tree);
 \f
 /* Interface of the DWARF2 unwind info support.  */
 
@@ -5284,14 +5130,12 @@ typedef enum
   temp_list_kind,
   vec_kind,
   binfo_kind,
-  phi_kind,
   ssa_name_kind,
   constr_kind,
   x_kind,
   lang_decl,
   lang_type,
   omp_clause_kind,
-  gimple_stmt_kind,
   all_kinds
 } tree_node_kind;
 
index a16e7d4..b94a3f9 100644
@@ -78,22 +78,21 @@ static struct value_prof_hooks *value_prof_hooks;
    same information as above.  */
 
 
-static tree tree_divmod_fixed_value (tree, tree, tree, tree, 
-                                   tree, int, gcov_type, gcov_type);
-static tree tree_mod_pow2 (tree, tree, tree, tree, int, gcov_type, gcov_type);
-static tree tree_mod_subtract (tree, tree, tree, tree, int, int, int,
-                               gcov_type, gcov_type, gcov_type);
-static bool tree_divmod_fixed_value_transform (tree);
-static bool tree_mod_pow2_value_transform (tree);
-static bool tree_mod_subtract_transform (tree);
-static bool tree_stringops_transform (block_stmt_iterator *);
-static bool tree_ic_transform (tree);
+static tree gimple_divmod_fixed_value (gimple, tree, int, gcov_type, gcov_type);
+static tree gimple_mod_pow2 (gimple, int, gcov_type, gcov_type);
+static tree gimple_mod_subtract (gimple, int, int, int, gcov_type, gcov_type,
+                                gcov_type);
+static bool gimple_divmod_fixed_value_transform (gimple_stmt_iterator *);
+static bool gimple_mod_pow2_value_transform (gimple_stmt_iterator *);
+static bool gimple_mod_subtract_transform (gimple_stmt_iterator *);
+static bool gimple_stringops_transform (gimple_stmt_iterator *);
+static bool gimple_ic_transform (gimple);
 
 /* Allocate histogram value.  */
 
 static histogram_value
 gimple_alloc_histogram_value (struct function *fun ATTRIBUTE_UNUSED,
-                             enum hist_type type, tree stmt, tree value)
+                             enum hist_type type, gimple stmt, tree value)
 {
    histogram_value hist = (histogram_value) xcalloc (1, sizeof (*hist));
    hist->hvalue.value = value;
@@ -115,13 +114,13 @@ histogram_hash (const void *x)
 static int
 histogram_eq (const void *x, const void *y)
 {
-  return ((const_histogram_value) x)->hvalue.stmt == (const_tree)y;
+  return ((const_histogram_value) x)->hvalue.stmt == (const_gimple) y;
 }
 
 /* Set histogram for STMT.  */
 
 static void
-set_histogram_value (struct function *fun, tree stmt, histogram_value hist)
+set_histogram_value (struct function *fun, gimple stmt, histogram_value hist)
 {
   void **loc;
   if (!hist && !VALUE_HISTOGRAMS (fun))
@@ -144,7 +143,7 @@ set_histogram_value (struct function *fun, tree stmt, histogram_value hist)
 /* Get histogram list for STMT.  */
 
 histogram_value
-gimple_histogram_value (struct function *fun, tree stmt)
+gimple_histogram_value (struct function *fun, gimple stmt)
 {
   if (!VALUE_HISTOGRAMS (fun))
     return NULL;
@@ -155,16 +154,19 @@ gimple_histogram_value (struct function *fun, tree stmt)
 /* Add histogram for STMT.  */
 
 void
-gimple_add_histogram_value (struct function *fun, tree stmt, histogram_value hist)
+gimple_add_histogram_value (struct function *fun, gimple stmt,
+                           histogram_value hist)
 {
   hist->hvalue.next = gimple_histogram_value (fun, stmt);
   set_histogram_value (fun, stmt, hist);
 }
 
+
 /* Remove histogram HIST from STMT's histogram list.  */
 
 void
-gimple_remove_histogram_value (struct function *fun, tree stmt, histogram_value hist)
+gimple_remove_histogram_value (struct function *fun, gimple stmt,
+                              histogram_value hist)
 {
   histogram_value hist2 = gimple_histogram_value (fun, stmt);
   if (hist == hist2)
@@ -184,13 +186,16 @@ gimple_remove_histogram_value (struct function *fun, tree stmt, histogram_value
   free (hist);
 }
 
+
 /* Lookup histogram of type TYPE in the STMT.  */
 
 histogram_value
-gimple_histogram_value_of_type (struct function *fun, tree stmt, enum hist_type type)
+gimple_histogram_value_of_type (struct function *fun, gimple stmt,
+                               enum hist_type type)
 {
   histogram_value hist;
-  for (hist = gimple_histogram_value (fun, stmt); hist; hist = hist->hvalue.next)
+  for (hist = gimple_histogram_value (fun, stmt); hist;
+       hist = hist->hvalue.next)
     if (hist->type == type)
       return hist;
   return NULL;
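
For illustration (not part of the patch), callers now key histograms off a gimple statement rather than a tree, e.g.:

    histogram_value hist
      = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_SINGLE_VALUE);
    if (hist)
      gimple_remove_histogram_value (cfun, stmt, hist);   /* unlink from STMT's list and free */
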
@@ -302,7 +307,7 @@ dump_histogram_value (FILE *dump_file, histogram_value hist)
 /* Dump all histograms attached to STMT to DUMP_FILE.  */
 
 void
-dump_histograms_for_stmt (struct function *fun, FILE *dump_file, tree stmt)
+dump_histograms_for_stmt (struct function *fun, FILE *dump_file, gimple stmt)
 {
   histogram_value hist;
   for (hist = gimple_histogram_value (fun, stmt); hist; hist = hist->hvalue.next)
@@ -312,7 +317,7 @@ dump_histograms_for_stmt (struct function *fun, FILE *dump_file, tree stmt)
 /* Remove all histograms associated with STMT.  */
 
 void
-gimple_remove_stmt_histograms (struct function *fun, tree stmt)
+gimple_remove_stmt_histograms (struct function *fun, gimple stmt)
 {
   histogram_value val;
   while ((val = gimple_histogram_value (fun, stmt)) != NULL)
@@ -322,8 +327,8 @@ gimple_remove_stmt_histograms (struct function *fun, tree stmt)
 /* Duplicate all histograms associates with OSTMT to STMT.  */
 
 void
-gimple_duplicate_stmt_histograms (struct function *fun, tree stmt,
-                                 struct function *ofun, tree ostmt)
+gimple_duplicate_stmt_histograms (struct function *fun, gimple stmt,
+                                 struct function *ofun, gimple ostmt)
 {
   histogram_value val;
   for (val = gimple_histogram_value (ofun, ostmt); val != NULL; val = val->hvalue.next)
@@ -341,7 +346,7 @@ gimple_duplicate_stmt_histograms (struct function *fun, tree stmt,
 /* Move all histograms associated with OSTMT to STMT.  */
 
 void
-gimple_move_stmt_histograms (struct function *fun, tree stmt, tree ostmt)
+gimple_move_stmt_histograms (struct function *fun, gimple stmt, gimple ostmt)
 {
   histogram_value val = gimple_histogram_value (fun, ostmt);
   if (val)
@@ -370,36 +375,38 @@ visit_hist (void **slot, void *data)
     {
       error ("Dead histogram");
       dump_histogram_value (stderr, hist);
-      debug_generic_stmt (hist->hvalue.stmt);
+      debug_gimple_stmt (hist->hvalue.stmt);
       error_found = true;
     }
   return 1;
 }
 
+
 /* Verify sanity of the histograms.  */
 
 void
 verify_histograms (void)
 {
   basic_block bb;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   histogram_value hist;
   struct pointer_set_t *visited_hists;
 
   error_found = false;
   visited_hists = pointer_set_create ();
   FOR_EACH_BB (bb)
-    for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
       {
-       tree stmt = bsi_stmt (bsi);
+       gimple stmt = gsi_stmt (gsi);
 
-       for (hist = gimple_histogram_value (cfun, stmt); hist; hist = hist->hvalue.next)
+       for (hist = gimple_histogram_value (cfun, stmt); hist;
+            hist = hist->hvalue.next)
          {
            if (hist->hvalue.stmt != stmt)
              {
-               error ("Histogram value statement does not correspond to statement"
-                      " it is associated with");
-               debug_generic_stmt (stmt);
+               error ("Histogram value statement does not correspond to "
+                      "the statement it is associated with");
+               debug_gimple_stmt (stmt);
                dump_histogram_value (stderr, hist);
                error_found = true;
              }
@@ -439,38 +446,45 @@ free_histograms (void)
     }
 }
 
-/* The overall number of invocations of the counter should match execution count
-   of basic block.  Report it as error rather than internal error as it might
-   mean that user has misused the profile somehow.  */
+
+/* The overall number of invocations of the counter should match the
+   execution count of the basic block.  Report a mismatch as an error
+   rather than an internal error, as it might mean that the user has
+   misused the profile somehow.  */
+
 static bool
-check_counter (tree stmt, const char * name, gcov_type all, gcov_type bb_count)
+check_counter (gimple stmt, const char *name, gcov_type all, gcov_type bb_count)
 {
   if (all != bb_count)
     {
-      location_t * locus;
-      locus = (stmt != NULL && EXPR_HAS_LOCATION (stmt)
-              ? EXPR_LOCUS (stmt)
-              : &DECL_SOURCE_LOCATION (current_function_decl));
-      error ("%HCorrupted value profile: %s profiler overall count (%d) does not match BB count (%d)",
-            locus, name, (int)all, (int)bb_count);
+      location_t locus;
+      locus = (stmt != NULL)
+              ? gimple_location (stmt)
+              : DECL_SOURCE_LOCATION (current_function_decl);
+      error ("%HCorrupted value profile: %s profiler overall count (%d) "
+            "does not match BB count (%d)", &locus, name, (int)all,
+            (int)bb_count);
       return true;
     }
+
   return false;
 }
 
-/* Tree based transformations. */
+
+/* GIMPLE-based transformations.  */
+
 static bool
-tree_value_profile_transformations (void)
+gimple_value_profile_transformations (void)
 {
   basic_block bb;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   bool changed = false;
 
   FOR_EACH_BB (bb)
     {
-      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
-         tree stmt = bsi_stmt (bsi);
+         gimple stmt = gsi_stmt (gsi);
          histogram_value th = gimple_histogram_value (cfun, stmt);
          if (!th)
            continue;
@@ -478,7 +492,7 @@ tree_value_profile_transformations (void)
          if (dump_file)
            {
              fprintf (dump_file, "Trying transformations on stmt ");
-             print_generic_stmt (dump_file, stmt, TDF_SLIM);
+             print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
              dump_histograms_for_stmt (cfun, dump_file, stmt);
            }
 
@@ -490,19 +504,19 @@ tree_value_profile_transformations (void)
             current statement remain valid (although possibly
             modified) upon return.  */
          if (flag_value_profile_transformations
-             && (tree_mod_subtract_transform (stmt)
-                 || tree_divmod_fixed_value_transform (stmt)
-                 || tree_mod_pow2_value_transform (stmt)
-                 || tree_stringops_transform (&bsi)
-                 || tree_ic_transform (stmt)))
+             && (gimple_mod_subtract_transform (&gsi)
+                 || gimple_divmod_fixed_value_transform (&gsi)
+                 || gimple_mod_pow2_value_transform (&gsi)
+                 || gimple_stringops_transform (&gsi)
+                 || gimple_ic_transform (stmt)))
            {
-             stmt = bsi_stmt (bsi);
+             stmt = gsi_stmt (gsi);
              changed = true;
              /* Original statement may no longer be in the same block. */
-             if (bb != bb_for_stmt (stmt))
+             if (bb != gimple_bb (stmt))
                {
-                 bb = bb_for_stmt (stmt);
-                 bsi = bsi_for_stmt (stmt);
+                 bb = gimple_bb (stmt);
+                 gsi = gsi_for_stmt (stmt);
                }
            }
         }
@@ -516,57 +530,61 @@ tree_value_profile_transformations (void)
   return changed;
 }
 
-/* Generate code for transformation 1 (with OPERATION, operands OP1
-   and OP2, whose value is expected to be VALUE, parent modify-expr STMT and
-   probability of taking the optimal path PROB, which is equivalent to COUNT/ALL
-   within roundoff error).  This generates the result into a temp and returns 
-   the temp; it does not replace or alter the original STMT.  */
+
+/* Generate code for transformation 1 (with parent gimple assignment
+   STMT and probability of taking the optimal path PROB, which is
+   equivalent to COUNT/ALL within roundoff error).  This generates the
+   result into a temp and returns the temp; it does not replace or
+   alter the original STMT.  */
+
 static tree
-tree_divmod_fixed_value (tree stmt, tree operation, 
-                        tree op1, tree op2, tree value, int prob, gcov_type count,
-                        gcov_type all)
+gimple_divmod_fixed_value (gimple stmt, tree value, int prob, gcov_type count,
+                          gcov_type all)
 {
-  tree stmt1, stmt2, stmt3;
+  gimple stmt1, stmt2, stmt3, label1, label2;
   tree tmp1, tmp2, tmpv;
   tree label_decl1 = create_artificial_label ();
   tree label_decl2 = create_artificial_label ();
-  tree label1, label2;
-  tree bb1end, bb2end, bb3end;
+  gimple bb1end, bb2end, bb3end;
   basic_block bb, bb2, bb3, bb4;
-  tree optype = TREE_TYPE (operation);
+  tree optype, op1, op2;
   edge e12, e13, e23, e24, e34;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
+
+  gcc_assert (is_gimple_assign (stmt)
+             && (gimple_assign_rhs_code (stmt) == TRUNC_DIV_EXPR
+                 || gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR));
+
+  optype = TREE_TYPE (gimple_assign_lhs (stmt));
+  op1 = gimple_assign_rhs1 (stmt);
+  op2 = gimple_assign_rhs2 (stmt);
 
-  bb = bb_for_stmt (stmt);
-  bsi = bsi_for_stmt (stmt);
+  bb = gimple_bb (stmt);
+  gsi = gsi_for_stmt (stmt);
 
   tmpv = create_tmp_var (optype, "PROF");
   tmp1 = create_tmp_var (optype, "PROF");
-  stmt1 = build_gimple_modify_stmt (tmpv, fold_convert (optype, value));
-  stmt2 = build_gimple_modify_stmt (tmp1, op2);
-  stmt3 = build3 (COND_EXPR, void_type_node,
-           build2 (NE_EXPR, boolean_type_node, tmp1, tmpv),
-           NULL_TREE, NULL_TREE);
-  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
-  bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
-  bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);
+  stmt1 = gimple_build_assign (tmpv, fold_convert (optype, value));
+  stmt2 = gimple_build_assign (tmp1, op2);
+  stmt3 = gimple_build_cond (NE_EXPR, tmp1, tmpv, NULL_TREE, NULL_TREE);
+  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
+  gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
+  gsi_insert_before (&gsi, stmt3, GSI_SAME_STMT);
   bb1end = stmt3;
 
   tmp2 = create_tmp_var (optype, "PROF");
-  label1 = build1 (LABEL_EXPR, void_type_node, label_decl1);
-  stmt1 = build_gimple_modify_stmt (tmp2,
-                                   build2 (TREE_CODE (operation), optype,
-                                           op1, tmpv));
-  bsi_insert_before (&bsi, label1, BSI_SAME_STMT);
-  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
+  label1 = gimple_build_label (label_decl1);
+  stmt1 = gimple_build_assign_with_ops (gimple_assign_rhs_code (stmt), tmp2,
+                                       op1, tmpv);
+  gsi_insert_before (&gsi, label1, GSI_SAME_STMT);
+  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
   bb2end = stmt1;
 
-  label2 = build1 (LABEL_EXPR, void_type_node, label_decl2);
-  stmt1 = build_gimple_modify_stmt (tmp2,
-                                   build2 (TREE_CODE (operation), optype,
-                                           op1, op2));
-  bsi_insert_before (&bsi, label2, BSI_SAME_STMT);
-  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
+  label2 = gimple_build_label (label_decl2);
+  stmt1 = gimple_build_assign_with_ops (gimple_assign_rhs_code (stmt), tmp2,
+                                       op1, op2);
+  gsi_insert_before (&gsi, label2, GSI_SAME_STMT);
+  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
   bb3end = stmt1;
 
   /* Fix CFG. */
@@ -602,35 +620,33 @@ tree_divmod_fixed_value (tree stmt, tree operation,
   return tmp2;
 }
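
In source terms, transformation 1 above emits roughly the following (an illustrative sketch, not part of the patch), for r = a / b where the profile says b is almost always the constant V:

    if (b == V)
      r = a / V;       /* fast path: constant divisor, strength-reducible */
    else
      r = a / b;       /* fallback keeps the original semantics */
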
 
+
 /* Do transform 1) on INSN if applicable.  */
+
 static bool
-tree_divmod_fixed_value_transform (tree stmt)
+gimple_divmod_fixed_value_transform (gimple_stmt_iterator *si)
 {
   histogram_value histogram;
   enum tree_code code;
   gcov_type val, count, all;
-  tree modify, op, op1, op2, result, value, tree_val;
+  tree result, value, tree_val;
   gcov_type prob;
+  gimple stmt;
 
-  modify = stmt;
-  if (TREE_CODE (stmt) == RETURN_EXPR
-      && TREE_OPERAND (stmt, 0)
-      && TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT)
-    modify = TREE_OPERAND (stmt, 0);
-  if (TREE_CODE (modify) != GIMPLE_MODIFY_STMT)
+  stmt = gsi_stmt (*si);
+  if (gimple_code (stmt) != GIMPLE_ASSIGN)
     return false;
-  op = GIMPLE_STMT_OPERAND (modify, 1);
-  if (!INTEGRAL_TYPE_P (TREE_TYPE (op)))
+
+  if (!INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (stmt))))
     return false;
-  code = TREE_CODE (op);
+
+  code = gimple_assign_rhs_code (stmt);
   
   if (code != TRUNC_DIV_EXPR && code != TRUNC_MOD_EXPR)
     return false;
 
-  op1 = TREE_OPERAND (op, 0);
-  op2 = TREE_OPERAND (op, 1);
-
-  histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_SINGLE_VALUE);
+  histogram = gimple_histogram_value_of_type (cfun, stmt,
+                                             HIST_TYPE_SINGLE_VALUE);
   if (!histogram)
     return false;
 
@@ -643,11 +659,12 @@ tree_divmod_fixed_value_transform (tree stmt)
   /* We require that count is at least half of all; this means
      that for the transformation to fire the value must be constant
      at least 50% of time (and 75% gives the guarantee of usage).  */
-  if (simple_cst_equal (op2, value) != 1 || 2 * count < all
-      || !maybe_hot_bb_p (bb_for_stmt (stmt)))
+  if (simple_cst_equal (gimple_assign_rhs2 (stmt), value) != 1
+      || 2 * count < all
+      || !maybe_hot_bb_p (gimple_bb (stmt)))
     return false;
 
-  if (check_counter (stmt, "value", all, bb_for_stmt (stmt)->count))
+  if (check_counter (stmt, "value", all, gimple_bb (stmt)->count))
     return false;
 
   /* Compute probability of taking the optimal path.  */
@@ -659,7 +676,7 @@ tree_divmod_fixed_value_transform (tree stmt)
   tree_val = build_int_cst_wide (get_gcov_type (),
                                 (unsigned HOST_WIDE_INT) val,
                                 val >> (HOST_BITS_PER_WIDE_INT - 1) >> 1);
-  result = tree_divmod_fixed_value (stmt, op, op1, op2, tree_val, prob, count, all);
+  result = gimple_divmod_fixed_value (stmt, tree_val, prob, count, all);
 
   if (dump_file)
     {
@@ -668,68 +685,68 @@ tree_divmod_fixed_value_transform (tree stmt)
       fprintf (dump_file, "=");
       print_generic_expr (dump_file, tree_val, TDF_SLIM);
       fprintf (dump_file, " transformation on insn ");
-      print_generic_stmt (dump_file, stmt, TDF_SLIM);
+      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
     }
 
-  GIMPLE_STMT_OPERAND (modify, 1) = result;
+  gimple_assign_set_rhs_from_tree (si, result);
 
   return true;
 }
 
-/* Generate code for transformation 2 (with OPERATION, operands OP1
-   and OP2, parent modify-expr STMT and probability of taking the optimal 
-   path PROB, which is equivalent to COUNT/ALL within roundoff error).  
-   This generates the result into a temp and returns 
+/* Generate code for transformation 2 (with parent gimple assign STMT and
+   probability of taking the optimal path PROB, which is equivalent to COUNT/ALL
+   within roundoff error).  This generates the result into a temp and returns 
    the temp; it does not replace or alter the original STMT.  */
 static tree
-tree_mod_pow2 (tree stmt, tree operation, tree op1, tree op2, int prob, 
-              gcov_type count, gcov_type all)
+gimple_mod_pow2 (gimple stmt, int prob, gcov_type count, gcov_type all)
 {
-  tree stmt1, stmt2, stmt3, stmt4;
+  gimple stmt1, stmt2, stmt3, stmt4;
   tree tmp2, tmp3;
   tree label_decl1 = create_artificial_label ();
   tree label_decl2 = create_artificial_label ();
-  tree label1, label2;
-  tree bb1end, bb2end, bb3end;
+  gimple label1, label2;
+  gimple bb1end, bb2end, bb3end;
   basic_block bb, bb2, bb3, bb4;
-  tree optype = TREE_TYPE (operation);
+  tree optype, op1, op2;
   edge e12, e13, e23, e24, e34;
-  block_stmt_iterator bsi;
-  tree result = create_tmp_var (optype, "PROF");
+  gimple_stmt_iterator gsi;
+  tree result;
 
-  bb = bb_for_stmt (stmt);
-  bsi = bsi_for_stmt (stmt);
+  gcc_assert (is_gimple_assign (stmt)
+             && gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR);
 
+  optype = TREE_TYPE (gimple_assign_lhs (stmt));
+  op1 = gimple_assign_rhs1 (stmt);
+  op2 = gimple_assign_rhs2 (stmt);
+
+  bb = gimple_bb (stmt);
+  gsi = gsi_for_stmt (stmt);
+
+  result = create_tmp_var (optype, "PROF");
   tmp2 = create_tmp_var (optype, "PROF");
   tmp3 = create_tmp_var (optype, "PROF");
-  stmt2 = build_gimple_modify_stmt (tmp2, 
-                                   build2 (PLUS_EXPR, optype, op2,
-                                           build_int_cst (optype, -1)));
-  stmt3 = build_gimple_modify_stmt (tmp3,
-                                   build2 (BIT_AND_EXPR, optype, tmp2, op2));
-  stmt4 = build3 (COND_EXPR, void_type_node,
-                 build2 (NE_EXPR, boolean_type_node,
-                         tmp3, build_int_cst (optype, 0)),
-                 NULL_TREE, NULL_TREE);
-  bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
-  bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);
-  bsi_insert_before (&bsi, stmt4, BSI_SAME_STMT);
+  stmt2 = gimple_build_assign_with_ops (PLUS_EXPR, tmp2, op2,
+                                       build_int_cst (optype, -1));
+  stmt3 = gimple_build_assign_with_ops (BIT_AND_EXPR, tmp3, tmp2, op2);
+  stmt4 = gimple_build_cond (NE_EXPR, tmp3, build_int_cst (optype, 0),
+                            NULL_TREE, NULL_TREE);
+  gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
+  gsi_insert_before (&gsi, stmt3, GSI_SAME_STMT);
+  gsi_insert_before (&gsi, stmt4, GSI_SAME_STMT);
   bb1end = stmt4;
 
-  /* tmp2 == op2-1 inherited from previous block */
-  label1 = build1 (LABEL_EXPR, void_type_node, label_decl1);
-  stmt1 = build_gimple_modify_stmt (result,
-                                   build2 (BIT_AND_EXPR, optype, op1, tmp2));
-  bsi_insert_before (&bsi, label1, BSI_SAME_STMT);
-  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
+  /* tmp2 == op2-1 inherited from previous block.  */
+  label1 = gimple_build_label (label_decl1);
+  stmt1 = gimple_build_assign_with_ops (BIT_AND_EXPR, result, op1, tmp2);
+  gsi_insert_before (&gsi, label1, GSI_SAME_STMT);
+  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
   bb2end = stmt1;
 
-  label2 = build1 (LABEL_EXPR, void_type_node, label_decl2);
-  stmt1 = build_gimple_modify_stmt (result,
-                                   build2 (TREE_CODE (operation), optype,
-                                           op1, op2));
-  bsi_insert_before (&bsi, label2, BSI_SAME_STMT);
-  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
+  label2 = gimple_build_label (label_decl2);
+  stmt1 = gimple_build_assign_with_ops (gimple_assign_rhs_code (stmt), result,
+                                       op1, op2);
+  gsi_insert_before (&gsi, label2, GSI_SAME_STMT);
+  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
   bb3end = stmt1;
 
   /* Fix CFG. */
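
The source-level shape of transformation 2 above (an illustrative sketch, not part of the patch), for r = a % b where b is usually a power of two:

    t = b - 1;
    if ((t & b) == 0)      /* b is a power of two */
      r = a & t;
    else
      r = a % b;
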
@@ -767,32 +784,28 @@ tree_mod_pow2 (tree stmt, tree operation, tree op1, tree op2, int prob,
 
 /* Do transform 2) on INSN if applicable.  */
 static bool
-tree_mod_pow2_value_transform (tree stmt)
+gimple_mod_pow2_value_transform (gimple_stmt_iterator *si)
 {
   histogram_value histogram;
   enum tree_code code;
   gcov_type count, wrong_values, all;
-  tree modify, op, op1, op2, result, value;
+  tree lhs_type, result, value;
   gcov_type prob;
+  gimple stmt;
 
-  modify = stmt;
-  if (TREE_CODE (stmt) == RETURN_EXPR
-      && TREE_OPERAND (stmt, 0)
-      && TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT)
-    modify = TREE_OPERAND (stmt, 0);
-  if (TREE_CODE (modify) != GIMPLE_MODIFY_STMT)
+  stmt = gsi_stmt (*si);
+  if (gimple_code (stmt) != GIMPLE_ASSIGN)
     return false;
-  op = GIMPLE_STMT_OPERAND (modify, 1);
-  if (!INTEGRAL_TYPE_P (TREE_TYPE (op)))
+
+  lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
+  if (!INTEGRAL_TYPE_P (lhs_type))
     return false;
-  code = TREE_CODE (op);
+
+  code = gimple_assign_rhs_code (stmt);
   
-  if (code != TRUNC_MOD_EXPR || !TYPE_UNSIGNED (TREE_TYPE (op)))
+  if (code != TRUNC_MOD_EXPR || !TYPE_UNSIGNED (lhs_type))
     return false;
 
-  op1 = TREE_OPERAND (op, 0);
-  op2 = TREE_OPERAND (op, 1);
-
   histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_POW2);
   if (!histogram)
     return false;
@@ -804,20 +817,21 @@ tree_mod_pow2_value_transform (tree stmt)
   gimple_remove_histogram_value (cfun, stmt, histogram);
 
   /* We require that we hit a power of 2 at least half of all evaluations.  */
-  if (simple_cst_equal (op2, value) != 1 || count < wrong_values
-      || !maybe_hot_bb_p (bb_for_stmt (stmt)))
+  if (simple_cst_equal (gimple_assign_rhs2 (stmt), value) != 1
+      || count < wrong_values
+      || !maybe_hot_bb_p (gimple_bb (stmt)))
     return false;
 
   if (dump_file)
     {
       fprintf (dump_file, "Mod power of 2 transformation on insn ");
-      print_generic_stmt (dump_file, stmt, TDF_SLIM);
+      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
     }
 
   /* Compute probability of taking the optimal path.  */
   all = count + wrong_values;
 
-  if (check_counter (stmt, "pow2", all, bb_for_stmt (stmt)->count))
+  if (check_counter (stmt, "pow2", all, gimple_bb (stmt)->count))
     return false;
 
   if (all > 0)
@@ -825,81 +839,80 @@ tree_mod_pow2_value_transform (tree stmt)
   else
     prob = 0;
 
-  result = tree_mod_pow2 (stmt, op, op1, op2, prob, count, all);
+  result = gimple_mod_pow2 (stmt, prob, count, all);
 
-  GIMPLE_STMT_OPERAND (modify, 1) = result;
+  gimple_assign_set_rhs_from_tree (si, result);
 
   return true;
 }
 
-/* Generate code for transformations 3 and 4 (with OPERATION, operands OP1
-   and OP2, parent modify-expr STMT, and NCOUNTS the number of cases to
-   support.  Currently only NCOUNTS==0 or 1 is supported and this is
-   built into this interface.  The probabilities of taking the optimal 
-   paths are PROB1 and PROB2, which are equivalent to COUNT1/ALL and
+/* Generate code for transformations 3 and 4 (with parent gimple assign STMT, and
+   NCOUNTS the number of cases to support.  Currently only NCOUNTS==0 or 1 is
+   supported and this is built into this interface.  The probabilities of taking
+   the optimal paths are PROB1 and PROB2, which are equivalent to COUNT1/ALL and
    COUNT2/ALL respectively within roundoff error).  This generates the 
    result into a temp and returns the temp; it does not replace or alter 
    the original STMT.  */
 /* FIXME: Generalize the interface to handle NCOUNTS > 1.  */
 
 static tree
-tree_mod_subtract (tree stmt, tree operation, tree op1, tree op2, 
-                   int prob1, int prob2, int ncounts,
-                   gcov_type count1, gcov_type count2, gcov_type all)
+gimple_mod_subtract (gimple stmt, int prob1, int prob2, int ncounts,
+                    gcov_type count1, gcov_type count2, gcov_type all)
 {
-  tree stmt1, stmt2, stmt3;
+  gimple stmt1, stmt2, stmt3;
   tree tmp1;
   tree label_decl1 = create_artificial_label ();
   tree label_decl2 = create_artificial_label ();
   tree label_decl3 = create_artificial_label ();
-  tree label1, label2, label3;
-  tree bb1end, bb2end = NULL_TREE, bb3end;
+  gimple label1, label2, label3;
+  gimple bb1end, bb2end = NULL, bb3end;
   basic_block bb, bb2, bb3, bb4;
-  tree optype = TREE_TYPE (operation);
+  tree optype, op1, op2;
   edge e12, e23 = 0, e24, e34, e14;
-  block_stmt_iterator bsi;
-  tree result = create_tmp_var (optype, "PROF");
+  gimple_stmt_iterator gsi;
+  tree result;
+
+  gcc_assert (is_gimple_assign (stmt)
+             && gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR);
 
-  bb = bb_for_stmt (stmt);
-  bsi = bsi_for_stmt (stmt);
+  optype = TREE_TYPE (gimple_assign_lhs (stmt));
+  op1 = gimple_assign_rhs1 (stmt);
+  op2 = gimple_assign_rhs2 (stmt);
 
+  bb = gimple_bb (stmt);
+  gsi = gsi_for_stmt (stmt);
+
+  result = create_tmp_var (optype, "PROF");
   tmp1 = create_tmp_var (optype, "PROF");
-  stmt1 = build_gimple_modify_stmt (result, op1);
-  stmt2 = build_gimple_modify_stmt (tmp1, op2);
-  stmt3 = build3 (COND_EXPR, void_type_node,
-           build2 (LT_EXPR, boolean_type_node, result, tmp1),
-           NULL_TREE, NULL_TREE);
-  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
-  bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
-  bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);
+  stmt1 = gimple_build_assign (result, op1);
+  stmt2 = gimple_build_assign (tmp1, op2);
+  stmt3 = gimple_build_cond (LT_EXPR, result, tmp1, NULL_TREE, NULL_TREE);
+  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
+  gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
+  gsi_insert_before (&gsi, stmt3, GSI_SAME_STMT);
   bb1end = stmt3;
 
   if (ncounts) /* Assumed to be 0 or 1 */
     {
-      label1 = build1 (LABEL_EXPR, void_type_node, label_decl1);
-      stmt1 = build_gimple_modify_stmt (result,
-                                       build2 (MINUS_EXPR, optype,
-                                               result, tmp1));
-      stmt2 = build3 (COND_EXPR, void_type_node,
-               build2 (LT_EXPR, boolean_type_node, result, tmp1),
-               NULL_TREE, NULL_TREE);
-      bsi_insert_before (&bsi, label1, BSI_SAME_STMT);
-      bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
-      bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
+      label1 = gimple_build_label (label_decl1);
+      stmt1 = gimple_build_assign_with_ops (MINUS_EXPR, result, result, tmp1);
+      stmt2 = gimple_build_cond (LT_EXPR, result, tmp1, NULL_TREE, NULL_TREE);
+      gsi_insert_before (&gsi, label1, GSI_SAME_STMT);
+      gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
+      gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
       bb2end = stmt2;
     }
 
   /* Fallback case. */
-  label2 = build1 (LABEL_EXPR, void_type_node, label_decl2);
-  stmt1 = build_gimple_modify_stmt (result,
-                                   build2 (TREE_CODE (operation), optype,
-                                           result, tmp1));
-  bsi_insert_before (&bsi, label2, BSI_SAME_STMT);
-  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
+  label2 = gimple_build_label (label_decl2);
+  stmt1 = gimple_build_assign_with_ops (gimple_assign_rhs_code (stmt), result,
+                                       result, tmp1);
+  gsi_insert_before (&gsi, label2, GSI_SAME_STMT);
+  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
   bb3end = stmt1;
 
-  label3 = build1 (LABEL_EXPR, void_type_node, label_decl3);
-  bsi_insert_before (&bsi, label3, BSI_SAME_STMT);
+  label3 = gimple_build_label (label_decl3);
+  gsi_insert_before (&gsi, label3, GSI_SAME_STMT);
 
   /* Fix CFG. */
   /* Edge e23 connects bb2 to bb3, etc. */
@@ -947,36 +960,34 @@ tree_mod_subtract (tree stmt, tree operation, tree op1, tree op2,
   return result;
 }
 
-/* Do transforms 3) and 4) on INSN if applicable.  */
+
+/* Do transforms 3) and 4) on the statement pointed-to by SI if applicable.  */
+
 static bool
-tree_mod_subtract_transform (tree stmt)
+gimple_mod_subtract_transform (gimple_stmt_iterator *si)
 {
   histogram_value histogram;
   enum tree_code code;
   gcov_type count, wrong_values, all;
-  tree modify, op, op1, op2, result, value;
+  tree lhs_type, result, value;
   gcov_type prob1, prob2;
   unsigned int i, steps;
   gcov_type count1, count2;
+  gimple stmt;
 
-  modify = stmt;
-  if (TREE_CODE (stmt) == RETURN_EXPR
-      && TREE_OPERAND (stmt, 0)
-      && TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT)
-    modify = TREE_OPERAND (stmt, 0);
-  if (TREE_CODE (modify) != GIMPLE_MODIFY_STMT)
+  stmt = gsi_stmt (*si);
+  if (gimple_code (stmt) != GIMPLE_ASSIGN)
     return false;
-  op = GIMPLE_STMT_OPERAND (modify, 1);
-  if (!INTEGRAL_TYPE_P (TREE_TYPE (op)))
+
+  lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
+  if (!INTEGRAL_TYPE_P (lhs_type))
     return false;
-  code = TREE_CODE (op);
+
+  code = gimple_assign_rhs_code (stmt);
   
-  if (code != TRUNC_MOD_EXPR || !TYPE_UNSIGNED (TREE_TYPE (op)))
+  if (code != TRUNC_MOD_EXPR || !TYPE_UNSIGNED (lhs_type))
     return false;
 
-  op1 = TREE_OPERAND (op, 0);
-  op2 = TREE_OPERAND (op, 1);
-
   histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_INTERVAL);
   if (!histogram)
     return false;
@@ -995,7 +1006,7 @@ tree_mod_subtract_transform (tree stmt)
   count2 = histogram->hvalue.counters[1];
 
   /* Compute probability of taking the optimal path.  */
-  if (check_counter (stmt, "interval", all, bb_for_stmt (stmt)->count))
+  if (check_counter (stmt, "interval", all, gimple_bb (stmt)->count))
     {
       gimple_remove_histogram_value (cfun, stmt, histogram);
       return false;
@@ -1011,14 +1022,14 @@ tree_mod_subtract_transform (tree stmt)
        break;
     }
   if (i == steps
-      || !maybe_hot_bb_p (bb_for_stmt (stmt)))
+      || !maybe_hot_bb_p (gimple_bb (stmt)))
     return false;
 
   gimple_remove_histogram_value (cfun, stmt, histogram);
   if (dump_file)
     {
       fprintf (dump_file, "Mod subtract transformation on insn ");
-      print_generic_stmt (dump_file, stmt, TDF_SLIM);
+      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
     }
 
   /* Compute probability of taking the optimal path(s).  */
@@ -1034,10 +1045,9 @@ tree_mod_subtract_transform (tree stmt)
 
   /* In practice, "steps" is always 2.  This interface reflects this,
      and will need to be changed if "steps" can change.  */
-  result = tree_mod_subtract (stmt, op, op1, op2, prob1, prob2, i,
-                             count1, count2, all);
+  result = gimple_mod_subtract (stmt, prob1, prob2, i, count1, count2, all);
 
-  GIMPLE_STMT_OPERAND (modify, 1) = result;
+  gimple_assign_set_rhs_from_tree (si, result);
 
   return true;
 }
@@ -1082,52 +1092,48 @@ find_func_by_pid (int   pid)
     old call
  */
 
-static tree
-tree_ic (tree stmt, tree call, struct cgraph_node* direct_call, 
-        int prob, gcov_type count, gcov_type all)
+static gimple
+gimple_ic (gimple stmt, gimple call, struct cgraph_node *direct_call, 
+          int prob, gcov_type count, gcov_type all)
 {
-  tree stmt1, stmt2, stmt3;
+  gimple stmt1, stmt2, stmt3;
   tree tmp1, tmpv, tmp;
   tree label_decl1 = create_artificial_label ();
   tree label_decl2 = create_artificial_label ();
-  tree label1, label2;
-  tree bb1end, bb2end, bb3end;
-  tree new_call;
+  gimple label1, label2;
+  gimple bb1end, bb2end, bb3end;
   basic_block bb, bb2, bb3, bb4;
   tree optype = build_pointer_type (void_type_node);
   edge e12, e13, e23, e24, e34;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   int region;
 
-  bb = bb_for_stmt (stmt);
-  bsi = bsi_for_stmt (stmt);
+  bb = gimple_bb (stmt);
+  gsi = gsi_for_stmt (stmt);
 
   tmpv = create_tmp_var (optype, "PROF");
   tmp1 = create_tmp_var (optype, "PROF");
-  stmt1 = build_gimple_modify_stmt (tmpv, 
-                                   unshare_expr (CALL_EXPR_FN (call)));
+  stmt1 = gimple_build_assign (tmpv, unshare_expr (gimple_call_fn (call)));
+
   tmp = fold_convert (optype, build_addr (direct_call->decl, 
                                          current_function_decl));
-  stmt2 = build_gimple_modify_stmt (tmp1, tmp);
-  stmt3 = build3 (COND_EXPR, void_type_node,
-                 build2 (NE_EXPR, boolean_type_node, tmp1, tmpv),
-                 NULL_TREE, NULL_TREE);
-  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
-  bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
-  bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);
+  stmt2 = gimple_build_assign (tmp1, tmp);
+  stmt3 = gimple_build_cond (NE_EXPR, tmp1, tmpv, NULL_TREE, NULL_TREE);
+  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
+  gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
+  gsi_insert_before (&gsi, stmt3, GSI_SAME_STMT);
   bb1end = stmt3;
 
-  label1 = build1 (LABEL_EXPR, void_type_node, label_decl1);
-  stmt1 = unshare_expr (stmt);
-  new_call = get_call_expr_in (stmt1);
-  CALL_EXPR_FN (new_call) = build_addr (direct_call->decl, 
-                                       current_function_decl);
-  bsi_insert_before (&bsi, label1, BSI_SAME_STMT);
-  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
+  label1 = gimple_build_label (label_decl1);
+  stmt1 = gimple_copy (stmt);
+  gimple_call_set_fn (stmt1,
+                     build_addr (direct_call->decl, current_function_decl));
+  gsi_insert_before (&gsi, label1, GSI_SAME_STMT);
+  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
   bb2end = stmt1;
 
-  label2 = build1 (LABEL_EXPR, void_type_node, label_decl2);
-  bsi_insert_before (&bsi, label2, BSI_SAME_STMT);
+  label2 = gimple_build_label (label_decl2);
+  gsi_insert_before (&gsi, label2, GSI_SAME_STMT);
   bb3end = stmt;
 
   /* Fix CFG. */
@@ -1161,15 +1167,15 @@ tree_ic (tree stmt, tree call, struct cgraph_node* direct_call,
 
   /* Fix eh edges */
   region = lookup_stmt_eh_region (stmt);
-  if (region >=0 && tree_could_throw_p (stmt1))
+  if (region >= 0 && stmt_could_throw_p (stmt1))
     {
       add_stmt_to_eh_region (stmt1, region);
       make_eh_edges (stmt1);
     }
 
-  if (region >=0 && tree_could_throw_p (stmt))
+  if (region >= 0 && stmt_could_throw_p (stmt))
     {
-      tree_purge_dead_eh_edges (bb4);
+      gimple_purge_dead_eh_edges (bb4);
       make_eh_edges (stmt);
     }
 
@@ -1183,22 +1189,21 @@ tree_ic (tree stmt, tree call, struct cgraph_node* direct_call,
  */
 
 static bool
-tree_ic_transform (tree stmt)
+gimple_ic_transform (gimple stmt)
 {
   histogram_value histogram;
   gcov_type val, count, all;
   gcov_type prob;
-  tree call, callee, modify;
+  tree callee;
+  gimple modify;
   struct cgraph_node *direct_call;
   
-  call = get_call_expr_in (stmt);
-
-  if (!call || TREE_CODE (call) != CALL_EXPR)
+  if (gimple_code (stmt) != GIMPLE_CALL)
     return false;
 
-  callee = CALL_EXPR_FN (call);
+  callee = gimple_call_fn (stmt);
 
-  if (TREE_CODE (callee) == ADDR_EXPR)
+  if (TREE_CODE (callee) == FUNCTION_DECL)
     return false;
 
   histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_INDIR_CALL);
@@ -1222,18 +1227,18 @@ tree_ic_transform (tree stmt)
   if (direct_call == NULL)
     return false;
 
-  modify = tree_ic (stmt, call, direct_call, prob, count, all);
+  modify = gimple_ic (stmt, stmt, direct_call, prob, count, all);
 
   if (dump_file)
     {
       fprintf (dump_file, "Indirect call -> direct call ");
-      print_generic_expr (dump_file, call, TDF_SLIM);
+      print_generic_expr (dump_file, gimple_call_fn (stmt), TDF_SLIM);
       fprintf (dump_file, "=> ");
       print_generic_expr (dump_file, direct_call->decl, TDF_SLIM);
       fprintf (dump_file, " transformation on insn ");
-      print_generic_stmt (dump_file, stmt, TDF_SLIM);
+      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
       fprintf (dump_file, " to ");
-      print_generic_stmt (dump_file, modify, TDF_SLIM);
+      print_gimple_stmt (dump_file, modify, 0, TDF_SLIM);
       fprintf (dump_file, "hist->count "HOST_WIDEST_INT_PRINT_DEC
               " hist->all "HOST_WIDEST_INT_PRINT_DEC"\n", count, all);
     }
@@ -1243,7 +1248,7 @@ tree_ic_transform (tree stmt)
 
 /* Return true if the stringop CALL with FNDECL shall be profiled.  */
 static bool
-interesting_stringop_to_profile_p (tree fndecl, tree call)
+interesting_stringop_to_profile_p (tree fndecl, gimple call)
 {
   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
 
@@ -1255,16 +1260,14 @@ interesting_stringop_to_profile_p (tree fndecl, tree call)
     {
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMPCPY:
-       return validate_arglist (call,
-                               POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE,
-                               VOID_TYPE);
+       return validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
+                                      INTEGER_TYPE, VOID_TYPE);
      case BUILT_IN_MEMSET:
-       return validate_arglist (call,
-                               POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
-                               VOID_TYPE);
+       return validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
+                                     INTEGER_TYPE, VOID_TYPE);
      case BUILT_IN_BZERO:
-       return validate_arglist (call, POINTER_TYPE, INTEGER_TYPE,
-                               VOID_TYPE);
+       return validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
+                                      VOID_TYPE);
      default:
        gcc_unreachable ();
     }
@@ -1279,27 +1282,26 @@ interesting_stringop_to_profile_p (tree fndecl, tree call)
    assuming constant propagation of VALUE will happen later.
 */
 static void
-tree_stringop_fixed_value (tree stmt, tree value, int prob, gcov_type count,
+gimple_stringop_fixed_value (gimple stmt, tree value, int prob, gcov_type count,
                           gcov_type all)
 {
-  tree stmt1, stmt2, stmt3;
+  gimple stmt1, stmt2, stmt3;
   tree tmp1, tmpv;
   tree label_decl1 = create_artificial_label ();
   tree label_decl2 = create_artificial_label ();
-  tree label1, label2;
-  tree bb1end, bb2end;
+  gimple label1, label2;
+  gimple bb1end, bb2end;
   basic_block bb, bb2, bb3, bb4;
   edge e12, e13, e23, e24, e34;
-  block_stmt_iterator bsi;
-  tree call = get_call_expr_in (stmt);
-  tree blck_size = CALL_EXPR_ARG (call, 2);
+  gimple_stmt_iterator gsi;
+  tree blck_size = gimple_call_arg (stmt, 2);
   tree optype = TREE_TYPE (blck_size);
   int region;
 
-  bb = bb_for_stmt (stmt);
-  bsi = bsi_for_stmt (stmt);
+  bb = gimple_bb (stmt);
+  gsi = gsi_for_stmt (stmt);
 
-  if (bsi_end_p (bsi))
+  if (gsi_end_p (gsi))
     {
       edge_iterator ei;
       for (ei = ei_start (bb->succs); (e34 = ei_safe_edge (ei)); )
@@ -1309,34 +1311,31 @@ tree_stringop_fixed_value (tree stmt, tree value, int prob, gcov_type count,
   else
     {
       e34 = split_block (bb, stmt);
-      bsi = bsi_for_stmt (stmt);
+      gsi = gsi_for_stmt (stmt);
     }
   bb4 = e34->dest;
 
   tmpv = create_tmp_var (optype, "PROF");
   tmp1 = create_tmp_var (optype, "PROF");
-  stmt1 = build_gimple_modify_stmt (tmpv, fold_convert (optype, value));
-  stmt2 = build_gimple_modify_stmt (tmp1, blck_size);
-  stmt3 = build3 (COND_EXPR, void_type_node,
-           build2 (NE_EXPR, boolean_type_node, tmp1, tmpv),
-           NULL_TREE, NULL_TREE);
-  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
-  bsi_insert_before (&bsi, stmt2, BSI_SAME_STMT);
-  bsi_insert_before (&bsi, stmt3, BSI_SAME_STMT);
+  stmt1 = gimple_build_assign (tmpv, fold_convert (optype, value));
+  stmt2 = gimple_build_assign (tmp1, blck_size);
+  stmt3 = gimple_build_cond (NE_EXPR, tmp1, tmpv, NULL_TREE, NULL_TREE);
+  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
+  gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
+  gsi_insert_before (&gsi, stmt3, GSI_SAME_STMT);
   bb1end = stmt3;
 
-  label1 = build1 (LABEL_EXPR, void_type_node, label_decl1);
-  stmt1 = unshare_expr (stmt);
-  call = get_call_expr_in (stmt1);
-  CALL_EXPR_ARG (call, 2) = value;
-  bsi_insert_before (&bsi, label1, BSI_SAME_STMT);
-  bsi_insert_before (&bsi, stmt1, BSI_SAME_STMT);
+  label1 = gimple_build_label (label_decl1);
+  stmt1 = gimple_copy (stmt);
+  gimple_call_set_arg (stmt1, 2, value);
+  gsi_insert_before (&gsi, label1, GSI_SAME_STMT);
+  gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
   region = lookup_stmt_eh_region (stmt);
   if (region >= 0)
     add_stmt_to_eh_region (stmt1, region);
   bb2end = stmt1;
-  label2 = build1 (LABEL_EXPR, void_type_node, label_decl2);
-  bsi_insert_before (&bsi, label2, BSI_SAME_STMT);
+  label2 = gimple_build_label (label_decl2);
+  gsi_insert_before (&gsi, label2, GSI_SAME_STMT);
 
   /* Fix CFG. */
   /* Edge e23 connects bb2 to bb3, etc. */
@@ -1369,10 +1368,9 @@ tree_stringop_fixed_value (tree stmt, tree value, int prob, gcov_type count,
 /* Find values inside STMT for that we want to measure histograms for
    division/modulo optimization.  */
 static bool
-tree_stringops_transform (block_stmt_iterator *bsi)
+gimple_stringops_transform (gimple_stmt_iterator *gsi)
 {
-  tree stmt = bsi_stmt (*bsi);
-  tree call = get_call_expr_in (stmt);
+  gimple stmt = gsi_stmt (*gsi);
   tree fndecl;
   tree blck_size;
   enum built_in_function fcode;
@@ -1384,19 +1382,19 @@ tree_stringops_transform (block_stmt_iterator *bsi)
   gcov_type prob;
   tree tree_val;
 
-  if (!call)
+  if (gimple_code (stmt) != GIMPLE_CALL)
     return false;
-  fndecl = get_callee_fndecl (call);
+  fndecl = gimple_call_fndecl (stmt);
   if (!fndecl)
     return false;
   fcode = DECL_FUNCTION_CODE (fndecl);
-  if (!interesting_stringop_to_profile_p (fndecl, call))
+  if (!interesting_stringop_to_profile_p (fndecl, stmt))
     return false;
 
   if (fcode == BUILT_IN_BZERO)
-    blck_size = CALL_EXPR_ARG (call, 1);
+    blck_size = gimple_call_arg (stmt, 1);
   else
-    blck_size = CALL_EXPR_ARG (call, 2);
+    blck_size = gimple_call_arg (stmt, 2);
   if (TREE_CODE (blck_size) == INTEGER_CST)
     return false;
 
@@ -1411,28 +1409,28 @@ tree_stringops_transform (block_stmt_iterator *bsi)
   /* We require that count is at least half of all; this means
      that for the transformation to fire the value must be constant
      at least 80% of time.  */
-  if ((6 * count / 5) < all || !maybe_hot_bb_p (bb_for_stmt (stmt)))
+  if ((6 * count / 5) < all || !maybe_hot_bb_p (gimple_bb (stmt)))
     return false;
-  if (check_counter (stmt, "value", all, bb_for_stmt (stmt)->count))
+  if (check_counter (stmt, "value", all, gimple_bb (stmt)->count))
     return false;
   if (all > 0)
     prob = (count * REG_BR_PROB_BASE + all / 2) / all;
   else
     prob = 0;
-  dest = CALL_EXPR_ARG (call, 0);
+  dest = gimple_call_arg (stmt, 0);
   dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
   switch (fcode)
     {
     case BUILT_IN_MEMCPY:
     case BUILT_IN_MEMPCPY:
-      src = CALL_EXPR_ARG (call, 1);
+      src = gimple_call_arg (stmt, 1);
       src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
       if (!can_move_by_pieces (val, MIN (dest_align, src_align)))
        return false;
       break;
     case BUILT_IN_MEMSET:
       if (!can_store_by_pieces (val, builtin_memset_read_str,
-                               CALL_EXPR_ARG (call, 1),
+                               gimple_call_arg (stmt, 1),
                                dest_align, true))
        return false;
       break;
@@ -1452,15 +1450,15 @@ tree_stringops_transform (block_stmt_iterator *bsi)
     {
       fprintf (dump_file, "Single value %i stringop transformation on ",
               (int)val);
-      print_generic_stmt (dump_file, stmt, TDF_SLIM);
+      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
     }
-  tree_stringop_fixed_value (stmt, tree_val, prob, count, all);
+  gimple_stringop_fixed_value (stmt, tree_val, prob, count, all);
   
   return true;
 }
 
 void
-stringop_block_profile (tree stmt, unsigned int *expected_align,
+stringop_block_profile (gimple stmt, unsigned int *expected_align,
                        HOST_WIDE_INT *expected_size)
 {
   histogram_value histogram;
@@ -1520,31 +1518,25 @@ struct value_prof_hooks {
 /* Find values inside STMT for that we want to measure histograms for
    division/modulo optimization.  */
 static void
-tree_divmod_values_to_profile (tree stmt, histogram_values *values)
+gimple_divmod_values_to_profile (gimple stmt, histogram_values *values)
 {
-  tree assign, lhs, rhs, divisor, op0, type;
+  tree lhs, divisor, op0, type;
   histogram_value hist;
 
-  if (TREE_CODE (stmt) == RETURN_EXPR)
-    assign = TREE_OPERAND (stmt, 0);
-  else
-    assign = stmt;
-
-  if (!assign
-      || TREE_CODE (assign) != GIMPLE_MODIFY_STMT)
+  if (gimple_code (stmt) != GIMPLE_ASSIGN)
     return;
-  lhs = GIMPLE_STMT_OPERAND (assign, 0);
+
+  lhs = gimple_assign_lhs (stmt);
   type = TREE_TYPE (lhs);
   if (!INTEGRAL_TYPE_P (type))
     return;
 
-  rhs = GIMPLE_STMT_OPERAND (assign, 1);
-  switch (TREE_CODE (rhs))
+  switch (gimple_assign_rhs_code (stmt))
     {
     case TRUNC_DIV_EXPR:
     case TRUNC_MOD_EXPR:
-      divisor = TREE_OPERAND (rhs, 1);
-      op0 = TREE_OPERAND (rhs, 0);
+      divisor = gimple_assign_rhs2 (stmt);
+      op0 = gimple_assign_rhs1 (stmt);
 
       VEC_reserve (histogram_value, heap, *values, 3);
 
@@ -1552,12 +1544,13 @@ tree_divmod_values_to_profile (tree stmt, histogram_values *values)
        /* Check for the case where the divisor is the same value most
           of the time.  */
        VEC_quick_push (histogram_value, *values,
-                       gimple_alloc_histogram_value (cfun, HIST_TYPE_SINGLE_VALUE,
+                       gimple_alloc_histogram_value (cfun,
+                                                     HIST_TYPE_SINGLE_VALUE,
                                                      stmt, divisor));
 
       /* For mod, check whether it is not often a noop (or replaceable by
         a few subtractions).  */
-      if (TREE_CODE (rhs) == TRUNC_MOD_EXPR
+      if (gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR
          && TYPE_UNSIGNED (type))
        {
           tree val;
@@ -1584,19 +1577,16 @@ tree_divmod_values_to_profile (tree stmt, histogram_values *values)
    indirect/virtual call optimization. */ 
 
 static void
-tree_indirect_call_to_profile (tree stmt, histogram_values *values)
+gimple_indirect_call_to_profile (gimple stmt, histogram_values *values)
 {
-  tree                 call;
-  tree                 callee;
-
-  call = get_call_expr_in (stmt);
+  tree callee;
 
-  if (!call || TREE_CODE (call) != CALL_EXPR)
+  if (gimple_code (stmt) != GIMPLE_CALL)
     return;
 
-  callee = CALL_EXPR_FN (call);
+  callee = gimple_call_fn (stmt);
   
-  if (TREE_CODE (callee) == ADDR_EXPR)
+  if (TREE_CODE (callee) == FUNCTION_DECL)
     return;
 
   VEC_reserve (histogram_value, heap, *values, 3);
@@ -1611,29 +1601,28 @@ tree_indirect_call_to_profile (tree stmt, histogram_values *values)
 /* Find values inside STMT for that we want to measure histograms for
    string operations.  */
 static void
-tree_stringops_values_to_profile (tree stmt, histogram_values *values)
+gimple_stringops_values_to_profile (gimple stmt, histogram_values *values)
 {
-  tree call = get_call_expr_in (stmt);
   tree fndecl;
   tree blck_size;
   tree dest;
   enum built_in_function fcode;
 
-  if (!call)
+  if (gimple_code (stmt) != GIMPLE_CALL)
     return;
-  fndecl = get_callee_fndecl (call);
+  fndecl = gimple_call_fndecl (stmt);
   if (!fndecl)
     return;
   fcode = DECL_FUNCTION_CODE (fndecl);
 
-  if (!interesting_stringop_to_profile_p (fndecl, call))
+  if (!interesting_stringop_to_profile_p (fndecl, stmt))
     return;
 
-  dest = CALL_EXPR_ARG (call, 0);
+  dest = gimple_call_arg (stmt, 0);
   if (fcode == BUILT_IN_BZERO)
-    blck_size = CALL_EXPR_ARG (call, 1);
+    blck_size = gimple_call_arg (stmt, 1);
   else
-    blck_size = CALL_EXPR_ARG (call, 2);
+    blck_size = gimple_call_arg (stmt, 2);
 
   if (TREE_CODE (blck_size) != INTEGER_CST)
     {
@@ -1654,28 +1643,28 @@ tree_stringops_values_to_profile (tree stmt, histogram_values *values)
    them to list VALUES.  */
 
 static void
-tree_values_to_profile (tree stmt, histogram_values *values)
+gimple_values_to_profile (gimple stmt, histogram_values *values)
 {
   if (flag_value_profile_transformations)
     {
-      tree_divmod_values_to_profile (stmt, values);
-      tree_stringops_values_to_profile (stmt, values);
-      tree_indirect_call_to_profile (stmt, values);
+      gimple_divmod_values_to_profile (stmt, values);
+      gimple_stringops_values_to_profile (stmt, values);
+      gimple_indirect_call_to_profile (stmt, values);
     }
 }
 
 static void
-tree_find_values_to_profile (histogram_values *values)
+gimple_find_values_to_profile (histogram_values *values)
 {
   basic_block bb;
-  block_stmt_iterator bsi;
+  gimple_stmt_iterator gsi;
   unsigned i;
   histogram_value hist = NULL;
 
   *values = NULL;
   FOR_EACH_BB (bb)
-    for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
-      tree_values_to_profile (bsi_stmt (bsi), values);
+    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+      gimple_values_to_profile (gsi_stmt (gsi), values);
   
   for (i = 0; VEC_iterate (histogram_value, *values, i, hist); i++)
     {
@@ -1715,22 +1704,22 @@ tree_find_values_to_profile (histogram_values *values)
       if (dump_file)
         {
          fprintf (dump_file, "Stmt ");
-          print_generic_expr (dump_file, hist->hvalue.stmt, TDF_SLIM);
+          print_gimple_stmt (dump_file, hist->hvalue.stmt, 0, TDF_SLIM);
          dump_histogram_value (dump_file, hist);
         }
     }
 }
 
-static struct value_prof_hooks tree_value_prof_hooks = {
-  tree_find_values_to_profile,
-  tree_value_profile_transformations
+static struct value_prof_hooks gimple_value_prof_hooks = {
+  gimple_find_values_to_profile,
+  gimple_value_profile_transformations
 };
 
 void
-tree_register_value_prof_hooks (void)
+gimple_register_value_prof_hooks (void)
 {
   gcc_assert (current_ir_type () == IR_GIMPLE);
-  value_prof_hooks = &tree_value_prof_hooks;
+  value_prof_hooks = &gimple_value_prof_hooks;
 }
 \f
 /* IR-independent entry points.  */
index 2ab9df1..e66f4e2 100644
@@ -46,7 +46,7 @@ struct histogram_value_t
   struct
     {
       tree value;              /* The value to profile.  */
-      tree stmt;               /* Insn containing the value.  */
+      gimple stmt;             /* Insn containing the value.  */
       gcov_type *counters;                     /* Pointer to first counter.  */
       struct histogram_value_t *next;          /* Linked list pointer.  */
     } hvalue;
@@ -71,7 +71,7 @@ DEF_VEC_ALLOC_P(histogram_value,heap);
 typedef VEC(histogram_value,heap) *histogram_values;
 
 /* Hooks registration.  */
-extern void tree_register_value_prof_hooks (void);
+extern void gimple_register_value_prof_hooks (void);
 
 /* IR-independent entry points.  */
 extern void find_values_to_profile (histogram_values *);
@@ -109,17 +109,19 @@ struct profile_hooks {
   void (*gen_ior_profiler) (histogram_value, unsigned, unsigned);
 };
 
-histogram_value gimple_histogram_value (struct function *, tree);
-histogram_value gimple_histogram_value_of_type (struct function *, tree, enum hist_type);
-void gimple_add_histogram_value (struct function *, tree, histogram_value);
-void dump_histograms_for_stmt (struct function *, FILE *, tree);
-void gimple_remove_histogram_value (struct function *, tree, histogram_value);
-void gimple_remove_stmt_histograms (struct function *, tree);
-void gimple_duplicate_stmt_histograms (struct function *, tree, struct function *, tree);
-void gimple_move_stmt_histograms (struct function *, tree, tree);
+histogram_value gimple_histogram_value (struct function *, gimple);
+histogram_value gimple_histogram_value_of_type (struct function *, gimple,
+                                               enum hist_type);
+void gimple_add_histogram_value (struct function *, gimple, histogram_value);
+void dump_histograms_for_stmt (struct function *, FILE *, gimple);
+void gimple_remove_histogram_value (struct function *, gimple, histogram_value);
+void gimple_remove_stmt_histograms (struct function *, gimple);
+void gimple_duplicate_stmt_histograms (struct function *, gimple,
+                                      struct function *, gimple);
+void gimple_move_stmt_histograms (struct function *, gimple, gimple);
 void verify_histograms (void);
 void free_histograms (void);
-void stringop_block_profile (tree, unsigned int *, HOST_WIDE_INT *);
+void stringop_block_profile (gimple, unsigned int *, HOST_WIDE_INT *);
 
 /* In profile.c.  */
 extern void init_branch_prob (void);
index daa0c15..1d1cc9e 100644
@@ -32,7 +32,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "debug.h" 
 #include "target.h"
 #include "output.h"
-#include "tree-gimple.h"
+#include "gimple.h"
 #include "tree-flow.h"
 
 /*  This file contains basic routines manipulating variable pool.