aarch64 - Set the mode for the unspec in speculation_tracker insn.
[platform/upstream/linaro-gcc.git] / gcc / tree-object-size.c
1 /* __builtin_object_size (ptr, object_size_type) computation
2    Copyright (C) 2004-2016 Free Software Foundation, Inc.
3    Contributed by Jakub Jelinek <jakub@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "gimple-fold.h"
33 #include "gimple-iterator.h"
34 #include "tree-cfg.h"
35
/* State shared by the object-size collection passes below.  */
struct object_size_info
{
  /* Which __builtin_object_size variant (0-3) is being computed.  */
  int object_size_type;
  /* SSA names already visited, and those found in dependency cycles
     that need a later reexamination pass.  */
  bitmap visited, reexamine;
  /* Current pass: 0 = collect, 1 = plus-in-loop detection,
     2 = fixpoint reexamination.  */
  int pass;
  /* Set whenever a reexamination pass updates a recorded size, so the
     caller iterates to a fixpoint.  */
  bool changed;
  /* Scratch arrays used only by the pass-1 loop detection
     (check_for_plus_in_loops); NULL otherwise.  */
  unsigned int *depths;
  unsigned int *stack, *tos;
};
45
/* Value returned when the size cannot be determined: all-ones for the
   maximum variants (types 0 and 1) and 0 for the minimum variants
   (types 2 and 3), matching the documented __builtin_object_size
   failure values.  */
static const unsigned HOST_WIDE_INT unknown[4] = {
  HOST_WIDE_INT_M1U,
  HOST_WIDE_INT_M1U,
  0,
  0
};
52
/* Forward declarations; collect_object_sizes_for and several of the
   helpers below are mutually recursive.  */
static tree compute_object_offset (const_tree, const_tree);
static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
                                                const_tree, int);
static unsigned HOST_WIDE_INT alloc_object_size (const gcall *, int);
static tree pass_through_call (const gcall *);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
                                unsigned HOST_WIDE_INT);
static bool plus_stmt_object_size (struct object_size_info *, tree, gimple *);
static bool cond_expr_object_size (struct object_size_info *, tree, gimple *);
static void init_offset_limit (void);
static void check_for_plus_in_loops (struct object_size_info *, tree);
static void check_for_plus_in_loops_1 (struct object_size_info *, tree,
                                       unsigned int);
68
/* object_sizes[0] is upper bound for number of bytes till the end of
   the object.
   object_sizes[1] is upper bound for number of bytes till the end of
   the subobject (innermost array or field with address taken).
   object_sizes[2] is lower bound for number of bytes till the end of
   the object and object_sizes[3] lower bound for subobject.
   Each vector is indexed by SSA_NAME_VERSION.  */
static vec<unsigned HOST_WIDE_INT> object_sizes[4];

/* Bitmaps what object sizes have been computed already, one per
   object_size_type, indexed by SSA_NAME_VERSION.  */
static bitmap computed[4];

/* Maximum value of offset we consider to be addition; anything at or
   beyond it is treated as pointer subtraction.  */
static unsigned HOST_WIDE_INT offset_limit;
82
83
84 /* Initialize OFFSET_LIMIT variable.  */
85 static void
86 init_offset_limit (void)
87 {
88   if (tree_fits_uhwi_p (TYPE_MAX_VALUE (sizetype)))
89     offset_limit = tree_to_uhwi (TYPE_MAX_VALUE (sizetype));
90   else
91     offset_limit = -1;
92   offset_limit /= 2;
93 }
94
95
/* Compute offset of EXPR within VAR.  Return error_mark_node
   if unknown.  */

static tree
compute_object_offset (const_tree expr, const_tree var)
{
  enum tree_code code = PLUS_EXPR;
  tree base, off, t;

  if (expr == var)
    return size_zero_node;

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      /* Offset of the containing object plus the field's byte offset
	 (DECL_FIELD_OFFSET plus the byte part of its bit offset).  */
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      t = TREE_OPERAND (expr, 1);
      off = size_binop (PLUS_EXPR, DECL_FIELD_OFFSET (t),
                        size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
                                  / BITS_PER_UNIT));
      break;

    case REALPART_EXPR:
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* These do not change the byte offset of the underlying object.  */
      return compute_object_offset (TREE_OPERAND (expr, 0), var);

    case IMAGPART_EXPR:
      /* The imaginary part lies one element size past the start of the
	 complex object.  */
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
      break;

    case ARRAY_REF:
      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
      if (base == error_mark_node)
        return base;

      /* Fold a negative constant index as a subtraction so the sizetype
	 multiplication below operates on a non-negative value.  */
      t = TREE_OPERAND (expr, 1);
      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
        {
          code = MINUS_EXPR;
          t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
        }
      t = fold_convert (sizetype, t);
      off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
      break;

    case MEM_REF:
      /* MEM[&x, off] -- the constant offset is the second operand.  */
      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
      return wide_int_to_tree (sizetype, mem_ref_offset (expr));

    default:
      return error_mark_node;
    }

  return size_binop (code, base, off);
}
160
161
/* Compute __builtin_object_size for PTR, which is a ADDR_EXPR.
   OBJECT_SIZE_TYPE is the second argument from __builtin_object_size.
   If unknown, return unknown[object_size_type].  */

static unsigned HOST_WIDE_INT
addr_object_size (struct object_size_info *osi, const_tree ptr,
                  int object_size_type)
{
  tree pt_var, pt_var_size = NULL_TREE, var_size, bytes;

  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);

  /* Strip handled components to find the base object the address
     points into.  */
  pt_var = TREE_OPERAND (ptr, 0);
  while (handled_component_p (pt_var))
    pt_var = TREE_OPERAND (pt_var, 0);

  if (pt_var
      && TREE_CODE (pt_var) == MEM_REF)
    {
      unsigned HOST_WIDE_INT sz;

      /* Base is a dereferenced pointer: reuse already collected SSA
	 name sizes when possible; the subobject bit is meaningless for
	 a pointed-to object, so it is masked off in the recursion.  */
      if (!osi || (object_size_type & 1) != 0
          || TREE_CODE (TREE_OPERAND (pt_var, 0)) != SSA_NAME)
        {
          sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
                                            object_size_type & ~1);
        }
      else
        {
          tree var = TREE_OPERAND (pt_var, 0);
          if (osi->pass == 0)
            collect_object_sizes_for (osi, var);
          if (bitmap_bit_p (computed[object_size_type],
                            SSA_NAME_VERSION (var)))
            sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
          else
            sz = unknown[object_size_type];
        }
      /* Subtract the MEM_REF offset, clamping at zero and giving up if
	 the difference does not fit a host wide int.  */
      if (sz != unknown[object_size_type])
        {
          offset_int dsz = wi::sub (sz, mem_ref_offset (pt_var));
          if (wi::neg_p (dsz))
            sz = 0;
          else if (wi::fits_uhwi_p (dsz))
            sz = dsz.to_uhwi ();
          else
            sz = unknown[object_size_type];
        }

      if (sz != unknown[object_size_type] && sz < offset_limit)
        pt_var_size = size_int (sz);
    }
  else if (pt_var
           && DECL_P (pt_var)
           && tree_fits_uhwi_p (DECL_SIZE_UNIT (pt_var))
           && tree_to_uhwi (DECL_SIZE_UNIT (pt_var)) < offset_limit)
    pt_var_size = DECL_SIZE_UNIT (pt_var);
  else if (pt_var
           && TREE_CODE (pt_var) == STRING_CST
           && TYPE_SIZE_UNIT (TREE_TYPE (pt_var))
           && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
           && tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (pt_var)))
              < offset_limit)
    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
  else
    return unknown[object_size_type];

  if (pt_var != TREE_OPERAND (ptr, 0))
    {
      tree var;

      if (object_size_type & 1)
        {
          /* Subobject variant: find the innermost accessed array or
	     field reference the address refers to.  */
          var = TREE_OPERAND (ptr, 0);

          while (var != pt_var
                 && TREE_CODE (var) != BIT_FIELD_REF
                 && TREE_CODE (var) != COMPONENT_REF
                 && TREE_CODE (var) != ARRAY_REF
                 && TREE_CODE (var) != ARRAY_RANGE_REF
                 && TREE_CODE (var) != REALPART_EXPR
                 && TREE_CODE (var) != IMAGPART_EXPR)
            var = TREE_OPERAND (var, 0);
          if (var != pt_var && TREE_CODE (var) == ARRAY_REF)
            var = TREE_OPERAND (var, 0);
          /* Fall back to the whole object if the subobject's size is
	     unknown or larger than the containing object's size.  */
          if (! TYPE_SIZE_UNIT (TREE_TYPE (var))
              || ! tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (var)))
              || (pt_var_size
                  && tree_int_cst_lt (pt_var_size,
                                      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
            var = pt_var;
          else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
            {
              tree v = var;
              /* For &X->fld, compute object size only if fld isn't the last
                 field, as struct { int i; char c[1]; } is often used instead
                 of flexible array member.  */
              while (v && v != pt_var)
                switch (TREE_CODE (v))
                  {
                  case ARRAY_REF:
                    if (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (v, 0)))
                        && TREE_CODE (TREE_OPERAND (v, 1)) == INTEGER_CST)
                      {
                        tree domain
                          = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (v, 0)));
                        if (domain
                            && TYPE_MAX_VALUE (domain)
                            && TREE_CODE (TYPE_MAX_VALUE (domain))
                               == INTEGER_CST
                            && tree_int_cst_lt (TREE_OPERAND (v, 1),
                                                TYPE_MAX_VALUE (domain)))
                          {
                            /* Index strictly below the array bound:
			       cannot be a trailing-array access.  */
                            v = NULL_TREE;
                            break;
                          }
                      }
                    v = TREE_OPERAND (v, 0);
                    break;
                  case REALPART_EXPR:
                  case IMAGPART_EXPR:
                    v = NULL_TREE;
                    break;
                  case COMPONENT_REF:
                    if (TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE)
                      {
                        v = NULL_TREE;
                        break;
                      }
                    /* Skip through union members, whose fields may
		       overlay the trailing field.  */
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (TREE_CODE (v) == COMPONENT_REF
                        && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                           == RECORD_TYPE)
                      {
                        /* If any FIELD_DECL follows this one, the array
			   is not the struct's last field and thus not a
			   poor-man's flexible array member.  */
                        tree fld_chain = DECL_CHAIN (TREE_OPERAND (v, 1));
                        for (; fld_chain; fld_chain = DECL_CHAIN (fld_chain))
                          if (TREE_CODE (fld_chain) == FIELD_DECL)
                            break;

                        if (fld_chain)
                          {
                            v = NULL_TREE;
                            break;
                          }
                        v = TREE_OPERAND (v, 0);
                      }
                    while (v != pt_var && TREE_CODE (v) == COMPONENT_REF)
                      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != UNION_TYPE
                          && TREE_CODE (TREE_TYPE (TREE_OPERAND (v, 0)))
                          != QUAL_UNION_TYPE)
                        break;
                      else
                        v = TREE_OPERAND (v, 0);
                    if (v != pt_var)
                      v = NULL_TREE;
                    else
                      v = pt_var;
                    break;
                  default:
                    v = pt_var;
                    break;
                  }
              /* v == pt_var means the access may go through a trailing
		 array; use the whole object's size instead.  */
              if (v == pt_var)
                var = pt_var;
            }
        }
      else
        var = pt_var;

      /* Size of the chosen (sub)object ...  */
      if (var != pt_var)
        var_size = TYPE_SIZE_UNIT (TREE_TYPE (var));
      else if (!pt_var_size)
        return unknown[object_size_type];
      else
        var_size = pt_var_size;
      /* ... minus PTR's offset within it, clamped at zero.  */
      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), var);
      if (bytes != error_mark_node)
        {
          if (TREE_CODE (bytes) == INTEGER_CST
              && tree_int_cst_lt (var_size, bytes))
            bytes = size_zero_node;
          else
            bytes = size_binop (MINUS_EXPR, var_size, bytes);
        }
      /* The result must also not exceed the remaining size of the
	 whole dereferenced object.  */
      if (var != pt_var
          && pt_var_size
          && TREE_CODE (pt_var) == MEM_REF
          && bytes != error_mark_node)
        {
          tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
          if (bytes2 != error_mark_node)
            {
              if (TREE_CODE (bytes2) == INTEGER_CST
                  && tree_int_cst_lt (pt_var_size, bytes2))
                bytes2 = size_zero_node;
              else
                bytes2 = size_binop (MINUS_EXPR, pt_var_size, bytes2);
              bytes = size_binop (MIN_EXPR, bytes, bytes2);
            }
        }
    }
  else if (!pt_var_size)
    return unknown[object_size_type];
  else
    bytes = pt_var_size;

  if (tree_fits_uhwi_p (bytes))
    return tree_to_uhwi (bytes);

  return unknown[object_size_type];
}
381
382
383 /* Compute __builtin_object_size for CALL, which is a GIMPLE_CALL.
384    Handles various allocation calls.  OBJECT_SIZE_TYPE is the second
385    argument from __builtin_object_size.  If unknown, return
386    unknown[object_size_type].  */
387
388 static unsigned HOST_WIDE_INT
389 alloc_object_size (const gcall *call, int object_size_type)
390 {
391   tree callee, bytes = NULL_TREE;
392   tree alloc_size;
393   int arg1 = -1, arg2 = -1;
394
395   gcc_assert (is_gimple_call (call));
396
397   callee = gimple_call_fndecl (call);
398   if (!callee)
399     return unknown[object_size_type];
400
401   alloc_size = lookup_attribute ("alloc_size",
402                                  TYPE_ATTRIBUTES (TREE_TYPE (callee)));
403   if (alloc_size && TREE_VALUE (alloc_size))
404     {
405       tree p = TREE_VALUE (alloc_size);
406
407       arg1 = TREE_INT_CST_LOW (TREE_VALUE (p))-1;
408       if (TREE_CHAIN (p))
409         arg2 = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (p)))-1;
410     }
411
412   if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
413     switch (DECL_FUNCTION_CODE (callee))
414       {
415       case BUILT_IN_CALLOC:
416         arg2 = 1;
417         /* fall through */
418       case BUILT_IN_MALLOC:
419       case BUILT_IN_ALLOCA:
420       case BUILT_IN_ALLOCA_WITH_ALIGN:
421         arg1 = 0;
422       default:
423         break;
424       }
425
426   if (arg1 < 0 || arg1 >= (int)gimple_call_num_args (call)
427       || TREE_CODE (gimple_call_arg (call, arg1)) != INTEGER_CST
428       || (arg2 >= 0
429           && (arg2 >= (int)gimple_call_num_args (call)
430               || TREE_CODE (gimple_call_arg (call, arg2)) != INTEGER_CST)))
431     return unknown[object_size_type];
432
433   if (arg2 >= 0)
434     bytes = size_binop (MULT_EXPR,
435         fold_convert (sizetype, gimple_call_arg (call, arg1)),
436         fold_convert (sizetype, gimple_call_arg (call, arg2)));
437   else if (arg1 >= 0)
438     bytes = fold_convert (sizetype, gimple_call_arg (call, arg1));
439
440   if (bytes && tree_fits_uhwi_p (bytes))
441     return tree_to_uhwi (bytes);
442
443   return unknown[object_size_type];
444 }
445
446
447 /* If object size is propagated from one of function's arguments directly
448    to its return value, return that argument for GIMPLE_CALL statement CALL.
449    Otherwise return NULL.  */
450
451 static tree
452 pass_through_call (const gcall *call)
453 {
454   tree callee = gimple_call_fndecl (call);
455
456   if (callee
457       && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
458     switch (DECL_FUNCTION_CODE (callee))
459       {
460       case BUILT_IN_MEMCPY:
461       case BUILT_IN_MEMMOVE:
462       case BUILT_IN_MEMSET:
463       case BUILT_IN_STRCPY:
464       case BUILT_IN_STRNCPY:
465       case BUILT_IN_STRCAT:
466       case BUILT_IN_STRNCAT:
467       case BUILT_IN_MEMCPY_CHK:
468       case BUILT_IN_MEMMOVE_CHK:
469       case BUILT_IN_MEMSET_CHK:
470       case BUILT_IN_STRCPY_CHK:
471       case BUILT_IN_STRNCPY_CHK:
472       case BUILT_IN_STPNCPY_CHK:
473       case BUILT_IN_STRCAT_CHK:
474       case BUILT_IN_STRNCAT_CHK:
475       case BUILT_IN_ASSUME_ALIGNED:
476         if (gimple_call_num_args (call) >= 1)
477           return gimple_call_arg (call, 0);
478         break;
479       default:
480         break;
481       }
482
483   return NULL_TREE;
484 }
485
486
/* Compute __builtin_object_size value for PTR.  OBJECT_SIZE_TYPE is the
   second argument from __builtin_object_size.  */

unsigned HOST_WIDE_INT
compute_builtin_object_size (tree ptr, int object_size_type)
{
  gcc_assert (object_size_type >= 0 && object_size_type <= 3);

  if (! offset_limit)
    init_offset_limit ();

  /* The address of a known object can be answered directly.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    return addr_object_size (NULL, ptr, object_size_type);

  /* For SSA pointers run the multi-pass collection; computed[] being
     NULL means the pass has not initialized its tables.  */
  if (TREE_CODE (ptr) == SSA_NAME
      && POINTER_TYPE_P (TREE_TYPE (ptr))
      && computed[object_size_type] != NULL)
    {
      if (!bitmap_bit_p (computed[object_size_type], SSA_NAME_VERSION (ptr)))
        {
          struct object_size_info osi;
          bitmap_iterator bi;
          unsigned int i;

          if (num_ssa_names > object_sizes[object_size_type].length ())
            object_sizes[object_size_type].safe_grow (num_ssa_names);
          if (dump_file)
            {
              fprintf (dump_file, "Computing %s %sobject size for ",
                       (object_size_type & 2) ? "minimum" : "maximum",
                       (object_size_type & 1) ? "sub" : "");
              print_generic_expr (dump_file, ptr, dump_flags);
              fprintf (dump_file, ":\n");
            }

          osi.visited = BITMAP_ALLOC (NULL);
          osi.reexamine = BITMAP_ALLOC (NULL);
          osi.object_size_type = object_size_type;
          osi.depths = NULL;
          osi.stack = NULL;
          osi.tos = NULL;

          /* First pass: walk UD chains, compute object sizes that
             can be computed.  osi.reexamine bitmap at the end will
             contain what variables were found in dependency cycles
             and therefore need to be reexamined.  */
          osi.pass = 0;
          osi.changed = false;
          collect_object_sizes_for (&osi, ptr);

          /* Second pass: keep recomputing object sizes of variables
             that need reexamination, until no object sizes are
             increased or all object sizes are computed.  */
          if (! bitmap_empty_p (osi.reexamine))
            {
              bitmap reexamine = BITMAP_ALLOC (NULL);

              /* If looking for minimum instead of maximum object size,
                 detect cases where a pointer is increased in a loop.
                 Although even without this detection pass 2 would eventually
                 terminate, it could take a long time.  If a pointer is
                 increasing this way, we need to assume 0 object size.
                 E.g. p = &buf[0]; while (cond) p = p + 4;  */
              if (object_size_type & 2)
                {
                  osi.depths = XCNEWVEC (unsigned int, num_ssa_names);
                  osi.stack = XNEWVEC (unsigned int, num_ssa_names);
                  osi.tos = osi.stack;
                  osi.pass = 1;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      check_for_plus_in_loops (&osi, ssa_name (i));

                  free (osi.depths);
                  osi.depths = NULL;
                  free (osi.stack);
                  osi.stack = NULL;
                  osi.tos = NULL;
                }

              /* Iterate pass 2 to a fixpoint; merge_object_sizes sets
                 osi.changed whenever a recorded size improves.  */
              do
                {
                  osi.pass = 2;
                  osi.changed = false;
                  /* collect_object_sizes_for is changing
                     osi.reexamine bitmap, so iterate over a copy.  */
                  bitmap_copy (reexamine, osi.reexamine);
                  EXECUTE_IF_SET_IN_BITMAP (reexamine, 0, i, bi)
                    if (bitmap_bit_p (osi.reexamine, i))
                      {
                        collect_object_sizes_for (&osi, ssa_name (i));
                        if (dump_file && (dump_flags & TDF_DETAILS))
                          {
                            fprintf (dump_file, "Reexamining ");
                            print_generic_expr (dump_file, ssa_name (i),
                                                dump_flags);
                            fprintf (dump_file, "\n");
                          }
                      }
                }
              while (osi.changed);

              BITMAP_FREE (reexamine);
            }
          /* Whatever is still marked for reexamination has reached the
             fixpoint; record those names as computed.  */
          EXECUTE_IF_SET_IN_BITMAP (osi.reexamine, 0, i, bi)
            bitmap_set_bit (computed[object_size_type], i);

          /* Debugging dumps.  */
          if (dump_file)
            {
              EXECUTE_IF_SET_IN_BITMAP (osi.visited, 0, i, bi)
                if (object_sizes[object_size_type][i]
                    != unknown[object_size_type])
                  {
                    print_generic_expr (dump_file, ssa_name (i),
                                        dump_flags);
                    fprintf (dump_file,
                             ": %s %sobject size "
                             HOST_WIDE_INT_PRINT_UNSIGNED "\n",
                             (object_size_type & 2) ? "minimum" : "maximum",
                             (object_size_type & 1) ? "sub" : "",
                             object_sizes[object_size_type][i]);
                  }
            }

          BITMAP_FREE (osi.reexamine);
          BITMAP_FREE (osi.visited);
        }

      return object_sizes[object_size_type][SSA_NAME_VERSION (ptr)];
    }

  return unknown[object_size_type];
}
624
625 /* Compute object_sizes for PTR, defined to VALUE, which is not an SSA_NAME.  */
626
627 static void
628 expr_object_size (struct object_size_info *osi, tree ptr, tree value)
629 {
630   int object_size_type = osi->object_size_type;
631   unsigned int varno = SSA_NAME_VERSION (ptr);
632   unsigned HOST_WIDE_INT bytes;
633
634   gcc_assert (object_sizes[object_size_type][varno]
635               != unknown[object_size_type]);
636   gcc_assert (osi->pass == 0);
637
638   if (TREE_CODE (value) == WITH_SIZE_EXPR)
639     value = TREE_OPERAND (value, 0);
640
641   /* Pointer variables should have been handled by merge_object_sizes.  */
642   gcc_assert (TREE_CODE (value) != SSA_NAME
643               || !POINTER_TYPE_P (TREE_TYPE (value)));
644
645   if (TREE_CODE (value) == ADDR_EXPR)
646     bytes = addr_object_size (osi, value, object_size_type);
647   else
648     bytes = unknown[object_size_type];
649
650   if ((object_size_type & 2) == 0)
651     {
652       if (object_sizes[object_size_type][varno] < bytes)
653         object_sizes[object_size_type][varno] = bytes;
654     }
655   else
656     {
657       if (object_sizes[object_size_type][varno] > bytes)
658         object_sizes[object_size_type][varno] = bytes;
659     }
660 }
661
662
663 /* Compute object_sizes for PTR, defined to the result of a call.  */
664
665 static void
666 call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
667 {
668   int object_size_type = osi->object_size_type;
669   unsigned int varno = SSA_NAME_VERSION (ptr);
670   unsigned HOST_WIDE_INT bytes;
671
672   gcc_assert (is_gimple_call (call));
673
674   gcc_assert (object_sizes[object_size_type][varno]
675               != unknown[object_size_type]);
676   gcc_assert (osi->pass == 0);
677
678   bytes = alloc_object_size (call, object_size_type);
679
680   if ((object_size_type & 2) == 0)
681     {
682       if (object_sizes[object_size_type][varno] < bytes)
683         object_sizes[object_size_type][varno] = bytes;
684     }
685   else
686     {
687       if (object_sizes[object_size_type][varno] > bytes)
688         object_sizes[object_size_type][varno] = bytes;
689     }
690 }
691
692
693 /* Compute object_sizes for PTR, defined to an unknown value.  */
694
695 static void
696 unknown_object_size (struct object_size_info *osi, tree ptr)
697 {
698   int object_size_type = osi->object_size_type;
699   unsigned int varno = SSA_NAME_VERSION (ptr);
700   unsigned HOST_WIDE_INT bytes;
701
702   gcc_assert (object_sizes[object_size_type][varno]
703               != unknown[object_size_type]);
704   gcc_assert (osi->pass == 0);
705
706   bytes = unknown[object_size_type];
707
708   if ((object_size_type & 2) == 0)
709     {
710       if (object_sizes[object_size_type][varno] < bytes)
711         object_sizes[object_size_type][varno] = bytes;
712     }
713   else
714     {
715       if (object_sizes[object_size_type][varno] > bytes)
716         object_sizes[object_size_type][varno] = bytes;
717     }
718 }
719
720
/* Merge object sizes of ORIG + OFFSET into DEST.  Return true if
   the object size might need reexamination later.  */

static bool
merge_object_sizes (struct object_size_info *osi, tree dest, tree orig,
                    unsigned HOST_WIDE_INT offset)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (dest);
  unsigned HOST_WIDE_INT orig_bytes;

  /* Nothing can change once DEST is already unknown.  */
  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;
  /* Offsets at or beyond OFFSET_LIMIT are treated as pointer
     subtraction, making DEST's size unknown.  */
  if (offset >= offset_limit)
    {
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      return false;
    }

  if (osi->pass == 0)
    collect_object_sizes_for (osi, orig);

  /* Subtract OFFSET from ORIG's size, clamping at zero.  */
  orig_bytes = object_sizes[object_size_type][SSA_NAME_VERSION (orig)];
  if (orig_bytes != unknown[object_size_type])
    orig_bytes = (offset > orig_bytes)
                 ? (unsigned HOST_WIDE_INT) 0 : orig_bytes - offset;

  /* Maximum variants keep the largest size, minimum variants the
     smallest; note any change so pass 2 iterates to a fixpoint.  */
  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  else
    {
      if (object_sizes[object_size_type][varno] > orig_bytes)
        {
          object_sizes[object_size_type][varno] = orig_bytes;
          osi->changed = true;
        }
    }
  /* DEST needs reexamination if ORIG itself is not final yet.  */
  return bitmap_bit_p (osi->reexamine, SSA_NAME_VERSION (orig));
}
766
767
/* Compute object_sizes for VAR, defined to the result of an assignment
   with operator POINTER_PLUS_EXPR.  Return true if the object size might
   need reexamination  later.  */

static bool
plus_stmt_object_size (struct object_size_info *osi, tree var, gimple *stmt)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  unsigned HOST_WIDE_INT bytes;
  tree op0, op1;

  /* Extract the base pointer and offset, either from PTR p+ OFF or
     from the equivalent &MEM[ptr, off] form.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    {
      op0 = gimple_assign_rhs1 (stmt);
      op1 = gimple_assign_rhs2 (stmt);
    }
  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
    {
      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      gcc_assert (TREE_CODE (rhs) == MEM_REF);
      op0 = TREE_OPERAND (rhs, 0);
      op1 = TREE_OPERAND (rhs, 1);
    }
  else
    gcc_unreachable ();

  /* Nothing can change once VAR is already unknown.  */
  if (object_sizes[object_size_type][varno] == unknown[object_size_type])
    return false;

  /* Handle PTR + OFFSET here.  */
  if (TREE_CODE (op1) == INTEGER_CST
      && (TREE_CODE (op0) == SSA_NAME
          || TREE_CODE (op0) == ADDR_EXPR))
    {
      if (! tree_fits_uhwi_p (op1))
        bytes = unknown[object_size_type];
      else if (TREE_CODE (op0) == SSA_NAME)
        return merge_object_sizes (osi, var, op0, tree_to_uhwi (op1));
      else
        {
          unsigned HOST_WIDE_INT off = tree_to_uhwi (op1);

          /* op0 will be ADDR_EXPR here.  Subtract the offset from the
	     object's size, clamping at zero; offsets past OFFSET_LIMIT
	     are treated as pointer subtraction (unknown size).  */
          bytes = addr_object_size (osi, op0, object_size_type);
          if (bytes == unknown[object_size_type])
            ;
          else if (off > offset_limit)
            bytes = unknown[object_size_type];
          else if (off > bytes)
            bytes = 0;
          else
            bytes -= off;
        }
    }
  else
    bytes = unknown[object_size_type];

  /* Record the maximum resp. minimum size seen for VAR.  */
  if ((object_size_type & 2) == 0)
    {
      if (object_sizes[object_size_type][varno] < bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  else
    {
      if (object_sizes[object_size_type][varno] > bytes)
        object_sizes[object_size_type][varno] = bytes;
    }
  return false;
}
838
839
840 /* Compute object_sizes for VAR, defined at STMT, which is
841    a COND_EXPR.  Return true if the object size might need reexamination
842    later.  */
843
844 static bool
845 cond_expr_object_size (struct object_size_info *osi, tree var, gimple *stmt)
846 {
847   tree then_, else_;
848   int object_size_type = osi->object_size_type;
849   unsigned int varno = SSA_NAME_VERSION (var);
850   bool reexamine = false;
851
852   gcc_assert (gimple_assign_rhs_code (stmt) == COND_EXPR);
853
854   if (object_sizes[object_size_type][varno] == unknown[object_size_type])
855     return false;
856
857   then_ = gimple_assign_rhs2 (stmt);
858   else_ = gimple_assign_rhs3 (stmt);
859
860   if (TREE_CODE (then_) == SSA_NAME)
861     reexamine |= merge_object_sizes (osi, var, then_, 0);
862   else
863     expr_object_size (osi, var, then_);
864
865   if (TREE_CODE (else_) == SSA_NAME)
866     reexamine |= merge_object_sizes (osi, var, else_, 0);
867   else
868     expr_object_size (osi, var, else_);
869
870   return reexamine;
871 }
872
873 /* Compute object sizes for VAR.
874    For ADDR_EXPR an object size is the number of remaining bytes
875    to the end of the object (where what is considered an object depends on
876    OSI->object_size_type).
877    For allocation GIMPLE_CALL like malloc or calloc object size is the size
878    of the allocation.
879    For POINTER_PLUS_EXPR where second operand is a constant integer,
880    object size is object size of the first operand minus the constant.
881    If the constant is bigger than the number of remaining bytes until the
882    end of the object, object size is 0, but if it is instead a pointer
883    subtraction, object size is unknown[object_size_type].
884    To differentiate addition from subtraction, ADDR_EXPR returns
885    unknown[object_size_type] for all objects bigger than half of the address
886    space, and constants less than half of the address space are considered
887    addition, while bigger constants subtraction.
888    For a memcpy like GIMPLE_CALL that always returns one of its arguments, the
889    object size is object size of that argument.
890    Otherwise, object size is the maximum of object sizes of variables
891    that it might be set to.  */
892
static void
collect_object_sizes_for (struct object_size_info *osi, tree var)
{
  int object_size_type = osi->object_size_type;
  unsigned int varno = SSA_NAME_VERSION (var);
  gimple *stmt;
  bool reexamine;

  /* Nothing to do if VAR's size was already finalized.  */
  if (bitmap_bit_p (computed[object_size_type], varno))
    return;

  if (osi->pass == 0)
    {
      if (bitmap_set_bit (osi->visited, varno))
	{
	  /* First visit on this pass: seed with the identity of the
	     merge below (-1 for minimum sizes, 0 for maximum sizes).  */
	  object_sizes[object_size_type][varno]
	    = (object_size_type & 2) ? -1 : 0;
	}
      else
	{
	  /* Found a dependency loop.  Mark the variable for later
	     re-examination.  */
	  bitmap_set_bit (osi->reexamine, varno);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Found a dependency loop at ");
	      print_generic_expr (dump_file, var, dump_flags);
	      fprintf (dump_file, "\n");
	    }
	  return;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting use-def links for ");
      print_generic_expr (dump_file, var, dump_flags);
      fprintf (dump_file, "\n");
    }

  stmt = SSA_NAME_DEF_STMT (var);
  reexamine = false;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	tree rhs = gimple_assign_rhs1 (stmt);
	/* POINTER_PLUS_EXPR and ADDR_EXPR of a MEM_REF both add an offset
	   to a base pointer and share one handler.  */
	if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	    || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
		&& TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
	  reexamine = plus_stmt_object_size (osi, var, stmt);
	else if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	  reexamine = cond_expr_object_size (osi, var, stmt);
	else if (gimple_assign_single_p (stmt)
		 || gimple_assign_unary_nop_p (stmt))
	  {
	    /* Plain copy or no-op conversion: a pointer SSA name source
	       is merged, anything else sized directly.  */
	    if (TREE_CODE (rhs) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (rhs)))
	      reexamine = merge_object_sizes (osi, var, rhs, 0);
	    else
	      expr_object_size (osi, var, rhs);
	  }
	else
	  unknown_object_size (osi, var);
	break;
      }

    case GIMPLE_CALL:
      {
	gcall *call_stmt = as_a <gcall *> (stmt);
	/* A memcpy-like call returns one of its arguments; size that
	   argument instead of the call itself.  */
	tree arg = pass_through_call (call_stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME
		&& POINTER_TYPE_P (TREE_TYPE (arg)))
	      reexamine = merge_object_sizes (osi, var, arg, 0);
	    else
	      expr_object_size (osi, var, arg);
	  }
	else
	  call_object_size (osi, var, call_stmt);
	break;
      }

    case GIMPLE_ASM:
      /* Pointers defined by __asm__ statements can point anywhere.  */
      object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_NOP:
      if (SSA_NAME_VAR (var)
	  && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL)
	expr_object_size (osi, var, SSA_NAME_VAR (var));
      else
	/* Uninitialized SSA names point nowhere.  */
	object_sizes[object_size_type][varno] = unknown[object_size_type];
      break;

    case GIMPLE_PHI:
      {
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    /* No point merging further once the result is unknown.  */
	    if (object_sizes[object_size_type][varno]
		== unknown[object_size_type])
	      break;

	    /* Non-SSA arguments are constant across passes, so they only
	       need to be sized on the first pass.  */
	    if (TREE_CODE (rhs) == SSA_NAME)
	      reexamine |= merge_object_sizes (osi, var, rhs, 0);
	    else if (osi->pass == 0)
	      expr_object_size (osi, var, rhs);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  if (! reexamine
      || object_sizes[object_size_type][varno] == unknown[object_size_type])
    {
      /* The result is final (or hopeless): record it and drop any
	 pending re-examination request.  */
      bitmap_set_bit (computed[object_size_type], varno);
      bitmap_clear_bit (osi->reexamine, varno);
    }
  else
    {
      bitmap_set_bit (osi->reexamine, varno);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Need to reexamine ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, "\n");
	}
    }
}
1033
1034
1035 /* Helper function for check_for_plus_in_loops.  Called recursively
1036    to detect loops.  */
1037
static void
check_for_plus_in_loops_1 (struct object_size_info *osi, tree var,
			   unsigned int depth)
{
  gimple *stmt = SSA_NAME_DEF_STMT (var);
  unsigned int varno = SSA_NAME_VERSION (var);

  if (osi->depths[varno])
    {
      /* VAR is already on the walk stack, so we have closed a cycle.
	 DEPTH only grows when a nonzero constant is added (see the
	 POINTER_PLUS_EXPR case below), so a differing depth means the
	 pointer is increased on every trip around the loop.  */
      if (osi->depths[varno] != depth)
	{
	  unsigned int *sp;

	  /* Found a loop involving pointer addition.  Pop the stack back
	     to VAR, giving every name on the cycle a minimum object size
	     of 0 and marking it as finally computed.  */
	  for (sp = osi->tos; sp > osi->stack; )
	    {
	      --sp;
	      bitmap_clear_bit (osi->reexamine, *sp);
	      bitmap_set_bit (computed[osi->object_size_type], *sp);
	      object_sizes[osi->object_size_type][*sp] = 0;
	      if (*sp == varno)
		break;
	    }
	}
      return;
    }
  else if (! bitmap_bit_p (osi->reexamine, varno))
    /* Only names still awaiting re-examination can be part of an
       interesting loop.  */
    return;

  /* Push VAR onto the walk stack and recurse into its operands.  */
  osi->depths[varno] = depth;
  *osi->tos++ = varno;

  switch (gimple_code (stmt))
    {

    case GIMPLE_ASSIGN:
      {
	if ((gimple_assign_single_p (stmt)
	     || gimple_assign_unary_nop_p (stmt))
	    && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
	  {
	    /* Plain copy: same depth, the pointer is unchanged.  */
	    tree rhs = gimple_assign_rhs1 (stmt);

	    check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
	  {
	    tree basevar = gimple_assign_rhs1 (stmt);
	    tree cst = gimple_assign_rhs2 (stmt);

	    gcc_assert (TREE_CODE (cst) == INTEGER_CST);

	    /* Adding a nonzero constant bumps the depth; adding zero
	       leaves it alone.  */
	    check_for_plus_in_loops_1 (osi, basevar,
				       depth + !integer_zerop (cst));
	  }
	else
	  gcc_unreachable ();
	break;
      }

    case GIMPLE_CALL:
      {
	gcall *call_stmt = as_a <gcall *> (stmt);
	/* A pass-through call forwards one of its arguments unchanged.  */
	tree arg = pass_through_call (call_stmt);
	if (arg)
	  {
	    if (TREE_CODE (arg) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, arg, depth);
	    else
	      gcc_unreachable ();
	  }
	break;
      }

    case GIMPLE_PHI:
      {
	unsigned i;

	/* Walk every incoming SSA edge at the same depth.  */
	for (i = 0; i < gimple_phi_num_args (stmt); i++)
	  {
	    tree rhs = gimple_phi_arg (stmt, i)->def;

	    if (TREE_CODE (rhs) == SSA_NAME)
	      check_for_plus_in_loops_1 (osi, rhs, depth);
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  /* Pop VAR off the walk stack.  */
  osi->depths[varno] = 0;
  osi->tos--;
}
1133
1134
1135 /* Check if some pointer we are computing object size of is being increased
1136    within a loop.  If yes, assume all the SSA variables participating in
1137    that loop have minimum object sizes 0.  */
1138
1139 static void
1140 check_for_plus_in_loops (struct object_size_info *osi, tree var)
1141 {
1142   gimple *stmt = SSA_NAME_DEF_STMT (var);
1143
1144   /* NOTE: In the pre-tuples code, we handled a CALL_EXPR here,
1145      and looked for a POINTER_PLUS_EXPR in the pass-through
1146      argument, if any.  In GIMPLE, however, such an expression
1147      is not a valid call operand.  */
1148
1149   if (is_gimple_assign (stmt)
1150       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
1151     {
1152       tree basevar = gimple_assign_rhs1 (stmt);
1153       tree cst = gimple_assign_rhs2 (stmt);
1154
1155       gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1156
1157       if (integer_zerop (cst))
1158         return;
1159
1160       osi->depths[SSA_NAME_VERSION (basevar)] = 1;
1161       *osi->tos++ = SSA_NAME_VERSION (basevar);
1162       check_for_plus_in_loops_1 (osi, var, 2);
1163       osi->depths[SSA_NAME_VERSION (basevar)] = 0;
1164       osi->tos--;
1165     }
1166 }
1167
1168
1169 /* Initialize data structures for the object size computation.  */
1170
1171 void
1172 init_object_sizes (void)
1173 {
1174   int object_size_type;
1175
1176   if (computed[0])
1177     return;
1178
1179   for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1180     {
1181       object_sizes[object_size_type].safe_grow (num_ssa_names);
1182       computed[object_size_type] = BITMAP_ALLOC (NULL);
1183     }
1184
1185   init_offset_limit ();
1186 }
1187
1188
1189 /* Destroy data structures after the object size computation.  */
1190
1191 static void
1192 fini_object_sizes (void)
1193 {
1194   int object_size_type;
1195
1196   for (object_size_type = 0; object_size_type <= 3; object_size_type++)
1197     {
1198       object_sizes[object_size_type].release ();
1199       BITMAP_FREE (computed[object_size_type]);
1200     }
1201 }
1202
1203
1204 /* Simple pass to optimize all __builtin_object_size () builtins.  */
1205
1206 namespace {
1207
/* Pass descriptor: a plain GIMPLE pass requiring CFG and SSA form, with
   no timing variable and no extra TODO flags.  */
const pass_data pass_data_object_sizes =
{
  GIMPLE_PASS, /* type */
  "objsz", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
1220
class pass_object_sizes : public gimple_opt_pass
{
public:
  pass_object_sizes (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_object_sizes, ctxt), insert_min_max_p (false)
  {}

  /* opt_pass methods: */
  /* The pass is cloned so it can run more than once in the pipeline.  */
  opt_pass * clone () { return new pass_object_sizes (m_ctxt); }
  /* Single boolean pass parameter selecting MIN/MAX_EXPR insertion
     instead of folding to a constant (see execute).  */
  void set_pass_param (unsigned int n, bool param)
    {
      gcc_assert (n == 0);
      insert_min_max_p = param;
    }
  virtual unsigned int execute (function *);

 private:
  /* Determines whether the pass instance creates MIN/MAX_EXPRs.  */
  bool insert_min_max_p;
}; // class pass_object_sizes
1241
1242 /* Dummy valueize function.  */
1243
1244 static tree
1245 do_valueize (tree t)
1246 {
1247   return t;
1248 }
1249
unsigned int
pass_object_sizes::execute (function *fun)
{
  basic_block bb;
  /* Scan every statement of FUN for __builtin_object_size calls and
     either fold them to a constant or (if insert_min_max_p) clamp the
     call result with a MIN/MAX_EXPR.  */
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree result;
	  gimple *call = gsi_stmt (i);
	  if (!gimple_call_builtin_p (call, BUILT_IN_OBJECT_SIZE))
	    continue;

	  /* Lazily allocate the per-SSA-name tables on the first
	     interesting call.  */
	  init_object_sizes ();

	  /* If insert_min_max_p, only attempt to fold
	     __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
	     and rather than folding the builtin to the constant if any,
	     create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
	     call result and the computed constant.  */
	  if (insert_min_max_p)
	    {
	      tree ost = gimple_call_arg (call, 1);
	      if (tree_fits_uhwi_p (ost))
		{
		  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
		  tree ptr = gimple_call_arg (call, 0);
		  tree lhs = gimple_call_lhs (call);
		  if ((object_size_type == 1 || object_size_type == 3)
		      && (TREE_CODE (ptr) == ADDR_EXPR
			  || TREE_CODE (ptr) == SSA_NAME)
		      && lhs)
		    {
		      tree type = TREE_TYPE (lhs);
		      unsigned HOST_WIDE_INT bytes
			= compute_builtin_object_size (ptr, object_size_type);
		      /* Only act when a meaningful size was computed:
			 -1 means unknown for mode 1, 0 for mode 3.  */
		      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type == 1
							     ? -1 : 0)
			  && wi::fits_to_tree_p (bytes, type))
			{
			  /* Redirect the call into a fresh SSA name and
			     define the original LHS as min/max of that
			     name and the computed constant.  */
			  tree tem = make_ssa_name (type);
			  gimple_call_set_lhs (call, tem);
			  enum tree_code code
			    = object_size_type == 1 ? MIN_EXPR : MAX_EXPR;
			  tree cst = build_int_cstu (type, bytes);
			  gimple *g
			    = gimple_build_assign (lhs, code, tem, cst);
			  gsi_insert_after (&i, g, GSI_NEW_STMT);
			  update_stmt (call);
			}
		    }
		}
	      continue;
	    }

	  tree lhs = gimple_call_lhs (call);
	  if (!lhs)
	    continue;

	  result = gimple_fold_stmt_to_constant (call, do_valueize);
	  if (!result)
	    {
	      tree ost = gimple_call_arg (call, 1);

	      if (tree_fits_uhwi_p (ost))
		{
		  unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);

		  /* Fall back to the documented "unknown" values:
		     (size_t)-1 for modes 0/1, 0 for modes 2/3.  */
		  if (object_size_type < 2)
		    result = fold_convert (size_type_node,
					   integer_minus_one_node);
		  else if (object_size_type < 4)
		    result = build_zero_cst (size_type_node);
		}

	      if (!result)
		continue;
	    }

	  gcc_assert (TREE_CODE (result) == INTEGER_CST);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, call, 0, dump_flags);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, result, 0);
	      fprintf (dump_file, "\n");
	    }

	  /* Propagate into all uses and fold those stmts.  */
	  replace_uses_by (lhs, result);
	}
    }

  fini_object_sizes ();
  return 0;
}
1349
1350 } // anon namespace
1351
/* Create an instance of the objsz pass for the pass manager.  */

gimple_opt_pass *
make_pass_object_sizes (gcc::context *ctxt)
{
  return new pass_object_sizes (ctxt);
}