[profile/ivi/osmesa.git] / src / glsl / lower_jumps.cpp
/*
 * Copyright © 2010 Luca Barbieri
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

/**
 * \file lower_jumps.cpp
 *
 * This pass lowers jumps (break, continue, and return) to if/else structures.
 *
 * It can be asked to:
 * 1. Pull jumps out of ifs where possible
 * 2. Remove all "continue"s, replacing them with an "execute flag"
 * 3. Replace all "break"s with a single conditional break at the end of the
 *    loop
 * 4. Replace all "return"s with a single return at the end of the function,
 *    for the main function and/or other functions
 *
 * Applying this pass gives several benefits:
 * 1. All functions can be inlined.
 * 2. nv40 and other pre-DX10 chips without "continue" can be supported.
 * 3. nv30 and other pre-DX10 chips with no control flow at all are better
 *    supported.
 *
 * Continues are lowered by adding a per-loop "execute flag", initialized to
 * true, that when cleared inhibits all execution until the end of the loop.
 *
 * Breaks are lowered to continues, plus setting a "break flag" that is
 * checked at the end of the loop and triggers the single conditional "break"
 * there.
 *
 * Returns are lowered to breaks/continues, plus adding a "return flag" that
 * causes each enclosing loop to break in turn until all loops have been
 * exited; the "execute flag" logic then ignores everything until the end of
 * the function.
 *
 * Note that "continue" and "return" could also be implemented by adding a
 * dummy loop and using "break". However, that is bad for hardware with
 * limited nesting depth and prevents further optimization, so it is not done
 * here.
 */
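
/*
 * Illustrative sketch (not actual compiler output; GLSL-like pseudocode is
 * used instead of raw IR): with lower_continue and lower_break enabled, a
 * loop such as
 *
 *    loop {
 *       if (a) continue;
 *       if (b) break;
 *       body();
 *    }
 *
 * is conceptually rewritten into
 *
 *    bool break_flag = false;
 *    loop {
 *       bool execute_flag = true;
 *       if (a) execute_flag = false;
 *       if (execute_flag) {
 *          if (b) { break_flag = true; execute_flag = false; }
 *          if (execute_flag) body();
 *       }
 *       if (break_flag) break;
 *    }
 */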

#include "glsl_types.h"
#include <string.h>
#include "ir.h"

enum jump_strength
{
   strength_none,
   strength_always_clears_execute_flag,
   strength_continue,
   strength_break,
   strength_return
};

struct block_record
{
   /* minimum jump strength (of lowered IR, not pre-lowering IR)
    *
    * If the block ends with a jump, this must be the strength of that jump.
    * (Otherwise, the jump would be dead and would already have been deleted.)
    *
    * If the block doesn't end with a jump, it can still be different from
    * strength_none if all paths before it lead to some jump (e.g. an if with
    * a return in one branch and a break in the other, when neither is being
    * lowered). Note that identical jumps are usually unified, though.
    */
   jump_strength min_strength;

   /* can anything clear the execute flag? */
   bool may_clear_execute_flag;

   block_record()
   {
      this->min_strength = strength_none;
      this->may_clear_execute_flag = false;
   }
};

struct loop_record
{
   ir_function_signature* signature;
   ir_loop* loop;

   /* used to avoid lowering the canonical break that represents already-lowered breaks */
   unsigned nesting_depth;
   bool in_if_at_the_end_of_the_loop;

   bool may_set_return_flag;

   ir_variable* break_flag;
   ir_variable* execute_flag; /* cleared to emulate continue */

   loop_record(ir_function_signature* p_signature = 0, ir_loop* p_loop = 0)
   {
      this->signature = p_signature;
      this->loop = p_loop;
      this->nesting_depth = 0;
      this->in_if_at_the_end_of_the_loop = false;
      this->may_set_return_flag = false;
      this->break_flag = 0;
      this->execute_flag = 0;
   }

   ir_variable* get_execute_flag()
   {
      /* also supported for the "function loop" */
      if(!this->execute_flag) {
         exec_list& list = this->loop ? this->loop->body_instructions : signature->body;
         this->execute_flag = new(this->signature) ir_variable(glsl_type::bool_type, "execute_flag", ir_var_temporary);
         list.push_head(new(this->signature) ir_assignment(new(this->signature) ir_dereference_variable(execute_flag), new(this->signature) ir_constant(true), 0));
         list.push_head(this->execute_flag);
      }
      return this->execute_flag;
   }

   ir_variable* get_break_flag()
   {
      assert(this->loop);
      if(!this->break_flag) {
         this->break_flag = new(this->signature) ir_variable(glsl_type::bool_type, "break_flag", ir_var_temporary);
         this->loop->insert_before(this->break_flag);
         this->loop->insert_before(new(this->signature) ir_assignment(new(this->signature) ir_dereference_variable(break_flag), new(this->signature) ir_constant(false), 0));
      }
      return this->break_flag;
   }
};

struct function_record
{
   ir_function_signature* signature;
   ir_variable* return_flag; /* used to break out of all loops and then jump to the return instruction */
   ir_variable* return_value;
   bool lower_return;
   unsigned nesting_depth;

   function_record(ir_function_signature* p_signature = 0,
                   bool lower_return = false)
   {
      this->signature = p_signature;
      this->return_flag = 0;
      this->return_value = 0;
      this->nesting_depth = 0;
      this->lower_return = lower_return;
   }

   ir_variable* get_return_flag()
   {
      if(!this->return_flag) {
         this->return_flag = new(this->signature) ir_variable(glsl_type::bool_type, "return_flag", ir_var_temporary);
         this->signature->body.push_head(new(this->signature) ir_assignment(new(this->signature) ir_dereference_variable(return_flag), new(this->signature) ir_constant(false), 0));
         this->signature->body.push_head(this->return_flag);
      }
      return this->return_flag;
   }

   ir_variable* get_return_value()
   {
      if(!this->return_value) {
         assert(!this->signature->return_type->is_void());
         return_value = new(this->signature) ir_variable(this->signature->return_type, "return_value", ir_var_temporary);
         this->signature->body.push_head(this->return_value);
      }
      return this->return_value;
   }
};

struct ir_lower_jumps_visitor : public ir_control_flow_visitor {
   bool progress;

   struct function_record function;
   struct loop_record loop;
   struct block_record block;

   bool pull_out_jumps;
   bool lower_continue;
   bool lower_break;
   bool lower_sub_return;
   bool lower_main_return;

   ir_lower_jumps_visitor()
   {
      this->progress = false;
   }

   void truncate_after_instruction(exec_node *ir)
   {
      if (!ir)
         return;

      while (!ir->get_next()->is_tail_sentinel()) {
         ((ir_instruction *)ir->get_next())->remove();
         this->progress = true;
      }
   }

   void move_outer_block_inside(ir_instruction *ir, exec_list *inner_block)
   {
      while (!ir->get_next()->is_tail_sentinel()) {
         ir_instruction *move_ir = (ir_instruction *)ir->get_next();

         move_ir->remove();
         inner_block->push_tail(move_ir);
      }
   }

   /**
    * Insert the instructions necessary to lower a return statement,
    * before the given return instruction.
    */
   void insert_lowered_return(ir_return *ir)
   {
      ir_variable* return_flag = this->function.get_return_flag();
      if(!this->function.signature->return_type->is_void()) {
         ir_variable* return_value = this->function.get_return_value();
         ir->insert_before(
            new(ir) ir_assignment(
               new (ir) ir_dereference_variable(return_value),
               ir->value));
      }
      ir->insert_before(
         new(ir) ir_assignment(
            new (ir) ir_dereference_variable(return_flag),
            new (ir) ir_constant(true)));
      this->loop.may_set_return_flag = true;
   }

   /**
    * If the given instruction is a return, lower it to instructions
    * that store the return value (if there is one), set the return
    * flag, and then break.
    *
    * It is safe to pass NULL to this function.
    */
   void lower_return_unconditionally(ir_instruction *ir)
   {
      if (get_jump_strength(ir) != strength_return) {
         return;
      }
      insert_lowered_return((ir_return*)ir);
      ir->replace_with(new(ir) ir_loop_jump(ir_loop_jump::jump_break));
   }

   /**
    * Create the necessary instruction to replace a break instruction.
    */
   ir_instruction *create_lowered_break()
   {
      void *ctx = this->function.signature;
      return new(ctx) ir_assignment(
         new(ctx) ir_dereference_variable(this->loop.get_break_flag()),
         new(ctx) ir_constant(true),
         0);
   }

   /**
    * If the given instruction is a break, lower it to an instruction
    * that sets the break flag, without consulting
    * should_lower_jump().
    *
    * It is safe to pass NULL to this function.
    */
   void lower_break_unconditionally(ir_instruction *ir)
   {
      if (get_jump_strength(ir) != strength_break) {
         return;
      }
      ir->replace_with(create_lowered_break());
   }

   /**
    * If the block ends in a conditional or unconditional break, lower
    * it, even though should_lower_jump() says it needn't be lowered.
    */
   void lower_final_breaks(exec_list *block)
   {
      ir_instruction *ir = (ir_instruction *) block->get_tail();
      lower_break_unconditionally(ir);
      ir_if *ir_if = ir->as_if();
      if (ir_if) {
         lower_break_unconditionally(
            (ir_instruction *) ir_if->then_instructions.get_tail());
         lower_break_unconditionally(
            (ir_instruction *) ir_if->else_instructions.get_tail());
      }
   }

   virtual void visit(class ir_loop_jump * ir)
   {
      truncate_after_instruction(ir);
      this->block.min_strength = ir->is_break() ? strength_break : strength_continue;
   }

   virtual void visit(class ir_return * ir)
   {
      truncate_after_instruction(ir);
      this->block.min_strength = strength_return;
   }

   virtual void visit(class ir_discard * ir)
   {
   }

   enum jump_strength get_jump_strength(ir_instruction* ir)
   {
      if(!ir)
         return strength_none;
      else if(ir->ir_type == ir_type_loop_jump) {
         if(((ir_loop_jump*)ir)->is_break())
            return strength_break;
         else
            return strength_continue;
      } else if(ir->ir_type == ir_type_return)
         return strength_return;
      else
         return strength_none;
   }

   bool should_lower_jump(ir_jump* ir)
   {
      unsigned strength = get_jump_strength(ir);
      bool lower;
      switch(strength)
      {
      case strength_none:
         lower = false; /* don't change this, code relies on it */
         break;
      case strength_continue:
         lower = lower_continue;
         break;
      case strength_break:
         assert(this->loop.loop);
         /* never lower "canonical break" */
         if(ir->get_next()->is_tail_sentinel() && (this->loop.nesting_depth == 0
               || (this->loop.nesting_depth == 1 && this->loop.in_if_at_the_end_of_the_loop)))
            lower = false;
         else
            lower = lower_break;
         break;
      case strength_return:
         /* never lower a return at the end of the function */
         if(this->function.nesting_depth == 0 && ir->get_next()->is_tail_sentinel())
            lower = false;
         else
            lower = this->function.lower_return;
         break;
      }
      return lower;
   }

   block_record visit_block(exec_list* list)
   {
      /* Note: since visiting a node may change that node's next
       * pointer, we can't use visit_exec_list(), because
       * visit_exec_list() caches the node's next pointer before
       * visiting it.  So we use foreach_list() instead.
       *
       * foreach_list() isn't safe if the node being visited gets
       * removed, but fortunately this visitor doesn't do that.
       */

      block_record saved_block = this->block;
      this->block = block_record();
      foreach_list(node, list) {
         ((ir_instruction *) node)->accept(this);
      }
      block_record ret = this->block;
      this->block = saved_block;
      return ret;
   }

   virtual void visit(ir_if *ir)
   {
      if(this->loop.nesting_depth == 0 && ir->get_next()->is_tail_sentinel())
         this->loop.in_if_at_the_end_of_the_loop = true;

      ++this->function.nesting_depth;
      ++this->loop.nesting_depth;

      block_record block_records[2];
      ir_jump* jumps[2];

      block_records[0] = visit_block(&ir->then_instructions);
      block_records[1] = visit_block(&ir->else_instructions);

retry: /* we get here if we put code after the if inside a branch */
      for(unsigned i = 0; i < 2; ++i) {
         exec_list& list = i ? ir->else_instructions : ir->then_instructions;
         jumps[i] = 0;
         if(!list.is_empty() && get_jump_strength((ir_instruction*)list.get_tail()))
            jumps[i] = (ir_jump*)list.get_tail();
      }

      for(;;) {
         jump_strength jump_strengths[2];

         for(unsigned i = 0; i < 2; ++i) {
            if(jumps[i]) {
               jump_strengths[i] = block_records[i].min_strength;
               assert(jump_strengths[i] == get_jump_strength(jumps[i]));
            } else
               jump_strengths[i] = strength_none;
         }

         /* move both jumps out if possible */
         if(pull_out_jumps && jump_strengths[0] == jump_strengths[1]) {
            bool unify = true;
            if(jump_strengths[0] == strength_continue)
               ir->insert_after(new(ir) ir_loop_jump(ir_loop_jump::jump_continue));
            else if(jump_strengths[0] == strength_break)
               ir->insert_after(new(ir) ir_loop_jump(ir_loop_jump::jump_break));
            /* FINISHME: unify returns with identical expressions */
            else if(jump_strengths[0] == strength_return && this->function.signature->return_type->is_void())
               ir->insert_after(new(ir) ir_return(NULL));
            else
               unify = false;

            if(unify) {
               jumps[0]->remove();
               jumps[1]->remove();
               this->progress = true;

               jumps[0] = 0;
               jumps[1] = 0;
               block_records[0].min_strength = strength_none;
               block_records[1].min_strength = strength_none;
               break;
            }
         }

         /* lower a jump: if both need to be lowered, start with the strongest
          * one, so that we might later unify the lowered version with the
          * other one
          */
         bool should_lower[2];
         for(unsigned i = 0; i < 2; ++i)
            should_lower[i] = should_lower_jump(jumps[i]);

         int lower;
         if(should_lower[1] && should_lower[0])
            lower = jump_strengths[1] > jump_strengths[0];
         else if(should_lower[0])
            lower = 0;
         else if(should_lower[1])
            lower = 1;
         else
            break;

         if(jump_strengths[lower] == strength_return) {
            /* To lower a return, we create a return flag (if the
             * function doesn't have one already) and add instructions
             * that: 1. store the return value (if this function has a
             * non-void return) and 2. set the return flag
             */
            insert_lowered_return((ir_return*)jumps[lower]);
            if(this->loop.loop) {
               ir_loop_jump* lowered = 0;
               lowered = new(ir) ir_loop_jump(ir_loop_jump::jump_break);
               block_records[lower].min_strength = strength_break;
               jumps[lower]->replace_with(lowered);
               jumps[lower] = lowered;
            } else
               goto lower_continue;
            this->progress = true;
         } else if(jump_strengths[lower] == strength_break) {
            /* We can't lower the break to an actual continue because that
             * would execute the increment.
             *
             * In the lowered code, we instead put the break check between the
             * loop body and the increment, which is impossible with a real
             * continue as currently defined by the GLSL IR.
             *
             * Smarter options (such as undoing the increment) are possible
             * but not worth implementing, because if break is lowered,
             * continue is almost surely lowered too.
             */
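            /* Illustrative sketch of the result: a lowered "break;" becomes
             *
             *    break_flag = true;
             *    execute_flag = false;
             *
             * and the single canonical break testing break_flag is emitted at
             * the end of the loop in visit(ir_loop *).
             */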
            jumps[lower]->insert_before(create_lowered_break());
            goto lower_continue;
         } else if(jump_strengths[lower] == strength_continue) {
lower_continue:
            ir_variable* execute_flag = this->loop.get_execute_flag();
            jumps[lower]->replace_with(new(ir) ir_assignment(new (ir) ir_dereference_variable(execute_flag), new (ir) ir_constant(false), 0));
            jumps[lower] = 0;
            block_records[lower].min_strength = strength_always_clears_execute_flag;
            block_records[lower].may_clear_execute_flag = true;
            this->progress = true;

            /* Let the loop run again, in case the other branch of the
             * if needs to be lowered too.
             */
         }
      }

      /* move a jump out if possible */
      if(pull_out_jumps) {
         int move_out = -1;
         if(jumps[0] && block_records[1].min_strength >= strength_continue)
            move_out = 0;
         else if(jumps[1] && block_records[0].min_strength >= strength_continue)
            move_out = 1;

         if(move_out >= 0)
         {
            jumps[move_out]->remove();
            ir->insert_after(jumps[move_out]);
            jumps[move_out] = 0;
            block_records[move_out].min_strength = strength_none;
            this->progress = true;
         }
      }

      if(block_records[0].min_strength < block_records[1].min_strength)
         this->block.min_strength = block_records[0].min_strength;
      else
         this->block.min_strength = block_records[1].min_strength;
      this->block.may_clear_execute_flag = this->block.may_clear_execute_flag || block_records[0].may_clear_execute_flag || block_records[1].may_clear_execute_flag;

      if(this->block.min_strength)
         truncate_after_instruction(ir);
      else if(this->block.may_clear_execute_flag)
      {
         int move_into = -1;
         if(block_records[0].min_strength && !block_records[1].may_clear_execute_flag)
            move_into = 1;
         else if(block_records[1].min_strength && !block_records[0].may_clear_execute_flag)
            move_into = 0;

         if(move_into >= 0) {
            assert(!block_records[move_into].min_strength && !block_records[move_into].may_clear_execute_flag); /* otherwise, we just truncated */

            exec_list* list = move_into ? &ir->else_instructions : &ir->then_instructions;
            exec_node* next = ir->get_next();
            if(!next->is_tail_sentinel()) {
               move_outer_block_inside(ir, list);

               exec_list list;
               list.head = next;
               block_records[move_into] = visit_block(&list);

               this->progress = true;
               goto retry;
            }
         } else {
            ir_instruction* ir_after;
            for(ir_after = (ir_instruction*)ir->get_next(); !ir_after->is_tail_sentinel();)
            {
               ir_if* ir_if = ir_after->as_if();
               if(ir_if && ir_if->else_instructions.is_empty()) {
                  ir_dereference_variable* ir_if_cond_deref = ir_if->condition->as_dereference_variable();
                  if(ir_if_cond_deref && ir_if_cond_deref->var == this->loop.execute_flag) {
                     ir_instruction* ir_next = (ir_instruction*)ir_after->get_next();
                     ir_after->insert_before(&ir_if->then_instructions);
                     ir_after->remove();
                     ir_after = ir_next;
                     continue;
                  }
               }
               ir_after = (ir_instruction*)ir_after->get_next();

               /* only set this if we find any unprotected instruction */
               this->progress = true;
            }

            if(!ir->get_next()->is_tail_sentinel()) {
               assert(this->loop.execute_flag);
               ir_if* if_execute = new(ir) ir_if(new(ir) ir_dereference_variable(this->loop.execute_flag));
               move_outer_block_inside(ir, &if_execute->then_instructions);
               ir->insert_after(if_execute);
            }
         }
      }
      --this->loop.nesting_depth;
      --this->function.nesting_depth;
   }

   virtual void visit(ir_loop *ir)
   {
      ++this->function.nesting_depth;
      loop_record saved_loop = this->loop;
      this->loop = loop_record(this->function.signature, ir);

      /* Recursively lower nested jumps.  This satisfies the
       * CONTAINED_JUMPS_LOWERED postcondition, except in the case of
       * an unconditional continue or return at the bottom of the
       * loop, which are handled below.
       */
      block_record body = visit_block(&ir->body_instructions);

      /* If the loop ends in an unconditional continue, eliminate it
       * because it is redundant.
       */
      ir_instruction *ir_last
         = (ir_instruction *) ir->body_instructions.get_tail();
      if (get_jump_strength(ir_last) == strength_continue) {
         ir_last->remove();
      }

      /* If the loop ends in an unconditional return, and we are
       * lowering returns, lower it.
       */
      if (this->function.lower_return)
         lower_return_unconditionally(ir_last);

      if(body.min_strength >= strength_break) {
         /* FINISHME: turn the loop into an if, or replace it with its body */
      }

      if(this->loop.break_flag) {
         /* We only get here if we are lowering breaks */
         assert (lower_break);

         /* If a break flag was generated while visiting the body of
          * the loop, then at least one break was lowered, so we need
          * to generate an if statement at the end of the loop that
          * does a "break" if the break flag is set.  The break we
          * generate won't violate the CONTAINED_JUMPS_LOWERED
          * postcondition, because should_lower_jump() always returns
          * false for a break that happens at the end of a loop.
          *
          * However, if the loop already ends in a conditional or
          * unconditional break, then we need to lower that break,
          * because it won't be at the end of the loop anymore.
          */
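         /* Sketch of the generated loop tail (illustrative only):
          *
          *    loop {
          *       ...body with breaks replaced by break_flag = true...
          *       if (break_flag)
          *          break;      // the single canonical break
          *    }
          */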
         lower_final_breaks(&ir->body_instructions);

         ir_if* break_if = new(ir) ir_if(new(ir) ir_dereference_variable(this->loop.break_flag));
         break_if->then_instructions.push_tail(new(ir) ir_loop_jump(ir_loop_jump::jump_break));
         ir->body_instructions.push_tail(break_if);
      }

      if(this->loop.may_set_return_flag) {
         assert(this->function.return_flag);
         ir_if* return_if = new(ir) ir_if(new(ir) ir_dereference_variable(this->function.return_flag));
         saved_loop.may_set_return_flag = true;
         if(saved_loop.loop)
            return_if->then_instructions.push_tail(new(ir) ir_loop_jump(ir_loop_jump::jump_break));
         else
            move_outer_block_inside(ir, &return_if->else_instructions);
         ir->insert_after(return_if);
      }

      this->loop = saved_loop;
      --this->function.nesting_depth;
   }

   virtual void visit(ir_function_signature *ir)
   {
      /* these are not strictly necessary */
      assert(!this->function.signature);
      assert(!this->loop.loop);

      bool lower_return;
      if (strcmp(ir->function_name(), "main") == 0)
         lower_return = lower_main_return;
      else
         lower_return = lower_sub_return;

      function_record saved_function = this->function;
      loop_record saved_loop = this->loop;
      this->function = function_record(ir, lower_return);
      this->loop = loop_record(ir);

      assert(!this->loop.loop);
      visit_block(&ir->body);

      /* If the body ended in an unconditional return of non-void,
       * then we don't need to lower it because it's the one canonical
       * return.
       *
       * If the body ended in a return of void, eliminate it because
       * it is redundant.
       */
      if (ir->return_type->is_void() &&
          get_jump_strength((ir_instruction *) ir->body.get_tail())) {
         ir_jump *jump = (ir_jump *) ir->body.get_tail();
         assert (jump->ir_type == ir_type_return);
         jump->remove();
      }

      if(this->function.return_value)
         ir->body.push_tail(new(ir) ir_return(new (ir) ir_dereference_variable(this->function.return_value)));

      this->loop = saved_loop;
      this->function = saved_function;
   }

   virtual void visit(class ir_function * ir)
   {
      visit_block(&ir->signatures);
   }
};

bool
do_lower_jumps(exec_list *instructions, bool pull_out_jumps, bool lower_sub_return, bool lower_main_return, bool lower_continue, bool lower_break)
{
   ir_lower_jumps_visitor v;
   v.pull_out_jumps = pull_out_jumps;
   v.lower_continue = lower_continue;
   v.lower_break = lower_break;
   v.lower_sub_return = lower_sub_return;
   v.lower_main_return = lower_main_return;

   bool progress = false;
   do {
      v.progress = false;
      visit_exec_list(instructions, &v);
      progress |= v.progress;
   } while (v.progress);

   /* v.progress is always false once the loop exits, so report whether any
    * iteration made progress.
    */
   return progress;
}
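
/* Example usage (illustrative sketch, not part of this file): lower every
 * kind of jump on a shader's instruction list, given the signature above:
 *
 *    bool any = do_lower_jumps(shader_ir,
 *                              true,   // pull_out_jumps
 *                              true,   // lower_sub_return
 *                              true,   // lower_main_return
 *                              true,   // lower_continue
 *                              true);  // lower_break
 *
 * Here "shader_ir" stands for whatever exec_list of IR instructions the
 * caller owns; the pass iterates internally until no further progress is
 * made.
 */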