1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
27 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
41 #include "basic-block.h"
42 #include "integrate.h"
48 #include "target-def.h"
49 #include "langhooks.h"
52 #ifndef TARGET_NO_PROTOTYPE
53 #define TARGET_NO_PROTOTYPE 0
56 #define min(A,B) ((A) < (B) ? (A) : (B))
57 #define max(A,B) ((A) > (B) ? (A) : (B))
61 enum processor_type rs6000_cpu;
62 struct rs6000_cpu_select rs6000_select[3] =
64 /* switch name, tune arch */
65 { (const char *)0, "--with-cpu=", 1, 1 },
66 { (const char *)0, "-mcpu=", 1, 1 },
67 { (const char *)0, "-mtune=", 1, 0 },
70 /* Size of long double */
71 const char *rs6000_long_double_size_string;
72 int rs6000_long_double_type_size;
74 /* Whether -mabi=altivec has appeared */
75 int rs6000_altivec_abi;
77 /* Whether VRSAVE instructions should be generated. */
78 int rs6000_altivec_vrsave;
80 /* String from -mvrsave= option. */
81 const char *rs6000_altivec_vrsave_string;
83 /* Set to non-zero once AIX common-mode calls have been defined. */
84 static int common_mode_defined;
86 /* Save information from a "cmpxx" operation until the branch or scc is
88 rtx rs6000_compare_op0, rs6000_compare_op1;
89 int rs6000_compare_fp_p;
91 /* Label number of label created for -mrelocatable, to call to so we can
92 get the address of the GOT section */
93 int rs6000_pic_labelno;
96 /* Which abi to adhere to */
97 const char *rs6000_abi_name = RS6000_ABI_NAME;
99 /* Semantics of the small data area */
100 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
102 /* Which small data model to use */
103 const char *rs6000_sdata_name = (char *)0;
105 /* Counter for labels which are to be placed in .fixup. */
106 int fixuplabelno = 0;
109 /* ABI enumeration available for subtarget to use. */
110 enum rs6000_abi rs6000_current_abi;
112 /* ABI string from -mabi= option. */
113 const char *rs6000_abi_string;
116 const char *rs6000_debug_name;
117 int rs6000_debug_stack; /* debug stack applications */
118 int rs6000_debug_arg; /* debug argument handling */
120 /* Flag to say the TOC is initialized */
122 char toc_label_name[10];
124 /* Alias set for saves and restores from the rs6000 stack. */
125 static int rs6000_sr_alias_set;
127 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
128 The only place that looks at this is rs6000_set_default_type_attributes;
129 everywhere else should rely on the presence or absence of a longcall
130 attribute on the function declaration. */
131 int rs6000_default_long_calls;
132 const char *rs6000_longcall_switch;
134 static void rs6000_add_gc_roots PARAMS ((void));
135 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
136 static rtx expand_block_move_mem PARAMS ((enum machine_mode, rtx, rtx));
137 static void validate_condition_mode
138 PARAMS ((enum rtx_code, enum machine_mode));
139 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
140 static void rs6000_maybe_dead PARAMS ((rtx));
141 static void rs6000_emit_stack_tie PARAMS ((void));
142 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
143 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
144 static unsigned rs6000_hash_constant PARAMS ((rtx));
145 static unsigned toc_hash_function PARAMS ((const void *));
146 static int toc_hash_eq PARAMS ((const void *, const void *));
147 static int toc_hash_mark_entry PARAMS ((void **, void *));
148 static void toc_hash_mark_table PARAMS ((void *));
149 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
150 static void rs6000_free_machine_status PARAMS ((struct function *));
151 static void rs6000_init_machine_status PARAMS ((struct function *));
152 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
153 static int rs6000_ra_ever_killed PARAMS ((void));
154 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
155 const struct attribute_spec rs6000_attribute_table[];
156 static void rs6000_set_default_type_attributes PARAMS ((tree));
157 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
158 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
159 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
160 HOST_WIDE_INT, HOST_WIDE_INT));
162 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
164 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
165 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
166 static void rs6000_elf_select_section PARAMS ((tree, int,
167 unsigned HOST_WIDE_INT));
168 static void rs6000_elf_unique_section PARAMS ((tree, int));
170 #ifdef OBJECT_FORMAT_COFF
171 static void xcoff_asm_named_section PARAMS ((const char *, unsigned int));
172 static void rs6000_xcoff_select_section PARAMS ((tree, int,
173 unsigned HOST_WIDE_INT));
174 static void rs6000_xcoff_unique_section PARAMS ((tree, int));
176 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
177 static int rs6000_adjust_priority PARAMS ((rtx, int));
178 static int rs6000_issue_rate PARAMS ((void));
180 static void rs6000_init_builtins PARAMS ((void));
181 static void altivec_init_builtins PARAMS ((void));
182 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
183 static rtx altivec_expand_builtin PARAMS ((tree, rtx));
184 static rtx altivec_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
185 static rtx altivec_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
186 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
187 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
188 static rtx altivec_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
189 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
190 static void rs6000_parse_abi_options PARAMS ((void));
191 static void rs6000_parse_vrsave_option PARAMS ((void));
192 static int first_altivec_reg_to_save PARAMS ((void));
193 static unsigned int compute_vrsave_mask PARAMS ((void));
194 static void is_altivec_return_reg PARAMS ((rtx, void *));
195 int vrsave_operation PARAMS ((rtx, enum machine_mode));
196 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
197 static void altivec_frame_fixup PARAMS ((rtx, rtx, HOST_WIDE_INT));
198 static int easy_vector_constant PARAMS ((rtx));
200 /* Default register names. */
201 char rs6000_reg_names[][8] =
203 "0", "1", "2", "3", "4", "5", "6", "7",
204 "8", "9", "10", "11", "12", "13", "14", "15",
205 "16", "17", "18", "19", "20", "21", "22", "23",
206 "24", "25", "26", "27", "28", "29", "30", "31",
207 "0", "1", "2", "3", "4", "5", "6", "7",
208 "8", "9", "10", "11", "12", "13", "14", "15",
209 "16", "17", "18", "19", "20", "21", "22", "23",
210 "24", "25", "26", "27", "28", "29", "30", "31",
211 "mq", "lr", "ctr","ap",
212 "0", "1", "2", "3", "4", "5", "6", "7",
214 /* AltiVec registers. */
215 "0", "1", "2", "3", "4", "5", "6", "7",
216 "8", "9", "10", "11", "12", "13", "14", "15",
217 "16", "17", "18", "19", "20", "21", "22", "23",
218 "24", "25", "26", "27", "28", "29", "30", "31",
222 #ifdef TARGET_REGNAMES
223 static const char alt_reg_names[][8] =
225 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
226 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
227 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
228 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
229 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
230 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
231 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
232 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
233 "mq", "lr", "ctr", "ap",
234 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
236 /* AltiVec registers. */
237 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
238 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
239 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
240 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
245 #ifndef MASK_STRICT_ALIGN
246 #define MASK_STRICT_ALIGN 0
249 /* Initialize the GCC target structure. */
250 #undef TARGET_ATTRIBUTE_TABLE
251 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
252 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
253 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
255 #undef TARGET_ASM_ALIGNED_DI_OP
256 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
258 /* Default unaligned ops are only provided for ELF. Find the ops needed
259 for non-ELF systems. */
260 #ifndef OBJECT_FORMAT_ELF
261 #ifdef OBJECT_FORMAT_COFF
262 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
264 #undef TARGET_ASM_UNALIGNED_HI_OP
265 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
266 #undef TARGET_ASM_UNALIGNED_SI_OP
267 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
268 #undef TARGET_ASM_UNALIGNED_DI_OP
269 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
272 #undef TARGET_ASM_UNALIGNED_HI_OP
273 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
274 #undef TARGET_ASM_UNALIGNED_SI_OP
275 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
279 /* This hook deals with fixups for relocatable code and DI-mode objects
281 #undef TARGET_ASM_INTEGER
282 #define TARGET_ASM_INTEGER rs6000_assemble_integer
284 #undef TARGET_ASM_FUNCTION_PROLOGUE
285 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
286 #undef TARGET_ASM_FUNCTION_EPILOGUE
287 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
290 #undef TARGET_SECTION_TYPE_FLAGS
291 #define TARGET_SECTION_TYPE_FLAGS rs6000_elf_section_type_flags
294 #undef TARGET_SCHED_ISSUE_RATE
295 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
296 #undef TARGET_SCHED_ADJUST_COST
297 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
298 #undef TARGET_SCHED_ADJUST_PRIORITY
299 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
301 #undef TARGET_INIT_BUILTINS
302 #define TARGET_INIT_BUILTINS rs6000_init_builtins
304 #undef TARGET_EXPAND_BUILTIN
305 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
307 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
308 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
310 struct gcc_target targetm = TARGET_INITIALIZER;
312 /* Override command line options. Mostly we process the processor
313 type and sometimes adjust other TARGET_ options. */
316 rs6000_override_options (default_cpu)
317 const char *default_cpu;
320 struct rs6000_cpu_select *ptr;
322 /* Simplify the entries below by making a mask for any POWER
323 variant and any PowerPC variant. */
325 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
326 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
327 | MASK_PPC_GFXOPT | MASK_POWERPC64)
328 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
332 const char *const name; /* Canonical processor name. */
333 const enum processor_type processor; /* Processor type enum value. */
334 const int target_enable; /* Target flags to enable. */
335 const int target_disable; /* Target flags to disable. */
336 } const processor_target_table[]
337 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
338 POWER_MASKS | POWERPC_MASKS},
339 {"power", PROCESSOR_POWER,
340 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
341 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
342 {"power2", PROCESSOR_POWER,
343 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
344 POWERPC_MASKS | MASK_NEW_MNEMONICS},
345 {"power3", PROCESSOR_PPC630,
346 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
347 POWER_MASKS | MASK_PPC_GPOPT},
348 {"powerpc", PROCESSOR_POWERPC,
349 MASK_POWERPC | MASK_NEW_MNEMONICS,
350 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
351 {"powerpc64", PROCESSOR_POWERPC64,
352 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
353 POWER_MASKS | POWERPC_OPT_MASKS},
354 {"rios", PROCESSOR_RIOS1,
355 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
356 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
357 {"rios1", PROCESSOR_RIOS1,
358 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
359 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
360 {"rsc", PROCESSOR_PPC601,
361 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
362 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
363 {"rsc1", PROCESSOR_PPC601,
364 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
365 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
366 {"rios2", PROCESSOR_RIOS2,
367 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
368 POWERPC_MASKS | MASK_NEW_MNEMONICS},
369 {"rs64a", PROCESSOR_RS64A,
370 MASK_POWERPC | MASK_NEW_MNEMONICS,
371 POWER_MASKS | POWERPC_OPT_MASKS},
372 {"401", PROCESSOR_PPC403,
373 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
374 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
375 {"403", PROCESSOR_PPC403,
376 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
377 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
378 {"405", PROCESSOR_PPC405,
379 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
380 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
381 {"505", PROCESSOR_MPCCORE,
382 MASK_POWERPC | MASK_NEW_MNEMONICS,
383 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
384 {"601", PROCESSOR_PPC601,
385 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
386 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
387 {"602", PROCESSOR_PPC603,
388 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
389 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
390 {"603", PROCESSOR_PPC603,
391 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
392 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
393 {"603e", PROCESSOR_PPC603,
394 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
395 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
396 {"ec603e", PROCESSOR_PPC603,
397 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
398 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
399 {"604", PROCESSOR_PPC604,
400 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
401 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
402 {"604e", PROCESSOR_PPC604e,
403 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
404 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
405 {"620", PROCESSOR_PPC620,
406 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
407 POWER_MASKS | MASK_PPC_GPOPT},
408 {"630", PROCESSOR_PPC630,
409 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
410 POWER_MASKS | MASK_PPC_GPOPT},
411 {"740", PROCESSOR_PPC750,
412 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
413 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
414 {"750", PROCESSOR_PPC750,
415 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
416 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
417 {"7400", PROCESSOR_PPC7400,
418 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
419 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
420 {"7450", PROCESSOR_PPC7450,
421 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
422 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
423 {"801", PROCESSOR_MPCCORE,
424 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
425 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
426 {"821", PROCESSOR_MPCCORE,
427 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
428 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
429 {"823", PROCESSOR_MPCCORE,
430 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
431 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
432 {"860", PROCESSOR_MPCCORE,
433 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
434 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};
436 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
438 /* Save current -mmultiple/-mno-multiple status. */
439 int multiple = TARGET_MULTIPLE;
440 /* Save current -mstring/-mno-string status. */
441 int string = TARGET_STRING;
443 /* Identify the processor type. */
444 rs6000_select[0].string = default_cpu;
445 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
447 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
449 ptr = &rs6000_select[i];
450 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
452 for (j = 0; j < ptt_size; j++)
453 if (! strcmp (ptr->string, processor_target_table[j].name))
456 rs6000_cpu = processor_target_table[j].processor;
460 target_flags |= processor_target_table[j].target_enable;
461 target_flags &= ~processor_target_table[j].target_disable;
467 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
471 /* If we are optimizing big endian systems for space, use the store
472 multiple instructions. */
473 if (BYTES_BIG_ENDIAN && optimize_size)
474 target_flags |= MASK_MULTIPLE;
476 /* If -mmultiple or -mno-multiple was explicitly used, don't
477 override with the processor default */
478 if (TARGET_MULTIPLE_SET)
479 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
481 /* If -mstring or -mno-string was explicitly used, don't override
482 with the processor default. */
483 if (TARGET_STRING_SET)
484 target_flags = (target_flags & ~MASK_STRING) | string;
486 /* Don't allow -mmultiple or -mstring on little endian systems
487 unless the cpu is a 750, because the hardware doesn't support the
488 instructions used in little endian mode, and causes an alignment
489 trap. The 750 does not cause an alignment trap (except when the
490 target is unaligned). */
492 if (! BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
496 target_flags &= ~MASK_MULTIPLE;
497 if (TARGET_MULTIPLE_SET)
498 warning ("-mmultiple is not supported on little endian systems");
503 target_flags &= ~MASK_STRING;
504 if (TARGET_STRING_SET)
505 warning ("-mstring is not supported on little endian systems");
509 if (flag_pic != 0 && DEFAULT_ABI == ABI_AIX)
514 warning ("-f%s ignored (all code is position independent)",
515 (flag_pic > 1) ? "PIC" : "pic");
518 #ifdef XCOFF_DEBUGGING_INFO
519 if (flag_function_sections && (write_symbols != NO_DEBUG)
520 && DEFAULT_ABI == ABI_AIX)
522 warning ("-ffunction-sections disabled on AIX when debugging");
523 flag_function_sections = 0;
526 if (flag_data_sections && (DEFAULT_ABI == ABI_AIX))
528 warning ("-fdata-sections not supported on AIX");
529 flag_data_sections = 0;
533 /* Set debug flags */
534 if (rs6000_debug_name)
536 if (! strcmp (rs6000_debug_name, "all"))
537 rs6000_debug_stack = rs6000_debug_arg = 1;
538 else if (! strcmp (rs6000_debug_name, "stack"))
539 rs6000_debug_stack = 1;
540 else if (! strcmp (rs6000_debug_name, "arg"))
541 rs6000_debug_arg = 1;
543 error ("unknown -mdebug-%s switch", rs6000_debug_name);
546 /* Set size of long double */
547 rs6000_long_double_type_size = 64;
548 if (rs6000_long_double_size_string)
551 int size = strtol (rs6000_long_double_size_string, &tail, 10);
552 if (*tail != '\0' || (size != 64 && size != 128))
553 error ("Unknown switch -mlong-double-%s",
554 rs6000_long_double_size_string);
556 rs6000_long_double_type_size = size;
559 /* Handle -mabi= options. */
560 rs6000_parse_abi_options ();
562 /* Handle -mvrsave= option. */
563 rs6000_parse_vrsave_option ();
565 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
566 using TARGET_OPTIONS to handle a toggle switch, but we're out of
567 bits in target_flags so TARGET_SWITCHES cannot be used.
568 Assumption here is that rs6000_longcall_switch points into the
569 text of the complete option, rather than being a copy, so we can
570 scan back for the presence or absence of the no- modifier. */
571 if (rs6000_longcall_switch)
573 const char *base = rs6000_longcall_switch;
574 while (base[-1] != 'm') base--;
576 if (*rs6000_longcall_switch != '\0')
577 error ("invalid option `%s'", base);
578 rs6000_default_long_calls = (base[0] != 'n');
581 #ifdef TARGET_REGNAMES
582 /* If the user desires alternate register names, copy in the
583 alternate names now. */
585 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
588 #ifdef SUBTARGET_OVERRIDE_OPTIONS
589 SUBTARGET_OVERRIDE_OPTIONS;
591 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
592 SUBSUBTARGET_OVERRIDE_OPTIONS;
595 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
596 If -maix-struct-return or -msvr4-struct-return was explicitly
597 used, don't override with the ABI default. */
598 if (!(target_flags & MASK_AIX_STRUCT_RET_SET))
600 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
601 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
603 target_flags |= MASK_AIX_STRUCT_RET;
606 /* Register global variables with the garbage collector. */
607 rs6000_add_gc_roots ();
609 /* Allocate an alias set for register saves & restores from stack. */
610 rs6000_sr_alias_set = new_alias_set ();
613 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
615 /* We can only guarantee the availability of DI pseudo-ops when
616 assembling for 64-bit targets. */
619 targetm.asm_out.aligned_op.di = NULL;
620 targetm.asm_out.unaligned_op.di = NULL;
623 /* Arrange to save and restore machine status around nested functions. */
624 init_machine_status = rs6000_init_machine_status;
625 free_machine_status = rs6000_free_machine_status;
628 /* Handle -mvrsave= options. */
630 rs6000_parse_vrsave_option ()
632 /* Generate VRSAVE instructions by default. */
633 if (rs6000_altivec_vrsave_string == 0
634 || ! strcmp (rs6000_altivec_vrsave_string, "yes"))
635 rs6000_altivec_vrsave = 1;
636 else if (! strcmp (rs6000_altivec_vrsave_string, "no"))
637 rs6000_altivec_vrsave = 0;
639 error ("unknown -mvrsave= option specified: '%s'",
640 rs6000_altivec_vrsave_string);
643 /* Handle -mabi= options. */
645 rs6000_parse_abi_options ()
647 if (rs6000_abi_string == 0)
649 else if (! strcmp (rs6000_abi_string, "altivec"))
650 rs6000_altivec_abi = 1;
651 else if (! strcmp (rs6000_abi_string, "no-altivec"))
652 rs6000_altivec_abi = 0;
654 error ("unknown ABI specified: '%s'", rs6000_abi_string);
658 optimization_options (level, size)
659 int level ATTRIBUTE_UNUSED;
660 int size ATTRIBUTE_UNUSED;
664 /* Do anything needed at the start of the asm file. */
667 rs6000_file_start (file, default_cpu)
669 const char *default_cpu;
673 const char *start = buffer;
674 struct rs6000_cpu_select *ptr;
676 if (flag_verbose_asm)
678 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
679 rs6000_select[0].string = default_cpu;
681 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
683 ptr = &rs6000_select[i];
684 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
686 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
692 switch (rs6000_sdata)
694 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
695 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
696 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
697 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
700 if (rs6000_sdata && g_switch_value)
702 fprintf (file, "%s -G %d", start, g_switch_value);
712 /* Return non-zero if this function is known to have a null epilogue. */
717 if (reload_completed)
719 rs6000_stack_t *info = rs6000_stack_info ();
721 if (info->first_gp_reg_save == 32
722 && info->first_fp_reg_save == 64
723 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
726 && info->vrsave_mask == 0
734 /* Returns 1 always. */
737 any_operand (op, mode)
738 rtx op ATTRIBUTE_UNUSED;
739 enum machine_mode mode ATTRIBUTE_UNUSED;
744 /* Returns 1 if op is the count register. */
746 count_register_operand (op, mode)
748 enum machine_mode mode ATTRIBUTE_UNUSED;
750 if (GET_CODE (op) != REG)
753 if (REGNO (op) == COUNT_REGISTER_REGNUM)
756 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
762 /* Returns 1 if op is an altivec register. */
764 altivec_register_operand (op, mode)
766 enum machine_mode mode ATTRIBUTE_UNUSED;
769 return (register_operand (op, mode)
770 && (GET_CODE (op) != REG
771 || REGNO (op) > FIRST_PSEUDO_REGISTER
772 || ALTIVEC_REGNO_P (REGNO (op))));
776 xer_operand (op, mode)
778 enum machine_mode mode ATTRIBUTE_UNUSED;
780 if (GET_CODE (op) != REG)
783 if (XER_REGNO_P (REGNO (op)))
789 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
790 by such constants completes more quickly. */
793 s8bit_cint_operand (op, mode)
795 enum machine_mode mode ATTRIBUTE_UNUSED;
797 return ( GET_CODE (op) == CONST_INT
798 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
801 /* Return 1 if OP is a constant that can fit in a D field. */
804 short_cint_operand (op, mode)
806 enum machine_mode mode ATTRIBUTE_UNUSED;
808 return (GET_CODE (op) == CONST_INT
809 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
812 /* Similar for an unsigned D field. */
815 u_short_cint_operand (op, mode)
817 enum machine_mode mode ATTRIBUTE_UNUSED;
819 return (GET_CODE (op) == CONST_INT
820 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
823 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
826 non_short_cint_operand (op, mode)
828 enum machine_mode mode ATTRIBUTE_UNUSED;
830 return (GET_CODE (op) == CONST_INT
831 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
834 /* Returns 1 if OP is a CONST_INT that is a positive value
835 and an exact power of 2. */
838 exact_log2_cint_operand (op, mode)
840 enum machine_mode mode ATTRIBUTE_UNUSED;
842 return (GET_CODE (op) == CONST_INT
844 && exact_log2 (INTVAL (op)) >= 0);
847 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
851 gpc_reg_operand (op, mode)
853 enum machine_mode mode;
855 return (register_operand (op, mode)
856 && (GET_CODE (op) != REG
857 || (REGNO (op) >= ARG_POINTER_REGNUM
858 && !XER_REGNO_P (REGNO (op)))
859 || REGNO (op) < MQ_REGNO));
862 /* Returns 1 if OP is either a pseudo-register or a register denoting a
866 cc_reg_operand (op, mode)
868 enum machine_mode mode;
870 return (register_operand (op, mode)
871 && (GET_CODE (op) != REG
872 || REGNO (op) >= FIRST_PSEUDO_REGISTER
873 || CR_REGNO_P (REGNO (op))));
876 /* Returns 1 if OP is either a pseudo-register or a register denoting a
877 CR field that isn't CR0. */
880 cc_reg_not_cr0_operand (op, mode)
882 enum machine_mode mode;
884 return (register_operand (op, mode)
885 && (GET_CODE (op) != REG
886 || REGNO (op) >= FIRST_PSEUDO_REGISTER
887 || CR_REGNO_NOT_CR0_P (REGNO (op))));
890 /* Returns 1 if OP is either a constant integer valid for a D-field or
891 a non-special register. If a register, it must be in the proper
892 mode unless MODE is VOIDmode. */
895 reg_or_short_operand (op, mode)
897 enum machine_mode mode;
899 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
902 /* Similar, except check if the negation of the constant would be
903 valid for a D-field. */
906 reg_or_neg_short_operand (op, mode)
908 enum machine_mode mode;
910 if (GET_CODE (op) == CONST_INT)
911 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
913 return gpc_reg_operand (op, mode);
916 /* Returns 1 if OP is either a constant integer valid for a DS-field or
917 a non-special register. If a register, it must be in the proper
918 mode unless MODE is VOIDmode. */
921 reg_or_aligned_short_operand (op, mode)
923 enum machine_mode mode;
925 if (gpc_reg_operand (op, mode))
927 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
934 /* Return 1 if the operand is either a register or an integer whose
935 high-order 16 bits are zero. */
938 reg_or_u_short_operand (op, mode)
940 enum machine_mode mode;
942 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
945 /* Return 1 is the operand is either a non-special register or ANY
949 reg_or_cint_operand (op, mode)
951 enum machine_mode mode;
953 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
956 /* Return 1 is the operand is either a non-special register or ANY
957 32-bit signed constant integer. */
960 reg_or_arith_cint_operand (op, mode)
962 enum machine_mode mode;
964 return (gpc_reg_operand (op, mode)
965 || (GET_CODE (op) == CONST_INT
966 #if HOST_BITS_PER_WIDE_INT != 32
967 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
968 < (unsigned HOST_WIDE_INT) 0x100000000ll)
973 /* Return 1 is the operand is either a non-special register or a 32-bit
974 signed constant integer valid for 64-bit addition. */
977 reg_or_add_cint64_operand (op, mode)
979 enum machine_mode mode;
981 return (gpc_reg_operand (op, mode)
982 || (GET_CODE (op) == CONST_INT
983 #if HOST_BITS_PER_WIDE_INT == 32
984 && INTVAL (op) < 0x7fff8000
986 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
992 /* Return 1 is the operand is either a non-special register or a 32-bit
993 signed constant integer valid for 64-bit subtraction. */
996 reg_or_sub_cint64_operand (op, mode)
998 enum machine_mode mode;
1000 return (gpc_reg_operand (op, mode)
1001 || (GET_CODE (op) == CONST_INT
1002 #if HOST_BITS_PER_WIDE_INT == 32
1003 && (- INTVAL (op)) < 0x7fff8000
1005 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1011 /* Return 1 is the operand is either a non-special register or ANY
1012 32-bit unsigned constant integer. */
1015 reg_or_logical_cint_operand (op, mode)
1017 enum machine_mode mode;
1019 if (GET_CODE (op) == CONST_INT)
1021 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1023 if (GET_MODE_BITSIZE (mode) <= 32)
1026 if (INTVAL (op) < 0)
1030 return ((INTVAL (op) & GET_MODE_MASK (mode)
1031 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1033 else if (GET_CODE (op) == CONST_DOUBLE)
1035 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1039 return CONST_DOUBLE_HIGH (op) == 0;
1042 return gpc_reg_operand (op, mode);
1045 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1048 got_operand (op, mode)
1050 enum machine_mode mode ATTRIBUTE_UNUSED;
1052 return (GET_CODE (op) == SYMBOL_REF
1053 || GET_CODE (op) == CONST
1054 || GET_CODE (op) == LABEL_REF);
1057 /* Return 1 if the operand is a simple references that can be loaded via
1058 the GOT (labels involving addition aren't allowed). */
1061 got_no_const_operand (op, mode)
1063 enum machine_mode mode ATTRIBUTE_UNUSED;
1065 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1068 /* Return the number of instructions it takes to form a constant in an
1069 integer register. */
1072 num_insns_constant_wide (value)
1073 HOST_WIDE_INT value;
1075 /* signed constant loadable with {cal|addi} */
1076 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1079 /* constant loadable with {cau|addis} */
1080 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1083 #if HOST_BITS_PER_WIDE_INT == 64
1084 else if (TARGET_POWERPC64)
1086 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1087 HOST_WIDE_INT high = value >> 31;
1089 if (high == 0 || high == -1)
1095 return num_insns_constant_wide (high) + 1;
1097 return (num_insns_constant_wide (high)
1098 + num_insns_constant_wide (low) + 1);
1107 num_insns_constant (op, mode)
1109 enum machine_mode mode;
1111 if (GET_CODE (op) == CONST_INT)
1113 #if HOST_BITS_PER_WIDE_INT == 64
1114 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1115 && mask64_operand (op, mode))
1119 return num_insns_constant_wide (INTVAL (op));
1122 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1127 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1128 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1129 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1132 else if (GET_CODE (op) == CONST_DOUBLE)
1138 int endian = (WORDS_BIG_ENDIAN == 0);
1140 if (mode == VOIDmode || mode == DImode)
1142 high = CONST_DOUBLE_HIGH (op);
1143 low = CONST_DOUBLE_LOW (op);
1147 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1148 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1150 low = l[1 - endian];
1154 return (num_insns_constant_wide (low)
1155 + num_insns_constant_wide (high));
1159 if (high == 0 && low >= 0)
1160 return num_insns_constant_wide (low);
1162 else if (high == -1 && low < 0)
1163 return num_insns_constant_wide (low);
1165 else if (mask64_operand (op, mode))
1169 return num_insns_constant_wide (high) + 1;
1172 return (num_insns_constant_wide (high)
1173 + num_insns_constant_wide (low) + 1);
1181 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1182 register with one instruction per word. We only do this if we can
1183 safely read CONST_DOUBLE_{LOW,HIGH}. */
1186 easy_fp_constant (op, mode)
1188 enum machine_mode mode;
1190 if (GET_CODE (op) != CONST_DOUBLE
1191 || GET_MODE (op) != mode
1192 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1195 /* Consider all constants with -msoft-float to be easy. */
1196 if (TARGET_SOFT_FLOAT && mode != DImode)
1199 /* If we are using V.4 style PIC, consider all constants to be hard. */
1200 if (flag_pic && DEFAULT_ABI == ABI_V4)
1203 #ifdef TARGET_RELOCATABLE
1204 /* Similarly if we are using -mrelocatable, consider all constants
1206 if (TARGET_RELOCATABLE)
1215 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1216 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1218 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1219 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1222 else if (mode == SFmode)
1227 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1228 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1230 return num_insns_constant_wide (l) == 1;
1233 else if (mode == DImode)
1234 return ((TARGET_POWERPC64
1235 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1236 || (num_insns_constant (op, DImode) <= 2));
1238 else if (mode == SImode)
1244 /* Return 1 if the operand is a CONST_INT and can be put into a
1245 register with one instruction. */
1248 easy_vector_constant (op)
1254 if (GET_CODE (op) != CONST_VECTOR)
1257 units = CONST_VECTOR_NUNITS (op);
1259 /* We can generate 0 easily. Look for that. */
1260 for (i = 0; i < units; ++i)
1262 elt = CONST_VECTOR_ELT (op, i);
1264 /* We could probably simplify this by just checking for equality
1265 with CONST0_RTX for the current mode, but let's be safe
1268 switch (GET_CODE (elt))
1271 if (INTVAL (elt) != 0)
1275 if (CONST_DOUBLE_LOW (elt) != 0 || CONST_DOUBLE_HIGH (elt) != 0)
1283 /* We could probably generate a few other constants trivially, but
1284 gcc doesn't generate them yet. FIXME later. */
1288 /* Return 1 if the operand is the constant 0. This works for scalars
1289 as well as vectors. */
1291 zero_constant (op, mode)
1293 enum machine_mode mode;
1295 return op == CONST0_RTX (mode);
1298 /* Return 1 if the operand is 0.0. */
1300 zero_fp_constant (op, mode)
1302 enum machine_mode mode;
1304 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1307 /* Return 1 if the operand is in volatile memory. Note that during
1308 the RTL generation phase, memory_operand does not return TRUE for
1309 volatile memory references. So this function allows us to
1310 recognize volatile references where its safe. */
1313 volatile_mem_operand (op, mode)
1315 enum machine_mode mode;
1317 if (GET_CODE (op) != MEM)
1320 if (!MEM_VOLATILE_P (op))
1323 if (mode != GET_MODE (op))
1326 if (reload_completed)
1327 return memory_operand (op, mode);
1329 if (reload_in_progress)
1330 return strict_memory_address_p (mode, XEXP (op, 0));
1332 return memory_address_p (mode, XEXP (op, 0));
1335 /* Return 1 if the operand is an offsettable memory operand. */
1338 offsettable_mem_operand (op, mode)
1340 enum machine_mode mode;
1342 return ((GET_CODE (op) == MEM)
1343 && offsettable_address_p (reload_completed || reload_in_progress,
1344 mode, XEXP (op, 0)));
1347 /* Return 1 if the operand is either an easy FP constant (see above) or
1351 mem_or_easy_const_operand (op, mode)
1353 enum machine_mode mode;
1355 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1358 /* Return 1 if the operand is either a non-special register or an item
1359 that can be used as the operand of a `mode' add insn. */
1362 add_operand (op, mode)
1364 enum machine_mode mode;
1366 if (GET_CODE (op) == CONST_INT)
1367 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1368 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1370 return gpc_reg_operand (op, mode);
1373 /* Return 1 if OP is a constant but not a valid add_operand. */
1376 non_add_cint_operand (op, mode)
1378 enum machine_mode mode ATTRIBUTE_UNUSED;
1380 return (GET_CODE (op) == CONST_INT
1381 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1382 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1385 /* Return 1 if the operand is a non-special register or a constant that
1386 can be used as the operand of an OR or XOR insn on the RS/6000. */
1389 logical_operand (op, mode)
1391 enum machine_mode mode;
1393 HOST_WIDE_INT opl, oph;
1395 if (gpc_reg_operand (op, mode))
1398 if (GET_CODE (op) == CONST_INT)
1400 opl = INTVAL (op) & GET_MODE_MASK (mode);
1402 #if HOST_BITS_PER_WIDE_INT <= 32
1403 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1407 else if (GET_CODE (op) == CONST_DOUBLE)
1409 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1412 opl = CONST_DOUBLE_LOW (op);
1413 oph = CONST_DOUBLE_HIGH (op);
1420 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1421 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1424 /* Return 1 if C is a constant that is not a logical operand (as
1425 above), but could be split into one. */
1428 non_logical_cint_operand (op, mode)
1430 enum machine_mode mode;
1432 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1433 && ! logical_operand (op, mode)
1434 && reg_or_logical_cint_operand (op, mode));
1437 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1438 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1439 Reject all ones and all zeros, since these should have been optimized
1440 away and confuse the making of MB and ME. */
1443 mask_operand (op, mode)
1445 enum machine_mode mode ATTRIBUTE_UNUSED;
1447 HOST_WIDE_INT c, lsb;
1449 if (GET_CODE (op) != CONST_INT)
1454 /* Fail in 64-bit mode if the mask wraps around because the upper
1455 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1456 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
1459 /* We don't change the number of transitions by inverting,
1460 so make sure we start with the LS bit zero. */
1464 /* Reject all zeros or all ones. */
1468 /* Find the first transition. */
1471 /* Invert to look for a second transition. */
1474 /* Erase first transition. */
1477 /* Find the second transition (if any). */
1480 /* Match if all the bits above are 1's (or c is zero). */
1484 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1485 It is if there are no more than one 1->0 or 0->1 transitions.
1486 Reject all ones and all zeros, since these should have been optimized
1487 away and confuse the making of MB and ME. */
1490 mask64_operand (op, mode)
1492 enum machine_mode mode;
1494 if (GET_CODE (op) == CONST_INT)
1496 HOST_WIDE_INT c, lsb;
1498 /* We don't change the number of transitions by inverting,
1499 so make sure we start with the LS bit zero. */
1504 /* Reject all zeros or all ones. */
1508 /* Find the transition, and check that all bits above are 1's. */
1512 else if (GET_CODE (op) == CONST_DOUBLE
1513 && (mode == VOIDmode || mode == DImode))
1515 HOST_WIDE_INT low, high, lsb;
1517 if (HOST_BITS_PER_WIDE_INT < 64)
1518 high = CONST_DOUBLE_HIGH (op);
1520 low = CONST_DOUBLE_LOW (op);
1523 if (HOST_BITS_PER_WIDE_INT < 64)
1530 if (HOST_BITS_PER_WIDE_INT >= 64 || high == 0)
1534 return high == -lsb;
1538 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
1544 /* Return 1 if the operand is either a non-special register or a constant
1545 that can be used as the operand of a PowerPC64 logical AND insn. */
1548 and64_operand (op, mode)
1550 enum machine_mode mode;
1552 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1553 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
1555 return (logical_operand (op, mode) || mask64_operand (op, mode));
1558 /* Return 1 if the operand is either a non-special register or a
1559 constant that can be used as the operand of an RS/6000 logical AND insn. */
1562 and_operand (op, mode)
1564 enum machine_mode mode;
1566 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
1567 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
1569 return (logical_operand (op, mode) || mask_operand (op, mode));
1572 /* Return 1 if the operand is a general register or memory operand. */
1575 reg_or_mem_operand (op, mode)
1577 enum machine_mode mode;
1579 return (gpc_reg_operand (op, mode)
1580 || memory_operand (op, mode)
1581 || volatile_mem_operand (op, mode));
1584 /* Return 1 if the operand is a general register or memory operand without
1585 pre_inc or pre_dec which produces invalid form of PowerPC lwa
1589 lwa_operand (op, mode)
1591 enum machine_mode mode;
1595 if (reload_completed && GET_CODE (inner) == SUBREG)
1596 inner = SUBREG_REG (inner);
1598 return gpc_reg_operand (inner, mode)
1599 || (memory_operand (inner, mode)
1600 && GET_CODE (XEXP (inner, 0)) != PRE_INC
1601 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
1602 && (GET_CODE (XEXP (inner, 0)) != PLUS
1603 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
1604 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
1607 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
1610 symbol_ref_operand (op, mode)
1612 enum machine_mode mode;
1614 if (mode != VOIDmode && GET_MODE (op) != mode)
1617 return (GET_CODE (op) == SYMBOL_REF);
1620 /* Return 1 if the operand, used inside a MEM, is a valid first argument
1621 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
1624 call_operand (op, mode)
1626 enum machine_mode mode;
1628 if (mode != VOIDmode && GET_MODE (op) != mode)
1631 return (GET_CODE (op) == SYMBOL_REF
1632 || (GET_CODE (op) == REG
1633 && (REGNO (op) == LINK_REGISTER_REGNUM
1634 || REGNO (op) == COUNT_REGISTER_REGNUM
1635 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
1638 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
1639 this file and the function is not weakly defined. */
1642 current_file_function_operand (op, mode)
1644 enum machine_mode mode ATTRIBUTE_UNUSED;
1646 return (GET_CODE (op) == SYMBOL_REF
1647 && (SYMBOL_REF_FLAG (op)
1648 || (op == XEXP (DECL_RTL (current_function_decl), 0)
1649 && ! DECL_WEAK (current_function_decl))));
1652 /* Return 1 if this operand is a valid input for a move insn. */
1655 input_operand (op, mode)
1657 enum machine_mode mode;
1659 /* Memory is always valid. */
1660 if (memory_operand (op, mode))
1663 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
1664 if (GET_CODE (op) == CONSTANT_P_RTX)
1667 /* For floating-point, easy constants are valid. */
1668 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1670 && easy_fp_constant (op, mode))
1673 /* Allow any integer constant. */
1674 if (GET_MODE_CLASS (mode) == MODE_INT
1675 && (GET_CODE (op) == CONST_INT
1676 || GET_CODE (op) == CONST_DOUBLE))
1679 /* For floating-point or multi-word mode, the only remaining valid type
1681 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1682 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1683 return register_operand (op, mode);
1685 /* The only cases left are integral modes one word or smaller (we
1686 do not get called for MODE_CC values). These can be in any
1688 if (register_operand (op, mode))
1691 /* A SYMBOL_REF referring to the TOC is valid. */
1692 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (op))
1695 /* A constant pool expression (relative to the TOC) is valid */
1696 if (TOC_RELATIVE_EXPR_P (op))
1699 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
1701 if (DEFAULT_ABI == ABI_V4
1702 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
1703 && small_data_operand (op, Pmode))
1709 /* Return 1 for an operand in small memory on V.4/eabi. */
1712 small_data_operand (op, mode)
1713 rtx op ATTRIBUTE_UNUSED;
1714 enum machine_mode mode ATTRIBUTE_UNUSED;
1719 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
1722 if (DEFAULT_ABI != ABI_V4)
1725 if (GET_CODE (op) == SYMBOL_REF)
1728 else if (GET_CODE (op) != CONST
1729 || GET_CODE (XEXP (op, 0)) != PLUS
1730 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
1731 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
1736 rtx sum = XEXP (op, 0);
1737 HOST_WIDE_INT summand;
1739 /* We have to be careful here, because it is the referenced address
1740 that must be 32k from _SDA_BASE_, not just the symbol. */
1741 summand = INTVAL (XEXP (sum, 1));
1742 if (summand < 0 || summand > g_switch_value)
1745 sym_ref = XEXP (sum, 0);
1748 if (*XSTR (sym_ref, 0) != '@')
1759 constant_pool_expr_1 (op, have_sym, have_toc)
1764 switch (GET_CODE(op))
1767 if (CONSTANT_POOL_ADDRESS_P (op))
1769 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
1777 else if (! strcmp (XSTR (op, 0), toc_label_name))
1786 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
1787 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
1789 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
1798 constant_pool_expr_p (op)
1803 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
1807 toc_relative_expr_p (op)
1812 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
1815 /* Try machine-dependent ways of modifying an illegitimate address
1816 to be legitimate. If we find one, return the new, valid address.
1817 This is used from only one place: `memory_address' in explow.c.
1819 OLDX is the address as it was before break_out_memory_refs was
1820 called. In some cases it is useful to look at this to decide what
1823 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
1825 It is always safe for this function to do nothing. It exists to
1826 recognize opportunities to optimize the output.
1828 On RS/6000, first check for the sum of a register with a constant
1829 integer that is out of range. If so, generate code to add the
1830 constant with the low-order 16 bits masked to the register and force
1831 this result into another register (this can be done with `cau').
1832 Then generate an address of REG+(CONST&0xffff), allowing for the
1833 possibility of bit 16 being a one.
1835 Then check for the sum of a register and something not constant, try to
1836 load the other things into a register and return the sum. */
1838 rs6000_legitimize_address (x, oldx, mode)
1840 rtx oldx ATTRIBUTE_UNUSED;
1841 enum machine_mode mode;
1843 if (GET_CODE (x) == PLUS
1844 && GET_CODE (XEXP (x, 0)) == REG
1845 && GET_CODE (XEXP (x, 1)) == CONST_INT
1846 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
1848 HOST_WIDE_INT high_int, low_int;
1850 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
1851 high_int = INTVAL (XEXP (x, 1)) - low_int;
1852 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
1853 GEN_INT (high_int)), 0);
1854 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
1856 else if (GET_CODE (x) == PLUS
1857 && GET_CODE (XEXP (x, 0)) == REG
1858 && GET_CODE (XEXP (x, 1)) != CONST_INT
1859 && GET_MODE_NUNITS (mode) == 1
1860 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
1861 && (TARGET_POWERPC64 || mode != DImode)
1864 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
1865 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
1867 else if (ALTIVEC_VECTOR_MODE (mode))
1871 /* Make sure both operands are registers. */
1872 if (GET_CODE (x) == PLUS)
1873 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
1874 force_reg (Pmode, XEXP (x, 1)));
1876 reg = force_reg (Pmode, x);
1879 else if (TARGET_ELF && TARGET_32BIT && TARGET_NO_TOC && ! flag_pic
1880 && GET_CODE (x) != CONST_INT
1881 && GET_CODE (x) != CONST_DOUBLE
1883 && GET_MODE_NUNITS (mode) == 1
1884 && (GET_MODE_BITSIZE (mode) <= 32
1885 || (TARGET_HARD_FLOAT && mode == DFmode)))
1887 rtx reg = gen_reg_rtx (Pmode);
1888 emit_insn (gen_elf_high (reg, (x)));
1889 return gen_rtx_LO_SUM (Pmode, reg, (x));
1891 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
1893 && GET_CODE (x) != CONST_INT
1894 && GET_CODE (x) != CONST_DOUBLE
1896 && (TARGET_HARD_FLOAT || mode != DFmode)
1900 rtx reg = gen_reg_rtx (Pmode);
1901 emit_insn (gen_macho_high (reg, (x)));
1902 return gen_rtx_LO_SUM (Pmode, reg, (x));
1905 && CONSTANT_POOL_EXPR_P (x)
1906 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
1908 return create_TOC_reference (x);
1914 /* The convention appears to be to define this wherever it is used.
1915 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
1916 is now used here. */
1917 #ifndef REG_MODE_OK_FOR_BASE_P
1918 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
1921 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
1922 replace the input X, or the original X if no replacement is called for.
1923 The output parameter *WIN is 1 if the calling macro should goto WIN,
1926 For RS/6000, we wish to handle large displacements off a base
1927 register by splitting the addend across an addiu/addis and the mem insn.
1928 This cuts number of extra insns needed from 3 to 1.
1930 On Darwin, we use this to generate code for floating point constants.
1931 A movsf_low is generated so we wind up with 2 instructions rather than 3.
1932 The Darwin code is inside #if TARGET_MACHO because only then is
1933 machopic_function_base_name() defined. */
1935 rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
1937 enum machine_mode mode;
1940 int ind_levels ATTRIBUTE_UNUSED;
1943 /* We must recognize output that we have already generated ourselves. */
1944 if (GET_CODE (x) == PLUS
1945 && GET_CODE (XEXP (x, 0)) == PLUS
1946 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
1947 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
1948 && GET_CODE (XEXP (x, 1)) == CONST_INT)
1950 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1951 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
1952 opnum, (enum reload_type)type);
1958 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
1959 && GET_CODE (x) == LO_SUM
1960 && GET_CODE (XEXP (x, 0)) == PLUS
1961 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
1962 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
1963 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
1964 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
1965 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
1966 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
1967 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
1969 /* Result of previous invocation of this function on Darwin
1970 floating point constant. */
1971 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
1972 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
1973 opnum, (enum reload_type)type);
1978 if (GET_CODE (x) == PLUS
1979 && GET_CODE (XEXP (x, 0)) == REG
1980 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
1981 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
1982 && GET_CODE (XEXP (x, 1)) == CONST_INT
1983 && !ALTIVEC_VECTOR_MODE (mode))
1985 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
1986 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
1988 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
1990 /* Check for 32-bit overflow. */
1991 if (high + low != val)
1997 /* Reload the high part into a base reg; leave the low part
1998 in the mem directly. */
2000 x = gen_rtx_PLUS (GET_MODE (x),
2001 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
2005 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2006 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2007 opnum, (enum reload_type)type);
2012 if (GET_CODE (x) == SYMBOL_REF
2013 && DEFAULT_ABI == ABI_DARWIN
2014 && !ALTIVEC_VECTOR_MODE (mode)
2017 /* Darwin load of floating point constant. */
2018 rtx offset = gen_rtx (CONST, Pmode,
2019 gen_rtx (MINUS, Pmode, x,
2020 gen_rtx (SYMBOL_REF, Pmode,
2021 machopic_function_base_name ())));
2022 x = gen_rtx (LO_SUM, GET_MODE (x),
2023 gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
2024 gen_rtx (HIGH, Pmode, offset)), offset);
2025 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2026 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2027 opnum, (enum reload_type)type);
2033 && CONSTANT_POOL_EXPR_P (x)
2034 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
2036 (x) = create_TOC_reference (x);
2044 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2045 that is a valid memory address for an instruction.
2046 The MODE argument is the machine mode for the MEM expression
2047 that wants to use this address.
2049 On the RS/6000, there are four valid address: a SYMBOL_REF that
2050 refers to a constant pool entry of an address (or the sum of it
2051 plus a constant), a short (16-bit signed) constant plus a register,
2052 the sum of two registers, or a register indirect, possibly with an
2053 auto-increment. For DFmode and DImode with an constant plus register,
2054 we must ensure that both words are addressable or PowerPC64 with offset
2057 For modes spanning multiple registers (DFmode in 32-bit GPRs,
2058 32-bit DImode, TImode), indexed addressing cannot be used because
2059 adjacent memory cells are accessed by adding word-sized offsets
2060 during assembly output. */
2062 rs6000_legitimate_address (mode, x, reg_ok_strict)
2063 enum machine_mode mode;
2067 if (LEGITIMATE_INDIRECT_ADDRESS_P (x, reg_ok_strict))
2069 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2070 && !ALTIVEC_VECTOR_MODE (mode)
2072 && LEGITIMATE_INDIRECT_ADDRESS_P (XEXP (x, 0), reg_ok_strict))
2074 if (LEGITIMATE_SMALL_DATA_P (mode, x))
2076 if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
2078 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2080 && GET_CODE (x) == PLUS
2081 && GET_CODE (XEXP (x, 0)) == REG
2082 && XEXP (x, 0) == virtual_stack_vars_rtx
2083 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2085 if (LEGITIMATE_OFFSET_ADDRESS_P (mode, x, reg_ok_strict))
2088 && (TARGET_HARD_FLOAT || TARGET_POWERPC64 || mode != DFmode)
2089 && (TARGET_POWERPC64 || mode != DImode)
2090 && LEGITIMATE_INDEXED_ADDRESS_P (x, reg_ok_strict))
2092 if (LEGITIMATE_LO_SUM_ADDRESS_P (mode, x, reg_ok_strict))
2097 /* Try to output insns to set TARGET equal to the constant C if it can
2098 be done in less than N insns. Do all computations in MODE.
2099 Returns the place where the output has been placed if it can be
2100 done and the insns have been emitted. If it would take more than N
2101 insns, zero is returned and no insns and emitted. */
2104 rs6000_emit_set_const (dest, mode, source, n)
2106 enum machine_mode mode;
2107 int n ATTRIBUTE_UNUSED;
2109 HOST_WIDE_INT c0, c1;
2111 if (mode == QImode || mode == HImode || mode == SImode)
2114 dest = gen_reg_rtx (mode);
2115 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
2119 if (GET_CODE (source) == CONST_INT)
2121 c0 = INTVAL (source);
2124 else if (GET_CODE (source) == CONST_DOUBLE)
2126 #if HOST_BITS_PER_WIDE_INT >= 64
2127 c0 = CONST_DOUBLE_LOW (source);
2130 c0 = CONST_DOUBLE_LOW (source);
2131 c1 = CONST_DOUBLE_HIGH (source);
2137 return rs6000_emit_set_long_const (dest, c0, c1);
2140 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
2141 fall back to a straight forward decomposition. We do this to avoid
2142 exponential run times encountered when looking for longer sequences
2143 with rs6000_emit_set_const. */
2145 rs6000_emit_set_long_const (dest, c1, c2)
2147 HOST_WIDE_INT c1, c2;
2149 if (!TARGET_POWERPC64)
2151 rtx operand1, operand2;
2153 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
2155 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
2157 emit_move_insn (operand1, GEN_INT (c1));
2158 emit_move_insn (operand2, GEN_INT (c2));
2162 HOST_WIDE_INT ud1, ud2, ud3, ud4;
2165 ud2 = (c1 & 0xffff0000) >> 16;
2166 #if HOST_BITS_PER_WIDE_INT >= 64
2170 ud4 = (c2 & 0xffff0000) >> 16;
2172 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
2173 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
2176 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
2178 emit_move_insn (dest, GEN_INT (ud1));
2181 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
2182 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
2185 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
2188 emit_move_insn (dest, GEN_INT (ud2 << 16));
2190 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2192 else if ((ud4 == 0xffff && (ud3 & 0x8000))
2193 || (ud4 == 0 && ! (ud3 & 0x8000)))
2196 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
2199 emit_move_insn (dest, GEN_INT (ud3 << 16));
2202 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
2203 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
2205 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2210 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
2213 emit_move_insn (dest, GEN_INT (ud4 << 16));
2216 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
2218 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
2220 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
2221 GEN_INT (ud2 << 16)));
2223 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
2229 /* Emit a move from SOURCE to DEST in mode MODE. */
2231 rs6000_emit_move (dest, source, mode)
2234 enum machine_mode mode;
2238 operands[1] = source;
2240 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
2241 if (GET_CODE (operands[1]) == CONST_DOUBLE
2242 && ! FLOAT_MODE_P (mode)
2243 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2245 /* FIXME. This should never happen. */
2246 /* Since it seems that it does, do the safe thing and convert
2248 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
2250 if (GET_CODE (operands[1]) == CONST_DOUBLE
2251 && ! FLOAT_MODE_P (mode)
2252 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
2253 && CONST_DOUBLE_LOW (operands[1]) >= 0)
2254 || (CONST_DOUBLE_HIGH (operands[1]) == -1
2255 && CONST_DOUBLE_LOW (operands[1]) < 0)))
2258 /* Check if GCC is setting up a block move that will end up using FP
2259 registers as temporaries. We must make sure this is acceptable. */
2260 if (GET_CODE (operands[0]) == MEM
2261 && GET_CODE (operands[1]) == MEM
2263 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
2264 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
2265 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
2266 ? 32 : MEM_ALIGN (operands[0])))
2267 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
2269 : MEM_ALIGN (operands[1]))))
2270 && ! MEM_VOLATILE_P (operands [0])
2271 && ! MEM_VOLATILE_P (operands [1]))
2273 emit_move_insn (adjust_address (operands[0], SImode, 0),
2274 adjust_address (operands[1], SImode, 0));
2275 emit_move_insn (adjust_address (operands[0], SImode, 4),
2276 adjust_address (operands[1], SImode, 4));
2280 if (! no_new_pseudos && GET_CODE (operands[0]) != REG)
2281 operands[1] = force_reg (mode, operands[1]);
2283 if (mode == SFmode && ! TARGET_POWERPC && TARGET_HARD_FLOAT
2284 && GET_CODE (operands[0]) == MEM)
2288 if (reload_in_progress || reload_completed)
2289 regnum = true_regnum (operands[1]);
2290 else if (GET_CODE (operands[1]) == REG)
2291 regnum = REGNO (operands[1]);
2295 /* If operands[1] is a register, on POWER it may have
2296 double-precision data in it, so truncate it to single
2298 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
2301 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
2302 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
2303 operands[1] = newreg;
2307 /* Handle the case where reload calls us with an invalid address;
2308 and the case of CONSTANT_P_RTX. */
2309 if (!VECTOR_MODE_P (mode)
2310 && (! general_operand (operands[1], mode)
2311 || ! nonimmediate_operand (operands[0], mode)
2312 || GET_CODE (operands[1]) == CONSTANT_P_RTX))
2314 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2318 /* FIXME: In the long term, this switch statement should go away
2319 and be replaced by a sequence of tests based on things like
2325 if (CONSTANT_P (operands[1])
2326 && GET_CODE (operands[1]) != CONST_INT)
2327 operands[1] = force_const_mem (mode, operands[1]);
2333 if (CONSTANT_P (operands[1])
2334 && ! easy_fp_constant (operands[1], mode))
2335 operands[1] = force_const_mem (mode, operands[1]);
2342 if (CONSTANT_P (operands[1])
2343 && !easy_vector_constant (operands[1]))
2344 operands[1] = force_const_mem (mode, operands[1]);
2349 /* Use default pattern for address of ELF small data */
2352 && DEFAULT_ABI == ABI_V4
2353 && (GET_CODE (operands[1]) == SYMBOL_REF
2354 || GET_CODE (operands[1]) == CONST)
2355 && small_data_operand (operands[1], mode))
2357 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2361 if (DEFAULT_ABI == ABI_V4
2362 && mode == Pmode && mode == SImode
2363 && flag_pic == 1 && got_operand (operands[1], mode))
2365 emit_insn (gen_movsi_got (operands[0], operands[1]));
2369 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
2370 && TARGET_NO_TOC && ! flag_pic
2372 && CONSTANT_P (operands[1])
2373 && GET_CODE (operands[1]) != HIGH
2374 && GET_CODE (operands[1]) != CONST_INT)
2376 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
2378 /* If this is a function address on -mcall-aixdesc,
2379 convert it to the address of the descriptor. */
2380 if (DEFAULT_ABI == ABI_AIX
2381 && GET_CODE (operands[1]) == SYMBOL_REF
2382 && XSTR (operands[1], 0)[0] == '.')
2384 const char *name = XSTR (operands[1], 0);
2386 while (*name == '.')
2388 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
2389 CONSTANT_POOL_ADDRESS_P (new_ref)
2390 = CONSTANT_POOL_ADDRESS_P (operands[1]);
2391 SYMBOL_REF_FLAG (new_ref) = SYMBOL_REF_FLAG (operands[1]);
2392 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
2393 operands[1] = new_ref;
2396 if (DEFAULT_ABI == ABI_DARWIN)
2398 emit_insn (gen_macho_high (target, operands[1]));
2399 emit_insn (gen_macho_low (operands[0], target, operands[1]));
2403 emit_insn (gen_elf_high (target, operands[1]));
2404 emit_insn (gen_elf_low (operands[0], target, operands[1]));
2408 /* If this is a SYMBOL_REF that refers to a constant pool entry,
2409 and we have put it in the TOC, we just need to make a TOC-relative
2412 && GET_CODE (operands[1]) == SYMBOL_REF
2413 && CONSTANT_POOL_EXPR_P (operands[1])
2414 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
2415 get_pool_mode (operands[1])))
2417 operands[1] = create_TOC_reference (operands[1]);
2419 else if (mode == Pmode
2420 && CONSTANT_P (operands[1])
2421 && ((GET_CODE (operands[1]) != CONST_INT
2422 && ! easy_fp_constant (operands[1], mode))
2423 || (GET_CODE (operands[1]) == CONST_INT
2424 && num_insns_constant (operands[1], mode) > 2)
2425 || (GET_CODE (operands[0]) == REG
2426 && FP_REGNO_P (REGNO (operands[0]))))
2427 && GET_CODE (operands[1]) != HIGH
2428 && ! LEGITIMATE_CONSTANT_POOL_ADDRESS_P (operands[1])
2429 && ! TOC_RELATIVE_EXPR_P (operands[1]))
2431 /* Emit a USE operation so that the constant isn't deleted if
2432 expensive optimizations are turned on because nobody
2433 references it. This should only be done for operands that
2434 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
2435 This should not be done for operands that contain LABEL_REFs.
2436 For now, we just handle the obvious case. */
2437 if (GET_CODE (operands[1]) != LABEL_REF)
2438 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
2441 /* Darwin uses a special PIC legitimizer. */
2442 if (DEFAULT_ABI == ABI_DARWIN && flag_pic)
2445 rs6000_machopic_legitimize_pic_address (operands[1], mode,
2447 if (operands[0] != operands[1])
2448 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2453 /* If we are to limit the number of things we put in the TOC and
2454 this is a symbol plus a constant we can add in one insn,
2455 just put the symbol in the TOC and add the constant. Don't do
2456 this if reload is in progress. */
2457 if (GET_CODE (operands[1]) == CONST
2458 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
2459 && GET_CODE (XEXP (operands[1], 0)) == PLUS
2460 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
2461 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
2462 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
2463 && ! side_effects_p (operands[0]))
2466 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
2467 rtx other = XEXP (XEXP (operands[1], 0), 1);
2469 sym = force_reg (mode, sym);
2471 emit_insn (gen_addsi3 (operands[0], sym, other));
2473 emit_insn (gen_adddi3 (operands[0], sym, other));
2477 operands[1] = force_const_mem (mode, operands[1]);
2480 && CONSTANT_POOL_EXPR_P (XEXP (operands[1], 0))
2481 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
2482 get_pool_constant (XEXP (operands[1], 0)),
2483 get_pool_mode (XEXP (operands[1], 0))))
2486 = gen_rtx_MEM (mode,
2487 create_TOC_reference (XEXP (operands[1], 0)));
2488 set_mem_alias_set (operands[1], get_TOC_alias_set ());
2489 RTX_UNCHANGING_P (operands[1]) = 1;
2495 if (GET_CODE (operands[0]) == MEM
2496 && GET_CODE (XEXP (operands[0], 0)) != REG
2497 && ! reload_in_progress)
2499 = replace_equiv_address (operands[0],
2500 copy_addr_to_reg (XEXP (operands[0], 0)));
2502 if (GET_CODE (operands[1]) == MEM
2503 && GET_CODE (XEXP (operands[1], 0)) != REG
2504 && ! reload_in_progress)
2506 = replace_equiv_address (operands[1],
2507 copy_addr_to_reg (XEXP (operands[1], 0)));
2514 /* Above, we may have called force_const_mem which may have returned
2515 an invalid address. If we can, fix this up; otherwise, reload will
2516 have to deal with it. */
2517 if (GET_CODE (operands[1]) == MEM
2518 && ! memory_address_p (mode, XEXP (operands[1], 0))
2519 && ! reload_in_progress)
2520 operands[1] = adjust_address (operands[1], mode, 0);
2522 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2526 /* Initialize a variable CUM of type CUMULATIVE_ARGS
2527 for a call to a function whose data type is FNTYPE.
2528 For a library call, FNTYPE is 0.
2530 For incoming args we set the number of arguments in the prototype large
2531 so we never return a PARALLEL. */
2534 init_cumulative_args (cum, fntype, libname, incoming)
2535 CUMULATIVE_ARGS *cum;
2537 rtx libname ATTRIBUTE_UNUSED;
2540 static CUMULATIVE_ARGS zero_cumulative;
2542 *cum = zero_cumulative;
2544 cum->fregno = FP_ARG_MIN_REG;
2545 cum->vregno = ALTIVEC_ARG_MIN_REG;
2546 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
2547 cum->call_cookie = CALL_NORMAL;
2548 cum->sysv_gregno = GP_ARG_MIN_REG;
2551 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
2553 else if (cum->prototype)
2554 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
2555 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
2556 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
2559 cum->nargs_prototype = 0;
2561 cum->orig_nargs = cum->nargs_prototype;
2563 /* Check for a longcall attribute. */
2565 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
2566 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
2567 cum->call_cookie = CALL_LONG;
2569 if (TARGET_DEBUG_ARG)
2571 fprintf (stderr, "\ninit_cumulative_args:");
2574 tree ret_type = TREE_TYPE (fntype);
2575 fprintf (stderr, " ret code = %s,",
2576 tree_code_name[ (int)TREE_CODE (ret_type) ]);
2579 if (cum->call_cookie & CALL_LONG)
2580 fprintf (stderr, " longcall,");
2582 fprintf (stderr, " proto = %d, nargs = %d\n",
2583 cum->prototype, cum->nargs_prototype);
2587 /* If defined, a C expression which determines whether, and in which
2588 direction, to pad out an argument with extra space. The value
2589 should be of type `enum direction': either `upward' to pad above
2590 the argument, `downward' to pad below, or `none' to inhibit
2593 For the AIX ABI structs are always stored left shifted in their
2597 function_arg_padding (mode, type)
2598 enum machine_mode mode;
2601 if (type != 0 && AGGREGATE_TYPE_P (type))
2604 /* This is the default definition. */
2605 return (! BYTES_BIG_ENDIAN
2608 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2609 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
2610 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
2611 ? downward : upward));
2614 /* If defined, a C expression that gives the alignment boundary, in bits,
2615 of an argument with the specified mode and type. If it is not defined,
2616 PARM_BOUNDARY is used for all arguments.
2618 V.4 wants long longs to be double word aligned. */
2621 function_arg_boundary (mode, type)
2622 enum machine_mode mode;
2623 tree type ATTRIBUTE_UNUSED;
2625 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
2627 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2630 return PARM_BOUNDARY;
2633 /* Update the data in CUM to advance over an argument
2634 of mode MODE and data type TYPE.
2635 (TYPE is null for libcalls where that information may not be available.) */
2638 function_arg_advance (cum, mode, type, named)
2639 CUMULATIVE_ARGS *cum;
2640 enum machine_mode mode;
2644 cum->nargs_prototype--;
2646 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2648 if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
2651 cum->words += RS6000_ARG_SIZE (mode, type);
2653 else if (DEFAULT_ABI == ABI_V4)
2655 if (TARGET_HARD_FLOAT
2656 && (mode == SFmode || mode == DFmode))
2658 if (cum->fregno <= FP_ARG_V4_MAX_REG)
2663 cum->words += cum->words & 1;
2664 cum->words += RS6000_ARG_SIZE (mode, type);
2670 int gregno = cum->sysv_gregno;
2672 /* Aggregates and IEEE quad get passed by reference. */
2673 if ((type && AGGREGATE_TYPE_P (type))
2677 n_words = RS6000_ARG_SIZE (mode, type);
2679 /* Long long is put in odd registers. */
2680 if (n_words == 2 && (gregno & 1) == 0)
2683 /* Long long is not split between registers and stack. */
2684 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
2686 /* Long long is aligned on the stack. */
2688 cum->words += cum->words & 1;
2689 cum->words += n_words;
2692 /* Note: continuing to accumulate gregno past when we've started
2693 spilling to the stack indicates the fact that we've started
2694 spilling to the stack to expand_builtin_saveregs. */
2695 cum->sysv_gregno = gregno + n_words;
2698 if (TARGET_DEBUG_ARG)
2700 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2701 cum->words, cum->fregno);
2702 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
2703 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
2704 fprintf (stderr, "mode = %4s, named = %d\n",
2705 GET_MODE_NAME (mode), named);
2710 int align = (TARGET_32BIT && (cum->words & 1) != 0
2711 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2713 cum->words += align + RS6000_ARG_SIZE (mode, type);
2715 if (GET_MODE_CLASS (mode) == MODE_FLOAT && TARGET_HARD_FLOAT)
2718 if (TARGET_DEBUG_ARG)
2720 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
2721 cum->words, cum->fregno);
2722 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
2723 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
2724 fprintf (stderr, "named = %d, align = %d\n", named, align);
2729 /* Determine where to put an argument to a function.
2730 Value is zero to push the argument on the stack,
2731 or a hard register in which to store the argument.
2733 MODE is the argument's machine mode.
2734 TYPE is the data type of the argument (as a tree).
2735 This is null for libcalls where that information may
2737 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2738 the preceding args and about the function being called.
2739 NAMED is nonzero if this argument is a named parameter
2740 (otherwise it is an extra parameter matching an ellipsis).
2742 On RS/6000 the first eight words of non-FP are normally in registers
2743 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
2744 Under V.4, the first 8 FP args are in registers.
2746 If this is floating-point and no prototype is specified, we use
2747 both an FP and integer register (or possibly FP reg and stack). Library
2748 functions (when TYPE is zero) always have the proper types for args,
2749 so we can pass the FP value just in one register. emit_library_function
2750 doesn't support PARALLEL anyway. */
2753 function_arg (cum, mode, type, named)
2754 CUMULATIVE_ARGS *cum;
2755 enum machine_mode mode;
2759 enum rs6000_abi abi = DEFAULT_ABI;
2761 /* Return a marker to indicate whether CR1 needs to set or clear the
2762 bit that V.4 uses to say fp args were passed in registers.
2763 Assume that we don't need the marker for software floating point,
2764 or compiler generated library calls. */
2765 if (mode == VOIDmode)
2768 && TARGET_HARD_FLOAT
2769 && cum->nargs_prototype < 0
2770 && type && (cum->prototype || TARGET_NO_PROTOTYPE))
2772 return GEN_INT (cum->call_cookie
2773 | ((cum->fregno == FP_ARG_MIN_REG)
2774 ? CALL_V4_SET_FP_ARGS
2775 : CALL_V4_CLEAR_FP_ARGS));
2778 return GEN_INT (cum->call_cookie);
2781 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
2783 if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
2784 return gen_rtx_REG (mode, cum->vregno);
2788 else if (abi == ABI_V4)
2790 if (TARGET_HARD_FLOAT
2791 && (mode == SFmode || mode == DFmode))
2793 if (cum->fregno <= FP_ARG_V4_MAX_REG)
2794 return gen_rtx_REG (mode, cum->fregno);
2801 int gregno = cum->sysv_gregno;
2803 /* Aggregates and IEEE quad get passed by reference. */
2804 if ((type && AGGREGATE_TYPE_P (type))
2808 n_words = RS6000_ARG_SIZE (mode, type);
2810 /* Long long is put in odd registers. */
2811 if (n_words == 2 && (gregno & 1) == 0)
2814 /* Long long is not split between registers and stack. */
2815 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
2816 return gen_rtx_REG (mode, gregno);
2823 int align = (TARGET_32BIT && (cum->words & 1) != 0
2824 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
2825 int align_words = cum->words + align;
2827 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2830 if (USE_FP_FOR_ARG_P (*cum, mode, type))
2833 || ((cum->nargs_prototype > 0)
2834 /* IBM AIX extended its linkage convention definition always
2835 to require FP args after register save area hole on the
2837 && (DEFAULT_ABI != ABI_AIX
2839 || (align_words < GP_ARG_NUM_REG))))
2840 return gen_rtx_REG (mode, cum->fregno);
2842 return gen_rtx_PARALLEL (mode,
2844 gen_rtx_EXPR_LIST (VOIDmode,
2845 ((align_words >= GP_ARG_NUM_REG)
2848 + RS6000_ARG_SIZE (mode, type)
2850 /* If this is partially on the stack, then
2851 we only include the portion actually
2852 in registers here. */
2853 ? gen_rtx_REG (SImode,
2854 GP_ARG_MIN_REG + align_words)
2855 : gen_rtx_REG (mode,
2856 GP_ARG_MIN_REG + align_words))),
2858 gen_rtx_EXPR_LIST (VOIDmode,
2859 gen_rtx_REG (mode, cum->fregno),
2862 else if (align_words < GP_ARG_NUM_REG)
2863 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
2869 /* For an arg passed partly in registers and partly in memory,
2870 this is the number of registers used.
2871 For args passed entirely in registers or entirely in memory, zero. */
2874 function_arg_partial_nregs (cum, mode, type, named)
2875 CUMULATIVE_ARGS *cum;
2876 enum machine_mode mode;
2878 int named ATTRIBUTE_UNUSED;
2880 if (DEFAULT_ABI == ABI_V4)
2883 if (USE_FP_FOR_ARG_P (*cum, mode, type)
2884 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
2886 if (cum->nargs_prototype >= 0)
2890 if (cum->words < GP_ARG_NUM_REG
2891 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
2893 int ret = GP_ARG_NUM_REG - cum->words;
2894 if (ret && TARGET_DEBUG_ARG)
2895 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
2903 /* A C expression that indicates when an argument must be passed by
2904 reference. If nonzero for an argument, a copy of that argument is
2905 made in memory and a pointer to the argument is passed instead of
2906 the argument itself. The pointer is passed in whatever way is
2907 appropriate for passing a pointer to that type.
2909 Under V.4, structures and unions are passed by reference. */
2912 function_arg_pass_by_reference (cum, mode, type, named)
2913 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
2914 enum machine_mode mode ATTRIBUTE_UNUSED;
2916 int named ATTRIBUTE_UNUSED;
2918 if (DEFAULT_ABI == ABI_V4
2919 && ((type && AGGREGATE_TYPE_P (type))
2922 if (TARGET_DEBUG_ARG)
2923 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
2931 /* Perform any needed actions needed for a function that is receiving a
2932 variable number of arguments.
2936 MODE and TYPE are the mode and type of the current parameter.
2938 PRETEND_SIZE is a variable that should be set to the amount of stack
2939 that must be pushed by the prolog to pretend that our caller pushed
2942 Normally, this macro will push all remaining incoming registers on the
2943 stack and set PRETEND_SIZE to the length of the registers pushed. */
2946 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
2947 CUMULATIVE_ARGS *cum;
2948 enum machine_mode mode;
2954 CUMULATIVE_ARGS next_cum;
2955 int reg_size = TARGET_32BIT ? 4 : 8;
2956 rtx save_area = NULL_RTX, mem;
2957 int first_reg_offset, set;
2961 fntype = TREE_TYPE (current_function_decl);
2962 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
2963 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
2964 != void_type_node));
2966 /* For varargs, we do not want to skip the dummy va_dcl argument.
2967 For stdargs, we do want to skip the last named argument. */
2970 function_arg_advance (&next_cum, mode, type, 1);
2972 if (DEFAULT_ABI == ABI_V4)
2974 /* Indicate to allocate space on the stack for varargs save area. */
2975 cfun->machine->sysv_varargs_p = 1;
2977 save_area = plus_constant (virtual_stack_vars_rtx,
2978 - RS6000_VARARGS_SIZE);
2980 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
2984 first_reg_offset = next_cum.words;
2985 save_area = virtual_incoming_args_rtx;
2986 cfun->machine->sysv_varargs_p = 0;
2988 if (MUST_PASS_IN_STACK (mode, type))
2989 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
2992 set = get_varargs_alias_set ();
2993 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
2995 mem = gen_rtx_MEM (BLKmode,
2996 plus_constant (save_area,
2997 first_reg_offset * reg_size)),
2998 set_mem_alias_set (mem, set);
2999 set_mem_align (mem, BITS_PER_WORD);
3002 (GP_ARG_MIN_REG + first_reg_offset, mem,
3003 GP_ARG_NUM_REG - first_reg_offset,
3004 (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD);
3006 /* ??? Does ABI_V4 need this at all? */
3007 *pretend_size = (GP_ARG_NUM_REG - first_reg_offset) * UNITS_PER_WORD;
3010 /* Save FP registers if needed. */
3011 if (DEFAULT_ABI == ABI_V4
3012 && TARGET_HARD_FLOAT && ! no_rtl
3013 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
3015 int fregno = next_cum.fregno;
3016 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
3017 rtx lab = gen_label_rtx ();
3018 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
3020 emit_jump_insn (gen_rtx_SET (VOIDmode,
3022 gen_rtx_IF_THEN_ELSE (VOIDmode,
3023 gen_rtx_NE (VOIDmode, cr1,
3025 gen_rtx_LABEL_REF (VOIDmode, lab),
3028 while (fregno <= FP_ARG_V4_MAX_REG)
3030 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
3031 set_mem_alias_set (mem, set);
3032 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
3041 /* Create the va_list data type. */
3044 rs6000_build_va_list ()
3046 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
3048 /* For AIX, prefer 'char *' because that's what the system
3049 header files like. */
3050 if (DEFAULT_ABI != ABI_V4)
3051 return build_pointer_type (char_type_node);
3053 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3054 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
3056 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
3057 unsigned_char_type_node);
3058 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
3059 unsigned_char_type_node);
3060 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
3062 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
3065 DECL_FIELD_CONTEXT (f_gpr) = record;
3066 DECL_FIELD_CONTEXT (f_fpr) = record;
3067 DECL_FIELD_CONTEXT (f_ovf) = record;
3068 DECL_FIELD_CONTEXT (f_sav) = record;
3070 TREE_CHAIN (record) = type_decl;
3071 TYPE_NAME (record) = type_decl;
3072 TYPE_FIELDS (record) = f_gpr;
3073 TREE_CHAIN (f_gpr) = f_fpr;
3074 TREE_CHAIN (f_fpr) = f_ovf;
3075 TREE_CHAIN (f_ovf) = f_sav;
3077 layout_type (record);
3079 /* The correct type is an array type of one element. */
3080 return build_array_type (record, build_index_type (size_zero_node));
3083 /* Implement va_start. */
3086 rs6000_va_start (stdarg_p, valist, nextarg)
3091 HOST_WIDE_INT words, n_gpr, n_fpr;
3092 tree f_gpr, f_fpr, f_ovf, f_sav;
3093 tree gpr, fpr, ovf, sav, t;
3095 /* Only SVR4 needs something special. */
3096 if (DEFAULT_ABI != ABI_V4)
3098 std_expand_builtin_va_start (stdarg_p, valist, nextarg);
3102 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3103 f_fpr = TREE_CHAIN (f_gpr);
3104 f_ovf = TREE_CHAIN (f_fpr);
3105 f_sav = TREE_CHAIN (f_ovf);
3107 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3108 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3109 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3110 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3111 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3113 /* Count number of gp and fp argument registers used. */
3114 words = current_function_args_info.words;
3115 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
3116 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
3118 if (TARGET_DEBUG_ARG)
3120 fputs ("va_start: words = ", stderr);
3121 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, words);
3122 fputs (", n_gpr = ", stderr);
3123 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_gpr);
3124 fputs (", n_fpr = ", stderr);
3125 fprintf (stderr, HOST_WIDE_INT_PRINT_DEC, n_fpr);
3126 putc ('\n', stderr);
3129 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
3130 TREE_SIDE_EFFECTS (t) = 1;
3131 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3133 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
3134 TREE_SIDE_EFFECTS (t) = 1;
3135 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3137 /* Find the overflow area. */
3138 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
3140 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
3141 build_int_2 (words * UNITS_PER_WORD, 0));
3142 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3143 TREE_SIDE_EFFECTS (t) = 1;
3144 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3146 /* Find the register save area. */
3147 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
3148 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
3149 build_int_2 (-RS6000_VARARGS_SIZE, -1));
3150 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
3151 TREE_SIDE_EFFECTS (t) = 1;
3152 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3155 /* Implement va_arg. */
3158 rs6000_va_arg (valist, type)
3161 tree f_gpr, f_fpr, f_ovf, f_sav;
3162 tree gpr, fpr, ovf, sav, reg, t, u;
3163 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
3164 rtx lab_false, lab_over, addr_rtx, r;
3166 if (DEFAULT_ABI != ABI_V4)
3167 return std_expand_builtin_va_arg (valist, type);
3169 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
3170 f_fpr = TREE_CHAIN (f_gpr);
3171 f_ovf = TREE_CHAIN (f_fpr);
3172 f_sav = TREE_CHAIN (f_ovf);
3174 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
3175 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
3176 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
3177 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
3178 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
3180 size = int_size_in_bytes (type);
3181 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3183 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
3185 /* Aggregates and long doubles are passed by reference. */
3191 size = UNITS_PER_WORD;
3194 else if (FLOAT_TYPE_P (type) && ! TARGET_SOFT_FLOAT)
3196 /* FP args go in FP registers, if present. */
3205 /* Otherwise into GP registers. */
3213 /* Pull the value out of the saved registers ... */
3215 lab_false = gen_label_rtx ();
3216 lab_over = gen_label_rtx ();
3217 addr_rtx = gen_reg_rtx (Pmode);
3219 /* Vectors never go in registers. */
3220 if (TREE_CODE (type) != VECTOR_TYPE)
3222 TREE_THIS_VOLATILE (reg) = 1;
3223 emit_cmp_and_jump_insns
3224 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
3225 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
3228 /* Long long is aligned in the registers. */
3231 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
3232 build_int_2 (n_reg - 1, 0));
3233 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
3234 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
3235 TREE_SIDE_EFFECTS (u) = 1;
3236 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
3240 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
3244 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
3245 build_int_2 (n_reg, 0));
3246 TREE_SIDE_EFFECTS (u) = 1;
3248 u = build1 (CONVERT_EXPR, integer_type_node, u);
3249 TREE_SIDE_EFFECTS (u) = 1;
3251 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
3252 TREE_SIDE_EFFECTS (u) = 1;
3254 t = build (PLUS_EXPR, ptr_type_node, t, u);
3255 TREE_SIDE_EFFECTS (t) = 1;
3257 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3259 emit_move_insn (addr_rtx, r);
3261 emit_jump_insn (gen_jump (lab_over));
3265 emit_label (lab_false);
3267 /* ... otherwise out of the overflow area. */
3269 /* Make sure we don't find reg 7 for the next int arg.
3271 All AltiVec vectors go in the overflow area. So in the AltiVec
3272 case we need to get the vectors from the overflow area, but
3273 remember where the GPRs and FPRs are. */
3274 if (n_reg > 1 && TREE_CODE (type) != VECTOR_TYPE)
3276 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
3277 TREE_SIDE_EFFECTS (t) = 1;
3278 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3281 /* Care for on-stack alignment if needed. */
3288 /* Vectors are 16 byte aligned. */
3289 if (TREE_CODE (type) == VECTOR_TYPE)
3294 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
3295 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
3299 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
3301 emit_move_insn (addr_rtx, r);
3303 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
3304 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
3305 TREE_SIDE_EFFECTS (t) = 1;
3306 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3308 emit_label (lab_over);
3312 r = gen_rtx_MEM (Pmode, addr_rtx);
3313 set_mem_alias_set (r, get_varargs_alias_set ());
3314 emit_move_insn (addr_rtx, r);
3322 #define def_builtin(MASK, NAME, TYPE, CODE) \
3324 if ((MASK) & target_flags) \
3325 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, NULL); \
3328 struct builtin_description
3330 const unsigned int mask;
3331 const enum insn_code icode;
3332 const char *const name;
3333 const enum rs6000_builtins code;
3336 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
3338 static const struct builtin_description bdesc_3arg[] =
3340 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
3341 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
3342 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
3343 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
3344 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
3345 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
3346 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
3347 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
3348 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
3349 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
3350 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
3351 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
3352 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
3353 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
3354 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
3355 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
3356 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
3357 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
3358 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
3359 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
3360 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
3361 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
3362 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
3365 /* DST operations: void foo (void *, const int, const char). */
3367 static const struct builtin_description bdesc_dst[] =
3369 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
3370 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
3371 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
3372 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
3375 /* Simple binary operations: VECc = foo (VECa, VECb). */
3377 static const struct builtin_description bdesc_2arg[] =
3379 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
3380 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
3381 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
3382 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
3383 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
3384 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
3385 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
3386 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
3387 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
3388 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
3389 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
3390 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
3391 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
3392 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
3393 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
3394 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
3395 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
3396 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
3397 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
3398 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
3399 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
3400 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
3401 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
3402 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
3403 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
3404 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
3405 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
3406 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
3407 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
3408 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
3409 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
3410 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
3411 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
3412 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
3413 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
3414 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
3415 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
3416 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
3417 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
3418 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
3419 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
3420 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
3421 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
3422 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
3423 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
3424 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
3425 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
3426 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
3427 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
3428 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
3429 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
3430 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
3431 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
3432 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
3433 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
3434 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
3435 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
3436 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
3437 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
3438 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
3439 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
3440 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
3441 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
3442 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
3443 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
3444 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
3445 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
3446 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
3447 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
3448 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
3449 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
3450 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
3451 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
3452 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
3453 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
3454 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
3455 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
3456 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
3457 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
3458 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
3459 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
3460 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
3461 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
3462 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
3463 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
3464 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
3465 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
3466 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
3467 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
3468 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
3469 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
3470 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
3471 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
3472 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
3473 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
3474 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
3475 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
3476 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
3477 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
3478 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
3479 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
3480 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
3481 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
3482 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
3483 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
3484 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
3485 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
3486 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
3487 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
3488 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
3489 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
3490 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
3491 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
3494 /* AltiVec predicates. */
3496 struct builtin_description_predicates
3498 const unsigned int mask;
3499 const enum insn_code icode;
3501 const char *const name;
3502 const enum rs6000_builtins code;
3505 static const struct builtin_description_predicates bdesc_altivec_preds[] =
3507 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
3508 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
3509 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
3510 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
3511 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
3512 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
3513 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
3514 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
3515 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
3516 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
3517 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
3518 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
3519 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
3522 /* ABS* opreations. */
3524 static const struct builtin_description bdesc_abs[] =
3526 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
3527 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
3528 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
3529 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
3530 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
3531 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
3532 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
3535 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
3538 static const struct builtin_description bdesc_1arg[] =
3540 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
3541 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
3542 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
3543 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
3544 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
3545 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
3546 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
3547 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
3548 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
3549 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
3550 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
3551 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
3552 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
3553 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
3554 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
3555 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
3556 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
3560 altivec_expand_unop_builtin (icode, arglist, target)
3561 enum insn_code icode;
3566 tree arg0 = TREE_VALUE (arglist);
3567 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3568 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3569 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3571 /* If we got invalid arguments bail out before generating bad rtl. */
3572 if (arg0 == error_mark_node)
3577 /* Only allow 5-bit *signed* literals. */
3578 case CODE_FOR_altivec_vspltisb:
3579 case CODE_FOR_altivec_vspltish:
3580 case CODE_FOR_altivec_vspltisw:
3581 if (GET_CODE (op0) != CONST_INT
3582 || INTVAL (op0) > 0x1f
3583 || INTVAL (op0) < -0x1f)
3585 error ("argument 1 must be a 5-bit signed literal");
3594 || GET_MODE (target) != tmode
3595 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3596 target = gen_reg_rtx (tmode);
3598 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3599 op0 = copy_to_mode_reg (mode0, op0);
3601 pat = GEN_FCN (icode) (target, op0);
3610 altivec_expand_abs_builtin (icode, arglist, target)
3611 enum insn_code icode;
3615 rtx pat, scratch1, scratch2;
3616 tree arg0 = TREE_VALUE (arglist);
3617 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3618 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3619 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3621 /* If we have invalid arguments, bail out before generating bad rtl. */
3622 if (arg0 == error_mark_node)
3626 || GET_MODE (target) != tmode
3627 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3628 target = gen_reg_rtx (tmode);
3630 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3631 op0 = copy_to_mode_reg (mode0, op0);
3633 scratch1 = gen_reg_rtx (mode0);
3634 scratch2 = gen_reg_rtx (mode0);
3636 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
3645 altivec_expand_binop_builtin (icode, arglist, target)
3646 enum insn_code icode;
3651 tree arg0 = TREE_VALUE (arglist);
3652 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3653 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3654 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3655 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3656 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3657 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3659 /* If we got invalid arguments bail out before generating bad rtl. */
3660 if (arg0 == error_mark_node || arg1 == error_mark_node)
3665 /* Only allow 5-bit unsigned literals. */
3666 case CODE_FOR_altivec_vcfux:
3667 case CODE_FOR_altivec_vcfsx:
3668 case CODE_FOR_altivec_vctsxs:
3669 case CODE_FOR_altivec_vctuxs:
3670 case CODE_FOR_altivec_vspltb:
3671 case CODE_FOR_altivec_vsplth:
3672 case CODE_FOR_altivec_vspltw:
3673 if (TREE_CODE (arg1) != INTEGER_CST
3674 || TREE_INT_CST_LOW (arg1) & ~0x1f)
3676 error ("argument 2 must be a 5-bit unsigned literal");
3685 || GET_MODE (target) != tmode
3686 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3687 target = gen_reg_rtx (tmode);
3689 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3690 op0 = copy_to_mode_reg (mode0, op0);
3691 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3692 op1 = copy_to_mode_reg (mode1, op1);
3694 pat = GEN_FCN (icode) (target, op0, op1);
3703 altivec_expand_predicate_builtin (icode, opcode, arglist, target)
3704 enum insn_code icode;
3710 tree cr6_form = TREE_VALUE (arglist);
3711 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
3712 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3713 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3714 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3715 enum machine_mode tmode = SImode;
3716 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3717 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3720 if (TREE_CODE (cr6_form) != INTEGER_CST)
3722 error ("argument 1 of __builtin_altivec_predicate must be a constant");
3726 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
3731 /* If we have invalid arguments, bail out before generating bad rtl. */
3732 if (arg0 == error_mark_node || arg1 == error_mark_node)
3736 || GET_MODE (target) != tmode
3737 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3738 target = gen_reg_rtx (tmode);
3740 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3741 op0 = copy_to_mode_reg (mode0, op0);
3742 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3743 op1 = copy_to_mode_reg (mode1, op1);
3745 scratch = gen_reg_rtx (mode0);
3747 pat = GEN_FCN (icode) (scratch, op0, op1,
3748 gen_rtx (SYMBOL_REF, Pmode, opcode));
3753 /* The vec_any* and vec_all* predicates use the same opcodes for two
3754 different operations, but the bits in CR6 will be different
3755 depending on what information we want. So we have to play tricks
3756 with CR6 to get the right bits out.
3758 If you think this is disgusting, look at the specs for the
3759 AltiVec predicates. */
3761 switch (cr6_form_int)
3764 emit_insn (gen_cr6_test_for_zero (target));
3767 emit_insn (gen_cr6_test_for_zero_reverse (target));
3770 emit_insn (gen_cr6_test_for_lt (target));
3773 emit_insn (gen_cr6_test_for_lt_reverse (target));
3776 error ("argument 1 of __builtin_altivec_predicate is out of range");
3784 altivec_expand_stv_builtin (icode, arglist)
3785 enum insn_code icode;
3788 tree arg0 = TREE_VALUE (arglist);
3789 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3790 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3791 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3792 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3793 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3795 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
3796 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
3797 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
3799 /* Invalid arguments. Bail before doing anything stoopid! */
3800 if (arg0 == error_mark_node
3801 || arg1 == error_mark_node
3802 || arg2 == error_mark_node)
3805 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
3806 op0 = copy_to_mode_reg (mode2, op0);
3807 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
3808 op1 = copy_to_mode_reg (mode0, op1);
3809 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
3810 op2 = copy_to_mode_reg (mode1, op2);
3812 pat = GEN_FCN (icode) (op1, op2, op0);
3819 altivec_expand_ternop_builtin (icode, arglist, target)
3820 enum insn_code icode;
3825 tree arg0 = TREE_VALUE (arglist);
3826 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3827 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3828 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3829 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3830 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
3831 enum machine_mode tmode = insn_data[icode].operand[0].mode;
3832 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
3833 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
3834 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
3836 /* If we got invalid arguments bail out before generating bad rtl. */
3837 if (arg0 == error_mark_node
3838 || arg1 == error_mark_node
3839 || arg2 == error_mark_node)
3844 /* Only allow 4-bit unsigned literals. */
3845 case CODE_FOR_altivec_vsldoi_4sf:
3846 case CODE_FOR_altivec_vsldoi_4si:
3847 case CODE_FOR_altivec_vsldoi_8hi:
3848 case CODE_FOR_altivec_vsldoi_16qi:
3849 if (TREE_CODE (arg2) != INTEGER_CST
3850 || TREE_INT_CST_LOW (arg2) & ~0xf)
3852 error ("argument 3 must be a 4-bit unsigned literal");
3861 || GET_MODE (target) != tmode
3862 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3863 target = gen_reg_rtx (tmode);
3865 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3866 op0 = copy_to_mode_reg (mode0, op0);
3867 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
3868 op1 = copy_to_mode_reg (mode1, op1);
3869 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
3870 op2 = copy_to_mode_reg (mode2, op2);
3872 pat = GEN_FCN (icode) (target, op0, op1, op2);
3880 altivec_expand_builtin (exp, target)
3884 struct builtin_description *d;
3885 struct builtin_description_predicates *dp;
3887 enum insn_code icode;
3888 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3889 tree arglist = TREE_OPERAND (exp, 1);
3890 tree arg0, arg1, arg2;
3891 rtx op0, op1, op2, pat;
3892 enum machine_mode tmode, mode0, mode1, mode2;
3893 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3897 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
3898 icode = CODE_FOR_altivec_lvx_16qi;
3899 arg0 = TREE_VALUE (arglist);
3900 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3901 tmode = insn_data[icode].operand[0].mode;
3902 mode0 = insn_data[icode].operand[1].mode;
3905 || GET_MODE (target) != tmode
3906 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3907 target = gen_reg_rtx (tmode);
3909 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3910 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3912 pat = GEN_FCN (icode) (target, op0);
3918 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
3919 icode = CODE_FOR_altivec_lvx_8hi;
3920 arg0 = TREE_VALUE (arglist);
3921 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3922 tmode = insn_data[icode].operand[0].mode;
3923 mode0 = insn_data[icode].operand[1].mode;
3926 || GET_MODE (target) != tmode
3927 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3928 target = gen_reg_rtx (tmode);
3930 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3931 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3933 pat = GEN_FCN (icode) (target, op0);
3939 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
3940 icode = CODE_FOR_altivec_lvx_4si;
3941 arg0 = TREE_VALUE (arglist);
3942 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3943 tmode = insn_data[icode].operand[0].mode;
3944 mode0 = insn_data[icode].operand[1].mode;
3947 || GET_MODE (target) != tmode
3948 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3949 target = gen_reg_rtx (tmode);
3951 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3952 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3954 pat = GEN_FCN (icode) (target, op0);
3960 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
3961 icode = CODE_FOR_altivec_lvx_4sf;
3962 arg0 = TREE_VALUE (arglist);
3963 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3964 tmode = insn_data[icode].operand[0].mode;
3965 mode0 = insn_data[icode].operand[1].mode;
3968 || GET_MODE (target) != tmode
3969 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
3970 target = gen_reg_rtx (tmode);
3972 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
3973 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3975 pat = GEN_FCN (icode) (target, op0);
3981 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
3982 icode = CODE_FOR_altivec_stvx_16qi;
3983 arg0 = TREE_VALUE (arglist);
3984 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3985 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
3986 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
3987 mode0 = insn_data[icode].operand[0].mode;
3988 mode1 = insn_data[icode].operand[1].mode;
3990 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
3991 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
3992 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
3993 op1 = copy_to_mode_reg (mode1, op1);
3995 pat = GEN_FCN (icode) (op0, op1);
4000 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
4001 icode = CODE_FOR_altivec_stvx_8hi;
4002 arg0 = TREE_VALUE (arglist);
4003 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4004 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4005 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4006 mode0 = insn_data[icode].operand[0].mode;
4007 mode1 = insn_data[icode].operand[1].mode;
4009 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4010 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4011 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
4012 op1 = copy_to_mode_reg (mode1, op1);
4014 pat = GEN_FCN (icode) (op0, op1);
4019 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
4020 icode = CODE_FOR_altivec_stvx_4si;
4021 arg0 = TREE_VALUE (arglist);
4022 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4023 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4024 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4025 mode0 = insn_data[icode].operand[0].mode;
4026 mode1 = insn_data[icode].operand[1].mode;
4028 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4029 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4030 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
4031 op1 = copy_to_mode_reg (mode1, op1);
4033 pat = GEN_FCN (icode) (op0, op1);
4038 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
4039 icode = CODE_FOR_altivec_stvx_4sf;
4040 arg0 = TREE_VALUE (arglist);
4041 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4042 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4043 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4044 mode0 = insn_data[icode].operand[0].mode;
4045 mode1 = insn_data[icode].operand[1].mode;
4047 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4048 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
4049 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
4050 op1 = copy_to_mode_reg (mode1, op1);
4052 pat = GEN_FCN (icode) (op0, op1);
4057 case ALTIVEC_BUILTIN_STVX:
4058 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
4059 case ALTIVEC_BUILTIN_STVEBX:
4060 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
4061 case ALTIVEC_BUILTIN_STVEHX:
4062 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
4063 case ALTIVEC_BUILTIN_STVEWX:
4064 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
4065 case ALTIVEC_BUILTIN_STVXL:
4066 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
4068 case ALTIVEC_BUILTIN_MFVSCR:
4069 icode = CODE_FOR_altivec_mfvscr;
4070 tmode = insn_data[icode].operand[0].mode;
4073 || GET_MODE (target) != tmode
4074 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4075 target = gen_reg_rtx (tmode);
4077 pat = GEN_FCN (icode) (target);
4083 case ALTIVEC_BUILTIN_MTVSCR:
4084 icode = CODE_FOR_altivec_mtvscr;
4085 arg0 = TREE_VALUE (arglist);
4086 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4087 mode0 = insn_data[icode].operand[0].mode;
4089 /* If we got invalid arguments bail out before generating bad rtl. */
4090 if (arg0 == error_mark_node)
4093 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4094 op0 = copy_to_mode_reg (mode0, op0);
4096 pat = GEN_FCN (icode) (op0);
4101 case ALTIVEC_BUILTIN_DSSALL:
4102 emit_insn (gen_altivec_dssall ());
4105 case ALTIVEC_BUILTIN_DSS:
4106 icode = CODE_FOR_altivec_dss;
4107 arg0 = TREE_VALUE (arglist);
4108 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4109 mode0 = insn_data[icode].operand[0].mode;
4111 /* If we got invalid arguments bail out before generating bad rtl. */
4112 if (arg0 == error_mark_node)
4115 if (TREE_CODE (arg0) != INTEGER_CST
4116 || TREE_INT_CST_LOW (arg0) & ~0x3)
4118 error ("argument to dss must be a 2-bit unsigned literal");
4122 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
4123 op0 = copy_to_mode_reg (mode0, op0);
4125 emit_insn (gen_altivec_dss (op0));
4129 /* Handle DST variants. */
4130 d = (struct builtin_description *) bdesc_dst;
4131 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
4132 if (d->code == fcode)
4134 arg0 = TREE_VALUE (arglist);
4135 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4136 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4137 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4138 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4139 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4140 mode0 = insn_data[d->icode].operand[0].mode;
4141 mode1 = insn_data[d->icode].operand[1].mode;
4142 mode2 = insn_data[d->icode].operand[2].mode;
4144 /* Invalid arguments, bail out before generating bad rtl. */
4145 if (arg0 == error_mark_node
4146 || arg1 == error_mark_node
4147 || arg2 == error_mark_node)
4150 if (TREE_CODE (arg2) != INTEGER_CST
4151 || TREE_INT_CST_LOW (arg2) & ~0x3)
4153 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
4157 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
4158 op0 = copy_to_mode_reg (mode0, op0);
4159 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
4160 op1 = copy_to_mode_reg (mode1, op1);
4162 pat = GEN_FCN (d->icode) (op0, op1, op2);
4169 /* Expand abs* operations. */
4170 d = (struct builtin_description *) bdesc_abs;
4171 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
4172 if (d->code == fcode)
4173 return altivec_expand_abs_builtin (d->icode, arglist, target);
4175 /* Handle simple unary operations. */
4176 d = (struct builtin_description *) bdesc_1arg;
4177 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
4178 if (d->code == fcode)
4179 return altivec_expand_unop_builtin (d->icode, arglist, target);
4181 /* Handle simple binary operations. */
4182 d = (struct builtin_description *) bdesc_2arg;
4183 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
4184 if (d->code == fcode)
4185 return altivec_expand_binop_builtin (d->icode, arglist, target);
4187 /* Expand the AltiVec predicates. */
4188 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4189 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
4190 if (dp->code == fcode)
4191 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
4193 /* LV* are funky. We initialized them differently. */
4196 case ALTIVEC_BUILTIN_LVSL:
4197 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvsl,
4199 case ALTIVEC_BUILTIN_LVSR:
4200 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvsr,
4202 case ALTIVEC_BUILTIN_LVEBX:
4203 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvebx,
4205 case ALTIVEC_BUILTIN_LVEHX:
4206 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvehx,
4208 case ALTIVEC_BUILTIN_LVEWX:
4209 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvewx,
4211 case ALTIVEC_BUILTIN_LVXL:
4212 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvxl,
4214 case ALTIVEC_BUILTIN_LVX:
4215 return altivec_expand_binop_builtin (CODE_FOR_altivec_lvx,
4222 /* Handle simple ternary operations. */
4223 d = (struct builtin_description *) bdesc_3arg;
4224 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
4225 if (d->code == fcode)
4226 return altivec_expand_ternop_builtin (d->icode, arglist, target);
4232 /* Expand an expression EXP that calls a built-in function,
4233 with result going to TARGET if that's convenient
4234 (and in mode MODE if that's convenient).
4235 SUBTARGET may be used as the target for computing one of EXP's operands.
4236 IGNORE is nonzero if the value is to be ignored. */
4239 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
4242 rtx subtarget ATTRIBUTE_UNUSED;
4243 enum machine_mode mode ATTRIBUTE_UNUSED;
4244 int ignore ATTRIBUTE_UNUSED;
4247 return altivec_expand_builtin (exp, target);
4253 rs6000_init_builtins ()
4256 altivec_init_builtins ();
4260 altivec_init_builtins (void)
4262 struct builtin_description *d;
4263 struct builtin_description_predicates *dp;
4266 tree endlink = void_list_node;
4268 tree pint_type_node = build_pointer_type (integer_type_node);
4269 tree pvoid_type_node = build_pointer_type (void_type_node);
4270 tree pshort_type_node = build_pointer_type (short_integer_type_node);
4271 tree pchar_type_node = build_pointer_type (char_type_node);
4272 tree pfloat_type_node = build_pointer_type (float_type_node);
4274 tree v4sf_ftype_v4sf_v4sf_v16qi
4275 = build_function_type (V4SF_type_node,
4276 tree_cons (NULL_TREE, V4SF_type_node,
4277 tree_cons (NULL_TREE, V4SF_type_node,
4278 tree_cons (NULL_TREE,
4281 tree v4si_ftype_v4si_v4si_v16qi
4282 = build_function_type (V4SI_type_node,
4283 tree_cons (NULL_TREE, V4SI_type_node,
4284 tree_cons (NULL_TREE, V4SI_type_node,
4285 tree_cons (NULL_TREE,
4288 tree v8hi_ftype_v8hi_v8hi_v16qi
4289 = build_function_type (V8HI_type_node,
4290 tree_cons (NULL_TREE, V8HI_type_node,
4291 tree_cons (NULL_TREE, V8HI_type_node,
4292 tree_cons (NULL_TREE,
4295 tree v16qi_ftype_v16qi_v16qi_v16qi
4296 = build_function_type (V16QI_type_node,
4297 tree_cons (NULL_TREE, V16QI_type_node,
4298 tree_cons (NULL_TREE, V16QI_type_node,
4299 tree_cons (NULL_TREE,
4303 /* V4SI foo (char). */
4304 tree v4si_ftype_char
4305 = build_function_type (V4SI_type_node,
4306 tree_cons (NULL_TREE, char_type_node, endlink));
4308 /* V8HI foo (char). */
4309 tree v8hi_ftype_char
4310 = build_function_type (V8HI_type_node,
4311 tree_cons (NULL_TREE, char_type_node, endlink));
4313 /* V16QI foo (char). */
4314 tree v16qi_ftype_char
4315 = build_function_type (V16QI_type_node,
4316 tree_cons (NULL_TREE, char_type_node, endlink));
4317 /* V4SF foo (V4SF). */
4318 tree v4sf_ftype_v4sf
4319 = build_function_type (V4SF_type_node,
4320 tree_cons (NULL_TREE, V4SF_type_node, endlink));
4322 /* V4SI foo (int *). */
4323 tree v4si_ftype_pint
4324 = build_function_type (V4SI_type_node,
4325 tree_cons (NULL_TREE, pint_type_node, endlink));
4326 /* V8HI foo (short *). */
4327 tree v8hi_ftype_pshort
4328 = build_function_type (V8HI_type_node,
4329 tree_cons (NULL_TREE, pshort_type_node, endlink));
4330 /* V16QI foo (char *). */
4331 tree v16qi_ftype_pchar
4332 = build_function_type (V16QI_type_node,
4333 tree_cons (NULL_TREE, pchar_type_node, endlink));
4334 /* V4SF foo (float *). */
4335 tree v4sf_ftype_pfloat
4336 = build_function_type (V4SF_type_node,
4337 tree_cons (NULL_TREE, pfloat_type_node, endlink));
4339 /* V8HI foo (V16QI). */
4340 tree v8hi_ftype_v16qi
4341 = build_function_type (V8HI_type_node,
4342 tree_cons (NULL_TREE, V16QI_type_node, endlink));
4344 /* void foo (void *, int, char/literal). */
4345 tree void_ftype_pvoid_int_char
4346 = build_function_type (void_type_node,
4347 tree_cons (NULL_TREE, pvoid_type_node,
4348 tree_cons (NULL_TREE, integer_type_node,
4349 tree_cons (NULL_TREE,
4353 /* void foo (int *, V4SI). */
4354 tree void_ftype_pint_v4si
4355 = build_function_type (void_type_node,
4356 tree_cons (NULL_TREE, pint_type_node,
4357 tree_cons (NULL_TREE, V4SI_type_node,
4359 /* void foo (short *, V8HI). */
4360 tree void_ftype_pshort_v8hi
4361 = build_function_type (void_type_node,
4362 tree_cons (NULL_TREE, pshort_type_node,
4363 tree_cons (NULL_TREE, V8HI_type_node,
4365 /* void foo (char *, V16QI). */
4366 tree void_ftype_pchar_v16qi
4367 = build_function_type (void_type_node,
4368 tree_cons (NULL_TREE, pchar_type_node,
4369 tree_cons (NULL_TREE, V16QI_type_node,
4371 /* void foo (float *, V4SF). */
4372 tree void_ftype_pfloat_v4sf
4373 = build_function_type (void_type_node,
4374 tree_cons (NULL_TREE, pfloat_type_node,
4375 tree_cons (NULL_TREE, V4SF_type_node,
4378 /* void foo (V4SI). */
4379 tree void_ftype_v4si
4380 = build_function_type (void_type_node,
4381 tree_cons (NULL_TREE, V4SI_type_node,
4384 /* void foo (vint, int, void *). */
4385 tree void_ftype_v4si_int_pvoid
4386 = build_function_type (void_type_node,
4387 tree_cons (NULL_TREE, V4SI_type_node,
4388 tree_cons (NULL_TREE, integer_type_node,
4389 tree_cons (NULL_TREE,
4393 /* void foo (vchar, int, void *). */
4394 tree void_ftype_v16qi_int_pvoid
4395 = build_function_type (void_type_node,
4396 tree_cons (NULL_TREE, V16QI_type_node,
4397 tree_cons (NULL_TREE, integer_type_node,
4398 tree_cons (NULL_TREE,
4402 /* void foo (vshort, int, void *). */
4403 tree void_ftype_v8hi_int_pvoid
4404 = build_function_type (void_type_node,
4405 tree_cons (NULL_TREE, V8HI_type_node,
4406 tree_cons (NULL_TREE, integer_type_node,
4407 tree_cons (NULL_TREE,
4411 /* void foo (char). */
4413 = build_function_type (void_type_node,
4414 tree_cons (NULL_TREE, char_type_node,
4417 /* void foo (void). */
4418 tree void_ftype_void
4419 = build_function_type (void_type_node, void_list_node);
4421 /* vshort foo (void). */
4422 tree v8hi_ftype_void
4423 = build_function_type (V8HI_type_node, void_list_node);
4425 tree v4si_ftype_v4si_v4si
4426 = build_function_type (V4SI_type_node,
4427 tree_cons (NULL_TREE, V4SI_type_node,
4428 tree_cons (NULL_TREE, V4SI_type_node,
4431 /* These are for the unsigned 5 bit literals. */
4433 tree v4sf_ftype_v4si_char
4434 = build_function_type (V4SF_type_node,
4435 tree_cons (NULL_TREE, V4SI_type_node,
4436 tree_cons (NULL_TREE, char_type_node,
4438 tree v4si_ftype_v4sf_char
4439 = build_function_type (V4SI_type_node,
4440 tree_cons (NULL_TREE, V4SF_type_node,
4441 tree_cons (NULL_TREE, char_type_node,
4443 tree v4si_ftype_v4si_char
4444 = build_function_type (V4SI_type_node,
4445 tree_cons (NULL_TREE, V4SI_type_node,
4446 tree_cons (NULL_TREE, char_type_node,
4448 tree v8hi_ftype_v8hi_char
4449 = build_function_type (V8HI_type_node,
4450 tree_cons (NULL_TREE, V8HI_type_node,
4451 tree_cons (NULL_TREE, char_type_node,
4453 tree v16qi_ftype_v16qi_char
4454 = build_function_type (V16QI_type_node,
4455 tree_cons (NULL_TREE, V16QI_type_node,
4456 tree_cons (NULL_TREE, char_type_node,
4459 /* These are for the unsigned 4 bit literals. */
4461 tree v16qi_ftype_v16qi_v16qi_char
4462 = build_function_type (V16QI_type_node,
4463 tree_cons (NULL_TREE, V16QI_type_node,
4464 tree_cons (NULL_TREE, V16QI_type_node,
4465 tree_cons (NULL_TREE,
4469 tree v8hi_ftype_v8hi_v8hi_char
4470 = build_function_type (V8HI_type_node,
4471 tree_cons (NULL_TREE, V8HI_type_node,
4472 tree_cons (NULL_TREE, V8HI_type_node,
4473 tree_cons (NULL_TREE,
4477 tree v4si_ftype_v4si_v4si_char
4478 = build_function_type (V4SI_type_node,
4479 tree_cons (NULL_TREE, V4SI_type_node,
4480 tree_cons (NULL_TREE, V4SI_type_node,
4481 tree_cons (NULL_TREE,
4485 tree v4sf_ftype_v4sf_v4sf_char
4486 = build_function_type (V4SF_type_node,
4487 tree_cons (NULL_TREE, V4SF_type_node,
4488 tree_cons (NULL_TREE, V4SF_type_node,
4489 tree_cons (NULL_TREE,
4493 /* End of 4 bit literals. */
4495 tree v4sf_ftype_v4sf_v4sf
4496 = build_function_type (V4SF_type_node,
4497 tree_cons (NULL_TREE, V4SF_type_node,
4498 tree_cons (NULL_TREE, V4SF_type_node,
4500 tree v4sf_ftype_v4sf_v4sf_v4si
4501 = build_function_type (V4SF_type_node,
4502 tree_cons (NULL_TREE, V4SF_type_node,
4503 tree_cons (NULL_TREE, V4SF_type_node,
4504 tree_cons (NULL_TREE,
4507 tree v4sf_ftype_v4sf_v4sf_v4sf
4508 = build_function_type (V4SF_type_node,
4509 tree_cons (NULL_TREE, V4SF_type_node,
4510 tree_cons (NULL_TREE, V4SF_type_node,
4511 tree_cons (NULL_TREE,
4514 tree v4si_ftype_v4si_v4si_v4si
4515 = build_function_type (V4SI_type_node,
4516 tree_cons (NULL_TREE, V4SI_type_node,
4517 tree_cons (NULL_TREE, V4SI_type_node,
4518 tree_cons (NULL_TREE,
4522 tree v8hi_ftype_v8hi_v8hi
4523 = build_function_type (V8HI_type_node,
4524 tree_cons (NULL_TREE, V8HI_type_node,
4525 tree_cons (NULL_TREE, V8HI_type_node,
4527 tree v8hi_ftype_v8hi_v8hi_v8hi
4528 = build_function_type (V8HI_type_node,
4529 tree_cons (NULL_TREE, V8HI_type_node,
4530 tree_cons (NULL_TREE, V8HI_type_node,
4531 tree_cons (NULL_TREE,
4534 tree v4si_ftype_v8hi_v8hi_v4si
4535 = build_function_type (V4SI_type_node,
4536 tree_cons (NULL_TREE, V8HI_type_node,
4537 tree_cons (NULL_TREE, V8HI_type_node,
4538 tree_cons (NULL_TREE,
4541 tree v4si_ftype_v16qi_v16qi_v4si
4542 = build_function_type (V4SI_type_node,
4543 tree_cons (NULL_TREE, V16QI_type_node,
4544 tree_cons (NULL_TREE, V16QI_type_node,
4545 tree_cons (NULL_TREE,
4549 tree v16qi_ftype_v16qi_v16qi
4550 = build_function_type (V16QI_type_node,
4551 tree_cons (NULL_TREE, V16QI_type_node,
4552 tree_cons (NULL_TREE, V16QI_type_node,
4555 tree v4si_ftype_v4sf_v4sf
4556 = build_function_type (V4SI_type_node,
4557 tree_cons (NULL_TREE, V4SF_type_node,
4558 tree_cons (NULL_TREE, V4SF_type_node,
4561 tree v4si_ftype_v4si
4562 = build_function_type (V4SI_type_node,
4563 tree_cons (NULL_TREE, V4SI_type_node, endlink));
4565 tree v8hi_ftype_v8hi
4566 = build_function_type (V8HI_type_node,
4567 tree_cons (NULL_TREE, V8HI_type_node, endlink));
4569 tree v16qi_ftype_v16qi
4570 = build_function_type (V16QI_type_node,
4571 tree_cons (NULL_TREE, V16QI_type_node, endlink));
4573 tree v8hi_ftype_v16qi_v16qi
4574 = build_function_type (V8HI_type_node,
4575 tree_cons (NULL_TREE, V16QI_type_node,
4576 tree_cons (NULL_TREE, V16QI_type_node,
4579 tree v4si_ftype_v8hi_v8hi
4580 = build_function_type (V4SI_type_node,
4581 tree_cons (NULL_TREE, V8HI_type_node,
4582 tree_cons (NULL_TREE, V8HI_type_node,
4585 tree v8hi_ftype_v4si_v4si
4586 = build_function_type (V8HI_type_node,
4587 tree_cons (NULL_TREE, V4SI_type_node,
4588 tree_cons (NULL_TREE, V4SI_type_node,
4591 tree v16qi_ftype_v8hi_v8hi
4592 = build_function_type (V16QI_type_node,
4593 tree_cons (NULL_TREE, V8HI_type_node,
4594 tree_cons (NULL_TREE, V8HI_type_node,
4597 tree v4si_ftype_v16qi_v4si
4598 = build_function_type (V4SI_type_node,
4599 tree_cons (NULL_TREE, V16QI_type_node,
4600 tree_cons (NULL_TREE, V4SI_type_node,
4603 tree v4si_ftype_v16qi_v16qi
4604 = build_function_type (V4SI_type_node,
4605 tree_cons (NULL_TREE, V16QI_type_node,
4606 tree_cons (NULL_TREE, V16QI_type_node,
4609 tree v4si_ftype_v8hi_v4si
4610 = build_function_type (V4SI_type_node,
4611 tree_cons (NULL_TREE, V8HI_type_node,
4612 tree_cons (NULL_TREE, V4SI_type_node,
4615 tree v4si_ftype_v8hi
4616 = build_function_type (V4SI_type_node,
4617 tree_cons (NULL_TREE, V8HI_type_node, endlink));
4619 tree int_ftype_v4si_v4si
4620 = build_function_type (integer_type_node,
4621 tree_cons (NULL_TREE, V4SI_type_node,
4622 tree_cons (NULL_TREE, V4SI_type_node,
4625 tree int_ftype_v4sf_v4sf
4626 = build_function_type (integer_type_node,
4627 tree_cons (NULL_TREE, V4SF_type_node,
4628 tree_cons (NULL_TREE, V4SF_type_node,
4631 tree int_ftype_v16qi_v16qi
4632 = build_function_type (integer_type_node,
4633 tree_cons (NULL_TREE, V16QI_type_node,
4634 tree_cons (NULL_TREE, V16QI_type_node,
4637 tree int_ftype_int_v4si_v4si
4638 = build_function_type
4640 tree_cons (NULL_TREE, integer_type_node,
4641 tree_cons (NULL_TREE, V4SI_type_node,
4642 tree_cons (NULL_TREE, V4SI_type_node,
4645 tree int_ftype_int_v4sf_v4sf
4646 = build_function_type
4648 tree_cons (NULL_TREE, integer_type_node,
4649 tree_cons (NULL_TREE, V4SF_type_node,
4650 tree_cons (NULL_TREE, V4SF_type_node,
4653 tree int_ftype_int_v8hi_v8hi
4654 = build_function_type
4656 tree_cons (NULL_TREE, integer_type_node,
4657 tree_cons (NULL_TREE, V8HI_type_node,
4658 tree_cons (NULL_TREE, V8HI_type_node,
4661 tree int_ftype_int_v16qi_v16qi
4662 = build_function_type
4664 tree_cons (NULL_TREE, integer_type_node,
4665 tree_cons (NULL_TREE, V16QI_type_node,
4666 tree_cons (NULL_TREE, V16QI_type_node,
4669 tree v16qi_ftype_int_pvoid
4670 = build_function_type (V16QI_type_node,
4671 tree_cons (NULL_TREE, integer_type_node,
4672 tree_cons (NULL_TREE, pvoid_type_node,
4675 tree v4si_ftype_int_pvoid
4676 = build_function_type (V4SI_type_node,
4677 tree_cons (NULL_TREE, integer_type_node,
4678 tree_cons (NULL_TREE, pvoid_type_node,
4681 tree v8hi_ftype_int_pvoid
4682 = build_function_type (V8HI_type_node,
4683 tree_cons (NULL_TREE, integer_type_node,
4684 tree_cons (NULL_TREE, pvoid_type_node,
4687 tree int_ftype_v8hi_v8hi
4688 = build_function_type (integer_type_node,
4689 tree_cons (NULL_TREE, V8HI_type_node,
4690 tree_cons (NULL_TREE, V8HI_type_node,
4693 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pfloat, ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
4694 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf, ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
4695 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pint, ALTIVEC_BUILTIN_LD_INTERNAL_4si);
4696 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si, ALTIVEC_BUILTIN_ST_INTERNAL_4si);
4697 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pshort, ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
4698 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi, ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
4699 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pchar, ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
4700 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi, ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
4701 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
4702 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
4703 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
4704 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
4705 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSL);
4706 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVSR);
4707 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEBX);
4708 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEHX);
4709 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVEWX);
4710 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVXL);
4711 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pvoid, ALTIVEC_BUILTIN_LVX);
4712 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
4713 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
4714 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
4715 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
4716 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
4718 /* Add the simple ternary operators. */
4719 d = (struct builtin_description *) bdesc_3arg;
4720 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
4723 enum machine_mode mode0, mode1, mode2, mode3;
4729 mode0 = insn_data[d->icode].operand[0].mode;
4730 mode1 = insn_data[d->icode].operand[1].mode;
4731 mode2 = insn_data[d->icode].operand[2].mode;
4732 mode3 = insn_data[d->icode].operand[3].mode;
4734 /* When all four are of the same mode. */
4735 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
4740 type = v4si_ftype_v4si_v4si_v4si;
4743 type = v4sf_ftype_v4sf_v4sf_v4sf;
4746 type = v8hi_ftype_v8hi_v8hi_v8hi;
4749 type = v16qi_ftype_v16qi_v16qi_v16qi;
4755 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
4760 type = v4si_ftype_v4si_v4si_v16qi;
4763 type = v4sf_ftype_v4sf_v4sf_v16qi;
4766 type = v8hi_ftype_v8hi_v8hi_v16qi;
4769 type = v16qi_ftype_v16qi_v16qi_v16qi;
4775 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
4776 && mode3 == V4SImode)
4777 type = v4si_ftype_v16qi_v16qi_v4si;
4778 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
4779 && mode3 == V4SImode)
4780 type = v4si_ftype_v8hi_v8hi_v4si;
4781 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
4782 && mode3 == V4SImode)
4783 type = v4sf_ftype_v4sf_v4sf_v4si;
4785 /* vchar, vchar, vchar, 4 bit literal. */
4786 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
4788 type = v16qi_ftype_v16qi_v16qi_char;
4790 /* vshort, vshort, vshort, 4 bit literal. */
4791 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
4793 type = v8hi_ftype_v8hi_v8hi_char;
4795 /* vint, vint, vint, 4 bit literal. */
4796 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
4798 type = v4si_ftype_v4si_v4si_char;
4800 /* vfloat, vfloat, vfloat, 4 bit literal. */
4801 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
4803 type = v4sf_ftype_v4sf_v4sf_char;
4808 def_builtin (d->mask, d->name, type, d->code);
4811 /* Add the DST variants. */
4812 d = (struct builtin_description *) bdesc_dst;
4813 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
4814 def_builtin (d->mask, d->name, void_ftype_pvoid_int_char, d->code);
4816 /* Initialize the predicates. */
4817 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
4818 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
4820 enum machine_mode mode1;
4823 mode1 = insn_data[dp->icode].operand[1].mode;
4828 type = int_ftype_int_v4si_v4si;
4831 type = int_ftype_int_v8hi_v8hi;
4834 type = int_ftype_int_v16qi_v16qi;
4837 type = int_ftype_int_v4sf_v4sf;
4843 def_builtin (dp->mask, dp->name, type, dp->code);
4846 /* Add the simple binary operators. */
4847 d = (struct builtin_description *) bdesc_2arg;
4848 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
4850 enum machine_mode mode0, mode1, mode2;
4856 mode0 = insn_data[d->icode].operand[0].mode;
4857 mode1 = insn_data[d->icode].operand[1].mode;
4858 mode2 = insn_data[d->icode].operand[2].mode;
4860 /* When all three operands are of the same mode. */
4861 if (mode0 == mode1 && mode1 == mode2)
4866 type = v4sf_ftype_v4sf_v4sf;
4869 type = v4si_ftype_v4si_v4si;
4872 type = v16qi_ftype_v16qi_v16qi;
4875 type = v8hi_ftype_v8hi_v8hi;
4882 /* A few other combos we really don't want to do manually. */
4884 /* vint, vfloat, vfloat. */
4885 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
4886 type = v4si_ftype_v4sf_v4sf;
4888 /* vshort, vchar, vchar. */
4889 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
4890 type = v8hi_ftype_v16qi_v16qi;
4892 /* vint, vshort, vshort. */
4893 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
4894 type = v4si_ftype_v8hi_v8hi;
4896 /* vshort, vint, vint. */
4897 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
4898 type = v8hi_ftype_v4si_v4si;
4900 /* vchar, vshort, vshort. */
4901 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
4902 type = v16qi_ftype_v8hi_v8hi;
4904 /* vint, vchar, vint. */
4905 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
4906 type = v4si_ftype_v16qi_v4si;
4908 /* vint, vchar, vchar. */
4909 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
4910 type = v4si_ftype_v16qi_v16qi;
4912 /* vint, vshort, vint. */
4913 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
4914 type = v4si_ftype_v8hi_v4si;
4916 /* vint, vint, 5 bit literal. */
4917 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
4918 type = v4si_ftype_v4si_char;
4920 /* vshort, vshort, 5 bit literal. */
4921 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
4922 type = v8hi_ftype_v8hi_char;
4924 /* vchar, vchar, 5 bit literal. */
4925 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
4926 type = v16qi_ftype_v16qi_char;
4928 /* vfloat, vint, 5 bit literal. */
4929 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
4930 type = v4sf_ftype_v4si_char;
4932 /* vint, vfloat, 5 bit literal. */
4933 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
4934 type = v4si_ftype_v4sf_char;
4937 else if (mode0 == SImode)
4942 type = int_ftype_v4si_v4si;
4945 type = int_ftype_v4sf_v4sf;
4948 type = int_ftype_v16qi_v16qi;
4951 type = int_ftype_v8hi_v8hi;
4961 def_builtin (d->mask, d->name, type, d->code);
4964 /* Initialize the abs* operators. */
4965 d = (struct builtin_description *) bdesc_abs;
4966 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
4968 enum machine_mode mode0;
4971 mode0 = insn_data[d->icode].operand[0].mode;
4976 type = v4si_ftype_v4si;
4979 type = v8hi_ftype_v8hi;
4982 type = v16qi_ftype_v16qi;
4985 type = v4sf_ftype_v4sf;
4991 def_builtin (d->mask, d->name, type, d->code);
4994 /* Add the simple unary operators. */
4995 d = (struct builtin_description *) bdesc_1arg;
4996 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
4998 enum machine_mode mode0, mode1;
5004 mode0 = insn_data[d->icode].operand[0].mode;
5005 mode1 = insn_data[d->icode].operand[1].mode;
5007 if (mode0 == V4SImode && mode1 == QImode)
5008 type = v4si_ftype_char;
5009 else if (mode0 == V8HImode && mode1 == QImode)
5010 type = v8hi_ftype_char;
5011 else if (mode0 == V16QImode && mode1 == QImode)
5012 type = v16qi_ftype_char;
5013 else if (mode0 == V4SFmode && mode1 == V4SFmode)
5014 type = v4sf_ftype_v4sf;
5015 else if (mode0 == V8HImode && mode1 == V16QImode)
5016 type = v8hi_ftype_v16qi;
5017 else if (mode0 == V4SImode && mode1 == V8HImode)
5018 type = v4si_ftype_v8hi;
5022 def_builtin (d->mask, d->name, type, d->code);
5027 /* Generate a memory reference for expand_block_move, copying volatile,
5028 and other bits from an original memory reference. */
5031 expand_block_move_mem (mode, addr, orig_mem)
5032 enum machine_mode mode;
5036 rtx mem = gen_rtx_MEM (mode, addr);
5038 MEM_COPY_ATTRIBUTES (mem, orig_mem);
5042 /* Expand a block move operation, and return 1 if successful. Return 0
5043 if we should let the compiler generate normal code.
5045 operands[0] is the destination
5046 operands[1] is the source
5047 operands[2] is the length
5048 operands[3] is the alignment */
5050 #define MAX_MOVE_REG 4
5053 expand_block_move (operands)
5056 rtx orig_dest = operands[0];
5057 rtx orig_src = operands[1];
5058 rtx bytes_rtx = operands[2];
5059 rtx align_rtx = operands[3];
5060 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
5071 rtx stores[MAX_MOVE_REG];
5074 /* If this is not a fixed size move, just call memcpy */
5078 /* If this is not a fixed size alignment, abort */
5079 if (GET_CODE (align_rtx) != CONST_INT)
5081 align = INTVAL (align_rtx);
5083 /* Anything to move? */
5084 bytes = INTVAL (bytes_rtx);
5088 /* store_one_arg depends on expand_block_move to handle at least the size of
5089 reg_parm_stack_space. */
5090 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
5093 /* Move the address into scratch registers. */
5094 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
5095 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
5097 if (TARGET_STRING) /* string instructions are available */
5099 for ( ; bytes > 0; bytes -= move_bytes)
5101 if (bytes > 24 /* move up to 32 bytes at a time */
5109 && ! fixed_regs[12])
5111 move_bytes = (bytes > 32) ? 32 : bytes;
5112 emit_insn (gen_movstrsi_8reg (expand_block_move_mem (BLKmode,
5115 expand_block_move_mem (BLKmode,
5118 GEN_INT ((move_bytes == 32)
5122 else if (bytes > 16 /* move up to 24 bytes at a time */
5128 && ! fixed_regs[10])
5130 move_bytes = (bytes > 24) ? 24 : bytes;
5131 emit_insn (gen_movstrsi_6reg (expand_block_move_mem (BLKmode,
5134 expand_block_move_mem (BLKmode,
5137 GEN_INT (move_bytes),
5140 else if (bytes > 8 /* move up to 16 bytes at a time */
5146 move_bytes = (bytes > 16) ? 16 : bytes;
5147 emit_insn (gen_movstrsi_4reg (expand_block_move_mem (BLKmode,
5150 expand_block_move_mem (BLKmode,
5153 GEN_INT (move_bytes),
5156 else if (bytes >= 8 && TARGET_POWERPC64
5157 /* 64-bit loads and stores require word-aligned
5159 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
5162 tmp_reg = gen_reg_rtx (DImode);
5163 emit_move_insn (tmp_reg,
5164 expand_block_move_mem (DImode,
5165 src_reg, orig_src));
5166 emit_move_insn (expand_block_move_mem (DImode,
5167 dest_reg, orig_dest),
5170 else if (bytes > 4 && !TARGET_POWERPC64)
5171 { /* move up to 8 bytes at a time */
5172 move_bytes = (bytes > 8) ? 8 : bytes;
5173 emit_insn (gen_movstrsi_2reg (expand_block_move_mem (BLKmode,
5176 expand_block_move_mem (BLKmode,
5179 GEN_INT (move_bytes),
5182 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
5183 { /* move 4 bytes */
5185 tmp_reg = gen_reg_rtx (SImode);
5186 emit_move_insn (tmp_reg,
5187 expand_block_move_mem (SImode,
5188 src_reg, orig_src));
5189 emit_move_insn (expand_block_move_mem (SImode,
5190 dest_reg, orig_dest),
5193 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
5194 { /* move 2 bytes */
5196 tmp_reg = gen_reg_rtx (HImode);
5197 emit_move_insn (tmp_reg,
5198 expand_block_move_mem (HImode,
5199 src_reg, orig_src));
5200 emit_move_insn (expand_block_move_mem (HImode,
5201 dest_reg, orig_dest),
5204 else if (bytes == 1) /* move 1 byte */
5207 tmp_reg = gen_reg_rtx (QImode);
5208 emit_move_insn (tmp_reg,
5209 expand_block_move_mem (QImode,
5210 src_reg, orig_src));
5211 emit_move_insn (expand_block_move_mem (QImode,
5212 dest_reg, orig_dest),
5216 { /* move up to 4 bytes at a time */
5217 move_bytes = (bytes > 4) ? 4 : bytes;
5218 emit_insn (gen_movstrsi_1reg (expand_block_move_mem (BLKmode,
5221 expand_block_move_mem (BLKmode,
5224 GEN_INT (move_bytes),
5228 if (bytes > move_bytes)
5230 if (! TARGET_POWERPC64)
5232 emit_insn (gen_addsi3 (src_reg, src_reg,
5233 GEN_INT (move_bytes)));
5234 emit_insn (gen_addsi3 (dest_reg, dest_reg,
5235 GEN_INT (move_bytes)));
5239 emit_insn (gen_adddi3 (src_reg, src_reg,
5240 GEN_INT (move_bytes)));
5241 emit_insn (gen_adddi3 (dest_reg, dest_reg,
5242 GEN_INT (move_bytes)));
5248 else /* string instructions not available */
5250 num_reg = offset = 0;
5251 for ( ; bytes > 0; (bytes -= move_bytes), (offset += move_bytes))
5253 /* Calculate the correct offset for src/dest */
5257 dest_addr = dest_reg;
5261 src_addr = plus_constant (src_reg, offset);
5262 dest_addr = plus_constant (dest_reg, offset);
5265 /* Generate the appropriate load and store, saving the stores
5267 if (bytes >= 8 && TARGET_POWERPC64
5268 /* 64-bit loads and stores require word-aligned
5270 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
5273 tmp_reg = gen_reg_rtx (DImode);
5274 emit_insn (gen_movdi (tmp_reg,
5275 expand_block_move_mem (DImode,
5278 stores[num_reg++] = gen_movdi (expand_block_move_mem (DImode,
5283 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
5286 tmp_reg = gen_reg_rtx (SImode);
5287 emit_insn (gen_movsi (tmp_reg,
5288 expand_block_move_mem (SImode,
5291 stores[num_reg++] = gen_movsi (expand_block_move_mem (SImode,
5296 else if (bytes >= 2 && (align >= 2 || ! STRICT_ALIGNMENT))
5299 tmp_reg = gen_reg_rtx (HImode);
5300 emit_insn (gen_movhi (tmp_reg,
5301 expand_block_move_mem (HImode,
5304 stores[num_reg++] = gen_movhi (expand_block_move_mem (HImode,
5312 tmp_reg = gen_reg_rtx (QImode);
5313 emit_insn (gen_movqi (tmp_reg,
5314 expand_block_move_mem (QImode,
5317 stores[num_reg++] = gen_movqi (expand_block_move_mem (QImode,
5323 if (num_reg >= MAX_MOVE_REG)
5325 for (i = 0; i < num_reg; i++)
5326 emit_insn (stores[i]);
5331 for (i = 0; i < num_reg; i++)
5332 emit_insn (stores[i]);
5339 /* Return 1 if OP is a load multiple operation. It is known to be a
5340 PARALLEL and the first section will be tested. */
5343 load_multiple_operation (op, mode)
5345 enum machine_mode mode ATTRIBUTE_UNUSED;
5347 int count = XVECLEN (op, 0);
5348 unsigned int dest_regno;
5352 /* Perform a quick check so we don't blow up below. */
5354 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5355 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5356 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
5359 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5360 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
5362 for (i = 1; i < count; i++)
5364 rtx elt = XVECEXP (op, 0, i);
5366 if (GET_CODE (elt) != SET
5367 || GET_CODE (SET_DEST (elt)) != REG
5368 || GET_MODE (SET_DEST (elt)) != SImode
5369 || REGNO (SET_DEST (elt)) != dest_regno + i
5370 || GET_CODE (SET_SRC (elt)) != MEM
5371 || GET_MODE (SET_SRC (elt)) != SImode
5372 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
5373 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
5374 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
5375 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
5382 /* Similar, but tests for store multiple. Here, the second vector element
5383 is a CLOBBER. It will be tested later. */
5386 store_multiple_operation (op, mode)
5388 enum machine_mode mode ATTRIBUTE_UNUSED;
5390 int count = XVECLEN (op, 0) - 1;
5391 unsigned int src_regno;
5395 /* Perform a quick check so we don't blow up below. */
5397 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5398 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5399 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
5402 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5403 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
5405 for (i = 1; i < count; i++)
5407 rtx elt = XVECEXP (op, 0, i + 1);
5409 if (GET_CODE (elt) != SET
5410 || GET_CODE (SET_SRC (elt)) != REG
5411 || GET_MODE (SET_SRC (elt)) != SImode
5412 || REGNO (SET_SRC (elt)) != src_regno + i
5413 || GET_CODE (SET_DEST (elt)) != MEM
5414 || GET_MODE (SET_DEST (elt)) != SImode
5415 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
5416 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
5417 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
5418 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
5425 /* Return 1 for a parallel vrsave operation. */
5428 vrsave_operation (op, mode)
5430 enum machine_mode mode ATTRIBUTE_UNUSED;
5432 int count = XVECLEN (op, 0);
5433 unsigned int dest_regno, src_regno;
5437 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5438 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5439 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
5442 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5443 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5445 if (dest_regno != VRSAVE_REGNO
5446 && src_regno != VRSAVE_REGNO)
5449 for (i = 1; i < count; i++)
5451 rtx elt = XVECEXP (op, 0, i);
5453 if (GET_CODE (elt) != CLOBBER
5454 && GET_CODE (elt) != SET)
5461 /* Return 1 for an PARALLEL suitable for mtcrf. */
5464 mtcrf_operation (op, mode)
5466 enum machine_mode mode ATTRIBUTE_UNUSED;
5468 int count = XVECLEN (op, 0);
5472 /* Perform a quick check so we don't blow up below. */
5474 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5475 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
5476 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
5478 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
5480 if (GET_CODE (src_reg) != REG
5481 || GET_MODE (src_reg) != SImode
5482 || ! INT_REGNO_P (REGNO (src_reg)))
5485 for (i = 0; i < count; i++)
5487 rtx exp = XVECEXP (op, 0, i);
5491 if (GET_CODE (exp) != SET
5492 || GET_CODE (SET_DEST (exp)) != REG
5493 || GET_MODE (SET_DEST (exp)) != CCmode
5494 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
5496 unspec = SET_SRC (exp);
5497 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
5499 if (GET_CODE (unspec) != UNSPEC
5500 || XINT (unspec, 1) != 20
5501 || XVECLEN (unspec, 0) != 2
5502 || XVECEXP (unspec, 0, 0) != src_reg
5503 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
5504 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
5510 /* Return 1 for an PARALLEL suitable for lmw. */
5513 lmw_operation (op, mode)
5515 enum machine_mode mode ATTRIBUTE_UNUSED;
5517 int count = XVECLEN (op, 0);
5518 unsigned int dest_regno;
5520 unsigned int base_regno;
5521 HOST_WIDE_INT offset;
5524 /* Perform a quick check so we don't blow up below. */
5526 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5527 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
5528 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
5531 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
5532 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
5535 || count != 32 - (int) dest_regno)
5538 if (LEGITIMATE_INDIRECT_ADDRESS_P (src_addr, 0))
5541 base_regno = REGNO (src_addr);
5542 if (base_regno == 0)
5545 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, src_addr, 0))
5547 offset = INTVAL (XEXP (src_addr, 1));
5548 base_regno = REGNO (XEXP (src_addr, 0));
5553 for (i = 0; i < count; i++)
5555 rtx elt = XVECEXP (op, 0, i);
5558 HOST_WIDE_INT newoffset;
5560 if (GET_CODE (elt) != SET
5561 || GET_CODE (SET_DEST (elt)) != REG
5562 || GET_MODE (SET_DEST (elt)) != SImode
5563 || REGNO (SET_DEST (elt)) != dest_regno + i
5564 || GET_CODE (SET_SRC (elt)) != MEM
5565 || GET_MODE (SET_SRC (elt)) != SImode)
5567 newaddr = XEXP (SET_SRC (elt), 0);
5568 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
5573 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
5575 addr_reg = XEXP (newaddr, 0);
5576 newoffset = INTVAL (XEXP (newaddr, 1));
5580 if (REGNO (addr_reg) != base_regno
5581 || newoffset != offset + 4 * i)
5588 /* Return 1 for an PARALLEL suitable for stmw. */
5591 stmw_operation (op, mode)
5593 enum machine_mode mode ATTRIBUTE_UNUSED;
5595 int count = XVECLEN (op, 0);
5596 unsigned int src_regno;
5598 unsigned int base_regno;
5599 HOST_WIDE_INT offset;
5602 /* Perform a quick check so we don't blow up below. */
5604 || GET_CODE (XVECEXP (op, 0, 0)) != SET
5605 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
5606 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
5609 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
5610 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
5613 || count != 32 - (int) src_regno)
5616 if (LEGITIMATE_INDIRECT_ADDRESS_P (dest_addr, 0))
5619 base_regno = REGNO (dest_addr);
5620 if (base_regno == 0)
5623 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, dest_addr, 0))
5625 offset = INTVAL (XEXP (dest_addr, 1));
5626 base_regno = REGNO (XEXP (dest_addr, 0));
5631 for (i = 0; i < count; i++)
5633 rtx elt = XVECEXP (op, 0, i);
5636 HOST_WIDE_INT newoffset;
5638 if (GET_CODE (elt) != SET
5639 || GET_CODE (SET_SRC (elt)) != REG
5640 || GET_MODE (SET_SRC (elt)) != SImode
5641 || REGNO (SET_SRC (elt)) != src_regno + i
5642 || GET_CODE (SET_DEST (elt)) != MEM
5643 || GET_MODE (SET_DEST (elt)) != SImode)
5645 newaddr = XEXP (SET_DEST (elt), 0);
5646 if (LEGITIMATE_INDIRECT_ADDRESS_P (newaddr, 0))
5651 else if (LEGITIMATE_OFFSET_ADDRESS_P (SImode, newaddr, 0))
5653 addr_reg = XEXP (newaddr, 0);
5654 newoffset = INTVAL (XEXP (newaddr, 1));
5658 if (REGNO (addr_reg) != base_regno
5659 || newoffset != offset + 4 * i)
5666 /* A validation routine: say whether CODE, a condition code, and MODE
5667 match. The other alternatives either don't make sense or should
5668 never be generated. */
5671 validate_condition_mode (code, mode)
5673 enum machine_mode mode;
5675 if (GET_RTX_CLASS (code) != '<'
5676 || GET_MODE_CLASS (mode) != MODE_CC)
5679 /* These don't make sense. */
5680 if ((code == GT || code == LT || code == GE || code == LE)
5681 && mode == CCUNSmode)
5684 if ((code == GTU || code == LTU || code == GEU || code == LEU)
5685 && mode != CCUNSmode)
5688 if (mode != CCFPmode
5689 && (code == ORDERED || code == UNORDERED
5690 || code == UNEQ || code == LTGT
5691 || code == UNGT || code == UNLT
5692 || code == UNGE || code == UNLE))
5695 /* These should never be generated except for
5696 flag_unsafe_math_optimizations. */
5697 if (mode == CCFPmode
5698 && ! flag_unsafe_math_optimizations
5699 && (code == LE || code == GE
5700 || code == UNEQ || code == LTGT
5701 || code == UNGT || code == UNLT))
5704 /* These are invalid; the information is not there. */
5705 if (mode == CCEQmode
5706 && code != EQ && code != NE)
5710 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
5711 We only check the opcode against the mode of the CC value here. */
5714 branch_comparison_operator (op, mode)
5716 enum machine_mode mode ATTRIBUTE_UNUSED;
5718 enum rtx_code code = GET_CODE (op);
5719 enum machine_mode cc_mode;
5721 if (GET_RTX_CLASS (code) != '<')
5724 cc_mode = GET_MODE (XEXP (op, 0));
5725 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5728 validate_condition_mode (code, cc_mode);
5733 /* Return 1 if OP is a comparison operation that is valid for a branch
5734 insn and which is true if the corresponding bit in the CC register
5738 branch_positive_comparison_operator (op, mode)
5740 enum machine_mode mode;
5744 if (! branch_comparison_operator (op, mode))
5747 code = GET_CODE (op);
5748 return (code == EQ || code == LT || code == GT
5749 || code == LTU || code == GTU
5750 || code == UNORDERED);
5753 /* Return 1 if OP is a comparison operation that is valid for an scc insn.
5754 We check the opcode against the mode of the CC value and disallow EQ or
5755 NE comparisons for integers. */
5758 scc_comparison_operator (op, mode)
5760 enum machine_mode mode;
5762 enum rtx_code code = GET_CODE (op);
5763 enum machine_mode cc_mode;
5765 if (GET_MODE (op) != mode && mode != VOIDmode)
5768 if (GET_RTX_CLASS (code) != '<')
5771 cc_mode = GET_MODE (XEXP (op, 0));
5772 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
5775 validate_condition_mode (code, cc_mode);
5777 if (code == NE && cc_mode != CCFPmode)
5784 trap_comparison_operator (op, mode)
5786 enum machine_mode mode;
5788 if (mode != VOIDmode && mode != GET_MODE (op))
5790 return GET_RTX_CLASS (GET_CODE (op)) == '<';
5794 boolean_operator (op, mode)
5796 enum machine_mode mode ATTRIBUTE_UNUSED;
5798 enum rtx_code code = GET_CODE (op);
5799 return (code == AND || code == IOR || code == XOR);
5803 boolean_or_operator (op, mode)
5805 enum machine_mode mode ATTRIBUTE_UNUSED;
5807 enum rtx_code code = GET_CODE (op);
5808 return (code == IOR || code == XOR);
5812 min_max_operator (op, mode)
5814 enum machine_mode mode ATTRIBUTE_UNUSED;
5816 enum rtx_code code = GET_CODE (op);
5817 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
5820 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
5821 mask required to convert the result of a rotate insn into a shift
5822 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
5825 includes_lshift_p (shiftop, andop)
5829 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5831 shift_mask <<= INTVAL (shiftop);
5833 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
5836 /* Similar, but for right shift. */
5839 includes_rshift_p (shiftop, andop)
5843 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
5845 shift_mask >>= INTVAL (shiftop);
5847 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
5850 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
5851 to perform a left shift. It must have exactly SHIFTOP least
5852 signifigant 0's, then one or more 1's, then zero or more 0's. */
5855 includes_rldic_lshift_p (shiftop, andop)
5859 if (GET_CODE (andop) == CONST_INT)
5861 HOST_WIDE_INT c, lsb, shift_mask;
5864 if (c == 0 || c == ~0)
5868 shift_mask <<= INTVAL (shiftop);
5870 /* Find the least signifigant one bit. */
5873 /* It must coincide with the LSB of the shift mask. */
5874 if (-lsb != shift_mask)
5877 /* Invert to look for the next transition (if any). */
5880 /* Remove the low group of ones (originally low group of zeros). */
5883 /* Again find the lsb, and check we have all 1's above. */
5887 else if (GET_CODE (andop) == CONST_DOUBLE
5888 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
5890 HOST_WIDE_INT low, high, lsb;
5891 HOST_WIDE_INT shift_mask_low, shift_mask_high;
5893 low = CONST_DOUBLE_LOW (andop);
5894 if (HOST_BITS_PER_WIDE_INT < 64)
5895 high = CONST_DOUBLE_HIGH (andop);
5897 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
5898 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
5901 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
5903 shift_mask_high = ~0;
5904 if (INTVAL (shiftop) > 32)
5905 shift_mask_high <<= INTVAL (shiftop) - 32;
5909 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
5916 return high == -lsb;
5919 shift_mask_low = ~0;
5920 shift_mask_low <<= INTVAL (shiftop);
5924 if (-lsb != shift_mask_low)
5927 if (HOST_BITS_PER_WIDE_INT < 64)
5932 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
5935 return high == -lsb;
5939 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
5945 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
5946 to perform a left shift. It must have SHIFTOP or more least
5947 signifigant 0's, with the remainder of the word 1's. */
5950 includes_rldicr_lshift_p (shiftop, andop)
5954 if (GET_CODE (andop) == CONST_INT)
5956 HOST_WIDE_INT c, lsb, shift_mask;
5959 shift_mask <<= INTVAL (shiftop);
5962 /* Find the least signifigant one bit. */
5965 /* It must be covered by the shift mask.
5966 This test also rejects c == 0. */
5967 if ((lsb & shift_mask) == 0)
5970 /* Check we have all 1's above the transition, and reject all 1's. */
5971 return c == -lsb && lsb != 1;
5973 else if (GET_CODE (andop) == CONST_DOUBLE
5974 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
5976 HOST_WIDE_INT low, lsb, shift_mask_low;
5978 low = CONST_DOUBLE_LOW (andop);
5980 if (HOST_BITS_PER_WIDE_INT < 64)
5982 HOST_WIDE_INT high, shift_mask_high;
5984 high = CONST_DOUBLE_HIGH (andop);
5988 shift_mask_high = ~0;
5989 if (INTVAL (shiftop) > 32)
5990 shift_mask_high <<= INTVAL (shiftop) - 32;
5994 if ((lsb & shift_mask_high) == 0)
5997 return high == -lsb;
6003 shift_mask_low = ~0;
6004 shift_mask_low <<= INTVAL (shiftop);
6008 if ((lsb & shift_mask_low) == 0)
6011 return low == -lsb && lsb != 1;
6017 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
6018 for lfq and stfq insns.
6020 Note reg1 and reg2 *must* be hard registers. To be sure we will
6021 abort if we are passed pseudo registers. */
6024 registers_ok_for_quad_peep (reg1, reg2)
6027 /* We might have been passed a SUBREG. */
6028 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
6031 return (REGNO (reg1) == REGNO (reg2) - 1);
6034 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
6035 addr1 and addr2 must be in consecutive memory locations
6036 (addr2 == addr1 + 8). */
6039 addrs_ok_for_quad_peep (addr1, addr2)
6046 /* Extract an offset (if used) from the first addr. */
6047 if (GET_CODE (addr1) == PLUS)
6049 /* If not a REG, return zero. */
6050 if (GET_CODE (XEXP (addr1, 0)) != REG)
6054 reg1 = REGNO (XEXP (addr1, 0));
6055 /* The offset must be constant! */
6056 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
6058 offset1 = INTVAL (XEXP (addr1, 1));
6061 else if (GET_CODE (addr1) != REG)
6065 reg1 = REGNO (addr1);
6066 /* This was a simple (mem (reg)) expression. Offset is 0. */
6070 /* Make sure the second address is a (mem (plus (reg) (const_int))). */
6071 if (GET_CODE (addr2) != PLUS)
6074 if (GET_CODE (XEXP (addr2, 0)) != REG
6075 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
6078 if (reg1 != REGNO (XEXP (addr2, 0)))
6081 /* The offset for the second addr must be 8 more than the first addr. */
6082 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
6085 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
6090 /* Return the register class of a scratch register needed to copy IN into
6091 or out of a register in CLASS in MODE. If it can be done directly,
6092 NO_REGS is returned. */
6095 secondary_reload_class (class, mode, in)
6096 enum reg_class class;
6097 enum machine_mode mode ATTRIBUTE_UNUSED;
6102 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN && flag_pic))
6104 /* We cannot copy a symbolic operand directly into anything
6105 other than BASE_REGS for TARGET_ELF. So indicate that a
6106 register from BASE_REGS is needed as an intermediate
6109 On Darwin, pic addresses require a load from memory, which
6110 needs a base register. */
6111 if (class != BASE_REGS
6112 && (GET_CODE (in) == SYMBOL_REF
6113 || GET_CODE (in) == HIGH
6114 || GET_CODE (in) == LABEL_REF
6115 || GET_CODE (in) == CONST))
6119 if (GET_CODE (in) == REG)
6122 if (regno >= FIRST_PSEUDO_REGISTER)
6124 regno = true_regnum (in);
6125 if (regno >= FIRST_PSEUDO_REGISTER)
6129 else if (GET_CODE (in) == SUBREG)
6131 regno = true_regnum (in);
6132 if (regno >= FIRST_PSEUDO_REGISTER)
6138 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
6140 if (class == GENERAL_REGS || class == BASE_REGS
6141 || (regno >= 0 && INT_REGNO_P (regno)))
6144 /* Constants, memory, and FP registers can go into FP registers. */
6145 if ((regno == -1 || FP_REGNO_P (regno))
6146 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
6149 /* Memory, and AltiVec registers can go into AltiVec registers. */
6150 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
6151 && class == ALTIVEC_REGS)
6154 /* We can copy among the CR registers. */
6155 if ((class == CR_REGS || class == CR0_REGS)
6156 && regno >= 0 && CR_REGNO_P (regno))
6159 /* Otherwise, we need GENERAL_REGS. */
6160 return GENERAL_REGS;
6163 /* Given a comparison operation, return the bit number in CCR to test. We
6164 know this is a valid comparison.
6166 SCC_P is 1 if this is for an scc. That means that %D will have been
6167 used instead of %C, so the bits will be in different places.
6169 Return -1 if OP isn't a valid comparison for some reason. */
6176 enum rtx_code code = GET_CODE (op);
6177 enum machine_mode cc_mode;
6182 if (GET_RTX_CLASS (code) != '<')
6187 if (GET_CODE (reg) != REG
6188 || ! CR_REGNO_P (REGNO (reg)))
6191 cc_mode = GET_MODE (reg);
6192 cc_regnum = REGNO (reg);
6193 base_bit = 4 * (cc_regnum - CR0_REGNO);
6195 validate_condition_mode (code, cc_mode);
6200 return scc_p ? base_bit + 3 : base_bit + 2;
6202 return base_bit + 2;
6203 case GT: case GTU: case UNLE:
6204 return base_bit + 1;
6205 case LT: case LTU: case UNGE:
6207 case ORDERED: case UNORDERED:
6208 return base_bit + 3;
6211 /* If scc, we will have done a cror to put the bit in the
6212 unordered position. So test that bit. For integer, this is ! LT
6213 unless this is an scc insn. */
6214 return scc_p ? base_bit + 3 : base_bit;
6217 return scc_p ? base_bit + 3 : base_bit + 1;
6224 /* Return the GOT register. */
6227 rs6000_got_register (value)
6228 rtx value ATTRIBUTE_UNUSED;
6230 /* The second flow pass currently (June 1999) can't update
6231 regs_ever_live without disturbing other parts of the compiler, so
6232 update it here to make the prolog/epilogue code happy. */
6233 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
6234 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
6236 current_function_uses_pic_offset_table = 1;
6238 return pic_offset_table_rtx;
6241 /* Functions to init, mark and free struct machine_function.
6242 These will be called, via pointer variables,
6243 from push_function_context and pop_function_context. */
6246 rs6000_init_machine_status (p)
6249 p->machine = (machine_function *) xcalloc (1, sizeof (machine_function));
6253 rs6000_free_machine_status (p)
6256 if (p->machine == NULL)
6264 /* Print an operand. Recognize special options, documented below. */
6267 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
6268 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
6270 #define SMALL_DATA_RELOC "sda21"
6271 #define SMALL_DATA_REG 0
6275 print_operand (file, x, code)
6283 /* These macros test for integers and extract the low-order bits. */
6285 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
6286 && GET_MODE (X) == VOIDmode)
6288 #define INT_LOWPART(X) \
6289 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
6294 /* Write out an instruction after the call which may be replaced
6295 with glue code by the loader. This depends on the AIX version. */
6296 asm_fprintf (file, RS6000_CALL_GLUE);
6299 /* %a is output_address. */
6302 /* If X is a constant integer whose low-order 5 bits are zero,
6303 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
6304 in the AIX assembler where "sri" with a zero shift count
6305 writes a trash instruction. */
6306 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
6313 /* If constant, low-order 16 bits of constant, unsigned.
6314 Otherwise, write normally. */
6316 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
6318 print_operand (file, x, 0);
6322 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
6323 for 64-bit mask direction. */
6324 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
6327 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
6331 /* There used to be a comment for 'C' reading "This is an
6332 optional cror needed for certain floating-point
6333 comparisons. Otherwise write nothing." */
6335 /* Similar, except that this is for an scc, so we must be able to
6336 encode the test in a single bit that is one. We do the above
6337 for any LE, GE, GEU, or LEU and invert the bit for NE. */
6338 if (GET_CODE (x) == LE || GET_CODE (x) == GE
6339 || GET_CODE (x) == LEU || GET_CODE (x) == GEU)
6341 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6343 fprintf (file, "cror %d,%d,%d\n\t", base_bit + 3,
6345 base_bit + (GET_CODE (x) == GE || GET_CODE (x) == GEU));
6348 else if (GET_CODE (x) == NE)
6350 int base_bit = 4 * (REGNO (XEXP (x, 0)) - CR0_REGNO);
6352 fprintf (file, "crnor %d,%d,%d\n\t", base_bit + 3,
6353 base_bit + 2, base_bit + 2);
6358 /* X is a CR register. Print the number of the EQ bit of the CR */
6359 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6360 output_operand_lossage ("invalid %%E value");
6362 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
6366 /* X is a CR register. Print the shift count needed to move it
6367 to the high-order four bits. */
6368 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6369 output_operand_lossage ("invalid %%f value");
6371 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
6375 /* Similar, but print the count for the rotate in the opposite
6377 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6378 output_operand_lossage ("invalid %%F value");
6380 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
6384 /* X is a constant integer. If it is negative, print "m",
6385 otherwise print "z". This is to make a aze or ame insn. */
6386 if (GET_CODE (x) != CONST_INT)
6387 output_operand_lossage ("invalid %%G value");
6388 else if (INTVAL (x) >= 0)
6395 /* If constant, output low-order five bits. Otherwise, write
6398 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
6400 print_operand (file, x, 0);
6404 /* If constant, output low-order six bits. Otherwise, write
6407 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
6409 print_operand (file, x, 0);
6413 /* Print `i' if this is a constant, else nothing. */
6419 /* Write the bit number in CCR for jump. */
6422 output_operand_lossage ("invalid %%j code");
6424 fprintf (file, "%d", i);
6428 /* Similar, but add one for shift count in rlinm for scc and pass
6429 scc flag to `ccr_bit'. */
6432 output_operand_lossage ("invalid %%J code");
6434 /* If we want bit 31, write a shift count of zero, not 32. */
6435 fprintf (file, "%d", i == 31 ? 0 : i + 1);
6439 /* X must be a constant. Write the 1's complement of the
6442 output_operand_lossage ("invalid %%k value");
6444 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
6448 /* X must be a symbolic constant on ELF. Write an
6449 expression suitable for an 'addi' that adds in the low 16
6451 if (GET_CODE (x) != CONST)
6453 print_operand_address (file, x);
6458 if (GET_CODE (XEXP (x, 0)) != PLUS
6459 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
6460 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
6461 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
6462 output_operand_lossage ("invalid %%K value");
6463 print_operand_address (file, XEXP (XEXP (x, 0), 0));
6465 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
6469 /* %l is output_asm_label. */
6472 /* Write second word of DImode or DFmode reference. Works on register
6473 or non-indexed memory only. */
6474 if (GET_CODE (x) == REG)
6475 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
6476 else if (GET_CODE (x) == MEM)
6478 /* Handle possible auto-increment. Since it is pre-increment and
6479 we have already done it, we can just use an offset of word. */
6480 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6481 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6482 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
6485 output_address (XEXP (adjust_address_nv (x, SImode,
6489 if (small_data_operand (x, GET_MODE (x)))
6490 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6491 reg_names[SMALL_DATA_REG]);
6496 /* MB value for a mask operand. */
6497 if (! mask_operand (x, SImode))
6498 output_operand_lossage ("invalid %%m value");
6500 val = INT_LOWPART (x);
6502 /* If the high bit is set and the low bit is not, the value is zero.
6503 If the high bit is zero, the value is the first 1 bit we find from
6505 if ((val & 0x80000000) && ((val & 1) == 0))
6510 else if ((val & 0x80000000) == 0)
6512 for (i = 1; i < 32; i++)
6513 if ((val <<= 1) & 0x80000000)
6515 fprintf (file, "%d", i);
6519 /* Otherwise, look for the first 0 bit from the right. The result is its
6520 number plus 1. We know the low-order bit is one. */
6521 for (i = 0; i < 32; i++)
6522 if (((val >>= 1) & 1) == 0)
6525 /* If we ended in ...01, i would be 0. The correct value is 31, so
6527 fprintf (file, "%d", 31 - i);
6531 /* ME value for a mask operand. */
6532 if (! mask_operand (x, SImode))
6533 output_operand_lossage ("invalid %%M value");
6535 val = INT_LOWPART (x);
6537 /* If the low bit is set and the high bit is not, the value is 31.
6538 If the low bit is zero, the value is the first 1 bit we find from
6540 if ((val & 1) && ((val & 0x80000000) == 0))
6545 else if ((val & 1) == 0)
6547 for (i = 0; i < 32; i++)
6548 if ((val >>= 1) & 1)
6551 /* If we had ....10, i would be 0. The result should be
6552 30, so we need 30 - i. */
6553 fprintf (file, "%d", 30 - i);
6557 /* Otherwise, look for the first 0 bit from the left. The result is its
6558 number minus 1. We know the high-order bit is one. */
6559 for (i = 0; i < 32; i++)
6560 if (((val <<= 1) & 0x80000000) == 0)
6563 fprintf (file, "%d", i);
6566 /* %n outputs the negative of its operand. */
6569 /* Write the number of elements in the vector times 4. */
6570 if (GET_CODE (x) != PARALLEL)
6571 output_operand_lossage ("invalid %%N value");
6573 fprintf (file, "%d", XVECLEN (x, 0) * 4);
6577 /* Similar, but subtract 1 first. */
6578 if (GET_CODE (x) != PARALLEL)
6579 output_operand_lossage ("invalid %%O value");
6581 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
6585 /* X is a CONST_INT that is a power of two. Output the logarithm. */
6587 || INT_LOWPART (x) < 0
6588 || (i = exact_log2 (INT_LOWPART (x))) < 0)
6589 output_operand_lossage ("invalid %%p value");
6591 fprintf (file, "%d", i);
6595 /* The operand must be an indirect memory reference. The result
6596 is the register number. */
6597 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
6598 || REGNO (XEXP (x, 0)) >= 32)
6599 output_operand_lossage ("invalid %%P value");
6601 fprintf (file, "%d", REGNO (XEXP (x, 0)));
6605 /* This outputs the logical code corresponding to a boolean
6606 expression. The expression may have one or both operands
6607 negated (if one, only the first one). For condition register
6608 logical operations, it will also treat the negated
6609 CR codes as NOTs, but not handle NOTs of them. */
6611 const char *const *t = 0;
6613 enum rtx_code code = GET_CODE (x);
6614 static const char * const tbl[3][3] = {
6615 { "and", "andc", "nor" },
6616 { "or", "orc", "nand" },
6617 { "xor", "eqv", "xor" } };
6621 else if (code == IOR)
6623 else if (code == XOR)
6626 output_operand_lossage ("invalid %%q value");
6628 if (GET_CODE (XEXP (x, 0)) != NOT)
6632 if (GET_CODE (XEXP (x, 1)) == NOT)
6643 /* X is a CR register. Print the mask for `mtcrf'. */
6644 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
6645 output_operand_lossage ("invalid %%R value");
6647 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
6651 /* Low 5 bits of 32 - value */
6653 output_operand_lossage ("invalid %%s value");
6655 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
6659 /* PowerPC64 mask position. All 0's and all 1's are excluded.
6660 CONST_INT 32-bit mask is considered sign-extended so any
6661 transition must occur within the CONST_INT, not on the boundary. */
6662 if (! mask64_operand (x, DImode))
6663 output_operand_lossage ("invalid %%S value");
6665 val = INT_LOWPART (x);
6667 if (val & 1) /* Clear Left */
6669 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6670 if (!((val >>= 1) & 1))
6673 #if HOST_BITS_PER_WIDE_INT == 32
6674 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6676 val = CONST_DOUBLE_HIGH (x);
6681 for (i = 32; i < 64; i++)
6682 if (!((val >>= 1) & 1))
6686 /* i = index of last set bit from right
6687 mask begins at 63 - i from left */
6689 output_operand_lossage ("%%S computed all 1's mask");
6691 fprintf (file, "%d", 63 - i);
6694 else /* Clear Right */
6696 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6697 if ((val >>= 1) & 1)
6700 #if HOST_BITS_PER_WIDE_INT == 32
6701 if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6703 val = CONST_DOUBLE_HIGH (x);
6705 if (val == (HOST_WIDE_INT) -1)
6708 for (i = 32; i < 64; i++)
6709 if ((val >>= 1) & 1)
6713 /* i = index of last clear bit from right
6714 mask ends at 62 - i from left */
6716 output_operand_lossage ("%%S computed all 0's mask");
6718 fprintf (file, "%d", 62 - i);
6723 /* Print the symbolic name of a branch target register. */
6724 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
6725 && REGNO (x) != COUNT_REGISTER_REGNUM))
6726 output_operand_lossage ("invalid %%T value");
6727 else if (REGNO (x) == LINK_REGISTER_REGNUM)
6728 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
6730 fputs ("ctr", file);
6734 /* High-order 16 bits of constant for use in unsigned operand. */
6736 output_operand_lossage ("invalid %%u value");
6738 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6739 (INT_LOWPART (x) >> 16) & 0xffff);
6743 /* High-order 16 bits of constant for use in signed operand. */
6745 output_operand_lossage ("invalid %%v value");
6747 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
6748 (INT_LOWPART (x) >> 16) & 0xffff);
6752 /* Print `u' if this has an auto-increment or auto-decrement. */
6753 if (GET_CODE (x) == MEM
6754 && (GET_CODE (XEXP (x, 0)) == PRE_INC
6755 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
6760 /* Print the trap code for this operand. */
6761 switch (GET_CODE (x))
6764 fputs ("eq", file); /* 4 */
6767 fputs ("ne", file); /* 24 */
6770 fputs ("lt", file); /* 16 */
6773 fputs ("le", file); /* 20 */
6776 fputs ("gt", file); /* 8 */
6779 fputs ("ge", file); /* 12 */
6782 fputs ("llt", file); /* 2 */
6785 fputs ("lle", file); /* 6 */
6788 fputs ("lgt", file); /* 1 */
6791 fputs ("lge", file); /* 5 */
6799 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
6802 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
6803 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
6805 print_operand (file, x, 0);
6809 /* MB value for a PowerPC64 rldic operand. */
6810 val = (GET_CODE (x) == CONST_INT
6811 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
6816 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
6817 if ((val <<= 1) < 0)
6820 #if HOST_BITS_PER_WIDE_INT == 32
6821 if (GET_CODE (x) == CONST_INT && i >= 0)
6822 i += 32; /* zero-extend high-part was all 0's */
6823 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
6825 val = CONST_DOUBLE_LOW (x);
6832 for ( ; i < 64; i++)
6833 if ((val <<= 1) < 0)
6838 fprintf (file, "%d", i + 1);
6842 if (GET_CODE (x) == MEM
6843 && LEGITIMATE_INDEXED_ADDRESS_P (XEXP (x, 0), 0))
6848 /* Like 'L', for third word of TImode */
6849 if (GET_CODE (x) == REG)
6850 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
6851 else if (GET_CODE (x) == MEM)
6853 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6854 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6855 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
6857 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
6858 if (small_data_operand (x, GET_MODE (x)))
6859 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6860 reg_names[SMALL_DATA_REG]);
6865 /* X is a SYMBOL_REF. Write out the name preceded by a
6866 period and without any trailing data in brackets. Used for function
6867 names. If we are configured for System V (or the embedded ABI) on
6868 the PowerPC, do not emit the period, since those systems do not use
6869 TOCs and the like. */
6870 if (GET_CODE (x) != SYMBOL_REF)
6873 if (XSTR (x, 0)[0] != '.')
6875 switch (DEFAULT_ABI)
6885 case ABI_AIX_NODESC:
6891 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
6893 assemble_name (file, XSTR (x, 0));
6898 /* Like 'L', for last word of TImode. */
6899 if (GET_CODE (x) == REG)
6900 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
6901 else if (GET_CODE (x) == MEM)
6903 if (GET_CODE (XEXP (x, 0)) == PRE_INC
6904 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
6905 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
6907 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
6908 if (small_data_operand (x, GET_MODE (x)))
6909 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6910 reg_names[SMALL_DATA_REG]);
6914 /* Print AltiVec memory operand. */
6919 if (GET_CODE (x) != MEM)
6924 if (GET_CODE (tmp) == REG)
6925 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
6926 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
6928 if (REGNO (XEXP (tmp, 0)) == 0)
6929 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
6930 reg_names[ REGNO (XEXP (tmp, 0)) ]);
6932 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
6933 reg_names[ REGNO (XEXP (tmp, 1)) ]);
6941 if (GET_CODE (x) == REG)
6942 fprintf (file, "%s", reg_names[REGNO (x)]);
6943 else if (GET_CODE (x) == MEM)
6945 /* We need to handle PRE_INC and PRE_DEC here, since we need to
6946 know the width from the mode. */
6947 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
6948 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
6949 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6950 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
6951 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
6952 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
6954 output_address (XEXP (x, 0));
6957 output_addr_const (file, x);
6961 output_operand_lossage ("invalid %%xn code");
6965 /* Print the address of an operand. */
6968 print_operand_address (file, x)
6972 if (GET_CODE (x) == REG)
6973 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
6974 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
6975 || GET_CODE (x) == LABEL_REF)
6977 output_addr_const (file, x);
6978 if (small_data_operand (x, GET_MODE (x)))
6979 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
6980 reg_names[SMALL_DATA_REG]);
6981 else if (TARGET_TOC)
6984 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
6986 if (REGNO (XEXP (x, 0)) == 0)
6987 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
6988 reg_names[ REGNO (XEXP (x, 0)) ]);
6990 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
6991 reg_names[ REGNO (XEXP (x, 1)) ]);
6993 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
6995 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (XEXP (x, 1)));
6996 fprintf (file, "(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
6999 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
7000 && CONSTANT_P (XEXP (x, 1)))
7002 output_addr_const (file, XEXP (x, 1));
7003 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
7007 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
7008 && CONSTANT_P (XEXP (x, 1)))
7010 fprintf (file, "lo16(");
7011 output_addr_const (file, XEXP (x, 1));
7012 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
7015 else if (LEGITIMATE_CONSTANT_POOL_ADDRESS_P (x))
7017 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
7019 rtx contains_minus = XEXP (x, 1);
7023 /* Find the (minus (sym) (toc)) buried in X, and temporarily
7024 turn it into (sym) for output_addr_const. */
7025 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
7026 contains_minus = XEXP (contains_minus, 0);
7028 minus = XEXP (contains_minus, 0);
7029 symref = XEXP (minus, 0);
7030 XEXP (contains_minus, 0) = symref;
7035 name = XSTR (symref, 0);
7036 newname = alloca (strlen (name) + sizeof ("@toc"));
7037 strcpy (newname, name);
7038 strcat (newname, "@toc");
7039 XSTR (symref, 0) = newname;
7041 output_addr_const (file, XEXP (x, 1));
7043 XSTR (symref, 0) = name;
7044 XEXP (contains_minus, 0) = minus;
7047 output_addr_const (file, XEXP (x, 1));
7049 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
7055 /* Target hook for assembling integer objects. The powerpc version has
7056 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
7057 is defined. It also needs to handle DI-mode objects on 64-bit
7061 rs6000_assemble_integer (x, size, aligned_p)
7066 #ifdef RELOCATABLE_NEEDS_FIXUP
7067 /* Special handling for SI values. */
7068 if (size == 4 && aligned_p)
7070 extern int in_toc_section PARAMS ((void));
7071 static int recurse = 0;
7073 /* For -mrelocatable, we mark all addresses that need to be fixed up
7074 in the .fixup section. */
7075 if (TARGET_RELOCATABLE
7076 && !in_toc_section ()
7077 && !in_text_section ()
7079 && GET_CODE (x) != CONST_INT
7080 && GET_CODE (x) != CONST_DOUBLE
7086 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
7088 ASM_OUTPUT_LABEL (asm_out_file, buf);
7089 fprintf (asm_out_file, "\t.long\t(");
7090 output_addr_const (asm_out_file, x);
7091 fprintf (asm_out_file, ")@fixup\n");
7092 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
7093 ASM_OUTPUT_ALIGN (asm_out_file, 2);
7094 fprintf (asm_out_file, "\t.long\t");
7095 assemble_name (asm_out_file, buf);
7096 fprintf (asm_out_file, "\n\t.previous\n");
7100 /* Remove initial .'s to turn a -mcall-aixdesc function
7101 address into the address of the descriptor, not the function
7103 else if (GET_CODE (x) == SYMBOL_REF
7104 && XSTR (x, 0)[0] == '.'
7105 && DEFAULT_ABI == ABI_AIX)
7107 const char *name = XSTR (x, 0);
7108 while (*name == '.')
7111 fprintf (asm_out_file, "\t.long\t%s\n", name);
7115 #endif /* RELOCATABLE_NEEDS_FIXUP */
7116 return default_assemble_integer (x, size, aligned_p);
7120 rs6000_reverse_condition (mode, code)
7121 enum machine_mode mode;
7124 /* Reversal of FP compares takes care -- an ordered compare
7125 becomes an unordered compare and vice versa. */
7126 if (mode == CCFPmode)
7127 return reverse_condition_maybe_unordered (code);
7129 return reverse_condition (code);
7132 /* Generate a compare for CODE. Return a brand-new rtx that
7133 represents the result of the compare. */
7136 rs6000_generate_compare (code)
7139 enum machine_mode comp_mode;
7142 if (rs6000_compare_fp_p)
7143 comp_mode = CCFPmode;
7144 else if (code == GTU || code == LTU
7145 || code == GEU || code == LEU)
7146 comp_mode = CCUNSmode;
7150 /* First, the compare. */
7151 compare_result = gen_reg_rtx (comp_mode);
7152 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
7153 gen_rtx_COMPARE (comp_mode,
7155 rs6000_compare_op1)));
7157 /* Some kinds of FP comparisons need an OR operation;
7158 except for flag_unsafe_math_optimizations we don't bother. */
7159 if (rs6000_compare_fp_p
7160 && ! flag_unsafe_math_optimizations
7161 && (code == LE || code == GE
7162 || code == UNEQ || code == LTGT
7163 || code == UNGT || code == UNLT))
7165 enum rtx_code or1, or2;
7166 rtx or1_rtx, or2_rtx, compare2_rtx;
7167 rtx or_result = gen_reg_rtx (CCEQmode);
7171 case LE: or1 = LT; or2 = EQ; break;
7172 case GE: or1 = GT; or2 = EQ; break;
7173 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
7174 case LTGT: or1 = LT; or2 = GT; break;
7175 case UNGT: or1 = UNORDERED; or2 = GT; break;
7176 case UNLT: or1 = UNORDERED; or2 = LT; break;
7179 validate_condition_mode (or1, comp_mode);
7180 validate_condition_mode (or2, comp_mode);
7181 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
7182 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
7183 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
7184 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
7186 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
7188 compare_result = or_result;
7192 validate_condition_mode (code, GET_MODE (compare_result));
7194 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
7198 /* Emit the RTL for an sCOND pattern. */
7201 rs6000_emit_sCOND (code, result)
7206 enum machine_mode op_mode;
7208 condition_rtx = rs6000_generate_compare (code);
7210 op_mode = GET_MODE (rs6000_compare_op0);
7211 if (op_mode == VOIDmode)
7212 op_mode = GET_MODE (rs6000_compare_op1);
7214 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
7216 PUT_MODE (condition_rtx, DImode);
7217 convert_move (result, condition_rtx, 0);
7221 PUT_MODE (condition_rtx, SImode);
7222 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
7226 /* Emit a branch of kind CODE to location LOC. */
7229 rs6000_emit_cbranch (code, loc)
7233 rtx condition_rtx, loc_ref;
7235 condition_rtx = rs6000_generate_compare (code);
7236 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
7237 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
7238 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
7242 /* Return the string to output a conditional branch to LABEL, which is
7243 the operand number of the label, or -1 if the branch is really a
7246 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
7247 condition code register and its mode specifies what kind of
7250 REVERSED is non-zero if we should reverse the sense of the comparison.
7252 INSN is the insn. */
7255 output_cbranch (op, label, reversed, insn)
7261 static char string[64];
7262 enum rtx_code code = GET_CODE (op);
7263 rtx cc_reg = XEXP (op, 0);
7264 enum machine_mode mode = GET_MODE (cc_reg);
7265 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
7266 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
7267 int really_reversed = reversed ^ need_longbranch;
7273 validate_condition_mode (code, mode);
7275 /* Work out which way this really branches. We could use
7276 reverse_condition_maybe_unordered here always but this
7277 makes the resulting assembler clearer. */
7278 if (really_reversed)
7279 code = rs6000_reverse_condition (mode, code);
7283 /* Not all of these are actually distinct opcodes, but
7284 we distinguish them for clarity of the resulting assembler. */
7286 ccode = "ne"; break;
7288 ccode = "eq"; break;
7290 ccode = "ge"; break;
7291 case GT: case GTU: case UNGT:
7292 ccode = "gt"; break;
7294 ccode = "le"; break;
7295 case LT: case LTU: case UNLT:
7296 ccode = "lt"; break;
7297 case UNORDERED: ccode = "un"; break;
7298 case ORDERED: ccode = "nu"; break;
7299 case UNGE: ccode = "nl"; break;
7300 case UNLE: ccode = "ng"; break;
7305 /* Maybe we have a guess as to how likely the branch is.
7306 The old mnemonics don't have a way to specify this information. */
7307 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
7308 if (note != NULL_RTX)
7310 /* PROB is the difference from 50%. */
7311 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
7313 /* For branches that are very close to 50%, assume not-taken. */
7314 if (abs (prob) > REG_BR_PROB_BASE / 20
7315 && ((prob > 0) ^ need_longbranch))
7324 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
7326 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
7328 /* We need to escape any '%' characters in the reg_names string.
7329 Assume they'd only be the first character... */
7330 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
7332 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
7336 /* If the branch distance was too far, we may have to use an
7337 unconditional branch to go the distance. */
7338 if (need_longbranch)
7339 s += sprintf (s, ",$+8\n\tb %s", label);
7341 s += sprintf (s, ",%s", label);
7347 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
7348 operands of the last comparison is nonzero/true, FALSE_COND if it
7349 is zero/false. Return 0 if the hardware has no such operation. */
7352 rs6000_emit_cmove (dest, op, true_cond, false_cond)
7358 enum rtx_code code = GET_CODE (op);
7359 rtx op0 = rs6000_compare_op0;
7360 rtx op1 = rs6000_compare_op1;
7362 enum machine_mode compare_mode = GET_MODE (op0);
7363 enum machine_mode result_mode = GET_MODE (dest);
7366 /* These modes should always match. */
7367 if (GET_MODE (op1) != compare_mode)
7369 if (GET_MODE (true_cond) != result_mode)
7371 if (GET_MODE (false_cond) != result_mode)
7374 /* First, work out if the hardware can do this at all, or
7375 if it's too slow... */
7376 /* If the comparison is an integer one, since we only have fsel
7377 it'll be cheaper to use a branch. */
7378 if (! rs6000_compare_fp_p)
7381 /* Eliminate half of the comparisons by switching operands, this
7382 makes the remaining code simpler. */
7383 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
7384 || code == LTGT || code == LT)
7386 code = reverse_condition_maybe_unordered (code);
7388 true_cond = false_cond;
7392 /* UNEQ and LTGT take four instructions for a comparison with zero,
7393 it'll probably be faster to use a branch here too. */
7397 if (GET_CODE (op1) == CONST_DOUBLE)
7398 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
7400 /* We're going to try to implement comparions by performing
7401 a subtract, then comparing against zero. Unfortunately,
7402 Inf - Inf is NaN which is not zero, and so if we don't
7403 know that the the operand is finite and the comparison
7404 would treat EQ different to UNORDERED, we can't do it. */
7405 if (! flag_unsafe_math_optimizations
7406 && code != GT && code != UNGE
7407 && (GET_CODE (op1) != CONST_DOUBLE || target_isinf (c1))
7408 /* Constructs of the form (a OP b ? a : b) are safe. */
7409 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
7410 || (! rtx_equal_p (op0, true_cond)
7411 && ! rtx_equal_p (op1, true_cond))))
7413 /* At this point we know we can use fsel. */
7415 /* Reduce the comparison to a comparison against zero. */
7416 temp = gen_reg_rtx (compare_mode);
7417 emit_insn (gen_rtx_SET (VOIDmode, temp,
7418 gen_rtx_MINUS (compare_mode, op0, op1)));
7420 op1 = CONST0_RTX (compare_mode);
7422 /* If we don't care about NaNs we can reduce some of the comparisons
7423 down to faster ones. */
7424 if (flag_unsafe_math_optimizations)
7430 true_cond = false_cond;
7443 /* Now, reduce everything down to a GE. */
7450 temp = gen_reg_rtx (compare_mode);
7451 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
7456 temp = gen_reg_rtx (compare_mode);
7457 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
7462 temp = gen_reg_rtx (compare_mode);
7463 emit_insn (gen_rtx_SET (VOIDmode, temp,
7464 gen_rtx_NEG (compare_mode,
7465 gen_rtx_ABS (compare_mode, op0))));
7470 temp = gen_reg_rtx (result_mode);
7471 emit_insn (gen_rtx_SET (VOIDmode, temp,
7472 gen_rtx_IF_THEN_ELSE (result_mode,
7473 gen_rtx_GE (VOIDmode,
7475 true_cond, false_cond)));
7477 true_cond = false_cond;
7479 temp = gen_reg_rtx (compare_mode);
7480 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
7485 temp = gen_reg_rtx (result_mode);
7486 emit_insn (gen_rtx_SET (VOIDmode, temp,
7487 gen_rtx_IF_THEN_ELSE (result_mode,
7488 gen_rtx_GE (VOIDmode,
7490 true_cond, false_cond)));
7492 false_cond = true_cond;
7494 temp = gen_reg_rtx (compare_mode);
7495 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
7503 emit_insn (gen_rtx_SET (VOIDmode, dest,
7504 gen_rtx_IF_THEN_ELSE (result_mode,
7505 gen_rtx_GE (VOIDmode,
7507 true_cond, false_cond)));
7512 rs6000_emit_minmax (dest, code, op0, op1)
7518 enum machine_mode mode = GET_MODE (op0);
7520 if (code == SMAX || code == UMAX)
7521 target = emit_conditional_move (dest, GE, op0, op1, mode,
7524 target = emit_conditional_move (dest, GE, op0, op1, mode,
7526 if (target == NULL_RTX)
7529 emit_move_insn (dest, target);
7532 /* This page contains routines that are used to determine what the
7533 function prologue and epilogue code will do and write them out. */
7535 /* Return the first fixed-point register that is required to be
7536 saved. 32 if none. */
7539 first_reg_to_save ()
7543 /* Find lowest numbered live register. */
7544 for (first_reg = 13; first_reg <= 31; first_reg++)
7545 if (regs_ever_live[first_reg]
7546 && (! call_used_regs[first_reg]
7547 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
7548 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
7549 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
7552 if (current_function_profile)
7554 /* AIX must save/restore every register that contains a parameter
7555 before/after the .__mcount call plus an additional register
7556 for the static chain, if needed; use registers from 30 down to 22
7558 if (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
7560 int last_parm_reg, profile_first_reg;
7562 /* Figure out last used parameter register. The proper thing
7563 to do is to walk incoming args of the function. A function
7564 might have live parameter registers even if it has no
7566 for (last_parm_reg = 10;
7567 last_parm_reg > 2 && ! regs_ever_live [last_parm_reg];
7571 /* Calculate first reg for saving parameter registers
7573 Skip reg 31 which may contain the frame pointer. */
7574 profile_first_reg = (33 - last_parm_reg
7575 - (current_function_needs_context ? 1 : 0));
7577 /* Need to skip another reg to account for R31 being PICBASE
7578 (when flag_pic is set) or R30 being used as the frame
7579 pointer (when flag_pic is not set). */
7580 --profile_first_reg;
7582 /* Do not save frame pointer if no parameters needs to be saved. */
7583 if (profile_first_reg == 31)
7584 profile_first_reg = 32;
7586 if (first_reg > profile_first_reg)
7587 first_reg = profile_first_reg;
7590 /* SVR4 may need one register to preserve the static chain. */
7591 else if (current_function_needs_context)
7593 /* Skip reg 31 which may contain the frame pointer. */
7600 if (flag_pic && current_function_uses_pic_offset_table &&
7601 (first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM))
7602 return RS6000_PIC_OFFSET_TABLE_REGNUM;
7608 /* Similar, for FP regs. */
7611 first_fp_reg_to_save ()
7615 /* Find lowest numbered live register. */
7616 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
7617 if (regs_ever_live[first_reg])
7623 /* Similar, for AltiVec regs. */
7626 first_altivec_reg_to_save ()
7630 /* Stack frame remains as is unless we are in AltiVec ABI. */
7631 if (! TARGET_ALTIVEC_ABI)
7632 return LAST_ALTIVEC_REGNO + 1;
7634 /* Find lowest numbered live register. */
7635 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
7636 if (regs_ever_live[i])
7642 /* Return a 32-bit mask of the AltiVec registers we need to set in
7643 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
7644 the 32-bit word is 0. */
7647 compute_vrsave_mask ()
7649 unsigned int i, mask = 0;
7651 /* First, find out if we use _any_ altivec registers. */
7652 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7653 if (regs_ever_live[i])
7654 mask |= ALTIVEC_REG_BIT (i);
7659 /* Next, add all registers that are call-clobbered. We do this
7660 because post-reload register optimizers such as regrename_optimize
7661 may choose to use them. They never change the register class
7662 chosen by reload, so cannot create new uses of altivec registers
7663 if there were none before, so the early exit above is safe. */
7664 /* ??? Alternately, we could define HARD_REGNO_RENAME_OK to disallow
7665 altivec registers not saved in the mask, which might well make the
7666 adjustments below more effective in eliding the save/restore of
7667 VRSAVE in small functions. */
7668 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
7669 if (call_used_regs[i])
7670 mask |= ALTIVEC_REG_BIT (i);
7672 /* Next, remove the argument registers from the set. These must
7673 be in the VRSAVE mask set by the caller, so we don't need to add
7674 them in again. More importantly, the mask we compute here is
7675 used to generate CLOBBERs in the set_vrsave insn, and we do not
7676 wish the argument registers to die. */
7677 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
7678 mask &= ~ALTIVEC_REG_BIT (i);
7680 /* Similarly, remove the return value from the set. */
7683 diddle_return_value (is_altivec_return_reg, &yes);
7685 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
7692 is_altivec_return_reg (reg, xyes)
7696 bool *yes = (bool *) xyes;
7697 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
7702 /* Calculate the stack information for the current function. This is
7703 complicated by having two separate calling sequences, the AIX calling
7704 sequence and the V.4 calling sequence.
7706 AIX (and Darwin/Mac OS X) stack frames look like:
7708 SP----> +---------------------------------------+
7709 | back chain to caller | 0 0
7710 +---------------------------------------+
7711 | saved CR | 4 8 (8-11)
7712 +---------------------------------------+
7714 +---------------------------------------+
7715 | reserved for compilers | 12 24
7716 +---------------------------------------+
7717 | reserved for binders | 16 32
7718 +---------------------------------------+
7719 | saved TOC pointer | 20 40
7720 +---------------------------------------+
7721 | Parameter save area (P) | 24 48
7722 +---------------------------------------+
7723 | Alloca space (A) | 24+P etc.
7724 +---------------------------------------+
7725 | Local variable space (L) | 24+P+A
7726 +---------------------------------------+
7727 | Float/int conversion temporary (X) | 24+P+A+L
7728 +---------------------------------------+
7729 | Save area for AltiVec registers (W) | 24+P+A+L+X
7730 +---------------------------------------+
7731 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
7732 +---------------------------------------+
7733 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
7734 +---------------------------------------+
7735 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
7736 +---------------------------------------+
7737 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
7738 +---------------------------------------+
7739 old SP->| back chain to caller's caller |
7740 +---------------------------------------+
7742 The required alignment for AIX configurations is two words (i.e., 8
7746 V.4 stack frames look like:
7748 SP----> +---------------------------------------+
7749 | back chain to caller | 0
7750 +---------------------------------------+
7751 | caller's saved LR | 4
7752 +---------------------------------------+
7753 | Parameter save area (P) | 8
7754 +---------------------------------------+
7755 | Alloca space (A) | 8+P
7756 +---------------------------------------+
7757 | Varargs save area (V) | 8+P+A
7758 +---------------------------------------+
7759 | Local variable space (L) | 8+P+A+V
7760 +---------------------------------------+
7761 | Float/int conversion temporary (X) | 8+P+A+V+L
7762 +---------------------------------------+
7763 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
7764 +---------------------------------------+
7765 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
7766 +---------------------------------------+
7767 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
7768 +---------------------------------------+
7769 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
7770 +---------------------------------------+
7771 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
7772 +---------------------------------------+
7773 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
7774 +---------------------------------------+
7775 old SP->| back chain to caller's caller |
7776 +---------------------------------------+
7778 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
7779 given. (But note below and in sysv4.h that we require only 8 and
7780 may round up the size of our stack frame anyways. The historical
7781 reason is early versions of powerpc-linux which didn't properly
7782 align the stack at program startup. A happy side-effect is that
7783 -mno-eabi libraries can be used with -meabi programs.)
7785 The EABI configuration defaults to the V.4 layout, unless
7786 -mcall-aix is used, in which case the AIX layout is used. However,
7787 the stack alignment requirements may differ. If -mno-eabi is not
7788 given, the required stack alignment is 8 bytes; if -mno-eabi is
7789 given, the required alignment is 16 bytes. (But see V.4 comment
7792 #ifndef ABI_STACK_BOUNDARY
7793 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
7797 rs6000_stack_info ()
7799 static rs6000_stack_t info, zero_info;
7800 rs6000_stack_t *info_ptr = &info;
7801 int reg_size = TARGET_POWERPC64 ? 8 : 4;
7802 enum rs6000_abi abi;
7806 /* Zero all fields portably. */
7809 /* Select which calling sequence. */
7810 info_ptr->abi = abi = DEFAULT_ABI;
7812 /* Calculate which registers need to be saved & save area size. */
7813 info_ptr->first_gp_reg_save = first_reg_to_save ();
7814 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
7815 even if it currently looks like we won't. */
7816 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
7817 || (flag_pic == 1 && abi == ABI_V4)
7818 || (flag_pic && abi == ABI_DARWIN))
7819 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
7820 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
7822 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
7824 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
7825 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
7827 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
7828 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
7829 - info_ptr->first_altivec_reg_save);
7831 /* Does this function call anything? */
7832 info_ptr->calls_p = (! current_function_is_leaf
7833 || cfun->machine->ra_needs_full_frame);
7835 /* Determine if we need to save the link register. */
7836 if (rs6000_ra_ever_killed ()
7837 || (DEFAULT_ABI == ABI_AIX && current_function_profile)
7838 #ifdef TARGET_RELOCATABLE
7839 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
7841 || (info_ptr->first_fp_reg_save != 64
7842 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
7843 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
7844 || (abi == ABI_V4 && current_function_calls_alloca)
7845 || (DEFAULT_ABI == ABI_DARWIN
7847 && current_function_uses_pic_offset_table)
7848 || info_ptr->calls_p)
7850 info_ptr->lr_save_p = 1;
7851 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
7854 /* Determine if we need to save the condition code registers. */
7855 if (regs_ever_live[CR2_REGNO]
7856 || regs_ever_live[CR3_REGNO]
7857 || regs_ever_live[CR4_REGNO])
7859 info_ptr->cr_save_p = 1;
7861 info_ptr->cr_size = reg_size;
7864 /* If the current function calls __builtin_eh_return, then we need
7865 to allocate stack space for registers that will hold data for
7866 the exception handler. */
7867 if (current_function_calls_eh_return)
7870 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
7872 ehrd_size = i * UNITS_PER_WORD;
7877 /* Determine various sizes. */
7878 info_ptr->reg_size = reg_size;
7879 info_ptr->fixed_size = RS6000_SAVE_AREA;
7880 info_ptr->varargs_size = RS6000_VARARGS_AREA;
7881 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
7882 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
7885 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
7887 info_ptr->vrsave_mask = compute_vrsave_mask ();
7888 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
7892 info_ptr->vrsave_mask = 0;
7893 info_ptr->vrsave_size = 0;
7896 /* Calculate the offsets. */
7904 case ABI_AIX_NODESC:
7906 info_ptr->fp_save_offset = - info_ptr->fp_size;
7907 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
7909 if (TARGET_ALTIVEC_ABI)
7911 info_ptr->vrsave_save_offset
7912 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
7914 /* Align stack so vector save area is on a quadword boundary. */
7915 if (info_ptr->altivec_size != 0)
7916 info_ptr->altivec_padding_size
7917 = 16 - (-info_ptr->vrsave_save_offset % 16);
7919 info_ptr->altivec_padding_size = 0;
7921 info_ptr->altivec_save_offset
7922 = info_ptr->vrsave_save_offset
7923 - info_ptr->altivec_padding_size
7924 - info_ptr->altivec_size;
7926 /* Adjust for AltiVec case. */
7927 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
7930 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
7931 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
7932 info_ptr->lr_save_offset = 2*reg_size;
7936 info_ptr->fp_save_offset = - info_ptr->fp_size;
7937 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
7938 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
7940 if (TARGET_ALTIVEC_ABI)
7942 info_ptr->vrsave_save_offset
7943 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
7945 /* Align stack so vector save area is on a quadword boundary. */
7946 if (info_ptr->altivec_size != 0)
7947 info_ptr->altivec_padding_size
7948 = 16 - (-info_ptr->vrsave_save_offset % 16);
7950 info_ptr->altivec_padding_size = 0;
7952 info_ptr->altivec_save_offset
7953 = info_ptr->vrsave_save_offset
7954 - info_ptr->altivec_padding_size
7955 - info_ptr->altivec_size;
7957 /* Adjust for AltiVec case. */
7958 info_ptr->toc_save_offset
7959 = info_ptr->altivec_save_offset - info_ptr->toc_size;
7962 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
7963 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
7964 info_ptr->lr_save_offset = reg_size;
7968 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
7970 + info_ptr->altivec_size
7971 + info_ptr->altivec_padding_size
7972 + info_ptr->vrsave_size
7976 + info_ptr->vrsave_size
7977 + info_ptr->toc_size,
7978 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
7981 total_raw_size = (info_ptr->vars_size
7982 + info_ptr->parm_size
7983 + info_ptr->save_size
7984 + info_ptr->varargs_size
7985 + info_ptr->fixed_size);
7987 info_ptr->total_size =
7988 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
7990 /* Determine if we need to allocate any stack frame:
7992 For AIX we need to push the stack if a frame pointer is needed
7993 (because the stack might be dynamically adjusted), if we are
7994 debugging, if we make calls, or if the sum of fp_save, gp_save,
7995 and local variables are more than the space needed to save all
7996 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
7997 + 18*8 = 288 (GPR13 reserved).
7999 For V.4 we don't have the stack cushion that AIX uses, but assume
8000 that the debugger can handle stackless frames. */
8002 if (info_ptr->calls_p)
8003 info_ptr->push_p = 1;
8005 else if (abi == ABI_V4)
8006 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
8009 info_ptr->push_p = (frame_pointer_needed
8010 || (abi != ABI_DARWIN && write_symbols != NO_DEBUG)
8011 || ((total_raw_size - info_ptr->fixed_size)
8012 > (TARGET_32BIT ? 220 : 288)));
8014 /* Zero offsets if we're not saving those registers. */
8015 if (info_ptr->fp_size == 0)
8016 info_ptr->fp_save_offset = 0;
8018 if (info_ptr->gp_size == 0)
8019 info_ptr->gp_save_offset = 0;
8021 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
8022 info_ptr->altivec_save_offset = 0;
8024 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
8025 info_ptr->vrsave_save_offset = 0;
8027 if (! info_ptr->lr_save_p)
8028 info_ptr->lr_save_offset = 0;
8030 if (! info_ptr->cr_save_p)
8031 info_ptr->cr_save_offset = 0;
8033 if (! info_ptr->toc_save_p)
8034 info_ptr->toc_save_offset = 0;
8040 debug_stack_info (info)
8041 rs6000_stack_t *info;
8043 const char *abi_string;
8046 info = rs6000_stack_info ();
8048 fprintf (stderr, "\nStack information for function %s:\n",
8049 ((current_function_decl && DECL_NAME (current_function_decl))
8050 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
8055 default: abi_string = "Unknown"; break;
8056 case ABI_NONE: abi_string = "NONE"; break;
8058 case ABI_AIX_NODESC: abi_string = "AIX"; break;
8059 case ABI_DARWIN: abi_string = "Darwin"; break;
8060 case ABI_V4: abi_string = "V.4"; break;
8063 fprintf (stderr, "\tABI = %5s\n", abi_string);
8065 if (TARGET_ALTIVEC_ABI)
8066 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
8068 if (info->first_gp_reg_save != 32)
8069 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
8071 if (info->first_fp_reg_save != 64)
8072 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
8074 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
8075 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
8076 info->first_altivec_reg_save);
8078 if (info->lr_save_p)
8079 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
8081 if (info->cr_save_p)
8082 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
8084 if (info->toc_save_p)
8085 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
8087 if (info->vrsave_mask)
8088 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
8091 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
8094 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
8096 if (info->gp_save_offset)
8097 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
8099 if (info->fp_save_offset)
8100 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
8102 if (info->altivec_save_offset)
8103 fprintf (stderr, "\taltivec_save_offset = %5d\n",
8104 info->altivec_save_offset);
8106 if (info->vrsave_save_offset)
8107 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
8108 info->vrsave_save_offset);
8110 if (info->lr_save_offset)
8111 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
8113 if (info->cr_save_offset)
8114 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
8116 if (info->toc_save_offset)
8117 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
8119 if (info->varargs_save_offset)
8120 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
8122 if (info->total_size)
8123 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
8125 if (info->varargs_size)
8126 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
8128 if (info->vars_size)
8129 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
8131 if (info->parm_size)
8132 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
8134 if (info->fixed_size)
8135 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
8138 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
8141 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
8143 if (info->altivec_size)
8144 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
8146 if (info->vrsave_size)
8147 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
8149 if (info->altivec_padding_size)
8150 fprintf (stderr, "\taltivec_padding_size= %5d\n",
8151 info->altivec_padding_size);
8154 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
8157 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
8160 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
8162 if (info->save_size)
8163 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
8165 if (info->reg_size != 4)
8166 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
8168 fprintf (stderr, "\n");
8172 rs6000_return_addr (count, frame)
8176 /* Currently we don't optimize very well between prolog and body
8177 code and for PIC code the code can be actually quite bad, so
8178 don't try to be too clever here. */
8179 if (count != 0 || flag_pic != 0)
8181 cfun->machine->ra_needs_full_frame = 1;
8188 plus_constant (copy_to_reg
8189 (gen_rtx_MEM (Pmode,
8190 memory_address (Pmode, frame))),
8191 RETURN_ADDRESS_OFFSET)));
8194 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
8198 rs6000_ra_ever_killed ()
8202 #ifdef ASM_OUTPUT_MI_THUNK
8203 if (current_function_is_thunk)
8206 if (!has_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM)
8207 || cfun->machine->ra_needs_full_frame)
8208 return regs_ever_live[LINK_REGISTER_REGNUM];
8210 push_topmost_sequence ();
8212 pop_topmost_sequence ();
8214 return reg_set_between_p (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
8218 /* Add a REG_MAYBE_DEAD note to the insn. */
8220 rs6000_maybe_dead (insn)
8223 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
8228 /* Emit instructions needed to load the TOC register.
8229 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
8230 a constant pool; or for SVR4 -fpic. */
8233 rs6000_emit_load_toc_table (fromprolog)
8237 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
8239 if (TARGET_ELF && DEFAULT_ABI != ABI_AIX)
8241 if (DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8243 rtx temp = (fromprolog
8244 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
8245 : gen_reg_rtx (Pmode));
8246 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_pic_si (temp)));
8247 rs6000_maybe_dead (emit_move_insn (dest, temp));
8249 else if (flag_pic == 2)
8252 rtx tempLR = (fromprolog
8253 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
8254 : gen_reg_rtx (Pmode));
8255 rtx temp0 = (fromprolog
8256 ? gen_rtx_REG (Pmode, 0)
8257 : gen_reg_rtx (Pmode));
8260 /* possibly create the toc section */
8261 if (! toc_initialized)
8264 function_section (current_function_decl);
8271 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
8272 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8274 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
8275 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8277 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
8279 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
8280 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
8287 static int reload_toc_labelno = 0;
8289 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
8291 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
8292 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8294 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1b (tempLR,
8297 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
8298 rs6000_maybe_dead (emit_move_insn (temp0,
8299 gen_rtx_MEM (Pmode, dest)));
8301 rs6000_maybe_dead (emit_insn (gen_addsi3 (dest, temp0, dest)));
8303 else if (flag_pic == 0 && TARGET_MINIMAL_TOC)
8305 /* This is for AIX code running in non-PIC ELF. */
8308 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
8309 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
8311 rs6000_maybe_dead (emit_insn (gen_elf_high (dest, realsym)));
8312 rs6000_maybe_dead (emit_insn (gen_elf_low (dest, dest, realsym)));
8320 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_si (dest)));
8322 rs6000_maybe_dead (emit_insn (gen_load_toc_aix_di (dest)));
8327 get_TOC_alias_set ()
8329 static int set = -1;
8331 set = new_alias_set ();
8335 /* This retuns nonzero if the current function uses the TOC. This is
8336 determined by the presence of (unspec ... 7), which is generated by
8337 the various load_toc_* patterns. */
8344 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
8347 rtx pat = PATTERN (insn);
8350 if (GET_CODE (pat) == PARALLEL)
8351 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
8352 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == UNSPEC
8353 && XINT (XVECEXP (PATTERN (insn), 0, i), 1) == 7)
8360 create_TOC_reference (symbol)
8363 return gen_rtx_PLUS (Pmode,
8364 gen_rtx_REG (Pmode, TOC_REGISTER),
8365 gen_rtx_CONST (Pmode,
8366 gen_rtx_MINUS (Pmode, symbol,
8367 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
8371 /* __throw will restore its own return address to be the same as the
8372 return address of the function that the throw is being made to.
8373 This is unfortunate, because we want to check the original
8374 return address to see if we need to restore the TOC.
8375 So we have to squirrel it away here.
8376 This is used only in compiling __throw and __rethrow.
8378 Most of this code should be removed by CSE. */
8379 static rtx insn_after_throw;
8381 /* This does the saving... */
8383 rs6000_aix_emit_builtin_unwind_init ()
8386 rtx stack_top = gen_reg_rtx (Pmode);
8387 rtx opcode_addr = gen_reg_rtx (Pmode);
8389 insn_after_throw = gen_reg_rtx (SImode);
8391 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
8392 emit_move_insn (stack_top, mem);
8394 mem = gen_rtx_MEM (Pmode,
8395 gen_rtx_PLUS (Pmode, stack_top,
8396 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
8397 emit_move_insn (opcode_addr, mem);
8398 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
8401 /* Emit insns to _restore_ the TOC register, at runtime (specifically
8402 in _eh.o). Only used on AIX.
8404 The idea is that on AIX, function calls look like this:
8405 bl somefunction-trampoline
8409 somefunction-trampoline:
8411 ... load function address in the count register ...
8413 or like this, if the linker determines that this is not a cross-module call
8414 and so the TOC need not be restored:
8417 or like this, if the compiler could determine that this is not a
8420 now, the tricky bit here is that register 2 is saved and restored
8421 by the _linker_, so we can't readily generate debugging information
8422 for it. So we need to go back up the call chain looking at the
8423 insns at return addresses to see which calls saved the TOC register
8424 and so see where it gets restored from.
8426 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
8427 just before the actual epilogue.
8429 On the bright side, this incurs no space or time overhead unless an
8430 exception is thrown, except for the extra code in libgcc.a.
8432 The parameter STACKSIZE is a register containing (at runtime)
8433 the amount to be popped off the stack in addition to the stack frame
8434 of this routine (which will be __throw or __rethrow, and so is
8435 guaranteed to have a stack frame). */
8438 rs6000_emit_eh_toc_restore (stacksize)
8442 rtx bottom_of_stack = gen_reg_rtx (Pmode);
8443 rtx tocompare = gen_reg_rtx (SImode);
8444 rtx opcode = gen_reg_rtx (SImode);
8445 rtx opcode_addr = gen_reg_rtx (Pmode);
8447 rtx loop_start = gen_label_rtx ();
8448 rtx no_toc_restore_needed = gen_label_rtx ();
8449 rtx loop_exit = gen_label_rtx ();
8451 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
8452 set_mem_alias_set (mem, rs6000_sr_alias_set);
8453 emit_move_insn (bottom_of_stack, mem);
8455 top_of_stack = expand_binop (Pmode, add_optab,
8456 bottom_of_stack, stacksize,
8457 NULL_RTX, 1, OPTAB_WIDEN);
8459 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
8460 : 0xE8410028, SImode));
8462 if (insn_after_throw == NULL_RTX)
8464 emit_move_insn (opcode, insn_after_throw);
8466 emit_note (NULL, NOTE_INSN_LOOP_BEG);
8467 emit_label (loop_start);
8469 do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
8470 SImode, NULL_RTX, NULL_RTX,
8471 no_toc_restore_needed);
8473 mem = gen_rtx_MEM (Pmode,
8474 gen_rtx_PLUS (Pmode, bottom_of_stack,
8475 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
8476 emit_move_insn (gen_rtx_REG (Pmode, 2), mem);
8478 emit_label (no_toc_restore_needed);
8479 do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
8480 Pmode, NULL_RTX, NULL_RTX,
8483 mem = gen_rtx_MEM (Pmode, bottom_of_stack);
8484 set_mem_alias_set (mem, rs6000_sr_alias_set);
8485 emit_move_insn (bottom_of_stack, mem);
8487 mem = gen_rtx_MEM (Pmode,
8488 gen_rtx_PLUS (Pmode, bottom_of_stack,
8489 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
8490 emit_move_insn (opcode_addr, mem);
8491 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
8493 emit_note (NULL, NOTE_INSN_LOOP_CONT);
8494 emit_jump (loop_start);
8495 emit_note (NULL, NOTE_INSN_LOOP_END);
8496 emit_label (loop_exit);
8498 #endif /* TARGET_AIX */
8500 /* This ties together stack memory (MEM with an alias set of
8501 rs6000_sr_alias_set) and the change to the stack pointer. */
8504 rs6000_emit_stack_tie ()
8506 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
8508 set_mem_alias_set (mem, rs6000_sr_alias_set);
8509 emit_insn (gen_stack_tie (mem));
8512 /* Emit the correct code for allocating stack space, as insns.
8513 If COPY_R12, make sure a copy of the old frame is left in r12.
8514 The generated code may use hard register 0 as a temporary. */
8517 rs6000_emit_allocate_stack (size, copy_r12)
8522 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8523 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
8524 rtx todec = GEN_INT (-size);
8526 if (current_function_limit_stack)
8528 if (REG_P (stack_limit_rtx)
8529 && REGNO (stack_limit_rtx) > 1
8530 && REGNO (stack_limit_rtx) <= 31)
8532 emit_insn (Pmode == SImode
8533 ? gen_addsi3 (tmp_reg,
8536 : gen_adddi3 (tmp_reg,
8540 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
8543 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
8545 && DEFAULT_ABI == ABI_V4)
8547 rtx toload = gen_rtx_CONST (VOIDmode,
8548 gen_rtx_PLUS (Pmode,
8552 emit_insn (gen_elf_high (tmp_reg, toload));
8553 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
8554 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
8558 warning ("stack limit expression is not supported");
8561 if (copy_r12 || ! TARGET_UPDATE)
8562 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
8568 /* Need a note here so that try_split doesn't get confused. */
8569 if (get_last_insn() == NULL_RTX)
8570 emit_note (0, NOTE_INSN_DELETED);
8571 insn = emit_move_insn (tmp_reg, todec);
8572 try_split (PATTERN (insn), insn, 0);
8576 if (Pmode == SImode)
8577 insn = emit_insn (gen_movsi_update (stack_reg, stack_reg,
8580 insn = emit_insn (gen_movdi_update (stack_reg, stack_reg,
8585 if (Pmode == SImode)
8586 insn = emit_insn (gen_addsi3 (stack_reg, stack_reg, todec));
8588 insn = emit_insn (gen_adddi3 (stack_reg, stack_reg, todec));
8589 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
8590 gen_rtx_REG (Pmode, 12));
8593 RTX_FRAME_RELATED_P (insn) = 1;
8595 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8596 gen_rtx_SET (VOIDmode, stack_reg,
8597 gen_rtx_PLUS (Pmode, stack_reg,
8602 /* Add a RTX_FRAME_RELATED note so that dwarf2out_frame_debug_expr
8605 (mem (plus (blah) (regXX)))
8609 (mem (plus (blah) (const VALUE_OF_REGXX))). */
8612 altivec_frame_fixup (insn, reg, val)
8618 real = copy_rtx (PATTERN (insn));
8620 real = replace_rtx (real, reg, GEN_INT (val));
8622 RTX_FRAME_RELATED_P (insn) = 1;
8623 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8628 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
8629 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
8630 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
8631 deduce these equivalences by itself so it wasn't necessary to hold
8632 its hand so much. */
8635 rs6000_frame_related (insn, reg, val, reg2, rreg)
8644 real = copy_rtx (PATTERN (insn));
8646 real = replace_rtx (real, reg,
8647 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
8648 STACK_POINTER_REGNUM),
8651 /* We expect that 'real' is either a SET or a PARALLEL containing
8652 SETs (and possibly other stuff). In a PARALLEL, all the SETs
8653 are important so they all have to be marked RTX_FRAME_RELATED_P. */
8655 if (GET_CODE (real) == SET)
8659 temp = simplify_rtx (SET_SRC (set));
8661 SET_SRC (set) = temp;
8662 temp = simplify_rtx (SET_DEST (set));
8664 SET_DEST (set) = temp;
8665 if (GET_CODE (SET_DEST (set)) == MEM)
8667 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8669 XEXP (SET_DEST (set), 0) = temp;
8672 else if (GET_CODE (real) == PARALLEL)
8675 for (i = 0; i < XVECLEN (real, 0); i++)
8676 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
8678 rtx set = XVECEXP (real, 0, i);
8680 temp = simplify_rtx (SET_SRC (set));
8682 SET_SRC (set) = temp;
8683 temp = simplify_rtx (SET_DEST (set));
8685 SET_DEST (set) = temp;
8686 if (GET_CODE (SET_DEST (set)) == MEM)
8688 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
8690 XEXP (SET_DEST (set), 0) = temp;
8692 RTX_FRAME_RELATED_P (set) = 1;
8698 if (reg2 != NULL_RTX)
8699 real = replace_rtx (real, reg2, rreg);
8701 RTX_FRAME_RELATED_P (insn) = 1;
8702 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
8707 /* Returns an insn that has a vrsave set operation with the
8708 appropriate CLOBBERs. */
8711 generate_set_vrsave (reg, info, epiloguep)
8713 rs6000_stack_t *info;
8717 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
8718 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
8721 = gen_rtx_SET (VOIDmode,
8723 gen_rtx_UNSPEC_VOLATILE (SImode,
8724 gen_rtvec (2, reg, vrsave),
8729 /* We need to clobber the registers in the mask so the scheduler
8730 does not move sets to VRSAVE before sets of AltiVec registers.
8732 However, if the function receives nonlocal gotos, reload will set
8733 all call saved registers live. We will end up with:
8735 (set (reg 999) (mem))
8736 (parallel [ (set (reg vrsave) (unspec blah))
8737 (clobber (reg 999))])
8739 The clobber will cause the store into reg 999 to be dead, and
8740 flow will attempt to delete an epilogue insn. In this case, we
8741 need an unspec use/set of the register. */
8743 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
8744 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
8746 if (!epiloguep || call_used_regs [i])
8747 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
8748 gen_rtx_REG (V4SImode, i));
8751 rtx reg = gen_rtx_REG (V4SImode, i);
8754 = gen_rtx_SET (VOIDmode,
8756 gen_rtx_UNSPEC (V4SImode,
8757 gen_rtvec (1, reg), 27));
8761 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
8763 for (i = 0; i < nclobs; ++i)
8764 XVECEXP (insn, 0, i) = clobs[i];
8769 /* Emit function prologue as insns. */
8772 rs6000_emit_prologue ()
8774 rs6000_stack_t *info = rs6000_stack_info ();
8775 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
8776 int reg_size = TARGET_POWERPC64 ? 8 : 4;
8777 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
8778 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
8779 rtx frame_reg_rtx = sp_reg_rtx;
8780 rtx cr_save_rtx = NULL;
8782 int saving_FPRs_inline;
8783 int using_store_multiple;
8784 HOST_WIDE_INT sp_offset = 0;
8786 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
8787 && info->first_gp_reg_save < 31);
8788 saving_FPRs_inline = (info->first_fp_reg_save == 64
8789 || FP_SAVE_INLINE (info->first_fp_reg_save));
8791 /* For V.4, update stack before we do any saving and set back pointer. */
8792 if (info->push_p && DEFAULT_ABI == ABI_V4)
8794 if (info->total_size < 32767)
8795 sp_offset = info->total_size;
8797 frame_reg_rtx = frame_ptr_rtx;
8798 rs6000_emit_allocate_stack (info->total_size,
8799 (frame_reg_rtx != sp_reg_rtx
8802 || info->first_fp_reg_save < 64
8803 || info->first_gp_reg_save < 32
8805 if (frame_reg_rtx != sp_reg_rtx)
8806 rs6000_emit_stack_tie ();
8809 /* Save AltiVec registers if needed. */
8810 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
8814 /* There should be a non inline version of this, for when we
8815 are saving lots of vector registers. */
8816 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
8817 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
8819 rtx areg, savereg, mem;
8822 offset = info->altivec_save_offset + sp_offset
8823 + 16 * (i - info->first_altivec_reg_save);
8825 savereg = gen_rtx_REG (V4SImode, i);
8827 areg = gen_rtx_REG (Pmode, 0);
8828 emit_move_insn (areg, GEN_INT (offset));
8830 /* AltiVec addressing mode is [reg+reg]. */
8831 mem = gen_rtx_MEM (V4SImode,
8832 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
8834 set_mem_alias_set (mem, rs6000_sr_alias_set);
8836 insn = emit_move_insn (mem, savereg);
8838 altivec_frame_fixup (insn, areg, offset);
8842 /* VRSAVE is a bit vector representing which AltiVec registers
8843 are used. The OS uses this to determine which vector
8844 registers to save on a context switch. We need to save
8845 VRSAVE on the stack frame, add whatever AltiVec registers we
8846 used in this function, and do the corresponding magic in the
8849 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
8851 rtx reg, mem, vrsave;
8854 /* Get VRSAVE onto a GPR. */
8855 reg = gen_rtx_REG (SImode, 12);
8856 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
8858 emit_insn (gen_get_vrsave_internal (reg));
8860 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
8863 offset = info->vrsave_save_offset + sp_offset;
8865 = gen_rtx_MEM (SImode,
8866 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
8867 set_mem_alias_set (mem, rs6000_sr_alias_set);
8868 insn = emit_move_insn (mem, reg);
8870 /* Include the registers in the mask. */
8871 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
8873 insn = emit_insn (generate_set_vrsave (reg, info, 0));
8876 /* If we use the link register, get it into r0. */
8877 if (info->lr_save_p)
8878 emit_move_insn (gen_rtx_REG (Pmode, 0),
8879 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
8881 /* If we need to save CR, put it into r12. */
8882 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
8884 cr_save_rtx = gen_rtx_REG (SImode, 12);
8885 emit_insn (gen_movesi_from_cr (cr_save_rtx));
8888 /* Do any required saving of fpr's. If only one or two to save, do
8889 it ourselves. Otherwise, call function. */
8890 if (saving_FPRs_inline)
8893 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8894 if ((regs_ever_live[info->first_fp_reg_save+i]
8895 && ! call_used_regs[info->first_fp_reg_save+i]))
8898 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
8899 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8900 GEN_INT (info->fp_save_offset
8903 mem = gen_rtx_MEM (DFmode, addr);
8904 set_mem_alias_set (mem, rs6000_sr_alias_set);
8906 insn = emit_move_insn (mem, reg);
8907 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8908 NULL_RTX, NULL_RTX);
8911 else if (info->first_fp_reg_save != 64)
8915 const char *alloc_rname;
8917 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
8919 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
8921 LINK_REGISTER_REGNUM));
8922 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
8923 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
8924 alloc_rname = ggc_strdup (rname);
8925 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
8926 gen_rtx_SYMBOL_REF (Pmode,
8928 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
8931 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
8932 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8933 GEN_INT (info->fp_save_offset
8934 + sp_offset + 8*i));
8935 mem = gen_rtx_MEM (DFmode, addr);
8936 set_mem_alias_set (mem, rs6000_sr_alias_set);
8938 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
8940 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8941 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8942 NULL_RTX, NULL_RTX);
8945 /* Save GPRs. This is done as a PARALLEL if we are using
8946 the store-multiple instructions. */
8947 if (using_store_multiple)
8951 p = rtvec_alloc (32 - info->first_gp_reg_save);
8952 dwarfp = rtvec_alloc (32 - info->first_gp_reg_save);
8953 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8956 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
8957 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8958 GEN_INT (info->gp_save_offset
8961 mem = gen_rtx_MEM (reg_mode, addr);
8962 set_mem_alias_set (mem, rs6000_sr_alias_set);
8964 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
8966 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
8967 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8968 NULL_RTX, NULL_RTX);
8973 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
8974 if ((regs_ever_live[info->first_gp_reg_save+i]
8975 && ! call_used_regs[info->first_gp_reg_save+i])
8976 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
8977 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
8978 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
8981 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
8982 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
8983 GEN_INT (info->gp_save_offset
8986 mem = gen_rtx_MEM (reg_mode, addr);
8987 set_mem_alias_set (mem, rs6000_sr_alias_set);
8989 insn = emit_move_insn (mem, reg);
8990 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
8991 NULL_RTX, NULL_RTX);
8995 /* ??? There's no need to emit actual instructions here, but it's the
8996 easiest way to get the frame unwind information emitted. */
8997 if (current_function_calls_eh_return)
8999 unsigned int i, regno;
9005 regno = EH_RETURN_DATA_REGNO (i);
9006 if (regno == INVALID_REGNUM)
9009 reg = gen_rtx_REG (reg_mode, regno);
9010 addr = plus_constant (frame_reg_rtx,
9011 info->ehrd_offset + sp_offset
9012 + reg_size * (int) i);
9013 mem = gen_rtx_MEM (reg_mode, addr);
9014 set_mem_alias_set (mem, rs6000_sr_alias_set);
9016 insn = emit_move_insn (mem, reg);
9017 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9018 NULL_RTX, NULL_RTX);
9022 /* Save lr if we used it. */
9023 if (info->lr_save_p)
9025 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9026 GEN_INT (info->lr_save_offset + sp_offset));
9027 rtx reg = gen_rtx_REG (Pmode, 0);
9028 rtx mem = gen_rtx_MEM (Pmode, addr);
9029 /* This should not be of rs6000_sr_alias_set, because of
9030 __builtin_return_address. */
9032 insn = emit_move_insn (mem, reg);
9033 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9034 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
9037 /* Save CR if we use any that must be preserved. */
9038 if (info->cr_save_p)
9040 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9041 GEN_INT (info->cr_save_offset + sp_offset));
9042 rtx mem = gen_rtx_MEM (SImode, addr);
9044 set_mem_alias_set (mem, rs6000_sr_alias_set);
9046 /* If r12 was used to hold the original sp, copy cr into r0 now
9048 if (REGNO (frame_reg_rtx) == 12)
9050 cr_save_rtx = gen_rtx_REG (SImode, 0);
9051 emit_insn (gen_movesi_from_cr (cr_save_rtx));
9053 insn = emit_move_insn (mem, cr_save_rtx);
9055 /* Now, there's no way that dwarf2out_frame_debug_expr is going
9056 to understand '(unspec:SI [(reg:CC 68) ...] 19)'. But that's
9057 OK. All we have to do is specify that _one_ condition code
9058 register is saved in this stack slot. The thrower's epilogue
9059 will then restore all the call-saved registers.
9060 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
9061 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
9062 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
9065 /* Update stack and set back pointer unless this is V.4,
9066 for which it was done previously. */
9067 if (info->push_p && DEFAULT_ABI != ABI_V4)
9068 rs6000_emit_allocate_stack (info->total_size, FALSE);
9070 /* Set frame pointer, if needed. */
9071 if (frame_pointer_needed)
9073 insn = emit_move_insn (gen_rtx_REG (reg_mode, FRAME_POINTER_REGNUM),
9075 RTX_FRAME_RELATED_P (insn) = 1;
9078 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
9079 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
9080 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
9081 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
9083 /* If emit_load_toc_table will use the link register, we need to save
9084 it. We use R11 for this purpose because emit_load_toc_table
9085 can use register 0. This allows us to use a plain 'blr' to return
9086 from the procedure more often. */
9087 int save_LR_around_toc_setup = (TARGET_ELF && flag_pic != 0
9088 && ! info->lr_save_p
9089 && EXIT_BLOCK_PTR->pred != NULL);
9090 if (save_LR_around_toc_setup)
9091 emit_move_insn (gen_rtx_REG (Pmode, 11),
9092 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
9094 rs6000_emit_load_toc_table (TRUE);
9096 if (save_LR_around_toc_setup)
9097 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
9098 gen_rtx_REG (Pmode, 11));
9101 if (DEFAULT_ABI == ABI_DARWIN
9102 && flag_pic && current_function_uses_pic_offset_table)
9104 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
9106 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest)));
9109 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
9110 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
9114 /* Write function prologue. */
9117 rs6000_output_function_prologue (file, size)
9119 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
9121 rs6000_stack_t *info = rs6000_stack_info ();
9123 if (TARGET_DEBUG_STACK)
9124 debug_stack_info (info);
9126 /* Write .extern for any function we will call to save and restore
9128 if (info->first_fp_reg_save < 64
9129 && !FP_SAVE_INLINE (info->first_fp_reg_save))
9130 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
9131 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
9132 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
9135 /* Write .extern for AIX common mode routines, if needed. */
9136 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
9138 fputs ("\t.extern __mulh\n", file);
9139 fputs ("\t.extern __mull\n", file);
9140 fputs ("\t.extern __divss\n", file);
9141 fputs ("\t.extern __divus\n", file);
9142 fputs ("\t.extern __quoss\n", file);
9143 fputs ("\t.extern __quous\n", file);
9144 common_mode_defined = 1;
9147 if (! HAVE_prologue)
9151 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
9152 the "toplevel" insn chain. */
9153 emit_note (0, NOTE_INSN_DELETED);
9154 rs6000_emit_prologue ();
9155 emit_note (0, NOTE_INSN_DELETED);
9157 /* Expand INSN_ADDRESSES so final() doesn't crash. */
9161 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
9163 INSN_ADDRESSES_NEW (insn, addr);
9168 if (TARGET_DEBUG_STACK)
9169 debug_rtx_list (get_insns (), 100);
9170 final (get_insns (), file, FALSE, FALSE);
9174 rs6000_pic_labelno++;
9177 /* Emit function epilogue as insns.
9179 At present, dwarf2out_frame_debug_expr doesn't understand
9180 register restores, so we don't bother setting RTX_FRAME_RELATED_P
9181 anywhere in the epilogue. Most of the insns below would in any case
9182 need special notes to explain where r11 is in relation to the stack. */
9185 rs6000_emit_epilogue (sibcall)
9188 rs6000_stack_t *info;
9189 int restoring_FPRs_inline;
9190 int using_load_multiple;
9191 int using_mfcr_multiple;
9192 int use_backchain_to_restore_sp;
9194 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
9195 rtx frame_reg_rtx = sp_reg_rtx;
9196 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
9197 int reg_size = TARGET_POWERPC64 ? 8 : 4;
9200 info = rs6000_stack_info ();
9201 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
9202 && info->first_gp_reg_save < 31);
9203 restoring_FPRs_inline = (sibcall
9204 || current_function_calls_eh_return
9205 || info->first_fp_reg_save == 64
9206 || FP_SAVE_INLINE (info->first_fp_reg_save));
9207 use_backchain_to_restore_sp = (frame_pointer_needed
9208 || current_function_calls_alloca
9209 || info->total_size > 32767);
9210 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
9211 || rs6000_cpu == PROCESSOR_PPC603
9212 || rs6000_cpu == PROCESSOR_PPC750
9215 /* If we have a frame pointer, a call to alloca, or a large stack
9216 frame, restore the old stack pointer using the backchain. Otherwise,
9217 we know what size to update it with. */
9218 if (use_backchain_to_restore_sp)
9220 /* Under V.4, don't reset the stack pointer until after we're done
9221 loading the saved registers. */
9222 if (DEFAULT_ABI == ABI_V4)
9223 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
9225 emit_move_insn (frame_reg_rtx,
9226 gen_rtx_MEM (Pmode, sp_reg_rtx));
9229 else if (info->push_p)
9231 if (DEFAULT_ABI == ABI_V4)
9232 sp_offset = info->total_size;
9235 emit_insn (TARGET_32BIT
9236 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
9237 GEN_INT (info->total_size))
9238 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
9239 GEN_INT (info->total_size)));
9243 /* Restore AltiVec registers if needed. */
9244 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
9248 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
9249 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
9251 rtx addr, areg, mem;
9253 areg = gen_rtx_REG (Pmode, 0);
9255 (areg, GEN_INT (info->altivec_save_offset
9257 + 16 * (i - info->first_altivec_reg_save)));
9259 /* AltiVec addressing mode is [reg+reg]. */
9260 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
9261 mem = gen_rtx_MEM (V4SImode, addr);
9262 set_mem_alias_set (mem, rs6000_sr_alias_set);
9264 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
9268 /* Restore VRSAVE if needed. */
9269 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
9273 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9274 GEN_INT (info->vrsave_save_offset + sp_offset));
9275 mem = gen_rtx_MEM (SImode, addr);
9276 set_mem_alias_set (mem, rs6000_sr_alias_set);
9277 reg = gen_rtx_REG (SImode, 12);
9278 emit_move_insn (reg, mem);
9280 emit_insn (generate_set_vrsave (reg, info, 1));
9283 /* Get the old lr if we saved it. */
9284 if (info->lr_save_p)
9286 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9287 GEN_INT (info->lr_save_offset + sp_offset));
9288 rtx mem = gen_rtx_MEM (Pmode, addr);
9290 set_mem_alias_set (mem, rs6000_sr_alias_set);
9292 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
9295 /* Get the old cr if we saved it. */
9296 if (info->cr_save_p)
9298 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9299 GEN_INT (info->cr_save_offset + sp_offset));
9300 rtx mem = gen_rtx_MEM (SImode, addr);
9302 set_mem_alias_set (mem, rs6000_sr_alias_set);
9304 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
9307 /* Set LR here to try to overlap restores below. */
9308 if (info->lr_save_p)
9309 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
9310 gen_rtx_REG (Pmode, 0));
9312 /* Load exception handler data registers, if needed. */
9313 if (current_function_calls_eh_return)
9315 unsigned int i, regno;
9321 regno = EH_RETURN_DATA_REGNO (i);
9322 if (regno == INVALID_REGNUM)
9325 addr = plus_constant (frame_reg_rtx,
9326 info->ehrd_offset + sp_offset
9327 + reg_size * (int) i);
9328 mem = gen_rtx_MEM (reg_mode, addr);
9329 set_mem_alias_set (mem, rs6000_sr_alias_set);
9331 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
9335 /* Restore GPRs. This is done as a PARALLEL if we are using
9336 the load-multiple instructions. */
9337 if (using_load_multiple)
9340 p = rtvec_alloc (32 - info->first_gp_reg_save);
9341 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9343 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9344 GEN_INT (info->gp_save_offset
9347 rtx mem = gen_rtx_MEM (reg_mode, addr);
9349 set_mem_alias_set (mem, rs6000_sr_alias_set);
9352 gen_rtx_SET (VOIDmode,
9353 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
9356 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
9359 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
9360 if ((regs_ever_live[info->first_gp_reg_save+i]
9361 && ! call_used_regs[info->first_gp_reg_save+i])
9362 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
9363 && ((DEFAULT_ABI == ABI_V4 && flag_pic == 1)
9364 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
9366 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9367 GEN_INT (info->gp_save_offset
9370 rtx mem = gen_rtx_MEM (reg_mode, addr);
9372 set_mem_alias_set (mem, rs6000_sr_alias_set);
9374 emit_move_insn (gen_rtx_REG (reg_mode,
9375 info->first_gp_reg_save + i),
9379 /* Restore fpr's if we need to do it without calling a function. */
9380 if (restoring_FPRs_inline)
9381 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
9382 if ((regs_ever_live[info->first_fp_reg_save+i]
9383 && ! call_used_regs[info->first_fp_reg_save+i]))
9386 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
9387 GEN_INT (info->fp_save_offset
9390 mem = gen_rtx_MEM (DFmode, addr);
9391 set_mem_alias_set (mem, rs6000_sr_alias_set);
9393 emit_move_insn (gen_rtx_REG (DFmode,
9394 info->first_fp_reg_save + i),
9398 /* If we saved cr, restore it here. Just those that were used. */
9399 if (info->cr_save_p)
9401 rtx r12_rtx = gen_rtx_REG (SImode, 12);
9404 if (using_mfcr_multiple)
9406 for (i = 0; i < 8; i++)
9407 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9413 if (using_mfcr_multiple && count > 1)
9418 p = rtvec_alloc (count);
9421 for (i = 0; i < 8; i++)
9422 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9424 rtvec r = rtvec_alloc (2);
9425 RTVEC_ELT (r, 0) = r12_rtx;
9426 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
9427 RTVEC_ELT (p, ndx) =
9428 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
9429 gen_rtx_UNSPEC (CCmode, r, 20));
9432 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
9437 for (i = 0; i < 8; i++)
9438 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
9440 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
9446 /* If this is V.4, unwind the stack pointer after all of the loads
9447 have been done. We need to emit a block here so that sched
9448 doesn't decide to move the sp change before the register restores
9449 (which may not have any obvious dependency on the stack). This
9450 doesn't hurt performance, because there is no scheduling that can
9451 be done after this point. */
9452 if (DEFAULT_ABI == ABI_V4)
9454 if (frame_reg_rtx != sp_reg_rtx)
9455 rs6000_emit_stack_tie ();
9457 if (use_backchain_to_restore_sp)
9459 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
9461 else if (sp_offset != 0)
9463 emit_insn (Pmode == SImode
9464 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
9465 GEN_INT (sp_offset))
9466 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
9467 GEN_INT (sp_offset)));
9471 if (current_function_calls_eh_return)
9473 rtx sa = EH_RETURN_STACKADJ_RTX;
9474 emit_insn (Pmode == SImode
9475 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
9476 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
9482 if (! restoring_FPRs_inline)
9483 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
9485 p = rtvec_alloc (2);
9487 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
9488 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
9490 LINK_REGISTER_REGNUM));
9492 /* If we have to restore more than two FP registers, branch to the
9493 restore function. It will return to our caller. */
9494 if (! restoring_FPRs_inline)
9498 const char *alloc_rname;
9500 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
9501 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
9502 alloc_rname = ggc_strdup (rname);
9503 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
9504 gen_rtx_SYMBOL_REF (Pmode,
9507 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
9510 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
9511 GEN_INT (info->fp_save_offset + 8*i));
9512 mem = gen_rtx_MEM (DFmode, addr);
9513 set_mem_alias_set (mem, rs6000_sr_alias_set);
9515 RTVEC_ELT (p, i+3) =
9516 gen_rtx_SET (VOIDmode,
9517 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
9522 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
9526 /* Write function epilogue. */
9529 rs6000_output_function_epilogue (file, size)
9531 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
9533 rs6000_stack_t *info = rs6000_stack_info ();
9534 int optional_tbtab = (optimize_size || TARGET_ELF) ? 0 : 1;
9536 if (! HAVE_epilogue)
9538 rtx insn = get_last_insn ();
9539 /* If the last insn was a BARRIER, we don't have to write anything except
9541 if (GET_CODE (insn) == NOTE)
9542 insn = prev_nonnote_insn (insn);
9543 if (insn == 0 || GET_CODE (insn) != BARRIER)
9545 /* This is slightly ugly, but at least we don't have two
9546 copies of the epilogue-emitting code. */
9549 /* A NOTE_INSN_DELETED is supposed to be at the start
9550 and end of the "toplevel" insn chain. */
9551 emit_note (0, NOTE_INSN_DELETED);
9552 rs6000_emit_epilogue (FALSE);
9553 emit_note (0, NOTE_INSN_DELETED);
9555 /* Expand INSN_ADDRESSES so final() doesn't crash. */
9559 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
9561 INSN_ADDRESSES_NEW (insn, addr);
9566 if (TARGET_DEBUG_STACK)
9567 debug_rtx_list (get_insns (), 100);
9568 final (get_insns (), file, FALSE, FALSE);
9573 /* Output a traceback table here. See /usr/include/sys/debug.h for info
9576 We don't output a traceback table if -finhibit-size-directive was
9577 used. The documentation for -finhibit-size-directive reads
9578 ``don't output a @code{.size} assembler directive, or anything
9579 else that would cause trouble if the function is split in the
9580 middle, and the two halves are placed at locations far apart in
9581 memory.'' The traceback table has this property, since it
9582 includes the offset from the start of the function to the
9583 traceback table itself.
9585 System V.4 Powerpc's (and the embedded ABI derived from it) use a
9586 different traceback table. */
9587 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive)
9589 const char *fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
9590 const char *language_string = lang_hooks.name;
9591 int fixed_parms = 0, float_parms = 0, parm_info = 0;
9594 while (*fname == '.') /* V.4 encodes . in the name */
9597 /* Need label immediately before tbtab, so we can compute its offset
9598 from the function start. */
9601 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
9602 ASM_OUTPUT_LABEL (file, fname);
9604 /* The .tbtab pseudo-op can only be used for the first eight
9605 expressions, since it can't handle the possibly variable
9606 length fields that follow. However, if you omit the optional
9607 fields, the assembler outputs zeros for all optional fields
9608 anyways, giving each variable length field is minimum length
9609 (as defined in sys/debug.h). Thus we can not use the .tbtab
9610 pseudo-op at all. */
9612 /* An all-zero word flags the start of the tbtab, for debuggers
9613 that have to find it by searching forward from the entry
9614 point or from the current pc. */
9615 fputs ("\t.long 0\n", file);
9617 /* Tbtab format type. Use format type 0. */
9618 fputs ("\t.byte 0,", file);
9620 /* Language type. Unfortunately, there doesn't seem to be any
9621 official way to get this info, so we use language_string. C
9622 is 0. C++ is 9. No number defined for Obj-C, so use the
9623 value for C for now. There is no official value for Java,
9624 although IBM appears to be using 13. There is no official value
9625 for Chill, so we've chosen 44 pseudo-randomly. */
9626 if (! strcmp (language_string, "GNU C")
9627 || ! strcmp (language_string, "GNU Objective-C"))
9629 else if (! strcmp (language_string, "GNU F77"))
9631 else if (! strcmp (language_string, "GNU Ada"))
9633 else if (! strcmp (language_string, "GNU Pascal"))
9635 else if (! strcmp (language_string, "GNU C++"))
9637 else if (! strcmp (language_string, "GNU Java"))
9639 else if (! strcmp (language_string, "GNU CHILL"))
9643 fprintf (file, "%d,", i);
9645 /* 8 single bit fields: global linkage (not set for C extern linkage,
9646 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
9647 from start of procedure stored in tbtab, internal function, function
9648 has controlled storage, function has no toc, function uses fp,
9649 function logs/aborts fp operations. */
9650 /* Assume that fp operations are used if any fp reg must be saved. */
9651 fprintf (file, "%d,",
9652 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
9654 /* 6 bitfields: function is interrupt handler, name present in
9655 proc table, function calls alloca, on condition directives
9656 (controls stack walks, 3 bits), saves condition reg, saves
9658 /* The `function calls alloca' bit seems to be set whenever reg 31 is
9659 set up as a frame pointer, even when there is no alloca call. */
9660 fprintf (file, "%d,",
9661 ((optional_tbtab << 6)
9662 | ((optional_tbtab & frame_pointer_needed) << 5)
9663 | (info->cr_save_p << 1)
9664 | (info->lr_save_p)));
9666 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
9668 fprintf (file, "%d,",
9669 (info->push_p << 7) | (64 - info->first_fp_reg_save));
9671 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
9672 fprintf (file, "%d,", (32 - first_reg_to_save ()));
9676 /* Compute the parameter info from the function decl argument
9679 int next_parm_info_bit = 31;
9681 for (decl = DECL_ARGUMENTS (current_function_decl);
9682 decl; decl = TREE_CHAIN (decl))
9684 rtx parameter = DECL_INCOMING_RTL (decl);
9685 enum machine_mode mode = GET_MODE (parameter);
9687 if (GET_CODE (parameter) == REG)
9689 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
9697 else if (mode == DFmode)
9702 /* If only one bit will fit, don't or in this entry. */
9703 if (next_parm_info_bit > 0)
9704 parm_info |= (bits << (next_parm_info_bit - 1));
9705 next_parm_info_bit -= 2;
9709 fixed_parms += ((GET_MODE_SIZE (mode)
9710 + (UNITS_PER_WORD - 1))
9712 next_parm_info_bit -= 1;
9718 /* Number of fixed point parameters. */
9719 /* This is actually the number of words of fixed point parameters; thus
9720 an 8 byte struct counts as 2; and thus the maximum value is 8. */
9721 fprintf (file, "%d,", fixed_parms);
9723 /* 2 bitfields: number of floating point parameters (7 bits), parameters
9725 /* This is actually the number of fp registers that hold parameters;
9726 and thus the maximum value is 13. */
9727 /* Set parameters on stack bit if parameters are not in their original
9728 registers, regardless of whether they are on the stack? Xlc
9729 seems to set the bit when not optimizing. */
9730 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
9732 if (! optional_tbtab)
9735 /* Optional fields follow. Some are variable length. */
9737 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
9739 /* There is an entry for each parameter in a register, in the order that
9740 they occur in the parameter list. Any intervening arguments on the
9741 stack are ignored. If the list overflows a long (max possible length
9742 34 bits) then completely leave off all elements that don't fit. */
9743 /* Only emit this long if there was at least one parameter. */
9744 if (fixed_parms || float_parms)
9745 fprintf (file, "\t.long %d\n", parm_info);
9747 /* Offset from start of code to tb table. */
9748 fputs ("\t.long ", file);
9749 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
9751 RS6000_OUTPUT_BASENAME (file, fname);
9753 assemble_name (file, fname);
9757 RS6000_OUTPUT_BASENAME (file, fname);
9759 assemble_name (file, fname);
9763 /* Interrupt handler mask. */
9764 /* Omit this long, since we never set the interrupt handler bit
9767 /* Number of CTL (controlled storage) anchors. */
9768 /* Omit this long, since the has_ctl bit is never set above. */
9770 /* Displacement into stack of each CTL anchor. */
9771 /* Omit this list of longs, because there are no CTL anchors. */
9773 /* Length of function name. */
9774 fprintf (file, "\t.short %d\n", (int) strlen (fname));
9776 /* Function name. */
9777 assemble_string (fname, strlen (fname));
9779 /* Register for alloca automatic storage; this is always reg 31.
9780 Only emit this if the alloca bit was set above. */
9781 if (frame_pointer_needed)
9782 fputs ("\t.byte 31\n", file);
9784 fputs ("\t.align 2\n", file);
9789 /* A C compound statement that outputs the assembler code for a thunk
9790 function, used to implement C++ virtual function calls with
9791 multiple inheritance. The thunk acts as a wrapper around a virtual
9792 function, adjusting the implicit object parameter before handing
9793 control off to the real function.
9795 First, emit code to add the integer DELTA to the location that
9796 contains the incoming first argument. Assume that this argument
9797 contains a pointer, and is the one used to pass the `this' pointer
9798 in C++. This is the incoming argument *before* the function
9799 prologue, e.g. `%o0' on a sparc. The addition must preserve the
9800 values of all other incoming arguments.
9802 After the addition, emit code to jump to FUNCTION, which is a
9803 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
9804 not touch the return address. Hence returning from FUNCTION will
9805 return to whoever called the current `thunk'.
9807 The effect must be as if FUNCTION had been called directly with the
9808 adjusted first argument. This macro is responsible for emitting
9809 all of the code for a thunk function; output_function_prologue()
9810 and output_function_epilogue() are not invoked.
9812 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
9813 been extracted from it.) It might possibly be useful on some
9814 targets, but probably not.
9816 If you do not define this macro, the target-independent code in the
9817 C++ frontend will generate a less efficient heavyweight thunk that
9818 calls FUNCTION instead of jumping to it. The generic approach does
9819 not support varargs. */
9822 output_mi_thunk (file, thunk_fndecl, delta, function)
9824 tree thunk_fndecl ATTRIBUTE_UNUSED;
9828 const char *this_reg =
9829 reg_names[ aggregate_value_p (TREE_TYPE (TREE_TYPE (function))) ? 4 : 3 ];
9832 const char *r0 = reg_names[0];
9833 const char *toc = reg_names[2];
9834 const char *schain = reg_names[11];
9835 const char *r12 = reg_names[12];
9837 static int labelno = 0;
9839 /* Small constants that can be done by one add instruction. */
9840 if (delta >= -32768 && delta <= 32767)
9842 if (! TARGET_NEW_MNEMONICS)
9843 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta, this_reg);
9845 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta);
9848 /* Large constants that can be done by one addis instruction. */
9849 else if ((delta & 0xffff) == 0 && num_insns_constant_wide (delta) == 1)
9850 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
9853 /* 32-bit constants that can be done by an add and addis instruction. */
9854 else if (TARGET_32BIT || num_insns_constant_wide (delta) == 1)
9856 /* Break into two pieces, propagating the sign bit from the low
9857 word to the upper word. */
9858 int delta_high = delta >> 16;
9859 int delta_low = delta & 0xffff;
9860 if ((delta_low & 0x8000) != 0)
9863 delta_low = (delta_low ^ 0x8000) - 0x8000; /* sign extend */
9866 asm_fprintf (file, "\t{cau|addis} %s,%s,%d\n", this_reg, this_reg,
9869 if (! TARGET_NEW_MNEMONICS)
9870 fprintf (file, "\tcal %s,%d(%s)\n", this_reg, delta_low, this_reg);
9872 fprintf (file, "\taddi %s,%s,%d\n", this_reg, this_reg, delta_low);
9875 /* 64-bit constants, fixme */
9879 /* Get the prefix in front of the names. */
9880 switch (DEFAULT_ABI)
9890 case ABI_AIX_NODESC:
9895 /* If the function is compiled in this module, jump to it directly.
9896 Otherwise, load up its address and jump to it. */
9898 fname = XSTR (XEXP (DECL_RTL (function), 0), 0);
9900 if (current_file_function_operand (XEXP (DECL_RTL (function), 0), VOIDmode)
9901 && (! lookup_attribute ("longcall",
9902 TYPE_ATTRIBUTES (TREE_TYPE (function)))
9903 || lookup_attribute ("shortcall",
9904 TYPE_ATTRIBUTES (TREE_TYPE (function)))))
9907 fprintf (file, "\tb %s", prefix);
9908 assemble_name (file, fname);
9909 if (DEFAULT_ABI == ABI_V4 && flag_pic) fputs ("@local", file);
9915 switch (DEFAULT_ABI)
9921 /* Set up a TOC entry for the function. */
9922 ASM_GENERATE_INTERNAL_LABEL (buf, "Lthunk", labelno);
9924 ASM_OUTPUT_INTERNAL_LABEL (file, "Lthunk", labelno);
9927 if (TARGET_MINIMAL_TOC)
9928 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
9931 fputs ("\t.tc ", file);
9932 assemble_name (file, fname);
9933 fputs ("[TC],", file);
9935 assemble_name (file, fname);
9938 if (TARGET_MINIMAL_TOC)
9939 asm_fprintf (file, (TARGET_32BIT)
9940 ? "\t{l|lwz} %s,%s(%s)\n" : "\tld %s,%s(%s)\n", r12,
9941 TARGET_ELF ? ".LCTOC0@toc" : ".LCTOC..1", toc);
9942 asm_fprintf (file, (TARGET_32BIT) ? "\t{l|lwz} %s," : "\tld %s,", r12);
9943 assemble_name (file, buf);
9944 if (TARGET_ELF && TARGET_MINIMAL_TOC)
9945 fputs ("-(.LCTOC1)", file);
9946 asm_fprintf (file, "(%s)\n", TARGET_MINIMAL_TOC ? r12 : toc);
9948 (TARGET_32BIT) ? "\t{l|lwz} %s,0(%s)\n" : "\tld %s,0(%s)\n",
9952 (TARGET_32BIT) ? "\t{l|lwz} %s,4(%s)\n" : "\tld %s,8(%s)\n",
9955 asm_fprintf (file, "\tmtctr %s\n", r0);
9957 (TARGET_32BIT) ? "\t{l|lwz} %s,8(%s)\n" : "\tld %s,16(%s)\n",
9960 asm_fprintf (file, "\tbctr\n");
9963 case ABI_AIX_NODESC:
9965 fprintf (file, "\tb %s", prefix);
9966 assemble_name (file, fname);
9967 if (flag_pic) fputs ("@plt", file);
9973 fprintf (file, "\tb %s", prefix);
9974 if (flag_pic && !machopic_name_defined_p (fname))
9975 assemble_name (file, machopic_stub_name (fname));
9977 assemble_name (file, fname);
9986 /* A quick summary of the various types of 'constant-pool tables'
9989 Target Flags Name One table per
9990 AIX (none) AIX TOC object file
9991 AIX -mfull-toc AIX TOC object file
9992 AIX -mminimal-toc AIX minimal TOC translation unit
9993 SVR4/EABI (none) SVR4 SDATA object file
9994 SVR4/EABI -fpic SVR4 pic object file
9995 SVR4/EABI -fPIC SVR4 PIC translation unit
9996 SVR4/EABI -mrelocatable EABI TOC function
9997 SVR4/EABI -maix AIX TOC object file
9998 SVR4/EABI -maix -mminimal-toc
9999 AIX minimal TOC translation unit
10001 Name Reg. Set by entries contains:
10002 made by addrs? fp? sum?
10004 AIX TOC 2 crt0 as Y option option
10005 AIX minimal TOC 30 prolog gcc Y Y option
10006 SVR4 SDATA 13 crt0 gcc N Y N
10007 SVR4 pic 30 prolog ld Y not yet N
10008 SVR4 PIC 30 prolog gcc Y option option
10009 EABI TOC 30 prolog gcc Y option option
10013 /* Hash table stuff for keeping track of TOC entries. */
10015 struct toc_hash_struct
10017 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
10018 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
10020 enum machine_mode key_mode;
10024 static htab_t toc_hash_table;
10026 /* Hash functions for the hash table. */
10029 rs6000_hash_constant (k)
10032 unsigned result = (GET_CODE (k) << 3) ^ GET_MODE (k);
10033 const char *format = GET_RTX_FORMAT (GET_CODE (k));
10034 int flen = strlen (format);
10037 if (GET_CODE (k) == LABEL_REF)
10038 return result * 1231 + X0INT (XEXP (k, 0), 3);
10040 if (GET_CODE (k) == CODE_LABEL)
10045 for (; fidx < flen; fidx++)
10046 switch (format[fidx])
10051 const char *str = XSTR (k, fidx);
10052 len = strlen (str);
10053 result = result * 613 + len;
10054 for (i = 0; i < len; i++)
10055 result = result * 613 + (unsigned) str[i];
10060 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
10064 result = result * 613 + (unsigned) XINT (k, fidx);
10067 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
10068 result = result * 613 + (unsigned) XWINT (k, fidx);
10072 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
10073 result = result * 613 + (unsigned) (XWINT (k, fidx)
10084 toc_hash_function (hash_entry)
10085 const void * hash_entry;
10087 const struct toc_hash_struct *thc =
10088 (const struct toc_hash_struct *) hash_entry;
10089 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
10092 /* Compare H1 and H2 for equivalence. */
10095 toc_hash_eq (h1, h2)
10099 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
10100 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
10102 if (((const struct toc_hash_struct *) h1)->key_mode
10103 != ((const struct toc_hash_struct *) h2)->key_mode)
10106 return rtx_equal_p (r1, r2);
10109 /* Mark the hash table-entry HASH_ENTRY. */
10112 toc_hash_mark_entry (hash_slot, unused)
10114 void * unused ATTRIBUTE_UNUSED;
10116 const struct toc_hash_struct * hash_entry =
10117 *(const struct toc_hash_struct **) hash_slot;
10118 rtx r = hash_entry->key;
10119 ggc_set_mark (hash_entry);
10120 /* For CODE_LABELS, we don't want to drag in the whole insn chain... */
10121 if (GET_CODE (r) == LABEL_REF)
10124 ggc_set_mark (XEXP (r, 0));
10131 /* Mark all the elements of the TOC hash-table *HT. */
10134 toc_hash_mark_table (vht)
10139 htab_traverse (*ht, toc_hash_mark_entry, (void *)0);
10142 /* These are the names given by the C++ front-end to vtables, and
10143 vtable-like objects. Ideally, this logic should not be here;
10144 instead, there should be some programmatic way of inquiring as
10145 to whether or not an object is a vtable. */
10147 #define VTABLE_NAME_P(NAME) \
10148 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
10149 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
10150 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
10151 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
10154 rs6000_output_symbol_ref (file, x)
10158 /* Currently C++ toc references to vtables can be emitted before it
10159 is decided whether the vtable is public or private. If this is
10160 the case, then the linker will eventually complain that there is
10161 a reference to an unknown section. Thus, for vtables only,
10162 we emit the TOC reference to reference the symbol and not the
10164 const char *name = XSTR (x, 0);
10166 if (VTABLE_NAME_P (name))
10168 RS6000_OUTPUT_BASENAME (file, name);
10171 assemble_name (file, name);
10174 /* Output a TOC entry. We derive the entry name from what is being
10178 output_toc (file, x, labelno, mode)
10182 enum machine_mode mode;
10185 const char *name = buf;
10186 const char *real_name;
10193 /* When the linker won't eliminate them, don't output duplicate
10194 TOC entries (this happens on AIX if there is any kind of TOC,
10195 and on SVR4 under -fPIC or -mrelocatable). */
10198 struct toc_hash_struct *h;
10201 h = ggc_alloc (sizeof (*h));
10203 h->key_mode = mode;
10204 h->labelno = labelno;
10206 found = htab_find_slot (toc_hash_table, h, 1);
10207 if (*found == NULL)
10209 else /* This is indeed a duplicate.
10210 Set this label equal to that label. */
10212 fputs ("\t.set ", file);
10213 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
10214 fprintf (file, "%d,", labelno);
10215 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
10216 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
10222 /* If we're going to put a double constant in the TOC, make sure it's
10223 aligned properly when strict alignment is on. */
10224 if (GET_CODE (x) == CONST_DOUBLE
10225 && STRICT_ALIGNMENT
10226 && GET_MODE_BITSIZE (mode) >= 64
10227 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
10228 ASM_OUTPUT_ALIGN (file, 3);
10231 ASM_OUTPUT_INTERNAL_LABEL (file, "LC", labelno);
10233 /* Handle FP constants specially. Note that if we have a minimal
10234 TOC, things we put here aren't actually in the TOC, so we can allow
10236 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
10238 REAL_VALUE_TYPE rv;
10241 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
10242 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
10246 if (TARGET_MINIMAL_TOC)
10247 fputs (DOUBLE_INT_ASM_OP, file);
10249 fprintf (file, "\t.tc FD_%lx_%lx[TC],", k[0], k[1]);
10250 fprintf (file, "0x%lx%08lx\n", k[0], k[1]);
10255 if (TARGET_MINIMAL_TOC)
10256 fputs ("\t.long ", file);
10258 fprintf (file, "\t.tc FD_%lx_%lx[TC],", k[0], k[1]);
10259 fprintf (file, "0x%lx,0x%lx\n", k[0], k[1]);
10263 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
10265 REAL_VALUE_TYPE rv;
10268 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
10269 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
10273 if (TARGET_MINIMAL_TOC)
10274 fputs (DOUBLE_INT_ASM_OP, file);
10276 fprintf (file, "\t.tc FS_%lx[TC],", l);
10277 fprintf (file, "0x%lx00000000\n", l);
10282 if (TARGET_MINIMAL_TOC)
10283 fputs ("\t.long ", file);
10285 fprintf (file, "\t.tc FS_%lx[TC],", l);
10286 fprintf (file, "0x%lx\n", l);
10290 else if (GET_MODE (x) == VOIDmode
10291 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
10293 unsigned HOST_WIDE_INT low;
10294 HOST_WIDE_INT high;
10296 if (GET_CODE (x) == CONST_DOUBLE)
10298 low = CONST_DOUBLE_LOW (x);
10299 high = CONST_DOUBLE_HIGH (x);
10302 #if HOST_BITS_PER_WIDE_INT == 32
10305 high = (low & 0x80000000) ? ~0 : 0;
10309 low = INTVAL (x) & 0xffffffff;
10310 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
10314 /* TOC entries are always Pmode-sized, but since this
10315 is a bigendian machine then if we're putting smaller
10316 integer constants in the TOC we have to pad them.
10317 (This is still a win over putting the constants in
10318 a separate constant pool, because then we'd have
10319 to have both a TOC entry _and_ the actual constant.)
10321 For a 32-bit target, CONST_INT values are loaded and shifted
10322 entirely within `low' and can be stored in one TOC entry. */
10324 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
10325 abort ();/* It would be easy to make this work, but it doesn't now. */
10327 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
10328 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
10329 POINTER_SIZE, &low, &high, 0);
10333 if (TARGET_MINIMAL_TOC)
10334 fputs (DOUBLE_INT_ASM_OP, file);
10336 fprintf (file, "\t.tc ID_%lx_%lx[TC],", (long) high, (long) low);
10337 fprintf (file, "0x%lx%08lx\n", (long) high, (long) low);
10342 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
10344 if (TARGET_MINIMAL_TOC)
10345 fputs ("\t.long ", file);
10347 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
10348 (long) high, (long) low);
10349 fprintf (file, "0x%lx,0x%lx\n", (long) high, (long) low);
10353 if (TARGET_MINIMAL_TOC)
10354 fputs ("\t.long ", file);
10356 fprintf (file, "\t.tc IS_%lx[TC],", (long) low);
10357 fprintf (file, "0x%lx\n", (long) low);
10363 if (GET_CODE (x) == CONST)
10365 if (GET_CODE (XEXP (x, 0)) != PLUS)
10368 base = XEXP (XEXP (x, 0), 0);
10369 offset = INTVAL (XEXP (XEXP (x, 0), 1));
10372 if (GET_CODE (base) == SYMBOL_REF)
10373 name = XSTR (base, 0);
10374 else if (GET_CODE (base) == LABEL_REF)
10375 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
10376 else if (GET_CODE (base) == CODE_LABEL)
10377 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
10381 STRIP_NAME_ENCODING (real_name, name);
10382 if (TARGET_MINIMAL_TOC)
10383 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
10386 fprintf (file, "\t.tc %s", real_name);
10389 fprintf (file, ".N%d", - offset);
10391 fprintf (file, ".P%d", offset);
10393 fputs ("[TC],", file);
10396 /* Currently C++ toc references to vtables can be emitted before it
10397 is decided whether the vtable is public or private. If this is
10398 the case, then the linker will eventually complain that there is
10399 a TOC reference to an unknown section. Thus, for vtables only,
10400 we emit the TOC reference to reference the symbol and not the
10402 if (VTABLE_NAME_P (name))
10404 RS6000_OUTPUT_BASENAME (file, name);
10406 fprintf (file, "%d", offset);
10407 else if (offset > 0)
10408 fprintf (file, "+%d", offset);
10411 output_addr_const (file, x);
10415 /* Output an assembler pseudo-op to write an ASCII string of N characters
10416 starting at P to FILE.
10418 On the RS/6000, we have to do this using the .byte operation and
10419 write out special characters outside the quoted string.
10420 Also, the assembler is broken; very long strings are truncated,
10421 so we must artificially break them up early. */
10424 output_ascii (file, p, n)
10430 int i, count_string;
10431 const char *for_string = "\t.byte \"";
10432 const char *for_decimal = "\t.byte ";
10433 const char *to_close = NULL;
10436 for (i = 0; i < n; i++)
10439 if (c >= ' ' && c < 0177)
10442 fputs (for_string, file);
10445 /* Write two quotes to get one. */
10453 for_decimal = "\"\n\t.byte ";
10457 if (count_string >= 512)
10459 fputs (to_close, file);
10461 for_string = "\t.byte \"";
10462 for_decimal = "\t.byte ";
10470 fputs (for_decimal, file);
10471 fprintf (file, "%d", c);
10473 for_string = "\n\t.byte \"";
10474 for_decimal = ", ";
10480 /* Now close the string if we have written one. Then end the line. */
10482 fputs (to_close, file);
10485 /* Generate a unique section name for FILENAME for a section type
10486 represented by SECTION_DESC. Output goes into BUF.
10488 SECTION_DESC can be any string, as long as it is different for each
10489 possible section type.
10491 We name the section in the same manner as xlc. The name begins with an
10492 underscore followed by the filename (after stripping any leading directory
10493 names) with the last period replaced by the string SECTION_DESC. If
10494 FILENAME does not contain a period, SECTION_DESC is appended to the end of
10498 rs6000_gen_section_name (buf, filename, section_desc)
10500 const char *filename;
10501 const char *section_desc;
10503 const char *q, *after_last_slash, *last_period = 0;
10507 after_last_slash = filename;
10508 for (q = filename; *q; q++)
10511 after_last_slash = q + 1;
10512 else if (*q == '.')
10516 len = strlen (after_last_slash) + strlen (section_desc) + 2;
10517 *buf = (char *) permalloc (len);
10522 for (q = after_last_slash; *q; q++)
10524 if (q == last_period)
10526 strcpy (p, section_desc);
10527 p += strlen (section_desc);
10530 else if (ISALNUM (*q))
10534 if (last_period == 0)
10535 strcpy (p, section_desc);
10540 /* Emit profile function. */
10543 output_profile_hook (labelno)
10546 if (DEFAULT_ABI == ABI_AIX)
10549 const char *label_name;
10552 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
10553 STRIP_NAME_ENCODING (label_name, ggc_strdup (buf));
10554 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
10556 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
10559 else if (DEFAULT_ABI == ABI_DARWIN)
10561 const char *mcount_name = RS6000_MCOUNT;
10562 int caller_addr_regno = LINK_REGISTER_REGNUM;
10564 /* Be conservative and always set this, at least for now. */
10565 current_function_uses_pic_offset_table = 1;
10568 /* For PIC code, set up a stub and collect the caller's address
10569 from r0, which is where the prologue puts it. */
10572 mcount_name = machopic_stub_name (mcount_name);
10573 if (current_function_uses_pic_offset_table)
10574 caller_addr_regno = 0;
10577 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
10579 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
10583 /* Write function profiler code. */
10586 output_function_profiler (file, labelno)
10592 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
10593 switch (DEFAULT_ABI)
10599 case ABI_AIX_NODESC:
10600 fprintf (file, "\tmflr %s\n", reg_names[0]);
10603 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
10604 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10605 reg_names[0], reg_names[1]);
10606 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
10607 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
10608 assemble_name (file, buf);
10609 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
10611 else if (flag_pic > 1)
10613 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10614 reg_names[0], reg_names[1]);
10615 /* Now, we need to get the address of the label. */
10616 fputs ("\tbl 1f\n\t.long ", file);
10617 assemble_name (file, buf);
10618 fputs ("-.\n1:", file);
10619 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
10620 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
10621 reg_names[0], reg_names[11]);
10622 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
10623 reg_names[0], reg_names[0], reg_names[11]);
10627 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
10628 assemble_name (file, buf);
10629 fputs ("@ha\n", file);
10630 asm_fprintf (file, "\t{st|stw} %s,4(%s)\n",
10631 reg_names[0], reg_names[1]);
10632 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
10633 assemble_name (file, buf);
10634 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
10637 if (current_function_needs_context)
10638 asm_fprintf (file, "\tmr %s,%s\n",
10639 reg_names[30], reg_names[STATIC_CHAIN_REGNUM]);
10640 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
10641 if (current_function_needs_context)
10642 asm_fprintf (file, "\tmr %s,%s\n",
10643 reg_names[STATIC_CHAIN_REGNUM], reg_names[30]);
10648 /* Don't do anything, done in output_profile_hook (). */
10654 /* Adjust the cost of a scheduling dependency. Return the new cost of
10655 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
10658 rs6000_adjust_cost (insn, link, dep_insn, cost)
10661 rtx dep_insn ATTRIBUTE_UNUSED;
10664 if (! recog_memoized (insn))
10667 if (REG_NOTE_KIND (link) != 0)
10670 if (REG_NOTE_KIND (link) == 0)
10672 /* Data dependency; DEP_INSN writes a register that INSN reads
10673 some cycles later. */
10674 switch (get_attr_type (insn))
10677 /* Tell the first scheduling pass about the latency between
10678 a mtctr and bctr (and mtlr and br/blr). The first
10679 scheduling pass will not know about this latency since
10680 the mtctr instruction, which has the latency associated
10681 to it, will be generated by reload. */
10682 return TARGET_POWER ? 5 : 4;
10684 /* Leave some extra cycles between a compare and its
10685 dependent branch, to inhibit expensive mispredicts. */
10686 if ((rs6000_cpu_attr == CPU_PPC750
10687 || rs6000_cpu_attr == CPU_PPC7400
10688 || rs6000_cpu_attr == CPU_PPC7450)
10689 && recog_memoized (dep_insn)
10690 && (INSN_CODE (dep_insn) >= 0)
10691 && (get_attr_type (dep_insn) == TYPE_COMPARE
10692 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
10693 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
10694 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL))
10699 /* Fall out to return default cost. */
10705 /* A C statement (sans semicolon) to update the integer scheduling
10706 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
10707 INSN earlier, increase the priority to execute INSN later. Do not
10708 define this macro if you do not need to adjust the scheduling
10709 priorities of insns. */
10712 rs6000_adjust_priority (insn, priority)
10713 rtx insn ATTRIBUTE_UNUSED;
10716 /* On machines (like the 750) which have asymmetric integer units,
10717 where one integer unit can do multiply and divides and the other
10718 can't, reduce the priority of multiply/divide so it is scheduled
10719 before other integer operations. */
10722 if (! INSN_P (insn))
10725 if (GET_CODE (PATTERN (insn)) == USE)
10728 switch (rs6000_cpu_attr) {
10730 switch (get_attr_type (insn))
10737 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
10738 priority, priority);
10739 if (priority >= 0 && priority < 0x01000000)
10749 /* Return how many instructions the machine can issue per cycle. */
10752 rs6000_issue_rate ()
10754 switch (rs6000_cpu_attr) {
10755 case CPU_RIOS1: /* ? */
10757 case CPU_PPC601: /* ? */
10776 /* Length in units of the trampoline for entering a nested function. */
10779 rs6000_trampoline_size ()
10783 switch (DEFAULT_ABI)
10789 ret = (TARGET_32BIT) ? 12 : 24;
10794 case ABI_AIX_NODESC:
10795 ret = (TARGET_32BIT) ? 40 : 48;
10802 /* Emit RTL insns to initialize the variable parts of a trampoline.
10803 FNADDR is an RTX for the address of the function's pure code.
10804 CXT is an RTX for the static chain value for the function. */
10807 rs6000_initialize_trampoline (addr, fnaddr, cxt)
10812 enum machine_mode pmode = Pmode;
10813 int regsize = (TARGET_32BIT) ? 4 : 8;
10814 rtx ctx_reg = force_reg (pmode, cxt);
10816 switch (DEFAULT_ABI)
10821 /* Macros to shorten the code expansions below. */
10822 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
10823 #define MEM_PLUS(addr,offset) \
10824 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
10826 /* Under AIX, just build the 3 word function descriptor */
10829 rtx fn_reg = gen_reg_rtx (pmode);
10830 rtx toc_reg = gen_reg_rtx (pmode);
10831 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
10832 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
10833 emit_move_insn (MEM_DEREF (addr), fn_reg);
10834 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
10835 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
10839 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
10842 case ABI_AIX_NODESC:
10843 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
10844 FALSE, VOIDmode, 4,
10846 GEN_INT (rs6000_trampoline_size ()), SImode,
10856 /* Table of valid machine attributes. */
10858 const struct attribute_spec rs6000_attribute_table[] =
10860 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
10861 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
10862 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
10863 { NULL, 0, 0, false, false, false, NULL }
10866 /* Handle a "longcall" or "shortcall" attribute; arguments as in
10867 struct attribute_spec.handler. */
10870 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
10873 tree args ATTRIBUTE_UNUSED;
10874 int flags ATTRIBUTE_UNUSED;
10875 bool *no_add_attrs;
10877 if (TREE_CODE (*node) != FUNCTION_TYPE
10878 && TREE_CODE (*node) != FIELD_DECL
10879 && TREE_CODE (*node) != TYPE_DECL)
10881 warning ("`%s' attribute only applies to functions",
10882 IDENTIFIER_POINTER (name));
10883 *no_add_attrs = true;
10889 /* Set longcall attributes on all functions declared when
10890 rs6000_default_long_calls is true. */
10892 rs6000_set_default_type_attributes (type)
10895 if (rs6000_default_long_calls
10896 && (TREE_CODE (type) == FUNCTION_TYPE
10897 || TREE_CODE (type) == METHOD_TYPE))
10898 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
10900 TYPE_ATTRIBUTES (type));
10903 /* Return a reference suitable for calling a function with the
10904 longcall attribute. */
10907 rs6000_longcall_ref (call_ref)
10910 const char *call_name;
10913 if (GET_CODE (call_ref) != SYMBOL_REF)
10916 /* System V adds '.' to the internal name, so skip them. */
10917 call_name = XSTR (call_ref, 0);
10918 if (*call_name == '.')
10920 while (*call_name == '.')
10923 node = get_identifier (call_name);
10924 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
10927 return force_reg (Pmode, call_ref);
10931 /* A C statement or statements to switch to the appropriate section
10932 for output of RTX in mode MODE. You can assume that RTX is some
10933 kind of constant in RTL. The argument MODE is redundant except in
10934 the case of a `const_int' rtx. Select the section by calling
10935 `text_section' or one of the alternatives for other sections.
10937 Do not define this macro if you put all constants in the read-only
10940 #ifdef USING_ELFOS_H
10943 rs6000_select_rtx_section (mode, x)
10944 enum machine_mode mode;
10947 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
10950 && (GET_CODE (x) == SYMBOL_REF
10951 || GET_CODE (x) == LABEL_REF
10952 || GET_CODE (x) == CONST))
10958 /* A C statement or statements to switch to the appropriate
10959 section for output of DECL. DECL is either a `VAR_DECL' node
10960 or a constant of some sort. RELOC indicates whether forming
10961 the initial value of DECL requires link-time relocations. */
10964 rs6000_elf_select_section (decl, reloc, align)
10967 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
10969 int size = int_size_in_bytes (TREE_TYPE (decl));
10972 static void (* const sec_funcs[4]) PARAMS ((void)) = {
10979 needs_sdata = (size > 0
10980 && size <= g_switch_value
10981 && rs6000_sdata != SDATA_NONE
10982 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
10984 if (TREE_CODE (decl) == STRING_CST)
10985 readonly = ! flag_writable_strings;
10986 else if (TREE_CODE (decl) == VAR_DECL)
10987 readonly = (! (flag_pic && reloc)
10988 && TREE_READONLY (decl)
10989 && ! TREE_SIDE_EFFECTS (decl)
10990 && DECL_INITIAL (decl)
10991 && DECL_INITIAL (decl) != error_mark_node
10992 && TREE_CONSTANT (DECL_INITIAL (decl)));
10993 else if (TREE_CODE (decl) == CONSTRUCTOR)
10994 readonly = (! (flag_pic && reloc)
10995 && ! TREE_SIDE_EFFECTS (decl)
10996 && TREE_CONSTANT (decl));
10999 if (needs_sdata && rs6000_sdata != SDATA_EABI)
11002 (*sec_funcs[(readonly ? 0 : 2) + (needs_sdata ? 1 : 0)])();
11005 /* A C statement to build up a unique section name, expressed as a
11006 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
11007 RELOC indicates whether the initial value of EXP requires
11008 link-time relocations. If you do not define this macro, GCC will use
11009 the symbol name prefixed by `.' as the section name. Note - this
11010 macro can now be called for uninitialized data items as well as
11011 initialised data and functions. */
11014 rs6000_elf_unique_section (decl, reloc)
11022 const char *prefix;
11024 static const char *const prefixes[7][2] =
11026 { ".rodata.", ".gnu.linkonce.r." },
11027 { ".sdata2.", ".gnu.linkonce.s2." },
11028 { ".data.", ".gnu.linkonce.d." },
11029 { ".sdata.", ".gnu.linkonce.s." },
11030 { ".bss.", ".gnu.linkonce.b." },
11031 { ".sbss.", ".gnu.linkonce.sb." },
11032 { ".text.", ".gnu.linkonce.t." }
11035 if (TREE_CODE (decl) == FUNCTION_DECL)
11044 if (TREE_CODE (decl) == STRING_CST)
11045 readonly = ! flag_writable_strings;
11046 else if (TREE_CODE (decl) == VAR_DECL)
11047 readonly = (! (flag_pic && reloc)
11048 && TREE_READONLY (decl)
11049 && ! TREE_SIDE_EFFECTS (decl)
11050 && TREE_CONSTANT (DECL_INITIAL (decl)));
11052 size = int_size_in_bytes (TREE_TYPE (decl));
11053 needs_sdata = (size > 0
11054 && size <= g_switch_value
11055 && rs6000_sdata != SDATA_NONE
11056 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)));
11058 if (DECL_INITIAL (decl) == 0
11059 || DECL_INITIAL (decl) == error_mark_node)
11061 else if (! readonly)
11068 /* .sdata2 is only for EABI. */
11069 if (sec == 0 && rs6000_sdata != SDATA_EABI)
11075 STRIP_NAME_ENCODING (name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
11076 prefix = prefixes[sec][DECL_ONE_ONLY (decl)];
11077 len = strlen (name) + strlen (prefix);
11078 string = alloca (len + 1);
11080 sprintf (string, "%s%s", prefix, name);
11082 DECL_SECTION_NAME (decl) = build_string (len, string);
11086 /* If we are referencing a function that is static or is known to be
11087 in this file, make the SYMBOL_REF special. We can use this to indicate
11088 that we can branch to this function without emitting a no-op after the
11089 call. For real AIX calling sequences, we also replace the
11090 function name with the real name (1 or 2 leading .'s), rather than
11091 the function descriptor name. This saves a lot of overriding code
11092 to read the prefixes. */
11095 rs6000_encode_section_info (decl, first)
11102 if (TREE_CODE (decl) == FUNCTION_DECL)
11104 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
11105 if ((TREE_ASM_WRITTEN (decl) || ! TREE_PUBLIC (decl))
11106 && ! DECL_WEAK (decl))
11107 SYMBOL_REF_FLAG (sym_ref) = 1;
11109 if (DEFAULT_ABI == ABI_AIX)
11111 size_t len1 = (DEFAULT_ABI == ABI_AIX) ? 1 : 2;
11112 size_t len2 = strlen (XSTR (sym_ref, 0));
11113 char *str = alloca (len1 + len2 + 1);
11116 memcpy (str + len1, XSTR (sym_ref, 0), len2 + 1);
11118 XSTR (sym_ref, 0) = ggc_alloc_string (str, len1 + len2);
11121 else if (rs6000_sdata != SDATA_NONE
11122 && DEFAULT_ABI == ABI_V4
11123 && TREE_CODE (decl) == VAR_DECL)
11125 int size = int_size_in_bytes (TREE_TYPE (decl));
11126 tree section_name = DECL_SECTION_NAME (decl);
11127 const char *name = (char *)0;
11132 if (TREE_CODE (section_name) == STRING_CST)
11134 name = TREE_STRING_POINTER (section_name);
11135 len = TREE_STRING_LENGTH (section_name);
11141 if ((size > 0 && size <= g_switch_value)
11143 && ((len == sizeof (".sdata") - 1
11144 && strcmp (name, ".sdata") == 0)
11145 || (len == sizeof (".sdata2") - 1
11146 && strcmp (name, ".sdata2") == 0)
11147 || (len == sizeof (".sbss") - 1
11148 && strcmp (name, ".sbss") == 0)
11149 || (len == sizeof (".sbss2") - 1
11150 && strcmp (name, ".sbss2") == 0)
11151 || (len == sizeof (".PPC.EMB.sdata0") - 1
11152 && strcmp (name, ".PPC.EMB.sdata0") == 0)
11153 || (len == sizeof (".PPC.EMB.sbss0") - 1
11154 && strcmp (name, ".PPC.EMB.sbss0") == 0))))
11156 rtx sym_ref = XEXP (DECL_RTL (decl), 0);
11157 size_t len = strlen (XSTR (sym_ref, 0));
11158 char *str = alloca (len + 2);
11161 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
11162 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
11167 #endif /* USING_ELFOS_H */
11170 /* Return a REG that occurs in ADDR with coefficient 1.
11171 ADDR can be effectively incremented by incrementing REG.
11173 r0 is special and we must not select it as an address
11174 register by this routine since our caller will try to
11175 increment the returned register via an "la" instruction. */
11178 find_addr_reg (addr)
11181 while (GET_CODE (addr) == PLUS)
11183 if (GET_CODE (XEXP (addr, 0)) == REG
11184 && REGNO (XEXP (addr, 0)) != 0)
11185 addr = XEXP (addr, 0);
11186 else if (GET_CODE (XEXP (addr, 1)) == REG
11187 && REGNO (XEXP (addr, 1)) != 0)
11188 addr = XEXP (addr, 1);
11189 else if (CONSTANT_P (XEXP (addr, 0)))
11190 addr = XEXP (addr, 1);
11191 else if (CONSTANT_P (XEXP (addr, 1)))
11192 addr = XEXP (addr, 0);
11196 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
11202 rs6000_fatal_bad_address (op)
11205 fatal_insn ("bad address", op);
11208 /* Called to register all of our global variables with the garbage
11212 rs6000_add_gc_roots ()
11214 ggc_add_rtx_root (&rs6000_compare_op0, 1);
11215 ggc_add_rtx_root (&rs6000_compare_op1, 1);
11217 toc_hash_table = htab_create (1021, toc_hash_function, toc_hash_eq, NULL);
11218 ggc_add_root (&toc_hash_table, 1, sizeof (toc_hash_table),
11219 toc_hash_mark_table);
11222 machopic_add_gc_roots ();
11229 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
11230 reference and a constant. */
11233 symbolic_operand (op)
11236 switch (GET_CODE (op))
11243 return (GET_CODE (op) == SYMBOL_REF ||
11244 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
11245 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
11246 && GET_CODE (XEXP (op, 1)) == CONST_INT);
11253 #ifdef RS6000_LONG_BRANCH
11255 static tree stub_list = 0;
11257 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
11258 procedure calls to the linked list. */
11261 add_compiler_stub (label_name, function_name, line_number)
11263 tree function_name;
11266 tree stub = build_tree_list (function_name, label_name);
11267 TREE_TYPE (stub) = build_int_2 (line_number, 0);
11268 TREE_CHAIN (stub) = stub_list;
11272 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
11273 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
11274 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
11276 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
11277 handling procedure calls from the linked list and initializes the
11281 output_compiler_stub ()
11284 char label_buf[256];
11286 tree tmp_stub, stub;
11289 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11291 fprintf (asm_out_file,
11292 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
11294 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
11295 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
11296 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
11297 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
11299 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
11301 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
11304 label_buf[0] = '_';
11305 strcpy (label_buf+1,
11306 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
11309 strcpy (tmp_buf, "lis r12,hi16(");
11310 strcat (tmp_buf, label_buf);
11311 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
11312 strcat (tmp_buf, label_buf);
11313 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
11314 output_asm_insn (tmp_buf, 0);
11316 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
11317 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
11318 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
11319 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
11325 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
11326 already there or not. */
11329 no_previous_def (function_name)
11330 tree function_name;
11333 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11334 if (function_name == STUB_FUNCTION_NAME (stub))
11339 /* GET_PREV_LABEL gets the label name from the previous definition of
11343 get_prev_label (function_name)
11344 tree function_name;
11347 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
11348 if (function_name == STUB_FUNCTION_NAME (stub))
11349 return STUB_LABEL_NAME (stub);
11353 /* INSN is either a function call or a millicode call. It may have an
11354 unconditional jump in its delay slot.
11356 CALL_DEST is the routine we are calling. */
11359 output_call (insn, call_dest, operand_number)
11362 int operand_number;
11364 static char buf[256];
11365 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
11368 tree funname = get_identifier (XSTR (call_dest, 0));
11370 if (no_previous_def (funname))
11373 rtx label_rtx = gen_label_rtx ();
11374 char *label_buf, temp_buf[256];
11375 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
11376 CODE_LABEL_NUMBER (label_rtx));
11377 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
11378 labelname = get_identifier (label_buf);
11379 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
11381 line_number = NOTE_LINE_NUMBER (insn);
11382 add_compiler_stub (labelname, funname, line_number);
11385 labelname = get_prev_label (funname);
11387 sprintf (buf, "jbsr %%z%d,%.246s",
11388 operand_number, IDENTIFIER_POINTER (labelname));
11393 sprintf (buf, "bl %%z%d", operand_number);
11398 #endif /* RS6000_LONG_BRANCH */
11400 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
11402 const char *const symbol_ = (SYMBOL); \
11403 char *buffer_ = (BUF); \
11404 if (symbol_[0] == '"') \
11406 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
11408 else if (name_needs_quotes(symbol_)) \
11410 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
11414 sprintf(buffer_, "L%d$%s", (N), symbol_); \
11419 /* Generate PIC and indirect symbol stubs. */
11422 machopic_output_stub (file, symb, stub)
11424 const char *symb, *stub;
11426 unsigned int length;
11427 char *symbol_name, *lazy_ptr_name;
11428 char *local_label_0;
11429 static int label = 0;
11431 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
11432 STRIP_NAME_ENCODING (symb, symb);
11436 length = strlen (symb);
11437 symbol_name = alloca (length + 32);
11438 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
11440 lazy_ptr_name = alloca (length + 32);
11441 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
11443 local_label_0 = alloca (length + 32);
11444 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);
11447 machopic_picsymbol_stub_section ();
11449 machopic_symbol_stub_section ();
11451 fprintf (file, "%s:\n", stub);
11452 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
11456 fprintf (file, "\tmflr r0\n");
11457 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
11458 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
11459 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
11460 lazy_ptr_name, local_label_0);
11461 fprintf (file, "\tmtlr r0\n");
11462 fprintf (file, "\tlwz r12,lo16(%s-%s)(r11)\n",
11463 lazy_ptr_name, local_label_0);
11464 fprintf (file, "\tmtctr r12\n");
11465 fprintf (file, "\taddi r11,r11,lo16(%s-%s)\n",
11466 lazy_ptr_name, local_label_0);
11467 fprintf (file, "\tbctr\n");
11470 fprintf (file, "non-pure not supported\n");
11472 machopic_lazy_symbol_ptr_section ();
11473 fprintf (file, "%s:\n", lazy_ptr_name);
11474 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
11475 fprintf (file, "\t.long dyld_stub_binding_helper\n");
11478 /* Legitimize PIC addresses. If the address is already
11479 position-independent, we return ORIG. Newly generated
11480 position-independent addresses go into a reg. This is REG if non
11481 zero, otherwise we allocate register(s) as necessary. */
11483 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
11486 rs6000_machopic_legitimize_pic_address (orig, mode, reg)
11488 enum machine_mode mode;
11493 if (reg == NULL && ! reload_in_progress && ! reload_completed)
11494 reg = gen_reg_rtx (Pmode);
11496 if (GET_CODE (orig) == CONST)
11498 if (GET_CODE (XEXP (orig, 0)) == PLUS
11499 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
11502 if (GET_CODE (XEXP (orig, 0)) == PLUS)
11505 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
11508 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
11514 if (GET_CODE (offset) == CONST_INT)
11516 if (SMALL_INT (offset))
11517 return plus_constant (base, INTVAL (offset));
11518 else if (! reload_in_progress && ! reload_completed)
11519 offset = force_reg (Pmode, offset);
11522 rtx mem = force_const_mem (Pmode, orig);
11523 return machopic_legitimize_pic_address (mem, Pmode, reg);
11526 return gen_rtx (PLUS, Pmode, base, offset);
11529 /* Fall back on generic machopic code. */
11530 return machopic_legitimize_pic_address (orig, mode, reg);
11533 /* This is just a placeholder to make linking work without having to
11534 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
11535 ever needed for Darwin (not too likely!) this would have to get a
11536 real definition. */
11543 #endif /* TARGET_MACHO */
11546 static unsigned int
11547 rs6000_elf_section_type_flags (decl, name, reloc)
11552 unsigned int flags = default_section_type_flags (decl, name, reloc);
11554 if (TARGET_RELOCATABLE)
11555 flags |= SECTION_WRITE;
11560 /* Record an element in the table of global constructors. SYMBOL is
11561 a SYMBOL_REF of the function to be called; PRIORITY is a number
11562 between 0 and MAX_INIT_PRIORITY.
11564 This differs from default_named_section_asm_out_constructor in
11565 that we have special handling for -mrelocatable. */
11568 rs6000_elf_asm_out_constructor (symbol, priority)
11572 const char *section = ".ctors";
11575 if (priority != DEFAULT_INIT_PRIORITY)
11577 sprintf (buf, ".ctors.%.5u",
11578 /* Invert the numbering so the linker puts us in the proper
11579 order; constructors are run from right to left, and the
11580 linker sorts in increasing order. */
11581 MAX_INIT_PRIORITY - priority);
11585 named_section_flags (section, SECTION_WRITE);
11586 assemble_align (POINTER_SIZE);
11588 if (TARGET_RELOCATABLE)
11590 fputs ("\t.long (", asm_out_file);
11591 output_addr_const (asm_out_file, symbol);
11592 fputs (")@fixup\n", asm_out_file);
11595 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11599 rs6000_elf_asm_out_destructor (symbol, priority)
11603 const char *section = ".dtors";
11606 if (priority != DEFAULT_INIT_PRIORITY)
11608 sprintf (buf, ".dtors.%.5u",
11609 /* Invert the numbering so the linker puts us in the proper
11610 order; constructors are run from right to left, and the
11611 linker sorts in increasing order. */
11612 MAX_INIT_PRIORITY - priority);
11616 named_section_flags (section, SECTION_WRITE);
11617 assemble_align (POINTER_SIZE);
11619 if (TARGET_RELOCATABLE)
11621 fputs ("\t.long (", asm_out_file);
11622 output_addr_const (asm_out_file, symbol);
11623 fputs (")@fixup\n", asm_out_file);
11626 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
11630 #ifdef OBJECT_FORMAT_COFF
11632 xcoff_asm_named_section (name, flags)
11634 unsigned int flags ATTRIBUTE_UNUSED;
11636 fprintf (asm_out_file, "\t.csect %s\n", name);
11640 rs6000_xcoff_select_section (exp, reloc, align)
11643 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
11645 if ((TREE_CODE (exp) == STRING_CST
11646 && ! flag_writable_strings)
11647 || (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
11648 && TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp)
11649 && DECL_INITIAL (exp)
11650 && (DECL_INITIAL (exp) == error_mark_node
11651 || TREE_CONSTANT (DECL_INITIAL (exp)))
11654 if (TREE_PUBLIC (exp))
11655 read_only_data_section ();
11657 read_only_private_data_section ();
11661 if (TREE_PUBLIC (exp))
11664 private_data_section ();
11669 rs6000_xcoff_unique_section (decl, reloc)
11677 if (TREE_CODE (decl) == FUNCTION_DECL)
11679 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
11680 len = strlen (name) + 5;
11681 string = alloca (len + 1);
11682 sprintf (string, ".%s[PR]", name);
11683 DECL_SECTION_NAME (decl) = build_string (len, string);