1 /* Test assembler support for --enable-profiling=instrument.
3 Copyright 2002, 2003 Free Software Foundation, Inc.
5 This file is part of the GNU MP Library.
7 The GNU MP Library is free software; you can redistribute it and/or modify
8 it under the terms of the GNU Lesser General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or (at your
10 option) any later version.
12 The GNU MP Library is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
15 License for more details.
17 You should have received a copy of the GNU Lesser General Public License
18 along with the GNU MP Library. If not, see http://www.gnu.org/licenses/. */
28 #if WANT_PROFILING_INSTRUMENT
30 /* This program exercises each mpn routine that might be implemented in
31 assembler. It ensures the __cyg_profile_func_enter and exit calls have
32 come out right, and that in the x86 code "ret_internal" is correctly used.  */
36 /* Changes to enter_seen done by __cyg_profile_func_enter are essentially
37 unknown to the optimizer, so must use volatile. */
38 volatile int enter_seen;
40 /* Dummy used to stop various calls going dead. */
41 unsigned long notdead;
/* Name of the mpn routine currently being exercised; printed in the
   diagnostics emitted by the enter/exit hooks.  Presumably set by pre()
   before each call — the assignment is on a line not visible here.  */
43 const char *name = "<none>";
/* Hook called by gcc -finstrument-functions on every instrumented function
   entry.  Records this_fn/call_site on the call[] stack so the matching
   __cyg_profile_func_exit can verify balanced, correctly-paired calls.
   The no_instrument_function attribute stops the hook instrumenting itself
   (which would recurse).  */
53 void __cyg_profile_func_enter __GMP_PROTO ((void *this_fn, void *call_site))
54 __attribute__ ((no_instrument_function));
/* NOTE(review): this chunk is line-sampled — the embedded line numbers jump,
   so the return type, braces, the setting of enter_seen and the ncall
   increment are on lines not visible here; code below is kept verbatim.  */
57 __cyg_profile_func_enter (void *this_fn, void *call_site)
60 printf ("%24s %p %p\n", name, this_fn, call_site);
/* Sanity-check the stack index before pushing.  */
62 ASSERT_ALWAYS (ncall >= 0);
63 ASSERT_ALWAYS (ncall <= numberof (call));
65 if (ncall >= numberof (call))
67 printf ("__cyg_profile_func_enter: oops, call stack full, from %s\n", name);
/* Push the pair for the exit hook to compare against.  */
72 call[ncall].this_fn = this_fn;
73 call[ncall].call_site = call_site;
/* Hook called by gcc -finstrument-functions on every instrumented function
   exit.  Pops the call[] stack and verifies the this_fn/call_site pair
   matches what the corresponding enter recorded; mismatches indicate the
   assembler routine's enter/exit calls came out wrong.  Must not itself be
   instrumented, hence the attribute.  */
77 void __cyg_profile_func_exit __GMP_PROTO ((void *this_fn, void *call_site))
78 __attribute__ ((no_instrument_function));
/* NOTE(review): lines are missing between the statements below (return type,
   braces, the ncall decrement); code is kept verbatim from what is visible.  */
81 __cyg_profile_func_exit (void *this_fn, void *call_site)
83 ASSERT_ALWAYS (ncall >= 0);
84 ASSERT_ALWAYS (ncall <= numberof (call));
/* More exits than enters seen — report rather than underflow the stack.  */
88 printf ("__cyg_profile_func_exit: call stack empty, from %s\n", name);
/* The popped entry must match exactly what the enter hook pushed.  */
93 if (this_fn != call[ncall].this_fn || call_site != call[ncall].call_site)
95 printf ("__cyg_profile_func_exit: unbalanced this_fn/call_site from %s\n", name);
96 printf (" this_fn got %p\n", this_fn);
97 printf (" want %p\n", call[ncall].this_fn);
98 printf (" call_site got %p\n", call_site);
99 printf (" want %p\n", call[ncall].call_site);
/* Per-routine test bookkeeping.  Called as pre("mpn_foo") before each mpn
   call; apparently sets name and snapshots state, and the checking shown
   below presumably runs after the routine returns (the surrounding lines,
   including the post-check entry point, are not visible in this chunk —
   TODO confirm against the full file).  Verifies that the instrumented
   routine actually reached __cyg_profile_func_enter and that enter/exit
   calls balanced (ncall back to its old value).  */
106 pre (const char *str)
118 printf ("did not reach __cyg_profile_func_enter from %s\n", name);
/* A non-zero delta means the routine's enter/exit calls were unbalanced.  */
122 if (ncall != old_ncall)
124 printf ("unbalance enter/exit calls from %s\n", name);
125 printf (" ncall %d\n", ncall);
126 printf (" old_ncall %d\n", old_ncall);
/* Body of the driver routine (its header is on a line not visible in this
   chunk): exercise each mpn routine that might be implemented in assembler,
   on zeroed 100-limb operands, so the enter/exit hooks can verify each one.
   Results are folded into the volatile-ish global notdead to stop the calls
   being optimized away.  NOTE(review): this chunk is line-sampled; the
   pre()/post-check calls and #endif lines paired with many of the #if
   guards fall on missing lines — code below is kept verbatim.  */
134 mp_limb_t wp[100], xp[100], yp[100];
135 mp_size_t size = 100;
/* Zero-fill operands via the reference implementation (not instrumented).  */
137 refmpn_zero (xp, size);
138 refmpn_zero (yp, size);
139 refmpn_zero (wp, size);
142 mpn_add_n (wp, xp, yp, size);
145 #if HAVE_NATIVE_mpn_add_nc
147 mpn_add_nc (wp, xp, yp, size, CNST_LIMB(0));
151 #if HAVE_NATIVE_mpn_addlsh1_n
152 pre ("mpn_addlsh1_n");
153 mpn_addlsh1_n (wp, xp, yp, size);
157 #if HAVE_NATIVE_mpn_and_n
159 mpn_and_n (wp, xp, yp, size);
163 #if HAVE_NATIVE_mpn_andn_n
165 mpn_andn_n (wp, xp, yp, size);
169 pre ("mpn_addmul_1");
170 mpn_addmul_1 (wp, xp, size, yp[0]);
173 #if HAVE_NATIVE_mpn_addmul_1c
174 pre ("mpn_addmul_1c");
175 mpn_addmul_1c (wp, xp, size, yp[0], CNST_LIMB(0));
179 #if HAVE_NATIVE_mpn_com
181 mpn_com (wp, xp, size);
185 #if HAVE_NATIVE_mpn_copyd
187 mpn_copyd (wp, xp, size);
191 #if HAVE_NATIVE_mpn_copyi
193 mpn_copyi (wp, xp, size);
197 pre ("mpn_divexact_1");
198 mpn_divexact_1 (wp, xp, size, CNST_LIMB(123));
201 pre ("mpn_divexact_by3c");
202 mpn_divexact_by3c (wp, xp, size, CNST_LIMB(0));
205 pre ("mpn_divrem_1");
206 mpn_divrem_1 (wp, (mp_size_t) 0, xp, size, CNST_LIMB(123));
209 #if HAVE_NATIVE_mpn_divrem_1c
210 pre ("mpn_divrem_1c");
211 mpn_divrem_1c (wp, (mp_size_t) 0, xp, size, CNST_LIMB(123), CNST_LIMB(122));
/* Value-returning routines accumulate into notdead so they can't go dead.  */
217 notdead += (unsigned long) mpn_gcd_1 (xp, size, CNST_LIMB(123));
221 notdead += mpn_hamdist (xp, yp, size);
224 #if HAVE_NATIVE_mpn_ior_n
226 mpn_ior_n (wp, xp, yp, size);
230 #if HAVE_NATIVE_mpn_iorn_n
232 mpn_iorn_n (wp, xp, yp, size);
237 mpn_lshift (wp, xp, size, 1);
241 notdead += mpn_mod_1 (xp, size, CNST_LIMB(123));
244 #if HAVE_NATIVE_mpn_mod_1c
246 notdead += mpn_mod_1c (xp, size, CNST_LIMB(123), CNST_LIMB(122));
/* mpn_mod_34lsub1 only exists when the limb splits into four equal parts.  */
250 #if GMP_NUMB_BITS % 4 == 0
251 pre ("mpn_mod_34lsub1");
252 notdead += mpn_mod_34lsub1 (xp, size);
256 pre ("mpn_modexact_1_odd");
257 notdead += mpn_modexact_1_odd (xp, size, CNST_LIMB(123));
260 pre ("mpn_modexact_1c_odd");
261 notdead += mpn_modexact_1c_odd (xp, size, CNST_LIMB(123), CNST_LIMB(456));
265 mpn_mul_1 (wp, xp, size, yp[0]);
268 #if HAVE_NATIVE_mpn_mul_1c
270 mpn_mul_1c (wp, xp, size, yp[0], CNST_LIMB(0));
274 #if HAVE_NATIVE_mpn_mul_2
/* size-1 keeps the 2-limb-wider product within wp's 100 limbs.  */
276 mpn_mul_2 (wp, xp, size-1, yp);
280 pre ("mpn_mul_basecase");
281 mpn_mul_basecase (wp, xp, (mp_size_t) 3, yp, (mp_size_t) 3);
284 #if HAVE_NATIVE_mpn_nand_n
286 mpn_nand_n (wp, xp, yp, size);
290 #if HAVE_NATIVE_mpn_nior_n
292 mpn_nior_n (wp, xp, yp, size);
296 pre ("mpn_popcount");
297 notdead += mpn_popcount (xp, size);
300 pre ("mpn_preinv_mod_1");
301 notdead += mpn_preinv_mod_1 (xp, size, GMP_NUMB_MAX,
302 refmpn_invert_limb (GMP_NUMB_MAX));
305 #if USE_PREINV_DIVREM_1 || HAVE_NATIVE_mpn_preinv_divrem_1
306 pre ("mpn_preinv_divrem_1");
307 mpn_preinv_divrem_1 (wp, (mp_size_t) 0, xp, size, GMP_NUMB_MAX,
308 refmpn_invert_limb (GMP_NUMB_MAX), 0);
312 #if HAVE_NATIVE_mpn_rsh1add_n
313 pre ("mpn_rsh1add_n");
314 mpn_rsh1add_n (wp, xp, yp, size);
318 #if HAVE_NATIVE_mpn_rsh1sub_n
319 pre ("mpn_rsh1sub_n");
320 mpn_rsh1sub_n (wp, xp, yp, size);
325 mpn_rshift (wp, xp, size, 1);
328 pre ("mpn_sqr_basecase");
329 mpn_sqr_basecase (wp, xp, (mp_size_t) 3);
332 pre ("mpn_submul_1");
333 mpn_submul_1 (wp, xp, size, yp[0]);
336 #if HAVE_NATIVE_mpn_submul_1c
337 pre ("mpn_submul_1c");
338 mpn_submul_1c (wp, xp, size, yp[0], CNST_LIMB(0));
343 mpn_sub_n (wp, xp, yp, size);
346 #if HAVE_NATIVE_mpn_sub_nc
348 mpn_sub_nc (wp, xp, yp, size, CNST_LIMB(0));
352 #if HAVE_NATIVE_mpn_sublsh1_n
353 pre ("mpn_sublsh1_n");
354 mpn_sublsh1_n (wp, xp, yp, size);
358 #if HAVE_NATIVE_mpn_udiv_qrnnd
359 pre ("mpn_udiv_qrnnd");
360 mpn_udiv_qrnnd (&wp[0], CNST_LIMB(122), xp[0], CNST_LIMB(123));
364 #if HAVE_NATIVE_mpn_udiv_qrnnd_r
365 pre ("mpn_udiv_qrnnd_r");
/* NOTE(review): this guard and pre() string say mpn_udiv_qrnnd_r, but the
   call below names mpn_udiv_qrnnd with the _r argument order (remainder
   pointer last) — looks like it should be mpn_udiv_qrnnd_r; confirm against
   the upstream file before changing.  */
366 mpn_udiv_qrnnd (CNST_LIMB(122), xp[0], CNST_LIMB(123), &wp[0]);
370 #if HAVE_NATIVE_mpn_umul_ppmm
371 pre ("mpn_umul_ppmm");
372 mpn_umul_ppmm (&wp[0], xp[0], yp[0]);
376 #if HAVE_NATIVE_mpn_umul_ppmm_r
377 pre ("mpn_umul_ppmm_r");
378 mpn_umul_ppmm_r (&wp[0], xp[0], yp[0]);
382 #if HAVE_NATIVE_mpn_xor_n
384 mpn_xor_n (wp, xp, yp, size);
388 #if HAVE_NATIVE_mpn_xnor_n
390 mpn_xnor_n (wp, xp, yp, size);
408 #else /* ! WANT_PROFILING_INSTRUMENT */