/* Test assembler support for --enable-profiling=instrument.

Copyright 2002, 2003 Free Software Foundation, Inc.

This file is part of the GNU MP Library test suite.

The GNU MP Library test suite is free software; you can redistribute it
and/or modify it under the terms of the GNU General Public License as
published by the Free Software Foundation; either version 3 of the License,
or (at your option) any later version.

The GNU MP Library test suite is distributed in the hope that it will be
useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
Public License for more details.

You should have received a copy of the GNU General Public License along with
the GNU MP Library test suite.  If not, see https://www.gnu.org/licenses/.  */

#include <stdio.h>
#include <stdlib.h>
#include "gmp-impl.h"
#include "longlong.h"
#include "tests.h"


#if WANT_PROFILING_INSTRUMENT

/* This program exercises each mpn routine that might be implemented in
   assembler.  It ensures the __cyg_profile_func_enter and exit calls have
   come out right, and that in the x86 code "ret_internal" is correctly used
   for PIC setups.  */


/* Changes to enter_seen done by __cyg_profile_func_enter are essentially
   unknown to the optimizer, so must use volatile.  */
volatile int  enter_seen;

/* Dummy used to stop various calls going dead.  */
unsigned long  notdead;

const char  *name = "<none>";
int  old_ncall;

struct {
  void  *this_fn;
  void  *call_site;
} call[100];
int  ncall;


/* These are the hooks inserted by the compiler's function instrumentation
   (gcc -finstrument-functions, which --enable-profiling=instrument enables),
   so they must not themselves be instrumented.  */

void __cyg_profile_func_enter (void *, void *)
  __attribute__ ((no_instrument_function));

void
__cyg_profile_func_enter (void *this_fn, void *call_site)
{
#if 0
  printf ("%24s %p %p\n", name, this_fn, call_site);
#endif
  ASSERT_ALWAYS (ncall >= 0);
  ASSERT_ALWAYS (ncall <= numberof (call));

  if (ncall >= numberof (call))
    {
      printf ("__cyg_profile_func_enter: oops, call stack full, from %s\n", name);
      abort ();
    }

  enter_seen = 1;
  call[ncall].this_fn = this_fn;
  call[ncall].call_site = call_site;
  ncall++;
}

void __cyg_profile_func_exit (void *, void *)
  __attribute__ ((no_instrument_function));

void
__cyg_profile_func_exit (void *this_fn, void *call_site)
{
  ASSERT_ALWAYS (ncall >= 0);
  ASSERT_ALWAYS (ncall <= numberof (call));

  if (ncall == 0)
    {
      printf ("__cyg_profile_func_exit: call stack empty, from %s\n", name);
      abort ();
    }

  ncall--;
  if (this_fn != call[ncall].this_fn || call_site != call[ncall].call_site)
    {
      printf ("__cyg_profile_func_exit: unbalanced this_fn/call_site from %s\n", name);
      printf ("    this_fn   got  %p\n", this_fn);
      printf ("              want %p\n", call[ncall].this_fn);
      printf ("    call_site got  %p\n", call_site);
      printf ("              want %p\n", call[ncall].call_site);
      abort ();
    }
}


void
pre (const char *str)
{
  name = str;
  enter_seen = 0;
  old_ncall = ncall;
}

void
post (void)
{
  if (! enter_seen)
    {
      printf ("did not reach __cyg_profile_func_enter from %s\n", name);
      abort ();
    }

  if (ncall != old_ncall)
    {
      printf ("unbalanced enter/exit calls from %s\n", name);
      printf ("    ncall     %d\n", ncall);
      printf ("    old_ncall %d\n", old_ncall);
      abort ();
    }
}

void
check (void)
{
  mp_limb_t  wp[100], xp[100], yp[100];
  mp_size_t  size = 100;

  refmpn_zero (xp, size);
  refmpn_zero (yp, size);
  refmpn_zero (wp, size);

  pre ("mpn_add_n");
  mpn_add_n (wp, xp, yp, size);
  post ();

#if HAVE_NATIVE_mpn_add_nc
  pre ("mpn_add_nc");
  mpn_add_nc (wp, xp, yp, size, CNST_LIMB(0));
  post ();
#endif

#if HAVE_NATIVE_mpn_addlsh1_n
  pre ("mpn_addlsh1_n");
  mpn_addlsh1_n (wp, xp, yp, size);
  post ();
#endif

#if HAVE_NATIVE_mpn_and_n
  pre ("mpn_and_n");
  mpn_and_n (wp, xp, yp, size);
  post ();
#endif

#if HAVE_NATIVE_mpn_andn_n
  pre ("mpn_andn_n");
  mpn_andn_n (wp, xp, yp, size);
  post ();
#endif

  pre ("mpn_addmul_1");
  mpn_addmul_1 (wp, xp, size, yp[0]);
  post ();

#if HAVE_NATIVE_mpn_addmul_1c
  pre ("mpn_addmul_1c");
  mpn_addmul_1c (wp, xp, size, yp[0], CNST_LIMB(0));
  post ();
#endif

#if HAVE_NATIVE_mpn_com
  pre ("mpn_com");
  mpn_com (wp, xp, size);
  post ();
#endif

#if HAVE_NATIVE_mpn_copyd
  pre ("mpn_copyd");
  mpn_copyd (wp, xp, size);
  post ();
#endif

#if HAVE_NATIVE_mpn_copyi
  pre ("mpn_copyi");
  mpn_copyi (wp, xp, size);
  post ();
#endif

  pre ("mpn_divexact_1");
  mpn_divexact_1 (wp, xp, size, CNST_LIMB(123));
  post ();

  pre ("mpn_divexact_by3c");
  mpn_divexact_by3c (wp, xp, size, CNST_LIMB(0));
  post ();

  pre ("mpn_divrem_1");
  mpn_divrem_1 (wp, (mp_size_t) 0, xp, size, CNST_LIMB(123));
  post ();

#if HAVE_NATIVE_mpn_divrem_1c
  pre ("mpn_divrem_1c");
  mpn_divrem_1c (wp, (mp_size_t) 0, xp, size, CNST_LIMB(123), CNST_LIMB(122));
  post ();
#endif

  pre ("mpn_gcd_1");
  xp[0] |= 1;
  notdead += (unsigned long) mpn_gcd_1 (xp, size, CNST_LIMB(123));
  post ();

  pre ("mpn_hamdist");
  notdead += mpn_hamdist (xp, yp, size);
  post ();

#if HAVE_NATIVE_mpn_ior_n
  pre ("mpn_ior_n");
  mpn_ior_n (wp, xp, yp, size);
  post ();
#endif

#if HAVE_NATIVE_mpn_iorn_n
  pre ("mpn_iorn_n");
  mpn_iorn_n (wp, xp, yp, size);
  post ();
#endif

  pre ("mpn_lshift");
  mpn_lshift (wp, xp, size, 1);
  post ();

  pre ("mpn_mod_1");
  notdead += mpn_mod_1 (xp, size, CNST_LIMB(123));
  post ();

#if HAVE_NATIVE_mpn_mod_1c
  pre ("mpn_mod_1c");
  notdead += mpn_mod_1c (xp, size, CNST_LIMB(123), CNST_LIMB(122));
  post ();
#endif

#if GMP_NUMB_BITS % 4 == 0
  pre ("mpn_mod_34lsub1");
  notdead += mpn_mod_34lsub1 (xp, size);
  post ();
#endif

  pre ("mpn_modexact_1_odd");
  notdead += mpn_modexact_1_odd (xp, size, CNST_LIMB(123));
  post ();

  pre ("mpn_modexact_1c_odd");
  notdead += mpn_modexact_1c_odd (xp, size, CNST_LIMB(123), CNST_LIMB(456));
  post ();

  pre ("mpn_mul_1");
  mpn_mul_1 (wp, xp, size, yp[0]);
  post ();

#if HAVE_NATIVE_mpn_mul_1c
  pre ("mpn_mul_1c");
  mpn_mul_1c (wp, xp, size, yp[0], CNST_LIMB(0));
  post ();
#endif

#if HAVE_NATIVE_mpn_mul_2
  pre ("mpn_mul_2");
  mpn_mul_2 (wp, xp, size-1, yp);
  post ();
#endif

  pre ("mpn_mul_basecase");
  mpn_mul_basecase (wp, xp, (mp_size_t) 3, yp, (mp_size_t) 3);
  post ();

#if HAVE_NATIVE_mpn_nand_n
  pre ("mpn_nand_n");
  mpn_nand_n (wp, xp, yp, size);
  post ();
#endif

#if HAVE_NATIVE_mpn_nior_n
  pre ("mpn_nior_n");
  mpn_nior_n (wp, xp, yp, size);
  post ();
#endif

  pre ("mpn_popcount");
  notdead += mpn_popcount (xp, size);
  post ();

  pre ("mpn_preinv_mod_1");
  notdead += mpn_preinv_mod_1 (xp, size, GMP_NUMB_MAX,
                               refmpn_invert_limb (GMP_NUMB_MAX));
  post ();

#if USE_PREINV_DIVREM_1 || HAVE_NATIVE_mpn_preinv_divrem_1
  pre ("mpn_preinv_divrem_1");
  mpn_preinv_divrem_1 (wp, (mp_size_t) 0, xp, size, GMP_NUMB_MAX,
                       refmpn_invert_limb (GMP_NUMB_MAX), 0);
  post ();
#endif

#if HAVE_NATIVE_mpn_rsh1add_n
  pre ("mpn_rsh1add_n");
  mpn_rsh1add_n (wp, xp, yp, size);
  post ();
#endif

#if HAVE_NATIVE_mpn_rsh1sub_n
  pre ("mpn_rsh1sub_n");
  mpn_rsh1sub_n (wp, xp, yp, size);
  post ();
#endif

  pre ("mpn_rshift");
  mpn_rshift (wp, xp, size, 1);
  post ();

  pre ("mpn_sqr_basecase");
  mpn_sqr_basecase (wp, xp, (mp_size_t) 3);
  post ();

  pre ("mpn_submul_1");
  mpn_submul_1 (wp, xp, size, yp[0]);
  post ();

#if HAVE_NATIVE_mpn_submul_1c
  pre ("mpn_submul_1c");
  mpn_submul_1c (wp, xp, size, yp[0], CNST_LIMB(0));
  post ();
#endif

  pre ("mpn_sub_n");
  mpn_sub_n (wp, xp, yp, size);
  post ();

#if HAVE_NATIVE_mpn_sub_nc
  pre ("mpn_sub_nc");
  mpn_sub_nc (wp, xp, yp, size, CNST_LIMB(0));
  post ();
#endif

#if HAVE_NATIVE_mpn_sublsh1_n
  pre ("mpn_sublsh1_n");
  mpn_sublsh1_n (wp, xp, yp, size);
  post ();
#endif

#if HAVE_NATIVE_mpn_udiv_qrnnd
  pre ("mpn_udiv_qrnnd");
  mpn_udiv_qrnnd (&wp[0], CNST_LIMB(122), xp[0], CNST_LIMB(123));
  post ();
#endif

#if HAVE_NATIVE_mpn_udiv_qrnnd_r
  pre ("mpn_udiv_qrnnd_r");
  /* the "_r" variant returns the remainder through the last argument */
  mpn_udiv_qrnnd_r (CNST_LIMB(122), xp[0], CNST_LIMB(123), &wp[0]);
  post ();
#endif

#if HAVE_NATIVE_mpn_umul_ppmm
  pre ("mpn_umul_ppmm");
  mpn_umul_ppmm (&wp[0], xp[0], yp[0]);
  post ();
#endif

#if HAVE_NATIVE_mpn_umul_ppmm_r
  pre ("mpn_umul_ppmm_r");
  /* the "_r" variant returns the low limb through the last argument */
  mpn_umul_ppmm_r (xp[0], yp[0], &wp[0]);
  post ();
#endif

#if HAVE_NATIVE_mpn_xor_n
  pre ("mpn_xor_n");
  mpn_xor_n (wp, xp, yp, size);
  post ();
#endif

#if HAVE_NATIVE_mpn_xnor_n
  pre ("mpn_xnor_n");
  mpn_xnor_n (wp, xp, yp, size);
  post ();
#endif
}


int
main (void)
{
  tests_start ();

  check ();

  tests_end ();
  exit (0);
}


#else /* ! WANT_PROFILING_INSTRUMENT */

int
main (void)
{
  exit (0);
}

#endif