/*    pp_hot.c
 *
 *    Copyright (C) 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
 *    2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 by Larry Wall and others
 *
 *    You may distribute under the terms of either the GNU General Public
 *    License or the Artistic License, as specified in the README file.
 *
 */

/*
 * Then he heard Merry change the note, and up went the Horn-cry of Buckland,
 * shaking the air.
 *
 *          Awake! Awake! Fear, Fire, Foes! Awake!
 *                  Fire, Foes! Awake!
 *
 *     [p.1007 of _The Lord of the Rings_, VI/viii: "The Scouring of the Shire"]
 */

/* This file contains 'hot' pp ("push/pop") functions that
 * execute the opcodes that make up a perl program. A typical pp function
 * expects to find its arguments on the stack, and usually pushes its
 * results onto the stack, hence the 'pp' terminology. Each OP structure
 * contains a pointer to the relevant pp_foo() function.
 *
 * By 'hot', we mean common ops whose execution speed is critical.
 * By gathering them together into a single file, we encourage
 * CPU cache hits on hot code. Also it could be taken as a warning not to
 * change any code in this file unless you're sure it won't affect
 * performance.
 */

#include "EXTERN.h"
#define PERL_IN_PP_HOT_C
#include "perl.h"
#include "regcomp.h"

/* Hot code. */

PP(pp_const)
{
    dSP;
    XPUSHs(cSVOP_sv);
    RETURN;
}

PP(pp_nextstate)
{
    PL_curcop = (COP*)PL_op;
    TAINT_NOT;          /* Each statement is presumed innocent */
    PL_stack_sp = PL_stack_base + CX_CUR()->blk_oldsp;
    FREETMPS;
    PERL_ASYNC_CHECK();
    return NORMAL;
}

PP(pp_gvsv)
{
    dSP;
    EXTEND(SP,1);
    if (UNLIKELY(PL_op->op_private & OPpLVAL_INTRO))
        PUSHs(save_scalar(cGVOP_gv));
    else
        PUSHs(GvSVn(cGVOP_gv));
    RETURN;
}


/* also used for: pp_lineseq() pp_regcmaybe() pp_scalar() pp_scope() */

PP(pp_null)
{
    return NORMAL;
}

/* This is sometimes called directly by pp_coreargs, pp_grepstart and
   amagic_call. */
PP(pp_pushmark)
{
    PUSHMARK(PL_stack_sp);
    return NORMAL;
}

PP(pp_stringify)
{
    dSP; dTARGET;
    SV * const sv = TOPs;
    SETs(TARG);
    sv_copypv(TARG, sv);
    SvSETMAGIC(TARG);
    /* no PUTBACK, SETs doesn't inc/dec SP */
    return NORMAL;
}

PP(pp_gv)
{
    dSP;
    XPUSHs(MUTABLE_SV(cGVOP_gv));
    RETURN;
}


/* also used for: pp_andassign() */

PP(pp_and)
{
    PERL_ASYNC_CHECK();
    {
        /* SP is deliberately not used here, to avoid a variable that would
           have to be saved across the sv_2bool_flags call in SvTRUE_NN. If
           a RISC/CISC, register-count, or load/store vs direct-memory-ops
           macro is ever introduced, this should become a conditional block
           choosing between direct PL_stack_sp and dSP operations; at
           present, using PL_stack_sp directly is biased towards CISC cpus. */
        SV * const sv = *PL_stack_sp;
        if (!SvTRUE_NN(sv))
            return NORMAL;
        else {
            if (PL_op->op_type == OP_AND)
                --PL_stack_sp;
            return cLOGOP->op_other;
        }
    }
}

PP(pp_sassign)
{
    dSP;
    /* sassign keeps its args in the optree traditionally backwards.
       So we pop them differently.
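       For example, in '$x = $y' the op for $y (the RHS) runs first and
       pushes its value, then the op for $x pushes the assignment target,
       so the target is the topmost stack item: POPs below yields the
       target ('left') and TOPs the value being assigned ('right').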
    */
    SV *left = POPs; SV *right = TOPs;

    if (PL_op->op_private & OPpASSIGN_BACKWARDS) { /* {or,and,dor}assign */
        SV * const temp = left;
        left = right; right = temp;
    }
    assert(TAINTING_get || !TAINT_get);
    if (UNLIKELY(TAINT_get) && !SvTAINTED(right))
        TAINT_NOT;
    if (UNLIKELY(PL_op->op_private & OPpASSIGN_CV_TO_GV)) {
        /* *foo =\&bar */
        SV * const cv = SvRV(right);
        const U32 cv_type = SvTYPE(cv);
        const bool is_gv = isGV_with_GP(left);
        const bool got_coderef = cv_type == SVt_PVCV || cv_type == SVt_PVFM;

        if (!got_coderef) {
            assert(SvROK(cv));
        }

        /* Can do the optimisation if left (LVALUE) is not a typeglob,
           right (RVALUE) is a reference to something, and we're in void
           context. */
        if (!got_coderef && !is_gv && GIMME_V == G_VOID) {
            /* Is the target symbol table currently empty? */
            GV * const gv = gv_fetchsv_nomg(left, GV_NOINIT, SVt_PVGV);
            if (SvTYPE(gv) != SVt_PVGV && !SvOK(gv)) {
                /* Good. Create a new proxy constant subroutine in the target.
                   The gv becomes a(nother) reference to the constant. */
                SV *const value = SvRV(cv);

                SvUPGRADE(MUTABLE_SV(gv), SVt_IV);
                SvPCS_IMPORTED_on(gv);
                SvRV_set(gv, value);
                SvREFCNT_inc_simple_void(value);
                SETs(left);
                RETURN;
            }
        }

        /* Need to fix things up. */
        if (!is_gv) {
            /* Need to fix GV. */
            left = MUTABLE_SV(gv_fetchsv_nomg(left, GV_ADD, SVt_PVGV));
        }

        if (!got_coderef) {
            /* We've been returned a constant rather than a full subroutine,
               but they expect a subroutine reference to apply. */
            if (SvROK(cv)) {
                ENTER_with_name("sassign_coderef");
                SvREFCNT_inc_void(SvRV(cv));
                /* newCONSTSUB takes a reference count on the passed in SV
                   from us. We set the name to NULL, otherwise we get into
                   all sorts of fun as the reference to our new sub is
                   donated to the GV that we're about to assign to.
                */
                SvRV_set(right, MUTABLE_SV(newCONSTSUB(GvSTASH(left), NULL,
                                                       SvRV(cv))));
                SvREFCNT_dec_NN(cv);
                LEAVE_with_name("sassign_coderef");
            } else {
                /* What can happen for the corner case *{"BONK"} = \&{"BONK"};
                   is that
                   First:  ops for \&{"BONK"}; return us the constant in the
                           symbol table
                   Second: ops for *{"BONK"} cause that symbol table entry
                           (and our reference to it) to be upgraded from RV
                           to typeglob
                   Third:  We get here. cv is actually PVGV now, and its
                           GvCV() is actually the subroutine we're looking for

                   So change the reference so that it points to the subroutine
                   of that typeglob, as that's what they were after all along.
                */
                GV *const upgraded = MUTABLE_GV(cv);
                CV *const source = GvCV(upgraded);

                assert(source);
                assert(CvFLAGS(source) & CVf_CONST);

                SvREFCNT_inc_simple_void_NN(source);
                SvREFCNT_dec_NN(upgraded);
                SvRV_set(right, MUTABLE_SV(source));
            }
        }

    }
    if (
      UNLIKELY(SvTEMP(left)) && !SvSMAGICAL(left) && SvREFCNT(left) == 1 &&
      (!isGV_with_GP(left) || SvFAKE(left)) && ckWARN(WARN_MISC)
    )
        Perl_warner(aTHX_
            packWARN(WARN_MISC), "Useless assignment to a temporary"
        );
    SvSetMagicSV(left, right);
    SETs(left);
    RETURN;
}

PP(pp_cond_expr)
{
    dSP;
    SV *sv;

    PERL_ASYNC_CHECK();
    sv = POPs;
    RETURNOP(SvTRUE_NN(sv) ?
cLOGOP->op_other : cLOGOP->op_next); 240 } 241 242 PP(pp_unstack) 243 { 244 PERL_CONTEXT *cx; 245 PERL_ASYNC_CHECK(); 246 TAINT_NOT; /* Each statement is presumed innocent */ 247 cx = CX_CUR(); 248 PL_stack_sp = PL_stack_base + cx->blk_oldsp; 249 FREETMPS; 250 if (!(PL_op->op_flags & OPf_SPECIAL)) { 251 assert(CxTYPE(cx) == CXt_BLOCK || CxTYPE_is_LOOP(cx)); 252 CX_LEAVE_SCOPE(cx); 253 } 254 return NORMAL; 255 } 256 257 258 /* The main body of pp_concat, not including the magic/overload and 259 * stack handling. 260 * It does targ = left . right. 261 * Moved into a separate function so that pp_multiconcat() can use it 262 * too. 263 */ 264 265 PERL_STATIC_INLINE void 266 S_do_concat(pTHX_ SV *left, SV *right, SV *targ, U8 targmy) 267 { 268 bool lbyte; 269 STRLEN rlen; 270 const char *rpv = NULL; 271 bool rbyte = FALSE; 272 bool rcopied = FALSE; 273 274 if (TARG == right && right != left) { /* $r = $l.$r */ 275 rpv = SvPV_nomg_const(right, rlen); 276 rbyte = !DO_UTF8(right); 277 right = newSVpvn_flags(rpv, rlen, SVs_TEMP); 278 rpv = SvPV_const(right, rlen); /* no point setting UTF-8 here */ 279 rcopied = TRUE; 280 } 281 282 if (TARG != left) { /* not $l .= $r */ 283 STRLEN llen; 284 const char* const lpv = SvPV_nomg_const(left, llen); 285 lbyte = !DO_UTF8(left); 286 sv_setpvn(TARG, lpv, llen); 287 if (!lbyte) 288 SvUTF8_on(TARG); 289 else 290 SvUTF8_off(TARG); 291 } 292 else { /* $l .= $r and left == TARG */ 293 if (!SvOK(left)) { 294 if ((left == right /* $l .= $l */ 295 || targmy) /* $l = $l . $r */ 296 && ckWARN(WARN_UNINITIALIZED) 297 ) 298 report_uninit(left); 299 SvPVCLEAR(left); 300 } 301 else { 302 SvPV_force_nomg_nolen(left); 303 } 304 lbyte = !DO_UTF8(left); 305 if (IN_BYTES) 306 SvUTF8_off(left); 307 } 308 309 if (!rcopied) { 310 rpv = SvPV_nomg_const(right, rlen); 311 rbyte = !DO_UTF8(right); 312 } 313 if (lbyte != rbyte) { 314 if (lbyte) 315 sv_utf8_upgrade_nomg(TARG); 316 else { 317 if (!rcopied) 318 right = newSVpvn_flags(rpv, rlen, SVs_TEMP); 319 sv_utf8_upgrade_nomg(right); 320 rpv = SvPV_nomg_const(right, rlen); 321 } 322 } 323 sv_catpvn_nomg(TARG, rpv, rlen); 324 SvSETMAGIC(TARG); 325 } 326 327 328 PP(pp_concat) 329 { 330 dSP; dATARGET; tryAMAGICbin_MG(concat_amg, AMGf_assign); 331 { 332 dPOPTOPssrl; 333 S_do_concat(aTHX_ left, right, targ, PL_op->op_private & OPpTARGET_MY); 334 SETs(TARG); 335 RETURN; 336 } 337 } 338 339 340 /* pp_multiconcat() 341 342 Concatenate one or more args, possibly interleaved with constant string 343 segments. The result may be assigned to, or appended to, a variable or 344 expression. 345 346 Several op_flags and/or op_private bits indicate what the target is, and 347 whether it's appended to. Valid permutations are: 348 349 - (PADTMP) = (A.B.C....) 350 OPpTARGET_MY $lex = (A.B.C....) 351 OPpTARGET_MY,OPpLVAL_INTRO my $lex = (A.B.C....) 352 OPpTARGET_MY,OPpMULTICONCAT_APPEND $lex .= (A.B.C....) 353 OPf_STACKED expr = (A.B.C....) 354 OPf_STACKED,OPpMULTICONCAT_APPEND expr .= (A.B.C....) 355 356 Other combinations like (A.B).(C.D) are not optimised into a multiconcat 357 op, as it's too hard to get the correct ordering of ties, overload etc. 358 359 In addition: 360 361 OPpMULTICONCAT_FAKE: not a real concat, instead an optimised 362 sprintf "...%s...". Don't call '.' 363 overloading: only use '""' overloading. 364 365 OPpMULTICONCAT_STRINGIFY: the RHS was of the form 366 "...$a...$b..." rather than 367 "..." . $a . "..." . $b . "..." 368 369 An OP_MULTICONCAT is of type UNOP_AUX. 
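   (As a quick illustration, not part of the implementation: the effect of
   this optimisation can be inspected with B::Concise, e.g.

       perl -MO=Concise -e '$lex = "a=$a b=$b\n"'

   which, on perls where the optimisation is enabled, shows a single
   'multiconcat' op in place of the chain of concat/const ops that older
   perls emitted.)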
The fixed slots of the aux array are 370 defined with PERL_MULTICONCAT_IX_FOO constants, where: 371 372 373 FOO index description 374 -------- ----- ---------------------------------- 375 NARGS 0 number of arguments 376 PLAIN_PV 1 non-utf8 constant string 377 PLAIN_LEN 2 non-utf8 constant string length 378 UTF8_PV 3 utf8 constant string 379 UTF8_LEN 4 utf8 constant string length 380 LENGTHS 5 first of nargs+1 const segment lengths 381 382 The idea is that a general string concatenation will have a fixed (known 383 at compile time) number of variable args, interspersed with constant 384 strings, e.g. "a=$a b=$b\n" 385 386 All the constant string segments "a=", " b=" and "\n" are stored as a 387 single string "a= b=\n", pointed to from the PLAIN_PV/UTF8_PV slot, along 388 with a series of segment lengths: e.g. 2,3,1. In the case where the 389 constant string is plain but has a different utf8 representation, both 390 variants are stored, and two sets of (nargs+1) segments lengths are stored 391 in the slots beginning at PERL_MULTICONCAT_IX_LENGTHS. 392 393 A segment length of -1 indicates that there is no constant string at that 394 point; this distinguishes between e.g. ($a . $b) and ($a . "" . $b), which 395 have differing overloading behaviour. 396 397 */ 398 399 PP(pp_multiconcat) 400 { 401 dSP; 402 SV *targ; /* The SV to be assigned or appended to */ 403 char *targ_pv; /* where within SvPVX(targ) we're writing to */ 404 STRLEN targ_len; /* SvCUR(targ) */ 405 SV **toparg; /* the highest arg position on the stack */ 406 UNOP_AUX_item *aux; /* PL_op->op_aux buffer */ 407 UNOP_AUX_item *const_lens; /* the segment length array part of aux */ 408 const char *const_pv; /* the current segment of the const string buf */ 409 SSize_t nargs; /* how many args were expected */ 410 SSize_t stack_adj; /* how much to adjust SP on return */ 411 STRLEN grow; /* final size of destination string (targ) */ 412 UV targ_count; /* how many times targ has appeared on the RHS */ 413 bool is_append; /* OPpMULTICONCAT_APPEND flag is set */ 414 bool slow_concat; /* args too complex for quick concat */ 415 U32 dst_utf8; /* the result will be utf8 (indicate this with 416 SVf_UTF8 in a U32, rather than using bool, 417 for ease of testing and setting) */ 418 /* for each arg, holds the result of an SvPV() call */ 419 struct multiconcat_svpv { 420 char *pv; 421 SSize_t len; 422 } 423 *targ_chain, /* chain of slots where targ has appeared on RHS */ 424 *svpv_p, /* ptr for looping through svpv_buf */ 425 *svpv_base, /* first slot (may be greater than svpv_buf), */ 426 *svpv_end, /* and slot after highest result so far, of: */ 427 svpv_buf[PERL_MULTICONCAT_MAXARG]; /* buf for storing SvPV() results */ 428 429 aux = cUNOP_AUXx(PL_op)->op_aux; 430 stack_adj = nargs = aux[PERL_MULTICONCAT_IX_NARGS].ssize; 431 is_append = cBOOL(PL_op->op_private & OPpMULTICONCAT_APPEND); 432 433 /* get targ from the stack or pad */ 434 435 if (PL_op->op_flags & OPf_STACKED) { 436 if (is_append) { 437 /* for 'expr .= ...', expr is the bottom item on the stack */ 438 targ = SP[-nargs]; 439 stack_adj++; 440 } 441 else 442 /* for 'expr = ...', expr is the top item on the stack */ 443 targ = POPs; 444 } 445 else { 446 SV **svp = &(PAD_SVl(PL_op->op_targ)); 447 targ = *svp; 448 if (PL_op->op_private & OPpLVAL_INTRO) { 449 assert(PL_op->op_private & OPpTARGET_MY); 450 save_clearsv(svp); 451 } 452 if (!nargs) 453 /* $lex .= "const" doesn't cause anything to be pushed */ 454 EXTEND(SP,1); 455 } 456 457 toparg = SP; 458 SP -= (nargs - 1); 459 grow = 1; /* 
allow for '\0' at minimum */
    targ_count = 0;
    targ_chain = NULL;
    targ_len = 0;
    svpv_end = svpv_buf;
    /* only utf8 variants of the const strings? */
    dst_utf8 = aux[PERL_MULTICONCAT_IX_PLAIN_PV].pv ? 0 : SVf_UTF8;


    /* --------------------------------------------------------------
     * Phase 1:
     *
     * stringify (i.e. SvPV()) every arg and store the resultant pv/len/utf8
     * triplets in svpv_buf[]. Also increment 'grow' by the args' lengths.
     *
     * utf8 is indicated by storing a negative length.
     *
     * Where an arg is actually targ, the stringification is deferred:
     * the length is set to 0, and the slot is added to targ_chain.
     *
     * If a magic, overloaded, or otherwise weird arg is found, which
     * might have side effects when stringified, the loop is abandoned and
     * we goto a code block where a more basic 'emulate calling
     * pp_concat() on each arg in turn' is done.
     */

    for (; SP <= toparg; SP++, svpv_end++) {
        U32 utf8;
        STRLEN len;
        SV *sv;

        assert(svpv_end - svpv_buf < PERL_MULTICONCAT_MAXARG);

        sv = *SP;

        /* this if/else chain is arranged so that common/simple cases
         * take few conditionals */

        if (LIKELY((SvFLAGS(sv) & (SVs_GMG|SVf_ROK|SVf_POK)) == SVf_POK)) {
            /* common case: sv is a simple non-magical PV */
            if (targ == sv) {
                /* targ appears on RHS.
                 * Delay storing PV pointer; instead, add slot to targ_chain
                 * so it can be populated later, after targ has been grown and
                 * we know its final SvPVX() address.
                 */
              targ_on_rhs:
                svpv_end->len = 0;  /* zeroing here means we can skip
                                       updating later if targ_len == 0 */
                svpv_end->pv  = (char*)targ_chain;
                targ_chain    = svpv_end;
                targ_count++;
                continue;
            }

            len = SvCUR(sv);
            svpv_end->pv = SvPVX(sv);
        }
        else if (UNLIKELY(SvFLAGS(sv) & (SVs_GMG|SVf_ROK)))
            /* may have side effects: tie, overload etc.
             * Abandon 'stringify everything first' and handle
             * args in strict order. Note that already-stringified args
             * will be reprocessed, which is safe: only args without side
             * effects were stringified, so doing it again is idempotent.
             */
            goto do_magical;
        else if (SvNIOK(sv)) {
            if (targ == sv)
                goto targ_on_rhs;
            /* stringify general valid scalar */
            svpv_end->pv = sv_2pv_flags(sv, &len, 0);
        }
        else if (!SvOK(sv)) {
            if (ckWARN(WARN_UNINITIALIZED))
                /* an undef value in the presence of warnings may trigger
                 * side effects */
                goto do_magical;
            svpv_end->pv = (char*)"";
            len = 0;
        }
        else
            goto do_magical; /* something weird */

        utf8 = (SvFLAGS(sv) & SVf_UTF8);
        dst_utf8 |= utf8;
        ASSUME(len < SSize_t_MAX);
        svpv_end->len = utf8 ? -(SSize_t)len : (SSize_t)len;
        grow += len;
    }

    /* --------------------------------------------------------------
     * Phase 2:
     *
     * Stringify targ:
     *
     * if targ appears on the RHS or is appended to, force stringify it;
     * otherwise set it to "". Then set targ_len.
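     *
     * For example (illustrative cases only): for '$x .= "a$y"' targ is $x
     * and is_append is true, so $x must be stringified first; for
     * '$x = "a$x"' targ also appears on the RHS and likewise needs
     * stringifying; for a plain 'my $z = "a$y"' targ is simply reset to "".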
556 */ 557 558 if (is_append) { 559 /* abandon quick route if using targ might have side effects */ 560 if (UNLIKELY(SvFLAGS(targ) & (SVs_GMG|SVf_ROK))) 561 goto do_magical; 562 563 if (SvOK(targ)) { 564 U32 targ_utf8; 565 stringify_targ: 566 SvPV_force_nomg_nolen(targ); 567 targ_utf8 = SvFLAGS(targ) & SVf_UTF8; 568 if (UNLIKELY(dst_utf8 & ~targ_utf8)) { 569 if (LIKELY(!IN_BYTES)) 570 sv_utf8_upgrade_nomg(targ); 571 } 572 else 573 dst_utf8 |= targ_utf8; 574 575 targ_len = SvCUR(targ); 576 grow += targ_len * (targ_count + is_append); 577 goto phase3; 578 } 579 else if (ckWARN(WARN_UNINITIALIZED)) 580 /* warning might have side effects */ 581 goto do_magical; 582 /* the undef targ will be silently SvPVCLEAR()ed below */ 583 } 584 else if (UNLIKELY(SvTYPE(targ) >= SVt_REGEXP)) { 585 /* Assigning to some weird LHS type. Don't force the LHS to be an 586 * empty string; instead, do things 'long hand' by using the 587 * overload code path, which concats to a TEMP sv and does 588 * sv_catsv() calls rather than COPY()s. This ensures that even 589 * bizarre code like this doesn't break or crash: 590 * *F = *F . *F. 591 * (which makes the 'F' typeglob an alias to the 592 * '*main::F*main::F' typeglob). 593 */ 594 goto do_magical; 595 } 596 else if (targ_chain) 597 /* targ was found on RHS. 598 * Force stringify it, using the same code as the append branch 599 * above, except that we don't need the magic/overload/undef 600 * checks as these will already have been done in the phase 1 601 * loop. 602 */ 603 goto stringify_targ; 604 605 /* unrolled SvPVCLEAR() - mostly: no need to grow or set SvCUR() to 0; 606 * those will be done later. */ 607 SV_CHECK_THINKFIRST_COW_DROP(targ); 608 SvUPGRADE(targ, SVt_PV); 609 SvFLAGS(targ) &= ~(SVf_OK|SVf_IVisUV|SVf_UTF8); 610 SvFLAGS(targ) |= (SVf_POK|SVp_POK|dst_utf8); 611 612 phase3: 613 614 /* -------------------------------------------------------------- 615 * Phase 3: 616 * 617 * UTF-8 tweaks and grow targ: 618 * 619 * Now that we know the length and utf8-ness of both the targ and 620 * args, grow targ to the size needed to accumulate all the args, based 621 * on whether targ appears on the RHS, whether we're appending, and 622 * whether any non-utf8 args expand in size if converted to utf8. 623 * 624 * For the latter, if dst_utf8 we scan non-utf8 args looking for 625 * variant chars, and adjust the svpv->len value of those args to the 626 * utf8 size and negate it to flag them. At the same time we un-negate 627 * the lens of any utf8 args since after this phase we no longer care 628 * whether an arg is utf8 or not. 629 * 630 * Finally, initialise const_lens and const_pv based on utf8ness. 631 * Note that there are 3 permutations: 632 * 633 * * If the constant string is invariant whether utf8 or not (e.g. "abc"), 634 * then aux[PERL_MULTICONCAT_IX_PLAIN_PV/LEN] are the same as 635 * aux[PERL_MULTICONCAT_IX_UTF8_PV/LEN] and there is one set of 636 * segment lengths. 637 * 638 * * If the string is fully utf8, e.g. "\x{100}", then 639 * aux[PERL_MULTICONCAT_IX_PLAIN_PV/LEN] == (NULL,0) and there is 640 * one set of segment lengths. 641 * 642 * * If the string has different plain and utf8 representations 643 * (e.g. "\x80"), then aux[PERL_MULTICONCAT_IX_PLAIN_PV/LEN]] 644 * holds the plain rep, while aux[PERL_MULTICONCAT_IX_UTF8_PV/LEN] 645 * holds the utf8 rep, and there are 2 sets of segment lengths, 646 * with the utf8 set following after the plain set. 
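     *
     *   (Concretely, for the "\x80" example in the last case: the plain rep
     *   is the single byte 0x80 with one set of segment lengths, while the
     *   utf8 rep is the two bytes 0xC2 0x80 with its own set of lengths, so
     *   both PVs and both length sets are stored.)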
647 * 648 * On entry to this section the (pv,len) pairs in svpv_buf have the 649 * following meanings: 650 * (pv, len) a plain string 651 * (pv, -len) a utf8 string 652 * (NULL, 0) left-most targ \ linked together R-to-L 653 * (next, 0) other targ / in targ_chain 654 */ 655 656 /* turn off utf8 handling if 'use bytes' is in scope */ 657 if (UNLIKELY(dst_utf8 && IN_BYTES)) { 658 dst_utf8 = 0; 659 SvUTF8_off(targ); 660 /* undo all the negative lengths which flag utf8-ness */ 661 for (svpv_p = svpv_buf; svpv_p < svpv_end; svpv_p++) { 662 SSize_t len = svpv_p->len; 663 if (len < 0) 664 svpv_p->len = -len; 665 } 666 } 667 668 /* grow += total of lengths of constant string segments */ 669 { 670 SSize_t len; 671 len = aux[dst_utf8 ? PERL_MULTICONCAT_IX_UTF8_LEN 672 : PERL_MULTICONCAT_IX_PLAIN_LEN].ssize; 673 slow_concat = cBOOL(len); 674 grow += len; 675 } 676 677 const_lens = aux + PERL_MULTICONCAT_IX_LENGTHS; 678 679 if (dst_utf8) { 680 const_pv = aux[PERL_MULTICONCAT_IX_UTF8_PV].pv; 681 if ( aux[PERL_MULTICONCAT_IX_PLAIN_PV].pv 682 && const_pv != aux[PERL_MULTICONCAT_IX_PLAIN_PV].pv) 683 /* separate sets of lengths for plain and utf8 */ 684 const_lens += nargs + 1; 685 686 /* If the result is utf8 but some of the args aren't, 687 * calculate how much extra growth is needed for all the chars 688 * which will expand to two utf8 bytes. 689 * Also, if the growth is non-zero, negate the length to indicate 690 * that this is a variant string. Conversely, un-negate the 691 * length on utf8 args (which was only needed to flag non-utf8 692 * args in this loop */ 693 for (svpv_p = svpv_buf; svpv_p < svpv_end; svpv_p++) { 694 SSize_t len, extra; 695 696 len = svpv_p->len; 697 if (len <= 0) { 698 svpv_p->len = -len; 699 continue; 700 } 701 702 extra = variant_under_utf8_count((U8 *) svpv_p->pv, 703 (U8 *) svpv_p->pv + len); 704 if (UNLIKELY(extra)) { 705 grow += extra; 706 /* -ve len indicates special handling */ 707 svpv_p->len = -(len + extra); 708 slow_concat = TRUE; 709 } 710 } 711 } 712 else 713 const_pv = aux[PERL_MULTICONCAT_IX_PLAIN_PV].pv; 714 715 /* unrolled SvGROW(), except don't check for SVf_IsCOW, which should 716 * already have been dropped */ 717 assert(!SvIsCOW(targ)); 718 targ_pv = (SvLEN(targ) < (grow) ? sv_grow(targ,grow) : SvPVX(targ)); 719 720 721 /* -------------------------------------------------------------- 722 * Phase 4: 723 * 724 * Now that targ has been grown, we know the final address of the targ 725 * PVX, if needed. Preserve / move targ contents if appending or if 726 * targ appears on RHS. 727 * 728 * Also update svpv_buf slots in targ_chain. 729 * 730 * Don't bother with any of this if the target length is zero: 731 * targ_len is set to zero unless we're appending or targ appears on 732 * RHS. And even if it is, we can optimise by skipping this chunk of 733 * code for zero targ_len. In the latter case, we don't need to update 734 * the slots in targ_chain with the (zero length) target string, since 735 * we set the len in such slots to 0 earlier, and since the Copy() is 736 * skipped on zero length, it doesn't matter what svpv_p->pv contains. 
737 * 738 * On entry to this section the (pv,len) pairs in svpv_buf have the 739 * following meanings: 740 * (pv, len) a pure-plain or utf8 string 741 * (pv, -(len+extra)) a plain string which will expand by 'extra' 742 * bytes when converted to utf8 743 * (NULL, 0) left-most targ \ linked together R-to-L 744 * (next, 0) other targ / in targ_chain 745 * 746 * On exit, the targ contents will have been moved to the 747 * earliest place they are needed (e.g. $x = "abc$x" will shift them 748 * 3 bytes, while $x .= ... will leave them at the beginning); 749 * and dst_pv will point to the location within SvPVX(targ) where the 750 * next arg should be copied. 751 */ 752 753 svpv_base = svpv_buf; 754 755 if (targ_len) { 756 struct multiconcat_svpv *tc_stop; 757 char *targ_buf = targ_pv; /* ptr to original targ string */ 758 759 assert(is_append || targ_count); 760 761 if (is_append) { 762 targ_pv += targ_len; 763 tc_stop = NULL; 764 } 765 else { 766 /* The targ appears on RHS, e.g. '$t = $a . $t . $t'. 767 * Move the current contents of targ to the first 768 * position where it's needed, and use that as the src buffer 769 * for any further uses (such as the second RHS $t above). 770 * In calculating the first position, we need to sum the 771 * lengths of all consts and args before that. 772 */ 773 774 UNOP_AUX_item *lens = const_lens; 775 /* length of first const string segment */ 776 STRLEN offset = lens->ssize > 0 ? lens->ssize : 0; 777 778 assert(targ_chain); 779 svpv_p = svpv_base; 780 781 for (;;) { 782 SSize_t len; 783 if (!svpv_p->pv) 784 break; /* the first targ argument */ 785 /* add lengths of the next arg and const string segment */ 786 len = svpv_p->len; 787 if (len < 0) /* variant args have this */ 788 len = -len; 789 offset += (STRLEN)len; 790 len = (++lens)->ssize; 791 offset += (len >= 0) ? (STRLEN)len : 0; 792 if (!offset) { 793 /* all args and consts so far are empty; update 794 * the start position for the concat later */ 795 svpv_base++; 796 const_lens++; 797 } 798 svpv_p++; 799 assert(svpv_p < svpv_end); 800 } 801 802 if (offset) { 803 targ_buf += offset; 804 Move(targ_pv, targ_buf, targ_len, char); 805 /* a negative length implies don't Copy(), but do increment */ 806 svpv_p->len = -((SSize_t)targ_len); 807 slow_concat = TRUE; 808 } 809 else { 810 /* skip the first targ copy */ 811 svpv_base++; 812 const_lens++; 813 targ_pv += targ_len; 814 } 815 816 /* Don't populate the first targ slot in the loop below; it's 817 * either not used because we advanced svpv_base beyond it, or 818 * we already stored the special -targ_len value in it 819 */ 820 tc_stop = svpv_p; 821 } 822 823 /* populate slots in svpv_buf representing targ on RHS */ 824 while (targ_chain != tc_stop) { 825 struct multiconcat_svpv *p = targ_chain; 826 targ_chain = (struct multiconcat_svpv *)(p->pv); 827 p->pv = targ_buf; 828 p->len = (SSize_t)targ_len; 829 } 830 } 831 832 833 /* -------------------------------------------------------------- 834 * Phase 5: 835 * 836 * Append all the args in svpv_buf, plus the const strings, to targ. 837 * 838 * On entry to this section the (pv,len) pairs in svpv_buf have the 839 * following meanings: 840 * (pv, len) a pure-plain or utf8 string (which may be targ) 841 * (pv, -(len+extra)) a plain string which will expand by 'extra' 842 * bytes when converted to utf8 843 * (0, -len) left-most targ, whose content has already 844 * been copied. Just advance targ_pv by len. 
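     *
     * As a worked illustration (example values chosen arbitrarily): for
     * $x = "-$a-$b-" with $a eq "foo" and $b eq "bar", the slow-path loop
     * below alternates const segments and args, copying "-", "foo", "-",
     * "bar", "-" into targ in that order.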
845 */ 846 847 /* If there are no constant strings and no special case args 848 * (svpv_p->len < 0), use a simpler, more efficient concat loop 849 */ 850 if (!slow_concat) { 851 for (svpv_p = svpv_base; svpv_p < svpv_end; svpv_p++) { 852 SSize_t len = svpv_p->len; 853 if (!len) 854 continue; 855 Copy(svpv_p->pv, targ_pv, len, char); 856 targ_pv += len; 857 } 858 const_lens += (svpv_end - svpv_base + 1); 859 } 860 else { 861 /* Note that we iterate the loop nargs+1 times: to append nargs 862 * arguments and nargs+1 constant strings. For example, "-$a-$b-" 863 */ 864 svpv_p = svpv_base - 1; 865 866 for (;;) { 867 SSize_t len = (const_lens++)->ssize; 868 869 /* append next const string segment */ 870 if (len > 0) { 871 Copy(const_pv, targ_pv, len, char); 872 targ_pv += len; 873 const_pv += len; 874 } 875 876 if (++svpv_p == svpv_end) 877 break; 878 879 /* append next arg */ 880 len = svpv_p->len; 881 882 if (LIKELY(len > 0)) { 883 Copy(svpv_p->pv, targ_pv, len, char); 884 targ_pv += len; 885 } 886 else if (UNLIKELY(len < 0)) { 887 /* negative length indicates two special cases */ 888 const char *p = svpv_p->pv; 889 len = -len; 890 if (UNLIKELY(p)) { 891 /* copy plain-but-variant pv to a utf8 targ */ 892 char * end_pv = targ_pv + len; 893 assert(dst_utf8); 894 while (targ_pv < end_pv) { 895 U8 c = (U8) *p++; 896 append_utf8_from_native_byte(c, (U8**)&targ_pv); 897 } 898 } 899 else 900 /* arg is already-copied targ */ 901 targ_pv += len; 902 } 903 904 } 905 } 906 907 *targ_pv = '\0'; 908 SvCUR_set(targ, targ_pv - SvPVX(targ)); 909 assert(grow >= SvCUR(targ) + 1); 910 assert(SvLEN(targ) >= SvCUR(targ) + 1); 911 912 /* -------------------------------------------------------------- 913 * Phase 6: 914 * 915 * return result 916 */ 917 918 SP -= stack_adj; 919 SvTAINT(targ); 920 SETTARG; 921 RETURN; 922 923 /* -------------------------------------------------------------- 924 * Phase 7: 925 * 926 * We only get here if any of the args (or targ too in the case of 927 * append) have something which might cause side effects, such 928 * as magic, overload, or an undef value in the presence of warnings. 929 * In that case, any earlier attempt to stringify the args will have 930 * been abandoned, and we come here instead. 931 * 932 * Here, we concat each arg in turn the old-fashioned way: essentially 933 * emulating pp_concat() in a loop. This means that all the weird edge 934 * cases will be handled correctly, if not necessarily speedily. 935 * 936 * Note that some args may already have been stringified - those are 937 * processed again, which is safe, since only args without side-effects 938 * were stringified earlier. 
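     *
     * Typical things that route execution here: a tied or otherwise
     * magical arg, an arg (or targ) that is a reference and so may have
     * '.' or '""' overloading, or an undef arg while "uninitialized"
     * warnings are enabled -- e.g. "$str$obj" where $obj is an object
     * with '""' overloading.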
939 */ 940 941 do_magical: 942 { 943 SSize_t i, n; 944 SV *left = NULL; 945 SV *right; 946 SV* nexttarg; 947 bool nextappend; 948 U32 utf8 = 0; 949 SV **svp; 950 const char *cpv = aux[PERL_MULTICONCAT_IX_PLAIN_PV].pv; 951 UNOP_AUX_item *lens = aux + PERL_MULTICONCAT_IX_LENGTHS; 952 Size_t arg_count = 0; /* how many args have been processed */ 953 954 if (!cpv) { 955 cpv = aux[PERL_MULTICONCAT_IX_UTF8_PV].pv; 956 utf8 = SVf_UTF8; 957 } 958 959 svp = toparg - nargs + 1; 960 961 /* iterate for: 962 * nargs arguments, 963 * plus possible nargs+1 consts, 964 * plus, if appending, a final targ in an extra last iteration 965 */ 966 967 n = nargs *2 + 1; 968 for (i = 0; i <= n; i++) { 969 SSize_t len; 970 971 /* if necessary, stringify the final RHS result in 972 * something like $targ .= "$a$b$c" - simulating 973 * pp_stringify 974 */ 975 if ( i == n 976 && (PL_op->op_private &OPpMULTICONCAT_STRINGIFY) 977 && !(SvPOK(left)) 978 /* extra conditions for backwards compatibility: 979 * probably incorrect, but keep the existing behaviour 980 * for now. The rules are: 981 * $x = "$ov" single arg: stringify; 982 * $x = "$ov$y" multiple args: don't stringify, 983 * $lex = "$ov$y$z" except TARGMY with at least 2 concats 984 */ 985 && ( arg_count == 1 986 || ( arg_count >= 3 987 && !is_append 988 && (PL_op->op_private & OPpTARGET_MY) 989 && !(PL_op->op_private & OPpLVAL_INTRO) 990 ) 991 ) 992 ) 993 { 994 SV *tmp = sv_newmortal(); 995 sv_copypv(tmp, left); 996 SvSETMAGIC(tmp); 997 left = tmp; 998 } 999 1000 /* do one extra iteration to handle $targ in $targ .= ... */ 1001 if (i == n && !is_append) 1002 break; 1003 1004 /* get the next arg SV or regen the next const SV */ 1005 len = lens[i >> 1].ssize; 1006 if (i == n) { 1007 /* handle the final targ .= (....) */ 1008 right = left; 1009 left = targ; 1010 } 1011 else if (i & 1) 1012 right = svp[(i >> 1)]; 1013 else if (len < 0) 1014 continue; /* no const in this position */ 1015 else { 1016 right = newSVpvn_flags(cpv, len, (utf8 | SVs_TEMP)); 1017 cpv += len; 1018 } 1019 1020 arg_count++; 1021 1022 if (arg_count <= 1) { 1023 left = right; 1024 continue; /* need at least two SVs to concat together */ 1025 } 1026 1027 if (arg_count == 2 && i < n) { 1028 /* for the first concat, create a mortal acting like the 1029 * padtmp from OP_CONST. In later iterations this will 1030 * be appended to */ 1031 nexttarg = sv_newmortal(); 1032 nextappend = FALSE; 1033 } 1034 else { 1035 nexttarg = left; 1036 nextappend = TRUE; 1037 } 1038 1039 /* Handle possible overloading. 1040 * This is basically an unrolled 1041 * tryAMAGICbin_MG(concat_amg, AMGf_assign); 1042 * and 1043 * Perl_try_amagic_bin() 1044 * call, but using left and right rather than SP[-1], SP[0], 1045 * and not relying on OPf_STACKED implying .= 1046 */ 1047 1048 if ((SvFLAGS(left)|SvFLAGS(right)) & (SVf_ROK|SVs_GMG)) { 1049 SvGETMAGIC(left); 1050 if (left != right) 1051 SvGETMAGIC(right); 1052 1053 if ((SvAMAGIC(left) || SvAMAGIC(right)) 1054 /* sprintf doesn't do concat overloading, 1055 * but allow for $x .= sprintf(...) 1056 */ 1057 && ( !(PL_op->op_private & OPpMULTICONCAT_FAKE) 1058 || i == n) 1059 ) 1060 { 1061 SV * const tmpsv = amagic_call(left, right, concat_amg, 1062 (nextappend ? 
AMGf_assign: 0)); 1063 if (tmpsv) { 1064 /* NB: tryAMAGICbin_MG() includes an OPpTARGET_MY test 1065 * here, which isn't needed as any implicit 1066 * assign done under OPpTARGET_MY is done after 1067 * this loop */ 1068 if (nextappend) { 1069 sv_setsv(left, tmpsv); 1070 SvSETMAGIC(left); 1071 } 1072 else 1073 left = tmpsv; 1074 continue; 1075 } 1076 } 1077 1078 /* if both args are the same magical value, make one a copy */ 1079 if (left == right && SvGMAGICAL(left)) { 1080 left = sv_newmortal(); 1081 /* Print the uninitialized warning now, so it includes the 1082 * variable name. */ 1083 if (!SvOK(right)) { 1084 if (ckWARN(WARN_UNINITIALIZED)) 1085 report_uninit(right); 1086 sv_setsv_flags(left, &PL_sv_no, 0); 1087 } 1088 else 1089 sv_setsv_flags(left, right, 0); 1090 SvGETMAGIC(right); 1091 } 1092 } 1093 1094 /* nexttarg = left . right */ 1095 S_do_concat(aTHX_ left, right, nexttarg, 0); 1096 left = nexttarg; 1097 } 1098 1099 SP = toparg - stack_adj + 1; 1100 1101 /* Return the result of all RHS concats, unless this op includes 1102 * an assign ($lex = x.y.z or expr = x.y.z), in which case copy 1103 * to target (which will be $lex or expr). 1104 * If we are appending, targ will already have been appended to in 1105 * the loop */ 1106 if ( !is_append 1107 && ( (PL_op->op_flags & OPf_STACKED) 1108 || (PL_op->op_private & OPpTARGET_MY)) 1109 ) { 1110 sv_setsv(targ, left); 1111 SvSETMAGIC(targ); 1112 } 1113 else 1114 targ = left; 1115 SETs(targ); 1116 RETURN; 1117 } 1118 } 1119 1120 1121 /* push the elements of av onto the stack. 1122 * Returns PL_op->op_next to allow tail-call optimisation of its callers */ 1123 1124 STATIC OP* 1125 S_pushav(pTHX_ AV* const av) 1126 { 1127 dSP; 1128 const SSize_t maxarg = AvFILL(av) + 1; 1129 EXTEND(SP, maxarg); 1130 if (UNLIKELY(SvRMAGICAL(av))) { 1131 PADOFFSET i; 1132 for (i=0; i < (PADOFFSET)maxarg; i++) { 1133 SV ** const svp = av_fetch(av, i, FALSE); 1134 SP[i+1] = LIKELY(svp) 1135 ? *svp 1136 : UNLIKELY(PL_op->op_flags & OPf_MOD) 1137 ? av_nonelem(av,i) 1138 : &PL_sv_undef; 1139 } 1140 } 1141 else { 1142 PADOFFSET i; 1143 for (i=0; i < (PADOFFSET)maxarg; i++) { 1144 SV *sv = AvARRAY(av)[i]; 1145 SP[i+1] = LIKELY(sv) 1146 ? sv 1147 : UNLIKELY(PL_op->op_flags & OPf_MOD) 1148 ? av_nonelem(av,i) 1149 : &PL_sv_undef; 1150 } 1151 } 1152 SP += maxarg; 1153 PUTBACK; 1154 return NORMAL; 1155 } 1156 1157 1158 /* ($lex1,@lex2,...) or my ($lex1,@lex2,...) */ 1159 1160 PP(pp_padrange) 1161 { 1162 dSP; 1163 PADOFFSET base = PL_op->op_targ; 1164 int count = (int)(PL_op->op_private) & OPpPADRANGE_COUNTMASK; 1165 if (PL_op->op_flags & OPf_SPECIAL) { 1166 /* fake the RHS of my ($x,$y,..) 
= @_ */ 1167 PUSHMARK(SP); 1168 (void)S_pushav(aTHX_ GvAVn(PL_defgv)); 1169 SPAGAIN; 1170 } 1171 1172 /* note, this is only skipped for compile-time-known void cxt */ 1173 if ((PL_op->op_flags & OPf_WANT) != OPf_WANT_VOID) { 1174 int i; 1175 1176 EXTEND(SP, count); 1177 PUSHMARK(SP); 1178 for (i = 0; i <count; i++) 1179 *++SP = PAD_SV(base+i); 1180 } 1181 if (PL_op->op_private & OPpLVAL_INTRO) { 1182 SV **svp = &(PAD_SVl(base)); 1183 const UV payload = (UV)( 1184 (base << (OPpPADRANGE_COUNTSHIFT + SAVE_TIGHT_SHIFT)) 1185 | (count << SAVE_TIGHT_SHIFT) 1186 | SAVEt_CLEARPADRANGE); 1187 int i; 1188 1189 STATIC_ASSERT_STMT(OPpPADRANGE_COUNTMASK + 1 == (1 << OPpPADRANGE_COUNTSHIFT)); 1190 assert((payload >> (OPpPADRANGE_COUNTSHIFT+SAVE_TIGHT_SHIFT)) 1191 == (Size_t)base); 1192 { 1193 dSS_ADD; 1194 SS_ADD_UV(payload); 1195 SS_ADD_END(1); 1196 } 1197 1198 for (i = 0; i <count; i++) 1199 SvPADSTALE_off(*svp++); /* mark lexical as active */ 1200 } 1201 RETURN; 1202 } 1203 1204 1205 PP(pp_padsv) 1206 { 1207 dSP; 1208 EXTEND(SP, 1); 1209 { 1210 OP * const op = PL_op; 1211 /* access PL_curpad once */ 1212 SV ** const padentry = &(PAD_SVl(op->op_targ)); 1213 { 1214 dTARG; 1215 TARG = *padentry; 1216 PUSHs(TARG); 1217 PUTBACK; /* no pop/push after this, TOPs ok */ 1218 } 1219 if (op->op_flags & OPf_MOD) { 1220 if (op->op_private & OPpLVAL_INTRO) 1221 if (!(op->op_private & OPpPAD_STATE)) 1222 save_clearsv(padentry); 1223 if (op->op_private & OPpDEREF) { 1224 /* TOPs is equivalent to TARG here. Using TOPs (SP) rather 1225 than TARG reduces the scope of TARG, so it does not 1226 span the call to save_clearsv, resulting in smaller 1227 machine code. */ 1228 TOPs = vivify_ref(TOPs, op->op_private & OPpDEREF); 1229 } 1230 } 1231 return op->op_next; 1232 } 1233 } 1234 1235 PP(pp_readline) 1236 { 1237 dSP; 1238 /* pp_coreargs pushes a NULL to indicate no args passed to 1239 * CORE::readline() */ 1240 if (TOPs) { 1241 SvGETMAGIC(TOPs); 1242 tryAMAGICunTARGETlist(iter_amg, 0); 1243 PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--); 1244 } 1245 else PL_last_in_gv = PL_argvgv, PL_stack_sp--; 1246 if (!isGV_with_GP(PL_last_in_gv)) { 1247 if (SvROK(PL_last_in_gv) && isGV_with_GP(SvRV(PL_last_in_gv))) 1248 PL_last_in_gv = MUTABLE_GV(SvRV(PL_last_in_gv)); 1249 else { 1250 dSP; 1251 XPUSHs(MUTABLE_SV(PL_last_in_gv)); 1252 PUTBACK; 1253 Perl_pp_rv2gv(aTHX); 1254 PL_last_in_gv = MUTABLE_GV(*PL_stack_sp--); 1255 assert((SV*)PL_last_in_gv == &PL_sv_undef || isGV_with_GP(PL_last_in_gv)); 1256 } 1257 } 1258 return do_readline(); 1259 } 1260 1261 PP(pp_eq) 1262 { 1263 dSP; 1264 SV *left, *right; 1265 1266 tryAMAGICbin_MG(eq_amg, AMGf_numeric); 1267 right = POPs; 1268 left = TOPs; 1269 SETs(boolSV( 1270 (SvIOK_notUV(left) && SvIOK_notUV(right)) 1271 ? 
(SvIVX(left) == SvIVX(right)) 1272 : ( do_ncmp(left, right) == 0) 1273 )); 1274 RETURN; 1275 } 1276 1277 1278 /* also used for: pp_i_preinc() */ 1279 1280 PP(pp_preinc) 1281 { 1282 SV *sv = *PL_stack_sp; 1283 1284 if (LIKELY(((sv->sv_flags & 1285 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV| 1286 SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK)) 1287 == SVf_IOK)) 1288 && SvIVX(sv) != IV_MAX) 1289 { 1290 SvIV_set(sv, SvIVX(sv) + 1); 1291 } 1292 else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_inc */ 1293 sv_inc(sv); 1294 SvSETMAGIC(sv); 1295 return NORMAL; 1296 } 1297 1298 1299 /* also used for: pp_i_predec() */ 1300 1301 PP(pp_predec) 1302 { 1303 SV *sv = *PL_stack_sp; 1304 1305 if (LIKELY(((sv->sv_flags & 1306 (SVf_THINKFIRST|SVs_GMG|SVf_IVisUV| 1307 SVf_IOK|SVf_NOK|SVf_POK|SVp_NOK|SVp_POK|SVf_ROK)) 1308 == SVf_IOK)) 1309 && SvIVX(sv) != IV_MIN) 1310 { 1311 SvIV_set(sv, SvIVX(sv) - 1); 1312 } 1313 else /* Do all the PERL_PRESERVE_IVUV and hard cases in sv_dec */ 1314 sv_dec(sv); 1315 SvSETMAGIC(sv); 1316 return NORMAL; 1317 } 1318 1319 1320 /* also used for: pp_orassign() */ 1321 1322 PP(pp_or) 1323 { 1324 dSP; 1325 SV *sv; 1326 PERL_ASYNC_CHECK(); 1327 sv = TOPs; 1328 if (SvTRUE_NN(sv)) 1329 RETURN; 1330 else { 1331 if (PL_op->op_type == OP_OR) 1332 --SP; 1333 RETURNOP(cLOGOP->op_other); 1334 } 1335 } 1336 1337 1338 /* also used for: pp_dor() pp_dorassign() */ 1339 1340 PP(pp_defined) 1341 { 1342 dSP; 1343 SV* sv; 1344 bool defined; 1345 const int op_type = PL_op->op_type; 1346 const bool is_dor = (op_type == OP_DOR || op_type == OP_DORASSIGN); 1347 1348 if (is_dor) { 1349 PERL_ASYNC_CHECK(); 1350 sv = TOPs; 1351 if (UNLIKELY(!sv || !SvANY(sv))) { 1352 if (op_type == OP_DOR) 1353 --SP; 1354 RETURNOP(cLOGOP->op_other); 1355 } 1356 } 1357 else { 1358 /* OP_DEFINED */ 1359 sv = POPs; 1360 if (UNLIKELY(!sv || !SvANY(sv))) 1361 RETPUSHNO; 1362 } 1363 1364 defined = FALSE; 1365 switch (SvTYPE(sv)) { 1366 case SVt_PVAV: 1367 if (AvMAX(sv) >= 0 || SvGMAGICAL(sv) || (SvRMAGICAL(sv) && mg_find(sv, PERL_MAGIC_tied))) 1368 defined = TRUE; 1369 break; 1370 case SVt_PVHV: 1371 if (HvARRAY(sv) || SvGMAGICAL(sv) || (SvRMAGICAL(sv) && mg_find(sv, PERL_MAGIC_tied))) 1372 defined = TRUE; 1373 break; 1374 case SVt_PVCV: 1375 if (CvROOT(sv) || CvXSUB(sv)) 1376 defined = TRUE; 1377 break; 1378 default: 1379 SvGETMAGIC(sv); 1380 if (SvOK(sv)) 1381 defined = TRUE; 1382 break; 1383 } 1384 1385 if (is_dor) { 1386 if(defined) 1387 RETURN; 1388 if(op_type == OP_DOR) 1389 --SP; 1390 RETURNOP(cLOGOP->op_other); 1391 } 1392 /* assuming OP_DEFINED */ 1393 if(defined) 1394 RETPUSHYES; 1395 RETPUSHNO; 1396 } 1397 1398 1399 1400 PP(pp_add) 1401 { 1402 dSP; dATARGET; bool useleft; SV *svl, *svr; 1403 1404 tryAMAGICbin_MG(add_amg, AMGf_assign|AMGf_numeric); 1405 svr = TOPs; 1406 svl = TOPm1s; 1407 1408 #ifdef PERL_PRESERVE_IVUV 1409 1410 /* special-case some simple common cases */ 1411 if (!((svl->sv_flags|svr->sv_flags) & (SVf_IVisUV|SVs_GMG))) { 1412 IV il, ir; 1413 U32 flags = (svl->sv_flags & svr->sv_flags); 1414 if (flags & SVf_IOK) { 1415 /* both args are simple IVs */ 1416 UV topl, topr; 1417 il = SvIVX(svl); 1418 ir = SvIVX(svr); 1419 do_iv: 1420 topl = ((UV)il) >> (UVSIZE * 8 - 2); 1421 topr = ((UV)ir) >> (UVSIZE * 8 - 2); 1422 1423 /* if both are in a range that can't under/overflow, do a 1424 * simple integer add: if the top of both numbers 1425 * are 00 or 11, then it's safe */ 1426 if (!( ((topl+1) | (topr+1)) & 2)) { 1427 SP--; 1428 TARGi(il + ir, 0); /* args not GMG, so can't be tainted */ 1429 SETs(TARG); 
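            /* The top-two-bits test above succeeds only when each operand's
             * top two bits are 00 or 11, i.e. each value lies roughly within
             * [IV_MIN/2, IV_MAX/2), so the IV addition just done cannot
             * overflow or underflow. */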
1430 RETURN; 1431 } 1432 goto generic; 1433 } 1434 else if (flags & SVf_NOK) { 1435 /* both args are NVs */ 1436 NV nl = SvNVX(svl); 1437 NV nr = SvNVX(svr); 1438 1439 if (lossless_NV_to_IV(nl, &il) && lossless_NV_to_IV(nr, &ir)) { 1440 /* nothing was lost by converting to IVs */ 1441 goto do_iv; 1442 } 1443 SP--; 1444 TARGn(nl + nr, 0); /* args not GMG, so can't be tainted */ 1445 SETs(TARG); 1446 RETURN; 1447 } 1448 } 1449 1450 generic: 1451 1452 useleft = USE_LEFT(svl); 1453 /* We must see if we can perform the addition with integers if possible, 1454 as the integer code detects overflow while the NV code doesn't. 1455 If either argument hasn't had a numeric conversion yet attempt to get 1456 the IV. It's important to do this now, rather than just assuming that 1457 it's not IOK as a PV of "9223372036854775806" may not take well to NV 1458 addition, and an SV which is NOK, NV=6.0 ought to be coerced to 1459 integer in case the second argument is IV=9223372036854775806 1460 We can (now) rely on sv_2iv to do the right thing, only setting the 1461 public IOK flag if the value in the NV (or PV) slot is truly integer. 1462 1463 A side effect is that this also aggressively prefers integer maths over 1464 fp maths for integer values. 1465 1466 How to detect overflow? 1467 1468 C 99 section 6.2.6.1 says 1469 1470 The range of nonnegative values of a signed integer type is a subrange 1471 of the corresponding unsigned integer type, and the representation of 1472 the same value in each type is the same. A computation involving 1473 unsigned operands can never overflow, because a result that cannot be 1474 represented by the resulting unsigned integer type is reduced modulo 1475 the number that is one greater than the largest value that can be 1476 represented by the resulting type. 1477 1478 (the 9th paragraph) 1479 1480 which I read as "unsigned ints wrap." 1481 1482 signed integer overflow seems to be classed as "exception condition" 1483 1484 If an exceptional condition occurs during the evaluation of an 1485 expression (that is, if the result is not mathematically defined or not 1486 in the range of representable values for its type), the behavior is 1487 undefined. 1488 1489 (6.5, the 5th paragraph) 1490 1491 I had assumed that on 2s complement machines signed arithmetic would 1492 wrap, hence coded pp_add and pp_subtract on the assumption that 1493 everything perl builds on would be happy. After much wailing and 1494 gnashing of teeth it would seem that irix64 knows its ANSI spec well, 1495 knows that it doesn't need to, and doesn't. Bah. Anyway, the all- 1496 unsigned code below is actually shorter than the old code. :-) 1497 */ 1498 1499 if (SvIV_please_nomg(svr)) { 1500 /* Unless the left argument is integer in range we are going to have to 1501 use NV maths. Hence only attempt to coerce the right argument if 1502 we know the left is integer. */ 1503 UV auv = 0; 1504 bool auvok = FALSE; 1505 bool a_valid = 0; 1506 1507 if (!useleft) { 1508 auv = 0; 1509 a_valid = auvok = 1; 1510 /* left operand is undef, treat as zero. + 0 is identity, 1511 Could SETi or SETu right now, but space optimise by not adding 1512 lots of code to speed up what is probably a rarish case. */ 1513 } else { 1514 /* Left operand is defined, so is it IV? */ 1515 if (SvIV_please_nomg(svl)) { 1516 if ((auvok = SvUOK(svl))) 1517 auv = SvUVX(svl); 1518 else { 1519 const IV aiv = SvIVX(svl); 1520 if (aiv >= 0) { 1521 auv = aiv; 1522 auvok = 1; /* Now acting as a sign flag. 
*/ 1523 } else { 1524 /* Using 0- here and later to silence bogus warning 1525 * from MS VC */ 1526 auv = (UV) (0 - (UV) aiv); 1527 } 1528 } 1529 a_valid = 1; 1530 } 1531 } 1532 if (a_valid) { 1533 bool result_good = 0; 1534 UV result; 1535 UV buv; 1536 bool buvok = SvUOK(svr); 1537 1538 if (buvok) 1539 buv = SvUVX(svr); 1540 else { 1541 const IV biv = SvIVX(svr); 1542 if (biv >= 0) { 1543 buv = biv; 1544 buvok = 1; 1545 } else 1546 buv = (UV) (0 - (UV) biv); 1547 } 1548 /* ?uvok if value is >= 0. basically, flagged as UV if it's +ve, 1549 else "IV" now, independent of how it came in. 1550 if a, b represents positive, A, B negative, a maps to -A etc 1551 a + b => (a + b) 1552 A + b => -(a - b) 1553 a + B => (a - b) 1554 A + B => -(a + b) 1555 all UV maths. negate result if A negative. 1556 add if signs same, subtract if signs differ. */ 1557 1558 if (auvok ^ buvok) { 1559 /* Signs differ. */ 1560 if (auv >= buv) { 1561 result = auv - buv; 1562 /* Must get smaller */ 1563 if (result <= auv) 1564 result_good = 1; 1565 } else { 1566 result = buv - auv; 1567 if (result <= buv) { 1568 /* result really should be -(auv-buv). as its negation 1569 of true value, need to swap our result flag */ 1570 auvok = !auvok; 1571 result_good = 1; 1572 } 1573 } 1574 } else { 1575 /* Signs same */ 1576 result = auv + buv; 1577 if (result >= auv) 1578 result_good = 1; 1579 } 1580 if (result_good) { 1581 SP--; 1582 if (auvok) 1583 SETu( result ); 1584 else { 1585 /* Negate result */ 1586 if (result <= (UV)IV_MIN) 1587 SETi(result == (UV)IV_MIN 1588 ? IV_MIN : -(IV)result); 1589 else { 1590 /* result valid, but out of range for IV. */ 1591 SETn( -(NV)result ); 1592 } 1593 } 1594 RETURN; 1595 } /* Overflow, drop through to NVs. */ 1596 } 1597 } 1598 1599 #else 1600 useleft = USE_LEFT(svl); 1601 #endif 1602 1603 { 1604 NV value = SvNV_nomg(svr); 1605 (void)POPs; 1606 if (!useleft) { 1607 /* left operand is undef, treat as zero. + 0.0 is identity. */ 1608 SETn(value); 1609 RETURN; 1610 } 1611 SETn( value + SvNV_nomg(svl) ); 1612 RETURN; 1613 } 1614 } 1615 1616 1617 /* also used for: pp_aelemfast_lex() */ 1618 1619 PP(pp_aelemfast) 1620 { 1621 dSP; 1622 AV * const av = PL_op->op_type == OP_AELEMFAST_LEX 1623 ? MUTABLE_AV(PAD_SV(PL_op->op_targ)) : GvAVn(cGVOP_gv); 1624 const U32 lval = PL_op->op_flags & OPf_MOD; 1625 const I8 key = (I8)PL_op->op_private; 1626 SV** svp; 1627 SV *sv; 1628 1629 assert(SvTYPE(av) == SVt_PVAV); 1630 1631 EXTEND(SP, 1); 1632 1633 /* inlined av_fetch() for simple cases ... */ 1634 if (!SvRMAGICAL(av) && key >= 0 && key <= AvFILLp(av)) { 1635 sv = AvARRAY(av)[key]; 1636 if (sv) { 1637 PUSHs(sv); 1638 RETURN; 1639 } 1640 } 1641 1642 /* ... else do it the hard way */ 1643 svp = av_fetch(av, key, lval); 1644 sv = (svp ? *svp : &PL_sv_undef); 1645 1646 if (UNLIKELY(!svp && lval)) 1647 DIE(aTHX_ PL_no_aelem, (int)key); 1648 1649 if (!lval && SvRMAGICAL(av) && SvGMAGICAL(sv)) /* see note in pp_helem() */ 1650 mg_get(sv); 1651 PUSHs(sv); 1652 RETURN; 1653 } 1654 1655 PP(pp_join) 1656 { 1657 dSP; dMARK; dTARGET; 1658 MARK++; 1659 do_join(TARG, *MARK, MARK, SP); 1660 SP = MARK; 1661 SETs(TARG); 1662 RETURN; 1663 } 1664 1665 /* Oversized hot code. */ 1666 1667 /* also used for: pp_say() */ 1668 1669 PP(pp_print) 1670 { 1671 dSP; dMARK; dORIGMARK; 1672 PerlIO *fp; 1673 MAGIC *mg; 1674 GV * const gv 1675 = (PL_op->op_flags & OPf_STACKED) ? 
MUTABLE_GV(*++MARK) : PL_defoutgv; 1676 IO *io = GvIO(gv); 1677 1678 if (io 1679 && (mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar))) 1680 { 1681 had_magic: 1682 if (MARK == ORIGMARK) { 1683 /* If using default handle then we need to make space to 1684 * pass object as 1st arg, so move other args up ... 1685 */ 1686 MEXTEND(SP, 1); 1687 ++MARK; 1688 Move(MARK, MARK + 1, (SP - MARK) + 1, SV*); 1689 ++SP; 1690 } 1691 return Perl_tied_method(aTHX_ SV_CONST(PRINT), mark - 1, MUTABLE_SV(io), 1692 mg, 1693 (G_SCALAR | TIED_METHOD_ARGUMENTS_ON_STACK 1694 | (PL_op->op_type == OP_SAY 1695 ? TIED_METHOD_SAY : 0)), sp - mark); 1696 } 1697 if (!io) { 1698 if ( gv && GvEGVx(gv) && (io = GvIO(GvEGV(gv))) 1699 && (mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar))) 1700 goto had_magic; 1701 report_evil_fh(gv); 1702 SETERRNO(EBADF,RMS_IFI); 1703 goto just_say_no; 1704 } 1705 else if (!(fp = IoOFP(io))) { 1706 if (IoIFP(io)) 1707 report_wrongway_fh(gv, '<'); 1708 else 1709 report_evil_fh(gv); 1710 SETERRNO(EBADF,IoIFP(io)?RMS_FAC:RMS_IFI); 1711 goto just_say_no; 1712 } 1713 else { 1714 SV * const ofs = GvSV(PL_ofsgv); /* $, */ 1715 MARK++; 1716 if (ofs && (SvGMAGICAL(ofs) || SvOK(ofs))) { 1717 while (MARK <= SP) { 1718 if (!do_print(*MARK, fp)) 1719 break; 1720 MARK++; 1721 if (MARK <= SP) { 1722 /* don't use 'ofs' here - it may be invalidated by magic callbacks */ 1723 if (!do_print(GvSV(PL_ofsgv), fp)) { 1724 MARK--; 1725 break; 1726 } 1727 } 1728 } 1729 } 1730 else { 1731 while (MARK <= SP) { 1732 if (!do_print(*MARK, fp)) 1733 break; 1734 MARK++; 1735 } 1736 } 1737 if (MARK <= SP) 1738 goto just_say_no; 1739 else { 1740 if (PL_op->op_type == OP_SAY) { 1741 if (PerlIO_write(fp, "\n", 1) == 0 || PerlIO_error(fp)) 1742 goto just_say_no; 1743 } 1744 else if (PL_ors_sv && SvOK(PL_ors_sv)) 1745 if (!do_print(PL_ors_sv, fp)) /* $\ */ 1746 goto just_say_no; 1747 1748 if (IoFLAGS(io) & IOf_FLUSH) 1749 if (PerlIO_flush(fp) == EOF) 1750 goto just_say_no; 1751 } 1752 } 1753 SP = ORIGMARK; 1754 XPUSHs(&PL_sv_yes); 1755 RETURN; 1756 1757 just_say_no: 1758 SP = ORIGMARK; 1759 XPUSHs(&PL_sv_undef); 1760 RETURN; 1761 } 1762 1763 1764 /* do the common parts of pp_padhv() and pp_rv2hv() 1765 * It assumes the caller has done EXTEND(SP, 1) or equivalent. 1766 * 'is_keys' indicates the OPpPADHV_ISKEYS/OPpRV2HV_ISKEYS flag is set. 
1767 * 'has_targ' indicates that the op has a target - this should 1768 * be a compile-time constant so that the code can constant-folded as 1769 * appropriate 1770 * */ 1771 1772 PERL_STATIC_INLINE OP* 1773 S_padhv_rv2hv_common(pTHX_ HV *hv, U8 gimme, bool is_keys, bool has_targ) 1774 { 1775 bool is_tied; 1776 bool is_bool; 1777 MAGIC *mg; 1778 dSP; 1779 IV i; 1780 SV *sv; 1781 1782 assert(PL_op->op_type == OP_PADHV || PL_op->op_type == OP_RV2HV); 1783 1784 if (gimme == G_ARRAY) { 1785 hv_pushkv(hv, 3); 1786 return NORMAL; 1787 } 1788 1789 if (is_keys) 1790 /* 'keys %h' masquerading as '%h': reset iterator */ 1791 (void)hv_iterinit(hv); 1792 1793 if (gimme == G_VOID) 1794 return NORMAL; 1795 1796 is_bool = ( PL_op->op_private & OPpTRUEBOOL 1797 || ( PL_op->op_private & OPpMAYBE_TRUEBOOL 1798 && block_gimme() == G_VOID)); 1799 is_tied = SvRMAGICAL(hv) && (mg = mg_find(MUTABLE_SV(hv), PERL_MAGIC_tied)); 1800 1801 if (UNLIKELY(is_tied)) { 1802 if (is_keys && !is_bool) { 1803 i = 0; 1804 while (hv_iternext(hv)) 1805 i++; 1806 goto push_i; 1807 } 1808 else { 1809 sv = magic_scalarpack(hv, mg); 1810 goto push_sv; 1811 } 1812 } 1813 else { 1814 i = HvUSEDKEYS(hv); 1815 if (is_bool) { 1816 sv = i ? &PL_sv_yes : &PL_sv_zero; 1817 push_sv: 1818 PUSHs(sv); 1819 } 1820 else { 1821 push_i: 1822 if (has_targ) { 1823 dTARGET; 1824 PUSHi(i); 1825 } 1826 else 1827 if (is_keys) { 1828 /* parent op should be an unused OP_KEYS whose targ we can 1829 * use */ 1830 dTARG; 1831 OP *k; 1832 1833 assert(!OpHAS_SIBLING(PL_op)); 1834 k = PL_op->op_sibparent; 1835 assert(k->op_type == OP_KEYS); 1836 TARG = PAD_SV(k->op_targ); 1837 PUSHi(i); 1838 } 1839 else 1840 mPUSHi(i); 1841 } 1842 } 1843 1844 PUTBACK; 1845 return NORMAL; 1846 } 1847 1848 1849 /* This is also called directly by pp_lvavref. 
*/ 1850 PP(pp_padav) 1851 { 1852 dSP; dTARGET; 1853 U8 gimme; 1854 assert(SvTYPE(TARG) == SVt_PVAV); 1855 if (UNLIKELY( PL_op->op_private & OPpLVAL_INTRO )) 1856 if (LIKELY( !(PL_op->op_private & OPpPAD_STATE) )) 1857 SAVECLEARSV(PAD_SVl(PL_op->op_targ)); 1858 EXTEND(SP, 1); 1859 1860 if (PL_op->op_flags & OPf_REF) { 1861 PUSHs(TARG); 1862 RETURN; 1863 } 1864 else if (PL_op->op_private & OPpMAYBE_LVSUB) { 1865 const I32 flags = is_lvalue_sub(); 1866 if (flags && !(flags & OPpENTERSUB_INARGS)) { 1867 if (GIMME_V == G_SCALAR) 1868 /* diag_listed_as: Can't return %s to lvalue scalar context */ 1869 Perl_croak(aTHX_ "Can't return array to lvalue scalar context"); 1870 PUSHs(TARG); 1871 RETURN; 1872 } 1873 } 1874 1875 gimme = GIMME_V; 1876 if (gimme == G_ARRAY) 1877 return S_pushav(aTHX_ (AV*)TARG); 1878 1879 if (gimme == G_SCALAR) { 1880 const SSize_t maxarg = AvFILL(MUTABLE_AV(TARG)) + 1; 1881 if (!maxarg) 1882 PUSHs(&PL_sv_zero); 1883 else if (PL_op->op_private & OPpTRUEBOOL) 1884 PUSHs(&PL_sv_yes); 1885 else 1886 mPUSHi(maxarg); 1887 } 1888 RETURN; 1889 } 1890 1891 1892 PP(pp_padhv) 1893 { 1894 dSP; dTARGET; 1895 U8 gimme; 1896 1897 assert(SvTYPE(TARG) == SVt_PVHV); 1898 if (UNLIKELY( PL_op->op_private & OPpLVAL_INTRO )) 1899 if (LIKELY( !(PL_op->op_private & OPpPAD_STATE) )) 1900 SAVECLEARSV(PAD_SVl(PL_op->op_targ)); 1901 1902 EXTEND(SP, 1); 1903 1904 if (PL_op->op_flags & OPf_REF) { 1905 PUSHs(TARG); 1906 RETURN; 1907 } 1908 else if (PL_op->op_private & OPpMAYBE_LVSUB) { 1909 const I32 flags = is_lvalue_sub(); 1910 if (flags && !(flags & OPpENTERSUB_INARGS)) { 1911 if (GIMME_V == G_SCALAR) 1912 /* diag_listed_as: Can't return %s to lvalue scalar context */ 1913 Perl_croak(aTHX_ "Can't return hash to lvalue scalar context"); 1914 PUSHs(TARG); 1915 RETURN; 1916 } 1917 } 1918 1919 gimme = GIMME_V; 1920 1921 return S_padhv_rv2hv_common(aTHX_ (HV*)TARG, gimme, 1922 cBOOL(PL_op->op_private & OPpPADHV_ISKEYS), 1923 0 /* has_targ*/); 1924 } 1925 1926 1927 /* also used for: pp_rv2hv() */ 1928 /* also called directly by pp_lvavref */ 1929 1930 PP(pp_rv2av) 1931 { 1932 dSP; dTOPss; 1933 const U8 gimme = GIMME_V; 1934 static const char an_array[] = "an ARRAY"; 1935 static const char a_hash[] = "a HASH"; 1936 const bool is_pp_rv2av = PL_op->op_type == OP_RV2AV 1937 || PL_op->op_type == OP_LVAVREF; 1938 const svtype type = is_pp_rv2av ? SVt_PVAV : SVt_PVHV; 1939 1940 SvGETMAGIC(sv); 1941 if (SvROK(sv)) { 1942 if (UNLIKELY(SvAMAGIC(sv))) { 1943 sv = amagic_deref_call(sv, is_pp_rv2av ? to_av_amg : to_hv_amg); 1944 } 1945 sv = SvRV(sv); 1946 if (UNLIKELY(SvTYPE(sv) != type)) 1947 /* diag_listed_as: Not an ARRAY reference */ 1948 DIE(aTHX_ "Not %s reference", is_pp_rv2av ? an_array : a_hash); 1949 else if (UNLIKELY(PL_op->op_flags & OPf_MOD 1950 && PL_op->op_private & OPpLVAL_INTRO)) 1951 Perl_croak(aTHX_ "%s", PL_no_localize_ref); 1952 } 1953 else if (UNLIKELY(SvTYPE(sv) != type)) { 1954 GV *gv; 1955 1956 if (!isGV_with_GP(sv)) { 1957 gv = Perl_softref2xv(aTHX_ sv, is_pp_rv2av ? an_array : a_hash, 1958 type, &sp); 1959 if (!gv) 1960 RETURN; 1961 } 1962 else { 1963 gv = MUTABLE_GV(sv); 1964 } 1965 sv = is_pp_rv2av ? MUTABLE_SV(GvAVn(gv)) : MUTABLE_SV(GvHVn(gv)); 1966 if (PL_op->op_private & OPpLVAL_INTRO) 1967 sv = is_pp_rv2av ? 
MUTABLE_SV(save_ary(gv)) : MUTABLE_SV(save_hash(gv)); 1968 } 1969 if (PL_op->op_flags & OPf_REF) { 1970 SETs(sv); 1971 RETURN; 1972 } 1973 else if (UNLIKELY(PL_op->op_private & OPpMAYBE_LVSUB)) { 1974 const I32 flags = is_lvalue_sub(); 1975 if (flags && !(flags & OPpENTERSUB_INARGS)) { 1976 if (gimme != G_ARRAY) 1977 goto croak_cant_return; 1978 SETs(sv); 1979 RETURN; 1980 } 1981 } 1982 1983 if (is_pp_rv2av) { 1984 AV *const av = MUTABLE_AV(sv); 1985 1986 if (gimme == G_ARRAY) { 1987 SP--; 1988 PUTBACK; 1989 return S_pushav(aTHX_ av); 1990 } 1991 1992 if (gimme == G_SCALAR) { 1993 const SSize_t maxarg = AvFILL(av) + 1; 1994 if (PL_op->op_private & OPpTRUEBOOL) 1995 SETs(maxarg ? &PL_sv_yes : &PL_sv_zero); 1996 else { 1997 dTARGET; 1998 SETi(maxarg); 1999 } 2000 } 2001 } 2002 else { 2003 SP--; PUTBACK; 2004 return S_padhv_rv2hv_common(aTHX_ (HV*)sv, gimme, 2005 cBOOL(PL_op->op_private & OPpRV2HV_ISKEYS), 2006 1 /* has_targ*/); 2007 } 2008 RETURN; 2009 2010 croak_cant_return: 2011 Perl_croak(aTHX_ "Can't return %s to lvalue scalar context", 2012 is_pp_rv2av ? "array" : "hash"); 2013 RETURN; 2014 } 2015 2016 STATIC void 2017 S_do_oddball(pTHX_ SV **oddkey, SV **firstkey) 2018 { 2019 PERL_ARGS_ASSERT_DO_ODDBALL; 2020 2021 if (*oddkey) { 2022 if (ckWARN(WARN_MISC)) { 2023 const char *err; 2024 if (oddkey == firstkey && 2025 SvROK(*oddkey) && 2026 (SvTYPE(SvRV(*oddkey)) == SVt_PVAV || 2027 SvTYPE(SvRV(*oddkey)) == SVt_PVHV)) 2028 { 2029 err = "Reference found where even-sized list expected"; 2030 } 2031 else 2032 err = "Odd number of elements in hash assignment"; 2033 Perl_warner(aTHX_ packWARN(WARN_MISC), "%s", err); 2034 } 2035 2036 } 2037 } 2038 2039 2040 /* Do a mark and sweep with the SVf_BREAK flag to detect elements which 2041 * are common to both the LHS and RHS of an aassign, and replace them 2042 * with copies. All these copies are made before the actual list assign is 2043 * done. 2044 * 2045 * For example in ($a,$b) = ($b,$a), assigning the value of the first RHS 2046 * element ($b) to the first LH element ($a), modifies $a; when the 2047 * second assignment is done, the second RH element now has the wrong 2048 * value. So we initially replace the RHS with ($b, mortalcopy($a)). 2049 * Note that we don't need to make a mortal copy of $b. 2050 * 2051 * The algorithm below works by, for every RHS element, mark the 2052 * corresponding LHS target element with SVf_BREAK. Then if the RHS 2053 * element is found with SVf_BREAK set, it means it would have been 2054 * modified, so make a copy. 2055 * Note that by scanning both LHS and RHS in lockstep, we avoid 2056 * unnecessary copies (like $b above) compared with a naive 2057 * "mark all LHS; copy all marked RHS; unmark all LHS". 2058 * 2059 * If the LHS element is a 'my' declaration' and has a refcount of 1, then 2060 * it can't be common and can be skipped. 2061 * 2062 * On DEBUGGING builds it takes an extra boolean, fake. If true, it means 2063 * that we thought we didn't need to call S_aassign_copy_common(), but we 2064 * have anyway for sanity checking. If we find we need to copy, then panic. 2065 */ 2066 2067 PERL_STATIC_INLINE void 2068 S_aassign_copy_common(pTHX_ SV **firstlelem, SV **lastlelem, 2069 SV **firstrelem, SV **lastrelem 2070 #ifdef DEBUGGING 2071 , bool fake 2072 #endif 2073 ) 2074 { 2075 dVAR; 2076 SV **relem; 2077 SV **lelem; 2078 SSize_t lcount = lastlelem - firstlelem + 1; 2079 bool marked = FALSE; /* have we marked any LHS with SVf_BREAK ? 
*/ 2080 bool const do_rc1 = cBOOL(PL_op->op_private & OPpASSIGN_COMMON_RC1); 2081 bool copy_all = FALSE; 2082 2083 assert(!PL_in_clean_all); /* SVf_BREAK not already in use */ 2084 assert(firstlelem < lastlelem); /* at least 2 LH elements */ 2085 assert(firstrelem < lastrelem); /* at least 2 RH elements */ 2086 2087 2088 lelem = firstlelem; 2089 /* we never have to copy the first RH element; it can't be corrupted 2090 * by assigning something to the corresponding first LH element. 2091 * So this scan does in a loop: mark LHS[N]; test RHS[N+1] 2092 */ 2093 relem = firstrelem + 1; 2094 2095 for (; relem <= lastrelem; relem++) { 2096 SV *svr; 2097 2098 /* mark next LH element */ 2099 2100 if (--lcount >= 0) { 2101 SV *svl = *lelem++; 2102 2103 if (UNLIKELY(!svl)) {/* skip AV alias marker */ 2104 assert (lelem <= lastlelem); 2105 svl = *lelem++; 2106 lcount--; 2107 } 2108 2109 assert(svl); 2110 if (SvSMAGICAL(svl)) { 2111 copy_all = TRUE; 2112 } 2113 if (SvTYPE(svl) == SVt_PVAV || SvTYPE(svl) == SVt_PVHV) { 2114 if (!marked) 2115 return; 2116 /* this LH element will consume all further args; 2117 * no need to mark any further LH elements (if any). 2118 * But we still need to scan any remaining RHS elements; 2119 * set lcount negative to distinguish from lcount == 0, 2120 * so the loop condition continues being true 2121 */ 2122 lcount = -1; 2123 lelem--; /* no need to unmark this element */ 2124 } 2125 else if (!(do_rc1 && SvREFCNT(svl) == 1) && !SvIMMORTAL(svl)) { 2126 SvFLAGS(svl) |= SVf_BREAK; 2127 marked = TRUE; 2128 } 2129 else if (!marked) { 2130 /* don't check RH element if no SVf_BREAK flags set yet */ 2131 if (!lcount) 2132 break; 2133 continue; 2134 } 2135 } 2136 2137 /* see if corresponding RH element needs copying */ 2138 2139 assert(marked); 2140 svr = *relem; 2141 assert(svr); 2142 2143 if (UNLIKELY(SvFLAGS(svr) & (SVf_BREAK|SVs_GMG) || copy_all)) { 2144 U32 brk = (SvFLAGS(svr) & SVf_BREAK); 2145 2146 #ifdef DEBUGGING 2147 if (fake) { 2148 /* op_dump(PL_op); */ 2149 Perl_croak(aTHX_ 2150 "panic: aassign skipped needed copy of common RH elem %" 2151 UVuf, (UV)(relem - firstrelem)); 2152 } 2153 #endif 2154 2155 TAINT_NOT; /* Each item is independent */ 2156 2157 /* Dear TODO test in t/op/sort.t, I love you. 2158 (It's relying on a panic, not a "semi-panic" from newSVsv() 2159 and then an assertion failure below.) */ 2160 if (UNLIKELY(SvIS_FREED(svr))) { 2161 Perl_croak(aTHX_ "panic: attempt to copy freed scalar %p", 2162 (void*)svr); 2163 } 2164 /* avoid break flag while copying; otherwise COW etc 2165 * disabled... */ 2166 SvFLAGS(svr) &= ~SVf_BREAK; 2167 /* Not newSVsv(), as it does not allow copy-on-write, 2168 resulting in wasteful copies. 2169 Also, we use SV_NOSTEAL in case the SV is used more than 2170 once, e.g. (...) = (f())[0,0] 2171 Where the same SV appears twice on the RHS without a ref 2172 count bump. (Although I suspect that the SV won't be 2173 stealable here anyway - DAPM). 2174 */ 2175 *relem = sv_mortalcopy_flags(svr, 2176 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL); 2177 /* ... but restore afterwards in case it's needed again, 2178 * e.g. 
($a,$b,$c) = (1,$a,$a)
2179 */
2180 SvFLAGS(svr) |= brk;
2181 }
2182
2183 if (!lcount)
2184 break;
2185 }
2186
2187 if (!marked)
2188 return;
2189
2190 /* unmark LHS */
2191
2192 while (lelem > firstlelem) {
2193 SV * const svl = *(--lelem);
2194 if (svl)
2195 SvFLAGS(svl) &= ~SVf_BREAK;
2196 }
2197 }
2198
2199
2200
2201 PP(pp_aassign)
2202 {
2203 dVAR; dSP;
2204 SV **lastlelem = PL_stack_sp;
2205 SV **lastrelem = PL_stack_base + POPMARK;
2206 SV **firstrelem = PL_stack_base + POPMARK + 1;
2207 SV **firstlelem = lastrelem + 1;
2208
2209 SV **relem;
2210 SV **lelem;
2211 U8 gimme;
2212 /* PL_delaymagic is restored by JUMPENV_POP on dieing, so we
2213 * only need to save locally, not on the save stack */
2214 U16 old_delaymagic = PL_delaymagic;
2215 #ifdef DEBUGGING
2216 bool fake = 0;
2217 #endif
2218
2219 PL_delaymagic = DM_DELAY; /* catch simultaneous items */
2220
2221 /* If there's a common identifier on both sides we have to take
2222 * special care that assigning the identifier on the left doesn't
2223 * clobber a value on the right that's used later in the list.
2224 */
2225
2226 /* at least 2 LH and RH elements, or commonality isn't an issue */
2227 if (firstlelem < lastlelem && firstrelem < lastrelem) {
2228 for (relem = firstrelem+1; relem <= lastrelem; relem++) {
2229 if (SvGMAGICAL(*relem))
2230 goto do_scan;
2231 }
2232 for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
2233 if (*lelem && SvSMAGICAL(*lelem))
2234 goto do_scan;
2235 }
2236 if ( PL_op->op_private & (OPpASSIGN_COMMON_SCALAR|OPpASSIGN_COMMON_RC1) ) {
2237 if (PL_op->op_private & OPpASSIGN_COMMON_RC1) {
2238 /* skip the scan if all scalars have a ref count of 1 */
2239 for (lelem = firstlelem; lelem <= lastlelem; lelem++) {
2240 SV *sv = *lelem;
2241 if (!sv || SvREFCNT(sv) == 1)
2242 continue;
2243 if (SvTYPE(sv) != SVt_PVAV && SvTYPE(sv) != SVt_PVHV)
2244 goto do_scan;
2245 break;
2246 }
2247 }
2248 else {
2249 do_scan:
2250 S_aassign_copy_common(aTHX_
2251 firstlelem, lastlelem, firstrelem, lastrelem
2252 #ifdef DEBUGGING
2253 , fake
2254 #endif
2255 );
2256 }
2257 }
2258 }
2259 #ifdef DEBUGGING
2260 else {
2261 /* on debugging builds, do the scan even if we've concluded we
2262 * don't need to, then panic if we find commonality. Note that the
2263 * scanner assumes at least 2 elements */
2264 if (firstlelem < lastlelem && firstrelem < lastrelem) {
2265 fake = 1;
2266 goto do_scan;
2267 }
2268 }
2269 #endif
2270
2271 gimme = GIMME_V;
2272 relem = firstrelem;
2273 lelem = firstlelem;
2274
2275 if (relem > lastrelem)
2276 goto no_relems;
2277
2278 /* first lelem loop while there are still relems */
2279 while (LIKELY(lelem <= lastlelem)) {
2280 bool alias = FALSE;
2281 SV *lsv = *lelem++;
2282
2283 TAINT_NOT; /* Each item stands on its own, taintwise. */
2284
2285 assert(relem <= lastrelem);
2286 if (UNLIKELY(!lsv)) {
2287 alias = TRUE;
2288 lsv = *lelem++;
2289 ASSUME(SvTYPE(lsv) == SVt_PVAV);
2290 }
2291
2292 switch (SvTYPE(lsv)) {
2293 case SVt_PVAV: {
2294 SV **svp;
2295 SSize_t i;
2296 SSize_t tmps_base;
2297 SSize_t nelems = lastrelem - relem + 1;
2298 AV *ary = MUTABLE_AV(lsv);
2299
2300 /* Assigning to an aggregate is tricky. First there is the
2301 * issue of commonality, e.g. @a = ($a[0]). Since the
2302 * stack isn't refcounted, clearing @a prior to storing
2303 * elements will free $a[0].
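 * A rough illustration of that commonality problem (example assumed
 * here, not taken from any particular test):
 *
 *     my @a = (7);
 *     @a = ($a[0], 8);
 *
 * The RHS stack slot for $a[0] points at the very SV stored in @a, so
 * if @a were cleared before the RHS values were copied, that slot
 * would be left pointing at a freed SV.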
Similarly with 2304 * sub FETCH { $status[$_[1]] } @status = @tied[0,1]; 2305 * 2306 * The way to avoid these issues is to make the copy of each 2307 * SV (and we normally store a *copy* in the array) *before* 2308 * clearing the array. But this has a problem in that 2309 * if the code croaks during copying, the not-yet-stored copies 2310 * could leak. One way to avoid this is to make all the copies 2311 * mortal, but that's quite expensive. 2312 * 2313 * The current solution to these issues is to use a chunk 2314 * of the tmps stack as a temporary refcounted-stack. SVs 2315 * will be put on there during processing to avoid leaks, 2316 * but will be removed again before the end of this block, 2317 * so free_tmps() is never normally called. Also, the 2318 * sv_refcnt of the SVs doesn't have to be manipulated, since 2319 * the ownership of 1 reference count is transferred directly 2320 * from the tmps stack to the AV when the SV is stored. 2321 * 2322 * We disarm slots in the temps stack by storing PL_sv_undef 2323 * there: it doesn't matter if that SV's refcount is 2324 * repeatedly decremented during a croak. But usually this is 2325 * only an interim measure. By the end of this code block 2326 * we try where possible to not leave any PL_sv_undef's on the 2327 * tmps stack e.g. by shuffling newer entries down. 2328 * 2329 * There is one case where we don't copy: non-magical 2330 * SvTEMP(sv)'s with a ref count of 1. The only owner of these 2331 * is on the tmps stack, so its safe to directly steal the SV 2332 * rather than copying. This is common in things like function 2333 * returns, map etc, which all return a list of such SVs. 2334 * 2335 * Note however something like @a = (f())[0,0], where there is 2336 * a danger of the same SV being shared: this avoided because 2337 * when the SV is stored as $a[0], its ref count gets bumped, 2338 * so the RC==1 test fails and the second element is copied 2339 * instead. 2340 * 2341 * We also use one slot in the tmps stack to hold an extra 2342 * ref to the array, to ensure it doesn't get prematurely 2343 * freed. Again, this is removed before the end of this block. 2344 * 2345 * Note that OPpASSIGN_COMMON_AGG is used to flag a possible 2346 * @a = ($a[0]) case, but the current implementation uses the 2347 * same algorithm regardless, so ignores that flag. (It *is* 2348 * used in the hash branch below, however). 2349 */ 2350 2351 /* Reserve slots for ary, plus the elems we're about to copy, 2352 * then protect ary and temporarily void the remaining slots 2353 * with &PL_sv_undef */ 2354 EXTEND_MORTAL(nelems + 1); 2355 PL_tmps_stack[++PL_tmps_ix] = SvREFCNT_inc_simple_NN(ary); 2356 tmps_base = PL_tmps_ix + 1; 2357 for (i = 0; i < nelems; i++) 2358 PL_tmps_stack[tmps_base + i] = &PL_sv_undef; 2359 PL_tmps_ix += nelems; 2360 2361 /* Make a copy of each RHS elem and save on the tmps_stack 2362 * (or pass through where we can optimise away the copy) */ 2363 2364 if (UNLIKELY(alias)) { 2365 U32 lval = (gimme == G_ARRAY) 2366 ? (PL_op->op_flags & OPf_MOD || LVRET) : 0; 2367 for (svp = relem; svp <= lastrelem; svp++) { 2368 SV *rsv = *svp; 2369 2370 SvGETMAGIC(rsv); 2371 if (!SvROK(rsv)) 2372 DIE(aTHX_ "Assigned value is not a reference"); 2373 if (SvTYPE(SvRV(rsv)) > SVt_PVLV) 2374 /* diag_listed_as: Assigned value is not %s reference */ 2375 DIE(aTHX_ 2376 "Assigned value is not a SCALAR reference"); 2377 if (lval) 2378 *svp = rsv = sv_mortalcopy(rsv); 2379 /* XXX else check for weak refs? 
*/ 2380 rsv = SvREFCNT_inc_NN(SvRV(rsv)); 2381 assert(tmps_base <= PL_tmps_max); 2382 PL_tmps_stack[tmps_base++] = rsv; 2383 } 2384 } 2385 else { 2386 for (svp = relem; svp <= lastrelem; svp++) { 2387 SV *rsv = *svp; 2388 2389 if (SvTEMP(rsv) && !SvGMAGICAL(rsv) && SvREFCNT(rsv) == 1) { 2390 /* can skip the copy */ 2391 SvREFCNT_inc_simple_void_NN(rsv); 2392 SvTEMP_off(rsv); 2393 } 2394 else { 2395 SV *nsv; 2396 /* do get before newSV, in case it dies and leaks */ 2397 SvGETMAGIC(rsv); 2398 nsv = newSV(0); 2399 /* see comment in S_aassign_copy_common about 2400 * SV_NOSTEAL */ 2401 sv_setsv_flags(nsv, rsv, 2402 (SV_DO_COW_SVSETSV|SV_NOSTEAL)); 2403 rsv = *svp = nsv; 2404 } 2405 2406 assert(tmps_base <= PL_tmps_max); 2407 PL_tmps_stack[tmps_base++] = rsv; 2408 } 2409 } 2410 2411 if (SvRMAGICAL(ary) || AvFILLp(ary) >= 0) /* may be non-empty */ 2412 av_clear(ary); 2413 2414 /* store in the array, the SVs that are in the tmps stack */ 2415 2416 tmps_base -= nelems; 2417 2418 if (SvMAGICAL(ary) || SvREADONLY(ary) || !AvREAL(ary)) { 2419 /* for arrays we can't cheat with, use the official API */ 2420 av_extend(ary, nelems - 1); 2421 for (i = 0; i < nelems; i++) { 2422 SV **svp = &(PL_tmps_stack[tmps_base + i]); 2423 SV *rsv = *svp; 2424 /* A tied store won't take ownership of rsv, so keep 2425 * the 1 refcnt on the tmps stack; otherwise disarm 2426 * the tmps stack entry */ 2427 if (av_store(ary, i, rsv)) 2428 *svp = &PL_sv_undef; 2429 /* av_store() may have added set magic to rsv */; 2430 SvSETMAGIC(rsv); 2431 } 2432 /* disarm ary refcount: see comments below about leak */ 2433 PL_tmps_stack[tmps_base - 1] = &PL_sv_undef; 2434 } 2435 else { 2436 /* directly access/set the guts of the AV */ 2437 SSize_t fill = nelems - 1; 2438 if (fill > AvMAX(ary)) 2439 av_extend_guts(ary, fill, &AvMAX(ary), &AvALLOC(ary), 2440 &AvARRAY(ary)); 2441 AvFILLp(ary) = fill; 2442 Copy(&(PL_tmps_stack[tmps_base]), AvARRAY(ary), nelems, SV*); 2443 /* Quietly remove all the SVs from the tmps stack slots, 2444 * since ary has now taken ownership of the refcnt. 2445 * Also remove ary: which will now leak if we die before 2446 * the SvREFCNT_dec_NN(ary) below */ 2447 if (UNLIKELY(PL_tmps_ix >= tmps_base + nelems)) 2448 Move(&PL_tmps_stack[tmps_base + nelems], 2449 &PL_tmps_stack[tmps_base - 1], 2450 PL_tmps_ix - (tmps_base + nelems) + 1, 2451 SV*); 2452 PL_tmps_ix -= (nelems + 1); 2453 } 2454 2455 if (UNLIKELY(PL_delaymagic & DM_ARRAY_ISA)) 2456 /* its assumed @ISA set magic can't die and leak ary */ 2457 SvSETMAGIC(MUTABLE_SV(ary)); 2458 SvREFCNT_dec_NN(ary); 2459 2460 relem = lastrelem + 1; 2461 goto no_relems; 2462 } 2463 2464 case SVt_PVHV: { /* normal hash */ 2465 2466 SV **svp; 2467 bool dirty_tmps; 2468 SSize_t i; 2469 SSize_t tmps_base; 2470 SSize_t nelems = lastrelem - relem + 1; 2471 HV *hash = MUTABLE_HV(lsv); 2472 2473 if (UNLIKELY(nelems & 1)) { 2474 do_oddball(lastrelem, relem); 2475 /* we have firstlelem to reuse, it's not needed any more */ 2476 *++lastrelem = &PL_sv_undef; 2477 nelems++; 2478 } 2479 2480 /* See the SVt_PVAV branch above for a long description of 2481 * how the following all works. The main difference for hashes 2482 * is that we treat keys and values separately (and have 2483 * separate loops for them): as for arrays, values are always 2484 * copied (except for the SvTEMP optimisation), since they 2485 * need to be stored in the hash; while keys are only 2486 * processed where they might get prematurely freed or 2487 * whatever. 
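 * A small sketch of the distinction (illustrative only, names invented):
 *
 *     %h = (k => $v);
 *
 * $v is copied (or stolen, if it is a disposable SvTEMP), because the
 * copy is what ends up stored in %h, whereas the literal key "k" needs
 * no protection at all; keys only matter in cases such as
 * %h = ($h{a}, 1), where clearing %h too early could free the key SV.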
*/ 2488 2489 /* tmps stack slots: 2490 * * reserve a slot for the hash keepalive; 2491 * * reserve slots for the hash values we're about to copy; 2492 * * preallocate for the keys we'll possibly copy or refcount bump 2493 * later; 2494 * then protect hash and temporarily void the remaining 2495 * value slots with &PL_sv_undef */ 2496 EXTEND_MORTAL(nelems + 1); 2497 2498 /* convert to number of key/value pairs */ 2499 nelems >>= 1; 2500 2501 PL_tmps_stack[++PL_tmps_ix] = SvREFCNT_inc_simple_NN(hash); 2502 tmps_base = PL_tmps_ix + 1; 2503 for (i = 0; i < nelems; i++) 2504 PL_tmps_stack[tmps_base + i] = &PL_sv_undef; 2505 PL_tmps_ix += nelems; 2506 2507 /* Make a copy of each RHS hash value and save on the tmps_stack 2508 * (or pass through where we can optimise away the copy) */ 2509 2510 for (svp = relem + 1; svp <= lastrelem; svp += 2) { 2511 SV *rsv = *svp; 2512 2513 if (SvTEMP(rsv) && !SvGMAGICAL(rsv) && SvREFCNT(rsv) == 1) { 2514 /* can skip the copy */ 2515 SvREFCNT_inc_simple_void_NN(rsv); 2516 SvTEMP_off(rsv); 2517 } 2518 else { 2519 SV *nsv; 2520 /* do get before newSV, in case it dies and leaks */ 2521 SvGETMAGIC(rsv); 2522 nsv = newSV(0); 2523 /* see comment in S_aassign_copy_common about 2524 * SV_NOSTEAL */ 2525 sv_setsv_flags(nsv, rsv, 2526 (SV_DO_COW_SVSETSV|SV_NOSTEAL)); 2527 rsv = *svp = nsv; 2528 } 2529 2530 assert(tmps_base <= PL_tmps_max); 2531 PL_tmps_stack[tmps_base++] = rsv; 2532 } 2533 tmps_base -= nelems; 2534 2535 2536 /* possibly protect keys */ 2537 2538 if (UNLIKELY(gimme == G_ARRAY)) { 2539 /* handle e.g. 2540 * @a = ((%h = ($$r, 1)), $r = "x"); 2541 * $_++ for %h = (1,2,3,4); 2542 */ 2543 EXTEND_MORTAL(nelems); 2544 for (svp = relem; svp <= lastrelem; svp += 2) 2545 *svp = sv_mortalcopy_flags(*svp, 2546 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL); 2547 } 2548 else if (PL_op->op_private & OPpASSIGN_COMMON_AGG) { 2549 /* for possible commonality, e.g. 2550 * %h = ($h{a},1) 2551 * avoid premature freeing RHS keys by mortalising 2552 * them. 2553 * For a magic element, make a copy so that its magic is 2554 * called *before* the hash is emptied (which may affect 2555 * a tied value for example). 2556 * In theory we should check for magic keys in all 2557 * cases, not just under OPpASSIGN_COMMON_AGG, but in 2558 * practice, !OPpASSIGN_COMMON_AGG implies only 2559 * constants or padtmps on the RHS. 
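 * A hedged sketch of the sort of magic-key case this guards against
 * (purely illustrative; package and variable names are invented):
 *
 *     our %h = (a => "old");
 *     sub TIESCALAR { bless {}, shift }
 *     sub FETCH     { $h{a} }
 *     tie my $k, 'main';
 *     %h = ($k, "new");
 *
 * Copying $k up front makes its FETCH run while %h still holds the
 * old contents, rather than after the hash has been emptied.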
2560 */ 2561 EXTEND_MORTAL(nelems); 2562 for (svp = relem; svp <= lastrelem; svp += 2) { 2563 SV *rsv = *svp; 2564 if (UNLIKELY(SvGMAGICAL(rsv))) { 2565 SSize_t n; 2566 *svp = sv_mortalcopy_flags(*svp, 2567 SV_GMAGIC|SV_DO_COW_SVSETSV|SV_NOSTEAL); 2568 /* allow other branch to continue pushing 2569 * onto tmps stack without checking each time */ 2570 n = (lastrelem - relem) >> 1; 2571 EXTEND_MORTAL(n); 2572 } 2573 else 2574 PL_tmps_stack[++PL_tmps_ix] = 2575 SvREFCNT_inc_simple_NN(rsv); 2576 } 2577 } 2578 2579 if (SvRMAGICAL(hash) || HvUSEDKEYS(hash)) 2580 hv_clear(hash); 2581 2582 /* now assign the keys and values to the hash */ 2583 2584 dirty_tmps = FALSE; 2585 2586 if (UNLIKELY(gimme == G_ARRAY)) { 2587 /* @a = (%h = (...)) etc */ 2588 SV **svp; 2589 SV **topelem = relem; 2590 2591 for (i = 0, svp = relem; svp <= lastrelem; i++, svp++) { 2592 SV *key = *svp++; 2593 SV *val = *svp; 2594 /* remove duplicates from list we return */ 2595 if (!hv_exists_ent(hash, key, 0)) { 2596 /* copy key back: possibly to an earlier 2597 * stack location if we encountered dups earlier, 2598 * The values will be updated later 2599 */ 2600 *topelem = key; 2601 topelem += 2; 2602 } 2603 /* A tied store won't take ownership of val, so keep 2604 * the 1 refcnt on the tmps stack; otherwise disarm 2605 * the tmps stack entry */ 2606 if (hv_store_ent(hash, key, val, 0)) 2607 PL_tmps_stack[tmps_base + i] = &PL_sv_undef; 2608 else 2609 dirty_tmps = TRUE; 2610 /* hv_store_ent() may have added set magic to val */; 2611 SvSETMAGIC(val); 2612 } 2613 if (topelem < svp) { 2614 /* at this point we have removed the duplicate key/value 2615 * pairs from the stack, but the remaining values may be 2616 * wrong; i.e. with (a 1 a 2 b 3) on the stack we've removed 2617 * the (a 2), but the stack now probably contains 2618 * (a <freed> b 3), because { hv_save(a,1); hv_save(a,2) } 2619 * obliterates the earlier key. So refresh all values. */ 2620 lastrelem = topelem - 1; 2621 while (relem < lastrelem) { 2622 HE *he; 2623 he = hv_fetch_ent(hash, *relem++, 0, 0); 2624 *relem++ = (he ? HeVAL(he) : &PL_sv_undef); 2625 } 2626 } 2627 } 2628 else { 2629 SV **svp; 2630 for (i = 0, svp = relem; svp <= lastrelem; i++, svp++) { 2631 SV *key = *svp++; 2632 SV *val = *svp; 2633 if (hv_store_ent(hash, key, val, 0)) 2634 PL_tmps_stack[tmps_base + i] = &PL_sv_undef; 2635 else 2636 dirty_tmps = TRUE; 2637 /* hv_store_ent() may have added set magic to val */; 2638 SvSETMAGIC(val); 2639 } 2640 } 2641 2642 if (dirty_tmps) { 2643 /* there are still some 'live' recounts on the tmps stack 2644 * - usually caused by storing into a tied hash. So let 2645 * free_tmps() do the proper but slow job later. 2646 * Just disarm hash refcount: see comments below about leak 2647 */ 2648 PL_tmps_stack[tmps_base - 1] = &PL_sv_undef; 2649 } 2650 else { 2651 /* Quietly remove all the SVs from the tmps stack slots, 2652 * since hash has now taken ownership of the refcnt. 
2653 * Also remove hash: which will now leak if we die before 2654 * the SvREFCNT_dec_NN(hash) below */ 2655 if (UNLIKELY(PL_tmps_ix >= tmps_base + nelems)) 2656 Move(&PL_tmps_stack[tmps_base + nelems], 2657 &PL_tmps_stack[tmps_base - 1], 2658 PL_tmps_ix - (tmps_base + nelems) + 1, 2659 SV*); 2660 PL_tmps_ix -= (nelems + 1); 2661 } 2662 2663 SvREFCNT_dec_NN(hash); 2664 2665 relem = lastrelem + 1; 2666 goto no_relems; 2667 } 2668 2669 default: 2670 if (!SvIMMORTAL(lsv)) { 2671 SV *ref; 2672 2673 if (UNLIKELY( 2674 SvTEMP(lsv) && !SvSMAGICAL(lsv) && SvREFCNT(lsv) == 1 && 2675 (!isGV_with_GP(lsv) || SvFAKE(lsv)) && ckWARN(WARN_MISC) 2676 )) 2677 Perl_warner(aTHX_ 2678 packWARN(WARN_MISC), 2679 "Useless assignment to a temporary" 2680 ); 2681 2682 /* avoid freeing $$lsv if it might be needed for further 2683 * elements, e.g. ($ref, $foo) = (1, $$ref) */ 2684 if ( SvROK(lsv) 2685 && ( ((ref = SvRV(lsv)), SvREFCNT(ref)) == 1) 2686 && lelem <= lastlelem 2687 ) { 2688 SSize_t ix; 2689 SvREFCNT_inc_simple_void_NN(ref); 2690 /* an unrolled sv_2mortal */ 2691 ix = ++PL_tmps_ix; 2692 if (UNLIKELY(ix >= PL_tmps_max)) 2693 /* speculatively grow enough to cover other 2694 * possible refs */ 2695 (void)tmps_grow_p(ix + (lastlelem - lelem)); 2696 PL_tmps_stack[ix] = ref; 2697 } 2698 2699 sv_setsv(lsv, *relem); 2700 *relem = lsv; 2701 SvSETMAGIC(lsv); 2702 } 2703 if (++relem > lastrelem) 2704 goto no_relems; 2705 break; 2706 } /* switch */ 2707 } /* while */ 2708 2709 2710 no_relems: 2711 2712 /* simplified lelem loop for when there are no relems left */ 2713 while (LIKELY(lelem <= lastlelem)) { 2714 SV *lsv = *lelem++; 2715 2716 TAINT_NOT; /* Each item stands on its own, taintwise. */ 2717 2718 if (UNLIKELY(!lsv)) { 2719 lsv = *lelem++; 2720 ASSUME(SvTYPE(lsv) == SVt_PVAV); 2721 } 2722 2723 switch (SvTYPE(lsv)) { 2724 case SVt_PVAV: 2725 if (SvRMAGICAL(lsv) || AvFILLp((SV*)lsv) >= 0) { 2726 av_clear((AV*)lsv); 2727 if (UNLIKELY(PL_delaymagic & DM_ARRAY_ISA)) 2728 SvSETMAGIC(lsv); 2729 } 2730 break; 2731 2732 case SVt_PVHV: 2733 if (SvRMAGICAL(lsv) || HvUSEDKEYS((HV*)lsv)) 2734 hv_clear((HV*)lsv); 2735 break; 2736 2737 default: 2738 if (!SvIMMORTAL(lsv)) { 2739 sv_set_undef(lsv); 2740 SvSETMAGIC(lsv); 2741 } 2742 *relem++ = lsv; 2743 break; 2744 } /* switch */ 2745 } /* while */ 2746 2747 TAINT_NOT; /* result of list assign isn't tainted */ 2748 2749 if (UNLIKELY(PL_delaymagic & ~DM_DELAY)) { 2750 /* Will be used to set PL_tainting below */ 2751 Uid_t tmp_uid = PerlProc_getuid(); 2752 Uid_t tmp_euid = PerlProc_geteuid(); 2753 Gid_t tmp_gid = PerlProc_getgid(); 2754 Gid_t tmp_egid = PerlProc_getegid(); 2755 2756 /* XXX $> et al currently silently ignore failures */ 2757 if (PL_delaymagic & DM_UID) { 2758 #ifdef HAS_SETRESUID 2759 PERL_UNUSED_RESULT( 2760 setresuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid : (Uid_t)-1, 2761 (PL_delaymagic & DM_EUID) ? PL_delaymagic_euid : (Uid_t)-1, 2762 (Uid_t)-1)); 2763 #elif defined(HAS_SETREUID) 2764 PERL_UNUSED_RESULT( 2765 setreuid((PL_delaymagic & DM_RUID) ? PL_delaymagic_uid : (Uid_t)-1, 2766 (PL_delaymagic & DM_EUID) ? 
PL_delaymagic_euid : (Uid_t)-1)); 2767 #else 2768 # ifdef HAS_SETRUID 2769 if ((PL_delaymagic & DM_UID) == DM_RUID) { 2770 PERL_UNUSED_RESULT(setruid(PL_delaymagic_uid)); 2771 PL_delaymagic &= ~DM_RUID; 2772 } 2773 # endif /* HAS_SETRUID */ 2774 # ifdef HAS_SETEUID 2775 if ((PL_delaymagic & DM_UID) == DM_EUID) { 2776 PERL_UNUSED_RESULT(seteuid(PL_delaymagic_euid)); 2777 PL_delaymagic &= ~DM_EUID; 2778 } 2779 # endif /* HAS_SETEUID */ 2780 if (PL_delaymagic & DM_UID) { 2781 if (PL_delaymagic_uid != PL_delaymagic_euid) 2782 DIE(aTHX_ "No setreuid available"); 2783 PERL_UNUSED_RESULT(PerlProc_setuid(PL_delaymagic_uid)); 2784 } 2785 #endif /* HAS_SETRESUID */ 2786 2787 tmp_uid = PerlProc_getuid(); 2788 tmp_euid = PerlProc_geteuid(); 2789 } 2790 /* XXX $> et al currently silently ignore failures */ 2791 if (PL_delaymagic & DM_GID) { 2792 #ifdef HAS_SETRESGID 2793 PERL_UNUSED_RESULT( 2794 setresgid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid : (Gid_t)-1, 2795 (PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1, 2796 (Gid_t)-1)); 2797 #elif defined(HAS_SETREGID) 2798 PERL_UNUSED_RESULT( 2799 setregid((PL_delaymagic & DM_RGID) ? PL_delaymagic_gid : (Gid_t)-1, 2800 (PL_delaymagic & DM_EGID) ? PL_delaymagic_egid : (Gid_t)-1)); 2801 #else 2802 # ifdef HAS_SETRGID 2803 if ((PL_delaymagic & DM_GID) == DM_RGID) { 2804 PERL_UNUSED_RESULT(setrgid(PL_delaymagic_gid)); 2805 PL_delaymagic &= ~DM_RGID; 2806 } 2807 # endif /* HAS_SETRGID */ 2808 # ifdef HAS_SETEGID 2809 if ((PL_delaymagic & DM_GID) == DM_EGID) { 2810 PERL_UNUSED_RESULT(setegid(PL_delaymagic_egid)); 2811 PL_delaymagic &= ~DM_EGID; 2812 } 2813 # endif /* HAS_SETEGID */ 2814 if (PL_delaymagic & DM_GID) { 2815 if (PL_delaymagic_gid != PL_delaymagic_egid) 2816 DIE(aTHX_ "No setregid available"); 2817 PERL_UNUSED_RESULT(PerlProc_setgid(PL_delaymagic_gid)); 2818 } 2819 #endif /* HAS_SETRESGID */ 2820 2821 tmp_gid = PerlProc_getgid(); 2822 tmp_egid = PerlProc_getegid(); 2823 } 2824 TAINTING_set( TAINTING_get | (tmp_uid && (tmp_euid != tmp_uid || tmp_egid != tmp_gid)) ); 2825 #ifdef NO_TAINT_SUPPORT 2826 PERL_UNUSED_VAR(tmp_uid); 2827 PERL_UNUSED_VAR(tmp_euid); 2828 PERL_UNUSED_VAR(tmp_gid); 2829 PERL_UNUSED_VAR(tmp_egid); 2830 #endif 2831 } 2832 PL_delaymagic = old_delaymagic; 2833 2834 if (gimme == G_VOID) 2835 SP = firstrelem - 1; 2836 else if (gimme == G_SCALAR) { 2837 SP = firstrelem; 2838 EXTEND(SP,1); 2839 if (PL_op->op_private & OPpASSIGN_TRUEBOOL) 2840 SETs((firstlelem - firstrelem) ? &PL_sv_yes : &PL_sv_zero); 2841 else { 2842 dTARGET; 2843 SETi(firstlelem - firstrelem); 2844 } 2845 } 2846 else 2847 SP = relem - 1; 2848 2849 RETURN; 2850 } 2851 2852 PP(pp_qr) 2853 { 2854 dSP; 2855 PMOP * const pm = cPMOP; 2856 REGEXP * rx = PM_GETRE(pm); 2857 regexp *prog = ReANY(rx); 2858 SV * const pkg = RXp_ENGINE(prog)->qr_package(aTHX_ (rx)); 2859 SV * const rv = sv_newmortal(); 2860 CV **cvp; 2861 CV *cv; 2862 2863 SvUPGRADE(rv, SVt_IV); 2864 /* For a subroutine describing itself as "This is a hacky workaround" I'm 2865 loathe to use it here, but it seems to be the right fix. Or close. 2866 The key part appears to be that it's essential for pp_qr to return a new 2867 object (SV), which implies that there needs to be an effective way to 2868 generate a new SV from the existing SV that is pre-compiled in the 2869 optree. 
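       As a purely illustrative aside (not part of the original note): in

           sub make { qr/foo/ }
           my $a = make();
           my $b = make();
           bless $a, 'MyRe';      # must not affect $b

       both calls execute the same compiled pattern from the optree, yet
       each must hand back an independent SV, otherwise blessing or
       tainting one return value would leak into the other.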
*/ 2870 SvRV_set(rv, MUTABLE_SV(reg_temp_copy(NULL, rx))); 2871 SvROK_on(rv); 2872 2873 cvp = &( ReANY((REGEXP *)SvRV(rv))->qr_anoncv); 2874 if (UNLIKELY((cv = *cvp) && CvCLONE(*cvp))) { 2875 *cvp = cv_clone(cv); 2876 SvREFCNT_dec_NN(cv); 2877 } 2878 2879 if (pkg) { 2880 HV *const stash = gv_stashsv(pkg, GV_ADD); 2881 SvREFCNT_dec_NN(pkg); 2882 (void)sv_bless(rv, stash); 2883 } 2884 2885 if (UNLIKELY(RXp_ISTAINTED(prog))) { 2886 SvTAINTED_on(rv); 2887 SvTAINTED_on(SvRV(rv)); 2888 } 2889 XPUSHs(rv); 2890 RETURN; 2891 } 2892 2893 STATIC bool 2894 S_are_we_in_Debug_EXECUTE_r(pTHX) 2895 { 2896 /* Given a 'use re' is in effect, does it ask for outputting execution 2897 * debug info? 2898 * 2899 * This is separated from the sole place it's called, an inline function, 2900 * because it is the large-ish slow portion of the function */ 2901 2902 DECLARE_AND_GET_RE_DEBUG_FLAGS_NON_REGEX; 2903 2904 return cBOOL(RE_DEBUG_FLAG(RE_DEBUG_EXECUTE_MASK)); 2905 } 2906 2907 PERL_STATIC_INLINE bool 2908 S_should_we_output_Debug_r(pTHX_ regexp *prog) 2909 { 2910 PERL_ARGS_ASSERT_SHOULD_WE_OUTPUT_DEBUG_R; 2911 2912 /* pp_match can output regex debugging info. This function returns a 2913 * boolean as to whether or not it should. 2914 * 2915 * Under -Dr, it should. Any reasonable compiler will optimize this bit of 2916 * code away on non-debugging builds. */ 2917 if (UNLIKELY(DEBUG_r_TEST)) { 2918 return TRUE; 2919 } 2920 2921 /* If the regex engine is using the non-debugging execution routine, then 2922 * no debugging should be output. Same if the field is NULL that pluggable 2923 * engines are not supposed to fill. */ 2924 if ( LIKELY(prog->engine->exec == &Perl_regexec_flags) 2925 || UNLIKELY(prog->engine->op_comp == NULL)) 2926 { 2927 return FALSE; 2928 } 2929 2930 /* Otherwise have to check */ 2931 return S_are_we_in_Debug_EXECUTE_r(aTHX); 2932 } 2933 2934 PP(pp_match) 2935 { 2936 dSP; dTARG; 2937 PMOP *pm = cPMOP; 2938 PMOP *dynpm = pm; 2939 const char *s; 2940 const char *strend; 2941 SSize_t curpos = 0; /* initial pos() or current $+[0] */ 2942 I32 global; 2943 U8 r_flags = 0; 2944 const char *truebase; /* Start of string */ 2945 REGEXP *rx = PM_GETRE(pm); 2946 regexp *prog = ReANY(rx); 2947 bool rxtainted; 2948 const U8 gimme = GIMME_V; 2949 STRLEN len; 2950 const I32 oldsave = PL_savestack_ix; 2951 I32 had_zerolen = 0; 2952 MAGIC *mg = NULL; 2953 2954 if (PL_op->op_flags & OPf_STACKED) 2955 TARG = POPs; 2956 else { 2957 if (ARGTARG) 2958 GETTARGET; 2959 else { 2960 TARG = DEFSV; 2961 } 2962 EXTEND(SP,1); 2963 } 2964 2965 PUTBACK; /* EVAL blocks need stack_sp. */ 2966 /* Skip get-magic if this is a qr// clone, because regcomp has 2967 already done it. */ 2968 truebase = prog->mother_re 2969 ? SvPV_nomg_const(TARG, len) 2970 : SvPV_const(TARG, len); 2971 if (!truebase) 2972 DIE(aTHX_ "panic: pp_match"); 2973 strend = truebase + len; 2974 rxtainted = (RXp_ISTAINTED(prog) || 2975 (TAINT_get && (pm->op_pmflags & PMf_RETAINT))); 2976 TAINT_NOT; 2977 2978 /* We need to know this in case we fail out early - pos() must be reset */ 2979 global = dynpm->op_pmflags & PMf_GLOBAL; 2980 2981 /* PMdf_USED is set after a ?? matches once */ 2982 if ( 2983 #ifdef USE_ITHREADS 2984 SvREADONLY(PL_regex_pad[pm->op_pmoffset]) 2985 #else 2986 pm->op_pmflags & PMf_USED 2987 #endif 2988 ) { 2989 if (UNLIKELY(should_we_output_Debug_r(prog))) { 2990 PerlIO_printf(Perl_debug_log, "?? 
already matched once"); 2991 } 2992 goto nope; 2993 } 2994 2995 /* handle the empty pattern */ 2996 if (!RX_PRELEN(rx) && PL_curpm && !prog->mother_re) { 2997 if (PL_curpm == PL_reg_curpm) { 2998 if (PL_curpm_under) { 2999 if (PL_curpm_under == PL_reg_curpm) { 3000 Perl_croak(aTHX_ "Infinite recursion via empty pattern"); 3001 } else { 3002 pm = PL_curpm_under; 3003 } 3004 } 3005 } else { 3006 pm = PL_curpm; 3007 } 3008 rx = PM_GETRE(pm); 3009 prog = ReANY(rx); 3010 } 3011 3012 if (RXp_MINLEN(prog) >= 0 && (STRLEN)RXp_MINLEN(prog) > len) { 3013 if (UNLIKELY(should_we_output_Debug_r(prog))) { 3014 PerlIO_printf(Perl_debug_log, 3015 "String shorter than min possible regex match (%zd < %zd)\n", 3016 len, RXp_MINLEN(prog)); 3017 } 3018 goto nope; 3019 } 3020 3021 /* get pos() if //g */ 3022 if (global) { 3023 mg = mg_find_mglob(TARG); 3024 if (mg && mg->mg_len >= 0) { 3025 curpos = MgBYTEPOS(mg, TARG, truebase, len); 3026 /* last time pos() was set, it was zero-length match */ 3027 if (mg->mg_flags & MGf_MINMATCH) 3028 had_zerolen = 1; 3029 } 3030 } 3031 3032 #ifdef PERL_SAWAMPERSAND 3033 if ( RXp_NPARENS(prog) 3034 || PL_sawampersand 3035 || (RXp_EXTFLAGS(prog) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY)) 3036 || (dynpm->op_pmflags & PMf_KEEPCOPY) 3037 ) 3038 #endif 3039 { 3040 r_flags |= (REXEC_COPY_STR|REXEC_COPY_SKIP_PRE); 3041 /* in @a =~ /(.)/g, we iterate multiple times, but copy the buffer 3042 * only on the first iteration. Therefore we need to copy $' as well 3043 * as $&, to make the rest of the string available for captures in 3044 * subsequent iterations */ 3045 if (! (global && gimme == G_ARRAY)) 3046 r_flags |= REXEC_COPY_SKIP_POST; 3047 }; 3048 #ifdef PERL_SAWAMPERSAND 3049 if (dynpm->op_pmflags & PMf_KEEPCOPY) 3050 /* handle KEEPCOPY in pmop but not rx, eg $r=qr/a/; /$r/p */ 3051 r_flags &= ~(REXEC_COPY_SKIP_PRE|REXEC_COPY_SKIP_POST); 3052 #endif 3053 3054 s = truebase; 3055 3056 play_it_again: 3057 if (global) 3058 s = truebase + curpos; 3059 3060 if (!CALLREGEXEC(rx, (char*)s, (char *)strend, (char*)truebase, 3061 had_zerolen, TARG, NULL, r_flags)) 3062 goto nope; 3063 3064 PL_curpm = pm; 3065 if (dynpm->op_pmflags & PMf_ONCE) 3066 #ifdef USE_ITHREADS 3067 SvREADONLY_on(PL_regex_pad[dynpm->op_pmoffset]); 3068 #else 3069 dynpm->op_pmflags |= PMf_USED; 3070 #endif 3071 3072 if (rxtainted) 3073 RXp_MATCH_TAINTED_on(prog); 3074 TAINT_IF(RXp_MATCH_TAINTED(prog)); 3075 3076 /* update pos */ 3077 3078 if (global && (gimme != G_ARRAY || (dynpm->op_pmflags & PMf_CONTINUE))) { 3079 if (!mg) 3080 mg = sv_magicext_mglob(TARG); 3081 MgBYTEPOS_set(mg, TARG, truebase, RXp_OFFS(prog)[0].end); 3082 if (RXp_ZERO_LEN(prog)) 3083 mg->mg_flags |= MGf_MINMATCH; 3084 else 3085 mg->mg_flags &= ~MGf_MINMATCH; 3086 } 3087 3088 if ((!RXp_NPARENS(prog) && !global) || gimme != G_ARRAY) { 3089 LEAVE_SCOPE(oldsave); 3090 RETPUSHYES; 3091 } 3092 3093 /* push captures on stack */ 3094 3095 { 3096 const I32 nparens = RXp_NPARENS(prog); 3097 I32 i = (global && !nparens) ? 1 : 0; 3098 3099 SPAGAIN; /* EVAL blocks could move the stack. 
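 * (Illustrative only, with invented names: in
 *     "ab" =~ /a(?{ push @saved, 1 .. 1000 })b/
 * the (?{...}) block can push enough values to force the argument
 * stack to be reallocated, so SP has to be re-fetched before the
 * captures are pushed below.)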
*/ 3100 EXTEND(SP, nparens + i); 3101 EXTEND_MORTAL(nparens + i); 3102 for (i = !i; i <= nparens; i++) { 3103 PUSHs(sv_newmortal()); 3104 if (LIKELY((RXp_OFFS(prog)[i].start != -1) 3105 && RXp_OFFS(prog)[i].end != -1 )) 3106 { 3107 const I32 len = RXp_OFFS(prog)[i].end - RXp_OFFS(prog)[i].start; 3108 const char * const s = RXp_OFFS(prog)[i].start + truebase; 3109 if (UNLIKELY( RXp_OFFS(prog)[i].end < 0 3110 || RXp_OFFS(prog)[i].start < 0 3111 || len < 0 3112 || len > strend - s) 3113 ) 3114 DIE(aTHX_ "panic: pp_match start/end pointers, i=%ld, " 3115 "start=%ld, end=%ld, s=%p, strend=%p, len=%" UVuf, 3116 (long) i, (long) RXp_OFFS(prog)[i].start, 3117 (long)RXp_OFFS(prog)[i].end, s, strend, (UV) len); 3118 sv_setpvn(*SP, s, len); 3119 if (DO_UTF8(TARG) && is_utf8_string((U8*)s, len)) 3120 SvUTF8_on(*SP); 3121 } 3122 } 3123 if (global) { 3124 curpos = (UV)RXp_OFFS(prog)[0].end; 3125 had_zerolen = RXp_ZERO_LEN(prog); 3126 PUTBACK; /* EVAL blocks may use stack */ 3127 r_flags |= REXEC_IGNOREPOS | REXEC_NOT_FIRST; 3128 goto play_it_again; 3129 } 3130 LEAVE_SCOPE(oldsave); 3131 RETURN; 3132 } 3133 NOT_REACHED; /* NOTREACHED */ 3134 3135 nope: 3136 if (global && !(dynpm->op_pmflags & PMf_CONTINUE)) { 3137 if (!mg) 3138 mg = mg_find_mglob(TARG); 3139 if (mg) 3140 mg->mg_len = -1; 3141 } 3142 LEAVE_SCOPE(oldsave); 3143 if (gimme == G_ARRAY) 3144 RETURN; 3145 RETPUSHNO; 3146 } 3147 3148 OP * 3149 Perl_do_readline(pTHX) 3150 { 3151 dSP; dTARGETSTACKED; 3152 SV *sv; 3153 STRLEN tmplen = 0; 3154 STRLEN offset; 3155 PerlIO *fp; 3156 IO * const io = GvIO(PL_last_in_gv); 3157 const I32 type = PL_op->op_type; 3158 const U8 gimme = GIMME_V; 3159 3160 if (io) { 3161 const MAGIC *const mg = SvTIED_mg((const SV *)io, PERL_MAGIC_tiedscalar); 3162 if (mg) { 3163 Perl_tied_method(aTHX_ SV_CONST(READLINE), SP, MUTABLE_SV(io), mg, gimme, 0); 3164 if (gimme == G_SCALAR) { 3165 SPAGAIN; 3166 SvSetSV_nosteal(TARG, TOPs); 3167 SETTARG; 3168 } 3169 return NORMAL; 3170 } 3171 } 3172 fp = NULL; 3173 if (io) { 3174 fp = IoIFP(io); 3175 if (!fp) { 3176 if (IoFLAGS(io) & IOf_ARGV) { 3177 if (IoFLAGS(io) & IOf_START) { 3178 IoLINES(io) = 0; 3179 if (av_tindex(GvAVn(PL_last_in_gv)) < 0) { 3180 IoFLAGS(io) &= ~IOf_START; 3181 do_open6(PL_last_in_gv, "-", 1, NULL, NULL, 0); 3182 SvTAINTED_off(GvSVn(PL_last_in_gv)); /* previous tainting irrelevant */ 3183 sv_setpvs(GvSVn(PL_last_in_gv), "-"); 3184 SvSETMAGIC(GvSV(PL_last_in_gv)); 3185 fp = IoIFP(io); 3186 goto have_fp; 3187 } 3188 } 3189 fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL); 3190 if (!fp) { /* Note: fp != IoIFP(io) */ 3191 (void)do_close(PL_last_in_gv, FALSE); /* now it does*/ 3192 } 3193 } 3194 else if (type == OP_GLOB) 3195 fp = Perl_start_glob(aTHX_ POPs, io); 3196 } 3197 else if (type == OP_GLOB) 3198 SP--; 3199 else if (IoTYPE(io) == IoTYPE_WRONLY) { 3200 report_wrongway_fh(PL_last_in_gv, '>'); 3201 } 3202 } 3203 if (!fp) { 3204 if ((!io || !(IoFLAGS(io) & IOf_START)) 3205 && ckWARN(WARN_CLOSED) 3206 && type != OP_GLOB) 3207 { 3208 report_evil_fh(PL_last_in_gv); 3209 } 3210 if (gimme == G_SCALAR) { 3211 /* undef TARG, and push that undefined value */ 3212 if (type != OP_RCATLINE) { 3213 sv_set_undef(TARG); 3214 } 3215 PUSHTARG; 3216 } 3217 RETURN; 3218 } 3219 have_fp: 3220 if (gimme == G_SCALAR) { 3221 sv = TARG; 3222 if (type == OP_RCATLINE && SvGMAGICAL(sv)) 3223 mg_get(sv); 3224 if (SvROK(sv)) { 3225 if (type == OP_RCATLINE) 3226 SvPV_force_nomg_nolen(sv); 3227 else 3228 sv_unref(sv); 3229 } 3230 else if (isGV_with_GP(sv)) { 3231 
SvPV_force_nomg_nolen(sv); 3232 } 3233 SvUPGRADE(sv, SVt_PV); 3234 tmplen = SvLEN(sv); /* remember if already alloced */ 3235 if (!tmplen && !SvREADONLY(sv) && !SvIsCOW(sv)) { 3236 /* try short-buffering it. Please update t/op/readline.t 3237 * if you change the growth length. 3238 */ 3239 Sv_Grow(sv, 80); 3240 } 3241 offset = 0; 3242 if (type == OP_RCATLINE && SvOK(sv)) { 3243 if (!SvPOK(sv)) { 3244 SvPV_force_nomg_nolen(sv); 3245 } 3246 offset = SvCUR(sv); 3247 } 3248 } 3249 else { 3250 sv = sv_2mortal(newSV(80)); 3251 offset = 0; 3252 } 3253 3254 /* This should not be marked tainted if the fp is marked clean */ 3255 #define MAYBE_TAINT_LINE(io, sv) \ 3256 if (!(IoFLAGS(io) & IOf_UNTAINT)) { \ 3257 TAINT; \ 3258 SvTAINTED_on(sv); \ 3259 } 3260 3261 /* delay EOF state for a snarfed empty file */ 3262 #define SNARF_EOF(gimme,rs,io,sv) \ 3263 (gimme != G_SCALAR || SvCUR(sv) \ 3264 || (IoFLAGS(io) & IOf_NOLINE) || !RsSNARF(rs)) 3265 3266 for (;;) { 3267 PUTBACK; 3268 if (!sv_gets(sv, fp, offset) 3269 && (type == OP_GLOB 3270 || SNARF_EOF(gimme, PL_rs, io, sv) 3271 || PerlIO_error(fp))) 3272 { 3273 PerlIO_clearerr(fp); 3274 if (IoFLAGS(io) & IOf_ARGV) { 3275 fp = nextargv(PL_last_in_gv, PL_op->op_flags & OPf_SPECIAL); 3276 if (fp) 3277 continue; 3278 (void)do_close(PL_last_in_gv, FALSE); 3279 } 3280 else if (type == OP_GLOB) { 3281 if (!do_close(PL_last_in_gv, FALSE)) { 3282 Perl_ck_warner(aTHX_ packWARN(WARN_GLOB), 3283 "glob failed (child exited with status %d%s)", 3284 (int)(STATUS_CURRENT >> 8), 3285 (STATUS_CURRENT & 0x80) ? ", core dumped" : ""); 3286 } 3287 } 3288 if (gimme == G_SCALAR) { 3289 if (type != OP_RCATLINE) { 3290 SV_CHECK_THINKFIRST_COW_DROP(TARG); 3291 SvOK_off(TARG); 3292 } 3293 SPAGAIN; 3294 PUSHTARG; 3295 } 3296 MAYBE_TAINT_LINE(io, sv); 3297 RETURN; 3298 } 3299 MAYBE_TAINT_LINE(io, sv); 3300 IoLINES(io)++; 3301 IoFLAGS(io) |= IOf_NOLINE; 3302 SvSETMAGIC(sv); 3303 SPAGAIN; 3304 XPUSHs(sv); 3305 if (type == OP_GLOB) { 3306 const char *t1; 3307 Stat_t statbuf; 3308 3309 if (SvCUR(sv) > 0 && SvCUR(PL_rs) > 0) { 3310 char * const tmps = SvEND(sv) - 1; 3311 if (*tmps == *SvPVX_const(PL_rs)) { 3312 *tmps = '\0'; 3313 SvCUR_set(sv, SvCUR(sv) - 1); 3314 } 3315 } 3316 for (t1 = SvPVX_const(sv); *t1; t1++) 3317 #ifdef __VMS 3318 if (memCHRs("*%?", *t1)) 3319 #else 3320 if (memCHRs("$&*(){}[]'\";\\|?<>~`", *t1)) 3321 #endif 3322 break; 3323 if (*t1 && PerlLIO_lstat(SvPVX_const(sv), &statbuf) < 0) { 3324 (void)POPs; /* Unmatched wildcard? Chuck it... */ 3325 continue; 3326 } 3327 } else if (SvUTF8(sv)) { /* OP_READLINE, OP_RCATLINE */ 3328 if (ckWARN(WARN_UTF8)) { 3329 const U8 * const s = (const U8*)SvPVX_const(sv) + offset; 3330 const STRLEN len = SvCUR(sv) - offset; 3331 const U8 *f; 3332 3333 if (!is_utf8_string_loc(s, len, &f)) 3334 /* Emulate :encoding(utf8) warning in the same case. */ 3335 Perl_warner(aTHX_ packWARN(WARN_UTF8), 3336 "utf8 \"\\x%02X\" does not map to Unicode", 3337 f < (U8*)SvEND(sv) ? *f : 0); 3338 } 3339 } 3340 if (gimme == G_ARRAY) { 3341 if (SvLEN(sv) - SvCUR(sv) > 20) { 3342 SvPV_shrink_to_cur(sv); 3343 } 3344 sv = sv_2mortal(newSV(80)); 3345 continue; 3346 } 3347 else if (gimme == G_SCALAR && !tmplen && SvLEN(sv) - SvCUR(sv) > 80) { 3348 /* try to reclaim a bit of scalar space (only on 1st alloc) */ 3349 const STRLEN new_len 3350 = SvCUR(sv) < 60 ? 
80 : SvCUR(sv)+40; /* allow some slop */ 3351 SvPV_renew(sv, new_len); 3352 } 3353 RETURN; 3354 } 3355 } 3356 3357 PP(pp_helem) 3358 { 3359 dSP; 3360 HE* he; 3361 SV **svp; 3362 SV * const keysv = POPs; 3363 HV * const hv = MUTABLE_HV(POPs); 3364 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET; 3365 const U32 defer = PL_op->op_private & OPpLVAL_DEFER; 3366 SV *sv; 3367 const bool localizing = PL_op->op_private & OPpLVAL_INTRO; 3368 bool preeminent = TRUE; 3369 3370 if (SvTYPE(hv) != SVt_PVHV) 3371 RETPUSHUNDEF; 3372 3373 if (localizing) { 3374 MAGIC *mg; 3375 HV *stash; 3376 3377 /* If we can determine whether the element exist, 3378 * Try to preserve the existenceness of a tied hash 3379 * element by using EXISTS and DELETE if possible. 3380 * Fallback to FETCH and STORE otherwise. */ 3381 if (SvCANEXISTDELETE(hv)) 3382 preeminent = hv_exists_ent(hv, keysv, 0); 3383 } 3384 3385 he = hv_fetch_ent(hv, keysv, lval && !defer, 0); 3386 svp = he ? &HeVAL(he) : NULL; 3387 if (lval) { 3388 if (!svp || !*svp || *svp == &PL_sv_undef) { 3389 SV* lv; 3390 SV* key2; 3391 if (!defer) { 3392 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv)); 3393 } 3394 lv = sv_newmortal(); 3395 sv_upgrade(lv, SVt_PVLV); 3396 LvTYPE(lv) = 'y'; 3397 sv_magic(lv, key2 = newSVsv(keysv), PERL_MAGIC_defelem, NULL, 0); 3398 SvREFCNT_dec_NN(key2); /* sv_magic() increments refcount */ 3399 LvTARG(lv) = SvREFCNT_inc_simple_NN(hv); 3400 LvTARGLEN(lv) = 1; 3401 PUSHs(lv); 3402 RETURN; 3403 } 3404 if (localizing) { 3405 if (HvNAME_get(hv) && isGV_or_RVCV(*svp)) 3406 save_gp(MUTABLE_GV(*svp), !(PL_op->op_flags & OPf_SPECIAL)); 3407 else if (preeminent) 3408 save_helem_flags(hv, keysv, svp, 3409 (PL_op->op_flags & OPf_SPECIAL) ? 0 : SAVEf_SETMAGIC); 3410 else 3411 SAVEHDELETE(hv, keysv); 3412 } 3413 else if (PL_op->op_private & OPpDEREF) { 3414 PUSHs(vivify_ref(*svp, PL_op->op_private & OPpDEREF)); 3415 RETURN; 3416 } 3417 } 3418 sv = (svp && *svp ? *svp : &PL_sv_undef); 3419 /* Originally this did a conditional C<sv = sv_mortalcopy(sv)>; this 3420 * was to make C<local $tied{foo} = $tied{foo}> possible. 3421 * However, it seems no longer to be needed for that purpose, and 3422 * introduced a new bug: stuff like C<while ($hash{taintedval} =~ /.../g> 3423 * would loop endlessly since the pos magic is getting set on the 3424 * mortal copy and lost. However, the copy has the effect of 3425 * triggering the get magic, and losing it altogether made things like 3426 * c<$tied{foo};> in void context no longer do get magic, which some 3427 * code relied on. Also, delayed triggering of magic on @+ and friends 3428 * meant the original regex may be out of scope by now. So as a 3429 * compromise, do the get magic here. (The MGf_GSKIP flag will stop it 3430 * being called too many times). */ 3431 if (!lval && SvRMAGICAL(hv) && SvGMAGICAL(sv)) 3432 mg_get(sv); 3433 PUSHs(sv); 3434 RETURN; 3435 } 3436 3437 3438 /* a stripped-down version of Perl_softref2xv() for use by 3439 * pp_multideref(), which doesn't use PL_op->op_flags */ 3440 3441 STATIC GV * 3442 S_softref2xv_lite(pTHX_ SV *const sv, const char *const what, 3443 const svtype type) 3444 { 3445 if (PL_op->op_private & HINT_STRICT_REFS) { 3446 if (SvOK(sv)) 3447 Perl_die(aTHX_ PL_no_symref_sv, sv, 3448 (SvPOKp(sv) && SvCUR(sv)>32 ? "..." 
: ""), what);
3449 else
3450 Perl_die(aTHX_ PL_no_usym, what);
3451 }
3452 if (!SvOK(sv))
3453 Perl_die(aTHX_ PL_no_usym, what);
3454 return gv_fetchsv_nomg(sv, GV_ADD, type);
3455 }
3456
3457
3458 /* Handle one or more aggregate derefs and array/hash indexings, e.g.
3459 * $h->{foo} or $a[0]{$key}[$i] or f()->[1]
3460 *
3461 * op_aux points to an array of unions of UV / IV / SV* / PADOFFSET.
3462 * Each of these either contains a set of actions, or an argument, such as
3463 * an IV to use as an array index, or a lexical var to retrieve.
3464 * Several actions are stored per UV; we keep shifting new actions off the
3465 * one UV, and only reload when it becomes zero.
3466 */
3467
3468 PP(pp_multideref)
3469 {
3470 SV *sv = NULL; /* init to avoid spurious 'may be used uninitialized' */
3471 UNOP_AUX_item *items = cUNOP_AUXx(PL_op)->op_aux;
3472 UV actions = items->uv;
3473
3474 assert(actions);
3475 /* this tells find_uninit_var() where we're up to */
3476 PL_multideref_pc = items;
3477
3478 while (1) {
3479 /* there are three main classes of action; the first retrieves
3480 * the initial AV or HV from a variable or the stack; the second
3481 * does the equivalent of an unrolled (/DREFAV, rv2av, aelem),
3482 * the third an unrolled (/DREFHV, rv2hv, helem).
3483 */
3484 switch (actions & MDEREF_ACTION_MASK) {
3485
3486 case MDEREF_reload:
3487 actions = (++items)->uv;
3488 continue;
3489
3490 case MDEREF_AV_padav_aelem: /* $lex[...] */
3491 sv = PAD_SVl((++items)->pad_offset);
3492 goto do_AV_aelem;
3493
3494 case MDEREF_AV_gvav_aelem: /* $pkg[...] */
3495 sv = UNOP_AUX_item_sv(++items);
3496 assert(isGV_with_GP(sv));
3497 sv = (SV*)GvAVn((GV*)sv);
3498 goto do_AV_aelem;
3499
3500 case MDEREF_AV_pop_rv2av_aelem: /* expr->[...] */
3501 {
3502 dSP;
3503 sv = POPs;
3504 PUTBACK;
3505 goto do_AV_rv2av_aelem;
3506 }
3507
3508 case MDEREF_AV_gvsv_vivify_rv2av_aelem: /* $pkg->[...] */
3509 sv = UNOP_AUX_item_sv(++items);
3510 assert(isGV_with_GP(sv));
3511 sv = GvSVn((GV*)sv);
3512 goto do_AV_vivify_rv2av_aelem;
3513
3514 case MDEREF_AV_padsv_vivify_rv2av_aelem: /* $lex->[...] */
3515 sv = PAD_SVl((++items)->pad_offset);
3516 /* FALLTHROUGH */
3517
3518 do_AV_vivify_rv2av_aelem:
3519 case MDEREF_AV_vivify_rv2av_aelem: /* vivify, ->[...] */
3520 /* this is the OPpDEREF action normally found at the end of
3521 * ops like aelem, helem, rv2sv */
3522 sv = vivify_ref(sv, OPpDEREF_AV);
3523 /* FALLTHROUGH */
3524
3525 do_AV_rv2av_aelem:
3526 /* this is basically a copy of pp_rv2av when it just has the
3527 * sKR/1 flags */
3528 SvGETMAGIC(sv);
3529 if (LIKELY(SvROK(sv))) {
3530 if (UNLIKELY(SvAMAGIC(sv))) {
3531 sv = amagic_deref_call(sv, to_av_amg);
3532 }
3533 sv = SvRV(sv);
3534 if (UNLIKELY(SvTYPE(sv) != SVt_PVAV))
3535 DIE(aTHX_ "Not an ARRAY reference");
3536 }
3537 else if (SvTYPE(sv) != SVt_PVAV) {
3538 if (!isGV_with_GP(sv))
3539 sv = (SV*)S_softref2xv_lite(aTHX_ sv, "an ARRAY", SVt_PVAV);
3540 sv = MUTABLE_SV(GvAVn((GV*)sv));
3541 }
3542 /* FALLTHROUGH */
3543
3544 do_AV_aelem:
3545 {
3546 /* retrieve the key; this may be either a lexical or package
3547 * var (whose index/ptr is stored as an item) or a signed
3548 * integer constant stored as an item.
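 * As an illustrative mapping (assumed, not compiler output), for an
 * expression that has been folded into OP_MULTIDEREF:
 *
 *     $lex[3]         - MDEREF_INDEX_const:  items->iv == 3
 *     $lex[$i]        - MDEREF_INDEX_padsv:  pad offset of $i
 *     $lex[$main::i]  - MDEREF_INDEX_gvsv:   GV of $main::i
 *
 * matching the MDEREF_INDEX_* cases in the switch just below.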
3549 */ 3550 SV *elemsv; 3551 IV elem = 0; /* to shut up stupid compiler warnings */ 3552 3553 3554 assert(SvTYPE(sv) == SVt_PVAV); 3555 3556 switch (actions & MDEREF_INDEX_MASK) { 3557 case MDEREF_INDEX_none: 3558 goto finish; 3559 case MDEREF_INDEX_const: 3560 elem = (++items)->iv; 3561 break; 3562 case MDEREF_INDEX_padsv: 3563 elemsv = PAD_SVl((++items)->pad_offset); 3564 goto check_elem; 3565 case MDEREF_INDEX_gvsv: 3566 elemsv = UNOP_AUX_item_sv(++items); 3567 assert(isGV_with_GP(elemsv)); 3568 elemsv = GvSVn((GV*)elemsv); 3569 check_elem: 3570 if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv) 3571 && ckWARN(WARN_MISC))) 3572 Perl_warner(aTHX_ packWARN(WARN_MISC), 3573 "Use of reference \"%" SVf "\" as array index", 3574 SVfARG(elemsv)); 3575 /* the only time that S_find_uninit_var() needs this 3576 * is to determine which index value triggered the 3577 * undef warning. So just update it here. Note that 3578 * since we don't save and restore this var (e.g. for 3579 * tie or overload execution), its value will be 3580 * meaningless apart from just here */ 3581 PL_multideref_pc = items; 3582 elem = SvIV(elemsv); 3583 break; 3584 } 3585 3586 3587 /* this is basically a copy of pp_aelem with OPpDEREF skipped */ 3588 3589 if (!(actions & MDEREF_FLAG_last)) { 3590 SV** svp = av_fetch((AV*)sv, elem, 1); 3591 if (!svp || ! (sv=*svp)) 3592 DIE(aTHX_ PL_no_aelem, elem); 3593 break; 3594 } 3595 3596 if (PL_op->op_private & 3597 (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE)) 3598 { 3599 if (PL_op->op_private & OPpMULTIDEREF_EXISTS) { 3600 sv = av_exists((AV*)sv, elem) ? &PL_sv_yes : &PL_sv_no; 3601 } 3602 else { 3603 I32 discard = (GIMME_V == G_VOID) ? G_DISCARD : 0; 3604 sv = av_delete((AV*)sv, elem, discard); 3605 if (discard) 3606 return NORMAL; 3607 if (!sv) 3608 sv = &PL_sv_undef; 3609 } 3610 } 3611 else { 3612 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET; 3613 const U32 defer = PL_op->op_private & OPpLVAL_DEFER; 3614 const bool localizing = PL_op->op_private & OPpLVAL_INTRO; 3615 bool preeminent = TRUE; 3616 AV *const av = (AV*)sv; 3617 SV** svp; 3618 3619 if (UNLIKELY(localizing)) { 3620 MAGIC *mg; 3621 HV *stash; 3622 3623 /* If we can determine whether the element exist, 3624 * Try to preserve the existenceness of a tied array 3625 * element by using EXISTS and DELETE if possible. 3626 * Fallback to FETCH and STORE otherwise. */ 3627 if (SvCANEXISTDELETE(av)) 3628 preeminent = av_exists(av, elem); 3629 } 3630 3631 svp = av_fetch(av, elem, lval && !defer); 3632 3633 if (lval) { 3634 if (!svp || !(sv = *svp)) { 3635 IV len; 3636 if (!defer) 3637 DIE(aTHX_ PL_no_aelem, elem); 3638 len = av_tindex(av); 3639 /* Resolve a negative index that falls within 3640 * the array. Leave it negative it if falls 3641 * outside the array. */ 3642 if (elem < 0 && len + elem >= 0) 3643 elem = len + elem; 3644 if (elem >= 0 && elem <= len) 3645 /* Falls within the array. */ 3646 sv = av_nonelem(av,elem); 3647 else 3648 /* Falls outside the array. If it is neg- 3649 ative, magic_setdefelem will use the 3650 index for error reporting. */ 3651 sv = sv_2mortal(newSVavdefelem(av,elem,1)); 3652 } 3653 else { 3654 if (UNLIKELY(localizing)) { 3655 if (preeminent) { 3656 save_aelem(av, elem, svp); 3657 sv = *svp; /* may have changed */ 3658 } 3659 else 3660 SAVEADELETE(av, elem); 3661 } 3662 } 3663 } 3664 else { 3665 sv = (svp ? 
*svp : &PL_sv_undef); 3666 /* see note in pp_helem() */ 3667 if (SvRMAGICAL(av) && SvGMAGICAL(sv)) 3668 mg_get(sv); 3669 } 3670 } 3671 3672 } 3673 finish: 3674 { 3675 dSP; 3676 XPUSHs(sv); 3677 RETURN; 3678 } 3679 /* NOTREACHED */ 3680 3681 3682 3683 3684 case MDEREF_HV_padhv_helem: /* $lex{...} */ 3685 sv = PAD_SVl((++items)->pad_offset); 3686 goto do_HV_helem; 3687 3688 case MDEREF_HV_gvhv_helem: /* $pkg{...} */ 3689 sv = UNOP_AUX_item_sv(++items); 3690 assert(isGV_with_GP(sv)); 3691 sv = (SV*)GvHVn((GV*)sv); 3692 goto do_HV_helem; 3693 3694 case MDEREF_HV_pop_rv2hv_helem: /* expr->{...} */ 3695 { 3696 dSP; 3697 sv = POPs; 3698 PUTBACK; 3699 goto do_HV_rv2hv_helem; 3700 } 3701 3702 case MDEREF_HV_gvsv_vivify_rv2hv_helem: /* $pkg->{...} */ 3703 sv = UNOP_AUX_item_sv(++items); 3704 assert(isGV_with_GP(sv)); 3705 sv = GvSVn((GV*)sv); 3706 goto do_HV_vivify_rv2hv_helem; 3707 3708 case MDEREF_HV_padsv_vivify_rv2hv_helem: /* $lex->{...} */ 3709 sv = PAD_SVl((++items)->pad_offset); 3710 /* FALLTHROUGH */ 3711 3712 do_HV_vivify_rv2hv_helem: 3713 case MDEREF_HV_vivify_rv2hv_helem: /* vivify, ->{...} */ 3714 /* this is the OPpDEREF action normally found at the end of 3715 * ops like aelem, helem, rv2sv */ 3716 sv = vivify_ref(sv, OPpDEREF_HV); 3717 /* FALLTHROUGH */ 3718 3719 do_HV_rv2hv_helem: 3720 /* this is basically a copy of pp_rv2hv when it just has the 3721 * sKR/1 flags (and pp_rv2hv is aliased to pp_rv2av) */ 3722 3723 SvGETMAGIC(sv); 3724 if (LIKELY(SvROK(sv))) { 3725 if (UNLIKELY(SvAMAGIC(sv))) { 3726 sv = amagic_deref_call(sv, to_hv_amg); 3727 } 3728 sv = SvRV(sv); 3729 if (UNLIKELY(SvTYPE(sv) != SVt_PVHV)) 3730 DIE(aTHX_ "Not a HASH reference"); 3731 } 3732 else if (SvTYPE(sv) != SVt_PVHV) { 3733 if (!isGV_with_GP(sv)) 3734 sv = (SV*)S_softref2xv_lite(aTHX_ sv, "a HASH", SVt_PVHV); 3735 sv = MUTABLE_SV(GvHVn((GV*)sv)); 3736 } 3737 /* FALLTHROUGH */ 3738 3739 do_HV_helem: 3740 { 3741 /* retrieve the key; this may be either a lexical / package 3742 * var or a string constant, whose index/ptr is stored as an 3743 * item 3744 */ 3745 SV *keysv = NULL; /* to shut up stupid compiler warnings */ 3746 3747 assert(SvTYPE(sv) == SVt_PVHV); 3748 3749 switch (actions & MDEREF_INDEX_MASK) { 3750 case MDEREF_INDEX_none: 3751 goto finish; 3752 3753 case MDEREF_INDEX_const: 3754 keysv = UNOP_AUX_item_sv(++items); 3755 break; 3756 3757 case MDEREF_INDEX_padsv: 3758 keysv = PAD_SVl((++items)->pad_offset); 3759 break; 3760 3761 case MDEREF_INDEX_gvsv: 3762 keysv = UNOP_AUX_item_sv(++items); 3763 keysv = GvSVn((GV*)keysv); 3764 break; 3765 } 3766 3767 /* see comment above about setting this var */ 3768 PL_multideref_pc = items; 3769 3770 3771 /* ensure that candidate CONSTs have been HEKified */ 3772 assert( ((actions & MDEREF_INDEX_MASK) != MDEREF_INDEX_const) 3773 || SvTYPE(keysv) >= SVt_PVMG 3774 || !SvOK(keysv) 3775 || SvROK(keysv) 3776 || SvIsCOW_shared_hash(keysv)); 3777 3778 /* this is basically a copy of pp_helem with OPpDEREF skipped */ 3779 3780 if (!(actions & MDEREF_FLAG_last)) { 3781 HE *he = hv_fetch_ent((HV*)sv, keysv, 1, 0); 3782 if (!he || !(sv=HeVAL(he)) || sv == &PL_sv_undef) 3783 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv)); 3784 break; 3785 } 3786 3787 if (PL_op->op_private & 3788 (OPpMULTIDEREF_EXISTS|OPpMULTIDEREF_DELETE)) 3789 { 3790 if (PL_op->op_private & OPpMULTIDEREF_EXISTS) { 3791 sv = hv_exists_ent((HV*)sv, keysv, 0) 3792 ? &PL_sv_yes : &PL_sv_no; 3793 } 3794 else { 3795 I32 discard = (GIMME_V == G_VOID) ? 
G_DISCARD : 0; 3796 sv = hv_delete_ent((HV*)sv, keysv, discard, 0); 3797 if (discard) 3798 return NORMAL; 3799 if (!sv) 3800 sv = &PL_sv_undef; 3801 } 3802 } 3803 else { 3804 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET; 3805 const U32 defer = PL_op->op_private & OPpLVAL_DEFER; 3806 const bool localizing = PL_op->op_private & OPpLVAL_INTRO; 3807 bool preeminent = TRUE; 3808 SV **svp; 3809 HV * const hv = (HV*)sv; 3810 HE* he; 3811 3812 if (UNLIKELY(localizing)) { 3813 MAGIC *mg; 3814 HV *stash; 3815 3816 /* If we can determine whether the element exist, 3817 * Try to preserve the existenceness of a tied hash 3818 * element by using EXISTS and DELETE if possible. 3819 * Fallback to FETCH and STORE otherwise. */ 3820 if (SvCANEXISTDELETE(hv)) 3821 preeminent = hv_exists_ent(hv, keysv, 0); 3822 } 3823 3824 he = hv_fetch_ent(hv, keysv, lval && !defer, 0); 3825 svp = he ? &HeVAL(he) : NULL; 3826 3827 3828 if (lval) { 3829 if (!svp || !(sv = *svp) || sv == &PL_sv_undef) { 3830 SV* lv; 3831 SV* key2; 3832 if (!defer) 3833 DIE(aTHX_ PL_no_helem_sv, SVfARG(keysv)); 3834 lv = sv_newmortal(); 3835 sv_upgrade(lv, SVt_PVLV); 3836 LvTYPE(lv) = 'y'; 3837 sv_magic(lv, key2 = newSVsv(keysv), 3838 PERL_MAGIC_defelem, NULL, 0); 3839 /* sv_magic() increments refcount */ 3840 SvREFCNT_dec_NN(key2); 3841 LvTARG(lv) = SvREFCNT_inc_simple_NN(hv); 3842 LvTARGLEN(lv) = 1; 3843 sv = lv; 3844 } 3845 else { 3846 if (localizing) { 3847 if (HvNAME_get(hv) && isGV_or_RVCV(sv)) 3848 save_gp(MUTABLE_GV(sv), 3849 !(PL_op->op_flags & OPf_SPECIAL)); 3850 else if (preeminent) { 3851 save_helem_flags(hv, keysv, svp, 3852 (PL_op->op_flags & OPf_SPECIAL) 3853 ? 0 : SAVEf_SETMAGIC); 3854 sv = *svp; /* may have changed */ 3855 } 3856 else 3857 SAVEHDELETE(hv, keysv); 3858 } 3859 } 3860 } 3861 else { 3862 sv = (svp && *svp ? *svp : &PL_sv_undef); 3863 /* see note in pp_helem() */ 3864 if (SvRMAGICAL(hv) && SvGMAGICAL(sv)) 3865 mg_get(sv); 3866 } 3867 } 3868 goto finish; 3869 } 3870 3871 } /* switch */ 3872 3873 actions >>= MDEREF_SHIFT; 3874 } /* while */ 3875 /* NOTREACHED */ 3876 } 3877 3878 3879 PP(pp_iter) 3880 { 3881 PERL_CONTEXT *cx; 3882 SV *oldsv; 3883 SV **itersvp; 3884 3885 SV *sv; 3886 AV *av; 3887 IV ix; 3888 IV inc; 3889 3890 cx = CX_CUR(); 3891 itersvp = CxITERVAR(cx); 3892 assert(itersvp); 3893 3894 switch (CxTYPE(cx)) { 3895 3896 case CXt_LOOP_LAZYSV: /* string increment */ 3897 { 3898 SV* cur = cx->blk_loop.state_u.lazysv.cur; 3899 SV *end = cx->blk_loop.state_u.lazysv.end; 3900 /* If the maximum is !SvOK(), pp_enteriter substitutes PL_sv_no. 3901 It has SvPVX of "" and SvCUR of 0, which is what we want. */ 3902 STRLEN maxlen = 0; 3903 const char *max = SvPV_const(end, maxlen); 3904 if (DO_UTF8(end) && IN_UNI_8_BIT) 3905 maxlen = sv_len_utf8_nomg(end); 3906 if (UNLIKELY(SvNIOK(cur) || SvCUR(cur) > maxlen)) 3907 goto retno; 3908 3909 oldsv = *itersvp; 3910 /* NB: on the first iteration, oldsv will have a ref count of at 3911 * least 2 (one extra from blk_loop.itersave), so the GV or pad 3912 * slot will get localised; on subsequent iterations the RC==1 3913 * optimisation may kick in and the SV will be reused. 
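 * An illustrative case where reuse must not happen (example assumed,
 * not quoted from a test):
 *
 *     my @refs;
 *     push @refs, \$_ for 'aa' .. 'ac';
 *
 * Taking \$_ bumps the refcount of the current iteration SV, so the
 * RC==1 test below fails and the next iteration installs a fresh SV
 * instead of overwriting the one just captured.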
*/ 3914 if (oldsv && LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) { 3915 /* safe to reuse old SV */ 3916 sv_setsv(oldsv, cur); 3917 } 3918 else 3919 { 3920 /* we need a fresh SV every time so that loop body sees a 3921 * completely new SV for closures/references to work as 3922 * they used to */ 3923 *itersvp = newSVsv(cur); 3924 SvREFCNT_dec(oldsv); 3925 } 3926 if (strEQ(SvPVX_const(cur), max)) 3927 sv_setiv(cur, 0); /* terminate next time */ 3928 else 3929 sv_inc(cur); 3930 break; 3931 } 3932 3933 case CXt_LOOP_LAZYIV: /* integer increment */ 3934 { 3935 IV cur = cx->blk_loop.state_u.lazyiv.cur; 3936 if (UNLIKELY(cur > cx->blk_loop.state_u.lazyiv.end)) 3937 goto retno; 3938 3939 oldsv = *itersvp; 3940 /* see NB comment above */ 3941 if (oldsv && LIKELY(SvREFCNT(oldsv) == 1 && !SvMAGICAL(oldsv))) { 3942 /* safe to reuse old SV */ 3943 3944 if ( (SvFLAGS(oldsv) & (SVTYPEMASK|SVf_THINKFIRST|SVf_IVisUV)) 3945 == SVt_IV) 3946 { 3947 /* Cheap SvIOK_only(). 3948 * Assert that flags which SvIOK_only() would test or 3949 * clear can't be set, because we're SVt_IV */ 3950 assert(!(SvFLAGS(oldsv) & 3951 (SVf_OOK|SVf_UTF8|(SVf_OK & ~(SVf_IOK|SVp_IOK))))); 3952 SvFLAGS(oldsv) |= (SVf_IOK|SVp_IOK); 3953 /* SvIV_set() where sv_any points to head */ 3954 oldsv->sv_u.svu_iv = cur; 3955 3956 } 3957 else 3958 sv_setiv(oldsv, cur); 3959 } 3960 else 3961 { 3962 /* we need a fresh SV every time so that loop body sees a 3963 * completely new SV for closures/references to work as they 3964 * used to */ 3965 *itersvp = newSViv(cur); 3966 SvREFCNT_dec(oldsv); 3967 } 3968 3969 if (UNLIKELY(cur == IV_MAX)) { 3970 /* Handle end of range at IV_MAX */ 3971 cx->blk_loop.state_u.lazyiv.end = IV_MIN; 3972 } else 3973 ++cx->blk_loop.state_u.lazyiv.cur; 3974 break; 3975 } 3976 3977 case CXt_LOOP_LIST: /* for (1,2,3) */ 3978 3979 assert(OPpITER_REVERSED == 2); /* so inc becomes -1 or 1 */ 3980 inc = (IV)1 - (IV)(PL_op->op_private & OPpITER_REVERSED); 3981 ix = (cx->blk_loop.state_u.stack.ix += inc); 3982 if (UNLIKELY(inc > 0 3983 ? ix > cx->blk_oldsp 3984 : ix <= cx->blk_loop.state_u.stack.basesp) 3985 ) 3986 goto retno; 3987 3988 sv = PL_stack_base[ix]; 3989 av = NULL; 3990 goto loop_ary_common; 3991 3992 case CXt_LOOP_ARY: /* for (@ary) */ 3993 3994 av = cx->blk_loop.state_u.ary.ary; 3995 inc = (IV)1 - (IV)(PL_op->op_private & OPpITER_REVERSED); 3996 ix = (cx->blk_loop.state_u.ary.ix += inc); 3997 if (UNLIKELY(inc > 0 3998 ? ix > AvFILL(av) 3999 : ix < 0) 4000 ) 4001 goto retno; 4002 4003 if (UNLIKELY(SvRMAGICAL(av))) { 4004 SV * const * const svp = av_fetch(av, ix, FALSE); 4005 sv = svp ? 
*svp : NULL; 4006 } 4007 else { 4008 sv = AvARRAY(av)[ix]; 4009 } 4010 4011 loop_ary_common: 4012 4013 if (UNLIKELY(cx->cx_type & CXp_FOR_LVREF)) { 4014 SvSetMagicSV(*itersvp, sv); 4015 break; 4016 } 4017 4018 if (LIKELY(sv)) { 4019 if (UNLIKELY(SvIS_FREED(sv))) { 4020 *itersvp = NULL; 4021 Perl_croak(aTHX_ "Use of freed value in iteration"); 4022 } 4023 if (SvPADTMP(sv)) { 4024 sv = newSVsv(sv); 4025 } 4026 else { 4027 SvTEMP_off(sv); 4028 SvREFCNT_inc_simple_void_NN(sv); 4029 } 4030 } 4031 else if (av) { 4032 sv = newSVavdefelem(av, ix, 0); 4033 } 4034 else 4035 sv = &PL_sv_undef; 4036 4037 oldsv = *itersvp; 4038 *itersvp = sv; 4039 SvREFCNT_dec(oldsv); 4040 break; 4041 4042 default: 4043 DIE(aTHX_ "panic: pp_iter, type=%u", CxTYPE(cx)); 4044 } 4045 4046 /* Try to bypass pushing &PL_sv_yes and calling pp_and(); instead 4047 * jump straight to the AND op's op_other */ 4048 assert(PL_op->op_next->op_type == OP_AND); 4049 if (PL_op->op_next->op_ppaddr == Perl_pp_and) { 4050 return cLOGOPx(PL_op->op_next)->op_other; 4051 } 4052 else { 4053 /* An XS module has replaced the op_ppaddr, so fall back to the slow, 4054 * obvious way. */ 4055 /* pp_enteriter should have pre-extended the stack */ 4056 EXTEND_SKIP(PL_stack_sp, 1); 4057 *++PL_stack_sp = &PL_sv_yes; 4058 return PL_op->op_next; 4059 } 4060 4061 retno: 4062 /* Try to bypass pushing &PL_sv_no and calling pp_and(); instead 4063 * jump straight to the AND op's op_next */ 4064 assert(PL_op->op_next->op_type == OP_AND); 4065 /* pp_enteriter should have pre-extended the stack */ 4066 EXTEND_SKIP(PL_stack_sp, 1); 4067 /* we only need this for the rare case where the OP_AND isn't 4068 * in void context, e.g. $x = do { for (..) {...} }; 4069 * (or for when an XS module has replaced the op_ppaddr) 4070 * but it's cheaper to just push it rather than testing first 4071 */ 4072 *++PL_stack_sp = &PL_sv_no; 4073 if (PL_op->op_next->op_ppaddr == Perl_pp_and) { 4074 return PL_op->op_next->op_next; 4075 } 4076 else { 4077 /* An XS module has replaced the op_ppaddr, so fall back to the slow, 4078 * obvious way. */ 4079 return PL_op->op_next; 4080 } 4081 } 4082 4083 4084 /* 4085 A description of how taint works in pattern matching and substitution. 4086 4087 This is all conditional on NO_TAINT_SUPPORT not being defined. Under 4088 NO_TAINT_SUPPORT, taint-related operations should become no-ops. 4089 4090 While the pattern is being assembled/concatenated and then compiled, 4091 PL_tainted will get set (via TAINT_set) if any component of the pattern 4092 is tainted, e.g. /.*$tainted/. At the end of pattern compilation, 4093 the RXf_TAINTED flag is set on the pattern if PL_tainted is set (via 4094 TAINT_get). It will also be set if any component of the pattern matches 4095 based on locale-dependent behavior. 4096 4097 When the pattern is copied, e.g. $r = qr/..../, the SV holding the ref to 4098 the pattern is marked as tainted. This means that subsequent usage, such 4099 as /x$r/, will set PL_tainted using TAINT_set, and thus RXf_TAINTED, 4100 on the new pattern too. 4101 4102 RXf_TAINTED_SEEN is used post-execution by the get magic code 4103 of $1 et al to indicate whether the returned value should be tainted. 4104 It is the responsibility of the caller of the pattern (i.e. pp_match, 4105 pp_subst etc) to set this flag for any other circumstances where $1 needs 4106 to be tainted. 4107 4108 The taint behaviour of pp_subst (and pp_substcont) is quite complex. 
4109 4110 There are three possible sources of taint 4111 * the source string 4112 * the pattern (both compile- and run-time, RXf_TAINTED / RXf_TAINTED_SEEN) 4113 * the replacement string (or expression under /e) 4114 4115 There are four destinations of taint and they are affected by the sources 4116 according to the rules below: 4117 4118 * the return value (not including /r): 4119 tainted by the source string and pattern, but only for the 4120 number-of-iterations case; boolean returns aren't tainted; 4121 * the modified string (or modified copy under /r): 4122 tainted by the source string, pattern, and replacement strings; 4123 * $1 et al: 4124 tainted by the pattern, and under 'use re "taint"', by the source 4125 string too; 4126 * PL_taint - i.e. whether subsequent code (e.g. in a /e block) is tainted: 4127 should always be unset before executing subsequent code. 4128 4129 The overall action of pp_subst is: 4130 4131 * at the start, set bits in rxtainted indicating the taint status of 4132 the various sources. 4133 4134 * After each pattern execution, update the SUBST_TAINT_PAT bit in 4135 rxtainted if RXf_TAINTED_SEEN has been set, to indicate that the 4136 pattern has subsequently become tainted via locale ops. 4137 4138 * If control is being passed to pp_substcont to execute a /e block, 4139 save rxtainted in the CXt_SUBST block, for future use by 4140 pp_substcont. 4141 4142 * Whenever control is being returned to perl code (either by falling 4143 off the "end" of pp_subst/pp_substcont, or by entering a /e block), 4144 use the flag bits in rxtainted to make all the appropriate types of 4145 destination taint visible; e.g. set RXf_TAINTED_SEEN so that $1 4146 et al will appear tainted. 4147 4148 pp_match is just a simpler version of the above. 4149 4150 */ 4151 4152 PP(pp_subst) 4153 { 4154 dSP; dTARG; 4155 PMOP *pm = cPMOP; 4156 PMOP *rpm = pm; 4157 char *s; 4158 char *strend; 4159 const char *c; 4160 STRLEN clen; 4161 SSize_t iters = 0; 4162 SSize_t maxiters; 4163 bool once; 4164 U8 rxtainted = 0; /* holds various SUBST_TAINT_* flag bits. 4165 See "how taint works" above */ 4166 char *orig; 4167 U8 r_flags; 4168 REGEXP *rx = PM_GETRE(pm); 4169 regexp *prog = ReANY(rx); 4170 STRLEN len; 4171 int force_on_match = 0; 4172 const I32 oldsave = PL_savestack_ix; 4173 STRLEN slen; 4174 bool doutf8 = FALSE; /* whether replacement is in utf8 */ 4175 #ifdef PERL_ANY_COW 4176 bool was_cow; 4177 #endif 4178 SV *nsv = NULL; 4179 /* known replacement string? */ 4180 SV *dstr = (pm->op_pmflags & PMf_CONST) ? 
POPs : NULL; 4181 4182 PERL_ASYNC_CHECK(); 4183 4184 if (PL_op->op_flags & OPf_STACKED) 4185 TARG = POPs; 4186 else { 4187 if (ARGTARG) 4188 GETTARGET; 4189 else { 4190 TARG = DEFSV; 4191 } 4192 EXTEND(SP,1); 4193 } 4194 4195 SvGETMAGIC(TARG); /* must come before cow check */ 4196 #ifdef PERL_ANY_COW 4197 /* note that a string might get converted to COW during matching */ 4198 was_cow = cBOOL(SvIsCOW(TARG)); 4199 #endif 4200 if (!(rpm->op_pmflags & PMf_NONDESTRUCT)) { 4201 #ifndef PERL_ANY_COW 4202 if (SvIsCOW(TARG)) 4203 sv_force_normal_flags(TARG,0); 4204 #endif 4205 if ((SvREADONLY(TARG) 4206 || ( ((SvTYPE(TARG) == SVt_PVGV && isGV_with_GP(TARG)) 4207 || SvTYPE(TARG) > SVt_PVLV) 4208 && !(SvTYPE(TARG) == SVt_PVGV && SvFAKE(TARG))))) 4209 Perl_croak_no_modify(); 4210 } 4211 PUTBACK; 4212 4213 orig = SvPV_nomg(TARG, len); 4214 /* note we don't (yet) force the var into being a string; if we fail 4215 * to match, we leave as-is; on successful match however, we *will* 4216 * coerce into a string, then repeat the match */ 4217 if (!SvPOKp(TARG) || SvTYPE(TARG) == SVt_PVGV || SvVOK(TARG)) 4218 force_on_match = 1; 4219 4220 /* only replace once? */ 4221 once = !(rpm->op_pmflags & PMf_GLOBAL); 4222 4223 /* See "how taint works" above */ 4224 if (TAINTING_get) { 4225 rxtainted = ( 4226 (SvTAINTED(TARG) ? SUBST_TAINT_STR : 0) 4227 | (RXp_ISTAINTED(prog) ? SUBST_TAINT_PAT : 0) 4228 | ((pm->op_pmflags & PMf_RETAINT) ? SUBST_TAINT_RETAINT : 0) 4229 | (( (once && !(rpm->op_pmflags & PMf_NONDESTRUCT)) 4230 || (PL_op->op_private & OPpTRUEBOOL)) ? SUBST_TAINT_BOOLRET : 0)); 4231 TAINT_NOT; 4232 } 4233 4234 force_it: 4235 if (!pm || !orig) 4236 DIE(aTHX_ "panic: pp_subst, pm=%p, orig=%p", pm, orig); 4237 4238 strend = orig + len; 4239 slen = DO_UTF8(TARG) ? utf8_length((U8*)orig, (U8*)strend) : len; 4240 maxiters = 2 * slen + 10; /* We can match twice at each 4241 position, once with zero-length, 4242 second time with non-zero. */ 4243 4244 /* handle the empty pattern */ 4245 if (!RX_PRELEN(rx) && PL_curpm && !prog->mother_re) { 4246 if (PL_curpm == PL_reg_curpm) { 4247 if (PL_curpm_under) { 4248 if (PL_curpm_under == PL_reg_curpm) { 4249 Perl_croak(aTHX_ "Infinite recursion via empty pattern"); 4250 } else { 4251 pm = PL_curpm_under; 4252 } 4253 } 4254 } else { 4255 pm = PL_curpm; 4256 } 4257 rx = PM_GETRE(pm); 4258 prog = ReANY(rx); 4259 } 4260 4261 #ifdef PERL_SAWAMPERSAND 4262 r_flags = ( RXp_NPARENS(prog) 4263 || PL_sawampersand 4264 || (RXp_EXTFLAGS(prog) & (RXf_EVAL_SEEN|RXf_PMf_KEEPCOPY)) 4265 || (rpm->op_pmflags & PMf_KEEPCOPY) 4266 ) 4267 ? REXEC_COPY_STR 4268 : 0; 4269 #else 4270 r_flags = REXEC_COPY_STR; 4271 #endif 4272 4273 if (!CALLREGEXEC(rx, orig, strend, orig, 0, TARG, NULL, r_flags)) 4274 { 4275 SPAGAIN; 4276 PUSHs(rpm->op_pmflags & PMf_NONDESTRUCT ? TARG : &PL_sv_no); 4277 LEAVE_SCOPE(oldsave); 4278 RETURN; 4279 } 4280 PL_curpm = pm; 4281 4282 /* known replacement string? */ 4283 if (dstr) { 4284 /* replacement needing upgrading? */ 4285 if (DO_UTF8(TARG) && !doutf8) { 4286 nsv = sv_newmortal(); 4287 SvSetSV(nsv, dstr); 4288 sv_utf8_upgrade(nsv); 4289 c = SvPV_const(nsv, clen); 4290 doutf8 = TRUE; 4291 } 4292 else { 4293 c = SvPV_const(dstr, clen); 4294 doutf8 = DO_UTF8(dstr); 4295 } 4296 4297 if (UNLIKELY(TAINT_get)) 4298 rxtainted |= SUBST_TAINT_REPL; 4299 } 4300 else { 4301 c = NULL; 4302 doutf8 = FALSE; 4303 } 4304 4305 /* can do inplace substitution? 
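     (Broadly, only when we have a known constant replacement string no
      longer than the minimum length the match can occupy, the target is
      not copy-on-write, the pattern doesn't set RXf_NO_INPLACE_SUBST, the
      UTF8-ness of target and replacement is compatible, and this isn't
      s///r; see the condition below.)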
*/ 4306 if (c 4307 #ifdef PERL_ANY_COW 4308 && !was_cow 4309 #endif 4310 && (I32)clen <= RXp_MINLENRET(prog) 4311 && ( once 4312 || !(r_flags & REXEC_COPY_STR) 4313 || (!SvGMAGICAL(dstr) && !(RXp_EXTFLAGS(prog) & RXf_EVAL_SEEN)) 4314 ) 4315 && !(RXp_EXTFLAGS(prog) & RXf_NO_INPLACE_SUBST) 4316 && (!doutf8 || SvUTF8(TARG)) 4317 && !(rpm->op_pmflags & PMf_NONDESTRUCT)) 4318 { 4319 4320 #ifdef PERL_ANY_COW 4321 /* string might have got converted to COW since we set was_cow */ 4322 if (SvIsCOW(TARG)) { 4323 if (!force_on_match) 4324 goto have_a_cow; 4325 assert(SvVOK(TARG)); 4326 } 4327 #endif 4328 if (force_on_match) { 4329 /* redo the first match, this time with the orig var 4330 * forced into being a string */ 4331 force_on_match = 0; 4332 orig = SvPV_force_nomg(TARG, len); 4333 goto force_it; 4334 } 4335 4336 if (once) { 4337 char *d, *m; 4338 if (RXp_MATCH_TAINTED(prog)) /* run time pattern taint, eg locale */ 4339 rxtainted |= SUBST_TAINT_PAT; 4340 m = orig + RXp_OFFS(prog)[0].start; 4341 d = orig + RXp_OFFS(prog)[0].end; 4342 s = orig; 4343 if (m - s > strend - d) { /* faster to shorten from end */ 4344 I32 i; 4345 if (clen) { 4346 Copy(c, m, clen, char); 4347 m += clen; 4348 } 4349 i = strend - d; 4350 if (i > 0) { 4351 Move(d, m, i, char); 4352 m += i; 4353 } 4354 *m = '\0'; 4355 SvCUR_set(TARG, m - s); 4356 } 4357 else { /* faster from front */ 4358 I32 i = m - s; 4359 d -= clen; 4360 if (i > 0) 4361 Move(s, d - i, i, char); 4362 sv_chop(TARG, d-i); 4363 if (clen) 4364 Copy(c, d, clen, char); 4365 } 4366 SPAGAIN; 4367 PUSHs(&PL_sv_yes); 4368 } 4369 else { 4370 char *d, *m; 4371 d = s = RXp_OFFS(prog)[0].start + orig; 4372 do { 4373 I32 i; 4374 if (UNLIKELY(iters++ > maxiters)) 4375 DIE(aTHX_ "Substitution loop"); 4376 /* run time pattern taint, eg locale */ 4377 if (UNLIKELY(RXp_MATCH_TAINTED(prog))) 4378 rxtainted |= SUBST_TAINT_PAT; 4379 m = RXp_OFFS(prog)[0].start + orig; 4380 if ((i = m - s)) { 4381 if (s != d) 4382 Move(s, d, i, char); 4383 d += i; 4384 } 4385 if (clen) { 4386 Copy(c, d, clen, char); 4387 d += clen; 4388 } 4389 s = RXp_OFFS(prog)[0].end + orig; 4390 } while (CALLREGEXEC(rx, s, strend, orig, 4391 s == m, /* don't match same null twice */ 4392 TARG, NULL, 4393 REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW)); 4394 if (s != d) { 4395 I32 i = strend - s; 4396 SvCUR_set(TARG, d - SvPVX_const(TARG) + i); 4397 Move(s, d, i+1, char); /* include the NUL */ 4398 } 4399 SPAGAIN; 4400 assert(iters); 4401 if (PL_op->op_private & OPpTRUEBOOL) 4402 PUSHs(&PL_sv_yes); 4403 else 4404 mPUSHi(iters); 4405 } 4406 } 4407 else { 4408 bool first; 4409 char *m; 4410 SV *repl; 4411 if (force_on_match) { 4412 /* redo the first match, this time with the orig var 4413 * forced into being a string */ 4414 force_on_match = 0; 4415 if (rpm->op_pmflags & PMf_NONDESTRUCT) { 4416 /* I feel that it should be possible to avoid this mortal copy 4417 given that the code below copies into a new destination. 4418 However, I suspect it isn't worth the complexity of 4419 unravelling the C<goto force_it> for the small number of 4420 cases where it would be viable to drop into the copy code. */ 4421 TARG = sv_2mortal(newSVsv(TARG)); 4422 } 4423 orig = SvPV_force_nomg(TARG, len); 4424 goto force_it; 4425 } 4426 #ifdef PERL_ANY_COW 4427 have_a_cow: 4428 #endif 4429 if (RXp_MATCH_TAINTED(prog)) /* run time pattern taint, eg locale */ 4430 rxtainted |= SUBST_TAINT_PAT; 4431 repl = dstr; 4432 s = RXp_OFFS(prog)[0].start + orig; 4433 dstr = newSVpvn_flags(orig, s-orig, 4434 SVs_TEMP | (DO_UTF8(TARG) ? 
SVf_UTF8 : 0)); 4435 if (!c) { 4436 PERL_CONTEXT *cx; 4437 SPAGAIN; 4438 m = orig; 4439 /* note that a whole bunch of local vars are saved here for 4440 * use by pp_substcont: here's a list of them in case you're 4441 * searching for places in this sub that uses a particular var: 4442 * iters maxiters r_flags oldsave rxtainted orig dstr targ 4443 * s m strend rx once */ 4444 CX_PUSHSUBST(cx); 4445 RETURNOP(cPMOP->op_pmreplrootu.op_pmreplroot); 4446 } 4447 first = TRUE; 4448 do { 4449 if (UNLIKELY(iters++ > maxiters)) 4450 DIE(aTHX_ "Substitution loop"); 4451 if (UNLIKELY(RXp_MATCH_TAINTED(prog))) 4452 rxtainted |= SUBST_TAINT_PAT; 4453 if (RXp_MATCH_COPIED(prog) && RXp_SUBBEG(prog) != orig) { 4454 char *old_s = s; 4455 char *old_orig = orig; 4456 assert(RXp_SUBOFFSET(prog) == 0); 4457 4458 orig = RXp_SUBBEG(prog); 4459 s = orig + (old_s - old_orig); 4460 strend = s + (strend - old_s); 4461 } 4462 m = RXp_OFFS(prog)[0].start + orig; 4463 sv_catpvn_nomg_maybeutf8(dstr, s, m - s, DO_UTF8(TARG)); 4464 s = RXp_OFFS(prog)[0].end + orig; 4465 if (first) { 4466 /* replacement already stringified */ 4467 if (clen) 4468 sv_catpvn_nomg_maybeutf8(dstr, c, clen, doutf8); 4469 first = FALSE; 4470 } 4471 else { 4472 sv_catsv(dstr, repl); 4473 } 4474 if (once) 4475 break; 4476 } while (CALLREGEXEC(rx, s, strend, orig, 4477 s == m, /* Yields minend of 0 or 1 */ 4478 TARG, NULL, 4479 REXEC_NOT_FIRST|REXEC_IGNOREPOS|REXEC_FAIL_ON_UNDERFLOW)); 4480 assert(strend >= s); 4481 sv_catpvn_nomg_maybeutf8(dstr, s, strend - s, DO_UTF8(TARG)); 4482 4483 if (rpm->op_pmflags & PMf_NONDESTRUCT) { 4484 /* From here on down we're using the copy, and leaving the original 4485 untouched. */ 4486 TARG = dstr; 4487 SPAGAIN; 4488 PUSHs(dstr); 4489 } else { 4490 #ifdef PERL_ANY_COW 4491 /* The match may make the string COW. If so, brilliant, because 4492 that's just saved us one malloc, copy and free - the regexp has 4493 donated the old buffer, and we malloc an entirely new one, rather 4494 than the regexp malloc()ing a buffer and copying our original, 4495 only for us to throw it away here during the substitution. 
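   (Concretely, the code below transplants dstr's buffer into TARG with
   SvPV_set()/SvCUR_set()/SvLEN_set(), then sets dstr's PV to NULL so the
   mortal dstr doesn't free the donated buffer when it is reaped.)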
*/ 4496 if (SvIsCOW(TARG)) { 4497 sv_force_normal_flags(TARG, SV_COW_DROP_PV); 4498 } else 4499 #endif 4500 { 4501 SvPV_free(TARG); 4502 } 4503 SvPV_set(TARG, SvPVX(dstr)); 4504 SvCUR_set(TARG, SvCUR(dstr)); 4505 SvLEN_set(TARG, SvLEN(dstr)); 4506 SvFLAGS(TARG) |= SvUTF8(dstr); 4507 SvPV_set(dstr, NULL); 4508 4509 SPAGAIN; 4510 if (PL_op->op_private & OPpTRUEBOOL) 4511 PUSHs(&PL_sv_yes); 4512 else 4513 mPUSHi(iters); 4514 } 4515 } 4516 4517 if (!(rpm->op_pmflags & PMf_NONDESTRUCT)) { 4518 (void)SvPOK_only_UTF8(TARG); 4519 } 4520 4521 /* See "how taint works" above */ 4522 if (TAINTING_get) { 4523 if ((rxtainted & SUBST_TAINT_PAT) || 4524 ((rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_RETAINT)) == 4525 (SUBST_TAINT_STR|SUBST_TAINT_RETAINT)) 4526 ) 4527 (RXp_MATCH_TAINTED_on(prog)); /* taint $1 et al */ 4528 4529 if (!(rxtainted & SUBST_TAINT_BOOLRET) 4530 && (rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_PAT)) 4531 ) 4532 SvTAINTED_on(TOPs); /* taint return value */ 4533 else 4534 SvTAINTED_off(TOPs); /* may have got tainted earlier */ 4535 4536 /* needed for mg_set below */ 4537 TAINT_set( 4538 cBOOL(rxtainted & (SUBST_TAINT_STR|SUBST_TAINT_PAT|SUBST_TAINT_REPL)) 4539 ); 4540 SvTAINT(TARG); 4541 } 4542 SvSETMAGIC(TARG); /* PL_tainted must be correctly set for this mg_set */ 4543 TAINT_NOT; 4544 LEAVE_SCOPE(oldsave); 4545 RETURN; 4546 } 4547 4548 PP(pp_grepwhile) 4549 { 4550 dSP; 4551 dPOPss; 4552 4553 if (SvTRUE_NN(sv)) 4554 PL_stack_base[PL_markstack_ptr[-1]++] = PL_stack_base[*PL_markstack_ptr]; 4555 ++*PL_markstack_ptr; 4556 FREETMPS; 4557 LEAVE_with_name("grep_item"); /* exit inner scope */ 4558 4559 /* All done yet? */ 4560 if (UNLIKELY(PL_stack_base + *PL_markstack_ptr > SP)) { 4561 I32 items; 4562 const U8 gimme = GIMME_V; 4563 4564 LEAVE_with_name("grep"); /* exit outer scope */ 4565 (void)POPMARK; /* pop src */ 4566 items = --*PL_markstack_ptr - PL_markstack_ptr[-1]; 4567 (void)POPMARK; /* pop dst */ 4568 SP = PL_stack_base + POPMARK; /* pop original mark */ 4569 if (gimme == G_SCALAR) { 4570 if (PL_op->op_private & OPpTRUEBOOL) 4571 PUSHs(items ? &PL_sv_yes : &PL_sv_zero); 4572 else { 4573 dTARGET; 4574 PUSHi(items); 4575 } 4576 } 4577 else if (gimme == G_ARRAY) 4578 SP += items; 4579 RETURN; 4580 } 4581 else { 4582 SV *src; 4583 4584 ENTER_with_name("grep_item"); /* enter inner scope */ 4585 SAVEVPTR(PL_curpm); 4586 4587 src = PL_stack_base[TOPMARK]; 4588 if (SvPADTMP(src)) { 4589 src = PL_stack_base[TOPMARK] = sv_mortalcopy(src); 4590 PL_tmps_floor++; 4591 } 4592 SvTEMP_off(src); 4593 DEFSV_set(src); 4594 4595 RETURNOP(cLOGOP->op_other); 4596 } 4597 } 4598 4599 /* leave_adjust_stacks(): 4600 * 4601 * Process a scope's return args (in the range from_sp+1 .. PL_stack_sp), 4602 * positioning them at to_sp+1 onwards, and do the equivalent of a 4603 * FREETMPS and TAINT_NOT. 4604 * 4605 * Not intended to be called in void context. 4606 * 4607 * When leaving a sub, eval, do{} or other scope, the things that need 4608 * doing to process the return args are: 4609 * * in scalar context, only return the last arg (or PL_sv_undef if none); 4610 * * for the types of return that return copies of their args (such 4611 * as rvalue sub return), make a mortal copy of every return arg, 4612 * except where we can optimise the copy away without it being 4613 * semantically visible; 4614 * * make sure that the arg isn't prematurely freed; in the case of an 4615 * arg not copied, this may involve mortalising it.
For example, in 4616 * C<sub f { my $x = ...; $x }>, $x would be freed when we do 4617 * CX_LEAVE_SCOPE(cx) unless it's protected or copied. 4618 * 4619 * What condition to use when deciding whether to pass the arg through 4620 * or make a copy is determined by the 'pass' arg; its valid values are: 4621 * 0: rvalue sub/eval exit 4622 * 1: other rvalue scope exit 4623 * 2: :lvalue sub exit in rvalue context 4624 * 3: :lvalue sub exit in lvalue context and other lvalue scope exits 4625 * 4626 * There is a big issue with doing a FREETMPS. We would like to free any 4627 * temps created by the last statement which the sub executed, rather than 4628 * leaving them for the caller. In a situation where a sub call isn't 4629 * soon followed by a nextstate (e.g. nested recursive calls, a la 4630 * fibonacci()), temps can accumulate, causing memory and performance 4631 * issues. 4632 * 4633 * On the other hand, we don't want to free any TEMPs which are keeping 4634 * alive any return args that we skipped copying; nor do we wish to undo 4635 * any mortalising done here. 4636 * 4637 * The solution is to split the temps stack frame into two, with a cut 4638 * point delineating the two halves. We arrange that by the end of this 4639 * function, all the temps stack frame entries we wish to keep are in the 4640 * range PL_tmps_floor+1 .. tmps_base-1, while the ones to free now are in 4641 * the range tmps_base .. PL_tmps_ix. During the course of this 4642 * function, tmps_base starts off as PL_tmps_floor+1, then increases 4643 * whenever we find or create a temp that we know should be kept. In 4644 * general the stuff above tmps_base is undecided until we reach the end, 4645 * and we may need a sort stage for that. 4646 * 4647 * To determine whether a TEMP is keeping a return arg alive, every 4648 * arg that is kept rather than copied and which has the SvTEMP flag 4649 * set, has the flag temporarily unset, to mark it. At the end we scan 4650 * the temps stack frame above the cut for entries without SvTEMP and 4651 * keep them, while turning SvTEMP on again. Note that if we die before 4652 * the SvTEMP flags are set again, it's safe: at worst, subsequent use of 4653 * those SVs may be slightly less efficient. 4654 * 4655 * In practice various optimisations for some common cases mean we can 4656 * avoid most of the scanning and swapping about with the temps stack. 4657 */ 4658 4659 void 4660 Perl_leave_adjust_stacks(pTHX_ SV **from_sp, SV **to_sp, U8 gimme, int pass) 4661 { 4662 dVAR; 4663 dSP; 4664 SSize_t tmps_base; /* lowest index into tmps stack that needs freeing now */ 4665 SSize_t nargs; 4666 4667 PERL_ARGS_ASSERT_LEAVE_ADJUST_STACKS; 4668 4669 TAINT_NOT; 4670 4671 if (gimme == G_ARRAY) { 4672 nargs = SP - from_sp; 4673 from_sp++; 4674 } 4675 else { 4676 assert(gimme == G_SCALAR); 4677 if (UNLIKELY(from_sp >= SP)) { 4678 /* no return args */ 4679 assert(from_sp == SP); 4680 EXTEND(SP, 1); 4681 *++SP = &PL_sv_undef; 4682 to_sp = SP; 4683 nargs = 0; 4684 } 4685 else { 4686 from_sp = SP; 4687 nargs = 1; 4688 } 4689 } 4690 4691 /* common code for G_SCALAR and G_ARRAY */ 4692 4693 tmps_base = PL_tmps_floor + 1; 4694 4695 assert(nargs >= 0); 4696 if (nargs) { 4697 /* pointer version of tmps_base. Not safe across temp stack 4698 * reallocs.
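 * (It is therefore recomputed below after anything that may grow or move
 * the tmps stack, e.g. the sv_setsv_flags()/EXTEND_MORTAL() path.)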
*/ 4699 SV **tmps_basep; 4700 4701 EXTEND_MORTAL(nargs); /* one big extend for worst-case scenario */ 4702 tmps_basep = PL_tmps_stack + tmps_base; 4703 4704 /* process each return arg */ 4705 4706 do { 4707 SV *sv = *from_sp++; 4708 4709 assert(PL_tmps_ix + nargs < PL_tmps_max); 4710 #ifdef DEBUGGING 4711 /* PADTMPs with container set magic shouldn't appear in the 4712 * wild. This assert is more important for pp_leavesublv(), 4713 * but by testing for it here, we're more likely to catch 4714 * bad cases (what with :lvalue subs not being widely 4715 * deployed). The two issues are that for something like 4716 * sub :lvalue { $tied{foo} } 4717 * or 4718 * sub :lvalue { substr($foo,1,2) } 4719 * pp_leavesublv() will croak if the sub returns a PADTMP, 4720 * and currently functions like pp_substr() return a mortal 4721 * rather than using their PADTMP when returning a PVLV. 4722 * This is because the PVLV will hold a ref to $foo, 4723 * so $foo would get delayed in being freed while 4724 * the PADTMP SV remained in the PAD. 4725 * So if this assert fails it means either: 4726 * 1) there is pp code similar to pp_substr that is 4727 * returning a PADTMP instead of a mortal, and probably 4728 * needs fixing, or 4729 * 2) pp_leavesublv is making unwarranted assumptions 4730 * about always croaking on a PADTMP 4731 */ 4732 if (SvPADTMP(sv) && SvSMAGICAL(sv)) { 4733 MAGIC *mg; 4734 for (mg = SvMAGIC(sv); mg; mg = mg->mg_moremagic) { 4735 assert(PERL_MAGIC_TYPE_IS_VALUE_MAGIC(mg->mg_type)); 4736 } 4737 } 4738 #endif 4739 4740 if ( 4741 pass == 0 ? (SvTEMP(sv) && !SvMAGICAL(sv) && SvREFCNT(sv) == 1) 4742 : pass == 1 ? ((SvTEMP(sv) || SvPADTMP(sv)) && !SvMAGICAL(sv) && SvREFCNT(sv) == 1) 4743 : pass == 2 ? (!SvPADTMP(sv)) 4744 : 1) 4745 { 4746 /* pass through: skip copy for logic or optimisation 4747 * reasons; instead mortalise it, except that ... */ 4748 *++to_sp = sv; 4749 4750 if (SvTEMP(sv)) { 4751 /* ... since this SV is an SvTEMP, we don't need to 4752 * re-mortalise it; instead we just need to ensure 4753 * that its existing entry in the temps stack frame 4754 * ends up below the cut and so avoids being freed 4755 * this time round. We mark it as needing to be kept 4756 * by temporarily unsetting SvTEMP; then at the end, 4757 * we shuffle any !SvTEMP entries on the tmps stack 4758 * back below the cut. 4759 * However, there's a significant chance that there's 4760 * a 1:1 correspondence between the first few (or all) 4761 * elements in the return args stack frame and those 4762 * in the temps stack frame; e.g.: 4763 * sub f { ....; map {...} .... }, 4764 * or if we're exiting multiple scopes and one of the 4765 * inner scopes has already made mortal copies of each 4766 * return arg. 4767 * 4768 * If so, this arg sv will correspond to the next item 4769 * on the tmps stack above the cut, and so can be kept 4770 * merely by moving the cut boundary up one, rather 4771 * than messing with SvTEMP. If all args are 1:1 then 4772 * we can avoid the sorting stage below completely. 4773 * 4774 * If there are no items above the cut on the tmps 4775 * stack, then the SvTEMP must come from an item 4776 * below the cut, so there's nothing to do. 4777 */ 4778 if (tmps_basep <= &PL_tmps_stack[PL_tmps_ix]) { 4779 if (sv == *tmps_basep) 4780 tmps_basep++; 4781 else 4782 SvTEMP_off(sv); 4783 } 4784 } 4785 else if (!SvPADTMP(sv)) { 4786 /* mortalise arg to avoid it being freed during save 4787 * stack unwinding. Pad tmps don't need mortalising as 4788 * they're never freed.
This is the equivalent of 4789 * sv_2mortal(SvREFCNT_inc(sv)), except that: 4790 * * it assumes that the temps stack has already been 4791 * extended; 4792 * * it puts the new item at the cut rather than at 4793 * ++PL_tmps_ix, moving the previous occupant there 4794 * instead. 4795 */ 4796 if (!SvIMMORTAL(sv)) { 4797 SvREFCNT_inc_simple_void_NN(sv); 4798 SvTEMP_on(sv); 4799 /* Note that if there's nothing above the cut, 4800 * this copies the garbage one slot above 4801 * PL_tmps_ix onto itself. This is harmless (the 4802 * stack's already been extended), but might in 4803 * theory trigger warnings from tools like ASan 4804 */ 4805 PL_tmps_stack[++PL_tmps_ix] = *tmps_basep; 4806 *tmps_basep++ = sv; 4807 } 4808 } 4809 } 4810 else { 4811 /* Make a mortal copy of the SV. 4812 * The following code is the equivalent of sv_mortalcopy() 4813 * except that: 4814 * * it assumes the temps stack has already been extended; 4815 * * it optimises the copying for some simple SV types; 4816 * * it puts the new item at the cut rather than at 4817 * ++PL_tmps_ix, moving the previous occupant there 4818 * instead. 4819 */ 4820 SV *newsv = newSV(0); 4821 4822 PL_tmps_stack[++PL_tmps_ix] = *tmps_basep; 4823 /* put it on the tmps stack early so it gets freed if we die */ 4824 *tmps_basep++ = newsv; 4825 *++to_sp = newsv; 4826 4827 if (SvTYPE(sv) <= SVt_IV) { 4828 /* arg must be one of undef, IV/UV, or RV: skip 4829 * sv_setsv_flags() and do the copy directly */ 4830 U32 dstflags; 4831 U32 srcflags = SvFLAGS(sv); 4832 4833 assert(!SvGMAGICAL(sv)); 4834 if (srcflags & (SVf_IOK|SVf_ROK)) { 4835 SET_SVANY_FOR_BODYLESS_IV(newsv); 4836 4837 if (srcflags & SVf_ROK) { 4838 newsv->sv_u.svu_rv = SvREFCNT_inc(SvRV(sv)); 4839 /* SV type plus flags */ 4840 dstflags = (SVt_IV|SVf_ROK|SVs_TEMP); 4841 } 4842 else { 4843 /* both src and dst are <= SVt_IV, so sv_any 4844 * points to the head; so access the heads 4845 * directly rather than going via sv_any. 4846 */ 4847 assert( &(sv->sv_u.svu_iv) 4848 == &(((XPVIV*) SvANY(sv))->xiv_iv)); 4849 assert( &(newsv->sv_u.svu_iv) 4850 == &(((XPVIV*) SvANY(newsv))->xiv_iv)); 4851 newsv->sv_u.svu_iv = sv->sv_u.svu_iv; 4852 /* SV type plus flags */ 4853 dstflags = (SVt_IV|SVf_IOK|SVp_IOK|SVs_TEMP 4854 |(srcflags & SVf_IVisUV)); 4855 } 4856 } 4857 else { 4858 assert(!(srcflags & SVf_OK)); 4859 dstflags = (SVt_NULL|SVs_TEMP); /* SV type plus flags */ 4860 } 4861 SvFLAGS(newsv) = dstflags; 4862 4863 } 4864 else { 4865 /* do the full sv_setsv() */ 4866 SSize_t old_base; 4867 4868 SvTEMP_on(newsv); 4869 old_base = tmps_basep - PL_tmps_stack; 4870 SvGETMAGIC(sv); 4871 sv_setsv_flags(newsv, sv, SV_DO_COW_SVSETSV); 4872 /* the mg_get or sv_setsv might have created new temps 4873 * or realloced the tmps stack; regrow and reload */ 4874 EXTEND_MORTAL(nargs); 4875 tmps_basep = PL_tmps_stack + old_base; 4876 TAINT_NOT; /* Each item is independent */ 4877 } 4878 4879 } 4880 } while (--nargs); 4881 4882 /* If there are any temps left above the cut, we need to sort 4883 * them into those to keep and those to free. The only ones to 4884 * keep are those for which we've temporarily unset SvTEMP. 4885 * Work inwards from the two ends at tmps_basep .. PL_tmps_ix, 4886 * swapping pairs as necessary. Stop when we meet in the middle. 
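 * (Each step either leaves a to-be-freed SV above the cut or swaps a
 * kept SV down below it, so this is a single linear pass.)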
4887 */ 4888 { 4889 SV **top = PL_tmps_stack + PL_tmps_ix; 4890 while (tmps_basep <= top) { 4891 SV *sv = *top; 4892 if (SvTEMP(sv)) 4893 top--; 4894 else { 4895 SvTEMP_on(sv); 4896 *top = *tmps_basep; 4897 *tmps_basep = sv; 4898 tmps_basep++; 4899 } 4900 } 4901 } 4902 4903 tmps_base = tmps_basep - PL_tmps_stack; 4904 } 4905 4906 PL_stack_sp = to_sp; 4907 4908 /* unrolled FREETMPS() but using tmps_base-1 rather than PL_tmps_floor */ 4909 while (PL_tmps_ix >= tmps_base) { 4910 SV* const sv = PL_tmps_stack[PL_tmps_ix--]; 4911 #ifdef PERL_POISON 4912 PoisonWith(PL_tmps_stack + PL_tmps_ix + 1, 1, SV *, 0xAB); 4913 #endif 4914 if (LIKELY(sv)) { 4915 SvTEMP_off(sv); 4916 SvREFCNT_dec_NN(sv); /* note, can modify tmps_ix!!! */ 4917 } 4918 } 4919 } 4920 4921 4922 /* also tail-called by pp_return */ 4923 4924 PP(pp_leavesub) 4925 { 4926 U8 gimme; 4927 PERL_CONTEXT *cx; 4928 SV **oldsp; 4929 OP *retop; 4930 4931 cx = CX_CUR(); 4932 assert(CxTYPE(cx) == CXt_SUB); 4933 4934 if (CxMULTICALL(cx)) { 4935 /* entry zero of a stack is always PL_sv_undef, which 4936 * simplifies converting a '()' return into undef in scalar context */ 4937 assert(PL_stack_sp > PL_stack_base || *PL_stack_base == &PL_sv_undef); 4938 return 0; 4939 } 4940 4941 gimme = cx->blk_gimme; 4942 oldsp = PL_stack_base + cx->blk_oldsp; /* last arg of previous frame */ 4943 4944 if (gimme == G_VOID) 4945 PL_stack_sp = oldsp; 4946 else 4947 leave_adjust_stacks(oldsp, oldsp, gimme, 0); 4948 4949 CX_LEAVE_SCOPE(cx); 4950 cx_popsub(cx); /* Stack values are safe: release CV and @_ ... */ 4951 cx_popblock(cx); 4952 retop = cx->blk_sub.retop; 4953 CX_POP(cx); 4954 4955 return retop; 4956 } 4957 4958 4959 /* clear (if possible) or abandon the current @_. If 'abandon' is true, 4960 * forces an abandon */ 4961 4962 void 4963 Perl_clear_defarray(pTHX_ AV* av, bool abandon) 4964 { 4965 const SSize_t fill = AvFILLp(av); 4966 4967 PERL_ARGS_ASSERT_CLEAR_DEFARRAY; 4968 4969 if (LIKELY(!abandon && SvREFCNT(av) == 1 && !SvMAGICAL(av))) { 4970 av_clear(av); 4971 AvREIFY_only(av); 4972 } 4973 else { 4974 AV *newav = newAV(); 4975 av_extend(newav, fill); 4976 AvREIFY_only(newav); 4977 PAD_SVl(0) = MUTABLE_SV(newav); 4978 SvREFCNT_dec_NN(av); 4979 } 4980 } 4981 4982 4983 PP(pp_entersub) 4984 { 4985 dSP; dPOPss; 4986 GV *gv; 4987 CV *cv; 4988 PERL_CONTEXT *cx; 4989 I32 old_savestack_ix; 4990 4991 if (UNLIKELY(!sv)) 4992 goto do_die; 4993 4994 /* Locate the CV to call: 4995 * - most common case: RV->CV: f(), $ref->(): 4996 * note that if a sub is compiled before its caller is compiled, 4997 * the stash entry will be a ref to a CV, rather than being a GV. 4998 * - second most common case: CV: $ref->method() 4999 */ 5000 5001 /* a non-magic-RV -> CV ? */ 5002 if (LIKELY( (SvFLAGS(sv) & (SVf_ROK|SVs_GMG)) == SVf_ROK)) { 5003 cv = MUTABLE_CV(SvRV(sv)); 5004 if (UNLIKELY(SvOBJECT(cv))) /* might be overloaded */ 5005 goto do_ref; 5006 } 5007 else 5008 cv = MUTABLE_CV(sv); 5009 5010 /* a CV ? 
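       (If not, the switch below sorts out the remaining possibilities:
        globs and glob-like LVs, overloaded dereference, bare strings used
        as sub names when 'strict refs' is not in force, and plain
        non-code values, which die.)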
*/ 5011 if (UNLIKELY(SvTYPE(cv) != SVt_PVCV)) { 5012 /* handle all the weird cases */ 5013 switch (SvTYPE(sv)) { 5014 case SVt_PVLV: 5015 if (!isGV_with_GP(sv)) 5016 goto do_default; 5017 /* FALLTHROUGH */ 5018 case SVt_PVGV: 5019 cv = GvCVu((const GV *)sv); 5020 if (UNLIKELY(!cv)) { 5021 HV *stash; 5022 cv = sv_2cv(sv, &stash, &gv, 0); 5023 if (!cv) { 5024 old_savestack_ix = PL_savestack_ix; 5025 goto try_autoload; 5026 } 5027 } 5028 break; 5029 5030 default: 5031 do_default: 5032 SvGETMAGIC(sv); 5033 if (SvROK(sv)) { 5034 do_ref: 5035 if (UNLIKELY(SvAMAGIC(sv))) { 5036 sv = amagic_deref_call(sv, to_cv_amg); 5037 /* Don't SPAGAIN here. */ 5038 } 5039 } 5040 else { 5041 const char *sym; 5042 STRLEN len; 5043 if (UNLIKELY(!SvOK(sv))) 5044 DIE(aTHX_ PL_no_usym, "a subroutine"); 5045 5046 sym = SvPV_nomg_const(sv, len); 5047 if (PL_op->op_private & HINT_STRICT_REFS) 5048 DIE(aTHX_ "Can't use string (\"%" SVf32 "\"%s) as a subroutine ref while \"strict refs\" in use", sv, len>32 ? "..." : ""); 5049 cv = get_cvn_flags(sym, len, GV_ADD|SvUTF8(sv)); 5050 break; 5051 } 5052 cv = MUTABLE_CV(SvRV(sv)); 5053 if (LIKELY(SvTYPE(cv) == SVt_PVCV)) 5054 break; 5055 /* FALLTHROUGH */ 5056 case SVt_PVHV: 5057 case SVt_PVAV: 5058 do_die: 5059 DIE(aTHX_ "Not a CODE reference"); 5060 } 5061 } 5062 5063 /* At this point we want to save PL_savestack_ix, either by doing a 5064 * cx_pushsub(), or for XS, doing an ENTER. But we don't yet know the final 5065 * CV we will be using (so we don't know whether its XS, so we can't 5066 * cx_pushsub() or ENTER yet), and determining cv may itself push stuff on 5067 * the save stack. So remember where we are currently on the save 5068 * stack, and later update the CX or scopestack entry accordingly. */ 5069 old_savestack_ix = PL_savestack_ix; 5070 5071 /* these two fields are in a union. If they ever become separate, 5072 * we have to test for both of them being null below */ 5073 assert(cv); 5074 assert((void*)&CvROOT(cv) == (void*)&CvXSUB(cv)); 5075 while (UNLIKELY(!CvROOT(cv))) { 5076 GV* autogv; 5077 SV* sub_name; 5078 5079 /* anonymous or undef'd function leaves us no recourse */ 5080 if (CvLEXICAL(cv) && CvHASGV(cv)) 5081 DIE(aTHX_ "Undefined subroutine &%" SVf " called", 5082 SVfARG(cv_name(cv, NULL, 0))); 5083 if (CvANON(cv) || !CvHASGV(cv)) { 5084 DIE(aTHX_ "Undefined subroutine called"); 5085 } 5086 5087 /* autoloaded stub? */ 5088 if (cv != GvCV(gv = CvGV(cv))) { 5089 cv = GvCV(gv); 5090 } 5091 /* should call AUTOLOAD now? */ 5092 else { 5093 try_autoload: 5094 autogv = gv_autoload_pvn(GvSTASH(gv), GvNAME(gv), GvNAMELEN(gv), 5095 (GvNAMEUTF8(gv) ? SVf_UTF8 : 0) 5096 |(PL_op->op_flags & OPf_REF 5097 ? GV_AUTOLOAD_ISMETHOD 5098 : 0)); 5099 cv = autogv ? GvCV(autogv) : NULL; 5100 } 5101 if (!cv) { 5102 sub_name = sv_newmortal(); 5103 gv_efullname3(sub_name, gv, NULL); 5104 DIE(aTHX_ "Undefined subroutine &%" SVf " called", SVfARG(sub_name)); 5105 } 5106 } 5107 5108 /* unrolled "CvCLONE(cv) && ! 
CvCLONED(cv)" */ 5109 if (UNLIKELY((CvFLAGS(cv) & (CVf_CLONE|CVf_CLONED)) == CVf_CLONE)) 5110 DIE(aTHX_ "Closure prototype called"); 5111 5112 if (UNLIKELY((PL_op->op_private & OPpENTERSUB_DB) && GvCV(PL_DBsub) 5113 && !CvNODEBUG(cv))) 5114 { 5115 Perl_get_db_sub(aTHX_ &sv, cv); 5116 if (CvISXSUB(cv)) 5117 PL_curcopdb = PL_curcop; 5118 if (CvLVALUE(cv)) { 5119 /* check for lsub that handles lvalue subroutines */ 5120 cv = GvCV(gv_fetchpvs("DB::lsub", GV_ADDMULTI, SVt_PVCV)); 5121 /* if lsub not found then fall back to DB::sub */ 5122 if (!cv) cv = GvCV(PL_DBsub); 5123 } else { 5124 cv = GvCV(PL_DBsub); 5125 } 5126 5127 if (!cv || (!CvXSUB(cv) && !CvSTART(cv))) 5128 DIE(aTHX_ "No DB::sub routine defined"); 5129 } 5130 5131 if (!(CvISXSUB(cv))) { 5132 /* This path taken at least 75% of the time */ 5133 dMARK; 5134 PADLIST *padlist; 5135 I32 depth; 5136 bool hasargs; 5137 U8 gimme; 5138 5139 /* keep PADTMP args alive throughout the call (we need to do this 5140 * because @_ isn't refcounted). Note that we create the mortals 5141 * in the caller's tmps frame, so they won't be freed until after 5142 * we return from the sub. 5143 */ 5144 { 5145 SV **svp = MARK; 5146 while (svp < SP) { 5147 SV *sv = *++svp; 5148 if (!sv) 5149 continue; 5150 if (SvPADTMP(sv)) 5151 *svp = sv = sv_mortalcopy(sv); 5152 SvTEMP_off(sv); 5153 } 5154 } 5155 5156 gimme = GIMME_V; 5157 cx = cx_pushblock(CXt_SUB, gimme, MARK, old_savestack_ix); 5158 hasargs = cBOOL(PL_op->op_flags & OPf_STACKED); 5159 cx_pushsub(cx, cv, PL_op->op_next, hasargs); 5160 5161 padlist = CvPADLIST(cv); 5162 if (UNLIKELY((depth = ++CvDEPTH(cv)) >= 2)) 5163 pad_push(padlist, depth); 5164 PAD_SET_CUR_NOSAVE(padlist, depth); 5165 if (LIKELY(hasargs)) { 5166 AV *const av = MUTABLE_AV(PAD_SVl(0)); 5167 SSize_t items; 5168 AV **defavp; 5169 5170 defavp = &GvAV(PL_defgv); 5171 cx->blk_sub.savearray = *defavp; 5172 *defavp = MUTABLE_AV(SvREFCNT_inc_simple_NN(av)); 5173 5174 /* it's the responsibility of whoever leaves a sub to ensure 5175 * that a clean, empty AV is left in pad[0]. 
This is normally 5176 * done by cx_popsub() */ 5177 assert(!AvREAL(av) && AvFILLp(av) == -1); 5178 5179 items = SP - MARK; 5180 if (UNLIKELY(items - 1 > AvMAX(av))) { 5181 SV **ary = AvALLOC(av); 5182 Renew(ary, items, SV*); 5183 AvMAX(av) = items - 1; 5184 AvALLOC(av) = ary; 5185 AvARRAY(av) = ary; 5186 } 5187 5188 if (items) 5189 Copy(MARK+1,AvARRAY(av),items,SV*); 5190 AvFILLp(av) = items - 1; 5191 } 5192 if (UNLIKELY((cx->blk_u16 & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO && 5193 !CvLVALUE(cv))) 5194 DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%" SVf, 5195 SVfARG(cv_name(cv, NULL, 0))); 5196 /* warning must come *after* we fully set up the context 5197 * stuff so that __WARN__ handlers can safely dounwind() 5198 * if they want to 5199 */ 5200 if (UNLIKELY(depth == PERL_SUB_DEPTH_WARN 5201 && ckWARN(WARN_RECURSION) 5202 && !(PERLDB_SUB && cv == GvCV(PL_DBsub)))) 5203 sub_crush_depth(cv); 5204 RETURNOP(CvSTART(cv)); 5205 } 5206 else { 5207 SSize_t markix = TOPMARK; 5208 bool is_scalar; 5209 5210 ENTER; 5211 /* pretend we did the ENTER earlier */ 5212 PL_scopestack[PL_scopestack_ix - 1] = old_savestack_ix; 5213 5214 SAVETMPS; 5215 PUTBACK; 5216 5217 if (UNLIKELY(((PL_op->op_private 5218 & CX_PUSHSUB_GET_LVALUE_MASK(Perl_is_lvalue_sub) 5219 ) & OPpENTERSUB_LVAL_MASK) == OPpLVAL_INTRO && 5220 !CvLVALUE(cv))) 5221 DIE(aTHX_ "Can't modify non-lvalue subroutine call of &%" SVf, 5222 SVfARG(cv_name(cv, NULL, 0))); 5223 5224 if (UNLIKELY(!(PL_op->op_flags & OPf_STACKED) && GvAV(PL_defgv))) { 5225 /* Need to copy @_ to stack. Alternative may be to 5226 * switch stack to @_, and copy return values 5227 * back. This would allow popping @_ in XSUB, e.g.. XXXX */ 5228 AV * const av = GvAV(PL_defgv); 5229 const SSize_t items = AvFILL(av) + 1; 5230 5231 if (items) { 5232 SSize_t i = 0; 5233 const bool m = cBOOL(SvRMAGICAL(av)); 5234 /* Mark is at the end of the stack. */ 5235 EXTEND(SP, items); 5236 for (; i < items; ++i) 5237 { 5238 SV *sv; 5239 if (m) { 5240 SV ** const svp = av_fetch(av, i, 0); 5241 sv = svp ? *svp : NULL; 5242 } 5243 else sv = AvARRAY(av)[i]; 5244 if (sv) SP[i+1] = sv; 5245 else { 5246 SP[i+1] = av_nonelem(av, i); 5247 } 5248 } 5249 SP += items; 5250 PUTBACK ; 5251 } 5252 } 5253 else { 5254 SV **mark = PL_stack_base + markix; 5255 SSize_t items = SP - mark; 5256 while (items--) { 5257 mark++; 5258 if (*mark && SvPADTMP(*mark)) { 5259 *mark = sv_mortalcopy(*mark); 5260 } 5261 } 5262 } 5263 /* We assume first XSUB in &DB::sub is the called one. */ 5264 if (UNLIKELY(PL_curcopdb)) { 5265 SAVEVPTR(PL_curcop); 5266 PL_curcop = PL_curcopdb; 5267 PL_curcopdb = NULL; 5268 } 5269 /* Do we need to open block here? XXXX */ 5270 5271 /* calculate gimme here as PL_op might get changed and then not 5272 * restored until the LEAVE further down */ 5273 is_scalar = (GIMME_V == G_SCALAR); 5274 5275 /* CvXSUB(cv) must not be NULL because newXS() refuses NULL xsub address */ 5276 assert(CvXSUB(cv)); 5277 CvXSUB(cv)(aTHX_ cv); 5278 5279 #if defined DEBUGGING && !defined DEBUGGING_RE_ONLY 5280 /* This duplicates the check done in runops_debug(), but provides more 5281 * information in the common case of the fault being with an XSUB. 5282 * 5283 * It should also catch an XSUB pushing more than it extends 5284 * in scalar context. 
 5285 */ 5286 if (PL_curstackinfo->si_stack_hwm < PL_stack_sp - PL_stack_base) 5287 Perl_croak_nocontext( 5288 "panic: XSUB %s::%s (%s) failed to extend arg stack: " 5289 "base=%p, sp=%p, hwm=%p\n", 5290 HvNAME(GvSTASH(CvGV(cv))), GvNAME(CvGV(cv)), CvFILE(cv), 5291 PL_stack_base, PL_stack_sp, 5292 PL_stack_base + PL_curstackinfo->si_stack_hwm); 5293 #endif 5294 /* Enforce some sanity in scalar context. */ 5295 if (is_scalar) { 5296 SV **svp = PL_stack_base + markix + 1; 5297 if (svp != PL_stack_sp) { 5298 *svp = svp > PL_stack_sp ? &PL_sv_undef : *PL_stack_sp; 5299 PL_stack_sp = svp; 5300 } 5301 } 5302 LEAVE; 5303 return NORMAL; 5304 } 5305 } 5306 5307 void 5308 Perl_sub_crush_depth(pTHX_ CV *cv) 5309 { 5310 PERL_ARGS_ASSERT_SUB_CRUSH_DEPTH; 5311 5312 if (CvANON(cv)) 5313 Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on anonymous subroutine"); 5314 else { 5315 Perl_warner(aTHX_ packWARN(WARN_RECURSION), "Deep recursion on subroutine \"%" SVf "\"", 5316 SVfARG(cv_name(cv,NULL,0))); 5317 } 5318 } 5319 5320 5321 5322 /* like croak, but report in context of caller */ 5323 5324 void 5325 Perl_croak_caller(const char *pat, ...) 5326 { 5327 dTHX; 5328 va_list args; 5329 const PERL_CONTEXT *cx = caller_cx(0, NULL); 5330 5331 /* make error appear at call site */ 5332 assert(cx); 5333 PL_curcop = cx->blk_oldcop; 5334 5335 va_start(args, pat); 5336 vcroak(pat, &args); 5337 NOT_REACHED; /* NOTREACHED */ 5338 va_end(args); 5339 } 5340 5341 5342 PP(pp_aelem) 5343 { 5344 dSP; 5345 SV** svp; 5346 SV* const elemsv = POPs; 5347 IV elem = SvIV(elemsv); 5348 AV *const av = MUTABLE_AV(POPs); 5349 const U32 lval = PL_op->op_flags & OPf_MOD || LVRET; 5350 const U32 defer = PL_op->op_private & OPpLVAL_DEFER; 5351 const bool localizing = PL_op->op_private & OPpLVAL_INTRO; 5352 bool preeminent = TRUE; 5353 SV *sv; 5354 5355 if (UNLIKELY(SvROK(elemsv) && !SvGAMAGIC(elemsv) && ckWARN(WARN_MISC))) 5356 Perl_warner(aTHX_ packWARN(WARN_MISC), 5357 "Use of reference \"%" SVf "\" as array index", 5358 SVfARG(elemsv)); 5359 if (UNLIKELY(SvTYPE(av) != SVt_PVAV)) 5360 RETPUSHUNDEF; 5361 5362 if (UNLIKELY(localizing)) { 5363 MAGIC *mg; 5364 HV *stash; 5365 5366 /* If we can determine whether the element exists, 5367 * try to preserve the existence of a tied array 5368 * element by using EXISTS and DELETE if possible. 5369 * Fall back to FETCH and STORE otherwise. */ 5370 if (SvCANEXISTDELETE(av)) 5371 preeminent = av_exists(av, elem); 5372 } 5373 5374 svp = av_fetch(av, elem, lval && !defer); 5375 if (lval) { 5376 #ifdef PERL_MALLOC_WRAP 5377 if (SvUOK(elemsv)) { 5378 const UV uv = SvUV(elemsv); 5379 elem = uv > IV_MAX ? IV_MAX : uv; 5380 } 5381 else if (SvNOK(elemsv)) 5382 elem = (IV)SvNV(elemsv); 5383 if (elem > 0) { 5384 MEM_WRAP_CHECK_s(elem,SV*,"Out of memory during array extend"); 5385 } 5386 #endif 5387 if (!svp || !*svp) { 5388 IV len; 5389 if (!defer) 5390 DIE(aTHX_ PL_no_aelem, elem); 5391 len = av_tindex(av); 5392 /* Resolve a negative index that falls within the array. Leave 5393 it negative if it falls outside the array. */ 5394 if (elem < 0 && len + elem >= 0) 5395 elem = len + elem; 5396 if (elem >= 0 && elem <= len) 5397 /* Falls within the array. */ 5398 PUSHs(av_nonelem(av,elem)); 5399 else 5400 /* Falls outside the array. If it is negative, 5401 magic_setdefelem will use the index for error reporting.
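                   (The deferred element is an LV: nothing is stored in
                    the array unless something is later assigned through
                    it, at which point magic_setdefelem() performs the
                    store.)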
5402 */ 5403 mPUSHs(newSVavdefelem(av, elem, 1)); 5404 RETURN; 5405 } 5406 if (UNLIKELY(localizing)) { 5407 if (preeminent) 5408 save_aelem(av, elem, svp); 5409 else 5410 SAVEADELETE(av, elem); 5411 } 5412 else if (PL_op->op_private & OPpDEREF) { 5413 PUSHs(vivify_ref(*svp, PL_op->op_private & OPpDEREF)); 5414 RETURN; 5415 } 5416 } 5417 sv = (svp ? *svp : &PL_sv_undef); 5418 if (!lval && SvRMAGICAL(av) && SvGMAGICAL(sv)) /* see note in pp_helem() */ 5419 mg_get(sv); 5420 PUSHs(sv); 5421 RETURN; 5422 } 5423 5424 SV* 5425 Perl_vivify_ref(pTHX_ SV *sv, U32 to_what) 5426 { 5427 PERL_ARGS_ASSERT_VIVIFY_REF; 5428 5429 SvGETMAGIC(sv); 5430 if (!SvOK(sv)) { 5431 if (SvREADONLY(sv)) 5432 Perl_croak_no_modify(); 5433 prepare_SV_for_RV(sv); 5434 switch (to_what) { 5435 case OPpDEREF_SV: 5436 SvRV_set(sv, newSV(0)); 5437 break; 5438 case OPpDEREF_AV: 5439 SvRV_set(sv, MUTABLE_SV(newAV())); 5440 break; 5441 case OPpDEREF_HV: 5442 SvRV_set(sv, MUTABLE_SV(newHV())); 5443 break; 5444 } 5445 SvROK_on(sv); 5446 SvSETMAGIC(sv); 5447 SvGETMAGIC(sv); 5448 } 5449 if (SvGMAGICAL(sv)) { 5450 /* copy the sv without magic to prevent magic from being 5451 executed twice */ 5452 SV* msv = sv_newmortal(); 5453 sv_setsv_nomg(msv, sv); 5454 return msv; 5455 } 5456 return sv; 5457 } 5458 5459 PERL_STATIC_INLINE HV * 5460 S_opmethod_stash(pTHX_ SV* meth) 5461 { 5462 SV* ob; 5463 HV* stash; 5464 5465 SV* const sv = PL_stack_base + TOPMARK == PL_stack_sp 5466 ? (Perl_croak(aTHX_ "Can't call method \"%" SVf "\" without a " 5467 "package or object reference", SVfARG(meth)), 5468 (SV *)NULL) 5469 : *(PL_stack_base + TOPMARK + 1); 5470 5471 PERL_ARGS_ASSERT_OPMETHOD_STASH; 5472 5473 if (UNLIKELY(!sv)) 5474 undefined: 5475 Perl_croak(aTHX_ "Can't call method \"%" SVf "\" on an undefined value", 5476 SVfARG(meth)); 5477 5478 if (UNLIKELY(SvGMAGICAL(sv))) mg_get(sv); 5479 else if (SvIsCOW_shared_hash(sv)) { /* MyClass->meth() */ 5480 stash = gv_stashsv(sv, GV_CACHE_ONLY); 5481 if (stash) return stash; 5482 } 5483 5484 if (SvROK(sv)) 5485 ob = MUTABLE_SV(SvRV(sv)); 5486 else if (!SvOK(sv)) goto undefined; 5487 else if (isGV_with_GP(sv)) { 5488 if (!GvIO(sv)) 5489 Perl_croak(aTHX_ "Can't call method \"%" SVf "\" " 5490 "without a package or object reference", 5491 SVfARG(meth)); 5492 ob = sv; 5493 if (SvTYPE(ob) == SVt_PVLV && LvTYPE(ob) == 'y') { 5494 assert(!LvTARGLEN(ob)); 5495 ob = LvTARG(ob); 5496 assert(ob); 5497 } 5498 *(PL_stack_base + TOPMARK + 1) = sv_2mortal(newRV(ob)); 5499 } 5500 else { 5501 /* this isn't a reference */ 5502 GV* iogv; 5503 STRLEN packlen; 5504 const char * const packname = SvPV_nomg_const(sv, packlen); 5505 const U32 packname_utf8 = SvUTF8(sv); 5506 stash = gv_stashpvn(packname, packlen, packname_utf8 | GV_CACHE_ONLY); 5507 if (stash) return stash; 5508 5509 if (!(iogv = gv_fetchpvn_flags( 5510 packname, packlen, packname_utf8, SVt_PVIO 5511 )) || 5512 !(ob=MUTABLE_SV(GvIO(iogv)))) 5513 { 5514 /* this isn't the name of a filehandle either */ 5515 if (!packlen) 5516 { 5517 Perl_croak(aTHX_ "Can't call method \"%" SVf "\" " 5518 "without a package or object reference", 5519 SVfARG(meth)); 5520 } 5521 /* assume it's a package name */ 5522 stash = gv_stashpvn(packname, packlen, packname_utf8); 5523 if (stash) return stash; 5524 else return MUTABLE_HV(sv); 5525 } 5526 /* it _is_ a filehandle name -- replace with a reference */ 5527 *(PL_stack_base + TOPMARK + 1) = sv_2mortal(newRV(MUTABLE_SV(iogv))); 5528 } 5529 5530 /* if we got here, ob should be an object or a glob */ 5531 if (!ob || !(SvOBJECT(ob) 5532 
|| (isGV_with_GP(ob) 5533 && (ob = MUTABLE_SV(GvIO((const GV *)ob))) 5534 && SvOBJECT(ob)))) 5535 { 5536 Perl_croak(aTHX_ "Can't call method \"%" SVf "\" on unblessed reference", 5537 SVfARG((SvPOK(meth) && SvPVX(meth) == PL_isa_DOES) 5538 ? newSVpvs_flags("DOES", SVs_TEMP) 5539 : meth)); 5540 } 5541 5542 return SvSTASH(ob); 5543 } 5544 5545 PP(pp_method) 5546 { 5547 dSP; 5548 GV* gv; 5549 HV* stash; 5550 SV* const meth = TOPs; 5551 5552 if (SvROK(meth)) { 5553 SV* const rmeth = SvRV(meth); 5554 if (SvTYPE(rmeth) == SVt_PVCV) { 5555 SETs(rmeth); 5556 RETURN; 5557 } 5558 } 5559 5560 stash = opmethod_stash(meth); 5561 5562 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK); 5563 assert(gv); 5564 5565 SETs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv)); 5566 RETURN; 5567 } 5568 5569 #define METHOD_CHECK_CACHE(stash,cache,meth) \ 5570 const HE* const he = hv_fetch_ent(cache, meth, 0, 0); \ 5571 if (he) { \ 5572 gv = MUTABLE_GV(HeVAL(he)); \ 5573 if (isGV(gv) && GvCV(gv) && (!GvCVGEN(gv) || GvCVGEN(gv) \ 5574 == (PL_sub_generation + HvMROMETA(stash)->cache_gen))) \ 5575 { \ 5576 XPUSHs(MUTABLE_SV(GvCV(gv))); \ 5577 RETURN; \ 5578 } \ 5579 } \ 5580 5581 PP(pp_method_named) 5582 { 5583 dSP; 5584 GV* gv; 5585 SV* const meth = cMETHOPx_meth(PL_op); 5586 HV* const stash = opmethod_stash(meth); 5587 5588 if (LIKELY(SvTYPE(stash) == SVt_PVHV)) { 5589 METHOD_CHECK_CACHE(stash, stash, meth); 5590 } 5591 5592 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK); 5593 assert(gv); 5594 5595 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv)); 5596 RETURN; 5597 } 5598 5599 PP(pp_method_super) 5600 { 5601 dSP; 5602 GV* gv; 5603 HV* cache; 5604 SV* const meth = cMETHOPx_meth(PL_op); 5605 HV* const stash = CopSTASH(PL_curcop); 5606 /* Actually, SUPER doesn't need real object's (or class') stash at all, 5607 * as it uses CopSTASH. However, we must ensure that object(class) is 5608 * correct (this check is done by S_opmethod_stash) */ 5609 opmethod_stash(meth); 5610 5611 if ((cache = HvMROMETA(stash)->super)) { 5612 METHOD_CHECK_CACHE(stash, cache, meth); 5613 } 5614 5615 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER); 5616 assert(gv); 5617 5618 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv)); 5619 RETURN; 5620 } 5621 5622 PP(pp_method_redir) 5623 { 5624 dSP; 5625 GV* gv; 5626 SV* const meth = cMETHOPx_meth(PL_op); 5627 HV* stash = gv_stashsv(cMETHOPx_rclass(PL_op), 0); 5628 opmethod_stash(meth); /* not used but needed for error checks */ 5629 5630 if (stash) { METHOD_CHECK_CACHE(stash, stash, meth); } 5631 else stash = MUTABLE_HV(cMETHOPx_rclass(PL_op)); 5632 5633 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK); 5634 assert(gv); 5635 5636 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv)); 5637 RETURN; 5638 } 5639 5640 PP(pp_method_redir_super) 5641 { 5642 dSP; 5643 GV* gv; 5644 HV* cache; 5645 SV* const meth = cMETHOPx_meth(PL_op); 5646 HV* stash = gv_stashsv(cMETHOPx_rclass(PL_op), 0); 5647 opmethod_stash(meth); /* not used but needed for error checks */ 5648 5649 if (UNLIKELY(!stash)) stash = MUTABLE_HV(cMETHOPx_rclass(PL_op)); 5650 else if ((cache = HvMROMETA(stash)->super)) { 5651 METHOD_CHECK_CACHE(stash, cache, meth); 5652 } 5653 5654 gv = gv_fetchmethod_sv_flags(stash, meth, GV_AUTOLOAD|GV_CROAK|GV_SUPER); 5655 assert(gv); 5656 5657 XPUSHs(isGV(gv) ? MUTABLE_SV(GvCV(gv)) : MUTABLE_SV(gv)); 5658 RETURN; 5659 } 5660 5661 /* 5662 * ex: set ts=8 sts=4 sw=4 et: 5663 */ 5664