#!./perl -w

BEGIN {
    chdir 't' if -d 't';
    @INC = '../lib';
    require './test.pl';
}

use strict;

# This will crash perl if it fails

use constant PVBM => 'foo';

my $dummy = index 'foo', PVBM;
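# (The index() call above is the trigger: on perls with that optimisation,
# using a constant as the pattern fbm_compile()s it, so PVBM's scalar carries
# Boyer-Moore/FBM data by the time it is copied into the hash below.)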
eval { my %h = (a => PVBM); 1 };

ok (!$@, 'fbm scalar can be inserted into a hash');


my $destroyed;
{ package Class; DESTROY { ++$destroyed; } }

$destroyed = 0;
{
    my %h;
    keys(%h) = 1;
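    # lvalue keys() presizes the hash's bucket array; the bug guarded against
    # here was that a presized hash's contents could outlive the enclosing
    # scope instead of being destroyed on scope exit.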
    $h{key} = bless({}, 'Class');
}
is($destroyed, 1, 'Timely hash destruction with lvalue keys');


# [perl #79178] Hash keys must not be stringified during compilation
# Run perl -MO=Concise -e '$a{\"foo"}' on a non-threaded pre-5.13.8 version
# to see why.
{
    my $key;
    package bar;
    sub TIEHASH { bless {}, $_[0] }
    sub FETCH { $key = $_[1] }
    package main;
    tie my %h, "bar";
    () = $h{\'foo'};
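    # Had the key been stringified at compile time, FETCH would have received
    # the string form ('SCALAR(0x...)') rather than the reference itself.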
    is ref $key, SCALAR =>
     'ref hash keys are not stringified during compilation';
    use constant u => undef;
    no warnings 'uninitialized'; # work around unfixed bug #105918
    () = $h{+u};
    is $key, undef,
      'undef hash keys are not stringified during compilation, either';
}

# Part of RT #85026: Deleting the current iterator in void context does not
# free it.
{
    my $gone;
    no warnings 'once';
    local *::DESTROY = sub { ++$gone };
    my %a=(a=>bless[]);
    each %a;   # make the entry with the obj the current iterator
    delete $a{a};
    ok $gone, 'deleting the current iterator in void context frees the val'
}

# [perl #99660] Deleted hash element visible to destructor
{
    my %h;
    $h{k} = bless [];
    my $normal_exit;
    local *::DESTROY = sub { my $x = $h{k}; ++$normal_exit };
    delete $h{k}; # must be in void context to trigger the bug
    ok $normal_exit, 'freed hash elems are not visible to DESTROY';
}

# [perl #100340] Similar bug: freeing a hash elem during a delete
sub guard::DESTROY {
   ${$_[0]}->();
};
*guard = sub (&) {
   my $callback = shift;
   return bless \$callback, "guard"
};
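# guard() is a minimal scope guard: it blesses a ref to the callback into the
# guard class, so the callback runs when the guard object is destroyed.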
{
  my $ok;
  my %t; %t = (
    stash => {
        guard => guard(sub{
            $ok++;
            delete $t{stash};
        }),
        foo => "bar",
        bar => "baz",
    },
  );
  ok eval { delete $t{stash}{guard}; # must be in void context
            1 },
    'freeing a hash elem from destructor called by delete does not die';
  diag $@ if $@; # panic: free from wrong pool
  is $ok, 1, 'the destructor was called';
}

# Weak references to pad hashes
SKIP: {
    skip_if_miniperl("No Scalar::Util::weaken under miniperl", 1);
    my $ref;
    require Scalar::Util;
    {
        my %hash;
        Scalar::Util::weaken($ref = \%hash);
        1;  # the previous statement must not be the last
    }
    is $ref, undef, 'weak refs to pad hashes go stale on scope exit';
}

# [perl #107440]
sub A::DESTROY { $::ra = 0 }
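# DESTROY clobbers $::ra, so the hash being undeffed/cleared below loses its
# last reference while undef()/list assignment is still working on it.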
$::ra = {a=>bless [], 'A'};
undef %$::ra;
pass 'no crash when freeing hash that is being undeffed';
$::ra = {a=>bless [], 'A'};
%$::ra = ('a'..'z');
pass 'no crash when freeing hash that is being exonerated, ahem, cleared';

# If I have these correct then removing any part of the lazy hash fill handling
# code in hv.c will cause some of these tests to start failing.
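# ('Fill' here means the number of buckets in use; newer perls compute it on
# demand rather than maintaining a running count on every store and delete.)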
sub validate_hash {
  my ($desc, $h) = @_;
  local $::Level = $::Level + 1;

  my $scalar = %$h;
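  # On the perls this test targets, a non-empty hash in scalar context reports
  # bucket usage as 'used/size' (e.g. '4/8'); an empty hash reports 0.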
  my $expect = qr!\A(\d+)/(\d+)\z!;
  like($scalar, $expect, "$desc in scalar context matches pattern");
  my ($used, $total) = $scalar =~ $expect;
  cmp_ok($total, '>', 0, "$desc has >0 array size ($total)");
  cmp_ok($used, '>', 0, "$desc uses >0 heads ($used)");
  cmp_ok($used, '<=', $total,
         "$desc doesn't use more heads than are available");
  return ($used, $total);
}

sub torture_hash {
  my $desc = shift;
  # Intentionally use an anon hash rather than a lexical, as lexicals default
  # to getting reused on subsequent calls
  my $h = {};
  ++$h->{$_} foreach @_;

  my ($used0, $total0) = validate_hash($desc, $h);
  # Remove half the keys each time round, until there are only 1 or 2 left
  my @groups;
  my ($h2, $h3, $h4);
  while (keys %$h > 2) {
    my $take = (keys %$h) / 2 - 1;
    my @keys = (keys %$h)[0 .. $take];
    my $scalar = %$h;
    delete @$h{@keys};
    push @groups, $scalar, \@keys;
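    # Remember the pre-delete scalar value and the removed keys, so the hash
    # can be rebuilt in reverse order further down.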

    my $count = keys %$h;
    my ($used, $total) = validate_hash("$desc (-$count)", $h);
    is($total, $total0, "$desc ($count) has same array size");
    cmp_ok($used, '<=', $used0, "$desc ($count) has same or fewer heads");
    ++$h2->{$_} foreach @keys;
    my (undef, $total2) = validate_hash("$desc (+$count)", $h2);
    cmp_ok($total2, '<=', $total0, "$desc ($count) array size no larger");

    # Each time this will get emptied then repopulated. If the fill isn't reset
    # when the hash is emptied, the used count will likely exceed the array size
    %$h3 = %$h2;
    my (undef, $total3) = validate_hash("$desc (+$count copy)", $h3);
    is($total3, $total2, "$desc (+$count copy) has same array size");

    # This might use fewer buckets than the original
    %$h4 = %$h;
    my (undef, $total4) = validate_hash("$desc ($count copy)", $h4);
    cmp_ok($total4, '<=', $total0, "$desc ($count copy) array size no larger");
  }

  my $scalar = %$h;
  my @keys = keys %$h;
  delete @$h{@keys};
  is(scalar %$h, 0, "scalar keys for empty $desc");

  # Rebuild the original hash, and build a copy
  # These will fail if hash key addition and deletion aren't handled correctly
  my $h1;
  foreach (@keys) {
    ++$h->{$_};
    ++$h1->{$_};
  }
  is(scalar %$h, $scalar, "scalar keys restored when rebuilding");

  while (@groups) {
    my $keys = pop @groups;
    ++$h->{$_} foreach @$keys;
    my (undef, $total) = validate_hash("$desc " . keys %$h, $h);
    is($total, $total0, "bucket count is constant when rebuilding");
    is(scalar %$h, pop @groups, "scalar keys is identical when rebuilding");
    ++$h1->{$_} foreach @$keys;
    validate_hash("$desc copy " . keys %$h1, $h1);
  }
  # This will fail if the fill count isn't handled correctly on hash split
  is(scalar %$h1, scalar %$h, "scalar keys is identical on copy and original");
}

torture_hash('a .. zz', 'a' .. 'zz');
torture_hash('0 .. 9', 0 .. 9);
torture_hash("'Perl'", 'Rules');

done_testing();
