Storable: Fix t/huge.t PERL_TEST_MEMORY diagnostic messages
dist/Storable/t/huge.t
#!./perl

use strict;
use warnings;

use Config;
use Storable qw(dclone);
use Test::More;

BEGIN {
    plan skip_all => 'Storable was not built'
        if $ENV{PERL_CORE} && $Config{'extensions'} !~ /\b Storable \b/x;
    plan skip_all => 'Need 64-bit pointers for this test'
        if $Config{ptrsize} < 8 and $] > 5.013;
    plan skip_all => 'Need 64-bit int for this test on older versions'
        if $Config{uvsize} < 8 and $] < 5.013;
    plan skip_all => 'Need ~4 GiB memory for this test, set PERL_TEST_MEMORY > 4'
        if !$ENV{PERL_TEST_MEMORY} || $ENV{PERL_TEST_MEMORY} < 4;
}
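
# PERL_TEST_MEMORY is the memory budget in GiB. One possible way to run the
# huge cases is shown below; this invocation is illustrative only and assumes
# a built Storable with prove run from the dist/Storable directory:
#   PERL_TEST_MEMORY=8 prove -b t/huge.t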

# Just too big to fit in an I32.
my $huge = int(2 ** 31);
# v5.24.1c/v5.25.1c switched to die earlier with "Too many elements",
# which is much safer.
my $has_too_many = ($Config{usecperl} and
                    (($] >= 5.024001 and $] < 5.025000)
                     or $] >= 5.025001)) ? 1 : 0;

# These overlarge sizes are enabled only since Storable 3.00, and some
# cases need cperl support. Perl5 (as of 5.24) has some internal
# problems with >I32 sizes, which only cperl has fixed.
# perl5 is not yet 2GB safe, especially with hashes.

# string len (xpv_cur):  STRLEN  (ptrsize>=8)
# array size (xav_max):  SSize_t (I32/I64) (ptrsize>=8)
# hash size  (xhv_keys):
#            IV       -     5.12   (ivsize>=8)
#            STRLEN   5.14  - 5.24  (size_t: U32/U64)
#            SSize_t  5.22c - 5.24c (I32/I64)
#            U32      5.25c -
# hash key:  I32
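
# (For reference, the build-time sizes mentioned above can be inspected with
#  a one-liner along these lines; shown only as an illustration:)
#   perl -MConfig -e 'print "$Config{ptrsize} $Config{ivsize} $Config{uvsize}\n"'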
my @cases = (
    ['huge string',
     sub { my $s = 'x' x $huge; \$s }],

    ['array with huge element',
     sub { my $s = 'x' x $huge; [$s] }],

    ['hash with huge value',
     sub { my $s = 'x' x $huge; +{ foo => $s } }],

    # There's no huge-key case: hash keys are limited to I32.
    ) if $Config{ptrsize} > 4;

# An array with a huge number of elements requires several gigabytes of
# virtual memory. On darwin it may be killed.
if ($Config{ptrsize} > 4 and !$has_too_many) {
    # needs 20-55G virtual memory, 4.6M heap and several minutes on a fast machine
    if ($ENV{PERL_TEST_MEMORY} >= 55) {
        push @cases,
            [ 'huge array',
              sub { my @x; $x[$huge] = undef; \@x } ];
    } else {
        diag "skip huge array, need PERL_TEST_MEMORY >= 55";
    }
}

# A hash with a huge number of keys would require tens of gigabytes of
# memory, which doesn't seem like a good idea even for this test file.
# Unfortunately even older 32-bit perls do allow this.
if (!$has_too_many) {
    # needs >90G of virtual memory, and may be killed
    if ($ENV{PERL_TEST_MEMORY} >= 96) {
        # More keys than fit in an I32: impossible for perl5 hashes to
        # handle, but Storable can still serialize it.
        push @cases,
            ['huge hash',
             sub { my %x = (0 .. $huge); \%x } ];
    } else {
        diag "skip huge hash, need PERL_TEST_MEMORY >= 96";
    }
}


plan tests => 2 * scalar @cases;

for (@cases) {
    my ($desc, $build) = @$_;
    diag "building test input: $desc";
    my ($input, $exn, $clone);
    diag "these huge subtests need a lot of memory and time!" if $desc eq 'huge array';
    $input = $build->();
    diag "running test: $desc";
    $exn = $@ if !eval { $clone = dclone($input); 1 };

    is($exn, undef, "$desc no exception");
    is_deeply($input, $clone, "$desc cloned");
    #ok($clone, "$desc cloned");

    # Ensure the huge objects are freed right now:
    undef $input;
    undef $clone;
}