@@ -1,4 +1,4 @@
From 3db6700b9b3faaa2b0afaba91e53383929653ecb Mon Sep 17 00:00:00 2001
From 138028ee8d96c12c7a1fa463ea1303bb9b93577f Mon Sep 17 00:00:00 2001
From: Oliver Tappe <zooey@hirschkaefer.de>
Date: Sun, 22 Sep 2013 14:52:03 +0200
Subject: Tell perl that BFS has a link count of 1
@@ -22,7 +22,7 @@ index 817c6d9..c3298a6 100644
2.21.0


From 4eabb29aee8c3a2d888e70123e554a5f28dd2f85 Mon Sep 17 00:00:00 2001
From ecb07d51515bb563ccd2663cbbff04f1b93c9959 Mon Sep 17 00:00:00 2001
From: Oliver Tappe <zooey@hirschkaefer.de>
Date: Sun, 22 Sep 2013 14:52:53 +0200
Subject: Haiku defines, but does not implement O_EXLOCK
@@ -46,7 +46,7 @@ index ff8c7f9..4364bf6 100644
2.21.0


From bac2c3f2d94428de753f69c061882830dbc347a2 Mon Sep 17 00:00:00 2001
From 2e5b03c3468e59495c1a00dc60fe83a2e6e9912e Mon Sep 17 00:00:00 2001
From: Oliver Tappe <zooey@hirschkaefer.de>
Date: Sun, 22 Sep 2013 14:53:40 +0200
Subject: haiku sets all its specifics via Configure
@@ -106,17 +106,17 @@ index fa8ebe5..0f09f53 100644
2.21.0


From ca22cd63f9ca9539a7093e74bc78204f13f3c6e2 Mon Sep 17 00:00:00 2001
From 05292785ddb832fb7545a0d310bd857b1c31f4c9 Mon Sep 17 00:00:00 2001
From: Oliver Tappe <zooey@hirschkaefer.de>
Date: Sun, 22 Sep 2013 14:54:15 +0200
Subject: Tell perl that Haiku needs haikuish.h installed as well


diff --git a/installperl b/installperl
index 3bf79d2..5cb6b3f 100755
index 6cd65a0..6114fce 100755
--- a/installperl
+++ b/installperl
@@ -381,6 +381,11 @@ elsif ($Is_Cygwin) { # On Cygwin symlink it to CORE to make Makefile happy
@@ -382,6 +382,11 @@ elsif ($Is_Cygwin) { # On Cygwin symlink it to CORE to make Makefile happy

# AIX needs perl.exp installed as well.
push(@corefiles,'perl.exp') if $^O eq 'aix';
@@ -132,7 +132,7 @@ index 3bf79d2..5cb6b3f 100755
2.21.0


From 84218cf7e177c3c172016daa255956e4a200d71f Mon Sep 17 00:00:00 2001
From f574057129f4fb1241c2fae8440aa139ae5f33f9 Mon Sep 17 00:00:00 2001
From: Oliver Tappe <zooey@hirschkaefer.de>
Date: Sun, 22 Sep 2013 14:55:13 +0200
Subject: Fix handling of exit codes on Haiku
@@ -183,7 +183,7 @@ index ce3270e..cab9a79 100644
2.21.0


From b935e9ef50b380b18511c12f3496e4f35a1bd460 Mon Sep 17 00:00:00 2001
From cf5605acf93f6e07484df56b695d93f39b96e0bf Mon Sep 17 00:00:00 2001
From: Oliver Tappe <zooey@hirschkaefer.de>
Date: Sun, 22 Sep 2013 15:00:44 +0200
Subject: Fix include path of errno.h
@@ -206,7 +206,7 @@ index d565f31..f0ee43b 100644
2.21.0


From 519a5e068966c7d04c8796d6e73d8c5d5ae01a9d Mon Sep 17 00:00:00 2001
From f15968c2bbe5db9ed5545f1c923a4b476756aa1e Mon Sep 17 00:00:00 2001
From: Oliver Tappe <zooey@hirschkaefer.de>
Date: Sat, 28 Sep 2013 13:46:42 +0200
Subject: Adjust ExtUtils::MakeMaker for PM-Haiku.
@@ -310,7 +310,7 @@ index 0000000..81e5f99
2.21.0


From 014cd8b9ff8639d212e1f29dec307f58cd5d0e83 Mon Sep 17 00:00:00 2001
From fe71122a98b710baf365fc2bd12e3298b1633889 Mon Sep 17 00:00:00 2001
From: Oliver Tappe <zooey@hirschkaefer.de>
Date: Tue, 8 Oct 2013 22:16:37 +0200
Subject: Avoid using -rpath for dynamic modules.
@@ -358,7 +358,7 @@ index 81e5f99..25ace13 100644
2.21.0


From 9b18cb650121876c6a7087fb2c0fcc7a958ce537 Mon Sep 17 00:00:00 2001
From 50e36aceedc3a20d6a1b0d8b5b109c80d5b07782 Mon Sep 17 00:00:00 2001
From: Oliver Tappe <zooey@hirschkaefer.de>
Date: Tue, 8 Oct 2013 22:17:26 +0200
Subject: Add script sitecustomize.pl for setting up @INC as we need it.
@@ -410,7 +410,7 @@ index 0000000..a321e51
2.21.0


From 3764128c11b9d4327d08c99e714d4226dc8d96e0 Mon Sep 17 00:00:00 2001
From b17410fa468a27c33cc3a2293af2fb577a16141c Mon Sep 17 00:00:00 2001
From: Oliver Tappe <zooey@hirschkaefer.de>
Date: Wed, 9 Oct 2013 20:29:38 +0200
Subject: Fix initialization check for CPAN.
@@ -440,7 +440,7 @@ index 49fa8ab..bc701d8 100644
2.21.0


From a64e11a378dd9adcdba97af967f94306e8ebd763 Mon Sep 17 00:00:00 2001
From 5165526c7021de44f4036f64430cca9a1619d5b4 Mon Sep 17 00:00:00 2001
From: Oliver Tappe <zooey@hirschkaefer.de>
Date: Sun, 13 Oct 2013 17:32:50 +0200
Subject: Add support for HAIKU_USE_VENDOR_DIRECTORIES.
@@ -516,14 +516,14 @@ index 25ace13..8a04ead 100644
2.21.0


From a4dbb9ef2b1c5e42b5b17a55c11cb2988856842c Mon Sep 17 00:00:00 2001
From 4f18bd7b734a0e3a951aa1e8f37231b0bafdac63 Mon Sep 17 00:00:00 2001
From: Jerome Duval <jerome.duval@gmail.com>
Date: Fri, 9 Jun 2017 21:30:33 +0200
Subject: disable fstack-protector for Haiku


diff --git a/Configure b/Configure
index 3be9f05..e5c3c0a 100755
index f99377e..4cb2905 100755
--- a/Configure
+++ b/Configure
@@ -5493,6 +5493,7 @@ default|recommended)
@@ -538,7 +538,7 @@ index 3be9f05..e5c3c0a 100755
2.21.0


From 7c1c6508a5be3606e9c4d77041eba0e57b5666ac Mon Sep 17 00:00:00 2001
From 944bba5bc3e7aad7196e5a9e87a946c4f5842b99 Mon Sep 17 00:00:00 2001
From: Jerome Duval <jerome.duval@gmail.com>
Date: Wed, 14 Mar 2018 21:33:54 +0100
Subject: sitecustomize.pl: use the shortVersion for site and vendor.
@@ -578,307 +578,18 @@ index a321e51..76c0e46 100644
2.21.0


From 7e4b9dd12eba09d7d0df167ab8576ef6db9e4308 Mon Sep 17 00:00:00 2001
From: Tony Cook <tony@develop-help.com>
Date: Tue, 7 Aug 2018 15:34:06 +1000
Subject: (perl #133326) fix and clarify handling of recurs_sv.

There were a few problems:

- the purpose of recur_sv wasn't clear, I believe I understand it
now from looking at where recur_sv was actually being used.
Frankly the logic of the code itself was hard to follow, apparently
only counting a level if the recur_sv was equal to the current
SV.

Fixed by adding some documentation to recur_sv in the context
structure. The logic has been re-worked (see below) to hopefully
make it more understandable.

- the conditional checks for inc/decrementing recur_depth at the
beginnings and ends of the store_array() and store_hash() handlers
didn't match, since recur_sv was both
explicitly modified by those functions and implicitly modified
in their recursive calls to process elements.

Fixing by storing the starting value of cxt->recur_sv locally
testing against that instead of against the value that might be
modified recursively.

- the checks in store_ref(), store_array(), store_l?hash() were
over complex, obscuring their purpose.

Fixed by:
- always count a recursion level in store_ref() and store the
RV in recur_sv
- only count a recursion level in the array/hash handlers if
the SV didn't match.
- skip the check against cxt->entry, if we're in this code
we could be recursing, so we want to detect it.

- (after the other changes) the recursion checks in store_hash()/
store_lhash() only checked the limit if the SV didn't match the
recur_sv, which horribly broke things.

Fixed by:
- Now only make the depth increment conditional, and always
check against the limit if one is set.

(cherry picked from commit 120060c86e233cb9f588314214137f3ed1b48e2a)

diff --git a/dist/Storable/Storable.xs b/dist/Storable/Storable.xs
index 6a90e24..f6df32b 100644
--- a/dist/Storable/Storable.xs
+++ b/dist/Storable/Storable.xs
@@ -418,6 +418,24 @@ typedef struct stcxt {
SV *(**retrieve_vtbl)(pTHX_ struct stcxt *, const char *); /* retrieve dispatch table */
SV *prev; /* contexts chained backwards in real recursion */
SV *my_sv; /* the blessed scalar who's SvPVX() I am */
+
+ /* recur_sv:
+
+ A hashref of hashrefs or arrayref of arrayrefs is actually a
+ chain of four SVs, eg for an array ref containing an array ref:
+
+ RV -> AV (element) -> RV -> AV
+
+ To make this depth appear natural from a perl level we only
+ want to count this as two levels, so store_ref() stores it's RV
+ into recur_sv and store_array()/store_hash() will only count
+ that level if the AV/HV *isn't* recur_sv.
+
+ We can't just have store_hash()/store_array() not count that
+ level, since it's possible for XS code to store an AV or HV
+ directly as an element (though perl code trying to access such
+ an object will generally croak.)
+ */
SV *recur_sv; /* check only one recursive SV */
int in_retrieve_overloaded; /* performance hack for retrieving overloaded objects */
int flags; /* controls whether to bless or tie objects */
@@ -431,8 +449,13 @@ typedef struct stcxt {

#define RECURSION_TOO_DEEP() \
(cxt->max_recur_depth != -1 && ++cxt->recur_depth > cxt->max_recur_depth)
+
+/* There's cases where we need to check whether the hash recursion
+ limit has been reached without bumping the recursion levels, so the
+ hash check doesn't bump the depth.
+*/
#define RECURSION_TOO_DEEP_HASH() \
- (cxt->max_recur_depth_hash != -1 && ++cxt->recur_depth > cxt->max_recur_depth_hash)
+ (cxt->max_recur_depth_hash != -1 && cxt->recur_depth > cxt->max_recur_depth_hash)
#define MAX_DEPTH_ERROR "Max. recursion depth with nested structures exceeded"

static int storable_free(pTHX_ SV *sv, MAGIC* mg);
@@ -2360,21 +2383,20 @@ static int store_ref(pTHX_ stcxt_t *cxt, SV *sv)
} else
PUTMARK(is_weak ? SX_WEAKREF : SX_REF);

- TRACEME(("recur_depth %" IVdf ", recur_sv (0x%" UVxf ")", cxt->recur_depth,
- PTR2UV(cxt->recur_sv)));
- if (cxt->entry && cxt->recur_sv == sv) {
- if (RECURSION_TOO_DEEP()) {
+ cxt->recur_sv = sv;
+
+ TRACEME((">ref recur_depth %" IVdf ", recur_sv (0x%" UVxf ") max %" IVdf, cxt->recur_depth,
+ PTR2UV(cxt->recur_sv), cxt->max_recur_depth));
+ if (RECURSION_TOO_DEEP()) {
#if PERL_VERSION < 15
- cleanup_recursive_data(aTHX_ (SV*)sv);
+ cleanup_recursive_data(aTHX_ (SV*)sv);
#endif
- CROAK((MAX_DEPTH_ERROR));
- }
+ CROAK((MAX_DEPTH_ERROR));
}
- cxt->recur_sv = sv;

retval = store(aTHX_ cxt, sv);
- if (cxt->entry && cxt->recur_sv == sv && cxt->recur_depth > 0) {
- TRACEME(("recur_depth --%" IVdf, cxt->recur_depth));
+ if (cxt->max_recur_depth != -1 && cxt->recur_depth > 0) {
+ TRACEME(("<ref recur_depth --%" IVdf, cxt->recur_depth));
--cxt->recur_depth;
}
return retval;
@@ -2635,6 +2657,7 @@ static int store_array(pTHX_ stcxt_t *cxt, AV *av)
UV len = av_len(av) + 1;
UV i;
int ret;
+ SV *const recur_sv = cxt->recur_sv;

TRACEME(("store_array (0x%" UVxf ")", PTR2UV(av)));

@@ -2659,9 +2682,9 @@ static int store_array(pTHX_ stcxt_t *cxt, AV *av)
TRACEME(("size = %d", (int)l));
}

- TRACEME(("recur_depth %" IVdf ", recur_sv (0x%" UVxf ")", cxt->recur_depth,
- PTR2UV(cxt->recur_sv)));
- if (cxt->entry && cxt->recur_sv == (SV*)av) {
+ TRACEME((">array recur_depth %" IVdf ", recur_sv (0x%" UVxf ") max %" IVdf, cxt->recur_depth,
+ PTR2UV(cxt->recur_sv), cxt->max_recur_depth));
+ if (recur_sv != (SV*)av) {
if (RECURSION_TOO_DEEP()) {
/* with <= 5.14 it recurses in the cleanup also, needing 2x stack size */
#if PERL_VERSION < 15
@@ -2670,7 +2693,6 @@ static int store_array(pTHX_ stcxt_t *cxt, AV *av)
CROAK((MAX_DEPTH_ERROR));
}
}
- cxt->recur_sv = (SV*)av;

/*
* Now store each item recursively.
@@ -2701,9 +2723,12 @@ static int store_array(pTHX_ stcxt_t *cxt, AV *av)
return ret;
}

- if (cxt->entry && cxt->recur_sv == (SV*)av && cxt->recur_depth > 0) {
- TRACEME(("recur_depth --%" IVdf, cxt->recur_depth));
- --cxt->recur_depth;
+ if (recur_sv != (SV*)av) {
+ assert(cxt->max_recur_depth == -1 || cxt->recur_depth > 0);
+ if (cxt->max_recur_depth != -1 && cxt->recur_depth > 0) {
+ TRACEME(("<array recur_depth --%" IVdf, cxt->recur_depth));
+ --cxt->recur_depth;
+ }
}
TRACEME(("ok (array)"));

@@ -2766,6 +2791,7 @@ static int store_hash(pTHX_ stcxt_t *cxt, HV *hv)
#endif
) ? 1 : 0);
unsigned char hash_flags = (SvREADONLY(hv) ? SHV_RESTRICTED : 0);
+ SV * const recur_sv = cxt->recur_sv;

/*
* Signal hash by emitting SX_HASH, followed by the table length.
@@ -2817,17 +2843,17 @@ static int store_hash(pTHX_ stcxt_t *cxt, HV *hv)
TRACEME(("size = %d, used = %d", (int)l, (int)HvUSEDKEYS(hv)));
}

- TRACEME(("recur_depth %" IVdf ", recur_sv (0x%" UVxf ")", cxt->recur_depth,
- PTR2UV(cxt->recur_sv)));
- if (cxt->entry && cxt->recur_sv == (SV*)hv) {
- if (RECURSION_TOO_DEEP_HASH()) {
+ TRACEME((">hash recur_depth %" IVdf ", recur_sv (0x%" UVxf ") max %" IVdf, cxt->recur_depth,
+ PTR2UV(cxt->recur_sv), cxt->max_recur_depth_hash));
+ if (recur_sv != (SV*)hv && cxt->max_recur_depth_hash != -1) {
+ ++cxt->recur_depth;
+ }
+ if (RECURSION_TOO_DEEP_HASH()) {
#if PERL_VERSION < 15
- cleanup_recursive_data(aTHX_ (SV*)hv);
+ cleanup_recursive_data(aTHX_ (SV*)hv);
#endif
- CROAK((MAX_DEPTH_ERROR));
- }
+ CROAK((MAX_DEPTH_ERROR));
}
- cxt->recur_sv = (SV*)hv;

/*
* Save possible iteration state via each() on that table.
@@ -3107,8 +3133,9 @@ static int store_hash(pTHX_ stcxt_t *cxt, HV *hv)
TRACEME(("ok (hash 0x%" UVxf ")", PTR2UV(hv)));

out:
- if (cxt->entry && cxt->recur_sv == (SV*)hv && cxt->recur_depth > 0) {
- TRACEME(("recur_depth --%" IVdf , cxt->recur_depth));
+ assert(cxt->max_recur_depth_hash != -1 && cxt->recur_depth > 0);
+ TRACEME(("<hash recur_depth --%" IVdf , cxt->recur_depth));
+ if (cxt->max_recur_depth_hash != -1 && recur_sv != (SV*)hv && cxt->recur_depth > 0) {
--cxt->recur_depth;
}
HvRITER_set(hv, riter); /* Restore hash iterator state */
@@ -3221,6 +3248,7 @@ static int store_lhash(pTHX_ stcxt_t *cxt, HV *hv, unsigned char hash_flags)
#ifdef DEBUGME
UV len = (UV)HvTOTALKEYS(hv);
#endif
+ SV * const recur_sv = cxt->recur_sv;
if (hash_flags) {
TRACEME(("store_lhash (0x%" UVxf ") (flags %x)", PTR2UV(hv),
(int) hash_flags));
@@ -3231,15 +3259,15 @@ static int store_lhash(pTHX_ stcxt_t *cxt, HV *hv, unsigned char hash_flags)

TRACEME(("recur_depth %" IVdf ", recur_sv (0x%" UVxf ")", cxt->recur_depth,
PTR2UV(cxt->recur_sv)));
- if (cxt->entry && cxt->recur_sv == (SV*)hv) {
- if (RECURSION_TOO_DEEP_HASH()) {
+ if (recur_sv != (SV*)hv && cxt->max_recur_depth_hash != -1) {
+ ++cxt->recur_depth;
+ }
+ if (RECURSION_TOO_DEEP_HASH()) {
#if PERL_VERSION < 15
- cleanup_recursive_data(aTHX_ (SV*)hv);
+ cleanup_recursive_data(aTHX_ (SV*)hv);
#endif
- CROAK((MAX_DEPTH_ERROR));
- }
+ CROAK((MAX_DEPTH_ERROR));
}
- cxt->recur_sv = (SV*)hv;

array = HvARRAY(hv);
for (i = 0; i <= (Size_t)HvMAX(hv); i++) {
@@ -3252,7 +3280,7 @@ static int store_lhash(pTHX_ stcxt_t *cxt, HV *hv, unsigned char hash_flags)
return ret;
}
}
- if (cxt->entry && cxt->recur_sv == (SV*)hv && cxt->recur_depth > 0) {
+ if (recur_sv == (SV*)hv && cxt->max_recur_depth_hash != -1 && cxt->recur_depth > 0) {
TRACEME(("recur_depth --%" IVdf, cxt->recur_depth));
--cxt->recur_depth;
}
diff --git a/dist/Storable/t/recurse.t b/dist/Storable/t/recurse.t
index fa8be0b..63fde90 100644
--- a/dist/Storable/t/recurse.t
+++ b/dist/Storable/t/recurse.t
@@ -20,7 +20,7 @@ use Storable qw(freeze thaw dclone);

$Storable::flags = Storable::FLAGS_COMPAT;

-use Test::More tests => 38;
+use Test::More tests => 39;

package OBJ_REAL;

@@ -364,5 +364,17 @@ else {
dclone $t;
};
like $@, qr/Max\. recursion depth with nested structures exceeded/,
- 'Caught href stack overflow '.MAX_DEPTH*2;
+ 'Caught href stack overflow '.MAX_DEPTH_HASH*2;
+}
+
+{
+ # perl #133326
+ my @tt;
+ #$Storable::DEBUGME=1;
+ for (1..16000) {
+ my $t = [[[]]];
+ push @tt, $t;
+ }
+ ok(eval { dclone \@tt; 1 },
+ "low depth structure shouldn't be treated as nested");
}
--
2.21.0


From 517d79b3f74b6093e527d2e417d76ff8079506c9 Mon Sep 17 00:00:00 2001
From 6edd86c7e4f618e63918a685df1c5a045cd08911 Mon Sep 17 00:00:00 2001
From: Tony Cook <tony@develop-help.com>
Date: Tue, 4 Dec 2018 15:11:17 +1100
Subject: (perl #133708) remove build-time probing for stack limits for
Storable


diff --git a/Makefile.SH b/Makefile.SH
index 5197009..93f4ae1 100755
index 47edfe1..20c2ab1 100755
--- a/Makefile.SH
+++ b/Makefile.SH
@@ -176,15 +176,11 @@ esac
@@ -184,15 +184,11 @@ esac

: is Cwd static or dynamic
static_cwd='define'
@@ -894,15 +605,15 @@ index 5197009..93f4ae1 100755
esac
done

@@ -221,7 +217,6 @@ for f in $static_ext; do
@@ -229,7 +225,6 @@ for f in $static_ext; do
$this_target: lib/auto/List/Util/Util\$(LIB_EXT)" ;;
Unicode/Normalize) extra_dep="$extra_dep
$this_target: uni.data" ;;
- Storable) storable_type='static' ;;
esac
done

@@ -1158,26 +1153,6 @@ no_install no-install: install-notify
@@ -1188,26 +1183,6 @@ no_install no-install: install-notify
install: install-all
!NO!SUBS!

@@ -930,7 +641,7 @@ index 5197009..93f4ae1 100755
flags="--$name";
flags=`echo $flags | sed -e 's/--all//'`
diff --git a/dist/Storable/Makefile.PL b/dist/Storable/Makefile.PL
index 6977505..60af5d2 100644
index 092bab4..60af5d2 100644
--- a/dist/Storable/Makefile.PL
+++ b/dist/Storable/Makefile.PL
@@ -12,13 +12,7 @@ use Config;
@@ -947,7 +658,7 @@ index 6977505..60af5d2 100644

WriteMakefile(
NAME => 'Storable',
@@ -81,21 +75,7 @@ sub xlinkext {
@@ -81,28 +75,7 @@ sub xlinkext {
}

sub depend {
@@ -960,16 +671,23 @@ index 6977505..60af5d2 100644
- # blib.pm needs arch/lib
- $extra_deps = ' Storable.pm';
- }
- my $whichperl;
- if ($Config::Config{usecrosscompile}) {
- $whichperl = '$(PERLRUN)';
- }
- else {
- $whichperl = '$(FULLPERLRUNINST)';
- }
- my $linktype = uc($_[0]->{LINKTYPE});
- my $limit_pm = File::Spec->catfile('lib', 'Storable', 'Limit.pm');
"
-$limit_pm : stacksize \$(INST_$linktype)$extra_deps
- \$(MKPATH) \$(INST_LIB)
- \$(FULLPERLRUNINST) stacksize $options
- $whichperl stacksize $options

release : dist
git tag \$(VERSION)
@@ -105,18 +85,6 @@ release : dist
@@ -112,18 +85,6 @@ release : dist
"
}

@@ -989,10 +707,10 @@ index 6977505..60af5d2 100644
'
all :: Storable.pm
diff --git a/dist/Storable/__Storable__.pm b/dist/Storable/__Storable__.pm
index 71c669d..e5a655e 100644
index e77b240..8bb9117 100644
--- a/dist/Storable/__Storable__.pm
+++ b/dist/Storable/__Storable__.pm
@@ -32,8 +32,6 @@ our $VERSION = '3.08';
@@ -32,8 +32,6 @@ our $VERSION = '3.08_01';
our $recursion_limit;
our $recursion_limit_hash;

@@ -1038,19 +756,20 @@ index 71c669d..e5a655e 100644
modifying C<$Storable::recursion_limit> and
C<$Storable::recursion_limit_hash> respectively. Either can be set to
diff --git a/dist/Storable/stacksize b/dist/Storable/stacksize
index 7abd3a8..27bd826 100644
index 14e0739..08bff78 100644
--- a/dist/Storable/stacksize
+++ b/dist/Storable/stacksize
@@ -1,18 +1,17 @@
@@ -1,6 +1,6 @@
#!/usr/bin/perl
# binary search maximum stack depth for arrays and hashes
-# and store it in lib/Storable/Limit.pm
+# and report it to stdout as code to set the limits

use Config;
use Cwd;
use File::Spec;
use strict;
@@ -10,12 +10,11 @@ use strict;
-d "lib" or mkdir "lib";
-d "lib/Storable" or mkdir "lib/Storable";

-my $fn = "lib/Storable/Limit.pm";
my $ptrsize = $Config{ptrsize};
@@ -1062,7 +781,7 @@ index 7abd3a8..27bd826 100644
? 1 : 0
}
sub PARALLEL () {
@@ -44,61 +43,28 @@ elsif (system("ulimit -c 0 ;") == 0) {
@@ -51,58 +50,28 @@ elsif (system("ulimit -c 0 ;") == 0) {
# try to prevent core dumps
$prefix = "ulimit -c 0 ; ";
}
@@ -1089,9 +808,6 @@ index 7abd3a8..27bd826 100644
- $PERL = "$ldlib=$path $PERL";
- }
-}
-
--d "lib" or mkdir "lib";
--d "lib/Storable" or mkdir "lib/Storable";
-
if ($^O eq "MSWin32") {
require Win32;
@@ -1131,7 +847,7 @@ index 7abd3a8..27bd826 100644
}

sub cmd {
@@ -113,7 +79,7 @@ sub cmd {
@@ -117,7 +86,7 @@ sub cmd {
sub good {
my $i = shift; # this passed
my $j = $i + abs(int(($bad - $i) / 2));
@@ -1140,7 +856,7 @@ index 7abd3a8..27bd826 100644
$good = $i;
if ($j <= $i) {
$found++;
@@ -124,7 +90,7 @@ sub good {
@@ -128,7 +97,7 @@ sub good {
sub bad {
my $i = shift; # this failed
my $j = $i - abs(int(($i - $good) / 2));
@@ -1149,7 +865,7 @@ index 7abd3a8..27bd826 100644
$bad = $i;
if ($j >= $i) {
$j = $good;
@@ -158,7 +124,7 @@ while (!$found) {
@@ -162,7 +131,7 @@ while (!$found) {
$n = bad($n);
}
}
@@ -1158,7 +874,7 @@ index 7abd3a8..27bd826 100644
my $max_depth = $n;

($n, $good, $bad, $found) =
@@ -182,13 +148,13 @@ if ($max_depth == $bad1-1
@@ -186,13 +155,13 @@ if ($max_depth == $bad1-1
and $n == $bad2-1)
{
# more likely the shell. travis docker ubuntu, mingw e.g.
@@ -1174,7 +890,7 @@ index 7abd3a8..27bd826 100644
my $max_depth_hash = $n;

# Previously this calculation was done in the macro, calculate it here
@@ -199,7 +165,7 @@ my $max_depth_hash = $n;
@@ -203,7 +172,7 @@ my $max_depth_hash = $n;
# several apparently random failures here, eg. working in one
# configuration, but not in a very similar configuration.
$max_depth = int(0.6 * $max_depth);
@@ -1183,7 +899,7 @@ index 7abd3a8..27bd826 100644

my $stack_reserve = $^O eq "MSWin32" ? 32 : 16;
if ($] ge "5.016" && !($^O eq "cygwin" && $ptrsize == 8)) {
@@ -217,16 +183,11 @@ write_limits($max_depth, $max_depth_hash);
@@ -221,16 +190,11 @@ write_limits($max_depth, $max_depth_hash);

sub write_limits {
my ($max_depth, $max_depth_hash) = @_;
@@ -1219,10 +935,10 @@ index 63fde90..b5967a0 100644
my $t;
print "# max depth ", MAX_DEPTH, "\n";
diff --git a/win32/GNUmakefile b/win32/GNUmakefile
index 05193a1..afc18ec 100644
index 200d8a5..1c163d7 100644
--- a/win32/GNUmakefile
+++ b/win32/GNUmakefile
@@ -1168,7 +1168,7 @@ CFG_VARS = \
@@ -1177,7 +1177,7 @@ CFG_VARS = \

.PHONY: all info

@@ -1231,7 +947,7 @@ index 05193a1..afc18ec 100644

info :
@echo # CCTYPE=$(CCTYPE)
@@ -1612,16 +1612,6 @@ Extensions_clean :
@@ -1621,16 +1621,6 @@ Extensions_clean :
Extensions_realclean :
-if exist $(MINIPERL) $(MINIPERL) -I..\lib ..\make_ext.pl "MAKE=$(PLMAKE)" --dir=$(CPANDIR) --dir=$(DISTDIR) --dir=$(EXTDIR) --all --target=realclean

@@ -1249,10 +965,10 @@ index 05193a1..afc18ec 100644
# be running in parallel like UNIDATAFILES, this target a placeholder for the
# future
diff --git a/win32/Makefile b/win32/Makefile
index 1e245f7..a4776dd 100644
index 481fcd8..8e7878c 100644
--- a/win32/Makefile
+++ b/win32/Makefile
@@ -939,7 +939,7 @@ CFG_VARS = \
@@ -948,7 +948,7 @@ CFG_VARS = \
#

all : .\config.h ..\git_version.h $(GLOBEXE) $(CONFIGPM) \
@@ -1261,7 +977,7 @@ index 1e245f7..a4776dd 100644
$(PERLSTATIC)
@echo Everything is up to date. '$(MAKE_BARE) test' to run test suite.

@@ -1225,13 +1225,6 @@ Extensions_clean:
@@ -1234,13 +1234,6 @@ Extensions_clean:
Extensions_realclean:
-if exist $(MINIPERL) $(MINIPERL) -I..\lib ..\make_ext.pl "MAKE=$(MAKE)" --dir=$(CPANDIR) --dir=$(DISTDIR) --dir=$(EXTDIR) --all --target=realclean

@@ -1275,7 +991,7 @@ index 1e245f7..a4776dd 100644
#-------------------------------------------------------------------------------

doc: $(PERLEXE) ..\pod\perltoc.pod
@@ -1310,7 +1303,6 @@ distclean: realclean
@@ -1319,7 +1312,6 @@ distclean: realclean
-del /f $(LIBDIR)\Time\HiRes.pm
-del /f $(LIBDIR)\Unicode\Normalize.pm
-del /f $(LIBDIR)\Math\BigInt\FastCalc.pm
@@ -1284,10 +1000,10 @@ index 1e245f7..a4776dd 100644
-del /f $(LIBDIR)\Win32CORE.pm
-del /f $(LIBDIR)\Win32API\File.pm
diff --git a/win32/makefile.mk b/win32/makefile.mk
index a6072a9..15b148d 100644
index 7dae753..8122c75 100644
--- a/win32/makefile.mk
+++ b/win32/makefile.mk
@@ -1139,7 +1139,7 @@ CFG_VARS = \
@@ -1148,7 +1148,7 @@ CFG_VARS = \
# Top targets
#

@@ -1296,7 +1012,7 @@ index a6072a9..15b148d 100644

..\regcomp$(o) : ..\regnodes.h ..\regcharclass.h

@@ -1563,17 +1563,6 @@ rebasePE : Extensions $(PERLDLL) $(NORMALIZE_DYN) $(PERLEXE)
@@ -1572,17 +1572,6 @@ rebasePE : Extensions $(PERLDLL) $(NORMALIZE_DYN) $(PERLEXE)
.ENDIF
$(NOOP)

@@ -1314,7 +1030,7 @@ index a6072a9..15b148d 100644
#-------------------------------------------------------------------------------


@@ -1648,7 +1637,7 @@ distclean: realclean
@@ -1657,7 +1646,7 @@ distclean: realclean
-del /f $(LIBDIR)\Time\HiRes.pm
-del /f $(LIBDIR)\Unicode\Normalize.pm
-del /f $(LIBDIR)\Math\BigInt\FastCalc.pm
@@ -1327,14 +1043,14 @@ index a6072a9..15b148d 100644
2.21.0


From 8b7162312646bf1ac283e8765dd328d049f170c8 Mon Sep 17 00:00:00 2001
From 13ba96f1b4b43793549a74f0d339d8534ae551ff Mon Sep 17 00:00:00 2001
From: Jerome Duval <jerome.duval@gmail.com>
Date: Thu, 25 Apr 2019 23:19:43 +0200
Subject: locale.c: cast to char before comparing with a char constant.


diff --git a/locale.c b/locale.c
index 5b6460a..dc7cb0b 100644
index 7653340..fc21330 100644
--- a/locale.c
+++ b/locale.c
@@ -4927,7 +4927,7 @@ Perl__is_cur_LC_category_utf8(pTHX_ int category)
@@ -13,13 +13,13 @@ applications. Perl is nicknamed 'the Swiss Army chainsaw of scripting \
languages' because of its flexibility and power, and possibly also because of \
its perceived 'ugliness'."
HOMEPAGE="http://www.perl.org/"
COPYRIGHT="1993-2018 Larry Wall and others"
COPYRIGHT="1993-2019 Larry Wall and others"
LICENSE="GNU GPL v1
Artistic"
REVISION="2"
REVISION="1"
perlShortVersion="${portVersion%.*}"
SOURCE_URI="http://www.cpan.org/src/perl-$portVersion.tar.gz"
CHECKSUM_SHA256="3ebf85fe65df2ee165b22596540b7d5d42f84d4b72d84834f74e2e0b8956c347"
CHECKSUM_SHA256="aa95456dddb3eb1cc5475fed4e08f91876bea71fb636fba6399054dfbabed6c7"
PATCHES="perl-$portVersion.patchset"

ARCHITECTURES="x86_gcc2 x86 x86_64 ?arm"
@@ -0,0 +1,109 @@
From 55aabcadc9ec96985a69b0422e66673eaa34d174 Mon Sep 17 00:00:00 2001
From: Sergei Reznikov <diver@gelios.net>
Date: Tue, 3 Nov 2015 14:31:04 +0300
Subject: Haiku: implement processNameByPid


diff --git a/src/lib/corelib/tools/processutils.cpp b/src/lib/corelib/tools/processutils.cpp
index 060a057..22fcbe2 100644
--- a/src/lib/corelib/tools/processutils.cpp
+++ b/src/lib/corelib/tools/processutils.cpp
@@ -49,6 +49,8 @@
# include "fileinfo.h"
# include <unistd.h>
# include <cstdio>
+#elif defined(Q_OS_HAIKU)
+# include <OS.h>
#elif defined(Q_OS_BSD4)
# include <QtCore/qfile.h>
# include <sys/cdefs.h>
@@ -94,6 +96,13 @@ QString processNameByPid(qint64 pid)
sprintf(exePath, "/proc/%lld/exe", pid);
readlink(exePath, buf, sizeof(buf));
return FileInfo::fileName(QString::fromUtf8(buf));
+#elif defined(Q_OS_HAIKU)
+ team_info info;
+ if(get_team_info((team_id)pid, &info)==B_OK) {
+ if(info.argc >= 1)
+ return QString::fromUtf8(info.args);
+ }
+ return QString();
#elif defined(Q_OS_BSD4)
# if defined(Q_OS_NETBSD)
struct kinfo_proc2 kp;
--
2.21.0


From aa68eea2f2803bb9678c9096e0863dc10fa714d7 Mon Sep 17 00:00:00 2001
From: Jerome Duval <jerome.duval@gmail.com>
Date: Sat, 4 May 2019 01:19:32 +0200
Subject: haiku changes for 1.13.0


diff --git a/src/lib/corelib/buildgraph/processcommandexecutor.cpp b/src/lib/corelib/buildgraph/processcommandexecutor.cpp
index 0818e12..fbcf74c 100644
--- a/src/lib/corelib/buildgraph/processcommandexecutor.cpp
+++ b/src/lib/corelib/buildgraph/processcommandexecutor.cpp
@@ -88,6 +88,7 @@ static QProcessEnvironment mergeEnvironments(const QProcessEnvironment &baseEnv,
QStringLiteral("LD_LIBRARY_PATH"),
QStringLiteral("DYLD_LIBRARY_PATH"),
QStringLiteral("DYLD_FRAMEWORK_PATH"),
+ QStringLiteral("LIBRARY_PATH"),
};
for (const QString &key : additionalEnv.keys()) {
QString newValue = additionalEnv.value(key);
diff --git a/src/lib/corelib/tools/hostosinfo.h b/src/lib/corelib/tools/hostosinfo.h
index cae4516..d1310ed 100644
--- a/src/lib/corelib/tools/hostosinfo.h
+++ b/src/lib/corelib/tools/hostosinfo.h
@@ -71,7 +71,7 @@ class QBS_EXPORT HostOsInfo // Exported for use by command-line tools.
{
public:
// Add more as needed.
- enum HostOs { HostOsWindows, HostOsLinux, HostOsMacos, HostOsOtherUnix, HostOsOther };
+ enum HostOs { HostOsWindows, HostOsLinux, HostOsMacos, HostOsHaiku, HostOsOtherUnix, HostOsOther };

static inline std::string hostOSIdentifier();
static inline std::vector<std::string> hostOSIdentifiers();
@@ -100,6 +100,7 @@ public:
static bool isWindowsHost() { return hostOs() == HostOsWindows; }
static bool isLinuxHost() { return hostOs() == HostOsLinux; }
static bool isMacosHost() { return hostOs() == HostOsMacos; }
+ static bool isHaikuHost() { return hostOs() == HostOsHaiku; }
static inline bool isAnyUnixHost();
static inline QString rfc1034Identifier(const QString &str);

@@ -128,6 +129,8 @@ public:
return QStringLiteral("PATH");
if (isMacosHost())
return QStringLiteral("DYLD_LIBRARY_PATH");
+ if (isHaikuHost())
+ return QStringLiteral("LIBRARY_PATH");
return QStringLiteral("LD_LIBRARY_PATH");
}

@@ -214,6 +217,8 @@ HostOsInfo::HostOs HostOsInfo::hostOs()
return HostOsLinux;
#elif defined(Q_OS_DARWIN)
return HostOsMacos;
+#elif defined(Q_OS_HAIKU)
+ return HostOsHaiku;
#elif defined(Q_OS_UNIX)
return HostOsOtherUnix;
#else
diff --git a/static-res.pro b/static-res.pro
index 86816bb..3c4d823 100644
--- a/static-res.pro
+++ b/static-res.pro
@@ -8,6 +8,7 @@ unix:qbs_disable_rpath {
!isEmpty(QBS_DESTDIR): qbslibdir = $$QBS_DESTDIR
else: qbslibdir = $$OUT_PWD/lib
macos: envVar = DYLD_LIBRARY_PATH
+ else: haiku: envVar = LIBRARY_PATH
else: envVar = LD_LIBRARY_PATH
oldVal = $$getenv($$envVar)
newVal = $$qbslibdir
--
2.21.0

@@ -0,0 +1,73 @@
SUMMARY="Qt Build Suite"
DESCRIPTION="Qbs is a cross-platform build tool."
HOMEPAGE="https://wiki.qt.io/Qbs"
COPYRIGHT="2016-2017, Digia Plc and/or its subsidiary(-ies)"
LICENSE="GNU LGPL v2"
REVISION="1"
SOURCE_URI="https://download.qt.io/official_releases/qbs/$portVersion/qbs-src-$portVersion.tar.gz"
CHECKSUM_SHA256="4daa764a944bdb33a4a0dd792864dd9acc24036ad84ef34cfb3215538c6cef89"
SOURCE_DIR="qbs-src-$portVersion"
PATCHES="qbs-$portVersion.patchset"

ARCHITECTURES="!x86_gcc2 x86 x86_64"
SECONDARY_ARCHITECTURES="?x86"

PROVIDES="
qbs$secondaryArchSuffix = $portVersion
cmd:qbs = $portVersion
cmd:qbs_config = $portVersion
cmd:qbs_config_ui = $portVersion
cmd:qbs_create_project = $portVersion
cmd:qbs_qmltypes = $portVersion
cmd:qbs_setup_android = $portVersion
cmd:qbs_setup_qt = $portVersion
cmd:qbs_setup_toolchains = $portVersion
lib:libqbscore$secondaryArchSuffix = $portVersion
lib:libqbsqtprofilesetup$secondaryArchSuffix = $portVersion
"
REQUIRES="
haiku$secondaryArchSuffix
lib:libGL$secondaryArchSuffix
lib:libQt5Core$secondaryArchSuffix
lib:libQt5Gui$secondaryArchSuffix
lib:libQt5Network$secondaryArchSuffix
lib:libQt5Script$secondaryArchSuffix
lib:libQt5Widgets$secondaryArchSuffix
lib:libQt5Xml$secondaryArchSuffix
"

BUILD_REQUIRES="
haiku${secondaryArchSuffix}_devel
devel:libQt5Core$secondaryArchSuffix
devel:libQt5Gui$secondaryArchSuffix
devel:libQt5Network$secondaryArchSuffix
devel:libQt5Script$secondaryArchSuffix
devel:libQt5Widgets$secondaryArchSuffix
devel:libQt5Xml$secondaryArchSuffix
"
BUILD_PREREQUIRES="
cmd:gcc$secondaryArchSuffix
cmd:ld$secondaryArchSuffix
cmd:make
"

BUILD()
{
	# Configure with qmake recursively; install prefix comes from the port shell.
	qmake -r qbs.pro QBS_INSTALL_PREFIX=$prefix
	# The freshly built qbs tools are run from bin/ during the build and expect
	# to find their libraries in a sibling lib/ directory; provide it via symlink.
	ln -s ../lib/ bin/lib
	make $jobArgs
}

INSTALL()
{
	make install

	# Relocate the installed tree into the Haiku-prescribed directories.
	# $libDir may itself live under $prefix/lib, so park the installed lib/
	# out of the way first to avoid moving a directory into itself.
	mv $prefix/lib $prefix/lib2
	# Create the parent directories of the target locations.
	# (was: `$(diname $dataDir)` — typo for `dirname`, which made the
	# command substitution fail and left $dataDir's parent uncreated)
	mkdir -p $(dirname $libDir) $(dirname $includeDir) $(dirname $dataDir)

	mv $prefix/include $includeDir
	mv $prefix/lib2 $libDir
	mv $prefix/share $dataDir
	# qbs' helper executables belong next to its libraries on Haiku.
	mv $prefix/libexec/qbs/* $libDir/qbs/
	rm -rf $prefix/libexec
}