author    David Mitchell <davem@iabyn.com>    2010-10-31 17:01:10 +0000
committer David Mitchell <davem@iabyn.com>    2010-11-01 16:38:31 +0000
commit    51698cb360d5bba06e12496ef9c7bf82e3352b71 (patch)
tree      98d362c2891820fa8fc1def26eaa824f33dd7ebd
parent    328552296d8d53b34dfe2dcb81fd7119bdc526f5 (diff)
download  perl-51698cb360d5bba06e12496ef9c7bf82e3352b71.tar.gz
RT 75254: Slow GC after Scalar::Util::weaken
Freeing lots of weak RVs to the same object results in a quadratic
search through the backrefs array.

This is probably sufficiently rare that it's not worth the expense of
replacing the array with a ptr table (say); but in the meantime, this
patch makes the code as tight as possible, and adds a check for the sv
being at element 0, so that both types of linear create/destroy order
are optimised (previously only created last / deleted first order was
quick). It's still slow for random deletion order.

The RT code, modified to give high-res timing for the return from the
sub, and with various types of destruct order, gives the following
timings:

              LIFO      FIFO    random
    before   0.05s    17.37s    17.28s
    now      0.04s     0.04s    12.05s
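The trick the patch leans on, deleting from an unordered array by bumping the
head pointer, popping the tail, or plugging a hole with the last element, can
be sketched in isolation. The following is a minimal illustrative sketch only,
not the patched Perl_sv_del_backref() itself: the backrefs struct and
backref_del() are hypothetical stand-ins for Perl's AV, AvARRAY() and
AvFILLp().

#include <assert.h>
#include <stddef.h>

/* Hypothetical stand-in for the backref AV: a base pointer plus the index
 * of the last used slot, loosely analogous to AvARRAY() and AvFILLp(). */
typedef struct {
    void **ary;      /* first element; may be advanced, like AvARRAY(av)++ */
    ptrdiff_t fill;  /* index of the last element, -1 when empty */
} backrefs;

/* Remove one occurrence of sv from an unordered array.
 * Head and tail are checked first, since linear create/destroy orders
 * free backrefs from one end; otherwise scan and plug the hole with the
 * last element, which is safe because the order carries no meaning. */
static void backref_del(backrefs *b, void *sv)
{
    void **svp = b->ary;
    const ptrdiff_t fill = b->fill;

    assert(fill > -1);
    if (*svp == sv) {
        b->ary++;                 /* drop the head element in O(1) */
    }
    else {
        void **p = &svp[fill];
        void *const topsv = *p;
        if (topsv != sv) {        /* not at the tail either: scan */
            while (--p > svp) {
                if (*p == sv) {
                    *p = topsv;   /* move the last element into the hole */
                    break;        /* there should only be one */
                }
            }
        }
    }
    b->fill = fill - 1;           /* the old tail slot is dead either way */
}

Because the array is unordered, moving the last element into the hole keeps it
valid with no shifting; the remaining cost is the linear scan to locate the
entry, which is why random deletion order is still slow.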
-rw-r--r--    sv.c    61
1 file changed, 42 insertions(+), 19 deletions(-)
diff --git a/sv.c b/sv.c
index 2d4e2ab31e..aefcde879b 100644
--- a/sv.c
+++ b/sv.c
@@ -5473,7 +5473,6 @@ Perl_sv_del_backref(pTHX_ SV *const tsv, SV *const sv)
{
dVAR;
SV **svp = NULL;
- I32 i;
PERL_ARGS_ASSERT_SV_DEL_BACKREF;
@@ -5490,30 +5489,54 @@ Perl_sv_del_backref(pTHX_ SV *const tsv, SV *const sv)
Perl_croak(aTHX_ "panic: del_backref");
if (SvTYPE(*svp) == SVt_PVAV) {
- int count = 0;
+#ifdef DEBUGGING
+ int count = 1;
+#endif
AV * const av = (AV*)*svp;
+ SSize_t fill;
assert(!SvIS_FREED(av));
+ fill = AvFILLp(av);
+ assert(fill > -1);
svp = AvARRAY(av);
- for (i = AvFILLp(av); i >= 0; i--) {
- if (svp[i] == sv) {
- const SSize_t fill = AvFILLp(av);
- if (i != fill) {
- /* We weren't the last entry.
- An unordered list has this property that you can take the
- last element off the end to fill the hole, and it's still
- an unordered list :-)
- */
- svp[i] = svp[fill];
- }
- svp[fill] = NULL;
- AvFILLp(av) = fill - 1;
- count++;
-#ifndef DEBUGGING
- break; /* should only be one */
+ /* for an SV with N weak references to it, if all those
+ * weak refs are deleted, then sv_del_backref will be called
+ * N times and O(N^2) compares will be done within the backref
+ * array. To ameliorate this potential slowness, we:
+ * 1) make sure this code is as tight as possible;
+ * 2) when looking for SV, look for it at both the head and tail of the
+ * array first before searching the rest, since some create/destroy
+ * patterns will cause the backrefs to be freed in order.
+ */
+ if (*svp == sv) {
+ AvARRAY(av)++;
+ AvMAX(av)--;
+ }
+ else {
+ SV **p = &svp[fill];
+ SV *const topsv = *p;
+ if (topsv != sv) {
+#ifdef DEBUGGING
+ count = 0;
+#endif
+ while (--p > svp) {
+ if (*p == sv) {
+ /* We weren't the last entry.
+ An unordered list has this property that you
+ can take the last element off the end to fill
+ the hole, and it's still an unordered list :-)
+ */
+ *p = topsv;
+#ifdef DEBUGGING
+ count++;
+#else
+ break; /* should only be one */
#endif
+ }
+ }
}
}
- assert(count == 1);
+ assert(count ==1);
+ AvFILLp(av) = fill-1;
}
else {
/* optimisation: only a single backref, stored directly */