summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorNathan Gass2025-08-13 14:25:31 +0200
committerNathan Gass2025-08-13 14:25:31 +0200
commit9d04d01fd7fe8967457e7a9f51cd0d33e8188266 (patch)
treef7d624a00890f2c745ff1e66626359238789eafe
parent2b6a6eed7544bfb39aa0c1a8fb8fd94b4e17c947 (diff)
downloaditools-9d04d01fd7fe8967457e7a9f51cd0d33e8188266.tar.gz
itools-9d04d01fd7fe8967457e7a9f51cd0d33e8188266.tar.bz2
itools-9d04d01fd7fe8967457e7a9f51cd0d33e8188266.zip
only get new urls from iterator after retries so they do not get stalled for ever in a large get_multi (HEAD, master)
-rw-r--r--it_url.class29
1 files changed, 16 insertions, 13 deletions
diff --git a/it_url.class b/it_url.class
index ea2771c..a937212 100644
--- a/it_url.class
+++ b/it_url.class
@@ -512,26 +512,29 @@ static function get_multi($p=null)
unset($urls[$key]);
$closehandle($key);
}
-
- if (!$abort && count($handles) < $parallel && $iterator->valid())
- {
- $addhandle($iterator->key(), $iterator->current());
- $iterator->next();
- }
}
}
} while ($mrc == CURLM_CALL_MULTI_PERFORM);
- foreach ((array)$sleepuntils as $key => $time)
- {
- if (microtime(true) >= $time && count($handles) < $parallel)
+ if (!$abort) {
+ foreach ((array)$sleepuntils as $key => $time)
+ {
+ if (microtime(true) >= $time && count($handles) < $parallel)
+ {
+ $addhandle($key, $urls[$key]);
+ unset($sleepuntils[$key]);
+ }
+ }
+
+ while (count($handles) < $parallel && $iterator->valid())
{
- $addhandle($key, $urls[$key]);
- unset($sleepuntils[$key]);
+ $addhandle($iterator->key(), $iterator->current());
+ $iterator->next();
}
+
+ if ($sleepuntils && !count($handles))
+ usleep(100000);
}
- if ($sleepuntils && !count($handles))
- usleep(100000);
$timeout = 0.1; # Longer delay to avoid busy loop but shorter than default of 1s in case we stil hit cURL 7.25.0 problem
}