path: root/it_url.class
author     Urban Müller    2020-07-30 17:06:52 +0200
committer  Urban Müller    2020-07-30 17:06:52 +0200
commit     1e4439010c6cf239d5005b0341ca12c71733f1b0 (patch)
tree       21ea16137684ca29c3fcd0967d83fdaa53ef2f1d /it_url.class
parent     a8b1474b149e2a12253a31ec85b04f9f3933e92a (diff)
remove option with single use for now, fix inserting too many woken-up handles at once
Diffstat (limited to 'it_url.class')
-rw-r--r--  it_url.class | 4
1 file changed, 1 insertion(+), 3 deletions(-)
diff --git a/it_url.class b/it_url.class
index 792a839..712074d 100644
--- a/it_url.class
+++ b/it_url.class
@@ -337,7 +337,6 @@ function request($p=array())
* @param $p['headers'] optional array of HTTP headers to send
* @param $p['parallel'] max number of parallel requests
* @param $p['noresults'] do not keep results around
- * @param $p['fetchsleep'] number of seconds to wait after fetch, fractions ok
* @return array of contents (or false for errors like timeouts) of resulting pages using same
* keys as the urls input array, considering redirects, excluding headers
*/
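
For orientation, a minimal call sketch for it_url::get_multi() based on the parameters documented above; the 'urls' key, the header format and the exact return handling are assumptions drawn from this docblock, not a confirmed API reference:

<?php
// Hypothetical usage sketch of it_url::get_multi(); option names follow the
// docblock above and are assumptions, not a verified API description.
require_once 'it_url.class';

$pages = it_url::get_multi([
    'urls'     => ['a' => 'https://example.com/a', 'b' => 'https://example.com/b'],
    'parallel' => 2,                      // max number of parallel requests
    'headers'  => ['Accept: text/html'],  // optional HTTP headers to send
]);

// results are keyed like the input array; false marks an error such as a timeout
foreach ($pages as $key => $content) {
    if ($content === false) {
        error_log("fetch of $key failed");
    }
}
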
@@ -405,7 +404,6 @@ static function get_multi($p=null)
{
if ($info['msg'] == CURLMSG_DONE)
{
- usleep($p['fetchsleep'] * 1000000);
$key = $keys[$info['handle']];
$content = curl_multi_getcontent($info['handle']);
if (isset($p['postprocess']))
@@ -439,7 +437,7 @@ static function get_multi($p=null)
} while ($mrc == CURLM_CALL_MULTI_PERFORM);
foreach ((array)$sleepuntils as $key => $time) {
- if (microtime(true) >= $time) {
+ if (microtime(true) >= $time && count($handles) < $parallel) {
$addhandle($key, $urls[$key]);
unset($sleepuntils[$key]);
}
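
The one-line change above re-checks the parallel cap before waking deferred URLs, so a single pass can no longer add more handles than the configured limit. For illustration only, the same throttling invariant in a standalone curl_multi loop (PHP 8, where curl handles are objects; all names here are made up and this is not the class code):

<?php
// Illustrative throttling sketch: new easy handles are only added while the
// number of active handles stays below the parallel limit, which is the
// invariant restored by the one-line fix above.
$urls     = ['a' => 'https://example.com/a', 'b' => 'https://example.com/b'];
$parallel = 2;
$mh       = curl_multi_init();
$active   = [];        // spl_object_id(handle) => input key of running requests
$pending  = $urls;     // input key => url not yet started
$results  = [];

while ($pending || $active) {
    // start more requests only while a slot below the parallel cap is free
    while ($pending && count($active) < $parallel) {
        $key = array_key_first($pending);
        $ch  = curl_init($pending[$key]);
        curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
        curl_multi_add_handle($mh, $ch);
        $active[spl_object_id($ch)] = $key;
        unset($pending[$key]);
    }

    curl_multi_exec($mh, $running);
    curl_multi_select($mh);

    // collect finished transfers, keyed like the input array
    while ($info = curl_multi_info_read($mh)) {
        $ch = $info['handle'];
        $results[$active[spl_object_id($ch)]] = curl_multi_getcontent($ch);
        curl_multi_remove_handle($mh, $ch);
        unset($active[spl_object_id($ch)]);
    }
}
curl_multi_close($mh);
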