author     Nathan Gass    2019-01-16 16:35:51 +0100
committer  Nathan Gass    2019-01-16 16:35:51 +0100
commit     9bcb2f848b8418d646f0f2ae3c17c5a320056008 (patch)
tree       97674697f44fb2826520b1ef20af334e9c597e7f
parent     4b234ab26583e680e408c3b659a1cd58a7fa9bb9 (diff)
implement parameter 'parallel' to limit parallel requests in it_url::get_multi
-rw-r--r--  it_url.class | 12 +++++++-----
1 file changed, 7 insertions(+), 5 deletions(-)
diff --git a/it_url.class b/it_url.class
index db09ea0..125f5d2 100644
--- a/it_url.class
+++ b/it_url.class
@@ -309,6 +309,7 @@ function request($p=array())
* @param $p['timeout']: timeout per read in seconds, defaults to 5. (TODO: fractions allowed?)
* @param $p['totaltimeout']: timeout for the whole function call (fractions allowed)
* @param $p['headers']: optional array of HTTP headers to send
+ * @param $p['parallel']: max number of parallel requests
 * @return array of contents (or false for errors like timeouts) of resulting page using same
* keys as the urls input array, considering redirects, excluding headers
*/
@@ -343,11 +344,10 @@ function get_multi($p=null)
unset($handles[$key]);
};
- foreach ($urls as $key => $dummy)
- {
- $addhandle($key);
- $retries[$key] = 0;
- }
+ $tofetch = array_keys($urls);
+ $parallel = $p['parallel'] ?: count($tofetch);
+ while (count($handles) < $parallel && $tofetch)
+ $addhandle(array_shift($tofetch));
$start = gettimeofday(true);
@@ -385,6 +385,8 @@ function get_multi($p=null)
if (($handler = $urls[$keys[$info['handle']]]['handler']))
$abort = $handler($info['result'], $results_unordered[$key]);
+ if (!$abort && count($handles) < $parallel && $tofetch)
+ $addhandle(array_shift($tofetch));
}
}
} while ($mrc == CURLM_CALL_MULTI_PERFORM);
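
Usage sketch (not part of the commit above): with this change a caller can cap concurrency through the new 'parallel' key. The 'urls' key and the example values below are inferred from the get_multi() code visible in this diff; key names not shown in this excerpt are assumptions, not confirmed API.

<?php
require_once 'it_url.class';

# Fetch several pages but keep at most 4 requests in flight at any time.
# Without 'parallel', all URLs are requested at once as before.
$results = it_url::get_multi(array(
	'urls' => array(                      # assumed key name for the url list
		'a' => 'https://example.com/a',
		'b' => 'https://example.com/b',
		'c' => 'https://example.com/c',
	),
	'parallel'     => 4,    # new: max number of parallel requests
	'timeout'      => 5,    # timeout per read in seconds
	'totaltimeout' => 30,   # timeout for the whole call
));

# Results use the same keys as the input array; an entry is the page body
# or false on errors such as timeouts.
foreach ($results as $key => $content)
	echo "$key: " . ($content === false ? "FAILED" : strlen($content) . " bytes") . "\n";

Per the added line `$parallel = $p['parallel'] ?: count($tofetch);`, leaving 'parallel' unset (or 0) falls back to the number of URLs, i.e. all requests start immediately, which matches the previous behaviour.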