Diffstat (limited to 'it_url.class')
-rw-r--r--	it_url.class	| 22 ++++++++++++----------
1 file changed, 12 insertions(+), 10 deletions(-)
diff --git a/it_url.class b/it_url.class
index 6b1f81c..dbd39e4 100644
--- a/it_url.class
+++ b/it_url.class
@@ -106,6 +106,7 @@ static function _postprocess($data, $p)
* @param $p['files'] [fieldname => filename] of files to upload
* @param $p['writefunction'] function to be called whenever data is received (for server-sent-events etc.)
* @param $p['fetchsleep'] Number of seconds to wait after fetch, fractions ok
+ * @param $p['followlocation'] Follow redirects [true]
* @param $p['retries'] Number of retries if download fails, default 1
* @param $p['retrysleep'] Number of seconds to wait before retry (additional to fetchsleep), fractions ok
* @param $p['compression'] use compression (uses curl to do that)
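A minimal usage sketch for the new flag, assuming it_url::get() accepts the same parameter array documented above (the URL is hypothetical): with 'followlocation' => false, curl stops following redirects and the redirect response itself comes back.

    // Sketch: fetch without following redirects ('followlocation' defaults to true)
    $page = it_url::get([
        'url'            => 'http://www.example.com/moved',
        'timeout'        => 5,
        'followlocation' => false,
    ]);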
@@ -170,7 +171,7 @@ function parse_http_header($header)
static function _default_headers($url, $p)
{
$search_subrequest = it::match('search\.ch/', $p['url']);
- if ((!it::is_devel() || EDC('subreqcheck')) && $p['url'] && !$p['headers']['Accept-Language'] && T_lang() != T_defaultlang() && $search_subrequest && !it::match('\blogin\.|banner\.html|machines\.txt|mbtiles\.php|/itjs/|/images/|\.(de|fr|en|it)(\.js|\.html|\.txt|\.php|\.ics|\.pdf|\.json|\.csv|\.gif|\.jpg|\.png)', $p['url']))
+ if ((!it::is_devel() || EDC('subreqcheck')) && $p['url'] && !$p['headers']['Accept-Language'] && T_lang() != T_defaultlang() && $search_subrequest && !it::match('\blogin\.|banner\.html|machines\.txt|mbtiles\.php|/fonts/|/itjs/|/images/|\.(de|fr|en|it)(\.js|\.html|\.txt|\.php|\.ics|\.pdf|\.json|\.csv|\.gif|\.jpg|\.png)', $p['url']))
it::error(['title' => "Subrequest without language override", 'body' => [ $p ]]);
$headers = array_filter([
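To illustrate the widened exclusion (assuming it::match($pattern, $subject) returns the matched substring or null, as it is used in the condition above), a subrequest to a hypothetical /fonts/ URL now matches the pattern and therefore no longer raises "Subrequest without language override":

    // /fonts/ paths are excluded from the language-override check after this change
    $excluded = it::match('\blogin\.|banner\.html|machines\.txt|mbtiles\.php|/fonts/|/itjs/|/images/', 'https://www.search.ch/fonts/opensans.woff2');
    // $excluded is truthy, so it::error(...) is skipped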
@@ -332,12 +333,13 @@ function request($p=array())
/**
* Get multiple URL in parallel with timeout. Needs to be called statically
* @param $p parameter array with the following keys (same as it_url::get)
- * @param $p['urls'] array/generator of urls to get
- * @param $p['timeout'] timeout per read in seconds, defaults to 5. (TODO: fractions allowed?)
- * @param $p['totaltimeout'] timeout for the whole function call (fractions allowed)
- * @param $p['headers'] optional array of HTTP headers to send
- * @param $p['parallel'] max number of parallel requests
- * @param $p['noresults'] do not keep results around
+ * @param $p['urls'] array/generator of urls to get
+ * @param $p['timeout'] timeout per read in seconds, defaults to 5. (TODO: fractions allowed?)
+ * @param $p['totaltimeout'] timeout for the whole function call (fractions allowed)
+ * @param $p['followlocation'] follow redirects [true]
+ * @param $p['headers'] optional array of HTTP headers to send
+ * @param $p['parallel'] max number of parallel requests
+ * @param $p['noresults'] do not keep results around
 * @return array of contents (or false for errors like timeouts) of resulting page using same
* keys as the urls input array, considering redirects, excluding headers
*/
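A hedged usage sketch built from the parameters documented above (keys and URLs are hypothetical):

    $results = it_url::get_multi([
        'urls'           => ['de' => 'https://www.example.com/index.de.html', 'fr' => 'https://www.example.com/index.fr.html'],
        'timeout'        => 5,
        'totaltimeout'   => 10,
        'followlocation' => true,   // default per the docblock; false keeps redirect responses
        'parallel'       => 2,
    ]);
    // $results['de'] / $results['fr'] hold page contents, or false on errors such as timeouts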
@@ -361,13 +363,13 @@ static function get_multi($p=null)
curl_setopt($handle, CURLOPT_URL, it::replace([ '^//' => "http://" ], is_array($url) ? $url['url'] : $url));
curl_setopt_array($handle, $opts);
curl_multi_add_handle($mh, $handle);
- $keys[$handle] = $key;
+ $keys[(int)$handle] = $key;
$handles[$key] = $handle;
};
$closehandle = function ($key) use (&$keys, &$handles, $mh) {
curl_multi_remove_handle($mh, $handles[$key]);
curl_close($handles[$key]);
- unset($keys[$handles[$key]]);
+ unset($keys[(int)$handles[$key]]);
unset($handles[$key]);
};
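A hedged sketch of the bookkeeping pattern shown in this hunk, assuming PHP 7 where a curl handle is a resource and (int)$handle yields its unique resource number, i.e. a stable scalar key for mapping handles back to the caller's url keys (URLs are hypothetical):

    $mh = curl_multi_init();
    $keys = [];     // (int)handle => caller's key
    $handles = [];  // caller's key => handle
    foreach (['a' => 'http://www.example.com/a', 'b' => 'http://www.example.com/b'] as $key => $url) {
        $handle = curl_init($url);
        curl_setopt($handle, CURLOPT_RETURNTRANSFER, true);
        curl_multi_add_handle($mh, $handle);
        $keys[(int)$handle] = $key;   // same cast as in the patch
        $handles[$key] = $handle;
    }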
@@ -406,7 +408,7 @@ static function get_multi($p=null)
{
if ($info['msg'] == CURLMSG_DONE)
{
- $key = $keys[$info['handle']];
+ $key = $keys[(int)$info['handle']];
$content = curl_multi_getcontent($info['handle']);
if (isset($p['postprocess']))
$content = $p['postprocess']($content, ['it_error' => $retries[$key] < $p['retries'] ? false : (array)$p['it_error'] + ['title' => "invalid content from " . $urls[$key]]]);
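A hedged sketch of a caller-supplied 'postprocess' callback as it is invoked above: it receives the downloaded body plus an options array (carrying 'it_error') and returns the value stored under the url's key; the JSON handling here is an assumption for illustration, not library behaviour.

    $results = it_url::get_multi([
        'urls' => ['data' => 'https://www.example.com/data.json'],
        'postprocess' => function ($content, $p) {
            $decoded = json_decode($content, true);
            return $decoded === null ? false : $decoded;  // false marks the fetch as failed
        },
    ]);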