path: root/it_url.class
author     Urban Müller    2017-08-10 16:01:08 +0200
committer  Urban Müller    2017-08-10 16:01:08 +0200
commit     cd20fc7045b70bec5e7e688d22f938f5258b6e97 (patch)
tree       3f66fb00f815ffa1765a1711bb91692ea871da29 /it_url.class
parent     a8b66880eb54994318c638268d3002f0cbebfe82 (diff)
enable filtering of it_url errors, enable errors in it_url::get by default (like ::get_cache)
Diffstat (limited to 'it_url.class')
-rw-r--r--    it_url.class    25
1 file changed, 13 insertions, 12 deletions
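A usage sketch of the new behavior, assumed from the commit message and the docblock changes below; the URL and call sites are illustrative, only the parameter names come from this diff:

    $url = new it_url;

    // After this commit a failed fetch reports via it::error() by default,
    // as ::get_cache already did; 'safety' => 1 is no longer needed.
    $data = $url->get(['url' => 'http://www.example.com/feed.xml']);

    // Pass false to filter out (suppress) error reporting for this call ...
    $quiet = $url->get(['url' => 'http://www.example.com/feed.xml', 'it_error' => false]);

    // ... or pass extra it::error() arguments, e.g. a more specific title.
    $data = $url->get(['url' => 'http://www.example.com/feed.xml', 'it_error' => ['title' => "feed download failed"]]);
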
diff --git a/it_url.class b/it_url.class
index 119b1d5..081c9ee 100644
--- a/it_url.class
+++ b/it_url.class
@@ -94,8 +94,8 @@ function is_reachable($timeout = 5)
* @param $p parameter array with the following keys
* @param $p['url'] url to get, defaults to constructor URL
* @param $p['headers'] optional associative array of HTTP headers to send
- * @param $p['safety'] set to 1 to generate an it::error in case of timeout
- * @param $p['it_error'] extra arguments given to it_error if safety is on and an error occurs
+ * @param $p['safety'] DEPRECATED
+ * @param $p['it_error'] extra arguments for it_error or false to ignore errors
* @param $p['timeout'] timeout per read in seconds, defaults to 5. fractions allowed. silent, see $p['safety']
* @param $p['totaltimeout'] timeout for the whole function call
* @param $p['maxlength'] maximum length of response
@@ -270,8 +270,8 @@ function request($p=array())
$errstr = $this->errstr = "maxlength reached";
}
- if ($result === false && $p['safety'] == 1)
- it::error(array('title' => "problem (timeout?) getting $url->url " . $errstr) + (array)$p['it_error']);
+ if ($result === false)
+ it::error((array)$p['it_error'] + ['title' => "problem (timeout?) getting $url->url " . $errstr]);
return $result;
}
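Note the swapped operands around PHP's array union in the hunk above: `+` keeps keys from the left-hand array, so caller-supplied $p['it_error'] fields now override the generic title instead of being overridden by it. A standalone illustration of that precedence (the values are hypothetical, not taken from it::error()):

    $it_error = ['title' => "feed download failed"];                     // caller-supplied extras
    $default  = ['title' => "problem (timeout?) getting http://www.example.com/feed.xml"];

    var_dump($it_error + $default);   // new order: 'title' => "feed download failed"
    var_dump($default + $it_error);   // old order: 'title' => "problem (timeout?) getting ..."
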
@@ -367,8 +367,8 @@ function request_curl($p=array())
$errstr = $this->errstr = curl_error($curl);
}
- if ($got === false && $p['safety'] == 1)
- it::error(array('title' => "problem getting $url->url with curl: " . curl_error($curl)) + (array)$p['it_error']);
+ if ($got === false)
+ it::error((array)$p['it_error'] + ['title' => "problem getting $url->url with curl: " . curl_error($curl)]);
return $result;
}
@@ -496,7 +496,8 @@ static function get_cache_filename($p)
* @param $p['maxage'] maximum age of cache entries in seconds, default 86400
* @param $p['cleanbefore'] maximum daytime when attempting cleanup, default 7200
* @param $p['preprocess'] callback function (or array for methods) to change received file or array('function' => ..., 'in' => $src, 'out' => $dst, ...) with callback function plus args
- * @param $p['safety'] value 0 means don't generate alert, value 1 means generate alerts on timeouts and failures
+ * @param $p['safety'] DEPRECATED. see $p['it_error']
+ * @param $p['it_error'] false means ignore errors, anything else gets passed to it::error() if errors occur
* @param $p['keepfailed'] keep old versions of files if download fails (sending alerts conservatively)
* @param $p['returnheaders'] Return array($path, $headers) instead of simply $path
* @param $p['it_error'] parameters for it::error()
@@ -504,7 +505,7 @@ static function get_cache_filename($p)
*/
function get_cache($p = array())
{
- $p += array('timeout' => 10, 'maxage' => 86400, 'cleanbefore' => 7200, 'safety' => 1, 'it_error' => array());
+ $p += ['timeout' => 10, 'maxage' => 86400, 'cleanbefore' => 7200, 'it_error' => $p['safety'] == 0 ? false : ($p['safety'] == 2 ? ['fatal' => true] : [])];
$p['totaltimeout'] = $p['timeout'];
$path = it_url::get_cache_filename($p); # Must be before changing cachedir below
$p['cachedir'] = it_url::get_cache_dir($p);
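The new defaults line above maps explicit values of the deprecated 'safety' flag onto 'it_error' for backwards compatibility. Roughly, for callers that still pass the flag (the call sites and URL below are illustrative only):

    $u = 'http://www.example.com/logo.png';

    it_url::get_cache(['url' => $u, 'safety' => 0]);   // becomes 'it_error' => false: errors ignored
    it_url::get_cache(['url' => $u, 'safety' => 1]);   // becomes 'it_error' => []: default it::error() report
    it_url::get_cache(['url' => $u, 'safety' => 2]);   // becomes 'it_error' => ['fatal' => true]
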
@@ -527,7 +528,7 @@ function get_cache($p = array())
EDC('getcache', "new", $filemtime, $p['url'], $path);
$url = new it_url;
- if ($result = $url->get(array('safety' => 0) + $p + array('filemtime' => EDC('nocache') ? null : $filemtime))) # => true means not modified (no new data fetched)
+ if ($result = $url->get(array('it_error' => false) + $p + array('filemtime' => EDC('nocache') ? null : $filemtime))) # => true means not modified (no new data fetched)
{
$newfile = it_url::_atomicwrite($path, $result);
if ($p['returnheaders'])
@@ -625,7 +626,7 @@ function get_cache($p = array())
*/
function get_cache_contents($p)
{
- return ($fn = self::get_cache($p)) ? file_get_contents($fn) : ($p['safety'] === 0 ? null : it::error(array('title' => "failed getting " . it_url::absolute($p['url']), 'body' => var_export($p, true))));
+ return ($fn = self::get_cache($p)) ? file_get_contents($fn) : it::error((array)$p['it_error'] + ['title' => "failed getting " . it_url::absolute($p['url']), 'body' => $p]);
}
/**
@@ -688,8 +689,8 @@ static function _waitforlockedfile($path, $p)
clearstatcache();
}
- if ($lockedbyother && $p['safety'] == 1)
- it::error(($passes < $maxpasses ? "error getting url" : "timeout") . " in it_url::get_cache(): url={$p['url']}, passes=$passes, maxpasses=$maxpasses, path={$p['path']}");
+ if ($lockedbyother)
+ it::error((array)$p['it_error'] + ['title' => ($passes < $maxpasses ? "error getting url" : "timeout") . " in it_url::get_cache(): url={$p['url']}, passes=$passes, maxpasses=$maxpasses, path={$p['path']}"]);
return !$lockedbyother && file_exists($path);
}