summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorUrban Müller2020-04-17 16:38:19 +0200
committerUrban Müller2020-04-17 16:39:00 +0200
commit1bd13e02d21ba01f38cd6df04de84b25a75a5264 (patch)
tree5cea18fc830744f041470ef90f1c843d7d153c18
parentf3ae6e3c50d217d3fd46df8edf8079547e6f2864 (diff)
downloaditools-1bd13e02d21ba01f38cd6df04de84b25a75a5264.tar.gz
itools-1bd13e02d21ba01f38cd6df04de84b25a75a5264.tar.bz2
itools-1bd13e02d21ba01f38cd6df04de84b25a75a5264.zip
support $p["postprocess"] in get_cache_contents()
-rw-r--r--it_url.class17
1 file changed, 13 insertions, 4 deletions
diff --git a/it_url.class b/it_url.class
index 8eb7aa8..5e2b681 100644
--- a/it_url.class
+++ b/it_url.class
@@ -77,6 +77,15 @@ function is_reachable($timeout = 5)
return $url->result >= 200 && $url->result < 400;
}
+# internal
+static function _postprocess($data, $p)
+{
+ if ($p['postprocess'])
+ $data = ($t = $p['postprocess']($data, ['it_error' => $p['retries'] > 0 ? false : ['title' => "invalid content from " . $p['url']]])) && $p['checkonly'] ? $data : $t;
+
+ return $data;
+}
+
/**
* Get simple URL with timeout and one retry. Can be called statically. Times out, calls it::error for all errs
*
@@ -122,8 +131,7 @@ function get($p=null, $timeout=5)
$url = new it_url($p['url']);
$result = $url->request($p + ['followlocation' => true]);
- if ($p['postprocess'])
- $result = $p['postprocess']($result, ['it_error' => $p['retries'] > 0 ? false : ['title' => "invalid content from " . $p['url']]]);
+ $result = self::_postprocess($result, $p);
if (!$result && $p['retries'] > 0 && !it::match('^(4..|204)$', $url->result))
{
@@ -473,6 +481,7 @@ static function get_cache_filename($p)
* @param $p['it_error'] parameters for it::error(), false means ignore errors, anything else gets passed to it::error() if errors occur
* @param $p['keepfailed'] keep old versions of files if download fails (sending alerts conservatively)
* @param $p['returnheaders'] Return array($path, $headers) instead of simply $path
+ * @param $p['postprocess'] NOT SUPPORTED, use ::get_cache_contents
* @return Cache filename or false if fetch failed
*/
static function get_cache($p = array())
@@ -503,7 +512,7 @@ static function get_cache($p = array())
EDC('getcache', "new", $filemtime, $p['url'], $path);
$url = new it_url;
- if ($success = $url->get($p + array('filemtime' => EDC('nocache') ? null : $filemtime))) # => true means not modified (no new data fetched)
+ if ($success = $url->get($p + ['checkonly' => true, 'filemtime' => EDC('nocache') ? null : $filemtime])) # => true means not modified (no new data fetched)
{
$newfile = it_url::_atomicwrite($path, $success);
if ($p['returnheaders'])
@@ -601,7 +610,7 @@ static function get_cache($p = array())
*/
function get_cache_contents($p)
{
- return ($fn = self::get_cache($p)) ? it::file_get_contents($fn) : it::error((array)$p['it_error'] + ['title' => $p['safety'] == 0 ? false : "failed getting " . it_url::absolute($p['url']), 'body' => $p]);
+ return ($fn = self::get_cache($p)) ? self::_postprocess(it::file_get_contents($fn), $p) : it::error((array)$p['it_error'] + ['title' => $p['safety'] == 0 ? false : "failed getting " . it_url::absolute($p['url']), 'body' => $p]);
}
/**