diff options
author | Christian Schneider | 2009-06-24 14:10:46 +0000 |
---|---|---|
committer | Christian Schneider | 2009-06-24 14:10:46 +0000 |
commit | a916788b6ba0986cb3a424f4cd4dc60bb110b8bc (patch) | |
tree | 73d2d9bc4930a1aca76afc2175c778d46c25a75b | |
parent | 43261032de58cee70955551d4e6006208b79100c (diff) | |
download | itools-a916788b6ba0986cb3a424f4cd4dc60bb110b8bc.tar.gz itools-a916788b6ba0986cb3a424f4cd4dc60bb110b8bc.tar.bz2 itools-a916788b6ba0986cb3a424f4cd4dc60bb110b8bc.zip |
Added get_cache_dir with support for paths relative to /var/urlcache
-rw-r--r-- | it_url.class | 32 |
1 file changed, 24 insertions, 8 deletions
diff --git a/it_url.class b/it_url.class index 4eeb8f5..513255d 100644 --- a/it_url.class +++ b/it_url.class @@ -328,42 +328,58 @@ function get($p=null, $timeout=5) /** + * Construct a local directory name to cache an URL. Named args: + * @param $p['cachedir'] directory to store cache files in, relative paths are appended to $ULTRAHOME/var/urlcache and that is also the default path + */ +function get_cache_dir($p) +{ + $result = $p['cachedir']; + + if (!it::match('^/', $result)) + $result = $GLOBALS['ULTRAHOME'] . "/var/urlcache/$result"; + + return rtrim($result, "/"); +} + + +/** * Construct a local file name to cache an URL. Named args: * @param $p['url'] remote url to get - * @param $p['cachedir'] path to cache directory + * @param $p['cachedir'] directory to store cache files in, @see get_cache_dir */ function get_cache_filename($p) { if (!is_array($p)) $p = array('url'=>$p); - $p += array('cachedir' => $GLOBALS['ULTRAHOME'] . "/var/urlcache"); + + $p['cachedir'] = it_url::get_cache_dir($p); $filename = md5(T_lang() . $p['url']); return $p['cachedir'] . "/" . substr($filename, 0, 2) . "/$filename"; } - /** * Store contents of url in a file and return file name. Threadsafe: Provides locking. Called statically. * Requires webserver writeable directory in $p['cachdedir']. Params in associative array p: * @param $p['url'] url to get - * @param $p['cachedir'] path to cache directory + * @param $p['cachedir'] directory to store cache files in, @see get_cache_dir * @param $p['timeout'] timeout in seconds, default 10. fractions allowed * @param $p['maxage'] maximum age of cache entries in seconds, default 86400 * @param $p['cleanbefore'] maximum daytime when attempting cleanup, default 7200 - * @param $p['preprocess'] callback function (or array for methods) to change received file or array('function' => ..., 'in' => $src, 'out' => $dst, ...) 
with callback function plus args + * @param $p['preprocess'] callback function (or array for methods) to change received file or array('function' => ..., 'in' => $src, 'out' => $dst, ...) with callback function plus args * @param $p['safety'] value 0 means dont generate alert, value 1 means generate alerts on timeouts and failures * @param $p['keepfailed'] keep old versions of files if download fails (sending alerts conservatively) - * @param $p['cachedir'] directory to store cache files in. NO TRAILING SLASH * @param $p['it_error'] parameters for it::error() */ function get_cache($p = array()) { - $p += array('timeout'=>10, 'maxage'=>86400, 'cleanbefore'=>7200, 'safety'=>1, 'cachedir'=>$GLOBALS['ULTRAHOME']."/var/urlcache", 'it_error'=>array()); + $p += array('timeout' => 10, 'maxage' => 86400, 'cleanbefore' => 7200, 'safety' => 1, 'it_error' => array()); $p['totaltimeout'] = $p['timeout']; + $p['cachedir'] = it_url::get_cache_dir($p); $path = it_url::get_cache_filename($p); + @mkdir($p['cachedir']); @mkdir(dirname($path)); $age = file_exists($path) ? (time() - @filemtime($path)) : 0; @@ -454,7 +470,7 @@ function get_cache($p = array()) { touch($p['cachedir'] . "/cleaned"); $maxagemin = intval($p['maxage']/60); - exec("nohup bash -c 'cd {$p['cachedir']} && sleep 10 && find -mmin +$maxagemin -print0 | xargs -0 -r rm' </dev/null >/dev/null 2>&1 &"); + exec("nohup bash -c 'cd {$p['cachedir']} && sleep 10 && find ?? -mmin +$maxagemin -print0 | xargs -0 -r rm' </dev/null >/dev/null 2>&1 &"); } return $result; |