-rw-r--r--   it_url.class   18
1 file changed, 11 insertions, 7 deletions
diff --git a/it_url.class b/it_url.class
index 9b84996..6fcc941 100644
--- a/it_url.class
+++ b/it_url.class
@@ -501,6 +501,7 @@ static function get_cache_filename($p)
* @param $p['keepfailed'] keep old versions of files if download fails (sending alerts conservatively)
* @param $p['returnheaders'] Return array($path, $headers) instead of simply $path
* @param $p['postprocess'] NOT SUPPORTED, use ::get_cache_contents
+ * @param $p['lock'] prevent multiple requests to same url from different processes [true]
* @return Cache filename or false if fetch failed
*/
static function get_cache($p = array())
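A minimal usage sketch of the new option (hypothetical caller; the 'url' parameter and the example URL are assumptions, not shown in this hunk):

	# Skip cross-process locking for a resource that is cheap to fetch twice
	$path = it_url::get_cache(array('url' => 'https://example.com/data.json', 'lock' => false));
	if ($path !== false)
		$data = file_get_contents($path);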
@@ -523,7 +524,7 @@ static function get_cache($p = array())
{
$fileexists = $filemtime !== true;
- if ($lock = it_url::_lock($path))
+ if ($lock = it_url::_lock($path, $p))
{
# Touch existing file to prevent locking other getters while refreshing
if ($fileexists)
@@ -542,7 +543,7 @@ static function get_cache($p = array())
else
@unlink($path); # Expired and failed to get
- it_url::_unlock($path, $lock);
+ it_url::_unlock($path, $lock, $p);
}
else
{
@@ -675,10 +676,10 @@ static function _expired($path, $maxage)
* @param $path File to lock
* @return Lock handle if successfully locked file
*/
-static function _lock($path)
+static function _lock($path, $p)
{
$force = EDC('nocache') || (($mtime = @filemtime("$path.lock")) && (time() - $mtime > 30)); # expire forgotten locks
- return @it::fopen("$path.lock", $force ? "w" : "x");
+ return ($p['lock'] ?? true) ? @it::fopen("$path.lock", $force ? "w" : "x") : true;
}
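The lock is an exclusive create of "$path.lock": mode "x" fails if the file already exists, so only one process wins, while a lock older than 30 seconds (or EDC('nocache')) is treated as stale and overwritten with mode "w". With $p['lock'] set to false no file is created and the placeholder true is returned. A standalone sketch of the same idea, using plain fopen instead of the it::fopen wrapper:

	# Sketch only: acquire an advisory lock file next to $path
	function acquire_lock($path)
	{
		$stale = ($mtime = @filemtime("$path.lock")) && (time() - $mtime > 30);  # forgotten lock?
		return @fopen("$path.lock", $stale ? "w" : "x");  # "x" fails if the lock already exists
	}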
/**
@@ -686,10 +687,13 @@ static function _lock($path)
* @param $path File to unlock
* @param $lock Handle to lock acquired by _lock
*/
-static function _unlock($path, $lock)
+static function _unlock($path, $lock, $p)
{
- fclose($lock);
- @unlink("$path.lock");
+ if ($p['lock'] ?? true)
+ {
+ fclose($lock);
+ @unlink("$path.lock");
+ }
}
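Because both helpers read $p['lock'] ?? true, existing callers that pass no 'lock' key keep the old locking behaviour; with 'lock' => false, _lock() returns the placeholder true and _unlock() becomes a no-op, so there is no stray fclose()/unlink(). The caller pattern, as in the get_cache hunk above:

	if ($lock = it_url::_lock($path, $p))   # true placeholder when $p['lock'] === false
	{
		# ... fetch or refresh the cached file ...
		it_url::_unlock($path, $lock, $p);  # does nothing when locking is disabled
	}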
/**