author     Urban Müller  2007-10-04 14:46:33 +0000
committer  Urban Müller  2007-10-04 14:46:33 +0000
commit     06a0ba8f47505caf42cf1b65a890fac10b545edc (patch)
tree       5bacd74b170e1cb4583d3c2bb8072b2619276186
parent     ff8fbaa3598ec6399bdb3f7e39d0889c1c891d94 (diff)
download   itools-06a0ba8f47505caf42cf1b65a890fac10b545edc.tar.gz
           itools-06a0ba8f47505caf42cf1b65a890fac10b545edc.tar.bz2
           itools-06a0ba8f47505caf42cf1b65a890fac10b545edc.zip
timeout now uses seconds
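
All timeout parameters in it_url are now interpreted as seconds rather than milliseconds, with fractional values allowed. A hypothetical call site illustrating the change (URL and values are examples, not from this commit):

    <?php
    // before this commit: timeout in milliseconds
    $html = it_url::get(array('url' => 'http://example.com/', 'timeout' => 2500));

    // after this commit: timeout in seconds, fractions allowed
    $html = it_url::get(array('url' => 'http://example.com/', 'timeout' => 2.5));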
-rw-r--r--  it_url.class  44
1 file changed, 22 insertions(+), 22 deletions(-)
diff --git a/it_url.class b/it_url.class
index d279ab7..833d4f9 100644
--- a/it_url.class
+++ b/it_url.class
@@ -213,13 +213,13 @@ function is_reachable($timeout = 5)
/**
* Get simple URL with timeout. Can be called statically
- * @p parameter array with the following keys
- * @p['url']: url to get, defaults to constructor URL
- * @p['timeout']: timeout per read in milliseconds, defaults to 5000
- * @p['data']: POST data array with key-value pairs
+ * @param $p parameter array with the following keys
+ * @param $p['url']: url to get, defaults to constructor URL
+ * @param $p['timeout']: timeout per read in seconds, defaults to 5; fractions allowed
+ * @param $p['data']: POST data array with key-value pairs
* @return contents of resulting page, considering redirects, excluding headers, or false on error
*/
-function get($p=null, $timeout=5000)
+function get($p=null, $timeout=5)
{
if (!is_array($p))
$p = array('url' => $p);
@@ -238,7 +238,7 @@ function get($p=null, $timeout=5000)
if ($url->protocol == 'http')
{
- if ($fp = @fsockopen($url->realhostname, $url->port, $errno, $errstr, $p['timeout']/1000))
+ if ($fp = @fsockopen($url->realhostname, $url->port, $errno, $errstr, $p['timeout']))
{
# urlencode data pairs if is array
if (is_array($p['data']))
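
PHP's fsockopen() already takes its connect timeout as a float number of seconds, which is why the old /1000 scaling can simply be dropped once $p['timeout'] is stored in seconds. A minimal standalone sketch (host, port and timeout value are illustrative):

    <?php
    // fsockopen($hostname, $port, &$errno, &$errstr, $timeout): $timeout is float seconds
    if ($fp = @fsockopen('example.com', 80, $errno, $errstr, 2.5))
        fclose($fp);
    else
        echo "connect failed: $errstr ($errno)\n";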
@@ -267,7 +267,7 @@ function get($p=null, $timeout=5000)
foreach ($p['headers'] as $header => $value)
$headers .= "$header: $value\r\n";
- stream_set_timeout($fp, intval($p['timeout']/1000), ($p['timeout']%1000)*1000);
+ stream_set_timeout($fp, intval($p['timeout']), intval(($p['timeout']*1000000)%1000000));
@fputs($fp, "$method /$url->path HTTP/1.0\r\n$headers\r\n$data");
while (!feof($fp) && ($line = @fgets($fp, 10240)) && ($line = trim($line)))
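
stream_set_timeout() takes the read timeout as whole seconds plus microseconds, so a fractional seconds value must be split into the two arguments; for 2.5, intval() yields 2 and the modulo expression yields 500000. A minimal sketch of the same split (host and timeout value are illustrative):

    <?php
    $timeout = 2.5; // seconds, fractions allowed
    if ($fp = @fsockopen('example.com', 80, $errno, $errstr, $timeout))
    {
        // split fractional seconds into whole seconds + microseconds
        stream_set_timeout($fp, intval($timeout), intval(($timeout * 1000000) % 1000000));
        fclose($fp);
    }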
@@ -319,8 +319,8 @@ function get($p=null, $timeout=5000)
/**
* Construct a local file name to cache a URL. Named args:
- * @p['url'] remote url to get
- * @p['cachedir'] path to cache directory
+ * @param $p['url'] remote url to get
+ * @param $p['cachedir'] path to cache directory
*/
function get_cache_filename($p)
{
@@ -337,20 +337,20 @@ function get_cache_filename($p)
/**
* Store contents of url in a file and return file name. Threadsafe: Provides locking. Called statically.
* Requires webserver-writable directory in $p['cachedir']. Params in associative array p:
- * @p['url'] url to get
- * @p['cachedir'] path to cache directory
- * @p['timeout'] timeout in milliseconds, default 10000
- * @p['maxage'] maximum age of cache entries in seconds, default 86400
- * @p['cleanbefore'] maximum daytime when attempting cleanup, default 7200
- * @p['preprocess'] callback function (or array for methods) to change received file or array('function' => ..., 'in' => $src, 'out' => $dst, ...) with callback function plus args
- * @p['safety'] value 0 means dont generate alert, value 1 means generate alerts on timeouts and failures
- * @p['keepfailed'] keep old versions of files if download fails (sending alerts conservatively)
- * @p['cachedir'] directory to store cache files in. NO TRAILING SLASH
- * @p['it_error'] parameters for it::error()
+ * @param $p['url'] url to get
+ * @param $p['cachedir'] directory to store cache files in. NO TRAILING SLASH
+ * @param $p['timeout'] timeout in seconds, default 10; fractions allowed
+ * @param $p['maxage'] maximum age of cache entries in seconds, default 86400
+ * @param $p['cleanbefore'] maximum daytime when attempting cleanup, default 7200
+ * @param $p['preprocess'] callback function (or array for methods) to change received file or array('function' => ..., 'in' => $src, 'out' => $dst, ...) with callback function plus args
+ * @param $p['safety'] value 0 means don't generate alerts, value 1 means generate alerts on timeouts and failures
+ * @param $p['keepfailed'] keep old versions of files if download fails (sending alerts conservatively)
+ * @param $p['it_error'] parameters for it::error()
*/
function get_cache($p = array())
{
- $p += array('timeout'=>10000, 'maxage'=>86400, 'cleanbefore'=>7200, 'safety'=>1, 'cachedir'=>$GLOBALS['ULTRAHOME']."/var/urlcache", 'it_error'=>array());
+ $p += array('timeout'=>10, 'maxage'=>86400, 'cleanbefore'=>7200, 'safety'=>1, 'cachedir'=>$GLOBALS['ULTRAHOME']."/var/urlcache", 'it_error'=>array());
$path = it_url::get_cache_filename($p);
@mkdir(dirname($path));
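
Callers that relied on the old default of 10000 ms get the same ten-second behavior from the new default of 10; only call sites that passed an explicit millisecond value need updating. A hypothetical call (URL and cachedir are examples):

    <?php
    $file = it_url::get_cache(array(
        'url'      => 'http://example.com/feed.xml',
        'cachedir' => '/tmp/urlcache',
        'timeout'  => 2.5, // seconds since this commit; was 2500 ms before
    ));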
@@ -451,12 +451,12 @@ function get_cache($p = array())
function _waitforpath($p)
{
- $p += array('sleeptime' => 100); # millisecs to wait
+ $p += array('sleeptime' => 0.1); # seconds to wait per pass
# wait until cache is ready, then read from cache
for ($maxpasses = $p['timeout'] / $p['sleeptime'], $passes = 0; (@filesize($p['path']) <= 0) && ($passes < $maxpasses); ++$passes)
{
- usleep($p['sleeptime'] * 1000);
+ usleep($p['sleeptime'] * 1000000);
clearstatcache();
}
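
Since $p['timeout'] and $p['sleeptime'] are now both in seconds, their ratio, and thus the pass count, is unchanged (e.g. 10 / 0.1 = 100 passes); only the usleep() argument needs rescaling, because usleep() expects microseconds. A self-contained sketch of the same polling pattern (path and values are illustrative):

    <?php
    $timeout   = 10;  // total wait in seconds
    $sleeptime = 0.1; // seconds to wait per pass
    $path      = '/tmp/urlcache/example';

    for ($maxpasses = $timeout / $sleeptime, $passes = 0; (@filesize($path) <= 0) && ($passes < $maxpasses); ++$passes)
    {
        usleep($sleeptime * 1000000); // usleep() takes microseconds
        clearstatcache();             // filesize() results are cached per path
    }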