author     Urban Müller  2007-11-15 14:16:15 +0000
committer  Urban Müller  2007-11-15 14:16:15 +0000
commit     765c68821450436c1666086aa47df1cc58427dae (patch)
tree       6ef3a8eb2fb8265edd3ecd3e802b1d8a7d0db3f9 /it_url.class
parent     2817104f99769bf7f82d179b8295d1ede12832eb (diff)
don't block httpd for 3 hours if the server doesn't deliver EOF
Diffstat (limited to 'it_url.class')
-rw-r--r--  it_url.class  23
1 file changed, 11 insertions(+), 12 deletions(-)
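The fix: it_url::get() previously relied only on stream_set_timeout(), which bounds each individual read but not the call as a whole, so a server that keeps trickling bytes without ever sending EOF could hold an httpd worker for hours. The patch adds a wall-clock deadline that every read loop re-checks. A minimal sketch of that pattern, independent of itools (function name, port, and default values are invented for illustration):

<?php
// Sketch of the deadline pattern this commit introduces (not itools code).
// stream_set_timeout() only bounds a single read; the absolute deadline
// bounds the whole fetch even if the server trickles bytes forever.
function fetch_with_deadline($host, $path = '/', $totaltimeout = 30, $readtimeout = 5)
{
	$endtime = time() + $totaltimeout;	# absolute deadline for the entire call
	if (!($fp = @fsockopen($host, 80, $errno, $errstr, $readtimeout)))
		return false;
	stream_set_timeout($fp, $readtimeout);	# per-read timeout, as in it_url::get()
	fputs($fp, "GET $path HTTP/1.0\r\nHost: $host\r\n\r\n");
	$data = "";
	while (!feof($fp) && (time() < $endtime))	# re-check the deadline on every read
		$data .= fread($fp, 20480);
	fclose($fp);
	return time() < $endtime ? $data : false;	# a blown deadline counts as failure
}
?>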
diff --git a/it_url.class b/it_url.class
index f92f4bb..f556ebb 100644
--- a/it_url.class
+++ b/it_url.class
@@ -224,16 +224,16 @@ function is_reachable($timeout = 5)
* @param $p parameter array with the following keys
* @param $p['url']: url to get, defaults to constructor URL
* @param $p['timeout']: timeout per read in seconds, defaults to 5. fractions allowed
+ * @param $p['totaltimeout']: timeout for the whole function call
* @param $p['data']: POST data array with key-value pairs
* @return contents of resulting page, considering redirects, excluding headers, or false on error
*/
function get($p=null, $timeout=5)
{
if (!is_array($p))
- $p = array('url' => $p);
+ $p = array('url' => $p, 'timeout' => $timeout);
- if (!isset($p['timeout']))
- $p['timeout'] = $timeout;
+ $p += array('totaltimeout' => "999999", 'timeout' => 5);
if ($p['url'])
$url = new it_url($p['url']);
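The new $p += array(...) line above relies on PHP's array union operator: + keeps keys already present on the left and only fills in missing ones, so caller-supplied values always win over the defaults. A two-line illustration (values invented):

<?php
$p = array('timeout' => 2);
$p += array('totaltimeout' => "999999", 'timeout' => 5);
# $p is now array('timeout' => 2, 'totaltimeout' => "999999")
?>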
@@ -243,6 +243,7 @@ function get($p=null, $timeout=5)
$url->result = $result = false;
unset($url->data);
$url->headers = array();
+ $endtime = time() + $p['totaltimeout'];
if ($url->protocol == 'http')
{
@@ -278,7 +279,7 @@ function get($p=null, $timeout=5)
stream_set_timeout($fp, intval($p['timeout']), intval(($p['timeout']*1000000)%1000000));
@fputs($fp, "$method /$url->path HTTP/1.0\r\n$headers\r\n$data");
- while (!feof($fp) && ($line = @fgets($fp, 10240)) && ($line = trim($line)))
+ while (!feof($fp) && ($line = @fgets($fp, 10240)) && ($line = trim($line)) && (time() < $endtime))
{
if (preg_match('#^(HTTP\S+)\s(\d+)#', $line, $parts)) # Parse result code
$url->headers[$parts[1]] = $url->result = $parts[2];
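The stream_set_timeout() call in the hunk above splits a possibly fractional per-read timeout into whole seconds plus microseconds, since the function takes the two as separate arguments. A sketch of that conversion (2.25 is an example value):

<?php
$timeout = 2.25;
$sec  = intval($timeout);				# 2
$usec = intval(($timeout * 1000000) % 1000000);		# 250000
# stream_set_timeout($fp, $sec, $usec);
?>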
@@ -301,7 +302,7 @@ function get($p=null, $timeout=5)
{
$chunk = "";
- while (!feof($fp) && (strlen($chunk) < $len))
+ while (!feof($fp) && (strlen($chunk) < $len) && (time() < $endtime))
$chunk .= @fread($fp, $len - strlen($chunk));
$url->data .= $chunk;
@@ -309,7 +310,7 @@ function get($p=null, $timeout=5)
}
else
{
- while (!feof($fp))
+ while (!feof($fp) && (time() < $endtime))
$url->data .= @fread($fp, 20480);
}
@@ -321,7 +322,7 @@ function get($p=null, $timeout=5)
}
}
- return $result;
+ return time() < $endtime ? $result : false;
}
@@ -359,9 +360,7 @@ function get_cache_filename($p)
function get_cache($p = array())
{
$p += array('timeout'=>10, 'maxage'=>86400, 'cleanbefore'=>7200, 'safety'=>1, 'cachedir'=>$GLOBALS['ULTRAHOME']."/var/urlcache", 'it_error'=>array());
-
- if ($p['timeout'] > 600)
- it::error("suspiciously high timeout");
+ $p['totaltimeout'] = $p['timeout'];
$path = it_url::get_cache_filename($p);
@mkdir(dirname($path));
@@ -381,7 +380,7 @@ function get_cache($p = array())
touch($path);
EDC('getcache', "refresh", $p['url'], $path);
- if (($result = it_url::get($p['url'], $p['timeout'])))
+ if (($result = it_url::get($p)))
it_url::_atomicwrite($path, $result);
else
touch($path);
@@ -403,7 +402,7 @@ function get_cache($p = array())
# fill cache myself
fclose($dummy);
EDC('getcache', "new", $p['url'], $path);
- $result = it_url::_atomicwrite($path, it_url::get($p['url'], $p['timeout']));
+ $result = it_url::_atomicwrite($path, it_url::get($p));
}
else
{
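After this change, get_cache() passes the whole parameter array through to it_url::get(), so both timeouts reach the fetch loop. A hypothetical direct call (URL and values are examples, not from the repository):

<?php
$html = it_url::get(array(
	'url'		=> 'http://example.com/slow-page',
	'timeout'	=> 5,	# per-read timeout, as before
	'totaltimeout'	=> 30,	# new: wall-clock cap on the whole call
));
if ($html === false)
	echo "fetch failed or total timeout exceeded\n";
?>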