summaryrefslogtreecommitdiff
path: root/it_url.class
diff options
context:
space:
mode:
author    Urban Müller    2020-10-16 14:26:54 +0200
committer Urban Müller    2020-10-16 14:26:54 +0200
commit8fc9ba53252afdef2c78d811ef9b3ec65b755caf (patch)
tree61b1612f5d1f406f9efeb1cd8bdb15f17dc031c9 /it_url.class
parenta49fca3511402105635994df890e1348883598a6 (diff)
downloaditools-8fc9ba53252afdef2c78d811ef9b3ec65b755caf.tar.gz
itools-8fc9ba53252afdef2c78d811ef9b3ec65b755caf.tar.bz2
itools-8fc9ba53252afdef2c78d811ef9b3ec65b755caf.zip
document "followlocation"
Diffstat (limited to 'it_url.class')
-rw-r--r--  it_url.class  14
1 files changed, 8 insertions, 6 deletions
diff --git a/it_url.class b/it_url.class
index c1d7ec5..4e40be1 100644
--- a/it_url.class
+++ b/it_url.class
@@ -106,6 +106,7 @@ static function _postprocess($data, $p)
* @param $p['files'] [fieldname => filename] of files to upload
* @param $p['writefunction'] function to be called whenever data is received (for server-sent-events etc.)
* @param $p['fetchsleep'] Number of seconds to wait after fetch, fractions ok
+ * @param $p['followlocation'] Follow redirects [true]
* @param $p['retries'] Number of retries if download fails, default 1
* @param $p['retrysleep'] Number of seconds to wait before retry (additional to fetchsleep), fractions ok
* @param $p['compression'] use compression (uses curl to do that)
@@ -332,12 +333,13 @@ function request($p=array())
/**
* Get multiple URL in parallel with timeout. Needs to be called statically
* @param $p parameter array with the following keys (same as it_url::get)
- * @param $p['urls'] array/generator of urls to get
- * @param $p['timeout'] timeout per read in seconds, defaults to 5. (TODO: fractions allowed?)
- * @param $p['totaltimeout'] timeout for the whole function call (fractions allowed)
- * @param $p['headers'] optional array of HTTP headers to send
- * @param $p['parallel'] max number of parallel requests
- * @param $p['noresults'] do not keep results around
+ * @param $p['urls'] array/generator of urls to get
+ * @param $p['timeout'] timeout per read in seconds, defaults to 5. (TODO: fractions allowed?)
+ * @param $p['totaltimeout'] timeout for the whole function call (fractions allowed)
+ * @param $p['followlocation'] follow redirects [true]
+ * @param $p['headers'] optional array of HTTP headers to send
+ * @param $p['parallel'] max number of parallel requests
+ * @param $p['noresults'] do not keep results around
+ * @return array of contents (or false for errors like timeouts) of resulting page using same
* keys as the urls input array, considering redirects, excluding headers
*/