From 9238b32da3afc88b80b1d51a15844fa0c1e8a9f6 Mon Sep 17 00:00:00 2001
From: Christian Schneider
Date: Tue, 30 Apr 2024 10:00:32 +0200
Subject: Fix is_reachable fetching always twice if content longer than 1000
 bytes, make retry test having to retry more often to test that code path

---
 it_url.class  | 18 ++++++++++--------
 test/it_url.t |  2 +-
 2 files changed, 11 insertions(+), 9 deletions(-)

diff --git a/it_url.class b/it_url.class
index a94975d..83ea9fb 100644
--- a/it_url.class
+++ b/it_url.class
@@ -305,17 +305,15 @@ function request($p=array())
 
 
 	// FIXME 2025-01 NG just use CURLOPT_MAXFILESIZE if we have curl 8.4
+	$content = "";
 	if ($p['maxlength'] && !$p['writefunction'])
 	{
-		$content = "";
 		$opts[CURLOPT_WRITEFUNCTION] = function ($dummy, $data) use ($p, &$content) {
-			static $total = 0;
-			$size = strlen($data);
-			$total += $size;
-			if ($total > $p['maxlength'])
-				return 0;
-			$content .= $data;
-			return $size;
+			static $space;
+			$write = min($space ?? $p['maxlength'], strlen($data));
+			$content .= substr($data, 0, $write);
+			$space -= $write;
+			return $write;
 		};
 	}
 
@@ -341,6 +339,10 @@ function request($p=array())
 
 	$url->parse_http_header($url->header);
 
+	# Change result status for content longer than maxlength to 204 as we do not return partial data but still want to indicate success e.g. for is_reachable
+	if ($p['maxlength'] && $url->result == 200 && strlen($content) && !$got)
+		$url->result = 204;
+
 	if ($p['filemtime'] && ($url->result == 304))
 	{
 		$result = true;	# Not modified, success but no data
diff --git a/test/it_url.t b/test/it_url.t
index 47064b5..1898d4b 100755
--- a/test/it_url.t
+++ b/test/it_url.t
@@ -356,7 +356,7 @@ handle_server(
 		"failure"
 	),
 	is(
-		it_url::get(['url' => "http://$host/maybe_error?chance=10", 'empty_on_fail' => true, 'retries' => 10]),
+		it_url::get(['url' => "http://$host/maybe_error?chance=25", 'empty_on_fail' => true, 'retries' => 10]),
 		"success",
 		"Retry on sporadically failing url in ::get"
 	),
-- 
cgit v1.2.3
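
For readers who want the idea behind the it_url.class change in isolation, here is a minimal standalone PHP sketch of the same technique: it caps the downloaded body at a byte budget inside a CURLOPT_WRITEFUNCTION callback and, like the commit, maps an over-limit but otherwise successful response to status 204 so a reachability check can treat it as success instead of retrying. This is an illustration, not the it_url API: the function name fetch_capped, the example URL and the 1000-byte default are invented for the sketch, and it_url internals such as $got are not reproduced.

<?php
// Standalone sketch (hypothetical helper, not the it_url API): download at most
// $maxlength bytes of a response body and report an over-long body as 204.
function fetch_capped($url, $maxlength = 1000)
{
	$content = "";
	$space = $maxlength; // remaining byte budget, seeded before the first chunk

	$curl = curl_init($url);
	curl_setopt_array($curl, [
		CURLOPT_FOLLOWLOCATION => true,
		CURLOPT_WRITEFUNCTION => function ($handle, $data) use (&$content, &$space) {
			// Keep only as many bytes as the budget allows; returning fewer
			// bytes than received makes curl stop the transfer early.
			$write = min($space, strlen($data));
			$content .= substr($data, 0, $write);
			$space -= $write;
			return $write;
		},
	]);

	$ok = curl_exec($curl);
	$status = curl_getinfo($curl, CURLINFO_HTTP_CODE);
	$aborted = !$ok && curl_errno($curl) == CURLE_WRITE_ERROR;
	curl_close($curl);

	// Same idea as the patch: the server answered fine, we just refused to read
	// it all, so report 204 (success, no usable body) rather than a failure.
	if ($aborted && $status == 200 && strlen($content))
		$status = 204;

	return ['status' => $status, 'content' => $aborted ? "" : $content];
}

// Usage (example.com is just an illustration): a body longer than 1000 bytes
// comes back as status 204 with empty content, a short body as 200 with data.
var_dump(fetch_capped("https://example.com/"));

Returning fewer bytes than curl handed to the callback is what aborts the transfer (curl reports CURLE_WRITE_ERROR), which is why the sketch distinguishes that case from a genuine network failure instead of treating every failed curl_exec() as unreachable.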