From 24331e26b7308f25ed9e75401c79152e58bd9557 Mon Sep 17 00:00:00 2001 From: Urban Müller Date: Tue, 6 Aug 2024 00:51:14 +0200 Subject: try to reduce crawler accesses on cacheable data --- itjs.class | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/itjs.class b/itjs.class index 37cefe6..cf77f0c 100644 --- a/itjs.class +++ b/itjs.class @@ -186,7 +186,8 @@ static function far_future_headers($p = array()) if (it::is_live() && !$_REQUEST['retry']) { $keeptime = $crc == "-" ? 0 : ($crc || $p['nocrc'] ? 30*86400 : 900); # long expire if checksum present - header("Cache-Control: max-age=$keeptime, private"); # proxies should not cache since contents of same url can differ between browsers + $private = it::match('Googlebot|Bingbot|Slurp|DuckDuckBot', $_SERVER['HTTP_USER_AGENT']) ? "public" : "private"; + header("Cache-Control: max-age=$keeptime, $private"); # private for browsers since contents of same url can differ between them; public for known crawlers so shared caches absorb their requests header("Expires: " . gmdate("D, d M Y H:i:s", time() + $keeptime). " GMT"); } } -- cgit v1.2.3