<?php

/**
 * webtrees: online genealogy
 * Copyright (C) 2023 webtrees development team
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
 */

declare(strict_types=1);

namespace Fisharebest\Webtrees\Http\Middleware;

use Fig\Http\Message\StatusCodeInterface;
use Fisharebest\Webtrees\Registry;
use Fisharebest\Webtrees\Validator;
use GuzzleHttp\Client;
use GuzzleHttp\Exception\GuzzleException;
use Iodev\Whois\Loaders\CurlLoader;
use Iodev\Whois\Modules\Asn\AsnRouteInfo;
use Iodev\Whois\Whois;
use IPLib\Address\AddressInterface;
use IPLib\Factory as IPFactory;
use IPLib\Range\RangeInterface;
use Psr\Http\Message\ResponseInterface;
use Psr\Http\Message\ServerRequestInterface;
use Psr\Http\Server\MiddlewareInterface;
use Psr\Http\Server\RequestHandlerInterface;
use Throwable;

use function array_filter;
use function array_map;
use function assert;
use function gethostbyaddr;
use function gethostbyname;
use function preg_match_all;
use function random_int;
use function response;
use function str_contains;
use function str_ends_with;

/**
 * Middleware to block bad robots before they waste our valuable CPU cycles.
 */
class BadBotBlocker implements MiddlewareInterface
{
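    // REGEX_OCTET matches a single IPv4 octet (0-255); REGEX_IPV4 matches a dotted-quad
    // IPv4 address on word boundaries. These are used by fetchIpRangesForUrl() below to
    // extract addresses from plain-text IP lists.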
    private const REGEX_OCTET = '(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)';
    private const REGEX_IPV4 = '/\\b' . self::REGEX_OCTET . '(?:\\.' . self::REGEX_OCTET . '){3}\\b/';

    // Cache whois requests. Try to avoid all caches expiring at the same time.
    private const WHOIS_TTL_MIN = 28 * 86400;
    private const WHOIS_TTL_MAX = 35 * 86400;
    private const WHOIS_TIMEOUT = 5;

    // Bad robots - SEO optimisers, advertisers, etc. This list is shared with robots.txt.
    public const BAD_ROBOTS = [
        'admantx',
        'Adsbot',
        'AhrefsBot',
        'Amazonbot', // Until it understands crawl-delay and noindex / nofollow
        'AntBot', // Aggressive crawler
        'AspiegelBot',
        'Awario', // Brand management
        'Barkrowler',
        'BLEXBot',
        'Bytespider',
        'CCBot', // Used to train a number of LLMs
        'ChatGPT-User', // Used by ChatGPT during operation
        'DataForSeoBot', // https://dataforseo.com/dataforseo-bot
        'DotBot',
        'FacebookBot', // Collects training data for Facebook's LLM translator.
        'fidget-spinner-bot', // Aggressive crawler
        'Google-Extended', // Collects training data for Google Bard
        'GPTBot', // Collects training data for ChatGPT
        'Grapeshot',
        'Honolulu-bot', // Aggressive crawler, no info available
        'ia_archiver',
        'linabot', // Aggressive crawler, no info available
        'Linguee',
        'MegaIndex.ru',
        'MJ12bot',
        'netEstate NE',
        'Omgilibot', // Collects training data for LLMs
        'panscient',
        'PetalBot',
        'proximic',
        'SeekportBot', // Pretends to be a search engine - but isn't
        'SemrushBot',
        'serpstatbot',
        'SEOkicks',
        'SiteKiosk',
        'test-bot', // Aggressive crawler
        'TinyTestBot',
        'Turnitin',
        'wp_is_mobile', // Nothing to do with WordPress
        'XoviBot',
        'YisouSpider',
        'ZoominfoBot',
    ];

    /**
     * Some search engines use reverse/forward DNS to verify the IP address.
     *
     * @see https://developer.amazon.com/support/amazonbot
     * @see https://support.google.com/webmasters/answer/80553?hl=en
     * @see https://www.bing.com/webmaster/help/which-crawlers-does-bing-use-8c184ec0
     * @see https://www.bing.com/webmaster/help/how-to-verify-bingbot-3905dc26
     * @see https://yandex.com/support/webmaster/robot-workings/check-yandex-robots.html
     * @see https://www.mojeek.com/bot.html
     * @see https://support.apple.com/en-gb/HT204683
     */
    private const ROBOT_REV_FWD_DNS = [
        'Amazonbot' => ['.crawl.amazon.com'],
        'Applebot' => ['.applebot.apple.com'],
        'BingPreview' => ['.search.msn.com'],
        'Google' => ['.google.com', '.googlebot.com'],
        'Mail.RU_Bot' => ['.mail.ru'],
        'MicrosoftPreview' => ['.search.msn.com'],
        'MojeekBot' => ['.mojeek.com'],
        'Qwantify' => ['.search.qwant.com'],
        'Sogou' => ['.crawl.sogou.com'],
        'Yahoo' => ['.crawl.yahoo.net'],
        'Yandex' => ['.yandex.ru', '.yandex.net', '.yandex.com'],
        'bingbot' => ['.search.msn.com'],
        'msnbot' => ['.search.msn.com'],
    ];

    /**
     * Some search engines only use reverse DNS to verify the IP address.
     *
     * @see https://help.baidu.com/question?prod_id=99&class=0&id=3001
     * @see https://napoveda.seznam.cz/en/full-text-search/seznambot-crawler
     * @see https://www.ionos.de/terms-gtc/faq-crawler
     */
    private const ROBOT_REV_ONLY_DNS = [
        'Baiduspider' => ['.baidu.com', '.baidu.jp'],
        'FreshBot' => ['.seznam.cz'],
        'IonCrawl' => ['.1und1.org'],
        'Neevabot' => ['.neeva.com'],
        'SeznamBot' => ['.seznam.cz'],
    ];

    /**
     * Some search engines operate from designated IP addresses.
     *
     * @see https://www.apple.com/go/applebot
     * @see https://help.duckduckgo.com/duckduckgo-help-pages/results/duckduckbot
     */
    private const ROBOT_IPS = [
        'AppleBot' => [
            '17.0.0.0/8',
        ],
        'Ask Jeeves' => [
            '65.214.45.143',
            '65.214.45.148',
            '66.235.124.192',
            '66.235.124.7',
            '66.235.124.101',
            '66.235.124.193',
            '66.235.124.73',
            '66.235.124.196',
            '66.235.124.74',
            '63.123.238.8',
            '202.143.148.61',
        ],
        'DuckDuckBot' => [
            '23.21.227.69',
            '50.16.241.113',
            '50.16.241.114',
            '50.16.241.117',
            '50.16.247.234',
            '52.204.97.54',
            '52.5.190.19',
            '54.197.234.188',
            '54.208.100.253',
            '54.208.102.37',
            '107.21.1.8',
        ],
    ];

    /**
     * Some search engines operate from designated IP addresses.
     *
     * @see https://bot.seekport.com/
     */
    private const ROBOT_IP_FILES = [
        'SeekportBot' => 'https://bot.seekport.com/seekportbot_ips.txt',
    ];

    /**
     * Some search engines operate from within a designated autonomous system.
     *
     * @see https://developers.facebook.com/docs/sharing/webmasters/crawler
     * @see https://www.facebook.com/peering/
     */
    private const ROBOT_ASNS = [
        'facebook' => ['AS32934', 'AS63293'],
        'twitter' => ['AS13414'],
    ];

    /**
     * @param ServerRequestInterface  $request
     * @param RequestHandlerInterface $handler
     *
     * @return ResponseInterface
     */
    public function process(ServerRequestInterface $request, RequestHandlerInterface $handler): ResponseInterface
    {
        $ua = Validator::serverParams($request)->string('HTTP_USER_AGENT', '');
        $ip = Validator::attributes($request)->string('client-ip');
        $address = IPFactory::parseAddressString($ip);
        assert($address instanceof AddressInterface);

        foreach (self::BAD_ROBOTS as $robot) {
            if (str_contains($ua, $robot)) {
                return $this->response();
            }
        }

        foreach (self::ROBOT_REV_FWD_DNS as $robot => $valid_domains) {
            if (str_contains($ua, $robot) && !$this->checkRobotDNS($ip, $valid_domains, false)) {
                return $this->response();
            }
        }

        foreach (self::ROBOT_REV_ONLY_DNS as $robot => $valid_domains) {
            if (str_contains($ua, $robot) && !$this->checkRobotDNS($ip, $valid_domains, true)) {
                return $this->response();
            }
        }

        foreach (self::ROBOT_IPS as $robot => $valid_ip_ranges) {
            if (str_contains($ua, $robot)) {
                foreach ($valid_ip_ranges as $ip_range) {
                    $range = IPFactory::parseRangeString($ip_range);

                    if ($range instanceof RangeInterface && $range->contains($address)) {
                        continue 2;
                    }
                }

                return $this->response();
            }
        }

        foreach (self::ROBOT_IP_FILES as $robot => $url) {
            if (str_contains($ua, $robot)) {
                $valid_ip_ranges = $this->fetchIpRangesForUrl($robot, $url);

                foreach ($valid_ip_ranges as $ip_range) {
                    $range = IPFactory::parseRangeString($ip_range);

                    if ($range instanceof RangeInterface && $range->contains($address)) {
                        continue 2;
                    }
                }

                return $this->response();
            }
        }

        foreach (self::ROBOT_ASNS as $robot => $asns) {
            if (str_contains($ua, $robot)) {
                // Allow the robot if the address belongs to any of its autonomous systems;
                // otherwise it is an impersonator and is blocked.
                foreach ($asns as $asn) {
                    foreach ($this->fetchIpRangesForAsn($asn) as $range) {
                        if ($range->contains($address)) {
                            continue 3;
                        }
                    }
                }

                return $this->response();
            }
        }

        // Allow sites to block access from entire networks.
        $block_asn = Validator::attributes($request)->string('block_asn', '');
        preg_match_all('/(AS\d+)/', $block_asn, $matches);

        foreach ($matches[1] as $asn) {
            foreach ($this->fetchIpRangesForAsn($asn) as $range) {
                if ($range->contains($address)) {
                    return $this->response();
                }
            }
        }

        return $handler->handle($request);
    }
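
    // Illustration of the reverse/forward DNS check below (example values taken from
    // Google's crawler-verification guide, not from this code): gethostbyaddr('66.249.66.1')
    // returns 'crawl-66-249-66-1.googlebot.com', which ends with '.googlebot.com', and
    // gethostbyname() of that host resolves back to the original address '66.249.66.1'.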

    /**
     * Check that an IP address belongs to a robot operator using a forward/reverse DNS lookup.
     *
     * @param string        $ip
     * @param array<string> $valid_domains
     * @param bool          $reverse_only
     *
     * @return bool
     */
    private function checkRobotDNS(string $ip, array $valid_domains, bool $reverse_only): bool
    {
        $host = gethostbyaddr($ip);

        if ($host === false) {
            return false;
        }

        foreach ($valid_domains as $domain) {
            if (str_ends_with($host, $domain)) {
                return $reverse_only || $ip === gethostbyname($host);
            }
        }

        return false;
    }

    /**
     * Perform a whois search for an ASN.
     *
     * @param string $asn The autonomous system number to query
     *
     * @return array<RangeInterface>
     */
    private function fetchIpRangesForAsn(string $asn): array
    {
        return Registry::cache()->file()->remember('whois-asn-' . $asn, static function () use ($asn): array {
            $mapper = static fn (AsnRouteInfo $route_info): ?RangeInterface => IPFactory::parseRangeString($route_info->route ?: $route_info->route6);

            try {
                $loader = new CurlLoader(self::WHOIS_TIMEOUT);
                $whois = new Whois($loader);
                $info = $whois->loadAsnInfo($asn);
                $routes = $info->routes;
                $ranges = array_map($mapper, $routes);

                return array_filter($ranges);
            } catch (Throwable) {
                return [];
            }
        }, random_int(self::WHOIS_TTL_MIN, self::WHOIS_TTL_MAX));
    }

    /**
     * Fetch a list of IP addresses from a remote file.
     *
     * @param string $ua
     * @param string $url
     *
     * @return array<string>
     */
    private function fetchIpRangesForUrl(string $ua, string $url): array
    {
        return Registry::cache()->file()->remember('url-ip-list-' . $ua, static function () use ($url): array {
            try {
                $client = new Client();
                $response = $client->get($url, ['timeout' => 5]);
                $contents = $response->getBody()->getContents();

                preg_match_all(self::REGEX_IPV4, $contents, $matches);

                return $matches[0];
            } catch (GuzzleException) {
                return [];
            }
        }, random_int(self::WHOIS_TTL_MIN, self::WHOIS_TTL_MAX));
    }

    /**
     * @return ResponseInterface
     */
    private function response(): ResponseInterface
    {
        return response('Not acceptable', StatusCodeInterface::STATUS_NOT_ACCEPTABLE);
    }
}