<?php

/**
 * webtrees: online genealogy
 * Copyright (C) 2023 webtrees development team
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
 */

declare(strict_types=1);

namespace Fisharebest\Webtrees\Http\Middleware;

use Fig\Http\Message\StatusCodeInterface;
use Fisharebest\Webtrees\Registry;
use Fisharebest\Webtrees\Validator;
use GuzzleHttp\Client;
use GuzzleHttp\Exception\GuzzleException;
use Iodev\Whois\Loaders\CurlLoader;
use Iodev\Whois\Modules\Asn\AsnRouteInfo;
use Iodev\Whois\Whois;
use IPLib\Address\AddressInterface;
use IPLib\Factory as IPFactory;
use IPLib\Range\RangeInterface;
use Psr\Http\Message\ResponseInterface;
use Psr\Http\Message\ServerRequestInterface;
use Psr\Http\Server\MiddlewareInterface;
use Psr\Http\Server\RequestHandlerInterface;
use Throwable;

use function array_filter;
use function array_map;
use function assert;
use function gethostbyaddr;
use function gethostbyname;
use function preg_match_all;
use function random_int;
use function response;
use function str_contains;
use function str_ends_with;

/**
 * Middleware to block bad robots before they waste our valuable CPU cycles.
 */
class BadBotBlocker implements MiddlewareInterface
{
    // Matches a single dotted-quad octet (0-255) without leading-zero ambiguity.
    private const REGEX_OCTET = '(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)';
    private const REGEX_IPV4  = '/\\b' . self::REGEX_OCTET . '(?:\\.' . self::REGEX_OCTET . '){3}\\b/';

    // Cache whois requests. Try to avoid all caches expiring at the same time.
    private const WHOIS_TTL_MIN = 28 * 86400;
    private const WHOIS_TTL_MAX = 35 * 86400;
    private const WHOIS_TIMEOUT = 5;

    // Bad robots - SEO optimisers, advertisers, etc. This list is shared with robots.txt.
    public const BAD_ROBOTS = [
        'admantx',
        'Adsbot',
        'AhrefsBot',
        'Amazonbot', // Until it understands crawl-delay and noindex / nofollow
        'AntBot', // Aggressive crawler
        'AspiegelBot',
        'Awario', // Brand management
        'Barkrowler', // Crawler for babbar.tech
        'BLEXBot',
        'Bytespider', // Aggressive crawler from Bytedance/TikTok
        'CCBot', // Used to train a number of LLMs
        'CensysInspect', // Vulnerability scanner
        'ChatGPT-User', // Used by ChatGPT during operation
        'ClaudeBot', // Collects training data for LLMs
        'DataForSeoBot', // https://dataforseo.com/dataforseo-bot
        'DotBot',
        'Expanse', // Another pointless crawler
        'FacebookBot', // Collects training data for Facebook's LLM translator.
        'fidget-spinner-bot', // Aggressive crawler
        'Foregenix', // Vulnerability scanner
        'Go-http-client', // Crawler library used by many bots
        'Google-Extended', // Collects training data for Google Bard
        'GPTBot', // Collects training data for ChatGPT
        'Grapeshot',
        'Honolulu-bot', // Aggressive crawler, no info available
        'ia_archiver',
        'internet-measurement', // Driftnet
        'IonCrawl',
        'Java', // Crawler library used by many bots
        'linabot', // Aggressive crawler, no info available
        'Linguee',
        'MegaIndex.ru',
        'MJ12bot',
        'netEstate NE',
        'Omgilibot', // Collects training data for LLMs
        'panscient',
        'PetalBot',
        'phxbot', // Badly written crawler
        'proximic',
        'python-requests', // Crawler library used by many bots
        'Scrapy', // Scraping tool
        'SeekportBot', // Pretends to be a search engine - but isn't
        'SemrushBot',
        'serpstatbot',
        'SEOkicks',
        'SiteKiosk',
        'test-bot', // Aggressive crawler
        'TinyTestBot',
        'Turnitin',
        'wp_is_mobile', // Nothing to do with wordpress
        'XoviBot',
        'YisouSpider',
        'ZoominfoBot',
    ];

    /**
     * Some search engines use reverse/forward DNS to verify the IP address.
     *
     * @see https://developer.amazon.com/support/amazonbot
     * @see https://support.google.com/webmasters/answer/80553?hl=en
     * @see https://www.bing.com/webmaster/help/which-crawlers-does-bing-use-8c184ec0
     * @see https://www.bing.com/webmaster/help/how-to-verify-bingbot-3905dc26
     * @see https://yandex.com/support/webmaster/robot-workings/check-yandex-robots.html
     * @see https://www.mojeek.com/bot.html
     * @see https://support.apple.com/en-gb/HT204683
     */
    private const ROBOT_REV_FWD_DNS = [
        'Amazonbot'        => ['.crawl.amazon.com'],
        'Applebot'         => ['.applebot.apple.com'],
        'BingPreview'      => ['.search.msn.com'],
        'Google'           => ['.google.com', '.googlebot.com'],
        'Mail.RU_Bot'      => ['.mail.ru'],
        'MicrosoftPreview' => ['.search.msn.com'],
        'MojeekBot'        => ['.mojeek.com'],
        'Qwantify'         => ['.qwant.com'],
        'Sogou'            => ['.crawl.sogou.com'],
        'Yahoo'            => ['.crawl.yahoo.net'],
        'Yandex'           => ['.yandex.ru', '.yandex.net', '.yandex.com'],
        'bingbot'          => ['.search.msn.com'],
        'msnbot'           => ['.search.msn.com'],
    ];

    /**
     * Some search engines only use reverse DNS to verify the IP address.
     *
     * @see https://help.baidu.com/question?prod_id=99&class=0&id=3001
     * @see https://napoveda.seznam.cz/en/full-text-search/seznambot-crawler
     * @see https://www.ionos.de/terms-gtc/faq-crawler
     */
    private const ROBOT_REV_ONLY_DNS = [
        'Baiduspider' => ['.baidu.com', '.baidu.jp'],
        'FreshBot'    => ['.seznam.cz'],
        'IonCrawl'    => ['.1und1.org'],
        'Neevabot'    => ['.neeva.com'],
        'SeznamBot'   => ['.seznam.cz'],
    ];

    /**
     * Some search engines operate from designated IP addresses.
     *
     * @see https://www.apple.com/go/applebot
     * @see https://help.duckduckgo.com/duckduckgo-help-pages/results/duckduckbot
     */
    private const ROBOT_IPS = [
        'AppleBot'    => [
            '17.0.0.0/8',
        ],
        'Ask Jeeves'  => [
            '65.214.45.143',
            '65.214.45.148',
            '66.235.124.192',
            '66.235.124.7',
            '66.235.124.101',
            '66.235.124.193',
            '66.235.124.73',
            '66.235.124.196',
            '66.235.124.74',
            '63.123.238.8',
            '202.143.148.61',
        ],
        'DuckDuckBot' => [
            '23.21.227.69',
            '50.16.241.113',
            '50.16.241.114',
            '50.16.241.117',
            '50.16.247.234',
            '52.204.97.54',
            '52.5.190.19',
            '54.197.234.188',
            '54.208.100.253',
            '54.208.102.37',
            '107.21.1.8',
        ],
    ];

    /**
     * Some search engines operate from designated IP addresses.
     *
     * @see https://bot.seekport.com/
     */
    private const ROBOT_IP_FILES = [
        'SeekportBot' => 'https://bot.seekport.com/seekportbot_ips.txt',
    ];

    /**
     * Some search engines operate from within a designated autonomous system.
     *
     * @see https://developers.facebook.com/docs/sharing/webmasters/crawler
     * @see https://www.facebook.com/peering/
     */
    private const ROBOT_ASNS = [
        'facebook' => ['AS32934', 'AS63293'],
        'twitter'  => ['AS13414'],
    ];

    /**
     * Decide whether the request comes from a known-bad robot, or from a robot
     * impersonating a well-known crawler, and reject it before it reaches the
     * rest of the application.
     *
     * @param ServerRequestInterface  $request
     * @param RequestHandlerInterface $handler
     *
     * @return ResponseInterface
     */
    public function process(ServerRequestInterface $request, RequestHandlerInterface $handler): ResponseInterface
    {
        $ua      = Validator::serverParams($request)->string('HTTP_USER_AGENT', '');
        $ip      = Validator::attributes($request)->string('client-ip');
        $address = IPFactory::parseAddressString($ip);
        assert($address instanceof AddressInterface);

        // Robots we never want, regardless of where they come from.
        foreach (self::BAD_ROBOTS as $robot) {
            if (str_contains($ua, $robot)) {
                return $this->response();
            }
        }

        // Robots that we verify with a reverse DNS lookup followed by a forward lookup.
        foreach (self::ROBOT_REV_FWD_DNS as $robot => $valid_domains) {
            if (str_contains($ua, $robot) && !$this->checkRobotDNS($ip, $valid_domains, false)) {
                return $this->response();
            }
        }

        // Robots that we verify with a reverse DNS lookup only.
        foreach (self::ROBOT_REV_ONLY_DNS as $robot => $valid_domains) {
            if (str_contains($ua, $robot) && !$this->checkRobotDNS($ip, $valid_domains, true)) {
                return $this->response();
            }
        }

        // Robots that we verify against a fixed list of IP addresses/ranges.
        foreach (self::ROBOT_IPS as $robot => $valid_ip_ranges) {
            if (str_contains($ua, $robot)) {
                foreach ($valid_ip_ranges as $ip_range) {
                    $range = IPFactory::parseRangeString($ip_range);

                    if ($range instanceof RangeInterface && $range->contains($address)) {
                        continue 2;
                    }
                }

                return $this->response();
            }
        }

        // Robots that we verify against a list of IP addresses published by their operator.
        foreach (self::ROBOT_IP_FILES as $robot => $url) {
            if (str_contains($ua, $robot)) {
                $valid_ip_ranges = $this->fetchIpRangesForUrl($robot, $url);

                foreach ($valid_ip_ranges as $ip_range) {
                    $range = IPFactory::parseRangeString($ip_range);

                    if ($range instanceof RangeInterface && $range->contains($address)) {
                        continue 2;
                    }
                }

                return $this->response();
            }
        }

        // Robots that we verify against the IP ranges of their autonomous system(s).
        // A robot is genuine if its address is found in ANY of its operator's ASNs,
        // so only block after every ASN has been checked.  (A `continue 2` here
        // would move on to the next ASN and wrongly block addresses that matched
        // an earlier one.)
        foreach (self::ROBOT_ASNS as $robot => $asns) {
            if (str_contains($ua, $robot)) {
                foreach ($asns as $asn) {
                    foreach ($this->fetchIpRangesForAsn($asn) as $range) {
                        if ($range->contains($address)) {
                            // Verified - skip to the next robot.
                            continue 3;
                        }
                    }
                }

                return $this->response();
            }
        }

        // Allow sites to block access from entire networks.
        $block_asn = Validator::attributes($request)->string('block_asn', '');
        preg_match_all('/(AS\d+)/', $block_asn, $matches);

        foreach ($matches[1] as $asn) {
            foreach ($this->fetchIpRangesForAsn($asn) as $range) {
                if ($range->contains($address)) {
                    return $this->response();
                }
            }
        }

        return $handler->handle($request);
    }

    /**
     * Check that an IP address belongs to a robot operator using a forward/reverse DNS lookup.
     *
     * @param string        $ip
     * @param array<string> $valid_domains
     * @param bool          $reverse_only  true when the operator documents only a reverse lookup
     *
     * @return bool
     */
    private function checkRobotDNS(string $ip, array $valid_domains, bool $reverse_only): bool
    {
        // NOTE: on lookup failure gethostbyaddr() may also return the IP unchanged,
        // which will not match any of the valid domains below.
        $host = gethostbyaddr($ip);

        if ($host === false) {
            return false;
        }

        foreach ($valid_domains as $domain) {
            if (str_ends_with($host, $domain)) {
                // Forward lookup must resolve back to the original IP, unless
                // the operator only supports reverse verification.
                return $reverse_only || $ip === gethostbyname($host);
            }
        }

        return false;
    }

    /**
     * Perform a whois search for an ASN.
     *
     * @param string $asn The autonomous system number to query
     *
     * @return array<RangeInterface>  empty on any whois failure
     */
    private function fetchIpRangesForAsn(string $asn): array
    {
        return Registry::cache()->file()->remember('whois-asn-' . $asn, static function () use ($asn): array {
            // Each route advertises either an IPv4 or an IPv6 prefix.
            $mapper = static fn (AsnRouteInfo $route_info): ?RangeInterface => IPFactory::parseRangeString($route_info->route ?: $route_info->route6);

            try {
                $loader = new CurlLoader(self::WHOIS_TIMEOUT);
                $whois  = new Whois($loader);
                $info   = $whois->loadAsnInfo($asn);
                $routes = $info->routes;
                $ranges = array_map($mapper, $routes);

                // Drop routes that could not be parsed into ranges.
                return array_filter($ranges);
            } catch (Throwable) {
                return [];
            }
        }, random_int(self::WHOIS_TTL_MIN, self::WHOIS_TTL_MAX));
    }

    /**
     * Fetch a list of IP addresses from a remote file.
     *
     * @param string $ua  user-agent fragment - used only as part of the cache key
     * @param string $url
     *
     * @return array<string>  IPv4 addresses found in the file; empty on fetch failure
     */
    private function fetchIpRangesForUrl(string $ua, string $url): array
    {
        return Registry::cache()->file()->remember('url-ip-list-' . $ua, static function () use ($url): array {
            try {
                $client   = new Client();
                $response = $client->get($url, ['timeout' => 5]);
                $contents = $response->getBody()->getContents();

                preg_match_all(self::REGEX_IPV4, $contents, $matches);

                return $matches[0];
            } catch (GuzzleException) {
                return [];
            }
        }, random_int(self::WHOIS_TTL_MIN, self::WHOIS_TTL_MAX));
    }

    /**
     * The response we send to every blocked robot.
     *
     * @return ResponseInterface
     */
    private function response(): ResponseInterface
    {
        return response('Not acceptable', StatusCodeInterface::STATUS_NOT_ACCEPTABLE);
    }
}