<?php

/**
 * webtrees: online genealogy
 * Copyright (C) 2023 webtrees development team
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
 */

declare(strict_types=1);

namespace Fisharebest\Webtrees\Http\Middleware;

use Fig\Http\Message\StatusCodeInterface;
use Fisharebest\Webtrees\Registry;
use Fisharebest\Webtrees\Validator;
use GuzzleHttp\Client;
use GuzzleHttp\Exception\GuzzleException;
use Iodev\Whois\Loaders\CurlLoader;
use Iodev\Whois\Modules\Asn\AsnRouteInfo;
use Iodev\Whois\Whois;
use IPLib\Address\AddressInterface;
use IPLib\Factory as IPFactory;
use IPLib\Range\RangeInterface;
use Psr\Http\Message\ResponseInterface;
use Psr\Http\Message\ServerRequestInterface;
use Psr\Http\Server\MiddlewareInterface;
use Psr\Http\Server\RequestHandlerInterface;
use Throwable;

use function array_filter;
use function array_map;
use function assert;
use function gethostbyaddr;
use function gethostbyname;
use function preg_match_all;
use function random_int;
use function response;
use function str_contains;
use function str_ends_with;

/**
 * Middleware to block bad robots before they waste our valuable CPU cycles.
 */
class BadBotBlocker implements MiddlewareInterface
{
    // Matches a single dotted-decimal octet (0-255) without leading-zero ambiguity.
    private const REGEX_OCTET = '(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)';

    // Matches a complete IPv4 address; used to scrape IP lists from remote text files.
    private const REGEX_IPV4 = '/\\b' . self::REGEX_OCTET . '(?:\\.' . self::REGEX_OCTET . '){3}\\b/';

    // Cache whois requests. Try to avoid all caches expiring at the same time.
    private const WHOIS_TTL_MIN = 28 * 86400;
    private const WHOIS_TTL_MAX = 35 * 86400;
    private const WHOIS_TIMEOUT = 5;

    // Bad robots - SEO optimisers, advertisers, etc. This list is shared with robots.txt.
    public const BAD_ROBOTS = [
        'admantx',
        'Adsbot',
        'AhrefsBot',
        'Amazonbot', // Until it understands crawl-delay and noindex / nofollow
        'AntBot', // Aggressive crawler
        'AspiegelBot',
        'Awario', // Brand management
        'Barkrowler', // Crawler for babbar.tech
        'BLEXBot',
        'Bytespider', // Aggressive crawler from Bytedance/TikTok
        'CCBot', // Used to train a number of LLMs
        'CensysInspect', // Vulnerability scanner
        'ChatGPT-User', // Used by ChatGPT during operation
        'ClaudeBot', // Collects training data for LLMs
        'DataForSeoBot', // https://dataforseo.com/dataforseo-bot
        'DotBot',
        'Expanse', // Another pointless crawler
        'FacebookBot', // Collects training data for Facebook's LLM translator.
        'fidget-spinner-bot', // Agressive crawler
        'Foregenix', // Vulnerability scanner
        'Go-http-client', // Crawler library used by many bots
        'Google-Extended', // Collects training data for Google Bard
        'GPTBot', // Collects training data for ChatGPT
        'Grapeshot',
        'Honolulu-bot', // Aggressive crawer, no info available
        'ia_archiver',
        'internet-measurement', // Driftnet
        'IonCrawl',
        'Java', // Crawler library used by many bots
        'linabot', // Aggressive crawer, no info available
        'Linguee',
        'MegaIndex.ru',
        'MJ12bot',
        'netEstate NE',
        'Omgilibot', // Collects training data for LLMs
        'panscient',
        'PetalBot',
        'proximic',
        'python-requests', // Crawler library used by many bots
        'Scrapy', // Scraping tool
        'SeekportBot', // Pretends to be a search engine - but isn't
        'SemrushBot',
        'serpstatbot',
        'SEOkicks',
        'SiteKiosk',
        'test-bot', // Agressive crawler
        'TinyTestBot',
        'Turnitin',
        'wp_is_mobile', // Nothing to do with wordpress
        'XoviBot',
        'YisouSpider',
        'ZoominfoBot',
    ];

    /**
     * Some search engines use reverse/forward DNS to verify the IP address.
     *
     * @see https://developer.amazon.com/support/amazonbot
     * @see https://support.google.com/webmasters/answer/80553?hl=en
     * @see https://www.bing.com/webmaster/help/which-crawlers-does-bing-use-8c184ec0
     * @see https://www.bing.com/webmaster/help/how-to-verify-bingbot-3905dc26
     * @see https://yandex.com/support/webmaster/robot-workings/check-yandex-robots.html
     * @see https://www.mojeek.com/bot.html
     * @see https://support.apple.com/en-gb/HT204683
     */
    private const ROBOT_REV_FWD_DNS = [
        'Amazonbot'        => ['.crawl.amazon.com'],
        'Applebot'         => ['.applebot.apple.com'],
        'BingPreview'      => ['.search.msn.com'],
        'Google'           => ['.google.com', '.googlebot.com'],
        'Mail.RU_Bot'      => ['.mail.ru'],
        'MicrosoftPreview' => ['.search.msn.com'],
        'MojeekBot'        => ['.mojeek.com'],
        'Qwantify'         => ['.qwant.com'],
        'Sogou'            => ['.crawl.sogou.com'],
        'Yahoo'            => ['.crawl.yahoo.net'],
        'Yandex'           => ['.yandex.ru', '.yandex.net', '.yandex.com'],
        'bingbot'          => ['.search.msn.com'],
        'msnbot'           => ['.search.msn.com'],
    ];

    /**
     * Some search engines only use reverse DNS to verify the IP address.
     *
     * @see https://help.baidu.com/question?prod_id=99&class=0&id=3001
     * @see https://napoveda.seznam.cz/en/full-text-search/seznambot-crawler
     * @see https://www.ionos.de/terms-gtc/faq-crawler
     */
    private const ROBOT_REV_ONLY_DNS = [
        'Baiduspider' => ['.baidu.com', '.baidu.jp'],
        'FreshBot'    => ['.seznam.cz'],
        'IonCrawl'    => ['.1und1.org'],
        'Neevabot'    => ['.neeva.com'],
        'SeznamBot'   => ['.seznam.cz'],
    ];

    /**
     * Some search engines operate from designated IP addresses.
     *
     * @see https://www.apple.com/go/applebot
     * @see https://help.duckduckgo.com/duckduckgo-help-pages/results/duckduckbot
     */
    private const ROBOT_IPS = [
        'AppleBot'    => [
            '17.0.0.0/8',
        ],
        'Ask Jeeves'  => [
            '65.214.45.143',
            '65.214.45.148',
            '66.235.124.192',
            '66.235.124.7',
            '66.235.124.101',
            '66.235.124.193',
            '66.235.124.73',
            '66.235.124.196',
            '66.235.124.74',
            '63.123.238.8',
            '202.143.148.61',
        ],
        'DuckDuckBot' => [
            '23.21.227.69',
            '50.16.241.113',
            '50.16.241.114',
            '50.16.241.117',
            '50.16.247.234',
            '52.204.97.54',
            '52.5.190.19',
            '54.197.234.188',
            '54.208.100.253',
            '54.208.102.37',
            '107.21.1.8',
        ],
    ];

    /**
     * Some search engines operate from designated IP addresses.
     *
     * @see https://bot.seekport.com/
     */
    private const ROBOT_IP_FILES = [
        'SeekportBot' => 'https://bot.seekport.com/seekportbot_ips.txt',
    ];

    /**
     * Some search engines operate from within a designated autonomous system.
     *
     * @see https://developers.facebook.com/docs/sharing/webmasters/crawler
     * @see https://www.facebook.com/peering/
     */
    private const ROBOT_ASNS = [
        'facebook' => ['AS32934', 'AS63293'],
        'twitter'  => ['AS13414'],
    ];

    /**
     * Decide whether the request comes from a bad robot, and block it if so.
     *
     * The checks run cheapest-first: user-agent blacklist, then DNS
     * verification, then IP-range / ASN verification (which may hit the
     * network, but results are cached).
     *
     * @param ServerRequestInterface  $request
     * @param RequestHandlerInterface $handler
     *
     * @return ResponseInterface "406 Not Acceptable" for robots, otherwise the normal response
     */
    public function process(ServerRequestInterface $request, RequestHandlerInterface $handler): ResponseInterface
    {
        $ua      = Validator::serverParams($request)->string('HTTP_USER_AGENT', '');
        $ip      = Validator::attributes($request)->string('client-ip');
        $address = IPFactory::parseAddressString($ip);
        assert($address instanceof AddressInterface);

        // Robots we block unconditionally, based on the user-agent string alone.
        foreach (self::BAD_ROBOTS as $robot) {
            if (str_contains($ua, $robot)) {
                return $this->response();
            }
        }

        // Robots that verify their identity via reverse DNS plus a forward-DNS confirmation.
        foreach (self::ROBOT_REV_FWD_DNS as $robot => $valid_domains) {
            if (str_contains($ua, $robot) && !$this->checkRobotDNS($ip, $valid_domains, false)) {
                return $this->response();
            }
        }

        // Robots that verify their identity via reverse DNS only.
        foreach (self::ROBOT_REV_ONLY_DNS as $robot => $valid_domains) {
            if (str_contains($ua, $robot) && !$this->checkRobotDNS($ip, $valid_domains, true)) {
                return $this->response();
            }
        }

        // Robots that publish a fixed list of IP addresses/ranges.
        foreach (self::ROBOT_IPS as $robot => $valid_ip_ranges) {
            if (str_contains($ua, $robot)) {
                foreach ($valid_ip_ranges as $ip_range) {
                    $range = IPFactory::parseRangeString($ip_range);

                    if ($range instanceof RangeInterface && $range->contains($address)) {
                        continue 2; // Genuine robot - skip to the next robot.
                    }
                }

                // Claimed to be this robot, but the IP does not match - impersonator.
                return $this->response();
            }
        }

        // Robots that publish their IP addresses in a downloadable file.
        foreach (self::ROBOT_IP_FILES as $robot => $url) {
            if (str_contains($ua, $robot)) {
                $valid_ip_ranges = $this->fetchIpRangesForUrl($robot, $url);

                foreach ($valid_ip_ranges as $ip_range) {
                    $range = IPFactory::parseRangeString($ip_range);

                    if ($range instanceof RangeInterface && $range->contains($address)) {
                        continue 2; // Genuine robot - skip to the next robot.
                    }
                }

                return $this->response();
            }
        }

        // Robots that operate from within a known autonomous system.
        foreach (self::ROBOT_ASNS as $robot => $asns) {
            if (str_contains($ua, $robot)) {
                // BUG FIX: previously this returned a block response after checking only
                // the FIRST ASN ('continue 2' advanced to the next ASN, not the next robot),
                // so a genuine robot inside the first ASN could be wrongly blocked.
                // We must only block after the address fails to match EVERY ASN.
                foreach ($asns as $asn) {
                    foreach ($this->fetchIpRangesForAsn($asn) as $range) {
                        if ($range->contains($address)) {
                            continue 3; // Genuine robot - skip to the next robot.
                        }
                    }
                }

                return $this->response();
            }
        }

        // Allow sites to block access from entire networks.
        $block_asn = Validator::attributes($request)->string('block_asn', '');
        preg_match_all('/(AS\d+)/', $block_asn, $matches);

        foreach ($matches[1] as $asn) {
            foreach ($this->fetchIpRangesForAsn($asn) as $range) {
                if ($range->contains($address)) {
                    return $this->response();
                }
            }
        }

        return $handler->handle($request);
    }

    /**
     * Check that an IP address belongs to a robot operator using a forward/reverse DNS lookup.
     *
     * @param string        $ip            Address the request actually came from
     * @param array<string> $valid_domains Domain suffixes the robot's hostname must end with
     * @param bool          $reverse_only  If true, skip the forward-DNS confirmation step
     *
     * @return bool
     */
    private function checkRobotDNS(string $ip, array $valid_domains, bool $reverse_only): bool
    {
        $host = gethostbyaddr($ip);

        if ($host === false) {
            return false;
        }

        foreach ($valid_domains as $domain) {
            if (str_ends_with($host, $domain)) {
                // Forward-confirm: the claimed hostname must resolve back to the same IP.
                return $reverse_only || $ip === gethostbyname($host);
            }
        }

        return false;
    }

    /**
     * Perform a whois search for an ASN.
     *
     * Results are cached for roughly a month; the TTL is randomised so that
     * cached entries do not all expire at the same time.
     *
     * @param string $asn The autonomous system number to query
     *
     * @return array<RangeInterface>
     */
    private function fetchIpRangesForAsn(string $asn): array
    {
        return Registry::cache()->file()->remember('whois-asn-' . $asn, static function () use ($asn): array {
            // A route record holds either an IPv4 route or an IPv6 route6.
            $mapper = static fn (AsnRouteInfo $route_info): ?RangeInterface => IPFactory::parseRangeString($route_info->route ?: $route_info->route6);

            try {
                $loader = new CurlLoader(self::WHOIS_TIMEOUT);
                $whois  = new Whois($loader);
                $info   = $whois->loadAsnInfo($asn);
                $routes = $info->routes;
                $ranges = array_map($mapper, $routes);

                // Drop any routes that failed to parse (null entries).
                return array_filter($ranges);
            } catch (Throwable) {
                // Network/whois failure: treat as "no known ranges" rather than crashing.
                return [];
            }
        }, random_int(self::WHOIS_TTL_MIN, self::WHOIS_TTL_MAX));
    }

    /**
     * Fetch a list of IP addresses from a remote file.
     *
     * @param string $ua  Robot name - used as part of the cache key
     * @param string $url Location of the published IP list
     *
     * @return array<string>
     */
    private function fetchIpRangesForUrl(string $ua, string $url): array
    {
        return Registry::cache()->file()->remember('url-ip-list-' . $ua, static function () use ($url): array {
            try {
                $client   = new Client();
                $response = $client->get($url, ['timeout' => 5]);
                $contents = $response->getBody()->getContents();

                preg_match_all(self::REGEX_IPV4, $contents, $matches);

                return $matches[0];
            } catch (GuzzleException) {
                // Download failure: treat as "no known ranges" rather than crashing.
                return [];
            }
        }, random_int(self::WHOIS_TTL_MIN, self::WHOIS_TTL_MAX));
    }

    /**
     * The response we send to robots we do not like.
     *
     * @return ResponseInterface
     */
    private function response(): ResponseInterface
    {
        return response('Not acceptable', StatusCodeInterface::STATUS_NOT_ACCEPTABLE);
    }
}