<?php

/**
 * webtrees: online genealogy
 * Copyright (C) 2022 webtrees development team
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
 */

declare(strict_types=1);

namespace Fisharebest\Webtrees\Http\Middleware;

use Fig\Http\Message\StatusCodeInterface;
use Fisharebest\Webtrees\Registry;
use Fisharebest\Webtrees\Validator;
use Iodev\Whois\Loaders\CurlLoader;
use Iodev\Whois\Modules\Asn\AsnRouteInfo;
use Iodev\Whois\Whois;
use IPLib\Address\AddressInterface;
use IPLib\Factory as IPFactory;
use IPLib\Range\RangeInterface;
use Psr\Http\Message\ResponseInterface;
use Psr\Http\Message\ServerRequestInterface;
use Psr\Http\Server\MiddlewareInterface;
use Psr\Http\Server\RequestHandlerInterface;
use Throwable;

use function array_filter;
use function array_map;
use function assert;
use function gethostbyaddr;
use function gethostbyname;
use function preg_match_all;
use function random_int;
use function response;
use function str_contains;
use function str_ends_with;

/**
 * Middleware to block bad robots before they waste our valuable CPU cycles.
 */
class BadBotBlocker implements MiddlewareInterface
{
    // Cache whois requests. Try to avoid all caches expiring at the same time.
    private const WHOIS_TTL_MIN = 28 * 86400;
    private const WHOIS_TTL_MAX = 35 * 86400;
    private const WHOIS_TIMEOUT = 5;

    // Bad robots - SEO optimisers, advertisers, etc. This list is shared with robots.txt.
    public const BAD_ROBOTS = [
        'admantx',
        'Adsbot',
        'AhrefsBot',
        'AspiegelBot',
        'Barkrowler',
        'BLEXBot',
        'DataForSEO',
        'DotBot',
        'Grapeshot',
        'ia_archiver',
        'Linguee',
        'MJ12bot',
        'netEstate NE',
        'panscient',
        'PetalBot',
        'proximic',
        'SemrushBot',
        'SEOkicks',
        'SiteKiosk',
        'Turnitin',
        'XoviBot',
        'ZoominfoBot',
    ];

    /**
     * Some search engines use reverse/forward DNS to verify the IP address.
     *
     * @see https://developer.amazon.com/support/amazonbot
     * @see https://support.google.com/webmasters/answer/80553?hl=en
     * @see https://www.bing.com/webmaster/help/which-crawlers-does-bing-use-8c184ec0
     * @see https://www.bing.com/webmaster/help/how-to-verify-bingbot-3905dc26
     * @see https://yandex.com/support/webmaster/robot-workings/check-yandex-robots.html
     */
    private const ROBOT_REV_FWD_DNS = [
        'Amazonbot'   => ['.crawl.amazon.com'],
        'bingbot'     => ['.search.msn.com'],
        'BingPreview' => ['.search.msn.com'],
        'Google'      => ['.google.com', '.googlebot.com'],
        'Mail.RU_Bot' => ['.mail.ru'],
        'msnbot'      => ['.search.msn.com'],
        'Qwantify'    => ['.search.qwant.com'],
        'Sogou'       => ['.crawl.sogou.com'],
        'Yahoo'       => ['.crawl.yahoo.net'],
        'Yandex'      => ['.yandex.ru', '.yandex.net', '.yandex.com'],
    ];

    /**
     * Some search engines only use reverse DNS to verify the IP address.
     *
     * @see https://help.baidu.com/question?prod_id=99&class=0&id=3001
     * @see https://napoveda.seznam.cz/en/full-text-search/seznambot-crawler
     * @see https://www.ionos.de/terms-gtc/faq-crawler
     */
    private const ROBOT_REV_ONLY_DNS = [
        'Baiduspider' => ['.baidu.com', '.baidu.jp'],
        'FreshBot'    => ['.seznam.cz'],
        'IonCrawl'    => ['.1und1.org'],
        'Neevabot'    => ['.neeva.com'],
    ];

    /**
     * Some search engines operate from designated IP addresses.
     *
     * @see https://www.apple.com/go/applebot
     * @see https://help.duckduckgo.com/duckduckgo-help-pages/results/duckduckbot
     */
    private const ROBOT_IPS = [
        'AppleBot'    => [
            '17.0.0.0/8',
        ],
        'Ask Jeeves'  => [
            '65.214.45.143',
            '65.214.45.148',
            '66.235.124.192',
            '66.235.124.7',
            '66.235.124.101',
            '66.235.124.193',
            '66.235.124.73',
            '66.235.124.196',
            '66.235.124.74',
            '63.123.238.8',
            '202.143.148.61',
        ],
        'DuckDuckBot' => [
            '23.21.227.69',
            '50.16.241.113',
            '50.16.241.114',
            '50.16.241.117',
            '50.16.247.234',
            '52.204.97.54',
            '52.5.190.19',
            '54.197.234.188',
            '54.208.100.253',
            '54.208.102.37',
            '107.21.1.8',
        ],
    ];

    /**
     * Some search engines operate from within a designated autonomous system.
     *
     * @see https://developers.facebook.com/docs/sharing/webmasters/crawler
     * @see https://www.facebook.com/peering/
     */
    private const ROBOT_ASNS = [
        'facebook' => ['AS32934', 'AS63293'],
        'twitter'  => ['AS13414'],
    ];

    /**
     * Reject the request with HTTP 406 when the client appears to be a bad
     * robot, or a robot impersonating a well-known crawler; otherwise pass
     * the request on to the next handler.
     *
     * @param ServerRequestInterface  $request
     * @param RequestHandlerInterface $handler
     *
     * @return ResponseInterface
     */
    public function process(ServerRequestInterface $request, RequestHandlerInterface $handler): ResponseInterface
    {
        $ua      = Validator::serverParams($request)->string('HTTP_USER_AGENT', '');
        $ip      = Validator::attributes($request)->string('client-ip');
        $address = IPFactory::parseAddressString($ip);
        assert($address instanceof AddressInterface);

        // Robots we never want to serve, regardless of origin.
        foreach (self::BAD_ROBOTS as $robot) {
            if (str_contains($ua, $robot)) {
                return $this->response();
            }
        }

        // Robots that can be verified by a reverse DNS lookup followed by a
        // confirming forward lookup.
        foreach (self::ROBOT_REV_FWD_DNS as $robot => $valid_domains) {
            if (str_contains($ua, $robot) && !$this->checkRobotDNS($ip, $valid_domains, false)) {
                return $this->response();
            }
        }

        // Robots that can only be verified by a reverse DNS lookup.
        foreach (self::ROBOT_REV_ONLY_DNS as $robot => $valid_domains) {
            if (str_contains($ua, $robot) && !$this->checkRobotDNS($ip, $valid_domains, true)) {
                return $this->response();
            }
        }

        // Robots that publish the IP addresses/ranges they crawl from.
        foreach (self::ROBOT_IPS as $robot => $valid_ips) {
            if (str_contains($ua, $robot)) {
                // NOTE: do not reuse $ip here - it still holds the client IP.
                foreach ($valid_ips as $valid_ip) {
                    $range = IPFactory::parseRangeString($valid_ip);

                    if ($range instanceof RangeInterface && $range->contains($address)) {
                        continue 2;
                    }
                }

                return $this->response();
            }
        }

        // Robots that crawl from within a designated autonomous system.
        // A robot may operate from several ASNs, so only block once the
        // address has been checked against the ranges of ALL of its ASNs.
        foreach (self::ROBOT_ASNS as $robot => $asns) {
            if (str_contains($ua, $robot)) {
                foreach ($asns as $asn) {
                    foreach ($this->fetchIpRangesForAsn($asn) as $range) {
                        if ($range->contains($address)) {
                            // Verified - move on to the next robot.
                            continue 3;
                        }
                    }
                }

                // Claims to be this robot, but comes from none of its ASNs.
                return $this->response();
            }
        }

        // Allow sites to block access from entire networks.
        $block_asn = Validator::attributes($request)->string('block_asn', '');
        preg_match_all('/(AS\d+)/', $block_asn, $matches);

        foreach ($matches[1] as $asn) {
            foreach ($this->fetchIpRangesForAsn($asn) as $range) {
                if ($range->contains($address)) {
                    return $this->response();
                }
            }
        }

        return $handler->handle($request);
    }

    /**
     * Check that an IP address belongs to a robot operator using a forward/reverse DNS lookup.
     *
     * @param string        $ip            Client IP address, as sent in the request
     * @param array<string> $valid_domains Domain suffixes the robot operator crawls from
     * @param bool          $reverse_only  Skip the confirming forward lookup
     *
     * @return bool
     */
    private function checkRobotDNS(string $ip, array $valid_domains, bool $reverse_only): bool
    {
        $host = gethostbyaddr($ip);

        if ($host === false) {
            return false;
        }

        foreach ($valid_domains as $domain) {
            if (str_ends_with($host, $domain)) {
                // A forward lookup of the host must resolve back to the same IP,
                // unless the operator only supports reverse verification.
                return $reverse_only || $ip === gethostbyname($host);
            }
        }

        return false;
    }

    /**
     * Perform a whois search for an ASN.
     *
     * Results are cached for roughly a month, with a randomised TTL so that
     * the cached entries do not all expire at the same time.  Lookup failures
     * are cached too (as an empty list) to avoid hammering the whois server.
     *
     * @param string $asn - The autonomous system number to query
     *
     * @return array<RangeInterface>
     */
    private function fetchIpRangesForAsn(string $asn): array
    {
        return Registry::cache()->file()->remember('whois-asn-' . $asn, static function () use ($asn): array {
            // A route record contains either an IPv4 route or an IPv6 route6.
            $mapper = static fn (AsnRouteInfo $route_info): ?RangeInterface => IPFactory::parseRangeString($route_info->route ?: $route_info->route6);

            try {
                $loader = new CurlLoader(self::WHOIS_TIMEOUT);
                $whois  = new Whois($loader);
                $info   = $whois->loadAsnInfo($asn);
                $routes = $info->routes;
                $ranges = array_map($mapper, $routes);

                // Drop any routes that failed to parse.
                return array_filter($ranges);
            } catch (Throwable $ex) {
                return [];
            }
        }, random_int(self::WHOIS_TTL_MIN, self::WHOIS_TTL_MAX));
    }

    /**
     * The response we send to blocked robots.
     *
     * @return ResponseInterface
     */
    private function response(): ResponseInterface
    {
        return response('Not acceptable', StatusCodeInterface::STATUS_NOT_ACCEPTABLE);
    }
}