<?php

/**
 * webtrees: online genealogy
 * Copyright (C) 2022 webtrees development team
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
 */

declare(strict_types=1);

namespace Fisharebest\Webtrees\Http\Middleware;

use Fig\Http\Message\StatusCodeInterface;
use Fisharebest\Webtrees\Registry;
use Fisharebest\Webtrees\Validator;
use Iodev\Whois\Loaders\CurlLoader;
use Iodev\Whois\Modules\Asn\AsnRouteInfo;
use Iodev\Whois\Whois;
use IPLib\Address\AddressInterface;
use IPLib\Factory as IPFactory;
use IPLib\Range\RangeInterface;
use Psr\Http\Message\ResponseInterface;
use Psr\Http\Message\ServerRequestInterface;
use Psr\Http\Server\MiddlewareInterface;
use Psr\Http\Server\RequestHandlerInterface;
use Throwable;

use function array_filter;
use function array_map;
use function assert;
use function gethostbyaddr;
use function gethostbyname;
use function preg_match_all;
use function random_int;
use function response;
use function str_contains;
use function str_ends_with;

/**
 * Middleware to block bad robots before they waste our valuable CPU cycles.
 */
class BadBotBlocker implements MiddlewareInterface
{
    // Cache whois requests. Try to avoid all caches expiring at the same time.
    private const WHOIS_TTL_MIN = 28 * 86400;
    private const WHOIS_TTL_MAX = 35 * 86400;
    private const WHOIS_TIMEOUT = 5;

    // Bad robots - SEO optimisers, advertisers, etc. This list is shared with robots.txt.
    public const BAD_ROBOTS = [
        'admantx',
        'Adsbot',
        'AhrefsBot',
        'AspiegelBot',
        'Barkrowler',
        'BLEXBot',
        'DataForSEO',
        'DotBot',
        'Grapeshot',
        'ia_archiver',
        'Linguee',
        'MJ12bot',
        'panscient',
        'PetalBot',
        'proximic',
        'SemrushBot',
        'Turnitin',
        'XoviBot',
        'ZoominfoBot',
    ];

    /**
     * Some search engines use reverse/forward DNS to verify the IP address.
     *
     * @see https://developer.amazon.com/support/amazonbot
     * @see https://support.google.com/webmasters/answer/80553?hl=en
     * @see https://www.bing.com/webmaster/help/which-crawlers-does-bing-use-8c184ec0
     * @see https://www.bing.com/webmaster/help/how-to-verify-bingbot-3905dc26
     * @see https://yandex.com/support/webmaster/robot-workings/check-yandex-robots.html
     */
    private const ROBOT_REV_FWD_DNS = [
        'Amazonbot'   => ['.crawl.amazon.com'],
        'bingbot'     => ['.search.msn.com'],
        'BingPreview' => ['.search.msn.com'],
        'Google'      => ['.google.com', '.googlebot.com'],
        'Mail.RU_Bot' => ['mail.ru'],
        'msnbot'      => ['.search.msn.com'],
        'Qwantify'    => ['.search.qwant.com'],
        'Sogou'       => ['.crawl.sogou.com'],
        'Yahoo'       => ['.crawl.yahoo.net'],
        'Yandex'      => ['.yandex.ru', '.yandex.net', '.yandex.com'],
    ];

    /**
     * Some search engines only use reverse DNS to verify the IP address.
     *
     * @see https://help.baidu.com/question?prod_id=99&class=0&id=3001
     * @see https://napoveda.seznam.cz/en/full-text-search/seznambot-crawler
     * @see https://www.ionos.de/terms-gtc/faq-crawler
     */
    private const ROBOT_REV_ONLY_DNS = [
        'Baiduspider' => ['.baidu.com', '.baidu.jp'],
        'FreshBot'    => ['.seznam.cz'],
        'IonCrawl'    => ['.1und1.org'],
    ];

    /**
     * Some search engines operate from designated IP addresses.
     *
     * @see https://www.apple.com/go/applebot
     * @see https://help.duckduckgo.com/duckduckgo-help-pages/results/duckduckbot
     */
    private const ROBOT_IPS = [
        'AppleBot'    => [
            '17.0.0.0/8',
        ],
        'Ask Jeeves'  => [
            '65.214.45.143',
            '65.214.45.148',
            '66.235.124.192',
            '66.235.124.7',
            '66.235.124.101',
            '66.235.124.193',
            '66.235.124.73',
            '66.235.124.196',
            '66.235.124.74',
            '63.123.238.8',
            '202.143.148.61',
        ],
        'DuckDuckBot' => [
            '23.21.227.69',
            '50.16.241.113',
            '50.16.241.114',
            '50.16.241.117',
            '50.16.247.234',
            '52.204.97.54',
            '52.5.190.19',
            '54.197.234.188',
            '54.208.100.253',
            '54.208.102.37',
            '107.21.1.8',
        ],
    ];

    /**
     * Some search engines operate from within a designated autonomous system.
     *
     * @see https://developers.facebook.com/docs/sharing/webmasters/crawler
     * @see https://www.facebook.com/peering/
     */
    private const ROBOT_ASNS = [
        'facebook' => ['AS32934', 'AS63293'],
        'twitter'  => ['AS13414'],
    ];

    /**
     * @param ServerRequestInterface  $request
     * @param RequestHandlerInterface $handler
     *
     * @return ResponseInterface
     */
    public function process(ServerRequestInterface $request, RequestHandlerInterface $handler): ResponseInterface
    {
        $ua      = Validator::serverParams($request)->string('HTTP_USER_AGENT', '');
        $ip      = Validator::attributes($request)->string('client-ip');
        $address = IPFactory::parseAddressString($ip);
        assert($address instanceof AddressInterface);

        foreach (self::BAD_ROBOTS as $robot) {
            if (str_contains($ua, $robot)) {
                return $this->response();
            }
        }

        foreach (self::ROBOT_REV_FWD_DNS as $robot => $valid_domains) {
            if (str_contains($ua, $robot) && !$this->checkRobotDNS($ip, $valid_domains, false)) {
                return $this->response();
            }
        }

        foreach (self::ROBOT_REV_ONLY_DNS as $robot => $valid_domains) {
            if (str_contains($ua, $robot) && !$this->checkRobotDNS($ip, $valid_domains, true)) {
                return $this->response();
            }
        }

        foreach (self::ROBOT_IPS as $robot => $valid_ips) {
            if (str_contains($ua, $robot)) {
                foreach ($valid_ips as $valid_ip) {
                    $range = IPFactory::parseRangeString($valid_ip);

                    if ($range instanceof RangeInterface && $range->contains($address)) {
                        continue 2;
                    }
                }

                return $this->response();
            }
        }

        // Block a robot that claims to be one of these operators unless its address
        // falls inside at least one of the ranges announced by the operator's ASNs.
        foreach (self::ROBOT_ASNS as $robot => $asns) {
            if (str_contains($ua, $robot)) {
                foreach ($asns as $asn) {
                    foreach ($this->fetchIpRangesForAsn($asn) as $range) {
                        if ($range->contains($address)) {
                            continue 3;
                        }
                    }
                }

                return $this->response();
            }
        }

        // Allow sites to block access from entire networks.
        $block_asn = Validator::attributes($request)->string('block_asn', '');
        preg_match_all('/(AS\d+)/', $block_asn, $matches);

        foreach ($matches[1] as $asn) {
            foreach ($this->fetchIpRangesForAsn($asn) as $range) {
                if ($range->contains($address)) {
                    return $this->response();
                }
            }
        }

        return $handler->handle($request);
    }

    /**
     * Check that an IP address belongs to a robot operator using a forward/reverse DNS lookup.
     *
     * @param string        $ip
     * @param array<string> $valid_domains
     * @param bool          $reverse_only
     *
     * @return bool
     */
    private function checkRobotDNS(string $ip, array $valid_domains, bool $reverse_only): bool
    {
        $host = gethostbyaddr($ip);

        if ($host === false) {
            return false;
        }

        foreach ($valid_domains as $domain) {
            if (str_ends_with($host, $domain)) {
                return $reverse_only || $ip === gethostbyname($host);
            }
        }

        return false;
    }

    /**
     * Perform a whois search for an ASN.
     *
     * @param string $asn - The autonomous system number to query
     *
     * @return array<RangeInterface>
     */
    private function fetchIpRangesForAsn(string $asn): array
    {
        return Registry::cache()->file()->remember('whois-asn-' . $asn, static function () use ($asn): array {
            $mapper = static fn (AsnRouteInfo $route_info): ?RangeInterface => IPFactory::parseRangeString($route_info->route ?: $route_info->route6);

            try {
                $loader = new CurlLoader(self::WHOIS_TIMEOUT);
                $whois  = new Whois($loader);
                $info   = $whois->loadAsnInfo($asn);
                $routes = $info->routes;
                $ranges = array_map($mapper, $routes);

                return array_filter($ranges);
            } catch (Throwable $ex) {
                return [];
            }
        }, random_int(self::WHOIS_TTL_MIN, self::WHOIS_TTL_MAX));
    }

    /**
     * @return ResponseInterface
     */
    private function response(): ResponseInterface
    {
        return response('Not acceptable', StatusCodeInterface::STATUS_NOT_ACCEPTABLE);
    }
}