xref: /webtrees/app/Http/Middleware/BadBotBlocker.php (revision 10d2770824f18773013bccd4c8be480c4cecac74)
<?php

/**
 * webtrees: online genealogy
 * Copyright (C) 2022 webtrees development team
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
 */

declare(strict_types=1);

namespace Fisharebest\Webtrees\Http\Middleware;

use Fig\Http\Message\StatusCodeInterface;
use Fisharebest\Webtrees\Registry;
use Fisharebest\Webtrees\Validator;
use GuzzleHttp\Client;
use GuzzleHttp\Exception\GuzzleException;
use Iodev\Whois\Loaders\CurlLoader;
use Iodev\Whois\Modules\Asn\AsnRouteInfo;
use Iodev\Whois\Whois;
use IPLib\Address\AddressInterface;
use IPLib\Factory as IPFactory;
use IPLib\Range\RangeInterface;
use Psr\Http\Message\ResponseInterface;
use Psr\Http\Message\ServerRequestInterface;
use Psr\Http\Server\MiddlewareInterface;
use Psr\Http\Server\RequestHandlerInterface;
use Throwable;

use function array_filter;
use function array_map;
use function assert;
use function gethostbyaddr;
use function gethostbyname;
use function preg_match_all;
use function random_int;
use function response;
use function str_contains;
use function str_ends_with;

/**
 * Middleware to block bad robots before they waste our valuable CPU cycles.
 */
class BadBotBlocker implements MiddlewareInterface
{
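    // REGEX_OCTET matches a single IPv4 octet (0-255); REGEX_IPV4 matches a complete dotted-quad address.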
    private const REGEX_OCTET = '(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)';
    private const REGEX_IPV4  = '/\\b' . self::REGEX_OCTET . '(?:\\.' . self::REGEX_OCTET . '){3}\\b/';

    // Cache whois requests.  Try to avoid all caches expiring at the same time.
    private const WHOIS_TTL_MIN = 28 * 86400;
    private const WHOIS_TTL_MAX = 35 * 86400;
    private const WHOIS_TIMEOUT = 5;

    // Bad robots - SEO optimisers, advertisers, etc.  This list is shared with robots.txt.
    public const BAD_ROBOTS = [
        'admantx',
        'Adsbot',
        'AhrefsBot',
        'Amazonbot', // Until it understands crawl-delay and noindex / nofollow
        'AspiegelBot',
        'Barkrowler',
        'BLEXBot',
        'DataForSEO',
        'DataForSeoBot', // https://dataforseo.com/dataforseo-bot
        'DotBot',
        'Grapeshot',
        'Honolulu-bot', // Aggressive crawler, no info available
        'ia_archiver',
        'linabot', // Aggressive crawler, no info available
        'Linguee',
        'MegaIndex.ru',
        'MJ12bot',
        'netEstate NE',
        'panscient',
        'PetalBot',
        'proximic',
        'SeekportBot', // Pretends to be a search engine - but isn't
        'SemrushBot',
        'serpstatbot',
        'SEOkicks',
        'SiteKiosk',
        'Turnitin',
        'wp_is_mobile', // Nothing to do with WordPress
        'XoviBot',
        'ZoominfoBot',
    ];

    /**
     * Some search engines use reverse/forward DNS to verify the IP address.
     *
     * @see https://developer.amazon.com/support/amazonbot
     * @see https://support.google.com/webmasters/answer/80553?hl=en
     * @see https://www.bing.com/webmaster/help/which-crawlers-does-bing-use-8c184ec0
     * @see https://www.bing.com/webmaster/help/how-to-verify-bingbot-3905dc26
     * @see https://yandex.com/support/webmaster/robot-workings/check-yandex-robots.html
     * @see https://www.mojeek.com/bot.html
     * @see https://support.apple.com/en-gb/HT204683
     */
    private const ROBOT_REV_FWD_DNS = [
        'Amazonbot'   => ['.crawl.amazon.com'],
        'Applebot'    => ['.applebot.apple.com'],
        'bingbot'     => ['.search.msn.com'],
        'BingPreview' => ['.search.msn.com'],
        'Google'      => ['.google.com', '.googlebot.com'],
        'MojeekBot'   => ['.mojeek.com'],
        'Mail.RU_Bot' => ['.mail.ru'],
        'msnbot'      => ['.search.msn.com'],
        'Qwantify'    => ['.search.qwant.com'],
        'Sogou'       => ['.crawl.sogou.com'],
        'Yahoo'       => ['.crawl.yahoo.net'],
        'Yandex'      => ['.yandex.ru', '.yandex.net', '.yandex.com'],
    ];

    /**
     * Some search engines only use reverse DNS to verify the IP address.
     *
     * @see https://help.baidu.com/question?prod_id=99&class=0&id=3001
     * @see https://napoveda.seznam.cz/en/full-text-search/seznambot-crawler
     * @see https://www.ionos.de/terms-gtc/faq-crawler
     */
    private const ROBOT_REV_ONLY_DNS = [
        'Baiduspider' => ['.baidu.com', '.baidu.jp'],
        'FreshBot'    => ['.seznam.cz'],
        'IonCrawl'    => ['.1und1.org'],
        'Neevabot'    => ['.neeva.com'],
    ];

    /**
     * Some search engines operate from designated IP addresses.
     *
     * @see https://www.apple.com/go/applebot
     * @see https://help.duckduckgo.com/duckduckgo-help-pages/results/duckduckbot
     */
    private const ROBOT_IPS = [
        'AppleBot'    => [
            '17.0.0.0/8',
        ],
        'Ask Jeeves'  => [
            '65.214.45.143',
            '65.214.45.148',
            '66.235.124.192',
            '66.235.124.7',
            '66.235.124.101',
            '66.235.124.193',
            '66.235.124.73',
            '66.235.124.196',
            '66.235.124.74',
            '63.123.238.8',
            '202.143.148.61',
        ],
        'DuckDuckBot' => [
            '23.21.227.69',
            '50.16.241.113',
            '50.16.241.114',
            '50.16.241.117',
            '50.16.247.234',
            '52.204.97.54',
            '52.5.190.19',
            '54.197.234.188',
            '54.208.100.253',
            '54.208.102.37',
            '107.21.1.8',
        ],
    ];

    /**
     * Some search engines publish a list of the IP addresses they operate from.
     *
     * @see https://bot.seekport.com/
     */
    private const ROBOT_IP_FILES = [
        'SeekportBot' => 'https://bot.seekport.com/seekportbot_ips.txt',
    ];

    /**
     * Some search engines operate from within a designated autonomous system.
     *
     * @see https://developers.facebook.com/docs/sharing/webmasters/crawler
     * @see https://www.facebook.com/peering/
     */
    private const ROBOT_ASNS = [
        'facebook' => ['AS32934', 'AS63293'],
        'twitter'  => ['AS13414'],
    ];

    /**
     * @param ServerRequestInterface  $request
     * @param RequestHandlerInterface $handler
     *
     * @return ResponseInterface
     */
    public function process(ServerRequestInterface $request, RequestHandlerInterface $handler): ResponseInterface
    {
        $ua      = Validator::serverParams($request)->string('HTTP_USER_AGENT', '');
        $ip      = Validator::attributes($request)->string('client-ip');
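        // The client-ip attribute is set by earlier middleware, so it should always parse to a valid address.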
        $address = IPFactory::parseAddressString($ip);
        assert($address instanceof AddressInterface);

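        // Block any user-agent that identifies itself as one of the known bad robots.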
        foreach (self::BAD_ROBOTS as $robot) {
            if (str_contains($ua, $robot)) {
                return $this->response();
            }
        }

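        // Robots claiming to be these search engines must pass a reverse DNS lookup followed by a forward DNS check.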
        foreach (self::ROBOT_REV_FWD_DNS as $robot => $valid_domains) {
            if (str_contains($ua, $robot) && !$this->checkRobotDNS($ip, $valid_domains, false)) {
                return $this->response();
            }
        }

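        // These operators are verified by reverse DNS only - the forward check is skipped.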
        foreach (self::ROBOT_REV_ONLY_DNS as $robot => $valid_domains) {
            if (str_contains($ua, $robot) && !$this->checkRobotDNS($ip, $valid_domains, true)) {
                return $this->response();
            }
        }

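        // Robots claiming to be these operators must connect from one of their published IP addresses or ranges.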
        foreach (self::ROBOT_IPS as $robot => $valid_ip_ranges) {
            if (str_contains($ua, $robot)) {
                foreach ($valid_ip_ranges as $ip_range) {
                    $range = IPFactory::parseRangeString($ip_range);

                    if ($range instanceof RangeInterface && $range->contains($address)) {
                        continue 2;
                    }
                }

                return $this->response();
            }
        }

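        // These operators publish their IP addresses in a remote file, which is fetched and cached.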
        foreach (self::ROBOT_IP_FILES as $robot => $url) {
            if (str_contains($ua, $robot)) {
                $valid_ip_ranges = $this->fetchIpRangesForUrl($robot, $url);

                foreach ($valid_ip_ranges as $ip_range) {
                    $range = IPFactory::parseRangeString($ip_range);

                    if ($range instanceof RangeInterface && $range->contains($address)) {
                        continue 2;
                    }
                }

                return $this->response();
            }
        }

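        // These operators crawl from within their own autonomous systems - block the request unless the address is announced by one of them.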
        foreach (self::ROBOT_ASNS as $robot => $asns) {
            if (str_contains($ua, $robot)) {
                foreach ($asns as $asn) {
                    foreach ($this->fetchIpRangesForAsn($asn) as $range) {
                        if ($range->contains($address)) {
                            continue 3;
                        }
                    }
                }

                return $this->response();
            }
        }

        // Allow sites to block access from entire networks.
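        // The block_asn setting is a list of autonomous system numbers, e.g. "AS1234 AS5678".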
        $block_asn = Validator::attributes($request)->string('block_asn', '');
        preg_match_all('/(AS\d+)/', $block_asn, $matches);

        foreach ($matches[1] as $asn) {
            foreach ($this->fetchIpRangesForAsn($asn) as $range) {
                if ($range->contains($address)) {
                    return $this->response();
                }
            }
        }

        return $handler->handle($request);
    }

    /**
     * Check that an IP address belongs to a robot operator using a forward/reverse DNS lookup.
     *
     * @param string        $ip
     * @param array<string> $valid_domains
     * @param bool          $reverse_only
     *
     * @return bool
     */
    private function checkRobotDNS(string $ip, array $valid_domains, bool $reverse_only): bool
    {
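        // Reverse lookup: gethostbyaddr() returns false on malformed input, or the unmodified IP when there is no PTR record.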
        $host = gethostbyaddr($ip);

        if ($host === false) {
            return false;
        }

        foreach ($valid_domains as $domain) {
            if (str_ends_with($host, $domain)) {
                return $reverse_only || $ip === gethostbyname($host);
            }
        }

        return false;
    }

    /**
     * Perform a whois search for an ASN.
     *
     * @param string $asn - The autonomous system number to query
     *
     * @return array<RangeInterface>
     */
    private function fetchIpRangesForAsn(string $asn): array
    {
        return Registry::cache()->file()->remember('whois-asn-' . $asn, static function () use ($asn): array {
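            // Convert each announced route (IPv4 or IPv6) to a range; unparseable routes become null and are filtered out below.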
            $mapper = static fn (AsnRouteInfo $route_info): ?RangeInterface => IPFactory::parseRangeString($route_info->route ?: $route_info->route6);

            try {
                $loader = new CurlLoader(self::WHOIS_TIMEOUT);
                $whois  = new Whois($loader);
                $info   = $whois->loadAsnInfo($asn);
                $routes = $info->routes;
                $ranges = array_map($mapper, $routes);

                return array_filter($ranges);
            } catch (Throwable) {
                return [];
            }
        }, random_int(self::WHOIS_TTL_MIN, self::WHOIS_TTL_MAX));
    }

    /**
     * Fetch a list of IP addresses from a remote file.
     *
     * @param string $ua
     * @param string $url
     *
     * @return array<string>
     */
    private function fetchIpRangesForUrl(string $ua, string $url): array
    {
        return Registry::cache()->file()->remember('url-ip-list-' . $ua, static function () use ($url): array {
            try {
                $client   = new Client();
                $response = $client->get($url, ['timeout' => 5]);
                $contents = $response->getBody()->getContents();

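                // Extract every IPv4 address found in the file; the layout of the file itself does not matter.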
                preg_match_all(self::REGEX_IPV4, $contents, $matches);

                return $matches[0];
            } catch (GuzzleException) {
                return [];
            }
        }, random_int(self::WHOIS_TTL_MIN, self::WHOIS_TTL_MAX));
    }

    /**
     * @return ResponseInterface
     */
    private function response(): ResponseInterface
    {
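        // Send HTTP 406 (Not Acceptable) and stop processing the request.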
        return response('Not acceptable', StatusCodeInterface::STATUS_NOT_ACCEPTABLE);
    }
}