xref: /webtrees/app/Http/Middleware/BadBotBlocker.php (revision 0036e960eaf4376dc006570a22a09ad41d8da137)
<?php

/**
 * webtrees: online genealogy
 * Copyright (C) 2023 webtrees development team
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
 */

declare(strict_types=1);

namespace Fisharebest\Webtrees\Http\Middleware;

use Fig\Http\Message\StatusCodeInterface;
use Fisharebest\Webtrees\Registry;
use Fisharebest\Webtrees\Validator;
use GuzzleHttp\Client;
use GuzzleHttp\Exception\GuzzleException;
use Iodev\Whois\Loaders\CurlLoader;
use Iodev\Whois\Modules\Asn\AsnRouteInfo;
use Iodev\Whois\Whois;
use IPLib\Address\AddressInterface;
use IPLib\Factory as IPFactory;
use IPLib\Range\RangeInterface;
use Psr\Http\Message\ResponseInterface;
use Psr\Http\Message\ServerRequestInterface;
use Psr\Http\Server\MiddlewareInterface;
use Psr\Http\Server\RequestHandlerInterface;
use Throwable;

use function array_filter;
use function array_map;
use function assert;
use function gethostbyaddr;
use function gethostbyname;
use function preg_match_all;
use function random_int;
use function response;
use function str_contains;
use function str_ends_with;

/**
 * Middleware to block bad robots before they waste our valuable CPU cycles.
 */
class BadBotBlocker implements MiddlewareInterface
{
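    // REGEX_IPV4 matches a dotted-quad IPv4 address (each octet 0-255).  It is used to
    // extract candidate addresses from the plain-text IP lists fetched by fetchIpRangesForUrl().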
    private const REGEX_OCTET = '(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)';
    private const REGEX_IPV4  = '/\\b' . self::REGEX_OCTET . '(?:\\.' . self::REGEX_OCTET . '){3}\\b/';

    // Cache whois requests.  Try to avoid all caches expiring at the same time.
    private const WHOIS_TTL_MIN = 28 * 86400;
    private const WHOIS_TTL_MAX = 35 * 86400;
    private const WHOIS_TIMEOUT = 5;

    // Bad robots - SEO optimisers, advertisers, etc.  This list is shared with robots.txt.
    public const BAD_ROBOTS = [
        'admantx',
        'Adsbot',
        'AhrefsBot',
        'Amazonbot', // Until it understands crawl-delay and noindex / nofollow
        'AspiegelBot',
        'Awario', // Brand management
        'Barkrowler',
        'BLEXBot',
        'DataForSEO',
        'DataForSeoBot', // https://dataforseo.com/dataforseo-bot
        'DotBot',
        'Grapeshot',
        'Honolulu-bot', // Aggressive crawler, no information available
        'ia_archiver',
        'linabot', // Aggressive crawler, no information available
        'Linguee',
        'MegaIndex.ru',
        'MJ12bot',
        'netEstate NE',
        'panscient',
        'PetalBot',
        'proximic',
        'SeekportBot', // Pretends to be a search engine - but isn't
        'SemrushBot',
        'serpstatbot',
        'SEOkicks',
        'SiteKiosk',
        'Turnitin',
        'wp_is_mobile', // Nothing to do with WordPress
        'XoviBot',
        'ZoominfoBot',
    ];

    /**
     * Some search engines use reverse/forward DNS to verify the IP address.
     *
     * @see https://developer.amazon.com/support/amazonbot
     * @see https://support.google.com/webmasters/answer/80553?hl=en
     * @see https://www.bing.com/webmaster/help/which-crawlers-does-bing-use-8c184ec0
     * @see https://www.bing.com/webmaster/help/how-to-verify-bingbot-3905dc26
     * @see https://yandex.com/support/webmaster/robot-workings/check-yandex-robots.html
     * @see https://www.mojeek.com/bot.html
     * @see https://support.apple.com/en-gb/HT204683
     */
    private const ROBOT_REV_FWD_DNS = [
        'Amazonbot'   => ['.crawl.amazon.com'],
        'Applebot'    => ['.applebot.apple.com'],
        'bingbot'     => ['.search.msn.com'],
        'BingPreview' => ['.search.msn.com'],
        'Google'      => ['.google.com', '.googlebot.com'],
        'MojeekBot'   => ['.mojeek.com'],
        'Mail.RU_Bot' => ['.mail.ru'],
        'msnbot'      => ['.search.msn.com'],
        'Qwantify'    => ['.search.qwant.com'],
        'Sogou'       => ['.crawl.sogou.com'],
        'Yahoo'       => ['.crawl.yahoo.net'],
        'Yandex'      => ['.yandex.ru', '.yandex.net', '.yandex.com'],
    ];

    /**
     * Some search engines only use reverse DNS to verify the IP address.
     *
     * @see https://help.baidu.com/question?prod_id=99&class=0&id=3001
     * @see https://napoveda.seznam.cz/en/full-text-search/seznambot-crawler
     * @see https://www.ionos.de/terms-gtc/faq-crawler
     */
    private const ROBOT_REV_ONLY_DNS = [
        'Baiduspider' => ['.baidu.com', '.baidu.jp'],
        'FreshBot'    => ['.seznam.cz'],
        'IonCrawl'    => ['.1und1.org'],
        'Neevabot'    => ['.neeva.com'],
    ];

    /**
     * Some search engines operate from designated IP addresses.
     *
     * @see https://www.apple.com/go/applebot
     * @see https://help.duckduckgo.com/duckduckgo-help-pages/results/duckduckbot
     */
    private const ROBOT_IPS = [
        'AppleBot'    => [
            '17.0.0.0/8',
        ],
        'Ask Jeeves'  => [
            '65.214.45.143',
            '65.214.45.148',
            '66.235.124.192',
            '66.235.124.7',
            '66.235.124.101',
            '66.235.124.193',
            '66.235.124.73',
            '66.235.124.196',
            '66.235.124.74',
            '63.123.238.8',
            '202.143.148.61',
        ],
        'DuckDuckBot' => [
            '23.21.227.69',
            '50.16.241.113',
            '50.16.241.114',
            '50.16.241.117',
            '50.16.247.234',
            '52.204.97.54',
            '52.5.190.19',
            '54.197.234.188',
            '54.208.100.253',
            '54.208.102.37',
            '107.21.1.8',
        ],
    ];

    /**
     * Some search engines publish a list of the IP addresses they operate from.
     *
     * @see https://bot.seekport.com/
     */
    private const ROBOT_IP_FILES = [
        'SeekportBot' => 'https://bot.seekport.com/seekportbot_ips.txt',
    ];

    /**
     * Some robots operate from within a designated autonomous system.
     *
     * @see https://developers.facebook.com/docs/sharing/webmasters/crawler
     * @see https://www.facebook.com/peering/
     */
    private const ROBOT_ASNS = [
        'facebook' => ['AS32934', 'AS63293'],
        'twitter'  => ['AS13414'],
    ];

    /**
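     * Block the request if the user-agent matches a known bad robot, or if it claims
     * to be a known robot but cannot be verified by DNS, published IP ranges, IP-list
     * files or whois/ASN lookups.  Finally, block any address that falls within an
     * autonomous system blocked by the site configuration.
     *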
     * @param ServerRequestInterface  $request
     * @param RequestHandlerInterface $handler
     *
     * @return ResponseInterface
     */
    public function process(ServerRequestInterface $request, RequestHandlerInterface $handler): ResponseInterface
    {
        $ua      = Validator::serverParams($request)->string('HTTP_USER_AGENT', '');
        $ip      = Validator::attributes($request)->string('client-ip');
        $address = IPFactory::parseAddressString($ip);
        assert($address instanceof AddressInterface);

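        // Robots that are always blocked, regardless of which IP address they come from.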
        foreach (self::BAD_ROBOTS as $robot) {
            if (str_contains($ua, $robot)) {
                return $this->response();
            }
        }

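        // Robots that are verified by a reverse DNS lookup followed by a forward lookup.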
        foreach (self::ROBOT_REV_FWD_DNS as $robot => $valid_domains) {
            if (str_contains($ua, $robot) && !$this->checkRobotDNS($ip, $valid_domains, false)) {
                return $this->response();
            }
        }

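        // Robots that can only be verified by a reverse DNS lookup.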
        foreach (self::ROBOT_REV_ONLY_DNS as $robot => $valid_domains) {
            if (str_contains($ua, $robot) && !$this->checkRobotDNS($ip, $valid_domains, true)) {
                return $this->response();
            }
        }

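        // Robots that operate from fixed, published IP ranges.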
        foreach (self::ROBOT_IPS as $robot => $valid_ip_ranges) {
            if (str_contains($ua, $robot)) {
                foreach ($valid_ip_ranges as $ip_range) {
                    $range = IPFactory::parseRangeString($ip_range);

                    if ($range instanceof RangeInterface && $range->contains($address)) {
                        continue 2;
                    }
                }

                return $this->response();
            }
        }

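        // Robots that publish a list of their IP addresses in a remote text file.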
        foreach (self::ROBOT_IP_FILES as $robot => $url) {
            if (str_contains($ua, $robot)) {
                $valid_ip_ranges = $this->fetchIpRangesForUrl($robot, $url);

                foreach ($valid_ip_ranges as $ip_range) {
                    $range = IPFactory::parseRangeString($ip_range);

                    if ($range instanceof RangeInterface && $range->contains($address)) {
                        continue 2;
                    }
                }

                return $this->response();
            }
        }

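        // Robots that operate from within a known autonomous system (verified via whois).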
        foreach (self::ROBOT_ASNS as $robot => $asns) {
            if (str_contains($ua, $robot)) {
                foreach ($asns as $asn) {
                    foreach ($this->fetchIpRangesForAsn($asn) as $range) {
                        if ($range->contains($address)) {
                            continue 3;
                        }
                    }
                }

                return $this->response();
            }
        }

        // Allow sites to block access from entire networks.
        $block_asn = Validator::attributes($request)->string('block_asn', '');
        preg_match_all('/(AS\d+)/', $block_asn, $matches);

        foreach ($matches[1] as $asn) {
            foreach ($this->fetchIpRangesForAsn($asn) as $range) {
                if ($range->contains($address)) {
                    return $this->response();
                }
            }
        }

        return $handler->handle($request);
    }

    /**
     * Check that an IP address belongs to a robot operator using a forward/reverse DNS lookup.
     *
     * @param string        $ip
     * @param array<string> $valid_domains
     * @param bool          $reverse_only
     *
     * @return bool
     */
    private function checkRobotDNS(string $ip, array $valid_domains, bool $reverse_only): bool
    {
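        // Forward-confirmed reverse DNS: resolve the IP address to a host name, check that
        // the host name belongs to one of the robot's domains and, unless $reverse_only is
        // set, confirm that the host name resolves back to the same IP address.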
        $host = gethostbyaddr($ip);

        if ($host === false) {
            return false;
        }

        foreach ($valid_domains as $domain) {
            if (str_ends_with($host, $domain)) {
                return $reverse_only || $ip === gethostbyname($host);
            }
        }

        return false;
    }

    /**
     * Perform a whois search for an ASN.
     *
     * @param string $asn - The autonomous system number to query
     *
     * @return array<RangeInterface>
     */
    private function fetchIpRangesForAsn(string $asn): array
    {
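        // Whois results are cached for four to five weeks; the randomised TTL stops every
        // cached ASN from expiring at the same time.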
        return Registry::cache()->file()->remember('whois-asn-' . $asn, static function () use ($asn): array {
            $mapper = static fn (AsnRouteInfo $route_info): ?RangeInterface => IPFactory::parseRangeString($route_info->route ?: $route_info->route6);

            try {
                $loader = new CurlLoader(self::WHOIS_TIMEOUT);
                $whois  = new Whois($loader);
                $info   = $whois->loadAsnInfo($asn);
                $routes = $info->routes;
                $ranges = array_map($mapper, $routes);

                return array_filter($ranges);
            } catch (Throwable) {
                return [];
            }
        }, random_int(self::WHOIS_TTL_MIN, self::WHOIS_TTL_MAX));
    }

    /**
     * Fetch a list of IP addresses from a remote file.
     *
     * @param string $ua
     * @param string $url
     *
     * @return array<string>
     */
    private function fetchIpRangesForUrl(string $ua, string $url): array
    {
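        // Download the robot's published IP list (cached with a jittered TTL) and extract
        // every IPv4 address found in it.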
        return Registry::cache()->file()->remember('url-ip-list-' . $ua, static function () use ($url): array {
            try {
                $client   = new Client();
                $response = $client->get($url, ['timeout' => 5]);
                $contents = $response->getBody()->getContents();

                preg_match_all(self::REGEX_IPV4, $contents, $matches);

                return $matches[0];
            } catch (GuzzleException) {
                return [];
            }
        }, random_int(self::WHOIS_TTL_MIN, self::WHOIS_TTL_MAX));
    }

    /**
     * @return ResponseInterface
     */
    private function response(): ResponseInterface
    {
        return response('Not acceptable', StatusCodeInterface::STATUS_NOT_ACCEPTABLE);
    }
}