<?php

declare(strict_types=1);

namespace Fisharebest\Webtrees\Http\RequestHandlers;

use Fisharebest\Webtrees\Http\Middleware\BadBotBlocker;
use Fisharebest\Webtrees\Module\SiteMapModule;
use Fisharebest\Webtrees\Services\ModuleService;
use Fisharebest\Webtrees\Services\TreeService;
use Fisharebest\Webtrees\Tree;
use Fisharebest\Webtrees\Validator;
use Psr\Http\Message\ResponseInterface;
use Psr\Http\Message\ServerRequestInterface;
use Psr\Http\Server\RequestHandlerInterface;

use function parse_url;
use function response;
use function route;
use function view;

use const PHP_URL_PATH;

/**
 * Generate a robots exclusion file.
 *
 * @link https://robotstxt.org
 */
class RobotsTxt implements RequestHandlerInterface
{
    // Private areas of the site that robots should not crawl.
    private const DISALLOWED_PATHS = [
        'admin',
        'manager',
        'moderator',
        'editor',
        'account',
    ];

    private ModuleService $module_service;

    private TreeService $tree_service;

    /**
     * @param ModuleService $module_service
     * @param TreeService   $tree_service
     */
    public function __construct(ModuleService $module_service, TreeService $tree_service)
    {
        $this->module_service = $module_service;
        $this->tree_service   = $tree_service;
    }

    /**
     * @param ServerRequestInterface $request
     *
     * @return ResponseInterface
     */
    public function handle(ServerRequestInterface $request): ResponseInterface
    {
        $base_url = Validator::attributes($request)->string('base_url');

        $trees = $this->tree_service->all()->map(static fn (Tree $tree): string => $tree->name());

        // The site may be installed in a sub-folder, so pass the path part of
        // the base URL to the template as a prefix for the disallowed paths.
        $data = [
            'bad_user_agents'  => BadBotBlocker::BAD_ROBOTS,
            'base_url'         => $base_url,
            'base_path'        => parse_url($base_url, PHP_URL_PATH) ?? '',
            'disallowed_paths' => self::DISALLOWED_PATHS,
            'sitemap_url'      => '',
            'trees'            => $trees,
        ];

        // Only advertise a sitemap if the sitemap module is enabled.
        $sitemap_module = $this->module_service->findByInterface(SiteMapModule::class)->first();

        if ($sitemap_module instanceof SiteMapModule) {
            $data['sitemap_url'] = route('sitemap-index');
        }

        return response(view('robots-txt', $data))
            ->withHeader('content-type', 'text/plain');
    }
}
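
/*
 * Illustrative example only. The exact output depends on the 'robots-txt'
 * view, which is not shown here. Assuming a site installed at
 * https://example.com/webtrees (so base_path is "/webtrees"), the SiteMap
 * module enabled, and "ExampleBadBot" standing in for one of the user agents
 * listed in BadBotBlocker::BAD_ROBOTS, the handler would serve a text/plain
 * response roughly like:
 *
 *     User-agent: *
 *     Disallow: /webtrees/admin
 *     Disallow: /webtrees/manager
 *     Disallow: /webtrees/moderator
 *     Disallow: /webtrees/editor
 *     Disallow: /webtrees/account
 *
 *     User-agent: ExampleBadBot
 *     Disallow: /
 *
 *     Sitemap: (URL generated by route('sitemap-index'))
 *
 * The tree names are also passed to the view, so the template can emit
 * per-tree rules as well.
 */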