# DoonDefenceDreamers.com - robots.txt
# Open to search engines + AI assistants; block only admin, search & junk params.

User-agent: Googlebot
Allow: /

User-agent: *
Disallow: /wp-admin/
Allow: /wp-admin/admin-ajax.php
# Block thin/duplicate URLs & tracking params
Disallow: /*?s=
Disallow: /*&s=
Disallow: /*?replytocom=
Disallow: /*&replytocom=
Disallow: /*?*preview=true
Disallow: /*&preview=true
Disallow: /*?*attachment_id=
Disallow: /*&attachment_id=
Disallow: /*?*add-to-cart=
Disallow: /*&add-to-cart=
Disallow: /*?*orderby=
Disallow: /*&orderby=
Disallow: /*?*utm_
Disallow: /*&utm_
Disallow: /*?*gclid=
Disallow: /*&gclid=
Disallow: /*?*fbclid=
Disallow: /*&fbclid=
Disallow: /trackback/
Disallow: /xmlrpc.php
# Keep assets crawlable
Allow: /wp-content/uploads/
Allow: /wp-includes/js/
Allow: /wp-content/themes/
Allow: /wp-json/

# --- AI / LLM allow-list (search & assistants visibility) ---
User-agent: OAI-SearchBot
Allow: /

User-agent: GPTBot
Allow: /

User-agent: Google-Extended
Allow: /

User-agent: PerplexityBot
Allow: /

User-agent: CCBot
Allow: /

User-agent: ClaudeBot
Allow: /

User-agent: Claude-Web
Allow: /

User-agent: Applebot-Extended
Allow: /

User-agent: Meta-ExternalAgent
Allow: /

# Optional: gentle crawl pacing for engines that support it
User-agent: Bingbot
Crawl-delay: 5

User-agent: msnbot
Crawl-delay: 5

User-agent: Yandex
Crawl-delay: 5

Sitemap: https://doondefencedreamers.com/sitemap_index.xml