{"id":85798,"date":"2018-05-16T18:09:21","date_gmt":"2018-05-16T18:09:21","guid":{"rendered":"https:\/\/wordpress.org\/plugins\/better-robots-txt-index-rank-booster-by-pagup\/"},"modified":"2026-03-31T13:59:22","modified_gmt":"2026-03-31T13:59:22","slug":"better-robots-txt","status":"publish","type":"plugin","link":"https:\/\/azb.wordpress.org\/plugins\/better-robots-txt\/","author":16149021,"comment_status":"closed","ping_status":"closed","template":"","meta":{"_crdt_document":"","version":"3.0.1","stable_tag":"3.0.1","tested":"6.9.4","requires":"5.0","requires_php":"7.4","requires_plugins":null,"header_name":"Better Robots.txt - AI-Ready Crawl Control & Bot Governance","header_author":"Pagup","header_description":"Better Robots.txt creates a virtual robots.txt file and helps you boosting your website indexation and ranking by adding specific instructions in your robots.txt","assets_banners_color":"3f4f63","last_updated":"2026-03-31 13:59:22","external_support_url":"","external_repository_url":"","donate_link":"","header_plugin_uri":"","header_author_uri":"https:\/\/pagup.com\/","rating":4.5,"author_block_rating":0,"active_installs":6000,"downloads":308720,"num_ratings":103,"support_threads":1,"support_threads_resolved":1,"author_block_count":0,"sections":["description","installation","faq","changelog"],"tags":{"1.0.0":{"tag":"1.0.0","author":"pagup","date":"2018-05-16 18:13:27"},"1.0.1":{"tag":"1.0.1","author":"pagup","date":"2018-05-16 19:51:45"},"1.0.2":{"tag":"1.0.2","author":"pagup","date":"2018-05-24 10:33:44"},"1.1.0":{"tag":"1.1.0","author":"pagup","date":"2018-06-08 20:12:11"},"1.1.1":{"tag":"1.1.1","author":"pagup","date":"2018-07-07 17:55:38"},"1.5.2":{"tag":"1.5.2","author":"pagup","date":"2024-03-07 22:42:10"},"2.0.2":{"tag":"2.0.2","author":"pagup","date":"2025-11-20 03:37:04"},"2.0.4":{"tag":"2.0.4","author":"pagup","date":"2026-02-02 17:10:14"},"3.0.0":{"tag":"3.0.0","author":"pagup","date":"2026-03-30 
13:17:37"},"3.0.1":{"tag":"3.0.1","author":"pagup","date":"2026-03-31 13:59:22"}},"upgrade_notice":[],"ratings":{"1":10,"2":2,"3":2,"4":2,"5":87},"assets_icons":{"icon-128x128.png":{"filename":"icon-128x128.png","revision":1941286,"resolution":"128x128","location":"assets","locale":""},"icon-256x256.png":{"filename":"icon-256x256.png","revision":1941286,"resolution":"256x256","location":"assets","locale":""}},"assets_banners":{"banner-1544x500.jpg":{"filename":"banner-1544x500.jpg","revision":3479411,"resolution":"1544x500","location":"assets","locale":""},"banner-772x250.jpg":{"filename":"banner-772x250.jpg","revision":3479411,"resolution":"772x250","location":"assets","locale":""}},"assets_blueprints":{},"all_blocks":[],"tagged_versions":["1.0.0","1.0.1","1.0.2","1.1.0","1.1.1","1.5.2","2.0.2","2.0.4","3.0.0","3.0.1"],"block_files":[],"assets_screenshots":{"screenshot-1.jpg":{"filename":"screenshot-1.jpg","revision":3479411,"resolution":"1","location":"assets","locale":""},"screenshot-2.jpg":{"filename":"screenshot-2.jpg","revision":3479455,"resolution":"2","location":"assets","locale":""},"screenshot-3.jpg":{"filename":"screenshot-3.jpg","revision":3479455,"resolution":"3","location":"assets","locale":""},"screenshot-4.jpg":{"filename":"screenshot-4.jpg","revision":3479455,"resolution":"4","location":"assets","locale":""},"screenshot-5.jpg":{"filename":"screenshot-5.jpg","revision":3479455,"resolution":"5","location":"assets","locale":""},"screenshot-6.jpg":{"filename":"screenshot-6.jpg","revision":3479455,"resolution":"6","location":"assets","locale":""},"screenshot-7.jpg":{"filename":"screenshot-7.jpg","revision":3479455,"resolution":"7","location":"assets","locale":""},"screenshot-8.jpg":{"filename":"screenshot-8.jpg","revision":3479455,"resolution":"8","location":"assets","locale":""}},"screenshots":{"1":"Step 0 - Preset selection with product introduction and setup guidance.","2":"Step 1 - Search engine visibility controls.","3":"Step 2 - AI and LLM 
governance settings.","4":"Step 4 - Bad bot protection options.","5":"Step 8 - WooCommerce cleanup settings.","6":"Step 10 - Social media crawler controls.","7":"Step 13 - Advanced settings and output options.","8":"Step 14 - Review &amp; Save preview screen."},"jetpack_post_was_ever_published":false},"plugin_section":[],"plugin_tags":[246479,13859,244604,12753,186],"plugin_category":[55],"plugin_contributors":[78154,176401,157123],"plugin_business_model":[],"class_list":["post-85798","plugin","type-plugin","status-publish","hentry","plugin_tags-ai-crawlers","plugin_tags-bot-blocker","plugin_tags-llms-txt","plugin_tags-robots-txt","plugin_tags-seo","plugin_category-seo-and-marketing","plugin_contributors-freemius","plugin_contributors-pagup","plugin_contributors-the-rock","plugin_committers-pagup","plugin_committers-the-rock"],"banners":{"banner":"https:\/\/ps.w.org\/better-robots-txt\/assets\/banner-772x250.jpg?rev=3479411","banner_2x":"https:\/\/ps.w.org\/better-robots-txt\/assets\/banner-1544x500.jpg?rev=3479411","banner_rtl":false,"banner_2x_rtl":false},"icons":{"svg":false,"icon":"https:\/\/ps.w.org\/better-robots-txt\/assets\/icon-128x128.png?rev=1941286","icon_2x":"https:\/\/ps.w.org\/better-robots-txt\/assets\/icon-256x256.png?rev=1941286","generated":false},"screenshots":[{"src":"https:\/\/ps.w.org\/better-robots-txt\/assets\/screenshot-1.jpg?rev=3479411","caption":"Step 0 - Preset selection with product introduction and setup guidance."},{"src":"https:\/\/ps.w.org\/better-robots-txt\/assets\/screenshot-2.jpg?rev=3479455","caption":"Step 1 - Search engine visibility controls."},{"src":"https:\/\/ps.w.org\/better-robots-txt\/assets\/screenshot-3.jpg?rev=3479455","caption":"Step 2 - AI and LLM governance settings."},{"src":"https:\/\/ps.w.org\/better-robots-txt\/assets\/screenshot-4.jpg?rev=3479455","caption":"Step 4 - Bad bot protection options."},{"src":"https:\/\/ps.w.org\/better-robots-txt\/assets\/screenshot-5.jpg?rev=3479455","caption":"Step 8 - 
WooCommerce cleanup settings."},{"src":"https:\/\/ps.w.org\/better-robots-txt\/assets\/screenshot-6.jpg?rev=3479455","caption":"Step 10 - Social media crawler controls."},{"src":"https:\/\/ps.w.org\/better-robots-txt\/assets\/screenshot-7.jpg?rev=3479455","caption":"Step 13 - Advanced settings and output options."},{"src":"https:\/\/ps.w.org\/better-robots-txt\/assets\/screenshot-8.jpg?rev=3479455","caption":"Step 14 - Review &amp; Save preview screen."}],"raw_content":"<!--section=description-->\n<p>Better Robots.txt replaces the default WordPress robots.txt workflow with a smarter, structured version you can configure and preview before publishing.<\/p>\n\n<p>Instead of a blank textarea, you get a guided wizard with presets, plain-language explanations, and a final Review &amp; Save step so you can inspect the generated robots.txt before it goes live.<\/p>\n\n<p>Built for beginners and advanced users alike, Better Robots.txt helps you control how search engines, AI crawlers, SEO tools, archive bots, bad bots, social preview bots, and other automated agents interact with your site.<\/p>\n\n<p>Trusted by thousands of WordPress sites, Better Robots.txt is designed for the AI era without resorting to hype, vague promises, or hidden rules.<\/p>\n\n<p>Better Robots.txt is available in Free, Pro, and Premium editions. The free plugin covers the guided workflow and essential crawl control features, while Pro and Premium unlock additional governance, protection, and AI-ready modules. 
Some screenshots on the plugin page show features from all three editions.<\/p>\n\n<h3>A quick overview<\/h3>\n\n<p>https:\/\/vimeo.com\/1169756981<\/p>\n\n<h3>Why Better Robots.txt is different<\/h3>\n\n<p>Most robots.txt plugins fall into one of three categories:<\/p>\n\n<ul>\n<li>Simple text editor<\/li>\n<li>Virtual robots.txt manager<\/li>\n<li>Single-purpose AI or policy add-on<\/li>\n<\/ul>\n\n<p>Better Robots.txt goes further.<\/p>\n\n<p>It gives you a complete, guided crawl control workflow so you can:<\/p>\n\n<ul>\n<li>Choose a preset that matches your goals<\/li>\n<li>Control major crawler categories without writing everything by hand<\/li>\n<li>Keep core WordPress protection rules visible and editable<\/li>\n<li>Clean up low-value crawl paths that waste crawl budget<\/li>\n<li>Generate a cleaner robots.txt output<\/li>\n<li>Preview the final result before saving<\/li>\n<\/ul>\n\n<h3>What you can control<\/h3>\n\n<p>Better Robots.txt helps you manage:<\/p>\n\n<ul>\n<li>Search engine visibility<\/li>\n<li>AI and LLM crawler behavior<\/li>\n<li>AI usage signals such as search, ai-input, and ai-train preferences<\/li>\n<li>SEO tool crawlers<\/li>\n<li>Bad bots and abusive crawlers<\/li>\n<li>Archive and Wayback access<\/li>\n<li>Feed crawlers and crawl traps<\/li>\n<li>WooCommerce crawl cleanup<\/li>\n<li>CSS, JavaScript, and image crawling rules<\/li>\n<li>Social media preview crawlers<\/li>\n<li>ads.txt and app-ads.txt allowance<\/li>\n<li>llms.txt generation<\/li>\n<li>Advanced directives such as crawl-delay and custom rules<\/li>\n<li>Final review before publishing<\/li>\n<\/ul>\n\n<h3>Editions<\/h3>\n\n<p>Better Robots.txt is available in three editions:<\/p>\n\n<ul>\n<li>Free - Includes the guided setup, the Essential preset, core crawl control features, and the final Review &amp; Save workflow.<\/li>\n<li>Pro - Adds more advanced governance and protection modules, including additional AI, crawler, and cleanup controls.<\/li>\n<li>Premium - Unlocks 
the most restrictive and advanced protection options, including the Fortress preset and additional high-control modules.<\/li>\n<\/ul>\n\n<p>Some options shown in the interface are marked Free, Pro, or Premium so users can immediately understand which modules belong to each edition.<\/p>\n\n<h3>Presets<\/h3>\n\n<p>Setup starts with four modes:<\/p>\n\n<ul>\n<li>Essential - A clean, practical configuration for most websites that want a better robots.txt without complexity.<\/li>\n<li>AI-First - For publishers and content sites that want AI-ready governance without shutting down discovery.<\/li>\n<li>Fortress - For websites that want stronger protection against scraping, archive capture, and unnecessary crawl activity.<\/li>\n<li>Custom - For users who prefer to configure each module manually.<\/li>\n<\/ul>\n\n<p>For many sites, one preset plus a quick review is enough.<\/p>\n\n<h3>Built for beginners and experts<\/h3>\n\n<p>Beginners get:<\/p>\n\n<ul>\n<li>A guided setup instead of a raw robots.txt box<\/li>\n<li>Preset-based configuration<\/li>\n<li>Plain-language explanations for important choices<\/li>\n<li>A safer workflow with a final preview step<\/li>\n<\/ul>\n\n<p>Advanced users get:<\/p>\n\n<ul>\n<li>Editable core WordPress protection rules<\/li>\n<li>Fine-grained crawler controls by category<\/li>\n<li>WooCommerce-oriented cleanup options<\/li>\n<li>Consolidated output options<\/li>\n<li>Advanced directives and custom rules<\/li>\n<li>A final output they can inspect before publishing<\/li>\n<\/ul>\n\n<h3>AI-ready, without hype<\/h3>\n\n<p>Better Robots.txt includes features for modern AI-related crawl governance, including:<\/p>\n\n<ul>\n<li>AI crawler handling<\/li>\n<li>Optional llms.txt support<\/li>\n<li>AI usage signals for compliant systems<\/li>\n<li>Optional machine-readable governance signals for advanced use cases<\/li>\n<\/ul>\n\n<p>These features help you express how you want automated systems to use your content.<\/p>\n\n<p>However, Better 
Robots.txt does not claim to control AI by force. Like robots.txt itself, these signals are most useful with compliant systems and good-faith crawlers.<\/p>\n\n<h3>What Better Robots.txt is<\/h3>\n\n<p>Better Robots.txt is:<\/p>\n\n<ul>\n<li>A robots.txt governance plugin for WordPress<\/li>\n<li>A guided configuration workflow instead of a raw text editor<\/li>\n<li>A crawl control layer to reduce wasteful crawling<\/li>\n<li>A practical bridge between SEO, crawl hygiene, and AI-era policy signaling<\/li>\n<li>A way to keep your crawl policy clearer for humans and machines<\/li>\n<\/ul>\n\n<p>Technical reference for advanced users: Better Robots.txt also maintains a public <a href=\"https:\/\/github.com\/GautierDorval\/better-robots-txt\" rel=\"nofollow noopener noreferrer\">GitHub repository<\/a> with product definition, governance notes, and machine-readable artefacts.<\/p>\n\n<h3>What Better Robots.txt is not<\/h3>\n\n<p>Better Robots.txt is not:<\/p>\n\n<ul>\n<li>A firewall or Web Application Firewall (WAF)<\/li>\n<li>An anti-scraping enforcement engine<\/li>\n<li>A legal compliance engine<\/li>\n<li>A guarantee that every bot will obey your rules<\/li>\n<li>A replacement for server-level security or access control<\/li>\n<\/ul>\n\n<p>It helps you publish a clearer crawl policy.<\/p>\n\n<p>It does not replace infrastructure-level protection.<\/p>\n\n<h3>Typical use cases<\/h3>\n\n<p>Use Better Robots.txt if you want to:<\/p>\n\n<ul>\n<li>Clean up a weak or noisy default robots.txt<\/li>\n<li>Reduce crawl waste on WordPress or WooCommerce<\/li>\n<li>Keep major search engines allowed while restricting other bots<\/li>\n<li>Control whether archive bots can snapshot your site<\/li>\n<li>Publish AI usage preferences more clearly<\/li>\n<li>Keep social preview bots allowed while limiting scrapers<\/li>\n<li>Review the final file before making it live<\/li>\n<\/ul>\n\n<h3>Key Features<\/h3>\n\n<ul>\n<li>Guided step-by-step wizard<\/li>\n<li>Preset-based setup: 
Essential, AI-First, Fortress, Custom<\/li>\n<li>Search engine visibility controls<\/li>\n<li>AI and LLM crawler governance<\/li>\n<li>AI usage signals support<\/li>\n<li>SEO tool crawler controls<\/li>\n<li>Bad bot and abusive crawler options<\/li>\n<li>Archive and Wayback access controls<\/li>\n<li>Spam, feed, and crawl trap cleanup<\/li>\n<li>WooCommerce crawl cleanup options<\/li>\n<li>CSS, JavaScript, and image crawling rules<\/li>\n<li>Social media preview crawler controls<\/li>\n<li>ads.txt and app-ads.txt allowance<\/li>\n<li>Optional llms.txt generation<\/li>\n<li>Consolidated output option<\/li>\n<li>Core WordPress protection rules remain visible and editable<\/li>\n<li>Final Review &amp; Save preview screen<\/li>\n<\/ul>\n\n<h4>About the publisher<\/h4>\n\n<p>Better Robots.txt is developed and maintained by <a href=\"https:\/\/pagup.com\/\">Pagup<\/a>, a digital readability firm based in Quebec, Canada. Pagup helps organizations become correctly understood by search engines, generative AI systems, and autonomous agents.<\/p>\n\n<p>The robots.txt file is the first surface that AI crawlers read when they discover a site. A well-structured robots.txt that references governance files such as llms.txt, ai-manifest.json, and interpretation policies helps AI systems understand your site faster and more accurately.<\/p>\n\n<p>Better Robots.txt is one component of a broader digital readability practice that includes <a href=\"https:\/\/pagup.com\/en\/services\/semantic-content-architecture\/\">semantic content architecture<\/a>, <a href=\"https:\/\/pagup.com\/en\/services\/ai-governance-and-machine-readability\/\">AI governance and machine readability<\/a>, and <a href=\"https:\/\/pagup.com\/en\/glossary\/interpretive-seo\/\">interpretive SEO<\/a>.<\/p>\n\n<h4>Part of the Pagup ecosystem<\/h4>\n\n<ul>\n<li><a href=\"https:\/\/pagup.com\/\">pagup.com<\/a> \u2014 Digital readability firm. 
Diagnostic, semantic architecture, AI governance.<\/li>\n<li><a href=\"https:\/\/gautierdorval.com\/\">gautierdorval.com<\/a> \u2014 Doctrine, canonical definitions, interpretive governance research.<\/li>\n<li><a href=\"https:\/\/interpretive-governance.org\/\">interpretive-governance.org<\/a> \u2014 Formal versioned standard for interpretive governance.<\/li>\n<li><a href=\"https:\/\/better-robots.com\/\">better-robots.com<\/a> \u2014 Documentation and resources for Better Robots.txt.<\/li>\n<\/ul>\n\n<!--section=installation-->\n<ol>\n<li>Upload the plugin files to the \/wp-content\/plugins\/better-robots-txt\/ directory, or install Better Robots.txt through the WordPress Plugins screen.<\/li>\n<li>Activate the plugin through the Plugins screen in WordPress.<\/li>\n<li>Open the Better Robots.txt settings page from your WordPress dashboard.<\/li>\n<li>Choose a preset or configure each module manually.<\/li>\n<li>Follow the wizard until the final Review &amp; Save step.<\/li>\n<li>Review the generated robots.txt preview.<\/li>\n<li>Save your changes.<\/li>\n<\/ol>\n\n<!--section=faq-->\n<dl>\n<dt id=\"does%20this%20plugin%20create%20or%20manage%20robots.txt%3F\"><h3>Does this plugin create or manage robots.txt?<\/h3><\/dt>\n<dd><p>Yes. Better Robots.txt generates and manages your WordPress robots.txt through a guided interface, with a preview before you apply changes.<\/p><\/dd>\n<dt id=\"is%20this%20only%20for%20advanced%20users%3F\"><h3>Is this only for advanced users?<\/h3><\/dt>\n<dd><p>No. The plugin is designed for both beginners and advanced users. Presets make the first setup easy, while experts can fine-tune individual modules and directives.<\/p><\/dd>\n<dt id=\"can%20i%20preview%20the%20result%20before%20saving%3F\"><h3>Can I preview the result before saving?<\/h3><\/dt>\n<dd><p>Yes. 
The final Review &amp; Save step shows you the generated robots.txt before you publish it.<\/p><\/dd>\n<dt id=\"can%20i%20block%20ai%20crawlers%3F\"><h3>Can I block AI crawlers?<\/h3><\/dt>\n<dd><p>You can configure how AI-related crawlers and tools are treated and publish AI usage preferences. Respect and enforcement still depend on each crawler's behavior, just like with robots.txt.<\/p><\/dd>\n<dt id=\"does%20llms.txt%20guarantee%20that%20ai%20systems%20will%20follow%20my%20rules%3F\"><h3>Does llms.txt guarantee that AI systems will follow my rules?<\/h3><\/dt>\n<dd><p>No. llms.txt and similar policy signals help you express intent more clearly, but they are not a hard technical barrier.<\/p><\/dd>\n<dt id=\"can%20i%20keep%20search%20engines%20allowed%20while%20restricting%20other%20bots%3F\"><h3>Can I keep search engines allowed while restricting other bots?<\/h3><\/dt>\n<dd><p>Yes. Better Robots.txt helps you differentiate between crawler categories instead of using a simple all-or-nothing approach.<\/p><\/dd>\n<dt id=\"can%20i%20change%20preset%20decisions%20later%3F\"><h3>Can I change preset decisions later?<\/h3><\/dt>\n<dd><p>Yes. Presets are a starting point. You can revisit the settings page, adjust modules, and regenerate your robots.txt at any time.<\/p><\/dd>\n<dt id=\"are%20all%20screenshots%20from%20the%20free%20version%3F\"><h3>Are all screenshots from the free version?<\/h3><\/dt>\n<dd><p>No. The screenshots reflect the current product family and may include Free, Pro, and Premium features. Features marked Pro or Premium in the interface require a paid edition.<\/p><\/dd>\n<dt id=\"is%20the%20free%20version%20still%20useful%20on%20its%20own%3F\"><h3>Is the free version still useful on its own?<\/h3><\/dt>\n<dd><p>Yes. The free edition includes the guided workflow, essential crawl control, and the final preview step. 
Pro and Premium are for sites that need broader governance and stricter protection.<\/p><\/dd>\n<dt id=\"does%20this%20plugin%20help%20woocommerce%20sites%3F\"><h3>Does this plugin help WooCommerce sites?<\/h3><\/dt>\n<dd><p>Yes. Better Robots.txt includes WooCommerce-related cleanup options to reduce unnecessary crawling of dynamic, low-value, or duplicate URLs.<\/p><\/dd>\n<dt id=\"who%20develops%20better%20robots.txt%3F\"><h3>Who develops Better Robots.txt?<\/h3><\/dt>\n<dd><p>Better Robots.txt is developed by <a href=\"https:\/\/pagup.com\/\">Pagup<\/a>, a digital readability firm based in Quebec, Canada. Pagup specializes in helping organizations become correctly readable by search engines, AI systems, and autonomous agents.<\/p><\/dd>\n<dt id=\"why%20does%20robots.txt%20matter%20for%20ai%20readability%3F\"><h3>Why does robots.txt matter for AI readability?<\/h3><\/dt>\n<dd><p>Your robots.txt is the first file that AI crawlers read when they visit your site. It determines what content they can access and what governance signals they discover. In 2026, AI systems such as ChatGPT, Perplexity, Gemini, and autonomous agents rely on robots.txt to understand how to interact with your site. A robots.txt that references your llms.txt, ai-manifest.json, and governance policies helps these systems interpret your organization more accurately.<\/p>\n\n<p>Learn more about <a href=\"https:\/\/pagup.com\/en\/services\/ai-governance-and-machine-readability\/\">AI governance and machine readability<\/a> and why <a href=\"https:\/\/pagup.com\/en\/glossary\/digital-readability\/\">digital readability<\/a> goes beyond traditional SEO.<\/p><\/dd>\n<dt id=\"what%20is%20digital%20readability%3F\"><h3>What is digital readability?<\/h3><\/dt>\n<dd><p>Digital readability is the capacity of a website to be correctly understood by all four reading layers: humans, search engines, generative AI systems, and autonomous agents. Traditional SEO addresses only the search engine layer. 
Digital readability covers all four. Learn more at <a href=\"https:\/\/pagup.com\/en\/glossary\/digital-readability\/\">pagup.com<\/a>.<\/p><\/dd>\n\n<\/dl>\n\n<!--section=changelog-->\n<h4>1.0.0<\/h4>\n\n<ul>\n<li>Initial release.<\/li>\n<\/ul>\n\n<h4>1.0.1<\/h4>\n\n<ul>\n<li>fixed plugin directory url issue<\/li>\n<li>some text improvements<\/li>\n<\/ul>\n\n<h4>1.0.2<\/h4>\n\n<ul>\n<li>fixed some minor issues with styling<\/li>\n<li>improved text and translation<\/li>\n<\/ul>\n\n<h4>1.1.0<\/h4>\n\n<ul>\n<li>added some major improvements<\/li>\n<li>allow\/off option changed with allow\/disallow\/off<\/li>\n<li>improved overall text and french translation<\/li>\n<\/ul>\n\n<h4>1.1.1<\/h4>\n\n<ul>\n<li>fixed a bug and improved code<\/li>\n<\/ul>\n\n<h4>1.1.2<\/h4>\n\n<ul>\n<li>added new feature \"Spam Backlink Blocker\"<\/li>\n<\/ul>\n\n<h4>1.1.3<\/h4>\n\n<ul>\n<li>fixed a bug<\/li>\n<\/ul>\n\n<h4>1.1.4<\/h4>\n\n<ul>\n<li>added new \"personalize your robots.txt\" feature to add custom signature<\/li>\n<li>added recommended seo tools to improve search engine optimization<\/li>\n<\/ul>\n\n<h4>1.1.5<\/h4>\n\n<ul>\n<li>added feature to detect physical robots.txt file and delete it if server permissions allow<\/li>\n<\/ul>\n\n<h4>1.1.6<\/h4>\n\n<ul>\n<li>added russian and chinese (simplified) languages<\/li>\n<li>fixed bug causing redirection to better robots.txt settings page upon activating other plugins<\/li>\n<\/ul>\n\n<h4>1.1.7<\/h4>\n\n<ul>\n<li>added new feature: Top plugins for SEO performance<\/li>\n<li>fixed plugin notices issue to dismiss for a defined period of time after being closed<\/li>\n<li>fixed stylesheet issue to get proper updated file after plugin update (cache buster)<\/li>\n<li>added spanish and portuguese languages<\/li>\n<\/ul>\n\n<h4>1.1.8<\/h4>\n\n<ul>\n<li>added new feature: xml sitemap detection<\/li>\n<li>fixed translations<\/li>\n<\/ul>\n\n<h4>1.1.9<\/h4>\n\n<ul>\n<li>added new feature: loading performance for 
woocommerce<\/li>\n<\/ul>\n\n<h4>1.1.9.1<\/h4>\n\n<ul>\n<li>fixed a bug in disallow rules for woocommerce<\/li>\n<\/ul>\n\n<h4>1.1.9.2<\/h4>\n\n<ul>\n<li>boost your site with alt tags<\/li>\n<\/ul>\n\n<h4>1.1.9.3<\/h4>\n\n<ul>\n<li>fixed readability issues<\/li>\n<\/ul>\n\n<h4>1.1.9.4<\/h4>\n\n<ul>\n<li>fixed default robots.txt file issue upon plugin activation for first time<\/li>\n<li>fixed php error upon saving settings and permalinks<\/li>\n<li>refactored code<\/li>\n<\/ul>\n\n<h4>1.1.9.5<\/h4>\n\n<ul>\n<li>added clean-param for yandex bot<\/li>\n<li>ask backlinks feature for pro users<\/li>\n<li>avoid crawler traps feature for pro users<\/li>\n<li>improved default robots.txt rules<\/li>\n<\/ul>\n\n<h4>1.1.9.6<\/h4>\n\n<ul>\n<li>added 150+ growth hacking tools<\/li>\n<li>fixed layout bug<\/li>\n<li>updated default rules<\/li>\n<\/ul>\n\n<h4>1.2.0<\/h4>\n\n<ul>\n<li>Added Post Meta Box to Disable Individual post, pages and products (woocommerce pro only). It will add Disallow and Noindex rule in robots.txt for any page you choose to disallow from post meta box options.<\/li>\n<\/ul>\n\n<h4>1.2.1<\/h4>\n\n<ul>\n<li>Added multisite feature for directory based network sites (pro only). 
it can duplicate all default rules, yoast sitemap, woocommerce rules, bad bots, pinterest bot blocker, backlinks blocker etc with a single click for all directory based network sites.<\/li>\n<li>Added version timestamp for wp_register_script 'assets\/rt-script.js'<\/li>\n<\/ul>\n\n<h4>1.2.2<\/h4>\n\n<ul>\n<li>Fixed some bugs creating error in google search console<\/li>\n<li>Text improvement<\/li>\n<\/ul>\n\n<h4>1.2.3<\/h4>\n\n<ul>\n<li>Added \"Hide your robots.txt from SERPs\" feature<\/li>\n<li>Text improvements<\/li>\n<\/ul>\n\n<h4>1.2.4<\/h4>\n\n<ul>\n<li>Fixed a bug<\/li>\n<li>Text improvements<\/li>\n<\/ul>\n\n<h4>1.2.5<\/h4>\n\n<ul>\n<li>Fixed crawl-delay issue<\/li>\n<li>Updated translations<\/li>\n<\/ul>\n\n<h4>1.2.5.1<\/h4>\n\n<ul>\n<li>Fixed a minor issue<\/li>\n<\/ul>\n\n<h4>1.2.6<\/h4>\n\n<ul>\n<li>Security patched in freemius sdk<\/li>\n<\/ul>\n\n<h4>1.2.6.1<\/h4>\n\n<ul>\n<li>Fixed Multisite Issue for pro users<\/li>\n<\/ul>\n\n<h4>1.2.6.2<\/h4>\n\n<ul>\n<li>Fixed Yoast sitemap issue for Multisite users<\/li>\n<\/ul>\n\n<h4>1.2.6.3<\/h4>\n\n<ul>\n<li>Fixed some text<\/li>\n<\/ul>\n\n<h4>1.2.7<\/h4>\n\n<ul>\n<li>Added Baidu\/Sogou\/Soso\/Youdao - Chinese search engines features for pro users<\/li>\n<li>Added social media crawl feature for pro users<\/li>\n<\/ul>\n\n<h4>1.2.8<\/h4>\n\n<ul>\n<li>Notification will be disabled for 4 months. Fixed some other minor stuff<\/li>\n<\/ul>\n\n<h4>1.2.9.2<\/h4>\n\n<ul>\n<li>Updated Freemius SDK v2.3.0<\/li>\n<li>BIGTA recommendation<\/li>\n<\/ul>\n\n<h4>1.2.9.3<\/h4>\n\n<ul>\n<li>Fixed Undefined index error while saving MENUS for some sites<\/li>\n<li>Removed \"noindex\" rule for individual posts as Google will stop supporting it from Sep 01 2019<\/li>\n<\/ul>\n\n<h4>1.3.0<\/h4>\n\n<ul>\n<li>Added 5 new rules to default config. 
Removed 4 old default rules which were causing some issues with WPML<\/li>\n<li>Added a search rule to Avoid crawling traps<\/li>\n<li>Added several new rules to Spam Backlink Blocker<\/li>\n<li>Fixed security issues<\/li>\n<\/ul>\n\n<h4>1.3.0.1<\/h4>\n\n<ul>\n<li>VidSEO recommendation<\/li>\n<\/ul>\n\n<h4>1.3.0.2<\/h4>\n\n<ul>\n<li>Fixed some security issues<\/li>\n<li>Added new rules to Backlink Protector (Pro only)<\/li>\n<li>Multisite notification will be disabled permanently once dismissed<\/li>\n<\/ul>\n\n<h4>1.3.0.3<\/h4>\n\n<ul>\n<li>Fixed php notice (in php log) for $host_url variable<\/li>\n<\/ul>\n\n<h4>1.3.0.4<\/h4>\n\n<ul>\n<li>Fixed php notice (in php log) for $active_tab variable<\/li>\n<li>Fixed some typos<\/li>\n<\/ul>\n\n<h4>1.3.0.5<\/h4>\n\n<ul>\n<li>Added option to Be part of our worldwide Movement against CoronaVirus (Covid-19)<\/li>\n<li>Fixed several php undefined index notices (in php log) related to Step 7 and 8 options<\/li>\n<\/ul>\n\n<h4>1.3.0.6<\/h4>\n\n<ul>\n<li>\ud83d\udc4c IMPROVE: Updated freemius to latest version 2.3.2<\/li>\n<li>\ud83d\udc1b FIX: Some minor issues<\/li>\n<\/ul>\n\n<h4>1.3.0.7<\/h4>\n\n<ul>\n<li>\ud83d\udd25 NEW: WP Google Street View promotion<\/li>\n<li>\ud83d\udc1b FIX: Some minor text issues<\/li>\n<\/ul>\n\n<h4>1.3.1.0<\/h4>\n\n<ul>\n<li>\ud83d\udc4c IMPROVE: Admin Notices are set to permanently dismissed based on user.<\/li>\n<li>\ud83d\udc4c IMPROVE: Top level menu for Better Robots.txt Settings<\/li>\n<li>\ud83d\udc1b FIX: Styling conflict with Norebro Theme.<\/li>\n<li>\ud83d\udc1b FIX: Undefined variables php errors for some options<\/li>\n<\/ul>\n\n<h4>1.3.2.0<\/h4>\n\n<ul>\n<li>\ud83d\udc1b FIXED: XSS vulnerability.<\/li>\n<li>\ud83d\udc1b FIX: Non-static method errors<\/li>\n<li>\ud83d\udc4c IMPROVE: Tested up to WordPress v5.5<\/li>\n<\/ul>\n\n<h4>1.3.2.1<\/h4>\n\n<ul>\n<li>\ud83d\udc1b FIXED: Call to undefined method error.<\/li>\n<\/ul>\n\n<h4>1.3.2.2<\/h4>\n\n<ul>\n<li>\ud83d\udc4c IMPROVE: Update 
Freemius to v2.4.1<\/li>\n<\/ul>\n\n<h4>1.3.2.3<\/h4>\n\n<ul>\n<li>\ud83d\udc4c IMPROVE: Tested up to WordPress v5.6<\/li>\n<li>\ud83d\udc1b FIX: Get Pro URL<\/li>\n<\/ul>\n\n<h4>1.3.2.4<\/h4>\n\n<ul>\n<li>\ud83d\udc4c IMPROVE: Added some more rules for Woocommerce performance<\/li>\n<li>\ud83d\udc4c IMPROVE: Update Freemius to v2.4.2<\/li>\n<\/ul>\n\n<h4>1.3.2.5<\/h4>\n\n<ul>\n<li>\ud83d\udd25 NEW: Meta Tags for SEO promotion<\/li>\n<\/ul>\n\n<h4>1.4.0<\/h4>\n\n<ul>\n<li>\ud83d\udc4c IMPROVE: Refactored code to MVC<\/li>\n<li>\ud83d\udc4c IMPROVE: New clean design<\/li>\n<li>\ud83d\udc4c IMPROVE: Many small improvements<\/li>\n<\/ul>\n\n<h4>1.4.0.1<\/h4>\n\n<ul>\n<li>\ud83d\udc1b FIX: Added trailing backslash for using trait<\/li>\n<\/ul>\n\n<h4>1.4.1<\/h4>\n\n<ul>\n<li>\ud83d\udd25 NEW: Search engine visibility feature (Pro version)<\/li>\n<li>\ud83d\udd25 NEW: Image Crawlability feature (Pro version)<\/li>\n<\/ul>\n\n<h4>1.4.1.1<\/h4>\n\n<ul>\n<li>\ud83d\udc1b FIX: Sitemap issue<\/li>\n<\/ul>\n\n<h4>1.4.2<\/h4>\n\n<ul>\n<li>\ud83d\udc1b FIX: Bugs and improvements<\/li>\n<li>\ud83d\udd25 NEW: Option to add default WordPress Sitemap (Pro Version)<\/li>\n<li>\ud83d\udd25 NEW: Option to add All in One SEO Sitemap (Pro Version)<\/li>\n<\/ul>\n\n<h4>1.4.3<\/h4>\n\n<ul>\n<li>\ud83d\udc1b FIX: Text issues<\/li>\n<\/ul>\n\n<h4>1.4.4<\/h4>\n\n<ul>\n<li>\ud83d\udc1b FIX: Security fix<\/li>\n<\/ul>\n\n<h4>1.4.5<\/h4>\n\n<ul>\n<li>\ud83d\udc1b FIX: PHP warning undefined index<\/li>\n<\/ul>\n\n<h4>1.4.6<\/h4>\n\n<ul>\n<li>\ud83d\udc1b FIX: SECURITY PATCH. 
Verify nonce for CSRF attack.<\/li>\n<li>\ud83d\udc1b FIX: PHP 8.2 warning undefined index<\/li>\n<\/ul>\n\n<h4>1.4.7<\/h4>\n\n<ul>\n<li>\ud83d\udc1b FIX: Removed Be part of movement against CoronaVirus (Covid-19) option<\/li>\n<\/ul>\n\n<h4>1.5.0<\/h4>\n\n<ul>\n<li>\ud83d\udc1b FIX: Moved Moz bots from Bad bots list to Backlink Protector list.<\/li>\n<li>\ud83d\udd25 NEW: You can select and exclude bots in Backlink Protector list (Pro Version)<\/li>\n<li>\ud83d\udd25 NEW: Bad Bots - \"AI recommended setting\" by ChatGPT-4 (Pro Version)<\/li>\n<li>\ud83d\udc1b FIX: Security fix<\/li>\n<\/ul>\n\n<h4>1.5.1<\/h4>\n\n<ul>\n<li>\ud83d\udd25 NEW: ChatGPT Bot Blocker - Block ChatGPT Bot from scraping your content (Pro Version)<\/li>\n<li>\ud83d\udc4c IMPROVE: Encapsulation of Radio Switch Buttons and code refactoring.<\/li>\n<\/ul>\n\n<h4>1.5.2<\/h4>\n\n<ul>\n<li>\ud83d\udc1b FIX: Class initialization<\/li>\n<\/ul>\n\n<h4>2.0.0<\/h4>\n\n<ul>\n<li>\ud83d\udd25 NEW: UI\/UX with better experience<\/li>\n<li>\ud83d\udd25 NEW: ChatGPT Bot Blocker - Block ChatGPT Bot supported in free version<\/li>\n<li>\ud83d\udd25 NEW: Ads.txt and App-ads.txt support for free version<\/li>\n<li>\ud83d\udc1b FIX: Personalization line break issue<\/li>\n<li>\ud83d\udc1b FIX: Other fixes and improvements<\/li>\n<\/ul>\n\n<h4>2.0.1<\/h4>\n\n<ul>\n<li>\ud83d\udc1b FIX: Freemius SDK Security fix<\/li>\n<\/ul>\n\n<h4>2.0.2<\/h4>\n\n<ul>\n<li>\ud83d\udd25 NEW: Generate a Physical file in PRO version. Recommended for PageSpeed Insights compatibility.<\/li>\n<li>\ud83d\udc4c IMPROVE: Update Freemius to v2.12.0<\/li>\n<\/ul>\n\n<h4>2.0.3<\/h4>\n\n<ul>\n<li>\ud83d\udc4c IMPROVE: Physical file (PRO version) functionality. 
Recommended for PageSpeed Insights compatibility.<\/li>\n<li>\ud83d\udc4c IMPROVE: Update Freemius to v2.13.0<\/li>\n<\/ul>\n\n<h4>2.0.4<\/h4>\n\n<ul>\n<li>\ud83d\udc1b FIX: Freemius and dev mode issues.<\/li>\n<\/ul>\n\n<h4>3.0.0<\/h4>\n\n<ul>\n<li>\ud83d\udd25 NEW: 14-steps granular configuration process<\/li>\n<li>\ud83d\udd25 NEW: Complete UI redesign with an intuitive stepper-based interface and dynamic Preview<\/li>\n<li>\ud83d\udd25 NEW: Configuration Modes: Custom, Free, Pro, and Premium Presets<\/li>\n<li>\ud83d\udd25 NEW: Granular AI &amp; LLM Governance (Block AI training bots, OpenAI, Claude, AI search engines)<\/li>\n<li>\ud83d\udd25 NEW: SSA (Safe Software Association) doctrine integration for AI governance declaration<\/li>\n<li>\ud83d\udd25 NEW: Advanced Bot &amp; Scraper protection with new basic and AI-curated full lists<\/li>\n<li>\ud83d\udd25 NEW: Enhanced Spam &amp; Feed Protection (Feeds, author archives, comments)<\/li>\n<li>\ud83d\udd25 NEW: Advanced E-commerce optimization for WooCommerce<\/li>\n<li>\ud83d\udd25 NEW: Dedicated control for Social Media bots, Archive Services, and Ads crawlability<\/li>\n<li>\ud83d\udc1b FIX: Numerous backend optimizations, compatibility improvements, and security patches<\/li>\n<\/ul>\n\n<h4>3.0.1<\/h4>\n\n<ul>\n<li>\ud83d\udc1b FIX: Removed Push notification popup during freemius opt-in.<\/li>\n<li>\ud83d\udc4c IMPROVE: Update Freemius to v2.13.1<\/li>\n<\/ul>","raw_excerpt":"Replace the default WordPress robots.txt workflow with a smarter, structured version you can preview before publishing, with Free, Pro, and Premium ed 
&hellip;","jetpack_sharing_enabled":true,"_links":{"self":[{"href":"https:\/\/azb.wordpress.org\/plugins\/wp-json\/wp\/v2\/plugin\/85798","targetHints":{"allow":["GET"]}}],"collection":[{"href":"https:\/\/azb.wordpress.org\/plugins\/wp-json\/wp\/v2\/plugin"}],"about":[{"href":"https:\/\/azb.wordpress.org\/plugins\/wp-json\/wp\/v2\/types\/plugin"}],"replies":[{"embeddable":true,"href":"https:\/\/azb.wordpress.org\/plugins\/wp-json\/wp\/v2\/comments?post=85798"}],"author":[{"embeddable":true,"href":"https:\/\/azb.wordpress.org\/plugins\/wp-json\/wporg\/v1\/users\/pagup"}],"wp:attachment":[{"href":"https:\/\/azb.wordpress.org\/plugins\/wp-json\/wp\/v2\/media?parent=85798"}],"wp:term":[{"taxonomy":"plugin_section","embeddable":true,"href":"https:\/\/azb.wordpress.org\/plugins\/wp-json\/wp\/v2\/plugin_section?post=85798"},{"taxonomy":"plugin_tags","embeddable":true,"href":"https:\/\/azb.wordpress.org\/plugins\/wp-json\/wp\/v2\/plugin_tags?post=85798"},{"taxonomy":"plugin_category","embeddable":true,"href":"https:\/\/azb.wordpress.org\/plugins\/wp-json\/wp\/v2\/plugin_category?post=85798"},{"taxonomy":"plugin_contributors","embeddable":true,"href":"https:\/\/azb.wordpress.org\/plugins\/wp-json\/wp\/v2\/plugin_contributors?post=85798"},{"taxonomy":"plugin_business_model","embeddable":true,"href":"https:\/\/azb.wordpress.org\/plugins\/wp-json\/wp\/v2\/plugin_business_model?post=85798"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}