# robots.txt
# LLM-aware, strict crawl policy with controlled access

# ================
# AI / LLM Crawlers
# ================
User-agent: GPTBot
Disallow: /
Allow: /index.html
Allow: /llm.txt
Allow: /ai-policy.html
Allow: /sitemap.xml

User-agent: Google-Extended
Disallow: /
Allow: /index.html
Allow: /llm.txt
Allow: /ai-policy.html
Allow: /sitemap.xml

User-agent: ClaudeBot
Disallow: /
Allow: /index.html
Allow: /llm.txt
Allow: /ai-policy.html
Allow: /sitemap.xml

User-agent: CCBot
Disallow: /
Allow: /index.html
Allow: /llm.txt
Allow: /ai-policy.html
Allow: /sitemap.xml

# =======================
# Default Rule for All Other Bots
# =======================
User-agent: *
Disallow: /
Allow: /index.html
Allow: /llm.txt
Allow: /ai-policy.html
Allow: /sitemap.xml
Allow: /.well-known/
Allow: /.well-known/mta-sts.txt

# ====================
# Sitemap Declaration
# ====================
Sitemap: https://www.codeguy.dev/sitemap.xml
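
# Note on rule precedence: per RFC 9309, crawlers that honor Allow rules
# use longest-match precedence, so the specific Allow paths above take
# priority over the blanket "Disallow: /" for each group. A bare "Allow: /"
# would tie with "Disallow: /" and be resolved in favor of Allow, opening
# the whole site, which is why it is omitted here. Bots that ignore Allow
# rules, or robots.txt entirely, are not restricted by this file.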