# Shadow OS - robots.txt
# https://shadowos.io/robots.txt

# Allow all search engines to crawl the site
User-agent: *
Allow: /

# Sitemap location
Sitemap: https://shadowos.io/sitemap.xml

# LLM context files
# https://llmstxt.org/
# llms.txt: concise site overview for AI models
# llms-full.txt: comprehensive version with all pages and context

# Block common non-content paths (if they exist)
Disallow: /api/
Disallow: /admin/
Disallow: /_next/
Disallow: /.well-known/

# Allow specific bots that help with SEO/analytics
User-agent: Googlebot
Allow: /

User-agent: Bingbot
Allow: /

User-agent: Slurp
Allow: /

User-agent: DuckDuckBot
Allow: /

User-agent: BraveBot
Allow: /

# AI crawlers - allow for GEO (Generative Engine Optimization)
User-agent: GPTBot
Allow: /

User-agent: ChatGPT-User
Allow: /

User-agent: Claude-Web
Allow: /

User-agent: ClaudeBot
Allow: /

User-agent: anthropic-ai
Allow: /

User-agent: Google-Extended
Allow: /

User-agent: PerplexityBot
Allow: /

User-agent: Applebot
Allow: /

# Crawl-delay to be polite (optional, most major bots ignore this)
# Crawl-delay: 1