# robots.txt for CoderFile.io

# Allow all search engines to crawl
User-agent: Googlebot
Allow: /
Crawl-delay: 0

User-agent: Bingbot
Allow: /
Crawl-delay: 0

User-agent: Slurp
Allow: /

User-agent: DuckDuckBot
Allow: /

User-agent: Baiduspider
Allow: /

User-agent: Yandex
Allow: /

User-agent: Twitterbot
Allow: /

User-agent: facebookexternalhit
Allow: /

User-agent: LinkedInBot
Allow: /

User-agent: *
Allow: /
Disallow: /auth
Disallow: /editor/
Disallow: /projects
Disallow: /project/
# Disallow thin programmatic SEO pages (applies to any crawler not
# matched by a more specific User-agent group above)
Disallow: /*-sandbox
Disallow: /*-fiddle
Disallow: /*-repl
Disallow: /*-execute
Disallow: /*-compiler
Crawl-delay: 1

# Sitemap location
Sitemap: https://coderfile.io/sitemap.xml