# robots.txt for SaferPDF.com
# Generated to optimize SEO and crawler access

# Allow all crawlers by default
User-agent: *
Allow: /

# Disallow admin and build files
Disallow: /admin/
Disallow: /*.json$
Disallow: /*.xml$

# Allow specific important files
Allow: /sitemap.xml
Allow: /searchindex.json

# Crawl-delay for polite crawlers
Crawl-delay: 0

# Specific bot rules
User-agent: Googlebot
Allow: /
Crawl-delay: 0

User-agent: Bingbot
Allow: /
Crawl-delay: 0

# Block bad bots
User-agent: MJ12bot
Disallow: /

User-agent: AhrefsBot
Disallow: /

User-agent: SemrushBot
Disallow: /

User-agent: DotBot
Disallow: /

# Sitemap location (sitemap directives are group-independent)
Sitemap: https://www.saferpdf.com/sitemap.xml