# Robots.txt for Ledsreact - Sports Performance Testing Technology
# Website: https://ledsreact.com
# Generated following SEO best practices
#
# NOTE: Per the Robots Exclusion Protocol (RFC 9309), a crawler obeys ONLY
# the single most specific User-agent group that matches it; bot-specific
# groups do NOT inherit rules from the "*" group. Any bot-specific group
# below therefore repeats the shared Disallow rules it must keep.

# Default rule for all robots
User-agent: *
# Allow crawling of all public content
Allow: /

# Disallow crawling of technical and admin areas
Disallow: /api/
Disallow: /_next/
Disallow: /.next/
Disallow: /admin/
Disallow: /dashboard/
Disallow: /private/
Disallow: /temp/
Disallow: /tmp/

# Disallow crawling of development and testing files
# NOTE(review): these broad wildcards also block any public URL that merely
# contains "test" or "dev" (e.g. /latest-news, /devices) — confirm intended.
Disallow: /*.json$
Disallow: /*.log$
Disallow: /*test*
Disallow: /*dev*

# Disallow crawling of sensitive configuration files
Disallow: /config/
Disallow: /.env*
Disallow: /.git/
Disallow: /node_modules/

# Disallow crawling of duplicate content with URL parameters
Disallow: /*?*utm_*
Disallow: /*?*ref=*
Disallow: /*?*fbclid=*
Disallow: /*?*gclid=*

# Allow crawling of important static assets
Allow: /img/
Allow: /video/
Allow: /whitepapers/
Allow: /clients/
Allow: /product/
Allow: /*.css$
Allow: /*.js$
Allow: /*.png$
Allow: /*.jpg$
Allow: /*.jpeg$
Allow: /*.gif$
Allow: /*.webp$
Allow: /*.svg$
Allow: /*.ico$
Allow: /*.pdf$

# Googlebot intentionally has NO dedicated group: Google ignores Crawl-delay,
# and a bare "Allow: /" group would have overridden every Disallow above.
# Googlebot falls back to the "*" group and honors all shared rules.

# Bingbot honors Crawl-delay. Because this group REPLACES (not extends) the
# "*" group for Bingbot, the shared Disallow rules are repeated here.
User-agent: Bingbot
Crawl-delay: 1
Disallow: /api/
Disallow: /_next/
Disallow: /.next/
Disallow: /admin/
Disallow: /dashboard/
Disallow: /private/
Disallow: /temp/
Disallow: /tmp/
Disallow: /*.json$
Disallow: /*.log$
Disallow: /*test*
Disallow: /*dev*
Disallow: /config/
Disallow: /.env*
Disallow: /.git/
Disallow: /node_modules/
Disallow: /*?*utm_*
Disallow: /*?*ref=*
Disallow: /*?*fbclid=*
Disallow: /*?*gclid=*

# Social-preview fetchers: full access so shared links always render previews.
# NOTE(review): these groups override the "*" Disallows for these bots, so
# they may also fetch /admin/, /api/, etc. — presumably acceptable for link
# previews; confirm.
User-agent: facebookexternalhit
Allow: /

User-agent: Twitterbot
Allow: /

User-agent: LinkedInBot
Allow: /

# Block competitive analysis and aggressive scraping bots
# Note: These are legitimate tools but blocked to limit competitive intelligence
User-agent: AhrefsBot
Disallow: /

# Currently allowing this one, for ourselves
#User-agent: SemrushBot
#Disallow: /

# Block known aggressive scrapers
User-agent: MJ12bot
Disallow: /

User-agent: DotBot
Disallow: /

User-agent: ia_archiver
Disallow: /

# Sitemap references
Sitemap: https://ledsreact.com/sitemap.xml

# Host declaration (helps with international targeting)
# NOTE(review): "Host" is a non-standard, Yandex-only legacy directive and is
# ignored by other crawlers; kept for compatibility, safe to remove.
Host: https://ledsreact.com