# Robots.txt for Tradinos eCommerce Platform

# Allow all search engines to crawl the site
User-agent: *
Allow: /

# Disallow admin and private routes
Disallow: /api/
Disallow: /(dashboard)/

# Disallow sensitive files
Disallow: /*.json$
Disallow: /*.xml$

# Allow important files
Allow: /sitemap.xml
Allow: /robots.txt

# Sitemap location
Sitemap: https://ecommerce.tradinos.com/sitemap.xml

# Crawl-delay for good practice (optional)
Crawl-delay: 0.5

# Specific rules for different bots
User-agent: Googlebot
Allow: /

User-agent: Bingbot
Allow: /

User-agent: Slurp
Allow: /