# Dcatur Travel Platform - robots.txt
# Allows search engine crawlers to discover and index content

# Default rules for all crawlers
User-agent: *
Allow: /
Disallow: /admin/
Disallow: /hotel-owner/
Disallow: /login
Disallow: /register
Disallow: /forgot-password
Disallow: /reset-password
Disallow: /dashboard
Disallow: /notifications
Disallow: /checkout
Disallow: /success
Disallow: /reservations/
Disallow: /booking-confirmation
Disallow: /*.json
Disallow: /*.xml
Disallow: /api/
Disallow: /.well-known/

# Allow specific paths
Allow: /sitemap.xml
Allow: /flights
Allow: /hotels
Allow: /offers
Allow: /packages
Allow: /car-rental
Allow: /insurance
Allow: /about-us
Allow: /contact
Allow: /help
Allow: /careers
Allow: /privacy
Allow: /terms
Allow: /hotel-owner/terms

# Crawl delay for all bots (in seconds)
Crawl-delay: 1

# Specific rules for Googlebot
User-agent: Googlebot
Allow: /
Disallow: /admin/
Disallow: /hotel-owner/
Disallow: /login
Disallow: /register
Disallow: /dashboard
Disallow: /*.json
Disallow: /api/
Crawl-delay: 0

# Specific rules for Bingbot
User-agent: Bingbot
Allow: /
Disallow: /admin/
Disallow: /hotel-owner/
Disallow: /login
Disallow: /register
Disallow: /dashboard
Disallow: /*.json
Disallow: /api/
Crawl-delay: 1

# Block bad bots
User-agent: AhrefsBot
Disallow: /

User-agent: SemrushBot
Disallow: /

User-agent: DotBot
Disallow: /

# Sitemap location
Sitemap: https://dcatur.com/sitemap.xml
Sitemap: https://dcatur.com/sitemap-pages.xml