# Biodata360 Robots.txt - SEO Optimized
User-agent: *
Allow: /

# Explicitly allow important pages
Allow: /privacy
Allow: /terms
Allow: /about
Allow: /contact-us
Allow: /create
Allow: /blog
Allow: /biodata-profile-maker
Allow: /dowry-calculator
Allow: /partner-compatibility
Allow: /dmca
Allow: /disclaimer

# Specify the location of the XML sitemap
Sitemap: https://biodata360.in/sitemap.xml

# Block only specific directories that should not be crawled
Disallow: /admin/
Disallow: /private/
Disallow: /api/
Disallow: /internal/
Disallow: /test/
Disallow: /dev/
Disallow: /.git/
Disallow: /node_modules/

# Block search result pages and tracking parameters to prevent duplicate content
Disallow: /*?q=
Disallow: /*?filter=
Disallow: /*?sort=
Disallow: /*?utm_
Disallow: /*?ref=
Disallow: /*?source=
Disallow: /*?campaign=

# Crawl delay for server protection
# NOTE(review): Crawl-delay is non-standard (not part of RFC 9309) and is
# ignored by Googlebot — confirm whether it is still needed for other bots.
Crawl-delay: 1

# Specific bot instructions
# NOTE(review): per RFC 9309, a crawler obeys only the most specific matching
# User-agent group — these groups REPLACE the "*" group for these bots, so as
# written Googlebot/Bingbot would not inherit the Disallow rules above.
# Consider removing these groups or duplicating the Disallow rules into them.
User-agent: Googlebot
Crawl-delay: 0.5

User-agent: Bingbot
Crawl-delay: 1