# robots.txt for ByteVox Web
# Optimized for SEO and maximum crawlability

# Allow all search engines to crawl the site
User-agent: *
Allow: /

# Allow crawling of CSS, JS, and other assets for better rendering
Allow: /assets/
Allow: /*.css$
Allow: /*.js$
Allow: /*.jpg$
Allow: /*.jpeg$
Allow: /*.png$
Allow: /*.gif$
Allow: /*.svg$
Allow: /*.webp$
Allow: /*.woff$
Allow: /*.woff2$

# Disallow crawling of specific paths (if any sensitive areas exist)
# Disallow: /admin/
# Disallow: /private/

# Explicitly allow important pages and sections
Allow: /pages/
Allow: /products/
Allow: /blogs/

# Sitemap location
Sitemap: https://bytevox.ai/sitemap.xml

# Crawl delay (optional - remove if you want faster crawling)
# Crawl-delay: 1

# Specific rules for major search engines (optional)
User-agent: Googlebot
Allow: /

User-agent: Googlebot-Image
Allow: /

User-agent: Bingbot
Allow: /

User-agent: Slurp
Allow: /