# robots.txt for adrianfreed.com
# This file tells search engine crawlers which pages they can and cannot access

User-agent: *
Allow: /

# Disallow sensitive or duplicate content if any
# Disallow: /private/
# Disallow: /temp/

# Sitemap location (using sitemap index for better organization)
Sitemap: https://adrianfreed.com/sitemap_index.xml
Sitemap: https://adrianfreed.com/sitemap.xml

# Crawl-delay (optional - uncomment if needed)
# Crawl-delay: 10