# All Social Trends - Robots.txt
# This file tells search engines which pages to crawl

User-agent: *
Allow: /
Disallow: /admin
Disallow: /api/

# Sitemaps
Sitemap: https://allsocialtrends.com/sitemap.xml

# Crawl-delay for all bots (helps prevent server overload)
Crawl-delay: 1

# Specific rules for major search engines
User-agent: Googlebot
Allow: /
Crawl-delay: 0

User-agent: Bingbot
Allow: /
Crawl-delay: 0

User-agent: Slurp
Allow: /
Crawl-delay: 1

# Block bad bots
User-agent: AhrefsBot
Disallow: /

User-agent: MJ12bot
Disallow: /

User-agent: SemrushBot
Crawl-delay: 10