# Specific domain robots
#robots.txt
User-agent: *
#Folder
#Disallow: /it/user/
#Disallow: /en/user/
#Disallow: /user/
#User-agent: msnbot
#Crawl-delay: 10
#User-agent: bingbot
#Crawl-delay: 10
#User-agent: Yandex
#Disallow: /
#User-agent: Baiduspider
#Disallow: /
#Sitemap: http://www.documenti-privacy.it/sitemap.xml
# Common robots