# Default rules for all crawlers: everything is crawlable except /secure/.
# Crawl-delay is included for those crawlers that support it.
User-agent: *
Disallow: /secure/
Crawl-delay: 5

# Allow friendly spiders.
# "Disallow:" means don't disallow anything, so all can be crawled.
# Same as "Allow: /" but better supported.
User-agent: Googlebot
Disallow:

User-agent: Mediapartners-Google
Disallow:

User-agent: Adsbot-Google
Disallow:

User-agent: Googlebot-Image
Disallow:

User-agent: Googlebot-Mobile
Disallow:

User-agent: MSNBot
Disallow:
Crawl-delay: 1

# Yahoo
User-agent: Slurp
Disallow:

# Baidu
User-agent: baiduspider
Disallow:

User-agent: PicoSearch/1.0
Disallow:

# ask.com
User-agent: Teoma
Disallow:

# gigablast.com
User-agent: Gigabot
Disallow:

# scrub the web
User-agent: Scrubby
Disallow:

# DMOZ
User-agent: Robozilla
Disallow:

# PRS defined crawler
User-agent: gsa-crawler
Disallow:

# ASPS/PSEN defined crawler
User-agent: portal-crawler
Disallow:

User-agent: Googlebot-News
Allow: /oncology-times
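
A minimal sketch of how a compliant parser reads these rules, using Python's standard urllib.robotparser (crawl_delay needs Python 3.6+). The inline RULES string is only an abbreviated copy of the default (*) and Googlebot groups above, and "SomeOtherBot" is a hypothetical, unlisted user agent; in practice you would point set_url() at the live /robots.txt instead.

from urllib import robotparser

# Abbreviated copy of the default (*) and Googlebot groups above.
RULES = """\
User-agent: *
Disallow: /secure/
Crawl-delay: 5

User-agent: Googlebot
Disallow:
"""

rp = robotparser.RobotFileParser()
rp.parse(RULES.splitlines())

# An unlisted bot falls back to the * group: /secure/ is blocked, the rest is open.
print(rp.can_fetch("SomeOtherBot", "/secure/"))         # False
print(rp.can_fetch("SomeOtherBot", "/oncology-times"))  # True

# Googlebot matches its own group; the empty Disallow there allows everything.
print(rp.can_fetch("Googlebot", "/secure/"))            # True

# Crawl-delay from the * group applies to any bot without a more specific group.
print(rp.crawl_delay("SomeOtherBot"))                   # 5

Note that a crawler obeys only its most specific matching group, which is why the empty "Disallow:" in the Googlebot group exempts Googlebot from the /secure/ rule in the default group.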