# Disallow spiders by default
User-agent: *
Disallow: /secure/
Disallow: /thehearingjournal/Pages/blogs.aspx
# Add Crawl-delay parameter for those crawlers that support it
Crawl-delay: 5

# Allow friendly spiders
# An empty "Disallow:" means don't disallow anything, so all can be crawled. Same as "Allow: /" but better supported.
User-agent: Googlebot
Disallow: /thehearingjournal/Pages/blogs.aspx

User-agent: Twitterbot
Disallow: /thehearingjournal/Pages/blogs.aspx

User-agent: Mediapartners-Google
Disallow: /thehearingjournal/Pages/blogs.aspx

User-agent: AdsBot-Google
Disallow: /thehearingjournal/Pages/blogs.aspx

User-agent: Googlebot-Image
Disallow: /thehearingjournal/Pages/blogs.aspx

User-agent: Googlebot-Mobile
Disallow: /thehearingjournal/Pages/blogs.aspx

User-agent: MSNBot
Crawl-delay: 1
Disallow: /thehearingjournal/Pages/blogs.aspx

# Yahoo
User-agent: Slurp
Disallow: /thehearingjournal/Pages/blogs.aspx

# Baidu (Chinese search engine)
User-agent: baiduspider
Disallow: /thehearingjournal/Pages/blogs.aspx

User-agent: PicoSearch/1.0
Disallow: /thehearingjournal/Pages/blogs.aspx

# Ask.com
User-agent: Teoma
Disallow: /thehearingjournal/Pages/blogs.aspx

# Gigablast.com
User-agent: Gigabot
Disallow: /thehearingjournal/Pages/blogs.aspx

# Scrub The Web
User-agent: Scrubby
Disallow: /thehearingjournal/Pages/blogs.aspx

# DMOZ
User-agent: Robozilla
Disallow: /thehearingjournal/Pages/blogs.aspx

# PRS defined crawler
User-agent: gsa-crawler
Disallow: /thehearingjournal/Pages/blogs.aspx

# ASPS/PSEN defined crawler
User-agent: portal-crawler
Disallow: /thehearingjournal/Pages/blogs.aspx

User-agent: Googlebot-News
Allow: /oncology-times
Disallow: /thehearingjournal/Pages/blogs.aspx