# Default robots.txt file
# Last updated: 09.05.05
# Generated by: Bloghosts
# The below file is in place by default to block bad robots
# from indexing your site while allowing good robots to browse
# freely. Remove the below entries at your own risk.

User-agent: digout4u
User-agent: extractorpro
User-agent: GetRight
User-agent: go-ahead-got-it
User-agent: grub
User-agent: HTTPClient
User-agent: LinkWalker
User-agent: nearsite
User-agent: netattache
User-agent: NEWT ActiveX
User-agent: sitesnagger
User-agent: teleport
User-agent: TovekTools Web Indexer
User-agent: UbiCrawler
User-agent: Web Downloader
User-agent: WebTrends
User-agent: webwhacker
User-agent: webzip
Disallow: /

# Disallow directory /secure/
User-agent: *
Disallow: /secure/