# robots.txt file
#
# This file's purpose is to stop robots from getting to potentially spammy wiki
# pages which are not linked into the main body of pages.

User-agent: *
Disallow: /wiki/RecentChanges
Disallow: /wiki/TitleIndex
Disallow: /wiki/WordIndex
Disallow: /wiki/FindPage
Disallow: /wiki/WantedPages
Disallow: /wiki/OrphanedPages
Disallow: /wiki/AbandonedPages
Disallow: /wiki/RandomPage
Disallow: /wiki/PageSize
Disallow: /wiki/PageHits

# Spammy pages typically contain URLs referenced by escape codes (e.g. %ee)
Disallow: /wiki/*%

# And disallow any URLs containing query strings.
Disallow: /wiki/*?

# Don't crawl local documentation. We don't want the hits.
Disallow: /doc/
Disallow: /fpdocs/
Disallow: /forums/