# The following lines list known AI, data-scraping, and plagiarism-checking crawlers.
# They are disallowed to prevent content from being used for AI training and to avoid
# incorrect plagiarism flags on original academic and written work.
User-agent: AnthropicAI
User-agent: TurnitinBot
User-agent: UnicheckBot
User-agent: PlagScan
User-agent: PlagTracker
User-agent: QueText
User-agent: Plagiarisma
User-agent: Copyscape
User-agent: Scribbr-bot
User-agent: DrillBitBot
User-agent: OpenAI
User-agent: Sogou
User-agent: AhrefsBot
User-agent: SemrushBot
User-agent: ia_archiver
User-agent: AI2Bot
User-agent: Ai2Bot-Dolma
User-agent: Amazonbot
User-agent: anthropic-ai
User-agent: Applebot
User-agent: Applebot-Extended
User-agent: Bytespider
User-agent: CCBot
User-agent: ChatGPT-User
User-agent: Claude-Web
User-agent: ClaudeBot
User-agent: cohere-ai
User-agent: cohere-training-data-crawler
User-agent: Crawlspace
User-agent: Diffbot
User-agent: DuckAssistBot
User-agent: FacebookBot
User-agent: FriendlyCrawler
User-agent: Google-Extended
User-agent: GoogleOther
User-agent: GoogleOther-Image
User-agent: GoogleOther-Video
User-agent: GPTBot
User-agent: iaskspider/2.0
User-agent: ICC-Crawler
User-agent: ImagesiftBot
User-agent: img2dataset
User-agent: ISSCyberRiskCrawler
User-agent: Kangaroo Bot
User-agent: Meta-ExternalAgent
User-agent: Meta-ExternalFetcher
User-agent: OAI-SearchBot
User-agent: omgili
User-agent: omgilibot
User-agent: PanguBot
User-agent: PerplexityBot
User-agent: Perplexity-User-Agent
User-agent: PetalBot
User-agent: Scrapy
User-agent: SemrushBot-OCOB
User-agent: SemrushBot-SWA
User-agent: Sidetrade indexer bot
User-agent: Timpibot
User-agent: Seekr
User-agent: VelenPublicWebCrawler
User-agent: Webzio-Extended
User-agent: YouBot
User-agent: yandex
Disallow: /

# The following lines explicitly allow all other user agents to crawl the entire site.
# This is the default behavior, but it's good practice to be explicit.
# The _assets directory is disallowed to prevent indexing of style, script, and image files.
# The Setup directory is also disallowed as it contains build-related files.
User-agent: *
Disallow: /_assets/ # Disallow crawling of the assets directory
Disallow: /Setup/ # Disallow crawling of the setup/build directory