-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathrobots.txt
47 lines (35 loc) · 1009 Bytes
/
robots.txt
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
Sitemap: http://joshfrankel.me/sitemap.xml
# The Common Crawl dataset — an original training-data source for GPT and others.
User-agent: CCBot
Disallow: /
# The user agent from img2dataset's documentation example; note its user-agent token defaults to *None*, so it may not identify itself.
User-agent: img2dataset
Disallow: /
# GPTBot is OpenAI's web crawler
User-agent: GPTBot
Disallow: /
# ChatGPT-User takes direct actions on behalf of ChatGPT users
User-agent: ChatGPT-User
Disallow: /
# Google's Bard and Vertex AI generative APIs
User-agent: Google-Extended
Disallow: /
# Speculative blocks for Anthropic
User-agent: anthropic-ai
Disallow: /
User-agent: Claude-Web
Disallow: /
# webz.io - they sell data for training LLMs.
User-agent: Omgilibot
Disallow: /
User-agent: Omgili
Disallow: /
# Meta's bot that crawls public web pages to improve language models
User-agent: FacebookBot
Disallow: /
# ByteDance's bot used to gather data for their LLMs, including Doubao.
User-agent: Bytespider
Disallow: /
# Brandwatch - "AI to discover new trends"
User-agent: magpie-crawler
Disallow: /