feat: add robots.txt
SQUASH: use EndBug/add-and-commit
SQUASH: document cron time, more descriptive job name
parent 6e218b38df
commit 5af2e66ab7
34  .github/workflows/robots.txt.yml  vendored  Normal file
@@ -0,0 +1,34 @@
name: "Update robots.txt"
on:
  schedule:
    - cron: "0 0 * * 6" # At 00:00 on Saturday
  workflow_dispatch:

jobs:
  update:
    name: "Update robots.txt"

    runs-on: ubuntu-latest

    permissions:
      contents: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Pull latest robots
        run: |
          curl -X POST https://api.darkvisitors.com/robots-txts \
            -H "Authorization: Bearer ${{ secrets.DarkVisitorsBearer }}" \
            -H "Content-Type: application/json" \
            --data-raw '{"agent_types": ["AI Assistant", "AI Data Scraper", "AI Search Crawler", "Undocumented AI Agent"]}' \
            --output ./public/robots.txt

      - name: Commit changes
        uses: EndBug/add-and-commit@v9
        with:
          message: "chore: generate robots.txt"
          default_author: github_actions
          push: true
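The same Dark Visitors request can be run by hand to preview the generated output before the scheduled job fires; a minimal sketch, assuming the bearer token is exported in a shell variable named DARKVISITORS_TOKEN (a placeholder name for local use only; the workflow itself reads secrets.DarkVisitorsBearer):

    # Preview the robots.txt the workflow would fetch, printing to stdout
    # instead of writing ./public/robots.txt
    export DARKVISITORS_TOKEN="<your token>"   # placeholder, not part of the workflow
    curl -X POST https://api.darkvisitors.com/robots-txts \
      -H "Authorization: Bearer ${DARKVISITORS_TOKEN}" \
      -H "Content-Type: application/json" \
      --data-raw '{"agent_types": ["AI Assistant", "AI Data Scraper", "AI Search Crawler", "Undocumented AI Agent"]}'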
95  public/robots.txt  Normal file
@@ -0,0 +1,95 @@
# AI Search Crawler
# https://darkvisitors.com/agents/amazonbot

User-agent: Amazonbot
Disallow: /

# Undocumented AI Agent
# https://darkvisitors.com/agents/anthropic-ai

User-agent: anthropic-ai
Disallow: /

# AI Search Crawler
# https://darkvisitors.com/agents/applebot

User-agent: Applebot
Disallow: /

# AI Data Scraper
# https://darkvisitors.com/agents/bytespider

User-agent: Bytespider
Disallow: /

# AI Data Scraper
# https://darkvisitors.com/agents/ccbot

User-agent: CCBot
Disallow: /

# AI Assistant
# https://darkvisitors.com/agents/chatgpt-user

User-agent: ChatGPT-User
Disallow: /

# Undocumented AI Agent
# https://darkvisitors.com/agents/claude-web

User-agent: Claude-Web
Disallow: /

# Undocumented AI Agent
# https://darkvisitors.com/agents/claudebot

User-agent: ClaudeBot
Disallow: /

# Undocumented AI Agent
# https://darkvisitors.com/agents/cohere-ai

User-agent: cohere-ai
Disallow: /

# AI Data Scraper
# https://darkvisitors.com/agents/diffbot

User-agent: Diffbot
Disallow: /

# AI Data Scraper
# https://darkvisitors.com/agents/facebookbot

User-agent: FacebookBot
Disallow: /

# AI Data Scraper
# https://darkvisitors.com/agents/google-extended

User-agent: Google-Extended
Disallow: /

# AI Data Scraper
# https://darkvisitors.com/agents/gptbot

User-agent: GPTBot
Disallow: /

# AI Data Scraper
# https://darkvisitors.com/agents/omgili

User-agent: omgili
Disallow: /

# AI Search Crawler
# https://darkvisitors.com/agents/perplexitybot

User-agent: PerplexityBot
Disallow: /

# AI Search Crawler
# https://darkvisitors.com/agents/youbot

User-agent: YouBot
Disallow: /
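After the workflow has committed a new public/robots.txt, a quick way to confirm a given crawler ended up blocked is to grep the generated file; a small sketch using GPTBot, one of the agents listed above:

    # Print the rule block for GPTBot; the expected output is the two commented lines
    grep -A 1 "User-agent: GPTBot" public/robots.txt
    # User-agent: GPTBot
    # Disallow: /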