chore: format robots.txt.yml with prettier (#21)

This commit is contained in:
~hedy 2024-05-08 12:29:23 +08:00 committed by GitHub
parent 16a33cb38f
commit 9843063ad4
Failed to generate hash of commit

View file

@@ -1,34 +1,33 @@
# Workflow: regenerate public/robots.txt from the Dark Visitors API.
# Runs weekly (Saturday 00:00 UTC) and on demand via workflow_dispatch.
name: "Update robots.txt"
on:
  schedule:
    - cron: "0 0 * * 6" # At 00:00 on Saturday
  workflow_dispatch:

jobs:
  update:
    name: "Update robots.txt"
    runs-on: ubuntu-latest
    # Needs write access so the commit step below can push the updated file.
    permissions:
      contents: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      # Fetch a generated robots.txt blocking the listed AI agent categories.
      # NOTE(review): requires the DarkVisitorsBearer secret to be configured;
      # if it is missing, curl still succeeds and writes an error body — confirm
      # the API response before relying on the output.
      - name: Pull latest robots
        run: |
          curl -X POST https://api.darkvisitors.com/robots-txts \
            -H "Authorization: Bearer ${{ secrets.DarkVisitorsBearer }}" \
            -H "Content-Type: application/json" \
            --data-raw '{"agent_types": ["AI Assistant", "AI Data Scraper", "AI Search Crawler", "Undocumented AI Agent"]}' \
            --output ./public/robots.txt

      # Commit the regenerated file back to the repository (no-op when unchanged).
      - name: Commit changes
        uses: EndBug/add-and-commit@v9
        with:
          message: "chore: generate robots.txt"
          default_author: github_actions
          push: true