From a08bbfba75eb4b7610ab5e595f485433d26c42b2 Mon Sep 17 00:00:00 2001
From: Kwankyu Lee
Date: Sun, 22 Sep 2024 14:19:50 +0900
Subject: [PATCH] Dynamically create robots.txt

---
 .github/workflows/doc-build.yml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/.github/workflows/doc-build.yml b/.github/workflows/doc-build.yml
index 86418ff1f1f..9deab60f8be 100644
--- a/.github/workflows/doc-build.yml
+++ b/.github/workflows/doc-build.yml
@@ -213,6 +213,9 @@ jobs:
             (cd doc && mv .git ../git && mv .gitattributes ../gitattributes)
             mv CHANGES.html doc
           fi
+          # Create the robots.txt file to discourage web crawlers from indexing doc preview webpages
+          echo "User-agent: *" > doc/robots.txt
+          echo "Disallow: /" >> doc/robots.txt
           # Zip everything for increased performance
           zip -r doc.zip doc