Skip to content

Commit

Permalink
📦 NEW: Added --robots flag with public (default) and private options
Browse files Browse the repository at this point in the history
  • Loading branch information
robertdevore committed Jan 6, 2025
1 parent eda3889 commit 121ff7c
Showing 1 changed file with 35 additions and 0 deletions.
35 changes: 35 additions & 0 deletions stattic.py
Original file line number Diff line number Diff line change
Expand Up @@ -954,6 +954,37 @@ def format_xml_sitemap_entry(self, url, lastmod):
</url>
'''

def generate_robots_txt(self, mode="public"):
    """Generate a robots.txt file in the output directory.

    Args:
        mode (str): "public" (default) writes an allow-all robots.txt
            that also advertises the sitemap — this requires
            ``self.site_url`` to be set, otherwise creation is skipped.
            "private" writes a disallow-all robots.txt. Any other value
            logs a warning and writes nothing.
    """
    try:
        # Prepare robots.txt content based on mode
        if mode == "private":
            robots_txt_content = "User-agent: *\nDisallow: /"
            self.logger.info("Generated private robots.txt (Disallow all)")
        elif mode == "public":
            if self.site_url:  # Only generate the public robots.txt if site_url is provided
                robots_txt_content = f"""User-agent: *
Allow: /
# Sitemap URL
Sitemap: {self.site_url.rstrip('/')}/sitemap.xml
"""
                self.logger.info("Generated public robots.txt (Allow all)")
            else:
                self.logger.warning("Public robots.txt requires site_url. Skipping creation.")
                return
        else:
            self.logger.warning(f"Unknown robots.txt mode '{mode}'. Skipping creation.")
            return

        # Write robots.txt to the output directory.
        # Explicit UTF-8 avoids writing with a locale-dependent default encoding.
        robots_txt_path = os.path.join(self.output_dir, 'robots.txt')
        with open(robots_txt_path, 'w', encoding='utf-8') as robots_file:
            robots_file.write(robots_txt_content)
        self.logger.info(f"robots.txt written to {robots_txt_path}")
    except Exception as e:
        # Broad catch is deliberate: robots.txt is non-essential, so a
        # failure here should not abort the whole site build.
        self.logger.error(f"Failed to generate robots.txt: {e}")

def build_404_page(self):
"""Build and generate the 404 error page for GitHub Pages."""
try:
Expand Down Expand Up @@ -989,6 +1020,9 @@ def build(self):
# Build the 404 page
self.build_404_page()

# Generate robots.txt based on the flag
self.generate_robots_txt(mode=getattr(args, 'robots', 'public'))

# Minify assets if --minify is enabled
if getattr(args, 'minify', False):
self.minify_assets()
Expand Down Expand Up @@ -1017,6 +1051,7 @@ def resolve_output_path(output_dir):
# CLI flags controlling build behavior.
parser.add_argument('--site-url', type=str, help='Specify the site URL for production builds')
parser.add_argument('--watch', action='store_true', help='Enable watch mode to automatically rebuild on file changes')
parser.add_argument('--minify', action='store_true', help='Minify CSS and JS into single files')
parser.add_argument(
    '--robots',
    type=str,
    choices=['public', 'private'],
    default='public',
    help="Generate a public or private robots.txt file (default: public)",
)

args = parser.parse_args()

Expand Down

0 comments on commit 121ff7c

Please sign in to comment.