From 93e2b45fd5fca966ec477b7ec80173746010e064 Mon Sep 17 00:00:00 2001
From: Martin Vassor
Date: Fri, 19 Jul 2024 11:19:06 +0100
Subject: [PATCH] Add feature to disable robots.txt and sitemap.xml from the
 config file.

Addresses feature request #2248
---
 components/config/src/config/mod.rs | 41 +++++++++++++++++++++++++++++
 components/site/src/lib.rs          | 12 ++++++---
 2 files changed, 49 insertions(+), 4 deletions(-)

diff --git a/components/config/src/config/mod.rs b/components/config/src/config/mod.rs
index 57ff1a805..467eabec1 100644
--- a/components/config/src/config/mod.rs
+++ b/components/config/src/config/mod.rs
@@ -98,6 +98,10 @@ pub struct Config {
     pub markdown: markup::Markdown,
     /// All user params set in `[extra]` in the config
     pub extra: HashMap<String, Toml>,
+    /// Disable the generation of sitemap.xml
+    pub no_sitemap: bool,
+    /// Disable the generation of robots.txt
+    pub no_robots: bool,
 }
 
 #[derive(Serialize)]
@@ -117,6 +121,8 @@ pub struct SerializedConfig<'a> {
     extra: &'a HashMap<String, Toml>,
     markdown: &'a markup::Markdown,
     search: search::SerializedSearch<'a>,
+    no_sitemap: bool,
+    no_robots: bool,
 }
 
 impl Config {
@@ -332,6 +338,8 @@ impl Config {
             extra: &self.extra,
             markdown: &self.markdown,
             search: self.search.serialize(),
+            no_sitemap: self.no_sitemap,
+            no_robots: self.no_robots,
         }
     }
 }
@@ -395,6 +403,8 @@ impl Default for Config {
             search: search::Search::default(),
             markdown: markup::Markdown::default(),
             extra: HashMap::new(),
+            no_sitemap: false,
+            no_robots: false,
         }
     }
 }
@@ -992,4 +1002,35 @@ feed_filename = "test.xml"
 
         Config::parse(config).unwrap();
     }
+
+    #[test]
+    fn parse_no_sitemap() {
+        let config = r#"
+title = "My Site"
+base_url = "example.com"
+no_sitemap = true
+"#;
+        let config = Config::parse(config).unwrap();
+        assert!(config.no_sitemap);
+    }
+
+    #[test]
+    fn default_no_sitemap_false() {
+        let config = r#"
+title = "My Site"
+base_url = "example.com"
+"#;
+        let config = Config::parse(config).unwrap();
+        assert!(!config.no_sitemap);
+    }
+
+    #[test]
+    fn default_no_robots_false() {
+        let config = r#"
+title = "My Site"
+base_url = "example.com"
+"#;
+        let config = Config::parse(config).unwrap();
+        assert!(!config.no_robots);
+    }
 }
diff --git a/components/site/src/lib.rs b/components/site/src/lib.rs
index 46c62c959..27a332dc3 100644
--- a/components/site/src/lib.rs
+++ b/components/site/src/lib.rs
@@ -742,8 +742,10 @@ impl Site {
         start = log_time(start, "Rendered sections");
         self.render_orphan_pages()?;
         start = log_time(start, "Rendered orphan pages");
-        self.render_sitemap()?;
-        start = log_time(start, "Rendered sitemap");
+        if !self.config.no_sitemap {
+            self.render_sitemap()?;
+            start = log_time(start, "Rendered sitemap");
+        }
 
         let library = self.library.read().unwrap();
         if self.config.generate_feeds {
@@ -769,8 +771,10 @@ impl Site {
         start = log_time(start, "Rendered themes css");
         self.render_404()?;
         start = log_time(start, "Rendered 404");
-        self.render_robots()?;
-        start = log_time(start, "Rendered robots.txt");
+        if !self.config.no_robots {
+            self.render_robots()?;
+            start = log_time(start, "Rendered robots.txt");
+        }
         self.render_taxonomies()?;
         start = log_time(start, "Rendered taxonomies");
         // We process images at the end as we might have picked up images to process from markdown
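
For context on how the new options would be used once this patch lands: both flags sit at the top level of config.toml and default to false, so existing sites keep generating sitemap.xml and robots.txt unchanged. A minimal sketch of a site config using them (title and base_url are placeholder values, matching the ones in the tests above):

    # config.toml
    title = "My Site"
    base_url = "https://example.com"

    # Added by this patch; omit either line to keep the default (false).
    no_sitemap = true   # the build skips rendering sitemap.xml
    no_robots = true    # the build skips rendering robots.txt

With both flags set, a build renders everything else exactly as before and simply skips the two files.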