Skip to content

Commit

Permalink
Add fields to disable sitemap.xml and robots.txt generation (#2579)
Browse files Browse the repository at this point in the history
* Add feature to disable robots.txt and sitemap.xml from the config file.

Addresses feature request #2248

* Add documentation for no_sitemap & no_robots

* Rename no_robots and no_sitemap into generate_robots_txt and generate_sitemap (default to true)

* fix rustfmt issues

* Change documentation to show defaults

* Update documentation for the fields generate_sitemap (resp. generate_robots_txt), now that the default is true and false is needed to disable
  • Loading branch information
Bromind authored and Keats committed Aug 15, 2024
1 parent 8a26da7 commit b3fc3f4
Show file tree
Hide file tree
Showing 3 changed files with 88 additions and 4 deletions.
74 changes: 74 additions & 0 deletions components/config/src/config/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -98,6 +98,10 @@ pub struct Config {
pub markdown: markup::Markdown,
/// All user params set in `[extra]` in the config
pub extra: HashMap<String, Toml>,
/// Enables the generation of sitemap.xml
pub generate_sitemap: bool,
/// Enables the generation of robots.txt
pub generate_robots_txt: bool,
}

#[derive(Serialize)]
Expand All @@ -117,6 +121,8 @@ pub struct SerializedConfig<'a> {
extra: &'a HashMap<String, Toml>,
markdown: &'a markup::Markdown,
search: search::SerializedSearch<'a>,
generate_sitemap: bool,
generate_robots_txt: bool,
}

impl Config {
Expand Down Expand Up @@ -332,6 +338,8 @@ impl Config {
extra: &self.extra,
markdown: &self.markdown,
search: self.search.serialize(),
generate_sitemap: self.generate_sitemap,
generate_robots_txt: self.generate_robots_txt,
}
}
}
Expand Down Expand Up @@ -395,6 +403,8 @@ impl Default for Config {
search: search::Search::default(),
markdown: markup::Markdown::default(),
extra: HashMap::new(),
generate_sitemap: true,
generate_robots_txt: true,
}
}
}
Expand Down Expand Up @@ -992,4 +1002,68 @@ feed_filename = "test.xml"

Config::parse(config).unwrap();
}

#[test]
fn parse_generate_sitemap_true() {
    // Explicitly opting in to sitemap generation must be reflected on the parsed config.
    let raw = r#"
title = "My Site"
base_url = "example.com"
generate_sitemap = true
"#;
    let parsed = Config::parse(raw).unwrap();
    assert!(parsed.generate_sitemap);
}

#[test]
fn parse_generate_sitemap_false() {
    // Setting `generate_sitemap = false` must disable the flag on the parsed config.
    let raw = r#"
title = "My Site"
base_url = "example.com"
generate_sitemap = false
"#;
    let parsed = Config::parse(raw).unwrap();
    assert!(!parsed.generate_sitemap);
}

#[test]
// Renamed from `default_no_sitemap_true`: the option was renamed from
// `no_sitemap` to `generate_sitemap` in this change, so the test name
// should follow the current field name.
fn default_generate_sitemap_true() {
    // When the config omits `generate_sitemap`, it must default to true
    // (sitemap generation stays on for existing sites).
    let config = r#"
title = "My Site"
base_url = "example.com"
"#;
    let config = Config::parse(config).unwrap();
    assert!(config.generate_sitemap);
}

#[test]
fn parse_generate_robots_true() {
    // Explicitly opting in to robots.txt generation must be reflected on the parsed config.
    let raw = r#"
title = "My Site"
base_url = "example.com"
generate_robots_txt = true
"#;
    let parsed = Config::parse(raw).unwrap();
    assert!(parsed.generate_robots_txt);
}

#[test]
fn parse_generate_robots_false() {
    // Setting `generate_robots_txt = false` must disable the flag on the parsed config.
    let raw = r#"
title = "My Site"
base_url = "example.com"
generate_robots_txt = false
"#;
    let parsed = Config::parse(raw).unwrap();
    assert!(!parsed.generate_robots_txt);
}

#[test]
// Renamed from `default_no_robots_true`: the option was renamed from
// `no_robots` to `generate_robots_txt` in this change, so the test name
// should follow the current field name.
fn default_generate_robots_txt_true() {
    // When the config omits `generate_robots_txt`, it must default to true
    // (robots.txt generation stays on for existing sites).
    let config = r#"
title = "My Site"
base_url = "example.com"
"#;
    let config = Config::parse(config).unwrap();
    assert!(config.generate_robots_txt);
}
}
12 changes: 8 additions & 4 deletions components/site/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -742,8 +742,10 @@ impl Site {
start = log_time(start, "Rendered sections");
self.render_orphan_pages()?;
start = log_time(start, "Rendered orphan pages");
self.render_sitemap()?;
start = log_time(start, "Rendered sitemap");
if self.config.generate_sitemap {
self.render_sitemap()?;
start = log_time(start, "Rendered sitemap");
}

let library = self.library.read().unwrap();
if self.config.generate_feeds {
Expand All @@ -769,8 +771,10 @@ impl Site {
start = log_time(start, "Rendered themes css");
self.render_404()?;
start = log_time(start, "Rendered 404");
self.render_robots()?;
start = log_time(start, "Rendered robots.txt");
if self.config.generate_robots_txt {
self.render_robots()?;
start = log_time(start, "Rendered robots.txt");
}
self.render_taxonomies()?;
start = log_time(start, "Rendered taxonomies");
// We process images at the end as we might have picked up images to process from markdown
Expand Down
6 changes: 6 additions & 0 deletions docs/content/documentation/getting-started/configuration.md
Original file line number Diff line number Diff line change
Expand Up @@ -100,6 +100,12 @@ taxonomies = []
# content for `default_language`.
build_search_index = false

# When set to "false", sitemap.xml is not generated
generate_sitemap = true

# When set to "false", robots.txt is not generated
generate_robots_txt = true

# Configuration of the Markdown rendering
[markdown]
# When set to "true", all code blocks are highlighted.
Expand Down

0 comments on commit b3fc3f4

Please sign in to comment.