Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add fields to disable sitemap.xml and robots.txt generation #2579

Merged
merged 6 commits into from
Jul 25, 2024
Merged
Show file tree
Hide file tree
Changes from 4 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
74 changes: 74 additions & 0 deletions components/config/src/config/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -98,6 +98,10 @@ pub struct Config {
pub markdown: markup::Markdown,
/// All user params set in `[extra]` in the config
pub extra: HashMap<String, Toml>,
/// Whether to generate a sitemap.xml for the site (defaults to true)
pub generate_sitemap: bool,
/// Whether to generate a robots.txt for the site (defaults to true)
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Need to update the comments

pub generate_robots_txt: bool,
}

#[derive(Serialize)]
Expand All @@ -117,6 +121,8 @@ pub struct SerializedConfig<'a> {
extra: &'a HashMap<String, Toml>,
markdown: &'a markup::Markdown,
search: search::SerializedSearch<'a>,
generate_sitemap: bool,
generate_robots_txt: bool,
}

impl Config {
Expand Down Expand Up @@ -332,6 +338,8 @@ impl Config {
extra: &self.extra,
markdown: &self.markdown,
search: self.search.serialize(),
generate_sitemap: self.generate_sitemap,
generate_robots_txt: self.generate_robots_txt,
}
}
}
Expand Down Expand Up @@ -395,6 +403,8 @@ impl Default for Config {
search: search::Search::default(),
markdown: markup::Markdown::default(),
extra: HashMap::new(),
generate_sitemap: true,
generate_robots_txt: true,
}
}
}
Expand Down Expand Up @@ -992,4 +1002,68 @@ feed_filename = "test.xml"

Config::parse(config).unwrap();
}

#[test]
fn parse_generate_sitemap_true() {
    // Explicitly opting in to sitemap generation must round-trip
    // through parsing onto the config struct.
    let raw = r#"
title = "My Site"
base_url = "example.com"
generate_sitemap = true
"#;
    let parsed = Config::parse(raw).unwrap();
    assert!(parsed.generate_sitemap);
}

#[test]
fn parse_generate_sitemap_false() {
    // Explicitly opting out of sitemap generation must round-trip
    // through parsing onto the config struct.
    let raw = r#"
title = "My Site"
base_url = "example.com"
generate_sitemap = false
"#;
    let parsed = Config::parse(raw).unwrap();
    assert!(!parsed.generate_sitemap);
}

#[test]
fn generate_sitemap_defaults_to_true() {
    // When `generate_sitemap` is absent from the config file, the field
    // must default to `true` so existing sites keep producing a sitemap.
    // (Renamed from `default_no_sitemap_true`, which read as the opposite
    // of what is asserted here.)
    let config = r#"
title = "My Site"
base_url = "example.com"
"#;
    let config = Config::parse(config).unwrap();
    assert!(config.generate_sitemap);
}

#[test]
fn parse_generate_robots_true() {
    // Explicitly opting in to robots.txt generation must round-trip
    // through parsing onto the config struct.
    let raw = r#"
title = "My Site"
base_url = "example.com"
generate_robots_txt = true
"#;
    let parsed = Config::parse(raw).unwrap();
    assert!(parsed.generate_robots_txt);
}

#[test]
fn parse_generate_robots_false() {
    // Explicitly opting out of robots.txt generation must round-trip
    // through parsing onto the config struct.
    let raw = r#"
title = "My Site"
base_url = "example.com"
generate_robots_txt = false
"#;
    let parsed = Config::parse(raw).unwrap();
    assert!(!parsed.generate_robots_txt);
}

#[test]
fn generate_robots_txt_defaults_to_true() {
    // When `generate_robots_txt` is absent from the config file, the field
    // must default to `true` so existing sites keep producing a robots.txt.
    // (Renamed from `default_no_robots_true`, which read as the opposite
    // of what is asserted here.)
    let config = r#"
title = "My Site"
base_url = "example.com"
"#;
    let config = Config::parse(config).unwrap();
    assert!(config.generate_robots_txt);
}
}
12 changes: 8 additions & 4 deletions components/site/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -742,8 +742,10 @@ impl Site {
start = log_time(start, "Rendered sections");
self.render_orphan_pages()?;
start = log_time(start, "Rendered orphan pages");
self.render_sitemap()?;
start = log_time(start, "Rendered sitemap");
if self.config.generate_sitemap {
self.render_sitemap()?;
start = log_time(start, "Rendered sitemap");
}

let library = self.library.read().unwrap();
if self.config.generate_feeds {
Expand All @@ -769,8 +771,10 @@ impl Site {
start = log_time(start, "Rendered themes css");
self.render_404()?;
start = log_time(start, "Rendered 404");
self.render_robots()?;
start = log_time(start, "Rendered robots.txt");
if self.config.generate_robots_txt {
self.render_robots()?;
start = log_time(start, "Rendered robots.txt");
}
self.render_taxonomies()?;
start = log_time(start, "Rendered taxonomies");
// We process images at the end as we might have picked up images to process from markdown
Expand Down
6 changes: 6 additions & 0 deletions docs/content/documentation/getting-started/configuration.md
Original file line number Diff line number Diff line change
Expand Up @@ -100,6 +100,12 @@ taxonomies = []
# content for `default_language`.
build_search_index = false

# When set to "false", sitemap.xml is not generated (default: true)
generate_sitemap = true

# When set to "false", robots.txt is not generated (default: true)
generate_robots_txt = true
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We use the default in the docs so set them to true

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

My bad, I didn't look carefully at the docs. It should be correct by now.


# Configuration of the Markdown rendering
[markdown]
# When set to "true", all code blocks are highlighted.
Expand Down