Commit cc4f89e (parent 83c4229)

feat: add robots.txt

File tree: 2 files changed (11 additions, 1 deletion)


src/main.rs (2 additions, 1 deletion)
@@ -5,7 +5,7 @@ async fn main() {
 use blog::app::{self, shell};
 use blog::ssr::app_state::AppState;
 use blog::ssr::redirect::redirect_www;
-use blog::ssr::server_utils::{connect, rss_handler, sitemap_handler};
+use blog::ssr::server_utils::{connect, robots_handler, rss_handler, sitemap_handler};
 use dotenvy::dotenv;
 use leptos::logging;
 use leptos::prelude::*;
@@ -58,6 +58,7 @@ async fn main() {
         )
         .route("/rss.xml", get(rss_handler))
         .route("/sitemap.xml", get(sitemap_handler))
+        .route("/robots.txt", get(robots_handler))
         .layer(
             tower::ServiceBuilder::new()
                 .layer(TraceLayer::new_for_http())
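
For reference, a minimal self-contained sketch of serving a robots.txt route with axum, in the same spirit as the wiring above. This is an illustration, not code from the repository: it assumes axum 0.7 and the tokio runtime, and robots_txt plus the bind address are made-up names/values.

    use axum::{routing::get, Router};

    // Illustrative stand-in for the crate's robots_handler; a &'static str
    // return value is served as text/plain by axum's IntoResponse impl.
    async fn robots_txt() -> &'static str {
        "User-agent: *\nAllow: /\n"
    }

    #[tokio::main]
    async fn main() {
        // Register the handler on /robots.txt, mirroring the route added above.
        let app = Router::new().route("/robots.txt", get(robots_txt));
        let listener = tokio::net::TcpListener::bind("0.0.0.0:3000").await.unwrap();
        axum::serve(listener, app).await.unwrap();
    }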

src/ssr/server_utils.rs (9 additions, 0 deletions)
@@ -332,3 +332,12 @@ pub async fn sitemap_handler(State(state): State<AppState>) -> Response<String>
         .body(sitemap)
         .unwrap()
 }
+
+pub async fn robots_handler() -> Response<String> {
+    let mut robots = String::new();
+    robots.push_str("User-agent: *\nDisallow:\n\nAllow: /\n\nSitemap: https://rust-dd.com/sitemap.xml\n");
+    Response::builder()
+        .header("Content-Type", "text/plain")
+        .body(robots)
+        .unwrap()
+}
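
With this handler registered, a GET to /robots.txt responds with Content-Type: text/plain and the following body (exactly the string literal pushed above):

    User-agent: *
    Disallow:

    Allow: /

    Sitemap: https://rust-dd.com/sitemap.xml

The empty Disallow: and the explicit Allow: / both signal to crawlers that nothing is off-limits, and the Sitemap directive points them at the sitemap.xml route already served by sitemap_handler.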
