Skip to content
This repository has been archived by the owner on Jul 24, 2023. It is now read-only.

Commit

Permalink
feat(SEO): generate robots.txt onPostBuild
Browse files Browse the repository at this point in the history
robots.txt will be generated onPostBuild depending on the environment, defaulting to disallow everything except in production
remove static/robots.txt
  • Loading branch information
CanRau committed Oct 6, 2018
1 parent 3350c19 commit 0f4a6cb
Show file tree
Hide file tree
Showing 2 changed files with 5 additions and 5 deletions.
9 changes: 5 additions & 4 deletions gatsby-node.js
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ const { redirects } = require(`./redirects.js`)
// })

// const isDev = process.env.NODE_ENV === `development`
// True only for production builds; used below to decide whether robots.txt allows crawling.
const isProduction = process.env.NODE_ENV === `production`
// Absolute path to Gatsby's build output directory, where generated files are written.
const publicDir = join(__dirname, `public`)
// Accumulator for feed output (dir plus atom/rss/json bodies), written out in onPostBuild.
const feeds = {}

Expand Down Expand Up @@ -401,16 +402,16 @@ exports.onPostBuild = () => {
redirects.push(`/en/* /en/404/?url=:splat 404`)
redirects.push(`/de/* /de/404/?url=:splat 404`)

if (redirects.length) {
writeFileSync(join(publicDir, `_redirects`), redirects.join(`\n`))
}

if (feeds.dir) {
mkDir.sync(feeds.dir)
writeFileSync(join(feeds.dir, `atom.xml`), feeds.atom)
writeFileSync(join(feeds.dir, `rss.xml`), feeds.rss)
writeFileSync(join(feeds.dir, `feed.json`), feeds.json)
}

// add robots.txt to site root
const robotsTxt = `User-agent: *\nDisallow:${isProduction ? `` : ` /`}`
writeFileSync(join(publicDir, `robots.txt`), robotsTxt)
}

/**
Expand Down
1 change: 0 additions & 1 deletion static/robots.txt

This file was deleted.

0 comments on commit 0f4a6cb

Please sign in to comment.