A Nuxt.js module that injects a middleware to generate a robots.txt file
- Nuxt 3 and Nuxt Bridge support
- Generate `robots.txt` for static mode
- Add middleware for `robots.txt`
- Add the `@nuxtjs/robots` dependency to your project:
  `yarn add @nuxtjs/robots` (or `npm install @nuxtjs/robots`)
- Add `@nuxtjs/robots` to the `modules` section of `nuxt.config.js`:
export default {
modules: [
// Simple usage
'@nuxtjs/robots',
// With options
['@nuxtjs/robots', { /* module options */ }]
]
}

Alternatively, pass the options via the top-level `robots` key:

export default {
modules: [
'@nuxtjs/robots'
],
robots: {
/* module options */
}
}

configPath
- Type: `String`
- Default: `robots.config`
- Type: `Object` | `Array`
- Default:
{
UserAgent: '*',
Disallow: ''
}

If you need to use a function in any rule, you need to create a config file through the `configPath` option:
export default [
{ UserAgent: '*' },
{ Disallow: '/' },
{ BlankLine: true },
{ Comment: 'Comment here' },
// Be aware that this will NOT work on target: 'static' mode
{ Sitemap: (req) => `https://${req.headers.host}/sitemap.xml` }
]

Output:
User-agent: *
Disallow: /
# Comment here
Sitemap: https://robots.nuxtjs.org/sitemap.xml

Supported keys and the directives they produce:
- UserAgent = `User-agent`
- CrawlDelay = `Crawl-delay`
- Disallow = `Disallow`
- Allow = `Allow`
- Host = `Host`
- Sitemap = `Sitemap`
- CleanParam = `Clean-param`
- Comment = `# Comment`
- BlankLine = adds a blank line
Note: keys are parsed case-insensitively, and special characters are handled automatically.
You can contribute to this module online with CodeSandBox:
Or locally:
- Clone this repository
- Install dependencies using `pnpm install`
- Prepare the development server using `pnpm dev:prepare`
- Build the module using `pnpm build`
- Launch the playground using `pnpm dev`
Copyright (c) - Nuxt Community