1 | # generate-robotstxt
|
2 |
|
3 | [![NPM version](https://img.shields.io/npm/v/generate-robotstxt.svg)](https://www.npmjs.org/package/generate-robotstxt) [![Travis Build Status](https://img.shields.io/travis/itgalaxy/generate-robotstxt/master.svg?label=build)](https://travis-ci.org/itgalaxy/generate-robotstxt) [![dependencies Status](https://david-dm.org/itgalaxy/generate-robotstxt/status.svg)](https://david-dm.org/itgalaxy/generate-robotstxt) [![devDependencies Status](https://david-dm.org/itgalaxy/generate-robotstxt/dev-status.svg)](https://david-dm.org/itgalaxy/generate-robotstxt?type=dev)
|
4 |
|
5 | Awesome robots.txt generator.
|
6 |
|
7 | ## Installation
|
8 |
|
9 | ```shell
|
10 | npm install generate-robotstxt
|
11 | ```
|
12 |
|
13 | ## Usage
|
14 |
|
15 | ```js
|
16 | const robotstxt = require('generate-robotstxt')
|
17 |
|
18 | // Pass in a config object describing your robots.txt
|
19 | robotstxt({
|
20 | policy: [
|
21 | {
|
22 | userAgent: 'Googlebot',
|
23 | allow: '/',
|
24 | disallow: '/search',
|
25 | crawlDelay: 2
|
26 | },
|
27 | {
|
28 | userAgent: '*',
|
29 | allow: '/',
|
30 | disallow: '/search',
|
31 | crawlDelay: 10,
|
32 | cleanParam: 'ref /articles/',
|
33 | }
|
34 | ],
|
35 | sitemap: 'sitemap.xml',
|
36 | host: 'http://example.com'
|
37 | })
|
38 | .then((content) => {
|
39 | console.log(content);
|
40 | });
|
41 | ```
|
42 |
|
43 | ## [Changelog](CHANGELOG.md)
|
44 |
|
45 | ## [License](LICENSE.md)
|