
Add robots.txt

Disallow all crawlers.
master · Perception committed 2 months ago · parent commit 9a0954e162
1 changed file with 6 additions and 0 deletions

site.js  +6 −0

@@ -3,6 +3,11 @@ require('dotenv').config()
 const { send } = require('micro')
 const files = require('serve-handler')
+const robotsTxt =
+`User-agent: *
+Disallow: /
+`
+
 const staticPaths = [ '/css', '/images', '/archive' ]
 function isStaticPath(req) {
@@ -26,5 +31,6 @@ module.exports = async function(req, res) {
   if (isStaticPath(req)) return await files(req, res, { public: 'routes' })
   let matched = match(req)
   if (matched) return await matched(req, res)
+  if (req.url === '/robots.txt') return send(res, 200, robotsTxt)
   return send(res, 404, { error: 'Not found' })
 }
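To sanity-check the new route end to end, a sketch like the one below should do. It assumes site.js sits next to this script and that micro's default export wraps the handler into an http.Server; the ephemeral port (listen on 0) is an arbitrary choice, not something from this commit.

// Smoke-test sketch for the new /robots.txt route (assumptions noted above).
const http = require('http')
const micro = require('micro')
const handler = require('./site.js') // assumed path to the handler in this commit

const server = micro(handler)
server.listen(0, () => {
  const { port } = server.address() // ephemeral port picked by the OS
  http.get({ port, path: '/robots.txt' }, (res) => {
    let body = ''
    res.on('data', (chunk) => { body += chunk })
    res.on('end', () => {
      console.log(res.statusCode) // expected: 200
      console.log(body)           // expected: "User-agent: *\nDisallow: /\n"
      server.close()
    })
  })
})

Note that the robots.txt check sits after the static-path and route-matcher branches, so it only has to win against the final 404 fallback.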
