Disallow robots indexing for self-hosted deployments

This commit is contained in:
Jori Lallo
2018-01-21 18:47:43 -08:00
parent 1b5513be0c
commit 69af7f4efe
2 changed files with 13 additions and 0 deletions

View File

@ -10,6 +10,7 @@ import serve from 'koa-static';
import subdomainRedirect from './middlewares/subdomainRedirect';
import renderpage from './utils/renderpage';
import { slackAuth } from '../shared/utils/routeHelpers';
import { robotsResponse } from './utils/robots';
import Home from './pages/Home';
import About from './pages/About';
@ -81,6 +82,9 @@ router.get('/', async ctx => {
}
});
// Other
// Serve robots.txt dynamically: robotsResponse returns a disallow-all body
// for self hosted installs (host not on getoutline.com), undefined otherwise.
router.get('/robots.txt', ctx => (ctx.body = robotsResponse(ctx)));
// catch all for react app
router.get('*', async ctx => {
await renderapp(ctx);

9
server/utils/robots.js Normal file
View File

@ -0,0 +1,9 @@
// @flow
import { type Context } from 'koa';
const DISALLOW_ROBOTS = `User-agent: *
Disallow: /`;
export const robotsResponse = (ctx: Context): ?string => {
if (ctx.headers.host.indexOf('getoutline.com') < 0) return DISALLOW_ROBOTS;
};