fix: apply seo agent improvements to src/app/robots.ts
This commit is contained in:
parent 7713f8b27e
commit f773803821
@@ -2,28 +2,25 @@ import type { MetadataRoute } from 'next'
const SITE_URL = process.env.NEXT_PUBLIC_SITE_URL || 'https://example.com'
|
||||
|
||||
/**
|
||||
* Next.js 14 robots.ts — automatically served at /robots.txt.
|
||||
*
|
||||
* Rules:
|
||||
* - All crawlers: allow public routes, disallow /admin/* and /api/*
|
||||
* - Googlebot: same rules, with specific sitemap pointer
|
||||
*/
|
||||
export default function robots(): MetadataRoute.Robots {
|
||||
return {
|
||||
rules: [
|
||||
{
|
||||
userAgent: '*',
|
||||
allow: '/',
|
||||
disallow: [
|
||||
'/admin/',
|
||||
'/admin',
|
||||
'/api/',
|
||||
],
|
||||
disallow: ['/admin/', '/api/'],
|
||||
},
|
||||
{
|
||||
// Block AI training crawlers
|
||||
userAgent: [
|
||||
'GPTBot',
|
||||
'ChatGPT-User',
|
||||
'CCBot',
|
||||
'anthropic-ai',
|
||||
'Claude-Web',
|
||||
],
|
||||
disallow: '/',
|
||||
userAgent: 'Googlebot',
|
||||
allow: '/',
|
||||
disallow: ['/admin/', '/api/'],
|
||||
},
|
||||
],
|
||||
sitemap: `${SITE_URL}/sitemap.xml`,
|
||||
|
|
|
|||
Loading…
Reference in New Issue