diff --git a/src/app/robots.ts b/src/app/robots.ts index f42fc53..3fc53b7 100644 --- a/src/app/robots.ts +++ b/src/app/robots.ts @@ -2,28 +2,25 @@ import type { MetadataRoute } from 'next' const SITE_URL = process.env.NEXT_PUBLIC_SITE_URL || 'https://example.com' +/** + * Next.js 14 robots.ts — automatically served at /robots.txt. + * + * Rules: + * - All crawlers: allow public routes, disallow /admin/* and /api/* + * - Googlebot: explicitly assigned the same rules (the single sitemap below is global) + */ export default function robots(): MetadataRoute.Robots { return { rules: [ { userAgent: '*', allow: '/', - disallow: [ - '/admin/', - '/admin', - '/api/', - ], + disallow: ['/admin/', '/api/'], }, { - // Block AI training crawlers - userAgent: [ - 'GPTBot', - 'ChatGPT-User', - 'CCBot', - 'anthropic-ai', - 'Claude-Web', - ], - disallow: '/', + userAgent: 'Googlebot', + allow: '/', + disallow: ['/admin/', '/api/'], }, ], sitemap: `${SITE_URL}/sitemap.xml`,