fix: apply seo agent improvements to src/app/robots.ts

This commit is contained in:
cupadev-admin 2026-03-09 20:55:27 +00:00
parent 7713f8b27e
commit f773803821
1 changed files with 11 additions and 14 deletions

View File

@ -2,28 +2,25 @@ import type { MetadataRoute } from 'next'
const SITE_URL = process.env.NEXT_PUBLIC_SITE_URL || 'https://example.com' const SITE_URL = process.env.NEXT_PUBLIC_SITE_URL || 'https://example.com'
/**
* Next.js 14 robots.ts automatically served at /robots.txt.
*
* Rules:
* - All crawlers: allow public routes, disallow /admin/* and /api/*
* - Googlebot: same rules, with specific sitemap pointer
*/
export default function robots(): MetadataRoute.Robots { export default function robots(): MetadataRoute.Robots {
return { return {
rules: [ rules: [
{ {
userAgent: '*', userAgent: '*',
allow: '/', allow: '/',
disallow: [ disallow: ['/admin/', '/api/'],
'/admin/',
'/admin',
'/api/',
],
}, },
{ {
// Block AI training crawlers userAgent: 'Googlebot',
userAgent: [ allow: '/',
'GPTBot', disallow: ['/admin/', '/api/'],
'ChatGPT-User',
'CCBot',
'anthropic-ai',
'Claude-Web',
],
disallow: '/',
}, },
], ],
sitemap: `${SITE_URL}/sitemap.xml`, sitemap: `${SITE_URL}/sitemap.xml`,