🎉 Add sitemap and robots (#156)
oaoong authored Dec 2, 2023
1 parent e4ae331 commit 877302b
Showing 3 changed files with 56 additions and 0 deletions.
13 changes: 13 additions & 0 deletions src/app/robots.ts
@@ -0,0 +1,13 @@
import type { MetadataRoute } from 'next'
import { Environment } from '@/config/environment'

export default function robots(): MetadataRoute.Robots {
  return {
    rules: {
      userAgent: '*',
      allow: '/',
    },
    sitemap: `${Environment.currentAddress()}/sitemap.xml`,
    host: Environment.currentAddress(),
  }
}
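Note: robots.ts relies on Environment.currentAddress() from '@/config/environment', which is not part of this diff. A minimal sketch of what that helper might look like, assuming the service origin comes from a NEXT_PUBLIC_SERVICE_URL environment variable (hypothetical name) with a localhost fallback:

// Hypothetical sketch only; the real '@/config/environment' is not shown in this commit.
export const Environment = {
  // Returns the origin used to build absolute URLs for robots.txt and sitemap.xml.
  currentAddress: (): string =>
    process.env.NEXT_PUBLIC_SERVICE_URL ?? 'http://localhost:3000',
}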
12 changes: 12 additions & 0 deletions src/app/sitemap.ts
@@ -0,0 +1,12 @@
import type { MetadataRoute } from 'next'
import AppPath from '@/config/appPath'
import { Environment } from '@/config/environment'

export default function sitemap(): MetadataRoute.Sitemap {
  const routes = Object.values(AppPath).map((route) => ({
    url: `${Environment.currentAddress()}${route('' as never)}`,
    lastModified: new Date().toISOString().split('T')[0],
  }))

  return [...routes]
}
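sitemap.ts assumes AppPath is a map of route-builder functions, which is why each value is invoked as route('' as never) to get a path string. A rough sketch of such a map, with hypothetical route names, since the real '@/config/appPath' is not included in this commit:

// Hypothetical sketch; route names are placeholders.
const AppPath = {
  home: () => '/',
  cards: () => '/cards',
  // Parameterized routes receive an empty argument when the sitemap is built,
  // so a route like this would yield '/cards/' in the generated sitemap.
  cardDetail: (id: string) => `/cards/${id}`,
} as const

export default AppPath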
31 changes: 31 additions & 0 deletions src/types/metadata-interface.d.ts
@@ -0,0 +1,31 @@
type Robots = {
  rules:
    | {
        userAgent?: string | string[]
        allow?: string | string[]
        disallow?: string | string[]
        crawlDelay?: number
      }
    | Array<{
        userAgent: string | string[]
        allow?: string | string[]
        disallow?: string | string[]
        crawlDelay?: number
      }>
  sitemap?: string | string[]
  host?: string
}

type Sitemap = Array<{
  url: string
  lastModified?: string | Date
  changeFrequency?:
    | 'always'
    | 'hourly'
    | 'daily'
    | 'weekly'
    | 'monthly'
    | 'yearly'
    | 'never'
  priority?: number
}>
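These local declarations mirror the shapes of Next.js's MetadataRoute.Robots and MetadataRoute.Sitemap. For illustration, a value satisfying the Sitemap type above (URL and field values are placeholders):

// Illustrative only; not part of the commit.
const exampleSitemap: Sitemap = [
  {
    url: 'https://example.com/',
    lastModified: new Date(),
    changeFrequency: 'weekly',
    priority: 1,
  },
]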
