Commit f499cb4

fix!: drop all deprecations (#167)

Authored Nov 24, 2024 · 1 parent aa82756 · commit f499cb4
File tree: 13 files changed, +8 -131 lines

Diff for: docs/content/2.guides/2.route-rules.md (+1 -1)

@@ -12,7 +12,7 @@ You can provide the following rules:
 
 The rules are applied using the following logic:
 - `X-Robots-Tag` header - SSR only,
-- `<meta name="robots">`{lang="html"} - When using the `defineRobotMeta` or `RobotMeta` composable or component
+- `<meta name="robots">`{lang="html"}
 - `/robots.txt` disallow entry - When [disallowNonIndexableRoutes](/docs/robots/api/config#robotsdisabledvalue) is enabled
 
 ## Inline Route Rules
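
For orientation, the `robots` route rule that this commit leaves as the only supported option is set in `nuxt.config.ts`. A minimal sketch, mirroring the test fixtures updated later in this commit (the route path is illustrative, not part of the change):

```ts
// nuxt.config.ts — illustrative sketch, not part of this commit
export default defineNuxtConfig({
  routeRules: {
    // Boolean form: marks matching routes as non-indexable. Per the docs above, the
    // result is delivered via the X-Robots-Tag header (SSR), <meta name="robots">,
    // and a robots.txt disallow entry when disallowNonIndexableRoutes is enabled.
    '/admin/**': { robots: false },
  },
})
```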

Diff for: src/module.ts (+2 -70)

@@ -1,7 +1,6 @@
-import type { Arrayable, AutoI18nConfig, Robots3Rules, RobotsGroupInput, RobotsGroupResolved } from './runtime/types'
+import type { Arrayable, AutoI18nConfig, RobotsGroupInput, RobotsGroupResolved } from './runtime/types'
 import fsp from 'node:fs/promises'
 import {
-  addComponent,
   addImports,
   addPlugin,
   addServerHandler,
@@ -80,10 +79,6 @@ export interface ModuleOptions {
    * ]
    */
   groups: RobotsGroupInput[]
-  /**
-   * @deprecated backwards compatibility with Nuxt Robots v3
-   */
-  rules?: Robots3Rules | Robots3Rules[]
   /**
    * The value to use when the site is indexable.
    *
@@ -205,10 +200,7 @@ export default defineNuxtModule<ModuleOptions>({
     if (config.enabled === false) {
       logger.debug('The module is disabled, skipping setup.')
       // need to mock the composables to allow module still to work when disabled
-      ;['defineRobotMeta', 'useRobotsRule']
-        .forEach((name) => {
-          addImports({ name, from: resolve(`./runtime/app/composables/mock`) })
-        })
+      addImports({ name: 'useRobotsRule', from: resolve(`./runtime/app/composables/mock`) })
       nuxt.options.nitro = nuxt.options.nitro || {}
       nuxt.options.nitro.imports = nuxt.options.nitro.imports || {}
       nuxt.options.nitro.imports.presets = nuxt.options.nitro.imports.presets || []
@@ -227,43 +219,6 @@ export default defineNuxtModule<ModuleOptions>({
       config.robotsTxt = false
     }
 
-    // TODO remove with v5
-    if (config.rules) {
-      // warn v3 usage and convert to v4
-      logger.warn('The `rules` option is deprecated, please use the `groups` option instead.')
-      if (!config.groups?.length) {
-        const group: RobotsGroupInput = {}
-        const keyMap: Robots3Rules = {
-          UserAgent: 'userAgent',
-          Disallow: 'disallow',
-          Allow: 'allow',
-        } as const
-        const rules = asArray(config.rules)
-        for (const k in rules) {
-          // need to map all keys within the rules
-          const rule = rules[k]
-          for (const k2 in rule) {
-            const key = (keyMap[k2 as keyof Robots3Rules] || k2) as (keyof RobotsGroupInput | 'Sitemap')
-            if (key === 'Sitemap') {
-              config.sitemap = asArray(config.sitemap)
-              config.sitemap.push(rule[k2])
-            }
-            else if (keyMap[k2 as keyof Robots3Rules]) {
-              if (group[key]) {
-                // @ts-expect-error untyped
-                group[key] = asArray(group[key])
-                group[key].push(rule[k2])
-              }
-              else {
-                group[key] = rule[k2]
-              }
-            }
-          }
-        }
-        config.groups.push(group)
-      }
-    }
-
     const resolvedAutoI18n = typeof config.autoI18n === 'boolean' ? false : (config.autoI18n || await resolveI18nConfig())
 
     if (config.blockNonSeoBots) {
@@ -471,8 +426,6 @@ export default defineNuxtModule<ModuleOptions>({
       // @ts-expect-error untyped
       cacheControl: config.cacheControl,
     }
-    // TODO deprecated, backwards compatiblity
-    nuxt.options.runtimeConfig['nuxt-simple-robots'] = nuxt.options.runtimeConfig['nuxt-robots']
   })
 
   extendTypes('nuxt-robots', ({ typesPath }) => {
@@ -486,20 +439,12 @@ declare module 'nitropack' {
     _robotsRuleMactcher: (url: string) => string
   }
   interface NitroRouteRules {
-    /**
-     * @deprecated Use \`robots: <boolean>\` instead.
-     */
-    index?: boolean
     robots?: boolean | string | {
       indexable: boolean
      rule: string
     }
   }
   interface NitroRouteConfig {
-    /**
-     * @deprecated Use \`robots: <boolean>\` instead.
-     */
-    index?: boolean
     robots?: boolean | string | {
       indexable: boolean
       rule: string
@@ -531,24 +476,11 @@ declare module 'h3' {
       logger.info('Firebase does not support dynamic robots.txt files. Prerendering /robots.txt.')
     }
 
-    // defineRobotMeta is a server-only composable
-    nuxt.options.optimization.treeShake.composables.client['nuxt-robots'] = ['defineRobotMeta']
-
-    addImports({
-      name: 'defineRobotMeta',
-      from: resolve('./runtime/app/composables/defineRobotMeta'),
-    })
-
     addImports({
      name: 'useRobotsRule',
      from: resolve('./runtime/app/composables/useRobotsRule'),
     })
 
-    addComponent({
-      name: 'RobotMeta',
-      filePath: resolve('./runtime/app/components/RobotMeta'),
-    })
-
     if (config.robotsTxt) {
       // add robots.txt server handler
       addServerHandler({
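
With the automatic `rules` → `groups` conversion removed above, a v3-style `rules` config now has to be migrated by hand. A rough sketch of the mapping the deleted code performed (`UserAgent` → `userAgent`, `Disallow` → `disallow`, `Allow` → `allow`, `Sitemap` → top-level `sitemap`), assuming the module's `robots` config key; the values are illustrative:

```ts
// nuxt.config.ts — migration sketch (values are illustrative)
export default defineNuxtConfig({
  robots: {
    // previously: rules: [{ UserAgent: '*', Disallow: '/admin', Sitemap: '/sitemap.xml' }]
    groups: [
      { userAgent: ['*'], disallow: ['/admin'] },
    ],
    sitemap: ['/sitemap.xml'],
  },
})
```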

Diff for: src/runtime/app/components/RobotMeta.ts (-13)

This file was deleted.

Diff for: src/runtime/app/composables/defineRobotMeta.ts (-16)

This file was deleted.

Diff for: src/runtime/app/composables/mock.ts (-3)

@@ -1,9 +1,6 @@
 import type { MaybeRef } from 'vue'
 import { ref } from 'vue'
 
-// eslint-disable-next-line unused-imports/no-unused-vars
-export function defineRobotMeta(component?: boolean) {}
-
 // eslint-disable-next-line unused-imports/no-unused-vars
 export function useRobotsRule(rule?: MaybeRef<boolean | string>) {
   return ref('')
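
The mock above keeps `useRobotsRule` callable when the module is disabled; it simply returns an empty ref. Going only by that signature (`rule?: MaybeRef<boolean | string>`), component usage looks roughly like this; the real behaviour lives in `useRobotsRule.ts`, which this diff does not show:

```ts
// Sketch based solely on the mock's signature above
const rule = useRobotsRule()          // Ref<string> — robots rule for the current route
// const rule = useRobotsRule(false)  // a boolean or string argument is also accepted
```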

Diff for: src/runtime/server/routes/__robots__/nuxt-content.ts (+1 -1)

@@ -17,7 +17,7 @@ export default defineEventHandler(async (e) => {
     if (c._draft || c._extension !== 'md' || c._partial)
       return false
     if (c.path) {
-      if (String(c.robots) === 'false' || String(c.indexable) === 'false' || String(c.index) === 'false')
+      if (String(c.robots) === 'false')
         return c.path
     }
     return false

Diff for: src/runtime/types.ts (-13)

@@ -10,19 +10,6 @@ export interface ParsedRobotsTxt {
 
 export type RobotsGroupInput = GoogleInput | YandexInput
 
-export interface Robots3Rules {
-  UserAgent?: string
-  BlankLine?: true
-  Comment?: string
-  Disallow?: string
-  Allow?: string
-  Host?: string
-  Sitemap?: string
-  // yandex only
-  CleanParam?: string
-  CrawlDelay?: string
-}
-
 // google is the base input
 export interface GoogleInput {
   comment?: Arrayable<string>

Diff for: src/runtime/util.ts (-2)

@@ -287,8 +287,6 @@ export function normaliseRobotsRouteRule(config: NitroRouteConfig) {
     allow = config.robots
   else if (typeof config.robots === 'object' && typeof config.robots.indexable !== 'undefined')
     allow = config.robots.indexable
-  else if (typeof config.index !== 'undefined')
-    allow = config.index
   // parse rule
   let rule: string | undefined
   if (typeof config.robots === 'object' && typeof config.robots.rule !== 'undefined')
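
`normaliseRobotsRouteRule` now derives indexability solely from `robots`, in the shapes declared in the `NitroRouteRules`/`NitroRouteConfig` augmentation earlier in this commit. A sketch of the string and object forms (the boolean form was shown earlier; the route paths are illustrative):

```ts
// nuxt.config.ts — illustrative sketch of the remaining `robots` shapes
export default defineNuxtConfig({
  routeRules: {
    '/beta/**': { robots: 'noindex' },  // string → used as the rule
    '/archive/**': {                    // object → allow = indexable, rule = rule
      robots: { indexable: false, rule: 'noindex, nofollow' },
    },
  },
})
```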

Diff for: test/fixtures/basic/pages/hidden-route-rules.vue (-6)

@@ -1,9 +1,3 @@
-<script lang="ts" setup>
-import { defineRobotMeta } from '#imports'
-
-defineRobotMeta()
-</script>
-
 <template>
   <div>hello world</div>
 </template>

Diff for: test/manualNoIndexing.test.ts (-1)

@@ -25,7 +25,6 @@ describe('manualNoIndexing', () => {
   it('basic', async () => {
     const robotsTxt = await $fetch('/robots.txt')
     // the site.url should be appended
-    // site.indexable should be honoured
     expect(robotsTxt).toMatchInlineSnapshot(`
       "# START nuxt-robots (indexing disabled)
       User-agent: *

Diff for: test/routeRules.test.ts (+2 -2)

@@ -14,7 +14,7 @@ await setup({
     },
     routeRules: {
       '/index-rule/*': {
-        index: false,
+        robots: false,
       },
       '/robots-rule/*': {
         robots: 'noindex',
@@ -28,7 +28,7 @@ await setup({
         robots: 'index, follow',
       },
       '/excluded/*': {
-        index: false,
+        robots: false,
       },
     },
   },

Diff for: test/routeRulesTrailingSlash.test.ts (+2 -2)

@@ -12,10 +12,10 @@ await setup({
     },
     routeRules: {
       '/hidden-route-rules': {
-        index: false,
+        robots: false,
       },
       '/hidden-route-rules/': {
-        index: false,
+        robots: false,
       },
     },
   },

Diff for: test/siteConfigLegacy.test.ts (-1)

@@ -24,7 +24,6 @@ await setup({
 describe('siteConfig', () => {
   it('basic', async () => {
     const robotsTxt = await $fetch('/robots.txt')
-    // site.indexable should be honoured
     expect(robotsTxt.includes('(indexable)')).toBe(true)
   })
 })
