import * as _nuxt_schema from '@nuxt/schema';
import { Arrayable, RobotsGroupInput, Robots3Rules, AutoI18nConfig } from '../dist/runtime/types.js';

interface ModuleOptions {
    /**
     * Whether the robots.txt should be generated.
     *
     * @default true
     */
    enabled: boolean;
    /**
     * Should an `X-Robots-Tag` header be added to the response.
     *
     * @default true
     */
    header: boolean;
    /**
     * Should a `<meta name="robots" content="<>">` tag be added to the head.
     *
     * @default true
     */
    metaTag: boolean;
    /**
     * Path to the sitemaps, if they exist.
     * Will automatically be resolved as an absolute path.
     *
     * @default []
     */
    sitemap: Arrayable<string>;
    /**
     * Paths to add to the robots.txt with the allow keyword.
     *
     * @default []
     */
    allow: Arrayable<string>;
    /**
     * Paths to add to the robots.txt with the disallow keyword.
     *
     * @default []
     */
    disallow: Arrayable<string>;
    /**
     * Define more granular rules for the robots.txt. Each group is a set of rules for specific user agent(s).
     *
     * @default []
     * @example [
     *   {
     *     userAgents: ['AdsBot-Google-Mobile', 'AdsBot-Google-Mobile-Apps'],
     *     disallow: ['/admin'],
     *     allow: ['/admin/login'],
     *   },
     * ]
     */
    groups: RobotsGroupInput[];
    /**
     * @deprecated Backwards compatibility with Nuxt Robots v3.
     */
    rules?: Robots3Rules | Robots3Rules[];
    /**
     * The value to use when the site is indexable.
     *
     * @default 'index, follow, max-image-preview:large, max-snippet:-1, max-video-preview:-1'
     */
    robotsEnabledValue: string;
    /**
     * The value to use when the site is not indexable.
     *
     * @default 'noindex, nofollow'
     */
    robotsDisabledValue: string;
    /**
     * Should route rules which disallow indexing be added to the `/robots.txt` file.
     *
     * @default true
     */
    disallowNonIndexableRoutes: boolean;
    /**
     * Specify a robots.txt path to merge the config from, relative to the root directory.
     *
     * When set to `true`, the default path of `<publicDir>/robots.txt` will be used.
     *
     * When set to `false`, no merging will occur.
     *
     * @default true
     */
    mergeWithRobotsTxtPath: boolean | string;
    /**
     * Blocks bots that don't benefit our SEO and are known to cause issues.
     *
     * @default false
     */
    blockNonSeoBots: boolean;
    /**
     * Override the auto i18n configuration.
     */
    autoI18n?: false | AutoI18nConfig;
    /**
     * Configure the Cache-Control header for the robots.txt file. Providing `false` will set the header to `no-store`.
     *
     * @default 'max-age=14400, must-revalidate'
     */
    cacheControl?: string | false;
    /**
     * Disables the Frontmatter Nuxt Content integration.
     */
    disableNuxtContentIntegration?: boolean;
    /**
     * Whether the robots.txt file should be generated. Useful to disable when running your app with a base URL.
     *
     * @default false
     */
    robotsTxt?: boolean;
    /**
     * Enables debug logs and a debug endpoint.
     *
     * @default false
     */
    debug: boolean;
    /**
     * Should the robots.txt display credits for the module.
     *
     * @default true
     */
    credits: boolean;
}
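
/*
 * Usage sketch: these options are typically supplied from `nuxt.config.ts`.
 * The `robots` config key, the package name in `modules`, and every value
 * below are illustrative assumptions rather than defaults.
 *
 *   export default defineNuxtConfig({
 *     modules: ['@nuxtjs/robots'],
 *     robots: {
 *       sitemap: '/sitemap.xml',
 *       disallow: ['/admin'],
 *       groups: [
 *         {
 *           userAgents: ['AdsBot-Google-Mobile'],
 *           disallow: ['/admin'],
 *           allow: ['/admin/login'],
 *         },
 *       ],
 *     },
 *   });
 */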

interface ResolvedModuleOptions extends ModuleOptions {
    sitemap: string[];
    disallow: string[];
}

interface ModuleHooks {
    'robots:config': (config: ResolvedModuleOptions) => Promise<void> | void;
}
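
/*
 * Hook sketch: another module can adjust the resolved options through the
 * `robots:config` hook declared above. The `@nuxt/kit` helper and the pushed
 * path are assumptions for illustration only.
 *
 *   import { defineNuxtModule } from '@nuxt/kit';
 *
 *   export default defineNuxtModule({
 *     setup(_options, nuxt) {
 *       nuxt.hook('robots:config', (config) => {
 *         // `config` is the ResolvedModuleOptions; since the hook returns void,
 *         // changes are presumably applied by mutating it in place.
 *         config.disallow.push('/internal');
 *       });
 *     },
 *   });
 */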

interface ModulePublicRuntimeConfig {
    ['nuxt-robots']: ResolvedModuleOptions;
}
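
/*
 * Runtime-access sketch: `ModulePublicRuntimeConfig` suggests the resolved
 * options are exposed under the `nuxt-robots` key of the public runtime
 * config; reading them via `useRuntimeConfig()` as below is an assumption
 * for illustration.
 *
 *   const robots = useRuntimeConfig().public['nuxt-robots'];
 *   const disallowed = robots.disallow; // string[]
 */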

declare const _default: _nuxt_schema.NuxtModule<ModuleOptions, ModuleOptions, false>;

export { type ModuleHooks, type ModuleOptions, type ModulePublicRuntimeConfig, type ResolvedModuleOptions, _default as default };