From 70a0eb8aa1ec41f4189ae2c43f82dc0ecdafb138 Mon Sep 17 00:00:00 2001
From: Bart van der Braak
Date: Tue, 16 Jan 2024 02:44:01 +0100
Subject: [PATCH] feat: create robots.txt programmatically

---
 src/routes/robots.txt/+server.ts | 51 ++++++++++++++++++++++++++++++++
 static/robots.txt                |  6 ----
 2 files changed, 51 insertions(+), 6 deletions(-)
 create mode 100644 src/routes/robots.txt/+server.ts
 delete mode 100644 static/robots.txt

diff --git a/src/routes/robots.txt/+server.ts b/src/routes/robots.txt/+server.ts
new file mode 100644
index 0000000..33556c6
--- /dev/null
+++ b/src/routes/robots.txt/+server.ts
@@ -0,0 +1,51 @@
+import { siteConfig } from '$lib/config/site';
+
+const SITE_URL = siteConfig.url;
+
+/**
+ * SvelteKit RequestHandler for generating robots.txt.
+ */
+export async function GET() {
+	// Define the robots.txt configuration
+	const robotsConfig = [
+		{
+			agent: '*',
+			disallow: ['/']
+		}
+	];
+
+	const body = generateRobotsTxt(SITE_URL, robotsConfig);
+
+	return new Response(body, {
+		headers: {
+			'Cache-Control': `public, max-age=${86400}`, // 24 hours
+			'Content-Type': 'text/plain' // Corrected MIME type for robots.txt
+		}
+	});
+}
+
+/**
+ * Generates robots.txt content.
+ * @param {string} siteUrl Base site URL.
+ * @param {Array} config Robots.txt configuration array.
+ * @returns {string} robots.txt content.
+ */
+function generateRobotsTxt(siteUrl: string, config: { agent: string; disallow: string[] }[]) {
+	return `Sitemap: ${siteUrl}/sitemap.xml
+
+# https://developers.google.com/search/docs/advanced/sitemaps/build-sitemap#addsitemap
+# https://www.robotstxt.org/robotstxt.html
+${config.map((item) => generateRobotsTxtAgent(item.agent, item.disallow)).join('\n')}
+`;
+}
+
+/**
+ * Generates a user-agent section for robots.txt.
+ * @param {string} agent User-agent string.
+ * @param {string[]} disallow Array of paths to disallow.
+ * @returns {string} User-agent section of robots.txt.
+ */
+function generateRobotsTxtAgent(agent: string, disallow: string[]) {
+	const disallowEntries = disallow.map((path) => `Disallow: ${path}`).join('\n');
+	return `User-agent: ${agent}\n${disallowEntries}`;
+}
diff --git a/static/robots.txt b/static/robots.txt
deleted file mode 100644
index c37fa05..0000000
--- a/static/robots.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-Sitemap: https://hellob.art/sitemap.xml
-
-# https://developers.google.com/search/docs/advanced/sitemaps/build-sitemap#addsitemap
-# https://www.robotstxt.org/robotstxt.html
-User-agent: *
-Disallow:
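
Not part of the patch: a minimal smoke-test sketch for the new endpoint, assuming Vitest is configured for this SvelteKit project with its path aliases (so `$lib/config/site` resolves) and that the test file sits next to the handler in src/routes/robots.txt/.

import { describe, expect, it } from 'vitest';
import { GET } from './+server';

describe('GET /robots.txt', () => {
	it('serves a plain-text robots.txt that references the sitemap', async () => {
		const response = await GET();
		const body = await response.text();

		// Headers set by the handler in this patch
		expect(response.headers.get('Content-Type')).toBe('text/plain');
		expect(response.headers.get('Cache-Control')).toBe('public, max-age=86400');

		// Content produced by generateRobotsTxt / generateRobotsTxtAgent
		expect(body).toContain('Sitemap:');
		expect(body).toContain('User-agent: *');
	});
});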