diff --git a/public/robots.txt b/public/robots.txt
index e9e57dc..9c5b465 100644
--- a/public/robots.txt
+++ b/public/robots.txt
@@ -1,3 +1,4 @@
 # https://www.robotstxt.org/robotstxt.html
 User-agent: *
 Disallow:
+Sitemap: https://grammar.lingdocs.com/sitemap.xml
diff --git a/scripts/make-sitemap.js b/scripts/make-sitemap.js
index 85e37a7..2e2874c 100644
--- a/scripts/make-sitemap.js
+++ b/scripts/make-sitemap.js
@@ -7,32 +7,38 @@ console.log("generating sitemap.xml");
 const indexText = fs.readFileSync("./src/content/index.ts", "utf-8");
 const contentTreeRaw = indexText.split("/* content-tree */")[1];
 const safeContentTreeText = contentTreeRaw
-    .split("\n")
-    .filter(l => !l.includes(`"import":`))
-    .join("\n");
+  .split("\n")
+  .filter((l) => !l.includes(`"import":`))
+  .join("\n");
 const contentTree = JSON.parse(safeContentTreeText);
-const urls = contentTree.reduce((acc, section) => {
+const urls = contentTree.reduce(
+  (acc, section) => {
     if ("slug" in section) {
-        return [
-            ...acc,
-            `${base}/${section.slug}`,
-        ];
+      return [...acc, `${base}/${section.slug}/`];
     } else {
-        return [
-            ...acc,
-            ...section.chapters.map(x => `${base}/${section.subdirectory}/${x.slug}`),
-        ];
+      return [
+        ...acc,
+        ...section.chapters.map(
+          (x) => `${base}/${section.subdirectory}/${x.slug}/`
+        ),
+      ];
     }
-}, [base]);
+  },
+  [`${base}`]
+);
 
 const siteMap = `<?xml version="1.0" encoding="UTF-8"?>
 <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
-${urls.map(u => `<url>
+${urls
+  .map(
+    (u) => `<url>
   <loc>${u}</loc>
-</url>`).join("")}
+</url>`
+  )
+  .join("")}
 </urlset>
-`
+`;
 
 fs.writeFileSync("./public/sitemap.xml", siteMap);