feat: revise robots.txt and sitemap.xml functions

Devin Haska 2024-03-13 16:23:50 -07:00
parent e81263efb9
commit 27447d2343
5 changed files with 11 additions and 1 deletion

src/admin/admin.json Normal file

@@ -0,0 +1,3 @@
+{
+  "excludeFromSitemap": true
+}
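
src/admin/admin.json appears to be an Eleventy directory data file, so every template under src/admin/ inherits "excludeFromSitemap": true through the data cascade and gets skipped by the sitemap template further down. A minimal sketch, assuming a hypothetical template at src/admin/index.11ty.js, of how the inherited flag surfaces in a page's data:

// Hypothetical src/admin/index.11ty.js, for illustration only.
// The directory data file src/admin/admin.json is merged into `data`,
// so data.excludeFromSitemap is true without this template setting it.
module.exports = class {
  data() {
    return { permalink: "/admin/" };
  }
  render(data) {
    // data.excludeFromSitemap === true (inherited from admin.json),
    // so sitemap.xml filters this page out of its <url> entries.
    return "<h1>Admin</h1>";
  }
};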


@@ -27,6 +27,7 @@ module.exports = class {
     return {
       permalink: `css/styles.css`,
+      excludeFromSitemap: true,
       rawFilepath,
       rawCss: css,
     };
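
This hunk adds excludeFromSitemap to the data() return of the JavaScript template that emits css/styles.css (its path is not shown above), so the generated stylesheet no longer gets a sitemap entry. A rough sketch, with an assumed surrounding class and an assumed readFile step standing in for the real CSS build, of the kind of method this return statement sits in:

// Sketch only: the async data() wrapper and the readFile call are assumptions;
// the returned keys come from the hunk itself.
const fs = require("node:fs/promises");

module.exports = class {
  async data() {
    const rawFilepath = "./src/css/styles.css"; // assumed source path
    const css = await fs.readFile(rawFilepath, "utf8"); // real build likely does more
    return {
      permalink: `css/styles.css`,
      excludeFromSitemap: true, // added in this commit
      rawFilepath,
      rawCss: css,
    };
  }
};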

src/fun/fun.json Normal file

@@ -0,0 +1,3 @@
+{
+  "excludeFromSitemap": true
+}


@@ -1,8 +1,9 @@
 ---
 permalink: /robots.txt
 eleventyExcludeFromCollections: true
+excludeFromSitemap: true
 ---
 Sitemap: {{ meta.url }}/sitemap.xml
 User-agent: *
 Disallow:
 Disallow: /404.html


@@ -5,9 +5,11 @@ eleventyExcludeFromCollections: true
 <?xml version="1.0" encoding="utf-8"?>
 <urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
 {% for page in collections.all %}
+{% if not page.data.excludeFromSitemap %}
   <url>
     <loc>{{ site.url }}{{ page.url | url }}</loc>
     <lastmod>{{ page.date.toISOString() }}</lastmod>
   </url>
+{% endif %}
 {% endfor %}
 </urlset>
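
The sitemap template now wraps each <url> entry in a check on page.data.excludeFromSitemap, so any page flagged via front matter, a directory data file, or a data() return is dropped from the generated sitemap.xml. A minimal sketch, assuming a standard .eleventy.js configuration file, of the same exclusion expressed as a named collection that the template could iterate instead of filtering collections.all inline:

// .eleventy.js (sketch): a "sitemap" collection holding only pages that
// do not opt out, equivalent to the {% if %} guard added in the template.
module.exports = function (eleventyConfig) {
  eleventyConfig.addCollection("sitemap", (collectionApi) =>
    collectionApi.getAll().filter((page) => !page.data.excludeFromSitemap)
  );
};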