next-sitemap.js
// A config for next-sitemap that generates robots.txt and sitemap.xml files for improved SEO. This
// config is used by the "postbuild" step in package.json. Essentially this config adds a
// robots.txt file that permits crawling in production but disallows it for any other deployment.
// All other deployments are dev or staging builds and should not be crawled by search engines, to
// avoid the risk of a duplicate content penalty. A sitemap.xml file is created for production
// that helps search engines discover which routes this site offers. No sitemap file is created
// for non-production deployments.
//
// Note: we optionally exclude some routes from the sitemap, but we don't explicitly disallow those
// routes in robots.txt. This is intentional. Listing a `Disallow` directive in robots.txt does not
// guarantee that a route is excluded from search engines; for that we would have to add a
// `noindex` to either the HTML or the HTTP headers of each page. Listing excluded routes in
// robots.txt may also reveal routes we do not intend to reveal yet, so it's better to stay silent
// about them.
//
// See: https://github.com/iamvishnusankar/next-sitemap
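//
// For illustration, the generated robots.txt should look roughly like the following (assumed
// output; the exact formatting is determined by next-sitemap). In production, where an empty
// Disallow value permits crawling of the whole site:
//
//   User-agent: *
//   Disallow:
//   Sitemap: https://jonogmarteinn.is/sitemap.xml
//
// In any other deployment, where "Disallow: /" blocks crawling of every route:
//
//   User-agent: *
//   Disallow: /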
const isProduction = process.env.VERCEL_ENV === "production"; // VERCEL_ENV is "production", "preview" or "development"
const excludeAllRoutes = ["*"]; // Excluding with a "*" prevents the creation of sitemap.xml
const allowCrawlingOnAllRoutes = "";
const preventCrawlingOnAllRoutes = "/";
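// VERCEL_URL holds the deployment's generated domain (e.g. my-site-abc123.vercel.app) without a
// protocol scheme, so we prepend "https://" ourselves. When the variable is absent (e.g. a local
// build) we fall back to the canonical domain.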
const siteUrl = process.env.VERCEL_URL
? `https://${process.env.VERCEL_URL}`
: "https://jonogmarteinn.is";
// Work-in-progress routes, if any, that we don't want search engines to know about
const routesToHideInProduction = ["/málari", "/málningarþjónusta*"];
module.exports = {
  siteUrl: isProduction ? "https://jonogmarteinn.is" : siteUrl,
  exclude: isProduction ? routesToHideInProduction : excludeAllRoutes,
  generateRobotsTxt: true,
  robotsTxtOptions: {
    policies: [
      {
        userAgent: "*",
        disallow: isProduction ? allowCrawlingOnAllRoutes : preventCrawlingOnAllRoutes,
      },
    ],
  },
};
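
// For reference, the package.json wiring that runs this config after each build is assumed to look
// something like this ("build" script assumed; next-sitemap picks this file up by convention when
// it is named next-sitemap.js in the project root):
//
//   "scripts": {
//     "build": "next build",
//     "postbuild": "next-sitemap"
//   }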