From 6a11bee404ab2cbf9c22c14b100f11cbf9bde0ae Mon Sep 17 00:00:00 2001
From: Michael Ritter
Date: Wed, 24 May 2017 14:36:15 +0200
Subject: [PATCH] Fixed crawling of projects with publicPath != "/" - express just returned 404 page

---
 src/Server.js | 14 ++++++++------
 src/cli.js    |  2 +-
 2 files changed, 9 insertions(+), 7 deletions(-)

diff --git a/src/Server.js b/src/Server.js
index 448c7d6..74036e0 100644
--- a/src/Server.js
+++ b/src/Server.js
@@ -15,12 +15,14 @@ export default class Server {
     })
 
     // Yes I just copied most of this from react-scripts ¯\_(ツ)_/¯
-    app.use(historyApiFallback({
-      index: '/200.html',
-      disableDotRule: true,
-      htmlAcceptHeaders: ['text/html'],
-    }))
-    app.use(publicPath, express.static(baseDir, { index: '200.html' }))
+    app.use(publicPath,
+      historyApiFallback({
+        index: '/200.html',
+        disableDotRule: true,
+        htmlAcceptHeaders: ['text/html'],
+      }),
+      express.static(baseDir, { index: '200.html' })
+    )
 
     if (proxy) {
       if (typeof proxy !== "string") throw new Error("Only string proxies are implemented currently.")
diff --git a/src/cli.js b/src/cli.js
index 434dbe2..5aac579 100644
--- a/src/cli.js
+++ b/src/cli.js
@@ -26,7 +26,7 @@ export default () => {
 
   const server = new Server(buildDir, basename, 0, pkg.proxy)
   server.start().then(() => {
-    const crawler = new Crawler(`http://localhost:${server.port()}/`, snapshotDelay, options)
+    const crawler = new Crawler(`http://localhost:${server.port()}${basename}`, snapshotDelay, options)
     return crawler.crawl(({ urlPath, html }) => {
       if (!urlPath.startsWith(basename)) {
         console.log(`❗ Refusing to crawl ${urlPath} because it is outside of the ${basename} sub-folder`)