diff --git a/src/MediaWiki.ts b/src/MediaWiki.ts index 92409428..36684b5c 100644 --- a/src/MediaWiki.ts +++ b/src/MediaWiki.ts @@ -134,7 +134,7 @@ class MediaWiki { this.#actionApiPath = 'w/api.php' this.#restApiPath = 'api/rest_v1' - this.#wikiPath = '' + this.#wikiPath = 'wiki/' this.#modulePathOpt = 'w/load.php' this.namespaces = {} @@ -213,9 +213,8 @@ class MediaWiki { } private initApiURLDirector() { - // TODO: this.webUrl probably shouldn't accept hardcoded 'wiki/' param, check test/e2e/extra.e2e.test.ts - this.webUrl = this.baseUrlDirector.buildURL('wiki/') - this.actionApiUrl = this.baseUrlDirector.buildURL(this.#actionApiPath, this.#wikiPath) + this.webUrl = this.baseUrlDirector.buildURL(this.#wikiPath) + this.actionApiUrl = this.baseUrlDirector.buildURL(this.#actionApiPath) this.apiUrlDirector = new ApiURLDirector(this.actionApiUrl.href) this.visualEditorApiUrl = this.apiUrlDirector.buildVisualEditorURL() this.visualEditorURLDirector = new VisualEditorURLDirector(this.visualEditorApiUrl.href) diff --git a/src/parameterList.ts b/src/parameterList.ts index 1541ae9d..3d9e54ac 100644 --- a/src/parameterList.ts +++ b/src/parameterList.ts @@ -16,7 +16,7 @@ export const parameterDescriptions = { format: 'Specify a flavour for the scraping. If missing, scrape all article contents. Each --format argument will cause a new local file to be created but options can be combined. Supported options are:\n * novid: no video & audio content\n * nopic: no pictures (implies "novid")\n * nopdf: no PDF files\n * nodet: only the first/head paragraph (implies "novid")\nFormat names can also be aliased using a ":"\nExample: "... 
--format=nopic:mini --format=novid,nopdf"', keepEmptyParagraphs: 'Keep all paragraphs, even empty ones.', - mwWikiPath: 'Mediawiki wiki base path (per default empty string)', + mwWikiPath: 'Mediawiki wiki base path (per default "/wiki/")', mwActionApiPath: 'Mediawiki API path (per default "/w/api.php")', mwRestApiPath: 'Mediawiki Rest API path (per default "/api/rest_v1")', mwModulePath: 'Mediawiki module load path (per default "/w/load.php")', diff --git a/src/util/builders/url/base.director.ts b/src/util/builders/url/base.director.ts index 84c7aeae..c0791cc2 100644 --- a/src/util/builders/url/base.director.ts +++ b/src/util/builders/url/base.director.ts @@ -10,8 +10,7 @@ export default class BaseURLDirector { this.baseDomain = baseDomain } - buildURL(path: string, wikiPath = '') { - path = `${wikiPath}${path}` + buildURL(path: string) { return urlBuilder.setDomain(this.baseDomain).setPath(path).build(true) } diff --git a/test/e2e/apiPathParamsSanitizing.e2e.test.ts b/test/e2e/apiPathParamsSanitizing.e2e.test.ts index da443cdb..46bd1b11 100644 --- a/test/e2e/apiPathParamsSanitizing.e2e.test.ts +++ b/test/e2e/apiPathParamsSanitizing.e2e.test.ts @@ -1,9 +1,8 @@ -import { testAllRenders } from '../testAllRenders.js' -import { zimcheck } from '../util.js' +import { testAllRenders } from '../testRenders.js' import 'dotenv/config.js' import { jest } from '@jest/globals' import rimraf from 'rimraf' -import { sanitizeApiPathParam } from '../../src/sanitize-argument.js' +import { sanitizeApiPathParam, sanitizeWikiPath } from '../../src/sanitize-argument.js' jest.setTimeout(60000) @@ -14,23 +13,24 @@ const parameters = { mwActionApiPath: sanitizeApiPathParam('/w/api.php'), mwRestApiPath: sanitizeApiPathParam('/api/rest_v1'), mwModulePath: sanitizeApiPathParam('/w/load.php'), + mwWikiPath: sanitizeWikiPath('wiki'), } await testAllRenders(parameters, async (outFiles) => { describe(`e2e test for api url params for en.wikipedia.org for ${outFiles[0]?.renderer} renderer`, () => 
{ - test('Mediawiki actionApiPath ', () => { + test('Mediawiki actionApiPath option sanitized', () => { expect(outFiles[0].mwMetaData.actionApiPath).toBe('w/api.php') }) - test('Mediawiki restApiPath option', () => { + test('Mediawiki restApiPath option sanitized', () => { expect(outFiles[0].mwMetaData.restApiPath).toBe('api/rest_v1') }) - test('Mediawiki default empty wikiPath option', () => { - expect(outFiles[0].mwMetaData.wikiPath).toBe('') + test('Mediawiki wikiPath option sanitized', () => { + expect(outFiles[0].mwMetaData.wikiPath).toBe('wiki/') }) - test('Mediawiki modulePathOpt option', () => { + test('Mediawiki modulePathOpt option sanitized', () => { expect(outFiles[0].mwMetaData.modulePathOpt).toBe('w/load.php') }) @@ -39,9 +39,12 @@ await testAllRenders(parameters, async (outFiles) => { expect(outFiles[0].mwMetaData.actionApiUrl).toBe('https://en.wikipedia.org/w/api.php') }) + // TODO: blocked by issues/1931 + /* test(`test zim integrity for ${outFiles[0]?.renderer} renderer`, async () => { await expect(zimcheck(outFiles[0].outFile)).resolves.not.toThrowError() }) + */ afterAll(() => { rimraf.sync(`./${outFiles[0].testId}`) diff --git a/test/e2e/articleLists.test.ts b/test/e2e/articleLists.test.ts index 55d40b3f..38166a22 100644 --- a/test/e2e/articleLists.test.ts +++ b/test/e2e/articleLists.test.ts @@ -2,7 +2,7 @@ import { execa } from 'execa' import rimraf from 'rimraf' import 'dotenv/config' import { jest } from '@jest/globals' -import { testAllRenders } from '../testAllRenders.js' +import { testAllRenders } from '../testRenders.js' jest.setTimeout(10000) diff --git a/test/e2e/bm.e2e.test.ts b/test/e2e/bm.e2e.test.ts index 33644d82..d89a3cdd 100644 --- a/test/e2e/bm.e2e.test.ts +++ b/test/e2e/bm.e2e.test.ts @@ -1,5 +1,5 @@ import { zimdump } from '../util.js' -import { testAllRenders } from '../testAllRenders.js' +import { testAllRenders } from '../testRenders.js' import { execa } from 'execa' import { jest } from '@jest/globals' import rimraf from 
'rimraf' diff --git a/test/e2e/downloadImage.e2e.test.ts b/test/e2e/downloadImage.e2e.test.ts index 77cdbba0..4c93482c 100644 --- a/test/e2e/downloadImage.e2e.test.ts +++ b/test/e2e/downloadImage.e2e.test.ts @@ -1,7 +1,6 @@ import { execa } from 'execa' -import { zimcheck } from '../util.js' import rimraf from 'rimraf' -import { testAllRenders } from '../testAllRenders.js' +import { testAllRenders } from '../testRenders.js' import 'dotenv/config.js' import { jest } from '@jest/globals' @@ -21,7 +20,8 @@ const parameters = { await testAllRenders(parameters, async (outFiles) => { describeIf('Check image downloading from S3 using optimisationCacheUrl parameter', () => { test(`right scrapping from fr.wikipedia.org with optimisationCacheUrl parameter for ${outFiles[0]?.renderer} renderer`, async () => { - await expect(zimcheck(outFiles[0].outFile)).resolves.not.toThrowError() + // TODO: blocked by issues/1931, doesn't work for VE + // await expect(zimcheck(outFiles[0].outFile)).resolves.not.toThrowError() await execa('redis-cli flushall', { shell: true }) const redisScan = await execa('redis-cli --scan', { shell: true }) diff --git a/test/e2e/en.e2e.test.ts b/test/e2e/en.e2e.test.ts index 437d660e..40f010c2 100644 --- a/test/e2e/en.e2e.test.ts +++ b/test/e2e/en.e2e.test.ts @@ -1,4 +1,4 @@ -import { testAllRenders } from '../testAllRenders.js' +import { testAllRenders } from '../testRenders.js' import domino from 'domino' import { zimdump } from '../util.js' import 'dotenv/config.js' diff --git a/test/e2e/en10.e2e.test.ts b/test/e2e/en10.e2e.test.ts index 13610e95..fb5dfd09 100644 --- a/test/e2e/en10.e2e.test.ts +++ b/test/e2e/en10.e2e.test.ts @@ -1,5 +1,5 @@ import rimraf from 'rimraf' -import { testAllRenders } from '../testAllRenders.js' +import { testAllRenders } from '../testRenders.js' import { jest } from '@jest/globals' import 'dotenv/config.js' diff --git a/test/e2e/forceRender.test.ts b/test/e2e/forceRender.test.ts index a462eb6f..21b710f6 100644 --- 
a/test/e2e/forceRender.test.ts +++ b/test/e2e/forceRender.test.ts @@ -40,7 +40,7 @@ describe('forceRender', () => { expect(redisScan.stdout).toEqual('') }) - test.skip('Scrape article from bm.wikipedia.org should throw error when using VisualEditor render', async () => { + test('Scrape article from bm.wikipedia.org should throw error when using VisualEditor render', async () => { const forceRender = 'VisualEditor' expect(async () => { await mwoffliner.execute({ ...parameters, forceRender }) diff --git a/test/e2e/formatParams.test.ts b/test/e2e/formatParams.test.ts index 79ec8bc4..a2bc4981 100644 --- a/test/e2e/formatParams.test.ts +++ b/test/e2e/formatParams.test.ts @@ -1,6 +1,6 @@ import 'dotenv/config.js' import domino from 'domino' -import { testAllRenders } from '../testAllRenders.js' +import { testAllRenders } from '../testRenders.js' import { jest } from '@jest/globals' import { zimdump } from '../util.js' import rimraf from 'rimraf' @@ -77,7 +77,7 @@ await testAllRenders({ ...parameters, format: 'novid', articleList: 'English_alp // TODO: blocked by issues/1928 /* -await testAllRenders({ ...parameters, format: 'nopdf', articleList: 'PDF' }, async (outFiles) => { +await testRenders({ ...parameters, format: 'nopdf', articleList: 'PDF' }, async (outFiles) => { describe('format:pdf to check no internal links pdf files', () => { test(`Test en.wikipedia.org using format:nopdf for ${outFiles[0]?.renderer} renderer`, async () => { await execa('redis-cli flushall', { shell: true }) diff --git a/test/e2e/multimediaContent.test.ts b/test/e2e/multimediaContent.test.ts index bab78694..83f67a2e 100644 --- a/test/e2e/multimediaContent.test.ts +++ b/test/e2e/multimediaContent.test.ts @@ -1,5 +1,5 @@ import { execa } from 'execa' -import { testAllRenders } from '../testAllRenders.js' +import { testRenders } from '../testRenders.js' import rimraf from 'rimraf' import { zimdump } from '../util.js' import 'dotenv/config' @@ -16,7 +16,7 @@ const parameters = { forceRender: 
'WikimediaDesktop', } -await testAllRenders( +await testRenders( parameters, async (outFiles) => { describe('Multimedia', () => { @@ -49,8 +49,7 @@ await testAllRenders( }) break case 'VisualEditor': - // TODO: Enable back once regression Phabricator:T350117 fixed - test.skip(`check multimedia content from wikipedia test page for ${outFiles[0]?.renderer} renderer`, async () => { + test(`check multimedia content from wikipedia test page for ${outFiles[0]?.renderer} renderer`, async () => { await execa('redis-cli flushall', { shell: true }) expect(outFiles[0].status.articles.success).toEqual(1) @@ -76,10 +75,10 @@ await testAllRenders( } }) }, - ['WikimediaDesktop'], + ['WikimediaDesktop', 'VisualEditor'], ) -await testAllRenders( +await testRenders( { ...parameters, format: ['nopic', 'novid', 'nopdf', 'nodet'] }, async (outFiles) => { describe('Multimedia for different formats', () => { @@ -146,8 +145,7 @@ await testAllRenders( }) break case 'VisualEditor': - // TODO: Enable back once regression Phabricator:T350117 fixed - test.skip(`check multimedia content from wikipedia test page with different formates for ${outFiles[0]?.renderer} renderer`, async () => { + test(`check multimedia content from wikipedia test page with different formats for ${outFiles[0]?.renderer} renderer`, async () => { await execa('redis-cli flushall', { shell: true }) expect(outFiles).toHaveLength(4) @@ -156,7 +154,7 @@ await testAllRenders( expect(dump.status.articles.success).toEqual(1) expect(dump.status.articles.fail).toEqual(0) - // TODO: blocked by issues/1931 + // TODO: blocked by issues/1931, doesn't work for VE // await expect(zimcheck(dump.outFile)).resolves.not.toThrowError() const mediaFiles = await zimdump(`list --ns I ${dump.outFile}`) @@ -206,5 +204,5 @@ await testAllRenders( } }) }, - ['WikimediaDesktop'], + ['WikimediaDesktop', 'VisualEditor'], ) diff --git a/test/e2e/treatMedia.e2e.test.ts b/test/e2e/treatMedia.e2e.test.ts index 475290c3..9ff18b3c 100644 ---
a/test/e2e/treatMedia.e2e.test.ts +++ b/test/e2e/treatMedia.e2e.test.ts @@ -1,6 +1,6 @@ import { execa } from 'execa' import rimraf from 'rimraf' -import { testAllRenders } from '../testAllRenders.js' +import { testAllRenders } from '../testRenders.js' import { zimdump } from '../util.js' import 'dotenv/config' import { jest } from '@jest/globals' diff --git a/test/e2e/vikidia.e2e.test.ts b/test/e2e/vikidia.e2e.test.ts index 0c1e3bf4..b79dd609 100644 --- a/test/e2e/vikidia.e2e.test.ts +++ b/test/e2e/vikidia.e2e.test.ts @@ -1,7 +1,6 @@ import { execa } from 'execa' import rimraf from 'rimraf' -import { testAllRenders } from '../testAllRenders.js' -import { zimcheck } from '../util.js' +import { testRenders } from '../testRenders.js' import 'dotenv/config.js' import { jest } from '@jest/globals' @@ -15,21 +14,21 @@ const parameters = { customZimDescription: 'Alaska article', } -await testAllRenders( +await testRenders( parameters, async (outFiles) => { - // TODO: Enable back once regression Phabricator:T350117 fixed - test.skip(`right scrapping from vikidia.org for ${outFiles[0]?.renderer} renderer`, async () => { + test(`right scraping from vikidia.org for ${outFiles[0]?.renderer} renderer`, async () => { await execa('redis-cli flushall', { shell: true }) // Created 1 output expect(outFiles).toHaveLength(1) - await expect(zimcheck(outFiles[0].outFile)).resolves.not.toThrowError() + // TODO: Blocked by issues/1931 + // await expect(zimcheck(outFiles[0].outFile)).resolves.not.toThrowError() }) rimraf.sync(`./${outFiles[0].testId}`) }, - // vikidia supports only VisualEditor (which is disabled for now) among other renders + // en.vikidia.org supports only the VisualEditor renderer ['VisualEditor'], ) diff --git a/test/e2e/wikisource.e2e.test.ts b/test/e2e/wikisource.e2e.test.ts index 781bfa08..f5d63344 100644 --- a/test/e2e/wikisource.e2e.test.ts +++ b/test/e2e/wikisource.e2e.test.ts @@ -1,6 +1,6 @@ import { execa } from 'execa' import rimraf from 'rimraf' 
-import { testAllRenders } from '../testAllRenders.js' +import { testRenders } from '../testRenders.js' import 'dotenv/config.js' import { jest } from '@jest/globals' @@ -14,7 +14,7 @@ const parameters = { noLocalParserFallback: true, } -await testAllRenders( +await testRenders( parameters, async (outFiles) => { describe('wikisource', () => { @@ -42,8 +42,7 @@ await testAllRenders( }) break case 'VisualEditor': - // TODO: Enable back once regression Phabricator:T350117 fixed - test.skip(`Wikisource List for ${outFiles[0]?.renderer} renderer`, async () => { + test(`Wikisource List for ${outFiles[0]?.renderer} renderer`, async () => { await execa('redis-cli flushall', { shell: true }) expect(outFiles).toHaveLength(1) @@ -64,5 +63,5 @@ await testAllRenders( } }) }, - ['WikimediaDesktop'], + ['WikimediaDesktop', 'VisualEditor'], ) diff --git a/test/e2e/zimMetadata.e2e.test.ts b/test/e2e/zimMetadata.e2e.test.ts index c7beedab..6f753626 100644 --- a/test/e2e/zimMetadata.e2e.test.ts +++ b/test/e2e/zimMetadata.e2e.test.ts @@ -1,6 +1,6 @@ import rimraf from 'rimraf' import { execa } from 'execa' -import { testAllRenders } from '../testAllRenders.js' +import { testAllRenders } from '../testRenders.js' import { zimdump } from '../util.js' import 'dotenv/config' import { jest } from '@jest/globals' diff --git a/test/testAllRenders.ts b/test/testRenders.ts similarity index 92% rename from test/testAllRenders.ts rename to test/testRenders.ts index fbbc7086..68062359 100644 --- a/test/testAllRenders.ts +++ b/test/testRenders.ts @@ -52,9 +52,8 @@ async function getOutFiles(renderName: string, testId: string, parameters: Param return outFiles } -export async function testAllRenders(parameters: Parameters, callback, optionalRenderesList?: Array<string>) { +export async function testRenders(parameters: Parameters, callback, renderersList: Array<string>) { await checkZimTools() - const renderersList = optionalRenderesList || RENDERERS_LIST for (const renderer of renderersList) { try { const now = new 
Date() @@ -69,3 +68,7 @@ export async function testAllRenders(parameters: Parameters, callback, optionalR } } } + +export async function testAllRenders(parameters: Parameters, callback) { + return testRenders(parameters, callback, RENDERERS_LIST) +} diff --git a/test/unit/renderers/article.renderer.test.ts b/test/unit/renderers/article.renderer.test.ts index 44a98c08..d8c5c57a 100644 --- a/test/unit/renderers/article.renderer.test.ts +++ b/test/unit/renderers/article.renderer.test.ts @@ -36,7 +36,7 @@ describe('ArticleRenderer', () => { }) it('should return visualeditor content if the main page flag is true', async () => { - const { downloader, dump } = await setupScrapeClasses({ format: '', mwWikiPath: '/' }) + const { downloader, dump } = await setupScrapeClasses() const { data, articleId, articleDetail } = prepareFixtures({ visualeditor: { content: 'Lorem ipsum dolor sit amet' } }) const _moduleDependencies = await downloader.getModuleDependencies(articleDetail.title) const result = await visualEditorRenderer.render({ @@ -56,7 +56,7 @@ describe('ArticleRenderer', () => { }) it('should inject header to the visual editor content if the main page flag is false', async () => { - const { downloader, dump } = await setupScrapeClasses({ format: '', mwWikiPath: '/' }) + const { downloader, dump } = await setupScrapeClasses() const content = 'consectetur adipiscing elit' const { data, articleId, articleDetail } = prepareFixtures({ visualeditor: { content } }) const _moduleDependencies = await downloader.getModuleDependencies(articleDetail.title) @@ -75,7 +75,7 @@ describe('ArticleRenderer', () => { }) it('should return html body if json contentmodel param is `wikitext`', async () => { - const { downloader, dump } = await setupScrapeClasses({ format: '', mwWikiPath: '/' }) + const { downloader, dump } = await setupScrapeClasses() const htmlBody = 'sed do eiusmod tempor incididunt' const { data, articleId, articleDetail } = prepareFixtures({ html: { body: htmlBody }, 
contentmodel: 'wikitext' }) const _moduleDependencies = await downloader.getModuleDependencies(articleDetail.title) @@ -96,7 +96,7 @@ describe('ArticleRenderer', () => { }) it('should return html body if it`s presented even if contentmodel param is not equal to wikitext', async () => { - const { downloader, dump } = await setupScrapeClasses({ format: '', mwWikiPath: '/' }) + const { downloader, dump } = await setupScrapeClasses() const htmlBody = 'ut labore et dolore magna aliqua. Ut enim ad minim veniam' const { data, articleId, articleDetail } = prepareFixtures({ html: { body: htmlBody } }) const _moduleDependencies = await downloader.getModuleDependencies(articleDetail.title) @@ -117,7 +117,7 @@ describe('ArticleRenderer', () => { }) it('should return empty string if there was an error during article retrievement', async () => { - const { downloader, dump } = await setupScrapeClasses({ format: '', mwWikiPath: '/' }) + const { downloader, dump } = await setupScrapeClasses() const { data, articleId, articleDetail } = prepareFixtures({ error: 'Unexpected internal error' }) const _moduleDependencies = await downloader.getModuleDependencies(articleDetail.title) const result = await visualEditorRenderer.render({ diff --git a/test/unit/renderers/renderer.builder.test.ts b/test/unit/renderers/renderer.builder.test.ts index 3c982548..25e6b7db 100644 --- a/test/unit/renderers/renderer.builder.test.ts +++ b/test/unit/renderers/renderer.builder.test.ts @@ -14,7 +14,7 @@ describe('RendererBuilder', () => { }) it('should create a WikimediaDesktopRenderer for desktop mode', async () => { - const { MediaWiki } = await setupScrapeClasses({ mwWikiPath: '/' }) // en wikipedia + const { MediaWiki } = await setupScrapeClasses() // en wikipedia const renderer = await rendererBuilder.createRenderer({ MediaWiki, @@ -24,7 +24,7 @@ describe('RendererBuilder', () => { }) it('should create a WikimediaDesktopRenderer for auto mode for en wikipedia', async () => { - const { MediaWiki } = 
await setupScrapeClasses({ mwWikiPath: '/' }) // en wikipedia + const { MediaWiki } = await setupScrapeClasses() // en wikipedia const renderer = await rendererBuilder.createRenderer({ MediaWiki, @@ -34,7 +34,7 @@ describe('RendererBuilder', () => { }) it('should throw error for unknown render mode', async () => { - const { MediaWiki } = await setupScrapeClasses({ mwWikiPath: '/' }) // en wikipedia + const { MediaWiki } = await setupScrapeClasses() // en wikipedia expect(async () => { await rendererBuilder.createRenderer({ @@ -45,7 +45,7 @@ describe('RendererBuilder', () => { }) it('should return VisualEditorRenderer for specific mode with RendererAPI as VisualEditor', async () => { - const { MediaWiki } = await setupScrapeClasses({ mwWikiPath: '/' }) // en wikipedia + const { MediaWiki } = await setupScrapeClasses() // en wikipedia // Force MediaWiki to have capability for the VisualEditor for test purpose jest.spyOn(MediaWiki, 'hasVisualEditorApi').mockResolvedValue(true) @@ -62,7 +62,7 @@ describe('RendererBuilder', () => { }) it('should return WikimediaDesktopRenderer for specific mode with RendererAPI as WikimediaDesktop', async () => { - const { MediaWiki } = await setupScrapeClasses({ mwWikiPath: '/' }) // en wikipedia + const { MediaWiki } = await setupScrapeClasses() // en wikipedia // Force MediaWiki to have capability for the WikimediaDesktop for test purpose jest.spyOn(MediaWiki, 'hasWikimediaDesktopApi').mockResolvedValue(true) @@ -79,7 +79,7 @@ describe('RendererBuilder', () => { }) it('should throw an error for unknown RendererAPI in specific mode', async () => { - const { downloader, MediaWiki } = await setupScrapeClasses({ mwWikiPath: '/' }) // en wikipedia + const { downloader, MediaWiki } = await setupScrapeClasses() // en wikipedia await MediaWiki.hasCoordinates(downloader) await MediaWiki.hasWikimediaDesktopApi() await MediaWiki.hasWikimediaMobileApi() diff --git a/test/unit/saveArticles.test.ts b/test/unit/saveArticles.test.ts index 
4ab038ce..7ccbcb03 100644 --- a/test/unit/saveArticles.test.ts +++ b/test/unit/saveArticles.test.ts @@ -36,7 +36,7 @@ describe('saveArticles', () => { } test(`Article html processing using ${renderer} renderer`, async () => { - const { MediaWiki, downloader, dump } = await setupScrapeClasses({ mwWikiPath: '/' }) // en wikipedia + const { MediaWiki, downloader, dump } = await setupScrapeClasses() // en wikipedia await MediaWiki.hasCoordinates(downloader) await MediaWiki.hasWikimediaDesktopApi() await MediaWiki.hasWikimediaMobileApi() @@ -90,7 +90,7 @@ describe('saveArticles', () => { }) test(`Check nodet article for en.wikipedia.org using ${renderer} renderer`, async () => { - const { downloader, dump } = await setupScrapeClasses({ mwUrl: 'https://en.wikipedia.org', format: 'nodet', mwWikiPath: '/' }) // en wikipedia + const { downloader, dump } = await setupScrapeClasses({ mwUrl: 'https://en.wikipedia.org', format: 'nodet' }) // en wikipedia await downloader.setBaseUrls(renderer) const articleId = 'Canada' const articleUrl = getArticleUrl(downloader, dump, articleId) @@ -121,7 +121,7 @@ describe('saveArticles', () => { }) test(`Load main page and check that it is without header using ${renderer} renderer`, async () => { - const { downloader, dump } = await setupScrapeClasses({ mwUrl: 'https://en.wikivoyage.org', mwWikiPath: '/' }) // en wikipedia + const { downloader, dump } = await setupScrapeClasses({ mwUrl: 'https://en.wikivoyage.org' }) // en wikipedia await downloader.setBaseUrls(renderer) const articleId = 'Main_Page' const articleUrl = getArticleUrl(downloader, dump, articleId) @@ -147,12 +147,11 @@ describe('saveArticles', () => { }) test(`--customFlavour using ${renderer} renderer`, async () => { - const { MediaWiki, downloader, dump } = await setupScrapeClasses({ format: 'nopic', mwWikiPath: '/' }) // en wikipedia + const { MediaWiki, downloader, dump } = await setupScrapeClasses({ format: 'nopic' }) // en wikipedia await 
MediaWiki.hasCoordinates(downloader) await MediaWiki.hasWikimediaDesktopApi() await MediaWiki.hasWikimediaMobileApi() - // TODO: Enable back once regression Phabricator:T350117 fixed - // await MediaWiki.hasVisualEditorApi() + await MediaWiki.hasVisualEditorApi() await downloader.setBaseUrls(renderer) class CustomFlavour implements CustomProcessor { // eslint-disable-next-line @typescript-eslint/no-unused-vars @@ -284,7 +283,7 @@ describe('saveArticles', () => { }) test('Test deleted article rendering (Visual editor renderer)', async () => { - const { downloader, dump } = await setupScrapeClasses({ mwWikiPath: '/' }) // en wikipedia + const { downloader, dump } = await setupScrapeClasses() // en wikipedia const { articleDetailXId } = RedisStore const articleId = 'deletedArticle' @@ -315,7 +314,7 @@ describe('saveArticles', () => { }) test('Load inline js from HTML', async () => { - const { downloader } = await setupScrapeClasses({ mwWikiPath: '/' }) // en wikipedia + const { downloader } = await setupScrapeClasses() // en wikipedia const _moduleDependencies = await downloader.getModuleDependencies('Potato') diff --git a/test/unit/styles.test.ts b/test/unit/styles.test.ts index e28b41e5..a3c47faa 100644 --- a/test/unit/styles.test.ts +++ b/test/unit/styles.test.ts @@ -13,7 +13,7 @@ describe('Styles', () => { test('Stylesheet downloading', async () => { const { articleDetailXId } = RedisStore - const { downloader } = await setupScrapeClasses({ mwWikiPath: '/' }) // en wikipedia + const { downloader } = await setupScrapeClasses() // en wikipedia const _articlesDetail = await downloader.getArticleDetailsIds(['London']) const articlesDetail = mwRetToArticleDetail(_articlesDetail) diff --git a/test/unit/treatments/article.treatment.test.ts b/test/unit/treatments/article.treatment.test.ts index 706b52c3..a26076dd 100644 --- a/test/unit/treatments/article.treatment.test.ts +++ b/test/unit/treatments/article.treatment.test.ts @@ -35,7 +35,7 @@ describe('ArticleTreatment', 
() => { } test(`Article html processing for ${renderer} render`, async () => { - const { downloader, dump } = await setupScrapeClasses({ mwWikiPath: '/' }) // en wikipedia + const { downloader, dump } = await setupScrapeClasses() // en wikipedia await downloader.setBaseUrls() const title = 'London' const _articlesDetail = await downloader.getArticleDetailsIds([title]) diff --git a/test/unit/treatments/media.treatment.test.ts b/test/unit/treatments/media.treatment.test.ts index 7a99bcce..bc6eb5d4 100644 --- a/test/unit/treatments/media.treatment.test.ts +++ b/test/unit/treatments/media.treatment.test.ts @@ -29,7 +29,7 @@ describe('MediaTreatment', () => { describe('treatSubtitle', () => { test('treat one subtitle', async () => { - const { dump } = await setupScrapeClasses({ format: '' }) + const { dump } = await setupScrapeClasses() // Wikicode is taken from article "Mechanical energy" which has a video with subtitle const wikicode = @@ -47,7 +47,7 @@ describe('MediaTreatment', () => { describe('treatVideo', () => { test('treat multiple subtitles in one video', async () => { - const { dump } = await setupScrapeClasses({ format: '', mwWikiPath: '/' }) + const { dump } = await setupScrapeClasses({ format: '' }) // Wikicode is taken from article "User:Charliechlorine/sandbox" which has multiple(4) subtitles in this video const wikicode = '[[File:Videoonwikipedia.ogv|thumb|thumbtime=0:58|left|320px|Video about kola nuts ]]' @@ -70,7 +70,7 @@ describe('MediaTreatment', () => { }) test('correct resolution retrieval', async () => { - const { dump } = await setupScrapeClasses({ format: '', mwWikiPath: '/' }) + const { dump } = await setupScrapeClasses({ format: '' }) let htmlStr = `