diff --git a/src/optimize/base_optimizer.js b/src/optimize/base_optimizer.js
index d9df2a1955df3..539c55c969653 100644
--- a/src/optimize/base_optimizer.js
+++ b/src/optimize/base_optimizer.js
@@ -459,7 +459,7 @@ export default class BaseOptimizer {
       optimization: {
         minimizer: [
           new TerserPlugin({
-            parallel: this.getThreadLoaderPoolConfig().workers,
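+            // Disabled for the same reason as in dll_config_model.js: the terser-webpack-plugin
+            // worker pool requires too much memory on large compilations.
+            // https://github.com/webpack-contrib/terser-webpack-plugin/issues/143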
+            parallel: false,
             sourceMap: false,
             cache: false,
             extractComments: false,
diff --git a/src/optimize/dynamic_dll_plugin/dll_config_model.js b/src/optimize/dynamic_dll_plugin/dll_config_model.js
index 2e74cb6af86d4..9ca6071b8f515 100644
--- a/src/optimize/dynamic_dll_plugin/dll_config_model.js
+++ b/src/optimize/dynamic_dll_plugin/dll_config_model.js
@@ -214,16 +214,20 @@ function common(config) {
   return webpackMerge(generateDLL(config));
 }
 
-function optimized(config) {
+function optimized() {
   return webpackMerge({
     mode: 'production',
     optimization: {
       minimizer: [
         new TerserPlugin({
-          // Apply the same logic used to calculate the
-          // threadLoaderPool workers number to spawn
-          // the parallel processes on terser
-          parallel: config.threadLoaderPoolConfig.workers,
+          // NOTE: we should not enable this option for now.
+          // Since 2.0.0 terser-webpack-plugin uses jest-worker to run
+          // tasks in a pool of workers. Currently that pool appears to
+          // require too much memory and breaks on large entry point
+          // compilations like this one. Also, the gain from enabling
+          // the option was barely noticeable.
+          // https://github.com/webpack-contrib/terser-webpack-plugin/issues/143
+          parallel: false,
           sourceMap: false,
           cache: false,
           extractComments: false,
@@ -250,5 +254,5 @@ export function configModel(rawConfig = {}) {
     return webpackMerge(common(config), unoptimized());
   }
 
-  return webpackMerge(common(config), optimized(config));
+  return webpackMerge(common(config), optimized());
 }
diff --git a/x-pack/legacy/plugins/ml/server/models/file_data_visualizer/file_data_visualizer.js b/x-pack/legacy/plugins/ml/server/models/file_data_visualizer/file_data_visualizer.js
deleted file mode 100644
index 28bb7c24cf12e..0000000000000
--- a/x-pack/legacy/plugins/ml/server/models/file_data_visualizer/file_data_visualizer.js
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-
-import Boom from 'boom';
-import fs from 'fs';
-import os from 'os';
-const util = require('util');
-// const readFile = util.promisify(fs.readFile);
-const readdir = util.promisify(fs.readdir);
-const writeFile = util.promisify(fs.writeFile);
-
-export function fileDataVisualizerProvider(callWithRequest) {
-  async function analyzeFile(data, overrides) {
-    let cached = false;
-    let results = [];
-
-    try {
-      results = await callWithRequest('ml.fileStructure', { body: data, ...overrides });
-      if (false) {
-        // disabling caching for now
-        cached = await cacheData(data);
-      }
-    } catch (error) {
-      const err = error.message !== undefined ? error.message : error;
-      throw Boom.badRequest(err);
-    }
-
-    const { hasOverrides, reducedOverrides } = formatOverrides(overrides);
-
-    return {
-      ...(hasOverrides && { overrides: reducedOverrides }),
-      cached,
-      results,
-    };
-  }
-
-  async function cacheData(data) {
-    const outputPath = `${os.tmpdir()}/kibana-ml`;
-    const tempFile = 'es-ml-tempFile';
-    const tempFilePath = `${outputPath}/${tempFile}`;
-
-    try {
-      createOutputDir(outputPath);
-      await deleteOutputFiles(outputPath);
-      await writeFile(tempFilePath, data);
-      return true;
-    } catch (error) {
-      return false;
-    }
-  }
-
-  function createOutputDir(dir) {
-    if (fs.existsSync(dir) === false) {
-      fs.mkdirSync(dir);
-    }
-  }
-
-  async function deleteOutputFiles(outputPath) {
-    const files = await readdir(outputPath);
-    files.forEach(f => {
-      fs.unlinkSync(`${outputPath}/${f}`);
-    });
-  }
-
-  return {
-    analyzeFile,
-  };
-}
-
-function formatOverrides(overrides) {
-  let hasOverrides = false;
-
-  const reducedOverrides = Object.keys(overrides).reduce((p, c) => {
-    if (overrides[c] !== '') {
-      p[c] = overrides[c];
-      hasOverrides = true;
-    }
-    return p;
-  }, {});
-
-  if (reducedOverrides.column_names !== undefined) {
-    reducedOverrides.column_names = reducedOverrides.column_names.split(',');
-  }
-
-  if (reducedOverrides.has_header_row !== undefined) {
-    reducedOverrides.has_header_row = reducedOverrides.has_header_row === 'true';
-  }
-
-  if (reducedOverrides.should_trim_fields !== undefined) {
-    reducedOverrides.should_trim_fields = reducedOverrides.should_trim_fields === 'true';
-  }
-
-  return {
-    reducedOverrides,
-    hasOverrides,
-  };
-}
diff --git a/x-pack/legacy/plugins/ml/server/models/file_data_visualizer/file_data_visualizer.ts b/x-pack/legacy/plugins/ml/server/models/file_data_visualizer/file_data_visualizer.ts
new file mode 100644
index 0000000000000..fd5b5221393fc
--- /dev/null
+++ b/x-pack/legacy/plugins/ml/server/models/file_data_visualizer/file_data_visualizer.ts
@@ -0,0 +1,103 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import Boom from 'boom';
+import { RequestHandlerContext } from 'kibana/server';
+
+export type InputData = any[];
+
+export interface InputOverrides {
+  [key: string]: string;
+}
+
+export type FormattedOverrides = InputOverrides & {
+  column_names: string[];
+  has_header_row: boolean;
+  should_trim_fields: boolean;
+};
+
+export interface AnalysisResult {
+  results: {
+    charset: string;
+    has_header_row: boolean;
+    has_byte_order_marker: boolean;
+    format: string;
+    field_stats: {
+      [fieldName: string]: {
+        count: number;
+        cardinality: number;
+        top_hits: Array<{ count: number; value: any }>;
+      };
+    };
+    sample_start: string;
+    num_messages_analyzed: number;
+    mappings: {
+      [fieldName: string]: {
+        type: string;
+      };
+    };
+    quote: string;
+    delimiter: string;
+    need_client_timezone: boolean;
+    num_lines_analyzed: number;
+    column_names: string[];
+  };
+  overrides?: FormattedOverrides;
+}
+
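+// Provides `analyzeFile`, which calls `ml.fileStructure` as the current user and attaches
+// any non-empty overrides (normalized by `formatOverrides`) to the analysis result.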
+export function fileDataVisualizerProvider(context: RequestHandlerContext) {
+  async function analyzeFile(data: any, overrides: any): Promise<AnalysisResult> {
+    let results = [];
+
+    try {
+      results = await context.ml!.mlClient.callAsCurrentUser('ml.fileStructure', {
+        body: data,
+        ...overrides,
+      });
+    } catch (error) {
+      const err = error.message !== undefined ? error.message : error;
+      throw Boom.badRequest(err);
+    }
+
+    const { hasOverrides, reducedOverrides } = formatOverrides(overrides);
+
+    return {
+      ...(hasOverrides && { overrides: reducedOverrides }),
+      results,
+    };
+  }
+
+  return {
+    analyzeFile,
+  };
+}
+
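+// Converts the raw string overrides supplied with the request into their typed form,
+// dropping empty values and parsing `column_names`, `has_header_row` and `should_trim_fields`.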
+function formatOverrides(overrides: InputOverrides) {
+  let hasOverrides = false;
+
+  const reducedOverrides: FormattedOverrides = Object.keys(overrides).reduce((acc, overrideKey) => {
+    const overrideValue: string = overrides[overrideKey];
+    if (overrideValue !== '') {
+      if (overrideKey === 'column_names') {
+        acc.column_names = overrideValue.split(',');
+      } else if (overrideKey === 'has_header_row') {
+        acc.has_header_row = overrideValue === 'true';
+      } else if (overrideKey === 'should_trim_fields') {
+        acc.should_trim_fields = overrideValue === 'true';
+      } else {
+        acc[overrideKey] = overrideValue;
+      }
+
+      hasOverrides = true;
+    }
+    return acc;
+  }, {} as FormattedOverrides);
+
+  return {
+    reducedOverrides,
+    hasOverrides,
+  };
+}
diff --git a/x-pack/legacy/plugins/ml/server/models/file_data_visualizer/import_data.js b/x-pack/legacy/plugins/ml/server/models/file_data_visualizer/import_data.ts
similarity index 71%
rename from x-pack/legacy/plugins/ml/server/models/file_data_visualizer/import_data.js
rename to x-pack/legacy/plugins/ml/server/models/file_data_visualizer/import_data.ts
index 644a137fbc092..008efb43a6c07 100644
--- a/x-pack/legacy/plugins/ml/server/models/file_data_visualizer/import_data.js
+++ b/x-pack/legacy/plugins/ml/server/models/file_data_visualizer/import_data.ts
@@ -4,10 +4,43 @@
  * you may not use this file except in compliance with the Elastic License.
  */
 
+import { RequestHandlerContext } from 'kibana/server';
 import { INDEX_META_DATA_CREATED_BY } from '../../../common/constants/file_datavisualizer';
+import { InputData } from './file_data_visualizer';
 
-export function importDataProvider(callWithRequest) {
-  async function importData(id, index, settings, mappings, ingestPipeline, data) {
+export interface Settings {
+  pipeline?: string;
+  index: string;
+  body: any[];
+  [key: string]: any;
+}
+
+export interface Mappings {
+  [key: string]: any;
+}
+
+export interface InjectPipeline {
+  id: string;
+  pipeline: any;
+}
+
+interface Failure {
+  item: number;
+  reason: string;
+  doc: any;
+}
+
+export function importDataProvider(context: RequestHandlerContext) {
+  const callAsCurrentUser = context.ml!.mlClient.callAsCurrentUser;
+
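+  // Runs a single import request: creates the index (and the ingest pipeline when one is
+  // supplied), bulk-indexes the documents and collects any per-document failures.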
+  async function importData(
+    id: string,
+    index: string,
+    settings: Settings,
+    mappings: Mappings,
+    ingestPipeline: InjectPipeline,
+    data: InputData
+  ) {
     let createdIndex;
     let createdPipelineId;
     const docCount = data.length;
@@ -35,7 +68,7 @@ export function importDataProvider(callWithRequest) {
         createdPipelineId = pipelineId;
       }
 
-      let failures = [];
+      let failures: Failure[] = [];
       if (data.length) {
         const resp = await indexData(index, createdPipelineId, data);
         if (resp.success === false) {
@@ -72,8 +105,8 @@ export function importDataProvider(callWithRequest) {
     }
   }
 
-  async function createIndex(index, settings, mappings) {
-    const body = {
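+  // Creates the target index, tagging its mappings with the `created_by` meta field.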
+  async function createIndex(index: string, settings: Settings, mappings: Mappings) {
+    const body: { mappings: Mappings; settings?: Settings } = {
       mappings: {
         _meta: {
           created_by: INDEX_META_DATA_CREATED_BY,
@@ -86,10 +119,10 @@ export function importDataProvider(callWithRequest) {
       body.settings = settings;
     }
 
-    await callWithRequest('indices.create', { index, body });
+    await callAsCurrentUser('indices.create', { index, body });
   }
 
-  async function indexData(index, pipelineId, data) {
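+  // Bulk-indexes the documents, routing them through the ingest pipeline when one was created.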
+  async function indexData(index: string, pipelineId: string, data: InputData) {
     try {
       const body = [];
       for (let i = 0; i < data.length; i++) {
@@ -97,12 +130,12 @@ export function importDataProvider(callWithRequest) {
         body.push(data[i]);
       }
 
-      const settings = { index, body };
+      const settings: Settings = { index, body };
       if (pipelineId !== undefined) {
         settings.pipeline = pipelineId;
       }
 
-      const resp = await callWithRequest('bulk', settings);
+      const resp = await callAsCurrentUser('bulk', settings);
       if (resp.errors) {
         throw resp;
       } else {
@@ -113,7 +146,7 @@ export function importDataProvider(callWithRequest) {
         };
       }
     } catch (error) {
-      let failures = [];
+      let failures: Failure[] = [];
       let ingestError = false;
       if (error.errors !== undefined && Array.isArray(error.items)) {
         // an expected error where some or all of the bulk request
@@ -134,11 +167,11 @@ export function importDataProvider(callWithRequest) {
     }
   }
 
-  async function createPipeline(id, pipeline) {
-    return await callWithRequest('ingest.putPipeline', { id, body: pipeline });
+  async function createPipeline(id: string, pipeline: any) {
+    return await callAsCurrentUser('ingest.putPipeline', { id, body: pipeline });
   }
 
-  function getFailures(items, data) {
+  function getFailures(items: any[], data: InputData): Failure[] {
     const failures = [];
     for (let i = 0; i < items.length; i++) {
       const item = items[i];
diff --git a/x-pack/legacy/plugins/ml/server/models/file_data_visualizer/index.js b/x-pack/legacy/plugins/ml/server/models/file_data_visualizer/index.ts
similarity index 53%
rename from x-pack/legacy/plugins/ml/server/models/file_data_visualizer/index.js
rename to x-pack/legacy/plugins/ml/server/models/file_data_visualizer/index.ts
index 3bda5599e7181..94529dc111696 100644
--- a/x-pack/legacy/plugins/ml/server/models/file_data_visualizer/index.js
+++ b/x-pack/legacy/plugins/ml/server/models/file_data_visualizer/index.ts
@@ -4,5 +4,11 @@
  * you may not use this file except in compliance with the Elastic License.
  */
 
-export { fileDataVisualizerProvider } from './file_data_visualizer';
-export { importDataProvider } from './import_data';
+export {
+  fileDataVisualizerProvider,
+  InputOverrides,
+  InputData,
+  AnalysisResult,
+} from './file_data_visualizer';
+
+export { importDataProvider, Settings, InjectPipeline, Mappings } from './import_data';
diff --git a/x-pack/legacy/plugins/ml/server/routes/apidoc.json b/x-pack/legacy/plugins/ml/server/routes/apidoc.json
index 574065446827d..1be31e2316228 100644
--- a/x-pack/legacy/plugins/ml/server/routes/apidoc.json
+++ b/x-pack/legacy/plugins/ml/server/routes/apidoc.json
@@ -3,7 +3,6 @@
   "version": "0.1.0",
   "description": "ML Kibana API",
   "title": "ML Kibana API",
-  "url" : "/api/ml/",
   "order": [
     "DataFrameAnalytics",
     "GetDataFrameAnalytics",
@@ -34,6 +33,9 @@
     "ForecastAnomalyDetector",
     "GetOverallBuckets",
     "GetCategories",
+    "FileDataVisualizer",
+    "AnalyzeFile",
+    "ImportFile"
     "ResultsService",
     "GetAnomaliesTableData",
     "GetCategoryDefinition",
diff --git a/x-pack/legacy/plugins/ml/server/routes/file_data_visualizer.js b/x-pack/legacy/plugins/ml/server/routes/file_data_visualizer.js
deleted file mode 100644
index fc6a0ff756928..0000000000000
--- a/x-pack/legacy/plugins/ml/server/routes/file_data_visualizer.js
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-
-import { callWithRequestFactory } from '../client/call_with_request_factory';
-import { wrapError } from '../client/errors';
-import { fileDataVisualizerProvider, importDataProvider } from '../models/file_data_visualizer';
-import { MAX_BYTES } from '../../common/constants/file_datavisualizer';
-
-import { incrementFileDataVisualizerIndexCreationCount } from '../lib/ml_telemetry/ml_telemetry';
-
-function analyzeFiles(callWithRequest, data, overrides) {
-  const { analyzeFile } = fileDataVisualizerProvider(callWithRequest);
-  return analyzeFile(data, overrides);
-}
-
-function importData(callWithRequest, id, index, settings, mappings, ingestPipeline, data) {
-  const { importData: importDataFunc } = importDataProvider(callWithRequest);
-  return importDataFunc(id, index, settings, mappings, ingestPipeline, data);
-}
-
-export function fileDataVisualizerRoutes({
-  commonRouteConfig,
-  elasticsearchPlugin,
-  route,
-  savedObjects,
-}) {
-  route({
-    method: 'POST',
-    path: '/api/ml/file_data_visualizer/analyze_file',
-    handler(request) {
-      const callWithRequest = callWithRequestFactory(elasticsearchPlugin, request);
-      const data = request.payload;
-
-      return analyzeFiles(callWithRequest, data, request.query).catch(wrapError);
-    },
-    config: {
-      ...commonRouteConfig,
-      payload: { maxBytes: MAX_BYTES },
-    },
-  });
-
-  route({
-    method: 'POST',
-    path: '/api/ml/file_data_visualizer/import',
-    handler(request) {
-      const callWithRequest = callWithRequestFactory(elasticsearchPlugin, request);
-      const { id } = request.query;
-      const { index, data, settings, mappings, ingestPipeline } = request.payload;
-
-      // `id` being `undefined` tells us that this is a new import due to create a new index.
-      // follow-up import calls to just add additional data will include the `id` of the created
-      // index, we'll ignore those and don't increment the counter.
-      if (id === undefined) {
-        incrementFileDataVisualizerIndexCreationCount(elasticsearchPlugin, savedObjects);
-      }
-
-      return importData(callWithRequest, id, index, settings, mappings, ingestPipeline, data).catch(
-        wrapError
-      );
-    },
-    config: {
-      ...commonRouteConfig,
-      payload: { maxBytes: MAX_BYTES },
-    },
-  });
-}
diff --git a/x-pack/legacy/plugins/ml/server/routes/file_data_visualizer.ts b/x-pack/legacy/plugins/ml/server/routes/file_data_visualizer.ts
new file mode 100644
index 0000000000000..95f2a9fe7298f
--- /dev/null
+++ b/x-pack/legacy/plugins/ml/server/routes/file_data_visualizer.ts
@@ -0,0 +1,159 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { schema } from '@kbn/config-schema';
+import { RequestHandlerContext } from 'kibana/server';
+import { MAX_BYTES } from '../../common/constants/file_datavisualizer';
+import { wrapError } from '../client/error_wrapper';
+import {
+  InputOverrides,
+  InputData,
+  fileDataVisualizerProvider,
+  importDataProvider,
+  Settings,
+  InjectPipeline,
+  Mappings,
+} from '../models/file_data_visualizer';
+
+import { licensePreRoutingFactory } from '../new_platform/licence_check_pre_routing_factory';
+import { RouteInitialization } from '../new_platform/plugin';
+import { incrementFileDataVisualizerIndexCreationCount } from '../lib/ml_telemetry';
+
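+// Thin wrappers binding the file data visualizer and import providers to the request handler context.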
+function analyzeFiles(context: RequestHandlerContext, data: InputData, overrides: InputOverrides) {
+  const { analyzeFile } = fileDataVisualizerProvider(context);
+  return analyzeFile(data, overrides);
+}
+
+function importData(
+  context: RequestHandlerContext,
+  id: string,
+  index: string,
+  settings: Settings,
+  mappings: Mappings,
+  ingestPipeline: InjectPipeline,
+  data: InputData
+) {
+  const { importData: importDataFunc } = importDataProvider(context);
+  return importDataFunc(id, index, settings, mappings, ingestPipeline, data);
+}
+
+/**
+ * Routes for the file data visualizer.
+ */
+export function fileDataVisualizerRoutes({
+  router,
+  xpackMainPlugin,
+  savedObjects,
+  elasticsearchPlugin,
+}: RouteInitialization) {
+  /**
+   * @apiGroup FileDataVisualizer
+   *
+   * @api {post} /api/ml/file_data_visualizer/analyze_file Analyze file data
+   * @apiName AnalyzeFile
+   * @apiDescription Analyzes the file data and returns the detected structure (format, mappings, and field stats).
+   */
+  router.post(
+    {
+      path: '/api/ml/file_data_visualizer/analyze_file',
+      validate: {
+        body: schema.any(),
+        query: schema.maybe(
+          schema.object({
+            charset: schema.maybe(schema.string()),
+            column_names: schema.maybe(schema.string()),
+            delimiter: schema.maybe(schema.string()),
+            explain: schema.maybe(schema.string()),
+            format: schema.maybe(schema.string()),
+            grok_pattern: schema.maybe(schema.string()),
+            has_header_row: schema.maybe(schema.string()),
+            line_merge_size_limit: schema.maybe(schema.string()),
+            lines_to_sample: schema.maybe(schema.string()),
+            quote: schema.maybe(schema.string()),
+            should_trim_fields: schema.maybe(schema.string()),
+            timeout: schema.maybe(schema.string()),
+            timestamp_field: schema.maybe(schema.string()),
+            timestamp_format: schema.maybe(schema.string()),
+          })
+        ),
+      },
+      options: {
+        body: {
+          accepts: ['text/*', 'application/json'],
+          maxBytes: MAX_BYTES,
+        },
+      },
+    },
+    licensePreRoutingFactory(xpackMainPlugin, async (context, request, response) => {
+      try {
+        const result = await analyzeFiles(context, request.body, request.query);
+        return response.ok({ body: result });
+      } catch (e) {
+        return response.customError(wrapError(e));
+      }
+    })
+  );
+
+  /**
+   * @apiGroup FileDataVisualizer
+   *
+   * @api {post} /api/ml/file_data_visualizer/import Import file data
+   * @apiName ImportFile
+   * @apiDescription Imports file data into an Elasticsearch index.
+   */
+  router.post(
+    {
+      path: '/api/ml/file_data_visualizer/import',
+      validate: {
+        query: schema.object({
+          id: schema.maybe(schema.string()),
+        }),
+        body: schema.object({
+          index: schema.maybe(schema.string()),
+          data: schema.arrayOf(schema.any()),
+          settings: schema.maybe(schema.any()),
+          mappings: schema.any(),
+          ingestPipeline: schema.object({
+            id: schema.maybe(schema.string()),
+            pipeline: schema.maybe(schema.any()),
+          }),
+        }),
+      },
+      options: {
+        body: {
+          accepts: ['application/json'],
+          maxBytes: MAX_BYTES,
+        },
+      },
+    },
+    licensePreRoutingFactory(xpackMainPlugin, async (context, request, response) => {
+      try {
+        const { id } = request.query;
+        const { index, data, settings, mappings, ingestPipeline } = request.body;
+
+        // `id` being `undefined` tells us this is a new import that will create a new index.
+        // Follow-up import calls that just add additional data will include the `id` of the
+        // created index; we ignore those and don't increment the counter.
+        if (id === undefined) {
+          await incrementFileDataVisualizerIndexCreationCount(elasticsearchPlugin, savedObjects!);
+        }
+
+        const result = await importData(
+          context,
+          id,
+          index,
+          settings,
+          mappings,
+          ingestPipeline,
+          data
+        );
+        return response.ok({ body: result });
+      } catch (e) {
+        return response.customError(wrapError(e));
+      }
+    })
+  );
+}
diff --git a/x-pack/test/functional/apps/endpoint/index.ts b/x-pack/test/functional/apps/endpoint/index.ts
index 1a0d3e973285b..e44a4cb846f2c 100644
--- a/x-pack/test/functional/apps/endpoint/index.ts
+++ b/x-pack/test/functional/apps/endpoint/index.ts
@@ -10,5 +10,6 @@ export default function({ loadTestFile }: FtrProviderContext) {
     this.tags('ciGroup7');
 
     loadTestFile(require.resolve('./feature_controls'));
+    loadTestFile(require.resolve('./landing_page'));
   });
 }
diff --git a/x-pack/test/functional/apps/endpoint/landing_page.ts b/x-pack/test/functional/apps/endpoint/landing_page.ts
new file mode 100644
index 0000000000000..65af91feae407
--- /dev/null
+++ b/x-pack/test/functional/apps/endpoint/landing_page.ts
@@ -0,0 +1,24 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import expect from '@kbn/expect';
+import { FtrProviderContext } from '../../ftr_provider_context';
+
+export default ({ getPageObjects }: FtrProviderContext) => {
+  const pageObjects = getPageObjects(['common', 'endpoint']);
+
+  describe('Endpoint landing page', function() {
+    this.tags('ciGroup7');
+    before(async () => {
+      await pageObjects.common.navigateToApp('endpoint');
+    });
+
+    it('Loads the endpoint app', async () => {
+      const welcomeEndpointMessage = await pageObjects.endpoint.welcomeEndpointTitle();
+      expect(welcomeEndpointMessage).to.be('Hello World');
+    });
+  });
+};
diff --git a/x-pack/test/functional/page_objects/endpoint_page.ts b/x-pack/test/functional/page_objects/endpoint_page.ts
new file mode 100644
index 0000000000000..f02a899f6d37d
--- /dev/null
+++ b/x-pack/test/functional/page_objects/endpoint_page.ts
@@ -0,0 +1,17 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { FtrProviderContext } from '../ftr_provider_context';
+
+export function EndpointPageProvider({ getService }: FtrProviderContext) {
+  const testSubjects = getService('testSubjects');
+
+  return {
+    async welcomeEndpointTitle() {
+      return await testSubjects.getVisibleText('welcomeTitle');
+    },
+  };
+}
diff --git a/x-pack/test/functional/page_objects/index.ts b/x-pack/test/functional/page_objects/index.ts
index 91d4a3663fa65..19a626536f1bd 100644
--- a/x-pack/test/functional/page_objects/index.ts
+++ b/x-pack/test/functional/page_objects/index.ts
@@ -46,6 +46,7 @@ import { CopySavedObjectsToSpacePageProvider } from './copy_saved_objects_to_spa
 import { LensPageProvider } from './lens_page';
 import { InfraMetricExplorerProvider } from './infra_metric_explorer';
 import { RoleMappingsPageProvider } from './role_mappings_page';
+import { EndpointPageProvider } from './endpoint_page';
 
 // just like services, PageObjects are defined as a map of
 // names to Providers. Merge in Kibana's or pick specific ones
@@ -78,4 +79,5 @@ export const pageObjects = {
   copySavedObjectsToSpace: CopySavedObjectsToSpacePageProvider,
   lens: LensPageProvider,
   roleMappings: RoleMappingsPageProvider,
+  endpoint: EndpointPageProvider,
 };
diff --git a/yarn.lock b/yarn.lock
index a35cd1d541762..5dc4db12c5db4 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -20051,18 +20051,10 @@ markdown-it@^10.0.0:
     mdurl "^1.0.1"
     uc.micro "^1.0.5"
 
-markdown-to-jsx@^6.9.1:
-  version "6.9.3"
-  resolved "https://registry.yarnpkg.com/markdown-to-jsx/-/markdown-to-jsx-6.9.3.tgz#31719e3c54517ba9805db81d53701b89f5d2ed7e"
-  integrity sha512-iXteiv317VZd1vk/PBH5MWMD4r0XWekoWCHRVVadBcnCtxavhtfV1UaEaQgq9KyckTv31L60ASh5ZVVrOh37Qg==
-  dependencies:
-    prop-types "^15.6.2"
-    unquote "^1.1.0"
-
-markdown-to-jsx@^6.9.3:
-  version "6.10.3"
-  resolved "https://registry.yarnpkg.com/markdown-to-jsx/-/markdown-to-jsx-6.10.3.tgz#7f0946684acd321125ff2de7fd258a9b9c7c40b7"
-  integrity sha512-PSoUyLnW/xoW6RsxZrquSSz5eGEOTwa15H5eqp3enmrp8esmgDJmhzd6zmQ9tgAA9TxJzx1Hmf3incYU/IamoQ==
+markdown-to-jsx@^6.9.1, markdown-to-jsx@^6.9.3:
+  version "6.11.0"
+  resolved "https://registry.yarnpkg.com/markdown-to-jsx/-/markdown-to-jsx-6.11.0.tgz#a2e3f2bc781c3402d8bb0f8e0a12a186474623b0"
+  integrity sha512-RH7LCJQ4RFmPqVeZEesKaO1biRzB/k4utoofmTCp3Eiw6D7qfvK8fzZq/2bjEJAtVkfPrM5SMt5APGf2rnaKMg==
   dependencies:
     prop-types "^15.6.2"
     unquote "^1.1.0"
@@ -23809,10 +23801,10 @@ querystring@0.2.0, querystring@^0.2.0:
   resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620"
   integrity sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=
 
-querystringify@^2.0.0:
-  version "2.1.0"
-  resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.1.0.tgz#7ded8dfbf7879dcc60d0a644ac6754b283ad17ef"
-  integrity sha512-sluvZZ1YiTLD5jsqZcDmFyV2EwToyXZBfpoVOmktMmW+VEnhgakFHnasVph65fOjGPTWN0Nw3+XQaSeMayr0kg==
+querystringify@^2.1.1:
+  version "2.1.1"
+  resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.1.1.tgz#60e5a5fd64a7f8bfa4d2ab2ed6fdf4c85bad154e"
+  integrity sha512-w7fLxIRCRT7U8Qu53jQnJyPkYZIaR4n5151KMfcJlO/A9397Wxb1amJvROTK6TOnp7PfoAmg/qXiNHI+08jRfA==
 
 quick-lru@^1.0.0:
   version "1.1.0"
@@ -30206,11 +30198,11 @@ url-parse-lax@^3.0.0:
     prepend-http "^2.0.0"
 
 url-parse@^1.4.3:
-  version "1.4.4"
-  resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.4.4.tgz#cac1556e95faa0303691fec5cf9d5a1bc34648f8"
-  integrity sha512-/92DTTorg4JjktLNLe6GPS2/RvAd/RGr6LuktmWSMLEOa6rjnlrFXNgSbSmkNvCoL2T028A0a1JaJLzRMlFoHg==
+  version "1.4.7"
+  resolved "https://registry.yarnpkg.com/url-parse/-/url-parse-1.4.7.tgz#a8a83535e8c00a316e403a5db4ac1b9b853ae278"
+  integrity sha512-d3uaVyzDB9tQoSXFvuSUNFibTd9zxd2bkVrDRvF5TmvWWQwqE4lgYJ5m+x1DbecWkw+LK4RNl2CU1hHuOKPVlg==
   dependencies:
-    querystringify "^2.0.0"
+    querystringify "^2.1.1"
     requires-port "^1.0.0"
 
 url-pattern@^1.0.3: