diff --git a/web-console/src/druid-models/execution/execution-ingest-complete.mock.ts b/web-console/src/druid-models/execution/execution-ingest-complete.mock.ts
index f9659c61556c..64bb2201d8f2 100644
--- a/web-console/src/druid-models/execution/execution-ingest-complete.mock.ts
+++ b/web-console/src/druid-models/execution/execution-ingest-complete.mock.ts
@@ -35,10 +35,10 @@ PARTITIONED BY ALL TIME
export const EXECUTION_INGEST_COMPLETE = Execution.fromTaskPayloadAndReport(
{
- task: 'query-32ced762-7679-4a25-9220-3915c5976961',
+ task: 'query-09af0c1e-1c0f-4539-917c-b0458849d0d9',
payload: {
type: 'query_controller',
- id: 'query-32ced762-7679-4a25-9220-3915c5976961',
+ id: 'query-09af0c1e-1c0f-4539-917c-b0458849d0d9',
spec: {
query: {
queryType: 'scan',
@@ -47,14 +47,12 @@ export const EXECUTION_INGEST_COMPLETE = Execution.fromTaskPayloadAndReport(
inputSource: {
type: 'http',
uris: ['https://static.imply.io/example-data/kttm-v2/kttm-v2-2019-08-25.json.gz'],
- httpAuthenticationUsername: null,
- httpAuthenticationPassword: null,
},
inputFormat: {
type: 'json',
- flattenSpec: null,
- featureSpec: {},
keepNullColumns: false,
+ assumeNewlineDelimited: false,
+ useJsonNodeReader: false,
},
signature: [
{ name: 'timestamp', type: 'STRING' },
@@ -80,9 +78,10 @@ export const EXECUTION_INGEST_COMPLETE = Execution.fromTaskPayloadAndReport(
finalize: false,
finalizeAggregations: false,
groupByEnableMultiValueUnnesting: false,
+ queryId: '09af0c1e-1c0f-4539-917c-b0458849d0d9',
scanSignature: '[{"name":"agent_type","type":"STRING"},{"name":"v0","type":"LONG"}]',
sqlInsertSegmentGranularity: '{"type":"all"}',
- sqlQueryId: '32ced762-7679-4a25-9220-3915c5976961',
+ sqlQueryId: '09af0c1e-1c0f-4539-917c-b0458849d0d9',
sqlReplaceTimeChunks: 'all',
},
granularity: { type: 'all' },
@@ -104,18 +103,19 @@ export const EXECUTION_INGEST_COMPLETE = Execution.fromTaskPayloadAndReport(
'REPLACE INTO "kttm_simple" OVERWRITE ALL\nSELECT TIME_PARSE("timestamp") AS "__time", agent_type\nFROM TABLE(\n EXTERN(\n \'{"type":"http","uris":["https://static.imply.io/example-data/kttm-v2/kttm-v2-2019-08-25.json.gz"]}\',\n \'{"type":"json"}\',\n \'[{"name":"timestamp","type":"string"},{"name":"agent_type","type":"string"}]\'\n )\n)\nPARTITIONED BY ALL TIME',
sqlQueryContext: {
finalizeAggregations: false,
- groupByEnableMultiValueUnnesting: false,
maxParseExceptions: 0,
+ sqlQueryId: '09af0c1e-1c0f-4539-917c-b0458849d0d9',
+ groupByEnableMultiValueUnnesting: false,
sqlInsertSegmentGranularity: '{"type":"all"}',
- sqlQueryId: '32ced762-7679-4a25-9220-3915c5976961',
sqlReplaceTimeChunks: 'all',
+ queryId: '09af0c1e-1c0f-4539-917c-b0458849d0d9',
},
sqlTypeNames: ['TIMESTAMP', 'VARCHAR'],
context: { forceTimeChunkLock: true, useLineageBasedSegmentAllocation: true },
- groupId: 'query-32ced762-7679-4a25-9220-3915c5976961',
+ groupId: 'query-09af0c1e-1c0f-4539-917c-b0458849d0d9',
dataSource: 'kttm_simple',
resource: {
- availabilityGroup: 'query-32ced762-7679-4a25-9220-3915c5976961',
+ availabilityGroup: 'query-09af0c1e-1c0f-4539-917c-b0458849d0d9',
requiredCapacity: 1,
},
},
@@ -124,14 +124,20 @@ export const EXECUTION_INGEST_COMPLETE = Execution.fromTaskPayloadAndReport(
{
multiStageQuery: {
type: 'multiStageQuery',
- taskId: 'query-32ced762-7679-4a25-9220-3915c5976961',
+ taskId: 'query-09af0c1e-1c0f-4539-917c-b0458849d0d9',
payload: {
- status: { status: 'SUCCESS', startTime: '2022-08-22T20:12:51.391Z', durationMs: 25097 },
+ status: {
+ status: 'SUCCESS',
+ startTime: '2022-10-31T16:11:13.160Z',
+ durationMs: 9012,
+ pendingTasks: 0,
+ runningTasks: 2,
+ },
stages: [
{
stageNumber: 0,
definition: {
- id: '0b353011-6ea1-480a-8ca8-386771621672_0',
+ id: 'b94caff0-f693-47ed-a242-ccf837550383_0',
input: [
{
type: 'external',
@@ -140,14 +146,12 @@ export const EXECUTION_INGEST_COMPLETE = Execution.fromTaskPayloadAndReport(
uris: [
'https://static.imply.io/example-data/kttm-v2/kttm-v2-2019-08-25.json.gz',
],
- httpAuthenticationUsername: null,
- httpAuthenticationPassword: null,
},
inputFormat: {
type: 'json',
- flattenSpec: null,
- featureSpec: {},
keepNullColumns: false,
+ assumeNewlineDelimited: false,
+ useJsonNodeReader: false,
},
signature: [
{ name: 'timestamp', type: 'STRING' },
@@ -180,10 +184,11 @@ export const EXECUTION_INGEST_COMPLETE = Execution.fromTaskPayloadAndReport(
finalize: false,
finalizeAggregations: false,
groupByEnableMultiValueUnnesting: false,
+ queryId: '09af0c1e-1c0f-4539-917c-b0458849d0d9',
scanSignature:
'[{"name":"agent_type","type":"STRING"},{"name":"v0","type":"LONG"}]',
sqlInsertSegmentGranularity: '{"type":"all"}',
- sqlQueryId: '32ced762-7679-4a25-9220-3915c5976961',
+ sqlQueryId: '09af0c1e-1c0f-4539-917c-b0458849d0d9',
sqlReplaceTimeChunks: 'all',
},
granularity: { type: 'all' },
@@ -205,14 +210,14 @@ export const EXECUTION_INGEST_COMPLETE = Execution.fromTaskPayloadAndReport(
phase: 'FINISHED',
workerCount: 1,
partitionCount: 1,
- startTime: '2022-08-22T20:12:53.790Z',
- duration: 20229,
+ startTime: '2022-10-31T16:11:15.380Z',
+ duration: 4887,
sort: true,
},
{
stageNumber: 1,
definition: {
- id: '0b353011-6ea1-480a-8ca8-386771621672_1',
+ id: 'b94caff0-f693-47ed-a242-ccf837550383_1',
input: [{ type: 'stage', stage: 0 }],
processor: {
type: 'segmentGenerator',
@@ -256,8 +261,8 @@ export const EXECUTION_INGEST_COMPLETE = Execution.fromTaskPayloadAndReport(
phase: 'FINISHED',
workerCount: 1,
partitionCount: 1,
- startTime: '2022-08-22T20:13:13.991Z',
- duration: 2497,
+ startTime: '2022-10-31T16:11:20.264Z',
+ duration: 1908,
},
],
counters: {
@@ -269,8 +274,8 @@ export const EXECUTION_INGEST_COMPLETE = Execution.fromTaskPayloadAndReport(
sortProgress: {
type: 'sortProgress',
totalMergingLevels: 3,
- levelToTotalBatches: { '0': 1, '1': 1, '2': 1 },
- levelToMergedBatches: { '0': 1, '1': 1, '2': 1 },
+ levelToTotalBatches: { '0': 2, '1': 1, '2': 1 },
+ levelToMergedBatches: { '0': 2, '1': 1, '2': 1 },
totalMergersForUltimateLevel: 1,
progressDigest: 1.0,
},
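The refreshed success mock now carries pendingTasks and runningTasks in multiStageQuery.payload.status. A minimal sketch of how those two numbers become the console's usage summary, mirroring the getUsageInfoFromStatusPayload helper added in execution.ts further down (the standalone function here is illustrative; field names come from the mock above):

interface UsageInfo {
  pendingTasks: number;
  runningTasks: number;
}

// Both fields must be numbers, otherwise no usage info is reported.
function usageInfoFromStatus(status: any): UsageInfo | undefined {
  const { pendingTasks, runningTasks } = status;
  if (typeof pendingTasks !== 'number' || typeof runningTasks !== 'number') return;
  return { pendingTasks, runningTasks };
}

// For the status payload in EXECUTION_INGEST_COMPLETE this yields
// { pendingTasks: 0, runningTasks: 2 }.
console.log(
  usageInfoFromStatus({
    status: 'SUCCESS',
    startTime: '2022-10-31T16:11:13.160Z',
    durationMs: 9012,
    pendingTasks: 0,
    runningTasks: 2,
  }),
);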
diff --git a/web-console/src/druid-models/execution/execution-ingest-error.mock.ts b/web-console/src/druid-models/execution/execution-ingest-error.mock.ts
index ae1c85907170..42fbde8dbf35 100644
--- a/web-console/src/druid-models/execution/execution-ingest-error.mock.ts
+++ b/web-console/src/druid-models/execution/execution-ingest-error.mock.ts
@@ -41,10 +41,10 @@ PARTITIONED BY ALL
export const EXECUTION_INGEST_ERROR = Execution.fromTaskPayloadAndReport(
{
- task: 'query-c6bffa9d-43c4-45a0-95f8-0c8c453655fb',
+ task: 'query-955fa5a2-0ae6-4912-bc39-cc9fc012de5a',
payload: {
type: 'query_controller',
- id: 'query-c6bffa9d-43c4-45a0-95f8-0c8c453655fb',
+ id: 'query-955fa5a2-0ae6-4912-bc39-cc9fc012de5a',
spec: {
query: {
queryType: 'scan',
@@ -53,14 +53,12 @@ export const EXECUTION_INGEST_ERROR = Execution.fromTaskPayloadAndReport(
inputSource: {
type: 'http',
uris: ['https://static.imply.io/example-data/kttm/kttm-2019-08-25.json.gz'],
- httpAuthenticationUsername: null,
- httpAuthenticationPassword: null,
},
inputFormat: {
type: 'json',
- flattenSpec: null,
- featureSpec: {},
keepNullColumns: false,
+ assumeNewlineDelimited: false,
+ useJsonNodeReader: false,
},
signature: [
{ name: 'timestamp', type: 'STRING' },
@@ -87,9 +85,10 @@ export const EXECUTION_INGEST_ERROR = Execution.fromTaskPayloadAndReport(
finalizeAggregations: false,
groupByEnableMultiValueUnnesting: false,
maxParseExceptions: 10,
+ queryId: '955fa5a2-0ae6-4912-bc39-cc9fc012de5a',
scanSignature: '[{"name":"agent_type","type":"STRING"},{"name":"v0","type":"LONG"}]',
sqlInsertSegmentGranularity: '{"type":"all"}',
- sqlQueryId: 'c6bffa9d-43c4-45a0-95f8-0c8c453655fb',
+ sqlQueryId: '955fa5a2-0ae6-4912-bc39-cc9fc012de5a',
sqlReplaceTimeChunks: 'all',
},
granularity: { type: 'all' },
@@ -110,19 +109,20 @@ export const EXECUTION_INGEST_ERROR = Execution.fromTaskPayloadAndReport(
sqlQuery:
'REPLACE INTO "kttm_simple" OVERWRITE ALL\nSELECT TIME_PARSE("timestamp") AS "__time", agent_type\nFROM TABLE(\n EXTERN(\n \'{"type":"http","uris":["https://static.imply.io/example-data/kttm/kttm-2019-08-25.json.gz"]}\',\n \'{"type":"json"}\',\n \'[{"name":"timestamp","type":"string"},{"name":"agent_type","type":"string"}]\'\n )\n)\nPARTITIONED BY ALL',
sqlQueryContext: {
+ maxParseExceptions: 10,
finalizeAggregations: false,
+ sqlQueryId: '955fa5a2-0ae6-4912-bc39-cc9fc012de5a',
groupByEnableMultiValueUnnesting: false,
- maxParseExceptions: 10,
sqlInsertSegmentGranularity: '{"type":"all"}',
- sqlQueryId: 'c6bffa9d-43c4-45a0-95f8-0c8c453655fb',
sqlReplaceTimeChunks: 'all',
+ queryId: '955fa5a2-0ae6-4912-bc39-cc9fc012de5a',
},
sqlTypeNames: ['TIMESTAMP', 'VARCHAR'],
context: { forceTimeChunkLock: true, useLineageBasedSegmentAllocation: true },
- groupId: 'query-c6bffa9d-43c4-45a0-95f8-0c8c453655fb',
+ groupId: 'query-955fa5a2-0ae6-4912-bc39-cc9fc012de5a',
dataSource: 'kttm_simple',
resource: {
- availabilityGroup: 'query-c6bffa9d-43c4-45a0-95f8-0c8c453655fb',
+ availabilityGroup: 'query-955fa5a2-0ae6-4912-bc39-cc9fc012de5a',
requiredCapacity: 1,
},
},
@@ -131,12 +131,12 @@ export const EXECUTION_INGEST_ERROR = Execution.fromTaskPayloadAndReport(
{
multiStageQuery: {
type: 'multiStageQuery',
- taskId: 'query-c6bffa9d-43c4-45a0-95f8-0c8c453655fb',
+ taskId: 'query-955fa5a2-0ae6-4912-bc39-cc9fc012de5a',
payload: {
status: {
status: 'FAILED',
errorReport: {
- taskId: 'query-c6bffa9d-43c4-45a0-95f8-0c8c453655fb',
+ taskId: 'query-955fa5a2-0ae6-4912-bc39-cc9fc012de5a',
host: 'localhost',
error: {
errorCode: 'TooManyWarnings',
@@ -148,8 +148,8 @@ export const EXECUTION_INGEST_ERROR = Execution.fromTaskPayloadAndReport(
},
warnings: [
{
- taskId: 'query-c6bffa9d-43c4-45a0-95f8-0c8c453655fb-worker0',
- host: 'localhost:8101',
+ taskId: 'query-955fa5a2-0ae6-4912-bc39-cc9fc012de5a-worker0',
+ host: 'localhost:8091',
stageNumber: 0,
error: {
errorCode: 'CannotParseExternalData',
@@ -157,11 +157,11 @@ export const EXECUTION_INGEST_ERROR = Execution.fromTaskPayloadAndReport(
'Unable to parse row [] (Path: https://static.imply.io/example-data/kttm/kttm-2019-08-25.json.gz, Record: 13588, Line: 13588)',
},
exceptionStackTrace:
- 'org.apache.druid.java.util.common.parsers.ParseException: Unable to parse row [] (Path: https://static.imply.io/example-data/kttm/kttm-2019-08-25.json.gz, Record: 13588, Line: 13588)\n\tat org.apache.druid.data.input.IntermediateRowParsingReader$1.hasNext(IntermediateRowParsingReader.java:79)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$2.findNextIteratorIfNecessary(CloseableIterator.java:74)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$2.next(CloseableIterator.java:108)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$1.next(CloseableIterator.java:52)\n\tat org.apache.druid.msq.input.external.ExternalInputSliceReader$1$1.hasNext(ExternalInputSliceReader.java:179)\n\tat org.apache.druid.java.util.common.guava.BaseSequence$1.next(BaseSequence.java:115)\n\tat org.apache.druid.segment.RowWalker.advance(RowWalker.java:70)\n\tat org.apache.druid.segment.RowBasedCursor.advanceUninterruptibly(RowBasedCursor.java:110)\n\tat org.apache.druid.segment.RowBasedCursor.advance(RowBasedCursor.java:103)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.populateFrameWriterAndFlushIfNeeded(ScanQueryFrameProcessor.java:251)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.runWithSegment(ScanQueryFrameProcessor.java:182)\n\tat org.apache.druid.msq.querykit.BaseLeafFrameProcessor.runIncrementally(BaseLeafFrameProcessor.java:111)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.runIncrementally(ScanQueryFrameProcessor.java:148)\n\tat org.apache.druid.frame.processor.FrameProcessors$1FrameProcessorWithBaggage.runIncrementally(FrameProcessors.java:70)\n\tat org.apache.druid.frame.processor.FrameProcessorExecutor$1ExecutorRunnable.runProcessorNow(FrameProcessorExecutor.java:229)\n\tat org.apache.druid.frame.processor.FrameProcessorExecutor$1ExecutorRunnable.run(FrameProcessorExecutor.java:137)\n\tat org.apache.druid.msq.exec.WorkerImpl$1$2.run(WorkerImpl.java:634)\n\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n\tat java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)\n\tat org.apache.druid.query.PrioritizedListenableFutureTask.run(PrioritizedExecutorService.java:251)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n\tat java.base/java.lang.Thread.run(Thread.java:829)\nCaused by: com.fasterxml.jackson.databind.exc.MismatchedInputException: No content to map due to end-of-input\n at [Source: (String)""; line: 1, column: 0]\n\tat com.fasterxml.jackson.databind.exc.MismatchedInputException.from(MismatchedInputException.java:59)\n\tat com.fasterxml.jackson.databind.ObjectMapper._initForReading(ObjectMapper.java:4360)\n\tat com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:4205)\n\tat com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3214)\n\tat com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3182)\n\tat org.apache.druid.data.input.impl.JsonLineReader.parseInputRows(JsonLineReader.java:69)\n\tat org.apache.druid.data.input.impl.JsonLineReader.parseInputRows(JsonLineReader.java:48)\n\tat org.apache.druid.data.input.IntermediateRowParsingReader$1.hasNext(IntermediateRowParsingReader.java:71)\n\t... 22 more\n',
+ 'org.apache.druid.java.util.common.parsers.ParseException: Unable to parse row [] (Path: https://static.imply.io/example-data/kttm/kttm-2019-08-25.json.gz, Record: 13588, Line: 13588)\n\tat org.apache.druid.data.input.IntermediateRowParsingReader$1.hasNext(IntermediateRowParsingReader.java:79)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$2.findNextIteratorIfNecessary(CloseableIterator.java:74)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$2.next(CloseableIterator.java:108)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$1.next(CloseableIterator.java:52)\n\tat org.apache.druid.msq.input.external.ExternalInputSliceReader$1$1.hasNext(ExternalInputSliceReader.java:179)\n\tat org.apache.druid.java.util.common.guava.BaseSequence$1.next(BaseSequence.java:115)\n\tat org.apache.druid.segment.RowWalker.advance(RowWalker.java:70)\n\tat org.apache.druid.segment.RowBasedCursor.advanceUninterruptibly(RowBasedCursor.java:110)\n\tat org.apache.druid.segment.RowBasedCursor.advance(RowBasedCursor.java:103)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.populateFrameWriterAndFlushIfNeeded(ScanQueryFrameProcessor.java:245)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.runWithSegment(ScanQueryFrameProcessor.java:172)\n\tat org.apache.druid.msq.querykit.BaseLeafFrameProcessor.runIncrementally(BaseLeafFrameProcessor.java:164)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.runIncrementally(ScanQueryFrameProcessor.java:137)\n\tat org.apache.druid.frame.processor.FrameProcessors$1FrameProcessorWithBaggage.runIncrementally(FrameProcessors.java:70)\n\tat org.apache.druid.frame.processor.FrameProcessorExecutor$1ExecutorRunnable.runProcessorNow(FrameProcessorExecutor.java:229)\n\tat org.apache.druid.frame.processor.FrameProcessorExecutor$1ExecutorRunnable.run(FrameProcessorExecutor.java:137)\n\tat org.apache.druid.msq.exec.WorkerImpl$1$2.run(WorkerImpl.java:666)\n\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n\tat java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)\n\tat org.apache.druid.query.PrioritizedListenableFutureTask.run(PrioritizedExecutorService.java:251)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n\tat java.base/java.lang.Thread.run(Thread.java:829)\nCaused by: com.fasterxml.jackson.databind.exc.MismatchedInputException: No content to map due to end-of-input\n at [Source: (String)""; line: 1, column: 0]\n\tat com.fasterxml.jackson.databind.exc.MismatchedInputException.from(MismatchedInputException.java:59)\n\tat com.fasterxml.jackson.databind.ObjectMapper._initForReading(ObjectMapper.java:4360)\n\tat com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:4205)\n\tat com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3214)\n\tat com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3182)\n\tat org.apache.druid.data.input.impl.JsonLineReader.parseInputRows(JsonLineReader.java:69)\n\tat org.apache.druid.data.input.impl.JsonLineReader.parseInputRows(JsonLineReader.java:48)\n\tat org.apache.druid.data.input.IntermediateRowParsingReader$1.hasNext(IntermediateRowParsingReader.java:71)\n\t... 22 more\n',
},
{
- taskId: 'query-c6bffa9d-43c4-45a0-95f8-0c8c453655fb-worker0',
- host: 'localhost:8101',
+ taskId: 'query-955fa5a2-0ae6-4912-bc39-cc9fc012de5a-worker0',
+ host: 'localhost:8091',
stageNumber: 0,
error: {
errorCode: 'CannotParseExternalData',
@@ -169,11 +169,11 @@ export const EXECUTION_INGEST_ERROR = Execution.fromTaskPayloadAndReport(
'Unable to parse row [] (Path: https://static.imply.io/example-data/kttm/kttm-2019-08-25.json.gz, Record: 27029, Line: 27030)',
},
exceptionStackTrace:
- 'org.apache.druid.java.util.common.parsers.ParseException: Unable to parse row [] (Path: https://static.imply.io/example-data/kttm/kttm-2019-08-25.json.gz, Record: 27029, Line: 27030)\n\tat org.apache.druid.data.input.IntermediateRowParsingReader$1.hasNext(IntermediateRowParsingReader.java:79)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$2.findNextIteratorIfNecessary(CloseableIterator.java:74)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$2.next(CloseableIterator.java:108)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$1.next(CloseableIterator.java:52)\n\tat org.apache.druid.msq.input.external.ExternalInputSliceReader$1$1.hasNext(ExternalInputSliceReader.java:179)\n\tat org.apache.druid.java.util.common.guava.BaseSequence$1.next(BaseSequence.java:115)\n\tat org.apache.druid.segment.RowWalker.advance(RowWalker.java:70)\n\tat org.apache.druid.segment.RowBasedCursor.advanceUninterruptibly(RowBasedCursor.java:110)\n\tat org.apache.druid.segment.RowBasedCursor.advance(RowBasedCursor.java:103)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.populateFrameWriterAndFlushIfNeeded(ScanQueryFrameProcessor.java:251)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.runWithSegment(ScanQueryFrameProcessor.java:182)\n\tat org.apache.druid.msq.querykit.BaseLeafFrameProcessor.runIncrementally(BaseLeafFrameProcessor.java:111)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.runIncrementally(ScanQueryFrameProcessor.java:148)\n\tat org.apache.druid.frame.processor.FrameProcessors$1FrameProcessorWithBaggage.runIncrementally(FrameProcessors.java:70)\n\tat org.apache.druid.frame.processor.FrameProcessorExecutor$1ExecutorRunnable.runProcessorNow(FrameProcessorExecutor.java:229)\n\tat org.apache.druid.frame.processor.FrameProcessorExecutor$1ExecutorRunnable.run(FrameProcessorExecutor.java:137)\n\tat org.apache.druid.msq.exec.WorkerImpl$1$2.run(WorkerImpl.java:634)\n\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n\tat java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)\n\tat org.apache.druid.query.PrioritizedListenableFutureTask.run(PrioritizedExecutorService.java:251)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n\tat java.base/java.lang.Thread.run(Thread.java:829)\nCaused by: com.fasterxml.jackson.databind.exc.MismatchedInputException: No content to map due to end-of-input\n at [Source: (String)""; line: 1, column: 0]\n\tat com.fasterxml.jackson.databind.exc.MismatchedInputException.from(MismatchedInputException.java:59)\n\tat com.fasterxml.jackson.databind.ObjectMapper._initForReading(ObjectMapper.java:4360)\n\tat com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:4205)\n\tat com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3214)\n\tat com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3182)\n\tat org.apache.druid.data.input.impl.JsonLineReader.parseInputRows(JsonLineReader.java:69)\n\tat org.apache.druid.data.input.impl.JsonLineReader.parseInputRows(JsonLineReader.java:48)\n\tat org.apache.druid.data.input.IntermediateRowParsingReader$1.hasNext(IntermediateRowParsingReader.java:71)\n\t... 22 more\n',
+ 'org.apache.druid.java.util.common.parsers.ParseException: Unable to parse row [] (Path: https://static.imply.io/example-data/kttm/kttm-2019-08-25.json.gz, Record: 27029, Line: 27030)\n\tat org.apache.druid.data.input.IntermediateRowParsingReader$1.hasNext(IntermediateRowParsingReader.java:79)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$2.findNextIteratorIfNecessary(CloseableIterator.java:74)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$2.next(CloseableIterator.java:108)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$1.next(CloseableIterator.java:52)\n\tat org.apache.druid.msq.input.external.ExternalInputSliceReader$1$1.hasNext(ExternalInputSliceReader.java:179)\n\tat org.apache.druid.java.util.common.guava.BaseSequence$1.next(BaseSequence.java:115)\n\tat org.apache.druid.segment.RowWalker.advance(RowWalker.java:70)\n\tat org.apache.druid.segment.RowBasedCursor.advanceUninterruptibly(RowBasedCursor.java:110)\n\tat org.apache.druid.segment.RowBasedCursor.advance(RowBasedCursor.java:103)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.populateFrameWriterAndFlushIfNeeded(ScanQueryFrameProcessor.java:245)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.runWithSegment(ScanQueryFrameProcessor.java:172)\n\tat org.apache.druid.msq.querykit.BaseLeafFrameProcessor.runIncrementally(BaseLeafFrameProcessor.java:164)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.runIncrementally(ScanQueryFrameProcessor.java:137)\n\tat org.apache.druid.frame.processor.FrameProcessors$1FrameProcessorWithBaggage.runIncrementally(FrameProcessors.java:70)\n\tat org.apache.druid.frame.processor.FrameProcessorExecutor$1ExecutorRunnable.runProcessorNow(FrameProcessorExecutor.java:229)\n\tat org.apache.druid.frame.processor.FrameProcessorExecutor$1ExecutorRunnable.run(FrameProcessorExecutor.java:137)\n\tat org.apache.druid.msq.exec.WorkerImpl$1$2.run(WorkerImpl.java:666)\n\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n\tat java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)\n\tat org.apache.druid.query.PrioritizedListenableFutureTask.run(PrioritizedExecutorService.java:251)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n\tat java.base/java.lang.Thread.run(Thread.java:829)\nCaused by: com.fasterxml.jackson.databind.exc.MismatchedInputException: No content to map due to end-of-input\n at [Source: (String)""; line: 1, column: 0]\n\tat com.fasterxml.jackson.databind.exc.MismatchedInputException.from(MismatchedInputException.java:59)\n\tat com.fasterxml.jackson.databind.ObjectMapper._initForReading(ObjectMapper.java:4360)\n\tat com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:4205)\n\tat com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3214)\n\tat com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3182)\n\tat org.apache.druid.data.input.impl.JsonLineReader.parseInputRows(JsonLineReader.java:69)\n\tat org.apache.druid.data.input.impl.JsonLineReader.parseInputRows(JsonLineReader.java:48)\n\tat org.apache.druid.data.input.IntermediateRowParsingReader$1.hasNext(IntermediateRowParsingReader.java:71)\n\t... 22 more\n',
},
{
- taskId: 'query-c6bffa9d-43c4-45a0-95f8-0c8c453655fb-worker0',
- host: 'localhost:8101',
+ taskId: 'query-955fa5a2-0ae6-4912-bc39-cc9fc012de5a-worker0',
+ host: 'localhost:8091',
stageNumber: 0,
error: {
errorCode: 'CannotParseExternalData',
@@ -181,11 +181,11 @@ export const EXECUTION_INGEST_ERROR = Execution.fromTaskPayloadAndReport(
'Unable to parse row [] (Path: https://static.imply.io/example-data/kttm/kttm-2019-08-25.json.gz, Record: 42034, Line: 42036)',
},
exceptionStackTrace:
- 'org.apache.druid.java.util.common.parsers.ParseException: Unable to parse row [] (Path: https://static.imply.io/example-data/kttm/kttm-2019-08-25.json.gz, Record: 42034, Line: 42036)\n\tat org.apache.druid.data.input.IntermediateRowParsingReader$1.hasNext(IntermediateRowParsingReader.java:79)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$2.findNextIteratorIfNecessary(CloseableIterator.java:74)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$2.next(CloseableIterator.java:108)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$1.next(CloseableIterator.java:52)\n\tat org.apache.druid.msq.input.external.ExternalInputSliceReader$1$1.hasNext(ExternalInputSliceReader.java:179)\n\tat org.apache.druid.java.util.common.guava.BaseSequence$1.next(BaseSequence.java:115)\n\tat org.apache.druid.segment.RowWalker.advance(RowWalker.java:70)\n\tat org.apache.druid.segment.RowBasedCursor.advanceUninterruptibly(RowBasedCursor.java:110)\n\tat org.apache.druid.segment.RowBasedCursor.advance(RowBasedCursor.java:103)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.populateFrameWriterAndFlushIfNeeded(ScanQueryFrameProcessor.java:251)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.runWithSegment(ScanQueryFrameProcessor.java:182)\n\tat org.apache.druid.msq.querykit.BaseLeafFrameProcessor.runIncrementally(BaseLeafFrameProcessor.java:111)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.runIncrementally(ScanQueryFrameProcessor.java:148)\n\tat org.apache.druid.frame.processor.FrameProcessors$1FrameProcessorWithBaggage.runIncrementally(FrameProcessors.java:70)\n\tat org.apache.druid.frame.processor.FrameProcessorExecutor$1ExecutorRunnable.runProcessorNow(FrameProcessorExecutor.java:229)\n\tat org.apache.druid.frame.processor.FrameProcessorExecutor$1ExecutorRunnable.run(FrameProcessorExecutor.java:137)\n\tat org.apache.druid.msq.exec.WorkerImpl$1$2.run(WorkerImpl.java:634)\n\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n\tat java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)\n\tat org.apache.druid.query.PrioritizedListenableFutureTask.run(PrioritizedExecutorService.java:251)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n\tat java.base/java.lang.Thread.run(Thread.java:829)\nCaused by: com.fasterxml.jackson.databind.exc.MismatchedInputException: No content to map due to end-of-input\n at [Source: (String)""; line: 1, column: 0]\n\tat com.fasterxml.jackson.databind.exc.MismatchedInputException.from(MismatchedInputException.java:59)\n\tat com.fasterxml.jackson.databind.ObjectMapper._initForReading(ObjectMapper.java:4360)\n\tat com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:4205)\n\tat com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3214)\n\tat com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3182)\n\tat org.apache.druid.data.input.impl.JsonLineReader.parseInputRows(JsonLineReader.java:69)\n\tat org.apache.druid.data.input.impl.JsonLineReader.parseInputRows(JsonLineReader.java:48)\n\tat org.apache.druid.data.input.IntermediateRowParsingReader$1.hasNext(IntermediateRowParsingReader.java:71)\n\t... 22 more\n',
+ 'org.apache.druid.java.util.common.parsers.ParseException: Unable to parse row [] (Path: https://static.imply.io/example-data/kttm/kttm-2019-08-25.json.gz, Record: 42034, Line: 42036)\n\tat org.apache.druid.data.input.IntermediateRowParsingReader$1.hasNext(IntermediateRowParsingReader.java:79)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$2.findNextIteratorIfNecessary(CloseableIterator.java:74)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$2.next(CloseableIterator.java:108)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$1.next(CloseableIterator.java:52)\n\tat org.apache.druid.msq.input.external.ExternalInputSliceReader$1$1.hasNext(ExternalInputSliceReader.java:179)\n\tat org.apache.druid.java.util.common.guava.BaseSequence$1.next(BaseSequence.java:115)\n\tat org.apache.druid.segment.RowWalker.advance(RowWalker.java:70)\n\tat org.apache.druid.segment.RowBasedCursor.advanceUninterruptibly(RowBasedCursor.java:110)\n\tat org.apache.druid.segment.RowBasedCursor.advance(RowBasedCursor.java:103)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.populateFrameWriterAndFlushIfNeeded(ScanQueryFrameProcessor.java:245)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.runWithSegment(ScanQueryFrameProcessor.java:172)\n\tat org.apache.druid.msq.querykit.BaseLeafFrameProcessor.runIncrementally(BaseLeafFrameProcessor.java:164)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.runIncrementally(ScanQueryFrameProcessor.java:137)\n\tat org.apache.druid.frame.processor.FrameProcessors$1FrameProcessorWithBaggage.runIncrementally(FrameProcessors.java:70)\n\tat org.apache.druid.frame.processor.FrameProcessorExecutor$1ExecutorRunnable.runProcessorNow(FrameProcessorExecutor.java:229)\n\tat org.apache.druid.frame.processor.FrameProcessorExecutor$1ExecutorRunnable.run(FrameProcessorExecutor.java:137)\n\tat org.apache.druid.msq.exec.WorkerImpl$1$2.run(WorkerImpl.java:666)\n\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n\tat java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)\n\tat org.apache.druid.query.PrioritizedListenableFutureTask.run(PrioritizedExecutorService.java:251)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n\tat java.base/java.lang.Thread.run(Thread.java:829)\nCaused by: com.fasterxml.jackson.databind.exc.MismatchedInputException: No content to map due to end-of-input\n at [Source: (String)""; line: 1, column: 0]\n\tat com.fasterxml.jackson.databind.exc.MismatchedInputException.from(MismatchedInputException.java:59)\n\tat com.fasterxml.jackson.databind.ObjectMapper._initForReading(ObjectMapper.java:4360)\n\tat com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:4205)\n\tat com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3214)\n\tat com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3182)\n\tat org.apache.druid.data.input.impl.JsonLineReader.parseInputRows(JsonLineReader.java:69)\n\tat org.apache.druid.data.input.impl.JsonLineReader.parseInputRows(JsonLineReader.java:48)\n\tat org.apache.druid.data.input.IntermediateRowParsingReader$1.hasNext(IntermediateRowParsingReader.java:71)\n\t... 22 more\n',
},
{
- taskId: 'query-c6bffa9d-43c4-45a0-95f8-0c8c453655fb-worker0',
- host: 'localhost:8101',
+ taskId: 'query-955fa5a2-0ae6-4912-bc39-cc9fc012de5a-worker0',
+ host: 'localhost:8091',
stageNumber: 0,
error: {
errorCode: 'CannotParseExternalData',
@@ -193,11 +193,11 @@ export const EXECUTION_INGEST_ERROR = Execution.fromTaskPayloadAndReport(
'Unable to parse row [] (Path: https://static.imply.io/example-data/kttm/kttm-2019-08-25.json.gz, Record: 54912, Line: 54915)',
},
exceptionStackTrace:
- 'org.apache.druid.java.util.common.parsers.ParseException: Unable to parse row [] (Path: https://static.imply.io/example-data/kttm/kttm-2019-08-25.json.gz, Record: 54912, Line: 54915)\n\tat org.apache.druid.data.input.IntermediateRowParsingReader$1.hasNext(IntermediateRowParsingReader.java:79)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$2.findNextIteratorIfNecessary(CloseableIterator.java:74)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$2.next(CloseableIterator.java:108)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$1.next(CloseableIterator.java:52)\n\tat org.apache.druid.msq.input.external.ExternalInputSliceReader$1$1.hasNext(ExternalInputSliceReader.java:179)\n\tat org.apache.druid.java.util.common.guava.BaseSequence$1.next(BaseSequence.java:115)\n\tat org.apache.druid.segment.RowWalker.advance(RowWalker.java:70)\n\tat org.apache.druid.segment.RowBasedCursor.advanceUninterruptibly(RowBasedCursor.java:110)\n\tat org.apache.druid.segment.RowBasedCursor.advance(RowBasedCursor.java:103)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.populateFrameWriterAndFlushIfNeeded(ScanQueryFrameProcessor.java:251)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.runWithSegment(ScanQueryFrameProcessor.java:182)\n\tat org.apache.druid.msq.querykit.BaseLeafFrameProcessor.runIncrementally(BaseLeafFrameProcessor.java:111)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.runIncrementally(ScanQueryFrameProcessor.java:148)\n\tat org.apache.druid.frame.processor.FrameProcessors$1FrameProcessorWithBaggage.runIncrementally(FrameProcessors.java:70)\n\tat org.apache.druid.frame.processor.FrameProcessorExecutor$1ExecutorRunnable.runProcessorNow(FrameProcessorExecutor.java:229)\n\tat org.apache.druid.frame.processor.FrameProcessorExecutor$1ExecutorRunnable.run(FrameProcessorExecutor.java:137)\n\tat org.apache.druid.msq.exec.WorkerImpl$1$2.run(WorkerImpl.java:634)\n\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n\tat java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)\n\tat org.apache.druid.query.PrioritizedListenableFutureTask.run(PrioritizedExecutorService.java:251)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n\tat java.base/java.lang.Thread.run(Thread.java:829)\nCaused by: com.fasterxml.jackson.databind.exc.MismatchedInputException: No content to map due to end-of-input\n at [Source: (String)""; line: 1, column: 0]\n\tat com.fasterxml.jackson.databind.exc.MismatchedInputException.from(MismatchedInputException.java:59)\n\tat com.fasterxml.jackson.databind.ObjectMapper._initForReading(ObjectMapper.java:4360)\n\tat com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:4205)\n\tat com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3214)\n\tat com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3182)\n\tat org.apache.druid.data.input.impl.JsonLineReader.parseInputRows(JsonLineReader.java:69)\n\tat org.apache.druid.data.input.impl.JsonLineReader.parseInputRows(JsonLineReader.java:48)\n\tat org.apache.druid.data.input.IntermediateRowParsingReader$1.hasNext(IntermediateRowParsingReader.java:71)\n\t... 22 more\n',
+ 'org.apache.druid.java.util.common.parsers.ParseException: Unable to parse row [] (Path: https://static.imply.io/example-data/kttm/kttm-2019-08-25.json.gz, Record: 54912, Line: 54915)\n\tat org.apache.druid.data.input.IntermediateRowParsingReader$1.hasNext(IntermediateRowParsingReader.java:79)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$2.findNextIteratorIfNecessary(CloseableIterator.java:74)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$2.next(CloseableIterator.java:108)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$1.next(CloseableIterator.java:52)\n\tat org.apache.druid.msq.input.external.ExternalInputSliceReader$1$1.hasNext(ExternalInputSliceReader.java:179)\n\tat org.apache.druid.java.util.common.guava.BaseSequence$1.next(BaseSequence.java:115)\n\tat org.apache.druid.segment.RowWalker.advance(RowWalker.java:70)\n\tat org.apache.druid.segment.RowBasedCursor.advanceUninterruptibly(RowBasedCursor.java:110)\n\tat org.apache.druid.segment.RowBasedCursor.advance(RowBasedCursor.java:103)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.populateFrameWriterAndFlushIfNeeded(ScanQueryFrameProcessor.java:245)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.runWithSegment(ScanQueryFrameProcessor.java:172)\n\tat org.apache.druid.msq.querykit.BaseLeafFrameProcessor.runIncrementally(BaseLeafFrameProcessor.java:164)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.runIncrementally(ScanQueryFrameProcessor.java:137)\n\tat org.apache.druid.frame.processor.FrameProcessors$1FrameProcessorWithBaggage.runIncrementally(FrameProcessors.java:70)\n\tat org.apache.druid.frame.processor.FrameProcessorExecutor$1ExecutorRunnable.runProcessorNow(FrameProcessorExecutor.java:229)\n\tat org.apache.druid.frame.processor.FrameProcessorExecutor$1ExecutorRunnable.run(FrameProcessorExecutor.java:137)\n\tat org.apache.druid.msq.exec.WorkerImpl$1$2.run(WorkerImpl.java:666)\n\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n\tat java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)\n\tat org.apache.druid.query.PrioritizedListenableFutureTask.run(PrioritizedExecutorService.java:251)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n\tat java.base/java.lang.Thread.run(Thread.java:829)\nCaused by: com.fasterxml.jackson.databind.exc.MismatchedInputException: No content to map due to end-of-input\n at [Source: (String)""; line: 1, column: 0]\n\tat com.fasterxml.jackson.databind.exc.MismatchedInputException.from(MismatchedInputException.java:59)\n\tat com.fasterxml.jackson.databind.ObjectMapper._initForReading(ObjectMapper.java:4360)\n\tat com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:4205)\n\tat com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3214)\n\tat com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3182)\n\tat org.apache.druid.data.input.impl.JsonLineReader.parseInputRows(JsonLineReader.java:69)\n\tat org.apache.druid.data.input.impl.JsonLineReader.parseInputRows(JsonLineReader.java:48)\n\tat org.apache.druid.data.input.IntermediateRowParsingReader$1.hasNext(IntermediateRowParsingReader.java:71)\n\t... 22 more\n',
},
{
- taskId: 'query-c6bffa9d-43c4-45a0-95f8-0c8c453655fb-worker0',
- host: 'localhost:8101',
+ taskId: 'query-955fa5a2-0ae6-4912-bc39-cc9fc012de5a-worker0',
+ host: 'localhost:8091',
stageNumber: 0,
error: {
errorCode: 'CannotParseExternalData',
@@ -205,31 +205,31 @@ export const EXECUTION_INGEST_ERROR = Execution.fromTaskPayloadAndReport(
'Unable to parse row [] (Path: https://static.imply.io/example-data/kttm/kttm-2019-08-25.json.gz, Record: 63995, Line: 63999)',
},
exceptionStackTrace:
- 'org.apache.druid.java.util.common.parsers.ParseException: Unable to parse row [] (Path: https://static.imply.io/example-data/kttm/kttm-2019-08-25.json.gz, Record: 63995, Line: 63999)\n\tat org.apache.druid.data.input.IntermediateRowParsingReader$1.hasNext(IntermediateRowParsingReader.java:79)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$2.findNextIteratorIfNecessary(CloseableIterator.java:74)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$2.next(CloseableIterator.java:108)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$1.next(CloseableIterator.java:52)\n\tat org.apache.druid.msq.input.external.ExternalInputSliceReader$1$1.hasNext(ExternalInputSliceReader.java:179)\n\tat org.apache.druid.java.util.common.guava.BaseSequence$1.next(BaseSequence.java:115)\n\tat org.apache.druid.segment.RowWalker.advance(RowWalker.java:70)\n\tat org.apache.druid.segment.RowBasedCursor.advanceUninterruptibly(RowBasedCursor.java:110)\n\tat org.apache.druid.segment.RowBasedCursor.advance(RowBasedCursor.java:103)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.populateFrameWriterAndFlushIfNeeded(ScanQueryFrameProcessor.java:251)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.runWithSegment(ScanQueryFrameProcessor.java:182)\n\tat org.apache.druid.msq.querykit.BaseLeafFrameProcessor.runIncrementally(BaseLeafFrameProcessor.java:111)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.runIncrementally(ScanQueryFrameProcessor.java:148)\n\tat org.apache.druid.frame.processor.FrameProcessors$1FrameProcessorWithBaggage.runIncrementally(FrameProcessors.java:70)\n\tat org.apache.druid.frame.processor.FrameProcessorExecutor$1ExecutorRunnable.runProcessorNow(FrameProcessorExecutor.java:229)\n\tat org.apache.druid.frame.processor.FrameProcessorExecutor$1ExecutorRunnable.run(FrameProcessorExecutor.java:137)\n\tat org.apache.druid.msq.exec.WorkerImpl$1$2.run(WorkerImpl.java:634)\n\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n\tat java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)\n\tat org.apache.druid.query.PrioritizedListenableFutureTask.run(PrioritizedExecutorService.java:251)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n\tat java.base/java.lang.Thread.run(Thread.java:829)\nCaused by: com.fasterxml.jackson.databind.exc.MismatchedInputException: No content to map due to end-of-input\n at [Source: (String)""; line: 1, column: 0]\n\tat com.fasterxml.jackson.databind.exc.MismatchedInputException.from(MismatchedInputException.java:59)\n\tat com.fasterxml.jackson.databind.ObjectMapper._initForReading(ObjectMapper.java:4360)\n\tat com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:4205)\n\tat com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3214)\n\tat com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3182)\n\tat org.apache.druid.data.input.impl.JsonLineReader.parseInputRows(JsonLineReader.java:69)\n\tat org.apache.druid.data.input.impl.JsonLineReader.parseInputRows(JsonLineReader.java:48)\n\tat org.apache.druid.data.input.IntermediateRowParsingReader$1.hasNext(IntermediateRowParsingReader.java:71)\n\t... 22 more\n',
+ 'org.apache.druid.java.util.common.parsers.ParseException: Unable to parse row [] (Path: https://static.imply.io/example-data/kttm/kttm-2019-08-25.json.gz, Record: 63995, Line: 63999)\n\tat org.apache.druid.data.input.IntermediateRowParsingReader$1.hasNext(IntermediateRowParsingReader.java:79)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$2.findNextIteratorIfNecessary(CloseableIterator.java:74)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$2.next(CloseableIterator.java:108)\n\tat org.apache.druid.java.util.common.parsers.CloseableIterator$1.next(CloseableIterator.java:52)\n\tat org.apache.druid.msq.input.external.ExternalInputSliceReader$1$1.hasNext(ExternalInputSliceReader.java:179)\n\tat org.apache.druid.java.util.common.guava.BaseSequence$1.next(BaseSequence.java:115)\n\tat org.apache.druid.segment.RowWalker.advance(RowWalker.java:70)\n\tat org.apache.druid.segment.RowBasedCursor.advanceUninterruptibly(RowBasedCursor.java:110)\n\tat org.apache.druid.segment.RowBasedCursor.advance(RowBasedCursor.java:103)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.populateFrameWriterAndFlushIfNeeded(ScanQueryFrameProcessor.java:245)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.runWithSegment(ScanQueryFrameProcessor.java:172)\n\tat org.apache.druid.msq.querykit.BaseLeafFrameProcessor.runIncrementally(BaseLeafFrameProcessor.java:164)\n\tat org.apache.druid.msq.querykit.scan.ScanQueryFrameProcessor.runIncrementally(ScanQueryFrameProcessor.java:137)\n\tat org.apache.druid.frame.processor.FrameProcessors$1FrameProcessorWithBaggage.runIncrementally(FrameProcessors.java:70)\n\tat org.apache.druid.frame.processor.FrameProcessorExecutor$1ExecutorRunnable.runProcessorNow(FrameProcessorExecutor.java:229)\n\tat org.apache.druid.frame.processor.FrameProcessorExecutor$1ExecutorRunnable.run(FrameProcessorExecutor.java:137)\n\tat org.apache.druid.msq.exec.WorkerImpl$1$2.run(WorkerImpl.java:666)\n\tat java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)\n\tat java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)\n\tat org.apache.druid.query.PrioritizedListenableFutureTask.run(PrioritizedExecutorService.java:251)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)\n\tat java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)\n\tat java.base/java.lang.Thread.run(Thread.java:829)\nCaused by: com.fasterxml.jackson.databind.exc.MismatchedInputException: No content to map due to end-of-input\n at [Source: (String)""; line: 1, column: 0]\n\tat com.fasterxml.jackson.databind.exc.MismatchedInputException.from(MismatchedInputException.java:59)\n\tat com.fasterxml.jackson.databind.ObjectMapper._initForReading(ObjectMapper.java:4360)\n\tat com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:4205)\n\tat com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3214)\n\tat com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:3182)\n\tat org.apache.druid.data.input.impl.JsonLineReader.parseInputRows(JsonLineReader.java:69)\n\tat org.apache.druid.data.input.impl.JsonLineReader.parseInputRows(JsonLineReader.java:48)\n\tat org.apache.druid.data.input.IntermediateRowParsingReader$1.hasNext(IntermediateRowParsingReader.java:71)\n\t... 22 more\n',
},
],
- startTime: '2022-08-25T18:00:03.140Z',
- durationMs: 20813,
+ startTime: '2022-10-31T16:16:22.464Z',
+ durationMs: 7229,
+ pendingTasks: 0,
+ runningTasks: 2,
},
stages: [
{
stageNumber: 0,
definition: {
- id: '47e8d8a9-80ed-4b71-adcb-44dc5b1dace3_0',
+ id: 'fc6c3715-9211-4f6e-9d7a-a7e6a8e6b297_0',
input: [
{
type: 'external',
inputSource: {
type: 'http',
uris: ['https://static.imply.io/example-data/kttm/kttm-2019-08-25.json.gz'],
- httpAuthenticationUsername: null,
- httpAuthenticationPassword: null,
},
inputFormat: {
type: 'json',
- flattenSpec: null,
- featureSpec: {},
keepNullColumns: false,
+ assumeNewlineDelimited: false,
+ useJsonNodeReader: false,
},
signature: [
{ name: 'timestamp', type: 'STRING' },
@@ -263,10 +263,11 @@ export const EXECUTION_INGEST_ERROR = Execution.fromTaskPayloadAndReport(
finalizeAggregations: false,
groupByEnableMultiValueUnnesting: false,
maxParseExceptions: 10,
+ queryId: '955fa5a2-0ae6-4912-bc39-cc9fc012de5a',
scanSignature:
'[{"name":"agent_type","type":"STRING"},{"name":"v0","type":"LONG"}]',
sqlInsertSegmentGranularity: '{"type":"all"}',
- sqlQueryId: 'c6bffa9d-43c4-45a0-95f8-0c8c453655fb',
+ sqlQueryId: '955fa5a2-0ae6-4912-bc39-cc9fc012de5a',
sqlReplaceTimeChunks: 'all',
},
granularity: { type: 'all' },
@@ -287,14 +288,14 @@ export const EXECUTION_INGEST_ERROR = Execution.fromTaskPayloadAndReport(
},
phase: 'FAILED',
workerCount: 1,
- startTime: '2022-08-25T18:00:05.539Z',
- duration: 18414,
+ startTime: '2022-10-31T16:16:24.680Z',
+ duration: 5013,
sort: true,
},
{
stageNumber: 1,
definition: {
- id: '47e8d8a9-80ed-4b71-adcb-44dc5b1dace3_1',
+ id: 'fc6c3715-9211-4f6e-9d7a-a7e6a8e6b297_1',
input: [{ type: 'stage', stage: 0 }],
processor: {
type: 'segmentGenerator',
@@ -340,7 +341,7 @@ export const EXECUTION_INGEST_ERROR = Execution.fromTaskPayloadAndReport(
counters: {
'0': {
'0': {
- input0: { type: 'channel', rows: [200322], totalFiles: [1] },
+ input0: { type: 'channel', rows: [237883], totalFiles: [1] },
output: { type: 'channel', rows: [141660], bytes: [7685544], frames: [1] },
sortProgress: {
type: 'sortProgress',
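The error mock keeps the same overall shape as before: a controller-level TooManyWarnings error plus per-worker CannotParseExternalData warnings, only with refreshed task IDs, ports, timings, and row counts. A hedged sketch of picking the parse failures out of a report of this shape (the literal below is trimmed from the mock; messages abbreviated):

// Trimmed-down status payload in the shape used by EXECUTION_INGEST_ERROR.
const status = {
  status: 'FAILED',
  errorReport: {
    error: { errorCode: 'TooManyWarnings' },
  },
  warnings: [
    {
      stageNumber: 0,
      error: {
        errorCode: 'CannotParseExternalData',
        errorMessage: 'Unable to parse row [] (... Record: 13588, Line: 13588)',
      },
    },
    {
      stageNumber: 0,
      error: {
        errorCode: 'CannotParseExternalData',
        errorMessage: 'Unable to parse row [] (... Record: 27029, Line: 27030)',
      },
    },
  ],
};

// Count the warnings that came from unparseable input rows.
const parseFailures = status.warnings.filter(
  w => w.error.errorCode === 'CannotParseExternalData',
);
console.log(`${parseFailures.length} rows could not be parsed`); // "2 rows could not be parsed"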
diff --git a/web-console/src/druid-models/execution/execution.spec.ts b/web-console/src/druid-models/execution/execution.spec.ts
index 31ec5af457a6..43f4301c4ce7 100644
--- a/web-console/src/druid-models/execution/execution.spec.ts
+++ b/web-console/src/druid-models/execution/execution.spec.ts
@@ -43,10 +43,10 @@ describe('Execution', () => {
"useLineageBasedSegmentAllocation": true,
},
"dataSource": "kttm_simple",
- "groupId": "query-32ced762-7679-4a25-9220-3915c5976961",
- "id": "query-32ced762-7679-4a25-9220-3915c5976961",
+ "groupId": "query-09af0c1e-1c0f-4539-917c-b0458849d0d9",
+ "id": "query-09af0c1e-1c0f-4539-917c-b0458849d0d9",
"resource": Object {
- "availabilityGroup": "query-32ced762-7679-4a25-9220-3915c5976961",
+ "availabilityGroup": "query-09af0c1e-1c0f-4539-917c-b0458849d0d9",
"requiredCapacity": 1,
},
"spec": Object {
@@ -80,21 +80,20 @@ describe('Execution', () => {
"finalize": false,
"finalizeAggregations": false,
"groupByEnableMultiValueUnnesting": false,
+ "queryId": "09af0c1e-1c0f-4539-917c-b0458849d0d9",
"scanSignature": "[{\\"name\\":\\"agent_type\\",\\"type\\":\\"STRING\\"},{\\"name\\":\\"v0\\",\\"type\\":\\"LONG\\"}]",
"sqlInsertSegmentGranularity": "{\\"type\\":\\"all\\"}",
- "sqlQueryId": "32ced762-7679-4a25-9220-3915c5976961",
+ "sqlQueryId": "09af0c1e-1c0f-4539-917c-b0458849d0d9",
"sqlReplaceTimeChunks": "all",
},
"dataSource": Object {
"inputFormat": Object {
- "featureSpec": Object {},
- "flattenSpec": null,
+ "assumeNewlineDelimited": false,
"keepNullColumns": false,
"type": "json",
+ "useJsonNodeReader": false,
},
"inputSource": Object {
- "httpAuthenticationPassword": null,
- "httpAuthenticationUsername": null,
"type": "http",
"uris": Array [
"https://static.imply.io/example-data/kttm-v2/kttm-v2-2019-08-25.json.gz",
@@ -153,8 +152,9 @@ describe('Execution', () => {
"finalizeAggregations": false,
"groupByEnableMultiValueUnnesting": false,
"maxParseExceptions": 0,
+ "queryId": "09af0c1e-1c0f-4539-917c-b0458849d0d9",
"sqlInsertSegmentGranularity": "{\\"type\\":\\"all\\"}",
- "sqlQueryId": "32ced762-7679-4a25-9220-3915c5976961",
+ "sqlQueryId": "09af0c1e-1c0f-4539-917c-b0458849d0d9",
"sqlReplaceTimeChunks": "all",
},
"sqlTypeNames": Array [
@@ -163,8 +163,9 @@ describe('Execution', () => {
],
"type": "query_controller",
},
- "task": "query-32ced762-7679-4a25-9220-3915c5976961",
+ "task": "query-09af0c1e-1c0f-4539-917c-b0458849d0d9",
},
+ "capacityInfo": undefined,
"destination": Object {
"dataSource": "kttm_simple",
"replaceTimeChunks": Array [
@@ -175,10 +176,10 @@ describe('Execution', () => {
},
"type": "dataSource",
},
- "duration": 25097,
+ "duration": 9012,
"engine": "sql-msq-task",
"error": undefined,
- "id": "query-32ced762-7679-4a25-9220-3915c5976961",
+ "id": "query-09af0c1e-1c0f-4539-917c-b0458849d0d9",
"nativeQuery": Object {
"columns": Array [
"agent_type",
@@ -188,21 +189,20 @@ describe('Execution', () => {
"finalize": false,
"finalizeAggregations": false,
"groupByEnableMultiValueUnnesting": false,
+ "queryId": "09af0c1e-1c0f-4539-917c-b0458849d0d9",
"scanSignature": "[{\\"name\\":\\"agent_type\\",\\"type\\":\\"STRING\\"},{\\"name\\":\\"v0\\",\\"type\\":\\"LONG\\"}]",
"sqlInsertSegmentGranularity": "{\\"type\\":\\"all\\"}",
- "sqlQueryId": "32ced762-7679-4a25-9220-3915c5976961",
+ "sqlQueryId": "09af0c1e-1c0f-4539-917c-b0458849d0d9",
"sqlReplaceTimeChunks": "all",
},
"dataSource": Object {
"inputFormat": Object {
- "featureSpec": Object {},
- "flattenSpec": null,
+ "assumeNewlineDelimited": false,
"keepNullColumns": false,
"type": "json",
+ "useJsonNodeReader": false,
},
"inputSource": Object {
- "httpAuthenticationPassword": null,
- "httpAuthenticationUsername": null,
"type": "http",
"uris": Array [
"https://static.imply.io/example-data/kttm-v2/kttm-v2-2019-08-25.json.gz",
@@ -299,12 +299,12 @@ describe('Execution', () => {
},
"sortProgress": Object {
"levelToMergedBatches": Object {
- "0": 1,
+ "0": 2,
"1": 1,
"2": 1,
},
"levelToTotalBatches": Object {
- "0": 1,
+ "0": 2,
"1": 1,
"2": 1,
},
@@ -335,18 +335,16 @@ describe('Execution', () => {
"stages": Array [
Object {
"definition": Object {
- "id": "0b353011-6ea1-480a-8ca8-386771621672_0",
+ "id": "b94caff0-f693-47ed-a242-ccf837550383_0",
"input": Array [
Object {
"inputFormat": Object {
- "featureSpec": Object {},
- "flattenSpec": null,
+ "assumeNewlineDelimited": false,
"keepNullColumns": false,
"type": "json",
+ "useJsonNodeReader": false,
},
"inputSource": Object {
- "httpAuthenticationPassword": null,
- "httpAuthenticationUsername": null,
"type": "http",
"uris": Array [
"https://static.imply.io/example-data/kttm-v2/kttm-v2-2019-08-25.json.gz",
@@ -377,9 +375,10 @@ describe('Execution', () => {
"finalize": false,
"finalizeAggregations": false,
"groupByEnableMultiValueUnnesting": false,
+ "queryId": "09af0c1e-1c0f-4539-917c-b0458849d0d9",
"scanSignature": "[{\\"name\\":\\"agent_type\\",\\"type\\":\\"STRING\\"},{\\"name\\":\\"v0\\",\\"type\\":\\"LONG\\"}]",
"sqlInsertSegmentGranularity": "{\\"type\\":\\"all\\"}",
- "sqlQueryId": "32ced762-7679-4a25-9220-3915c5976961",
+ "sqlQueryId": "09af0c1e-1c0f-4539-917c-b0458849d0d9",
"sqlReplaceTimeChunks": "all",
},
"dataSource": Object {
@@ -436,17 +435,17 @@ describe('Execution', () => {
},
],
},
- "duration": 20229,
+ "duration": 4887,
"partitionCount": 1,
"phase": "FINISHED",
"sort": true,
"stageNumber": 0,
- "startTime": "2022-08-22T20:12:53.790Z",
+ "startTime": "2022-10-31T16:11:15.380Z",
"workerCount": 1,
},
Object {
"definition": Object {
- "id": "0b353011-6ea1-480a-8ca8-386771621672_1",
+ "id": "b94caff0-f693-47ed-a242-ccf837550383_1",
"input": Array [
Object {
"stage": 0,
@@ -511,17 +510,21 @@ describe('Execution', () => {
},
"signature": Array [],
},
- "duration": 2497,
+ "duration": 1908,
"partitionCount": 1,
"phase": "FINISHED",
"stageNumber": 1,
- "startTime": "2022-08-22T20:13:13.991Z",
+ "startTime": "2022-10-31T16:11:20.264Z",
"workerCount": 1,
},
],
},
- "startTime": 2022-08-22T20:12:51.391Z,
+ "startTime": 2022-10-31T16:11:13.160Z,
"status": "SUCCESS",
+ "usageInfo": Object {
+ "pendingTasks": 0,
+ "runningTasks": 2,
+ },
"warnings": undefined,
}
`);
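Beyond the snapshot, the new usageInfo field can be asserted directly against the mock; a minimal Jest-style sketch (assuming the same mock import the spec already uses):

import { EXECUTION_INGEST_COMPLETE } from './execution-ingest-complete.mock';

describe('Execution usageInfo', () => {
  it('is read from the status payload of the task report', () => {
    expect(EXECUTION_INGEST_COMPLETE.usageInfo).toEqual({
      pendingTasks: 0,
      runningTasks: 2,
    });

    // capacityInfo is not part of the task report, so the mock leaves it undefined.
    expect(EXECUTION_INGEST_COMPLETE.capacityInfo).toBeUndefined();
  });
});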
diff --git a/web-console/src/druid-models/execution/execution.ts b/web-console/src/druid-models/execution/execution.ts
index 0c92cd98514b..f2ae3f21fa56 100644
--- a/web-console/src/druid-models/execution/execution.ts
+++ b/web-console/src/druid-models/execution/execution.ts
@@ -18,7 +18,14 @@
import { Column, QueryResult, SqlExpression, SqlQuery, SqlWithQuery } from 'druid-query-toolkit';
-import { deepGet, deleteKeys, nonEmptyArray, oneOf } from '../../utils';
+import {
+ deepGet,
+ deleteKeys,
+ formatInteger,
+ nonEmptyArray,
+ oneOf,
+ pluralIfNeeded,
+} from '../../utils';
import { DruidEngine, validDruidEngine } from '../druid-engine/druid-engine';
import { QueryContext } from '../query-context/query-context';
import { Stages } from '../stages/stages';
@@ -84,6 +91,68 @@ export function validateLastExecution(possibleLastExecution: any): LastExecution
};
}
+export interface UsageInfo {
+ pendingTasks: number;
+ runningTasks: number;
+}
+
+function getUsageInfoFromStatusPayload(status: any): UsageInfo | undefined {
+ const { pendingTasks, runningTasks } = status;
+ if (typeof pendingTasks !== 'number' || typeof runningTasks !== 'number') return;
+ return {
+ pendingTasks,
+ runningTasks,
+ };
+}
+
+export interface CapacityInfo {
+ usedTaskSlots: number;
+ totalTaskSlots: number;
+}
+
+function formatPendingMessage(
+ usageInfo: UsageInfo,
+ capacityInfo: CapacityInfo | undefined,
+): string | undefined {
+ const { pendingTasks, runningTasks } = usageInfo;
+ if (!pendingTasks) return;
+
+ const totalNeeded = runningTasks + pendingTasks;
+
+ let baseMessage = `Launched ${formatInteger(runningTasks)}/${formatInteger(totalNeeded)} tasks.`;
+
+ if (!capacityInfo) {
+ return baseMessage;
+ }
+
+ const { usedTaskSlots, totalTaskSlots } = capacityInfo;
+ const availableTaskSlots = totalTaskSlots - usedTaskSlots;
+
+  // If there are enough slots free: "Launched 2/4 tasks." (The pending tasks will start shortly, so there is no need for a longer message.)
+ if (pendingTasks <= availableTaskSlots) {
+ return baseMessage;
+ }
+
+ baseMessage += ` Cluster is currently using ${formatInteger(usedTaskSlots)}/${formatInteger(
+ totalTaskSlots,
+ )} task slots.`;
+
+  // If there are not enough slots free, then there are two cases:
+ if (totalNeeded <= totalTaskSlots) {
+ // (1) not enough free, but enough total: "Launched 2/4 tasks. Cluster is currently using 5/6 task slots. Waiting for 1 other task to finish."
+ const tasksThatNeedToFinish = pendingTasks - availableTaskSlots;
+ return (
+ baseMessage + ` Waiting for ${pluralIfNeeded(tasksThatNeedToFinish, 'other task')} to finish.`
+ );
+ } else {
+ // (2) not enough total: "Launched 2/4 tasks. Cluster is currently using 2/2 task slots. Add more capacity or reduce maxNumTasks to 2 or lower."
+ return (
+ baseMessage +
+ ` Add more capacity or reduce maxNumTasks to ${formatInteger(totalTaskSlots)} or lower.`
+ );
+ }
+}
+
export interface ExecutionValue {
engine: DruidEngine;
id: string;
@@ -93,11 +162,13 @@ export interface ExecutionValue {
status?: ExecutionStatus;
startTime?: Date;
duration?: number;
+ usageInfo?: UsageInfo;
stages?: Stages;
destination?: ExecutionDestination;
result?: QueryResult;
error?: ExecutionError;
warnings?: ExecutionError[];
+ capacityInfo?: CapacityInfo;
_payload?: { payload: any; task: string };
}
@@ -186,6 +257,7 @@ export class Execution {
engine: 'sql-msq-task',
id: taskStatus.task,
status: taskStatus.status.error ? 'FAILED' : status,
+ usageInfo: getUsageInfoFromStatusPayload(taskStatus.status),
sqlQuery,
queryContext,
error: taskStatus.status.error
@@ -260,6 +332,9 @@ export class Execution {
status: Execution.normalizeTaskStatus(status),
startTime: isNaN(startTime.getTime()) ? undefined : startTime,
duration: typeof durationMs === 'number' ? durationMs : undefined,
+ usageInfo: getUsageInfoFromStatusPayload(
+ deepGet(taskReport, 'multiStageQuery.payload.status'),
+ ),
stages: Array.isArray(stages)
? new Stages(stages, deepGet(taskReport, 'multiStageQuery.payload.counters'))
: undefined,
@@ -292,6 +367,22 @@ export class Execution {
});
}
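+ // One-line progress summary for the UI; appends task slot details while tasks are pending.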
+ static getProgressDescription(execution: Execution | undefined): string {
+ if (!execution?.stages) return 'Loading...';
+ if (!execution.isWaitingForQuery())
+ return 'Query complete, waiting for segments to be loaded...';
+
+ let ret = execution.stages.getStage(0)?.phase ? 'Running query...' : 'Starting query...';
+ if (execution.usageInfo) {
+ const pendingMessage = formatPendingMessage(execution.usageInfo, execution.capacityInfo);
+ if (pendingMessage) {
+ ret += ` ${pendingMessage}`;
+ }
+ }
+
+ return ret;
+ }
+
public readonly engine: DruidEngine;
public readonly id: string;
public readonly sqlQuery?: string;
@@ -300,11 +391,13 @@ export class Execution {
public readonly status?: ExecutionStatus;
public readonly startTime?: Date;
public readonly duration?: number;
+ public readonly usageInfo?: UsageInfo;
public readonly stages?: Stages;
public readonly destination?: ExecutionDestination;
public readonly result?: QueryResult;
public readonly error?: ExecutionError;
public readonly warnings?: ExecutionError[];
+ public readonly capacityInfo?: CapacityInfo;
public readonly _payload?: { payload: any; task: string };
@@ -318,11 +411,13 @@ export class Execution {
this.status = value.status;
this.startTime = value.startTime;
this.duration = value.duration;
+ this.usageInfo = value.usageInfo;
this.stages = value.stages;
this.destination = value.destination;
this.result = value.result;
this.error = value.error;
this.warnings = nonEmptyArray(value.warnings) ? value.warnings : undefined;
+ this.capacityInfo = value.capacityInfo;
this._payload = value._payload;
}
@@ -337,11 +432,13 @@ export class Execution {
status: this.status,
startTime: this.startTime,
duration: this.duration,
+ usageInfo: this.usageInfo,
stages: this.stages,
destination: this.destination,
result: this.result,
error: this.error,
warnings: this.warnings,
+ capacityInfo: this.capacityInfo,
_payload: this._payload,
};
@@ -374,6 +471,13 @@ export class Execution {
});
}
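+ // Returns a copy of this execution with the given cluster capacity info attached.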
+ public changeCapacityInfo(capacityInfo: CapacityInfo | undefined): Execution {
+ return new Execution({
+ ...this.valueOf(),
+ capacityInfo,
+ });
+ }
+
public updateWith(newSummary: Execution): Execution {
let nextSummary = newSummary;
if (this.sqlQuery && !nextSummary.sqlQuery) {
@@ -464,4 +568,14 @@ export class Execution {
if (!startTime || !duration) return;
return new Date(startTime.valueOf() + duration);
}
+
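+ // True when the query is running, a stage has not started, and tasks are still pending,
+ // meaning the query may be waiting on task slots to free up.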
+ public hasPotentiallyStuckStage(): boolean {
+ return Boolean(
+ this.status === 'RUNNING' &&
+ this.stages &&
+ this.stages.getPotentiallyStuckStageIndex() >= 0 &&
+ this.usageInfo &&
+ this.usageInfo.pendingTasks > 0,
+ );
+ }
}
diff --git a/web-console/src/druid-models/stages/stages.ts b/web-console/src/druid-models/stages/stages.ts
index 8c4dcf9780ba..0304a93c9185 100644
--- a/web-console/src/druid-models/stages/stages.ts
+++ b/web-console/src/druid-models/stages/stages.ts
@@ -521,4 +521,19 @@ export class Stages {
(stage.duration / 1000),
);
}
+
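+ // Index of the first stage that has not reported a phase, or -1 if all stages have started
+ // or the unstarted stage is simply waiting on a previous stage that is still reading input.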
+ getPotentiallyStuckStageIndex(): number {
+ const { stages } = this;
+ const potentiallyStuckIndex = stages.findIndex(stage => typeof stage.phase === 'undefined');
+
+ if (potentiallyStuckIndex > 0) {
+ const prevStage = stages[potentiallyStuckIndex - 1];
+ if (oneOf(prevStage.phase, 'NEW', 'READING_INPUT')) {
+ // Previous stage is still working so this stage is not stuck, it is just waiting
+ return -1;
+ }
+ }
+
+ return potentiallyStuckIndex;
+ }
}
diff --git a/web-console/src/helpers/capacity.ts b/web-console/src/helpers/capacity.ts
new file mode 100644
index 000000000000..1ded088952ac
--- /dev/null
+++ b/web-console/src/helpers/capacity.ts
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import { sum } from 'd3-array';
+
+import { CapacityInfo } from '../druid-models';
+import { Api } from '../singletons';
+
+export async function getClusterCapacity(): Promise<CapacityInfo> {
+ // Tally task slots across all workers reported by the Overlord.
+ const workersResponse = await Api.instance.get('/druid/indexer/v1/workers');
+
+ const usedTaskSlots = sum(workersResponse.data, (w: any) => Number(w.currCapacityUsed) || 0);
+ const totalTaskSlots = sum(workersResponse.data, (w: any) => Number(w.worker?.capacity) || 0);
+
+ return { usedTaskSlots, totalTaskSlots };
+}
+
+export async function maybeGetClusterCapacity(): Promise<CapacityInfo | undefined> {
+ // Capacity info is best effort: if the Overlord call fails, run without the capacity check.
+ try {
+ return await getClusterCapacity();
+ } catch {
+ return;
+ }
+}
+ The cluster does not have enough total task slot capacity (
+ {formatInteger(totalTaskSlots)}) to run this query which is set to use up to{' '}
+ {formatInteger(maxNumTasks)} tasks. Unless more capacity is added this query
+ might stall and never run.
+ The cluster does not currently have enough available task slots (current usage:{' '}
+ {`${formatInteger(usedTaskSlots)}/${formatInteger(totalTaskSlots)}`}) to run
+ this query which is set to use up to {formatInteger(maxNumTasks)} tasks. This
+ query might have to wait for task slots to free up before running.
+ Are you sure you want to run it?
- General info for query-c6bffa9d-43c4-45a0-95f8-0c8c453655fb ingesting into "kttm_simple"
+ General info for query-955fa5a2-0ae6-4912-bc39-cc9fc012de5a ingesting into "kttm_simple"
- Insert query took 0:00:25.
+ Insert query took 0:00:09.
+ // Holds the capacity warning dialog, if one needs to be shown before running.
+ const [alertElement, setAlertElement] = useState<JSX.Element | undefined>();
const handleQueryStringChange = usePermanentCallback((queryString: string) => {
if (query.isEmptyQuery() && queryString.split('=====').length > 2) {
let parsedWorkbenchQuery: WorkbenchQuery | undefined;
@@ -125,7 +128,7 @@ export const QueryTab = React.memo(function QueryTab(props: QueryTabProps) {
onQueryChange(query.changeQueryString(parsedQuery.apply(queryAction).toString()));
if (shouldAutoRun()) {
- setTimeout(() => handleRun(false), 20);
+ setTimeout(() => void handleRun(false), 20);
}
});
@@ -259,11 +262,40 @@ export const QueryTab = React.memo(function QueryTab(props: QueryTabProps) {
currentQueryInput.goToPosition(position);
}
- const handleRun = usePermanentCallback((preview: boolean) => {
+ const handleRun = usePermanentCallback(async (preview: boolean) => {
if (!query.isValid()) return;
WorkbenchHistory.addQueryToHistory(query);
- queryManager.runQuery(preview ? query.makePreview() : query);
+
+ if (query.getEffectiveEngine() !== 'sql-msq-task') {
+ queryManager.runQuery(query);
+ return;
+ }
+
+ const effectiveQuery = preview ? query.makePreview() : query;
+
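+ // Check available task slots before launching so we can warn if the query may have to wait.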
+ const capacityInfo = await maybeGetClusterCapacity();
+
+ const effectiveMaxNumTasks = effectiveQuery.queryContext.maxNumTasks ?? 2;
+ if (
+ capacityInfo &&
+ capacityInfo.totalTaskSlots - capacityInfo.usedTaskSlots < effectiveMaxNumTasks
+ ) {
+ setAlertElement(
+