From abe61159a82ad9277788d3de0796f0986bb11e55 Mon Sep 17 00:00:00 2001
From: Nhat Nguyen
Date: Mon, 4 Jun 2018 14:18:46 -0400
Subject: [PATCH 1/6] Upgrade to Lucene-7.4.0-snapshot-0a7c3f462f (#31073)

This snapshot includes:
- LUCENE-8341: Record soft deletes in SegmentCommitInfo which will resolve #30851
- LUCENE-8335: Enforce soft-deletes field up-front
---
 buildSrc/version.properties | 2 +-
 .../lucene-expressions-7.4.0-snapshot-0a7c3f462f.jar.sha1 | 1 +
 .../lucene-expressions-7.4.0-snapshot-1cbadda4d3.jar.sha1 | 1 -
 .../lucene-analyzers-icu-7.4.0-snapshot-0a7c3f462f.jar.sha1 | 1 +
 .../lucene-analyzers-icu-7.4.0-snapshot-1cbadda4d3.jar.sha1 | 1 -
 ...ne-analyzers-kuromoji-7.4.0-snapshot-0a7c3f462f.jar.sha1 | 1 +
 ...ne-analyzers-kuromoji-7.4.0-snapshot-1cbadda4d3.jar.sha1 | 1 -
 ...lucene-analyzers-nori-7.4.0-snapshot-0a7c3f462f.jar.sha1 | 1 +
 ...lucene-analyzers-nori-7.4.0-snapshot-1cbadda4d3.jar.sha1 | 1 -
 ...ne-analyzers-phonetic-7.4.0-snapshot-0a7c3f462f.jar.sha1 | 1 +
 ...ne-analyzers-phonetic-7.4.0-snapshot-1cbadda4d3.jar.sha1 | 1 -
 ...ene-analyzers-smartcn-7.4.0-snapshot-0a7c3f462f.jar.sha1 | 1 +
 ...ene-analyzers-smartcn-7.4.0-snapshot-1cbadda4d3.jar.sha1 | 1 -
 ...ene-analyzers-stempel-7.4.0-snapshot-0a7c3f462f.jar.sha1 | 1 +
 ...ene-analyzers-stempel-7.4.0-snapshot-1cbadda4d3.jar.sha1 | 1 -
 ...-analyzers-morfologik-7.4.0-snapshot-0a7c3f462f.jar.sha1 | 1 +
 ...-analyzers-morfologik-7.4.0-snapshot-1cbadda4d3.jar.sha1 | 1 -
 ...cene-analyzers-common-7.4.0-snapshot-0a7c3f462f.jar.sha1 | 1 +
 ...cene-analyzers-common-7.4.0-snapshot-1cbadda4d3.jar.sha1 | 1 -
 ...ucene-backward-codecs-7.4.0-snapshot-0a7c3f462f.jar.sha1 | 1 +
 ...ucene-backward-codecs-7.4.0-snapshot-1cbadda4d3.jar.sha1 | 1 -
 .../licenses/lucene-core-7.4.0-snapshot-0a7c3f462f.jar.sha1 | 1 +
 .../licenses/lucene-core-7.4.0-snapshot-1cbadda4d3.jar.sha1 | 1 -
 .../lucene-grouping-7.4.0-snapshot-0a7c3f462f.jar.sha1 | 1 +
 .../lucene-grouping-7.4.0-snapshot-1cbadda4d3.jar.sha1 | 1 -
 .../lucene-highlighter-7.4.0-snapshot-0a7c3f462f.jar.sha1 | 1 +
 .../lucene-highlighter-7.4.0-snapshot-1cbadda4d3.jar.sha1 | 1 -
 .../licenses/lucene-join-7.4.0-snapshot-0a7c3f462f.jar.sha1 | 1 +
 .../licenses/lucene-join-7.4.0-snapshot-1cbadda4d3.jar.sha1 | 1 -
 .../lucene-memory-7.4.0-snapshot-0a7c3f462f.jar.sha1 | 1 +
 .../lucene-memory-7.4.0-snapshot-1cbadda4d3.jar.sha1 | 1 -
 .../licenses/lucene-misc-7.4.0-snapshot-0a7c3f462f.jar.sha1 | 1 +
 .../licenses/lucene-misc-7.4.0-snapshot-1cbadda4d3.jar.sha1 | 1 -
 .../lucene-queries-7.4.0-snapshot-0a7c3f462f.jar.sha1 | 1 +
 .../lucene-queries-7.4.0-snapshot-1cbadda4d3.jar.sha1 | 1 -
 .../lucene-queryparser-7.4.0-snapshot-0a7c3f462f.jar.sha1 | 1 +
 .../lucene-queryparser-7.4.0-snapshot-1cbadda4d3.jar.sha1 | 1 -
 .../lucene-sandbox-7.4.0-snapshot-0a7c3f462f.jar.sha1 | 1 +
 .../lucene-sandbox-7.4.0-snapshot-1cbadda4d3.jar.sha1 | 1 -
 .../lucene-spatial-7.4.0-snapshot-0a7c3f462f.jar.sha1 | 1 +
 .../lucene-spatial-7.4.0-snapshot-1cbadda4d3.jar.sha1 | 1 -
 ...lucene-spatial-extras-7.4.0-snapshot-0a7c3f462f.jar.sha1 | 1 +
 ...lucene-spatial-extras-7.4.0-snapshot-1cbadda4d3.jar.sha1 | 1 -
 .../lucene-spatial3d-7.4.0-snapshot-0a7c3f462f.jar.sha1 | 1 +
 .../lucene-spatial3d-7.4.0-snapshot-1cbadda4d3.jar.sha1 | 1 -
 .../lucene-suggest-7.4.0-snapshot-0a7c3f462f.jar.sha1 | 1 +
 .../lucene-suggest-7.4.0-snapshot-1cbadda4d3.jar.sha1 | 1 -
 .../org/elasticsearch/index/engine/TranslogLeafReader.java | 6 +++---
 .../licenses/lucene-core-7.4.0-snapshot-0a7c3f462f.jar.sha1 | 1 +
 .../licenses/lucene-core-7.4.0-snapshot-1cbadda4d3.jar.sha1 | 1 -
 50 files
changed, 28 insertions(+), 28 deletions(-) create mode 100644 modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-0a7c3f462f.jar.sha1 delete mode 100644 modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-1cbadda4d3.jar.sha1 create mode 100644 plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-0a7c3f462f.jar.sha1 delete mode 100644 plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-1cbadda4d3.jar.sha1 create mode 100644 plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-0a7c3f462f.jar.sha1 delete mode 100644 plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-1cbadda4d3.jar.sha1 create mode 100644 plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-0a7c3f462f.jar.sha1 delete mode 100644 plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-1cbadda4d3.jar.sha1 create mode 100644 plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-0a7c3f462f.jar.sha1 delete mode 100644 plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-1cbadda4d3.jar.sha1 create mode 100644 plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-0a7c3f462f.jar.sha1 delete mode 100644 plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-1cbadda4d3.jar.sha1 create mode 100644 plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-0a7c3f462f.jar.sha1 delete mode 100644 plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-1cbadda4d3.jar.sha1 create mode 100644 plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-0a7c3f462f.jar.sha1 delete mode 100644 plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-1cbadda4d3.jar.sha1 create mode 100644 server/licenses/lucene-analyzers-common-7.4.0-snapshot-0a7c3f462f.jar.sha1 delete mode 100644 server/licenses/lucene-analyzers-common-7.4.0-snapshot-1cbadda4d3.jar.sha1 create mode 100644 server/licenses/lucene-backward-codecs-7.4.0-snapshot-0a7c3f462f.jar.sha1 delete mode 100644 server/licenses/lucene-backward-codecs-7.4.0-snapshot-1cbadda4d3.jar.sha1 create mode 100644 server/licenses/lucene-core-7.4.0-snapshot-0a7c3f462f.jar.sha1 delete mode 100644 server/licenses/lucene-core-7.4.0-snapshot-1cbadda4d3.jar.sha1 create mode 100644 server/licenses/lucene-grouping-7.4.0-snapshot-0a7c3f462f.jar.sha1 delete mode 100644 server/licenses/lucene-grouping-7.4.0-snapshot-1cbadda4d3.jar.sha1 create mode 100644 server/licenses/lucene-highlighter-7.4.0-snapshot-0a7c3f462f.jar.sha1 delete mode 100644 server/licenses/lucene-highlighter-7.4.0-snapshot-1cbadda4d3.jar.sha1 create mode 100644 server/licenses/lucene-join-7.4.0-snapshot-0a7c3f462f.jar.sha1 delete mode 100644 server/licenses/lucene-join-7.4.0-snapshot-1cbadda4d3.jar.sha1 create mode 100644 server/licenses/lucene-memory-7.4.0-snapshot-0a7c3f462f.jar.sha1 delete mode 100644 server/licenses/lucene-memory-7.4.0-snapshot-1cbadda4d3.jar.sha1 create mode 100644 server/licenses/lucene-misc-7.4.0-snapshot-0a7c3f462f.jar.sha1 delete mode 100644 server/licenses/lucene-misc-7.4.0-snapshot-1cbadda4d3.jar.sha1 create mode 100644 server/licenses/lucene-queries-7.4.0-snapshot-0a7c3f462f.jar.sha1 delete mode 100644 server/licenses/lucene-queries-7.4.0-snapshot-1cbadda4d3.jar.sha1 create mode 100644 server/licenses/lucene-queryparser-7.4.0-snapshot-0a7c3f462f.jar.sha1 delete mode 100644 
server/licenses/lucene-queryparser-7.4.0-snapshot-1cbadda4d3.jar.sha1 create mode 100644 server/licenses/lucene-sandbox-7.4.0-snapshot-0a7c3f462f.jar.sha1 delete mode 100644 server/licenses/lucene-sandbox-7.4.0-snapshot-1cbadda4d3.jar.sha1 create mode 100644 server/licenses/lucene-spatial-7.4.0-snapshot-0a7c3f462f.jar.sha1 delete mode 100644 server/licenses/lucene-spatial-7.4.0-snapshot-1cbadda4d3.jar.sha1 create mode 100644 server/licenses/lucene-spatial-extras-7.4.0-snapshot-0a7c3f462f.jar.sha1 delete mode 100644 server/licenses/lucene-spatial-extras-7.4.0-snapshot-1cbadda4d3.jar.sha1 create mode 100644 server/licenses/lucene-spatial3d-7.4.0-snapshot-0a7c3f462f.jar.sha1 delete mode 100644 server/licenses/lucene-spatial3d-7.4.0-snapshot-1cbadda4d3.jar.sha1 create mode 100644 server/licenses/lucene-suggest-7.4.0-snapshot-0a7c3f462f.jar.sha1 delete mode 100644 server/licenses/lucene-suggest-7.4.0-snapshot-1cbadda4d3.jar.sha1 create mode 100644 x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-0a7c3f462f.jar.sha1 delete mode 100644 x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-1cbadda4d3.jar.sha1 diff --git a/buildSrc/version.properties b/buildSrc/version.properties index 6009021da14ed..7aedd395b93b5 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,5 +1,5 @@ elasticsearch = 7.0.0-alpha1 -lucene = 7.4.0-snapshot-1cbadda4d3 +lucene = 7.4.0-snapshot-0a7c3f462f # optional dependencies spatial4j = 0.7 diff --git a/modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-0a7c3f462f.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-0a7c3f462f.jar.sha1 new file mode 100644 index 0000000000000..25e2291d36e8b --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-0a7c3f462f.jar.sha1 @@ -0,0 +1 @@ +bf2cfa0551ebdf08a2cf3079f3c74643bd9dbb76 \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-1cbadda4d3.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-1cbadda4d3.jar.sha1 deleted file mode 100644 index 3bbaa2ba0a715..0000000000000 --- a/modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-1cbadda4d3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -98c920972b2f5e8563540e805d87e6a3bc888972 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-0a7c3f462f.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-0a7c3f462f.jar.sha1 new file mode 100644 index 0000000000000..3fdd3366122cb --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-0a7c3f462f.jar.sha1 @@ -0,0 +1 @@ +82d83fcac1d9c8948aa0247fc9c87f177ddbd59b \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-1cbadda4d3.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-1cbadda4d3.jar.sha1 deleted file mode 100644 index 7f3d3b5ccf63c..0000000000000 --- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-1cbadda4d3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -844e2b76f4bc6e646e1c3257d668ac598e03f36a \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-0a7c3f462f.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-0a7c3f462f.jar.sha1 new file mode 100644 index 0000000000000..855d6ebe4aeb0 --- /dev/null +++ 
b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-0a7c3f462f.jar.sha1 @@ -0,0 +1 @@ +73fd4364f2931e7c8303b5927e140a7d21116c36 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-1cbadda4d3.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-1cbadda4d3.jar.sha1 deleted file mode 100644 index 65423fff2a441..0000000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-1cbadda4d3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2f2bd2d67c7952e4ae14ab3f742824a45d0d1719 \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-0a7c3f462f.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-0a7c3f462f.jar.sha1 new file mode 100644 index 0000000000000..091097f1a8477 --- /dev/null +++ b/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-0a7c3f462f.jar.sha1 @@ -0,0 +1 @@ +0a2c4417fa9a8be078864f590a5a66b98d551cf5 \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-1cbadda4d3.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-1cbadda4d3.jar.sha1 deleted file mode 100644 index 04fa62ce64a1d..0000000000000 --- a/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-1cbadda4d3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -46ad7ebcfcdbdb60dd54aae4d720356a7a51c7c0 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-0a7c3f462f.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-0a7c3f462f.jar.sha1 new file mode 100644 index 0000000000000..b18addf0b5819 --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-0a7c3f462f.jar.sha1 @@ -0,0 +1 @@ +6fa179924f139a30fc0e5399256e1a44562ed32b \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-1cbadda4d3.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-1cbadda4d3.jar.sha1 deleted file mode 100644 index 55bc8869196e0..0000000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-1cbadda4d3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -548e9f2b4d4a985dc174b2eee4007c0bd5642e68 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-0a7c3f462f.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-0a7c3f462f.jar.sha1 new file mode 100644 index 0000000000000..7b7141b6f407c --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-0a7c3f462f.jar.sha1 @@ -0,0 +1 @@ +5ed135d34d7868b71a725257a46dc8d8735a15d4 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-1cbadda4d3.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-1cbadda4d3.jar.sha1 deleted file mode 100644 index be66854321699..0000000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-1cbadda4d3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b90e66f4104f0234cfef335762f65a6fed695231 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-0a7c3f462f.jar.sha1 
b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-0a7c3f462f.jar.sha1 new file mode 100644 index 0000000000000..73be96c477eab --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-0a7c3f462f.jar.sha1 @@ -0,0 +1 @@ +875911b36b99c2103719f94559878a0ecb862fb6 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-1cbadda4d3.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-1cbadda4d3.jar.sha1 deleted file mode 100644 index b77acdc34f31c..0000000000000 --- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-1cbadda4d3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -929a4eb52b11f6d3f0df9c8eba014f5ee2464c67 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-0a7c3f462f.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-0a7c3f462f.jar.sha1 new file mode 100644 index 0000000000000..0c85d3f6c8522 --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-0a7c3f462f.jar.sha1 @@ -0,0 +1 @@ +e7191628df8cb72382a20da79224aef677117849 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-1cbadda4d3.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-1cbadda4d3.jar.sha1 deleted file mode 100644 index cce4b6ff18df5..0000000000000 --- a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-1cbadda4d3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0e6575a411b65cd95e0e54f04d3da278b68be521 \ No newline at end of file diff --git a/server/licenses/lucene-analyzers-common-7.4.0-snapshot-0a7c3f462f.jar.sha1 b/server/licenses/lucene-analyzers-common-7.4.0-snapshot-0a7c3f462f.jar.sha1 new file mode 100644 index 0000000000000..db3885eb62fab --- /dev/null +++ b/server/licenses/lucene-analyzers-common-7.4.0-snapshot-0a7c3f462f.jar.sha1 @@ -0,0 +1 @@ +8cd761f40c4a89ed977167f0518d12e409eaf3d8 \ No newline at end of file diff --git a/server/licenses/lucene-analyzers-common-7.4.0-snapshot-1cbadda4d3.jar.sha1 b/server/licenses/lucene-analyzers-common-7.4.0-snapshot-1cbadda4d3.jar.sha1 deleted file mode 100644 index 82585bb7ff3b3..0000000000000 --- a/server/licenses/lucene-analyzers-common-7.4.0-snapshot-1cbadda4d3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0f75703c30756c31f7d09ec79191dab6fb35c958 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-7.4.0-snapshot-0a7c3f462f.jar.sha1 b/server/licenses/lucene-backward-codecs-7.4.0-snapshot-0a7c3f462f.jar.sha1 new file mode 100644 index 0000000000000..bd8711a4d53d9 --- /dev/null +++ b/server/licenses/lucene-backward-codecs-7.4.0-snapshot-0a7c3f462f.jar.sha1 @@ -0,0 +1 @@ +8c93ed67599d345b9359586248ab92342d7d3033 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-7.4.0-snapshot-1cbadda4d3.jar.sha1 b/server/licenses/lucene-backward-codecs-7.4.0-snapshot-1cbadda4d3.jar.sha1 deleted file mode 100644 index 981e6d1a1730f..0000000000000 --- a/server/licenses/lucene-backward-codecs-7.4.0-snapshot-1cbadda4d3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c5c519fdea65726612f79e3dd942b7316966646e \ No newline at end of file diff --git a/server/licenses/lucene-core-7.4.0-snapshot-0a7c3f462f.jar.sha1 b/server/licenses/lucene-core-7.4.0-snapshot-0a7c3f462f.jar.sha1 new file mode 100644 index 
0000000000000..36bf03bbbdb54 --- /dev/null +++ b/server/licenses/lucene-core-7.4.0-snapshot-0a7c3f462f.jar.sha1 @@ -0,0 +1 @@ +003ed080e5184661e606091cd321c229798b22f8 \ No newline at end of file diff --git a/server/licenses/lucene-core-7.4.0-snapshot-1cbadda4d3.jar.sha1 b/server/licenses/lucene-core-7.4.0-snapshot-1cbadda4d3.jar.sha1 deleted file mode 100644 index cea13d14fe1c7..0000000000000 --- a/server/licenses/lucene-core-7.4.0-snapshot-1cbadda4d3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f345b6aa3c550dafc63de3e5a5c404691e782336 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-7.4.0-snapshot-0a7c3f462f.jar.sha1 b/server/licenses/lucene-grouping-7.4.0-snapshot-0a7c3f462f.jar.sha1 new file mode 100644 index 0000000000000..0f940ee9c7ac7 --- /dev/null +++ b/server/licenses/lucene-grouping-7.4.0-snapshot-0a7c3f462f.jar.sha1 @@ -0,0 +1 @@ +0b4be9f96edfd3dbcff5aa9b3f0914e86eb9cc51 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-7.4.0-snapshot-1cbadda4d3.jar.sha1 b/server/licenses/lucene-grouping-7.4.0-snapshot-1cbadda4d3.jar.sha1 deleted file mode 100644 index fcb173608efb8..0000000000000 --- a/server/licenses/lucene-grouping-7.4.0-snapshot-1cbadda4d3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7a74855e37124a27af36390c9d15abe33614129e \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-7.4.0-snapshot-0a7c3f462f.jar.sha1 b/server/licenses/lucene-highlighter-7.4.0-snapshot-0a7c3f462f.jar.sha1 new file mode 100644 index 0000000000000..fdc9336fb2ce2 --- /dev/null +++ b/server/licenses/lucene-highlighter-7.4.0-snapshot-0a7c3f462f.jar.sha1 @@ -0,0 +1 @@ +a5dcceb5bc017cee6ab5d3ee1943aca1ac6fe074 \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-7.4.0-snapshot-1cbadda4d3.jar.sha1 b/server/licenses/lucene-highlighter-7.4.0-snapshot-1cbadda4d3.jar.sha1 deleted file mode 100644 index 79addefbfc615..0000000000000 --- a/server/licenses/lucene-highlighter-7.4.0-snapshot-1cbadda4d3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0e3df4b469465ef101254fdcbb08ebd8a19f1f9d \ No newline at end of file diff --git a/server/licenses/lucene-join-7.4.0-snapshot-0a7c3f462f.jar.sha1 b/server/licenses/lucene-join-7.4.0-snapshot-0a7c3f462f.jar.sha1 new file mode 100644 index 0000000000000..62726ca415a48 --- /dev/null +++ b/server/licenses/lucene-join-7.4.0-snapshot-0a7c3f462f.jar.sha1 @@ -0,0 +1 @@ +b59e7441f121da969bef8eef2c0c61743b4230a8 \ No newline at end of file diff --git a/server/licenses/lucene-join-7.4.0-snapshot-1cbadda4d3.jar.sha1 b/server/licenses/lucene-join-7.4.0-snapshot-1cbadda4d3.jar.sha1 deleted file mode 100644 index a9838db7caae4..0000000000000 --- a/server/licenses/lucene-join-7.4.0-snapshot-1cbadda4d3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -05d236149c99c860e6b627a8f78ea32918c108c3 \ No newline at end of file diff --git a/server/licenses/lucene-memory-7.4.0-snapshot-0a7c3f462f.jar.sha1 b/server/licenses/lucene-memory-7.4.0-snapshot-0a7c3f462f.jar.sha1 new file mode 100644 index 0000000000000..a68093d2fc42e --- /dev/null +++ b/server/licenses/lucene-memory-7.4.0-snapshot-0a7c3f462f.jar.sha1 @@ -0,0 +1 @@ +46736dbb07b432f0a7c1b3080f62932c483e5cb9 \ No newline at end of file diff --git a/server/licenses/lucene-memory-7.4.0-snapshot-1cbadda4d3.jar.sha1 b/server/licenses/lucene-memory-7.4.0-snapshot-1cbadda4d3.jar.sha1 deleted file mode 100644 index 679c79788f500..0000000000000 --- a/server/licenses/lucene-memory-7.4.0-snapshot-1cbadda4d3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d83e7e65eb268425f7bd5be2425d4a00b556bc47 \ No newline 
at end of file diff --git a/server/licenses/lucene-misc-7.4.0-snapshot-0a7c3f462f.jar.sha1 b/server/licenses/lucene-misc-7.4.0-snapshot-0a7c3f462f.jar.sha1 new file mode 100644 index 0000000000000..23e2b68f3dfcf --- /dev/null +++ b/server/licenses/lucene-misc-7.4.0-snapshot-0a7c3f462f.jar.sha1 @@ -0,0 +1 @@ +ee203718d525da0c6258a51a5a32d877089fe5af \ No newline at end of file diff --git a/server/licenses/lucene-misc-7.4.0-snapshot-1cbadda4d3.jar.sha1 b/server/licenses/lucene-misc-7.4.0-snapshot-1cbadda4d3.jar.sha1 deleted file mode 100644 index c403d4c4f86b7..0000000000000 --- a/server/licenses/lucene-misc-7.4.0-snapshot-1cbadda4d3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -440a998b5bf99871bec4272a219de01b25751d5c \ No newline at end of file diff --git a/server/licenses/lucene-queries-7.4.0-snapshot-0a7c3f462f.jar.sha1 b/server/licenses/lucene-queries-7.4.0-snapshot-0a7c3f462f.jar.sha1 new file mode 100644 index 0000000000000..5bac053813ea2 --- /dev/null +++ b/server/licenses/lucene-queries-7.4.0-snapshot-0a7c3f462f.jar.sha1 @@ -0,0 +1 @@ +cf17a332d8e42a45e8f013d5df408f4391d2620a \ No newline at end of file diff --git a/server/licenses/lucene-queries-7.4.0-snapshot-1cbadda4d3.jar.sha1 b/server/licenses/lucene-queries-7.4.0-snapshot-1cbadda4d3.jar.sha1 deleted file mode 100644 index 6b8897d1ae7b7..0000000000000 --- a/server/licenses/lucene-queries-7.4.0-snapshot-1cbadda4d3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2a5c031155fdfa743af321150c0dd654a6ea3c71 \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-7.4.0-snapshot-0a7c3f462f.jar.sha1 b/server/licenses/lucene-queryparser-7.4.0-snapshot-0a7c3f462f.jar.sha1 new file mode 100644 index 0000000000000..471aa797028a7 --- /dev/null +++ b/server/licenses/lucene-queryparser-7.4.0-snapshot-0a7c3f462f.jar.sha1 @@ -0,0 +1 @@ +04832303d70502d2ece44501cb1716f42e24fe35 \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-7.4.0-snapshot-1cbadda4d3.jar.sha1 b/server/licenses/lucene-queryparser-7.4.0-snapshot-1cbadda4d3.jar.sha1 deleted file mode 100644 index b6c6bf766101d..0000000000000 --- a/server/licenses/lucene-queryparser-7.4.0-snapshot-1cbadda4d3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d021c9a461ff0f020d038ad5ecc5127973d4674a \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-7.4.0-snapshot-0a7c3f462f.jar.sha1 b/server/licenses/lucene-sandbox-7.4.0-snapshot-0a7c3f462f.jar.sha1 new file mode 100644 index 0000000000000..486dafc10c73f --- /dev/null +++ b/server/licenses/lucene-sandbox-7.4.0-snapshot-0a7c3f462f.jar.sha1 @@ -0,0 +1 @@ +639313e3a9573779b6a28b45a7f57fc1f73ffa46 \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-7.4.0-snapshot-1cbadda4d3.jar.sha1 b/server/licenses/lucene-sandbox-7.4.0-snapshot-1cbadda4d3.jar.sha1 deleted file mode 100644 index 92f64fca2c749..0000000000000 --- a/server/licenses/lucene-sandbox-7.4.0-snapshot-1cbadda4d3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9877a14c53e69b39fff2bf10d49a61171746d940 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-7.4.0-snapshot-0a7c3f462f.jar.sha1 b/server/licenses/lucene-spatial-7.4.0-snapshot-0a7c3f462f.jar.sha1 new file mode 100644 index 0000000000000..0a083b5a078ac --- /dev/null +++ b/server/licenses/lucene-spatial-7.4.0-snapshot-0a7c3f462f.jar.sha1 @@ -0,0 +1 @@ +6144b493ba3588a638858d0058054758acc619b9 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-7.4.0-snapshot-1cbadda4d3.jar.sha1 b/server/licenses/lucene-spatial-7.4.0-snapshot-1cbadda4d3.jar.sha1 deleted file 
mode 100644 index 2f691988c4495..0000000000000 --- a/server/licenses/lucene-spatial-7.4.0-snapshot-1cbadda4d3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7d7e5101b46a120efa311509948c0d1f9bf30155 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-7.4.0-snapshot-0a7c3f462f.jar.sha1 b/server/licenses/lucene-spatial-extras-7.4.0-snapshot-0a7c3f462f.jar.sha1 new file mode 100644 index 0000000000000..851b0d76d3e7a --- /dev/null +++ b/server/licenses/lucene-spatial-extras-7.4.0-snapshot-0a7c3f462f.jar.sha1 @@ -0,0 +1 @@ +9d00c6b8bbbbb496aecd555406267fee9e0af914 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-7.4.0-snapshot-1cbadda4d3.jar.sha1 b/server/licenses/lucene-spatial-extras-7.4.0-snapshot-1cbadda4d3.jar.sha1 deleted file mode 100644 index 86c147f961020..0000000000000 --- a/server/licenses/lucene-spatial-extras-7.4.0-snapshot-1cbadda4d3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5a4c11db96ae70b9048243cc530fcbc76faa0978 \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-7.4.0-snapshot-0a7c3f462f.jar.sha1 b/server/licenses/lucene-spatial3d-7.4.0-snapshot-0a7c3f462f.jar.sha1 new file mode 100644 index 0000000000000..22ce3c7244338 --- /dev/null +++ b/server/licenses/lucene-spatial3d-7.4.0-snapshot-0a7c3f462f.jar.sha1 @@ -0,0 +1 @@ +159cdb6d36845690cb1972d02cc0b472bb14b7f3 \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-7.4.0-snapshot-1cbadda4d3.jar.sha1 b/server/licenses/lucene-spatial3d-7.4.0-snapshot-1cbadda4d3.jar.sha1 deleted file mode 100644 index 2fbdcdecf1a08..0000000000000 --- a/server/licenses/lucene-spatial3d-7.4.0-snapshot-1cbadda4d3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -afb01af1450067b145ca2c1d737b5907288af560 \ No newline at end of file diff --git a/server/licenses/lucene-suggest-7.4.0-snapshot-0a7c3f462f.jar.sha1 b/server/licenses/lucene-suggest-7.4.0-snapshot-0a7c3f462f.jar.sha1 new file mode 100644 index 0000000000000..0724381bcc6a6 --- /dev/null +++ b/server/licenses/lucene-suggest-7.4.0-snapshot-0a7c3f462f.jar.sha1 @@ -0,0 +1 @@ +af1dd0218d58990cca5c1592d9722e67d233c996 \ No newline at end of file diff --git a/server/licenses/lucene-suggest-7.4.0-snapshot-1cbadda4d3.jar.sha1 b/server/licenses/lucene-suggest-7.4.0-snapshot-1cbadda4d3.jar.sha1 deleted file mode 100644 index 1a86525735c05..0000000000000 --- a/server/licenses/lucene-suggest-7.4.0-snapshot-1cbadda4d3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -473f0221e0b2ea45940d8ae6dcf16e39c81b18c2 \ No newline at end of file diff --git a/server/src/main/java/org/elasticsearch/index/engine/TranslogLeafReader.java b/server/src/main/java/org/elasticsearch/index/engine/TranslogLeafReader.java index cbe1721f07f71..40c8277d3991a 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/TranslogLeafReader.java +++ b/server/src/main/java/org/elasticsearch/index/engine/TranslogLeafReader.java @@ -56,13 +56,13 @@ final class TranslogLeafReader extends LeafReader { private final Translog.Index operation; private static final FieldInfo FAKE_SOURCE_FIELD = new FieldInfo(SourceFieldMapper.NAME, 1, false, false, false, IndexOptions.NONE, DocValuesType.NONE, -1, Collections.emptyMap(), - 0,0); + 0, 0, false); private static final FieldInfo FAKE_ROUTING_FIELD = new FieldInfo(RoutingFieldMapper.NAME, 2, false, false, false, IndexOptions.NONE, DocValuesType.NONE, -1, Collections.emptyMap(), - 0,0); + 0, 0, false); private static final FieldInfo FAKE_ID_FIELD = new FieldInfo(IdFieldMapper.NAME, 3, false, false, false, IndexOptions.NONE, 
DocValuesType.NONE, -1, Collections.emptyMap(), - 0,0); + 0, 0, false); private final Version indexVersionCreated; TranslogLeafReader(Translog.Index operation, Version indexVersionCreated) { diff --git a/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-0a7c3f462f.jar.sha1 b/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-0a7c3f462f.jar.sha1 new file mode 100644 index 0000000000000..36bf03bbbdb54 --- /dev/null +++ b/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-0a7c3f462f.jar.sha1 @@ -0,0 +1 @@ +003ed080e5184661e606091cd321c229798b22f8 \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-1cbadda4d3.jar.sha1 b/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-1cbadda4d3.jar.sha1 deleted file mode 100644 index cea13d14fe1c7..0000000000000 --- a/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-1cbadda4d3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f345b6aa3c550dafc63de3e5a5c404691e782336 \ No newline at end of file

From 3f87c7950019316f59edd0199852bd3233db76b2 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Christoph=20B=C3=BCscher?=
Date: Mon, 4 Jun 2018 20:20:37 +0200
Subject: [PATCH 2/6] Change ObjectParser exception (#31030)

ObjectParser should throw XContentParseExceptions, not IAE. A dedicated
parsing exception can include the place where the error occurred.

Closes #30605
---
 .../common/xcontent/ObjectParser.java | 7 ++++---
 .../common/xcontent/ObjectParserTests.java | 20 +++++++------------
 .../index/query/FeatureQueryBuilderTests.java | 6 ------
 .../DiscountedCumulativeGainTests.java | 7 ++++---
 .../rankeval/MeanReciprocalRankTests.java | 7 ++++---
 .../index/rankeval/PrecisionAtKTests.java | 7 ++++---
 .../index/rankeval/RatedDocumentTests.java | 7 ++++---
 .../ClusterUpdateSettingsRequestTests.java | 8 +++++---
 .../action/update/UpdateRequestTests.java | 10 ++++++----
 .../highlight/HighlightBuilderTests.java | 18 ++++++++---------
 .../rescore/QueryRescorerBuilderTests.java | 5 +++--
 .../search/sort/FieldSortBuilderTests.java | 9 +++++----
 .../search/sort/ScriptSortBuilderTests.java | 13 ++++++------
 .../test/AbstractQueryTestCase.java | 14 +++++++++----
 .../core/ml/datafeed/DatafeedConfigTests.java | 7 ++++---
 .../xpack/core/ml/job/config/JobTests.java | 6 ++++--
 .../output/AutodetectResultsParserTests.java | 7 +++----
 .../common/text/TextTemplateTests.java | 17 +++++-----------
 18 files changed, 87 insertions(+), 88 deletions(-)
diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java index 71b888bf44acb..dfcc4271b922e 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java @@ -155,7 +155,7 @@ public Value parse(XContentParser parser, Value value, Context context) throws I while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - fieldParser = getParser(currentFieldName); + fieldParser = getParser(currentFieldName, parser); } else { if (currentFieldName == null) { throw new XContentParseException(parser.getTokenLocation(), "[" + name + "] no field found"); } @@ -341,10 +341,11 @@ private void parseSub(XContentParser parser, FieldParser fieldParser, String cur } } - private FieldParser getParser(String fieldName) { + private
FieldParser getParser(String fieldName, XContentParser xContentParser) { FieldParser parser = fieldParserMap.get(fieldName); if (parser == null && false == ignoreUnknownFields) { - throw new IllegalArgumentException("[" + name + "] unknown field [" + fieldName + "], parser not found"); + throw new XContentParseException(xContentParser.getTokenLocation(), + "[" + name + "] unknown field [" + fieldName + "], parser not found"); } return parser; } diff --git a/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java b/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java index 3dd33e997b2ea..6aa0a321adf4d 100644 --- a/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java +++ b/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java @@ -35,7 +35,6 @@ import java.util.Collections; import java.util.List; -import static org.hamcrest.CoreMatchers.startsWith; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasSize; @@ -186,7 +185,6 @@ public URI parseURI(XContentParser parser) { } public void testExceptions() throws IOException { - XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"test\" : \"foo\"}"); class TestStruct { public void setTest(int test) { } @@ -195,20 +193,16 @@ public void setTest(int test) { TestStruct s = new TestStruct(); objectParser.declareInt(TestStruct::setTest, new ParseField("test")); - try { - objectParser.parse(parser, s, null); - fail("numeric value expected"); - } catch (XContentParseException ex) { + { + XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"test\" : \"foo\"}"); + XContentParseException ex = expectThrows(XContentParseException.class, () -> objectParser.parse(parser, s, null)); assertThat(ex.getMessage(), containsString("[the_parser] failed to parse field [test]")); assertTrue(ex.getCause() instanceof NumberFormatException); } - - parser = createParser(JsonXContent.jsonXContent, "{\"not_supported_field\" : \"foo\"}"); - try { - objectParser.parse(parser, s, null); - fail("field not supported"); - } catch (IllegalArgumentException ex) { - assertEquals(ex.getMessage(), "[the_parser] unknown field [not_supported_field], parser not found"); + { + XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"not_supported_field\" : \"foo\"}"); + XContentParseException ex = expectThrows(XContentParseException.class, () -> objectParser.parse(parser, s, null)); + assertEquals(ex.getMessage(), "[1:2] [the_parser] unknown field [not_supported_field], parser not found"); } } diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/query/FeatureQueryBuilderTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/query/FeatureQueryBuilderTests.java index 883dce5f3858c..f9101447ef72a 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/query/FeatureQueryBuilderTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/query/FeatureQueryBuilderTests.java @@ -84,12 +84,6 @@ protected void doAssertLuceneQuery(FeatureQueryBuilder queryBuilder, Query query assertThat(query, either(instanceOf(MatchNoDocsQuery.class)).or(instanceOf(expectedClass))); } - @Override - @AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/30605") - public void testUnknownField() { - super.testUnknownField(); - } - public void testDefaultScoreFunction() throws IOException { assumeTrue("test runs only when at least a 
type is registered", getCurrentTypes().length > 0); String query = "{\n" + diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java index ba03a734ec760..64337786b1eb6 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; @@ -41,7 +42,7 @@ import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnknownDocuments; import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; -import static org.hamcrest.Matchers.startsWith; +import static org.hamcrest.CoreMatchers.containsString; public class DiscountedCumulativeGainTests extends ESTestCase { @@ -280,9 +281,9 @@ public void testXContentParsingIsNotLenient() throws IOException { try (XContentParser parser = createParser(xContentType.xContent(), withRandomFields)) { parser.nextToken(); parser.nextToken(); - IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, + XContentParseException exception = expectThrows(XContentParseException.class, () -> DiscountedCumulativeGain.fromXContent(parser)); - assertThat(exception.getMessage(), startsWith("[dcg_at] unknown field")); + assertThat(exception.getMessage(), containsString("[dcg_at] unknown field")); } } diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java index a5597873103bc..f88b0cc663489 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; @@ -41,7 +42,7 @@ import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; -import static org.hamcrest.Matchers.startsWith; +import static org.hamcrest.CoreMatchers.containsString; public class MeanReciprocalRankTests extends ESTestCase { @@ -189,9 +190,9 @@ public void testXContentParsingIsNotLenient() throws IOException { try (XContentParser parser = createParser(xContentType.xContent(), withRandomFields)) { parser.nextToken(); parser.nextToken(); - IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, + XContentParseException exception = 
expectThrows(XContentParseException.class, () -> MeanReciprocalRank.fromXContent(parser)); - assertThat(exception.getMessage(), startsWith("[reciprocal_rank] unknown field")); + assertThat(exception.getMessage(), containsString("[reciprocal_rank] unknown field")); } } diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java index c65ad76fdf9af..c0035d5dbb72e 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; @@ -41,7 +42,7 @@ import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; -import static org.hamcrest.Matchers.startsWith; +import static org.hamcrest.CoreMatchers.containsString; public class PrecisionAtKTests extends ESTestCase { @@ -203,8 +204,8 @@ public void testXContentParsingIsNotLenient() throws IOException { try (XContentParser parser = createParser(xContentType.xContent(), withRandomFields)) { parser.nextToken(); parser.nextToken(); - IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> PrecisionAtK.fromXContent(parser)); - assertThat(exception.getMessage(), startsWith("[precision] unknown field")); + XContentParseException exception = expectThrows(XContentParseException.class, () -> PrecisionAtK.fromXContent(parser)); + assertThat(exception.getMessage(), containsString("[precision] unknown field")); } } diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedDocumentTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedDocumentTests.java index cd38233bfa9a9..c62fc1fa2bb47 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedDocumentTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedDocumentTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.ESTestCase; @@ -33,7 +34,7 @@ import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; -import static org.hamcrest.Matchers.startsWith; +import static org.hamcrest.CoreMatchers.containsString; public class RatedDocumentTests extends ESTestCase { @@ -59,8 +60,8 @@ public void testXContentParsingIsNotLenient() throws IOException { BytesReference originalBytes = toShuffledXContent(testItem, xContentType, ToXContent.EMPTY_PARAMS, randomBoolean()); BytesReference withRandomFields = insertRandomFields(xContentType, originalBytes, null, random()); try 
(XContentParser parser = createParser(xContentType.xContent(), withRandomFields)) { - Exception exception = expectThrows(IllegalArgumentException.class, () -> RatedDocument.fromXContent(parser)); - assertThat(exception.getMessage(), startsWith("[rated_document] unknown field")); + XContentParseException exception = expectThrows(XContentParseException.class, () -> RatedDocument.fromXContent(parser)); + assertThat(exception.getMessage(), containsString("[rated_document] unknown field")); } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestTests.java index 6c9277a61bdee..c358d0fb6ca52 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.ESTestCase; @@ -29,7 +30,8 @@ import java.io.IOException; import java.util.Collections; -import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.equalTo;; public class ClusterUpdateSettingsRequestTests extends ESTestCase { @@ -51,10 +53,10 @@ private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws String unsupportedField = "unsupported_field"; BytesReference mutated = BytesReference.bytes(XContentTestUtils.insertIntoXContent(xContentType.xContent(), originalBytes, Collections.singletonList(""), () -> unsupportedField, () -> randomAlphaOfLengthBetween(3, 10))); - IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, + XContentParseException iae = expectThrows(XContentParseException.class, () -> ClusterUpdateSettingsRequest.fromXContent(createParser(xContentType.xContent(), mutated))); assertThat(iae.getMessage(), - equalTo("[cluster_update_settings_request] unknown field [" + unsupportedField + "], parser not found")); + containsString("[cluster_update_settings_request] unknown field [" + unsupportedField + "], parser not found")); } else { XContentParser parser = createParser(xContentType.xContent(), originalBytes); ClusterUpdateSettingsRequest parsedRequest = ClusterUpdateSettingsRequest.fromXContent(parser); diff --git a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java index f562cbd0ec184..036e8b4ca6c97 100644 --- a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; @@ -70,6 +71,7 @@ public class UpdateRequestTests 
extends ESTestCase { private UpdateHelper updateHelper; + @Override @Before public void setUp() throws Exception { super.setUp(); @@ -283,8 +285,8 @@ public void testUnknownFieldParsing() throws Exception { .field("unknown_field", "test") .endObject()); - IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> request.fromXContent(contentParser)); - assertEquals("[UpdateRequest] unknown field [unknown_field], parser not found", ex.getMessage()); + XContentParseException ex = expectThrows(XContentParseException.class, () -> request.fromXContent(contentParser)); + assertEquals("[1:2] [UpdateRequest] unknown field [unknown_field], parser not found", ex.getMessage()); UpdateRequest request2 = new UpdateRequest("test", "type", "1"); XContentParser unknownObject = createParser(XContentFactory.jsonBuilder() @@ -294,8 +296,8 @@ public void testUnknownFieldParsing() throws Exception { .field("count", 1) .endObject() .endObject()); - ex = expectThrows(IllegalArgumentException.class, () -> request2.fromXContent(unknownObject)); - assertEquals("[UpdateRequest] unknown field [params], parser not found", ex.getMessage()); + ex = expectThrows(XContentParseException.class, () -> request2.fromXContent(unknownObject)); + assertEquals("[1:76] [UpdateRequest] unknown field [params], parser not found", ex.getMessage()); } public void testFetchSourceParsing() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java index 5d06fd4cd400b..95da15e838c31 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java @@ -158,10 +158,10 @@ public void testFromXContent() throws IOException { */ public void testUnknownArrayNameExpection() throws IOException { { - IllegalArgumentException e = expectParseThrows(IllegalArgumentException.class, "{\n" + + XContentParseException e = expectParseThrows(XContentParseException.class, "{\n" + " \"bad_fieldname\" : [ \"field1\" 1 \"field2\" ]\n" + "}\n"); - assertEquals("[highlight] unknown field [bad_fieldname], parser not found", e.getMessage()); + assertEquals("[2:5] [highlight] unknown field [bad_fieldname], parser not found", e.getMessage()); } { @@ -174,7 +174,7 @@ public void testUnknownArrayNameExpection() throws IOException { "}\n"); assertThat(e.getMessage(), containsString("[highlight] failed to parse field [fields]")); assertThat(e.getCause().getMessage(), containsString("[fields] failed to parse field [body]")); - assertEquals("[highlight_field] unknown field [bad_fieldname], parser not found", e.getCause().getCause().getMessage()); + assertEquals("[4:9] [highlight_field] unknown field [bad_fieldname], parser not found", e.getCause().getCause().getMessage()); } } @@ -188,10 +188,10 @@ private T expectParseThrows(Class exceptionClass, Strin */ public void testUnknownFieldnameExpection() throws IOException { { - IllegalArgumentException e = expectParseThrows(IllegalArgumentException.class, "{\n" + + XContentParseException e = expectParseThrows(XContentParseException.class, "{\n" + " \"bad_fieldname\" : \"value\"\n" + "}\n"); - assertEquals("[highlight] unknown field [bad_fieldname], parser not found", e.getMessage()); + assertEquals("[2:5] [highlight] unknown field [bad_fieldname], parser not found", e.getMessage()); } { @@ 
-204,7 +204,7 @@ public void testUnknownFieldnameExpection() throws IOException { "}\n"); assertThat(e.getMessage(), containsString("[highlight] failed to parse field [fields]")); assertThat(e.getCause().getMessage(), containsString("[fields] failed to parse field [body]")); - assertEquals("[highlight_field] unknown field [bad_fieldname], parser not found", e.getCause().getCause().getMessage()); + assertEquals("[4:9] [highlight_field] unknown field [bad_fieldname], parser not found", e.getCause().getCause().getMessage()); } } @@ -213,10 +213,10 @@ public void testUnknownFieldnameExpection() throws IOException { */ public void testUnknownObjectFieldnameExpection() throws IOException { { - IllegalArgumentException e = expectParseThrows(IllegalArgumentException.class, "{\n" + + XContentParseException e = expectParseThrows(XContentParseException.class, "{\n" + " \"bad_fieldname\" : { \"field\" : \"value\" }\n \n" + "}\n"); - assertEquals("[highlight] unknown field [bad_fieldname], parser not found", e.getMessage()); + assertEquals("[2:5] [highlight] unknown field [bad_fieldname], parser not found", e.getMessage()); } { @@ -229,7 +229,7 @@ public void testUnknownObjectFieldnameExpection() throws IOException { "}\n"); assertThat(e.getMessage(), containsString("[highlight] failed to parse field [fields]")); assertThat(e.getCause().getMessage(), containsString("[fields] failed to parse field [body]")); - assertEquals("[highlight_field] unknown field [bad_fieldname], parser not found", e.getCause().getCause().getMessage()); + assertEquals("[4:9] [highlight_field] unknown field [bad_fieldname], parser not found", e.getCause().getCause().getMessage()); } } diff --git a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java index 75ac542d9853a..efd3e5ef2ca06 100644 --- a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java @@ -170,6 +170,7 @@ public void testRescoreQueryNull() throws IOException { class AlwaysRewriteQueryBuilder extends MatchAllQueryBuilder { + @Override protected QueryBuilder doRewrite(QueryRewriteContext queryShardContext) throws IOException { return new MatchAllQueryBuilder(); } @@ -254,8 +255,8 @@ public void testUnknownFieldsExpection() throws IOException { "}\n"; { XContentParser parser = createParser(rescoreElement); - Exception e = expectThrows(IllegalArgumentException.class, () -> RescorerBuilder.parseFromXContent(parser)); - assertEquals("[query] unknown field [bad_fieldname], parser not found", e.getMessage()); + XContentParseException e = expectThrows(XContentParseException.class, () -> RescorerBuilder.parseFromXContent(parser)); + assertEquals("[3:17] [query] unknown field [bad_fieldname], parser not found", e.getMessage()); } rescoreElement = "{\n" + diff --git a/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java index 163b9391a1b98..6aceed996ccdc 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java @@ -26,6 +26,7 @@ import org.apache.lucene.search.SortedSetSelector; import org.apache.lucene.search.SortedSetSortField; import org.apache.lucene.search.TermQuery; +import 
org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource; @@ -309,8 +310,8 @@ public void testUnknownOptionFails() throws IOException { parser.nextToken(); parser.nextToken(); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> FieldSortBuilder.fromXContent(parser, "")); - assertEquals("[field_sort] unknown field [reverse], parser not found", e.getMessage()); + XContentParseException e = expectThrows(XContentParseException.class, () -> FieldSortBuilder.fromXContent(parser, "")); + assertEquals("[1:18] [field_sort] unknown field [reverse], parser not found", e.getMessage()); } @Override @@ -383,7 +384,7 @@ public QueryBuilder doRewrite(QueryRewriteContext queryShardContext) throws IOEx } }; sortBuilder.setNestedPath("path").setNestedFilter(rangeQuery); - FieldSortBuilder rewritten = (FieldSortBuilder) sortBuilder + FieldSortBuilder rewritten = sortBuilder .rewrite(createMockShardContext()); assertNotSame(rangeQuery, rewritten.getNestedFilter()); } @@ -400,7 +401,7 @@ public QueryBuilder doRewrite(QueryRewriteContext queryShardContext) throws IOEx } }; sortBuilder.setNestedSort(new NestedSortBuilder("path").setFilter(rangeQuery)); - FieldSortBuilder rewritten = (FieldSortBuilder) sortBuilder + FieldSortBuilder rewritten = sortBuilder .rewrite(createMockShardContext()); assertNotSame(rangeQuery, rewritten.getNestedSort().getFilter()); } diff --git a/server/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java index ed83011c26609..9a030cc3aabcb 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java @@ -24,7 +24,6 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TermQuery; -import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; @@ -225,8 +224,8 @@ public void testParseBadFieldNameExceptions() throws IOException { parser.nextToken(); parser.nextToken(); - Exception e = expectThrows(IllegalArgumentException.class, () -> ScriptSortBuilder.fromXContent(parser, null)); - assertEquals("[_script] unknown field [bad_field], parser not found", e.getMessage()); + XContentParseException e = expectThrows(XContentParseException.class, () -> ScriptSortBuilder.fromXContent(parser, null)); + assertEquals("[1:15] [_script] unknown field [bad_field], parser not found", e.getMessage()); } public void testParseBadFieldNameExceptionsOnStartObject() throws IOException { @@ -237,8 +236,8 @@ public void testParseBadFieldNameExceptionsOnStartObject() throws IOException { parser.nextToken(); parser.nextToken(); - Exception e = expectThrows(IllegalArgumentException.class, () -> ScriptSortBuilder.fromXContent(parser, null)); - assertEquals("[_script] unknown field [bad_field], parser not found", e.getMessage()); + XContentParseException e = expectThrows(XContentParseException.class, () -> ScriptSortBuilder.fromXContent(parser, null)); + assertEquals("[1:15] [_script] unknown field [bad_field], parser not found", e.getMessage()); } 
public void testParseUnexpectedToken() throws IOException { @@ -374,7 +373,7 @@ public QueryBuilder doRewrite(QueryRewriteContext queryShardContext) throws IOEx } }; sortBuilder.setNestedPath("path").setNestedFilter(rangeQuery); - ScriptSortBuilder rewritten = (ScriptSortBuilder) sortBuilder + ScriptSortBuilder rewritten = sortBuilder .rewrite(createMockShardContext()); assertNotSame(rangeQuery, rewritten.getNestedFilter()); } @@ -391,7 +390,7 @@ public QueryBuilder doRewrite(QueryRewriteContext queryShardContext) throws IOEx } }; sortBuilder.setNestedSort(new NestedSortBuilder("path").setFilter(rangeQuery)); - ScriptSortBuilder rewritten = (ScriptSortBuilder) sortBuilder + ScriptSortBuilder rewritten = sortBuilder .rewrite(createMockShardContext()); assertNotSame(rangeQuery, rewritten.getNestedSort().getFilter()); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java index d2f3a56aebe3d..48301fa5746e2 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java @@ -132,8 +132,9 @@ protected String[] shuffleProtectedFields() { * To find the right position in the root query, we add a marker as `queryName` which * all query builders support. The added bogus field after that should trigger the exception. * Queries that allow arbitrary field names at this level need to override this test. + * @throws IOException */ - public void testUnknownField() { + public void testUnknownField() throws IOException { String marker = "#marker#"; QB testQuery; do { @@ -141,9 +142,14 @@ public void testUnknownField() { } while (testQuery.toString().contains(marker)); testQuery.queryName(marker); // to find root query to add additional bogus field there String queryAsString = testQuery.toString().replace("\"" + marker + "\"", "\"" + marker + "\", \"bogusField\" : \"someValue\""); - ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(queryAsString)); - // we'd like to see the offending field name here - assertThat(e.getMessage(), containsString("bogusField")); + try { + parseQuery(queryAsString); + fail("expected ParsingException or XContentParsingException"); + } catch (ParsingException | XContentParseException e) { + // we'd like to see the offending field name here + assertThat(e.getMessage(), containsString("bogusField")); + } + } /** diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java index a3f74d25531e4..6aa987fc0e932 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java @@ -6,15 +6,16 @@ package org.elasticsearch.xpack.core.ml.datafeed; import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator; + import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.DeprecationHandler; -import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import 
org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.BoolQueryBuilder; @@ -155,9 +156,9 @@ protected DatafeedConfig doParseInstance(XContentParser parser) { public void testFutureConfigParse() throws IOException { XContentParser parser = XContentFactory.xContent(XContentType.JSON) .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, FUTURE_DATAFEED); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + XContentParseException e = expectThrows(XContentParseException.class, () -> DatafeedConfig.CONFIG_PARSER.apply(parser, null).build()); - assertEquals("[datafeed_config] unknown field [tomorrows_technology_today], parser not found", e.getMessage()); + assertEquals("[6:5] [datafeed_config] unknown field [tomorrows_technology_today], parser not found", e.getMessage()); } public void testFutureMetadataParse() throws IOException { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobTests.java index c1fae72af27f7..0f35abd4bcfc9 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobTests.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.core.ml.job.config; import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator; + import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.Version; import org.elasticsearch.common.bytes.BytesReference; @@ -17,6 +18,7 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.AbstractSerializingTestCase; @@ -78,9 +80,9 @@ protected Job doParseInstance(XContentParser parser) { public void testFutureConfigParse() throws IOException { XContentParser parser = XContentFactory.xContent(XContentType.JSON) .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, FUTURE_JOB); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + XContentParseException e = expectThrows(XContentParseException.class, () -> Job.CONFIG_PARSER.apply(parser, null).build()); - assertEquals("[job_details] unknown field [tomorrows_technology_today], parser not found", e.getMessage()); + assertEquals("[4:5] [job_details] unknown field [tomorrows_technology_today], parser not found", e.getMessage()); } public void testFutureMetadataParse() throws IOException { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultsParserTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultsParserTests.java index d4b6f4732b352..d2356a79677c3 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultsParserTests.java +++ 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/output/AutodetectResultsParserTests.java @@ -6,14 +6,13 @@ package org.elasticsearch.xpack.ml.job.process.autodetect.output; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.Quantiles; -import org.elasticsearch.xpack.ml.job.results.AutodetectResult; import org.elasticsearch.xpack.core.ml.job.results.Bucket; import org.elasticsearch.xpack.core.ml.job.results.BucketInfluencer; +import org.elasticsearch.xpack.ml.job.results.AutodetectResult; import java.io.ByteArrayInputStream; import java.io.IOException; @@ -389,9 +388,9 @@ public void testParse_GivenUnknownObject() throws ElasticsearchParseException, I String json = "[{\"unknown\":{\"id\": 18}}]"; InputStream inputStream = new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)); AutodetectResultsParser parser = new AutodetectResultsParser(Settings.EMPTY); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + XContentParseException e = expectThrows(XContentParseException.class, () -> parser.parseResults(inputStream).forEachRemaining(a -> {})); - assertEquals("[autodetect_result] unknown field [unknown], parser not found", e.getMessage()); + assertEquals("[1:3] [autodetect_result] unknown field [unknown], parser not found", e.getMessage()); } public void testParse_GivenArrayContainsAnotherArray() throws ElasticsearchParseException, IOException { diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateTests.java index 45c34e3465096..0e084af23e1fb 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/text/TextTemplateTests.java @@ -8,6 +8,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.script.Script; @@ -172,12 +173,8 @@ public void testParserInvalidUnknownScriptType() throws Exception { BytesReference bytes = BytesReference.bytes(builder); XContentParser parser = createParser(JsonXContent.jsonXContent, bytes); parser.nextToken(); - try { - TextTemplate.parse(parser); - fail("expected parse exception when script type is unknown"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), is("[script] unknown field [template], parser not found")); - } + XContentParseException ex = expectThrows(XContentParseException.class, () -> TextTemplate.parse(parser)); + assertEquals("[1:2] [script] unknown field [template], parser not found", ex.getMessage()); } public void testParserInvalidMissingText() throws Exception { @@ -188,12 +185,8 @@ public void testParserInvalidMissingText() throws Exception { BytesReference bytes = BytesReference.bytes(builder); XContentParser parser = createParser(JsonXContent.jsonXContent, bytes); 
parser.nextToken(); - try { - TextTemplate.parse(parser); - fail("expected parse exception when template text is missing"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("[script] unknown field [type], parser not found")); - } + XContentParseException ex = expectThrows(XContentParseException.class, () -> TextTemplate.parse(parser)); + assertEquals("[1:2] [script] unknown field [type], parser not found", ex.getMessage()); } public void testNullObject() throws Exception { From 409da09a81e569e1d4be993d5045f85364a0df89 Mon Sep 17 00:00:00 2001 From: lcawl Date: Mon, 4 Jun 2018 11:30:11 -0700 Subject: [PATCH 3/6] [DOCS] Re-adds custom realm --- .../authentication/custom-realm.asciidoc | 100 ++++++++++++++++++ 1 file changed, 100 insertions(+) create mode 100644 x-pack/docs/en/security/authentication/custom-realm.asciidoc diff --git a/x-pack/docs/en/security/authentication/custom-realm.asciidoc b/x-pack/docs/en/security/authentication/custom-realm.asciidoc new file mode 100644 index 0000000000000..0ae33d434a1f5 --- /dev/null +++ b/x-pack/docs/en/security/authentication/custom-realm.asciidoc @@ -0,0 +1,100 @@ +[role="xpack"] +[[custom-realms]] +=== Integrating with other authentication systems + +If you are using an authentication system that is not supported out-of-the-box +by {security}, you can create a custom realm to interact with it to authenticate +users. You implement a custom realm as an SPI loaded security extension +as part of an ordinary elasticsearch plugin. + +[[implementing-custom-realm]] +==== Implementing a custom realm + +Sample code that illustrates the structure and implementation of a custom realm +is provided in the https://github.com/elastic/shield-custom-realm-example[custom-realm-example] +repository on GitHub. You can use this code as a starting point for creating your +own realm. + +To create a custom realm, you need to: + +. Extend `org.elasticsearch.xpack.security.authc.Realm` to communicate with your + authentication system to authenticate users. +. Implement the `org.elasticsearch.xpack.security.authc.Realm.Factory` interface in + a class that will be used to create the custom realm. +. Extend `org.elasticsearch.xpack.security.authc.DefaultAuthenticationFailureHandler` to + handle authentication failures when using your custom realm. + +To package your custom realm as a plugin: + +. Implement an extension class for your realm that extends + `org.elasticsearch.xpack.core.security.SecurityExtension`. There you need to + override one or more of the following methods: ++ +[source,java] +---------------------------------------------------- +@Override +public Map getRealms() { + ... +} +---------------------------------------------------- ++ +The `getRealms` method is used to provide a map of type names to the `Factory` that +will be used to create the realm. ++ +[source,java] +---------------------------------------------------- +@Override +public AuthenticationFailureHandler getAuthenticationFailureHandler() { + ... +} +---------------------------------------------------- ++ +The `getAuthenticationFailureHandler` method is used to optionally provide a +custom `AuthenticationFailureHandler`, which will control how {security} responds +in certain authentication failure events. ++ +[source,java] +---------------------------------------------------- +@Override +public List getSettingsFilter() { + ... 
+} +---------------------------------------------------- ++ +The `Plugin#getSettingsFilter` method returns a list of setting names that should be +filtered from the settings APIs as they may contain sensitive credentials. Note this method is not +part of the `SecurityExtension` interface, it's available as part of the elasticsearch plugin main class. + +. Create a build configuration file for the plugin; Gradle is our recommendation. +. Create a `META-INF/services/org.elasticsearch.xpack.core.security.SecurityExtension` descriptor file for the + extension that contains the fully qualified class name of your `org.elasticsearch.xpack.core.security.SecurityExtension` implementation +. Bundle all in a single zip file. + +[[using-custom-realm]] +==== Using a custom realm to authenticate users + +To use a custom realm: + +. Install the realm extension on each node in the cluster. You run + `bin/elasticsearch-plugin` with the `install` sub-command and specify the URL + pointing to the zip file that contains the extension. For example: ++ +[source,shell] +---------------------------------------- +bin/elasticsearch-plugin install file:////my-realm-1.0.zip +---------------------------------------- + +. Add a realm configuration of the appropriate realm type to `elasticsearch.yml` +under the `xpack.security.authc.realms` namespace. The options you can set depend +on the settings exposed by the custom realm. At a minimum, you must set the realm +`type` to the type defined by the extension. If you are configuring multiple +realms, you should also explicitly set the `order` attribute to control the +order in which the realms are consulted during authentication. You should make +sure each configured realm has a distinct `order` setting. In the event that +two or more realms have the same `order`, they will be processed in realm `name` order. ++ +IMPORTANT: When you configure realms in `elasticsearch.yml`, only the +realms you specify are used for authentication. If you also want to use the +`native` or `file` realms, you must include them in the realm chain. + +. Restart Elasticsearch. From 024400bcb8b2de5b0b5c3648865eeded73df87ac Mon Sep 17 00:00:00 2001 From: lcawl Date: Mon, 4 Jun 2018 12:02:50 -0700 Subject: [PATCH 4/6] [DOCS] Removes redundant authorization pages --- .../authorization/built-in-roles.asciidoc | 114 --------------- .../security/authorization/overview.asciidoc | 74 ---------- .../authorization/privileges.asciidoc | 135 ------------------ 3 files changed, 323 deletions(-) delete mode 100644 x-pack/docs/en/security/authorization/built-in-roles.asciidoc delete mode 100644 x-pack/docs/en/security/authorization/overview.asciidoc delete mode 100644 x-pack/docs/en/security/authorization/privileges.asciidoc diff --git a/x-pack/docs/en/security/authorization/built-in-roles.asciidoc b/x-pack/docs/en/security/authorization/built-in-roles.asciidoc deleted file mode 100644 index f336393d81d3d..0000000000000 --- a/x-pack/docs/en/security/authorization/built-in-roles.asciidoc +++ /dev/null @@ -1,114 +0,0 @@ -[role="xpack"] -[[built-in-roles]] -=== Built-in roles - -{security} applies a default role to all users, including -<>. The default role enables users to access -the authenticate endpoint, change their own passwords, and get information about -themselves. - -{security} also provides a set of built-in roles you can explicitly assign -to users. These roles have a fixed set of privileges and cannot be updated. 
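For the `elasticsearch.yml` step in the custom realm documentation above, a minimal sketch of a realm entry; the realm name `my_custom_realm` and the `custom` type are hypothetical placeholders for whatever type the extension's `getRealms` registers:

[source,yaml]
----------------------------------------
xpack.security.authc.realms:
  my_custom_realm:
    type: custom   # hypothetical type registered by the extension's getRealms()
    order: 0
  native1:
    type: native   # keep the native realm in the chain if you still need it
    order: 1
----------------------------------------

This covers the minimum called out above: a realm `type`, plus an explicit `order` when more than one realm is configured.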
- -[[built-in-roles-ingest-user]] `ingest_admin` :: -Grants access to manage *all* index templates and *all* ingest pipeline configurations. -+ -NOTE: This role does *not* provide the ability to create indices; those privileges -must be defined in a separate role. - -[[built-in-roles-kibana-dashboard]] `kibana_dashboard_only_user` :: -Grants access to the {kib} Dashboard and read-only permissions on the `.kibana` -index. This role does not have access to editing tools in {kib}. For more -information, see -{kibana-ref}/xpack-dashboard-only-mode.html[{kib} Dashboard Only Mode]. - -[[built-in-roles-kibana-system]] `kibana_system` :: -Grants access necessary for the {kib} system user to read from and write to the -{kib} indices, manage index templates, and check the availability of the {es} cluster. -This role grants read access to the `.monitoring-*` indices and read and write access -to the `.reporting-*` indices. For more information, see -{kibana-ref}/using-kibana-with-security.html[Configuring Security in {kib}]. -+ -NOTE: This role should not be assigned to users as the granted permissions may -change between releases. - -[[built-in-roles-kibana-user]] `kibana_user`:: -Grants the minimum privileges required for any user of {kib}. This role grants -access to the {kib} indices and grants monitoring privileges for the cluster. - -[[built-in-roles-logstash-admin]] `logstash_admin` :: -Grants access to the `.logstash*` indices for managing configurations. - -[[built-in-roles-logstash-system]] `logstash_system` :: -Grants access necessary for the Logstash system user to send system-level data -(such as monitoring) to {es}. For more information, see -{logstash-ref}/ls-security.html[Configuring Security in Logstash]. -+ -NOTE: This role should not be assigned to users as the granted permissions may -change between releases. -+ -NOTE: This role does not provide access to the logstash indices and is not -suitable for use within a Logstash pipeline. - -[[built-in-roles-beats-system]] `beats_system` :: -Grants access necessary for the Beats system user to send system-level data -(such as monitoring) to {es}. -+ -NOTE: This role should not be assigned to users as the granted permissions may -change between releases. -+ -NOTE: This role does not provide access to the beats indices and is not -suitable for writing beats output to {es}. - -[[built-in-roles-ml-admin]] `machine_learning_admin`:: -Grants `manage_ml` cluster privileges and read access to the `.ml-*` indices. - -[[built-in-roles-ml-user]] `machine_learning_user`:: -Grants the minimum privileges required to view {xpackml} configuration, -status, and results. This role grants `monitor_ml` cluster privileges and -read access to the `.ml-notifications` and `.ml-anomalies*` indices, -which store {ml} results. - -[[built-in-roles-monitoring-user]] `monitoring_user`:: -Grants the minimum privileges required for any user of {monitoring} other than those -required to use {kib}. This role grants access to the monitoring indices and grants -privileges necessary for reading basic cluster information. Monitoring users should -also be assigned the `kibana_user` role. - -[[built-in-roles-remote-monitoring-agent]] `remote_monitoring_agent`:: -Grants the minimum privileges required for a remote monitoring agent to write data -into this cluster. - -[[built-in-roles-reporting-user]] `reporting_user`:: -Grants the specific privileges required for users of {reporting} other than those -required to use {kib}. This role grants access to the reporting indices. 
Reporting -users should also be assigned the `kibana_user` role and a role that grants them -access to the data that will be used to generate reports with. - -[[built-in-roles-superuser]] `superuser`:: -Grants full access to the cluster, including all indices and data. A user with -the `superuser` role can also manage users and roles and -<> any other user in the system. Due to the -permissive nature of this role, take extra care when assigning it to a user. - -[[built-in-roles-transport-client]] `transport_client`:: -Grants the privileges required to access the cluster through the Java Transport -Client. The Java Transport Client fetches information about the nodes in the -cluster using the _Node Liveness API_ and the _Cluster State API_ (when -sniffing is enabled). Assign your users this role if they use the -Transport Client. -+ -NOTE: Using the Transport Client effectively means the users are granted access -to the cluster state. This means users can view the metadata over all indices, -index templates, mappings, node and basically everything about the cluster. -However, this role does not grant permission to view the data in all indices. - -[[built-in-roles-watcher-admin]] `watcher_admin`:: -+ -Grants write access to the `.watches` index, read access to the watch history and -the triggered watches index and allows to execute all watcher actions. - -[[built-in-roles-watcher-user]] `watcher_user`:: -+ -Grants read access to the `.watches` index, the get watch action and the watcher -stats. \ No newline at end of file diff --git a/x-pack/docs/en/security/authorization/overview.asciidoc b/x-pack/docs/en/security/authorization/overview.asciidoc deleted file mode 100644 index e5b61e585c67c..0000000000000 --- a/x-pack/docs/en/security/authorization/overview.asciidoc +++ /dev/null @@ -1,74 +0,0 @@ -[role="xpack"] -[[authorization]] -== Configuring role-based access control - -{security} introduces the concept of _authorization_ to {es}. -Authorization is the process of determining whether the user behind an incoming -request is allowed to execute it. This process takes place once a request is -successfully authenticated and the user behind the request is identified. - -[[roles]] -[float] -=== Roles, permissions, and privileges - -The authorization process revolves around the following 5 constructs: - -_Secured Resource_:: -A resource to which access is restricted. Indices/aliases, documents, fields, -users and the {es} cluster itself are all examples of secured objects. - -_Privilege_:: -A named group representing one or more actions that a user may execute against a -secured resource. Each secured resource has its own sets of available privileges. -For example, `read` is an index privilege that represents all actions that enable -reading the indexed/stored data. For a complete list of available privileges -see <>. - -_Permissions_:: -A set of one or more privileges against a secured resource. Permissions can -easily be described in words, here are few examples: - * `read` privilege on the `products` index - * `manage` privilege on the cluster - * `run_as` privilege on `john` user - * `read` privilege on documents that match query X - * `read` privilege on `credit_card` field - -_Role_:: -A named sets of permissions - -_User_:: -The authenticated user. - -A secure {es} cluster manages the privileges of users through _roles_. -A role has a unique name and identifies a set of permissions that translate to -privileges on resources. A user can be associated with an arbitrary number of -roles. 
The total set of permissions that a user has is therefore defined by -union of the permissions in all its roles. - -As an administrator, you will need to define the roles that you want to use, -then assign users to the roles. These can be assigned to users in a number of -ways depending on the realms by which the users are authenticated. - -:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/security/authorization/built-in-roles.asciidoc -include::built-in-roles.asciidoc[] - -:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/security/authorization/managing-roles.asciidoc -include::managing-roles.asciidoc[] - -:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/security/authorization/privileges.asciidoc -include::privileges.asciidoc[] - -:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/security/authorization/alias-privileges.asciidoc -include::alias-privileges.asciidoc[] - -:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/security/authorization/mapping-roles.asciidoc -include::mapping-roles.asciidoc[] - -:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/security/authorization/field-and-document-access-control.asciidoc -include::field-and-document-access-control.asciidoc[] - -:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/security/authorization/run-as-privilege.asciidoc -include::run-as-privilege.asciidoc[] - -:edit_url: https://github.com/elastic/elasticsearch/edit/{branch}/x-pack/docs/en/security/authorization/custom-roles-provider.asciidoc -include::custom-roles-provider.asciidoc[] diff --git a/x-pack/docs/en/security/authorization/privileges.asciidoc b/x-pack/docs/en/security/authorization/privileges.asciidoc deleted file mode 100644 index e5b22d3674a76..0000000000000 --- a/x-pack/docs/en/security/authorization/privileges.asciidoc +++ /dev/null @@ -1,135 +0,0 @@ -[role="xpack"] -[[security-privileges]] -=== Security privileges - -This section lists the privileges that you can assign to a role. - -[[privileges-list-cluster]] -==== Cluster privileges - -[horizontal] -`all`:: -All cluster administration operations, like snapshotting, node shutdown/restart, -settings update, rerouting, or managing users and roles. - -`monitor`:: -All cluster read-only operations, like cluster health and state, hot threads, -node info, node and cluster stats, and pending cluster tasks. - -`monitor_ml`:: -All read only {ml} operations, such as getting information about {dfeeds}, jobs, -model snapshots, or results. - -`monitor_watcher`:: -All read only watcher operations, such as getting a watch and watcher stats. - -`manage`:: -Builds on `monitor` and adds cluster operations that change values in the cluster. -This includes snapshotting, updating settings, and rerouting. It also includes -obtaining snapshot and restore status. This privilege does not include the -ability to manage security. - -`manage_index_templates`:: -All operations on index templates. - -`manage_ml`:: -All {ml} operations, such as creating and deleting {dfeeds}, jobs, and model -snapshots. -+ --- -NOTE: {dfeeds-cap} that were created prior to version 6.2 or created when {security} -was disabled run as a system user with elevated privileges, including permission -to read all indices. Newer {dfeeds} run with the security roles of the user who created -or updated them. 
- --- - -`manage_pipeline`:: -All operations on ingest pipelines. - -`manage_security`:: -All security related operations such as CRUD operations on users and roles and -cache clearing. - -`manage_watcher`:: -All watcher operations, such as putting watches, executing, activate or acknowledging. -+ --- -NOTE: Watches that were created prior to version 6.1 or created when {security} -was disabled run as a system user with elevated privileges, including permission -to read and write all indices. Newer watches run with the security roles of the user -who created or updated them. - --- - -`transport_client`:: -All privileges necessary for a transport client to connect. Required by the remote -cluster to enable <>. - -[[privileges-list-indices]] -==== Indices privileges - -[horizontal] -`all`:: -Any action on an index - -`monitor`:: -All actions that are required for monitoring (recovery, segments info, index -stats and status). - -`manage`:: -All `monitor` privileges plus index administration (aliases, analyze, cache clear, -close, delete, exists, flush, mapping, open, force merge, refresh, settings, -search shards, templates, validate). - -`view_index_metadata`:: -Read-only access to index metadata (aliases, aliases exists, get index, exists, field mappings, -mappings, search shards, type exists, validate, warmers, settings). This -privilege is primarily available for use by {kib} users. - -`read`:: -Read only access to actions (count, explain, get, mget, get indexed scripts, -more like this, multi percolate/search/termvector, percolate, scroll, -clear_scroll, search, suggest, tv). - -`read_cross_cluster`:: -Read only access to the search action from a <>. - -`index`:: -Privilege to index and update documents. Also grants access to the update -mapping action. - -`create`:: -Privilege to index documents. Also grants access to the update mapping -action. -+ --- -NOTE: This privilege does not restrict the index operation to the creation -of documents but instead restricts API use to the index API. The index API allows a user -to overwrite a previously indexed document. - --- - -`delete`:: -Privilege to delete documents. - -`write`:: -Privilege to perform all write operations to documents, which includes the -permission to index, update, and delete documents as well as performing bulk -operations. Also grants access to the update mapping action. - -`delete_index`:: -Privilege to delete an index. - -`create_index`:: -Privilege to create an index. A create index request may contain aliases to be -added to the index once created. In that case the request requires the `manage` -privilege as well, on both the index and the aliases names. - -==== Run as privilege - -The `run_as` permission enables an authenticated user to submit requests on -behalf of another user. The value can be a user name or a comma-separated list -of user names. (You can also specify users as an array of strings or a YAML -sequence.) For more information, see -<>. From f94a75778c3ab833a2ce15eec7a44f187aa9ccee Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Mon, 4 Jun 2018 21:48:56 +0200 Subject: [PATCH 5/6] Fix index prefixes to work with span_multi (#31066) * Fix index prefixes to work with span_multi Text fields that use `index_prefixes` can rewrite `prefix` queries into `term` queries internally. This commit fix the handling of this rewriting in the `span_multi` query. This change also copies the index options of the text field into the prefix field in order to be able to run positional queries. 
This is mandatory for `span_multi` to work but this could also be useful to optimize `match_phrase_prefix` queries in a follow up. Note that this change can only be done on indices created after 6.3 since we set the index options to doc only in this version. Fixes #31056 --- .../query-dsl/span-multi-term-query.asciidoc | 8 + .../test/search/190_index_prefix_search.yml | 31 ++- .../index/mapper/TextFieldMapper.java | 12 +- .../query/SpanMultiTermQueryBuilder.java | 77 ++++++-- .../index/mapper/TextFieldMapperTests.java | 36 +++- .../query/SpanMultiTermQueryBuilderTests.java | 186 ++++++++++++------ 6 files changed, 277 insertions(+), 73 deletions(-) diff --git a/docs/reference/query-dsl/span-multi-term-query.asciidoc b/docs/reference/query-dsl/span-multi-term-query.asciidoc index b41906b565070..ff7af83451be1 100644 --- a/docs/reference/query-dsl/span-multi-term-query.asciidoc +++ b/docs/reference/query-dsl/span-multi-term-query.asciidoc @@ -36,3 +36,11 @@ GET /_search } -------------------------------------------------- // CONSOLE + +WARNING: By default `span_multi queries are rewritten to a `span_or` query +containing **all** the expanded terms. This can be expensive if the number of expanded +terms is large. To avoid an unbounded expansion you can set the +<> of the multi term query to `top_terms_*` +rewrite. Or, if you use `span_multi` on `prefix` query only, you can +activate the <> field option of the `text` field instead. This will +rewrite any prefix query on the field to a a single term query that matches the indexed prefix. \ No newline at end of file diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/190_index_prefix_search.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/190_index_prefix_search.yml index 963bed70750a5..dfe0b6825cdc5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/190_index_prefix_search.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/190_index_prefix_search.yml @@ -1,8 +1,8 @@ ---- -"search with index prefixes": +setup: - skip: - version: " - 6.99.99" + version: " - 6.2.99" reason: index_prefixes is only available as of 6.3.0 + - do: indices.create: index: test @@ -27,6 +27,11 @@ indices.refresh: index: [test] +--- +"search with index prefixes": + - skip: + version: " - 6.2.99" + reason: index_prefixes is only available as of 6.3.0 - do: search: index: test @@ -57,3 +62,23 @@ - match: {hits.total: 1} - match: {hits.hits.0._score: 1} + +--- +"search index prefixes with span_multi": + - skip: + version: " - 6.99.99" + reason: span_multi throws an exception with prefix fields on < versions + + - do: + search: + index: test + body: + query: + span_near: + clauses: [ + { "span_term": { "text": "short" } }, + { "span_multi": { "match": { "prefix": { "text": "word" } } } } + ] + + - match: {hits.total: 1} + diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index d2ba5fbc0c2d1..29f1cbb721feb 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -40,6 +40,7 @@ import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; +import org.elasticsearch.Version; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.logging.ESLoggerFactory; import 
org.elasticsearch.common.settings.Settings; @@ -175,7 +176,16 @@ public TextFieldMapper build(BuilderContext context) { if (fieldType().isSearchable() == false) { throw new IllegalArgumentException("Cannot set index_prefixes on unindexed field [" + name() + "]"); } - if (fieldType.indexOptions() == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) { + // Copy the index options of the main field to allow phrase queries on + // the prefix field. + if (context.indexCreatedVersion().onOrAfter(Version.V_6_4_0)) { + if (fieldType.indexOptions() == IndexOptions.DOCS_AND_FREQS) { + // frequencies are not needed because prefix queries always use a constant score + prefixFieldType.setIndexOptions(IndexOptions.DOCS); + } else { + prefixFieldType.setIndexOptions(fieldType.indexOptions()); + } + } else if (fieldType.indexOptions() == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) { prefixFieldType.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS); } if (fieldType.storeTermVectorOffsets()) { diff --git a/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java index 4f102b58616f6..b574cadc423b4 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java @@ -18,18 +18,28 @@ */ package org.elasticsearch.index.query; +import org.apache.lucene.index.Term; import org.apache.lucene.search.BoostQuery; +import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.MultiTermQuery; +import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.spans.FieldMaskingSpanQuery; import org.apache.lucene.search.spans.SpanBoostQuery; import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; import org.apache.lucene.search.spans.SpanQuery; +import org.apache.lucene.search.spans.SpanTermQuery; +import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.mapper.TextFieldMapper; +import org.elasticsearch.index.query.support.QueryParsers; import java.io.IOException; import java.util.Objects; @@ -124,22 +134,67 @@ public static SpanMultiTermQueryBuilder fromXContent(XContentParser parser) thro protected Query doToQuery(QueryShardContext context) throws IOException { Query subQuery = multiTermQueryBuilder.toQuery(context); float boost = AbstractQueryBuilder.DEFAULT_BOOST; - if (subQuery instanceof BoostQuery) { - BoostQuery boostQuery = (BoostQuery) subQuery; - subQuery = boostQuery.getQuery(); - boost = boostQuery.getBoost(); + while (true) { + if (subQuery instanceof ConstantScoreQuery) { + subQuery = ((ConstantScoreQuery) subQuery).getQuery(); + boost = 1; + } else if (subQuery instanceof BoostQuery) { + BoostQuery boostQuery = (BoostQuery) subQuery; + subQuery = boostQuery.getQuery(); + boost *= boostQuery.getBoost(); + } else { + break; + } } - //no MultiTermQuery extends SpanQuery, so SpanBoostQuery is not supported here + final SpanQuery spanQuery; 
+ // no MultiTermQuery extends SpanQuery, so SpanBoostQuery is not supported here assert subQuery instanceof SpanBoostQuery == false; - if (subQuery instanceof MultiTermQuery == false) { - throw new UnsupportedOperationException("unsupported inner query, should be " + MultiTermQuery.class.getName() +" but was " - + subQuery.getClass().getName()); + if (subQuery instanceof TermQuery) { + /** + * Text fields that index prefixes can rewrite prefix queries + * into term queries. See {@link TextFieldMapper.TextFieldType#prefixQuery}. + */ + if (multiTermQueryBuilder.getClass() != PrefixQueryBuilder.class) { + throw new UnsupportedOperationException("unsupported inner query generated by " + + multiTermQueryBuilder.getClass().getName() + ", should be " + MultiTermQuery.class.getName() + + " but was " + subQuery.getClass().getName()); + } + if (context.getIndexSettings().getIndexVersionCreated().before(Version.V_6_4_0)) { + /** + * Indices created in this version do not index positions on the prefix field + * so we cannot use it to match positional queries. Instead, we explicitly create the prefix + * query on the main field to avoid the rewrite. + */ + PrefixQueryBuilder prefixBuilder = (PrefixQueryBuilder) multiTermQueryBuilder; + PrefixQuery prefixQuery = new PrefixQuery(new Term(prefixBuilder.fieldName(), prefixBuilder.value())); + if (prefixBuilder.rewrite() != null) { + MultiTermQuery.RewriteMethod rewriteMethod = + QueryParsers.parseRewriteMethod(prefixBuilder.rewrite(), null, LoggingDeprecationHandler.INSTANCE); + prefixQuery.setRewriteMethod(rewriteMethod); + } + spanQuery = new SpanMultiTermQueryWrapper<>(prefixQuery); + } else { + String origFieldName = ((PrefixQueryBuilder) multiTermQueryBuilder).fieldName(); + SpanTermQuery spanTermQuery = new SpanTermQuery(((TermQuery) subQuery).getTerm()); + /** + * Prefixes are indexed in a different field so we mask the term query with the original field + * name. This is required because span_near and span_or queries don't work across different field. + * The masking is safe because the prefix field is indexed using the same content than the original field + * and the prefix analyzer preserves positions. 
+ */ + spanQuery = new FieldMaskingSpanQuery(spanTermQuery, origFieldName); + } + } else { + if (subQuery instanceof MultiTermQuery == false) { + throw new UnsupportedOperationException("unsupported inner query, should be " + + MultiTermQuery.class.getName() + " but was " + subQuery.getClass().getName()); + } + spanQuery = new SpanMultiTermQueryWrapper<>((MultiTermQuery) subQuery); } - SpanQuery wrapper = new SpanMultiTermQueryWrapper<>((MultiTermQuery) subQuery); if (boost != AbstractQueryBuilder.DEFAULT_BOOST) { - wrapper = new SpanBoostQuery(wrapper, boost); + return new SpanBoostQuery(spanQuery, boost); } - return wrapper; + return spanQuery; } @Override diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index b7da270a15ab3..ed8274fad05da 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -37,6 +37,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; @@ -638,7 +639,7 @@ public void testIndexPrefixIndexTypes() throws IOException { .field("type", "text") .field("analyzer", "english") .startObject("index_prefixes").endObject() - .field("index_options", "positions") + .field("index_options", "freqs") .endObject().endObject().endObject().endObject()); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); @@ -649,6 +650,27 @@ public void testIndexPrefixIndexTypes() throws IOException { assertFalse(ft.storeTermVectors()); } + { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field") + .field("type", "text") + .field("analyzer", "english") + .startObject("index_prefixes").endObject() + .field("index_options", "positions") + .endObject().endObject().endObject().endObject()); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + FieldMapper prefix = mapper.mappers().getMapper("field._index_prefix"); + FieldType ft = prefix.fieldType; + if (indexService.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_4_0)) { + assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, ft.indexOptions()); + } else { + assertEquals(IndexOptions.DOCS, ft.indexOptions()); + } + assertFalse(ft.storeTermVectors()); + } + { String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") @@ -662,7 +684,11 @@ public void testIndexPrefixIndexTypes() throws IOException { FieldMapper prefix = mapper.mappers().getMapper("field._index_prefix"); FieldType ft = prefix.fieldType; - assertEquals(IndexOptions.DOCS, ft.indexOptions()); + if (indexService.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_4_0)) { + assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, ft.indexOptions()); + } else { + assertEquals(IndexOptions.DOCS, ft.indexOptions()); + } assertTrue(ft.storeTermVectorOffsets()); } @@ -679,7 +705,11 @@ public void testIndexPrefixIndexTypes() throws IOException { FieldMapper prefix = mapper.mappers().getMapper("field._index_prefix"); FieldType ft = 
prefix.fieldType; - assertEquals(IndexOptions.DOCS, ft.indexOptions()); + if (indexService.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_4_0)) { + assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, ft.indexOptions()); + } else { + assertEquals(IndexOptions.DOCS, ft.indexOptions()); + } assertFalse(ft.storeTermVectorOffsets()); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java index 50371ced0e02e..b778168235977 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilderTests.java @@ -22,24 +22,46 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.MultiTermQuery; +import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.spans.FieldMaskingSpanQuery; import org.apache.lucene.search.spans.SpanBoostQuery; import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper; import org.apache.lucene.search.spans.SpanQuery; +import org.apache.lucene.search.spans.SpanTermQuery; +import org.elasticsearch.Version; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; import java.io.IOException; +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.CoreMatchers.either; public class SpanMultiTermQueryBuilderTests extends AbstractQueryTestCase { + @Override + protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { + XContentBuilder mapping = jsonBuilder().startObject().startObject("_doc").startObject("properties") + .startObject("prefix_field") + .field("type", "text") + .startObject("index_prefixes").endObject() + .endObject() + .endObject().endObject().endObject(); + + mapperService.merge("_doc", + new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE); + } + @Override protected SpanMultiTermQueryBuilder doCreateTestQueryBuilder() { MultiTermQueryBuilder multiTermQueryBuilder = RandomQueryBuilder.createMultiTermQuery(random()); @@ -62,14 +84,67 @@ protected void doAssertLuceneQuery(SpanMultiTermQueryBuilder queryBuilder, Query BoostQuery boostQuery = (BoostQuery) multiTermQuery; multiTermQuery = boostQuery.getQuery(); } - assertThat(multiTermQuery, instanceOf(MultiTermQuery.class)); - assertThat(spanMultiTermQueryWrapper.getWrappedQuery(), equalTo(new SpanMultiTermQueryWrapper<>((MultiTermQuery)multiTermQuery).getWrappedQuery())); + assertThat(multiTermQuery, either(instanceOf(MultiTermQuery.class)).or(instanceOf(TermQuery.class))); + assertThat(spanMultiTermQueryWrapper.getWrappedQuery(), + equalTo(new 
SpanMultiTermQueryWrapper<>((MultiTermQuery)multiTermQuery).getWrappedQuery())); } public void testIllegalArgument() { expectThrows(IllegalArgumentException.class, () -> new SpanMultiTermQueryBuilder((MultiTermQueryBuilder) null)); } + private static class TermMultiTermQueryBuilder implements MultiTermQueryBuilder { + @Override + public Query toQuery(QueryShardContext context) throws IOException { + return new TermQuery(new Term("foo", "bar")); + } + + @Override + public Query toFilter(QueryShardContext context) throws IOException { + return toQuery(context); + } + + @Override + public QueryBuilder queryName(String queryName) { + return this; + } + + @Override + public String queryName() { + return "foo"; + } + + @Override + public float boost() { + return 1f; + } + + @Override + public QueryBuilder boost(float boost) { + return this; + } + + @Override + public String getName() { + return "foo"; + } + + @Override + public String getWriteableName() { + return "foo"; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + + } + } + /** * test checks that we throw an {@link UnsupportedOperationException} if the query wrapped * by {@link SpanMultiTermQueryBuilder} does not generate a lucene {@link MultiTermQuery}. @@ -77,69 +152,70 @@ public void testIllegalArgument() { * to a date. */ public void testUnsupportedInnerQueryType() throws IOException { - MultiTermQueryBuilder query = new MultiTermQueryBuilder() { - @Override - public Query toQuery(QueryShardContext context) throws IOException { - return new TermQuery(new Term("foo", "bar")); - } - - @Override - public Query toFilter(QueryShardContext context) throws IOException { - return toQuery(context); - } - - @Override - public QueryBuilder queryName(String queryName) { - return this; - } - - @Override - public String queryName() { - return "foo"; - } - - @Override - public float boost() { - return 1f; - } - - @Override - public QueryBuilder boost(float boost) { - return this; - } - - @Override - public String getName() { - return "foo"; - } - - @Override - public String getWriteableName() { - return "foo"; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return builder; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - - } - }; + MultiTermQueryBuilder query = new TermMultiTermQueryBuilder(); SpanMultiTermQueryBuilder spamMultiTermQuery = new SpanMultiTermQueryBuilder(query); UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class, () -> spamMultiTermQuery.toQuery(createShardContext())); - assertThat(e.getMessage(), containsString("unsupported inner query, should be " + MultiTermQuery.class.getName())); + assertThat(e.getMessage(), containsString("unsupported inner query generated by " + TermMultiTermQueryBuilder.class.getName() + + ", should be " + MultiTermQuery.class.getName())); } public void testToQueryInnerSpanMultiTerm() throws IOException { + Query query = new SpanOrQueryBuilder(createTestQueryBuilder()).toQuery(createShardContext()); //verify that the result is still a span query, despite the boost that might get set (SpanBoostQuery rather than BoostQuery) assertThat(query, instanceOf(SpanQuery.class)); } + public void testToQueryInnerTermQuery() throws IOException { + final QueryShardContext context = createShardContext(); + if 
(context.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_4_0)) { + Query query = new SpanMultiTermQueryBuilder(new PrefixQueryBuilder("prefix_field", "foo")) + .toQuery(context); + assertThat(query, instanceOf(FieldMaskingSpanQuery.class)); + FieldMaskingSpanQuery fieldSpanQuery = (FieldMaskingSpanQuery) query; + assertThat(fieldSpanQuery.getField(), equalTo("prefix_field")); + assertThat(fieldSpanQuery.getMaskedQuery(), instanceOf(SpanTermQuery.class)); + SpanTermQuery spanTermQuery = (SpanTermQuery) fieldSpanQuery.getMaskedQuery(); + assertThat(spanTermQuery.getTerm().text(), equalTo("foo")); + + query = new SpanMultiTermQueryBuilder(new PrefixQueryBuilder("prefix_field", "foo")) + .boost(2.0f) + .toQuery(context); + assertThat(query, instanceOf(SpanBoostQuery.class)); + SpanBoostQuery boostQuery = (SpanBoostQuery) query; + assertThat(boostQuery.getBoost(), equalTo(2.0f)); + assertThat(boostQuery.getQuery(), instanceOf(FieldMaskingSpanQuery.class)); + fieldSpanQuery = (FieldMaskingSpanQuery) boostQuery.getQuery(); + assertThat(fieldSpanQuery.getField(), equalTo("prefix_field")); + assertThat(fieldSpanQuery.getMaskedQuery(), instanceOf(SpanTermQuery.class)); + spanTermQuery = (SpanTermQuery) fieldSpanQuery.getMaskedQuery(); + assertThat(spanTermQuery.getTerm().text(), equalTo("foo")); + } else { + Query query = new SpanMultiTermQueryBuilder(new PrefixQueryBuilder("prefix_field", "foo")) + .toQuery(context); + assertThat(query, instanceOf(SpanMultiTermQueryWrapper.class)); + SpanMultiTermQueryWrapper wrapper = (SpanMultiTermQueryWrapper) query; + assertThat(wrapper.getWrappedQuery(), instanceOf(PrefixQuery.class)); + PrefixQuery prefixQuery = (PrefixQuery) wrapper.getWrappedQuery(); + assertThat(prefixQuery.getField(), equalTo("prefix_field")); + assertThat(prefixQuery.getPrefix().text(), equalTo("foo")); + + query = new SpanMultiTermQueryBuilder(new PrefixQueryBuilder("prefix_field", "foo")) + .boost(2.0f) + .toQuery(context); + assertThat(query, instanceOf(SpanBoostQuery.class)); + SpanBoostQuery boostQuery = (SpanBoostQuery) query; + assertThat(boostQuery.getBoost(), equalTo(2.0f)); + assertThat(boostQuery.getQuery(), instanceOf(SpanMultiTermQueryWrapper.class)); + wrapper = (SpanMultiTermQueryWrapper) boostQuery.getQuery(); + assertThat(wrapper.getWrappedQuery(), instanceOf(PrefixQuery.class)); + prefixQuery = (PrefixQuery) wrapper.getWrappedQuery(); + assertThat(prefixQuery.getField(), equalTo("prefix_field")); + assertThat(prefixQuery.getPrefix().text(), equalTo("foo")); + } + } + public void testFromJson() throws IOException { String json = "{\n" + From b22a055bcffa3a3667b3aae6a3420442e889ad1c Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Mon, 4 Jun 2018 14:31:08 -0600 Subject: [PATCH 6/6] Add get mappings support to high-level rest client (#30889) This adds support for the get mappings API to the high level rest client. 
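A hedged usage sketch of the new synchronous method; `client` is assumed to be an existing `RestHighLevelClient`, and the index and type names are illustrative:

[source,java]
----------------------------------------------------
// hedged sketch: "client", the index name, and the type name are assumptions
GetMappingsRequest request = new GetMappingsRequest()
        .indices("my-index")
        .types("_doc");
GetMappingsResponse response = client.indices().getMappings(request);
// mappings are keyed by index name and then by type (java.util.Map assumed imported)
Map<String, Object> docMapping =
        response.getMappings().get("my-index").get("_doc").sourceAsMap();
----------------------------------------------------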
Relates to #27205 --- .../elasticsearch/client/IndicesClient.java | 28 +++- .../client/RequestConverters.java | 14 ++ .../elasticsearch/client/IndicesClientIT.java | 39 +++++ .../client/RequestConvertersTests.java | 42 +++++ .../IndicesClientDocumentationIT.java | 153 ++++++++++++++++-- .../high-level/indices/get_mappings.asciidoc | 80 +++++++++ .../high-level/supported-apis.asciidoc | 1 + .../common/xcontent/XContentParserTests.java | 37 +++++ .../api/indices.get_mapping.json | 4 + .../mapping/get/GetMappingsResponse.java | 108 ++++++++++++- .../admin/indices/RestGetMappingAction.java | 47 +----- .../mapping/get/GetMappingsResponseTests.java | 153 ++++++++++++++++++ 12 files changed, 653 insertions(+), 53 deletions(-) create mode 100644 docs/java-rest/high-level/indices/get_mappings.asciidoc create mode 100644 server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponseTests.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java index b08b045d287c0..d51a92ea00fc5 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java @@ -20,6 +20,7 @@ package org.elasticsearch.client; import org.apache.http.Header; +import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesResponse; @@ -38,6 +39,8 @@ import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; @@ -134,11 +137,34 @@ public PutMappingResponse putMapping(PutMappingRequest putMappingRequest, Header * Put Mapping API on elastic.co */ public void putMappingAsync(PutMappingRequest putMappingRequest, ActionListener listener, - Header... headers) { + Header... headers) { restHighLevelClient.performRequestAsyncAndParseEntity(putMappingRequest, RequestConverters::putMapping, PutMappingResponse::fromXContent, listener, emptySet(), headers); } + /** + * Retrieves the mappings on an index or indices using the Get Mapping API + *

+ * See + * Get Mapping API on elastic.co + */ + public GetMappingsResponse getMappings(GetMappingsRequest getMappingsRequest, Header... headers) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(getMappingsRequest, RequestConverters::getMappings, + GetMappingsResponse::fromXContent, emptySet(), headers); + } + + /** + * Asynchronously retrieves the mappings on an index or indices using the Get Mapping API + *

+ * See + * Get Mapping API on elastic.co + */ + public void getMappingsAsync(GetMappingsRequest getMappingsRequest, ActionListener<GetMappingsResponse> listener, + Header... headers) { + restHighLevelClient.performRequestAsyncAndParseEntity(getMappingsRequest, RequestConverters::getMappings, + GetMappingsResponse::fromXContent, listener, emptySet(), headers); + } + /** * Updates aliases using the Index Aliases API *

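A hedged sketch of the asynchronous variant added to `IndicesClient` above; again, `client` and the index name are assumptions for illustration:

[source,java]
----------------------------------------------------
// hedged sketch: "client" is an existing RestHighLevelClient; the index name is illustrative
GetMappingsRequest request = new GetMappingsRequest().indices("my-index");
client.indices().getMappingsAsync(request, new ActionListener<GetMappingsResponse>() {
    @Override
    public void onResponse(GetMappingsResponse response) {
        // inspect response.getMappings() on success
    }

    @Override
    public void onFailure(Exception e) {
        // transport or parsing failures end up here
    }
});
----------------------------------------------------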
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index 6c8bb845259e6..e5a45e19fe0d3 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -45,6 +45,7 @@ import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; @@ -195,6 +196,19 @@ static Request putMapping(PutMappingRequest putMappingRequest) throws IOExceptio return request; } + static Request getMappings(GetMappingsRequest getMappingsRequest) throws IOException { + String[] indices = getMappingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getMappingsRequest.indices(); + String[] types = getMappingsRequest.types() == null ? Strings.EMPTY_ARRAY : getMappingsRequest.types(); + + Request request = new Request(HttpGet.METHOD_NAME, endpoint(indices, "_mapping", types)); + + Params parameters = new Params(request); + parameters.withMasterTimeout(getMappingsRequest.masterNodeTimeout()); + parameters.withIndicesOptions(getMappingsRequest.indicesOptions()); + parameters.withLocal(getMappingsRequest.local()); + return request; + } + static Request refresh(RefreshRequest refreshRequest) { String[] indices = refreshRequest.indices() == null ? 
Strings.EMPTY_ARRAY : refreshRequest.indices(); Request request = new Request(HttpPost.METHOD_NAME, endpoint(indices, "_refresh")); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java index 448ff0138d3ac..55357e06ab299 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java @@ -42,6 +42,8 @@ import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; @@ -79,6 +81,7 @@ import java.io.IOException; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.Map; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; @@ -328,6 +331,42 @@ public void testPutMapping() throws IOException { } } + public void testGetMapping() throws IOException { + String indexName = "test"; + createIndex(indexName, Settings.EMPTY); + + PutMappingRequest putMappingRequest = new PutMappingRequest(indexName); + putMappingRequest.type("_doc"); + XContentBuilder mappingBuilder = JsonXContent.contentBuilder(); + mappingBuilder.startObject().startObject("properties").startObject("field"); + mappingBuilder.field("type", "text"); + mappingBuilder.endObject().endObject().endObject(); + putMappingRequest.source(mappingBuilder); + + PutMappingResponse putMappingResponse = + execute(putMappingRequest, highLevelClient().indices()::putMapping, highLevelClient().indices()::putMappingAsync); + assertTrue(putMappingResponse.isAcknowledged()); + + Map getIndexResponse = getAsMap(indexName); + assertEquals("text", XContentMapValues.extractValue(indexName + ".mappings._doc.properties.field.type", getIndexResponse)); + + GetMappingsRequest request = new GetMappingsRequest() + .indices(indexName) + .types("_doc"); + + GetMappingsResponse getMappingsResponse = + execute(request, highLevelClient().indices()::getMappings, highLevelClient().indices()::getMappingsAsync); + + Map mappings = getMappingsResponse.getMappings().get(indexName).get("_doc").sourceAsMap(); + Map type = new HashMap<>(); + type.put("type", "text"); + Map field = new HashMap<>(); + field.put("field", type); + Map expected = new HashMap<>(); + expected.put("properties", field); + assertThat(mappings, equalTo(expected)); + } + public void testDeleteIndex() throws IOException { { // Delete index if exists diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index f61d79b8d42e4..ee372e255e70a 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -47,6 +47,7 @@ import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest; 
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; @@ -403,6 +404,47 @@ public void testPutMapping() throws IOException { assertToXContentBody(putMappingRequest, request.getEntity()); } + public void testGetMapping() throws IOException { + GetMappingsRequest getMappingRequest = new GetMappingsRequest(); + + String[] indices = Strings.EMPTY_ARRAY; + if (randomBoolean()) { + indices = randomIndicesNames(0, 5); + getMappingRequest.indices(indices); + } else if (randomBoolean()) { + getMappingRequest.indices((String[]) null); + } + + String type = null; + if (randomBoolean()) { + type = randomAlphaOfLengthBetween(3, 10); + getMappingRequest.types(type); + } else if (randomBoolean()) { + getMappingRequest.types((String[]) null); + } + + Map expectedParams = new HashMap<>(); + + setRandomIndicesOptions(getMappingRequest::indicesOptions, getMappingRequest::indicesOptions, expectedParams); + setRandomMasterTimeout(getMappingRequest, expectedParams); + setRandomLocal(getMappingRequest, expectedParams); + + Request request = RequestConverters.getMappings(getMappingRequest); + StringJoiner endpoint = new StringJoiner("/", "/", ""); + String index = String.join(",", indices); + if (Strings.hasLength(index)) { + endpoint.add(index); + } + endpoint.add("_mapping"); + if (type != null) { + endpoint.add(type); + } + assertThat(endpoint.toString(), equalTo(request.getEndpoint())); + + assertThat(expectedParams, equalTo(request.getParameters())); + assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod())); + } + public void testDeleteIndex() { String[] indices = randomIndicesNames(0, 5); DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(indices); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java index fd733b83d5ace..c3decd93a174c 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java @@ -41,6 +41,8 @@ import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; @@ -64,6 +66,8 @@ import org.elasticsearch.client.ESRestHighLevelClientTestCase; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.client.SyncedFlushResponse; +import org.elasticsearch.cluster.metadata.MappingMetaData; +import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -81,6 +85,8 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import static org.hamcrest.Matchers.equalTo; + /** * This class is used to generate the Java Indices API documentation. * You need to wrap your code between two tags like: @@ -532,17 +538,17 @@ public void testPutMappingAsync() throws Exception { // tag::put-mapping-execute-listener ActionListener listener = - new ActionListener() { - @Override - public void onResponse(PutMappingResponse putMappingResponse) { - // <1> - } + new ActionListener() { + @Override + public void onResponse(PutMappingResponse putMappingResponse) { + // <1> + } - @Override - public void onFailure(Exception e) { - // <2> - } - }; + @Override + public void onFailure(Exception e) { + // <2> + } + }; // end::put-mapping-execute-listener // Replace the empty listener by a blocking listener in test @@ -557,6 +563,133 @@ public void onFailure(Exception e) { } } + public void testGetMapping() throws IOException { + RestHighLevelClient client = highLevelClient(); + + { + CreateIndexResponse createIndexResponse = client.indices().create(new CreateIndexRequest("twitter")); + assertTrue(createIndexResponse.isAcknowledged()); + PutMappingRequest request = new PutMappingRequest("twitter"); + request.type("tweet"); + request.source( + "{\n" + + " \"properties\": {\n" + + " \"message\": {\n" + + " \"type\": \"text\"\n" + + " }\n" + + " }\n" + + "}", // <1> + XContentType.JSON); + PutMappingResponse putMappingResponse = client.indices().putMapping(request); + assertTrue(putMappingResponse.isAcknowledged()); + } + + { + // tag::get-mapping-request + GetMappingsRequest request = new GetMappingsRequest(); // <1> + request.indices("twitter"); // <2> + request.types("tweet"); // <3> + // end::get-mapping-request + + // tag::get-mapping-request-masterTimeout + request.masterNodeTimeout(TimeValue.timeValueMinutes(1)); // <1> + request.masterNodeTimeout("1m"); // <2> + // end::get-mapping-request-masterTimeout + + // tag::get-mapping-request-indicesOptions + request.indicesOptions(IndicesOptions.lenientExpandOpen()); // <1> + // end::get-mapping-request-indicesOptions + + // tag::get-mapping-execute + GetMappingsResponse getMappingResponse = client.indices().getMappings(request); + // end::get-mapping-execute + + // tag::get-mapping-response + ImmutableOpenMap> allMappings = getMappingResponse.mappings(); // <1> + MappingMetaData typeMapping = allMappings.get("twitter").get("tweet"); // <2> + Map tweetMapping = typeMapping.sourceAsMap(); // <3> + // end::get-mapping-response + + Map type = new HashMap<>(); + type.put("type", "text"); + Map field = new HashMap<>(); + field.put("message", type); + Map expected = new HashMap<>(); + expected.put("properties", field); + assertThat(tweetMapping, equalTo(expected)); + } + } + + public void testGetMappingAsync() throws Exception { + final RestHighLevelClient client = highLevelClient(); + + { + CreateIndexResponse createIndexResponse = client.indices().create(new CreateIndexRequest("twitter")); + assertTrue(createIndexResponse.isAcknowledged()); + PutMappingRequest request = new PutMappingRequest("twitter"); + request.type("tweet"); + request.source( + "{\n" + + " \"properties\": {\n" + + " \"message\": {\n" + + " \"type\": \"text\"\n" + + " }\n" + + " }\n" + + "}", // <1> + XContentType.JSON); + PutMappingResponse putMappingResponse = client.indices().putMapping(request); + 
assertTrue(putMappingResponse.isAcknowledged()); + } + + { + GetMappingsRequest request = new GetMappingsRequest(); + request.indices("twitter"); + request.types("tweet"); + + // tag::get-mapping-execute-listener + ActionListener listener = + new ActionListener() { + @Override + public void onResponse(GetMappingsResponse putMappingResponse) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::get-mapping-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + final ActionListener latchListener = new LatchedActionListener<>(listener, latch); + listener = ActionListener.wrap(r -> { + ImmutableOpenMap> allMappings = r.mappings(); + MappingMetaData typeMapping = allMappings.get("twitter").get("tweet"); + Map tweetMapping = typeMapping.sourceAsMap(); + + Map type = new HashMap<>(); + type.put("type", "text"); + Map field = new HashMap<>(); + field.put("message", type); + Map expected = new HashMap<>(); + expected.put("properties", field); + assertThat(tweetMapping, equalTo(expected)); + latchListener.onResponse(r); + }, e -> { + latchListener.onFailure(e); + fail("should not fail"); + }); + + // tag::get-mapping-execute-async + client.indices().getMappingsAsync(request, listener); // <1> + // end::get-mapping-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } + public void testOpenIndex() throws Exception { RestHighLevelClient client = highLevelClient(); diff --git a/docs/java-rest/high-level/indices/get_mappings.asciidoc b/docs/java-rest/high-level/indices/get_mappings.asciidoc new file mode 100644 index 0000000000000..f3506b6bcda57 --- /dev/null +++ b/docs/java-rest/high-level/indices/get_mappings.asciidoc @@ -0,0 +1,80 @@ +[[java-rest-high-get-mappings]] +=== Get Mappings API + +[[java-rest-high-get-mappings-request]] +==== Get Mappings Request + +A `GetMappingsRequest` can have an optional list of indices and optional list of types: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-mapping-request] +-------------------------------------------------- +<1> An empty request that will return all indices and types +<2> Setting the indices to fetch mapping for +<3> The types to be returned + +==== Optional arguments +The following arguments can also optionally be provided: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-mapping-request-masterTimeout] +-------------------------------------------------- +<1> Timeout to connect to the master node as a `TimeValue` +<2> Timeout to connect to the master node as a `String` + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-mapping-request-indicesOptions] +-------------------------------------------------- +<1> Options for expanding indices names + +[[java-rest-high-get-mappings-sync]] +==== Synchronous Execution + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-mapping-execute] +-------------------------------------------------- + +[[java-rest-high-get-mapping-async]] +==== Asynchronous Execution + +The asynchronous 
execution of a get mappings request requires both the +`GetMappingsRequest` instance and an `ActionListener` instance to be passed to +the asynchronous method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-mapping-execute-async] +-------------------------------------------------- +<1> The `GetMappingsRequest` to execute and the `ActionListener` to use when the execution completes + +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back using the `onResponse` method if +the execution successfully completed or using the `onFailure` method if it +failed. + +A typical listener for `GetMappingsResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-mapping-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is provided as an argument +<2> Called in case of failure. The raised exception is provided as an argument + +[[java-rest-high-get-mapping-response]] +==== Get Mappings Response + +The returned `GetMappingsResponse` allows to retrieve information about the +executed operation as follows: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-mapping-response] +-------------------------------------------------- +<1> Returning all indices' mappings +<2> Retrieving the mappings for a particular index and type +<3> Getting the mappings for the "tweet" as a Java Map diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index f15baeb6b7311..34149bee52880 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -95,6 +95,7 @@ include::indices/clear_cache.asciidoc[] include::indices/force_merge.asciidoc[] include::indices/rollover.asciidoc[] include::indices/put_mapping.asciidoc[] +include::indices/get_mappings.asciidoc[] include::indices/update_aliases.asciidoc[] include::indices/exists_alias.asciidoc[] include::indices/put_settings.asciidoc[] diff --git a/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java b/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java index fe41352741e71..b6164c2696735 100644 --- a/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java +++ b/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/XContentParserTests.java @@ -29,6 +29,7 @@ import java.io.IOException; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -140,6 +141,42 @@ public void testReadMapStrings() throws IOException { assertThat(map.size(), equalTo(0)); } + public void testMap() throws IOException { + String source = "{\"i\": {\"_doc\": {\"f1\": {\"type\": \"text\", \"analyzer\": \"english\"}, " + + "\"f2\": {\"type\": \"object\", \"properties\": {\"sub1\": {\"type\": \"keyword\", \"foo\": 17}}}}}}"; + Map f1 = new HashMap<>(); + f1.put("type", "text"); + f1.put("analyzer", "english"); + + Map sub1 = new HashMap<>(); + sub1.put("type", "keyword"); + 
sub1.put("foo", 17); + + Map properties = new HashMap<>(); + properties.put("sub1", sub1); + + Map f2 = new HashMap<>(); + f2.put("type", "object"); + f2.put("properties", properties); + + Map doc = new HashMap<>(); + doc.put("f1", f1); + doc.put("f2", f2); + + Map expected = new HashMap<>(); + expected.put("_doc", doc); + + Map i = new HashMap<>(); + i.put("i", expected); + + try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { + XContentParser.Token token = parser.nextToken(); + assertThat(token, equalTo(XContentParser.Token.START_OBJECT)); + Map map = parser.map(); + assertThat(map, equalTo(i)); + } + } + private Map readMapStrings(String source) throws IOException { try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { XContentParser.Token token = parser.nextToken(); diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json index ae54c7c10e677..9bfb9c76abf82 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json @@ -34,6 +34,10 @@ "default" : "open", "description" : "Whether to expand wildcard expression to concrete indices that are open, closed or both." }, + "master_timeout": { + "type" : "time", + "description" : "Specify timeout for connection to master" + }, "local": { "type": "boolean", "description": "Return local information, do not retrieve the state from master node (default: false)" diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponse.java index 12975c765d094..d21261abad89e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponse.java @@ -20,15 +20,31 @@ package org.elasticsearch.action.admin.indices.mapping.get; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.metadata.MappingMetaData; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; +import java.util.Map; -public class GetMappingsResponse extends ActionResponse { +public class GetMappingsResponse extends ActionResponse implements ToXContentFragment { + + private static final ParseField MAPPINGS = new ParseField("mappings"); + + private static final ObjectParser PARSER = + new ObjectParser("get-mappings", false, GetMappingsResponse::new); private ImmutableOpenMap> mappings = ImmutableOpenMap.of(); @@ -77,4 +93,94 @@ public void writeTo(StreamOutput out) throws IOException { } } } + + public 
static GetMappingsResponse fromXContent(XContentParser parser) throws IOException { + if (parser.currentToken() == null) { + parser.nextToken(); + } + assert parser.currentToken() == XContentParser.Token.START_OBJECT; + Map parts = parser.map(); + + ImmutableOpenMap.Builder> builder = new ImmutableOpenMap.Builder<>(); + for (Map.Entry entry : parts.entrySet()) { + final String indexName = entry.getKey(); + assert entry.getValue() instanceof Map : "expected a map as type mapping, but got: " + entry.getValue().getClass(); + final Map mapping = (Map) ((Map) entry.getValue()).get(MAPPINGS.getPreferredName()); + + ImmutableOpenMap.Builder typeBuilder = new ImmutableOpenMap.Builder<>(); + for (Map.Entry typeEntry : mapping.entrySet()) { + final String typeName = typeEntry.getKey(); + assert typeEntry.getValue() instanceof Map : "expected a map as inner type mapping, but got: " + + typeEntry.getValue().getClass(); + final Map fieldMappings = (Map) typeEntry.getValue(); + MappingMetaData mmd = new MappingMetaData(typeName, fieldMappings); + typeBuilder.put(typeName, mmd); + } + builder.put(indexName, typeBuilder.build()); + } + + return new GetMappingsResponse(builder.build()); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return toXContent(builder, params, true); + } + + public XContentBuilder toXContent(XContentBuilder builder, Params params, boolean includeTypeName) throws IOException { + for (final ObjectObjectCursor> indexEntry : getMappings()) { + builder.startObject(indexEntry.key); + { + if (includeTypeName == false) { + MappingMetaData mappings = null; + for (final ObjectObjectCursor typeEntry : indexEntry.value) { + if (typeEntry.key.equals("_default_") == false) { + assert mappings == null; + mappings = typeEntry.value; + } + } + if (mappings == null) { + // no mappings yet + builder.startObject(MAPPINGS.getPreferredName()).endObject(); + } else { + builder.field(MAPPINGS.getPreferredName(), mappings.sourceAsMap()); + } + } else { + builder.startObject(MAPPINGS.getPreferredName()); + { + for (final ObjectObjectCursor typeEntry : indexEntry.value) { + builder.field(typeEntry.key, typeEntry.value.sourceAsMap()); + } + } + builder.endObject(); + } + } + builder.endObject(); + } + return builder; + } + + @Override + public String toString() { + return Strings.toString(this); + } + + @Override + public int hashCode() { + return mappings.hashCode(); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + + if (getClass() != obj.getClass()) { + return false; + } + + GetMappingsResponse other = (GetMappingsResponse) obj; + return this.mappings.equals(other.mappings); + } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java index 62356824365ae..46388e6947f3e 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.indices.TypeMissingException; @@ -83,6 
+84,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC final GetMappingsRequest getMappingsRequest = new GetMappingsRequest(); getMappingsRequest.indices(indices).types(types); getMappingsRequest.indicesOptions(IndicesOptions.fromRequest(request, getMappingsRequest.indicesOptions())); + getMappingsRequest.masterNodeTimeout(request.paramAsTime("master_timeout", getMappingsRequest.masterNodeTimeout())); getMappingsRequest.local(request.paramAsBoolean("local", getMappingsRequest.local())); return channel -> client.admin().indices().getMappings(getMappingsRequest, new RestBuilderListener(channel) { @Override @@ -129,54 +131,17 @@ public RestResponse buildResponse(final GetMappingsResponse response, final XCon status = RestStatus.OK; } else { status = RestStatus.NOT_FOUND; - final String message; - if (difference.size() == 1) { - message = String.format(Locale.ROOT, "type [%s] missing", toNamesString(difference.iterator().next())); - } else { - message = String.format(Locale.ROOT, "types [%s] missing", toNamesString(difference.toArray(new String[0]))); - } + final String message = String.format(Locale.ROOT, "type" + (difference.size() == 1 ? "" : "s") + + " [%s] missing", Strings.collectionToCommaDelimitedString(difference)); builder.field("error", message); builder.field("status", status.getStatus()); } - - for (final ObjectObjectCursor> indexEntry : mappingsByIndex) { - builder.startObject(indexEntry.key); - { - if (includeTypeName == false) { - MappingMetaData mappings = null; - for (final ObjectObjectCursor typeEntry : indexEntry.value) { - if (typeEntry.key.equals("_default_") == false) { - assert mappings == null; - mappings = typeEntry.value; - } - } - if (mappings == null) { - // no mappings yet - builder.startObject("mappings").endObject(); - } else { - builder.field("mappings", mappings.sourceAsMap()); - } - } else { - builder.startObject("mappings"); - { - for (final ObjectObjectCursor typeEntry : indexEntry.value) { - builder.field(typeEntry.key, typeEntry.value.sourceAsMap()); - } - } - builder.endObject(); - } - } - builder.endObject(); - } + response.toXContent(builder, ToXContent.EMPTY_PARAMS, includeTypeName); } builder.endObject(); + return new BytesRestResponse(status, builder); } }); } - - private static String toNamesString(final String... names) { - return Arrays.stream(names).collect(Collectors.joining(",")); - } - } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponseTests.java new file mode 100644 index 0000000000000..0fa5ca075fa8d --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsResponseTests.java @@ -0,0 +1,153 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.indices.mapping.get; + +import com.carrotsearch.hppc.cursors.ObjectCursor; +import org.elasticsearch.cluster.metadata.MappingMetaData; +import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; +import org.elasticsearch.test.EqualsHashCodeTestUtils; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +public class GetMappingsResponseTests extends AbstractStreamableXContentTestCase { + + @Override + protected boolean supportsUnknownFields() { + return false; + } + + public void testCheckEqualsAndHashCode() { + GetMappingsResponse resp = createTestInstance(); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(resp, r -> new GetMappingsResponse(r.mappings()), GetMappingsResponseTests::mutate); + } + + @Override + protected GetMappingsResponse doParseInstance(XContentParser parser) throws IOException { + return GetMappingsResponse.fromXContent(parser); + } + + @Override + protected GetMappingsResponse createBlankInstance() { + return new GetMappingsResponse(); + } + + private static GetMappingsResponse mutate(GetMappingsResponse original) throws IOException { + ImmutableOpenMap.Builder> builder = ImmutableOpenMap.builder(original.mappings()); + String indexKey = original.mappings().keys().iterator().next().value; + + ImmutableOpenMap.Builder typeBuilder = ImmutableOpenMap.builder(original.mappings().get(indexKey)); + final String typeKey; + Iterator> iter = original.mappings().get(indexKey).keys().iterator(); + if (iter.hasNext()) { + typeKey = iter.next().value; + } else { + typeKey = "new-type"; + } + + typeBuilder.put(typeKey, new MappingMetaData("type-" + randomAlphaOfLength(6), randomFieldMapping())); + + builder.put(indexKey, typeBuilder.build()); + return new GetMappingsResponse(builder.build()); + } + + @Override + protected GetMappingsResponse mutateInstance(GetMappingsResponse instance) throws IOException { + return mutate(instance); + } + + @Override + protected GetMappingsResponse createTestInstance() { + // rarely have no types + int typeCount = rarely() ? 
0 : scaledRandomIntBetween(1, 3); + List typeMappings = new ArrayList<>(typeCount); + + for (int i = 0; i < typeCount; i++) { + Map mappings = new HashMap<>(); + if (rarely() == false) { // rarely have no fields + mappings.put("field-" + i, randomFieldMapping()); + if (randomBoolean()) { + mappings.put("field2-" + i, randomFieldMapping()); + } + } + + try { + MappingMetaData mmd = new MappingMetaData("type-" + randomAlphaOfLength(5), mappings); + typeMappings.add(mmd); + } catch (IOException e) { + fail("shouldn't have failed " + e); + } + } + ImmutableOpenMap.Builder typeBuilder = ImmutableOpenMap.builder(); + typeMappings.forEach(mmd -> typeBuilder.put(mmd.type(), mmd)); + ImmutableOpenMap.Builder> indexBuilder = ImmutableOpenMap.builder(); + indexBuilder.put("index-" + randomAlphaOfLength(5), typeBuilder.build()); + GetMappingsResponse resp = new GetMappingsResponse(indexBuilder.build()); + logger.debug("--> created: {}", resp); + return resp; + } + + // Not meant to be exhaustive + private static Map randomFieldMapping() { + Map mappings = new HashMap<>(); + if (randomBoolean()) { + Map regularMapping = new HashMap<>(); + regularMapping.put("type", randomBoolean() ? "text" : "keyword"); + regularMapping.put("index", "analyzed"); + regularMapping.put("analyzer", "english"); + return regularMapping; + } else if (randomBoolean()) { + Map numberMapping = new HashMap<>(); + numberMapping.put("type", randomFrom("integer", "float", "long", "double")); + numberMapping.put("index", Objects.toString(randomBoolean())); + return numberMapping; + } else if (randomBoolean()) { + Map objMapping = new HashMap<>(); + objMapping.put("type", "object"); + objMapping.put("dynamic", "strict"); + Map properties = new HashMap<>(); + Map props1 = new HashMap<>(); + props1.put("type", randomFrom("text", "keyword")); + props1.put("analyzer", "keyword"); + properties.put("subtext", props1); + Map props2 = new HashMap<>(); + props2.put("type", "object"); + Map prop2properties = new HashMap<>(); + Map props3 = new HashMap<>(); + props3.put("type", "integer"); + props3.put("index", "false"); + prop2properties.put("subsubfield", props3); + props2.put("properties", prop2properties); + objMapping.put("properties", properties); + return objMapping; + } else { + Map plainMapping = new HashMap<>(); + plainMapping.put("type", "keyword"); + return plainMapping; + } + } +}
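
For reference, the REST endpoint shape that RequestConvertersTests#testGetMapping verifies earlier in this patch ("/{indices}/_mapping/{types}", with empty segments omitted) can be reproduced in a few lines of plain Java. This is only a standalone illustration of the expected URL layout, not the actual RequestConverters.getMappings implementation, which also translates indices options, master timeout and the local flag into query parameters.

    import java.util.StringJoiner;

    public class GetMappingsEndpointSketch {

        // Builds "/{indices}/_mapping/{type}", dropping the index segment when no index is
        // given and the type segment when no type is given.
        static String endpoint(String[] indices, String type) {
            StringJoiner endpoint = new StringJoiner("/", "/", "");
            String index = String.join(",", indices);
            if (index.isEmpty() == false) {
                endpoint.add(index);        // e.g. "twitter" or "index1,index2"
            }
            endpoint.add("_mapping");
            if (type != null) {
                endpoint.add(type);         // e.g. "tweet"
            }
            return endpoint.toString();
        }

        public static void main(String[] args) {
            System.out.println(endpoint(new String[] {"twitter"}, "tweet")); // /twitter/_mapping/tweet
            System.out.println(endpoint(new String[0], null));               // /_mapping
        }
    }
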