Commit

Make sure to use ParseContext.Document#addAll when possible.
jtibshirani committed May 4, 2020
1 parent 8686b13 commit 77ac5d8
Showing 6 changed files with 3 additions and 32 deletions.
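
The change applies one small pattern across the mappers below: where a mapper already builds a List<IndexableField>, the per-field add loop is replaced with a single ParseContext.Document#addAll call, and where the list was only ever created for createFieldNamesField and never populated, the list and loop are deleted outright. A minimal, self-contained Java sketch of that pattern follows; the Document and IndexableField types here are simplified stand-ins for ParseContext.Document and org.apache.lucene.index.IndexableField, not the real Lucene/Elasticsearch classes.

import java.util.ArrayList;
import java.util.List;

// Simplified stand-in for org.apache.lucene.index.IndexableField.
interface IndexableField {
    String name();
}

// Simplified stand-in for ParseContext.Document: it collects a document's fields and
// exposes both add(IndexableField) and addAll(List<? extends IndexableField>).
class Document {
    private final List<IndexableField> fields = new ArrayList<>();

    void add(IndexableField field) {
        fields.add(field);
    }

    void addAll(List<? extends IndexableField> newFields) {
        fields.addAll(newFields);
    }

    int size() {
        return fields.size();
    }
}

public class AddAllSketch {
    public static void main(String[] args) {
        Document doc = new Document();

        List<IndexableField> fields = new ArrayList<>();
        fields.add(() -> "field_a");
        fields.add(() -> "field_b");

        // Old pattern removed by this commit: add each produced field in a loop.
        // for (IndexableField field : fields) {
        //     doc.add(field);
        // }

        // New pattern: hand the whole list to the document in one call.
        doc.addAll(fields);

        System.out.println("fields in doc: " + doc.size()); // fields in doc: 2
    }
}
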
@@ -25,7 +25,6 @@
 import org.apache.lucene.index.FieldInfo;
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.PointValues;
 import org.apache.lucene.index.Term;
@@ -460,11 +459,8 @@ void processQuery(Query query, ParseContext context) {
         } else {
             doc.add(new Field(extractionResultField.name(), EXTRACTION_PARTIAL, extractionResultField.fieldType()));
         }
-        List<IndexableField> fields = new ArrayList<>(1);
+
         createFieldNamesField(context);
-        for (IndexableField field : fields) {
-            context.doc().add(field);
-        }
         doc.add(new NumericDocValuesField(minimumShouldMatchFieldMapper.name(), result.minimumShouldMatch));
}

@@ -298,10 +298,8 @@ public void parse(ParseContext context) throws IOException {
             }

             List<IndexableField> fields = geometryIndexer.indexShape(context, shape);
+            context.doc().addAll(fields);
             createFieldNamesField(context);
-            for (IndexableField field : fields) {
-                context.doc().add(field);
-            }
         } catch (Exception e) {
             if (ignoreMalformed.value() == false) {
                 throw new MapperParsingException("failed to parse field [{}] of type [{}]", e, fieldType().name(),
@@ -20,7 +20,6 @@

 import org.apache.logging.log4j.LogManager;
 import org.apache.lucene.codecs.PostingsFormat;
-import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
@@ -52,7 +51,6 @@
 import org.elasticsearch.search.suggest.completion.context.ContextMappings;

 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -503,12 +501,7 @@ public void parse(ParseContext context) throws IOException {
             }
         }

-        List<IndexableField> fields = new ArrayList<>(1);
         createFieldNamesField(context);
-        for (IndexableField field : fields) {
-            context.doc().add(field);
-        }
-
         for (CompletionInputMetadata metadata: inputMap.values()) {
             ParseContext externalValueContext = context.createExternalValueContext(metadata);
             multiFields.parse(this, externalValueContext);
@@ -22,7 +22,6 @@
 import org.apache.lucene.document.LatLonPoint;
 import org.apache.lucene.document.StoredField;
 import org.apache.lucene.index.IndexOptions;
-import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.search.DocValuesFieldExistsQuery;
 import org.apache.lucene.search.Query;
 import org.elasticsearch.ElasticsearchParseException;
@@ -42,9 +41,7 @@
 import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.Iterator;
-import java.util.List;
 import java.util.Map;

 import static org.elasticsearch.index.mapper.TypeParsers.parseField;
@@ -225,11 +222,7 @@ protected void parse(ParseContext context, GeoPoint point) throws IOException {
         if (fieldType.hasDocValues()) {
             context.doc().add(new LatLonDocValuesField(fieldType().name(), point.lat(), point.lon()));
         } else if (fieldType().stored() || fieldType().indexOptions() != IndexOptions.NONE) {
-            List<IndexableField> fields = new ArrayList<>(1);
             createFieldNamesField(context);
-            for (IndexableField field : fields) {
-                context.doc().add(field);
-            }
         }
         // if the mapping contains multifields then use the geohash string
         if (multiFields.iterator().hasNext()) {
@@ -9,7 +9,6 @@
 import org.apache.lucene.document.XYDocValuesField;
 import org.apache.lucene.document.XYPointField;
 import org.apache.lucene.index.IndexOptions;
-import org.apache.lucene.index.IndexableField;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.Explicit;
 import org.elasticsearch.common.ParseField;
@@ -27,9 +26,7 @@
 import org.elasticsearch.xpack.spatial.index.query.ShapeQueryPointProcessor;

 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.Iterator;
-import java.util.List;
 import java.util.Map;

 import static org.elasticsearch.index.mapper.TypeParsers.parseField;
@@ -174,11 +171,7 @@ protected void parse(ParseContext context, CartesianPoint point) throws IOException {
         if (fieldType.hasDocValues()) {
             context.doc().add(new XYDocValuesField(fieldType().name(), point.getX(), point.getY()));
         } else if (fieldType().stored() || fieldType().indexOptions() != IndexOptions.NONE) {
-            List<IndexableField> fields = new ArrayList<>(1);
             createFieldNamesField(context);
-            for (IndexableField field : fields) {
-                context.doc().add(field);
-            }
         }
         // if the mapping contains multi-fields then throw an error?
         if (multiFields.iterator().hasNext()) {
@@ -548,9 +548,7 @@ protected void parseCreateField(ParseContext context) throws IOException {

         List<IndexableField> fields = new ArrayList<>();
         createFields(value, parseDoc, fields);
-        for (IndexableField field : fields) {
-            parseDoc.add(field);
-        }
+        parseDoc.addAll(fields);
     }

     // For internal use by Lucene only - used to define ngram index
