@@ -65,7 +65,13 @@ public void testHitsExecute() throws Exception {
LeafReaderContext context = reader.leaves().get(0);
// A match:
{
-HitContext hit = new HitContext(new SearchHit(0), context, 0, new SourceLookup(), new HashMap<>());
+HitContext hit = new HitContext(
+    new SearchHit(0),
+    context,
+    0,
+    new SourceLookup(),
+    new HashMap<>()
+);
PercolateQuery.QueryStore queryStore = ctx -> docId -> new TermQuery(new Term("field", "value"));
MemoryIndex memoryIndex = new MemoryIndex();
memoryIndex.addField("field", "value", new WhitespaceAnalyzer());
@@ -86,7 +92,13 @@ public void testHitsExecute() throws Exception {

// No match:
{
-HitContext hit = new HitContext(new SearchHit(0), context, 0, new SourceLookup(), new HashMap<>());
+HitContext hit = new HitContext(
+    new SearchHit(0),
+    context,
+    0,
+    new SourceLookup(),
+    new HashMap<>()
+);
PercolateQuery.QueryStore queryStore = ctx -> docId -> new TermQuery(new Term("field", "value"));
MemoryIndex memoryIndex = new MemoryIndex();
memoryIndex.addField("field", "value1", new WhitespaceAnalyzer());
@@ -106,7 +118,13 @@ public void testHitsExecute() throws Exception {

// No query:
{
-HitContext hit = new HitContext(new SearchHit(0), context, 0, new SourceLookup(), new HashMap<>());
+HitContext hit = new HitContext(
+    new SearchHit(0),
+    context,
+    0,
+    new SourceLookup(),
+    new HashMap<>()
+);
PercolateQuery.QueryStore queryStore = ctx -> docId -> null;
MemoryIndex memoryIndex = new MemoryIndex();
memoryIndex.addField("field", "value", new WhitespaceAnalyzer());
@@ -21,15 +21,19 @@
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.common.CheckedBiConsumer;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
+import org.elasticsearch.common.lucene.index.SequentialStoredFieldsLeafReader;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.fieldvisitor.FieldsVisitor;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;

import java.io.IOException;
+import java.io.UncheckedIOException;
import java.util.Collection;
import java.util.List;
import java.util.Map;
@@ -40,6 +44,7 @@
public class SourceLookup implements Map {

private LeafReader reader;
+CheckedBiConsumer<Integer, FieldsVisitor, IOException> fieldReader;

private int docId = -1;

@@ -75,7 +80,7 @@ public Map<String, Object> loadSourceIfNeeded() {
}
try {
FieldsVisitor sourceFieldVisitor = new FieldsVisitor(true);
-reader.document(docId, sourceFieldVisitor);
+fieldReader.accept(docId, sourceFieldVisitor);
BytesReference source = sourceFieldVisitor.source();
if (source == null) {
this.source = emptyMap();
@@ -91,20 +96,40 @@
return this.source;
}

-public static Tuple<XContentType, Map<String, Object>> sourceAsMapAndType(BytesReference source) throws ElasticsearchParseException {
+private static Tuple<XContentType, Map<String, Object>> sourceAsMapAndType(BytesReference source) throws ElasticsearchParseException {
return XContentHelper.convertToMap(source, false);
}

public static Map<String, Object> sourceAsMap(BytesReference source) throws ElasticsearchParseException {
return sourceAsMapAndType(source).v2();
}

-public void setSegmentAndDocument(LeafReaderContext context, int docId) {
+public void setSegmentAndDocument(
+    LeafReaderContext context,
+    int docId
+) {
if (this.reader == context.reader() && this.docId == docId) {
// if we are called with the same document, don't invalidate source
return;
}
-this.reader = context.reader();
+if (this.reader != context.reader()) {
+    this.reader = context.reader();
+    // only reset reader and fieldReader when reader changes
+    try {
+        if (context.reader() instanceof SequentialStoredFieldsLeafReader) {
+            // All the docs to fetch are adjacent but Lucene stored fields are optimized
+            // for random access and don't optimize for sequential access - except for merging.
+            // So we do a little hack here and pretend we're going to do merges in order to
+            // get better sequential access.
+            SequentialStoredFieldsLeafReader lf = (SequentialStoredFieldsLeafReader) context.reader();
+            fieldReader = lf.getSequentialStoredFieldsReader()::visitDocument;
+        } else {
+            fieldReader = context.reader()::document;
+        }
+    } catch (IOException e) {
+        throw new UncheckedIOException(e);
+    }
+}
this.source = null;
this.sourceAsBytes = null;
this.docId = docId;
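
Aside, not part of the diff: the heart of the SourceLookup change above is how it picks a stored-fields loader per segment. Below is a minimal sketch of that selection pulled out into a standalone helper. The class and method names are ours; everything else mirrors the types used in the diff, and Elasticsearch plus Lucene are assumed on the classpath.

import java.io.IOException;
import java.io.UncheckedIOException;

import org.apache.lucene.index.LeafReader;
import org.elasticsearch.common.CheckedBiConsumer;
import org.elasticsearch.common.lucene.index.SequentialStoredFieldsLeafReader;
import org.elasticsearch.index.fieldvisitor.FieldsVisitor;

final class StoredFieldReaders {

    // Picks how stored fields are loaded for one segment. The fetch phase
    // visits adjacent doc ids, so when the reader can hand out the
    // merge-time (sequential) stored fields reader we prefer it; otherwise
    // we fall back to the random-access LeafReader#document.
    static CheckedBiConsumer<Integer, FieldsVisitor, IOException> fieldReader(LeafReader reader) {
        try {
            if (reader instanceof SequentialStoredFieldsLeafReader) {
                SequentialStoredFieldsLeafReader lf = (SequentialStoredFieldsLeafReader) reader;
                return lf.getSequentialStoredFieldsReader()::visitDocument;
            } else {
                return reader::document;
            }
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }
}

SourceLookup caches the result per leaf (note the new this.reader != context.reader() guard), so this choice is made once per segment rather than once per document.
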
@@ -159,7 +159,13 @@ private HitContext hitExecuteMultiple(XContentBuilder source, boolean fetchSource
// We don't need a real index, just a LeafReaderContext which cannot be mocked.
MemoryIndex index = new MemoryIndex();
LeafReaderContext leafReaderContext = index.createSearcher().getIndexReader().leaves().get(0);
-HitContext hitContext = new HitContext(searchHit, leafReaderContext, 1, new SourceLookup(), new HashMap<>());
+HitContext hitContext = new HitContext(
+    searchHit,
+    leafReaderContext,
+    1,
+    new SourceLookup(),
+    new HashMap<>()
+);
hitContext.sourceLookup().setSource(source == null ? null : BytesReference.bytes(source));

FetchSourcePhase phase = new FetchSourcePhase();
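
For orientation, a hedged usage sketch of the API this PR touches, not taken from the PR itself: it borrows the MemoryIndex trick from the test above to obtain a real LeafReaderContext, and the demo class name is ours. A caller positions one SourceLookup over documents of a segment and reads source through it, which is exactly the per-segment path the new fieldReader serves.

import java.util.Map;

import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.memory.MemoryIndex;
import org.elasticsearch.search.lookup.SourceLookup;

public class SourceLookupDemo {
    public static void main(String[] args) {
        // MemoryIndex gives us a real LeafReaderContext (which cannot be
        // mocked), the same trick FetchSourcePhaseTests uses above.
        MemoryIndex index = new MemoryIndex();
        index.addField("field", "value", new WhitespaceAnalyzer());
        LeafReaderContext leafReaderContext = index.createSearcher().getIndexReader().leaves().get(0);

        SourceLookup lookup = new SourceLookup();
        // Same segment + same doc id short-circuits; a new doc id in the
        // same segment keeps the cached fieldReader and only resets source.
        lookup.setSegmentAndDocument(leafReaderContext, 0);

        // MemoryIndex stores no fields, so the loaded source is empty here;
        // against a real index segment this returns the document's _source.
        Map<String, Object> source = lookup.loadSourceIfNeeded();
        System.out.println(source);
    }
}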