Return both concrete fields and aliases in DocumentFieldMappers#getMapper. (#31671)

* Rename DocumentFieldMappers#getMapper to getFieldMapper.
* Introduce a new DocumentFieldMappers#getMapper that returns a Mapper.
* Fix an issue around field aliases in geo suggestion contexts.
* Make sure a field alias can refer to a percolate query.
* Remove easy-to-fix uses of DocumentFieldMappers#getFieldMapper.
* Include alias mappers in DocumentFieldMappers#getMappers.
* Make sure we detect conflicts between dynamic object mappers and field aliases.
* Throw an exception if aliases are specified as the target of copy_to.
jtibshirani committed Jul 4, 2018
1 parent fdb43d7 commit 40b5790
Showing 47 changed files with 563 additions and 349 deletions.
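
Many of the changes below follow one migration pattern: call sites that only need a field's type stop going through DocumentFieldMappers#getFieldMapper(field).fieldType() and instead use MapperService#fullName, which resolves field aliases as well as concrete fields (see the percolator test changes further down). A minimal sketch of that pattern, assuming a MapperService instance is already at hand; the helper class and method names are illustrative and not part of the commit:

import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;

class FieldTypeLookupSketch {
    /**
     * Resolves a full field name to its type. Per the deprecation note added in this
     * commit, MapperService#fullName is the preferred way to obtain type information;
     * it works for concrete fields and for field aliases.
     */
    static MappedFieldType resolveFieldType(MapperService mapperService, String field) {
        return mapperService.fullName(field);
    }
}
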
2 changes: 1 addition & 1 deletion docs/reference/mapping/types/alias.asciidoc
@@ -74,7 +74,7 @@ field alias to query over multiple target fields in a single clause.
==== Unsupported APIs

Writes to field aliases are not supported: attempting to use an alias in an index or update request
will result in a failure.
will result in a failure. This also precludes aliases from being the target of `copy_to`.
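
As an illustration of the new `copy_to` restriction, the sketch below builds a mapping whose `copy_to` target is an alias; per this commit, such a mapping is rejected with an exception. It reuses the simplified mapping syntax from the percolator tests later in this change; the field names and the helper class name are made up, and the exact failure point (mapping update versus document indexing) is not shown in this excerpt:

import java.io.IOException;

import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.common.Strings;

class CopyToAliasSketch {
    // Builds a mapping where "first_name" declares copy_to targeting "name_alias",
    // which is an alias of the concrete field "name". After this change, using an
    // alias as a copy_to target throws an exception instead of being accepted.
    static String copyToAliasMapping() throws IOException {
        return Strings.toString(PutMappingRequest.buildFromSimplifiedDef("_doc",
            "name", "type=text",
            "name_alias", "type=alias,path=name",
            "first_name", "type=text,copy_to=name_alias"));
    }
}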

Because alias names are not present in the document source, aliases cannot be used when performing
source filtering. For example, the following request will return an empty result for `_source`:
@@ -194,6 +194,10 @@ Query getCandidateMatchesQuery() {
return candidateMatchesQuery;
}

Query getVerifiedMatchesQuery() {
return verifiedMatchesQuery;
}

// Comparing identity here to avoid being cached
// Note that in theory if the same instance gets used multiple times it could still get cached,
// however since we create a new query instance each time we use this query, this shouldn't happen and thus
@@ -618,13 +618,13 @@ protected Analyzer getWrappedAnalyzer(String fieldName) {
docSearcher.setQueryCache(null);
}

PercolatorFieldMapper percolatorFieldMapper = (PercolatorFieldMapper) docMapper.mappers().getMapper(field);
boolean mapUnmappedFieldsAsString = percolatorFieldMapper.isMapUnmappedFieldAsText();
PercolatorFieldMapper.FieldType pft = (PercolatorFieldMapper.FieldType) fieldType;
String name = this.name != null ? this.name : pft.name();
QueryShardContext percolateShardContext = wrap(context);
PercolateQuery.QueryStore queryStore = createStore(pft.queryBuilderField,
percolateShardContext,
pft.mapUnmappedFieldsAsText);

String name = this.name != null ? this.name : field;
PercolatorFieldMapper.FieldType pft = (PercolatorFieldMapper.FieldType) fieldType;
PercolateQuery.QueryStore queryStore = createStore(pft.queryBuilderField, percolateShardContext, mapUnmappedFieldsAsString);
return pft.percolateQuery(name, queryStore, documents, docSearcher, context.indexVersionCreated());
}

@@ -136,13 +136,19 @@ public PercolatorFieldMapper build(BuilderContext context) {
fieldType.rangeField = rangeFieldMapper.fieldType();
NumberFieldMapper minimumShouldMatchFieldMapper = createMinimumShouldMatchField(context);
fieldType.minimumShouldMatchField = minimumShouldMatchFieldMapper.fieldType();
fieldType.mapUnmappedFieldsAsText = getMapUnmappedFieldAsText(context.indexSettings());

context.path().remove();
setupFieldType(context);
return new PercolatorFieldMapper(name(), fieldType, defaultFieldType, context.indexSettings(),
multiFieldsBuilder.build(this, context), copyTo, queryShardContext, extractedTermsField,
extractionResultField, queryBuilderField, rangeFieldMapper, minimumShouldMatchFieldMapper);
}

private static boolean getMapUnmappedFieldAsText(Settings indexSettings) {
return INDEX_MAP_UNMAPPED_FIELDS_AS_TEXT_SETTING.get(indexSettings);
}

static KeywordFieldMapper createExtractQueryFieldBuilder(String name, BuilderContext context) {
KeywordFieldMapper.Builder queryMetaDataFieldBuilder = new KeywordFieldMapper.Builder(name);
queryMetaDataFieldBuilder.docValues(false);
@@ -195,6 +201,7 @@ static class FieldType extends MappedFieldType {
MappedFieldType minimumShouldMatchField;

RangeFieldMapper.RangeFieldType rangeField;
boolean mapUnmappedFieldsAsText;

FieldType() {
setIndexOptions(IndexOptions.NONE);
@@ -209,6 +216,7 @@ static class FieldType extends MappedFieldType {
queryBuilderField = ref.queryBuilderField;
rangeField = ref.rangeField;
minimumShouldMatchField = ref.minimumShouldMatchField;
mapUnmappedFieldsAsText = ref.mapUnmappedFieldsAsText;
}

@Override
@@ -327,7 +335,6 @@ Tuple<List<BytesRef>, Map<String, List<byte[]>>> extractTermsAndRanges(IndexRead

}

private final boolean mapUnmappedFieldAsText;
private final Supplier<QueryShardContext> queryShardContext;
private KeywordFieldMapper queryTermsField;
private KeywordFieldMapper extractionResultField;
@@ -348,14 +355,9 @@ Tuple<List<BytesRef>, Map<String, List<byte[]>>> extractTermsAndRanges(IndexRead
this.extractionResultField = extractionResultField;
this.queryBuilderField = queryBuilderField;
this.minimumShouldMatchFieldMapper = minimumShouldMatchFieldMapper;
this.mapUnmappedFieldAsText = getMapUnmappedFieldAsText(indexSettings);
this.rangeFieldMapper = rangeFieldMapper;
}

private static boolean getMapUnmappedFieldAsText(Settings indexSettings) {
return INDEX_MAP_UNMAPPED_FIELDS_AS_TEXT_SETTING.get(indexSettings);
}

@Override
public FieldMapper updateFieldType(Map<String, MappedFieldType> fullNameToFieldType) {
PercolatorFieldMapper updated = (PercolatorFieldMapper) super.updateFieldType(fullNameToFieldType);
@@ -402,7 +404,7 @@ public Mapper parse(ParseContext context) throws IOException {

Version indexVersion = context.mapperService().getIndexSettings().getIndexVersionCreated();
createQueryBuilderField(indexVersion, queryBuilderField, queryBuilder, context);
Query query = toQuery(queryShardContext, mapUnmappedFieldAsText, queryBuilder);
Query query = toQuery(queryShardContext, isMapUnmappedFieldAsText(), queryBuilder);
processQuery(query, context);
return null;
}
@@ -522,7 +524,7 @@ protected String contentType() {
}

boolean isMapUnmappedFieldAsText() {
return mapUnmappedFieldAsText;
return ((FieldType) fieldType).mapUnmappedFieldsAsText;
}

/**
@@ -194,8 +194,7 @@ public void testDuel() throws Exception {
}
Collections.sort(intValues);

MappedFieldType intFieldType = mapperService.documentMapper("type").mappers()
.getMapper("int_field").fieldType();
MappedFieldType intFieldType = mapperService.fullName("int_field");

List<Supplier<Query>> queryFunctions = new ArrayList<>();
queryFunctions.add(MatchNoDocsQuery::new);
@@ -327,8 +326,7 @@ public void testDuel2() throws Exception {
stringValues.add("value2");
stringValues.add("value3");

MappedFieldType intFieldType = mapperService.documentMapper("type").mappers()
.getMapper("int_field").fieldType();
MappedFieldType intFieldType = mapperService.fullName("int_field");
List<int[]> ranges = new ArrayList<>();
ranges.add(new int[]{-5, 5});
ranges.add(new int[]{0, 10});
@@ -75,7 +75,8 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase<PercolateQ
PercolateQueryBuilder.DOCUMENTS_FIELD.getPreferredName()
};

private static String queryField;
private static String queryField = "field";
private static String aliasField = "alias";
private static String docType;

private String indexedDocumentIndex;
@@ -96,9 +97,11 @@ protected Collection<Class<? extends Plugin>> getPlugins() {
@Override
protected void initializeAdditionalMappings(MapperService mapperService) throws IOException {
queryField = randomAlphaOfLength(4);
aliasField = randomAlphaOfLength(4);

String docType = "_doc";
mapperService.merge(docType, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(docType,
queryField, "type=percolator"
queryField, "type=percolator", aliasField, "type=alias,path=" + queryField
))), MapperService.MergeReason.MAPPING_UPDATE);
mapperService.merge(docType, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(docType,
STRING_FIELD_NAME, "type=text"
@@ -355,4 +358,21 @@ public void testSerializationFailsUnlessFetched() throws IOException {
builder = rewriteAndFetch(builder, createShardContext());
builder.writeTo(new BytesStreamOutput(10));
}

public void testFieldAlias() throws IOException {
QueryShardContext shardContext = createShardContext();

PercolateQueryBuilder builder = doCreateTestQueryBuilder(false);
QueryBuilder rewrittenBuilder = rewriteAndFetch(builder, shardContext);
PercolateQuery query = (PercolateQuery) rewrittenBuilder.toQuery(shardContext);

PercolateQueryBuilder aliasBuilder = new PercolateQueryBuilder(aliasField,
builder.getDocuments(),
builder.getXContentType());
QueryBuilder rewrittenAliasBuilder = rewriteAndFetch(aliasBuilder, shardContext);
PercolateQuery aliasQuery = (PercolateQuery) rewrittenAliasBuilder.toQuery(shardContext);

assertEquals(query.getCandidateMatchesQuery(), aliasQuery.getCandidateMatchesQuery());
assertEquals(query.getVerifiedMatchesQuery(), aliasQuery.getVerifiedMatchesQuery());
}
}
@@ -224,10 +224,10 @@ public void testExtractTerms() throws Exception {
public void testExtractRanges() throws Exception {
addQueryFieldMappings();
BooleanQuery.Builder bq = new BooleanQuery.Builder();
Query rangeQuery1 = mapperService.documentMapper("doc").mappers().getMapper("number_field1").fieldType()
Query rangeQuery1 = mapperService.fullName("number_field1")
.rangeQuery(10, 20, true, true, null, null, null, null);
bq.add(rangeQuery1, Occur.MUST);
Query rangeQuery2 = mapperService.documentMapper("doc").mappers().getMapper("number_field1").fieldType()
Query rangeQuery2 = mapperService.fullName("number_field1")
.rangeQuery(15, 20, true, true, null, null, null, null);
bq.add(rangeQuery2, Occur.MUST);

@@ -255,7 +255,7 @@ public void testExtractRanges() throws Exception {
// Range queries on different fields:
bq = new BooleanQuery.Builder();
bq.add(rangeQuery1, Occur.MUST);
rangeQuery2 = mapperService.documentMapper("doc").mappers().getMapper("number_field2").fieldType()
rangeQuery2 = mapperService.fullName("number_field2")
.rangeQuery(15, 20, true, true, null, null, null, null);
bq.add(rangeQuery2, Occur.MUST);

@@ -39,7 +39,7 @@
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.DocumentFieldMappers;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.TypeMissingException;
@@ -174,19 +174,19 @@ private static Map<String, FieldMappingMetaData> findFieldMappingsByType(Predica
final DocumentFieldMappers allFieldMappers = documentMapper.mappers();
for (String field : request.fields()) {
if (Regex.isMatchAllPattern(field)) {
for (FieldMapper fieldMapper : allFieldMappers) {
addFieldMapper(fieldPredicate, fieldMapper.fieldType().name(), fieldMapper, fieldMappings, request.includeDefaults());
for (Mapper fieldMapper : allFieldMappers) {
addFieldMapper(fieldPredicate, fieldMapper.name(), fieldMapper, fieldMappings, request.includeDefaults());
}
} else if (Regex.isSimpleMatchPattern(field)) {
for (FieldMapper fieldMapper : allFieldMappers) {
if (Regex.simpleMatch(field, fieldMapper.fieldType().name())) {
addFieldMapper(fieldPredicate, fieldMapper.fieldType().name(),
for (Mapper fieldMapper : allFieldMappers) {
if (Regex.simpleMatch(field, fieldMapper.name())) {
addFieldMapper(fieldPredicate, fieldMapper.name(),
fieldMapper, fieldMappings, request.includeDefaults());
}
}
} else {
// not a pattern
FieldMapper fieldMapper = allFieldMappers.getMapper(field);
Mapper fieldMapper = allFieldMappers.getMapper(field);
if (fieldMapper != null) {
addFieldMapper(fieldPredicate, field, fieldMapper, fieldMappings, request.includeDefaults());
} else if (request.probablySingleFieldRequest()) {
@@ -198,7 +198,7 @@ private static Map<String, FieldMappingMetaData> findFieldMappingsByType(Predica
}

private static void addFieldMapper(Predicate<String> fieldPredicate,
String field, FieldMapper fieldMapper, Map<String, FieldMappingMetaData> fieldMappings,
String field, Mapper fieldMapper, Map<String, FieldMappingMetaData> fieldMappings,
boolean includeDefaults) {
if (fieldMappings.containsKey(field)) {
return;
@@ -207,7 +207,7 @@ private static void addFieldMapper(Predicate<String> fieldPredicate,
try {
BytesReference bytes = XContentHelper.toXContent(fieldMapper, XContentType.JSON,
includeDefaults ? includeDefaultsParams : ToXContent.EMPTY_PARAMS, false);
fieldMappings.put(field, new FieldMappingMetaData(fieldMapper.fieldType().name(), bytes));
fieldMappings.put(field, new FieldMappingMetaData(fieldMapper.name(), bytes));
} catch (IOException e) {
throw new ElasticsearchException("failed to serialize XContent of field [" + field + "]", e);
}
16 changes: 5 additions & 11 deletions server/src/main/java/org/elasticsearch/index/IndexWarmer.java
@@ -27,8 +27,6 @@
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.shard.IndexShard;
@@ -121,16 +119,12 @@ private static class FieldDataWarmer implements IndexWarmer.Listener {
public TerminationHandle warmReader(final IndexShard indexShard, final Engine.Searcher searcher) {
final MapperService mapperService = indexShard.mapperService();
final Map<String, MappedFieldType> warmUpGlobalOrdinals = new HashMap<>();
DocumentMapper docMapper = mapperService.documentMapper();
if (docMapper != null) {
for (FieldMapper fieldMapper : docMapper.mappers()) {
final MappedFieldType fieldType = fieldMapper.fieldType();
final String indexName = fieldType.name();
if (fieldType.eagerGlobalOrdinals() == false) {
continue;
}
warmUpGlobalOrdinals.put(indexName, fieldType);
for (MappedFieldType fieldType : mapperService.fieldTypes()) {
final String indexName = fieldType.name();
if (fieldType.eagerGlobalOrdinals() == false) {
continue;
}
warmUpGlobalOrdinals.put(indexName, fieldType);
}
final CountDownLatch latch = new CountDownLatch(warmUpGlobalOrdinals.size());
for (final MappedFieldType fieldType : warmUpGlobalOrdinals.values()) {
@@ -39,7 +39,7 @@
import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor;
import org.elasticsearch.index.fieldvisitor.FieldsVisitor;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.RoutingFieldMapper;
import org.elasticsearch.index.mapper.SourceFieldMapper;
@@ -202,7 +202,7 @@ private GetResult innerGetLoadFromStoredFields(String type, String id, String[]

if (gFields != null && gFields.length > 0) {
for (String field : gFields) {
FieldMapper fieldMapper = docMapper.mappers().getMapper(field);
Mapper fieldMapper = docMapper.mappers().getMapper(field);
if (fieldMapper == null) {
if (docMapper.objectMappers().get(field) != null) {
// Only fail if we know it is a object field, missing paths / fields shouldn't fail.
@@ -28,10 +28,10 @@
import java.util.Iterator;
import java.util.Map;

public final class DocumentFieldMappers implements Iterable<FieldMapper> {
public final class DocumentFieldMappers implements Iterable<Mapper> {

/** Full field name to mapper */
private final Map<String, FieldMapper> fieldMappers;
private final Map<String, Mapper> fieldMappers;

private final FieldNameAnalyzer indexAnalyzer;
private final FieldNameAnalyzer searchAnalyzer;
@@ -44,8 +44,12 @@ private static void put(Map<String, Analyzer> analyzers, String key, Analyzer va
analyzers.put(key, value);
}

public DocumentFieldMappers(Collection<FieldMapper> mappers, Analyzer defaultIndex, Analyzer defaultSearch, Analyzer defaultSearchQuote) {
Map<String, FieldMapper> fieldMappers = new HashMap<>();
public DocumentFieldMappers(Collection<FieldMapper> mappers,
Collection<FieldAliasMapper> aliasMappers,
Analyzer defaultIndex,
Analyzer defaultSearch,
Analyzer defaultSearchQuote) {
Map<String, Mapper> fieldMappers = new HashMap<>();
Map<String, Analyzer> indexAnalyzers = new HashMap<>();
Map<String, Analyzer> searchAnalyzers = new HashMap<>();
Map<String, Analyzer> searchQuoteAnalyzers = new HashMap<>();
@@ -56,14 +60,32 @@ public DocumentFieldMappers(Collection<FieldMapper> mappers, Analyzer defaultInd
put(searchAnalyzers, fieldType.name(), fieldType.searchAnalyzer(), defaultSearch);
put(searchQuoteAnalyzers, fieldType.name(), fieldType.searchQuoteAnalyzer(), defaultSearchQuote);
}

for (FieldAliasMapper aliasMapper : aliasMappers) {
fieldMappers.put(aliasMapper.name(), aliasMapper);
}

this.fieldMappers = Collections.unmodifiableMap(fieldMappers);
this.indexAnalyzer = new FieldNameAnalyzer(indexAnalyzers);
this.searchAnalyzer = new FieldNameAnalyzer(searchAnalyzers);
this.searchQuoteAnalyzer = new FieldNameAnalyzer(searchQuoteAnalyzers);
}

/** Returns the mapper for the given field */
public FieldMapper getMapper(String field) {
/**
* @deprecated Use {@link DocumentFieldMappers#getMapper} instead. To access a field's
* type information, instead use {@link MapperService#fullName}.
*/
@Deprecated
public FieldMapper getFieldMapper(String field) {
Mapper mapper = getMapper(field);
return (mapper instanceof FieldMapper) ? (FieldMapper) mapper : null;
}

/**
* Returns the leaf mapper associated with this field name. Note that the returned mapper
* could be either a concrete {@link FieldMapper}, or a {@link FieldAliasMapper}.
*/
public Mapper getMapper(String field) {
return fieldMappers.get(field);
}

@@ -87,7 +109,7 @@ public Analyzer searchQuoteAnalyzer() {
return this.searchQuoteAnalyzer;
}

public Iterator<FieldMapper> iterator() {
public Iterator<Mapper> iterator() {
return fieldMappers.values().iterator();
}
}
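
Callers of the new getMapper that still need a concrete mapper can filter the result themselves, which is what the deprecated getFieldMapper above does internally. A minimal sketch of handling both cases; the helper class and method names are illustrative only:

import org.elasticsearch.index.mapper.DocumentFieldMappers;
import org.elasticsearch.index.mapper.FieldAliasMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.Mapper;

class MapperLookupSketch {
    /**
     * Returns the concrete mapper for a field, or null when the name is unmapped
     * or resolves to a field alias, mirroring the deprecated getFieldMapper.
     */
    static FieldMapper concreteMapper(DocumentFieldMappers mappers, String field) {
        Mapper mapper = mappers.getMapper(field);
        if (mapper instanceof FieldAliasMapper) {
            // The name resolved to an alias; callers that only need type information
            // can use MapperService#fullName instead of a mapper lookup.
            return null;
        }
        return mapper instanceof FieldMapper ? (FieldMapper) mapper : null;
    }
}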