Skip to content
Permalink
Browse files

Prohibit indexing a document with a parent for a type that doesn't have
a `_parent` field configured, and prohibit adding a `_parent` field to an existing mapping.

Closes #3848
Closes #3849
  • Loading branch information...
martijnvg committed Oct 14, 2013
1 parent 30a7dc5 commit 39dafc064865a624f93ea1bb8f05cc6a730bc3ac
@@ -592,6 +592,14 @@ public void process(MetaData metaData, String aliasOrIndex, @Nullable MappingMet
if (mappingMd.routing().required() && routing == null) {
throw new RoutingMissingException(index, type, id);
}

if (parent != null && !mappingMd.hasParentField()) {
throw new ElasticSearchIllegalArgumentException("Can't specify parent if no parent field has been configured");
}
} else {
if (parent != null) {
throw new ElasticSearchIllegalArgumentException("Can't specify parent if no parent field has been configured");
}
}

// generate id if not already provided and id generation is allowed
@@ -20,6 +20,7 @@
package org.elasticsearch.cluster.metadata;

import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.Version;
import org.elasticsearch.action.TimestampParsingException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
@@ -258,13 +259,15 @@ public int hashCode() {
private Id id;
private Routing routing;
private Timestamp timestamp;
private boolean hasParentField;

/**
 * Builds mapping metadata by pulling the relevant settings (id path, routing,
 * timestamp, and whether a {@code _parent} field is active) out of a parsed
 * document mapper.
 */
public MappingMetaData(DocumentMapper docMapper) {
    type = docMapper.type();
    source = docMapper.mappingSource();
    id = new Id(docMapper.idFieldMapper().path());
    routing = new Routing(docMapper.routingFieldMapper().required(), docMapper.routingFieldMapper().path());
    timestamp = new Timestamp(docMapper.timestampFieldMapper().enabled(), docMapper.timestampFieldMapper().path(), docMapper.timestampFieldMapper().dateTimeFormatter().format());
    hasParentField = docMapper.parentFieldMapper().active();
}

public MappingMetaData(CompressedString mapping) throws IOException {
@@ -344,14 +347,20 @@ private void initMappers(Map<String, Object> withoutType) {
} else {
this.timestamp = Timestamp.EMPTY;
}
if (withoutType.containsKey("_parent")) {
this.hasParentField = true;
} else {
this.hasParentField = false;
}
}

/**
 * Full constructor, used when reconstructing mapping metadata (e.g. after
 * deserialization from the transport layer).
 *
 * @param hasParentField whether the type has an active {@code _parent} field configured
 */
public MappingMetaData(String type, CompressedString source, Id id, Routing routing, Timestamp timestamp, boolean hasParentField) {
    this.type = type;
    this.source = source;
    this.id = id;
    this.routing = routing;
    this.timestamp = timestamp;
    this.hasParentField = hasParentField;
}

void updateDefaultMapping(MappingMetaData defaultMapping) {
@@ -374,6 +383,10 @@ public CompressedString source() {
return this.source;
}

/**
 * @return {@code true} when this type's mapping defines an active {@code _parent} field
 */
public boolean hasParentField() {
    return this.hasParentField;
}

/**
* Converts the serialized compressed form of the mappings into a parsed map.
*/
@@ -516,6 +529,9 @@ public static void writeTo(MappingMetaData mappingMd, StreamOutput out) throws I
out.writeBoolean(false);
}
out.writeString(mappingMd.timestamp().format());
if (out.getVersion().onOrAfter(Version.V_0_90_6)) {
out.writeBoolean(mappingMd.hasParentField());
}
}

@Override
@@ -553,7 +569,13 @@ public static MappingMetaData readFrom(StreamInput in) throws IOException {
Routing routing = new Routing(in.readBoolean(), in.readBoolean() ? in.readString() : null);
// timestamp
Timestamp timestamp = new Timestamp(in.readBoolean(), in.readBoolean() ? in.readString() : null, in.readString());
return new MappingMetaData(type, source, id, routing, timestamp);
final boolean hasParentField;
if (in.getVersion().onOrAfter(Version.V_0_90_6)) {
hasParentField = in.readBoolean();
} else {
hasParentField = true; // We assume here that the type has a parent field, which conforms with the behaviour of <= 0.90.5
}
return new MappingMetaData(type, source, id, routing, timestamp, hasParentField);
}

public static class ParseContext {
@@ -123,7 +123,7 @@ public void refresh(List<AtomicReaderContext> atomicReaderContexts) throws IOExc
BytesRef spare = new BytesRef();
for (String type : indexService.mapperService().types()) {
ParentFieldMapper parentFieldMapper = indexService.mapperService().documentMapper(type).parentFieldMapper();
if (parentFieldMapper != null) {
if (parentFieldMapper.active()) {
parentTypes.add(new HashedBytesArray(Strings.toUTF8Bytes(parentFieldMapper.type(), spare)));
}
}
@@ -204,7 +204,7 @@ public GetResult innerGet(String type, String id, String[] gFields, boolean real
Object value = null;
if (field.equals(RoutingFieldMapper.NAME) && docMapper.routingFieldMapper().fieldType().stored()) {
value = source.routing;
} else if (field.equals(ParentFieldMapper.NAME) && docMapper.parentFieldMapper() != null && docMapper.parentFieldMapper().fieldType().stored()) {
} else if (field.equals(ParentFieldMapper.NAME) && docMapper.parentFieldMapper().active() && docMapper.parentFieldMapper().fieldType().stored()) {
value = source.parent;
} else if (field.equals(TimestampFieldMapper.NAME) && docMapper.timestampFieldMapper().fieldType().stored()) {
value = source.timestamp;
@@ -176,7 +176,7 @@ public Builder(String index, @Nullable Settings indexSettings, RootObjectMapper.
this.rootMappers.put(BoostFieldMapper.class, new BoostFieldMapper());
this.rootMappers.put(TimestampFieldMapper.class, new TimestampFieldMapper());
this.rootMappers.put(TTLFieldMapper.class, new TTLFieldMapper());
// don't add parent field, by default its "null"
this.rootMappers.put(ParentFieldMapper.class, new ParentFieldMapper());
}

public Builder meta(ImmutableMap<String, Object> meta) {
@@ -305,7 +305,7 @@ public DocumentMapper(String index, @Nullable Settings indexSettings, DocumentMa

this.typeFilter = typeMapper().termFilter(type, null);

if (rootMapper(ParentFieldMapper.class) != null) {
if (rootMapper(ParentFieldMapper.class).active()) {
// mark the routing field mapper as required
rootMapper(RoutingFieldMapper.class).markAsRequired();
}
@@ -630,8 +630,9 @@ public void traverse(ObjectMapperListener listener) {

public synchronized MergeResult merge(DocumentMapper mergeWith, MergeFlags mergeFlags) {
MergeContext mergeContext = new MergeContext(this, mergeFlags);
rootObjectMapper.merge(mergeWith.rootObjectMapper, mergeContext);
assert rootMappers.size() == mergeWith.rootMappers.size();

rootObjectMapper.merge(mergeWith.rootObjectMapper, mergeContext);
for (Map.Entry<Class<? extends RootMapper>, RootMapper> entry : rootMappers.entrySet()) {
// root mappers included in root object will get merge in the rootObjectMapper
if (entry.getValue().includeInObject()) {
@@ -130,6 +130,13 @@ protected ParentFieldMapper(String name, String indexName, String type, Postings
this.typeAsBytes = new BytesRef(type);
}

/**
 * Creates an inactive {@code _parent} field mapper: {@code type} is left
 * {@code null}, so {@code active()} reports {@code false} and this mapper
 * behaves as a no-op during parsing and serialization.
 */
public ParentFieldMapper() {
super(new Names(Defaults.NAME, Defaults.NAME, Defaults.NAME, Defaults.NAME), Defaults.BOOST, new FieldType(Defaults.FIELD_TYPE),
Lucene.KEYWORD_ANALYZER, Lucene.KEYWORD_ANALYZER, null, null, null);
// null type marks this mapper as inactive (see active())
type = null;
typeAsBytes = null;
}

/**
 * @return the configured parent type, or {@code null} when no {@code _parent}
 *         field has been set up for this mapper
 */
public String type() {
    return this.type;
}
@@ -164,6 +171,10 @@ public boolean includeInObject() {

@Override
protected Field parseCreateField(ParseContext context) throws IOException {
if (!active()) {
return null;
}

if (context.parser().currentName() != null && context.parser().currentName().equals(Defaults.NAME)) {
// we are in the parsing of _parent phase
String parentId = context.parser().text();
@@ -248,7 +259,7 @@ public Filter termFilter(Object value, @Nullable QueryParseContext context) {

List<String> types = new ArrayList<String>(context.mapperService().types().size());
for (DocumentMapper documentMapper : context.mapperService()) {
if (documentMapper.parentFieldMapper() == null) {
if (!documentMapper.parentFieldMapper().active()) {
types.add(documentMapper.type());
}
}
@@ -279,7 +290,7 @@ public Filter termsFilter(List values, @Nullable QueryParseContext context) {

List<String> types = new ArrayList<String>(context.mapperService().types().size());
for (DocumentMapper documentMapper : context.mapperService()) {
if (documentMapper.parentFieldMapper() == null) {
if (!documentMapper.parentFieldMapper().active()) {
types.add(documentMapper.type());
}
}
@@ -314,6 +325,10 @@ protected String contentType() {

@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (!active()) {
return builder;
}

builder.startObject(CONTENT_TYPE);
builder.field("type", type);
builder.endObject();
@@ -322,6 +337,21 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws

@Override
public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
    ParentFieldMapper other = (ParentFieldMapper) mergeWith;
    // Adding, removing, or re-pointing the _parent field on an existing mapping
    // must be rejected: already-indexed documents were routed and joined against
    // the previous parent type.
    boolean activeChanged = active() != other.active();
    // Guard with active() so we never dereference a null type on inactive mappers.
    boolean typeChanged = active() && other.active() && !type.equals(other.type);
    if (activeChanged || typeChanged) {
        mergeContext.addConflict("The _parent field can't be added or updated");
    }
}

/**
 * @return {@code true} when a parent type has been configured for this mapper,
 *         i.e. the {@code _parent} field is actually in use
 */
public boolean active() {
    return this.type != null;
}

}
@@ -121,7 +121,7 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar
if (childDocMapper == null) {
throw new QueryParsingException(parseContext.index(), "No mapping for for type [" + childType + "]");
}
if (childDocMapper.parentFieldMapper() == null) {
if (!childDocMapper.parentFieldMapper().active()) {
throw new QueryParsingException(parseContext.index(), "Type [" + childType + "] does not have parent mapping");
}
String parentType = childDocMapper.parentFieldMapper().type();
@@ -123,7 +123,7 @@ public Query parse(QueryParseContext parseContext) throws IOException, QueryPars
if (childDocMapper == null) {
throw new QueryParsingException(parseContext.index(), "[has_child] No mapping for for type [" + childType + "]");
}
if (childDocMapper.parentFieldMapper() == null) {
if (!childDocMapper.parentFieldMapper().active()) {
throw new QueryParsingException(parseContext.index(), "[has_child] Type [" + childType + "] does not have parent mapping");
}
String parentType = childDocMapper.parentFieldMapper().type();
@@ -135,7 +135,7 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar
parentTypes.add(parentType);
for (DocumentMapper documentMapper : parseContext.mapperService()) {
ParentFieldMapper parentFieldMapper = documentMapper.parentFieldMapper();
if (parentFieldMapper != null) {
if (parentFieldMapper.active()) {
DocumentMapper parentTypeDocumentMapper = parseContext.mapperService().documentMapper(parentFieldMapper.type());
if (parentTypeDocumentMapper == null) {
// Only add this, if this parentFieldMapper (also a parent) isn't a child of another parent.
@@ -134,7 +134,7 @@ public Query parse(QueryParseContext parseContext) throws IOException, QueryPars
parentTypes.add(parentType);
for (DocumentMapper documentMapper : parseContext.mapperService()) {
ParentFieldMapper parentFieldMapper = documentMapper.parentFieldMapper();
if (parentFieldMapper != null) {
if (parentFieldMapper.active()) {
DocumentMapper parentTypeDocumentMapper = parseContext.mapperService().documentMapper(parentFieldMapper.type());
if (parentTypeDocumentMapper == null) {
// Only add this, if this parentFieldMapper (also a parent) isn't a child of another parent.
@@ -120,7 +120,7 @@ public Query parse(QueryParseContext parseContext) throws IOException, QueryPars
if (childDocMapper == null) {
throw new QueryParsingException(parseContext.index(), "No mapping for for type [" + childType + "]");
}
if (childDocMapper.parentFieldMapper() == null) {
if (!childDocMapper.parentFieldMapper().active()) {
throw new QueryParsingException(parseContext.index(), "Type [" + childType + "] does not have parent mapping");
}
String parentType = childDocMapper.parentFieldMapper().type();
Oops, something went wrong.

0 comments on commit 39dafc0

Please sign in to comment.
You can’t perform that action at this time.