From 0e3d4c0944e6cab5ef515a26b4b1cdccfe70e486 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Fri, 26 Feb 2021 15:43:18 +0100 Subject: [PATCH 1/7] Prepare issue branch. --- pom.xml | 6 ++++-- spring-data-mongodb-benchmarks/pom.xml | 2 +- spring-data-mongodb-distribution/pom.xml | 2 +- spring-data-mongodb/pom.xml | 2 +- 4 files changed, 7 insertions(+), 5 deletions(-) diff --git a/pom.xml b/pom.xml index 28784cc464..4b7c541237 100644 --- a/pom.xml +++ b/pom.xml @@ -1,11 +1,13 @@ - + 4.0.0 org.springframework.data spring-data-mongodb-parent - 3.2.0-SNAPSHOT + 3.2.0-GH-3571-SNAPSHOT pom Spring Data MongoDB diff --git a/spring-data-mongodb-benchmarks/pom.xml b/spring-data-mongodb-benchmarks/pom.xml index f0fbb601c8..0c8d061f09 100644 --- a/spring-data-mongodb-benchmarks/pom.xml +++ b/spring-data-mongodb-benchmarks/pom.xml @@ -7,7 +7,7 @@ org.springframework.data spring-data-mongodb-parent - 3.2.0-SNAPSHOT + 3.2.0-GH-3571-SNAPSHOT ../pom.xml diff --git a/spring-data-mongodb-distribution/pom.xml b/spring-data-mongodb-distribution/pom.xml index 1a17321782..9a9674fb16 100644 --- a/spring-data-mongodb-distribution/pom.xml +++ b/spring-data-mongodb-distribution/pom.xml @@ -14,7 +14,7 @@ org.springframework.data spring-data-mongodb-parent - 3.2.0-SNAPSHOT + 3.2.0-GH-3571-SNAPSHOT ../pom.xml diff --git a/spring-data-mongodb/pom.xml b/spring-data-mongodb/pom.xml index 0248517caf..44f86c4b46 100644 --- a/spring-data-mongodb/pom.xml +++ b/spring-data-mongodb/pom.xml @@ -11,7 +11,7 @@ org.springframework.data spring-data-mongodb-parent - 3.2.0-SNAPSHOT + 3.2.0-GH-3571-SNAPSHOT ../pom.xml From bab34c70e44fe6b7f5affc44f5f19b350ddc01eb Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Fri, 26 Feb 2021 10:46:01 +0100 Subject: [PATCH 2/7] Deduplicate code --- .../core/convert/MappingMongoConverter.java | 57 ++++++------------- 1 file changed, 18 insertions(+), 39 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index fd3a832e56..c98e58fe0d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -1145,28 +1145,7 @@ private Object readCollectionOrArray(TypeInformation targetType, Collection) element, path)); - } else { - items.add(getPotentiallyConvertedSimpleRead(element, rawComponentType)); - } - } + items.add(readValue(element, componentType, path)); } return getPotentiallyConvertedSimpleRead(items, targetType.getType()); @@ -1216,25 +1195,13 @@ protected Map readMap(TypeInformation type, Bson bson, Object Object value = entry.getValue(); TypeInformation defaultedValueType = valueType != null ? valueType : ClassTypeInformation.OBJECT; - - if (value instanceof Document) { - map.put(key, read(defaultedValueType, (Document) value, path)); - } else if (value instanceof BasicDBObject) { - map.put(key, read(defaultedValueType, (BasicDBObject) value, path)); - } else if (value instanceof DBRef) { - map.put(key, DBRef.class.equals(rawValueType) ? value - : readAndConvertDBRef((DBRef) value, defaultedValueType, ObjectPath.ROOT, rawValueType)); - } else if (value instanceof List) { - map.put(key, readCollectionOrArray(valueType != null ? 
valueType : ClassTypeInformation.LIST, - (List) value, path)); - } else { - map.put(key, getPotentiallyConvertedSimpleRead(value, rawValueType)); - } + map.put(key, readValue(value, defaultedValueType, path)); } return map; } + @SuppressWarnings("unchecked") private static Map asMap(Bson bson) { @@ -1608,7 +1575,13 @@ protected T potentiallyConvertSpelValue(Object object, Parameter T readValue(Object value, TypeInformation type, ObjectPath path) { + T readValue(@Nullable Object value, TypeInformation type, ObjectPath path) { + + if (value == null) { + return null; + } + + Assert.notNull(type, "TypeInformation must not be null"); Class rawType = type.getType(); @@ -1616,8 +1589,14 @@ T readValue(Object value, TypeInformation type, ObjectPath path) { return (T) conversionService.convert(value, rawType); } else if (value instanceof DBRef) { return potentiallyReadOrResolveDbRef((DBRef) value, type, path, rawType); - } else if (value instanceof List) { - return (T) readCollectionOrArray(type, (List) value, path); + } else if (value instanceof Collection) { + + if (!Object.class.equals(rawType)) { + if (!rawType.isArray() && !ClassUtils.isAssignable(Iterable.class, rawType)) { + throw new MappingException(String.format(INCOMPATIBLE_TYPES, value, value.getClass(), rawType, path)); + } + } + return (T) readCollectionOrArray(type, (Collection) value, path); } else if (value instanceof Document) { return (T) read(type, (Document) value, path); } else if (value instanceof DBObject) { From 3b8f28acaad6fdcc6fff4d3cdf2fee7ce72e39b7 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Fri, 26 Feb 2021 15:07:52 +0100 Subject: [PATCH 3/7] Introduce ConversionContext. --- .../core/convert/MappingMongoConverter.java | 367 ++++++++++++------ .../DbRefMappingMongoConverterUnitTests.java | 2 +- .../MappingMongoConverterUnitTests.java | 5 +- 3 files changed, 254 insertions(+), 120 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index c98e58fe0d..a3d41df9ff 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -102,6 +102,8 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App private static final String INCOMPATIBLE_TYPES = "Cannot convert %1$s of type %2$s into an instance of %3$s! Implement a custom Converter<%2$s, %3$s> and register it with the CustomConversions. 
Parent object was: %4$s"; private static final String INVALID_TYPE_TO_READ = "Expected to read Document %s into type %s but didn't find a PersistentEntity for the latter!"; + public static final ClassTypeInformation BSON = ClassTypeInformation.from(Bson.class); + protected static final Logger LOGGER = LoggerFactory.getLogger(MappingMongoConverter.class); protected final MappingContext, MongoPersistentProperty> mappingContext; @@ -137,9 +139,19 @@ public MappingMongoConverter(DbRefResolver dbRefResolver, this::getWriteTarget); this.idMapper = new QueryMapper(this); + this.spELContext = new SpELContext(DocumentPropertyAccessor.INSTANCE); this.dbRefProxyHandler = new DefaultDbRefProxyHandler(spELContext, mappingContext, - MappingMongoConverter.this::getValueInternal); + (prop, bson, evaluator, path) -> { + + ConversionContext context = getConversionContext(path); + return MappingMongoConverter.this.getValueInternal(context, prop, bson, evaluator); + }); + } + + ConversionContext getConversionContext(ObjectPath path) { + return new ConversionContext(path, this::readDocument, this::readCollectionOrArray, this::readMap, this::readDBRef, + this::getPotentiallyConvertedSimpleRead); } /** @@ -252,12 +264,19 @@ protected S read(TypeInformation type, Bson bson) { return read(type, bson, ObjectPath.ROOT); } - @Nullable @SuppressWarnings("unchecked") private S read(TypeInformation type, Bson bson, ObjectPath path) { + ConversionContext conversionContext = getConversionContext(path); + + return doRead(conversionContext, type, bson); + } + + private S doRead(ConversionContext context, TypeInformation type, Bson bson) { + Assert.notNull(bson, "Bson must not be null!"); + // TODO: Cleanup duplication TypeInformation typeToUse = typeMapper.readType(bson, type); Class rawType = typeToUse.getType(); @@ -282,26 +301,32 @@ private S read(TypeInformation type, Bson bson, ObjectPath return (S) bson; } - if (typeToUse.isCollectionLike() && bson instanceof List) { - return (S) readCollectionOrArray(typeToUse, (List) bson, path); - } + return context.convert(typeToUse, bson); + } - if (typeToUse.isMap()) { - return (S) readMap(typeToUse, bson, path); - } + private S readDocument(ConversionContext ctx, Bson bson, TypeInformation typeHint) { - if (bson instanceof Collection) { - throw new MappingException(String.format(INCOMPATIBLE_TYPES, bson, BasicDBList.class, typeToUse.getType(), path)); + // TODO: Cleanup duplication + + Document document = bson instanceof BasicDBObject ? new Document((BasicDBObject) bson) : (Document) bson; + TypeInformation typeToRead = typeMapper.readType(document, typeHint); + Class rawType = typeToRead.getType(); + + // Discuss: Potentially the wrong thing to do. In a Map<…, Object> if the database type is Person we would apply a + // custom converter if registered for Person + if (conversions.hasCustomReadTarget(bson.getClass(), rawType)) { + return conversionService.convert(bson, rawType); } - if (typeToUse.equals(ClassTypeInformation.OBJECT)) { + if (typeToRead.isMap()) { return (S) bson; } - // Retrieve persistent entity info - Document target = bson instanceof BasicDBObject ? 
new Document((BasicDBObject) bson) : (Document) bson; + if (BSON.isAssignableFrom(typeHint)) { + return (S) bson; + } - MongoPersistentEntity entity = mappingContext.getPersistentEntity(typeToUse); + MongoPersistentEntity entity = mappingContext.getPersistentEntity(typeToRead); if (entity == null) { @@ -309,29 +334,29 @@ private S read(TypeInformation type, Bson bson, ObjectPath Optional> codec = codecRegistryProvider.getCodecFor(rawType); if (codec.isPresent()) { - return codec.get().decode(new JsonReader(target.toJson()), DecoderContext.builder().build()); + return codec.get().decode(new JsonReader(document.toJson()), DecoderContext.builder().build()); } } - throw new MappingException(String.format(INVALID_TYPE_TO_READ, target, typeToUse.getType())); + throw new MappingException(String.format(INVALID_TYPE_TO_READ, document, rawType)); } - return read((MongoPersistentEntity) entity, target, path); + return read(ctx, (MongoPersistentEntity) entity, document); } - private ParameterValueProvider getParameterProvider(MongoPersistentEntity entity, - DocumentAccessor source, SpELExpressionEvaluator evaluator, ObjectPath path) { + private ParameterValueProvider getParameterProvider(ConversionContext context, + MongoPersistentEntity entity, DocumentAccessor source, SpELExpressionEvaluator evaluator) { - AssociationAwareMongoDbPropertyValueProvider provider = new AssociationAwareMongoDbPropertyValueProvider(source, - evaluator, path); + AssociationAwareMongoDbPropertyValueProvider provider = new AssociationAwareMongoDbPropertyValueProvider(context, + source, evaluator); PersistentEntityParameterValueProvider parameterProvider = new PersistentEntityParameterValueProvider<>( - entity, provider, path.getCurrentObject()); + entity, provider, context.getPath().getCurrentObject()); return new ConverterAwareSpELExpressionParameterValueProvider(evaluator, conversionService, parameterProvider, - path); + context); } - private S read(final MongoPersistentEntity entity, final Document bson, final ObjectPath path) { + private S read(ConversionContext context, MongoPersistentEntity entity, Document bson) { SpELExpressionEvaluator evaluator = new DefaultSpELExpressionEvaluator(bson, spELContext); DocumentAccessor documentAccessor = new DocumentAccessor(bson); @@ -339,20 +364,21 @@ private S read(final MongoPersistentEntity entity, final D PreferredConstructor persistenceConstructor = entity.getPersistenceConstructor(); ParameterValueProvider provider = persistenceConstructor != null - && persistenceConstructor.hasParameters() ? getParameterProvider(entity, documentAccessor, evaluator, path) + && persistenceConstructor.hasParameters() ? 
getParameterProvider(context, entity, documentAccessor, evaluator) : NoOpParameterValueProvider.INSTANCE; EntityInstantiator instantiator = instantiators.getInstantiatorFor(entity); S instance = instantiator.createInstance(entity, provider); if (entity.requiresPropertyPopulation()) { - return populateProperties(entity, documentAccessor, path, evaluator, instance); + return populateProperties(context, entity, documentAccessor, evaluator, instance); } return instance; } - private S populateProperties(MongoPersistentEntity entity, DocumentAccessor documentAccessor, ObjectPath path, + private S populateProperties(ConversionContext context, MongoPersistentEntity entity, + DocumentAccessor documentAccessor, SpELExpressionEvaluator evaluator, S instance) { PersistentPropertyAccessor accessor = new ConvertingPropertyAccessor<>(entity.getPropertyAccessor(instance), @@ -360,13 +386,14 @@ private S populateProperties(MongoPersistentEntity entity, DocumentAccess // Make sure id property is set before all other properties - Object rawId = readAndPopulateIdentifier(accessor, documentAccessor, entity, path, evaluator); - ObjectPath currentPath = path.push(accessor.getBean(), entity, rawId); + Object rawId = readAndPopulateIdentifier(context, accessor, documentAccessor, entity, evaluator); + ObjectPath currentPath = context.getPath().push(accessor.getBean(), entity, rawId); + ConversionContext contextToUse = context.withPath(currentPath); - MongoDbPropertyValueProvider valueProvider = new MongoDbPropertyValueProvider(documentAccessor, evaluator, - currentPath); + MongoDbPropertyValueProvider valueProvider = new MongoDbPropertyValueProvider(contextToUse, documentAccessor, + evaluator); - readProperties(entity, accessor, documentAccessor, valueProvider, currentPath, evaluator); + readProperties(contextToUse, entity, accessor, documentAccessor, valueProvider, evaluator); return accessor.getBean(); } @@ -375,15 +402,15 @@ private S populateProperties(MongoPersistentEntity entity, DocumentAccess * Reads the identifier from either the bean backing the {@link PersistentPropertyAccessor} or the source document in * case the identifier has not be populated yet. In this case the identifier is set on the bean for further reference. * + * @param context * @param accessor must not be {@literal null}. * @param document must not be {@literal null}. * @param entity must not be {@literal null}. - * @param path * @param evaluator * @return */ - private Object readAndPopulateIdentifier(PersistentPropertyAccessor accessor, DocumentAccessor document, - MongoPersistentEntity entity, ObjectPath path, SpELExpressionEvaluator evaluator) { + private Object readAndPopulateIdentifier(ConversionContext context, PersistentPropertyAccessor accessor, + DocumentAccessor document, MongoPersistentEntity entity, SpELExpressionEvaluator evaluator) { Object rawId = document.getRawId(entity); @@ -397,22 +424,24 @@ private Object readAndPopulateIdentifier(PersistentPropertyAccessor accessor, return rawId; } - accessor.setProperty(idProperty, readIdValue(path, evaluator, idProperty, rawId)); + accessor.setProperty(idProperty, readIdValue(context, evaluator, idProperty, rawId)); return rawId; } - private Object readIdValue(ObjectPath path, SpELExpressionEvaluator evaluator, MongoPersistentProperty idProperty, + private Object readIdValue(ConversionContext context, SpELExpressionEvaluator evaluator, + MongoPersistentProperty idProperty, Object rawId) { String expression = idProperty.getSpelExpression(); Object resolvedValue = expression != null ? 
evaluator.evaluate(expression) : rawId; - return resolvedValue != null ? readValue(resolvedValue, idProperty.getTypeInformation(), path) : null; + return resolvedValue != null ? readValue(context, resolvedValue, idProperty.getTypeInformation()) : null; } - private void readProperties(MongoPersistentEntity entity, PersistentPropertyAccessor accessor, - DocumentAccessor documentAccessor, MongoDbPropertyValueProvider valueProvider, ObjectPath currentPath, + private void readProperties(ConversionContext context, MongoPersistentEntity entity, + PersistentPropertyAccessor accessor, DocumentAccessor documentAccessor, + MongoDbPropertyValueProvider valueProvider, SpELExpressionEvaluator evaluator) { DbRefResolverCallback callback = null; @@ -422,7 +451,7 @@ private void readProperties(MongoPersistentEntity entity, PersistentPropertyA if (prop.isAssociation() && !entity.isConstructorArgument(prop)) { if (callback == null) { - callback = getDbRefResolverCallback(documentAccessor, currentPath, evaluator); + callback = getDbRefResolverCallback(context, documentAccessor, evaluator); } readAssociation(prop.getRequiredAssociation(), accessor, documentAccessor, dbRefProxyHandler, callback); @@ -432,7 +461,7 @@ private void readProperties(MongoPersistentEntity entity, PersistentPropertyA if (prop.isEmbedded()) { accessor.setProperty(prop, - readEmbedded(documentAccessor, currentPath, prop, mappingContext.getPersistentEntity(prop))); + readEmbedded(context, documentAccessor, prop, mappingContext.getPersistentEntity(prop))); continue; } @@ -449,7 +478,7 @@ private void readProperties(MongoPersistentEntity entity, PersistentPropertyA if (prop.isAssociation()) { if (callback == null) { - callback = getDbRefResolverCallback(documentAccessor, currentPath, evaluator); + callback = getDbRefResolverCallback(context, documentAccessor, evaluator); } readAssociation(prop.getRequiredAssociation(), accessor, documentAccessor, dbRefProxyHandler, callback); @@ -460,11 +489,11 @@ private void readProperties(MongoPersistentEntity entity, PersistentPropertyA } } - private DbRefResolverCallback getDbRefResolverCallback(DocumentAccessor documentAccessor, ObjectPath currentPath, + private DbRefResolverCallback getDbRefResolverCallback(ConversionContext context, DocumentAccessor documentAccessor, SpELExpressionEvaluator evaluator) { - return new DefaultDbRefResolverCallback(documentAccessor.getDocument(), currentPath, evaluator, - MappingMongoConverter.this::getValueInternal); + return new DefaultDbRefResolverCallback(documentAccessor.getDocument(), context.getPath(), evaluator, + (prop, bson, e, path) -> MappingMongoConverter.this.getValueInternal(context, prop, bson, e)); } private void readAssociation(Association association, PersistentPropertyAccessor accessor, @@ -482,16 +511,17 @@ private void readAssociation(Association association, P } @Nullable - private Object readEmbedded(DocumentAccessor documentAccessor, ObjectPath currentPath, MongoPersistentProperty prop, + private Object readEmbedded(ConversionContext context, DocumentAccessor documentAccessor, + MongoPersistentProperty prop, MongoPersistentEntity embeddedEntity) { if (prop.findAnnotation(Embedded.class).onEmpty().equals(OnEmpty.USE_EMPTY)) { - return read(embeddedEntity, (Document) documentAccessor.getDocument(), currentPath); + return read(context, embeddedEntity, (Document) documentAccessor.getDocument()); } for (MongoPersistentProperty persistentProperty : embeddedEntity) { if (documentAccessor.hasValue(persistentProperty)) { - return read(embeddedEntity, 
(Document) documentAccessor.getDocument(), currentPath); + return read(context, embeddedEntity, (Document) documentAccessor.getDocument()); } } return null; @@ -1039,6 +1069,19 @@ private Object getPotentiallyConvertedSimpleWrite(@Nullable Object value, @Nulla return Enum.class.isAssignableFrom(value.getClass()) ? ((Enum) value).name() : value; } + /** + * Checks whether we have a custom conversion for the given simple object. Converts the given value if so, applies + * {@link Enum} handling or returns the value as is. + * + * @param value + * @param target must not be {@literal null}. + * @return + */ + @Nullable + private Object getPotentiallyConvertedSimpleRead(@Nullable Object value, TypeInformation target) { + return getPotentiallyConvertedSimpleRead(value, target.getType()); + } + /** * Checks whether we have a custom conversion for the given simple object. Converts the given value if so, applies * {@link Enum} handling or returns the value as is. @@ -1102,24 +1145,24 @@ protected DBRef createDBRef(Object target, MongoPersistentProperty property) { } @Nullable - private Object getValueInternal(MongoPersistentProperty prop, Bson bson, SpELExpressionEvaluator evaluator, - ObjectPath path) { - return new MongoDbPropertyValueProvider(bson, evaluator, path).getPropertyValue(prop); + private Object getValueInternal(ConversionContext context, MongoPersistentProperty prop, Bson bson, + SpELExpressionEvaluator evaluator) { + return new MongoDbPropertyValueProvider(bson, evaluator, context).getPropertyValue(prop); } /** * Reads the given {@link BasicDBList} into a collection of the given {@link TypeInformation}. * - * @param targetType must not be {@literal null}. * @param source must not be {@literal null}. + * @param targetType must not be {@literal null}. * @param path must not be {@literal null}. * @return the converted {@link Collection} or array, will never be {@literal null}. */ @SuppressWarnings("unchecked") - private Object readCollectionOrArray(TypeInformation targetType, Collection source, ObjectPath path) { + @Nullable + private Object readCollectionOrArray(ConversionContext context, Collection source, TypeInformation targetType) { Assert.notNull(targetType, "Target type must not be null!"); - Assert.notNull(path, "Object path must not be null!"); Class collectionType = targetType.isSubTypeOf(Collection.class) // ? 
targetType.getType() // @@ -1140,12 +1183,12 @@ private Object readCollectionOrArray(TypeInformation targetType, Collection objects = bulkReadAndConvertDBRefs((List) source, componentType, path, rawComponentType); + List objects = bulkReadAndConvertDBRefs(context, (List) source, componentType, rawComponentType); return getPotentiallyConvertedSimpleRead(objects, targetType.getType()); } for (Object element : source) { - items.add(readValue(element, componentType, path)); + items.add(context.convert(componentType, element)); } return getPotentiallyConvertedSimpleRead(items, targetType.getType()); @@ -1154,16 +1197,14 @@ private Object readCollectionOrArray(TypeInformation targetType, Collection readMap(TypeInformation type, Bson bson, ObjectPath path) { + protected Map readMap(ConversionContext context, Bson bson, TypeInformation type) { Assert.notNull(bson, "Document must not be null!"); - Assert.notNull(path, "Object path must not be null!"); Class mapType = typeMapper.readType(bson, type).getType(); @@ -1177,7 +1218,7 @@ protected Map readMap(TypeInformation type, Bson bson, Object Map map = CollectionFactory.createMap(mapType, rawKeyType, sourceMap.keySet().size()); if (!DBRef.class.equals(rawValueType) && isCollectionOfDbRefWhereBulkFetchIsPossible(sourceMap.values())) { - bulkReadAndConvertDBRefMapIntoTarget(valueType, rawValueType, sourceMap, map); + bulkReadAndConvertDBRefMapIntoTarget(context, valueType, rawValueType, sourceMap, map); return map; } @@ -1195,13 +1236,12 @@ protected Map readMap(TypeInformation type, Bson bson, Object Object value = entry.getValue(); TypeInformation defaultedValueType = valueType != null ? valueType : ClassTypeInformation.OBJECT; - map.put(key, readValue(value, defaultedValueType, path)); + map.put(key, context.convert(defaultedValueType, value)); } return map; } - @SuppressWarnings("unchecked") private static Map asMap(Bson bson) { @@ -1270,7 +1310,7 @@ private static void removeFromMap(Bson bson, String key) { @Nullable @SuppressWarnings("unchecked") @Override - public Object convertToMongoType(@Nullable Object obj, TypeInformation typeInformation) { + public Object convertToMongoType(@Nullable Object obj, @Nullable TypeInformation typeInformation) { if (obj == null) { return null; @@ -1433,11 +1473,11 @@ private Object removeTypeInfo(Object object, boolean recursively) { * @author Mark Paluch * @author Christoph Strobl */ - class MongoDbPropertyValueProvider implements PropertyValueProvider { + static class MongoDbPropertyValueProvider implements PropertyValueProvider { + final ConversionContext context; final DocumentAccessor accessor; final SpELExpressionEvaluator evaluator; - final ObjectPath path; /** * Creates a new {@link MongoDbPropertyValueProvider} for the given source, {@link SpELExpressionEvaluator} and @@ -1445,29 +1485,29 @@ class MongoDbPropertyValueProvider implements PropertyValueProvider T getPropertyValue(MongoPersistentProperty property) { return null; } - return readValue(value, property.getTypeInformation(), path); + return (T) context.convert(property.getTypeInformation(), value); } } @@ -1504,11 +1544,10 @@ class AssociationAwareMongoDbPropertyValueProvider extends MongoDbPropertyValueP * * @param source must not be {@literal null}. * @param evaluator must not be {@literal null}. - * @param path must not be {@literal null}. 
*/ - AssociationAwareMongoDbPropertyValueProvider(DocumentAccessor source, SpELExpressionEvaluator evaluator, - ObjectPath path) { - super(source, evaluator, path); + AssociationAwareMongoDbPropertyValueProvider(ConversionContext context, DocumentAccessor source, + SpELExpressionEvaluator evaluator) { + super(context, source, evaluator); } /* @@ -1526,8 +1565,9 @@ public T getPropertyValue(MongoPersistentProperty property) { return null; } - DbRefResolverCallback callback = new DefaultDbRefResolverCallback(accessor.getDocument(), path, evaluator, - MappingMongoConverter.this::getValueInternal); + DbRefResolverCallback callback = new DefaultDbRefResolverCallback(accessor.getDocument(), context.getPath(), + evaluator, (prop, bson, evaluator, path) -> MappingMongoConverter.this.getValueInternal(context, prop, bson, + evaluator)); DBRef dbref = rawRefValue instanceof DBRef ? (DBRef) rawRefValue : null; return (T) dbRefResolver.resolveDbRef(property, dbref, callback, dbRefProxyHandler); @@ -1546,7 +1586,7 @@ public T getPropertyValue(MongoPersistentProperty property) { private class ConverterAwareSpELExpressionParameterValueProvider extends SpELExpressionParameterValueProvider { - private final ObjectPath path; + private final ConversionContext context; /** * Creates a new {@link ConverterAwareSpELExpressionParameterValueProvider}. @@ -1554,13 +1594,14 @@ private class ConverterAwareSpELExpressionParameterValueProvider * @param evaluator must not be {@literal null}. * @param conversionService must not be {@literal null}. * @param delegate must not be {@literal null}. + * @param context */ public ConverterAwareSpELExpressionParameterValueProvider(SpELExpressionEvaluator evaluator, ConversionService conversionService, ParameterValueProvider delegate, - ObjectPath path) { + ConversionContext context) { super(evaluator, conversionService, delegate); - this.path = path; + this.context = context; } /* @@ -1569,13 +1610,13 @@ public ConverterAwareSpELExpressionParameterValueProvider(SpELExpressionEvaluato */ @Override protected T potentiallyConvertSpelValue(Object object, Parameter parameter) { - return readValue(object, parameter.getType(), path); + return context.convert(parameter.getType(), object); } } @Nullable @SuppressWarnings("unchecked") - T readValue(@Nullable Object value, TypeInformation type, ObjectPath path) { + T readValue(ConversionContext context, @Nullable Object value, TypeInformation type) { if (value == null) { return null; @@ -1588,52 +1629,39 @@ T readValue(@Nullable Object value, TypeInformation type, ObjectPath path if (conversions.hasCustomReadTarget(value.getClass(), rawType)) { return (T) conversionService.convert(value, rawType); } else if (value instanceof DBRef) { - return potentiallyReadOrResolveDbRef((DBRef) value, type, path, rawType); - } else if (value instanceof Collection) { - - if (!Object.class.equals(rawType)) { - if (!rawType.isArray() && !ClassUtils.isAssignable(Iterable.class, rawType)) { - throw new MappingException(String.format(INCOMPATIBLE_TYPES, value, value.getClass(), rawType, path)); - } - } - return (T) readCollectionOrArray(type, (Collection) value, path); - } else if (value instanceof Document) { - return (T) read(type, (Document) value, path); - } else if (value instanceof DBObject) { - return (T) read(type, (BasicDBObject) value, path); - } else { - return (T) getPotentiallyConvertedSimpleRead(value, rawType); + return readDBRef(context, (DBRef) value, type); } + + return (T) context.convert(type, value); } @Nullable - 
@SuppressWarnings("unchecked") - private T potentiallyReadOrResolveDbRef(@Nullable DBRef dbref, TypeInformation type, ObjectPath path, - Class rawType) { + private T readDBRef(ConversionContext context, @Nullable DBRef dbref, TypeInformation type) { - if (rawType.equals(DBRef.class)) { + if (type.getType().equals(DBRef.class)) { return (T) dbref; } - T object = dbref == null ? null : path.getPathItem(dbref.getId(), dbref.getCollectionName(), (Class) rawType); - return object != null ? object : readAndConvertDBRef(dbref, type, path, rawType); - } + ObjectPath path = context.getPath(); - @Nullable - private T readAndConvertDBRef(@Nullable DBRef dbref, TypeInformation type, ObjectPath path, - @Nullable Class rawType) { + T object = dbref == null ? null + : path.getPathItem(dbref.getId(), dbref.getCollectionName(), (Class) type.getType()); + if (object != null) { + return object; + } - List result = bulkReadAndConvertDBRefs(Collections.singletonList(dbref), type, path, rawType); + List result = bulkReadAndConvertDBRefs(context, Collections.singletonList(dbref), type, type.getType()); return CollectionUtils.isEmpty(result) ? null : result.iterator().next(); } @SuppressWarnings({ "unchecked", "rawtypes" }) - private void bulkReadAndConvertDBRefMapIntoTarget(TypeInformation valueType, Class rawValueType, + private void bulkReadAndConvertDBRefMapIntoTarget(ConversionContext context, TypeInformation valueType, + Class rawValueType, Map sourceMap, Map targetMap) { LinkedHashMap referenceMap = new LinkedHashMap<>(sourceMap); - List convertedObjects = bulkReadAndConvertDBRefs((List) new ArrayList(referenceMap.values()), - valueType, ObjectPath.ROOT, rawValueType); + List convertedObjects = bulkReadAndConvertDBRefs(context.withPath(ObjectPath.ROOT), + (List) new ArrayList(referenceMap.values()), valueType, rawValueType); int index = 0; for (String key : referenceMap.keySet()) { @@ -1643,7 +1671,7 @@ private void bulkReadAndConvertDBRefMapIntoTarget(TypeInformation valueType, } @SuppressWarnings("unchecked") - private List bulkReadAndConvertDBRefs(List dbrefs, TypeInformation type, ObjectPath path, + private List bulkReadAndConvertDBRefs(ConversionContext context, List dbrefs, TypeInformation type, @Nullable Class rawType) { if (CollectionUtils.isEmpty(dbrefs)) { @@ -1664,7 +1692,7 @@ private List bulkReadAndConvertDBRefs(List dbrefs, TypeInformation maybeEmitEvent( new AfterLoadEvent<>(document, (Class) (rawType != null ? rawType : Object.class), collectionName)); - target = (T) read(type, document, path); + target = (T) doRead(context, type, document); } if (target != null) { @@ -1886,4 +1914,109 @@ public org.springframework.data.util.TypeInformation specialize(ClassTypeInforma return delegate.specialize(type); } } + + /** + * Conversion context holding references to simple {@link ValueConverter} and {@link ContainerValueConverter}. 
+ */ + static class ConversionContext { + + private final ObjectPath path; + private final ContainerValueConverter documentConverter; + private final ContainerValueConverter> collectionConverter; + private final ContainerValueConverter mapConverter; + private final ContainerValueConverter dbRefConverter; + private final ValueConverter elementConverter; + + ConversionContext(ObjectPath path, ContainerValueConverter documentConverter, + ContainerValueConverter> collectionConverter, ContainerValueConverter mapConverter, + ContainerValueConverter dbRefConverter, ValueConverter elementConverter) { + + this.path = path; + this.documentConverter = documentConverter; + this.collectionConverter = collectionConverter; + this.mapConverter = mapConverter; + this.dbRefConverter = dbRefConverter; + this.elementConverter = elementConverter; + } + + @SuppressWarnings("unchecked") + public S convert(TypeInformation typeToUse, Object source) { + + if (source instanceof Collection) { + + Class rawType = typeToUse.getType(); + if (!Object.class.equals(rawType)) { + if (!rawType.isArray() && !ClassUtils.isAssignable(Iterable.class, rawType)) { + throw new MappingException( + String.format(INCOMPATIBLE_TYPES, source, source.getClass(), rawType, getPath())); + } + } + + if (typeToUse.isCollectionLike() || typeToUse.getType().isAssignableFrom(Collection.class)) { + return (S) collectionConverter.convert(this, (Collection) source, typeToUse); + } + } + + if (typeToUse.isMap()) { + return (S) mapConverter.convert(this, (Bson) source, typeToUse); + } + + if (source instanceof DBRef) { + return (S) dbRefConverter.convert(this, (DBRef) source, typeToUse); + } + + if (source instanceof Collection) { + throw new MappingException( + String.format(INCOMPATIBLE_TYPES, source, BasicDBList.class, typeToUse.getType(), getPath())); + } + + if (source instanceof Bson) { + return (S) documentConverter.convert(this, (Bson) source, typeToUse); + } + + return (S) elementConverter.convert(source, typeToUse); + } + + /** + * Create a new {@link ConversionContext} with {@link ObjectPath currentPath} applied. + * + * @param currentPath + * @return + */ + public ConversionContext withPath(ObjectPath currentPath) { + + Assert.notNull(currentPath, "ObjectPath must not be null"); + + return new ConversionContext(currentPath, documentConverter, collectionConverter, mapConverter, dbRefConverter, + elementConverter); + } + + public ObjectPath getPath() { + return path; + } + + /** + * Converts a simple {@code source} value into {@link TypeInformation the target type}. + * + * @param + */ + interface ValueConverter { + + Object convert(T source, TypeInformation typeHint); + + } + + /** + * Converts a container {@code source} value into {@link TypeInformation the target type}. Containers may + * recursively apply conversions for entities, collections, maps, etc. 
+ * + * @param + */ + interface ContainerValueConverter { + + Object convert(ConversionContext context, T source, TypeInformation typeHint); + + } + + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java index a4c1ab788d..2c0f8649e2 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java @@ -62,7 +62,7 @@ import com.mongodb.client.MongoDatabase; /** - * Unit tests for {@link DbRefMappingMongoConverter}. + * Unit tests for {@link MappingMongoConverter}. * * @author Oliver Gierke * @author Thomas Darimont diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java index abdd84a4a1..ffad28b231 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java @@ -2177,9 +2177,10 @@ public void readAndConvertDBRefNestedByMapCorrectly() { MappingMongoConverter spyConverter = spy(converter); Mockito.doReturn(cluster).when(spyConverter).readRef(dbRef); - Map result = spyConverter.readMap(ClassTypeInformation.MAP, data, ObjectPath.ROOT); + Map result = spyConverter.readMap(spyConverter.getConversionContext(ObjectPath.ROOT), data, + ClassTypeInformation.MAP); - assertThat(((LinkedHashMap) result.get("cluster")).get("_id")).isEqualTo(100L); + assertThat(((Map) result.get("cluster")).get("_id")).isEqualTo(100L); } @Test // GH-3546 From c792536be1113a2d122687ad4d46048e8403b2fe Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Fri, 26 Feb 2021 15:36:38 +0100 Subject: [PATCH 4/7] Polishing. Move static methods around. Revisit nullability annotations. Extract conversion service call into own method. --- .../core/convert/DocumentAccessor.java | 1 + .../data/mongodb/core/convert/MapUtils.java | 111 ++++ .../core/convert/MappingMongoConverter.java | 576 ++++++++---------- 3 files changed, 351 insertions(+), 337 deletions(-) create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MapUtils.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentAccessor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentAccessor.java index 53dd03f06f..ee29fea509 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentAccessor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentAccessor.java @@ -154,6 +154,7 @@ public Object get(MongoPersistentProperty property) { * @param entity must not be {@literal null}. * @return */ + @Nullable public Object getRawId(MongoPersistentEntity entity) { return entity.hasIdProperty() ? 
get(entity.getRequiredIdProperty()) : BsonUtils.asMap(document).get("_id"); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MapUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MapUtils.java new file mode 100644 index 0000000000..75bc72beaf --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MapUtils.java @@ -0,0 +1,111 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import java.util.Collection; +import java.util.Collections; +import java.util.Map; + +import org.bson.Document; +import org.bson.conversions.Bson; + +import org.springframework.lang.Nullable; +import org.springframework.util.CollectionUtils; + +import com.mongodb.DBObject; + +/** + * @author Mark Paluch + */ +class MapUtils { + /** + * Returns given object as {@link Collection}. Will return the {@link Collection} as is if the source is a + * {@link Collection} already, will convert an array into a {@link Collection} or simply create a single element + * collection for everything else. + * + * @param source + * @return + */ + static Collection asCollection(Object source) { + + if (source instanceof Collection) { + return (Collection) source; + } + + return source.getClass().isArray() ? CollectionUtils.arrayToList(source) : Collections.singleton(source); + } + + @SuppressWarnings("unchecked") + static Map asMap(Bson bson) { + + if (bson instanceof Document) { + return (Document) bson; + } + + if (bson instanceof DBObject) { + return ((DBObject) bson).toMap(); + } + + throw new IllegalArgumentException( + String.format("Cannot read %s. as map. Given Bson must be a Document or DBObject!", bson.getClass())); + } + + static void addToMap(Bson bson, String key, @Nullable Object value) { + + if (bson instanceof Document) { + ((Document) bson).put(key, value); + return; + } + if (bson instanceof DBObject) { + ((DBObject) bson).put(key, value); + return; + } + throw new IllegalArgumentException(String.format( + "Cannot add key/value pair to %s. as map. Given Bson must be a Document or DBObject!", bson.getClass())); + } + + static void addAllToMap(Bson bson, Map value) { + + if (bson instanceof Document) { + ((Document) bson).putAll(value); + return; + } + + if (bson instanceof DBObject) { + ((DBObject) bson).putAll(value); + return; + } + + throw new IllegalArgumentException( + String.format("Cannot add all to %s. Given Bson must be a Document or DBObject.", bson.getClass())); + } + + static void removeFromMap(Bson bson, String key) { + + if (bson instanceof Document) { + ((Document) bson).remove(key); + return; + } + + if (bson instanceof DBObject) { + ((DBObject) bson).removeField(key); + return; + } + + throw new IllegalArgumentException( + String.format("Cannot remove from %s. 
Given Bson must be a Document or DBObject.", bson.getClass())); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index a3d41df9ff..cb1a945cec 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -261,17 +261,10 @@ public S read(Class clazz, final Bson bson) { } protected S read(TypeInformation type, Bson bson) { - return read(type, bson, ObjectPath.ROOT); + return doRead(getConversionContext(ObjectPath.ROOT), type, bson); } @SuppressWarnings("unchecked") - private S read(TypeInformation type, Bson bson, ObjectPath path) { - - ConversionContext conversionContext = getConversionContext(path); - - return doRead(conversionContext, type, bson); - } - private S doRead(ConversionContext context, TypeInformation type, Bson bson) { Assert.notNull(bson, "Bson must not be null!"); @@ -281,7 +274,7 @@ private S doRead(ConversionContext context, TypeInformation rawType = typeToUse.getType(); if (conversions.hasCustomReadTarget(bson.getClass(), rawType)) { - return conversionService.convert(bson, rawType); + return doConvert(bson, rawType); } if (Document.class.isAssignableFrom(rawType)) { @@ -304,6 +297,7 @@ private S doRead(ConversionContext context, TypeInformation S readDocument(ConversionContext ctx, Bson bson, TypeInformation typeHint) { // TODO: Cleanup duplication @@ -315,7 +309,7 @@ private S readDocument(ConversionContext ctx, Bson bson, Type // Discuss: Potentially the wrong thing to do. In a Map<…, Object> if the database type is Person we would apply a // custom converter if registered for Person if (conversions.hasCustomReadTarget(bson.getClass(), rawType)) { - return conversionService.convert(bson, rawType); + return doConvert(bson, rawType); } if (typeToRead.isMap()) { @@ -352,8 +346,8 @@ private ParameterValueProvider getParameterProvider(Con PersistentEntityParameterValueProvider parameterProvider = new PersistentEntityParameterValueProvider<>( entity, provider, context.getPath().getCurrentObject()); - return new ConverterAwareSpELExpressionParameterValueProvider(evaluator, conversionService, parameterProvider, - context); + return new ConverterAwareSpELExpressionParameterValueProvider(context, evaluator, conversionService, + parameterProvider); } private S read(ConversionContext context, MongoPersistentEntity entity, Document bson) { @@ -402,13 +396,14 @@ private S populateProperties(ConversionContext context, MongoPersistentEntit * Reads the identifier from either the bean backing the {@link PersistentPropertyAccessor} or the source document in * case the identifier has not be populated yet. In this case the identifier is set on the bean for further reference. * - * @param context + * @param context must not be {@literal null}. * @param accessor must not be {@literal null}. * @param document must not be {@literal null}. * @param entity must not be {@literal null}. - * @param evaluator + * @param evaluator must not be {@literal null}. 
* @return */ + @Nullable private Object readAndPopulateIdentifier(ConversionContext context, PersistentPropertyAccessor accessor, DocumentAccessor document, MongoPersistentEntity entity, SpELExpressionEvaluator evaluator) { @@ -429,6 +424,7 @@ private Object readAndPopulateIdentifier(ConversionContext context, PersistentPr return rawId; } + @Nullable private Object readIdValue(ConversionContext context, SpELExpressionEvaluator evaluator, MongoPersistentProperty idProperty, Object rawId) { @@ -461,7 +457,7 @@ private void readProperties(ConversionContext context, MongoPersistentEntity if (prop.isEmbedded()) { accessor.setProperty(prop, - readEmbedded(context, documentAccessor, prop, mappingContext.getPersistentEntity(prop))); + readEmbedded(context, documentAccessor, prop, mappingContext.getRequiredPersistentEntity(prop))); continue; } @@ -566,8 +562,8 @@ public void write(Object obj, Bson bson) { Object target = obj instanceof LazyLoadingProxy ? ((LazyLoadingProxy) obj).getTarget() : obj; writeInternal(target, bson, type); - if (asMap(bson).containsKey("_id") && asMap(bson).get("_id") == null) { - removeFromMap(bson, "_id"); + if (MapUtils.asMap(bson).containsKey("_id") && MapUtils.asMap(bson).get("_id") == null) { + MapUtils.removeFromMap(bson, "_id"); } if (requiresTypeHint(entityType)) { @@ -589,10 +585,6 @@ private boolean requiresTypeHint(Class type) { /** * Internal write conversion method which should be used for nested invocations. - * - * @param obj - * @param bson - * @param typeHint */ @SuppressWarnings("unchecked") protected void writeInternal(@Nullable Object obj, Bson bson, @Nullable TypeInformation typeHint) { @@ -605,8 +597,8 @@ protected void writeInternal(@Nullable Object obj, Bson bson, @Nullable TypeInfo Optional> customTarget = conversions.getCustomWriteTarget(entityType, Document.class); if (customTarget.isPresent()) { - Document result = conversionService.convert(obj, Document.class); - addAllToMap(bson, result); + Document result = doConvert(obj, Document.class); + MapUtils.addAllToMap(bson, result); return; } @@ -707,7 +699,7 @@ protected void writePropertyInternal(@Nullable Object obj, DocumentAccessor acce } if (valueType.isCollectionLike()) { - List collectionInternal = createCollection(asCollection(obj), prop); + List collectionInternal = createCollection(MapUtils.asCollection(obj), prop); accessor.put(prop, collectionInternal); return; } @@ -732,10 +724,8 @@ protected void writePropertyInternal(@Nullable Object obj, DocumentAccessor acce dbRefObj = dbRefObj != null ? dbRefObj : createDBRef(obj, prop); - if (null != dbRefObj) { - accessor.put(prop, dbRefObj); - return; - } + accessor.put(prop, dbRefObj); + return; } /* @@ -750,7 +740,7 @@ protected void writePropertyInternal(@Nullable Object obj, DocumentAccessor acce if (basicTargetType.isPresent()) { - accessor.put(prop, conversionService.convert(obj, basicTargetType.get())); + accessor.put(prop, doConvert(obj, basicTargetType.get())); return; } @@ -766,36 +756,18 @@ protected void writePropertyInternal(@Nullable Object obj, DocumentAccessor acce accessor.put(prop, document); } - /** - * Returns given object as {@link Collection}. Will return the {@link Collection} as is if the source is a - * {@link Collection} already, will convert an array into a {@link Collection} or simply create a single element - * collection for everything else. 
- * - * @param source - * @return - */ - private static Collection asCollection(Object source) { - - if (source instanceof Collection) { - return (Collection) source; - } - - return source.getClass().isArray() ? CollectionUtils.arrayToList(source) : Collections.singleton(source); - } - /** * Writes the given {@link Collection} using the given {@link MongoPersistentProperty} information. * * @param collection must not be {@literal null}. * @param property must not be {@literal null}. - * @return */ protected List createCollection(Collection collection, MongoPersistentProperty property) { if (!property.isDbReference()) { if (property.hasExplicitWriteTarget()) { - return writeCollectionInternal(collection, new TypeInformationWrapper<>(property), new ArrayList<>()); + return writeCollectionInternal(collection, new FieldTypeInformation<>(property), new ArrayList<>()); } return writeCollectionInternal(collection, property.getTypeInformation(), new BasicDBList()); } @@ -820,7 +792,6 @@ protected List createCollection(Collection collection, MongoPersisten * * @param map must not {@literal null}. * @param property must not be {@literal null}. - * @return */ protected Bson createMap(Map map, MongoPersistentProperty property) { @@ -857,7 +828,6 @@ protected Bson createMap(Map map, MongoPersistentProperty proper * @param source the collection to create a {@link Collection} for, must not be {@literal null}. * @param type the {@link TypeInformation} to consider or {@literal null} if unknown. * @param sink the {@link Collection} to write to. - * @return */ @SuppressWarnings("unchecked") private List writeCollectionInternal(Collection source, @Nullable TypeInformation type, @@ -879,7 +849,7 @@ private List writeCollectionInternal(Collection source, @Nullable Typ collection.add(getPotentiallyConvertedSimpleWrite(element, componentType != null ? componentType.getType() : Object.class)); } else if (element instanceof Collection || elementType.isArray()) { - collection.add(writeCollectionInternal(asCollection(element), componentType, new BasicDBList())); + collection.add(writeCollectionInternal(MapUtils.asCollection(element), componentType, new BasicDBList())); } else { Document document = new Document(); writeInternal(element, document, componentType); @@ -896,7 +866,6 @@ private List writeCollectionInternal(Collection source, @Nullable Typ * @param obj must not be {@literal null}. * @param bson must not be {@literal null}. * @param propertyType must not be {@literal null}. - * @return */ protected Bson writeMapInternal(Map obj, Bson bson, TypeInformation propertyType) { @@ -911,14 +880,14 @@ protected Bson writeMapInternal(Map obj, Bson bson, TypeInformat if (val == null || conversions.isSimpleType(val.getClass())) { writeSimpleInternal(val, bson, simpleKey); } else if (val instanceof Collection || val.getClass().isArray()) { - addToMap(bson, simpleKey, - writeCollectionInternal(asCollection(val), propertyType.getMapValueType(), new BasicDBList())); + MapUtils.addToMap(bson, simpleKey, + writeCollectionInternal(MapUtils.asCollection(val), propertyType.getMapValueType(), new BasicDBList())); } else { Document document = new Document(); TypeInformation valueTypeInfo = propertyType.isMap() ? 
propertyType.getMapValueType() : ClassTypeInformation.OBJECT; writeInternal(val, document, valueTypeInfo); - addToMap(bson, simpleKey, document); + MapUtils.addToMap(bson, simpleKey, document); } } else { throw new MappingException("Cannot use a complex object as a key value."); @@ -933,7 +902,6 @@ protected Bson writeMapInternal(Map obj, Bson bson, TypeInformat * conversions and escape dots from the result as they're not supported as {@link Map} key in MongoDB. * * @param key must not be {@literal null}. - * @return */ private String prepareMapKey(Object key) { @@ -948,8 +916,7 @@ private String prepareMapKey(Object key) { * conversion if none is configured. * * @see #setMapKeyDotReplacement(String) - * @param source - * @return + * @param source must not be {@literal null}. */ protected String potentiallyEscapeMapKey(String source) { @@ -971,7 +938,6 @@ protected String potentiallyEscapeMapKey(String source) { * Returns a {@link String} representation of the given {@link Map} key * * @param key - * @return */ private String potentiallyConvertMapKey(Object key) { @@ -988,8 +954,7 @@ private String potentiallyConvertMapKey(Object key) { * Translates the map key replacements in the given key just read with a dot in case a map key replacement has been * configured. * - * @param source - * @return + * @param source must not be {@literal null}. */ protected String potentiallyUnescapeMapKey(String source) { return mapKeyDotReplacement == null ? source : source.replaceAll(mapKeyDotReplacement, "\\."); @@ -999,13 +964,13 @@ protected String potentiallyUnescapeMapKey(String source) { * Adds custom type information to the given {@link Document} if necessary. That is if the value is not the same as * the one given. This is usually the case if you store a subtype of the actual declared type of the property. * - * @param type + * @param type can be {@literal null}. * @param value must not be {@literal null}. * @param bson must not be {@literal null}. */ protected void addCustomTypeKeyIfNecessary(@Nullable TypeInformation type, Object value, Bson bson) { - Class reference = type != null ? type.getActualType().getType() : Object.class; + Class reference = type != null ? type.getRequiredActualType().getType() : Object.class; Class valueType = ClassUtils.getUserClass(value.getClass()); boolean notTheSameClass = !valueType.equals(reference); @@ -1017,15 +982,15 @@ protected void addCustomTypeKeyIfNecessary(@Nullable TypeInformation type, Ob /** * Writes the given simple value to the given {@link Document}. Will store enum names for enum values. * - * @param value + * @param value can be {@literal null}. * @param bson must not be {@literal null}. * @param key must not be {@literal null}. */ - private void writeSimpleInternal(Object value, Bson bson, String key) { - addToMap(bson, key, getPotentiallyConvertedSimpleWrite(value, Object.class)); + private void writeSimpleInternal(@Nullable Object value, Bson bson, String key) { + MapUtils.addToMap(bson, key, getPotentiallyConvertedSimpleWrite(value, Object.class)); } - private void writeSimpleInternal(Object value, Bson bson, MongoPersistentProperty property) { + private void writeSimpleInternal(@Nullable Object value, Bson bson, MongoPersistentProperty property) { DocumentAccessor accessor = new DocumentAccessor(bson); accessor.put(property, getPotentiallyConvertedSimpleWrite(value, property.hasExplicitWriteTarget() ? 
property.getFieldType() : Object.class)); @@ -1034,9 +999,6 @@ private void writeSimpleInternal(Object value, Bson bson, MongoPersistentPropert /** * Checks whether we have a custom conversion registered for the given value into an arbitrary simple Mongo type. * Returns the converted value if so. If not, we perform special enum handling or simply return the value as is. - * - * @param value - * @return */ @Nullable private Object getPotentiallyConvertedSimpleWrite(@Nullable Object value, @Nullable Class typeHint) { @@ -1048,14 +1010,14 @@ private Object getPotentiallyConvertedSimpleWrite(@Nullable Object value, @Nulla if (typeHint != null && Object.class != typeHint) { if (conversionService.canConvert(value.getClass(), typeHint)) { - value = conversionService.convert(value, typeHint); + value = doConvert(value, typeHint); } } Optional> customTarget = conversions.getCustomWriteTarget(value.getClass()); if (customTarget.isPresent()) { - return conversionService.convert(value, customTarget.get()); + return doConvert(value, customTarget.get()); } if (ObjectUtils.isArray(value)) { @@ -1063,7 +1025,7 @@ private Object getPotentiallyConvertedSimpleWrite(@Nullable Object value, @Nulla if (value instanceof byte[]) { return value; } - return asCollection(value); + return MapUtils.asCollection(value); } return Enum.class.isAssignableFrom(value.getClass()) ? ((Enum) value).name() : value; @@ -1072,44 +1034,34 @@ private Object getPotentiallyConvertedSimpleWrite(@Nullable Object value, @Nulla /** * Checks whether we have a custom conversion for the given simple object. Converts the given value if so, applies * {@link Enum} handling or returns the value as is. - * - * @param value - * @param target must not be {@literal null}. - * @return */ - @Nullable - private Object getPotentiallyConvertedSimpleRead(@Nullable Object value, TypeInformation target) { + private Object getPotentiallyConvertedSimpleRead(Object value, TypeInformation target) { return getPotentiallyConvertedSimpleRead(value, target.getType()); } /** * Checks whether we have a custom conversion for the given simple object. Converts the given value if so, applies * {@link Enum} handling or returns the value as is. - * - * @param value - * @param target must not be {@literal null}. 
- * @return */ - @Nullable @SuppressWarnings({ "rawtypes", "unchecked" }) - private Object getPotentiallyConvertedSimpleRead(@Nullable Object value, @Nullable Class target) { + private Object getPotentiallyConvertedSimpleRead(Object value, @Nullable Class target) { - if (value == null || target == null || ClassUtils.isAssignableValue(target, value)) { + if (target == null || ClassUtils.isAssignableValue(target, value)) { return value; } if (conversions.hasCustomReadTarget(value.getClass(), target)) { - return conversionService.convert(value, target); + return doConvert(value, target); } if (Enum.class.isAssignableFrom(target)) { return Enum.valueOf((Class) target, value.toString()); } - return conversionService.convert(value, target); + return doConvert(value, target); } - protected DBRef createDBRef(Object target, MongoPersistentProperty property) { + protected DBRef createDBRef(Object target, @Nullable MongoPersistentProperty property) { Assert.notNull(target, "Target object must not be null!"); @@ -1147,7 +1099,7 @@ protected DBRef createDBRef(Object target, MongoPersistentProperty property) { @Nullable private Object getValueInternal(ConversionContext context, MongoPersistentProperty prop, Bson bson, SpELExpressionEvaluator evaluator) { - return new MongoDbPropertyValueProvider(bson, evaluator, context).getPropertyValue(prop); + return new MongoDbPropertyValueProvider(context, bson, evaluator).getPropertyValue(prop); } /** @@ -1159,7 +1111,6 @@ private Object getValueInternal(ConversionContext context, MongoPersistentProper * @return the converted {@link Collection} or array, will never be {@literal null}. */ @SuppressWarnings("unchecked") - @Nullable private Object readCollectionOrArray(ConversionContext context, Collection source, TypeInformation targetType) { Assert.notNull(targetType, "Target type must not be null!"); @@ -1183,7 +1134,7 @@ private Object readCollectionOrArray(ConversionContext context, Collection so if (!DBRef.class.equals(rawComponentType) && isCollectionOfDbRefWhereBulkFetchIsPossible(source)) { - List objects = bulkReadAndConvertDBRefs(context, (List) source, componentType, rawComponentType); + List objects = bulkReadAndConvertDBRefs(context, (List) source, componentType); return getPotentiallyConvertedSimpleRead(objects, targetType.getType()); } @@ -1199,26 +1150,26 @@ private Object readCollectionOrArray(ConversionContext context, Collection so * * @param bson must not be {@literal null} * @param type the {@link Map} {@link TypeInformation} to be used to unmarshall this {@link Document}. - * @return */ - @SuppressWarnings("unchecked") protected Map readMap(ConversionContext context, Bson bson, TypeInformation type) { Assert.notNull(bson, "Document must not be null!"); + Assert.notNull(type, "TypeInformation must not be null!"); Class mapType = typeMapper.readType(bson, type).getType(); TypeInformation keyType = type.getComponentType(); - TypeInformation valueType = type.getMapValueType(); + TypeInformation valueType = type.getMapValueType() == null ? ClassTypeInformation.OBJECT + : type.getRequiredMapValueType(); - Class rawKeyType = keyType != null ? keyType.getType() : null; - Class rawValueType = valueType != null ? valueType.getType() : null; + Class rawKeyType = keyType != null ? 
keyType.getType() : Object.class; + Class rawValueType = valueType.getType(); - Map sourceMap = asMap(bson); + Map sourceMap = MapUtils.asMap(bson); Map map = CollectionFactory.createMap(mapType, rawKeyType, sourceMap.keySet().size()); if (!DBRef.class.equals(rawValueType) && isCollectionOfDbRefWhereBulkFetchIsPossible(sourceMap.values())) { - bulkReadAndConvertDBRefMapIntoTarget(context, valueType, rawValueType, sourceMap, map); + bulkReadAndConvertDBRefMapIntoTarget(context, valueType, sourceMap, map); return map; } @@ -1230,79 +1181,17 @@ protected Map readMap(ConversionContext context, Bson bson, Type Object key = potentiallyUnescapeMapKey(entry.getKey()); - if (rawKeyType != null && !rawKeyType.isAssignableFrom(key.getClass())) { - key = conversionService.convert(key, rawKeyType); + if (!rawKeyType.isAssignableFrom(key.getClass())) { + key = doConvert(key, rawKeyType); } Object value = entry.getValue(); - TypeInformation defaultedValueType = valueType != null ? valueType : ClassTypeInformation.OBJECT; - map.put(key, context.convert(defaultedValueType, value)); + map.put(key, context.convert(valueType, value)); } return map; } - @SuppressWarnings("unchecked") - private static Map asMap(Bson bson) { - - if (bson instanceof Document) { - return (Document) bson; - } - - if (bson instanceof DBObject) { - return ((DBObject) bson).toMap(); - } - - throw new IllegalArgumentException( - String.format("Cannot read %s. as map. Given Bson must be a Document or DBObject!", bson.getClass())); - } - - private static void addToMap(Bson bson, String key, @Nullable Object value) { - - if (bson instanceof Document) { - ((Document) bson).put(key, value); - return; - } - if (bson instanceof DBObject) { - ((DBObject) bson).put(key, value); - return; - } - throw new IllegalArgumentException(String.format( - "Cannot add key/value pair to %s. as map. Given Bson must be a Document or DBObject!", bson.getClass())); - } - - private static void addAllToMap(Bson bson, Map value) { - - if (bson instanceof Document) { - ((Document) bson).putAll(value); - return; - } - - if (bson instanceof DBObject) { - ((DBObject) bson).putAll(value); - return; - } - - throw new IllegalArgumentException( - String.format("Cannot add all to %s. Given Bson must be a Document or DBObject.", bson.getClass())); - } - - private static void removeFromMap(Bson bson, String key) { - - if (bson instanceof Document) { - ((Document) bson).remove(key); - return; - } - - if (bson instanceof DBObject) { - ((DBObject) bson).removeField(key); - return; - } - - throw new IllegalArgumentException( - String.format("Cannot remove from %s. Given Bson must be a Document or DBObject.", bson.getClass())); - } - /* * (non-Javadoc) * @see org.springframework.data.mongodb.core.convert.MongoWriter#convertToMongoType(java.lang.Object, org.springframework.data.util.TypeInformation) @@ -1318,7 +1207,7 @@ public Object convertToMongoType(@Nullable Object obj, @Nullable TypeInformation Optional> target = conversions.getCustomWriteTarget(obj.getClass()); if (target.isPresent()) { - return conversionService.convert(obj, target.get()); + return doConvert(obj, target.get()); } if (conversions.isSimpleType(obj.getClass())) { @@ -1393,7 +1282,6 @@ public Object convertToMongoType(@Nullable Object obj, @Nullable TypeInformation return !obj.getClass().equals(typeInformation.getType()) ? 
newDocument : removeTypeInfo(newDocument, true); } - @Nullable @Override public Object convertToMongoType(@Nullable Object obj, MongoPersistentEntity entity) { Document newDocument = new Document(); @@ -1401,7 +1289,8 @@ public Object convertToMongoType(@Nullable Object obj, MongoPersistentEntity ent return newDocument; } - public List maybeConvertList(Iterable source, TypeInformation typeInformation) { + // TODO: hide + public List maybeConvertList(Iterable source, @Nullable TypeInformation typeInformation) { List newDbl = new ArrayList<>(); @@ -1465,155 +1354,6 @@ private Object removeTypeInfo(Object object, boolean recursively) { return document; } - /** - * {@link PropertyValueProvider} to evaluate a SpEL expression if present on the property or simply accesses the field - * of the configured source {@link Document}. - * - * @author Oliver Gierke - * @author Mark Paluch - * @author Christoph Strobl - */ - static class MongoDbPropertyValueProvider implements PropertyValueProvider { - - final ConversionContext context; - final DocumentAccessor accessor; - final SpELExpressionEvaluator evaluator; - - /** - * Creates a new {@link MongoDbPropertyValueProvider} for the given source, {@link SpELExpressionEvaluator} and - * {@link ObjectPath}. - * - * @param source must not be {@literal null}. - * @param evaluator must not be {@literal null}. - * @param context - */ - MongoDbPropertyValueProvider(Bson source, SpELExpressionEvaluator evaluator, ConversionContext context) { - this(context, new DocumentAccessor(source), evaluator); - } - - /** - * Creates a new {@link MongoDbPropertyValueProvider} for the given source, {@link SpELExpressionEvaluator} and - * {@link ObjectPath}. - * - * @param context - * @param accessor must not be {@literal null}. - * @param evaluator must not be {@literal null}. - */ - MongoDbPropertyValueProvider(ConversionContext context, DocumentAccessor accessor, - SpELExpressionEvaluator evaluator) { - - Assert.notNull(accessor, "DocumentAccessor must no be null!"); - Assert.notNull(evaluator, "SpELExpressionEvaluator must not be null!"); - - this.context = context; - this.accessor = accessor; - this.evaluator = evaluator; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.convert.PropertyValueProvider#getPropertyValue(org.springframework.data.mapping.PersistentProperty) - */ - @Nullable - public T getPropertyValue(MongoPersistentProperty property) { - - String expression = property.getSpelExpression(); - Object value = expression != null ? evaluator.evaluate(expression) : accessor.get(property); - - if (value == null) { - return null; - } - - return (T) context.convert(property.getTypeInformation(), value); - } - } - - /** - * {@link PropertyValueProvider} that is aware of {@link MongoPersistentProperty#isAssociation()} and that delegates - * resolution to {@link DbRefResolver}. - * - * @author Mark Paluch - * @author Christoph Strobl - * @since 2.1 - */ - class AssociationAwareMongoDbPropertyValueProvider extends MongoDbPropertyValueProvider { - - /** - * Creates a new {@link AssociationAwareMongoDbPropertyValueProvider} for the given source, - * {@link SpELExpressionEvaluator} and {@link ObjectPath}. - * - * @param source must not be {@literal null}. - * @param evaluator must not be {@literal null}. 
- */ - AssociationAwareMongoDbPropertyValueProvider(ConversionContext context, DocumentAccessor source, - SpELExpressionEvaluator evaluator) { - super(context, source, evaluator); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.convert.PropertyValueProvider#getPropertyValue(org.springframework.data.mapping.PersistentProperty) - */ - @Nullable - @SuppressWarnings("unchecked") - public T getPropertyValue(MongoPersistentProperty property) { - - if (property.isDbReference() && property.getDBRef().lazy()) { - - Object rawRefValue = accessor.get(property); - if (rawRefValue == null) { - return null; - } - - DbRefResolverCallback callback = new DefaultDbRefResolverCallback(accessor.getDocument(), context.getPath(), - evaluator, (prop, bson, evaluator, path) -> MappingMongoConverter.this.getValueInternal(context, prop, bson, - evaluator)); - - DBRef dbref = rawRefValue instanceof DBRef ? (DBRef) rawRefValue : null; - return (T) dbRefResolver.resolveDbRef(property, dbref, callback, dbRefProxyHandler); - } - - return super.getPropertyValue(property); - } - } - - /** - * Extension of {@link SpELExpressionParameterValueProvider} to recursively trigger value conversion on the raw - * resolved SpEL value. - * - * @author Oliver Gierke - */ - private class ConverterAwareSpELExpressionParameterValueProvider - extends SpELExpressionParameterValueProvider { - - private final ConversionContext context; - - /** - * Creates a new {@link ConverterAwareSpELExpressionParameterValueProvider}. - * - * @param evaluator must not be {@literal null}. - * @param conversionService must not be {@literal null}. - * @param delegate must not be {@literal null}. - * @param context - */ - public ConverterAwareSpELExpressionParameterValueProvider(SpELExpressionEvaluator evaluator, - ConversionService conversionService, ParameterValueProvider delegate, - ConversionContext context) { - - super(evaluator, conversionService, delegate); - this.context = context; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mapping.model.SpELExpressionParameterValueProvider#potentiallyConvertSpelValue(java.lang.Object, org.springframework.data.mapping.PreferredConstructor.Parameter) - */ - @Override - protected T potentiallyConvertSpelValue(Object object, Parameter parameter) { - return context.convert(parameter.getType(), object); - } - } - @Nullable @SuppressWarnings("unchecked") T readValue(ConversionContext context, @Nullable Object value, TypeInformation type) { @@ -1627,41 +1367,39 @@ T readValue(ConversionContext context, @Nullable Object value, TypeInformati Class rawType = type.getType(); if (conversions.hasCustomReadTarget(value.getClass(), rawType)) { - return (T) conversionService.convert(value, rawType); + return (T) doConvert(value, rawType); } else if (value instanceof DBRef) { - return readDBRef(context, (DBRef) value, type); + return (T) readDBRef(context, (DBRef) value, type); } return (T) context.convert(type, value); } @Nullable - private T readDBRef(ConversionContext context, @Nullable DBRef dbref, TypeInformation type) { + private Object readDBRef(ConversionContext context, @Nullable DBRef dbref, TypeInformation type) { if (type.getType().equals(DBRef.class)) { - return (T) dbref; + return dbref; } ObjectPath path = context.getPath(); - T object = dbref == null ? null - : path.getPathItem(dbref.getId(), dbref.getCollectionName(), (Class) type.getType()); + Object object = dbref == null ? 
null : path.getPathItem(dbref.getId(), dbref.getCollectionName(), type.getType()); if (object != null) { return object; } - List result = bulkReadAndConvertDBRefs(context, Collections.singletonList(dbref), type, type.getType()); + List result = bulkReadAndConvertDBRefs(context, Collections.singletonList(dbref), type); return CollectionUtils.isEmpty(result) ? null : result.iterator().next(); } @SuppressWarnings({ "unchecked", "rawtypes" }) private void bulkReadAndConvertDBRefMapIntoTarget(ConversionContext context, TypeInformation valueType, - Class rawValueType, Map sourceMap, Map targetMap) { LinkedHashMap referenceMap = new LinkedHashMap<>(sourceMap); List convertedObjects = bulkReadAndConvertDBRefs(context.withPath(ObjectPath.ROOT), - (List) new ArrayList(referenceMap.values()), valueType, rawValueType); + (List) new ArrayList(referenceMap.values()), valueType); int index = 0; for (String key : referenceMap.keySet()) { @@ -1671,8 +1409,7 @@ private void bulkReadAndConvertDBRefMapIntoTarget(ConversionContext context, Typ } @SuppressWarnings("unchecked") - private List bulkReadAndConvertDBRefs(ConversionContext context, List dbrefs, TypeInformation type, - @Nullable Class rawType) { + private List bulkReadAndConvertDBRefs(ConversionContext context, List dbrefs, TypeInformation type) { if (CollectionUtils.isEmpty(dbrefs)) { return Collections.emptyList(); @@ -1691,7 +1428,7 @@ private List bulkReadAndConvertDBRefs(ConversionContext context, List(document, (Class) (rawType != null ? rawType : Object.class), collectionName)); + new AfterLoadEvent<>(document, (Class) type.getType(), collectionName)); target = (T) doRead(context, type, document); } @@ -1779,6 +1516,11 @@ public MappingMongoConverter with(MongoDatabaseFactory dbFactory) { return target; } + @SuppressWarnings("ConstantConditions") + private T doConvert(Object value, Class target) { + return conversionService.convert(value, target); + } + /** * Returns whether the given {@link Iterable} contains {@link DBRef} instances all pointing to the same collection. * @@ -1807,6 +1549,160 @@ private static boolean isCollectionOfDbRefWhereBulkFetchIsPossible(Iterable s return true; } + /** + * {@link PropertyValueProvider} to evaluate a SpEL expression if present on the property or simply accesses the field + * of the configured source {@link Document}. + * + * @author Oliver Gierke + * @author Mark Paluch + * @author Christoph Strobl + */ + static class MongoDbPropertyValueProvider implements PropertyValueProvider { + + final ConversionContext context; + final DocumentAccessor accessor; + final SpELExpressionEvaluator evaluator; + + /** + * Creates a new {@link MongoDbPropertyValueProvider} for the given source, {@link SpELExpressionEvaluator} and + * {@link ObjectPath}. + * + * @param context must not be {@literal null}. + * @param source must not be {@literal null}. + * @param evaluator must not be {@literal null}. + */ + MongoDbPropertyValueProvider(ConversionContext context, Bson source, SpELExpressionEvaluator evaluator) { + this(context, new DocumentAccessor(source), evaluator); + } + + /** + * Creates a new {@link MongoDbPropertyValueProvider} for the given source, {@link SpELExpressionEvaluator} and + * {@link ObjectPath}. + * + * @param context must not be {@literal null}. + * @param accessor must not be {@literal null}. + * @param evaluator must not be {@literal null}. 
+ */ + MongoDbPropertyValueProvider(ConversionContext context, DocumentAccessor accessor, + SpELExpressionEvaluator evaluator) { + + Assert.notNull(context, "ConversionContext must no be null!"); + Assert.notNull(accessor, "DocumentAccessor must no be null!"); + Assert.notNull(evaluator, "SpELExpressionEvaluator must not be null!"); + + this.context = context; + this.accessor = accessor; + this.evaluator = evaluator; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.convert.PropertyValueProvider#getPropertyValue(org.springframework.data.mapping.PersistentProperty) + */ + @Nullable + @SuppressWarnings("unchecked") + public T getPropertyValue(MongoPersistentProperty property) { + + String expression = property.getSpelExpression(); + Object value = expression != null ? evaluator.evaluate(expression) : accessor.get(property); + + if (value == null) { + return null; + } + + return (T) context.convert(property.getTypeInformation(), value); + } + } + + /** + * {@link PropertyValueProvider} that is aware of {@link MongoPersistentProperty#isAssociation()} and that delegates + * resolution to {@link DbRefResolver}. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.1 + */ + class AssociationAwareMongoDbPropertyValueProvider extends MongoDbPropertyValueProvider { + + /** + * Creates a new {@link AssociationAwareMongoDbPropertyValueProvider} for the given source, + * {@link SpELExpressionEvaluator} and {@link ObjectPath}. + * + * @param source must not be {@literal null}. + * @param evaluator must not be {@literal null}. + */ + AssociationAwareMongoDbPropertyValueProvider(ConversionContext context, DocumentAccessor source, + SpELExpressionEvaluator evaluator) { + super(context, source, evaluator); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.convert.PropertyValueProvider#getPropertyValue(org.springframework.data.mapping.PersistentProperty) + */ + @Nullable + @SuppressWarnings("unchecked") + public T getPropertyValue(MongoPersistentProperty property) { + + if (property.isDbReference() && property.getDBRef().lazy()) { + + Object rawRefValue = accessor.get(property); + if (rawRefValue == null) { + return null; + } + + DbRefResolverCallback callback = new DefaultDbRefResolverCallback(accessor.getDocument(), context.getPath(), + evaluator, (prop, bson, evaluator, path) -> MappingMongoConverter.this.getValueInternal(context, prop, bson, + evaluator)); + + DBRef dbref = rawRefValue instanceof DBRef ? (DBRef) rawRefValue : null; + return (T) dbRefResolver.resolveDbRef(property, dbref, callback, dbRefProxyHandler); + } + + return super.getPropertyValue(property); + } + } + + /** + * Extension of {@link SpELExpressionParameterValueProvider} to recursively trigger value conversion on the raw + * resolved SpEL value. + * + * @author Oliver Gierke + */ + private static class ConverterAwareSpELExpressionParameterValueProvider + extends SpELExpressionParameterValueProvider { + + private final ConversionContext context; + + /** + * Creates a new {@link ConverterAwareSpELExpressionParameterValueProvider}. + * + * @param context must not be {@literal null}. + * @param evaluator must not be {@literal null}. + * @param conversionService must not be {@literal null}. + * @param delegate must not be {@literal null}. 
+ */ + public ConverterAwareSpELExpressionParameterValueProvider(ConversionContext context, + SpELExpressionEvaluator evaluator, ConversionService conversionService, + ParameterValueProvider delegate) { + + super(evaluator, conversionService, delegate); + + Assert.notNull(context, "ConversionContext must no be null!"); + + this.context = context; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mapping.model.SpELExpressionParameterValueProvider#potentiallyConvertSpelValue(java.lang.Object, org.springframework.data.mapping.PreferredConstructor.Parameter) + */ + @Override + protected T potentiallyConvertSpelValue(Object object, Parameter parameter) { + return context.convert(parameter.getType(), object); + } + } + /** * Marker class used to indicate we have a non root document object here that might be used within an update - so we * need to preserve type hints for potential nested elements but need to remove it on top level. @@ -1828,15 +1724,21 @@ public T getParameterValue(Parameter parameter) } } - private static class TypeInformationWrapper implements TypeInformation { + /** + * {@link TypeInformation} considering {@link MongoPersistentProperty#getFieldType()} as type source. + * + * @param + */ + private static class FieldTypeInformation implements TypeInformation { - private MongoPersistentProperty persistentProperty; - private TypeInformation delegate; + private final MongoPersistentProperty persistentProperty; + private final TypeInformation delegate; - public TypeInformationWrapper(MongoPersistentProperty property) { + @SuppressWarnings("unchecked") + public FieldTypeInformation(MongoPersistentProperty property) { this.persistentProperty = property; - this.delegate = property.getTypeInformation(); + this.delegate = (TypeInformation) property.getTypeInformation(); } @Override @@ -1870,7 +1772,7 @@ public org.springframework.data.util.TypeInformation getMapValueType() { } @Override - public Class getType() { + public Class getType() { return delegate.getType(); } @@ -1910,7 +1812,7 @@ public List> getTypeArguments() } @Override - public org.springframework.data.util.TypeInformation specialize(ClassTypeInformation type) { + public org.springframework.data.util.TypeInformation specialize(ClassTypeInformation type) { return delegate.specialize(type); } } @@ -1980,8 +1882,8 @@ public S convert(TypeInformation typeToUse, Obje /** * Create a new {@link ConversionContext} with {@link ObjectPath currentPath} applied. * - * @param currentPath - * @return + * @param currentPath must not be {@literal null}. + * @return a new {@link ConversionContext} with {@link ObjectPath currentPath} applied. */ public ConversionContext withPath(ObjectPath currentPath) { From 9419861a19a555f703647ce9a2668e5fddf75158 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Mon, 1 Mar 2021 14:34:34 +0100 Subject: [PATCH 5/7] Polishing. Reintroduce deprecated API. Make conversion entrypoint methods protected so that they can be overridden. Tweak Javadoc. 
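
As an illustration only (not part of this change): with getConversionContext(ObjectPath) and the
ConversionContext type now protected, a subclass can hook into the conversion pipeline roughly as
sketched below. The TracingMongoConverter name is invented for this example; the constructor and
the getConversionContext signature follow the ones touched in this diff.

    import org.springframework.data.mapping.context.MappingContext;
    import org.springframework.data.mongodb.core.convert.DbRefResolver;
    import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
    import org.springframework.data.mongodb.core.convert.ObjectPath;
    import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity;
    import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty;

    class TracingMongoConverter extends MappingMongoConverter {

        TracingMongoConverter(DbRefResolver dbRefResolver,
                MappingContext<? extends MongoPersistentEntity<?>, MongoPersistentProperty> mappingContext) {
            super(dbRefResolver, mappingContext);
        }

        @Override
        protected ConversionContext getConversionContext(ObjectPath path) {
            // inspect or record the current ObjectPath, then delegate to the
            // default converter wiring assembled by the superclass
            return super.getConversionContext(path);
        }
    }

readDocument, readCollectionOrArray, readMap and getPotentiallyConvertedSimpleRead can be
overridden in the same fashion to customize how nested documents, collections, maps and simple
values are materialized.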
--- .../core/convert/MappingMongoConverter.java | 131 ++++++++++++------ 1 file changed, 88 insertions(+), 43 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index cb1a945cec..4cd799238b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -149,7 +149,16 @@ public MappingMongoConverter(DbRefResolver dbRefResolver, }); } - ConversionContext getConversionContext(ObjectPath path) { + /** + * Creates a new {@link ConversionContext} given {@link ObjectPath}. + * + * @param path the current {@link ObjectPath}, must not be {@literal null}. + * @return the {@link ConversionContext}. + */ + protected ConversionContext getConversionContext(ObjectPath path) { + + Assert.notNull(path, "ObjectPath must not be null"); + return new ConversionContext(path, this::readDocument, this::readCollectionOrArray, this::readMap, this::readDBRef, this::getPotentiallyConvertedSimpleRead); } @@ -294,11 +303,21 @@ private S doRead(ConversionContext context, TypeInformation S readDocument(ConversionContext ctx, Bson bson, TypeInformation typeHint) { + protected S readDocument(ConversionContext context, Bson bson, + TypeInformation typeHint) { // TODO: Cleanup duplication @@ -306,8 +325,6 @@ private S readDocument(ConversionContext ctx, Bson bson, Type TypeInformation typeToRead = typeMapper.readType(document, typeHint); Class rawType = typeToRead.getType(); - // Discuss: Potentially the wrong thing to do. In a Map<…, Object> if the database type is Person we would apply a - // custom converter if registered for Person if (conversions.hasCustomReadTarget(bson.getClass(), rawType)) { return doConvert(bson, rawType); } @@ -335,7 +352,7 @@ private S readDocument(ConversionContext ctx, Bson bson, Type throw new MappingException(String.format(INVALID_TYPE_TO_READ, document, rawType)); } - return read(ctx, (MongoPersistentEntity) entity, document); + return read(context, (MongoPersistentEntity) entity, document); } private ParameterValueProvider getParameterProvider(ConversionContext context, @@ -395,13 +412,6 @@ private S populateProperties(ConversionContext context, MongoPersistentEntit /** * Reads the identifier from either the bean backing the {@link PersistentPropertyAccessor} or the source document in * case the identifier has not be populated yet. In this case the identifier is set on the bean for further reference. - * - * @param context must not be {@literal null}. - * @param accessor must not be {@literal null}. - * @param document must not be {@literal null}. - * @param entity must not be {@literal null}. - * @param evaluator must not be {@literal null}. - * @return */ @Nullable private Object readAndPopulateIdentifier(ConversionContext context, PersistentPropertyAccessor accessor, @@ -1033,9 +1043,11 @@ private Object getPotentiallyConvertedSimpleWrite(@Nullable Object value, @Nulla /** * Checks whether we have a custom conversion for the given simple object. Converts the given value if so, applies - * {@link Enum} handling or returns the value as is. + * {@link Enum} handling or returns the value as is. Can be overridden by subclasses. 
+ * + * @since 3.2 */ - private Object getPotentiallyConvertedSimpleRead(Object value, TypeInformation target) { + protected Object getPotentiallyConvertedSimpleRead(Object value, TypeInformation target) { return getPotentiallyConvertedSimpleRead(value, target.getType()); } @@ -1103,15 +1115,18 @@ private Object getValueInternal(ConversionContext context, MongoPersistentProper } /** - * Reads the given {@link BasicDBList} into a collection of the given {@link TypeInformation}. + * Reads the given {@link Collection} into a collection of the given {@link TypeInformation}. Can be overridden by + * subclasses. * - * @param source must not be {@literal null}. - * @param targetType must not be {@literal null}. - * @param path must not be {@literal null}. + * @param context must not be {@literal null} + * @param source must not be {@literal null} + * @param targetType the {@link Map} {@link TypeInformation} to be used to unmarshall this {@link Document}. + * @since 3.2 * @return the converted {@link Collection} or array, will never be {@literal null}. */ @SuppressWarnings("unchecked") - private Object readCollectionOrArray(ConversionContext context, Collection source, TypeInformation targetType) { + protected Object readCollectionOrArray(ConversionContext context, Collection source, + TypeInformation targetType) { Assert.notNull(targetType, "Target type must not be null!"); @@ -1139,7 +1154,7 @@ private Object readCollectionOrArray(ConversionContext context, Collection so } for (Object element : source) { - items.add(context.convert(componentType, element)); + items.add(context.convert(element, componentType)); } return getPotentiallyConvertedSimpleRead(items, targetType.getType()); @@ -1148,19 +1163,37 @@ private Object readCollectionOrArray(ConversionContext context, Collection so /** * Reads the given {@link Document} into a {@link Map}. will recursively resolve nested {@link Map}s as well. * - * @param bson must not be {@literal null} * @param type the {@link Map} {@link TypeInformation} to be used to unmarshall this {@link Document}. + * @param bson must not be {@literal null} + * @param path must not be {@literal null} + * @return + * @deprecated since 3.2. Use {@link #readMap(ConversionContext, Bson, TypeInformation)} instead. */ - protected Map readMap(ConversionContext context, Bson bson, TypeInformation type) { + @Deprecated + protected Map readMap(TypeInformation type, Bson bson, ObjectPath path) { + return readMap(getConversionContext(path), bson, type); + } + + /** + * Reads the given {@link Document} into a {@link Map}. will recursively resolve nested {@link Map}s as well. Can be + * overridden by subclasses. + * + * @param context must not be {@literal null} + * @param bson must not be {@literal null} + * @param targetType the {@link Map} {@link TypeInformation} to be used to unmarshall this {@link Document}. + * @return the converted {@link Map}, will never be {@literal null}. + * @since 3.2 + */ + protected Map readMap(ConversionContext context, Bson bson, TypeInformation targetType) { Assert.notNull(bson, "Document must not be null!"); - Assert.notNull(type, "TypeInformation must not be null!"); + Assert.notNull(targetType, "TypeInformation must not be null!"); - Class mapType = typeMapper.readType(bson, type).getType(); + Class mapType = typeMapper.readType(bson, targetType).getType(); - TypeInformation keyType = type.getComponentType(); - TypeInformation valueType = type.getMapValueType() == null ? 
ClassTypeInformation.OBJECT - : type.getRequiredMapValueType(); + TypeInformation keyType = targetType.getComponentType(); + TypeInformation valueType = targetType.getMapValueType() == null ? ClassTypeInformation.OBJECT + : targetType.getRequiredMapValueType(); Class rawKeyType = keyType != null ? keyType.getType() : Object.class; Class rawValueType = valueType.getType(); @@ -1186,7 +1219,7 @@ protected Map readMap(ConversionContext context, Bson bson, Type } Object value = entry.getValue(); - map.put(key, context.convert(valueType, value)); + map.put(key, context.convert(value, valueType)); } return map; @@ -1372,7 +1405,7 @@ T readValue(ConversionContext context, @Nullable Object value, TypeInformati return (T) readDBRef(context, (DBRef) value, type); } - return (T) context.convert(type, value); + return (T) context.convert(value, type); } @Nullable @@ -1610,7 +1643,7 @@ public T getPropertyValue(MongoPersistentProperty property) { return null; } - return (T) context.convert(property.getTypeInformation(), value); + return (T) context.convert(value, property.getTypeInformation()); } } @@ -1699,7 +1732,7 @@ public ConverterAwareSpELExpressionParameterValueProvider(ConversionContext cont */ @Override protected T potentiallyConvertSpelValue(Object object, Parameter parameter) { - return context.convert(parameter.getType(), object); + return context.convert(object, parameter.getType()); } } @@ -1819,8 +1852,11 @@ public org.springframework.data.util.TypeInformation specialize(Cla /** * Conversion context holding references to simple {@link ValueConverter} and {@link ContainerValueConverter}. + * Entrypoint for recursive conversion of {@link Document} and other types. + * + * @since 3.2 */ - static class ConversionContext { + protected static class ConversionContext { private final ObjectPath path; private final ContainerValueConverter documentConverter; @@ -1841,12 +1877,21 @@ static class ConversionContext { this.elementConverter = elementConverter; } + /** + * Converts a source object into {@link TypeInformation target}. + * + * @param source must not be {@literal null}. + * @param typeHint must not be {@literal null}. + * @return the converted object. 
+ */ @SuppressWarnings("unchecked") - public S convert(TypeInformation typeToUse, Object source) { + public S convert(Object source, TypeInformation typeHint) { + + Assert.notNull(typeHint, "TypeInformation must not be null"); if (source instanceof Collection) { - Class rawType = typeToUse.getType(); + Class rawType = typeHint.getType(); if (!Object.class.equals(rawType)) { if (!rawType.isArray() && !ClassUtils.isAssignable(Iterable.class, rawType)) { throw new MappingException( @@ -1854,29 +1899,29 @@ public S convert(TypeInformation typeToUse, Obje } } - if (typeToUse.isCollectionLike() || typeToUse.getType().isAssignableFrom(Collection.class)) { - return (S) collectionConverter.convert(this, (Collection) source, typeToUse); + if (typeHint.isCollectionLike() || typeHint.getType().isAssignableFrom(Collection.class)) { + return (S) collectionConverter.convert(this, (Collection) source, typeHint); } } - if (typeToUse.isMap()) { - return (S) mapConverter.convert(this, (Bson) source, typeToUse); + if (typeHint.isMap()) { + return (S) mapConverter.convert(this, (Bson) source, typeHint); } if (source instanceof DBRef) { - return (S) dbRefConverter.convert(this, (DBRef) source, typeToUse); + return (S) dbRefConverter.convert(this, (DBRef) source, typeHint); } if (source instanceof Collection) { throw new MappingException( - String.format(INCOMPATIBLE_TYPES, source, BasicDBList.class, typeToUse.getType(), getPath())); + String.format(INCOMPATIBLE_TYPES, source, BasicDBList.class, typeHint.getType(), getPath())); } if (source instanceof Bson) { - return (S) documentConverter.convert(this, (Bson) source, typeToUse); + return (S) documentConverter.convert(this, (Bson) source, typeHint); } - return (S) elementConverter.convert(source, typeToUse); + return (S) elementConverter.convert(source, typeHint); } /** From 8e56d3ced7519d0912f913f34f3161479a2c35f3 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Tue, 2 Mar 2021 14:33:23 +0100 Subject: [PATCH 6/7] Remove MapUtils and use BsonUtils instead. --- .../data/mongodb/core/convert/MapUtils.java | 111 --------------- .../core/convert/MappingMongoConverter.java | 25 ++-- .../data/mongodb/util/BsonUtils.java | 127 +++++++++++++++++- 3 files changed, 136 insertions(+), 127 deletions(-) delete mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MapUtils.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MapUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MapUtils.java deleted file mode 100644 index 75bc72beaf..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MapUtils.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright 2021 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.core.convert; - -import java.util.Collection; -import java.util.Collections; -import java.util.Map; - -import org.bson.Document; -import org.bson.conversions.Bson; - -import org.springframework.lang.Nullable; -import org.springframework.util.CollectionUtils; - -import com.mongodb.DBObject; - -/** - * @author Mark Paluch - */ -class MapUtils { - /** - * Returns given object as {@link Collection}. Will return the {@link Collection} as is if the source is a - * {@link Collection} already, will convert an array into a {@link Collection} or simply create a single element - * collection for everything else. - * - * @param source - * @return - */ - static Collection asCollection(Object source) { - - if (source instanceof Collection) { - return (Collection) source; - } - - return source.getClass().isArray() ? CollectionUtils.arrayToList(source) : Collections.singleton(source); - } - - @SuppressWarnings("unchecked") - static Map asMap(Bson bson) { - - if (bson instanceof Document) { - return (Document) bson; - } - - if (bson instanceof DBObject) { - return ((DBObject) bson).toMap(); - } - - throw new IllegalArgumentException( - String.format("Cannot read %s. as map. Given Bson must be a Document or DBObject!", bson.getClass())); - } - - static void addToMap(Bson bson, String key, @Nullable Object value) { - - if (bson instanceof Document) { - ((Document) bson).put(key, value); - return; - } - if (bson instanceof DBObject) { - ((DBObject) bson).put(key, value); - return; - } - throw new IllegalArgumentException(String.format( - "Cannot add key/value pair to %s. as map. Given Bson must be a Document or DBObject!", bson.getClass())); - } - - static void addAllToMap(Bson bson, Map value) { - - if (bson instanceof Document) { - ((Document) bson).putAll(value); - return; - } - - if (bson instanceof DBObject) { - ((DBObject) bson).putAll(value); - return; - } - - throw new IllegalArgumentException( - String.format("Cannot add all to %s. Given Bson must be a Document or DBObject.", bson.getClass())); - } - - static void removeFromMap(Bson bson, String key) { - - if (bson instanceof Document) { - ((Document) bson).remove(key); - return; - } - - if (bson instanceof DBObject) { - ((DBObject) bson).removeField(key); - return; - } - - throw new IllegalArgumentException( - String.format("Cannot remove from %s. Given Bson must be a Document or DBObject.", bson.getClass())); - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index 4cd799238b..196fb7b31f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -70,6 +70,7 @@ import org.springframework.data.mongodb.core.mapping.event.AfterConvertEvent; import org.springframework.data.mongodb.core.mapping.event.AfterLoadEvent; import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent; +import org.springframework.data.mongodb.util.BsonUtils; import org.springframework.data.util.ClassTypeInformation; import org.springframework.data.util.TypeInformation; import org.springframework.lang.Nullable; @@ -572,9 +573,7 @@ public void write(Object obj, Bson bson) { Object target = obj instanceof LazyLoadingProxy ? 
((LazyLoadingProxy) obj).getTarget() : obj; writeInternal(target, bson, type); - if (MapUtils.asMap(bson).containsKey("_id") && MapUtils.asMap(bson).get("_id") == null) { - MapUtils.removeFromMap(bson, "_id"); - } + BsonUtils.removeNullId(bson); if (requiresTypeHint(entityType)) { typeMapper.writeType(type, bson); @@ -608,7 +607,7 @@ protected void writeInternal(@Nullable Object obj, Bson bson, @Nullable TypeInfo if (customTarget.isPresent()) { Document result = doConvert(obj, Document.class); - MapUtils.addAllToMap(bson, result); + BsonUtils.addAllToMap(bson, result); return; } @@ -709,12 +708,14 @@ protected void writePropertyInternal(@Nullable Object obj, DocumentAccessor acce } if (valueType.isCollectionLike()) { - List collectionInternal = createCollection(MapUtils.asCollection(obj), prop); + + List collectionInternal = createCollection(BsonUtils.asCollection(obj), prop); accessor.put(prop, collectionInternal); return; } if (valueType.isMap()) { + Bson mapDbObj = createMap((Map) obj, prop); accessor.put(prop, mapDbObj); return; @@ -859,7 +860,7 @@ private List writeCollectionInternal(Collection source, @Nullable Typ collection.add(getPotentiallyConvertedSimpleWrite(element, componentType != null ? componentType.getType() : Object.class)); } else if (element instanceof Collection || elementType.isArray()) { - collection.add(writeCollectionInternal(MapUtils.asCollection(element), componentType, new BasicDBList())); + collection.add(writeCollectionInternal(BsonUtils.asCollection(element), componentType, new BasicDBList())); } else { Document document = new Document(); writeInternal(element, document, componentType); @@ -890,14 +891,14 @@ protected Bson writeMapInternal(Map obj, Bson bson, TypeInformat if (val == null || conversions.isSimpleType(val.getClass())) { writeSimpleInternal(val, bson, simpleKey); } else if (val instanceof Collection || val.getClass().isArray()) { - MapUtils.addToMap(bson, simpleKey, - writeCollectionInternal(MapUtils.asCollection(val), propertyType.getMapValueType(), new BasicDBList())); + BsonUtils.addToMap(bson, simpleKey, + writeCollectionInternal(BsonUtils.asCollection(val), propertyType.getMapValueType(), new BasicDBList())); } else { Document document = new Document(); TypeInformation valueTypeInfo = propertyType.isMap() ? propertyType.getMapValueType() : ClassTypeInformation.OBJECT; writeInternal(val, document, valueTypeInfo); - MapUtils.addToMap(bson, simpleKey, document); + BsonUtils.addToMap(bson, simpleKey, document); } } else { throw new MappingException("Cannot use a complex object as a key value."); @@ -997,7 +998,7 @@ protected void addCustomTypeKeyIfNecessary(@Nullable TypeInformation type, Ob * @param key must not be {@literal null}. */ private void writeSimpleInternal(@Nullable Object value, Bson bson, String key) { - MapUtils.addToMap(bson, key, getPotentiallyConvertedSimpleWrite(value, Object.class)); + BsonUtils.addToMap(bson, key, getPotentiallyConvertedSimpleWrite(value, Object.class)); } private void writeSimpleInternal(@Nullable Object value, Bson bson, MongoPersistentProperty property) { @@ -1035,7 +1036,7 @@ private Object getPotentiallyConvertedSimpleWrite(@Nullable Object value, @Nulla if (value instanceof byte[]) { return value; } - return MapUtils.asCollection(value); + return BsonUtils.asCollection(value); } return Enum.class.isAssignableFrom(value.getClass()) ? ((Enum) value).name() : value; @@ -1198,7 +1199,7 @@ protected Map readMap(ConversionContext context, Bson bson, Type Class rawKeyType = keyType != null ? 
keyType.getType() : Object.class; Class rawValueType = valueType.getType(); - Map sourceMap = MapUtils.asMap(bson); + Map sourceMap = BsonUtils.asMap(bson); Map map = CollectionFactory.createMap(mapType, rawKeyType, sourceMap.keySet().size()); if (!DBRef.class.equals(rawValueType) && isCollectionOfDbRefWhereBulkFetchIsPossible(sourceMap.values())) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java index 5b036072bf..d3255437dc 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java @@ -17,12 +17,14 @@ import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.Date; import java.util.Map; import java.util.StringJoiner; import java.util.function.Function; import java.util.stream.StreamSupport; +import org.bson.BSONObject; import org.bson.BsonBinary; import org.bson.BsonBoolean; import org.bson.BsonDouble; @@ -36,11 +38,11 @@ import org.bson.conversions.Bson; import org.bson.json.JsonParseException; import org.bson.types.ObjectId; - import org.springframework.core.convert.converter.Converter; import org.springframework.data.mongodb.CodecRegistryProvider; import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.CollectionUtils; import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; @@ -50,6 +52,8 @@ import com.mongodb.MongoClientSettings; /** + * Internal API for operations on {@link Bson} elements that can be either {@link Document} or {@link DBObject}. + * * @author Christoph Strobl * @author Mark Paluch * @since 2.0 @@ -62,6 +66,47 @@ public static T get(Bson bson, String key) { return (T) asMap(bson).get(key); } + /** + * Remove {@code _id : null} from the given {@link Bson} if present. + * + * @param bson must not be {@literal null}. + * @since 2.5 + */ + public static void removeNullId(Bson bson) { + + if (!contains(bson, "_id", null)) { + return; + } + + removeFrom(bson, "_id"); + } + + /** + * Check if a given entry (key/value pair) is present in the given {@link Bson}. + * + * @param bson must not be {@literal null}. + * @param key must not be {@literal null}. + * @param value can be {@literal null}. + * @return {@literal true} if (key/value pair) is present. 
+ * @since 2.5 + */ + public static boolean contains(Bson bson, String key, @Nullable Object value) { + + if (bson instanceof Document) { + + Document doc = (Document) bson; + return doc.containsKey(key) && ObjectUtils.nullSafeEquals(doc.get(key), value); + } + if (bson instanceof BSONObject) { + + BSONObject bsonObject = (BSONObject) bson; + return bsonObject.containsField(key) && ObjectUtils.nullSafeEquals(bsonObject.get(key), value); + } + + Map map = asMap(bson); + return map.containsKey(key) && ObjectUtils.nullSafeEquals(map.get(key), value); + } + public static Map asMap(Bson bson) { if (bson instanceof Document) { @@ -70,6 +115,9 @@ public static Map asMap(Bson bson) { if (bson instanceof BasicDBObject) { return ((BasicDBObject) bson); } + if (bson instanceof DBObject) { + return ((DBObject) bson).toMap(); + } return (Map) bson.toBsonDocument(Document.class, MongoClientSettings.getDefaultCodecRegistry()); } @@ -77,14 +125,68 @@ public static Map asMap(Bson bson) { public static void addToMap(Bson bson, String key, @Nullable Object value) { if (bson instanceof Document) { + ((Document) bson).put(key, value); return; } - if (bson instanceof DBObject) { - ((DBObject) bson).put(key, value); + if (bson instanceof BSONObject) { + + ((BSONObject) bson).put(key, value); return; } - throw new IllegalArgumentException("o_O what's that? Cannot add value to " + bson.getClass()); + + throw new IllegalArgumentException(String.format( + "Cannot add key/value pair to %s. as map. Given Bson must be a Document or BSONObject!", bson.getClass())); + } + + /** + * Add all entries from the given {@literal source} {@link Map} to the {@literal target}. + * + * @param target must not be {@literal null}. + * @param source must not be {@literal null}. + * @since 2.5 + */ + public static void addAllToMap(Bson target, Map source) { + + if (target instanceof Document) { + + ((Document) target).putAll(source); + return; + } + + if (target instanceof BSONObject) { + + ((BSONObject) target).putAll(source); + return; + } + + throw new IllegalArgumentException( + String.format("Cannot add all to %s. Given Bson must be a Document or BSONObject.", target.getClass())); + } + + /** + * Remove the given {@literal key} from the {@link Bson} value. + * + * @param bson must not be {@literal null}. + * @param key must not be {@literal null}. + * @since 2.5 + */ + static void removeFrom(Bson bson, String key) { + + if (bson instanceof Document) { + + ((Document) bson).remove(key); + return; + } + + if (bson instanceof BSONObject) { + + ((BSONObject) bson).removeField(key); + return; + } + + throw new IllegalArgumentException( + String.format("Cannot remove from %s. Given Bson must be a Document or BSONObject.", bson.getClass())); } /** @@ -282,6 +384,23 @@ public static Document parse(String json, @Nullable CodecRegistryProvider codecR .orElseGet(() -> new DocumentCodec(codecRegistryProvider.getCodecRegistry()))); } + /** + * Returns given object as {@link Collection}. Will return the {@link Collection} as is if the source is a + * {@link Collection} already, will convert an array into a {@link Collection} or simply create a single element + * collection for everything else. + * + * @param source + * @return + */ + public static Collection asCollection(Object source) { + + if (source instanceof Collection) { + return (Collection) source; + } + + return source.getClass().isArray() ? 
CollectionUtils.arrayToList(source) : Collections.singleton(source); + } + @Nullable private static String toJson(@Nullable Object value) { From 2f802cd734508408d0f880e281faa1076ace6aa2 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Tue, 2 Mar 2021 15:07:38 +0100 Subject: [PATCH 7/7] Remove duplicate code. --- .../core/convert/MappingMongoConverter.java | 52 +++++++------------ 1 file changed, 18 insertions(+), 34 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index 196fb7b31f..4dcb825606 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -271,17 +271,27 @@ public S read(Class clazz, final Bson bson) { } protected S read(TypeInformation type, Bson bson) { - return doRead(getConversionContext(ObjectPath.ROOT), type, bson); + return readDocument(getConversionContext(ObjectPath.ROOT), bson, type); } + /** + * Conversion method to materialize an object from a {@link Bson document}. Can be overridden by subclasses. + * + * @param context must not be {@literal null} + * @param bson must not be {@literal null} + * @param typeHint the {@link TypeInformation} to be used to unmarshall this {@link Document}. + * @return the converted object, will never be {@literal null}. + * @since 3.2 + */ @SuppressWarnings("unchecked") - private S doRead(ConversionContext context, TypeInformation type, Bson bson) { - - Assert.notNull(bson, "Bson must not be null!"); + protected S readDocument(ConversionContext context, Bson bson, + TypeInformation typeHint) { // TODO: Cleanup duplication - TypeInformation typeToUse = typeMapper.readType(bson, type); - Class rawType = typeToUse.getType(); + + Document document = bson instanceof BasicDBObject ? new Document((BasicDBObject) bson) : (Document) bson; + TypeInformation typeToRead = typeMapper.readType(document, typeHint); + Class rawType = typeToRead.getType(); if (conversions.hasCustomReadTarget(bson.getClass(), rawType)) { return doConvert(bson, rawType); @@ -304,34 +314,8 @@ private S doRead(ConversionContext context, TypeInformation S readDocument(ConversionContext context, Bson bson, - TypeInformation typeHint) { - - // TODO: Cleanup duplication - - Document document = bson instanceof BasicDBObject ? new Document((BasicDBObject) bson) : (Document) bson; - TypeInformation typeToRead = typeMapper.readType(document, typeHint); - Class rawType = typeToRead.getType(); - - if (conversions.hasCustomReadTarget(bson.getClass(), rawType)) { - return doConvert(bson, rawType); - } - if (typeToRead.isMap()) { - return (S) bson; + return context.convert(bson, typeToRead); } if (BSON.isAssignableFrom(typeHint)) { @@ -1463,7 +1447,7 @@ private List bulkReadAndConvertDBRefs(ConversionContext context, List(document, (Class) type.getType(), collectionName)); - target = (T) doRead(context, type, document); + target = (T) readDocument(context, document, type); } if (target != null) {