Skip to content

Commit

Permalink
Revert "Deprecate resolution loss on date field (#78921) backport(#79355)" (#79613)
Browse files Browse the repository at this point in the history

This reverts commit ff6e589.
and
docs commit 69cf4a3.
  • Loading branch information
pgomulka committed Oct 21, 2021
1 parent d5b05fd commit ca1c03c
Show file tree
Hide file tree
Showing 14 changed files with 37 additions and 120 deletions.
21 changes: 3 additions & 18 deletions docs/reference/migration/migrate_7_16.asciidoc
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@ logging>>.
====
*Details* +
In SAML, Identity Providers (IdPs) can either be explicitly configured to
release a `NameID` with a specific format, or configured to attempt to conform
release a `NameID` with a specific format, or configured to attempt to conform
with the requirements of a Service Provider (SP). The SP declares its
requirements in the `NameIDPolicy` element of a SAML Authentication Request.
In {es}, the `nameid_format` SAML realm setting controls the `NameIDPolicy`
Expand All @@ -103,9 +103,9 @@ IdP. If you want to retain the previous behavior, set `nameid_format` to
*Impact* +
If you currently don't configure `nameid_format` explicitly, it's possible
that your IdP will reject authentication requests from {es} because the requests
that your IdP will reject authentication requests from {es} because the requests
do not specify a `NameID` format (and your IdP is configured to expect one).
This mismatch can result in a broken SAML configuration. If you're unsure whether
This mismatch can result in a broken SAML configuration. If you're unsure whether
your IdP is explicitly configured to use a certain `NameID` format and you want to retain current behavior
, try setting `nameid_format` to `urn:oasis:names:tc:SAML:2.0:nameid-format:transient` explicitly.
====
Expand Down Expand Up @@ -347,19 +347,4 @@ cache do not expire.
To override the defaults, configure the `script.cache.max_size`,
`script.max_compilations_rate`, and `script.cache.expire` settings.
====

.Attempting to store nanosecond resolution in a `date` field is deprecated.
[%collapsible]
====
*Details* +
Attempting to store a nanosecond resolution in a {ref}/date.html[`date`] field is deprecated.
While previously allowed, these attempts always resulted in resolution loss.
A `date` field can only store up to millisecond resolutions.
*Impact* +
If you attempt to store a nanosecond resolution in a `date` type field, {es} will
emit a deprecation warning. To avoid deprecation warnings, use a
{ref}/date_nanos.html[`date_nanos`] field instead.
====

// end::notable-breaking-changes[]
Original file line number Diff line number Diff line change
Expand Up @@ -219,7 +219,7 @@ public void testDateNanosFormatUpgrade() throws IOException {
Request index = new Request("POST", "/" + indexName + "/_doc/");
XContentBuilder doc = XContentBuilder.builder(XContentType.JSON.xContent())
.startObject()
.field("date", "2015-01-01T12:10:30.123Z")
.field("date", "2015-01-01T12:10:30.123456789Z")
.field("date_nanos", "2015-01-01T12:10:30.123456789Z")
.endObject();
index.addParameter("refresh", "true");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
index: timetest
body:
mappings:
"properties": { "my_time": {"type": "date_nanos", "format": "strict_date_optional_time_nanos"}}
"properties": { "my_time": {"type": "date", "format": "strict_date_optional_time_nanos"}}

- do:
ingest.put_pipeline:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ setup:
mappings:
properties:
mydate:
type: date_nanos
type: date
format: "uuuu-MM-dd'T'HH:mm:ss.SSSSSSSSSZZZZZ"

- do:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,6 @@ public final class DateFieldMapper extends FieldMapper {
public static final DateFormatter DEFAULT_DATE_TIME_FORMATTER = DateFormatter.forPattern("strict_date_optional_time||epoch_millis");
public static final DateFormatter DEFAULT_DATE_TIME_NANOS_FORMATTER =
DateFormatter.forPattern("strict_date_optional_time_nanos||epoch_millis");
private final String indexName;

public enum Resolution {
MILLISECONDS(CONTENT_TYPE, NumericType.DATE) {
Expand Down Expand Up @@ -235,7 +234,6 @@ public static class Builder extends FieldMapper.Builder {
private final Parameter<String> nullValue
= Parameter.stringParam("null_value", false, m -> toType(m).nullValueAsString, null).acceptsNull();
private final Parameter<Boolean> ignoreMalformed;
private String indexName;

private final Parameter<Script> script = Parameter.scriptParam(m -> toType(m).script);
private final Parameter<String> onScriptError = Parameter.onScriptErrorParam(m -> toType(m).onScriptError, script);
Expand All @@ -246,14 +244,13 @@ public static class Builder extends FieldMapper.Builder {

public Builder(String name, Resolution resolution, DateFormatter dateFormatter,
ScriptCompiler scriptCompiler,
boolean ignoreMalformedByDefault, Version indexCreatedVersion, String indexName) {
boolean ignoreMalformedByDefault, Version indexCreatedVersion) {
super(name);
this.resolution = resolution;
this.indexCreatedVersion = indexCreatedVersion;
this.scriptCompiler = Objects.requireNonNull(scriptCompiler);
this.ignoreMalformed
= Parameter.boolParam("ignore_malformed", true, m -> toType(m).ignoreMalformed, ignoreMalformedByDefault);
this.indexName = indexName;

DateFormatter defaultFormat = resolution == Resolution.NANOSECONDS && indexCreatedVersion.onOrAfter(Version.V_7_0_0) ?
DEFAULT_DATE_TIME_NANOS_FORMATTER : DEFAULT_DATE_TIME_FORMATTER;
Expand Down Expand Up @@ -295,13 +292,12 @@ protected List<Parameter<?>> getParameters() {
return Arrays.asList(index, docValues, store, format, locale, nullValue, ignoreMalformed, script, onScriptError, boost, meta);
}

private Long parseNullValue(DateFieldType fieldType, String indexName) {
private Long parseNullValue(DateFieldType fieldType) {
if (nullValue.getValue() == null) {
return null;
}
try {
final String fieldName = fieldType.name();
return fieldType.parseNullValueWithDeprecation(nullValue.getValue(), fieldName, indexName);
return fieldType.parse(nullValue.getValue());
} catch (Exception e) {
DEPRECATION_LOGGER.critical(DeprecationCategory.MAPPINGS, "date_mapper_null_field",
"Error parsing [" + nullValue.getValue() + "] as date in [null_value] on field [" + name() + "]);"
Expand All @@ -315,7 +311,7 @@ public DateFieldMapper build(MapperBuilderContext context) {
DateFieldType ft = new DateFieldType(context.buildFullName(name()), index.getValue(), store.getValue(), docValues.getValue(),
buildFormatter(), resolution, nullValue.getValue(), scriptValues(), meta.getValue());
ft.setBoost(boost.getValue());
Long nullTimestamp = parseNullValue(ft, indexName);
Long nullTimestamp = parseNullValue(ft);
return new DateFieldMapper(name, ft, multiFieldsBuilder.build(this, context),
copyTo.build(), nullTimestamp, resolution, this);
}
Expand All @@ -324,15 +320,13 @@ public DateFieldMapper build(MapperBuilderContext context) {
public static final TypeParser MILLIS_PARSER = new TypeParser((n, c) -> {
boolean ignoreMalformedByDefault = IGNORE_MALFORMED_SETTING.get(c.getSettings());
return new Builder(n, Resolution.MILLISECONDS, c.getDateFormatter(), c.scriptCompiler(),
ignoreMalformedByDefault, c.indexVersionCreated(),
c.getIndexSettings() != null ? c.getIndexSettings().getIndex().getName() : null);
ignoreMalformedByDefault, c.indexVersionCreated());
});

public static final TypeParser NANOS_PARSER = new TypeParser((n, c) -> {
boolean ignoreMalformedByDefault = IGNORE_MALFORMED_SETTING.get(c.getSettings());
return new Builder(n, Resolution.NANOSECONDS, c.getDateFormatter(), c.scriptCompiler(),
ignoreMalformedByDefault, c.indexVersionCreated(),
c.getIndexSettings() != null ? c.getIndexSettings().getIndex().getName() : null);
ignoreMalformedByDefault, c.indexVersionCreated());
});

public static final class DateFieldType extends MappedFieldType {
Expand Down Expand Up @@ -388,31 +382,7 @@ protected DateMathParser dateMathParser() {

// Visible for testing.
public long parse(String value) {
final Instant instant = getInstant(value);
return resolution.convert(instant);
}

public long parseWithDeprecation(String value, String fieldName, String indexName) {
final Instant instant = getInstant(value);
if (resolution == Resolution.MILLISECONDS && instant.getNano() % 1000000 != 0) {
DEPRECATION_LOGGER.warn(DeprecationCategory.MAPPINGS, "date_field_with_nanos",
"You are attempting to store a nanosecond resolution on a field [{}] of type date on index [{}]. " +
"The nanosecond part was lost. Use date_nanos field type.", fieldName, indexName);
}
return resolution.convert(instant);
}

public long parseNullValueWithDeprecation(String value, String fieldName, String indexName) {
final Instant instant = getInstant(value);
if (resolution == Resolution.MILLISECONDS && instant.getNano() % 1000000 != 0) {
DEPRECATION_LOGGER.warn(DeprecationCategory.MAPPINGS, "date_field_with_nanos",
"You are attempting to set null_value with a nanosecond resolution on a field [{}] of type date on index [{}]. " +
"The nanosecond part was lost. Use date_nanos field type.", fieldName, indexName);
}
return resolution.convert(instant);
}
private Instant getInstant(String value) {
return DateFormatters.from(dateTimeFormatter().parse(value), dateTimeFormatter().locale()).toInstant();
return resolution.convert(DateFormatters.from(dateTimeFormatter().parse(value), dateTimeFormatter().locale()).toInstant());
}

/**
Expand Down Expand Up @@ -696,13 +666,11 @@ private DateFieldMapper(
this.script = builder.script.get();
this.scriptCompiler = builder.scriptCompiler;
this.scriptValues = builder.scriptValues();
this.indexName = builder.indexName;
}

@Override
public FieldMapper.Builder getMergeBuilder() {
return new Builder(simpleName(), resolution, null, scriptCompiler, ignoreMalformedByDefault, indexCreatedVersion, indexName)
.init(this);
return new Builder(simpleName(), resolution, null, scriptCompiler, ignoreMalformedByDefault, indexCreatedVersion).init(this);
}

@Override
Expand All @@ -727,9 +695,7 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio
timestamp = nullValue;
} else {
try {
final String fieldName = fieldType().name();
final String indexName = context.indexSettings().getIndex().getName();
timestamp = fieldType().parseWithDeprecation(dateAsString, fieldName, indexName);
timestamp = fieldType().parse(dateAsString);
} catch (IllegalArgumentException | ElasticsearchParseException | DateTimeException | ArithmeticException e) {
if (ignoreMalformed) {
context.addIgnoredField(mappedFieldType.name());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -300,8 +300,7 @@ public void newDynamicDateField(DocumentParserContext context, String name, Date
Settings settings = context.indexSettings().getSettings();
boolean ignoreMalformed = FieldMapper.IGNORE_MALFORMED_SETTING.get(settings);
createDynamicField(new DateFieldMapper.Builder(name, DateFieldMapper.Resolution.MILLISECONDS,
dateTimeFormatter, ScriptCompiler.NONE, ignoreMalformed, context.indexSettings().getIndexVersionCreated(),
context.indexSettings().getIndex().getName()), context);
dateTimeFormatter, ScriptCompiler.NONE, ignoreMalformed, context.indexSettings().getIndexVersionCreated()), context);
}

void newDynamicBinaryField(DocumentParserContext context, String name) throws IOException {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -106,7 +106,8 @@ public Mapping parse(@Nullable String type, CompressedXContent source, String de
}

MappingParserContext parserContext = parserContextSupplier.get();
RootObjectMapper rootObjectMapper = rootObjectTypeParser.parse(type, mapping, parserContext).build(MapperBuilderContext.ROOT);
RootObjectMapper rootObjectMapper
= rootObjectTypeParser.parse(type, mapping, parserContext).build(MapperBuilderContext.ROOT);

Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappers = metadataMappersFunction.apply(type);
Map<String, Object> meta = null;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -565,15 +565,15 @@ public void testRolloverClusterStateForDataStream() throws Exception {
null,
ScriptCompiler.NONE,
false,
Version.CURRENT, "indexName").build(MapperBuilderContext.ROOT);
Version.CURRENT).build(MapperBuilderContext.ROOT);
ClusterService clusterService = ClusterServiceUtils.createClusterService(testThreadPool);
Environment env = mock(Environment.class);
when(env.sharedDataFile()).thenReturn(null);
AllocationService allocationService = mock(AllocationService.class);
when(allocationService.reroute(any(ClusterState.class), any(String.class))).then(i -> i.getArguments()[0]);
RootObjectMapper.Builder root = new RootObjectMapper.Builder("_doc");
root.add(new DateFieldMapper.Builder(dataStream.getTimeStampField().getName(), DateFieldMapper.Resolution.MILLISECONDS,
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, ScriptCompiler.NONE, true, Version.CURRENT, "indexName"));
DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER, ScriptCompiler.NONE, true, Version.CURRENT));
MetadataFieldMapper dtfm = getDataStreamTimestampFieldMapper();
Mapping mapping = new Mapping(
root.build(MapperBuilderContext.ROOT),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@

package org.elasticsearch.index.mapper;

import org.apache.logging.log4j.Level;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.common.time.DateFormatter;
Expand Down Expand Up @@ -149,17 +148,6 @@ public void testIgnoreMalformed() throws IOException {
testIgnoreMalformedForValue("-522000000", "long overflow", "date_optional_time");
}

public void testResolutionLossDeprecation() throws Exception {
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b
.field("type", "date")));

ParsedDocument doc = mapper.parse(source(b -> b.field("field", "2018-10-03T14:42:44.123456+0000")));

assertWarnings(true, new DeprecationWarning(Level.WARN, "You are attempting to store a nanosecond resolution " +
"on a field [field] of type date on index [index]. " +
"The nanosecond part was lost. Use date_nanos field type."));
}

private void testIgnoreMalformedForValue(String value, String expectedCause, String dateFormat) throws IOException {

DocumentMapper mapper = createDocumentMapper(fieldMapping((builder)-> dateFieldMapping(builder, dateFormat)));
Expand Down Expand Up @@ -415,11 +403,11 @@ public void testFetchMillisFromIso8601() throws IOException {
}

public void testFetchMillisFromIso8601Nanos() throws IOException {
assertFetch(dateNanosMapperService(), "field", randomIs8601Nanos(MAX_NANOS), null);
assertFetch(dateMapperService(), "field", randomIs8601Nanos(MAX_ISO_DATE), null);
}

public void testFetchMillisFromIso8601NanosFormatted() throws IOException {
assertFetch(dateNanosMapperService(), "field", randomIs8601Nanos(MAX_NANOS), "strict_date_optional_time_nanos");
assertFetch(dateMapperService(), "field", randomIs8601Nanos(MAX_ISO_DATE), "strict_date_optional_time_nanos");
}

/**
Expand All @@ -430,8 +418,7 @@ public void testFetchMillisFromIso8601NanosFormatted() throws IOException {
* way.
*/
public void testFetchMillisFromRoundedNanos() throws IOException {
assertFetch(dateMapperService(), "field", randomDecimalMillis(MAX_ISO_DATE), null);
assertFetch(dateNanosMapperService(), "field", randomDecimalNanos(MAX_NANOS), null);
assertFetch(dateMapperService(), "field", randomDecimalNanos(MAX_ISO_DATE), null);
}

/**
Expand Down Expand Up @@ -544,7 +531,7 @@ protected Object generateRandomInputValue(MappedFieldType ft) {
switch (((DateFieldType) ft).resolution()) {
case MILLISECONDS:
if (randomBoolean()) {
return randomDecimalMillis(MAX_ISO_DATE);
return randomIs8601Nanos(MAX_ISO_DATE);
}
return randomLongBetween(0, Long.MAX_VALUE);
case NANOSECONDS:
Expand Down Expand Up @@ -577,10 +564,6 @@ private String randomDecimalNanos(long maxMillis) {
return Long.toString(randomLongBetween(0, maxMillis)) + "." + between(0, 999999);
}

private String randomDecimalMillis(long maxMillis) {
return Long.toString(randomLongBetween(0, maxMillis));
}

public void testScriptAndPrecludedParameters() {
{
Exception e = expectThrows(MapperParsingException.class, () -> createDocumentMapper(fieldMapping(b -> {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.json.JsonXContent;
import org.elasticsearch.test.rest.ESRestTestCase;
Expand Down Expand Up @@ -102,9 +101,7 @@ private void createTemplateWithAllowAutoCreate(Boolean allowAutoCreate) throws I

private Response indexDocument() throws IOException {
final Request indexDocumentRequest = new Request("POST", "recipe_kr/_doc");
final Instant now = Instant.now();
final String time = DateFormatter.forPattern("strict_date_optional_time").format(now);
indexDocumentRequest.setJsonEntity("{ \"@timestamp\": \"" + time + "\", \"name\": \"Kimchi\" }");
indexDocumentRequest.setJsonEntity("{ \"@timestamp\": \"" + Instant.now() + "\", \"name\": \"Kimchi\" }");
return client().performRequest(indexDocumentRequest);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,6 @@
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateFormatters;
import org.elasticsearch.test.rest.ESRestTestCase;

import java.time.ZoneOffset;
Expand Down Expand Up @@ -282,7 +280,7 @@ public void testHRDSplit() throws Exception {

for (int i = 1; i <= 100; i++) {
ZonedDateTime time = baseTime.plusHours(i);
String formattedTime = DateFormatter.forPattern("strict_date_optional_time").format(time);
String formattedTime = time.format(DateTimeFormatter.ISO_DATE_TIME);
if (i % 50 == 0) {
// Anomaly has 100 docs, but we don't care about the value
for (int j = 0; j < 100; j++) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -210,8 +210,8 @@ teardown:
test_alias: {}
mappings:
properties:
date:
type: date_nanos
time:
type: date
user:
type: keyword
stars:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,8 +48,8 @@ teardown:
test_alias: {}
mappings:
properties:
date:
type: date_nanos
time:
type: date
user:
type: keyword
stars:
Expand Down Expand Up @@ -107,8 +107,8 @@ teardown:
test_alias: {}
mappings:
properties:
date:
type: date_nanos
time:
type: date
user:
type: keyword
stars:
Expand Down

0 comments on commit ca1c03c

Please sign in to comment.