diff --git a/src/test/resources/log4j.properties b/src/test/resources/log4j.properties
new file mode 100644
index 0000000..c0e243c
--- /dev/null
+++ b/src/test/resources/log4j.properties
@@ -0,0 +1,38 @@
+#
+# Copyright 2021 ABSA Group Limited
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Set everything to be logged to the console
+log4j.rootCategory=WARN, general
+
+log4j.appender.general=org.apache.log4j.ConsoleAppender
+log4j.appender.general.target=System.err
+log4j.appender.general.layout=org.apache.log4j.PatternLayout
+log4j.appender.general.layout.ConversionPattern=[%p] %d{yy/MM/dd HH:mm:ss} %c{1}: %m%n
+
+# Suppress warnings logged within the code inside the tests
+log4j.logger.za.co.absa.standardization.stages.SchemaChecker$=FATAL
+log4j.logger.za.co.absa.standardization.stages.TypeParser$=ERROR
+
+# Suppress a spamming warning from SparkSession$Builder
+log4j.appender.forsparksessionbuilder=org.apache.log4j.ConsoleAppender
+log4j.appender.forsparksessionbuilder.target=System.err
+log4j.appender.forsparksessionbuilder.layout=org.apache.log4j.PatternLayout
+log4j.appender.forsparksessionbuilder.layout.ConversionPattern=@log.pattern@
+log4j.appender.forsparksessionbuilder.filter.01=org.apache.log4j.varia.StringMatchFilter
+log4j.appender.forsparksessionbuilder.filter.01.StringToMatch=Using an existing SparkSession
+log4j.appender.forsparksessionbuilder.filter.01.AcceptOnMatch=false
+log4j.logger.org.apache.spark.sql.SparkSession$Builder=WARN, forsparksessionbuilder
+log4j.additivity.org.apache.spark.sql.SparkSession$Builder=false
+
diff --git a/src/test/scala/za/co/absa/standardization/interpreter/StandardizationInterpreter_ArraySuite.scala b/src/test/scala/za/co/absa/standardization/interpreter/StandardizationInterpreter_ArraySuite.scala
index f7f0876..8ed40d0 100644
--- a/src/test/scala/za/co/absa/standardization/interpreter/StandardizationInterpreter_ArraySuite.scala
+++ b/src/test/scala/za/co/absa/standardization/interpreter/StandardizationInterpreter_ArraySuite.scala
@@ -74,7 +74,6 @@ class StandardizationInterpreter_ArraySuite extends AnyFunSuite with SparkTestBa
     )
 
     val stdDF = Standardization.standardize(src, desiredSchema, stdConfig).cacheIfNotCachedYet()
-    println(stdDF.schema.treeString)
     assert(stdDF.schema.treeString == expectedSchema) // checking schema first
 
     val expectedData = Seq(
diff --git a/src/test/scala/za/co/absa/standardization/interpreter/StandardizationInterpreter_TimestampSuite.scala b/src/test/scala/za/co/absa/standardization/interpreter/StandardizationInterpreter_TimestampSuite.scala
index 516a8a1..fdb3b79 100644
--- a/src/test/scala/za/co/absa/standardization/interpreter/StandardizationInterpreter_TimestampSuite.scala
+++ b/src/test/scala/za/co/absa/standardization/interpreter/StandardizationInterpreter_TimestampSuite.scala
@@ -371,8 +371,6 @@ class StandardizationInterpreter_TimestampSuite extends AnyFunSuite with SparkTe
     val std = Standardization.standardize(src, desiredSchema).cacheIfNotCachedYet()
     logDataFrameContent(std)
 
-    std.show(false)
-    std.printSchema()
 
     assertResult(exp)(std.as[TimestampRow].collect().toList)
   }
diff --git a/src/test/scala/za/co/absa/standardization/validation/field/IntegralFieldValidatorSuite.scala b/src/test/scala/za/co/absa/standardization/validation/field/IntegralFieldValidatorSuite.scala
index 280dc37..c82ac08 100644
--- a/src/test/scala/za/co/absa/standardization/validation/field/IntegralFieldValidatorSuite.scala
+++ b/src/test/scala/za/co/absa/standardization/validation/field/IntegralFieldValidatorSuite.scala
@@ -75,10 +75,6 @@ class IntegralFieldValidatorSuite extends AnyFunSuite {
       ValidationError("""Malformed pattern "%0.###,#"""")
     )
 
-    println(NumericFieldValidator.validate(f).toSet)
-
-    println(exp)
-
     assert(NumericFieldValidator.validate(f).toSet == exp)
  }
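
Note (illustrative, not part of the patch): the new log4j.properties silences only the spamming "Using an existing SparkSession" message by routing org.apache.spark.sql.SparkSession$Builder to a dedicated stderr appender whose StringMatchFilter denies matching events (AcceptOnMatch=false), while additivity=false keeps those events away from the root appender, so other WARN messages from the builder still appear. Below is a minimal programmatic sketch of the same log4j 1.x setup; the object name is hypothetical and the pattern string stands in for the build-time @log.pattern@ placeholder.

import org.apache.log4j.{ConsoleAppender, Level, Logger, PatternLayout}
import org.apache.log4j.varia.StringMatchFilter

object SparkSessionWarningFilterSketch {
  def install(): Unit = {
    // Drop any event whose message contains the matched string.
    val filter = new StringMatchFilter
    filter.setStringToMatch("Using an existing SparkSession")
    filter.setAcceptOnMatch(false)

    // Dedicated stderr appender, mirroring the "forsparksessionbuilder" appender.
    val appender = new ConsoleAppender(new PatternLayout("[%p] %d{yy/MM/dd HH:mm:ss} %c{1}: %m%n"))
    appender.setTarget("System.err")
    appender.activateOptions()
    appender.addFilter(filter)

    // Route the builder's logger only to this appender (additivity off),
    // so the matched warning is suppressed but other WARNs still print.
    val builderLogger = Logger.getLogger("org.apache.spark.sql.SparkSession$Builder")
    builderLogger.setLevel(Level.WARN)
    builderLogger.setAdditivity(false)
    builderLogger.addAppender(appender)
  }
}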