diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala
index 3292cbef41772..c41e92e618bac 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala
@@ -96,7 +96,7 @@ private[hive] class SparkExecuteStatementOperation(
   private def getNextRowSetInternal(
       order: FetchOrientation,
       maxRowsL: Long): TRowSet = withLocalProperties {
-    log.info(s"Received getNextRowSet request order=${order} and maxRowsL=${maxRowsL} " +
+    log.debug(s"Received getNextRowSet request order=${order} and maxRowsL=${maxRowsL} " +
       s"with ${statementId}")
     validateDefaultFetchOrientation(order)
     assertState(OperationState.FINISHED)
@@ -112,7 +112,7 @@ private[hive] class SparkExecuteStatementOperation(
     val maxRows = maxRowsL.toInt
     val offset = iter.getPosition
     val rows = iter.take(maxRows).toList
-    log.info(s"Returning result set with ${rows.length} rows from offsets " +
+    log.debug(s"Returning result set with ${rows.length} rows from offsets " +
       s"[${iter.getFetchStart}, ${offset}) with $statementId")
     RowSetUtils.toTRowSet(offset, rows, dataTypes, getProtocolVersion, getTimeFormatters)
   }