From 232364b10e82495b1ccb3fa36d095cf66588c849 Mon Sep 17 00:00:00 2001
From: Ruifeng Zheng
Date: Wed, 23 Oct 2024 15:01:03 +0800
Subject: [PATCH 1/2] nit

---
 python/pyspark/sql/connect/functions/builtin.py | 6 +++---
 python/pyspark/sql/functions/builtin.py         | 8 ++++----
 2 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/python/pyspark/sql/connect/functions/builtin.py b/python/pyspark/sql/connect/functions/builtin.py
index d93cbd71e686e..f51d021905a9e 100644
--- a/python/pyspark/sql/connect/functions/builtin.py
+++ b/python/pyspark/sql/connect/functions/builtin.py
@@ -3190,21 +3190,21 @@ def dayname(col: "ColumnOrName") -> Column:
 dayname.__doc__ = pysparkfuncs.dayname.__doc__
 
 
-def extract(field: "ColumnOrName", source: "ColumnOrName") -> Column:
+def extract(field: Column, source: "ColumnOrName") -> Column:
     return _invoke_function_over_columns("extract", field, source)
 
 
 extract.__doc__ = pysparkfuncs.extract.__doc__
 
 
-def date_part(field: "ColumnOrName", source: "ColumnOrName") -> Column:
+def date_part(field: Column, source: "ColumnOrName") -> Column:
     return _invoke_function_over_columns("date_part", field, source)
 
 
 extract.__doc__ = pysparkfuncs.extract.__doc__
 
 
-def datepart(field: "ColumnOrName", source: "ColumnOrName") -> Column:
+def datepart(field: Column, source: "ColumnOrName") -> Column:
     return _invoke_function_over_columns("datepart", field, source)
 
 
diff --git a/python/pyspark/sql/functions/builtin.py b/python/pyspark/sql/functions/builtin.py
index caa83bd2e1a57..c8a809a00cdc3 100644
--- a/python/pyspark/sql/functions/builtin.py
+++ b/python/pyspark/sql/functions/builtin.py
@@ -8564,7 +8564,7 @@ def dayname(col: "ColumnOrName") -> Column:
 
 
 @_try_remote_functions
-def extract(field: "ColumnOrName", source: "ColumnOrName") -> Column:
+def extract(field: Column, source: "ColumnOrName") -> Column:
     """
     Extracts a part of the date/timestamp or interval source.
 
@@ -8572,7 +8572,7 @@ def extract(field: "ColumnOrName", source: "ColumnOrName") -> Column:
 
     Parameters
     ----------
-    field : :class:`~pyspark.sql.Column` or str
+    field : :class:`~pyspark.sql.Column`
         selects which part of the source should be extracted.
     source : :class:`~pyspark.sql.Column` or str
         a date/timestamp or interval column from where `field` should be extracted.
@@ -8600,7 +8600,7 @@ def extract(field: "ColumnOrName", source: "ColumnOrName") -> Column:
 
 
 @_try_remote_functions
-def date_part(field: "ColumnOrName", source: "ColumnOrName") -> Column:
+def date_part(field: Column, source: "ColumnOrName") -> Column:
     """
     Extracts a part of the date/timestamp or interval source.
 
@@ -8637,7 +8637,7 @@ def date_part(field: "ColumnOrName", source: "ColumnOrName") -> Column:
 
 
 @_try_remote_functions
-def datepart(field: "ColumnOrName", source: "ColumnOrName") -> Column:
+def datepart(field: Column, source: "ColumnOrName") -> Column:
    """
     Extracts a part of the date/timestamp or interval source.
 
From eaecfcd2ce375453fc7ea227e40de433a612ab49 Mon Sep 17 00:00:00 2001
From: Ruifeng Zheng
Date: Wed, 23 Oct 2024 15:03:49 +0800
Subject: [PATCH 2/2] nit

---
 python/pyspark/sql/functions/builtin.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/python/pyspark/sql/functions/builtin.py b/python/pyspark/sql/functions/builtin.py
index c8a809a00cdc3..29d96e9e848d1 100644
--- a/python/pyspark/sql/functions/builtin.py
+++ b/python/pyspark/sql/functions/builtin.py
@@ -8608,7 +8608,7 @@ def date_part(field: Column, source: "ColumnOrName") -> Column:
 
     Parameters
     ----------
-    field : :class:`~pyspark.sql.Column` or str
+    field : :class:`~pyspark.sql.Column`
         selects which part of the source should be extracted, and supported string values
         are as same as the fields of the equivalent function `extract`.
     source : :class:`~pyspark.sql.Column` or str
@@ -8645,7 +8645,7 @@ def datepart(field: Column, source: "ColumnOrName") -> Column:
 
     Parameters
     ----------
-    field : :class:`~pyspark.sql.Column` or str
+    field : :class:`~pyspark.sql.Column`
         selects which part of the source should be extracted, and supported string values
         are as same as the fields of the equivalent function `extract`.
     source : :class:`~pyspark.sql.Column` or str
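
Usage note (editor's illustration, not part of the patch series): a minimal sketch of the call pattern the new `field: Column` annotation reflects. It assumes a Spark 3.5+ session; the DataFrame and column names below are invented for the example, and `source` may still be a Column or a column name.

    # Minimal sketch (assumed Spark 3.5+; example data and names are made up for illustration).
    from pyspark.sql import SparkSession
    from pyspark.sql import functions as sf

    spark = SparkSession.builder.getOrCreate()
    df = spark.sql("SELECT timestamp'2015-04-08 13:08:15' AS ts")

    # `field` is passed as a Column such as lit("YEAR"); `source` is a Column or a column name.
    df.select(
        sf.extract(sf.lit("YEAR"), "ts").alias("year"),
        sf.date_part(sf.lit("MONTH"), "ts").alias("month"),
        sf.datepart(sf.lit("SECONDS"), sf.col("ts")).alias("seconds"),
    ).show()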