From 3ef9a06daf2e79d9282c108243b194a5c54bed62 Mon Sep 17 00:00:00 2001 From: Xiao Li Date: Fri, 24 Mar 2017 17:48:09 -0700 Subject: [PATCH] added desc function and desc extended function to sql test --- .../inputs/describe-functions-extended.sql | 279 ++ .../sql-tests/inputs/describe-functions.sql | 279 ++ .../sql-tests/inputs/json-functions.sql | 4 - .../describe-functions-extended.sql.out | 3579 +++++++++++++++++ .../results/describe-functions.sql.out | 2604 ++++++++++++ .../sql-tests/results/json-functions.sql.out | 134 +- .../apache/spark/sql/SQLQueryTestSuite.scala | 2 + .../sql/execution/command/DDLSuite.scala | 58 - 8 files changed, 6782 insertions(+), 157 deletions(-) create mode 100644 sql/core/src/test/resources/sql-tests/inputs/describe-functions-extended.sql create mode 100644 sql/core/src/test/resources/sql-tests/inputs/describe-functions.sql create mode 100644 sql/core/src/test/resources/sql-tests/results/describe-functions-extended.sql.out create mode 100644 sql/core/src/test/resources/sql-tests/results/describe-functions.sql.out diff --git a/sql/core/src/test/resources/sql-tests/inputs/describe-functions-extended.sql b/sql/core/src/test/resources/sql-tests/inputs/describe-functions-extended.sql new file mode 100644 index 0000000000000..c7df7a44ef2e5 --- /dev/null +++ b/sql/core/src/test/resources/sql-tests/inputs/describe-functions-extended.sql @@ -0,0 +1,279 @@ +-- misc non-aggregate functions +DESC FUNCTION EXTENDED abs; +DESC FUNCTION EXTENDED coalesce; +DESC FUNCTION EXTENDED explode; +DESC FUNCTION EXTENDED explode_outer; +DESC FUNCTION EXTENDED greatest; +DESC FUNCTION EXTENDED if; +DESC FUNCTION EXTENDED inline; +DESC FUNCTION EXTENDED inline_outer; +DESC FUNCTION EXTENDED isnan; +DESC FUNCTION EXTENDED ifnull; +DESC FUNCTION EXTENDED isnull; +DESC FUNCTION EXTENDED isnotnull; +DESC FUNCTION EXTENDED least; +DESC FUNCTION EXTENDED nanvl; +DESC FUNCTION EXTENDED nullif; +DESC FUNCTION EXTENDED nvl; +DESC FUNCTION EXTENDED nvl2; +DESC FUNCTION 
EXTENDED posexplode; +DESC FUNCTION EXTENDED posexplode_outer; +DESC FUNCTION EXTENDED rand; +DESC FUNCTION EXTENDED randn; +DESC FUNCTION EXTENDED stack; +DESC FUNCTION EXTENDED when; + +-- math functions; +DESC FUNCTION EXTENDED acos; +DESC FUNCTION EXTENDED asin; +DESC FUNCTION EXTENDED atan; +DESC FUNCTION EXTENDED atan2; +DESC FUNCTION EXTENDED bin; +DESC FUNCTION EXTENDED bround; +DESC FUNCTION EXTENDED cbrt; +DESC FUNCTION EXTENDED ceil; +DESC FUNCTION EXTENDED ceiling; +DESC FUNCTION EXTENDED cos; +DESC FUNCTION EXTENDED cosh; +DESC FUNCTION EXTENDED conv; +DESC FUNCTION EXTENDED degrees; +DESC FUNCTION EXTENDED e; +DESC FUNCTION EXTENDED exp; +DESC FUNCTION EXTENDED expm1; +DESC FUNCTION EXTENDED floor; +DESC FUNCTION EXTENDED factorial; +DESC FUNCTION EXTENDED hex; +DESC FUNCTION EXTENDED hypot; +DESC FUNCTION EXTENDED log; +DESC FUNCTION EXTENDED log10; +DESC FUNCTION EXTENDED log1p; +DESC FUNCTION EXTENDED log2; +DESC FUNCTION EXTENDED ln; +DESC FUNCTION EXTENDED negative; +DESC FUNCTION EXTENDED pi; +DESC FUNCTION EXTENDED pmod; +DESC FUNCTION EXTENDED positive; +DESC FUNCTION EXTENDED pow; +DESC FUNCTION EXTENDED power; +DESC FUNCTION EXTENDED radians; +DESC FUNCTION EXTENDED rint; +DESC FUNCTION EXTENDED round; +DESC FUNCTION EXTENDED shiftleft; +DESC FUNCTION EXTENDED shiftright; +DESC FUNCTION EXTENDED shiftrightunsigned; +DESC FUNCTION EXTENDED sign; +DESC FUNCTION EXTENDED signum; +DESC FUNCTION EXTENDED sin; +DESC FUNCTION EXTENDED sinh; +DESC FUNCTION EXTENDED str_to_map; +DESC FUNCTION EXTENDED sqrt; +DESC FUNCTION EXTENDED tan; +DESC FUNCTION EXTENDED tanh; + +DESC FUNCTION EXTENDED +; +DESC FUNCTION EXTENDED -; +DESC FUNCTION EXTENDED *; +DESC FUNCTION EXTENDED /; +DESC FUNCTION EXTENDED %; + +-- aggregate functions +DESC FUNCTION EXTENDED approx_count_distinct; +DESC FUNCTION EXTENDED avg; +DESC FUNCTION EXTENDED corr; +DESC FUNCTION EXTENDED count; +DESC FUNCTION EXTENDED covar_pop; +DESC FUNCTION EXTENDED covar_samp; +DESC FUNCTION 
EXTENDED first; +DESC FUNCTION EXTENDED first_value; +DESC FUNCTION EXTENDED kurtosis; +DESC FUNCTION EXTENDED last; +DESC FUNCTION EXTENDED last_value; +DESC FUNCTION EXTENDED max; +DESC FUNCTION EXTENDED mean; +DESC FUNCTION EXTENDED min; +DESC FUNCTION EXTENDED percentile; +DESC FUNCTION EXTENDED skewness; +DESC FUNCTION EXTENDED percentile_approx; +DESC FUNCTION EXTENDED approx_percentile; +DESC FUNCTION EXTENDED std; +DESC FUNCTION EXTENDED stddev; +DESC FUNCTION EXTENDED stddev_pop; +DESC FUNCTION EXTENDED stddev_samp; +DESC FUNCTION EXTENDED sum; +DESC FUNCTION EXTENDED variance; +DESC FUNCTION EXTENDED var_pop; +DESC FUNCTION EXTENDED var_samp; +DESC FUNCTION EXTENDED collect_list; +DESC FUNCTION EXTENDED collect_set; +DESC FUNCTION EXTENDED count_min_sketch; + +-- string functions +DESC FUNCTION EXTENDED ascii; +DESC FUNCTION EXTENDED base64; +DESC FUNCTION EXTENDED concat; +DESC FUNCTION EXTENDED concat_ws; +DESC FUNCTION EXTENDED decode; +DESC FUNCTION EXTENDED elt; +DESC FUNCTION EXTENDED encode; +DESC FUNCTION EXTENDED find_in_set; +DESC FUNCTION EXTENDED format_number; +DESC FUNCTION EXTENDED format_string; +DESC FUNCTION EXTENDED get_json_object; +DESC FUNCTION EXTENDED initcap; +DESC FUNCTION EXTENDED instr; +DESC FUNCTION EXTENDED lcase; +DESC FUNCTION EXTENDED length; +DESC FUNCTION EXTENDED levenshtein; +DESC FUNCTION EXTENDED like; +DESC FUNCTION EXTENDED lower; +DESC FUNCTION EXTENDED locate; +DESC FUNCTION EXTENDED lpad; +DESC FUNCTION EXTENDED ltrim; +DESC FUNCTION EXTENDED json_tuple; +DESC FUNCTION EXTENDED parse_url; +DESC FUNCTION EXTENDED printf; +DESC FUNCTION EXTENDED regexp_extract; +DESC FUNCTION EXTENDED regexp_replace; +DESC FUNCTION EXTENDED repeat; +DESC FUNCTION EXTENDED reverse; +DESC FUNCTION EXTENDED rlike; +DESC FUNCTION EXTENDED rpad; +DESC FUNCTION EXTENDED rtrim; +DESC FUNCTION EXTENDED sentences; +DESC FUNCTION EXTENDED soundex; +DESC FUNCTION EXTENDED space; +DESC FUNCTION EXTENDED split; +DESC FUNCTION EXTENDED substr; 
+DESC FUNCTION EXTENDED substring; +DESC FUNCTION EXTENDED substring_index; +DESC FUNCTION EXTENDED translate; +DESC FUNCTION EXTENDED trim; +DESC FUNCTION EXTENDED ucase; +DESC FUNCTION EXTENDED unbase64; +DESC FUNCTION EXTENDED unhex; +DESC FUNCTION EXTENDED upper; +DESC FUNCTION EXTENDED xpath; +DESC FUNCTION EXTENDED xpath_boolean; +DESC FUNCTION EXTENDED xpath_double; +DESC FUNCTION EXTENDED xpath_number; +DESC FUNCTION EXTENDED xpath_float; +DESC FUNCTION EXTENDED xpath_int; +DESC FUNCTION EXTENDED xpath_long; +DESC FUNCTION EXTENDED xpath_short; +DESC FUNCTION EXTENDED xpath_string; + +-- datetime functions +DESC FUNCTION EXTENDED add_months; +DESC FUNCTION EXTENDED current_date; +DESC FUNCTION EXTENDED current_timestamp; +DESC FUNCTION EXTENDED datediff; +DESC FUNCTION EXTENDED date_add; +DESC FUNCTION EXTENDED date_format; +DESC FUNCTION EXTENDED date_sub; +DESC FUNCTION EXTENDED day; +DESC FUNCTION EXTENDED dayofyear; +DESC FUNCTION EXTENDED dayofmonth; +DESC FUNCTION EXTENDED from_unixtime; +DESC FUNCTION EXTENDED from_utc_timestamp; +DESC FUNCTION EXTENDED hour; +DESC FUNCTION EXTENDED last_day; +DESC FUNCTION EXTENDED minute; +DESC FUNCTION EXTENDED month; +DESC FUNCTION EXTENDED months_between; +DESC FUNCTION EXTENDED next_day; +DESC FUNCTION EXTENDED now; +DESC FUNCTION EXTENDED quarter; +DESC FUNCTION EXTENDED second; +DESC FUNCTION EXTENDED to_timestamp; +DESC FUNCTION EXTENDED to_date; +DESC FUNCTION EXTENDED to_unix_timestamp; +DESC FUNCTION EXTENDED to_utc_timestamp; +DESC FUNCTION EXTENDED trunc; +DESC FUNCTION EXTENDED unix_timestamp; +DESC FUNCTION EXTENDED weekofyear; +DESC FUNCTION EXTENDED year; +DESC FUNCTION EXTENDED window; + +-- collection functions +DESC FUNCTION EXTENDED array; +DESC FUNCTION EXTENDED array_contains; +DESC FUNCTION EXTENDED map; +DESC FUNCTION EXTENDED named_struct; +DESC FUNCTION EXTENDED map_keys; +DESC FUNCTION EXTENDED map_values; +DESC FUNCTION EXTENDED size; +DESC FUNCTION EXTENDED sort_array; +DESC FUNCTION 
EXTENDED struct; + +-- misc functions +DESC FUNCTION EXTENDED assert_true; +DESC FUNCTION EXTENDED crc32; +DESC FUNCTION EXTENDED md5; +DESC FUNCTION EXTENDED hash; +DESC FUNCTION EXTENDED sha; +DESC FUNCTION EXTENDED sha1; +DESC FUNCTION EXTENDED sha2; +DESC FUNCTION EXTENDED spark_partition_id; +DESC FUNCTION EXTENDED input_file_name; +DESC FUNCTION EXTENDED input_file_block_start; +DESC FUNCTION EXTENDED input_file_block_length; +DESC FUNCTION EXTENDED monotonically_increasing_id; +DESC FUNCTION EXTENDED current_database; +DESC FUNCTION EXTENDED reflect; +DESC FUNCTION EXTENDED java_method; + +-- grouping sets +DESC FUNCTION EXTENDED cube; +DESC FUNCTION EXTENDED rollup; +DESC FUNCTION EXTENDED grouping; +DESC FUNCTION EXTENDED grouping_id; + +-- window functions; +DESC FUNCTION EXTENDED lead; +DESC FUNCTION EXTENDED lag; +DESC FUNCTION EXTENDED row_number; +DESC FUNCTION EXTENDED cume_dist; +DESC FUNCTION EXTENDED ntile; +DESC FUNCTION EXTENDED rank; +DESC FUNCTION EXTENDED dense_rank; +DESC FUNCTION EXTENDED percent_rank; + +-- predicates; +DESC FUNCTION EXTENDED and; +DESC FUNCTION EXTENDED in; +DESC FUNCTION EXTENDED not; +DESC FUNCTION EXTENDED or; + +-- comparison operators; +DESC FUNCTION EXTENDED <=>; +DESC FUNCTION EXTENDED =; +DESC FUNCTION EXTENDED ==; +DESC FUNCTION EXTENDED >; +DESC FUNCTION EXTENDED >=; +DESC FUNCTION EXTENDED <; +DESC FUNCTION EXTENDED <=; +DESC FUNCTION EXTENDED !; + +-- bitwise +DESC FUNCTION EXTENDED &; +DESC FUNCTION EXTENDED ~; +DESC FUNCTION EXTENDED |; +DESC FUNCTION EXTENDED ^; + +-- json +DESC FUNCTION EXTENDED to_json; +DESC FUNCTION EXTENDED from_json; + +-- Cast aliases (SPARK-16730) +DESC FUNCTION EXTENDED boolean; +DESC FUNCTION EXTENDED tinyint; +DESC FUNCTION EXTENDED smallint; +DESC FUNCTION EXTENDED int; +DESC FUNCTION EXTENDED bigint; +DESC FUNCTION EXTENDED float; +DESC FUNCTION EXTENDED double; +DESC FUNCTION EXTENDED decimal; +DESC FUNCTION EXTENDED date; +DESC FUNCTION EXTENDED timestamp; +DESC FUNCTION 
EXTENDED binary; +DESC FUNCTION EXTENDED string; \ No newline at end of file diff --git a/sql/core/src/test/resources/sql-tests/inputs/describe-functions.sql b/sql/core/src/test/resources/sql-tests/inputs/describe-functions.sql new file mode 100644 index 0000000000000..85effa3074466 --- /dev/null +++ b/sql/core/src/test/resources/sql-tests/inputs/describe-functions.sql @@ -0,0 +1,279 @@ +-- misc non-aggregate functions +DESC FUNCTION abs; +DESC FUNCTION coalesce; +DESC FUNCTION explode; +DESC FUNCTION explode_outer; +DESC FUNCTION greatest; +DESC FUNCTION if; +DESC FUNCTION inline; +DESC FUNCTION inline_outer; +DESC FUNCTION isnan; +DESC FUNCTION ifnull; +DESC FUNCTION isnull; +DESC FUNCTION isnotnull; +DESC FUNCTION least; +DESC FUNCTION nanvl; +DESC FUNCTION nullif; +DESC FUNCTION nvl; +DESC FUNCTION nvl2; +DESC FUNCTION posexplode; +DESC FUNCTION posexplode_outer; +DESC FUNCTION rand; +DESC FUNCTION randn; +DESC FUNCTION stack; +DESC FUNCTION when; + +-- math functions; +DESC FUNCTION acos; +DESC FUNCTION asin; +DESC FUNCTION atan; +DESC FUNCTION atan2; +DESC FUNCTION bin; +DESC FUNCTION bround; +DESC FUNCTION cbrt; +DESC FUNCTION ceil; +DESC FUNCTION ceiling; +DESC FUNCTION cos; +DESC FUNCTION cosh; +DESC FUNCTION conv; +DESC FUNCTION degrees; +DESC FUNCTION e; +DESC FUNCTION exp; +DESC FUNCTION expm1; +DESC FUNCTION floor; +DESC FUNCTION factorial; +DESC FUNCTION hex; +DESC FUNCTION hypot; +DESC FUNCTION log; +DESC FUNCTION log10; +DESC FUNCTION log1p; +DESC FUNCTION log2; +DESC FUNCTION ln; +DESC FUNCTION negative; +DESC FUNCTION pi; +DESC FUNCTION pmod; +DESC FUNCTION positive; +DESC FUNCTION pow; +DESC FUNCTION power; +DESC FUNCTION radians; +DESC FUNCTION rint; +DESC FUNCTION round; +DESC FUNCTION shiftleft; +DESC FUNCTION shiftright; +DESC FUNCTION shiftrightunsigned; +DESC FUNCTION sign; +DESC FUNCTION signum; +DESC FUNCTION sin; +DESC FUNCTION sinh; +DESC FUNCTION str_to_map; +DESC FUNCTION sqrt; +DESC FUNCTION tan; +DESC FUNCTION tanh; + +DESC FUNCTION 
+; +DESC FUNCTION -; +DESC FUNCTION *; +DESC FUNCTION /; +DESC FUNCTION %; + +-- aggregate functions +DESC FUNCTION approx_count_distinct; +DESC FUNCTION avg; +DESC FUNCTION corr; +DESC FUNCTION count; +DESC FUNCTION covar_pop; +DESC FUNCTION covar_samp; +DESC FUNCTION first; +DESC FUNCTION first_value; +DESC FUNCTION kurtosis; +DESC FUNCTION last; +DESC FUNCTION last_value; +DESC FUNCTION max; +DESC FUNCTION mean; +DESC FUNCTION min; +DESC FUNCTION percentile; +DESC FUNCTION skewness; +DESC FUNCTION percentile_approx; +DESC FUNCTION approx_percentile; +DESC FUNCTION std; +DESC FUNCTION stddev; +DESC FUNCTION stddev_pop; +DESC FUNCTION stddev_samp; +DESC FUNCTION sum; +DESC FUNCTION variance; +DESC FUNCTION var_pop; +DESC FUNCTION var_samp; +DESC FUNCTION collect_list; +DESC FUNCTION collect_set; +DESC FUNCTION count_min_sketch; + +-- string functions +DESC FUNCTION ascii; +DESC FUNCTION base64; +DESC FUNCTION concat; +DESC FUNCTION concat_ws; +DESC FUNCTION decode; +DESC FUNCTION elt; +DESC FUNCTION encode; +DESC FUNCTION find_in_set; +DESC FUNCTION format_number; +DESC FUNCTION format_string; +DESC FUNCTION get_json_object; +DESC FUNCTION initcap; +DESC FUNCTION instr; +DESC FUNCTION lcase; +DESC FUNCTION length; +DESC FUNCTION levenshtein; +DESC FUNCTION like; +DESC FUNCTION lower; +DESC FUNCTION locate; +DESC FUNCTION lpad; +DESC FUNCTION ltrim; +DESC FUNCTION json_tuple; +DESC FUNCTION parse_url; +DESC FUNCTION printf; +DESC FUNCTION regexp_extract; +DESC FUNCTION regexp_replace; +DESC FUNCTION repeat; +DESC FUNCTION reverse; +DESC FUNCTION rlike; +DESC FUNCTION rpad; +DESC FUNCTION rtrim; +DESC FUNCTION sentences; +DESC FUNCTION soundex; +DESC FUNCTION space; +DESC FUNCTION split; +DESC FUNCTION substr; +DESC FUNCTION substring; +DESC FUNCTION substring_index; +DESC FUNCTION translate; +DESC FUNCTION trim; +DESC FUNCTION ucase; +DESC FUNCTION unbase64; +DESC FUNCTION unhex; +DESC FUNCTION upper; +DESC FUNCTION xpath; +DESC FUNCTION xpath_boolean; +DESC 
FUNCTION xpath_double; +DESC FUNCTION xpath_number; +DESC FUNCTION xpath_float; +DESC FUNCTION xpath_int; +DESC FUNCTION xpath_long; +DESC FUNCTION xpath_short; +DESC FUNCTION xpath_string; + +-- datetime functions +DESC FUNCTION add_months; +DESC FUNCTION current_date; +DESC FUNCTION current_timestamp; +DESC FUNCTION datediff; +DESC FUNCTION date_add; +DESC FUNCTION date_format; +DESC FUNCTION date_sub; +DESC FUNCTION day; +DESC FUNCTION dayofyear; +DESC FUNCTION dayofmonth; +DESC FUNCTION from_unixtime; +DESC FUNCTION from_utc_timestamp; +DESC FUNCTION hour; +DESC FUNCTION last_day; +DESC FUNCTION minute; +DESC FUNCTION month; +DESC FUNCTION months_between; +DESC FUNCTION next_day; +DESC FUNCTION now; +DESC FUNCTION quarter; +DESC FUNCTION second; +DESC FUNCTION to_timestamp; +DESC FUNCTION to_date; +DESC FUNCTION to_unix_timestamp; +DESC FUNCTION to_utc_timestamp; +DESC FUNCTION trunc; +DESC FUNCTION unix_timestamp; +DESC FUNCTION weekofyear; +DESC FUNCTION year; +DESC FUNCTION window; + +-- collection functions +DESC FUNCTION array; +DESC FUNCTION array_contains; +DESC FUNCTION map; +DESC FUNCTION named_struct; +DESC FUNCTION map_keys; +DESC FUNCTION map_values; +DESC FUNCTION size; +DESC FUNCTION sort_array; +DESC FUNCTION struct; + +-- misc functions +DESC FUNCTION assert_true; +DESC FUNCTION crc32; +DESC FUNCTION md5; +DESC FUNCTION hash; +DESC FUNCTION sha; +DESC FUNCTION sha1; +DESC FUNCTION sha2; +DESC FUNCTION spark_partition_id; +DESC FUNCTION input_file_name; +DESC FUNCTION input_file_block_start; +DESC FUNCTION input_file_block_length; +DESC FUNCTION monotonically_increasing_id; +DESC FUNCTION current_database; +DESC FUNCTION reflect; +DESC FUNCTION java_method; + +-- grouping sets +DESC FUNCTION cube; +DESC FUNCTION rollup; +DESC FUNCTION grouping; +DESC FUNCTION grouping_id; + +-- window functions; +DESC FUNCTION lead; +DESC FUNCTION lag; +DESC FUNCTION row_number; +DESC FUNCTION cume_dist; +DESC FUNCTION ntile; +DESC FUNCTION rank; +DESC FUNCTION 
dense_rank; +DESC FUNCTION percent_rank; + +-- predicates; +DESC FUNCTION and; +DESC FUNCTION in; +DESC FUNCTION not; +DESC FUNCTION or; + +-- comparison operators; +DESC FUNCTION <=>; +DESC FUNCTION =; +DESC FUNCTION ==; +DESC FUNCTION >; +DESC FUNCTION >=; +DESC FUNCTION <; +DESC FUNCTION <=; +DESC FUNCTION !; + +-- bitwise +DESC FUNCTION &; +DESC FUNCTION ~; +DESC FUNCTION |; +DESC FUNCTION ^; + +-- json +DESC FUNCTION to_json; +DESC FUNCTION from_json; + +-- Cast aliases (SPARK-16730) +DESC FUNCTION boolean; +DESC FUNCTION tinyint; +DESC FUNCTION smallint; +DESC FUNCTION int; +DESC FUNCTION bigint; +DESC FUNCTION float; +DESC FUNCTION double; +DESC FUNCTION decimal; +DESC FUNCTION date; +DESC FUNCTION timestamp; +DESC FUNCTION binary; +DESC FUNCTION string; diff --git a/sql/core/src/test/resources/sql-tests/inputs/json-functions.sql b/sql/core/src/test/resources/sql-tests/inputs/json-functions.sql index b3cc2cea51d43..b0b7ffa9dd194 100644 --- a/sql/core/src/test/resources/sql-tests/inputs/json-functions.sql +++ b/sql/core/src/test/resources/sql-tests/inputs/json-functions.sql @@ -1,6 +1,4 @@ -- to_json -describe function to_json; -describe function extended to_json; select to_json(named_struct('a', 1, 'b', 2)); select to_json(named_struct('time', to_timestamp('2015-08-26', 'yyyy-MM-dd')), map('timestampFormat', 'dd/MM/yyyy')); select to_json(array(named_struct('a', 1, 'b', 2))); @@ -10,8 +8,6 @@ select to_json(named_struct('a', 1, 'b', 2), map('mode', 1)); select to_json(); -- from_json -describe function from_json; -describe function extended from_json; select from_json('{"a":1}', 'a INT'); select from_json('{"time":"26/08/2015"}', 'time Timestamp', map('timestampFormat', 'dd/MM/yyyy')); -- Check if errors handled diff --git a/sql/core/src/test/resources/sql-tests/results/describe-functions-extended.sql.out b/sql/core/src/test/resources/sql-tests/results/describe-functions-extended.sql.out new file mode 100644 index 0000000000000..0d61d3c7b5b48 --- /dev/null 
+++ b/sql/core/src/test/resources/sql-tests/results/describe-functions-extended.sql.out @@ -0,0 +1,3579 @@ +-- Automatically generated by SQLQueryTestSuite +-- Number of queries: 251 + + +-- !query 0 +DESC FUNCTION EXTENDED abs +-- !query 0 schema +struct +-- !query 0 output +Function: abs +Class: org.apache.spark.sql.catalyst.expressions.Abs +Usage: abs(expr) - Returns the absolute value of the numeric value. +Extended Usage: + Examples: + > SELECT abs(-1); + 1 + + +-- !query 1 +DESC FUNCTION EXTENDED coalesce +-- !query 1 schema +struct +-- !query 1 output +Function: coalesce +Class: org.apache.spark.sql.catalyst.expressions.Coalesce +Usage: coalesce(expr1, expr2, ...) - Returns the first non-null argument if exists. Otherwise, null. +Extended Usage: + Examples: + > SELECT coalesce(NULL, 1, NULL); + 1 + + +-- !query 2 +DESC FUNCTION EXTENDED explode +-- !query 2 schema +struct +-- !query 2 output +Function: explode +Class: org.apache.spark.sql.catalyst.expressions.Explode +Usage: explode(expr) - Separates the elements of array `expr` into multiple rows, or the elements of map `expr` into multiple rows and columns. +Extended Usage: + Examples: + > SELECT explode(array(10, 20)); + 10 + 20 + + +-- !query 3 +DESC FUNCTION EXTENDED explode_outer +-- !query 3 schema +struct +-- !query 3 output +Function: explode_outer +Class: org.apache.spark.sql.catalyst.expressions.Explode +Usage: explode_outer(expr) - Separates the elements of array `expr` into multiple rows, or the elements of map `expr` into multiple rows and columns. +Extended Usage: + Examples: + > SELECT explode_outer(array(10, 20)); + 10 + 20 + + +-- !query 4 +DESC FUNCTION EXTENDED greatest +-- !query 4 schema +struct +-- !query 4 output +Function: greatest +Class: org.apache.spark.sql.catalyst.expressions.Greatest +Usage: greatest(expr, ...) - Returns the greatest value of all parameters, skipping null values. 
+Extended Usage: + Examples: + > SELECT greatest(10, 9, 2, 4, 3); + 10 + + +-- !query 5 +DESC FUNCTION EXTENDED if +-- !query 5 schema +struct +-- !query 5 output +Function: if +Class: org.apache.spark.sql.catalyst.expressions.If +Usage: if(expr1, expr2, expr3) - If `expr1` evaluates to true, then returns `expr2`; otherwise returns `expr3`. +Extended Usage: + Examples: + > SELECT if(1 < 2, 'a', 'b'); + a + + +-- !query 6 +DESC FUNCTION EXTENDED inline +-- !query 6 schema +struct +-- !query 6 output +Function: inline +Class: org.apache.spark.sql.catalyst.expressions.Inline +Usage: inline(expr) - Explodes an array of structs into a table. +Extended Usage: + Examples: + > SELECT inline(array(struct(1, 'a'), struct(2, 'b'))); + 1 a + 2 b + + +-- !query 7 +DESC FUNCTION EXTENDED inline_outer +-- !query 7 schema +struct +-- !query 7 output +Function: inline_outer +Class: org.apache.spark.sql.catalyst.expressions.Inline +Usage: inline_outer(expr) - Explodes an array of structs into a table. +Extended Usage: + Examples: + > SELECT inline_outer(array(struct(1, 'a'), struct(2, 'b'))); + 1 a + 2 b + + +-- !query 8 +DESC FUNCTION EXTENDED isnan +-- !query 8 schema +struct +-- !query 8 output +Function: isnan +Class: org.apache.spark.sql.catalyst.expressions.IsNaN +Usage: isnan(expr) - Returns true if `expr` is NaN, or false otherwise. +Extended Usage: + Examples: + > SELECT isnan(cast('NaN' as double)); + true + + +-- !query 9 +DESC FUNCTION EXTENDED ifnull +-- !query 9 schema +struct +-- !query 9 output +Function: ifnull +Class: org.apache.spark.sql.catalyst.expressions.IfNull +Usage: ifnull(expr1, expr2) - Returns `expr2` if `expr1` is null, or `expr1` otherwise. 
+Extended Usage: + Examples: + > SELECT ifnull(NULL, array('2')); + ["2"] + + +-- !query 10 +DESC FUNCTION EXTENDED isnull +-- !query 10 schema +struct +-- !query 10 output +Function: isnull +Class: org.apache.spark.sql.catalyst.expressions.IsNull +Usage: isnull(expr) - Returns true if `expr` is null, or false otherwise. +Extended Usage: + Examples: + > SELECT isnull(1); + false + + +-- !query 11 +DESC FUNCTION EXTENDED isnotnull +-- !query 11 schema +struct +-- !query 11 output +Function: isnotnull +Class: org.apache.spark.sql.catalyst.expressions.IsNotNull +Usage: isnotnull(expr) - Returns true if `expr` is not null, or false otherwise. +Extended Usage: + Examples: + > SELECT isnotnull(1); + true + + +-- !query 12 +DESC FUNCTION EXTENDED least +-- !query 12 schema +struct +-- !query 12 output +Function: least +Class: org.apache.spark.sql.catalyst.expressions.Least +Usage: least(expr, ...) - Returns the least value of all parameters, skipping null values. +Extended Usage: + Examples: + > SELECT least(10, 9, 2, 4, 3); + 2 + + +-- !query 13 +DESC FUNCTION EXTENDED nanvl +-- !query 13 schema +struct +-- !query 13 output +Function: nanvl +Class: org.apache.spark.sql.catalyst.expressions.NaNvl +Usage: nanvl(expr1, expr2) - Returns `expr1` if it's not NaN, or `expr2` otherwise. +Extended Usage: + Examples: + > SELECT nanvl(cast('NaN' as double), 123); + 123.0 + + +-- !query 14 +DESC FUNCTION EXTENDED nullif +-- !query 14 schema +struct +-- !query 14 output +Function: nullif +Class: org.apache.spark.sql.catalyst.expressions.NullIf +Usage: nullif(expr1, expr2) - Returns null if `expr1` equals to `expr2`, or `expr1` otherwise. +Extended Usage: + Examples: + > SELECT nullif(2, 2); + NULL + + +-- !query 15 +DESC FUNCTION EXTENDED nvl +-- !query 15 schema +struct +-- !query 15 output +Function: nvl +Class: org.apache.spark.sql.catalyst.expressions.Nvl +Usage: nvl(expr1, expr2) - Returns `expr2` if `expr1` is null, or `expr1` otherwise. 
+Extended Usage: + Examples: + > SELECT nvl(NULL, array('2')); + ["2"] + + +-- !query 16 +DESC FUNCTION EXTENDED nvl2 +-- !query 16 schema +struct +-- !query 16 output +Function: nvl2 +Class: org.apache.spark.sql.catalyst.expressions.Nvl2 +Usage: nvl2(expr1, expr2, expr3) - Returns `expr2` if `expr1` is not null, or `expr3` otherwise. +Extended Usage: + Examples: + > SELECT nvl2(NULL, 2, 1); + 1 + + +-- !query 17 +DESC FUNCTION EXTENDED posexplode +-- !query 17 schema +struct +-- !query 17 output +Function: posexplode +Class: org.apache.spark.sql.catalyst.expressions.PosExplode +Usage: posexplode(expr) - Separates the elements of array `expr` into multiple rows with positions, or the elements of map `expr` into multiple rows and columns with positions. +Extended Usage: + Examples: + > SELECT posexplode(array(10,20)); + 0 10 + 1 20 + + +-- !query 18 +DESC FUNCTION EXTENDED posexplode_outer +-- !query 18 schema +struct +-- !query 18 output +Function: posexplode_outer +Class: org.apache.spark.sql.catalyst.expressions.PosExplode +Usage: posexplode_outer(expr) - Separates the elements of array `expr` into multiple rows with positions, or the elements of map `expr` into multiple rows and columns with positions. +Extended Usage: + Examples: + > SELECT posexplode_outer(array(10,20)); + 0 10 + 1 20 + + +-- !query 19 +DESC FUNCTION EXTENDED rand +-- !query 19 schema +struct +-- !query 19 output +Function: rand +Class: org.apache.spark.sql.catalyst.expressions.Rand +Usage: rand([seed]) - Returns a random value with independent and identically distributed (i.i.d.) uniformly distributed values in [0, 1). 
+Extended Usage: + Examples: + > SELECT rand(); + 0.9629742951434543 + > SELECT rand(0); + 0.8446490682263027 + > SELECT rand(null); + 0.8446490682263027 + + +-- !query 20 +DESC FUNCTION EXTENDED randn +-- !query 20 schema +struct +-- !query 20 output +Function: randn +Class: org.apache.spark.sql.catalyst.expressions.Randn +Usage: randn([seed]) - Returns a random value with independent and identically distributed (i.i.d.) values drawn from the standard normal distribution. +Extended Usage: + Examples: + > SELECT randn(); + -0.3254147983080288 + > SELECT randn(0); + 1.1164209726833079 + > SELECT randn(null); + 1.1164209726833079 + + +-- !query 21 +DESC FUNCTION EXTENDED stack +-- !query 21 schema +struct +-- !query 21 output +Function: stack +Class: org.apache.spark.sql.catalyst.expressions.Stack +Usage: stack(n, expr1, ..., exprk) - Separates `expr1`, ..., `exprk` into `n` rows. +Extended Usage: + Examples: + > SELECT stack(2, 1, 2, 3); + 1 2 + 3 NULL + + +-- !query 22 +DESC FUNCTION EXTENDED when +-- !query 22 schema +struct +-- !query 22 output +Function: when +Class: org.apache.spark.sql.catalyst.expressions.CaseWhen +Usage: CASE WHEN expr1 THEN expr2 [WHEN expr3 THEN expr4]* [ELSE expr5] END - When `expr1` = true, returns `expr2`; when `expr3` = true, return `expr4`; else return `expr5`. +Extended Usage: + No example/argument for when. + + +-- !query 23 +DESC FUNCTION EXTENDED acos +-- !query 23 schema +struct +-- !query 23 output +Function: acos +Class: org.apache.spark.sql.catalyst.expressions.Acos +Usage: acos(expr) - Returns the inverse cosine (a.k.a. arccosine) of `expr` if -1<=`expr`<=1 or NaN otherwise. +Extended Usage: + Examples: + > SELECT acos(1); + 0.0 + > SELECT acos(2); + NaN + + +-- !query 24 +DESC FUNCTION EXTENDED asin +-- !query 24 schema +struct +-- !query 24 output +Function: asin +Class: org.apache.spark.sql.catalyst.expressions.Asin +Usage: asin(expr) - Returns the inverse sine (a.k.a. 
arcsine) the arc sin of `expr` if -1<=`expr`<=1 or NaN otherwise. +Extended Usage: + Examples: + > SELECT asin(0); + 0.0 + > SELECT asin(2); + NaN + + +-- !query 25 +DESC FUNCTION EXTENDED atan +-- !query 25 schema +struct +-- !query 25 output +Function: atan +Class: org.apache.spark.sql.catalyst.expressions.Atan +Usage: atan(expr) - Returns the inverse tangent (a.k.a. arctangent). +Extended Usage: + Examples: + > SELECT atan(0); + 0.0 + + +-- !query 26 +DESC FUNCTION EXTENDED atan2 +-- !query 26 schema +struct +-- !query 26 output +Function: atan2 +Class: org.apache.spark.sql.catalyst.expressions.Atan2 +Usage: atan2(expr1, expr2) - Returns the angle in radians between the positive x-axis of a plane and the point given by the coordinates (`expr1`, `expr2`). +Extended Usage: + Examples: + > SELECT atan2(0, 0); + 0.0 + + +-- !query 27 +DESC FUNCTION EXTENDED bin +-- !query 27 schema +struct +-- !query 27 output +Function: bin +Class: org.apache.spark.sql.catalyst.expressions.Bin +Usage: bin(expr) - Returns the string representation of the long value `expr` represented in binary. +Extended Usage: + Examples: + > SELECT bin(13); + 1101 + > SELECT bin(-13); + 1111111111111111111111111111111111111111111111111111111111110011 + > SELECT bin(13.3); + 1101 + + +-- !query 28 +DESC FUNCTION EXTENDED bround +-- !query 28 schema +struct +-- !query 28 output +Function: bround +Class: org.apache.spark.sql.catalyst.expressions.BRound +Usage: bround(expr, d) - Returns `expr` rounded to `d` decimal places using HALF_EVEN rounding mode. +Extended Usage: + Examples: + > SELECT bround(2.5, 0); + 2.0 + + +-- !query 29 +DESC FUNCTION EXTENDED cbrt +-- !query 29 schema +struct +-- !query 29 output +Function: cbrt +Class: org.apache.spark.sql.catalyst.expressions.Cbrt +Usage: cbrt(expr) - Returns the cube root of `expr`. 
+Extended Usage: + Examples: + > SELECT cbrt(27.0); + 3.0 + + +-- !query 30 +DESC FUNCTION EXTENDED ceil +-- !query 30 schema +struct +-- !query 30 output +Function: ceil +Class: org.apache.spark.sql.catalyst.expressions.Ceil +Usage: ceil(expr) - Returns the smallest integer not smaller than `expr`. +Extended Usage: + Examples: + > SELECT ceil(-0.1); + 0 + > SELECT ceil(5); + 5 + + +-- !query 31 +DESC FUNCTION EXTENDED ceiling +-- !query 31 schema +struct +-- !query 31 output +Function: ceiling +Class: org.apache.spark.sql.catalyst.expressions.Ceil +Usage: ceiling(expr) - Returns the smallest integer not smaller than `expr`. +Extended Usage: + Examples: + > SELECT ceiling(-0.1); + 0 + > SELECT ceiling(5); + 5 + + +-- !query 32 +DESC FUNCTION EXTENDED cos +-- !query 32 schema +struct +-- !query 32 output +Function: cos +Class: org.apache.spark.sql.catalyst.expressions.Cos +Usage: cos(expr) - Returns the cosine of `expr`. +Extended Usage: + Examples: + > SELECT cos(0); + 1.0 + + +-- !query 33 +DESC FUNCTION EXTENDED cosh +-- !query 33 schema +struct +-- !query 33 output +Function: cosh +Class: org.apache.spark.sql.catalyst.expressions.Cosh +Usage: cosh(expr) - Returns the hyperbolic cosine of `expr`. +Extended Usage: + Examples: + > SELECT cosh(0); + 1.0 + + +-- !query 34 +DESC FUNCTION EXTENDED conv +-- !query 34 schema +struct +-- !query 34 output +Function: conv +Class: org.apache.spark.sql.catalyst.expressions.Conv +Usage: conv(num, from_base, to_base) - Convert `num` from `from_base` to `to_base`. +Extended Usage: + Examples: + > SELECT conv('100', 2, 10); + 4 + > SELECT conv(-10, 16, -10); + 16 + + +-- !query 35 +DESC FUNCTION EXTENDED degrees +-- !query 35 schema +struct +-- !query 35 output +Function: degrees +Class: org.apache.spark.sql.catalyst.expressions.ToDegrees +Usage: degrees(expr) - Converts radians to degrees. 
+Extended Usage: + Examples: + > SELECT degrees(3.141592653589793); + 180.0 + + +-- !query 36 +DESC FUNCTION EXTENDED e +-- !query 36 schema +struct +-- !query 36 output +Function: e +Class: org.apache.spark.sql.catalyst.expressions.EulerNumber +Usage: e() - Returns Euler's number, e. +Extended Usage: + Examples: + > SELECT e(); + 2.718281828459045 + + +-- !query 37 +DESC FUNCTION EXTENDED exp +-- !query 37 schema +struct +-- !query 37 output +Function: exp +Class: org.apache.spark.sql.catalyst.expressions.Exp +Usage: exp(expr) - Returns e to the power of `expr`. +Extended Usage: + Examples: + > SELECT exp(0); + 1.0 + + +-- !query 38 +DESC FUNCTION EXTENDED expm1 +-- !query 38 schema +struct +-- !query 38 output +Function: expm1 +Class: org.apache.spark.sql.catalyst.expressions.Expm1 +Usage: expm1(expr) - Returns exp(`expr`) - 1. +Extended Usage: + Examples: + > SELECT expm1(0); + 0.0 + + +-- !query 39 +DESC FUNCTION EXTENDED floor +-- !query 39 schema +struct +-- !query 39 output +Function: floor +Class: org.apache.spark.sql.catalyst.expressions.Floor +Usage: floor(expr) - Returns the largest integer not greater than `expr`. +Extended Usage: + Examples: + > SELECT floor(-0.1); + -1 + > SELECT floor(5); + 5 + + +-- !query 40 +DESC FUNCTION EXTENDED factorial +-- !query 40 schema +struct +-- !query 40 output +Function: factorial +Class: org.apache.spark.sql.catalyst.expressions.Factorial +Usage: factorial(expr) - Returns the factorial of `expr`. `expr` is [0..20]. Otherwise, null. +Extended Usage: + Examples: + > SELECT factorial(5); + 120 + + +-- !query 41 +DESC FUNCTION EXTENDED hex +-- !query 41 schema +struct +-- !query 41 output +Function: hex +Class: org.apache.spark.sql.catalyst.expressions.Hex +Usage: hex(expr) - Converts `expr` to hexadecimal. 
+Extended Usage: + Examples: + > SELECT hex(17); + 11 + > SELECT hex('Spark SQL'); + 537061726B2053514C + + +-- !query 42 +DESC FUNCTION EXTENDED hypot +-- !query 42 schema +struct +-- !query 42 output +Function: hypot +Class: org.apache.spark.sql.catalyst.expressions.Hypot +Usage: hypot(expr1, expr2) - Returns sqrt(`expr1`**2 + `expr2`**2). +Extended Usage: + Examples: + > SELECT hypot(3, 4); + 5.0 + + +-- !query 43 +DESC FUNCTION EXTENDED log +-- !query 43 schema +struct +-- !query 43 output +Function: log +Class: org.apache.spark.sql.catalyst.expressions.Logarithm +Usage: log(base, expr) - Returns the logarithm of `expr` with `base`. +Extended Usage: + Examples: + > SELECT log(10, 100); + 2.0 + + +-- !query 44 +DESC FUNCTION EXTENDED log10 +-- !query 44 schema +struct +-- !query 44 output +Function: log10 +Class: org.apache.spark.sql.catalyst.expressions.Log10 +Usage: log10(expr) - Returns the logarithm of `expr` with base 10. +Extended Usage: + Examples: + > SELECT log10(10); + 1.0 + + +-- !query 45 +DESC FUNCTION EXTENDED log1p +-- !query 45 schema +struct +-- !query 45 output +Function: log1p +Class: org.apache.spark.sql.catalyst.expressions.Log1p +Usage: log1p(expr) - Returns log(1 + `expr`). +Extended Usage: + Examples: + > SELECT log1p(0); + 0.0 + + +-- !query 46 +DESC FUNCTION EXTENDED log2 +-- !query 46 schema +struct +-- !query 46 output +Function: log2 +Class: org.apache.spark.sql.catalyst.expressions.Log2 +Usage: log2(expr) - Returns the logarithm of `expr` with base 2. +Extended Usage: + Examples: + > SELECT log2(2); + 1.0 + + +-- !query 47 +DESC FUNCTION EXTENDED ln +-- !query 47 schema +struct +-- !query 47 output +Function: ln +Class: org.apache.spark.sql.catalyst.expressions.Log +Usage: ln(expr) - Returns the natural logarithm (base e) of `expr`. 
+Extended Usage: + Examples: + > SELECT ln(1); + 0.0 + + +-- !query 48 +DESC FUNCTION EXTENDED negative +-- !query 48 schema +struct +-- !query 48 output +Function: negative +Class: org.apache.spark.sql.catalyst.expressions.UnaryMinus +Usage: negative(expr) - Returns the negated value of `expr`. +Extended Usage: + Examples: + > SELECT negative(1); + -1 + + +-- !query 49 +DESC FUNCTION EXTENDED pi +-- !query 49 schema +struct +-- !query 49 output +Function: pi +Class: org.apache.spark.sql.catalyst.expressions.Pi +Usage: pi() - Returns pi. +Extended Usage: + Examples: + > SELECT pi(); + 3.141592653589793 + + +-- !query 50 +DESC FUNCTION EXTENDED pmod +-- !query 50 schema +struct +-- !query 50 output +Function: pmod +Class: org.apache.spark.sql.catalyst.expressions.Pmod +Usage: pmod(expr1, expr2) - Returns the positive value of `expr1` mod `expr2`. +Extended Usage: + Examples: + > SELECT pmod(10, 3); + 1 + > SELECT pmod(-10, 3); + 2 + + +-- !query 51 +DESC FUNCTION EXTENDED positive +-- !query 51 schema +struct +-- !query 51 output +Function: positive +Class: org.apache.spark.sql.catalyst.expressions.UnaryPositive +Usage: positive(expr) - Returns the value of `expr`. +Extended Usage: + No example/argument for positive. + + +-- !query 52 +DESC FUNCTION EXTENDED pow +-- !query 52 schema +struct +-- !query 52 output +Function: pow +Class: org.apache.spark.sql.catalyst.expressions.Pow +Usage: pow(expr1, expr2) - Raises `expr1` to the power of `expr2`. +Extended Usage: + Examples: + > SELECT pow(2, 3); + 8.0 + + +-- !query 53 +DESC FUNCTION EXTENDED power +-- !query 53 schema +struct +-- !query 53 output +Function: power +Class: org.apache.spark.sql.catalyst.expressions.Pow +Usage: power(expr1, expr2) - Raises `expr1` to the power of `expr2`. 
+Extended Usage: + Examples: + > SELECT power(2, 3); + 8.0 + + +-- !query 54 +DESC FUNCTION EXTENDED radians +-- !query 54 schema +struct +-- !query 54 output +Function: radians +Class: org.apache.spark.sql.catalyst.expressions.ToRadians +Usage: radians(expr) - Converts degrees to radians. +Extended Usage: + Examples: + > SELECT radians(180); + 3.141592653589793 + + +-- !query 55 +DESC FUNCTION EXTENDED rint +-- !query 55 schema +struct +-- !query 55 output +Function: rint +Class: org.apache.spark.sql.catalyst.expressions.Rint +Usage: rint(expr) - Returns the double value that is closest in value to the argument and is equal to a mathematical integer. +Extended Usage: + Examples: + > SELECT rint(12.3456); + 12.0 + + +-- !query 56 +DESC FUNCTION EXTENDED round +-- !query 56 schema +struct +-- !query 56 output +Function: round +Class: org.apache.spark.sql.catalyst.expressions.Round +Usage: round(expr, d) - Returns `expr` rounded to `d` decimal places using HALF_UP rounding mode. +Extended Usage: + Examples: + > SELECT round(2.5, 0); + 3.0 + + +-- !query 57 +DESC FUNCTION EXTENDED shiftleft +-- !query 57 schema +struct +-- !query 57 output +Function: shiftleft +Class: org.apache.spark.sql.catalyst.expressions.ShiftLeft +Usage: shiftleft(base, expr) - Bitwise left shift. +Extended Usage: + Examples: + > SELECT shiftleft(2, 1); + 4 + + +-- !query 58 +DESC FUNCTION EXTENDED shiftright +-- !query 58 schema +struct +-- !query 58 output +Function: shiftright +Class: org.apache.spark.sql.catalyst.expressions.ShiftRight +Usage: shiftright(base, expr) - Bitwise (signed) right shift. +Extended Usage: + Examples: + > SELECT shiftright(4, 1); + 2 + + +-- !query 59 +DESC FUNCTION EXTENDED shiftrightunsigned +-- !query 59 schema +struct +-- !query 59 output +Function: shiftrightunsigned +Class: org.apache.spark.sql.catalyst.expressions.ShiftRightUnsigned +Usage: shiftrightunsigned(base, expr) - Bitwise unsigned right shift. 
+Extended Usage: + Examples: + > SELECT shiftrightunsigned(4, 1); + 2 + + +-- !query 60 +DESC FUNCTION EXTENDED sign +-- !query 60 schema +struct +-- !query 60 output +Function: sign +Class: org.apache.spark.sql.catalyst.expressions.Signum +Usage: sign(expr) - Returns -1.0, 0.0 or 1.0 as `expr` is negative, 0 or positive. +Extended Usage: + Examples: + > SELECT sign(40); + 1.0 + + +-- !query 61 +DESC FUNCTION EXTENDED signum +-- !query 61 schema +struct +-- !query 61 output +Function: signum +Class: org.apache.spark.sql.catalyst.expressions.Signum +Usage: signum(expr) - Returns -1.0, 0.0 or 1.0 as `expr` is negative, 0 or positive. +Extended Usage: + Examples: + > SELECT signum(40); + 1.0 + + +-- !query 62 +DESC FUNCTION EXTENDED sin +-- !query 62 schema +struct +-- !query 62 output +Function: sin +Class: org.apache.spark.sql.catalyst.expressions.Sin +Usage: sin(expr) - Returns the sine of `expr`. +Extended Usage: + Examples: + > SELECT sin(0); + 0.0 + + +-- !query 63 +DESC FUNCTION EXTENDED sinh +-- !query 63 schema +struct +-- !query 63 output +Function: sinh +Class: org.apache.spark.sql.catalyst.expressions.Sinh +Usage: sinh(expr) - Returns the hyperbolic sine of `expr`. +Extended Usage: + Examples: + > SELECT sinh(0); + 0.0 + + +-- !query 64 +DESC FUNCTION EXTENDED str_to_map +-- !query 64 schema +struct +-- !query 64 output +Function: str_to_map +Class: org.apache.spark.sql.catalyst.expressions.StringToMap +Usage: str_to_map(text[, pairDelim[, keyValueDelim]]) - Creates a map after splitting the text into key/value pairs using delimiters. Default delimiters are ',' for `pairDelim` and ':' for `keyValueDelim`. 
+Extended Usage: + Examples: + > SELECT str_to_map('a:1,b:2,c:3', ',', ':'); + map("a":"1","b":"2","c":"3") + > SELECT str_to_map('a'); + map("a":null) + + +-- !query 65 +DESC FUNCTION EXTENDED sqrt +-- !query 65 schema +struct +-- !query 65 output +Function: sqrt +Class: org.apache.spark.sql.catalyst.expressions.Sqrt +Usage: sqrt(expr) - Returns the square root of `expr`. +Extended Usage: + Examples: + > SELECT sqrt(4); + 2.0 + + +-- !query 66 +DESC FUNCTION EXTENDED tan +-- !query 66 schema +struct +-- !query 66 output +Function: tan +Class: org.apache.spark.sql.catalyst.expressions.Tan +Usage: tan(expr) - Returns the tangent of `expr`. +Extended Usage: + Examples: + > SELECT tan(0); + 0.0 + + +-- !query 67 +DESC FUNCTION EXTENDED tanh +-- !query 67 schema +struct +-- !query 67 output +Function: tanh +Class: org.apache.spark.sql.catalyst.expressions.Tanh +Usage: tanh(expr) - Returns the hyperbolic tangent of `expr`. +Extended Usage: + Examples: + > SELECT tanh(0); + 0.0 + + +-- !query 68 +DESC FUNCTION EXTENDED + +-- !query 68 schema +struct +-- !query 68 output +Function: + +Class: org.apache.spark.sql.catalyst.expressions.Add +Usage: expr1 + expr2 - Returns `expr1`+`expr2`. +Extended Usage: + Examples: + > SELECT 1 + 2; + 3 + + +-- !query 69 +DESC FUNCTION EXTENDED - +-- !query 69 schema +struct +-- !query 69 output +Function: - +Class: org.apache.spark.sql.catalyst.expressions.Subtract +Usage: expr1 - expr2 - Returns `expr1`-`expr2`. +Extended Usage: + Examples: + > SELECT 2 - 1; + 1 + + +-- !query 70 +DESC FUNCTION EXTENDED * +-- !query 70 schema +struct +-- !query 70 output +Function: * +Class: org.apache.spark.sql.catalyst.expressions.Multiply +Usage: expr1 * expr2 - Returns `expr1`*`expr2`. +Extended Usage: + Examples: + > SELECT 2 * 3; + 6 + + +-- !query 71 +DESC FUNCTION EXTENDED / +-- !query 71 schema +struct +-- !query 71 output +Function: / +Class: org.apache.spark.sql.catalyst.expressions.Divide +Usage: expr1 / expr2 - Returns `expr1`/`expr2`. 
It always performs floating point division. +Extended Usage: + Examples: + > SELECT 3 / 2; + 1.5 + > SELECT 2L / 2L; + 1.0 + + +-- !query 72 +DESC FUNCTION EXTENDED % +-- !query 72 schema +struct +-- !query 72 output +Function: % +Class: org.apache.spark.sql.catalyst.expressions.Remainder +Usage: expr1 % expr2 - Returns the remainder after `expr1`/`expr2`. +Extended Usage: + Examples: + > SELECT 2 % 1.8; + 0.2 + + +-- !query 73 +DESC FUNCTION EXTENDED approx_count_distinct +-- !query 73 schema +struct +-- !query 73 output +Function: approx_count_distinct +Class: org.apache.spark.sql.catalyst.expressions.aggregate.HyperLogLogPlusPlus +Usage: + approx_count_distinct(expr[, relativeSD]) - Returns the estimated cardinality by HyperLogLog++. + `relativeSD` defines the maximum estimation error allowed. + +Extended Usage: + No example/argument for approx_count_distinct. + + +-- !query 74 +DESC FUNCTION EXTENDED avg +-- !query 74 schema +struct +-- !query 74 output +Function: avg +Class: org.apache.spark.sql.catalyst.expressions.aggregate.Average +Usage: avg(expr) - Returns the mean calculated from values of a group. +Extended Usage: + No example/argument for avg. + + +-- !query 75 +DESC FUNCTION EXTENDED corr +-- !query 75 schema +struct +-- !query 75 output +Function: corr +Class: org.apache.spark.sql.catalyst.expressions.aggregate.Corr +Usage: corr(expr1, expr2) - Returns Pearson coefficient of correlation between a set of number pairs. +Extended Usage: + No example/argument for corr. + + +-- !query 76 +DESC FUNCTION EXTENDED count +-- !query 76 schema +struct +-- !query 76 output +Function: count +Class: org.apache.spark.sql.catalyst.expressions.aggregate.Count +Usage: + count(*) - Returns the total number of retrieved rows, including rows containing null. + + count(expr) - Returns the number of rows for which the supplied expression is non-null. 
+ + count(DISTINCT expr[, expr...]) - Returns the number of rows for which the supplied expression(s) are unique and non-null. + +Extended Usage: + No example/argument for count. + + +-- !query 77 +DESC FUNCTION EXTENDED covar_pop +-- !query 77 schema +struct +-- !query 77 output +Function: covar_pop +Class: org.apache.spark.sql.catalyst.expressions.aggregate.CovPopulation +Usage: covar_pop(expr1, expr2) - Returns the population covariance of a set of number pairs. +Extended Usage: + No example/argument for covar_pop. + + +-- !query 78 +DESC FUNCTION EXTENDED covar_samp +-- !query 78 schema +struct +-- !query 78 output +Function: covar_samp +Class: org.apache.spark.sql.catalyst.expressions.aggregate.CovSample +Usage: covar_samp(expr1, expr2) - Returns the sample covariance of a set of number pairs. +Extended Usage: + No example/argument for covar_samp. + + +-- !query 79 +DESC FUNCTION EXTENDED first +-- !query 79 schema +struct +-- !query 79 output +Function: first +Class: org.apache.spark.sql.catalyst.expressions.aggregate.First +Usage: + first(expr[, isIgnoreNull]) - Returns the first value of `expr` for a group of rows. + If `isIgnoreNull` is true, returns only non-null values. + +Extended Usage: + No example/argument for first. + + +-- !query 80 +DESC FUNCTION EXTENDED first_value +-- !query 80 schema +struct +-- !query 80 output +Function: first_value +Class: org.apache.spark.sql.catalyst.expressions.aggregate.First +Usage: + first_value(expr[, isIgnoreNull]) - Returns the first value of `expr` for a group of rows. + If `isIgnoreNull` is true, returns only non-null values. + +Extended Usage: + No example/argument for first_value. + + +-- !query 81 +DESC FUNCTION EXTENDED kurtosis +-- !query 81 schema +struct +-- !query 81 output +Function: kurtosis +Class: org.apache.spark.sql.catalyst.expressions.aggregate.Kurtosis +Usage: kurtosis(expr) - Returns the kurtosis value calculated from values of a group. +Extended Usage: + No example/argument for kurtosis. 
+ + +-- !query 82 +DESC FUNCTION EXTENDED last +-- !query 82 schema +struct +-- !query 82 output +Function: last +Class: org.apache.spark.sql.catalyst.expressions.aggregate.Last +Usage: + last(expr[, isIgnoreNull]) - Returns the last value of `expr` for a group of rows. + If `isIgnoreNull` is true, returns only non-null values. + +Extended Usage: + No example/argument for last. + + +-- !query 83 +DESC FUNCTION EXTENDED last_value +-- !query 83 schema +struct +-- !query 83 output +Function: last_value +Class: org.apache.spark.sql.catalyst.expressions.aggregate.Last +Usage: + last_value(expr[, isIgnoreNull]) - Returns the last value of `expr` for a group of rows. + If `isIgnoreNull` is true, returns only non-null values. + +Extended Usage: + No example/argument for last_value. + + +-- !query 84 +DESC FUNCTION EXTENDED max +-- !query 84 schema +struct +-- !query 84 output +Function: max +Class: org.apache.spark.sql.catalyst.expressions.aggregate.Max +Usage: max(expr) - Returns the maximum value of `expr`. +Extended Usage: + No example/argument for max. + + +-- !query 85 +DESC FUNCTION EXTENDED mean +-- !query 85 schema +struct +-- !query 85 output +Function: mean +Class: org.apache.spark.sql.catalyst.expressions.aggregate.Average +Usage: mean(expr) - Returns the mean calculated from values of a group. +Extended Usage: + No example/argument for mean. + + +-- !query 86 +DESC FUNCTION EXTENDED min +-- !query 86 schema +struct +-- !query 86 output +Function: min +Class: org.apache.spark.sql.catalyst.expressions.aggregate.Min +Usage: min(expr) - Returns the minimum value of `expr`. +Extended Usage: + No example/argument for min. + + +-- !query 87 +DESC FUNCTION EXTENDED percentile +-- !query 87 schema +struct +-- !query 87 output +Function: percentile +Class: org.apache.spark.sql.catalyst.expressions.aggregate.Percentile +Usage: + percentile(col, percentage [, frequency]) - Returns the exact percentile value of numeric column + `col` at the given percentage. 
The value of percentage must be between 0.0 and 1.0. The + value of frequency should be positive integral + + percentile(col, array(percentage1 [, percentage2]...) [, frequency]) - Returns the exact + percentile value array of numeric column `col` at the given percentage(s). Each value + of the percentage array must be between 0.0 and 1.0. The value of frequency should be + positive integral + + +Extended Usage: + No example/argument for percentile. + + +-- !query 88 +DESC FUNCTION EXTENDED skewness +-- !query 88 schema +struct +-- !query 88 output +Function: skewness +Class: org.apache.spark.sql.catalyst.expressions.aggregate.Skewness +Usage: skewness(expr) - Returns the skewness value calculated from values of a group. +Extended Usage: + No example/argument for skewness. + + +-- !query 89 +DESC FUNCTION EXTENDED percentile_approx +-- !query 89 schema +struct +-- !query 89 output +Function: percentile_approx +Class: org.apache.spark.sql.catalyst.expressions.aggregate.ApproximatePercentile +Usage: + percentile_approx(col, percentage [, accuracy]) - Returns the approximate percentile value of numeric + column `col` at the given percentage. The value of percentage must be between 0.0 + and 1.0. The `accuracy` parameter (default: 10000) is a positive numeric literal which + controls approximation accuracy at the cost of memory. Higher value of `accuracy` yields + better accuracy, `1.0/accuracy` is the relative error of the approximation. + When `percentage` is an array, each value of the percentage array must be between 0.0 and 1.0. + In this case, returns the approximate percentile array of column `col` at the given + percentage array. 
+ +Extended Usage: + Examples: + > SELECT percentile_approx(10.0, array(0.5, 0.4, 0.1), 100); + [10.0,10.0,10.0] + > SELECT percentile_approx(10.0, 0.5, 100); + 10.0 + + +-- !query 90 +DESC FUNCTION EXTENDED approx_percentile +-- !query 90 schema +struct +-- !query 90 output +Function: approx_percentile +Class: org.apache.spark.sql.catalyst.expressions.aggregate.ApproximatePercentile +Usage: + approx_percentile(col, percentage [, accuracy]) - Returns the approximate percentile value of numeric + column `col` at the given percentage. The value of percentage must be between 0.0 + and 1.0. The `accuracy` parameter (default: 10000) is a positive numeric literal which + controls approximation accuracy at the cost of memory. Higher value of `accuracy` yields + better accuracy, `1.0/accuracy` is the relative error of the approximation. + When `percentage` is an array, each value of the percentage array must be between 0.0 and 1.0. + In this case, returns the approximate percentile array of column `col` at the given + percentage array. + +Extended Usage: + Examples: + > SELECT approx_percentile(10.0, array(0.5, 0.4, 0.1), 100); + [10.0,10.0,10.0] + > SELECT approx_percentile(10.0, 0.5, 100); + 10.0 + + +-- !query 91 +DESC FUNCTION EXTENDED std +-- !query 91 schema +struct +-- !query 91 output +Function: std +Class: org.apache.spark.sql.catalyst.expressions.aggregate.StddevSamp +Usage: std(expr) - Returns the sample standard deviation calculated from values of a group. +Extended Usage: + No example/argument for std. + + +-- !query 92 +DESC FUNCTION EXTENDED stddev +-- !query 92 schema +struct +-- !query 92 output +Function: stddev +Class: org.apache.spark.sql.catalyst.expressions.aggregate.StddevSamp +Usage: stddev(expr) - Returns the sample standard deviation calculated from values of a group. +Extended Usage: + No example/argument for stddev. 
+ + +-- !query 93 +DESC FUNCTION EXTENDED stddev_pop +-- !query 93 schema +struct +-- !query 93 output +Function: stddev_pop +Class: org.apache.spark.sql.catalyst.expressions.aggregate.StddevPop +Usage: stddev_pop(expr) - Returns the population standard deviation calculated from values of a group. +Extended Usage: + No example/argument for stddev_pop. + + +-- !query 94 +DESC FUNCTION EXTENDED stddev_samp +-- !query 94 schema +struct +-- !query 94 output +Function: stddev_samp +Class: org.apache.spark.sql.catalyst.expressions.aggregate.StddevSamp +Usage: stddev_samp(expr) - Returns the sample standard deviation calculated from values of a group. +Extended Usage: + No example/argument for stddev_samp. + + +-- !query 95 +DESC FUNCTION EXTENDED sum +-- !query 95 schema +struct +-- !query 95 output +Function: sum +Class: org.apache.spark.sql.catalyst.expressions.aggregate.Sum +Usage: sum(expr) - Returns the sum calculated from values of a group. +Extended Usage: + No example/argument for sum. + + +-- !query 96 +DESC FUNCTION EXTENDED variance +-- !query 96 schema +struct +-- !query 96 output +Function: variance +Class: org.apache.spark.sql.catalyst.expressions.aggregate.VarianceSamp +Usage: variance(expr) - Returns the sample variance calculated from values of a group. +Extended Usage: + No example/argument for variance. + + +-- !query 97 +DESC FUNCTION EXTENDED var_pop +-- !query 97 schema +struct +-- !query 97 output +Function: var_pop +Class: org.apache.spark.sql.catalyst.expressions.aggregate.VariancePop +Usage: var_pop(expr) - Returns the population variance calculated from values of a group. +Extended Usage: + No example/argument for var_pop. + + +-- !query 98 +DESC FUNCTION EXTENDED var_samp +-- !query 98 schema +struct +-- !query 98 output +Function: var_samp +Class: org.apache.spark.sql.catalyst.expressions.aggregate.VarianceSamp +Usage: var_samp(expr) - Returns the sample variance calculated from values of a group. 
+Extended Usage: + No example/argument for var_samp. + + +-- !query 99 +DESC FUNCTION EXTENDED collect_list +-- !query 99 schema +struct +-- !query 99 output +Function: collect_list +Class: org.apache.spark.sql.catalyst.expressions.aggregate.CollectList +Usage: collect_list(expr) - Collects and returns a list of non-unique elements. +Extended Usage: + No example/argument for collect_list. + + +-- !query 100 +DESC FUNCTION EXTENDED collect_set +-- !query 100 schema +struct +-- !query 100 output +Function: collect_set +Class: org.apache.spark.sql.catalyst.expressions.aggregate.CollectSet +Usage: collect_set(expr) - Collects and returns a set of unique elements. +Extended Usage: + No example/argument for collect_set. + + +-- !query 101 +DESC FUNCTION EXTENDED count_min_sketch +-- !query 101 schema +struct +-- !query 101 output +Function: count_min_sketch +Class: org.apache.spark.sql.catalyst.expressions.aggregate.CountMinSketchAgg +Usage: + count_min_sketch(col, eps, confidence, seed) - Returns a count-min sketch of a column with the given esp, + confidence and seed. The result is an array of bytes, which can be deserialized to a + `CountMinSketch` before usage. Count-min sketch is a probabilistic data structure used for + cardinality estimation using sub-linear space. + +Extended Usage: + No example/argument for count_min_sketch. + + +-- !query 102 +DESC FUNCTION EXTENDED ascii +-- !query 102 schema +struct +-- !query 102 output +Function: ascii +Class: org.apache.spark.sql.catalyst.expressions.Ascii +Usage: ascii(str) - Returns the numeric value of the first character of `str`. +Extended Usage: + Examples: + > SELECT ascii('222'); + 50 + > SELECT ascii(2); + 50 + + +-- !query 103 +DESC FUNCTION EXTENDED base64 +-- !query 103 schema +struct +-- !query 103 output +Function: base64 +Class: org.apache.spark.sql.catalyst.expressions.Base64 +Usage: base64(bin) - Converts the argument from a binary `bin` to a base 64 string. 
+Extended Usage: + Examples: + > SELECT base64('Spark SQL'); + U3BhcmsgU1FM + + +-- !query 104 +DESC FUNCTION EXTENDED concat +-- !query 104 schema +struct +-- !query 104 output +Function: concat +Class: org.apache.spark.sql.catalyst.expressions.Concat +Usage: concat(str1, str2, ..., strN) - Returns the concatenation of str1, str2, ..., strN. +Extended Usage: + Examples: + > SELECT concat('Spark', 'SQL'); + SparkSQL + + +-- !query 105 +DESC FUNCTION EXTENDED concat_ws +-- !query 105 schema +struct +-- !query 105 output +Function: concat_ws +Class: org.apache.spark.sql.catalyst.expressions.ConcatWs +Usage: concat_ws(sep, [str | array(str)]+) - Returns the concatenation of the strings separated by `sep`. +Extended Usage: + Examples: + > SELECT concat_ws(' ', 'Spark', 'SQL'); + Spark SQL + + +-- !query 106 +DESC FUNCTION EXTENDED decode +-- !query 106 schema +struct +-- !query 106 output +Function: decode +Class: org.apache.spark.sql.catalyst.expressions.Decode +Usage: decode(bin, charset) - Decodes the first argument using the second argument character set. +Extended Usage: + Examples: + > SELECT decode(encode('abc', 'utf-8'), 'utf-8'); + abc + + +-- !query 107 +DESC FUNCTION EXTENDED elt +-- !query 107 schema +struct +-- !query 107 output +Function: elt +Class: org.apache.spark.sql.catalyst.expressions.Elt +Usage: elt(n, str1, str2, ...) - Returns the `n`-th string, e.g., returns `str2` when `n` is 2. +Extended Usage: + Examples: + > SELECT elt(1, 'scala', 'java'); + scala + + +-- !query 108 +DESC FUNCTION EXTENDED encode +-- !query 108 schema +struct +-- !query 108 output +Function: encode +Class: org.apache.spark.sql.catalyst.expressions.Encode +Usage: encode(str, charset) - Encodes the first argument using the second argument character set. 
+Extended Usage: + Examples: + > SELECT encode('abc', 'utf-8'); + abc + + +-- !query 109 +DESC FUNCTION EXTENDED find_in_set +-- !query 109 schema +struct +-- !query 109 output +Function: find_in_set +Class: org.apache.spark.sql.catalyst.expressions.FindInSet +Usage: + find_in_set(str, str_array) - Returns the index (1-based) of the given string (`str`) in the comma-delimited list (`str_array`). + Returns 0, if the string was not found or if the given string (`str`) contains a comma. + +Extended Usage: + Examples: + > SELECT find_in_set('ab','abc,b,ab,c,def'); + 3 + + +-- !query 110 +DESC FUNCTION EXTENDED format_number +-- !query 110 schema +struct +-- !query 110 output +Function: format_number +Class: org.apache.spark.sql.catalyst.expressions.FormatNumber +Usage: + format_number(expr1, expr2) - Formats the number `expr1` like '#,###,###.##', rounded to `expr2` + decimal places. If `expr2` is 0, the result has no decimal point or fractional part. + This is supposed to function like MySQL's FORMAT. + +Extended Usage: + Examples: + > SELECT format_number(12332.123456, 4); + 12,332.1235 + + +-- !query 111 +DESC FUNCTION EXTENDED format_string +-- !query 111 schema +struct +-- !query 111 output +Function: format_string +Class: org.apache.spark.sql.catalyst.expressions.FormatString +Usage: format_string(strfmt, obj, ...) - Returns a formatted string from printf-style format strings. +Extended Usage: + Examples: + > SELECT format_string("Hello World %d %s", 100, "days"); + Hello World 100 days + + +-- !query 112 +DESC FUNCTION EXTENDED get_json_object +-- !query 112 schema +struct +-- !query 112 output +Function: get_json_object +Class: org.apache.spark.sql.catalyst.expressions.GetJsonObject +Usage: get_json_object(json_txt, path) - Extracts a json object from `path`. 
+Extended Usage: + Examples: + > SELECT get_json_object('{"a":"b"}', '$.a'); + b + + +-- !query 113 +DESC FUNCTION EXTENDED initcap +-- !query 113 schema +struct +-- !query 113 output +Function: initcap +Class: org.apache.spark.sql.catalyst.expressions.InitCap +Usage: + initcap(str) - Returns `str` with the first letter of each word in uppercase. + All other letters are in lowercase. Words are delimited by white space. + +Extended Usage: + Examples: + > SELECT initcap('sPark sql'); + Spark Sql + + +-- !query 114 +DESC FUNCTION EXTENDED instr +-- !query 114 schema +struct +-- !query 114 output +Function: instr +Class: org.apache.spark.sql.catalyst.expressions.StringInstr +Usage: instr(str, substr) - Returns the (1-based) index of the first occurrence of `substr` in `str`. +Extended Usage: + Examples: + > SELECT instr('SparkSQL', 'SQL'); + 6 + + +-- !query 115 +DESC FUNCTION EXTENDED lcase +-- !query 115 schema +struct +-- !query 115 output +Function: lcase +Class: org.apache.spark.sql.catalyst.expressions.Lower +Usage: lcase(str) - Returns `str` with all characters changed to lowercase. +Extended Usage: + Examples: + > SELECT lcase('SparkSql'); + sparksql + + +-- !query 116 +DESC FUNCTION EXTENDED length +-- !query 116 schema +struct +-- !query 116 output +Function: length +Class: org.apache.spark.sql.catalyst.expressions.Length +Usage: length(expr) - Returns the length of `expr` or number of bytes in binary data. +Extended Usage: + Examples: + > SELECT length('Spark SQL'); + 9 + + +-- !query 117 +DESC FUNCTION EXTENDED levenshtein +-- !query 117 schema +struct +-- !query 117 output +Function: levenshtein +Class: org.apache.spark.sql.catalyst.expressions.Levenshtein +Usage: levenshtein(str1, str2) - Returns the Levenshtein distance between the two given strings. 
+Extended Usage: + Examples: + > SELECT levenshtein('kitten', 'sitting'); + 3 + + +-- !query 118 +DESC FUNCTION EXTENDED like +-- !query 118 schema +struct +-- !query 118 output +Function: like +Class: org.apache.spark.sql.catalyst.expressions.Like +Usage: str like pattern - Returns true if `str` matches `pattern`, or false otherwise. +Extended Usage: + No example/argument for like. + + +-- !query 119 +DESC FUNCTION EXTENDED lower +-- !query 119 schema +struct +-- !query 119 output +Function: lower +Class: org.apache.spark.sql.catalyst.expressions.Lower +Usage: lower(str) - Returns `str` with all characters changed to lowercase. +Extended Usage: + Examples: + > SELECT lower('SparkSql'); + sparksql + + +-- !query 120 +DESC FUNCTION EXTENDED locate +-- !query 120 schema +struct +-- !query 120 output +Function: locate +Class: org.apache.spark.sql.catalyst.expressions.StringLocate +Usage: + locate(substr, str[, pos]) - Returns the position of the first occurrence of `substr` in `str` after position `pos`. + The given `pos` and return value are 1-based. + +Extended Usage: + Examples: + > SELECT locate('bar', 'foobarbar', 5); + 7 + + +-- !query 121 +DESC FUNCTION EXTENDED lpad +-- !query 121 schema +struct +-- !query 121 output +Function: lpad +Class: org.apache.spark.sql.catalyst.expressions.StringLPad +Usage: + lpad(str, len, pad) - Returns `str`, left-padded with `pad` to a length of `len`. + If `str` is longer than `len`, the return value is shortened to `len` characters. + +Extended Usage: + Examples: + > SELECT lpad('hi', 5, '??'); + ???hi + > SELECT lpad('hi', 1, '??'); + h + + +-- !query 122 +DESC FUNCTION EXTENDED ltrim +-- !query 122 schema +struct +-- !query 122 output +Function: ltrim +Class: org.apache.spark.sql.catalyst.expressions.StringTrimLeft +Usage: ltrim(str) - Removes the leading and trailing space characters from `str`. 
+Extended Usage: + Examples: + > SELECT ltrim(' SparkSQL'); + SparkSQL + + +-- !query 123 +DESC FUNCTION EXTENDED json_tuple +-- !query 123 schema +struct +-- !query 123 output +Function: json_tuple +Class: org.apache.spark.sql.catalyst.expressions.JsonTuple +Usage: json_tuple(jsonStr, p1, p2, ..., pn) - Returns a tuple like the function get_json_object, but it takes multiple names. All the input parameters and output column types are string. +Extended Usage: + Examples: + > SELECT json_tuple('{"a":1, "b":2}', 'a', 'b'); + 1 2 + + +-- !query 124 +DESC FUNCTION EXTENDED parse_url +-- !query 124 schema +struct +-- !query 124 output +Function: parse_url +Class: org.apache.spark.sql.catalyst.expressions.ParseUrl +Usage: parse_url(url, partToExtract[, key]) - Extracts a part from a URL. +Extended Usage: + Examples: + > SELECT parse_url('http://spark.apache.org/path?query=1', 'HOST') + spark.apache.org + > SELECT parse_url('http://spark.apache.org/path?query=1', 'QUERY') + query=1 + > SELECT parse_url('http://spark.apache.org/path?query=1', 'QUERY', 'query') + 1 + + +-- !query 125 +DESC FUNCTION EXTENDED printf +-- !query 125 schema +struct +-- !query 125 output +Function: printf +Class: org.apache.spark.sql.catalyst.expressions.FormatString +Usage: printf(strfmt, obj, ...) - Returns a formatted string from printf-style format strings. +Extended Usage: + Examples: + > SELECT printf("Hello World %d %s", 100, "days"); + Hello World 100 days + + +-- !query 126 +DESC FUNCTION EXTENDED regexp_extract +-- !query 126 schema +struct +-- !query 126 output +Function: regexp_extract +Class: org.apache.spark.sql.catalyst.expressions.RegExpExtract +Usage: regexp_extract(str, regexp[, idx]) - Extracts a group that matches `regexp`. 
+Extended Usage: + Examples: + > SELECT regexp_extract('100-200', '(\d+)-(\d+)', 1); + 100 + + +-- !query 127 +DESC FUNCTION EXTENDED regexp_replace +-- !query 127 schema +struct +-- !query 127 output +Function: regexp_replace +Class: org.apache.spark.sql.catalyst.expressions.RegExpReplace +Usage: regexp_replace(str, regexp, rep) - Replaces all substrings of `str` that match `regexp` with `rep`. +Extended Usage: + Examples: + > SELECT regexp_replace('100-200', '(\d+)', 'num'); + num-num + + +-- !query 128 +DESC FUNCTION EXTENDED repeat +-- !query 128 schema +struct +-- !query 128 output +Function: repeat +Class: org.apache.spark.sql.catalyst.expressions.StringRepeat +Usage: repeat(str, n) - Returns the string which repeats the given string value n times. +Extended Usage: + Examples: + > SELECT repeat('123', 2); + 123123 + + +-- !query 129 +DESC FUNCTION EXTENDED reverse +-- !query 129 schema +struct +-- !query 129 output +Function: reverse +Class: org.apache.spark.sql.catalyst.expressions.StringReverse +Usage: reverse(str) - Returns the reversed given string. +Extended Usage: + Examples: + > SELECT reverse('Spark SQL'); + LQS krapS + + +-- !query 130 +DESC FUNCTION EXTENDED rlike +-- !query 130 schema +struct +-- !query 130 output +Function: rlike +Class: org.apache.spark.sql.catalyst.expressions.RLike +Usage: str rlike regexp - Returns true if `str` matches `regexp`, or false otherwise. +Extended Usage: + No example/argument for rlike. + + +-- !query 131 +DESC FUNCTION EXTENDED rpad +-- !query 131 schema +struct +-- !query 131 output +Function: rpad +Class: org.apache.spark.sql.catalyst.expressions.StringRPad +Usage: + rpad(str, len, pad) - Returns `str`, right-padded with `pad` to a length of `len`. + If `str` is longer than `len`, the return value is shortened to `len` characters. + +Extended Usage: + Examples: + > SELECT rpad('hi', 5, '??'); + hi??? 
+ > SELECT rpad('hi', 1, '??'); + h + + +-- !query 132 +DESC FUNCTION EXTENDED rtrim +-- !query 132 schema +struct +-- !query 132 output +Function: rtrim +Class: org.apache.spark.sql.catalyst.expressions.StringTrimRight +Usage: rtrim(str) - Removes the trailing space characters from `str`. +Extended Usage: + Examples: + > SELECT rtrim(' SparkSQL '); + SparkSQL + + +-- !query 133 +DESC FUNCTION EXTENDED sentences +-- !query 133 schema +struct +-- !query 133 output +Function: sentences +Class: org.apache.spark.sql.catalyst.expressions.Sentences +Usage: sentences(str[, lang, country]) - Splits `str` into an array of array of words. +Extended Usage: + Examples: + > SELECT sentences('Hi there! Good morning.'); + [["Hi","there"],["Good","morning"]] + + +-- !query 134 +DESC FUNCTION EXTENDED soundex +-- !query 134 schema +struct +-- !query 134 output +Function: soundex +Class: org.apache.spark.sql.catalyst.expressions.SoundEx +Usage: soundex(str) - Returns Soundex code of the string. +Extended Usage: + Examples: + > SELECT soundex('Miller'); + M460 + + +-- !query 135 +DESC FUNCTION EXTENDED space +-- !query 135 schema +struct +-- !query 135 output +Function: space +Class: org.apache.spark.sql.catalyst.expressions.StringSpace +Usage: space(n) - Returns a string consisting of `n` spaces. +Extended Usage: + Examples: + > SELECT concat(space(2), '1'); + 1 + + +-- !query 136 +DESC FUNCTION EXTENDED split +-- !query 136 schema +struct +-- !query 136 output +Function: split +Class: org.apache.spark.sql.catalyst.expressions.StringSplit +Usage: split(str, regex) - Splits `str` around occurrences that match `regex`. 
+Extended Usage: + Examples: + > SELECT split('oneAtwoBthreeC', '[ABC]'); + ["one","two","three",""] + + +-- !query 137 +DESC FUNCTION EXTENDED substr +-- !query 137 schema +struct +-- !query 137 output +Function: substr +Class: org.apache.spark.sql.catalyst.expressions.Substring +Usage: substr(str, pos[, len]) - Returns the substring of `str` that starts at `pos` and is of length `len`, or the slice of byte array that starts at `pos` and is of length `len`. +Extended Usage: + Examples: + > SELECT substr('Spark SQL', 5); + k SQL + > SELECT substr('Spark SQL', -3); + SQL + > SELECT substr('Spark SQL', 5, 1); + k + + +-- !query 138 +DESC FUNCTION EXTENDED substring +-- !query 138 schema +struct +-- !query 138 output +Function: substring +Class: org.apache.spark.sql.catalyst.expressions.Substring +Usage: substring(str, pos[, len]) - Returns the substring of `str` that starts at `pos` and is of length `len`, or the slice of byte array that starts at `pos` and is of length `len`. +Extended Usage: + Examples: + > SELECT substring('Spark SQL', 5); + k SQL + > SELECT substring('Spark SQL', -3); + SQL + > SELECT substring('Spark SQL', 5, 1); + k + + +-- !query 139 +DESC FUNCTION EXTENDED substring_index +-- !query 139 schema +struct +-- !query 139 output +Function: substring_index +Class: org.apache.spark.sql.catalyst.expressions.SubstringIndex +Usage: + substring_index(str, delim, count) - Returns the substring from `str` before `count` occurrences of the delimiter `delim`. + If `count` is positive, everything to the left of the final delimiter (counting from the + left) is returned. If `count` is negative, everything to the right of the final delimiter + (counting from the right) is returned. The function substring_index performs a case-sensitive match + when searching for `delim`. 
+ +Extended Usage: + Examples: + > SELECT substring_index('www.apache.org', '.', 2); + www.apache + + +-- !query 140 +DESC FUNCTION EXTENDED translate +-- !query 140 schema +struct +-- !query 140 output +Function: translate +Class: org.apache.spark.sql.catalyst.expressions.StringTranslate +Usage: translate(input, from, to) - Translates the `input` string by replacing the characters present in the `from` string with the corresponding characters in the `to` string. +Extended Usage: + Examples: + > SELECT translate('AaBbCc', 'abc', '123'); + A1B2C3 + + +-- !query 141 +DESC FUNCTION EXTENDED trim +-- !query 141 schema +struct +-- !query 141 output +Function: trim +Class: org.apache.spark.sql.catalyst.expressions.StringTrim +Usage: trim(str) - Removes the leading and trailing space characters from `str`. +Extended Usage: + Examples: + > SELECT trim(' SparkSQL '); + SparkSQL + + +-- !query 142 +DESC FUNCTION EXTENDED ucase +-- !query 142 schema +struct +-- !query 142 output +Function: ucase +Class: org.apache.spark.sql.catalyst.expressions.Upper +Usage: ucase(str) - Returns `str` with all characters changed to uppercase. +Extended Usage: + Examples: + > SELECT ucase('SparkSql'); + SPARKSQL + + +-- !query 143 +DESC FUNCTION EXTENDED unbase64 +-- !query 143 schema +struct +-- !query 143 output +Function: unbase64 +Class: org.apache.spark.sql.catalyst.expressions.UnBase64 +Usage: unbase64(str) - Converts the argument from a base 64 string `str` to a binary. +Extended Usage: + Examples: + > SELECT unbase64('U3BhcmsgU1FM'); + Spark SQL + + +-- !query 144 +DESC FUNCTION EXTENDED unhex +-- !query 144 schema +struct +-- !query 144 output +Function: unhex +Class: org.apache.spark.sql.catalyst.expressions.Unhex +Usage: unhex(expr) - Converts hexadecimal `expr` to binary. 
+Extended Usage: + Examples: + > SELECT decode(unhex('537061726B2053514C'), 'UTF-8'); + Spark SQL + + +-- !query 145 +DESC FUNCTION EXTENDED upper +-- !query 145 schema +struct +-- !query 145 output +Function: upper +Class: org.apache.spark.sql.catalyst.expressions.Upper +Usage: upper(str) - Returns `str` with all characters changed to uppercase. +Extended Usage: + Examples: + > SELECT upper('SparkSql'); + SPARKSQL + + +-- !query 146 +DESC FUNCTION EXTENDED xpath +-- !query 146 schema +struct +-- !query 146 output +Function: xpath +Class: org.apache.spark.sql.catalyst.expressions.xml.XPathList +Usage: xpath(xml, xpath) - Returns a string array of values within the nodes of xml that match the XPath expression. +Extended Usage: + Examples: + > SELECT xpath('<a><b>b1</b><b>b2</b><b>b3</b><c>c1</c><c>c2</c></a>','a/b/text()'); + ['b1','b2','b3'] + + +-- !query 147 +DESC FUNCTION EXTENDED xpath_boolean +-- !query 147 schema +struct +-- !query 147 output +Function: xpath_boolean +Class: org.apache.spark.sql.catalyst.expressions.xml.XPathBoolean +Usage: xpath_boolean(xml, xpath) - Returns true if the XPath expression evaluates to true, or if a matching node is found. +Extended Usage: + Examples: + > SELECT xpath_boolean('<a><b>1</b></a>','a/b'); + true + + +-- !query 148 +DESC FUNCTION EXTENDED xpath_double +-- !query 148 schema +struct +-- !query 148 output +Function: xpath_double +Class: org.apache.spark.sql.catalyst.expressions.xml.XPathDouble +Usage: xpath_double(xml, xpath) - Returns a double value, the value zero if no match is found, or NaN if a match is found but the value is non-numeric. +Extended Usage: + Examples: + > SELECT xpath_double('<a><b>1</b><b>2</b></a>', 'sum(a/b)'); + 3.0 + + +-- !query 149 +DESC FUNCTION EXTENDED xpath_number +-- !query 149 schema +struct +-- !query 149 output +Function: xpath_number +Class: org.apache.spark.sql.catalyst.expressions.xml.XPathDouble +Usage: xpath_number(xml, xpath) - Returns a double value, the value zero if no match is found, or NaN if a match is found but the value is non-numeric. 
+Extended Usage: + Examples: + > SELECT xpath_number('<a><b>1</b><b>2</b></a>', 'sum(a/b)'); + 3.0 + + +-- !query 150 +DESC FUNCTION EXTENDED xpath_float +-- !query 150 schema +struct +-- !query 150 output +Function: xpath_float +Class: org.apache.spark.sql.catalyst.expressions.xml.XPathFloat +Usage: xpath_float(xml, xpath) - Returns a float value, the value zero if no match is found, or NaN if a match is found but the value is non-numeric. +Extended Usage: + Examples: + > SELECT xpath_float('<a><b>1</b><b>2</b></a>', 'sum(a/b)'); + 3.0 + + +-- !query 151 +DESC FUNCTION EXTENDED xpath_int +-- !query 151 schema +struct +-- !query 151 output +Function: xpath_int +Class: org.apache.spark.sql.catalyst.expressions.xml.XPathInt +Usage: xpath_int(xml, xpath) - Returns an integer value, or the value zero if no match is found, or a match is found but the value is non-numeric. +Extended Usage: + Examples: + > SELECT xpath_int('<a><b>1</b><b>2</b></a>', 'sum(a/b)'); + 3 + + +-- !query 152 +DESC FUNCTION EXTENDED xpath_long +-- !query 152 schema +struct +-- !query 152 output +Function: xpath_long +Class: org.apache.spark.sql.catalyst.expressions.xml.XPathLong +Usage: xpath_long(xml, xpath) - Returns a long integer value, or the value zero if no match is found, or a match is found but the value is non-numeric. +Extended Usage: + Examples: + > SELECT xpath_long('<a><b>1</b><b>2</b></a>', 'sum(a/b)'); + 3 + + +-- !query 153 +DESC FUNCTION EXTENDED xpath_short +-- !query 153 schema +struct +-- !query 153 output +Function: xpath_short +Class: org.apache.spark.sql.catalyst.expressions.xml.XPathShort +Usage: xpath_short(xml, xpath) - Returns a short integer value, or the value zero if no match is found, or a match is found but the value is non-numeric. 
+Extended Usage: + Examples: + > SELECT xpath_short('<a><b>1</b><b>2</b></a>', 'sum(a/b)'); + 3 + + +-- !query 154 +DESC FUNCTION EXTENDED xpath_string +-- !query 154 schema +struct +-- !query 154 output +Function: xpath_string +Class: org.apache.spark.sql.catalyst.expressions.xml.XPathString +Usage: xpath_string(xml, xpath) - Returns the text contents of the first xml node that matches the XPath expression. +Extended Usage: + Examples: + > SELECT xpath_string('<a><b>b</b><c>cc</c></a>','a/c'); + cc + + +-- !query 155 +DESC FUNCTION EXTENDED add_months +-- !query 155 schema +struct +-- !query 155 output +Function: add_months +Class: org.apache.spark.sql.catalyst.expressions.AddMonths +Usage: add_months(start_date, num_months) - Returns the date that is `num_months` after `start_date`. +Extended Usage: + Examples: + > SELECT add_months('2016-08-31', 1); + 2016-09-30 + + +-- !query 156 +DESC FUNCTION EXTENDED current_date +-- !query 156 schema +struct +-- !query 156 output +Function: current_date +Class: org.apache.spark.sql.catalyst.expressions.CurrentDate +Usage: current_date() - Returns the current date at the start of query evaluation. +Extended Usage: + No example/argument for current_date. + + +-- !query 157 +DESC FUNCTION EXTENDED current_timestamp +-- !query 157 schema +struct +-- !query 157 output +Function: current_timestamp +Class: org.apache.spark.sql.catalyst.expressions.CurrentTimestamp +Usage: current_timestamp() - Returns the current timestamp at the start of query evaluation. +Extended Usage: + No example/argument for current_timestamp. + + +-- !query 158 +DESC FUNCTION EXTENDED datediff +-- !query 158 schema +struct +-- !query 158 output +Function: datediff +Class: org.apache.spark.sql.catalyst.expressions.DateDiff +Usage: datediff(endDate, startDate) - Returns the number of days from `startDate` to `endDate`. 
+Extended Usage: + Examples: + > SELECT datediff('2009-07-31', '2009-07-30'); + 1 + + > SELECT datediff('2009-07-30', '2009-07-31'); + -1 + + +-- !query 159 +DESC FUNCTION EXTENDED date_add +-- !query 159 schema +struct +-- !query 159 output +Function: date_add +Class: org.apache.spark.sql.catalyst.expressions.DateAdd +Usage: date_add(start_date, num_days) - Returns the date that is `num_days` after `start_date`. +Extended Usage: + Examples: + > SELECT date_add('2016-07-30', 1); + 2016-07-31 + + +-- !query 160 +DESC FUNCTION EXTENDED date_format +-- !query 160 schema +struct +-- !query 160 output +Function: date_format +Class: org.apache.spark.sql.catalyst.expressions.DateFormatClass +Usage: date_format(timestamp, fmt) - Converts `timestamp` to a value of string in the format specified by the date format `fmt`. +Extended Usage: + Examples: + > SELECT date_format('2016-04-08', 'y'); + 2016 + + +-- !query 161 +DESC FUNCTION EXTENDED date_sub +-- !query 161 schema +struct +-- !query 161 output +Function: date_sub +Class: org.apache.spark.sql.catalyst.expressions.DateSub +Usage: date_sub(start_date, num_days) - Returns the date that is `num_days` before `start_date`. +Extended Usage: + Examples: + > SELECT date_sub('2016-07-30', 1); + 2016-07-29 + + +-- !query 162 +DESC FUNCTION EXTENDED day +-- !query 162 schema +struct +-- !query 162 output +Function: day +Class: org.apache.spark.sql.catalyst.expressions.DayOfMonth +Usage: day(date) - Returns the day of month of the date/timestamp. +Extended Usage: + Examples: + > SELECT day('2009-07-30'); + 30 + + +-- !query 163 +DESC FUNCTION EXTENDED dayofyear +-- !query 163 schema +struct +-- !query 163 output +Function: dayofyear +Class: org.apache.spark.sql.catalyst.expressions.DayOfYear +Usage: dayofyear(date) - Returns the day of year of the date/timestamp. 
+Extended Usage: + Examples: + > SELECT dayofyear('2016-04-09'); + 100 + + +-- !query 164 +DESC FUNCTION EXTENDED dayofmonth +-- !query 164 schema +struct +-- !query 164 output +Function: dayofmonth +Class: org.apache.spark.sql.catalyst.expressions.DayOfMonth +Usage: dayofmonth(date) - Returns the day of month of the date/timestamp. +Extended Usage: + Examples: + > SELECT dayofmonth('2009-07-30'); + 30 + + +-- !query 165 +DESC FUNCTION EXTENDED from_unixtime +-- !query 165 schema +struct +-- !query 165 output +Function: from_unixtime +Class: org.apache.spark.sql.catalyst.expressions.FromUnixTime +Usage: from_unixtime(unix_time, format) - Returns `unix_time` in the specified `format`. +Extended Usage: + Examples: + > SELECT from_unixtime(0, 'yyyy-MM-dd HH:mm:ss'); + 1970-01-01 00:00:00 + + +-- !query 166 +DESC FUNCTION EXTENDED from_utc_timestamp +-- !query 166 schema +struct +-- !query 166 output +Function: from_utc_timestamp +Class: org.apache.spark.sql.catalyst.expressions.FromUTCTimestamp +Usage: from_utc_timestamp(timestamp, timezone) - Given a timestamp, which corresponds to a certain time of day in UTC, returns another timestamp that corresponds to the same time of day in the given timezone. +Extended Usage: + Examples: + > SELECT from_utc_timestamp('2016-08-31', 'Asia/Seoul'); + 2016-08-31 09:00:00 + + +-- !query 167 +DESC FUNCTION EXTENDED hour +-- !query 167 schema +struct +-- !query 167 output +Function: hour +Class: org.apache.spark.sql.catalyst.expressions.Hour +Usage: hour(timestamp) - Returns the hour component of the string/timestamp. +Extended Usage: + Examples: + > SELECT hour('2009-07-30 12:58:59'); + 12 + + +-- !query 168 +DESC FUNCTION EXTENDED last_day +-- !query 168 schema +struct +-- !query 168 output +Function: last_day +Class: org.apache.spark.sql.catalyst.expressions.LastDay +Usage: last_day(date) - Returns the last day of the month which the date belongs to. 
+Extended Usage: + Examples: + > SELECT last_day('2009-01-12'); + 2009-01-31 + + +-- !query 169 +DESC FUNCTION EXTENDED minute +-- !query 169 schema +struct +-- !query 169 output +Function: minute +Class: org.apache.spark.sql.catalyst.expressions.Minute +Usage: minute(timestamp) - Returns the minute component of the string/timestamp. +Extended Usage: + Examples: + > SELECT minute('2009-07-30 12:58:59'); + 58 + + +-- !query 170 +DESC FUNCTION EXTENDED month +-- !query 170 schema +struct +-- !query 170 output +Function: month +Class: org.apache.spark.sql.catalyst.expressions.Month +Usage: month(date) - Returns the month component of the date/timestamp. +Extended Usage: + Examples: + > SELECT month('2016-07-30'); + 7 + + +-- !query 171 +DESC FUNCTION EXTENDED months_between +-- !query 171 schema +struct +-- !query 171 output +Function: months_between +Class: org.apache.spark.sql.catalyst.expressions.MonthsBetween +Usage: months_between(timestamp1, timestamp2) - Returns number of months between `timestamp1` and `timestamp2`. +Extended Usage: + Examples: + > SELECT months_between('1997-02-28 10:30:00', '1996-10-30'); + 3.94959677 + + +-- !query 172 +DESC FUNCTION EXTENDED next_day +-- !query 172 schema +struct +-- !query 172 output +Function: next_day +Class: org.apache.spark.sql.catalyst.expressions.NextDay +Usage: next_day(start_date, day_of_week) - Returns the first date which is later than `start_date` and named as indicated. +Extended Usage: + Examples: + > SELECT next_day('2015-01-14', 'TU'); + 2015-01-20 + + +-- !query 173 +DESC FUNCTION EXTENDED now +-- !query 173 schema +struct +-- !query 173 output +Function: now +Class: org.apache.spark.sql.catalyst.expressions.CurrentTimestamp +Usage: now() - Returns the current timestamp at the start of query evaluation. +Extended Usage: + No example/argument for now. 
+ + +-- !query 174 +DESC FUNCTION EXTENDED quarter +-- !query 174 schema +struct +-- !query 174 output +Function: quarter +Class: org.apache.spark.sql.catalyst.expressions.Quarter +Usage: quarter(date) - Returns the quarter of the year for date, in the range 1 to 4. +Extended Usage: + Examples: + > SELECT quarter('2016-08-31'); + 3 + + +-- !query 175 +DESC FUNCTION EXTENDED second +-- !query 175 schema +struct +-- !query 175 output +Function: second +Class: org.apache.spark.sql.catalyst.expressions.Second +Usage: second(timestamp) - Returns the second component of the string/timestamp. +Extended Usage: + Examples: + > SELECT second('2009-07-30 12:58:59'); + 59 + + +-- !query 176 +DESC FUNCTION EXTENDED to_timestamp +-- !query 176 schema +struct +-- !query 176 output +Function: to_timestamp +Class: org.apache.spark.sql.catalyst.expressions.ParseToTimestamp +Usage: to_timestamp(timestamp, fmt) - Parses the `left` expression with the `format` expression to a timestamp. Returns null with invalid input. +Extended Usage: + Examples: + > SELECT to_timestamp('2016-12-31', 'yyyy-MM-dd'); + 2016-12-31 00:00:00.0 + + +-- !query 177 +DESC FUNCTION EXTENDED to_date +-- !query 177 schema +struct +-- !query 177 output +Function: to_date +Class: org.apache.spark.sql.catalyst.expressions.ParseToDate +Usage: to_date(date_str, fmt) - Parses the `left` expression with the `fmt` expression. Returns null with invalid input. +Extended Usage: + Examples: + > SELECT to_date('2016-12-31', 'yyyy-MM-dd'); + 2016-12-31 + + +-- !query 178 +DESC FUNCTION EXTENDED to_unix_timestamp +-- !query 178 schema +struct +-- !query 178 output +Function: to_unix_timestamp +Class: org.apache.spark.sql.catalyst.expressions.ToUnixTimestamp +Usage: to_unix_timestamp(expr[, pattern]) - Returns the UNIX timestamp of the give time. 
+Extended Usage: + Examples: + > SELECT to_unix_timestamp('2016-04-08', 'yyyy-MM-dd'); + 1460041200 + + +-- !query 179 +DESC FUNCTION EXTENDED to_utc_timestamp +-- !query 179 schema +struct +-- !query 179 output +Function: to_utc_timestamp +Class: org.apache.spark.sql.catalyst.expressions.ToUTCTimestamp +Usage: to_utc_timestamp(timestamp, timezone) - Given a timestamp, which corresponds to a certain time of day in the given timezone, returns another timestamp that corresponds to the same time of day in UTC. +Extended Usage: + Examples: + > SELECT to_utc_timestamp('2016-08-31', 'Asia/Seoul'); + 2016-08-30 15:00:00 + + +-- !query 180 +DESC FUNCTION EXTENDED trunc +-- !query 180 schema +struct +-- !query 180 output +Function: trunc +Class: org.apache.spark.sql.catalyst.expressions.TruncDate +Usage: trunc(date, fmt) - Returns `date` with the time portion of the day truncated to the unit specified by the format model `fmt`. +Extended Usage: + Examples: + > SELECT trunc('2009-02-12', 'MM'); + 2009-02-01 + > SELECT trunc('2015-10-27', 'YEAR'); + 2015-01-01 + + +-- !query 181 +DESC FUNCTION EXTENDED unix_timestamp +-- !query 181 schema +struct +-- !query 181 output +Function: unix_timestamp +Class: org.apache.spark.sql.catalyst.expressions.UnixTimestamp +Usage: unix_timestamp([expr[, pattern]]) - Returns the UNIX timestamp of current or specified time. +Extended Usage: + Examples: + > SELECT unix_timestamp(); + 1476884637 + > SELECT unix_timestamp('2016-04-08', 'yyyy-MM-dd'); + 1460041200 + + +-- !query 182 +DESC FUNCTION EXTENDED weekofyear +-- !query 182 schema +struct +-- !query 182 output +Function: weekofyear +Class: org.apache.spark.sql.catalyst.expressions.WeekOfYear +Usage: weekofyear(date) - Returns the week of the year of the given date. 
+Extended Usage: + Examples: + > SELECT weekofyear('2008-02-20'); + 8 + + +-- !query 183 +DESC FUNCTION EXTENDED year +-- !query 183 schema +struct +-- !query 183 output +Function: year +Class: org.apache.spark.sql.catalyst.expressions.Year +Usage: year(date) - Returns the year component of the date/timestamp. +Extended Usage: + Examples: + > SELECT year('2016-07-30'); + 2016 + + +-- !query 184 +DESC FUNCTION EXTENDED window +-- !query 184 schema +struct +-- !query 184 output +Function: window +Class: org.apache.spark.sql.catalyst.expressions.TimeWindow +Usage: N/A. +Extended Usage:N/A. + + +-- !query 185 +DESC FUNCTION EXTENDED array +-- !query 185 schema +struct +-- !query 185 output +Function: array +Class: org.apache.spark.sql.catalyst.expressions.CreateArray +Usage: array(expr, ...) - Returns an array with the given elements. +Extended Usage: + Examples: + > SELECT array(1, 2, 3); + [1,2,3] + + +-- !query 186 +DESC FUNCTION EXTENDED array_contains +-- !query 186 schema +struct +-- !query 186 output +Function: array_contains +Class: org.apache.spark.sql.catalyst.expressions.ArrayContains +Usage: array_contains(array, value) - Returns true if the array contains the value. +Extended Usage: + Examples: + > SELECT array_contains(array(1, 2, 3), 2); + true + + +-- !query 187 +DESC FUNCTION EXTENDED map +-- !query 187 schema +struct +-- !query 187 output +Function: map +Class: org.apache.spark.sql.catalyst.expressions.CreateMap +Usage: map(key0, value0, key1, value1, ...) - Creates a map with the given key/value pairs. +Extended Usage: + Examples: + > SELECT map(1.0, '2', 3.0, '4'); + {1.0:"2",3.0:"4"} + + +-- !query 188 +DESC FUNCTION EXTENDED named_struct +-- !query 188 schema +struct +-- !query 188 output +Function: named_struct +Class: org.apache.spark.sql.catalyst.expressions.CreateNamedStruct +Usage: named_struct(name1, val1, name2, val2, ...) - Creates a struct with the given field names and values. 
+Extended Usage: + Examples: + > SELECT named_struct("a", 1, "b", 2, "c", 3); + {"a":1,"b":2,"c":3} + + +-- !query 189 +DESC FUNCTION EXTENDED map_keys +-- !query 189 schema +struct +-- !query 189 output +Function: map_keys +Class: org.apache.spark.sql.catalyst.expressions.MapKeys +Usage: map_keys(map) - Returns an unordered array containing the keys of the map. +Extended Usage: + Examples: + > SELECT map_keys(map(1, 'a', 2, 'b')); + [1,2] + + +-- !query 190 +DESC FUNCTION EXTENDED map_values +-- !query 190 schema +struct +-- !query 190 output +Function: map_values +Class: org.apache.spark.sql.catalyst.expressions.MapValues +Usage: map_values(map) - Returns an unordered array containing the values of the map. +Extended Usage: + Examples: + > SELECT map_values(map(1, 'a', 2, 'b')); + ["a","b"] + + +-- !query 191 +DESC FUNCTION EXTENDED size +-- !query 191 schema +struct +-- !query 191 output +Function: size +Class: org.apache.spark.sql.catalyst.expressions.Size +Usage: size(expr) - Returns the size of an array or a map. Returns -1 if null. +Extended Usage: + Examples: + > SELECT size(array('b', 'd', 'c', 'a')); + 4 + + +-- !query 192 +DESC FUNCTION EXTENDED sort_array +-- !query 192 schema +struct +-- !query 192 output +Function: sort_array +Class: org.apache.spark.sql.catalyst.expressions.SortArray +Usage: sort_array(array[, ascendingOrder]) - Sorts the input array in ascending or descending order according to the natural ordering of the array elements. +Extended Usage: + Examples: + > SELECT sort_array(array('b', 'd', 'c', 'a'), true); + ["a","b","c","d"] + + +-- !query 193 +DESC FUNCTION EXTENDED struct +-- !query 193 schema +struct +-- !query 193 output +Function: struct +Class: org.apache.spark.sql.catalyst.expressions.NamedStruct +Usage: struct(col1, col2, col3, ...) - Creates a struct with the given field values. 
+Extended Usage: + + +-- !query 194 +DESC FUNCTION EXTENDED assert_true +-- !query 194 schema +struct +-- !query 194 output +Function: assert_true +Class: org.apache.spark.sql.catalyst.expressions.AssertTrue +Usage: assert_true(expr) - Throws an exception if `expr` is not true. +Extended Usage: + Examples: + > SELECT assert_true(0 < 1); + NULL + + +-- !query 195 +DESC FUNCTION EXTENDED crc32 +-- !query 195 schema +struct +-- !query 195 output +Function: crc32 +Class: org.apache.spark.sql.catalyst.expressions.Crc32 +Usage: crc32(expr) - Returns a cyclic redundancy check value of the `expr` as a bigint. +Extended Usage: + Examples: + > SELECT crc32('Spark'); + 1557323817 + + +-- !query 196 +DESC FUNCTION EXTENDED md5 +-- !query 196 schema +struct +-- !query 196 output +Function: md5 +Class: org.apache.spark.sql.catalyst.expressions.Md5 +Usage: md5(expr) - Returns an MD5 128-bit checksum as a hex string of `expr`. +Extended Usage: + Examples: + > SELECT md5('Spark'); + 8cde774d6f7333752ed72cacddb05126 + + +-- !query 197 +DESC FUNCTION EXTENDED hash +-- !query 197 schema +struct +-- !query 197 output +Function: hash +Class: org.apache.spark.sql.catalyst.expressions.Murmur3Hash +Usage: hash(expr1, expr2, ...) - Returns a hash value of the arguments. +Extended Usage: + Examples: + > SELECT hash('Spark', array(123), 2); + -1321691492 + + +-- !query 198 +DESC FUNCTION EXTENDED sha +-- !query 198 schema +struct +-- !query 198 output +Function: sha +Class: org.apache.spark.sql.catalyst.expressions.Sha1 +Usage: sha(expr) - Returns a sha1 hash value as a hex string of the `expr`. +Extended Usage: + Examples: + > SELECT sha('Spark'); + 85f5955f4b27a9a4c2aab6ffe5d7189fc298b92c + + +-- !query 199 +DESC FUNCTION EXTENDED sha1 +-- !query 199 schema +struct +-- !query 199 output +Function: sha1 +Class: org.apache.spark.sql.catalyst.expressions.Sha1 +Usage: sha1(expr) - Returns a sha1 hash value as a hex string of the `expr`. 
+Extended Usage: + Examples: + > SELECT sha1('Spark'); + 85f5955f4b27a9a4c2aab6ffe5d7189fc298b92c + + +-- !query 200 +DESC FUNCTION EXTENDED sha2 +-- !query 200 schema +struct +-- !query 200 output +Function: sha2 +Class: org.apache.spark.sql.catalyst.expressions.Sha2 +Usage: + sha2(expr, bitLength) - Returns a checksum of SHA-2 family as a hex string of `expr`. + SHA-224, SHA-256, SHA-384, and SHA-512 are supported. Bit length of 0 is equivalent to 256. + +Extended Usage: + Examples: + > SELECT sha2('Spark', 256); + 529bc3b07127ecb7e53a4dcf1991d9152c24537d919178022b2c42657f79a26b + + +-- !query 201 +DESC FUNCTION EXTENDED spark_partition_id +-- !query 201 schema +struct +-- !query 201 output +Function: spark_partition_id +Class: org.apache.spark.sql.catalyst.expressions.SparkPartitionID +Usage: spark_partition_id() - Returns the current partition id. +Extended Usage: + No example/argument for spark_partition_id. + + +-- !query 202 +DESC FUNCTION EXTENDED input_file_name +-- !query 202 schema +struct +-- !query 202 output +Function: input_file_name +Class: org.apache.spark.sql.catalyst.expressions.InputFileName +Usage: input_file_name() - Returns the name of the file being read, or empty string if not available. +Extended Usage: + No example/argument for input_file_name. + + +-- !query 203 +DESC FUNCTION EXTENDED input_file_block_start +-- !query 203 schema +struct +-- !query 203 output +Function: input_file_block_start +Class: org.apache.spark.sql.catalyst.expressions.InputFileBlockStart +Usage: input_file_block_start() - Returns the start offset of the block being read, or -1 if not available. +Extended Usage: + No example/argument for input_file_block_start. 
+ + +-- !query 204 +DESC FUNCTION EXTENDED input_file_block_length +-- !query 204 schema +struct +-- !query 204 output +Function: input_file_block_length +Class: org.apache.spark.sql.catalyst.expressions.InputFileBlockLength +Usage: input_file_block_length() - Returns the length of the block being read, or -1 if not available. +Extended Usage: + No example/argument for input_file_block_length. + + +-- !query 205 +DESC FUNCTION EXTENDED monotonically_increasing_id +-- !query 205 schema +struct +-- !query 205 output +Function: monotonically_increasing_id +Class: org.apache.spark.sql.catalyst.expressions.MonotonicallyIncreasingID +Usage: + monotonically_increasing_id() - Returns monotonically increasing 64-bit integers. The generated ID is guaranteed + to be monotonically increasing and unique, but not consecutive. The current implementation + puts the partition ID in the upper 31 bits, and the lower 33 bits represent the record number + within each partition. The assumption is that the data frame has less than 1 billion + partitions, and each partition has less than 8 billion records. + +Extended Usage: + No example/argument for monotonically_increasing_id. + + +-- !query 206 +DESC FUNCTION EXTENDED current_database +-- !query 206 schema +struct +-- !query 206 output +Function: current_database +Class: org.apache.spark.sql.catalyst.expressions.CurrentDatabase +Usage: current_database() - Returns the current database. +Extended Usage: + Examples: + > SELECT current_database(); + default + + +-- !query 207 +DESC FUNCTION EXTENDED reflect +-- !query 207 schema +struct +-- !query 207 output +Function: reflect +Class: org.apache.spark.sql.catalyst.expressions.CallMethodViaReflection +Usage: reflect(class, method[, arg1[, arg2 ..]]) - Calls a method with reflection. 
+Extended Usage: + Examples: + > SELECT reflect('java.util.UUID', 'randomUUID'); + c33fb387-8500-4bfa-81d2-6e0e3e930df2 + > SELECT reflect('java.util.UUID', 'fromString', 'a5cf6c42-0c85-418f-af6c-3e4e5b1328f2'); + a5cf6c42-0c85-418f-af6c-3e4e5b1328f2 + + +-- !query 208 +DESC FUNCTION EXTENDED java_method +-- !query 208 schema +struct +-- !query 208 output +Function: java_method +Class: org.apache.spark.sql.catalyst.expressions.CallMethodViaReflection +Usage: java_method(class, method[, arg1[, arg2 ..]]) - Calls a method with reflection. +Extended Usage: + Examples: + > SELECT java_method('java.util.UUID', 'randomUUID'); + c33fb387-8500-4bfa-81d2-6e0e3e930df2 + > SELECT java_method('java.util.UUID', 'fromString', 'a5cf6c42-0c85-418f-af6c-3e4e5b1328f2'); + a5cf6c42-0c85-418f-af6c-3e4e5b1328f2 + + +-- !query 209 +DESC FUNCTION EXTENDED cube +-- !query 209 schema +struct +-- !query 209 output +Function: cube +Class: org.apache.spark.sql.catalyst.expressions.Cube +Usage: N/A. +Extended Usage:N/A. + + +-- !query 210 +DESC FUNCTION EXTENDED rollup +-- !query 210 schema +struct +-- !query 210 output +Function: rollup +Class: org.apache.spark.sql.catalyst.expressions.Rollup +Usage: N/A. +Extended Usage:N/A. + + +-- !query 211 +DESC FUNCTION EXTENDED grouping +-- !query 211 schema +struct +-- !query 211 output +Function: grouping +Class: org.apache.spark.sql.catalyst.expressions.Grouping +Usage: N/A. +Extended Usage:N/A. + + +-- !query 212 +DESC FUNCTION EXTENDED grouping_id +-- !query 212 schema +struct +-- !query 212 output +Function: grouping_id +Class: org.apache.spark.sql.catalyst.expressions.GroupingID +Usage: N/A. +Extended Usage:N/A. + + +-- !query 213 +DESC FUNCTION EXTENDED lead +-- !query 213 schema +struct +-- !query 213 output +Function: lead +Class: org.apache.spark.sql.catalyst.expressions.Lead +Usage: + lead(input[, offset[, default]]) - Returns the value of `input` at the `offset`th row + after the current row in the window. 
The default value of `offset` is 1 and the default + value of `default` is null. If the value of `input` at the `offset`th row is null, + null is returned. If there is no such an offset row (e.g., when the offset is 1, the last + row of the window does not have any subsequent row), `default` is returned. + +Extended Usage: + No example/argument for lead. + + +-- !query 214 +DESC FUNCTION EXTENDED lag +-- !query 214 schema +struct +-- !query 214 output +Function: lag +Class: org.apache.spark.sql.catalyst.expressions.Lag +Usage: + lag(input[, offset[, default]]) - Returns the value of `input` at the `offset`th row + before the current row in the window. The default value of `offset` is 1 and the default + value of `default` is null. If the value of `input` at the `offset`th row is null, + null is returned. If there is no such offset row (e.g., when the offset is 1, the first + row of the window does not have any previous row), `default` is returned. + +Extended Usage: + No example/argument for lag. + + +-- !query 215 +DESC FUNCTION EXTENDED row_number +-- !query 215 schema +struct +-- !query 215 output +Function: row_number +Class: org.apache.spark.sql.catalyst.expressions.RowNumber +Usage: + row_number() - Assigns a unique, sequential number to each row, starting with one, + according to the ordering of rows within the window partition. + +Extended Usage: + No example/argument for row_number. + + +-- !query 216 +DESC FUNCTION EXTENDED cume_dist +-- !query 216 schema +struct +-- !query 216 output +Function: cume_dist +Class: org.apache.spark.sql.catalyst.expressions.CumeDist +Usage: + cume_dist() - Computes the position of a value relative to all values in the partition. + +Extended Usage: + No example/argument for cume_dist. 
+ + +-- !query 217 +DESC FUNCTION EXTENDED ntile +-- !query 217 schema +struct +-- !query 217 output +Function: ntile +Class: org.apache.spark.sql.catalyst.expressions.NTile +Usage: + ntile(n) - Divides the rows for each window partition into `n` buckets ranging + from 1 to at most `n`. + +Extended Usage: + No example/argument for ntile. + + +-- !query 218 +DESC FUNCTION EXTENDED rank +-- !query 218 schema +struct +-- !query 218 output +Function: rank +Class: org.apache.spark.sql.catalyst.expressions.Rank +Usage: + rank() - Computes the rank of a value in a group of values. The result is one plus the number + of rows preceding or equal to the current row in the ordering of the partition. The values + will produce gaps in the sequence. + +Extended Usage: + No example/argument for rank. + + +-- !query 219 +DESC FUNCTION EXTENDED dense_rank +-- !query 219 schema +struct +-- !query 219 output +Function: dense_rank +Class: org.apache.spark.sql.catalyst.expressions.DenseRank +Usage: + dense_rank() - Computes the rank of a value in a group of values. The result is one plus the + previously assigned rank value. Unlike the function rank, dense_rank will not produce gaps + in the ranking sequence. + +Extended Usage: + No example/argument for dense_rank. + + +-- !query 220 +DESC FUNCTION EXTENDED percent_rank +-- !query 220 schema +struct +-- !query 220 output +Function: percent_rank +Class: org.apache.spark.sql.catalyst.expressions.PercentRank +Usage: + percent_rank() - Computes the percentage ranking of a value in a group of values. + +Extended Usage: + No example/argument for percent_rank. + + +-- !query 221 +DESC FUNCTION EXTENDED and +-- !query 221 schema +struct +-- !query 221 output +Function: and +Class: org.apache.spark.sql.catalyst.expressions.And +Usage: expr1 and expr2 - Logical AND. +Extended Usage: + No example/argument for and. 
+ + +-- !query 222 +DESC FUNCTION EXTENDED in +-- !query 222 schema +struct +-- !query 222 output +Function: in +Class: org.apache.spark.sql.catalyst.expressions.In +Usage: expr1 in(expr2, expr3, ...) - Returns true if `expr` equals to any valN. +Extended Usage: + No example/argument for in. + + +-- !query 223 +DESC FUNCTION EXTENDED not +-- !query 223 schema +struct +-- !query 223 output +Function: not +Class: org.apache.spark.sql.catalyst.expressions.Not +Usage: not expr - Logical not. +Extended Usage: + No example/argument for not. + + +-- !query 224 +DESC FUNCTION EXTENDED or +-- !query 224 schema +struct +-- !query 224 output +Function: or +Class: org.apache.spark.sql.catalyst.expressions.Or +Usage: expr1 or expr2 - Logical OR. +Extended Usage: + No example/argument for or. + + +-- !query 225 +DESC FUNCTION EXTENDED <=> +-- !query 225 schema +struct +-- !query 225 output +Function: <=> +Class: org.apache.spark.sql.catalyst.expressions.EqualNullSafe +Usage: + expr1 <=> expr2 - Returns same result as the EQUAL(=) operator for non-null operands, + but returns true if both are null, false if one of the them is null. + +Extended Usage: + No example/argument for <=>. + + +-- !query 226 +DESC FUNCTION EXTENDED = +-- !query 226 schema +struct +-- !query 226 output +Function: = +Class: org.apache.spark.sql.catalyst.expressions.EqualTo +Usage: expr1 = expr2 - Returns true if `expr1` equals `expr2`, or false otherwise. +Extended Usage: + No example/argument for =. + + +-- !query 227 +DESC FUNCTION EXTENDED == +-- !query 227 schema +struct +-- !query 227 output +Function: == +Class: org.apache.spark.sql.catalyst.expressions.EqualTo +Usage: expr1 == expr2 - Returns true if `expr1` equals `expr2`, or false otherwise. +Extended Usage: + No example/argument for ==. 
+ + +-- !query 228 +DESC FUNCTION EXTENDED > +-- !query 228 schema +struct +-- !query 228 output +Function: > +Class: org.apache.spark.sql.catalyst.expressions.GreaterThan +Usage: expr1 > expr2 - Returns true if `expr1` is greater than `expr2`. +Extended Usage: + No example/argument for >. + + +-- !query 229 +DESC FUNCTION EXTENDED >= +-- !query 229 schema +struct +-- !query 229 output +Function: >= +Class: org.apache.spark.sql.catalyst.expressions.GreaterThanOrEqual +Usage: expr1 >= expr2 - Returns true if `expr1` is greater than or equal to `expr2`. +Extended Usage: + No example/argument for >=. + + +-- !query 230 +DESC FUNCTION EXTENDED < +-- !query 230 schema +struct +-- !query 230 output +Function: < +Class: org.apache.spark.sql.catalyst.expressions.LessThan +Usage: expr1 < expr2 - Returns true if `expr1` is less than `expr2`. +Extended Usage: + No example/argument for <. + + +-- !query 231 +DESC FUNCTION EXTENDED <= +-- !query 231 schema +struct +-- !query 231 output +Function: <= +Class: org.apache.spark.sql.catalyst.expressions.LessThanOrEqual +Usage: expr1 <= expr2 - Returns true if `expr1` is less than or equal to `expr2`. +Extended Usage: + No example/argument for <=. + + +-- !query 232 +DESC FUNCTION EXTENDED ! +-- !query 232 schema +struct +-- !query 232 output +Function: ! +Class: org.apache.spark.sql.catalyst.expressions.Not +Usage: ! expr - Logical not. +Extended Usage: + No example/argument for !. + + +-- !query 233 +DESC FUNCTION EXTENDED & +-- !query 233 schema +struct +-- !query 233 output +Function: & +Class: org.apache.spark.sql.catalyst.expressions.BitwiseAnd +Usage: expr1 & expr2 - Returns the result of bitwise AND of `expr1` and `expr2`. +Extended Usage: + Examples: + > SELECT 3 & 5; + 1 + + +-- !query 234 +DESC FUNCTION EXTENDED ~ +-- !query 234 schema +struct +-- !query 234 output +Function: ~ +Class: org.apache.spark.sql.catalyst.expressions.BitwiseNot +Usage: ~ expr - Returns the result of bitwise NOT of `expr`. 
+Extended Usage: + Examples: + > SELECT ~ 0; + -1 + + +-- !query 235 +DESC FUNCTION EXTENDED | +-- !query 235 schema +struct +-- !query 235 output +Function: | +Class: org.apache.spark.sql.catalyst.expressions.BitwiseOr +Usage: expr1 | expr2 - Returns the result of bitwise OR of `expr1` and `expr2`. +Extended Usage: + Examples: + > SELECT 3 | 5; + 7 + + +-- !query 236 +DESC FUNCTION EXTENDED ^ +-- !query 236 schema +struct +-- !query 236 output +Function: ^ +Class: org.apache.spark.sql.catalyst.expressions.BitwiseXor +Usage: expr1 ^ expr2 - Returns the result of bitwise exclusive OR of `expr1` and `expr2`. +Extended Usage: + Examples: + > SELECT 3 ^ 5; + 2 + + +-- !query 237 +DESC FUNCTION EXTENDED to_json +-- !query 237 schema +struct +-- !query 237 output +Function: to_json +Class: org.apache.spark.sql.catalyst.expressions.StructsToJson +Usage: to_json(expr[, options]) - Returns a json string with a given struct value +Extended Usage: + Examples: + > SELECT to_json(named_struct('a', 1, 'b', 2)); + {"a":1,"b":2} + > SELECT to_json(named_struct('time', to_timestamp('2015-08-26', 'yyyy-MM-dd')), map('timestampFormat', 'dd/MM/yyyy')); + {"time":"26/08/2015"} + > SELECT to_json(array(named_struct('a', 1, 'b', 2)); + [{"a":1,"b":2}] + + +-- !query 238 +DESC FUNCTION EXTENDED from_json +-- !query 238 schema +struct +-- !query 238 output +Function: from_json +Class: org.apache.spark.sql.catalyst.expressions.JsonToStructs +Usage: from_json(jsonStr, schema[, options]) - Returns a struct value with the given `jsonStr` and `schema`. 
+Extended Usage: + Examples: + > SELECT from_json('{"a":1, "b":0.8}', 'a INT, b DOUBLE'); + {"a":1, "b":0.8} + > SELECT from_json('{"time":"26/08/2015"}', 'time Timestamp', map('timestampFormat', 'dd/MM/yyyy')); + {"time":"2015-08-26 00:00:00.0"} + + +-- !query 239 +DESC FUNCTION EXTENDED boolean +-- !query 239 schema +struct +-- !query 239 output +Function: boolean +Class: org.apache.spark.sql.catalyst.expressions.Cast +Usage: boolean(expr AS type) - Casts the value `expr` to the target data type `type`. +Extended Usage: + Examples: + > SELECT boolean('10' as int); + 10 + + +-- !query 240 +DESC FUNCTION EXTENDED tinyint +-- !query 240 schema +struct +-- !query 240 output +Function: tinyint +Class: org.apache.spark.sql.catalyst.expressions.Cast +Usage: tinyint(expr AS type) - Casts the value `expr` to the target data type `type`. +Extended Usage: + Examples: + > SELECT tinyint('10' as int); + 10 + + +-- !query 241 +DESC FUNCTION EXTENDED smallint +-- !query 241 schema +struct +-- !query 241 output +Function: smallint +Class: org.apache.spark.sql.catalyst.expressions.Cast +Usage: smallint(expr AS type) - Casts the value `expr` to the target data type `type`. +Extended Usage: + Examples: + > SELECT smallint('10' as int); + 10 + + +-- !query 242 +DESC FUNCTION EXTENDED int +-- !query 242 schema +struct +-- !query 242 output +Function: int +Class: org.apache.spark.sql.catalyst.expressions.Cast +Usage: int(expr AS type) - Casts the value `expr` to the target data type `type`. +Extended Usage: + Examples: + > SELECT int('10' as int); + 10 + + +-- !query 243 +DESC FUNCTION EXTENDED bigint +-- !query 243 schema +struct +-- !query 243 output +Function: bigint +Class: org.apache.spark.sql.catalyst.expressions.Cast +Usage: bigint(expr AS type) - Casts the value `expr` to the target data type `type`. 
+Extended Usage: + Examples: + > SELECT bigint('10' as int); + 10 + + +-- !query 244 +DESC FUNCTION EXTENDED float +-- !query 244 schema +struct +-- !query 244 output +Function: float +Class: org.apache.spark.sql.catalyst.expressions.Cast +Usage: float(expr AS type) - Casts the value `expr` to the target data type `type`. +Extended Usage: + Examples: + > SELECT float('10' as int); + 10 + + +-- !query 245 +DESC FUNCTION EXTENDED double +-- !query 245 schema +struct +-- !query 245 output +Function: double +Class: org.apache.spark.sql.catalyst.expressions.Cast +Usage: double(expr AS type) - Casts the value `expr` to the target data type `type`. +Extended Usage: + Examples: + > SELECT double('10' as int); + 10 + + +-- !query 246 +DESC FUNCTION EXTENDED decimal +-- !query 246 schema +struct +-- !query 246 output +Function: decimal +Class: org.apache.spark.sql.catalyst.expressions.Cast +Usage: decimal(expr AS type) - Casts the value `expr` to the target data type `type`. +Extended Usage: + Examples: + > SELECT decimal('10' as int); + 10 + + +-- !query 247 +DESC FUNCTION EXTENDED date +-- !query 247 schema +struct +-- !query 247 output +Function: date +Class: org.apache.spark.sql.catalyst.expressions.Cast +Usage: date(expr AS type) - Casts the value `expr` to the target data type `type`. +Extended Usage: + Examples: + > SELECT date('10' as int); + 10 + + +-- !query 248 +DESC FUNCTION EXTENDED timestamp +-- !query 248 schema +struct +-- !query 248 output +Function: timestamp +Class: org.apache.spark.sql.catalyst.expressions.Cast +Usage: timestamp(expr AS type) - Casts the value `expr` to the target data type `type`. +Extended Usage: + Examples: + > SELECT timestamp('10' as int); + 10 + + +-- !query 249 +DESC FUNCTION EXTENDED binary +-- !query 249 schema +struct +-- !query 249 output +Function: binary +Class: org.apache.spark.sql.catalyst.expressions.Cast +Usage: binary(expr AS type) - Casts the value `expr` to the target data type `type`. 
+Extended Usage: + Examples: + > SELECT binary('10' as int); + 10 + + +-- !query 250 +DESC FUNCTION EXTENDED string +-- !query 250 schema +struct +-- !query 250 output +Function: string +Class: org.apache.spark.sql.catalyst.expressions.Cast +Usage: string(expr AS type) - Casts the value `expr` to the target data type `type`. +Extended Usage: + Examples: + > SELECT string('10' as int); + 10 diff --git a/sql/core/src/test/resources/sql-tests/results/describe-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/describe-functions.sql.out new file mode 100644 index 0000000000000..a8b1bcca9a667 --- /dev/null +++ b/sql/core/src/test/resources/sql-tests/results/describe-functions.sql.out @@ -0,0 +1,2604 @@ +-- Automatically generated by SQLQueryTestSuite +-- Number of queries: 251 + + +-- !query 0 +DESC FUNCTION abs +-- !query 0 schema +struct +-- !query 0 output +Function: abs +Class: org.apache.spark.sql.catalyst.expressions.Abs +Usage: abs(expr) - Returns the absolute value of the numeric value. + + +-- !query 1 +DESC FUNCTION coalesce +-- !query 1 schema +struct +-- !query 1 output +Function: coalesce +Class: org.apache.spark.sql.catalyst.expressions.Coalesce +Usage: coalesce(expr1, expr2, ...) - Returns the first non-null argument if exists. Otherwise, null. + + +-- !query 2 +DESC FUNCTION explode +-- !query 2 schema +struct +-- !query 2 output +Function: explode +Class: org.apache.spark.sql.catalyst.expressions.Explode +Usage: explode(expr) - Separates the elements of array `expr` into multiple rows, or the elements of map `expr` into multiple rows and columns. + + +-- !query 3 +DESC FUNCTION explode_outer +-- !query 3 schema +struct +-- !query 3 output +Function: explode_outer +Class: org.apache.spark.sql.catalyst.expressions.Explode +Usage: explode_outer(expr) - Separates the elements of array `expr` into multiple rows, or the elements of map `expr` into multiple rows and columns. 
+ + +-- !query 4 +DESC FUNCTION greatest +-- !query 4 schema +struct +-- !query 4 output +Function: greatest +Class: org.apache.spark.sql.catalyst.expressions.Greatest +Usage: greatest(expr, ...) - Returns the greatest value of all parameters, skipping null values. + + +-- !query 5 +DESC FUNCTION if +-- !query 5 schema +struct +-- !query 5 output +Function: if +Class: org.apache.spark.sql.catalyst.expressions.If +Usage: if(expr1, expr2, expr3) - If `expr1` evaluates to true, then returns `expr2`; otherwise returns `expr3`. + + +-- !query 6 +DESC FUNCTION inline +-- !query 6 schema +struct +-- !query 6 output +Function: inline +Class: org.apache.spark.sql.catalyst.expressions.Inline +Usage: inline(expr) - Explodes an array of structs into a table. + + +-- !query 7 +DESC FUNCTION inline_outer +-- !query 7 schema +struct +-- !query 7 output +Function: inline_outer +Class: org.apache.spark.sql.catalyst.expressions.Inline +Usage: inline_outer(expr) - Explodes an array of structs into a table. + + +-- !query 8 +DESC FUNCTION isnan +-- !query 8 schema +struct +-- !query 8 output +Function: isnan +Class: org.apache.spark.sql.catalyst.expressions.IsNaN +Usage: isnan(expr) - Returns true if `expr` is NaN, or false otherwise. + + +-- !query 9 +DESC FUNCTION ifnull +-- !query 9 schema +struct +-- !query 9 output +Function: ifnull +Class: org.apache.spark.sql.catalyst.expressions.IfNull +Usage: ifnull(expr1, expr2) - Returns `expr2` if `expr1` is null, or `expr1` otherwise. + + +-- !query 10 +DESC FUNCTION isnull +-- !query 10 schema +struct +-- !query 10 output +Function: isnull +Class: org.apache.spark.sql.catalyst.expressions.IsNull +Usage: isnull(expr) - Returns true if `expr` is null, or false otherwise. + + +-- !query 11 +DESC FUNCTION isnotnull +-- !query 11 schema +struct +-- !query 11 output +Function: isnotnull +Class: org.apache.spark.sql.catalyst.expressions.IsNotNull +Usage: isnotnull(expr) - Returns true if `expr` is not null, or false otherwise. 
+ + +-- !query 12 +DESC FUNCTION least +-- !query 12 schema +struct +-- !query 12 output +Function: least +Class: org.apache.spark.sql.catalyst.expressions.Least +Usage: least(expr, ...) - Returns the least value of all parameters, skipping null values. + + +-- !query 13 +DESC FUNCTION nanvl +-- !query 13 schema +struct +-- !query 13 output +Function: nanvl +Class: org.apache.spark.sql.catalyst.expressions.NaNvl +Usage: nanvl(expr1, expr2) - Returns `expr1` if it's not NaN, or `expr2` otherwise. + + +-- !query 14 +DESC FUNCTION nullif +-- !query 14 schema +struct +-- !query 14 output +Function: nullif +Class: org.apache.spark.sql.catalyst.expressions.NullIf +Usage: nullif(expr1, expr2) - Returns null if `expr1` equals to `expr2`, or `expr1` otherwise. + + +-- !query 15 +DESC FUNCTION nvl +-- !query 15 schema +struct +-- !query 15 output +Function: nvl +Class: org.apache.spark.sql.catalyst.expressions.Nvl +Usage: nvl(expr1, expr2) - Returns `expr2` if `expr1` is null, or `expr1` otherwise. + + +-- !query 16 +DESC FUNCTION nvl2 +-- !query 16 schema +struct +-- !query 16 output +Function: nvl2 +Class: org.apache.spark.sql.catalyst.expressions.Nvl2 +Usage: nvl2(expr1, expr2, expr3) - Returns `expr2` if `expr1` is not null, or `expr3` otherwise. + + +-- !query 17 +DESC FUNCTION posexplode +-- !query 17 schema +struct +-- !query 17 output +Function: posexplode +Class: org.apache.spark.sql.catalyst.expressions.PosExplode +Usage: posexplode(expr) - Separates the elements of array `expr` into multiple rows with positions, or the elements of map `expr` into multiple rows and columns with positions. + + +-- !query 18 +DESC FUNCTION posexplode_outer +-- !query 18 schema +struct +-- !query 18 output +Function: posexplode_outer +Class: org.apache.spark.sql.catalyst.expressions.PosExplode +Usage: posexplode_outer(expr) - Separates the elements of array `expr` into multiple rows with positions, or the elements of map `expr` into multiple rows and columns with positions. 
+ + +-- !query 19 +DESC FUNCTION rand +-- !query 19 schema +struct +-- !query 19 output +Function: rand +Class: org.apache.spark.sql.catalyst.expressions.Rand +Usage: rand([seed]) - Returns a random value with independent and identically distributed (i.i.d.) uniformly distributed values in [0, 1). + + +-- !query 20 +DESC FUNCTION randn +-- !query 20 schema +struct +-- !query 20 output +Function: randn +Class: org.apache.spark.sql.catalyst.expressions.Randn +Usage: randn([seed]) - Returns a random value with independent and identically distributed (i.i.d.) values drawn from the standard normal distribution. + + +-- !query 21 +DESC FUNCTION stack +-- !query 21 schema +struct +-- !query 21 output +Function: stack +Class: org.apache.spark.sql.catalyst.expressions.Stack +Usage: stack(n, expr1, ..., exprk) - Separates `expr1`, ..., `exprk` into `n` rows. + + +-- !query 22 +DESC FUNCTION when +-- !query 22 schema +struct +-- !query 22 output +Function: when +Class: org.apache.spark.sql.catalyst.expressions.CaseWhen +Usage: CASE WHEN expr1 THEN expr2 [WHEN expr3 THEN expr4]* [ELSE expr5] END - When `expr1` = true, returns `expr2`; when `expr3` = true, return `expr4`; else return `expr5`. + + +-- !query 23 +DESC FUNCTION acos +-- !query 23 schema +struct +-- !query 23 output +Function: acos +Class: org.apache.spark.sql.catalyst.expressions.Acos +Usage: acos(expr) - Returns the inverse cosine (a.k.a. arccosine) of `expr` if -1<=`expr`<=1 or NaN otherwise. + + +-- !query 24 +DESC FUNCTION asin +-- !query 24 schema +struct +-- !query 24 output +Function: asin +Class: org.apache.spark.sql.catalyst.expressions.Asin +Usage: asin(expr) - Returns the inverse sine (a.k.a. arcsine) the arc sin of `expr` if -1<=`expr`<=1 or NaN otherwise. + + +-- !query 25 +DESC FUNCTION atan +-- !query 25 schema +struct +-- !query 25 output +Function: atan +Class: org.apache.spark.sql.catalyst.expressions.Atan +Usage: atan(expr) - Returns the inverse tangent (a.k.a. arctangent). 
+ + +-- !query 26 +DESC FUNCTION atan2 +-- !query 26 schema +struct +-- !query 26 output +Function: atan2 +Class: org.apache.spark.sql.catalyst.expressions.Atan2 +Usage: atan2(expr1, expr2) - Returns the angle in radians between the positive x-axis of a plane and the point given by the coordinates (`expr1`, `expr2`). + + +-- !query 27 +DESC FUNCTION bin +-- !query 27 schema +struct +-- !query 27 output +Function: bin +Class: org.apache.spark.sql.catalyst.expressions.Bin +Usage: bin(expr) - Returns the string representation of the long value `expr` represented in binary. + + +-- !query 28 +DESC FUNCTION bround +-- !query 28 schema +struct +-- !query 28 output +Function: bround +Class: org.apache.spark.sql.catalyst.expressions.BRound +Usage: bround(expr, d) - Returns `expr` rounded to `d` decimal places using HALF_EVEN rounding mode. + + +-- !query 29 +DESC FUNCTION cbrt +-- !query 29 schema +struct +-- !query 29 output +Function: cbrt +Class: org.apache.spark.sql.catalyst.expressions.Cbrt +Usage: cbrt(expr) - Returns the cube root of `expr`. + + +-- !query 30 +DESC FUNCTION ceil +-- !query 30 schema +struct +-- !query 30 output +Function: ceil +Class: org.apache.spark.sql.catalyst.expressions.Ceil +Usage: ceil(expr) - Returns the smallest integer not smaller than `expr`. + + +-- !query 31 +DESC FUNCTION ceiling +-- !query 31 schema +struct +-- !query 31 output +Function: ceiling +Class: org.apache.spark.sql.catalyst.expressions.Ceil +Usage: ceiling(expr) - Returns the smallest integer not smaller than `expr`. + + +-- !query 32 +DESC FUNCTION cos +-- !query 32 schema +struct +-- !query 32 output +Function: cos +Class: org.apache.spark.sql.catalyst.expressions.Cos +Usage: cos(expr) - Returns the cosine of `expr`. + + +-- !query 33 +DESC FUNCTION cosh +-- !query 33 schema +struct +-- !query 33 output +Function: cosh +Class: org.apache.spark.sql.catalyst.expressions.Cosh +Usage: cosh(expr) - Returns the hyperbolic cosine of `expr`. 
+ + +-- !query 34 +DESC FUNCTION conv +-- !query 34 schema +struct +-- !query 34 output +Function: conv +Class: org.apache.spark.sql.catalyst.expressions.Conv +Usage: conv(num, from_base, to_base) - Convert `num` from `from_base` to `to_base`. + + +-- !query 35 +DESC FUNCTION degrees +-- !query 35 schema +struct +-- !query 35 output +Function: degrees +Class: org.apache.spark.sql.catalyst.expressions.ToDegrees +Usage: degrees(expr) - Converts radians to degrees. + + +-- !query 36 +DESC FUNCTION e +-- !query 36 schema +struct +-- !query 36 output +Function: e +Class: org.apache.spark.sql.catalyst.expressions.EulerNumber +Usage: e() - Returns Euler's number, e. + + +-- !query 37 +DESC FUNCTION exp +-- !query 37 schema +struct +-- !query 37 output +Function: exp +Class: org.apache.spark.sql.catalyst.expressions.Exp +Usage: exp(expr) - Returns e to the power of `expr`. + + +-- !query 38 +DESC FUNCTION expm1 +-- !query 38 schema +struct +-- !query 38 output +Function: expm1 +Class: org.apache.spark.sql.catalyst.expressions.Expm1 +Usage: expm1(expr) - Returns exp(`expr`) - 1. + + +-- !query 39 +DESC FUNCTION floor +-- !query 39 schema +struct +-- !query 39 output +Function: floor +Class: org.apache.spark.sql.catalyst.expressions.Floor +Usage: floor(expr) - Returns the largest integer not greater than `expr`. + + +-- !query 40 +DESC FUNCTION factorial +-- !query 40 schema +struct +-- !query 40 output +Function: factorial +Class: org.apache.spark.sql.catalyst.expressions.Factorial +Usage: factorial(expr) - Returns the factorial of `expr`. `expr` is [0..20]. Otherwise, null. + + +-- !query 41 +DESC FUNCTION hex +-- !query 41 schema +struct +-- !query 41 output +Function: hex +Class: org.apache.spark.sql.catalyst.expressions.Hex +Usage: hex(expr) - Converts `expr` to hexadecimal. 
+ + +-- !query 42 +DESC FUNCTION hypot +-- !query 42 schema +struct +-- !query 42 output +Function: hypot +Class: org.apache.spark.sql.catalyst.expressions.Hypot +Usage: hypot(expr1, expr2) - Returns sqrt(`expr1`**2 + `expr2`**2). + + +-- !query 43 +DESC FUNCTION log +-- !query 43 schema +struct +-- !query 43 output +Function: log +Class: org.apache.spark.sql.catalyst.expressions.Logarithm +Usage: log(base, expr) - Returns the logarithm of `expr` with `base`. + + +-- !query 44 +DESC FUNCTION log10 +-- !query 44 schema +struct +-- !query 44 output +Function: log10 +Class: org.apache.spark.sql.catalyst.expressions.Log10 +Usage: log10(expr) - Returns the logarithm of `expr` with base 10. + + +-- !query 45 +DESC FUNCTION log1p +-- !query 45 schema +struct +-- !query 45 output +Function: log1p +Class: org.apache.spark.sql.catalyst.expressions.Log1p +Usage: log1p(expr) - Returns log(1 + `expr`). + + +-- !query 46 +DESC FUNCTION log2 +-- !query 46 schema +struct +-- !query 46 output +Function: log2 +Class: org.apache.spark.sql.catalyst.expressions.Log2 +Usage: log2(expr) - Returns the logarithm of `expr` with base 2. + + +-- !query 47 +DESC FUNCTION ln +-- !query 47 schema +struct +-- !query 47 output +Function: ln +Class: org.apache.spark.sql.catalyst.expressions.Log +Usage: ln(expr) - Returns the natural logarithm (base e) of `expr`. + + +-- !query 48 +DESC FUNCTION negative +-- !query 48 schema +struct +-- !query 48 output +Function: negative +Class: org.apache.spark.sql.catalyst.expressions.UnaryMinus +Usage: negative(expr) - Returns the negated value of `expr`. + + +-- !query 49 +DESC FUNCTION pi +-- !query 49 schema +struct +-- !query 49 output +Function: pi +Class: org.apache.spark.sql.catalyst.expressions.Pi +Usage: pi() - Returns pi. 
+ + +-- !query 50 +DESC FUNCTION pmod +-- !query 50 schema +struct +-- !query 50 output +Function: pmod +Class: org.apache.spark.sql.catalyst.expressions.Pmod +Usage: pmod(expr1, expr2) - Returns the positive value of `expr1` mod `expr2`. + + +-- !query 51 +DESC FUNCTION positive +-- !query 51 schema +struct +-- !query 51 output +Function: positive +Class: org.apache.spark.sql.catalyst.expressions.UnaryPositive +Usage: positive(expr) - Returns the value of `expr`. + + +-- !query 52 +DESC FUNCTION pow +-- !query 52 schema +struct +-- !query 52 output +Function: pow +Class: org.apache.spark.sql.catalyst.expressions.Pow +Usage: pow(expr1, expr2) - Raises `expr1` to the power of `expr2`. + + +-- !query 53 +DESC FUNCTION power +-- !query 53 schema +struct +-- !query 53 output +Function: power +Class: org.apache.spark.sql.catalyst.expressions.Pow +Usage: power(expr1, expr2) - Raises `expr1` to the power of `expr2`. + + +-- !query 54 +DESC FUNCTION radians +-- !query 54 schema +struct +-- !query 54 output +Function: radians +Class: org.apache.spark.sql.catalyst.expressions.ToRadians +Usage: radians(expr) - Converts degrees to radians. + + +-- !query 55 +DESC FUNCTION rint +-- !query 55 schema +struct +-- !query 55 output +Function: rint +Class: org.apache.spark.sql.catalyst.expressions.Rint +Usage: rint(expr) - Returns the double value that is closest in value to the argument and is equal to a mathematical integer. + + +-- !query 56 +DESC FUNCTION round +-- !query 56 schema +struct +-- !query 56 output +Function: round +Class: org.apache.spark.sql.catalyst.expressions.Round +Usage: round(expr, d) - Returns `expr` rounded to `d` decimal places using HALF_UP rounding mode. + + +-- !query 57 +DESC FUNCTION shiftleft +-- !query 57 schema +struct +-- !query 57 output +Function: shiftleft +Class: org.apache.spark.sql.catalyst.expressions.ShiftLeft +Usage: shiftleft(base, expr) - Bitwise left shift. 
+ + +-- !query 58 +DESC FUNCTION shiftright +-- !query 58 schema +struct +-- !query 58 output +Function: shiftright +Class: org.apache.spark.sql.catalyst.expressions.ShiftRight +Usage: shiftright(base, expr) - Bitwise (signed) right shift. + + +-- !query 59 +DESC FUNCTION shiftrightunsigned +-- !query 59 schema +struct +-- !query 59 output +Function: shiftrightunsigned +Class: org.apache.spark.sql.catalyst.expressions.ShiftRightUnsigned +Usage: shiftrightunsigned(base, expr) - Bitwise unsigned right shift. + + +-- !query 60 +DESC FUNCTION sign +-- !query 60 schema +struct +-- !query 60 output +Function: sign +Class: org.apache.spark.sql.catalyst.expressions.Signum +Usage: sign(expr) - Returns -1.0, 0.0 or 1.0 as `expr` is negative, 0 or positive. + + +-- !query 61 +DESC FUNCTION signum +-- !query 61 schema +struct +-- !query 61 output +Function: signum +Class: org.apache.spark.sql.catalyst.expressions.Signum +Usage: signum(expr) - Returns -1.0, 0.0 or 1.0 as `expr` is negative, 0 or positive. + + +-- !query 62 +DESC FUNCTION sin +-- !query 62 schema +struct +-- !query 62 output +Function: sin +Class: org.apache.spark.sql.catalyst.expressions.Sin +Usage: sin(expr) - Returns the sine of `expr`. + + +-- !query 63 +DESC FUNCTION sinh +-- !query 63 schema +struct +-- !query 63 output +Function: sinh +Class: org.apache.spark.sql.catalyst.expressions.Sinh +Usage: sinh(expr) - Returns the hyperbolic sine of `expr`. + + +-- !query 64 +DESC FUNCTION str_to_map +-- !query 64 schema +struct +-- !query 64 output +Function: str_to_map +Class: org.apache.spark.sql.catalyst.expressions.StringToMap +Usage: str_to_map(text[, pairDelim[, keyValueDelim]]) - Creates a map after splitting the text into key/value pairs using delimiters. Default delimiters are ',' for `pairDelim` and ':' for `keyValueDelim`. 
+ + +-- !query 65 +DESC FUNCTION sqrt +-- !query 65 schema +struct +-- !query 65 output +Function: sqrt +Class: org.apache.spark.sql.catalyst.expressions.Sqrt +Usage: sqrt(expr) - Returns the square root of `expr`. + + +-- !query 66 +DESC FUNCTION tan +-- !query 66 schema +struct +-- !query 66 output +Function: tan +Class: org.apache.spark.sql.catalyst.expressions.Tan +Usage: tan(expr) - Returns the tangent of `expr`. + + +-- !query 67 +DESC FUNCTION tanh +-- !query 67 schema +struct +-- !query 67 output +Function: tanh +Class: org.apache.spark.sql.catalyst.expressions.Tanh +Usage: tanh(expr) - Returns the hyperbolic tangent of `expr`. + + +-- !query 68 +DESC FUNCTION + +-- !query 68 schema +struct +-- !query 68 output +Function: + +Class: org.apache.spark.sql.catalyst.expressions.Add +Usage: expr1 + expr2 - Returns `expr1`+`expr2`. + + +-- !query 69 +DESC FUNCTION - +-- !query 69 schema +struct +-- !query 69 output +Function: - +Class: org.apache.spark.sql.catalyst.expressions.Subtract +Usage: expr1 - expr2 - Returns `expr1`-`expr2`. + + +-- !query 70 +DESC FUNCTION * +-- !query 70 schema +struct +-- !query 70 output +Function: * +Class: org.apache.spark.sql.catalyst.expressions.Multiply +Usage: expr1 * expr2 - Returns `expr1`*`expr2`. + + +-- !query 71 +DESC FUNCTION / +-- !query 71 schema +struct +-- !query 71 output +Function: / +Class: org.apache.spark.sql.catalyst.expressions.Divide +Usage: expr1 / expr2 - Returns `expr1`/`expr2`. It always performs floating point division. + + +-- !query 72 +DESC FUNCTION % +-- !query 72 schema +struct +-- !query 72 output +Function: % +Class: org.apache.spark.sql.catalyst.expressions.Remainder +Usage: expr1 % expr2 - Returns the remainder after `expr1`/`expr2`. 
+ + +-- !query 73 +DESC FUNCTION approx_count_distinct +-- !query 73 schema +struct +-- !query 73 output +Function: approx_count_distinct +Class: org.apache.spark.sql.catalyst.expressions.aggregate.HyperLogLogPlusPlus +Usage: + approx_count_distinct(expr[, relativeSD]) - Returns the estimated cardinality by HyperLogLog++. + `relativeSD` defines the maximum estimation error allowed. + + +-- !query 74 +DESC FUNCTION avg +-- !query 74 schema +struct +-- !query 74 output +Function: avg +Class: org.apache.spark.sql.catalyst.expressions.aggregate.Average +Usage: avg(expr) - Returns the mean calculated from values of a group. + + +-- !query 75 +DESC FUNCTION corr +-- !query 75 schema +struct +-- !query 75 output +Function: corr +Class: org.apache.spark.sql.catalyst.expressions.aggregate.Corr +Usage: corr(expr1, expr2) - Returns Pearson coefficient of correlation between a set of number pairs. + + +-- !query 76 +DESC FUNCTION count +-- !query 76 schema +struct +-- !query 76 output +Function: count +Class: org.apache.spark.sql.catalyst.expressions.aggregate.Count +Usage: + count(*) - Returns the total number of retrieved rows, including rows containing null. + + count(expr) - Returns the number of rows for which the supplied expression is non-null. + + count(DISTINCT expr[, expr...]) - Returns the number of rows for which the supplied expression(s) are unique and non-null. + + +-- !query 77 +DESC FUNCTION covar_pop +-- !query 77 schema +struct +-- !query 77 output +Function: covar_pop +Class: org.apache.spark.sql.catalyst.expressions.aggregate.CovPopulation +Usage: covar_pop(expr1, expr2) - Returns the population covariance of a set of number pairs. + + +-- !query 78 +DESC FUNCTION covar_samp +-- !query 78 schema +struct +-- !query 78 output +Function: covar_samp +Class: org.apache.spark.sql.catalyst.expressions.aggregate.CovSample +Usage: covar_samp(expr1, expr2) - Returns the sample covariance of a set of number pairs. 
+ + +-- !query 79 +DESC FUNCTION first +-- !query 79 schema +struct +-- !query 79 output +Function: first +Class: org.apache.spark.sql.catalyst.expressions.aggregate.First +Usage: + first(expr[, isIgnoreNull]) - Returns the first value of `expr` for a group of rows. + If `isIgnoreNull` is true, returns only non-null values. + + +-- !query 80 +DESC FUNCTION first_value +-- !query 80 schema +struct +-- !query 80 output +Function: first_value +Class: org.apache.spark.sql.catalyst.expressions.aggregate.First +Usage: + first_value(expr[, isIgnoreNull]) - Returns the first value of `expr` for a group of rows. + If `isIgnoreNull` is true, returns only non-null values. + + +-- !query 81 +DESC FUNCTION kurtosis +-- !query 81 schema +struct +-- !query 81 output +Function: kurtosis +Class: org.apache.spark.sql.catalyst.expressions.aggregate.Kurtosis +Usage: kurtosis(expr) - Returns the kurtosis value calculated from values of a group. + + +-- !query 82 +DESC FUNCTION last +-- !query 82 schema +struct +-- !query 82 output +Function: last +Class: org.apache.spark.sql.catalyst.expressions.aggregate.Last +Usage: + last(expr[, isIgnoreNull]) - Returns the last value of `expr` for a group of rows. + If `isIgnoreNull` is true, returns only non-null values. + + +-- !query 83 +DESC FUNCTION last_value +-- !query 83 schema +struct +-- !query 83 output +Function: last_value +Class: org.apache.spark.sql.catalyst.expressions.aggregate.Last +Usage: + last_value(expr[, isIgnoreNull]) - Returns the last value of `expr` for a group of rows. + If `isIgnoreNull` is true, returns only non-null values. + + +-- !query 84 +DESC FUNCTION max +-- !query 84 schema +struct +-- !query 84 output +Function: max +Class: org.apache.spark.sql.catalyst.expressions.aggregate.Max +Usage: max(expr) - Returns the maximum value of `expr`. 
+ + +-- !query 85 +DESC FUNCTION mean +-- !query 85 schema +struct +-- !query 85 output +Function: mean +Class: org.apache.spark.sql.catalyst.expressions.aggregate.Average +Usage: mean(expr) - Returns the mean calculated from values of a group. + + +-- !query 86 +DESC FUNCTION min +-- !query 86 schema +struct +-- !query 86 output +Function: min +Class: org.apache.spark.sql.catalyst.expressions.aggregate.Min +Usage: min(expr) - Returns the minimum value of `expr`. + + +-- !query 87 +DESC FUNCTION percentile +-- !query 87 schema +struct +-- !query 87 output +Function: percentile +Class: org.apache.spark.sql.catalyst.expressions.aggregate.Percentile +Usage: + percentile(col, percentage [, frequency]) - Returns the exact percentile value of numeric column + `col` at the given percentage. The value of percentage must be between 0.0 and 1.0. The + value of frequency should be positive integral + + percentile(col, array(percentage1 [, percentage2]...) [, frequency]) - Returns the exact + percentile value array of numeric column `col` at the given percentage(s). Each value + of the percentage array must be between 0.0 and 1.0. The value of frequency should be + positive integral + + +-- !query 88 +DESC FUNCTION skewness +-- !query 88 schema +struct +-- !query 88 output +Function: skewness +Class: org.apache.spark.sql.catalyst.expressions.aggregate.Skewness +Usage: skewness(expr) - Returns the skewness value calculated from values of a group. + + +-- !query 89 +DESC FUNCTION percentile_approx +-- !query 89 schema +struct +-- !query 89 output +Function: percentile_approx +Class: org.apache.spark.sql.catalyst.expressions.aggregate.ApproximatePercentile +Usage: + percentile_approx(col, percentage [, accuracy]) - Returns the approximate percentile value of numeric + column `col` at the given percentage. The value of percentage must be between 0.0 + and 1.0. 
The `accuracy` parameter (default: 10000) is a positive numeric literal which + controls approximation accuracy at the cost of memory. Higher value of `accuracy` yields + better accuracy, `1.0/accuracy` is the relative error of the approximation. + When `percentage` is an array, each value of the percentage array must be between 0.0 and 1.0. + In this case, returns the approximate percentile array of column `col` at the given + percentage array. + + +-- !query 90 +DESC FUNCTION approx_percentile +-- !query 90 schema +struct +-- !query 90 output +Function: approx_percentile +Class: org.apache.spark.sql.catalyst.expressions.aggregate.ApproximatePercentile +Usage: + approx_percentile(col, percentage [, accuracy]) - Returns the approximate percentile value of numeric + column `col` at the given percentage. The value of percentage must be between 0.0 + and 1.0. The `accuracy` parameter (default: 10000) is a positive numeric literal which + controls approximation accuracy at the cost of memory. Higher value of `accuracy` yields + better accuracy, `1.0/accuracy` is the relative error of the approximation. + When `percentage` is an array, each value of the percentage array must be between 0.0 and 1.0. + In this case, returns the approximate percentile array of column `col` at the given + percentage array. + + +-- !query 91 +DESC FUNCTION std +-- !query 91 schema +struct +-- !query 91 output +Function: std +Class: org.apache.spark.sql.catalyst.expressions.aggregate.StddevSamp +Usage: std(expr) - Returns the sample standard deviation calculated from values of a group. + + +-- !query 92 +DESC FUNCTION stddev +-- !query 92 schema +struct +-- !query 92 output +Function: stddev +Class: org.apache.spark.sql.catalyst.expressions.aggregate.StddevSamp +Usage: stddev(expr) - Returns the sample standard deviation calculated from values of a group. 
+ + +-- !query 93 +DESC FUNCTION stddev_pop +-- !query 93 schema +struct +-- !query 93 output +Function: stddev_pop +Class: org.apache.spark.sql.catalyst.expressions.aggregate.StddevPop +Usage: stddev_pop(expr) - Returns the population standard deviation calculated from values of a group. + + +-- !query 94 +DESC FUNCTION stddev_samp +-- !query 94 schema +struct +-- !query 94 output +Function: stddev_samp +Class: org.apache.spark.sql.catalyst.expressions.aggregate.StddevSamp +Usage: stddev_samp(expr) - Returns the sample standard deviation calculated from values of a group. + + +-- !query 95 +DESC FUNCTION sum +-- !query 95 schema +struct +-- !query 95 output +Function: sum +Class: org.apache.spark.sql.catalyst.expressions.aggregate.Sum +Usage: sum(expr) - Returns the sum calculated from values of a group. + + +-- !query 96 +DESC FUNCTION variance +-- !query 96 schema +struct +-- !query 96 output +Function: variance +Class: org.apache.spark.sql.catalyst.expressions.aggregate.VarianceSamp +Usage: variance(expr) - Returns the sample variance calculated from values of a group. + + +-- !query 97 +DESC FUNCTION var_pop +-- !query 97 schema +struct +-- !query 97 output +Function: var_pop +Class: org.apache.spark.sql.catalyst.expressions.aggregate.VariancePop +Usage: var_pop(expr) - Returns the population variance calculated from values of a group. + + +-- !query 98 +DESC FUNCTION var_samp +-- !query 98 schema +struct +-- !query 98 output +Function: var_samp +Class: org.apache.spark.sql.catalyst.expressions.aggregate.VarianceSamp +Usage: var_samp(expr) - Returns the sample variance calculated from values of a group. + + +-- !query 99 +DESC FUNCTION collect_list +-- !query 99 schema +struct +-- !query 99 output +Function: collect_list +Class: org.apache.spark.sql.catalyst.expressions.aggregate.CollectList +Usage: collect_list(expr) - Collects and returns a list of non-unique elements. 
+ + +-- !query 100 +DESC FUNCTION collect_set +-- !query 100 schema +struct +-- !query 100 output +Function: collect_set +Class: org.apache.spark.sql.catalyst.expressions.aggregate.CollectSet +Usage: collect_set(expr) - Collects and returns a set of unique elements. + + +-- !query 101 +DESC FUNCTION count_min_sketch +-- !query 101 schema +struct +-- !query 101 output +Function: count_min_sketch +Class: org.apache.spark.sql.catalyst.expressions.aggregate.CountMinSketchAgg +Usage: + count_min_sketch(col, eps, confidence, seed) - Returns a count-min sketch of a column with the given esp, + confidence and seed. The result is an array of bytes, which can be deserialized to a + `CountMinSketch` before usage. Count-min sketch is a probabilistic data structure used for + cardinality estimation using sub-linear space. + + +-- !query 102 +DESC FUNCTION ascii +-- !query 102 schema +struct +-- !query 102 output +Function: ascii +Class: org.apache.spark.sql.catalyst.expressions.Ascii +Usage: ascii(str) - Returns the numeric value of the first character of `str`. + + +-- !query 103 +DESC FUNCTION base64 +-- !query 103 schema +struct +-- !query 103 output +Function: base64 +Class: org.apache.spark.sql.catalyst.expressions.Base64 +Usage: base64(bin) - Converts the argument from a binary `bin` to a base 64 string. + + +-- !query 104 +DESC FUNCTION concat +-- !query 104 schema +struct +-- !query 104 output +Function: concat +Class: org.apache.spark.sql.catalyst.expressions.Concat +Usage: concat(str1, str2, ..., strN) - Returns the concatenation of str1, str2, ..., strN. + + +-- !query 105 +DESC FUNCTION concat_ws +-- !query 105 schema +struct +-- !query 105 output +Function: concat_ws +Class: org.apache.spark.sql.catalyst.expressions.ConcatWs +Usage: concat_ws(sep, [str | array(str)]+) - Returns the concatenation of the strings separated by `sep`. 
+ + +-- !query 106 +DESC FUNCTION decode +-- !query 106 schema +struct +-- !query 106 output +Function: decode +Class: org.apache.spark.sql.catalyst.expressions.Decode +Usage: decode(bin, charset) - Decodes the first argument using the second argument character set. + + +-- !query 107 +DESC FUNCTION elt +-- !query 107 schema +struct +-- !query 107 output +Function: elt +Class: org.apache.spark.sql.catalyst.expressions.Elt +Usage: elt(n, str1, str2, ...) - Returns the `n`-th string, e.g., returns `str2` when `n` is 2. + + +-- !query 108 +DESC FUNCTION encode +-- !query 108 schema +struct +-- !query 108 output +Function: encode +Class: org.apache.spark.sql.catalyst.expressions.Encode +Usage: encode(str, charset) - Encodes the first argument using the second argument character set. + + +-- !query 109 +DESC FUNCTION find_in_set +-- !query 109 schema +struct +-- !query 109 output +Function: find_in_set +Class: org.apache.spark.sql.catalyst.expressions.FindInSet +Usage: + find_in_set(str, str_array) - Returns the index (1-based) of the given string (`str`) in the comma-delimited list (`str_array`). + Returns 0, if the string was not found or if the given string (`str`) contains a comma. + + +-- !query 110 +DESC FUNCTION format_number +-- !query 110 schema +struct +-- !query 110 output +Function: format_number +Class: org.apache.spark.sql.catalyst.expressions.FormatNumber +Usage: + format_number(expr1, expr2) - Formats the number `expr1` like '#,###,###.##', rounded to `expr2` + decimal places. If `expr2` is 0, the result has no decimal point or fractional part. + This is supposed to function like MySQL's FORMAT. + + +-- !query 111 +DESC FUNCTION format_string +-- !query 111 schema +struct +-- !query 111 output +Function: format_string +Class: org.apache.spark.sql.catalyst.expressions.FormatString +Usage: format_string(strfmt, obj, ...) - Returns a formatted string from printf-style format strings. 
+ + +-- !query 112 +DESC FUNCTION get_json_object +-- !query 112 schema +struct +-- !query 112 output +Function: get_json_object +Class: org.apache.spark.sql.catalyst.expressions.GetJsonObject +Usage: get_json_object(json_txt, path) - Extracts a json object from `path`. + + +-- !query 113 +DESC FUNCTION initcap +-- !query 113 schema +struct +-- !query 113 output +Function: initcap +Class: org.apache.spark.sql.catalyst.expressions.InitCap +Usage: + initcap(str) - Returns `str` with the first letter of each word in uppercase. + All other letters are in lowercase. Words are delimited by white space. + + +-- !query 114 +DESC FUNCTION instr +-- !query 114 schema +struct +-- !query 114 output +Function: instr +Class: org.apache.spark.sql.catalyst.expressions.StringInstr +Usage: instr(str, substr) - Returns the (1-based) index of the first occurrence of `substr` in `str`. + + +-- !query 115 +DESC FUNCTION lcase +-- !query 115 schema +struct +-- !query 115 output +Function: lcase +Class: org.apache.spark.sql.catalyst.expressions.Lower +Usage: lcase(str) - Returns `str` with all characters changed to lowercase. + + +-- !query 116 +DESC FUNCTION length +-- !query 116 schema +struct +-- !query 116 output +Function: length +Class: org.apache.spark.sql.catalyst.expressions.Length +Usage: length(expr) - Returns the length of `expr` or number of bytes in binary data. + + +-- !query 117 +DESC FUNCTION levenshtein +-- !query 117 schema +struct +-- !query 117 output +Function: levenshtein +Class: org.apache.spark.sql.catalyst.expressions.Levenshtein +Usage: levenshtein(str1, str2) - Returns the Levenshtein distance between the two given strings. + + +-- !query 118 +DESC FUNCTION like +-- !query 118 schema +struct +-- !query 118 output +Function: like +Class: org.apache.spark.sql.catalyst.expressions.Like +Usage: str like pattern - Returns true if `str` matches `pattern`, or false otherwise. 
+ + +-- !query 119 +DESC FUNCTION lower +-- !query 119 schema +struct +-- !query 119 output +Function: lower +Class: org.apache.spark.sql.catalyst.expressions.Lower +Usage: lower(str) - Returns `str` with all characters changed to lowercase. + + +-- !query 120 +DESC FUNCTION locate +-- !query 120 schema +struct +-- !query 120 output +Function: locate +Class: org.apache.spark.sql.catalyst.expressions.StringLocate +Usage: + locate(substr, str[, pos]) - Returns the position of the first occurrence of `substr` in `str` after position `pos`. + The given `pos` and return value are 1-based. + + +-- !query 121 +DESC FUNCTION lpad +-- !query 121 schema +struct +-- !query 121 output +Function: lpad +Class: org.apache.spark.sql.catalyst.expressions.StringLPad +Usage: + lpad(str, len, pad) - Returns `str`, left-padded with `pad` to a length of `len`. + If `str` is longer than `len`, the return value is shortened to `len` characters. + + +-- !query 122 +DESC FUNCTION ltrim +-- !query 122 schema +struct +-- !query 122 output +Function: ltrim +Class: org.apache.spark.sql.catalyst.expressions.StringTrimLeft +Usage: ltrim(str) - Removes the leading and trailing space characters from `str`. + + +-- !query 123 +DESC FUNCTION json_tuple +-- !query 123 schema +struct +-- !query 123 output +Function: json_tuple +Class: org.apache.spark.sql.catalyst.expressions.JsonTuple +Usage: json_tuple(jsonStr, p1, p2, ..., pn) - Returns a tuple like the function get_json_object, but it takes multiple names. All the input parameters and output column types are string. + + +-- !query 124 +DESC FUNCTION parse_url +-- !query 124 schema +struct +-- !query 124 output +Function: parse_url +Class: org.apache.spark.sql.catalyst.expressions.ParseUrl +Usage: parse_url(url, partToExtract[, key]) - Extracts a part from a URL. 
+ + +-- !query 125 +DESC FUNCTION printf +-- !query 125 schema +struct +-- !query 125 output +Function: printf +Class: org.apache.spark.sql.catalyst.expressions.FormatString +Usage: printf(strfmt, obj, ...) - Returns a formatted string from printf-style format strings. + + +-- !query 126 +DESC FUNCTION regexp_extract +-- !query 126 schema +struct +-- !query 126 output +Function: regexp_extract +Class: org.apache.spark.sql.catalyst.expressions.RegExpExtract +Usage: regexp_extract(str, regexp[, idx]) - Extracts a group that matches `regexp`. + + +-- !query 127 +DESC FUNCTION regexp_replace +-- !query 127 schema +struct +-- !query 127 output +Function: regexp_replace +Class: org.apache.spark.sql.catalyst.expressions.RegExpReplace +Usage: regexp_replace(str, regexp, rep) - Replaces all substrings of `str` that match `regexp` with `rep`. + + +-- !query 128 +DESC FUNCTION repeat +-- !query 128 schema +struct +-- !query 128 output +Function: repeat +Class: org.apache.spark.sql.catalyst.expressions.StringRepeat +Usage: repeat(str, n) - Returns the string which repeats the given string value n times. + + +-- !query 129 +DESC FUNCTION reverse +-- !query 129 schema +struct +-- !query 129 output +Function: reverse +Class: org.apache.spark.sql.catalyst.expressions.StringReverse +Usage: reverse(str) - Returns the reversed given string. + + +-- !query 130 +DESC FUNCTION rlike +-- !query 130 schema +struct +-- !query 130 output +Function: rlike +Class: org.apache.spark.sql.catalyst.expressions.RLike +Usage: str rlike regexp - Returns true if `str` matches `regexp`, or false otherwise. + + +-- !query 131 +DESC FUNCTION rpad +-- !query 131 schema +struct +-- !query 131 output +Function: rpad +Class: org.apache.spark.sql.catalyst.expressions.StringRPad +Usage: + rpad(str, len, pad) - Returns `str`, right-padded with `pad` to a length of `len`. + If `str` is longer than `len`, the return value is shortened to `len` characters. 
+ + +-- !query 132 +DESC FUNCTION rtrim +-- !query 132 schema +struct +-- !query 132 output +Function: rtrim +Class: org.apache.spark.sql.catalyst.expressions.StringTrimRight +Usage: rtrim(str) - Removes the trailing space characters from `str`. + + +-- !query 133 +DESC FUNCTION sentences +-- !query 133 schema +struct +-- !query 133 output +Function: sentences +Class: org.apache.spark.sql.catalyst.expressions.Sentences +Usage: sentences(str[, lang, country]) - Splits `str` into an array of array of words. + + +-- !query 134 +DESC FUNCTION soundex +-- !query 134 schema +struct +-- !query 134 output +Function: soundex +Class: org.apache.spark.sql.catalyst.expressions.SoundEx +Usage: soundex(str) - Returns Soundex code of the string. + + +-- !query 135 +DESC FUNCTION space +-- !query 135 schema +struct +-- !query 135 output +Function: space +Class: org.apache.spark.sql.catalyst.expressions.StringSpace +Usage: space(n) - Returns a string consisting of `n` spaces. + + +-- !query 136 +DESC FUNCTION split +-- !query 136 schema +struct +-- !query 136 output +Function: split +Class: org.apache.spark.sql.catalyst.expressions.StringSplit +Usage: split(str, regex) - Splits `str` around occurrences that match `regex`. + + +-- !query 137 +DESC FUNCTION substr +-- !query 137 schema +struct +-- !query 137 output +Function: substr +Class: org.apache.spark.sql.catalyst.expressions.Substring +Usage: substr(str, pos[, len]) - Returns the substring of `str` that starts at `pos` and is of length `len`, or the slice of byte array that starts at `pos` and is of length `len`. + + +-- !query 138 +DESC FUNCTION substring +-- !query 138 schema +struct +-- !query 138 output +Function: substring +Class: org.apache.spark.sql.catalyst.expressions.Substring +Usage: substring(str, pos[, len]) - Returns the substring of `str` that starts at `pos` and is of length `len`, or the slice of byte array that starts at `pos` and is of length `len`. 
+ + +-- !query 139 +DESC FUNCTION substring_index +-- !query 139 schema +struct +-- !query 139 output +Function: substring_index +Class: org.apache.spark.sql.catalyst.expressions.SubstringIndex +Usage: + substring_index(str, delim, count) - Returns the substring from `str` before `count` occurrences of the delimiter `delim`. + If `count` is positive, everything to the left of the final delimiter (counting from the + left) is returned. If `count` is negative, everything to the right of the final delimiter + (counting from the right) is returned. The function substring_index performs a case-sensitive match + when searching for `delim`. + + +-- !query 140 +DESC FUNCTION translate +-- !query 140 schema +struct +-- !query 140 output +Function: translate +Class: org.apache.spark.sql.catalyst.expressions.StringTranslate +Usage: translate(input, from, to) - Translates the `input` string by replacing the characters present in the `from` string with the corresponding characters in the `to` string. + + +-- !query 141 +DESC FUNCTION trim +-- !query 141 schema +struct +-- !query 141 output +Function: trim +Class: org.apache.spark.sql.catalyst.expressions.StringTrim +Usage: trim(str) - Removes the leading and trailing space characters from `str`. + + +-- !query 142 +DESC FUNCTION ucase +-- !query 142 schema +struct +-- !query 142 output +Function: ucase +Class: org.apache.spark.sql.catalyst.expressions.Upper +Usage: ucase(str) - Returns `str` with all characters changed to uppercase. + + +-- !query 143 +DESC FUNCTION unbase64 +-- !query 143 schema +struct +-- !query 143 output +Function: unbase64 +Class: org.apache.spark.sql.catalyst.expressions.UnBase64 +Usage: unbase64(str) - Converts the argument from a base 64 string `str` to a binary. + + +-- !query 144 +DESC FUNCTION unhex +-- !query 144 schema +struct +-- !query 144 output +Function: unhex +Class: org.apache.spark.sql.catalyst.expressions.Unhex +Usage: unhex(expr) - Converts hexadecimal `expr` to binary. 
+ + +-- !query 145 +DESC FUNCTION upper +-- !query 145 schema +struct +-- !query 145 output +Function: upper +Class: org.apache.spark.sql.catalyst.expressions.Upper +Usage: upper(str) - Returns `str` with all characters changed to uppercase. + + +-- !query 146 +DESC FUNCTION xpath +-- !query 146 schema +struct +-- !query 146 output +Function: xpath +Class: org.apache.spark.sql.catalyst.expressions.xml.XPathList +Usage: xpath(xml, xpath) - Returns a string array of values within the nodes of xml that match the XPath expression. + + +-- !query 147 +DESC FUNCTION xpath_boolean +-- !query 147 schema +struct +-- !query 147 output +Function: xpath_boolean +Class: org.apache.spark.sql.catalyst.expressions.xml.XPathBoolean +Usage: xpath_boolean(xml, xpath) - Returns true if the XPath expression evaluates to true, or if a matching node is found. + + +-- !query 148 +DESC FUNCTION xpath_double +-- !query 148 schema +struct +-- !query 148 output +Function: xpath_double +Class: org.apache.spark.sql.catalyst.expressions.xml.XPathDouble +Usage: xpath_double(xml, xpath) - Returns a double value, the value zero if no match is found, or NaN if a match is found but the value is non-numeric. + + +-- !query 149 +DESC FUNCTION xpath_number +-- !query 149 schema +struct +-- !query 149 output +Function: xpath_number +Class: org.apache.spark.sql.catalyst.expressions.xml.XPathDouble +Usage: xpath_number(xml, xpath) - Returns a double value, the value zero if no match is found, or NaN if a match is found but the value is non-numeric. + + +-- !query 150 +DESC FUNCTION xpath_float +-- !query 150 schema +struct +-- !query 150 output +Function: xpath_float +Class: org.apache.spark.sql.catalyst.expressions.xml.XPathFloat +Usage: xpath_float(xml, xpath) - Returns a float value, the value zero if no match is found, or NaN if a match is found but the value is non-numeric. 
+ + +-- !query 151 +DESC FUNCTION xpath_int +-- !query 151 schema +struct +-- !query 151 output +Function: xpath_int +Class: org.apache.spark.sql.catalyst.expressions.xml.XPathInt +Usage: xpath_int(xml, xpath) - Returns an integer value, or the value zero if no match is found, or a match is found but the value is non-numeric. + + +-- !query 152 +DESC FUNCTION xpath_long +-- !query 152 schema +struct +-- !query 152 output +Function: xpath_long +Class: org.apache.spark.sql.catalyst.expressions.xml.XPathLong +Usage: xpath_long(xml, xpath) - Returns a long integer value, or the value zero if no match is found, or a match is found but the value is non-numeric. + + +-- !query 153 +DESC FUNCTION xpath_short +-- !query 153 schema +struct +-- !query 153 output +Function: xpath_short +Class: org.apache.spark.sql.catalyst.expressions.xml.XPathShort +Usage: xpath_short(xml, xpath) - Returns a short integer value, or the value zero if no match is found, or a match is found but the value is non-numeric. + + +-- !query 154 +DESC FUNCTION xpath_string +-- !query 154 schema +struct +-- !query 154 output +Function: xpath_string +Class: org.apache.spark.sql.catalyst.expressions.xml.XPathString +Usage: xpath_string(xml, xpath) - Returns the text contents of the first xml node that matches the XPath expression. + + +-- !query 155 +DESC FUNCTION add_months +-- !query 155 schema +struct +-- !query 155 output +Function: add_months +Class: org.apache.spark.sql.catalyst.expressions.AddMonths +Usage: add_months(start_date, num_months) - Returns the date that is `num_months` after `start_date`. + + +-- !query 156 +DESC FUNCTION current_date +-- !query 156 schema +struct +-- !query 156 output +Function: current_date +Class: org.apache.spark.sql.catalyst.expressions.CurrentDate +Usage: current_date() - Returns the current date at the start of query evaluation. 
+ + +-- !query 157 +DESC FUNCTION current_timestamp +-- !query 157 schema +struct +-- !query 157 output +Function: current_timestamp +Class: org.apache.spark.sql.catalyst.expressions.CurrentTimestamp +Usage: current_timestamp() - Returns the current timestamp at the start of query evaluation. + + +-- !query 158 +DESC FUNCTION datediff +-- !query 158 schema +struct +-- !query 158 output +Function: datediff +Class: org.apache.spark.sql.catalyst.expressions.DateDiff +Usage: datediff(endDate, startDate) - Returns the number of days from `startDate` to `endDate`. + + +-- !query 159 +DESC FUNCTION date_add +-- !query 159 schema +struct +-- !query 159 output +Function: date_add +Class: org.apache.spark.sql.catalyst.expressions.DateAdd +Usage: date_add(start_date, num_days) - Returns the date that is `num_days` after `start_date`. + + +-- !query 160 +DESC FUNCTION date_format +-- !query 160 schema +struct +-- !query 160 output +Function: date_format +Class: org.apache.spark.sql.catalyst.expressions.DateFormatClass +Usage: date_format(timestamp, fmt) - Converts `timestamp` to a value of string in the format specified by the date format `fmt`. + + +-- !query 161 +DESC FUNCTION date_sub +-- !query 161 schema +struct +-- !query 161 output +Function: date_sub +Class: org.apache.spark.sql.catalyst.expressions.DateSub +Usage: date_sub(start_date, num_days) - Returns the date that is `num_days` before `start_date`. + + +-- !query 162 +DESC FUNCTION day +-- !query 162 schema +struct +-- !query 162 output +Function: day +Class: org.apache.spark.sql.catalyst.expressions.DayOfMonth +Usage: day(date) - Returns the day of month of the date/timestamp. + + +-- !query 163 +DESC FUNCTION dayofyear +-- !query 163 schema +struct +-- !query 163 output +Function: dayofyear +Class: org.apache.spark.sql.catalyst.expressions.DayOfYear +Usage: dayofyear(date) - Returns the day of year of the date/timestamp. 
+ + +-- !query 164 +DESC FUNCTION dayofmonth +-- !query 164 schema +struct +-- !query 164 output +Function: dayofmonth +Class: org.apache.spark.sql.catalyst.expressions.DayOfMonth +Usage: dayofmonth(date) - Returns the day of month of the date/timestamp. + + +-- !query 165 +DESC FUNCTION from_unixtime +-- !query 165 schema +struct +-- !query 165 output +Function: from_unixtime +Class: org.apache.spark.sql.catalyst.expressions.FromUnixTime +Usage: from_unixtime(unix_time, format) - Returns `unix_time` in the specified `format`. + + +-- !query 166 +DESC FUNCTION from_utc_timestamp +-- !query 166 schema +struct +-- !query 166 output +Function: from_utc_timestamp +Class: org.apache.spark.sql.catalyst.expressions.FromUTCTimestamp +Usage: from_utc_timestamp(timestamp, timezone) - Given a timestamp, which corresponds to a certain time of day in UTC, returns another timestamp that corresponds to the same time of day in the given timezone. + + +-- !query 167 +DESC FUNCTION hour +-- !query 167 schema +struct +-- !query 167 output +Function: hour +Class: org.apache.spark.sql.catalyst.expressions.Hour +Usage: hour(timestamp) - Returns the hour component of the string/timestamp. + + +-- !query 168 +DESC FUNCTION last_day +-- !query 168 schema +struct +-- !query 168 output +Function: last_day +Class: org.apache.spark.sql.catalyst.expressions.LastDay +Usage: last_day(date) - Returns the last day of the month which the date belongs to. + + +-- !query 169 +DESC FUNCTION minute +-- !query 169 schema +struct +-- !query 169 output +Function: minute +Class: org.apache.spark.sql.catalyst.expressions.Minute +Usage: minute(timestamp) - Returns the minute component of the string/timestamp. + + +-- !query 170 +DESC FUNCTION month +-- !query 170 schema +struct +-- !query 170 output +Function: month +Class: org.apache.spark.sql.catalyst.expressions.Month +Usage: month(date) - Returns the month component of the date/timestamp. 
+ + +-- !query 171 +DESC FUNCTION months_between +-- !query 171 schema +struct +-- !query 171 output +Function: months_between +Class: org.apache.spark.sql.catalyst.expressions.MonthsBetween +Usage: months_between(timestamp1, timestamp2) - Returns number of months between `timestamp1` and `timestamp2`. + + +-- !query 172 +DESC FUNCTION next_day +-- !query 172 schema +struct +-- !query 172 output +Function: next_day +Class: org.apache.spark.sql.catalyst.expressions.NextDay +Usage: next_day(start_date, day_of_week) - Returns the first date which is later than `start_date` and named as indicated. + + +-- !query 173 +DESC FUNCTION now +-- !query 173 schema +struct +-- !query 173 output +Function: now +Class: org.apache.spark.sql.catalyst.expressions.CurrentTimestamp +Usage: now() - Returns the current timestamp at the start of query evaluation. + + +-- !query 174 +DESC FUNCTION quarter +-- !query 174 schema +struct +-- !query 174 output +Function: quarter +Class: org.apache.spark.sql.catalyst.expressions.Quarter +Usage: quarter(date) - Returns the quarter of the year for date, in the range 1 to 4. + + +-- !query 175 +DESC FUNCTION second +-- !query 175 schema +struct +-- !query 175 output +Function: second +Class: org.apache.spark.sql.catalyst.expressions.Second +Usage: second(timestamp) - Returns the second component of the string/timestamp. + + +-- !query 176 +DESC FUNCTION to_timestamp +-- !query 176 schema +struct +-- !query 176 output +Function: to_timestamp +Class: org.apache.spark.sql.catalyst.expressions.ParseToTimestamp +Usage: to_timestamp(timestamp, fmt) - Parses the `left` expression with the `format` expression to a timestamp. Returns null with invalid input. + + +-- !query 177 +DESC FUNCTION to_date +-- !query 177 schema +struct +-- !query 177 output +Function: to_date +Class: org.apache.spark.sql.catalyst.expressions.ParseToDate +Usage: to_date(date_str, fmt) - Parses the `left` expression with the `fmt` expression. Returns null with invalid input. 
+ + +-- !query 178 +DESC FUNCTION to_unix_timestamp +-- !query 178 schema +struct +-- !query 178 output +Function: to_unix_timestamp +Class: org.apache.spark.sql.catalyst.expressions.ToUnixTimestamp +Usage: to_unix_timestamp(expr[, pattern]) - Returns the UNIX timestamp of the give time. + + +-- !query 179 +DESC FUNCTION to_utc_timestamp +-- !query 179 schema +struct +-- !query 179 output +Function: to_utc_timestamp +Class: org.apache.spark.sql.catalyst.expressions.ToUTCTimestamp +Usage: to_utc_timestamp(timestamp, timezone) - Given a timestamp, which corresponds to a certain time of day in the given timezone, returns another timestamp that corresponds to the same time of day in UTC. + + +-- !query 180 +DESC FUNCTION trunc +-- !query 180 schema +struct +-- !query 180 output +Function: trunc +Class: org.apache.spark.sql.catalyst.expressions.TruncDate +Usage: trunc(date, fmt) - Returns `date` with the time portion of the day truncated to the unit specified by the format model `fmt`. + + +-- !query 181 +DESC FUNCTION unix_timestamp +-- !query 181 schema +struct +-- !query 181 output +Function: unix_timestamp +Class: org.apache.spark.sql.catalyst.expressions.UnixTimestamp +Usage: unix_timestamp([expr[, pattern]]) - Returns the UNIX timestamp of current or specified time. + + +-- !query 182 +DESC FUNCTION weekofyear +-- !query 182 schema +struct +-- !query 182 output +Function: weekofyear +Class: org.apache.spark.sql.catalyst.expressions.WeekOfYear +Usage: weekofyear(date) - Returns the week of the year of the given date. + + +-- !query 183 +DESC FUNCTION year +-- !query 183 schema +struct +-- !query 183 output +Function: year +Class: org.apache.spark.sql.catalyst.expressions.Year +Usage: year(date) - Returns the year component of the date/timestamp. + + +-- !query 184 +DESC FUNCTION window +-- !query 184 schema +struct +-- !query 184 output +Function: window +Class: org.apache.spark.sql.catalyst.expressions.TimeWindow +Usage: N/A. 
+ + +-- !query 185 +DESC FUNCTION array +-- !query 185 schema +struct +-- !query 185 output +Function: array +Class: org.apache.spark.sql.catalyst.expressions.CreateArray +Usage: array(expr, ...) - Returns an array with the given elements. + + +-- !query 186 +DESC FUNCTION array_contains +-- !query 186 schema +struct +-- !query 186 output +Function: array_contains +Class: org.apache.spark.sql.catalyst.expressions.ArrayContains +Usage: array_contains(array, value) - Returns true if the array contains the value. + + +-- !query 187 +DESC FUNCTION map +-- !query 187 schema +struct +-- !query 187 output +Function: map +Class: org.apache.spark.sql.catalyst.expressions.CreateMap +Usage: map(key0, value0, key1, value1, ...) - Creates a map with the given key/value pairs. + + +-- !query 188 +DESC FUNCTION named_struct +-- !query 188 schema +struct +-- !query 188 output +Function: named_struct +Class: org.apache.spark.sql.catalyst.expressions.CreateNamedStruct +Usage: named_struct(name1, val1, name2, val2, ...) - Creates a struct with the given field names and values. + + +-- !query 189 +DESC FUNCTION map_keys +-- !query 189 schema +struct +-- !query 189 output +Function: map_keys +Class: org.apache.spark.sql.catalyst.expressions.MapKeys +Usage: map_keys(map) - Returns an unordered array containing the keys of the map. + + +-- !query 190 +DESC FUNCTION map_values +-- !query 190 schema +struct +-- !query 190 output +Function: map_values +Class: org.apache.spark.sql.catalyst.expressions.MapValues +Usage: map_values(map) - Returns an unordered array containing the values of the map. + + +-- !query 191 +DESC FUNCTION size +-- !query 191 schema +struct +-- !query 191 output +Function: size +Class: org.apache.spark.sql.catalyst.expressions.Size +Usage: size(expr) - Returns the size of an array or a map. Returns -1 if null. 
+ + +-- !query 192 +DESC FUNCTION sort_array +-- !query 192 schema +struct +-- !query 192 output +Function: sort_array +Class: org.apache.spark.sql.catalyst.expressions.SortArray +Usage: sort_array(array[, ascendingOrder]) - Sorts the input array in ascending or descending order according to the natural ordering of the array elements. + + +-- !query 193 +DESC FUNCTION struct +-- !query 193 schema +struct +-- !query 193 output +Function: struct +Class: org.apache.spark.sql.catalyst.expressions.NamedStruct +Usage: struct(col1, col2, col3, ...) - Creates a struct with the given field values. + + +-- !query 194 +DESC FUNCTION assert_true +-- !query 194 schema +struct +-- !query 194 output +Function: assert_true +Class: org.apache.spark.sql.catalyst.expressions.AssertTrue +Usage: assert_true(expr) - Throws an exception if `expr` is not true. + + +-- !query 195 +DESC FUNCTION crc32 +-- !query 195 schema +struct +-- !query 195 output +Function: crc32 +Class: org.apache.spark.sql.catalyst.expressions.Crc32 +Usage: crc32(expr) - Returns a cyclic redundancy check value of the `expr` as a bigint. + + +-- !query 196 +DESC FUNCTION md5 +-- !query 196 schema +struct +-- !query 196 output +Function: md5 +Class: org.apache.spark.sql.catalyst.expressions.Md5 +Usage: md5(expr) - Returns an MD5 128-bit checksum as a hex string of `expr`. + + +-- !query 197 +DESC FUNCTION hash +-- !query 197 schema +struct +-- !query 197 output +Function: hash +Class: org.apache.spark.sql.catalyst.expressions.Murmur3Hash +Usage: hash(expr1, expr2, ...) - Returns a hash value of the arguments. + + +-- !query 198 +DESC FUNCTION sha +-- !query 198 schema +struct +-- !query 198 output +Function: sha +Class: org.apache.spark.sql.catalyst.expressions.Sha1 +Usage: sha(expr) - Returns a sha1 hash value as a hex string of the `expr`. 
+ + +-- !query 199 +DESC FUNCTION sha1 +-- !query 199 schema +struct +-- !query 199 output +Function: sha1 +Class: org.apache.spark.sql.catalyst.expressions.Sha1 +Usage: sha1(expr) - Returns a sha1 hash value as a hex string of the `expr`. + + +-- !query 200 +DESC FUNCTION sha2 +-- !query 200 schema +struct +-- !query 200 output +Function: sha2 +Class: org.apache.spark.sql.catalyst.expressions.Sha2 +Usage: + sha2(expr, bitLength) - Returns a checksum of SHA-2 family as a hex string of `expr`. + SHA-224, SHA-256, SHA-384, and SHA-512 are supported. Bit length of 0 is equivalent to 256. + + +-- !query 201 +DESC FUNCTION spark_partition_id +-- !query 201 schema +struct +-- !query 201 output +Function: spark_partition_id +Class: org.apache.spark.sql.catalyst.expressions.SparkPartitionID +Usage: spark_partition_id() - Returns the current partition id. + + +-- !query 202 +DESC FUNCTION input_file_name +-- !query 202 schema +struct +-- !query 202 output +Function: input_file_name +Class: org.apache.spark.sql.catalyst.expressions.InputFileName +Usage: input_file_name() - Returns the name of the file being read, or empty string if not available. + + +-- !query 203 +DESC FUNCTION input_file_block_start +-- !query 203 schema +struct +-- !query 203 output +Function: input_file_block_start +Class: org.apache.spark.sql.catalyst.expressions.InputFileBlockStart +Usage: input_file_block_start() - Returns the start offset of the block being read, or -1 if not available. + + +-- !query 204 +DESC FUNCTION input_file_block_length +-- !query 204 schema +struct +-- !query 204 output +Function: input_file_block_length +Class: org.apache.spark.sql.catalyst.expressions.InputFileBlockLength +Usage: input_file_block_length() - Returns the length of the block being read, or -1 if not available. 
+ + +-- !query 205 +DESC FUNCTION monotonically_increasing_id +-- !query 205 schema +struct +-- !query 205 output +Function: monotonically_increasing_id +Class: org.apache.spark.sql.catalyst.expressions.MonotonicallyIncreasingID +Usage: + monotonically_increasing_id() - Returns monotonically increasing 64-bit integers. The generated ID is guaranteed + to be monotonically increasing and unique, but not consecutive. The current implementation + puts the partition ID in the upper 31 bits, and the lower 33 bits represent the record number + within each partition. The assumption is that the data frame has less than 1 billion + partitions, and each partition has less than 8 billion records. + + +-- !query 206 +DESC FUNCTION current_database +-- !query 206 schema +struct +-- !query 206 output +Function: current_database +Class: org.apache.spark.sql.catalyst.expressions.CurrentDatabase +Usage: current_database() - Returns the current database. + + +-- !query 207 +DESC FUNCTION reflect +-- !query 207 schema +struct +-- !query 207 output +Function: reflect +Class: org.apache.spark.sql.catalyst.expressions.CallMethodViaReflection +Usage: reflect(class, method[, arg1[, arg2 ..]]) - Calls a method with reflection. + + +-- !query 208 +DESC FUNCTION java_method +-- !query 208 schema +struct +-- !query 208 output +Function: java_method +Class: org.apache.spark.sql.catalyst.expressions.CallMethodViaReflection +Usage: java_method(class, method[, arg1[, arg2 ..]]) - Calls a method with reflection. + + +-- !query 209 +DESC FUNCTION cube +-- !query 209 schema +struct +-- !query 209 output +Function: cube +Class: org.apache.spark.sql.catalyst.expressions.Cube +Usage: N/A. + + +-- !query 210 +DESC FUNCTION rollup +-- !query 210 schema +struct +-- !query 210 output +Function: rollup +Class: org.apache.spark.sql.catalyst.expressions.Rollup +Usage: N/A. 
+ + +-- !query 211 +DESC FUNCTION grouping +-- !query 211 schema +struct +-- !query 211 output +Function: grouping +Class: org.apache.spark.sql.catalyst.expressions.Grouping +Usage: N/A. + + +-- !query 212 +DESC FUNCTION grouping_id +-- !query 212 schema +struct +-- !query 212 output +Function: grouping_id +Class: org.apache.spark.sql.catalyst.expressions.GroupingID +Usage: N/A. + + +-- !query 213 +DESC FUNCTION lead +-- !query 213 schema +struct +-- !query 213 output +Function: lead +Class: org.apache.spark.sql.catalyst.expressions.Lead +Usage: + lead(input[, offset[, default]]) - Returns the value of `input` at the `offset`th row + after the current row in the window. The default value of `offset` is 1 and the default + value of `default` is null. If the value of `input` at the `offset`th row is null, + null is returned. If there is no such an offset row (e.g., when the offset is 1, the last + row of the window does not have any subsequent row), `default` is returned. + + +-- !query 214 +DESC FUNCTION lag +-- !query 214 schema +struct +-- !query 214 output +Function: lag +Class: org.apache.spark.sql.catalyst.expressions.Lag +Usage: + lag(input[, offset[, default]]) - Returns the value of `input` at the `offset`th row + before the current row in the window. The default value of `offset` is 1 and the default + value of `default` is null. If the value of `input` at the `offset`th row is null, + null is returned. If there is no such offset row (e.g., when the offset is 1, the first + row of the window does not have any previous row), `default` is returned. + + +-- !query 215 +DESC FUNCTION row_number +-- !query 215 schema +struct +-- !query 215 output +Function: row_number +Class: org.apache.spark.sql.catalyst.expressions.RowNumber +Usage: + row_number() - Assigns a unique, sequential number to each row, starting with one, + according to the ordering of rows within the window partition. 
+ + +-- !query 216 +DESC FUNCTION cume_dist +-- !query 216 schema +struct +-- !query 216 output +Function: cume_dist +Class: org.apache.spark.sql.catalyst.expressions.CumeDist +Usage: + cume_dist() - Computes the position of a value relative to all values in the partition. + + +-- !query 217 +DESC FUNCTION ntile +-- !query 217 schema +struct +-- !query 217 output +Function: ntile +Class: org.apache.spark.sql.catalyst.expressions.NTile +Usage: + ntile(n) - Divides the rows for each window partition into `n` buckets ranging + from 1 to at most `n`. + + +-- !query 218 +DESC FUNCTION rank +-- !query 218 schema +struct +-- !query 218 output +Function: rank +Class: org.apache.spark.sql.catalyst.expressions.Rank +Usage: + rank() - Computes the rank of a value in a group of values. The result is one plus the number + of rows preceding or equal to the current row in the ordering of the partition. The values + will produce gaps in the sequence. + + +-- !query 219 +DESC FUNCTION dense_rank +-- !query 219 schema +struct +-- !query 219 output +Function: dense_rank +Class: org.apache.spark.sql.catalyst.expressions.DenseRank +Usage: + dense_rank() - Computes the rank of a value in a group of values. The result is one plus the + previously assigned rank value. Unlike the function rank, dense_rank will not produce gaps + in the ranking sequence. + + +-- !query 220 +DESC FUNCTION percent_rank +-- !query 220 schema +struct +-- !query 220 output +Function: percent_rank +Class: org.apache.spark.sql.catalyst.expressions.PercentRank +Usage: + percent_rank() - Computes the percentage ranking of a value in a group of values. + + +-- !query 221 +DESC FUNCTION and +-- !query 221 schema +struct +-- !query 221 output +Function: and +Class: org.apache.spark.sql.catalyst.expressions.And +Usage: expr1 and expr2 - Logical AND. 
+ + +-- !query 222 +DESC FUNCTION in +-- !query 222 schema +struct +-- !query 222 output +Function: in +Class: org.apache.spark.sql.catalyst.expressions.In +Usage: expr1 in(expr2, expr3, ...) - Returns true if `expr` equals to any valN. + + +-- !query 223 +DESC FUNCTION not +-- !query 223 schema +struct +-- !query 223 output +Function: not +Class: org.apache.spark.sql.catalyst.expressions.Not +Usage: not expr - Logical not. + + +-- !query 224 +DESC FUNCTION or +-- !query 224 schema +struct +-- !query 224 output +Function: or +Class: org.apache.spark.sql.catalyst.expressions.Or +Usage: expr1 or expr2 - Logical OR. + + +-- !query 225 +DESC FUNCTION <=> +-- !query 225 schema +struct +-- !query 225 output +Function: <=> +Class: org.apache.spark.sql.catalyst.expressions.EqualNullSafe +Usage: + expr1 <=> expr2 - Returns same result as the EQUAL(=) operator for non-null operands, + but returns true if both are null, false if one of the them is null. + + +-- !query 226 +DESC FUNCTION = +-- !query 226 schema +struct +-- !query 226 output +Function: = +Class: org.apache.spark.sql.catalyst.expressions.EqualTo +Usage: expr1 = expr2 - Returns true if `expr1` equals `expr2`, or false otherwise. + + +-- !query 227 +DESC FUNCTION == +-- !query 227 schema +struct +-- !query 227 output +Function: == +Class: org.apache.spark.sql.catalyst.expressions.EqualTo +Usage: expr1 == expr2 - Returns true if `expr1` equals `expr2`, or false otherwise. + + +-- !query 228 +DESC FUNCTION > +-- !query 228 schema +struct +-- !query 228 output +Function: > +Class: org.apache.spark.sql.catalyst.expressions.GreaterThan +Usage: expr1 > expr2 - Returns true if `expr1` is greater than `expr2`. + + +-- !query 229 +DESC FUNCTION >= +-- !query 229 schema +struct +-- !query 229 output +Function: >= +Class: org.apache.spark.sql.catalyst.expressions.GreaterThanOrEqual +Usage: expr1 >= expr2 - Returns true if `expr1` is greater than or equal to `expr2`. 
+ + +-- !query 230 +DESC FUNCTION < +-- !query 230 schema +struct +-- !query 230 output +Function: < +Class: org.apache.spark.sql.catalyst.expressions.LessThan +Usage: expr1 < expr2 - Returns true if `expr1` is less than `expr2`. + + +-- !query 231 +DESC FUNCTION <= +-- !query 231 schema +struct +-- !query 231 output +Function: <= +Class: org.apache.spark.sql.catalyst.expressions.LessThanOrEqual +Usage: expr1 <= expr2 - Returns true if `expr1` is less than or equal to `expr2`. + + +-- !query 232 +DESC FUNCTION ! +-- !query 232 schema +struct +-- !query 232 output +Function: ! +Class: org.apache.spark.sql.catalyst.expressions.Not +Usage: ! expr - Logical not. + + +-- !query 233 +DESC FUNCTION & +-- !query 233 schema +struct +-- !query 233 output +Function: & +Class: org.apache.spark.sql.catalyst.expressions.BitwiseAnd +Usage: expr1 & expr2 - Returns the result of bitwise AND of `expr1` and `expr2`. + + +-- !query 234 +DESC FUNCTION ~ +-- !query 234 schema +struct +-- !query 234 output +Function: ~ +Class: org.apache.spark.sql.catalyst.expressions.BitwiseNot +Usage: ~ expr - Returns the result of bitwise NOT of `expr`. + + +-- !query 235 +DESC FUNCTION | +-- !query 235 schema +struct +-- !query 235 output +Function: | +Class: org.apache.spark.sql.catalyst.expressions.BitwiseOr +Usage: expr1 | expr2 - Returns the result of bitwise OR of `expr1` and `expr2`. + + +-- !query 236 +DESC FUNCTION ^ +-- !query 236 schema +struct +-- !query 236 output +Function: ^ +Class: org.apache.spark.sql.catalyst.expressions.BitwiseXor +Usage: expr1 ^ expr2 - Returns the result of bitwise exclusive OR of `expr1` and `expr2`. 
+ + +-- !query 237 +DESC FUNCTION to_json +-- !query 237 schema +struct +-- !query 237 output +Function: to_json +Class: org.apache.spark.sql.catalyst.expressions.StructsToJson +Usage: to_json(expr[, options]) - Returns a json string with a given struct value + + +-- !query 238 +DESC FUNCTION from_json +-- !query 238 schema +struct +-- !query 238 output +Function: from_json +Class: org.apache.spark.sql.catalyst.expressions.JsonToStructs +Usage: from_json(jsonStr, schema[, options]) - Returns a struct value with the given `jsonStr` and `schema`. + + +-- !query 239 +DESC FUNCTION boolean +-- !query 239 schema +struct +-- !query 239 output +Function: boolean +Class: org.apache.spark.sql.catalyst.expressions.Cast +Usage: boolean(expr AS type) - Casts the value `expr` to the target data type `type`. + + +-- !query 240 +DESC FUNCTION tinyint +-- !query 240 schema +struct +-- !query 240 output +Function: tinyint +Class: org.apache.spark.sql.catalyst.expressions.Cast +Usage: tinyint(expr AS type) - Casts the value `expr` to the target data type `type`. + + +-- !query 241 +DESC FUNCTION smallint +-- !query 241 schema +struct +-- !query 241 output +Function: smallint +Class: org.apache.spark.sql.catalyst.expressions.Cast +Usage: smallint(expr AS type) - Casts the value `expr` to the target data type `type`. + + +-- !query 242 +DESC FUNCTION int +-- !query 242 schema +struct +-- !query 242 output +Function: int +Class: org.apache.spark.sql.catalyst.expressions.Cast +Usage: int(expr AS type) - Casts the value `expr` to the target data type `type`. + + +-- !query 243 +DESC FUNCTION bigint +-- !query 243 schema +struct +-- !query 243 output +Function: bigint +Class: org.apache.spark.sql.catalyst.expressions.Cast +Usage: bigint(expr AS type) - Casts the value `expr` to the target data type `type`. 
+ + +-- !query 244 +DESC FUNCTION float +-- !query 244 schema +struct +-- !query 244 output +Function: float +Class: org.apache.spark.sql.catalyst.expressions.Cast +Usage: float(expr AS type) - Casts the value `expr` to the target data type `type`. + + +-- !query 245 +DESC FUNCTION double +-- !query 245 schema +struct +-- !query 245 output +Function: double +Class: org.apache.spark.sql.catalyst.expressions.Cast +Usage: double(expr AS type) - Casts the value `expr` to the target data type `type`. + + +-- !query 246 +DESC FUNCTION decimal +-- !query 246 schema +struct +-- !query 246 output +Function: decimal +Class: org.apache.spark.sql.catalyst.expressions.Cast +Usage: decimal(expr AS type) - Casts the value `expr` to the target data type `type`. + + +-- !query 247 +DESC FUNCTION date +-- !query 247 schema +struct +-- !query 247 output +Function: date +Class: org.apache.spark.sql.catalyst.expressions.Cast +Usage: date(expr AS type) - Casts the value `expr` to the target data type `type`. + + +-- !query 248 +DESC FUNCTION timestamp +-- !query 248 schema +struct +-- !query 248 output +Function: timestamp +Class: org.apache.spark.sql.catalyst.expressions.Cast +Usage: timestamp(expr AS type) - Casts the value `expr` to the target data type `type`. + + +-- !query 249 +DESC FUNCTION binary +-- !query 249 schema +struct +-- !query 249 output +Function: binary +Class: org.apache.spark.sql.catalyst.expressions.Cast +Usage: binary(expr AS type) - Casts the value `expr` to the target data type `type`. + + +-- !query 250 +DESC FUNCTION string +-- !query 250 schema +struct +-- !query 250 output +Function: string +Class: org.apache.spark.sql.catalyst.expressions.Cast +Usage: string(expr AS type) - Casts the value `expr` to the target data type `type`. 
diff --git a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out index 315e1730ce7df..9f8494176b16c 100644 --- a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out @@ -1,144 +1,88 @@ -- Automatically generated by SQLQueryTestSuite --- Number of queries: 17 +-- Number of queries: 13 -- !query 0 -describe function to_json --- !query 0 schema -struct --- !query 0 output -Class: org.apache.spark.sql.catalyst.expressions.StructsToJson -Function: to_json -Usage: to_json(expr[, options]) - Returns a json string with a given struct value - - --- !query 1 -describe function extended to_json --- !query 1 schema -struct --- !query 1 output -Class: org.apache.spark.sql.catalyst.expressions.StructsToJson -Extended Usage: - Examples: - > SELECT to_json(named_struct('a', 1, 'b', 2)); - {"a":1,"b":2} - > SELECT to_json(named_struct('time', to_timestamp('2015-08-26', 'yyyy-MM-dd')), map('timestampFormat', 'dd/MM/yyyy')); - {"time":"26/08/2015"} - > SELECT to_json(array(named_struct('a', 1, 'b', 2)); - [{"a":1,"b":2}] - -Function: to_json -Usage: to_json(expr[, options]) - Returns a json string with a given struct value - - --- !query 2 select to_json(named_struct('a', 1, 'b', 2)) --- !query 2 schema +-- !query 0 schema struct --- !query 2 output +-- !query 0 output {"a":1,"b":2} --- !query 3 +-- !query 1 select to_json(named_struct('time', to_timestamp('2015-08-26', 'yyyy-MM-dd')), map('timestampFormat', 'dd/MM/yyyy')) --- !query 3 schema +-- !query 1 schema struct --- !query 3 output +-- !query 1 output {"time":"26/08/2015"} --- !query 4 +-- !query 2 select to_json(array(named_struct('a', 1, 'b', 2))) --- !query 4 schema +-- !query 2 schema struct --- !query 4 output +-- !query 2 output [{"a":1,"b":2}] --- !query 5 +-- !query 3 select to_json(named_struct('a', 1, 'b', 2), named_struct('mode', 
'PERMISSIVE')) --- !query 5 schema +-- !query 3 schema struct<> --- !query 5 output +-- !query 3 output org.apache.spark.sql.AnalysisException Must use a map() function for options;; line 1 pos 7 --- !query 6 +-- !query 4 select to_json(named_struct('a', 1, 'b', 2), map('mode', 1)) --- !query 6 schema +-- !query 4 schema struct<> --- !query 6 output +-- !query 4 output org.apache.spark.sql.AnalysisException A type of keys and values in map() must be string, but got MapType(StringType,IntegerType,false);; line 1 pos 7 --- !query 7 +-- !query 5 select to_json() --- !query 7 schema +-- !query 5 schema struct<> --- !query 7 output +-- !query 5 output org.apache.spark.sql.AnalysisException Invalid number of arguments for function to_json; line 1 pos 7 --- !query 8 -describe function from_json --- !query 8 schema -struct --- !query 8 output -Class: org.apache.spark.sql.catalyst.expressions.JsonToStructs -Function: from_json -Usage: from_json(jsonStr, schema[, options]) - Returns a struct value with the given `jsonStr` and `schema`. - - --- !query 9 -describe function extended from_json --- !query 9 schema -struct --- !query 9 output -Class: org.apache.spark.sql.catalyst.expressions.JsonToStructs -Extended Usage: - Examples: - > SELECT from_json('{"a":1, "b":0.8}', 'a INT, b DOUBLE'); - {"a":1, "b":0.8} - > SELECT from_json('{"time":"26/08/2015"}', 'time Timestamp', map('timestampFormat', 'dd/MM/yyyy')); - {"time":"2015-08-26 00:00:00.0"} - -Function: from_json -Usage: from_json(jsonStr, schema[, options]) - Returns a struct value with the given `jsonStr` and `schema`. 
- - --- !query 10 +-- !query 6 select from_json('{"a":1}', 'a INT') --- !query 10 schema +-- !query 6 schema struct> --- !query 10 output +-- !query 6 output {"a":1} --- !query 11 +-- !query 7 select from_json('{"time":"26/08/2015"}', 'time Timestamp', map('timestampFormat', 'dd/MM/yyyy')) --- !query 11 schema +-- !query 7 schema struct> --- !query 11 output +-- !query 7 output {"time":2015-08-26 00:00:00.0} --- !query 12 +-- !query 8 select from_json('{"a":1}', 1) --- !query 12 schema +-- !query 8 schema struct<> --- !query 12 output +-- !query 8 output org.apache.spark.sql.AnalysisException Expected a string literal instead of 1;; line 1 pos 7 --- !query 13 +-- !query 9 select from_json('{"a":1}', 'a InvalidType') --- !query 13 schema +-- !query 9 schema struct<> --- !query 13 output +-- !query 9 output org.apache.spark.sql.AnalysisException DataType invalidtype() is not supported.(line 1, pos 2) @@ -149,28 +93,28 @@ a InvalidType ; line 1 pos 7 --- !query 14 +-- !query 10 select from_json('{"a":1}', 'a INT', named_struct('mode', 'PERMISSIVE')) --- !query 14 schema +-- !query 10 schema struct<> --- !query 14 output +-- !query 10 output org.apache.spark.sql.AnalysisException Must use a map() function for options;; line 1 pos 7 --- !query 15 +-- !query 11 select from_json('{"a":1}', 'a INT', map('mode', 1)) --- !query 15 schema +-- !query 11 schema struct<> --- !query 15 output +-- !query 11 output org.apache.spark.sql.AnalysisException A type of keys and values in map() must be string, but got MapType(StringType,IntegerType,false);; line 1 pos 7 --- !query 16 +-- !query 12 select from_json() --- !query 16 schema +-- !query 12 schema struct<> --- !query 16 output +-- !query 12 output org.apache.spark.sql.AnalysisException Invalid number of arguments for function from_json; line 1 pos 7 diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala index 
4092862c430b1..7be28e72ba204 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala @@ -26,6 +26,7 @@ import org.apache.spark.sql.catalyst.planning.PhysicalOperation import org.apache.spark.sql.catalyst.plans.logical._ import org.apache.spark.sql.catalyst.rules.RuleExecutor import org.apache.spark.sql.catalyst.util.{fileToString, stringToFile} +import org.apache.spark.sql.execution.command.DescribeFunctionCommand import org.apache.spark.sql.test.SharedSQLContext import org.apache.spark.sql.types.StructType @@ -215,6 +216,7 @@ class SQLQueryTestSuite extends QueryTest with SharedSQLContext { def isSorted(plan: LogicalPlan): Boolean = plan match { case _: Join | _: Aggregate | _: Generate | _: Sample | _: Distinct => false case PhysicalOperation(_, _, Sort(_, true, _)) => true + case _: DescribeFunctionCommand => true case _ => plan.children.iterator.exists(isSorted) } diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala index 648b1798c66e0..7fa23e0cb8df0 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala @@ -1503,64 +1503,6 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils { } } - test("describe function") { - checkAnswer( - sql("DESCRIBE FUNCTION log"), - Row("Class: org.apache.spark.sql.catalyst.expressions.Logarithm") :: - Row("Function: log") :: - Row("Usage: log(base, expr) - Returns the logarithm of `expr` with `base`.") :: Nil - ) - // predicate operator - checkAnswer( - sql("DESCRIBE FUNCTION or"), - Row("Class: org.apache.spark.sql.catalyst.expressions.Or") :: - Row("Function: or") :: - Row("Usage: expr1 or expr2 - Logical OR.") :: Nil - ) - checkAnswer( - sql("DESCRIBE FUNCTION !"), - Row("Class: 
org.apache.spark.sql.catalyst.expressions.Not") :: - Row("Function: !") :: - Row("Usage: ! expr - Logical not.") :: Nil - ) - // arithmetic operators - checkAnswer( - sql("DESCRIBE FUNCTION +"), - Row("Class: org.apache.spark.sql.catalyst.expressions.Add") :: - Row("Function: +") :: - Row("Usage: expr1 + expr2 - Returns `expr1`+`expr2`.") :: Nil - ) - // comparison operators - checkAnswer( - sql("DESCRIBE FUNCTION <"), - Row("Class: org.apache.spark.sql.catalyst.expressions.LessThan") :: - Row("Function: <") :: - Row("Usage: expr1 < expr2 - Returns true if `expr1` is less than `expr2`.") :: Nil - ) - // STRING - checkAnswer( - sql("DESCRIBE FUNCTION 'concat'"), - Row("Class: org.apache.spark.sql.catalyst.expressions.Concat") :: - Row("Function: concat") :: - Row("Usage: concat(str1, str2, ..., strN) - " + - "Returns the concatenation of str1, str2, ..., strN.") :: Nil - ) - // extended mode - checkAnswer( - sql("DESCRIBE FUNCTION EXTENDED ^"), - Row("Class: org.apache.spark.sql.catalyst.expressions.BitwiseXor") :: - Row( - """Extended Usage: - | Examples: - | > SELECT 3 ^ 5; - | 2 - | """.stripMargin) :: - Row("Function: ^") :: - Row("Usage: expr1 ^ expr2 - Returns the result of " + - "bitwise exclusive OR of `expr1` and `expr2`.") :: Nil - ) - } - test("create a data source table without schema") { import testImplicits._ withTempPath { tempDir =>