diff --git a/R/pkg/NAMESPACE b/R/pkg/NAMESPACE
index b927a6b96b810..91f6e6dc8a0e6 100644
--- a/R/pkg/NAMESPACE
+++ b/R/pkg/NAMESPACE
@@ -202,6 +202,7 @@ exportMethods("%<=>%",
               "%in%",
               "abs",
               "acos",
+              "acosh",
               "add_months",
               "alias",
               "approx_count_distinct",
@@ -232,8 +233,10 @@ exportMethods("%<=>%",
               "asc_nulls_last",
               "ascii",
               "asin",
+              "asinh",
               "assert_true",
               "atan",
+              "atanh",
               "atan2",
               "avg",
               "base64",
diff --git a/R/pkg/R/functions.R b/R/pkg/R/functions.R
index 039d28a3a37b6..b12f7b472ec83 100644
--- a/R/pkg/R/functions.R
+++ b/R/pkg/R/functions.R
@@ -455,6 +455,19 @@ setMethod("acos",
             column(jc)
           })
 
+#' @details
+#' \code{acosh}: Computes inverse hyperbolic cosine of the input column.
+#'
+#' @rdname column_math_functions
+#' @aliases acosh acosh,Column-method
+#' @note acosh since 3.1.0
+setMethod("acosh",
+          signature(x = "Column"),
+          function(x) {
+            jc <- callJStatic("org.apache.spark.sql.functions", "acosh", x@jc)
+            column(jc)
+          })
+
 #' @details
 #' \code{approx_count_distinct}: Returns the approximate number of distinct items in a group.
 #'
@@ -522,6 +535,19 @@ setMethod("asin",
             column(jc)
           })
 
+#' @details
+#' \code{asinh}: Computes inverse hyperbolic sine of the input column.
+#'
+#' @rdname column_math_functions
+#' @aliases asinh asinh,Column-method
+#' @note asinh since 3.1.0
+setMethod("asinh",
+          signature(x = "Column"),
+          function(x) {
+            jc <- callJStatic("org.apache.spark.sql.functions", "asinh", x@jc)
+            column(jc)
+          })
+
 #' @details
 #' \code{atan}: Returns the inverse tangent of the given value,
 #' as if computed by \code{java.lang.Math.atan()}
@@ -536,6 +562,19 @@ setMethod("atan",
             column(jc)
           })
 
+#' @details
+#' \code{atanh}: Computes inverse hyperbolic tangent of the input column.
+#'
+#' @rdname column_math_functions
+#' @aliases atanh atanh,Column-method
+#' @note atanh since 3.1.0
+setMethod("atanh",
+          signature(x = "Column"),
+          function(x) {
+            jc <- callJStatic("org.apache.spark.sql.functions", "atanh", x@jc)
+            column(jc)
+          })
+
 #' avg
 #'
 #' Aggregate function: returns the average of the values in a group.
diff --git a/R/pkg/tests/fulltests/test_sparkSQL.R b/R/pkg/tests/fulltests/test_sparkSQL.R
index 45de1ef1bd3d1..81d4e14df791d 100644
--- a/R/pkg/tests/fulltests/test_sparkSQL.R
+++ b/R/pkg/tests/fulltests/test_sparkSQL.R
@@ -1430,6 +1430,7 @@ test_that("column functions", {
     nth_value(column("v"), 3) + nth_value(column("z"), 4L, FALSE)
   c28 <- asc_nulls_first(c1) + asc_nulls_last(c1) +
     desc_nulls_first(c1) + desc_nulls_last(c1)
+  c29 <- acosh(c1) + asinh(c1) + atanh(c1)
 
   # Test if base::is.nan() is exposed
   expect_equal(is.nan(c("a", "b")), c(FALSE, FALSE))
diff --git a/python/docs/source/reference/pyspark.sql.rst b/python/docs/source/reference/pyspark.sql.rst
index 3f903fe8c7acd..0dc2f6e55bb96 100644
--- a/python/docs/source/reference/pyspark.sql.rst
+++ b/python/docs/source/reference/pyspark.sql.rst
@@ -307,6 +307,7 @@ Functions
 
     abs
     acos
+    acosh
     add_months
     aggregate
     approxCountDistinct
@@ -331,8 +332,10 @@ Functions
     asc_nulls_last
     ascii
     asin
+    asinh
     assert_true
     atan
+    atanh
     atan2
     avg
     base64
@@ -583,4 +586,3 @@ Grouping
     GroupedData.pivot
     GroupedData.sum
     PandasCogroupedOps.applyInPandas
-
diff --git a/python/pyspark/sql/functions.py b/python/pyspark/sql/functions.py
index 4af5d1f484ee4..ea91e8593e21f 100644
--- a/python/pyspark/sql/functions.py
+++ b/python/pyspark/sql/functions.py
@@ -220,6 +220,19 @@ def acos(col):
     return _invoke_function_over_column("acos", col)
 
 
+def acosh(col):
+    """
+    Computes inverse hyperbolic cosine of the input column.
+
+    .. versionadded:: 3.1.0
+
+    Returns
+    -------
+    :class:`Column`
+    """
+    return _invoke_function_over_column("acosh", col)
+
+
 def asin(col):
     """
     .. versionadded:: 1.3.0
@@ -233,6 +246,19 @@ def asin(col):
     return _invoke_function_over_column("asin", col)
 
 
+def asinh(col):
+    """
+    Computes inverse hyperbolic sine of the input column.
+
+    .. versionadded:: 3.1.0
+
+    Returns
+    -------
+    :class:`Column`
+    """
+    return _invoke_function_over_column("asinh", col)
+
+
 def atan(col):
     """
     .. versionadded:: 1.4.0
@@ -245,6 +271,19 @@ def atan(col):
     return _invoke_function_over_column("atan", col)
 
 
+def atanh(col):
+    """
+    Computes inverse hyperbolic tangent of the input column.
+
+    .. versionadded:: 3.1.0
+
+    Returns
+    -------
+    :class:`Column`
+    """
+    return _invoke_function_over_column("atanh", col)
+
+
 @since(1.4)
 def cbrt(col):
     """
diff --git a/python/pyspark/sql/functions.pyi b/python/pyspark/sql/functions.pyi
index 252f883b5fb09..50e178df9996f 100644
--- a/python/pyspark/sql/functions.pyi
+++ b/python/pyspark/sql/functions.pyi
@@ -260,12 +260,15 @@ def map_zip_with(
 ) -> Column: ...
 def abs(col: ColumnOrName) -> Column: ...
 def acos(col: ColumnOrName) -> Column: ...
+def acosh(col: ColumnOrName) -> Column: ...
 def asc(col: ColumnOrName) -> Column: ...
 def asc_nulls_first(col: ColumnOrName) -> Column: ...
 def asc_nulls_last(col: ColumnOrName) -> Column: ...
 def ascii(col: ColumnOrName) -> Column: ...
 def asin(col: ColumnOrName) -> Column: ...
+def asinh(col: ColumnOrName) -> Column: ...
 def atan(col: ColumnOrName) -> Column: ...
+def atanh(col: ColumnOrName) -> Column: ...
 @overload
 def atan2(col1: ColumnOrName, col2: ColumnOrName) -> Column: ...
 @overload
diff --git a/python/pyspark/sql/tests/test_functions.py b/python/pyspark/sql/tests/test_functions.py
index 32549343d938f..2858bdeca0d5a 100644
--- a/python/pyspark/sql/tests/test_functions.py
+++ b/python/pyspark/sql/tests/test_functions.py
@@ -116,6 +116,7 @@ def assert_close(a, b):
             c = get_values(b)
             diff = [abs(v - c[k]) < 1e-6 for k, v in enumerate(a)]
             return sum(diff) == len(a)
+
         assert_close([math.cos(i) for i in range(10)],
                      df.select(functions.cos(df.a)).collect())
         assert_close([math.cos(i) for i in range(10)],
@@ -139,6 +140,21 @@ def assert_close(a, b):
         assert_close([math.hypot(i, 2) for i in range(10)],
                      df.select(functions.hypot(df.a, 2)).collect())
 
+    def test_inverse_trig_functions(self):
+        from pyspark.sql import functions
+
+        funs = [
+            (functions.acosh, "ACOSH"),
+            (functions.asinh, "ASINH"),
+            (functions.atanh, "ATANH"),
+        ]
+
+        cols = ["a", functions.col("a")]
+
+        for f, alias in funs:
+            for c in cols:
+                self.assertIn(f"{alias}(a)", repr(f(c)))
+
     def test_rand_functions(self):
         df = self.df
         from pyspark.sql import functions
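Not part of the patch: the added Python test only checks the generated column names and the R test only builds the expressions, so the sketch below is one way to smoke-test actual evaluation. It assumes a local Spark 3.1.0+ build that includes this change; names such as demo_df are illustrative, and the input columns are chosen so that acosh (x >= 1) and atanh (|x| < 1) both stay inside their domains.

import math

from pyspark.sql import SparkSession
from pyspark.sql import functions as F

# Hypothetical local session purely for trying the new column functions.
spark = SparkSession.builder.master("local[1]").appName("inverse-hyperbolic-demo").getOrCreate()

# Column "a" feeds acosh/asinh (needs x >= 1), column "b" feeds atanh (needs |x| < 1).
demo_df = spark.createDataFrame([(1.5, 0.5)], ["a", "b"])

row = demo_df.select(
    F.acosh("a").alias("acosh_a"),
    F.asinh("a").alias("asinh_a"),
    F.atanh("b").alias("atanh_b"),
).first()

# Compare against Python's math module implementations of the same functions.
assert abs(row["acosh_a"] - math.acosh(1.5)) < 1e-9
assert abs(row["asinh_a"] - math.asinh(1.5)) < 1e-9
assert abs(row["atanh_b"] - math.atanh(0.5)) < 1e-9

spark.stop()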