R/pkg/tests/fulltests/test_sparkSQL.R (2 changes: 1 addition & 1 deletion)

@@ -3038,7 +3038,7 @@ test_that("Method coltypes() to get and set R's data types of a DataFrame", {
   expect_equal(coltypes(x), "map<string,string>")
 
   df <- selectExpr(read.json(jsonPath), "name", "(age * 1.21) as age")
-  expect_equal(dtypes(df), list(c("name", "string"), c("age", "decimal(24,2)")))
+  expect_equal(dtypes(df), list(c("name", "string"), c("age", "double")))
 
   df1 <- select(df, cast(df$age, "integer"))
   coltypes(df) <- c("character", "integer")
@@ -1591,23 +1591,29 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
 
   /**
    * Create an integral literal expression. The code selects the most narrow integral type
-   * possible, either a BigDecimal, a Long or an Integer is returned.
+   * possible, either a BigDecimal, a Double, a Long or an Integer is returned.
    */
   override def visitIntegerLiteral(ctx: IntegerLiteralContext): Literal = withOrigin(ctx) {
     BigDecimal(ctx.getText) match {
       case v if v.isValidInt =>
         Literal(v.intValue())
       case v if v.isValidLong =>
         Literal(v.longValue())
+      case v if v.isDecimalDouble && !conf.getConf(SQLConf.LEGACY_LITERALS_AS_DECIMAL) =>
+        Literal(v.doubleValue())
       case v => Literal(v.underlying())
     }
   }
 
   /**
-   * Create a decimal literal for a regular decimal number.
+   * Create a Double or Decimal literal for a regular decimal number.
    */
   override def visitDecimalLiteral(ctx: DecimalLiteralContext): Literal = withOrigin(ctx) {
-    Literal(BigDecimal(ctx.getText).underlying())
+    BigDecimal(ctx.getText) match {
+      case v if v.isDecimalDouble && !conf.getConf(SQLConf.LEGACY_LITERALS_AS_DECIMAL) =>
+        Literal(v.doubleValue())
+      case v => Literal(v.underlying())
+    }
   }
 
   /** Create a numeric literal expression. */
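Review note: both visitors above gate on `isDecimalDouble`, which accepts a value only when it survives the round trip through `Double` unchanged, so `0.3` and `1e40` qualify while a 26-digit integer does not. A minimal, self-contained sketch of the same narrowing rule, with the `SQLConf` lookup replaced by a plain boolean (`narrow` is a hypothetical helper, not a Spark API):

```scala
// Sketch only: mirrors the match in visitIntegerLiteral/visitDecimalLiteral,
// assuming nothing beyond the Scala standard library.
object LiteralNarrowing {
  def narrow(text: String, legacyAsDecimal: Boolean): Any = {
    val v = BigDecimal(text)
    if (v.isValidInt) v.intValue()
    else if (v.isValidLong) v.longValue()
    // isDecimalDouble holds when BigDecimal.decimal(v.doubleValue) == v,
    // i.e. the value survives the round trip through Double unchanged.
    else if (v.isDecimalDouble && !legacyAsDecimal) v.doubleValue()
    else v.underlying() // java.math.BigDecimal, later typed as DecimalType
  }

  def main(args: Array[String]): Unit = {
    println(narrow("787324", legacyAsDecimal = false))  // Int: 787324
    println(narrow("0.3", legacyAsDecimal = false))     // Double: 0.3
    println(narrow("1e40", legacyAsDecimal = false))    // Double: 1.0E40
    println(narrow("1e40", legacyAsDecimal = true))     // BigDecimal: 1E+40
    println(narrow("78732472347982492793712334", legacyAsDecimal = false)) // BigDecimal
  }
}
```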
@@ -1639,6 +1639,14 @@ object SQLConf {
       "java.time.* packages are used for the same purpose.")
     .booleanConf
     .createWithDefault(false)
+
+  val LEGACY_LITERALS_AS_DECIMAL = buildConf("spark.sql.legacy.literals.asDecimal")
+    .internal()
+    .doc("When set to true, literal values which don't fit in the long range are considered " +
+      "as decimals. If set to false (default), they are parsed as double if possible; if " +
+      "the value is not representable as double, then we fall back to decimal.")
+    .booleanConf
+    .createWithDefault(false)
 }
 
 /**
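Review note: to see the flag end to end, a hedged spark-shell sketch assuming this patch is applied; `spark` is the session the shell provides, and the expected types come from the golden files updated below:

```scala
import org.apache.spark.sql.types.{DecimalType, DoubleType}

// New default: a fractional literal that fits a Double parses as double.
assert(spark.sql("SELECT 0.3 AS c").schema("c").dataType == DoubleType)

// The internal flag restores the old decimal typing.
spark.conf.set("spark.sql.legacy.literals.asDecimal", "true")
assert(spark.sql("SELECT 0.3 AS c").schema("c").dataType == DecimalType(1, 1))
```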
@@ -437,8 +437,12 @@ class ExpressionParserSuite extends PlanTest {
   }
 
   test("literals") {
-    def testDecimal(value: String): Unit = {
-      assertEqual(value, Literal(BigDecimal(value).underlying))
+    def testDecimal(value: String, asDecimal: Boolean, parser: ParserInterface): Unit = {
+      if (asDecimal) {
+        assertEqual(value, Literal(BigDecimal(value).underlying), parser)
+      } else {
+        assertEqual(value, Literal(BigDecimal(value).doubleValue()), parser)
+      }
     }
 
     // NULL
@@ -451,18 +455,26 @@
     // Integral should have the narrowest possible type
    assertEqual("787324", Literal(787324))
    assertEqual("7873247234798249234", Literal(7873247234798249234L))
-    testDecimal("78732472347982492793712334")
-
-    // Decimal
-    testDecimal("7873247234798249279371.2334")
-
-    // Scientific Decimal
-    testDecimal("9.0e1")
-    testDecimal(".9e+2")
-    testDecimal("0.9e+2")
-    testDecimal("900e-1")
-    testDecimal("900.0E-1")
-    testDecimal("9.e+1")
+    Seq(true, false).foreach { asDecimal =>
+      val conf = new SQLConf()
+      conf.setConf(SQLConf.LEGACY_LITERALS_AS_DECIMAL, asDecimal)
+      val parser = new CatalystSqlParser(conf)
+      testDecimal("78732472347982492793712334", true, parser)
+      testDecimal("1e40", asDecimal, parser)
+
+      // Decimal
+      testDecimal("7873247234798249279371.2334", true, parser)
+
+      // Scientific Decimal
+      testDecimal("9.0e1", asDecimal, parser)
+      testDecimal(".9e+2", asDecimal, parser)
+      testDecimal("0.9e+2", asDecimal, parser)
+      testDecimal("900e-1", asDecimal, parser)
+      testDecimal("900.0E-1", asDecimal, parser)
+      testDecimal("9.e+1", asDecimal, parser)
+    }
 
     intercept(".e3")
 
     // Tiny Int Literal
@@ -15,6 +15,8 @@
 -- limitations under the License.
 --
 
+set spark.sql.legacy.literals.asDecimal=true;
+
 CREATE TEMPORARY VIEW t AS SELECT 1.0 as a, 0.0 as b;
 
 -- division, remainder and pmod by 0 return NULL
sql/core/src/test/resources/sql-tests/results/literals.sql.out (14 changes: 7 additions & 7 deletions)

@@ -167,17 +167,17 @@ select 1234567890123456789012345678901234567890.0
 -- !query 17
 select 1D, 1.2D, 1e10, 1.5e5, .10D, 0.10D, .1e5, .9e+2, 0.9e+2, 900e-1, 9.e+1
 -- !query 17 schema
-struct<1.0:double,1.2:double,1E+10:decimal(1,-10),1.5E+5:decimal(2,-4),0.1:double,0.1:double,1E+4:decimal(1,-4),9E+1:decimal(1,-1),9E+1:decimal(1,-1),90.0:decimal(3,1),9E+1:decimal(1,-1)>
+struct<1.0:double,1.2:double,1.0E10:double,150000.0:double,0.1:double,0.1:double,10000.0:double,90.0:double,90.0:double,90.0:double,90.0:double>
 -- !query 17 output
-1.0 1.2 10000000000 150000 0.1 0.1 10000 90 90 90 90
+1.0 1.2 1.0E10 150000.0 0.1 0.1 10000.0 90.0 90.0 90.0 90.0
 
 
 -- !query 18
 select -1D, -1.2D, -1e10, -1.5e5, -.10D, -0.10D, -.1e5
 -- !query 18 schema
-struct<-1.0:double,-1.2:double,-1E+10:decimal(1,-10),-1.5E+5:decimal(2,-4),-0.1:double,-0.1:double,-1E+4:decimal(1,-4)>
+struct<-1.0:double,-1.2:double,-1.0E10:double,-150000.0:double,-0.1:double,-0.1:double,-10000.0:double>
 -- !query 18 output
--1.0 -1.2 -10000000000 -150000 -0.1 -0.1 -10000
+-1.0 -1.2 -1.0E10 -150000.0 -0.1 -0.1 -10000.0
 
 
 -- !query 19
@@ -205,7 +205,7 @@ struct<1E+309:decimal(1,-309),-1E+309:decimal(1,-309)>
 -- !query 21
 select 0.3, -0.8, .5, -.18, 0.1111, .1111
 -- !query 21 schema
-struct<0.3:decimal(1,1),-0.8:decimal(1,1),0.5:decimal(1,1),-0.18:decimal(2,2),0.1111:decimal(4,4),0.1111:decimal(4,4)>
+struct<0.3:double,-0.8:double,0.5:double,-0.18:double,0.1111:double,0.1111:double>
 -- !query 21 output
 0.3 -0.8 0.5 -0.18 0.1111 0.1111
 
@@ -411,9 +411,9 @@ select X'XuZ'
 -- !query 42
 SELECT 3.14, -3.14, 3.14e8, 3.14e-8, -3.14e8, -3.14e-8, 3.14e+8, 3.14E8, 3.14E-8
 -- !query 42 schema
-struct<3.14:decimal(3,2),-3.14:decimal(3,2),3.14E+8:decimal(3,-6),3.14E-8:decimal(10,10),-3.14E+8:decimal(3,-6),-3.14E-8:decimal(10,10),3.14E+8:decimal(3,-6),3.14E+8:decimal(3,-6),3.14E-8:decimal(10,10)>
+struct<3.14:double,-3.14:double,3.14E8:double,3.14E-8:double,-3.14E8:double,-3.14E-8:double,3.14E8:double,3.14E8:double,3.14E-8:double>
 -- !query 42 output
-3.14 -3.14 314000000 0.0000000314 -314000000 -0.0000000314 314000000 314000000 0.0000000314
+3.14 -3.14 3.14E8 3.14E-8 -3.14E8 -3.14E-8 3.14E8 3.14E8 3.14E-8
 
 
 -- !query 43
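Review note: queries 17 and 18 are where the change is most visible; `1e10` used to type as `decimal(1,-10)`, a one-digit decimal with negative scale, and now types as a plain double. A hedged check against these goldens, assuming the patch is applied:

```scala
import org.apache.spark.sql.types.DoubleType

// Was DecimalType(1, -10) before this patch, per the old query 17 schema.
assert(spark.sql("SELECT 1e10 AS c").schema("c").dataType == DoubleType)

// Query 20 above is unchanged: 1E309 overflows the Double range, so
// isDecimalDouble rejects it and the literal stays decimal(1,-309).
```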
sql/core/src/test/resources/sql-tests/results/operators.sql.out (18 changes: 9 additions & 9 deletions)

@@ -21,15 +21,15 @@ struct<230:int>
 -- !query 2
 select -5.2
 -- !query 2 schema
-struct<-5.2:decimal(2,1)>
+struct<-5.2:double>
 -- !query 2 output
 -5.2
 
 
 -- !query 3
 select +6.8e0
 -- !query 3 schema
-struct<6.8:decimal(2,1)>
+struct<6.8:double>
 -- !query 3 output
 6.8
 
@@ -261,15 +261,15 @@ struct<CEIL(1234567890123456):bigint>
 -- !query 32
 select ceil(0.01)
 -- !query 32 schema
-struct<CEIL(0.01):decimal(1,0)>
+struct<CEIL(0.01):bigint>
 -- !query 32 output
 1
 
 
 -- !query 33
 select ceiling(-0.10)
 -- !query 33 schema
-struct<CEIL(-0.10):decimal(1,0)>
+struct<CEIL(-0.1):bigint>
 -- !query 33 output
 0
 
@@ -301,23 +301,23 @@ struct<FLOOR(1234567890123456):bigint>
 -- !query 37
 select floor(0.01)
 -- !query 37 schema
-struct<FLOOR(0.01):decimal(1,0)>
+struct<FLOOR(0.01):bigint>
 -- !query 37 output
 0
 
 
 -- !query 38
 select floor(-0.10)
 -- !query 38 schema
-struct<FLOOR(-0.10):decimal(1,0)>
+struct<FLOOR(-0.1):bigint>
 -- !query 38 output
 -1
 
 
 -- !query 39
 select 1 > 0.00001
 -- !query 39 schema
-struct<(CAST(1 AS BIGINT) > 0):boolean>
+struct<(CAST(1 AS DOUBLE) > 1.0E-5):boolean>
 -- !query 39 output
 true
 
@@ -365,15 +365,15 @@ struct<octet_length(abc):int>
 -- !query 45
 select abs(-3.13), abs('-2.19')
 -- !query 45 schema
-struct<abs(-3.13):decimal(3,2),abs(CAST(-2.19 AS DOUBLE)):double>
+struct<abs(-3.13):double,abs(CAST(-2.19 AS DOUBLE)):double>
 -- !query 45 output
 3.13 2.19
 
 
 -- !query 46
 select positive('-1.11'), positive(-1.11), negative('-1.11'), negative(-1.11)
 -- !query 46 schema
-struct<(+ CAST(-1.11 AS DOUBLE)):double,(+ -1.11):decimal(3,2),(- CAST(-1.11 AS DOUBLE)):double,(- -1.11):decimal(3,2)>
+struct<(+ CAST(-1.11 AS DOUBLE)):double,(+ -1.11):double,(- CAST(-1.11 AS DOUBLE)):double,(- -1.11):double>
 -- !query 46 output
 -1.11 -1.11 1.11 1.11
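Review note: two deltas here are worth calling out. `ceil`/`floor` over the retyped literals now resolve to the `double => bigint` overload instead of the decimal one, and `1 > 0.00001` becomes a double comparison, where the old plan (`CAST(1 AS BIGINT) > 0`) had truncated the fractional literal during coercion. A hedged spark-shell sketch, assuming the patch is applied:

```scala
import org.apache.spark.sql.types.LongType

// ceil over a double literal resolves to the bigint-returning overload.
assert(spark.sql("SELECT ceil(0.01) AS c").schema("c").dataType == LongType)

// The generated column name records the new plan shape (per query 39 above).
val plan = spark.sql("SELECT 1 > 0.00001").schema.head.name
assert(plan == "(CAST(1 AS DOUBLE) > 1.0E-5)")
```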
@@ -21,15 +21,15 @@ true
 -- !query 2
 select 1.0 = '1'
 -- !query 2 schema
-struct<(CAST(1.0 AS DOUBLE) = CAST(1 AS DOUBLE)):boolean>
+struct<(1.0 = CAST(1 AS DOUBLE)):boolean>
 -- !query 2 output
 true
 
 
 -- !query 3
 select 1.5 = '1.51'
 -- !query 3 schema
-struct<(CAST(1.5 AS DOUBLE) = CAST(1.51 AS DOUBLE)):boolean>
+struct<(1.5 = CAST(1.51 AS DOUBLE)):boolean>
 -- !query 3 output
 false
 
@@ -69,7 +69,7 @@ false
 -- !query 8
 select '1.5' > 0.5
 -- !query 8 schema
-struct<(CAST(1.5 AS DOUBLE) > CAST(0.5 AS DOUBLE)):boolean>
+struct<(CAST(1.5 AS DOUBLE) > 0.5):boolean>
 -- !query 8 output
 true
 
@@ -117,15 +117,15 @@ true
 -- !query 14
 select 2.0 >= '2.2'
 -- !query 14 schema
-struct<(CAST(2.0 AS DOUBLE) >= CAST(2.2 AS DOUBLE)):boolean>
+struct<(2.0 >= CAST(2.2 AS DOUBLE)):boolean>
 -- !query 14 output
 false
 
 
 -- !query 15
 select '1.5' >= 0.5
 -- !query 15 schema
-struct<(CAST(1.5 AS DOUBLE) >= CAST(0.5 AS DOUBLE)):boolean>
+struct<(CAST(1.5 AS DOUBLE) >= 0.5):boolean>
 -- !query 15 output
 true
 
@@ -173,15 +173,15 @@ false
 -- !query 21
 select 2.0 < '2.2'
 -- !query 21 schema
-struct<(CAST(2.0 AS DOUBLE) < CAST(2.2 AS DOUBLE)):boolean>
+struct<(2.0 < CAST(2.2 AS DOUBLE)):boolean>
 -- !query 21 output
 true
 
 
 -- !query 22
 select 0.5 < '1.5'
 -- !query 22 schema
-struct<(CAST(0.5 AS DOUBLE) < CAST(1.5 AS DOUBLE)):boolean>
+struct<(0.5 < CAST(1.5 AS DOUBLE)):boolean>
 -- !query 22 output
 true
 
@@ -229,15 +229,15 @@ true
 -- !query 28
 select 2.0 <= '2.2'
 -- !query 28 schema
-struct<(CAST(2.0 AS DOUBLE) <= CAST(2.2 AS DOUBLE)):boolean>
+struct<(2.0 <= CAST(2.2 AS DOUBLE)):boolean>
 -- !query 28 output
 true
 
 
 -- !query 29
 select 0.5 <= '1.5'
 -- !query 29 schema
-struct<(CAST(0.5 AS DOUBLE) <= CAST(1.5 AS DOUBLE)):boolean>
+struct<(0.5 <= CAST(1.5 AS DOUBLE)):boolean>
 -- !query 29 output
 true
@@ -40,7 +40,7 @@ SELECT rand(1.0)
 struct<>
 -- !query 4 output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'rand(1.0BD)' due to data type mismatch: argument 1 requires (int or bigint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7
+cannot resolve 'rand(1.0D)' due to data type mismatch: argument 1 requires (int or bigint) type, however, '1.0D' is of double type.; line 1 pos 7
 
 
 -- !query 5
@@ -172,16 +172,16 @@ WHERE t1b IN (SELECT Max(t2b)
               FROM t2
               GROUP BY t2a)
 -- !query 9 schema
-struct<t1a:string,t1b:smallint,t1c:int,t1d:bigint,t1e:float,t1f:double,t1g:decimal(2,-2),t1h:timestamp,t1i:date>
+struct<t1a:string,t1b:smallint,t1c:int,t1d:bigint,t1e:float,t1f:double,t1g:double,t1h:timestamp,t1i:date>
 -- !query 9 output
-t1a 6 8 10 15.0 20.0 2000 2014-04-04 01:00:00 2014-04-04
-t1a 6 8 10 15.0 20.0 2000 2014-04-04 01:02:00.001 2014-04-04
-t1b 8 16 19 17.0 25.0 2600 2014-05-04 01:01:00 2014-05-04
-t1c 8 16 19 17.0 25.0 2600 2014-05-04 01:02:00.001 2014-05-05
-t1d 10 NULL 12 17.0 25.0 2600 2015-05-04 01:01:00 2015-05-04
-t1e 10 NULL 19 17.0 25.0 2600 2014-05-04 01:01:00 2014-05-04
-t1e 10 NULL 19 17.0 25.0 2600 2014-09-04 01:02:00.001 2014-09-04
-t1e 10 NULL 25 17.0 25.0 2600 2014-08-04 01:01:00 2014-08-04
+t1a 6 8 10 15.0 20.0 2000.0 2014-04-04 01:00:00 2014-04-04
+t1a 6 8 10 15.0 20.0 2000.0 2014-04-04 01:02:00.001 2014-04-04
+t1b 8 16 19 17.0 25.0 2600.0 2014-05-04 01:01:00 2014-05-04
+t1c 8 16 19 17.0 25.0 2600.0 2014-05-04 01:02:00.001 2014-05-05
+t1d 10 NULL 12 17.0 25.0 2600.0 2015-05-04 01:01:00 2015-05-04
+t1e 10 NULL 19 17.0 25.0 2600.0 2014-05-04 01:01:00 2014-05-04
+t1e 10 NULL 19 17.0 25.0 2600.0 2014-09-04 01:02:00.001 2014-09-04
+t1e 10 NULL 25 17.0 25.0 2600.0 2014-08-04 01:01:00 2014-08-04
 
 
 -- !query 10