diff --git a/src/tokenize.jl b/src/tokenize.jl
index c330113f..730052f2 100644
--- a/src/tokenize.jl
+++ b/src/tokenize.jl
@@ -766,15 +766,17 @@ function lex_xor(l::Lexer)
 end
 
 function accept_number(l::Lexer, f::F) where {F}
+    lexed_number = false
     while true
         pc, ppc = dpeekchar(l)
         if pc == '_' && !f(ppc)
-            return
+            return lexed_number
        elseif f(pc) || pc == '_'
             readchar(l)
         else
-            return
+            return lexed_number
         end
+        lexed_number = true
     end
 end
 
@@ -864,7 +866,9 @@ function lex_digit(l::Lexer, kind)
         if accept(l, "pP")
             kind = K"Float"
             accept(l, "+-−")
-            accept_number(l, isdigit)
+            if !accept_number(l, isdigit)
+                return emit_error(l, K"ErrorInvalidNumericConstant")
+            end
         elseif isfloat
             return emit_error(l, K"ErrorInvalidNumericConstant")
         end
diff --git a/test/tokenize.jl b/test/tokenize.jl
index 246e4799..5c969ade 100644
--- a/test/tokenize.jl
+++ b/test/tokenize.jl
@@ -589,6 +589,7 @@ end
     @test kind.(collect(tokenize("3.2e2.2"))) == [K"ErrorInvalidNumericConstant", K"Integer", K"EndMarker"]
     @test kind.(collect(tokenize("3e2.2"))) == [K"ErrorInvalidNumericConstant", K"Integer", K"EndMarker"]
     @test kind.(collect(tokenize("0b101__101"))) == [K"BinInt", K"Identifier", K"EndMarker"]
+    @test tok("0x1p").kind == K"ErrorInvalidNumericConstant"
 end
 
 @testset "floating points" begin