@@ -224,10 +224,22 @@ impl<'input> Tokenizer<'input> {
 
     fn process_number(&mut self) -> Result<TokenSpan, TokenizationError> {
         let (start_idx, start_char) = self.lookahead.expect("Unexpected end of input, was expecting numeric char");
+        let mut last_index = start_idx;
+        let mut decimal_seen: bool = false;
+        let mut exponent_seen: bool = false;
+        let mut unary_seen: bool = false;
+        if start_char == '.' {
+            decimal_seen = true
+        }
 
         let maybe_second_char = self.chars.peek();
         match maybe_second_char {
-            None => return Ok((start_idx, TokType::Integer, start_idx + 1)),
+            None => {
+                if decimal_seen {
+                    return Err(self.make_error("Lone decimal is an invalid literal".to_string(), start_idx))
+                }
+                return Ok((start_idx, TokType::Integer, start_idx + 1))
+            },
             Some((_second_idx, second_char)) if start_char == '0' => {
                 match second_char {
                     'x' | 'X' => { return self.process_hexadecimal() }
@@ -240,15 +252,6 @@ impl<'input> Tokenizer<'input> {
            _ => { }
        }
 
-        let mut last_index = start_idx;
-        let mut decimal_seen: bool = false;
-        let mut exponent_seen: bool = false;
-        let mut unary_seen: bool = false;
-        match start_char {
-            '.' => { decimal_seen = true }
-            '+' | '-' => { unary_seen = true }
-            _ => { }
-        }
         loop {
             match self.chars.peek() {
                 None => {
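For context, here is a minimal, standalone sketch of the flag-tracking scan the new code builds toward. It is not this crate's API: `scan_number`, its `&str` input, and the plain `String` error are stand-ins for the real `Tokenizer`, `TokenSpan`, and `TokenizationError` types; the hex path that `process_number` hands to `process_hexadecimal` is ignored; and the diff's `unary_seen` flag is replaced here by a lookbehind check that only accepts a sign immediately after `e`/`E`.

// Standalone sketch only: mirrors the decimal/exponent flag tracking from the
// diff, with a free function and String errors instead of the crate's types.
fn scan_number(input: &str) -> Result<&str, String> {
    let mut chars = input.char_indices().peekable();
    let (start_idx, start_char) = chars.next().ok_or_else(|| "empty input".to_string())?;

    let mut decimal_seen = start_char == '.';
    let mut exponent_seen = false;
    let mut last_index = start_idx;

    // A "." with nothing after it is rejected up front, as in the new None arm.
    if decimal_seen && chars.peek().is_none() {
        return Err("Lone decimal is an invalid literal".to_string());
    }

    while let Some(&(idx, c)) = chars.peek() {
        match c {
            '0'..='9' => {}
            '.' if !decimal_seen && !exponent_seen => decimal_seen = true,
            'e' | 'E' if !exponent_seen => exponent_seen = true,
            // A unary sign is only valid immediately after the exponent marker.
            '+' | '-' if matches!(input[..idx].chars().last(), Some('e') | Some('E')) => {}
            _ => break,
        }
        last_index = idx;
        chars.next();
    }

    // Every accepted character is ASCII, so last_index covers the literal's final byte.
    Ok(&input[start_idx..=last_index])
}

fn main() {
    assert_eq!(scan_number("3.14e-2"), Ok("3.14e-2"));
    assert_eq!(scan_number("42"), Ok("42"));
    assert!(scan_number(".").is_err());
}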